■ ■ ■ ■ ■ ■
changedetectionio/fetch_site_status.py
| skipped 20 lines |
21 | 21 | | self.datastore = datastore |
22 | 22 | | |
23 | 23 | | # If there was a proxy list enabled, figure out what proxy_args/which proxy to use |
| 24 | + | # Returns the proxy as a URL |
24 | 25 | | # if watch.proxy use that |
25 | 26 | | # fetcher.proxy_override = watch.proxy or main config proxy |
26 | 27 | | # Allows override the proxy on a per-request basis |
| skipped 6 lines |
33 | 34 | | |
34 | 35 | | # If its a valid one |
35 | 36 | | if any([watch['proxy'] in p for p in self.datastore.proxy_list]): |
36 | | - | proxy_args = watch['proxy'] |
| 37 | + | proxy_args = self.datastore.proxy_list.get(watch['proxy']).get('url') |
37 | 38 | | |
38 | 39 | | # not valid (including None), try the system one |
39 | 40 | | else: |
40 | 41 | | system_proxy = self.datastore.data['settings']['requests']['proxy'] |
41 | 42 | | # Is not None and exists |
42 | | - | if any([system_proxy in p for p in self.datastore.proxy_list]): |
43 | | - | proxy_args = system_proxy |
| 43 | + | if self.datastore.proxy_list.get(system_proxy):
| 44 | + | proxy_args = self.datastore.proxy_list.get(system_proxy).get('url') |
44 | 45 | | |
45 | 46 | | # Fallback - Did not resolve anything, use the first available |
46 | 47 | | if proxy_args is None: |
47 | | - | proxy_args = self.datastore.proxy_list[0][0] |
| 48 | + | first_default = list(self.datastore.proxy_list)[0] |
| 49 | + | proxy_args = self.datastore.proxy_list.get(first_default).get('url') |
48 | 50 | | |
49 | 51 | | return proxy_args |
50 | 52 | | |
| skipped 17 lines |
68 | 70 | | stripped_text_from_html = "" |
69 | 71 | | |
70 | 72 | | watch = self.datastore.data['watching'].get(uuid) |
| 73 | + | if not watch: |
| 74 | + | return |
71 | 75 | | |
72 | 76 | | # Protect against file:// access |
73 | 77 | | if re.search(r'^file', watch['url'], re.IGNORECASE) and not os.getenv('ALLOW_FILE_URI', False): |
| skipped 16 lines |
90 | 94 | | if 'Accept-Encoding' in request_headers and "br" in request_headers['Accept-Encoding']: |
91 | 95 | | request_headers['Accept-Encoding'] = request_headers['Accept-Encoding'].replace(', br', '') |
92 | 96 | | |
93 | | - | timeout = self.datastore.data['settings']['requests']['timeout'] |
| 97 | + | timeout = self.datastore.data['settings']['requests'].get('timeout') |
94 | 98 | | url = watch.get('url') |
95 | 99 | | request_body = self.datastore.data['watching'][uuid].get('body') |
96 | 100 | | request_method = self.datastore.data['watching'][uuid].get('method') |
| skipped 13 lines |
110 | 114 | | # If the klass doesnt exist, just use a default |
111 | 115 | | klass = getattr(content_fetcher, "html_requests") |
112 | 116 | | |
113 | | - | |
114 | | - | proxy_args = self.set_proxy_from_list(watch) |
115 | | - | fetcher = klass(proxy_override=proxy_args) |
| 117 | + | proxy_url = self.set_proxy_from_list(watch) |
| 118 | + | if proxy_url: |
| 119 | + | print ("UUID {} Using proxy {}".format(uuid, proxy_url)) |
| 120 | + | fetcher = klass(proxy_override=proxy_url) |
116 | 121 | | |
117 | 122 | | # Configurable per-watch or global extra delay before extracting text (for webDriver types) |
118 | 123 | | system_webdriver_delay = self.datastore.data['settings']['application'].get('webdriver_delay', None) |
| skipped 201 lines |