| skipped 53 lines |
54 | 54 | | decoded_content = response_content.decode(charset, "ignore") |
55 | 55 | | html_text = decoded_content |
56 | 56 | | |
| 57 | + | error = None |
57 | 58 | | if status_code == 0: |
58 | 59 | | error = CheckError("Connection lost") |
59 | | - | else: |
60 | | - | error = None |
61 | 60 | | |
62 | 61 | | logger.debug(html_text) |
63 | 62 | | |
| skipped 9 lines |
73 | 72 | | error = CheckError("Interrupted") |
74 | 73 | | except Exception as e: |
75 | 74 | | # python-specific exceptions |
76 | | - | if sys.version_info.minor > 6: |
77 | | - | if isinstance(e, ssl.SSLCertVerificationError) or isinstance( |
78 | | - | e, ssl.SSLError |
79 | | - | ): |
80 | | - | error = CheckError("SSL", str(e)) |
 | 75 | + | # ssl.SSLCertVerificationError (3.7+) is a subclass of
 | 76 | + | # ssl.SSLError, so one isinstance check covers both
 | 77 | + | if isinstance(e, ssl.SSLError):
 | 78 | + |     error = CheckError("SSL", str(e))
81 | 79 | | else: |
82 | 80 | | logger.debug(e, exc_info=True) |
83 | 81 | | error = CheckError("Unexpected", str(e)) |
| skipped 25 lines |
109 | 107 | | return None |
110 | 108 | | |
111 | 109 | | |
| 110 | + | def debug_response_logging(url, html_text, status_code, check_error): |
| 111 | + | with open("debug.log", "a") as f: |
| 112 | + | status = status_code or "No response" |
| 113 | + | f.write(f"url: {url}\nerror: {check_error}\nr: {status}\n") |
| 114 | + | if html_text: |
| 115 | + | f.write(f"code: {status}\nresponse: {str(html_text)}\n") |
| 116 | + | |
| 117 | + | |
112 | 118 | | def process_site_result( |
113 | 119 | | response, query_notify, logger, results_info: QueryResultWrapper, site: MaigretSite |
114 | 120 | | ): |
| skipped 27 lines |
142 | 148 | | response_time = None |
143 | 149 | | |
144 | 150 | | if logger.level == logging.DEBUG: |
145 | | - | with open("debug.txt", "a") as f: |
146 | | - | status = status_code or "No response" |
147 | | - | f.write(f"url: {url}\nerror: {check_error}\nr: {status}\n") |
148 | | - | if html_text: |
149 | | - | f.write(f"code: {status}\nresponse: {str(html_text)}\n") |
| 151 | + | debug_response_logging(url, html_text, status_code, check_error) |
150 | 152 | | |
151 | 153 | | # additional check for errors |
152 | 154 | | if status_code and not check_error: |
| skipped 1 lines |
154 | 156 | | html_text, status_code, site.errors, site.ignore403 |
155 | 157 | | ) |
156 | 158 | | |
157 | | - | if site.activation and html_text: |
158 | | - | is_need_activation = any( |
159 | | - | [s for s in site.activation["marks"] if s in html_text] |
160 | | - | ) |
161 | | - | if is_need_activation: |
162 | | - | method = site.activation["method"] |
163 | | - | try: |
164 | | - | activate_fun = getattr(ParsingActivator(), method) |
165 | | - | # TODO: async call |
166 | | - | activate_fun(site, logger) |
167 | | - | except AttributeError: |
168 | | - | logger.warning( |
169 | | - | f"Activation method {method} for site {site.name} not found!" |
170 | | - | ) |
171 | | - | except Exception as e: |
172 | | - | logger.warning(f"Failed activation {method} for site {site.name}: {str(e)}", exc_info=True) |
173 | | - | # TODO: temporary check error |
| 159 | + | # parsing activation |
 | 160 | + | is_need_activation = bool(html_text) and any(
 | 161 | + |     s in html_text for s in (site.activation or {}).get("marks", [])
 | 162 | + | )
| 163 | + | |
| 164 | + | if site.activation and html_text and is_need_activation: |
| 165 | + | method = site.activation["method"] |
| 166 | + | try: |
| 167 | + | activate_fun = getattr(ParsingActivator(), method) |
| 168 | + | # TODO: async call |
| 169 | + | activate_fun(site, logger) |
| 170 | + | except AttributeError: |
| 171 | + | logger.warning( |
| 172 | + | f"Activation method {method} for site {site.name} not found!" |
| 173 | + | ) |
| 174 | + | except Exception as e: |
| 175 | + | logger.warning( |
| 176 | + | f"Failed activation {method} for site {site.name}: {str(e)}", |
| 177 | + | exc_info=True, |
| 178 | + | ) |
| 179 | + | # TODO: temporary check error |
174 | 180 | | |
175 | 181 | | site_name = site.pretty_name |
176 | 182 | | # presense flags |
177 | 183 | | # True by default |
178 | 184 | | presense_flags = site.presense_strs |
179 | 185 | | is_presense_detected = False |
| 186 | + | |
180 | 187 | | if html_text: |
181 | 188 | | if not presense_flags: |
182 | 189 | | is_presense_detected = True |
| skipped 79 lines |
262 | 269 | | results_info["ids_usernames"] = new_usernames |
263 | 270 | | results_info["ids_links"] = eval(extracted_ids_data.get("links", "[]")) |
264 | 271 | | result.ids_data = extracted_ids_data |
265 | | - | |
266 | | - | # Notify caller about results of query. |
267 | | - | query_notify.update(result, site.similar_search) |
268 | 272 | | |
269 | 273 | | # Save status of request |
270 | 274 | | results_info["status"] = result |
| skipped 142 lines |
413 | 417 | | response, query_notify, logger, default_result, site |
414 | 418 | | ) |
415 | 419 | | |
| 420 | + | query_notify.update(response_result['status'], site.similar_search) |
| 421 | + | |
416 | 422 | | return site.name, response_result |
417 | 423 | | |
418 | 424 | | |
| skipped 198 lines |
617 | 623 | | "disabled": False, |
618 | 624 | | } |
619 | 625 | | |
620 | | - | try: |
621 | | - | check_data = [ |
622 | | - | (site.username_claimed, QueryStatus.CLAIMED), |
623 | | - | (site.username_unclaimed, QueryStatus.AVAILABLE), |
624 | | - | ] |
625 | | - | except Exception as e: |
626 | | - | logger.error(e) |
627 | | - | logger.error(site.__dict__) |
628 | | - | check_data = [] |
| 626 | + | check_data = [ |
| 627 | + | (site.username_claimed, QueryStatus.CLAIMED), |
| 628 | + | (site.username_unclaimed, QueryStatus.AVAILABLE), |
| 629 | + | ] |
629 | 630 | | |
630 | 631 | | logger.info(f"Checking {site.name}...") |
631 | 632 | | |
| skipped 94 lines |