| skipped 59 lines |
60 | 60 | | ) |
61 | 61 | | |
62 | 62 | | |
def extract_ids_from_url(url: str, db: MaigretDatabase) -> dict:
    """Collect identifiers recognizable in *url* by any site in the database.

    Each site is asked to parse the URL via ``extract_id_from_url``; sites
    that do not recognize it return a falsy value and are ignored.  The
    result maps every extracted identifier to its identifier type (e.g.
    ``'username'``).  If several sites yield the same identifier, the one
    iterated last wins.
    """
    found = {}
    for site in db.sites:
        extracted = site.extract_id_from_url(url)
        if extracted:
            identifier, id_type = extracted
            found[identifier] = id_type
    return found
| 72 | + | |
| 73 | + | |
63 | 74 | | def extract_ids_from_page(url, logger, timeout=5) -> dict: |
64 | 75 | | results = {} |
65 | 76 | | # url, headers |
| skipped 39 lines |
105 | 116 | | ids_results[u] = utype |
106 | 117 | | |
107 | 118 | | for url in dictionary.get('ids_links', []): |
108 | | - | for s in db.sites: |
109 | | - | u = s.detect_username(url) |
110 | | - | if u: |
111 | | - | ids_results[u] = 'username' |
| 119 | + | ids_results.update(extract_ids_from_url(url, db)) |
| 120 | + | |
112 | 121 | | return ids_results |
113 | 122 | | |
114 | 123 | | |
| skipped 14 lines |
129 | 138 | | ) |
130 | 139 | | parser.add_argument( |
131 | 140 | | "username", |
132 | | - | nargs='?', |
| 141 | + | nargs='*', |
133 | 142 | | metavar="USERNAMES", |
134 | | - | action="append", |
135 | | - | help="One or more usernames to check with social networks.", |
| 143 | + | help="One or more usernames to search by.", |
136 | 144 | | ) |
137 | 145 | | parser.add_argument( |
138 | 146 | | "--version", |
| skipped 92 lines |
231 | 239 | | help="Make requests over a proxy. e.g. socks5://127.0.0.1:1080", |
232 | 240 | | ) |
233 | 241 | | |
234 | | - | filter_group = parser.add_argument_group('Site filtering', 'Options to set site search scope') |
| 242 | + | filter_group = parser.add_argument_group( |
| 243 | + | 'Site filtering', 'Options to set site search scope' |
| 244 | + | ) |
235 | 245 | | filter_group.add_argument( |
236 | 246 | | "-a", |
237 | 247 | | "--all-sites", |
| skipped 31 lines |
269 | 279 | | modes_group = parser.add_argument_group( |
270 | 280 | | 'Operating modes', |
271 | 281 | | 'Various functions except the default search by a username. ' |
272 | | - | 'Modes are executed sequentially in the order of declaration.' |
| 282 | + | 'Modes are executed sequentially in the order of declaration.', |
273 | 283 | | ) |
274 | 284 | | modes_group.add_argument( |
275 | 285 | | "--parse", |
| skipped 20 lines |
296 | 306 | | "--stats", |
297 | 307 | | action="store_true", |
298 | 308 | | default=False, |
299 | | - | help="Show database statistics (most frequent sites engines and tags)." |
| 309 | + | help="Show database statistics (most frequent sites engines and tags).", |
300 | 310 | | ) |
301 | 311 | | |
302 | | - | output_group = parser.add_argument_group('Output options', 'Options to change verbosity and view of the console output') |
| 312 | + | output_group = parser.add_argument_group( |
| 313 | + | 'Output options', 'Options to change verbosity and view of the console output' |
| 314 | + | ) |
303 | 315 | | output_group.add_argument( |
304 | 316 | | "--print-not-found", |
305 | 317 | | action="store_true", |
| skipped 48 lines |
354 | 366 | | help="Don't show progressbar.", |
355 | 367 | | ) |
356 | 368 | | |
357 | | - | report_group = parser.add_argument_group('Report formats', 'Supported formats of report files') |
| 369 | + | report_group = parser.add_argument_group( |
| 370 | + | 'Report formats', 'Supported formats of report files' |
| 371 | + | ) |
358 | 372 | | report_group.add_argument( |
359 | 373 | | "-T", |
360 | 374 | | "--txt", |
| skipped 85 lines |
446 | 460 | | print("Using the proxy: " + args.proxy) |
447 | 461 | | |
448 | 462 | | if args.parse_url: |
449 | | - | extracted_ids = extract_ids_from_page(args.parse_url, logger, timeout=args.timeout) |
| 463 | + | extracted_ids = extract_ids_from_page( |
| 464 | + | args.parse_url, logger, timeout=args.timeout |
| 465 | + | ) |
450 | 466 | | usernames.update(extracted_ids) |
451 | 467 | | |
452 | 468 | | if args.tags: |
| skipped 194 lines |