maigret: commit 3b91a9cd
  • CLI arguments improvements, tests added

  • Soxoj committed 3 years ago
    3b91a9cd
    1 parent 9858e713
  • .gitignore
    skipped 28 lines
    29 29  .DS_Store
    30 30  /reports/
    31 31   
     32 +# Testing
     33 +.coverage
     34 +dist/
     35 +htmlcov/
     36 +test_*
  • maigret/checking.py
    skipped 26 lines
    27 27  from .utils import get_random_user_agent
    28 28   
    29 29   
    30  -supported_recursive_search_ids = (
     30 +SUPPORTED_IDS = (
    31 31   "yandex_public_id",
    32 32   "gaia_id",
    33 33   "vk_id",
    skipped 229 lines
    263 263   for k, v in extracted_ids_data.items():
    264 264   if "username" in k:
    265 265   new_usernames[v] = "username"
    266  - if k in supported_recursive_search_ids:
     266 + if k in SUPPORTED_IDS:
    267 267   new_usernames[v] = k
    268 268   
    269 269   results_info["ids_usernames"] = new_usernames
    skipped 457 lines
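The renamed SUPPORTED_IDS tuple drives the recursive search: keys of the extracted page data that look like usernames are queued as plain usernames, while keys listed in SUPPORTED_IDS keep their identifier type so the next round can hit id-specific sites. A minimal standalone sketch of that mapping (the extracted_ids_data values are invented for illustration):

SUPPORTED_IDS = ("yandex_public_id", "gaia_id", "vk_id")

# hypothetical data extracted from a profile page
extracted_ids_data = {"vk_id": "12345", "twitter_username": "alice"}

new_usernames = {}
for k, v in extracted_ids_data.items():
    if "username" in k:
        new_usernames[v] = "username"   # search it as an ordinary username
    if k in SUPPORTED_IDS:
        new_usernames[v] = k            # keep the identifier type for id-specific sites

print(new_usernames)  # {'alice': 'username', '12345': 'vk_id'}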
  • maigret/maigret.py
    skipped 13 lines
    14 14   
    15 15  from .checking import (
    16 16   timeout_check,
    17  - supported_recursive_search_ids,
     17 + SUPPORTED_IDS,
    18 18   self_check,
    19 19   unsupported_characters,
    20 20   maigret,
    skipped 8 lines
    29 29   generate_report_context,
    30 30   save_txt_report,
    31 31   SUPPORTED_JSON_REPORT_FORMATS,
    32  - check_supported_json_format,
    33 32   save_json_report,
    34 33  )
    35 34  from .sites import MaigretDatabase
    skipped 39 lines
    75 74   description=f"Maigret v{__version__}",
    76 75   )
    77 76   parser.add_argument(
     77 + "username",
     78 + nargs='?',
     79 + metavar="USERNAMES",
     80 + action="append",
     81 + help="One or more usernames to check with social networks.",
     82 + )
     83 + parser.add_argument(
    78 84   "--version",
    79 85   action="version",
    80 86   version=version_string,
    81 87   help="Display version information and dependencies.",
    82 88   )
    83 89   parser.add_argument(
    84  - "--info",
    85  - "-vv",
    86  - action="store_true",
    87  - dest="info",
    88  - default=False,
    89  - help="Display service information.",
     90 + "--timeout",
     91 + action="store",
     92 + metavar='TIMEOUT',
     93 + dest="timeout",
     94 + type=timeout_check,
     95 + default=30,
     96 + help="Time in seconds to wait for response to requests. "
     97 + "Default timeout of 30.0s. "
     98 + "A longer timeout will be more likely to get results from slow sites. "
     99 + "On the other hand, this may cause a long delay to gather all results. ",
     100 + )
     101 + parser.add_argument(
     102 + "--retries",
     103 + action="store",
     104 + type=int,
     105 + metavar='RETRIES',
     106 + default=1,
     107 + help="Attempts to restart temporary failed requests.",
     108 + )
     109 + parser.add_argument(
     110 + "-n",
     111 + "--max-connections",
     112 + action="store",
     113 + type=int,
     114 + dest="connections",
     115 + default=100,
     116 + help="Allowed number of concurrent connections.",
    90 117   )
    91 118   parser.add_argument(
    92  - "--verbose",
    93  - "-v",
     119 + "--no-recursion",
    94 120   action="store_true",
    95  - dest="verbose",
     121 + dest="disable_recursive_search",
    96 122   default=False,
    97  - help="Display extra information and metrics.",
     123 + help="Disable recursive search by additional data extracted from pages.",
    98 124   )
    99 125   parser.add_argument(
    100  - "-d",
    101  - "--debug",
    102  - "-vvv",
     126 + "--no-extracting",
    103 127   action="store_true",
    104  - dest="debug",
     128 + dest="disable_extracting",
    105 129   default=False,
    106  - help="Saving debugging information and sites responses in debug.txt.",
     130 + help="Disable parsing pages for additional data and other usernames.",
    107 131   )
    108 132   parser.add_argument(
    109  - "--site",
    110  - action="append",
    111  - metavar='SITE_NAME',
    112  - dest="site_list",
    113  - default=[],
    114  - help="Limit analysis to just the listed sites (use several times to specify more than one)",
    115  - )
    116  - parser.add_argument(
    117  - "--proxy",
    118  - "-p",
    119  - metavar='PROXY_URL',
    120  - action="store",
    121  - dest="proxy",
    122  - default=None,
    123  - help="Make requests over a proxy. e.g. socks5://127.0.0.1:1080",
     133 + "--id-type",
     134 + dest="id_type",
     135 + default='username',
     136 + choices=SUPPORTED_IDS,
     137 + help="Specify identifier(s) type (default: username).",
    124 138   )
    125 139   parser.add_argument(
    126 140   "--db",
    skipped 10 lines
    137 151   help="File with cookies.",
    138 152   )
    139 153   parser.add_argument(
    140  - "--timeout",
    141  - action="store",
    142  - metavar='TIMEOUT',
    143  - dest="timeout",
    144  - type=timeout_check,
    145  - default=30,
    146  - help="Time (in seconds) to wait for response to requests. "
    147  - "Default timeout of 30.0s. "
    148  - "A longer timeout will be more likely to get results from slow sites. "
    149  - "On the other hand, this may cause a long delay to gather all results. ",
     154 + "--ignore-ids",
     155 + action="append",
     156 + metavar='IGNORED_IDS',
     157 + dest="ignore_ids_list",
     158 + default=[],
     159 + help="Do not make search by the specified username or other ids.",
    150 160   )
     161 + # reports options
    151 162   parser.add_argument(
    152  - "--retries",
    153  - action="store",
    154  - type=int,
    155  - metavar='RETRIES',
    156  - default=1,
    157  - help="Attempts to restart temporary failed requests.",
     163 + "--folderoutput",
     164 + "-fo",
     165 + dest="folderoutput",
     166 + default="reports",
     167 + metavar="PATH",
     168 + help="If using multiple usernames, the output of the results will be saved to this folder.",
    158 169   )
    159 170   parser.add_argument(
    160  - "-n",
    161  - "--max-connections",
     171 + "--proxy",
     172 + "-p",
     173 + metavar='PROXY_URL',
    162 174   action="store",
    163  - type=int,
    164  - dest="connections",
    165  - default=100,
    166  - help="Allowed number of concurrent connections.",
     175 + dest="proxy",
     176 + default=None,
     177 + help="Make requests over a proxy. e.g. socks5://127.0.0.1:1080",
    167 178   )
    168  - parser.add_argument(
     179 + 
     180 + filter_group = parser.add_argument_group('Site filtering', 'Options to set site search scope')
     181 + filter_group.add_argument(
    169 182   "-a",
    170 183   "--all-sites",
    171 184   action="store_true",
    skipped 1 lines
    173 186   default=False,
    174 187   help="Use all sites for scan.",
    175 188   )
    176  - parser.add_argument(
     189 + filter_group.add_argument(
    177 190   "--top-sites",
    178 191   action="store",
    179 192   default=500,
     193 + metavar="N",
    180 194   type=int,
    181 195   help="Count of sites for scan ranked by Alexa Top (default: 500).",
    182 196   )
    183  - parser.add_argument(
    184  - "--print-not-found",
    185  - action="store_true",
    186  - dest="print_not_found",
    187  - default=False,
    188  - help="Print sites where the username was not found.",
     197 + filter_group.add_argument(
     198 + "--tags", dest="tags", default='', help="Specify tags of sites (see `--stats`)."
     199 + )
     200 + filter_group.add_argument(
     201 + "--site",
     202 + action="append",
     203 + metavar='SITE_NAME',
     204 + dest="site_list",
     205 + default=[],
     206 + help="Limit analysis to just the specified sites (multiple option).",
    189 207   )
    190  - parser.add_argument(
    191  - "--print-errors",
     208 + filter_group.add_argument(
     209 + "--use-disabled-sites",
    192 210   action="store_true",
    193  - dest="print_check_errors",
    194 211   default=False,
    195  - help="Print errors messages: connection, captcha, site country ban, etc.",
     212 + help="Use disabled sites to search (may cause many false positives).",
    196 213   )
    197  - parser.add_argument(
     214 + 
     215 + modes_group = parser.add_argument_group(
     216 + 'Operating modes',
     217 + 'Various functions except the default search by a username. '
     218 + 'Modes are executed sequentially in the order of declaration.'
     219 + )
     220 + modes_group.add_argument(
     221 + "--parse",
     222 + dest="parse_url",
     223 + default='',
     224 + metavar='URL',
     225 + help="Parse page by URL and extract username and IDs to use for search.",
     226 + )
     227 + modes_group.add_argument(
    198 228   "--submit",
    199  - metavar='EXISTING_USER_URL',
     229 + metavar='URL',
    200 230   type=str,
    201 231   dest="new_site_to_submit",
    202 232   default=False,
    203 233   help="URL of existing profile in new site to submit.",
    204 234   )
    205  - parser.add_argument(
    206  - "--no-color",
     235 + modes_group.add_argument(
     236 + "--self-check",
    207 237   action="store_true",
    208  - dest="no_color",
    209 238   default=False,
    210  - help="Don't color terminal output",
     239 + help="Do self check for sites and database and disable non-working ones.",
    211 240   )
    212  - parser.add_argument(
    213  - "--no-progressbar",
     241 + modes_group.add_argument(
     242 + "--stats",
    214 243   action="store_true",
    215  - dest="no_progressbar",
    216 244   default=False,
    217  - help="Don't show progressbar.",
     245 + help="Show database statistics (most frequent sites engines and tags)."
    218 246   )
    219  - parser.add_argument(
    220  - "--browse",
    221  - "-b",
     247 + 
     248 + output_group = parser.add_argument_group('Output options', 'Options to change verbosity and view of the console output')
     249 + output_group.add_argument(
     250 + "--print-not-found",
    222 251   action="store_true",
    223  - dest="browse",
     252 + dest="print_not_found",
    224 253   default=False,
    225  - help="Browse to all results on default bowser.",
     254 + help="Print sites where the username was not found.",
    226 255   )
    227  - parser.add_argument(
    228  - "--no-recursion",
     256 + output_group.add_argument(
     257 + "--print-errors",
    229 258   action="store_true",
    230  - dest="disable_recursive_search",
     259 + dest="print_check_errors",
    231 260   default=False,
    232  - help="Disable recursive search by additional data extracted from pages.",
     261 + help="Print errors messages: connection, captcha, site country ban, etc.",
    233 262   )
    234  - parser.add_argument(
    235  - "--no-extracting",
     263 + output_group.add_argument(
     264 + "--verbose",
     265 + "-v",
    236 266   action="store_true",
    237  - dest="disable_extracting",
     267 + dest="verbose",
    238 268   default=False,
    239  - help="Disable parsing pages for additional data and other usernames.",
     269 + help="Display extra information and metrics.",
    240 270   )
    241  - parser.add_argument(
    242  - "--self-check",
     271 + output_group.add_argument(
     272 + "--info",
     273 + "-vv",
    243 274   action="store_true",
     275 + dest="info",
    244 276   default=False,
    245  - help="Do self check for sites and database and disable non-working ones.",
     277 + help="Display extra/service information and metrics.",
    246 278   )
    247  - parser.add_argument(
    248  - "--stats", action="store_true", default=False, help="Show database statistics."
     279 + output_group.add_argument(
     280 + "--debug",
     281 + "-vvv",
     282 + "-d",
     283 + action="store_true",
     284 + dest="debug",
     285 + default=False,
     286 + help="Display extra/service/debug information and metrics, save responses in debug.log.",
    249 287   )
    250  - parser.add_argument(
    251  - "--use-disabled-sites",
     288 + output_group.add_argument(
     289 + "--no-color",
    252 290   action="store_true",
     291 + dest="no_color",
    253 292   default=False,
    254  - help="Use disabled sites to search (may cause many false positives).",
    255  - )
    256  - parser.add_argument(
    257  - "--parse",
    258  - dest="parse_url",
    259  - default='',
    260  - help="Parse page by URL and extract username and IDs to use for search.",
    261  - )
    262  - parser.add_argument(
    263  - "--id-type",
    264  - dest="id_type",
    265  - default='username',
    266  - help="Specify identifier(s) type (default: username).",
    267  - )
    268  - parser.add_argument(
    269  - "--ignore-ids",
    270  - action="append",
    271  - metavar='IGNORED_IDS',
    272  - dest="ignore_ids_list",
    273  - default=[],
    274  - help="Do not make search by the specified username or other ids.",
    275  - )
    276  - parser.add_argument(
    277  - "username",
    278  - nargs='+',
    279  - metavar='USERNAMES',
    280  - action="store",
    281  - help="One or more usernames to check with social networks.",
    282  - )
    283  - parser.add_argument(
    284  - "--tags", dest="tags", default='', help="Specify tags of sites."
     293 + help="Don't color terminal output",
    285 294   )
    286  - # reports options
    287  - parser.add_argument(
    288  - "--folderoutput",
    289  - "-fo",
    290  - dest="folderoutput",
    291  - default="reports",
    292  - help="If using multiple usernames, the output of the results will be saved to this folder.",
     295 + output_group.add_argument(
     296 + "--no-progressbar",
     297 + action="store_true",
     298 + dest="no_progressbar",
     299 + default=False,
     300 + help="Don't show progressbar.",
    293 301   )
    294  - parser.add_argument(
     302 + 
     303 + report_group = parser.add_argument_group('Report formats', 'Supported formats of report files')
     304 + report_group.add_argument(
    295 305   "-T",
    296 306   "--txt",
    297 307   action="store_true",
    skipped 1 lines
    299 309   default=False,
    300 310   help="Create a TXT report (one report per username).",
    301 311   )
    302  - parser.add_argument(
     312 + report_group.add_argument(
    303 313   "-C",
    304 314   "--csv",
    305 315   action="store_true",
    skipped 1 lines
    307 317   default=False,
    308 318   help="Create a CSV report (one report per username).",
    309 319   )
    310  - parser.add_argument(
     320 + report_group.add_argument(
    311 321   "-H",
    312 322   "--html",
    313 323   action="store_true",
    skipped 1 lines
    315 325   default=False,
    316 326   help="Create an HTML report file (general report on all usernames).",
    317 327   )
    318  - parser.add_argument(
     328 + report_group.add_argument(
    319 329   "-X",
    320 330   "--xmind",
    321 331   action="store_true",
    skipped 1 lines
    323 333   default=False,
    324 334   help="Generate an XMind 8 mindmap report (one report per username).",
    325 335   )
    326  - parser.add_argument(
     336 + report_group.add_argument(
    327 337   "-P",
    328 338   "--pdf",
    329 339   action="store_true",
    skipped 1 lines
    331 341   default=False,
    332 342   help="Generate a PDF report (general report on all usernames).",
    333 343   )
    334  - parser.add_argument(
     344 + report_group.add_argument(
    335 345   "-J",
    336 346   "--json",
    337 347   action="store",
    338  - metavar='REPORT_TYPE',
     348 + metavar='TYPE',
    339 349   dest="json",
    340 350   default='',
    341  - type=check_supported_json_format,
     351 + choices=SUPPORTED_JSON_REPORT_FORMATS,
    342 352   help=f"Generate a JSON report of specific type: {', '.join(SUPPORTED_JSON_REPORT_FORMATS)}"
    343 353   " (one report per username).",
    344 354   )
    skipped 26 lines
    371 381   usernames = {
    372 382   u: args.id_type
    373 383   for u in args.username
    374  - if u not in ['-'] and u not in args.ignore_ids_list
     384 + if u and u not in ['-'] and u not in args.ignore_ids_list
    375 385   }
    376 386   
    377 387   parsing_enabled = not args.disable_extracting
    skipped 27 lines
    405 415   for k, v in info.items():
    406 416   if 'username' in k:
    407 417   usernames[v] = 'username'
    408  - if k in supported_recursive_search_ids:
     418 + if k in SUPPORTED_IDS:
    409 419   usernames[v] = k
    410 420   
    411 421   if args.tags:
    skipped 214 lines
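The new positional argument combines nargs='?' with action="append", which is what the tests below rely on: a bare username parses to ['username'], a command line with no username at all yields [None] (filtered out later by the `if u and u not in ['-']` check), and a username interleaved between options is still collected. A minimal standalone argparse sketch of just that behaviour, not the full maigret parser:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument("username", nargs='?', action="append", metavar="USERNAMES")
parser.add_argument("--site", action="append", dest="site_list", default=[])
parser.add_argument("--self-check", action="store_true", default=False)

print(parser.parse_args("alice".split()).username)                          # ['alice']
print(parser.parse_args("--self-check --site GitHub".split()).username)     # [None]
print(parser.parse_args("--site GitHub VK --site Steam".split()).username)  # ['VK']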
  • maigret/report.py
    skipped 359 lines
    360 360   currentsublabel = undefinedsection.addSubTopic()
    361 361   currentsublabel.setTitle("%s: %s" % (k, v))
    362 362   
    363  - 
    364  -def check_supported_json_format(value):
    365  - if value and value not in SUPPORTED_JSON_REPORT_FORMATS:
    366  - raise ArgumentTypeError(
    367  - "JSON report type must be one of the following types: "
    368  - + ", ".join(SUPPORTED_JSON_REPORT_FORMATS)
    369  - )
    370  - return value
    371  - 
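The hand-rolled validator removed above is superseded by argparse's built-in choices= check on the --json option in maigret.py, which rejects unknown values with its own "invalid choice" error. A minimal standalone sketch (the format names here are placeholders, not necessarily the real SUPPORTED_JSON_REPORT_FORMATS):

import argparse

SUPPORTED_JSON_REPORT_FORMATS = ("simple", "ndjson")  # placeholder values

parser = argparse.ArgumentParser()
parser.add_argument("-J", "--json", dest="json", default='',
                    choices=SUPPORTED_JSON_REPORT_FORMATS)

parser.parse_args(["--json", "ndjson"])   # accepted
# parser.parse_args(["--json", "xml"])    # would exit with: invalid choice: 'xml'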
  • maigret/resources/data.json
    skipped 981 lines
    982 982   "\u0412\u044b \u043d\u0435 \u043c\u043e\u0436\u0435\u0442\u0435 \u043f\u0440\u043e\u0438\u0437\u0432\u0435\u0441\u0442\u0438 \u043f\u043e\u0438\u0441\u043a \u0441\u0440\u0430\u0437\u0443 \u043f\u043e\u0441\u043b\u0435 \u043f\u0440\u0435\u0434\u044b\u0434\u0443\u0449\u0435\u0433\u043e": "Too many searhes per IP",
    983 983   "\u0414\u043e\u0441\u0442\u0443\u043f \u043a \u043a\u043e\u043d\u0444\u0435\u0440\u0435\u043d\u0446\u0438\u0438 \u0437\u0430\u043a\u0440\u044b\u0442 \u0434\u043b\u044f \u0432\u0430\u0448\u0435\u0433\u043e IP-\u0430\u0434\u0440\u0435\u0441\u0430.": "IP ban"
    984 984   },
    985  - "checkType": "message",
    986  - "absenceStrs": [
    987  - "\u041f\u043e\u0434\u0445\u043e\u0434\u044f\u0449\u0438\u0445 \u0442\u0435\u043c \u0438\u043b\u0438 \u0441\u043e\u043e\u0431\u0449\u0435\u043d\u0438\u0439 \u043d\u0435 \u043d\u0430\u0439\u0434\u0435\u043d\u043e."
    988  - ],
     985 + "engine": "phpBB/Search",
    989 986   "alexaRank": 284203,
    990 987   "urlMain": "https://antiwomen.ru",
    991  - "url": "https://antiwomen.ru/search.php?keywords=&terms=all&author={username}",
    992 988   "usernameClaimed": "adam",
    993 989   "usernameUnclaimed": "noonewouldeverusethis7"
    994 990   },
    skipped 11789 lines
    12784 12780   "us"
    12785 12781   ],
    12786 12782   "headers": {
    12787  - "authorization": "Bearer BQBxsP-d2_tKY0erevviPs9sqxt3qgBU-R1Hpjh-1VV3rCoMm4qVjckkDvPctosbWStF0myG4aJ-7xO2LRg"
     12783 + "authorization": "Bearer BQAlQVJgjkpZgzYiYPT1DgdyrvwTwWkYAgu3lET0zKuXZK7E28z60A00m2y6ITwkVXskqtWkxbKdfHodCao"
    12788 12784   },
    12789 12785   "errors": {
    12790 12786   "Spotify is currently not available in your country.": "Access denied in your country, use proxy/vpn"
    skipped 1380 lines
    14171 14167   "sec-ch-ua": "Google Chrome\";v=\"87\", \" Not;A Brand\";v=\"99\", \"Chromium\";v=\"87\"",
    14172 14168   "authorization": "Bearer AAAAAAAAAAAAAAAAAAAAANRILgAAAAAAnNwIzUejRCOuH5E6I8xnZz4puTs%3D1Zv7ttfk8LF81IUq16cHjhLTvJu4FA33AGWWjCpTnA",
    14173 14169   "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36",
    14174  - "x-guest-token": "1388922761482022917"
     14170 + "x-guest-token": "1389716834983759872"
    14175 14171   },
    14176 14172   "errors": {
    14177 14173   "Bad guest token": "x-guest-token update required"
    skipped 390 lines
    14568 14564   "video"
    14569 14565   ],
    14570 14566   "headers": {
    14571  - "Authorization": "jwt eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJleHAiOjE2MTk5NzQ4MDAsInVzZXJfaWQiOm51bGwsImFwcF9pZCI6NTg0NzksInNjb3BlcyI6InB1YmxpYyIsInRlYW1fdXNlcl9pZCI6bnVsbH0.LJFXICpOC7e-a67hz6kOUY1Mz9wP_60L8mCz2kZawHs"
     14567 + "Authorization": "jwt eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJleHAiOjE2MjAxNzAyMjAsInVzZXJfaWQiOm51bGwsImFwcF9pZCI6NTg0NzksInNjb3BlcyI6InB1YmxpYyIsInRlYW1fdXNlcl9pZCI6bnVsbH0.TbxzgFVMQsgYz4vTiFE-_P1qydzqP9ADUsPxl8U4bZE"
    14572 14568   },
    14573 14569   "activation": {
    14574 14570   "url": "https://vimeo.com/_rv/viewer",
    skipped 933 lines
    15508 15504   "source": "Yandex",
    15509 15505   "usernameClaimed": "yandex",
    15510 15506   "usernameUnclaimed": "noonewouldeverusethis7"
    15511  - },
    15512  - "YandexLocal": {
    15513  - "tags": [
    15514  - "ru"
    15515  - ],
    15516  - "type": "yandex_public_id",
    15517  - "checkType": "status_code",
    15518  - "alexaRank": 49,
    15519  - "urlMain": "https://local.yandex.ru/",
    15520  - "url": "https://local.yandex.ru/users/{username}",
    15521  - "source": "Yandex",
    15522  - "usernameClaimed": "gp7v6ufryzw3m1nvdj4ycexa8g",
    15523  - "usernameUnclaimed": "noonewouldeverusethis77777"
    15524 15507   },
    15525 15508   "YandexMarket": {
    15526 15509   "tags": [
    skipped 10690 lines
  • maigret/sites.py
    skipped 435 lines
    436 436   tags[tag] = tags.get(tag, 0) + 1
    437 437   
    438 438   output += f"Enabled/total sites: {total_count - disabled_count}/{total_count}\n"
    439  - output += "Top sites' profile URLs:\n"
     439 + output += "Top profile URLs:\n"
    440 440   for url, count in sorted(urls.items(), key=lambda x: x[1], reverse=True)[:20]:
    441 441   if count == 1:
    442 442   break
    443 443   output += f"{count}\t{url}\n"
    444 444   
    445  - output += "Top sites' tags:\n"
     445 + output += "Top tags:\n"
    446 446   for tag, count in sorted(tags.items(), key=lambda x: x[1], reverse=True)[:20]:
    447 447   mark = ""
    448 448   if tag not in SUPPORTED_TAGS:
    skipped 5 lines
  • test.sh
    1 1  #!/bin/sh
    2  -pytest tests
     2 +coverage run --source=./maigret -m pytest tests
     3 +coverage report -m
     4 +coverage html
    3 5   
  • tests/conftest.py
    skipped 5 lines
    6 6  from _pytest.mark import Mark
    7 7   
    8 8  from maigret.sites import MaigretDatabase
     9 +from maigret.maigret import setup_arguments_parser
     10 + 
    9 11   
    10 12  CUR_PATH = os.path.dirname(os.path.realpath(__file__))
    11 13  JSON_FILE = os.path.join(CUR_PATH, '../maigret/resources/data.json')
    skipped 40 lines
    52 54   yield
    53 55   remove_test_reports()
    54 56   
     57 + 
     58 +@pytest.fixture(scope='session')
     59 +def argparser():
     60 + return setup_arguments_parser()
     61 + 
  • tests/test_cli.py
     1 +"""Maigret command-line arguments parsing tests"""
     2 +from argparse import Namespace
     3 +from typing import Dict, Any
     4 + 
     5 +DEFAULT_ARGS: Dict[str, Any] = {
     6 + 'all_sites': False,
     7 + 'connections': 100,
     8 + 'cookie_file': None,
     9 + 'csv': False,
     10 + 'db_file': None,
     11 + 'debug': False,
     12 + 'disable_extracting': False,
     13 + 'disable_recursive_search': False,
     14 + 'folderoutput': 'reports',
     15 + 'html': False,
     16 + 'id_type': 'username',
     17 + 'ignore_ids_list': [],
     18 + 'info': False,
     19 + 'json': '',
     20 + 'new_site_to_submit': False,
     21 + 'no_color': False,
     22 + 'no_progressbar': False,
     23 + 'parse_url': '',
     24 + 'pdf': False,
     25 + 'print_check_errors': False,
     26 + 'print_not_found': False,
     27 + 'proxy': None,
     28 + 'retries': 1,
     29 + 'self_check': False,
     30 + 'site_list': [],
     31 + 'stats': False,
     32 + 'tags': '',
     33 + 'timeout': 30,
     34 + 'top_sites': 500,
     35 + 'txt': False,
     36 + 'use_disabled_sites': False,
     37 + 'username': [],
     38 + 'verbose': False,
     39 + 'xmind': False,
     40 +}
     41 + 
     42 + 
     43 +def test_args_search_mode(argparser):
     44 + args = argparser.parse_args('username'.split())
     45 + 
     46 + assert args.username == ['username']
     47 + 
     48 + want_args = dict(DEFAULT_ARGS)
     49 + want_args.update({'username': ['username']})
     50 + 
     51 + assert args == Namespace(**want_args)
     52 + 
     53 + 
     54 +def test_args_self_check_mode(argparser):
     55 + args = argparser.parse_args('--self-check --site GitHub'.split())
     56 + 
     57 + want_args = dict(DEFAULT_ARGS)
     58 + want_args.update(
     59 + {
     60 + 'self_check': True,
     61 + 'site_list': ['GitHub'],
     62 + 'username': [None],
     63 + }
     64 + )
     65 + 
     66 + assert args == Namespace(**want_args)
     67 + 
     68 + 
     69 +def test_args_multiple_sites(argparser):
     70 + args = argparser.parse_args('--site GitHub VK --site PornHub --site Taringa,Steam'.split())
     71 + 
     72 + want_args = dict(DEFAULT_ARGS)
     73 + want_args.update(
     74 + {
     75 + 'site_list': ['GitHub', 'PornHub', 'Taringa,Steam'],
     76 + 'username': ['VK'],
     77 + }
     78 + )
     79 + 
     80 + assert args == Namespace(**want_args)
     81 + 
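These assertions lean on argparse.Namespace defining equality over its attribute dictionary, so a single comparison against Namespace(**want_args) checks every option at once. A tiny standalone illustration:

from argparse import Namespace

a = Namespace(timeout=30, verbose=False)
b = Namespace(verbose=False, timeout=30)

assert a == b                                    # attribute order is irrelevant
assert a != Namespace(timeout=30, verbose=True)  # any differing flag breaks equality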
  • tests/test_utils.py
    skipped 97 lines
    98 98   'legacy_id': '26403415',
    99 99   'username': 'alexaimephotographycars',
    100 100   'name': 'Alex Aimé',
     101 + 'links': "['www.instagram.com/street.reality.photography/']",
    101 102   'created_at': '2018-05-04T10:17:01.000+0000',
    102 103   'image': 'https://drscdn.500px.org/user_avatar/26403415/q%3D85_w%3D300_h%3D300/v2?webp=true&v=2&sig=0235678a4f7b65e007e864033ebfaf5ef6d87fad34f80a8639d985320c20fe3b',
    103 104   'image_bg': 'https://drscdn.500px.org/user_cover/26403415/q%3D65_m%3D2048/v2?webp=true&v=1&sig=bea411fb158391a4fdad498874ff17088f91257e59dfb376ff67e3a44c3a4201',
    skipped 3 lines
    107 108   'twitter_username': 'Alexaimephotogr',
    108 109   }
    109 110   
    110  - ascii_tree = get_dict_ascii_tree(data.items())
     111 + ascii_tree = get_dict_ascii_tree(data.items(), prepend=" ")
    111 112   
    112 113   assert (
    113 114   ascii_tree
    114 115   == """
    115  -┣╸uid: dXJpOm5vZGU6VXNlcjoyNjQwMzQxNQ==
    116  -┣╸legacy_id: 26403415
    117  -┣╸username: alexaimephotographycars
    118  -┣╸name: Alex Aimé
    119  -┣╸created_at: 2018-05-04T10:17:01.000+0000
    120  -┣╸image: https://drscdn.500px.org/user_avatar/26403415/q%3D85_w%3D300_h%3D300/v2?webp=true&v=2&sig=0235678a4f7b65e007e864033ebfaf5ef6d87fad34f80a8639d985320c20fe3b
    121  -┣╸image_bg: https://drscdn.500px.org/user_cover/26403415/q%3D65_m%3D2048/v2?webp=true&v=1&sig=bea411fb158391a4fdad498874ff17088f91257e59dfb376ff67e3a44c3a4201
    122  -┣╸website: www.instagram.com/street.reality.photography/
    123  -┣╸facebook_link: www.instagram.com/street.reality.photography/
    124  -┣╸instagram_username: Street.Reality.Photography
    125  -┗╸twitter_username: Alexaimephotogr"""
     116 + ┣╸uid: dXJpOm5vZGU6VXNlcjoyNjQwMzQxNQ==
     117 + ┣╸legacy_id: 26403415
     118 + ┣╸username: alexaimephotographycars
     119 + ┣╸name: Alex Aimé
     120 + ┣╸links:
     121 + ┃ ┗╸ www.instagram.com/street.reality.photography/
     122 + ┣╸created_at: 2018-05-04T10:17:01.000+0000
     123 + ┣╸image: https://drscdn.500px.org/user_avatar/26403415/q%3D85_w%3D300_h%3D300/v2?webp=true&v=2&sig=0235678a4f7b65e007e864033ebfaf5ef6d87fad34f80a8639d985320c20fe3b
     124 + ┣╸image_bg: https://drscdn.500px.org/user_cover/26403415/q%3D65_m%3D2048/v2?webp=true&v=1&sig=bea411fb158391a4fdad498874ff17088f91257e59dfb376ff67e3a44c3a4201
     125 + ┣╸website: www.instagram.com/street.reality.photography/
     126 + ┣╸facebook_link: www.instagram.com/street.reality.photography/
     127 + ┣╸instagram_username: Street.Reality.Photography
     128 + ┗╸twitter_username: Alexaimephotogr"""
    126 129   )
    127 130   