maigret: commit fd236dd1
  • CHANGELOG.md
    1 1  # Changelog
    2 2   
    3 3  ## [Unreleased]
     4 +* tags bugfix
     5 +* custom data checks bugfix
    4 6   
    5 7  ## [0.1.10] - 2021-01-13
    6 8  * added report static resources into package
    skipped 34 lines
  • maigret/maigret.py
    skipped 115 lines
    116 116  
    117 117      site_dict[sitename] = process_site_result(response, query_notify, logger, results_info, site_obj)
    118 118  
    119  - 
    120 119  # TODO: move info separate module
    121 120  def detect_error_page(html_text, status_code, fail_flags, ignore_403):
    122 121      # Detect service restrictions such as a country restriction
    skipped 74 lines
    197 196      # presense flags
    198 197      # True by default
    199 198      presense_flags = site.presense_strs
    200  -      is_presense_detected = html_text and all(
    201  -          [(presense_flag in html_text) for presense_flag in presense_flags]) or not presense_flags
     199 +     if html_text:
     200 +         is_presense_detected = False
     201 +         if not presense_flags:
     202 +             is_presense_detected = True
     203 +             site.stats['presense_flag'] = None
     204 +         else:
     205 +             for presense_flag in presense_flags:
     206 +                 if presense_flag in html_text:
     207 +                     is_presense_detected = True
     208 +                     site.stats['presense_flag'] = presense_flag
     209 +                     logger.info(presense_flag)
     210 +                     break
    202 211  
    203 212      if error_text is not None:
    204 213          logger.debug(error_text)
    skipped 95 lines
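For reference, the rewritten presence check above replaces a single boolean expression with an explicit loop that also records which flag matched in site.stats. A minimal standalone sketch of the same logic (simplified names; the plain stats dict stands in for maigret's site.stats):

    def detect_presence(html_text, presense_flags, stats):
        # An empty page body means no presence; an empty flag list means
        # "no markers required", so any non-empty page counts as found.
        if not html_text:
            return False
        if not presense_flags:
            stats['presense_flag'] = None
            return True
        # Remember the first matching flag for later aggregation.
        for flag in presense_flags:
            if flag in html_text:
                stats['presense_flag'] = flag
                return True
        return False

    stats = {}
    print(detect_presence('<b>Profile of user</b>', ['Profile of'], stats))  # True
    print(stats)  # {'presense_flag': 'Profile of'}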
    300 309  
    301 310  async def maigret(username, site_dict, query_notify, logger,
    302 311                    proxy=None, timeout=None, recursive_search=False,
    303  -                    id_type='username', tags=None, debug=False, forced=False,
     312 +                   id_type='username', debug=False, forced=False,
    304 313                    max_connections=100, no_progressbar=False):
    305 314      """Main search func
    306 315  
    skipped 26 lines
    333 342      """
    334 343  
    335 344      # Notify caller that we are starting the query.
    336  -      if tags is None:
    337  -          tags = set()
    338 345      query_notify.start(username, id_type)
    339 346  
    340 347      # TODO: connector
    skipped 17 lines
    358 365      # First create futures for all requests. This allows for the requests to run in parallel
    359 366      for site_name, site in site_dict.items():
    360 367  
    361  -          fulltags = site.tags
    362  - 
    363 368          if site.type != id_type:
    364 369              continue
    365 370  
    366  -          site_tags = set(fulltags)
    367  -          if tags:
    368  -              if not set(tags).intersection(site_tags):
    369  -                  continue
    370  - 
    371 371          if site.disabled and not forced:
     372 +             logger.debug(f'Site {site.name} is disabled, skipping...')
    372 373              continue
    373 374  
    374 375          # Results from analysis of this specific site
    skipped 204 lines
    579 580      site.disabled = changes['disabled']
    580 581      db.update_site(site)
    581 582      if not silent:
    582  -          action = 'Disabled' if not site.disabled else 'Enabled'
     583 +         action = 'Disabled' if site.disabled else 'Enabled'
    583 584          print(f'{action} site {site.name}...')
    584 585  
    585 586      return changes
    586 587   
    587 588   
    588  - async def self_check(db: MaigretDatabase, site_data: dict, logger, silent=False):
     589 + async def self_check(db: MaigretDatabase, site_data: dict, logger, silent=False) -> bool:
    589 590      sem = asyncio.Semaphore(10)
    590 591      tasks = []
    591 592      all_sites = site_data
    skipped 21 lines
    613 614          total_disabled *= -1
    614 615  
    615 616      if not silent:
    616  -          print(f'{message} {total_disabled} checked sites. Run with `--info` flag to get more information')
     617 +         print(f'{message} {total_disabled} ({disabled_old_count} => {disabled_new_count}) checked sites. Run with `--info` flag to get more information')
     618 + 
     619 +     return total_disabled != 0
    617 620  
    618 621  
    619 622  async def main():
    skipped 44 lines
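self_check now reports back whether the number of disabled sites changed, so the caller can skip the save prompt when nothing happened. For context, a hedged sketch of the bounded-concurrency pattern visible above (asyncio.Semaphore(10) plus a task list); check_site here is a hypothetical stand-in for the real site probe:

    import asyncio

    async def check_site(sem, name):
        async with sem:               # at most 10 probes run concurrently
            await asyncio.sleep(0.1)  # stands in for the real HTTP request
            return name, True

    async def run_checks(names):
        sem = asyncio.Semaphore(10)
        tasks = [check_site(sem, n) for n in names]
        return await asyncio.gather(*tasks)

    print(asyncio.run(run_checks(['500px', 'Reddit', 'Twitter'])))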
    664 667                          "A longer timeout will be more likely to get results from slow sites."
    665 668                          "On the other hand, this may cause a long delay to gather all results."
    666 669                          )
     670 +     parser.add_argument("-n", "--max-connections",
     671 +                         action="store", type=int,
     672 +                         dest="connections", default=100,
     673 +                         help="Allowed number of concurrent connections."
     674 +                         )
     675 +     parser.add_argument("-a", "--all-sites",
     676 +                         action="store_true", dest="all_sites", default=False,
     677 +                         help="Use all sites for scan."
     678 +                         )
    667 679      parser.add_argument("--top-sites",
    668 680                          action="store", default=500, type=int,
    669  -                          help="Count of sites for checking ranked by Alexa Top (default: 500)."
     681 +                         help="Count of sites for scan ranked by Alexa Top (default: 500)."
    670 682                          )
    671 683      parser.add_argument("--print-not-found",
    672 684                          action="store_true", dest="print_not_found", default=False,
    skipped 116 lines
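The two new options wire straight into argparse. A minimal sketch reproducing just these flags (the rest of maigret's CLI is omitted):

    from argparse import ArgumentParser

    parser = ArgumentParser(description="toy subset of maigret's CLI")
    parser.add_argument("-n", "--max-connections", action="store", type=int,
                        dest="connections", default=100,
                        help="Allowed number of concurrent connections.")
    parser.add_argument("-a", "--all-sites", action="store_true",
                        dest="all_sites", default=False,
                        help="Use all sites for scan.")

    args = parser.parse_args(["-n", "50", "--all-sites"])
    print(args.connections, args.all_sites)  # 50 True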
    789 801                              "resources/data.json"
    790 802                              )
    791 803  
    792  -      if args.top_sites == 0:
     804 +     if args.top_sites == 0 or args.all_sites:
    793 805          args.top_sites = sys.maxsize
    794 806  
    795 807      # Create object with all information about sites we are aware of.
    skipped 7 lines
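Why sys.maxsize works as a "no limit" sentinel here: the site list is later truncated by rank, and an oversized Python slice bound simply keeps every element. A tiny illustration (the truncation-by-slice mechanism is an assumption about the surrounding code; the list literal is made up):

    import sys

    sites = ['500px', 'Reddit', 'Twitter', 'Instagram']
    top_sites = sys.maxsize  # what --all-sites (or --top-sites 0) sets
    print(sites[:top_sites])  # full list; oversized slice bounds never raise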
    803 815      # Database self-checking
    804 816      if args.self_check:
    805 817          print('Maigret sites database self-checking...')
    806  -          await self_check(db, site_data, logger)
    807  -          if input('Do you want to save changes permanently? [yYnN]\n').lower() == 'y':
    808  -              db.save_to_file(args.json_file)
    809  -              print('Database was successfully updated.')
    810  -          else:
    811  -              print('Updates will be applied only for current search session.')
     818 +         is_need_update = await self_check(db, site_data, logger)
     819 +         if is_need_update:
     820 +             if input('Do you want to save changes permanently? [yYnN]\n').lower() == 'y':
     821 +                 db.save_to_file(args.json_file)
     822 +                 print('Database was successfully updated.')
     823 +             else:
     824 +                 print('Updates will be applied only for current search session.')
     825 +         print(db.get_stats(site_data))
    812 826  
    813 827      # Make reports folder is not exists
    814 828      os.makedirs(args.folderoutput, exist_ok=True)
    skipped 50 lines
    865 879                                     timeout=args.timeout,
    866 880                                     recursive_search=recursive_search_enabled,
    867 881                                     id_type=id_type,
    868  -                                      tags=args.tags,
    869 882                                     debug=args.verbose,
    870 883                                     logger=logger,
    871 884                                     forced=args.use_disabled_sites,
     885 +                                    max_connections=args.connections,
    872 886                                     )
    873 887  
    874 888          username_result = (username, id_type, results)
    902 916          print(f'TXT report for {username} saved in {filename}')
    903 917  
    904 918      # reporting for all the result
    905  -      report_context = generate_report_context(general_results)
    906  -      # determine main username
    907  -      username = report_context['username']
    908  - 
    909  -      if args.html:
    910  -          filename = report_filepath_tpl.format(username=username, postfix='.html')
    911  -          save_html_report(filename, report_context)
    912  -          print(f'HTML report on all usernames saved in {filename}')
    913  - 
    914  -      if args.pdf:
    915  -          filename = report_filepath_tpl.format(username=username, postfix='.pdf')
    916  -          save_pdf_report(filename, report_context)
    917  -          print(f'PDF report on all usernames saved in {filename}')
     919 +     if general_results:
     920 +         report_context = generate_report_context(general_results)
     921 +         # determine main username
     922 +         username = report_context['username']
    918 923  
     924 +         if args.html:
     925 +             filename = report_filepath_tpl.format(username=username, postfix='.html')
     926 +             save_html_report(filename, report_context)
     927 +             print(f'HTML report on all usernames saved in {filename}')
    919 928  
     929 +         if args.pdf:
     930 +             filename = report_filepath_tpl.format(username=username, postfix='.pdf')
     931 +             save_pdf_report(filename, report_context)
     932 +             print(f'PDF report on all usernames saved in {filename}')
    920 933      # update database
    921 934      db.save_to_file(args.json_file)
    922 935   
    skipped 11 lines
  • maigret/resources/data.json
    Diff is too large to be displayed.
  • maigret/sites.py
    skipped 40 lines
    41 41  
    42 42          self.presense_strs = []
    43 43          self.absence_strs = []
     44 +         self.stats = {}
    44 45  
    45 46          self.engine = None
    46 47          self.engine_data = {}
    skipped 21 lines
    68 69              # strip empty elements
    69 70              if v in (False, '', [], {}, None, sys.maxsize, 'username'):
    70 71                  continue
    71  -              if field in ['name', 'engineData', 'requestFuture', 'detectedEngine', 'engineObj']:
     72 +             if field in ['name', 'engineData', 'requestFuture', 'detectedEngine', 'engineObj', 'stats']:
    72 73                  continue
    73 74              result[field] = v
    74 75  
    skipped 12 lines
    87 88              # TODO: assertion of intersecting keys
    88 89              # update dicts like errors
    89 90              self.__dict__.get(field, {}).update(v)
     91 +         elif isinstance(v, list):
     92 +             self.__dict__[field] = self.__dict__.get(field, []) + v
    90 93          else:
    91 94              self.__dict__[field] = v
    skipped 8 lines
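The new elif branch gives the update method three merge behaviors: dict fields are merged key by key, list fields are concatenated, and scalars overwrite. A hedged standalone sketch using a plain dict in place of the real Site.__dict__ (setdefault replaces the original's .get(field, {}) so the sketch also handles a missing field):

    def merge_field(obj, field, v):
        if isinstance(v, dict):
            obj.setdefault(field, {}).update(v)  # merge mappings like errors
        elif isinstance(v, list):
            obj[field] = obj.get(field, []) + v  # concatenate, new in this commit
        else:
            obj[field] = v                       # scalars simply overwrite

    site = {'absence_strs': ['not found'], 'errors': {'404': 'absent'}}
    merge_field(site, 'absence_strs', ['test'])
    merge_field(site, 'errors', {'503': 'blocked'})
    print(site)
    # {'absence_strs': ['not found', 'test'], 'errors': {'404': 'absent', '503': 'blocked'}}

This list concatenation is what the new test_site_strip_engine_data_with_site_prior_updates below exercises: an absenceStrs item added on top of engine data must survive a round trip through strip_engine_data.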
    101 104          self.request_future = None
    102 105          self_copy = copy.deepcopy(self)
    103 106          engine_data = self_copy.engine_obj.site
    104  -          for field in engine_data.keys():
    105  -              if isinstance(engine_data[field], dict):
    106  -                  for k in engine_data[field].keys():
    107  -                      del self_copy.__dict__[field][k]
    108  -                  continue
     107 +         site_data_keys = list(self_copy.__dict__.keys())
    109 108  
    110  -              if field in list(self_copy.__dict__.keys()):
     109 +         for k in engine_data.keys():
     110 +             field = CaseConverter.camel_to_snake(k)
     111 +             is_exists = field in site_data_keys
     112 +             # remove dict keys
     113 +             if isinstance(engine_data[k], dict) and is_exists:
     114 +                 for f in engine_data[k].keys():
     115 +                     del self_copy.__dict__[field][f]
     116 +                 continue
     117 +             # remove list items
     118 +             if isinstance(engine_data[k], list) and is_exists:
     119 +                 for f in engine_data[k]:
     120 +                     self_copy.__dict__[field].remove(f)
     121 +                 continue
     122 +             if is_exists:
    111 123                  del self_copy.__dict__[field]
    112  -              if CaseConverter.camel_to_snake(field) in list(self_copy.__dict__.keys()):
    113  -                  del self_copy.__dict__[CaseConverter.camel_to_snake(field)]
    114 124  
    115 125          return self_copy
    116 126   
    skipped 139 lines
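strip_engine_data now normalizes engine keys with CaseConverter.camel_to_snake before looking them up among the site's attributes. A regex-based approximation of that conversion (an assumption, not necessarily maigret's actual implementation):

    import re

    def camel_to_snake(name):
        # insert an underscore before each interior uppercase letter
        return re.sub(r'(?<!^)(?=[A-Z])', '_', name).lower()

    print(camel_to_snake('absenceStrs'))   # absence_strs
    print(camel_to_snake('presenseStrs'))  # presense_strs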
    256 266  
    257 267          return self.load_from_json(data)
    258 268  
     269 +     def get_stats(self, sites_dict):
     270 +         sites = sites_dict or self.sites_dict
     271 +         found_flags = {}
     272 +         for _, s in sites.items():
     273 +             if 'presense_flag' in s.stats:
     274 +                 flag = s.stats['presense_flag']
     275 +                 found_flags[flag] = found_flags.get(flag, 0) + 1
     276 + 
     277 +         return found_flags
     278 + 
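A usage sketch of the new get_stats counter, which main() prints after self-check via print(db.get_stats(site_data)); SiteStub is a hypothetical stand-in for maigret's site objects:

    class SiteStub:
        def __init__(self, stats):
            self.stats = stats

    sites_dict = {
        '500px': SiteStub({'presense_flag': 'profile'}),
        'Reddit': SiteStub({'presense_flag': 'profile'}),
        'Twitter': SiteStub({}),  # nothing recorded for this site
    }

    # the same counting logic as get_stats above
    found_flags = {}
    for _, s in sites_dict.items():
        if 'presense_flag' in s.stats:
            flag = s.stats['presense_flag']
            found_flags[flag] = found_flags.get(flag, 0) + 1

    print(found_flags)  # {'profile': 2}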
  • tests/test_report.py
    skipped 26 lines
    27 27   }
    28 28  }
    29 29   
    30  - 
    31 30  GOOD_RESULT = QueryResult('', '', '', QueryStatus.CLAIMED)
    32 31  BAD_RESULT = QueryResult('', '', '', QueryStatus.AVAILABLE)
    33 32   
    34 33  GOOD_500PX_RESULT = copy.deepcopy(GOOD_RESULT)
    35 34  GOOD_500PX_RESULT.tags = ['photo', 'us', 'global']
    36  -GOOD_500PX_RESULT.ids_data = {"uid": "dXJpOm5vZGU6VXNlcjoyNjQwMzQxNQ==", "legacy_id": "26403415", "username": "alexaimephotographycars", "name": "Alex Aim\u00e9", "website": "www.flickr.com/photos/alexaimephotography/", "facebook_link": " www.instagram.com/street.reality.photography/", "instagram_username": "alexaimephotography", "twitter_username": "Alexaimephotogr"}
     35 +GOOD_500PX_RESULT.ids_data = {"uid": "dXJpOm5vZGU6VXNlcjoyNjQwMzQxNQ==", "legacy_id": "26403415",
     36 +                              "username": "alexaimephotographycars", "name": "Alex Aim\u00e9",
     37 +                              "website": "www.flickr.com/photos/alexaimephotography/",
     38 +                              "facebook_link": " www.instagram.com/street.reality.photography/",
     39 +                              "instagram_username": "alexaimephotography", "twitter_username": "Alexaimephotogr"}
    37 40   
    38 41  GOOD_REDDIT_RESULT = copy.deepcopy(GOOD_RESULT)
    39 42  GOOD_REDDIT_RESULT.tags = ['news', 'us']
    40  -GOOD_REDDIT_RESULT.ids_data = {"reddit_id": "t5_1nytpy", "reddit_username": "alexaimephotography", "fullname": "alexaimephotography", "image": "https://styles.redditmedia.com/t5_1nytpy/styles/profileIcon_7vmhdwzd3g931.jpg?width=256&height=256&crop=256:256,smart&frame=1&s=4f355f16b4920844a3f4eacd4237a7bf76b2e97e", "is_employee": "False", "is_nsfw": "False", "is_mod": "True", "is_following": "True", "has_user_profile": "True", "hide_from_robots": "False", "created_at": "2019-07-10 12:20:03", "total_karma": "53959", "post_karma": "52738"}
     43 +GOOD_REDDIT_RESULT.ids_data = {"reddit_id": "t5_1nytpy", "reddit_username": "alexaimephotography",
     44 +                               "fullname": "alexaimephotography",
     45 +                               "image": "https://styles.redditmedia.com/t5_1nytpy/styles/profileIcon_7vmhdwzd3g931.jpg?width=256&height=256&crop=256:256,smart&frame=1&s=4f355f16b4920844a3f4eacd4237a7bf76b2e97e",
     46 +                               "is_employee": "False", "is_nsfw": "False", "is_mod": "True", "is_following": "True",
     47 +                               "has_user_profile": "True", "hide_from_robots": "False",
     48 +                               "created_at": "2019-07-10 12:20:03", "total_karma": "53959", "post_karma": "52738"}
    41 49   
    42 50  GOOD_IG_RESULT = copy.deepcopy(GOOD_RESULT)
    43 51  GOOD_IG_RESULT.tags = ['photo', 'global']
    44  -GOOD_IG_RESULT.ids_data = {"instagram_username": "alexaimephotography", "fullname": "Alexaimephotography", "id": "6828488620", "image": "https://scontent-hel3-1.cdninstagram.com/v/t51.2885-19/s320x320/95420076_1169632876707608_8741505804647006208_n.jpg?_nc_ht=scontent-hel3-1.cdninstagram.com&_nc_ohc=jd87OUGsX4MAX_Ym5GX&tp=1&oh=0f42badd68307ba97ec7fb1ef7b4bfd4&oe=601E5E6F", "bio": "Photographer \nChild of fine street arts", "external_url": "https://www.flickr.com/photos/alexaimephotography2020/"}
     52 +GOOD_IG_RESULT.ids_data = {"instagram_username": "alexaimephotography", "fullname": "Alexaimephotography",
     53 +                           "id": "6828488620",
     54 +                           "image": "https://scontent-hel3-1.cdninstagram.com/v/t51.2885-19/s320x320/95420076_1169632876707608_8741505804647006208_n.jpg?_nc_ht=scontent-hel3-1.cdninstagram.com&_nc_ohc=jd87OUGsX4MAX_Ym5GX&tp=1&oh=0f42badd68307ba97ec7fb1ef7b4bfd4&oe=601E5E6F",
     55 +                           "bio": "Photographer \nChild of fine street arts",
     56 +                           "external_url": "https://www.flickr.com/photos/alexaimephotography2020/"}
    45 57   
    46 58  GOOD_TWITTER_RESULT = copy.deepcopy(GOOD_RESULT)
    47 59  GOOD_TWITTER_RESULT.tags = ['social', 'us']
    48 60   
    49  - 
    50  -TEST = [('alexaimephotographycars', 'username', {'500px': {'username': 'alexaimephotographycars', 'parsing_enabled': True, 'url_main': 'https://500px.com/', 'url_user': 'https://500px.com/p/alexaimephotographycars', 'ids_usernames': {'alexaimephotographycars': 'username', 'alexaimephotography': 'username', 'Alexaimephotogr': 'username'}, 'status': GOOD_500PX_RESULT, 'http_status': 200, 'is_similar': False, 'rank': 2981}, 'Reddit': {'username': 'alexaimephotographycars', 'parsing_enabled': True, 'url_main': 'https://www.reddit.com/', 'url_user': 'https://www.reddit.com/user/alexaimephotographycars', 'status': BAD_RESULT, 'http_status': 404, 'is_similar': False, 'rank': 17}, 'Twitter': {'username': 'alexaimephotographycars', 'parsing_enabled': True, 'url_main': 'https://www.twitter.com/', 'url_user': 'https://twitter.com/alexaimephotographycars', 'status': BAD_RESULT, 'http_status': 400, 'is_similar': False, 'rank': 55}, 'Instagram': {'username': 'alexaimephotographycars', 'parsing_enabled': True, 'url_main': 'https://www.instagram.com/', 'url_user': 'https://www.instagram.com/alexaimephotographycars', 'status': BAD_RESULT, 'http_status': 404, 'is_similar': False, 'rank': 29}}), ('alexaimephotography', 'username', {'500px': {'username': 'alexaimephotography', 'parsing_enabled': True, 'url_main': 'https://500px.com/', 'url_user': 'https://500px.com/p/alexaimephotography', 'status': BAD_RESULT, 'http_status': 200, 'is_similar': False, 'rank': 2981}, 'Reddit': {'username': 'alexaimephotography', 'parsing_enabled': True, 'url_main': 'https://www.reddit.com/', 'url_user': 'https://www.reddit.com/user/alexaimephotography', 'ids_usernames': {'alexaimephotography': 'username'}, 'status': GOOD_REDDIT_RESULT, 'http_status': 200, 'is_similar': False, 'rank': 17}, 'Twitter': {'username': 'alexaimephotography', 'parsing_enabled': True, 'url_main': 'https://www.twitter.com/', 'url_user': 'https://twitter.com/alexaimephotography', 'status': BAD_RESULT, 'http_status': 400, 'is_similar': False, 'rank': 55}, 'Instagram': {'username': 'alexaimephotography', 'parsing_enabled': True, 'url_main': 'https://www.instagram.com/', 'url_user': 'https://www.instagram.com/alexaimephotography', 'ids_usernames': {'alexaimephotography': 'username'}, 'status': GOOD_IG_RESULT, 'http_status': 200, 'is_similar': False, 'rank': 29}}), ('Alexaimephotogr', 'username', {'500px': {'username': 'Alexaimephotogr', 'parsing_enabled': True, 'url_main': 'https://500px.com/', 'url_user': 'https://500px.com/p/Alexaimephotogr', 'status': BAD_RESULT, 'http_status': 200, 'is_similar': False, 'rank': 2981}, 'Reddit': {'username': 'Alexaimephotogr', 'parsing_enabled': True, 'url_main': 'https://www.reddit.com/', 'url_user': 'https://www.reddit.com/user/Alexaimephotogr', 'status': BAD_RESULT, 'http_status': 404, 'is_similar': False, 'rank': 17}, 'Twitter': {'username': 'Alexaimephotogr', 'parsing_enabled': True, 'url_main': 'https://www.twitter.com/', 'url_user': 'https://twitter.com/Alexaimephotogr', 'status': GOOD_TWITTER_RESULT, 'http_status': 400, 'is_similar': False, 'rank': 55}, 'Instagram': {'username': 'Alexaimephotogr', 'parsing_enabled': True, 'url_main': 'https://www.instagram.com/', 'url_user': 'https://www.instagram.com/Alexaimephotogr', 'status':BAD_RESULT, 'http_status': 404, 'is_similar': False, 'rank': 29}})]
    51  - 
     61 +TEST = [('alexaimephotographycars', 'username', {
     62 +    '500px': {'username': 'alexaimephotographycars', 'parsing_enabled': True, 'url_main': 'https://500px.com/',
     63 +              'url_user': 'https://500px.com/p/alexaimephotographycars',
     64 +              'ids_usernames': {'alexaimephotographycars': 'username', 'alexaimephotography': 'username',
     65 +                                'Alexaimephotogr': 'username'}, 'status': GOOD_500PX_RESULT, 'http_status': 200,
     66 +              'is_similar': False, 'rank': 2981},
     67 +    'Reddit': {'username': 'alexaimephotographycars', 'parsing_enabled': True, 'url_main': 'https://www.reddit.com/',
     68 +               'url_user': 'https://www.reddit.com/user/alexaimephotographycars', 'status': BAD_RESULT,
     69 +               'http_status': 404, 'is_similar': False, 'rank': 17},
     70 +    'Twitter': {'username': 'alexaimephotographycars', 'parsing_enabled': True, 'url_main': 'https://www.twitter.com/',
     71 +                'url_user': 'https://twitter.com/alexaimephotographycars', 'status': BAD_RESULT, 'http_status': 400,
     72 +                'is_similar': False, 'rank': 55},
     73 +    'Instagram': {'username': 'alexaimephotographycars', 'parsing_enabled': True,
     74 +                  'url_main': 'https://www.instagram.com/',
     75 +                  'url_user': 'https://www.instagram.com/alexaimephotographycars', 'status': BAD_RESULT,
     76 +                  'http_status': 404, 'is_similar': False, 'rank': 29}}), ('alexaimephotography', 'username', {
     77 +    '500px': {'username': 'alexaimephotography', 'parsing_enabled': True, 'url_main': 'https://500px.com/',
     78 +              'url_user': 'https://500px.com/p/alexaimephotography', 'status': BAD_RESULT, 'http_status': 200,
     79 +              'is_similar': False, 'rank': 2981},
     80 +    'Reddit': {'username': 'alexaimephotography', 'parsing_enabled': True, 'url_main': 'https://www.reddit.com/',
     81 +               'url_user': 'https://www.reddit.com/user/alexaimephotography',
     82 +               'ids_usernames': {'alexaimephotography': 'username'}, 'status': GOOD_REDDIT_RESULT, 'http_status': 200,
     83 +               'is_similar': False, 'rank': 17},
     84 +    'Twitter': {'username': 'alexaimephotography', 'parsing_enabled': True, 'url_main': 'https://www.twitter.com/',
     85 +                'url_user': 'https://twitter.com/alexaimephotography', 'status': BAD_RESULT, 'http_status': 400,
     86 +                'is_similar': False, 'rank': 55},
     87 +    'Instagram': {'username': 'alexaimephotography', 'parsing_enabled': True, 'url_main': 'https://www.instagram.com/',
     88 +                  'url_user': 'https://www.instagram.com/alexaimephotography',
     89 +                  'ids_usernames': {'alexaimephotography': 'username'}, 'status': GOOD_IG_RESULT, 'http_status': 200,
     90 +                  'is_similar': False, 'rank': 29}}), ('Alexaimephotogr', 'username', {
     91 +    '500px': {'username': 'Alexaimephotogr', 'parsing_enabled': True, 'url_main': 'https://500px.com/',
     92 +              'url_user': 'https://500px.com/p/Alexaimephotogr', 'status': BAD_RESULT, 'http_status': 200,
     93 +              'is_similar': False, 'rank': 2981},
     94 +    'Reddit': {'username': 'Alexaimephotogr', 'parsing_enabled': True, 'url_main': 'https://www.reddit.com/',
     95 +               'url_user': 'https://www.reddit.com/user/Alexaimephotogr', 'status': BAD_RESULT, 'http_status': 404,
     96 +               'is_similar': False, 'rank': 17},
     97 +    'Twitter': {'username': 'Alexaimephotogr', 'parsing_enabled': True, 'url_main': 'https://www.twitter.com/',
     98 +                'url_user': 'https://twitter.com/Alexaimephotogr', 'status': GOOD_TWITTER_RESULT, 'http_status': 400,
     99 +                'is_similar': False, 'rank': 55},
    100 +    'Instagram': {'username': 'Alexaimephotogr', 'parsing_enabled': True, 'url_main': 'https://www.instagram.com/',
    101 +                  'url_user': 'https://www.instagram.com/Alexaimephotogr', 'status': BAD_RESULT, 'http_status': 404,
    102 +                  'is_similar': False, 'rank': 29}})]
    52 103   
    53 104  SUPPOSED_BRIEF = """Search by username alexaimephotographycars returned 1 accounts. Found target's other IDs: alexaimephotography, Alexaimephotogr. Search by username alexaimephotography returned 2 accounts. Search by username Alexaimephotogr returned 1 accounts. Extended info extracted from 3 accounts."""
    54 105   
    skipped 79 lines
  • tests/test_sites.py
    skipped 85 lines
    86 86      assert amperka_stripped.json == EXAMPLE_DB['sites']['Amperka']
    87 87  
    88 88  
     89 + def test_site_strip_engine_data_with_site_prior_updates():
     90 +     db = MaigretDatabase()
     91 +     UPDATED_EXAMPLE_DB = dict(EXAMPLE_DB)
     92 +     UPDATED_EXAMPLE_DB['sites']['Amperka']['absenceStrs'] = ["test"]
     93 +     db.load_from_json(UPDATED_EXAMPLE_DB)
     94 + 
     95 +     amperka = db.sites[0]
     96 +     amperka_stripped = amperka.strip_engine_data()
     97 + 
     98 +     assert amperka_stripped.json == UPDATED_EXAMPLE_DB['sites']['Amperka']
     99 + 
    100 + 
    89 101  def test_saving_site_error():
    90 102      db = MaigretDatabase()
    skipped 41 lines