Projects STRLCPY maigret Commits 2a073773
🤬
Revision indexing in progress... (symbol navigation in revisions will be accurate once indexing completes)
  • ■ ■ ■ ■ ■ ■
    cookies.txt
     1 +# HTTP Cookie File downloaded with cookies.txt by Genuinous @genuinous
     2 +# This file can be used by wget, curl, aria2c and other standard compliant tools.
     3 +# Usage Examples:
     4 +# 1) wget -x --load-cookies cookies.txt "https://xss.is/search/"
     5 +# 2) curl --cookie cookies.txt "https://xss.is/search/"
     6 +# 3) aria2c --load-cookies cookies.txt "https://xss.is/search/"
     7 +#
     8 +xss.is FALSE / TRUE 0 xf_csrf PMnZNsr42HETwYEr
     9 +xss.is FALSE / TRUE 0 xf_from_search google
     10 +xss.is FALSE / TRUE 1642709308 xf_user 215268%2CZNKB_-64Wk-BOpsdtLYy-1UxfS5zGpxWaiEGUhmX
     11 +xss.is FALSE / TRUE 0 xf_session sGdxJtP_sKV0LCG8vUQbr6cL670_EFWM
     12 +.xss.is TRUE / FALSE 0 muchacho_cache ["00fbb0f2772c9596b0483d6864563cce"]
     13 +.xss.is TRUE / FALSE 0 muchacho_png ["00fbb0f2772c9596b0483d6864563cce"]
     14 +.xss.is TRUE / FALSE 0 muchacho_etag ["00fbb0f2772c9596b0483d6864563cce"]
     15 +.xss.is TRUE / FALSE 1924905600 2e66e4dd94a7a237d0d1b4d50f01e179_evc ["00fbb0f2772c9596b0483d6864563cce"]
     16 + 
  • ■ ■ ■ ■ ■ ■
    maigret/activation.py
     1 +import aiohttp
     2 +from aiohttp import CookieJar
     3 +import asyncio
     4 +import json
     5 +from http.cookiejar import MozillaCookieJar
     6 +from http.cookies import Morsel
     7 + 
    1 8  import requests
    2 9   
    3 10  class ParsingActivator:
    4 11   @staticmethod
    5  - def twitter(site, logger):
     12 + def twitter(site, logger, cookies={}):
    6 13   headers = dict(site.headers)
    7 14   del headers['x-guest-token']
    8 15   r = requests.post(site.activation['url'], headers=headers)
    skipped 3 lines
    12 19   site.headers['x-guest-token'] = guest_token
    13 20   
    14 21   @staticmethod
    15  - def vimeo(site, logger):
     22 + def vimeo(site, logger, cookies={}):
    16 23   headers = dict(site.headers)
    17 24   if 'Authorization' in headers:
    18 25   del headers['Authorization']
    skipped 1 lines
    20 27   jwt_token = r.json()['jwt']
    21 28   site.headers['Authorization'] = 'jwt ' + jwt_token
    22 29   
     30 + @staticmethod
     31 + def xssis(site, logger, cookies={}):
     32 + if not cookies:
     33 + logger.debug('You must have cookies to activate xss.is parsing!')
     34 + return
     35 + 
     36 + headers = dict(site.headers)
     37 + post_data = {
     38 + '_xfResponseType': 'json',
     39 + '_xfToken': '1611177919,a2710362e45dad9aa1da381e21941a38'
     40 + }
     41 + headers['content-type'] = 'application/x-www-form-urlencoded; charset=UTF-8'
     42 + r = requests.post(site.activation['url'], headers=headers, cookies=cookies, data=post_data)
     43 + csrf = r.json()['csrf']
     44 + site.get_params['_xfToken'] = csrf
     45 + 
     46 + 
     47 +async def import_aiohttp_cookies(cookiestxt_filename):
     48 + cookies_obj = MozillaCookieJar(cookiestxt_filename)
     49 + cookies_obj.load(ignore_discard=True, ignore_expires=True)
     50 + 
     51 + cookies = CookieJar()
     52 + 
     53 + cookies_list = []
     54 + for domain in cookies_obj._cookies.values():
     55 + for key, cookie in list(domain.values())[0].items():
     56 + c = Morsel()
     57 + c.set(key, cookie.value, cookie.value)
     58 + c['domain'] = cookie.domain
     59 + c['path'] = cookie.path
     60 + cookies_list.append((key, c))
     61 + 
     62 + cookies.update_cookies(cookies_list)
     63 + 
     64 + return cookies
     65 + 
  • ■ ■ ■ ■ ■ ■
    maigret/maigret.py
    skipped 1 lines
    2 2  Maigret main module
    3 3  """
    4 4   
    5  -import aiohttp
    6 5  import asyncio
    7  -import csv
    8  -import http.cookiejar as cookielib
    9  -import json
    10 6  import logging
    11 7  import os
    12 8  import platform
    13 9  import re
    14  -import requests
    15 10  import ssl
    16 11  import sys
     12 +from argparse import ArgumentParser, RawDescriptionHelpFormatter
     13 + 
     14 +import aiohttp
     15 +import requests
    17 16  import tqdm.asyncio
    18  -import xmind
    19 17  from aiohttp_socks import ProxyConnector
    20  -from argparse import ArgumentParser, RawDescriptionHelpFormatter
    21  -from http.cookies import SimpleCookie
    22 18  from mock import Mock
    23 19  from python_socks import _errors as proxy_errors
    24 20  from socid_extractor import parse, extract, __version__ as socid_version
    25 21   
    26  -from .activation import ParsingActivator
     22 +from .activation import ParsingActivator, import_aiohttp_cookies
    27 23  from .notify import QueryNotifyPrint
    28 24  from .report import save_csv_report, save_xmind_report, save_html_report, save_pdf_report, \
    29  - generate_report_context, save_txt_report
     25 + generate_report_context, save_txt_report
    30 26  from .result import QueryResult, QueryStatus
    31 27  from .sites import MaigretDatabase, MaigretSite
    32 28   
    skipped 19 lines
    52 48  }
    53 49   
    54 50  unsupported_characters = '#'
    55  - 
    56  -cookies_file = 'cookies.txt'
    57  - 
    58 51   
    59 52  async def get_response(request_future, site_name, logger):
    60 53   html_text = None
    skipped 249 lines
    310 303  async def maigret(username, site_dict, query_notify, logger,
    311 304   proxy=None, timeout=None, recursive_search=False,
    312 305   id_type='username', debug=False, forced=False,
    313  - max_connections=100, no_progressbar=False):
     306 + max_connections=100, no_progressbar=False,
     307 + cookies=None):
    314 308   """Main search func
    315 309   
    316 310   Checks for existence of username on various social media sites.
    skipped 31 lines
    348 342   connector = ProxyConnector.from_url(proxy) if proxy else aiohttp.TCPConnector(ssl=False)
    349 343   # connector = aiohttp.TCPConnector(ssl=False)
    350 344   connector.verify_ssl=False
    351  - session = aiohttp.ClientSession(connector=connector, trust_env=True)
     345 + 
     346 + cookie_jar = None
     347 + if cookies:
     348 + cookie_jar = await import_aiohttp_cookies(cookies)
     349 + 
     350 + session = aiohttp.ClientSession(connector=connector, trust_env=True, cookie_jar=cookie_jar)
    352 351   
    353 352   if logger.level == logging.DEBUG:
    354 353   future = session.get(url='https://icanhazip.com')
    skipped 24 lines
    379 378   results_site['username'] = username
    380 379   results_site['parsing_enabled'] = recursive_search
    381 380   results_site['url_main'] = site.url_main
     381 + results_site['cookies'] = cookie_jar and cookie_jar.filter_cookies(site.url_main) or None
    382 382   
    383 383   headers = {
    384 384   'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 11.1; rv:55.0) Gecko/20100101 Firefox/55.0',
    skipped 39 lines
    424 424   username=username,
    425 425   )
    426 426   
     427 + for k, v in site.get_params.items():
     428 + url_probe += f'&{k}={v}'
    427 429   
    428 430   if site.check_type == 'status_code' and site.request_head_only:
    429 431   # In most cases when we are detecting by status code,
    skipped 16 lines
    446 448   # The final result of the request will be what is available.
    447 449   allow_redirects = True
    448 450   
    449  - # TODO: cookies using
    450  - # def parse_cookies(cookies_str):
    451  - # cookies = SimpleCookie()
    452  - # cookies.load(cookies_str)
    453  - # return {key: morsel.value for key, morsel in cookies.items()}
    454  - #
    455  - # if os.path.exists(cookies_file):
    456  - # cookies_obj = cookielib.MozillaCookieJar(cookies_file)
    457  - # cookies_obj.load(ignore_discard=True, ignore_expires=True)
    458  - 
    459 451   future = request_method(url=url_probe, headers=headers,
    460 452   allow_redirects=allow_redirects,
    461 453   timeout=timeout,
    skipped 199 lines
    661 653   parser.add_argument("--json", "-j", metavar="JSON_FILE",
    662 654   dest="json_file", default=None,
    663 655   help="Load data from a JSON file or an online, valid, JSON file.")
     656 + parser.add_argument("--cookies-jar-file", metavar="COOKIE_FILE",
     657 + dest="cookie_file", default=None,
     658 + help="File with cookies.")
    664 659   parser.add_argument("--timeout",
    665 660   action="store", metavar='TIMEOUT',
    666 661   dest="timeout", type=timeout_check, default=10,
    skipped 220 lines
    887 882   id_type=id_type,
    888 883   debug=args.verbose,
    889 884   logger=logger,
     885 + cookies=args.cookie_file,
    890 886   forced=args.use_disabled_sites,
    891 887   max_connections=args.connections,
    892 888   )
    skipped 62 lines
  • ■ ■ ■ ■ ■ ■
    maigret/resources/data.json
    skipped 1584 lines
    1585 1585   "usernameClaimed": "adam",
    1586 1586   "usernameUnclaimed": "noonewouldeverusethis7"
    1587 1587   },
     1588 + "XSS.is": {
     1589 + "activation": {
     1590 + "method": "xssis",
     1591 + "marks": [
     1592 + "errorHtml"
     1593 + ],
     1594 + "url": "https://xss.is/login/keep-alive",
     1595 + "src": "csrf",
     1596 + "dst": "x-guest-token"
     1597 + },
     1598 + "checkType": "status_code",
     1599 + "getParams": {
     1600 + "_xfToken": "1611179947,a2710362e45dad9aa1da381e21941a38"
     1601 + },
     1602 + "url": "https://xss.is/index.php?members/find&q={username}&_xfRequestUri=%2Fmembers%2F%3Fkey%3Dmost_messages&_xfWithData=1&_xfResponseType=json",
     1603 + "urlMain": "https://xss.is",
     1604 + "usernameClaimed": "adam",
     1605 + "usernameUnclaimed": "noonewouldeverusethis7"
     1606 + },
    1588 1607   "Battleraprus": {
    1589 1608   "tags": [
    1590 1609   "ru",
    skipped 11911 lines
    13502 13521   "sec-ch-ua": "Google Chrome\";v=\"87\", \" Not;A Brand\";v=\"99\", \"Chromium\";v=\"87\"",
    13503 13522   "authorization": "Bearer AAAAAAAAAAAAAAAAAAAAANRILgAAAAAAnNwIzUejRCOuH5E6I8xnZz4puTs%3D1Zv7ttfk8LF81IUq16cHjhLTvJu4FA33AGWWjCpTnA",
    13504 13523   "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36",
    13505  - "x-guest-token": "1352321534451470337"
     13524 + "x-guest-token": "1351631725676388352"
    13506 13525   },
    13507 13526   "errors": {
    13508 13527   "Bad guest token": "x-guest-token update required"
    skipped 2670 lines
    16179 16198   "alexaRank": 1615328,
    16180 16199   "urlMain": "http://free-pass.ru",
    16181 16200   "usernameClaimed": "alex",
     16201 + "usernameUnclaimed": "noonewouldeverusethis7"
     16202 + },
     16203 + "Codeby.net": {
     16204 + "tags": [
     16205 + "ru",
     16206 + "hacking"
     16207 + ],
     16208 + "engine": "XenForo",
     16209 + "urlMain": "https://codeby.net",
     16210 + "usernameClaimed": "pragmalion",
    16182 16211   "usernameUnclaimed": "noonewouldeverusethis7"
    16183 16212   },
    16184 16213   "freelance.codeby.net": {
    skipped 6801 lines
  • ■ ■ ■ ■ ■
    maigret/sites.py
    skipped 1 lines
    2 2  """Maigret Sites Information"""
    3 3  import copy
    4 4  import json
    5  -import operator
    6  -import requests
    7 5  import sys
     6 + 
     7 +import requests
    8 8   
    9 9  from .utils import CaseConverter
    10 10   
    skipped 27 lines
    38 38   self.url_probe = None
    39 39   self.check_type = ''
    40 40   self.request_head_only = ''
     41 + self.get_params = {}
    41 42   
    42 43   self.presense_strs = []
    43 44   self.absence_strs = []
    skipped 240 lines
  • ■ ■ ■ ■ ■ ■
    tests/test_activation.py
    1 1  """Maigret activation test functions"""
     2 +import json
     3 +import aiohttp
    2 4  import pytest
    3 5  from mock import Mock
    4 6   
    5  -from maigret.activation import ParsingActivator
     7 +from maigret.activation import ParsingActivator, import_aiohttp_cookies
     8 + 
     9 +COOKIES_TXT = """# HTTP Cookie File downloaded with cookies.txt by Genuinous @genuinous
     10 +# This file can be used by wget, curl, aria2c and other standard compliant tools.
     11 +# Usage Examples:
     12 +# 1) wget -x --load-cookies cookies.txt "https://xss.is/search/"
     13 +# 2) curl --cookie cookies.txt "https://xss.is/search/"
     14 +# 3) aria2c --load-cookies cookies.txt "https://xss.is/search/"
     15 +#
     16 +xss.is FALSE / TRUE 0 xf_csrf test
     17 +xss.is FALSE / TRUE 1642709308 xf_user tset
     18 +.xss.is TRUE / FALSE 0 muchacho_cache test
     19 +.xss.is TRUE / FALSE 1924905600 132_evc test
     20 +httpbin.org FALSE / FALSE 0 a b
     21 +"""
    6 22   
    7 23   
    8 24  @pytest.mark.slow
    skipped 6 lines
    15 31   
    16 32   assert token1 != token2
    17 33   
     34 + 
     35 +@pytest.mark.asyncio
     36 +async def test_import_aiohttp_cookies():
     37 + cookies_filename = 'cookies_test.txt'
     38 + with open(cookies_filename, 'w') as f:
     39 + f.write(COOKIES_TXT)
     40 + 
     41 + cookie_jar = await import_aiohttp_cookies(cookies_filename)
     42 + assert list(cookie_jar._cookies.keys()) == ['xss.is', 'httpbin.org']
     43 + 
     44 + url = 'https://httpbin.org/cookies'
     45 + connector = aiohttp.TCPConnector(ssl=False)
     46 + session = aiohttp.ClientSession(connector=connector, trust_env=True,
     47 + cookie_jar=cookie_jar)
     48 + 
     49 + response = await session.get(url=url)
     50 + result = json.loads(await response.content.read())
     51 + await session.close()
     52 + 
     53 + assert result == {'cookies': {'a': 'b'}}
     54 + 
Please wait...
The page encountered an error — reload to recover.