Maryam commit f847b623

  • README.md
    skipped 31 lines
    32 32  maryam -e dnsbrute -d domain.tld
    33 33  # Show framework modules
    34 34  maryam -e show modules
    35  -# Set framework options. It'll save in the workspace.
     35 +# Set framework options.
    36 36  maryam -e set proxy ..
    37 37  maryam -e set agent ..
    38 38  maryam -e set timeout ..
    skipped 36 lines
  • maryam/core/util/osint/keyserver.py
     1 +#!/usr/bin/env python3
     2 +"""
     3 +OWASP Maryam!
     4 + 
     5 +This program is free software: you can redistribute it and/or modify
     6 +it under the terms of the GNU General Public License as published by
     7 +the Free Software Foundation, either version 3 of the License, or
     8 +any later version.
     9 + 
     10 +This program is distributed in the hope that it will be useful,
     11 +but WITHOUT ANY WARRANTY; without even the implied warranty of
     12 +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
     13 +GNU General Public License for more details.
     14 + 
     15 +You should have received a copy of the GNU General Public License
     16 +along with this program. If not, see <http://www.gnu.org/licenses/>.
     17 +"""
     18 + 
     19 +class main:
     20 +
     21 +	def __init__(self, q, limit=10):
     22 +		""" keyserver.ubuntu.com search engine
     23 +
     24 +			q : query for search
     25 +			limit : Number of pages
     26 +		"""
     27 +		self.framework = main.framework
     28 +		self.q = q
     29 +		self.limit = limit
     30 +		self._pages = ''
     31 +		self._json_pages = ''
     32 +		self.keyserver_api = f"https://keyserver.ubuntu.com/pks/lookup?search=@{self.q}&op=index"
     33 +		self.acceptable = False
     34 +
     35 +	def run_crawl(self):
     36 +		self.framework.verbose('[KEYSERVER] Searching in keyserver...')
     37 +		try:
     38 +			req = self.framework.request(self.keyserver_api)
     39 +		except:
     40 +			self.framework.debug('ConnectionError', 'util/keyserver', 'run_crawl')
     41 +			self.framework.error('Keyserver is missed!', 'util/keyserver', 'run_crawl')
     42 +			return
     43 +		self._pages += req.text
     44 +		return self.framework.page_parse(self._pages).get_emails(self.q)
     45 +
     46 +	@property
     47 +	def pages(self):
     48 +		return self._pages
     49 +
     50 +	@property
     51 +	def json_pages(self):
     52 +		return self._json_pages
     53 +
     54 +	@property
     55 +	def emails(self):
     56 +		return self.framework.page_parse(self._pages).get_emails(self.q)
     57 +
     58 +	@property
     59 +	def dns(self):
     60 +		return self.framework.page_parse(self.pages).get_dns(self.q)
     61 +
     62 +
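    The new util follows Maryam's usual pattern: the framework object is injected as main.framework, and fetching and email extraction are delegated to framework.request() and framework.page_parse().get_emails(). A rough standalone sketch of the same lookup, using requests and a plain regex as stand-ins for those framework helpers (the function name and regex below are illustrative assumptions, not part of the commit):

    # Standalone approximation of run_crawl(), assuming only the public
    # keyserver.ubuntu.com endpoint that the util builds in __init__.
    # requests and the regex are stand-ins for framework.request() and
    # framework.page_parse().get_emails().
    import re
    import requests

    def keyserver_emails(domain, timeout=10):
    	"""Fetch the keyserver index page and pull out addresses for `domain`."""
    	url = f"https://keyserver.ubuntu.com/pks/lookup?search=@{domain}&op=index"
    	try:
    		page = requests.get(url, timeout=timeout).text
    	except requests.RequestException:
    		# Mirrors the util's behaviour of bailing out on a connection error.
    		return []
    	# Collect anything that looks like an email, then keep only the queried domain.
    	emails = re.findall(r"[\w.+-]+@[\w.-]+", page)
    	return sorted({e for e in emails if e.lower().endswith(domain.lower())})

    if __name__ == '__main__':
    	print(keyserver_emails('example.com'))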
  • maryam/modules/osint/email_search.py
    skipped 17 lines
    18 18   'version': '1.0',
    19 19   'description': 'Search in open-sources to find emails.',
    20 20   'sources': ('bing', 'pastebin', 'google', 'yahoo', 'metacrawler',
    21  - 'baidu', 'startpage', 'qwant', 'duckduckgo', 'hunter', 'gigablast', 'github'),
     21 + 'baidu', 'startpage', 'qwant', 'duckduckgo', 'hunter', 'gigablast', 'github', 'keyserver',),
    22 22   'options': (
    23 23   ('query', None, True, 'Domain name or company name', '-q', 'store', str),
    24 24   ('limit', 3, False, 'Search limit(number of pages, default=3)', '-l', 'store', int),
    skipped 45 lines
    70 70   'default': f'"%40{domain}"',
    71 71   'ask': f"%40{domain}",
    72 72   'hunter': f"{domain}&api_key={key}",
    73  - 'github': domain
     73 + 'github': domain,
     74 + 'keyserver': domain
    74 75   }
    75 76   self.thread(search, self.options['thread'], engines, query, q_formats, limit, count, meta['sources'])
    76 77   output = {'emails': list(set(EMAILS))}
    77  -
    78 78   self.save_gather(output, 'osint/email_search', domain,\
    79 79   output=self.options['output'])
    80 80   return output
    skipped 4 lines
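    For context, the q_formats dict edited above decides which query string each source receives; the new 'keyserver' entry hands the bare domain to the util added in this commit, just as 'github' already does. A small sketch of that mapping, with the fallback-to-'default' behaviour assumed for illustration rather than taken from the diff:

    # Sketch of the per-engine query formatting shown above. Only the mapping
    # values come from the diff; the helper names and the fallback rule are
    # illustrative assumptions.
    def build_queries(domain, key=''):
    	return {
    		'default': f'"%40{domain}"',          # quoted "@domain" for general web engines
    		'ask': f"%40{domain}",
    		'hunter': f"{domain}&api_key={key}",  # hunter expects an API key appended
    		'github': domain,                     # github and the new keyserver source
    		'keyserver': domain,                  # take the bare domain
    	}

    def query_for(engine, domain, key=''):
    	q_formats = build_queries(domain, key)
    	# Engines without a dedicated entry get the 'default' format
    	# (an assumption about how the module's search thread picks a query).
    	return q_formats.get(engine, q_formats['default'])

    if __name__ == '__main__':
    	print(query_for('keyserver', 'example.com'))  # -> example.com
    	print(query_for('bing', 'example.com'))       # -> "%40example.com"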
  • maryam/modules/osint/onion_search.py
    skipped 20 lines
    21 21   'version': '0.4',
    22 22   'description': 'onion_search is used to create the premier \
    23 23   search engine for services residing on the Tor anonymity network.',
    24  - 'sources': ('ahmia', 'onionland', 'darksearch'),
     24 + 'sources': ('ahmia', 'onionland'),
    25 25   'options': (
    26 26   ('query', None, True, 'Domain Name,\
    27 27   Company Name, keyword, etc', '-q', 'store', str),
    skipped 10 lines
    38 38   onionland = self.onionland(q, limit=5)
    39 39   onionland.run_crawl()
    40 40   links.extend(onionland.links)
    41  - 
    42  - darksearch = self.darksearch(q, limit=1)
    43  - darksearch.run_crawl()
    44  - links.extend(darksearch.links)
    45 41   
    46 42   output['links'] = list(set(links))
    47 43   self.save_gather(output, 'osint/onion_search', q, output=self.options['output'])
    skipped 5 lines