Projects STRLCPY SQLi-Hunter-v2 Commits e2560c49
🤬
  • ■ ■ ■ ■ ■ ■
    SQLi Hunter v2.py
     1 +import argparse,sys,threading,schedule,requests
     2 +from config_file import *
     3 +from user_agent import generate_user_agent
     4 +from time import sleep
     5 +from random import choice
     6 +import urllib.parse
     7 +
     8 +'''
     9 +This tool is made for finding SQL injection vulnerability in web pages. The intention of this tool is to include
     10 +it in your ethical Bug Bounty Hunting methodology. Please do not use this tool on any website
     11 +without having its permission.
     12 +
     13 +IG: @a7.acc
     14 +GitHub: @3a7
     15 +Linktree: https://linktr.ee/a7.acc
     16 +
     17 +
     18 +functions in order:
     19 +arguments()
     20 +telegram()
     21 +vulnerability()
     22 +run()
     23 +main()
     24 +
     25 +'''
     26 +
     27 +# Create an ArgumentParser object
     28 +parser = argparse.ArgumentParser()
     29 +
     30 +# Add a command line argument
     31 +parser.add_argument('--blind',required=False, action='store_true', help='To tell the program that you want to test for blind SQL injection. Default detectors in config/blind-SQLi-detectors.txt. You can change it if you want.')
     32 +parser.add_argument('--blind-timeout',required=False, type=int, metavar='<int>', help='The blind sql detector timeout. ex. if the detector asks the website to wait 2 seconds, write 2 here. Default is 5 seconds')
     33 +parser.add_argument('-url' , required=True,type=file_or_url,metavar='URL or FILE',help='Could be a single URL or a file of URL\'s to check, ex (-url file.txt) or (-url https://example.com/page.php?id=2')
     34 +parser.add_argument('--clean', action='store_true', help='Clean un-wanted URL\'s before checking.')
     35 +parser.add_argument('--proxy', type=argparse.FileType("r"),required=False,metavar='<FILE>', help='Use proxies file to check the URL\'s')
     36 +parser.add_argument('--proxy-type',required=False,type=proxy_types ,help='Proxies type (HTTP/S, SOCKS4 or SOCKS5)')
     37 +parser.add_argument('-t', type=int,required=False,metavar='<int>', help='Amount threads. Default is 10')
     38 +parser.add_argument('--timeout', type=int,required=False, metavar='<int>',help='The amount of milliseconds to wait until making a request to the next link (is it has the same domain) to avoid false DoS attack against the domain. Default is 0 (one second is 1000)')
     39 +parser.add_argument('--telegram', action='store_true', help='To get hits on telegram, you can provide your bot\'s token and your telegram ID in this file config/tele.txt in this format token/id')
     40 +parser.add_argument('--user-agent', type=str,required=False,metavar='<str>', help='Specify certain user-agent. Default is random')
     41 +parser.add_argument('-v', action='store_true', help='Increase verbosity')
     42 +
     43 +# Parse the command line arguments
     44 +args = parser.parse_args()
     45 +
     46 +
     47 +# Dealing with arguments and setting them
     48 +def arguments():
     49 + global detectors, urls, proxies, isproxy, threads, timeout, istelegram, telegram_info, agent, errors, blind_timeout, blind, verbose
     50 + blind = False
     51 + blind_timeout = 5
     52 + urls = set()
     53 + proxies = set()
     54 + proxy_type = None
     55 + clean = args.clean
     56 + isproxy = False
     57 + threads = 10
     58 + timeout = 0
     59 + istelegram = False
     60 + telegram_info = None
     61 + agent = generate_user_agent
     62 + verbose = False
     63 + # Most Common SQL Injection Errors
     64 + errors = [x for x in open('config/SQLi-errrors.txt','r',encoding='utf-8').read().splitlines()]
     65 +
     66 +
     67 + # Retrieving the detectors / eihter blind or normal
     68 + if args.blind:
     69 + blind = True
     70 + detectors = [x for x in open('config/blind-SQLi-detectors.txt','r',encoding='utf-8').read().splitlines()]
     71 + else:
     72 + detectors = [x for x in open('config/SQLi-detectors.txt','r',encoding='utf-8').read().splitlines()]
     73 +
     74 + if args.blind_timeout is not None:
     75 + blind_timeout = args.blind_timeout
     76 +
     77 + # Checking -url argument
     78 + if args.url[0] == 'FILE':
     79 + # Here we are using .readlines() instead of .read().splitlines() to avoid MemoryError
     80 + try:
     81 + # file deepcode ignore PT: IGNORE
     82 + with open(args.url[1],'r',encoding='utf-8') as file:
     83 + for line in file.readlines():
     84 + urls.add(line.strip('\n'))
     85 + except Exception as e:
     86 + print(time,mark,yellow(str(e)))
     87 +
     88 + elif args.url[0] == 'URL':
     89 + urls.add(args.url[1])
     90 +
     91 +
     92 + # Checking --clean argument
     93 + if clean:
     94 + if args.url[0] == 'FILE':
     95 + urls = clean_it(urls)
     96 +
     97 + # Checking proxies and proxies type (--proxy and --proxy-type)
     98 + if args.proxy is not None:
     99 + isproxy = True
     100 + proxies_temp = [args.proxy.read().splitlines()]
     101 + if args.proxy_type is None:
     102 + msg = f'{time} You need to provide proxies type in order to use the proxy file. Please provide proxies type by using {cyan("--proxy-type")} [{cyan("HTTP HTTPS SOCKS4 SOCKS5")}]'
     103 + raise NameError(msg)
     104 + else:
     105 + proxy_type = args.proxy_type
     106 + for proxy in proxies_temp:
     107 + if proxy_type == 'HTTP' or proxy_type == 'HTTPS':
     108 + proxies.add({
     109 + 'http':f'https://{proxy}',
     110 + 'https':f'http://{proxy}'
     111 + })
     112 + elif proxy_type == 'SOCKS4':
     113 + proxies.add({
     114 + 'http':f'socks4://{proxy}',
     115 + 'https':f'socks4://{proxy}'
     116 + })
     117 + elif proxy_type == 'SOCKS5':
     118 + proxies.add({
     119 + 'http':f'socks5://{proxy}',
     120 + 'https':f'socks5://{proxy}'
     121 + })
     122 + proxies_temp.clear()
     123 +
     124 + # Checking --timeout
     125 + if args.timeout is not None:
     126 + timeout = args.timeout/1000
     127 +
     128 + # Checking threads -t
     129 + if args.t is not None:
     130 + threads = args.t
     131 +
     132 + # Checking --telegram
     133 + if args.telegram:
     134 + istelegram = True
     135 + f = open('config/tele.txt','r',encoding='utf-8')
     136 + telegram_info = f.read().strip('\n').split('/') #[token,id]
     137 + f.close()
     138 +
     139 + # Checking --user-agent
     140 + if args.user_agent is not None:
     141 + agent = lambda : args.user_agent
     142 +
     143 + if args.v is not None:
     144 + if args.v:
     145 + verbose = True
     146 + urls = list(urls)
     147 +
     148 +
     149 +# Sends information to telegram. info -> string (url encoded)
     150 +def telegram(info):
     151 + if istelegram:
     152 + try:
     153 + requests.post(f'https://api.telegram.org/bot{telegram_info[0]}/sendMessage?chat_id={telegram_info[1]}&text={info}')
     154 + except Exception as ex:
     155 + print(time,mark,'Error while sending info via telegram: ',yellow(str(ex)))
     156 +
     157 +
     158 +# To keep track of everything
     159 +bad = 0 # Un-vulnerable pages // requests that sent
     160 +hits = 0 # Vulnerable pages
     161 +error = 0 # Errored requests (timed-out, no response)
     162 +checked = 0 # Checked urls
     163 +
# The core function that checks every URL
def vulnerability():
    """Worker body: consume URLs from the shared `urls` list and probe every
    query-string parameter with every detector payload, updating the shared
    hit/bad/error counters.

    NOTE(review): the `checked` cursor and the counters are read/written by
    several threads with no lock, so two workers can claim the same URL or
    mis-count — confirm whether a threading.Lock is needed.
    """
    global checked, bad, hits, error, urls, proxies

    # While loop for every url
    while checked < len(urls):
        site = urls[checked]  # claim the next URL (unsynchronized, see NOTE)
        checked += 1

        # 1. Printing the information (Windows: console title; else: a
        # carriage-return status line rewritten in place)
        if ops == 'Windows':
            system(f'title ALL:{str(checked)}/{str(len(urls))} HIT:{str(hits)} BAD:{str(bad)} ERROR:{str(error)} THREADS:{str(threading.active_count()-1)}')
        else:
            sys.stdout.flush()
            print(f"\r{cyan('ALL')}:{str(checked)}/{str(len(urls))} {green('HIT')}:{str(hits)} {red('BAD')}:{str(bad)} {yellow('ERROR')}:{str(error)} {blue('THREADS')}:{str(threading.active_count()-1)}",end=' ')

        if site.count('=') > 1 and '&' in site: # This means we have multiple parameters to check
            params = site.split('?')[1].split('&')
        else:
            params = [site]  # single parameter: inject at the end of the URL

        # Loop through parameters
        for param in params:
            done = False
            after_param = site.index(param)+len(param) # the index of after the parameter. ex. id=3<here>
            urli = site[0:after_param] # Website including the param

            # Loop through all detectors
            for symbol in detectors:
                url = urli+symbol+site[after_param:] # adding the symbol and completing the url
                curl = urli+blue(symbol)+site[after_param:]  # colored copy for verbose output only
                blind_error = False

                try:
                    # Optional per-domain delay to avoid hammering one host.
                    # NOTE(review): `checked` was already incremented above,
                    # so urls[checked-1] is the *current* site — this compares
                    # a domain with itself and always sleeps; presumably the
                    # previous URL was intended. Confirm.
                    if checked != 0 and timeout > 0:
                        if urls[checked-1].split('/')[2] == site.split('/')[2]: # if the current url and previuos url have the same domain
                            sleep(timeout)

                    # Strip scheme/host from the reported parameter name when
                    # the whole URL was used as the "parameter".
                    if 'http' in param and '://' in param:
                        param = param.split('?')[-1]

                    if verbose:
                        print(f"[{cyan(str(param))}] [{blue(symbol)}] Checking: ",curl)

                    if blind:
                        # A read timeout is treated as the blind-SQLi signal:
                        # the request timeout is set just below the payload's
                        # requested delay.
                        try:
                            if isproxy:
                                res = requests.get(str(url),headers={'user-agent':agent()},timeout=int(blind_timeout)-1,proxies=choice(proxies))
                            else:
                                res = requests.get(str(url),headers={'user-agent':agent()},timeout=int(blind_timeout)-1)
                        except requests.exceptions.ReadTimeout:
                            blind_error = True
                    else:
                        if isproxy:
                            res = requests.get(str(url),headers={'user-agent':agent()},timeout=10,proxies=choice(proxies))
                        else:
                            res = requests.get(str(url),headers={'user-agent':agent()},timeout=10)
                except:
                    # Any other request failure: count it, try next detector.
                    error += 1
                    continue

                # NOTE(review): after a blind ReadTimeout, `res` is either the
                # previous iteration's response (stale) or — on the very first
                # timeout — unbound, raising NameError here. Confirm intended.
                response = res.text

                # If we're checking for blind sql injection
                if blind:
                    if blind_error:
                        # The delay payload stalled the server: count a hit.
                        hits += 1
                        inf = hit(url,requests.exceptions.ReadTimeout,symbol,param,False)
                        telegram(urllib.parse.quote(inf))
                        break
                    else:
                        bad += 1
                        continue

                else:
                    # loops through errors to check them
                    for er in errors:
                        if er in response:
                            # Checks again because sometimes the page includes that string inside it without being vulnerable
                            try:
                                if isproxy:
                                    res2 = requests.get(str(site),headers={'user-agent':agent()},timeout=10,proxies=choice(proxies))
                                else:
                                    res2 = requests.get(str(site),headers={'user-agent':agent()},timeout=10)
                                if er in res2.text: # page includes the error without being vulnerable
                                    continue
                                else: # the page is 99% vulnerable
                                    hits += 1
                                    inf = hit(url,er,symbol,param,True)
                                    telegram(urllib.parse.quote(inf))
                                    done = True
                                    break
                            except:
                                pass
                            # Re-check request failed: the page is 50% vulnerable
                            hits += 1
                            inf = hit(url,er,symbol,param,False)
                            telegram(urllib.parse.quote(inf))
                            done = True

                            break
                    else:
                        # No SQL error string matched for this payload.
                        bad += 1
                    if done:
                        break

    # 2. Printing the information (final status once this worker runs out of URLs)
    if ops == 'Windows':
        system(f'title ALL:{str(checked)}/{str(len(urls))} HIT:{str(hits)} BAD:{str(bad)} ERROR:{str(error)} THREADS:{str(threading.active_count()-1)}')
    else:
        sys.stdout.flush()
        print(f"\r{cyan('ALL')}:{str(checked)}/{str(len(urls))} {green('HIT')}:{str(hits)} {red('BAD')}:{str(bad)} {yellow('ERROR')}:{str(error)} {blue('THREADS')}:{str(threading.active_count()-1)}",end=' ')
     278 +
     279 +# Dealing with threads and starting them
     280 +def run():
     281 + global running
     282 + running = True
     283 +
     284 + # Checks every second if the program is finished or not by checking if the active threads are only 1
     285 + def CheckThreads():
     286 + global running
     287 + if threading.active_count() == 1:
     288 + running = False
     289 +
     290 + # Starting the threads
     291 + for _ in range(threads):
     292 + thread1 = threading.Thread(target=vulnerability)
     293 + thread1.start()
     294 +
     295 + # Running CheckThreads() function every second
     296 + schedule.every().second.do(CheckThreads)
     297 +
     298 + # Checking the running variable if it's True or False
     299 + while running:
     300 + schedule.run_pending()
     301 + sleep(1)
     302 + else: # means the program is stopped
     303 + thread1.join()
     304 + return
     305 +
     306 +
     307 +
     308 +# The main function
     309 +def main():
     310 + try:
     311 + # Brings all arguments
     312 + arguments()
     313 +
     314 + # Starting the program (Only returns if the program stopped)
     315 + run()
     316 +
     317 + # Status
     318 + print(time, hashtag, 'Done checking all url\'s!',hashtag)
     319 + print(mult,f"{cyan('ALL')}: {str(checked)}/{str(len(urls))}")
     320 + print(mult,f"{green('HIT')}: {str(hits)}")
     321 + print(mult,f"{red('BAD')}: {str(bad)}")
     322 + print(mult,f"{yellow('ERROR')}: {str(error)}")
     323 + except KeyboardInterrupt:
     324 + print('Bye :)')
     325 + sys.exit()
     326 +
     327 +
# Run only when executed as a script, not when imported.
if __name__ == '__main__':
    main()
     330 + 
  • ■ ■ ■ ■ ■ ■
    config_file.py
     1 +import platform,datetime
     2 +from colored import fg,attr
     3 +from os import system
     4 +from time import sleep
     5 +
     6 +'''
     7 +This file must be included in the same folder as SQLi Hunter v2.
     8 +'''
     9 +
     10 +# Detecting the OS
     11 +ops_release = str(platform.release())
     12 +ops = str(platform.system())
     13 +if '2012ServerR2' not in ops_release and ops == 'Windows' or ops == 'Linux':
     14 + green = lambda x : fg('green')+x+attr('reset')
     15 + red = lambda x : fg('red')+x+attr('reset')
     16 + blue = lambda x : fg('blue')+x+attr('reset')
     17 + yellow = lambda x : fg('yellow')+x+attr('reset')
     18 + cyan = lambda x : fg('cyan')+x+attr('reset')
     19 + magenta = lambda x : fg('magenta')+x+attr('reset')
     20 + clear = lambda: system("cls")
     21 + if ops == 'Linux':
     22 + clear = lambda: system("clear")
     23 +else:
     24 + green = lambda x : x
     25 + red = lambda x : x
     26 + blue = lambda x : x
     27 + yellow = lambda x : x
     28 + cyan = lambda x : x
     29 + magenta = lambda x : x
     30 + clear = lambda: system("cls")
     31 +
     32 +
# Markers used as prefixes in console output.
t = lambda : str(datetime.datetime.now())  # current timestamp as a string
mark = '['+red('!')+']'        # error / warning
question = '['+magenta('?')+']'  # prompt
hashtag ='['+green('#')+']'    # success
mult = '['+blue('*')+']'       # info
# NOTE(review): `time` is evaluated once at import, so every log line shows
# the program's start time rather than the current time — confirm intended.
time = '['+cyan(str(t()))+'] '
     40 +
     41 +
     42 +'''
     43 +FUNCTIONS TO CHECK THE GIVEN ARGUMENTS
     44 +'''
     45 +
     46 +# Define a custom type for a list of integers separated by a comma
     47 +def comma_separated_strings(string):
     48 + return [x for x in string.split(',')]
     49 +
     50 +
     51 +# Checks whether the given argument is a url or file
     52 +def file_or_url(string):
     53 + global url_detectors
     54 + url_detectors = ['http','://','?','.','/','=']
     55 + if len(string.split('.')) == 2 and '.txt' in string:
     56 + return 'FILE',string
     57 + else:
     58 + if [d for d in url_detectors if d in string] == url_detectors:
     59 + return 'URL',string
     60 + else:
     61 + msg = f"'{red(string)}' is invalid url or file. Example of a url: {green('https://example.com/page.php?key=value')}. Example of a file: {green('url_file.txt')}\n URL must include {yellow(str(' '.join(url_detectors)))} and file must be .txt file"
     62 + raise SyntaxError(msg)
     63 +
     64 +def proxy_types(string):
     65 + p_types = 'HTTP HTTPS SOCKS4 SOCKS5'
     66 + if string.upper() in p_types:
     67 + return string.upper()
     68 + else:
     69 + msg = f"'{string.upper()}' is invalid proxy type. Available proxy types are: {p_types}"
     70 + raise SyntaxError(msg)
     71 +
     72 +
     73 +
     74 +# Cleaning function
     75 +def clean_it(url_file):
     76 +
     77 + stage1 = set()
     78 + before = len(url_file)
     79 +
     80 + print(time, mult, 'Cleaning the file, this process may take few minutes, please be patient...')
     81 + for url in url_file:
     82 + if ([d for d in url_detectors if d in url] == url_detectors) and (not url.endswith('/')):
     83 + stage1.add(url)
     84 + print(time, mult, 'Cleaning is done.')
     85 +
     86 + with open('clean.txt','w',encoding='utf-8') as cl:
     87 + for u in stage1:
     88 + cl.write(u+'\n')
     89 +
     90 + print(time,hashtag, 'Clean URL\'s saved in',cyan('clean.txt'))
     91 + print(time,hashtag, 'Befor cleaning:',blue(str(before)),'After cleaning:',green(str(len(stage1))))
     92 + sleep(2)
     93 + return stage1
     94 +
     95 +
     96 +def hit(site,error,symbol,param,possibility):
     97 + if possibility:
     98 + possibility = '90% Possibility'
     99 + else:
     100 + possibility = '50% Possibility'
     101 +
     102 + information = f"\n===========================\n{hashtag} PAGE: {green(str(site))}\n{hashtag} VULNERABLE PARAMETER: {blue(param)}\n{hashtag} SYMBOL: {cyan(symbol)}\n{hashtag} ERROR: {yellow(str(error))}\n{hashtag} VULNERABLE: {red(str(possibility))}\n{hashtag} DATE: {str(datetime.datetime.now())}\n===========================\n"
     103 + info_raw = f"===========================\n[#] PAGE: {str(site)}\n[#] VULNERABLE PARAMETER: {str(param)}\n[#] SYMBOL: {str(symbol)}\n[#] ERROR: {str(error)}\n[#] VULNERABLE: {str(possibility)}\n[#] DATE: {str(datetime.datetime.now())}\n[#] Program By: @A7_acc\n===========================\n"
     104 +
     105 + print(information)
     106 +
     107 + try:
     108 + file = open('vulnerable_sites.txt','a', encoding='utf-8')
     109 + except:
     110 + file = open('vulnerable_sites.txt','w', encoding='utf-8')
     111 +
     112 + file.write(info_raw)
     113 + file.close()
     114 +
     115 + return info_raw
Please wait...
Page is in error, reload to recover