kitsec-core · commit 5c9594a9
  • README.md
    skipped 178 lines
    179 179   X-Cache: CONFIG_NOCACHE
    180 180   X-MSEdge-Ref: Ref A: BB20069DED8C4CF68A735496B4DAFD79 Ref B: PAR02EDGE0721 Ref C: 2023-03-07T10:04:11Z
    181 181   Date: Tue, 07 Mar 2023 10:04:11 GMT
     182 + 
     183 +Missing headers:
     184 +X-XSS-Protection, X-Content-Type-Options, Strict-Transport-Security, Content-Security-Policy, Referrer-Policy, Feature-Policy
    182 185  ``````
    183 186  </details>
    184 187   
    skipped 316 lines
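
    For context, the "Missing headers" report added above can be reproduced with a short check along these lines (a minimal sketch using `requests`; the header list mirrors the one kitsec reports, and `example.com` is a placeholder target):

    ```python
    import requests

    SECURITY_HEADERS = [
        "X-XSS-Protection", "X-Content-Type-Options", "Strict-Transport-Security",
        "Content-Security-Policy", "Referrer-Policy", "Feature-Policy",
    ]

    def missing_security_headers(url: str) -> list:
        """Return the security headers absent from the URL's response."""
        response = requests.get(url, timeout=5)
        # requests' header mapping is case-insensitive, so plain `in` works.
        return [h for h in SECURITY_HEADERS if h not in response.headers]

    if __name__ == "__main__":
        print(missing_security_headers("https://example.com"))  # placeholder URL
    ```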
  • core/README.md
    skipped 173 lines
    174 174   X-Cache: CONFIG_NOCACHE
    175 175   X-MSEdge-Ref: Ref A: BB20069DED8C4CF68A735496B4DAFD79 Ref B: PAR02EDGE0721 Ref C: 2023-03-07T10:04:11Z
    176 176   Date: Tue, 07 Mar 2023 10:04:11 GMT
     177 + 
     178 +Missing headers:
     179 +X-XSS-Protection, X-Content-Type-Options, Strict-Transport-Security, Content-Security-Policy, Referrer-Policy, Feature-Policy
    177 180  ``````
    178 181  </details>
    179 182   
    skipped 316 lines
  • core/kitsec/cli/cve.py
    skipped 2 lines
    3 3  from bs4 import BeautifulSoup
    4 4   
    5 5   
    6  - 
    7 6  def fetch_cwe(cwe_code):
    8 7   """Fetches the CWE name given a CWE code.
    9 8   
    skipped 5 lines
    15 14   # Construct the URL for the CWE code
    16 15   cwe_url = f"https://cwe.mitre.org/data/definitions/{cwe_code[4:].lower()}.html"
    17 16   
    18  - # Send a GET request to the URL and parse the HTML response with BeautifulSoup
     17 + # Send a GET request to the URL and parse the HTML response with
     18 + # BeautifulSoup
    19 19   response = requests.get(cwe_url)
    20 20   soup = BeautifulSoup(response.text, 'html.parser')
    21 21   
    skipped 34 lines
    56 56   severity_msg = "Severity information not available"
    57 57   else:
    58 58   severity_msg = severity
    59  - summary = item.get("cve", {}).get("description", {}).get("description_data", [])
    60  - summary = next((x.get("value") for x in summary if x.get("lang") == "en"), "")
     59 + summary = item.get(
     60 + "cve",
     61 + {}).get(
     62 + "description",
     63 + {}).get(
     64 + "description_data",
     65 + [])
     66 + summary = next((x.get("value")
     67 + for x in summary if x.get("lang") == "en"), "")
    61 68   
    62 69   # Extract CWE information and append it to the data
    63  - cwe_nodes = item.get("cve", {}).get("problemtype", {}).get("problemtype_data", [])
    64  - cwe_codes = [n.get("description", [{}])[0].get("value", "") for n in cwe_nodes if n.get("description")]
     70 + cwe_nodes = item.get(
     71 + "cve",
     72 + {}).get(
     73 + "problemtype",
     74 + {}).get(
     75 + "problemtype_data",
     76 + [])
     77 + cwe_codes = [n.get("description", [{}])[0].get("value", "")
     78 + for n in cwe_nodes if n.get("description")]
    65 79   for cwe_code in cwe_codes:
    66 80   cwe_name = fetch_cwe(cwe_code)
    67 81   result += f"CVE ID: {cve_id}\nCWE: {cwe_name}\nSeverity: {severity_msg}\nSummary: {summary}\n\n"
    68 82   
    69 83   return result
     84 + 
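
    The reformatting above does not change behaviour: `fetch_cwe` still strips the `CWE-` prefix (`cwe_code[4:]`) before building the MITRE URL. A minimal sketch of the same lookup, assuming a `CWE-NN` identifier and network access to cwe.mitre.org (the `<title>` element is an assumption; the real parser may target a different tag):

    ```python
    import requests
    from bs4 import BeautifulSoup

    def fetch_cwe_name(cwe_code: str) -> str:
        """Look up a CWE title on cwe.mitre.org, e.g. fetch_cwe_name('CWE-79')."""
        # 'CWE-79' -> '79'; same slice as cwe_code[4:] in the diff above.
        url = f"https://cwe.mitre.org/data/definitions/{cwe_code[4:].lower()}.html"
        response = requests.get(url, timeout=10)
        soup = BeautifulSoup(response.text, "html.parser")
        # Assumption: the page <title> carries the CWE name; the real fetch_cwe
        # may parse a different element.
        return soup.title.get_text(strip=True) if soup.title else cwe_code
    ```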
  • core/kitsec/cli/dependencies.py
    1 1  import subprocess
    2 2   
     3 + 
    3 4  def install_dependencies():
    4 5   """
    5 6   Installs the required dependencies for KitSec.
    6 7   """
    7 8   # Install subfinder
    8  - subprocess.run(['go', 'install', '-v', 'github.com/projectdiscovery/subfinder/v2/cmd/subfinder@latest'])
     9 + subprocess.run(['go', 'install', '-v',
     10 + 'github.com/projectdiscovery/subfinder/v2/cmd/subfinder@latest'])
    9 11   
    10 12   # Install assetfinder
    11 13  # subprocess.run(['go', 'install', 'github.com/tomnomnom/assetfinder@latest'])
    skipped 2 lines
    14 16  # subprocess.run(['go', 'install', 'github.com/tomnomnom/waybackurls@latest'])
    15 17   
    16 18   # Install Amass
    17  - subprocess.run(['go', 'install', '-v', 'github.com/OWASP/Amass/v3/...@master'])
     19 + subprocess.run(
     20 + ['go', 'install', '-v', 'github.com/OWASP/Amass/v3/...@master'])
     21 + 
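
    `install_dependencies` shells out to `go install`, so it fails noisily if the Go toolchain is absent. A hedged pre-flight sketch (`shutil.which` is the standard-library lookup; the error message is illustrative, not part of kitsec):

    ```python
    import shutil
    import subprocess

    def install_if_go_available() -> None:
        """Run a `go install` only when the Go toolchain is on PATH."""
        if shutil.which("go") is None:
            raise RuntimeError("Go toolchain not found; install Go first.")
        subprocess.run(
            ["go", "install", "-v",
             "github.com/projectdiscovery/subfinder/v2/cmd/subfinder@latest"],
            check=True,  # surface a non-zero exit instead of continuing silently
        )
    ```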
  • core/kitsec/cli/enumerator.py
    1 1  # Standard library modules
     2 +from concurrent.futures import ThreadPoolExecutor, as_completed
    2 3  import os
    3 4  import subprocess
    4 5  import time
    skipped 6 lines
    11 12  import warnings
    12 13  from tabulate import tabulate
    13 14  from tqdm import tqdm
    14  -from urllib.parse import urlparse
    15 15  from Wappalyzer import Wappalyzer, WebPage
    16 16   
    17 17   
    skipped 14 lines
    32 32   try:
    33 33   print('Enumerating using Subfinder...')
    34 34   with open(os.devnull, 'w') as nullfile:
    35  - output = subprocess.check_output(['subfinder', '-d', domain], stderr=nullfile)
     35 + output = subprocess.check_output(
     36 + ['subfinder', '-d', domain], stderr=nullfile)
    36 37   subdomains.update(output.decode('utf-8').strip().split('\n'))
    37  - except:
     38 + except BaseException:
    38 39   print('Subfinder is not installed or encountered an error, skipping...')
    39 40   
    40 41   # Enumerate using Amass
    41 42   try:
    42 43   print('Enumerating using Amass...')
    43 44   with open(os.devnull, 'w') as nullfile:
    44  - output = subprocess.check_output(['amass', 'enum', '--passive', '-d', domain], stderr=nullfile)
    45  - subdomains.update([s.split('.')[0] + '.' + domain for s in output.decode('utf-8').strip().split('\n')])
    46  - except:
     45 + output = subprocess.check_output(
     46 + ['amass', 'enum', '--passive', '-d', domain], stderr=nullfile)
     47 + subdomains.update([s.split(
     48 + '.')[0] + '.' + domain for s in output.decode('utf-8').strip().split('\n')])
     49 + except BaseException:
    47 50   print('Amass is not installed or encountered an error, skipping... / debug by running "amass enum --passive -d example.com"')
    48 51   
    49 52   # Enumerate using Findomain
    skipped 30 lines
    80 83   return subdomains
    81 84   
    82 85   
    83  -from concurrent.futures import ThreadPoolExecutor, as_completed
    84  - 
    85  -def fetch_response_worker(subdomain: str, session: requests.Session) -> List[str]:
     86 +def fetch_response_worker(
     87 + subdomain: str,
     88 + session: requests.Session) -> List[str]:
    86 89   try:
    87 90   response = session.get(f'http://{subdomain}', timeout=5)
    88 91   return [subdomain, response.status_code, response.reason, '']
    skipped 5 lines
    94 97   print(f"Skipped '{subdomain}': {str(e)}")
    95 98   return None
    96 99   
    97  -def fetch_response(subdomains: List[str], technology: bool, max_workers: int = 10) -> List[List[str]]:
     100 + 
     101 +def fetch_response(subdomains: List[str],
     102 + technology: bool,
     103 + max_workers: int = 10) -> List[List[str]]:
    98 104   response_table = []
    99 105   session = requests.Session()
    100 106   
    101 107   with ThreadPoolExecutor(max_workers=max_workers) as executor:
    102  - future_to_subdomain = {executor.submit(fetch_response_worker, subdomain, session): subdomain for subdomain in subdomains}
    103  - for future in tqdm(as_completed(future_to_subdomain), desc='Fetching response', total=len(subdomains), unit='subdomain', leave=False):
     108 + future_to_subdomain = {
     109 + executor.submit(
     110 + fetch_response_worker,
     111 + subdomain,
     112 + session): subdomain for subdomain in subdomains}
     113 + for future in tqdm(
     114 + as_completed(future_to_subdomain),
     115 + desc='Fetching response',
     116 + total=len(subdomains),
     117 + unit='subdomain',
     118 + leave=False):
    104 119   result = future.result()
    105 120   if result:
    106 121   response_table.append(result)
    skipped 15 lines
    122 137   """
    123 138   # Ignore JAVA warnings on wappalyzer & skip
    124 139   warnings.filterwarnings("ignore", category=UserWarning, module='bs4')
    125  - warnings.filterwarnings("ignore", category=UserWarning, message=".*It looks like you're parsing an XML document using an HTML parser.*")
    126  - warnings.filterwarnings("ignore", message="""Caught 'unbalanced parenthesis at position 119' compiling regex""", category=UserWarning )
     140 + warnings.filterwarnings(
     141 + "ignore",
     142 + category=UserWarning,
     143 + message=".*It looks like you're parsing an XML document using an HTML parser.*")
     144 + warnings.filterwarnings(
     145 + "ignore",
     146 + message="""Caught 'unbalanced parenthesis at position 119' compiling regex""",
     147 + category=UserWarning)
    127 148   
    128 149   # Ensure URL starts with http(s)
    129 150   if not url.startswith('http'):
    skipped 49 lines
    179 200   response_table = fetch_response(subdomains, False)
    180 201   # sort response_table by status in ascending order
    181 202   response_table = sorted(response_table, key=lambda x: x[1])
    182  - click.echo(tabulate(response_table, headers=['Subdomain', 'Status', 'Reason']))
     203 + click.echo(
     204 + tabulate(
     205 + response_table,
     206 + headers=[
     207 + 'Subdomain',
     208 + 'Status',
     209 + 'Reason']))
    183 210   
    184 211   if technology and not request:
    185 212   # Analyze technology used by subdomains
    skipped 16 lines
    202 229   tech_table.append([subdomain, tech])
    203 230   
    204 231   # Combine the two tables into a single table
    205  - response_df = pd.DataFrame(response_table, columns=['Subdomain', 'Status', 'Reason', 'Technology'])
     232 + response_df = pd.DataFrame(
     233 + response_table,
     234 + columns=[
     235 + 'Subdomain',
     236 + 'Status',
     237 + 'Reason',
     238 + 'Technology'])
    206 239   tech_df = pd.DataFrame(tech_table, columns=['Subdomain', 'Technology'])
    207  - combined_df = pd.merge(response_df, tech_df, on='Subdomain', how='outer')
    208  - combined_df.fillna('', inplace=True) # replace NaN values with empty string
     240 + combined_df = pd.merge(
     241 + response_df,
     242 + tech_df,
     243 + on='Subdomain',
     244 + how='outer')
     245 + # replace NaN values with empty string
     246 + combined_df.fillna('', inplace=True)
    209 247   combined_table = combined_df.to_records(index=False).tolist()
    210 248   
    211  - click.echo(tabulate(combined_table, headers=['Subdomain', 'Status', 'Reason', 'Technology']))
     249 + click.echo(
     250 + tabulate(
     251 + combined_table,
     252 + headers=[
     253 + 'Subdomain',
     254 + 'Status',
     255 + 'Reason',
     256 + 'Technology']))
    212 257   
    213 258   if not request and not technology:
    214 259   # Just print the subdomains
    skipped 6 lines
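
    The `fetch_response` rewrite above is the usual submit-then-`as_completed` fan-out over a `ThreadPoolExecutor`. A self-contained sketch of the same pattern (the `probe` worker and host list are hypothetical stand-ins for `fetch_response_worker` and the enumerated subdomains):

    ```python
    from concurrent.futures import ThreadPoolExecutor, as_completed
    from typing import Optional, Tuple

    import requests

    def probe(host: str) -> Optional[Tuple[str, int]]:
        """Hypothetical worker: return (host, status code) or None on failure."""
        try:
            return host, requests.get(f"http://{host}", timeout=5).status_code
        except requests.RequestException:
            return None

    hosts = ["example.com", "www.example.com"]  # placeholder subdomains
    with ThreadPoolExecutor(max_workers=10) as executor:
        futures = {executor.submit(probe, host): host for host in hosts}
        results = [f.result() for f in as_completed(futures) if f.result()]
    print(results)
    ```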
  • core/kitsec/cli/fuzz.py
    skipped 26 lines
    27 27   
    28 28   with ThreadPoolExecutor(max_workers=max_workers) as executor:
    29 29   if os.path.isdir(path):
    30  - # If the path is a directory, iterate through each file in the directory
     30 + # If the path is a directory, iterate through each file in the
     31 + # directory
    31 32   futures = []
    32 33   for filename in os.listdir(path):
    33 34   filepath = os.path.join(path, filename)
    34 35   if os.path.isfile(filepath):
    35  - # If the file is a regular file, read each line in the file and send a request to the URL
     36 + # If the file is a regular file, read each line in the file
     37 + # and send a request to the URL
    36 38   with open(filepath) as f:
    37 39   file_formats = f.read().splitlines()
    38  - progress_bar = tqdm(file_formats, desc=os.path.splitext(filename)[0], position=0, leave=True)
     40 + progress_bar = tqdm(
     41 + file_formats,
     42 + desc=os.path.splitext(filename)[0],
     43 + position=0,
     44 + leave=True)
    39 45   for file_format in progress_bar:
    40 46   url = f"{base_url}/{file_format}"
    41 47   future = executor.submit(send_request, url)
    skipped 3 lines
    45 51   future.result()
    46 52   
    47 53   elif os.path.isfile(path):
    48  - # If the path is a regular file, read each line in the file and send a request to the URL
     54 + # If the path is a regular file, read each line in the file and
     55 + # send a request to the URL
    49 56   with open(path) as f:
    50 57   file_formats = f.read().splitlines()
    51  - progress_bar = tqdm(file_formats, desc=os.path.basename(path), position=0, leave=True)
     58 + progress_bar = tqdm(
     59 + file_formats,
     60 + desc=os.path.basename(path),
     61 + position=0,
     62 + leave=True)
    52 63   futures = []
    53 64   for file_format in progress_bar:
    54 65   url = f"{base_url}/{file_format}"
    skipped 7 lines
    62 73   print(f"{path} does not exist")
    63 74   
    64 75   
    65  -def apply_file_format_fuzz(base_url, path='lists/fuzz/file_fuzz', max_workers=10):
     76 +def apply_file_format_fuzz(
     77 + base_url,
     78 + path='lists/fuzz/file_fuzz',
     79 + max_workers=10):
    66 80   if not base_url.startswith('http'):
    67 81   base_url = 'http://' + base_url
    68 82   
    skipped 5 lines
    74 88   
    75 89   with ThreadPoolExecutor(max_workers=max_workers) as executor:
    76 90   if os.path.isdir(path):
    77  - # If the path is a directory, iterate through each file in the directory
     91 + # If the path is a directory, iterate through each file in the
     92 + # directory
    78 93   futures = []
    79 94   for filename in os.listdir(path):
    80 95   filepath = os.path.join(path, filename)
    81 96   if os.path.isfile(filepath):
    82  - # If the file is a regular file, read each line in the file and send a request to the URL
     97 + # If the file is a regular file, read each line in the file
     98 + # and send a request to the URL
    83 99   with open(filepath) as f:
    84 100   file_formats = f.read().splitlines()
    85  - progress_bar = tqdm(file_formats, desc=os.path.splitext(filename)[0], position=0, leave=True)
     101 + progress_bar = tqdm(
     102 + file_formats,
     103 + desc=os.path.splitext(filename)[0],
     104 + position=0,
     105 + leave=True)
    86 106   for file_format in progress_bar:
    87 107   url = f"{base_url}/{file_format}"
    88 108   future = executor.submit(send_request, url)
    skipped 3 lines
    92 112   future.result()
    93 113   
    94 114   elif os.path.isfile(path):
    95  - # If the path is a regular file, read each line in the file and send a request to the URL
     115 + # If the path is a regular file, read each line in the file and
     116 + # send a request to the URL
    96 117   with open(path) as f:
    97 118   file_formats = f.read().splitlines()
    98  - progress_bar = tqdm(file_formats, desc=os.path.basename(path), position=0, leave=True)
     119 + progress_bar = tqdm(
     120 + file_formats,
     121 + desc=os.path.basename(path),
     122 + position=0,
     123 + leave=True)
    99 124   futures = []
    100 125   for file_format in progress_bar:
    101 126   url = f"{base_url}/{file_format}"
    skipped 5 lines
    107 132   
    108 133   else:
    109 134   print(f"{path} does not exist")
     135 + 
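
    Both fuzzers follow the same wordlist loop: read candidate paths, append each to the base URL, and submit requests through the executor. A minimal sketch of that loop under stated assumptions (`wordlist.txt` is a hypothetical wordlist; the non-404 filter in `send_request` is illustrative):

    ```python
    from concurrent.futures import ThreadPoolExecutor

    import requests

    def send_request(url: str) -> None:
        """Hypothetical worker: report paths that do not 404."""
        response = requests.get(url, timeout=5)
        if response.status_code != 404:
            print(f"{response.status_code} {url}")

    base_url = "http://example.com"  # placeholder target
    with open("wordlist.txt") as f:  # hypothetical wordlist file
        candidates = f.read().splitlines()

    with ThreadPoolExecutor(max_workers=10) as executor:
        futures = [executor.submit(send_request, f"{base_url}/{c}")
                   for c in candidates]
        for future in futures:
            future.result()  # re-raise worker exceptions, as the fuzzers do
    ```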
  • core/kitsec/cli/main.py
    1 1  # Standard library modules
    2  -import base64
    3 2  import binascii
    4  -import concurrent
    5  -import hashlib
    6  -import html
    7  -import json
    8  -import os
    9  -import platform
    10 3  import sys
    11 4   
    12 5  # Third-party modules
    13 6  import click
    14  -import ipaddress
    15 7  import ipwhois
    16 8  import paramiko
    17 9  import requests
    skipped 7 lines
    25 17  from kitsec.cli.cve import query_cve
    26 18  from kitsec.cli.enumerator import apply_enumerator
    27 19  from kitsec.cli.fuzz import apply_file_format_fuzz, apply_path_fuzz
    28  -from kitsec.cli.network import (apply_capture, apply_cidr, apply_disturb, apply_storm,
    29  - apply_scan_ports, apply_check_certificate)
     20 +from kitsec.cli.network import (
     21 + apply_capture,
     22 + apply_cidr,
     23 + apply_disturb,
     24 + apply_storm,
     25 + apply_scan_ports,
     26 + apply_check_certificate)
    30 27  from kitsec.cli.dependencies import install_dependencies
    31 28  from kitsec.cli.utils import apply_transformation
    32 29   
    33 30   
    34  - 
    35  - 
    36  - 
    37  - 
    38  -#todo: run kitsec from any directory
     31 +# todo: run kitsec from any directory
    39 32   
    40 33  @click.group()
    41 34  def cli():
    42 35   """
    43 36   KitSec - A CLI tool for security testing and reconnaissance.
    44 37   """
    45  - pass
     38 + 
    46 39   
    47 40  @click.command()
    48 41  def deps():
    skipped 6 lines
    55 48   
    56 49   
    57 50  @click.command()
    58  -@click.option('--host', prompt='Enter the IP address of the VPS server to connect to')
    59  -@click.option('--username', prompt='Enter the limited user account to use for connecting to the VPS server')
    60  -@click.option('--password', prompt='Enter the password for the user account', hide_input=True)
     51 +@click.option('--host',
     52 + prompt='Enter the IP address of the VPS server to connect to')
     53 +@click.option('--username',
     54 + prompt='Enter the limited user account to use for connecting to the VPS server')
     55 +@click.option('--password',
     56 + prompt='Enter the password for the user account',
     57 + hide_input=True)
    61 58  def vps(host, username, password):
    62 59   """
    63 60   Connects to a remote server using SSH and logs in as the specified user.
    skipped 20 lines
    84 81   client.close()
    85 82   
    86 83   
    87  - 
    88 84  @click.command()
    89 85  @click.argument('url')
    90 86  def capture(url):
    skipped 1 lines
    92 88   Captures the request headers for a given URL.
    93 89   """
    94 90   apply_capture(url)
     91 + 
    95 92   
    96 93  @click.command()
    97 94  @click.argument('url')
    skipped 4 lines
    102 99   """
    103 100   hostname = url.split('//')[-1].split('/')[0]
    104 101   apply_check_certificate(hostname)
     102 + 
    105 103   
    106 104  @click.command()
    107 105  @click.argument('data')
    108  -@click.option('--type', '-t', 'transformation_type', type=click.Choice(['URL', 'HTML', 'Base64', 'ASCII', 'Hex', 'Octal', 'Binary', 'MD5', 'SHA1', 'SHA256', 'BLAKE2B-160', 'GZIP']), default='Base64', help='The type of transformation to apply to the input data.')
     106 +@click.option('--type',
     107 + '-t',
     108 + 'transformation_type',
     109 + type=click.Choice(['URL',
     110 + 'HTML',
     111 + 'Base64',
     112 + 'ASCII',
     113 + 'Hex',
     114 + 'Octal',
     115 + 'Binary',
     116 + 'MD5',
     117 + 'SHA1',
     118 + 'SHA256',
     119 + 'BLAKE2B-160',
     120 + 'GZIP']),
     121 + default='Base64',
     122 + help='The type of transformation to apply to the input data.')
    109 123  @click.help_option('--help', '-h')
    110 124  def convert(data, transformation_type):
    111 125   """
    112 126   Applies a specified decoding or hashing function to input data.
    113 127   """
    114 128   try:
    115  - result = apply_transformation(data.encode('utf-8'), transformation_type)
     129 + result = apply_transformation(
     130 + data.encode('utf-8'), transformation_type)
    116 131   except Exception as e:
    117 132   click.echo(f"Error: {str(e)}")
    118 133   sys.exit(1)
    skipped 1 lines
    120 135   click.echo(result)
    121 136   
    122 137   
    123  - 
    124 138  @click.command()
    125  -@click.option('--request', '-r', is_flag=True, default=False, help='Test subdomains and print http response for active ones.')
    126  -@click.option('--technology', '-t', is_flag=True, default=False, help='Analyze technology used by subdomains.')
     139 +@click.option('--request', '-r', is_flag=True, default=False,
     140 + help='Test subdomains and print http response for active ones.')
     141 +@click.option('--technology', '-t', is_flag=True, default=False,
     142 + help='Analyze technology used by subdomains.')
    127 143  @click.argument('domain')
    128  -@click.option('-h', '--help', 'display_help', is_flag=True, help='Display this help message')
     144 +@click.option('-h', '--help', 'display_help', is_flag=True,
     145 + help='Display this help message')
    129 146  def enumerator(request, technology, domain, display_help):
    130 147   """
    131 148   Enumerate subdomains for a given domain.
    skipped 4 lines
    136 153   apply_enumerator(request=request, technology=technology, domain=domain)
    137 154   
    138 155   
    139  - 
    140 156  @click.command()
    141 157  @click.argument('url', required=True)
    142 158  @click.option('-m', '--method', default='GET', help='HTTP method to use')
    143  -@click.option('-p', '--payload', default='', help='Payload to include in the request body')
    144  -@click.option('-H', '--headers', default='', help='Headers to include in the request')
    145  -@click.option('-c', '--cookies', default='', help='Cookies to include in the request')
    146  -@click.option('-n', '--count', default=1, type=int, help='Number of times to repeat the request')
     159 +@click.option('-p', '--payload', default='',
     160 + help='Payload to include in the request body')
     161 +@click.option('-H', '--headers', default='',
     162 + help='Headers to include in the request')
     163 +@click.option('-c', '--cookies', default='',
     164 + help='Cookies to include in the request')
     165 +@click.option('-n', '--count', default=1, type=int,
     166 + help='Number of times to repeat the request')
    147 167  @click.option('--show-help', '-h', is_flag=True, help='Show help message.')
    148 168  def disturb(url, method, payload, headers, cookies, count, show_help):
    149 169   """
    skipped 4 lines
    154 174   else:
    155 175   responses = disturb(url, method, payload, headers, cookies, count)
    156 176   for i, response in enumerate(responses):
    157  - click.echo(f'Response {i + 1}: {response.status_code} - {response.reason}')
    158  - 
     177 + click.echo(
     178 + f'Response {i + 1}: {response.status_code} - {response.reason}')
    159 179   
    160 180   
    161 181  @click.command()
    162 182  @click.argument('url')
    163  -@click.option('--num-attacks', '-a', type=int, default=6, help='Number of parallel threats to send requests from.')
    164  -@click.option('--num-requests', '-r', type=int, default=200, help='Number of requests to send from each threat.')
    165  -@click.option('--num-retries', '-y', type=int, default=4, help='Number of times to retry failed requests.')
    166  -@click.option('--pause-before-retry', '-p', type=int, default=3000, help='Number of milliseconds to wait before retrying a failed request.')
    167  -@click.option('-h', '--help', 'display_help', is_flag=True, help='Display this help message')
    168  -def storm(url, num_attacks, num_requests, num_retries, pause_before_retry, display_help):
     183 +@click.option('--num-attacks', '-a', type=int, default=6,
     184 + help='Number of parallel threats to send requests from.')
     185 +@click.option('--num-requests', '-r', type=int, default=200,
     186 + help='Number of requests to send from each threat.')
     187 +@click.option('--num-retries', '-y', type=int, default=4,
     188 + help='Number of times to retry failed requests.')
     189 +@click.option('--pause-before-retry', '-p', type=int, default=3000,
     190 + help='Number of milliseconds to wait before retrying a failed request.')
     191 +@click.option('-h', '--help', 'display_help', is_flag=True,
     192 + help='Display this help message')
     193 +def storm(
     194 + url,
     195 + num_attacks,
     196 + num_requests,
     197 + num_retries,
     198 + pause_before_retry,
     199 + display_help):
    169 200   """
    170 201   Sends HTTP requests to a given URL with a specified number of threats and requests.
    171 202   """
    172 203   if display_help:
    173 204   click.echo(storm.get_help(click.Context(storm)))
    174 205   else:
    175  - results = apply_storm(url, num_attacks, num_requests, num_retries, pause_before_retry)
     206 + results = apply_storm(
     207 + url,
     208 + num_attacks,
     209 + num_requests,
     210 + num_retries,
     211 + pause_before_retry)
    176 212   click.echo(results)
    177 213   
    178 214   
    179 215  @click.command()
    180 216  @click.argument('url')
    181  -@click.option('-c', '--common-ports', is_flag=True, help='Scan only the most common HTTP ports (80, 8080, and 443)')
     217 +@click.option('-c', '--common-ports', is_flag=True,
     218 + help='Scan only the most common HTTP ports (80, 8080, and 443)')
    182 219  def portscan(url, common_ports):
    183 220   """
    184 221   Performs a TCP port scan on a specified hostname or URL and a range of ports.
    skipped 6 lines
    191 228   
    192 229  @click.command()
    193 230  @click.argument('base_url')
    194  -@click.option('-f', '--file-fuzz', is_flag=True, help='Use file format fuzzing')
     231 +@click.option('-f', '--file-fuzz', is_flag=True,
     232 + help='Use file format fuzzing')
    195 233  @click.option('-p', '--path-fuzz', is_flag=True, help='Use path fuzzing')
    196 234  @click.help_option('--help', '-h')
    197 235  def fuzz(base_url, file_fuzz, path_fuzz):
    skipped 25 lines
    223 261   print("Please specify either --file-fuzz or --path-fuzz.")
    224 262   
    225 263   
    226  - 
    227 264  @click.command()
    228 265  @click.argument('company_name')
    229 266  @click.help_option('--help', '-h')
    skipped 7 lines
    237 274   
    238 275  @click.command()
    239 276  @click.argument('product_name')
    240  -@click.option('--limit', '-l', type=int, default=10, help='Number of results to display (default=10)')
     277 +@click.option('--limit', '-l', type=int, default=10,
     278 + help='Number of results to display (default=10)')
    241 279  @click.help_option('--help', '-h')
    242 280  def cve(product_name, limit):
    243 281   """
    skipped 19 lines
    263 301   
    264 302  if __name__ == '__main__':
    265 303   cli()
     304 + 
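
    The rewrapped decorators above are standard Click usage: a `@click.group()` collects subcommands registered with `add_command`, and each option keeps its flags and help text across the line breaks. A stripped-down sketch of the same layout (the `hello` command is hypothetical, for illustration only):

    ```python
    import click

    @click.group()
    def cli():
        """Toy CLI mirroring the kitsec command layout."""

    @click.command()
    @click.argument("name")
    @click.option("--shout", "-s", is_flag=True, help="Uppercase the greeting.")
    def hello(name, shout):
        """Hypothetical subcommand for illustration."""
        greeting = f"Hello, {name}!"
        click.echo(greeting.upper() if shout else greeting)

    cli.add_command(hello)

    if __name__ == "__main__":
        cli()
    ```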
  • core/kitsec/cli/network.py
    skipped 13 lines
    14 14  from urllib.parse import urlparse
    15 15   
    16 16  # Built-in modules
    17  -import pty
    18 17  import random
    19 18  import textwrap
    20 19  import urllib
    skipped 23 lines
    44 43   # Function for checking SSL/TLS certificate
    45 44   # Create a socket object and wrap it with an SSL context
    46 45   context = ssl.create_default_context()
    47  - conn = context.wrap_socket(socket.socket(socket.AF_INET), server_hostname=hostname)
     46 + conn = context.wrap_socket(
     47 + socket.socket(
     48 + socket.AF_INET),
     49 + server_hostname=hostname)
    48 50   
    49 51   # Connect to the server and get the certificate
    50 52   conn.connect((hostname, port))
    51 53   cert = conn.getpeercert()
    52 54   
    53 55   # Extract relevant information from the certificate
    54  - not_before = datetime.datetime.strptime(cert['notBefore'], '%b %d %H:%M:%S %Y %Z')
    55  - not_after = datetime.datetime.strptime(cert['notAfter'], '%b %d %H:%M:%S %Y %Z')
     56 + not_before = datetime.datetime.strptime(
     57 + cert['notBefore'], '%b %d %H:%M:%S %Y %Z')
     58 + not_after = datetime.datetime.strptime(
     59 + cert['notAfter'], '%b %d %H:%M:%S %Y %Z')
    56 60   remaining_days = (not_after - datetime.datetime.now()).days
    57 61   
    58 62   # Check if the certificate is expired or expiring soon
    59 63   if remaining_days <= 0:
    60 64   click.echo(f"The SSL/TLS certificate for {hostname} has expired!")
    61 65   elif remaining_days <= 30:
    62  - click.echo(f"The SSL/TLS certificate for {hostname} will expire in {remaining_days} days.")
     66 + click.echo(
     67 + f"The SSL/TLS certificate for {hostname} will expire in {remaining_days} days.")
    63 68   
    64 69   # Output some information about the certificate
    65 70   click.echo(f"Hostname: {hostname}")
    skipped 33 lines
    99 104   try:
    100 105   sock.connect((ip_address, port))
    101 106   open_ports.append(f"{hostname}:{port}")
    102  - except:
     107 + except BaseException:
    103 108   pass
    104 109   finally:
    105 110   sock.close()
    skipped 12 lines
    118 123   
    119 124   # Return the open ports
    120 125   return open_ports
    121  - 
    122 126   
    123 127   
    124 128  def apply_capture(url):
    skipped 18 lines
    143 147   
    144 148   # Check for missing headers
    145 149   missing_headers = []
    146  - for header in ["X-XSS-Protection", "X-Content-Type-Options", "Strict-Transport-Security", "Content-Security-Policy", "Referrer-Policy", "Feature-Policy"]:
     150 + for header in [
     151 + "X-XSS-Protection",
     152 + "X-Content-Type-Options",
     153 + "Strict-Transport-Security",
     154 + "Content-Security-Policy",
     155 + "Referrer-Policy",
     156 + "Feature-Policy"]:
    147 157   if header not in headers:
    148 158   missing_headers.append(header)
    149 159   
    150 160   # Add the "Response headers" section
    151 161   request_info += "Response headers:\n"
    152  - request_info += textwrap.indent("\n".join([f" {header}: {value}" for header, value in response.headers.items()]), " ")
     162 + request_info += textwrap.indent("\n".join(
     163 + [f" {header}: {value}" for header, value in response.headers.items()]), " ")
    153 164   request_info += "\n"
    154 165   
    155 166   # Add the "Connection" header
    skipped 5 lines
    161 172   cookies = "\n".join(cookie_lines)
    162 173   request_info += f"Cookie: {cookies}\n"
    163 174   
    164  - request_info += "\n".join([f"{header}: {value}" for header, value in headers.items() if header not in ["Host", "User-Agent", "Cookie", "Connection"]])
     175 + request_info += "\n".join([f"{header}: {value}" for header,
     176 + value in headers.items() if header not in ["Host",
     177 + "User-Agent",
     178 + "Cookie",
     179 + "Connection"]])
    165 180   request_info += "\n\n"
    166 181   
    167 182   # Add the "Missing headers" section if there are missing headers
    skipped 4 lines
    172 187   print(request_info)
    173 188   
    174 189   
    175  - 
    176  -def apply_disturb(url, method='GET', payload='', headers={}, cookies={}, count=1):
     190 +def apply_disturb(
     191 + url,
     192 + method='GET',
     193 + payload='',
     194 + headers={},
     195 + cookies={},
     196 + count=1):
    177 197   """
    178 198   Sends multiple HTTP requests to the specified URL with the same payload.
    179 199   
    skipped 10 lines
    190 210   """
    191 211   responses = []
    192 212   for i in range(count):
    193  - response = requests.request(method, url, data=payload, headers=headers, cookies=cookies)
     213 + response = requests.request(
     214 + method,
     215 + url,
     216 + data=payload,
     217 + headers=headers,
     218 + cookies=cookies)
    194 219   responses.append(response)
    195 220   return responses
    196 221   
    197 222   
    198  -def storm(url, num_attacks=6, num_requests=200, num_retries=4, pause_before_retry=3000):
     223 +def storm(
     224 + url,
     225 + num_attacks=6,
     226 + num_requests=200,
     227 + num_retries=4,
     228 + pause_before_retry=3000):
    199 229   """
    200 230   Sends HTTP GET requests to the specified URL with a specified number of attacks and requests.
    201 231   
    skipped 30 lines
    232 262   threat_results.append(response)
    233 263   if response.status_code == 200:
    234 264   break
    235  - time.sleep(pause_before_retry/1000)
     265 + time.sleep(pause_before_retry / 1000)
    236 266   pbar.update(1)
    237 267   results.append(threat_results)
    238 268   return results
    skipped 19 lines
    258 288   # Define proxies, ports, user agents, and headers to shuffle
    259 289   proxies = ['1.2.3.4:8080', '5.6.7.8:3128', '9.10.11.12:80']
    260 290   ports = ['80', '8080', '3128']
    261  - user_agents = ['Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36',
    262  - 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:54.0) Gecko/20100101 Firefox/54.0']
     291 + user_agents = [
     292 + 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36',
     293 + 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:54.0) Gecko/20100101 Firefox/54.0']
    263 294   headers = {'Accept-Language': 'en-US,en;q=0.5', 'Connection': 'keep-alive'}
    264 295   
    265 296   # Shuffle the proxies, ports, user agents, and headers
    skipped 9 lines
    275 306   header = headers[0]
    276 307   
    277 308   # Create dictionary of shuffled proxy and header parameters
    278  - proxies_dict = {'http': f'http://{proxy}:{port}', 'https': f'https://{proxy}:{port}'}
     309 + proxies_dict = {
     310 + 'http': f'http://{proxy}:{port}',
     311 + 'https': f'https://{proxy}:{port}'}
    279 312   headers_dict = {'User-Agent': user_agent, **header}
    280 313   
    281 314   # Send GET request with shuffled parameters and handle exceptions
    282 315   try:
    283  - response = requests.get(url, proxies=proxies_dict, headers=headers_dict)
     316 + response = requests.get(
     317 + url,
     318 + proxies=proxies_dict,
     319 + headers=headers_dict)
    284 320   response.raise_for_status()
    285 321   return response.text
    286 322   except (requests.exceptions.RequestException, ValueError):
    287 323   return None
    288 324   
    289 325   
    290  -def apply_storm(url, num_attacks=6, num_requests=200, num_retries=4, pause_before_retry=3000):
     326 +def apply_storm(
     327 + url,
     328 + num_attacks=6,
     329 + num_requests=200,
     330 + num_retries=4,
     331 + pause_before_retry=3000):
    291 332   """
    292 333   Sends HTTP requests to a given URL with a specified number of threats and requests.
    293 334   """
    skipped 14 lines
    308 349   threat_results.append(response)
    309 350   if response.status_code == 200:
    310 351   break
    311  - time.sleep(pause_before_retry/1000)
     352 + time.sleep(pause_before_retry / 1000)
    312 353   pbar.update(1)
    313 354   results.append(threat_results)
    314 355   return results
    skipped 38 lines
    353 394   the auth.log file using sudo. It then continuously checks for new output from the file and
    354 395   prints it to the console as it is received.
    355 396   """
    356  - # Create an SSH client object and set the missing host key policy to auto add
     397 + # Create an SSH client object and set the missing host key policy to auto
     398 + # add
    357 399   ssh = paramiko.SSHClient()
    358 400   ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    359 401   
    skipped 4 lines
    364 406   channel = ssh.invoke_shell()
    365 407   channel.send('sudo tail -f /var/log/auth.log\n')
    366 408   
    367  - # Continuously check for new output from the auth.log file and print it to the console
     409 + # Continuously check for new output from the auth.log file and print it to
     410 + # the console
    368 411   while True:
    369 412   if channel.recv_ready():
    370 413   output = channel.recv(1024).decode('utf-8')
    371 414   click.echo(output, nl=False)
     415 + 
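
    The certificate check wraps a plain socket in the default SSL context and reads `getpeercert()`. A condensed sketch of that handshake (placeholder hostname; the date parsing matches the `%b %d %H:%M:%S %Y %Z` format used in the diff above):

    ```python
    import datetime
    import socket
    import ssl

    def days_until_expiry(hostname: str, port: int = 443) -> int:
        """Connect, fetch the peer certificate, and count remaining days."""
        context = ssl.create_default_context()
        with socket.create_connection((hostname, port), timeout=10) as sock:
            with context.wrap_socket(sock, server_hostname=hostname) as conn:
                cert = conn.getpeercert()
        not_after = datetime.datetime.strptime(
            cert["notAfter"], "%b %d %H:%M:%S %Y %Z")
        return (not_after - datetime.datetime.now()).days

    print(days_until_expiry("example.com"))  # placeholder host
    ```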
  • core/kitsec/cli/utils.py
    skipped 45 lines
    46 46   result = "Invalid hex input"
    47 47   elif transformation_type == "Octal":
    48 48   try:
    49  - result = ''.join([chr(int(octet, 8)) for octet in data.split()])
     49 + result = ''.join([chr(int(octet, 8))
     50 + for octet in data.split()])
    50 51   except ValueError:
    51 52   result = "Invalid octal input"
    52 53   elif transformation_type == "Binary":
    53 54   try:
    54  - result = ''.join([chr(int(octet, 2)) for octet in data.split()])
     55 + result = ''.join([chr(int(octet, 2))
     56 + for octet in data.split()])
    55 57   except ValueError:
    56 58   result = "Invalid binary input"
    57 59   elif transformation_type == "GZIP":
    skipped 17 lines
    75 77   result = "Invalid decoding or hashing type"
    76 78   
    77 79   return result
     80 + 
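
    The octal and binary branches both split the input on whitespace and decode each token with `int(..., base)`. A small round-trip example of the binary case (a sketch; the input string is illustrative):

    ```python
    def decode_binary(data: str) -> str:
        """Decode space-separated 8-bit binary tokens into text."""
        return "".join(chr(int(token, 2)) for token in data.split())

    print(decode_binary("01101000 01101001"))  # -> 'hi'
    ```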