| 1 | + | # |
| 2 | + | # BurpLinkFinder - Find links within JS files. |
| 3 | + | # |
| 4 | + | # Copyright (c) 2019 Frans Hendrik Botes |
| 5 | + | # Credit to https://github.com/GerbenJavado/LinkFinder for the idea and regex |
| 6 | + | # |
| 7 | + | from burp import IBurpExtender, IScannerCheck, IScanIssue, ITab |
| 8 | + | from java.io import PrintWriter |
| 9 | + | from java.net import URL |
| 10 | + | from java.util import ArrayList, List |
| 11 | + | from java.util.regex import Matcher, Pattern |
| 12 | + | import binascii |
| 13 | + | import base64 |
| 14 | + | import re |
| 15 | + | from javax import swing |
| 16 | + | from java.awt import Font, Color |
| 17 | + | from threading import Thread |
| 18 | + | from array import array |
| 19 | + | |
class BurpExtender(IBurpExtender, IScannerCheck, ITab):
    """Burp extension entry point.

    Registers a passive scanner check that feeds every in-scope response
    through linkAnalyse, logs discovered links to a custom Swing tab, and
    raises an informational SRI issue per scanned message.
    """

    # Banner shown whenever the log is (re)initialised.  Kept as a class
    # constant so registerExtenderCallbacks() and clearLog() cannot drift
    # apart (the original duplicated the literal in both places).
    BANNER = "Burp JS LinkFinder loaded." + "\n" + "Copyright (c) 2019 Frans Hendrik Botes" + "\n"

    def registerExtenderCallbacks(self, callbacks):
        """Burp calls this once on load; wire up scanner check + UI tab."""
        self.callbacks = callbacks
        self.helpers = callbacks.getHelpers()
        callbacks.setExtensionName("BurpLinkFinder")

        callbacks.issueAlert("BurpLinkFinder Passive Scanner enabled")

        # Keep the writers on self so later code can log to Burp's
        # output/error streams (the originals were created and discarded).
        self.stdout = PrintWriter(callbacks.getStdout(), True)
        self.stderr = PrintWriter(callbacks.getStderr(), True)
        callbacks.registerScannerCheck(self)
        self.initUI()
        self.callbacks.addSuiteTab(self)

        print ("Burp JS LinkFinder loaded.")
        print ("Copyright (c) 2019 Frans Hendrik Botes")
        self.outputTxtArea.setText(self.BANNER)

    def initUI(self):
        """Build the Swing panel shown in the extension's suite tab."""
        self.tab = swing.JPanel()

        # UI for Output: a read-through log area with a heading and a
        # button to reset it.
        self.outputLabel = swing.JLabel("LinkFinder Log:")
        self.outputLabel.setFont(Font("Tahoma", Font.BOLD, 14))
        self.outputLabel.setForeground(Color(255, 102, 52))
        self.logPane = swing.JScrollPane()
        self.outputTxtArea = swing.JTextArea()
        self.outputTxtArea.setFont(Font("Consolas", Font.PLAIN, 12))
        self.outputTxtArea.setLineWrap(True)
        self.logPane.setViewportView(self.outputTxtArea)
        self.clearBtn = swing.JButton("Clear Log", actionPerformed=self.clearLog)

        # Layout: single column (label / log / button) via GroupLayout.
        layout = swing.GroupLayout(self.tab)
        layout.setAutoCreateGaps(True)
        layout.setAutoCreateContainerGaps(True)
        self.tab.setLayout(layout)

        layout.setHorizontalGroup(
            layout.createParallelGroup()
            .addGroup(layout.createSequentialGroup()
                .addGroup(layout.createParallelGroup()
                    .addComponent(self.outputLabel)
                    .addComponent(self.logPane)
                    .addComponent(self.clearBtn)
                )
            )
        )

        layout.setVerticalGroup(
            layout.createParallelGroup()
            .addGroup(layout.createParallelGroup()
                .addGroup(layout.createSequentialGroup()
                    .addComponent(self.outputLabel)
                    .addComponent(self.logPane)
                    .addComponent(self.clearBtn)
                )
            )
        )

    def getTabCaption(self):
        """ITab: title of the suite tab."""
        return "BurpJSLinkFinder"

    def getUiComponent(self):
        """ITab: root component of the suite tab."""
        return self.tab

    def clearLog(self, event):
        """Reset the log area back to the startup banner."""
        self.outputTxtArea.setText(self.BANNER)

    def doPassiveScan(self, ihrr):
        """IScannerCheck: log links found in .js responses, raise an issue.

        NOTE(review): the substring test also matches URLs containing
        ".json" etc. — presumably acceptable, but worth confirming.
        """
        try:
            url = ihrr.getUrl()
            if ".js" in str(url):
                self.outputTxtArea.append("\n" + "[+] Valid URL found: " + str(url))
                # Only build the analyser when the URL is actually of
                # interest (the original constructed it unconditionally).
                findings = linkAnalyse(ihrr, self.helpers).analyseURL()
                # BUG FIX: the original reused the list's name as the loop
                # variable ("for counter, issueText in enumerate(issueText)"),
                # shadowing it — harmless here but fragile.
                for counter, finding in enumerate(findings):
                    self.outputTxtArea.append("\n" + "\t" + str(counter) + ' - ' + finding['link'])

            issues = ArrayList()
            issues.add(SRI(ihrr, self.helpers))
            return issues
        except UnicodeEncodeError:
            print ("Error in URL decode.")
            return None

    def consolidateDuplicateIssues(self, isb, isa):
        """IScannerCheck: -1 keeps the existing issue, drops the new one."""
        return -1
| 110 | + | |
| 111 | + | |
class linkAnalyse():
    """Extracts endpoint/link candidates from a JavaScript response body.

    Regex and approach credited to GerbenJavado/LinkFinder.
    """

    def __init__(self, reqres, helpers):
        # reqres: Burp IHttpRequestResponse; helpers: IExtensionHelpers.
        self.helpers = helpers
        self.reqres = reqres

    # VERBOSE-mode pattern matching absolute URLs, root-relative and
    # dot-relative paths, relative endpoints with an extension, and bare
    # filenames with a known web extension — always quoted.
    regex_str = """

  (?:"|')                               # Start newline delimiter

  (
    ((?:[a-zA-Z]{1,10}://|//)           # Match a scheme [a-Z]*1-10 or //
    [^"'/]{1,}\.                        # Match a domainname (any character + dot)
    [a-zA-Z]{2,}[^"']{0,})              # The domainextension and/or path

    |

    ((?:/|\.\./|\./)                    # Start with /,../,./
    [^"'><,;| *()(%%$^/\\\[\]]          # Next character can't be...
    [^"'><,;|()]{1,})                   # Rest of the characters can't be

    |

    ([a-zA-Z0-9_\-/]{1,}/               # Relative endpoint with /
    [a-zA-Z0-9_\-/]{1,}                 # Resource name
    \.(?:[a-zA-Z]{1,4}|action)          # Rest + extension (length 1-4 or action)
    (?:[\?|/][^"|']{0,}|))              # ? mark with parameters

    |

    ([a-zA-Z0-9_\-]{1,}                 # filename
    \.(?:php|asp|aspx|jsp|json|
    action|html|js|txt|xml)             # . + extension
    (?:\?[^"|']{0,}|))                  # ? mark with parameters

  )

  (?:"|')                               # End newline delimiter

  """

    def parser_file(self, content, regex_str, mode=1, more_regex=None, no_dup=1):
        """Scan *content* with *regex_str* and return [{'link': ...}, ...].

        :param content: text to scan (a Python str).
        :param regex_str: VERBOSE-mode regex with the link in group 1.
        :param mode: unused; kept for interface compatibility.
        :param more_regex: optional secondary filter applied to each link.
        :param no_dup: truthy -> drop duplicate links, keeping first-seen order.
        """
        regex = re.compile(regex_str, re.VERBOSE)
        items = [{"link": m.group(1)} for m in regex.finditer(content)]

        if no_dup:
            # Remove duplicates while preserving first-seen order.
            seen = set()
            unique_items = []
            for item in items:
                if item["link"] not in seen:
                    seen.add(item["link"])
                    unique_items.append(item)
            items = unique_items

        if not more_regex:
            return items
        # Keep only links that also match the secondary filter regex.
        return [item for item in items if re.search(more_regex, item["link"])]

    def analyseURL(self):
        """Return link dicts for a script response, or "" for other MIME types."""
        endpoints = ""
        mime_type = self.helpers.analyzeResponse(self.reqres.getResponse()).getStatedMimeType()
        if mime_type.lower() == 'script':
            # Round-trip through base64 to coerce the Java byte[] response
            # into a Python str that `re` can scan (Jython interop hack).
            encoded_resp = binascii.b2a_base64(self.reqres.getResponse())
            decoded_resp = base64.b64decode(encoded_resp)
            endpoints = self.parser_file(decoded_resp, self.regex_str)
        # FIX: original had an unreachable duplicate `return endpoints` and
        # an unused local `url`.
        return endpoints
| 193 | + | |
| 194 | + | |
class SRI(IScanIssue, ITab):
    """Informational IScanIssue marking a JS file analysed by LinkFinder."""

    def __init__(self, reqres, helpers):
        # reqres: the scanned IHttpRequestResponse; helpers kept for parity
        # with the rest of the extension.
        self.helpers = helpers
        self.reqres = reqres

    # ----- location of the affected item --------------------------------

    def getHost(self):
        return self.reqres.getHost()

    def getPort(self):
        return self.reqres.getPort()

    def getProtocol(self):
        return self.reqres.getProtocol()

    def getUrl(self):
        return self.reqres.getUrl()

    def getHttpService(self):
        return self.reqres.getHttpService()

    def getHttpMessages(self):
        # The single message this issue was raised for.
        return [self.reqres]

    # ----- issue metadata ------------------------------------------------

    def getIssueName(self):
        return "Linkfinder Analysed JS files"

    def getIssueType(self):
        # See http:#portswigger.net/burp/help/scanner_issuetypes.html
        return 0x08000000

    def getSeverity(self):
        # One of "High", "Medium", "Low", "Information" or "False positive".
        return "Information"

    def getConfidence(self):
        # One of "Certain", "Firm" or "Tentative".
        return "Certain"

    def getIssueBackground(self):
        return "JS files holds links to other parts of web applications. Refer to TAB for results."

    def getRemediationBackground(self):
        return "This is an <b>informational</b> finding only.<br>"

    def getIssueDetail(self):
        return ("Burp Scanner has analysed the following JS file for links: <b>"
                "%s</b><br><br>" % (self.reqres.getUrl().toString()))

    def getRemediationDetail(self):
        return None
| 244 | + | |
| 245 | + | |
| 246 | + | |
| 247 | + | |