├── PassiveRecon.py └── README.md /PassiveRecon.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from burp import IBurpExtender, IScannerCheck, ITab, IHttpListener, IScanIssue, IMessageEditorTab, IMessageEditorTabFactory 3 | from java.io import PrintWriter 4 | from java.net import URL 5 | from java.net import URLDecoder 6 | from java.util import ArrayList 7 | import re 8 | import json 9 | from urllib import unquote 10 | from javax import swing 11 | from java.awt import Font, Color 12 | from java.awt import EventQueue 13 | from java.lang import Runnable, Thread 14 | from javax.swing import JFileChooser 15 | from javax.swing.event import DocumentListener 16 | from javax.swing import JTabbedPane, JPanel, JScrollPane, JTextArea, BoxLayout, JButton 17 | 18 | # Runnable wrapper for Swing thread safety 19 | class Run(Runnable): 20 | def __init__(self, runner): 21 | self.runner = runner 22 | def run(self): 23 | self.runner() 24 | 25 | # Exclusion list 26 | JSExclusionList = ['jquery', 'google-analytics', 'gpt.js'] 27 | 28 | # MIME types we will scan 29 | TEXT_MIME_TYPES = [ 30 | "text/", 31 | "application/javascript", 32 | "application/x-javascript", 33 | "application/json", 34 | "application/xml", 35 | "application/xhtml+xml" 36 | ] 37 | 38 | class BurpExtender(IBurpExtender, IScannerCheck, ITab, IHttpListener, IMessageEditorTabFactory): 39 | def registerExtenderCallbacks(self, callbacks): 40 | self.callbacks = callbacks 41 | self.helpers = callbacks.getHelpers() 42 | callbacks.setExtensionName("PassiveRecon") 43 | 44 | callbacks.issueAlert("PassiveRecon Passive Scanner + Live HTTP Listener enabled") 45 | self.stdout = PrintWriter(callbacks.getStdout(), True) 46 | self.stderr = PrintWriter(callbacks.getStderr(), True) 47 | callbacks.registerScannerCheck(self) 48 | callbacks.registerHttpListener(self) # Added for live scanning 49 | callbacks.registerMessageEditorTabFactory(self) # Register Meta GraphQL tab 
50 | 51 | self.fullLog = [] 52 | self.seenLinks = set() 53 | self.graphqlQueries = [] 54 | self.subdomains = [] 55 | self.urls = [] 56 | 57 | self.initUI() 58 | self.callbacks.addSuiteTab(self) 59 | 60 | self.appendLog("PassiveRecon loaded (live scan active).") 61 | 62 | def createNewInstance(self, controller, editable): 63 | return GraphQLRequestTab(self, controller, editable) 64 | 65 | def initUI(self): 66 | self.tab = JTabbedPane() 67 | 68 | # Main tab 69 | mainPanel = swing.JPanel() 70 | self.setupMainTab(mainPanel) 71 | self.tab.addTab("Main", mainPanel) 72 | 73 | # GraphQL tab 74 | graphqlPanel = swing.JPanel() 75 | self.setupGraphQLTab(graphqlPanel) 76 | self.tab.addTab("GraphQL", graphqlPanel) 77 | 78 | # Subdomains tab 79 | subdomainsPanel = swing.JPanel() 80 | self.setupSubdomainsTab(subdomainsPanel) 81 | self.tab.addTab("Subdomains", subdomainsPanel) 82 | 83 | # URLs tab 84 | urlsPanel = swing.JPanel() 85 | self.setupURLsTab(urlsPanel) 86 | self.tab.addTab("URLs", urlsPanel) 87 | 88 | 89 | def setupMainTab(self, panel): 90 | self.outputLabel = swing.JLabel("Results:") 91 | self.outputLabel.setFont(Font("Tahoma", Font.BOLD, 14)) 92 | self.outputLabel.setForeground(Color(255, 102, 52)) 93 | 94 | self.logPane = swing.JScrollPane() 95 | self.outputTxtArea = swing.JTextArea() 96 | self.outputTxtArea.setFont(Font("Consolas", Font.BOLD, 18)) 97 | self.outputTxtArea.setLineWrap(True) 98 | self.outputTxtArea.setEditable(False) 99 | self.logPane.setViewportView(self.outputTxtArea) 100 | 101 | self.clearBtn = swing.JButton("Clear Log", actionPerformed=self.clearLog) 102 | self.exportBtn = swing.JButton("Export Log", actionPerformed=self.exportLog) 103 | self.parentFrm = swing.JFileChooser() 104 | self.inScopeCheckBox = swing.JCheckBox("Scan in-scope URLs only", actionPerformed=self.toggleInScope) 105 | self.inScopeOnly = False 106 | 107 | self.searchLabel = swing.JLabel("Search:") 108 | self.searchField = swing.JTextField(15) 109 | 
self.searchField.setFont(Font("Consolas", Font.PLAIN, 14)) 110 | self.searchField.setMaximumSize(self.searchField.getPreferredSize()) 111 | self.searchField.getDocument().addDocumentListener(self.SearchListener(self, "main")) 112 | 113 | layout = swing.GroupLayout(panel) 114 | layout.setAutoCreateGaps(True) 115 | layout.setAutoCreateContainerGaps(True) 116 | panel.setLayout(layout) 117 | 118 | layout.setHorizontalGroup( 119 | layout.createParallelGroup() 120 | .addComponent(self.outputLabel) 121 | .addComponent(self.logPane) 122 | .addGroup(layout.createSequentialGroup() 123 | .addComponent(self.clearBtn) 124 | .addComponent(self.exportBtn)) 125 | .addComponent(self.inScopeCheckBox) 126 | .addGroup(layout.createSequentialGroup() 127 | .addComponent(self.searchLabel) 128 | .addComponent(self.searchField)) 129 | ) 130 | 131 | layout.setVerticalGroup( 132 | layout.createSequentialGroup() 133 | .addComponent(self.outputLabel) 134 | .addComponent(self.logPane) 135 | .addGroup(layout.createParallelGroup() 136 | .addComponent(self.clearBtn) 137 | .addComponent(self.exportBtn)) 138 | .addComponent(self.inScopeCheckBox) 139 | .addGroup(layout.createParallelGroup() 140 | .addComponent(self.searchLabel) 141 | .addComponent(self.searchField)) 142 | ) 143 | 144 | def setupGraphQLTab(self, panel): 145 | self.graphqlLabel = swing.JLabel("GraphQL Queries:") 146 | self.graphqlLabel.setFont(Font("Tahoma", Font.BOLD, 14)) 147 | self.graphqlLabel.setForeground(Color(0, 102, 204)) 148 | 149 | self.graphqlPane = swing.JScrollPane() 150 | self.graphqlTxtArea = swing.JTextArea() 151 | self.graphqlTxtArea.setFont(Font("Consolas", Font.BOLD, 18)) 152 | self.graphqlTxtArea.setLineWrap(True) 153 | self.graphqlTxtArea.setEditable(False) 154 | self.graphqlPane.setViewportView(self.graphqlTxtArea) 155 | 156 | self.clearGraphQLBtn = swing.JButton("Clear GraphQL", actionPerformed=self.clearGraphQL) 157 | self.exportGraphQLBtn = swing.JButton("Export GraphQL", actionPerformed=self.exportGraphQL) 
158 | 159 | self.graphqlSearchLabel = swing.JLabel("Search:") 160 | self.graphqlSearchField = swing.JTextField(15) 161 | self.graphqlSearchField.setFont(Font("Consolas", Font.PLAIN, 14)) 162 | self.graphqlSearchField.setMaximumSize(self.graphqlSearchField.getPreferredSize()) 163 | self.graphqlSearchField.getDocument().addDocumentListener(self.SearchListener(self, "graphql")) 164 | 165 | layout = swing.GroupLayout(panel) 166 | layout.setAutoCreateGaps(True) 167 | layout.setAutoCreateContainerGaps(True) 168 | panel.setLayout(layout) 169 | 170 | layout.setHorizontalGroup( 171 | layout.createParallelGroup() 172 | .addComponent(self.graphqlLabel) 173 | .addComponent(self.graphqlPane) 174 | .addGroup(layout.createSequentialGroup() 175 | .addComponent(self.clearGraphQLBtn) 176 | .addComponent(self.exportGraphQLBtn)) 177 | .addGroup(layout.createSequentialGroup() 178 | .addComponent(self.graphqlSearchLabel) 179 | .addComponent(self.graphqlSearchField)) 180 | ) 181 | 182 | layout.setVerticalGroup( 183 | layout.createSequentialGroup() 184 | .addComponent(self.graphqlLabel) 185 | .addComponent(self.graphqlPane) 186 | .addGroup(layout.createParallelGroup() 187 | .addComponent(self.clearGraphQLBtn) 188 | .addComponent(self.exportGraphQLBtn)) 189 | .addGroup(layout.createParallelGroup() 190 | .addComponent(self.graphqlSearchLabel) 191 | .addComponent(self.graphqlSearchField)) 192 | ) 193 | 194 | def setupSubdomainsTab(self, panel): 195 | self.subdomainsLabel = swing.JLabel("Subdomains:") 196 | self.subdomainsLabel.setFont(Font("Tahoma", Font.BOLD, 14)) 197 | self.subdomainsLabel.setForeground(Color(0, 153, 76)) 198 | 199 | self.subdomainsPane = swing.JScrollPane() 200 | self.subdomainsTxtArea = swing.JTextArea() 201 | self.subdomainsTxtArea.setFont(Font("Consolas", Font.BOLD, 18)) 202 | self.subdomainsTxtArea.setLineWrap(True) 203 | self.subdomainsTxtArea.setEditable(False) 204 | self.subdomainsPane.setViewportView(self.subdomainsTxtArea) 205 | 206 | self.clearSubdomainsBtn = 
swing.JButton("Clear Subdomains", actionPerformed=self.clearSubdomains) 207 | self.exportSubdomainsBtn = swing.JButton("Export Subdomains", actionPerformed=self.exportSubdomains) 208 | 209 | self.subdomainsSearchLabel = swing.JLabel("Search:") 210 | self.subdomainsSearchField = swing.JTextField(15) 211 | self.subdomainsSearchField.setFont(Font("Consolas", Font.PLAIN, 14)) 212 | self.subdomainsSearchField.setMaximumSize(self.subdomainsSearchField.getPreferredSize()) 213 | self.subdomainsSearchField.getDocument().addDocumentListener(self.SearchListener(self, "subdomains")) 214 | 215 | layout = swing.GroupLayout(panel) 216 | layout.setAutoCreateGaps(True) 217 | layout.setAutoCreateContainerGaps(True) 218 | panel.setLayout(layout) 219 | 220 | layout.setHorizontalGroup( 221 | layout.createParallelGroup() 222 | .addComponent(self.subdomainsLabel) 223 | .addComponent(self.subdomainsPane) 224 | .addGroup(layout.createSequentialGroup() 225 | .addComponent(self.clearSubdomainsBtn) 226 | .addComponent(self.exportSubdomainsBtn)) 227 | .addGroup(layout.createSequentialGroup() 228 | .addComponent(self.subdomainsSearchLabel) 229 | .addComponent(self.subdomainsSearchField)) 230 | ) 231 | 232 | layout.setVerticalGroup( 233 | layout.createSequentialGroup() 234 | .addComponent(self.subdomainsLabel) 235 | .addComponent(self.subdomainsPane) 236 | .addGroup(layout.createParallelGroup() 237 | .addComponent(self.clearSubdomainsBtn) 238 | .addComponent(self.exportSubdomainsBtn)) 239 | .addGroup(layout.createParallelGroup() 240 | .addComponent(self.subdomainsSearchLabel) 241 | .addComponent(self.subdomainsSearchField)) 242 | ) 243 | 244 | def setupURLsTab(self, panel): 245 | self.urlsLabel = swing.JLabel("URLs:") 246 | self.urlsLabel.setFont(Font("Tahoma", Font.BOLD, 14)) 247 | self.urlsLabel.setForeground(Color(153, 0, 153)) 248 | 249 | self.urlsPane = swing.JScrollPane() 250 | self.urlsTxtArea = swing.JTextArea() 251 | self.urlsTxtArea.setFont(Font("Consolas", Font.BOLD, 18)) 252 | 
self.urlsTxtArea.setLineWrap(True) 253 | self.urlsTxtArea.setEditable(False) 254 | self.urlsPane.setViewportView(self.urlsTxtArea) 255 | 256 | self.clearURLsBtn = swing.JButton("Clear URLs", actionPerformed=self.clearURLs) 257 | self.exportURLsBtn = swing.JButton("Export URLs", actionPerformed=self.exportURLs) 258 | 259 | self.urlsSearchLabel = swing.JLabel("Search:") 260 | self.urlsSearchField = swing.JTextField(15) 261 | self.urlsSearchField.setFont(Font("Consolas", Font.PLAIN, 14)) 262 | self.urlsSearchField.setMaximumSize(self.urlsSearchField.getPreferredSize()) 263 | self.urlsSearchField.getDocument().addDocumentListener(self.SearchListener(self, "urls")) 264 | 265 | layout = swing.GroupLayout(panel) 266 | layout.setAutoCreateGaps(True) 267 | layout.setAutoCreateContainerGaps(True) 268 | panel.setLayout(layout) 269 | 270 | layout.setHorizontalGroup( 271 | layout.createParallelGroup() 272 | .addComponent(self.urlsLabel) 273 | .addComponent(self.urlsPane) 274 | .addGroup(layout.createSequentialGroup() 275 | .addComponent(self.clearURLsBtn) 276 | .addComponent(self.exportURLsBtn)) 277 | .addGroup(layout.createSequentialGroup() 278 | .addComponent(self.urlsSearchLabel) 279 | .addComponent(self.urlsSearchField)) 280 | ) 281 | 282 | layout.setVerticalGroup( 283 | layout.createSequentialGroup() 284 | .addComponent(self.urlsLabel) 285 | .addComponent(self.urlsPane) 286 | .addGroup(layout.createParallelGroup() 287 | .addComponent(self.clearURLsBtn) 288 | .addComponent(self.exportURLsBtn)) 289 | .addGroup(layout.createParallelGroup() 290 | .addComponent(self.urlsSearchLabel) 291 | .addComponent(self.urlsSearchField)) 292 | ) 293 | 294 | 295 | def appendLog(self, text): 296 | self.fullLog.append(text) 297 | self.outputTxtArea.append(text + "\n") 298 | 299 | def appendGraphQL(self, text): 300 | self.graphqlQueries.append(text) 301 | self.graphqlTxtArea.append(text + "\n") 302 | 303 | def appendSubdomain(self, text): 304 | self.subdomains.append(text) 305 | 
self.subdomainsTxtArea.append(text + "\n") 306 | 307 | def appendURL(self, text): 308 | self.urls.append(text) 309 | self.urlsTxtArea.append(text + "\n") 310 | 311 | 312 | class SearchListener(DocumentListener): 313 | def __init__(self, extender, tab_type): 314 | self.extender = extender 315 | self.tab_type = tab_type 316 | 317 | def insertUpdate(self, e): self.filterContent() 318 | def removeUpdate(self, e): self.filterContent() 319 | def changedUpdate(self, e): self.filterContent() 320 | 321 | def filterContent(self): 322 | if self.tab_type == "main": 323 | query = self.extender.searchField.getText().lower() 324 | self.extender.outputTxtArea.setText("") 325 | for line in self.extender.fullLog: 326 | if query in line.lower(): 327 | self.extender.outputTxtArea.append(line + "\n") 328 | elif self.tab_type == "graphql": 329 | query = self.extender.graphqlSearchField.getText().lower() 330 | self.extender.graphqlTxtArea.setText("") 331 | for line in self.extender.graphqlQueries: 332 | if query in line.lower(): 333 | self.extender.graphqlTxtArea.append(line + "\n") 334 | elif self.tab_type == "subdomains": 335 | query = self.extender.subdomainsSearchField.getText().lower() 336 | self.extender.subdomainsTxtArea.setText("") 337 | for line in self.extender.subdomains: 338 | if query in line.lower(): 339 | self.extender.subdomainsTxtArea.append(line + "\n") 340 | elif self.tab_type == "urls": 341 | query = self.extender.urlsSearchField.getText().lower() 342 | self.extender.urlsTxtArea.setText("") 343 | for line in self.extender.urls: 344 | if query in line.lower(): 345 | self.extender.urlsTxtArea.append(line + "\n") 346 | 347 | def getTabCaption(self): 348 | return "PassiveRecon" 349 | 350 | def getUiComponent(self): 351 | return self.tab 352 | 353 | def toggleInScope(self, event): 354 | self.inScopeOnly = not self.inScopeOnly 355 | self.appendLog("[+] Scan In-Scope URLs Only: " + str(self.inScopeOnly)) 356 | 357 | def clearLog(self, event): 358 | 
self.outputTxtArea.setText("PassiveRecon loaded.\n") 359 | self.fullLog = ["PassiveRecon loaded."] 360 | self.seenLinks.clear() 361 | 362 | def clearGraphQL(self, event): 363 | self.graphqlTxtArea.setText("") 364 | self.graphqlQueries = [] 365 | 366 | def clearSubdomains(self, event): 367 | self.subdomainsTxtArea.setText("") 368 | self.subdomains = [] 369 | 370 | def clearURLs(self, event): 371 | self.urlsTxtArea.setText("") 372 | self.urls = [] 373 | 374 | 375 | def exportLog(self, event): 376 | chooseFile = JFileChooser() 377 | ret = chooseFile.showDialog(self.logPane, "Choose file") 378 | if ret == JFileChooser.APPROVE_OPTION: 379 | filename = chooseFile.getSelectedFile().getCanonicalPath() 380 | with open(filename, 'w') as f: 381 | f.write("\n".join(self.fullLog)) 382 | 383 | def exportGraphQL(self, event): 384 | chooseFile = JFileChooser() 385 | ret = chooseFile.showDialog(self.graphqlPane, "Choose file") 386 | if ret == JFileChooser.APPROVE_OPTION: 387 | filename = chooseFile.getSelectedFile().getCanonicalPath() 388 | with open(filename, 'w') as f: 389 | f.write("\n".join(self.graphqlQueries)) 390 | 391 | def exportSubdomains(self, event): 392 | chooseFile = JFileChooser() 393 | ret = chooseFile.showDialog(self.subdomainsPane, "Choose file") 394 | if ret == JFileChooser.APPROVE_OPTION: 395 | filename = chooseFile.getSelectedFile().getCanonicalPath() 396 | with open(filename, 'w') as f: 397 | f.write("\n".join(self.subdomains)) 398 | 399 | def exportURLs(self, event): 400 | chooseFile = JFileChooser() 401 | ret = chooseFile.showDialog(self.urlsPane, "Choose file") 402 | if ret == JFileChooser.APPROVE_OPTION: 403 | filename = chooseFile.getSelectedFile().getCanonicalPath() 404 | with open(filename, 'w') as f: 405 | f.write("\n".join(self.urls)) 406 | 407 | 408 | 409 | 410 | def is_text_based(self, ihrr): 411 | try: 412 | # can accept either IHttpRequestResponse or message bytes; handle gracefully 413 | resp = ihrr.getResponse() 414 | if resp is None: 415 | 
return False 416 | headers = self.helpers.analyzeResponse(resp).getHeaders() 417 | for header in headers: 418 | if header.lower().startswith("content-type:"): 419 | ctype = header.split(":", 1)[1].strip().lower() 420 | return any(ctype.startswith(mt) for mt in TEXT_MIME_TYPES) 421 | except: 422 | pass 423 | return False 424 | 425 | def scanJS(self, ihrr): 426 | try: 427 | if ihrr.getResponse() is None: 428 | return None 429 | linkA = linkAnalyse(ihrr, self.helpers) 430 | return linkA.analyseURL() 431 | except UnicodeEncodeError: 432 | return None 433 | 434 | def doPassiveScan(self, ihrr): 435 | try: 436 | urlReq = ihrr.getUrl() 437 | if self.inScopeOnly and not self.callbacks.isInScope(urlReq): 438 | return None 439 | if not self.is_text_based(ihrr): 440 | return None 441 | if any(x in str(urlReq) for x in JSExclusionList): 442 | return None 443 | if str(urlReq) in self.seenLinks: 444 | return None 445 | self.appendLog("[+] Valid URL found: " + str(urlReq)) 446 | self.seenLinks.add(str(urlReq)) 447 | linkA = linkAnalyse(ihrr, self.helpers) 448 | issueText = linkA.analyseURL() 449 | for counter, issue in enumerate(issueText): 450 | link = issue['link'] 451 | if link not in self.seenLinks: 452 | self.appendLog("\t" + str(counter) + ' - ' + link) 453 | self.seenLinks.add(link) 454 | 455 | # Extract GraphQL queries, subdomains, URLs 456 | self.extractGraphQL(ihrr) 457 | self.extractSubdomains(ihrr) 458 | self.extractURLs(ihrr) 459 | 460 | issues = ArrayList() 461 | issues.add(SRI(ihrr, self.helpers)) 462 | return issues 463 | except UnicodeEncodeError: 464 | return None 465 | except Exception as e: 466 | self.appendLog("[!] Passive scan error: " + str(e)) 467 | return None 468 | 469 | def processHttpMessage(self, toolFlag, messageIsRequest, messageInfo): 470 | """ 471 | Live listener for all HTTP messages. 
472 | """ 473 | try: 474 | urlReq = messageInfo.getUrl() 475 | 476 | # Respect in-scope filter 477 | if urlReq and self.inScopeOnly and not self.callbacks.isInScope(urlReq): 478 | return 479 | 480 | self.extractGraphQL(messageInfo) 481 | 482 | # For responses only, do additional processing 483 | if not messageIsRequest: 484 | if urlReq and any(x in str(urlReq) for x in JSExclusionList): 485 | return 486 | 487 | if urlReq and str(urlReq) in self.seenLinks: 488 | return 489 | 490 | if urlReq: 491 | self.appendLog("[+] URL found: " + str(urlReq)) 492 | self.seenLinks.add(str(urlReq)) 493 | 494 | issueText = self.scanJS(messageInfo) 495 | if issueText: 496 | for counter, issue in enumerate(issueText): 497 | link = issue['link'] 498 | if link not in self.seenLinks: 499 | self.appendLog("\t" + str(counter) + ' - ' + link) 500 | self.seenLinks.add(link) 501 | 502 | # Additional extraction for responses 503 | self.extractSubdomains(messageInfo) 504 | self.extractURLs(messageInfo) 505 | 506 | except Exception as e: 507 | self.appendLog("[!] 
Live scan error: " + str(e)) 508 | 509 | def extractValue(self, body, key): 510 | """Helper method to extract values from request body (from your second extension)""" 511 | start = body.find(key) + len(key) + 1 512 | end = body.find("&", start) 513 | if end == -1: 514 | end = len(body) 515 | return body[start:end] 516 | 517 | 518 | def extractGraphQL(self, ihrr): 519 | try: 520 | # Get both request and response 521 | request_info = None 522 | response_body = None 523 | request_body = "" 524 | 525 | # Extract request information (Meta GraphQL parsing) 526 | try: 527 | request_bytes = ihrr.getRequest() 528 | if request_bytes: 529 | request_info = self.helpers.analyzeRequest(request_bytes) 530 | request_headers = request_info.getHeaders() 531 | request_body = self.helpers.bytesToString(request_bytes[request_info.getBodyOffset():]) 532 | 533 | # Meta GraphQL detection from your second extension 534 | meta_graphql_detected = False 535 | meta_graphql_info = "" 536 | 537 | # Check for Facebook-style GraphQL patterns 538 | if "fb_api_req_friendly_name" in request_body: 539 | meta_graphql_detected = True 540 | meta_graphql_info += "Friendly Name: " + self.extractValue(request_body, "fb_api_req_friendly_name") + "\n" 541 | 542 | # Check for variables 543 | if "variables" in request_body: 544 | meta_graphql_detected = True 545 | variables = self.extractValue(request_body, "variables") 546 | try: 547 | decoded_vars = unquote(variables) 548 | pretty_vars = json.dumps(json.loads(decoded_vars), indent=2) 549 | meta_graphql_info += "Variables:\n" + pretty_vars + "\n" 550 | except: 551 | meta_graphql_info += "Variables (raw): " + variables + "\n" 552 | 553 | # Check for doc_id 554 | if "doc_id" in request_body: 555 | meta_graphql_detected = True 556 | meta_graphql_info += "Document ID: " + self.extractValue(request_body, "doc_id") + "\n" 557 | 558 | if meta_graphql_detected: 559 | graphql_entry = "=== GRAPHQL REQUEST ===\n" 560 | graphql_entry += "URL: " + str(ihrr.getUrl()) + "\n" 
561 | graphql_entry += meta_graphql_info + "-"*50 + "\n" 562 | 563 | if graphql_entry not in self.graphqlQueries: 564 | self.appendGraphQL(graphql_entry) 565 | 566 | except Exception as e: 567 | self.appendLog("[!] Error in Meta GraphQL request parsing: " + str(e)) 568 | 569 | # Extract response information (traditional GraphQL patterns) 570 | if ihrr.getResponse() is not None: 571 | response_body = self.helpers.bytesToString(ihrr.getResponse()) 572 | 573 | # Traditional GraphQL patterns in response 574 | graphql_query_regex = r"\bquery\s+\w+\s*[\{\(][^,]*\}" 575 | graphql_mutation_regex = r"\bmutation\s+\w+\s*[\{\(][^,]*\}" 576 | graphql_fragment_regex = r"\bfragment\s[^\",]*\}" 577 | 578 | queries = re.findall(graphql_query_regex, response_body, re.IGNORECASE | re.DOTALL) 579 | mutations = re.findall(graphql_mutation_regex, response_body, re.IGNORECASE | re.DOTALL) 580 | fragments = re.findall(graphql_fragment_regex, response_body, re.IGNORECASE | re.DOTALL) 581 | 582 | if queries or mutations or fragments: 583 | graphql_entry = "=== GRAPHQL RESPONSE CONTENT ===\n" 584 | graphql_entry += "URL: " + str(ihrr.getUrl()) + "\n" 585 | 586 | for query in queries: 587 | graphql_entry += "Query found:\n" + query + "\n" + "-"*50 + "\n" 588 | 589 | for mutation in mutations: 590 | graphql_entry += "Mutation found:\n" + mutation + "\n" + "-"*50 + "\n" 591 | 592 | for fragment in fragments: 593 | graphql_entry += "Fragment found:\n" + fragment + "\n" + "-"*50 + "\n" 594 | 595 | if graphql_entry not in self.graphqlQueries: 596 | self.appendGraphQL(graphql_entry) 597 | 598 | except Exception as e: 599 | self.appendLog("[!] 
Error extracting GraphQL: " + str(e)) 600 | 601 | 602 | 603 | def extractValue(self, body, key): 604 | """Helper method to extract values from request body""" 605 | start = body.find(key) + len(key) + 1 606 | end = body.find("&", start) 607 | if end == -1: 608 | end = len(body) 609 | return body[start:end] 610 | 611 | def extractSubdomains(self, ihrr): 612 | try: 613 | if ihrr.getResponse() is None: 614 | return 615 | body = self.helpers.bytesToString(ihrr.getResponse()) 616 | url = ihrr.getUrl().toString() 617 | domain = URL(url).getHost() 618 | 619 | # Extract subdomains from the response body 620 | subdomain_regex = r"\b(?:[a-zA-Z0-9-]+\.)+(?:com|net|org|io|gov|edu|co|in|us|uk|info|biz|me|online|dev|app|cloud|tech|ai|xyz|pro)(?::\d{1,5})?\b" 621 | subdomains = re.findall(subdomain_regex, body, re.IGNORECASE) 622 | 623 | for subdomain in subdomains: 624 | if subdomain != domain and subdomain not in self.subdomains: 625 | self.appendSubdomain(subdomain) 626 | 627 | except Exception as e: 628 | self.appendLog("[!] Error extracting subdomains: " + str(e)) 629 | 630 | def extractURLs(self, ihrr): 631 | try: 632 | if ihrr.getResponse() is None: 633 | return 634 | body = self.helpers.bytesToString(ihrr.getResponse()) 635 | 636 | # URL regex pattern 637 | url_regex = r"https?://(?:[-\w.]|(?:%[\da-fA-F]{2}))+[/\w\.-]*\??[/\w\.-=&%]*" 638 | urls = re.findall(url_regex, body, re.IGNORECASE) 639 | 640 | for url in urls: 641 | if url not in self.urls: 642 | self.appendURL(url) 643 | 644 | except Exception as e: 645 | self.appendLog("[!] 
Error extracting URLs: " + str(e)) 646 | 647 | 648 | def consolidateDuplicateIssues(self, isb, isa): 649 | return -1 650 | 651 | class linkAnalyse(): 652 | def __init__(self, reqres, helpers): 653 | self.helpers = helpers 654 | self.reqres = reqres 655 | 656 | regex_str = r""" 657 | (?:"|'|`) 658 | ( 659 | ((?:[a-zA-Z]{1,10}://|//)[^"'/]{1,}\.[a-zA-Z]{2,}[^"']{0,}) 660 | | 661 | ((?:/|\.\./)?[^"'><,;| *()(%%$^/\\\[\]][^"'><,;|()]{1,}) 662 | | 663 | (?:\${?[a-zA-Z]+}?|/)(?:/\${?[a-zA-Z0-9_]+})*[a-zA-Z0-9_\-/]+(?::[a-zA-Z]+)?(?:/[a-zA-Z0-9_\-/]*)*(?:\.(?:[a-zA-Z]{1,4}|action))? 664 | | 665 | ([a-zA-Z0-9_\-/]+/[a-zA-Z0-9_\-/]+\.(?:[a-zA-Z]{1,4}|action)(?:[\?|/][^"|']{0,}|)) 666 | | 667 | ([a-zA-Z0-9_\-]{1,}\.(?:php|asp|aspx|jsp|json|action|html|js|txt|xml)(?:\?[^"|']{0,}|)) 668 | ) 669 | (?:"|'|`) 670 | """ 671 | 672 | def analyseURL(self): 673 | issueLinks = [] 674 | try: 675 | if self.reqres.getResponse() is None: 676 | return issueLinks 677 | body = self.helpers.bytesToString(self.reqres.getResponse()) 678 | links = re.findall(self.regex_str, body, re.VERBOSE) 679 | for link in links: 680 | link_str = link[0] if isinstance(link, tuple) else link 681 | link_str = link_str.strip() 682 | 683 | if ( 684 | '/' in link_str 685 | and ' ' not in link_str 686 | and ',' not in link_str 687 | and not link_str.startswith('./') 688 | and not link_str.startswith('}') 689 | and not link_str.startswith('=') 690 | and not link_str.startswith('@') 691 | and not link_str.startswith('../') 692 | and self.is_valid_link(link_str) 693 | # 👇 extra filter for unwanted MIME types 694 | and not link_str.startswith(( 695 | "application", 696 | "text", 697 | "image", 698 | "svg", 699 | "xml", 700 | "css", 701 | "Asia", 702 | "Europe", 703 | "Pacific", 704 | "America", 705 | "video", 706 | "audio", 707 | 708 | )) 709 | ): 710 | issueLinks.append({'link': link_str}) 711 | return issueLinks 712 | except: 713 | return None 714 | 715 | 716 | def is_valid_link(self, link): 717 | if 
re.search(r'[^\x20-\x7E]', link): 718 | return False 719 | if len(link) > 2048: 720 | return False 721 | return True 722 | 723 | class SRI(IScanIssue): 724 | def __init__(self, reqres, helpers): 725 | self.reqres = reqres 726 | self.helpers = helpers 727 | 728 | def getIssueType(self): 729 | return "JavaScript Link Analysis" 730 | def getSeverity(self): 731 | return "Informational" 732 | def getConfidence(self): 733 | return "Certain" 734 | def getIssueDetail(self): 735 | return "This text-based file has been analyzed." 736 | def getRemediationDetail(self): 737 | return "No remediation required." 738 | def getHttpMessages(self): 739 | return [self.reqres] 740 | def getIssueName(self): 741 | return "Text-based URL Found" 742 | def getUrl(self): 743 | return self.reqres.getUrl() 744 | 745 | class GraphQLRequestTab(IMessageEditorTab): 746 | def __init__(self, extender, controller, editable): 747 | self._extender = extender 748 | self._helpers = extender.helpers 749 | self._editable = editable 750 | self._modified = False # Track modification state 751 | 752 | # Use Burp Suite's built-in message editor 753 | self._editor = extender.callbacks.createMessageEditor(controller, editable) 754 | 755 | # Create panel UI 756 | self._panel = JPanel() 757 | self._panel.setLayout(BoxLayout(self._panel, BoxLayout.Y_AXIS)) 758 | self._scanButton = JButton("Scan", actionPerformed=self.scanRequest) 759 | self._panel.add(self._editor.getComponent()) 760 | self._panel.add(self._scanButton) 761 | 762 | self._currentMessage = None 763 | self._currentRequestInfo = None 764 | self._responseTab = None 765 | self._tabbedPane = None 766 | 767 | def getTabCaption(self): 768 | return "Meta GraphQL" 769 | 770 | def getUiComponent(self): 771 | return self._panel 772 | 773 | def isEnabled(self, content, isRequest): 774 | if isRequest: 775 | try: 776 | request_info = self._helpers.analyzeRequest(content) 777 | headers = request_info.getHeaders() 778 | body = 
content[request_info.getBodyOffset():].tostring() 779 | return "graphql" in headers[0].lower() and ( 780 | "fb_api_req_friendly_name" in body 781 | or "variables" in body 782 | or "doc_id" in body 783 | ) 784 | except: 785 | return False 786 | return False 787 | 788 | def setMessage(self, content, isRequest): 789 | self._modified = False # Reset modified state when new message is set 790 | if content is None: 791 | self._editor.setMessage(None, isRequest) 792 | else: 793 | if isRequest: 794 | self._currentMessage = content 795 | self._currentRequestInfo = self._helpers.analyzeRequest(content) 796 | body = content[self._currentRequestInfo.getBodyOffset():].tostring() 797 | parsed_body = self.parseGraphQLBody(body) 798 | self._editor.setMessage(self._helpers.stringToBytes(parsed_body), isRequest) 799 | else: 800 | self._editor.setMessage(None, isRequest) 801 | 802 | # NEW: Required method - check if content has been modified 803 | def getMessage(self): 804 | if self._editor.isMessageModified(): 805 | return self._editor.getMessage() 806 | return self._currentMessage 807 | 808 | def isModified(self): 809 | return self._editor.isMessageModified() 810 | 811 | def getSelectedData(self): 812 | return self._editor.getSelectedData() 813 | 814 | 815 | def parseGraphQLBody(self, body): 816 | parsed_output = [] 817 | 818 | if "fb_api_req_friendly_name" in body: 819 | parsed_output.append("fb_api_req_friendly_name: " + 820 | self.extractValue(body, "fb_api_req_friendly_name")) 821 | 822 | if "variables" in body: 823 | variables = self.extractValue(body, "variables") 824 | decoded_variables = unquote(variables) 825 | try: 826 | pretty_json = json.dumps(json.loads(decoded_variables), indent=4, ensure_ascii=False) 827 | parsed_output.append("variables:\n" + pretty_json) 828 | except: 829 | parsed_output.append("variables (raw): " + decoded_variables) 830 | 831 | if "doc_id" in body: 832 | parsed_output.append("doc_id: " + self.extractValue(body, "doc_id")) 833 | 834 | return 
"\n".join(parsed_output) 835 | 836 | def extractValue(self, body, key): 837 | start = body.find(key) + len(key) + 1 838 | end = body.find("&", start) 839 | if end == -1: 840 | end = len(body) 841 | return body[start:end] 842 | 843 | def scanRequest(self, event): 844 | if self._currentMessage is not None and self._currentRequestInfo is not None: 845 | try: 846 | modified_body = self._editor.getMessage() 847 | headers = self._currentRequestInfo.getHeaders() 848 | new_request = self._helpers.buildHttpMessage(headers, modified_body) 849 | http_service = self._currentMessage.getHttpService() 850 | response = self._extender.callbacks.makeHttpRequest(http_service, new_request) 851 | self.displayResponse(response) 852 | except: 853 | pass 854 | 855 | def displayResponse(self, response): 856 | if self._responseTab is None: 857 | self._responseTab = self._extender.callbacks.createMessageEditor(None, False) 858 | self._responsePanel = JPanel() 859 | self._responsePanel.setLayout(BoxLayout(self._responsePanel, BoxLayout.Y_AXIS)) 860 | self._responsePanel.add(self._responseTab.getComponent()) 861 | self._tabbedPane = JTabbedPane() 862 | self._tabbedPane.addTab("Meta GraphQL", self._panel) 863 | self._tabbedPane.addTab("GraphQL Response", self._responsePanel) 864 | self._panel.getParent().add(self._tabbedPane) 865 | self._panel.getParent().revalidate() 866 | 867 | self._responseTab.setMessage(response.getResponse(), False) 868 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Passive Recon Burp Extension 2 | 3 | Passive Recon is a **Burp Suite extension** that performs **all-in-one passive reconnaissance** while you browse or test a target. 4 | It automatically detects and collects **endpoints**, **subdomains**, **GraphQL queries (including meta-GraphQL)**, and **URLs** from every request/response. 

![p (1)](https://github.com/user-attachments/assets/a8b00ecb-a3c3-4ec9-8a21-381308c00793)

---

## ✨ Features
- **GraphQL Detection**
  - Parses GraphQL requests and responses
  - Supports normal queries, mutations, and fragments
  - Detects **meta-GraphQL**-style requests often missed by other tools

- **Subdomain Collection**
  - Extracts subdomains passively from traffic
  - Displays unique findings in a dedicated tab

- **Endpoint & URL Extraction**
  - Collects parameters, API endpoints, and in-scope URLs
  - Deduplicated and shown in clean lists
  - Helps build a quick map of the attack surface

- **Burp UI Integration**
  - Four tabs inside Burp: **Main**, **GraphQL**, **Subdomains**, **URLs**
  - Easy copy/paste for recon workflows

---

## 🔗 Works Great with Wayback Recon
Use together with [Wayback Recon](https://github.com/aditisingh2707/Wayback-Recon) for maximum coverage:
1. Run **Wayback Recon** to fetch archived URLs from the Wayback Machine
2. Send those URLs to Burp's sitemap
3. **Passive Recon** will automatically scan them for:
   - Subdomains
   - Endpoints
   - GraphQL queries (including meta-GraphQL)
   - URLs

---

## ⚠️ Notes
- Some large responses may slow down parsing
- May produce **false positives or noisy results**, depending on the target and response contents

---
--------------------------------------------------------------------------------