From 8278ee998121ee9014b749b0ebcd6ea98f29c87f Mon Sep 17 00:00:00 2001 From: liquidsec Date: Thu, 6 Mar 2025 15:39:45 -0500 Subject: [PATCH 01/26] domxss initial --- bbot/modules/domxss.py | 183 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 183 insertions(+) create mode 100644 bbot/modules/domxss.py diff --git a/bbot/modules/domxss.py b/bbot/modules/domxss.py new file mode 100644 index 0000000000..b71bcb1fc5 --- /dev/null +++ b/bbot/modules/domxss.py @@ -0,0 +1,183 @@ +from bbot.modules.base import BaseModule +from webcap.browser import Browser +from webcap import defaults +import tempfile +import os +import uuid +import json +import csv +import shutil + + +class domxss(BaseModule): + watched_events = ["URL"] + produced_events = ["HTTP_RESPONSE_DOM"] + flags = ["active"] + meta = { + "description": "experimental dom xss module using webcap", + "created_date": "2025-03-05", + "author": "@liquidsec", + } + deps_pip = ["numpy", "webcap"] + + deps_ansible = [ + { + "name": "Create codeql directory", + "file": {"path": "#{BBOT_TOOLS}/codeql", "state": "directory", "mode": "0755"}, + }, + { + "name": "Create databases directory", + "file": {"path": "#{BBOT_TOOLS}/codeql/databases", "state": "directory", "mode": "0755"}, + }, + { + "name": "Download codeql", + "unarchive": { + "src": "https://github.com/github/codeql-cli-binaries/releases/download/v2.20.6/codeql-linux64.zip", + "dest": "#{BBOT_TOOLS}/", + "remote_src": True, + }, + }, + { + "name": "Make codeql executable", + "file": { + "path": "#{BBOT_TOOLS}/codeql/codeql", + "mode": "u+x,g+x,o+x" + } + }, + { + "name": "Install JavaScript query pack", + "command": "#{BBOT_TOOLS}/codeql/codeql pack download codeql/javascript-queries", + } + ] + + async def execute_codeql_create_db(self, source_root, database_path): + command = [ + f"{self.scan.helpers.tools_dir}/codeql/codeql", + "database", "create", database_path, + "--language=javascript", + f"--source-root={source_root}" + ] + async for line in 
self.run_process_live(command): + self.hugeinfo(line) + + async def execute_codeql_analyze_db(self, database_path): + # Create a temporary file for the output + with tempfile.NamedTemporaryFile(delete=False, suffix=".csv") as temp_file: + output_path = temp_file.name + + command = [ + f"{self.scan.helpers.tools_dir}/codeql/codeql", + "database", "analyze", database_path, + "codeql/javascript-queries@1.5.0", + "--format=csv", + f"--output={output_path}" + ] + + # Run the command and capture the output + async for line in self.run_process_live(command): + self.hugeinfo(line) + + # Read the contents of the temporary file + with open(output_path, "r") as file: + analysis_results = file.readlines() + + # Initialize an empty list for JSON results + json_results = [] + + # Parse CSV and convert to JSON + csv_reader = csv.reader(analysis_results) + for row in csv_reader: + json_results.append({ + "type": row[0], + "description": row[1], + "severity": row[2], + "details": row[3], + "file": row[4], + "start_line": row[5], + "start_column": row[6], + "end_line": row[7], + "end_column": row[8] + }) + + # Log or process the JSON results + self.critical(f"Analysis results:\n{json.dumps(json_results, indent=2)}") + + # Clean up the temporary file + os.remove(output_path) + + # Return the JSON results + return json_results + + async def handle_event(self, event): + self.critical(event) + + # Create a temporary directory + with tempfile.TemporaryDirectory() as temp_dir: + # Uncomment and modify the following lines to gather DOM and JS files + b = Browser( + threads=defaults.threads, + resolution=defaults.resolution, + user_agent=defaults.user_agent, + proxy=None, + delay=3, + full_page=False, + dom=True, + javascript=True, + requests=False, + responses=False, + base64=False, + ocr=False, + ) + await b.start() + async for url, webscreenshot in b.screenshot_urls([event.data]): + dom = webscreenshot.dom + dom_file_path = os.path.join(temp_dir, "dom.html") + with open(dom_file_path, 
"w") as dom_file: + dom_file.write(dom) + + self.critical(f"DOM file: {dom_file_path} written") + scripts = webscreenshot.scripts + for i, js in enumerate(scripts): + loaded_js = js.json["script"] + js_file_path = os.path.join(temp_dir, f"script_{i}.js") + with open(js_file_path, "w") as js_file: + js_file.write(loaded_js) + + self.critical(f"JS file: {js_file_path} written") + + # Generate a unique GUID for the database + guid = str(uuid.uuid4()) + database_path = os.path.join(f"{self.helpers.tools_dir}/codeql/databases", guid) + self.critical(f"Database path: {database_path}") + self.critical("executing codeql to create db") + # Run the execute_codeql_create_db method with the temp directory + await self.execute_codeql_create_db(temp_dir, database_path) + + # Call the execute_codeql_analyze_db method + self.critical("executing codeql to analyze db") + results = await self.execute_codeql_analyze_db(database_path) + for result in results: + self.critical(result) + + details_string = f"Type: {result['type']} Description: {result['description']} Details: {result['details']}" + + data = { + "description": f"POSSIBLE Client-side Vulnerability. 
{details_string}", + "host": str(event.host) + } + await self.emit_event( + data, + "FINDING", + event, + context=f'{{module}} module found POSSIBLE Client-side Vulnerability: {details_string}', + ) + + # Clean up the database directory + shutil.rmtree(database_path) + self.critical(f"Cleaned up database directory: {database_path}") + +# matches = list(set(await self.yara_helper.match(self.compiled_rules, loaded_js))) +# if matches: +# self.critical(f'Matches found (script): {matches}') + +# self.critical(f'Matches found (dom): {matches}') From 7a451b38e127c7c108bed0d1a45cdfebeecc7232 Mon Sep 17 00:00:00 2001 From: liquidsec Date: Fri, 7 Mar 2025 11:33:27 -0500 Subject: [PATCH 02/26] working prototype --- bbot/modules/codeql.py | 307 ++++++++++++++++++++++++++++ bbot/modules/domxss.py | 183 ----------------- bbot/presets/web/codeql-intense.yml | 11 + bbot/presets/web/codeql.yml | 12 ++ 4 files changed, 330 insertions(+), 183 deletions(-) create mode 100644 bbot/modules/codeql.py delete mode 100644 bbot/modules/domxss.py create mode 100644 bbot/presets/web/codeql-intense.yml create mode 100644 bbot/presets/web/codeql.yml diff --git a/bbot/modules/codeql.py b/bbot/modules/codeql.py new file mode 100644 index 0000000000..e8d61255e7 --- /dev/null +++ b/bbot/modules/codeql.py @@ -0,0 +1,307 @@ +from bbot.modules.base import BaseModule +from webcap.browser import Browser +from webcap import defaults +import tempfile +import os +import uuid +import json +import csv +import shutil + + +class codeql(BaseModule): + watched_events = ["URL"] + produced_events = ["HTTP_RESPONSE_DOM"] + flags = ["active"] + meta = { + "description": "experimental dom xss module using webcap", + "created_date": "2025-03-05", + "author": "@liquidsec", + } + deps_pip = ["numpy", "webcap"] + + options = { + "ignore_scope": False, + "min_severity": "error" + } + options_desc = { + "ignore_scope": "Ignore scope and process all scripts", + "min_severity": "Minimum severity level to report (error, 
warning, recommendation, note)" + } + + deps_ansible = [ + { + "name": "Create codeql directory", + "file": {"path": "#{BBOT_TOOLS}/codeql", "state": "directory", "mode": "0755"}, + }, + { + "name": "Create databases directory", + "file": {"path": "#{BBOT_TOOLS}/codeql/databases", "state": "directory", "mode": "0755"}, + }, + { + "name": "Download codeql", + "unarchive": { + "src": "https://github.com/github/codeql-cli-binaries/releases/download/v2.20.6/codeql-linux64.zip", + "dest": "#{BBOT_TOOLS}/", + "remote_src": True, + }, + }, + { + "name": "Make codeql executable", + "file": { + "path": "#{BBOT_TOOLS}/codeql/codeql", + "mode": "u+x,g+x,o+x" + } + }, + { + "name": "Install JavaScript query pack", + "command": "#{BBOT_TOOLS}/codeql/codeql pack download codeql/javascript-queries", + } + ] + async def setup(self): + self.ignore_scope = self.config.get("ignore_scope", False) + self.severity_levels = { + "error": 4, + "warning": 3, + "recommendation": 2, + "note": 1 + } + + self.min_severity = self.config.get("min_severity", "error").lower() + if self.min_severity not in self.severity_levels: + return False, f"Invalid severity level '{self.min_severity}'. 
Valid options are: {', '.join(self.severity_levels.keys())}" + + # Clean up any stale database files + database_dir = os.path.join(self.scan.helpers.tools_dir, "codeql", "databases") + if os.path.exists(database_dir): + for item in os.listdir(database_dir): + item_path = os.path.join(database_dir, item) + if os.path.isfile(item_path): + os.unlink(item_path) + elif os.path.isdir(item_path): + shutil.rmtree(item_path) + self.debug(f"Cleaned up stale CodeQL databases in {database_dir}") + + return True + + async def execute_codeql_create_db(self, source_root, database_path): + command = [ + f"{self.scan.helpers.tools_dir}/codeql/codeql", + "database", "create", database_path, + "--language=javascript", + f"--source-root={source_root}" + ] + self.verbose(f"Executing CodeQL command to create db") + async for line in self.run_process_live(command): + pass + + async def execute_codeql_analyze_db(self, database_path): + # Create a temporary file for the output + with tempfile.NamedTemporaryFile(delete=False, suffix=".csv") as temp_file: + output_path = temp_file.name + + command = [ + f"{self.scan.helpers.tools_dir}/codeql/codeql", + "database", "analyze", database_path, + "--format=csv", + "codeql/javascript-queries:Security/CWE-079/ExceptionXss.ql", + "codeql/javascript-queries:Security/CWE-079/XssThroughDom.ql", + "codeql/javascript-queries:Security/CWE-079/StoredXss.ql", + "codeql/javascript-queries:Security/CWE-079/UnsafeJQueryPlugin.ql", + # "codeql/javascript-queries:Security/CWE-079/UnsafeHtmlConstruction.ql", + "codeql/javascript-queries:Security/CWE-079/Xss.ql", + "codeql/javascript-queries:Security/CWE-079/ReflectedXss.ql", + "codeql/javascript-queries:Security/CWE-601/ClientSideUrlRedirect.ql", + "codeql/javascript-queries:Security/CWE-201/PostMessageStar.ql", + "codeql/javascript-queries:Security/CWE-094/CodeInjection.ql", + "codeql/javascript-queries:Security/CWE-094/ExpressionInjection.ql", + "codeql/javascript-queries:AngularJS/InsecureUrlWhitelist.ql", + 
"codeql/javascript-queries:AngularJS/DisablingSce.ql", + f"--output={output_path}" + ] + + self.verbose(f"Executing CodeQL command to analyze db") + + # Run the command and capture the output + async for line in self.run_process_live(command): + self.hugeinfo(line) + + # Read and parse the CSV results + results = [] + with open(output_path, "r") as file: + csv_reader = csv.reader(file) + for row in csv_reader: + if len(row) >= 9: # Ensure we have all expected fields + results.append({ + "title": row[0], + "full_description": row[1], + "severity": row[2], + "message": row[3], + "file": row[4], + "start_line": int(row[5]) if row[5].isdigit() else "N/A", + "start_column": int(row[6]) if row[6].isdigit() else "N/A", + "end_line": int(row[7]) if row[7].isdigit() else "N/A", + "end_column": int(row[8]) if row[8].isdigit() else "N/A" + }) + + # Clean up the temporary file + os.remove(output_path) + + return results + + async def handle_event(self, event): + findings = set() # Track unique findings + + # Create a temporary directory + with tempfile.TemporaryDirectory() as temp_dir: + # Initialize script_urls dictionary + script_urls = {} + + b = Browser( + threads=defaults.threads, + resolution=defaults.resolution, + user_agent=defaults.user_agent, + proxy=None, + delay=3, + full_page=False, + dom=True, + javascript=True, + requests=False, + responses=False, + base64=False, + ocr=False, + ) + await b.start() + async for url, webscreenshot in b.screenshot_urls([event.data]): + dom = webscreenshot.dom + dom_file_path = os.path.join(temp_dir, "dom.html") + with open(dom_file_path, "w") as dom_file: + dom_file.write(dom) + + self.debug(f"DOM file: {dom_file_path} written to temp directory") + scripts = webscreenshot.scripts + for i, js in enumerate(scripts): + script_url = js.json.get("url", "unknown_url") + + # Skip out-of-scope scripts if configured + if not self.ignore_scope: + try: + parsed_url = self.helpers.urlparse(script_url) + script_domain = parsed_url.netloc + if 
not self.scan.in_scope(script_domain): + self.debug(f"Skipping out-of-scope script: {script_url}") + continue + except Exception as e: + self.debug(f"Error parsing script URL {script_url}: {e}") + continue + + loaded_js = js.json["script"] + script_urls[i] = script_url + js_file_path = os.path.join(temp_dir, f"script_{i}.js") + with open(js_file_path, "w") as js_file: + js_file.write(loaded_js) + self.debug(f"JS file: {js_file_path} written to temp directory. Source: [{script_url}]") + + # Generate a unique GUID for the database + guid = str(uuid.uuid4()) + database_path = os.path.join(f"{self.helpers.tools_dir}/codeql/databases", guid) + self.debug(f"Writing database to {database_path}") + # Run the execute_codeql_create_db method with the temp directory + await self.execute_codeql_create_db(temp_dir, database_path) + + # Call the execute_codeql_analyze_db method + results = await self.execute_codeql_analyze_db(database_path) + + # Post-process results and extract code + for result in results: + # Extract relevant code portion + file_path = os.path.join(temp_dir, result['file'].lstrip('/')) + with open(file_path, 'r') as f: + lines = f.readlines() + + # Attempt to extract code snippet if line numbers are valid + start_line = result.get('start_line') + start_column = result.get('start_column') + end_column = result.get('end_column') + + code_snippet = None + if isinstance(start_line, int): + start_line -= 1 # Adjust for zero-based index + # Get the full line and sanitize for console output + full_line = lines[start_line].strip().encode('ascii', 'replace').decode() + + # If line is under 150 chars, use the whole line + if len(full_line) <= 150: + code_snippet = full_line + # Otherwise use the column positions + elif all(isinstance(x, int) for x in [start_column, end_column]): + start_column -= 1 # Adjust for zero-based index + code_snippet = full_line[start_column:end_column] + else: + # If we can't use columns, truncate with ellipsis + code_snippet = 
full_line[:147] + "..." + + self.debug(f"Extracted code snippet (line {start_line + 1}):\n{code_snippet}") + else: + self.debug(f"Could not extract code snippet due to invalid line numbers: {result}") + + # Skip results that don't meet severity threshold + if not self.severity_threshold(result['severity']): + continue + + # Format the location string based on the file name + file_name = result['file'].lstrip('/') + if file_name.startswith('script_'): + script_num = int(file_name.split('_')[1].split('.')[0]) + location = script_urls.get(script_num, "unknown_url") + elif file_name == 'dom.html': + location = f"{event.data} (DOM)" + else: + location = file_name + + # Add line and column information + location_details = f"Line: {start_line + 1}" + if isinstance(start_column, int) and isinstance(end_column, int): + location_details += f" Cols: {start_column}-{end_column}" + + # Prepare details string with all the information + details_string = f"{result['title']}. Description: [{result['full_description']}] Severity: [{result['severity']}] Location: [{location} ({location_details})] Code Snippet: [{code_snippet}]" + + # Create a hash of the finding + finding_hash = hash(( + result['title'], + result['full_description'], + result['severity'], + code_snippet + )) + + if finding_hash in findings: + self.debug(f"Skipping duplicate finding: {result['title']} with code snippet: {code_snippet}") + continue + + findings.add(finding_hash) + + # Prepare data for the event + data = { + "description": f"POSSIBLE Client-side Vulnerability: {details_string}", + "host": str(event.host) + } + + # Emit event with the extracted information + await self.emit_event( + data, + "FINDING", + event, + context=f'{{module}} module found POSSIBLE Client-side Vulnerability: {details_string}', + ) + + # Clean up the database directory + shutil.rmtree(database_path) + self.debug(f"Cleaned up database directory: {database_path}") + + + def severity_threshold(self, severity): + severity = 
severity.lower() + min_level = self.severity_levels.get(self.min_severity, 4) # Default to error if invalid + current_level = self.severity_levels.get(severity, 0) # Default to 0 if unknown severity + return current_level >= min_level diff --git a/bbot/modules/domxss.py b/bbot/modules/domxss.py deleted file mode 100644 index b71bcb1fc5..0000000000 --- a/bbot/modules/domxss.py +++ /dev/null @@ -1,183 +0,0 @@ -from bbot.modules.base import BaseModule -from webcap.browser import Browser -from webcap import defaults -import tempfile -import os -import uuid -import json -import csv -import shutil - - -class domxss(BaseModule): - watched_events = ["URL"] - produced_events = ["HTTP_RESPONSE_DOM"] - flags = ["active"] - meta = { - "description": "experimental dom xss module using webcap", - "created_date": "2025-03-05", - "author": "@liquidsec", - } - deps_pip = ["numpy", "webcap"] - - deps_ansible = [ - { - "name": "Create codeql directory", - "file": {"path": "#{BBOT_TOOLS}/codeql", "state": "directory", "mode": "0755"}, - }, - { - "name": "Create databases directory", - "file": {"path": "#{BBOT_TOOLS}/codeql/databases", "state": "directory", "mode": "0755"}, - }, - { - "name": "Download codeql", - "unarchive": { - "src": "https://github.com/github/codeql-cli-binaries/releases/download/v2.20.6/codeql-linux64.zip", - "dest": "#{BBOT_TOOLS}/", - "remote_src": True, - }, - }, - { - "name": "Make codeql executable", - "file": { - "path": "#{BBOT_TOOLS}/codeql/codeql", - "mode": "u+x,g+x,o+x" - } - }, - { - "name": "Install JavaScript query pack", - "command": "#{BBOT_TOOLS}/codeql/codeql pack download codeql/javascript-queries", - } - ] - - async def execute_codeql_create_db(self, source_root, database_path): - command = [ - f"{self.scan.helpers.tools_dir}/codeql/codeql", - "database", "create", database_path, - "--language=javascript", - f"--source-root={source_root}" - ] - async for line in self.run_process_live(command): - self.hugeinfo(line) - - async def 
execute_codeql_analyze_db(self, database_path): - # Create a temporary file for the output - with tempfile.NamedTemporaryFile(delete=False, suffix=".csv") as temp_file: - output_path = temp_file.name - - command = [ - f"{self.scan.helpers.tools_dir}/codeql/codeql", - "database", "analyze", database_path, - "codeql/javascript-queries@1.5.0", - "--format=csv", - f"--output={output_path}" - ] - - # Run the command and capture the output - async for line in self.run_process_live(command): - self.hugeinfo(line) - - # Read the contents of the temporary file - with open(output_path, "r") as file: - analysis_results = file.readlines() - - # Initialize an empty list for JSON results - json_results = [] - - # Parse CSV and convert to JSON - csv_reader = csv.reader(analysis_results) - for row in csv_reader: - json_results.append({ - "type": row[0], - "description": row[1], - "severity": row[2], - "details": row[3], - "file": row[4], - "start_line": row[5], - "start_column": row[6], - "end_line": row[7], - "end_column": row[8] - }) - - # Log or process the JSON results - self.critical(f"Analysis results:\n{json.dumps(json_results, indent=2)}") - - # Clean up the temporary file - os.remove(output_path) - - # Return the JSON results - return json_results - - async def handle_event(self, event): - self.critical(event) - - # Create a temporary directory - with tempfile.TemporaryDirectory() as temp_dir: - # Uncomment and modify the following lines to gather DOM and JS files - b = Browser( - threads=defaults.threads, - resolution=defaults.resolution, - user_agent=defaults.user_agent, - proxy=None, - delay=3, - full_page=False, - dom=True, - javascript=True, - requests=False, - responses=False, - base64=False, - ocr=False, - ) - await b.start() - async for url, webscreenshot in b.screenshot_urls([event.data]): - dom = webscreenshot.dom - dom_file_path = os.path.join(temp_dir, "dom.html") - with open(dom_file_path, "w") as dom_file: - dom_file.write(dom) - - self.critical(f"DOM file: 
{dom_file_path} written") - scripts = webscreenshot.scripts - for i, js in enumerate(scripts): - loaded_js = js.json["script"] - js_file_path = os.path.join(temp_dir, f"script_{i}.js") - with open(js_file_path, "w") as js_file: - js_file.write(loaded_js) - - self.critical(f"JS file: {js_file_path} written") - - # Generate a unique GUID for the database - guid = str(uuid.uuid4()) - database_path = os.path.join(f"{self.helpers.tools_dir}/codeql/databases", guid) - self.critical(f"Database path: {database_path}") - self.critical("executing codeql to create db") - # Run the execute_codeql_create_db method with the temp directory - await self.execute_codeql_create_db(temp_dir, database_path) - - # Call the execute_codeql_analyze_db method - self.critical("executing codeql to analyze db") - results = await self.execute_codeql_analyze_db(database_path) - for result in results: - self.critical(result) - - details_string = f"Type: {result['type']} Description: {result['description']} Details: {result['details']}" - - data = { - "description": f"POSSIBLE Client-side Vulnerability. {details_string}", - "host": str(event.host) - } - await self.emit_event( - data, - "FINDING", - event, - context=f'{{module}} module found POSSIBLE Client-side Vulnerability: {details_string}', - ) - - # Clean up the database directory - shutil.rmtree(database_path) - self.critical(f"Cleaned up database directory: {database_path}") - -# matches = list(set(await self.yara_helper.match(self.compiled_rules, loaded_js))) -# if matches: -# self.critical(f'Matches found (script): {matches}') - -# self.critical(f'Matches found (dom): {matches}') diff --git a/bbot/presets/web/codeql-intense.yml b/bbot/presets/web/codeql-intense.yml new file mode 100644 index 0000000000..d6d2c2701a --- /dev/null +++ b/bbot/presets/web/codeql-intense.yml @@ -0,0 +1,11 @@ +description: Discover client-side web vulnerabilities using CodeQL. Limit to "error" and "warning" level findings, and include out of scope JS files. 
+ + +include: + - codeql + +config: + modules: + codeql: + min_severity: "warning" + include_out_of_scope: True \ No newline at end of file diff --git a/bbot/presets/web/codeql.yml b/bbot/presets/web/codeql.yml new file mode 100644 index 0000000000..27ab7787fe --- /dev/null +++ b/bbot/presets/web/codeql.yml @@ -0,0 +1,12 @@ +description: Discover client-side web vulnerabilities using CodeQL. Limit to "error" level findings, and skip out of scope JS files. + +modules: + - httpx + - portfilter + - codeql + +config: + url_querystring_remove: False + modules: + excavate: + retain_querystring: True \ No newline at end of file From d582514265014dc1e2546d65cb7ed41f18ba0885 Mon Sep 17 00:00:00 2001 From: liquidsec Date: Fri, 7 Mar 2025 14:11:22 -0500 Subject: [PATCH 03/26] ruff format --- bbot/modules/codeql.py | 130 ++++++++++++++++++++--------------------- 1 file changed, 62 insertions(+), 68 deletions(-) diff --git a/bbot/modules/codeql.py b/bbot/modules/codeql.py index e8d61255e7..d8f8925bcd 100644 --- a/bbot/modules/codeql.py +++ b/bbot/modules/codeql.py @@ -4,7 +4,6 @@ import tempfile import os import uuid -import json import csv import shutil @@ -19,14 +18,11 @@ class codeql(BaseModule): "author": "@liquidsec", } deps_pip = ["numpy", "webcap"] - - options = { - "ignore_scope": False, - "min_severity": "error" - } + + options = {"ignore_scope": False, "min_severity": "error"} options_desc = { "ignore_scope": "Ignore scope and process all scripts", - "min_severity": "Minimum severity level to report (error, warning, recommendation, note)" + "min_severity": "Minimum severity level to report (error, warning, recommendation, note)", } deps_ansible = [ @@ -46,31 +42,27 @@ class codeql(BaseModule): "remote_src": True, }, }, - { - "name": "Make codeql executable", - "file": { - "path": "#{BBOT_TOOLS}/codeql/codeql", - "mode": "u+x,g+x,o+x" - } - }, + {"name": "Make codeql executable", "file": {"path": "#{BBOT_TOOLS}/codeql/codeql", "mode": "u+x,g+x,o+x"}}, { "name": 
"Install JavaScript query pack", "command": "#{BBOT_TOOLS}/codeql/codeql pack download codeql/javascript-queries", - } + }, ] + + in_scope_only = True + _module_threads = 4 + async def setup(self): self.ignore_scope = self.config.get("ignore_scope", False) - self.severity_levels = { - "error": 4, - "warning": 3, - "recommendation": 2, - "note": 1 - } - + self.severity_levels = {"error": 4, "warning": 3, "recommendation": 2, "note": 1} + self.min_severity = self.config.get("min_severity", "error").lower() if self.min_severity not in self.severity_levels: - return False, f"Invalid severity level '{self.min_severity}'. Valid options are: {', '.join(self.severity_levels.keys())}" - + return ( + False, + f"Invalid severity level '{self.min_severity}'. Valid options are: {', '.join(self.severity_levels.keys())}", + ) + # Clean up any stale database files database_dir = os.path.join(self.scan.helpers.tools_dir, "codeql", "databases") if os.path.exists(database_dir): @@ -81,15 +73,17 @@ async def setup(self): elif os.path.isdir(item_path): shutil.rmtree(item_path) self.debug(f"Cleaned up stale CodeQL databases in {database_dir}") - + return True async def execute_codeql_create_db(self, source_root, database_path): command = [ f"{self.scan.helpers.tools_dir}/codeql/codeql", - "database", "create", database_path, + "database", + "create", + database_path, "--language=javascript", - f"--source-root={source_root}" + f"--source-root={source_root}", ] self.verbose(f"Executing CodeQL command to create db") async for line in self.run_process_live(command): @@ -102,13 +96,15 @@ async def execute_codeql_analyze_db(self, database_path): command = [ f"{self.scan.helpers.tools_dir}/codeql/codeql", - "database", "analyze", database_path, + "database", + "analyze", + database_path, "--format=csv", "codeql/javascript-queries:Security/CWE-079/ExceptionXss.ql", "codeql/javascript-queries:Security/CWE-079/XssThroughDom.ql", "codeql/javascript-queries:Security/CWE-079/StoredXss.ql", 
"codeql/javascript-queries:Security/CWE-079/UnsafeJQueryPlugin.ql", - # "codeql/javascript-queries:Security/CWE-079/UnsafeHtmlConstruction.ql", + "codeql/javascript-queries:Security/CWE-079/UnsafeHtmlConstruction.ql", "codeql/javascript-queries:Security/CWE-079/Xss.ql", "codeql/javascript-queries:Security/CWE-079/ReflectedXss.ql", "codeql/javascript-queries:Security/CWE-601/ClientSideUrlRedirect.ql", @@ -117,7 +113,7 @@ async def execute_codeql_analyze_db(self, database_path): "codeql/javascript-queries:Security/CWE-094/ExpressionInjection.ql", "codeql/javascript-queries:AngularJS/InsecureUrlWhitelist.ql", "codeql/javascript-queries:AngularJS/DisablingSce.ql", - f"--output={output_path}" + f"--output={output_path}", ] self.verbose(f"Executing CodeQL command to analyze db") @@ -132,17 +128,19 @@ async def execute_codeql_analyze_db(self, database_path): csv_reader = csv.reader(file) for row in csv_reader: if len(row) >= 9: # Ensure we have all expected fields - results.append({ - "title": row[0], - "full_description": row[1], - "severity": row[2], - "message": row[3], - "file": row[4], - "start_line": int(row[5]) if row[5].isdigit() else "N/A", - "start_column": int(row[6]) if row[6].isdigit() else "N/A", - "end_line": int(row[7]) if row[7].isdigit() else "N/A", - "end_column": int(row[8]) if row[8].isdigit() else "N/A" - }) + results.append( + { + "title": row[0], + "full_description": row[1], + "severity": row[2], + "message": row[3], + "file": row[4], + "start_line": int(row[5]) if row[5].isdigit() else "N/A", + "start_column": int(row[6]) if row[6].isdigit() else "N/A", + "end_line": int(row[7]) if row[7].isdigit() else "N/A", + "end_column": int(row[8]) if row[8].isdigit() else "N/A", + } + ) # Clean up the temporary file os.remove(output_path) @@ -151,12 +149,12 @@ async def execute_codeql_analyze_db(self, database_path): async def handle_event(self, event): findings = set() # Track unique findings - + # Create a temporary directory with 
tempfile.TemporaryDirectory() as temp_dir: # Initialize script_urls dictionary script_urls = {} - + b = Browser( threads=defaults.threads, resolution=defaults.resolution, @@ -182,7 +180,7 @@ async def handle_event(self, event): scripts = webscreenshot.scripts for i, js in enumerate(scripts): script_url = js.json.get("url", "unknown_url") - + # Skip out-of-scope scripts if configured if not self.ignore_scope: try: @@ -215,21 +213,21 @@ async def handle_event(self, event): # Post-process results and extract code for result in results: # Extract relevant code portion - file_path = os.path.join(temp_dir, result['file'].lstrip('/')) - with open(file_path, 'r') as f: + file_path = os.path.join(temp_dir, result["file"].lstrip("/")) + with open(file_path, "r") as f: lines = f.readlines() # Attempt to extract code snippet if line numbers are valid - start_line = result.get('start_line') - start_column = result.get('start_column') - end_column = result.get('end_column') + start_line = result.get("start_line") + start_column = result.get("start_column") + end_column = result.get("end_column") code_snippet = None if isinstance(start_line, int): start_line -= 1 # Adjust for zero-based index # Get the full line and sanitize for console output - full_line = lines[start_line].strip().encode('ascii', 'replace').decode() - + full_line = lines[start_line].strip().encode("ascii", "replace").decode() + # If line is under 150 chars, use the whole line if len(full_line) <= 150: code_snippet = full_line @@ -246,15 +244,15 @@ async def handle_event(self, event): self.debug(f"Could not extract code snippet due to invalid line numbers: {result}") # Skip results that don't meet severity threshold - if not self.severity_threshold(result['severity']): + if not self.severity_threshold(result["severity"]): continue # Format the location string based on the file name - file_name = result['file'].lstrip('/') - if file_name.startswith('script_'): - script_num = 
int(file_name.split('_')[1].split('.')[0]) + file_name = result["file"].lstrip("/") + if file_name.startswith("script_"): + script_num = int(file_name.split("_")[1].split(".")[0]) location = script_urls.get(script_num, "unknown_url") - elif file_name == 'dom.html': + elif file_name == "dom.html": location = f"{event.data} (DOM)" else: location = file_name @@ -268,23 +266,20 @@ async def handle_event(self, event): details_string = f"{result['title']}. Description: [{result['full_description']}] Severity: [{result['severity']}] Location: [{location} ({location_details})] Code Snippet: [{code_snippet}]" # Create a hash of the finding - finding_hash = hash(( - result['title'], - result['full_description'], - result['severity'], - code_snippet - )) - + finding_hash = hash( + (result["title"], result["full_description"], result["severity"], code_snippet) + ) + if finding_hash in findings: self.debug(f"Skipping duplicate finding: {result['title']} with code snippet: {code_snippet}") continue - + findings.add(finding_hash) # Prepare data for the event data = { - "description": f"POSSIBLE Client-side Vulnerability: {details_string}", - "host": str(event.host) + "description": f"POSSIBLE Client-side Vulnerability: {details_string}", + "host": str(event.host), } # Emit event with the extracted information @@ -292,14 +287,13 @@ async def handle_event(self, event): data, "FINDING", event, - context=f'{{module}} module found POSSIBLE Client-side Vulnerability: {details_string}', + context=f"{{module}} module found POSSIBLE Client-side Vulnerability: {details_string}", ) # Clean up the database directory shutil.rmtree(database_path) self.debug(f"Cleaned up database directory: {database_path}") - def severity_threshold(self, severity): severity = severity.lower() min_level = self.severity_levels.get(self.min_severity, 4) # Default to error if invalid From 2a671e9afa6ad7aab15465c516c837f62521561e Mon Sep 17 00:00:00 2001 From: liquidsec Date: Tue, 11 Mar 2025 14:25:24 -0400 
Subject: [PATCH 04/26] fixed ansible and paths --- bbot/modules/codeql.py | 62 ++++++++++++++++++++++++++++++------------ 1 file changed, 44 insertions(+), 18 deletions(-) diff --git a/bbot/modules/codeql.py b/bbot/modules/codeql.py index d8f8925bcd..005f8b3510 100644 --- a/bbot/modules/codeql.py +++ b/bbot/modules/codeql.py @@ -27,7 +27,7 @@ class codeql(BaseModule): deps_ansible = [ { - "name": "Create codeql directory", + "name": "Create CodeQL directory", "file": {"path": "#{BBOT_TOOLS}/codeql", "state": "directory", "mode": "0755"}, }, { @@ -35,20 +35,33 @@ class codeql(BaseModule): "file": {"path": "#{BBOT_TOOLS}/codeql/databases", "state": "directory", "mode": "0755"}, }, { - "name": "Download codeql", + "name": "Create packages directory", + "file": {"path": "#{BBOT_TOOLS}/codeql/packages", "state": "directory", "mode": "0755"}, + }, + { + "name": "Download CodeQL CLI", "unarchive": { "src": "https://github.com/github/codeql-cli-binaries/releases/download/v2.20.6/codeql-linux64.zip", "dest": "#{BBOT_TOOLS}/", "remote_src": True, }, }, - {"name": "Make codeql executable", "file": {"path": "#{BBOT_TOOLS}/codeql/codeql", "mode": "u+x,g+x,o+x"}}, { - "name": "Install JavaScript query pack", - "command": "#{BBOT_TOOLS}/codeql/codeql pack download codeql/javascript-queries", + "name": "Make CodeQL executable", + "file": {"path": "#{BBOT_TOOLS}/codeql/codeql", "mode": "u+x,g+x,o+x"}, + }, + { + "name": "Download JavaScript Query Pack to Custom Directory", + "command": "#{BBOT_TOOLS}/codeql/codeql pack download codeql/javascript-queries --dir=#{BBOT_TOOLS}/codeql/packages", + }, + { + "name": "Install JavaScript Query Pack from Custom Directory", + "command": "#{BBOT_TOOLS}/codeql/codeql pack install #{BBOT_TOOLS}/codeql/packages/codeql/javascript-queries --common-caches=#{BBOT_TOOLS}/codeql/packages", }, ] + + in_scope_only = True _module_threads = 4 @@ -63,6 +76,31 @@ async def setup(self): f"Invalid severity level '{self.min_severity}'. 
Valid options are: {', '.join(self.severity_levels.keys())}", ) + # Build the query list during setup + self.queries = [ + f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.0/Security/CWE-079/ExceptionXss.ql", + f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.0/Security/CWE-079/XssThroughDom.ql", + f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.0/Security/CWE-079/StoredXss.ql", + f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.0/Security/CWE-079/UnsafeJQueryPlugin.ql", + f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.0/Security/CWE-079/UnsafeHtmlConstruction.ql", + f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.0/Security/CWE-079/Xss.ql", + f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.0/Security/CWE-079/ReflectedXss.ql", + f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.0/Security/CWE-601/ClientSideUrlRedirect.ql", + f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.0/Security/CWE-201/PostMessageStar.ql", + f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.0/Security/CWE-094/CodeInjection.ql", + f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.0/Security/CWE-094/ExpressionInjection.ql", + f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.0/AngularJS/InsecureUrlWhitelist.ql", + f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.0/AngularJS/DisablingSce.ql", + ] + + # # Add custom queries from wordlists directory + # custom_queries_dir = os.path.join(self.scan.helpers.wordlist_dir, "codeql_queries") + # if os.path.exists(custom_queries_dir): + # for file in os.listdir(custom_queries_dir): + # if file.endswith('.ql'): + # self.queries.append(os.path.join(custom_queries_dir, file)) + # self.debug(f"Added custom query: {file}") + # Clean up any stale 
database files database_dir = os.path.join(self.scan.helpers.tools_dir, "codeql", "databases") if os.path.exists(database_dir): @@ -100,19 +138,7 @@ async def execute_codeql_analyze_db(self, database_path): "analyze", database_path, "--format=csv", - "codeql/javascript-queries:Security/CWE-079/ExceptionXss.ql", - "codeql/javascript-queries:Security/CWE-079/XssThroughDom.ql", - "codeql/javascript-queries:Security/CWE-079/StoredXss.ql", - "codeql/javascript-queries:Security/CWE-079/UnsafeJQueryPlugin.ql", - "codeql/javascript-queries:Security/CWE-079/UnsafeHtmlConstruction.ql", - "codeql/javascript-queries:Security/CWE-079/Xss.ql", - "codeql/javascript-queries:Security/CWE-079/ReflectedXss.ql", - "codeql/javascript-queries:Security/CWE-601/ClientSideUrlRedirect.ql", - "codeql/javascript-queries:Security/CWE-201/PostMessageStar.ql", - "codeql/javascript-queries:Security/CWE-094/CodeInjection.ql", - "codeql/javascript-queries:Security/CWE-094/ExpressionInjection.ql", - "codeql/javascript-queries:AngularJS/InsecureUrlWhitelist.ql", - "codeql/javascript-queries:AngularJS/DisablingSce.ql", + *self.queries, f"--output={output_path}", ] From a61f7bbd72d5f3bc94b12be5266ada440569adc2 Mon Sep 17 00:00:00 2001 From: liquidsec Date: Tue, 11 Mar 2025 22:47:46 -0400 Subject: [PATCH 05/26] add wordlist environ --- bbot/core/core.py | 4 ++++ bbot/scanner/preset/environ.py | 2 ++ 2 files changed, 6 insertions(+) diff --git a/bbot/core/core.py b/bbot/core/core.py index 5814052771..609c989ac9 100644 --- a/bbot/core/core.py +++ b/bbot/core/core.py @@ -84,6 +84,10 @@ def lib_dir(self): def scans_dir(self): return self.home / "scans" + @property + def wordlist_dir(self): + return Path(__file__).parent.parent / "wordlists" + @property def config(self): """ diff --git a/bbot/scanner/preset/environ.py b/bbot/scanner/preset/environ.py index 6dc5d8adae..0eaecc89a1 100644 --- a/bbot/scanner/preset/environ.py +++ b/bbot/scanner/preset/environ.py @@ -90,6 +90,8 @@ def prepare(self): # ensure 
bbot_tools environ["BBOT_TOOLS"] = str(self.preset.core.tools_dir) add_to_path(str(self.preset.core.tools_dir), environ=environ) + # ensure bbot_wordlists + environ["BBOT_WORDLISTS"] = str(self.preset.core.wordlist_dir) # ensure bbot_cache environ["BBOT_CACHE"] = str(self.preset.core.cache_dir) # ensure bbot_temp From 51b72c016546c5b4ad7bfab7052a8a5514ca23c1 Mon Sep 17 00:00:00 2001 From: liquidsec Date: Tue, 11 Mar 2025 22:49:52 -0400 Subject: [PATCH 06/26] refactor codeql ansible --- bbot/modules/codeql.py | 99 +++++++++++++++++-- .../codeql_queries/dom-xss-jquery-contains.ql | 59 +++++++++++ 2 files changed, 150 insertions(+), 8 deletions(-) create mode 100644 bbot/wordlists/codeql_queries/dom-xss-jquery-contains.ql diff --git a/bbot/modules/codeql.py b/bbot/modules/codeql.py index 005f8b3510..32a67be807 100644 --- a/bbot/modules/codeql.py +++ b/bbot/modules/codeql.py @@ -1,3 +1,4 @@ +from pathlib import Path from bbot.modules.base import BaseModule from webcap.browser import Browser from webcap import defaults @@ -29,14 +30,25 @@ class codeql(BaseModule): { "name": "Create CodeQL directory", "file": {"path": "#{BBOT_TOOLS}/codeql", "state": "directory", "mode": "0755"}, + "register": "codeql_dir_created", }, { "name": "Create databases directory", - "file": {"path": "#{BBOT_TOOLS}/codeql/databases", "state": "directory", "mode": "0755"}, + "file": { + "path": "#{BBOT_TOOLS}/codeql/databases", + "state": "directory", + "mode": "0755", + }, + "when": "codeql_dir_created is success", }, { "name": "Create packages directory", - "file": {"path": "#{BBOT_TOOLS}/codeql/packages", "state": "directory", "mode": "0755"}, + "file": { + "path": "#{BBOT_TOOLS}/codeql/packages", + "state": "directory", + "mode": "0755", + }, + "when": "codeql_dir_created is success", }, { "name": "Download CodeQL CLI", @@ -45,23 +57,90 @@ class codeql(BaseModule): "dest": "#{BBOT_TOOLS}/", "remote_src": True, }, + "register": "codeql_downloaded", + "when": "codeql_dir_created is success", }, 
{ "name": "Make CodeQL executable", "file": {"path": "#{BBOT_TOOLS}/codeql/codeql", "mode": "u+x,g+x,o+x"}, + "when": "codeql_downloaded is success", + }, + { + "name": "Download JavaScript-all Query Pack to Custom Directory", + "command": "#{BBOT_TOOLS}/codeql/codeql pack download codeql/javascript-all --dir=#{BBOT_TOOLS}/codeql/packages --common-caches=#{BBOT_TOOLS}/codeql", + "register": "query_pack_all_downloaded", }, { - "name": "Download JavaScript Query Pack to Custom Directory", - "command": "#{BBOT_TOOLS}/codeql/codeql pack download codeql/javascript-queries --dir=#{BBOT_TOOLS}/codeql/packages", + "name": "Install JavaScript-all Query Pack from Custom Directory", + "command": "#{BBOT_TOOLS}/codeql/codeql pack install #{BBOT_TOOLS}/codeql/packages/codeql/javascript-all/2.5.0 --no-strict-mode --common-caches=#{BBOT_TOOLS}/codeql", + "when": "query_pack_all_downloaded is success", + "register": "query_pack_all_installed", }, { - "name": "Install JavaScript Query Pack from Custom Directory", - "command": "#{BBOT_TOOLS}/codeql/codeql pack install #{BBOT_TOOLS}/codeql/packages/codeql/javascript-queries --common-caches=#{BBOT_TOOLS}/codeql/packages", + "name": "Download suite-helpers Query Pack to Custom Directory", + "command": "#{BBOT_TOOLS}/codeql/codeql pack download codeql/suite-helpers --dir=#{BBOT_TOOLS}/codeql/packages --common-caches=#{BBOT_TOOLS}/codeql", + "when": "query_pack_all_installed is success", + "register": "suite_helpers_downloaded", + }, + { + "name": "Install suite-helpers Query Pack from Custom Directory", + "command": "#{BBOT_TOOLS}/codeql/codeql pack install #{BBOT_TOOLS}/codeql/packages/codeql/suite-helpers/1.0.18 --no-strict-mode --common-caches=#{BBOT_TOOLS}/codeql", + "when": "suite_helpers_downloaded is success", + "register": "suite_helpers_installed", + }, + { + "name": "Download typos Query Pack to Custom Directory", + "command": "#{BBOT_TOOLS}/codeql/codeql pack download codeql/typos --dir=#{BBOT_TOOLS}/codeql/packages 
--common-caches=#{BBOT_TOOLS}/codeql", + "when": "suite_helpers_installed is success", + "register": "typos_downloaded", + }, + { + "name": "Install typos Query Pack from Custom Directory", + "command": "#{BBOT_TOOLS}/codeql/codeql pack install #{BBOT_TOOLS}/codeql/packages/codeql/typos/1.0.18 --no-strict-mode --common-caches=#{BBOT_TOOLS}/codeql", + "when": "typos_downloaded is success", + "register": "typos_installed", + }, + { + "name": "Download util Query Pack to Custom Directory", + "command": "#{BBOT_TOOLS}/codeql/codeql pack download codeql/util --dir=#{BBOT_TOOLS}/codeql/packages --common-caches=#{BBOT_TOOLS}/codeql", + "when": "typos_installed is success", + "register": "util_downloaded", + }, + { + "name": "Install util Query Pack from Custom Directory", + "command": "#{BBOT_TOOLS}/codeql/codeql pack install #{BBOT_TOOLS}/codeql/packages/codeql/util/2.0.5 --no-strict-mode --common-caches=#{BBOT_TOOLS}/codeql", + "when": "util_downloaded is success", + "register": "util_installed", + }, + { + "name": "Download JavaScript-queries Query Pack to Custom Directory", + "command": "#{BBOT_TOOLS}/codeql/codeql pack download codeql/javascript-queries --dir=#{BBOT_TOOLS}/codeql/packages --common-caches=#{BBOT_TOOLS}/codeql", + "when": "util_installed is success", + "register": "query_pack_downloaded", + }, + { + "name": "Install JavaScript-queries Query Pack from Custom Directory", + "command": "#{BBOT_TOOLS}/codeql/codeql pack install #{BBOT_TOOLS}/codeql/packages/codeql/javascript-queries/1.5.0 --no-strict-mode --common-caches=#{BBOT_TOOLS}/codeql", + "when": "query_pack_downloaded is success", + }, + { + "name": "Create CodeQL custom queries directory", + "file": { + "path": "#{BBOT_TOOLS}/codeql/packages/codeql/javascript-queries/1.5.0/custom", + "state": "directory", + "mode": "0755", + }, + }, + { + "name": "Copy custom queries to CodeQL Custom Query Pack directory", + "copy": { + "src": "#{BBOT_WORDLISTS}/codeql_queries/", + "dest": 
"#{BBOT_TOOLS}/codeql/packages/codeql/javascript-queries/1.5.0/custom/", + "remote_src": False, + }, }, ] - - in_scope_only = True _module_threads = 4 @@ -91,6 +170,7 @@ async def setup(self): f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.0/Security/CWE-094/ExpressionInjection.ql", f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.0/AngularJS/InsecureUrlWhitelist.ql", f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.0/AngularJS/DisablingSce.ql", + f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.0/custom/dom-xss-jquery-contains.ql", ] # # Add custom queries from wordlists directory @@ -121,6 +201,7 @@ async def execute_codeql_create_db(self, source_root, database_path): "create", database_path, "--language=javascript", + f"--common-caches={self.scan.helpers.tools_dir}/codeql/", f"--source-root={source_root}", ] self.verbose(f"Executing CodeQL command to create db") @@ -138,6 +219,8 @@ async def execute_codeql_analyze_db(self, database_path): "analyze", database_path, "--format=csv", + f"--common-caches={self.scan.helpers.tools_dir}/codeql", + f"--additional-packs={self.scan.helpers.tools_dir}/codeql/packages", *self.queries, f"--output={output_path}", ] diff --git a/bbot/wordlists/codeql_queries/dom-xss-jquery-contains.ql b/bbot/wordlists/codeql_queries/dom-xss-jquery-contains.ql new file mode 100644 index 0000000000..b4ee5e0cfe --- /dev/null +++ b/bbot/wordlists/codeql_queries/dom-xss-jquery-contains.ql @@ -0,0 +1,59 @@ +/** + * @name DOM-based XSS via jQuery :contains selector + * @description Untrusted input like location.hash in a jQuery :contains selector can lead to XSS when jQuery processes the selector. 
+ * @kind path-problem + * @problem.severity error + * @security-severity 7.5 + * @precision high + * @id js/dom-xss-jquery-contains-selector + * @tags security + * external/cwe/cwe-079 + * external/cwe/cwe-116 + */ + +import javascript +import DataFlow +import DataFlow::PathGraph + +/** + * Taint tracking configuration for location.hash being used unsafely in jQuery selectors. + */ +class HashToJQueryContainsConfig extends TaintTracking::Configuration { + HashToJQueryContainsConfig() { this = "HashToJQueryContainsConfig" } + + override predicate isSource(DataFlow::Node source) { + exists(DataFlow::PropRead hashProp | + hashProp = source and + hashProp.getPropertyName() = "hash" and + exists(DataFlow::PropRead locationProp | + locationProp = hashProp.getBase() and + locationProp.getPropertyName() = "location" + ) + ) + } + + override predicate isSink(DataFlow::Node sink) { + exists(DataFlow::CallNode jqueryCall | + (jqueryCall.getCalleeName() = "$" or jqueryCall.getCalleeName() = "jQuery") and + sink = jqueryCall.getArgument(0) and + ( + // String concatenation or template literals used in the selector + sink.asExpr() instanceof BinaryExpr + or sink.asExpr() instanceof AddExpr + or exists(string val | + val = sink.getStringValue() and + val.indexOf(":contains(") >= 0 + ) + ) + ) + } +} + +/** + * Execute the taint tracking analysis. 
+ */ +from HashToJQueryContainsConfig config, DataFlow::PathNode source, DataFlow::PathNode sink +where config.hasFlowPath(source, sink) +select sink.getNode(), source, sink, + "The value from $@ is used unsafely in a jQuery `:contains()` selector, potentially leading to DOM XSS.", + source.getNode(), "location.hash" From a02ad49d526eb9de17c0cc21e93b7202eddb727f Mon Sep 17 00:00:00 2001 From: liquidsec Date: Wed, 12 Mar 2025 20:04:23 -0400 Subject: [PATCH 07/26] fixed false positives --- .../codeql_queries/dom-xss-jquery-contains.ql | 69 +++++++++++++++---- 1 file changed, 55 insertions(+), 14 deletions(-) diff --git a/bbot/wordlists/codeql_queries/dom-xss-jquery-contains.ql b/bbot/wordlists/codeql_queries/dom-xss-jquery-contains.ql index b4ee5e0cfe..af54db7aed 100644 --- a/bbot/wordlists/codeql_queries/dom-xss-jquery-contains.ql +++ b/bbot/wordlists/codeql_queries/dom-xss-jquery-contains.ql @@ -1,14 +1,14 @@ /** - * @name DOM-based XSS via jQuery :contains selector - * @description Untrusted input like location.hash in a jQuery :contains selector can lead to XSS when jQuery processes the selector. + * @name DOM-based XSS via potentially dangerous jQuery selectors + * @description Untrusted input like location.hash used in potentially dangerous jQuery selectors (such as :contains, has(), or other non-ID selectors) can lead to XSS when jQuery processes the selector. 
* @kind path-problem * @problem.severity error * @security-severity 7.5 * @precision high - * @id js/dom-xss-jquery-contains-selector + * @id js/dom-xss-jquery-unsafe-selectors * @tags security - * external/cwe/cwe-079 - * external/cwe/cwe-116 + * external/cwe/cwe-079 + * external/cwe/cwe-116 */ import javascript @@ -37,15 +37,58 @@ class HashToJQueryContainsConfig extends TaintTracking::Configuration { (jqueryCall.getCalleeName() = "$" or jqueryCall.getCalleeName() = "jQuery") and sink = jqueryCall.getArgument(0) and ( - // String concatenation or template literals used in the selector - sink.asExpr() instanceof BinaryExpr - or sink.asExpr() instanceof AddExpr - or exists(string val | - val = sink.getStringValue() and - val.indexOf(":contains(") >= 0 + // Check for direct :contains usage in string literals + exists(string val | val = sink.getStringValue() and val.indexOf(":contains(") >= 0) or + + // Check for string concatenation or binary expressions that aren't properly sanitized + exists(Expr expr | + expr = sink.asExpr() and + (expr instanceof BinaryExpr or expr instanceof AddExpr) and + not isSafeIdSelector(expr) ) ) ) + or + // Also check for unsafe usage in jQuery has() method + exists(DataFlow::MethodCallNode hasCall | + hasCall.getMethodName() = "has" and + sink = hasCall.getArgument(0) and + not isSafeSelector(hasCall.getArgument(0)) + ) + } + + /** + * Determines if an expression represents a safe ID selector (starting with #) + */ + private predicate isSafeIdSelector(Expr expr) { + // Case: '#' + hash + exists(AddExpr addExpr, StringLiteral hashChar | + addExpr = expr and + hashChar = addExpr.getLeftOperand() and + hashChar.getValue() = "#" + ) + or + // Case: $('#' + hash) + exists(StringLiteral strLit | + strLit = expr and + strLit.getValue().charAt(0) = "#" + ) + } + + /** + * Determines if a node represents a safe selector + */ + private predicate isSafeSelector(DataFlow::Node node) { + exists(StringLiteral strLit | + strLit = node.asExpr() and + 
strLit.getValue().charAt(0) = "#" + ) + or + exists(AddExpr addExpr, StringLiteral hashChar | + addExpr = node.asExpr() and + hashChar = addExpr.getLeftOperand() and + hashChar.getValue() = "#" + ) } } @@ -54,6 +97,4 @@ class HashToJQueryContainsConfig extends TaintTracking::Configuration { */ from HashToJQueryContainsConfig config, DataFlow::PathNode source, DataFlow::PathNode sink where config.hasFlowPath(source, sink) -select sink.getNode(), source, sink, - "The value from $@ is used unsafely in a jQuery `:contains()` selector, potentially leading to DOM XSS.", - source.getNode(), "location.hash" +select sink.getNode(), source, sink, "The value from $@ is used unsafely in a jQuery selector, potentially leading to DOM XSS.", source.getNode(), "location.hash" \ No newline at end of file From 0b462ac90445a8059285bf24a7b7e9947296c9ed Mon Sep 17 00:00:00 2001 From: liquidsec Date: Wed, 12 Mar 2025 20:04:43 -0400 Subject: [PATCH 08/26] reduced cpu load --- bbot/modules/codeql.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/modules/codeql.py b/bbot/modules/codeql.py index 32a67be807..3438ffdf79 100644 --- a/bbot/modules/codeql.py +++ b/bbot/modules/codeql.py @@ -142,7 +142,7 @@ class codeql(BaseModule): ] in_scope_only = True - _module_threads = 4 + _module_threads = 2 async def setup(self): self.ignore_scope = self.config.get("ignore_scope", False) From 14042c7fee39e66d563e77c62865b1e649ddeb7a Mon Sep 17 00:00:00 2001 From: liquidsec Date: Thu, 13 Mar 2025 10:04:08 -0400 Subject: [PATCH 09/26] adding dom only mode --- bbot/modules/codeql.py | 63 ++-- bbot/presets/web/codeql-intense.yml | 12 +- bbot/presets/web/codeql-min.yml | 15 + bbot/presets/web/codeql.yml | 5 +- poetry.lock | 444 ++++++++++++++-------------- 5 files changed, 284 insertions(+), 255 deletions(-) create mode 100644 bbot/presets/web/codeql-min.yml diff --git a/bbot/modules/codeql.py b/bbot/modules/codeql.py index 3438ffdf79..bc8631c6d9 100644 --- a/bbot/modules/codeql.py +++ 
b/bbot/modules/codeql.py @@ -18,11 +18,11 @@ class codeql(BaseModule): "created_date": "2025-03-05", "author": "@liquidsec", } - deps_pip = ["numpy", "webcap"] + deps_pip = ["webcap"] - options = {"ignore_scope": False, "min_severity": "error"} + options = {"mode": "all", "min_severity": "error"} options_desc = { - "ignore_scope": "Ignore scope and process all scripts", + "mode": "Script processing mode: 'all' (process all scripts), 'in_scope' (only process in-scope scripts), or 'dom_only' (only process DOM)", "min_severity": "Minimum severity level to report (error, warning, recommendation, note)", } @@ -145,9 +145,12 @@ class codeql(BaseModule): _module_threads = 2 async def setup(self): - self.ignore_scope = self.config.get("ignore_scope", False) - self.severity_levels = {"error": 4, "warning": 3, "recommendation": 2, "note": 1} + self.mode = self.config.get("mode", "in_scope").lower() + valid_modes = {"all", "in_scope", "dom_only"} + if self.mode not in valid_modes: + return False, f"Invalid mode '{self.mode}'. 
Valid options are: {', '.join(valid_modes)}" + self.severity_levels = {"error": 4, "warning": 3, "recommendation": 2, "note": 1} self.min_severity = self.config.get("min_severity", "error").lower() if self.min_severity not in self.severity_levels: return ( @@ -204,7 +207,7 @@ async def execute_codeql_create_db(self, source_root, database_path): f"--common-caches={self.scan.helpers.tools_dir}/codeql/", f"--source-root={source_root}", ] - self.verbose(f"Executing CodeQL command to create db") + self.verbose("Executing CodeQL command to create db") async for line in self.run_process_live(command): pass @@ -225,7 +228,7 @@ async def execute_codeql_analyze_db(self, database_path): f"--output={output_path}", ] - self.verbose(f"Executing CodeQL command to analyze db") + self.verbose("Executing CodeQL command to analyze db") # Run the command and capture the output async for line in self.run_process_live(command): @@ -261,7 +264,6 @@ async def handle_event(self, event): # Create a temporary directory with tempfile.TemporaryDirectory() as temp_dir: - # Initialize script_urls dictionary script_urls = {} b = Browser( @@ -286,28 +288,31 @@ async def handle_event(self, event): dom_file.write(dom) self.debug(f"DOM file: {dom_file_path} written to temp directory") - scripts = webscreenshot.scripts - for i, js in enumerate(scripts): - script_url = js.json.get("url", "unknown_url") - - # Skip out-of-scope scripts if configured - if not self.ignore_scope: - try: - parsed_url = self.helpers.urlparse(script_url) - script_domain = parsed_url.netloc - if not self.scan.in_scope(script_domain): - self.debug(f"Skipping out-of-scope script: {script_url}") + + # Only process scripts if not in dom_only mode + if self.mode != "dom_only": + scripts = webscreenshot.scripts + for i, js in enumerate(scripts): + script_url = js.json.get("url", "unknown_url") + + # Skip out-of-scope scripts in in_scope mode + if self.mode == "in_scope": + try: + parsed_url = self.helpers.urlparse(script_url) + 
script_domain = parsed_url.netloc + if not self.scan.in_scope(script_domain): + self.debug(f"Skipping out-of-scope script: {script_url}") + continue + except Exception as e: + self.debug(f"Error parsing script URL {script_url}: {e}") continue - except Exception as e: - self.debug(f"Error parsing script URL {script_url}: {e}") - continue - - loaded_js = js.json["script"] - script_urls[i] = script_url - js_file_path = os.path.join(temp_dir, f"script_{i}.js") - with open(js_file_path, "w") as js_file: - js_file.write(loaded_js) - self.debug(f"JS file: {js_file_path} written to temp directory. Source: [{script_url}]") + + loaded_js = js.json["script"] + script_urls[i] = script_url + js_file_path = os.path.join(temp_dir, f"script_{i}.js") + with open(js_file_path, "w") as js_file: + js_file.write(loaded_js) + self.debug(f"JS file: {js_file_path} written to temp directory. Source: [{script_url}]") # Generate a unique GUID for the database guid = str(uuid.uuid4()) diff --git a/bbot/presets/web/codeql-intense.yml b/bbot/presets/web/codeql-intense.yml index d6d2c2701a..4b26805f2d 100644 --- a/bbot/presets/web/codeql-intense.yml +++ b/bbot/presets/web/codeql-intense.yml @@ -1,11 +1,15 @@ description: Discover client-side web vulnerabilities using CodeQL. Limit to "error" and "warning" level findings, and include out of scope JS files. - -include: +modules: + - httpx + - portfilter - codeql config: + url_querystring_remove: False modules: + excavate: + retain_querystring: True codeql: - min_severity: "warning" - include_out_of_scope: True \ No newline at end of file + mode: all + min_severity: warning diff --git a/bbot/presets/web/codeql-min.yml b/bbot/presets/web/codeql-min.yml new file mode 100644 index 0000000000..1439e45989 --- /dev/null +++ b/bbot/presets/web/codeql-min.yml @@ -0,0 +1,15 @@ +description: Discover client-side web vulnerabilities using CodeQL. Limit to "error" level findings, and only analyze the DOM itself. 
+ +modules: + - httpx + - portfilter + - codeql + +config: + url_querystring_remove: False + modules: + excavate: + retain_querystring: True + codeql: + mode: dom_only + min_severity: error diff --git a/bbot/presets/web/codeql.yml b/bbot/presets/web/codeql.yml index 27ab7787fe..5c1b481ab7 100644 --- a/bbot/presets/web/codeql.yml +++ b/bbot/presets/web/codeql.yml @@ -9,4 +9,7 @@ config: url_querystring_remove: False modules: excavate: - retain_querystring: True \ No newline at end of file + retain_querystring: True + codeql: + mode: in_scope + min_severity: error diff --git a/poetry.lock b/poetry.lock index 0f14d81504..c2df5c1060 100644 --- a/poetry.lock +++ b/poetry.lock @@ -94,6 +94,24 @@ files = [ [package.extras] dev = ["backports.zoneinfo", "freezegun (>=1.0,<2.0)", "jinja2 (>=3.0)", "pytest (>=6.0)", "pytest-cov", "pytz", "setuptools", "tzdata"] +[[package]] +name = "backrefs" +version = "5.8" +description = "A wrapper around re and regex that adds additional back references." +optional = false +python-versions = ">=3.9" +files = [ + {file = "backrefs-5.8-py310-none-any.whl", hash = "sha256:c67f6638a34a5b8730812f5101376f9d41dc38c43f1fdc35cb54700f6ed4465d"}, + {file = "backrefs-5.8-py311-none-any.whl", hash = "sha256:2e1c15e4af0e12e45c8701bd5da0902d326b2e200cafcd25e49d9f06d44bb61b"}, + {file = "backrefs-5.8-py312-none-any.whl", hash = "sha256:bbef7169a33811080d67cdf1538c8289f76f0942ff971222a16034da88a73486"}, + {file = "backrefs-5.8-py313-none-any.whl", hash = "sha256:e3a63b073867dbefd0536425f43db618578528e3896fb77be7141328642a1585"}, + {file = "backrefs-5.8-py39-none-any.whl", hash = "sha256:a66851e4533fb5b371aa0628e1fee1af05135616b86140c9d787a2ffdf4b8fdc"}, + {file = "backrefs-5.8.tar.gz", hash = "sha256:2cab642a205ce966af3dd4b38ee36009b31fa9502a35fd61d59ccc116e40a6bd"}, +] + +[package.extras] +extras = ["regex"] + [[package]] name = "beautifulsoup4" version = "4.13.3" @@ -500,13 +518,13 @@ test-randomorder = ["pytest-randomly"] [[package]] name = "deepdiff" 
-version = "8.2.0" +version = "8.3.0" description = "Deep Difference and Search of any Python object/data. Recreate objects by adding adding deltas to each other." optional = false python-versions = ">=3.8" files = [ - {file = "deepdiff-8.2.0-py3-none-any.whl", hash = "sha256:5091f2cdfd372b1b9f6bfd8065ba323ae31118dc4e42594371b38c8bea3fd0a4"}, - {file = "deepdiff-8.2.0.tar.gz", hash = "sha256:6ec78f65031485735545ffbe7a61e716c3c2d12ca6416886d5e9291fc76c46c3"}, + {file = "deepdiff-8.3.0-py3-none-any.whl", hash = "sha256:838acf1b17d228f4155bcb69bb265c41cbb5b2aba2575f07efa67ad9b9b7a0b5"}, + {file = "deepdiff-8.3.0.tar.gz", hash = "sha256:92a8d7c75a4b26b385ec0372269de258e20082307ccf74a4314341add3d88391"}, ] [package.dependencies] @@ -577,18 +595,18 @@ test = ["pytest (>=6)"] [[package]] name = "fastapi" -version = "0.115.9" +version = "0.115.11" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" optional = false python-versions = ">=3.8" files = [ - {file = "fastapi-0.115.9-py3-none-any.whl", hash = "sha256:4a439d7923e4de796bcc88b64e9754340fcd1574673cbd865ba8a99fe0d28c56"}, - {file = "fastapi-0.115.9.tar.gz", hash = "sha256:9d7da3b196c5eed049bc769f9475cd55509a112fbe031c0ef2f53768ae68d13f"}, + {file = "fastapi-0.115.11-py3-none-any.whl", hash = "sha256:32e1541b7b74602e4ef4a0260ecaf3aadf9d4f19590bba3e1bf2ac4666aa2c64"}, + {file = "fastapi-0.115.11.tar.gz", hash = "sha256:cc81f03f688678b92600a65a5e618b93592c65005db37157147204d8924bf94f"}, ] [package.dependencies] pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0" -starlette = ">=0.40.0,<0.46.0" +starlette = ">=0.40.0,<0.47.0" typing-extensions = ">=4.8.0" [package.extras] @@ -630,13 +648,13 @@ dev = ["flake8", "markdown", "twine", "wheel"] [[package]] name = "griffe" -version = "1.5.7" +version = "1.6.0" description = "Signatures for entire Python programs. 
Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API." optional = false python-versions = ">=3.9" files = [ - {file = "griffe-1.5.7-py3-none-any.whl", hash = "sha256:4af8ec834b64de954d447c7b6672426bb145e71605c74a4e22d510cc79fe7d8b"}, - {file = "griffe-1.5.7.tar.gz", hash = "sha256:465238c86deaf1137761f700fb343edd8ffc846d72f6de43c3c345ccdfbebe92"}, + {file = "griffe-1.6.0-py3-none-any.whl", hash = "sha256:9f1dfe035d4715a244ed2050dfbceb05b1f470809ed4f6bb10ece5a7302f8dd1"}, + {file = "griffe-1.6.0.tar.gz", hash = "sha256:eb5758088b9c73ad61c7ac014f3cdfb4c57b5c2fcbfca69996584b702aefa354"}, ] [package.dependencies] @@ -701,13 +719,13 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "identify" -version = "2.6.8" +version = "2.6.9" description = "File identification library for Python" optional = false python-versions = ">=3.9" files = [ - {file = "identify-2.6.8-py2.py3-none-any.whl", hash = "sha256:83657f0f766a3c8d0eaea16d4ef42494b39b34629a4b3192a9d020d349b3e255"}, - {file = "identify-2.6.8.tar.gz", hash = "sha256:61491417ea2c0c5c670484fd8abbb34de34cdae1e5f39a73ee65e48e4bb663fc"}, + {file = "identify-2.6.9-py2.py3-none-any.whl", hash = "sha256:c98b4322da415a8e5a70ff6e51fbc2d2932c015532d77e9f8537b4ba7813b150"}, + {file = "identify-2.6.9.tar.gz", hash = "sha256:d40dfe3142a1421d8518e3d3985ef5ac42890683e32306ad614a29490abeb6bf"}, ] [package.extras] @@ -774,13 +792,13 @@ files = [ [[package]] name = "jinja2" -version = "3.1.5" +version = "3.1.6" description = "A very fast and expressive template engine." 
optional = false python-versions = ">=3.7" files = [ - {file = "jinja2-3.1.5-py3-none-any.whl", hash = "sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb"}, - {file = "jinja2-3.1.5.tar.gz", hash = "sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb"}, + {file = "jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67"}, + {file = "jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d"}, ] [package.dependencies] @@ -1140,13 +1158,13 @@ min-versions = ["babel (==2.9.0)", "click (==7.0)", "colorama (==0.4)", "ghp-imp [[package]] name = "mkdocs-autorefs" -version = "1.4.0" +version = "1.4.1" description = "Automatically link across pages in MkDocs." optional = false python-versions = ">=3.9" files = [ - {file = "mkdocs_autorefs-1.4.0-py3-none-any.whl", hash = "sha256:bad19f69655878d20194acd0162e29a89c3f7e6365ffe54e72aa3fd1072f240d"}, - {file = "mkdocs_autorefs-1.4.0.tar.gz", hash = "sha256:a9c0aa9c90edbce302c09d050a3c4cb7c76f8b7b2c98f84a7a05f53d00392156"}, + {file = "mkdocs_autorefs-1.4.1-py3-none-any.whl", hash = "sha256:9793c5ac06a6ebbe52ec0f8439256e66187badf4b5334b5fde0b128ec134df4f"}, + {file = "mkdocs_autorefs-1.4.1.tar.gz", hash = "sha256:4b5b6235a4becb2b10425c2fa191737e415b37aa3418919db33e5d774c9db079"}, ] [package.dependencies] @@ -1189,17 +1207,18 @@ pyyaml = ">=5.1" [[package]] name = "mkdocs-material" -version = "9.6.5" +version = "9.6.8" description = "Documentation that simply works" optional = false python-versions = ">=3.8" files = [ - {file = "mkdocs_material-9.6.5-py3-none-any.whl", hash = "sha256:aad3e6fb860c20870f75fb2a69ef901f1be727891e41adb60b753efcae19453b"}, - {file = "mkdocs_material-9.6.5.tar.gz", hash = "sha256:b714679a8c91b0ffe2188e11ed58c44d2523e9c2ae26a29cc652fa7478faa21f"}, + {file = "mkdocs_material-9.6.8-py3-none-any.whl", hash = 
"sha256:0a51532dd8aa80b232546c073fe3ef60dfaef1b1b12196ac7191ee01702d1cf8"}, + {file = "mkdocs_material-9.6.8.tar.gz", hash = "sha256:8de31bb7566379802532b248bd56d9c4bc834afc4625884bf5769f9412c6a354"}, ] [package.dependencies] babel = ">=2.10,<3.0" +backrefs = ">=5.7.post1,<6.0" colorama = ">=0.4,<1.0" jinja2 = ">=3.0,<4.0" markdown = ">=3.2,<4.0" @@ -1208,7 +1227,6 @@ mkdocs-material-extensions = ">=1.3,<2.0" paginate = ">=0.5,<1.0" pygments = ">=2.16,<3.0" pymdown-extensions = ">=10.2,<11.0" -regex = ">=2022.4" requests = ">=2.26,<3.0" [package.extras] @@ -1229,13 +1247,13 @@ files = [ [[package]] name = "mkdocstrings" -version = "0.28.2" +version = "0.28.3" description = "Automatic documentation from sources, for MkDocs." optional = false python-versions = ">=3.9" files = [ - {file = "mkdocstrings-0.28.2-py3-none-any.whl", hash = "sha256:57f79c557e2718d217d6f6a81bf75a0de097f10e922e7e5e00f085c3f0ff6895"}, - {file = "mkdocstrings-0.28.2.tar.gz", hash = "sha256:9b847266d7a588ea76a8385eaebe1538278b4361c0d1ce48ed005be59f053569"}, + {file = "mkdocstrings-0.28.3-py3-none-any.whl", hash = "sha256:df5351ffd10477aa3c2ff5cdf17544b936477195436923660274d084a5c1359c"}, + {file = "mkdocstrings-0.28.3.tar.gz", hash = "sha256:c753516b1b6cee12d00bf9c28255e22c0d71f34c721ca668971fce885d846e0f"}, ] [package.dependencies] @@ -1251,24 +1269,24 @@ typing-extensions = {version = ">=4.1", markers = "python_version < \"3.10\""} [package.extras] crystal = ["mkdocstrings-crystal (>=0.3.4)"] -python = ["mkdocstrings-python (>=0.5.2)"] +python = ["mkdocstrings-python (>=1.16.2)"] python-legacy = ["mkdocstrings-python-legacy (>=0.2.1)"] [[package]] name = "mkdocstrings-python" -version = "1.16.2" +version = "1.16.5" description = "A Python handler for mkdocstrings." 
optional = false python-versions = ">=3.9" files = [ - {file = "mkdocstrings_python-1.16.2-py3-none-any.whl", hash = "sha256:ff7e719404e59ad1a72f1afbe854769984c889b8fa043c160f6c988e1ad9e966"}, - {file = "mkdocstrings_python-1.16.2.tar.gz", hash = "sha256:942ec1a2e0481d28f96f93be3d6e343cab92a21e5baf01c37dd2d7236c4d0bd7"}, + {file = "mkdocstrings_python-1.16.5-py3-none-any.whl", hash = "sha256:0899a12e356eab8e83720c63e15d0ff51cd96603216c837618de346e086b39ba"}, + {file = "mkdocstrings_python-1.16.5.tar.gz", hash = "sha256:706b28dd0f59249a7c22cc5d517c9521e06c030b57e2a5478e1928a58f900abb"}, ] [package.dependencies] griffe = ">=0.49" mkdocs-autorefs = ">=1.4" -mkdocstrings = ">=0.28.2" +mkdocstrings = ">=0.28.3" typing-extensions = {version = ">=4.0", markers = "python_version < \"3.11\""} [[package]] @@ -1907,13 +1925,13 @@ diagrams = ["jinja2", "railroad-diagrams"] [[package]] name = "pytest" -version = "8.3.4" +version = "8.3.5" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6"}, - {file = "pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761"}, + {file = "pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820"}, + {file = "pytest-8.3.5.tar.gz", hash = "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845"}, ] [package.dependencies] @@ -2155,120 +2173,104 @@ pyyaml = "*" [[package]] name = "pyzmq" -version = "26.2.1" +version = "26.3.0" description = "Python bindings for 0MQ" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pyzmq-26.2.1-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:f39d1227e8256d19899d953e6e19ed2ccb689102e6d85e024da5acf410f301eb"}, - {file = "pyzmq-26.2.1-cp310-cp310-macosx_10_9_x86_64.whl", 
hash = "sha256:a23948554c692df95daed595fdd3b76b420a4939d7a8a28d6d7dea9711878641"}, - {file = "pyzmq-26.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95f5728b367a042df146cec4340d75359ec6237beebf4a8f5cf74657c65b9257"}, - {file = "pyzmq-26.2.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:95f7b01b3f275504011cf4cf21c6b885c8d627ce0867a7e83af1382ebab7b3ff"}, - {file = "pyzmq-26.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80a00370a2ef2159c310e662c7c0f2d030f437f35f478bb8b2f70abd07e26b24"}, - {file = "pyzmq-26.2.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:8531ed35dfd1dd2af95f5d02afd6545e8650eedbf8c3d244a554cf47d8924459"}, - {file = "pyzmq-26.2.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:cdb69710e462a38e6039cf17259d328f86383a06c20482cc154327968712273c"}, - {file = "pyzmq-26.2.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e7eeaef81530d0b74ad0d29eec9997f1c9230c2f27242b8d17e0ee67662c8f6e"}, - {file = "pyzmq-26.2.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:361edfa350e3be1f987e592e834594422338d7174364763b7d3de5b0995b16f3"}, - {file = "pyzmq-26.2.1-cp310-cp310-win32.whl", hash = "sha256:637536c07d2fb6a354988b2dd1d00d02eb5dd443f4bbee021ba30881af1c28aa"}, - {file = "pyzmq-26.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:45fad32448fd214fbe60030aa92f97e64a7140b624290834cc9b27b3a11f9473"}, - {file = "pyzmq-26.2.1-cp310-cp310-win_arm64.whl", hash = "sha256:d9da0289d8201c8a29fd158aaa0dfe2f2e14a181fd45e2dc1fbf969a62c1d594"}, - {file = "pyzmq-26.2.1-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:c059883840e634a21c5b31d9b9a0e2b48f991b94d60a811092bc37992715146a"}, - {file = "pyzmq-26.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ed038a921df836d2f538e509a59cb638df3e70ca0fcd70d0bf389dfcdf784d2a"}, - {file = "pyzmq-26.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:9027a7fcf690f1a3635dc9e55e38a0d6602dbbc0548935d08d46d2e7ec91f454"}, - {file = "pyzmq-26.2.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6d75fcb00a1537f8b0c0bb05322bc7e35966148ffc3e0362f0369e44a4a1de99"}, - {file = "pyzmq-26.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0019cc804ac667fb8c8eaecdb66e6d4a68acf2e155d5c7d6381a5645bd93ae4"}, - {file = "pyzmq-26.2.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:f19dae58b616ac56b96f2e2290f2d18730a898a171f447f491cc059b073ca1fa"}, - {file = "pyzmq-26.2.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f5eeeb82feec1fc5cbafa5ee9022e87ffdb3a8c48afa035b356fcd20fc7f533f"}, - {file = "pyzmq-26.2.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:000760e374d6f9d1a3478a42ed0c98604de68c9e94507e5452951e598ebecfba"}, - {file = "pyzmq-26.2.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:817fcd3344d2a0b28622722b98500ae9c8bfee0f825b8450932ff19c0b15bebd"}, - {file = "pyzmq-26.2.1-cp311-cp311-win32.whl", hash = "sha256:88812b3b257f80444a986b3596e5ea5c4d4ed4276d2b85c153a6fbc5ca457ae7"}, - {file = "pyzmq-26.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:ef29630fde6022471d287c15c0a2484aba188adbfb978702624ba7a54ddfa6c1"}, - {file = "pyzmq-26.2.1-cp311-cp311-win_arm64.whl", hash = "sha256:f32718ee37c07932cc336096dc7403525301fd626349b6eff8470fe0f996d8d7"}, - {file = "pyzmq-26.2.1-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:a6549ecb0041dafa55b5932dcbb6c68293e0bd5980b5b99f5ebb05f9a3b8a8f3"}, - {file = "pyzmq-26.2.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:0250c94561f388db51fd0213cdccbd0b9ef50fd3c57ce1ac937bf3034d92d72e"}, - {file = "pyzmq-26.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36ee4297d9e4b34b5dc1dd7ab5d5ea2cbba8511517ef44104d2915a917a56dc8"}, - {file = "pyzmq-26.2.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:c2a9cb17fd83b7a3a3009901aca828feaf20aa2451a8a487b035455a86549c09"}, - {file = "pyzmq-26.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:786dd8a81b969c2081b31b17b326d3a499ddd1856e06d6d79ad41011a25148da"}, - {file = "pyzmq-26.2.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:2d88ba221a07fc2c5581565f1d0fe8038c15711ae79b80d9462e080a1ac30435"}, - {file = "pyzmq-26.2.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1c84c1297ff9f1cd2440da4d57237cb74be21fdfe7d01a10810acba04e79371a"}, - {file = "pyzmq-26.2.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:46d4ebafc27081a7f73a0f151d0c38d4291656aa134344ec1f3d0199ebfbb6d4"}, - {file = "pyzmq-26.2.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:91e2bfb8e9a29f709d51b208dd5f441dc98eb412c8fe75c24ea464734ccdb48e"}, - {file = "pyzmq-26.2.1-cp312-cp312-win32.whl", hash = "sha256:4a98898fdce380c51cc3e38ebc9aa33ae1e078193f4dc641c047f88b8c690c9a"}, - {file = "pyzmq-26.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:a0741edbd0adfe5f30bba6c5223b78c131b5aa4a00a223d631e5ef36e26e6d13"}, - {file = "pyzmq-26.2.1-cp312-cp312-win_arm64.whl", hash = "sha256:e5e33b1491555843ba98d5209439500556ef55b6ab635f3a01148545498355e5"}, - {file = "pyzmq-26.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:099b56ef464bc355b14381f13355542e452619abb4c1e57a534b15a106bf8e23"}, - {file = "pyzmq-26.2.1-cp313-cp313-macosx_10_15_universal2.whl", hash = "sha256:651726f37fcbce9f8dd2a6dab0f024807929780621890a4dc0c75432636871be"}, - {file = "pyzmq-26.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57dd4d91b38fa4348e237a9388b4423b24ce9c1695bbd4ba5a3eada491e09399"}, - {file = "pyzmq-26.2.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d51a7bfe01a48e1064131f3416a5439872c533d756396be2b39e3977b41430f9"}, - {file = "pyzmq-26.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:c7154d228502e18f30f150b7ce94f0789d6b689f75261b623f0fdc1eec642aab"}, - {file = "pyzmq-26.2.1-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:f1f31661a80cc46aba381bed475a9135b213ba23ca7ff6797251af31510920ce"}, - {file = "pyzmq-26.2.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:290c96f479504439b6129a94cefd67a174b68ace8a8e3f551b2239a64cfa131a"}, - {file = "pyzmq-26.2.1-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:f2c307fbe86e18ab3c885b7e01de942145f539165c3360e2af0f094dd440acd9"}, - {file = "pyzmq-26.2.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:b314268e716487bfb86fcd6f84ebbe3e5bec5fac75fdf42bc7d90fdb33f618ad"}, - {file = "pyzmq-26.2.1-cp313-cp313-win32.whl", hash = "sha256:edb550616f567cd5603b53bb52a5f842c0171b78852e6fc7e392b02c2a1504bb"}, - {file = "pyzmq-26.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:100a826a029c8ef3d77a1d4c97cbd6e867057b5806a7276f2bac1179f893d3bf"}, - {file = "pyzmq-26.2.1-cp313-cp313-win_arm64.whl", hash = "sha256:6991ee6c43e0480deb1b45d0c7c2bac124a6540cba7db4c36345e8e092da47ce"}, - {file = "pyzmq-26.2.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:25e720dba5b3a3bb2ad0ad5d33440babd1b03438a7a5220511d0c8fa677e102e"}, - {file = "pyzmq-26.2.1-cp313-cp313t-macosx_10_15_universal2.whl", hash = "sha256:9ec6abfb701437142ce9544bd6a236addaf803a32628d2260eb3dbd9a60e2891"}, - {file = "pyzmq-26.2.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e1eb9d2bfdf5b4e21165b553a81b2c3bd5be06eeddcc4e08e9692156d21f1f6"}, - {file = "pyzmq-26.2.1-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:90dc731d8e3e91bcd456aa7407d2eba7ac6f7860e89f3766baabb521f2c1de4a"}, - {file = "pyzmq-26.2.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b6a93d684278ad865fc0b9e89fe33f6ea72d36da0e842143891278ff7fd89c3"}, - {file = "pyzmq-26.2.1-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = 
"sha256:c1bb37849e2294d519117dd99b613c5177934e5c04a5bb05dd573fa42026567e"}, - {file = "pyzmq-26.2.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:632a09c6d8af17b678d84df442e9c3ad8e4949c109e48a72f805b22506c4afa7"}, - {file = "pyzmq-26.2.1-cp313-cp313t-musllinux_1_1_i686.whl", hash = "sha256:fc409c18884eaf9ddde516d53af4f2db64a8bc7d81b1a0c274b8aa4e929958e8"}, - {file = "pyzmq-26.2.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:17f88622b848805d3f6427ce1ad5a2aa3cf61f12a97e684dab2979802024d460"}, - {file = "pyzmq-26.2.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3ef584f13820d2629326fe20cc04069c21c5557d84c26e277cfa6235e523b10f"}, - {file = "pyzmq-26.2.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:160194d1034902937359c26ccfa4e276abffc94937e73add99d9471e9f555dd6"}, - {file = "pyzmq-26.2.1-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:574b285150afdbf0a0424dddf7ef9a0d183988eb8d22feacb7160f7515e032cb"}, - {file = "pyzmq-26.2.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44dba28c34ce527cf687156c81f82bf1e51f047838d5964f6840fd87dfecf9fe"}, - {file = "pyzmq-26.2.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:9fbdb90b85c7624c304f72ec7854659a3bd901e1c0ffb2363163779181edeb68"}, - {file = "pyzmq-26.2.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a7ad34a2921e8f76716dc7205c9bf46a53817e22b9eec2e8a3e08ee4f4a72468"}, - {file = "pyzmq-26.2.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:866c12b7c90dd3a86983df7855c6f12f9407c8684db6aa3890fc8027462bda82"}, - {file = "pyzmq-26.2.1-cp37-cp37m-win32.whl", hash = "sha256:eeb37f65350d5c5870517f02f8bbb2ac0fbec7b416c0f4875219fef305a89a45"}, - {file = "pyzmq-26.2.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4eb3197f694dfb0ee6af29ef14a35f30ae94ff67c02076eef8125e2d98963cd0"}, - {file = "pyzmq-26.2.1-cp38-cp38-macosx_10_15_universal2.whl", hash = 
"sha256:36d4e7307db7c847fe37413f333027d31c11d5e6b3bacbb5022661ac635942ba"}, - {file = "pyzmq-26.2.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1c6ae0e95d0a4b0cfe30f648a18e764352d5415279bdf34424decb33e79935b8"}, - {file = "pyzmq-26.2.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5b4fc44f5360784cc02392f14235049665caaf7c0fe0b04d313e763d3338e463"}, - {file = "pyzmq-26.2.1-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:51431f6b2750eb9b9d2b2952d3cc9b15d0215e1b8f37b7a3239744d9b487325d"}, - {file = "pyzmq-26.2.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bdbc78ae2065042de48a65f1421b8af6b76a0386bb487b41955818c3c1ce7bed"}, - {file = "pyzmq-26.2.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d14f50d61a89b0925e4d97a0beba6053eb98c426c5815d949a43544f05a0c7ec"}, - {file = "pyzmq-26.2.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:004837cb958988c75d8042f5dac19a881f3d9b3b75b2f574055e22573745f841"}, - {file = "pyzmq-26.2.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0b2007f28ce1b8acebdf4812c1aab997a22e57d6a73b5f318b708ef9bcabbe95"}, - {file = "pyzmq-26.2.1-cp38-cp38-win32.whl", hash = "sha256:269c14904da971cb5f013100d1aaedb27c0a246728c341d5d61ddd03f463f2f3"}, - {file = "pyzmq-26.2.1-cp38-cp38-win_amd64.whl", hash = "sha256:31fff709fef3b991cfe7189d2cfe0c413a1d0e82800a182cfa0c2e3668cd450f"}, - {file = "pyzmq-26.2.1-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:a4bffcadfd40660f26d1b3315a6029fd4f8f5bf31a74160b151f5c577b2dc81b"}, - {file = "pyzmq-26.2.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e76ad4729c2f1cf74b6eb1bdd05f6aba6175999340bd51e6caee49a435a13bf5"}, - {file = "pyzmq-26.2.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:8b0f5bab40a16e708e78a0c6ee2425d27e1a5d8135c7a203b4e977cee37eb4aa"}, - {file = "pyzmq-26.2.1-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:e8e47050412f0ad3a9b2287779758073cbf10e460d9f345002d4779e43bb0136"}, - {file = "pyzmq-26.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f18ce33f422d119b13c1363ed4cce245b342b2c5cbbb76753eabf6aa6f69c7d"}, - {file = "pyzmq-26.2.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ceb0d78b7ef106708a7e2c2914afe68efffc0051dc6a731b0dbacd8b4aee6d68"}, - {file = "pyzmq-26.2.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ebdd96bd637fd426d60e86a29ec14b8c1ab64b8d972f6a020baf08a30d1cf46"}, - {file = "pyzmq-26.2.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:03719e424150c6395b9513f53a5faadcc1ce4b92abdf68987f55900462ac7eec"}, - {file = "pyzmq-26.2.1-cp39-cp39-win32.whl", hash = "sha256:ef5479fac31df4b304e96400fc67ff08231873ee3537544aa08c30f9d22fce38"}, - {file = "pyzmq-26.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:f92a002462154c176dac63a8f1f6582ab56eb394ef4914d65a9417f5d9fde218"}, - {file = "pyzmq-26.2.1-cp39-cp39-win_arm64.whl", hash = "sha256:1fd4b3efc6f62199886440d5e27dd3ccbcb98dfddf330e7396f1ff421bfbb3c2"}, - {file = "pyzmq-26.2.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:380816d298aed32b1a97b4973a4865ef3be402a2e760204509b52b6de79d755d"}, - {file = "pyzmq-26.2.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97cbb368fd0debdbeb6ba5966aa28e9a1ae3396c7386d15569a6ca4be4572b99"}, - {file = "pyzmq-26.2.1-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abf7b5942c6b0dafcc2823ddd9154f419147e24f8df5b41ca8ea40a6db90615c"}, - {file = "pyzmq-26.2.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3fe6e28a8856aea808715f7a4fc11f682b9d29cac5d6262dd8fe4f98edc12d53"}, - {file = "pyzmq-26.2.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:bd8fdee945b877aa3bffc6a5a8816deb048dab0544f9df3731ecd0e54d8c84c9"}, - {file = "pyzmq-26.2.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:ee7152f32c88e0e1b5b17beb9f0e2b14454235795ef68c0c120b6d3d23d12833"}, - {file = "pyzmq-26.2.1-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:baa1da72aecf6a490b51fba7a51f1ce298a1e0e86d0daef8265c8f8f9848eb77"}, - {file = "pyzmq-26.2.1-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:49135bb327fca159262d8fd14aa1f4a919fe071b04ed08db4c7c37d2f0647162"}, - {file = "pyzmq-26.2.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8bacc1a10c150d58e8a9ee2b2037a70f8d903107e0f0b6e079bf494f2d09c091"}, - {file = "pyzmq-26.2.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:09dac387ce62d69bec3f06d51610ca1d660e7849eb45f68e38e7f5cf1f49cbcb"}, - {file = "pyzmq-26.2.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:70b3a46ecd9296e725ccafc17d732bfc3cdab850b54bd913f843a0a54dfb2c04"}, - {file = "pyzmq-26.2.1-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:59660e15c797a3b7a571c39f8e0b62a1f385f98ae277dfe95ca7eaf05b5a0f12"}, - {file = "pyzmq-26.2.1-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0f50db737d688e96ad2a083ad2b453e22865e7e19c7f17d17df416e91ddf67eb"}, - {file = "pyzmq-26.2.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a003200b6cd64e89b5725ff7e284a93ab24fd54bbac8b4fa46b1ed57be693c27"}, - {file = "pyzmq-26.2.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:f9ba5def063243793dec6603ad1392f735255cbc7202a3a484c14f99ec290705"}, - {file = "pyzmq-26.2.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1238c2448c58b9c8d6565579393148414a42488a5f916b3f322742e561f6ae0d"}, - {file = "pyzmq-26.2.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8eddb3784aed95d07065bcf94d07e8c04024fdb6b2386f08c197dfe6b3528fda"}, - {file = "pyzmq-26.2.1-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:f0f19c2097fffb1d5b07893d75c9ee693e9cbc809235cf3f2267f0ef6b015f24"}, - {file = "pyzmq-26.2.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0995fd3530f2e89d6b69a2202e340bbada3191014352af978fa795cb7a446331"}, - {file = "pyzmq-26.2.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:7c6160fe513654e65665332740f63de29ce0d165e053c0c14a161fa60dd0da01"}, - {file = "pyzmq-26.2.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:8ec8e3aea6146b761d6c57fcf8f81fcb19f187afecc19bf1701a48db9617a217"}, - {file = "pyzmq-26.2.1.tar.gz", hash = "sha256:17d72a74e5e9ff3829deb72897a175333d3ef5b5413948cae3cf7ebf0b02ecca"}, + {file = "pyzmq-26.3.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:1586944f4736515af5c6d3a5b150c7e8ca2a2d6e46b23057320584d6f2438f4a"}, + {file = "pyzmq-26.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa7efc695d1fc9f72d91bf9b6c6fe2d7e1b4193836ec530a98faf7d7a7577a58"}, + {file = "pyzmq-26.3.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bd84441e4021cec6e4dd040550386cd9c9ea1d9418ea1a8002dbb7b576026b2b"}, + {file = "pyzmq-26.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9176856f36c34a8aa5c0b35ddf52a5d5cd8abeece57c2cd904cfddae3fd9acd3"}, + {file = "pyzmq-26.3.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:49334faa749d55b77f084389a80654bf2e68ab5191c0235066f0140c1b670d64"}, + {file = "pyzmq-26.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:fd30fc80fe96efb06bea21667c5793bbd65c0dc793187feb39b8f96990680b00"}, + {file = "pyzmq-26.3.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b2eddfbbfb473a62c3a251bb737a6d58d91907f6e1d95791431ebe556f47d916"}, + {file = "pyzmq-26.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:70b3acb9ad729a53d4e751dace35404a024f188aad406013454216aba5485b4e"}, + {file = "pyzmq-26.3.0-cp310-cp310-win32.whl", hash = 
"sha256:c1bd75d692cd7c6d862a98013bfdf06702783b75cffbf5dae06d718fecefe8f2"}, + {file = "pyzmq-26.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:d7165bcda0dbf203e5ad04d79955d223d84b2263df4db92f525ba370b03a12ab"}, + {file = "pyzmq-26.3.0-cp310-cp310-win_arm64.whl", hash = "sha256:e34a63f71d2ecffb3c643909ad2d488251afeb5ef3635602b3448e609611a7ed"}, + {file = "pyzmq-26.3.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:2833602d9d42c94b9d0d2a44d2b382d3d3a4485be018ba19dddc401a464c617a"}, + {file = "pyzmq-26.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8270d104ec7caa0bdac246d31d48d94472033ceab5ba142881704350b28159c"}, + {file = "pyzmq-26.3.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c208a977843d18d3bd185f323e4eaa912eb4869cb230947dc6edd8a27a4e558a"}, + {file = "pyzmq-26.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eddc2be28a379c218e0d92e4a432805dcb0ca5870156a90b54c03cd9799f9f8a"}, + {file = "pyzmq-26.3.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:c0b519fa2159c42272f8a244354a0e110d65175647e5185b04008ec00df9f079"}, + {file = "pyzmq-26.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1595533de3a80bf8363372c20bafa963ec4bf9f2b8f539b1d9a5017f430b84c9"}, + {file = "pyzmq-26.3.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bbef99eb8d18ba9a40f00e8836b8040cdcf0f2fa649684cf7a66339599919d21"}, + {file = "pyzmq-26.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:979486d444ca3c469cd1c7f6a619ce48ff08b3b595d451937db543754bfacb65"}, + {file = "pyzmq-26.3.0-cp311-cp311-win32.whl", hash = "sha256:4b127cfe10b4c56e4285b69fd4b38ea1d368099ea4273d8fb349163fce3cd598"}, + {file = "pyzmq-26.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:cf736cc1298ef15280d9fcf7a25c09b05af016656856dc6fe5626fd8912658dd"}, + {file = "pyzmq-26.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:2dc46ec09f5d36f606ac8393303149e69d17121beee13c8dac25e2a2078e31c4"}, + {file = 
"pyzmq-26.3.0-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:c80653332c6136da7f4d4e143975e74ac0fa14f851f716d90583bc19e8945cea"}, + {file = "pyzmq-26.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e317ee1d4528a03506cb1c282cd9db73660a35b3564096de37de7350e7d87a7"}, + {file = "pyzmq-26.3.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:943a22ebb3daacb45f76a9bcca9a7b74e7d94608c0c0505da30af900b998ca8d"}, + {file = "pyzmq-26.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3fc9e71490d989144981ea21ef4fdfaa7b6aa84aff9632d91c736441ce2f6b00"}, + {file = "pyzmq-26.3.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:e281a8071a06888575a4eb523c4deeefdcd2f5fe4a2d47e02ac8bf3a5b49f695"}, + {file = "pyzmq-26.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:be77efd735bb1064605be8dec6e721141c1421ef0b115ef54e493a64e50e9a52"}, + {file = "pyzmq-26.3.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:7a4ac2ffa34f1212dd586af90f4ba894e424f0cabb3a49cdcff944925640f6ac"}, + {file = "pyzmq-26.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ba698c7c252af83b6bba9775035263f0df5f807f0404019916d4b71af8161f66"}, + {file = "pyzmq-26.3.0-cp312-cp312-win32.whl", hash = "sha256:214038aaa88e801e54c2ef0cfdb2e6df27eb05f67b477380a452b595c5ecfa37"}, + {file = "pyzmq-26.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:bad7fe0372e505442482ca3ccbc0d6f38dae81b1650f57a0aa6bbee18e7df495"}, + {file = "pyzmq-26.3.0-cp312-cp312-win_arm64.whl", hash = "sha256:b7b578d604e79e99aa39495becea013fd043fa9f36e4b490efa951f3d847a24d"}, + {file = "pyzmq-26.3.0-cp313-cp313-macosx_10_15_universal2.whl", hash = "sha256:fa85953df84beb7b8b73cb3ec3f5d92b62687a09a8e71525c6734e020edf56fd"}, + {file = "pyzmq-26.3.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:209d09f0ab6ddbcebe64630d1e6ca940687e736f443c265ae15bc4bfad833597"}, + {file = 
"pyzmq-26.3.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d35cc1086f1d4f907df85c6cceb2245cb39a04f69c3f375993363216134d76d4"}, + {file = "pyzmq-26.3.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b380e9087078ba91e45fb18cdd0c25275ffaa045cf63c947be0ddae6186bc9d9"}, + {file = "pyzmq-26.3.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:6d64e74143587efe7c9522bb74d1448128fdf9897cc9b6d8b9927490922fd558"}, + {file = "pyzmq-26.3.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:efba4f53ac7752eea6d8ca38a4ddac579e6e742fba78d1e99c12c95cd2acfc64"}, + {file = "pyzmq-26.3.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:9b0137a1c40da3b7989839f9b78a44de642cdd1ce20dcef341de174c8d04aa53"}, + {file = "pyzmq-26.3.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:a995404bd3982c089e57b428c74edd5bfc3b0616b3dbcd6a8e270f1ee2110f36"}, + {file = "pyzmq-26.3.0-cp313-cp313-win32.whl", hash = "sha256:240b1634b9e530ef6a277d95cbca1a6922f44dfddc5f0a3cd6c722a8de867f14"}, + {file = "pyzmq-26.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:fe67291775ea4c2883764ba467eb389c29c308c56b86c1e19e49c9e1ed0cbeca"}, + {file = "pyzmq-26.3.0-cp313-cp313-win_arm64.whl", hash = "sha256:73ca9ae9a9011b714cf7650450cd9c8b61a135180b708904f1f0a05004543dce"}, + {file = "pyzmq-26.3.0-cp313-cp313t-macosx_10_15_universal2.whl", hash = "sha256:fea7efbd7e49af9d7e5ed6c506dfc7de3d1a628790bd3a35fd0e3c904dc7d464"}, + {file = "pyzmq-26.3.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4430c7cba23bb0e2ee203eee7851c1654167d956fc6d4b3a87909ccaf3c5825"}, + {file = "pyzmq-26.3.0-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:016d89bee8c7d566fad75516b4e53ec7c81018c062d4c51cd061badf9539be52"}, + {file = "pyzmq-26.3.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:04bfe59852d76d56736bfd10ac1d49d421ab8ed11030b4a0332900691507f557"}, + {file = 
"pyzmq-26.3.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:1fe05bd0d633a0f672bb28cb8b4743358d196792e1caf04973b7898a0d70b046"}, + {file = "pyzmq-26.3.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:2aa1a9f236d5b835fb8642f27de95f9edcfd276c4bc1b6ffc84f27c6fb2e2981"}, + {file = "pyzmq-26.3.0-cp313-cp313t-musllinux_1_1_i686.whl", hash = "sha256:21399b31753bf321043ea60c360ed5052cc7be20739785b1dff1820f819e35b3"}, + {file = "pyzmq-26.3.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:d015efcd96aca8882057e7e6f06224f79eecd22cad193d3e6a0a91ec67590d1f"}, + {file = "pyzmq-26.3.0-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:18183cc3851b995fdc7e5f03d03b8a4e1b12b0f79dff1ec1da75069af6357a05"}, + {file = "pyzmq-26.3.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:da87e977f92d930a3683e10ba2b38bcc59adfc25896827e0b9d78b208b7757a6"}, + {file = "pyzmq-26.3.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cf6db401f4957afbf372a4730c6d5b2a234393af723983cbf4bcd13d54c71e1a"}, + {file = "pyzmq-26.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03caa2ffd64252122139d50ec92987f89616b9b92c9ba72920b40e92709d5e26"}, + {file = "pyzmq-26.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:fbf206e5329e20937fa19bd41cf3af06d5967f8f7e86b59d783b26b40ced755c"}, + {file = "pyzmq-26.3.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6fb539a6382a048308b409d8c66d79bf636eda1b24f70c78f2a1fd16e92b037b"}, + {file = "pyzmq-26.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7897b8c8bbbb2bd8cad887bffcb07aede71ef1e45383bd4d6ac049bf0af312a4"}, + {file = "pyzmq-26.3.0-cp38-cp38-win32.whl", hash = "sha256:91dead2daca698ae52ce70ee2adbb94ddd9b5f96877565fd40aa4efd18ecc6a3"}, + {file = "pyzmq-26.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:8c088e009a6d6b9f563336adb906e3a8d3fd64db129acc8d8fd0e9fe22b2dac8"}, + {file = "pyzmq-26.3.0-cp39-cp39-macosx_10_15_universal2.whl", hash = 
"sha256:2eaed0d911fb3280981d5495978152fab6afd9fe217fd16f411523665089cef1"}, + {file = "pyzmq-26.3.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:7998b60ef1c105846fb3bfca494769fde3bba6160902e7cd27a8df8257890ee9"}, + {file = "pyzmq-26.3.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:96c0006a8d1d00e46cb44c8e8d7316d4a232f3d8f2ed43179d4578dbcb0829b6"}, + {file = "pyzmq-26.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e17cc198dc50a25a0f245e6b1e56f692df2acec3ccae82d1f60c34bfb72bbec"}, + {file = "pyzmq-26.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:92a30840f4f2a31f7049d0a7de5fc69dd03b19bd5d8e7fed8d0bde49ce49b589"}, + {file = "pyzmq-26.3.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f52eba83272a26b444f4b8fc79f2e2c83f91d706d693836c9f7ccb16e6713c31"}, + {file = "pyzmq-26.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:952085a09ff32115794629ba47f8940896d7842afdef1283332109d38222479d"}, + {file = "pyzmq-26.3.0-cp39-cp39-win32.whl", hash = "sha256:0240289e33e3fbae44a5db73e54e955399179332a6b1d47c764a4983ec1524c3"}, + {file = "pyzmq-26.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:b2db7c82f08b8ce44c0b9d1153ce63907491972a7581e8b6adea71817f119df8"}, + {file = "pyzmq-26.3.0-cp39-cp39-win_arm64.whl", hash = "sha256:2d3459b6311463c96abcb97808ee0a1abb0d932833edb6aa81c30d622fd4a12d"}, + {file = "pyzmq-26.3.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:ad03f4252d9041b0635c37528dfa3f44b39f46024ae28c8567f7423676ee409b"}, + {file = "pyzmq-26.3.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f3dfb68cf7bf4cfdf34283a75848e077c5defa4907506327282afe92780084d"}, + {file = "pyzmq-26.3.0-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:356ec0e39c5a9cda872b65aca1fd8a5d296ffdadf8e2442b70ff32e73ef597b1"}, + {file = "pyzmq-26.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:749d671b0eec8e738bbf0b361168369d8c682b94fcd458c20741dc4d69ef5278"}, + {file = "pyzmq-26.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f950f17ae608e0786298340163cac25a4c5543ef25362dd5ddb6dcb10b547be9"}, + {file = "pyzmq-26.3.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:b4fc9903a73c25be9d5fe45c87faababcf3879445efa16140146b08fccfac017"}, + {file = "pyzmq-26.3.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c15b69af22030960ac63567e98ad8221cddf5d720d9cf03d85021dfd452324ef"}, + {file = "pyzmq-26.3.0-pp311-pypy311_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2cf9ab0dff4dbaa2e893eb608373c97eb908e53b7d9793ad00ccbd082c0ee12f"}, + {file = "pyzmq-26.3.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ec332675f6a138db57aad93ae6387953763f85419bdbd18e914cb279ee1c451"}, + {file = "pyzmq-26.3.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:eb96568a22fe070590942cd4780950e2172e00fb033a8b76e47692583b1bd97c"}, + {file = "pyzmq-26.3.0-pp38-pypy38_pp73-macosx_10_15_x86_64.whl", hash = "sha256:009a38241c76184cb004c869e82a99f0aee32eda412c1eb44df5820324a01d25"}, + {file = "pyzmq-26.3.0-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:4c22a12713707467abedc6d75529dd365180c4c2a1511268972c6e1d472bd63e"}, + {file = "pyzmq-26.3.0-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1614fcd116275d24f2346ffca4047a741c546ad9d561cbf7813f11226ca4ed2c"}, + {file = "pyzmq-26.3.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e2cafe7e9c7fed690e8ecf65af119f9c482923b5075a78f6f7629c63e1b4b1d"}, + {file = "pyzmq-26.3.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:14e0b81753424bd374075df6cc30b87f2c99e5f022501d97eff66544ca578941"}, + {file = "pyzmq-26.3.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:21c6ddb98557a77cfe3366af0c5600fb222a1b2de5f90d9cd052b324e0c295e8"}, + {file 
= "pyzmq-26.3.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fc81d5d60c9d40e692de14b8d884d43cf67562402b931681f0ccb3ce6b19875"}, + {file = "pyzmq-26.3.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52b064fafef772d0f5dbf52d4c39f092be7bc62d9a602fe6e82082e001326de3"}, + {file = "pyzmq-26.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b72206eb041f780451c61e1e89dbc3705f3d66aaaa14ee320d4f55864b13358a"}, + {file = "pyzmq-26.3.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:8ab78dc21c7b1e13053086bcf0b4246440b43b5409904b73bfd1156654ece8a1"}, + {file = "pyzmq-26.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:0b42403ad7d1194dca9574cd3c56691c345f4601fa2d0a33434f35142baec7ac"}, + {file = "pyzmq-26.3.0.tar.gz", hash = "sha256:f1cd68b8236faab78138a8fc703f7ca0ad431b17a3fcac696358600d4e6243b3"}, ] [package.dependencies] @@ -2610,13 +2612,13 @@ files = [ [[package]] name = "starlette" -version = "0.45.3" +version = "0.46.1" description = "The little ASGI library that shines." 
optional = false python-versions = ">=3.9" files = [ - {file = "starlette-0.45.3-py3-none-any.whl", hash = "sha256:dfb6d332576f136ec740296c7e8bb8c8a7125044e7c6da30744718880cdd059d"}, - {file = "starlette-0.45.3.tar.gz", hash = "sha256:2cbcba2a75806f8a41c722141486f37c28e30a0921c5f6fe4346cb0dcee1302f"}, + {file = "starlette-0.46.1-py3-none-any.whl", hash = "sha256:77c74ed9d2720138b25875133f3a2dae6d854af2ec37dceb56aef370c1d8a227"}, + {file = "starlette-0.46.1.tar.gz", hash = "sha256:3c88d58ee4bd1bb807c0d1acb381838afc7752f9ddaec81bbe4383611d833230"}, ] [package.dependencies] @@ -2807,13 +2809,13 @@ test = ["coverage", "flake8 (>=3.7)", "mypy", "pretend", "pytest"] [[package]] name = "virtualenv" -version = "20.29.2" +version = "20.29.3" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.8" files = [ - {file = "virtualenv-20.29.2-py3-none-any.whl", hash = "sha256:febddfc3d1ea571bdb1dc0f98d7b45d24def7428214d4fb73cc486c9568cce6a"}, - {file = "virtualenv-20.29.2.tar.gz", hash = "sha256:fdaabebf6d03b5ba83ae0a02cfe96f48a716f4fae556461d180825866f75b728"}, + {file = "virtualenv-20.29.3-py3-none-any.whl", hash = "sha256:3e3d00f5807e83b234dfb6122bf37cfadf4be216c53a49ac059d02414f819170"}, + {file = "virtualenv-20.29.3.tar.gz", hash = "sha256:95e39403fcf3940ac45bc717597dba16110b74506131845d9b687d5e73d947ac"}, ] [package.dependencies] @@ -2869,80 +2871,80 @@ watchmedo = ["PyYAML (>=3.10)"] [[package]] name = "websockets" -version = "15.0" +version = "15.0.1" description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" optional = false python-versions = ">=3.9" files = [ - {file = "websockets-15.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5e6ee18a53dd5743e6155b8ff7e8e477c25b29b440f87f65be8165275c87fef0"}, - {file = "websockets-15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ee06405ea2e67366a661ed313e14cf2a86e84142a3462852eb96348f7219cee3"}, - {file = "websockets-15.0-cp310-cp310-macosx_11_0_arm64.whl", 
hash = "sha256:8711682a629bbcaf492f5e0af72d378e976ea1d127a2d47584fa1c2c080b436b"}, - {file = "websockets-15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94c4a9b01eede952442c088d415861b0cf2053cbd696b863f6d5022d4e4e2453"}, - {file = "websockets-15.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:45535fead66e873f411c1d3cf0d3e175e66f4dd83c4f59d707d5b3e4c56541c4"}, - {file = "websockets-15.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e389efe46ccb25a1f93d08c7a74e8123a2517f7b7458f043bd7529d1a63ffeb"}, - {file = "websockets-15.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:67a04754d121ea5ca39ddedc3f77071651fb5b0bc6b973c71c515415b44ed9c5"}, - {file = "websockets-15.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:bd66b4865c8b853b8cca7379afb692fc7f52cf898786537dfb5e5e2d64f0a47f"}, - {file = "websockets-15.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:a4cc73a6ae0a6751b76e69cece9d0311f054da9b22df6a12f2c53111735657c8"}, - {file = "websockets-15.0-cp310-cp310-win32.whl", hash = "sha256:89da58e4005e153b03fe8b8794330e3f6a9774ee9e1c3bd5bc52eb098c3b0c4f"}, - {file = "websockets-15.0-cp310-cp310-win_amd64.whl", hash = "sha256:4ff380aabd7a74a42a760ee76c68826a8f417ceb6ea415bd574a035a111fd133"}, - {file = "websockets-15.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:dd24c4d256558429aeeb8d6c24ebad4e982ac52c50bc3670ae8646c181263965"}, - {file = "websockets-15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f83eca8cbfd168e424dfa3b3b5c955d6c281e8fc09feb9d870886ff8d03683c7"}, - {file = "websockets-15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4095a1f2093002c2208becf6f9a178b336b7572512ee0a1179731acb7788e8ad"}, - {file = "websockets-15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:fb915101dfbf318486364ce85662bb7b020840f68138014972c08331458d41f3"}, - {file = "websockets-15.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:45d464622314973d78f364689d5dbb9144e559f93dca11b11af3f2480b5034e1"}, - {file = "websockets-15.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ace960769d60037ca9625b4c578a6f28a14301bd2a1ff13bb00e824ac9f73e55"}, - {file = "websockets-15.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c7cd4b1015d2f60dfe539ee6c95bc968d5d5fad92ab01bb5501a77393da4f596"}, - {file = "websockets-15.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4f7290295794b5dec470867c7baa4a14182b9732603fd0caf2a5bf1dc3ccabf3"}, - {file = "websockets-15.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3abd670ca7ce230d5a624fd3d55e055215d8d9b723adee0a348352f5d8d12ff4"}, - {file = "websockets-15.0-cp311-cp311-win32.whl", hash = "sha256:110a847085246ab8d4d119632145224d6b49e406c64f1bbeed45c6f05097b680"}, - {file = "websockets-15.0-cp311-cp311-win_amd64.whl", hash = "sha256:8d7bbbe2cd6ed80aceef2a14e9f1c1b61683194c216472ed5ff33b700e784e37"}, - {file = "websockets-15.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:cccc18077acd34c8072578394ec79563664b1c205f7a86a62e94fafc7b59001f"}, - {file = "websockets-15.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d4c22992e24f12de340ca5f824121a5b3e1a37ad4360b4e1aaf15e9d1c42582d"}, - {file = "websockets-15.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1206432cc6c644f6fc03374b264c5ff805d980311563202ed7fef91a38906276"}, - {file = "websockets-15.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d3cc75ef3e17490042c47e0523aee1bcc4eacd2482796107fd59dd1100a44bc"}, - {file = "websockets-15.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:b89504227a5311610e4be16071465885a0a3d6b0e82e305ef46d9b064ce5fb72"}, - {file = "websockets-15.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56e3efe356416bc67a8e093607315951d76910f03d2b3ad49c4ade9207bf710d"}, - {file = "websockets-15.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0f2205cdb444a42a7919690238fb5979a05439b9dbb73dd47c863d39640d85ab"}, - {file = "websockets-15.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:aea01f40995fa0945c020228ab919b8dfc93fc8a9f2d3d705ab5b793f32d9e99"}, - {file = "websockets-15.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a9f8e33747b1332db11cf7fcf4a9512bef9748cb5eb4d3f7fbc8c30d75dc6ffc"}, - {file = "websockets-15.0-cp312-cp312-win32.whl", hash = "sha256:32e02a2d83f4954aa8c17e03fe8ec6962432c39aca4be7e8ee346b05a3476904"}, - {file = "websockets-15.0-cp312-cp312-win_amd64.whl", hash = "sha256:ffc02b159b65c05f2ed9ec176b715b66918a674bd4daed48a9a7a590dd4be1aa"}, - {file = "websockets-15.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:d2244d8ab24374bed366f9ff206e2619345f9cd7fe79aad5225f53faac28b6b1"}, - {file = "websockets-15.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:3a302241fbe825a3e4fe07666a2ab513edfdc6d43ce24b79691b45115273b5e7"}, - {file = "websockets-15.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:10552fed076757a70ba2c18edcbc601c7637b30cdfe8c24b65171e824c7d6081"}, - {file = "websockets-15.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c53f97032b87a406044a1c33d1e9290cc38b117a8062e8a8b285175d7e2f99c9"}, - {file = "websockets-15.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1caf951110ca757b8ad9c4974f5cac7b8413004d2f29707e4d03a65d54cedf2b"}, - {file = "websockets-15.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:8bf1ab71f9f23b0a1d52ec1682a3907e0c208c12fef9c3e99d2b80166b17905f"}, - {file = "websockets-15.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bfcd3acc1a81f106abac6afd42327d2cf1e77ec905ae11dc1d9142a006a496b6"}, - {file = "websockets-15.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:c8c5c8e1bac05ef3c23722e591ef4f688f528235e2480f157a9cfe0a19081375"}, - {file = "websockets-15.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:86bfb52a9cfbcc09aba2b71388b0a20ea5c52b6517c0b2e316222435a8cdab72"}, - {file = "websockets-15.0-cp313-cp313-win32.whl", hash = "sha256:26ba70fed190708551c19a360f9d7eca8e8c0f615d19a574292b7229e0ae324c"}, - {file = "websockets-15.0-cp313-cp313-win_amd64.whl", hash = "sha256:ae721bcc8e69846af00b7a77a220614d9b2ec57d25017a6bbde3a99473e41ce8"}, - {file = "websockets-15.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c348abc5924caa02a62896300e32ea80a81521f91d6db2e853e6b1994017c9f6"}, - {file = "websockets-15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5294fcb410ed0a45d5d1cdedc4e51a60aab5b2b3193999028ea94afc2f554b05"}, - {file = "websockets-15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c24ba103ecf45861e2e1f933d40b2d93f5d52d8228870c3e7bf1299cd1cb8ff1"}, - {file = "websockets-15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc8821a03bcfb36e4e4705316f6b66af28450357af8a575dc8f4b09bf02a3dee"}, - {file = "websockets-15.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc5ae23ada6515f31604f700009e2df90b091b67d463a8401c1d8a37f76c1d7"}, - {file = "websockets-15.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ac67b542505186b3bbdaffbc303292e1ee9c8729e5d5df243c1f20f4bb9057e"}, - {file = "websockets-15.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c86dc2068f1c5ca2065aca34f257bbf4f78caf566eb230f692ad347da191f0a1"}, - {file = 
"websockets-15.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:30cff3ef329682b6182c01c568f551481774c476722020b8f7d0daacbed07a17"}, - {file = "websockets-15.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:98dcf978d4c6048965d1762abd534c9d53bae981a035bfe486690ba11f49bbbb"}, - {file = "websockets-15.0-cp39-cp39-win32.whl", hash = "sha256:37d66646f929ae7c22c79bc73ec4074d6db45e6384500ee3e0d476daf55482a9"}, - {file = "websockets-15.0-cp39-cp39-win_amd64.whl", hash = "sha256:24d5333a9b2343330f0f4eb88546e2c32a7f5c280f8dd7d3cc079beb0901781b"}, - {file = "websockets-15.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:b499caef4bca9cbd0bd23cd3386f5113ee7378094a3cb613a2fa543260fe9506"}, - {file = "websockets-15.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:17f2854c6bd9ee008c4b270f7010fe2da6c16eac5724a175e75010aacd905b31"}, - {file = "websockets-15.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89f72524033abbfde880ad338fd3c2c16e31ae232323ebdfbc745cbb1b3dcc03"}, - {file = "websockets-15.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1657a9eecb29d7838e3b415458cc494e6d1b194f7ac73a34aa55c6fb6c72d1f3"}, - {file = "websockets-15.0-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e413352a921f5ad5d66f9e2869b977e88d5103fc528b6deb8423028a2befd842"}, - {file = "websockets-15.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:8561c48b0090993e3b2a54db480cab1d23eb2c5735067213bb90f402806339f5"}, - {file = "websockets-15.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:190bc6ef8690cd88232a038d1b15714c258f79653abad62f7048249b09438af3"}, - {file = "websockets-15.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:327adab7671f3726b0ba69be9e865bba23b37a605b585e65895c428f6e47e766"}, - {file = 
"websockets-15.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bd8ef197c87afe0a9009f7a28b5dc613bfc585d329f80b7af404e766aa9e8c7"}, - {file = "websockets-15.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:789c43bf4a10cd067c24c321238e800b8b2716c863ddb2294d2fed886fa5a689"}, - {file = "websockets-15.0-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7394c0b7d460569c9285fa089a429f58465db930012566c03046f9e3ab0ed181"}, - {file = "websockets-15.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2ea4f210422b912ebe58ef0ad33088bc8e5c5ff9655a8822500690abc3b1232d"}, - {file = "websockets-15.0-py3-none-any.whl", hash = "sha256:51ffd53c53c4442415b613497a34ba0aa7b99ac07f1e4a62db5dcd640ae6c3c3"}, - {file = "websockets-15.0.tar.gz", hash = "sha256:ca36151289a15b39d8d683fd8b7abbe26fc50be311066c5f8dcf3cb8cee107ab"}, + {file = "websockets-15.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d63efaa0cd96cf0c5fe4d581521d9fa87744540d4bc999ae6e08595a1014b45b"}, + {file = "websockets-15.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ac60e3b188ec7574cb761b08d50fcedf9d77f1530352db4eef1707fe9dee7205"}, + {file = "websockets-15.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5756779642579d902eed757b21b0164cd6fe338506a8083eb58af5c372e39d9a"}, + {file = "websockets-15.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fdfe3e2a29e4db3659dbd5bbf04560cea53dd9610273917799f1cde46aa725e"}, + {file = "websockets-15.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c2529b320eb9e35af0fa3016c187dffb84a3ecc572bcee7c3ce302bfeba52bf"}, + {file = "websockets-15.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:ac1e5c9054fe23226fb11e05a6e630837f074174c4c2f0fe442996112a6de4fb"}, + {file = "websockets-15.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5df592cd503496351d6dc14f7cdad49f268d8e618f80dce0cd5a36b93c3fc08d"}, + {file = "websockets-15.0.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0a34631031a8f05657e8e90903e656959234f3a04552259458aac0b0f9ae6fd9"}, + {file = "websockets-15.0.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3d00075aa65772e7ce9e990cab3ff1de702aa09be3940d1dc88d5abf1ab8a09c"}, + {file = "websockets-15.0.1-cp310-cp310-win32.whl", hash = "sha256:1234d4ef35db82f5446dca8e35a7da7964d02c127b095e172e54397fb6a6c256"}, + {file = "websockets-15.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:39c1fec2c11dc8d89bba6b2bf1556af381611a173ac2b511cf7231622058af41"}, + {file = "websockets-15.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:823c248b690b2fd9303ba00c4f66cd5e2d8c3ba4aa968b2779be9532a4dad431"}, + {file = "websockets-15.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678999709e68425ae2593acf2e3ebcbcf2e69885a5ee78f9eb80e6e371f1bf57"}, + {file = "websockets-15.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d50fd1ee42388dcfb2b3676132c78116490976f1300da28eb629272d5d93e905"}, + {file = "websockets-15.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d99e5546bf73dbad5bf3547174cd6cb8ba7273062a23808ffea025ecb1cf8562"}, + {file = "websockets-15.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66dd88c918e3287efc22409d426c8f729688d89a0c587c88971a0faa2c2f3792"}, + {file = "websockets-15.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8dd8327c795b3e3f219760fa603dcae1dcc148172290a8ab15158cf85a953413"}, + {file = "websockets-15.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8fdc51055e6ff4adeb88d58a11042ec9a5eae317a0a53d12c062c8a8865909e8"}, + 
{file = "websockets-15.0.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:693f0192126df6c2327cce3baa7c06f2a117575e32ab2308f7f8216c29d9e2e3"}, + {file = "websockets-15.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:54479983bd5fb469c38f2f5c7e3a24f9a4e70594cd68cd1fa6b9340dadaff7cf"}, + {file = "websockets-15.0.1-cp311-cp311-win32.whl", hash = "sha256:16b6c1b3e57799b9d38427dda63edcbe4926352c47cf88588c0be4ace18dac85"}, + {file = "websockets-15.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:27ccee0071a0e75d22cb35849b1db43f2ecd3e161041ac1ee9d2352ddf72f065"}, + {file = "websockets-15.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3e90baa811a5d73f3ca0bcbf32064d663ed81318ab225ee4f427ad4e26e5aff3"}, + {file = "websockets-15.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:592f1a9fe869c778694f0aa806ba0374e97648ab57936f092fd9d87f8bc03665"}, + {file = "websockets-15.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0701bc3cfcb9164d04a14b149fd74be7347a530ad3bbf15ab2c678a2cd3dd9a2"}, + {file = "websockets-15.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8b56bdcdb4505c8078cb6c7157d9811a85790f2f2b3632c7d1462ab5783d215"}, + {file = "websockets-15.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0af68c55afbd5f07986df82831c7bff04846928ea8d1fd7f30052638788bc9b5"}, + {file = "websockets-15.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64dee438fed052b52e4f98f76c5790513235efaa1ef7f3f2192c392cd7c91b65"}, + {file = "websockets-15.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d5f6b181bb38171a8ad1d6aa58a67a6aa9d4b38d0f8c5f496b9e42561dfc62fe"}, + {file = "websockets-15.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5d54b09eba2bada6011aea5375542a157637b91029687eb4fdb2dab11059c1b4"}, + {file = "websockets-15.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:3be571a8b5afed347da347bfcf27ba12b069d9d7f42cb8c7028b5e98bbb12597"}, + {file = "websockets-15.0.1-cp312-cp312-win32.whl", hash = "sha256:c338ffa0520bdb12fbc527265235639fb76e7bc7faafbb93f6ba80d9c06578a9"}, + {file = "websockets-15.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:fcd5cf9e305d7b8338754470cf69cf81f420459dbae8a3b40cee57417f4614a7"}, + {file = "websockets-15.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ee443ef070bb3b6ed74514f5efaa37a252af57c90eb33b956d35c8e9c10a1931"}, + {file = "websockets-15.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5a939de6b7b4e18ca683218320fc67ea886038265fd1ed30173f5ce3f8e85675"}, + {file = "websockets-15.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:746ee8dba912cd6fc889a8147168991d50ed70447bf18bcda7039f7d2e3d9151"}, + {file = "websockets-15.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:595b6c3969023ecf9041b2936ac3827e4623bfa3ccf007575f04c5a6aa318c22"}, + {file = "websockets-15.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c714d2fc58b5ca3e285461a4cc0c9a66bd0e24c5da9911e30158286c9b5be7f"}, + {file = "websockets-15.0.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f3c1e2ab208db911594ae5b4f79addeb3501604a165019dd221c0bdcabe4db8"}, + {file = "websockets-15.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:229cf1d3ca6c1804400b0a9790dc66528e08a6a1feec0d5040e8b9eb14422375"}, + {file = "websockets-15.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:756c56e867a90fb00177d530dca4b097dd753cde348448a1012ed6c5131f8b7d"}, + {file = "websockets-15.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:558d023b3df0bffe50a04e710bc87742de35060580a293c2a984299ed83bc4e4"}, + {file = "websockets-15.0.1-cp313-cp313-win32.whl", hash = "sha256:ba9e56e8ceeeedb2e080147ba85ffcd5cd0711b89576b83784d8605a7df455fa"}, + {file = 
"websockets-15.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:e09473f095a819042ecb2ab9465aee615bd9c2028e4ef7d933600a8401c79561"}, + {file = "websockets-15.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5f4c04ead5aed67c8a1a20491d54cdfba5884507a48dd798ecaf13c74c4489f5"}, + {file = "websockets-15.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abdc0c6c8c648b4805c5eacd131910d2a7f6455dfd3becab248ef108e89ab16a"}, + {file = "websockets-15.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a625e06551975f4b7ea7102bc43895b90742746797e2e14b70ed61c43a90f09b"}, + {file = "websockets-15.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d591f8de75824cbb7acad4e05d2d710484f15f29d4a915092675ad3456f11770"}, + {file = "websockets-15.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:47819cea040f31d670cc8d324bb6435c6f133b8c7a19ec3d61634e62f8d8f9eb"}, + {file = "websockets-15.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac017dd64572e5c3bd01939121e4d16cf30e5d7e110a119399cf3133b63ad054"}, + {file = "websockets-15.0.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4a9fac8e469d04ce6c25bb2610dc535235bd4aa14996b4e6dbebf5e007eba5ee"}, + {file = "websockets-15.0.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:363c6f671b761efcb30608d24925a382497c12c506b51661883c3e22337265ed"}, + {file = "websockets-15.0.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2034693ad3097d5355bfdacfffcbd3ef5694f9718ab7f29c29689a9eae841880"}, + {file = "websockets-15.0.1-cp39-cp39-win32.whl", hash = "sha256:3b1ac0d3e594bf121308112697cf4b32be538fb1444468fb0a6ae4feebc83411"}, + {file = "websockets-15.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:b7643a03db5c95c799b89b31c036d5f27eeb4d259c798e878d6937d71832b1e4"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = 
"sha256:0c9e74d766f2818bb95f84c25be4dea09841ac0f734d1966f415e4edfc4ef1c3"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1009ee0c7739c08a0cd59de430d6de452a55e42d6b522de7aa15e6f67db0b8e1"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76d1f20b1c7a2fa82367e04982e708723ba0e7b8d43aa643d3dcd404d74f1475"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f29d80eb9a9263b8d109135351caf568cc3f80b9928bccde535c235de55c22d9"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b359ed09954d7c18bbc1680f380c7301f92c60bf924171629c5db97febb12f04"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:cad21560da69f4ce7658ca2cb83138fb4cf695a2ba3e475e0559e05991aa8122"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7f493881579c90fc262d9cdbaa05a6b54b3811c2f300766748db79f098db9940"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:47b099e1f4fbc95b701b6e85768e1fcdaf1630f3cbe4765fa216596f12310e2e"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67f2b6de947f8c757db2db9c71527933ad0019737ec374a8a6be9a956786aaf9"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d08eb4c2b7d6c41da6ca0600c077e93f5adcfd979cd777d747e9ee624556da4b"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b826973a4a2ae47ba357e4e82fa44a463b8f168e1ca775ac64521442b19e87f"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = 
"sha256:21c1fa28a6a7e3cbdc171c694398b6df4744613ce9b36b1a498e816787e28123"}, + {file = "websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f"}, + {file = "websockets-15.0.1.tar.gz", hash = "sha256:82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee"}, ] [[package]] From 5801933e85b57b0e503cc2d3c278465d7a7574f8 Mon Sep 17 00:00:00 2001 From: liquidsec Date: Thu, 13 Mar 2025 10:47:22 -0400 Subject: [PATCH 10/26] move browser to setup --- bbot/modules/codeql.py | 41 ++++++++++++++++++----------------------- 1 file changed, 18 insertions(+), 23 deletions(-) diff --git a/bbot/modules/codeql.py b/bbot/modules/codeql.py index bc8631c6d9..08b148ece9 100644 --- a/bbot/modules/codeql.py +++ b/bbot/modules/codeql.py @@ -157,6 +157,22 @@ async def setup(self): False, f"Invalid severity level '{self.min_severity}'. Valid options are: {', '.join(self.severity_levels.keys())}", ) + + self.b = Browser( + threads=defaults.threads, + resolution=defaults.resolution, + user_agent=defaults.user_agent, + proxy=None, + delay=3, + full_page=False, + dom=True, + javascript=True, + requests=False, + responses=False, + base64=False, + ocr=False, + ) + await self.b.start() # Build the query list during setup self.queries = [ @@ -176,13 +192,6 @@ async def setup(self): f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.0/custom/dom-xss-jquery-contains.ql", ] - # # Add custom queries from wordlists directory - # custom_queries_dir = os.path.join(self.scan.helpers.wordlist_dir, "codeql_queries") - # if os.path.exists(custom_queries_dir): - # for file in os.listdir(custom_queries_dir): - # if file.endswith('.ql'): - # self.queries.append(os.path.join(custom_queries_dir, file)) - # self.debug(f"Added custom query: {file}") # Clean up any stale database files database_dir = os.path.join(self.scan.helpers.tools_dir, "codeql", "databases") @@ -266,22 +275,8 @@ async def handle_event(self, event): with 
tempfile.TemporaryDirectory() as temp_dir: script_urls = {} - b = Browser( - threads=defaults.threads, - resolution=defaults.resolution, - user_agent=defaults.user_agent, - proxy=None, - delay=3, - full_page=False, - dom=True, - javascript=True, - requests=False, - responses=False, - base64=False, - ocr=False, - ) - await b.start() - async for url, webscreenshot in b.screenshot_urls([event.data]): + + async for url, webscreenshot in self.b.screenshot_urls([event.data]): dom = webscreenshot.dom dom_file_path = os.path.join(temp_dir, "dom.html") with open(dom_file_path, "w") as dom_file: From f096d82e3fbf3eaa68a32a95e1c4f90e084b4182 Mon Sep 17 00:00:00 2001 From: liquidsec Date: Fri, 14 Mar 2025 21:37:25 -0400 Subject: [PATCH 11/26] new custom signatures, refactor --- bbot/modules/codeql.py | 55 +++++++++++-------- .../codeql_queries/xmlhttprequest-to-eval.ql | 53 ++++++++++++++++++ 2 files changed, 86 insertions(+), 22 deletions(-) create mode 100644 bbot/wordlists/codeql_queries/xmlhttprequest-to-eval.ql diff --git a/bbot/modules/codeql.py b/bbot/modules/codeql.py index 08b148ece9..edecc9b011 100644 --- a/bbot/modules/codeql.py +++ b/bbot/modules/codeql.py @@ -7,6 +7,7 @@ import uuid import csv import shutil +import time class codeql(BaseModule): @@ -157,26 +158,28 @@ async def setup(self): False, f"Invalid severity level '{self.min_severity}'. 
Valid options are: {', '.join(self.severity_levels.keys())}", ) - + self.b = Browser( - threads=defaults.threads, - resolution=defaults.resolution, - user_agent=defaults.user_agent, - proxy=None, - delay=3, - full_page=False, - dom=True, - javascript=True, - requests=False, - responses=False, - base64=False, - ocr=False, - ) + threads=defaults.threads, + resolution=defaults.resolution, + user_agent=defaults.user_agent, + proxy=None, + delay=3, + full_page=False, + dom=True, + javascript=True, + requests=False, + responses=False, + base64=False, + ocr=False, + ) await self.b.start() # Build the query list during setup self.queries = [ + f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.0/Security/CWE-020/MissingOriginCheck.ql", f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.0/Security/CWE-079/ExceptionXss.ql", + f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.0/Security/CWE-346/CorsMisconfigurationForCredentials.ql", f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.0/Security/CWE-079/XssThroughDom.ql", f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.0/Security/CWE-079/StoredXss.ql", f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.0/Security/CWE-079/UnsafeJQueryPlugin.ql", @@ -190,19 +193,28 @@ async def setup(self): f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.0/AngularJS/InsecureUrlWhitelist.ql", f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.0/AngularJS/DisablingSce.ql", f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.0/custom/dom-xss-jquery-contains.ql", + f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.0/custom/xmlhttprequest-to-eval.ql", ] - - # Clean up any stale database files + # Clean up any stale database files older than 3 days database_dir = os.path.join(self.scan.helpers.tools_dir, "codeql", "databases") 
if os.path.exists(database_dir): + current_time = time.time() + three_days_in_seconds = 3 * 24 * 60 * 60 + for item in os.listdir(database_dir): item_path = os.path.join(database_dir, item) - if os.path.isfile(item_path): - os.unlink(item_path) - elif os.path.isdir(item_path): - shutil.rmtree(item_path) - self.debug(f"Cleaned up stale CodeQL databases in {database_dir}") + # Get the last modification time of the file/directory + try: + mtime = os.path.getmtime(item_path) + if (current_time - mtime) > three_days_in_seconds: + if os.path.isfile(item_path): + os.unlink(item_path) + elif os.path.isdir(item_path): + shutil.rmtree(item_path) + self.debug(f"Cleaned up stale CodeQL database: {item_path}") + except Exception as e: + self.debug(f"Error checking/removing {item_path}: {e}") return True @@ -275,7 +287,6 @@ async def handle_event(self, event): with tempfile.TemporaryDirectory() as temp_dir: script_urls = {} - async for url, webscreenshot in self.b.screenshot_urls([event.data]): dom = webscreenshot.dom dom_file_path = os.path.join(temp_dir, "dom.html") diff --git a/bbot/wordlists/codeql_queries/xmlhttprequest-to-eval.ql b/bbot/wordlists/codeql_queries/xmlhttprequest-to-eval.ql new file mode 100644 index 0000000000..aac7ce5a25 --- /dev/null +++ b/bbot/wordlists/codeql_queries/xmlhttprequest-to-eval.ql @@ -0,0 +1,53 @@ +/** + * @name DOM-based XSS via dangerous eval of XMLHttpRequest responseText + * @description Evaluating untrusted data from an XMLHttpRequest's responseText via eval() can lead to code injection. + * @kind path-problem + * @problem.severity error + * @security.severity 9.5 + * @precision high + * @id js/xhr-dynamic-eval-modified + * @tags security + * external/cwe/cwe-95 + */ + + import javascript + import DataFlow + import DataFlow::PathGraph + + /** + * Modified taint tracking configuration to catch cases where `this.responseText` + * flows into an eval call even when used in string concatenation. 
+ */ + class XHRResponseToEvalConfigModified extends TaintTracking::Configuration { + XHRResponseToEvalConfigModified() { this = "XHRResponseToEvalConfigModified" } + + override predicate isSource(DataFlow::Node source) { + // Mark any property read of "responseText" as a taint source. + exists(DataFlow::PropRead propRead | + propRead = source and + propRead.getPropertyName() = "responseText" + ) + } + + override predicate isSink(DataFlow::Node sink) { + // Mark the argument of eval() as a taint sink. + exists(CallExpr call | + call.getCallee().(Identifier).getName() = "eval" and + sink.asExpr() = call.getArgument(0) + ) + } + + override predicate isAdditionalTaintStep(DataFlow::Node pred, DataFlow::Node succ) { + // Propagate taint through binary expressions (e.g. string concatenation). + exists(BinaryExpr binop | + // Check if the tainted value appears in either operand. + (binop.getLeftOperand() = pred.asExpr() or binop.getRightOperand() = pred.asExpr()) and + succ.asExpr() = binop + ) + } + } + + from XHRResponseToEvalConfigModified config, DataFlow::PathNode source, DataFlow::PathNode sink + where config.hasFlowPath(source, sink) + select sink.getNode(), source, sink, "Untrusted data from XMLHttpRequest.responseText flows to eval() after string concatenation.", source.getNode(), "XMLHttpRequest.responseText" + \ No newline at end of file From 37f7b63deb10d2876c2ac64683e681dc7636a709 Mon Sep 17 00:00:00 2001 From: liquidsec Date: Sat, 15 Mar 2025 19:12:59 -0400 Subject: [PATCH 12/26] initial yara expansion --- bbot/core/helpers/yara_helper.py | 32 +++++++-- bbot/modules/codeql.py | 108 ++++++++++++++++++++++++++++--- 2 files changed, 124 insertions(+), 16 deletions(-) diff --git a/bbot/core/helpers/yara_helper.py b/bbot/core/helpers/yara_helper.py index 7f9428b55b..946f092f38 100644 --- a/bbot/core/helpers/yara_helper.py +++ b/bbot/core/helpers/yara_helper.py @@ -35,16 +35,36 @@ def compile_strings(self, strings: list[str], nocase=False): def compile(self, 
async def match(self, compiled_rules, text, full_result=False):
    """
    Given a compiled YARA rule and a body of text, return matches.

    Args:
        compiled_rules: Compiled YARA rules
        text: Text to match against
        full_result (bool): If True, returns full match information including
            rule names and metadata. If False, returns only matched strings.

    Returns:
        If full_result=False: List[str] of matched strings
        If full_result=True: List[dict] with full match information including:
            - matched_string: The matched string
            - rule: The name of the matched rule
            - meta: The metadata of the matched rule
    """
    results = []
    # Run the blocking YARA scan in an executor so the event loop is not stalled.
    matches = await self.parent_helper.run_in_executor(compiled_rules.match, data=text)
    if matches:
        for match in matches:
            for string_match in match.strings:
                for instance in string_match.instances:
                    # matched_data is raw bytes and is NOT guaranteed to be valid
                    # UTF-8 (scanned pages can contain binary blobs); a strict
                    # decode would raise UnicodeDecodeError and abort the whole
                    # match call, so replace undecodable bytes instead.
                    matched_string = instance.matched_data.decode("utf-8", errors="replace")
                    if full_result:
                        results.append(
                            {"matched_string": matched_string, "rule": match.rule, "meta": match.meta}
                        )
                    else:
                        results.append(matched_string)
    return results
def format_location(self, file_name, script_urls, event_data):
    """Format the location string for a finding based on the file name.

    Args:
        file_name (str): The name of the file being processed
            ("dom.html" or "script_<index>.<ext>" as written to the temp dir).
        script_urls (dict): Mapping of script numbers to their source URLs.
        event_data (str): The event data (typically the page URL) being processed.

    Returns:
        str: Formatted location string.
    """
    file_name = file_name.lstrip("/")
    if file_name.startswith("script_"):
        # Script files are written as "script_<index>.js"; map the index back
        # to the URL the script was loaded from.
        try:
            script_num = int(file_name.split("_")[1].split(".")[0])
        except (IndexError, ValueError):
            # Unexpected file-name shape (e.g. "script_foo.js") -- fall back to
            # the raw name instead of raising and killing handle_event.
            return file_name
        script_url = script_urls.get(script_num, "unknown_url")
        return f"(script: {script_url})"
    elif file_name == "dom.html":
        return f"{event_data} (DOM)"
    return file_name
+ + if confidence: + description += f" Confidence: [{confidence}]" + + matched_text = result['matched_string'] + if len(matched_text) > 150: + matched_text = matched_text[:147] + "..." + description += f" Matched Text: [{matched_text}]" + + # Format the location using the same helper function + location = self.format_location(os.path.basename(file_path), script_urls, event.data) + description += f" Location: [{location}]" + + await self.emit_event( + { + "description": f"POSSIBLE Client-side Vulnerability (YARA Match). {description})", + "host": str(event.host), + }, + "FINDING", + event, + context=f"{{module}} module found a YARA match for rule '{rule_name}' in {location}" + ) + # Generate a unique GUID for the database guid = str(uuid.uuid4()) database_path = os.path.join(f"{self.helpers.tools_dir}/codeql/databases", guid) @@ -367,15 +462,8 @@ async def handle_event(self, event): if not self.severity_threshold(result["severity"]): continue - # Format the location string based on the file name - file_name = result["file"].lstrip("/") - if file_name.startswith("script_"): - script_num = int(file_name.split("_")[1].split(".")[0]) - location = script_urls.get(script_num, "unknown_url") - elif file_name == "dom.html": - location = f"{event.data} (DOM)" - else: - location = file_name + # Format the location string using the new function + location = self.format_location(result["file"], script_urls, event.data) # Add line and column information location_details = f"Line: {start_line + 1}" From 5feb26f9cf6a6c300fd13f8ead90093c22f8e9db Mon Sep 17 00:00:00 2001 From: liquidsec Date: Sat, 15 Mar 2025 19:31:03 -0400 Subject: [PATCH 13/26] remove debug --- bbot/modules/codeql.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/bbot/modules/codeql.py b/bbot/modules/codeql.py index 310a986e32..d07452a2f4 100644 --- a/bbot/modules/codeql.py +++ b/bbot/modules/codeql.py @@ -350,13 +350,12 @@ async def handle_event(self, event): scripts = 
webscreenshot.scripts for i, js in enumerate(scripts): script_url = js.json.get("url", "unknown_url") - + # Skip scripts that are from the same URL as the page if script_url == str(event.data): - self.critical(f"Skipping script with same URL as page: {script_url}") + self.debug(f"Skipping script with same URL as page: {script_url}") continue - else: - self.critical(f"Processing script: {script_url}") + # Skip out-of-scope scripts in in_scope mode if self.mode == "in_scope": try: From 06a4c0035b78a8d63806fdb8d577dd756a5b8918 Mon Sep 17 00:00:00 2001 From: liquidsec Date: Sat, 15 Mar 2025 21:00:36 -0400 Subject: [PATCH 14/26] adjusting yara rule --- bbot/core/helpers/yara_helper.py | 8 ++--- bbot/modules/codeql.py | 50 ++++++++++++-------------------- 2 files changed, 21 insertions(+), 37 deletions(-) diff --git a/bbot/core/helpers/yara_helper.py b/bbot/core/helpers/yara_helper.py index 946f092f38..867d58bb04 100644 --- a/bbot/core/helpers/yara_helper.py +++ b/bbot/core/helpers/yara_helper.py @@ -58,13 +58,9 @@ async def match(self, compiled_rules, text, full_result=False): for match in matches: for string_match in match.strings: for instance in string_match.instances: - matched_string = instance.matched_data.decode('utf-8') + matched_string = instance.matched_data.decode("utf-8") if full_result: - results.append({ - 'matched_string': matched_string, - 'rule': match.rule, - 'meta': match.meta - }) + results.append({"matched_string": matched_string, "rule": match.rule, "meta": match.meta}) else: results.append(matched_string) return results diff --git a/bbot/modules/codeql.py b/bbot/modules/codeql.py index d07452a2f4..884c804de4 100644 --- a/bbot/modules/codeql.py +++ b/bbot/modules/codeql.py @@ -145,34 +145,23 @@ class codeql(BaseModule): in_scope_only = True _module_threads = 2 - # Define YARA rule at class level yara_rules = r""" - rule dom_xss_innerHTML { + rule sourcevarassign { meta: - name = "DOM XSS via innerHTML" - description = "Detected potential DOM XSS 
vulnerability where URL parameters are used in innerHTML assignments" - confidence = "high" + name = "Source to Variable Assignment" + description = "Variable assignment tainted by user input source" + confidence = "possible" strings: - $vuln1 = /\.innerHTML\s*=.*getUrlParameter\([^;\n]{0,100}/ nocase - $vuln2 = /\.innerHTML\s*=.*URLSearchParams\(.*\)\.get\([^;\n]{0,100}/ nocase - $vuln3 = /\.innerHTML\s*=.*location\.search.*split[\('\[]&[^;\n]{0,100}/ nocase - $vuln4 = /\.innerHTML\s*=.*location\.search.*match[^;\n]{0,100}/ nocase - $vuln5 = /\.innerHTML\s*=.*params\.get\([^;\n]{0,100}/ nocase - $vuln6 = /\.innerHTML\s*=.*searchParams\.get\([^;\n]{0,100}/ nocase - $vuln7 = /\.innerHTML\s*=.*querySelector[\('\[]\?[^;\n]{0,100}/ nocase - $vuln8 = /\.innerHTML\s*=.*RegExp\([^\)]+location[^;\n]{0,100}/ nocase - $vuln9 = /\.innerHTML\s*=.*parse_str\([^;\n]{0,100}/ nocase - $vuln10 = /\.innerHTML\s*=.*parseQueryString\([^;\n]{0,100}/ nocase - $vuln11 = /\.innerHTML\s*=.*getParameter\([^;\n]{0,100}/ nocase + $varassign = /var\s+[^=]+=[^;]*(location\.(href|hash|pathname|search)|document\.(URL|documentURI|baseURI))[^;\n]*(;|\n|$)/ nocase condition: - any of them + $varassign } """ async def setup(self): # Compile YARA rules during setup self.compiled_yara_rules = self.helpers.yara.compile(source=self.yara_rules) - + self.mode = self.config.get("mode", "in_scope").lower() valid_modes = {"all", "in_scope", "dom_only"} if self.mode not in valid_modes: @@ -312,12 +301,12 @@ async def execute_codeql_analyze_db(self, database_path): def format_location(self, file_name, script_urls, event_data): """Format the location string based on the file name. 
- + Args: file_name (str): The name of the file being processed script_urls (dict): Mapping of script numbers to their URLs event_data (str): The event data (typically URL) being processed - + Returns: str: Formatted location string """ @@ -379,31 +368,30 @@ async def handle_event(self, event): for root, _, files in os.walk(temp_dir): for file in files: file_path = os.path.join(root, file) - with open(file_path, 'r') as f: + with open(file_path, "r") as f: content = f.read() results = await self.helpers.yara.match(self.compiled_yara_rules, content, full_result=True) for result in results: - # Get rule metadata and name from the match - yara_description = result['meta'].get('description', '') - confidence = result['meta'].get('confidence', '') - rule_name = result['meta'].get('name', result['rule']) + yara_description = result["meta"].get("description", "") + confidence = result["meta"].get("confidence", "") + rule_name = result["meta"].get("name", result["rule"]) # Build description components description = f"{rule_name}: {yara_description}." - + if confidence: description += f" Confidence: [{confidence}]" - - matched_text = result['matched_string'] + + matched_text = result["matched_string"] if len(matched_text) > 150: matched_text = matched_text[:147] + "..." description += f" Matched Text: [{matched_text}]" - + # Format the location using the same helper function location = self.format_location(os.path.basename(file_path), script_urls, event.data) description += f" Location: [{location}]" - + await self.emit_event( { "description": f"POSSIBLE Client-side Vulnerability (YARA Match). 
{description})", @@ -411,7 +399,7 @@ async def handle_event(self, event): }, "FINDING", event, - context=f"{{module}} module found a YARA match for rule '{rule_name}' in {location}" + context=f"{{module}} module found a YARA match for rule '{rule_name}' in {location}", ) # Generate a unique GUID for the database From 81f51dff32d2c29012854321507e39fa3311b2fe Mon Sep 17 00:00:00 2001 From: liquidsec Date: Sun, 16 Mar 2025 13:01:46 -0400 Subject: [PATCH 15/26] changing yara signature --- bbot/modules/codeql.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/bbot/modules/codeql.py b/bbot/modules/codeql.py index 884c804de4..422fb308eb 100644 --- a/bbot/modules/codeql.py +++ b/bbot/modules/codeql.py @@ -146,15 +146,15 @@ class codeql(BaseModule): _module_threads = 2 yara_rules = r""" - rule sourcevarassign { + rule source_decode { meta: - name = "Source to Variable Assignment" - description = "Variable assignment tainted by user input source" + name = "Source Decoded with decodeURIComponent()" + description = "URL-decoded user-controlled data from a source can facilitate XSS attacks" confidence = "possible" strings: - $varassign = /var\s+[^=]+=[^;]*(location\.(href|hash|pathname|search)|document\.(URL|documentURI|baseURI))[^;\n]*(;|\n|$)/ nocase + $source_decode = /decodeURIComponent\s*\(\s*[^)]+(location\.(href|hash|pathname|search)|document\.(URL|documentURI|baseURI))[^)]*\)/ nocase condition: - $varassign + $source_decode } """ From 29a5074f22c73189195f05a3d774b2a903989769 Mon Sep 17 00:00:00 2001 From: liquidsec Date: Sun, 16 Mar 2025 13:08:58 -0400 Subject: [PATCH 16/26] adding url into finding event --- bbot/modules/codeql.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/bbot/modules/codeql.py b/bbot/modules/codeql.py index 422fb308eb..9652e0dbc6 100644 --- a/bbot/modules/codeql.py +++ b/bbot/modules/codeql.py @@ -396,6 +396,7 @@ async def handle_event(self, event): { "description": f"POSSIBLE Client-side Vulnerability (YARA 
Match). {description})", "host": str(event.host), + "url": str(event.data) }, "FINDING", event, @@ -475,6 +476,8 @@ async def handle_event(self, event): data = { "description": f"POSSIBLE Client-side Vulnerability: {details_string}", "host": str(event.host), + "url": str(event.data) + } # Emit event with the extracted information From e5fcfecf239b76692b205a490fff638d90ac28d3 Mon Sep 17 00:00:00 2001 From: liquidsec Date: Tue, 18 Mar 2025 17:39:01 -0400 Subject: [PATCH 17/26] add prototype pollution ql --- bbot/modules/codeql.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/bbot/modules/codeql.py b/bbot/modules/codeql.py index 9652e0dbc6..3613eede95 100644 --- a/bbot/modules/codeql.py +++ b/bbot/modules/codeql.py @@ -208,6 +208,9 @@ async def setup(self): f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.0/Security/CWE-094/ExpressionInjection.ql", f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.0/AngularJS/InsecureUrlWhitelist.ql", f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.0/AngularJS/DisablingSce.ql", + f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.0/Security/CWE-915/PrototypePollutingAssignment.ql", + f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.0/Security/CWE-915/PrototypePollutingFunction.ql", + f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.0/Security/CWE-915/PrototypePollutingMergeCall.ql", f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.0/custom/dom-xss-jquery-contains.ql", f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.0/custom/xmlhttprequest-to-eval.ql", ] From a602558231cd320d4cd0581f131a087ece439f49 Mon Sep 17 00:00:00 2001 From: liquidsec Date: Wed, 19 Mar 2025 12:15:15 -0400 Subject: [PATCH 18/26] fixing dependency issues, cache system to prevent duplicate processing --- bbot/modules/codeql.py | 191 
++++++++++++++++++++++++++++------------- 1 file changed, 133 insertions(+), 58 deletions(-) diff --git a/bbot/modules/codeql.py b/bbot/modules/codeql.py index 3613eede95..68e2ae0692 100644 --- a/bbot/modules/codeql.py +++ b/bbot/modules/codeql.py @@ -9,7 +9,6 @@ import shutil import time - class codeql(BaseModule): watched_events = ["URL"] produced_events = ["HTTP_RESPONSE_DOM"] @@ -21,13 +20,21 @@ class codeql(BaseModule): } deps_pip = ["webcap"] - options = {"mode": "all", "min_severity": "error"} + options = {"mode": "all", "min_severity": "error", "suppress_duplicates": False} options_desc = { "mode": "Script processing mode: 'all' (process all scripts), 'in_scope' (only process in-scope scripts), or 'dom_only' (only process DOM)", "min_severity": "Minimum severity level to report (error, warning, recommendation, note)", + "suppress_duplicates": "Skip findings when identical files are analyzed on the same host (default: False)" } deps_ansible = [ + { + "name": "Remove existing CodeQL directory", + "file": { + "path": "#{BBOT_TOOLS}/codeql", + "state": "absent" + } + }, { "name": "Create CodeQL directory", "file": {"path": "#{BBOT_TOOLS}/codeql", "state": "directory", "mode": "0755"}, @@ -68,66 +75,66 @@ class codeql(BaseModule): }, { "name": "Download JavaScript-all Query Pack to Custom Directory", - "command": "#{BBOT_TOOLS}/codeql/codeql pack download codeql/javascript-all --dir=#{BBOT_TOOLS}/codeql/packages --common-caches=#{BBOT_TOOLS}/codeql", + "command": "#{BBOT_TOOLS}/codeql/codeql pack download codeql/javascript-all@2.5.1 --dir=#{BBOT_TOOLS}/codeql/packages --common-caches=#{BBOT_TOOLS}/codeql", "register": "query_pack_all_downloaded", }, { "name": "Install JavaScript-all Query Pack from Custom Directory", - "command": "#{BBOT_TOOLS}/codeql/codeql pack install #{BBOT_TOOLS}/codeql/packages/codeql/javascript-all/2.5.0 --no-strict-mode --common-caches=#{BBOT_TOOLS}/codeql", + "command": "#{BBOT_TOOLS}/codeql/codeql pack install 
#{BBOT_TOOLS}/codeql/packages/codeql/javascript-all/2.5.1 --no-strict-mode --common-caches=#{BBOT_TOOLS}/codeql", "when": "query_pack_all_downloaded is success", "register": "query_pack_all_installed", }, { "name": "Download suite-helpers Query Pack to Custom Directory", - "command": "#{BBOT_TOOLS}/codeql/codeql pack download codeql/suite-helpers --dir=#{BBOT_TOOLS}/codeql/packages --common-caches=#{BBOT_TOOLS}/codeql", + "command": "#{BBOT_TOOLS}/codeql/codeql pack download codeql/suite-helpers@1.0.19 --dir=#{BBOT_TOOLS}/codeql/packages --common-caches=#{BBOT_TOOLS}/codeql", "when": "query_pack_all_installed is success", "register": "suite_helpers_downloaded", }, { "name": "Install suite-helpers Query Pack from Custom Directory", - "command": "#{BBOT_TOOLS}/codeql/codeql pack install #{BBOT_TOOLS}/codeql/packages/codeql/suite-helpers/1.0.18 --no-strict-mode --common-caches=#{BBOT_TOOLS}/codeql", + "command": "#{BBOT_TOOLS}/codeql/codeql pack install #{BBOT_TOOLS}/codeql/packages/codeql/suite-helpers/1.0.19 --no-strict-mode --common-caches=#{BBOT_TOOLS}/codeql", "when": "suite_helpers_downloaded is success", "register": "suite_helpers_installed", }, { "name": "Download typos Query Pack to Custom Directory", - "command": "#{BBOT_TOOLS}/codeql/codeql pack download codeql/typos --dir=#{BBOT_TOOLS}/codeql/packages --common-caches=#{BBOT_TOOLS}/codeql", + "command": "#{BBOT_TOOLS}/codeql/codeql pack download codeql/typos@1.0.19 --dir=#{BBOT_TOOLS}/codeql/packages --common-caches=#{BBOT_TOOLS}/codeql", "when": "suite_helpers_installed is success", "register": "typos_downloaded", }, { "name": "Install typos Query Pack from Custom Directory", - "command": "#{BBOT_TOOLS}/codeql/codeql pack install #{BBOT_TOOLS}/codeql/packages/codeql/typos/1.0.18 --no-strict-mode --common-caches=#{BBOT_TOOLS}/codeql", + "command": "#{BBOT_TOOLS}/codeql/codeql pack install #{BBOT_TOOLS}/codeql/packages/codeql/typos/1.0.19 --no-strict-mode --common-caches=#{BBOT_TOOLS}/codeql", "when": 
"typos_downloaded is success", "register": "typos_installed", }, { "name": "Download util Query Pack to Custom Directory", - "command": "#{BBOT_TOOLS}/codeql/codeql pack download codeql/util --dir=#{BBOT_TOOLS}/codeql/packages --common-caches=#{BBOT_TOOLS}/codeql", + "command": "#{BBOT_TOOLS}/codeql/codeql pack download codeql/util@2.0.6 --dir=#{BBOT_TOOLS}/codeql/packages --common-caches=#{BBOT_TOOLS}/codeql", "when": "typos_installed is success", "register": "util_downloaded", }, { "name": "Install util Query Pack from Custom Directory", - "command": "#{BBOT_TOOLS}/codeql/codeql pack install #{BBOT_TOOLS}/codeql/packages/codeql/util/2.0.5 --no-strict-mode --common-caches=#{BBOT_TOOLS}/codeql", + "command": "#{BBOT_TOOLS}/codeql/codeql pack install #{BBOT_TOOLS}/codeql/packages/codeql/util/2.0.6 --no-strict-mode --common-caches=#{BBOT_TOOLS}/codeql", "when": "util_downloaded is success", "register": "util_installed", }, { "name": "Download JavaScript-queries Query Pack to Custom Directory", - "command": "#{BBOT_TOOLS}/codeql/codeql pack download codeql/javascript-queries --dir=#{BBOT_TOOLS}/codeql/packages --common-caches=#{BBOT_TOOLS}/codeql", + "command": "#{BBOT_TOOLS}/codeql/codeql pack download codeql/javascript-queries@1.5.1 --dir=#{BBOT_TOOLS}/codeql/packages --common-caches=#{BBOT_TOOLS}/codeql", "when": "util_installed is success", "register": "query_pack_downloaded", }, { "name": "Install JavaScript-queries Query Pack from Custom Directory", - "command": "#{BBOT_TOOLS}/codeql/codeql pack install #{BBOT_TOOLS}/codeql/packages/codeql/javascript-queries/1.5.0 --no-strict-mode --common-caches=#{BBOT_TOOLS}/codeql", + "command": "#{BBOT_TOOLS}/codeql/codeql pack install #{BBOT_TOOLS}/codeql/packages/codeql/javascript-queries/1.5.1 --no-strict-mode --common-caches=#{BBOT_TOOLS}/codeql", "when": "query_pack_downloaded is success", }, { "name": "Create CodeQL custom queries directory", "file": { - "path": 
"#{BBOT_TOOLS}/codeql/packages/codeql/javascript-queries/1.5.0/custom", + "path": "#{BBOT_TOOLS}/codeql/packages/codeql/javascript-queries/1.5.1/custom", "state": "directory", "mode": "0755", }, @@ -136,14 +143,14 @@ class codeql(BaseModule): "name": "Copy custom queries to CodeQL Custom Query Pack directory", "copy": { "src": "#{BBOT_WORDLISTS}/codeql_queries/", - "dest": "#{BBOT_TOOLS}/codeql/packages/codeql/javascript-queries/1.5.0/custom/", + "dest": "#{BBOT_TOOLS}/codeql/packages/codeql/javascript-queries/1.5.1/custom/", "remote_src": False, }, }, ] in_scope_only = True - _module_threads = 2 + _module_threads = 1 yara_rules = r""" rule source_decode { @@ -159,6 +166,9 @@ class codeql(BaseModule): """ async def setup(self): + # Modify the cache to store findings + self.processed_hashes = {} # hash -> list of findings + # Compile YARA rules during setup self.compiled_yara_rules = self.helpers.yara.compile(source=self.yara_rules) @@ -193,26 +203,26 @@ async def setup(self): # Build the query list during setup self.queries = [ - f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.0/Security/CWE-020/MissingOriginCheck.ql", - f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.0/Security/CWE-079/ExceptionXss.ql", - f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.0/Security/CWE-346/CorsMisconfigurationForCredentials.ql", - f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.0/Security/CWE-079/XssThroughDom.ql", - f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.0/Security/CWE-079/StoredXss.ql", - f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.0/Security/CWE-079/UnsafeJQueryPlugin.ql", - f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.0/Security/CWE-079/UnsafeHtmlConstruction.ql", - f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.0/Security/CWE-079/Xss.ql", - 
f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.0/Security/CWE-079/ReflectedXss.ql", - f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.0/Security/CWE-601/ClientSideUrlRedirect.ql", - f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.0/Security/CWE-201/PostMessageStar.ql", - f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.0/Security/CWE-094/CodeInjection.ql", - f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.0/Security/CWE-094/ExpressionInjection.ql", - f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.0/AngularJS/InsecureUrlWhitelist.ql", - f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.0/AngularJS/DisablingSce.ql", - f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.0/Security/CWE-915/PrototypePollutingAssignment.ql", - f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.0/Security/CWE-915/PrototypePollutingFunction.ql", - f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.0/Security/CWE-915/PrototypePollutingMergeCall.ql", - f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.0/custom/dom-xss-jquery-contains.ql", - f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.0/custom/xmlhttprequest-to-eval.ql", + f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.1/Security/CWE-020/MissingOriginCheck.ql", + f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.1/Security/CWE-079/ExceptionXss.ql", + f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.1/Security/CWE-346/CorsMisconfigurationForCredentials.ql", + f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.1/Security/CWE-079/XssThroughDom.ql", + f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.1/Security/CWE-079/StoredXss.ql", + 
f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.1/Security/CWE-079/UnsafeJQueryPlugin.ql", + f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.1/Security/CWE-079/UnsafeHtmlConstruction.ql", + f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.1/Security/CWE-079/Xss.ql", + f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.1/Security/CWE-079/ReflectedXss.ql", + f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.1/Security/CWE-601/ClientSideUrlRedirect.ql", + f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.1/Security/CWE-201/PostMessageStar.ql", + f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.1/Security/CWE-094/CodeInjection.ql", + f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.1/Security/CWE-094/ExpressionInjection.ql", + f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.1/AngularJS/InsecureUrlWhitelist.ql", + f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.1/AngularJS/DisablingSce.ql", + f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.1/Security/CWE-915/PrototypePollutingAssignment.ql", + f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.1/Security/CWE-915/PrototypePollutingFunction.ql", + f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.1/Security/CWE-915/PrototypePollutingMergeCall.ql", + f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.1/custom/dom-xss-jquery-contains.ql", + f"{self.helpers.tools_dir}/codeql/packages/codeql/javascript-queries/1.5.1/custom/xmlhttprequest-to-eval.ql", ] # Clean up any stale database files older than 3 days @@ -235,9 +245,6 @@ async def setup(self): except Exception as e: self.debug(f"Error checking/removing {item_path}: {e}") - # Compile YARA rules during setup - self.compiled_yara_rules = 
async def store_and_emit_finding(self, finding_data, event, files_hash=None):
    """Emit a FINDING, optionally caching a host-agnostic copy keyed by files_hash.

    The cached copy has its "url"/"host" fields stripped so it can later be
    replayed for a different host via emit_cached_findings().
    """
    if files_hash:
        # Keep everything except the host-specific fields in the cache entry.
        stripped = dict(finding_data)
        stripped["data"] = {k: v for k, v in finding_data["data"].items() if k not in ("url", "host")}
        self.processed_hashes.setdefault(files_hash, []).append(stripped)
        self.verbose(f"Storing finding in cache for hash: {files_hash}")
    await self.emit_event(finding_data["data"], "FINDING", event, context=finding_data["context"])

async def emit_cached_findings(self, files_hash, event):
    """Re-emit every cached finding for files_hash, stamped with this event's URL and host."""
    for entry in self.processed_hashes[files_hash]:
        replay = dict(entry)
        replay["data"] = dict(entry["data"])
        replay["data"]["url"] = str(event.data)  # current URL
        replay["data"]["host"] = str(event.host)  # current host
        await self.emit_event(replay["data"], "FINDING", event, context=replay["context"])
Source: [{script_url}]") + + + # Calculate hash of all files in temp directory + files_hash = await self.get_directory_hash(temp_dir) + + # Check cache before proceeding with analysis + if files_hash in self.processed_hashes: + if self.config.get("suppress_duplicates", False): + self.verbose(f"Suppressing duplicate findings for hash: {files_hash} on host {event.host}") + return + self.verbose(f"Cache hit - reemitting findings for hash: {files_hash}") + await self.emit_cached_findings(files_hash, event) + return + # Scan files with YARA before CodeQL analysis for root, _, files in os.walk(temp_dir): for file in files: @@ -395,16 +450,16 @@ async def handle_event(self, event): location = self.format_location(os.path.basename(file_path), script_urls, event.data) description += f" Location: [{location}]" - await self.emit_event( - { + finding_data = { + "data": { "description": f"POSSIBLE Client-side Vulnerability (YARA Match). {description})", "host": str(event.host), "url": str(event.data) }, - "FINDING", - event, - context=f"{{module}} module found a YARA match for rule '{rule_name}' in {location}", - ) + "context": f"{{module}} module found a YARA match for rule '{rule_name}' in {location}" + } + + await self.store_and_emit_finding(finding_data, event, files_hash) # Generate a unique GUID for the database guid = str(uuid.uuid4()) @@ -475,21 +530,16 @@ async def handle_event(self, event): findings.add(finding_hash) - # Prepare data for the event - data = { - "description": f"POSSIBLE Client-side Vulnerability: {details_string}", - "host": str(event.host), - "url": str(event.data) - + finding_data = { + "data": { + "description": f"POSSIBLE Client-side Vulnerability: {details_string}", + "host": str(event.host), + "url": str(event.data) + }, + "context": f"{{module}} module found POSSIBLE Client-side Vulnerability: {details_string}" } - # Emit event with the extracted information - await self.emit_event( - data, - "FINDING", - event, - context=f"{{module}} module 
found POSSIBLE Client-side Vulnerability: {details_string}", - ) + await self.store_and_emit_finding(finding_data, event, files_hash) # Clean up the database directory shutil.rmtree(database_path) @@ -500,3 +550,28 @@ def severity_threshold(self, severity): min_level = self.severity_levels.get(self.min_severity, 4) # Default to error if invalid current_level = self.severity_levels.get(severity, 0) # Default to 0 if unknown severity return current_level >= min_level + + async def get_directory_hash(self, directory): + """Calculate a fast hash of all files in a directory using built-in hash function.""" + # Get all files and sort them for deterministic ordering + all_files = [] + for root, _, files in os.walk(directory): + for filename in files: + file_path = os.path.join(root, filename) + rel_path = os.path.relpath(file_path, directory) + all_files.append((rel_path, file_path)) + + all_files.sort() + + hash_value = 0 + for rel_path, file_path in all_files: + try: + with open(file_path, 'rb') as f: + hash_value = ((hash_value * 31) + hash(rel_path)) & 0xFFFFFFFF + while chunk := f.read(8192): + hash_value = ((hash_value * 31) + hash(chunk)) & 0xFFFFFFFF + except Exception as e: + self.debug(f"Error hashing file {file_path}: {e}") + continue + + return str(hash_value) From 982238d84b3afea3bdffed0b2a0bfd20fc62fb50 Mon Sep 17 00:00:00 2001 From: liquidsec Date: Wed, 19 Mar 2025 12:26:47 -0400 Subject: [PATCH 19/26] refining cache logic --- bbot/modules/codeql.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/bbot/modules/codeql.py b/bbot/modules/codeql.py index 68e2ae0692..896c30fecb 100644 --- a/bbot/modules/codeql.py +++ b/bbot/modules/codeql.py @@ -332,8 +332,6 @@ def format_location(self, file_name, script_urls, event_data): async def store_and_emit_finding(self, finding_data, event, files_hash=None): """Store finding in cache and emit event.""" if files_hash: - if files_hash not in self.processed_hashes: - 
self.processed_hashes[files_hash] = [] # Store everything except URL and host cache_data = finding_data.copy() cache_data["data"] = finding_data["data"].copy() @@ -341,6 +339,7 @@ async def store_and_emit_finding(self, finding_data, event, files_hash=None): cache_data["data"].pop("host", None) # Remove host from cached data self.processed_hashes[files_hash].append(cache_data) self.verbose(f"Storing finding in cache for hash: {files_hash}") + await self.emit_event( finding_data["data"], "FINDING", @@ -422,7 +421,11 @@ async def handle_event(self, event): await self.emit_cached_findings(files_hash, event) return - # Scan files with YARA before CodeQL analysis + # Initialize empty list for this hash before processing + self.processed_hashes[files_hash] = [] + + # Now proceed with analysis + # YARA scanning for root, _, files in os.walk(temp_dir): for file in files: file_path = os.path.join(root, file) From c6ef57d691dc228cc8fdf515867c9ec736144ab1 Mon Sep 17 00:00:00 2001 From: liquidsec Date: Wed, 19 Mar 2025 12:29:07 -0400 Subject: [PATCH 20/26] simplifying method --- bbot/modules/codeql.py | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/bbot/modules/codeql.py b/bbot/modules/codeql.py index 896c30fecb..bc0eb0fe8c 100644 --- a/bbot/modules/codeql.py +++ b/bbot/modules/codeql.py @@ -329,16 +329,15 @@ def format_location(self, file_name, script_urls, event_data): return f"{event_data} (DOM)" return file_name - async def store_and_emit_finding(self, finding_data, event, files_hash=None): + async def store_and_emit_finding(self, finding_data, event, files_hash): """Store finding in cache and emit event.""" - if files_hash: - # Store everything except URL and host - cache_data = finding_data.copy() - cache_data["data"] = finding_data["data"].copy() - cache_data["data"].pop("url", None) # Remove URL from cached data - cache_data["data"].pop("host", None) # Remove host from cached data - self.processed_hashes[files_hash].append(cache_data) 
- self.verbose(f"Storing finding in cache for hash: {files_hash}") + # Store everything except URL and host + cache_data = finding_data.copy() + cache_data["data"] = finding_data["data"].copy() + cache_data["data"].pop("url", None) # Remove URL from cached data + cache_data["data"].pop("host", None) # Remove host from cached data + self.processed_hashes[files_hash].append(cache_data) + self.verbose(f"Storing finding in cache for hash: {files_hash}") await self.emit_event( finding_data["data"], From 925ef5f4659d2400596f65847e18c5094f419ed2 Mon Sep 17 00:00:00 2001 From: liquidsec Date: Wed, 19 Mar 2025 13:03:14 -0400 Subject: [PATCH 21/26] fixing empty folder bug --- bbot/modules/codeql.py | 256 ++++++++++++++++++++--------------------- 1 file changed, 124 insertions(+), 132 deletions(-) diff --git a/bbot/modules/codeql.py b/bbot/modules/codeql.py index bc0eb0fe8c..9d45d40f03 100644 --- a/bbot/modules/codeql.py +++ b/bbot/modules/codeql.py @@ -362,69 +362,27 @@ async def emit_cached_findings(self, files_hash, event): context=finding_data["context"] ) - async def handle_event(self, event): - findings = set() # Track unique findings - - with tempfile.TemporaryDirectory() as temp_dir: - script_urls = {} - - async for url, webscreenshot in self.b.screenshot_urls([event.data]): - dom = webscreenshot.dom - dom_file_path = os.path.join(temp_dir, "dom.html") - with open(dom_file_path, "w") as dom_file: - dom_file.write(dom) - - self.debug(f"DOM file: {dom_file_path} written to temp directory") - - # Only process scripts if not in dom_only mode - if self.mode != "dom_only": - scripts = webscreenshot.scripts - for i, js in enumerate(scripts): - script_url = js.json.get("url", "unknown_url") - - # Skip scripts that are from the same URL as the page - if script_url == str(event.data): - self.debug(f"Skipping script with same URL as page: {script_url}") - continue - - # Skip out-of-scope scripts in in_scope mode - if self.mode == "in_scope": - try: - parsed_url = 
self.helpers.urlparse(script_url) - script_domain = parsed_url.netloc - if not self.scan.in_scope(script_domain): - self.debug(f"Skipping out-of-scope script: {script_url}") - continue - except Exception as e: - self.debug(f"Error parsing script URL {script_url}: {e}") - continue - - loaded_js = js.json["script"] - script_urls[i] = script_url - js_file_path = os.path.join(temp_dir, f"script_{i}.js") - with open(js_file_path, "w") as js_file: - js_file.write(loaded_js) - self.debug(f"JS file: {js_file_path} written to temp directory. Source: [{script_url}]") - - - - # Calculate hash of all files in temp directory - files_hash = await self.get_directory_hash(temp_dir) - - # Check cache before proceeding with analysis - if files_hash in self.processed_hashes: - if self.config.get("suppress_duplicates", False): - self.verbose(f"Suppressing duplicate findings for hash: {files_hash} on host {event.host}") - return - self.verbose(f"Cache hit - reemitting findings for hash: {files_hash}") - await self.emit_cached_findings(files_hash, event) + async def codeql_process(self, temp_dir, files_hash, event, script_urls=None): + """Process files in a directory with CodeQL and handle caching.""" + # Check cache + if files_hash in self.processed_hashes: + if self.config.get("suppress_duplicates", False): + self.verbose(f"Suppressing duplicate findings for hash: {files_hash} on host {event.host}") return + self.verbose(f"Cache hit - reemitting findings for hash: {files_hash}") + await self.emit_cached_findings(files_hash, event) + return + + # Initialize empty list for this hash + self.processed_hashes[files_hash] = [] - # Initialize empty list for this hash before processing - self.processed_hashes[files_hash] = [] + # Check if directory has any files before proceeding + if not any(Path(temp_dir).iterdir()): + self.debug(f"No files to analyze in {temp_dir}") + return - # Now proceed with analysis - # YARA scanning + # YARA scanning (for JS files only) + if script_urls is not None: 
# This indicates we're processing JS files for root, _, files in os.walk(temp_dir): for file in files: file_path = os.path.join(root, file) @@ -463,89 +421,123 @@ async def handle_event(self, event): await self.store_and_emit_finding(finding_data, event, files_hash) - # Generate a unique GUID for the database - guid = str(uuid.uuid4()) - database_path = os.path.join(f"{self.helpers.tools_dir}/codeql/databases", guid) - self.debug(f"Writing database to {database_path}") - # Run the execute_codeql_create_db method with the temp directory - await self.execute_codeql_create_db(temp_dir, database_path) - - # Call the execute_codeql_analyze_db method - results = await self.execute_codeql_analyze_db(database_path) - - # Post-process results and extract code - for result in results: - # Extract relevant code portion - file_path = os.path.join(temp_dir, result["file"].lstrip("/")) - with open(file_path, "r") as f: - lines = f.readlines() - - # Attempt to extract code snippet if line numbers are valid - start_line = result.get("start_line") - start_column = result.get("start_column") - end_column = result.get("end_column") - - code_snippet = None - if isinstance(start_line, int): - start_line -= 1 # Adjust for zero-based index - # Get the full line and sanitize for console output - full_line = lines[start_line].strip().encode("ascii", "replace").decode() - - # If line is under 150 chars, use the whole line - if len(full_line) <= 150: - code_snippet = full_line - # Otherwise use the column positions - elif all(isinstance(x, int) for x in [start_column, end_column]): - start_column -= 1 # Adjust for zero-based index - code_snippet = full_line[start_column:end_column] - else: - # If we can't use columns, truncate with ellipsis - code_snippet = full_line[:147] + "..." 
- - self.debug(f"Extracted code snippet (line {start_line + 1}):\n{code_snippet}") + # Process with CodeQL + database_path = f"{self.helpers.tools_dir}/codeql/databases/{str(uuid.uuid4())}" + await self.execute_codeql_create_db(temp_dir, database_path) + results = await self.execute_codeql_analyze_db(database_path) + + # Process results + for result in results: + # Extract relevant code portion + file_path = os.path.join(temp_dir, result["file"].lstrip("/")) + with open(file_path, "r") as f: + lines = f.readlines() + + # Attempt to extract code snippet if line numbers are valid + start_line = result.get("start_line") + start_column = result.get("start_column") + end_column = result.get("end_column") + + code_snippet = None + if isinstance(start_line, int): + start_line -= 1 # Adjust for zero-based index + # Get the full line and sanitize for console output + full_line = lines[start_line].strip().encode("ascii", "replace").decode() + + # If line is under 150 chars, use the whole line + if len(full_line) <= 150: + code_snippet = full_line + # Otherwise use the column positions + elif all(isinstance(x, int) for x in [start_column, end_column]): + start_column -= 1 # Adjust for zero-based index + code_snippet = full_line[start_column:end_column] else: - self.debug(f"Could not extract code snippet due to invalid line numbers: {result}") + # If we can't use columns, truncate with ellipsis + code_snippet = full_line[:147] + "..." 
- # Skip results that don't meet severity threshold - if not self.severity_threshold(result["severity"]): - continue + self.debug(f"Extracted code snippet (line {start_line + 1}):\n{code_snippet}") + else: + self.debug(f"Could not extract code snippet due to invalid line numbers: {result}") - # Format the location string using the new function - location = self.format_location(result["file"], script_urls, event.data) + # Skip results that don't meet severity threshold + if not self.severity_threshold(result["severity"]): + continue - # Add line and column information - location_details = f"Line: {start_line + 1}" - if isinstance(start_column, int) and isinstance(end_column, int): - location_details += f" Cols: {start_column}-{end_column}" + # Format the location string using the new function + location = self.format_location(result["file"], script_urls, event.data) - # Prepare details string with all the information - details_string = f"{result['title']}. Description: [{result['full_description']}] Severity: [{result['severity']}] Location: [{location} ({location_details})] Code Snippet: [{code_snippet}]" + # Add line and column information + location_details = f"Line: {start_line + 1}" + if isinstance(start_column, int) and isinstance(end_column, int): + location_details += f" Cols: {start_column}-{end_column}" - # Create a hash of the finding - finding_hash = hash( - (result["title"], result["full_description"], result["severity"], code_snippet) - ) + # Prepare details string with all the information + details_string = f"{result['title']}. 
Description: [{result['full_description']}] Severity: [{result['severity']}] Location: [{location} ({location_details})] Code Snippet: [{code_snippet}]" - if finding_hash in findings: - self.debug(f"Skipping duplicate finding: {result['title']} with code snippet: {code_snippet}") - continue + finding_data = { + "data": { + "description": f"POSSIBLE Client-side Vulnerability: {details_string}", + "host": str(event.host), + "url": str(event.data) + }, + "context": f"{{module}} module found POSSIBLE Client-side Vulnerability: {details_string}" + } - findings.add(finding_hash) + await self.store_and_emit_finding(finding_data, event, files_hash) - finding_data = { - "data": { - "description": f"POSSIBLE Client-side Vulnerability: {details_string}", - "host": str(event.host), - "url": str(event.data) - }, - "context": f"{{module}} module found POSSIBLE Client-side Vulnerability: {details_string}" - } + # Clean up + shutil.rmtree(database_path) + self.debug(f"Cleaned up database directory: {database_path}") - await self.store_and_emit_finding(finding_data, event, files_hash) + async def handle_event(self, event): + with tempfile.TemporaryDirectory() as js_temp_dir, tempfile.TemporaryDirectory() as dom_temp_dir: + script_urls = {} + + async for url, webscreenshot in self.b.screenshot_urls([event.data]): + # Handle DOM + dom = webscreenshot.dom + dom_file_path = os.path.join(dom_temp_dir, "dom.html") + with open(dom_file_path, "w") as dom_file: + dom_file.write(dom) + self.debug(f"DOM file: {dom_file_path} written to temp directory") + + # Process DOM + dom_hash = await self.get_directory_hash(dom_temp_dir) + await self.codeql_process(dom_temp_dir, dom_hash, event) + + # Process JS files if not in dom_only mode + if self.mode != "dom_only": + scripts = webscreenshot.scripts + for i, js in enumerate(scripts): + script_url = js.json.get("url", "unknown_url") + + # Skip scripts that are from the same URL as the page + if script_url == str(event.data): + self.debug(f"Skipping 
script with same URL as page: {script_url}") + continue + + # Skip out-of-scope scripts in in_scope mode + if self.mode == "in_scope": + try: + parsed_url = self.helpers.urlparse(script_url) + script_domain = parsed_url.netloc + if not self.scan.in_scope(script_domain): + self.debug(f"Skipping out-of-scope script: {script_url}") + continue + except Exception as e: + self.debug(f"Error parsing script URL {script_url}: {e}") + continue + + loaded_js = js.json["script"] + script_urls[i] = script_url + js_file_path = os.path.join(js_temp_dir, f"script_{i}.js") + with open(js_file_path, "w") as js_file: + js_file.write(loaded_js) + self.debug(f"JS file: {js_file_path} written to temp directory. Source: [{script_url}]") - # Clean up the database directory - shutil.rmtree(database_path) - self.debug(f"Cleaned up database directory: {database_path}") + # Process JS files + files_hash = await self.get_directory_hash(js_temp_dir) + await self.codeql_process(js_temp_dir, files_hash, event, script_urls) def severity_threshold(self, severity): severity = severity.lower() From 63ec8bced6edb43ea10ec3d677135f8df1aa631d Mon Sep 17 00:00:00 2001 From: liquidsec Date: Wed, 19 Mar 2025 14:05:08 -0400 Subject: [PATCH 22/26] refactor --- bbot/modules/codeql.py | 227 ++++++++++++++++++++++------------------- 1 file changed, 124 insertions(+), 103 deletions(-) diff --git a/bbot/modules/codeql.py b/bbot/modules/codeql.py index 9d45d40f03..97615286c1 100644 --- a/bbot/modules/codeql.py +++ b/bbot/modules/codeql.py @@ -362,64 +362,133 @@ async def emit_cached_findings(self, files_hash, event): context=finding_data["context"] ) - async def codeql_process(self, temp_dir, files_hash, event, script_urls=None): + async def handle_event(self, event): + with tempfile.TemporaryDirectory() as js_temp_dir, tempfile.TemporaryDirectory() as dom_temp_dir: + script_urls = {} + + async for url, webscreenshot in self.b.screenshot_urls([event.data]): + # Handle DOM + dom = webscreenshot.dom + dom_file_path = 
os.path.join(dom_temp_dir, "dom.html") + with open(dom_file_path, "w") as dom_file: + dom_file.write(dom) + self.debug(f"DOM file: {dom_file_path} written to temp directory") + + # Check if DOM directory has files + if not any(Path(dom_temp_dir).iterdir()): + self.debug(f"No files to analyze in {dom_temp_dir}") + else: + # Process DOM + dom_hash = await self.get_directory_hash(dom_temp_dir) + if dom_hash in self.processed_hashes: + if self.config.get("suppress_duplicates", False): + self.critical(f"Suppressing duplicate DOM findings for hash: {dom_hash} on host {event.host}") + else: + self.critical(f"Cache hit - reemitting DOM findings for hash: {dom_hash}") + await self.emit_cached_findings(dom_hash, event) + else: + self.critical(f"No hash match for DOM: {dom_hash}") + self.processed_hashes[dom_hash] = [] + findings = await self.codeql_process(dom_temp_dir, event, script_urls) + for finding in findings: + await self.store_and_emit_finding(finding, event, dom_hash) + + # Process JS files if not in dom_only mode + if self.mode != "dom_only": + scripts = webscreenshot.scripts + for i, js in enumerate(scripts): + script_url = js.json.get("url", "unknown_url") + + # Skip scripts that are from the same URL as the page + if script_url == str(event.data): + self.debug(f"Skipping script with same URL as page: {script_url}") + continue + + # Skip out-of-scope scripts in in_scope mode + if self.mode == "in_scope": + try: + parsed_url = self.helpers.urlparse(script_url) + script_domain = parsed_url.netloc + if not self.scan.in_scope(script_domain): + self.debug(f"Skipping out-of-scope script: {script_url}") + continue + except Exception as e: + self.debug(f"Error parsing script URL {script_url}: {e}") + continue + + loaded_js = js.json["script"] + script_urls[i] = script_url + js_file_path = os.path.join(js_temp_dir, f"script_{i}.js") + with open(js_file_path, "w") as js_file: + js_file.write(loaded_js) + self.debug(f"JS file: {js_file_path} written to temp directory. 
Source: [{script_url}]") + + # Check if JS directory has files + if not any(Path(js_temp_dir).iterdir()): + self.debug(f"No files to analyze in {js_temp_dir}") + else: + # Process JS files + files_hash = await self.get_directory_hash(js_temp_dir) + if files_hash in self.processed_hashes: + if self.config.get("suppress_duplicates", False): + self.critical(f"Suppressing duplicate JS findings for hash: {files_hash} on host {event.host}") + else: + self.critical(f"Cache hit - reemitting JS findings for hash: {files_hash}") + await self.emit_cached_findings(files_hash, event) + else: + self.critical(f"No hash match for JS: {files_hash}") + self.processed_hashes[files_hash] = [] + findings = await self.codeql_process(js_temp_dir, event, script_urls) + for finding in findings: + await self.store_and_emit_finding(finding, event, files_hash) + + async def codeql_process(self, temp_dir, event, script_urls): """Process files in a directory with CodeQL and handle caching.""" - # Check cache - if files_hash in self.processed_hashes: - if self.config.get("suppress_duplicates", False): - self.verbose(f"Suppressing duplicate findings for hash: {files_hash} on host {event.host}") - return - self.verbose(f"Cache hit - reemitting findings for hash: {files_hash}") - await self.emit_cached_findings(files_hash, event) - return - - # Initialize empty list for this hash - self.processed_hashes[files_hash] = [] + findings = [] # Check if directory has any files before proceeding if not any(Path(temp_dir).iterdir()): self.debug(f"No files to analyze in {temp_dir}") - return - - # YARA scanning (for JS files only) - if script_urls is not None: # This indicates we're processing JS files - for root, _, files in os.walk(temp_dir): - for file in files: - file_path = os.path.join(root, file) - with open(file_path, "r") as f: - content = f.read() - results = await self.helpers.yara.match(self.compiled_yara_rules, content, full_result=True) - for result in results: - # Get rule metadata and name 
from the match - yara_description = result["meta"].get("description", "") - confidence = result["meta"].get("confidence", "") - rule_name = result["meta"].get("name", result["rule"]) - - # Build description components - description = f"{rule_name}: {yara_description}." - - if confidence: - description += f" Confidence: [{confidence}]" - - matched_text = result["matched_string"] - if len(matched_text) > 150: - matched_text = matched_text[:147] + "..." - description += f" Matched Text: [{matched_text}]" - - # Format the location using the same helper function - location = self.format_location(os.path.basename(file_path), script_urls, event.data) - description += f" Location: [{location}]" - - finding_data = { - "data": { - "description": f"POSSIBLE Client-side Vulnerability (YARA Match). {description})", - "host": str(event.host), - "url": str(event.data) - }, - "context": f"{{module}} module found a YARA match for rule '{rule_name}' in {location}" - } - - await self.store_and_emit_finding(finding_data, event, files_hash) + return findings + + + for root, _, files in os.walk(temp_dir): + for file in files: + file_path = os.path.join(root, file) + with open(file_path, "r") as f: + content = f.read() + results = await self.helpers.yara.match(self.compiled_yara_rules, content, full_result=True) + for result in results: + # Get rule metadata and name from the match + yara_description = result["meta"].get("description", "") + confidence = result["meta"].get("confidence", "") + rule_name = result["meta"].get("name", result["rule"]) + + # Build description components + description = f"{rule_name}: {yara_description}." + + if confidence: + description += f" Confidence: [{confidence}]" + + matched_text = result["matched_string"] + if len(matched_text) > 150: + matched_text = matched_text[:147] + "..." 
+ description += f" Matched Text: [{matched_text}]" + + # Format the location using the same helper function + location = self.format_location(os.path.basename(file_path), script_urls, event.data) + description += f" Location: [{location}]" + + finding_data = { + "data": { + "description": f"POSSIBLE Client-side Vulnerability (YARA Match). {description})", + "host": str(event.host), + "url": str(event.data) + }, + "context": f"{{module}} module found a YARA match for rule '{rule_name}' in {location}" + } + + findings.append(finding_data) # Process with CodeQL database_path = f"{self.helpers.tools_dir}/codeql/databases/{str(uuid.uuid4())}" @@ -483,61 +552,13 @@ async def codeql_process(self, temp_dir, files_hash, event, script_urls=None): "context": f"{{module}} module found POSSIBLE Client-side Vulnerability: {details_string}" } - await self.store_and_emit_finding(finding_data, event, files_hash) + findings.append(finding_data) # Clean up shutil.rmtree(database_path) self.debug(f"Cleaned up database directory: {database_path}") - async def handle_event(self, event): - with tempfile.TemporaryDirectory() as js_temp_dir, tempfile.TemporaryDirectory() as dom_temp_dir: - script_urls = {} - - async for url, webscreenshot in self.b.screenshot_urls([event.data]): - # Handle DOM - dom = webscreenshot.dom - dom_file_path = os.path.join(dom_temp_dir, "dom.html") - with open(dom_file_path, "w") as dom_file: - dom_file.write(dom) - self.debug(f"DOM file: {dom_file_path} written to temp directory") - - # Process DOM - dom_hash = await self.get_directory_hash(dom_temp_dir) - await self.codeql_process(dom_temp_dir, dom_hash, event) - - # Process JS files if not in dom_only mode - if self.mode != "dom_only": - scripts = webscreenshot.scripts - for i, js in enumerate(scripts): - script_url = js.json.get("url", "unknown_url") - - # Skip scripts that are from the same URL as the page - if script_url == str(event.data): - self.debug(f"Skipping script with same URL as page: 
{script_url}") - continue - - # Skip out-of-scope scripts in in_scope mode - if self.mode == "in_scope": - try: - parsed_url = self.helpers.urlparse(script_url) - script_domain = parsed_url.netloc - if not self.scan.in_scope(script_domain): - self.debug(f"Skipping out-of-scope script: {script_url}") - continue - except Exception as e: - self.debug(f"Error parsing script URL {script_url}: {e}") - continue - - loaded_js = js.json["script"] - script_urls[i] = script_url - js_file_path = os.path.join(js_temp_dir, f"script_{i}.js") - with open(js_file_path, "w") as js_file: - js_file.write(loaded_js) - self.debug(f"JS file: {js_file_path} written to temp directory. Source: [{script_url}]") - - # Process JS files - files_hash = await self.get_directory_hash(js_temp_dir) - await self.codeql_process(js_temp_dir, files_hash, event, script_urls) + return findings def severity_threshold(self, severity): severity = severity.lower() From 09f159c0e595db1183058e96f3544e198747034e Mon Sep 17 00:00:00 2001 From: liquidsec Date: Wed, 19 Mar 2025 19:48:28 -0400 Subject: [PATCH 23/26] removing debug --- bbot/modules/codeql.py | 347 +++++++++++++++++------------------------ 1 file changed, 143 insertions(+), 204 deletions(-) diff --git a/bbot/modules/codeql.py b/bbot/modules/codeql.py index 97615286c1..539de7482f 100644 --- a/bbot/modules/codeql.py +++ b/bbot/modules/codeql.py @@ -20,9 +20,9 @@ class codeql(BaseModule): } deps_pip = ["webcap"] - options = {"mode": "all", "min_severity": "error", "suppress_duplicates": False} + options = {"in_scope_only": False, "min_severity": "error", "suppress_duplicates": True} options_desc = { - "mode": "Script processing mode: 'all' (process all scripts), 'in_scope' (only process in-scope scripts), or 'dom_only' (only process DOM)", + "in_scope_only": "Only process scripts residing on in-scope hosts", "min_severity": "Minimum severity level to report (error, warning, recommendation, note)", "suppress_duplicates": "Skip findings when identical 
files are analyzed on the same host (default: False)" } @@ -172,11 +172,7 @@ async def setup(self): # Compile YARA rules during setup self.compiled_yara_rules = self.helpers.yara.compile(source=self.yara_rules) - self.mode = self.config.get("mode", "in_scope").lower() - valid_modes = {"all", "in_scope", "dom_only"} - if self.mode not in valid_modes: - return False, f"Invalid mode '{self.mode}'. Valid options are: {', '.join(valid_modes)}" - + self.in_scope_only = self.config.get("in_scope_only", False) self.severity_levels = {"error": 4, "warning": 3, "recommendation": 2, "note": 1} self.min_severity = self.config.get("min_severity", "error").lower() if self.min_severity not in self.severity_levels: @@ -282,7 +278,7 @@ async def execute_codeql_analyze_db(self, database_path): # Run the command and capture the output async for line in self.run_process_live(command): - self.hugeinfo(line) + self.debug(f"CodeQL analysis output: {line}") # Read and parse the CSV results results = [] @@ -309,25 +305,6 @@ async def execute_codeql_analyze_db(self, database_path): return results - def format_location(self, file_name, script_urls, event_data): - """Format the location string based on the file name. 
- - Args: - file_name (str): The name of the file being processed - script_urls (dict): Mapping of script numbers to their URLs - event_data (str): The event data (typically URL) being processed - - Returns: - str: Formatted location string - """ - file_name = file_name.lstrip("/") - if file_name.startswith("script_"): - script_num = int(file_name.split("_")[1].split(".")[0]) - script_url = script_urls.get(script_num, "unknown_url") - return f"(script: {script_url})" - elif file_name == "dom.html": - return f"{event_data} (DOM)" - return file_name async def store_and_emit_finding(self, finding_data, event, files_hash): """Store finding in cache and emit event.""" @@ -337,7 +314,7 @@ async def store_and_emit_finding(self, finding_data, event, files_hash): cache_data["data"].pop("url", None) # Remove URL from cached data cache_data["data"].pop("host", None) # Remove host from cached data self.processed_hashes[files_hash].append(cache_data) - self.verbose(f"Storing finding in cache for hash: {files_hash}") + self.verbose(f"Storing finding in cache for hash: {files_hash}, url: {finding_data['data'].get('url', 'N/A')}") await self.emit_event( finding_data["data"], @@ -363,110 +340,92 @@ async def emit_cached_findings(self, files_hash, event): ) async def handle_event(self, event): - with tempfile.TemporaryDirectory() as js_temp_dir, tempfile.TemporaryDirectory() as dom_temp_dir: - script_urls = {} + with tempfile.TemporaryDirectory() as temp_dir: + files_to_process = {} # hash -> (file_path, script_url, original_url) async for url, webscreenshot in self.b.screenshot_urls([event.data]): - # Handle DOM - dom = webscreenshot.dom - dom_file_path = os.path.join(dom_temp_dir, "dom.html") - with open(dom_file_path, "w") as dom_file: - dom_file.write(dom) - self.debug(f"DOM file: {dom_file_path} written to temp directory") - - # Check if DOM directory has files - if not any(Path(dom_temp_dir).iterdir()): - self.debug(f"No files to analyze in {dom_temp_dir}") - else: - # Process 
DOM - dom_hash = await self.get_directory_hash(dom_temp_dir) - if dom_hash in self.processed_hashes: - if self.config.get("suppress_duplicates", False): - self.critical(f"Suppressing duplicate DOM findings for hash: {dom_hash} on host {event.host}") - else: - self.critical(f"Cache hit - reemitting DOM findings for hash: {dom_hash}") - await self.emit_cached_findings(dom_hash, event) - else: - self.critical(f"No hash match for DOM: {dom_hash}") - self.processed_hashes[dom_hash] = [] - findings = await self.codeql_process(dom_temp_dir, event, script_urls) - for finding in findings: - await self.store_and_emit_finding(finding, event, dom_hash) - - # Process JS files if not in dom_only mode - if self.mode != "dom_only": - scripts = webscreenshot.scripts - for i, js in enumerate(scripts): - script_url = js.json.get("url", "unknown_url") - - # Skip scripts that are from the same URL as the page - if script_url == str(event.data): - self.debug(f"Skipping script with same URL as page: {script_url}") + scripts = webscreenshot.scripts + + for i, js in enumerate(scripts): + script_url = js.json.get("url", url) + + # Skip out-of-scope scripts if in_scope_only is True + if self.in_scope_only: + try: + parsed_url = self.helpers.urlparse(script_url) + script_domain = parsed_url.netloc + if not self.scan.in_scope(script_domain): + self.debug(f"Skipping out-of-scope script: {script_url}") + continue + except Exception as e: + self.debug(f"Error parsing script URL {script_url}: {e}") continue - # Skip out-of-scope scripts in in_scope mode - if self.mode == "in_scope": - try: - parsed_url = self.helpers.urlparse(script_url) - script_domain = parsed_url.netloc - if not self.scan.in_scope(script_domain): - self.debug(f"Skipping out-of-scope script: {script_url}") - continue - except Exception as e: - self.debug(f"Error parsing script URL {script_url}: {e}") - continue + loaded_js = js.json["script"] + file_path = os.path.join(temp_dir, f"script_{i}.js") - loaded_js = js.json["script"] 
- script_urls[i] = script_url - js_file_path = os.path.join(js_temp_dir, f"script_{i}.js") - with open(js_file_path, "w") as js_file: - js_file.write(loaded_js) - self.debug(f"JS file: {js_file_path} written to temp directory. Source: [{script_url}]") + # Write file contents + with open(file_path, "w") as js_file: + js_file.write(loaded_js) - # Check if JS directory has files - if not any(Path(js_temp_dir).iterdir()): - self.debug(f"No files to analyze in {js_temp_dir}") - else: - # Process JS files - files_hash = await self.get_directory_hash(js_temp_dir) - if files_hash in self.processed_hashes: - if self.config.get("suppress_duplicates", False): - self.critical(f"Suppressing duplicate JS findings for hash: {files_hash} on host {event.host}") - else: - self.critical(f"Cache hit - reemitting JS findings for hash: {files_hash}") - await self.emit_cached_findings(files_hash, event) + # Calculate hash + file_hash = await self.get_file_hash(file_path) + + if file_hash in self.processed_hashes: + if self.config.get("suppress_duplicates", False): + self.debug(f"Suppressing duplicate findings for hash: {file_hash} on host {event.host}") else: - self.critical(f"No hash match for JS: {files_hash}") - self.processed_hashes[files_hash] = [] - findings = await self.codeql_process(js_temp_dir, event, script_urls) - for finding in findings: - await self.store_and_emit_finding(finding, event, files_hash) - - async def codeql_process(self, temp_dir, event, script_urls): - """Process files in a directory with CodeQL and handle caching.""" - findings = [] - - # Check if directory has any files before proceeding - if not any(Path(temp_dir).iterdir()): - self.debug(f"No files to analyze in {temp_dir}") - return findings - - - for root, _, files in os.walk(temp_dir): - for file in files: - file_path = os.path.join(root, file) + self.verbose(f"Cache hit - reemitting findings for hash: {file_hash}") + await self.emit_cached_findings(file_hash, event) + # Delete the file if it's already 
processed + os.remove(file_path) + else: + self.debug(f"No hash match for [{script_url}]: {file_hash}") + self.processed_hashes[file_hash] = [] + files_to_process[file_hash] = (file_path, script_url, str(event.data)) + + # Check if there are files to process + if files_to_process: + self.debug(f"Processing {len(files_to_process)} files") + + # Generate a consistent database path + database_path = os.path.join(self.helpers.tools_dir, "codeql", "databases", str(uuid.uuid4())) + os.makedirs(database_path, exist_ok=True) + + findings_map = await self.codeql_process_files(temp_dir, files_to_process, database_path) + await self.process_findings(files_to_process, findings_map, event) + + # Clean up the specific database directory used + if os.path.exists(database_path): + shutil.rmtree(database_path) + self.debug(f"Cleaned up database directory: {database_path}") + + async def process_findings(self, batch_files, findings_map, event): + """Process and emit findings for a batch of files.""" + for file_hash, findings in findings_map.items(): + _, script_url, original_url = batch_files[file_hash] + for finding in findings: + # Update finding with correct URLs + finding["data"]["url"] = original_url + finding["data"]["script_url"] = script_url + await self.store_and_emit_finding(finding, event, file_hash) + + async def codeql_process_files(self, temp_dir, files_to_process, database_path): + """Process multiple files in a single CodeQL database.""" + findings_map = {hash_: [] for hash_ in files_to_process.keys()} + + # First run YARA checks for all files + for file_hash, (file_path, script_url, _) in files_to_process.items(): + try: with open(file_path, "r") as f: content = f.read() results = await self.helpers.yara.match(self.compiled_yara_rules, content, full_result=True) for result in results: - # Get rule metadata and name from the match yara_description = result["meta"].get("description", "") confidence = result["meta"].get("confidence", "") rule_name = 
result["meta"].get("name", result["rule"]) - # Build description components description = f"{rule_name}: {yara_description}." - if confidence: description += f" Confidence: [{confidence}]" @@ -474,91 +433,84 @@ async def codeql_process(self, temp_dir, event, script_urls): if len(matched_text) > 150: matched_text = matched_text[:147] + "..." description += f" Matched Text: [{matched_text}]" - - # Format the location using the same helper function - location = self.format_location(os.path.basename(file_path), script_urls, event.data) - description += f" Location: [{location}]" + description += f" Location: [{script_url}]" finding_data = { "data": { "description": f"POSSIBLE Client-side Vulnerability (YARA Match). {description})", - "host": str(event.host), - "url": str(event.data) + "script_url": script_url }, - "context": f"{{module}} module found a YARA match for rule '{rule_name}' in {location}" + "context": f"{{module}} module found a YARA match for rule '{rule_name}' in {script_url}" } - - findings.append(finding_data) + findings_map[file_hash].append(finding_data) + except Exception as e: + self.debug(f"Error processing YARA for {file_path}: {e}") - # Process with CodeQL - database_path = f"{self.helpers.tools_dir}/codeql/databases/{str(uuid.uuid4())}" + # Create and analyze CodeQL database await self.execute_codeql_create_db(temp_dir, database_path) results = await self.execute_codeql_analyze_db(database_path) - # Process results + # Process CodeQL results for result in results: - # Extract relevant code portion file_path = os.path.join(temp_dir, result["file"].lstrip("/")) - with open(file_path, "r") as f: - lines = f.readlines() - - # Attempt to extract code snippet if line numbers are valid - start_line = result.get("start_line") - start_column = result.get("start_column") - end_column = result.get("end_column") - - code_snippet = None - if isinstance(start_line, int): - start_line -= 1 # Adjust for zero-based index - # Get the full line and sanitize for 
console output - full_line = lines[start_line].strip().encode("ascii", "replace").decode() - - # If line is under 150 chars, use the whole line - if len(full_line) <= 150: - code_snippet = full_line - # Otherwise use the column positions - elif all(isinstance(x, int) for x in [start_column, end_column]): - start_column -= 1 # Adjust for zero-based index - code_snippet = full_line[start_column:end_column] - else: - # If we can't use columns, truncate with ellipsis - code_snippet = full_line[:147] + "..." - - self.debug(f"Extracted code snippet (line {start_line + 1}):\n{code_snippet}") - else: - self.debug(f"Could not extract code snippet due to invalid line numbers: {result}") - - # Skip results that don't meet severity threshold - if not self.severity_threshold(result["severity"]): - continue + + # Map result back to original file + file_hash = None + script_url = None + for h, (fp, url, _) in files_to_process.items(): + if os.path.samefile(fp, file_path): + file_hash = h + script_url = url + break + + if not file_hash: + self.debug(f"Could not map result back to original file: {file_path}") + continue - # Format the location string using the new function - location = self.format_location(result["file"], script_urls, event.data) + # Extract code snippet and process finding + try: + with open(file_path, "r") as f: + lines = f.readlines() + start_line = result.get("start_line") + code_snippet = None + + if isinstance(start_line, int): + start_line -= 1 + full_line = lines[start_line].strip().encode("ascii", "replace").decode() + + if len(full_line) <= 150: + code_snippet = full_line + elif all(isinstance(x, int) for x in [result.get("start_column"), result.get("end_column")]): + code_snippet = full_line[result["start_column"]-1:result["end_column"]] + else: + code_snippet = full_line[:147] + "..." 
- # Add line and column information - location_details = f"Line: {start_line + 1}" - if isinstance(start_column, int) and isinstance(end_column, int): - location_details += f" Cols: {start_column}-{end_column}" + if not self.severity_threshold(result["severity"]): + continue - # Prepare details string with all the information - details_string = f"{result['title']}. Description: [{result['full_description']}] Severity: [{result['severity']}] Location: [{location} ({location_details})] Code Snippet: [{code_snippet}]" + location_details = f"Line: {start_line + 1}" + if isinstance(result.get("start_column"), int) and isinstance(result.get("end_column"), int): + location_details += f" Cols: {result['start_column']}-{result['end_column']}" - finding_data = { - "data": { - "description": f"POSSIBLE Client-side Vulnerability: {details_string}", - "host": str(event.host), - "url": str(event.data) - }, - "context": f"{{module}} module found POSSIBLE Client-side Vulnerability: {details_string}" - } + details_string = ( + f"{result['title']}. 
Description: [{result['full_description']}] " + f"Severity: [{result['severity']}] Location: [{script_url} ({location_details})] " + f"Code Snippet: [{code_snippet}]" + ) - findings.append(finding_data) + finding_data = { + "data": { + "description": f"POSSIBLE Client-side Vulnerability: {details_string}", + "script_url": script_url + }, + "context": f"{{module}} module found POSSIBLE Client-side Vulnerability: {details_string}" + } + findings_map[file_hash].append(finding_data) - # Clean up - shutil.rmtree(database_path) - self.debug(f"Cleaned up database directory: {database_path}") + except Exception as e: + self.debug(f"Error processing finding for {file_path}: {e}") - return findings + return findings_map def severity_threshold(self, severity): severity = severity.lower() @@ -566,27 +518,14 @@ def severity_threshold(self, severity): current_level = self.severity_levels.get(severity, 0) # Default to 0 if unknown severity return current_level >= min_level - async def get_directory_hash(self, directory): - """Calculate a fast hash of all files in a directory using built-in hash function.""" - # Get all files and sort them for deterministic ordering - all_files = [] - for root, _, files in os.walk(directory): - for filename in files: - file_path = os.path.join(root, filename) - rel_path = os.path.relpath(file_path, directory) - all_files.append((rel_path, file_path)) - - all_files.sort() - + async def get_file_hash(self, file_path): + """Calculate a fast hash of a single file using built-in hash function.""" hash_value = 0 - for rel_path, file_path in all_files: - try: - with open(file_path, 'rb') as f: - hash_value = ((hash_value * 31) + hash(rel_path)) & 0xFFFFFFFF - while chunk := f.read(8192): - hash_value = ((hash_value * 31) + hash(chunk)) & 0xFFFFFFFF - except Exception as e: - self.debug(f"Error hashing file {file_path}: {e}") - continue + try: + with open(file_path, 'rb') as f: + while chunk := f.read(8192): + hash_value = ((hash_value * 31) + 
hash(chunk)) & 0xFFFFFFFF + except Exception as e: + self.debug(f"Error hashing file {file_path}: {e}") - return str(hash_value) + return str(hash_value) \ No newline at end of file From 01b82d0246544abb43e783e6e4b5b3b07b0fca0a Mon Sep 17 00:00:00 2001 From: liquidsec Date: Wed, 19 Mar 2025 23:53:27 -0400 Subject: [PATCH 24/26] adding details field --- bbot/modules/codeql.py | 111 ++++++++++++++++++++++++++--------------- 1 file changed, 72 insertions(+), 39 deletions(-) diff --git a/bbot/modules/codeql.py b/bbot/modules/codeql.py index 539de7482f..60ac80b0c0 100644 --- a/bbot/modules/codeql.py +++ b/bbot/modules/codeql.py @@ -8,6 +8,7 @@ import csv import shutil import time +import re class codeql(BaseModule): watched_events = ["URL"] @@ -339,6 +340,29 @@ async def emit_cached_findings(self, files_hash, event): context=finding_data["context"] ) + def extract_code_snippet(self, file_path, start_line, start_col, end_line, end_col): + """Extract a code snippet from a file given line and column numbers.""" + try: + with open(file_path, "r") as f: + lines = f.readlines() + code_snippet = None + + if isinstance(start_line, int): + start_line -= 1 # Adjust for zero-based index + full_line = lines[start_line].strip().encode("ascii", "replace").decode() + + if len(full_line) <= 150: + code_snippet = full_line + elif all(isinstance(x, int) for x in [start_col, end_col]): + code_snippet = full_line[start_col-1:end_col] + else: + code_snippet = full_line[:147] + "..." 
+ + return code_snippet + except Exception as e: + self.debug(f"Error extracting code snippet from {file_path}: {e}") + return "N/A" + async def handle_event(self, event): with tempfile.TemporaryDirectory() as temp_dir: files_to_process = {} # hash -> (file_path, script_url, original_url) @@ -400,16 +424,35 @@ async def handle_event(self, event): shutil.rmtree(database_path) self.debug(f"Cleaned up database directory: {database_path}") - async def process_findings(self, batch_files, findings_map, event): + async def process_findings(self, files_to_process, findings_map, event): """Process and emit findings for a batch of files.""" for file_hash, findings in findings_map.items(): - _, script_url, original_url = batch_files[file_hash] + _, script_url, original_url = files_to_process[file_hash] for finding in findings: # Update finding with correct URLs finding["data"]["url"] = original_url finding["data"]["script_url"] = script_url await self.store_and_emit_finding(finding, event, file_hash) + async def process_message(self, message, file_path): + """Process the message to replace double-bracketed sections with detailed information.""" + + def replace_brackets(match): + details = match.group(1) + parts = details.split("|") + description = parts[0].strip().strip('"') + location = parts[1].replace("relative:///","").strip().strip('"').split(":") + start_line, start_col, end_line, end_col = map(int, location[1:]) + code_snippet = self.extract_code_snippet(file_path, start_line, start_col, end_line, end_col) + if len(code_snippet) > 150: + code_snippet = code_snippet[:197] + "..." 
+ return f"{description}: [{code_snippet}]" + + # Use regex to find and replace double-bracketed sections + pattern = r'\[\[(.*?)\]\]' + processed_message = re.sub(pattern, replace_brackets, message) + return processed_message.replace("\n", " ") + async def codeql_process_files(self, temp_dir, files_to_process, database_path): """Process multiple files in a single CodeQL database.""" findings_map = {hash_: [] for hash_ in files_to_process.keys()} @@ -469,43 +512,33 @@ async def codeql_process_files(self, temp_dir, files_to_process, database_path): # Extract code snippet and process finding try: - with open(file_path, "r") as f: - lines = f.readlines() - start_line = result.get("start_line") - code_snippet = None - - if isinstance(start_line, int): - start_line -= 1 - full_line = lines[start_line].strip().encode("ascii", "replace").decode() - - if len(full_line) <= 150: - code_snippet = full_line - elif all(isinstance(x, int) for x in [result.get("start_column"), result.get("end_column")]): - code_snippet = full_line[result["start_column"]-1:result["end_column"]] - else: - code_snippet = full_line[:147] + "..." - - if not self.severity_threshold(result["severity"]): - continue - - location_details = f"Line: {start_line + 1}" - if isinstance(result.get("start_column"), int) and isinstance(result.get("end_column"), int): - location_details += f" Cols: {result['start_column']}-{result['end_column']}" - - details_string = ( - f"{result['title']}. 
Description: [{result['full_description']}] " - f"Severity: [{result['severity']}] Location: [{script_url} ({location_details})] " - f"Code Snippet: [{code_snippet}]" - ) - - finding_data = { - "data": { - "description": f"POSSIBLE Client-side Vulnerability: {details_string}", - "script_url": script_url - }, - "context": f"{{module}} module found POSSIBLE Client-side Vulnerability: {details_string}" - } - findings_map[file_hash].append(finding_data) + start_line = result.get("start_line") + code_snippet = self.extract_code_snippet(file_path, start_line, result.get("start_column"), result.get("end_line"), result.get("end_column") ) + + if not self.severity_threshold(result["severity"]): + continue + + location_details = f"Line: {start_line + 1}" + if isinstance(result.get("start_column"), int) and isinstance(result.get("end_column"), int): + location_details += f" Cols: {result['start_column']}-{result['end_column']}" + + details_string = ( + f"{result['title']}. Description: {result['full_description']} " + f"Severity: [{result['severity']}] Location: [{script_url} ({location_details})] " + f"Code Snippet: [{code_snippet}]" + ) + if result.get("message"): + processed_message = await self.process_message(result['message'], file_path) + details_string += f" Details: {processed_message}" + + finding_data = { + "data": { + "description": f"POSSIBLE Client-side Vulnerability: {details_string}", + "script_url": script_url + }, + "context": f"{{module}} module found POSSIBLE Client-side Vulnerability: {details_string}" + } + findings_map[file_hash].append(finding_data) except Exception as e: self.debug(f"Error processing finding for {file_path}: {e}") From fe8fd77296d02eb4bda7c9f9ec61a27836c4eae0 Mon Sep 17 00:00:00 2001 From: liquidsec Date: Thu, 28 Aug 2025 15:10:21 -0400 Subject: [PATCH 25/26] poetry.lock --- poetry.lock | 19 +++++++------------ 1 file changed, 7 insertions(+), 12 deletions(-) diff --git a/poetry.lock b/poetry.lock index 435d44ed97..acee0cf365 100644 
--- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.1.2 and should not be changed by hand. [[package]] name = "annotated-types" @@ -186,7 +186,7 @@ description = "Foreign Function Interface for Python calling C code." optional = false python-versions = ">=3.8" groups = ["main"] -markers = "platform_python_implementation != \"PyPy\" or implementation_name == \"pypy\"" +markers = "implementation_name == \"pypy\" or platform_python_implementation != \"PyPy\"" files = [ {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, @@ -809,7 +809,7 @@ files = [ {file = "importlib_metadata-6.2.1-py3-none-any.whl", hash = "sha256:f65e478a7c2177bd19517a3a15dac094d253446d8690c5f3e71e735a04312374"}, {file = "importlib_metadata-6.2.1.tar.gz", hash = "sha256:5a66966b39ff1c14ef5b2d60c1d842b0141fefff0f4cc6365b4bc9446c652807"}, ] -markers = {main = "python_version < \"3.10\""} +markers = {main = "python_version == \"3.9\""} [package.dependencies] zipp = ">=0.5" @@ -830,7 +830,7 @@ files = [ {file = "importlib_resources-5.0.7-py3-none-any.whl", hash = "sha256:2238159eb743bd85304a16e0536048b3e991c531d1cd51c4a834d1ccf2829057"}, {file = "importlib_resources-5.0.7.tar.gz", hash = "sha256:4df460394562b4581bb4e4087ad9447bd433148fba44241754ec3152499f1d1b"}, ] -markers = {main = "python_version < \"3.10\""} +markers = {main = "python_version == \"3.9\""} [package.extras] docs = ["jaraco.packaging (>=8.2)", "rst.linker (>=1.9)", "sphinx"] @@ -1794,7 +1794,7 @@ description = "C parser in Python" optional = false python-versions = ">=3.8" groups = ["main"] -markers = "platform_python_implementation != \"PyPy\" or implementation_name 
== \"pypy\"" +markers = "implementation_name == \"pypy\" or platform_python_implementation != \"PyPy\"" files = [ {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, @@ -3212,10 +3212,6 @@ groups = ["main"] files = [ {file = "yara_python-4.5.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:20aee068c8f14e8ebb40ebf03e7e2c14031736fbf6f32fca58ad89d211e4aaa0"}, {file = "yara_python-4.5.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9899c3a80e6c543585daf49c5b06ba5987e2f387994a5455d841262ea6e8577c"}, - {file = "yara_python-4.5.2-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:399bb09f81d38876a06e269f68bbe810349aa0bb47fe79866ea3fc58ce38d30f"}, - {file = "yara_python-4.5.2-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:c78608c6bf3d2c379514b1c118a104874df1844bf818087e1bf6bfec0edfd1aa"}, - {file = "yara_python-4.5.2-cp310-cp310-macosx_15_0_arm64.whl", hash = "sha256:f25db30f8ae88a4355e5090a5d6191ee6f2abfdd529b3babc68a1faeba7c2ac8"}, - {file = "yara_python-4.5.2-cp310-cp310-macosx_15_0_x86_64.whl", hash = "sha256:f2866c0b8404086c5acb68cab20854d439009a1b02077aca22913b96138d2f6a"}, {file = "yara_python-4.5.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fc5abddf8767ca923a5a88b38b8057d4fab039323d5c6b2b5be6cba5e6e7350"}, {file = "yara_python-4.5.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bc2216bc73d4918012a4b270a93f9042445c7246b4a668a1bea220fbf64c7990"}, {file = "yara_python-4.5.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b5558325eb7366f610a06e8c7c4845062d6880ee88f1fbc35e92fae333c3333c"}, @@ -3278,7 +3274,7 @@ files = [ {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"}, {file = "zipp-3.21.0.tar.gz", hash 
= "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"}, ] -markers = {main = "python_version < \"3.10\""} +markers = {main = "python_version == \"3.9\""} [package.extras] check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] @@ -3291,5 +3287,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.1" python-versions = "^3.9" -content-hash = "36c6c9d03ee3203db19e00a91fd4431ab91be9c6a9a6db3cabcd47eba5284727" - +content-hash = "04c8a14265ed2a4d885157ce6e6e6cb91be57485eb94cedeb5125bab6b2f6017" From bafcd75abff35550baff5e08a53f8df25c604f3f Mon Sep 17 00:00:00 2001 From: liquidsec Date: Thu, 28 Aug 2025 15:10:51 -0400 Subject: [PATCH 26/26] lint --- bbot/modules/codeql.py | 65 ++++++++++++++++-------------------------- 1 file changed, 25 insertions(+), 40 deletions(-) diff --git a/bbot/modules/codeql.py b/bbot/modules/codeql.py index 60ac80b0c0..5e3ecec262 100644 --- a/bbot/modules/codeql.py +++ b/bbot/modules/codeql.py @@ -1,4 +1,3 @@ -from pathlib import Path from bbot.modules.base import BaseModule from webcap.browser import Browser from webcap import defaults @@ -10,6 +9,7 @@ import time import re + class codeql(BaseModule): watched_events = ["URL"] produced_events = ["HTTP_RESPONSE_DOM"] @@ -25,17 +25,11 @@ class codeql(BaseModule): options_desc = { "in_scope_only": "Only process scripts residing on in-scope hosts", "min_severity": "Minimum severity level to report (error, warning, recommendation, note)", - "suppress_duplicates": "Skip findings when identical files are analyzed on the same host (default: False)" + "suppress_duplicates": "Skip findings when identical files are analyzed on the same host (default: False)", } deps_ansible = [ - { - "name": "Remove existing CodeQL directory", - "file": { - "path": "#{BBOT_TOOLS}/codeql", - "state": "absent" - } - }, + {"name": "Remove existing CodeQL directory", "file": {"path": "#{BBOT_TOOLS}/codeql", "state": "absent"}}, { "name": "Create CodeQL directory", 
"file": {"path": "#{BBOT_TOOLS}/codeql", "state": "directory", "mode": "0755"}, @@ -306,7 +300,6 @@ async def execute_codeql_analyze_db(self, database_path): return results - async def store_and_emit_finding(self, finding_data, event, files_hash): """Store finding in cache and emit event.""" # Store everything except URL and host @@ -316,13 +309,8 @@ async def store_and_emit_finding(self, finding_data, event, files_hash): cache_data["data"].pop("host", None) # Remove host from cached data self.processed_hashes[files_hash].append(cache_data) self.verbose(f"Storing finding in cache for hash: {files_hash}, url: {finding_data['data'].get('url', 'N/A')}") - - await self.emit_event( - finding_data["data"], - "FINDING", - event, - context=finding_data["context"] - ) + + await self.emit_event(finding_data["data"], "FINDING", event, context=finding_data["context"]) async def emit_cached_findings(self, files_hash, event): """Emit all findings from cache for a given hash.""" @@ -332,13 +320,8 @@ async def emit_cached_findings(self, files_hash, event): finding_data["data"] = cached_finding["data"].copy() finding_data["data"]["url"] = str(event.data) # Add current URL finding_data["data"]["host"] = str(event.host) # Add current host - - await self.emit_event( - finding_data["data"], - "FINDING", - event, - context=finding_data["context"] - ) + + await self.emit_event(finding_data["data"], "FINDING", event, context=finding_data["context"]) def extract_code_snippet(self, file_path, start_line, start_col, end_line, end_col): """Extract a code snippet from a file given line and column numbers.""" @@ -354,7 +337,7 @@ def extract_code_snippet(self, file_path, start_line, start_col, end_line, end_c if len(full_line) <= 150: code_snippet = full_line elif all(isinstance(x, int) for x in [start_col, end_col]): - code_snippet = full_line[start_col-1:end_col] + code_snippet = full_line[start_col - 1 : end_col] else: code_snippet = full_line[:147] + "..." 
@@ -394,7 +377,7 @@ async def handle_event(self, event): # Calculate hash file_hash = await self.get_file_hash(file_path) - + if file_hash in self.processed_hashes: if self.config.get("suppress_duplicates", False): self.debug(f"Suppressing duplicate findings for hash: {file_hash} on host {event.host}") @@ -441,7 +424,7 @@ def replace_brackets(match): details = match.group(1) parts = details.split("|") description = parts[0].strip().strip('"') - location = parts[1].replace("relative:///","").strip().strip('"').split(":") + location = parts[1].replace("relative:///", "").strip().strip('"').split(":") start_line, start_col, end_line, end_col = map(int, location[1:]) code_snippet = self.extract_code_snippet(file_path, start_line, start_col, end_line, end_col) if len(code_snippet) > 150: @@ -449,7 +432,7 @@ def replace_brackets(match): return f"{description}: [{code_snippet}]" # Use regex to find and replace double-bracketed sections - pattern = r'\[\[(.*?)\]\]' + pattern = r"\[\[(.*?)\]\]" processed_message = re.sub(pattern, replace_brackets, message) return processed_message.replace("\n", " ") @@ -481,9 +464,9 @@ async def codeql_process_files(self, temp_dir, files_to_process, database_path): finding_data = { "data": { "description": f"POSSIBLE Client-side Vulnerability (YARA Match). 
{description})", - "script_url": script_url + "script_url": script_url, }, - "context": f"{{module}} module found a YARA match for rule '{rule_name}' in {script_url}" + "context": f"{{module}} module found a YARA match for rule '{rule_name}' in {script_url}", } findings_map[file_hash].append(finding_data) except Exception as e: @@ -492,11 +475,11 @@ async def codeql_process_files(self, temp_dir, files_to_process, database_path): # Create and analyze CodeQL database await self.execute_codeql_create_db(temp_dir, database_path) results = await self.execute_codeql_analyze_db(database_path) - + # Process CodeQL results for result in results: file_path = os.path.join(temp_dir, result["file"].lstrip("/")) - + # Map result back to original file file_hash = None script_url = None @@ -505,7 +488,7 @@ async def codeql_process_files(self, temp_dir, files_to_process, database_path): file_hash = h script_url = url break - + if not file_hash: self.debug(f"Could not map result back to original file: {file_path}") continue @@ -513,7 +496,9 @@ async def codeql_process_files(self, temp_dir, files_to_process, database_path): # Extract code snippet and process finding try: start_line = result.get("start_line") - code_snippet = self.extract_code_snippet(file_path, start_line, result.get("start_column"), result.get("end_line"), result.get("end_column") ) + code_snippet = self.extract_code_snippet( + file_path, start_line, result.get("start_column"), result.get("end_line"), result.get("end_column") + ) if not self.severity_threshold(result["severity"]): continue @@ -528,15 +513,15 @@ async def codeql_process_files(self, temp_dir, files_to_process, database_path): f"Code Snippet: [{code_snippet}]" ) if result.get("message"): - processed_message = await self.process_message(result['message'], file_path) + processed_message = await self.process_message(result["message"], file_path) details_string += f" Details: {processed_message}" finding_data = { "data": { "description": f"POSSIBLE 
Client-side Vulnerability: {details_string}", - "script_url": script_url + "script_url": script_url, }, - "context": f"{{module}} module found POSSIBLE Client-side Vulnerability: {details_string}" + "context": f"{{module}} module found POSSIBLE Client-side Vulnerability: {details_string}", } findings_map[file_hash].append(finding_data) @@ -555,10 +540,10 @@ async def get_file_hash(self, file_path): """Calculate a fast hash of a single file using built-in hash function.""" hash_value = 0 try: - with open(file_path, 'rb') as f: + with open(file_path, "rb") as f: while chunk := f.read(8192): hash_value = ((hash_value * 31) + hash(chunk)) & 0xFFFFFFFF except Exception as e: self.debug(f"Error hashing file {file_path}: {e}") - - return str(hash_value) \ No newline at end of file + + return str(hash_value)