diff --git a/.gitattributes b/.gitattributes index 49edcb7119..00bf2637dc 100644 --- a/.gitattributes +++ b/.gitattributes @@ -5,4 +5,4 @@ *.txt text eol=lf *.json text eol=lf *.md text eol=lf -*.sh text eol=lf \ No newline at end of file +*.sh text eol=lf diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 0000000000..55fccf323f --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,24 @@ +--- +name: Bug Report +about: Create a report to help us improve +title: "" +labels: bug +assignees: "" +--- + +**Describe the bug** +What happened vs what was expected? + +**BBOT Command** +Example: `bbot -m httpx -t evilcorp.com` + +**OS, BBOT Installation Method + Version** +Example: `OS: Arch Linux, Installation method: pip, BBOT version: 1.0.3.545` +Note: You can get the BBOT version with `bbot --version` +Note: BBOT is designed from the ground up to run on Linux. Windows and MacOS are not officially supported. If you are using one of these platforms, it's recommended to use Docker. + +**BBOT Config** +Attach your full BBOT preset (to show it, add `--current-preset` to your BBOT command). + +**Logs/Screenshots** +If possible, produce the bug while `--debug` is enabled, and attach the relevant parts of the output. diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 0000000000..101c5fe600 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,10 @@ +--- +name: Feature Request +about: Request a new feature +title: "" +labels: enhancement +assignees: "" +--- + +**Description** +Which feature would you like to see added to BBOT? What are its use cases? diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000000..1ad88ccb68 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,17 @@ +version: 2 +updates: + - package-ecosystem: "pip" + directory: "/" + schedule: + interval: "weekly" + target-branch: "dev" + open-pull-requests-limit: 10 + - package-ecosystem: github-actions + directory: / + groups: + github-actions: + patterns: + - "*" # Group all Actions updates into a single larger pull request + schedule: + interval: weekly + target-branch: "dev" diff --git a/.github/workflows/distro_tests.yml b/.github/workflows/distro_tests.yml new file mode 100644 index 0000000000..7cee967d7d --- /dev/null +++ b/.github/workflows/distro_tests.yml @@ -0,0 +1,72 @@ +name: Tests (Linux Distros) +on: + pull_request: + +concurrency: + group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + +jobs: + test-distros: + runs-on: ubuntu-latest + container: + image: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: ["ubuntu:22.04", "ubuntu:24.04", "debian", "archlinux", "fedora", "kalilinux/kali-rolling", "parrotsec/security"] + steps: + - uses: actions/checkout@v4 + - name: Install Python and Poetry + run: | + if [ -f /etc/os-release ]; then + . 
/etc/os-release + if [ "$ID" = "ubuntu" ] || [ "$ID" = "debian" ] || [ "$ID" = "kali" ] || [ "$ID" = "parrotsec" ]; then + export DEBIAN_FRONTEND=noninteractive + apt-get update + apt-get -y install curl git bash build-essential docker.io libssl-dev zlib1g-dev libbz2-dev libreadline-dev libsqlite3-dev wget llvm libncurses5-dev libncursesw5-dev xz-utils tk-dev libffi-dev liblzma-dev + elif [ "$ID" = "alpine" ]; then + apk add --no-cache bash gcc g++ musl-dev libffi-dev docker curl git make openssl-dev bzip2-dev zlib-dev xz-dev sqlite-dev + elif [ "$ID" = "arch" ]; then + pacman -Syu --noconfirm curl docker git bash base-devel + elif [ "$ID" = "fedora" ]; then + dnf install -y curl docker git bash gcc make openssl-devel bzip2-devel libffi-devel zlib-devel xz-devel tk-devel gdbm-devel readline-devel sqlite-devel python3-libdnf5 + elif [ "$ID" = "gentoo" ]; then + echo "media-libs/libglvnd X" >> /etc/portage/package.use/libglvnd + emerge-webrsync + emerge --update --newuse dev-vcs/git media-libs/mesa curl docker bash + fi + fi + + # Re-run the script with bash + exec bash -c " + curl https://pyenv.run | bash + export PATH=\"$HOME/.pyenv/bin:\$PATH\" + export PATH=\"$HOME/.local/bin:\$PATH\" + eval \"\$(pyenv init --path)\" + eval \"\$(pyenv init -)\" + eval \"\$(pyenv virtualenv-init -)\" + pyenv install 3.11 + pyenv global 3.11 + pyenv rehash + python3.11 -m pip install --user pipx + python3.11 -m pipx ensurepath + pipx install poetry + " + - name: Set OS Environment Variable + run: echo "OS_NAME=${{ matrix.os }}" | sed 's|[:/]|_|g' >> $GITHUB_ENV + - name: Run tests + run: | + export PATH="$HOME/.local/bin:$PATH" + export PATH="$HOME/.pyenv/bin:$PATH" + export PATH="$HOME/.pyenv/shims:$PATH" + export BBOT_DISTRO_TESTS=true + poetry env use python3.11 + poetry install + poetry run pytest --reruns 2 -o timeout_func_only=true --timeout 1200 --disable-warnings --log-cli-level=INFO . + - name: Upload Debug Logs + if: always() + uses: actions/upload-artifact@v4 + with: + name: pytest-debug-logs-${{ env.OS_NAME }} + path: pytest_debug.log diff --git a/.github/workflows/docs_updater.yml b/.github/workflows/docs_updater.yml new file mode 100644 index 0000000000..a63d0987a6 --- /dev/null +++ b/.github/workflows/docs_updater.yml @@ -0,0 +1,34 @@ +name: Daily Docs Update + +on: + schedule: + - cron: '30 2 * * *' # Runs daily at 2:30 AM UTC, a less congested time + workflow_dispatch: # Allows manual triggering of the workflow + +jobs: + update_docs: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + token: ${{ secrets.BBOT_DOCS_UPDATER_PAT }} + ref: dev # Checkout the dev branch + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: "3.x" + - name: Install dependencies + run: | + pip install poetry + poetry install + - name: Generate docs + run: | + poetry run bbot/scripts/docs.py + - name: Create or Update Pull Request + uses: peter-evans/create-pull-request@v7 + with: + token: ${{ secrets.BBOT_DOCS_UPDATER_PAT }} + branch: update-docs + base: dev + title: "Automated Docs Update" + body: "This is an automated pull request to update the documentation." 
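The docs-update job above boils down to a two-step pattern: regenerate the docs, then let `peter-evans/create-pull-request` open a PR only if something actually changed. A minimal local sketch of the same check, assuming a git checkout with the Poetry environment already installed:

```python
import subprocess

# regenerate the docs exactly as the workflow does
subprocess.run(["poetry", "run", "bbot/scripts/docs.py"], check=True)

# `git diff --quiet` exits non-zero when the working tree has uncommitted changes
dirty = subprocess.run(["git", "diff", "--quiet"]).returncode != 0
print("docs changed - a PR would be opened" if dirty else "docs already up to date")
```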
diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml deleted file mode 100644 index 1bddf62dfd..0000000000 --- a/.github/workflows/lint.yml +++ /dev/null @@ -1,22 +0,0 @@ -name: lint - -on: [push, pull_request] - -jobs: - lint: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - uses: psf/black@stable - with: - options: "--check" - - name: Install Python 3 - uses: actions/setup-python@v1 - with: - python-version: 3.9 - - name: Install dependencies - run: | - pip install flake8 - - name: flake8 - run: | - flake8 --select F,E722 --ignore F403,F405,F541 --per-file-ignores="*/__init__.py:F401,F403" diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 3db1d8fc6c..40a8c27287 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -1,47 +1,171 @@ -name: tests -on: [push, pull_request] +name: Tests +on: + push: + branches: + - stable + - dev + pull_request: + +concurrency: + group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + jobs: test: runs-on: ubuntu-latest + strategy: + # if one python version fails, let the others finish + fail-fast: false + matrix: + python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Python - uses: actions/setup-python@v3 + uses: actions/setup-python@v5 with: - python-version: 3.9 + python-version: ${{ matrix.python-version }} + - name: Set Python Version Environment Variable + run: echo "PYTHON_VERSION=${{ matrix.python-version }}" | sed 's|[:/]|_|g' >> $GITHUB_ENV - name: Install dependencies run: | pip install poetry poetry install - - name: Run tests with pytest + - name: Lint run: | - poetry run pytest --disable-warnings --cov-report xml:cov.xml --cov=bbot . - - uses: codecov/codecov-action@v2 + poetry run ruff check + poetry run ruff format --check + - name: Run tests + run: | + poetry run pytest -vv --exitfirst --reruns 2 -o timeout_func_only=true --timeout 1200 --disable-warnings --log-cli-level=INFO --cov-config=bbot/test/coverage.cfg --cov-report xml:cov.xml --cov=bbot . 
+ - name: Upload Debug Logs + if: always() + uses: actions/upload-artifact@v4 + with: + name: pytest-debug-logs-${{ env.PYTHON_VERSION }} + path: pytest_debug.log + - name: Upload Code Coverage + uses: codecov/codecov-action@v5 with: token: ${{ secrets.CODECOV_TOKEN }} files: ./cov.xml - fail_ci_if_error: true verbose: true - pypi: - runs-on: ubuntu-latest + publish_code: needs: test - if: github.event_name == 'push' && github.ref == 'refs/heads/stable' + runs-on: ubuntu-latest + if: github.event_name == 'push' && (github.ref == 'refs/heads/dev' || github.ref == 'refs/heads/stable') continue-on-error: true steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 - name: Set up Python - uses: actions/setup-python@v3 + uses: actions/setup-python@v5 with: - python-version: 3.9 + python-version: "3.x" - name: Install dependencies run: | python -m pip install --upgrade pip - pip install build - - name: Build package + pip install poetry build + poetry self add "poetry-dynamic-versioning[plugin]" + - name: Build Pypi package + if: github.ref == 'refs/heads/stable' || github.ref == 'refs/heads/dev' run: python -m build - - name: Publish package - uses: pypa/gh-action-pypi-publish@release/v1 + - name: Publish Pypi package + if: github.ref == 'refs/heads/stable' || github.ref == 'refs/heads/dev' + uses: pypa/gh-action-pypi-publish@release/v1.12 with: password: ${{ secrets.PYPI_API_TOKEN }} + - name: Get BBOT version + id: version + run: echo "BBOT_VERSION=$(poetry version | cut -d' ' -f2)" >> $GITHUB_OUTPUT + - name: Publish to Docker Hub (dev) + if: github.event_name == 'push' && github.ref == 'refs/heads/dev' + uses: elgohr/Publish-Docker-Github-Action@v5 + with: + name: blacklanternsecurity/bbot + username: ${{ secrets.DOCKER_USERNAME }} + password: ${{ secrets.DOCKER_PASSWORD }} + tags: "latest,dev,${{ steps.version.outputs.BBOT_VERSION }}" + - name: Publish to Docker Hub (stable) + if: github.event_name == 'push' && github.ref == 'refs/heads/stable' + uses: elgohr/Publish-Docker-Github-Action@v5 + with: + name: blacklanternsecurity/bbot + username: ${{ secrets.DOCKER_USERNAME }} + password: ${{ secrets.DOCKER_PASSWORD }} + tags: "stable,${{ steps.version.outputs.BBOT_VERSION }}" + - name: Docker Hub Description + if: github.event_name == 'push' && github.ref == 'refs/heads/dev' + uses: peter-evans/dockerhub-description@v4 + with: + username: ${{ secrets.DOCKER_USERNAME }} + password: ${{ secrets.DOCKER_PASSWORD }} + repository: blacklanternsecurity/bbot + outputs: + BBOT_VERSION: ${{ steps.version.outputs.BBOT_VERSION }} + publish_docs: + runs-on: ubuntu-latest + if: github.event_name == 'push' && (github.ref == 'refs/heads/stable' || github.ref == 'refs/heads/dev') + steps: + - uses: actions/checkout@v4 + with: + token: ${{ secrets.BBOT_DOCS_UPDATER_PAT }} + - uses: actions/setup-python@v5 + with: + python-version: "3.11" + - run: echo "cache_id=$(date --utc '+%V')" >> $GITHUB_ENV + - uses: actions/cache@v4 + with: + key: mkdocs-material-${{ env.cache_id }} + path: .cache + restore-keys: | + mkdocs-material- + - name: Install dependencies + run: | + pip install poetry + poetry install --only=docs + - name: Configure Git + run: | + git config user.name github-actions + git config user.email github-actions@github.com + git fetch origin gh-pages:refs/remotes/origin/gh-pages + if git show-ref --verify --quiet refs/heads/gh-pages; then + git branch -f gh-pages origin/gh-pages + else + git branch --track gh-pages origin/gh-pages + fi + - name: Generate docs (stable 
branch) + if: github.ref == 'refs/heads/stable' + run: | + poetry run mike deploy Stable + - name: Generate docs (dev branch) + if: github.ref == 'refs/heads/dev' + run: | + poetry run mike deploy Dev + - name: Publish docs + run: | + git switch gh-pages + git push + # tag_commit: + # needs: publish_code + # runs-on: ubuntu-latest + # if: github.event_name == 'push' && github.ref == 'refs/heads/stable' + # steps: + # - uses: actions/checkout@v4 + # with: + # ref: ${{ github.head_ref }} + # fetch-depth: 0 # Fetch all history for all tags and branches + # - name: Configure git + # run: | + # git config --local user.email "info@blacklanternsecurity.com" + # git config --local user.name "GitHub Actions" + # - name: Tag commit + # run: | + # VERSION="${{ needs.publish_code.outputs.BBOT_VERSION }}" + # if [[ "${{ github.ref }}" == "refs/heads/dev" ]]; then + # TAG_MESSAGE="Dev Release $VERSION" + # elif [[ "${{ github.ref }}" == "refs/heads/stable" ]]; then + # TAG_MESSAGE="Stable Release $VERSION" + # fi + # git tag -a $VERSION -m "$TAG_MESSAGE" + # git push origin --tags diff --git a/.github/workflows/version_updater.yml b/.github/workflows/version_updater.yml new file mode 100644 index 0000000000..81f4490514 --- /dev/null +++ b/.github/workflows/version_updater.yml @@ -0,0 +1,112 @@ +name: Version Updater +on: + schedule: + # Runs at 00:00 every day + - cron: '0 0 * * *' + workflow_dispatch: # Adds the ability to manually trigger the workflow + +jobs: + update-nuclei-version: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + ref: dev + fetch-depth: 0 + token: ${{ secrets.BBOT_DOCS_UPDATER_PAT }} + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: '3.x' + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install requests + - name: Get latest version + id: get-latest-version + run: | + import os, requests + response = requests.get('https://api.github.com/repos/projectdiscovery/nuclei/releases/latest') + version = response.json()['tag_name'].lstrip('v') + release_notes = response.json()['body'] + with open(os.getenv('GITHUB_ENV'), 'a') as env_file: + env_file.write(f"latest_version={version}\n") + env_file.write(f"release_notes<<EOF\n{release_notes}\nEOF\n") + shell: python + - name: Get current version + run: echo "current_version=$(grep -m 1 -oP '(?<=\"version\": \")[^\"]*' bbot/modules/deadly/nuclei.py)" >> $GITHUB_ENV + - name: Update version + id: update-version + if: env.latest_version != env.current_version + run: "sed -i '0,/\"version\": \".*\",/ s/\"version\": \".*\",/\"version\": \"${{ env.latest_version }}\",/g' bbot/modules/deadly/nuclei.py" + - name: Create pull request to update the version + if: steps.update-version.outcome == 'success' + uses: peter-evans/create-pull-request@v7 + with: + token: ${{ secrets.BBOT_DOCS_UPDATER_PAT }} + commit-message: "Update nuclei" + title: "Update nuclei to ${{ env.latest_version }}" + body: | + This PR uses https://api.github.com/repos/projectdiscovery/nuclei/releases/latest to obtain the latest version of nuclei and update the version in bbot/modules/deadly/nuclei.py.
+ + # Release notes: + ${{ env.release_notes }} + branch: "update-nuclei" + committer: blsaccess + author: blsaccess + assignees: "TheTechromancer" + update-trufflehog-version: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + ref: dev + fetch-depth: 0 + token: ${{ secrets.BBOT_DOCS_UPDATER_PAT }} + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: '3.x' + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install requests + - name: Get latest version + id: get-latest-version + run: | + import os, requests + response = requests.get('https://api.github.com/repos/trufflesecurity/trufflehog/releases/latest') + version = response.json()['tag_name'].lstrip('v') + release_notes = response.json()['body'] + with open(os.getenv('GITHUB_ENV'), 'a') as env_file: + env_file.write(f"latest_version={version}\n") + env_file.write(f"release_notes<<EOF\n{release_notes}\nEOF\n") + shell: python + - name: Get current version + run: echo "current_version=$(grep -m 1 -oP '(?<=\"version\": \")[^\"]*' bbot/modules/trufflehog.py)" >> $GITHUB_ENV + - name: Update version + id: update-version + if: env.latest_version != env.current_version + run: "sed -i '0,/\"version\": \".*\",/ s/\"version\": \".*\",/\"version\": \"${{ env.latest_version }}\",/g' bbot/modules/trufflehog.py" + - name: Create pull request to update the version + if: steps.update-version.outcome == 'success' + uses: peter-evans/create-pull-request@v7 + with: + token: ${{ secrets.BBOT_DOCS_UPDATER_PAT }} + commit-message: "Update trufflehog" + title: "Update trufflehog to ${{ env.latest_version }}" + body: | + This PR uses https://api.github.com/repos/trufflesecurity/trufflehog/releases/latest to obtain the latest version of trufflehog and update the version in bbot/modules/trufflehog.py. + + # Release notes: + ${{ env.release_notes }} + branch: "update-trufflehog" + committer: blsaccess + author: blsaccess + assignees: "TheTechromancer" diff --git a/.gitignore b/.gitignore index c18dd8d83c..0c6b86a341 100644 --- a/.gitignore +++ b/.gitignore @@ -1 +1,4 @@ __pycache__/ +.coverage* +/data/ +/neo4j/ diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 0000000000..c85f090f5f --- /dev/null +++ b/.gitmodules @@ -0,0 +1,4 @@ +[submodule "bbot/modules/playground"] + path = bbot/modules/playground + url = https://github.com/blacklanternsecurity/bbot-module-playground + branch = main diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000000..d6643f2ad3 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,48 @@ +# Learn more about this config here: https://pre-commit.com/ + +# To enable these pre-commit hooks run: +# `pipx install pre-commit` or `brew install pre-commit` +# Then in the project root directory run `pre-commit install` + +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v5.0.0 + hooks: + - id: check-added-large-files + - id: check-ast + - id: check-builtin-literals + - id: check-byte-order-marker + - id: check-case-conflict + # - id: check-docstring-first + # - id: check-executables-have-shebangs + - id: check-json + - id: check-merge-conflict + # - id: check-shebang-scripts-are-executable + - id: check-symlinks + - id: check-toml + - id: check-vcs-permalinks + - id: check-xml + # - id: check-yaml + - id: debug-statements + - id: destroyed-symlinks + # - id: detect-private-key + - id: end-of-file-fixer + - id: file-contents-sorter + - id: fix-byte-order-marker + - id: forbid-new-submodules + - id: forbid-submodules + - id: mixed-line-ending + - id: requirements-txt-fixer + - id: sort-simple-yaml + - id: trailing-whitespace + + - repo: 
https://github.com/astral-sh/ruff-pre-commit + rev: v0.8.0 + hooks: + - id: ruff + - id: ruff-format + + - repo: https://github.com/abravalheri/validate-pyproject + rev: v0.23 + hooks: + - id: validate-pyproject diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000000..e893c7fb1a --- /dev/null +++ b/Dockerfile @@ -0,0 +1,17 @@ +FROM python:3.10-slim + +ENV LANG=C.UTF-8 +ENV LC_ALL=C.UTF-8 +ENV PIP_NO_CACHE_DIR=off + +WORKDIR /usr/src/bbot + +RUN apt-get update && apt-get install -y openssl gcc git make unzip curl wget vim nano sudo + +COPY . . + +RUN pip install . + +WORKDIR /root + +ENTRYPOINT [ "bbot" ] diff --git a/README.md b/README.md index 96ca9ba37c..a9782bb62b 100644 --- a/README.md +++ b/README.md @@ -1,228 +1,437 @@ -![bbot_banner](https://user-images.githubusercontent.com/20261699/158000235-6c1ace81-a267-4f8e-90a1-f4c16884ebac.png) +[![bbot_banner](https://github.com/user-attachments/assets/f02804ce-9478-4f1e-ac4d-9cf5620a3214)](https://github.com/blacklanternsecurity/bbot) -# BEE·bot -### OSINT automation for hackers. +[![Python Version](https://img.shields.io/badge/python-3.9+-FF8400)](https://www.python.org) [![License](https://img.shields.io/badge/license-GPLv3-FF8400.svg)](https://github.com/blacklanternsecurity/bbot/blob/dev/LICENSE) [![DEF CON Recon Village 2024](https://img.shields.io/badge/DEF%20CON%20Demo%20Labs-2023-FF8400.svg)](https://www.reconvillage.org/talks) [![PyPi Downloads](https://static.pepy.tech/personalized-badge/bbot?right_color=orange&left_color=grey)](https://pepy.tech/project/bbot) [![Ruff](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json)](https://github.com/astral-sh/ruff) [![Tests](https://github.com/blacklanternsecurity/bbot/actions/workflows/tests.yml/badge.svg?branch=stable)](https://github.com/blacklanternsecurity/bbot/actions?query=workflow%3A"tests") [![Codecov](https://codecov.io/gh/blacklanternsecurity/bbot/branch/dev/graph/badge.svg?token=IR5AZBDM5K)](https://codecov.io/gh/blacklanternsecurity/bbot) [![Discord](https://img.shields.io/discord/859164869970362439)](https://discord.com/invite/PZqkgxu5SA) -[![Python Version](https://img.shields.io/badge/python-3.9+-FF8400)](https://www.python.org) [![Black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) [![License](https://img.shields.io/badge/license-GPLv3-FF8400.svg)](https://github.com/blacklanternsecurity/bbot/blob/dev/LICENSE) [![Tests](https://github.com/blacklanternsecurity/bbot/workflows/tests/badge.svg)](https://github.com/blacklanternsecurity/bbot/actions?query=workflow%3A"tests") [![Codecov](https://codecov.io/gh/blacklanternsecurity/bbot/branch/dev/graph/badge.svg?token=IR5AZBDM5K)](https://codecov.io/gh/blacklanternsecurity/bbot) +### **BEE·bot** is a multipurpose scanner inspired by [Spiderfoot](https://github.com/smicallef/spiderfoot), built to automate your **Recon**, **Bug Bounties**, and **ASM**! -![subdomain demo](https://user-images.githubusercontent.com/20261699/182274919-d4f5aa69-993a-40aa-95d5-f5e69e96026c.gif) +https://github.com/blacklanternsecurity/bbot/assets/20261699/e539e89b-92ea-46fa-b893-9cde94eebf81 -### **BBOT** is a **recursive**, **modular** OSINT framework written in Python. - -It is capable of executing the entire OSINT process in a single command, including subdomain enumeration, port scanning, web screenshots (with its `gowitness` module), vulnerability scanning (with `nuclei`), and much more. 
- -BBOT currently has over **50 modules** and counting. +_A BBOT scan in real-time - visualization with [VivaGraphJS](https://github.com/blacklanternsecurity/bbot-vivagraphjs)_ ## Installation -~~~bash + +```bash +# stable version pipx install bbot -~~~ -Prerequisites: -- Python 3.9 or newer MUST be installed -- `pipx` is recommended as an alternative to `pip` because it installs BBOT in its own Python environment. -If you need help with installation, please refer to the [wiki](https://github.com/blacklanternsecurity/bbot/wiki#installation). +# bleeding edge (dev branch) +pipx install --pip-args '\--pre' bbot +``` + +_For more installation methods, including [Docker](https://hub.docker.com/r/blacklanternsecurity/bbot), see [Getting Started](https://www.blacklanternsecurity.com/bbot/Stable/)_ + +## Example Commands + +### 1) Subdomain Finder + +Passive API sources plus a recursive DNS brute-force with target-specific subdomain mutations. + +```bash +# find subdomains of evilcorp.com +bbot -t evilcorp.com -p subdomain-enum + +# passive sources only +bbot -t evilcorp.com -p subdomain-enum -rf passive +``` + + + +
+subdomain-enum.yml + +```yaml +description: Enumerate subdomains via APIs, brute-force + +flags: + # enable every module with the subdomain-enum flag + - subdomain-enum + +output_modules: + # output unique subdomains to TXT file + - subdomains + +config: + dns: + threads: 25 + brute_threads: 1000 + # put your API keys here + # modules: + # github: + # api_key: "" + # chaos: + # api_key: "" + # securitytrails: + # api_key: "" + +``` + +
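The same preset can be driven from Python via the `Scanner` API demonstrated later in this README; a minimal sketch (`event.data` holding the raw hostname is an assumption based on BBOT's event API, which this diff doesn't show directly):

```python
from bbot.scanner import Scanner

# equivalent of: bbot -t evilcorp.com -p subdomain-enum
scan = Scanner("evilcorp.com", presets=["subdomain-enum"])
for event in scan.start():
    # keep just the resolved subdomains
    if event.type == "DNS_NAME":
        print(event.data)
```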
+ + + +BBOT consistently finds 20-50% more subdomains than other tools. The bigger the domain, the bigger the difference. To learn how this is possible, see [How It Works](https://www.blacklanternsecurity.com/bbot/Dev/how_it_works/). + +![subdomain-stats-ebay](https://github.com/blacklanternsecurity/bbot/assets/20261699/de3e7f21-6f52-4ac4-8eab-367296cd385f) + +### 2) Web Spider + +```bash +# crawl evilcorp.com, extracting emails and other goodies +bbot -t evilcorp.com -p spider +``` + + + +
+spider.yml + +```yaml +description: Recursive web spider + +modules: + - httpx + +blacklist: + # Prevent spider from invalidating sessions by logging out + - "RE:/.*(sign|log)[_-]?out" + +config: + web: + # how many links to follow in a row + spider_distance: 2 + # don't follow links whose directory depth is higher than 4 + spider_depth: 4 + # maximum number of links to follow per page + spider_links_per_page: 25 + +``` + +
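The spider's limits are ordinary config values, so they can also be tightened per-scan. A hedged sketch using the `Scanner` API, assuming its `config` keyword accepts the same keys as the preset above:

```python
from bbot.scanner import Scanner

# crawl www.evilcorp.com, but keep the spider on a shorter leash than the preset default
scan = Scanner(
    "www.evilcorp.com",
    presets=["spider"],
    config={"web": {"spider_distance": 1, "spider_depth": 2}},
)
for event in scan.start():
    print(event)
```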
+ + + +### 3) Email Gatherer + +```bash +# quick email enum with free APIs + scraping +bbot -t evilcorp.com -p email-enum + +# pair with subdomain enum + web spider for maximum yield +bbot -t evilcorp.com -p email-enum subdomain-enum spider +``` + + + +
+email-enum.yml + +```yaml +description: Enumerate email addresses from APIs, web crawling, etc. + +flags: + - email-enum -## Scanning with BBOT +output_modules: + - emails -#### Note: the `httpx` module is needed in most scans because it is used by BBOT to visit webpages. For details, see the [wiki](https://github.com/blacklanternsecurity/bbot/wiki#note-on-the-httpx-module). +``` -### Examples -~~~bash -# list modules -bbot -l +
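Presets stack in the Python API just as they do on the CLI; a minimal sketch of the maximum-yield combination above, collecting `EMAIL_ADDRESS` events as they arrive (`event.data` as the raw address is an assumption based on BBOT's event API):

```python
from bbot.scanner import Scanner

# equivalent of: bbot -t evilcorp.com -p email-enum subdomain-enum spider
scan = Scanner("evilcorp.com", presets=["email-enum", "subdomain-enum", "spider"])
emails = set()
for event in scan.start():
    if event.type == "EMAIL_ADDRESS":
        emails.add(event.data)
print(f"{len(emails):,} unique email addresses found")
```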
-# subdomain enumeration -bbot --flags subdomain-enum --modules httpx --targets evilcorp.com + -# passive only -bbot --flags passive --targets evilcorp.com +### 4) Web Scanner -# web screenshots with gowitness -bbot -m naabu httpx gowitness --name my_scan --output-dir . -t evilcorp.com 1.2.3.4/28 4.3.2.1 targets.txt +```bash +# run a light web scan against www.evilcorp.com +bbot -t www.evilcorp.com -p web-basic + +# run a heavy web scan against www.evilcorp.com +bbot -t www.evilcorp.com -p web-thorough +``` + + + +
+web-basic.yml + +```yaml +description: Quick web scan + +include: + - iis-shortnames + +flags: + - web-basic + +``` + +
+ + + + + +
+web-thorough.yml + +```yaml +description: Aggressive web scan + +include: + # include the web-basic preset + - web-basic + +flags: + - web-thorough + +``` + +
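Since `web-thorough` includes the `web-basic` preset, the heavy scan is a strict superset of the light one. A minimal Python sketch of the light scan, filtering for the `FINDING` and `VULNERABILITY` event types these web modules produce:

```python
from bbot.scanner import Scanner

# equivalent of: bbot -t www.evilcorp.com -p web-basic
scan = Scanner("www.evilcorp.com", presets=["web-basic"])
for event in scan.start():
    # FINDING and VULNERABILITY events carry the interesting results
    if event.type in ("FINDING", "VULNERABILITY"):
        print(event)
```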
+ + + +### 5) Everything Everywhere All at Once + +```bash +# everything everywhere all at once +bbot -t evilcorp.com -p kitchen-sink --allow-deadly + +# roughly equivalent to: +bbot -t evilcorp.com -p subdomain-enum cloud-enum code-enum email-enum spider web-basic paramminer dirbust-light web-screenshots --allow-deadly +``` + + + +
+kitchen-sink.yml + +```yaml +description: Everything everywhere all at once + +include: + - subdomain-enum + - cloud-enum + - code-enum + - email-enum + - spider + - web-basic + - paramminer + - dirbust-light + - web-screenshots + - baddns-intense + +config: + modules: + baddns: + enable_references: True + +``` + +
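Note the `--allow-deadly` flag: as the updated `bbot/cli.py` later in this diff shows, modules flagged `deadly` are refused unless it is passed. The same gate can be inspected from Python; a sketch using the `preset` attributes that `bbot/cli.py` itself relies on:

```python
from bbot.scanner import Scanner

scan = Scanner("evilcorp.com", presets=["kitchen-sink"])
# the same check bbot/cli.py performs before requiring --allow-deadly
deadly = [
    m for m in scan.preset.scan_modules
    if "deadly" in scan.preset.preloaded_module(m).get("flags", [])
]
print(f"deadly modules enabled: {', '.join(deadly) or 'none'}")
```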
+ + + +## How it Works + +Click the graph below to explore the [inner workings](https://www.blacklanternsecurity.com/bbot/Stable/how_it_works/) of BBOT. + +[![image](https://github.com/blacklanternsecurity/bbot/assets/20261699/e55ba6bd-6d97-48a6-96f0-e122acc23513)](https://www.blacklanternsecurity.com/bbot/Stable/how_it_works/) + +## Output Modules + +- [Neo4j](docs/scanning/output.md#neo4j) +- [Teams](docs/scanning/output.md#teams) +- [Discord](docs/scanning/output.md#discord) +- [Slack](docs/scanning/output.md#slack) +- [Postgres](docs/scanning/output.md#postgres) +- [MySQL](docs/scanning/output.md#mysql) +- [SQLite](docs/scanning/output.md#sqlite) +- [Splunk](docs/scanning/output.md#splunk) +- [Elasticsearch](docs/scanning/output.md#elasticsearch) +- [CSV](docs/scanning/output.md#csv) +- [JSON](docs/scanning/output.md#json) +- [HTTP](docs/scanning/output.md#http) +- [Websocket](docs/scanning/output.md#websocket) + +...and [more](docs/scanning/output.md)! + +## BBOT as a Python Library + +#### Synchronous +```python +from bbot.scanner import Scanner -# web spider (search for emails, etc.) -bbot -m httpx -c web_spider_distance=2 -t www.evilcorp.com -~~~ +if __name__ == "__main__": + scan = Scanner("evilcorp.com", presets=["subdomain-enum"]) + for event in scan.start(): + print(event) +``` -## Using BBOT as a Python library -~~~python +#### Asynchronous +```python from bbot.scanner import Scanner -# this will prompt for a sudo password on first run -# if you prefer, you can export BBOT_SUDO_PASS instead -scan = Scanner("evilcorp.com", "1.2.3.0/24", modules=["naabu"], output_modules=["http"]) - -len(scan.target) # --> 257 -"1.2.3.4" in scan.target # --> True -"4.3.2.1" in scan.target # --> False -"www.evilcorp.com" in scan.target # --> True - -scan.start() -~~~ - -# Output -BBOT outputs to STDOUT by default, but it can output in multiple formats simultaneously (with `--output-module`). -~~~bash -# tee to a file -bbot -f subdomain-enum -t evilcorp.com | tee evilcorp.txt - -# output to JSON -bbot --output-module json -f subdomain-enum -t evilcorp.com | jq - -# output to CSV, TXT, and JSON, in current directory -bbot -o . --output-module human csv json -f subdomain-enum -t evilcorp.com -~~~ -For every scan, BBOT generates a unique and mildly-entertaining name like `fuzzy_gandalf`. Output for that scan, including the word cloud and any gowitness screenshots, etc., are saved to a folder by that name in `~/.bbot/scans`. The most recent 20 scans are kept, and older ones are removed. You can change the location of BBOT's output with `--output`, and you can also pick a custom scan name with `--name`. - -If you reuse a scan name, it will append to its original output files and leverage the previous word cloud. - -# Neo4j -Neo4j is the funnest (and prettiest) way to view and interact with BBOT data. 
- -![neo4j](https://user-images.githubusercontent.com/20261699/182398274-729f3c48-c23c-4db0-8c2e-8b403c1bf790.png) - -- You can get Neo4j up and running with a single docker command: -~~~bash -docker run -p 7687:7687 -p 7474:7474 --env NEO4J_AUTH=neo4j/bbotislife neo4j -~~~ -- After that, run bbot with `--output-modules neo4j` -~~~bash -bbot -f subdomain-enum -t evilcorp.com --output-modules human neo4j -~~~ -- Browse data at http://localhost:7474 - -# Usage -~~~ -$ bbot --help -usage: bbot [-h] [-t TARGET [TARGET ...]] [-w WHITELIST [WHITELIST ...]] [-b BLACKLIST [BLACKLIST ...]] [-s] [-n SCAN_NAME] [-m MODULE [MODULE ...]] [-l] [-em MODULE [MODULE ...]] [-f FLAG [FLAG ...]] - [-rf FLAG [FLAG ...]] [-ef FLAG [FLAG ...]] [-om MODULE [MODULE ...]] [-o DIR] [-c [CONFIG ...]] [--allow-deadly] [-v] [-d] [--force] [-y] [--dry-run] [--current-config] [--save-wordcloud FILE] - [--load-wordcloud FILE] [--no-deps | --force-deps | --retry-deps | --ignore-failed-deps] [-a] - -Bighuge BLS OSINT Tool - -options: - -h, --help show this help message and exit - -n SCAN_NAME, --name SCAN_NAME - Name of scan (default: random) - -m MODULE [MODULE ...], --modules MODULE [MODULE ...] - Modules to enable. Choices: affiliates,asn,aspnet_viewstate,azure_tenant,binaryedge,blind_ssrf,bypass403,c99,censys,certspotter,cookie_brute,crobat,crt,dnscommonsrv,dnsdumpster,dnszonetransfer,emailformat,ffuf,ffuf_shortnames,generic_ssrf,getparam_brute,github,gowitness,hackertarget,header_brute,host_header,httpx,hunt,hunterio,iis_shortnames,ipneighbor,leakix,massdns,naabu,ntlm,nuclei,passivetotal,pgp,securitytrails,shodan_dns,skymem,smuggler,sslcert,sublist3r,telerik,threatminer,urlscan,viewdns,wappalyzer,wayback,zoomeye - -l, --list-modules List available modules. - -em MODULE [MODULE ...], --exclude-modules MODULE [MODULE ...] - Exclude these modules. - -f FLAG [FLAG ...], --flags FLAG [FLAG ...] - Enable modules by flag. Choices: active,aggressive,brute-force,deadly,passive,portscan,report,safe,slow,subdomain-enum,web - -rf FLAG [FLAG ...], --require-flags FLAG [FLAG ...] - Disable modules that don't have these flags (e.g. --require-flags passive) - -ef FLAG [FLAG ...], --exclude-flags FLAG [FLAG ...] - Disable modules with these flags. (e.g. --exclude-flags brute-force) - -om MODULE [MODULE ...], --output-modules MODULE [MODULE ...] - Output module(s). Choices: csv,http,human,json,neo4j,websocket - -o DIR, --output-dir DIR - -c [CONFIG ...], --config [CONFIG ...] - custom config file, or configuration options in key=value format: 'modules.shodan.api_key=1234' - --allow-deadly Enable running modules tagged as "deadly" - -v, --verbose Be more verbose - -d, --debug Enable debugging - --force Run scan even if module setups fail - -y, --yes Skip scan confirmation prompt - --dry-run Abort before executing scan - --current-config Show current config in YAML format - -Target: - -t TARGET [TARGET ...], --targets TARGET [TARGET ...] - Targets to seed the scan - -w WHITELIST [WHITELIST ...], --whitelist WHITELIST [WHITELIST ...] - What's considered in-scope (by default it's the same as --targets) - -b BLACKLIST [BLACKLIST ...], --blacklist BLACKLIST [BLACKLIST ...] 
- Don't touch these things - -s, --strict-scope Don't consider subdomains of target/whitelist to be in-scope - -Word cloud: - Save/load wordlist of common words gathered during a scan - - --save-wordcloud FILE - Output wordcloud to custom file when the scan completes - --load-wordcloud FILE - Load wordcloud from a custom file - -Module dependencies: - Control how modules install their dependencies - - --no-deps Don't install module dependencies - --force-deps Force install all module dependencies - --retry-deps Try again to install failed module dependencies - --ignore-failed-deps Run modules even if they have failed dependencies - -Agent: - Report back to a central server - - -a, --agent-mode Start in agent mode -~~~ - -# BBOT Config -BBOT loads its config from these places in the following order: - -- `~/.config/bbot/defaults.yml` -- `~/.config/bbot/bbot.yml` <-- Use this one as your main config -- `~/.config/bbot/secrets.yml` <-- Use this one for sensitive stuff like API keys -- command line (via `--config`) - -Command-line arguments take precedence over all others. You can give BBOT a custom config file with `--config myconf.yml`, or individual arguments like this: `--config http_proxy=http://127.0.0.1:8080 modules.shodan_dns.api_key=1234`. To display the full and current BBOT config, including any command-line arguments, use `bbot --current-config`. - -For explanations of config options, see `defaults.yml` or the [wiki](https://github.com/blacklanternsecurity/bbot/wiki#yaml-config) - -# Modules -| Module | Needs API Key | Description | Flags | Produced Events | -|------------------|-----------------|-------------------------------------------------------------------|----------------------------------------------------|------------------------------------------------------| -| aspnet_viewstate | | Parse web pages for viewstates and check them against blacklist3r | active,safe,web | VULNERABILITY | -| bypass403 | | Check 403 pages for common bypasses | active,aggressive,web | FINDING | -| cookie_brute | | Check for common HTTP cookie parameters | active,aggressive,brute-force,slow,web | FINDING | -| dnszonetransfer | | Attempt DNS zone transfers | active,safe,subdomain-enum | DNS_NAME | -| ffuf | | A fast web fuzzer written in Go | active,aggressive,brute-force,deadly,web | URL | -| ffuf_shortnames | | Use ffuf in combination IIS shortnames | active,aggressive,brute-force,web | URL | -| generic_ssrf | | Check for generic SSRFs | active,aggressive,web | VULNERABILITY | -| getparam_brute | | Check for common HTTP GET parameters | active,aggressive,brute-force,slow,web | FINDING | -| gowitness | | Take screenshots of webpages | active,safe,web | SCREENSHOT | -| header_brute | | Check for common HTTP header parameters | active,aggressive,brute-force,slow,web | FINDING | -| host_header | | Try common HTTP Host header spoofing techniques | active,aggressive,web | FINDING | -| httpx | | Visit webpages. 
Many other modules rely on httpx | active,safe,web | HTTP_RESPONSE,URL | -| hunt | | Watch for commonly-exploitable HTTP parameters | active,safe,web | FINDING | -| iis_shortnames | | Check for IIS shortname vulnerability | active,safe | URL_HINT | -| naabu | | Execute port scans with naabu | active,aggressive,portscan | OPEN_TCP_PORT | -| ntlm | | Watch for HTTP endpoints that support NTLM authentication | active,safe,web | DNS_NAME,FINDING | -| nuclei | | Fast and customisable vulnerability scanner | active,aggressive,deadly,web | VULNERABILITY | -| smuggler | | Check for HTTP smuggling | active,aggressive,brute-force,slow,web | FINDING | -| sslcert | | Visit open ports and retrieve SSL certificates | active,email-enum,safe,subdomain-enum | DNS_NAME,EMAIL_ADDRESS | -| telerik | | Scan for critical Telerik vulnerabilities | active,aggressive,web | FINDING,VULNERABILITY | -| vhost | | Fuzz for virtual hosts | active,aggressive,brute-force,deadly,slow,web | DNS_NAME,VHOST | -| wappalyzer | | Extract technologies from web responses | active,safe,web | TECHNOLOGY | -| affiliates | | Summarize affiliate domains at the end of a scan | passive,report,safe | | -| asn | | Query bgpview.io for ASNs | passive,report,safe,subdomain-enum | ASN | -| azure_tenant | | Query Azure for tenant sister domains | passive,safe,subdomain-enum | DNS_NAME | -| binaryedge | X | Query the BinaryEdge API | passive,safe,subdomain-enum | DNS_NAME,EMAIL_ADDRESS,IP_ADDRESS,OPEN_PORT,PROTOCOL | -| c99 | X | Query the C99 API for subdomains | passive,safe,subdomain-enum | DNS_NAME | -| censys | X | Query the Censys API | email-enum,passive,safe,subdomain-enum | DNS_NAME,EMAIL_ADDRESS,IP_ADDRESS,OPEN_PORT,PROTOCOL | -| certspotter | | Query Certspotter's API for subdomains | passive,safe,subdomain-enum | DNS_NAME | -| crobat | | Query Project Crobat for subdomains | passive,safe,subdomain-enum | DNS_NAME | -| crt | | Query crt.sh (certificate transparency) for subdomains | passive,safe,subdomain-enum | DNS_NAME | -| dnscommonsrv | | Check for common SRV records | passive,safe,subdomain-enum | DNS_NAME | -| dnsdumpster | | Query dnsdumpster for subdomains | passive,safe,subdomain-enum | DNS_NAME | -| emailformat | | Query email-format.com for email addresses | email-enum,passive,safe | EMAIL_ADDRESS | -| github | X | Query Github's API for related repositories | passive,safe,subdomain-enum | URL_UNVERIFIED | -| hackertarget | | Query the hackertarget.com API for subdomains | passive,safe,subdomain-enum | DNS_NAME | -| hunterio | X | Query hunter.io for emails | email-enum,passive,safe,subdomain-enum | DNS_NAME,EMAIL_ADDRESS,URL_UNVERIFIED | -| ipneighbor | | Look beside IPs in their surrounding subnet | aggressive,passive,subdomain-enum | IP_ADDRESS | -| leakix | | Query leakix.net for subdomains | passive,safe,subdomain-enum | DNS_NAME | -| massdns | | Brute-force subdomains with massdns (highly effective) | aggressive,brute-force,passive,slow,subdomain-enum | DNS_NAME | -| passivetotal | X | Query the PassiveTotal API for subdomains | passive,safe,subdomain-enum | DNS_NAME | -| pgp | | Query common PGP servers for email addresses | email-enum,passive,safe | EMAIL_ADDRESS | -| securitytrails | X | Query the SecurityTrails API for subdomains | passive,safe,subdomain-enum | DNS_NAME | -| shodan_dns | X | Query Shodan for subdomains | passive,safe,subdomain-enum | DNS_NAME | -| skymem | | Query skymem.info for email addresses | email-enum,passive,safe | EMAIL_ADDRESS | -| sublist3r | | Query sublist3r's API for subdomains | 
passive,safe,subdomain-enum | DNS_NAME | -| threatminer | | Query threatminer's API for subdomains | passive,safe,subdomain-enum | DNS_NAME | -| urlscan | | Query urlscan.io for subdomains | passive,safe,subdomain-enum | DNS_NAME,URL_UNVERIFIED | -| viewdns | | Query viewdns.info's reverse whois for related domains | passive,safe,subdomain-enum | DNS_NAME | -| wayback | | Query archive.org's API for subdomains | passive,safe,subdomain-enum | DNS_NAME,URL_UNVERIFIED | -| zoomeye | X | Query ZoomEye's API for subdomains | passive,safe,subdomain-enum | DNS_NAME | +async def main(): + scan = Scanner("evilcorp.com", presets=["subdomain-enum"]) + async for event in scan.async_start(): + print(event.json()) + +if __name__ == "__main__": + import asyncio + asyncio.run(main()) +``` + +
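A usage note on the asynchronous form: because `async_start()` yields events as they arrive, they can be filtered or persisted in-flight. A small sketch that streams events to a JSONL file, assuming `event.json()` returns a JSON-serializable dict as its use above suggests:

```python
import asyncio
import json

from bbot.scanner import Scanner

async def main():
    scan = Scanner("evilcorp.com", presets=["subdomain-enum"])
    with open("events.jsonl", "w") as f:
        async for event in scan.async_start():
            # stream each event to disk the moment it is emitted
            f.write(json.dumps(event.json(), default=str) + "\n")

if __name__ == "__main__":
    asyncio.run(main())
```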
+SEE: This Nefarious Discord Bot + +A [BBOT Discord Bot](https://www.blacklanternsecurity.com/bbot/Stable/dev/#discord-bot-example) that responds to the `/scan` command. Scan the internet from the comfort of your discord server! + +![bbot-discord](https://github.com/blacklanternsecurity/bbot/assets/20261699/22b268a2-0dfd-4c2a-b7c5-548c0f2cc6f9) + +
+ +## Feature Overview + +- Support for Multiple Targets +- Web Screenshots +- Suite of Offensive Web Modules +- NLP-powered Subdomain Mutations +- Native Output to Neo4j (and more) +- Automatic dependency install with Ansible +- Search entire attack surface with custom YARA rules +- Python API + Developer Documentation + +## Targets + +BBOT accepts an unlimited number of targets via `-t`. You can specify targets either directly on the command line or in files (or both!): + +```bash +bbot -t evilcorp.com evilcorp.org 1.2.3.0/24 -p subdomain-enum +``` + +Targets can be any of the following: + +- DNS Name (`evilcorp.com`) +- IP Address (`1.2.3.4`) +- IP Range (`1.2.3.0/24`) +- Open TCP Port (`192.168.0.1:80`) +- URL (`https://www.evilcorp.com`) +- Email Address (`bob@evilcorp.com`) +- Organization (`ORG:evilcorp`) +- Username (`USER:bobsmith`) +- Filesystem (`FILESYSTEM:/tmp/asdf`) +- Mobile App (`MOBILE_APP:https://play.google.com/store/apps/details?id=com.evilcorp.app`) + +For more information, see [Targets](https://www.blacklanternsecurity.com/bbot/Stable/scanning/#targets-t). To learn how BBOT handles scope, see [Scope](https://www.blacklanternsecurity.com/bbot/Stable/scanning/#scope). + +## API Keys + +Similar to Amass or Subfinder, BBOT supports API keys for various third-party services such as SecurityTrails, etc. + +The standard way to do this is to enter your API keys in **`~/.config/bbot/bbot.yml`**. Note that multiple API keys are allowed: +```yaml +modules: + shodan_dns: + api_key: 4f41243847da693a4f356c0486114bc6 + c99: + # multiple API keys + api_key: + - 21a270d5f59c9b05813a72bb41707266 + - ea8f243d9885cf8ce9876a580224fd3c + - 5bc6ed268ab6488270e496d3183a1a27 + virustotal: + api_key: dd5f0eee2e4a99b71a939bded450b246 + securitytrails: + api_key: d9a05c3fd9a514497713c54b4455d0b0 +``` + +If you like, you can also specify them on the command line: +```bash +bbot -c modules.virustotal.api_key=dd5f0eee2e4a99b71a939bded450b246 +``` + +For details, see [Configuration](https://www.blacklanternsecurity.com/bbot/Stable/scanning/configuration/). + +## Complete Lists of Modules, Flags, etc. + +- Complete list of [Modules](https://www.blacklanternsecurity.com/bbot/Stable/modules/list_of_modules/). +- Complete list of [Flags](https://www.blacklanternsecurity.com/bbot/Stable/scanning/#list-of-flags). +- Complete list of [Presets](https://www.blacklanternsecurity.com/bbot/Stable/scanning/presets_list/). + - Complete list of [Global Config Options](https://www.blacklanternsecurity.com/bbot/Stable/scanning/configuration/#global-config-options). + - Complete list of [Module Config Options](https://www.blacklanternsecurity.com/bbot/Stable/scanning/configuration/#module-config-options). 
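The API keys described above can also be supplied programmatically when BBOT is used as a library; a hedged sketch, assuming the `Scanner` `config` keyword accepts the same `modules.*.api_key` structure as `bbot.yml`:

```python
from bbot.scanner import Scanner

scan = Scanner(
    "evilcorp.com",
    presets=["subdomain-enum"],
    # same placeholder key as the bbot.yml example above
    config={"modules": {"securitytrails": {"api_key": "d9a05c3fd9a514497713c54b4455d0b0"}}},
)
```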
+ +## Documentation + + +- **User Manual** + - **Basics** + - [Getting Started](https://www.blacklanternsecurity.com/bbot/Stable/) + - [How it Works](https://www.blacklanternsecurity.com/bbot/Stable/how_it_works) + - [Comparison to Other Tools](https://www.blacklanternsecurity.com/bbot/Stable/comparison) + - **Scanning** + - [Scanning Overview](https://www.blacklanternsecurity.com/bbot/Stable/scanning/) + - **Presets** + - [Overview](https://www.blacklanternsecurity.com/bbot/Stable/scanning/presets) + - [List of Presets](https://www.blacklanternsecurity.com/bbot/Stable/scanning/presets_list) + - [Events](https://www.blacklanternsecurity.com/bbot/Stable/scanning/events) + - [Output](https://www.blacklanternsecurity.com/bbot/Stable/scanning/output) + - [Tips and Tricks](https://www.blacklanternsecurity.com/bbot/Stable/scanning/tips_and_tricks) + - [Advanced Usage](https://www.blacklanternsecurity.com/bbot/Stable/scanning/advanced) + - [Configuration](https://www.blacklanternsecurity.com/bbot/Stable/scanning/configuration) + - **Modules** + - [List of Modules](https://www.blacklanternsecurity.com/bbot/Stable/modules/list_of_modules) + - [Nuclei](https://www.blacklanternsecurity.com/bbot/Stable/modules/nuclei) + - [Custom YARA Rules](https://www.blacklanternsecurity.com/bbot/Stable/modules/custom_yara_rules) + - **Misc** + - [Contribution](https://www.blacklanternsecurity.com/bbot/Stable/contribution) + - [Release History](https://www.blacklanternsecurity.com/bbot/Stable/release_history) + - [Troubleshooting](https://www.blacklanternsecurity.com/bbot/Stable/troubleshooting) +- **Developer Manual** + - [Development Overview](https://www.blacklanternsecurity.com/bbot/Stable/dev/) + - [Setting Up a Dev Environment](https://www.blacklanternsecurity.com/bbot/Stable/dev/dev_environment) + - [BBOT Internal Architecture](https://www.blacklanternsecurity.com/bbot/Stable/dev/architecture) + - [How to Write a BBOT Module](https://www.blacklanternsecurity.com/bbot/Stable/dev/module_howto) + - [Unit Tests](https://www.blacklanternsecurity.com/bbot/Stable/dev/tests) + - [Discord Bot Example](https://www.blacklanternsecurity.com/bbot/Stable/dev/discord_bot) + - **Code Reference** + - [Scanner](https://www.blacklanternsecurity.com/bbot/Stable/dev/scanner) + - [Presets](https://www.blacklanternsecurity.com/bbot/Stable/dev/presets) + - [Event](https://www.blacklanternsecurity.com/bbot/Stable/dev/event) + - [Target](https://www.blacklanternsecurity.com/bbot/Stable/dev/target) + - [BaseModule](https://www.blacklanternsecurity.com/bbot/Stable/dev/basemodule) + - [BBOTCore](https://www.blacklanternsecurity.com/bbot/Stable/dev/core) + - [Engine](https://www.blacklanternsecurity.com/bbot/Stable/dev/engine) + - **Helpers** + - [Overview](https://www.blacklanternsecurity.com/bbot/Stable/dev/helpers/) + - [Command](https://www.blacklanternsecurity.com/bbot/Stable/dev/helpers/command) + - [DNS](https://www.blacklanternsecurity.com/bbot/Stable/dev/helpers/dns) + - [Interactsh](https://www.blacklanternsecurity.com/bbot/Stable/dev/helpers/interactsh) + - [Miscellaneous](https://www.blacklanternsecurity.com/bbot/Stable/dev/helpers/misc) + - [Web](https://www.blacklanternsecurity.com/bbot/Stable/dev/helpers/web) + - [Word Cloud](https://www.blacklanternsecurity.com/bbot/Stable/dev/helpers/wordcloud) + + +## Contribution + +Some of the best BBOT modules were written by the community. BBOT is being constantly improved; every day it grows more powerful! + +We welcome contributions. Not just code, but ideas too! 
If you have an idea for a new feature, please let us know in [Discussions](https://github.com/blacklanternsecurity/bbot/discussions). If you want to get your hands dirty, see [Contribution](https://www.blacklanternsecurity.com/bbot/Stable/contribution/). There you can find setup instructions and a simple tutorial on how to write a BBOT module. We also have extensive [Developer Documentation](https://www.blacklanternsecurity.com/bbot/Stable/dev/). + +Thanks to these amazing people for contributing to BBOT! :heart: + +

+[Contributors](https://github.com/blacklanternsecurity/bbot/graphs/contributors)

+ +Special thanks to: + +- @TheTechromancer for creating BBOT +- @liquidsec for his extensive work on BBOT's web hacking features, including [badsecrets](https://github.com/blacklanternsecurity/badsecrets) and [baddns](https://github.com/blacklanternsecurity/baddns) +- Steve Micallef (@smicallef) for creating Spiderfoot +- @kerrymilan for his Neo4j and Ansible expertise +- @domwhewell-sage for his family of badass code-looting modules +- @aconite33 and @amiremami for their ruthless testing +- Aleksei Kornev (@alekseiko) for granting us ownership of the bbot Pypi repository <3 diff --git a/bbot-docker.sh b/bbot-docker.sh new file mode 100755 index 0000000000..3db958f94a --- /dev/null +++ b/bbot-docker.sh @@ -0,0 +1,3 @@ +# OUTPUTS SCAN DATA TO ~/.bbot/scans + +docker run --rm -it -v "$HOME/.bbot/scans:/root/.bbot/scans" -v "$HOME/.config/bbot:/root/.config/bbot" blacklanternsecurity/bbot:stable "$@" diff --git a/bbot/__init__.py b/bbot/__init__.py index e15b2ce3ac..8746d8131d 100644 --- a/bbot/__init__.py +++ b/bbot/__init__.py @@ -1,7 +1,4 @@ -# global app config -from .core import configurator +# version placeholder (replaced by poetry-dynamic-versioning) +__version__ = "v0.0.0" -config = configurator.config - -# helpers -from .core import helpers +from .scanner import Scanner, Preset diff --git a/bbot/agent/__init__.py b/bbot/agent/__init__.py deleted file mode 100644 index d2361b7a3c..0000000000 --- a/bbot/agent/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from .agent import Agent diff --git a/bbot/agent/agent.py b/bbot/agent/agent.py deleted file mode 100644 index aa344f7adf..0000000000 --- a/bbot/agent/agent.py +++ /dev/null @@ -1,176 +0,0 @@ -import json -import logging -import threading -import websocket -from time import sleep -from omegaconf import OmegaConf - -from . 
import messages -from bbot.scanner import Scanner -from bbot.scanner.dispatcher import Dispatcher - -log = logging.getLogger("bbot.core.agent") - - -class Agent: - def __init__(self, config): - self.config = config - self.url = self.config.get("agent_url", "") - self.token = self.config.get("agent_token", "") - self.scan = None - self.thread = None - self._scan_lock = threading.Lock() - - self.dispatcher = Dispatcher() - self.dispatcher.on_status = self.on_scan_status - self.dispatcher.on_finish = self.on_scan_finish - - def setup(self): - websocket.enableTrace(False) - if not self.url: - log.error(f"Must specify agent_url") - return False - if not self.token: - log.error(f"Must specify agent_token") - return False - self.ws = websocket.WebSocketApp( - f"{self.url}/control/", - on_open=self.on_open, - on_message=self.on_message, - on_error=self.on_error, - on_close=self.on_close, - header={"Authorization": f"Bearer {self.token}"}, - ) - return True - - def start(self): - not_keyboardinterrupt = False - while 1: - not_keyboardinterrupt = self.ws.run_forever() - if not not_keyboardinterrupt: - break - sleep(1) - - def send(self, message): - while 1: - try: - self.ws.send(json.dumps(message)) - break - except Exception as e: - log.warning(f"Error sending message: {e}, retrying") - sleep(1) - continue - - def on_message(self, ws, message): - try: - message = json.loads(message) - except Exception as e: - log.warning(f'Failed to JSON-decode message "{message}": {e}') - return - message = messages.Message(**message) - - if message.command == "ping": - if self.scan is None: - self.send({"conversation": str(message.conversation), "message_type": "pong"}) - return - - command_type = None - try: - command_type = getattr(messages, message.command) - except AttributeError: - log.warning(f'Invalid command: "{message.command}"') - - command_args = command_type(**message.arguments) - command_fn = getattr(self, message.command) - response = self.err_handle(command_fn, **command_args.dict()) - log.info(str(response)) - self.send({"conversation": str(message.conversation), "message": response}) - - def on_error(self, ws, error): - log.warning(f"on_error: {error}") - - def on_close(self, ws, close_status_code, close_msg): - log.warning("Closed connection") - - def on_open(self, ws): - log.success("Opened connection") - - def start_scan(self, scan_id="", targets=[], modules=[], output_modules=[], config={}): - with self._scan_lock: - if self.scan is None: - log.success( - f"Starting scan with targets={targets}, modules={modules}, output_modules={output_modules}" - ) - output_module_config = OmegaConf.create( - {"output_modules": {"websocket": {"url": f"{self.url}/scan/{scan_id}/", "token": self.token}}} - ) - config = OmegaConf.create(config) - config = OmegaConf.merge(self.config, config, output_module_config) - output_modules = list(set(output_modules + ["websocket"])) - self.scan = Scanner( - *targets, - scan_id=scan_id, - modules=modules, - output_modules=output_modules, - config=config, - dispatcher=self.dispatcher, - ) - self.thread = threading.Thread(target=self.scan.start, daemon=True) - self.thread.start() - - return {"success": f"Started scan", "scan_id": self.scan.id} - else: - msg = f"Scan {self.scan.id} already in progress" - log.warning(msg) - return {"error": msg, "scan_id": self.scan.id} - - def stop_scan(self): - log.warning("Stopping scan") - try: - with self._scan_lock: - if self.scan is None: - msg = "Scan not in progress" - log.warning(msg) - return {"error": msg} - scan_id = 
str(self.scan.id) - self.scan.stop(wait=True) - msg = f"Stopped scan {scan_id}" - log.warning(msg) - self.scan = None - return {"success": msg, "scan_id": scan_id} - except Exception as e: - import traceback - - log.warning(f"Error while stopping scan: {e}") - log.debug(traceback.format_exc()) - finally: - self.scan = None - self.thread = None - - def scan_status(self): - with self._scan_lock: - if self.scan is None: - self.thread = None - msg = "Scan not in progress" - log.warning(msg) - return {"error": msg} - return {"success": "Polled scan", "scan_status": self.scan.status} - - def on_scan_status(self, status, scan_id): - self.send({"message_type": "scan_status_change", "status": str(status), "scan_id": scan_id}) - - def on_scan_finish(self, scan): - self.scan = None - self.thread = None - - @staticmethod - def err_handle(callback, *args, **kwargs): - try: - return callback(*args, **kwargs) - except Exception as e: - msg = f"Error in {callback.__qualname__}(): {e}" - log.error(msg) - import traceback - - log.debug(traceback.format_exc()) - return {"error": msg} diff --git a/bbot/agent/messages.py b/bbot/agent/messages.py deleted file mode 100644 index 3530fed114..0000000000 --- a/bbot/agent/messages.py +++ /dev/null @@ -1,27 +0,0 @@ -from uuid import UUID -from pydantic import BaseModel - - -class Message(BaseModel): - conversation: UUID - command: str - arguments: dict - - -### COMMANDS ### - - -class start_scan(BaseModel): - scan_id: str - targets: list - modules: list - output_modules: list = [] - config: dict = {} - - -class stop_scan(BaseModel): - pass - - -class scan_status(BaseModel): - pass diff --git a/bbot/cli.py b/bbot/cli.py index 0f7771b184..7f1e91c72e 100755 --- a/bbot/cli.py +++ b/bbot/cli.py @@ -1,266 +1,294 @@ #!/usr/bin/env python3 -import os import sys import logging -from omegaconf import OmegaConf -from contextlib import suppress +import multiprocessing +from bbot.errors import * +from bbot import __version__ +from bbot.logger import log_to_stderr +from bbot.core.helpers.misc import chain_lists -# fix tee buffering -sys.stdout.reconfigure(line_buffering=True) -# logging -from bbot.core.logger import init_logging, get_log_level +if multiprocessing.current_process().name == "MainProcess": + silent = "-s" in sys.argv or "--silent" in sys.argv -logging_queue, logging_handlers = init_logging() + if not silent: + ascii_art = rf"""  ______  _____ ____ _______ + | ___ \| __ \ / __ \__ __| + | |___) | |__) | | | | | | + | ___ <| __ <| | | | | | + | |___) | |__) | |__| | | | + |______/|_____/ \____/ |_| + BIGHUGE BLS OSINT TOOL {__version__} -import bbot.core.errors -from bbot.modules import module_loader -from bbot.core.configurator.args import parser +www.blacklanternsecurity.com/bbot +""" + print(ascii_art, file=sys.stderr) -log = logging.getLogger("bbot.cli") -sys.stdout.reconfigure(line_buffering=True) +scan_name = "" -log_level = get_log_level() +async def _main(): + import asyncio + import traceback + from contextlib import suppress + # fix tee buffering + sys.stdout.reconfigure(line_buffering=True) -def log_to_stderr(msg, level=logging.INFO): - if log_level <= level: - handler = logging_handlers["stderr"] - record = logging.LogRecord( - name="bbot.cli", msg=str(msg), level=level, pathname=None, lineno=0, args=None, exc_info=None - ) - print(handler.formatter.format(record), file=sys.stderr) + log = logging.getLogger("bbot.cli") + from bbot.scanner import Scanner + from bbot.scanner.preset import Preset -from . 
import config - - -def main(): - - err = False - scan_name = "" + global scan_name try: + # start by creating a default scan preset + preset = Preset(_log=True, name="bbot_cli_main") + # parse command line arguments and merge into preset + try: + preset.parse_args() + except BBOTArgumentError as e: + log_to_stderr(str(e), level="WARNING") + log.trace(traceback.format_exc()) + return + # ensure arguments (-c config options etc.) are valid + options = preset.args.parsed + # print help if no arguments if len(sys.argv) == 1: - parser.print_help() + print(preset.args.parser.format_help()) sys.exit(1) + return + + # --version + if options.version: + print(__version__) + sys.exit(0) + return + + # --list-presets + if options.list_presets: + print("") + print("### PRESETS ###") + print("") + for row in preset.presets_table().splitlines(): + print(row) + return + + # if we're listing modules or their options + if options.list_modules or options.list_output_modules or options.list_module_options: + # if no modules or flags are specified, enable everything + if not (options.modules or options.output_modules or options.flags): + for module, preloaded in preset.module_loader.preloaded().items(): + module_type = preloaded.get("type", "scan") + preset.add_module(module, module_type=module_type) + + if options.modules or options.output_modules or options.flags: + preset._default_output_modules = options.output_modules + preset._default_internal_modules = [] + + preset.bake() + + # --list-modules + if options.list_modules: + print("") + print("### MODULES ###") + print("") + modules = sorted(set(preset.scan_modules + preset.internal_modules)) + for row in preset.module_loader.modules_table(modules).splitlines(): + print(row) + return + + # --list-output-modules + if options.list_output_modules: + print("") + print("### OUTPUT MODULES ###") + print("") + for row in preset.module_loader.modules_table(preset.output_modules).splitlines(): + print(row) + return + + # --list-module-options + if options.list_module_options: + print("") + print("### MODULE OPTIONS ###") + print("") + for row in preset.module_loader.modules_options_table(preset.modules).splitlines(): + print(row) + return + + # --list-flags + if options.list_flags: + flags = preset.flags if preset.flags else None + print("") + print("### FLAGS ###") + print("") + for row in preset.module_loader.flags_table(flags=flags).splitlines(): + print(row) + return - # note: command line arguments are in bbot/core/configurator/args.py try: - options = parser.parse_args() - except bbot.core.errors.ArgumentError as e: - log.warning(e) - sys.exit(1) - # this is intentional since sys.exit() is monkeypatched in the tests + scan = Scanner(preset=preset) + except (PresetAbortError, ValidationError) as e: + log.warning(str(e)) return - # --current-config - if options.current_config: - log.stdout(f"{OmegaConf.to_yaml(config)}") + deadly_modules = [ + m for m in scan.preset.scan_modules if "deadly" in preset.preloaded_module(m).get("flags", []) + ] + if deadly_modules and not options.allow_deadly: + log.hugewarning(f"You enabled the following deadly modules: {','.join(deadly_modules)}") + log.hugewarning("Deadly modules are highly intrusive") + log.hugewarning("Please specify --allow-deadly to continue") + return False + + # --current-preset + if options.current_preset: + print(scan.preset.to_yaml()) sys.exit(0) + return - log.verbose(f'Command: {" ".join(sys.argv)}') - - if options.agent_mode: - from bbot.agent import Agent - - agent = Agent(config) - success = 
agent.setup() - if success: - agent.start() - - else: - from bbot.scanner import Scanner - - try: - if options.list_modules and not any([options.flags, options.modules]): - modules = set(module_loader.preloaded(type="scan")) - else: - modules = set(options.modules) - # enable modules by flags - for m, c in module_loader.preloaded().items(): - if m not in modules: - flags = c.get("flags", []) - if "deadly" in flags: - continue - for f in options.flags: - if f in flags: - log.verbose(f'Enabling {m} because it has flag "{f}"') - modules.add(m) - - scanner = Scanner( - *options.targets, - modules=list(modules), - output_modules=options.output_modules, - config=config, - name=options.name, - whitelist=options.whitelist, - blacklist=options.blacklist, - strict_scope=options.strict_scope, - force_start=options.force, - ) - - scan_name = str(scanner.name) - - # enable modules by dependency - # this is only a basic surface-level check - # todo: recursive dependency graph with networkx or topological sort? - all_modules = list(set(scanner._scan_modules + scanner._internal_modules + scanner._output_modules)) - while 1: - changed = False - dep_choices = module_loader.recommend_dependencies(all_modules) - if not dep_choices: - break - for event_type, deps in dep_choices.items(): - if event_type in ("*", "all"): - continue - # skip resolving dependency if a target provides the missing type - if any(e.type == event_type for e in scanner.target.events): - continue - required_by = deps.get("required_by", []) - recommended = deps.get("recommended", []) - if not recommended: - log.hugewarning( - f"{len(required_by):,} modules ({','.join(required_by)}) rely on {event_type} but no modules produce it" - ) - elif len(recommended) == 1: - log.verbose( - f"Enabling {next(iter(recommended))} because {len(required_by):,} modules ({','.join(required_by)}) rely on it for {event_type}" - ) - all_modules = list(set(all_modules + list(recommended))) - scanner._scan_modules = list(set(scanner._scan_modules + list(recommended))) - changed = True - else: - log.hugewarning( - f"{len(required_by):,} modules ({','.join(required_by)}) rely on {event_type} but no enabled module produces it" - ) - log.warning( - f"Recommend enabling one or more of the following modules which produce {event_type}:" - ) - for m in recommended: - log.warning(f" - {m}") - if not changed: - break - - # required flags - modules = set(scanner._scan_modules) - for m in scanner._scan_modules: - flags = module_loader._preloaded.get(m, {}).get("flags", []) - if not all(f in flags for f in options.require_flags): - log.verbose( - f"Removing {m} because it does not have the required flags: {'+'.join(options.require_flags)}" - ) - modules.remove(m) - - # excluded flags - for m in scanner._scan_modules: - flags = module_loader._preloaded.get(m, {}).get("flags", []) - if any(f in flags for f in options.exclude_flags): - log.verbose(f"Removing {m} because of excluded flag: {','.join(options.exclude_flags)}") - modules.remove(m) - - # excluded modules - for m in options.exclude_modules: - if m in modules: - log.verbose(f"Removing {m} because it is excluded") - modules.remove(m) - scanner._scan_modules = list(modules) - - log_fn = log.info - if options.list_modules: - log_fn = log.stdout - - module_list = list(module_loader.preloaded(type="scan").items()) - module_list.sort(key=lambda x: x[0]) - module_list.sort(key=lambda x: "passive" in x[-1]["flags"]) - header = ["Module", "Needs API Key", "Description", "Flags", "Produced Events"] - table = [] - for 
module_name, preloaded in module_list: - if module_name in modules: - produced_events = sorted(preloaded.get("produced_events", [])) - flags = sorted(preloaded.get("flags", [])) - api_key_required = "" - meta = preloaded.get("meta", {}) - if meta.get("auth_required", False): - api_key_required = "X" - description = meta.get("description", "") - table.append( - [module_name, api_key_required, description, ",".join(flags), ",".join(produced_events)] - ) - for row in scanner.helpers.make_table(table, header).splitlines(): - log_fn(row) - if options.list_modules: - return - - deadly_modules = [ - m[0] for m in module_list if "deadly" in m[-1]["flags"] and m[0] in scanner._scan_modules - ] - if scanner._scan_modules and deadly_modules: - if not options.allow_deadly: - log.hugewarning(f"You enabled the following deadly modules: {','.join(deadly_modules)}") - log.hugewarning(f"Deadly modules are highly intrusive") - log.hugewarning(f"Please specify --allow-deadly to continue") - return - - scanner.helpers.word_cloud.load(options.load_wordcloud) - - scanner.prep() - - if not options.dry_run: - if not options.agent_mode and not options.yes: - log.hugesuccess(f"Scan ready. Press enter to execute {scanner.name}") - input() - - scanner.start() - - except Exception: - raise - finally: - with suppress(NameError): - scanner.cleanup() - - except bbot.core.errors.BBOTError as e: - import traceback - - log_to_stderr(e, level=logging.ERROR) - log_to_stderr(traceback.format_exc(), level=logging.DEBUG) - err = True - - except Exception: - import traceback - - log_to_stderr(f"Encountered unknown error: {traceback.format_exc()}", level=logging.ERROR) - err = True + # --current-preset-full + if options.current_preset_full: + print(scan.preset.to_yaml(full_config=True)) + sys.exit(0) + return - except KeyboardInterrupt: - msg = "Interrupted" - if scan_name: - msg = f"You killed {scan_name}" - log_to_stderr(msg, level=logging.ERROR) - err = True + # --install-all-deps + if options.install_all_deps: + all_modules = list(preset.module_loader.preloaded()) + scan.helpers.depsinstaller.force_deps = True + succeeded, failed = await scan.helpers.depsinstaller.install(*all_modules) + if failed: + log.hugewarning(f"Failed to install dependencies for the following modules: {', '.join(failed)}") + return False + log.hugesuccess(f"Successfully installed dependencies for the following modules: {', '.join(succeeded)}") + return True + + scan_name = str(scan.name) + + log.verbose("") + log.verbose("### MODULES ENABLED ###") + log.verbose("") + for row in scan.preset.module_loader.modules_table(scan.preset.modules).splitlines(): + log.verbose(row) + + scan.helpers.word_cloud.load() + await scan._prep() + + if not options.dry_run: + log.trace(f"Command: {' '.join(sys.argv)}") + + if sys.stdin.isatty(): + # warn if any targets belong directly to a cloud provider + if not scan.preset.strict_scope: + for event in scan.target.seeds.events: + if event.type == "DNS_NAME": + cloudcheck_result = scan.helpers.cloudcheck(event.host) + if cloudcheck_result: + scan.hugewarning( + f'YOUR TARGET CONTAINS A CLOUD DOMAIN: "{event.host}". You\'re in for a wild ride!' + ) + + if not options.yes: + log.hugesuccess(f"Scan ready. 
Press enter to execute {scan.name}")
+                    input()
+
+                import os
+                import re
+                import fcntl
+                from bbot.core.helpers.misc import smart_decode
+
+                def handle_keyboard_input(keyboard_input):
+                    kill_regex = re.compile(r"kill (?P<modules>[a-z0-9_ ,]+)")
+                    if keyboard_input:
+                        log.verbose(f'Got keyboard input: "{keyboard_input}"')
+                        kill_match = kill_regex.match(keyboard_input)
+                        if kill_match:
+                            modules = kill_match.group("modules")
+                            if modules:
+                                modules = chain_lists(modules)
+                                for module in modules:
+                                    if module in scan.modules:
+                                        log.hugewarning(f'Killing module: "{module}"')
+                                        scan.kill_module(module, message="killed by user")
+                                    else:
+                                        log.warning(f'Invalid module: "{module}"')
+                    else:
+                        scan.preset.core.logger.toggle_log_level(logger=log)
+                        scan.modules_status(_log=True)
+
+                reader = asyncio.StreamReader()
+                protocol = asyncio.StreamReaderProtocol(reader)
+                await asyncio.get_running_loop().connect_read_pipe(lambda: protocol, sys.stdin)
+
+                # set stdout and stderr to blocking mode
+                # this is needed to prevent BlockingIOErrors in logging etc.
+                fds = [sys.stdout.fileno(), sys.stderr.fileno()]
+                for fd in fds:
+                    flags = fcntl.fcntl(fd, fcntl.F_GETFL)
+                    fcntl.fcntl(fd, fcntl.F_SETFL, flags & ~os.O_NONBLOCK)
+
+                async def akeyboard_listen():
+                    try:
+                        allowed_errors = 10
+                        while 1:
+                            keyboard_input = None
+                            try:
+                                keyboard_input = smart_decode((await reader.readline()).strip())
+                                allowed_errors = 10
+                            except Exception as e:
+                                log_to_stderr(f"Error in keyboard listen loop: {e}", level="TRACE")
+                                log_to_stderr(traceback.format_exc(), level="TRACE")
+                                allowed_errors -= 1
+                            if keyboard_input is not None:
+                                handle_keyboard_input(keyboard_input)
+                            if allowed_errors <= 0:
+                                break
+                    except Exception as e:
+                        log_to_stderr(f"Error in keyboard listen task: {e}", level="ERROR")
+                        log_to_stderr(traceback.format_exc(), level="TRACE")
+
+                keyboard_listen_task = asyncio.create_task(akeyboard_listen())  # noqa F841
+
+            await scan.async_start_without_generator()
+
+        return True
+
+    except BBOTError as e:
+        log.error(str(e))
+        log.trace(traceback.format_exc())
 
     finally:
         # save word cloud
-        with suppress(Exception):
-            save_success, filename = scanner.helpers.word_cloud.save(options.save_wordcloud)
-            if save_success:
-                log_to_stderr(f"Saved word cloud ({len(scanner.helpers.word_cloud):,} words) to {filename}")
+        with suppress(BaseException):
+            scan.helpers.word_cloud.save()
         # remove output directory if empty
-        with suppress(Exception):
-            scanner.home.rmdir()
-    if err:
-        os._exit(1)
-
-    # debug troublesome modules
-    """
-    from time import sleep
-    while 1:
-        scanner.manager.modules_status(_log=True)
-        sleep(1)
-    """
+        with suppress(BaseException):
+            scan.home.rmdir()
+
+
+def main():
+    import asyncio
+    import traceback
+    from bbot.core import CORE
+
+    global scan_name
+    try:
+        asyncio.run(_main())
+    except asyncio.CancelledError:
+        if CORE.logger.log_level <= logging.DEBUG:
+            log_to_stderr(traceback.format_exc(), level="DEBUG")
+    except KeyboardInterrupt:
+        msg = "Interrupted"
+        if scan_name:
+            msg = f"You killed {scan_name}"
+        log_to_stderr(msg, level="WARNING")
+        if CORE.logger.log_level <= logging.DEBUG:
+            log_to_stderr(traceback.format_exc(), level="DEBUG")
+        exit(1)
 
 
 if __name__ == "__main__":
diff --git a/bbot/core/__init__.py b/bbot/core/__init__.py
index e69de29bb2..6cfaecf0f9 100644
--- a/bbot/core/__init__.py
+++ b/bbot/core/__init__.py
@@ -0,0 +1,3 @@
+from .core import BBOTCore
+
+CORE = BBOTCore()
diff --git a/bbot/core/config/__init__.py b/bbot/core/config/__init__.py
new file mode 100644
index 
0000000000..c36d91f487 --- /dev/null +++ b/bbot/core/config/__init__.py @@ -0,0 +1,12 @@ +import sys +import multiprocessing as mp + +try: + mp.set_start_method("spawn") +except Exception: + start_method = mp.get_start_method() + if start_method != "spawn": + print( + f"[WARN] Multiprocessing spawn method is set to {start_method}. This may negatively affect performance.", + file=sys.stderr, + ) diff --git a/bbot/core/config/files.py b/bbot/core/config/files.py new file mode 100644 index 0000000000..2be7bbaa1a --- /dev/null +++ b/bbot/core/config/files.py @@ -0,0 +1,41 @@ +import sys +from pathlib import Path +from omegaconf import OmegaConf + +from ...logger import log_to_stderr +from ...errors import ConfigLoadError + + +bbot_code_dir = Path(__file__).parent.parent.parent + + +class BBOTConfigFiles: + config_dir = (Path.home() / ".config" / "bbot").resolve() + defaults_filename = (bbot_code_dir / "defaults.yml").resolve() + config_filename = (config_dir / "bbot.yml").resolve() + secrets_filename = (config_dir / "secrets.yml").resolve() + + def __init__(self, core): + self.core = core + + def _get_config(self, filename, name="config"): + filename = Path(filename).resolve() + try: + conf = OmegaConf.load(str(filename)) + cli_silent = any(x in sys.argv for x in ("-s", "--silent")) + if __name__ == "__main__" and not cli_silent: + log_to_stderr(f"Loaded {name} from {filename}") + return conf + except Exception as e: + if filename.exists(): + raise ConfigLoadError(f"Error parsing config at {filename}:\n\n{e}") + return OmegaConf.create() + + def get_custom_config(self): + return OmegaConf.merge( + self._get_config(self.config_filename, name="config"), + self._get_config(self.secrets_filename, name="secrets"), + ) + + def get_default_config(self): + return self._get_config(self.defaults_filename, name="defaults") diff --git a/bbot/core/config/logger.py b/bbot/core/config/logger.py new file mode 100644 index 0000000000..54866a63b6 --- /dev/null +++ b/bbot/core/config/logger.py @@ -0,0 +1,276 @@ +import os +import sys +import atexit +import logging +from copy import copy +import multiprocessing +import logging.handlers +from pathlib import Path +from contextlib import suppress + +from ..helpers.misc import mkdir, error_and_exit +from ...logger import colorize, loglevel_mapping +from ..multiprocess import SHARED_INTERPRETER_STATE + + +debug_format = logging.Formatter("%(asctime)s [%(levelname)s] %(name)s %(filename)s:%(lineno)s %(message)s") + + +class ColoredFormatter(logging.Formatter): + """ + Pretty colors for terminal + """ + + formatter = logging.Formatter("%(levelname)s %(message)s") + module_formatter = logging.Formatter("%(levelname)s %(name)s: %(message)s") + + def format(self, record): + colored_record = copy(record) + levelname = colored_record.levelname + levelshort = loglevel_mapping.get(levelname, "INFO") + colored_record.levelname = colorize(f"[{levelshort}]", level=levelname) + if levelname == "CRITICAL" or levelname.startswith("HUGE"): + colored_record.msg = colorize(colored_record.msg, level=levelname) + # remove name + if colored_record.name.startswith("bbot.modules."): + colored_record.name = colored_record.name.split("bbot.modules.")[-1] + return self.module_formatter.format(colored_record) + return self.formatter.format(colored_record) + + +class BBOTLogger: + """ + The main BBOT logger. + + The job of this class is to manage the different log handlers in BBOT, + allow adding new log handlers, and easily switching log levels on the fly. 
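+
+    Example (an illustrative sketch only, assuming the module-level `CORE`
+    singleton created in `bbot/core/__init__.py`):
+
+        >>> import logging
+        >>> from bbot.core import CORE
+        >>> CORE.logger.log_level = logging.DEBUG  # switch every registered logger at once
+        >>> CORE.logger.toggle_log_level()  # cycle INFO -> VERBOSE -> DEBUG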
+ """ + + def __init__(self, core): + # custom logging levels + if getattr(logging, "HUGEWARNING", None) is None: + self.addLoggingLevel("TRACE", 49) + self.addLoggingLevel("HUGEWARNING", 31) + self.addLoggingLevel("HUGESUCCESS", 26) + self.addLoggingLevel("SUCCESS", 25) + self.addLoggingLevel("HUGEINFO", 21) + self.addLoggingLevel("HUGEVERBOSE", 16) + self.addLoggingLevel("VERBOSE", 15) + self.verbosity_levels_toggle = [logging.INFO, logging.VERBOSE, logging.DEBUG] + + self._loggers = None + self._log_handlers = None + self._log_level = None + self.root_logger = logging.getLogger() + self.core_logger = logging.getLogger("bbot") + self.core = core + + self.listener = None + + # if we haven't set up logging yet, do it now + if "_BBOT_LOGGING_SETUP" not in os.environ: + os.environ["_BBOT_LOGGING_SETUP"] = "1" + self.queue = multiprocessing.Queue() + self.setup_queue_handler() + # Start the QueueListener + self.listener = logging.handlers.QueueListener(self.queue, *self.log_handlers.values()) + self.listener.start() + atexit.register(self.cleanup_logging) + + self.log_level = logging.INFO + + def cleanup_logging(self): + # Close the queue handler + with suppress(Exception): + self.queue_handler.close() + + # Clean root logger + root_logger = logging.getLogger() + for handler in list(root_logger.handlers): + with suppress(Exception): + root_logger.removeHandler(handler) + with suppress(Exception): + handler.close() + + # Clean all other loggers + for logger in logging.Logger.manager.loggerDict.values(): + if hasattr(logger, "handlers"): # Logger, not PlaceHolder + for handler in list(logger.handlers): + with suppress(Exception): + logger.removeHandler(handler) + with suppress(Exception): + handler.close() + + # Stop queue listener + with suppress(Exception): + self.listener.stop() + + def setup_queue_handler(self, logging_queue=None, log_level=logging.DEBUG): + if logging_queue is None: + logging_queue = self.queue + else: + self.queue = logging_queue + self.queue_handler = logging.handlers.QueueHandler(logging_queue) + + self.root_logger.addHandler(self.queue_handler) + + self.core_logger.setLevel(log_level) + # disable asyncio logging for child processes + if not SHARED_INTERPRETER_STATE.is_main_process: + logging.getLogger("asyncio").setLevel(logging.ERROR) + + def addLoggingLevel(self, levelName, levelNum, methodName=None): + """ + Comprehensively adds a new logging level to the `logging` module and the + currently configured logging class. + + `levelName` becomes an attribute of the `logging` module with the value + `levelNum`. `methodName` becomes a convenience method for both `logging` + itself and the class returned by `logging.getLoggerClass()` (usually just + `logging.Logger`). If `methodName` is not specified, `levelName.lower()` is + used. 
+ + To avoid accidental clobberings of existing attributes, this method will + raise an `AttributeError` if the level name is already an attribute of the + `logging` module or if the method name is already present + + Example + ------- + >>> addLoggingLevel('TRACE', logging.DEBUG - 5) + >>> logging.getLogger(__name__).setLevel('TRACE') + >>> logging.getLogger(__name__).trace('that worked') + >>> logging.trace('so did this') + >>> logging.TRACE + 5 + + """ + if not methodName: + methodName = levelName.lower() + + if hasattr(logging, levelName): + raise AttributeError(f"{levelName} already defined in logging module") + if hasattr(logging, methodName): + raise AttributeError(f"{methodName} already defined in logging module") + if hasattr(logging.getLoggerClass(), methodName): + raise AttributeError(f"{methodName} already defined in logger class") + + # This method was inspired by the answers to Stack Overflow post + # http://stackoverflow.com/q/2183233/2988730, especially + # http://stackoverflow.com/a/13638084/2988730 + def logForLevel(self, message, *args, **kwargs): + if self.isEnabledFor(levelNum): + self._log(levelNum, message, args, **kwargs) + + def logToRoot(message, *args, **kwargs): + logging.log(levelNum, message, *args, **kwargs) + + logging.addLevelName(levelNum, levelName) + setattr(logging, levelName, levelNum) + setattr(logging.getLoggerClass(), methodName, logForLevel) + setattr(logging, methodName, logToRoot) + + @property + def loggers(self): + if self._loggers is None: + self._loggers = [ + logging.getLogger("bbot"), + logging.getLogger("asyncio"), + ] + return self._loggers + + def add_log_handler(self, handler, formatter=None): + if self.listener is None: + return + if handler.formatter is None: + handler.setFormatter(debug_format) + if handler not in self.listener.handlers: + self.listener.handlers = self.listener.handlers + (handler,) + + def remove_log_handler(self, handler): + if self.listener is None: + return + if handler in self.listener.handlers: + new_handlers = list(self.listener.handlers) + new_handlers.remove(handler) + self.listener.handlers = tuple(new_handlers) + + def include_logger(self, logger): + if logger not in self.loggers: + self.loggers.append(logger) + if self.log_level is not None: + logger.setLevel(self.log_level) + for handler in self.log_handlers.values(): + self.add_log_handler(handler) + + def stderr_filter(self, record): + if record.levelno == logging.TRACE and self.log_level > logging.DEBUG: + return False + if record.levelno < self.log_level: + return False + return True + + @property + def log_handlers(self): + if self._log_handlers is None: + log_dir = Path(self.core.home) / "logs" + if not mkdir(log_dir, raise_error=False): + error_and_exit(f"Failure creating or error writing to BBOT logs directory ({log_dir})") + + # Main log file + main_handler = logging.handlers.TimedRotatingFileHandler( + f"{log_dir}/bbot.log", when="d", interval=1, backupCount=14 + ) + + # Separate log file for debugging + debug_handler = logging.handlers.TimedRotatingFileHandler( + f"{log_dir}/bbot.debug.log", when="d", interval=1, backupCount=14 + ) + + # Log to stderr + stderr_handler = logging.StreamHandler(sys.stderr) + stderr_handler.addFilter(self.stderr_filter) + # log to files + debug_handler.addFilter(lambda x: x.levelno == logging.TRACE or (x.levelno < logging.VERBOSE)) + main_handler.addFilter(lambda x: x.levelno != logging.TRACE and x.levelno >= logging.VERBOSE) + + # Set log format + debug_handler.setFormatter(debug_format) + 
main_handler.setFormatter(debug_format) + stderr_handler.setFormatter(ColoredFormatter("%(levelname)s %(name)s: %(message)s")) + + self._log_handlers = { + "stderr": stderr_handler, + "file_debug": debug_handler, + "file_main": main_handler, + } + return self._log_handlers + + @property + def log_level(self): + if self._log_level is None: + return logging.INFO + return self._log_level + + @log_level.setter + def log_level(self, level): + self.set_log_level(level) + + def set_log_level(self, level, logger=None): + if isinstance(level, str): + level = logging.getLevelName(level) + if logger is not None: + logger.hugeinfo(f"Setting log level to {logging.getLevelName(level)}") + self._log_level = level + for logger in self.loggers: + logger.setLevel(level) + + def toggle_log_level(self, logger=None): + if self.log_level in self.verbosity_levels_toggle: + for i, level in enumerate(self.verbosity_levels_toggle): + if self.log_level == level: + self.set_log_level( + self.verbosity_levels_toggle[(i + 1) % len(self.verbosity_levels_toggle)], logger=logger + ) + break + else: + self.set_log_level(self.verbosity_levels_toggle[0], logger=logger) diff --git a/bbot/core/configurator/__init__.py b/bbot/core/configurator/__init__.py deleted file mode 100644 index f7efeb497f..0000000000 --- a/bbot/core/configurator/__init__.py +++ /dev/null @@ -1,109 +0,0 @@ -import os -import sys -from pathlib import Path -from omegaconf import OmegaConf - -from . import files, args, environ -from ..errors import ConfigLoadError -from ...modules import module_loader -from ..helpers.misc import mkdir, error_and_exit, filter_dict, clean_dict - -try: - config = OmegaConf.merge( - # first, pull module defaults - OmegaConf.create( - { - "modules": module_loader.configs(type="scan"), - "output_modules": module_loader.configs(type="output"), - "internal_modules": module_loader.configs(type="internal"), - } - ), - # then look in .yaml files - files.get_config(), - # finally, pull from CLI arguments - args.get_config(), - ) -except ConfigLoadError as e: - error_and_exit(e) - -# ensure bbot_home -if not "home" in config: - config["home"] = "~/.bbot" -home = Path(config["home"]).expanduser().resolve() -config["home"] = str(home) - -# ensure bbot.yml -if not files.config_filename.exists(): - print(f"[INFO] Creating BBOT config at {files.config_filename}") - no_secrets_config = OmegaConf.to_object(config) - no_secrets_config = clean_dict(no_secrets_config, "api_key", "username", "password", "token", fuzzy=True) - OmegaConf.save(config=OmegaConf.create(no_secrets_config), f=str(files.config_filename)) - -# ensure secrets.yml -if not files.secrets_filename.exists(): - print(f"[INFO] Creating BBOT secrets at {files.secrets_filename}") - secrets_only_config = OmegaConf.to_object(config) - secrets_only_config = filter_dict(secrets_only_config, "api_key", "username", "password", "token", fuzzy=True) - OmegaConf.save(config=OmegaConf.create(secrets_only_config), f=str(files.secrets_filename)) - -# if we're running in a virtual environment, make sure to include its /bin in PATH -if sys.prefix != sys.base_prefix: - bin_dir = str(Path(sys.prefix) / "bin") - if bin_dir not in os.environ["PATH"].split(":"): - os.environ["PATH"] = f'{bin_dir}:{os.environ.get("PATH", "")}' - -# ensure bbot_tools -bbot_tools = home / "tools" -os.environ["BBOT_TOOLS"] = str(bbot_tools) -os.environ["PATH"] = f'{bbot_tools}:{os.environ.get("PATH", "")}' -# ensure bbot_cache -bbot_cache = home / "cache" -os.environ["BBOT_CACHE"] = str(bbot_cache) -# ensure bbot_temp 
-bbot_temp = home / "temp" -os.environ["BBOT_TEMP"] = str(bbot_temp) -# ensure bbot_lib -bbot_lib = home / "lib" -os.environ["BBOT_LIB"] = str(bbot_lib) - -# exchange certain options between CLI args and config -if args.cli_options is not None: - # deps - config["retry_deps"] = args.cli_options.retry_deps - config["force_deps"] = args.cli_options.force_deps - config["no_deps"] = args.cli_options.no_deps - config["ignore_failed_deps"] = args.cli_options.ignore_failed_deps - # debug - config["debug"] = args.cli_options.debug - if args.cli_options.output_dir: - config["output_dir"] = args.cli_options.output_dir - -# copy config to environment -bbot_environ = environ.flatten_config(config) -os.environ.update(bbot_environ) - -# handle HTTP proxy -http_proxy = config.get("http_proxy", "") -if http_proxy: - os.environ["HTTP_PROXY"] = http_proxy - os.environ["HTTPS_PROXY"] = http_proxy - -# export LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:~/.bbot/lib/ -os.environ["LD_LIBRARY_PATH"] = ":".join(os.environ.get("LD_LIBRARY_PATH", "").split(":") + [str(bbot_lib)]).strip(":") - -# replace environment variables in preloaded modules -module_loader.find_and_replace(**os.environ) - -# ssl verification -import urllib3 - -urllib3.disable_warnings() -ssl_verify = config.get("ssl_verify", False) -if not ssl_verify: - import requests - import functools - - requests.adapters.BaseAdapter.send = functools.partialmethod(requests.adapters.BaseAdapter.send, verify=False) - requests.adapters.HTTPAdapter.send = functools.partialmethod(requests.adapters.HTTPAdapter.send, verify=False) - requests.Session.request = functools.partialmethod(requests.Session.request, verify=False) - requests.request = functools.partial(requests.request, verify=False) diff --git a/bbot/core/configurator/args.py b/bbot/core/configurator/args.py deleted file mode 100644 index a9b45df6d7..0000000000 --- a/bbot/core/configurator/args.py +++ /dev/null @@ -1,187 +0,0 @@ -import sys -import argparse -from pathlib import Path -from omegaconf import OmegaConf -from contextlib import suppress - -from ..errors import ArgumentError -from ...modules import module_loader -from ..helpers.misc import chain_lists - -module_choices = sorted(set(module_loader.configs(type="scan"))) -output_module_choices = sorted(set(module_loader.configs(type="output"))) - -flag_choices = set() -for m, c in module_loader.preloaded().items(): - flag_choices.update(set(c.get("flags", []))) - - -class BBOTArgumentParser(argparse.ArgumentParser): - _dummy = False - - def parse_args(self, *args, **kwargs): - """ - Allow space or comma-separated entries for modules and targets - For targets, also allow input files containing additional targets - """ - ret = super().parse_args(*args, **kwargs) - ret.modules = chain_lists(ret.modules) - ret.output_modules = chain_lists(ret.output_modules) - ret.targets = chain_lists(ret.targets, try_files=True, msg="Reading targets from file: {filename}") - ret.whitelist = chain_lists(ret.whitelist, try_files=True, msg="Reading whitelist from file: {filename}") - ret.blacklist = chain_lists(ret.blacklist, try_files=True, msg="Reading blacklist from file: {filename}") - ret.flags = chain_lists(ret.flags) - ret.require_flags = chain_lists(ret.require_flags) - for m in ret.modules: - if m not in module_choices and not self._dummy: - raise ArgumentError(f'Module "{m}" is not valid. Choose from: {",".join(module_choices)}') - for m in ret.exclude_modules: - if m not in module_choices and not self._dummy: - raise ArgumentError(f'Cannot exclude module "{m}". 
Choose from: {",".join(module_choices)}') - for m in ret.output_modules: - if m not in output_module_choices and not self._dummy: - raise ArgumentError( - f'Output module "{m}" is not valid. Choose from: {",".join(output_module_choices)}' - ) - for f in set(ret.flags + ret.require_flags): - if f not in flag_choices and not self._dummy: - raise ArgumentError(f'Flag "{f}" is not valid. Choose from: {",".join(sorted(flag_choices))}') - return ret - - -class DummyArgumentParser(BBOTArgumentParser): - _dummy = True - - def error(self, message): - pass - - -parser = BBOTArgumentParser(description="Bighuge BLS OSINT Tool", formatter_class=argparse.RawTextHelpFormatter) -dummy_parser = DummyArgumentParser(description="Bighuge BLS OSINT Tool", formatter_class=argparse.RawTextHelpFormatter) -for p in (parser, dummy_parser): - target = p.add_argument_group(title="Target") - target.add_argument("-t", "--targets", nargs="+", default=[], help="Targets to seed the scan", metavar="TARGET") - target.add_argument( - "-w", - "--whitelist", - nargs="+", - default=[], - help="What's considered in-scope (by default it's the same as --targets)", - ) - target.add_argument("-b", "--blacklist", nargs="+", default=[], help="Don't touch these things") - target.add_argument( - "-s", - "--strict-scope", - action="store_true", - help="Don't consider subdomains of target/whitelist to be in-scope", - ) - p.add_argument("-n", "--name", help="Name of scan (default: random)", metavar="SCAN_NAME") - p.add_argument( - "-m", - "--modules", - nargs="+", - default=[], - help=f'Modules to enable. Choices: {",".join(module_choices)}', - metavar="MODULE", - ) - p.add_argument("-l", "--list-modules", action="store_true", help=f"List available modules.") - p.add_argument("-em", "--exclude-modules", nargs="+", default=[], help=f"Exclude these modules.", metavar="MODULE") - p.add_argument( - "-f", - "--flags", - nargs="+", - default=[], - help=f'Enable modules by flag. Choices: {",".join(sorted(flag_choices))}', - metavar="FLAG", - ) - p.add_argument( - "-rf", - "--require-flags", - nargs="+", - default=[], - help=f"Disable modules that don't have these flags (e.g. --require-flags passive)", - metavar="FLAG", - ) - p.add_argument( - "-ef", - "--exclude-flags", - nargs="+", - default=[], - help=f"Disable modules with these flags. (e.g. --exclude-flags brute-force)", - metavar="FLAG", - ) - p.add_argument( - "-om", - "--output-modules", - nargs="+", - default=["human"], - help=f'Output module(s). 
Choices: {",".join(output_module_choices)}', - metavar="MODULE", - ) - p.add_argument( - "-o", - "--output-dir", - metavar="DIR", - ) - p.add_argument( - "-c", - "--config", - nargs="*", - help="custom config file, or configuration options in key=value format: 'modules.shodan.api_key=1234'", - metavar="CONFIG", - ) - p.add_argument("--allow-deadly", action="store_true", help="Enable the use of highly aggressive modules") - p.add_argument("-v", "--verbose", action="store_true", help="Be more verbose") - p.add_argument("-d", "--debug", action="store_true", help="Enable debugging") - p.add_argument("--force", action="store_true", help="Run scan even if module setups fail") - p.add_argument("-y", "--yes", action="store_true", help="Skip scan confirmation prompt") - p.add_argument("--dry-run", action="store_true", help=f"Abort before executing scan") - p.add_argument( - "--current-config", - action="store_true", - help="Show current config in YAML format", - ) - wordcloud = p.add_argument_group( - title="Word cloud", description="Save/load wordlist of common words gathered during a scan" - ) - wordcloud.add_argument( - "--save-wordcloud", help="Output wordcloud to custom file when the scan completes", metavar="FILE" - ) - wordcloud.add_argument("--load-wordcloud", help="Load wordcloud from a custom file", metavar="FILE") - deps = p.add_argument_group( - title="Module dependencies", description="Control how modules install their dependencies" - ) - g2 = deps.add_mutually_exclusive_group() - g2.add_argument("--no-deps", action="store_true", help="Don't install module dependencies") - g2.add_argument("--force-deps", action="store_true", help="Force install all module dependencies") - g2.add_argument("--retry-deps", action="store_true", help="Try again to install failed module dependencies") - g2.add_argument( - "--ignore-failed-deps", action="store_true", help="Run modules even if they have failed dependencies" - ) - agent = p.add_argument_group(title="Agent", description="Report back to a central server") - agent.add_argument("-a", "--agent-mode", action="store_true", help="Start in agent mode") - - -cli_options = None -with suppress(Exception): - cli_options = dummy_parser.parse_args() - - -def get_config(): - cli_config = [] - with suppress(Exception): - if cli_options.config: - cli_config = cli_options.config - if len(cli_config) == 1: - filename = Path(cli_config[0]).resolve() - if filename.is_file(): - try: - return OmegaConf.load(str(filename)) - except Exception as e: - print(f"[ERR] Error parsing custom config at {filename}: {e}") - sys.exit(2) - try: - return OmegaConf.from_cli(cli_config) - except Exception as e: - print(f"[ERR] Error parsing command-line config: {e}") - sys.exit(2) diff --git a/bbot/core/configurator/environ.py b/bbot/core/configurator/environ.py deleted file mode 100644 index c3b156ea3d..0000000000 --- a/bbot/core/configurator/environ.py +++ /dev/null @@ -1,15 +0,0 @@ -import omegaconf - - -def flatten_config(config, base="bbot"): - """ - Flatten a JSON-like config into a list of environment variables: - {"modules": [{"httpx": {"timeout": 5}}]} --> "BBOT_MODULES_HTTPX_TIMEOUT=5" - """ - if type(config) == omegaconf.dictconfig.DictConfig: - for k, v in config.items(): - new_base = f"{base}_{k}" - if type(v) == omegaconf.dictconfig.DictConfig: - yield from flatten_config(v, base=new_base) - elif type(v) != omegaconf.listconfig.ListConfig: - yield (new_base.upper(), str(v)) diff --git a/bbot/core/configurator/files.py b/bbot/core/configurator/files.py deleted file mode 
100644 index d1ab408a63..0000000000 --- a/bbot/core/configurator/files.py +++ /dev/null @@ -1,33 +0,0 @@ -from pathlib import Path -from shutil import copyfile -from omegaconf import OmegaConf - -from ..helpers.misc import mkdir -from ..errors import ConfigLoadError - -config_dir = (Path.home() / ".config" / "bbot").resolve() -defaults_filename = (Path(__file__).parent.parent.parent / "defaults.yml").resolve() -defaults_destination = config_dir / "defaults.yml" -mkdir(config_dir) -copyfile(defaults_filename, defaults_destination) -config_filename = (config_dir / "bbot.yml").resolve() -secrets_filename = (config_dir / "secrets.yml").resolve() - - -def _get_config(filename): - filename = Path(filename).resolve() - try: - return OmegaConf.load(str(filename)) - except Exception as e: - if filename.exists(): - raise ConfigLoadError(f"Error parsing config at {filename}:\n\n{e}") - return OmegaConf.create() - - -def get_config(): - - return OmegaConf.merge( - _get_config(defaults_filename), - _get_config(config_filename), - _get_config(secrets_filename), - ) diff --git a/bbot/core/core.py b/bbot/core/core.py new file mode 100644 index 0000000000..5814052771 --- /dev/null +++ b/bbot/core/core.py @@ -0,0 +1,228 @@ +import os +import logging +from copy import copy +from pathlib import Path +from contextlib import suppress +from omegaconf import OmegaConf + +from bbot.errors import BBOTError +from .multiprocess import SHARED_INTERPRETER_STATE + + +DEFAULT_CONFIG = None + + +class BBOTCore: + """ + This is the first thing that loads when you import BBOT. + + Unlike a Preset, BBOTCore holds only the config, not scan-specific stuff like targets, flags, modules, etc. + + Its main jobs are: + + - set up logging + - keep separation between the `default` and `custom` config (this allows presets to only display the config options that have changed) + - allow for easy merging of configs + - load quickly + """ + + # used for filtering out sensitive config values + secrets_strings = ["api_key", "username", "password", "token", "secret", "_id"] + # don't filter/remove entries under this key + secrets_exclude_keys = ["modules"] + + def __init__(self): + self._logger = None + self._files_config = None + + self._config = None + self._custom_config = None + + # bare minimum == logging + self.logger + self.log = logging.getLogger("bbot.core") + + self._prep_multiprocessing() + + def _prep_multiprocessing(self): + import multiprocessing + from .helpers.process import BBOTProcess + + if SHARED_INTERPRETER_STATE.is_main_process: + # if this is the main bbot process, set the logger and queue for the first time + from functools import partialmethod + + BBOTProcess.__init__ = partialmethod( + BBOTProcess.__init__, log_level=self.logger.log_level, log_queue=self.logger.queue + ) + + # this makes our process class the default for process pools, etc. + mp_context = multiprocessing.get_context("spawn") + mp_context.Process = BBOTProcess + + @property + def home(self): + return Path(self.config["home"]).expanduser().resolve() + + @property + def cache_dir(self): + return self.home / "cache" + + @property + def tools_dir(self): + return self.home / "tools" + + @property + def temp_dir(self): + return self.home / "temp" + + @property + def lib_dir(self): + return self.home / "lib" + + @property + def scans_dir(self): + return self.home / "scans" + + @property + def config(self): + """ + .config is just .default_config + .custom_config merged together + + any new values should be added to custom_config. 
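+
+        Example (an illustrative sketch of the merge semantics, assuming the
+        module-level `CORE` singleton):
+
+            >>> from bbot.core import CORE
+            >>> CORE.merge_custom({"home": "/tmp/.bbot"})
+            >>> CORE.config["home"]  # the custom value overrides the default
+            '/tmp/.bbot'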
+ """ + if self._config is None: + self._config = OmegaConf.merge(self.default_config, self.custom_config) + # set read-only flag (change .custom_config instead) + OmegaConf.set_readonly(self._config, True) + return self._config + + @property + def default_config(self): + """ + The default BBOT config (from `defaults.yml`). Read-only. + """ + global DEFAULT_CONFIG + if DEFAULT_CONFIG is None: + self.default_config = self.files_config.get_default_config() + # ensure bbot home dir + if "home" not in self.default_config: + self.default_config["home"] = "~/.bbot" + return DEFAULT_CONFIG + + @default_config.setter + def default_config(self, value): + # we temporarily clear out the config so it can be refreshed if/when default_config changes + global DEFAULT_CONFIG + self._config = None + DEFAULT_CONFIG = value + # set read-only flag (change .custom_config instead) + OmegaConf.set_readonly(DEFAULT_CONFIG, True) + + @property + def custom_config(self): + """ + Custom BBOT config (from `~/.config/bbot/bbot.yml`) + """ + # we temporarily clear out the config so it can be refreshed if/when custom_config changes + self._config = None + if self._custom_config is None: + self.custom_config = self.files_config.get_custom_config() + return self._custom_config + + @custom_config.setter + def custom_config(self, value): + # we temporarily clear out the config so it can be refreshed if/when custom_config changes + self._config = None + # ensure the modules key is always a dictionary + modules_entry = value.get("modules", None) + if modules_entry is not None and not OmegaConf.is_dict(modules_entry): + value["modules"] = {} + self._custom_config = value + + def no_secrets_config(self, config): + from .helpers.misc import clean_dict + + with suppress(ValueError): + config = OmegaConf.to_object(config) + + return clean_dict( + config, + *self.secrets_strings, + fuzzy=True, + exclude_keys=self.secrets_exclude_keys, + ) + + def secrets_only_config(self, config): + from .helpers.misc import filter_dict + + with suppress(ValueError): + config = OmegaConf.to_object(config) + + return filter_dict( + config, + *self.secrets_strings, + fuzzy=True, + exclude_keys=self.secrets_exclude_keys, + ) + + def merge_custom(self, config): + """ + Merge a config into the custom config. + """ + self.custom_config = OmegaConf.merge(self.custom_config, OmegaConf.create(config)) + + def merge_default(self, config): + """ + Merge a config into the default config. + """ + self.default_config = OmegaConf.merge(self.default_config, OmegaConf.create(config)) + + def copy(self): + """ + Return a semi-shallow copy of self. 
(`custom_config` is copied, but `default_config` stays the same) + """ + core_copy = copy(self) + core_copy._custom_config = self._custom_config.copy() + return core_copy + + @property + def files_config(self): + """ + Get the configs from `bbot.yml` and `defaults.yml` + """ + if self._files_config is None: + from .config import files + + self.files = files + self._files_config = files.BBOTConfigFiles(self) + return self._files_config + + def create_process(self, *args, **kwargs): + if os.environ.get("BBOT_TESTING", "") == "True": + process = self.create_thread(*args, **kwargs) + else: + if SHARED_INTERPRETER_STATE.is_scan_process: + from .helpers.process import BBOTProcess + + process = BBOTProcess(*args, **kwargs) + else: + import multiprocessing + + raise BBOTError(f"Tried to start server from process {multiprocessing.current_process().name}") + process.daemon = True + return process + + def create_thread(self, *args, **kwargs): + from .helpers.process import BBOTThread + + return BBOTThread(*args, **kwargs) + + @property + def logger(self): + self.config + if self._logger is None: + from .config.logger import BBOTLogger + + self._logger = BBOTLogger(self) + return self._logger diff --git a/bbot/core/engine.py b/bbot/core/engine.py new file mode 100644 index 0000000000..e8ddd87bd5 --- /dev/null +++ b/bbot/core/engine.py @@ -0,0 +1,687 @@ +import os +import sys +import zmq +import pickle +import asyncio +import inspect +import logging +import tempfile +import traceback +import contextlib +import contextvars +import zmq.asyncio +import multiprocessing +from pathlib import Path +from concurrent.futures import CancelledError +from contextlib import asynccontextmanager, suppress + +from bbot.core import CORE +from bbot.errors import BBOTEngineError +from bbot.core.helpers.async_helpers import get_event_loop +from bbot.core.multiprocess import SHARED_INTERPRETER_STATE +from bbot.core.helpers.misc import rand_string, in_exception_chain + + +error_sentinel = object() + + +class EngineBase: + """ + Base Engine class for Server and Client. + + An Engine is a simple and lightweight RPC implementation that allows offloading async tasks + to a separate process. It leverages ZeroMQ in a ROUTER-DEALER configuration. + + BBOT makes use of this by spawning a dedicated engine for DNS and HTTP tasks. + This offloads I/O and helps free up the main event loop for other tasks. + + To use Engine, you must subclass both EngineClient and EngineServer. + + See the respective EngineClient and EngineServer classes for usage examples. 
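+
+    Wire format (a summary of the implementation below, for reference): every
+    request is a pickled dict, e.g.
+
+        >>> import pickle
+        >>> pickle.dumps({"c": 0, "a": ("example.com",), "k": {}})  # invoke command 0
+
+    The special command values -1 and -99 request task cancellation and server
+    shutdown, and the special response keys "_e" and "_s" carry an
+    (error, traceback) tuple and a StopIteration signal, respectively.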
+ """ + + ERROR_CLASS = BBOTEngineError + + def __init__(self, debug=False): + self._shutdown_status = False + self.log = logging.getLogger(f"bbot.core.{self.__class__.__name__.lower()}") + self._engine_debug = debug + + def pickle(self, obj): + try: + return pickle.dumps(obj) + except Exception as e: + self.log.error(f"Error serializing object: {obj}: {e}") + self.log.trace(traceback.format_exc()) + return error_sentinel + + def unpickle(self, binary): + try: + return pickle.loads(binary) + except Exception as e: + self.log.error(f"Error deserializing binary: {e}") + self.log.trace(f"Offending binary: {binary}") + self.log.trace(traceback.format_exc()) + return error_sentinel + + async def _infinite_retry(self, callback, *args, **kwargs): + interval = kwargs.pop("_interval", 300) + context = kwargs.pop("_context", "") + # default overall timeout of 10 minutes (300 second interval * 2 iterations) + max_retries = kwargs.pop("_max_retries", 1) + if not context: + context = f"{callback.__name__}({args}, {kwargs})" + retries = 0 + while not self._shutdown_status: + try: + return await asyncio.wait_for(callback(*args, **kwargs), timeout=interval) + except (TimeoutError, asyncio.exceptions.TimeoutError): + self.log.debug(f"{self.name}: Timeout after {interval:,} seconds {context}, retrying...") + retries += 1 + if max_retries is not None and retries > max_retries: + raise TimeoutError(f"Timed out after {(max_retries + 1) * interval:,} seconds {context}") + + def engine_debug(self, *args, **kwargs): + if self._engine_debug: + self.log.trace(*args, **kwargs) + + +class EngineClient(EngineBase): + """ + The client portion of BBOT's RPC Engine. + + To create an engine, you must create a subclass of this class and also + define methods for each of your desired functions. + + Note that this only supports async functions. If you need to offload a synchronous function to another CPU, use BBOT's multiprocessing pool instead. + + Any CPU or I/O intense logic should be implemented in the EngineServer. + + These functions are typically stubs whose only job is to forward the arguments to the server. + + Functions with the same names should be defined on the EngineServer. + + The EngineClient must specify its associated server class via the `SERVER_CLASS` variable. + + Depending on whether your function is a generator, you will use either `run_and_return()`, or `run_and_yield`. 
+
+    Examples:
+        >>> from bbot.core.engine import EngineClient
+        >>>
+        >>> class MyClient(EngineClient):
+        >>>     SERVER_CLASS = MyServer
+        >>>
+        >>>     async def my_function(self, **kwargs):
+        >>>         return await self.run_and_return("my_function", **kwargs)
+        >>>
+        >>>     async def my_generator(self, **kwargs):
+        >>>         async for _ in self.run_and_yield("my_generator", **kwargs):
+        >>>             yield _
+    """
+
+    SERVER_CLASS = None
+
+    def __init__(self, debug=False, **kwargs):
+        self.name = f"EngineClient {self.__class__.__name__}"
+        super().__init__(debug=debug)
+        self.process = None
+        if self.SERVER_CLASS is None:
+            raise ValueError(f"Must set EngineClient SERVER_CLASS, {self.SERVER_CLASS}")
+        self.CMDS = dict(self.SERVER_CLASS.CMDS)
+        for k, v in list(self.CMDS.items()):
+            self.CMDS[v] = k
+        self.socket_address = f"zmq_{rand_string(8)}.sock"
+        self.socket_path = Path(tempfile.gettempdir()) / self.socket_address
+        self.server_kwargs = kwargs.pop("server_kwargs", {})
+        self._server_process = None
+        self.context = zmq.asyncio.Context()
+        self.context.setsockopt(zmq.LINGER, 0)
+        self.sockets = set()
+
+    def check_error(self, message):
+        if isinstance(message, dict) and len(message) == 1 and "_e" in message:
+            self.engine_debug(f"{self.name}: got error message: {message}")
+            error, trace = message["_e"]
+            error = self.ERROR_CLASS(error)
+            error.engine_traceback = trace
+            self.engine_debug(f"{self.name}: raising {error.__class__.__name__}")
+            raise error
+        return False
+
+    async def run_and_return(self, command, *args, **kwargs):
+        fn_str = f"{command}({args}, {kwargs})"
+        self.engine_debug(f"{self.name}: executing run-and-return {fn_str}")
+        if self._shutdown_status and not command == "_shutdown":
+            self.log.verbose(f"{self.name} has been shut down and is not accepting new tasks")
+            return
+        async with self.new_socket() as socket:
+            try:
+                message = self.make_message(command, args=args, kwargs=kwargs)
+                if message is error_sentinel:
+                    return
+                await socket.send(message)
+                binary = await self._infinite_retry(socket.recv, _context=f"waiting for return value from {fn_str}")
+            except BaseException:
+                try:
+                    await self.send_cancel_message(socket, fn_str)
+                except Exception:
+                    self.log.debug(f"{self.name}: {fn_str} failed to send cancel message after exception")
+                    self.log.trace(traceback.format_exc())
+                raise
+        # self.log.debug(f"{self.name}.{command}({kwargs}) got binary: {binary}")
+        message = self.unpickle(binary)
+        self.engine_debug(f"{self.name}: {fn_str} got return value: {message}")
+        # error handling
+        if self.check_error(message):
+            return
+        return message
+
+    async def run_and_yield(self, command, *args, **kwargs):
+        fn_str = f"{command}({args}, {kwargs})"
+        self.engine_debug(f"{self.name}: executing run-and-yield {fn_str}")
+        if self._shutdown_status:
+            self.log.verbose("Engine has been shut down and is not accepting new tasks")
+            return
+        message = self.make_message(command, args=args, kwargs=kwargs)
+        if message is error_sentinel:
+            return
+        async with self.new_socket() as socket:
+            # TODO: synchronize server-side generator by limiting qsize
+            # socket.setsockopt(zmq.RCVHWM, 1)
+            # socket.setsockopt(zmq.SNDHWM, 1)
+            await socket.send(message)
+            while 1:
+                try:
+                    binary = await self._infinite_retry(
+                        socket.recv, _context=f"waiting for new iteration from {fn_str}"
+                    )
+                    # self.log.debug(f"{self.name}.{command}({kwargs}) got binary: {binary}")
+                    message = self.unpickle(binary)
+                    self.engine_debug(f"{self.name}: {fn_str} got iteration: {message}")
+                    # error handling
+                    if self.check_error(message) or 
self.check_stop(message): + break + yield message + except (StopAsyncIteration, GeneratorExit) as e: + exc_name = e.__class__.__name__ + self.engine_debug(f"{self.name}.{command} got {exc_name}") + try: + await self.send_cancel_message(socket, fn_str) + except Exception: + self.engine_debug(f"{self.name}.{command} failed to send cancel message after {exc_name}") + self.log.trace(traceback.format_exc()) + break + + async def send_cancel_message(self, socket, context): + """ + Send a cancel message and wait for confirmation from the server + """ + # -1 == special "cancel" signal + message = pickle.dumps({"c": -1}) + await self._infinite_retry(socket.send, message) + while 1: + response = await self._infinite_retry( + socket.recv, _context=f"waiting for CANCEL_OK from {context}", _max_retries=4 + ) + response = pickle.loads(response) + if isinstance(response, dict): + response = response.get("m", "") + if response == "CANCEL_OK": + break + + async def send_shutdown_message(self): + async with self.new_socket() as socket: + # -99 == special shutdown message + message = pickle.dumps({"c": -99}) + with suppress(TimeoutError, asyncio.exceptions.TimeoutError): + await asyncio.wait_for(socket.send(message), 0.5) + with suppress(TimeoutError, asyncio.exceptions.TimeoutError): + while 1: + response = await asyncio.wait_for(socket.recv(), 0.5) + response = pickle.loads(response) + if isinstance(response, dict): + response = response.get("m", "") + if response == "SHUTDOWN_OK": + break + + def check_stop(self, message): + if isinstance(message, dict) and len(message) == 1 and "_s" in message: + return True + return False + + def make_message(self, command, args=None, kwargs=None): + try: + cmd_id = self.CMDS[command] + except KeyError: + raise KeyError(f'Command "{command}" not found. Available commands: {",".join(self.available_commands)}') + message = {"c": cmd_id} + if args: + message["a"] = args + if kwargs: + message["k"] = kwargs + return pickle.dumps(message) + + @property + def available_commands(self): + return [s for s in self.CMDS if isinstance(s, str)] + + def start_server(self): + process_name = multiprocessing.current_process().name + if SHARED_INTERPRETER_STATE.is_scan_process: + kwargs = dict(self.server_kwargs) + # if we're in tests, we use a single event loop to avoid weird race conditions + # this allows us to more easily mock http, etc. + if os.environ.get("BBOT_TESTING", "") == "True": + kwargs["_loop"] = get_event_loop() + kwargs["debug"] = self._engine_debug + self.process = CORE.create_process( + target=self.server_process, + args=( + self.SERVER_CLASS, + self.socket_path, + ), + kwargs=kwargs, + custom_name=f"BBOT {self.__class__.__name__}", + ) + self.process.start() + return self.process + else: + raise BBOTEngineError( + f"Tried to start server from process {process_name}. 
Did you forget \"if __name__ == '__main__'?\"" + ) + + @staticmethod + def server_process(server_class, socket_path, **kwargs): + try: + loop = kwargs.pop("_loop", None) + engine_server = server_class(socket_path, **kwargs) + if loop is not None: + future = asyncio.run_coroutine_threadsafe(engine_server.worker(), loop) + future.result() + else: + asyncio.run(engine_server.worker()) + except (asyncio.CancelledError, KeyboardInterrupt, CancelledError): + return + except Exception: + import traceback + + log = logging.getLogger("bbot.core.engine.server") + log.critical(f"Unhandled error in {server_class.__name__} server process: {traceback.format_exc()}") + + @asynccontextmanager + async def new_socket(self): + if self._server_process is None: + self._server_process = self.start_server() + while not self.socket_path.exists(): + self.engine_debug(f"{self.name}: waiting for server process to start...") + await asyncio.sleep(0.1) + socket = self.context.socket(zmq.DEALER) + socket.setsockopt(zmq.LINGER, 0) # Discard pending messages immediately disconnect() or close() + socket.setsockopt(zmq.SNDHWM, 0) # Unlimited send buffer + socket.setsockopt(zmq.RCVHWM, 0) # Unlimited receive buffer + socket.connect(f"ipc://{self.socket_path}") + self.sockets.add(socket) + try: + yield socket + finally: + self.sockets.remove(socket) + with suppress(Exception): + socket.close() + + async def shutdown(self): + if not self._shutdown_status: + self._shutdown_status = True + self.log.verbose(f"{self.name}: shutting down...") + # send shutdown signal + await self.send_shutdown_message() + # then terminate context + try: + self.context.destroy(linger=0) + except Exception: + print(traceback.format_exc(), file=sys.stderr) + try: + self.context.term() + except Exception: + print(traceback.format_exc(), file=sys.stderr) + # delete socket file on exit + self.socket_path.unlink(missing_ok=True) + + +class EngineServer(EngineBase): + """ + The server portion of BBOT's RPC Engine. + + Methods defined here must match the methods in your EngineClient. + + To use the functions, you must create mappings for them in the CMDS attribute, as shown below. 
+
+    Examples:
+        >>> from bbot.core.engine import EngineServer
+        >>>
+        >>> class MyServer(EngineServer):
+        >>>     CMDS = {
+        >>>         0: "my_function",
+        >>>         1: "my_generator",
+        >>>     }
+        >>>
+        >>>     async def my_function(self, arg1=None):
+        >>>         await asyncio.sleep(1)
+        >>>         return str(arg1)
+        >>>
+        >>>     async def my_generator(self):
+        >>>         for i in range(10):
+        >>>             await asyncio.sleep(1)
+        >>>             yield i
+    """
+
+    CMDS = {}
+
+    def __init__(self, socket_path, debug=False):
+        self.name = f"EngineServer {self.__class__.__name__}"
+        super().__init__(debug=debug)
+        self.engine_debug(f"{self.name}: finished setup 1 (_debug={self._engine_debug})")
+        self.socket_path = socket_path
+        self.client_id_var = contextvars.ContextVar("client_id", default=None)
+        # task <--> client id mapping
+        self.tasks = {}
+        # child tasks spawned by main tasks
+        self.child_tasks = {}
+        self.engine_debug(f"{self.name}: finished setup 2 (_debug={self._engine_debug})")
+        if self.socket_path is not None:
+            # create ZeroMQ context
+            self.context = zmq.asyncio.Context()
+            # ROUTER socket can handle multiple concurrent requests
+            self.socket = self.context.socket(zmq.ROUTER)
+            self.socket.setsockopt(zmq.LINGER, 0)  # Discard pending messages immediately disconnect() or close()
+            self.socket.setsockopt(zmq.SNDHWM, 0)  # Unlimited send buffer
+            self.socket.setsockopt(zmq.RCVHWM, 0)  # Unlimited receive buffer
+            # create socket file
+            self.socket.bind(f"ipc://{self.socket_path}")
+            self.engine_debug(f"{self.name}: finished setup 3 (_debug={self._engine_debug})")
+
+    @contextlib.contextmanager
+    def client_id_context(self, value):
+        token = self.client_id_var.set(value)
+        try:
+            yield
+        finally:
+            self.client_id_var.reset(token)
+
+    async def run_and_return(self, client_id, command_fn, *args, **kwargs):
+        fn_str = f"{command_fn.__name__}({args}, {kwargs})"
+        self.engine_debug(fn_str)
+        with self.client_id_context(client_id):
+            try:
+                self.engine_debug(f"{self.name}: starting run-and-return {fn_str}")
+                try:
+                    result = await command_fn(*args, **kwargs)
+                except BaseException as e:
+                    if in_exception_chain(e, (KeyboardInterrupt, asyncio.CancelledError)):
+                        log_fn = self.log.debug
+                    else:
+                        log_fn = self.log.error
+                    error = f"{self.name}: error in {fn_str}: {e}"
+                    trace = traceback.format_exc()
+                    log_fn(error)
+                    self.log.trace(trace)
+                    result = {"_e": (error, trace)}
+                finally:
+                    self.tasks.pop(client_id, None)
+                self.engine_debug(f"{self.name}: sending response to {fn_str}: {result}")
+                await self.send_socket_multipart(client_id, result)
+            except BaseException as e:
+                self.log.critical(
+                    f"Unhandled exception in {self.name}.run_and_return({client_id}, {command_fn}, {args}, {kwargs}): {e}"
+                )
+                self.log.critical(traceback.format_exc())
+            finally:
+                self.engine_debug(f"{self.name} finished run-and-return {fn_str}")
+
+    async def run_and_yield(self, client_id, command_fn, *args, **kwargs):
+        fn_str = f"{command_fn.__name__}({args}, {kwargs})"
+        with self.client_id_context(client_id):
+            try:
+                self.engine_debug(f"{self.name}: starting run-and-yield {fn_str}")
+                try:
+                    async for _ in command_fn(*args, **kwargs):
+                        self.engine_debug(f"{self.name}: sending iteration for {fn_str}: {_}")
+                        await self.send_socket_multipart(client_id, _)
+                except BaseException as e:
+                    if in_exception_chain(e, (KeyboardInterrupt, asyncio.CancelledError)):
+                        log_fn = self.log.debug
+                    else:
+                        log_fn = self.log.error
+                    error = f"{self.name}: error in {fn_str}: {e}"
+                    trace = traceback.format_exc()
+                    log_fn(error)
+                    self.log.trace(trace)
+                    result = {"_e": (error, trace)}
+                    await 
self.send_socket_multipart(client_id, result) + finally: + self.engine_debug(f"{self.name}: reached end of run-and-yield iteration for {fn_str}") + # _s == special signal that means StopIteration + await self.send_socket_multipart(client_id, {"_s": None}) + self.tasks.pop(client_id, None) + except BaseException as e: + self.log.critical( + f"Unhandled exception in {self.name}.run_and_yield({client_id}, {command_fn}, {args}, {kwargs}): {e}" + ) + self.log.critical(traceback.format_exc()) + finally: + self.engine_debug(f"{self.name}: finished run-and-yield {fn_str}") + + async def send_socket_multipart(self, client_id, message): + try: + message = pickle.dumps(message) + await self._infinite_retry(self.socket.send_multipart, [client_id, message]) + except Exception as e: + self.log.verbose(f"{self.name}: error sending ZMQ message: {e}") + self.log.trace(traceback.format_exc()) + + def check_error(self, message): + if message is error_sentinel: + return True + + async def worker(self): + self.engine_debug(f"{self.name}: starting worker") + try: + while 1: + client_id, binary = await self.socket.recv_multipart() + message = self.unpickle(binary) + self.engine_debug(f"{self.name} got message: {message}") + if self.check_error(message): + continue + + cmd = message.get("c", None) + if not isinstance(cmd, int): + self.log.warning(f"{self.name}: no command sent in message: {message}") + continue + + # -1 == cancel task + if cmd == -1: + self.engine_debug(f"{self.name} got cancel signal") + await self.send_socket_multipart(client_id, {"m": "CANCEL_OK"}) + await self.cancel_task(client_id) + continue + + # -99 == shutdown task + if cmd == -99: + self.log.verbose(f"{self.name} got shutdown signal") + await self.send_socket_multipart(client_id, {"m": "SHUTDOWN_OK"}) + await self._shutdown() + return + + args = message.get("a", ()) + if not isinstance(args, tuple): + self.log.warning(f"{self.name}: received invalid args of type {type(args)}, should be tuple") + continue + kwargs = message.get("k", {}) + if not isinstance(kwargs, dict): + self.log.warning(f"{self.name}: received invalid kwargs of type {type(kwargs)}, should be dict") + continue + + command_name = self.CMDS[cmd] + command_fn = getattr(self, command_name, None) + + if command_fn is None: + self.log.warning(f'{self.name} has no function named "{command_fn}"') + continue + + if inspect.isasyncgenfunction(command_fn): + self.engine_debug(f"{self.name}: creating run-and-yield coroutine for {command_name}()") + coroutine = self.run_and_yield(client_id, command_fn, *args, **kwargs) + else: + self.engine_debug(f"{self.name}: creating run-and-return coroutine for {command_name}()") + coroutine = self.run_and_return(client_id, command_fn, *args, **kwargs) + + self.engine_debug(f"{self.name}: creating task for {command_name}() coroutine") + task = asyncio.create_task(coroutine) + self.tasks[client_id] = task, command_fn, args, kwargs + self.engine_debug(f"{self.name}: finished creating task for {command_name}() coroutine") + except BaseException as e: + await self._shutdown() + if not in_exception_chain(e, (KeyboardInterrupt, asyncio.CancelledError)): + self.log.error(f"{self.name}: error in EngineServer worker: {e}") + self.log.trace(traceback.format_exc()) + finally: + self.engine_debug(f"{self.name}: finished worker()") + + async def _shutdown(self): + if not self._shutdown_status: + self.log.verbose(f"{self.name}: shutting down...") + self._shutdown_status = True + await self.cancel_all_tasks() + context = getattr(self, "context", None) + if 
+            context = getattr(self, "context", None)
+            if context is not None:
+                try:
+                    context.destroy(linger=0)
+                except Exception:
+                    self.log.trace(traceback.format_exc())
+                try:
+                    context.term()
+                except Exception:
+                    self.log.trace(traceback.format_exc())
+            self.log.verbose(f"{self.name}: finished shutting down")
+
+    async def task_pool(self, fn, args_kwargs, threads=10, timeout=300, global_kwargs=None):
+        if global_kwargs is None:
+            global_kwargs = {}
+
+        tasks = {}
+        args_kwargs = list(args_kwargs)
+
+        def new_task():
+            if args_kwargs:
+                kwargs = {}
+                tracker = None
+                args = args_kwargs.pop(0)
+                if isinstance(args, (list, tuple)):
+                    # you can specify a custom tracker value if you want
+                    # this helps with correlating results
+                    with suppress(ValueError):
+                        args, kwargs, tracker = args
+                    # or you can just specify args/kwargs
+                    with suppress(ValueError):
+                        args, kwargs = args
+
+                if not isinstance(kwargs, dict):
+                    raise ValueError(f"kwargs must be dict (got: {kwargs})")
+                if not isinstance(args, (list, tuple)):
+                    args = [args]
+
+                task = self.new_child_task(fn(*args, **kwargs, **global_kwargs))
+                tasks[task] = (args, kwargs, tracker)
+
+        for _ in range(threads):  # Start initial batch of tasks
+            new_task()
+
+        while tasks:  # While there are tasks pending
+            # Wait for the first task to complete
+            finished = await self.finished_tasks(tasks, timeout=timeout)
+            for task in finished:
+                result = task.result()
+                (args, kwargs, tracker) = tasks.pop(task)
+                yield (args, kwargs, tracker), result
+                new_task()
+
+    def new_child_task(self, coro):
+        """
+        Create a new asyncio task, making sure to track it based on the client id.
+
+        This allows the task to be automatically cancelled if its parent is cancelled.
+        """
+        client_id = self.client_id_var.get()
+        task = asyncio.create_task(coro)
+
+        if client_id:
+
+            def remove_task(t):
+                tasks = self.child_tasks.get(client_id, set())
+                tasks.discard(t)
+                if not tasks:
+                    self.child_tasks.pop(client_id, None)
+
+            task.add_done_callback(remove_task)
+
+            try:
+                self.child_tasks[client_id].add(task)
+            except KeyError:
+                self.child_tasks[client_id] = {task}
+
+        return task
+
+    async def finished_tasks(self, tasks, timeout=None):
+        """
+        Given a list of asyncio tasks, return the ones that are finished, with an optional timeout
+        """
+        if tasks:
+            try:
+                done, pending = await asyncio.wait(tasks, return_when=asyncio.FIRST_COMPLETED, timeout=timeout)
+                return done
+            except BaseException as e:
+                if isinstance(e, (TimeoutError, asyncio.exceptions.TimeoutError)):
+                    self.log.warning(f"{self.name}: Timeout after {timeout:,} seconds in finished_tasks({tasks})")
+                    for task in list(tasks):
+                        task.cancel()
+                        await self._await_cancelled_task(task)
+                else:
+                    if not in_exception_chain(e, (KeyboardInterrupt, asyncio.CancelledError)):
+                        self.log.error(f"{self.name}: Unhandled exception in finished_tasks({tasks}): {e}")
+                        self.log.trace(traceback.format_exc())
+                    raise
+        return set()
+
+    async def cancel_task(self, client_id):
+        parent_task = self.tasks.pop(client_id, None)
+        if parent_task is None:
+            return
+        parent_task, _cmd, _args, _kwargs = parent_task
+        self.engine_debug(f"{self.name}: Cancelling client id {client_id} (task: {parent_task})")
+        parent_task.cancel()
+        child_tasks = self.child_tasks.pop(client_id, set())
+        if child_tasks:
+            self.engine_debug(f"{self.name}: Cancelling {len(child_tasks):,} child tasks for client id {client_id}")
+            for child_task in child_tasks:
+                child_task.cancel()
+
+        for task in [parent_task] + list(child_tasks):
+            await self._await_cancelled_task(task)
+
+    async def _await_cancelled_task(self, task):
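+        """
+        Await a just-cancelled task (for up to 10 seconds) so its cancellation and teardown can complete.
+        """
+        try:
+            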
await asyncio.wait_for(task, timeout=10) + except (TimeoutError, asyncio.exceptions.TimeoutError): + self.log.trace(f"{self.name}: Timeout cancelling task: {task}") + return + except (KeyboardInterrupt, asyncio.CancelledError): + return + except BaseException as e: + self.log.error(f"Unhandled error in {task.get_coro().__name__}(): {e}") + self.log.trace(traceback.format_exc()) + + async def cancel_all_tasks(self): + for client_id in list(self.tasks): + await self.cancel_task(client_id) + for client_id, tasks in self.child_tasks.items(): + for task in list(tasks): + await self._await_cancelled_task(task) diff --git a/bbot/core/errors.py b/bbot/core/errors.py deleted file mode 100644 index d7251c14aa..0000000000 --- a/bbot/core/errors.py +++ /dev/null @@ -1,49 +0,0 @@ -from requests.exceptions import RequestException # noqa F401 - - -class BBOTError(Exception): - pass - - -class ScanError(BBOTError): - pass - - -class ScanCancelledError(BBOTError): - pass - - -class ArgumentError(BBOTError): - pass - - -class ValidationError(BBOTError): - pass - - -class ConfigLoadError(BBOTError): - pass - - -class HttpCompareError(BBOTError): - pass - - -class DirectoryCreationError(BBOTError): - pass - - -class DirectoryDeletionError(BBOTError): - pass - - -class NTLMError(BBOTError): - pass - - -class InteractshError(BBOTError): - pass - - -class WordlistError(BBOTError): - pass diff --git a/bbot/core/event/__init__.py b/bbot/core/event/__init__.py index 481dee5498..b5d1c86085 100644 --- a/bbot/core/event/__init__.py +++ b/bbot/core/event/__init__.py @@ -1,2 +1 @@ -from .base import make_event, is_event -from .helpers import make_event_id, is_event_id +from .base import make_event, is_event, event_from_json diff --git a/bbot/core/event/base.py b/bbot/core/event/base.py index ee9d3398fc..2c4718844c 100644 --- a/bbot/core/event/base.py +++ b/bbot/core/event/base.py @@ -1,27 +1,44 @@ +import io +import re +import uuid import json +import base64 import logging +import tarfile +import datetime import ipaddress +import traceback + +from copy import copy +from pathlib import Path from typing import Optional -from datetime import datetime from contextlib import suppress -from pydantic import BaseModel, validator -from threading import Event as ThreadingEvent +from radixtarget import RadixTarget +from pydantic import BaseModel, field_validator +from urllib.parse import urlparse, urljoin, parse_qs + from .helpers import * -from bbot.core.errors import * +from bbot.errors import * from bbot.core.helpers import ( extract_words, - split_host_port, - host_in_host, is_domain, is_subdomain, is_ip, + is_ip_type, + is_ptr, + is_uri, + url_depth, domain_stem, make_netloc, make_ip_type, + recursive_decode, + sha1, smart_decode, - get_file_extension, + split_host_port, + tagify, validators, + get_file_extension, ) @@ -29,105 +46,269 @@ class BaseEvent: + """ + Represents a piece of data discovered during a BBOT scan. + + An Event contains various attributes that provide metadata about the discovered data. + The attributes assist in understanding the context of the Event and facilitate further + filtering and querying. Events are integral in the construction of visual graphs and + are the cornerstone of data exchange between BBOT modules. + + You can inherit from this class when creating a new event type. However, it's not always + necessary. You only need to subclass if you want to layer additional functionality on + top of the base class. 
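+
+    In practice, you will almost always create events via the `make_event()` factory
+    at the bottom of this module rather than by instantiating an event class directly.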
+
+    Attributes:
+        type (str): Specifies the type of the event, e.g., `IP_ADDRESS`, `DNS_NAME`.
+        id (str): An identifier for the event (event type + sha1 hash of data). NOT universally unique.
+        uuid (UUID): A universally unique identifier for the event.
+        data (str or dict): The main data for the event, e.g., a URL or IP address.
+        data_graph (str): Representation of `self.data` for graph nodes (e.g. Neo4j).
+        data_human (str): Representation of `self.data` for human output.
+        data_id (str): Representation of `self.data` used to calculate the event's ID (and ultimately its hash, which is used for deduplication)
+        data_json (str): Representation of `self.data` to be used in JSON serialization.
+        host (str, IPvXAddress, or IPvXNetwork): The associated IP address or hostname for the event
+        host_stem (str): An abbreviated representation of hostname that removes the TLD, e.g. "www.evilcorp". Used by the word cloud.
+        port (int or None): The port associated with the event, if applicable, else None.
+        words (set): A set of relevant keywords extracted from the event. Used by the word cloud.
+        scope_distance (int): Indicates how many hops the event is from the main scope; 0 means in-scope.
+        web_spider_distance (int): The spider distance from the web root, specific to web crawling.
+        scan (Scanner): The scan object that generated the event.
+        timestamp (datetime.datetime): The time at which the data was discovered.
+        resolved_hosts (set of str): Set of hosts to which the event data resolves, applicable for URLs and DNS names.
+        parent (BaseEvent): The parent event that led to the discovery of this event.
+        parent_id (str): The `id` attribute of the parent event.
+        parent_uuid (str): The `uuid` attribute of the parent event.
+        tags (set of str): Descriptive tags for the event, e.g., `mx-record`, `in-scope`.
+        module (BaseModule): The module that discovered the event.
+        module_sequence (str): The sequence of modules that participated in the discovery.
+ + Examples: + ```json + { + "type": "URL", + "id": "URL:017ec8e5dc158c0fd46f07169f8577fb4b45e89a", + "data": "http://www.blacklanternsecurity.com/", + "web_spider_distance": 0, + "scope_distance": 0, + "scan": "SCAN:4d786912dbc97be199da13074699c318e2067a7f", + "timestamp": 1688526222.723366, + "resolved_hosts": ["185.199.108.153"], + "parent": "OPEN_TCP_PORT:cf7e6a937b161217eaed99f0c566eae045d094c7", + "tags": ["in-scope", "distance-0", "dir", "ip-185-199-108-153", "status-301", "http-title-301-moved-permanently"], + "module": "httpx", + "module_sequence": "httpx" + } + ``` + """ - # Exclude from output modules - _omit = False - # Priority, 1-5, lower numbers == higher priority - _priority = 3 + # Always emit this event type even if it's not in scope + _always_emit = False + # Always emit events with these tags even if they're not in scope + _always_emit_tags = ["affiliate", "target"] + # Bypass scope checking and dns resolution, distribute immediately to modules + # This is useful for "end-of-line" events like FINDING and VULNERABILITY + _quick_emit = False + # Whether this event has been retroactively marked as part of an important discovery chain + _graph_important = False # Disables certain data validations _dummy = False # Data validation, if data is a dictionary _data_validator = None + # Whether to increment scope distance if the child and parent hosts are the same + _scope_distance_increment_same_host = False + # Don't allow duplicates to occur within a parent chain + # In other words, don't emit the event if the same one already exists in its discovery context + _suppress_chain_dupes = False def __init__( self, data, - event_type=None, - source=None, + event_type, + parent=None, + context=None, module=None, scan=None, + scans=None, tags=None, confidence=100, + timestamp=None, _dummy=False, _internal=None, ): - + """ + Initializes an Event object with the given parameters. + + In most cases, you should use `make_event()` instead of instantiating this class directly. + `make_event()` is much friendlier, and can auto-detect the event type for you. + + Attributes: + data (str, dict): The primary data for the event. + event_type (str, optional): Type of the event, e.g., 'IP_ADDRESS'. + parent (BaseEvent, optional): Parent event that led to this event's discovery. Defaults to None. + module (str, optional): Module that discovered the event. Defaults to None. + scan (Scan, optional): BBOT Scan object. Required unless _dummy is True. Defaults to None. + scans (list of Scan, optional): BBOT Scan objects, used primarily when unserializing an Event from the database. Defaults to None. + tags (list of str, optional): Descriptive tags for the event. Defaults to None. + confidence (int, optional): Confidence level for the event, on a scale of 1-100. Defaults to 100. + timestamp (datetime, optional): Time of event discovery. Defaults to current UTC time. + _dummy (bool, optional): If True, disables certain data validations. Defaults to False. + _internal (Any, optional): If specified, makes the event internal. Defaults to None. + + Raises: + ValidationError: If either `scan` or `parent` are not specified and `_dummy` is False. 
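+
+        Examples:
+            A minimal, illustrative instantiation (in real code, prefer `make_event()`):
+            >>> DNS_NAME("evilcorp.com", "DNS_NAME", tags=["target"], _dummy=True)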
+ """ + self._uuid = uuid.uuid4() self._id = None self._hash = None + self._data = None self.__host = None + self._tags = set() self._port = None + self._omit = False self.__words = None - self._made_internal = False - # whether to force-send to output modules - self._force_output = False + self._parent = None + self._priority = None + self._parent_id = None + self._parent_uuid = None + self._host_original = None + self._scope_distance = None + self._module_priority = None + self._resolved_hosts = set() + self.dns_children = {} + self.raw_dns_records = {} + self._discovery_context = "" + self._discovery_context_regex = re.compile(r"\{(?:event|module)[^}]*\}") + self.web_spider_distance = 0 + + # for creating one-off events without enforcing parent requirement + self._dummy = _dummy + self.module = module + self._type = event_type - self.timestamp = datetime.utcnow() + # keep track of whether this event has been recorded by the scan + self._stats_recorded = False - if tags is None: - tags = set() + if timestamp is not None: + self.timestamp = timestamp + else: + try: + self.timestamp = datetime.datetime.now(datetime.UTC) + except AttributeError: + self.timestamp = datetime.datetime.utcnow() - self._data = None - self.type = event_type - self.tags = set(tags) self.confidence = int(confidence) - - # for creating one-off events without enforcing source requirement - self._dummy = _dummy self._internal = False - self.module = module + # self.scan holds the instantiated scan object (for helpers, etc.) self.scan = scan if (not self.scan) and (not self._dummy): - raise ValidationError(f"Must specify scan") - - # check type blacklist - if self.scan is not None: - omit_event_types = self.scan.config.get("omit_event_types", []) - if omit_event_types and self.type in omit_event_types: - self._omit = True - - self._scope_distance = -1 + raise ValidationError("Must specify scan") + # self.scans holds a list of scan IDs from scans that encountered this event + self.scans = [] + if scans is not None: + self.scans = scans + if self.scan: + self.scans = list(set([self.scan.id] + self.scans)) try: self.data = self._sanitize_data(data) except Exception as e: - import traceback - - log.debug(traceback.format_exc()) + log.trace(traceback.format_exc()) raise ValidationError(f'Error sanitizing event data "{data}" for type "{self.type}": {e}') if not self.data: raise ValidationError(f'Invalid event data "{data}" for type "{self.type}"') - self._source = None - self.source_id = None - self.source = source - if (not self.source) and (not self._dummy): - raise ValidationError(f"Must specify event source") + self.parent = parent + if (not self.parent) and (not self._dummy): + raise ValidationError("Must specify event parent") - if not self._dummy: - self._setup() + if tags is not None: + for tag in tags: + self.add_tag(tag) # internal events are not ingested by output modules if not self._dummy: # removed this second part because it was making certain sslcert events internal - if _internal: # or source._internal: - self.make_internal() + if _internal: # or parent._internal: + self.internal = True - self._resolved = ThreadingEvent() + if not context: + context = getattr(self.module, "default_discovery_context", "") + if context: + self.discovery_context = context @property def data(self): return self._data + @property + def confidence(self): + return self._confidence + + @confidence.setter + def confidence(self, confidence): + self._confidence = min(100, max(1, int(confidence))) + + @property + def 
cumulative_confidence(self): + """ + Considers the confidence of parent events. This is useful for filtering out speculative/unreliable events. + + E.g. an event with a confidence of 50 whose parent is also 50 would have a cumulative confidence of 25. + + A confidence of 100 will reset the cumulative confidence to 100. + """ + if self._confidence == 100 or self.parent is None or self.parent is self: + return self._confidence + return int(self._confidence * self.parent.cumulative_confidence / 100) + + @property + def resolved_hosts(self): + if is_ip(self.host): + return { + self.host, + } + return self._resolved_hosts + @data.setter def data(self, data): self._hash = None + self._data_hash = None self._id = None self.__host = None self._port = None self._data = data + @property + def internal(self): + return self._internal + + @internal.setter + def internal(self, value): + """ + Marks the event as internal, excluding it from output but allowing normal exchange between scan modules. + + Internal events are typically speculative and may not be interesting by themselves but can lead to + the discovery of interesting events. This method sets the `_internal` attribute to True and adds the + "internal" tag. + + Examples of internal events include `OPEN_TCP_PORT`s from the `speculate` module, + `IP_ADDRESS`es from the `ipneighbor` module, or out-of-scope `DNS_NAME`s that originate + from DNS resolutions. + + The purpose of internal events is to enable speculative/explorative discovery without cluttering + the console with irrelevant or uninteresting events. + """ + if value not in (True, False): + raise ValueError(f'"internal" must be boolean, not {type(value)}') + if value is True: + self.add_tag("internal") + else: + self.remove_tag("internal") + self._internal = value + @property def host(self): """ @@ -142,14 +323,57 @@ def host(self): E.g. for IP_ADDRESS, it could be an ipaddress.IPv4Address() or IPv6Address() object """ if self.__host is None: - self.__host = self._host() + self.host = self._host() return self.__host + @host.setter + def host(self, host): + if self._host_original is None: + self._host_original = host + self.__host = host + + @property + def host_original(self): + """ + Original host data, in case it was changed due to a wildcard DNS, etc. + """ + if self._host_original is None: + return self.host + return self._host_original + + @property + def host_filterable(self): + """ + A string version of the event that's used for regex-based blacklisting. + + For example, the user can specify "REGEX:.*.evilcorp.com" in their blacklist, and this regex + will be applied against this property. 
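+
+        Examples:
+            Illustrative: a DNS_NAME filters on its host, a URL on its full URL:
+            >>> make_event("www.evilcorp.com", "DNS_NAME", dummy=True).host_filterable
+            'www.evilcorp.com'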
+ """ + parsed_url = getattr(self, "parsed_url", None) + if parsed_url is not None: + return parsed_url.geturl() + if self.host is not None: + return str(self.host) + return "" + @property def port(self): self.host + if getattr(self, "parsed_url", None): + if self.parsed_url.port is not None: + return self.parsed_url.port + elif self.parsed_url.scheme == "https": + return 443 + elif self.parsed_url.scheme == "http": + return 80 return self._port + @property + def netloc(self): + if self.host and is_ip_type(self.host, network=False): + return make_netloc(self.host, self.port) + return None + @property def host_stem(self): """ @@ -161,6 +385,42 @@ def host_stem(self): else: return f"{self.host}" + @property + def discovery_context(self): + return self._discovery_context + + @discovery_context.setter + def discovery_context(self, context): + def replace(match): + s = match.group() + return s.format(module=self.module, event=self) + + try: + self._discovery_context = self._discovery_context_regex.sub(replace, context) + except Exception as e: + log.trace(f"Error formatting discovery context for {self}: {e} (context: '{context}')") + self._discovery_context = context + + @property + def discovery_path(self): + """ + This event's full discovery context, including those of all its parents + """ + discovery_path = [] + if self.parent is not None and self.parent is not self: + discovery_path = self.parent.discovery_path + return discovery_path + [self.discovery_context] + + @property + def parent_chain(self): + """ + This event's full discovery context, including those of all its parents + """ + parent_chain = [] + if self.parent is not None and self.parent is not self: + parent_chain = self.parent.parent_chain + return parent_chain + [str(self.uuid)] + @property def words(self): if self.__words is None: @@ -170,109 +430,244 @@ def words(self): def _words(self): return set() + @property + def tags(self): + return self._tags + + @tags.setter + def tags(self, tags): + self._tags = set() + if isinstance(tags, str): + tags = (tags,) + for tag in tags: + self.add_tag(tag) + + def add_tag(self, tag): + self._tags.add(tagify(tag)) + + def add_tags(self, tags): + for tag in set(tags): + self.add_tag(tag) + + def remove_tag(self, tag): + with suppress(KeyError): + self._tags.remove(tagify(tag)) + + @property + def always_emit(self): + """ + If this returns True, the event will always be distributed to output modules regardless of scope distance + """ + always_emit_tags = any(t in self.tags for t in self._always_emit_tags) + no_host_information = not bool(self.host) + return self._always_emit or always_emit_tags or no_host_information + @property def id(self): + """ + A uniquely identifiable hash of the event from the event type + a SHA1 of its data + """ if self._id is None: - self._id = make_event_id(self.data_id, self.type) + self._id = f"{self.type}:{self.data_hash.hex()}" return self._id + @property + def uuid(self): + """ + A universally unique identifier for the event + """ + return f"{self.type}:{self._uuid}" + + @property + def data_hash(self): + """ + A raw byte hash of the event's data + """ + if self._data_hash is None: + self._data_hash = sha1(self.data_id).digest() + return self._data_hash + @property def scope_distance(self): return self._scope_distance @scope_distance.setter def scope_distance(self, scope_distance): - if scope_distance >= 0: - new_scope_distance = None - # ensure scope distance does not increase (only allow setting to smaller values) - if self.scope_distance == -1: - 
new_scope_distance = scope_distance - else: - new_scope_distance = min(self.scope_distance, scope_distance) + """ + Setter for the scope_distance attribute, ensuring it only decreases. + + The scope_distance attribute is designed to never increase; it can only be set to smaller values than + the current one. If a larger value is provided, it is ignored. The setter also updates the event's + tags to reflect the new scope distance. + + Parameters: + scope_distance (int): The new scope distance to set, must be a non-negative integer. + + Note: + The method will automatically update the relevant 'distance-' tags associated with the event. + """ + if scope_distance < 0: + raise ValueError(f"Invalid scope distance: {scope_distance}") + # ensure scope distance does not increase (only allow setting to smaller values) + if self.scope_distance is None: + new_scope_distance = scope_distance + else: + new_scope_distance = min(self.scope_distance, scope_distance) + if self._scope_distance != new_scope_distance: + # remove old scope distance tags self._scope_distance = new_scope_distance - for t in list(self.tags): - if t.startswith("distance-"): - self.tags.remove(t) - self.tags.add(f"distance-{new_scope_distance}") + self.refresh_scope_tags() + # apply recursively to parent events + parent_scope_distance = getattr(self.parent, "scope_distance", None) + if parent_scope_distance is not None and self.parent is not self: + self.parent.scope_distance = new_scope_distance + 1 + + def refresh_scope_tags(self): + for t in list(self.tags): + if t.startswith("distance-"): + self.remove_tag(t) + if self.host: + if self.scope_distance == 0: + self.add_tag("in-scope") + self.remove_tag("affiliate") + else: + self.remove_tag("in-scope") + self.add_tag(f"distance-{self.scope_distance}") @property - def source(self): - return self._source + def scope_description(self): + """ + Returns a single word describing the scope of the event. - @source.setter - def source(self, source): - if is_event(source): - self._source = source - if source.scope_distance >= 0 and source != self: - new_scope_distance = int(source.scope_distance) + "in-scope" if the event is in scope, "affiliate" if it's an affiliate, otherwise "distance-{scope_distance}" + """ + if self.scope_distance == 0: + return "in-scope" + elif "affiliate" in self.tags: + return "affiliate" + return f"distance-{self.scope_distance}" + + @property + def parent(self): + return self._parent + + @parent.setter + def parent(self, parent): + """ + Setter for the parent attribute, ensuring it's a valid event and updating scope distance. + + Sets the parent of the event and automatically adjusts the scope distance based on the parent event's + scope distance. The scope distance is incremented by 1 if the host of the parent event is different + from the current event's host. + + Parameters: + parent (BaseEvent): The new parent event to set. Must be a valid event object. + + Note: + If an invalid parent is provided and the event is not a dummy, a warning will be logged. 
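+
+        Example:
+            A parent at scope distance 0 whose child has a different host produces a child
+            at scope distance 1; if the two hosts match, the distance is inherited unchanged.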
+ """ + if is_event(parent): + self._parent = parent + hosts_are_same = (self.host and parent.host) and (self.host == parent.host) + new_scope_distance = int(parent.scope_distance) + if self.host and parent.scope_distance is not None: # only increment the scope distance if the host changes - if not self.host == source.host: + if self._scope_distance_increment_same_host or not hosts_are_same: new_scope_distance += 1 - self.scope_distance = new_scope_distance - self.source_id = str(source.id) + self.scope_distance = new_scope_distance + # inherit certain tags + if hosts_are_same: + # inherit web spider distance from parent + self.web_spider_distance = getattr(parent, "web_spider_distance", 0) + event_has_url = getattr(self, "parsed_url", None) is not None + for t in parent.tags: + if t in ("affiliate",): + self.add_tag(t) + elif t.startswith("mutation-"): + self.add_tag(t) + # only add these tags if the event has a URL + if event_has_url: + if t in ("spider-danger", "spider-max"): + self.add_tag(t) elif not self._dummy: - log.warning(f"Tried to set invalid source on {self}: (got: {source})") + log.warning(f"Tried to set invalid parent on {self}: (got: {repr(parent)} ({type(parent)}))") - def get_source(self): - """ - Takes into account events with the _omit flag + @property + def parent_id(self): + parent_id = getattr(self.get_parent(), "id", None) + if parent_id is not None: + return parent_id + return self._parent_id + + @property + def parent_uuid(self): + parent_uuid = getattr(self.get_parent(), "uuid", None) + if parent_uuid is not None: + return parent_uuid + return self._parent_uuid + + @property + def validators(self): """ - if getattr(self.source, "_omit", False): - return self.source.get_source() - return self.source - - def make_internal(self): - if not self._made_internal: - self._internal = True - self.tags.add("internal") - self._made_internal = True - - def unmake_internal(self, set_scope_distance=None, force_output=False, emit_trail=True): - source_trail = [] - if self._made_internal: - if set_scope_distance is not None: - self.scope_distance = set_scope_distance - self._internal = False - self.tags.remove("internal") - if force_output: - self._force_output = True - self._made_internal = False - - if getattr(self.source, "_internal", False): - source_scope_distance = None - if set_scope_distance is not None: - source_scope_distance = set_scope_distance + 1 - source_trail += self.source.unmake_internal( - set_scope_distance=source_scope_distance, force_output=force_output - ) - source_trail.append(self.source) + Depending on whether the scan attribute is accessible, return either a config-aware or non-config-aware validator - if emit_trail and self.scan: - for e in source_trail: - self.scan.manager.emit_event(e, release=False) + This exists to prevent a chicken-and-egg scenario during the creation of certain events such as URLs, + whose sanitization behavior is different depending on the config. - return source_trail + However, thanks to this property, validation can still work in the absence of a config. 
+ """ + if self.scan is not None: + return self.scan.helpers.config_aware_validators + return validators - def make_in_scope(self, set_scope_distance=0): - source_trail = [] - # keep the event internal if the module requests so, unless it's a DNS_NAME - if getattr(self.module, "_scope_shepherding", True) or self.type in ("DNS_NAME",): - source_trail = self.unmake_internal( - set_scope_distance=set_scope_distance, force_output=True, emit_trail=True - ) - self.scope_distance = set_scope_distance - if set_scope_distance == 0: - self.tags.add("in-scope") - return source_trail + def get_parent(self): + """ + Takes into account events with the _omit flag + """ + if getattr(self.parent, "_omit", False): + return self.parent.get_parent() + return self.parent + + def get_parents(self, omit=False, include_self=False): + parents = [] + e = self + if include_self: + parents.append(self) + while 1: + if omit: + parent = e.get_parent() + else: + parent = e.parent + if parent is None: + break + if e == parent: + break + parents.append(parent) + e = parent + return parents def _host(self): return "" def _sanitize_data(self, data): + """ + Validates and sanitizes the event's data during instantiation. + + By default, uses the '_data_load' method to pre-process the data and then applies the '_data_validator' + to validate and create a sanitized dictionary. Raises a ValidationError if any of the validations fail. + Subclasses can override this method to provide custom validation logic. + + Returns: + Any: The sanitized data. + + Raises: + ValidationError: If the data fails to validate. + """ + data = self._data_load(data) if self._data_validator is not None: if not isinstance(data, dict): raise ValidationError(f"data is not of type dict: {data}") - data = self._data_validator(**data).dict() + data = self._data_validator(**data).model_dump(exclude_none=True) return self.sanitize_data(data) def sanitize_data(self, data): @@ -280,32 +675,64 @@ def sanitize_data(self, data): @property def data_human(self): + """ + Human representation of event.data + """ return self._data_human() def _data_human(self): - return str(self.data) + if isinstance(self.data, (dict, list)): + with suppress(Exception): + return json.dumps(self.data, sort_keys=True) + return smart_decode(self.data) + + def _data_load(self, data): + """ + How to load the event data (JSON-decode it, etc.) + """ + return data @property def data_id(self): + """ + Representation of the event.data used to calculate the event's ID + """ return self._data_id() def _data_id(self): return self.data @property - def data_graph(self): - return self._data_graph() + def pretty_string(self): + """ + A human-friendly representation of the event's data. Used for graph representation. - def _data_graph(self): - if type(self.data) in (list, dict): - with suppress(Exception): - return json.dumps(self.data, sort_keys=True) - return smart_decode(self.data) + If the event's data is a dictionary, the function will try to return a JSON-formatted string. + Otherwise, it will use smart_decode to convert the data into a string representation. + + Override if necessary. - def _setup(self): + Returns: + str: The graphical representation of the event's data. """ - Perform optional setup, e.g. 
adding custom tags + return self._pretty_string() + + def _pretty_string(self): + return self._data_human() + + @property + def data_graph(self): + """ + Representation of event.data for neo4j graph nodes """ + return self.pretty_string + + @property + def data_json(self): + """ + JSON representation of event.data + """ + return self.data def __contains__(self, other): """ @@ -326,49 +753,170 @@ def __contains__(self, other): if self.host == other.host: return True # hostnames and IPs - return host_in_host(other.host, self.host) + radixtarget = RadixTarget() + radixtarget.insert(self.host) + return bool(radixtarget.search(other.host)) return False - def json(self, mode="graph"): - j = dict() - for i in ("type", "id", "web_spider_distance"): + def json(self, mode="json", siem_friendly=False): + """ + Serializes the event object to a JSON-compatible dictionary. + + By default, it includes attributes such as 'type', 'id', 'data', 'scope_distance', and others that are present. + Additional specific attributes can be serialized based on the mode specified. + + Parameters: + mode (str): Specifies the data serialization mode. Default is "json". Other options include "graph", "human", and "id". + siem_friendly (bool): Whether to format the JSON in a way that's friendly to SIEM ingestion by Elastic, Splunk, etc. This ensures the value of "data" is always the same type (a dictionary). + + Returns: + dict: JSON-serializable dictionary representation of the event object. + """ + j = {} + # type, ID, scope description + for i in ("type", "id", "uuid", "scope_description", "netloc"): v = getattr(self, i, "") if v: - j.update({i: v}) + j.update({i: str(v)}) + # event data data_attr = getattr(self, f"data_{mode}", None) if data_attr is not None: - j["data"] = data_attr + data = data_attr + else: + data = smart_decode(self.data) + if siem_friendly: + j["data"] = {self.type: data} else: - j["data"] = smart_decode(self.data) + j["data"] = data + # host, dns children + if self.host: + j["host"] = str(self.host) + j["resolved_hosts"] = sorted(str(h) for h in self.resolved_hosts) + j["dns_children"] = {k: list(v) for k, v in self.dns_children.items()} + if isinstance(self.port, int): + j["port"] = self.port + # web spider distance + web_spider_distance = getattr(self, "web_spider_distance", None) + if web_spider_distance is not None: + j["web_spider_distance"] = web_spider_distance + # scope distance j["scope_distance"] = self.scope_distance - j["scan"] = self.scan.id - j["timestamp"] = self.timestamp.timestamp() - source = self.get_source() - source_id = getattr(source, "id", "") - if source_id: - j["source"] = source_id + # scan + if self.scan: + j["scan"] = self.scan.id + # timestamp + j["timestamp"] = self.timestamp.isoformat() + # parent event + parent_id = self.parent_id + if parent_id: + j["parent"] = parent_id + parent_uuid = self.parent_uuid + if parent_uuid: + j["parent_uuid"] = parent_uuid + # tags if self.tags: j.update({"tags": list(self.tags)}) + # parent module if self.module: j.update({"module": str(self.module)}) + # sequence of modules that led to discovery + if self.module_sequence: + j.update({"module_sequence": str(self.module_sequence)}) + # discovery context + j["discovery_context"] = self.discovery_context + j["discovery_path"] = self.discovery_path + j["parent_chain"] = self.parent_chain # normalize non-primitive python objects for k, v in list(j.items()): if k == "data": continue - if type(v) not in (str, int, float, bool, list, type(None)): + if type(v) not in (str, int, float, bool, 
list, dict, type(None)): try: j[k] = json.dumps(v, sort_keys=True) except Exception: j[k] = smart_decode(v) return j + @staticmethod + def from_json(j): + """ + Convenience shortcut to create an Event object from a JSON-compatible dictionary. + + Calls the `event_from_json()` function to deserialize the event. + + Parameters: + j (dict): The JSON-compatible dictionary containing event data. + + Returns: + Event: The deserialized Event object. + """ + return event_from_json(j) + + @property + def module_sequence(self): + """ + Get a human-friendly string that represents the sequence of modules responsible for generating this event. + + Includes the names of omitted parent events to provide a complete view of the module sequence leading to this event. + + Returns: + str: The module sequence in human-friendly format. + """ + module_name = getattr(self.module, "name", "") + if getattr(self.parent, "_omit", False): + module_name = f"{self.parent.module_sequence}->{module_name}" + return module_name + + @property + def module_priority(self): + if self._module_priority is None: + module = getattr(self, "module", None) + self._module_priority = int(max(1, min(5, getattr(module, "priority", 3)))) + return self._module_priority + + @module_priority.setter + def module_priority(self, priority): + self._module_priority = int(max(1, min(5, priority))) + @property def priority(self): - self_priority = int(max(1, min(5, self._priority))) - mod_priority = int(max(1, min(5, getattr(self.module, "priority", 1)))) - timestamp = self.timestamp.timestamp() - return self_priority + mod_priority + (1 / timestamp) + if self._priority is None: + timestamp = self.timestamp.timestamp() + if self.parent.timestamp == self.timestamp: + self._priority = (timestamp,) + else: + self._priority = getattr(self.parent, "priority", ()) + (timestamp,) + + return self._priority + + @property + def type(self): + return self._type + + @type.setter + def type(self, val): + self._type = val + self._hash = None + self._id = None + + @property + def _host_size(self): + """ + Used for sorting events by their host size, so that parent ones (e.g. IP subnets) come first + """ + if self.host: + if isinstance(self.host, str): + # smaller domains should come first + return len(self.host) + else: + try: + # bigger IP subnets should come first + return -self.host.num_addresses + except AttributeError: + # IP addresses default to 1 + return 1 + return 0 def __iter__(self): """ @@ -380,13 +928,13 @@ def __lt__(self, other): """ For queue sorting """ - return self.priority < int(getattr(other, "priority", 5)) + return self.priority < getattr(other, "priority", (0,)) def __gt__(self, other): """ For queue sorting """ - return self.priority > int(getattr(other, "priority", 5)) + return self.priority > getattr(other, "priority", (0,)) def __eq__(self, other): try: @@ -401,38 +949,142 @@ def __hash__(self): return self._hash def __str__(self): - d = str(self.data) - return f'{self.type}("{d[:50]}{("..." if len(d) > 50 else "")}", module={self.module}, tags={self.tags})' + max_event_len = 80 + d = str(self.data).replace("\n", "\\n") + return f'{self.type}("{d[:max_event_len]}{("..." 
if len(d) > max_event_len else "")}", module={self.module}, tags={self.tags})' def __repr__(self): return str(self) +class SCAN(BaseEvent): + def _data_human(self): + return f"{self.data['name']} ({self.data['id']})" + + @property + def discovery_path(self): + return [] + + @property + def parent_chain(self): + return [] + + +class FINISHED(BaseEvent): + """ + Special signal event to indicate end of scan + """ + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self._priority = (999999999999999,) + + class DefaultEvent(BaseEvent): def sanitize_data(self, data): return data class DictEvent(BaseEvent): - def _data_human(self): - return json.dumps(self.data, sort_keys=True) + def sanitize_data(self, data): + url = data.get("url", "") + if url: + self.parsed_url = self.validators.validate_url_parsed(url) + return data + + def _data_load(self, data): + if isinstance(data, str): + return json.loads(data) + return data class DictHostEvent(DictEvent): def _host(self): - return make_ip_type(self.data["host"]) + if isinstance(self.data, dict) and "host" in self.data: + return make_ip_type(self.data["host"]) + else: + parsed = getattr(self, "parsed_url", None) + if parsed is not None: + return make_ip_type(parsed.hostname) + + +class ClosestHostEvent(DictHostEvent): + # if a host/path/url isn't specified, this event type grabs it from the closest parent + # inherited by FINDING and VULNERABILITY + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + if not self.host: + for parent in self.get_parents(include_self=True): + # inherit closest URL + if "url" not in self.data: + parent_url = getattr(parent, "parsed_url", None) + if parent_url is not None: + self.data["url"] = parent_url.geturl() + # inherit closest path + if "path" not in self.data and isinstance(parent.data, dict) and not parent.type == "HTTP_RESPONSE": + parent_path = parent.data.get("path", None) + if parent_path is not None: + self.data["path"] = parent_path + # inherit closest host + if parent.host: + self.data["host"] = str(parent.host) + # we do this to refresh the hash + self.data = self.data + break + # die if we still haven't found a host + if not self.host and not self.data.get("path", ""): + raise ValueError(f"No host was found in event parents: {self.get_parents()}. 
Host must be specified!") + + +class DictPathEvent(DictEvent): + def sanitize_data(self, data): + new_data = dict(data) + new_data["path"] = str(new_data["path"]) + file_blobs = getattr(self.scan, "_file_blobs", False) + folder_blobs = getattr(self.scan, "_folder_blobs", False) + blob = None + try: + self._data_path = Path(data["path"]) + # prepend the scan's home dir if the path is relative + if not self._data_path.is_absolute(): + self._data_path = self.scan.home / self._data_path + if self._data_path.is_file(): + self.add_tag("file") + if file_blobs: + with open(self._data_path, "rb") as file: + blob = file.read() + elif self._data_path.is_dir(): + self.add_tag("folder") + if folder_blobs: + blob = self._tar_directory(self._data_path) + except KeyError: + pass + if blob: + new_data["blob"] = base64.b64encode(blob).decode("utf-8") + + return new_data + + def _tar_directory(self, dir_path): + tar_buffer = io.BytesIO() + with tarfile.open(fileobj=tar_buffer, mode="w:gz") as tar: + # Add the entire directory to the tar archive + tar.add(dir_path, arcname=dir_path.name) + return tar_buffer.getvalue() + + +class ASN(DictEvent): + _always_emit = True + _quick_emit = True class CODE_REPOSITORY(DictHostEvent): + _always_emit = True + class _data_validator(BaseModel): url: str - _validate_url = validator("url", allow_reuse=True)(validators.validate_url) - - def _host(self): - self.parsed = validators.validate_url_parsed(self.data["url"]) - return make_ip_type(self.parsed.hostname) + _validate_url = field_validator("url")(validators.validate_url) - def _data_graph(self): + def _pretty_string(self): return self.data["url"] @@ -440,9 +1092,10 @@ class IP_ADDRESS(BaseEvent): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) ip = ipaddress.ip_address(self.data) - self.tags.add(f"ipv{ip.version}") + self.add_tag(f"ipv{ip.version}") if ip.is_private: - self.tags.add("private") + self.add_tag("private-ip") + self.dns_resolve_distance = getattr(self.parent, "dns_resolve_distance", 0) def sanitize_data(self, data): return validators.validate_host(data) @@ -451,11 +1104,38 @@ def _host(self): return ipaddress.ip_address(self.data) -class IP_RANGE(BaseEvent): +class DnsEvent(BaseEvent): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + # prevent runaway DNS entries + self.dns_resolve_distance = 0 + parent = getattr(self, "parent", None) + module = getattr(self, "module", None) + module_type = getattr(module, "_type", "") + parent_module = getattr(parent, "module", None) + parent_module_type = getattr(parent_module, "_type", "") + if module_type == "DNS": + self.dns_resolve_distance = getattr(parent, "dns_resolve_distance", 0) + if parent_module_type == "DNS": + self.dns_resolve_distance += 1 + # self.add_tag(f"resolve-distance-{self.dns_resolve_distance}") + # tag subdomain / domain + if is_subdomain(self.host): + self.add_tag("subdomain") + elif is_domain(self.host): + self.add_tag("domain") + # tag private IP + try: + if self.host.is_private: + self.add_tag("private-ip") + except AttributeError: + pass + + +class IP_RANGE(DnsEvent): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) - net = ipaddress.ip_network(self.data, strict=False) - self.tags.add(f"ipv{net.version}") + self.add_tag(f"ipv{self.host.version}") def sanitize_data(self, data): return str(ipaddress.ip_network(str(data), strict=False)) @@ -464,16 +1144,7 @@ def _host(self): return ipaddress.ip_network(self.data) -class DNS_NAME(BaseEvent): - _priority = 2 - - def 
__init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - if is_subdomain(self.data): - self.tags.add("subdomain") - elif is_domain(self.data): - self.tags.add("domain") - +class DNS_NAME(DnsEvent): def sanitize_data(self, data): return validators.validate_host(data) @@ -482,9 +1153,15 @@ def _host(self): def _words(self): stem = self.host_stem - if "wildcard" in self.tags: - stem = "".join(stem.split(".")[1:]) - return extract_words(self.host_stem) + if not is_ptr(stem): + split_stem = stem.split(".") + if split_stem: + leftmost_segment = split_stem[0] + if leftmost_segment == "_wildcard": + stem = ".".join(split_stem[1:]) + if stem: + return extract_words(stem) + return set() class OPEN_TCP_PORT(BaseEvent): @@ -496,82 +1173,176 @@ def _host(self): return host def _words(self): - if not is_ip(self.host): + if not is_ip(self.host) and not is_ptr(self.host): return extract_words(self.host_stem) return set() class URL_UNVERIFIED(BaseEvent): + _status_code_regex = re.compile(r"^status-(\d{1,3})$") + def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) - self.web_spider_distance = getattr(self.source, "web_spider_distance", 0) + self.num_redirects = getattr(self.parent, "num_redirects", 0) + + def _data_id(self): + data = super()._data_id() + + # remove the querystring for URL/URL_UNVERIFIED events, because we will conditionally add it back in (based on settings) + if self.__class__.__name__.startswith("URL") and self.scan is not None: + prefix = data.split("?")[0] + + # consider spider-danger tag when deduping + if "spider-danger" in self.tags: + prefix += "spider-danger" + + if not self.scan.config.get("url_querystring_remove", True) and self.parsed_url.query: + query_dict = parse_qs(self.parsed_url.query) + if self.scan.config.get("url_querystring_collapse", True): + # Only consider parameter names in dedup (collapse values) + cleaned_query = "|".join(sorted(query_dict.keys())) + else: + # Consider parameter names and values in dedup + cleaned_query = "&".join( + f"{key}={','.join(sorted(values))}" for key, values in sorted(query_dict.items()) + ) + data = f"{prefix}:{self.parsed_url.scheme}:{self.parsed_url.netloc}:{self.parsed_url.path}:{cleaned_query}" + return data def sanitize_data(self, data): - self.parsed = validators.validate_url_parsed(data) + self.parsed_url = self.validators.validate_url_parsed(data) + + # special handling of URL extensions + if self.parsed_url is not None: + url_path = self.parsed_url.path + if url_path: + parsed_path_lower = str(url_path).lower() + extension = get_file_extension(parsed_path_lower) + if extension: + self.url_extension = extension + self.add_tag(f"extension-{extension}") # tag as dir or endpoint - if str(self.parsed.path).endswith("/"): - self.tags.add("dir") + if str(self.parsed_url.path).endswith("/"): + self.add_tag("dir") else: - self.tags.add("endpoint") - - parsed_path_lower = str(self.parsed.path).lower() - - url_extension_blacklist = [] - url_extension_httpx_only = [] - scan = getattr(self, "scan", None) - if scan is not None: - url_extension_blacklist = [e.lower() for e in scan.config.get("url_extension_blacklist", [])] - url_extension_httpx_only = [e.lower() for e in scan.config.get("url_extension_httpx_only", [])] - - extension = get_file_extension(parsed_path_lower) - if extension: - self.tags.add(f"extension-{extension}") - if extension in url_extension_blacklist: - self.tags.add("blacklisted") - if extension in url_extension_httpx_only: - self.tags.add("httpx-only") - self._omit = True - - data = 
self.parsed.geturl() + self.add_tag("endpoint") + + data = self.parsed_url.geturl() return data + def add_tag(self, tag): + self_url = getattr(self, "parsed_url", "") + self_host = getattr(self, "host", "") + # autoincrement web spider distance if the "spider-danger" tag is added + if tag == "spider-danger" and "spider-danger" not in self.tags and self_url and self_host: + parent_hosts_and_urls = set() + for p in self.get_parents(): + # URL_UNVERIFIED events don't count because they haven't been visited yet + if p.type == "URL_UNVERIFIED": + continue + url = getattr(p, "parsed_url", "") + parent_hosts_and_urls.add((p.host, url)) + # if there's a URL anywhere in our parent chain that's different from ours but shares our host, we're in dAnGeR + dangerous_parent = any( + p_host == self.host and p_url != self_url for p_host, p_url in parent_hosts_and_urls + ) + if dangerous_parent: + # increment the web spider distance + if self.type == "URL_UNVERIFIED": + self.web_spider_distance += 1 + if self.is_spider_max: + self.add_tag("spider-max") + super().add_tag(tag) + + @property + def is_spider_max(self): + if self.scan: + depth = url_depth(self.parsed_url) + if (self.web_spider_distance > self.scan.web_spider_distance) or (depth > self.scan.web_spider_depth): + return True + return False + def with_port(self): netloc_with_port = make_netloc(self.host, self.port) - return self.parsed._replace(netloc=netloc_with_port) + return self.parsed_url._replace(netloc=netloc_with_port) def _words(self): - first_elem = self.parsed.path.lstrip("/").split("/")[0] - if not "." in first_elem: + first_elem = self.parsed_url.path.lstrip("/").split("/")[0] + if "." not in first_elem: return extract_words(first_elem) return set() def _host(self): - return make_ip_type(self.parsed.hostname) + return make_ip_type(self.parsed_url.hostname) @property - def port(self): - if self.parsed.port is not None: - return self.parsed.port - elif self.parsed.scheme == "https": - return 443 - elif self.parsed.scheme == "http": - return 80 + def http_status(self): + for t in self.tags: + match = self._status_code_regex.match(t) + if match: + return int(match.groups()[0]) + return 0 class URL(URL_UNVERIFIED): - def sanitize_data(self, data): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + if not self._dummy and not any(t.startswith("status-") for t in self.tags): raise ValidationError( 'Must specify HTTP status tag for URL event, e.g. "status-200". Use URL_UNVERIFIED if the URL is unvisited.' 
) - return super().sanitize_data(data) + + @property + def resolved_hosts(self): + # TODO: remove this when we rip out httpx + return {".".join(i.split("-")[1:]) for i in self.tags if i.startswith("ip-")} + + @property + def pretty_string(self): + return self.data + + +class STORAGE_BUCKET(DictEvent, URL_UNVERIFIED): + _always_emit = True + _suppress_chain_dupes = True + + class _data_validator(BaseModel): + name: str + url: str + _validate_url = field_validator("url")(validators.validate_url) + + def sanitize_data(self, data): + data = super().sanitize_data(data) + data["name"] = data["name"].lower() + return data + + def _words(self): + return self.data["name"] class URL_HINT(URL_UNVERIFIED): pass +class WEB_PARAMETER(DictHostEvent): + def _data_id(self): + # dedupe by url:name:param_type + url = self.data.get("url", "") + name = self.data.get("name", "") + param_type = self.data.get("type", "") + return f"{url}:{name}:{param_type}" + + def _url(self): + return self.data["url"] + + def __str__(self): + max_event_len = 200 + d = str(self.data) + return f'{self.type}("{d[:max_event_len]}{("..." if len(d) > max_event_len else "")}", module={self.module}, tags={self.tags})' + + class EMAIL_ADDRESS(BaseEvent): def sanitize_data(self, data): return validators.validate_email(data) @@ -586,75 +1357,147 @@ def _words(self): class HTTP_RESPONSE(URL_UNVERIFIED, DictEvent): - _priority = 2 - def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) - self.web_spider_distance = getattr(self.source, "web_spider_distance", 0) - if not str(self.data.get("status-code", 0)).startswith("3"): - self.web_spider_distance += 1 + # count number of consecutive redirects + self.num_redirects = getattr(self.parent, "num_redirects", 0) + if str(self.http_status).startswith("3"): + self.num_redirects += 1 + + def _data_id(self): + return self.data["method"] + "|" + self.data["url"] def sanitize_data(self, data): url = data.get("url", "") - self.parsed = validators.validate_url_parsed(url) - - header_dict = {} - for i in data.get("response-header", "").splitlines(): - if len(i) > 0 and ":" in i: - k, v = i.split(":", 1) - k = k.strip().lower() - v = v.lstrip() - header_dict[k] = v - data["header-dict"] = header_dict - return data + self.parsed_url = self.validators.validate_url_parsed(url) + data["url"] = self.parsed_url.geturl() + + if not "raw_header" in data: + raise ValueError("raw_header is required for HTTP_RESPONSE events") + + if "header-dict" not in data: + header_dict = {} + for i in data.get("raw_header", "").splitlines(): + if len(i) > 0 and ":" in i: + k, v = i.split(":", 1) + k = k.strip().lower() + v = v.lstrip() + if k in header_dict: + header_dict[k].append(v) + else: + header_dict[k] = [v] + data["header-dict"] = header_dict + + # move URL to the front of the dictionary for visibility + data = dict(data) + new_data = {"url": data.pop("url")} + new_data.update(data) + + return new_data def _words(self): return set() + def _pretty_string(self): + return f"{self.data['hash']['header_mmh3']}:{self.data['hash']['body_mmh3']}" -class VULNERABILITY(DictHostEvent): - _priority = 1 + @property + def raw_response(self): + """ + Formats the status code, headers, and body into a single string formatted as an HTTP/1.1 response. 
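+
+        Example (illustrative shape, not real scan output):
+            "HTTP/1.1 200 OK\r\nContent-Type: text/html\r\n\r\n<html>...</html>"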
+ """ + raw_header = self.data.get("raw_header", "") + body = self.data.get("body", "") + return f"{raw_header}{body}" - def _sanitize_data(self, data): - data = super()._sanitize_data(data) - self.tags.add(data["severity"].lower()) + @property + def http_status(self): + try: + return int(self.data.get("status_code", 0)) + except (ValueError, TypeError): + return 0 + + @property + def http_title(self): + http_title = self.data.get("title", "") + try: + return recursive_decode(http_title) + except Exception: + return http_title + + @property + def redirect_location(self): + location = self.data.get("location", "") + # if it's a redirect + if location: + # get the url scheme + scheme = is_uri(location, return_scheme=True) + # if there's no scheme (i.e. it's a relative redirect) + if not scheme: + # then join the location with the current url + location = urljoin(self.parsed_url.geturl(), location) + return location + + +class VULNERABILITY(ClosestHostEvent): + _always_emit = True + _quick_emit = True + severity_colors = { + "CRITICAL": "🟪", + "HIGH": "🟥", + "MEDIUM": "🟧", + "LOW": "🟨", + "UNKNOWN": "⬜", + } + + def sanitize_data(self, data): + self.add_tag(data["severity"].lower()) return data class _data_validator(BaseModel): - host: str + host: Optional[str] = None severity: str description: str - url: Optional[str] - _validate_host = validator("host", allow_reuse=True)(validators.validate_host) - _validate_severity = validator("severity", allow_reuse=True)(validators.validate_severity) + url: Optional[str] = None + path: Optional[str] = None + _validate_url = field_validator("url")(validators.validate_url) + _validate_host = field_validator("host")(validators.validate_host) + _validate_severity = field_validator("severity")(validators.validate_severity) - def _data_graph(self): - return f'[{self.data["severity"]}] {self.data["description"]}' + def _pretty_string(self): + return f"[{self.data['severity']}] {self.data['description']}" -class FINDING(DictHostEvent): - _priority = 1 +class FINDING(ClosestHostEvent): + _always_emit = True + _quick_emit = True class _data_validator(BaseModel): - host: str + host: Optional[str] = None description: str - url: Optional[str] - _validate_host = validator("host", allow_reuse=True)(validators.validate_host) + url: Optional[str] = None + path: Optional[str] = None + _validate_url = field_validator("url")(validators.validate_url) + _validate_host = field_validator("host")(validators.validate_host) - def _data_graph(self): + def _pretty_string(self): return self.data["description"] class TECHNOLOGY(DictHostEvent): - _priority = 2 - class _data_validator(BaseModel): host: str technology: str - url: Optional[str] - _validate_host = validator("host", allow_reuse=True)(validators.validate_host) + url: Optional[str] = None + _validate_url = field_validator("url")(validators.validate_url) + _validate_host = field_validator("host")(validators.validate_host) - def _data_graph(self): + def _data_id(self): + # dedupe by host+port+tech + tech = self.data.get("technology", "") + return f"{self.host}:{self.port}:{tech}" + + def _pretty_string(self): return self.data["technology"] @@ -662,10 +1505,11 @@ class VHOST(DictHostEvent): class _data_validator(BaseModel): host: str vhost: str - url: Optional[str] - _validate_host = validator("host", allow_reuse=True)(validators.validate_host) + url: Optional[str] = None + _validate_url = field_validator("url")(validators.validate_url) + _validate_host = field_validator("host")(validators.validate_host) - def 
_data_graph(self):
+    def _pretty_string(self):
         return self.data["vhost"]
 
@@ -673,74 +1517,266 @@ class PROTOCOL(DictHostEvent):
     class _data_validator(BaseModel):
         host: str
         protocol: str
-        _validate_host = validator("host", allow_reuse=True)(validators.validate_open_port)
+        port: Optional[int] = None
+        banner: Optional[str] = None
+        _validate_host = field_validator("host")(validators.validate_host)
+        _validate_port = field_validator("port")(validators.validate_port)
 
-    def _host(self):
-        host, self._port = split_host_port(self.data["host"])
-        return host
+    def sanitize_data(self, data):
+        new_data = dict(data)
+        new_data["protocol"] = data.get("protocol", "").upper()
+        return new_data
 
-    def _data_graph(self):
+    @property
+    def port(self):
+        return self.data.get("port", None)
+
+    def _pretty_string(self):
         return self.data["protocol"]
 
 
+class GEOLOCATION(BaseEvent):
+    _always_emit = True
+    _quick_emit = True
+
+
+class PASSWORD(BaseEvent):
+    _always_emit = True
+    _quick_emit = True
+
+
+class HASHED_PASSWORD(BaseEvent):
+    _always_emit = True
+    _quick_emit = True
+
+
+class USERNAME(BaseEvent):
+    _always_emit = True
+    _quick_emit = True
+
+
+class SOCIAL(DictHostEvent):
+    _always_emit = True
+    _quick_emit = True
+    _scope_distance_increment_same_host = True
+
+
+class WEBSCREENSHOT(DictPathEvent, DictHostEvent):
+    _always_emit = True
+    _quick_emit = True
+
+
+class AZURE_TENANT(DictEvent):
+    _always_emit = True
+    _quick_emit = True
+
+
+class WAF(DictHostEvent):
+    _always_emit = True
+    _quick_emit = True
+
+    class _data_validator(BaseModel):
+        url: str
+        host: str
+        waf: str
+        info: Optional[str] = None
+        _validate_url = field_validator("url")(validators.validate_url)
+        _validate_host = field_validator("host")(validators.validate_host)
+
+    def _pretty_string(self):
+        return self.data["waf"]
+
+
+class FILESYSTEM(DictPathEvent):
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        if self._data_path.is_file():
+            # detect type of file content using magic
+            from bbot.core.helpers.libmagic import get_magic_info, get_compression
+
+            try:
+                extension, mime_type, description, confidence = get_magic_info(self.data["path"])
+                self.data["magic_extension"] = extension
+                self.data["magic_mime_type"] = mime_type
+                self.data["magic_description"] = description
+                self.data["magic_confidence"] = confidence
+                # detect compression
+                compression = get_compression(mime_type)
+                if compression:
+                    self.add_tag("compressed")
+                    self.add_tag(f"{compression}-archive")
+                    self.data["compression"] = compression
+                # refresh hash
+                self.data = self.data
+            except Exception as e:
+                log.debug(f"Error detecting file type: {type(e).__name__}: {e}")
+
+
+class RAW_DNS_RECORD(DictHostEvent, DnsEvent):
+    # don't emit raw DNS records for affiliates
+    _always_emit_tags = ["target"]
+
+
+class MOBILE_APP(DictEvent):
+    _always_emit = True
+
+    def _sanitize_data(self, data):
+        if isinstance(data, str):
+            data = {"url": data}
+        if "url" not in data:
+            raise ValidationError("url is required for MOBILE_APP events")
+        url = data["url"]
+        # parse URL
+        try:
+            self.parsed_url = urlparse(url)
+        except Exception as e:
+            raise ValidationError(f"Error parsing URL {url}: {e}")
+        if "id" not in data:
+            # extract the "id" query parameter
+            params = parse_qs(self.parsed_url.query)
+            try:
+                _id = params["id"][0]
+            except Exception:
+                raise ValidationError("id is required for MOBILE_APP events")
+            data["id"] = _id
+        return data
+
+    def _pretty_string(self):
+        return self.data["url"]
+
+
 def make_event(
-    data, event_type=None, source=None, 
module=None, scan=None, tags=None, confidence=100, dummy=False, internal=None + data, + event_type=None, + parent=None, + context=None, + module=None, + scan=None, + scans=None, + tags=None, + confidence=100, + dummy=False, + internal=None, ): """ - If data is already an event, simply return it + Creates and returns a new event object or modifies an existing one. + + This function serves as a factory for creating new event objects, either by generating a new `Event` + object or by updating an existing event with additional metadata. If `data` is already an event, + it updates the event based on the additional parameters provided. + + Parameters: + data (Union[str, dict, BaseEvent]): The primary data for the event or an existing event object. + event_type (str, optional): Type of the event, e.g., 'IP_ADDRESS'. Auto-detected if not provided. + parent (BaseEvent, optional): Parent event leading to this event's discovery. + context (str, optional): Description of circumstances leading to event's discovery. + module (str, optional): Module that discovered the event. + scan (Scan, optional): BBOT Scan object associated with the event. + scans (List[Scan], optional): Multiple BBOT Scan objects, primarily used for unserialization. + tags (Union[str, List[str]], optional): Descriptive tags for the event, as a list or a single string. + confidence (int, optional): Confidence level for the event, on a scale of 1-100. Defaults to 100. + dummy (bool, optional): Disables data validations if set to True. Defaults to False. + internal (Any, optional): Makes the event internal if set to True. Defaults to None. + + Returns: + BaseEvent: A new or updated event object. + + Raises: + ValidationError: Raised when there's an error in event data or type sanitization. + + Examples: + If inside a module, e.g. from within its `handle_event()`: + >>> self.make_event("1.2.3.4", parent=event) + IP_ADDRESS("1.2.3.4", module=portscan, tags={'ipv4', 'distance-1'}) + + If you're outside a module but you have a scan object: + >>> scan.make_event("1.2.3.4", parent=scan.root_event) + IP_ADDRESS("1.2.3.4", module=None, tags={'ipv4', 'distance-1'}) + + If you're outside a scan and just messing around: + >>> from bbot.core.event.base import make_event + >>> make_event("1.2.3.4", dummy=True) + IP_ADDRESS("1.2.3.4", module=None, tags={'ipv4'}) + + Note: + When working within a module's `handle_event()`, use the instance method + `self.make_event()` instead of calling this function directly. 
""" + if not data: + raise ValidationError("No data provided") + + # allow tags to be either a string or an array + if not tags: + tags = [] + elif isinstance(tags, str): + tags = [tags] + tags = set(tags) if is_event(data): - if scan is not None and not data.scan: - data.scan = scan + event = copy(data) + if scan is not None and not event.scan: + event.scan = scan + if scans is not None and not event.scans: + event.scans = scans if module is not None: - data.module = module - if source is not None: - data.set_source(source) - if internal == True and not data._made_internal: - data.make_internal() + event.module = module + if parent is not None: + event.parent = parent + if context is not None: + event.discovery_context = context + if internal is True: + event.internal = True + if tags: + event.tags = tags.union(event.tags) event_type = data.type - return data + return event else: if event_type is None: - event_type = get_event_type(data) + event_type, data = get_event_type(data) if not dummy: log.debug(f'Autodetected event type "{event_type}" based on data: "{data}"') - if event_type is None: - raise ValidationError(f'Unable to autodetect event type from "{data}"') event_type = str(event_type).strip().upper() # Catch these common whoopsies - - # DNS_NAME <--> IP_ADDRESS confusion if event_type in ("DNS_NAME", "IP_ADDRESS"): - try: - data = validators.validate_host(data) - except Exception as e: - raise ValidationError(f'Error sanitizing event data "{data}" for type "{event_type}": {e}') - data_is_ip = is_ip(data) - if event_type == "DNS_NAME" and data_is_ip: - event_type = "IP_ADDRESS" - elif event_type == "IP_ADDRESS" and not data_is_ip: - event_type = "DNS_NAME" - - # DNS_NAME <--> EMAIL_ADDRESS confusion - if event_type in ("DNS_NAME", "EMAIL_ADDRESS"): - data_is_email = validators.soft_validate(data, "email") - if event_type == "DNS_NAME" and data_is_email: + # DNS_NAME <--> EMAIL_ADDRESS confusion + if validators.soft_validate(data, "email"): event_type = "EMAIL_ADDRESS" - elif event_type == "EMAIL_ADDRESS" and not data_is_email: - event_type = "DNS_NAME" + else: + # DNS_NAME <--> IP_ADDRESS confusion + try: + data = validators.validate_host(data) + except Exception as e: + log.trace(traceback.format_exc()) + raise ValidationError(f'Error sanitizing event data "{data}" for type "{event_type}": {e}') + data_is_ip = is_ip(data) + if event_type == "DNS_NAME" and data_is_ip: + event_type = "IP_ADDRESS" + elif event_type == "IP_ADDRESS" and not data_is_ip: + event_type = "DNS_NAME" + # USERNAME <--> EMAIL_ADDRESS confusion + if event_type == "USERNAME" and validators.soft_validate(data, "email"): + event_type = "EMAIL_ADDRESS" + tags.add("affiliate") + # Convert single-host IP_RANGE to IP_ADDRESS + if event_type == "IP_RANGE": + with suppress(Exception): + net = ipaddress.ip_network(data, strict=False) + if net.prefixlen == net.max_prefixlen: + event_type = "IP_ADDRESS" + data = net.network_address event_class = globals().get(event_type, DefaultEvent) return event_class( data, event_type=event_type, - source=source, + parent=parent, + context=context, module=module, scan=scan, + scans=scans, tags=tags, confidence=confidence, _dummy=dummy, @@ -748,5 +1784,64 @@ def make_event( ) +def event_from_json(j, siem_friendly=False): + """ + Creates an event object from a JSON dictionary. + + This function deserializes a JSON dictionary to create a new event object, using the `make_event` function + for the actual object creation. 
It sets additional attributes such as the timestamp and scope distance
+    based on the input JSON.
+
+    Parameters:
+        j (Dict): JSON dictionary containing the event attributes.
+                  Must include keys "data" and "type".
+
+    Returns:
+        BaseEvent: A new event object initialized with attributes from the JSON dictionary.
+
+    Raises:
+        ValidationError: Raised when the JSON dictionary is missing required fields.
+
+    Note:
+        The function assumes that the input JSON dictionary is valid and may raise exceptions
+        if required keys are missing. Make sure to validate the JSON input beforehand.
+    """
+    try:
+        event_type = j["type"]
+        kwargs = {
+            "event_type": event_type,
+            "scans": j.get("scans", []),
+            "tags": j.get("tags", []),
+            "confidence": j.get("confidence", 100),
+            "context": j.get("discovery_context", None),
+            "dummy": True,
+        }
+        if siem_friendly:
+            data = j["data"][event_type]
+        else:
+            data = j["data"]
+        kwargs["data"] = data
+        event = make_event(**kwargs)
+        event_uuid = j.get("uuid", None)
+        if event_uuid is not None:
+            event._uuid = uuid.UUID(event_uuid.split(":")[-1])
+
+        resolved_hosts = j.get("resolved_hosts", [])
+        event._resolved_hosts = set(resolved_hosts)
+
+        event.timestamp = datetime.datetime.fromisoformat(j["timestamp"])
+        event.scope_distance = j["scope_distance"]
+        parent_id = j.get("parent", None)
+        if parent_id is not None:
+            event._parent_id = parent_id
+        parent_uuid = j.get("parent_uuid", None)
+        if parent_uuid is not None:
+            parent_type, parent_uuid = parent_uuid.split(":", 1)
+            event._parent_uuid = parent_type + ":" + str(uuid.UUID(parent_uuid))
+        return event
+    except KeyError as e:
+        raise ValidationError(f"Event missing required field: {e}")
+
+
 def is_event(e):
     return BaseEvent in e.__class__.__mro__
diff --git a/bbot/core/event/helpers.py b/bbot/core/event/helpers.py
index 872283038e..0e3bd5fcd1 100644
--- a/bbot/core/event/helpers.py
+++ b/bbot/core/event/helpers.py
@@ -2,8 +2,9 @@
 import ipaddress
 from contextlib import suppress

-from bbot.core.helpers import sha1, smart_decode
-from bbot.core.helpers.regexes import event_type_regexes, event_id_regex, _hostname_regex
+from bbot.errors import ValidationError
+from bbot.core.helpers.regexes import event_type_regexes
+from bbot.core.helpers import smart_decode, smart_encode_punycode

 log = logging.getLogger("bbot.core.event.helpers")


@@ -11,48 +12,41 @@
 def get_event_type(data):
     """
-    Attempt to divine event type from data
-    """
+    Determines the type of event based on the given data.
+
+    Args:
+        data (str): The data to be used for determining the event type.
+
+    Returns:
+        tuple: The event type (e.g. "IP_ADDRESS", "IP_RANGE", or "URL_UNVERIFIED"), plus the processed data.
+
+    Raises:
+        ValidationError: If the event type could not be determined.

-    data = smart_decode(data).strip()
+    Notes:
+        - Utilizes `smart_encode_punycode` and `smart_decode` to preprocess the data.
+        - Makes use of the `ipaddress` standard library to check for IP and network types.
+        - Checks against a set of predefined regular expressions stored in `event_type_regexes`.
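+
+    Examples:
+        Illustrative only; each return value assumes the corresponding check below matches:
+
+        >>> get_event_type("1.2.3.4")
+        ('IP_ADDRESS', '1.2.3.4')
+        >>> get_event_type("https://www.evilcorp.com")
+        ('URL_UNVERIFIED', 'https://www.evilcorp.com')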
+    """
     # IP address
     with suppress(Exception):
         ipaddress.ip_address(data)
-        return "IP_ADDRESS"
+        return "IP_ADDRESS", data

     # IP network
     with suppress(Exception):
         ipaddress.ip_network(data, strict=False)
-        return "IP_RANGE"
+        return "IP_RANGE", data
+
+    data = smart_encode_punycode(smart_decode(data).strip())

     # Strict regexes
     for t, regexes in event_type_regexes.items():
         for r in regexes:
             if r.match(data):
                 if t == "URL":
-                    return "URL_UNVERIFIED"
-                return t
-
-    # Assume DNS_NAME for basic words
-    if _hostname_regex.match(data):
-        return "DNS_NAME"
-
-
-def is_event_id(s):
-    if event_id_regex.match(str(s)):
-        return True
-    return False
-
-
-def make_event_id(data, event_type):
-    return f"{event_type}:{sha1(data).hexdigest()}"
-
-
-host_types = ("URL", "DNS_NAME", "EMAIL_ADDRESS")
-
-port_types = ("OPEN_TCP_PORT",)
-
-host_ip_types = ("IP_ADDRESS", "IP_ADDRESS", "IP_RANGE", "IP_RANGE")
+                    return "URL_UNVERIFIED", data
+                return t, data

-scopable_types = host_types + port_types
+    raise ValidationError(f'Unable to autodetect event type from "{data}"')
diff --git a/bbot/core/flags.py b/bbot/core/flags.py
new file mode 100644
index 0000000000..f65dbad286
--- /dev/null
+++ b/bbot/core/flags.py
@@ -0,0 +1,24 @@
+flag_descriptions = {
+    "active": "Makes active connections to target systems",
+    "affiliates": "Discovers affiliated hostnames/domains",
+    "aggressive": "Generates a large amount of network traffic",
+    "baddns": "Runs all modules from the DNS auditing tool BadDNS",
+    "cloud-enum": "Enumerates cloud resources",
+    "code-enum": "Finds public code repositories and searches them for secrets, etc.",
+    "deadly": "Highly aggressive",
+    "email-enum": "Enumerates email addresses",
+    "iis-shortnames": "Scans for IIS Shortname vulnerability",
+    "passive": "Never connects to target systems",
+    "portscan": "Discovers open ports",
+    "report": "Generates a report at the end of the scan",
+    "safe": "Non-intrusive, safe to run",
+    "service-enum": "Identifies protocols running on open ports",
+    "slow": "May take a long time to complete",
+    "social-enum": "Enumerates social media",
+    "subdomain-enum": "Enumerates subdomains",
+    "subdomain-hijack": "Detects hijackable subdomains",
+    "web-basic": "Basic, non-intrusive web scan functionality",
+    "web-paramminer": "Discovers HTTP parameters through brute-force",
+    "web-screenshots": "Takes screenshots of web pages",
+    "web-thorough": "More advanced web scanning functionality",
+}
diff --git a/bbot/core/helpers/__init__.py b/bbot/core/helpers/__init__.py
index 3b00b5e4c5..294ec82d3f 100644
--- a/bbot/core/helpers/__init__.py
+++ b/bbot/core/helpers/__init__.py
@@ -1,3 +1,4 @@
 from .url import *
 from .misc import *
 from . import regexes
+from . 
import validators diff --git a/bbot/core/helpers/async_helpers.py b/bbot/core/helpers/async_helpers.py new file mode 100644 index 0000000000..c6e5f8affb --- /dev/null +++ b/bbot/core/helpers/async_helpers.py @@ -0,0 +1,135 @@ +import uuid +import random +import asyncio +import logging +import functools +from datetime import datetime +from .misc import human_timedelta +from cachetools import keys, LRUCache +from contextlib import asynccontextmanager + +log = logging.getLogger("bbot.core.helpers.async_helpers") + + +class ShuffleQueue(asyncio.Queue): + def _put(self, item): + random_index = random.randint(0, self.qsize()) + self._queue.insert(random_index, item) + + def _get(self): + return self._queue.popleft() + + +class _Lock(asyncio.Lock): + def __init__(self, name): + self.name = name + super().__init__() + + +class NamedLock: + """ + Returns a unique asyncio.Lock() based on a provided string + + Useful for preventing multiple operations from occurring on the same data in parallel + E.g. simultaneous DNS lookups on the same hostname + """ + + def __init__(self, max_size=10000): + self._cache = LRUCache(maxsize=max_size) + + @asynccontextmanager + async def lock(self, name): + try: + lock = self._cache[name] + except KeyError: + lock = _Lock(name) + self._cache[name] = lock + async with lock: + yield + + +class TaskCounter: + def __init__(self): + self.tasks = {} + self._lock = None + + @property + def value(self): + return sum([t.n for t in self.tasks.values()]) + + @property + def lock(self): + if self._lock is None: + self._lock = asyncio.Lock() + return self._lock + + def count(self, task_name, n=1, _log=True): + if callable(task_name): + task_name = f"{task_name.__qualname__}()" + return self.Task(self, task_name, n=n, _log=_log) + + class Task: + def __init__(self, manager, task_name, n=1, _log=True): + self.manager = manager + self.task_name = task_name + self.task_id = None + self.start_time = None + self.log = _log + self.n = n + + async def __aenter__(self): + self.task_id = uuid.uuid4() + # if self.log: + # log.trace(f"Starting task {self.task_name} ({self.task_id})") + async with self.manager.lock: + self.start_time = datetime.now() + self.manager.tasks[self.task_id] = self + return self + + async def __aexit__(self, exc_type, exc_val, exc_tb): + async with self.manager.lock: + self.manager.tasks.pop(self.task_id, None) + # if self.log: + # log.trace(f"Finished task {self.task_name} ({self.task_id})") + + def __str__(self): + running_for = human_timedelta(datetime.now() - self.start_time) + return f"{self.task_name} running for {running_for}" + + +def get_event_loop(): + try: + return asyncio.get_running_loop() + except RuntimeError: + log.verbose("Starting new event loop") + return asyncio.new_event_loop() + + +def async_to_sync_gen(async_gen): + loop = get_event_loop() + try: + while True: + yield loop.run_until_complete(async_gen.__anext__()) + except StopAsyncIteration: + pass + + +def async_cachedmethod(cache, key=keys.hashkey): + def decorator(method): + async def wrapper(self, *args, **kwargs): + method_cache = cache(self) + k = key(*args, **kwargs) + try: + return method_cache[k] + except KeyError: + pass + ret = await method(self, *args, **kwargs) + try: + method_cache[k] = ret + except ValueError: + pass + return ret + + return functools.wraps(method)(wrapper) + + return decorator diff --git a/bbot/core/helpers/bloom.py b/bbot/core/helpers/bloom.py new file mode 100644 index 0000000000..62d2caa38f --- /dev/null +++ b/bbot/core/helpers/bloom.py @@ -0,0 +1,78 @@ 
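+# Example usage of the BloomFilter class below (illustrative sketch):
+#
+#   bf = BloomFilter(size=8000000)  # 8M bits == ~1MB of shared memory
+#   bf.add("www.evilcorp.com")
+#   assert "www.evilcorp.com" in bf  # always True; bloom filters have no false negatives
+#   "www2.evilcorp.com" in bf        # False, barring a rare false positive
+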
+import os +import mmh3 +import mmap + + +class BloomFilter: + """ + Simple bloom filter implementation capable of roughly 400K lookups/s. + + BBOT uses bloom filters in scenarios like DNS brute-forcing, where it's useful to keep track + of which mutations have been tried so far. + + A 100-megabyte bloom filter (800M bits) can store 10M entries with a .01% false-positive rate. + A python hash is 36 bytes. So if you wanted to store these in a set, this would take up + 36 * 10M * 2 (key+value) == 720 megabytes. So we save roughly 7 times the space. + """ + + def __init__(self, size=8000000): + self.size = size # total bits + self.byte_size = (size + 7) // 8 # calculate byte size needed for the given number of bits + + # Create an anonymous mmap region, compatible with both Windows and Unix + if os.name == "nt": # Windows + # -1 indicates an anonymous memory map in Windows + self.mmap_file = mmap.mmap(-1, self.byte_size) + else: # Unix/Linux + # Use MAP_ANONYMOUS along with MAP_SHARED + self.mmap_file = mmap.mmap(-1, self.byte_size, prot=mmap.PROT_WRITE, flags=mmap.MAP_ANON | mmap.MAP_SHARED) + + self.clear_all_bits() + + def add(self, item): + for hash_value in self._hashes(item): + index = hash_value // 8 + position = hash_value % 8 + current_byte = self.mmap_file[index] + self.mmap_file[index] = current_byte | (1 << position) + + def check(self, item): + for hash_value in self._hashes(item): + index = hash_value // 8 + position = hash_value % 8 + current_byte = self.mmap_file[index] + if not (current_byte & (1 << position)): + return False + return True + + def clear_all_bits(self): + self.mmap_file.seek(0) + # Write zeros across the entire mmap length + self.mmap_file.write(b"\x00" * self.byte_size) + + def _hashes(self, item): + if not isinstance(item, bytes): + if not isinstance(item, str): + item = str(item) + item = item.encode("utf-8") + return [abs(hash(item)) % self.size, abs(mmh3.hash(item)) % self.size, abs(self._fnv1a_hash(item)) % self.size] + + def _fnv1a_hash(self, data): + hash = 0x811C9DC5 # 2166136261 + for byte in data: + hash ^= byte + hash = (hash * 0x01000193) % 2**32 # 16777619 + return hash + + def close(self): + """Explicitly close the memory-mapped file.""" + self.mmap_file.close() + + def __del__(self): + try: + self.close() + except Exception: + pass + + def __contains__(self, item): + return self.check(item) diff --git a/bbot/core/helpers/cache.py b/bbot/core/helpers/cache.py index 9e0e1e68e6..3a70fbd248 100644 --- a/bbot/core/helpers/cache.py +++ b/bbot/core/helpers/cache.py @@ -1,9 +1,6 @@ import os import time import logging -import threading -from contextlib import suppress -from collections import OrderedDict from .misc import sha1 @@ -54,84 +51,3 @@ def is_cached(self, key, cache_hrs=24 * 7): def cache_filename(self, key): return self.cache_dir / sha1(key).hexdigest() - - -_sentinel = object() - - -class CacheDict: - """ - Dictionary to store cached values, with a maximum size limit - """ - - def __init__(self, max_size=1000): - self._cache = OrderedDict() - self._lock = threading.Lock() - self._max_size = int(max_size) - - def get(self, name, fallback=_sentinel): - name_hash = self._hash(name) - with self._lock: - try: - return self._cache[name_hash] - except KeyError: - if fallback is not _sentinel: - return fallback - raise - finally: - with suppress(KeyError): - self._cache.move_to_end(name_hash) - self._truncate() - - def put(self, name, value): - name_hash = self._hash(name) - with self._lock: - try: - self._cache[name_hash] = value - finally: - with 
suppress(KeyError): - self._cache.move_to_end(name_hash) - self._truncate() - - def _truncate(self): - if not self or len(self) <= self._max_size: - return - for nh in list(self._cache.keys()): - try: - del self._cache[nh] - except KeyError: - pass - if not self or len(self) <= self._max_size: - break - - def keys(self): - return self._cache.keys() - - def values(self): - return self._cache.values() - - def items(self): - return self._cache.items() - - def _hash(self, v): - if type(v) == int: - return v - return hash(str(v)) - - def __contains__(self, item): - return self._hash(item) in self._cache - - def __iter__(self): - return iter(self._cache) - - def __getitem__(self, item): - return self.get(item) - - def __setitem__(self, item, value): - self.put(item, value) - - def __bool__(self): - return bool(self._cache) - - def __len__(self): - return len(self._cache) diff --git a/bbot/core/helpers/command.py b/bbot/core/helpers/command.py index 2e23073761..d4f017b330 100644 --- a/bbot/core/helpers/command.py +++ b/bbot/core/helpers/command.py @@ -1,170 +1,305 @@ -import io import os +import asyncio import logging -import threading -import subprocess -from contextlib import suppress +import traceback +from signal import SIGINT +from subprocess import CompletedProcess, CalledProcessError, SubprocessError -from .misc import smart_decode +from .misc import smart_decode, smart_encode, which log = logging.getLogger("bbot.core.helpers.command") -def run_live(self, command, *args, **kwargs): - """ - Get live output, line by line, as a process executes - You can also pass input= and pipe data into the process' stdin - - This lets you chain processes like so: +async def run(self, *command, check=False, text=True, idle_timeout=None, **kwargs): + """Runs a command asynchronously and gets its output as a string. - ls_process = run_live(["ls", "/etc"]) - grep_process = run_live(["grep", "conf"], input=ls_process) - for line in grep_process: - log.success(line) + This method is a simple helper for executing a command and capturing its output. + If an error occurs during execution, it can optionally raise an error or just log the stderr. - - The above is roughly equivalent to: - ls /etc | grep conf + Args: + *command (str): The command to run as separate arguments. + check (bool, optional): If set to True, raises an error if the subprocess exits with a non-zero status. + Defaults to False. + text (bool, optional): If set to True, decodes the subprocess output to string. Defaults to True. + idle_timeout (int, optional): Sets a limit on the number of seconds the process can run before throwing a TimeoutError + **kwargs (dict): Additional keyword arguments for the subprocess. - NOTE: STDERR is hidden by default. - If you want to see it, pass stderr=None - """ + Returns: + CompletedProcess: A completed process object with attributes for the command, return code, stdout, and stderr. - if not "stdout" in kwargs: - kwargs["stdout"] = subprocess.PIPE - if not "stderr" in kwargs: - kwargs["stderr"] = subprocess.PIPE - _input = kwargs.pop("input", "") - input_msg = "" - if _input: - kwargs["stdin"] = subprocess.PIPE - input_msg = " (with stdin)" + Raises: + CalledProcessError: If the subprocess exits with a non-zero status and `check=True`. 
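+
+    Note:
+        Data can be piped to the process's stdin via the `input` kwarg, either as
+        str/bytes or as a list/tuple of lines (illustrative):
+        >>> await run(["grep", "conf"], input=["bbot.conf", "secrets.txt"])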
- command = [str(s) for s in command] - log.hugeverbose(f"run_live{input_msg}: {' '.join(command)}") - with catch(subprocess.Popen, command, *args, **kwargs) as process: - if _input: - if type(_input) in (str, bytes): - _input = (_input,) - self.feed_pipe(process.stdin, _input, text=False) - for line in io.TextIOWrapper(process.stdout, encoding="utf-8", errors="ignore"): - yield line - - # surface stderr - process.wait() - if process.stderr and process.returncode != 0: - stderr = smart_decode(process.stderr.read()) - if stderr: - command_str = " ".join(command) - log.warning(f"Stderr for {command_str}:\n\t{stderr}") - - -def run(self, command, *args, **kwargs): + Examples: + >>> process = await run(["ls", "/tmp"]) + >>> process.stdout + "file1.txt\nfile2.txt" """ - Simple helper for running a command, and getting its output as a string - process = run(["ls", "/tmp"]) - process.stdout --> "file1.txt\nfile2.txt" + # proc_tracker optionally keeps track of which processes are running under which modules + # this allows for graceful SIGINTing of a module's processes in the case when it's killed + proc_tracker = kwargs.pop("_proc_tracker", set()) + log_stderr = kwargs.pop("_log_stderr", True) + proc, _input, command = await self._spawn_proc(*command, **kwargs) + if proc is not None: + proc_tracker.add(proc) + try: + if _input is not None: + if isinstance(_input, (list, tuple)): + _input = b"\n".join(smart_encode(i) for i in _input) + b"\n" + else: + _input = smart_encode(_input) + + try: + if idle_timeout is not None: + stdout, stderr = await asyncio.wait_for(proc.communicate(_input), timeout=idle_timeout) + else: + stdout, stderr = await proc.communicate(_input) + except asyncio.exceptions.TimeoutError: + proc.send_signal(SIGINT) + raise + + # surface stderr + if text: + if stderr is not None: + stderr = smart_decode(stderr) + if stdout is not None: + stdout = smart_decode(stdout) + if proc.returncode: + if check: + raise CalledProcessError(proc.returncode, command, output=stdout, stderr=stderr) + if stderr and log_stderr: + command_str = " ".join(command) + log.warning(f"Stderr for run({command_str}):\n\t{stderr}") + + return CompletedProcess(command, proc.returncode, stdout, stderr) + finally: + proc_tracker.remove(proc) + + +async def run_live(self, *command, check=False, text=True, idle_timeout=None, **kwargs): + """Runs a command asynchronously and iterates through its output line by line in realtime. + + This method is useful for executing a command and capturing its output on-the-fly, as it is generated. + If an error occurs during execution, it can optionally raise an error or just log the stderr. + + Args: + *command (str): The command to run as separate arguments. + check (bool, optional): If set to True, raises an error if the subprocess exits with a non-zero status. + Defaults to False. + text (bool, optional): If set to True, decodes the subprocess output to string. Defaults to True. + idle_timeout (int, optional): Sets a limit on the number of seconds the process can remain idle (no lines sent to stdout) before throwing a TimeoutError + **kwargs (dict): Additional keyword arguments for the subprocess. + + Yields: + str or bytes: The output lines of the command, either as a decoded string (if `text=True`) + or as bytes (if `text=False`). - NOTE: STDERR is captured (not displayed) by default. - If you want to see it, self.debug(process.stderr) or pass stderr=None + Raises: + CalledProcessError: If the subprocess exits with a non-zero status and `check=True`. 
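+
+    Note:
+        As with `run()`, the `input` kwarg pipes data to stdin; here it may also be
+        an async generator, drained line by line (illustrative):
+        >>> async for line in run_live(["grep", "conf"], input=["bbot.conf", "secrets.txt"]):
+        ...     log.info(line)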
+ + Examples: + >>> async for line in run_live(["tail", "-f", "/var/log/auth.log"]): + ... log.info(line) """ - if not "stdout" in kwargs: - kwargs["stdout"] = subprocess.PIPE - if not "stderr" in kwargs: - kwargs["stderr"] = subprocess.PIPE - if not "text" in kwargs: - kwargs["text"] = True + # proc_tracker optionally keeps track of which processes are running under which modules + # this allows for graceful SIGINTing of a module's processes in the case when it's killed + proc_tracker = kwargs.pop("_proc_tracker", set()) + log_stderr = kwargs.pop("_log_stderr", True) + proc, _input, command = await self._spawn_proc(*command, **kwargs) + if proc is not None: + proc_tracker.add(proc) + try: + input_task = None + if _input is not None: + input_task = asyncio.create_task(_write_stdin(proc, _input)) + + while 1: + try: + if idle_timeout is not None: + line = await asyncio.wait_for(proc.stdout.readline(), timeout=idle_timeout) + else: + line = await proc.stdout.readline() + except asyncio.exceptions.TimeoutError: + proc.send_signal(SIGINT) + raise + except ValueError as e: + command_str = " ".join([str(c) for c in command]) + log.warning(f"Error executing command {command_str}: {e}") + log.trace(traceback.format_exc()) + continue + if not line: + break + if text: + line = smart_decode(line).rstrip("\r\n") + else: + line = line.rstrip(b"\r\n") + yield line + + if input_task is not None: + try: + await input_task + except ConnectionError: + log.trace(f"ConnectionError in command: {command}, kwargs={kwargs}") + log.trace(traceback.format_exc()) + await proc.wait() + + if proc.returncode: + stdout, stderr = await proc.communicate() + if text: + if stderr is not None: + stderr = smart_decode(stderr) + if stdout is not None: + stdout = smart_decode(stdout) + if check: + raise CalledProcessError(proc.returncode, command, output=stdout, stderr=stderr) + # surface stderr + if stderr and log_stderr: + command_str = " ".join(command) + log.warning(f"Stderr for run_live({command_str}):\n\t{stderr}") + finally: + proc_tracker.remove(proc) - command = [str(s) for s in command] - log.hugeverbose(f"run: {' '.join(command)}") - result = catch(subprocess.run, command, *args, **kwargs) - # surface stderr - if result.stderr and result.returncode != 0: - stderr = smart_decode(result.stderr) - if stderr: - command_str = " ".join(command) - log.warning(f"Stderr for {command_str}:\n\t{stderr}") +async def _spawn_proc(self, *command, **kwargs): + """Spawns an asynchronous subprocess. - return result + Prepares the command and associated keyword arguments. If the `input` argument is provided, + it checks to ensure that the `stdin` argument is not also provided. Once prepared, it creates + and returns the subprocess. If the command executable is not found, it logs a warning and traceback. + Args: + *command (str): The command to run as separate arguments. + **kwargs (dict): Additional keyword arguments for the subprocess. -def catch(callback, *args, **kwargs): + Raises: + ValueError: If both stdin and input arguments are provided. + + Returns: + tuple: A tuple containing the created process (or None if creation failed), the input (or None if not provided), + and the prepared command (or None if subprocess creation failed). 
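+
+    Note:
+        `stdout` and `stderr` default to `asyncio.subprocess.PIPE` (set by
+        `_prepare_command_kwargs`), so callers are expected to drain the pipes.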
+
+    Examples:
+        >>> _spawn_proc("ls", "-l", input="data")
+        (<Process 1234>, "data", ["ls", "-l"])
+    """
     try:
-        return callback(*args, **kwargs)
-    except FileNotFoundError as e:
-        import traceback
+        command, kwargs = self._prepare_command_kwargs(command, kwargs)
+    except SubprocessError as e:
+        log.warning(e)
+        return None, None, None
+    _input = kwargs.pop("input", None)
+    if _input is not None:
+        if kwargs.get("stdin") is not None:
+            raise ValueError("stdin and input arguments may not both be used.")
+        kwargs["stdin"] = asyncio.subprocess.PIPE
+    log.hugeverbose(f"run: {' '.join(command)}")
+    try:
+        proc = await asyncio.create_subprocess_exec(*command, **kwargs)
+        return proc, _input, command
+    except FileNotFoundError as e:
         log.warning(f"{e} - missing executable?")
-        log.debug(traceback.format_exc())
-    except BrokenPipeError as e:
-        import traceback
+        log.trace(traceback.format_exc())
+        return None, None, None
+

-        log.warning(f"Error in subprocess: {e}")
-        log.debug(traceback.format_exc())
+async def _write_proc_line(proc, chunk):
+    try:
+        proc.stdin.write(smart_encode(chunk) + b"\n")
+        await proc.stdin.drain()
+        return True
+    except Exception as e:
+        proc_args = [str(s) for s in getattr(proc, "args", [])]
+        command = " ".join(proc_args).strip()
+        if command:
+            log.warning(f"Error writing line to stdin for command: {command}: {e}")
+            log.trace(traceback.format_exc())
+        return False


-def tempfile(self, content, pipe=True):
+async def _write_stdin(proc, _input):
     """
-    tempfile("temp\nfile\ncontent") --> Path("/home/user/.bbot/temp/pgxml13bov87oqrvjz7a")
+    Asynchronously writes input to an active subprocess's stdin.
+
+    This function takes an `_input` parameter, which can be of type str, bytes,
+    list, tuple, or an asynchronous generator. The input is then written line by
+    line to the stdin of the given `proc`.

-    if "pipe" is True (the default), a named pipe is used instead of
-    a true file, which allows python data to be piped directly into the
-    process without taking up disk space
+    Args:
+        proc (subprocess.Popen): An active subprocess object.
+        _input (str, bytes, list, tuple, async generator): The data to write to stdin.
     """
-    filename = self.temp_filename()
-    try:
-        if type(content) not in (set, list, tuple):
-            content = (content,)
-        if pipe:
-            os.mkfifo(filename)
-            self.feed_pipe(filename, content, text=True)
+    if _input is not None:
+        if isinstance(_input, (str, bytes)):
+            _input = [_input]
+        if isinstance(_input, (list, tuple)):
+            for chunk in _input:
+                write_result = await _write_proc_line(proc, chunk)
+                if not write_result:
+                    break
         else:
-            with open(filename, "w", errors="ignore") as f:
-                for c in content:
-                    f.write(f"{self.smart_decode(c)}\n")
-    except Exception as e:
-        import traceback
+            async for chunk in _input:
+                write_result = await _write_proc_line(proc, chunk)
+                if not write_result:
+                    break
+        proc.stdin.close()

-        log.error(f"Error creating temp file: {e}")
-        log.debug(traceback.format_exc())
-    return filename


+def _prepare_command_kwargs(self, command, kwargs):
+    """
+    Prepare arguments for passing into `asyncio.create_subprocess_exec()`.
+    This method modifies the `kwargs` dictionary in place to prepare it for
+    use in the `asyncio.create_subprocess_exec()` method. It sets the default
+    values for keys like 'limit', 'stdout', and 'stderr' if they are not
+    already present. It also handles the case when 'sudo' needs to be run.
-def _feed_pipe(self, pipe, content, text=True): - try: - if text: - decode_fn = self.smart_decode - newline = "\n" - else: - decode_fn = self.smart_encode - newline = b"\n" - try: - if hasattr(pipe, "write"): - try: - for c in content: - pipe.write(decode_fn(c) + newline) - finally: - with suppress(Exception): - pipe.close() - else: - with open(pipe, "w") as p: - for c in content: - p.write(decode_fn(c) + newline) - except BrokenPipeError: - log.debug(f"Broken pipe in _feed_pipe()") - except ValueError: - import traceback - - log.debug(f"Error _feed_pipe(): {traceback.format_exc()}") - except KeyboardInterrupt: - self.scan.stop() - except Exception as e: - import traceback + Args: + command (list): The command to be run in the subprocess. + kwargs (dict): The keyword arguments to be passed to `asyncio.create_subprocess_exec()`. + + Returns: + tuple: A tuple containing the modified `command` and `kwargs`. + + Examples: + >>> _prepare_command_kwargs(['ls', '-l'], {}) + (['ls', '-l'], {'limit': 104857600, 'stdout': -1, 'stderr': -1}) + + >>> _prepare_command_kwargs(['ls', '-l'], {'sudo': True}) + (['sudo', '-E', '-A', 'LD_LIBRARY_PATH=...', 'PATH=...', 'ls', '-l'], {'limit': 104857600, 'stdout': -1, 'stderr': -1, 'env': environ(...)}) + """ + # limit = 100MB (this is needed for cases like httpx that are sending large JSON blobs over stdout) + if "limit" not in kwargs: + kwargs["limit"] = 1024 * 1024 * 100 + if "stdout" not in kwargs: + kwargs["stdout"] = asyncio.subprocess.PIPE + if "stderr" not in kwargs: + kwargs["stderr"] = asyncio.subprocess.PIPE + sudo = kwargs.pop("sudo", False) + + if len(command) == 1 and isinstance(command[0], (list, tuple)): + command = command[0] + command = [str(s) for s in command] + + if not command: + raise SubprocessError("Must specify a command") - log.error(f"Error in _feed_pipe(): {e}") - log.debug(traceback.format_exc()) + # use full path of binary, if not already specified + binary = command[0] + if "/" not in binary: + binary_full_path = which(binary) + if binary_full_path is None: + raise SubprocessError(f'Command "{binary}" was not found') + command[0] = binary_full_path + env = kwargs.get("env", os.environ) + if sudo and os.geteuid() != 0: + self.depsinstaller.ensure_root() + env["SUDO_ASKPASS"] = str((self.tools_dir / self.depsinstaller.askpass_filename).resolve()) + env["BBOT_SUDO_PASS"] = self.depsinstaller.encrypted_sudo_pw + kwargs["env"] = env -def feed_pipe(self, pipe, content, text=True): - t = threading.Thread(target=self._feed_pipe, args=(pipe, content), kwargs={"text": text}, daemon=True) - t.start() + PATH = os.environ.get("PATH", "") + LD_LIBRARY_PATH = os.environ.get("LD_LIBRARY_PATH", "") + command = ["sudo", "-E", "-A", f"LD_LIBRARY_PATH={LD_LIBRARY_PATH}", f"PATH={PATH}"] + command + return command, kwargs diff --git a/bbot/core/helpers/depsinstaller/installer.py b/bbot/core/helpers/depsinstaller/installer.py index aced28d86c..5bea5f508a 100644 --- a/bbot/core/helpers/depsinstaller/installer.py +++ b/bbot/core/helpers/depsinstaller/installer.py @@ -1,25 +1,79 @@ import os import sys +import stat import json import shutil import getpass import logging from time import sleep -import subprocess as sp from pathlib import Path +from threading import Lock from itertools import chain from contextlib import suppress +from secrets import token_bytes from ansible_runner.interface import run +from subprocess import CalledProcessError -from bbot.modules import module_loader +from ..misc import can_sudo_without_password, os_platform, rm_at_exit, 
get_python_constraints log = logging.getLogger("bbot.core.helpers.depsinstaller") class DepsInstaller: + CORE_DEPS = { + # core BBOT dependencies in the format of binary: package_name + # each one will only be installed if the binary is not found + "unzip": "unzip", + "zipinfo": "unzip", + "curl": "curl", + "git": "git", + "make": "make", + "gcc": "gcc", + "bash": "bash", + "which": "which", + "tar": "tar", + # debian why are you like this + "7z": [ + { + "name": "Install 7zip (Debian)", + "package": {"name": ["p7zip-full"], "state": "present"}, + "become": True, + "when": "ansible_facts['os_family'] == 'Debian'", + }, + { + "name": "Install 7zip (Non-Debian)", + "package": {"name": ["p7zip"], "state": "present"}, + "become": True, + "when": "ansible_facts['os_family'] != 'Debian'", + }, + { + "name": "Install p7zip-plugins (Fedora)", + "package": {"name": ["p7zip-plugins"], "state": "present"}, + "become": True, + "when": "ansible_facts['distribution'] == 'Fedora'", + }, + ], + } + def __init__(self, parent_helper): self.parent_helper = parent_helper - self._sudo_password = os.environ.get("BBOT_SUDO_PASS", None) + self.preset = self.parent_helper.preset + self.core = self.preset.core + + self.os_platform = os_platform() + + # respect BBOT's http timeout + self.web_config = self.parent_helper.config.get("web", {}) + http_timeout = self.web_config.get("http_timeout", 30) + os.environ["ANSIBLE_TIMEOUT"] = str(http_timeout) + + # cache encrypted sudo pass + self.askpass_filename = "sudo_askpass.py" + self._sudo_password = None + self._sudo_cache_setup = False + self._setup_sudo_cache() + self._installed_sudo_askpass = False + self.data_dir = self.parent_helper.cache_dir / "depsinstaller" self.parent_helper.mkdir(self.data_dir) self.setup_status_cache = self.data_dir / "setup_status.json" @@ -27,36 +81,29 @@ def __init__(self, parent_helper): self.parent_helper.mkdir(self.command_status) self.setup_status = self.read_setup_status() - self.no_deps = self.parent_helper.config.get("no_deps", False) - self.ansible_debug = self.parent_helper.config.get("debug", False) - self.force_deps = self.parent_helper.config.get("force_deps", False) - self.retry_deps = self.parent_helper.config.get("retry_deps", False) - self.ignore_failed_deps = self.parent_helper.config.get("ignore_failed_deps", False) + # make sure we're using a minimal git config + self.minimal_git_config = self.data_dir / "minimal_git.config" + self.minimal_git_config.touch() + os.environ["GIT_CONFIG_GLOBAL"] = str(self.minimal_git_config) + + self.deps_config = self.parent_helper.config.get("deps", {}) + self.deps_behavior = self.deps_config.get("behavior", "abort_on_failure").lower() + self.ansible_debug = self.core.logger.log_level <= logging.DEBUG self.venv = "" if sys.prefix != sys.base_prefix: self.venv = sys.prefix - # ensure that we have pip - venv_bin = Path(self.venv) / "bin" - python_executable = venv_bin / "python" - pip_executable = venv_bin / "pip" - if not pip_executable.is_file(): - with open(pip_executable, "w") as f: - f.write( - f'''#!/bin/bash -{python_executable} -m pip "$@"''' - ) - pip_executable.chmod(0o755) - self.all_modules_preloaded = module_loader.preloaded() + self.ensure_root_lock = Lock() - def install(self, *modules): + async def install(self, *modules): + await self.install_core_deps() succeeded = [] failed = [] try: notified = False for m in modules: # assume success if we're ignoring dependencies - if self.no_deps: + if self.deps_behavior == "disable": succeeded.append(m) continue # abort if module name 
is unknown @@ -65,33 +112,44 @@ def install(self, *modules): failed.append(m) continue preloaded = self.all_modules_preloaded[m] + log.debug(f"Installing {m} - Preloaded Deps {preloaded['deps']}") # make a hash of the dependencies and check if it's already been handled - module_hash = self.parent_helper.sha1(str(preloaded["deps"]) + self.venv).hexdigest() + # take into consideration whether the venv or bbot home directory changes + module_hash = self.parent_helper.sha1( + json.dumps(preloaded["deps"], sort_keys=True) + + self.venv + + str(self.parent_helper.bbot_home) + + os.uname()[1] + ).hexdigest() success = self.setup_status.get(module_hash, None) dependencies = list(chain(*preloaded["deps"].values())) if len(dependencies) <= 0: - log.debug(f'No setup to do for module "{m}"') + log.debug(f'No dependency work to do for module "{m}"') succeeded.append(m) continue else: - if success is None or (success is False and self.retry_deps) or self.force_deps: + if ( + success is None + or (success is False and self.deps_behavior == "retry_failed") + or self.deps_behavior == "force_install" + ): if not notified: - log.info(f"Installing module dependencies. Please be patient, this may take a while.") + log.hugeinfo("Installing module dependencies. Please be patient, this may take a while.") notified = True log.verbose(f'Installing dependencies for module "{m}"') # get sudo access if we need it - if preloaded.get("sudo", False) == True: + if preloaded.get("sudo", False) is True: self.ensure_root(f'Module "{m}" needs root privileges to install its dependencies.') - success = self.install_module(m) + success = await self.install_module(m) self.setup_status[module_hash] = success - if success or self.ignore_failed_deps: + if success or self.deps_behavior == "ignore_failed": log.debug(f'Setup succeeded for module "{m}"') succeeded.append(m) else: log.warning(f'Setup failed for module "{m}"') failed.append(m) else: - if success or self.ignore_failed_deps: + if success or self.deps_behavior == "ignore_failed": log.debug( f'Skipping dependency install for module "{m}" because it\'s already done (--force-deps to re-run)' ) @@ -105,67 +163,86 @@ def install(self, *modules): finally: self.write_setup_status() + succeeded.sort() + failed.sort() return succeeded, failed - def install_module(self, module): + async def install_module(self, module): success = True preloaded = self.all_modules_preloaded[module] + # ansible tasks + ansible_tasks = preloaded["deps"]["ansible"] + if ansible_tasks: + success &= self.tasks(module, ansible_tasks) + # apt deps_apt = preloaded["deps"]["apt"] if deps_apt: self.apt_install(deps_apt) - # pip - deps_pip = preloaded["deps"]["pip"] - if deps_pip: - success &= self.pip_install(deps_pip) - # shell deps_shell = preloaded["deps"]["shell"] if deps_shell: success &= self.shell(module, deps_shell) - # ansible tasks - ansible_tasks = preloaded["deps"]["ansible"] - if ansible_tasks: - success &= self.tasks(module, ansible_tasks) + # pip + deps_pip = preloaded["deps"]["pip"] + deps_pip_constraints = preloaded["deps"]["pip_constraints"] + if deps_pip: + success &= await self.pip_install(deps_pip, constraints=deps_pip_constraints) - return success + # shared/common + deps_common = preloaded["deps"]["common"] + if deps_common: + for dep_common in deps_common: + if self.setup_status.get(dep_common, False) is True: + log.debug( + f'Skipping installation of dependency "{dep_common}" for module "{module}" since it is already installed' + ) + continue + ansible_tasks = 
self.preset.module_loader._shared_deps[dep_common]
+                result = self.tasks(module, ansible_tasks)
+                self.setup_status[dep_common] = result
+                success &= result

-    def pip_install(self, packages):
-        packages = ",".join(packages)
-        log.verbose(f"Installing the following pip packages: {packages}")
-        args = {"name": packages}
-        if self.venv:
-            args["virtualenv"] = self.venv
-            args["virtualenv_python"] = sys.executable
-        success, err = self.ansible_run(
-            module="pip", args=args, ansible_args={"ansible_python_interpreter": sys.executable}
-        )
-        if success:
-            log.info(f'Successfully installed pip packages "{packages}"')
-        else:
-            log.warning(f"Failed to install pip packages: {err}")
         return success

+    async def pip_install(self, packages, constraints=None):
+        packages_str = ",".join(packages)
+        log.info(f"Installing the following pip packages: {packages_str}")
+
+        command = [sys.executable, "-m", "pip", "install", "--upgrade"] + packages
+
+        # if no custom constraints are provided, use the constraints of the currently installed version of bbot
+        if constraints is None:
+            constraints = get_python_constraints()
+
+        constraints_tempfile = self.parent_helper.tempfile(constraints, pipe=False)
+        command.append("--constraint")
+        command.append(constraints_tempfile)
+
+        process = None
+        try:
+            process = await self.parent_helper.run(command, check=True)
+            message = f'Successfully installed pip packages "{packages_str}"'
+            output = process.stdout
+            if output is not None:
+                message = output.splitlines()[-1]
+            log.info(message)
+            return True
+        except CalledProcessError as err:
+            log.warning(f"Failed to install pip packages {packages_str} (return code {err.returncode}): {err.stderr}")
+            return False
+
     def apt_install(self, packages):
         """
         Install packages with the OS's default package manager (apt, pacman, dnf, etc.)
         """
-        packages_str = ",".join(packages)
-        log.verbose(f"Installing the following OS packages: {packages_str}")
-        args = {"name": packages_str, "state": "present"}  # , "update_cache": True, "cache_valid_time": 86400}
-        success, err = self.ansible_run(
-            module="package",
-            args=args,
-            ansible_args={
-                "ansible_become": True,
-                "ansible_become_method": "sudo",
-            },
-        )
+        args, kwargs = self._make_apt_ansible_args(packages)
+        success, err = self.ansible_run(module="package", args=args, **kwargs)
         if success:
-            log.info(f'Successfully installed OS packages "{packages_str}"')
+            log.info(f'Successfully installed OS packages "{",".join(sorted(packages))}"')
         else:
             log.warning(
                 f"Failed to install OS packages ({err}). 
Recommend installing the following packages manually:" @@ -174,6 +251,21 @@ def apt_install(self, packages): log.warning(f" - {p}") return success + def _make_apt_ansible_args(self, packages): + packages_str = ",".join(sorted(packages)) + log.info(f"Installing the following OS packages: {packages_str}") + args = {"name": packages_str, "state": "present"} # , "update_cache": True, "cache_valid_time": 86400} + kwargs = {} + # don't sudo brew + if self.os_platform != "darwin": + kwargs = { + "ansible_args": { + "ansible_become": True, + "ansible_become_method": "sudo", + } + } + return args, kwargs + def shell(self, module, commands): tasks = [] for i, command in enumerate(commands): @@ -184,7 +276,7 @@ def shell(self, module, commands): command["cmd"] += f" && touch {command_status_file}" tasks.append( { - "name": f"{module}.deps_shell step {i+1}", + "name": f"{module}.deps_shell step {i + 1}", "ansible.builtin.shell": command, "args": {"executable": "/bin/bash", "creates": str(command_status_file)}, } @@ -193,10 +285,11 @@ def shell(self, module, commands): if success: log.info(f"Successfully ran {len(commands):,} shell commands") else: - log.warning(f"Failed to run shell dependencies") + log.warning("Failed to run shell dependencies") return success def tasks(self, module, tasks): + log.info(f"Running {len(tasks):,} Ansible tasks for {module}") success, err = self.ansible_run(tasks=tasks) if success: log.info(f"Successfully ran {len(tasks):,} Ansible tasks for {module}") @@ -205,7 +298,7 @@ def tasks(self, module, tasks): return success def ansible_run(self, tasks=None, module=None, args=None, ansible_args=None): - _ansible_args = {"ansible_connection": "local"} + _ansible_args = {"ansible_connection": "local", "ansible_python_interpreter": sys.executable} if ansible_args is not None: _ansible_args.update(ansible_args) module_args = None @@ -214,6 +307,14 @@ def ansible_run(self, tasks=None, module=None, args=None, ansible_args=None): log.debug(f"ansible_run(module={module}, args={args}, ansible_args={ansible_args})") playbook = None if tasks: + for task in tasks: + if "package" in task: + # special case for macos + if self.os_platform == "darwin": + # don't sudo brew + task["become"] = False + # brew doesn't support update_cache + task["package"].pop("update_cache", "") playbook = {"hosts": "all", "tasks": tasks} log.debug(json.dumps(playbook, indent=2)) if self._sudo_password is not None: @@ -232,8 +333,8 @@ def ansible_run(self, tasks=None, module=None, args=None, ansible_args=None): }, module=module, module_args=module_args, - quiet=not self.ansible_debug, - verbosity=(3 if self.ansible_debug else 0), + quiet=True, + verbosity=0, cancel_callback=lambda: None, ) @@ -243,14 +344,14 @@ def ansible_run(self, tasks=None, module=None, args=None, ansible_args=None): err = "" for e in res.events: if self.ansible_debug and not success: - log.debug(json.dumps(e, indent=4)) + log.debug(json.dumps(e, indent=2)) if e["event"] == "runner_on_failed": err = e["event_data"]["res"]["msg"] break return success, err def read_setup_status(self): - setup_status = dict() + setup_status = {} if self.setup_status_cache.is_file(): with open(self.setup_status_cache) as f: with suppress(Exception): @@ -262,28 +363,112 @@ def write_setup_status(self): json.dump(self.setup_status, f) def ensure_root(self, message=""): - if os.geteuid() != 0 and self._sudo_password is None: + self._install_sudo_askpass() + # skip if we've already done this + if self._sudo_password is not None: + return + with self.ensure_root_lock: 
+ # first check if the environment variable is set + _sudo_password = os.environ.get("BBOT_SUDO_PASS", None) + if _sudo_password is not None or os.geteuid() == 0 or can_sudo_without_password(): + # if we're already root or we can sudo without a password, there's no need to prompt + return + if message: log.warning(message) - # sleep for a split second to flush previous log messages while not self._sudo_password: + # sleep for a split second to flush previous log messages sleep(0.1) - password = getpass.getpass(prompt="[USER] Please enter sudo password: ") - if self.verify_sudo_password(password): + _sudo_password = getpass.getpass(prompt="[USER] Please enter sudo password: ") + if self.parent_helper.verify_sudo_password(_sudo_password): log.success("Authentication successful") - self._sudo_password = password + self._sudo_password = _sudo_password else: log.warning("Incorrect password") - def verify_sudo_password(self, sudo_pass): - try: - sp.run( - ["sudo", "-S", "-k", "true"], - input=self.parent_helper.smart_encode(sudo_pass), - stderr=sp.DEVNULL, - stdout=sp.DEVNULL, - check=True, + async def install_core_deps(self): + to_install = set() + to_install_friendly = set() + playbook = [] + self._install_sudo_askpass() + # ensure tldextract data is cached + self.parent_helper.tldextract("evilcorp.co.uk") + # install any missing commands + for command, package_name_or_playbook in self.CORE_DEPS.items(): + if not self.parent_helper.which(command): + to_install_friendly.add(command) + if isinstance(package_name_or_playbook, str): + to_install.add(package_name_or_playbook) + else: + playbook.extend(package_name_or_playbook) + # install ansible community.general collection + if not self.setup_status.get("ansible:community.general", False): + log.info("Installing Ansible Community General Collection") + try: + command = ["ansible-galaxy", "collection", "install", "community.general"] + await self.parent_helper.run(command, check=True) + self.setup_status["ansible:community.general"] = True + log.info("Successfully installed Ansible Community General Collection") + except CalledProcessError as err: + log.warning( + f"Failed to install Ansible Community.General Collection (return code {err.returncode}): {err.stderr}" + ) + # construct ansible playbook + if to_install: + playbook.append( + { + "name": "Install Core BBOT Dependencies", + "package": {"name": list(to_install), "state": "present"}, + "become": True, + } ) - except sp.CalledProcessError: - return False - return True + # run playbook + if playbook: + log.info(f"Installing core BBOT dependencies: {','.join(sorted(to_install_friendly))}") + self.ensure_root() + self.ansible_run(tasks=playbook) + + def _setup_sudo_cache(self): + if not self._sudo_cache_setup: + self._sudo_cache_setup = True + # write temporary encryption key, to be deleted upon scan completion + self._sudo_temp_keyfile = self.parent_helper.temp_filename() + # remove it at exit + rm_at_exit(self._sudo_temp_keyfile) + # generate random 32-byte key + random_key = token_bytes(32) + # write key to file and set secure permissions + self._sudo_temp_keyfile.write_bytes(random_key) + self._sudo_temp_keyfile.chmod(0o600) + # export path to environment variable, for use in askpass script + os.environ["BBOT_SUDO_KEYFILE"] = str(self._sudo_temp_keyfile.resolve()) + + @property + def encrypted_sudo_pw(self): + if self._sudo_password is None: + return "" + return self._encrypt_sudo_pw(self._sudo_password) + + def _encrypt_sudo_pw(self, pw): + from Crypto.Cipher import AES + from 
Crypto.Util.Padding import pad
+
+        key = self._sudo_temp_keyfile.read_bytes()
+        cipher = AES.new(key, AES.MODE_CBC)
+        ct_bytes = cipher.encrypt(pad(pw.encode(), AES.block_size))
+        iv = cipher.iv.hex()
+        ct = ct_bytes.hex()
+        return f"{iv}:{ct}"
+
+    def _install_sudo_askpass(self):
+        if not self._installed_sudo_askpass:
+            self._installed_sudo_askpass = True
+            # install custom askpass script
+            askpass_src = Path(__file__).resolve().parent / self.askpass_filename
+            askpass_dst = self.parent_helper.tools_dir / self.askpass_filename
+            shutil.copy(askpass_src, askpass_dst)
+            askpass_dst.chmod(askpass_dst.stat().st_mode | stat.S_IEXEC)
+
+    @property
+    def all_modules_preloaded(self):
+        return self.preset.module_loader.preloaded()
diff --git a/bbot/core/helpers/depsinstaller/sudo_askpass.py b/bbot/core/helpers/depsinstaller/sudo_askpass.py
new file mode 100644
index 0000000000..ccd8cd01b6
--- /dev/null
+++ b/bbot/core/helpers/depsinstaller/sudo_askpass.py
@@ -0,0 +1,41 @@
+#!/usr/bin/env python3
+import os
+import sys
+from pathlib import Path
+from Crypto.Cipher import AES
+from Crypto.Util.Padding import unpad
+
+ENV_VAR_NAME = "BBOT_SUDO_PASS"
+KEY_ENV_VAR_PATH = "BBOT_SUDO_KEYFILE"
+
+
+def decrypt_password(encrypted_data, key):
+    iv, ciphertext = encrypted_data.split(":")
+    iv = bytes.fromhex(iv)
+    ct = bytes.fromhex(ciphertext)
+    cipher = AES.new(key, AES.MODE_CBC, iv)
+    pt = unpad(cipher.decrypt(ct), AES.block_size)
+    return pt.decode("utf-8")
+
+
+def main():
+    encrypted_password = os.environ.get(ENV_VAR_NAME, "")
+    # remove variable from environment once we've got it
+    os.environ.pop(ENV_VAR_NAME, None)
+    encryption_keypath = Path(os.environ.get(KEY_ENV_VAR_PATH, ""))
+
+    if not encrypted_password or not encryption_keypath.is_file():
+        print("Error: Encrypted password or encryption key not found in environment variables.", file=sys.stderr)
+        sys.exit(1)
+
+    try:
+        key = encryption_keypath.read_bytes()
+        decrypted_password = decrypt_password(encrypted_password, key)
+        print(decrypted_password, end="")
+    except Exception as e:
+        print(f'Error decrypting password "{encrypted_password}": {str(e)}', file=sys.stderr)
+        sys.exit(1)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/bbot/core/helpers/diff.py b/bbot/core/helpers/diff.py
index fb1f6c43d3..1ea5de0e02 100644
--- a/bbot/core/helpers/diff.py
+++ b/bbot/core/helpers/diff.py
@@ -1,95 +1,148 @@
 import logging
 import xmltodict
-from time import sleep
 from deepdiff import DeepDiff
 from contextlib import suppress
 from xml.parsers.expat import ExpatError

-from bbot.core.errors import HttpCompareError
+from bbot.errors import HttpCompareError

 log = logging.getLogger("bbot.core.helpers.diff")


 class HttpCompare:
-    def __init__(self, baseline_url, parent_helper, method="GET", allow_redirects=False):
-
+    def __init__(
+        self,
+        baseline_url,
+        parent_helper,
+        method="GET",
+        data=None,
+        allow_redirects=False,
+        include_cache_buster=True,
+        headers=None,
+        cookies=None,
+        timeout=15,
+    ):
         self.parent_helper = parent_helper
         self.baseline_url = baseline_url
+        self.include_cache_buster = include_cache_buster
+        self.method = method
+        self.data = data
+        self.allow_redirects = allow_redirects
+        self._baselined = False
+        self.headers = headers
+        self.cookies = cookies
+        self.timeout = timeout
+
+    @staticmethod
+    def merge_dictionaries(headers1, headers2):
+        if headers2 is None:
+            return headers1
+        else:
+            merged_headers = headers1.copy()
+            merged_headers.update(headers2)
+            return merged_headers
+
+    async def _baseline(self):
+        if not self._baselined:
+            # 
vanilla URL + if self.include_cache_buster: + url_1 = self.parent_helper.add_get_params(self.baseline_url, self.gen_cache_buster()).geturl() + else: + url_1 = self.baseline_url + baseline_1 = await self.parent_helper.request( + url_1, + follow_redirects=self.allow_redirects, + method=self.method, + data=self.data, + headers=self.headers, + cookies=self.cookies, + retries=2, + timeout=self.timeout, + ) + await self.parent_helper.sleep(1) + # put random parameters in URL, headers, and cookies + get_params = {self.parent_helper.rand_string(6): self.parent_helper.rand_string(6)} + + if self.include_cache_buster: + get_params.update(self.gen_cache_buster()) + url_2 = self.parent_helper.add_get_params(self.baseline_url, get_params).geturl() + baseline_2 = await self.parent_helper.request( + url_2, + headers=self.merge_dictionaries( + {self.parent_helper.rand_string(6): self.parent_helper.rand_string(6)}, self.headers + ), + cookies=self.merge_dictionaries( + {self.parent_helper.rand_string(6): self.parent_helper.rand_string(6)}, self.cookies + ), + follow_redirects=self.allow_redirects, + method=self.method, + data=self.data, + retries=2, + timeout=self.timeout, + ) - # vanilla URL - url_1 = self.parent_helper.add_get_params(self.baseline_url, self.gen_cache_buster()).geturl() - baseline_1 = self.parent_helper.request(url_1, allow_redirects=allow_redirects, method=method) - sleep(1) - # put random parameters in URL, headers, and cookies - get_params = self.gen_cache_buster() - get_params.update({self.parent_helper.rand_string(6): self.parent_helper.rand_string(6)}) - url_2 = self.parent_helper.add_get_params(self.baseline_url, get_params).geturl() - baseline_2 = self.parent_helper.request( - url_2, - headers={self.parent_helper.rand_string(6): self.parent_helper.rand_string(6)}, - cookies={self.parent_helper.rand_string(6): self.parent_helper.rand_string(6)}, - allow_redirects=allow_redirects, - method=method, - ) - - self.baseline = baseline_1 - - if baseline_1 is None or baseline_2 is None: - log.debug("HTTP error while establishing baseline, aborting") - raise HttpCompareError("Can't get baseline from source URL") - if baseline_1.status_code != baseline_2.status_code: - log.debug("Status code not stable during baseline, aborting") - raise HttpCompareError("Can't get baseline from source URL") - try: - baseline_1_json = xmltodict.parse(baseline_1.text) - baseline_2_json = xmltodict.parse(baseline_2.text) - except ExpatError: - log.debug(f"Cant HTML parse for {baseline_url}. 
Switching to text parsing as a backup") - baseline_1_json = baseline_1.text.split("\n") - baseline_2_json = baseline_2.text.split("\n") - - ddiff = DeepDiff(baseline_1_json, baseline_2_json, ignore_order=True, view="tree") - self.ddiff_filters = [] - - for k, v in ddiff.items(): - for x in list(ddiff[k]): - log.debug(f"Added {k} filter for path: {x.path()}") - self.ddiff_filters.append(x.path()) - - self.baseline_json = baseline_1_json - - self.baseline_ignore_headers = [ - h.lower() - for h in [ - "date", - "last-modified", - "content-length", - "ETag", - "X-Pad", - "X-Backside-Transport", + self.baseline = baseline_1 + + if baseline_1 is None or baseline_2 is None: + log.debug("HTTP error while establishing baseline, aborting") + raise HttpCompareError( + f"Can't get baseline from source URL: {url_1}:{baseline_1} / {url_2}:{baseline_2}" + ) + if baseline_1.status_code != baseline_2.status_code: + log.debug("Status code not stable during baseline, aborting") + raise HttpCompareError("Can't get baseline from source URL") + try: + baseline_1_json = xmltodict.parse(baseline_1.text) + baseline_2_json = xmltodict.parse(baseline_2.text) + except ExpatError: + log.debug(f"Can't HTML parse for {self.baseline_url}. Switching to text parsing as a backup") + baseline_1_json = baseline_1.text.split("\n") + baseline_2_json = baseline_2.text.split("\n") + + ddiff = DeepDiff( + baseline_1_json, baseline_2_json, ignore_order=True, view="tree", threshold_to_diff_deeper=0 + ) + self.ddiff_filters = [] + + for k in ddiff.keys(): + for x in list(ddiff[k]): + log.debug(f"Added {k} filter for path: {x.path()}") + self.ddiff_filters.append(x.path()) + + self.baseline_json = baseline_1_json + + self.baseline_ignore_headers = [ + h.lower() + for h in [ + "date", + "last-modified", + "content-length", + "ETag", + "X-Pad", + "X-Backside-Transport", + "keep-alive", + ] ] - ] - dynamic_headers = self.compare_headers(baseline_1.headers, baseline_2.headers) + dynamic_headers = self.compare_headers(baseline_1.headers, baseline_2.headers) - self.baseline_ignore_headers += dynamic_headers - self.baseline_body_distance = self.compare_body(baseline_1_json, baseline_2_json) + self.baseline_ignore_headers += [x.lower() for x in dynamic_headers] + self._baselined = True def gen_cache_buster(self): return {self.parent_helper.rand_string(6): "1"} def compare_headers(self, headers_1, headers_2): - differing_headers = [] for i, headers in enumerate((headers_1, headers_2)): for header, value in list(headers.items()): if header.lower() in self.baseline_ignore_headers: with suppress(KeyError): - log.debug(f'found ignored header "{header}" in headers_{i+1} and removed') + log.debug(f'found ignored header "{header}" in headers_{i + 1} and removed') del headers[header] - ddiff = DeepDiff(headers_1, headers_2, ignore_order=True, view="tree") + ddiff = DeepDiff(headers_1, headers_2, ignore_order=True, view="tree", threshold_to_diff_deeper=0) - for k, v in ddiff.items(): + for k in ddiff.keys(): for x in list(ddiff[k]): try: header_value = str(x).split("'")[1] @@ -99,11 +152,17 @@ def compare_headers(self, headers_1, headers_2): return differing_headers def compare_body(self, content_1, content_2): - if content_1 == content_2: return True - ddiff = DeepDiff(content_1, content_2, ignore_order=True, view="tree", exclude_paths=self.ddiff_filters) + ddiff = DeepDiff( + content_1, + content_2, + ignore_order=True, + view="tree", + exclude_paths=self.ddiff_filters, + threshold_to_diff_deeper=0, + ) if len(ddiff.keys()) == 0: return True @@ 
-111,8 +170,16 @@ def compare_body(self, content_1, content_2): log.debug(ddiff) return False - def compare( - self, subject, headers=None, cookies=None, check_reflection=False, method="GET", allow_redirects=False + async def compare( + self, + subject, + headers=None, + cookies=None, + check_reflection=False, + method="GET", + data=None, + allow_redirects=False, + timeout=None, ): """ Compares a URL with the baseline, with optional headers or cookies added @@ -123,15 +190,29 @@ def compare( "reflection" is whether the value was reflected in the HTTP response """ + await self._baseline() + + if timeout is None: + timeout = self.timeout + reflection = False - cache_key, cache_value = list(self.gen_cache_buster().items())[0] - url = self.parent_helper.add_get_params(subject, {cache_key: cache_value}).geturl() - subject_response = self.parent_helper.request( - url, headers=headers, cookies=cookies, allow_redirects=allow_redirects, method=method + if self.include_cache_buster: + cache_key, cache_value = list(self.gen_cache_buster().items())[0] + url = self.parent_helper.add_get_params(subject, {cache_key: cache_value}).geturl() + else: + url = subject + subject_response = await self.parent_helper.request( + url, + headers=headers, + cookies=cookies, + follow_redirects=allow_redirects, + method=method, + data=data, + timeout=timeout, ) - if not subject_response: - # this can be caused by a WAF not liking the header, so we really arent interested in it + if subject_response is None: + # this can be caused by a WAF not liking the header, so we really aren't interested in it return (True, "403", reflection, subject_response) if check_reflection: @@ -144,41 +225,50 @@ def compare( subject_params = self.parent_helper.get_get_params(subject) for k, v in subject_params.items(): - if k != cache_key and v in subject_response.text: - reflection = True - break - + if self.include_cache_buster and k != cache_key: + for item in v: + if item in subject_response.text: + reflection = True + break try: subject_json = xmltodict.parse(subject_response.text) except ExpatError: - log.debug(f"Cant HTML parse for {subject.split('?')[0]}. Switching to text parsing as a backup") + log.debug(f"Can't HTML parse for {subject.split('?')[0]}. 
Switching to text parsing as a backup") subject_json = subject_response.text.split("\n") + diff_reasons = [] + if self.baseline.status_code != subject_response.status_code: log.debug( f"status code was different [{str(self.baseline.status_code)}] -> [{str(subject_response.status_code)}], no match" ) - return (False, "code", reflection, subject_response) + diff_reasons.append("code") different_headers = self.compare_headers(self.baseline.headers, subject_response.headers) if different_headers: - log.debug(f"headers were different, no match [{different_headers}]") - return (False, "header", reflection, subject_response) + log.debug("headers were different, no match") + diff_reasons.append("header") + + if self.compare_body(self.baseline_json, subject_json) is False: + log.debug("difference in HTML body, no match") - if self.compare_body(self.baseline_json, subject_json) == False: - log.debug(f"difference in HTML body, no match") - return (False, "body", reflection, subject_response) - return (True, None, False, None) + diff_reasons.append("body") - def canary_check(self, url, mode, rounds=6): + if not diff_reasons: + return (True, [], reflection, subject_response) + else: + return (False, diff_reasons, reflection, subject_response) + + async def canary_check(self, url, mode, rounds=3): """ test detection using a canary to find hosts giving bad results """ + await self._baseline() headers = None cookies = None for i in range(0, rounds): - random_params = {self.parent_helper.rand_string(7): self.parent_helper.rand_string(6)} + random_params = {self.parent_helper.rand_string(7): self.parent_helper.rand_string(7)} new_url = str(url) if mode == "getparam": new_url = self.parent_helper.add_get_params(url, random_params).geturl() @@ -189,9 +279,11 @@ def canary_check(self, url, mode, rounds=6): else: raise ValueError(f'Invalid mode: "{mode}", choose from: getparam, header, cookie') - match, reason, reflection, subject_response = self.compare(new_url, headers=headers, cookies=cookies) + match, reasons, reflection, subject_response = await self.compare( + new_url, headers=headers, cookies=cookies, check_reflection=True + ) - # a nonsense header "caused" a difference, we need to abort - if match == False: + # if a nonsense header "caused" a difference, we need to abort. We also need to abort if our canary was reflected + if match is False or reflection is True: return False return True diff --git a/bbot/core/helpers/dns.py b/bbot/core/helpers/dns.py deleted file mode 100644 index e4b4eded97..0000000000 --- a/bbot/core/helpers/dns.py +++ /dev/null @@ -1,499 +0,0 @@ -import re -import json -import logging -import dns.resolver -import dns.exception -from threading import Lock -from contextlib import suppress -from concurrent.futures import ThreadPoolExecutor - -from .regexes import dns_name_regex -from bbot.core.errors import ValidationError -from .threadpool import ThreadPoolWrapper, NamedLock -from .misc import is_ip, domain_parents, parent_domain, rand_string - -log = logging.getLogger("bbot.core.helpers.dns") - - -class DNSHelper: - """ - For automatic wildcard detection, nameserver validation, etc. 
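
An aside on the reworked async HttpCompare above, before the old dns.py listing continues: a typical call sequence looks like the following hedged sketch. Here `helpers` stands in for a module's parent helper and the URLs are placeholders.

compare = HttpCompare("http://evilcorp.com/", helpers)
# make sure the endpoint gives stable, non-reflective responses before trusting any diff
if await compare.canary_check("http://evilcorp.com/", mode="getparam"):
    match, reasons, reflection, response = await compare.compare(
        "http://evilcorp.com/?id=1", check_reflection=True
    )
    if not match:
        print(f"response differed from baseline: {reasons}")  # e.g. ['code', 'body']
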
- """ - - def __init__(self, parent_helper): - - self.parent_helper = parent_helper - self.resolver = dns.resolver.Resolver() - self.timeout = self.parent_helper.config.get("dns_timeout", 10) - self.abort_threshold = self.parent_helper.config.get("dns_abort_threshold", 5) - self.resolver.timeout = self.timeout - self.resolver.lifetime = self.timeout - self._resolver_list = None - - self.wildcard_ignore = self.parent_helper.config.get("dns_wildcard_ignore", None) - if not self.wildcard_ignore: - self.wildcard_ignore = [] - self.wildcard_ignore = tuple([str(d).strip().lower() for d in self.wildcard_ignore]) - self.wildcard_tests = self.parent_helper.config.get("dns_wildcard_tests", 5) - self._wildcard_cache = dict() - # since wildcard detection takes some time, This is to prevent multiple - # modules from kicking off wildcard detection for the same domain at the same time - self._wildcard_lock = NamedLock() - - self._errors = dict() - self._error_lock = Lock() - - # we need our own threadpool because using the shared one can lead to deadlocks - max_workers = self.parent_helper.config.get("max_dns_threads", 100) - executor = ThreadPoolExecutor(max_workers=max_workers) - self._thread_pool = ThreadPoolWrapper(executor, max_workers=max_workers) - - self._debug = self.parent_helper.config.get("dns_debug", False) - - self._dummy_modules = dict() - self._dummy_modules_lock = Lock() - - self._cache = self.parent_helper.CacheDict(max_size=10000) - self._cache_lock = Lock() - self._cache_locks = NamedLock() - - # copy the system's current resolvers to a text file for tool use - resolvers = dns.resolver.Resolver().nameservers - self.resolver_file = self.parent_helper.tempfile(resolvers, pipe=False) - - self.bad_ptr_regex = re.compile(r"(?:[0-9]{1,3}[-_\.]){3}[0-9]{1,3}") - self.filter_bad_ptrs = self.parent_helper.config.get("dns_filter_ptrs", True) - - def resolve(self, query, **kwargs): - """ - "1.2.3.4" --> { - "evilcorp.com", - } - "evilcorp.com" --> { - "1.2.3.4", - "dead::beef" - } - """ - results = set() - raw_results, errors = self.resolve_raw(query, **kwargs) - for (rdtype, answers) in raw_results: - for answer in answers: - for _, t in self.extract_targets(answer): - results.add(t) - return results - - def resolve_raw(self, query, **kwargs): - kwargs["tcp"] = True - if self.parent_helper.scan.stopping: - return [], [] - query = str(query).strip() - if is_ip(query): - kwargs.pop("type", None) - kwargs.pop("rdtype", None) - results, errors = self._resolve_ip(query, **kwargs) - return [("PTR", results)], [("PTR", e) for e in errors] - else: - results = [] - errors = [] - types = ["A", "AAAA"] - kwargs.pop("rdtype", None) - if "type" in kwargs: - t = kwargs.pop("type") - if isinstance(t, str): - if t.strip().lower() in ("any", "all", "*"): - types = ["A", "AAAA", "SRV", "MX", "NS", "SOA", "CNAME", "TXT"] - else: - types = [t.strip().upper()] - elif any([isinstance(t, x) for x in (list, tuple)]): - types = [str(_).strip().upper() for _ in t] - for t in types: - if getattr(self.parent_helper.scan, "stopping", False) == True: - break - r, e = self._resolve_hostname(query, rdtype=t, **kwargs) - if r: - results.append((t, r)) - for error in e: - errors.append((t, error)) - - return (results, errors) - - def _resolve_hostname(self, query, **kwargs): - self.debug(f"Resolving {query} with kwargs={kwargs}") - results = [] - errors = [] - parent = self.parent_helper.parent_domain(query) - rdtype = kwargs.get("rdtype", "A") - parent_hash = hash(f"{parent}:{rdtype}") - error_count = 
self._errors.get(parent_hash, 0) - if error_count >= self.abort_threshold: - log.verbose( - f'Aborting query "{query}" because failed {rdtype} queries for "{parent}" ({error_count:,}) exceeded abort threshold ({self.abort_threshold:,})' - ) - return results, errors - try: - results = list(self._catch(self.resolver.resolve, query, **kwargs)) - with self._error_lock: - if parent_hash in self._errors: - self._errors[parent_hash] = 0 - except (dns.resolver.NoNameservers, dns.exception.Timeout, dns.resolver.LifetimeTimeout) as e: - with self._error_lock: - try: - self._errors[parent_hash] += 1 - except KeyError: - self._errors[parent_hash] = 1 - log.verbose( - f'DNS error or timeout for {rdtype} query "{query}" ({self._errors[parent_hash]:,} so far): {e}' - ) - errors.append(e) - self.debug(f"Results for {query} with kwargs={kwargs}: {results}") - return results, errors - - def _resolve_ip(self, query, **kwargs): - self.debug(f"Reverse-resolving {query} with kwargs={kwargs}") - results = [] - errors = [] - try: - return list(self._catch(self.resolver.resolve_address, query, **kwargs)), errors - except dns.resolver.NoNameservers as e: - self.debug(f"{e} (query={query}, kwargs={kwargs})") - except (dns.exception.Timeout, dns.resolver.LifetimeTimeout) as e: - errors.append(e) - self.debug(f"Results for {query} with kwargs={kwargs}: {results}") - return results, errors - - def resolve_event(self, event): - result = self._resolve_event(event) - # if it's a wildcard, go again with _wildcard.{domain} - if len(result) == 1: - event = result[0] - return self._resolve_event(event, check_wildcard=False) - # else we're good - else: - return result - - def _resolve_event(self, event, check_wildcard=True): - """ - Tag event with appropriate dns record types - Optionally create child events from dns resolutions - """ - event_tags = set() - try: - if not event.host or event.type in ("IP_RANGE",): - return [], set(), False, False - children = [] - event_host = str(event.host) - # lock to ensure resolution of the same host doesn't start while we're working here - with self._cache_locks.get_lock(event_host): - - event_whitelisted = False - event_blacklisted = False - - # wildcard check first - if check_wildcard: - event_is_wildcard, wildcard_parent = self.is_wildcard(event_host) - if event_is_wildcard and event.type in ("DNS_NAME",): - event.data = wildcard_parent - return (event,) - else: - event_tags.add("wildcard") - - # try to get data from cache - _event_tags, _event_whitelisted, _event_blacklisted = self.cache_get(event_host) - event_tags.update(_event_tags) - # if we found it, return it - if _event_whitelisted is not None: - return children, event_tags, _event_whitelisted, _event_blacklisted - - # then resolve - resolved_raw, errors = self.resolve_raw(event_host, type="any") - if errors: - event_tags.add("dns-error") - for rdtype, records in resolved_raw: - event_tags.add("resolved") - event_tags.add(f"{rdtype.lower()}_record") - rdtype = str(rdtype).upper() - # whitelisting and blacklist of IPs - if rdtype in ("A", "AAAA"): - for r in records: - for _, t in self.extract_targets(r): - with suppress(ValidationError): - if self.parent_helper.scan.whitelisted(t): - event_whitelisted = True - with suppress(ValidationError): - if self.parent_helper.scan.blacklisted(t): - event_blacklisted = True - for r in records: - for _, t in self.extract_targets(r): - if t: - if self.filter_bad_ptrs and rdtype in ("PTR") and self.bad_ptr_regex.search(t): - self.debug(f"Filtering out bad PTR: {t}") - continue - 
children.append((t, rdtype)) - if "resolved" not in event_tags: - event_tags.add("unresolved") - self._cache[event_host] = (event_tags, event_whitelisted, event_blacklisted) - return children, event_tags, event_whitelisted, event_blacklisted - finally: - event._resolved.set() - - def cache_get(self, host): - try: - return self._cache[host] - except KeyError: - return set(), None, None - - def resolve_batch(self, queries, **kwargs): - """ - resolve_batch("www.evilcorp.com", "evilcorp.com") --> [ - ("www.evilcorp.com", {"1.1.1.1"}), - ("evilcorp.com", {"2.2.2.2"}) - ] - """ - futures = dict() - for query in queries: - future = self._thread_pool.submit_task(self._catch_keyboardinterrupt, self.resolve, query, **kwargs) - futures[future] = query - for future in self.parent_helper.as_completed(futures): - query = futures[future] - yield (query, future.result()) - - def extract_targets(self, record): - """ - Extract whatever hostnames/IPs a DNS records points to - """ - results = set() - rdtype = str(record.rdtype.name).upper() - if rdtype in ("A", "AAAA", "NS", "CNAME", "PTR"): - results.add((rdtype, self._clean_dns_record(record))) - elif rdtype == "SOA": - results.add((rdtype, self._clean_dns_record(record.mname))) - elif rdtype == "MX": - results.add((rdtype, self._clean_dns_record(record.exchange))) - elif rdtype == "SRV": - results.add((rdtype, self._clean_dns_record(record.target))) - elif rdtype == "TXT": - for s in record.strings: - s = self.parent_helper.smart_decode(s) - for match in dns_name_regex.finditer(s): - start, end = match.span() - host = s[start:end] - results.add((rdtype, host)) - else: - log.warning(f'Unknown DNS record type "{rdtype}"') - return results - - @staticmethod - def _clean_dns_record(record): - return str(record.to_text()).lower().rstrip(".") - - def get_valid_resolvers(self, min_reliability=0.99): - nameservers = set() - nameservers_url = "https://public-dns.info/nameserver/nameservers.json" - nameservers_file = self.parent_helper.download(nameservers_url, cache_hrs=72) - if nameservers_file is None: - log.warning(f"Failed to download nameservers from {nameservers_url}") - return set() - nameservers_json = [] - try: - nameservers_json = json.loads(open(nameservers_file).read()) - except Exception as e: - log.warning(f"Failed to load nameserver list from {nameservers_file}: {e}") - nameservers_file.unlink() - for entry in nameservers_json: - try: - ip = str(entry.get("ip", "")).strip() - except Exception: - continue - try: - reliability = float(entry.get("reliability", 0)) - except ValueError: - continue - if reliability >= min_reliability and is_ip(ip, version=4): - nameservers.add(ip) - log.verbose(f"Loaded {len(nameservers):,} nameservers from {nameservers_url}") - resolver_list = self.verify_nameservers(nameservers) - return resolver_list - - @property - def resolvers(self): - """ - Returns set() of valid DNS servers from public-dns.info - """ - if self._resolver_list is None: - file_content = self.parent_helper.cache_get("resolver_list") - if file_content is not None: - self._resolver_list = set([l for l in file_content.splitlines() if l]) - if not self._resolver_list: - log.info(f"Fetching and validating public DNS servers, this may take a few minutes") - resolvers = self.get_valid_resolvers() - if resolvers: - self._resolver_list = resolvers - self.parent_helper.cache_put("resolver_list", "\n".join(self._resolver_list)) - else: - return set() - return self._resolver_list - - @property - def mass_resolver_file(self): - self.resolvers - return 
self.parent_helper.cache_filename("resolver_list") - - def verify_nameservers(self, nameservers, timeout=2): - """Check each resolver to make sure it can actually resolve DNS names - - Args: - nameservers (list): nameservers to verify - timeout (int): timeout for dns query - """ - log.verbose(f"Verifying {len(nameservers):,} nameservers") - futures = [ - self._thread_pool.submit_task(self._catch_keyboardinterrupt, self.verify_nameserver, n) - for n in nameservers - ] - - valid_nameservers = set() - for future in self.parent_helper.as_completed(futures): - nameserver, error = future.result() - if error is None: - self.debug(f'Nameserver "{nameserver}" is valid') - valid_nameservers.add(nameserver) - else: - self.debug(str(error)) - log.verbose(f"Verified {len(valid_nameservers):,}/{len(nameservers):,} nameservers") - - return valid_nameservers - - def verify_nameserver(self, nameserver, timeout=2): - """Validate a nameserver by making a sample query and a garbage query - - Args: - nameserver (str): nameserver to verify - timeout (int): timeout for dns query - """ - self.debug(f'Verifying nameserver "{nameserver}"') - error = None - - resolver = dns.resolver.Resolver() - resolver.timeout = timeout - resolver.lifetime = timeout - resolver.nameservers = [nameserver] - - # first, make sure it can resolve a valid hostname - try: - a_results = [str(r) for r in list(resolver.resolve("dns.google", "A"))] - aaaa_results = [str(r) for r in list(resolver.resolve("dns.google", "AAAA"))] - if not ("2001:4860:4860::8888" in aaaa_results and "8.8.8.8" in a_results): - error = f"Nameserver {nameserver} failed to resolve basic query" - except Exception: - error = f"Nameserver {nameserver} failed to resolve basic query within {timeout} seconds" - - # then, make sure it isn't feeding us garbage data - randhost = f"www-m.{rand_string(9)}.{rand_string(10)}.com" - if error is None: - try: - a_results = list(resolver.resolve(randhost, "A")) - error = f"Nameserver {nameserver} returned garbage data" - except dns.exception.DNSException: - pass - # Garbage query to nameserver failed successfully ;) - if error is None: - try: - a_results = list(resolver.resolve(randhost, "AAAA")) - error = f"Nameserver {nameserver} returned garbage data" - except dns.exception.DNSException: - pass - # Garbage query to nameserver failed successfully ;) - - return nameserver, error - - def _catch(self, callback, *args, **kwargs): - try: - return callback(*args, **kwargs) - except dns.resolver.NoNameservers: - raise - except (dns.exception.Timeout, dns.resolver.LifetimeTimeout): - log.debug(f"DNS query with args={args}, kwargs={kwargs} timed out after {self.timeout} seconds") - raise - except dns.exception.DNSException as e: - self.debug(f"{e} (args={args}, kwargs={kwargs})") - except Exception: - log.warning(f"Error in {callback.__qualname__}() with args={args}, kwargs={kwargs}") - return list() - - def is_wildcard(self, query): - if is_ip(query) or not "." 
in query: - return False, query - for d in self.wildcard_ignore: - if self.parent_helper.host_in_host(query, d): - return False, query - if "_wildcard" in query.split("."): - return True, query - hosts = list(domain_parents(query, include_self=True))[:-1] - for host in hosts[::-1]: - is_wildcard, parent = self._is_wildcard(host) - if is_wildcard: - return True, f"_wildcard.{parent}" - return False, query - - def _is_wildcard(self, query): - parent = parent_domain(query) - parent_hash = hash(parent) - - # try to return from cache - with suppress(KeyError): - return self._wildcard_cache[parent_hash], parent - - with self._wildcard_lock.get_lock(parent): - - # resolve the base query - orig_results = self.resolve(query) - is_wildcard = False - - futures = [] - # resolve a bunch of random subdomains of the same parent - for _ in range(self.wildcard_tests): - rand_query = f"{rand_string(length=10)}.{parent}" - future = self._thread_pool.submit_task(self._catch_keyboardinterrupt, self.resolve, rand_query) - futures.append(future) - - # put all the IPs from the random subdomains in one bucket - wildcard_ips = set() - for future in self.parent_helper.as_completed(futures): - ips = future.result() - if ips: - wildcard_ips.update(ips) - - # if all of the original results are in the random bucket - if orig_results and wildcard_ips and all([ip in wildcard_ips for ip in orig_results]): - # then ladies and gentlemen we have a wildcard - is_wildcard = True - - self._wildcard_cache.update({parent_hash: is_wildcard}) - if is_wildcard: - log.verbose(f"Encountered domain with wildcard DNS: {parent}") - return is_wildcard, parent - - def _catch_keyboardinterrupt(self, callback, *args, **kwargs): - try: - return callback(*args, **kwargs) - except Exception as e: - import traceback - - log.error(f"Error in {callback.__qualname__}(): {e}") - log.debug(traceback.format_exc()) - except KeyboardInterrupt: - if self.parent_helper.scan: - self.parent_helper.scan.stop() - - def debug(self, *args, **kwargs): - if self._debug: - log.debug(*args, **kwargs) - - def _get_dummy_module(self, name): - with self._dummy_modules_lock: - try: - dummy_module = self._dummy_modules[name] - except KeyError: - dummy_module = self.parent_helper._make_dummy_module(name=name, _type="DNS") - self._dummy_modules[name] = dummy_module - return dummy_module diff --git a/bbot/core/helpers/dns/__init__.py b/bbot/core/helpers/dns/__init__.py new file mode 100644 index 0000000000..75426cd265 --- /dev/null +++ b/bbot/core/helpers/dns/__init__.py @@ -0,0 +1 @@ +from .dns import DNSHelper diff --git a/bbot/core/helpers/dns/brute.py b/bbot/core/helpers/dns/brute.py new file mode 100644 index 0000000000..71424c5a87 --- /dev/null +++ b/bbot/core/helpers/dns/brute.py @@ -0,0 +1,190 @@ +import json +import random +import asyncio +import logging +import subprocess + + +class DNSBrute: + """ + Helper for DNS brute-forcing. 
+ + Examples: + >>> domain = "evilcorp.com" + >>> subdomains = ["www", "mail"] + >>> results = await self.helpers.dns.brute(self, domain, subdomains) + """ + + _nameservers_url = ( + "https://raw.githubusercontent.com/blacklanternsecurity/public-dns-servers/master/nameservers.txt" + ) + + def __init__(self, parent_helper): + self.parent_helper = parent_helper + self.log = logging.getLogger("bbot.helper.dns.brute") + self.dns_config = self.parent_helper.config.get("dns", {}) + self.num_canaries = 100 + self.max_resolvers = self.dns_config.get("brute_threads", 1000) + self.nameservers_url = self.dns_config.get("brute_nameservers", self._nameservers_url) + self.devops_mutations = list(self.parent_helper.word_cloud.devops_mutations) + self.digit_regex = self.parent_helper.re.compile(r"\d+") + self._resolver_file = None + self._dnsbrute_lock = None + + async def __call__(self, *args, **kwargs): + return await self.dnsbrute(*args, **kwargs) + + @property + def dnsbrute_lock(self): + if self._dnsbrute_lock is None: + self._dnsbrute_lock = asyncio.Lock() + return self._dnsbrute_lock + + async def dnsbrute(self, module, domain, subdomains, type=None): + subdomains = list(subdomains) + + if type is None: + type = "A" + type = str(type).strip().upper() + + wildcard_domains = await self.parent_helper.dns.is_wildcard_domain(domain, (type, "CNAME")) + wildcard_rdtypes = set() + for domain, rdtypes in wildcard_domains.items(): + wildcard_rdtypes.update(rdtypes) + if wildcard_domains: + self.log.hugewarning( + f"Aborting massdns on {domain} because it's a wildcard domain ({','.join(sorted(wildcard_rdtypes))})" + ) + return [] + + canaries = self.gen_random_subdomains(self.num_canaries) + canaries_list = list(canaries) + canaries_pre = canaries_list[: int(self.num_canaries / 2)] + canaries_post = canaries_list[int(self.num_canaries / 2) :] + # sandwich subdomains between canaries + subdomains = canaries_pre + subdomains + canaries_post + + results = [] + canaries_triggered = [] + async for hostname, ip, rdtype in self._massdns(module, domain, subdomains, rdtype=type): + sub = hostname.split(domain)[0] + if sub in canaries: + canaries_triggered.append(sub) + else: + results.append(hostname) + + if len(canaries_triggered) > 5: + self.log.info( + f"Aborting massdns on {domain} due to false positive: ({len(canaries_triggered):,} canaries triggered - {','.join(canaries_triggered)})" + ) + return [] + + # everything checks out + return results + + async def _massdns(self, module, domain, subdomains, rdtype): + """ + { + "name": "www.blacklanternsecurity.com.", + "type": "A", + "class": "IN", + "status": "NOERROR", + "data": { + "answers": [ + { + "ttl": 3600, + "type": "CNAME", + "class": "IN", + "name": "www.blacklanternsecurity.com.", + "data": "blacklanternsecurity.github.io." 
+ }, + { + "ttl": 3600, + "type": "A", + "class": "IN", + "name": "blacklanternsecurity.github.io.", + "data": "185.199.108.153" + } + ] + }, + "resolver": "168.215.165.186:53" + } + """ + resolver_file = await self.resolver_file() + command = ( + "massdns", + "-r", + resolver_file, + "-s", + self.max_resolvers, + "-t", + rdtype, + "-o", + "J", + "-q", + ) + subdomains = self.gen_subdomains(subdomains, domain) + hosts_yielded = set() + async with self.dnsbrute_lock: + async for line in module.run_process_live(*command, stderr=subprocess.DEVNULL, input=subdomains): + try: + j = json.loads(line) + except json.decoder.JSONDecodeError: + self.log.debug(f"Failed to decode line: {line}") + continue + answers = j.get("data", {}).get("answers", []) + if type(answers) == list and len(answers) > 0: + answer = answers[0] + hostname = answer.get("name", "").strip(".").lower() + if hostname.endswith(f".{domain}"): + data = answer.get("data", "") + rdtype = answer.get("type", "").upper() + if data and rdtype: + hostname_hash = hash(hostname) + if hostname_hash not in hosts_yielded: + hosts_yielded.add(hostname_hash) + yield hostname, data, rdtype + + async def gen_subdomains(self, prefixes, domain): + for p in prefixes: + if domain: + p = f"{p}.{domain}" + yield p + + async def resolver_file(self): + if self._resolver_file is None: + self._resolver_file_original = await self.parent_helper.wordlist( + self.nameservers_url, + cache_hrs=24 * 7, + ) + nameservers = set(self.parent_helper.read_file(self._resolver_file_original)) + nameservers.difference_update(self.parent_helper.dns.system_resolvers) + # exclude system nameservers from brute-force + # this helps prevent rate-limiting which might cause BBOT's main dns queries to fail + self._resolver_file = self.parent_helper.tempfile(nameservers, pipe=False) + return self._resolver_file + + def gen_random_subdomains(self, n=50): + delimiters = (".", "-") + lengths = list(range(3, 8)) + for i in range(0, max(0, n - 5)): + d = delimiters[i % len(delimiters)] + l = lengths[i % len(lengths)] + segments = [random.choice(self.devops_mutations) for _ in range(l)] + segments.append(self.parent_helper.rand_string(length=8, digits=False)) + subdomain = d.join(segments) + yield subdomain + for _ in range(5): + yield self.parent_helper.rand_string(length=8, digits=False) + + def has_excessive_digits(self, d): + """ + Identifies dns names with excessive numbers, e.g.: + - w1-2-3.evilcorp.com + - ptr1234.evilcorp.com + """ + is_ptr = self.parent_helper.is_ptr(d) + digits = self.digit_regex.findall(d) + excessive_digits = len(digits) > 2 + long_digits = any(len(d) > 3 for d in digits) + return is_ptr or excessive_digits or long_digits diff --git a/bbot/core/helpers/dns/dns.py b/bbot/core/helpers/dns/dns.py new file mode 100644 index 0000000000..89758b35ff --- /dev/null +++ b/bbot/core/helpers/dns/dns.py @@ -0,0 +1,200 @@ +import dns +import logging +import dns.exception +import dns.asyncresolver +from cachetools import LFUCache +from radixtarget import RadixTarget + +from bbot.errors import DNSError +from bbot.core.engine import EngineClient +from bbot.core.helpers.async_helpers import async_cachedmethod +from ..misc import clean_dns_record, is_ip, is_domain, is_dns_name + +from .engine import DNSEngine + +log = logging.getLogger("bbot.core.helpers.dns") + + +class DNSHelper(EngineClient): + SERVER_CLASS = DNSEngine + ERROR_CLASS = DNSError + + """Helper class for DNS-related operations within BBOT. 
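
An aside on _massdns() in brute.py above: it consumes massdns's JSON output one line at a time, keeping only in-scope answers. A minimal sketch of that per-line extraction, reusing the sample record from its docstring:

import json

line = '{"name": "www.blacklanternsecurity.com.", "data": {"answers": [{"type": "A", "name": "www.blacklanternsecurity.com.", "data": "185.199.108.153"}]}}'
j = json.loads(line)
answers = j.get("data", {}).get("answers", [])
if isinstance(answers, list) and answers:
    answer = answers[0]
    hostname = answer.get("name", "").strip(".").lower()
    rdtype = answer.get("type", "").upper()
    data = answer.get("data", "")
    if hostname.endswith(".blacklanternsecurity.com") and data and rdtype:
        print(hostname, data, rdtype)  # www.blacklanternsecurity.com 185.199.108.153 A
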
+ + This class provides mechanisms for host resolution, wildcard domain detection, event tagging, and more. + It centralizes all DNS-related activities in BBOT, offering both synchronous and asynchronous methods + for DNS resolution, as well as various utilities for batch resolution and DNS query filtering. + + Attributes: + parent_helper: A reference to the instantiated `ConfigAwareHelper` (typically `scan.helpers`). + resolver (BBOTAsyncResolver): An asynchronous DNS resolver tailored for BBOT with rate-limiting capabilities. + timeout (int): The timeout value for DNS queries. Defaults to 5 seconds. + retries (int): The number of retries for failed DNS queries. Defaults to 1. + abort_threshold (int): The threshold for aborting after consecutive failed queries. Defaults to 50. + runaway_limit (int): Maximum allowed distance for consecutive DNS resolutions. Defaults to 5. + all_rdtypes (list): A list of DNS record types to be considered during operations. + wildcard_ignore (tuple): Domains to be ignored during wildcard detection. + wildcard_tests (int): Number of tests to be run for wildcard detection. Defaults to 5. + _wildcard_cache (dict): Cache for wildcard detection results. + _dns_cache (LRUCache): Cache for DNS resolution results, limited in size. + resolver_file (Path): File containing system's current resolver nameservers. + filter_bad_ptrs (bool): Whether to filter out DNS names that appear to be auto-generated PTR records. Defaults to True. + + Args: + parent_helper: The parent helper object with configuration details and utilities. + + Raises: + DNSError: If an issue arises when creating the BBOTAsyncResolver instance. + + Examples: + >>> dns_helper = DNSHelper(parent_config) + >>> resolved_host = dns_helper.resolver.resolve("example.com") + """ + + def __init__(self, parent_helper): + self.parent_helper = parent_helper + self.config = self.parent_helper.config + self.dns_config = self.config.get("dns", {}) + engine_debug = self.config.get("engine", {}).get("debug", False) + super().__init__(server_kwargs={"config": self.config}, debug=engine_debug) + + # resolver + self.timeout = self.dns_config.get("timeout", 5) + self.resolver = dns.asyncresolver.Resolver() + self.resolver.rotate = True + self.resolver.timeout = self.timeout + self.resolver.lifetime = self.timeout + + self.runaway_limit = self.dns_config.get("runaway_limit", 5) + + # wildcard handling + self.wildcard_disable = self.dns_config.get("wildcard_disable", False) + self.wildcard_ignore = RadixTarget() + for d in self.dns_config.get("wildcard_ignore", []): + self.wildcard_ignore.insert(d) + + # copy the system's current resolvers to a text file for tool use + self.system_resolvers = dns.resolver.Resolver().nameservers + # TODO: DNS server speed test (start in background task) + self.resolver_file = self.parent_helper.tempfile(self.system_resolvers, pipe=False) + + # brute force helper + self._brute = None + + self._is_wildcard_cache = LFUCache(maxsize=1000) + self._is_wildcard_domain_cache = LFUCache(maxsize=1000) + + async def resolve(self, query, **kwargs): + return await self.run_and_return("resolve", query=query, **kwargs) + + async def resolve_raw(self, query, **kwargs): + return await self.run_and_return("resolve_raw", query=query, **kwargs) + + async def resolve_batch(self, queries, **kwargs): + agen = self.run_and_yield("resolve_batch", queries=queries, **kwargs) + while 1: + try: + yield await agen.__anext__() + except (StopAsyncIteration, GeneratorExit): + await agen.aclose() + break + + async def 
resolve_raw_batch(self, queries): + agen = self.run_and_yield("resolve_raw_batch", queries=queries) + while 1: + try: + yield await agen.__anext__() + except (StopAsyncIteration, GeneratorExit): + await agen.aclose() + break + + @property + def brute(self): + if self._brute is None: + from .brute import DNSBrute + + self._brute = DNSBrute(self.parent_helper) + return self._brute + + @async_cachedmethod( + lambda self: self._is_wildcard_cache, + key=lambda query, rdtypes, raw_dns_records: (query, tuple(sorted(rdtypes)), bool(raw_dns_records)), + ) + async def is_wildcard(self, query, rdtypes, raw_dns_records=None): + """ + Use this method to check whether a *host* is a wildcard entry + + This can reliably tell the difference between a valid DNS record and a wildcard within a wildcard domain. + + If you want to know whether a domain is using wildcard DNS, use `is_wildcard_domain()` instead. + + Args: + query (str): The hostname to check for a wildcard entry. + ips (list, optional): List of IPs to compare against, typically obtained from a previous DNS resolution of the query. + rdtype (str, optional): The DNS record type (e.g., "A", "AAAA") to consider during the check. + + Returns: + dict: A dictionary indicating if the query is a wildcard for each checked DNS record type. + Keys are DNS record types like "A", "AAAA", etc. + Values are tuples where the first element is a boolean indicating if the query is a wildcard, + and the second element is the wildcard parent if it's a wildcard. + + Raises: + ValueError: If only one of `ips` or `rdtype` is specified or if no valid IPs are specified. + + Examples: + >>> is_wildcard("www.github.io") + {"A": (True, "github.io"), "AAAA": (True, "github.io")} + + >>> is_wildcard("www.evilcorp.com", ips=["93.184.216.34"], rdtype="A") + {"A": (False, "evilcorp.com")} + + Note: + `is_wildcard` can be True, False, or None (indicating that wildcard detection was inconclusive) + """ + query = self._wildcard_prevalidation(query) + if not query: + return {} + + # skip check if the query is a domain + if is_domain(query): + return {} + + return await self.run_and_return("is_wildcard", query=query, rdtypes=rdtypes, raw_dns_records=raw_dns_records) + + @async_cachedmethod( + lambda self: self._is_wildcard_domain_cache, key=lambda domain, rdtypes: (domain, tuple(sorted(rdtypes))) + ) + async def is_wildcard_domain(self, domain, rdtypes): + domain = self._wildcard_prevalidation(domain) + if not domain: + return {} + + return await self.run_and_return("is_wildcard_domain", domain=domain, rdtypes=rdtypes) + + def _wildcard_prevalidation(self, host): + if self.wildcard_disable: + return False + + host = clean_dns_record(host) + # skip check if it's an IP or a plain hostname + if is_ip(host) or "." 
not in host: + return False + + # skip if query isn't a dns name + if not is_dns_name(host): + return False + + # skip check if the query's parent domain is excluded in the config + wildcard_ignore = self.wildcard_ignore.search(host) + if wildcard_ignore: + log.debug(f"Skipping wildcard detection on {host} because {wildcard_ignore} is excluded in the config") + return False + + return host + + async def _mock_dns(self, mock_data, custom_lookup_fn=None): + from .mock import MockResolver + + self.resolver = MockResolver(mock_data, custom_lookup_fn=custom_lookup_fn) + await self.run_and_return("_mock_dns", mock_data=mock_data, custom_lookup_fn=custom_lookup_fn) diff --git a/bbot/core/helpers/dns/engine.py b/bbot/core/helpers/dns/engine.py new file mode 100644 index 0000000000..369a020685 --- /dev/null +++ b/bbot/core/helpers/dns/engine.py @@ -0,0 +1,665 @@ +import os +import dns +import time +import asyncio +import logging +import traceback +from cachetools import LRUCache +from contextlib import suppress + +from bbot.core.engine import EngineServer +from bbot.core.helpers.async_helpers import NamedLock +from bbot.core.helpers.dns.helpers import extract_targets +from bbot.core.helpers.misc import ( + is_ip, + rand_string, + parent_domain, + domain_parents, +) + + +log = logging.getLogger("bbot.core.helpers.dns.engine.server") + +all_rdtypes = ["A", "AAAA", "SRV", "MX", "NS", "SOA", "CNAME", "TXT"] + + +class DNSEngine(EngineServer): + CMDS = { + 0: "resolve", + 1: "resolve_raw", + 2: "resolve_batch", + 3: "resolve_raw_batch", + 4: "is_wildcard", + 5: "is_wildcard_domain", + 99: "_mock_dns", + } + + def __init__(self, socket_path, config={}, debug=False): + super().__init__(socket_path, debug=debug) + + self.config = config + self.dns_config = self.config.get("dns", {}) + # config values + self.timeout = self.dns_config.get("timeout", 5) + self.retries = self.dns_config.get("retries", 1) + self.abort_threshold = self.dns_config.get("abort_threshold", 50) + + # resolver + self.resolver = dns.asyncresolver.Resolver() + self.resolver.rotate = True + self.resolver.timeout = self.timeout + self.resolver.lifetime = self.timeout + + # skip certain queries + dns_omit_queries = self.dns_config.get("omit_queries", None) + if not dns_omit_queries: + dns_omit_queries = [] + self.dns_omit_queries = {} + for d in dns_omit_queries: + d = d.split(":") + if len(d) == 2: + rdtype, query = d + rdtype = rdtype.upper() + query = query.lower() + try: + self.dns_omit_queries[rdtype].add(query) + except KeyError: + self.dns_omit_queries[rdtype] = {query} + + # wildcard handling + self.wildcard_ignore = self.dns_config.get("wildcard_ignore", None) + if not self.wildcard_ignore: + self.wildcard_ignore = [] + self.wildcard_ignore = tuple([str(d).strip().lower() for d in self.wildcard_ignore]) + self.wildcard_tests = self.dns_config.get("wildcard_tests", 5) + self._wildcard_cache = {} + # since wildcard detection takes some time, This is to prevent multiple + # modules from kicking off wildcard detection for the same domain at the same time + self._wildcard_lock = NamedLock() + + self._dns_connectivity_lock = None + self._last_dns_success = None + self._last_connectivity_warning = time.time() + # keeps track of warnings issued for wildcard detection to prevent duplicate warnings + self._dns_warnings = set() + self._errors = {} + self._debug = self.dns_config.get("debug", False) + self._dns_cache = LRUCache(maxsize=10000) + + self.filter_bad_ptrs = self.dns_config.get("filter_ptrs", True) + + async def resolve(self, 
query, **kwargs): + """Resolve DNS names and IP addresses to their corresponding results. + + This is a high-level function that can translate a given domain name to its associated IP addresses + or an IP address to its corresponding domain names. It's structured for ease of use within modules + and will abstract away most of the complexity of DNS resolution, returning a simple set of results. + + Args: + query (str): The domain name or IP address to resolve. + **kwargs: Additional arguments to be passed to the resolution process. + + Returns: + set: A set containing resolved domain names or IP addresses. + + Examples: + >>> results = await resolve("1.2.3.4") + {"evilcorp.com"} + + >>> results = await resolve("evilcorp.com") + {"1.2.3.4", "dead::beef"} + """ + results = set() + try: + answers, errors = await self.resolve_raw(query, **kwargs) + for answer in answers: + for _, host in extract_targets(answer): + results.add(host) + except BaseException: + self.log.trace(f"Caught exception in resolve({query}, {kwargs}):") + self.log.trace(traceback.format_exc()) + raise + + self.debug(f"Results for {query} with kwargs={kwargs}: {results}") + return results + + async def resolve_raw(self, query, **kwargs): + """Resolves the given query to its associated DNS records. + + This function is a foundational method for DNS resolution in this class. It understands both IP addresses and + hostnames and returns their associated records in a raw format provided by the dnspython library. + + Args: + query (str): The IP address or hostname to resolve. + type (str or list[str], optional): Specifies the DNS record type(s) to fetch. Can be a single type like 'A' + or a list like ['A', 'AAAA']. If set to 'any', 'all', or '*', it fetches all supported types. If not + specified, the function defaults to fetching 'A' and 'AAAA' records. + **kwargs: Additional arguments that might be passed to the resolver. + + Returns: + tuple: A tuple containing two lists: + - list: A list of tuples where each tuple consists of a record type string (like 'A') and the associated + raw dnspython answer. + - list: A list of tuples where each tuple consists of a record type string and the associated error if + there was an issue fetching the record. + + Examples: + >>> await resolve_raw("8.8.8.8") + ([('PTR', )], []) + + >>> await resolve_raw("dns.google") + (, []) + """ + # DNS over TCP is more reliable + # But setting this breaks DNS resolution on Ubuntu because systemd-resolve doesn't support TCP + # kwargs["tcp"] = True + try: + query = str(query).strip() + kwargs.pop("rdtype", None) + rdtype = kwargs.pop("type", "A") + if is_ip(query): + return await self._resolve_ip(query, **kwargs) + else: + return await self._resolve_hostname(query, rdtype=rdtype, **kwargs) + except BaseException: + self.log.trace(f"Caught exception in resolve_raw({query}, {kwargs}):") + self.log.trace(traceback.format_exc()) + raise + + async def _resolve_hostname(self, query, **kwargs): + """Translate a hostname into its corresponding IP addresses. + + This is the foundational function for converting a domain name into its associated IP addresses. It's designed + for internal use within the class and handles retries, caching, and a variety of error/timeout scenarios. + It also respects certain configurations that might ask to skip certain types of queries. Results are returned + in the default dnspython answer object format. + + Args: + query (str): The hostname to resolve. + rdtype (str, optional): The type of DNS record to query (e.g., 'A', 'AAAA'). 
Defaults to 'A'. + retries (int, optional): The number of times to retry on failure. Defaults to class-wide `retries`. + use_cache (bool, optional): Whether to check the cache before trying a fresh resolution. Defaults to True. + **kwargs: Additional arguments that might be passed to the resolver. + + Returns: + tuple: A tuple containing: + - list: A list of resolved IP addresses. + - list: A list of errors encountered during the resolution process. + + Examples: + >>> results, errors = await _resolve_hostname("google.com") + (, []) + """ + self.debug(f"Resolving {query} with kwargs={kwargs}") + results = [] + errors = [] + rdtype = kwargs.get("rdtype", "A") + + # skip certain queries if requested + if rdtype in self.dns_omit_queries: + if any(h == query or query.endswith(f".{h}") for h in self.dns_omit_queries[rdtype]): + self.debug(f"Skipping {rdtype}:{query} because it's omitted in the config") + return results, errors + + parent = parent_domain(query) + retries = kwargs.pop("retries", self.retries) + use_cache = kwargs.pop("use_cache", True) + tries_left = int(retries) + 1 + parent_hash = hash((parent, rdtype)) + dns_cache_hash = hash((query, rdtype)) + while tries_left > 0: + try: + if use_cache: + results = self._dns_cache.get(dns_cache_hash, []) + if not results: + error_count = self._errors.get(parent_hash, 0) + if error_count >= self.abort_threshold: + connectivity = await self._connectivity_check() + if connectivity: + self.log.verbose( + f'Aborting query "{query}" because failed {rdtype} queries for "{parent}" ({error_count:,}) exceeded abort threshold ({self.abort_threshold:,})' + ) + if parent_hash not in self._dns_warnings: + self.log.verbose( + f'Aborting future {rdtype} queries to "{parent}" because error count ({error_count:,}) exceeded abort threshold ({self.abort_threshold:,})' + ) + self._dns_warnings.add(parent_hash) + return results, errors + results = await self._catch(self.resolver.resolve, query, **kwargs) + if use_cache: + self._dns_cache[dns_cache_hash] = results + if parent_hash in self._errors: + self._errors[parent_hash] = 0 + break + except ( + dns.resolver.NoNameservers, + dns.exception.Timeout, + dns.resolver.LifetimeTimeout, + TimeoutError, + asyncio.exceptions.TimeoutError, + ) as e: + try: + self._errors[parent_hash] += 1 + except KeyError: + self._errors[parent_hash] = 1 + errors.append(e) + # don't retry if we get a SERVFAIL + if isinstance(e, dns.resolver.NoNameservers): + break + tries_left -= 1 + err_msg = ( + f'DNS error or timeout for {rdtype} query "{query}" ({self._errors[parent_hash]:,} so far): {e}' + ) + if tries_left > 0: + retry_num = (retries + 1) - tries_left + self.debug(err_msg) + self.debug(f"Retry (#{retry_num}) resolving {query} with kwargs={kwargs}") + else: + self.log.verbose(err_msg) + + if results: + self._last_dns_success = time.time() + self.debug(f"Answers for {query} with kwargs={kwargs}: {list(results)}") + + if errors: + self.debug(f"Errors for {query} with kwargs={kwargs}: {errors}") + + return results, errors + + async def _resolve_ip(self, query, **kwargs): + """Translate an IP address into a corresponding DNS name. + + This is the most basic function that will convert an IP address into its associated domain name. It handles + retries, caching, and multiple types of timeout/error scenarios internally. The function is intended for + internal use and should not be directly called by modules without understanding its intricacies. + + Args: + query (str): The IP address to be reverse-resolved. 
+ retries (int, optional): The number of times to retry on failure. Defaults to 0. + use_cache (bool, optional): Whether to check the cache for the result before attempting resolution. Defaults to True. + **kwargs: Additional arguments to be passed to the resolution process. + + Returns: + tuple: A tuple containing: + - list: A list of resolved domain names (in default dnspython answer format). + - list: A list of errors encountered during resolution. + + Examples: + >>> results, errors = await _resolve_ip("8.8.8.8") + (, []) + """ + self.debug(f"Reverse-resolving {query} with kwargs={kwargs}") + retries = kwargs.pop("retries", 0) + use_cache = kwargs.pop("use_cache", True) + tries_left = int(retries) + 1 + results = [] + errors = [] + dns_cache_hash = hash((query, "PTR")) + while tries_left > 0: + try: + if use_cache: + results = self._dns_cache.get(dns_cache_hash, []) + if not results: + results = await self._catch(self.resolver.resolve_address, query, **kwargs) + if use_cache: + self._dns_cache[dns_cache_hash] = results + break + except ( + dns.resolver.NoNameservers, + dns.exception.Timeout, + dns.resolver.LifetimeTimeout, + TimeoutError, + asyncio.exceptions.TimeoutError, + ) as e: + errors.append(e) + # don't retry if we get a SERVFAIL + if isinstance(e, dns.resolver.NoNameservers): + self.debug(f"{e} (query={query}, kwargs={kwargs})") + break + else: + tries_left -= 1 + if tries_left > 0: + retry_num = (retries + 2) - tries_left + self.debug(f"Retrying (#{retry_num}) {query} with kwargs={kwargs}") + + if results: + self._last_dns_success = time.time() + + return results, errors + + async def resolve_batch(self, queries, threads=10, **kwargs): + """ + A helper to execute a bunch of DNS requests. + + Args: + queries (list): List of queries to resolve. + **kwargs: Additional keyword arguments to pass to `resolve()`. + + Yields: + tuple: A tuple containing the original query and its resolved value. + + Examples: + >>> import asyncio + >>> async def example_usage(): + ... async for result in resolve_batch(['www.evilcorp.com', 'evilcorp.com']): + ... print(result) + ('www.evilcorp.com', {'1.1.1.1'}) + ('evilcorp.com', {'2.2.2.2'}) + """ + async for (args, _, _), responses in self.task_pool( + self.resolve, args_kwargs=queries, threads=threads, global_kwargs=kwargs + ): + yield args[0], responses + + async def resolve_raw_batch(self, queries, threads=10, **kwargs): + queries_kwargs = [[q[0], {"type": q[1]}] for q in queries] + async for (args, kwargs, _), (answers, errors) in self.task_pool( + self.resolve_raw, args_kwargs=queries_kwargs, threads=threads, global_kwargs=kwargs + ): + query = args[0] + rdtype = kwargs["type"] + yield ((query, rdtype), (answers, errors)) + + async def _catch(self, callback, *args, **kwargs): + """ + Asynchronously catches exceptions thrown during DNS resolution and logs them. + + This method wraps around a given asynchronous callback function to handle different + types of DNS exceptions and general exceptions. It logs the exceptions for debugging + and, in some cases, re-raises them. + + Args: + callback (callable): The asynchronous function to be executed. + *args: Positional arguments to pass to the callback. + **kwargs: Keyword arguments to pass to the callback. + + Returns: + Any: The return value of the callback function, or an empty list if an exception is caught. + + Raises: + dns.resolver.NoNameservers: When no nameservers could be reached. 
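
An aside: resolve_raw_batch() above ships without a docstring; it pairs each (query, rdtype) tuple with the raw dnspython answers and errors. A hedged sketch of its yield shape, with `engine` assumed to be a DNSEngine instance and run inside an async context:

queries = [("evilcorp.com", "A"), ("evilcorp.com", "MX")]
async for (query, rdtype), (answers, errors) in engine.resolve_raw_batch(queries):
    for answer in answers:
        print(query, rdtype, answer.to_text())  # e.g. evilcorp.com A 1.2.3.4
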
+ """ + try: + return await callback(*args, **kwargs) + except dns.resolver.NoNameservers: + raise + except (dns.exception.Timeout, dns.resolver.LifetimeTimeout, TimeoutError): + self.log.debug(f"DNS query with args={args}, kwargs={kwargs} timed out after {self.timeout} seconds") + raise + except dns.exception.DNSException as e: + self.debug(f"{e} (args={args}, kwargs={kwargs})") + except Exception as e: + self.log.warning(f"Error in {callback.__qualname__}() with args={args}, kwargs={kwargs}: {e}") + self.log.trace(traceback.format_exc()) + return [] + + async def is_wildcard(self, query, rdtypes, raw_dns_records=None): + """ + Use this method to check whether a *host* is a wildcard entry + + This can reliably tell the difference between a valid DNS record and a wildcard within a wildcard domain. + + It works by making a bunch of random DNS queries to the parent domain, compiling a list of wildcard IPs, + then comparing those to the IPs of the host in question. If the host's IP matches the wildcard ones, it's a wildcard. + + If you want to know whether a domain is using wildcard DNS, use `is_wildcard_domain()` instead. + + Args: + query (str): The hostname to check for a wildcard entry. + rdtypes (list): The DNS record type (e.g., "A", "AAAA") to consider during the check. + raw_dns_records (dict, optional): Dictionary of {rdtype: [answer1, answer2, ...], ...} containing raw dnspython answers for the query. + + Returns: + dict: A dictionary indicating if the query is a wildcard for each checked DNS record type. + Keys are DNS record types like "A", "AAAA", etc. + Values are tuples where the first element is a boolean indicating if the query is a wildcard, + and the second element is the wildcard parent if it's a wildcard. + + Examples: + >>> is_wildcard("www.github.io", rdtypes=["A", "AAAA", "MX"]) + {"A": (True, "github.io"), "AAAA": (True, "github.io"), "MX": (False, "github.io")} + + >>> is_wildcard("www.evilcorp.com", rdtypes=["A"]) + {"A": (False, "evilcorp.com")} + + Note: + `is_wildcard` can be True, False, or None (indicating that wildcard detection was inconclusive) + """ + if isinstance(rdtypes, str): + rdtypes = [rdtypes] + + result = {} + + # if the work of resolving hasn't been done yet, do it + if raw_dns_records is None: + raw_dns_records = {} + queries = [(query, rdtype) for rdtype in rdtypes] + async for (_, rdtype), (answers, errors) in self.resolve_raw_batch(queries): + if answers: + for answer in answers: + try: + raw_dns_records[rdtype].add(answer) + except KeyError: + raw_dns_records[rdtype] = {answer} + else: + if errors: + self.debug(f"Failed to resolve {query} ({rdtype}) during wildcard detection") + result[rdtype] = ("ERROR", query) + + # clean + process the raw records into a baseline + baseline = {} + baseline_raw = {} + for rdtype, answers in raw_dns_records.items(): + for answer in answers: + text_answer = answer.to_text() + try: + baseline_raw[rdtype].add(text_answer) + except KeyError: + baseline_raw[rdtype] = {text_answer} + for _, host in extract_targets(answer): + try: + baseline[rdtype].add(host) + except KeyError: + baseline[rdtype] = {host} + + # if it's unresolved, it's a big nope + if not raw_dns_records: + return result + + # once we've resolved the base query and have IP addresses to work with + # we can compare the IPs to the ones we have on file for wildcards + + # only bother to check the rdypes that actually resolve + rdtypes_to_check = set(raw_dns_records) + + # for every parent domain, starting with the shortest + parents = 
list(domain_parents(query)) + for parent in parents[::-1]: + # check if the parent domain is set up with wildcards + wildcard_results = await self.is_wildcard_domain(parent, rdtypes_to_check) + + # for every rdtype + for rdtype in list(baseline_raw): + # skip if we already found a wildcard for this rdtype + if rdtype in result: + continue + + # get our baseline IPs from above + _baseline = baseline.get(rdtype, set()) + _baseline_raw = baseline_raw.get(rdtype, set()) + + wildcard_rdtypes = wildcard_results.get(parent, {}) + wildcards = wildcard_rdtypes.get(rdtype, None) + if wildcards is None: + continue + wildcards, wildcard_raw = wildcards + + if wildcard_raw: + # skip this rdtype from now on + rdtypes_to_check.remove(rdtype) + + # check if any of our baseline IPs are in the wildcard results + is_wildcard = any(r in wildcards for r in _baseline) + is_wildcard_raw = any(r in wildcard_raw for r in _baseline_raw) + + # if there are any matches, we have a wildcard + if is_wildcard or is_wildcard_raw: + result[rdtype] = (True, parent) + else: + # otherwise, it's still suspicious, because we had random stuff resolve at this level + result[rdtype] = ("POSSIBLE", parent) + + # any rdtype that wasn't a wildcard, mark it as False + for rdtype, answers in baseline_raw.items(): + if answers and rdtype not in result: + result[rdtype] = (False, query) + + return result + + async def is_wildcard_domain(self, domain, rdtypes): + """ + Check whether a given host or its children make use of wildcard DNS entries. Wildcard DNS can have + various implications, particularly in subdomain enumeration and subdomain takeovers. + + Args: + domain (str): The domain to check for wildcard DNS entries. + rdtypes (list): Which DNS record types to check. + + Returns: + dict: A dictionary where the keys are the parent domains that have wildcard DNS entries, + and the values are another dictionary of DNS record types ("A", "AAAA", etc.) mapped to + sets of their resolved IP addresses. + + Examples: + >>> is_wildcard_domain("github.io") + {"github.io": {"A": {"1.2.3.4"}, "AAAA": {"dead::beef"}}} + + >>> is_wildcard_domain("example.com") + {} + """ + if isinstance(rdtypes, str): + rdtypes = [rdtypes] + rdtypes = set(rdtypes) + + wildcard_results = {} + # make a list of its parents + parents = list(domain_parents(domain, include_self=True)) + # and check each of them, beginning with the highest parent (i.e. the root domain) + for i, host in enumerate(parents[::-1]): + host_results = {} + queries = [((host, rdtype), {}) for rdtype in rdtypes] + async for ((_, rdtype), _, _), (results, results_raw) in self.task_pool( + self._is_wildcard_zone, args_kwargs=queries + ): + # if we hit a wildcard, we can skip this rdtype from now on + if results_raw: + rdtypes.remove(rdtype) + host_results[rdtype] = results, results_raw + + if host_results: + wildcard_results[host] = host_results + + return wildcard_results + + async def _is_wildcard_zone(self, host, rdtype): + """ + Check whether a specific DNS zone+rdtype has a wildcard configuration + """ + rdtype = rdtype.upper() + + # have we checked this host before? 
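+        # (in brief: resolve several random subdomains of this zone and pool their
+        # answers into a wildcard set; is_wildcard() above then flags a query when
+        # any of its real answers show up in that set)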
+ host_hash = hash((host, rdtype)) + async with self._wildcard_lock.lock(host_hash): + # if we've seen this host before + try: + wildcard_results, wildcard_results_raw = self._wildcard_cache[host_hash] + self.debug(f"Got {host}:{rdtype} from cache") + except KeyError: + wildcard_results = set() + wildcard_results_raw = set() + self.debug(f"Checking if {host}:{rdtype} is a wildcard") + + # determine if this is a wildcard domain + # resolve a bunch of random subdomains of the same parent + rand_queries = [] + for _ in range(self.wildcard_tests): + rand_query = f"{rand_string(digits=False, length=10)}.{host}" + rand_queries.append((rand_query, rdtype)) + + async for (query, rdtype), (answers, errors) in self.resolve_raw_batch(rand_queries, use_cache=False): + for answer in answers: + # consider both the raw record + wildcard_results_raw.add(answer.to_text()) + # and all the extracted hosts + for _, t in extract_targets(answer): + wildcard_results.add(t) + + if wildcard_results: + self.log.info(f"Encountered domain with wildcard DNS ({rdtype}): *.{host}") + else: + self.debug(f"Finished checking {host}:{rdtype}, it is not a wildcard") + self._wildcard_cache[host_hash] = wildcard_results, wildcard_results_raw + + return wildcard_results, wildcard_results_raw + + async def _is_wildcard(self, query, rdtypes, dns_children): + if isinstance(rdtypes, str): + rdtypes = [rdtypes] + + @property + def dns_connectivity_lock(self): + if self._dns_connectivity_lock is None: + self._dns_connectivity_lock = asyncio.Lock() + return self._dns_connectivity_lock + + async def _connectivity_check(self, interval=5): + """ + Periodically checks for an active internet connection by attempting DNS resolution. + + Args: + interval (int, optional): The time interval, in seconds, at which to perform the check. + Defaults to 5 seconds. + + Returns: + bool: True if there is an active internet connection, False otherwise. 
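+
+        Note:
+            A successful resolution is cached, so at most one real DNS query (an A lookup
+            of www.google.com) is sent per `interval`; failure warnings are likewise
+            rate-limited via `_last_connectivity_warning` to avoid log spam.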
+ + Examples: + >>> await _connectivity_check() + True + """ + if self._last_dns_success is not None: + if time.time() - self._last_dns_success < interval: + return True + dns_server_working = [] + async with self.dns_connectivity_lock: + with suppress(Exception): + dns_server_working = await self._catch(self.resolver.resolve, "www.google.com", rdtype="A") + if dns_server_working: + self._last_dns_success = time.time() + return True + if time.time() - self._last_connectivity_warning > interval: + self.log.warning("DNS queries are failing, please check your internet connection") + self._last_connectivity_warning = time.time() + self._errors.clear() + return False + + def debug(self, *args, **kwargs): + if self._debug: + self.log.trace(*args, **kwargs) + + @property + def in_tests(self): + return os.getenv("BBOT_TESTING", "") == "True" + + async def _mock_dns(self, mock_data, custom_lookup_fn=None): + from .mock import MockResolver + + def deserialize_function(func_source): + assert self.in_tests, "Can only mock when BBOT_TESTING=True" + if func_source is None: + return None + namespace = {} + exec(func_source, {}, namespace) + return namespace["custom_lookup"] + + self.resolver = MockResolver(mock_data, custom_lookup_fn=deserialize_function(custom_lookup_fn)) diff --git a/bbot/core/helpers/dns/helpers.py b/bbot/core/helpers/dns/helpers.py new file mode 100644 index 0000000000..340af5a425 --- /dev/null +++ b/bbot/core/helpers/dns/helpers.py @@ -0,0 +1,282 @@ +import logging + +from bbot.core.helpers.regexes import dns_name_extraction_regex +from bbot.core.helpers.misc import clean_dns_record, smart_decode + +log = logging.getLogger("bbot.core.helpers.dns") + + +# the following are the result of a 1-day internet survey to find the top SRV records +# the scan resulted in 36,282 SRV records. the count for each one is shown. 
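+# the list below is sorted from most common to least, with each record's observed count in its trailing comment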
+common_srvs = [ + "_sipfederationtls._tcp", # 6909 + "_sip._tls", # 6853 + "_autodiscover._tcp", # 4268 + "_xmpp-server._tcp", # 1437 + "_sip._tcp", # 1193 + "_sips._tcp", # 1183 + "_caldavs._tcp", # 1179 + "_carddavs._tcp", # 1132 + "_caldav._tcp", # 1035 + "_carddav._tcp", # 1024 + "_sip._udp", # 1007 + "_imaps._tcp", # 1007 + "_submission._tcp", # 906 + "_h323cs._tcp", # 846 + "_h323ls._udp", # 782 + "_xmpp-client._tcp", # 689 + "_pop3s._tcp", # 394 + "_jabber._tcp", # 277 + "_imap._tcp", # 267 + "_turn._udp", # 256 + "_pop3._tcp", # 221 + "_ldap._tcp", # 213 + "_smtps._tcp", # 195 + "_sipinternaltls._tcp", # 192 + "_vlmcs._tcp", # 165 + "_kerberos._udp", # 163 + "_kerberos._tcp", # 148 + "_kpasswd._udp", # 128 + "_kpasswd._tcp", # 100 + "_ntp._udp", # 90 + "_gc._tcp", # 73 + "_kerberos-master._udp", # 66 + "_ldap._tcp.dc._msdcs", # 63 + "_matrix._tcp", # 62 + "_smtp._tcp", # 61 + "_stun._udp", # 57 + "_kerberos._tcp.dc._msdcs", # 54 + "_ldap._tcp.gc._msdcs", # 49 + "_kerberos-adm._tcp", # 44 + "_ldap._tcp.pdc._msdcs", # 43 + "_kerberos-master._tcp", # 43 + "_http._tcp", # 37 + "_h323rs._tcp", # 36 + "_sipinternal._tcp", # 35 + "_turn._tcp", # 33 + "_stun._tcp", # 33 + "_h323ls._tcp", # 33 + "_x-puppet._tcp", # 30 + "_h323cs._udp", # 27 + "_stuns._tcp", # 26 + "_jabber-client._tcp", # 25 + "_x-puppet-ca._tcp", # 22 + "_ts3._udp", # 22 + "_minecraft._tcp", # 22 + "_turns._tcp", # 21 + "_ldaps._tcp", # 21 + "_xmpps-client._tcp", # 20 + "_https._tcp", # 19 + "_ftp._tcp", # 19 + "_xmpp-server._udp", # 18 + "_xmpp-client._udp", # 17 + "_jabber._udp", # 17 + "_jabber-client._udp", # 17 + "_xmpps-server._tcp", # 15 + "_finger._tcp", # 14 + "_stuns._udp", # 12 + "_hkp._tcp", # 12 + "_vlmcs._udp", # 11 + "_turns._udp", # 11 + "_tftp._udp", # 11 + "_ssh._tcp", # 11 + "_rtps._udp", # 11 + "_mysqlsrv._tcp", # 11 + "_hkps._tcp", # 11 + "_h323be._udp", # 11 + "_dns._tcp", # 11 + "_wss._tcp", # 10 + "_wpad._tcp", # 10 + "_whois._tcp", # 10 + "_webexconnect._tcp", # 10 + "_webexconnects._tcp", # 10 + "_vnc._tcp", # 10 + "_test._tcp", # 10 + "_telnet._tcp", # 10 + "_telnets._tcp", # 10 + "_teamspeak._tcp", # 10 + "_svns._tcp", # 10 + "_svcp._tcp", # 10 + "_smb._tcp", # 10 + "_sip-tls._tcp", # 10 + "_sftp._tcp", # 10 + "_secure-pop3._tcp", # 10 + "_secure-imap._tcp", # 10 + "_rtsp._tcp", # 10 + "_rtps._tcp", # 10 + "_rpc._tcp", # 10 + "_rfb._tcp", # 10 + "_raop._tcp", # 10 + "_pstn._tcp", # 10 + "_presence._tcp", # 10 + "_pkixrep._tcp", # 10 + "_pgprevokations._tcp", # 10 + "_pgpkeys._tcp", # 10 + "_ocsp._tcp", # 10 + "_nntp._tcp", # 10 + "_nfs._tcp", # 10 + "_netbios-ssn._tcp", # 10 + "_netbios-ns._tcp", # 10 + "_netbios-dgm._tcp", # 10 + "_mumble._tcp", # 10 + "_msrpc._tcp", # 10 + "_mqtts._tcp", # 10 + "_minecraft._udp", # 10 + "_iscsi._tcp", # 10 + "_ircs._tcp", # 10 + "_ipp._tcp", # 10 + "_ipps._tcp", # 10 + "_h323be._tcp", # 10 + "_gits._tcp", # 10 + "_ftps._tcp", # 10 + "_ftpes._tcp", # 10 + "_dnss._udp", # 10 + "_dnss._tcp", # 10 + "_diameter._tcp", # 10 + "_crl._tcp", # 10 + "_crls._tcp", # 10 + "_cmp._tcp", # 10 + "_certificates._tcp", # 10 + "_aix._tcp", # 10 + "_afpovertcp._tcp", # 10 + "_collab-edge._tls", # 6 + "_tcp", # 5 + "_client._smtp", # 3 + "_udp", # 2 + "_tls", # 2 + "_msdcs", # 2 + "_gc._msdcs", # 2 + "_ldaps._tcp.dc._msdcs", # 1 + "_kerberos._tcp.kdc._msdcs", # 1 + "_kerberos.tcp.dc._msdcs", # 1 + "_imap", # 1 + "_iax", # 1 +] + + +def extract_targets(record): + """ + Extracts hostnames or IP addresses from a given DNS record. 
+
+    This method reads the DNS record's type and based on that, extracts the target
+    hostnames or IP addresses it points to. The type of DNS record
+    (e.g., "A", "MX", "CNAME", etc.) determines which fields are used for extraction.
+
+    Args:
+        record (dns.rdata.Rdata): The DNS record to extract information from.
+
+    Returns:
+        set: A set of tuples, each containing the DNS record type and the extracted value.
+
+    Examples:
+        >>> from dns.rrset import from_text
+        >>> record = from_text('www.example.com', 3600, 'IN', 'A', '192.0.2.1')
+        >>> extract_targets(record[0])
+        {('A', '192.0.2.1')}
+
+        >>> record = from_text('example.com', 3600, 'IN', 'MX', '10 mail.example.com.')
+        >>> extract_targets(record[0])
+        {('MX', 'mail.example.com')}
+
+    """
+    results = set()
+
+    def add_result(rdtype, _record):
+        cleaned = clean_dns_record(_record)
+        if cleaned:
+            results.add((rdtype, cleaned))
+
+    rdtype = str(record.rdtype.name).upper()
+    if rdtype in ("A", "AAAA", "NS", "CNAME", "PTR"):
+        add_result(rdtype, record)
+    elif rdtype == "SOA":
+        add_result(rdtype, record.mname)
+    elif rdtype == "MX":
+        add_result(rdtype, record.exchange)
+    elif rdtype == "SRV":
+        add_result(rdtype, record.target)
+    elif rdtype == "TXT":
+        for s in record.strings:
+            s = smart_decode(s)
+            for match in dns_name_extraction_regex.finditer(s):
+                start, end = match.span()
+                host = s[start:end]
+                add_result(rdtype, host)
+    elif rdtype == "NSEC":
+        add_result(rdtype, record.next)
+    else:
+        log.warning(f'Unknown DNS record type "{rdtype}"')
+    return results
+
+
+def service_record(host, rdtype=None):
+    """
+    Indicates whether the provided host name (and optional rdtype) is an SRV or related service record.
+
+    These types of records do not (and should not) have A/AAAA/CNAME or similar records; they exist only to advertise configuration and/or policy information for different Internet-facing services.
+
+    This function provides a single, consistent way to perform this test, rather than duplicating the logic across modules.
+
+    The result lets modules quickly test whether a host name is relevant and worth inspecting in the context of what the module does.
+
+    NOTE: While underscores are technically not supposed to exist in DNS names per the RFCs, they are used in practice, so we can't assume that a name that contains or starts with an underscore is a service record; we must check for specific strings.
+
+    Args:
+        host (str): A DNS host name
+        rdtype (str, optional): The DNS record type, if known
+
+    Returns:
+        bool: True if the host is an SRV or similar service record, False if it is not.
+
+    Examples:
+        >>> service_record('_xmpp._tcp.example.com')
+        True
+
+        >>> service_record('_custom._service.example.com', 'SRV')
+        True
+
+        >>> service_record('_dmarc.example.com')
+        True
+
+        >>> service_record('www.example.com')
+        False
+    """
+
+    # if we were given an rdtype, check if it is SRV
+    # NOTE: we don't care what the name is if rdtype == SRV
+    if rdtype and str(rdtype).upper() == "SRV":
+        return True
+
+    # we did not receive rdtype, so we'll have to inspect host name parts
+    parts = str(host).split(".")
+
+    if not parts:
+        return False
+
+    # DMARC TXT records, e.g. _dmarc.example.com
+    if parts[0] == "_dmarc":
+        return True
+
+    # MTA-STS TXT records, e.g. _mta-sts.example.com
+    if parts[0] == "_mta-sts":
+        return True
+
+    if len(parts) < 2:
+        return False
+
+    # classic SRV record names, e.g. 
_ldap._tcp.example.com + if parts[1] == "_udp" or parts[1] == "_tcp": + return True + + # TLS indicating records, used by SMTP TLS-RPT etc, e.g. _smtp._tls.example.com + if parts[1] == "_tls": + return True + + # BIMI TXT records, e.g. selector._bimi.example.com + if parts[1] == "_bimi": + return True + + # DKIM TXT records, e.g. selector._domainkey.example.com + if parts[1] == "_domainkey": + return True + + return False diff --git a/bbot/core/helpers/dns/mock.py b/bbot/core/helpers/dns/mock.py new file mode 100644 index 0000000000..3f6fd83ea5 --- /dev/null +++ b/bbot/core/helpers/dns/mock.py @@ -0,0 +1,74 @@ +import dns +import logging + +log = logging.getLogger("bbot.core.helpers.dns.mock") + + +class MockResolver: + def __init__(self, mock_data=None, custom_lookup_fn=None): + self.mock_data = mock_data if mock_data else {} + self._custom_lookup_fn = custom_lookup_fn + self.nameservers = ["127.0.0.1"] + + async def resolve_address(self, ipaddr, *args, **kwargs): + modified_kwargs = {} + modified_kwargs.update(kwargs) + modified_kwargs["rdtype"] = "PTR" + return await self.resolve(str(dns.reversename.from_address(ipaddr)), *args, **modified_kwargs) + + def _lookup(self, query, rdtype): + query = query.strip(".") + ret = [] + if self._custom_lookup_fn is not None: + answers = self._custom_lookup_fn(query, rdtype) + if answers is not None: + ret.extend(list(answers)) + answers = self.mock_data.get(query, {}).get(rdtype, []) + if answers: + ret.extend(list(answers)) + if not ret: + raise dns.resolver.NXDOMAIN(f"No answer found for {query} {rdtype}") + return ret + + def create_dns_response(self, query_name, answers, rdtype): + query_name = query_name.strip(".") + message_text = f"""id 1234 +opcode QUERY +rcode NOERROR +flags QR AA RD +;QUESTION +{query_name}. IN {rdtype} +;ANSWER""" + for answer in answers: + if answer == "": + answer = '""' + message_text += f"\n{query_name}. 1 IN {rdtype} {answer}" + + message_text += "\n;AUTHORITY\n;ADDITIONAL\n" + message = dns.message.from_text(message_text) + # log.verbose(message_text) + return message + + async def resolve(self, query_name, rdtype=None): + if rdtype is None: + rdtype = "A" + elif isinstance(rdtype, str): + rdtype = rdtype.upper() + else: + rdtype = str(rdtype.name).upper() + + domain_name = dns.name.from_text(query_name) + rdtype_obj = dns.rdatatype.from_text(rdtype) + + if "_NXDOMAIN" in self.mock_data and query_name in self.mock_data["_NXDOMAIN"]: + # Simulate the NXDOMAIN exception + raise dns.resolver.NXDOMAIN + + try: + answers = self._lookup(query_name, rdtype) + log.verbose(f"Answers for {query_name}:{rdtype}: {answers}") + response = self.create_dns_response(query_name, answers, rdtype) + answer = dns.resolver.Answer(domain_name, rdtype_obj, dns.rdataclass.IN, response) + return answer + except dns.resolver.NXDOMAIN: + return [] diff --git a/bbot/core/helpers/files.py b/bbot/core/helpers/files.py new file mode 100644 index 0000000000..5e7d2d88d4 --- /dev/null +++ b/bbot/core/helpers/files.py @@ -0,0 +1,166 @@ +import os +import logging +import traceback +from contextlib import suppress + +from .misc import rm_at_exit + + +log = logging.getLogger("bbot.core.helpers.files") + + +def tempfile(self, content, pipe=True): + """ + Creates a temporary file or named pipe and populates it with content. + + Args: + content (list, set, tuple, str): The content to populate the temporary file with. + pipe (bool, optional): If True, a named pipe is used instead of a true file. 
+ This allows Python data to be piped directly into the process without taking up disk space. + Defaults to True. + + Returns: + str: The filepath of the created temporary file or named pipe. + + Examples: + >>> tempfile(["This", "is", "temp", "content"]) + '/home/user/.bbot/temp/pgxml13bov87oqrvjz7a' + + >>> tempfile(["Another", "temp", "file"], pipe=False) + '/home/user/.bbot/temp/someotherfile' + """ + filename = self.temp_filename() + rm_at_exit(filename) + try: + if type(content) not in (set, list, tuple): + content = (content,) + if pipe: + os.mkfifo(filename) + self.feed_pipe(filename, content, text=True) + else: + with open(filename, "w", errors="ignore") as f: + for c in content: + line = f"{self.smart_decode(c)}\n" + f.write(line) + except Exception as e: + log.error(f"Error creating temp file: {e}") + log.trace(traceback.format_exc()) + + return filename + + +def _feed_pipe(self, pipe, content, text=True): + """ + Feeds content into a named pipe or file-like object. + + Args: + pipe (str or file-like object): The named pipe or file-like object to feed the content into. + content (iterable): The content to be written into the pipe or file. + text (bool, optional): If True, the content is decoded using smart_decode function. + If False, smart_encode function is used. Defaults to True. + + Notes: + The method tries to determine if 'pipe' is a file-like object that has a 'write' method. + If so, it writes directly to that object. Otherwise, it opens 'pipe' as a file for writing. + """ + try: + if text: + decode_fn = self.smart_decode + newline = "\n" + else: + decode_fn = self.smart_encode + newline = b"\n" + try: + if hasattr(pipe, "write"): + try: + for c in content: + pipe.write(decode_fn(c) + newline) + finally: + with suppress(Exception): + pipe.close() + else: + with open(pipe, "w") as p: + for c in content: + p.write(decode_fn(c) + newline) + except BrokenPipeError: + log.debug("Broken pipe in _feed_pipe()") + except ValueError: + log.debug(f"Error _feed_pipe(): {traceback.format_exc()}") + except KeyboardInterrupt: + self.scan.stop() + except Exception as e: + log.error(f"Error in _feed_pipe(): {e}") + log.trace(traceback.format_exc()) + + +def feed_pipe(self, pipe, content, text=True): + """ + Starts a new thread to feed content into a named pipe or file-like object using _feed_pipe(). + + Args: + pipe (str or file-like object): The named pipe or file-like object to feed the content into. + content (iterable): The content to be written into the pipe or file. + text (bool, optional): If True, the content is decoded using smart_decode function. + If False, smart_encode function is used. Defaults to True. + """ + t = self.preset.core.create_thread( + target=self._feed_pipe, + args=(pipe, content), + kwargs={"text": text}, + daemon=True, + custom_name="bbot feed_pipe()", + ) + t.start() + + +def tempfile_tail(self, callback): + """ + Create a named pipe and execute a callback function on each line that is written to the pipe. + + Useful for ingesting output from a program (e.g. nuclei) directly from a file in real-time as + each line is written. The idea is you create the file with this function and then tell the CLI + program to output to it as a normal output file. We are then able to scoop up the output line + by line as it's written to our "file" (which is actually a named pipe, shhh! ;) + + Args: + callback (Callable): A function that will be invoked with each line written to the pipe as its argument. + + Returns: + str: The filename of the created named pipe. 
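+
+    Examples:
+        >>> # illustrative callback; any callable that accepts a single line works
+        >>> def handle_line(line):
+        ...     log.info(f"got line: {line}")
+        >>> output_file = tempfile_tail(handle_line)
+        >>> # hand output_file to the external tool as its output path;
+        >>> # handle_line() then fires for each line the tool writes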
+ """ + filename = self.temp_filename() + rm_at_exit(filename) + try: + os.mkfifo(filename) + t = self.preset.core.create_thread( + target=tail, args=(filename, callback), daemon=True, custom_name="bbot tempfile_tail()" + ) + t.start() + except Exception as e: + log.error(f"Error setting up tail for file {filename}: {e}") + log.trace(traceback.format_exc()) + return + return filename + + +def tail(filename, callback): + """ + Continuously read lines from a file and execute a callback function on each line. + + Args: + filename (str): The path of the file to tail. + callback (Callable): A function to call on each line read from the file. + + Examples: + >>> def print_callback(line): + ... print(f"Received: {line}") + >>> tail("/path/to/file", print_callback) + """ + try: + with open(filename, errors="ignore") as f: + for line in f: + line = line.rstrip("\r\n") + callback(line) + except Exception as e: + log.error(f"Error tailing file {filename}: {e}") + log.trace(traceback.format_exc()) diff --git a/bbot/core/helpers/helper.py b/bbot/core/helpers/helper.py index b6f4361337..78ccf67155 100644 --- a/bbot/core/helpers/helper.py +++ b/bbot/core/helpers/helper.py @@ -1,112 +1,232 @@ -import atexit -import shutil +import os import logging from pathlib import Path -from threading import Lock -from tabulate import tabulate +import multiprocessing as mp +from functools import partial +from concurrent.futures import ProcessPoolExecutor from . import misc from .dns import DNSHelper +from .web import WebHelper from .diff import HttpCompare +from .regex import RegexHelper from .wordcloud import WordCloud -from .threadpool import as_completed -from ...modules.base import BaseModule -from .depsinstaller import DepsInstaller from .interactsh import Interactsh +from .depsinstaller import DepsInstaller +from .async_helpers import get_event_loop + +from bbot.scanner.target import BaseTarget log = logging.getLogger("bbot.core.helpers") class ConfigAwareHelper: - from .web import wordlist, request, download, api_page_iter, curl - from .command import run, run_live, tempfile, feed_pipe, _feed_pipe - from .cache import cache_get, cache_put, cache_filename, is_cached, CacheDict + """ + Centralized helper class that provides unified access to various helper functions. + + This class serves as a convenient interface for accessing helper methods across different files. + It is designed to be configuration-aware, allowing helper functions to utilize scan-specific + configurations like rate-limits. The class leverages Python's `__getattribute__` magic method + to provide seamless access to helper functions across various namespaces. + + Attributes: + config (dict): Configuration settings for the BBOT scan instance. + _scan (Scan): A BBOT scan instance. + bbot_home (Path): Home directory for BBOT. + cache_dir (Path): Directory for storing cache files. + temp_dir (Path): Directory for storing temporary files. + tools_dir (Path): Directory for storing tools, e.g. compiled binaries. + lib_dir (Path): Directory for storing libraries. + scans_dir (Path): Directory for storing scan results. + wordlist_dir (Path): Directory for storing wordlists. + current_dir (Path): The current working directory. + keep_old_scans (int): The number of old scans to keep. + + Examples: + >>> helper = ConfigAwareHelper(config) + >>> ips = helper.dns.resolve("www.evilcorp.com") + """ + from . import ntlm from . import regexes from . 
import validators + from .files import tempfile, feed_pipe, _feed_pipe, tempfile_tail + from .cache import cache_get, cache_put, cache_filename, is_cached + from .command import run, run_live, _spawn_proc, _prepare_command_kwargs - def __init__(self, config, scan=None): - self.config = config - self._scan = scan - self.bbot_home = Path(self.config.get("home", "~/.bbot")).expanduser().resolve() + def __init__(self, preset): + self.preset = preset + self.bbot_home = self.preset.bbot_home self.cache_dir = self.bbot_home / "cache" self.temp_dir = self.bbot_home / "temp" self.tools_dir = self.bbot_home / "tools" self.lib_dir = self.bbot_home / "lib" self.scans_dir = self.bbot_home / "scans" + self.wordlist_dir = Path(__file__).parent.parent.parent / "wordlists" self.current_dir = Path.cwd() self.keep_old_scans = self.config.get("keep_scans", 20) self.mkdir(self.cache_dir) self.mkdir(self.temp_dir) self.mkdir(self.tools_dir) self.mkdir(self.lib_dir) - atexit.register(self.empty_temp_dir) - # holds requests CachedSession() objects for duration of scan - self.cache_sessions = dict() - self._futures = set() - self._future_lock = Lock() - self.dns = DNSHelper(self) + self._loop = None + + # multiprocessing thread pool + start_method = mp.get_start_method() + if start_method != "spawn": + self.warning(f"Multiprocessing spawn method is set to {start_method}.") + + # we spawn 1 fewer processes than cores + # this helps to avoid locking up the system or competing with the main python process for cpu time + num_processes = max(1, mp.cpu_count() - 1) + self.process_pool = ProcessPoolExecutor(max_workers=num_processes) + + self._cloud = None + + self.re = RegexHelper(self) + self._dns = None + self._web = None + self.config_aware_validators = self.validators.Validators(self) self.depsinstaller = DepsInstaller(self) self.word_cloud = WordCloud(self) self.dummy_modules = {} - def interactsh(self): - return Interactsh(self) - - def make_table(self, *args, **kwargs): - defaults = { - "tablefmt": "github", - "disable_numparse": True, - } - for k, v in defaults.items(): - if k not in kwargs: - kwargs[k] = v - return tabulate(*args, **kwargs) - - def http_compare(self, url, allow_redirects=False): + @property + def dns(self): + if self._dns is None: + self._dns = DNSHelper(self) + return self._dns - return HttpCompare(url, self, allow_redirects=allow_redirects) + @property + def web(self): + if self._web is None: + self._web = WebHelper(self) + return self._web - def temp_filename(self): + @property + def cloud(self): + if self._cloud is None: + from cloudcheck import cloud_providers + + self._cloud = cloud_providers + return self._cloud + + def bloom_filter(self, size): + from .bloom import BloomFilter + + return BloomFilter(size) + + def interactsh(self, *args, **kwargs): + return Interactsh(self, *args, **kwargs) + + def http_compare( + self, + url, + allow_redirects=False, + include_cache_buster=True, + headers=None, + cookies=None, + method="GET", + data=None, + timeout=15, + ): + return HttpCompare( + url, + self, + allow_redirects=allow_redirects, + include_cache_buster=include_cache_buster, + headers=headers, + cookies=cookies, + timeout=timeout, + method=method, + data=data, + ) + + def temp_filename(self, extension=None): """ temp_filename() --> Path("/home/user/.bbot/temp/pgxml13bov87oqrvjz7a") """ - return self.temp_dir / self.rand_string(20) - - def empty_temp_dir(self): - shutil.rmtree(self.temp_dir, ignore_errors=True) + filename = self.rand_string(20) + if extension is not None: + filename = 
f"{filename}.{extension}" + return self.temp_dir / filename def clean_old_scans(self): - _filter = lambda x: x.is_dir() and self.regexes.scan_name_regex.match(x.name) + def _filter(x): + return x.is_dir() and self.regexes.scan_name_regex.match(x.name) + self.clean_old(self.scans_dir, keep=self.keep_old_scans, filter=_filter) + def make_target(self, *targets, **kwargs): + return BaseTarget(*targets, scan=self.scan, **kwargs) + + @property + def config(self): + return self.preset.config + + @property + def web_config(self): + return self.preset.web_config + @property def scan(self): - if self._scan is None: - from bbot.scanner import Scanner + return self.preset.scan - self._scan = Scanner() - return self._scan + @property + def loop(self): + """ + Get the current event loop + """ + if self._loop is None: + self._loop = get_event_loop() + return self._loop - @staticmethod - def as_completed(*args, **kwargs): - return as_completed(*args, **kwargs) + def run_in_executor(self, callback, *args, **kwargs): + """ + Run a synchronous task in the event loop's default thread pool executor - def _make_dummy_module(self, name, _type): + Examples: + Execute callback: + >>> result = await self.helpers.run_in_executor(callback_fn, arg1, arg2) """ - Construct a dummy module, for attachment to events + callback = partial(callback, **kwargs) + return self.loop.run_in_executor(None, callback, *args) + + def run_in_executor_mp(self, callback, *args, **kwargs): """ - try: - return self.dummy_modules[name] - except KeyError: - dummy = DummyModule(scan=self.scan, name=name, _type=_type) - self.dummy_modules[name] = dummy - return dummy + Same as run_in_executor() except with a process pool executor + Use only in cases where callback is CPU-bound + + Examples: + Execute callback: + >>> result = await self.helpers.run_in_executor_mp(callback_fn, arg1, arg2) + """ + callback = partial(callback, **kwargs) + return self.loop.run_in_executor(self.process_pool, callback, *args) + + @property + def in_tests(self): + return os.environ.get("BBOT_TESTING", "") == "True" def __getattribute__(self, attr): """ - Allow static functions from sub-helpers to be accessed from the main class + Do not be afraid, the angel said. + + Overrides Python's built-in __getattribute__ to provide convenient access to helper methods. + + This method first attempts to find an attribute within this class itself. If unsuccessful, + it then looks in the 'misc', 'dns', and 'web' helper modules, in that order. If the attribute + is still not found, an AttributeError is raised. + + Args: + attr (str): The attribute name to look for. + + Returns: + Any: The attribute value, if found. + + Raises: + AttributeError: If the attribute is not found in any of the specified places. 
""" try: # first try self @@ -120,12 +240,13 @@ def __getattribute__(self, attr): # then try dns return getattr(self.dns, attr) except AttributeError: - # then die - raise AttributeError(f'Helper has no attribute "{attr}"') - - -class DummyModule(BaseModule): - def __init__(self, *args, **kwargs): - self._name = kwargs.pop("name") - self._type = kwargs.pop("_type") - super().__init__(*args, **kwargs) + try: + # then try web + return getattr(self.web, attr) + except AttributeError: + try: + # then try validators + return getattr(self.validators, attr) + except AttributeError: + # then die + raise AttributeError(f'Helper has no attribute "{attr}"') diff --git a/bbot/core/helpers/interactsh.py b/bbot/core/helpers/interactsh.py index c9ff99d8ac..c809999a3b 100644 --- a/bbot/core/helpers/interactsh.py +++ b/bbot/core/helpers/interactsh.py @@ -2,15 +2,16 @@ import json import base64 import random +import asyncio import logging -from time import sleep +import traceback from uuid import uuid4 -from threading import Thread + from Crypto.Hash import SHA256 from Crypto.PublicKey import RSA from Crypto.Cipher import AES, PKCS1_OAEP -from bbot.core.errors import InteractshError +from bbot.errors import InteractshError log = logging.getLogger("bbot.core.helpers.interactsh") @@ -18,16 +19,97 @@ class Interactsh: - def __init__(self, parent_helper): + """ + A pure python implementation of ProjectDiscovery's interact.sh. + + *"Interactsh is an open-source tool for detecting out-of-band interactions. It is a tool designed to detect vulnerabilities that cause external interactions."* + + - https://app.interactsh.com + - https://github.com/projectdiscovery/interactsh + + This class facilitates interactions with the interact.sh service for + out-of-band data exfiltration and vulnerability confirmation. It allows + for customization by accepting server and token parameters from the + configuration provided by `parent_helper`. + + Attributes: + parent_helper (ConfigAwareHelper): An instance of a helper class containing configuration data. + server (str): The server to be used. If None (the default), a random server will be chosen from a predetermined list. + correlation_id (str): An identifier to correlate requests and responses. Default is None. + custom_server (str): Optional. A custom interact.sh server. Loaded from configuration. + token (str): Optional. A token for interact.sh API. Loaded from configuration. + _poll_task (AsyncTask): The task responsible for polling the interact.sh server. 
+ + Examples: + ```python + # instantiate interact.sh client (no requests are sent yet) + >>> interactsh_client = self.helpers.interactsh() + # register with an interact.sh server + >>> interactsh_domain = await interactsh_client.register() + [INFO] Registering with interact.sh server: oast.me + [INFO] Successfully registered to interactsh server oast.me with correlation_id rg99x2f860h5466ou3so [rg99x2f860h5466ou3so86i07n1m3013k.oast.me] + # simulate an out-of-band interaction + >>> await self.helpers.request(f"https://{interactsh_domain}/test") + # wait for out-of-band interaction to be registered + >>> await asyncio.sleep(10) + >>> data_list = await interactsh_client.poll() + >>> print(data_list) + [ + { + "protocol": "dns", + "unique-id": "rg99x2f860h5466ou3so86i07n1m3013k", + "full-id": "rg99x2f860h5466ou3so86i07n1m3013k", + "q-type": "A", + "raw-request": "...", + "remote-address": "1.2.3.4", + "timestamp": "2023-09-15T21:09:23.187226851Z" + }, + { + "protocol": "http", + "unique-id": "rg99x2f860h5466ou3so86i07n1m3013k", + "full-id": "rg99x2f860h5466ou3so86i07n1m3013k", + "raw-request": "GET /test HTTP/1.1 ...", + "remote-address": "1.2.3.4", + "timestamp": "2023-09-15T21:09:24.155677967Z" + } + ] + # finally, shut down the client + >>> await interactsh_client.deregister() + ``` + """ + + def __init__(self, parent_helper, poll_interval=10): self.parent_helper = parent_helper - self.server = self.parent_helper.config.get("interactsh_server", None) + self.server = None + self.correlation_id = None + self.custom_server = self.parent_helper.config.get("interactsh_server", None) self.token = self.parent_helper.config.get("interactsh_token", None) - self._thread = None + self.poll_interval = poll_interval + self._poll_task = None + + async def register(self, callback=None): + """ + Registers the instance with an interact.sh server and sets up polling. + + Generates RSA keys for secure communication, builds a correlation ID, + and sends a POST request to an interact.sh server to register. Optionally, + starts an asynchronous polling task to listen for interactions. - def register(self, callback=None): - if self.server == None: - self.server = random.choice(server_list) + Args: + callback (callable, optional): A function to be called each time new interactions are received. + Returns: + str: The registered domain for out-of-band interactions. + + Raises: + InteractshError: If registration with an interact.sh server fails. 
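+
+        Note:
+            If a custom `interactsh_server` is configured, only that server (plus the
+            optional API token) is used; otherwise the public server list is shuffled
+            and each server is tried in turn until one accepts the registration.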
+
+        Examples:
+            >>> interactsh_client = self.helpers.interactsh()
+            >>> registered_domain = await interactsh_client.register()
+            [INFO] Registering with interact.sh server: oast.me
+            [INFO] Successfully registered to interactsh server oast.me with correlation_id rg99x2f860h5466ou3so [rg99x2f860h5466ou3so86i07n1m3013k.oast.me]
+        """
         rsa = RSA.generate(1024)
         self.public_key = rsa.publickey().exportKey()
@@ -38,34 +120,68 @@ def register(self, callback=None):
         uuid = uuid4().hex.ljust(33, "a")
         guid = "".join(i if i.isdigit() else chr(ord(i) + random.randint(0, 20)) for i in uuid)
-        self.domain = f"{guid}.{self.server}"
-
         self.correlation_id = guid[:20]
         self.secret = str(uuid4())
         headers = {}
-        if self.token:
-            headers["Authorization"] = self.token
+        if self.custom_server:
+            if not self.token:
+                log.verbose("Interact.sh token is not set")
+            else:
+                headers["Authorization"] = self.token
+            self.server_list = [str(self.custom_server)]
+        else:
+            self.server_list = random.sample(server_list, k=len(server_list))
+        for server in self.server_list:
+            log.info(f"Registering with interact.sh server: {server}")
+            data = {
+                "public-key": encoded_public_key,
+                "secret-key": self.secret,
+                "correlation-id": self.correlation_id,
+            }
+            r = await self.parent_helper.request(
+                f"https://{server}/register", headers=headers, json=data, method="POST"
+            )
+            if r is None:
+                continue
+            try:
+                msg = r.json().get("message", "")
+                assert "registration successful" in msg
+            except Exception:
+                log.debug(f"Failed to register with interactsh server {server}")
+                continue
+            self.server = server
+            self.domain = f"{guid}.{self.server}"
+            break
-        data = {"public-key": encoded_public_key, "secret-key": self.secret, "correlation-id": self.correlation_id}
-        r = self.parent_helper.request(
-            f"https://{self.server}/register", headers=headers, json=data, method="POST", retries="infinite"
-        )
-        msg = r.json().get("message", "")
-        if msg != "registration successful":
-            raise InteractshError(f"Failed to register with interactsh server {self.server}")
+        if not self.server:
+            raise InteractshError("Failed to register with an interactsh server")
         log.info(
             f"Successfully registered to interactsh server {self.server} with correlation_id {self.correlation_id} [{self.domain}]"
         )
         if callable(callback):
-            self._thread = Thread(target=self.poll_loop, args=(callback,), daemon=True)
-            self._thread.start()
+            self._poll_task = asyncio.create_task(self.poll_loop(callback))
         return self.domain
-    def deregister(self):
+    async def deregister(self):
+        """
+        Deregisters the instance from the interact.sh server and cancels the polling task.
+
+        Sends a POST request to the server to deregister, using the correlation ID
+        and secret key generated during registration. If a polling
+        task was started, it is also cancelled.
+
+        Raises:
+            InteractshError: If required information is missing or if deregistration fails.
+ + Examples: + >>> await interactsh_client.deregister() + """ + if not self.server or not self.correlation_id or not self.secret: + raise InteractshError("Missing required information to deregister") headers = {} if self.token: @@ -73,46 +189,123 @@ def deregister(self): data = {"secret-key": self.secret, "correlation-id": self.correlation_id} - r = self.parent_helper.request(f"https://{self.server}/deregister", headers=headers, json=data, method="POST") - if "success" not in r.text: + r = await self.parent_helper.request( + f"https://{self.server}/deregister", headers=headers, json=data, method="POST" + ) + + if self._poll_task is not None: + self._poll_task.cancel() + + if "success" not in getattr(r, "text", ""): raise InteractshError(f"Failed to de-register with interactsh server {self.server}") - def poll(self): + async def poll(self): + """ + Polls the interact.sh server for interactions tied to the current instance. + + Sends a GET request to the server to fetch interactions associated with the + current correlation_id and secret key. Returned interactions are decrypted + using an AES key provided by the server response. + + Raises: + InteractshError: If required information for polling is missing. + + Returns: + list: A list of decrypted interaction data dictionaries. + + Examples: + >>> data_list = await interactsh_client.poll() + >>> print(data_list) + [ + { + "protocol": "dns", + "unique-id": "rg99x2f860h5466ou3so86i07n1m3013k", + ... + }, + ... + ] + """ + if not self.server or not self.correlation_id or not self.secret: + raise InteractshError("Missing required information to poll") headers = {} if self.token: headers["Authorization"] = self.token - r = self.parent_helper.request( - f"https://{self.server}/poll?id={self.correlation_id}&secret={self.secret}", headers=headers - ) + try: + r = await self.parent_helper.request( + f"https://{self.server}/poll?id={self.correlation_id}&secret={self.secret}", headers=headers + ) + if r is None: + raise InteractshError("Error polling interact.sh: No response from server") - data_list = r.json().get("data", None) - if data_list: - aes_key = r.json()["aes_key"] + ret = [] + data_list = r.json().get("data", None) + if data_list: + aes_key = r.json()["aes_key"] - for data in data_list: + for data in data_list: + decrypted_data = self._decrypt(aes_key, data) + ret.append(decrypted_data) + return ret + except Exception as e: + raise InteractshError(f"Error polling interact.sh: {e}") + + async def poll_loop(self, callback): + """ + Starts a polling loop to continuously check for interactions with the interact.sh server. + + Continuously polls the interact.sh server for interactions tied to the current instance, + using the `poll` method. When interactions are received, it executes the given callback + function with each interaction data. + + Parameters: + callback (callable): The function to be called for every interaction received from the server. - decrypted_data = self.decrypt(aes_key, data) - yield decrypted_data + Returns: + awaitable: An awaitable object that executes the internal `_poll_loop` method. 
- def poll_loop(self, callback): - return self.parent_helper.scan.manager.catch(self._poll_loop, callback, _force=True) + Examples: + >>> await interactsh_client.poll_loop(my_callback) + """ + async with self.parent_helper.scan._acatch(context=self._poll_loop): + return await self._poll_loop(callback) - def _poll_loop(self, callback): + async def _poll_loop(self, callback): while 1: if self.parent_helper.scan.stopping: - sleep(1) + await asyncio.sleep(1) continue - data_list = list(self.poll()) + data_list = [] + try: + data_list = await self.poll() + except InteractshError as e: + log.warning(e) + log.trace(traceback.format_exc()) if not data_list: - sleep(10) + await asyncio.sleep(self.poll_interval) continue for data in data_list: if data: - callback(data) + await self.parent_helper.execute_sync_or_async(callback, data) + + def _decrypt(self, aes_key, data): + """ + Decrypts and returns the data received from the interact.sh server. + + Uses RSA and AES for decrypting the data. RSA with PKCS1_OAEP and SHA256 is used to decrypt the AES key, + and then AES (CFB mode) is used to decrypt the actual data payload. + + Parameters: + aes_key (str): The AES key for decryption, encrypted with RSA and base64 encoded. + data (str): The data payload to decrypt, which is base64 encoded and AES encrypted. + + Returns: + dict: The decrypted data, loaded as a JSON object. - def decrypt(self, aes_key, data): + Examples: + >>> decrypted_data = self._decrypt(aes_key, data) + """ private_key = RSA.importKey(self.private_key) cipher = PKCS1_OAEP.new(private_key, hashAlgo=SHA256) aes_plain_key = cipher.decrypt(base64.b64decode(aes_key)) diff --git a/bbot/core/helpers/libmagic.py b/bbot/core/helpers/libmagic.py new file mode 100644 index 0000000000..37612f558e --- /dev/null +++ b/bbot/core/helpers/libmagic.py @@ -0,0 +1,65 @@ +import puremagic + + +def get_magic_info(file): + magic_detections = puremagic.magic_file(file) + if magic_detections: + magic_detections.sort(key=lambda x: x.confidence, reverse=True) + detection = magic_detections[0] + return detection.extension, detection.mime_type, detection.name, detection.confidence + return "", "", "", 0 + + +def get_compression(mime_type): + mime_type = mime_type.lower() + # from https://github.com/cdgriffith/puremagic/blob/master/puremagic/magic_data.json + compression_map = { + "application/arj": "arj", # ARJ archive + "application/binhex": "binhex", # BinHex encoded file + "application/epub+zip": "zip", # EPUB book (Zip archive) + "application/fictionbook2+zip": "zip", # FictionBook 2.0 (Zip) + "application/fictionbook3+zip": "zip", # FictionBook 3.0 (Zip) + "application/gzip": "gzip", # Gzip compressed file + "application/java-archive": "zip", # Java Archive (JAR) + "application/pak": "pak", # PAK archive + "application/vnd.android.package-archive": "zip", # Android package (APK) + "application/vnd.comicbook-rar": "rar", # Comic book archive (RAR) + "application/vnd.comicbook+zip": "zip", # Comic book archive (Zip) + "application/vnd.ms-cab-compressed": "cab", # Microsoft Cabinet archive + "application/vnd.palm": "palm", # Palm OS data + "application/vnd.rar": "rar", # RAR archive + "application/x-7z-compressed": "7z", # 7-Zip archive + "application/x-ace": "ace", # ACE archive + "application/x-alz": "alz", # ALZip archive + "application/x-arc": "arc", # ARC archive + "application/x-archive": "ar", # Unix archive + "application/x-bzip2": "bzip2", # Bzip2 compressed file + "application/x-compress": "compress", # Unix compress file + "application/x-cpio": 
"cpio", # CPIO archive + "application/x-gzip": "gzip", # Gzip compressed file + "application/x-itunes-ipa": "zip", # iOS application archive (IPA) + "application/x-java-pack200": "pack200", # Java Pack200 archive + "application/x-lha": "lha", # LHA archive + "application/x-lrzip": "lrzip", # Long Range ZIP + "application/x-lz4-compressed-tar": "lz4", # LZ4 compressed Tar archive + "application/x-lz4": "lz4", # LZ4 compressed file + "application/x-lzip": "lzip", # Lzip compressed file + "application/x-lzma": "lzma", # LZMA compressed file + "application/x-par2": "par2", # PAR2 recovery file + "application/x-qpress": "qpress", # Qpress archive + "application/x-rar-compressed": "rar", # RAR archive + "application/x-sit": "sit", # StuffIt archive + "application/x-stuffit": "sit", # StuffIt archive + "application/x-tar": "tar", # Tar archive + "application/x-tgz": "tgz", # Gzip compressed Tar archive + "application/x-webarchive": "zip", # Web archive (Zip) + "application/x-xar": "xar", # XAR archive + "application/x-xz": "xz", # XZ compressed file + "application/x-zip-compressed-fb2": "zip", # Zip archive (FB2) + "application/x-zoo": "zoo", # Zoo archive + "application/x-zstd-compressed-tar": "zstd", # Zstandard compressed Tar archive + "application/zip": "zip", # Zip archive + "application/zstd": "zstd", # Zstandard compressed file + } + + return compression_map.get(mime_type, "") diff --git a/bbot/core/helpers/misc.py b/bbot/core/helpers/misc.py index 387d261b34..92c9e523fd 100644 --- a/bbot/core/helpers/misc.py +++ b/bbot/core/helpers/misc.py @@ -1,100 +1,311 @@ +import os import sys import copy import json -import atexit -import psutil import random -import shutil -import signal import string +import asyncio import logging import ipaddress -import wordninja +import regex as re +import subprocess as sp from pathlib import Path -from itertools import islice -from datetime import datetime from contextlib import suppress -import tldextract as _tldextract -from urllib.parse import urlparse, quote # noqa F401 -from hashlib import sha1 as hashlib_sha1 +from unidecode import unidecode # noqa F401 +from asyncio import create_task, gather, sleep, wait_for # noqa +from urllib.parse import urlparse, quote, unquote, urlunparse, urljoin # noqa F401 from .url import * # noqa F401 -from . import regexes -from .. import errors -from .names_generator import random_name # noqa F401 +from ... import errors +from . import regexes as bbot_regexes +from .names_generator import random_name, names, adjectives # noqa F401 log = logging.getLogger("bbot.core.helpers.misc") def is_domain(d): """ - "evilcorp.co.uk" --> True - "www.evilcorp.co.uk" --> False + Check if the given input represents a domain without subdomains. + + This function takes an input string `d` and returns True if it represents a domain without any subdomains. + Otherwise, it returns False. + + Args: + d (str): The input string containing the domain. + + Returns: + bool: True if the input is a domain without subdomains, False otherwise. + + Examples: + >>> is_domain("evilcorp.co.uk") + True + + >>> is_domain("www.evilcorp.co.uk") + False + + Notes: + - Port, if present in input, is ignored. 
""" + d, _ = split_host_port(d) + if is_ip(d): + return False extracted = tldextract(d) - if extracted.domain and not extracted.subdomain: - return True + if extracted.registered_domain: + if not extracted.subdomain: + return True + else: + return d.count(".") == 1 return False def is_subdomain(d): """ - "www.evilcorp.co.uk" --> True - "evilcorp.co.uk" --> False + Check if the given input represents a subdomain. + + This function takes an input string `d` and returns True if it represents a subdomain. + Otherwise, it returns False. + + Args: + d (str): The input string containing the domain or subdomain. + + Returns: + bool: True if the input is a subdomain, False otherwise. + + Examples: + >>> is_subdomain("www.evilcorp.co.uk") + True + + >>> is_subdomain("evilcorp.co.uk") + False + + Notes: + - Port, if present in input, is ignored. """ + d, _ = split_host_port(d) + if is_ip(d): + return False extracted = tldextract(d) - if extracted.domain and extracted.subdomain: - return True + if extracted.registered_domain: + if extracted.subdomain: + return True + else: + return d.count(".") > 1 return False +def is_ptr(d): + """ + Check if the given input represents a PTR record domain. + + This function takes an input string `d` and returns True if it matches the PTR record format. + Otherwise, it returns False. + + Args: + d (str): The input string potentially representing a PTR record domain. + + Returns: + bool: True if the input matches PTR record format, False otherwise. + + Examples: + >>> is_ptr("wsc-11-22-33-44.evilcorp.com") + True + + >>> is_ptr("www2.evilcorp.com") + False + """ + return bool(bbot_regexes.ptr_regex.search(str(d))) + + def is_url(u): + """ + Check if the given input represents a valid URL. + + This function takes an input string `u` and returns True if it matches any of the predefined URL formats. + Otherwise, it returns False. + + Args: + u (str): The input string potentially representing a URL. + + Returns: + bool: True if the input matches a valid URL format, False otherwise. + + Examples: + >>> is_url("https://evilcorp.com") + True + + >>> is_url("not-a-url") + False + """ u = str(u) - for r in regexes.event_type_regexes["URL"]: + for r in bbot_regexes.event_type_regexes["URL"]: if r.match(u): return True return False +uri_regex = re.compile(r"^([a-z0-9]{2,20})://", re.I) + + +def is_uri(u, return_scheme=False): + """ + Check if the given input represents a URI and optionally return its scheme. + + This function takes an input string `u` and returns True if it matches a URI format. + When `return_scheme` is True, it returns the URI scheme instead of a boolean. + + Args: + u (str): The input string potentially representing a URI. + return_scheme (bool, optional): Whether to return the URI scheme. Defaults to False. + + Returns: + Union[bool, str]: True if the input matches a URI format; the URI scheme if `return_scheme` is True. + + Examples: + >>> is_uri("http://evilcorp.com") + True + + >>> is_uri("ftp://evilcorp.com") + True + + >>> is_uri("evilcorp.com") + False + + >>> is_uri("ftp://evilcorp.com", return_scheme=True) + "ftp" + """ + match = uri_regex.match(u) + if return_scheme: + if match: + return match.groups()[0].lower() + return "" + return bool(match) + + def split_host_port(d): """ - "evilcorp.com:443" --> ("evilcorp.com", 443) - "192.168.1.1:443" --> (IPv4Address('192.168.1.1'), 443) - "[dead::beef]:443" --> (IPv6Address('dead::beef'), 443) + Parse a string containing a host and port into a tuple. 
+ + This function takes an input string `d` and returns a tuple containing the host and port. + The host is converted to its appropriate IP address type if possible. The port is inferred + based on the scheme if not provided. + + Args: + d (str): The input string containing the host and possibly the port. + + Returns: + Tuple[Union[IPv4Address, IPv6Address, str], Optional[int]]: Tuple containing the host and port. + + Examples: + >>> split_host_port("evilcorp.com:443") + ("evilcorp.com", 443) + + >>> split_host_port("192.168.1.1:443") + (IPv4Address('192.168.1.1'), 443) + + >>> split_host_port("[dead::beef]:443") + (IPv6Address('dead::beef'), 443) + + Notes: + - If port is not provided, it is inferred based on the scheme: + - For "https" and "wss", port 443 is used. + - For "http" and "ws", port 80 is used. """ - if not "://" in d: - d = f"d://{d}" - parsed = urlparse(d) - port = None + d = str(d) host = None - with suppress(ValueError): - if parsed.port is None: - if parsed.scheme == "https": - port = 443 - elif parsed.scheme == "http": - port = 80 - else: - port = int(parsed.port) - with suppress(ValueError): - host = parsed.hostname + port = None + scheme = None + if is_ip(d): + return make_ip_type(d), port + + match = bbot_regexes.split_host_port_regex.match(d) + if match is None: + raise ValueError(f'split_port() failed to parse "{d}"') + scheme = match.group("scheme") + netloc = match.group("netloc") + if netloc is None: + raise ValueError(f'split_port() failed to parse "{d}"') + + match = bbot_regexes.extract_open_port_regex.match(netloc) + if match is None: + raise ValueError(f'split_port() failed to parse netloc "{netloc}" (original value: {d})') + + host = match.group(2) + if host is None: + host = match.group(1) + if host is None: + raise ValueError(f'split_port() failed to locate host in netloc "{netloc}" (original value: {d})') + + port = match.group(3) + if port is None and scheme is not None: + scheme = scheme.lower() + if scheme in ("https", "wss"): + port = 443 + elif scheme in ("http", "ws"): + port = 80 + elif port is not None: + with suppress(ValueError): + port = int(port) + return make_ip_type(host), port def parent_domain(d): """ - "www.internal.evilcorp.co.uk" --> "internal.evilcorp.co.uk" - "www.evilcorp.co.uk" --> "evilcorp.co.uk" - "evilcorp.co.uk" --> "evilcorp.co.uk" + Retrieve the parent domain of a given subdomain string. + + This function takes an input string `d` representing a subdomain and returns its parent domain. + If the input does not represent a subdomain, it returns the input as is. + + Args: + d (str): The input string representing a subdomain or domain. + + Returns: + str: The parent domain of the subdomain, or the original input if it is not a subdomain. + + Examples: + >>> parent_domain("www.internal.evilcorp.co.uk") + "internal.evilcorp.co.uk" + + >>> parent_domain("www.internal.evilcorp.co.uk:8080") + "internal.evilcorp.co.uk:8080" + + >>> parent_domain("www.evilcorp.co.uk") + "evilcorp.co.uk" + + >>> parent_domain("evilcorp.co.uk") + "evilcorp.co.uk" + + Notes: + - Port, if present in input, is preserved in the output. """ + host, port = split_host_port(d) if is_subdomain(d): - return ".".join(str(d).split(".")[1:]) + return make_netloc(".".join(str(host).split(".")[1:]), port) return d def domain_parents(d, include_self=False): """ - "test.www.evilcorp.co.uk" --> ["www.evilcorp.co.uk", "evilcorp.co.uk"] + Generate a list of parent domains for a given domain string. 
+ + This function takes an input string `d` and generates a list of parent domains in decreasing order of specificity. + If `include_self` is set to True, the list will also include the input domain if it is not a top-level domain. + + Args: + d (str): The input string representing a domain or subdomain. + include_self (bool, optional): Whether to include the input domain itself. Defaults to False. + + Yields: + str: Parent domains of the input string in decreasing order of specificity. + + Examples: + >>> list(domain_parents("test.www.evilcorp.co.uk")) + ["www.evilcorp.co.uk", "evilcorp.co.uk"] + + Notes: + - Port, if present in input, is preserved in the output. """ + parent = str(d) if include_self and not is_domain(parent): yield parent @@ -108,117 +319,386 @@ def domain_parents(d, include_self=False): break +def subdomain_depth(d): + """ + Calculate the depth of subdomains within a given domain name. + + Args: + d (str): The domain name to analyze. + + Returns: + int: The depth of the subdomain. For example, a hostname "5.4.3.2.1.evilcorp.com" + has a subdomain depth of 5. + """ + subdomain, domain = split_domain(d) + if not subdomain: + return 0 + return subdomain.count(".") + 1 + + +def parent_url(u): + """ + Retrieve the parent URL of a given URL. + + This function takes an input string `u` representing a URL and returns its parent URL. + If the input URL does not have a parent (i.e., it's already the top-level), it returns None. + + Args: + u (str): The input string representing a URL. + + Returns: + Union[str, None]: The parent URL of the input URL, or None if it has no parent. + + Examples: + >>> parent_url("https://evilcorp.com/sub/path/") + "https://evilcorp.com/sub/" + + >>> parent_url("https://evilcorp.com/") + None + + Notes: + - Only the path component of the URL is modified. + - All other components like scheme, netloc, query, and fragment are preserved. + """ + parsed = urlparse(u) + path = Path(parsed.path) + if path.parent == path: + return None + else: + return urlunparse(parsed._replace(path=str(path.parent), query="")) + + +def url_parents(u): + """ + Generate a list of parent URLs for a given URL string. + + This function takes an input string `u` representing a URL and generates a list of its parent URLs in decreasing order of specificity. + + Args: + u (str): The input string representing a URL. + + Returns: + List[str]: A list of parent URLs of the input URL in decreasing order of specificity. + + Examples: + >>> url_parents("http://www.evilcorp.co.uk/admin/tools/cmd.php") + ["http://www.evilcorp.co.uk/admin/tools/", "http://www.evilcorp.co.uk/admin/", "http://www.evilcorp.co.uk/"] + + Notes: + - The list is generated by continuously calling `parent_url` until it returns None. + - All components of the URL except for the path are preserved. + """ + parent_list = [] + while 1: + parent = parent_url(u) + if parent is None: + return parent_list + elif parent not in parent_list: + parent_list.append(parent) + u = parent + + +def best_http_status(code1, code2): + """ + Determine the better HTTP status code between two given codes. + + The 'better' status code is considered based on typical usage and priority in HTTP communication. + Lower codes are generally better than higher codes. Within the same class (e.g., 2xx), a lower code is better. + Between different classes, the order of preference is 2xx > 3xx > 1xx > 4xx > 5xx. + + Args: + code1 (int): The first HTTP status code. + code2 (int): The second HTTP status code. 
+
+    Returns:
+        int: The better HTTP status code between the two provided codes.
+
+    Examples:
+        >>> best_http_status(200, 404)
+        200
+        >>> best_http_status(500, 400)
+        400
+        >>> best_http_status(301, 302)
+        301
+    """
+
+    # Classify the codes into their respective categories (1xx, 2xx, 3xx, 4xx, 5xx)
+    def classify_code(code):
+        return int(code) // 100
+
+    class1 = classify_code(code1)
+    class2 = classify_code(code2)
+
+    # Priority order for classes
+    priority_order = {2: 1, 3: 2, 1: 3, 4: 4, 5: 5}
+
+    # Compare based on class priority
+    p1 = priority_order.get(class1, 10)
+    p2 = priority_order.get(class2, 10)
+    if p1 != p2:
+        return code1 if p1 < p2 else code2
+
+    # If in the same class, the lower code is better
+    return min(code1, code2)
+
+
 def tldextract(data):
     """
-    "www.evilcorp.co.uk" --> ExtractResult(subdomain='www', domain='evilcorp', suffix='co.uk')
+    Extracts the subdomain, domain, and suffix from a URL string.
+
+    Args:
+        data (str): The URL string to be processed.
+
+    Returns:
+        ExtractResult: A named tuple containing the subdomain, domain, and suffix.
+
+    Examples:
+        >>> tldextract("www.evilcorp.co.uk")
+        ExtractResult(subdomain='www', domain='evilcorp', suffix='co.uk')
+
+    Notes:
+        - Utilizes `smart_decode` to preprocess the data.
+        - Makes use of the `tldextract` library for extraction.
    """
+    import tldextract as _tldextract
+    return _tldextract.extract(smart_decode(data))

 def split_domain(hostname):
     """
-    "www.internal.evilcorp.co.uk" --> ("www.internal", "evilcorp.co.uk")
+    Splits the hostname into its subdomain and registered domain components.
+
+    Args:
+        hostname (str): The full hostname to be split.
+
+    Returns:
+        tuple: A tuple containing the subdomain and registered domain.
+
+    Examples:
+        >>> split_domain("www.internal.evilcorp.co.uk")
+        ("www.internal", "evilcorp.co.uk")
+
+    Notes:
+        - Utilizes the `tldextract` function to first break down the hostname.
    """
+    if is_ip(hostname):
+        return ("", hostname)
     parsed = tldextract(hostname)
-    return (parsed.subdomain, parsed.registered_domain)
+    subdomain = parsed.subdomain
+    domain = parsed.registered_domain
+    if not domain:
+        split = hostname.split(".")
+        subdomain = ".".join(split[:-2])
+        domain = ".".join(split[-2:])
+    return (subdomain, domain)

 def domain_stem(domain):
     """
-    An abbreviated representation of hostname that removes the TLD
-    www.evilcorp.com --> www.evilcorp
+    Returns an abbreviated representation of the hostname by removing the TLD (Top-Level Domain).
+
+    Args:
+        domain (str): The full domain name to be abbreviated.
+
+    Returns:
+        str: An abbreviated domain string without the TLD.
+
+    Examples:
+        >>> domain_stem("www.evilcorp.com")
+        "www.evilcorp"
+
+    Notes:
+        - Utilizes the `tldextract` function for domain parsing.
    """
     parsed = tldextract(str(domain))
-    return f".".join(parsed.subdomain.split(".") + parsed.domain.split(".")).strip(".")
+    return ".".join(parsed.subdomain.split(".") + parsed.domain.split(".")).strip(".")

 def ip_network_parents(i, include_self=False):
     """
-    "192.168.1.1" --> [192.168.1.0/31, 192.168.1.0/30 ... 128.0.0.0/1, 0.0.0.0/0]
+    Generates all parent IP networks for a given IP address or network, optionally including the network itself.
+
+    Args:
+        i (str or ipaddress.IPv4Network/ipaddress.IPv6Network): The IP address or network to find parents for.
+        include_self (bool, optional): Whether to include the network itself in the result. Default is False.
+
+    Yields:
+        ipaddress.IPv4Network or ipaddress.IPv6Network: Parent IP networks in descending order of prefix length.
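+
+            A single IPv4 address is treated as a /32, so it yields the 32 parent networks
+            /31 through /0 (33 networks including the /32 itself when include_self=True).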
+ + Examples: + >>> list(ip_network_parents("192.168.1.1")) + [ipaddress.IPv4Network('192.168.1.0/31'), ipaddress.IPv4Network('192.168.1.0/30'), ... , ipaddress.IPv4Network('0.0.0.0/0')] + + Notes: + - Utilizes Python's built-in `ipaddress` module for network operations. """ net = ipaddress.ip_network(i, strict=False) for i in range(net.prefixlen - (0 if include_self else 1), -1, -1): yield ipaddress.ip_network(f"{net.network_address}/{i}", strict=False) -def is_ip(d, version=None): +def is_port(p): """ - "192.168.1.1" --> True - "bad::c0de" --> True - "evilcorp.com" --> False + Checks if the given string represents a valid port number. + + Args: + p (str or int): The port number to check. + + Returns: + bool: True if the port number is valid, False otherwise. + + Examples: + >>> is_port('80') + True + >>> is_port('70000') + False """ - if type(d) in (ipaddress.IPv4Address, ipaddress.IPv6Address): - if version is None or version == d.version: - return True + + p = str(p) + return p and p.isdigit() and 0 <= int(p) <= 65535 + + +def is_dns_name(d): + """ + Determines if the given string is a valid DNS name. + + Args: + d (str): The string to be checked. + + Returns: + bool: True if the string is a valid DNS name, False otherwise. + + Examples: + >>> is_dns_name('www.example.com') + True + >>> is_dns_name('localhost') + True + >>> is_dns_name('192.168.1.1') + False + """ + if is_ip(d): + return False + d = smart_decode(d) + if bbot_regexes.dns_name_validation_regex.match(d): + return True + return False + + +def is_ip(d, version=None, include_network=False): + """ + Checks if the given string or object represents a valid IP address. + + Args: + d (str or ipaddress.IPvXAddress): The IP address to check. + include_network (bool, optional): Whether to include network types (IPv4Network or IPv6Network). Defaults to False. + version (int, optional): The IP version to validate (4 or 6). Default is None. + + Returns: + bool: True if the string or object is a valid IP address, False otherwise. + + Examples: + >>> is_ip('192.168.1.1') + True + >>> is_ip('bad::c0de', version=6) + True + >>> is_ip('bad::c0de', version=4) + False + >>> is_ip('evilcorp.com') + False + """ + ip = None try: ip = ipaddress.ip_address(d) - if version is None or ip.version == version: - return True except Exception: - pass + if include_network: + try: + ip = ipaddress.ip_network(d, strict=False) + except Exception: + pass + if ip is not None and (version is None or ip.version == version): + return True return False -def is_ip_type(i): +def is_ip_type(i, network=None): """ - IPv6Address('dead::beef') --> True - IPv4Network('192.168.1.0/24') --> True - "192.168.1.0/24" --> False + Checks if the given object is an instance of an IPv4 or IPv6 type from the ipaddress module. + + Args: + i (ipaddress._BaseV4 or ipaddress._BaseV6): The IP object to check. + network (bool, optional): Whether to restrict the check to network types (IPv4Network or IPv6Network). Defaults to False. + + Returns: + bool: True if the object is an instance of ipaddress._BaseV4 or ipaddress._BaseV6, False otherwise. 
+ + Examples: + >>> is_ip_type(ipaddress.IPv6Address('dead::beef')) + True + >>> is_ip_type(ipaddress.IPv4Network('192.168.1.0/24')) + True + >>> is_ip_type("192.168.1.0/24") + False """ - return hasattr(i, "is_multicast") + if network is not None: + is_network = ipaddress._BaseNetwork in i.__class__.__mro__ + if network: + return is_network + else: + return not is_network + return ipaddress._IPAddressBase in i.__class__.__mro__ def make_ip_type(s): """ - "dead::beef" --> IPv6Address('dead::beef') - "192.168.1.0/24" --> IPv4Network('192.168.1.0/24') - "evilcorp.com" --> "evilcorp.com" + Convert a string to its corresponding IP address or network type. + + This function attempts to convert the input string `s` into either an IPv4 or IPv6 address object, + or an IPv4 or IPv6 network object. If none of these conversions are possible, the original string is returned. + + Args: + s (str): The input string to be converted. + + Returns: + Union[IPv4Address, IPv6Address, IPv4Network, IPv6Network, str]: The converted object or original string. + + Examples: + >>> make_ip_type("dead::beef") + IPv6Address('dead::beef') + + >>> make_ip_type("192.168.1.0/24") + IPv4Network('192.168.1.0/24') + + >>> make_ip_type("evilcorp.com") + 'evilcorp.com' """ + if not s: + raise ValueError(f'Invalid hostname: "{s}"') # IP address with suppress(Exception): - return ipaddress.ip_address(str(s).strip()) + return ipaddress.ip_address(s) # IP network with suppress(Exception): - return ipaddress.ip_network(str(s).strip(), strict=False) + return ipaddress.ip_network(s, strict=False) return s -def host_in_host(host1, host2): - """ - Is host1 included in host2? - "www.evilcorp.com" in "evilcorp.com"? --> True - "evilcorp.com" in "www.evilcorp.com"? --> False - IPv6Address('dead::beef') in IPv6Network('dead::/64')? --> True - IPv4Address('192.168.1.1') in IPv4Network('10.0.0.0/8')? --> False +def sha1(data): """ + Computes the SHA-1 hash of the given data. - if not host1 or not host2: - return False - - # check if hosts are IP types - host1_ip_type = is_ip_type(host1) - host2_ip_type = is_ip_type(host2) - # if both hosts are IP types - if host1_ip_type and host2_ip_type: - if not host1.version == host2.version: - return False - host1_net = ipaddress.ip_network(host1) - host2_net = ipaddress.ip_network(host2) - return host1_net.subnet_of(host2_net) - - # else hostnames - elif not (host1_ip_type or host2_ip_type): - host2_len = len(host2.split(".")) - host1_truncated = ".".join(host1.split(".")[-host2_len:]) - return host1_truncated == host2 - - return False + Args: + data (str or dict): The data to hash. If a dictionary, it is first converted to a JSON string with sorted keys. + Returns: + hashlib.Hash: SHA-1 hash object of the input data. -def sha1(data): - """ - sha1("asdf").hexdigest() --> "3da541559918a808c2402bba5012f6c60b27661c" + Examples: + >>> sha1("asdf").hexdigest() + '3da541559918a808c2402bba5012f6c60b27661c' """ + from hashlib import sha1 as hashlib_sha1 + if isinstance(data, dict): data = json.dumps(data, sort_keys=True) return hashlib_sha1(smart_encode(data)) @@ -226,9 +706,19 @@ def sha1(data): def smart_decode(data): """ - Turn data into a string without complaining about it - b"asdf" --> "asdf" - "asdf" --> "asdf" + Decodes the input data to a UTF-8 string, silently ignoring errors. + + Args: + data (str or bytes): The data to decode. + + Returns: + str: The decoded string. 
+
+    Examples:
+        >>> smart_decode(b"asdf")
+        "asdf"
+        >>> smart_decode("asdf")
+        "asdf"
     """
     if isinstance(data, bytes):
         return data.decode("utf-8", errors="ignore")
@@ -238,23 +728,97 @@ def smart_encode(data):
     """
-    Turn data into bytes without complaining about it
-    "asdf" --> b"asdf"
+    Encodes the input data to bytes using UTF-8 encoding, silently ignoring errors.
+
+    Args:
+        data (str or bytes): The data to encode.
+
+    Returns:
+        bytes: The encoded bytes.
+
+    Examples:
+        >>> smart_encode("asdf")
+        b"asdf"
+        >>> smart_encode(b"asdf")
+        b"asdf"
     """
     if isinstance(data, bytes):
         return data
     return str(data).encode("utf-8", errors="ignore")


+encoded_regex = re.compile(r"%[0-9a-fA-F]{2}|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|\\[ntrbv]")
+backslash_regex = re.compile(r"(?P<slashes>\\+)(?P<char>[ntrvb])")
+
+
+def ensure_utf8_compliant(text):
+    return text.encode("utf-8", errors="ignore").decode("utf-8")
+
+
+def recursive_decode(data, max_depth=5):
+    """
+    Recursively decodes doubly or triply-encoded strings to their original form.
+
+    Supports both URL-encoding and backslash-escapes (including unicode)
+
+    Args:
+        data (str): The data to decode.
+        max_depth (int, optional): Maximum recursion depth for decoding. Defaults to 5.
+
+    Returns:
+        str: The decoded string.
+
+    Examples:
+        >>> recursive_decode("Hello%20world%21")
+        "Hello world!"
+        >>> recursive_decode("Hello%20%5Cu041f%5Cu0440%5Cu0438%5Cu0432%5Cu0435%5Cu0442")
+        "Hello Привет"
+        >>> recursive_decode("%5Cu0020%5Cu041f%5Cu0440%5Cu0438%5Cu0432%5Cu0435%5Cu0442%5Cu0021")
+        " Привет!"
+    """
+    import codecs
+
+    # Decode newline and tab escapes
+    data = backslash_regex.sub(
+        lambda match: {"n": "\n", "t": "\t", "r": "\r", "b": "\b", "v": "\v"}.get(match.group("char")), data
+    )
+    data = smart_decode(data)
+    if max_depth == 0:
+        return data
+    # Decode URL encoding
+    data = unquote(data, errors="ignore")
+    # Decode Unicode escapes
+    with suppress(UnicodeEncodeError):
+        data = ensure_utf8_compliant(codecs.decode(data, "unicode_escape", errors="ignore"))
+    # Check if there's still URL-encoded or Unicode-escaped content
+    if encoded_regex.search(data):
+        # If yes, continue decoding
+        return recursive_decode(data, max_depth=max_depth - 1)
+    return data
+
+
 rand_pool = string.ascii_lowercase
 rand_pool_digits = rand_pool + string.digits


 def rand_string(length=10, digits=True):
     """
-    rand_string() --> "c4hp4i9jzx"
-    rand_string(20) --> "ap4rsdtg5iw7ey7y3oa5"
-    rand_string(30) --> "xdmyxtglqf0z3q8t46n430kesq68yu"
+    Generates a random string of specified length.
+
+    Args:
+        length (int, optional): The length of the random string. Defaults to 10.
+        digits (bool, optional): Whether to include digits in the string. Defaults to True.
+
+    Returns:
+        str: A random string of the specified length.
+
+    Examples:
+        >>> rand_string()
+        'c4hp4i9jzx'
+        >>> rand_string(20)
+        'ap4rsdtg5iw7ey7y3oa5'
+        >>> rand_string(30, digits=False)
+        'xdmyxtglqfzqktngkesyulwbfrihva'
     """
     pool = rand_pool
     if digits:
@@ -262,15 +826,142 @@
     return "".join([random.choice(pool) for _ in range(int(length))])


-def extract_words(data, max_length=100):
+def truncate_string(s, n):
+    if len(s) > n:
+        return s[: n - 3] + "..."
+    else:
+        return s
+
+
+def extract_params_json(json_data, compare_mode="getparam"):
+    """
+    Extracts key-value pairs from a JSON object and returns them as a set of tuples. Used by the `paramminer_headers` module.
+
+    Args:
+        json_data (str): JSON-formatted string containing key-value pairs.
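+        compare_mode (str, optional): The validation mode applied to extracted parameter names
+            ("getparam", "postparam", "cookie", or "header"). Defaults to "getparam".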
+
+    Returns:
+        set: A set of tuples containing the keys and their corresponding values present in the JSON object.
+
+    Note:
+        Returns an empty set if a JSONDecodeError occurs.
+
+    Examples:
+        >>> extract_params_json('{"a": 1, "b": {"c": 2}}')
+        {('a', 1), ('b.c', 2)}
+    """
+    try:
+        data = json.loads(json_data)
+    except json.JSONDecodeError:
+        return set()
+
+    key_value_pairs = set()
+    stack = [(data, "")]
+
+    while stack:
+        current_data, path = stack.pop()
+        if isinstance(current_data, dict):
+            for key, value in current_data.items():
+                full_key = f"{path}.{key}" if path else key
+                if isinstance(value, dict):
+                    stack.append((value, full_key))
+                elif isinstance(value, list):
+                    stack.append((value, full_key))
+                else:
+                    if validate_parameter(full_key, compare_mode):
+                        key_value_pairs.add((full_key, value))
+        elif isinstance(current_data, list):
+            for item in current_data:
+                if isinstance(item, (dict, list)):
+                    stack.append((item, path))
+    return key_value_pairs
+
+
+def extract_params_xml(xml_data, compare_mode="getparam"):
     """
-    Intelligently extract words from given data
-    Returns set() of extracted words
+    Extracts tags and their text values from an XML object and returns them as a set of tuples.
+
+    Args:
+        xml_data (str): XML-formatted string containing elements.
+        compare_mode (str, optional): The validation mode applied to extracted tag names. Defaults to "getparam".
+
+    Returns:
+        set: A set of tuples containing the tags and their corresponding text values present in the XML object.
+
+    Note:
+        Returns an empty set if a ParseError occurs.
+
+    Examples:
+        >>> extract_params_xml('<root><child1><child2>value</child2></child1></root>')
+        {('root', None), ('child1', None), ('child2', 'value')}
+    """
+    import xml.etree.ElementTree as ET
+
+    try:
+        root = ET.fromstring(xml_data)
+    except ET.ParseError:
+        return set()
+
+    tag_value_pairs = set()
+    stack = [root]
+
+    while stack:
+        current_element = stack.pop()
+        if validate_parameter(current_element.tag, compare_mode):
+            tag_value_pairs.add((current_element.tag, current_element.text))
+        for child in current_element:
+            stack.append(child)
+    return tag_value_pairs
+
+
+# Define valid characters for each mode based on RFCs
+valid_chars_dict = {
+    "header": {
+        chr(c) for c in range(33, 127) if chr(c) in "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_"
+    },
+    "getparam": {chr(c) for c in range(33, 127) if chr(c) not in ":/?#[]@!$&'()*+,;="},
+    "postparam": {chr(c) for c in range(33, 127) if chr(c) not in ":/?#[]@!$&'()*+,;="},
+    "cookie": {chr(c) for c in range(33, 127) if chr(c) not in '()<>@,;:"/[]?={} \t'},
+}
+
+
+def validate_parameter(param, compare_mode):
+    compare_mode = compare_mode.lower()
+    if len(param) > 100:
+        return False
+    if compare_mode not in valid_chars_dict:
+        raise ValueError(f"Invalid compare_mode: {compare_mode}")
+    allowed_chars = valid_chars_dict[compare_mode]
+    return set(param).issubset(allowed_chars)
+
+
+def extract_words(data, acronyms=True, wordninja=True, model=None, max_length=100, word_regexes=None):
+    """Intelligently extracts words from given data.
+
+    This function uses regular expressions and optionally wordninja to extract words
+    from a given text string. Thanks to wordninja it can handle concatenated words intelligently.
+
+    Args:
+        data (str): The data from which words are to be extracted.
+        acronyms (bool, optional): Whether to include acronyms. Defaults to True.
+        wordninja (bool, optional): Whether to use the wordninja library to split concatenated words. Defaults to True.
+        model (object, optional): A custom wordninja model for special types of data such as DNS names.
+        max_length (int, optional): Maximum length for a word to be included. Defaults to 100.
+        word_regexes (list, optional): A list of compiled regular expression objects for word extraction. Defaults to None.
+
+    Returns:
+        set: A set of extracted words.
+
+    Examples:
+        >>> extract_words('blacklanternsecurity')
+        {'black', 'lantern', 'security', 'bls', 'blacklanternsecurity'}
     """
+    import wordninja as _wordninja
+
+    if word_regexes is None:
+        word_regexes = bbot_regexes.word_regexes
     words = set()
     data = smart_decode(data)
-
-    for r in regexes.word_regexes:
+    for r in word_regexes:
         for word in set(r.findall(data)):
             # blacklanternsecurity
             if len(word) <= max_length:
@@ -279,29 +970,97 @@
     # blacklanternsecurity --> ['black', 'lantern', 'security']
     # max_slice_length = 3
     for word in list(words):
-        subwords = wordninja.split(word)
-        for subword in subwords:
-            words.add(subword)
+        if wordninja:
+            if model is None:
+                model = _wordninja
+            subwords = model.split(word)
+            for subword in subwords:
+                words.add(subword)
+        # this section generates compound words
+        # it is interesting, but it is currently disabled because the quality of its output doesn't quite justify its quantity
         # blacklanternsecurity --> ['black', 'lantern', 'security', 'blacklantern', 'lanternsecurity']
         # for s, e in combinations(range(len(subwords) + 1), 2):
         #     if e - s <= max_slice_length:
         #         subword_slice = "".join(subwords[s:e])
         #         words.add(subword_slice)
         # blacklanternsecurity --> bls
-        if len(subwords) > 1:
-            words.add("".join([c[0] for c in subwords if len(c) > 0]))
+        if acronyms:
+            if len(subwords) > 1:
+                words.add("".join([c[0] for c in subwords if len(c) > 0]))
     return words


-def kill_children(parent_pid=None, sig=signal.SIGTERM):
+def closest_match(s, choices, n=1, cutoff=0.0):
+    """Finds the closest matching strings from a list of choices based on a given string.
+
+    This function uses the difflib library to find the closest matches to a given string `s` from a list of `choices`.
+    It can return either the single best match or a list of the top `n` best matches.
+
+    Args:
+        s (str): The string for which to find the closest match.
+        choices (list): A list of strings to compare against.
+        n (int, optional): The number of best matches to return. Defaults to 1.
+        cutoff (float, optional): A float value that defines the similarity threshold. Strings with similarity below this value are not considered. Defaults to 0.0.
+
+    Returns:
+        str or list: Either the closest matching string or a list of the `n` closest matching strings.
+
+    Examples:
+        >>> closest_match("asdf", ["asd", "fds"])
+        'asd'
+        >>> closest_match("asdf", ["asd", "fds", "asdff"], n=3)
+        ['asdff', 'asd', 'fds']
+    """
+    import difflib
+
+    matches = difflib.get_close_matches(s, choices, n=n, cutoff=cutoff)
+    if not choices or not matches:
+        return
+    if n == 1:
+        return matches[0]
+    return matches
+
+
+def get_closest_match(s, choices, msg=None):
+    """Finds the closest match from a list of choices for a given string.
+
+    This function is particularly useful for CLI applications where you want to validate flags or modules.
+
+    Args:
+        s (str): The string for which to find the closest match.
+        choices (list): A list of strings to compare against.
+        msg (str, optional): Additional message to prepend in the warning message. Defaults to None.
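+
+    Returns:
+        str: A warning message suggesting the closest match.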
+ + Examples: + >>> get_closest_match("some_module", ["some_mod", "some_other_mod"], msg="module") + # Output: Could not find module "some_module". Did you mean "some_mod"? + """ + if msg is None: + msg = "" + else: + msg += " " + closest = closest_match(s, choices) + return f'Could not find {msg}"{s}". Did you mean "{closest}"?' + + +def kill_children(parent_pid=None, sig=None): """ Forgive me father for I have sinned """ + import psutil + import signal + + if sig is None: + sig = signal.SIGTERM + try: parent = psutil.Process(parent_pid) except psutil.NoSuchProcess: log.debug(f"No such PID: {parent_pid}") + return log.debug(f"Killing children of process ID {parent.pid}") children = parent.children(recursive=True) for child in children: @@ -310,13 +1069,29 @@ def kill_children(parent_pid=None, sig=signal.SIGTERM): try: child.send_signal(sig) except psutil.NoSuchProcess: - log.debug(f"No such PID: {parent_pid}") + log.debug(f"No such PID: {child.pid}") + except psutil.AccessDenied: + log.debug(f"Error killing PID: {child.pid} - access denied") + log.debug(f"Finished killing children of process ID {parent.pid}") def str_or_file(s): - """ - "file.txt" --> ["file_line1", "file_line2", "file_line3"] - "not_a_file" --> ["not_a_file"] + """Reads a string or file and yields its content line-by-line. + + This function tries to open the given string `s` as a file and yields its lines. + If it fails to open `s` as a file, it treats `s` as a regular string and yields it as is. + + Args: + s (str): The string or file path to read. + + Yields: + str: Either lines from the file or the original string. + + Examples: + >>> list(str_or_file("file.txt")) + ['file_line1', 'file_line2', 'file_line3'] + >>> list(str_or_file("not_a_file")) + ['not_a_file'] """ try: with open(s, errors="ignore") as f: @@ -326,35 +1101,86 @@ def str_or_file(s): yield s -def chain_lists(l, try_files=False, msg=None): - """ - Chain together list, splitting entries on comma - - Optionally try to open entries as files and add their contents to the list - - Used for parsing a list of arguments that may include space and/or comma-separated values - - ["a", "b,c,d"] --> ["a", "b", "c", "d"] - - try_files=True: - - ["a,file.txt", "c,d"] --> ["a", "f_line1", "f_line2", "f_line3", "c", "d"] +split_regex = re.compile(r"[\s,]") + + +def chain_lists( + l, + try_files=False, + msg=None, + remove_blank=True, + validate=False, + validate_chars='<>:"/\\|?*)', +): + """Chains together list elements, allowing for entries separated by commas. + + This function takes a list `l` and flattens it by splitting its entries on commas. + It also allows you to optionally open entries as files and add their contents to the list. + + The order of entries is preserved, and deduplication is performed automatically. + + Args: + l (list): The list of strings to chain together. + try_files (bool, optional): Whether to try to open entries as files. Defaults to False. + msg (str, optional): An optional message to log when reading from a file. Defaults to None. + remove_blank (bool, optional): Whether to remove blank entries from the list. Defaults to True. + validate (bool, optional): Whether to perform validation for undesirable characters. Defaults to False. + validate_chars (str, optional): When performing validation, what additional set of characters to block (blocks non-printable ascii automatically). Defaults to '<>:"/\\|?*)' + + Returns: + list: The list of chained elements. 
+
+    Raises:
+        ValueError: If the input string contains invalid characters, when enabled (off by default).
+
+    Examples:
+        >>> chain_lists(["a", "b,c,d"])
+        ['a', 'b', 'c', 'd']
+
+        >>> chain_lists(["a,file.txt", "c,d"], try_files=True)
+        ['a', 'f_line1', 'f_line2', 'f_line3', 'c', 'd']
     """
-    final_list = dict()
+    if isinstance(l, str):
+        l = [l]
+    final_list = {}
     for entry in l:
-        for s in entry.split(","):
+        for s in split_regex.split(entry):
             f = s.strip()
+            if validate:
+                if any((c in validate_chars) or (ord(c) < 32 and c != " ") for c in f):
+                    raise ValueError(f"Invalid character in string: {f}")
             f_path = Path(f).resolve()
             if try_files and f_path.is_file():
                 if msg is not None:
-                    msg = str(msg).format(filename=f_path)
-                    log.info(msg)
+                    new_msg = str(msg).format(filename=f_path)
+                    log.info(new_msg)
                 for line in str_or_file(f):
                     final_list[line] = None
             else:
                 final_list[f] = None
-    return list(final_list)
+    ret = list(final_list)
+    if remove_blank:
+        ret = [r for r in ret if r]
+    return ret


 def list_files(directory, filter=lambda x: True):
     """
-    "/tmp/test" --> ["file1.txt", "file2.txt"]
+    Lists files in a given directory that meet a specified filter condition.
+
+    Args:
+        directory (str): The directory where to list files.
+        filter (callable, optional): A function to filter the files. Defaults to a lambda function that returns True for all files.
+
+    Yields:
+        Path: A Path object for each file that meets the filter condition.
+
+    Examples:
+        >>> list(list_files("/tmp/test"))
+        [Path('/tmp/test/file1.py'), Path('/tmp/test/file2.txt')]
+
+        >>> list(list_files("/tmp/test", filter=lambda f: f.suffix == ".py"))
+        [Path('/tmp/test/file1.py')]
     """
     directory = Path(directory).resolve()
     if directory.is_dir():
@@ -364,20 +1190,50 @@


 def rm_at_exit(path):
+    """Registers a file to be automatically deleted when the program exits.
+
+    Args:
+        path (str or Path): The path to the file to be deleted upon program exit.
+
+    Examples:
+        >>> rm_at_exit("/tmp/test/file1.txt")
     """
-    Removes a file automatically when BBOT exits
-    """
-    atexit.register(_rm_at_exit, path)
+    import atexit
+
+    atexit.register(delete_file, path)


+def delete_file(path):
+    """Deletes a file at the given path.
-def _rm_at_exit(path):
+
+    Args:
+        path (str or Path): The path to the file to be deleted.
+
+    Note:
+        This function suppresses all exceptions to ensure that the program continues running even if the file could not be deleted.
+
+    Examples:
+        >>> delete_file("/tmp/test/file1.txt")
+    """
     with suppress(Exception):
         Path(path).unlink(missing_ok=True)


 def read_file(filename):
-    """
-    "/tmp/file.txt" --> ["file_line1", "file_line2", "file_line3"]
+    """Reads a file line by line and yields each line without line breaks.
+
+    Args:
+        filename (str or Path): The path to the file to read.
+
+    Yields:
+        str: A line from the file without the trailing line break.
+
+    Examples:
+        >>> for line in read_file("/tmp/file.txt"):
+        ...     print(line)
+        file_line1
+        file_line2
+        file_line3
     """
     with open(filename, errors="ignore") as f:
         for line in f:
@@ -385,10 +1241,24 @@


 def gen_numbers(n, padding=2):
-    """
-    n=5 --> ['0', '00', '01', '02', '03', '04', '1', '2', '3', '4']
-    n=3, padding=3 --> ['0', '00', '000', '001', '002', '01', '02', '1', '2']
-    n=5, padding=1 --> ['0', '1', '2', '3', '4']
+    """Generates numbers with variable padding and returns them as a set of strings.
+
+    Args:
+        n (int): The upper limit of numbers to generate, exclusive.
+ padding (int, optional): The maximum number of digits to pad the numbers with. Defaults to 2. + + Returns: + set: A set of string representations of numbers with varying degrees of padding. + + Examples: + >>> gen_numbers(5) + {'0', '00', '01', '02', '03', '04', '1', '2', '3', '4'} + + >>> gen_numbers(3, padding=3) + {'0', '00', '000', '001', '002', '01', '02', '1', '2'} + + >>> gen_numbers(5, padding=1) + {'0', '1', '2', '3', '4'} """ results = set() for i in range(n): @@ -397,21 +1267,54 @@ def gen_numbers(n, padding=2): return results -def make_netloc(host, port): - """ - ("192.168.1.1", 443) --> "192.168.1.1:443" - ("evilcorp.com", 80) --> "evilcorp.com:80" - ("dead::beef", 443) --> "[dead::beef]:443" +def make_netloc(host, port=None): + """Constructs a network location string from a given host and port. + + Args: + host (str): The hostname or IP address. + port (int, optional): The port number. If None, the port is omitted. + + Returns: + str: A network location string in the form 'host' or 'host:port'. + + Examples: + >>> make_netloc("192.168.1.1", None) + "192.168.1.1" + + >>> make_netloc("192.168.1.1", 443) + "192.168.1.1:443" + + >>> make_netloc("evilcorp.com", 80) + "evilcorp.com:80" + + >>> make_netloc("dead::beef", None) + "[dead::beef]" + + >>> make_netloc("dead::beef", 443) + "[dead::beef]:443" """ if is_ip(host, version=6): host = f"[{host}]" + if port is None: + return str(host) return f"{host}:{port}" def which(*executables): + """Finds the full path of the first available executable from a list of executables. + + Args: + *executables (str): One or more executable names to search for. + + Returns: + str: The full path of the first available executable, or None if none are found. + + Examples: + >>> which("python", "python3") + "/usr/bin/python" """ - "python" --> "/usr/bin/python" - """ + import shutil + for e in executables: location = shutil.which(e) if location: @@ -419,14 +1322,24 @@ def which(*executables): def search_dict_by_key(key, d): - """ - Search a dictionary by key name - Generator, yields all values with matching keys + """Search a nested dictionary or list of dictionaries by a key and yield all matching values. + + Args: + key (str): The key to search for. + d (Union[dict, list]): The dictionary or list of dictionaries to search. + + Yields: + Any: Yields all values that match the provided key. + + Examples: + >>> d = {'a': 1, 'b': {'c': 2, 'a': 3}, 'd': [{'a': 4}, {'e': 5}]} + >>> list(search_dict_by_key('a', d)) + [1, 3, 4] """ if isinstance(d, dict): if key in d: yield d[key] - for k, v in d.items(): + for v in d.values(): yield from search_dict_by_key(key, v) elif isinstance(d, list): for v in d: @@ -434,63 +1347,107 @@ def search_dict_by_key(key, d): def search_format_dict(d, **kwargs): - """ - Recursively .format() string values in dictionary keys - search_format_dict({"test": "{name} is awesome"}, name="keanu") - --> {"test": "keanu is awesome"} + """Recursively format string values in a dictionary or list using the provided keyword arguments. + + Args: + d (Union[dict, list, str]): The dictionary, list, or string to format. + **kwargs: Arbitrary keyword arguments used for string formatting. + + Returns: + Union[dict, list, str]: The formatted dictionary, list, or string. 
+ + Examples: + >>> search_format_dict({"test": "#{name} is awesome"}, name="keanu") + {"test": "keanu is awesome"} """ if isinstance(d, dict): return {k: search_format_dict(v, **kwargs) for k, v in d.items()} elif isinstance(d, list): return [search_format_dict(v, **kwargs) for v in d] elif isinstance(d, str): - return d.format(**kwargs) - else: - return d + for find, replace in kwargs.items(): + find = "#{" + str(find) + "}" + d = d.replace(find, replace) + return d -def filter_dict(d, *key_names, fuzzy=False, invert=False): - """ - Recursively filter a dictionary based on key names - filter_dict({"key1": "test", "key2": "asdf"}, key_name=key2) - --> {"key2": "asdf"} - """ - ret = {} - if isinstance(d, dict): - for key in d: - if key in key_names or (fuzzy and any(k in key for k in key_names)): - ret[key] = copy.deepcopy(d[key]) - elif isinstance(d[key], list) or isinstance(d[key], dict): - child = filter_dict(d[key], *key_names, fuzzy=fuzzy) - if child: - ret[key] = child - return ret +def search_dict_values(d, *regexes): + """Recursively search a dictionary's values based on provided regex patterns. + Args: + d (Union[dict, list, str]): The dictionary, list, or string to search. + *regexes: Arbitrary number of compiled regex patterns. -def clean_dict(d, *key_names, fuzzy=False): - d = copy.deepcopy(d) - if isinstance(d, dict): - for key, val in list(d.items()): - if key in key_names or (fuzzy and any(k in key for k in key_names)): - d.pop(key) - else: - d[key] = clean_dict(val, *key_names, fuzzy=fuzzy) - return d + Returns: + Generator: Yields matching values based on the provided regex patterns. + + Examples: + >>> dict_to_search = { + ... "key1": { + ... "key2": [ + ... { + ... "key3": "A URL: https://www.evilcorp.com" + ... } + ... ] + ... } + ... } + >>> url_regexes = re.compile(r'https?://[^\\s<>"]+|www\\.[^\\s<>"]+') + >>> list(search_dict_values(dict_to_search, url_regexes)) + ["https://www.evilcorp.com"] + """ + + results = set() + if isinstance(d, str): + for r in regexes: + for match in r.finditer(d): + result = match.group() + h = hash(result) + if h not in results: + results.add(h) + yield result + elif isinstance(d, dict): + for v in d.values(): + yield from search_dict_values(v, *regexes) + elif isinstance(d, list): + for v in d: + yield from search_dict_values(v, *regexes) def grouper(iterable, n): """ - >>> list(grouper('ABCDEFG', 3)) - [['A', 'B', 'C'], ['D', 'E', 'F'], ['G']] + Grouper groups an iterable into chunks of a given size. + + Args: + iterable (iterable): The iterable to be chunked. + n (int): The size of each chunk. + + Returns: + iterator: An iterator that produces lists of elements from the original iterable, each of length `n` or less. + + Examples: + >>> list(grouper('ABCDEFG', 3)) + [['A', 'B', 'C'], ['D', 'E', 'F'], ['G']] """ + from itertools import islice + iterable = iter(iterable) return iter(lambda: list(islice(iterable, n)), []) def split_list(alist, wanted_parts=2): """ - >>> split_list([1,2,3,4,5]) - [[1, 2], [3, 4, 5]] + Splits a list into a specified number of approximately equal parts. + + Args: + alist (list): The list to be split. + wanted_parts (int): The number of parts to split the list into. + + Returns: + list: A list of lists, each containing a portion of the original list. 
+ + Examples: + >>> split_list([1, 2, 3, 4, 5]) + [[1, 2], [3, 4, 5]] """ length = len(alist) return [alist[i * length // wanted_parts : (i + 1) * length // wanted_parts] for i in range(wanted_parts)] @@ -498,13 +1455,31 @@ def split_list(alist, wanted_parts=2): def mkdir(path, check_writable=True, raise_error=True): """ - Create a directory and ensure that it's writable + Creates a directory and optionally checks if it's writable. + + Args: + path (str or Path): The directory to create. + check_writable (bool, optional): Whether to check if the directory is writable. Default is True. + raise_error (bool, optional): Whether to raise an error if the directory creation fails. Default is True. + + Returns: + bool: True if the directory is successfully created (and writable, if check_writable=True); otherwise False. + + Raises: + DirectoryCreationError: Raised if the directory cannot be created and `raise_error=True`. + + Examples: + >>> mkdir("/tmp/new_dir") + True + >>> mkdir("/restricted_dir", check_writable=False, raise_error=False) + False """ path = Path(path).resolve() touchfile = path / f".{rand_string()}" try: path.mkdir(exist_ok=True, parents=True) - touchfile.touch() + if check_writable: + touchfile.touch() return True except Exception as e: if raise_error: @@ -517,9 +1492,23 @@ def mkdir(path, check_writable=True, raise_error=True): def make_date(d=None, microseconds=False): """ - make_date() --> "20220707_1325_50" - make_date(microseconds=True) --> "20220707_1330_35167617" + Generates a string representation of the current date and time, with optional microsecond precision. + + Args: + d (datetime, optional): A datetime object to convert. Defaults to the current date and time. + microseconds (bool, optional): Whether to include microseconds. Defaults to False. + + Returns: + str: A string representation of the date and time, formatted as YYYYMMDD_HHMM_SS or YYYYMMDD_HHMM_SSFFFFFF if microseconds are included. + + Examples: + >>> make_date() + "20220707_1325_50" + >>> make_date(microseconds=True) + "20220707_1330_35167617" """ + from datetime import datetime + f = "%Y%m%d_%H%M_%S" if microseconds: f += "%f" @@ -535,9 +1524,21 @@ def error_and_exit(msg): def get_file_extension(s): """ - https://evilcorp.com/api/test.php --> "php" - /etc/test.conf --> "conf" - /etc/passwd --> "" + Extracts the file extension from a given string representing a URL or file path. + + Args: + s (str): The string from which to extract the file extension. + + Returns: + str: The file extension, or an empty string if no extension is found. + + Examples: + >>> get_file_extension("https://evilcorp.com/api/test.php") + "php" + >>> get_file_extension("/etc/test.conf") + "conf" + >>> get_file_extension("/etc/passwd") + "" """ s = str(s).lower().strip() rightmost_section = s.rsplit("/", 1)[-1] @@ -549,13 +1550,23 @@ def get_file_extension(s): def backup_file(filename, max_backups=10): """ - rename a file as a backup + Renames a file by appending an iteration number as a backup. Recursively renames + files up to a specified maximum number of backups. - recursively renames files up to max_backups + Args: + filename (str or pathlib.Path): The file to backup. + max_backups (int, optional): The maximum number of backups to keep. Defaults to 10. - backup_file("/tmp/test.txt") --> "/tmp/test.0.txt" - backup_file("/tmp/test.0.txt") --> "/tmp/test.1.txt" - backup_file("/tmp/test.1.txt") --> "/tmp/test.2.txt" + Returns: + pathlib.Path: The new backup filepath. 
+ + Examples: + >>> backup_file("/tmp/test.txt") + PosixPath("/tmp/test.0.txt") + >>> backup_file("/tmp/test.0.txt") + PosixPath("/tmp/test.1.txt") + >>> backup_file("/tmp/test.1.txt") + PosixPath("/tmp/test.2.txt") """ filename = Path(filename).resolve() suffixes = [s.strip(".") for s in filename.suffixes] @@ -573,11 +1584,21 @@ def backup_file(filename, max_backups=10): def latest_mtime(d): - """ - Given a directory, return the latest modified time of any contained file or directory (recursive) - Useful for sorting directories by modified time for the purpose of cleanup, etc. + """Get the latest modified time of any file or sub-directory in a given directory. + + This function takes a directory path as an argument and returns the latest modified time + of any contained file or directory, recursively. It's useful for sorting directories by + modified time for cleanup or other purposes. + + Args: + d (str or Path): The directory path to search for the latest modified time. - latest_mtime("~/.bbot/scans/mushy_susan") --> 1659016928.2848816 + Returns: + float: The latest modified time in Unix timestamp format. + + Examples: + >>> latest_mtime("~/.bbot/scans/mushy_susan") + 1659016928.2848816 """ d = Path(d).resolve() mtimes = [d.lstat().st_mtime] @@ -590,12 +1611,60 @@ def latest_mtime(d): return max(mtimes) -def clean_old(d, keep=10, filter=lambda x: True, key=latest_mtime, reverse=True, raise_error=False): +def filesize(f): + """Get the file size of a given file. + + This function takes a file path as an argument and returns its size in bytes. If the path + does not point to a file, the function returns 0. + + Args: + f (str or Path): The file path for which to get the size. + + Returns: + int: The size of the file in bytes, or 0 if the path does not point to a file. + + Examples: + >>> filesize("/path/to/file.txt") + 1024 + """ + f = Path(f) + if f.is_file(): + return f.stat().st_size + return 0 + + +def rm_rf(f): + """Recursively delete a directory + + Args: + f (str or Path): The directory path to delete. + + Examples: + >>> rm_rf("/tmp/httpx98323849") """ - Given a directory "d", measure the number of subdirectories and files (matching "filter") - And remove (rm -r) the oldest ones past the threshold of "keep" + import shutil + + shutil.rmtree(f) + + +def clean_old(d, keep=10, filter=lambda x: True, key=latest_mtime, reverse=True, raise_error=False): + """Clean up old files and directories within a given directory based on various filtering and sorting options. + + This function removes the oldest files and directories in the provided directory 'd' that exceed a specified + threshold ('keep'). The items to be deleted can be filtered using a lambda function 'filter', and they are + sorted by a key function, defaulting to latest modification time. + + Args: + d (str or Path): The directory path to clean up. + keep (int): The number of items to keep. Ones beyond this count will be removed. + filter (Callable): A lambda function for filtering which files or directories to consider. + Defaults to a lambda function that returns True for all. + key (Callable): A function to sort the files and directories. Defaults to latest modification time. + reverse (bool): Whether to reverse the order of sorted items before removing. Defaults to True. + raise_error (bool): Whether to raise an error if directory deletion fails. Defaults to False. 
- clean_old_dirs("~/.bbot/scans", filter=lambda x: x.is_dir() and scan_name_regex.match(x.name)) + Examples: + >>> clean_old("~/.bbot/scans", filter=lambda x: x.is_dir() and scan_name_regex.match(x.name)) """ d = Path(d) if not d.is_dir(): @@ -605,7 +1674,7 @@ def clean_old(d, keep=10, filter=lambda x: True, key=latest_mtime, reverse=True, for path in paths[keep:]: try: log.debug(f"Removing {path}") - shutil.rmtree(path) + rm_rf(path) except Exception as e: msg = f"Failed to delete directory: {path}, {e}" if raise_error: @@ -614,5 +1683,1145 @@ def clean_old(d, keep=10, filter=lambda x: True, key=latest_mtime, reverse=True, def extract_emails(s): - for email in regexes.email_regex.findall(smart_decode(s)): + """ + Extract email addresses from a body of text + + This function takes in a string and yields all email addresses found in it. + The emails are converted to lower case before yielding. It utilizes + regular expressions for email pattern matching. + + Args: + s (str): The input string from which to extract email addresses. + + Yields: + str: Yields email addresses found in the input string, in lower case. + + Examples: + >>> list(extract_emails("Contact us at info@evilcorp.com and support@evilcorp.com")) + ['info@evilcorp.com', 'support@evilcorp.com'] + """ + for email in bbot_regexes.email_regex.findall(smart_decode(s)): yield email.lower() + + +def extract_host(s): + """ + Attempts to find and extract the host portion of a string. + + Args: + s (str): The string from which to extract the host. + + Returns: + tuple: A tuple containing three strings: + (hostname (None if not found), string_before_hostname, string_after_hostname). + + Examples: + >>> extract_host("evilcorp.com:80") + ("evilcorp.com", "", ":80") + + >>> extract_host("http://evilcorp.com:80/asdf.php?a=b") + ("evilcorp.com", "http://", ":80/asdf.php?a=b") + + >>> extract_host("bob@evilcorp.com") + ("evilcorp.com", "bob@", "") + + >>> extract_host("[dead::beef]:22") + ("dead::beef", "[", "]:22") + + >>> extract_host("ftp://username:password@my-ftp.com/my-file.csv") + ( + "my-ftp.com", + "ftp://username:password@", + "/my-file.csv", + ) + """ + s = smart_decode(s) + match = bbot_regexes.extract_host_regex.search(s) + + if match: + hostname = match.group(1) + before = s[: match.start(1)] + after = s[match.end(1) :] + host, port = split_host_port(hostname) + netloc = make_netloc(host, port) + if netloc != hostname: + # invalid host / port + return (None, s, "") + if host is not None: + if port is not None: + after = f":{port}{after}" + if is_ip(host, version=6) and hostname.startswith("["): + before = f"{before}[" + after = f"]{after}" + hostname = str(host) + return (hostname, before, after) + + return (None, s, "") + + +def smart_encode_punycode(text: str) -> str: + """ + ドメイン.テスト --> xn--eckwd4c7c.xn--zckzah + """ + import idna + + host, before, after = extract_host(text) + if host is None: + return text + + try: + host = idna.encode(host).decode(errors="ignore") + except UnicodeError: + pass # If encoding fails, leave the host as it is + + return f"{before}{host}{after}" + + +def smart_decode_punycode(text: str) -> str: + """ + xn--eckwd4c7c.xn--zckzah --> ドメイン.テスト + """ + import idna + + host, before, after = extract_host(text) + if host is None: + return text + + try: + host = idna.decode(host) + except UnicodeError: + pass # If decoding fails, leave the host as it is + + return f"{before}{host}{after}" + + +def can_sudo_without_password(): + """Check if the current user has passwordless sudo access. 
+
+    This function checks whether the current user can use sudo without entering a password.
+    It runs a command with sudo and checks the return code to determine this.
+
+    Returns:
+        bool: True if the current user can use sudo without a password, False otherwise.
+
+    Examples:
+        >>> can_sudo_without_password()
+        True
+    """
+    if os.geteuid() != 0:
+        env = dict(os.environ)
+        env["SUDO_ASKPASS"] = "/bin/false"
+        try:
+            sp.run(["sudo", "-K"], stderr=sp.DEVNULL, stdout=sp.DEVNULL, check=True, env=env)
+            sp.run(["sudo", "-An", "/bin/true"], stderr=sp.DEVNULL, stdout=sp.DEVNULL, check=True, env=env)
+        except sp.CalledProcessError:
+            return False
+    return True
+
+
+def verify_sudo_password(sudo_pass):
+    """Verify if the given sudo password is correct.
+
+    This function checks whether the sudo password provided is valid for the current user.
+    It runs a command with sudo, feeding in the password via stdin, and checks the return code.
+
+    Args:
+        sudo_pass (str): The sudo password to verify.
+
+    Returns:
+        bool: True if the sudo password is correct, False otherwise.
+
+    Examples:
+        >>> verify_sudo_password("mysecretpassword")
+        True
+    """
+    try:
+        sp.run(
+            ["sudo", "-S", "-k", "true"],
+            input=smart_encode(sudo_pass),
+            stderr=sp.DEVNULL,
+            stdout=sp.DEVNULL,
+            check=True,
+        )
+    except sp.CalledProcessError:
+        return False
+    return True
+
+
+def make_table(rows, header, **kwargs):
+    """Generate a formatted table from the given rows and headers.
+
+    This function uses the `tabulate` package to generate a table with formatting options.
+    It can accept various input formats and table styles, which can be customized using optional arguments.
+
+    Args:
+        rows (list): A list of rows, where each row is itself a list of cell values.
+        header (list): A list of column headings.
+        **kwargs: Keyword arguments to customize table formatting.
+            - tablefmt (str, optional): Table format. Default is 'grid'.
+            - disable_numparse (bool, optional): Disable automatic number parsing. Default is True.
+            - maxcolwidths (int, optional): Maximum column width. Default is 40.
+
+    Returns:
+        str: A string representing the formatted table.
+
+    Examples:
+        >>> print(make_table([["row1", "row1"], ["row2", "row2"]], ["header1", "header2"]))
+        +-----------+-----------+
+        | header1   | header2   |
+        +===========+===========+
+        | row1      | row1      |
+        +-----------+-----------+
+        | row2      | row2      |
+        +-----------+-----------+
+    """
+    from tabulate import tabulate
+
+    # fix IndexError: list index out of range
+    if not rows:
+        rows = [[]]
+    tablefmt = os.environ.get("BBOT_TABLE_FORMAT", None)
+    defaults = {"tablefmt": "grid", "disable_numparse": True, "maxcolwidths": None}
+    if tablefmt is None:
+        defaults.update({"maxcolwidths": 40})
+    else:
+        defaults.update({"tablefmt": tablefmt})
+    for k, v in defaults.items():
+        if k not in kwargs:
+            kwargs[k] = v
+    # don't wrap columns in markdown
+    if tablefmt in ("github", "markdown"):
+        kwargs.pop("maxcolwidths")
+    # escape problematic markdown characters in rows

+    def markdown_escape(s):
+        return str(s).replace("|", "&#124;")
+
+    rows = [[markdown_escape(f) for f in row] for row in rows]
+    header = [markdown_escape(h) for h in header]
+    return tabulate(rows, header, **kwargs)
+
+
+def human_timedelta(d):
+    """Convert a TimeDelta object into a human-readable string.
+
+    This function takes a datetime.timedelta object and converts it into a string format that
+    is easier to read and understand.
+
+    Args:
+        d (datetime.timedelta): The TimeDelta object to convert.
+
+    Returns:
+        str: A string representation of the TimeDelta object in human-readable form.
+ + Examples: + >>> from datetime import datetime + >>> + >>> start_time = datetime.now() + >>> end_time = datetime.now() + >>> elapsed_time = end_time - start_time + >>> human_timedelta(elapsed_time) + '2 hours, 30 minutes, 15 seconds' + """ + hours, remainder = divmod(d.seconds, 3600) + minutes, seconds = divmod(remainder, 60) + result = [] + if hours: + result.append(f"{hours:,} hour" + ("s" if hours > 1 else "")) + if minutes: + result.append(f"{minutes:,} minute" + ("s" if minutes > 1 else "")) + if seconds: + result.append(f"{seconds:,} second" + ("s" if seconds > 1 else "")) + ret = ", ".join(result) + if not ret: + ret = "0 seconds" + return ret + + +def bytes_to_human(_bytes): + """Convert a bytes size to a human-readable string. + + This function converts a numeric bytes value into a human-readable string format, complete + with the appropriate unit symbol (B, KB, MB, GB, etc.). + + Args: + _bytes (int): The number of bytes to convert. + + Returns: + str: A string representing the number of bytes in a more readable format, rounded to two + decimal places. + + Examples: + >>> bytes_to_human(1234129384) + '1.15GB' + """ + sizes = ["B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB"] + units = {} + for count, size in enumerate(sizes): + units[size] = pow(1024, count) + for size in sizes: + if abs(_bytes) < 1024.0: + if size == sizes[0]: + _bytes = str(int(_bytes)) + else: + _bytes = f"{_bytes:.2f}" + return f"{_bytes}{size}" + _bytes /= 1024 + raise ValueError(f'Unable to convert "{_bytes}" to human filesize') + + +filesize_regex = re.compile(r"(?P[0-9\.]+)[\s]*(?P[a-z])", re.I) + + +def human_to_bytes(filesize): + """Convert a human-readable file size string to its bytes equivalent. + + This function takes a human-readable file size string, such as "2.5GB", and converts it + to its equivalent number of bytes. + + Args: + filesize (str or int): The human-readable file size string or integer bytes value to convert. + + Returns: + int: The number of bytes equivalent to the input human-readable file size. + + Raises: + ValueError: If the input string cannot be converted to bytes. + + Examples: + >>> human_to_bytes("23.23gb") + 24943022571 + """ + if isinstance(filesize, int): + return filesize + sizes = ["B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB"] + units = {} + for count, size in enumerate(sizes): + size_increment = pow(1024, count) + units[size] = size_increment + if len(size) == 2: + units[size[0]] = size_increment + match = filesize_regex.match(filesize) + try: + if match: + num, size = match.groups() + size = size.upper() + size_increment = units[size] + return int(float(num) * size_increment) + except KeyError: + pass + raise ValueError(f'Unable to convert filesize "{filesize}" to bytes') + + +def integer_to_ordinal(n): + """ + Convert an integer to its ordinal representation. + + Args: + n (int): The integer to convert. + + Returns: + str: The ordinal representation of the integer. 
+ + Examples: + >>> integer_to_ordinal(1) + '1st' + >>> integer_to_ordinal(2) + '2nd' + >>> integer_to_ordinal(3) + '3rd' + >>> integer_to_ordinal(11) + '11th' + >>> integer_to_ordinal(21) + '21st' + >>> integer_to_ordinal(101) + '101st' + """ + # Check the last digit + last_digit = n % 10 + # Check the last two digits for special cases (11th, 12th, 13th) + last_two_digits = n % 100 + + if 10 <= last_two_digits <= 20: + suffix = "th" + else: + if last_digit == 1: + suffix = "st" + elif last_digit == 2: + suffix = "nd" + elif last_digit == 3: + suffix = "rd" + else: + suffix = "th" + + return f"{n}{suffix}" + + +def cpu_architecture(): + """Return the CPU architecture of the current system. + + This function fetches and returns the architecture type of the CPU where the code is being executed. + It maps common identifiers like "x86_64" to more general types like "amd64". + + Returns: + str: A string representing the CPU architecture, such as "amd64", "armv7", or "arm64". + + Examples: + >>> cpu_architecture() + 'amd64' + """ + import platform + + uname = platform.uname() + arch = uname.machine.lower() + if arch.startswith("aarch"): + return "arm64" + elif arch == "x86_64": + return "amd64" + return arch + + +def os_platform(): + """Return the OS platform of the current system. + + This function fetches and returns the OS type where the code is being executed. + It converts the platform identifier to lowercase. + + Returns: + str: A string representing the OS platform, such as "linux", "darwin", or "windows". + + Examples: + >>> os_platform() + 'linux' + """ + import platform + + return platform.system().lower() + + +def os_platform_friendly(): + """Return a human-friendly OS platform string, suitable for golang release binaries. + + This function fetches the OS platform and modifies it to a more human-readable format if necessary. + Specifically, it changes "darwin" to "macOS". + + Returns: + str: A string representing the human-friendly OS platform, such as "macOS", "linux", or "windows". + + Examples: + >>> os_platform_friendly() + 'macOS' + """ + p = os_platform() + if p == "darwin": + return "macOS" + return p + + +tag_filter_regex = re.compile(r"[^a-z0-9]+") + + +def tagify(s, delimiter=None, maxlen=None): + """Sanitize a string into a tag-friendly format. + + Converts a given string to lowercase and replaces all characters not matching + [a-z0-9] with hyphens. Optionally truncates the result to 'maxlen' characters. + + Args: + s (str): The input string to sanitize. + maxlen (int, optional): The maximum length for the tag. Defaults to None. + + Returns: + str: A sanitized, tag-friendly string. + + Examples: + >>> tagify("HTTP Web Title") + 'http-web-title' + >>> tagify("HTTP Web Title", maxlen=8) + 'http-web' + """ + if delimiter is None: + delimiter = "-" + ret = str(s).lower() + return tag_filter_regex.sub(delimiter, ret)[:maxlen].strip(delimiter) + + +def memory_status(): + """Return statistics on system memory consumption. + + The function returns a `psutil` named tuple that contains statistics on + system virtual memory usage, such as total memory, used memory, available + memory, and more. + + Returns: + psutil._pslinux.svmem: A named tuple representing various statistics + about system virtual memory usage. + + Examples: + >>> mem = memory_status() + >>> mem.available + 13195399168 + + >>> mem = memory_status() + >>> mem.percent + 79.0 + """ + import psutil + + return psutil.virtual_memory() + + +def swap_status(): + """Return statistics on swap memory consumption. 
+ + The function returns a `psutil` named tuple that contains statistics on + system swap memory usage, such as total swap, used swap, free swap, and more. + + Returns: + psutil._common.sswap: A named tuple representing various statistics + about system swap memory usage. + + Examples: + >>> swap = swap_status() + >>> swap.total + 4294967296 + + >>> swap = swap_status() + >>> swap.used + 2097152 + """ + import psutil + + return psutil.swap_memory() + + +def get_size(obj, max_depth=5, seen=None): + """ + Roughly estimate the memory footprint of a Python object using recursion. + + Parameters: + obj (any): The object whose size is to be determined. + max_depth (int, optional): Maximum depth to which nested objects will be inspected. Defaults to 5. + seen (set, optional): Objects that have already been accounted for, to avoid loops. + + Returns: + int: Approximate memory footprint of the object in bytes. + + Examples: + >>> get_size(my_list) + 4200 + + >>> get_size(my_dict, max_depth=3) + 8400 + """ + from collections.abc import Mapping + + # If seen is not provided, initialize an empty set + if seen is None: + seen = set() + # Get the id of the object + obj_id = id(obj) + # Decrease the maximum depth for the next recursion + new_max_depth = max_depth - 1 + # If the object has already been seen or we've reached the maximum recursion depth, return 0 + if obj_id in seen or new_max_depth <= 0: + return 0 + # Get the size of the object + size = sys.getsizeof(obj) + # Add the object's id to the set of seen objects + seen.add(obj_id) + # If the object has a __dict__ attribute, we want to measure its size + if hasattr(obj, "__dict__"): + # Iterate over the Method Resolution Order (MRO) of the class of the object + for cls in obj.__class__.__mro__: + # If the class's __dict__ contains a __dict__ key + if "__dict__" in cls.__dict__: + for k, v in obj.__dict__.items(): + size += get_size(k, new_max_depth, seen) + size += get_size(v, new_max_depth, seen) + break + # If the object is a mapping (like a dictionary), we want to measure the size of its items + if isinstance(obj, Mapping): + with suppress(StopIteration): + k, v = next(iter(obj.items())) + size += (get_size(k, new_max_depth, seen) + get_size(v, new_max_depth, seen)) * len(obj) + # If the object is a container (like a list or tuple) but not a string or bytes-like object + elif isinstance(obj, (list, tuple, set)): + with suppress(StopIteration): + size += get_size(next(iter(obj)), new_max_depth, seen) * len(obj) + # If the object has __slots__, we want to measure the size of the attributes in __slots__ + if hasattr(obj, "__slots__"): + size += sum(get_size(getattr(obj, s), new_max_depth, seen) for s in obj.__slots__ if hasattr(obj, s)) + return size + + +def is_file(f): + """ + Check if a path points to a file. + + Parameters: + f (str): Path to the file. + + Returns: + bool: True if the path is a file, False otherwise. + + Examples: + >>> is_file("/etc/passwd") + True + + >>> is_file("/nonexistent") + False + """ + with suppress(Exception): + return Path(f).is_file() + return False + + +def cloudcheck(ip): + """ + Check whether an IP address belongs to a cloud provider and returns the provider name, type, and subnet. + + Args: + ip (str): The IP address to check. + + Returns: + tuple: A tuple containing provider name (str), provider type (str), and subnet (IPv4Network). 
+
+    Examples:
+        >>> cloudcheck("168.62.20.37")
+        ('Azure', 'cloud', IPv4Network('168.62.0.0/19'))
+    """
+    import cloudcheck as _cloudcheck
+
+    return _cloudcheck.check(ip)
+
+
+def is_async_function(f):
+    """
+    Check if a given function is an asynchronous function.
+
+    Args:
+        f (function): The function to check.
+
+    Returns:
+        bool: True if the function is asynchronous, False otherwise.
+
+    Examples:
+        >>> async def foo():
+        ...     pass
+        >>> is_async_function(foo)
+        True
+    """
+    import inspect
+
+    return inspect.iscoroutinefunction(f)
+
+
+async def execute_sync_or_async(callback, *args, **kwargs):
+    """
+    Execute a function or coroutine, handling either synchronous or asynchronous invocation.
+
+    Args:
+        callback (Union[Callable, Coroutine]): The function or coroutine to execute.
+        *args: Variable-length argument list to pass to the callback.
+        **kwargs: Arbitrary keyword arguments to pass to the callback.
+
+    Returns:
+        Any: The return value from the executed function or coroutine.
+
+    Examples:
+        >>> async def foo_async(x):
+        ...     return x + 1
+        >>> def foo_sync(x):
+        ...     return x + 1
+
+        >>> asyncio.run(execute_sync_or_async(foo_async, 1))
+        2
+
+        >>> asyncio.run(execute_sync_or_async(foo_sync, 1))
+        2
+    """
+    if is_async_function(callback):
+        return await callback(*args, **kwargs)
+    else:
+        return callback(*args, **kwargs)
+
+
+def get_exception_chain(e):
+    """
+    Retrieves the full chain of exceptions leading to the given exception.
+
+    Args:
+        e (BaseException): The exception for which to get the chain.
+
+    Returns:
+        list[BaseException]: List of exceptions in the chain, from the given exception back to the root cause.
+
+    Examples:
+        >>> try:
+        ...     raise ValueError("This is a value error")
+        ... except ValueError as e:
+        ...     exc_chain = get_exception_chain(e)
+        ...     for exc in exc_chain:
+        ...         print(exc)
+        This is a value error
+    """
+    exception_chain = []
+    current_exception = e
+    while current_exception is not None:
+        exception_chain.append(current_exception)
+        current_exception = getattr(current_exception, "__context__", None)
+    return exception_chain
+
+
+def in_exception_chain(e, exc_types):
+    """
+    Given an Exception and a list of Exception types, returns whether any of the specified types are contained anywhere in the Exception chain.
+
+    Args:
+        e (BaseException): The exception to check
+        exc_types (tuple[type] or list[type]): The exception types to look for anywhere in the exception chain
+
+    Returns:
+        bool: Whether any of the specified exception types appear in the chain
+
+    Examples:
+        >>> try:
+        ...     raise ValueError("This is a value error")
+        ... except Exception as e:
+        ...     if not in_exception_chain(e, (KeyboardInterrupt, asyncio.CancelledError)):
+        ...         raise
+    """
+    return any(isinstance(_, exc_types) for _ in get_exception_chain(e))
+
+
+def get_traceback_details(e):
+    """
+    Retrieves detailed information from the traceback of an exception.
+
+    Args:
+        e (BaseException): The exception for which to get traceback details.
+
+    Returns:
+        tuple: A tuple containing filename (str), line number (int), and function name (str) where the exception was raised.
+
+    Examples:
+        >>> try:
+        ...     raise ValueError("This is a value error")
+        ... except ValueError as e:
+        ...     filename, lineno, funcname = get_traceback_details(e)
print(f"File: {filename}, Line: {lineno}, Function: {funcname}") + File: , Line: 2, Function: + """ + import traceback + + tb = traceback.extract_tb(e.__traceback__) + last_frame = tb[-1] # Get the last frame in the traceback (the one where the exception was raised) + filename = last_frame.filename + lineno = last_frame.lineno + funcname = last_frame.name + return filename, lineno, funcname + + +async def cancel_tasks(tasks, ignore_errors=True): + """ + Asynchronously cancels a list of asyncio tasks. + + Args: + tasks (list[Task]): A list of asyncio Task objects to cancel. + ignore_errors (bool, optional): Whether to ignore errors other than asyncio.CancelledError. Defaults to True. + + Examples: + >>> async def main(): + ... task1 = asyncio.create_task(async_function1()) + ... task2 = asyncio.create_task(async_function2()) + ... await cancel_tasks([task1, task2]) + ... + >>> asyncio.run(main()) + + Note: + This function will not cancel the current task that it is called from. + """ + current_task = asyncio.current_task() + tasks = [t for t in tasks if t != current_task] + for task in tasks: + # log.debug(f"Cancelling task: {task}") + task.cancel() + if ignore_errors: + for task in tasks: + try: + await task + except BaseException as e: + if not isinstance(e, asyncio.CancelledError): + import traceback + + log.trace(traceback.format_exc()) + + +def cancel_tasks_sync(tasks): + """ + Synchronously cancels a list of asyncio tasks. + + Args: + tasks (list[Task]): A list of asyncio Task objects to cancel. + + Examples: + >>> loop = asyncio.get_event_loop() + >>> task1 = loop.create_task(some_async_function1()) + >>> task2 = loop.create_task(some_async_function2()) + >>> cancel_tasks_sync([task1, task2]) + + Note: + This function will not cancel the current task from which it is called. + """ + current_task = asyncio.current_task() + for task in tasks: + if task != current_task: + # log.debug(f"Cancelling task: {task}") + task.cancel() + + +def weighted_shuffle(items, weights): + """ + Shuffles a list of items based on their corresponding weights. + + Args: + items (list): The list of items to shuffle. + weights (list): The list of weights corresponding to each item. + + Returns: + list: A new list containing the shuffled items. + + Examples: + >>> items = ['apple', 'banana', 'cherry'] + >>> weights = [0.4, 0.5, 0.1] + >>> weighted_shuffle(items, weights) + ['banana', 'apple', 'cherry'] + >>> weighted_shuffle(items, weights) + ['apple', 'banana', 'cherry'] + >>> weighted_shuffle(items, weights) + ['apple', 'banana', 'cherry'] + >>> weighted_shuffle(items, weights) + ['banana', 'apple', 'cherry'] + + Note: + The sum of all weights does not have to be 1. They will be normalized internally. + """ + # Create a list of tuples where each tuple is (item, weight) + pool = list(zip(items, weights)) + + shuffled_items = [] + + # While there are still items to be chosen... + while pool: + # Normalize weights + total = sum(weight for item, weight in pool) + weights = [weight / total for item, weight in pool] + + # Choose an index based on weight + chosen_index = random.choices(range(len(pool)), weights=weights, k=1)[0] + + # Add the chosen item to the shuffled list + chosen_item, chosen_weight = pool.pop(chosen_index) + shuffled_items.append(chosen_item) + + return shuffled_items + + +def parse_port_string(port_string): + """ + Parses a string containing ports and port ranges into a list of individual ports. 
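+
+    Ranges are inclusive on both ends, and every port, whether standalone or
+    inside a range, must fall within 1-65535.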
+
+    Args:
+        port_string (str): The string containing individual ports and port ranges separated by commas.
+
+    Returns:
+        list: A list of individual ports parsed from the input string.
+
+    Raises:
+        ValueError: If the input string contains invalid ports or port ranges.
+
+    Examples:
+        >>> parse_port_string("22,80,1000-1002")
+        [22, 80, 1000, 1001, 1002]
+
+        >>> parse_port_string("1-2,3-5")
+        [1, 2, 3, 4, 5]
+
+        >>> parse_port_string("invalid")
+        ValueError: Invalid port or port range: invalid
+    """
+    elements = str(port_string).split(",")
+    ports = []
+
+    for element in elements:
+        if element.isdigit():
+            port = int(element)
+            if 1 <= port <= 65535:
+                ports.append(port)
+            else:
+                raise ValueError(f"Invalid port: {element}")
+        elif "-" in element:
+            range_parts = element.split("-")
+            if len(range_parts) != 2 or not all(part.isdigit() for part in range_parts):
+                raise ValueError(f"Invalid port or port range: {element}")
+            start, end = map(int, range_parts)
+            if not (1 <= start < end <= 65535):
+                raise ValueError(f"Invalid port range: {element}")
+            ports.extend(range(start, end + 1))
+        else:
+            raise ValueError(f"Invalid port or port range: {element}")
+
+    return ports
+
+
+async def as_completed(coros):
+    """
+    Async generator that yields completed Tasks as they are completed.
+
+    Args:
+        coros (iterable): An iterable of coroutine objects or asyncio Tasks.
+
+    Yields:
+        asyncio.Task: A Task object that has completed its execution.
+
+    Examples:
+        >>> async def main():
+        ...     async for task in as_completed([coro1(), coro2(), coro3()]):
+        ...         result = task.result()
+        ...         print(f'Task completed with result: {result}')
+
+        >>> asyncio.run(main())
+    """
+    tasks = {coro if isinstance(coro, asyncio.Task) else asyncio.create_task(coro): coro for coro in coros}
+    while tasks:
+        done, _ = await asyncio.wait(tasks.keys(), return_when=asyncio.FIRST_COMPLETED)
+        for task in done:
+            tasks.pop(task)
+            yield task
+
+
+def clean_dns_record(record):
+    """
+    Cleans and formats a given DNS record for further processing.
+
+    This function converts the DNS record to text format if it's not already a string.
+    It also removes any trailing dots and converts the record to lowercase.
+
+    Args:
+        record (str or dns.rdata.Rdata): The DNS record to clean.
+
+    Returns:
+        str: The cleaned and formatted DNS record.
+
+    Examples:
+        >>> clean_dns_record('www.evilcorp.com.')
+        'www.evilcorp.com'
+
+        >>> from dns.rrset import from_text
+        >>> record = from_text('www.evilcorp.com', 3600, 'IN', 'A', '1.2.3.4')[0]
+        >>> clean_dns_record(record)
+        '1.2.3.4'
+    """
+    if not isinstance(record, str):
+        record = str(record.to_text())
+    return str(record).rstrip(".").lower()
+
+
+def truncate_filename(file_path, max_length=255):
+    """
+    Truncate the filename while preserving the file extension to ensure the total path length does not exceed the maximum length.
+
+    Args:
+        file_path (str): The original file path.
+        max_length (int): The maximum allowed length for the total path. Default is 255.
+
+    Returns:
+        pathlib.Path: A new Path object with the truncated filename.
+
+    Raises:
+        ValueError: If the directory path is too long to accommodate any filename within the limit.
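+
+    Note:
+        Only the filename's stem is shortened; the parent directory and the
+        file extension are always preserved as-is.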
+ + Example: + >>> truncate_filename('/path/to/example_long_filename.txt', 20) + PosixPath('/path/to/example.txt') + """ + p = Path(file_path) + directory, stem, suffix = p.parent, p.stem, p.suffix + + max_filename_length = max_length - len(str(directory)) - len(suffix) - 1 # 1 for the '/' separator + + if max_filename_length <= 0: + raise ValueError("The directory path is too long to accommodate any filename within the limit.") + + if len(stem) > max_filename_length: + truncated_stem = stem[:max_filename_length] + else: + truncated_stem = stem + + new_path = directory / (truncated_stem + suffix) + return new_path + + +def get_keys_in_dot_syntax(config): + """Retrieve all keys in an OmegaConf configuration in dot notation. + + This function converts an OmegaConf configuration into a list of keys + represented in dot notation. + + Args: + config (DictConfig): The OmegaConf configuration object. + + Returns: + List[str]: A list of keys in dot notation. + + Examples: + >>> config = OmegaConf.create({ + ... "web": { + ... "test": True + ... }, + ... "db": { + ... "host": "localhost", + ... "port": 5432 + ... } + ... }) + >>> get_keys_in_dot_syntax(config) + ['web.test', 'db.host', 'db.port'] + """ + from omegaconf import OmegaConf + + container = OmegaConf.to_container(config, resolve=True) + keys = [] + + def recursive_keys(d, parent_key=""): + for k, v in d.items(): + full_key = f"{parent_key}.{k}" if parent_key else k + if isinstance(v, dict): + recursive_keys(v, full_key) + else: + keys.append(full_key) + + recursive_keys(container) + return keys + + +def filter_dict(d, *key_names, fuzzy=False, exclude_keys=None, _prev_key=None): + """ + Recursively filter a dictionary based on key names. + + Args: + d (dict): The input dictionary. + *key_names: Names of keys to filter for. + fuzzy (bool): Whether to perform fuzzy matching on keys. + exclude_keys (list, None): List of keys to be excluded from the final dict. + _prev_key (str, None): For internal recursive use; the previous key in the hierarchy. + + Returns: + dict: A dictionary containing only the keys specified in key_names. + + Examples: + >>> filter_dict({"key1": "test", "key2": "asdf"}, "key2") + {"key2": "asdf"} + >>> filter_dict({"key1": "test", "key2": {"key3": "asdf"}}, "key1", "key3", exclude_keys="key2") + {'key1': 'test'} + """ + if exclude_keys is None: + exclude_keys = [] + if isinstance(exclude_keys, str): + exclude_keys = [exclude_keys] + ret = {} + if isinstance(d, dict): + for key in d: + if key in key_names or (fuzzy and any(k in key for k in key_names)): + if not any(k in exclude_keys for k in [key, _prev_key]): + ret[key] = copy.deepcopy(d[key]) + elif isinstance(d[key], list) or isinstance(d[key], dict): + child = filter_dict(d[key], *key_names, fuzzy=fuzzy, _prev_key=key, exclude_keys=exclude_keys) + if child: + ret[key] = child + return ret + + +def clean_dict(d, *key_names, fuzzy=False, exclude_keys=None, _prev_key=None): + """ + Recursively clean unwanted keys from a dictionary. + Useful for removing secrets from a config. + + Args: + d (dict): The input dictionary. + *key_names: Names of keys to remove. + fuzzy (bool): Whether to perform fuzzy matching on keys. + exclude_keys (list, None): List of keys to be excluded from removal. + _prev_key (str, None): For internal recursive use; the previous key in the hierarchy. + + Returns: + dict: A dictionary cleaned of the keys specified in key_names. 
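+
+    Examples:
+        >>> # hypothetical secret-scrubbing, mirroring filter_dict above
+        >>> clean_dict({"api_key": "deadbeef", "visibility": "public"}, "api_key")
+        {'visibility': 'public'}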
+ + """ + if exclude_keys is None: + exclude_keys = [] + if isinstance(exclude_keys, str): + exclude_keys = [exclude_keys] + d = copy.deepcopy(d) + if isinstance(d, dict): + for key, val in list(d.items()): + if key in key_names or (fuzzy and any(k in key for k in key_names)): + if _prev_key not in exclude_keys: + d.pop(key) + continue + d[key] = clean_dict(val, *key_names, fuzzy=fuzzy, _prev_key=key, exclude_keys=exclude_keys) + return d + + +top_ports_cache = None + + +def top_tcp_ports(n, as_string=False): + """ + Returns the top *n* TCP ports as evaluated by nmap + """ + top_ports_file = Path(__file__).parent.parent.parent / "wordlists" / "top_open_ports_nmap.txt" + + global top_ports_cache + if top_ports_cache is None: + # Read the open ports from the file + with open(top_ports_file, "r") as f: + top_ports_cache = [int(line.strip()) for line in f] + + # If n is greater than the length of the ports list, add remaining ports from range(1, 65536) + unique_ports = set(top_ports_cache) + top_ports_cache.extend([port for port in range(1, 65536) if port not in unique_ports]) + + top_ports = top_ports_cache[:n] + if as_string: + return ",".join([str(s) for s in top_ports]) + return top_ports + + +class SafeDict(dict): + def __missing__(self, key): + return "{" + key + "}" + + +def safe_format(s, **kwargs): + """ + Format string while ignoring unused keys (prevents KeyError) + """ + return s.format_map(SafeDict(kwargs)) + + +def get_python_constraints(): + req_regex = re.compile(r"([^(]+)\s*\((.*)\)", re.IGNORECASE) + + def clean_requirement(req_string): + # Extract package name and version constraints from format like "package (>=1.0,<2.0)" + match = req_regex.match(req_string) + if match: + name, constraints = match.groups() + return f"{name.strip()}{constraints}" + + return req_string + + from importlib.metadata import distribution + + dist = distribution("bbot") + return [clean_requirement(r) for r in dist.requires] diff --git a/bbot/core/helpers/modules.py b/bbot/core/helpers/modules.py deleted file mode 100644 index a570816339..0000000000 --- a/bbot/core/helpers/modules.py +++ /dev/null @@ -1,243 +0,0 @@ -import ast -import sys -import importlib -from pathlib import Path -from omegaconf import OmegaConf -from contextlib import suppress - -from .misc import list_files, sha1, search_dict_by_key, search_format_dict - - -class ModuleLoader: - def __init__(self): - self._preloaded = {} - self._modules = {} - self._configs = {} - - def file_filter(self, file): - return file.suffix.lower() == ".py" and file.stem not in ["base", "__init__"] - - def preload(self, module_dir): - """ - Preload modules from a specified directory - """ - module_dir = Path(module_dir) - for module_file in list_files(module_dir, filter=self.file_filter): - if module_dir.name == "modules": - namespace = f"bbot.modules" - else: - namespace = f"bbot.modules.{module_dir.name}" - try: - preloaded = self.preload_module(module_file) - module_type = "scan" - if module_dir.name in ("output", "internal"): - module_type = str(module_dir.name) - elif module_dir.name not in ("modules"): - preloaded["flags"] = list(set(preloaded["flags"] + [module_dir.name])) - preloaded["type"] = module_type - preloaded["namespace"] = namespace - config = OmegaConf.create(preloaded.get("config", {})) - self._configs[module_file.stem] = config - self._preloaded[module_file.stem] = preloaded - except Exception: - import traceback - - print(f"[CRIT] Error preloading {module_file}\n\n{traceback.format_exc()}") - print(f"[CRIT] Error in 
{module_file.name}") - sys.exit(1) - - return self.preloaded - - def preloaded(self, type=None): - preloaded = {} - if type is not None: - preloaded = {k: v for k, v in self._preloaded.items() if self.check_type(k, type)} - else: - preloaded = dict(self._preloaded) - return preloaded - - def configs(self, type=None): - configs = {} - if type is not None: - configs = {k: v for k, v in self._configs.items() if self.check_type(k, type)} - else: - configs = dict(self._configs) - return OmegaConf.create(configs) - - def find_and_replace(self, **kwargs): - self._preloaded = search_format_dict(self._preloaded, **kwargs) - - def check_type(self, module, type): - return self._preloaded[module]["type"] == type - - def preload_module(self, module_file): - watched_events = [] - produced_events = [] - flags = [] - meta = {} - pip_deps = [] - shell_deps = [] - apt_deps = [] - ansible_tasks = [] - python_code = open(module_file).read() - # take a hash of the code so we can keep track of when it changes - module_hash = sha1(python_code).hexdigest() - parsed_code = ast.parse(python_code) - config = {} - for root_element in parsed_code.body: - # look for classes - if type(root_element) == ast.ClassDef: - for class_attr in root_element.body: - # class attributes that are dictionaries - if type(class_attr) == ast.Assign and type(class_attr.value) == ast.Dict: - # module options - if any([target.id == "options" for target in class_attr.targets]): - config.update(ast.literal_eval(class_attr.value)) - # module metadata - if any([target.id == "meta" for target in class_attr.targets]): - meta = ast.literal_eval(class_attr.value) - - # class attributes that are lists - if type(class_attr) == ast.Assign and type(class_attr.value) == ast.List: - # flags - if any([target.id == "flags" for target in class_attr.targets]): - for flag in class_attr.value.elts: - if type(flag.value) == str: - flags.append(flag.value) - # watched events - if any([target.id == "watched_events" for target in class_attr.targets]): - for event_type in class_attr.value.elts: - if type(event_type.value) == str: - watched_events.append(event_type.value) - # produced events - if any([target.id == "produced_events" for target in class_attr.targets]): - for event_type in class_attr.value.elts: - if type(event_type.value) == str: - produced_events.append(event_type.value) - # python dependencies - if any([target.id == "deps_pip" for target in class_attr.targets]): - for python_dep in class_attr.value.elts: - if type(python_dep.value) == str: - pip_deps.append(python_dep.value) - # apt dependencies - elif any([target.id == "deps_apt" for target in class_attr.targets]): - for apt_dep in class_attr.value.elts: - if type(apt_dep.value) == str: - apt_deps.append(apt_dep.value) - # bash dependencies - elif any([target.id == "deps_shell" for target in class_attr.targets]): - for shell_dep in class_attr.value.elts: - shell_deps.append(ast.literal_eval(shell_dep)) - # ansible playbook - elif any([target.id == "deps_ansible" for target in class_attr.targets]): - ansible_tasks = ast.literal_eval(class_attr.value) - preloaded_data = { - "watched_events": watched_events, - "produced_events": produced_events, - "flags": flags, - "meta": meta, - "config": config, - "hash": module_hash, - "deps": {"pip": pip_deps, "shell": shell_deps, "apt": apt_deps, "ansible": ansible_tasks}, - "sudo": len(apt_deps) > 0, - } - if any(x == True for x in search_dict_by_key("become", ansible_tasks)) or any( - x == True for x in search_dict_by_key("ansible_become", ansible_tasks) - ): 
- preloaded_data["sudo"] = True - return preloaded_data - - def load_modules(self, module_names): - modules = {} - for module_name in module_names: - module = self.load_module(module_name) - modules[module_name] = module - self._modules[module_name] = module - return modules - - def load_module(self, module_name): - namespace = self._preloaded[module_name]["namespace"] - import_path = f"{namespace}.{module_name}" - module_variables = importlib.import_module(import_path, "bbot") - # for every top-level variable in the .py file - for variable in module_variables.__dict__.keys(): - # get its value - value = getattr(module_variables, variable) - with suppress(AttributeError): - # if it has watched_events and produced_events - if all( - type(a) == list - for a in (getattr(value, "watched_events", None), getattr(value, "produced_events", None)) - ): - # and if its variable name matches its filename - if value.__name__.lower() == module_name.lower(): - value._name = module_name - # then we have a module - return value - - def recommend_dependencies(self, modules): - """ - Returns a dictionary containing missing dependencies and their suggested resolutions - """ - resolve_choices = {} - # step 1: build a dictionary containing event types and their associated modules - # {"IP_ADDRESS": set("naabu", "ipneighbor", ...)} - watched = {} - produced = {} - for modname in modules: - preloaded = self._preloaded.get(modname) - if preloaded: - for event_type in preloaded.get("watched_events", []): - self.add_or_create(watched, event_type, modname) - for event_type in preloaded.get("produced_events", []): - self.add_or_create(produced, event_type, modname) - watched_all = {} - produced_all = {} - for modname, preloaded in self.preloaded().items(): - if preloaded: - for event_type in preloaded.get("watched_events", []): - self.add_or_create(watched_all, event_type, modname) - for event_type in preloaded.get("produced_events", []): - self.add_or_create(produced_all, event_type, modname) - - # step 2: check to see if there are missing dependencies - for modname in modules: - preloaded = self._preloaded.get(modname) - module_type = preloaded.get("type", "unknown") - if module_type != "scan": - continue - watched_events = preloaded.get("watched_events", []) - missing_deps = {e: not self.check_dependency(e, modname, produced) for e in watched_events} - if all(missing_deps.values()): - for event_type in watched_events: - choices = produced_all.get(event_type, []) - choices = set(choices) - with suppress(KeyError): - choices.remove(modname) - if event_type not in resolve_choices: - resolve_choices[event_type] = dict() - deps = resolve_choices[event_type] - self.add_or_create(deps, "required_by", modname) - for c in choices: - choice_type = self._preloaded.get(c, {}).get("type", "unknown") - if choice_type == "scan": - self.add_or_create(deps, "recommended", c) - - return resolve_choices - - def check_dependency(self, event_type, modname, produced): - if event_type not in produced: - return False - if produced[event_type] == {modname}: - return False - return True - - @staticmethod - def add_or_create(d, k, *items): - try: - d[k].update(set(items)) - except KeyError: - d[k] = set(items) - - -module_loader = ModuleLoader() diff --git a/bbot/core/helpers/names_generator.py b/bbot/core/helpers/names_generator.py index 2087dfe5dd..d8c86c4f46 100644 --- a/bbot/core/helpers/names_generator.py +++ b/bbot/core/helpers/names_generator.py @@ -2,16 +2,23 @@ adjectives = [ "abnormal", + "accidental", + "acoustic", "acrophobic", - 
"adhesive", "adorable", "adversarial", "affectionate", "aggravated", + "aggrieved", + "agoraphobic", + "almighty", "anal", "atrocious", + "autistic", "awkward", "baby", + "begrudged", + "benevolent", "bewildered", "bighuge", "black", @@ -22,7 +29,10 @@ "childish", "chiseled", "cold", + "condescending", + "considerate", "constipated", + "contentious", "corrupted", "cosmic", "crafty", @@ -35,10 +45,14 @@ "cute", "dark", "dastardly", + "decrypted", "deep", + "delicious", + "demented", "demonic", - "depressed", "depraved", + "depressed", + "deranged", "derogatory", "despicable", "devilish", @@ -46,37 +60,45 @@ "diabolic", "diabolical", "difficult", + "dilapidated", "dismal", + "distilled", "disturbed", "dramatic", "drunk", "effeminate", + "effervescent", + "elden", "eldritch", "embarrassed", + "encrypted", "enigmatic", "enlightened", "esoteric", "ethereal", "euphoric", "evil", + "expired", "exquisite", "extreme", - "feathery", "ferocious", "fiendish", "fierce", + "flamboyant", "fleecy", "flirtatious", + "flustered", "foreboding", "frenetic", "frolicking", - "frothy", "furry", "fuzzy", - "gay", "gentle", "giddy", + "glowering", "glutinous", + "golden", + "gothic", "grievous", "gummy", "hallucinogenic", @@ -89,7 +111,10 @@ "hellish", "hideous", "hysterical", + "imaginary", + "immense", "immoral", + "impulsive", "incomprehensible", "inebriated", "inexplicable", @@ -100,6 +125,7 @@ "insidious", "insightful", "insolent", + "insufferable", "intelligent", "intensified", "intensive", @@ -107,32 +133,41 @@ "inventive", "irritable", "large", + "liquid", "loveable", "lovely", + "lucid", "malevolent", + "malfunctioning", "malicious", "manic", "masochistic", "medicated", "mediocre", "melodramatic", + "mighty", "moist", + "molten", "monstrous", "muscular", "mushy", "mysterious", + "nascent", "naughty", "nefarious", "negligent", "neurotic", - "normal", "nihilistic", + "normal", + "overattached", "overcompensating", + "overenthusiastic", "overmedicated", "overwhelming", "overzealous", "paranoid", "pasty", + "peckish", "pedantic", "pernicious", "perturbed", @@ -149,17 +184,18 @@ "premature", "profound", "promiscuous", - "psychic", "psychedelic", + "psychic", "puffy", "pure", - "queer", "questionable", "rabid", "raging", - "raving", "rambunctious", + "rapid_unscheduled", + "raving", "reckless", + "reductive", "ripped", "sadistic", "satanic", @@ -178,6 +214,7 @@ "sneaky", "soft", "sophisticated", + "spicy", "spiteful", "squishy", "steamy", @@ -185,17 +222,18 @@ "stoned", "strained", "strenuous", + "stricken", + "stubborn", "stuffed", "stumped", "subtle", - "suggestive", - "suicidal", "sudden", + "suggestive", "sunburned", "surreal", "suspicious", - "sycophantic", "sweet", + "sycophantic", "tense", "terrible", "terrific", @@ -204,10 +242,10 @@ "ticklish", "tiny", "tricky", - "tufty", "twitchy", "ugly", "unabated", + "unchained", "unexplained", "unhinged", "unholy", @@ -215,7 +253,9 @@ "unmedicated", "unmelted", "unmitigated", + "unrelenting", "unrestrained", + "unscheduled", "unworthy", "utmost", "vehement", @@ -234,12 +274,14 @@ "wispy", "witty", "woolly", + "zesty", ] names = [ "aaron", "abigail", "adam", + "adeem", "alan", "albert", "alex", @@ -251,6 +293,7 @@ "alyssa", "amanda", "amber", + "amir", "amy", "andrea", "andrew", @@ -267,8 +310,11 @@ "ashley", "audrey", "austin", + "azathoth", "baggins", + "bailey", "barbara", + "bart", "bellatrix", "benjamin", "betty", @@ -278,6 +324,7 @@ "bobby", "bombadil", "bonnie", + "bonson", "boromir", "bradley", "brandon", @@ -288,6 +335,7 @@ "brittany", "bruce", "bryan", + 
"caitlyn", "caleb", "cameron", "carl", @@ -304,6 +352,7 @@ "christine", "christopher", "cindy", + "ciri", "clara", "clarence", "cody", @@ -311,12 +360,16 @@ "courtney", "craig", "crystal", + "cthulu", "curtis", "cynthia", + "dagon", "dale", + "dandelion", "daniel", "danielle", "danny", + "data", "david", "dawn", "deborah", @@ -367,6 +420,7 @@ "evelyn", "faramir", "florence", + "fox", "frances", "francis", "frank", @@ -377,8 +431,10 @@ "galadriel", "gandalf", "gary", + "geordi", "george", "gerald", + "geralt", "gimli", "gladys", "glenn", @@ -388,7 +444,9 @@ "gollum", "grace", "gregory", + "gus", "hagrid", + "hank", "hannah", "harold", "harry", @@ -397,7 +455,9 @@ "helen", "henry", "hermione", + "homer", "howard", + "hunter", "irene", "isaac", "isabella", @@ -410,9 +470,12 @@ "jane", "janet", "janice", + "jaskier", "jasmine", "jason", + "jayce", "jean", + "jean-luc", "jeffrey", "jennifer", "jeremy", @@ -420,6 +483,7 @@ "jesse", "jessica", "jimmy", + "jinx", "joan", "joe", "joel", @@ -448,9 +512,11 @@ "kelly", "kenneth", "kenobi", + "kerry", "kevin", "kimberly", "kyle", + "kylie", "lantern", "larry", "laura", @@ -468,10 +534,13 @@ "lori", "louis", "louise", + "lucius", "luis", "luke", "lupin", "madison", + "magnus", + "marcus", "margaret", "maria", "marie", @@ -489,19 +558,24 @@ "melvin", "merry", "michael", + "micheal", "michelle", "mildred", + "milhouse", "monica", "nancy", "natalie", "nathan", "nathaniel", "nazgul", + "ned", + "nelson", "nicholas", "nicole", "noah", "norma", "norman", + "nyarlathotep", "obama", "olivia", "padme", @@ -516,6 +590,7 @@ "phillip", "phyllis", "pippin", + "powder", "rachel", "radagast", "ralph", @@ -524,6 +599,7 @@ "rebecca", "richard", "rita", + "roach", "robert", "robin", "rodney", @@ -531,6 +607,7 @@ "ron", "ronald", "rose", + "ross", "roy", "ruby", "russell", @@ -551,6 +628,7 @@ "shawn", "shelob", "shirley", + "silco", "sirius", "skywalker", "snape", @@ -560,11 +638,13 @@ "stephen", "steven", "susan", + "syrina", "tammy", "taylor", "teresa", "terry", "theoden", + "theon", "theresa", "thomas", "tiffany", @@ -575,13 +655,17 @@ "tracy", "travis", "treebeard", + "triss", "tyler", "tyrell", "vader", "valerie", + "vander", "vanessa", + "vi", "victor", "victoria", + "viktor", "vincent", "virginia", "voldemort", @@ -592,14 +676,21 @@ "wendy", "william", "willie", + "worf", "wormtongue", + "xavier", + "yennefer", "yoda", + "zach", "zachary", ] def random_name(): - name = f"{random.choice(adjectives)}_{random.choice(names)}" - if name == "white_lantern": - name = "black_lantern" - return name + name = random.choice(names) + adjective = random.choice(adjectives) + if adjective == "unchained": + scan_name = f"{name}_{adjective}" + else: + scan_name = f"{adjective}_{name}" + return scan_name diff --git a/bbot/core/helpers/ntlm.py b/bbot/core/helpers/ntlm.py index e4d9cd1ca0..9d66b3ea7e 100644 --- a/bbot/core/helpers/ntlm.py +++ b/bbot/core/helpers/ntlm.py @@ -5,7 +5,7 @@ import logging import collections -from bbot.core.errors import NTLMError +from bbot.errors import NTLMError log = logging.getLogger("bbot.core.helpers.ntlm") @@ -38,7 +38,7 @@ def __init__(self, pos_tup, raw): def decode_ntlm_challenge(st): hdr_tup = struct.unpack(">> rate_limiter = RateLimiter(100, "web") + >>> async def rate_limited_request(url): + ... async with rate_limiter: + ... 
return await request(url)
+    """
+
+    def __init__(self, rate, name):
+        self.rate = rate / 10
+        self.name = name
+        self.log_interval = 10
+        self.current_timestamp = time.time()
+        self.count = 0
+        self._lock = None
+        self.last_notification = None
+
+    @property
+    def lock(self):
+        if self._lock is None:
+            self._lock = asyncio.Lock()
+        return self._lock
+
+    async def __aenter__(self):
+        async with self.lock:
+            while True:
+                if time.time() - self.current_timestamp >= 0.1:
+                    # A new 0.1 second interval has begun, reset the count and timestamp
+                    self.current_timestamp = time.time()
+                    self.count = 1
+                    break
+                elif self.count < self.rate:
+                    # Still within the rate limit for the current 0.1 second interval
+                    self.count += 1
+                    break
+                else:
+                    now = time.time()
+                    if self.last_notification is None or now - self.last_notification >= self.log_interval:
+                        log.verbose(f"{self.name} rate limit threshold ({self.rate * 10:.1f}/s) reached")
+                        self.last_notification = now
+                    # Rate limit for the current 0.1 second interval has been reached, wait until the next interval
+                    await asyncio.sleep(self.current_timestamp + 0.1 - time.time())
+
+        return self
+
+    async def __aexit__(self, exc_type, exc_val, exc_tb):
+        pass
diff --git a/bbot/core/helpers/regex.py b/bbot/core/helpers/regex.py
new file mode 100644
index 0000000000..044684b83e
--- /dev/null
+++ b/bbot/core/helpers/regex.py
@@ -0,0 +1,109 @@
+import asyncio
+import regex as re
+from . import misc
+
+
+class RegexHelper:
+    """
+    Class for misc CPU-intensive regex operations
+
+    Offloads regex processing to other CPU cores via GIL release + thread pool
+
+    For quick, one-off regexes, you don't need to use this helper.
+    Only use this helper if you're searching large bodies of text
+    or if your regex is CPU-intensive
+    """
+
+    def __init__(self, parent_helper):
+        self.parent_helper = parent_helper
+
+    def ensure_compiled_regex(self, r):
+        """
+        Make sure a regex has been compiled
+        """
+        if not isinstance(r, re.Pattern):
+            raise ValueError("Regex must be compiled first!")
+
+    def compile(self, *args, **kwargs):
+        return re.compile(*args, **kwargs)
+
+    async def search(self, compiled_regex, *args, **kwargs):
+        self.ensure_compiled_regex(compiled_regex)
+        return await self.parent_helper.run_in_executor(compiled_regex.search, *args, **kwargs)
+
+    async def sub(self, compiled_regex, *args, **kwargs):
+        self.ensure_compiled_regex(compiled_regex)
+        return await self.parent_helper.run_in_executor(compiled_regex.sub, *args, **kwargs)
+
+    async def findall(self, compiled_regex, *args, **kwargs):
+        self.ensure_compiled_regex(compiled_regex)
+        return await self.parent_helper.run_in_executor(compiled_regex.findall, *args, **kwargs)
+
+    async def findall_multi(self, compiled_regexes, *args, threads=10, **kwargs):
+        """
+        Same as findall() but with multiple regexes
+        """
+        if not isinstance(compiled_regexes, dict):
+            raise ValueError('compiled_regexes must be a dictionary like this: {"regex_name": <compiled_regex>}')
+        for v in compiled_regexes.values():
+            self.ensure_compiled_regex(v)
+
+        tasks = {}
+
+        def new_task(regex_name, r):
+            task = self.parent_helper.run_in_executor(r.findall, *args, **kwargs)
+            tasks[task] = regex_name
+
+        compiled_regexes = dict(compiled_regexes)
+        for _ in range(threads):  # Start initial batch of tasks
+            if compiled_regexes:  # Ensure there are regexes left to process
+                new_task(*compiled_regexes.popitem())
+
+        while tasks:  # While there are tasks pending
+            # Wait for the first task to complete
+            done, pending = await asyncio.wait(tasks, return_when=asyncio.FIRST_COMPLETED)
+
+            for task in done:
+                result = task.result()
+                regex_name = tasks.pop(task)
+                yield (regex_name, result)
+
+                if compiled_regexes:  # Start a new task for each one completed, if regexes remain
+                    new_task(*compiled_regexes.popitem())
+
+    async def finditer(self, compiled_regex, *args, **kwargs):
+        self.ensure_compiled_regex(compiled_regex)
+        return await self.parent_helper.run_in_executor(self._finditer, compiled_regex, *args, **kwargs)
+
+    async def finditer_multi(self, compiled_regexes, *args, **kwargs):
+        """
+        Same as finditer() but with multiple regexes
+        """
+        for r in compiled_regexes:
+            self.ensure_compiled_regex(r)
+        return await self.parent_helper.run_in_executor(self._finditer_multi, compiled_regexes, *args, **kwargs)
+
+    def _finditer_multi(self, compiled_regexes, *args, **kwargs):
+        matches = []
+        for r in compiled_regexes:
+            for m in r.finditer(*args, **kwargs):
+                matches.append(m)
+        return matches
+
+    def _finditer(self, compiled_regex, *args, **kwargs):
+        return list(compiled_regex.finditer(*args, **kwargs))
+
+    async def extract_params_html(self, *args, **kwargs):
+        return await self.parent_helper.run_in_executor(misc.extract_params_html, *args, **kwargs)
+
+    async def extract_emails(self, *args, **kwargs):
+        return await self.parent_helper.run_in_executor(misc.extract_emails, *args, **kwargs)
+
+    async def search_dict_values(self, *args, **kwargs):
+        def _search_dict_values(*_args, **_kwargs):
+            return list(misc.search_dict_values(*_args, **_kwargs))
+
+        return await self.parent_helper.run_in_executor(_search_dict_values, *args, **kwargs)
+
+    async def recursive_decode(self, *args, **kwargs):
+        return await self.parent_helper.run_in_executor(misc.recursive_decode, *args, **kwargs)
diff --git a/bbot/core/helpers/regexes.py b/bbot/core/helpers/regexes.py
index 3df82e2604..6a0a27456e 100644
--- a/bbot/core/helpers/regexes.py
+++ b/bbot/core/helpers/regexes.py
@@ -1,4 +1,4 @@
-import re
+import regex as re
 from collections import OrderedDict
 
 # for extracting words from strings
@@ -16,21 +16,68 @@
     ]
 ]
 
-
 word_regex = re.compile(r"[^\d\W_]+")
 word_num_regex = re.compile(r"[^\W_]+")
 num_regex = re.compile(r"\d+")
 
-_ipv6_regex = r"[A-F0-9:]*:[A-F0-9:]*:[A-F0-9:]*"
+
+_ipv4_regex = r"(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)(?:\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)){3}"
+ipv4_regex = re.compile(_ipv4_regex, re.I)
+
+# IPv6 is complicated, so we have to accommodate alternative patterns,
+# :(:[A-F0-9]{1,4}){1,7}                  == ::1, ::ffff:1
+# ([A-F0-9]{1,4}:){1,7}:                  == 2001::, 2001:db8::, 2001:db8:0:1:2:3::
+# ([A-F0-9]{1,4}:){1,6}:([A-F0-9]{1,4})   == 2001::1, 2001:db8::1, 2001:db8:0:1:2:3::1
+# ([A-F0-9]{1,4}:){7,7}([A-F0-9]{1,4})    == 1:1:1:1:1:1:1:1, ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff
+
+_ipv6_regex = r"(:(:[A-F0-9]{1,4}){1,7}|([A-F0-9]{1,4}:){1,7}:|([A-F0-9]{1,4}:){1,6}:([A-F0-9]{1,4})|([A-F0-9]{1,4}:){7,7}([A-F0-9]{1,4}))"
 ipv6_regex = re.compile(_ipv6_regex, re.I)
-_dns_name_regex = r"(([\w-]+)\.)+([a-z0-9]{2,20})"
-_hostname_regex = re.compile(r"^[\w-]+$")
-_email_regex = r"(?:[a-zA-Z0-9][\w\-\.\+]{,100})@(?:[a-zA-Z0-9_][\w\-\._]{,100})\.(?:[a-zA-Z]{2,8})"
+
+_ip_range_regexes = (
+    _ipv4_regex + r"\/[0-9]{1,2}",
+    _ipv6_regex + r"\/[0-9]{1,3}",
+)
+ip_range_regexes = [re.compile(r, re.I) for r in _ip_range_regexes]
+
+# all dns names including IP addresses and bare hostnames (e.g. "localhost")
+_dns_name_regex = r"(?:\w(?:[\w-]{0,100}\w)?\.?)+(?:[xX][nN]--)?[^\W_]{1,63}\.?"
+# dns names with periods (e.g. 
"www.example.com") +_dns_name_regex_with_period = r"(?:\w(?:[\w-]{0,100}\w)?\.)+(?:[xX][nN]--)?[^\W_]{1,63}\.?" + +dns_name_extraction_regex = re.compile(_dns_name_regex_with_period, re.I) +dns_name_validation_regex = re.compile(r"^" + _dns_name_regex + r"$", re.I) + +_email_regex = r"(?:[^\W_][\w\-\.\+']{,100})@" + _dns_name_regex email_regex = re.compile(_email_regex, re.I) +_ptr_regex = r"(?:[0-9]{1,3}[-_\.]){3}[0-9]{1,3}" +ptr_regex = re.compile(_ptr_regex) +# uuid regex +_uuid_regex = r"[0-9a-f]{8}\b-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-\b[0-9a-f]{12}" +uuid_regex = re.compile(_uuid_regex, re.I) +# event uuid regex +_event_uuid_regex = r"[0-9A-Z_]+:[0-9a-f]{8}\b-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-\b[0-9a-f]{12}" +event_uuid_regex = re.compile(_event_uuid_regex, re.I) + +_open_port_regexes = ( + _dns_name_regex + r":[0-9]{1,5}", + r"\[" + _ipv6_regex + r"\]:[0-9]{1,5}", +) +open_port_regexes = [re.compile(r, re.I) for r in _open_port_regexes] + +_url_regexes = ( + r"https?://" + _dns_name_regex + r"(?::[0-9]{1,5})?(?:(?:/|\?).*)?", + r"https?://\[" + _ipv6_regex + r"\](?::[0-9]{1,5})?(?:(?:/|\?).*)?", +) +url_regexes = [re.compile(r, re.I) for r in _url_regexes] + +_double_slash_regex = r"/{2,}" +double_slash_regex = re.compile(_double_slash_regex) + +# event type regexes, used throughout BBOT for autodetection of event types, validation, and excavation. event_type_regexes = OrderedDict( - [ + ( (k, tuple(re.compile(r, re.I) for r in regexes)) - for k, regexes in [ + for k, regexes in ( ( "DNS_NAME", (r"^" + _dns_name_regex + r"$",), @@ -40,23 +87,68 @@ (r"^" + _email_regex + r"$",), ), ( - "OPEN_TCP_PORT", + "IP_ADDRESS", ( - r"^((?:[A-Z0-9_]|[A-Z0-9_][A-Z0-9\-_]*[A-Z0-9_])[\.]?)+(?:[A-Z0-9_][A-Z0-9\-_]*[A-Z0-9_]|[A-Z0-9_]):[0-9]{1,5}$", - r"^\[" + _ipv6_regex + r"\]:[0-9]{1,5}$", + r"^" + _ipv4_regex + r"$", + r"^" + _ipv6_regex + r"$", ), ), + ( + "IP_RANGE", + tuple(r"^" + r + r"$" for r in _ip_range_regexes), + ), + ( + "OPEN_TCP_PORT", + tuple(r"^" + r + r"$" for r in _open_port_regexes), + ), ( "URL", - ( - r"https?://((?:[A-Z0-9_]|[A-Z0-9_][A-Z0-9\-_]*[A-Z0-9_])[\.]?)+(?:[A-Z0-9_][A-Z0-9\-_]*[A-Z0-9_]|[A-Z0-9_])(?::[0-9]{1,5})?.*$", - r"https?://\[" + _ipv6_regex + r"\](?::[0-9]{1,5})?.*$", - ), + tuple(r"^" + r + r"$" for r in _url_regexes), ), - ] - ] + ) + ) ) -event_id_regex = re.compile(r"[0-9a-f]{40}:[A-Z0-9_]+") -dns_name_regex = re.compile(_dns_name_regex, re.I) scan_name_regex = re.compile(r"[a-z]{3,20}_[a-z]{3,20}") + + +# For use with excavate parameters extractor +input_tag_regex = re.compile( + r"]+?name=[\"\']?([\.$\w]+)[\"\']?(?:[^>]*?value=[\"\']([=+\/\w]*)[\"\'])?[^>]*>" +) +jquery_get_regex = re.compile(r"url:\s?[\"\'].+?\?(\w+)=") +jquery_post_regex = re.compile(r"\$.post\([\'\"].+[\'\"].+\{(.+)\}") +a_tag_regex = re.compile(r"]*href=[\"\']([^\"\'?>]*)\?([^&\"\'=]+)=([^&\"\'=]+)") +img_tag_regex = re.compile(r"]*src=[\"\']([^\"\'?>]*)\?([^&\"\'=]+)=([^&\"\'=]+)") +get_form_regex = re.compile( + r"]+(?:action=[\"']?([^\s\'\"]+)[\"\']?)?[^>]*method=[\"']?[gG][eE][tT][\"']?[^>]*>([\s\S]*?)<\/form>", + re.DOTALL, +) +post_form_regex = re.compile( + r"]+(?:action=[\"']?([^\s\'\"]+)[\"\']?)?[^>]*method=[\"']?[pP][oO][sS][tT][\"']?[^>]*>([\s\S]*?)<\/form>", + re.DOTALL, +) +select_tag_regex = re.compile( + r"]+?name=[\"\']?(\w+)[\"\']?[^>]*>(?:\s*]*?value=[\"\'](\w*)[\"\']?[^>]*>)?" 
+) +textarea_tag_regex = re.compile( + r']*\bname=["\']?(\w+)["\']?[^>]*>(.*?)', re.IGNORECASE | re.DOTALL +) +tag_attribute_regex = re.compile(r"<[^>]*(?:href|action|src)\s*=\s*[\"\']?(?!mailto:)([^\s\'\"\>]+)[\"\']?[^>]*>") + +valid_netloc = r"[^\s!@#$%^&()=/?\\'\";~`<>]+" + +_split_host_port_regex = r"(?:(?P[a-z0-9]{1,20})://)?(?:[^?]*@)?(?P" + valid_netloc + ")" +split_host_port_regex = re.compile(_split_host_port_regex, re.I) + +_extract_open_port_regex = r"(?:(?:\[([0-9a-f:]+)\])|([^\s:]+))(?::(\d{1,5}))?" +extract_open_port_regex = re.compile(_extract_open_port_regex) + +_extract_host_regex = r"(?:[a-z0-9]{1,20}://)?(?:[^?]*@)?(" + valid_netloc + ")" +extract_host_regex = re.compile(_extract_host_regex, re.I) + +# for use in recursive_decode() +encoded_regex = re.compile(r"%[0-9a-fA-F]{2}|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|\\[ntrbv]") +backslash_regex = re.compile(r"(?P\\+)(?P[ntrvb])") + +uuid_regex = re.compile(r"[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}") diff --git a/bbot/core/helpers/threadpool.py b/bbot/core/helpers/threadpool.py deleted file mode 100644 index e9ae8323cd..0000000000 --- a/bbot/core/helpers/threadpool.py +++ /dev/null @@ -1,94 +0,0 @@ -import logging -import threading -from time import sleep - -log = logging.getLogger("bbot.core.helpers.threadpool") - -from .cache import CacheDict -from ...core.errors import ScanCancelledError - - -class ThreadPoolWrapper: - """ - Layers more granular control overtop of a shared thread pool - Allows setting lower thread limits for modules, etc. - """ - - def __init__(self, executor, max_workers=None): - self.executor = executor - self.max_workers = max_workers - self.futures = set() - self._future_lock = threading.Lock() - self._submit_task_lock = threading.Lock() - - def submit_task(self, callback, *args, **kwargs): - with self._submit_task_lock: - if self.max_workers is not None: - while self.num_tasks > self.max_workers: - sleep(0.1) - try: - future = self.executor.submit(callback, *args, **kwargs) - except RuntimeError as e: - raise ScanCancelledError(e) - with self._future_lock: - self.futures.add(future) - return future - - @property - def num_tasks(self): - with self._future_lock: - for f in list(self.futures): - if f.done(): - self.futures.remove(f) - return len(self.futures) + (1 if self._submit_task_lock.locked() else 0) - - def shutdown(self, *args, **kwargs): - self.executor.shutdown(*args, **kwargs) - - -def as_completed(fs): - fs = list(fs) - while fs: - result = False - for i, f in enumerate(fs): - if f.done(): - result = True - future = fs.pop(i) - if future._state in ("CANCELLED", "CANCELLED_AND_NOTIFIED"): - continue - yield future - break - if not result: - sleep(0.05) - - -class _Lock: - def __init__(self, name): - self.name = name - self.lock = threading.Lock() - - def __enter__(self): - self.lock.acquire() - - def __exit__(self, exc_type, exc_val, exc_tb): - self.lock.release() - - -class NamedLock: - """ - Returns a unique threading.Lock() based on a provided string - - Useful for preventing multiple operations from occuring on the same data in parallel - E.g. 
simultaneous DNS lookups on the same hostname - """ - - def __init__(self, max_size=1000): - self._cache = CacheDict(max_size=max_size) - - def get_lock(self, name): - try: - return self._cache.get(name) - except KeyError: - new_lock = _Lock(name) - self._cache.put(name, new_lock) - return new_lock diff --git a/bbot/core/helpers/url.py b/bbot/core/helpers/url.py index 8fc0b3c40f..5482e54c51 100644 --- a/bbot/core/helpers/url.py +++ b/bbot/core/helpers/url.py @@ -1,20 +1,58 @@ -import re import uuid import logging from contextlib import suppress from urllib.parse import urlparse, parse_qs, urlencode, ParseResult +from .regexes import double_slash_regex + log = logging.getLogger("bbot.core.helpers.url") def parse_url(url): - if type(url) == ParseResult: + """ + Parse the given URL string or ParseResult object and return a ParseResult. + + This function checks if the input is already a ParseResult object. If it is, + it returns the object as-is. Otherwise, it parses the given URL string using + `urlparse`. + + Args: + url (Union[str, ParseResult]): The URL string or ParseResult object to be parsed. + + Returns: + ParseResult: A named 6-tuple that contains the components of a URL. + + Examples: + >>> parse_url('https://www.evilcorp.com') + ParseResult(scheme='https', netloc='www.evilcorp.com', path='', params='', query='', fragment='') + """ + if isinstance(url, ParseResult): return url return urlparse(url) def add_get_params(url, params): + """ + Add or update query parameters to the given URL. + + This function takes an existing URL and a dictionary of query parameters, + updates or adds these parameters to the URL, and returns a new URL. + + Args: + url (Union[str, ParseResult]): The original URL. + params (Dict[str, Any]): A dictionary containing the query parameters to be added or updated. + + Returns: + ParseResult: A named 6-tuple containing the components of the modified URL. + + Examples: + >>> add_get_params('https://www.evilcorp.com?foo=1', {'bar': 2}) + ParseResult(scheme='https', netloc='www.evilcorp.com', path='', params='', query='foo=1&bar=2', fragment='') + + >>> add_get_params('https://www.evilcorp.com?foo=1', {'foo': 2}) + ParseResult(scheme='https', netloc='www.evilcorp.com', path='', params='', query='foo=2', fragment='') + """ parsed = parse_url(url) old_params = dict(parse_qs(parsed.query)) old_params.update(params) @@ -22,6 +60,22 @@ def add_get_params(url, params): def get_get_params(url): + """ + Extract the query parameters from the given URL as a dictionary. + + Args: + url (Union[str, ParseResult]): The URL from which to extract query parameters. + + Returns: + Dict[str, List[str]]: A dictionary containing the query parameters and their values. + + Examples: + >>> get_get_params('https://www.evilcorp.com?foo=1&bar=2') + {'foo': ['1'], 'bar': ['2']} + + >>> get_get_params('https://www.evilcorp.com?foo=1&foo=2') + {'foo': ['1', '2']} + """ parsed = parse_url(url) return dict(parse_qs(parsed.query)) @@ -33,6 +87,32 @@ def get_get_params(url): def charset(p): + """ + Determine the character set of the given string based on the types of characters it contains. + + Args: + p (str): The string whose character set is to be determined. + + Returns: + int: A bitmask representing the types of characters present in the string. 
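+            Each flag below is OR'd into the result when at least one
+            matching character is present: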
+ - CHAR_LOWER = 1: Lowercase alphabets + - CHAR_UPPER = 2: Uppercase alphabets + - CHAR_DIGIT = 4: Digits + - CHAR_SYMBOL = 8: Symbols/Special characters + + Examples: + >>> charset('abc') + 1 + + >>> charset('abcABC') + 3 + + >>> charset('abc123') + 5 + + >>> charset('!abc123') + 13 + """ ret = 0 for c in p: if c.islower(): @@ -47,6 +127,28 @@ def charset(p): def param_type(p): + """ + Evaluates the type of the given parameter. + + Args: + p (str): The parameter whose type is to be evaluated. + + Returns: + int: An integer representing the type of parameter. + - 1: Integer + - 2: UUID + - 3: Other + + Examples: + >>> param_type('123') + 1 + + >>> param_type('550e8400-e29b-41d4-a716-446655440000') + 2 + + >>> param_type('abc') + 3 + """ try: int(p) return 1 @@ -57,35 +159,26 @@ def param_type(p): return 3 -double_slash_regex = re.compile(r"/{2,}") +def hash_url(url): + """ + Hashes a URL for the purpose of cleaning or collapsing similar URLs. + Args: + url (str): The URL to be hashed. -def clean_url(url): - """ - Remove query string and fragment, lowercase netloc, remove redundant port + Returns: + int: The hash value of the cleaned URL. - http://evilcorp.com:80 --> http://evilcorp.com/ - http://eViLcORp.com/ --> http://evilcorp.com/ - http://evilcorp.com/api?user=bob#place --> http://evilcorp.com/api - """ - parsed = parse_url(url) - parsed = parsed._replace(netloc=str(parsed.netloc).lower(), fragment="", query="") - # remove ports if they're redundant - if (parsed.scheme == "http" and parsed.port == 80) or (parsed.scheme == "https" and parsed.port == 443): - hostname = parsed.hostname - # special case for IPv6 URLs - if parsed.netloc.startswith("["): - hostname = f"[{hostname}]" - parsed = parsed._replace(netloc=hostname) - # normalize double slashes - parsed = parsed._replace(path=double_slash_regex.sub("/", parsed.path)) - # append / if path is empty - if parsed.path == "": - parsed = parsed._replace(path="/") - return parsed + Examples: + >>> hash_url('https://www.evilcorp.com') + -7448777882396416944 + >>> hash_url('https://www.evilcorp.com/page/1') + -8101275613229735915 -def hash_url(url): + >>> hash_url('https://www.evilcorp.com/page/2') + -8101275613229735915 + """ parsed = parse_url(url) parsed = parsed._replace(fragment="", query="") to_hash = [parsed.netloc] @@ -102,38 +195,23 @@ def hash_url(url): return hash(tuple(to_hash)) -def collapse_urls(urls, threshold=10): - """ - Smartly dedupe suspiciously-similar URLs like these: - - http://evilcorp.com/user/11111/info - - http://evilcorp.com/user/2222/info - - http://evilcorp.com/user/333/info - - http://evilcorp.com/user/44/info - - http://evilcorp.com/user/5/info - - Useful for cleaning large lists of garbage-riddled URLs from sources like wayback +def url_depth(url): """ - url_hashes = {} - for url in urls: - new_url = clean_url(url) - url_hash = hash_url(new_url) - try: - url_hashes[url_hash].add(new_url) - except KeyError: - url_hashes[url_hash] = { - new_url, - } - - for url_hash, new_urls in url_hashes.items(): - # if the number of URLs exceeds the threshold - if len(new_urls) > threshold: - # yield only one - yield next(iter(new_urls)) - else: - yield from new_urls + Calculate the depth of the given URL based on its path components. + Args: + url (Union[str, ParseResult]): The URL whose depth is to be calculated. -def url_depth(url): + Returns: + int: The depth of the URL, based on its path components. 
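+
+    Note:
+        Duplicate slashes are collapsed and leading/trailing slashes are
+        stripped before the path segments are counted.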
+
+    Examples:
+        >>> url_depth('https://www.evilcorp.com/foo/bar/')
+        2
+
+        >>> url_depth('https://www.evilcorp.com/foo//bar/baz/')
+        3
+    """
     parsed = parse_url(url)
     parsed = parsed._replace(path=double_slash_regex.sub("/", parsed.path))
     split_path = str(parsed.path).strip("/").split("/")
diff --git a/bbot/core/helpers/validators.py b/bbot/core/helpers/validators.py
index 6febccafef..88acb01462 100644
--- a/bbot/core/helpers/validators.py
+++ b/bbot/core/helpers/validators.py
@@ -1,18 +1,34 @@
 import logging
 import ipaddress
+from typing import Union
+from functools import wraps
+from contextlib import suppress
 
 from bbot.core.helpers import regexes
-from bbot.core.helpers.url import clean_url
-from bbot.core.helpers.misc import split_host_port, make_netloc
+from bbot.errors import ValidationError
+from bbot.core.helpers.url import parse_url, hash_url
+from bbot.core.helpers.misc import smart_encode_punycode, split_host_port, make_netloc, is_ip
 
-log = logging.getLogger("bbot.core.helpers.")
+log = logging.getLogger("bbot.core.helpers.validators")
 
 
 def validator(func):
     """
-    Decorator for squashing all errors into ValueError
+    Decorator that squashes all errors raised by the wrapped function into a ValueError.
+
+    Args:
+        func (Callable): The function to be decorated.
+
+    Returns:
+        Callable: The wrapped function.
+
+    Examples:
+        >>> @validator
+        ... def validate_port(port):
+        ...     return max(1, min(65535, int(str(port))))
     """
+
+    @wraps(func)
     def validate_wrapper(*args, **kwargs):
         try:
             return func(*args)
@@ -23,12 +39,34 @@ def validate_wrapper(*args, **kwargs):
 
 
 @validator
-def validate_port(port):
+def validate_port(port: Union[str, int]):
+    """
+    Validates and sanitizes a port number by clamping it to the allowed range (1-65535).
+
+    Args:
+        port (int or str): The port number to validate.
+
+    Returns:
+        int: The sanitized port number.
+
+    Raises:
+        ValueError: If the port number cannot be converted to an integer. Out-of-range
+            values are clamped to the nearest bound rather than rejected.
+
+    Examples:
+        >>> validate_port(22)
+        22
+
+        >>> validate_port(70000)
+        65535
+
+        >>> validate_port(-123)
+        1
+    """
     return max(1, min(65535, int(str(port))))
 
 
 @validator
-def validate_open_port(open_port):
+def validate_open_port(open_port: Union[str, int]):
     host, port = split_host_port(open_port)
     port = validate_port(port)
     host = validate_host(host)
@@ -37,7 +75,34 @@ def validate_open_port(open_port):
 
 
 @validator
-def validate_host(host):
+def validate_host(host: Union[str, ipaddress.IPv4Address, ipaddress.IPv6Address]):
+    """
+    Validates and sanitizes a host string. This function handles IPv4, IPv6, and domain names.
+
+    It automatically strips ports, trailing periods, and clinging asterisks and dashes.
+
+    Args:
+        host (str): The host string to validate.
+
+    Returns:
+        str: The sanitized host string.
+
+    Raises:
+        ValidationError: If the host is invalid or does not conform to IPv4, IPv6, or DNS_NAME formats.
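+
+    Note:
+        Validation is attempted in order: IPv6 first, then IPv4, then DNS
+        name; the first interpretation that succeeds is returned.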
+ + Examples: + >>> validate_host("2001:db8::ff00:42:8329") + '2001:db8::ff00:42:8329' + + >>> validate_host("192.168.0.1:443") + '192.168.0.1' + + >>> validate_host(".*.eViLCoRP.com.") + 'evilcorp.com' + + >>> validate_host("Invalid<>Host") + ValueError: Validation failed for ('Invalid<>Host',), {}: Invalid hostname: "invalid<>host" + """ # stringify, strip and lowercase host = str(host).strip().lower() # handle IPv6 netlocs @@ -49,58 +114,170 @@ def validate_host(host): return str(ip) except Exception: # if IPv6 fails, strip ports and root zone - host = host.split(":")[0].split("@")[-1].rstrip(".") + host = host.split(":")[0].rstrip(".") try: ip = ipaddress.IPv4Address(host) return str(ip) except Exception: # finally, try DNS_NAME - host = host.lstrip("*.") + host = smart_encode_punycode(host) + # clean asterisks and clinging dashes + host = host.strip("*.-").replace("*", "") for r in regexes.event_type_regexes["DNS_NAME"]: if r.match(host): return host - if regexes._hostname_regex.match(host): - return host - assert False, f'Invalid hostname: "{host}"' + raise ValidationError(f'Invalid hostname: "{host}"') @validator -def validate_url(url): - return validate_url_parsed(url).geturl() +def validate_severity(severity: str): + severity = str(severity).strip().upper() + if severity not in ("UNKNOWN", "INFO", "LOW", "MEDIUM", "HIGH", "CRITICAL"): + raise ValueError(f"Invalid severity: {severity}") + return severity @validator -def validate_url_parsed(url): - url = str(url).strip() - if not any(r.match(url) for r in regexes.event_type_regexes["URL"]): - assert False, f'Invalid URL: "{url}"' - return clean_url(url) +def validate_email(email: str): + email = smart_encode_punycode(str(email).strip().lower()) + if any(r.match(email) for r in regexes.event_type_regexes["EMAIL_ADDRESS"]): + return email + raise ValidationError(f'Invalid email: "{email}"') + + +def clean_url(url: str, url_querystring_remove=True): + """ + Cleans and normalizes a URL. This function removes the query string and fragment, + lowercases the netloc, and removes redundant port numbers. + + Args: + url (str): The URL string to clean. + + Returns: + ParseResult: A ParseResult object containing the cleaned URL. 
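+
+    Note:
+        When `url_querystring_remove` is False, the query string is preserved,
+        but the fragment is still removed.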
+ + Examples: + >>> clean_url("http://evilcorp.com:80") + ParseResult(scheme='http', netloc='evilcorp.com', path='/', params='', query='', fragment='') + + >>> clean_url("http://eViLcORp.com/") + ParseResult(scheme='http', netloc='evilcorp.com', path='/', params='', query='', fragment='') + + >>> clean_url("http://evilcorp.com/api?user=bob#place") + ParseResult(scheme='http', netloc='evilcorp.com', path='/api', params='', query='', fragment='') + """ + parsed = parse_url(url) + + if url_querystring_remove: + parsed = parsed._replace(netloc=str(parsed.netloc).lower(), fragment="", query="") + else: + parsed = parsed._replace(netloc=str(parsed.netloc).lower(), fragment="") + try: + scheme = parsed.scheme + except ValueError: + scheme = "https" + with suppress(Exception): + port = parsed.port + if port is None: + port = 80 if scheme == "http" else 443 + hostname = validate_host(parsed.hostname) + # remove ports if they're redundant + if (scheme == "http" and port == 80) or (scheme == "https" and port == 443): + port = None + # special case for IPv6 URLs + netloc = make_netloc(hostname, port) + # urlparse is special - it needs square brackets even if there's no port + if is_ip(netloc, version=6): + netloc = f"[{netloc}]" + parsed = parsed._replace(netloc=netloc) + # normalize double slashes + parsed = parsed._replace(path=regexes.double_slash_regex.sub("/", parsed.path)) + # append / if path is empty + if parsed.path == "": + parsed = parsed._replace(path="/") + return parsed + + +def collapse_urls(*args, **kwargs): + return list(_collapse_urls(*args, **kwargs)) + + +def _collapse_urls(urls, threshold=10): + """ + Collapses a list of URLs by deduping similar URLs based on a hashing mechanism. + Useful for cleaning large lists of noisy URLs, such as those retrieved from wayback. + + Args: + urls (list): The list of URL strings to collapse. + threshold (int): The number of allowed duplicate URLs before collapsing. + + Yields: + str: A deduped URL from the input list. 
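+
+    Note:
+        URLs are bucketed by `hash_url()`, which (as its examples show) treats
+        URLs that differ only in numeric path segments as equivalent.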
+
+    Example:
+        >>> list(collapse_urls(["http://evilcorp.com/user/11111/info", "http://evilcorp.com/user/2222/info"], threshold=1))
+        ["http://evilcorp.com/user/11111/info"]
+
+    """
+    log.verbose(f"Collapsing {len(urls):,} URLs")
+    url_hashes = {}
+    for url in urls:
+        try:
+            new_url = clean_url(url)
+        except ValueError as e:
+            log.verbose(f"Failed to clean url {url}: {e}")
+            # skip URLs that fail to parse, so a stale new_url is never reused
+            continue
+        url_hash = hash_url(new_url)
+        try:
+            url_hashes[url_hash].add(new_url)
+        except KeyError:
+            url_hashes[url_hash] = {
+                new_url,
+            }
+
+    for url_hash, new_urls in url_hashes.items():
+        # if the number of URLs exceeds the threshold
+        if len(new_urls) > threshold:
+            # yield only one
+            yield next(iter(new_urls))
+        else:
+            yield from new_urls
 
 
 @validator
-def validate_severity(severity):
-    severity = str(severity).strip().upper()
-    if not severity in ("INFO", "LOW", "MEDIUM", "HIGH", "CRITICAL"):
-        raise ValueError(f"Invalid severity: {severity}")
-    return severity
+def validate_url(url: str):
+    return validate_url_parsed(url).geturl()
 
 
 @validator
-def validate_email(email):
-    email = str(email).strip().lower()
-    match_1 = any(r.match(email) for r in regexes.event_type_regexes["EMAIL_ADDRESS"])
-    match_2 = regexes._hostname_regex.match(email)
-    if match_1 or match_2:
-        return email
-    assert False, f'Invalid email: "{email}"'
+def validate_url_parsed(url: str):
+    url = str(url).strip()
+    if not any(r.match(url) for r in regexes.event_type_regexes["URL"]):
+        raise ValidationError(f'Invalid URL: "{url}"')
+    return clean_url(url)
 
 
 def soft_validate(s, t):
     """
-    Friendly validation wrapper that returns True/False instead of raising an error
+    Softly validates a given string against a specified type. This function returns a boolean
+    instead of raising an error.
 
-    is_valid_url = soft_validate("http://evilcorp.com", "url")
-    is_valid_host = soft_validate("http://evilcorp.com", "host")
+    Args:
+        s (str): The string to validate.
+        t (str): The type to validate against, e.g., "url" or "host".
+
+    Returns:
+        bool: True if the string is valid, False otherwise.
+
+    Raises:
+        ValueError: If no validator for the specified type is found.
+
+    Examples:
+        >>> soft_validate("http://evilcorp.com", "url")
+        True
+        >>> soft_validate("evilcorp.com", "url")
+        False
+        >>> soft_validate("http://evilcorp", "wrong_type")
+        ValueError: No validator for type "wrong_type"
     """
     try:
         validator_fn = globals()[f"validate_{t.strip().lower()}"]
@@ -111,3 +288,29 @@ def soft_validate(s, t):
         return True
     except ValueError:
         return False
+
+
+def is_email(email):
+    try:
+        validate_email(email)
+        return True
+    except ValueError:
+        return False
+
+
+class Validators:
+    def __init__(self, parent_helper):
+        self.parent_helper = parent_helper
+
+    def clean_url(self, url: str):
+        url_querystring_remove = self.parent_helper.config.get("url_querystring_remove", True)
+        return clean_url(url, url_querystring_remove=url_querystring_remove)
+
+    def validate_url_parsed(self, url: str):
+        """
+        This version is necessary so that it can be config-aware when needed, to avoid a chicken-and-egg situation. 
Currently this is only used by the base event class to sanitize URLs + """ + url = str(url).strip() + if not any(r.match(url) for r in regexes.event_type_regexes["URL"]): + raise ValidationError(f'Invalid URL: "{url}"') + return self.clean_url(url) diff --git a/bbot/core/helpers/web.py b/bbot/core/helpers/web.py deleted file mode 100644 index a4a4400c6c..0000000000 --- a/bbot/core/helpers/web.py +++ /dev/null @@ -1,264 +0,0 @@ -import logging -import requests -from time import sleep -from pathlib import Path -from requests_cache import CachedSession -from requests_cache.backends import SQLiteCache -from requests.exceptions import RequestException - -from bbot.core.errors import WordlistError - -log = logging.getLogger("bbot.core.helpers.web") - - -def wordlist(self, path, lines=None, **kwargs): - if not path: - raise WordlistError(f"Invalid wordlist: {path}") - if not "cache_hrs" in kwargs: - kwargs["cache_hrs"] = 720 - if self.is_url(path): - filename = self.download(str(path), **kwargs) - if filename is None: - raise WordlistError(f"Unable to retrieve wordlist from {path}") - else: - filename = Path(path).resolve() - if not filename.is_file(): - raise WordlistError(f"Unable to find wordlist at {path}") - - if lines is None: - return filename - else: - lines = int(lines) - with open(filename) as f: - read_lines = f.readlines() - cache_key = f"{filename}:{lines}" - truncated_filename = self.cache_filename(cache_key) - with open(truncated_filename, "w") as f: - for line in read_lines[:lines]: - f.write(line) - return truncated_filename - - -def download(self, url, **kwargs): - """ - Downloads file, returns full path of filename - If download failed, returns None - - Caching supported via "cache_hrs" - """ - success = False - filename = self.cache_filename(url) - cache_hrs = float(kwargs.pop("cache_hrs", -1)) - log.debug(f"Downloading file from {url} with cache_hrs={cache_hrs}") - if cache_hrs > 0 and self.is_cached(url): - log.debug(f"{url} is cached") - success = True - else: - method = kwargs.get("method", "GET") - try: - with self.request(method=method, url=url, stream=True, raise_error=True, **kwargs) as response: - status_code = getattr(response, "status_code", 0) - log.debug(f"Download result: HTTP {status_code}") - if status_code != 0: - response.raise_for_status() - with open(filename, "wb") as f: - for chunk in response.iter_content(chunk_size=8192): - f.write(chunk) - success = True - except RequestException as e: - log.warning(f"Failed to download {url}: {e}") - return - except AttributeError: - return - - if success: - return filename.resolve() - - -def request(self, *args, **kwargs): - """ - Multipurpose function for making web requests - - Supports custom sessions - session Request.Session() - - Arguments - cache_for (Union[None, int, float, str, datetime, timedelta]): Cache response for seconds - raise_error (bool): Whether to raise exceptions (default: False) - """ - - raise_error = kwargs.pop("raise_error", False) - - cache_for = kwargs.pop("cache_for", None) - if cache_for is not None: - log.debug(f"Caching HTTP session with expire_after={cache_for}") - try: - session = self.cache_sessions[cache_for] - except KeyError: - db_path = str(self.cache_dir / "requests-cache.sqlite") - backend = SQLiteCache(db_path=db_path) - session = CachedSession(expire_after=cache_for, backend=backend) - self.cache_sessions[cache_for] = session - - if kwargs.pop("session", None) or not cache_for: - session = kwargs.pop("session", None) - - http_timeout = self.config.get("http_timeout", 20) - 
user_agent = self.config.get("user_agent", "BBOT") - - # in case of URL only, assume GET request - if len(args) == 1: - kwargs["url"] = args[0] - args = [] - - url = kwargs.get("url", "") - retries = kwargs.pop("retries", 0) - - if not args and "method" not in kwargs: - kwargs["method"] = "GET" - - if not "timeout" in kwargs: - kwargs["timeout"] = http_timeout - - headers = kwargs.get("headers", None) - - if headers is None: - headers = {} - if "User-Agent" not in headers: - headers.update({"User-Agent": user_agent}) - kwargs["headers"] = headers - - http_debug = self.config.get("http_debug", False) - while retries == "infinite" or retries >= 0: - try: - if http_debug: - logstr = f"Web request: {str(args)}, {str(kwargs)}" - log.debug(logstr) - if session is not None: - response = session.request(*args, **kwargs) - else: - response = requests.request(*args, **kwargs) - if http_debug: - log.debug(f"Web response: {response} (Length: {len(response.content)}) headers: {response.headers}") - return response - except RequestException as e: - log.debug(f"Error with request: {e}") - if retries != "infinite": - retries -= 1 - if retries == "infinite" or retries >= 0: - log.warning(f'Error requesting "{url}" ({e}), retrying...') - sleep(1) - else: - if raise_error: - raise e - - -def api_page_iter(self, url, page_size=100, json=True, **requests_kwargs): - page = 1 - offset = 0 - while 1: - new_url = url.format(page=page, page_size=page_size, offset=offset) - result = self.request(new_url, **requests_kwargs) - try: - if json: - result = result.json() - yield result - except Exception: - import traceback - - log.warning(f'Error in api_page_iter() for url: "{new_url}"') - log.debug(traceback.format_exc()) - break - finally: - offset += page_size - page += 1 - - -def curl(self, *args, **kwargs): - - url = kwargs.get("url", "") - - if not url: - log.debug("No URL supplied to CURL helper") - return - - curl_command = ["curl", url, "-s"] - - raw_path = kwargs.get("raw_path", False) - if raw_path: - curl_command.append("--path-as-is") - - # respect global ssl verify settings - ssl_verify = self.config.get("ssl_verify") - if ssl_verify == False: - curl_command.append("-k") - - headers = kwargs.get("headers", {}) - - ignore_bbot_global_settings = kwargs.get("ignore_bbot_global_settings", False) - - if ignore_bbot_global_settings: - log.debug("ignore_bbot_global_settings enabled. 
Global settings will not be applied") - else: - http_timeout = self.config.get("http_timeout", 20) - user_agent = self.config.get("user_agent", "BBOT") - - if "User-Agent" not in headers: - headers["User-Agent"] = user_agent - - # add the timeout - if not "timeout" in kwargs: - timeout = http_timeout - - curl_command.append("-m") - curl_command.append(str(timeout)) - - for k, v in headers.items(): - if type(v) == list: - for x in v: - curl_command.append("-H") - curl_command.append(f"{k}: {x}") - - else: - curl_command.append("-H") - curl_command.append(f"{k}: {v}") - - post_data = kwargs.get("post_data", {}) - if len(post_data.items()) > 0: - curl_command.append("-d") - post_data_str = "" - for k, v in post_data.items(): - post_data_str += f"&{k}={v}" - curl_command.append(post_data_str.lstrip("&")) - - method = kwargs.get("method", "") - if method: - curl_command.append("-X") - curl_command.append(method) - - cookies = kwargs.get("cookies", "") - if cookies: - - curl_command.append("-b") - cookies_str = "" - for k, v in cookies.items(): - cookies_str += f"{k}={v}; " - curl_command.append(f'{cookies_str.rstrip(" ")}') - - path_override = kwargs.get("path_override", None) - if path_override: - curl_command.append("--request-target") - curl_command.append(f"{path_override}") - - head_mode = kwargs.get("head_mode", None) - if head_mode: - curl_command.append("-I") - - raw_body = kwargs.get("raw_body", None) - if raw_body: - curl_command.append("-d") - curl_command.append(raw_body) - - output_bytes = self.run(curl_command, text=False).stdout - output = self.smart_decode(output_bytes) - return output diff --git a/bbot/core/helpers/web/__init__.py b/bbot/core/helpers/web/__init__.py new file mode 100644 index 0000000000..8fcf82abbe --- /dev/null +++ b/bbot/core/helpers/web/__init__.py @@ -0,0 +1 @@ +from .web import WebHelper diff --git a/bbot/core/helpers/web/client.py b/bbot/core/helpers/web/client.py new file mode 100644 index 0000000000..737a2f9dcb --- /dev/null +++ b/bbot/core/helpers/web/client.py @@ -0,0 +1,99 @@ +import httpx +import logging +from httpx._models import Cookies + +log = logging.getLogger("bbot.core.helpers.web.client") + + +class DummyCookies(Cookies): + def extract_cookies(self, *args, **kwargs): + pass + + +class BBOTAsyncClient(httpx.AsyncClient): + """ + A subclass of httpx.AsyncClient tailored with BBOT-specific configurations and functionalities. + This class provides rate limiting, logging, configurable timeouts, user-agent customization, custom + headers, and proxy settings. Additionally, it allows the disabling of cookies, making it suitable + for use across an entire scan. + + Attributes: + _bbot_scan (object): BBOT scan object containing configuration details. + _persist_cookies (bool): Flag to determine whether cookies should be persisted across requests. 
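+
+    Note:
+        When cookie persistence is disabled, the cookie jar is replaced with a DummyCookies
+        instance whose extract_cookies() is a no-op, so Set-Cookie headers from one response
+        are never replayed on later requests made with the same client.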
+ + Examples: + >>> async with BBOTAsyncClient(_bbot_scan=bbot_scan_object) as client: + >>> response = await client.request("GET", "https://example.com") + >>> print(response.status_code) + 200 + """ + + @classmethod + def from_config(cls, config, target, *args, **kwargs): + kwargs["_config"] = config + kwargs["_target"] = target + web_config = config.get("web", {}) + retries = kwargs.pop("retries", web_config.get("http_retries", 1)) + ssl_verify = web_config.get("ssl_verify", False) + if ssl_verify is False: + from .ssl_context import ssl_context_noverify + + ssl_verify = ssl_context_noverify + kwargs["transport"] = httpx.AsyncHTTPTransport(retries=retries, verify=ssl_verify) + kwargs["verify"] = ssl_verify + return cls(*args, **kwargs) + + def __init__(self, *args, **kwargs): + self._config = kwargs.pop("_config") + self._target = kwargs.pop("_target") + + self._web_config = self._config.get("web", {}) + http_debug = self._web_config.get("debug", None) + if http_debug: + log.trace(f"Creating AsyncClient: {args}, {kwargs}") + + self._persist_cookies = kwargs.pop("persist_cookies", True) + + # timeout + http_timeout = self._web_config.get("http_timeout", 20) + if "timeout" not in kwargs: + kwargs["timeout"] = http_timeout + + # headers + headers = kwargs.get("headers", None) + if headers is None: + headers = {} + # user agent + user_agent = self._web_config.get("user_agent", "BBOT") + if "User-Agent" not in headers: + headers["User-Agent"] = user_agent + kwargs["headers"] = headers + # proxy + proxies = self._web_config.get("http_proxy", None) + kwargs["proxies"] = proxies + + log.verbose(f"Creating httpx.AsyncClient({args}, {kwargs})") + super().__init__(*args, **kwargs) + if not self._persist_cookies: + self._cookies = DummyCookies() + + def build_request(self, *args, **kwargs): + request = super().build_request(*args, **kwargs) + # add custom headers if the URL is in-scope + # TODO: re-enable this + if self._target.in_scope(str(request.url)): + for hk, hv in self._web_config.get("http_headers", {}).items(): + hv = str(hv) + # don't clobber headers + if hk not in request.headers: + request.headers[hk] = hv + return request + + def _merge_cookies(self, cookies): + if self._persist_cookies: + return super()._merge_cookies(cookies) + return cookies + + @property + def retries(self): + return self._transport._pool._retries diff --git a/bbot/core/helpers/web/engine.py b/bbot/core/helpers/web/engine.py new file mode 100644 index 0000000000..de69e18766 --- /dev/null +++ b/bbot/core/helpers/web/engine.py @@ -0,0 +1,232 @@ +import ssl +import anyio +import httpx +import asyncio +import logging +import traceback +from socksio.exceptions import SOCKSError +from contextlib import asynccontextmanager + +from bbot.core.engine import EngineServer +from bbot.core.helpers.misc import bytes_to_human, human_to_bytes, get_exception_chain + +log = logging.getLogger("bbot.core.helpers.web.engine") + + +class HTTPEngine(EngineServer): + CMDS = { + 0: "request", + 1: "request_batch", + 2: "request_custom_batch", + 3: "download", + } + + client_only_options = ( + "retries", + "max_redirects", + ) + + def __init__(self, socket_path, target, config={}, debug=False): + super().__init__(socket_path, debug=debug) + self.target = target + self.config = config + self.web_config = self.config.get("web", {}) + self.http_debug = self.web_config.get("debug", False) + self._ssl_context_noverify = None + self.web_clients = {} + self.web_client = self.AsyncClient(persist_cookies=False) + + def AsyncClient(self, *args, 
**kwargs): + # cache by retries to prevent unwanted accumulation of clients + # (they are not garbage-collected) + retries = kwargs.get("retries", 1) + try: + return self.web_clients[retries] + except KeyError: + from .client import BBOTAsyncClient + + client = BBOTAsyncClient.from_config(self.config, self.target, *args, **kwargs) + self.web_clients[client.retries] = client + return client + + async def request(self, *args, **kwargs): + raise_error = kwargs.pop("raise_error", False) + # TODO: use this + cache_for = kwargs.pop("cache_for", None) # noqa + + client = kwargs.get("client", self.web_client) + + # allow vs follow, httpx why?? + allow_redirects = kwargs.pop("allow_redirects", None) + if allow_redirects is not None and "follow_redirects" not in kwargs: + kwargs["follow_redirects"] = allow_redirects + + # in case of URL only, assume GET request + if len(args) == 1: + kwargs["url"] = args[0] + args = [] + + url = kwargs.get("url", "") + + if not args and "method" not in kwargs: + kwargs["method"] = "GET" + + client_kwargs = {} + for k in list(kwargs): + if k in self.client_only_options: + v = kwargs.pop(k) + client_kwargs[k] = v + + if client_kwargs: + client = self.AsyncClient(**client_kwargs) + + try: + async with self._acatch(url, raise_error): + if self.http_debug: + log.trace(f"Web request: {str(args)}, {str(kwargs)}") + response = await client.request(*args, **kwargs) + if self.http_debug: + log.trace( + f"Web response from {url}: {response} (Length: {len(response.content)}) headers: {response.headers}" + ) + return response + except httpx.HTTPError as e: + if raise_error: + _response = getattr(e, "response", None) + return {"_request_error": str(e), "_response": _response} + + async def request_batch(self, urls, threads=10, **kwargs): + async for (args, _, _), response in self.task_pool( + self.request, args_kwargs=urls, threads=threads, global_kwargs=kwargs + ): + yield args[0], response + + async def request_custom_batch(self, urls_and_kwargs, threads=10, **kwargs): + async for (args, kwargs, tracker), response in self.task_pool( + self.request, args_kwargs=urls_and_kwargs, threads=threads, global_kwargs=kwargs + ): + yield args[0], kwargs, tracker, response + + async def download(self, url, **kwargs): + warn = kwargs.pop("warn", True) + raise_error = kwargs.pop("raise_error", False) + filename = kwargs.pop("filename") + try: + result = await self.stream_request(url, **kwargs) + if result is None: + raise httpx.HTTPError(f"No response from {url}") + content, response = result + log.debug(f"Download result: HTTP {response.status_code}") + response.raise_for_status() + with open(filename, "wb") as f: + f.write(content) + return filename + except httpx.HTTPError as e: + log_fn = log.verbose + if warn: + log_fn = log.warning + log_fn(f"Failed to download {url}: {e}") + if raise_error: + _response = getattr(e, "response", None) + return {"_download_error": str(e), "_response": _response} + + async def stream_request(self, url, **kwargs): + follow_redirects = kwargs.pop("follow_redirects", True) + max_size = kwargs.pop("max_size", None) + raise_error = kwargs.pop("raise_error", False) + if max_size is not None: + max_size = human_to_bytes(max_size) + kwargs["follow_redirects"] = follow_redirects + if "method" not in kwargs: + kwargs["method"] = "GET" + try: + total_size = 0 + chunk_size = 8192 + chunks = [] + + async with self._acatch(url, raise_error=True), self.web_client.stream(url=url, **kwargs) as response: + agen = response.aiter_bytes(chunk_size=chunk_size) + async for 
chunk in agen: + _chunk_size = len(chunk) + if max_size is not None and total_size + _chunk_size > max_size: + log.verbose( + f"Size of response from {url} exceeds {bytes_to_human(max_size)}, file will be truncated" + ) + await agen.aclose() + break + total_size += _chunk_size + chunks.append(chunk) + return b"".join(chunks), response + except httpx.HTTPError as e: + self.log.debug(f"Error requesting {url}: {e}") + if raise_error: + raise + + def ssl_context_noverify(self): + if self._ssl_context_noverify is None: + ssl_context = ssl.create_default_context() + ssl_context.check_hostname = False + ssl_context.verify_mode = ssl.CERT_NONE + ssl_context.options &= ~ssl.OP_NO_SSLv2 & ~ssl.OP_NO_SSLv3 + ssl_context.set_ciphers("ALL:@SECLEVEL=0") + ssl_context.options |= 0x4 # Add the OP_LEGACY_SERVER_CONNECT option + self._ssl_context_noverify = ssl_context + return self._ssl_context_noverify + + @asynccontextmanager + async def _acatch(self, url, raise_error): + """ + Asynchronous context manager to handle various httpx errors during a request. + + Yields: + None + + Note: + This function is internal and should generally not be used directly. + `url`, `args`, `kwargs`, and `raise_error` should be in the same context as this function. + """ + try: + yield + except httpx.TimeoutException: + if raise_error: + raise + else: + log.verbose(f"HTTP timeout to URL: {url}") + except httpx.ConnectError: + if raise_error: + raise + else: + log.debug(f"HTTP connect failed to URL: {url}") + except httpx.HTTPError as e: + if raise_error: + raise + else: + log.trace(f"Error with request to URL: {url}: {e}") + log.trace(traceback.format_exc()) + except ssl.SSLError as e: + msg = f"SSL error with request to URL: {url}: {e}" + if raise_error: + raise httpx.RequestError(msg) + else: + log.trace(msg) + log.trace(traceback.format_exc()) + except anyio.EndOfStream as e: + msg = f"AnyIO error with request to URL: {url}: {e}" + if raise_error: + raise httpx.RequestError(msg) + else: + log.trace(msg) + log.trace(traceback.format_exc()) + except SOCKSError as e: + msg = f"SOCKS error with request to URL: {url}: {e}" + if raise_error: + raise httpx.RequestError(msg) + else: + log.trace(msg) + log.trace(traceback.format_exc()) + except BaseException as e: + # don't log if the error is the result of an intentional cancellation + if not any(isinstance(_e, asyncio.exceptions.CancelledError) for _e in get_exception_chain(e)): + log.trace(f"Unhandled exception with request to URL: {url}: {e}") + log.trace(traceback.format_exc()) + raise diff --git a/bbot/core/helpers/web/ssl_context.py b/bbot/core/helpers/web/ssl_context.py new file mode 100644 index 0000000000..fabe4188fd --- /dev/null +++ b/bbot/core/helpers/web/ssl_context.py @@ -0,0 +1,8 @@ +import ssl + +ssl_context_noverify = ssl.create_default_context() +ssl_context_noverify.check_hostname = False +ssl_context_noverify.verify_mode = ssl.CERT_NONE +ssl_context_noverify.options &= ~ssl.OP_NO_SSLv2 & ~ssl.OP_NO_SSLv3 +ssl_context_noverify.set_ciphers("ALL:@SECLEVEL=0") +ssl_context_noverify.options |= 0x4 # Add the OP_LEGACY_SERVER_CONNECT option diff --git a/bbot/core/helpers/web/web.py b/bbot/core/helpers/web/web.py new file mode 100644 index 0000000000..23f7a8c607 --- /dev/null +++ b/bbot/core/helpers/web/web.py @@ -0,0 +1,536 @@ +import logging +import warnings +from pathlib import Path +from bs4 import BeautifulSoup + +from bbot.core.engine import EngineClient +from bbot.core.helpers.misc import truncate_filename +from bbot.errors import WordlistError, CurlError, 
WebError + +from bs4 import MarkupResemblesLocatorWarning +from bs4.builder import XMLParsedAsHTMLWarning + +from .engine import HTTPEngine + +warnings.filterwarnings("ignore", category=XMLParsedAsHTMLWarning) +warnings.filterwarnings("ignore", category=MarkupResemblesLocatorWarning) + +log = logging.getLogger("bbot.core.helpers.web") + + +class WebHelper(EngineClient): + SERVER_CLASS = HTTPEngine + ERROR_CLASS = WebError + + """ + Main utility class for managing HTTP operations in BBOT. It serves as a wrapper around the BBOTAsyncClient, + which itself is a subclass of httpx.AsyncClient. The class provides functionalities to make HTTP requests, + download files, and handle cached wordlists. + + Attributes: + parent_helper (object): The parent helper object containing scan configurations. + http_debug (bool): Flag to indicate whether HTTP debugging is enabled. + ssl_verify (bool): Flag to indicate whether SSL verification is enabled. + web_client (BBOTAsyncClient): An instance of BBOTAsyncClient for making HTTP requests. + client_only_options (tuple): A tuple of options only applicable to the web client. + + Examples: + Basic web request: + >>> response = await self.helpers.request("https://www.evilcorp.com") + + Download file: + >>> filename = await self.helpers.download("https://www.evilcorp.com/passwords.docx") + + Download wordlist (cached for 30 days by default): + >>> filename = await self.helpers.wordlist("https://www.evilcorp.com/wordlist.txt") + """ + + def __init__(self, parent_helper): + self.parent_helper = parent_helper + self.preset = self.parent_helper.preset + self.config = self.preset.config + self.web_config = self.config.get("web", {}) + self.web_spider_depth = self.web_config.get("spider_depth", 1) + self.web_spider_distance = self.web_config.get("spider_distance", 0) + self.web_clients = {} + self.target = self.preset.target + self.ssl_verify = self.config.get("ssl_verify", False) + engine_debug = self.config.get("engine", {}).get("debug", False) + super().__init__( + server_kwargs={"config": self.config, "target": self.parent_helper.preset.target.minimal}, + debug=engine_debug, + ) + + def AsyncClient(self, *args, **kwargs): + # cache by retries to prevent unwanted accumulation of clients + # (they are not garbage-collected) + retries = kwargs.get("retries", 1) + try: + return self.web_clients[retries] + except KeyError: + from .client import BBOTAsyncClient + + client = BBOTAsyncClient.from_config(self.config, self.target, *args, persist_cookies=False, **kwargs) + self.web_clients[client.retries] = client + return client + + async def request(self, *args, **kwargs): + """ + Asynchronous function for making HTTP requests, intended to be the most basic web request function + used widely across BBOT and within this helper class. Handles various exceptions and timeouts + that might occur during the request. + + This function automatically respects the scan's global timeout, proxy, headers, etc. + Headers you specify will be merged with the scan's. Your arguments take ultimate precedence, + meaning you can override the scan's values if you want. + + Args: + url (str): The URL to send the request to. + method (str, optional): The HTTP method to use for the request. Defaults to 'GET'. + headers (dict, optional): Dictionary of HTTP headers to send with the request. + params (dict, optional): Dictionary, list of tuples, or bytes to send in the query string. + cookies (dict, optional): Dictionary or CookieJar object containing cookies. 
+ json (Any, optional): A JSON serializable Python object to send in the body. + data (dict, optional): Dictionary, list of tuples, or bytes to send in the body. + files (dict, optional): Dictionary of 'name': file-like-objects for multipart encoding upload. + auth (tuple, optional): Auth tuple to enable Basic/Digest/Custom HTTP auth. + timeout (float, optional): The maximum time to wait for the request to complete. + proxies (dict, optional): Dictionary mapping protocol schemes to proxy URLs. + allow_redirects (bool, optional): Enables or disables redirection. Defaults to None. + stream (bool, optional): Enables or disables response streaming. + raise_error (bool, optional): Whether to raise exceptions for HTTP connect, timeout errors. Defaults to False. + client (httpx.AsyncClient, optional): A specific httpx.AsyncClient to use for the request. Defaults to self.web_client. + cache_for (int, optional): Time in seconds to cache the request. Not used currently. Defaults to None. + + Raises: + httpx.TimeoutException: If the request times out. + httpx.ConnectError: If the connection fails. + httpx.RequestError: For other request-related errors. + + Returns: + httpx.Response or None: The HTTP response object returned by the httpx library. + + Examples: + >>> response = await self.helpers.request("https://www.evilcorp.com") + + >>> response = await self.helpers.request("https://api.evilcorp.com/", method="POST", data="stuff") + + Note: + If the web request fails, it will return None unless `raise_error` is `True`. + """ + raise_error = kwargs.get("raise_error", False) + result = await self.run_and_return("request", *args, **kwargs) + if isinstance(result, dict) and "_request_error" in result: + if raise_error: + error_msg = result["_request_error"] + response = result["_response"] + error = self.ERROR_CLASS(error_msg) + error.response = response + raise error + return result + + async def request_batch(self, urls, *args, **kwargs): + """ + Given a list of URLs, request them in parallel and yield responses as they come in. + + Args: + urls (list[str]): List of URLs to visit + *args: Positional arguments to pass through to httpx + **kwargs: Keyword arguments to pass through to httpx + + Examples: + >>> async for url, response in self.helpers.request_batch(urls, headers={"X-Test": "Test"}): + >>> if response is not None and response.status_code == 200: + >>> self.hugesuccess(response) + """ + agen = self.run_and_yield("request_batch", urls, *args, **kwargs) + while 1: + try: + yield await agen.__anext__() + except (StopAsyncIteration, GeneratorExit): + await agen.aclose() + break + + async def request_custom_batch(self, urls_and_kwargs): + """ + Make web requests in parallel with custom options for each request. Yield responses as they come in. + + Similar to `request_batch` except it allows individual arguments for each URL. + + Args: + urls_and_kwargs (list[tuple]): List of tuples in the format: (url, kwargs, custom_tracker) + where custom_tracker is an optional value for your own internal use. You may use it to + help correlate requests, etc. 
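+
+        Yields:
+            tuple: (url, kwargs, custom_tracker, response) for each request, yielded in
+                completion order rather than input order.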
+ + Examples: + >>> urls_and_kwargs = [ + >>> ("http://evilcorp.com/1", {"method": "GET"}, "request-1"), + >>> ("http://evilcorp.com/2", {"method": "POST"}, "request-2"), + >>> ] + >>> async for url, kwargs, custom_tracker, response in self.helpers.request_custom_batch( + >>> urls_and_kwargs + >>> ): + >>> if response is not None and response.status_code == 200: + >>> self.hugesuccess(response) + """ + agen = self.run_and_yield("request_custom_batch", urls_and_kwargs) + while 1: + try: + yield await agen.__anext__() + except (StopAsyncIteration, GeneratorExit): + await agen.aclose() + break + + async def download(self, url, **kwargs): + """ + Asynchronous function for downloading files from a given URL. Supports caching with an optional + time period in hours via the "cache_hrs" keyword argument. In case of successful download, + returns the full path of the saved filename. If the download fails, returns None. + + Args: + url (str): The URL of the file to download. + filename (str, optional): The filename to save the downloaded file as. + If not provided, will generate based on URL. + max_size (str or int): Maximum filesize as a string ("5MB") or integer in bytes. + cache_hrs (float, optional): The number of hours to cache the downloaded file. + A negative value disables caching. Defaults to -1. + method (str, optional): The HTTP method to use for the request, defaults to 'GET'. + raise_error (bool, optional): Whether to raise exceptions for HTTP connect, timeout errors. Defaults to False. + **kwargs: Additional keyword arguments to pass to the httpx request. + + Returns: + Path or None: The full path of the downloaded file as a Path object if successful, otherwise None. + + Examples: + >>> filepath = await self.helpers.download("https://www.evilcorp.com/passwords.docx", cache_hrs=24) + """ + success = False + raise_error = kwargs.get("raise_error", False) + filename = kwargs.pop("filename", self.parent_helper.cache_filename(url)) + filename = truncate_filename(Path(filename).resolve()) + kwargs["filename"] = filename + max_size = kwargs.pop("max_size", None) + if max_size is not None: + max_size = self.parent_helper.human_to_bytes(max_size) + kwargs["max_size"] = max_size + cache_hrs = float(kwargs.pop("cache_hrs", -1)) + if cache_hrs > 0 and self.parent_helper.is_cached(url): + log.debug(f"{url} is cached at {self.parent_helper.cache_filename(url)}") + success = True + else: + result = await self.run_and_return("download", url, **kwargs) + if isinstance(result, dict) and "_download_error" in result: + if raise_error: + error_msg = result["_download_error"] + response = result["_response"] + error = self.ERROR_CLASS(error_msg) + error.response = response + raise error + elif result: + success = True + + if success: + return filename + + async def wordlist(self, path, lines=None, zip=False, zip_filename=None, **kwargs): + """ + Asynchronous function for retrieving wordlists, either from a local path or a URL. + Allows for optional line-based truncation and caching. Returns the full path of the wordlist + file or a truncated version of it. + + Args: + path (str): The local or remote path of the wordlist. + lines (int, optional): Number of lines to read from the wordlist. + If specified, will return a truncated wordlist with this many lines. + zip (bool, optional): Whether to unzip the file after downloading. Defaults to False. + zip_filename (str, optional): The name of the file to extract from the ZIP archive. + Required if zip is True. 
+ cache_hrs (float, optional): Number of hours to cache the downloaded wordlist. + Defaults to 720 hours (30 days) for remote wordlists. + **kwargs: Additional keyword arguments to pass to the 'download' function for remote wordlists. + + Returns: + Path: The full path of the wordlist (or its truncated version) as a Path object. + + Raises: + WordlistError: If the path is invalid or the wordlist could not be retrieved or found. + + Examples: + Fetching full wordlist + >>> wordlist_path = await self.helpers.wordlist("https://www.evilcorp.com/wordlist.txt") + + Fetching and truncating to the first 100 lines + >>> wordlist_path = await self.helpers.wordlist("/root/rockyou.txt", lines=100) + """ + import zipfile + + if not path: + raise WordlistError(f"Invalid wordlist: {path}") + if "cache_hrs" not in kwargs: + kwargs["cache_hrs"] = 720 + if self.parent_helper.is_url(path): + filename = await self.download(str(path), **kwargs) + if filename is None: + raise WordlistError(f"Unable to retrieve wordlist from {path}") + else: + filename = Path(path).resolve() + if not filename.is_file(): + raise WordlistError(f"Unable to find wordlist at {path}") + + if zip: + if not zip_filename: + raise WordlistError("zip_filename must be specified when zip is True") + try: + with zipfile.ZipFile(filename, "r") as zip_ref: + if zip_filename not in zip_ref.namelist(): + raise WordlistError(f"File {zip_filename} not found in the zip archive {filename}") + zip_ref.extract(zip_filename, filename.parent) + filename = filename.parent / zip_filename + except Exception as e: + raise WordlistError(f"Error unzipping file {filename}: {e}") + + if lines is None: + return filename + else: + lines = int(lines) + with open(filename) as f: + read_lines = f.readlines() + cache_key = f"{filename}:{lines}" + truncated_filename = self.parent_helper.cache_filename(cache_key) + with open(truncated_filename, "w") as f: + for line in read_lines[:lines]: + f.write(line) + return truncated_filename + + async def curl(self, *args, **kwargs): + """ + An asynchronous function that runs a cURL command with specified arguments and options. + + This function constructs and executes a cURL command based on the provided parameters. + It offers support for various cURL options such as headers, post data, and cookies. + + Args: + *args: Variable length argument list for positional arguments. Unused in this function. + url (str): The URL for the cURL request. Mandatory. + raw_path (bool, optional): If True, activates '--path-as-is' in cURL. Defaults to False. + headers (dict, optional): A dictionary of HTTP headers to include in the request. + ignore_bbot_global_settings (bool, optional): If True, ignores the global settings of BBOT. Defaults to False. + post_data (dict, optional): A dictionary containing data to be sent in the request body. + method (str, optional): The HTTP method to use for the request (e.g., 'GET', 'POST'). + cookies (dict, optional): A dictionary of cookies to include in the request. + path_override (str, optional): Overrides the request-target to use in the HTTP request line. + head_mode (bool, optional): If True, includes '-I' to fetch headers only. Defaults to None. + raw_body (str, optional): Raw string to be sent in the body of the request. + **kwargs: Arbitrary keyword arguments that will be forwarded to the HTTP request function. + + Returns: + str: The output of the cURL command. + + Raises: + CurlError: If 'url' is not supplied. 
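+
+        Note:
+            The command is executed as an argument list (no shell involved), and unless
+            ignore_bbot_global_settings is True, the scan's timeout, user agent, and any
+            custom in-scope headers are applied to the command automatically.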
+
+        Examples:
+            >>> output = await self.helpers.curl(url="https://example.com", headers={"X-Header": "Wat"})
+            >>> print(output)
+        """
+        url = kwargs.get("url", "")
+
+        if not url:
+            raise CurlError("No URL supplied to CURL helper")
+
+        curl_command = ["curl", url, "-s"]
+
+        raw_path = kwargs.get("raw_path", False)
+        if raw_path:
+            curl_command.append("--path-as-is")
+
+        # respect global ssl verify settings
+        if self.ssl_verify is not True:
+            curl_command.append("-k")
+
+        headers = kwargs.get("headers", {})
+
+        ignore_bbot_global_settings = kwargs.get("ignore_bbot_global_settings", False)
+
+        if ignore_bbot_global_settings:
+            http_timeout = 20  # setting 20 as a worst-case setting
+            log.debug("ignore_bbot_global_settings enabled. Global settings will not be applied")
+        else:
+            http_timeout = self.parent_helper.web_config.get("http_timeout", 20)
+            user_agent = self.parent_helper.web_config.get("user_agent", "BBOT")
+
+            if "User-Agent" not in headers:
+                headers["User-Agent"] = user_agent
+
+            # only add custom headers if the URL is in-scope
+            if self.parent_helper.preset.in_scope(url):
+                for hk, hv in self.web_config.get("http_headers", {}).items():
+                    headers[hk] = hv
+
+        # add the timeout (fall back to the global timeout if none was supplied)
+        timeout = kwargs.get("timeout", http_timeout)
+        curl_command.append("-m")
+        curl_command.append(str(timeout))
+
+        for k, v in headers.items():
+            if isinstance(v, list):
+                for x in v:
+                    curl_command.append("-H")
+                    curl_command.append(f"{k}: {x}")
+
+            else:
+                curl_command.append("-H")
+                curl_command.append(f"{k}: {v}")
+
+        post_data = kwargs.get("post_data", {})
+        if len(post_data.items()) > 0:
+            curl_command.append("-d")
+            post_data_str = ""
+            for k, v in post_data.items():
+                post_data_str += f"&{k}={v}"
+            curl_command.append(post_data_str.lstrip("&"))
+
+        method = kwargs.get("method", "")
+        if method:
+            curl_command.append("-X")
+            curl_command.append(method)
+
+        cookies = kwargs.get("cookies", "")
+        if cookies:
+            curl_command.append("-b")
+            cookies_str = ""
+            for k, v in cookies.items():
+                cookies_str += f"{k}={v}; "
+            curl_command.append(f"{cookies_str.rstrip(' ')}")
+
+        path_override = kwargs.get("path_override", None)
+        if path_override:
+            curl_command.append("--request-target")
+            curl_command.append(f"{path_override}")
+
+        head_mode = kwargs.get("head_mode", None)
+        if head_mode:
+            curl_command.append("-I")
+
+        raw_body = kwargs.get("raw_body", None)
+        if raw_body:
+            curl_command.append("-d")
+            curl_command.append(raw_body)
+        log.verbose(f"Running curl command: {curl_command}")
+        output = (await self.parent_helper.run(curl_command)).stdout
+        return output
+
+    def beautifulsoup(
+        self,
+        markup,
+        features="html.parser",
+        builder=None,
+        parse_only=None,
+        from_encoding=None,
+        exclude_encodings=None,
+        element_classes=None,
+        **kwargs,
+    ):
+        """
+        Navigate, Search, Modify, Parse, or PrettyPrint HTML Content.
+        More information at https://beautiful-soup-4.readthedocs.io/en/latest/
+
+        Args:
+            markup: A string or a file-like object representing markup to be parsed.
+            features: Desirable features of the parser to be used.
+                This may be the name of a specific parser ("lxml",
+                "lxml-xml", "html.parser", or "html5lib") or it may be
+                the type of markup to be used ("html", "html5", "xml").
+                Defaults to 'html.parser'.
+            builder: A TreeBuilder subclass to instantiate (or instance to use)
+                instead of looking one up based on `features`.
+            parse_only: A SoupStrainer. Only parts of the document
+                matching the SoupStrainer will be considered.
+ from_encoding: A string indicating the encoding of the + document to be parsed. + exclude_encodings = A list of strings indicating + encodings known to be wrong. + element_classes = A dictionary mapping BeautifulSoup + classes like Tag and NavigableString, to other classes you'd + like to be instantiated instead as the parse tree is + built. + **kwargs = For backwards compatibility purposes. + + Returns: + soup: An instance of the BeautifulSoup class + + Todo: + - Write tests for this function + + Examples: + >>> soup = self.helpers.beautifulsoup(event.data["body"], "html.parser") + Perform an html parse of the 'markup' argument and return a soup instance + + >>> email_type = soup.find(type="email") + Searches the soup instance for all occurrences of the passed in argument + """ + try: + soup = BeautifulSoup( + markup, features, builder, parse_only, from_encoding, exclude_encodings, element_classes, **kwargs + ) + return soup + except Exception as e: + log.debug(f"Error parsing beautifulsoup: {e}") + return False + + def response_to_json(self, response): + """ + Convert web response to JSON object, similar to the output of `httpx -irr -json` + """ + + if response is None: + return + + import mmh3 + from datetime import datetime + from hashlib import md5, sha256 + from bbot.core.helpers.misc import tagify, urlparse, split_host_port, smart_decode + + request = response.request + url = str(request.url) + parsed_url = urlparse(url) + netloc = parsed_url.netloc + scheme = parsed_url.scheme.lower() + host, port = split_host_port(f"{scheme}://{netloc}") + + raw_headers = "\r\n".join([f"{k}: {v}" for k, v in response.headers.items()]) + raw_headers_encoded = raw_headers.encode() + + headers = {} + for k, v in response.headers.items(): + k = tagify(k, delimiter="_") + headers[k] = v + + j = { + "timestamp": datetime.now().isoformat(), + "hash": { + "body_md5": md5(response.content).hexdigest(), + "body_mmh3": mmh3.hash(response.content), + "body_sha256": sha256(response.content).hexdigest(), + # "body_simhash": "TODO", + "header_md5": md5(raw_headers_encoded).hexdigest(), + "header_mmh3": mmh3.hash(raw_headers_encoded), + "header_sha256": sha256(raw_headers_encoded).hexdigest(), + # "header_simhash": "TODO", + }, + "header": headers, + "body": smart_decode(response.content), + "content_type": headers.get("content_type", "").split(";")[0].strip(), + "url": url, + "host": str(host), + "port": port, + "scheme": scheme, + "method": response.request.method, + "path": parsed_url.path, + "raw_header": raw_headers, + "status_code": response.status_code, + } + + return j diff --git a/bbot/core/helpers/wordcloud.py b/bbot/core/helpers/wordcloud.py index 7b98fa4e23..a5d9b9aaaf 100644 --- a/bbot/core/helpers/wordcloud.py +++ b/bbot/core/helpers/wordcloud.py @@ -1,32 +1,117 @@ +import re import csv import string import logging +import wordninja from pathlib import Path from contextlib import suppress from collections import OrderedDict +from .misc import tldextract, extract_words + log = logging.getLogger("bbot.core.helpers.wordcloud") class WordCloud(dict): + """ + WordCloud is a specialized dictionary-like class for storing and aggregating + words extracted from various data sources such as DNS names and URLs. The class + is intended to facilitate the generation of target-specific wordlists and mutations. + + The WordCloud class can be accessed and manipulated like a standard Python dictionary. + It also offers additional methods for generating mutations based on the words it contains. 
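+    Under the hood, each word is mapped to its occurrence count, which is what allows the
+    cloud to be ranked, truncated to its most common entries, and saved to disk between scans.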
+ + Attributes: + parent_helper: The parent helper object that provides necessary utilities. + devops_mutations: A set containing common devops-related mutations, loaded from a file. + dns_mutator: An instance of the DNSMutator class for generating DNS-based mutations. + + Examples: + >>> s = Scanner("www1.evilcorp.com", "www-test.evilcorp.com") + >>> s.start_without_generator() + >>> print(s.helpers.word_cloud) + { + "evilcorp": 2, + "ec": 2, + "www1": 1, + "evil": 2, + "www": 2, + "w1": 1, + "corp": 2, + "1": 1, + "wt": 1, + "test": 1, + "www-test": 1 + } + + >>> s.helpers.word_cloud.mutations(["word"], cloud=True, numbers=0, devops=False, letters=False) + [ + [ + "1", + "word" + ], + [ + "corp", + "word" + ], + [ + "ec", + "word" + ], + [ + "evil", + "word" + ], + ... + ] + + >>> s.helpers.word_cloud.dns_mutator.mutations("word") + [ + "word", + "word-test", + "word1", + "wordtest", + "www-word", + "wwwword" + ] + """ + def __init__(self, parent_helper, *args, **kwargs): self.parent_helper = parent_helper - self.max_backups = 20 - devops_filename = Path(__file__).parent.parent.parent / "wordlists" / "devops_mutations.txt" + devops_filename = self.parent_helper.wordlist_dir / "devops_mutations.txt" self.devops_mutations = set(self.parent_helper.read_file(devops_filename)) + self.dns_mutator = DNSMutator() + super().__init__(*args, **kwargs) def mutations( self, words, devops=True, cloud=True, letters=True, numbers=5, number_padding=2, substitute_numbers=True ): - if type(words) not in (set, list, tuple): + """ + Generate various mutations for the given list of words based on different criteria. + + Yields tuples of strings which can be joined on the desired delimiter, e.g. "-" or "_". + + Args: + words (Union[str, Iterable[str]]): A single word or list of words to mutate. + devops (bool): Whether to include devops-related mutations. + cloud (bool): Whether to include mutations from the word cloud. + letters (bool): Whether to include letter-based mutations. + numbers (int): The maximum numeric mutations to include. + number_padding (int): Padding for numeric mutations. + substitute_numbers (bool): Whether to substitute numbers in mutations. + + Yields: + tuple: A tuple containing each of the mutation segments. + """ + if isinstance(words, str): words = (words,) results = set() for word in words: h = hash(word) - if not h in results: + if h not in results: results.add(h) yield (word,) if numbers > 0: @@ -34,7 +119,7 @@ def mutations( for word in words: for number_mutation in self.get_number_mutations(word, n=numbers, padding=number_padding): h = hash(number_mutation) - if not h in results: + if h not in results: results.add(h) yield (number_mutation,) for word in words: @@ -62,18 +147,64 @@ def modifiers(self, devops=True, cloud=True, letters=True, numbers=5, number_pad return modifiers def absorb_event(self, event): + """ + Absorbs an event from a BBOT scan into the word cloud. + + This method updates the word cloud by extracting words from the given event. It aims to avoid including PTR + (Pointer) records, as they tend to produce unhelpful mutations in the word cloud. + + Args: + event (Event): The event object containing the words to be absorbed into the word cloud. 
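+
+        Note:
+            Only in-scope (scope distance 0) DNS_NAME events feed the DNS mutator, and
+            PTR-like subdomains are skipped because auto-generated reverse-DNS labels make
+            poor mutation candidates.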
+ """ for word in event.words: self.add_word(word) + if event.scope_distance == 0 and event.type.startswith("DNS_NAME"): + subdomain = tldextract(event.data).subdomain + if subdomain and not self.parent_helper.is_ptr(subdomain): + for s in subdomain.split("."): + self.dns_mutator.add_word(s) - def absorb_word(self, word, ninja=True): + def absorb_word(self, word, wordninja=True): """ - Use word ninja to smartly split the word, - e.g. "blacklantern" --> "black", "lantern" + Absorbs a word into the word cloud after splitting it using a word extraction algorithm. + + This method splits the input word into smaller meaningful words using word extraction, and then adds each + of them to the word cloud. The splitting is done using a predefined algorithm in the parent helper. + + Args: + word (str): The word to be split and absorbed into the word cloud. + wordninja (bool, optional): If True, word extraction is enabled. Defaults to True. + + Examples: + >>> self.helpers.word_cloud.absorb_word("blacklantern") + >>> print(self.helpers.word_cloud) + { + "blacklantern": 1, + "black": 1, + "bl": 1, + "lantern": 1 + } """ - for w in self.parent_helper.extract_words(word): + for w in self.parent_helper.extract_words(word, wordninja=wordninja): self.add_word(w) def add_word(self, word, lowercase=True): + """ + Adds a word to the word cloud. + + This method updates the word cloud by adding a given word. If the word already exists in the cloud, + its frequency count is incremented by 1. Optionally, the word can be converted to lowercase before adding. + + Args: + word (str): The word to be added to the word cloud. + lowercase (bool, optional): If True, the word will be converted to lowercase before adding. Defaults to True. + + Examples: + >>> self.helpers.word_cloud.add_word("Example") + >>> self.helpers.word_cloud.add_word("example") + >>> print(self.helpers.word_cloud) + {'example': 2} + """ if lowercase: word = word.lower() try: @@ -82,6 +213,34 @@ def add_word(self, word, lowercase=True): self[word] = 1 def get_number_mutations(self, base, n=5, padding=2): + """ + Generates mutations of a base string by modifying the numerical parts or appending numbers. + + This method detects existing numbers in the base string and tries incrementing and decrementing them within a + specified range. It also appends numbers at the end or after each word to generate more mutations. + + Args: + base (str): The base string to generate mutations from. + n (int, optional): The range of numbers to use for incrementing/decrementing. Defaults to 5. + padding (int, optional): Zero-pad numbers up to this length. Defaults to 2. + + Returns: + set: A set of mutated strings based on the base input. + + Examples: + >>> self.helpers.word_cloud.get_number_mutations("www2-test", n=2) + { + "www0-test", + "www1-test", + "www2-test", + "www2-test0", + "www2-test00", + "www2-test01", + "www2-test1", + "www3-test", + "www4-test" + } + """ results = set() # detects numbers and increments/decrements them @@ -121,16 +280,41 @@ def get_number_mutations(self, base, n=5, padding=2): for s in number_suffixes: results.add(f"{base}{s}") results.add(base) - results.add(s) return results def truncate(self, limit): + """ + Truncates the word cloud dictionary to retain only the top `limit` entries based on their occurrence frequencies. + + Args: + limit (int): The maximum number of entries to retain in the word cloud. 
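+
+        Returns:
+            None: The word cloud is modified in place.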
+ + Examples: + >>> self.helpers.word_cloud.update({"apple": 5, "banana": 2, "cherry": 8}) + >>> self.helpers.word_cloud.truncate(2) + >>> self.helpers.word_cloud + {'cherry': 8, 'apple': 5} + """ new_self = dict(self.json(limit=limit)) self.clear() self.update(new_self) def json(self, limit=None): + """ + Returns the word cloud as a sorted OrderedDict, optionally truncated to the top `limit` entries. + + Args: + limit (int, optional): The maximum number of entries to include in the returned OrderedDict. If None, all entries are included. + + Returns: + OrderedDict: A dictionary sorted by word frequencies, potentially truncated to the top `limit` entries. + + Examples: + >>> self.helpers.word_cloud.update({"apple": 5, "banana": 2, "cherry": 8}) + >>> self.helpers.word_cloud.json(limit=2) + OrderedDict([('cherry', 8), ('apple', 5)]) + """ cloud_sorted = sorted(self.items(), key=lambda x: x[-1], reverse=True) if limit is not None: cloud_sorted = cloud_sorted[:limit] @@ -138,9 +322,24 @@ def json(self, limit=None): @property def default_filename(self): - return self.parent_helper.scan.home / f"wordcloud.tsv" + return self.parent_helper.preset.scan.home / "wordcloud.tsv" def save(self, filename=None, limit=None): + """ + Saves the word cloud to a file. The cloud can optionally be truncated to the top `limit` entries. + + Args: + filename (str, optional): The path to the file where the word cloud will be saved. If None, uses a default filename. + limit (int, optional): The maximum number of entries to save to the file. If None, all entries are saved. + + Returns: + tuple: A tuple containing a boolean indicating success or failure, and the resolved filename. + + Examples: + >>> self.helpers.word_cloud.update({"apple": 5, "banana": 2, "cherry": 8}) + >>> self.helpers.word_cloud.save(filename="word_cloud.txt", limit=2) + (True, Path('word_cloud.txt')) + """ if filename is None: filename = self.default_filename else: @@ -158,20 +357,27 @@ def save(self, filename=None, limit=None): log.debug(f"Saved word cloud ({len(self):,} words) to {filename}") return True, filename else: - log.debug(f"No words to save") + log.debug("No words to save") except Exception as e: import traceback log.warning(f"Failed to save word cloud to {filename}: {e}") - log.debug(traceback.format_exc()) + log.trace(traceback.format_exc()) return False, filename def load(self, filename=None): + """ + Loads a word cloud from a file. The file can be either a standard wordlist with one entry per line + or a .tsv (tab-separated) file where the first row is the count and the second row is the associated entry. + + Args: + filename (str, optional): The path to the file from which to load the word cloud. If None, uses a default filename. + """ if filename is None: wordcloud_path = self.default_filename else: wordcloud_path = Path(filename).resolve() - log.verbose(f"Loading word cloud from {filename}") + log.verbose(f"Loading word cloud from {wordcloud_path}") try: with open(str(wordcloud_path), newline="") as f: c = csv.reader(f, delimiter="\t") @@ -193,4 +399,128 @@ def load(self, filename=None): log_fn = log.warning log_fn(f"Failed to load word cloud from {wordcloud_path}: {e}") if filename is not None: - log.debug(traceback.format_exc()) + log.trace(traceback.format_exc()) + + +class Mutator(dict): + """ + Base class for generating mutations from a list of words. + It accumulates words and produces mutations from them. 
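+
+    Mutations are stored as tuples in which None marks the position where the input word
+    gets substituted. A minimal illustration (using the internal _add_mutation() directly,
+    since the base class's add_word() is a no-op):
+
+        >>> m = Mutator()
+        >>> m._add_mutation(("www-", None))
+        >>> m.mutations("word")
+        {'www-word'}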
+ """ + + def mutations(self, words, max_mutations=None): + mutations = self.top_mutations(max_mutations) + ret = set() + if isinstance(words, str): + words = [words] + for word in words: + for m in self.mutate(word, mutations=mutations): + ret.add("".join(m)) + return ret + + def mutate(self, word, max_mutations=None, mutations=None): + if mutations is None: + mutations = self.top_mutations(max_mutations) + for mutation in mutations.keys(): + ret = [] + for s in mutation: + if s is not None: + ret.append(s) + else: + ret.append(word) + yield ret + + def top_mutations(self, n=None): + if n is not None: + return dict(sorted(self.items(), key=lambda x: x[-1], reverse=True)[:n]) + else: + return dict(self) + + def _add_mutation(self, mutation): + if None not in mutation: + return + mutation = tuple([m for m in mutation if m != ""]) + try: + self[mutation] += 1 + except KeyError: + self[mutation] = 1 + + def add_word(self, word): + pass + + +class DNSMutator(Mutator): + """ + DNS-specific mutator used by the `dnsbrute_mutations` module to generate target-specific subdomain mutations. + + This class extends the Mutator base class to add DNS-specific logic for generating + subdomain mutations based on input words. It utilizes custom word extraction patterns + and a wordninja model trained on DNS-specific data. + + Examples: + >>> s = Scanner("www1.evilcorp.com", "www-test.evilcorp.com") + >>> s.start_without_generator() + >>> s.helpers.word_cloud.dns_mutator.mutations("word") + [ + "word", + "word-test", + "word1", + "wordtest", + "www-word", + "wwwword" + ] + """ + + extract_word_regexes = [ + re.compile(r, re.I) + for r in [ + r"[a-z]+", + r"[a-z_-]+", + r"[a-z0-9]+", + r"[a-z0-9_-]+", + ] + ] + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + wordlist_dir = Path(__file__).parent.parent.parent / "wordlists" + wordninja_dns_wordlist = wordlist_dir / "wordninja_dns.txt.gz" + self.model = wordninja.LanguageModel(wordninja_dns_wordlist) + + def mutations(self, words, max_mutations=None): + if isinstance(words, str): + words = [words] + new_words = set() + for word in words: + for e in extract_words(word, acronyms=False, model=self.model, word_regexes=self.extract_word_regexes): + new_words.add(e) + return super().mutations(new_words, max_mutations=max_mutations) + + def add_word(self, word): + spans = set() + mutations = set() + for r in self.extract_word_regexes: + for match in r.finditer(word): + span = match.span() + if span not in spans: + spans.add(span) + for start, end in spans: + match_str = word[start:end] + # skip digits + if match_str.isdigit(): + continue + before = word[:start] + after = word[end:] + basic_mutation = (before, None, after) + mutations.add(basic_mutation) + match_str_split = self.model.split(match_str) + if len(match_str_split) > 1: + for i, s in enumerate(match_str_split): + if s.isdigit(): + continue + split_before = "".join(match_str_split[:i]) + split_after = "".join(match_str_split[i + 1 :]) + wordninja_mutation = (before + split_before, None, split_after + after) + mutations.add(wordninja_mutation) + for m in mutations: + self._add_mutation(m) diff --git a/bbot/core/logger/__init__.py b/bbot/core/logger/__init__.py deleted file mode 100644 index 69d981ce68..0000000000 --- a/bbot/core/logger/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from .logger import init_logging, get_log_level diff --git a/bbot/core/logger/logger.py b/bbot/core/logger/logger.py deleted file mode 100644 index e8b613abef..0000000000 --- a/bbot/core/logger/logger.py +++ 
/dev/null @@ -1,221 +0,0 @@ -import sys -import atexit -import logging -from copy import copy -from pathlib import Path -from queue import SimpleQueue -from contextlib import suppress -from logging.handlers import QueueHandler, QueueListener - -from ..configurator import config -from ..helpers.misc import mkdir, error_and_exit - - -class ColoredFormatter(logging.Formatter): - """ - Pretty colors for terminal - """ - - color_mapping = { - "DEBUG": 242, # grey - "VERBOSE": 242, # grey - "INFO": 69, # blue - "HUGEINFO": 69, # blue - "SUCCESS": 118, # green - "HUGESUCCESS": 118, # green - "WARNING": 208, # orange - "HUGEWARNING": 208, # orange - "ERROR": 196, # red - "CRITICAL": 196, # red - } - - char_mapping = { - "DEBUG": "DBUG", - "VERBOSE": "VERB", - "HUGEVERBOSE": "VERB", - "INFO": "INFO", - "HUGEINFO": "INFO", - "SUCCESS": "SUCC", - "HUGESUCCESS": "SUCC", - "WARNING": "WARN", - "HUGEWARNING": "WARN", - "ERROR": "ERRR", - "CRITICAL": "CRIT", - } - - prefix = "\033[1;38;5;" - suffix = "\033[0m" - - def format(self, record): - - colored_record = copy(record) - levelname = colored_record.levelname - levelchar = self.char_mapping.get(levelname, "INFO") - seq = self.color_mapping.get(levelname, 15) # default white - colored_levelname = f"{self.prefix}{seq}m[{levelchar}]{self.suffix}" - if levelname == "CRITICAL" or levelname.startswith("HUGE"): - colored_record.msg = f"{self.prefix}{seq}m{colored_record.msg}{self.suffix}" - colored_record.levelname = colored_levelname - - return logging.Formatter.format(self, colored_record) - - -def addLoggingLevel(levelName, levelNum, methodName=None): - """ - Comprehensively adds a new logging level to the `logging` module and the - currently configured logging class. - - `levelName` becomes an attribute of the `logging` module with the value - `levelNum`. `methodName` becomes a convenience method for both `logging` - itself and the class returned by `logging.getLoggerClass()` (usually just - `logging.Logger`). If `methodName` is not specified, `levelName.lower()` is - used. 
- - To avoid accidental clobberings of existing attributes, this method will - raise an `AttributeError` if the level name is already an attribute of the - `logging` module or if the method name is already present - - Example - ------- - >>> addLoggingLevel('TRACE', logging.DEBUG - 5) - >>> logging.getLogger(__name__).setLevel('TRACE') - >>> logging.getLogger(__name__).trace('that worked') - >>> logging.trace('so did this') - >>> logging.TRACE - 5 - - """ - if not methodName: - methodName = levelName.lower() - - if hasattr(logging, levelName): - raise AttributeError("{} already defined in logging module".format(levelName)) - if hasattr(logging, methodName): - raise AttributeError("{} already defined in logging module".format(methodName)) - if hasattr(logging.getLoggerClass(), methodName): - raise AttributeError("{} already defined in logger class".format(methodName)) - - # This method was inspired by the answers to Stack Overflow post - # http://stackoverflow.com/q/2183233/2988730, especially - # http://stackoverflow.com/a/13638084/2988730 - def logForLevel(self, message, *args, **kwargs): - if self.isEnabledFor(levelNum): - self._log(levelNum, message, args, **kwargs) - - def logToRoot(message, *args, **kwargs): - logging.log(levelNum, message, *args, **kwargs) - - logging.addLevelName(levelNum, levelName) - setattr(logging, levelName, levelNum) - setattr(logging.getLoggerClass(), methodName, logForLevel) - setattr(logging, methodName, logToRoot) - - -# custom logging levels -addLoggingLevel("STDOUT", 100) -addLoggingLevel("HUGEWARNING", 31) -addLoggingLevel("HUGESUCCESS", 26) -addLoggingLevel("SUCCESS", 25) -addLoggingLevel("HUGEINFO", 21) -addLoggingLevel("HUGEVERBOSE", 16) -addLoggingLevel("VERBOSE", 15) - - -def stop_listener(listener): - with suppress(Exception): - listener.stop() - - -def log_worker_setup(logging_queue): - """ - This needs to be run whenever a new multiprocessing.Process() is spawned - """ - log_level = get_log_level() - log = logging.getLogger("bbot") - # Don't do this more than once - if len(log.handlers) == 0: - log.setLevel(log_level) - queue_handler = QueueHandler(logging_queue) - log.addHandler(queue_handler) - return log - - -def log_listener_setup(logging_queue): - - log_dir = Path(config["home"]) / "logs" - if not mkdir(log_dir, raise_error=False): - error_and_exit(f"Failure creating or error writing to BBOT logs directory ({log_dir})") - - # Log to stderr - stderr_handler = logging.StreamHandler(sys.stderr) - - # Log to stdout - stdout_handler = logging.StreamHandler(sys.stdout) - - # Main log file - main_handler = logging.handlers.TimedRotatingFileHandler( - f"{log_dir}/bbot.log", when="d", interval=1, backupCount=14 - ) - - # Separate log file for debugging - debug_handler = logging.handlers.TimedRotatingFileHandler( - f"{log_dir}/bbot.debug.log", when="d", interval=1, backupCount=14 - ) - - log_level = get_log_level() - - stderr_handler.addFilter(lambda x: x.levelno != logging.STDOUT and x.levelno >= log_level) - stdout_handler.addFilter(lambda x: x.levelno == logging.STDOUT) - debug_handler.addFilter(lambda x: x.levelno != logging.STDOUT and x.levelno >= logging.DEBUG) - main_handler.addFilter(lambda x: x.levelno != logging.STDOUT and x.levelno >= logging.VERBOSE) - - # Set log format - debug_format = logging.Formatter("%(asctime)s [%(levelname)s] %(name)s %(filename)s:%(lineno)s %(message)s") - debug_handler.setFormatter(debug_format) - main_handler.setFormatter(debug_format) - stderr_handler.setFormatter(ColoredFormatter("%(levelname)s %(name)s: 
%(message)s")) - stdout_handler.setFormatter(logging.Formatter("%(message)s")) - - handlers = [stdout_handler, stderr_handler, main_handler] - if config.get("debug", False): - handlers.append(debug_handler) - - log_listener = QueueListener(logging_queue, *handlers) - log_listener.start() - atexit.register(stop_listener, log_listener) - return { - "stderr": stderr_handler, - "stdout": stdout_handler, - "file_debug": debug_handler, - "file_main": main_handler, - } - - -def init_logging(): - """ - Initializes logging, returns logging queue and dictionary containing log handlers - """ - - handlers = {} - logging_queue = None - - log = logging.getLogger("bbot") - # Don't do this more than once - if len(log.handlers) == 0: - logging_queue = SimpleQueue() - handlers = log_listener_setup(logging_queue) - log_worker_setup(logging_queue) - - return logging_queue, handlers - - -def get_log_level(): - from bbot.core.configurator.args import cli_options - - loglevel = logging.INFO - if cli_options is not None: - if cli_options.verbose: - loglevel = logging.VERBOSE - if cli_options.debug: - loglevel = logging.DEBUG - return loglevel diff --git a/bbot/core/modules.py b/bbot/core/modules.py new file mode 100644 index 0000000000..c83d34a96f --- /dev/null +++ b/bbot/core/modules.py @@ -0,0 +1,740 @@ +import re +import ast +import sys +import atexit +import pickle +import logging +import importlib +import omegaconf +import traceback +from copy import copy +from pathlib import Path +from omegaconf import OmegaConf +from contextlib import suppress + +from bbot.core import CORE +from bbot.errors import BBOTError +from bbot.logger import log_to_stderr + +from .flags import flag_descriptions +from .shared_deps import SHARED_DEPS +from .helpers.misc import ( + list_files, + sha1, + search_dict_by_key, + search_format_dict, + make_table, + os_platform, + mkdir, +) + + +log = logging.getLogger("bbot.module_loader") + +bbot_code_dir = Path(__file__).parent.parent + + +class ModuleLoader: + """ + Main class responsible for preloading BBOT modules. + + This class is in charge of preloading modules to determine their dependencies. + Once dependencies are identified, they are installed before the actual module is imported. + This ensures that all requisite libraries and components are available for the module to function correctly. 
+ """ + + default_module_dir = bbot_code_dir / "modules" + + module_dir_regex = re.compile(r"^[a-z][a-z0-9_]*$") + + # if a module consumes these event types, automatically assume these dependencies + default_module_deps = {"HTTP_RESPONSE": "httpx", "URL": "httpx", "SOCIAL": "social"} + + def __init__(self): + self.core = CORE + + self._shared_deps = dict(SHARED_DEPS) + + self.__preloaded = {} + self._modules = {} + self._configs = {} + self.flag_choices = set() + self.all_module_choices = set() + self.scan_module_choices = set() + self.output_module_choices = set() + self.internal_module_choices = set() + + self._preload_cache = None + + self._module_dirs = set() + self._module_dirs_preloaded = set() + self.add_module_dir(self.default_module_dir) + + # save preload cache before exiting + atexit.register(self.save_preload_cache) + + def copy(self): + module_loader_copy = copy(self) + module_loader_copy.__preloaded = dict(self.__preloaded) + return module_loader_copy + + @property + def preload_cache_file(self): + return self.core.cache_dir / "module_preload_cache" + + @property + def module_dirs(self): + return self._module_dirs + + def add_module_dir(self, module_dir): + module_dir = Path(module_dir).resolve() + if module_dir in self._module_dirs: + log.debug(f'Already added custom module dir "{module_dir}"') + return + if not module_dir.is_dir(): + log.warning(f'Failed to add custom module dir "{module_dir}", please make sure it exists') + return + new_module_dirs = set() + for _module_dir in self.get_recursive_dirs(module_dir): + _module_dir = Path(_module_dir).resolve() + if _module_dir not in self._module_dirs: + self._module_dirs.add(_module_dir) + new_module_dirs.add(_module_dir) + self.preload(module_dirs=new_module_dirs) + + def file_filter(self, file): + file = file.resolve() + if "templates" in file.parts: + return False + return file.suffix.lower() == ".py" and file.stem not in ["base", "__init__"] + + def preload(self, module_dirs=None): + """Preloads all BBOT modules. + + This function recursively iterates through each file in the module directories + and preloads each BBOT module to gather its meta-information and dependencies. + + Args: + module_dir (str or Path): Directory containing BBOT modules to be preloaded. + + Returns: + dict: A dictionary where keys are the names of the preloaded modules and + values are their respective preloaded data. + + Examples: + >>> preload("/path/to/bbot_modules/") + { + "module1": {...}, + "module2": {...}, + ... 
+            }
+        """
+        new_modules = False
+        if module_dirs is None:
+            module_dirs = self.module_dirs
+
+        for module_dir in module_dirs:
+            if module_dir in self._module_dirs_preloaded:
+                log.debug(f"Already preloaded modules from {module_dir}")
+                continue
+
+            log.debug(f"Preloading modules from {module_dir}")
+            new_modules = True
+            for module_file in list_files(module_dir, filter=self.file_filter):
+                module_name = module_file.stem
+                module_file = module_file.resolve()
+
+                # try to load from cache
+                module_cache_key = (str(module_file), tuple(module_file.stat()))
+                preloaded = self.preload_cache.get(module_name, {})
+                cache_key = preloaded.get("cache_key", ())
+                if preloaded and module_cache_key == cache_key:
+                    log.debug(f"Preloading {module_name} from cache")
+                else:
+                    log.debug(f"Preloading {module_name} from disk")
+                    if module_dir.name == "modules":
+                        namespace = "bbot.modules"
+                    else:
+                        namespace = f"bbot.modules.{module_dir.name}"
+                    try:
+                        preloaded = self.preload_module(module_file)
+                        module_type = "scan"
+                        if module_dir.name in ("output", "internal"):
+                            module_type = str(module_dir.name)
+                        elif module_dir.name != "modules":
+                            flags = set(preloaded["flags"] + [module_dir.name])
+                            preloaded["flags"] = sorted(flags)
+
+                        # derive module dependencies from watched event types (only for scan modules)
+                        if module_type == "scan":
+                            for event_type in preloaded["watched_events"]:
+                                if event_type in self.default_module_deps:
+                                    deps_modules = set(preloaded.get("deps", {}).get("modules", []))
+                                    deps_modules.add(self.default_module_deps[event_type])
+                                    preloaded["deps"]["modules"] = sorted(deps_modules)
+
+                        preloaded["type"] = module_type
+                        preloaded["namespace"] = namespace
+                        preloaded["cache_key"] = module_cache_key
+
+                    except Exception:
+                        log_to_stderr(f"Error preloading {module_file}\n\n{traceback.format_exc()}", level="CRITICAL")
+                        log_to_stderr(f"Error in {module_file.name}", level="CRITICAL")
+                        sys.exit(1)
+
+                self.all_module_choices.add(module_name)
+                module_type = preloaded.get("type", "scan")
+                if module_type == "scan":
+                    self.scan_module_choices.add(module_name)
+                elif module_type == "output":
+                    self.output_module_choices.add(module_name)
+                elif module_type == "internal":
+                    self.internal_module_choices.add(module_name)
+
+                flags = preloaded.get("flags", [])
+                self.flag_choices.update(set(flags))
+
+                self.__preloaded[module_name] = preloaded
+                config = OmegaConf.create(preloaded.get("config", {}))
+                self._configs[module_name] = config
+
+            self._module_dirs_preloaded.add(module_dir)
+
+        # update default config with module defaults
+        module_config = omegaconf.OmegaConf.create(
+            {
+                "modules": self.configs(),
+            }
+        )
+        self.core.merge_default(module_config)
+
+        return new_modules
+
+    @property
+    def preload_cache(self):
+        if self._preload_cache is None:
+            self._preload_cache = {}
+            if self.preload_cache_file.is_file():
+                with suppress(Exception):
+                    with open(self.preload_cache_file, "rb") as f:
+                        self._preload_cache = pickle.load(f)
+        return self._preload_cache
+
+    @preload_cache.setter
+    def preload_cache(self, value):
+        self._preload_cache = value
+        mkdir(self.preload_cache_file.parent)
+        with open(self.preload_cache_file, "wb") as f:
+            pickle.dump(self._preload_cache, f)
+
+    def save_preload_cache(self):
+        self.preload_cache = self.__preloaded
+
+    @property
+    def _preloaded(self):
+        return self.__preloaded
+
+    def get_recursive_dirs(self, *dirs):
+        dirs = {Path(d).resolve() for d in dirs}
+        for d in list(dirs):
+            if not d.is_dir():
+                continue
+            for p in d.iterdir():
+                if p.is_dir() and self.module_dir_regex.match(p.name):
+                    dirs.update(self.get_recursive_dirs(p))
+        return dirs
+
+    def preloaded(self, type=None):
+        preloaded = {}
+        if type is not None:
+            preloaded = {k: v for k, v in self._preloaded.items() if self.check_type(k, type)}
+        else:
+            preloaded = dict(self._preloaded)
+        return preloaded
+
+    def configs(self, type=None):
+        configs = {}
+        if type is not None:
+            configs = {k: v for k, v in self._configs.items() if self.check_type(k, type)}
+        else:
+            configs = dict(self._configs)
+        return OmegaConf.create(configs)
+
+    def find_and_replace(self, **kwargs):
+        self.__preloaded = search_format_dict(self.__preloaded, **kwargs)
+        self._shared_deps = search_format_dict(self._shared_deps, **kwargs)
+
+    def check_type(self, module, type):
+        return self._preloaded[module]["type"] == type
+
+    def preload_module(self, module_file):
+        """
+        Preloads a BBOT module to gather its meta-information and dependencies.
+
+        This function reads a BBOT module file, extracts its attributes such as
+        events watched and produced, flags, meta-information, and dependencies.
+
+        Args:
+            module_file (str): Path to the BBOT module file.
+
+        Returns:
+            dict: A dictionary containing meta-information and dependencies for the module.
+
+        Examples:
+            >>> preload_module("bbot/modules/wappalyzer.py")
+            {
+                "watched_events": [
+                    "HTTP_RESPONSE"
+                ],
+                "produced_events": [
+                    "TECHNOLOGY"
+                ],
+                "flags": [
+                    "active",
+                    "safe",
+                    "web-basic",
+                    "web-thorough"
+                ],
+                "meta": {
+                    "description": "Extract technologies from web responses"
+                },
+                "config": {},
+                "options_desc": {},
+                "hash": "d5a88dd3866c876b81939c920bf4959716e2a374",
+                "deps": {
+                    "modules": [
+                        "httpx"
+                    ],
+                    "pip": [
+                        "python-Wappalyzer~=0.3.1"
+                    ],
+                    "pip_constraints": [],
+                    "shell": [],
+                    "apt": [],
+                    "ansible": []
+                },
+                "sudo": false
+            }
+        """
+        watched_events = set()
+        produced_events = set()
+        flags = set()
+        meta = {}
+        deps_modules = set()
+        deps_pip = []
+        deps_pip_constraints = []
+        deps_shell = []
+        deps_apt = []
+        deps_common = []
+        ansible_tasks = []
+        python_code = open(module_file).read()
+        # take a hash of the code so we can keep track of when it changes
+        module_hash = sha1(python_code).hexdigest()
+        parsed_code = ast.parse(python_code)
+        config = {}
+        options_desc = {}
+        for root_element in parsed_code.body:
+            # look for classes
+            if type(root_element) == ast.ClassDef:
+                for class_attr in root_element.body:
+                    # class attributes that are dictionaries
+                    if type(class_attr) == ast.Assign and type(class_attr.value) == ast.Dict:
+                        # module options
+                        if any(target.id == "options" for target in class_attr.targets):
+                            config.update(ast.literal_eval(class_attr.value))
+                        # module option descriptions
+                        elif any(target.id == "options_desc" for target in class_attr.targets):
+                            options_desc.update(ast.literal_eval(class_attr.value))
+                        # module metadata
+                        elif any(target.id == "meta" for target in class_attr.targets):
+                            meta = ast.literal_eval(class_attr.value)
+
+                    # class attributes that are lists
+                    if type(class_attr) == ast.Assign and type(class_attr.value) == ast.List:
+                        # flags
+                        if any(target.id == "flags" for target in class_attr.targets):
+                            for flag in class_attr.value.elts:
+                                if type(flag.value) == str:
+                                    flags.add(flag.value)
+                        # watched events
+                        elif any(target.id == "watched_events" for target in class_attr.targets):
+                            for event_type in class_attr.value.elts:
+                                if type(event_type.value) == str:
+                                    watched_events.add(event_type.value)
+                        # produced events
+                        elif any(target.id == "produced_events" for target in class_attr.targets):
+                            for event_type in class_attr.value.elts:
+                                if type(event_type.value) == str:
+                                    produced_events.add(event_type.value)
+
+                        # bbot module dependencies
+                        elif any(target.id == "deps_modules" for target in class_attr.targets):
+                            for dep_module in class_attr.value.elts:
+                                if type(dep_module.value) == str:
+                                    deps_modules.add(dep_module.value)
+                        # python dependencies
+                        elif any(target.id == "deps_pip" for target in class_attr.targets):
+                            for dep_pip in class_attr.value.elts:
+                                if type(dep_pip.value) == str:
+                                    deps_pip.append(dep_pip.value)
+                        elif any(target.id == "deps_pip_constraints" for target in class_attr.targets):
+                            for dep_pip in class_attr.value.elts:
+                                if type(dep_pip.value) == str:
+                                    deps_pip_constraints.append(dep_pip.value)
+                        # apt dependencies
+                        elif any(target.id == "deps_apt" for target in class_attr.targets):
+                            for dep_apt in class_attr.value.elts:
+                                if type(dep_apt.value) == str:
+                                    deps_apt.append(dep_apt.value)
+                        # bash dependencies
+                        elif any(target.id == "deps_shell" for target in class_attr.targets):
+                            for dep_shell in class_attr.value.elts:
+                                deps_shell.append(ast.literal_eval(dep_shell))
+                        # ansible playbook
+                        elif any(target.id == "deps_ansible" for target in class_attr.targets):
+                            ansible_tasks = ast.literal_eval(class_attr.value)
+                        # shared/common module dependencies
+                        elif any(target.id == "deps_common" for target in class_attr.targets):
+                            for dep_common in class_attr.value.elts:
+                                if type(dep_common.value) == str:
+                                    deps_common.append(dep_common.value)
+
+        for task in ansible_tasks:
+            if "become" not in task:
+                task["become"] = False
+            # don't sudo brew
+            elif os_platform() == "darwin" and ("package" in task and task.get("become", False) is True):
+                task["become"] = False
+
+        preloaded_data = {
+            "path": str(module_file.resolve()),
+            "watched_events": sorted(watched_events),
+            "produced_events": sorted(produced_events),
+            "flags": sorted(flags),
+            "meta": meta,
+            "config": config,
+            "options_desc": options_desc,
+            "hash": module_hash,
+            "deps": {
+                "modules": sorted(deps_modules),
+                "pip": deps_pip,
+                "pip_constraints": deps_pip_constraints,
+                "shell": deps_shell,
+                "apt": deps_apt,
+                "ansible": ansible_tasks,
+                "common": deps_common,
+            },
+            "sudo": len(deps_apt) > 0,
+        }
+        ansible_task_list = list(ansible_tasks)
+        for dep_common in deps_common:
+            try:
+                ansible_task_list.extend(self._shared_deps[dep_common])
+            except KeyError:
+                common_choices = ",".join(self._shared_deps)
+                raise BBOTError(
+                    f'Error while preloading module "{module_file}": No shared dependency named "{dep_common}" (choices: {common_choices})'
+                )
+        for ansible_task in ansible_task_list:
+            if any(x is True for x in search_dict_by_key("become", ansible_task)) or any(
+                x is True for x in search_dict_by_key("ansible_become", ansible_task)
+            ):
+                preloaded_data["sudo"] = True
+        return preloaded_data
+
+    def load_modules(self, module_names):
+        modules = {}
+        for module_name in module_names:
+            module = self.load_module(module_name)
+            modules[module_name] = module
+            self._modules[module_name] = module
+        return modules
+
+    def load_module(self, module_name):
+        """Loads a BBOT module by its name.
+
+        Imports the module from its namespace, locates its class, and returns it.
+        Identifies modules based on the presence of `watched_events` and `produced_events` attributes.
+
+        Args:
+            module_name (str): The name of the module to load.
+
+        Returns:
+            object: The loaded module class object.
+ + Examples: + >>> module = load_module("example_module") + >>> isinstance(module, object) + True + """ + preloaded = self._preloaded[module_name] + namespace = preloaded["namespace"] + try: + module_path = preloaded["path"] + except KeyError: + module_path = preloaded["cache_key"][0] + full_namespace = f"{namespace}.{module_name}" + + spec = importlib.util.spec_from_file_location(full_namespace, module_path) + module = importlib.util.module_from_spec(spec) + sys.modules[full_namespace] = module + spec.loader.exec_module(module) + + # for every top-level variable in the .py file + for variable in module.__dict__.keys(): + # get its value + value = getattr(module, variable) + with suppress(AttributeError): + # if it has watched_events and produced_events + if all( + type(a) == list + for a in (getattr(value, "watched_events", None), getattr(value, "produced_events", None)) + ): + # and if its variable name matches its filename + if value.__name__.lower() == module_name.lower(): + value._name = module_name + # then we have a module + return value + + def recommend_dependencies(self, modules): + """ + Returns a dictionary containing missing dependencies and their suggested resolutions + + Needs work. For this we should probably be building a dependency graph + """ + resolve_choices = {} + # step 1: build a dictionary containing event types and their associated modules + # {"IP_ADDRESS": set("masscan", "ipneighbor", ...)} + watched = {} + produced = {} + for modname in modules: + preloaded = self._preloaded.get(modname) + if preloaded: + for event_type in preloaded.get("watched_events", []): + self.add_or_create(watched, event_type, modname) + for event_type in preloaded.get("produced_events", []): + self.add_or_create(produced, event_type, modname) + watched_all = {} + produced_all = {} + for modname, preloaded in self.preloaded().items(): + if preloaded: + for event_type in preloaded.get("watched_events", []): + self.add_or_create(watched_all, event_type, modname) + for event_type in preloaded.get("produced_events", []): + self.add_or_create(produced_all, event_type, modname) + + # step 2: check to see if there are missing dependencies + for modname in modules: + preloaded = self._preloaded.get(modname) + module_type = preloaded.get("type", "unknown") + if module_type != "scan": + continue + watched_events = preloaded.get("watched_events", []) + missing_deps = {e: not self.check_dependency(e, modname, produced) for e in watched_events} + if all(missing_deps.values()): + for event_type in watched_events: + if event_type == "SCAN": + continue + choices = produced_all.get(event_type, []) + choices = set(choices) + with suppress(KeyError): + choices.remove(modname) + if event_type not in resolve_choices: + resolve_choices[event_type] = {} + deps = resolve_choices[event_type] + self.add_or_create(deps, "required_by", modname) + for c in choices: + choice_type = self._preloaded.get(c, {}).get("type", "unknown") + if choice_type == "scan": + self.add_or_create(deps, "recommended", c) + + return resolve_choices + + def check_dependency(self, event_type, modname, produced): + if event_type not in produced: + return False + if produced[event_type] == {modname}: + return False + return True + + @staticmethod + def add_or_create(d, k, *items): + try: + d[k].update(set(items)) + except KeyError: + d[k] = set(items) + + def modules_table(self, modules=None, mod_type=None, include_author=False, include_created_date=False): + """Generates a table of module information. 
+ + Constructs a table to display information such as module name, type, and event details. + + Args: + modules (list, optional): List of module names to include in the table. + mod_type (str, optional): Type of modules to include ('scan', 'output', 'internal'). + + Returns: + str: A formatted table string. + + Examples: + >>> print(modules_table(["portscan"])) + +----------+--------+-----------------+------------------------------+-------------------------------+----------------------+-------------------+ + | Module | Type | Needs API Key | Description | Flags | Consumed Events | Produced Events | + +==========+========+=================+==============================+===============================+======================+===================+ + | portscan | scan | No | Execute port scans | active, aggressive, portscan, | DNS_NAME, IP_ADDRESS | OPEN_TCP_PORT | + | | | | | web-thorough | | | + +----------+--------+-----------------+------------------------------+-------------------------------+----------------------+-------------------+ + """ + + table = [] + header = ["Module", "Type", "Needs API Key", "Description", "Flags", "Consumed Events", "Produced Events"] + if include_author: + header.append("Author") + if include_created_date: + header.append("Created Date") + maxcolwidths = [20, 10, 5, 30, 30, 20, 20] + for module_name, preloaded in self.filter_modules(modules, mod_type): + module_type = preloaded["type"] + consumed_events = sorted(preloaded.get("watched_events", [])) + produced_events = sorted(preloaded.get("produced_events", [])) + flags = sorted(preloaded.get("flags", [])) + api_key_required = "" + meta = preloaded.get("meta", {}) + api_key_required = "Yes" if meta.get("auth_required", False) else "No" + description = meta.get("description", "") + row = [ + module_name, + module_type, + api_key_required, + description, + ", ".join(flags), + ", ".join(consumed_events), + ", ".join(produced_events), + ] + if include_author: + author = meta.get("author", "") + row.append(author) + if include_created_date: + created_date = meta.get("created_date", "") + row.append(created_date) + table.append(row) + return make_table(table, header, maxcolwidths=maxcolwidths) + + def modules_options(self, modules=None, mod_type=None): + """ + Return a list of module options + """ + modules_options = {} + for module_name, preloaded in self.filter_modules(modules, mod_type): + modules_options[module_name] = [] + module_options = preloaded["config"] + module_options_desc = preloaded["options_desc"] + for k, v in sorted(module_options.items(), key=lambda x: x[0]): + option_name = f"modules.{module_name}.{k}" + option_type = type(v).__name__ + option_description = module_options_desc[k] + modules_options[module_name].append((option_name, option_type, option_description, str(v))) + return modules_options + + def modules_options_table(self, modules=None, mod_type=None): + table = [] + header = ["Config Option", "Type", "Description", "Default"] + for module_options in self.modules_options(modules, mod_type).values(): + table += module_options + return make_table(table, header) + + def flags(self, flags=None): + _flags = {} + for module_name, preloaded in self.preloaded().items(): + for flag in preloaded.get("flags", []): + if not flags or flag in flags: + try: + _flags[flag].add(module_name) + except KeyError: + _flags[flag] = {module_name} + + _flags = sorted(_flags.items(), key=lambda x: x[0]) + _flags = sorted(_flags, key=lambda x: len(x[-1]), reverse=True) + return _flags + + def flags_table(self, 
flags=None): + table = [] + header = ["Flag", "# Modules", "Description", "Modules"] + maxcolwidths = [20, 5, 40, 80] + _flags = self.flags(flags=flags) + for flag, modules in _flags: + description = flag_descriptions.get(flag, "") + table.append([flag, f"{len(modules)}", description, ", ".join(sorted(modules))]) + return make_table(table, header, maxcolwidths=maxcolwidths) + + def events(self): + consuming_events = {} + producing_events = {} + for module_name, preloaded in self.preloaded().items(): + consumed = preloaded.get("watched_events", []) + produced = preloaded.get("produced_events", []) + for c in consumed: + try: + consuming_events[c].add(module_name) + except KeyError: + consuming_events[c] = {module_name} + for c in produced: + try: + producing_events[c].add(module_name) + except KeyError: + producing_events[c] = {module_name} + return consuming_events, producing_events + + def events_table(self): + table = [] + header = ["Event Type", "# Consuming Modules", "# Producing Modules", "Consuming Modules", "Producing Modules"] + consuming_events, producing_events = self.events() + all_event_types = sorted(set(consuming_events).union(set(producing_events))) + for e in all_event_types: + consuming = sorted(consuming_events.get(e, [])) + producing = sorted(producing_events.get(e, [])) + table.append([e, len(consuming), len(producing), ", ".join(consuming), ", ".join(producing)]) + return make_table(table, header) + + def filter_modules(self, modules=None, mod_type=None): + if modules is None: + module_list = list(self.preloaded(type=mod_type).items()) + else: + module_list = [(m, self._preloaded[m]) for m in modules] + module_list.sort(key=lambda x: x[0]) + module_list.sort(key=lambda x: "passive" in x[-1]["flags"]) + module_list.sort(key=lambda x: x[-1]["type"], reverse=True) + return module_list + + def ensure_config_files(self): + files = self.core.files_config + mkdir(files.config_dir) + + comment_notice = ( + "# NOTICE: THESE ENTRIES ARE COMMENTED BY DEFAULT\n" + + "# Please be sure to uncomment when inserting API keys, etc.\n" + ) + + config_obj = OmegaConf.to_object(self.core.default_config) + + # ensure bbot.yml + if not files.config_filename.exists(): + log_to_stderr(f"Creating BBOT config at {files.config_filename}") + no_secrets_config = self.core.no_secrets_config(config_obj) + yaml = OmegaConf.to_yaml(no_secrets_config) + yaml = comment_notice + "\n".join(f"# {line}" for line in yaml.splitlines()) + with open(str(files.config_filename), "w") as f: + f.write(yaml) + + # ensure secrets.yml + if not files.secrets_filename.exists(): + log_to_stderr(f"Creating BBOT secrets at {files.secrets_filename}") + secrets_only_config = self.core.secrets_only_config(config_obj) + yaml = OmegaConf.to_yaml(secrets_only_config) + yaml = comment_notice + "\n".join(f"# {line}" for line in yaml.splitlines()) + with open(str(files.secrets_filename), "w") as f: + f.write(yaml) + files.secrets_filename.chmod(0o600) + + +MODULE_LOADER = ModuleLoader() diff --git a/bbot/core/multiprocess.py b/bbot/core/multiprocess.py new file mode 100644 index 0000000000..5b2b2263fb --- /dev/null +++ b/bbot/core/multiprocess.py @@ -0,0 +1,58 @@ +import os +import atexit +from contextlib import suppress + + +class SharedInterpreterState: + """ + A class to track the primary BBOT process. + + Used to prevent spawning multiple unwanted processes with multiprocessing. 
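+
+    Illustrative check (a sketch based on the properties defined below, not official usage docs):
+        >>> SHARED_INTERPRETER_STATE.is_main_process
+        True  # in the first BBOT process; False in spawned children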
+ """ + + def __init__(self): + self.main_process_var_name = "_BBOT_MAIN_PID" + self.scan_process_var_name = "_BBOT_SCAN_PID" + atexit.register(self.cleanup) + + @property + def is_main_process(self): + is_main_process = self.main_pid == os.getpid() + return is_main_process + + @property + def is_scan_process(self): + is_scan_process = os.getpid() == self.scan_pid + return is_scan_process + + @property + def main_pid(self): + main_pid = int(os.environ.get(self.main_process_var_name, 0)) + if main_pid == 0: + main_pid = os.getpid() + # if main PID is not set, set it to the current PID + os.environ[self.main_process_var_name] = str(main_pid) + return main_pid + + @property + def scan_pid(self): + scan_pid = int(os.environ.get(self.scan_process_var_name, 0)) + if scan_pid == 0: + scan_pid = os.getpid() + # if scan PID is not set, set it to the current PID + os.environ[self.scan_process_var_name] = str(scan_pid) + return scan_pid + + def update_scan_pid(self): + os.environ[self.scan_process_var_name] = str(os.getpid()) + + def cleanup(self): + with suppress(Exception): + if self.is_main_process: + with suppress(KeyError): + del os.environ[self.main_process_var_name] + with suppress(KeyError): + del os.environ[self.scan_process_var_name] + + +SHARED_INTERPRETER_STATE = SharedInterpreterState() diff --git a/bbot/core/shared_deps.py b/bbot/core/shared_deps.py new file mode 100644 index 0000000000..cd338093fc --- /dev/null +++ b/bbot/core/shared_deps.py @@ -0,0 +1,214 @@ +DEP_FFUF = [ + { + "name": "Download ffuf", + "unarchive": { + "src": "https://github.com/ffuf/ffuf/releases/download/v#{BBOT_DEPS_FFUF_VERSION}/ffuf_#{BBOT_DEPS_FFUF_VERSION}_#{BBOT_OS}_#{BBOT_CPU_ARCH}.tar.gz", + "include": "ffuf", + "dest": "#{BBOT_TOOLS}", + "remote_src": True, + }, + } +] + +DEP_DOCKER = [ + { + "name": "Check if Docker is already installed", + "command": "docker --version", + "register": "docker_installed", + "ignore_errors": True, + }, + { + "name": "Install Docker (Non-Debian)", + "package": {"name": "docker", "state": "present"}, + "become": True, + "when": "ansible_facts['os_family'] != 'Debian' and docker_installed.rc != 0", + }, + { + "name": "Install Docker (Debian)", + "package": { + "name": "docker.io", + "state": "present", + }, + "become": True, + "when": "ansible_facts['os_family'] == 'Debian' and docker_installed.rc != 0", + }, +] + +DEP_MASSDNS = [ + { + "name": "install dev tools", + "package": {"name": ["gcc", "git", "make"], "state": "present"}, + "become": True, + "ignore_errors": True, + }, + { + "name": "Download massdns source code", + "git": { + "repo": "https://github.com/blechschmidt/massdns.git", + "dest": "#{BBOT_TEMP}/massdns", + "single_branch": True, + "version": "master", + }, + }, + { + "name": "Build massdns (Linux)", + "command": {"chdir": "#{BBOT_TEMP}/massdns", "cmd": "make", "creates": "#{BBOT_TEMP}/massdns/bin/massdns"}, + "when": "ansible_facts['system'] == 'Linux'", + }, + { + "name": "Build massdns (non-Linux)", + "command": { + "chdir": "#{BBOT_TEMP}/massdns", + "cmd": "make nolinux", + "creates": "#{BBOT_TEMP}/massdns/bin/massdns", + }, + "when": "ansible_facts['system'] != 'Linux'", + }, + { + "name": "Install massdns", + "copy": {"src": "#{BBOT_TEMP}/massdns/bin/massdns", "dest": "#{BBOT_TOOLS}/", "mode": "u+x,g+x,o+x"}, + }, +] + +DEP_CHROMIUM = [ + { + "name": "Install Chromium (Non-Debian)", + "package": {"name": "chromium", "state": "present"}, + "become": True, + "when": "ansible_facts['os_family'] != 'Debian'", + "ignore_errors": True, + }, + { + "name": 
"Install Chromium dependencies (Ubuntu 24.04)", + "package": { + "name": "libasound2t64,libatk-bridge2.0-0,libatk1.0-0,libcairo2,libcups2,libdrm2,libgbm1,libnss3,libpango-1.0-0,libglib2.0-0,libxcomposite1,libxdamage1,libxfixes3,libxkbcommon0,libxrandr2", + "state": "present", + }, + "become": True, + "when": "ansible_facts['distribution'] == 'Ubuntu' and ansible_facts['distribution_version'] == '24.04'", + "ignore_errors": True, + }, + { + "name": "Install Chromium dependencies (Other Debian-based)", + "package": { + "name": "libasound2,libatk-bridge2.0-0,libatk1.0-0,libcairo2,libcups2,libdrm2,libgbm1,libnss3,libpango-1.0-0,libglib2.0-0,libxcomposite1,libxdamage1,libxfixes3,libxkbcommon0,libxrandr2", + "state": "present", + }, + "become": True, + "when": "ansible_facts['os_family'] == 'Debian' and not (ansible_facts['distribution'] == 'Ubuntu' and ansible_facts['distribution_version'] == '24.04')", + "ignore_errors": True, + }, + { + "name": "Get latest Chromium version (Debian)", + "uri": { + "url": "https://www.googleapis.com/download/storage/v1/b/chromium-browser-snapshots/o/Linux_x64%2FLAST_CHANGE?alt=media", + "return_content": True, + }, + "register": "chromium_version", + "when": "ansible_facts['os_family'] == 'Debian'", + "ignore_errors": True, + }, + { + "name": "Download Chromium (Debian)", + "unarchive": { + "src": "https://www.googleapis.com/download/storage/v1/b/chromium-browser-snapshots/o/Linux_x64%2F{{ chromium_version.content }}%2Fchrome-linux.zip?alt=media", + "remote_src": True, + "dest": "#{BBOT_TOOLS}", + "creates": "#{BBOT_TOOLS}/chrome-linux", + }, + "when": "ansible_facts['os_family'] == 'Debian'", + "ignore_errors": True, + }, + # Because Ubuntu is a special snowflake, we have to bend over backwards to fix the chrome sandbox + # see https://chromium.googlesource.com/chromium/src/+/main/docs/security/apparmor-userns-restrictions.md + { + "name": "Chown chrome_sandbox to root:root", + "command": {"cmd": "chown -R root:root #{BBOT_TOOLS}/chrome-linux/chrome_sandbox"}, + "when": "ansible_facts['os_family'] == 'Debian'", + "become": True, + }, + { + "name": "Chmod chrome_sandbox to 4755", + "command": {"cmd": "chmod -R 4755 #{BBOT_TOOLS}/chrome-linux/chrome_sandbox"}, + "when": "ansible_facts['os_family'] == 'Debian'", + "become": True, + }, +] + +DEP_MASSCAN = [ + { + "name": "install os deps (Debian)", + "package": {"name": ["gcc", "git", "make", "libpcap0.8-dev"], "state": "present"}, + "become": True, + "when": "ansible_facts['os_family'] == 'Debian'", + "ignore_errors": True, + }, + { + "name": "install dev tools (Non-Debian)", + "package": {"name": ["gcc", "git", "make", "libpcap"], "state": "present"}, + "become": True, + "when": "ansible_facts['os_family'] != 'Debian'", + "ignore_errors": True, + }, + { + "name": "Download masscan source code", + "git": { + "repo": "https://github.com/robertdavidgraham/masscan.git", + "dest": "#{BBOT_TEMP}/masscan", + "single_branch": True, + "version": "master", + }, + }, + { + "name": "Build masscan", + "command": { + "chdir": "#{BBOT_TEMP}/masscan", + "cmd": "make -j", + "creates": "#{BBOT_TEMP}/masscan/bin/masscan", + }, + }, + { + "name": "Install masscan", + "copy": {"src": "#{BBOT_TEMP}/masscan/bin/masscan", "dest": "#{BBOT_TOOLS}/", "mode": "u+x,g+x,o+x"}, + }, +] + +DEP_JAVA = [ + { + "name": "Check if Java is installed", + "command": "which java", + "register": "java_installed", + "ignore_errors": True, + }, + { + "name": "Install latest JRE (Debian)", + "package": {"name": ["default-jre"], "state": "present"}, + 
"become": True, + "when": "ansible_facts['os_family'] == 'Debian' and java_installed.rc != 0", + }, + { + "name": "Install latest JRE (Arch)", + "package": {"name": ["jre-openjdk"], "state": "present"}, + "become": True, + "when": "ansible_facts['os_family'] == 'Archlinux' and java_installed.rc != 0", + }, + { + "name": "Install latest JRE (Fedora)", + "package": {"name": ["which", "java-latest-openjdk-headless"], "state": "present"}, + "become": True, + "when": "ansible_facts['os_family'] == 'RedHat' and java_installed.rc != 0", + }, + { + "name": "Install latest JRE (Alpine)", + "package": {"name": ["openjdk11"], "state": "present"}, + "become": True, + "when": "ansible_facts['os_family'] == 'Alpine' and java_installed.rc != 0", + }, +] + +# shared module dependencies -- ffuf, massdns, chromium, etc. +SHARED_DEPS = {} +for var, val in list(locals().items()): + if var.startswith("DEP_") and isinstance(val, list): + var = var.split("_", 1)[-1].lower() + SHARED_DEPS[var] = val diff --git a/bbot/db/neo4j.py b/bbot/db/neo4j.py deleted file mode 100644 index d80e0f382b..0000000000 --- a/bbot/db/neo4j.py +++ /dev/null @@ -1,72 +0,0 @@ -import py2neo -import logging -from datetime import datetime - -log = logging.getLogger("bbot.db.neo4j") - -# uncomment this to enable neo4j debugging -# logging.basicConfig(level=logging.DEBUG, format="%(message)s") - - -class Neo4j: - """ - docker run --rm -p 7687:7687 -p 7474:7474 --env NEO4J_AUTH=neo4j/bbotislife neo4j - """ - - def __init__(self, uri="bolt://localhost:7687", username="neo4j", password="bbotislife"): - self.graph = py2neo.Graph(uri=uri, auth=(username, password)) - - def insert_event(self, event): - event_json = event.json(mode="graph") - source_id = event_json.get("source", "") - if not source_id: - log.warning(f"Skipping event without source: {event}") - return - source_type = source_id.split(":")[0] - source_node = self.make_node({"type": source_type, "id": source_id}) - - module = event_json.pop("module", "TARGET") - timestamp = datetime.fromtimestamp(event_json.pop("timestamp")) - event_node = self.make_node(event_json) - - relationship = py2neo.Relationship(source_node, module, event_node, timestamp=timestamp) - self.graph.merge(relationship) - - def insert_events(self, events): - event_nodes = dict() - event_list = [] - - for event in events: - event_json = event.json(mode="graph") - source_id = event_json.get("source", "") - if not source_id: - log.warning(f"Skipping event without source: {event}") - continue - event_node = self.make_node(event_json) - event_nodes[event.id] = event_node - event_list.append(event_node) - - if event_nodes: - subgraph = list(event_nodes.values())[0] - for dest_event in event_list: - module = dest_event.pop("module", "TARGET") - source_id = dest_event["source"] - source_type = source_id.split(":")[0] - try: - source_event = event_nodes[source_id] - except KeyError: - source_event = self.make_node({"type": source_type, "id": source_id}) - timestamp = datetime.fromtimestamp(dest_event.pop("timestamp")) - relation = py2neo.Relationship(source_event, module, dest_event, timestamp=timestamp) - subgraph = subgraph | relation - - self.graph.merge(subgraph) - - @staticmethod - def make_node(event): - event = dict(event) - event_type = event.pop("type") - event_node = py2neo.Node(event_type, **event) - event_node.__primarylabel__ = event_type - event_node.__primarykey__ = "id" - return event_node diff --git a/bbot/db/sql/models.py b/bbot/db/sql/models.py new file mode 100644 index 0000000000..d6e7656108 --- 
/dev/null
+++ b/bbot/db/sql/models.py
@@ -0,0 +1,147 @@
+# This file contains SQLModel (Pydantic + SQLAlchemy) models for BBOT events, scans, and targets.
+# Used by the SQL output modules, but portable for outside use.
+
+import json
+import logging
+from pydantic import ConfigDict
+from typing import List, Optional
+from datetime import datetime, timezone
+from typing_extensions import Annotated
+from pydantic.functional_validators import AfterValidator
+from sqlmodel import inspect, Column, Field, SQLModel, JSON, String, DateTime as SQLADateTime
+
+
+log = logging.getLogger("bbot_server.models")
+
+
+def naive_datetime_validator(d: datetime):
+    """
+    Converts all dates into UTC, then drops timezone information.
+
+    This is needed to prevent inconsistencies in sqlite, because it is timezone-naive.
+    """
+    # convert timezone-aware datetimes to UTC, then drop timezone info
+    if d.tzinfo is not None:
+        d = d.astimezone(timezone.utc)
+    return d.replace(tzinfo=None)
+
+
+NaiveUTC = Annotated[datetime, AfterValidator(naive_datetime_validator)]
+
+
+class CustomJSONEncoder(json.JSONEncoder):
+    def default(self, obj):
+        # handle datetime
+        if isinstance(obj, datetime):
+            return obj.isoformat()
+        return super().default(obj)
+
+
+class BBOTBaseModel(SQLModel):
+    model_config = ConfigDict(extra="ignore")
+
+    def __init__(self, *args, **kwargs):
+        self._validated = None
+        super().__init__(*args, **kwargs)
+
+    @property
+    def validated(self):
+        try:
+            if self._validated is None:
+                self._validated = self.__class__.model_validate(self)
+            return self._validated
+        except AttributeError:
+            return self
+
+    def to_json(self, **kwargs):
+        return json.dumps(self.validated.model_dump(), sort_keys=True, cls=CustomJSONEncoder, **kwargs)
+
+    @classmethod
+    def _pk_column_names(cls):
+        return [column.name for column in inspect(cls).primary_key]
+
+    def __hash__(self):
+        return hash(self.to_json())
+
+    def __eq__(self, other):
+        return hash(self) == hash(other)
+
+
+### EVENT ###
+
+
+class Event(BBOTBaseModel, table=True):
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        data = self._get_data(self.data, self.type)
+        self.data = {self.type: data}
+        if self.host:
+            self.reverse_host = self.host[::-1]
+
+    def get_data(self):
+        return self._get_data(self.data, self.type)
+
+    @staticmethod
+    def _get_data(data, type):
+        # handle SIEM-friendly format
+        if isinstance(data, dict) and list(data) == [type]:
+            return data[type]
+        return data
+
+    uuid: str = Field(
+        primary_key=True,
+        index=True,
+        nullable=False,
+    )
+    id: str = Field(index=True)
+    type: str = Field(index=True)
+    scope_description: str
+    data: dict = Field(sa_type=JSON)
+    host: Optional[str]
+    port: Optional[int]
+    netloc: Optional[str]
+    # store the host in reversed form for efficient lookups by domain
+    reverse_host: Optional[str] = Field(default="", exclude=True, index=True)
+    resolved_hosts: List = Field(default=[], sa_type=JSON)
+    dns_children: dict = Field(default={}, sa_type=JSON)
+    web_spider_distance: int = 10
+    scope_distance: int = Field(default=10, index=True)
+    scan: str = Field(index=True)
+    timestamp: NaiveUTC = Field(index=True)
+    parent: str = Field(index=True)
+    tags: List = Field(default=[], sa_type=JSON)
+    module: str = Field(index=True)
+    module_sequence: str
+    discovery_context: str = ""
+    discovery_path: List[str] = Field(default=[], sa_type=JSON)
+    parent_chain: List[str] = Field(default=[], sa_type=JSON)
+    inserted_at: NaiveUTC = Field(default_factory=lambda: datetime.now(timezone.utc))
+
+
+### SCAN ###
+
+
+class Scan(BBOTBaseModel, table=True):
+    id: str = Field(primary_key=True)
+    name: str
+    status: str
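+    # free-form status string (illustrative values: "RUNNING", "FINISHED")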
+    started_at: NaiveUTC = Field(index=True)
+    finished_at: Optional[NaiveUTC] = Field(default=None, sa_column=Column(SQLADateTime, nullable=True, index=True))
+    duration_seconds: Optional[float] = Field(default=None)
+    duration: Optional[str] = Field(default=None)
+    target: dict = Field(sa_type=JSON)
+    preset: dict = Field(sa_type=JSON)
+
+
+### TARGET ###
+
+
+class Target(BBOTBaseModel, table=True):
+    name: str = "Default Target"
+    strict_scope: bool = False
+    seeds: List = Field(default=[], sa_type=JSON)
+    whitelist: List = Field(default=None, sa_type=JSON)
+    blacklist: List = Field(default=[], sa_type=JSON)
+    hash: str = Field(sa_column=Column("hash", String(length=255), unique=True, primary_key=True, index=True))
+    scope_hash: str = Field(sa_column=Column("scope_hash", String(length=255), index=True))
+    seed_hash: str = Field(sa_column=Column("seed_hash", String(length=255), index=True))
+    whitelist_hash: str = Field(sa_column=Column("whitelist_hash", String(length=255), index=True))
+    blacklist_hash: str = Field(sa_column=Column("blacklist_hash", String(length=255), index=True))
diff --git a/bbot/defaults.yml b/bbot/defaults.yml
index c7452f852b..1a1aa62bb0 100644
--- a/bbot/defaults.yml
+++ b/bbot/defaults.yml
@@ -2,101 +2,234 @@
 # BBOT working directory
 home: ~/.bbot
-# How far out from the main scope to search
-scope_search_distance: 1
-# Don't output events that are further than this from the main scope
-scope_report_distance: 1
-# How far out from the main scope to resolve DNS names / IPs
-scope_dns_search_distance: 2
-# Limit the number of BBOT threads
-max_threads: 20
-# Limit the number of DNS threads
-max_dns_threads: 100
-# Limit the number of brute force modules that can run at one time
-max_brute_forcers: 2
+# How many scan results to keep before cleaning up the older ones
+keep_scans: 20
+# Interval for displaying status messages
+status_frequency: 15
+# Include the raw data of files (i.e. PDFs, web screenshots) as base64 in the event
+file_blobs: false
+# Include the raw data of directories (i.e.
git repos) as tar.gz base64 in the event +folder_blobs: false + +### SCOPE ### + +scope: + # strict scope means only exact DNS names are considered in-scope + # subdomains are not included unless they are explicitly provided in the target list + strict: false + # Filter by scope distance which events are displayed in the output + # 0 == show only in-scope events (affiliates are always shown) + # 1 == show all events up to distance-1 (1 hop from target) + report_distance: 0 + # How far out from the main scope to search + # Do not change this setting unless you know what you're doing + search_distance: 0 + +### DNS ### + +dns: + # Completely disable DNS resolution (careful if you have IP whitelists/blacklists, consider using minimal=true instead) + disable: false + # Speed up scan by not creating any new DNS events, and only resolving A and AAAA records + minimal: false + # How many instances of the dns module to run concurrently + threads: 25 + # How many concurrent DNS resolvers to use when brute-forcing + # (under the hood this is passed through directly to massdns -s) + brute_threads: 1000 + # nameservers to use for DNS brute-forcing + # default is updated weekly and contains ~10K high-quality public servers + brute_nameservers: https://raw.githubusercontent.com/blacklanternsecurity/public-dns-servers/master/nameservers.txt + # How far away from the main target to explore via DNS resolution (independent of scope.search_distance) + # This is safe to change + search_distance: 1 + # Limit how many DNS records can be followed in a row (stop malicious/runaway DNS records) + runaway_limit: 5 + # DNS query timeout + timeout: 5 + # How many times to retry DNS queries + retries: 1 + # Completely disable BBOT's DNS wildcard detection + wildcard_disable: False + # Disable BBOT's DNS wildcard detection for select domains + wildcard_ignore: [] + # How many sanity checks to make when verifying wildcard DNS + # Increase this value if BBOT's wildcard detection isn't working + wildcard_tests: 10 + # Skip DNS requests for a certain domain and rdtype after encountering this many timeouts or SERVFAILs + # This helps prevent faulty DNS servers from hanging up the scan + abort_threshold: 50 + # Don't show PTR records containing IP addresses + filter_ptrs: true + # Enable/disable debug messages for DNS queries + debug: false + # For performance reasons, always skip these DNS queries + # Microsoft's DNS infrastructure is misconfigured so that certain queries to mail.protection.outlook.com always time out + omit_queries: + - SRV:mail.protection.outlook.com + - CNAME:mail.protection.outlook.com + - TXT:mail.protection.outlook.com + +### WEB ### + +web: + # HTTP proxy + http_proxy: + # Web user-agent + user_agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36 Edg/119.0.2151.97 + # Set the maximum number of HTTP links that can be followed in a row (0 == no spidering allowed) + spider_distance: 0 + # Set the maximum directory depth for the web spider + spider_depth: 1 + # Set the maximum number of links that can be followed per page + spider_links_per_page: 25 + # HTTP timeout (for Python requests; API calls, etc.) + http_timeout: 10 + # HTTP timeout (for httpx) + httpx_timeout: 5 + # Custom HTTP headers (e.g. cookies, etc.) + # in the format { "Header-Key": "header_value" } + # These are attached to all in-scope HTTP requests + # Note that some modules (e.g. 
github) may end up sending these to out-of-scope resources + http_headers: {} + # HTTP retries (for Python requests; API calls, etc.) + http_retries: 1 + # HTTP retries (for httpx) + httpx_retries: 1 + # Enable/disable debug messages for web requests/responses + debug: false + # Maximum number of HTTP redirects to follow + http_max_redirects: 5 + # Whether to verify SSL certificates + ssl_verify: false +### ENGINE ### + +engine: + debug: false + +# Tool dependencies +deps: + ffuf: + version: "2.1.0" + # How to handle installation of module dependencies + # Choices are: + # - abort_on_failure (default) - if a module dependency fails to install, abort the scan + # - retry_failed - try again to install failed dependencies + # - ignore_failed - run the scan regardless of what happens with dependency installation + # - disable - completely disable BBOT's dependency system (you are responsible for installing tools, pip packages, etc.) + behavior: abort_on_failure ### ADVANCED OPTIONS ### +# Load BBOT modules from these custom paths +module_dirs: [] + # Infer certain events from others, e.g. IPs from IP ranges, DNS_NAMEs from URLs, etc. speculate: True # Passively search event data for URLs, hostnames, emails, etc. excavate: True # Summarize activity at the end of a scan aggregate: True -# HTTP proxy -http_proxy: -# HTTP timeout (for Python requests; API calls, etc.) -http_timeout: 30 -# HTTP timeout (for httpx) -httpx_timeout: 5 -# Enable/disable debug messages for web requests/responses -http_debug: false -# Set the maximum number of HTTP links that can be followed in a row (0 == no spidering allowed) -web_spider_distance: 0 -# Set the maximum directory depth for the web spider -web_spider_depth: 1 -# Generate new DNS_NAME and IP_ADDRESS events through DNS resolution -dns_resolution: true -# DNS query timeout -dns_timeout: 10 -# Disable BBOT's smart DNS wildcard handling for select domains -dns_wildcard_ignore: [] -# How many sanity checks to make when verifying wildcard DNS -# Increase this value if BBOT's wildcard detection isn't working -dns_wildcard_tests: 5 -# Skip DNS requests for a certain domain and rdtype after encountering this many timeouts or SERVFAILs -# This helps prevent faulty DNS servers from hanging up the scan -dns_abort_threshold: 10 -# Enable/disable filtering of PTR records containing IP addresses -dns_filter_ptrs: true -# Enable/disable debug messages for dns queries -dns_debug: false -# Whether to verify SSL certificates -ssl_verify: false -# How many scan results to keep before cleaning up the older ones -keep_scans: 20 -# Web user-agent -user_agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/103.0.0.0 Safari/537.36 +# DNS resolution, wildcard detection, etc. 
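+# For example, "dnsresolve: False" disables DNS resolution and wildcard detection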
+dnsresolve: True
+# Cloud provider tagging
+cloudcheck: True
+
+# Strip querystring from URLs by default
+url_querystring_remove: True
+# When query string is retained, by default collapse parameter values down to a single value per parameter
+url_querystring_collapse: True
+
 # Completely ignore URLs with these extensions
 url_extension_blacklist:
-  # images
-  - png
-  - jpg
-  - bmp
-  - ico
-  - jpeg
-  - gif
-  - svg
-  # web/fonts
-  - css
-  - woff
-  - woff2
-  - ttf
-  # audio
-  - mp3
-  - m4a
-  - wav
-  - flac
-  # video
-  - mp4
-  - mkv
-  - avi
-  - wmv
-  - mov
-  - flv
-  - webm
+    # images
+    - png
+    - jpg
+    - bmp
+    - ico
+    - jpeg
+    - gif
+    - svg
+    - webp
+    # web/fonts
+    - css
+    - woff
+    - woff2
+    - ttf
+    - eot
+    - sass
+    - scss
+    # audio
+    - mp3
+    - m4a
+    - wav
+    - flac
+    # video
+    - mp4
+    - mkv
+    - avi
+    - wmv
+    - mov
+    - flv
+    - webm
 # Distribute URLs with these extensions only to httpx (these are omitted from output)
 url_extension_httpx_only:
-  - js
+    - js
+
+# These url extensions are almost always static, so we exclude them from modules that fuzz things
+url_extension_static:
+    - pdf
+    - doc
+    - docx
+    - xls
+    - xlsx
+    - ppt
+    - pptx
+    - txt
+    - csv
+    - xml
+    - yaml
+    - ini
+    - log
+    - conf
+    - cfg
+    - env
+    - md
+    - rtf
+    - tiff
+    - bmp
+    - jpg
+    - jpeg
+    - png
+    - gif
+    - svg
+    - ico
+    - mp3
+    - wav
+    - flac
+    - mp4
+    - mov
+    - avi
+    - mkv
+    - webm
+    - zip
+    - tar
+    - gz
+    - bz2
+    - 7z
+    - rar
+
 # Don't output these types of events (they are still distributed to modules)
 omit_event_types:
-  - HTTP_RESPONSE
-  - URL_UNVERIFIED
-# URL of BBOT server
-agent_url: ''
-# Agent Bearer authentication token
-agent_token: ''
+    - HTTP_RESPONSE
+    - RAW_TEXT
+    - URL_UNVERIFIED
+    - DNS_NAME_UNRESOLVED
+    - FILESYSTEM
+    - WEB_PARAMETER
+    - RAW_DNS_RECORD
+    # - IP_ADDRESS
 # Custom interactsh server settings
 interactsh_server: null
diff --git a/bbot/errors.py b/bbot/errors.py
new file mode 100644
index 0000000000..a5ab9619b3
--- /dev/null
+++ b/bbot/errors.py
@@ -0,0 +1,82 @@
+class BBOTError(Exception):
+    pass
+
+
+class ScanError(BBOTError):
+    pass
+
+
+class ValidationError(BBOTError):
+    pass
+
+
+class ConfigLoadError(BBOTError):
+    pass
+
+
+class HttpCompareError(BBOTError):
+    pass
+
+
+class DirectoryCreationError(BBOTError):
+    pass
+
+
+class DirectoryDeletionError(BBOTError):
+    pass
+
+
+class NTLMError(BBOTError):
+    pass
+
+
+class InteractshError(BBOTError):
+    pass
+
+
+class WordlistError(BBOTError):
+    pass
+
+
+class CurlError(BBOTError):
+    pass
+
+
+class PresetNotFoundError(BBOTError):
+    pass
+
+
+class EnableModuleError(BBOTError):
+    pass
+
+
+class EnableFlagError(BBOTError):
+    pass
+
+
+class BBOTArgumentError(BBOTError):
+    pass
+
+
+class PresetConditionError(BBOTError):
+    pass
+
+
+class PresetAbortError(PresetConditionError):
+    pass
+
+
+class BBOTEngineError(BBOTError):
+    pass
+
+
+class WebError(BBOTEngineError):
+    pass
+
+
+class DNSError(BBOTEngineError):
+    pass
+
+
+class ExcavateError(BBOTError):
+    pass
diff --git a/bbot/logger.py b/bbot/logger.py
new file mode 100644
index 0000000000..b70d4b4b41
--- /dev/null
+++ b/bbot/logger.py
@@ -0,0 +1,52 @@
+import sys
+
+loglevel_mapping = {
+    "DEBUG": "DBUG",
+    "TRACE": "TRCE",
+    "VERBOSE": "VERB",
+    "HUGEVERBOSE": "VERB",
+    "INFO": "INFO",
+    "HUGEINFO": "INFO",
+    "SUCCESS": "SUCC",
+    "HUGESUCCESS": "SUCC",
+    "WARNING": "WARN",
+    "HUGEWARNING": "WARN",
+    "ERROR": "ERRR",
+    "CRITICAL": "CRIT",
+}
+color_mapping = {
+    "DEBUG": 242,  # grey
+    "TRACE": 242,  # grey
+    "VERBOSE": 242,  # grey
+    "INFO": 69,  # blue
+    "HUGEINFO": 69,  # blue
+    "SUCCESS":
118, # green + "HUGESUCCESS": 118, # green + "WARNING": 208, # orange + "HUGEWARNING": 208, # orange + "ERROR": 196, # red + "CRITICAL": 196, # red +} +color_prefix = "\033[1;38;5;" +color_suffix = "\033[0m" + + +def colorize(s, level="INFO"): + seq = color_mapping.get(level, 15) # default white + colored = f"{color_prefix}{seq}m{s}{color_suffix}" + return colored + + +def log_to_stderr(msg, level="INFO", logname=True): + """ + Print to stderr with BBOT logger colors + """ + levelname = level.upper() + if not any(x in sys.argv for x in ("-s", "--silent")): + levelshort = f"[{loglevel_mapping.get(level, 'INFO')}]" + levelshort = f"{colorize(levelshort, level=levelname)}" + if levelname == "CRITICAL" or levelname.startswith("HUGE"): + msg = colorize(msg, level=levelname) + if logname: + msg = f"{levelshort} {msg}" + print(msg, file=sys.stderr) diff --git a/bbot/modules/__init__.py b/bbot/modules/__init__.py index 6062b0170d..e69de29bb2 100644 --- a/bbot/modules/__init__.py +++ b/bbot/modules/__init__.py @@ -1,14 +0,0 @@ -import re -from pathlib import Path -from bbot.core.helpers.modules import module_loader - -dir_regex = re.compile(r"^[a-z][a-z0-9_]*$") - -parent_dir = Path(__file__).parent.resolve() -module_dirs = set([parent_dir]) -for e in parent_dir.iterdir(): - if e.is_dir() and dir_regex.match(e.name) and not e.name == "modules": - module_dirs.add(e) - -for d in module_dirs: - module_loader.preload(d) diff --git a/bbot/modules/ajaxpro.py b/bbot/modules/ajaxpro.py new file mode 100644 index 0000000000..1df424ebcc --- /dev/null +++ b/bbot/modules/ajaxpro.py @@ -0,0 +1,85 @@ +import regex as re +from urllib.parse import urlparse +from bbot.modules.base import BaseModule + + +class ajaxpro(BaseModule): + """ + Reference: https://mogwailabs.de/en/blog/2022/01/vulnerability-spotlight-rce-in-ajax.net-professional/ + """ + + ajaxpro_regex = re.compile(r' self.dns_abort_depth: + return True + return False + + async def abort_if(self, event): + # abort if dns name is unresolved + if event.type == "DNS_NAME_UNRESOLVED": + return True, "DNS name is unresolved" + return await super().abort_if(event) + + async def parse_results(self, r, query): + results = set() + json = r.json() + if json: + for hostname in json: + hostname = str(hostname).lower() + in_scope = hostname.endswith(f".{query}") + is_ptr = self.helpers.is_ptr(hostname) + too_long = self.abort_if_pre(hostname) + if in_scope and not is_ptr and not too_long: + results.add(hostname) + return sorted(results)[: self.config.get("limit", 1000)] diff --git a/bbot/modules/apkpure.py b/bbot/modules/apkpure.py new file mode 100644 index 0000000000..c17d25e2c6 --- /dev/null +++ b/bbot/modules/apkpure.py @@ -0,0 +1,63 @@ +import re +from pathlib import Path +from bbot.modules.base import BaseModule + + +class apkpure(BaseModule): + watched_events = ["MOBILE_APP"] + produced_events = ["FILESYSTEM"] + flags = ["passive", "safe", "code-enum"] + meta = { + "description": "Download android applications from apkpure.com", + "created_date": "2024-10-11", + "author": "@domwhewell-sage", + } + options = {"output_folder": ""} + options_desc = {"output_folder": "Folder to download apk's to"} + + async def setup(self): + output_folder = self.config.get("output_folder") + if output_folder: + self.output_dir = Path(output_folder) / "apk_files" + else: + self.output_dir = self.scan.home / "apk_files" + self.helpers.mkdir(self.output_dir) + return await super().setup() + + async def filter_event(self, event): + if event.type == "MOBILE_APP": + if "android" not in 
event.tags: + return False, "event is not an android app" + return True + + async def handle_event(self, event): + app_id = event.data.get("id", "") + path = await self.download_apk(app_id) + if path: + await self.emit_event( + {"path": str(path)}, + "FILESYSTEM", + tags=["apk", "file"], + parent=event, + context=f'{{module}} downloaded the apk "{app_id}" to: {path}', + ) + + async def download_apk(self, app_id): + path = None + url = f"https://d.apkpure.com/b/XAPK/{app_id}?version=latest" + self.helpers.mkdir(self.output_dir / app_id) + response = await self.helpers.request(url, allow_redirects=True) + if response: + attachment = response.headers.get("Content-Disposition", "") + if "filename" in attachment: + match = re.search(r'filename="?([^"]+)"?', attachment) + if match: + filename = match.group(1) + extension = filename.split(".")[-1] + content = response.content + file_destination = self.output_dir / app_id / f"{app_id}.{extension}" + with open(file_destination, "wb") as f: + f.write(content) + self.info(f'Downloaded "{app_id}" from "{url}", saved to {file_destination}') + path = file_destination + return path diff --git a/bbot/modules/aspnet_viewstate.py b/bbot/modules/aspnet_viewstate.py deleted file mode 100644 index 72deb38956..0000000000 --- a/bbot/modules/aspnet_viewstate.py +++ /dev/null @@ -1,102 +0,0 @@ -from bbot.modules.base import BaseModule -import re - - -class aspnet_viewstate(BaseModule): - - watched_events = ["HTTP_RESPONSE"] - produced_events = ["VULNERABILITY"] - flags = ["active", "safe", "web"] - meta = {"description": "Parse web pages for viewstates and check them against blacklist3r"} - - generator_regex = re.compile(r'([^<>/]*)", re.I) return True - def handle_event(self, event): + async def handle_event(self, event): _, query = self.helpers.split_domain(event.data) - domains, _ = self.query(query) + domains, openid_config = await self.query(query) + + tenant_id = None + authorization_endpoint = openid_config.get("authorization_endpoint", "") + matches = await self.helpers.re.findall(self.helpers.regexes.uuid_regex, authorization_endpoint) + if matches: + tenant_id = matches[0] + + tenant_names = set() if domains: - self.success(f'Found {len(domains):,} domains under tenant for "{query}"') - for domain in domains: - self.emit_event(domain, "DNS_NAME", source=event, tags=["affiliate"]) - # todo: tenants? 
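+            # log each discovered tenant domain and emit it as an affiliate DNS_NAME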
+ self.verbose(f'Found {len(domains):,} domains under tenant for "{query}": {", ".join(sorted(domains))}') + for domain in domains: + if domain != query: + await self.emit_event( + domain, + "DNS_NAME", + parent=event, + tags=["affiliate", "azure-tenant"], + context=f'{{module}} queried Outlook autodiscover for "{query}" and found {{event.type}}: {{event.data}}', + ) + # tenant names + if domain.lower().endswith(".onmicrosoft.com"): + tenantname = domain.split(".")[0].lower() + if tenantname: + tenant_names.add(tenantname) + + tenant_names = sorted(tenant_names) + event_data = {"tenant-names": tenant_names, "domains": sorted(domains)} + tenant_names_str = ",".join(tenant_names) + if tenant_id is not None: + event_data["tenant-id"] = tenant_id + await self.emit_event( + event_data, + "AZURE_TENANT", + parent=event, + context=f'{{module}} queried Outlook autodiscover for "{query}" and found {{event.type}}: {tenant_names_str}', + ) - def query(self, domain): + async def query(self, domain): url = f"{self.base_url}/autodiscover/autodiscover.svc" data = f""" @@ -55,19 +92,21 @@ def query(self, domain): self.debug(f"Retrieving tenant domains at {url}") - r = self.helpers.request(url, method="POST", headers=headers, data=data) + autodiscover_task = self.helpers.create_task( + self.helpers.request(url, method="POST", headers=headers, content=data) + ) + openid_url = f"https://login.windows.net/{domain}/.well-known/openid-configuration" + openid_task = self.helpers.create_task(self.helpers.request(openid_url)) + + r = await autodiscover_task status_code = getattr(r, "status_code", 0) - if status_code not in (200,): - self.warning(f"Error retrieving azure_tenant domains (status code: {status_code})") - return set(), set() - found_domains = list(set(self.d_xml_regex.findall(r.text))) + if status_code not in (200, 421): + self.verbose(f'Error retrieving azure_tenant domains for "{domain}" (status code: {status_code})') + return set(), {} + found_domains = list(set(await self.helpers.re.findall(self.d_xml_regex, r.text))) domains = set() - tenantnames = set() for d in found_domains: - # tenant names - if d.lower().endswith(".onmicrosoft.com"): - tenantnames.add(d.split(".")[0].lower()) # make sure we don't make any unnecessary api calls d = str(d).lower() _, query = self.helpers.split_domain(d) @@ -76,4 +115,10 @@ def query(self, domain): # absorb into word cloud self.scan.word_cloud.absorb_word(d) - return domains, tenantnames + r = await openid_task + openid_config = {} + with suppress(Exception): + openid_config = r.json() + + domains = sorted(domains) + return domains, openid_config diff --git a/bbot/modules/baddns.py b/bbot/modules/baddns.py new file mode 100644 index 0000000000..2b11ac4536 --- /dev/null +++ b/bbot/modules/baddns.py @@ -0,0 +1,137 @@ +from baddns.base import get_all_modules +from baddns.lib.loader import load_signatures +from .base import BaseModule + +import asyncio +import logging + + +class baddns(BaseModule): + watched_events = ["DNS_NAME", "DNS_NAME_UNRESOLVED"] + produced_events = ["FINDING", "VULNERABILITY"] + flags = ["active", "safe", "web-basic", "baddns", "cloud-enum", "subdomain-hijack"] + meta = { + "description": "Check hosts for domain/subdomain takeovers", + "created_date": "2024-01-18", + "author": "@liquidsec", + } + options = {"custom_nameservers": [], "only_high_confidence": False, "enabled_submodules": []} + options_desc = { + "custom_nameservers": "Force BadDNS to use a list of custom nameservers", + "only_high_confidence": "Do not emit low-confidence or 
generic detections", + "enabled_submodules": "A list of submodules to enable. Empty list (default) enables CNAME, TXT and MX Only", + } + module_threads = 8 + deps_pip = ["baddns~=1.4.13"] + + def select_modules(self): + selected_submodules = [] + for m in get_all_modules(): + if m.name in self.enabled_submodules: + selected_submodules.append(m) + return selected_submodules + + def set_modules(self): + self.enabled_submodules = self.config.get("enabled_submodules", []) + if self.enabled_submodules == []: + self.enabled_submodules = ["CNAME", "MX", "TXT"] + + async def setup(self): + self.preset.core.logger.include_logger(logging.getLogger("baddns")) + self.custom_nameservers = self.config.get("custom_nameservers", []) or None + if self.custom_nameservers: + self.custom_nameservers = self.helpers.chain_lists(self.custom_nameservers) + self.only_high_confidence = self.config.get("only_high_confidence", False) + self.signatures = load_signatures() + self.set_modules() + all_submodules_list = [m.name for m in get_all_modules()] + for m in self.enabled_submodules: + if m not in all_submodules_list: + self.hugewarning( + f"Selected BadDNS submodule [{m}] does not exist. Available submodules: [{','.join(all_submodules_list)}]" + ) + return False + self.debug(f"Enabled BadDNS Submodules: [{','.join(self.enabled_submodules)}]") + return True + + async def handle_event(self, event): + tasks = [] + for ModuleClass in self.select_modules(): + kwargs = { + "http_client_class": self.scan.helpers.web.AsyncClient, + "dns_client": self.scan.helpers.dns.resolver, + "custom_nameservers": self.custom_nameservers, + "signatures": self.signatures, + } + + if ModuleClass.name == "NS": + kwargs["raw_query_max_retries"] = 1 + kwargs["raw_query_timeout"] = 5.0 + kwargs["raw_query_retry_wait"] = 0 + + module_instance = ModuleClass(event.data, **kwargs) + task = asyncio.create_task(module_instance.dispatch()) + tasks.append((module_instance, task)) + + async for completed_task in self.helpers.as_completed([task for _, task in tasks]): + module_instance = next((m for m, t in tasks if t == completed_task), None) + try: + task_result = await completed_task + except Exception as e: + self.warning(f"Task for {module_instance} raised an error: {e}") + task_result = None + + if task_result: + results = module_instance.analyze() + if results and len(results) > 0: + for r in results: + r_dict = r.to_dict() + + confidence = r_dict["confidence"] + + if confidence in ["CONFIRMED", "PROBABLE"]: + data = { + "severity": "MEDIUM", + "description": f"{r_dict['description']}. 
Confidence: [{confidence}] Signature: [{r_dict['signature']}] Indicator: [{r_dict['indicator']}] Trigger: [{r_dict['trigger']}] baddns Module: [{r_dict['module']}]", + "host": str(event.host), + } + await self.emit_event( + data, + "VULNERABILITY", + event, + tags=[f"baddns-{module_instance.name.lower()}"], + context=f'{{module}}\'s "{r_dict["module"]}" module found {{event.type}}: {r_dict["description"]}', + ) + + elif confidence in ["UNLIKELY", "POSSIBLE"]: + if not self.only_high_confidence: + data = { + "description": f"{r_dict['description']} Confidence: [{confidence}] Signature: [{r_dict['signature']}] Indicator: [{r_dict['indicator']}] Trigger: [{r_dict['trigger']}] baddns Module: [{r_dict['module']}]", + "host": str(event.host), + } + await self.emit_event( + data, + "FINDING", + event, + tags=[f"baddns-{module_instance.name.lower()}"], + context=f'{{module}}\'s "{r_dict["module"]}" module found {{event.type}}: {r_dict["description"]}', + ) + else: + self.debug( + f"Skipping low-confidence result due to only_high_confidence setting: {confidence}" + ) + + else: + self.warning(f"Got unrecognized confidence level: {confidence}") + + found_domains = r_dict.get("found_domains", None) + if found_domains: + for found_domain in found_domains: + await self.emit_event( + found_domain, + "DNS_NAME", + event, + tags=[f"baddns-{module_instance.name.lower()}"], + context=f'{{module}}\'s "{r_dict["module"]}" module found {{event.type}}: {{event.data}}', + ) + await module_instance.cleanup() diff --git a/bbot/modules/baddns_direct.py b/bbot/modules/baddns_direct.py new file mode 100644 index 0000000000..2aaaebf6fc --- /dev/null +++ b/bbot/modules/baddns_direct.py @@ -0,0 +1,92 @@ +from baddns.base import get_all_modules +from baddns.lib.loader import load_signatures +from .base import BaseModule + +import logging + + +class baddns_direct(BaseModule): + watched_events = ["URL", "STORAGE_BUCKET"] + produced_events = ["FINDING", "VULNERABILITY"] + flags = ["active", "safe", "subdomain-enum", "baddns", "cloud-enum"] + meta = { + "description": "Check for unusual subdomain / service takeover edge cases that require direct detection", + "created_date": "2024-01-29", + "author": "@liquidsec", + } + options = {"custom_nameservers": []} + options_desc = { + "custom_nameservers": "Force BadDNS to use a list of custom nameservers", + } + module_threads = 8 + deps_pip = ["baddns~=1.4.13"] + + scope_distance_modifier = 1 + + async def setup(self): + self.preset.core.logger.include_logger(logging.getLogger("baddns")) + self.custom_nameservers = self.config.get("custom_nameservers", []) or None + if self.custom_nameservers: + self.custom_nameservers = self.helpers.chain_lists(self.custom_nameservers) + self.only_high_confidence = self.config.get("only_high_confidence", False) + self.signatures = load_signatures() + return True + + def select_modules(self): + selected_modules = [] + for m in get_all_modules(): + if m.name in ["CNAME"]: + selected_modules.append(m) + return selected_modules + + async def handle_event(self, event): + CNAME_direct_module = self.select_modules()[0] + kwargs = { + "http_client_class": self.scan.helpers.web.AsyncClient, + "dns_client": self.scan.helpers.dns.resolver, + "custom_nameservers": self.custom_nameservers, + "signatures": self.signatures, + "direct_mode": True, + } + + CNAME_direct_instance = CNAME_direct_module(str(event.host), **kwargs) + if await CNAME_direct_instance.dispatch(): + results = CNAME_direct_instance.analyze() + if results and len(results) > 0: + for r in 
results: + r_dict = r.to_dict() + + data = { + "description": f"Possible [{r_dict['signature']}] via direct BadDNS analysis. Indicator: [{r_dict['indicator']}] Trigger: [{r_dict['trigger']}] baddns Module: [{r_dict['module']}]", + "host": str(event.host), + } + + await self.emit_event( + data, + "FINDING", + event, + tags=[f"baddns-{CNAME_direct_module.name.lower()}"], + context=f'{{module}}\'s "{r_dict["module"]}" module found {{event.type}}: {r_dict["description"]}', + ) + await CNAME_direct_instance.cleanup() + + async def filter_event(self, event): + if event.type == "STORAGE_BUCKET": + if str(event.module).startswith("bucket_"): + return False + self.debug(f"Processing STORAGE_BUCKET for {event.host}") + if event.type == "URL": + if event.scope_distance > 0: + self.debug( + f"Rejecting {event.host} due to not being in scope (scope distance: {event.scope_distance})" + ) + return False + if "cdn-cloudflare" not in event.tags: + self.debug(f"Rejecting {event.host} due to not being behind CloudFlare") + return False + if "status-200" in event.tags or "status-301" in event.tags: + self.debug(f"Rejecting {event.host} due to lack of non-standard status code") + return False + + self.debug(f"Passed all checks and is processing {event.host}") + return True diff --git a/bbot/modules/baddns_zone.py b/bbot/modules/baddns_zone.py new file mode 100644 index 0000000000..b8f3967699 --- /dev/null +++ b/bbot/modules/baddns_zone.py @@ -0,0 +1,28 @@ +from .baddns import baddns as baddns_module + + +class baddns_zone(baddns_module): + watched_events = ["DNS_NAME"] + produced_events = ["FINDING", "VULNERABILITY"] + flags = ["active", "safe", "subdomain-enum", "baddns", "cloud-enum"] + meta = { + "description": "Check hosts for DNS zone transfers and NSEC walks", + "created_date": "2024-01-29", + "author": "@liquidsec", + } + options = {"custom_nameservers": [], "only_high_confidence": False} + options_desc = { + "custom_nameservers": "Force BadDNS to use a list of custom nameservers", + "only_high_confidence": "Do not emit low-confidence or generic detections", + } + module_threads = 8 + deps_pip = ["baddns~=1.4.13"] + + def set_modules(self): + self.enabled_submodules = ["NSEC", "zonetransfer"] + + # minimize nsec records feeding back into themselves + async def filter_event(self, event): + if "baddns-nsec" in event.tags or "baddns-nsec" in event.parent.tags: + return False + return True diff --git a/bbot/modules/badsecrets.py b/bbot/modules/badsecrets.py new file mode 100644 index 0000000000..c7ec6e9761 --- /dev/null +++ b/bbot/modules/badsecrets.py @@ -0,0 +1,103 @@ +import multiprocessing +from pathlib import Path +from .base import BaseModule +from badsecrets.base import carve_all_modules + + +class badsecrets(BaseModule): + watched_events = ["HTTP_RESPONSE"] + produced_events = ["FINDING", "VULNERABILITY", "TECHNOLOGY"] + flags = ["active", "safe", "web-basic"] + meta = { + "description": "Library for detecting known or weak secrets across many web frameworks", + "created_date": "2022-11-19", + "author": "@liquidsec", + } + options = {"custom_secrets": None} + options_desc = { + "custom_secrets": "Include custom secrets loaded from a local file", + } + deps_pip = ["badsecrets~=0.9.29"] + + async def setup(self): + self.custom_secrets = None + custom_secrets = self.config.get("custom_secrets", None) + if custom_secrets: + secrets_path = Path(custom_secrets).expanduser() + if secrets_path.is_file(): + self.custom_secrets = custom_secrets + self.info(f"Successfully loaded secrets file 
[{custom_secrets}]") + else: + self.warning(f"custom secrets file [{custom_secrets}] is not valid") + return False, "Custom secrets file not valid" + return True + + @property + def _module_threads(self): + return max(1, multiprocessing.cpu_count() - 1) + + async def handle_event(self, event): + resp_body = event.data.get("body", None) + resp_headers = event.data.get("header", None) + resp_cookies = {} + if resp_headers: + resp_cookies_raw = resp_headers.get("set_cookie", None) + if resp_cookies_raw: + if "," in resp_cookies_raw: + resp_cookies_list = resp_cookies_raw.split(",") + else: + resp_cookies_list = [resp_cookies_raw] + for c in resp_cookies_list: + c2 = c.lstrip(";").strip().split(";")[0].split("=") + if len(c2) == 2: + resp_cookies[c2[0]] = c2[1] + if resp_body or resp_cookies: + try: + r_list = await self.helpers.run_in_executor_mp( + carve_all_modules, + body=resp_body, + headers=resp_headers, + cookies=resp_cookies, + url=event.data.get("url", None), + custom_resource=self.custom_secrets, + ) + except Exception as e: + self.warning(f"Error processing {event}: {e}") + return + if r_list: + for r in r_list: + if r["type"] == "SecretFound": + data = { + "severity": r["description"]["severity"], + "description": f"Known Secret Found. Secret Type: [{r['description']['secret']}] Secret: [{r['secret']}] Product Type: [{r['description']['product']}] Product: [{self.helpers.truncate_string(r['product'], 2000)}] Detecting Module: [{r['detecting_module']}] Details: [{r['details']}]", + "url": event.data["url"], + "host": str(event.host), + } + await self.emit_event( + data, + "VULNERABILITY", + event, + context=f'{{module}}\'s "{r["detecting_module"]}" module found known {r["description"]["product"]} secret ({{event.type}}): "{r["secret"]}"', + ) + elif r["type"] == "IdentifyOnly": + # There is little value to presenting a non-vulnerable asp.net viewstate, as it is not crackable without a Matrioshka brain. Just emit a technology instead. + if r["detecting_module"] == "ASPNET_Viewstate": + technology = "microsoft asp.net" + await self.emit_event( + {"technology": technology, "url": event.data["url"], "host": str(event.host)}, + "TECHNOLOGY", + event, + context=f"{{module}} identified {{event.type}}: {technology}", + ) + else: + data = { + "description": f"Cryptographic Product identified. Product Type: [{r['description']['product']}] Product: [{self.helpers.truncate_string(r['product'], 2000)}] Detecting Module: [{r['detecting_module']}]", + "url": event.data["url"], + "host": str(event.host), + } + await self.emit_event( + data, + "FINDING", + event, + context=f'{{module}} identified cryptographic product ({{event.type}}): "{r["description"]["product"]}"', + ) diff --git a/bbot/modules/base.py b/bbot/modules/base.py index ba96aeaf18..8d125c91c9 100644 --- a/bbot/modules/base.py +++ b/bbot/modules/base.py @@ -1,206 +1,493 @@ -import queue +import asyncio import logging -import threading import traceback -from time import sleep +from sys import exc_info from contextlib import suppress -from ..core.helpers.threadpool import ThreadPoolWrapper -from ..core.errors import ScanCancelledError, ValidationError, WordlistError - -from bbot.core.event.base import is_event +from ..errors import ValidationError +from ..core.helpers.misc import get_size # noqa +from ..core.helpers.async_helpers import TaskCounter, ShuffleQueue class BaseModule: + """The base class for all BBOT modules. + + Attributes: + watched_events (List): Event types to watch. + + produced_events (List): Event types to produce. 
+ + meta (Dict): Metadata about the module, such as whether authentication is required and a description. + + flags (List): Flags indicating the type of module (must have at least "safe" or "aggressive" and "passive" or "active"). + + deps_modules (List): Other BBOT modules this module depends on. Empty list by default. + + deps_pip (List): Python dependencies to install via pip. Empty list by default. + + deps_apt (List): APT package dependencies to install. Empty list by default. + + deps_shell (List): Other dependencies installed via shell commands. Uses [ansible.builtin.shell](https://docs.ansible.com/ansible/latest/collections/ansible/builtin/shell_module.html). Empty list by default. + + deps_ansible (List): Additional Ansible tasks for complex dependencies. Empty list by default. + + accept_dupes (bool): Whether to accept incoming duplicate events. Default is False. + + suppress_dupes (bool): Whether to suppress outgoing duplicate events. Default is True. + + per_host_only (bool): Limit the module to only scanning once per host. Default is False. + + per_hostport_only (bool): Limit the module to only scanning once per host:port. Default is False. + + per_domain_only (bool): Limit the module to only scanning once per domain. Default is False. + + scope_distance_modifier (int, None): Modifies scope distance acceptance for events. Default is 0. + ``` + None == accept all events + 2 == accept events up to and including the scan's configured search distance plus two + 1 == accept events up to and including the scan's configured search distance plus one + 0 == (DEFAULT) accept events up to and including the scan's configured search distance + ``` + + target_only (bool): Accept only the initial target event(s). Default is False. + + in_scope_only (bool): Accept only explicitly in-scope events, regardless of the scan's search distance. Default is False. + + options (Dict): Customizable options for the module, e.g., {"api_key": ""}. Empty dict by default. + + options_desc (Dict): Descriptions for options, e.g., {"api_key": "API Key"}. Empty dict by default. + + module_threads (int): Maximum concurrent instances of handle_event() or handle_batch(). Default is 1. + + batch_size (int): Size of batches processed by handle_batch(). Default is 1. + + batch_wait (int): Seconds to wait before force-submitting a batch. Default is 10. + + api_failure_abort_threshold (int): Threshold for setting error state after failed HTTP requests (only takes effect when `api_request()` is used). Default is 3. + + _preserve_graph (bool): When set to True, accept events that may be duplicates but are necessary for construction of a complete graph. Typically only enabled for output modules that need to maintain full chains of events, e.g. `neo4j` and `json`. Default is False. + + _stats_exclude (bool): Whether to exclude this module from scan statistics. Default is False. + + _qsize (int): Outgoing queue size (0 for infinite). Default is 1000. + + _priority (int): Priority level of the module. Lower values are higher priority. Default is 3. + + _name (str): Module name, overridden automatically. Default is 'base'. + + _type (str): Module type, for differentiating between normal and output modules. Default is 'scan'. + """ - # Event types to watch watched_events = [] - # Event types to produce produced_events = [] - # Module description, etc.
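# A minimal illustrative sketch (hypothetical module name, flags, and handling logic)
# of a module built on the attributes documented above; only the attribute names and
# the setup()/handle_event()/emit_event() contract come from this file.
from bbot.modules.base import BaseModule

class example_module(BaseModule):
    watched_events = ["DNS_NAME"]
    produced_events = ["FINDING"]
    flags = ["passive", "safe"]
    meta = {"description": "Illustrative example module", "author": "@example"}
    options = {"api_key": ""}
    options_desc = {"api_key": "Optional API key"}

    async def setup(self):
        if not self.config.get("api_key"):
            # soft-fail: the module is disabled but the scan continues
            return None, "No API key specified"
        return True

    async def handle_event(self, event):
        # emit a FINDING tied back to the event that triggered it
        await self.emit_event(
            {"description": f"Observed {event.data}", "host": str(event.host)},
            "FINDING",
            parent=event,
        )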
meta = {"auth_required": False, "description": "Base module"} - # Flags, must include either "passive" or "active" flags = [] + options = {} + options_desc = {} - # python dependencies (pip install ____) + deps_modules = [] deps_pip = [] - # apt dependencies (apt install ____) deps_apt = [] - # other dependences as shell commands - # uses ansible.builtin.shell (https://docs.ansible.com/ansible/latest/collections/ansible/builtin/shell_module.html) deps_shell = [] - # list of ansible tasks for when other dependency installation methods aren't enough deps_ansible = [] - # Whether to accept incoming duplicate events + accept_dupes = False - # Whether to block outgoing duplicate events suppress_dupes = True - - # Scope distance modifier - accept/deny events based on scope distance - # None == accept all events - # 2 == accept events up to and including the scan's configured search distance plus two - # 1 == accept events up to and including the scan's configured search distance plus one - # 0 == accept events up to and including the scan's configured search distance - # -1 == accept events up to and including the scan's configured search distance minus one - # (this is the default setting because when the scan's configured search distance == 1 - # [the default], then this is equivalent to in_scope_only) - # -2 == accept events up to and including the scan's configured search distance minus two - scope_distance_modifier = -1 - # Only accept the initial target event(s) + per_host_only = False + per_hostport_only = False + per_domain_only = False + scope_distance_modifier = 0 target_only = False - # Only accept explicitly in-scope events (scope distance == 0) - # Use this options if your module is aggressive or if you don't want it to scale with - # the scan's search distance in_scope_only = False - # Options, e.g. {"api_key": ""} - options = {} - # Options description, e.g. {"api_key": "API Key"} - options_desc = {} - # Maximum concurrent instances of handle_event() or handle_batch() - max_event_handlers = 1 - # Max number of concurrent calls to submit_task() - max_threads = 1 - # Batch size - # If batch size > 1, override handle_batch() instead of handle_event() - batch_size = 1 - # Seconds to wait before force-submitting batch + _module_threads = 1 + _batch_size = 1 batch_wait = 10 - # When set to false, prevents events generated by this module from being automatically marked as in-scope - # Useful for low-confidence modules like speculate and ipneighbor - _scope_shepherding = True - # Exclude from scan statistics + + # API retries, etc. + _api_retries = 2 + # disable the module after this many failed attempts in a row + _api_failure_abort_threshold = 3 + # sleep for this many seconds after being rate limited + _429_sleep_interval = 30 + + default_discovery_context = "{module} discovered {event.type}: {event.data}" + + _preserve_graph = False _stats_exclude = False - # outgoing queue size - _qsize = 100 - # Priority of events raised by this module, 1-5, lower numbers == higher priority + _qsize = 1000 _priority = 3 - # Name, overridden automatically _name = "base" - # Type, for differentiating between normal modules and output modules, etc. _type = "scan" + _intercept = False + _shuffle_incoming_queue = True def __init__(self, scan): + """Initializes a module instance. + + Args: + scan: The BBOT scan object associated with this module instance. + + Attributes: + scan: The scan object associated with this module. + + errored (bool): Whether the module has errored out. Default is False. 
+ """ self.scan = scan self.errored = False self._log = None - self._event_queue = None - self._event_semaphore = threading.Semaphore(self._qsize) - self._batch_idle = 0 - self.thread_pool = ThreadPoolWrapper( - self.scan._thread_pool.executor, max_workers=self.config.get("max_threads", self.max_threads) - ) - self._internal_thread_pool = ThreadPoolWrapper( - self.scan._internal_thread_pool.executor, max_workers=self.max_event_handlers - ) + self._incoming_event_queue = None + self._outgoing_event_queue = None + # track incoming events to prevent unwanted duplicates + self._incoming_dup_tracker = set() + # tracks which subprocesses are running under this module + self._proc_tracker = set() + # seconds since we've submitted a batch + self._last_submitted_batch = None # additional callbacks to be executed alongside self.cleanup() self.cleanup_callbacks = [] self._cleanedup = False self._watched_events = None - def setup(self): - """ - Perform setup functions at the beginning of the scan. - Optionally override this method. + self._task_counter = TaskCounter() + + # string constant + self._custom_filter_criteria_msg = "it did not meet custom filter criteria" + + self._api_keys = [] + + # track number of failures (for .api_request()) + self._api_request_failures = 0 + + self._tasks = [] + self._event_received = None + + # used for optional "per host" tracking + self._per_host_tracker = set() - Must return True or False based on whether the setup was successful + async def setup(self): """ + Performs one-time setup tasks for the module. + + This method is responsible for preparing the module for its operation, which may include tasks + such as downloading necessary resources, validating configuration parameters, or other preliminary + checks. + + Returns: + tuple: + - bool or None: A status indicating the outcome of the setup process. Returns `True` if + the setup was successful, `None` for a soft-fail where the module setup did not succeed + but the scan will continue with the module disabled, and `False` for a hard-fail where + the setup failure causes the scan to abort. + - str, optional: A reason for the setup failure, provided only when the setup does not + succeed (i.e., returns `None` or `False`). + + Examples: + >>> async def setup(self): + >>> if not self.config.get("api_key"): + >>> # Soft-fail: Configuration missing an API key + >>> return None, "No API key specified" + + >>> async def setup(self): + >>> try: + >>> wordlist = await self.helpers.wordlist("https://raw.githubusercontent.com/user/wordlist.txt") + >>> except WordlistError as e: + >>> # Hard-fail: Error retrieving wordlist + >>> return False, f"Error retrieving wordlist: {e}" + + >>> async def setup(self): + >>> self.timeout = self.config.get("timeout", 5) + >>> # Success: Setup completed without issues + >>> return True + """ + return True - def handle_event(self, event): - """ - Override this method if batch_size == 1. + async def handle_event(self, event): + """Asynchronously handles incoming events that the module is configured to watch. + + This method is automatically invoked when an event that matches any in `watched_events` is encountered during a scan. Override this method to implement custom event-handling logic for your module. + + Args: + event (Event): The event object containing details about the incoming event. + + Note: + This method should be overridden if the `batch_size` attribute of the module is set to 1. 
+ + Returns: + None """ pass - def handle_batch(self, *events): - """ - Override this method if batch_size > 1. + async def handle_batch(self, *events): + """Handles incoming events in batches for optimized processing. + + This method is automatically called when multiple events that match any in `watched_events` are encountered and the `batch_size` attribute is set to a value greater than 1. Override this method to implement custom batch event-handling logic for your module. + + Args: + *events (Event): A variable number of Event objects to be processed in a batch. + + Note: + This method should be overridden if the `batch_size` attribute of the module is set to a value greater than 1. + + Returns: + None """ pass - def filter_event(self, event): - """ - Accept/reject events based on custom criteria + async def filter_event(self, event): + """Asynchronously filters incoming events based on custom criteria. - Override this method if you need more granular control - over which events are distributed to your module + Override this method for more granular control over which events are accepted by your module. This method is called automatically before `handle_event()` for each incoming event that matches any in `watched_events`. + + Args: + event (Event): The incoming Event object to be filtered. + + Returns: + tuple: A 2-tuple where the first value is a bool indicating whether the event should be accepted, and the second value is a string explaining the reason for its acceptance or rejection. By default, returns `(True, None)` to indicate acceptance without reason. + + Note: + This method should be overridden if the module requires custom logic for event filtering. """ return True - def finish(self): - """ - Perform final functions when scan is nearing completion + async def finish(self): + """Asynchronously performs final tasks as the scan nears completion. + + This method can be overridden to execute any necessary finalization logic. For example, if the module relies on a word cloud, you might wait for the scan to finish to ensure the word cloud is most complete before running an operation. - For example, if your module relies on the word cloud, you may choose to wait until - the scan is finished (and the word cloud is most complete) before running an operation. + Returns: + None - Note that this method may be called multiple times, because it may raise events. - Optionally override this method. + Warnings: + This method may be called multiple times since it can raise events, which may re-trigger the "finish" phase of the scan. Optional to override. """ return - def report(self): + async def report(self): + """Asynchronously executes a final task after the scan is complete but before cleanup. + + This method can be overridden to aggregate data and raise summary events at the end of the scan. + + Returns: + None + + Note: + This method is called only once per scan. """ - Perform a final task when the scan is finished, but before cleanup happens + return + + async def cleanup(self): + """Asynchronously performs final cleanup operations after the scan is complete. + + This method can be overridden to implement custom cleanup logic. It is called only once per scan and may not raise events. + + Returns: + None - This is useful for modules that aggregate data and raise summary events at the end of a scan + Note: + This method is called only once per scan and may not raise events. 
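Example (illustrative): extra cleanup callbacks registered during setup() run alongside cleanup():
    >>> async def setup(self):
    >>>     self.cleanup_callbacks.append(lambda: self.debug("extra cleanup"))
    >>>     return True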
""" return - def cleanup(self): + async def require_api_key(self): """ - Perform final cleanup after the scan has finished - This method is called only once, and may not raise events. - Optionally override this method. + Asynchronously checks if an API key is required and valid. + + Args: + None + + Returns: + bool or tuple: Returns True if API key is valid and ready. + Returns a tuple (None, "error message") otherwise. + + Notes: + - Fetches the API key from the configuration. + - Calls the 'ping()' method to test API accessibility. + - Sets the API key readiness status accordingly. """ - return + self.api_key = self.config.get("api_key", "") + if self.auth_secret: + try: + await self.ping() + self.hugesuccess("API is ready") + return True, "" + except Exception as e: + self.trace(traceback.format_exc()) + return None, f"Error with API ({str(e).strip()})" + else: + return None, "No API key set" - def get_watched_events(self): + @property + def api_key(self): + if self._api_keys: + return self._api_keys[0] + + @api_key.setter + def api_key(self, api_keys): + if isinstance(api_keys, str): + api_keys = [api_keys] + self._api_keys = list(api_keys) + + def cycle_api_key(self): + if len(self._api_keys) > 1: + self.verbose("Cycling API key") + self._api_keys.insert(0, self._api_keys.pop()) + else: + self.debug("No extra API keys to cycle") + + @property + def api_retries(self): + return max(self._api_retries + 1, len(self._api_keys)) + + @property + def api_failure_abort_threshold(self): + return (self.api_retries * self._api_failure_abort_threshold) + 1 + + async def ping(self, url=None): + """Asynchronously checks the health of the configured API. + + This method is used in conjunction with require_api_key() to verify that the API is not just configured, but also responsive. It makes a test request to a known endpoint to validate the API's health. + + The method uses the `ping_url` attribute if defined, or falls back to a provided URL. If neither is available, no request is made. + + Args: + url (str, optional): A specific URL to use for the ping request. If not provided, the method will use the `ping_url` attribute. + + Returns: + None + + Raises: + ValueError: If the API response is not successful (status code != 200). 
+ + Example Usage: + To use this method, simply define the `ping_url` attribute in your module: + + class MyModule(BaseModule): + ping_url = "https://api.example.com/ping" + + Alternatively, you can override this method for more complex health checks: + + async def ping(self): + r = await self.api_request(f"{self.base_url}/complex-health-check") + if r.status_code != 200 or r.json().get('status') != 'healthy': + raise ValueError(f"API unhealthy: {r.text}") """ - Override if you need your watched_events to be dynamic + if url is None: + url = getattr(self, "ping_url", "") + if url: + r = await self.api_request(url) + if getattr(r, "status_code", 0) != 200: + response_text = getattr(r, "text", "no response from server") + raise ValueError(response_text) + + @property + def batch_size(self): + batch_size = self.config.get("batch_size", None) + # only allow overriding the batch size if its default value is greater than 1 + # this prevents modules from being accidentally neutered by an incorrect batch_size setting + if batch_size is None or self._batch_size == 1: + batch_size = self._batch_size + return batch_size + + @property + def module_threads(self): + module_threads = self.config.get("module_threads", None) + if module_threads is None: + module_threads = self._module_threads + return module_threads + + @property + def auth_secret(self): + """Indicates if the module is properly configured for authentication. + + This read-only property should be used to check whether all necessary attributes (e.g., API keys, tokens, etc.) are configured to perform authenticated requests in the module. Commonly used in setup or initialization steps. + + Returns: + bool: True if the module is properly configured for authentication, otherwise False. + """ + return getattr(self, "api_key", "") + + @property + def event_received(self): + if self._event_received is None: + self._event_received = asyncio.Condition() + return self._event_received + + def get_watched_events(self): + """Retrieve the set of events that the module is interested in observing. + + Override this method if the set of events the module should watch needs to be determined dynamically, e.g., based on configuration options or other runtime conditions. + + Returns: + set: The set of event types that this module will handle. """ if self._watched_events is None: self._watched_events = set(self.watched_events) return self._watched_events - def submit_task(self, *args, **kwargs): - return self.thread_pool.submit_task(self.catch, *args, **kwargs) + async def _handle_batch(self): + """ + Asynchronously handles a batch of events in the module. 
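(With `batch_size` > 1, module authors override `handle_batch()` rather than `handle_event()`; an illustrative override:)
    >>> async def handle_batch(self, *events):
    >>>     hosts = [str(e.host) for e in events]
    >>>     self.verbose(f"Processing {len(hosts):,} hosts in one batch")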
- def catch(self, *args, **kwargs): - try: - lock_brutes = kwargs.pop("_lock_brutes", False) and "brute-force" in self.flags - lock_acquired = False - if lock_brutes: - lock_acquired = self.scan._brute_lock.acquire() - return self.scan.manager.catch(*args, **kwargs) - finally: - if lock_brutes and lock_acquired: - self.scan._brute_lock.release() - - def _handle_batch(self, force=False): - if self.num_queued_events > 0 and (force or self.num_queued_events >= self.batch_size): - self._batch_idle = 0 - on_finish_callback = None - events, finish, report = self.events_waiting - if finish: - on_finish_callback = self.finish - elif report: - on_finish_callback = self.report - if events: - self.debug(f"Handling batch of {len(events):,} events") - self._internal_thread_pool.submit_task( - self.catch, - self.handle_batch, - *events, - _on_finish_callback=on_finish_callback, - _lock_brutes=True, - ) - return True - return False + Args: + None + + Returns: + bool: True if events were submitted for processing, False otherwise. + + Notes: + - The method is wrapped in a task counter to monitor asynchronous operations. + - Checks if there are any events in the incoming queue and module is not in an error state. + - Invokes '_events_waiting()' to fetch a batch of events. + - Calls the module's 'handle_batch()' method to process these events. + - If a "FINISHED" event is found, invokes 'finish()' method of the module. + """ + finish = False + async with self._task_counter.count(f"{self.name}.handle_batch()") as counter: + submitted = False + if self.batch_size <= 1: + return + if self.num_incoming_events > 0: + events, finish = await self._events_waiting() + if events and not self.errored: + counter.n = len(events) + self.verbose(f"Handling batch of {len(events):,} events") + submitted = True + async with self.scan._acatch(f"{self.name}.handle_batch()"): + await self.handle_batch(*events) + self.verbose(f"Finished handling batch of {len(events):,} events") + if finish: + context = f"{self.name}.finish()" + async with self.scan._acatch(context), self._task_counter.count(context): + await self.finish() + return submitted def make_event(self, *args, **kwargs): + """Create an event for the scan. + + Raises a validation error if the event could not be created, unless raise_error is set to False. + + Args: + *args: Positional arguments to be passed to the scan's make_event method. + **kwargs: Keyword arguments to be passed to the scan's make_event method. + raise_error (bool, optional): Whether to raise a validation error if the event could not be created. Defaults to False. + + Examples: + >>> new_event = self.make_event("1.2.3.4", parent=event) + >>> await self.emit_event(new_event) + + Returns: + Event or None: The created event, or None if a validation error occurred and raise_error was False. + + Raises: + ValidationError: If the event could not be validated and raise_error is True. 
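Example (illustrative) of opting into validation errors with raise_error:
    >>> try:
    >>>     event = self.make_event("invalid host", "DNS_NAME", parent=event, raise_error=True)
    >>> except ValidationError:
    >>>     self.warning("Could not create event")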
+ """ raise_error = kwargs.pop("raise_error", False) + module = kwargs.pop("module", None) + if module is None: + if (not args) or getattr(args[0], "module", None) is None: + kwargs["module"] = self try: event = self.scan.make_event(*args, **kwargs) except ValidationError as e: @@ -208,73 +495,130 @@ def make_event(self, *args, **kwargs): raise self.warning(f"{e}") return - if not event.module: - event.module = self return event - def emit_event(self, *args, **kwargs): - if self.scan.stopping: - return - on_success_callback = kwargs.pop("on_success_callback", None) - abort_if = kwargs.pop("abort_if", lambda e: False) - quick = kwargs.pop("quick", False) - event = self.make_event(*args, **kwargs) + async def emit_event(self, *args, **kwargs): + """Emit an event to the event queue and distribute it to interested modules. + + This is how modules "return" data. + + The method first creates an event object by calling `self.make_event()` with the provided arguments. + Then, the event is queued for outgoing distribution using `self.queue_outgoing_event()`. + + Args: + *args: Positional arguments to be passed to `self.make_event()` for event creation. + **kwargs: Keyword arguments to be passed for event creation or configuration of the emit action. + ```markdown + - on_success_callback: Optional callback function to execute upon successful event emission. + - abort_if: Optional condition under which the event emission should be aborted. + - quick: Optional flag to indicate whether the event should be processed quickly. + ``` + + Examples: + >>> await self.emit_event("www.evilcorp.com", parent=event, tags=["affiliate"]) + + >>> new_event = self.make_event("1.2.3.4", parent=event) + >>> await self.emit_event(new_event) + + Returns: + None + + Raises: + ValidationError: If the event cannot be validated (handled in `self.make_event()`). + """ + event_kwargs = dict(kwargs) + emit_kwargs = {} + for o in ("on_success_callback", "abort_if", "quick"): + v = event_kwargs.pop(o, None) + if v is not None: + emit_kwargs[o] = v + event = self.make_event(*args, **event_kwargs) if event: - okay = False - while not self.scan.stopping: - okay = self._event_semaphore.acquire(timeout=0.1) - if okay: - self.scan.manager.emit_event( - event, - abort_if=abort_if, - on_success_callback=on_success_callback, - quick=quick, - ) - break + await self.queue_outgoing_event(event, **emit_kwargs) + return event - @property - def events_waiting(self): + async def _events_waiting(self, batch_size=None): """ - yields all events in queue, up to maximum batch size + Asynchronously fetches events from the incoming_event_queue, up to a specified batch size. + + Args: + None + + Returns: + tuple: A tuple containing two elements: + - events (list): A list of acceptable events from the queue. + - finish (bool): A flag indicating if a "FINISHED" event is encountered. + + Notes: + - The method pulls events from incoming_event_queue using 'get_nowait()'. + - Events go through '_event_postcheck()' for validation. + - "FINISHED" events are handled differently and the finish flag is set to True. + - If the queue is empty or the batch size is reached, the loop breaks. 
""" + if batch_size is None: + batch_size = self.batch_size events = [] finish = False - report = False - left = int(self.batch_size) - while left > 0 and self.event_queue: + while self.incoming_event_queue: + if batch_size != -1 and len(events) > self.batch_size: + break try: - event = self.event_queue.get_nowait() - if type(event) == str: - if event == "FINISHED": + event = self.incoming_event_queue.get_nowait() + self.debug(f"Got {event} from {getattr(event, 'module', 'unknown_module')}") + acceptable, reason = await self._event_postcheck(event) + if acceptable: + if event.type == "FINISHED": finish = True - elif event == "REPORT": - report = True - else: - left -= 1 - events.append(event) - except queue.Empty: + else: + events.append(event) + self.scan.stats.event_consumed(event, self) + elif reason: + self.debug(f"Not accepting {event} because {reason}") + except asyncio.queues.QueueEmpty: break - return events, finish, report + return events, finish @property - def num_queued_events(self): + def num_incoming_events(self): ret = 0 - if self.event_queue: - ret = self.event_queue.qsize() + if self.incoming_event_queue is not False: + ret = self.incoming_event_queue.qsize() return ret def start(self): - self.thread = threading.Thread(target=self._worker, daemon=True) - self.thread.start() + self._tasks = [ + asyncio.create_task(self._worker(), name=f"{self.scan.name}.{self.name}._worker()") + for _ in range(self.module_threads) + ] + + async def _setup(self): + """ + Asynchronously sets up the module by invoking its 'setup()' method. + + This method catches exceptions during setup, sets the module's error state if necessary, and determines the + status code based on the result of the setup process. + + Args: + None - def _setup(self): + Returns: + tuple: A tuple containing the module's name, status (True for success, False for hard-fail, None for soft-fail), + and an optional status message. + Raises: + Exception: Captured exceptions from the 'setup()' method are logged, but not propagated. + + Notes: + - The 'setup()' method can return either a simple boolean status or a tuple of status and message. + - A WordlistError exception triggers a soft-fail status. + - The debug log will contain setup status information for the module. 
+ """ status_codes = {False: "hard-fail", None: "soft-fail", True: "success"} status = False self.debug(f"Setting up module {self.name}") try: - result = self.setup() + result = await self.setup() if type(result) == tuple and len(result) == 2: status, msg = result else: @@ -282,176 +626,436 @@ def _setup(self): msg = status_codes[status] self.debug(f"Finished setting up module {self.name}") except Exception as e: - self.set_error_state() - if isinstance(e, WordlistError): - status = None + self.set_error_state(f"Unexpected error during module setup: {e}", critical=True) msg = f"{e}" - self.debug(traceback.format_exc()) - return status, str(msg) + self.trace() + return self, status, str(msg) - @property - def _force_batch(self): - """ - Determine whether a batch should be forcefully submitted - """ - # if we've been idle long enough - if self._batch_idle >= self.batch_wait: - return True - # if scan is finishing - if self.scan.status == "FINISHING": - return True - # if there's a batch stalemate - batch_modules = [m for m in self.scan.modules.values() if m.batch_size > 1] - if all([(not m.running) for m in batch_modules]): - return True - return False - - def _worker(self): - # keep track of how long we've been running - iterations = 0 - try: - while not self.scan.stopping: - iterations += 1 - if self.batch_size > 1: - if iterations % 3 == 0: - self._batch_idle += 1 - force = self._force_batch - if force: - self._batch_idle = 0 - submitted = self._handle_batch(force=force) - if not submitted: - sleep(0.3333) + async def _worker(self): + """ + The core worker loop for the module, responsible for handling events from the incoming event queue. - else: - try: - if self.event_queue: - e = self.event_queue.get_nowait() - else: - self.debug(f"Event queue is in bad state") - return - except queue.Empty: - sleep(0.3333) + This method is a coroutine and is run asynchronously. Multiple instances can run simultaneously based on + the 'module_threads' configuration. The worker dequeues events from 'incoming_event_queue', performs + necessary prechecks, and passes the event to the appropriate handler function. + + Args: + None + + Returns: + None + + Raises: + asyncio.CancelledError: If the worker is cancelled during its operation. + + Notes: + - The worker is sensitive to the 'stopping' flag of the scan. It will terminate if this flag is set. + - The worker handles backpressure by pausing when the outgoing event queue is full. + - Batch processing is supported and is activated when 'batch_size' > 1. + - Each event is subject to a post-check via '_event_postcheck()' to decide whether it should be handled. + - Special 'FINISHED' events trigger the 'finish()' method of the module. 
+ """ + async with self.scan._acatch(context=self._worker, unhandled_is_critical=True): + try: + while not self.scan.stopping and not self.errored: + # hold the reigns if our outgoing queue is full + if self._qsize > 0 and self.outgoing_event_queue.qsize() >= self._qsize: + await asyncio.sleep(0.1) continue - self.debug(f"Got {e} from {getattr(e, 'module', e)}") - # if we receive the special "FINISHED" event - if type(e) == str: - if e == "FINISHED": - self._internal_thread_pool.submit_task(self.catch, self.finish) - elif e == "REPORT": - self._internal_thread_pool.submit_task(self.catch, self.report) - else: - if self._type == "output": - self.catch(self.handle_event, e) - else: - self._internal_thread_pool.submit_task(self.catch, self.handle_event, e, _lock_brutes=True) - except KeyboardInterrupt: - self.debug(f"Interrupted") - self.scan.stop() - except ScanCancelledError as e: - self.verbose(f"Scan cancelled, {e}") - except Exception as e: - self.set_error_state(f"Exception ({e.__class__.__name__}) in module {self.name}:\n{e}") - self.debug(traceback.format_exc()) - - def _filter_event(self, event): - # special "FINISHED" event - if type(event) == str: - if event in ("FINISHED", "REPORT"): - return True - else: - return False - # exclude non-watched types - if not any(t in self.get_watched_events() for t in ("*", event.type)): - return False - # built-in filtering based on scope distance, etc. - acceptable, reason = self.event_acceptable(event) - if not acceptable: - self.debug(f"Not accepting {event} because {reason}") - return False - # custom filtering - try: - if not self.filter_event(event): - self.debug(f"{event} did not meet custom filter criteria") - return False - except Exception as e: - import traceback + # if batch wasn't big enough, we wait for the next event before continuing + if self.batch_size > 1: + submitted = await self._handle_batch() + if not submitted: + async with self.event_received: + await self.event_received.wait() - self.error(f"Error in filter_event({event}): {e}") - self.debug(traceback.format_exc()) - return True + else: + try: + if self.incoming_event_queue is not False: + event = await self.incoming_event_queue.get() + else: + self.debug("Event queue is in bad state") + break + except asyncio.queues.QueueEmpty: + continue + self.debug(f"Got {event} from {getattr(event, 'module', 'unknown_module')}") + async with self._task_counter.count(f"event_postcheck({event})"): + acceptable, reason = await self._event_postcheck(event) + if acceptable: + if event.type == "FINISHED": + context = f"{self.name}.finish()" + async with self.scan._acatch(context), self._task_counter.count(context): + await self.finish() + else: + context = f"{self.name}.handle_event({event})" + self.scan.stats.event_consumed(event, self) + self.debug(f"Handling {event}") + async with self.scan._acatch(context), self._task_counter.count(context): + await self.handle_event(event) + self.debug(f"Finished handling {event}") + else: + self.debug(f"Not accepting {event} because {reason}") + except asyncio.CancelledError: + # this trace was used for debugging leaked CancelledErrors from inside httpx + # self.log.trace("Worker cancelled") + raise + except BaseException as e: + if self.helpers.in_exception_chain(e, (KeyboardInterrupt,)): + self.scan.stop() + else: + self.error(f"Critical failure in module {self.name}: {e}") + self.error(traceback.format_exc()) + self.log.trace("Worker stopped") @property def max_scope_distance(self): if self.in_scope_only or self.target_only: return 0 + if 
self.scope_distance_modifier is None: + return 999 return max(0, self.scan.scope_search_distance + self.scope_distance_modifier) - def event_acceptable(self, e): + def _event_precheck(self, event): """ - Return the max scope distance for an event that this module is qualified to accept + Pre-checks an event to determine if it should be accepted by the module for queuing. + + This method is called when an event is about to be enqueued into the module's incoming event queue. + It applies various filters such as special signal event types, module error state, watched event types, and more + to decide whether or not the event should be enqueued. + + Args: + event (Event): The event object to check. + + Returns: + tuple: A tuple (bool, str) where the bool indicates if the event should be accepted, and the str gives the reason. + + Examples: + >>> result, reason = self._event_precheck(event) + >>> if result: + ... self.incoming_event_queue.put_nowait(event) + ... else: + ... self.debug(f"Not accepting {event} because {reason}") + + Notes: + - The method considers special signal event types like "FINISHED". + - Checks whether the module is in an error state. + - Checks if the event type matches the types this module is interested in (`watched_events`). + - Checks for events tagged as 'target' if the module has `target_only` flag set. + - Applies specific filtering based on event type and module name. """ - acceptable = True - reason = "" + + # special signal event types + if event.type in ("FINISHED",): + return True, "its type is FINISHED" + if self.errored: + return False, "module is in error state" + # exclude non-watched types + if not any(t in self.get_watched_events() for t in ("*", event.type)): + return False, "its type is not in watched_events" if self.target_only: - if "target" not in e.tags: - acceptable = False - reason = "it did not meet target_only filter criteria" - if self.in_scope_only: - if e.scope_distance > 0: - acceptable = False - reason = "it did not meet in_scope_only filter criteria" - if self.scope_distance_modifier is not None: - if e.scope_distance < 0: - acceptable = False - reason = f"its scope_distance ({e.scope_distance}) is invalid." - elif e.scope_distance > self.max_scope_distance: - acceptable = False - reason = f"its scope_distance ({e.scope_distance}) exceeds the maximum allowed by the scan ({self.scan.scope_search_distance}) + the module ({self.scope_distance_modifier}) == {self.max_scope_distance}" - - # if the event is an IP address that came from a CIDR - source_is_range = getattr(e.source, "type", "") == "IP_RANGE" - if source_is_range and e.type == "IP_ADDRESS" and str(e.module) == "speculate" and self.name != "speculate": - # and the current module listens for both ranges and CIDRs - if all([x in self.watched_events for x in ("IP_RANGE", "IP_ADDRESS")]): - # then skip the event. - # this helps avoid double-portscanning both an individual IP and its parent CIDR. - acceptable = False - reason = "module consumes IP ranges directly" + if "target" not in event.tags: + return False, "it did not meet target_only filter criteria" + + # exclude certain URLs (e.g. 
javascript): + # TODO: revisit this after httpx rework + if event.type.startswith("URL") and self.name != "httpx" and "httpx-only" in event.tags: + return False, "its extension was listed in url_extension_httpx_only" + + return True, "precheck succeeded" + + async def _event_postcheck(self, event): + """ + A simple wrapper for dup tracking + """ + # special exception for "FINISHED" event + if event.type in ("FINISHED",): + return True, "" + acceptable, reason = await self._event_postcheck_inner(event) + if acceptable: + # check duplicates + is_incoming_duplicate, reason = self.is_incoming_duplicate(event, add=True) + if is_incoming_duplicate and not self.accept_dupes: + return False, "module has already seen it" + (f" ({reason})" if reason else "") return acceptable, reason - def _cleanup(self): + async def _event_postcheck_inner(self, event): + """ + Post-checks an event to determine if it should be accepted by the module for handling. + + This method is called when an event is dequeued from the module's incoming event queue, right before it is actually processed. + It applies various filters such as scope, custom filtering logic, and per-host tracking to decide the event's fate. + + Args: + event (Event): The event object to check. + + Returns: + tuple: A tuple (bool, str) where the bool indicates if the event should be accepted, and the str gives the reason. + + Notes: + - Override the `filter_event` method for custom filtering logic. + - This method also maintains host-based tracking when the `per_host_only` or similar flags are set. + - The method will also update event production stats for output modules. + """ + # force-output certain events to the graph + if self._is_graph_important(event): + return True, "event is critical to the graph" + + # check scope distance + filter_result, reason = self._scope_distance_check(event) + if not filter_result: + return filter_result, reason + + # custom filtering + async with self.scan._acatch(context=self.filter_event): + try: + filter_result = await self.filter_event(event) + except Exception as e: + msg = f"Unhandled exception in {self.name}.filter_event({event}): {e}" + self.error(msg) + return False, msg + msg = str(self._custom_filter_criteria_msg) + with suppress(ValueError, TypeError): + filter_result, reason = filter_result + msg += f": {reason}" + if not filter_result: + return False, msg + + self.debug(f"{event} passed post-check") + return True, "" + + def _scope_distance_check(self, event): + if self.in_scope_only: + if event.scope_distance > 0: + return False, "it did not meet in_scope_only filter criteria" + if self.scope_distance_modifier is not None: + if event.scope_distance < 0: + return False, f"its scope_distance ({event.scope_distance}) is invalid." 
+ elif event.scope_distance > self.max_scope_distance: + return ( + False, + f"its scope_distance ({event.scope_distance}) exceeds the maximum allowed by the scan ({self.scan.scope_search_distance}) + the module ({self.scope_distance_modifier}) == {self.max_scope_distance}", + ) + return True, "" + + async def _cleanup(self): if not self._cleanedup: self._cleanedup = True for callback in [self.cleanup] + self.cleanup_callbacks: + context = f"{self.name}.cleanup()" if callable(callback): - self.catch(callback, _force=True) - - def queue_event(self, e): - if self.event_queue is not None and not self.errored: - if self._filter_event(e): - if is_event(e): - self.scan.stats.event_consumed(e, self) - self.event_queue.put(e) - else: - self.debug(f"Not in an acceptable state to queue event") + async with self.scan._acatch(context), self._task_counter.count(context): + await self.helpers.execute_sync_or_async(callback) - def set_error_state(self, message=None): - if message is not None: - self.error(str(message)) + async def queue_event(self, event): + """ + Asynchronously queues an incoming event to the module's event queue for further processing. + + The function performs an initial check to see if the event is acceptable for queuing. + If the event passes the check, it is put into the `incoming_event_queue`. + + Args: + event: The event object to be queued. + + Returns: + None: The function doesn't return anything but modifies the state of the `incoming_event_queue`. + + Examples: + >>> await self.queue_event(some_event) + + Raises: + AttributeError: If the module is not in an acceptable state to queue incoming events. + """ + async with self._task_counter.count("queue_event()", _log=False): + if self.incoming_event_queue is False: + self.debug("Not in an acceptable state to queue incoming event") + return + acceptable, reason = self._event_precheck(event) + if not acceptable: + if reason and reason != "its type is not in watched_events": + self.debug(f"Not queueing {event} because {reason}") + return + else: + self.debug(f"Queueing {event} because {reason}") + try: + self.incoming_event_queue.put_nowait(event) + async with self.event_received: + self.event_received.notify() + if event.type != "FINISHED": + self.scan._new_activity = True + except AttributeError: + self.debug("Not in an acceptable state to queue incoming event") + + async def queue_outgoing_event(self, event, **kwargs): + """ + Queues an outgoing event to the module's outgoing event queue for further processing. + + The function attempts to put the event into the `outgoing_event_queue` immediately. + If it's not possible due to the current state of the module, an AttributeError is raised, and a debug log is generated. + + Args: + event: The event object to be queued. + **kwargs: Additional keyword arguments to be associated with the event. + + Returns: + None: The function doesn't return anything but modifies the state of the `outgoing_event_queue`. + + Examples: + >>> self.queue_outgoing_event(some_outgoing_event, abort_if=lambda e: "unresolved" in e.tags) + + Raises: + AttributeError: If the module is not in an acceptable state to queue outgoing events. + """ + try: + await self.outgoing_event_queue.put((event, kwargs)) + except AttributeError: + self.debug("Not in an acceptable state to queue outgoing event") + + def set_error_state(self, message=None, clear_outgoing_queue=False, critical=False): + """ + Puts the module into an errored state where it cannot accept new events. Optionally logs a warning message. 
+
+        The function sets the module's `errored` attribute to True and logs a warning with the optional message.
+        It also clears the incoming event queue to prevent further processing and updates its status to False.
+
+        Args:
+            message (str, optional): Additional message to be logged along with the warning.
+            clear_outgoing_queue (bool, optional): Whether to also empty the outgoing event queue. Defaults to False.
+            critical (bool, optional): If True, the message is logged at error level instead of warning level. Defaults to False.
+
+        Returns:
+            None: The function doesn't return anything but updates the `errored` state and clears the incoming event queue.
+
+        Examples:
+            >>> self.set_error_state()
+            >>> self.set_error_state("Failed to connect to the server")
+
+        Notes:
+            - The function sets `self._incoming_event_queue` to False to prevent its further use.
+            - If the module was already in an errored state, the function will not reset the error state or the queue.
+        """
         if not self.errored:
-            self.debug(f"Setting error state for module {self.name}")
+            log_msg = "Setting error state"
+            if message is not None:
+                log_msg += f": {message}"
+            if critical:
+                log_fn = self.error
+            else:
+                log_fn = self.warning
+            log_fn(log_msg)
             self.errored = True
             # clear incoming queue
-            if self.event_queue:
-                self.debug(f"Emptying event_queue")
-                with suppress(queue.Empty):
+            if self.incoming_event_queue is not False:
+                self.debug("Emptying event_queue")
+                with suppress(asyncio.queues.QueueEmpty):
                     while 1:
-                        self.event_queue.get_nowait()
+                        self.incoming_event_queue.get_nowait()
             # set queue to None to prevent its use
             # if there are leftover objects in the queue, the scan will hang.
-            self._event_queue = False
+            self._incoming_event_queue = False
+
+            if clear_outgoing_queue:
+                with suppress(asyncio.queues.QueueEmpty):
+                    while 1:
+                        self.outgoing_event_queue.get_nowait()
+
+    def is_incoming_duplicate(self, event, add=False):
+        if event.type in ("FINISHED",):
+            return False, ""
+        reason = ""
+        try:
+            event_hash = self._incoming_dedup_hash(event)
+        except Exception as e:
+            msg = f"Unhandled exception in {self.name}._incoming_dedup_hash({event}): {e}"
+            self.error(msg)
+            return True, msg
+        with suppress(TypeError, ValueError):
+            event_hash, reason = event_hash
+        is_dup = event_hash in self._incoming_dup_tracker
+        if add:
+            self._incoming_dup_tracker.add(event_hash)
+        return is_dup, reason
+
+    def _incoming_dedup_hash(self, event):
+        """
+        Determines the criteria for what is considered to be a duplicate event if `accept_dupes` is False.
+        """
+        if self.per_host_only:
+            return self.get_per_host_hash(event), "per_host_only=True"
+        elif self.per_hostport_only:
+            return self.get_per_hostport_hash(event), "per_hostport_only=True"
+        elif self.per_domain_only:
+            return self.get_per_domain_hash(event), "per_domain_only=True"
+        return hash(event), ""
+
+    def _outgoing_dedup_hash(self, event):
+        """
+        Determines the criteria for what is considered to be a duplicate event if `suppress_dupes` is True.
+
+        We take into account the `internal` attribute because we don't want an internal event (which isn't distributed to output modules)
+        to inadvertently suppress a non-internal event.
+        """
+        return hash((event, self.name, event.internal, event.always_emit))
+
+    def get_per_host_hash(self, event):
+        """
+        Computes a per-host hash value for a given event. This method may be optionally overridden in subclasses.
+
+        The function uses the event's `host` to create a string to be hashed.
+
+        Args:
+            event (Event): The event object containing host information.
+
+        Returns:
+            int: The hash value computed for the host.
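+
+        Notes:
+            - Used by `_incoming_dedup_hash` when `per_host_only` is set, so the module
+              handles at most one event per host.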
+
+        Examples:
+            >>> event = self.make_event("https://example.com:8443")
+            >>> self.get_per_host_hash(event)
+        """
+        return hash(event.host)
+
+    def get_per_hostport_hash(self, event):
+        """
+        Computes a per-host:port hash value for a given event. This method may be optionally overridden in subclasses.
+
+        The function uses the event's `host`, `port`, and `scheme` (for URLs) to create a string to be hashed.
+        The hash value is used for distinguishing events related to the same host and port.
+
+        Args:
+            event (Event): The event object containing host, port, or parsed URL information.
+
+        Returns:
+            int: The hash value computed for the host and port.
+
+        Examples:
+            >>> event = self.make_event("https://example.com:8443")
+            >>> self.get_per_hostport_hash(event)
+        """
+        parsed = getattr(event, "parsed_url", None)
+        if parsed is None:
+            to_hash = self.helpers.make_netloc(event.host, event.port)
+        else:
+            to_hash = f"{parsed.scheme}://{parsed.netloc}/"
+        return hash(to_hash)
+
+    def get_per_domain_hash(self, event):
+        """
+        Computes a per-domain hash value for a given event. This method may be optionally overridden in subclasses.
+
+        Events with the same root domain will receive the same hash value.
+
+        Args:
+            event (Event): The event object containing host, port, or parsed URL information.
+
+        Returns:
+            int: The hash value computed for the domain.
+
+        Examples:
+            >>> event = self.make_event("https://www.example.com:8443")
+            >>> self.get_per_domain_hash(event)
+        """
+        _, domain = self.helpers.split_domain(event.host)
+        return hash(domain)
 
     @property
     def name(self):
@@ -463,97 +1067,649 @@ def helpers(self):
 
     @property
     def status(self):
-        main_pool = self.thread_pool.num_tasks
-        internal_pool = self._internal_thread_pool.num_tasks
-        pool_total = main_pool + internal_pool
-        incoming_qsize = 0
-        if self.event_queue:
-            incoming_qsize = self.event_queue.qsize()
-        outgoing_qsize = self._qsize - self._event_semaphore._value
+        """
+        Provides the current status of the module as a dictionary.
+
+        The dictionary contains the following keys:
+        - 'events': A sub-dictionary with 'incoming' and 'outgoing' keys, representing the number of events in the respective queues.
+        - 'tasks': The current value of the task counter.
+        - 'errored': A boolean value indicating if the module is in an error state.
+        - 'running': A boolean value indicating if the module is currently processing data.
+
+        Returns:
+            dict: A dictionary containing the current status of the module.
+
+        Examples:
+            >>> self.status
+            {'events': {'incoming': 5, 'outgoing': 2}, 'tasks': 3, 'errored': False, 'running': True}
+        """
         status = {
-            "events": {"incoming": incoming_qsize, "outgoing": outgoing_qsize},
-            "tasks": {"main_pool": main_pool, "internal_pool": internal_pool, "total": pool_total},
+            "events": {"incoming": self.num_incoming_events, "outgoing": self.outgoing_event_queue.qsize()},
+            "tasks": self._task_counter.value,
             "errored": self.errored,
         }
-        status["running"] = self._is_running(status)
+        status["running"] = self.running
         return status
 
-    @staticmethod
-    def _is_running(module_status):
-        for pool, count in module_status["tasks"].items():
-            if count > 0:
-                return True
-        return False
-
     @property
     def running(self):
+        """Property indicating whether the module is currently processing data.
+
+        This property checks if the task counter (`self._task_counter.value`) is greater than zero,
+        indicating that there are ongoing tasks in the module.
+
+        Returns:
+            bool: True if the module is currently processing data, False otherwise.
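+
+        Examples:
+            >>> # hypothetical usage, for illustration: wait for the module to go idle
+            >>> while self.running:
+            >>>     await asyncio.sleep(0.1)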
""" - Indicates whether the module is currently processing data. + return self._task_counter.value > 0 + + @property + def finished(self): + """Property indicating whether the module has finished processing. + + This property checks three conditions to determine if the module is finished: + 1. The module is not currently running (`self.running` is False). + 2. The number of incoming events in the queue is zero or less (`self.num_incoming_events <= 0`). + 3. The number of outgoing events in the queue is zero or less (`self.outgoing_event_queue.qsize() <= 0`). + + Returns: + bool: True if the module has finished processing, False otherwise. + """ + return not self.running and self.num_incoming_events <= 0 and self.outgoing_event_queue.qsize() <= 0 + + async def run_process(self, *args, **kwargs): + kwargs["_proc_tracker"] = self._proc_tracker + return await self.helpers.run(*args, **kwargs) + + async def run_process_live(self, *args, **kwargs): + kwargs["_proc_tracker"] = self._proc_tracker + async for line in self.helpers.run_live(*args, **kwargs): + yield line + + def prepare_api_request(self, url, kwargs): + """ + Prepare an API request by adding the necessary authentication - header, bearer token, etc. + """ + if self.api_key: + url = url.format(api_key=self.api_key) + if "headers" not in kwargs: + kwargs["headers"] = {} + kwargs["headers"]["Authorization"] = f"Bearer {self.api_key}" + return url, kwargs + + async def api_request(self, *args, **kwargs): + """ + Makes an HTTP request while automatically: + - avoiding rate limits (sleep/retry) + - cycling API keys + - cancelling after too many failed attempts + """ + url = args[0] if args else kwargs.pop("url", "") + + # loop until we have a successful request + for _ in range(self.api_retries): + if "headers" not in kwargs: + kwargs["headers"] = {} + new_url, kwargs = self.prepare_api_request(url, kwargs) + kwargs["url"] = new_url + + r = await self.helpers.request(**kwargs) + success = r is not None and self._api_response_is_success(r) + + if success: + self._api_request_failures = 0 + else: + status_code = getattr(r, "status_code", 0) + response_text = getattr(r, "text", "") + self.trace(f"API response to {url} failed with status code {status_code}: {response_text}") + self._api_request_failures += 1 + if self._api_request_failures >= self.api_failure_abort_threshold: + self.set_error_state( + f"Setting error state due to {self._api_request_failures:,} failed HTTP requests" + ) + else: + # sleep for a bit if we're being rate limited + retry_after = self._get_retry_after(r) + if retry_after or status_code == 429: + sleep_interval = int(retry_after) if retry_after is not None else self._429_sleep_interval + self.verbose( + f"Sleeping for {sleep_interval:,} seconds due to rate limit (HTTP status: {status_code})" + ) + await asyncio.sleep(sleep_interval) + elif self._api_keys: + # if request failed, cycle API keys and try again + self.cycle_api_key() + continue + break + + return r + + def _get_retry_after(self, r): + # try to get retry_after from headers first + headers = getattr(r, "headers", {}) + retry_after = headers.get("Retry-After", None) + if retry_after is None: + # then look in body json + with suppress(Exception): + body_json = r.json() + if isinstance(body_json, dict): + retry_after = body_json.get("retry_after", None) + if retry_after is not None: + return float(retry_after) + + def _prepare_api_iter_req(self, url, page, page_size, offset, **requests_kwargs): + """ + Default function for preparing an API request for iterating 
through paginated data.
+        """
-        return self._is_running(self.status)
+        url = self.helpers.safe_format(url, page=page, page_size=page_size, offset=offset)
+        return url, requests_kwargs
+
+    def _api_response_is_success(self, r):
+        return r.is_success
+
+    async def api_page_iter(self, url, page_size=100, _json=True, next_key=None, iter_key=None, **requests_kwargs):
+        """
+        An asynchronous generator function for iterating through paginated API data.
+
+        This function continuously makes requests to a specified API URL, incrementing the page number
+        or applying a custom pagination function, and yields the received data one page at a time.
+        It is well-suited for APIs that provide paginated results.
+
+        Args:
+            url (str): The initial API URL. Can contain placeholders for 'page', 'page_size', and 'offset'.
+            page_size (int, optional): The number of items per page. Defaults to 100.
+            _json (bool, optional): If True, attempts to deserialize the response content to a JSON object. Defaults to True.
+            next_key (callable, optional): A function that takes the last page's data and returns the URL for the next page. Defaults to None.
+            iter_key (callable, optional): A function that builds each new request based on the page number, page size, and offset. Defaults to a simple implementation that automatically replaces {page} and {page_size} in the URL.
+            **requests_kwargs: Arbitrary keyword arguments that will be forwarded to the HTTP request function.
+
+        Yields:
+            dict or httpx.Response: If '_json' is True, yields a dictionary containing the parsed JSON data. Otherwise, yields the raw HTTP response.
+
+        Note:
+            The loop will continue indefinitely unless manually stopped. Make sure to break out of the loop once the last page has been received.
+
+        Examples:
+            >>> agen = api_page_iter('https://api.example.com/data?page={page}&page_size={page_size}')
+            >>> try:
+            >>>     async for page in agen:
+            >>>         subdomains = page["subdomains"]
+            >>>         self.hugesuccess(subdomains)
+            >>>         if not subdomains:
+            >>>             break
+            >>> finally:
+            >>>     await agen.aclose()
+        """
+        page = 1
+        offset = 0
+        result = None
+        if iter_key is None:
+            iter_key = self._prepare_api_iter_req
+        while 1:
+            if result and callable(next_key):
+                try:
+                    new_url = next_key(result)
+                except Exception as e:
+                    self.debug(f"Failed to extract next page of results from {url}: {e}")
+                    self.debug(traceback.format_exc())
+                    break
+            else:
+                new_url, new_kwargs = iter_key(url, page, page_size, offset, **requests_kwargs)
+            result = await self.api_request(new_url, **new_kwargs)
+            if result is None:
+                self.verbose(f"api_page_iter() got no response for {url}")
+                break
+            try:
+                if _json:
+                    result = result.json()
+                yield result
+            except Exception:
+                self.warning(f'Error in api_page_iter() for url: "{new_url}"')
+                self.trace(traceback.format_exc())
+                break
+            finally:
+                offset += page_size
+                page += 1
+
+    @property
+    def preset(self):
+        return self.scan.preset
 
     @property
     def config(self):
+        """Property that provides easy access to the module's configuration in the scan's config.
+
+        This property serves as a shortcut to retrieve the module-specific configuration from
+        `self.scan.config`. If no configuration is found for this module, an empty dictionary is returned.
+
+        Returns:
+            dict: The configuration dictionary specific to this module.
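+
+        Examples:
+            >>> # hypothetical lookup, for illustration; assumes this module declares an "api_key" option
+            >>> self.config.get("api_key", "")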
+ """ config = self.scan.config.get("modules", {}).get(self.name, {}) if config is None: config = {} return config @property - def event_queue(self): - if self._event_queue is None: - self._event_queue = queue.SimpleQueue() - return self._event_queue + def incoming_event_queue(self): + if self._incoming_event_queue is None: + if self._shuffle_incoming_queue: + self._incoming_event_queue = ShuffleQueue() + else: + self._incoming_event_queue = asyncio.Queue() + return self._incoming_event_queue + + @property + def outgoing_event_queue(self): + if self._outgoing_event_queue is None: + self._outgoing_event_queue = ShuffleQueue(self._qsize) + return self._outgoing_event_queue @property def priority(self): + """ + Gets the priority level of the module as an integer. + + The priority level is constrained to be between 1 and 5, inclusive. + A lower value indicates a higher priority. + + Returns: + int: The priority level of the module, constrained between 1 and 5. + + Examples: + >>> self.priority + 3 + """ return int(max(1, min(5, self._priority))) @property def auth_required(self): return self.meta.get("auth_required", False) + @property + def http_timeout(self): + """ + Convenience shortcut to `http_timeout` in the config + """ + return self.scan.web_config.get("http_timeout", 10) + @property def log(self): - if self._log is None: + if getattr(self, "_log", None) is None: self._log = logging.getLogger(f"bbot.modules.{self.name}") return self._log + @property + def memory_usage(self): + """Property that calculates the current memory usage of the module in bytes. + + This property uses the `get_size` function to estimate the memory consumption + of the module object. The depth of the object graph traversal is limited to 3 levels + to avoid performance issues. Commonly shared objects like `self.scan`, `self.helpers`, + are excluded from the calculation to prevent double-counting. + + Returns: + int: The estimated memory usage of the module in bytes. + """ + seen = {self.scan, self.helpers, self.log} # noqa + return get_size(self, max_depth=3, seen=seen) + def __str__(self): return self.name - def stdout(self, *args, **kwargs): - self.log.stdout(*args, extra={"scan_id": self.scan.id}, **kwargs) + def log_table(self, *args, **kwargs): + """Logs a table to the console and optionally writes it to a file. + + This function generates a table using `self.helpers.make_table`, then logs each line + of the table as an info-level log. If a table_name is provided, it also writes the table to a file. - def debug(self, *args, **kwargs): + Args: + *args: Variable length argument list to be passed to `self.helpers.make_table`. + **kwargs: Arbitrary keyword arguments. If 'table_name' is specified, the table will be written to a file. + + Returns: + str: The generated table as a string. 
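+
+        Notes:
+            - The full table is always returned, and written to disk when `table_name` is given,
+              even if `max_log_entries` truncates the console output.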
+ + Examples: + >>> self.log_table(['Header1', 'Header2'], [['row1col1', 'row1col2'], ['row2col1', 'row2col2']], table_name="my_table") + """ + table_name = kwargs.pop("table_name", None) + max_log_entries = kwargs.pop("max_log_entries", None) + table = self.helpers.make_table(*args, **kwargs) + lines_logged = 0 + for line in table.splitlines(): + if max_log_entries is not None and lines_logged > max_log_entries: + break + self.info(line) + lines_logged += 1 + if table_name is not None: + date = self.helpers.make_date() + filename = self.scan.home / f"{self.helpers.tagify(table_name)}-table-{date}.txt" + with open(filename, "w") as f: + f.write(table) + self.verbose(f"Wrote {table_name} to {filename}") + return table + + def _is_graph_important(self, event): + return self.preserve_graph and getattr(event, "_graph_important", False) and not getattr(event, "_omit", False) + + @property + def preserve_graph(self): + preserve_graph = self.config.get("preserve_graph", None) + if preserve_graph is None: + preserve_graph = self._preserve_graph + return preserve_graph + + def debug(self, *args, trace=False, **kwargs): + """Logs debug messages and optionally the stack trace of the most recent exception. + + Args: + *args: Variable-length argument list to pass to the logger. + trace (bool, optional): Whether to log the stack trace of the most recently caught exception. Defaults to False. + **kwargs: Arbitrary keyword arguments to pass to the logger. + + Examples: + >>> self.debug("This is a debug message") + >>> self.debug("This is a debug message with a trace", trace=True) + """ self.log.debug(*args, extra={"scan_id": self.scan.id}, **kwargs) + if trace: + self.trace() + + def verbose(self, *args, trace=False, **kwargs): + """Logs messages and optionally the stack trace of the most recent exception. + + Args: + *args: Variable-length argument list to pass to the logger. + trace (bool, optional): Whether to log the stack trace of the most recently caught exception. Defaults to False. + **kwargs: Arbitrary keyword arguments to pass to the logger. - def verbose(self, *args, **kwargs): + Examples: + >>> self.verbose("This is a verbose message") + >>> self.verbose("This is a verbose message with a trace", trace=True) + """ self.log.verbose(*args, extra={"scan_id": self.scan.id}, **kwargs) + if trace: + self.trace() + + def hugeverbose(self, *args, trace=False, **kwargs): + """Logs a whole message in emboldened white text, and optionally the stack trace of the most recent exception. + + Args: + *args: Variable-length argument list to pass to the logger. + trace (bool, optional): Whether to log the stack trace of the most recently caught exception. Defaults to False. + **kwargs: Arbitrary keyword arguments to pass to the logger. - def hugeverbose(self, *args, **kwargs): + Examples: + >>> self.hugeverbose("This is a huge verbose message") + >>> self.hugeverbose("This is a huge verbose message with a trace", trace=True) + """ self.log.hugeverbose(*args, extra={"scan_id": self.scan.id}, **kwargs) + if trace: + self.trace() + + def info(self, *args, trace=False, **kwargs): + """Logs informational messages and optionally the stack trace of the most recent exception. + + Args: + *args: Variable-length argument list to pass to the logger. + trace (bool, optional): Whether to log the stack trace of the most recently caught exception. Defaults to False. + **kwargs: Arbitrary keyword arguments to pass to the logger. 
- def info(self, *args, **kwargs): + Examples: + >>> self.info("This is an informational message") + >>> self.info("This is an informational message with a trace", trace=True) + """ self.log.info(*args, extra={"scan_id": self.scan.id}, **kwargs) + if trace: + self.trace() + + def hugeinfo(self, *args, trace=False, **kwargs): + """Logs a whole message in emboldened blue text, and optionally the stack trace of the most recent exception. + + Args: + *args: Variable-length argument list to pass to the logger. + trace (bool, optional): Whether to log the stack trace of the most recently caught exception. Defaults to False. + **kwargs: Arbitrary keyword arguments to pass to the logger. - def hugeinfo(self, *args, **kwargs): + Examples: + >>> self.hugeinfo("This is a huge informational message") + >>> self.hugeinfo("This is a huge informational message with a trace", trace=True) + """ self.log.hugeinfo(*args, extra={"scan_id": self.scan.id}, **kwargs) + if trace: + self.trace() + + def success(self, *args, trace=False, **kwargs): + """Logs a success message, and optionally the stack trace of the most recent exception. + + Args: + *args: Variable-length argument list to pass to the logger. + trace (bool, optional): Whether to log the stack trace of the most recently caught exception. Defaults to False. + **kwargs: Arbitrary keyword arguments to pass to the logger. - def success(self, *args, **kwargs): + Examples: + >>> self.success("Operation completed successfully") + >>> self.success("Operation completed with a trace", trace=True) + """ self.log.success(*args, extra={"scan_id": self.scan.id}, **kwargs) + if trace: + self.trace() + + def hugesuccess(self, *args, trace=False, **kwargs): + """Logs a whole message in emboldened green text, and optionally the stack trace of the most recent exception. + + Args: + *args: Variable-length argument list to pass to the logger. + trace (bool, optional): Whether to log the stack trace of the most recently caught exception. Defaults to False. + **kwargs: Arbitrary keyword arguments to pass to the logger. - def hugesuccess(self, *args, **kwargs): + Examples: + >>> self.hugesuccess("This is a huge success message") + >>> self.hugesuccess("This is a huge success message with a trace", trace=True) + """ self.log.hugesuccess(*args, extra={"scan_id": self.scan.id}, **kwargs) + if trace: + self.trace() + + def warning(self, *args, trace=True, **kwargs): + """Logs a warning message, and optionally the stack trace of the most recent exception. + + Args: + *args: Variable-length argument list to pass to the logger. + trace (bool, optional): Whether to log the stack trace of the most recently caught exception. Defaults to True. + **kwargs: Arbitrary keyword arguments to pass to the logger. - def warning(self, *args, **kwargs): + Examples: + >>> self.warning("This is a warning message") + >>> self.warning("This is a warning message with a trace", trace=False) + """ self.log.warning(*args, extra={"scan_id": self.scan.id}, **kwargs) + if trace: + self.trace() + + def hugewarning(self, *args, trace=True, **kwargs): + """Logs a whole message in emboldened orange text, and optionally the stack trace of the most recent exception. + + Args: + *args: Variable-length argument list to pass to the logger. + trace (bool, optional): Whether to log the stack trace of the most recently caught exception. Defaults to True. + **kwargs: Arbitrary keyword arguments to pass to the logger. 
- def hugewarning(self, *args, **kwargs): + Examples: + >>> self.hugewarning("This is a huge warning message") + >>> self.hugewarning("This is a huge warning message with a trace", trace=False) + """ self.log.hugewarning(*args, extra={"scan_id": self.scan.id}, **kwargs) + if trace: + self.trace() + + def error(self, *args, trace=True, **kwargs): + """Logs an error message, and optionally the stack trace of the most recent exception. + + Args: + *args: Variable-length argument list to pass to the logger. + trace (bool, optional): Whether to log the stack trace of the most recently caught exception. Defaults to True. + **kwargs: Arbitrary keyword arguments to pass to the logger. - def error(self, *args, **kwargs): + Examples: + >>> self.error("This is an error message") + >>> self.error("This is an error message with a trace", trace=False) + """ self.log.error(*args, extra={"scan_id": self.scan.id}, **kwargs) + if trace: + self.trace() + + def trace(self, msg=None): + """Logs the stack trace of the most recently caught exception. + + This method captures the type, value, and traceback of the most recent exception and logs it using the trace level. It is typically used for debugging purposes. - def critical(self, *args, **kwargs): + Anything logged using this method will always be written to the scan's `debug.log`, even if debugging is not enabled. + + Examples: + >>> try: + >>> 1 / 0 + >>> except ZeroDivisionError: + >>> self.trace() + """ + if msg is None: + e_type, e_val, e_traceback = exc_info() + if e_type is not None: + self.log.trace(traceback.format_exc()) + else: + self.log.trace(msg) + + def critical(self, *args, trace=True, **kwargs): + """Logs a whole message in emboldened red text, and optionally the stack trace of the most recent exception. + + Args: + *args: Variable-length argument list to pass to the logger. + trace (bool, optional): Whether to log the stack trace of the most recently caught exception. Defaults to True. + **kwargs: Arbitrary keyword arguments to pass to the logger. + + Examples: + >>> self.critical("This is a critical message") + >>> self.critical("This is a critical message with a trace", trace=False) + """ self.log.critical(*args, extra={"scan_id": self.scan.id}, **kwargs) + if trace: + self.trace() + + +class BaseInterceptModule(BaseModule): + """ + An Intercept Module is a special type of high-priority module that gets early access to events. + + If you want your module to tag or modify an event before it's distributed to the scan, it should + probably be an intercept module. + + Examples of intercept modules include `dns` (for DNS resolution and wildcard detection) + and `cloud` (for detection and tagging of cloud assets). 
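+
+    Example (hypothetical skeleton, for illustration only):
+        >>> class MyIntercept(BaseInterceptModule):
+        >>>     watched_events = ["*"]
+        >>>
+        >>>     async def handle_event(self, event, **kwargs):
+        >>>         # tag the event before it's distributed to the scan;
+        >>>         # returning (False, "reason") would drop it instead of forwarding it
+        >>>         event.add_tag("my-tag")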
+ """ + + accept_dupes = True + _intercept = True + + async def _worker(self): + async with self.scan._acatch(context=self._worker, unhandled_is_critical=True): + try: + while not self.scan.stopping and not self.errored: + try: + if self.incoming_event_queue is not False: + incoming = await self.get_incoming_event() + try: + event, kwargs = incoming + except ValueError: + event = incoming + kwargs = {} + else: + self.debug("Event queue is in bad state") + break + except asyncio.queues.QueueEmpty: + await asyncio.sleep(0.1) + continue + + if event.type == "FINISHED": + context = f"{self.name}.finish()" + async with self.scan._acatch(context), self._task_counter.count(context): + await self.finish() + continue + + acceptable = True + async with self._task_counter.count(f"event_precheck({event})"): + precheck_pass, reason = self._event_precheck(event) + if not precheck_pass: + self.debug(f"Not intercepting {event} because precheck failed ({reason})") + acceptable = False + async with self._task_counter.count(f"event_postcheck({event})"): + postcheck_pass, reason = await self._event_postcheck(event) + if not postcheck_pass: + self.debug(f"Not intercepting {event} because postcheck failed ({reason})") + acceptable = False + + # whether to pass the event on to the rest of the scan + # defaults to true, unless handle_event returns False + forward_event = True + forward_event_reason = "" + + if acceptable: + context = f"{self.name}.handle_event({event, kwargs})" + self.scan.stats.event_consumed(event, self) + self.debug(f"Intercepting {event}") + async with self.scan._acatch(context), self._task_counter.count(context): + forward_event = await self.handle_event(event, **kwargs) + with suppress(ValueError, TypeError): + forward_event, forward_event_reason = forward_event + + if forward_event is False: + self.debug(f"Not forwarding {event} because {forward_event_reason}") + continue + + self.debug(f"Forwarding {event}") + await self.forward_event(event, kwargs) + + except asyncio.CancelledError: + # this trace was used for debugging leaked CancelledErrors from inside httpx + # self.log.trace("Worker cancelled") + raise + except BaseException as e: + if self.helpers.in_exception_chain(e, (KeyboardInterrupt,)): + self.scan.stop() + else: + self.critical(f"Critical failure in intercept module {self.name}: {e}") + self.critical(traceback.format_exc()) + self.log.trace("Worker stopped") + + async def get_incoming_event(self): + """ + Get an event from this module's incoming event queue + """ + return await self.incoming_event_queue.get() + + async def forward_event(self, event, kwargs): + """ + Used for forwarding the event on to the next intercept module + """ + await self.outgoing_event_queue.put((event, kwargs)) + + async def queue_outgoing_event(self, event, **kwargs): + """ + Used by emit_event() to raise new events to the scan + """ + # if this was a normal module, we'd put it in the outgoing queue + # but because it's an intercept module, we need to queue it at the scan's ingress + await self.scan.ingress_module.queue_event(event, kwargs) + + async def queue_event(self, event, kwargs=None): + """ + Put an event in this module's incoming event queue + """ + if kwargs is None: + kwargs = {} + try: + self.incoming_event_queue.put_nowait((event, kwargs)) + except AttributeError: + self.debug("Not in an acceptable state to queue incoming event") + + async def _event_postcheck(self, event): + return await self._event_postcheck_inner(event) diff --git a/bbot/modules/bevigil.py b/bbot/modules/bevigil.py 
new file mode 100644 index 0000000000..8e70fe4143 --- /dev/null +++ b/bbot/modules/bevigil.py @@ -0,0 +1,75 @@ +from bbot.modules.templates.subdomain_enum import subdomain_enum_apikey + + +class bevigil(subdomain_enum_apikey): + """ + Retrieve OSINT data from mobile applications using BeVigil + """ + + watched_events = ["DNS_NAME"] + produced_events = ["DNS_NAME", "URL_UNVERIFIED"] + flags = ["subdomain-enum", "passive", "safe"] + meta = { + "description": "Retrieve OSINT data from mobile applications using BeVigil", + "created_date": "2022-10-26", + "author": "@alt-glitch", + "auth_required": True, + } + options = {"api_key": "", "urls": False} + options_desc = {"api_key": "BeVigil OSINT API Key", "urls": "Emit URLs in addition to DNS_NAMEs"} + + base_url = "https://osint.bevigil.com/api" + + async def setup(self): + self.api_key = self.config.get("api_key", "") + self.urls = self.config.get("urls", False) + return await super().setup() + + def prepare_api_request(self, url, kwargs): + kwargs["headers"]["X-Access-Token"] = self.api_key + return url, kwargs + + async def handle_event(self, event): + query = self.make_query(event) + subdomains = await self.query(query, request_fn=self.request_subdomains, parse_fn=self.parse_subdomains) + if subdomains: + for subdomain in subdomains: + await self.emit_event( + subdomain, + "DNS_NAME", + parent=event, + context=f'{{module}} queried BeVigil\'s API for "{query}" and discovered {{event.type}}: {{event.data}}', + ) + + if self.urls: + urls = await self.query(query, request_fn=self.request_urls, parse_fn=self.parse_urls) + if urls: + for parsed_url in await self.helpers.run_in_executor_mp(self.helpers.validators.collapse_urls, urls): + await self.emit_event( + parsed_url.geturl(), + "URL_UNVERIFIED", + parent=event, + context=f'{{module}} queried BeVigil\'s API for "{query}" and discovered {{event.type}}: {{event.data}}', + ) + + async def request_subdomains(self, query): + url = f"{self.base_url}/{self.helpers.quote(query)}/subdomains/" + return await self.api_request(url) + + async def request_urls(self, query): + url = f"{self.base_url}/{self.helpers.quote(query)}/urls/" + return await self.api_request(url) + + async def parse_subdomains(self, r, query=None): + results = set() + subdomains = r.json().get("subdomains") + if subdomains: + results.update(subdomains) + return results + + async def parse_urls(self, r, query=None): + results = set() + urls = r.json().get("urls") + if urls: + results.update(urls) + return results diff --git a/bbot/modules/binaryedge.py b/bbot/modules/binaryedge.py index c7c83a5b22..e712beec56 100644 --- a/bbot/modules/binaryedge.py +++ b/bbot/modules/binaryedge.py @@ -1,11 +1,16 @@ -from bbot.modules.shodan_dns import shodan_dns +from bbot.modules.templates.subdomain_enum import subdomain_enum_apikey -class binaryedge(shodan_dns): +class binaryedge(subdomain_enum_apikey): watched_events = ["DNS_NAME"] - produced_events = ["DNS_NAME", "EMAIL_ADDRESS", "IP_ADDRESS", "OPEN_PORT", "PROTOCOL"] + produced_events = ["DNS_NAME"] flags = ["subdomain-enum", "passive", "safe"] - meta = {"description": "Query the BinaryEdge API", "auth_required": True} + meta = { + "description": "Query the BinaryEdge API", + "created_date": "2022-08-17", + "author": "@TheTechromancer", + "auth_required": True, + } options = {"api_key": "", "max_records": 1000} options_desc = { "api_key": "BinaryEdge API key", @@ -14,20 +19,24 @@ class binaryedge(shodan_dns): base_url = "https://api.binaryedge.io/v2" - def setup(self): + async def setup(self): 
self.max_records = self.config.get("max_records", 1000) - self.headers = {"X-Key": self.config.get("api_key", "")} - return super().setup() + return await super().setup() + + def prepare_api_request(self, url, kwargs): + kwargs["headers"]["X-Key"] = self.api_key + return url, kwargs - def ping(self): + async def ping(self): url = f"{self.base_url}/user/subscription" - j = self.helpers.request(url, headers=self.headers).json() + j = (await self.api_request(url)).json() assert j.get("requests_left", 0) > 0 - def query(self, query): + async def request_url(self, query): # todo: host query (certs + services) url = f"{self.base_url}/query/domains/subdomain/{self.helpers.quote(query)}" - self.hugesuccess(url) - j = self.helpers.request(url, headers=self.headers).json() - for subdomain in j.get("events", []): - yield subdomain + return await self.api_request(url) + + async def parse_results(self, r, query): + j = r.json() + return j.get("events", []) diff --git a/bbot/modules/bucket_amazon.py b/bbot/modules/bucket_amazon.py new file mode 100644 index 0000000000..7829606f78 --- /dev/null +++ b/bbot/modules/bucket_amazon.py @@ -0,0 +1,23 @@ +from bbot.modules.templates.bucket import bucket_template + + +class bucket_amazon(bucket_template): + watched_events = ["DNS_NAME", "STORAGE_BUCKET"] + produced_events = ["STORAGE_BUCKET", "FINDING"] + flags = ["active", "safe", "cloud-enum", "web-basic"] + meta = { + "description": "Check for S3 buckets related to target", + "created_date": "2022-11-04", + "author": "@TheTechromancer", + } + options = {"permutations": False} + options_desc = { + "permutations": "Whether to try permutations", + } + scope_distance_modifier = 3 + + cloud_helper_name = "amazon" + delimiters = ("", ".", "-") + base_domains = ["s3.amazonaws.com"] + regions = [None] + supports_open_check = True diff --git a/bbot/modules/bucket_azure.py b/bbot/modules/bucket_azure.py new file mode 100644 index 0000000000..dcf90eb346 --- /dev/null +++ b/bbot/modules/bucket_azure.py @@ -0,0 +1,36 @@ +from bbot.modules.templates.bucket import bucket_template + + +class bucket_azure(bucket_template): + watched_events = ["DNS_NAME", "STORAGE_BUCKET"] + produced_events = ["STORAGE_BUCKET", "FINDING"] + flags = ["active", "safe", "cloud-enum", "web-basic"] + meta = { + "description": "Check for Azure storage blobs related to target", + "created_date": "2022-11-04", + "author": "@TheTechromancer", + } + options = {"permutations": False} + options_desc = { + "permutations": "Whether to try permutations", + } + + cloud_helper_name = "azure" + delimiters = ("", "-") + base_domains = ["blob.core.windows.net"] + # Dirbusting is required to know whether a bucket is public + supports_open_check = False + + def build_bucket_request(self, bucket_name, base_domain, region): + url = self.build_url(bucket_name, base_domain, region) + url = url.strip("/") + f"/{bucket_name}?restype=container" + return url, {} + + def check_bucket_exists(self, bucket_name, response): + status_code = getattr(response, "status_code", 0) + existent_bucket = status_code != 0 + return existent_bucket, set() + + def clean_bucket_url(self, url): + # only return root URL + return "/".join(url.split("/")[:3]) diff --git a/bbot/modules/bucket_digitalocean.py b/bbot/modules/bucket_digitalocean.py new file mode 100644 index 0000000000..8e1e008fc0 --- /dev/null +++ b/bbot/modules/bucket_digitalocean.py @@ -0,0 +1,24 @@ +from bbot.modules.templates.bucket import bucket_template + + +class bucket_digitalocean(bucket_template): + watched_events = 
["DNS_NAME", "STORAGE_BUCKET"] + produced_events = ["STORAGE_BUCKET", "FINDING"] + flags = ["active", "safe", "slow", "cloud-enum", "web-thorough"] + meta = { + "description": "Check for DigitalOcean spaces related to target", + "created_date": "2022-11-08", + "author": "@TheTechromancer", + } + options = {"permutations": False} + options_desc = { + "permutations": "Whether to try permutations", + } + + cloud_helper_name = "digitalocean" + delimiters = ("", "-") + base_domains = ["digitaloceanspaces.com"] + regions = ["ams3", "fra1", "nyc3", "sfo2", "sfo3", "sgp1"] + + def build_url(self, bucket_name, base_domain, region): + return f"https://{bucket_name}.{region}.{base_domain}/" diff --git a/bbot/modules/bucket_file_enum.py b/bbot/modules/bucket_file_enum.py new file mode 100644 index 0000000000..15a429de93 --- /dev/null +++ b/bbot/modules/bucket_file_enum.py @@ -0,0 +1,59 @@ +from bbot.modules.base import BaseModule +import xml.etree.ElementTree as ET + + +class bucket_file_enum(BaseModule): + """ + Enumerate files in public storage buckets + + Currently only supports AWS and DigitalOcean + """ + + watched_events = ["STORAGE_BUCKET"] + produced_events = ["URL_UNVERIFIED"] + meta = { + "description": "Works in conjunction with the filedownload module to download files from open storage buckets. Currently supported cloud providers: AWS, DigitalOcean", + "created_date": "2023-11-14", + "author": "@TheTechromancer", + } + flags = ["passive", "safe", "cloud-enum"] + options = { + "file_limit": 50, + } + options_desc = {"file_limit": "Limit the number of files downloaded per bucket"} + scope_distance_modifier = 2 + + async def setup(self): + self.file_limit = self.config.get("file_limit", 50) + return True + + async def handle_event(self, event): + cloud_tags = (t for t in event.tags if t.startswith("cloud-")) + if any(t.endswith("-amazon") or t.endswith("-digitalocean") for t in cloud_tags): + await self.handle_aws(event) + + async def handle_aws(self, event): + url = event.data["url"] + urls_emitted = 0 + response = await self.helpers.request(url) + status_code = getattr(response, "status_code", 0) + if status_code == 200: + content = response.text + root = ET.fromstring(content) + namespace = {"s3": "http://s3.amazonaws.com/doc/2006-03-01/"} + keys = [key.text for key in root.findall(".//s3:Key", namespace)] + for key in keys: + bucket_file = url + "/" + key + file_extension = self.helpers.get_file_extension(key) + if file_extension not in self.scan.url_extension_blacklist: + extension_upper = file_extension.upper() + await self.emit_event( + bucket_file, + "URL_UNVERIFIED", + parent=event, + tags="filedownload", + context=f"{{module}} enumerate files in bucket and discovered {extension_upper} file at {{event.type}}: {{event.data}}", + ) + urls_emitted += 1 + if urls_emitted >= self.file_limit: + return diff --git a/bbot/modules/bucket_firebase.py b/bbot/modules/bucket_firebase.py new file mode 100644 index 0000000000..100e4608ec --- /dev/null +++ b/bbot/modules/bucket_firebase.py @@ -0,0 +1,39 @@ +from bbot.modules.templates.bucket import bucket_template + + +class bucket_firebase(bucket_template): + watched_events = ["DNS_NAME", "STORAGE_BUCKET"] + produced_events = ["STORAGE_BUCKET", "FINDING"] + flags = ["active", "safe", "cloud-enum", "web-basic"] + meta = { + "description": "Check for open Firebase databases related to target", + "created_date": "2023-03-20", + "author": "@TheTechromancer", + } + options = {"permutations": False} + options_desc = { + "permutations": "Whether to try 
permutations", + } + + cloud_helper_name = "google" + delimiters = ("", "-") + base_domains = ["firebaseio.com"] + + def filter_bucket(self, event): + host = str(event.host) + if not any(host.endswith(f".{d}") for d in self.base_domains): + return False, "bucket belongs to a different cloud provider" + return True, "" + + def build_url(self, bucket_name, base_domain, region): + return f"https://{bucket_name}.{base_domain}/.json" + + async def check_bucket_open(self, bucket_name, url): + url = url.strip("/") + "/.json" + response = await self.helpers.request(url) + tags = self.gen_tags_exists(response) + status_code = getattr(response, "status_code", 404) + msg = "" + if status_code == 200: + msg = "Open storage bucket" + return (msg, tags) diff --git a/bbot/modules/bucket_google.py b/bbot/modules/bucket_google.py new file mode 100644 index 0000000000..1b87f639ee --- /dev/null +++ b/bbot/modules/bucket_google.py @@ -0,0 +1,66 @@ +from bbot.modules.templates.bucket import bucket_template + + +class bucket_google(bucket_template): + """ + Adapted from https://github.com/RhinoSecurityLabs/GCPBucketBrute/blob/master/gcpbucketbrute.py + """ + + watched_events = ["DNS_NAME", "STORAGE_BUCKET"] + produced_events = ["STORAGE_BUCKET", "FINDING"] + flags = ["active", "safe", "cloud-enum", "web-basic"] + meta = { + "description": "Check for Google object storage related to target", + "created_date": "2022-11-04", + "author": "@TheTechromancer", + } + options = {"permutations": False} + options_desc = { + "permutations": "Whether to try permutations", + } + + cloud_helper_name = "google" + delimiters = ("", "-", ".", "_") + base_domains = ["storage.googleapis.com"] + bad_permissions = [ + "storage.buckets.get", + "storage.buckets.list", + "storage.buckets.create", + "storage.buckets.delete", + "storage.buckets.setIamPolicy", + "storage.objects.get", + "storage.objects.list", + "storage.objects.create", + "storage.objects.delete", + "storage.objects.setIamPolicy", + ] + + def filter_bucket(self, event): + if not str(event.host).endswith(".googleapis.com"): + return False, "bucket belongs to a different cloud provider" + return True, "" + + def build_url(self, bucket_name, base_domain, region): + return f"https://www.googleapis.com/storage/v1/b/{bucket_name}" + + async def check_bucket_open(self, bucket_name, url): + bad_permissions = [] + try: + list_permissions = "&".join(["=".join(("permissions", p)) for p in self.bad_permissions]) + url = f"https://www.googleapis.com/storage/v1/b/{bucket_name}/iam/testPermissions?" 
+ list_permissions + response = await self.helpers.request(url) + permissions = response.json() + if isinstance(permissions, dict): + bad_permissions = list(permissions.get("permissions", {})) + except Exception as e: + self.info(f'Failed to enumerate permissions for bucket "{bucket_name}": {e}') + msg = "" + if bad_permissions: + perms_str = ",".join(bad_permissions) + msg = f"Open permissions on storage bucket ({perms_str})" + return (msg, set()) + + def check_bucket_exists(self, bucket_name, response): + status_code = getattr(response, "status_code", 0) + existent_bucket = status_code not in (0, 400, 404) + return existent_bucket, set() diff --git a/bbot/modules/bufferoverrun.py b/bbot/modules/bufferoverrun.py new file mode 100644 index 0000000000..9523dc6269 --- /dev/null +++ b/bbot/modules/bufferoverrun.py @@ -0,0 +1,47 @@ +from bbot.modules.templates.subdomain_enum import subdomain_enum_apikey + + +class BufferOverrun(subdomain_enum_apikey): + watched_events = ["DNS_NAME"] + produced_events = ["DNS_NAME"] + flags = ["subdomain-enum", "passive", "safe"] + meta = { + "description": "Query BufferOverrun's TLS API for subdomains", + "created_date": "2024-10-23", + "author": "@TheTechromancer", + "auth_required": True, + } + options = {"api_key": "", "commercial": False} + options_desc = {"api_key": "BufferOverrun API key", "commercial": "Use commercial API"} + + base_url = "https://tls.bufferover.run/dns" + commercial_base_url = "https://bufferover-run-tls.p.rapidapi.com/ipv4/dns" + + async def setup(self): + self.commercial = self.config.get("commercial", False) + return await super().setup() + + def prepare_api_request(self, url, kwargs): + if self.commercial: + kwargs["headers"]["x-rapidapi-host"] = "bufferover-run-tls.p.rapidapi.com" + kwargs["headers"]["x-rapidapi-key"] = self.api_key + else: + kwargs["headers"]["x-api-key"] = self.api_key + return url, kwargs + + async def request_url(self, query): + url = f"{self.commercial_base_url if self.commercial else self.base_url}?q=.{query}" + return await self.api_request(url) + + async def parse_results(self, r, query): + j = r.json() + subdomains_set = set() + if isinstance(j, dict): + results = j.get("Results", []) + for result in results: + parts = result.split(",") + if len(parts) > 4: + subdomain = parts[4].strip() + if subdomain and subdomain.endswith(f".{query}"): + subdomains_set.add(subdomain) + return subdomains_set diff --git a/bbot/modules/builtwith.py b/bbot/modules/builtwith.py new file mode 100644 index 0000000000..9887f18225 --- /dev/null +++ b/bbot/modules/builtwith.py @@ -0,0 +1,128 @@ +############################################################ +# # +# # +# [-] Processing BuiltWith Domains Output # +# # +# [-] 2022.08.19 # +# V05 # +# Black Lantern Security (BLSOPS) # +# # +# # +############################################################ + +from bbot.modules.templates.subdomain_enum import subdomain_enum_apikey + + +class builtwith(subdomain_enum_apikey): + watched_events = ["DNS_NAME"] + produced_events = ["DNS_NAME"] + flags = ["affiliates", "subdomain-enum", "passive", "safe"] + meta = { + "description": "Query Builtwith.com for subdomains", + "created_date": "2022-08-23", + "author": "@TheTechromancer", + "auth_required": True, + } + options = {"api_key": "", "redirects": True} + options_desc = {"api_key": "Builtwith API key", "redirects": "Also look up inbound and outbound redirects"} + base_url = "https://api.builtwith.com" + + async def handle_event(self, event): + query = self.make_query(event) + # domains + 
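+        # note: self.query() is inherited from the subdomain_enum template; the
+        # request_fn/parse_fn arguments select which BuiltWith endpoint is called
+        # and how its JSON response is parsed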
subdomains = await self.query(query, parse_fn=self.parse_domains, request_fn=self.request_domains) + if subdomains: + for s in subdomains: + if s != event: + await self.emit_event( + s, + "DNS_NAME", + parent=event, + context=f'{{module}} queried the BuiltWith API for "{query}" and found {{event.type}}: {{event.data}}', + ) + # redirects + if self.config.get("redirects", True): + redirects = await self.query(query, parse_fn=self.parse_redirects, request_fn=self.request_redirects) + if redirects: + for r in redirects: + if r != event: + await self.emit_event( + r, + "DNS_NAME", + parent=event, + tags=["affiliate"], + context=f'{{module}} queried the BuiltWith redirect API for "{query}" and found redirect to {{event.type}}: {{event.data}}', + ) + + async def request_domains(self, query): + url = f"{self.base_url}/v20/api.json?KEY={{api_key}}&LOOKUP={query}&NOMETA=yes&NOATTR=yes&HIDETEXT=yes&HIDEDL=yes" + return await self.api_request(url) + + async def request_redirects(self, query): + url = f"{self.base_url}/redirect1/api.json?KEY={{api_key}}&LOOKUP={query}" + return await self.api_request(url) + + async def parse_domains(self, r, query): + """ + This method returns a set of subdomains. + Each subdomain is an "FQDN" that was reported in the "Detailed Technology Profile" page on builtwith.com + + Parameters + ---------- + r (requests Response): The raw requests response from the API + query (string): The query used against the API + """ + results_set = set() + json = r.json() + if json and isinstance(json, dict): + results = json.get("Results", []) + if results: + for result in results: + for chunk in result.get("Result", {}).get("Paths", []): + domain = chunk.get("Domain", "") + subdomain = chunk.get("SubDomain", "") + if domain: + if subdomain: + domain = f"{subdomain}.{domain}" + results_set.add(domain) + else: + errors = json.get("Errors", [{}]) + if errors: + error = errors[0].get("Message", "Unknown Error") + self.verbose(f"No results for {query}: {error}") + return results_set + + async def parse_redirects(self, r, query): + """ + This method creates a set. 
+ Each entry in the set is either an Inbound or Outbound Redirect reported in the "Redirect Profile" page on builtwith.com + + Parameters + ---------- + r (requests Response): The raw requests response from the API + query (string): The query used against the API + + Returns + ------- + results (set) + """ + results = set() + json = r.json() + if json and isinstance(json, dict): + inbound = json.get("Inbound", []) + outbound = json.get("Outbound", []) + if inbound: + for i in inbound: + domain = i.get("Domain", "") + if domain: + results.add(domain) + if outbound: + for o in outbound: + domain = o.get("Domain", "") + if domain: + results.add(domain) + if not results: + error = json.get("error", "") + if error: + self.warning(f"No results for {query}: {error}") + return results diff --git a/bbot/modules/bypass403.py b/bbot/modules/bypass403.py index c106dc4374..61fb510775 100644 --- a/bbot/modules/bypass403.py +++ b/bbot/modules/bypass403.py @@ -1,5 +1,5 @@ +from bbot.errors import HttpCompareError from bbot.modules.base import BaseModule -from bbot.core.errors import HttpCompareError """ Port of https://github.com/iamj0ker/bypass-403/ and https://portswigger.net/bappstore/444407b96d9c4de0adb7aed89e826122 @@ -16,9 +16,12 @@ ("POST", "{scheme}://{netloc}/{path}", {"Content-Length": "0"}, False), ("GET", "{scheme}://{netloc}/{path}.php", None, False), ("GET", "{scheme}://{netloc}/{path}.json", None, False), - ("TRACE", "{scheme}://{netloc}/{path}/", None, True), + ("TRACE", "{scheme}://{netloc}/{path}", None, True), + ("GET", "{scheme}://{netloc}/(S(X))/{path}", None, True), # ASPNET COOKIELESS URLS + ("GET", "{scheme}://{netloc}/(S(X))/../(S(X))/{path}", None, True), # ASPNET COOKIELESS URLS ] + query_payloads = [ "%09", "%20", @@ -60,6 +63,9 @@ "X-Host": "127.0.0.1", } +# This is planned to be replaced in the future: https://github.com/blacklanternsecurity/bbot/issues/1068 +waf_strings = ["The requested URL was rejected"] + for qp in query_payloads: signatures.append(("GET", "{scheme}://{netloc}/{path}%s" % qp, None, True)) if "?" not in qp: # we only want to use "?" 
after the path @@ -70,60 +76,102 @@ class bypass403(BaseModule): - watched_events = ["URL"] produced_events = ["FINDING"] - flags = ["active", "aggressive", "web"] - meta = {"description": "Check 403 pages for common bypasses"} + flags = ["active", "aggressive", "web-thorough"] + meta = {"description": "Check 403 pages for common bypasses", "created_date": "2022-07-05", "author": "@liquidsec"} in_scope_only = True - def handle_event(self, event): - - try: - compare_helper = self.helpers.http_compare(event.data, allow_redirects=True) - except HttpCompareError as e: - self.debug(e) - return + async def do_checks(self, compare_helper, event, collapse_threshold): + results = set() + error_count = 0 for sig in signatures: + if error_count > 3: + self.warning(f"Received too many errors for URL {event.data} aborting bypass403") + return None sig = self.format_signature(sig, event) - if sig[2] != None: + if sig[2] is not None: headers = dict(sig[2]) else: headers = None - match, reason, reflection, subject_response = compare_helper.compare( - sig[1], headers=headers, method=sig[0], allow_redirects=True - ) - - if match == False: + try: + match, reasons, reflection, subject_response = await compare_helper.compare( + sig[1], headers=headers, method=sig[0], allow_redirects=True + ) + except HttpCompareError as e: + error_count += 1 + self.debug(e) + continue + + # In some cases WAFs will respond with a 200 code which causes a false positive + if subject_response is not None: + for ws in waf_strings: + if ws in subject_response.text: + self.debug("Rejecting result based on presence of WAF string") + return + + if match is False: if str(subject_response.status_code)[0] != "4": - if sig[2]: added_header_tuple = next(iter(sig[2].items())) reported_signature = f"Added Header: {added_header_tuple[0]}: {added_header_tuple[1]}" else: - reported_signature = f"Modified URL: {sig[1]}" - description = f"403 Bypass Reason: [{reason}] Sig: [{reported_signature}]" - self.emit_event( - {"description": description, "host": event.host, "url": event.data}, "FINDING", source=event - ) + reported_signature = f"Modified URL: {sig[0]} {sig[1]}" + description = f"403 Bypass Reasons: [{','.join(reasons)}] Sig: [{reported_signature}]" + results.add(description) + if len(results) > collapse_threshold: + return results else: self.debug(f"Status code changed to {str(subject_response.status_code)}, ignoring") + return results + + async def handle_event(self, event): + try: + compare_helper = self.helpers.http_compare(event.data, allow_redirects=True) + except HttpCompareError as e: + self.debug(e) + return - def filter_event(self, event): + collapse_threshold = 6 + results = await self.do_checks(compare_helper, event, collapse_threshold) + if results is None: + return + if len(results) > collapse_threshold: + await self.emit_event( + { + "description": f"403 Bypass MULTIPLE SIGNATURES (exceeded threshold {str(collapse_threshold)})", + "host": str(event.host), + "url": event.data, + }, + "FINDING", + parent=event, + context=f"{{module}} discovered multiple potential 403 bypasses ({{event.type}}) for {event.data}", + ) + else: + for description in results: + await self.emit_event( + {"description": description, "host": str(event.host), "url": event.data}, + "FINDING", + parent=event, + context=f"{{module}} discovered potential 403 bypass ({{event.type}}) for {event.data}", + ) + + # When a WAF-check helper is available in the future, we will convert to HTTP_RESPONSE and check for the WAF string here. 
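+    # for reference, each signature is a tuple of (method, url_template, headers or None,
+    # strip_trailing_slash) - see format_signature() below; e.g. the ASPNET cookieless check above is
+    # ("GET", "{scheme}://{netloc}/(S(X))/{path}", None, True)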
+ async def filter_event(self, event): if ("status-403" in event.tags) or ("status-401" in event.tags): return True return False def format_signature(self, sig, event): - if sig[3] == True: - cleaned_path = event.parsed.path.strip("/") + if sig[3] is True: + cleaned_path = event.parsed_url.path.strip("/") else: - cleaned_path = event.parsed.path.lstrip("/") - kwargs = {"scheme": event.parsed.scheme, "netloc": event.parsed.netloc, "path": cleaned_path} + cleaned_path = event.parsed_url.path.lstrip("/") + kwargs = {"scheme": event.parsed_url.scheme, "netloc": event.parsed_url.netloc, "path": cleaned_path} formatted_url = sig[1].format(**kwargs) - if sig[2] != None: + if sig[2] is not None: formatted_headers = {k: v.format(**kwargs) for k, v in sig[2].items()} else: formatted_headers = None diff --git a/bbot/modules/c99.py b/bbot/modules/c99.py index 3540d3120d..17fea87a13 100644 --- a/bbot/modules/c99.py +++ b/bbot/modules/c99.py @@ -1,37 +1,39 @@ -from .shodan_dns import shodan_dns +from bbot.modules.templates.subdomain_enum import subdomain_enum_apikey -class c99(shodan_dns): +class c99(subdomain_enum_apikey): watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] flags = ["subdomain-enum", "passive", "safe"] - meta = {"description": "Query the C99 API for subdomains", "auth_required": True} + meta = { + "description": "Query the C99 API for subdomains", + "created_date": "2022-07-08", + "author": "@TheTechromancer", + "auth_required": True, + } options = {"api_key": ""} options_desc = {"api_key": "c99.nl API key"} base_url = "https://api.c99.nl" + ping_url = f"{base_url}/randomnumber?key={{api_key}}&between=1,100&json" - def ping(self): - url = f"{self.base_url}/randomnumber?key={self.api_key}&between=1,100&json" - response = self.helpers.request(url) - assert response.json()["success"] == True + async def ping(self): + url = f"{self.base_url}/randomnumber?key={{api_key}}&between=1,100&json" + response = await self.api_request(url) + assert response.json()["success"] is True, getattr(response, "text", "no response from server") - def query(self, query): - url = f"{self.base_url}/subdomainfinder?key={self.api_key}&domain={self.helpers.quote(query)}&json" - results = self.helpers.request(url) - try: - json = results.json() - if json: - subdomains = json.get("subdomains", []) - if subdomains: - for s in subdomains: - subdomain = s.get("subdomain", "") - if subdomain: - yield subdomain - else: - self.debug(f'No results for "{query}"') - except Exception: - import traceback + async def request_url(self, query): + url = f"{self.base_url}/subdomainfinder?key={{api_key}}&domain={self.helpers.quote(query)}&json" + return await self.api_request(url) - self.warning(f"Error retrieving c99.nl subdomains for {query}") - self.debug(traceback.format_exc()) + async def parse_results(self, r, query): + results = set() + j = r.json() + if isinstance(j, dict): + subdomains = j.get("subdomains", []) + if subdomains: + for s in subdomains: + subdomain = s.get("subdomain", "") + if subdomain: + results.add(subdomain) + return results diff --git a/bbot/modules/censys.py b/bbot/modules/censys.py index a82a55a50f..69e1f6f945 100644 --- a/bbot/modules/censys.py +++ b/bbot/modules/censys.py @@ -1,118 +1,98 @@ -from contextlib import suppress +from bbot.modules.templates.subdomain_enum import subdomain_enum_apikey -from censys.common import exceptions -from censys.search import CensysHosts -from censys.search import CensysCertificates -from bbot.modules.shodan_dns import shodan_dns +class 
censys(subdomain_enum_apikey): + """ + thanks to https://github.com/owasp-amass/amass/blob/master/resources/scripts/cert/censys.ads + """ - -class censys(shodan_dns): watched_events = ["DNS_NAME"] - produced_events = ["DNS_NAME", "EMAIL_ADDRESS", "IP_ADDRESS", "OPEN_PORT", "PROTOCOL"] - flags = ["subdomain-enum", "email-enum", "passive", "safe"] - meta = {"description": "Query the Censys API", "auth_required": True} - options = {"api_id": "", "api_secret": "", "max_records": 1000} + produced_events = ["DNS_NAME"] + flags = ["subdomain-enum", "passive", "safe"] + meta = { + "description": "Query the Censys API", + "created_date": "2022-08-04", + "author": "@TheTechromancer", + "auth_required": True, + } + options = {"api_key": "", "max_pages": 5} options_desc = { - "api_id": "Censys.io API ID", - "api_secret": "Censys.io API Secret", - "max_records": "Limit results to help prevent exceeding API quota", + "api_key": "Censys.io API Key in the format of 'key:secret'", + "max_pages": "Maximum number of pages to fetch (100 results per page)", } - deps_pip = ["censys"] + base_url = "https://search.censys.io/api" - def setup(self): - self.max_records = self.config.get("max_records", 1000) - self.api_id = self.config.get("api_id", "") - self.api_secret = self.config.get("api_secret", "") - self._cert_name_threshold = 20 - with suppress(Exception): - self.hosts = CensysHosts(api_id=self.api_id, api_secret=self.api_secret) - with suppress(Exception): - self.certificates = CensysCertificates(api_id=self.api_id, api_secret=self.api_secret) - return super().setup() + async def setup(self): + self.max_pages = self.config.get("max_pages", 5) + return await super().setup() - def ping(self): - quota = self.certificates.quota() - used = int(quota["used"]) - allowance = int(quota["allowance"]) + async def ping(self): + url = f"{self.base_url}/v1/account" + resp = await self.api_request(url) + d = resp.json() + assert isinstance(d, dict), f"Invalid response from {url}: {resp}" + quota = d.get("quota", {}) + used = int(quota.get("used", 0)) + allowance = int(quota.get("allowance", 0)) assert used < allowance, "No quota remaining" - def query(self, query): - emails = set() - dns_names = set() - ip_addresses = dict() - try: - # certificates - certificate_query = f"parsed.names: {query}" - certificate_fields = ["parsed.names", "parsed.issuer_dn", "parsed.subject_dn"] - for result in self.certificates.search( - certificate_query, fields=certificate_fields, max_records=self.max_records - ): - parsed_names = result.get("parsed.names", []) - # helps filter out third-party certs with a lot of garbage names - _filter = lambda x: True - domain = self.helpers.tldextract(query).domain - if len(parsed_names) > self._cert_name_threshold: - _filter = lambda x: domain in str(x.lower()) - parsed_names = list(filter(_filter, parsed_names)) - dns_names.update(set([n.lstrip(".*").rstrip(".").lower() for n in parsed_names])) - emails.update(set(self.helpers.extract_emails(result.get("parsed.issuer_dn", "")))) - emails.update(set(self.helpers.extract_emails(result.get("parsed.subject_dn", "")))) + def prepare_api_request(self, url, kwargs): + api_id, api_secret = self.api_key.split(":", 1) + kwargs["auth"] = (api_id, api_secret) + return url, kwargs - # hosts - per_page = 100 - pages = max(1, int(self.max_records / per_page)) - hosts_query = f"services.tls.certificates.leaf_data.names: {query} or services.tls.certificates.leaf_data.subject.email_address: {query}" - for i, page in enumerate(self.hosts.search(hosts_query, 
per_page=per_page, pages=pages)): - for result in page: - ip = result.get("ip", "") - if not ip: - continue - ip_addresses[ip] = [] - services = result.get("services", []) - for service in services: - port = service.get("port") - service_name = service.get("service_name", "") - transport_protocol = service.get("transport_protocol", "") - if not port or not transport_protocol: - continue - ip_addresses[ip].append((port, service_name, transport_protocol)) - if self.scan.stopping: - break + async def query(self, query): + results = set() + cursor = "" + for i in range(self.max_pages): + url = f"{self.base_url}/v2/certificates/search" + json_data = { + "q": f"names: {query}", + "per_page": 100, + } + if cursor: + json_data.update({"cursor": cursor}) + resp = await self.api_request( + url, + method="POST", + json=json_data, + ) + + if resp is None: + break - except exceptions.CensysRateLimitExceededException: - self.warning("Exceeded Censys account limits") - except exceptions.CensysException as e: - self.warning(f"Error with API: {e}") - except Exception as e: - import traceback + try: + d = resp.json() + except Exception as e: + self.warning(f"Failed to parse JSON from {url} (response: {resp}): {e}") - self.warning(f"Unknown error: {e}") - self.debug(traceback.format_exc()) + if resp.status_code < 200 or resp.status_code >= 400: + if isinstance(d, dict): + error = d.get("error", "") + if error: + self.warning(error) + self.verbose(f'Non-200 Status code: {resp.status_code} for query "{query}", page #{i + 1}') + self.debug(f"Response: {resp.text}") + break + else: + if d is None: + break + elif not isinstance(d, dict): + break + status = d.get("status", "").lower() + result = d.get("result", {}) + hits = result.get("hits", []) + if status != "ok" or not hits: + break - return emails, dns_names, ip_addresses + for h in hits: + names = h.get("names", []) + for n in names: + results.add(n.strip(".*").lower()) - def handle_event(self, event): - query = self.make_query(event) - emails, dns_names, ip_addresses = self.query(query) - for email in emails: - self.emit_event(email, "EMAIL_ADDRESS", source=event) - for dns_name in dns_names: - self.emit_event(dns_name, "DNS_NAME", source=event) - for ip, services in ip_addresses.items(): - ip_event = self.make_event(ip, "IP_ADDRESS", source=event) - self.emit_event(ip_event) - for port, service_name, transport_protocol in services: - port_data = self.helpers.make_netloc(ip, port) - port_type = f"OPEN_{transport_protocol.upper()}_PORT" - port_event = self.make_event(port_data, port_type, source=ip_event) - self.emit_event(port_event) - if service_name: - service_name = str(service_name).upper() - protocol_data = {"host": port_data, "protocol": service_name} - self.emit_event(protocol_data, "PROTOCOL", source=port_event) + cursor = result.get("links", {}).get("next", "") + if not cursor: + break - @property - def auth_secret(self): - return self.api_id and self.api_secret + return results diff --git a/bbot/modules/certspotter.py b/bbot/modules/certspotter.py index 1606b54dc4..c6cbc6eb6d 100644 --- a/bbot/modules/certspotter.py +++ b/bbot/modules/certspotter.py @@ -1,21 +1,27 @@ -from bbot.modules.crobat import crobat +from bbot.modules.templates.subdomain_enum import subdomain_enum -class certspotter(crobat): +class certspotter(subdomain_enum): watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] flags = ["subdomain-enum", "passive", "safe"] - meta = {"description": "Query Certspotter's API for subdomains"} + meta = { + "description": "Query 
Certspotter's API for subdomains", + "created_date": "2022-07-28", + "author": "@TheTechromancer", + } base_url = "https://api.certspotter.com/v1" def request_url(self, query): url = f"{self.base_url}/issuances?domain={self.helpers.quote(query)}&include_subdomains=true&expand=dns_names" - return self.helpers.request(url) + return self.api_request(url, timeout=self.http_timeout + 30) - def parse_results(self, r, query): + async def parse_results(self, r, query): + results = set() json = r.json() if json: for r in json: for dns_name in r.get("dns_names", []): - yield dns_name.lstrip(".*").rstrip(".") + results.add(dns_name.lstrip(".*").rstrip(".")) + return results diff --git a/bbot/modules/chaos.py b/bbot/modules/chaos.py new file mode 100644 index 0000000000..15a321046a --- /dev/null +++ b/bbot/modules/chaos.py @@ -0,0 +1,44 @@ +from bbot.modules.templates.subdomain_enum import subdomain_enum_apikey + + +class chaos(subdomain_enum_apikey): + watched_events = ["DNS_NAME"] + produced_events = ["DNS_NAME"] + flags = ["subdomain-enum", "passive", "safe"] + meta = { + "description": "Query ProjectDiscovery's Chaos API for subdomains", + "created_date": "2022-08-14", + "author": "@TheTechromancer", + "auth_required": True, + } + options = {"api_key": ""} + options_desc = {"api_key": "Chaos API key"} + + base_url = "https://dns.projectdiscovery.io/dns" + ping_url = f"{base_url}/example.com" + + def prepare_api_request(self, url, kwargs): + kwargs["headers"]["Authorization"] = self.api_key + return url, kwargs + + async def request_url(self, query): + _, domain = self.helpers.split_domain(query) + url = f"{self.base_url}/{domain}/subdomains" + return await self.api_request(url) + + async def parse_results(self, r, query): + results = set() + j = r.json() + subdomains_set = set() + if isinstance(j, dict): + domain = j.get("domain", "") + if domain: + subdomains = j.get("subdomains", []) + for s in subdomains: + s = s.lower().strip(".*") + subdomains_set.add(s) + for s in subdomains_set: + full_subdomain = f"{s}.{domain}" + if full_subdomain and full_subdomain.endswith(f".{query}"): + results.add(full_subdomain) + return results diff --git a/bbot/modules/code_repository.py b/bbot/modules/code_repository.py new file mode 100644 index 0000000000..f485579f9e --- /dev/null +++ b/bbot/modules/code_repository.py @@ -0,0 +1,56 @@ +import re +from bbot.modules.base import BaseModule + + +class code_repository(BaseModule): + watched_events = ["URL_UNVERIFIED"] + produced_events = ["CODE_REPOSITORY"] + meta = { + "description": "Look for code repository links in webpages", + "created_date": "2024-05-15", + "author": "@domwhewell-sage", + } + flags = ["passive", "safe", "code-enum"] + + # platform name : (regex, case_sensitive) + code_repositories = { + "git": [ + (r"github.com/[a-zA-Z0-9_-]+/[a-zA-Z0-9_-]+", False), + (r"gitlab.(?:com|org)/[a-zA-Z0-9_-]+/[a-zA-Z0-9_-]+", False), + ], + "docker": (r"hub.docker.com/r/[a-zA-Z0-9_-]+/[a-zA-Z0-9_-]+", False), + "postman": (r"www.postman.com/[a-zA-Z0-9_-]+/[a-zA-Z0-9_-]+", False), + } + + scope_distance_modifier = 1 + + async def setup(self): + self.compiled_regexes = {} + for k, v in self.code_repositories.items(): + if isinstance(v, list): + self.compiled_regexes[k] = [(re.compile(pattern), c) for pattern, c in v] + else: + pattern, c = v + self.compiled_regexes[k] = (re.compile(pattern), c) + return True + + async def handle_event(self, event): + for platform, regexes in self.compiled_regexes.items(): + if not isinstance(regexes, list): + regexes = [regexes] + 
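# [Editor's note] Each compiled_regexes value above is a (pattern,
# case_sensitive) pair; the loop below lowercases a match only when
# case_sensitive is False. With the "git" pattern from this module, for
# example, "github.com/BlackLanternSecurity/bbot" matches and is emitted as
# "https://github.com/blacklanternsecurity/bbot".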
for regex, case_sensitive in regexes: + for match in regex.finditer(event.data): + url = match.group() + if not case_sensitive: + url = url.lower() + url = f"https://{url}" + repo_event = self.make_event( + {"url": url}, + "CODE_REPOSITORY", + tags=platform, + parent=event, + ) + await self.emit_event( + repo_event, + context=f"{{module}} detected {platform} {{event.type}} at {url}", + ) diff --git a/bbot/modules/cookie_brute.py b/bbot/modules/cookie_brute.py deleted file mode 100644 index 777908b2c1..0000000000 --- a/bbot/modules/cookie_brute.py +++ /dev/null @@ -1,41 +0,0 @@ -from .header_brute import header_brute -from bbot.core.errors import ScanCancelledError - - -class cookie_brute(header_brute): - - watched_events = ["URL"] - produced_events = ["FINDING"] - flags = ["brute-force", "active", "aggressive", "slow", "web"] - meta = { - "description": "Check for common HTTP cookie parameters", - } - options = {"wordlist": "https://raw.githubusercontent.com/PortSwigger/param-miner/master/resources/params"} - options_desc = {"wordlist": "Define the wordlist to be used to derive cookies"} - scanned_hosts = [] - cookie_blacklist = [] - max_event_handlers = 12 - in_scope_only = True - compare_mode = "cookie" - - def check_batch(self, compare_helper, url, cookie_list): - - if self.scan.stopping: - raise ScanCancelledError() - cookies = {p: self.helpers.rand_string(14) for p in cookie_list} - return compare_helper.compare(url, cookies=cookies) - - def gen_count_args(self, url): - - cookie_count = 40 - while 1: - if cookie_count < 0: - break - fake_cookies = {self.helpers.rand_string(14): self.helpers.rand_string(14) for _ in range(0, cookie_count)} - yield cookie_count, (url,), {"cookies": fake_cookies} - cookie_count -= 5 - - def clean_list(self, cookie): - if (len(cookie) > 0) and (cookie.strip() not in self.cookie_blacklist): - return True - return False diff --git a/bbot/modules/credshed.py b/bbot/modules/credshed.py new file mode 100644 index 0000000000..3630646a6f --- /dev/null +++ b/bbot/modules/credshed.py @@ -0,0 +1,109 @@ +from contextlib import suppress + +from bbot.modules.templates.subdomain_enum import subdomain_enum + + +class credshed(subdomain_enum): + watched_events = ["DNS_NAME"] + produced_events = ["PASSWORD", "HASHED_PASSWORD", "USERNAME", "EMAIL_ADDRESS"] + flags = ["passive", "safe"] + meta = { + "description": "Send queries to your own credshed server to check for known credentials of your targets", + "created_date": "2023-10-12", + "author": "@SpamFaux", + "auth_required": True, + } + options = {"username": "", "password": "", "credshed_url": ""} + options_desc = { + "username": "Credshed username", + "password": "Credshed password", + "credshed_url": "URL of credshed server", + } + target_only = True + + async def setup(self): + self.base_url = self.config.get("credshed_url", "").rstrip("/") + self.username = self.config.get("username", "") + self.password = self.config.get("password", "") + + # soft-fail if we don't have the necessary information to make queries + if not (self.base_url and self.username and self.password): + return None, "Must set username, password, and credshed_url" + + auth_setup = await self.helpers.request( + f"{self.base_url}/api/auth", method="POST", json={"username": self.username, "password": self.password} + ) + self.auth_token = "" + with suppress(Exception): + self.auth_token = auth_setup.json().get("access_token", "") + # hard-fail if we didn't get an access token + if not self.auth_token: + return False, f"Failed to retrieve credshed 
auth token from url: {self.base_url}" + + return await super().setup() + + async def handle_event(self, event): + query = self.make_query(event) + cs_query = await self.helpers.request( + f"{self.base_url}/api/search", + method="POST", + cookies={"access_token_cookie": self.auth_token}, + json={"query": query}, + ) + + if cs_query is not None and cs_query.status_code != 200: + self.warning( + f"Error retrieving results from {self.base_url} (status code {cs_query.status_code}): {cs_query.text}" + ) + + json_result = {} + with suppress(Exception): + json_result = cs_query.json() + + if not json_result: + return + + accounts = json_result.get("accounts", []) + + for i in accounts: + email = i.get("e", "") + pw = i.get("p", "") + hashes = i.get("h", []) + user = i.get("u", "") + src = i.get("s", []) + src = [src[0] if src else ""] + + tags = [] + if src: + tags = [f"credshed-source-{src}"] + + email_event = self.make_event(email, "EMAIL_ADDRESS", parent=event, tags=tags) + if email_event is not None: + await self.emit_event( + email_event, context=f'{{module}} searched for "{query}" and found {{event.type}}: {{event.data}}' + ) + if user: + await self.emit_event( + f"{email}:{user}", + "USERNAME", + parent=email_event, + tags=tags, + context=f"{{module}} found {email} with {{event.type}}: {{event.data}}", + ) + if pw: + await self.emit_event( + f"{email}:{pw}", + "PASSWORD", + parent=email_event, + tags=tags, + context=f"{{module}} found {email} with {{event.type}}: {{event.data}}", + ) + for h_pw in hashes: + if h_pw: + await self.emit_event( + f"{email}:{h_pw}", + "HASHED_PASSWORD", + parent=email_event, + tags=tags, + context=f"{{module}} found {email} with {{event.type}}: {{event.data}}", + ) diff --git a/bbot/modules/crobat.py b/bbot/modules/crobat.py deleted file mode 100644 index 0b66489807..0000000000 --- a/bbot/modules/crobat.py +++ /dev/null @@ -1,85 +0,0 @@ -from bbot.modules.base import BaseModule - - -class crobat(BaseModule): - """ - A typical free API-based subdomain enumeration module - Inherited by several other modules including sublist3r, dnsdumpster, etc. - """ - - flags = ["subdomain-enum", "passive", "safe"] - watched_events = ["DNS_NAME"] - produced_events = ["DNS_NAME"] - meta = {"description": "Query Project Crobat for subdomains"} - - base_url = "https://sonar.omnisint.io" - - def setup(self): - self.processed = set() - return True - - def filter_event(self, event): - """ - Accept DNS_NAMEs that are either directly targets, or indirectly - in scope by resolving to in-scope IPs. - - Kill wildcards with fire. 
- - This filter_event is used across many modules - """ - if any(t in event.tags for t in ("dns-error", "unresolved")): - return False - query = self.make_query(event) - if self.already_processed(query): - return False - is_wildcard, _ = self.helpers.is_wildcard(f"{self.helpers.rand_string(digits=False)}.{query}") - if is_wildcard: - return False - self.processed.add(hash(query)) - return True - - def already_processed(self, hostname): - for parent in self.helpers.domain_parents(hostname, include_self=True): - if hash(parent) in self.processed: - return True - return False - - def abort_if(self, event): - # this helps weed out unwanted results when scanning IP_RANGES and wildcard domains - return "in-scope" not in event.tags or "wildcard" in event.tags - - def handle_event(self, event): - query = self.make_query(event) - results = self.query(query) - if results: - for hostname in results: - if not hostname == event: - self.emit_event(hostname, "DNS_NAME", event, abort_if=self.abort_if) - - def request_url(self, query): - url = f"{self.base_url}/subdomains/{self.helpers.quote(query)}" - return self.helpers.request(url) - - def make_query(self, event): - if "target" in event.tags: - return str(event.data) - else: - return self.helpers.parent_domain(event.data).lower() - - def parse_results(self, r, query=None): - json = r.json() - if json: - for hostname in json: - yield hostname - - def query(self, query): - try: - results = list(self.parse_results(self.request_url(query), query)) - if results: - return results - self.debug(f'No results for "{query}"') - except Exception: - import traceback - - self.warning(f"Error retrieving results for {query}") - self.debug(traceback.format_exc()) diff --git a/bbot/modules/crt.py b/bbot/modules/crt.py index 2d17ab5e46..a9ea2be454 100644 --- a/bbot/modules/crt.py +++ b/bbot/modules/crt.py @@ -1,35 +1,66 @@ -from urllib.parse import urlencode +import time +import asyncpg -from .crobat import crobat +from bbot.modules.templates.subdomain_enum import subdomain_enum -class crt(crobat): - +class crt(subdomain_enum): flags = ["subdomain-enum", "passive", "safe"] watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] - meta = {"description": "Query crt.sh (certificate transparency) for subdomains"} - - base_url = "https://crt.sh" - - def setup(self): - self.cert_ids = set() - return super().setup() - - def request_url(self, query): - params = {"q": query, "output": "json"} - return self.helpers.request(f"{self.base_url}?{urlencode(params)}") - - def parse_results(self, r, query): - j = r.json() - for cert_info in j: - if not type(cert_info) == dict: - continue - cert_id = cert_info.get("id") - if cert_id: - if hash(cert_id) not in self.cert_ids: - self.cert_ids.add(hash(cert_id)) - domain = cert_info.get("name_value") - if domain: - for d in domain.splitlines(): - yield d.lower().strip("*.") + meta = { + "description": "Query crt.sh (certificate transparency) for subdomains", + "created_date": "2022-05-13", + "author": "@TheTechromancer", + } + + deps_pip = ["asyncpg"] + + db_host = "crt.sh" + db_port = 5432 + db_user = "guest" + db_name = "certwatch" + reject_wildcards = False + + async def setup(self): + self.db_conn = None + return await super().setup() + + async def request_url(self, query): + if not self.db_conn: + self.db_conn = await asyncpg.connect( + host=self.db_host, port=self.db_port, user=self.db_user, database=self.db_name + ) + + sql = """ + WITH ci AS ( + SELECT array_agg(DISTINCT sub.NAME_VALUE) NAME_VALUES + FROM ( + SELECT DISTINCT 
cai.CERTIFICATE, cai.NAME_VALUE + FROM certificate_and_identities cai + WHERE plainto_tsquery('certwatch', $1) @@ identities(cai.CERTIFICATE) + AND cai.NAME_VALUE ILIKE ('%.' || $1) + LIMIT 50000 + ) sub + GROUP BY sub.CERTIFICATE + ) + SELECT DISTINCT unnest(NAME_VALUES) as name_value FROM ci; + """ + start = time.time() + results = await self.db_conn.fetch(sql, query) + end = time.time() + self.verbose(f"SQL query executed in: {end - start} seconds with {len(results):,} results") + return results + + async def parse_results(self, results, query): + domains = set() + for row in results: + domain = row["name_value"] + if domain: + for d in domain.splitlines(): + domains.add(d.lower()) + return domains + + async def cleanup(self): + if self.db_conn: + await self.db_conn.close() diff --git a/bbot/modules/deadly/dastardly.py b/bbot/modules/deadly/dastardly.py new file mode 100644 index 0000000000..2677818161 --- /dev/null +++ b/bbot/modules/deadly/dastardly.py @@ -0,0 +1,137 @@ +from lxml import etree +from bbot.modules.base import BaseModule + + +class dastardly(BaseModule): + watched_events = ["HTTP_RESPONSE"] + produced_events = ["FINDING", "VULNERABILITY"] + flags = ["active", "aggressive", "slow", "web-thorough"] + meta = { + "description": "Lightweight web application security scanner", + "created_date": "2023-12-11", + "author": "@domwhewell-sage", + } + + deps_pip = ["lxml~=5.3.0"] + deps_common = ["docker"] + per_hostport_only = True + + default_discovery_context = "{module} performed a light web scan against {event.parent.data['url']} and discovered {event.data['description']} at {event.data['url']}" + + async def setup(self): + await self.run_process("systemctl", "start", "docker", sudo=True) + await self.run_process("docker", "pull", "public.ecr.aws/portswigger/dastardly:latest", sudo=True) + self.output_dir = self.scan.home / "dastardly" + self.helpers.mkdir(self.output_dir) + return True + + async def filter_event(self, event): + # Reject redirects. This helps to avoid scanning the same site twice. 
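# [Editor's note] str(status_code).startswith("30") below covers the whole 3xx
# family (301, 302, 307, 308, ...), so hosts that only answer with redirects
# are skipped; together with per_hostport_only above, each web root gets
# scanned at most once.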
+ is_redirect = str(event.data["status_code"]).startswith("30") + if is_redirect: + return False, "URL is a redirect" + return True + + async def handle_event(self, event): + host = event.parsed_url._replace(path="/").geturl() + self.verbose(f"Running Dastardly scan against {host}") + command, output_file = self.construct_command(host) + finished_proc = await self.run_process(command, sudo=True) + self.debug(f"dastardly stdout: {getattr(finished_proc, 'stdout', '')}") + self.debug(f"dastardly stderr: {getattr(finished_proc, 'stderr', '')}") + for testsuite in self.parse_dastardly_xml(output_file): + url = testsuite.endpoint + for testcase in testsuite.testcases: + for failure in testcase.failures: + if failure.severity == "Info": + await self.emit_event( + { + "host": str(event.host), + "url": url, + "description": failure.instance, + }, + "FINDING", + event, + context=f"{{module}} executed web scan against {host} and identified {{event.type}}: {failure.instance}", + ) + else: + await self.emit_event( + { + "severity": failure.severity, + "host": str(event.host), + "url": url, + "description": failure.instance, + }, + "VULNERABILITY", + event, + context=f"{{module}} executed web scan against {host} and identified {failure.severity.lower()} {{event.type}}: {failure.instance}", + ) + + def construct_command(self, target): + date_time = self.helpers.make_date() + file_name = self.helpers.tagify(target) + temp_path = self.output_dir / f"{date_time}_{file_name}.xml" + command = [ + "docker", + "run", + "--user", + "0", + "--rm", + "-v", + f"{self.output_dir}:/dastardly", + "-e", + f"BURP_START_URL={target}", + "-e", + f"BURP_REPORT_FILE_PATH=/dastardly/{temp_path.name}", + "public.ecr.aws/portswigger/dastardly:latest", + ] + return command, temp_path + + def parse_dastardly_xml(self, xml_file): + try: + with open(xml_file, "rb") as f: + et = etree.parse(f, parser=etree.XMLParser(recover=True, resolve_entities=False)) + for testsuite in et.iter("testsuite"): + yield TestSuite(testsuite) + except FileNotFoundError: + self.debug(f"Could not find Dastardly XML file at {xml_file}") + except OSError as e: + self.verbose(f"Error opening Dastardly XML file at {xml_file}: {e}") + except etree.ParseError as e: + self.warning(f"Error parsing Dastardly XML at {xml_file}: {e}") + + +class Failure: + def __init__(self, xml): + self.etree = xml + + # instance information + self.instance = self.etree.attrib.get("message", "") + self.severity = self.etree.attrib.get("type", "") + self.text = self.etree.text + + +class TestCase: + def __init__(self, xml): + self.etree = xml + + # title information + self.title = self.etree.attrib.get("name", "") + + # findings / failures(as dastardly names them) + self.failures = [] + for failure in self.etree.findall("failure"): + self.failures.append(Failure(failure)) + + +class TestSuite: + def __init__(self, xml): + self.etree = xml + + # endpoint information + self.endpoint = self.etree.attrib.get("name", "") + + # test cases + self.testcases = [] + for testcase in self.etree.findall("testcase"): + self.testcases.append(TestCase(testcase)) diff --git a/bbot/modules/deadly/ffuf.py b/bbot/modules/deadly/ffuf.py index 26d020bb9b..63c8072c2b 100644 --- a/bbot/modules/deadly/ffuf.py +++ b/bbot/modules/deadly/ffuf.py @@ -1,113 +1,355 @@ +from bbot.modules.base import BaseModule + import random import string import json import base64 -from bbot.modules.base import BaseModule - class ffuf(BaseModule): - watched_events = ["URL"] - produced_events = ["URL"] - flags = ["brute-force", 
"aggressive", "active", "web"] - meta = {"description": "A fast web fuzzer written in Go"} + produced_events = ["URL_UNVERIFIED"] + flags = ["aggressive", "active"] + meta = {"description": "A fast web fuzzer written in Go", "created_date": "2022-04-10", "author": "@liquidsec"} options = { "wordlist": "https://raw.githubusercontent.com/danielmiessler/SecLists/master/Discovery/Web-Content/raft-small-directories.txt", "lines": 5000, "max_depth": 0, - "version": "1.5.0", + "extensions": "", + "ignore_case": False, + "rate": 0, } options_desc = { "wordlist": "Specify wordlist to use when finding directories", "lines": "take only the first N lines from the wordlist when finding directories", - "max_depth": "the maxium directory depth to attempt to solve", - "version": "ffuf version", + "max_depth": "the maximum directory depth to attempt to solve", + "extensions": "Optionally include a list of extensions to extend the keyword with (comma separated)", + "ignore_case": "Only put lowercase words into the wordlist", + "rate": "Rate of requests per second (default: 0)", } - blacklist = ["images", "css", "image"] + deps_common = ["ffuf"] - deps_ansible = [ - { - "name": "Download ffuf", - "unarchive": { - "src": "https://github.com/ffuf/ffuf/releases/download/v{BBOT_MODULES_FFUF_VERSION}/ffuf_{BBOT_MODULES_FFUF_VERSION}_linux_amd64.tar.gz", - "include": "ffuf", - "dest": "{BBOT_TOOLS}", - "remote_src": True, - }, - } - ] + banned_characters = {" "} + blacklist = ["images", "css", "image"] in_scope_only = True - def setup(self): - - self.sanity_canary = "".join(random.choice(string.ascii_lowercase) for i in range(10)) + async def setup(self): + self.proxy = self.scan.web_config.get("http_proxy", "") + self.canary = "".join(random.choice(string.ascii_lowercase) for i in range(10)) wordlist_url = self.config.get("wordlist", "") - self.wordlist = self.helpers.wordlist(wordlist_url) - self.tempfile = self.generate_templist(self.wordlist) + self.debug(f"Using wordlist [{wordlist_url}]") + self.wordlist = await self.helpers.wordlist(wordlist_url) + self.wordlist_lines = self.generate_wordlist(self.wordlist) + self.tempfile, tempfile_len = self.generate_templist() + self.rate = self.config.get("rate", 0) + self.verbose(f"Generated dynamic wordlist with length [{str(tempfile_len)}]") + try: + self.extensions = self.helpers.chain_lists(self.config.get("extensions", ""), validate=True) + self.debug(f"Using custom extensions: [{','.join(self.extensions)}]") + except ValueError as e: + self.warning(f"Error parsing extensions: {e}") + return False return True - def handle_event(self, event): + async def handle_event(self, event): if self.helpers.url_depth(event.data) > self.config.get("max_depth"): - self.debug(f"Exceeded max depth, aborting event") + self.debug("Exceeded max depth, aborting event") return # only FFUF against a directory - if "." in event.parsed.path.split("/")[-1]: + if "." in event.parsed_url.path.split("/")[-1]: self.debug("Aborting FFUF as period was detected in right-most path segment (likely a file)") return else: # if we think its a directory, normalize it. 
fixed_url = event.data.rstrip("/") + "/" - for r in self.execute_ffuf(self.tempfile, event, fixed_url): - self.emit_event(r["url"], "URL", source=event, tags=[f"status-{r['status']}"]) - - def execute_ffuf(self, tempfile, event, url, suffix=""): - - fuzz_url = f"{url}FUZZ{suffix}" - command = ["ffuf", "-ac", "-json", "-w", tempfile, "-u", fuzz_url] - for found in self.helpers.run_live(command): - try: - found_json = json.loads(found) - input_json = found_json.get("input", {}) - if type(input_json) != dict: - self.debug("Error decoding JSON from ffuf") - continue - encoded_input = input_json.get("FUZZ", "") - input_val = base64.b64decode(encoded_input).decode() - if len(input_val.rstrip()) > 0: - if self.scan.stopping: - break - if input_val.rstrip() == self.sanity_canary: - self.debug("Found sanity canary! aborting remainder of run to avoid junk data...") - return + exts = ["", "/"] + if self.extensions: + for ext in self.extensions: + exts.append(f".{ext}") + + filters = await self.baseline_ffuf(fixed_url, exts=exts) + async for r in self.execute_ffuf(self.tempfile, fixed_url, exts=exts, filters=filters): + await self.emit_event( + r["url"], + "URL_UNVERIFIED", + parent=event, + tags=[f"status-{r['status']}"], + context=f"{{module}} brute-forced {event.data} and found {{event.type}}: {{event.data}}", + ) + + async def filter_event(self, event): + if "endpoint" in event.tags: + self.debug(f"rejecting URL [{event.data}] because we don't ffuf endpoints") + return False + return True + + async def baseline_ffuf(self, url, exts=[""], prefix="", suffix="", mode="normal"): + filters = {} + for ext in exts: + self.debug(f"running baseline for URL [{url}] with ext [{ext}]") + # For each "extension", we will attempt to build a baseline using 4 requests + + canary_results = [] + + canary_length = 4 + canary_list = [] + for i in range(0, 4): + canary_list.append("".join(random.choice(string.ascii_lowercase) for i in range(canary_length))) + canary_length += 2 + + canary_temp_file = self.helpers.tempfile(canary_list, pipe=False) + async for canary_r in self.execute_ffuf( + canary_temp_file, + url, + prefix=prefix, + suffix=suffix, + mode=mode, + baseline=True, + apply_filters=False, + filters=filters, + ): + canary_results.append(canary_r) + + # First, lets check to make sure we got all 4 requests. If we didn't, there are likely serious connectivity issues. + # We should issue a warning in that case. + + if len(canary_results) != 4: + self.warning( + f"Could not attain baseline for URL [{url}] ext [{ext}] because baseline results are missing. Possible connectivity issues." + ) + filters[ext] = ["ABORT", "CONNECTIVITY_ISSUES"] + continue + + # if the codes are different, we should abort, this should also be a warning, as it is highly unusual behavior + if len({d["status"] for d in canary_results}) != 1: + self.warning("Got different codes for each baseline. This could indicate load balancing") + filters[ext] = ["ABORT", "BASELINE_CHANGED_CODES"] + continue + + # if the code we received was a 404, we are just going to look for cases where we get a different code + if canary_results[0]["status"] == 404: + self.debug("All baseline results were 404, we can just look for anything not 404") + filters[ext] = ["-fc", "404"] + continue + + # if we only got 403, we might already be blocked by a WAF. Issue a warning, but it's possible all 'not founds' are given 403 + if canary_results[0]["status"] == 403: + self.warning( + "All requests of the baseline received a 403 response. 
It is possible a WAF is actively blocking your traffic." + ) + + # if we only got 429, we are almost certainly getting blocked by a WAF or rate-limiting. Specifically with 429, we should respect them and abort the scan. + if canary_results[0]["status"] == 429: + self.warning( + f"Received code 429 (Too many requests) for URL [{url}]. A WAF or application is actively blocking requests, aborting." + ) + filters[ext] = ["ABORT", "RECEIVED_429"] + continue + + # we start by seeing if all of the baselines have the same character count + if len({d["length"] for d in canary_results}) == 1: + self.debug("All baseline results had the same char count, we can make a filter on that") + filters[ext] = [ + "-fc", + str(canary_results[0]["status"]), + "-fs", + str(canary_results[0]["length"]), + "-fmode", + "and", + ] + continue + + # if that doesn't work we can try words + if len({d["words"] for d in canary_results}) == 1: + self.debug("All baseline results had the same word count, we can make a filter on that") + filters[ext] = [ + "-fc", + str(canary_results[0]["status"]), + "-fw", + str(canary_results[0]["words"]), + "-fmode", + "and", + ] + continue + + # as a last resort we will try lines + if len({d["lines"] for d in canary_results}) == 1: + self.debug("All baseline results had the same word count, we can make a filter on that") + filters[ext] = [ + "-fc", + str(canary_results[0]["status"]), + "-fl", + str(canary_results[0]["lines"]), + "-fmode", + "and", + ] + continue + + # if even the line count isn't stable, we can only reliably count on the result if the code is different + filters[ext] = ["-fc", f"{str(canary_results[0]['status'])}"] + + return filters + + async def execute_ffuf( + self, + tempfile, + url, + prefix="", + suffix="", + exts=[""], + filters={}, + mode="normal", + apply_filters=True, + baseline=False, + ): + for ext in exts: + if mode == "normal": + self.debug("in mode [normal]") + + fuzz_url = f"{url}{prefix}FUZZ{suffix}" + + command = [ + "ffuf", + "-noninteractive", + "-s", + "-H", + f"User-Agent: {self.scan.useragent}", + "-json", + "-w", + tempfile, + "-u", + f"{fuzz_url}{ext}", + ] + + elif mode == "hostheader": + self.debug("in mode [hostheader]") + + command = [ + "ffuf", + "-noninteractive", + "-s", + "-H", + f"User-Agent: {self.scan.useragent}", + "-H", + f"Host: FUZZ{suffix}", + "-json", + "-w", + tempfile, + "-u", + f"{url}", + ] + else: + self.debug("invalid mode specified, aborting") + return + + if self.rate > 0: + command += ["-rate", f"{self.rate}"] + + if self.proxy: + command += ["-x", self.proxy] + + if apply_filters: + if ext in filters.keys(): + if filters[ext][0] == ("ABORT"): + self.warning(f"Exiting from FFUF run early, received an ABORT filter: [{filters[ext][1]}]") + continue + + elif filters[ext] is None: + pass + else: - yield found_json + command += filters[ext] + else: + command.append("-mc") + command.append("all") - except json.decoder.JSONDecodeError: - self.debug("Received invalid JSON from FFUF") + for hk, hv in self.scan.custom_http_headers.items(): + command += ["-H", f"{hk}: {hv}"] - def generate_templist(self, wordlist, prefix=None): + async for found in self.run_process_live(command): + try: + found_json = json.loads(found) + input_json = found_json.get("input", {}) + if type(input_json) != dict: + self.debug("Error decoding JSON from ffuf") + continue + encoded_input = input_json.get("FUZZ", "") + input_val = base64.b64decode(encoded_input).decode() + if len(input_val.rstrip()) > 0: + if self.scan.stopping: + break + if 
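# --- [Editor's sketch, not part of the diff] ---------------------------------
# filters[ext], applied just below, holds the ffuf arguments chosen by
# baseline_ffuf() above. The selection logic, distilled: pick the first
# response attribute that stayed stable across all four canary probes (the
# 403/429 warning paths are omitted). A hedged sketch in plain Python;
# choose_filter() is hypothetical, not part of this module.
#
#     def choose_filter(baselines):
#         status = baselines[0]["status"]
#         if len({b["status"] for b in baselines}) != 1:
#             return ["ABORT", "BASELINE_CHANGED_CODES"]
#         if status == 404:
#             return ["-fc", "404"]
#         for key, flag in (("length", "-fs"), ("words", "-fw"), ("lines", "-fl")):
#             if len({b[key] for b in baselines}) == 1:
#                 return ["-fc", str(status), flag, str(baselines[0][key]), "-fmode", "and"]
#         return ["-fc", str(status)]
# -----------------------------------------------------------------------------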
input_val.rstrip() == self.canary: + self.debug("Found canary! aborting...") + return + else: + if mode == "normal": + # before emitting, we are going to send another baseline. This will immediately catch things like a WAF flipping blocking on us mid-scan + if baseline is False: + pre_emit_temp_canary = [ + f + async for f in self.execute_ffuf( + self.helpers.tempfile( + ["".join(random.choice(string.ascii_lowercase) for i in range(4))], + pipe=False, + ), + url, + prefix=prefix, + suffix=suffix, + mode=mode, + exts=[ext], + baseline=True, + filters=filters, + ) + ] + if len(pre_emit_temp_canary) == 0: + yield found_json - f = open(wordlist, "r") - fl = f.readlines() - f.close() + else: + self.verbose( + f"Would have reported URL [{found_json['url']}], but baseline check failed. This could be due to a WAF turning on mid-scan, or an unusual web server configuration." + ) + self.verbose(f"Aborting the current run against [{url}]") + return + yield found_json + + except json.decoder.JSONDecodeError: + self.debug("Received invalid JSON from FFUF") + + def generate_templist(self, prefix=None): virtual_file = [] - virtual_file.append(self.sanity_canary) - for idx, val in enumerate(fl): - if idx > self.config.get("lines"): - break - if len(val) > 0: - - if val.strip().lower() in self.blacklist: - self.debug(f"Skipping adding [{val.strip()}] to wordlist because it was in the blacklist") - else: - if not prefix or val.startswith(prefix): - virtual_file.append(f"{val.strip()}") - return self.helpers.tempfile(virtual_file, pipe=False) + if prefix: + prefix = prefix.strip().lower() + max_lines = self.config.get("lines") + + for line in self.wordlist_lines[:max_lines]: + # Check if it starts with the given prefix (if any) + if (not prefix) or line.lower().startswith(prefix): + virtual_file.append(line) + + virtual_file.append(self.canary) + return self.helpers.tempfile(virtual_file, pipe=False), len(virtual_file) + + def generate_wordlist(self, wordlist_file): + wordlist_set = set() # Use a set to avoid duplicates + ignore_case = self.config.get("ignore_case", False) # Get the ignore_case option + for line in self.helpers.read_file(wordlist_file): + line = line.strip() + if not line: + continue + if line in self.blacklist: + self.debug(f"Skipping adding [{line}] to wordlist because it was in the blacklist") + continue + if any(x in line for x in self.banned_characters): + self.debug(f"Skipping adding [{line}] to wordlist because it has a banned character") + continue + if ignore_case: + line = line.lower() # Convert to lowercase if ignore_case is enabled + wordlist_set.add(line) # Add to set to handle duplicates + return list(wordlist_set) # Convert set back to list before returning diff --git a/bbot/modules/deadly/nuclei.py b/bbot/modules/deadly/nuclei.py index e4dd73e4aa..b973c714bc 100644 --- a/bbot/modules/deadly/nuclei.py +++ b/bbot/modules/deadly/nuclei.py @@ -1,29 +1,33 @@ import json -import subprocess - +import yaml +from itertools import islice from bbot.modules.base import BaseModule -technology_map = {"f5 bigip": "bigip", "microsoft asp.net": "asp"} - - class nuclei(BaseModule): + watched_events = ["URL"] + produced_events = ["FINDING", "VULNERABILITY", "TECHNOLOGY"] + flags = ["active", "aggressive"] + meta = { + "description": "Fast and customisable vulnerability scanner", + "created_date": "2022-03-12", + "author": "@TheTechromancer", + } - watched_events = ["URL", "TECHNOLOGY"] - produced_events = ["VULNERABILITY"] - flags = ["active", "aggressive", "web"] - meta = {"description": "Fast 
and customisable vulnerability scanner"} - - batch_size = 100 options = { - "version": "2.7.3", + "version": "3.3.9", "tags": "", "templates": "", "severity": "", "ratelimit": 150, "concurrency": 25, - "mode": "severe", - "etags": "intrusive", + "mode": "manual", + "etags": "", + "budget": 1, + "silent": False, + "directory_only": True, + "retries": 0, + "batch_size": 200, } options_desc = { "version": "nuclei version", @@ -32,200 +36,379 @@ class nuclei(BaseModule): "severity": "Filter based on severity field available in the template.", "ratelimit": "maximum number of requests to send per second (default 150)", "concurrency": "maximum number of templates to be executed in parallel (default 25)", - "mode": "technology | severe | manual. Technology: Only activate based on technology events that match nuclei tags. On by default. Severe: Only critical and high severity templates without intrusive. Manual: Fully manual settings", + "mode": "manual | technology | severe | budget. Technology: Only activate based on technology events that match nuclei tags (nuclei -as mode). Manual (DEFAULT): Fully manual settings. Severe: Only critical and high severity templates without intrusive. Budget: Limit Nuclei to a specified number of HTTP requests", "etags": "tags to exclude from the scan", + "budget": "Used in budget mode to set the number of requests which will be allotted to the nuclei scan", + "silent": "Don't display nuclei's banner or status messages", + "directory_only": "Filter out 'file' URL event (default True)", + "retries": "number of times to retry a failed request (default 0)", + "batch_size": "Number of targets to send to Nuclei per batch (default 200)", } deps_ansible = [ { "name": "Download nuclei", "unarchive": { - "src": "https://github.com/projectdiscovery/nuclei/releases/download/v{BBOT_MODULES_NUCLEI_VERSION}/nuclei_{BBOT_MODULES_NUCLEI_VERSION}_linux_amd64.zip", + "src": "https://github.com/projectdiscovery/nuclei/releases/download/v#{BBOT_MODULES_NUCLEI_VERSION}/nuclei_#{BBOT_MODULES_NUCLEI_VERSION}_#{BBOT_OS}_#{BBOT_CPU_ARCH}.zip", "include": "nuclei", - "dest": "{BBOT_TOOLS}", + "dest": "#{BBOT_TOOLS}", "remote_src": True, }, } ] + deps_pip = ["pyyaml~=6.0"] in_scope_only = True - - def setup(self): - + _batch_size = 25 + + async def setup(self): + # attempt to update nuclei templates + self.nuclei_templates_dir = self.helpers.tools_dir / "nuclei-templates" + self.info("Updating Nuclei templates") + update_results = await self.run_process( + ["nuclei", "-update-template-dir", self.nuclei_templates_dir, "-update-templates"] + ) + if update_results.stderr: + if "Successfully downloaded nuclei-templates" in update_results.stderr: + self.success("Successfully updated nuclei templates") + elif "No new updates found for nuclei templates" in update_results.stderr: + self.info("Nuclei templates already up-to-date") + else: + self.warning(f"Failure while updating nuclei templates: {update_results.stderr}") + else: + self.warning("Error running nuclei template update command") + self.proxy = self.scan.web_config.get("http_proxy", "") + self.mode = self.config.get("mode", "severe").lower() + self.ratelimit = int(self.config.get("ratelimit", 150)) + self.concurrency = int(self.config.get("concurrency", 25)) + self.budget = int(self.config.get("budget", 1)) + self.silent = self.config.get("silent", False) self.templates = self.config.get("templates") + if self.templates: + self.info(f"Using custom template(s) at: [{self.templates}]") self.tags = self.config.get("tags") + if self.tags: + 
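# [Editor's note] A user-supplied tag list only survives in manual and budget
# modes; the severe and technology branches below reset self.tags to "".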
self.info(f"Setting the following nuclei tags: [{self.tags}]") self.etags = self.config.get("etags") + if self.etags: + self.info(f"Excluding the following nuclei tags: [{self.etags}]") self.severity = self.config.get("severity") + if self.mode != "severe" and self.severity != "": + self.info(f"Limiting nuclei templates to the following severities: [{self.severity}]") self.iserver = self.scan.config.get("interactsh_server", None) self.itoken = self.scan.config.get("interactsh_token", None) + self.retries = int(self.config.get("retries", 0)) - self.template_stats = self.helpers.download( - "https://raw.githubusercontent.com/projectdiscovery/nuclei-templates/master/TEMPLATES-STATS.json", - cache_hrs=72, - ) - if not self.template_stats: - self.warning(f"Failed to download nuclei template stats.") - if self.config.get("mode ") == "technology": - self.warning("Can't run with technology_mode set to true without template tags JSON") - return False - else: - with open(self.template_stats) as f: - self.template_stats_json = json.load(f) - try: - self.tag_list = [e.get("name", "") for e in self.template_stats_json.get("tags", [])] - except Exception as e: - self.warning(f"Failed to parse template stats: {e}") - return False - - if self.config.get("mode") not in ("technology", "severe", "manual"): - self.warning(f"Unable to intialize nuclei: invalid mode selected: [{self.config.get('mode')}]") + if self.mode not in ("technology", "severe", "manual", "budget"): + self.warning(f"Unable to initialize nuclei: invalid mode selected: [{self.mode}]") return False - if self.config.get("mode") == "technology": + if self.mode == "technology": self.info( - "Running nuclei in TECHNOLOGY mode. Scans will only be performed against detected TECHNOLOGY events that match nuclei template tags" + "Running nuclei in TECHNOLOGY mode. Scans will only be performed with the --automatic-scan flag set. This limits the templates used to those that match wappalyzer signatures" ) - if "wappalyzer" not in self.scan.modules: - self.hugewarning( - "You are running nuclei in technology mode without wappalyzer to emit technologies. It will never execute unless another module is issuing technologies" - ) + self.tags = "" - if self.config.get("mode") == "severe": + if self.mode == "severe": self.info( "Running nuclei in SEVERE mode. Only critical and high severity templates will be used. Tag setting will be IGNORED." ) self.severity = "critical,high" self.tags = "" - if self.config.get("mode") == "manual": + if self.mode == "manual": self.info( "Running nuclei in MANUAL mode. Settings will be passed directly into nuclei with no modification" ) + + if self.mode == "budget": + self.info( + f"Running nuclei in BUDGET mode. This mode calculates which nuclei templates can be used, constrained by your 'budget' of number of requests. 
Current budget is set to: {self.budget}" + ) + + self.info("Processing nuclei templates to perform budget calculations...") + + self.nucleibudget = NucleiBudget(self) + self.budget_templates_file = self.helpers.tempfile(self.nucleibudget.collapsible_templates, pipe=False) + + self.info( + f"Loaded [{str(sum(self.nucleibudget.severity_stats.values()))}] templates based on a budget of [{str(self.budget)}] request(s)" + ) + self.info( + f"Template Severity: Critical [{self.nucleibudget.severity_stats['critical']}] High [{self.nucleibudget.severity_stats['high']}] Medium [{self.nucleibudget.severity_stats['medium']}] Low [{self.nucleibudget.severity_stats['low']}] Info [{self.nucleibudget.severity_stats['info']}] Unknown [{self.nucleibudget.severity_stats['unknown']}]" + ) + return True - def handle_batch(self, *events): + async def handle_batch(self, *events): + temp_target = self.helpers.make_target(*events) + nuclei_input = [str(e.data) for e in events] + async for severity, template, tags, host, url, name, extracted_results in self.execute_nuclei(nuclei_input): + # this is necessary because sometimes nuclei is inconsistent about the data returned in the host field + cleaned_host = temp_target.get(host) + parent_event = self.correlate_event(events, cleaned_host) + + if not parent_event: + continue - if self.config.get("mode") == "technology": + if url == "": + url = str(parent_event.data) - tags_to_scan = {} - for e in events: - if e.type == "TECHNOLOGY": - reported_tag = e.data.get("technology", "") - if reported_tag in technology_map.keys(): - reported_tag = technology_map[reported_tag] - if reported_tag in self.tag_list: - tag = e.data.get("technology", "") - if tag not in tags_to_scan.keys(): - tags_to_scan[tag] = [e] - else: - tags_to_scan[tag].append(e) - - self.debug(f"finished processing this batch's tags with {str(len(tags_to_scan.keys()))} total tags") - - for t in tags_to_scan.keys(): - nuclei_input = [e.data["url"] for e in tags_to_scan[t]] - taglist = self.tags.split(",") - taglist.append(t) - override_tags = ",".join(taglist).lstrip(",") - self.verbose(f"Running nuclei against {str(len(nuclei_input))} host(s) with the {t} tag") - for severity, template, host, name in self.execute_nuclei(nuclei_input, override_tags=override_tags): - source_event = self.correlate_event(events, host) - if source_event == None: - continue - self.emit_event( - { - "severity": severity, - "host": str(source_event.host), - "url": host, - "description": f"template: {template}, name: {name}", - }, - "VULNERABILITY", - source_event, - ) + if severity == "INFO" and "tech" in tags: + await self.emit_event( + {"technology": str(name).lower(), "url": url, "host": str(parent_event.host)}, + "TECHNOLOGY", + parent_event, + context=f"{{module}} scanned {url} and identified {{event.type}}: {str(name).lower()}", + ) + continue - else: - nuclei_input = [str(e.data) for e in events] - for severity, template, host, name in self.execute_nuclei(nuclei_input): - source_event = self.correlate_event(events, host) - if source_event == None: - continue - self.emit_event( + description_string = f"template: [{template}], name: [{name}]" + if len(extracted_results) > 0: + description_string += f" Extracted Data: [{','.join(extracted_results)}]" + + if severity in ["INFO", "UNKNOWN"]: + await self.emit_event( + { + "host": str(parent_event.host), + "url": url, + "description": description_string, + }, + "FINDING", + parent_event, + context=f"{{module}} scanned {url} and identified {{event.type}}: {description_string}", + ) + 
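# [Editor's note] Results rated INFO or UNKNOWN are reported as FINDING events
# above; anything with a concrete severity falls through to the VULNERABILITY
# branch below, carrying the severity string with it.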
else: + await self.emit_event( { "severity": severity, - "host": str(source_event.host), - "url": host, - "description": f"template: {template}, name: {name}", + "host": str(parent_event.host), + "url": url, + "description": description_string, }, "VULNERABILITY", - source_event, + parent_event, + context=f"{{module}} scanned {url} and identified {severity.lower()} {{event.type}}: {description_string}", ) def correlate_event(self, events, host): for event in events: if host in event: return event - self.warning("Failed to correlate nuclei result with event") - - def execute_nuclei(self, nuclei_input, override_tags=""): + self.verbose(f"Failed to correlate nuclei result for {host}. Possible parent events:") + for event in events: + self.verbose(f" - {event.data}") + async def execute_nuclei(self, nuclei_input): command = [ "nuclei", - "-silent", - "-json", - "-update-directory", - f"{self.helpers.tools_dir}/nuclei-templates", + "-jsonl", + "-update-template-dir", + self.nuclei_templates_dir, "-rate-limit", - self.config.get("ratelimit"), + self.ratelimit, "-concurrency", - str(self.config.get("concurrency")), - # "-r", - # self.helpers.resolver_file, + self.concurrency, + "-disable-update-check", + "-stats-json", + "-retries", + self.retries, ] - for cli_option in ("severity", "templates", "iserver", "itoken", "etags"): + if self.helpers.system_resolvers: + command += ["-r", self.helpers.resolver_file] + + for hk, hv in self.scan.custom_http_headers.items(): + command += ["-H", f"{hk}: {hv}"] + + for cli_option in ("severity", "templates", "iserver", "itoken", "tags", "etags"): option = getattr(self, cli_option) if option: command.append(f"-{cli_option}") command.append(option) - if override_tags: - command.append(f"-tags") - command.append(override_tags) - else: - setup_tags = getattr(self, "tags") - if setup_tags: - command.append(f"-tags") - command.append(setup_tags) - - if self.scan.config.get("interactsh_disable") == True: - self.info("Disbling interactsh in accordance with global settings") + if self.scan.config.get("interactsh_disable") is True: + self.info("Disabling interactsh in accordance with global settings") command.append("-no-interactsh") - for line in self.helpers.run_live(command, input=nuclei_input, stderr=subprocess.DEVNULL): - try: - j = json.loads(line) - except json.decoder.JSONDecodeError: - self.debug(f"Failed to decode line: {line}") - continue - template = j.get("template-id", "") + if self.mode == "technology": + command.append("-as") + + if self.mode == "budget": + command.append("-t") + command.append(self.budget_templates_file) + + if self.proxy: + command.append("-proxy") + command.append(f"{self.proxy}") + + stats_file = self.helpers.tempfile_tail(callback=self.log_nuclei_status) + try: + with open(stats_file, "w") as stats_fh: + async for line in self.run_process_live(command, input=nuclei_input, stderr=stats_fh): + try: + j = json.loads(line) + except json.decoder.JSONDecodeError: + self.debug(f"Failed to decode line: {line}") + continue - # try to get the specific matcher name - name = j.get("matcher-name", "") + template = j.get("template-id", "") + + # try to get the specific matcher name + name = j.get("matcher-name", "") + + info = j.get("info", {}) + + # fall back to regular name + if not name: + self.debug( + f"Couldn't get matcher-name from nuclei json, falling back to regular name. 
Template: [{template}]" + ) + name = info.get("name", "") + severity = info.get("severity", "").upper() + tags = info.get("tags", []) + host = j.get("host", "") + url = j.get("matched-at", "") + if not self.helpers.is_url(url): + url = "" + + extracted_results = j.get("extracted-results", []) + + if template and name and severity: + yield (severity, template, tags, host, url, name, extracted_results) + else: + self.debug("Nuclei result missing one or more required elements, not reporting. JSON: ({j})") + finally: + stats_file.unlink() + + def log_nuclei_status(self, line): + if self.silent: + return + try: + line = json.loads(line) + except Exception: + self.info(str(line)) + return + duration = line.get("duration", "") + errors = line.get("errors", "") + hosts = line.get("hosts", "") + matched = line.get("matched", "") + percent = line.get("percent", "") + requests = line.get("requests", "") + rps = line.get("rps", "") + templates = line.get("templates", "") + total = line.get("total", "") + status = f"[{duration}] | Templates: {templates} | Hosts: {hosts} | RPS: {rps} | Matched: {matched} | Errors: {errors} | Requests: {requests}/{total} ({percent}%)" + self.info(status) + + async def cleanup(self): + resume_file = self.helpers.current_dir / "resume.cfg" + resume_file.unlink(missing_ok=True) - # fall back to regular name - if not name: + async def filter_event(self, event): + if self.config.get("directory_only", True): + if "endpoint" in event.tags: self.debug( - f"Couldn't get matcher-name from nuclei json, falling back to regular name. Template: [{template}]" + f"rejecting URL [{str(event.data)}] because directory_only is true and event has endpoint tag" ) - name = j.get("info", {}).get("name", "") - - severity = j.get("info", {}).get("severity", "").upper() - host = j.get("host", "") + return False + return True - if template and name and severity and host: - yield (severity, template, host, name) - else: - self.debug("Nuclei result missing one or more required elements, not reporting. 
JSON: ({j})") - def cleanup(self): - resume_file = self.helpers.current_dir / "resume.cfg" - resume_file.unlink(missing_ok=True) +class NucleiBudget: + def __init__(self, nuclei_module): + self.parent = nuclei_module + self._yaml_files = {} + self.templates_dir = nuclei_module.nuclei_templates_dir + self.yaml_list = self.get_yaml_list() + self.budget_paths = self.find_budget_paths(nuclei_module.budget) + self.collapsible_templates, self.severity_stats = self.find_collapsible_templates() + + def get_yaml_list(self): + return list(self.templates_dir.rglob("*.yaml")) + + # Given the current budget setting, scan all of the templates for paths, sort them by frequency and select the first N (budget) items + def find_budget_paths(self, budget): + path_frequency = {} + for yf in self.yaml_list: + if yf: + for paths in self.get_yaml_request_attr(yf, "path"): + for path in paths: + if path in path_frequency.keys(): + path_frequency[path] += 1 + else: + path_frequency[path] = 1 + + sorted_dict = dict(sorted(path_frequency.items(), key=lambda item: item[1], reverse=True)) + return list(dict(islice(sorted_dict.items(), budget)).keys()) + + def get_yaml_request_attr(self, yf, attr): + p = self.parse_yaml(yf) + requests = p.get("http", []) + for r in requests: + raw = r.get("raw") + if not raw: + res = r.get(attr) + if res is not None: + yield res + + def get_yaml_info_attr(self, yf, attr): + p = self.parse_yaml(yf) + info = p.get("info", []) + res = info.get(attr) + if res is not None: + yield res + + # Parse through all templates and locate those which match the conditions necessary to collapse down to the budget setting + def find_collapsible_templates(self): + collapsible_templates = [] + severity_dict = {} + for yf in self.yaml_list: + valid = True + if yf: + for paths in self.get_yaml_request_attr(yf, "path"): + if set(paths).issubset(self.budget_paths): + headers = self.get_yaml_request_attr(yf, "headers") + for header in headers: + if header: + valid = False + + method = self.get_yaml_request_attr(yf, "method") + for m in method: + if m != "GET": + valid = False + + max_redirects = self.get_yaml_request_attr(yf, "max-redirects") + for mr in max_redirects: + if mr: + valid = False + + redirects = self.get_yaml_request_attr(yf, "redirects") + for rd in redirects: + if rd: + valid = False + + cookie_reuse = self.get_yaml_request_attr(yf, "cookie-reuse") + for c in cookie_reuse: + if c: + valid = False + + if valid: + collapsible_templates.append(str(yf)) + severity_gen = self.get_yaml_info_attr(yf, "severity") + severity = next(severity_gen) + if severity in severity_dict.keys(): + severity_dict[severity] += 1 + else: + severity_dict[severity] = 1 + return collapsible_templates, severity_dict + + def parse_yaml(self, yamlfile): + if yamlfile not in self._yaml_files: + with open(yamlfile, "r") as stream: + try: + y = yaml.safe_load(stream) + self._yaml_files[yamlfile] = y + except yaml.YAMLError as e: + self.parent.warning(f"failed to load yaml file: {e}") + return {} + return self._yaml_files[yamlfile] diff --git a/bbot/modules/deadly/vhost.py b/bbot/modules/deadly/vhost.py index 67e6689651..29aa5b6438 100644 --- a/bbot/modules/deadly/vhost.py +++ b/bbot/modules/deadly/vhost.py @@ -1,94 +1,129 @@ -from bbot.modules.base import BaseModule +import base64 +from urllib.parse import urlparse +from bbot.modules.deadly.ffuf import ffuf -class vhost(BaseModule): +class vhost(ffuf): watched_events = ["URL"] produced_events = ["VHOST", "DNS_NAME"] - flags = ["active", "brute-force", "aggressive", "slow", 
"web"] - meta = {"description": "Fuzz for virtual hosts"} + flags = ["active", "aggressive", "slow"] + meta = {"description": "Fuzz for virtual hosts", "created_date": "2022-05-02", "author": "@liquidsec"} special_vhost_list = ["127.0.0.1", "localhost", "host.docker.internal"] options = { - "subdomain_wordlist": "https://raw.githubusercontent.com/danielmiessler/SecLists/master/Discovery/DNS/subdomains-top1million-5000.txt", + "wordlist": "https://raw.githubusercontent.com/danielmiessler/SecLists/master/Discovery/DNS/subdomains-top1million-5000.txt", "force_basehost": "", + "lines": 5000, } options_desc = { - "subdomain_wordlist": "Wordlist containing subdomains", + "wordlist": "Wordlist containing subdomains", "force_basehost": "Use a custom base host (e.g. evilcorp.com) instead of the default behavior of using the current URL", + "lines": "take only the first N lines from the wordlist when finding directories", } - deps_ansible = [ - { - "name": "Download ffuf", - "unarchive": { - "src": "https://github.com/ffuf/ffuf/releases/download/v{BBOT_MODULES_FFUF_VERSION}/ffuf_{BBOT_MODULES_FFUF_VERSION}_linux_amd64.tar.gz", - "include": "ffuf", - "dest": "{BBOT_TOOLS}", - "remote_src": True, - }, - } - ] + + deps_common = ["ffuf"] + banned_characters = {" ", "."} + in_scope_only = True - def setup(self): - self.scanned_hosts = set() - self.subdomain_wordlist = self.helpers.wordlist(self.config.get("subdomain_wordlist")) - return True + async def setup(self): + self.scanned_hosts = {} + self.wordcloud_tried_hosts = set() + return await super().setup() - def handle_event(self, event): + async def handle_event(self, event): if not self.helpers.is_ip(event.host) or self.config.get("force_basehost"): - parsed_host = event.parsed - host = f"{parsed_host.scheme}://{parsed_host.netloc}/" - host_hash = hash(host) - if host_hash in self.scanned_hosts: - self.debug(f"Host {host} was already scanned, exiting") + host = f"{event.parsed_url.scheme}://{event.parsed_url.netloc}" + if host in self.scanned_hosts.keys(): return else: - self.scanned_hosts.add(host_hash) + self.scanned_hosts[host] = event # subdomain vhost check - self.debug("Main vhost bruteforce") + self.verbose("Main vhost bruteforce") if self.config.get("force_basehost"): - basehostraw = self.config.get("force_basehost") + basehost = self.config.get("force_basehost") else: - basehostraw = self.helpers.parent_domain(event.host) + basehost = self.helpers.parent_domain(event.parsed_url.netloc) - self.debug(f"Basehost: {basehostraw}") - basehost = f".{basehostraw}" - command = ["ffuf", "-ac", "-s", "-w", self.subdomain_wordlist, "-u", host, "-H", f"Host: FUZZ{basehost}"] - for vhost in self.ffuf_vhost(command, host, parsed_host, basehost, event): - self.debug(f"Starting mutations check for {vhost}") - mutations_list_file = self.mutations_check(vhost) - command = ["ffuf", "-ac", "-s", "-w", mutations_list_file, "-u", host, "-H", f"Host: FUZZ{basehost}"] - self.ffuf_vhost(command, host, parsed_host, event, basehost) + self.debug(f"Using basehost: {basehost}") + async for vhost in self.ffuf_vhost(host, f".{basehost}", event): + self.verbose(f"Starting mutations check for {vhost}") + async for vhost in self.ffuf_vhost(host, f".{basehost}", event, wordlist=self.mutations_check(vhost)): + pass # check existing host for mutations - self.debug("Checking for vhost mutations on main host") - mutations_list_file = self.mutations_check(parsed_host.netloc.split(".")[0]) - command = ["ffuf", "-ac", "-s", "-w", mutations_list_file, "-u", host, "-H", f"Host: 
FUZZ{basehost}"] - self.ffuf_vhost(command, host, parsed_host, basehost, event) + self.verbose("Checking for vhost mutations on main host") + async for vhost in self.ffuf_vhost( + host, f".{basehost}", event, wordlist=self.mutations_check(event.parsed_url.netloc.split(".")[0]) + ): + pass # special vhost list - self.debug("Checking special vhost list") - basehost = basehostraw - special_vhost_list_file = self.helpers.tempfile(self.special_vhost_list) - command = ["ffuf", "-ac", "-s", "-w", special_vhost_list_file, "-u", host, "-H", f"Host: FUZZ"] - self.ffuf_vhost(command, host, parsed_host, basehost, event, skip_dns_host=True) - - def ffuf_vhost(self, command, host, parsed_host, basehost, event, skip_dns_host=False): - for found_vhost in self.helpers.run_live(command): - found_vhost = found_vhost.rstrip() - vhost_dict = {"host": str(event.host), "url": host, "vhost": found_vhost} - if f"{vhost_dict['vhost']}{basehost}" != parsed_host.netloc: - self.emit_event(vhost_dict, "VHOST", source=event) - if skip_dns_host == False: - self.emit_event(f"{vhost_dict['vhost']}{basehost}", "DNS_NAME", source=event, tags=["vhost"]) + self.verbose("Checking special vhost list") + async for vhost in self.ffuf_vhost( + host, + "", + event, + wordlist=self.helpers.tempfile(self.special_vhost_list, pipe=False), + skip_dns_host=True, + ): + pass + + async def ffuf_vhost(self, host, basehost, event, wordlist=None, skip_dns_host=False): + filters = await self.baseline_ffuf(f"{host}/", exts=[""], suffix=basehost, mode="hostheader") + self.debug("Baseline completed and returned these filters:") + self.debug(filters) + if not wordlist: + wordlist = self.tempfile + async for r in self.execute_ffuf( + wordlist, host, exts=[""], suffix=basehost, filters=filters, mode="hostheader" + ): + found_vhost_b64 = r["input"]["FUZZ"] + vhost_str = base64.b64decode(found_vhost_b64).decode() + vhost_dict = {"host": str(event.host), "url": host, "vhost": vhost_str} + if f"{vhost_dict['vhost']}{basehost}" != event.parsed_url.netloc: + await self.emit_event( + vhost_dict, + "VHOST", + parent=event, + context=f"{{module}} brute-forced virtual hosts for {event.data} and found {{event.type}}: {vhost_str}", + ) + if skip_dns_host is False: + await self.emit_event( + f"{vhost_dict['vhost']}{basehost}", + "DNS_NAME", + parent=event, + tags=["vhost"], + context=f"{{module}} brute-forced virtual hosts for {event.data} and found {{event.type}}: {{event.data}}", + ) + yield vhost_dict["vhost"] def mutations_check(self, vhost): mutations_list = [] for mutation in self.helpers.word_cloud.mutations(vhost): - for i in ["", ".", "-"]: + for i in ["", "-"]: mutations_list.append(i.join(mutation)) - mutations_list_file = self.helpers.tempfile(mutations_list) + mutations_list_file = self.helpers.tempfile(mutations_list, pipe=False) return mutations_list_file + + async def finish(self): + # check existing hosts with wordcloud + tempfile = self.helpers.tempfile(list(self.helpers.word_cloud.keys()), pipe=False) + + for host, event in self.scanned_hosts.items(): + if host not in self.wordcloud_tried_hosts: + event.parsed_url = urlparse(host) + + self.verbose("Checking main host with wordcloud") + if self.config.get("force_basehost"): + basehost = self.config.get("force_basehost") + else: + basehost = self.helpers.parent_domain(event.parsed_url.netloc) + + async for vhost in self.ffuf_vhost(host, f".{basehost}", event, wordlist=tempfile): + pass + + self.wordcloud_tried_hosts.add(host) diff --git a/bbot/modules/dehashed.py b/bbot/modules/dehashed.py new 
file mode 100644 index 0000000000..cfb1d6b7c5 --- /dev/null +++ b/bbot/modules/dehashed.py @@ -0,0 +1,115 @@ +from contextlib import suppress + +from bbot.modules.templates.subdomain_enum import subdomain_enum + + +class dehashed(subdomain_enum): + watched_events = ["DNS_NAME"] + produced_events = ["PASSWORD", "HASHED_PASSWORD", "USERNAME", "EMAIL_ADDRESS"] + flags = ["passive", "safe", "email-enum"] + meta = { + "description": "Execute queries against dehashed.com for exposed credentials", + "created_date": "2023-10-12", + "author": "@SpamFaux", + "auth_required": True, + } + options = {"username": "", "api_key": ""} + options_desc = {"username": "Email Address associated with your API key", "api_key": "DeHashed API Key"} + target_only = True + + base_url = "https://api.dehashed.com/search" + + async def setup(self): + self.username = self.config.get("username", "") + self.api_key = self.config.get("api_key", "") + self.auth = (self.username, self.api_key) + self.headers = { + "Accept": "application/json", + } + + # soft-fail if we don't have the necessary information to make queries + if not (self.username and self.api_key): + return None, "No username / API key set" + + return await super().setup() + + async def handle_event(self, event): + query = self.make_query(event) + async for entries in self.query(query): + for entry in entries: + # we have to clean up the email field because dehashed does a poor job of it + email_str = entry.get("email", "").replace("\\", "") + found_emails = list(await self.helpers.re.extract_emails(email_str)) + if not found_emails: + self.debug(f"Invalid email from dehashed.com: {email_str}") + continue + email = found_emails[0] + + user = entry.get("username", "") + pw = entry.get("password", "") + h_pw = entry.get("hashed_password", "") + db_name = entry.get("database_name", "") + + tags = [] + if db_name: + tags = [f"db-{db_name}"] + if email: + email_event = self.make_event(email, "EMAIL_ADDRESS", parent=event, tags=tags) + if email_event is not None: + await self.emit_event( + email_event, + context=f'{{module}} searched API for "{query}" and found {{event.type}}: {{event.data}}', + ) + if user: + await self.emit_event( + f"{email}:{user}", + "USERNAME", + parent=email_event, + tags=tags, + context=f"{{module}} found {email} with {{event.type}}: {{event.data}}", + ) + if pw: + await self.emit_event( + f"{email}:{pw}", + "PASSWORD", + parent=email_event, + tags=tags, + context=f"{{module}} found {email} with {{event.type}}: {{event.data}}", + ) + if h_pw: + await self.emit_event( + f"{email}:{h_pw}", + "HASHED_PASSWORD", + parent=email_event, + tags=tags, + context=f"{{module}} found {email} with {{event.type}}: {{event.data}}", + ) + + async def query(self, domain): + query = f"domain:{domain}" + url = f"{self.base_url}?query={query}&size=10000&page=" + "{page}" + page = 0 + num_entries = 0 + agen = self.api_page_iter(url=url, auth=self.auth, headers=self.headers, _json=False) + async for result in agen: + result_json = {} + with suppress(Exception): + result_json = result.json() + total = result_json.get("total", 0) + entries = result_json.get("entries", []) + if entries is None: + entries = [] + num_entries += len(entries) + page += 1 + if (page >= 3) or (not entries): + if result is not None and result.status_code != 200: + self.warning( + f"Error retrieving results from dehashed.com (status code {result.status_code}): {result.text}" + ) + elif (page >= 3) and (total > num_entries): + self.info( + f"{domain} has {total:,} results in Dehashed. 
The API can only process the first 30,000 results. Please check dehashed.com to get the remaining results." + ) + await agen.aclose() + break + yield entries
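The query() method above leans on BBOT's api_page_iter helper, which repeatedly formats a URL template containing a literal "{page}" placeholder. A simplified stand-in for that pattern (this is not BBOT's implementation, and httpx is assumed here purely for illustration):

# Simplified stand-in for the api_page_iter pattern used above: the caller
# supplies a URL template with a "{page}" placeholder, and the generator
# yields one response per page until the caller breaks out or closes it.
import httpx

async def api_page_iter(url_template, start_page=0):
    page = start_page
    async with httpx.AsyncClient() as client:
        while True:
            yield await client.get(url_template.format(page=page))
            page += 1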
+# Handle "v=BIMI1; l=; a=" == RFC non-conformant explicit declination to publish +# Handle "v=BIMI1; l=;" == RFC non-conformant explicit declination to publish +# Handle "v=BIMI1; l=" == RFC non-conformant explicit declination to publish +# Handle "v=BIMI1;" == RFC non-conformant explicit declination to publish +# Handle "v=BIMI1" == RFC non-conformant explicit declination to publish +# Handle "v=BIMI1;l=https://bimi.entrust.net/example.com/logo.svg;" +# Handle "v=BIMI1; l=https://bimi.entrust.net/example.com/logo.svg;" +# Handle "v=BIMI1;l=https://bimi.entrust.net/example.com/logo.svg;a=https://bimi.entrust.net/example.com/certchain.pem" +# Handle "v=BIMI1; l=https://bimi.entrust.net/example.com/logo.svg;a=https://bimi.entrust.net/example.com/certchain.pem;" +_bimi_regex = r"^v=(?PBIMI1);* *(l=(?Phttps*://[^;]*|)|);*( *a=((?Phttps://[^;]*|)|);*)*$" +bimi_regex = re.compile(_bimi_regex, re.I) + + +class dnsbimi(BaseModule): + watched_events = ["DNS_NAME"] + produced_events = ["URL_UNVERIFIED", "RAW_DNS_RECORD"] + flags = ["subdomain-enum", "cloud-enum", "passive", "safe"] + meta = { + "description": "Check DNS_NAME's for BIMI records to find image and certificate hosting URL's", + "author": "@colin-stubbs", + "created_date": "2024-11-15", + } + options = { + "emit_raw_dns_records": False, + "emit_urls": True, + "selectors": "default,email,mail,bimi", + } + options_desc = { + "emit_raw_dns_records": "Emit RAW_DNS_RECORD events", + "emit_urls": "Emit URL_UNVERIFIED events", + "selectors": "CSV list of BIMI selectors to check", + } + + async def setup(self): + self.emit_raw_dns_records = self.config.get("emit_raw_dns_records", False) + self.emit_urls = self.config.get("emit_urls", True) + self._selectors = self.config.get("selectors", "").replace(", ", ",").split(",") + + return await super().setup() + + def _incoming_dedup_hash(self, event): + # dedupe by parent + parent_domain = self.helpers.parent_domain(event.data) + return hash(parent_domain), "already processed parent domain" + + async def filter_event(self, event): + if "_wildcard" in str(event.host).split("."): + return False, "event is wildcard" + + # there's no value in inspecting service records + if service_record(event.host) is True: + return False, "service record detected" + + return True + + async def inspectBIMI(self, event, domain): + parent_domain = self.helpers.parent_domain(event.data) + rdtype = "TXT" + + for selector in self._selectors: + tags = ["bimi-record", f"bimi-{selector}"] + hostname = f"{selector}._bimi.{parent_domain}" + + r = await self.helpers.resolve_raw(hostname, type=rdtype) + + if r: + raw_results, errors = r + + for answer in raw_results: + if self.emit_raw_dns_records: + await self.emit_event( + { + "host": hostname, + "type": rdtype, + "answer": answer.to_text(), + }, + "RAW_DNS_RECORD", + parent=event, + tags=tags.append(f"{rdtype.lower()}-record"), + context=f"{rdtype} lookup on {hostname} produced {{event.type}}", + ) + + # we need to strip surrounding quotes and whitespace, as well as fix TXT data that may have been split across two different rdata's + # e.g. 
+ + +class dnsbimi(BaseModule): + watched_events = ["DNS_NAME"] + produced_events = ["URL_UNVERIFIED", "RAW_DNS_RECORD"] + flags = ["subdomain-enum", "cloud-enum", "passive", "safe"] + meta = { + "description": "Check DNS_NAME's for BIMI records to find image and certificate hosting URL's", + "author": "@colin-stubbs", + "created_date": "2024-11-15", + } + options = { + "emit_raw_dns_records": False, + "emit_urls": True, + "selectors": "default,email,mail,bimi", + } + options_desc = { + "emit_raw_dns_records": "Emit RAW_DNS_RECORD events", + "emit_urls": "Emit URL_UNVERIFIED events", + "selectors": "CSV list of BIMI selectors to check", + } + + async def setup(self): + self.emit_raw_dns_records = self.config.get("emit_raw_dns_records", False) + self.emit_urls = self.config.get("emit_urls", True) + self._selectors = self.config.get("selectors", "").replace(", ", ",").split(",") + + return await super().setup() + + def _incoming_dedup_hash(self, event): + # dedupe by parent + parent_domain = self.helpers.parent_domain(event.data) + return hash(parent_domain), "already processed parent domain" + + async def filter_event(self, event): + if "_wildcard" in str(event.host).split("."): + return False, "event is wildcard" + + # there's no value in inspecting service records + if service_record(event.host) is True: + return False, "service record detected" + + return True + + async def inspectBIMI(self, event, domain): + parent_domain = self.helpers.parent_domain(event.data) + rdtype = "TXT" + + for selector in self._selectors: + tags = ["bimi-record", f"bimi-{selector}"] + hostname = f"{selector}._bimi.{parent_domain}" + + r = await self.helpers.resolve_raw(hostname, type=rdtype) + + if r: + raw_results, errors = r + + for answer in raw_results: + if self.emit_raw_dns_records: + await self.emit_event( + { + "host": hostname, + "type": rdtype, + "answer": answer.to_text(), + }, + "RAW_DNS_RECORD", + parent=event, + tags=tags + [f"{rdtype.lower()}-record"], + context=f"{rdtype} lookup on {hostname} produced {{event.type}}", + ) + + # we need to strip surrounding quotes and whitespace, as well as fix TXT data that may have been split across two different rdata's + # e.g. we will get a single string, but within that string we may have two parts such as: + # answer = '"part 1 that was really long" "part 2 that did not fit in part 1"' + s = answer.to_text().strip('"').strip().replace('" "', "") + + bimi_match = bimi_regex.search(s) + + if bimi_match and bimi_match.group("v") and "bimi" in bimi_match.group("v").lower(): + if bimi_match.group("l") and bimi_match.group("l") != "": + if self.emit_urls: + await self.emit_event( + bimi_match.group("l"), + "URL_UNVERIFIED", + parent=event, + tags=tags + ["bimi-location"], + ) + + if bimi_match.group("a") and bimi_match.group("a") != "": + if self.emit_urls: + await self.emit_event( + bimi_match.group("a"), + "URL_UNVERIFIED", + parent=event, + tags=tags + ["bimi-authority"], + ) + + async def handle_event(self, event): + await self.inspectBIMI(event, event.host) + + +# EOF diff --git a/bbot/modules/dnsbrute.py b/bbot/modules/dnsbrute.py new file mode 100644 index 0000000000..3b847933c4 --- /dev/null +++ b/bbot/modules/dnsbrute.py @@ -0,0 +1,59 @@ +from bbot.modules.templates.subdomain_enum import subdomain_enum + + +class dnsbrute(subdomain_enum): + flags = ["subdomain-enum", "active", "aggressive"] + watched_events = ["DNS_NAME"] + produced_events = ["DNS_NAME"] + meta = { + "description": "Brute-force subdomains with massdns + static wordlist", + "author": "@TheTechromancer", + "created_date": "2024-04-24", + } + options = { + "wordlist": "https://raw.githubusercontent.com/danielmiessler/SecLists/master/Discovery/DNS/subdomains-top1million-5000.txt", + "max_depth": 5, + } + options_desc = { + "wordlist": "Subdomain wordlist URL", + "max_depth": "How many subdomains deep to brute force, i.e. 5.4.3.2.1.evilcorp.com", + } + deps_common = ["massdns"] + reject_wildcards = "strict" + dedup_strategy = "lowest_parent" + _qsize = 10000 + + async def setup(self): + self.max_depth = max(1, self.config.get("max_depth", 5)) + self.subdomain_file = await self.helpers.wordlist(self.config.get("wordlist")) + self.subdomain_list = set(self.helpers.read_file(self.subdomain_file)) + self.wordlist_size = len(self.subdomain_list) + return await super().setup() + + async def filter_event(self, event): + eligible, reason = await super().filter_event(event) + query = self.make_query(event) + + # limit brute force depth + subdomain_depth = self.helpers.subdomain_depth(query) + 1 + if subdomain_depth > self.max_depth: + eligible = False + reason = f"subdomain depth of *.{query} ({subdomain_depth}) > max_depth ({self.max_depth})" + + # don't brute-force things that look like autogenerated PTRs + if self.helpers.dns.brute.has_excessive_digits(query): + eligible = False + reason = f'"{query}" looks like an autogenerated PTR' + + return eligible, reason + + async def handle_event(self, event): + query = self.make_query(event) + self.info(f"Brute-forcing {self.wordlist_size:,} subdomains for {query} (source: {event.data})") + for hostname in await self.helpers.dns.brute(self, query, self.subdomain_list): + await self.emit_event( + hostname, + "DNS_NAME", + parent=event, + context=f'{{module}} tried {self.wordlist_size:,} subdomains against "{query}" and found {{event.type}}: {{event.data}}', + )
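Both guards in filter_event() above defer to helpers. The digit heuristic is simple to sketch; BBOT's actual has_excessive_digits() may differ, this only illustrates the idea:

# Rough idea behind has_excessive_digits(): hosts like "10-0-0-1" are usually
# autogenerated PTRs and not worth brute-forcing. Threshold is hypothetical.
def has_excessive_digits(label: str, threshold: float = 0.4) -> bool:
    digits = sum(c.isdigit() for c in label)
    return bool(label) and digits / len(label) >= threshold

assert has_excessive_digits("10-0-0-1")
assert not has_excessive_digits("www")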
"aggressive", "slow"] + watched_events = ["DNS_NAME"] + produced_events = ["DNS_NAME"] + meta = { + "description": "Brute-force subdomains with massdns + target-specific mutations", + "author": "@TheTechromancer", + "created_date": "2024-04-25", + } + options = { + "max_mutations": 100, + } + options_desc = { + "max_mutations": "Maximum number of target-specific mutations to try per subdomain", + } + deps_common = ["massdns"] + _qsize = 10000 + + async def setup(self): + self.found = {} + self.parent_events = {} + self.max_mutations = self.config.get("max_mutations", 500) + # 800M bits == 100MB bloom filter == 10M entries before false positives start emerging + self.mutations_tried = self.helpers.bloom_filter(800000000) + self._mutation_run_counter = {} + return True + + async def handle_event(self, event): + # here we don't brute-force, we just add the subdomain to our end-of-scan + host = str(event.host) + self.parent_events[host] = event + if self.helpers.is_subdomain(host): + subdomain, domain = host.split(".", 1) + if not self.helpers.dns.brute.has_excessive_digits(subdomain): + try: + self.found[domain].add(subdomain) + except KeyError: + self.found[domain] = {subdomain} + + async def get_parent_event(self, subdomain): + start = time.time() + parent_host = await self.helpers.run_in_executor(self.helpers.closest_match, subdomain, self.parent_events) + elapsed = time.time() - start + self.trace(f"{subdomain}: got closest match among {len(self.parent_events):,} parent events in {elapsed:.2f}s") + return self.parent_events[parent_host] + + async def finish(self): + """ + TODO: speed up this loop. + We should see if we can combine multiple runs together instead of running them each individually. + """ + found = sorted(self.found.items(), key=lambda x: len(x[-1]), reverse=True) + # if we have a lot of rounds to make, don't try mutations on less-populated domains + trimmed_found = [] + if found: + avg_subdomains = sum([len(subdomains) for domain, subdomains in found[:50]]) / len(found[:50]) + for i, (domain, subdomains) in enumerate(found): + # accept domains that are in the top 50 or have more than 5 percent of the average number of subdomains + if i < 50 or (len(subdomains) > 1 and len(subdomains) >= (avg_subdomains * 0.05)): + trimmed_found.append((domain, subdomains)) + else: + self.verbose( + f"Skipping mutations on {domain} because it only has {len(subdomains):,} subdomain(s) (avg: {avg_subdomains:,})" + ) + + base_mutations = set() + try: + for i, (domain, subdomains) in enumerate(trimmed_found): + self.verbose(f"{domain} has {len(subdomains):,} subdomains") + # keep looping as long as we're finding things + while 1: + query = domain + + mutations = set(base_mutations) + + def add_mutation(m): + h = f"{m}.{domain}" + if h not in self.mutations_tried: + self.mutations_tried.add(h) + mutations.add(m) + + # try every subdomain everywhere else + for _domain, _subdomains in found: + if _domain == domain: + continue + for s in _subdomains: + first_segment = s.split(".")[0] + # skip stuff with lots of numbers (e.g. 
PTRs) + if self.helpers.dns.brute.has_excessive_digits(first_segment): + continue + add_mutation(first_segment) + for word in self.helpers.extract_words( + first_segment, word_regexes=self.helpers.word_cloud.dns_mutator.extract_word_regexes + ): + add_mutation(word) + + # numbers + devops mutations + for mutation in self.helpers.word_cloud.mutations( + subdomains, cloud=False, numbers=3, number_padding=1 + ): + for delimiter in ("", ".", "-"): + m = delimiter.join(mutation).lower() + add_mutation(m) + + # special dns mutator + for subdomain in self.helpers.word_cloud.dns_mutator.mutations( + subdomains, max_mutations=self.max_mutations + ): + add_mutation(subdomain) + + # skip if there's hardly any mutations + if len(mutations) < 10: + self.verbose( + f"Skipping {len(mutations):,} mutations against {domain} because there are less than 10" + ) + break + + if mutations: + self.info( + f"Trying {len(mutations):,} mutations against {domain} ({i + 1}/{len(trimmed_found)})" + ) + results = await self.helpers.dns.brute(self, query, mutations) + try: + mutation_run = self._mutation_run_counter[domain] + except KeyError: + self._mutation_run_counter[domain] = mutation_run = 1 + self._mutation_run_counter[domain] += 1 + for hostname in results: + parent_event = await self.get_parent_event(hostname) + mutation_run_ordinal = self.helpers.integer_to_ordinal(mutation_run) + await self.emit_event( + hostname, + "DNS_NAME", + parent=parent_event, + tags=[f"mutation-{mutation_run}"], + abort_if=self.abort_if, + context=f'{{module}} found a mutated subdomain of "{parent_event.host}" on its {mutation_run_ordinal} run: {{event.type}}: {{event.data}}', + ) + if results: + continue + break + except AssertionError as e: + self.warning(e) + + def abort_if(self, event): + if not event.scope_distance == 0: + return True, "event is not in scope" + if "wildcard" in event.tags: + return True, "event is a wildcard" + if "unresolved" in event.tags: + return True, "event is unresolved" + return False, "" diff --git a/bbot/modules/dnscaa.py b/bbot/modules/dnscaa.py new file mode 100644 index 0000000000..1465cd8faf --- /dev/null +++ b/bbot/modules/dnscaa.py @@ -0,0 +1,119 @@ +# dnscaa.py +# +# Checks for and parses CAA DNS TXT records for IODEF reporting destination email addresses and/or URL's. +# +# NOTE: when the target domain is initially resolved basic "dns_name_extraction_regex" matched targets will be extracted so we do not perform that again here. +# +# Example CAA records, +# 0 iodef "mailto:dnsadmin@example.com" +# 0 iodef "mailto:contact_pki@example.com" +# 0 iodef "mailto:ipladmin@example.com" +# 0 iodef "https://example.com/caa" +# 0 iodef "https://203.0.113.1/caa" <<< unlikely but possible? +# 0 iodef "https://[2001:db8::1]/caa" <<< unlikely but possible? +# +# We simply extract any URL's as URL_UNVERIFIED, no further activity against URL's is performed by this module in order to remain passive. +# +# Other modules which respond to URL_UNVERIFIED events may do so if you have configured bbot appropriately. +# +# The domain/IP portion of any URL_UNVERIFIED's should be extracted by the various internal modules. +# + +from bbot.modules.base import BaseModule + +import re + +from bbot.core.helpers.regexes import dns_name_extraction_regex, email_regex, url_regexes + +# Handle '0 iodef "mailto:support@hcaptcha.com"' +# Handle '1 iodef "https://some.host.tld/caa;"' +# Handle '0 issue "pki.goog; cansignhttpexchanges=yes; somethingelse=1"' +# Handle '1 issue ";"' == explicit denial for any wildcard issuance. 
+# Handle '128 issuewild "comodoca.com"' +# Handle '128 issuewild ";"' == explicit denial for any wildcard issuance. +_caa_regex = r"^(?P<flags>[0-9]+) +(?P<property>\w+) +\"(?P<text>[^;\"]*);* *(?P<extensions>[^\"]*)\"$" +caa_regex = re.compile(_caa_regex) + +_caa_extensions_kvp_regex = r"(?P<k>\w+)=(?P<v>[^;]+)" +caa_extensions_kvp_regex = re.compile(_caa_extensions_kvp_regex) + + +class dnscaa(BaseModule): + watched_events = ["DNS_NAME"] + produced_events = ["DNS_NAME", "EMAIL_ADDRESS", "URL_UNVERIFIED"] + flags = ["subdomain-enum", "email-enum", "passive", "safe"] + meta = {"description": "Check for CAA records", "author": "@colin-stubbs", "created_date": "2024-05-26"} + options = { + "in_scope_only": True, + "dns_names": True, + "emails": True, + "urls": True, + } + options_desc = { + "in_scope_only": "Only check in-scope domains", + "dns_names": "emit DNS_NAME events", + "emails": "emit EMAIL_ADDRESS events", + "urls": "emit URL_UNVERIFIED events", + } + # accept DNS_NAMEs out to 2 hops if in_scope_only is False + scope_distance_modifier = 2 + + async def setup(self): + self.in_scope_only = self.config.get("in_scope_only", True) + self._dns_names = self.config.get("dns_names", True) + self._emails = self.config.get("emails", True) + self._urls = self.config.get("urls", True) + return await super().setup() + + async def filter_event(self, event): + if "_wildcard" in str(event.host).split("."): + return False, "event is wildcard" + + # scope filtering + if event.scope_distance > 0 and self.in_scope_only: + return False, "event is not in scope" + + return True + + async def handle_event(self, event): + tags = ["caa-record"] + + r = await self.helpers.resolve_raw(event.host, type="caa") + + if r: + raw_results, errors = r + + for answer in raw_results: + s = answer.to_text().strip().replace('" "', "") + + # validate CAA record via regex so that we can determine what to do with it. + caa_match = caa_regex.search(s) + + if caa_match and caa_match.group("flags") and caa_match.group("property") and caa_match.group("text"): + # it's legit. 
+ if caa_match.group("property").lower() == "iodef": + if self._emails: + for match in email_regex.finditer(caa_match.group("text")): + start, end = match.span() + email = caa_match.group("text")[start:end] + + await self.emit_event(email, "EMAIL_ADDRESS", tags=tags, parent=event) + + if self._urls: + for url_regex in url_regexes: + for match in url_regex.finditer(caa_match.group("text")): + start, end = match.span() + url = caa_match.group("text")[start:end].strip('"').strip() + + await self.emit_event(url, "URL_UNVERIFIED", tags=tags, parent=event) + + elif caa_match.group("property").lower().startswith("issue"): + if self._dns_names: + for match in dns_name_extraction_regex.finditer(caa_match.group("text")): + start, end = match.span() + name = caa_match.group("text")[start:end] + + await self.emit_event(name, "DNS_NAME", tags=tags, parent=event) + + +# EOF diff --git a/bbot/modules/dnscommonsrv.py b/bbot/modules/dnscommonsrv.py index c54e2a25da..f6dc263cb6 100644 --- a/bbot/modules/dnscommonsrv.py +++ b/bbot/modules/dnscommonsrv.py @@ -1,108 +1,35 @@ -from bbot.modules.base import BaseModule +from bbot.core.helpers.dns.helpers import common_srvs +from bbot.modules.templates.subdomain_enum import subdomain_enum -common_srvs = [ - # Micro$oft - "_ldap._tcp.dc._msdcs", - "_ldap._tcp.gc._msdcs", - "_ldap._tcp.pdc._msdcs", - "_ldap._tcp", - "_ldap._tcp.ForestDNSZones", - "_gc._msdcs", - "_kpasswd._tcp", - "_kpasswd._udp", - "_kerberos._tcp.dc._msdcs", - "_kerberos.tcp.dc._msdcs", - "_kerberos-master._tcp", - "_kerberos-master._udp", - "_kerberos._tcp", - "_kerberos._udp", - "_autodiscover._tcp", - # NTP - "_ntp._udp", - # mDNS - "_nntp._tcp", - # email - "_imap._tcp", - "_imap.tcp", - "_imaps._tcp", - "_pop3._tcp", - "_pop3s._tcp", - "_smtp._tcp", - # MailEnable - "_caldav._tcp", - "_caldavs._tcp", - "_carddav._tcp", - "_carddavs._tcp", - # STUN - "_stun._tcp", - "_stun._udp", - "_stuns._tcp", - "_turn._tcp", - "_turn._udp", - "_turns._tcp", - # SIP - "_h323be._tcp", - "_h323be._udp", - "_h323cs._tcp", - "_h323cs._udp", - "_h323ls._tcp", - "_h323ls._udp", - "_sip._tcp", - "_sip._tls", - "_sip._udp", - "_sipfederationtls._tcp", - "_sipinternal._tcp", - "_sipinternaltls._tcp", - "_sips._tcp", - # misc - "_aix._tcp", - "_certificates._tcp", - "_cmp._tcp", - "_crl._tcp", - "_crls._tcp", - "_finger._tcp", - "_ftp._tcp", - "_gc._tcp", - "_hkp._tcp", - "_hkps._tcp", - "_http._tcp", - "_https._tcp", - "_jabber-client._tcp", - "_jabber-client._udp", - "_jabber._tcp", - "_jabber._udp", - "_ocsp._tcp", - "_pgpkeys._tcp", - "_pgprevokations._tcp", - "_PKIXREP._tcp", - "_submission._tcp", - "_svcp._tcp", - "_telnet._tcp", - "_test._tcp", - "_whois._tcp", - "_x-puppet-ca._tcp", - "_x-puppet._tcp", - "_xmpp-client._tcp", - "_xmpp-client._udp", - "_xmpp-server._tcp", - "_xmpp-server._udp", -] - -class dnscommonsrv(BaseModule): +class dnscommonsrv(subdomain_enum): watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] - flags = ["subdomain-enum", "passive", "safe"] - meta = {"description": "Check for common SRV records"} + flags = ["subdomain-enum", "active", "safe"] + meta = {"description": "Check for common SRV records", "created_date": "2022-05-15", "author": "@TheTechromancer"} + dedup_strategy = "lowest_parent" + + options = {"max_depth": 2} + options_desc = {"max_depth": "The maximum subdomain depth to brute-force SRV records"} + + async def setup(self): + self.max_subdomain_depth = self.config.get("max_depth", 2) + self.num_srvs = len(common_srvs) + return True - def filter_event(self, event): 
- is_wildcard, _ = self.helpers.is_wildcard(event.host) - if is_wildcard: - return False + async def filter_event(self, event): + subdomain_depth = self.helpers.subdomain_depth(event.host) + if subdomain_depth > self.max_subdomain_depth: + return False, f"its subdomain depth ({subdomain_depth}) exceeds max_depth={self.max_subdomain_depth}" return True - def handle_event(self, event): - queries = [event.data] + [f"{srv}.{event.data}" for srv in common_srvs] - for query, results in self.helpers.resolve_batch(queries, type="srv"): - if results: - self.emit_event(query, "DNS_NAME", tags=["srv_record"], source=event) + async def handle_event(self, event): + query = self.make_query(event) + self.verbose(f'Brute-forcing {self.num_srvs:,} SRV records for "{query}"') + for hostname in await self.helpers.dns.brute(self, query, common_srvs, type="SRV"): + await self.emit_event( + hostname, + "DNS_NAME", + parent=event, + context=f'{{module}} tried {self.num_srvs:,} common SRV records against "{query}" and found {{event.type}}: {{event.data}}', + ) diff --git a/bbot/modules/dnsdumpster.py b/bbot/modules/dnsdumpster.py index e40bf2030c..5c0ae29041 100644 --- a/bbot/modules/dnsdumpster.py +++ b/bbot/modules/dnsdumpster.py @@ -1,33 +1,39 @@ import re -from bs4 import BeautifulSoup -from .crobat import crobat +from bbot.modules.templates.subdomain_enum import subdomain_enum -class dnsdumpster(crobat): +class dnsdumpster(subdomain_enum): watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] flags = ["subdomain-enum", "passive", "safe"] - meta = {"description": "Query dnsdumpster for subdomains"} - - deps_pip = ["beautifulsoup4", "lxml"] + meta = { + "description": "Query dnsdumpster for subdomains", + "created_date": "2022-03-12", + "author": "@TheTechromancer", + } base_url = "https://dnsdumpster.com" - def query(self, domain): + async def query(self, domain): ret = [] # first, get the CSRF tokens - res1 = self.helpers.request(self.base_url) + res1 = await self.api_request(self.base_url) status_code = getattr(res1, "status_code", 0) if status_code in [429]: - self.warning(f'Too many requests "{status_code}"') + self.verbose(f'Too many requests "{status_code}"') return ret elif status_code not in [200]: - self.warning(f'Bad response code "{status_code}" from DNSDumpster') + self.verbose(f'Bad response code "{status_code}" from DNSDumpster') return ret else: self.debug(f'Valid response code "{status_code}" from DNSDumpster') - html = BeautifulSoup(res1.content, features="lxml") + + html = self.helpers.beautifulsoup(res1.content, "html.parser") + if html is False: + self.verbose("BeautifulSoup returned False") + return ret + csrftoken = None csrfmiddlewaretoken = None try: @@ -35,7 +41,7 @@ def query(self, domain): try: k, v = cookie.split("=", 1) except ValueError: - self.warning("Error retrieving cookie") + self.verbose("Error retrieving cookie") return ret if k == "csrftoken": csrftoken = str(v) @@ -56,7 +62,7 @@ def query(self, domain): # Otherwise, do the needful subdomains = set() - res2 = self.helpers.request( + res2 = await self.api_request( f"{self.base_url}/", method="POST", cookies={"csrftoken": csrftoken}, @@ -72,10 +78,12 @@ def query(self, domain): ) status_code = getattr(res2, "status_code", 0) if status_code not in [200]: - self.warning(f'Bad response code "{status_code}" from DNSDumpster') + self.verbose(f'Bad response code "{status_code}" from DNSDumpster') + return ret + html = self.helpers.beautifulsoup(res2.content, "html.parser") + if html is False: + self.verbose("BeautifulSoup 
returned False") return ret - - html = BeautifulSoup(res2.content, features="lxml") escaped_domain = re.escape(domain) match_pattern = re.compile(r"^[\w\.-]+\." + escaped_domain + r"$") for subdomain in html.findAll(text=match_pattern): diff --git a/bbot/modules/dnstlsrpt.py b/bbot/modules/dnstlsrpt.py new file mode 100644 index 0000000000..4232cc921f --- /dev/null +++ b/bbot/modules/dnstlsrpt.py @@ -0,0 +1,144 @@ +# dnstlsrpt.py +# +# Checks for and parses common TLS-RPT TXT records, e.g. _smtp._tls.target.domain +# +# TLS-RPT policies may contain email addresses or URL's for reporting destinations, typically the email addresses are software processed inboxes, but they may also be to individual humans or team inboxes. +# +# The domain portion of any email address or URL is also passively checked and added as appropriate, for additional inspection by other modules. +# +# Example records, +# _smtp._tls.example.com TXT "v=TLSRPTv1;rua=https://tlsrpt.azurewebsites.net/report" +# _smtp._tls.example.net TXT "v=TLSRPTv1; rua=mailto:sts-reports@example.net;" +# +# TODO: extract %{UNIQUE_ID}% from hosted services as ORG_STUB ? +# e.g. %{UNIQUE_ID}%@tlsrpt.hosted.service.provider is usually a tenant specific ID. +# e.g. tlsrpt@%{UNIQUE_ID}%.hosted.service.provider is usually a tenant specific ID. + +from bbot.modules.base import BaseModule +from bbot.core.helpers.dns.helpers import service_record + +import re + +from bbot.core.helpers.regexes import email_regex, url_regexes + +_tlsrpt_regex = r"^v=(?PTLSRPTv[0-9]+); *(?P.*)$" +tlsrpt_regex = re.compile(_tlsrpt_regex, re.I) + +_tlsrpt_kvp_regex = r"(?P\w+)=(?P[^;]+);*" +tlsrpt_kvp_regex = re.compile(_tlsrpt_kvp_regex) + +_csul = r"(?P[^, ]+)" +csul = re.compile(_csul) + + +class dnstlsrpt(BaseModule): + watched_events = ["DNS_NAME"] + produced_events = ["EMAIL_ADDRESS", "URL_UNVERIFIED", "RAW_DNS_RECORD"] + flags = ["subdomain-enum", "cloud-enum", "email-enum", "passive", "safe"] + meta = { + "description": "Check for TLS-RPT records", + "author": "@colin-stubbs", + "created_date": "2024-07-26", + } + options = { + "emit_emails": True, + "emit_raw_dns_records": False, + "emit_urls": True, + "emit_vulnerabilities": True, + } + options_desc = { + "emit_emails": "Emit EMAIL_ADDRESS events", + "emit_raw_dns_records": "Emit RAW_DNS_RECORD events", + "emit_urls": "Emit URL_UNVERIFIED events", + "emit_vulnerabilities": "Emit VULNERABILITY events", + } + + async def setup(self): + self.emit_emails = self.config.get("emit_emails", True) + self.emit_raw_dns_records = self.config.get("emit_raw_dns_records", False) + self.emit_urls = self.config.get("emit_urls", True) + self.emit_vulnerabilities = self.config.get("emit_vulnerabilities", True) + return await super().setup() + + def _incoming_dedup_hash(self, event): + # dedupe by parent + parent_domain = self.helpers.parent_domain(event.data) + return hash(parent_domain), "already processed parent domain" + + async def filter_event(self, event): + if "_wildcard" in str(event.host).split("."): + return False, "event is wildcard" + + # there's no value in inspecting service records + if service_record(event.host) is True: + return False, "service record detected" + + return True + + async def handle_event(self, event): + rdtype = "TXT" + tags = ["tlsrpt-record"] + hostname = f"_smtp._tls.{event.host}" + + r = await self.helpers.resolve_raw(hostname, type=rdtype) + + if r: + raw_results, errors = r + for answer in raw_results: + if self.emit_raw_dns_records: + await self.emit_event( + {"host": hostname, "type": 
rdtype, "answer": answer.to_text()}, + "RAW_DNS_RECORD", + parent=event, + tags=tags.append(f"{rdtype.lower()}-record"), + context=f"{rdtype} lookup on {hostname} produced {{event.type}}", + ) + + # we need to fix TXT data that may have been split across two different rdata's + # e.g. we will get a single string, but within that string we may have two parts such as: + # answer = '"part 1 that was really long" "part 2 that did not fit in part 1"' + # NOTE: the leading and trailing double quotes are essential as part of a raw DNS TXT record, or another record type that contains a free form text string as a component. + s = answer.to_text().strip('"').replace('" "', "") + + # validate TLSRPT record, tag appropriately + tlsrpt_match = tlsrpt_regex.search(s) + + if ( + tlsrpt_match + and tlsrpt_match.group("v") + and tlsrpt_match.group("kvps") + and tlsrpt_match.group("kvps") != "" + ): + for kvp_match in tlsrpt_kvp_regex.finditer(tlsrpt_match.group("kvps")): + key = kvp_match.group("k").lower() + + if key == "rua": + for csul_match in csul.finditer(kvp_match.group("v")): + if csul_match.group("uri"): + for match in email_regex.finditer(csul_match.group("uri")): + start, end = match.span() + email = csul_match.group("uri")[start:end] + + if self.emit_emails: + await self.emit_event( + email, + "EMAIL_ADDRESS", + tags=tags.append(f"tlsrpt-record-{key}"), + parent=event, + ) + + for url_regex in url_regexes: + for match in url_regex.finditer(csul_match.group("uri")): + start, end = match.span() + url = csul_match.group("uri")[start:end] + + if self.emit_urls: + await self.emit_event( + url, + "URL_UNVERIFIED", + tags=tags.append(f"tlsrpt-record-{key}"), + parent=event, + ) + + +# EOF diff --git a/bbot/modules/dnszonetransfer.py b/bbot/modules/dnszonetransfer.py deleted file mode 100644 index cfda3b24e0..0000000000 --- a/bbot/modules/dnszonetransfer.py +++ /dev/null @@ -1,65 +0,0 @@ -import dns.zone -import dns.query - -from bbot.modules.base import BaseModule - - -class dnszonetransfer(BaseModule): - - flags = ["subdomain-enum", "active", "safe"] - watched_events = ["DNS_NAME"] - produced_events = ["DNS_NAME"] - meta = {"description": "Attempt DNS zone transfers"} - options = {"timeout": 10} - options_desc = {"timeout": "Max seconds to wait before timing out"} - max_event_handlers = 5 - suppress_dupes = False - - def setup(self): - self.timeout = self.config.get("timeout", 10) - return True - - def filter_event(self, event): - if any([x in event.tags for x in ("ns_record", "soa_record")]): - return True - return False - - def handle_event(self, event): - domain = event.data - self.debug("Finding nameservers with NS/SOA query") - nameservers = list(self.helpers.resolve(event.data, type=("NS", "SOA"))) - nameserver_ips = set() - for n in nameservers: - nameserver_ips.update(self.helpers.resolve(n)) - self.debug(f"Found {len(nameservers):} nameservers for domain {domain}") - for nameserver in nameserver_ips: - if self.scan.stopping: - break - try: - self.debug(f"Attempting zone transfer against {nameserver} for domain {domain}") - xfr_answer = dns.query.xfr(nameserver, domain, timeout=self.timeout, lifetime=self.timeout) - zone = dns.zone.from_xfr(xfr_answer) - except Exception as e: - self.debug(f"Error retrieving zone: {e}") - continue - self.hugesuccess(f"Successful zone transfer against {nameserver} for domain {domain}!") - finding_description = f"Successful DNS zone transfer against {nameserver} for {domain}" - self.emit_event({"host": str(event.host), "description": finding_description}, 
"FINDING", source=event) - for name, ttl, rdata in zone.iterate_rdatas(): - if str(name) == "@": - parent_data = domain - else: - parent_data = f"{name}.{domain}" - parent_event = self.make_event(parent_data, "DNS_NAME", event) - if not parent_event or parent_event == event: - parent_event = event - else: - self.emit_event(parent_event) - for rdtype, t in self.helpers.dns.extract_targets(rdata): - if not self.helpers.is_ip(t): - t = f"{t}.{domain}" - module = self.helpers.dns._get_dummy_module(rdtype) - child_event = self.scan.make_event(t, "DNS_NAME", parent_event, module=module) - self.emit_event(child_event) - else: - self.debug(f"No data returned by {nameserver} for domain {domain}") diff --git a/bbot/modules/docker_pull.py b/bbot/modules/docker_pull.py new file mode 100644 index 0000000000..ff9a9736a2 --- /dev/null +++ b/bbot/modules/docker_pull.py @@ -0,0 +1,201 @@ +import io +import json +import tarfile +from pathlib import Path +from bbot.modules.base import BaseModule + + +class docker_pull(BaseModule): + watched_events = ["CODE_REPOSITORY"] + produced_events = ["FILESYSTEM"] + flags = ["passive", "safe", "slow", "code-enum"] + meta = { + "description": "Download images from a docker repository", + "created_date": "2024-03-24", + "author": "@domwhewell-sage", + } + options = {"all_tags": False, "output_folder": ""} + options_desc = { + "all_tags": "Download all tags from each registry (Default False)", + "output_folder": "Folder to download docker repositories to", + } + + scope_distance_modifier = 2 + + async def setup(self): + self.headers = { + "Accept": ",".join( + [ + "application/vnd.docker.distribution.manifest.v2+json", + "application/vnd.docker.distribution.manifest.list.v2+json", + "application/vnd.docker.distribution.manifest.v1+json", + "application/vnd.oci.image.manifest.v1+json", + ] + ) + } + self.all_tags = self.config.get("all_tags", True) + output_folder = self.config.get("output_folder") + if output_folder: + self.output_dir = Path(output_folder) / "docker_images" + else: + self.output_dir = self.scan.home / "docker_images" + self.helpers.mkdir(self.output_dir) + return await super().setup() + + async def filter_event(self, event): + if event.type == "CODE_REPOSITORY": + if "docker" not in event.tags: + return False, "event is not a docker repository" + return True + + async def handle_event(self, event): + repo_url = event.data.get("url") + repo_path = await self.download_docker_repo(repo_url) + if repo_path: + self.verbose(f"Downloaded docker repository {repo_url} to {repo_path}") + codebase_event = self.make_event( + {"path": str(repo_path), "description": f"Docker image repository: {repo_url}"}, + "FILESYSTEM", + tags=["docker", "tarball"], + parent=event, + ) + if codebase_event: + await self.emit_event( + codebase_event, context=f"{{module}} downloaded Docker image to {{event.type}}: {repo_path}" + ) + + def get_registry_and_repository(self, repository_url): + """Function to get the registry and repository from a html repository URL.""" + if repository_url.startswith("https://hub.docker.com/r/"): + registry = "https://registry-1.docker.io" + repository = repository_url.replace("https://hub.docker.com/r/", "") + else: + repository = "/".join(repository_url.split("/")[-2:]) + registry = repository_url.replace(repository, "") + return registry, repository + + async def docker_api_request(self, url: str): + """Make a request to the URL if that fails try to obtain an authentication token and try again.""" + for _ in range(2): + response = await 
self.helpers.request(url, headers=self.headers, follow_redirects=True) + if response is not None and response.status_code != 401: + return response + try: + www_authenticate_headers = response.headers.get("www-authenticate", "") + realm = www_authenticate_headers.split('realm="')[1].split('"')[0] + service = www_authenticate_headers.split('service="')[1].split('"')[0] + scope = www_authenticate_headers.split('scope="')[1].split('"')[0] + except (KeyError, IndexError): + self.log.warning(f"Could not obtain realm, service or scope from {url}") + break + auth_url = f"{realm}?service={service}&scope={scope}" + auth_response = await self.helpers.request(auth_url) + if not auth_response: + self.log.warning(f"Could not obtain token from {auth_url}") + break + auth_json = auth_response.json() + token = auth_json["token"] + self.headers.update({"Authorization": f"Bearer {token}"}) + return None + + async def get_tags(self, registry, repository): + url = f"{registry}/v2/{repository}/tags/list" + r = await self.docker_api_request(url) + if r is None or r.status_code != 200: + self.log.warning(f"Could not retrieve all tags for {repository} assuming tag:latest only.") + self.log.debug(f"Response: {r}") + return ["latest"] + try: + tags = r.json().get("tags", ["latest"]) + self.debug(f"Tags for {repository}: {tags}") + if self.all_tags: + return tags + else: + if "latest" in tags: + return ["latest"] + else: + return [tags[-1]] + except (KeyError, IndexError): + self.log.warning(f"Could not retrieve tags for {repository}.") + return ["latest"] + + async def get_manifest(self, registry, repository, tag): + url = f"{registry}/v2/{repository}/manifests/{tag}" + r = await self.docker_api_request(url) + if r is None or r.status_code != 200: + self.log.warning(f"Could not retrieve manifest for {repository}:{tag}.") + self.log.debug(f"Response: {r}") + return {} + response_json = r.json() + if response_json.get("manifests", []): + for manifest in response_json["manifests"]: + if manifest["platform"]["os"] == "linux" and manifest["platform"]["architecture"] == "amd64": + return await self.get_manifest(registry, repository, manifest["digest"]) + return response_json + + async def get_layers(self, manifest): + schema_version = manifest.get("schemaVersion", 2) + if schema_version == 1: + return [l["blobSum"] for l in manifest.get("fsLayers", [])] + elif schema_version == 2: + return [l["digest"] for l in manifest.get("layers", [])] + else: + return [] + + async def download_blob(self, registry, repository, digest): + url = f"{registry}/v2/{repository}/blobs/{digest}" + r = await self.docker_api_request(url) + if r is None or r.status_code != 200: + return None + else: + return r.content + + async def create_local_manifest(self, config, repository, tag, layers): + manifest = [{"Config": config, "RepoTags": [f"{repository}:{tag}"], "Layers": layers}] + return json.dumps(manifest).encode() + + async def download_and_get_filename(self, registry, repository, digest): + if ":" not in digest: + return None, None + blob = await self.download_blob(registry, repository, digest) + hash_func = digest.split(":")[0] + digest = digest.split(":")[1] + filename = f"blobs/{hash_func}/{digest}" + return blob, filename + + async def write_file_to_tar(self, tar, filename, file_content): + if filename and file_content: + file_io = io.BytesIO(file_content) + file_info = tarfile.TarInfo(name=filename) + file_info.size = len(file_io.getvalue()) + file_io.seek(0) + tar.addfile(file_info, file_io) + + async def download_docker_repo(self, 
repository_url): + registry, repository = self.get_registry_and_repository(repository_url) + tags = await self.get_tags(registry, repository) + for tag in tags: + self.info(f"Downloading {repository}:{tag}") + tar_file = await self.download_and_write_to_tar(registry, repository, tag) + return tar_file + + async def download_and_write_to_tar(self, registry, repository, tag): + output_tar = self.output_dir / f"{repository.replace('/', '_')}_{tag}.tar" + with tarfile.open(output_tar, mode="w") as tar: + manifest = await self.get_manifest(registry, repository, tag) + config_file, config_filename = await self.download_and_get_filename( + registry, repository, manifest.get("config", {}).get("digest", "") + ) + await self.write_file_to_tar(tar, config_filename, config_file) + + layer_filenames = [] + layer_digests = await self.get_layers(manifest) + for i, layer_digest in enumerate(layer_digests): + self.verbose(f"Downloading layer {i + 1}/{len(layer_digests)} from {repository}:{tag}") + blob, layer_filename = await self.download_and_get_filename(registry, repository, layer_digest) + layer_filenames.append(layer_filename) + await self.write_file_to_tar(tar, layer_filename, blob) + + manifest_json = await self.create_local_manifest(config_filename, repository, tag, layer_filenames) + await self.write_file_to_tar(tar, "manifest.json", manifest_json) + return output_tar diff --git a/bbot/modules/dockerhub.py b/bbot/modules/dockerhub.py new file mode 100644 index 0000000000..91cdc64ea5 --- /dev/null +++ b/bbot/modules/dockerhub.py @@ -0,0 +1,89 @@ +from bbot.modules.base import BaseModule + + +class dockerhub(BaseModule): + watched_events = ["SOCIAL", "ORG_STUB"] + produced_events = ["SOCIAL", "CODE_REPOSITORY", "URL_UNVERIFIED"] + flags = ["passive", "safe", "code-enum"] + meta = { + "description": "Search for docker repositories of discovered orgs/usernames", + "created_date": "2024-03-12", + "author": "@domwhewell-sage", + } + + site_url = "https://hub.docker.com" + api_url = f"{site_url}/v2" + + scope_distance_modifier = 2 + + async def filter_event(self, event): + if event.type == "SOCIAL": + if event.data["platform"] != "docker": + return False, "platform is not docker" + return True + + async def handle_event(self, event): + if event.type == "ORG_STUB": + await self.handle_org_stub(event) + elif event.type == "SOCIAL": + await self.handle_social(event) + + async def handle_org_stub(self, event): + profile_name = event.data + # docker usernames are case sensitive, so if there are capitalizations we also try a lowercase variation + profiles_to_check = {profile_name, profile_name.lower()} + for p in profiles_to_check: + api_url = f"{self.api_url}/users/{p}" + api_result = await self.helpers.request(api_url, follow_redirects=True) + status_code = getattr(api_result, "status_code", 0) + if status_code == 200: + site_url = f"{self.site_url}/u/{p}" + # emit social event + await self.emit_event( + {"platform": "docker", "url": site_url, "profile_name": p}, + "SOCIAL", + parent=event, + context=f"{{module}} tried {event.type} {event.data} and found docker profile ({{event.type}}) at {p}", + ) + + async def handle_social(self, event): + username = event.data.get("profile_name", "") + if not username: + return + self.verbose(f"Searching for docker images belonging to {username}") + repos = await self.get_repos(username) + for repo in repos: + await self.emit_event( + {"url": repo}, + "CODE_REPOSITORY", + tags="docker", + parent=event, + context=f"{{module}} found docker image {{event.type}}: {repo}", + ) 
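For reference, the URL split performed by docker_pull.get_registry_and_repository() earlier in this patch behaves as follows on the two URL shapes it expects. The function below is a standalone reimplementation for illustration; behavior should match the method above:

# Worked example of the registry/repository split: hub URLs map to the
# registry-1.docker.io API, anything else is treated as <registry>/<namespace>/<image>.
def split_repo_url(repository_url):
    if repository_url.startswith("https://hub.docker.com/r/"):
        return "https://registry-1.docker.io", repository_url.replace("https://hub.docker.com/r/", "")
    repository = "/".join(repository_url.split("/")[-2:])
    return repository_url.replace(repository, ""), repository

assert split_repo_url("https://hub.docker.com/r/org/image") == ("https://registry-1.docker.io", "org/image")
assert split_repo_url("https://ghcr.io/org/image") == ("https://ghcr.io/", "org/image")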
+ + async def get_repos(self, username): + repos = [] + url = f"{self.api_url}/repositories/{username}?page_size=25&page=" + "{page}" + agen = self.api_page_iter(url, _json=False) + try: + async for r in agen: + if r is None: + break + status_code = getattr(r, "status_code", 0) + if status_code != 200: + break + try: + j = r.json() + except Exception as e: + self.warning(f"Failed to decode JSON for {r.url} (HTTP status: {status_code}): {e}") + break + if not j: + break + for item in j.get("results", []): + image_name = item.get("name", "") + namespace = item.get("namespace", "") + if image_name and namespace: + repos.append("https://hub.docker.com/r/" + namespace + "/" + image_name) + finally: + await agen.aclose() + return repos diff --git a/bbot/modules/dotnetnuke.py b/bbot/modules/dotnetnuke.py new file mode 100644 index 0000000000..7e8b4d3d4e --- /dev/null +++ b/bbot/modules/dotnetnuke.py @@ -0,0 +1,208 @@ +from bbot.errors import InteractshError +from bbot.modules.base import BaseModule + + +class dotnetnuke(BaseModule): + DNN_signatures_body = [ + " CODE_REPOSITORY) + repos = [] + if is_org: + if in_scope: + self.verbose(f"Searching for repos belonging to organization {user}") + repos = await self.query_org_repos(user) + else: + self.verbose(f"Organization {user} does not appear to be in-scope") + elif "github-org-member" in event.tags: + self.verbose(f"Searching for repos belonging to user {user}") + repos = await self.query_user_repos(user) + for repo_url in repos: + repo_event = self.make_event({"url": repo_url}, "CODE_REPOSITORY", tags="git", parent=event) + if not repo_event: + continue + await self.emit_event( + repo_event, + context=f"{{module}} listed repos for GitHub profile and discovered {{event.type}}: {repo_url}", + ) + + # find members from org (SOCIAL --> SOCIAL) + if is_org and self.include_members: + self.verbose(f"Searching for any members belonging to {user}") + org_members = await self.query_org_members(user) + for member in org_members: + member_url = f"https://github.com/{member}" + event_data = {"platform": "github", "profile_name": member, "url": member_url} + member_event = self.make_event(event_data, "SOCIAL", tags="github-org-member", parent=event) + if member_event: + await self.emit_event( + member_event, + context=f"{{module}} listed members of GitHub organization and discovered {{event.type}}: {member_url}", + ) + + # find valid orgs from stub (ORG_STUB --> SOCIAL) + elif event.type == "ORG_STUB": + user = event.data + self.verbose(f"Validating whether the organization {user} is within our scope...") + is_org, in_scope = await self.validate_org(user) + if "target" in event.tags: + in_scope = True + if not is_org or not in_scope: + self.verbose(f"Unable to validate that {user} is in-scope, skipping...") + return + + user_url = f"https://github.com/{user}" + event_data = {"platform": "github", "profile_name": user, "url": user_url} + github_org_event = self.make_event(event_data, "SOCIAL", tags="github-org", parent=event) + if github_org_event: + await self.emit_event( + github_org_event, + context=f'{{module}} tried "{user}" as GitHub profile and discovered {{event.type}}: {user_url}', + ) + + async def query_org_repos(self, query): + repos = [] + url = f"{self.base_url}/orgs/{self.helpers.quote(query)}/repos?per_page=100&page=" + "{page}" + agen = self.api_page_iter(url, _json=False) + try: + async for r in agen: + if r is None: + break + status_code = getattr(r, "status_code", 0) + if status_code == 403: + self.warning("Github is rate-limiting us (HTTP 
status: 403)") + break + if status_code != 200: + break + try: + j = r.json() + except Exception as e: + self.warning(f"Failed to decode JSON for {r.url} (HTTP status: {status_code}): {e}") + break + if not j: + break + for item in j: + html_url = item.get("html_url", "") + repos.append(html_url) + finally: + await agen.aclose() + return repos + + async def query_org_members(self, query): + members = [] + url = f"{self.base_url}/orgs/{self.helpers.quote(query)}/members?per_page=100&page=" + "{page}" + agen = self.api_page_iter(url, _json=False) + try: + async for r in agen: + if r is None: + break + status_code = getattr(r, "status_code", 0) + if status_code == 403: + self.warning("Github is rate-limiting us (HTTP status: 403)") + break + if status_code != 200: + break + try: + j = r.json() + except Exception as e: + self.warning(f"Failed to decode JSON for {r.url} (HTTP status: {status_code}): {e}") + break + if not j: + break + for item in j: + login = item.get("login", "") + members.append(login) + finally: + await agen.aclose() + return members + + async def query_user_repos(self, query): + repos = [] + url = f"{self.base_url}/users/{self.helpers.quote(query)}/repos?per_page=100&page=" + "{page}" + agen = self.api_page_iter(url, _json=False) + try: + async for r in agen: + if r is None: + break + status_code = getattr(r, "status_code", 0) + if status_code == 403: + self.warning("Github is rate-limiting us (HTTP status: 403)") + break + if status_code != 200: + break + try: + j = r.json() + except Exception as e: + self.warning(f"Failed to decode JSON for {r.url} (HTTP status: {status_code}): {e}") + break + if not j: + break + for item in j: + html_url = item.get("html_url", "") + repos.append(html_url) + finally: + await agen.aclose() + return repos + + async def validate_org(self, org): + is_org = False + in_scope = False + url = f"{self.base_url}/orgs/{org}" + r = await self.api_request(url) + if r is None: + return is_org, in_scope + status_code = getattr(r, "status_code", 0) + if status_code == 403: + self.warning("Github is rate-limiting us (HTTP status: 403)") + return is_org, in_scope + if status_code == 200: + is_org = True + in_scope_hosts = await self.scan.extract_in_scope_hostnames(getattr(r, "text", "")) + if in_scope_hosts: + self.verbose( + f'Found in-scope hostname(s): "{in_scope_hosts}" for github org: {org}, it appears to be in-scope' + ) + in_scope = True + return is_org, in_scope diff --git a/bbot/modules/github_workflows.py b/bbot/modules/github_workflows.py new file mode 100644 index 0000000000..6e683112cf --- /dev/null +++ b/bbot/modules/github_workflows.py @@ -0,0 +1,227 @@ +import zipfile +import fnmatch + +from bbot.modules.templates.github import github + + +class github_workflows(github): + watched_events = ["CODE_REPOSITORY"] + produced_events = ["FILESYSTEM"] + flags = ["passive", "safe", "code-enum"] + meta = { + "description": "Download a github repositories workflow logs and workflow artifacts", + "created_date": "2024-04-29", + "author": "@domwhewell-sage", + "auth_required": True, + } + options = {"api_key": "", "num_logs": 1} + options_desc = { + "api_key": "Github token", + "num_logs": "For each workflow fetch the last N successful runs logs (max 100)", + } + + scope_distance_modifier = 2 + + async def setup(self): + self.num_logs = int(self.config.get("num_logs", 1)) + if self.num_logs > 100: + self.log.error("num_logs option is capped at 100") + return False + self.output_dir = self.scan.home / "workflow_logs" + self.helpers.mkdir(self.output_dir) 
+ return await super().setup() + + def _api_response_is_success(self, r): + # we allow 404s because they're normal + return r.is_success or getattr(r, "status_code", 0) == 404 + + async def filter_event(self, event): + if event.type == "CODE_REPOSITORY": + if "git" not in event.tags and "github" not in event.data.get("url", ""): + return False, "event is not a git repository" + return True + + async def handle_event(self, event): + repo_url = event.data.get("url") + owner = repo_url.split("/")[-2] + repo = repo_url.split("/")[-1] + for workflow in await self.get_workflows(owner, repo): + workflow_name = workflow.get("name") + workflow_id = workflow.get("id") + self.log.debug(f"Looking up runs for {workflow_name} in {owner}/{repo}") + for run in await self.get_workflow_runs(owner, repo, workflow_id): + run_id = run.get("id") + workflow_url = f"https://github.com/{owner}/{repo}/actions/runs/{run_id}" + self.log.debug(f"Downloading logs for {workflow_name}/{run_id} in {owner}/{repo}") + for log in await self.download_run_logs(owner, repo, run_id): + logfile_event = self.make_event( + { + "path": str(log), + "description": f"Workflow run logs from {workflow_url}", + }, + "FILESYSTEM", + tags=["textfile"], + parent=event, + ) + await self.emit_event( + logfile_event, + context=f"{{module}} downloaded workflow run logs from {workflow_url} to {{event.type}}: {log}", + ) + artifacts = await self.get_run_artifacts(owner, repo, run_id) + if artifacts: + for artifact in artifacts: + artifact_id = artifact.get("id") + artifact_name = artifact.get("name") + expired = artifact.get("expired") + if not expired: + filepath = await self.download_run_artifacts(owner, repo, artifact_id, artifact_name) + if filepath: + artifact_event = self.make_event( + { + "path": str(filepath), + "description": f"Workflow run artifact from {workflow_url}", + }, + "FILESYSTEM", + tags=["zipfile"], + parent=event, + ) + await self.emit_event( + artifact_event, + context=f"{{module}} downloaded workflow run artifact from {workflow_url} to {{event.type}}: {filepath}", + ) + + async def get_workflows(self, owner, repo): + workflows = [] + url = f"{self.base_url}/repos/{owner}/{repo}/actions/workflows?per_page=100&page=" + "{page}" + agen = self.api_page_iter(url, _json=False) + try: + async for r in agen: + if r is None: + break + status_code = getattr(r, "status_code", 0) + if status_code == 403: + self.warning("Github is rate-limiting us (HTTP status: 403)") + break + if status_code != 200: + break + try: + j = r.json().get("workflows", []) + except Exception as e: + self.warning(f"Failed to decode JSON for {r.url} (HTTP status: {status_code}): {e}") + break + if not j: + break + for item in j: + workflows.append(item) + finally: + await agen.aclose() + return workflows + + async def get_workflow_runs(self, owner, repo, workflow_id): + runs = [] + url = f"{self.base_url}/repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs?status=success&per_page={self.num_logs}" + r = await self.api_request(url) + if r is None: + return runs + status_code = getattr(r, "status_code", 0) + if status_code == 403: + self.warning("Github is rate-limiting us (HTTP status: 403)") + return runs + if status_code != 200: + return runs + try: + j = r.json().get("workflow_runs", []) + except Exception as e: + self.warning(f"Failed to decode JSON for {r.url} (HTTP status: {status_code}): {e}") + return runs + if not j: + return runs + for item in j: + runs.append(item) + return runs + + async def download_run_logs(self, owner, repo, run_id): + folder 
= self.output_dir / owner / repo + self.helpers.mkdir(folder) + filename = f"run_{run_id}.zip" + file_destination = folder / filename + try: + await self.helpers.download( + f"{self.base_url}/repos/{owner}/{repo}/actions/runs/{run_id}/logs", + filename=file_destination, + headers=self.headers, + raise_error=True, + warn=False, + ) + self.info(f"Downloaded logs for {owner}/{repo}/{run_id} to {file_destination}") + except Exception as e: + file_destination = None + response = getattr(e, "response", None) + status_code = getattr(response, "status_code", 0) + if status_code == 403: + self.warning( + f"You must have the actions scope to download logs for {owner}/{repo}/{run_id} (status: {status_code})" + ) + else: + self.info( + f"The logs for {owner}/{repo}/{run_id} have expired and are no longer available (status: {status_code})" + ) + # Secrets are duplicated in the individual workflow steps so just extract the main log files from the top folder + if file_destination: + main_logs = [] + with zipfile.ZipFile(file_destination, "r") as logzip: + for name in logzip.namelist(): + if fnmatch.fnmatch(name, "*.txt") and "/" not in name: + logzip.extract(name, folder) + main_logs.append(folder / name) + return main_logs + else: + return [] + + async def get_run_artifacts(self, owner, repo, run_id): + artifacts = [] + url = f"{self.base_url}/repos/{owner}/{repo}/actions/runs/{run_id}/artifacts" + r = await self.api_request(url) + if r is None: + return artifacts + status_code = getattr(r, "status_code", 0) + if status_code == 403: + self.warning("Github is rate-limiting us (HTTP status: 403)") + return artifacts + if status_code != 200: + return artifacts + try: + j = r.json().get("artifacts", []) + except Exception as e: + self.warning(f"Failed to decode JSON for {r.url} (HTTP status: {status_code}): {e}") + return artifacts + if not j: + return artifacts + for item in j: + artifacts.append(item) + return artifacts + + async def download_run_artifacts(self, owner, repo, artifact_id, artifact_name): + folder = self.output_dir / owner / repo + self.helpers.mkdir(folder) + file_destination = folder / artifact_name + try: + await self.helpers.download( + f"{self.base_url}/repos/{owner}/{repo}/actions/artifacts/{artifact_id}/zip", + filename=file_destination, + headers=self.headers, + raise_error=True, + warn=False, + ) + self.info( + f"Downloaded workflow artifact {owner}/{repo}/{artifact_id}/{artifact_name} to {file_destination}" + ) + except Exception as e: + file_destination = None + response = getattr(e, "response", None) + status_code = getattr(response, "status_code", 0) + if status_code == 403: + self.warning( + f"You must have the actions scope to download run artifacts for {owner}/{repo}/{artifact_id} (status: {status_code})" + ) + return file_destination diff --git a/bbot/modules/gitlab.py b/bbot/modules/gitlab.py new file mode 100644 index 0000000000..e1ba3850ee --- /dev/null +++ b/bbot/modules/gitlab.py @@ -0,0 +1,141 @@ +from bbot.modules.base import BaseModule + + +class gitlab(BaseModule): + watched_events = ["HTTP_RESPONSE", "TECHNOLOGY", "SOCIAL"] + produced_events = ["TECHNOLOGY", "SOCIAL", "CODE_REPOSITORY", "FINDING"] + flags = ["active", "safe", "code-enum"] + meta = { + "description": "Detect GitLab instances and query them for repositories", + "created_date": "2024-03-11", + "author": "@TheTechromancer", + } + options = {"api_key": ""} + options_desc = {"api_key": "Gitlab access token"} + + scope_distance_modifier = 2 + + async def setup(self): + await self.require_api_key() + 
return True
+
+    async def filter_event(self, event):
+        # out-of-scope SOCIAL events are allowed, but HTTP responses must be within scope distance
+        if event.type == "HTTP_RESPONSE":
+            if event.scope_distance > self.scan.scope_search_distance:
+                return False, "event is out of scope distance"
+        elif event.type == "TECHNOLOGY":
+            if not event.data["technology"].lower().startswith("gitlab"):
+                return False, "technology is not gitlab"
+            if not self.helpers.is_ip(event.host) and self.helpers.tldextract(event.host).domain == "gitlab":
+                return False, "gitlab instance is not self-hosted"
+        elif event.type == "SOCIAL":
+            if event.data["platform"] != "gitlab":
+                return False, "platform is not gitlab"
+        return True
+
+    async def handle_event(self, event):
+        if event.type == "HTTP_RESPONSE":
+            await self.handle_http_response(event)
+        elif event.type == "TECHNOLOGY":
+            await self.handle_technology(event)
+        elif event.type == "SOCIAL":
+            await self.handle_social(event)
+
+    async def handle_http_response(self, event):
+        # identify gitlab instances from HTTP responses
+        # HTTP_RESPONSE --> TECHNOLOGY
+        # HTTP_RESPONSE --> FINDING
+        headers = event.data.get("header", {})
+        if "x_gitlab_meta" in headers:
+            url = event.parsed_url._replace(path="/").geturl()
+            await self.emit_event(
+                {"host": str(event.host), "technology": "GitLab", "url": url},
+                "TECHNOLOGY",
+                parent=event,
+                context=f"{{module}} detected {{event.type}}: GitLab at {url}",
+            )
+            description = f"GitLab server at {event.host}"
+            await self.emit_event(
+                {"host": str(event.host), "description": description},
+                "FINDING",
+                parent=event,
+                context=f"{{module}} detected {{event.type}}: {description}",
+            )
+
+    async def handle_technology(self, event):
+        # retrieve gitlab groups from gitlab instances
+        # TECHNOLOGY --> SOCIAL
+        # TECHNOLOGY --> URL
+        # TECHNOLOGY --> CODE_REPOSITORY
+        base_url = self.get_base_url(event)
+        projects_url = self.helpers.urljoin(base_url, "api/v4/projects?simple=true")
+        await self.handle_projects_url(projects_url, event)
+        groups_url = self.helpers.urljoin(base_url, "api/v4/groups?simple=true")
+        await self.handle_groups_url(groups_url, event)
+
+    async def handle_social(self, event):
+        # retrieve repositories from gitlab user
+        # SOCIAL --> CODE_REPOSITORY
+        # SOCIAL --> SOCIAL
+        username = event.data.get("profile_name", "")
+        if not username:
+            return
+        base_url = self.get_base_url(event)
+        urls = [
+            # user
+            self.helpers.urljoin(base_url, f"api/v4/users/{username}/projects?simple=true"),
+            # group
+            self.helpers.urljoin(base_url, f"api/v4/groups/{username}/projects?simple=true"),
+        ]
+        for url in urls:
+            await self.handle_projects_url(url, event)
+
+    async def handle_projects_url(self, projects_url, event):
+        for project in await self.gitlab_json_request(projects_url):
+            project_url = project.get("web_url", "")
+            if project_url:
+                code_event = self.make_event({"url": project_url}, "CODE_REPOSITORY", tags="git", parent=event)
+                await self.emit_event(
+                    code_event, context=f"{{module}} enumerated projects and found {{event.type}} at {project_url}"
+                )
+            namespace = project.get("namespace", {})
+            if namespace:
+                await self.handle_namespace(namespace, event)
+
+    async def handle_groups_url(self, groups_url, event):
+        for group in await self.gitlab_json_request(groups_url):
+            await self.handle_namespace(group, event)
+
+    async def gitlab_json_request(self, url):
+        response = await self.api_request(url)
+        if response is not None:
+            try:
+                json = response.json()
+            except Exception:
+                return []
+            if json and isinstance(json, list):
+                return json
+        return []
+
+    async
def handle_namespace(self, namespace, event): + namespace_name = namespace.get("path", "") + namespace_url = namespace.get("web_url", "") + namespace_path = namespace.get("full_path", "") + if namespace_name and namespace_url and namespace_path: + namespace_url = self.helpers.parse_url(namespace_url)._replace(path=f"/{namespace_path}").geturl() + social_event = self.make_event( + {"platform": "gitlab", "profile_name": namespace_path, "url": namespace_url}, + "SOCIAL", + parent=event, + ) + await self.emit_event( + social_event, + context=f'{{module}} found GitLab namespace ({{event.type}}) "{namespace_name}" at {namespace_url}', + ) + + def get_base_url(self, event): + base_url = event.data.get("url", "") + if not base_url: + base_url = f"https://{event.host}" + return self.helpers.urlparse(base_url)._replace(path="/").geturl() diff --git a/bbot/modules/google_playstore.py b/bbot/modules/google_playstore.py new file mode 100644 index 0000000000..171bcb9b13 --- /dev/null +++ b/bbot/modules/google_playstore.py @@ -0,0 +1,93 @@ +from bbot.modules.base import BaseModule + + +class google_playstore(BaseModule): + watched_events = ["ORG_STUB", "CODE_REPOSITORY"] + produced_events = ["MOBILE_APP"] + flags = ["passive", "safe", "code-enum"] + meta = { + "description": "Search for android applications on play.google.com", + "created_date": "2024-10-08", + "author": "@domwhewell-sage", + } + + base_url = "https://play.google.com" + + async def setup(self): + self.app_link_regex = self.helpers.re.compile(r"/store/apps/details\?id=([a-zA-Z0-9._-]+)") + return True + + async def filter_event(self, event): + if event.type == "CODE_REPOSITORY": + if "android" not in event.tags: + return False, "event is not an android repository" + return True + + async def handle_event(self, event): + if event.type == "CODE_REPOSITORY": + await self.handle_url(event) + elif event.type == "ORG_STUB": + await self.handle_org_stub(event) + + async def handle_url(self, event): + repo_url = event.data.get("url") + app_id = repo_url.split("id=")[1].split("&")[0] + await self.emit_event( + {"id": app_id, "url": repo_url}, + "MOBILE_APP", + tags="android", + parent=event, + context=f'{{module}} extracted the mobile app name "{app_id}" from: {repo_url}', + ) + + async def handle_org_stub(self, event): + org_name = event.data + self.verbose(f"Searching for any android applications for {org_name}") + for apk_name in await self.query(org_name): + valid_apk = await self.validate_apk(apk_name) + if valid_apk: + self.verbose(f"Got {apk_name} from playstore") + await self.emit_event( + {"id": apk_name, "url": f"{self.base_url}/store/apps/details?id={apk_name}"}, + "MOBILE_APP", + tags="android", + parent=event, + context=f'{{module}} searched play.google.com for apps belonging to "{org_name}" and found "{apk_name}" to be in scope', + ) + else: + self.debug(f"Got {apk_name} from playstore app details does not contain any in-scope URLs or Emails") + + async def query(self, query): + app_links = [] + url = f"{self.base_url}/store/search?q={self.helpers.quote(query)}&c=apps" + r = await self.helpers.request(url) + if r is None: + return app_links + status_code = getattr(r, "status_code", 0) + try: + html_content = r.content.decode("utf-8") + # Use regex to find all app links + app_links = await self.helpers.re.findall(self.app_link_regex, html_content) + except Exception as e: + self.warning(f"Failed to parse html response from {r.url} (HTTP status: {status_code}): {e}") + return app_links + return app_links + + async def 
validate_apk(self, apk_name): + """ + Check the app details page the "App support" section will include URLs or Emails to the app developer + """ + in_scope = False + url = f"{self.base_url}/store/apps/details?id={apk_name}" + r = await self.helpers.request(url) + if r is None: + return in_scope + status_code = getattr(r, "status_code", 0) + if status_code == 200: + html = r.text + in_scope_hosts = await self.scan.extract_in_scope_hostnames(html) + if in_scope_hosts: + in_scope = True + else: + self.warning(f"Failed to fetch {url} (HTTP status: {status_code})") + return in_scope diff --git a/bbot/modules/gowitness.py b/bbot/modules/gowitness.py index f3a7853c97..6e3b2e19eb 100644 --- a/bbot/modules/gowitness.py +++ b/bbot/modules/gowitness.py @@ -1,3 +1,7 @@ +import os +import asyncio +import aiosqlite +import multiprocessing from pathlib import Path from contextlib import suppress from shutil import copyfile, copymode @@ -6,57 +10,93 @@ class gowitness(BaseModule): - watched_events = ["URL"] - produced_events = ["SCREENSHOT"] - flags = ["active", "web", "safe"] - meta = {"description": "Take screenshots of webpages"} - batch_size = 100 + watched_events = ["URL", "SOCIAL"] + produced_events = ["WEBSCREENSHOT", "URL", "URL_UNVERIFIED", "TECHNOLOGY"] + flags = ["active", "safe", "web-screenshots"] + meta = {"description": "Take screenshots of webpages", "created_date": "2022-07-08", "author": "@TheTechromancer"} options = { - "version": "2.4.0", - "threads": 4, + "version": "2.4.2", + "threads": 0, "timeout": 10, "resolution_x": 1440, "resolution_y": 900, "output_path": "", + "social": False, + "idle_timeout": 1800, } options_desc = { - "version": "gowitness version", - "threads": "threads used to run", - "timeout": "preflight check timeout", - "resolution_x": "screenshot resolution x", - "resolution_y": "screenshot resolution y", - "output_path": "where to save screenshots", + "version": "Gowitness version", + "threads": "How many gowitness threads to spawn (default is number of CPUs x 2)", + "timeout": "Preflight check timeout", + "resolution_x": "Screenshot resolution x", + "resolution_y": "Screenshot resolution y", + "output_path": "Where to save screenshots", + "social": "Whether to screenshot social media webpages", + "idle_timeout": "Skip the current gowitness batch if it stalls for longer than this many seconds", } - deps_apt = ["chromium-browser"] + deps_common = ["chromium"] + deps_pip = ["aiosqlite"] deps_ansible = [ { "name": "Download gowitness", "get_url": { - "url": "https://github.com/sensepost/gowitness/releases/download/{BBOT_MODULES_GOWITNESS_VERSION}/gowitness-{BBOT_MODULES_GOWITNESS_VERSION}-linux-amd64", - "dest": "{BBOT_TOOLS}/gowitness", + "url": "https://github.com/sensepost/gowitness/releases/download/#{BBOT_MODULES_GOWITNESS_VERSION}/gowitness-#{BBOT_MODULES_GOWITNESS_VERSION}-#{BBOT_OS_PLATFORM}-#{BBOT_CPU_ARCH}", + "dest": "#{BBOT_TOOLS}/gowitness", "mode": "755", }, - } + }, ] - # visit up to and including the scan's configured search distance - # this is one hop further than the default - scope_distance_modifier = 0 + _batch_size = 100 + # gowitness accepts SOCIAL events up to distance 2, otherwise it is in-scope-only + scope_distance_modifier = 2 - def setup(self): + async def setup(self): + num_cpus = multiprocessing.cpu_count() + default_thread_count = min(20, num_cpus * 2) self.timeout = self.config.get("timeout", 10) - self.threads = self.config.get("threads", 4) - self.proxy = self.scan.config.get("http_proxy", "") + self.idle_timeout = 
self.config.get("idle_timeout", 1800) + self.threads = self.config.get("threads", 0) + if not self.threads: + self.threads = default_thread_count + self.proxy = self.scan.web_config.get("http_proxy", "") self.resolution_x = self.config.get("resolution_x") self.resolution_y = self.config.get("resolution_y") + self.visit_social = self.config.get("social", True) output_path = self.config.get("output_path") if output_path: self.base_path = Path(output_path) / "gowitness" else: self.base_path = self.scan.home / "gowitness" + self.chrome_path = None + custom_chrome_path = self.helpers.tools_dir / "chrome-linux" / "chrome" + if custom_chrome_path.is_file(): + self.chrome_path = custom_chrome_path + + # fix ubuntu-specific sandbox bug + chrome_devel_sandbox = self.helpers.tools_dir / "chrome-linux" / "chrome_sandbox" + if chrome_devel_sandbox.is_file(): + os.environ["CHROME_DEVEL_SANDBOX"] = str(chrome_devel_sandbox) + + # make sure we have a working chrome install + chrome_test_pass = False + for binary in ("chrome", "chromium", "chromium-browser", custom_chrome_path): + binary_path = self.helpers.which(binary) + if binary_path and Path(binary_path).is_file(): + chrome_test_proc = await self.run_process([binary_path, "--version"]) + if getattr(chrome_test_proc, "returncode", 1) == 0: + self.verbose(f"Found chrome executable at {binary_path}") + chrome_test_pass = True + break + if not chrome_test_pass: + return False, "Failed to set up Google chrome. Please install manually or try again with --force-deps." + self.db_path = self.base_path / "gowitness.sqlite3" self.screenshot_path = self.base_path / "screenshots" self.command = self.construct_command() self.prepped = False + self.screenshots_taken = {} + self.connections_logged = set() + self.technologies_found = set() return True def prep(self): @@ -68,21 +108,98 @@ def prep(self): copymode(self.helpers.tools_dir / "gowitness", self.base_path / "gowitness") self.prepped = True - def filter_event(self, event): + async def filter_event(self, event): # Ignore URLs that are redirects if any(t.startswith("status-30") for t in event.tags): - return False + return False, "URL is a redirect" + # ignore events from self + if event.type == "URL" and event.module == self: + return False, "event is from self" + if event.type == "SOCIAL": + if not self.visit_social: + return False, "visit_social=False" + else: + # Accept out-of-scope SOCIAL pages, but not URLs + if event.scope_distance > 0: + return False, "event is not in-scope" return True - def handle_batch(self, *events): + async def handle_batch(self, *events): self.prep() - stdin = "\n".join([str(e.data) for e in events]) - for line in self.helpers.run_live(self.command, input=stdin): - self.debug(line) + event_dict = {} + for e in events: + key = e.data + if e.type == "SOCIAL": + key = e.data["url"] + event_dict[key] = e + stdin = "\n".join(list(event_dict)) + + try: + async for line in self.run_process_live(self.command, input=stdin, idle_timeout=self.idle_timeout): + self.debug(line) + except asyncio.exceptions.TimeoutError: + urls_str = ",".join(event_dict) + self.warning(f"Gowitness timed out while visiting the following URLs: {urls_str}", trace=False) + return + + # emit web screenshots + new_screenshots = await self.get_new_screenshots() + for filename, screenshot in new_screenshots.items(): + url = screenshot["url"] + final_url = screenshot["final_url"] + filename = self.screenshot_path / screenshot["filename"] + filename = filename.relative_to(self.scan.home) + # NOTE: this prevents long 
filenames from causing problems in BBOT, but gowitness will still fail to save it. + filename = self.helpers.truncate_filename(filename) + webscreenshot_data = {"path": str(filename), "url": final_url} + parent_event = event_dict[url] + await self.emit_event( + webscreenshot_data, + "WEBSCREENSHOT", + parent=parent_event, + context=f"{{module}} visited {final_url} and saved {{event.type}} to {filename}", + ) + + # emit URLs + new_network_logs = await self.get_new_network_logs() + for url, row in new_network_logs.items(): + ip = row["ip"] + status_code = row["status_code"] + tags = [f"status-{status_code}", f"ip-{ip}", "spider-danger"] + + _id = row["url_id"] + parent_url = self.screenshots_taken[_id] + parent_event = event_dict[parent_url] + if url and url.startswith("http"): + await self.emit_event( + url, + "URL_UNVERIFIED", + parent=parent_event, + tags=tags, + context=f"{{module}} visited {{event.type}}: {url}", + ) + + # emit technologies + new_technologies = await self.get_new_technologies() + for row in new_technologies.values(): + parent_id = row["url_id"] + parent_url = self.screenshots_taken[parent_id] + parent_event = event_dict[parent_url] + technology = row["value"] + tech_data = {"technology": technology, "url": parent_url, "host": str(parent_event.host)} + await self.emit_event( + tech_data, + "TECHNOLOGY", + parent=parent_event, + context=f"{{module}} visited {parent_url} and found {{event.type}}: {technology}", + ) def construct_command(self): # base executable command = ["gowitness"] + # chrome path + if self.chrome_path is not None: + command += ["--chrome-path", str(self.chrome_path)] # db path command += ["--db-path", str(self.db_path)] # screenshot path @@ -99,4 +216,65 @@ def construct_command(self): command += ["file", "-f", "-"] # threads command += ["--threads", str(self.threads)] + # timeout + command += ["--timeout", str(self.timeout)] return command + + async def get_new_screenshots(self): + screenshots = {} + if self.db_path.is_file(): + async with aiosqlite.connect(str(self.db_path)) as con: + con.row_factory = aiosqlite.Row + con.text_factory = self.helpers.smart_decode + async with con.execute("SELECT * FROM urls") as cur: + async for row in cur: + row = dict(row) + _id = row["id"] + if _id not in self.screenshots_taken: + self.screenshots_taken[_id] = row["url"] + screenshots[_id] = row + return screenshots + + async def get_new_network_logs(self): + network_logs = {} + if self.db_path.is_file(): + async with aiosqlite.connect(str(self.db_path)) as con: + con.row_factory = aiosqlite.Row + async with con.execute("SELECT * FROM network_logs") as cur: + async for row in cur: + row = dict(row) + url = row["final_url"] + if url not in self.connections_logged: + self.connections_logged.add(url) + network_logs[url] = row + return network_logs + + async def get_new_technologies(self): + technologies = {} + if self.db_path.is_file(): + async with aiosqlite.connect(str(self.db_path)) as con: + con.row_factory = aiosqlite.Row + async with con.execute("SELECT * FROM technologies") as cur: + async for row in cur: + _id = row["id"] + if _id not in self.technologies_found: + self.technologies_found.add(_id) + row = dict(row) + technologies[_id] = row + return technologies + + async def cur_execute(self, cur, query): + try: + return await cur.execute(query) + except aiosqlite.OperationalError as e: + self.warning(f"Error executing query: {query}: {e}") + return [] + + async def report(self): + if self.screenshots_taken: + self.success(f"{len(self.screenshots_taken):,} web 
screenshots captured. To view:") + self.success(" - Start gowitness") + self.success(f" - cd {self.base_path} && ./gowitness server") + self.success(" - Browse to http://localhost:7171") + else: + self.info("No web screenshots captured") diff --git a/bbot/modules/hackertarget.py b/bbot/modules/hackertarget.py index 05bf6828cf..b42352d473 100644 --- a/bbot/modules/hackertarget.py +++ b/bbot/modules/hackertarget.py @@ -1,19 +1,31 @@ -from bbot.modules.crobat import crobat +from bbot.modules.templates.subdomain_enum import subdomain_enum -class hackertarget(crobat): +class hackertarget(subdomain_enum): watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] flags = ["subdomain-enum", "passive", "safe"] - meta = {"description": "Query the hackertarget.com API for subdomains"} + meta = { + "description": "Query the hackertarget.com API for subdomains", + "created_date": "2022-07-28", + "author": "@TheTechromancer", + } base_url = "https://api.hackertarget.com" - def request_url(self, query): - return self.helpers.request(f"{self.base_url}/hostsearch/?q={self.helpers.quote(query)}") + async def request_url(self, query): + url = f"{self.base_url}/hostsearch/?q={self.helpers.quote(query)}" + response = await self.api_request(url) + return response - def parse_results(self, r, query): + async def parse_results(self, r, query): + results = set() for line in r.text.splitlines(): host = line.split(",")[0] - if self.helpers.validators.validate_host(host): - yield host + try: + self.helpers.validators.validate_host(host) + results.add(host) + except ValueError: + self.debug(f"Error validating API result: {line}") + continue + return results diff --git a/bbot/modules/header_brute.py b/bbot/modules/header_brute.py deleted file mode 100644 index c3f886674d..0000000000 --- a/bbot/modules/header_brute.py +++ /dev/null @@ -1,136 +0,0 @@ -from bbot.modules.base import BaseModule -from bbot.core.errors import HttpCompareError, ScanCancelledError - - -class header_brute(BaseModule): - - watched_events = ["URL"] - produced_events = ["FINDING"] - flags = ["brute-force", "active", "aggressive", "slow", "web"] - meta = {"description": "Check for common HTTP header parameters"} - options = {"wordlist": "https://raw.githubusercontent.com/PortSwigger/param-miner/master/resources/headers"} - options_desc = {"wordlist": "Define the wordlist to be used to derive headers"} - scanned_hosts = [] - header_blacklist = [ - "content-length", - "expect", - "accept-encoding", - "transfer-encoding", - "connection", - "if-match", - "if-modified-since", - "if-none-match", - "if-unmodified-since", - ] - max_event_handlers = 12 - in_scope_only = True - compare_mode = "header" - - def setup(self): - - wordlist_url = self.config.get("wordlist", "") - self.wordlist = self.helpers.wordlist(wordlist_url) - return True - - def handle_event(self, event): - - url = event.data - try: - compare_helper = self.helpers.http_compare(url) - except HttpCompareError as e: - self.debug(e) - return - batch_size = self.count_test(url) - if batch_size == None or batch_size <= 0: - self.debug(f"Failed to get baseline max {self.compare_mode} count, aborting") - return - self.debug(f"Resolved batch_size at {str(batch_size)}") - - if compare_helper.canary_check(url, mode=self.compare_mode) == False: - self.warning(f'Aborting "{url}" due to failed canary check') - return - - fl = [h.strip().lower() for h in self.helpers.read_file(self.wordlist)] - - wordlist_cleaned = list(filter(self.clean_list, fl)) - - results = set() - abort_threshold = 25 - try: - for 
group in self.helpers.grouper(wordlist_cleaned, batch_size): - for result, reason, reflection in self.binary_search(compare_helper, url, group): - results.add((result, reason, reflection)) - if len(results) >= abort_threshold: - self.warning( - f"Abort threshold ({abort_threshold}) reached, too many {self.compare_mode}s found" - ) - results.clear() - assert False - except ScanCancelledError: - return - except AssertionError: - pass - - for result, reason, reflection in results: - tags = [] - if reflection: - tags = ["http_reflection"] - description = ( - f"[{self.compare_mode.upper()}_BRUTE] {self.compare_mode.capitalize()}: [{result}] Reason: [{reason}]" - ) - self.emit_event( - {"host": str(event.host), "url": url, "description": description}, - "FINDING", - event, - tags=tags, - ) - - def count_test(self, url): - - baseline = self.helpers.request(url) - if baseline is None: - return - if str(baseline.status_code)[0] in ("4", "5"): - return - for count, args, kwargs in self.gen_count_args(url): - r = self.helpers.request(*args, **kwargs) - if r is not None and not ((str(r.status_code)[0] in ("4", "5"))): - return count - - def gen_count_args(self, url): - header_count = 95 - while 1: - if header_count < 0: - break - fake_headers = {} - for i in range(0, header_count): - fake_headers[self.helpers.rand_string(14)] = self.helpers.rand_string(14) - yield header_count, (url,), {"headers": fake_headers} - header_count -= 5 - - def clean_list(self, header): - if (len(header) > 0) and ("%" not in header) and (header not in self.header_blacklist): - return True - return False - - def binary_search(self, compare_helper, url, group, reason=None, reflection=False): - self.debug(f"Entering recursive binary_search with {len(group):,} sized group") - if len(group) == 1: - yield group[0], reason, reflection - elif len(group) > 1: - for group_slice in self.helpers.split_list(group): - match, reason, reflection, subject_response = self.check_batch(compare_helper, url, group_slice) - if match == False: - yield from self.binary_search(compare_helper, url, group_slice, reason, reflection) - else: - self.warning(f"Submitted group of size 0 to binary_search()") - - def check_batch(self, compare_helper, url, header_list): - - if self.scan.stopping: - raise ScanCancelledError() - rand = self.helpers.rand_string() - test_headers = {} - for header in header_list: - test_headers[header] = rand - return compare_helper.compare(url, headers=test_headers, check_reflection=(len(header_list) == 1)) diff --git a/bbot/modules/host_header.py b/bbot/modules/host_header.py index 752e6f9d70..a60967b8b4 100644 --- a/bbot/modules/host_header.py +++ b/bbot/modules/host_header.py @@ -1,118 +1,132 @@ +from bbot.errors import InteractshError from bbot.modules.base import BaseModule -from bbot.core.errors import InteractshError class host_header(BaseModule): - watched_events = ["HTTP_RESPONSE"] produced_events = ["FINDING"] - flags = ["active", "aggressive", "web"] - meta = {"description": "Try common HTTP Host header spoofing techniques"} + flags = ["active", "aggressive", "web-thorough"] + meta = { + "description": "Try common HTTP Host header spoofing techniques", + "created_date": "2022-07-27", + "author": "@liquidsec", + } in_scope_only = True + per_hostport_only = True deps_apt = ["curl"] - def setup(self): - - if self.scan.config.get("interactsh_disable", False) == False: + async def setup(self): + self.subdomain_tags = {} + if self.scan.config.get("interactsh_disable", False) is False: try: self.interactsh_instance = 
self.helpers.interactsh() - self.interactsh_domain = self.interactsh_instance.register(callback=self.interactsh_callback) + self.domain = await self.interactsh_instance.register(callback=self.interactsh_callback) except InteractshError as e: self.warning(f"Interactsh failure: {e}") return False - - self.interactsh_subdomain_tags = {} + else: + self.warning("Interactsh is disabled globally. Interaction based detections will be disabled.") + self.domain = f"{self.rand_string(12, digits=False)}.com" return True - def interactsh_callback(self, r): + def rand_string(self, *args, **kwargs): + return self.helpers.rand_string(*args, **kwargs) + + async def interactsh_callback(self, r): full_id = r.get("full-id", None) if full_id: if "." in full_id: - match = self.interactsh_subdomain_tags.get(full_id.split(".")[0]) - self.hugewarning(match) + match = self.subdomain_tags.get(full_id.split(".")[0]) if match is None: return matched_event = match[0] matched_technique = match[1] - self.emit_event( + protocol = r.get("protocol").upper() + await self.emit_event( { "host": str(matched_event.host), "url": matched_event.data["url"], - "description": f"Spoofed Host header ({matched_technique}) [{r.get('protocol').upper()}] interaction", + "description": f"Spoofed Host header ({matched_technique}) [{protocol}] interaction", }, "FINDING", matched_event, + context=f"{{module}} spoofed host header and induced {{event.type}}: {protocol} interaction", ) else: # this is likely caused by something trying to resolve the base domain first and can be ignored self.debug("skipping results because subdomain tag was missing") - def finish(self): - for r in self.interactsh_instance.poll(): - self.interactsh_callback(r) - - def cleanup(self): - try: - self.interactsh_instance.deregister() - self.debug( - f"successfully degregistered interactsh session with correlation_id {self.interactsh_instance.correlation_id}" - ) - except InteractshError as e: - self.warning(f"Interactsh failure: {e}") + async def finish(self): + if self.scan.config.get("interactsh_disable", False) is False: + await self.helpers.sleep(5) + try: + for r in await self.interactsh_instance.poll(): + await self.interactsh_callback(r) + except InteractshError as e: + self.debug(f"Error in interact.sh: {e}") - def handle_event(self, event): + async def cleanup(self): + if self.scan.config.get("interactsh_disable", False) is False: + try: + await self.interactsh_instance.deregister() + self.debug( + f"successfully deregistered interactsh session with correlation_id {self.interactsh_instance.correlation_id}" + ) + except InteractshError as e: + self.warning(f"Interactsh failure: {e}") + async def handle_event(self, event): # get any set-cookie responses from the response and add them to the request + url = event.data["url"] added_cookies = {} - for k, v in event.data["header-dict"].items(): - - if k.lower() == "set-cookie": - - cookie_string = v - cookie_split = cookie_string.split("=") - added_cookies = {cookie_split[0]: cookie_split[1]} + for header_values in event.data["header-dict"].values(): + for header_value in header_values: + if header_value.lower() == "set-cookie": + header_split = header_value.split("=") + try: + added_cookies = {header_split[0]: header_split[1]} + except IndexError: + self.debug(f"failed to parse cookie from string {header_value}") domain_reflections = [] # host header replacement technique_description = "standard" self.debug(f"Performing {technique_description} case") - subdomain_tag = self.helpers.rand_string(4, digits=False) - 
self.interactsh_subdomain_tags[subdomain_tag] = (event, technique_description) - output = self.helpers.curl( - url=event.data["url"], - headers={"Host": f"{subdomain_tag}.{self.interactsh_domain}"}, + subdomain_tag = self.rand_string(4, digits=False) + self.subdomain_tags[subdomain_tag] = (event, technique_description) + output = await self.helpers.curl( + url=url, + headers={"Host": f"{subdomain_tag}.{self.domain}"}, ignore_bbot_global_settings=True, cookies=added_cookies, ) - - if self.interactsh_domain in output: + if self.domain in output: domain_reflections.append(technique_description) # absolute URL / Host header transposition technique_description = "absolute URL transposition" self.debug(f"Performing {technique_description} case") - subdomain_tag = self.helpers.rand_string(4, digits=False) - self.interactsh_subdomain_tags[subdomain_tag] = (event, technique_description) - output = self.helpers.curl( - url=event.data["url"], - headers={"Host": f"{subdomain_tag}.{self.interactsh_domain}"}, - path_override=event.data["url"], + subdomain_tag = self.rand_string(4, digits=False) + self.subdomain_tags[subdomain_tag] = (event, technique_description) + output = await self.helpers.curl( + url=url, + path_override=url, cookies=added_cookies, ) - if self.interactsh_domain in output: + if self.domain in output: domain_reflections.append(technique_description) # duplicate host header tolerance technique_description = "duplicate host header tolerance" - output = self.helpers.curl( - url=event.data["url"], + output = await self.helpers.curl( + url=url, # Sending a blank HOST first as a hack to trick curl. This makes it no longer an "internal header", thereby allowing for duplicates # The fact that it's accepting two host headers is rare enough to note on its own, and not too noisy. Having the 3rd header be an interactsh would result in false negatives for the slightly less interesting cases. 
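+            # a 4xx status line in the response (checked below via split_output) is what this
+            # module treats as the signature of duplicate-Host-header tolerance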
headers={"Host": ["", str(event.host), str(event.host)]}, @@ -122,22 +136,23 @@ def handle_event(self, event): split_output = output.split("\n") if " 4" in split_output: - self.emit_event( + description = "Duplicate Host Header Tolerated" + await self.emit_event( { "host": str(event.host), - "url": event.data["url"], - "description": f"Duplicate Host Header Tolerated", + "url": url, + "description": description, }, "FINDING", event, + context=f"{{module}} scanned {event.data['url']} and identified {{event.type}}: {description}", ) - # Host Header Overrides - + # host header overrides technique_description = "host override headers" - self.debug(f"Performing {technique_description} case") - subdomain_tag = self.helpers.rand_string(4, digits=False) - self.interactsh_subdomain_tags[subdomain_tag] = (event, technique_description) + self.verbose(f"Performing {technique_description} case") + subdomain_tag = self.rand_string(4, digits=False) + self.subdomain_tags[subdomain_tag] = (event, technique_description) override_headers_list = [ "X-Host", @@ -151,25 +166,26 @@ def handle_event(self, event): ] override_headers = {} for oh in override_headers_list: - override_headers[oh] = f"{subdomain_tag}.{self.interactsh_domain}" + override_headers[oh] = f"{subdomain_tag}.{self.domain}" - output = self.helpers.curl( - url=event.data["url"], + output = await self.helpers.curl( + url=url, headers=override_headers, cookies=added_cookies, ) - if self.interactsh_domain in output: + if self.domain in output: domain_reflections.append(technique_description) # emit all the domain reflections we found for dr in domain_reflections: - - self.emit_event( + description = f"Possible Host header injection. Injection technique: {dr}" + await self.emit_event( { "host": str(event.host), - "url": event.data["url"], - "description": f"Possible Host header injection. Injection technique: {dr}", + "url": url, + "description": description, }, "FINDING", event, + context=f"{{module}} scanned {url} and identified {{event.type}}: {description}", ) diff --git a/bbot/modules/httpx.py b/bbot/modules/httpx.py index e1c89fbbec..8edc4e1d69 100644 --- a/bbot/modules/httpx.py +++ b/bbot/modules/httpx.py @@ -1,82 +1,109 @@ -import json +import re +import orjson +import tempfile import subprocess +from pathlib import Path from bbot.modules.base import BaseModule class httpx(BaseModule): - watched_events = ["OPEN_TCP_PORT", "URL_UNVERIFIED", "URL"] produced_events = ["URL", "HTTP_RESPONSE"] - flags = ["active", "safe", "web"] - meta = {"description": "Visit webpages. Many other modules rely on httpx"} + flags = ["active", "safe", "web-basic", "social-enum", "subdomain-enum", "cloud-enum"] + meta = { + "description": "Visit webpages. 
Many other modules rely on httpx",
+        "created_date": "2022-07-08",
+        "author": "@TheTechromancer",
+    }
 
-    batch_size = 100
-    options = {"in_scope_only": True, "version": "1.2.3", "max_response_size": 5242880}
+    options = {
+        "threads": 50,
+        "in_scope_only": True,
+        "version": "1.2.5",
+        "max_response_size": 5242880,
+        "store_responses": False,
+        "probe_all_ips": False,
+    }
     options_desc = {
-        "in_scope_only": "Only visit web resources that are in scope.",
+        "threads": "Number of httpx threads to use",
+        "in_scope_only": "Only visit web resources that are in scope.",
         "version": "httpx version",
         "max_response_size": "Max response size in bytes",
+        "store_responses": "Save raw HTTP responses to scan folder",
+        "probe_all_ips": "Probe all the IPs associated with the same host",
     }
     deps_ansible = [
         {
             "name": "Download httpx",
             "unarchive": {
-                "src": "https://github.com/projectdiscovery/httpx/releases/download/v{BBOT_MODULES_HTTPX_VERSION}/httpx_{BBOT_MODULES_HTTPX_VERSION}_linux_amd64.zip",
+                "src": "https://github.com/projectdiscovery/httpx/releases/download/v#{BBOT_MODULES_HTTPX_VERSION}/httpx_#{BBOT_MODULES_HTTPX_VERSION}_#{BBOT_OS}_#{BBOT_CPU_ARCH}.zip",
                 "include": "httpx",
-                "dest": "{BBOT_TOOLS}",
+                "dest": "#{BBOT_TOOLS}",
                 "remote_src": True,
             },
         }
     ]
 
-    scope_distance_modifier = 0
+    scope_distance_modifier = 2
+    _shuffle_incoming_queue = False
+    _batch_size = 500
+    _priority = 2
 
-    def setup(self):
-        self.timeout = self.scan.config.get("httpx_timeout", 5)
+    async def setup(self):
+        self.threads = self.config.get("threads", 50)
         self.max_response_size = self.config.get("max_response_size", 5242880)
-        self.visited = set()
+        self.store_responses = self.config.get("store_responses", False)
+        self.probe_all_ips = self.config.get("probe_all_ips", False)
+        self.httpx_tempdir_regex = re.compile(r"^httpx\d+$")
         return True
 
-    def filter_event(self, event):
-
+    async def filter_event(self, event):
         if "_wildcard" in str(event.host).split("."):
-            return False
+            return False, "event is wildcard"
 
         if "unresolved" in event.tags:
-            return False
+            return False, "event is unresolved"
 
-        if str(event.module) == "httpx":
-            return False
+        if event.module == self:
+            return False, "event is from self"
 
-        # scope filtering
+        if "spider-max" in event.tags:
+            return False, "event exceeds spidering limits"
+        # scope filtering
         in_scope_only = self.config.get("in_scope_only", True)
-        safe_to_visit = "httpx-safe" in event.tags
-        if not safe_to_visit and (in_scope_only and not self.scan.in_scope(event)):
-            return False
-        # reject base URLs to avoid visiting a resource twice
-        # note: speculate makes open ports from
+        if "httpx-safe" in event.tags:
+            return True
+        max_scope_distance = 0 if in_scope_only else (self.scan.scope_search_distance + 1)
+        if event.scope_distance > max_scope_distance:
+            return False, "event is not in scope"
         return True
 
-    def handle_batch(self, *events):
-
+    def make_url_metadata(self, event):
+        has_spider_max = "spider-max" in event.tags
+        url_hash = None
+        if event.type.startswith("URL"):
+            # we NEED the port, otherwise httpx will try HTTPS even for HTTP URLs
+            url = event.with_port().geturl()
+            if event.parsed_url.path == "/":
+                url_hash = hash((event.host, event.port, has_spider_max))
+        else:
+            url = str(event.data)
+            url_hash = hash((event.host, event.port, has_spider_max))
+        if url_hash is None:
+            url_hash = hash((url, has_spider_max))
+        return url, url_hash
+
+    def _incoming_dedup_hash(self, event):
+        url, url_hash = self.make_url_metadata(event)
+        return url_hash
+
+    async def handle_batch(self,
*events): stdin = {} - for e in events: - url_hash = None - if "httpx-only" in e.tags or "spider-danger" not in e.tags: - if e.type.startswith("URL"): - # we NEED the port, otherwise httpx will try HTTPS even for HTTP URLs - url = e.with_port().geturl() - if e.parsed.path == "/": - url_hash = hash((e.host, e.port)) - else: - url = str(e.data) - url_hash = hash((e.host, e.port)) - - if url_hash not in self.visited: - stdin[url] = e - if url_hash is not None: - self.visited.add(url_hash) + + for event in events: + url, url_hash = self.make_url_metadata(event) + stdin[url] = event if not stdin: return @@ -86,50 +113,100 @@ def handle_batch(self, *events): "-silent", "-json", "-include-response", + "-threads", + self.threads, "-timeout", - self.timeout, + self.scan.httpx_timeout, + "-retries", + self.scan.httpx_retries, "-header", f"User-Agent: {self.scan.useragent}", "-response-size-to-read", f"{self.max_response_size}", - # "-r", - # self.helpers.resolver_file, ] - proxy = self.scan.config.get("http_proxy", "") + + if self.store_responses: + response_dir = self.scan.home / "httpx" + self.helpers.mkdir(response_dir) + command += ["-srd", str(response_dir)] + + dns_resolvers = ",".join(self.helpers.system_resolvers) + if dns_resolvers: + command += ["-r", dns_resolvers] + + if self.probe_all_ips: + command += ["-probe-all-ips"] + + for hk, hv in self.scan.custom_http_headers.items(): + command += ["-header", f"{hk}: {hv}"] + proxy = self.scan.http_proxy if proxy: command += ["-http-proxy", proxy] - for line in self.helpers.run_live(command, input=list(stdin), stderr=subprocess.DEVNULL): + async for line in self.run_process_live(command, text=False, input=list(stdin), stderr=subprocess.DEVNULL): try: - j = json.loads(line) - except json.decoder.JSONDecodeError: - self.debug(f"Failed to decode line: {line}") + j = await self.helpers.run_in_executor(orjson.loads, line) + except orjson.JSONDecodeError: + self.warning(f"httpx failed to decode line: {line}") continue url = j.get("url", "") - status_code = int(j.get("status-code", 0)) + status_code = int(j.get("status_code", 0)) if status_code == 0: self.debug(f'No HTTP status code for "{url}"') continue - source_event = stdin.get(j.get("input", ""), None) + parent_event = stdin.get(j.get("input", ""), None) - if source_event is None: - self.warning(f"Unable to correlate source event from: {line}") + if parent_event is None: + self.warning(f"Unable to correlate parent event from: {line}") continue # discard 404s from unverified URLs - if source_event.type == "URL_UNVERIFIED" and status_code in (404,): + path = j.get("path", "/") + if parent_event.type == "URL_UNVERIFIED" and status_code in (404,) and path != "/": self.debug(f'Discarding 404 from "{url}"') continue # main URL - url_event = self.make_event(url, "URL", source_event, tags=[f"status-{status_code}"]) - if url_event and not "httpx-only" in url_event.tags: - if url_event != source_event: - self.emit_event(url_event) + tags = [f"status-{status_code}"] + httpx_ip = j.get("host", "") + if httpx_ip: + tags.append(f"ip-{httpx_ip}") + # grab title + title = self.helpers.tagify(j.get("title", ""), maxlen=30) + if title: + tags.append(f"http-title-{title}") + + url_context = "{module} visited {event.parent.data} and got status code {event.http_status}" + if parent_event.type == "OPEN_TCP_PORT": + url_context += " at {event.data}" + + url_event = self.make_event( + url, + "URL", + parent_event, + tags=tags, + context=url_context, + ) + if url_event: + if url_event != parent_event: + await 
self.emit_event(url_event) # HTTP response - self.emit_event(j, "HTTP_RESPONSE", url_event, internal=True) - - def cleanup(self): + content_type = j.get("header", {}).get("content_type", "unspecified").split(";")[0] + content_length = j.get("content_length", 0) + content_length = self.helpers.bytes_to_human(content_length) + await self.emit_event( + j, + "HTTP_RESPONSE", + url_event, + tags=url_event.tags, + context=f"HTTP_RESPONSE was {content_length} with {content_type} content type", + ) + + for tempdir in Path(tempfile.gettempdir()).iterdir(): + if tempdir.is_dir() and self.httpx_tempdir_regex.match(tempdir.name): + self.helpers.rm_rf(tempdir) + + async def cleanup(self): resume_file = self.helpers.current_dir / "resume.cfg" resume_file.unlink(missing_ok=True) diff --git a/bbot/modules/hunt.py b/bbot/modules/hunt.py index 66caa8e528..bfd5c63200 100644 --- a/bbot/modules/hunt.py +++ b/bbot/modules/hunt.py @@ -1,12 +1,28 @@ -# adopted from https://github.com/bugcrowd/HUNT +# adapted from https://github.com/bugcrowd/HUNT -import re from bbot.modules.base import BaseModule - hunt_param_dict = { - "cmdi": ["daemon", "host", "upload", "dir", "execute", "download", "log", "ip", "cli", "cmd"], - "debug": [ + "Command Injection": [ + "daemon", + "host", + "upload", + "dir", + "execute", + "download", + "log", + "ip", + "cli", + "cmd", + "exec", + "command", + "func", + "code", + "update", + "shell", + "eval", + ], + "Debug": [ "access", "admin", "dbg", @@ -34,8 +50,31 @@ "cfg", "config", ], - "lfi": ["file", "document", "folder", "root", "path", "pg", "style", "pdf", "template", "php_path", "doc"], - "idor": [ + "Directory Traversal": ["entry", "download", "attachment", "basepath", "path", "file", "source", "dest"], + "Local File Include": [ + "file", + "document", + "folder", + "root", + "path", + "pg", + "style", + "pdf", + "template", + "php_path", + "doc", + "lang", + "include", + "img", + "view", + "layout", + "export", + "log", + "configFile", + "stylesheet", + "configFileUrl", + ], + "Insecure Direct Object Reference": [ "id", "user", "account", @@ -49,8 +88,18 @@ "profile", "edit", "report", + "docId", + "accountId", + "customerId", + "reportId", + "jobId", + "sessionId", + "api_key", + "instance", + "identifier", + "access", ], - "sqli": [ + "SQL Injection": [ "id", "select", "report", @@ -81,8 +130,23 @@ "string", "number", "filter", + "limit", + "offset", + "item", + "input", + "date", + "value", + "orderBy", + "groupBy", + "pageNum", + "pageSize", + "tag", + "author", + "postId", + "parentId", + "d", ], - "ssrf": [ + "Server-side Request Forgery": [ "dest", "redirect", "uri", @@ -111,74 +175,120 @@ "show", "navigation", "open", + "proxy", + "target", + "server", + "domain", + "connect", + "fetch", + "apiEndpoint", + ], + "Server-Side Template Injection": [ + "template", + "preview", + "id", + "view", + "activity", + "name", + "content", + "redirect", + "expression", + "statement", + "tpl", + "render", + "format", + "engine", + ], + "XML external entity injection": [ + "xml", + "dtd", + "xsd", + "xmlDoc", + "xmlData", + "entityType", + "entity", + "xmlUrl", + "schema", + "xmlFile", + "xmlPath", + "xmlSource", + "xmlEndpoint", + "xslt", + "xmlConfig", + "xmlCallback", + "attributeName", + "wsdl", + "xmlDocUrl", + ], + "Insecure Cryptography": [ + "encrypted", + "cipher", + "iv", + "checksum", + "hash", + "salt", + "hmac", + "secret", + "key", + "signatureAlgorithm", + "keyId", + "sharedSecret", + "privateKeyId", + "privateKey", + "publicKey", + "publicKeyId", + "encryptedData", + 
"encryptedMessage", + "encryptedPayload", + "encryptedFile", + "cipherText", + "cipherAlgorithm", + "keySize", + "keyPair", + "keyDerivation", + "encryptionMethod", + "decryptionKey", + ], + "Unsafe Deserialization": [ + "serialized", + "object", + "dataObject", + "serialization", + "payload", + "encoded", + "marshalled", + "pickled", + "jsonData", + "state", + "sessionData", + "cache", + "tokenData", + "serializedSession", + "objectState", + "jsonDataPayload", ], - "ssti": ["template", "preview", "id", "view", "activity", "name", "content", "redirect"], } class hunt(BaseModule): - input_tag_regex = re.compile(r"]*?\s+)?href=(?:[\"\'](.+\?.+?))[\"\'].+[>\s]") - - watched_events = ["HTTP_RESPONSE"] + watched_events = ["WEB_PARAMETER"] produced_events = ["FINDING"] - flags = ["active", "safe", "web"] - meta = {"description": "Watch for commonly-exploitable HTTP parameters"} + flags = ["active", "safe", "web-thorough"] + meta = { + "description": "Watch for commonly-exploitable HTTP parameters", + "author": "@liquidsec", + "created_date": "2022-07-20", + } # accept all events regardless of scope distance scope_distance_modifier = None - def extract_params(self, body): - - # check for input tags - input_tag = re.findall(self.input_tag_regex, body) - - for i in input_tag: - self.debug(f"FOUND PARAM ({i}) IN INPUT TAGS") - yield i - - # check for jquery get parameters - jquery_get = re.findall(self.jquery_get_regex, body) - - for i in jquery_get: - self.debug(f"FOUND PARAM ({i}) IN JQUERY GET PARAMS") - yield i - - # check for jquery post parameters - jquery_post = re.findall(self.jquery_post_regex, body) - if jquery_post: - for i in jquery_post: - for x in i.split(","): - s = x.split(":")[0].rstrip() - self.debug(f"FOUND PARAM ({s}) IN A JQUERY POST PARAMS") - yield s - - a_tag = re.findall(self.a_tag_regex, body) - if a_tag: - for url in a_tag: - if url.startswith("http"): - url_parsed = self.helpers.parse_url(url) - if not self.scan.in_scope(url_parsed.netloc): - self.debug(f"Skipping checking for parameters because URL ({url}) is not in scope") - continue - i = url_parsed.query.split("&") - else: - i = url.split("?")[1].split("&") - for x in i: - s = x.split("=")[0] - - self.debug(f"FOUND PARAM ({s}) IN A TAG GET PARAMS") - yield s - - def handle_event(self, event): - body = event.data.get("response-body", "") - for p in self.extract_params(body): - for k in hunt_param_dict.keys(): - if p.lower() in hunt_param_dict[k]: - description = f"Found potential {k.upper()} parameter [{p}]" - self.emit_event( - {"host": str(event.host), "url": event.data.get("url", ""), "description": description}, - "FINDING", - event, - ) + async def handle_event(self, event): + p = event.data["name"] + for k in hunt_param_dict.keys(): + if p.lower() in hunt_param_dict[k]: + description = f"Found potential {k.upper()} parameter [{p}]" + data = {"host": str(event.host), "description": description} + url = event.data.get("url", "") + if url: + data["url"] = url + await self.emit_event(data, "FINDING", event) diff --git a/bbot/modules/hunterio.py b/bbot/modules/hunterio.py index 46aeadd19e..2b708faab8 100644 --- a/bbot/modules/hunterio.py +++ b/bbot/modules/hunterio.py @@ -1,52 +1,65 @@ -from .shodan_dns import shodan_dns +from bbot.modules.templates.subdomain_enum import subdomain_enum_apikey -class hunterio(shodan_dns): - +class hunterio(subdomain_enum_apikey): watched_events = ["DNS_NAME"] produced_events = ["EMAIL_ADDRESS", "DNS_NAME", "URL_UNVERIFIED"] flags = ["passive", "email-enum", "subdomain-enum", "safe"] - 
meta = {"description": "Query hunter.io for emails", "auth_required": True} + meta = { + "description": "Query hunter.io for emails", + "created_date": "2022-04-25", + "author": "@TheTechromancer", + "auth_required": True, + } options = {"api_key": ""} options_desc = {"api_key": "Hunter.IO API key"} base_url = "https://api.hunter.io/v2" + ping_url = f"{base_url}/account?api_key={{api_key}}" + limit = 100 - def setup(self): - self.limit = 100 - return super().setup() - - def ping(self): - r = self.helpers.request(f"{self.base_url}/account?api_key={self.api_key}") - resp_content = getattr(r, "text", "") - assert getattr(r, "status_code", 0) == 200, resp_content - - def handle_event(self, event): + async def handle_event(self, event): query = self.make_query(event) - for entry in self.query(query): + for entry in await self.query(query): email = entry.get("value", "") sources = entry.get("sources", []) if email: email_event = self.make_event(email, "EMAIL_ADDRESS", event) if email_event: - self.emit_event(email_event) + await self.emit_event( + email_event, + context=f'{{module}} queried Hunter.IO API for "{query}" and found {{event.type}}: {{event.data}}', + ) for source in sources: domain = source.get("domain", "") if domain: - self.emit_event(domain, "DNS_NAME", email_event) + await self.emit_event( + domain, + "DNS_NAME", + email_event, + context=f"{{module}} originally found {email} at {{event.type}}: {{event.data}}", + ) url = source.get("uri", "") if url: - self.emit_event(url, "URL_UNVERIFIED", email_event) + await self.emit_event( + url, + "URL_UNVERIFIED", + email_event, + context=f"{{module}} originally found {email} at {{event.type}}: {{event.data}}", + ) - def query(self, query): + async def query(self, query): emails = [] url = ( - f"{self.base_url}/domain-search?domain={query}&api_key={self.api_key}" - + "&limit={page_size}&offset={offset}" + f"{self.base_url}/domain-search?domain={query}&api_key={{api_key}}" + "&limit={page_size}&offset={offset}" ) - for j in self.helpers.api_page_iter(url, page_size=self.limit): - new_emails = j.get("data", {}).get("emails", []) - if not new_emails: - break - emails += new_emails + agen = self.api_page_iter(url, page_size=self.limit) + try: + async for j in agen: + new_emails = j.get("data", {}).get("emails", []) + if not new_emails: + break + emails += new_emails + finally: + await agen.aclose() return emails diff --git a/bbot/modules/iis_shortnames.py b/bbot/modules/iis_shortnames.py index 152be003cd..48860445f5 100644 --- a/bbot/modules/iis_shortnames.py +++ b/bbot/modules/iis_shortnames.py @@ -1,125 +1,356 @@ +import re + from bbot.modules.base import BaseModule +valid_chars = "ETAONRISHDLFCMUGYPWBVKJXQZ0123456789_-$~()&!#%'@^`{}]]" + + +def encode_all(string): + return "".join("%{0:0>2}".format(format(ord(char), "x")) for char in string) -class iis_shortnames(BaseModule): +class IISShortnamesError(Exception): + pass + + +class iis_shortnames(BaseModule): watched_events = ["URL"] produced_events = ["URL_HINT"] - flags = ["active", "safe"] - meta = {"description": "Check for IIS shortname vulnerability"} - options = {"detect_only": True, "threads": 8} + flags = ["active", "safe", "web-basic", "iis-shortnames"] + meta = { + "description": "Check for IIS shortname vulnerability", + "created_date": "2022-04-15", + "author": "@liquidsec", + } + options = {"detect_only": True, "max_node_count": 50, "speculate_magic_urls": True} options_desc = { "detect_only": "Only detect the vulnerability and do not run the shortname scanner", - "threads": 
"the number of threads to run concurrently when executing the IIS shortname scanner", + "max_node_count": "Limit how many nodes to attempt to resolve on any given recursion branch", + "speculate_magic_urls": "Attempt to discover iis 'magic' special folders", } in_scope_only = True - deps_ansible = [ - { - "name": "Install Java JRE (Debian)", - "become": True, - "package": {"name": "default-jre", "state": "latest"}, - "when": """ansible_facts['os_family'] == 'Debian'""", - }, - { - "name": "Install Java JRE (RedHat)", - "become": True, - "package": {"name": "java-latest-openjdk", "state": "latest"}, - "when": """ansible_facts['os_family'] == 'RedHat'""", - }, - { - "name": "Install Java JRE (Archlinux)", - "package": {"name": "jre-openjdk", "state": "present"}, - "become": True, - "when": """ansible_facts['os_family'] == 'Archlinux'""", - }, - ] - - def setup(self): - iis_shortname_jar = "https://github.com/irsdl/IIS-ShortName-Scanner/raw/master/iis_shortname_scanner.jar" - iis_shortname_config = "https://raw.githubusercontent.com/irsdl/IIS-ShortName-Scanner/master/config.xml" - self.iis_scanner_jar = self.helpers.download(iis_shortname_jar, cache_hrs=720) - self.iis_scanner_config = self.helpers.download(iis_shortname_config, cache_hrs=720) - if self.iis_scanner_jar and self.iis_scanner_config: - return True + _module_threads = 8 + + async def detect(self, target): + technique = None + detections = [] + random_string = self.helpers.rand_string(8) + control_url = f"{target}{random_string}*~1*/a.aspx" + test_url = f"{target}*~1*/a.aspx" + + for method in ["GET", "POST", "OPTIONS", "DEBUG", "HEAD", "TRACE"]: + kwargs = {"method": method, "allow_redirects": False, "timeout": 10} + confirmations = 0 + iterations = 5 # one failed detection is tolerated, as long as its not the first run + while iterations > 0: + control_result = await self.helpers.request(control_url, **kwargs) + test_result = await self.helpers.request(test_url, **kwargs) + if control_result and test_result: + if control_result.status_code != test_result.status_code: + confirmations += 1 + self.debug(f"New detection on {target}, number of confirmations: [{str(confirmations)}]") + if confirmations > 3: + technique = f"{str(control_result.status_code)}/{str(test_result.status_code)} HTTP Code" + detections.append((method, test_result.status_code, technique)) + break + elif ("Error Code0x80070002" in control_result.text) and ( + "Error Code0x00000000" in test_result.text + ): + confirmations += 1 + if confirmations > 3: + detections.append((method, 0, technique)) + technique = "HTTP Body Error Message" + break + iterations -= 1 + if confirmations == 0: + break + return detections + + async def setup(self): + self.scanned_tracker = set() + return True + + @staticmethod + def normalize_url(url): + return str(url.rstrip("/") + "/").lower() + + async def directory_confirm(self, target, method, url_hint, affirmative_status_code): + payload = encode_all(f"{url_hint}") + url = f"{target}{payload}" + directory_confirm_result = await self.helpers.request( + method=method, url=url, allow_redirects=False, retries=2, timeout=10 + ) + if directory_confirm_result is not None: + if directory_confirm_result.status_code == affirmative_status_code: + return True return False - def handle_event(self, event): + async def duplicate_check(self, target, method, url_hint, affirmative_status_code): + duplicates = [] + count = 2 + base_hint = re.sub(r"~\d", "", url_hint) + suffix = "/a.aspx" + + while 1: + payload = encode_all(f"{base_hint}~{str(count)}*") 
+ url = f"{target}{payload}{suffix}" + + duplicate_check_results = await self.helpers.request( + method=method, url=url, allow_redirects=False, retries=2, timeout=10 + ) + + if not duplicate_check_results: + self.debug("duplicate check produced NoneType sample") + break + + if duplicate_check_results.status_code != affirmative_status_code: + break + else: + duplicates.append(f"{base_hint}~{str(count)}") + count += 1 + + if count > 5: + self.warning("Found more than 5 files with the same shortname. Will stop further duplicate checking.") + break + + return duplicates + + async def threaded_request(self, method, url, affirmative_status_code, c): + r = await self.helpers.request(method=method, url=url, allow_redirects=False, retries=2, timeout=10) + if r is not None: + if r.status_code == affirmative_status_code: + return True, c + return None, c + + async def solve_valid_chars(self, method, target, affirmative_status_code): + confirmed_chars = [] + confirmed_exts = [] + suffix = "/a.aspx" + + urls_and_kwargs = [] + kwargs = {"method": method, "allow_redirects": False, "retries": 2, "timeout": 10} + for c in valid_chars: + for file_part in ("stem", "ext"): + if file_part == "stem": + payload = encode_all(f"*{c}*~1*") + elif file_part == "ext": + payload = encode_all(f"*~1*{c}*") + url = f"{target}{payload}{suffix}" + urls_and_kwargs.append((url, kwargs, (c, file_part))) + + async for url, kwargs, (c, file_part), response in self.helpers.request_custom_batch(urls_and_kwargs): + if response is not None: + if response.status_code == affirmative_status_code: + if file_part == "stem": + confirmed_chars.append(c) + elif file_part == "ext": + confirmed_exts.append(c) + + return confirmed_chars, confirmed_exts + + async def solve_shortname_recursive( + self, + safety_counter, + method, + target, + prefix, + affirmative_status_code, + char_list, + ext_char_list, + extension_mode=False, + node_count=0, + ): + url_hint_list = [] + found_results = False - normalized_url = event.data.rstrip("/") + "/" - result = self.detect(normalized_url) + cl = ext_char_list if extension_mode is True else char_list - if result: - description = f"IIS Shortname Vulnerability" - self.emit_event( + urls_and_kwargs = [] + + for c in cl: + suffix = "/a.aspx" + wildcard = "*" if extension_mode else "*~1*" + payload = encode_all(f"{prefix}{c}{wildcard}") + url = f"{target}{payload}{suffix}" + kwargs = {"method": method} + urls_and_kwargs.append((url, kwargs, c)) + + async for url, kwargs, c, response in self.helpers.request_custom_batch(urls_and_kwargs): + if response is not None: + if response.status_code == affirmative_status_code: + found_results = True + node_count += 1 + safety_counter.counter += 1 + if safety_counter.counter > 1500: + raise IISShortnamesError(f"Exceeded safety counter threshold ({safety_counter.counter})") + self.verbose(f"node_count: {str(node_count)} for node: {target}") + if node_count > self.config.get("max_node_count"): + self.verbose( + f"iis_shortnames: max_node_count ({str(self.config.get('max_node_count'))}) exceeded for node: {target}. Affected branch will be terminated." 
+ ) + return url_hint_list + + # check to make sure the file isn't shorter than 6 characters + wildcard = "~1*" + payload = encode_all(f"{prefix}{c}{wildcard}") + url = f"{target}{payload}{suffix}" + r = await self.helpers.request( + method=method, url=url, allow_redirects=False, retries=2, timeout=10 + ) + if r is not None: + if r.status_code == affirmative_status_code: + url_hint_list.append(f"{prefix}{c}") + + url_hint_list += await self.solve_shortname_recursive( + safety_counter, + method, + target, + f"{prefix}{c}", + affirmative_status_code, + char_list, + ext_char_list, + extension_mode, + node_count=node_count, + ) + if len(prefix) > 0 and found_results is False: + url_hint_list.append(f"{prefix}") + self.verbose(f"Found new (possibly partial) URL_HINT: {prefix} from node {target}") + return url_hint_list + + async def handle_event(self, event): + class safety_counter_obj: + counter = 0 + + normalized_url = self.normalize_url(event.data) + self.scanned_tracker.add(normalized_url) + + detections = await self.detect(normalized_url) + + technique_strings = [] + if detections: + for detection in detections: + method, affirmative_status_code, technique = detection + technique_strings.append(f"{method} ({technique})") + + description = f"IIS Shortname Vulnerability Detected. Potentially Vulnerable Method/Techniques: [{','.join(technique_strings)}]" + await self.emit_event( {"severity": "LOW", "host": str(event.host), "url": normalized_url, "description": description}, "VULNERABILITY", event, + context="{module} detected low {event.type}: IIS shortname enumeration", ) + + if self.config.get("speculate_magic_urls") and "iis-magic-url" not in event.tags: + magic_url_bin = f"{normalized_url}bin::$INDEX_ALLOCATION/" + self.debug(f"making IIS magic URL: {magic_url_bin}") + magic_url_event = self.make_event( + magic_url_bin, "URL", parent=event, tags=["iis-magic-url", "status-403"] + ) + await self.scan.modules["iis_shortnames"].incoming_event_queue.put(magic_url_event) + if not self.config.get("detect_only"): - command = [ - "java", - "-jar", - self.iis_scanner_jar, - "0", - str(self.config.get("threads", 8)), - normalized_url, - self.iis_scanner_config, - ] - output = self.helpers.run(command).stdout - self.debug(output) - discovered_directories, discovered_files = self.shortname_parse(output) - for d in discovered_directories: - if d[-2] == "~": - d = d.split("~")[:-1][0] - self.emit_event(normalized_url + d, "URL_HINT", event, tags=["directory"]) - for f in discovered_files: - if f[-2] == "~": - f = f.split("~")[:-1][0] - self.emit_event(normalized_url + f, "URL_HINT", event, tags=["file"]) - - def detect(self, url): - - detected = False - http_methods = ["GET", "OPTIONS", "DEBUG"] - for http_method in http_methods: - dir_name = self.helpers.rand_string(8) - file_name = self.helpers.rand_string(1) - control_url = url.rstrip("/") + "/" + f"{dir_name}*~1*/{file_name}.aspx" - control = self.helpers.request(control_url, method=http_method) - test_url = url.rstrip("/") + "/" + f"*~1*/{file_name}.aspx" - test = self.helpers.request(test_url, method=http_method) - if (control != None) and (test != None): - if (control.status_code != 404) and (test.status_code == 404): - detected = True - return detected - - def shortname_parse(self, output): - discovered_directories = [] - discovered_files = [] - parseLines = output.split("\n") - inDirectories = False - inFiles = False - for idx, line in enumerate(parseLines): - if "Identified directories" in line: - inDirectories = True - elif "Indentified 
files" in line: - inFiles = True - inDirectories = False - elif ":" in line: - pass - elif "Actual" in line: - pass - else: - if inFiles == True: - if len(line) > 0: - shortname = line.split(" ")[-1].split(".")[0].split("~")[0] - extension = line.split(" ")[-1].split(".")[1] - if "?" not in extension: - discovered_files.append(f"{shortname}.{extension}".lower()) - - elif inDirectories == True: - if len(line) > 0: - shortname = line.split(" ")[-1] - discovered_directories.append(shortname.lower()) - return discovered_directories, discovered_files + for detection in detections: + safety_counter = safety_counter_obj() + + method, affirmative_status_code, technique = detection + valid_method_confirmed = False + + if valid_method_confirmed: + break + confirmed_chars, confirmed_exts = await self.solve_valid_chars( + method, normalized_url, affirmative_status_code + ) + + if len(confirmed_chars) >= len(valid_chars) - 4: + self.debug( + f"Detected [{len(confirmed_chars)}] characters (out of {len(valid_chars)}) as valid. This is likely a false positive" + ) + continue + + if len(confirmed_chars) > 0: + valid_method_confirmed = True + else: + continue + + self.verbose(f"Confirmed character list: {','.join(confirmed_chars)}") + self.verbose(f"Confirmed ext character list: {','.join(confirmed_exts)}") + try: + file_name_hints = list( + set( + await self.solve_shortname_recursive( + safety_counter, + method, + normalized_url, + "", + affirmative_status_code, + confirmed_chars, + confirmed_exts, + ) + ) + ) + except IISShortnamesError as e: + self.warning(f"Aborted Shortname Run for URL [{normalized_url}] due to Error: [{e}]") + return + + file_name_hints = [f"{x}~1" for x in file_name_hints] + url_hint_list = [] + + file_name_hints_dedupe = file_name_hints[:] + + for x in file_name_hints_dedupe: + duplicates = await self.duplicate_check(normalized_url, method, x, affirmative_status_code) + if duplicates: + file_name_hints += duplicates + + # check for the case of a folder and file with the same filename + for d in file_name_hints: + if await self.directory_confirm(normalized_url, method, d, affirmative_status_code): + self.verbose(f"Confirmed Directory URL_HINT: {d} from node {normalized_url}") + url_hint_list.append(d) + + for y in file_name_hints: + try: + file_name_extension_hints = await self.solve_shortname_recursive( + safety_counter, + method, + normalized_url, + f"{y}.", + affirmative_status_code, + confirmed_chars, + confirmed_exts, + extension_mode=True, + ) + except IISShortnamesError as e: + self.warning(f"Aborted Shortname Run for URL {normalized_url} due to Error: [{e}]") + return + + for z in file_name_extension_hints: + if z.endswith("."): + z = z.rstrip(".") + self.verbose(f"Found new file URL_HINT: {z} from node {normalized_url}") + url_hint_list.append(z) + + for url_hint in url_hint_list: + if "." 
in url_hint: + hint_type = "shortname-endpoint" + else: + hint_type = "shortname-directory" + + tags = [hint_type] + if "iis-magic-url" in event.tags: + tags.append("iis-magic-url") + await self.emit_event( + f"{normalized_url}/{url_hint}", + "URL_HINT", + event, + tags=tags, + context=f"{{module}} enumerated shortnames at {normalized_url} and found {{event.type}}: {url_hint}", + ) + + async def filter_event(self, event): + if "dir" in event.tags: + if self.normalize_url(event.data) not in self.scanned_tracker: + return True + return False + return False diff --git a/bbot/modules/internal/aggregate.py b/bbot/modules/internal/aggregate.py index a8d3ba10d3..54e3a52ccc 100644 --- a/bbot/modules/internal/aggregate.py +++ b/bbot/modules/internal/aggregate.py @@ -1,12 +1,13 @@ -from bbot.modules.base import BaseModule +from bbot.modules.report.base import BaseReportModule -class aggregate(BaseModule): - watched_events = ["SUMMARY"] - produced_events = ["SUMMARY"] +class aggregate(BaseReportModule): flags = ["passive", "safe"] - meta = {"description": "Report on scan statistics"} + meta = { + "description": "Summarize statistics at the end of a scan", + "created_date": "2022-07-25", + "author": "@TheTechromancer", + } - def report(self): - for table_row in str(self.scan.stats).splitlines(): - self.info(table_row) + async def report(self): + self.log_table(*self.scan.stats._make_table(), table_name="scan-stats") diff --git a/bbot/modules/internal/base.py b/bbot/modules/internal/base.py index 9e7967b42e..8ef1b7fd94 100644 --- a/bbot/modules/internal/base.py +++ b/bbot/modules/internal/base.py @@ -9,13 +9,6 @@ class BaseInternalModule(BaseModule): # Priority, 1-5, lower numbers == higher priority _priority = 3 - @property - def config(self): - config = self.scan.config.get("internal_modules", {}).get(self.name, {}) - if config is None: - config = {} - return config - @property def log(self): if self._log is None: diff --git a/bbot/modules/internal/cloudcheck.py b/bbot/modules/internal/cloudcheck.py new file mode 100644 index 0000000000..82f4e164f4 --- /dev/null +++ b/bbot/modules/internal/cloudcheck.py @@ -0,0 +1,113 @@ +from contextlib import suppress + +from bbot.modules.base import BaseInterceptModule + + +class CloudCheck(BaseInterceptModule): + watched_events = ["*"] + meta = { + "description": "Tag events by cloud provider, identify cloud resources like storage buckets", + "created_date": "2024-07-07", + "author": "@TheTechromancer", + } + scope_distance_modifier = 1 + _priority = 3 + + async def setup(self): + self.dummy_modules = None + return True + + def make_dummy_modules(self): + self.dummy_modules = {} + for provider_name in self.helpers.cloud.providers.keys(): + module = self.scan._make_dummy_module(f"cloud_{provider_name}", _type="scan") + module.default_discovery_context = "{module} derived {event.type}: {event.host}" + self.dummy_modules[provider_name] = module + + async def filter_event(self, event): + if (not event.host) or (event.type in ("IP_RANGE",)): + return False, "event does not have host attribute" + return True + + async def handle_event(self, event, **kwargs): + # don't hold up the event loop loading cloud IPs etc. 
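+        # (the per-provider dummy modules are built lazily on the first event rather than in setup())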
+ if self.dummy_modules is None: + self.make_dummy_modules() + # cloud tagging by hosts + hosts_to_check = set(event.resolved_hosts) + with suppress(KeyError): + hosts_to_check.remove(event.host_original) + hosts_to_check = [event.host_original] + list(hosts_to_check) + + for i, host in enumerate(hosts_to_check): + host_is_ip = self.helpers.is_ip(host) + try: + cloudcheck_results = self.helpers.cloudcheck(host) + except Exception as e: + self.trace(f"Error running cloudcheck against {event} (host: {host}): {e}") + continue + for provider, provider_type, subnet in cloudcheck_results: + if provider: + event.add_tag(f"{provider_type}-{provider}") + if host_is_ip: + event.add_tag(f"{provider_type}-ip") + else: + # if the original hostname is a cloud domain, tag it as such + if i == 0: + event.add_tag(f"{provider_type}-domain") + # any children are tagged as CNAMEs + else: + event.add_tag(f"{provider_type}-cname") + + found = set() + str_hosts_to_check = [str(host) for host in hosts_to_check] + # look for cloud assets in hosts, http responses + # loop through each provider + for provider in self.helpers.cloud.providers.values(): + provider_name = provider.name.lower() + base_kwargs = { + "parent": event, + "tags": [f"{provider.provider_type}-{provider_name}"], + "_provider": provider_name, + } + # loop through the provider's regex signatures, if any + for event_type, sigs in provider.signatures.items(): + if event_type != "STORAGE_BUCKET": + raise ValueError(f'Unknown cloudcheck event type "{event_type}"') + base_kwargs["event_type"] = event_type + for sig in sigs: + matches = [] + # TODO: convert this to an excavate YARA hook + # if event.type == "HTTP_RESPONSE": + # matches = await self.helpers.re.findall(sig, event.data.get("body", "")) + if event.type.startswith("DNS_NAME"): + for host in str_hosts_to_check: + match = sig.match(host) + if match: + matches.append(match.groups()) + for match in matches: + if match not in found: + found.add(match) + + _kwargs = dict(base_kwargs) + event_type_tag = f"cloud-{event_type}" + _kwargs["tags"].append(event_type_tag) + if event.type.startswith("DNS_NAME"): + event.add_tag(event_type_tag) + + if event_type == "STORAGE_BUCKET": + bucket_name, bucket_domain = match + bucket_url = f"https://{bucket_name}.{bucket_domain}" + _kwargs["data"] = { + "name": bucket_name, + "url": bucket_url, + "context": f"{{module}} analyzed {event.type} and found {{event.type}}: {bucket_url}", + } + await self.emit_event(**_kwargs) + + async def emit_event(self, *args, **kwargs): + provider_name = kwargs.pop("_provider") + dummy_module = self.dummy_modules[provider_name] + event = dummy_module.make_event(*args, **kwargs) + if event: + await super().emit_event(event) diff --git a/bbot/modules/internal/dnsresolve.py b/bbot/modules/internal/dnsresolve.py new file mode 100644 index 0000000000..3dddd289a4 --- /dev/null +++ b/bbot/modules/internal/dnsresolve.py @@ -0,0 +1,335 @@ +import ipaddress +from contextlib import suppress + +from bbot.errors import ValidationError +from bbot.core.helpers.dns.engine import all_rdtypes +from bbot.core.helpers.dns.helpers import extract_targets +from bbot.modules.base import BaseInterceptModule, BaseModule + + +class DNSResolve(BaseInterceptModule): + watched_events = ["*"] + produced_events = ["DNS_NAME", "IP_ADDRESS", "RAW_DNS_RECORD"] + meta = {"description": "Perform DNS resolution", "created_date": "2022-04-08", "author": "@TheTechromancer"} + _priority = 1 + scope_distance_modifier = None + + class HostModule(BaseModule): + _name = "host" 
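+        # placeholder module to which DNS-discovered hosts are attributed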
+ _type = "internal" + + @property + def module_threads(self): + return self.dns_config.get("threads", 25) + + async def setup(self): + self.dns_config = self.scan.config.get("dns", {}) + self.dns_disable = self.dns_config.get("disable", False) + if self.dns_disable: + return None, "DNS resolution is disabled in the config" + + self.minimal = self.dns_config.get("minimal", False) + self.minimal_rdtypes = ("A", "AAAA", "CNAME") + if self.minimal: + self.non_minimal_rdtypes = () + else: + self.non_minimal_rdtypes = tuple([t for t in all_rdtypes if t not in self.minimal_rdtypes]) + self.dns_search_distance = max(0, int(self.dns_config.get("search_distance", 1))) + self._emit_raw_records = None + + self.host_module = self.HostModule(self.scan) + self.children_emitted = set() + self.children_emitted_raw = set() + self.hosts_resolved = set() + + return True + + async def filter_event(self, event): + if (not event.host) or (event.type in ("IP_RANGE",)): + return False, "event does not have host attribute" + return True + + async def handle_event(self, event, **kwargs): + event_is_ip = self.helpers.is_ip(event.host) + if event_is_ip: + minimal_rdtypes = ("PTR",) + non_minimal_rdtypes = () + else: + minimal_rdtypes = self.minimal_rdtypes + non_minimal_rdtypes = self.non_minimal_rdtypes + + # first, we find or create the main DNS_NAME or IP_ADDRESS associated with this event + main_host_event, whitelisted, blacklisted, new_event = self.get_dns_parent(event) + original_tags = set(event.tags) + + # minimal resolution - first, we resolve A/AAAA records for scope purposes + if new_event or event is main_host_event: + await self.resolve_event(main_host_event, types=minimal_rdtypes) + # are any of its IPs whitelisted/blacklisted? + whitelisted, blacklisted = self.check_scope(main_host_event) + if whitelisted and event.scope_distance > 0: + self.debug(f"Making {main_host_event} in-scope because it resolves to an in-scope resource (A/AAAA)") + main_host_event.scope_distance = 0 + + # abort if the event resolves to something blacklisted + if blacklisted: + return False, "it has a blacklisted DNS record" + + # DNS resolution for hosts that aren't IPs + if not event_is_ip: + # if the event is within our dns search distance, resolve the rest of our records + if main_host_event.scope_distance < self._dns_search_distance: + await self.resolve_event(main_host_event, types=non_minimal_rdtypes) + # check for wildcards if the event is within the scan's search distance + if new_event and main_host_event.scope_distance <= self.scan.scope_search_distance: + event_data_changed = await self.handle_wildcard_event(main_host_event) + if event_data_changed: + # since data has changed, we check again whether it's a duplicate + if event.type == "DNS_NAME" and self.scan.ingress_module.is_incoming_duplicate( + event, add=True + ): + if not event._graph_important: + return ( + False, + "it's a DNS wildcard, and its module already emitted a similar wildcard event", + ) + else: + self.debug( + f"Event {event} was already emitted by its module, but it's graph-important so it gets a pass" + ) + + # if there weren't any DNS children and it's not an IP address, tag as unresolved + if not main_host_event.raw_dns_records and not event_is_ip: + main_host_event.add_tag("unresolved") + main_host_event.type = "DNS_NAME_UNRESOLVED" + + # main_host_event.add_tag(f"resolve-distance-{main_host_event.dns_resolve_distance}") + + dns_tags = main_host_event.tags.difference(original_tags) + + dns_resolve_distance = getattr(main_host_event, 
"dns_resolve_distance", 0) + runaway_dns = dns_resolve_distance >= self.helpers.dns.runaway_limit + if runaway_dns: + # kill runaway DNS chains + self.debug( + f"Skipping DNS children for {event} because their DNS resolve distances would be greater than the configured value for this scan ({self.helpers.dns.runaway_limit})" + ) + main_host_event.add_tag(f"runaway-dns-{dns_resolve_distance}") + else: + # emit dns children + await self.emit_dns_children_raw(main_host_event, dns_tags) + if not self.minimal: + await self.emit_dns_children(main_host_event) + + # emit the main DNS_NAME or IP_ADDRESS + if ( + new_event + and event is not main_host_event + and main_host_event.scope_distance <= self._dns_search_distance + ): + await self.emit_event(main_host_event) + + # transfer scope distance to event + event.scope_distance = main_host_event.scope_distance + event._resolved_hosts = main_host_event.resolved_hosts + + async def handle_wildcard_event(self, event): + rdtypes = tuple(event.raw_dns_records) + wildcard_rdtypes = await self.helpers.is_wildcard( + event.host, rdtypes=rdtypes, raw_dns_records=event.raw_dns_records + ) + for rdtype, (is_wildcard, wildcard_host) in wildcard_rdtypes.items(): + if is_wildcard is False: + continue + elif is_wildcard is True: + event.add_tag("wildcard") + wildcard_tag = "wildcard" + else: + event.add_tag(f"wildcard-{is_wildcard}") + wildcard_tag = f"wildcard-{is_wildcard}" + event.add_tag(f"{rdtype}-{wildcard_tag}") + + # wildcard event modification (www.evilcorp.com --> _wildcard.evilcorp.com) + if wildcard_rdtypes and "target" not in event.tags: + # these are the rdtypes that have wildcards + wildcard_rdtypes_set = set(wildcard_rdtypes) + # consider the event a full wildcard if all its records are wildcards + event_is_wildcard = False + if wildcard_rdtypes_set: + event_is_wildcard = all(r[0] is True for r in wildcard_rdtypes.values()) + + if event_is_wildcard: + if event.type in ("DNS_NAME",) and "_wildcard" not in event.data.split("."): + wildcard_parent = self.helpers.parent_domain(event.host) + for rdtype, (_is_wildcard, _parent_domain) in wildcard_rdtypes.items(): + if _is_wildcard: + wildcard_parent = _parent_domain + break + wildcard_data = f"_wildcard.{wildcard_parent}" + if wildcard_data != event.data: + self.debug(f'Wildcard detected, changing event.data "{event.data}" --> "{wildcard_data}"') + event.data = wildcard_data + return True + return False + + async def emit_dns_children(self, event): + for rdtype, children in event.dns_children.items(): + module = self._make_dummy_module(rdtype) + for child_host in children: + try: + child_event = self.scan.make_event( + child_host, + "DNS_NAME", + module=module, + parent=event, + context=f"{rdtype} record for {event.host} contains {{event.type}}: {{event.host}}", + ) + except ValidationError as e: + self.warning(f'Event validation failed for DNS child of {event}: "{child_host}" ({rdtype}): {e}') + continue + + child_hash = hash(f"{event.host}:{module}:{child_host}") + # if we haven't emitted this one before + if child_hash not in self.children_emitted: + # and it's either in-scope or inside our dns search distance + if self.preset.in_scope(child_host) or child_event.scope_distance <= self._dns_search_distance: + self.children_emitted.add(child_hash) + # if it's a hostname and it's only one hop away, mark it as affiliate + if child_event.type == "DNS_NAME" and child_event.scope_distance == 1: + child_event.add_tag("affiliate") + self.debug(f"Queueing DNS child for {event}: {child_event}") + await 
self.emit_event(child_event) + + async def emit_dns_children_raw(self, event, dns_tags): + for rdtype, answers in event.raw_dns_records.items(): + rdtype_lower = rdtype.lower() + tags = {t for t in dns_tags if rdtype_lower in t.split("-")} + if self.emit_raw_records and rdtype not in ("A", "AAAA", "CNAME", "PTR"): + for answer in answers: + text_answer = answer.to_text() + child_hash = hash(f"{event.host}:{rdtype}:{text_answer}") + if child_hash not in self.children_emitted_raw: + self.children_emitted_raw.add(child_hash) + await self.emit_event( + {"host": str(event.host), "type": rdtype, "answer": text_answer}, + "RAW_DNS_RECORD", + parent=event, + tags=tags, + context=f"{rdtype} lookup on {{event.parent.host}} produced {{event.type}}", + ) + + def check_scope(self, event): + whitelisted = False + blacklisted = False + dns_children = getattr(event, "dns_children", {}) + for rdtype in ("A", "AAAA", "CNAME"): + hosts = dns_children.get(rdtype, []) + # update resolved hosts + event.resolved_hosts.update(hosts) + for host in hosts: + # having a CNAME to an in-scope host doesn't make you in-scope + if rdtype != "CNAME": + if not whitelisted: + with suppress(ValidationError): + if self.scan.whitelisted(host): + whitelisted = True + event.add_tag(f"dns-whitelisted-{rdtype}") + # but a CNAME to a blacklisted host means you're blacklisted + if not blacklisted: + with suppress(ValidationError): + if self.scan.blacklisted(host): + blacklisted = True + event.add_tag("blacklisted") + event.add_tag(f"dns-blacklisted-{rdtype}") + if blacklisted: + whitelisted = False + return whitelisted, blacklisted + + async def resolve_event(self, event, types): + if not types: + return + event_host = str(event.host) + queries = [(event_host, rdtype) for rdtype in types] + dns_errors = {} + async for (query, rdtype), (answers, errors) in self.helpers.dns.resolve_raw_batch(queries): + # errors + try: + dns_errors[rdtype].update(errors) + except KeyError: + dns_errors[rdtype] = set(errors) + for answer in answers: + event.add_tag(f"{rdtype}-record") + # raw dnspython answers + try: + event.raw_dns_records[rdtype].add(answer) + except KeyError: + event.raw_dns_records[rdtype] = {answer} + # hosts + for _rdtype, host in extract_targets(answer): + try: + event.dns_children[_rdtype].add(host) + except KeyError: + event.dns_children[_rdtype] = {host} + # check for private IPs + try: + ip = ipaddress.ip_address(host) + if ip.is_private: + event.add_tag("private-ip") + except ValueError: + continue + + # tag event with errors + for rdtype, errors in dns_errors.items(): + # only consider it an error if there weren't any results for that rdtype + if errors and rdtype not in event.dns_children: + event.add_tag(f"{rdtype}-error") + + def get_dns_parent(self, event): + """ + Get the first parent DNS_NAME / IP_ADDRESS of an event. If one isn't found, create it. 
+        """
+        for parent in event.get_parents(include_self=True):
+            if parent.host == event.host and parent.type in ("IP_ADDRESS", "DNS_NAME", "DNS_NAME_UNRESOLVED"):
+                blacklisted = any(t.startswith("dns-blacklisted-") for t in parent.tags)
+                whitelisted = any(t.startswith("dns-whitelisted-") for t in parent.tags)
+                new_event = parent is event
+                return parent, whitelisted, blacklisted, new_event
+        tags = set()
+        if "target" in event.tags:
+            tags.add("target")
+        return (
+            self.scan.make_event(
+                event.host,
+                "DNS_NAME",
+                module=self.host_module,
+                parent=event,
+                context="{event.parent.type} has host {event.type}: {event.host}",
+                tags=tags,
+            ),
+            None,
+            None,
+            True,
+        )
+
+    @property
+    def emit_raw_records(self):
+        if self._emit_raw_records is None:
+            watching_raw_records = any("RAW_DNS_RECORD" in m.get_watched_events() for m in self.scan.modules.values())
+            omitted_event_types = self.scan.config.get("omit_event_types", [])
+            omit_raw_records = "RAW_DNS_RECORD" in omitted_event_types
+            self._emit_raw_records = watching_raw_records or not omit_raw_records
+        return self._emit_raw_records
+
+    @property
+    def _dns_search_distance(self):
+        return max(self.scan.scope_search_distance, self.dns_search_distance)
+
+    def _make_dummy_module(self, name):
+        try:
+            dummy_module = self.scan.dummy_modules[name]
+        except KeyError:
+            dummy_module = self.scan._make_dummy_module(name=name, _type="DNS")
+            dummy_module._priority = 4
+            dummy_module.suppress_dupes = False
+            self.scan.dummy_modules[name] = dummy_module
+        return dummy_module
diff --git a/bbot/modules/internal/excavate.py b/bbot/modules/internal/excavate.py
index 1c1dc0890c..69a1a32be3 100644
--- a/bbot/modules/internal/excavate.py
+++ b/bbot/modules/internal/excavate.py
@@ -1,267 +1,1086 @@
-import re
+import yara
+import json
 import html
-import base64
-import jwt as j
-
-from bbot.core.helpers.regexes import _email_regex
+import time
+import inspect
+import regex as re
+from pathlib import Path
+from bbot.errors import ExcavateError
+import bbot.core.helpers.regexes as bbot_regexes
+from bbot.modules.base import BaseInterceptModule
 from bbot.modules.internal.base import BaseInternalModule
+from urllib.parse import urlparse, urljoin, parse_qs, urlunparse, urldefrag
+
+
+def find_subclasses(obj, base_class):
+    """
+    Finds and returns subclasses of a specified base class within an object.
+
+    Parameters:
+    obj : object
+        The object to inspect for subclasses.
+    base_class : type
+        The base class to find subclasses of.
+
+    Returns:
+    list
+        A list of subclasses found within the object.
+
+    Example:
+    >>> class A: pass
+    >>> class B(A): pass
+    >>> class C(A): pass
+    >>> find_subclasses(locals(), A)
+    [<class '__main__.B'>, <class '__main__.C'>]
+    """
+    subclasses = []
+    for name, member in inspect.getmembers(obj):
+        if inspect.isclass(member) and issubclass(member, base_class) and member is not base_class:
+            subclasses.append(member)
+    return subclasses
+
+
+def _exclude_key(original_dict, key_to_exclude):
+    """
+    Returns a new dictionary excluding the specified key from the original dictionary.
+
+    Parameters:
+    original_dict : dict
+        The dictionary to exclude the key from.
+    key_to_exclude : hashable
+        The key to exclude.
+
+    Returns:
+    dict
+        A new dictionary without the specified key.
+ + Example: + >>> original = {'a': 1, 'b': 2, 'c': 3} + >>> _exclude_key(original, 'b') + {'a': 1, 'c': 3} + """ + return {key: value for key, value in original_dict.items() if key != key_to_exclude} + + +def extract_params_url(parsed_url): + params = parse_qs(parsed_url.query) + flat_params = {k: v[0] for k, v in params.items()} + + for p, p_value in flat_params.items(): + yield "GET", parsed_url, p, p_value, "direct_url", _exclude_key(flat_params, p) + + +def extract_params_location(location_header_value, original_parsed_url): + """ + Extracts parameters from a location header, yielding them one at a time. + Args: + location_header_value (dict): Contents of location header + original_url: The original parsed URL the header was received from (urllib.parse.ParseResult) -class BaseExtractor: - regexes = {} + Yields: + method(str), parsed_url(urllib.parse.ParseResult), parameter_name(str), original_value(str), regex_name(str), additional_params(dict): The HTTP method associated with the parameter (GET, POST, None), A urllib.parse.ParseResult object representing the endpoint associated with the parameter, the parameter found in the location header, its original value (if available), the name of the detecting regex, a dict of additional params if any + """ + if location_header_value.startswith("http://") or location_header_value.startswith("https://"): + parsed_url = urlparse(location_header_value) + else: + parsed_url = urlparse(f"{original_parsed_url.scheme}://{original_parsed_url.netloc}{location_header_value}") + + params = parse_qs(parsed_url.query) + flat_params = {k: v[0] for k, v in params.items()} + + for p, p_value in flat_params.items(): + yield "GET", parsed_url, p, p_value, "location_header", _exclude_key(flat_params, p) + + +class YaraRuleSettings: + def __init__(self, description, tags, emit_match): + self.description = description + self.tags = tags + self.emit_match = emit_match + + +class ExcavateRule: + """ + The BBOT Regex Commandments: + + 1) Thou shalt employ YARA regexes in place of Python regexes, save when necessity doth compel otherwise. + 2) Thou shalt ne'er wield a Python regex against a vast expanse of text. + 3) Whensoever it be possible, thou shalt favor string matching o'er regexes. + + Amen. + """ + + yara_rules = {} def __init__(self, excavate): self.excavate = excavate - self.compiled_regexes = {} - for rname, r in self.regexes.items(): - self.compiled_regexes[rname] = re.compile(r) + self.helpers = excavate.helpers + self.name = "" + + async def preprocess(self, r, event, discovery_context): + """ + Preprocesses YARA rule results, extracts meta tags, and configures a YaraRuleSettings object. + + This method retrieves optional meta tags from YARA rules and uses them to configure a YaraRuleSettings object. + It formats the results from the YARA engine into a suitable format for the process() method and initiates + a call to process(), passing on the pre-processed YARA results, event data, YARA rule settings, and discovery context. + + This should typically NOT be overridden. + + Parameters: + r : YaraMatch + The YARA match object containing the rule and meta information. + event : Event + The event data associated with the YARA match. + discovery_context : DiscoveryContext + The context in which the discovery is made. 
+ + Returns: + None + """ + description = "" + tags = [] + emit_match = False - def search(self, content, event, **kwargs): - for name, regex in self.compiled_regexes.items(): - results = regex.findall(content) + if "description" in r.meta.keys(): + description = r.meta["description"] + if "tags" in r.meta.keys(): + tags = self.excavate.helpers.chain_lists(r.meta["tags"]) + if "emit_match" in r.meta.keys(): + emit_match = True + + yara_rule_settings = YaraRuleSettings(description, tags, emit_match) + yara_results = {} + for h in r.strings: + yara_results[h.identifier.lstrip("$")] = sorted( + {i.matched_data.decode("utf-8", errors="ignore") for i in h.instances} + ) + await self.process(yara_results, event, yara_rule_settings, discovery_context) + + async def process(self, yara_results, event, yara_rule_settings, discovery_context): + """ + Processes YARA rule results and reports events with enriched data. + + This method iterates over the provided YARA rule results and constructs event data for each match. + It enriches the event data with host, URL, and description information, and conditionally includes + matched data based on the YaraRuleSettings. Finally, it reports the constructed event data. + + Override when custom processing and/or validation is needed on data before reporting. + + Parameters: + yara_results : dict + A dictionary where keys are YARA rule identifiers and values are lists of matched data strings. + event : Event + The event data associated with the YARA match. + yara_rule_settings : YaraRuleSettings + The settings configured from YARA rule meta tags, including description, tags, and emit_match flag. + discovery_context : DiscoveryContext + The context in which the discovery is made. + + Returns: + None + """ + for results in yara_results.values(): for result in results: - self.report(result, name, event, **kwargs) + event_data = {"description": f"{discovery_context} {yara_rule_settings.description}"} + if yara_rule_settings.emit_match: + event_data["description"] += f" [{result}]" + await self.report(event_data, event, yara_rule_settings, discovery_context) + + async def report_prep(self, event_data, event_type, event, tags): + """ + Prepares an event draft for reporting by creating and tagging the event. + + This method creates an event draft using the provided event data and type, associating it with a parent event. + It tags the event draft with the provided tags and returns the draft. If event creation fails, it returns None. + + Override when an event needs to be modified before it is emitted - for example, custom tags need to be conditionally added. + + Parameters: + event_data : dict + The data to be included in the event. + event_type : str + The type of the event being reported. + event : Event + The parent event to which this event draft is related. + tags : list + A list of tags to be associated with the event draft. + + Returns: + EventDraft or None + """ + event_draft = self.excavate.make_event(event_data, event_type, parent=event) + if not event_draft: + return None + event_draft.add_tags(tags) + return event_draft + + async def report( + self, event_data, event, yara_rule_settings, discovery_context, event_type="FINDING", abort_if=None, **kwargs + ): + """ + Reports an event by preparing an event draft and emitting it. + + Processes the provided event data, sets a default description if needed, prepares the event draft, and emits it. + It constructs a context string for the event and uses the report_prep method to create the event draft. 
If the draft is successfully
+        created, it emits the event.
+
+        Typically not overridden, but might need to be if custom logic is needed to build description/context, etc.
 
-    def report(self, result, name, event):
-        pass
+        Parameters:
+            event_data : dict
+                The data to be included in the event.
+            event : Event
+                The parent event to which this event is related.
+            yara_rule_settings : YaraRuleSettings
+                The settings configured from YARA rule meta tags, including description and tags.
+            discovery_context : DiscoveryContext
+                The context in which the discovery is made.
+            event_type : str, optional
+                The type of the event being reported, default is "FINDING".
+            abort_if : callable, optional
+                A callable that determines if the event emission should be aborted.
+            **kwargs : dict
+                Additional keyword arguments to pass to the report_prep method.
+        Returns:
+            None
+        """
+
+        # If a description is not set and is needed, provide a basic one
+        if event_type == "FINDING" and "description" not in event_data.keys():
+            event_data["description"] = f"{discovery_context} {yara_rule_settings.description}"
+        subject = ""
+        if isinstance(event_data, str):
+            subject = f" {event_data}"
+        context = f"Excavate's {self.__class__.__name__} emitted {event_type}{subject}, because {discovery_context} {yara_rule_settings.description}"
+        tags = yara_rule_settings.tags
+        event_draft = await self.report_prep(event_data, event_type, event, tags, **kwargs)
+        if event_draft:
+            await self.excavate.emit_event(event_draft, context=context, abort_if=abort_if)
+
+
+class CustomExtractor(ExcavateRule):
-class HostnameExtractor(BaseExtractor):
-    regexes = {}
-
     def __init__(self, excavate):
-        dns_targets = [t for t in excavate.scan.target if t.type == "DNS_NAME"]
-        for i, t in enumerate(dns_targets):
-            self.regexes[f"dns_name_{i+1}"] = r"(?:(?:[\w-]+)\.)+" + str(t.host)
         super().__init__(excavate)
 
-    def report(self, result, name, event, **kwargs):
-        self.excavate.emit_event(result, "DNS_NAME", source=event)
+    async def process(self, yara_results, event, yara_rule_settings, discovery_context):
+        for identifier, results in yara_results.items():
+            for result in results:
+                event_data = {}
+                description_string = (
+                    f" with description: [{yara_rule_settings.description}]" if yara_rule_settings.description else ""
+                )
+                event_data["description"] = (
+                    f"Custom Yara Rule [{self.name}]{description_string} Matched via identifier [{identifier}]"
+                )
+                if yara_rule_settings.emit_match:
+                    event_data["description"] += f" and extracted [{result}]"
+                await self.report(event_data, event, yara_rule_settings, discovery_context)
+
+
+class excavate(BaseInternalModule, BaseInterceptModule):
+    """
+    Example (simple) Excavate Rules:
+    class excavateTestRule(ExcavateRule):
+        yara_rules = {
+            "SearchForText": 'rule SearchForText { meta: description = "Contains the text AAAABBBBCCCC" strings: $text = "AAAABBBBCCCC" condition: $text }',
+            "SearchForText2": 'rule SearchForText2 { meta: description = "Contains the text DDDDEEEEFFFF" strings: $text2 = "DDDDEEEEFFFF" condition: $text2 }',
+        }
+    """
 
-class URLExtractor(BaseExtractor):
-    regexes = {
-        "fullurl": r"https?://(?:\w|\d)(?:[\d\w-]+\.?)+(?::\d{1,5})?(?:/[-\w\.\(\)]+)*/?",
-        "a-tag": r"<a\s+(?:[^>]*?\s+)?href=([\"'])(.*?)\1",
-        "script-tag": r"<script\s+(?:[^>]*?\s+)?src=([\"'])(.*?)\1",
+    watched_events = ["HTTP_RESPONSE", "RAW_TEXT"]
+    produced_events = ["URL_UNVERIFIED", "WEB_PARAMETER"]
+    flags = ["passive"]
+    meta = {
+        "description": "Passively extract juicy tidbits from scan data",
+        "created_date": "2022-06-27",
+        "author":
"@liquidsec", } - prefix_blacklist = ["javascript:", "mailto:", "tel:"] + options = { + "retain_querystring": False, + "yara_max_match_data": 2000, + "custom_yara_rules": "", + } + options_desc = { + "retain_querystring": "Keep the querystring intact on emitted WEB_PARAMETERS", + "yara_max_match_data": "Sets the maximum amount of text that can extracted from a YARA regex", + "custom_yara_rules": "Include custom Yara rules", + } + scope_distance_modifier = None + accept_dupes = False - def report(self, result, name, event, **kwargs): + _module_threads = 8 - spider_danger = kwargs.get("spider_danger", True) + parameter_blacklist = { + p.lower() + for p in [ + "__VIEWSTATE", + "__EVENTARGUMENT", + "__EVENTVALIDATION", + "__EVENTTARGET", + "__EVENTARGUMENT", + "__VIEWSTATEGENERATOR", + "__SCROLLPOSITIONY", + "__SCROLLPOSITIONX", + "ASP.NET_SessionId", + "JSESSIONID", + "PHPSESSID", + ] + } - tags = [] - parsed = getattr(event, "parsed", None) + yara_rule_name_regex = re.compile(r"rule\s(\w+)\s{") + yara_rule_regex = re.compile(r"(?s)((?:rule\s+\w+\s*{[^{}]*(?:{[^{}]*}[^{}]*)*[^{}]*(?:/\S*?}[^/]*?/)*)*})") - if (name == "a-tag" or name == "script-tag") and parsed: - path = html.unescape(result[1]).lstrip("/") - if not path.startswith("http://") and not path.startswith("https://"): - result = f"{event.parsed.scheme}://{event.parsed.netloc}/{path}" - else: - result = path + def in_bl(self, value): + return value.lower() in self.parameter_blacklist - for p in self.prefix_blacklist: - if path.startswith(p): - self.excavate.debug(f"omitted result from a-tag parser because of blacklisted prefix [{p}]") - return + def url_unparse(self, param_type, parsed_url): + if param_type == "GETPARAM": + querystring = "" + else: + querystring = parsed_url.query + + return urlunparse( + ( + parsed_url.scheme, + parsed_url.netloc, + parsed_url.path, + "", + querystring if self.retain_querystring else "", + "", + ) + ) - url_depth = self.excavate.helpers.url_depth(result) - web_spider_depth = self.excavate.scan.config.get("web_spider_depth", 1) - spider_distance = getattr(event, "web_spider_distance", 0) - web_spider_distance = self.excavate.scan.config.get("web_spider_distance", 0) - if spider_danger and (url_depth > web_spider_depth or spider_distance > web_spider_distance): - tags.append("spider-danger") + class ParameterExtractor(ExcavateRule): + yara_rules = {} - self.excavate.debug(f"Found URL [{result}] from parsing [{event.data.get('url')}] with regex [{name}]") - self.excavate.emit_event(result, "URL_UNVERIFIED", source=event, tags=tags) + class ParameterExtractorRule: + name = "" + def extract(self): + pass -class EmailExtractor(BaseExtractor): + def __init__(self, excavate, result): + self.excavate = excavate + self.result = result - regexes = {"email": _email_regex} + class GetJquery(ParameterExtractorRule): + name = "GET jquery" + discovery_regex = r"/\$.get\([^\)].+\)/ nocase" + extraction_regex = re.compile(r"\$.get\([\'\"](.+)[\'\"].+(\{.+\})\)") + output_type = "GETPARAM" - def report(self, result, name, event, **kwargs): - self.excavate.debug(f"Found email address [{result}] from parsing [{event.data.get('url')}]") - self.excavate.emit_event(result, "EMAIL_ADDRESS", source=event) + def convert_to_dict(self, extracted_str): + extracted_str = extracted_str.replace("'", '"') + extracted_str = re.sub(r"(\w+):", r'"\1":', extracted_str) + try: + return json.loads(extracted_str) + except json.JSONDecodeError as e: + self.excavate.debug(f"Failed to decode JSON: {e}") + return None + def extract(self): + 
extracted_results = self.extraction_regex.findall(str(self.result))
+                if extracted_results:
+                    for action, extracted_parameters in extracted_results:
+                        extracted_parameters_dict = self.convert_to_dict(extracted_parameters)
+                        for parameter_name, original_value in extracted_parameters_dict.items():
+                            yield (
+                                self.output_type,
+                                parameter_name,
+                                original_value,
+                                action,
+                                _exclude_key(extracted_parameters_dict, parameter_name),
+                            )
 
-class ErrorExtractor(BaseExtractor):
+        class PostJquery(GetJquery):
+            name = "POST jquery"
+            discovery_regex = r"/\$.post\([^\)].+\)/ nocase"
+            extraction_regex = re.compile(r"\$.post\([\'\"](.+)[\'\"].+(\{.+\})\)")
+            output_type = "POSTPARAM"
 
-    regexes = {
-        "PHP:1": r"\.php on line [0-9]+",
-        "PHP:2": r"\.php</b> on line [0-9]+",
-        "PHP:3": "Fatal error:",
-        "Microsoft SQL Server:1": r"\[(ODBC SQL Server Driver|SQL Server|ODBC Driver Manager)\]",
-        "Microsoft SQL Server:2": "You have an error in your SQL syntax; check the manual",
-        "Java:1": r"\.java:[0-9]+",
-        "Java:2": r"\.java\((Inlined )?Compiled Code\)",
-        "Perl": r"at (\/[A-Za-z0-9\.]+)*\.pm line [0-9]+",
-        "Python": r"File \"[A-Za-z0-9\-_\./]*\", line [0-9]+, in",
-        "Ruby": r"\.rb:[0-9]+:in",
-        "ASP.NET:1": "Exception of type",
-        "ASP.NET:2": "--- End of inner exception stack trace ---",
-        "ASP.NET:3": "Microsoft OLE DB Provider",
-        "ASP.NET:4": r"Error ([\d-]+) \([\dA-F]+\)",
-    }
+        class HtmlTags(ParameterExtractorRule):
+            name = "HTML Tags"
+            discovery_regex = r'/<[^>]+(href|src)=["\'][^"\']*["\'][^>]*>/ nocase'
+            extraction_regex = bbot_regexes.tag_attribute_regex
+            output_type = "GETPARAM"
 
-    def report(self, result, name, event, **kwargs):
-        self.excavate.debug(f"Found error message from parsing [{event.data.get('url')}] with regex [{name}]")
-        description = f"Error message Detected at Error Type: {name}"
-        self.excavate.emit_event(
-            {"host": str(event.host), "url": event.data.get("url", ""), "description": description},
-            "FINDING",
-            source=event,
-        )
+            def extract(self):
+                urls = self.extraction_regex.findall(str(self.result))
+                for url in urls:
+                    parsed_url = urlparse(url)
+                    query_strings = parse_qs(parsed_url.query)
+                    query_strings_dict = {
+                        k: v[0] if isinstance(v, list) and len(v) == 1 else v for k, v in query_strings.items()
+                    }
+                    for parameter_name, original_value in query_strings_dict.items():
+                        yield (
+                            self.output_type,
+                            parameter_name,
+                            original_value,
+                            url,
+                            _exclude_key(query_strings_dict, parameter_name),
+                        )
 
+        class GetForm(ParameterExtractorRule):
+            name = "GET Form"
+            discovery_regex = r'/<form[^>]*\bmethod=["\']?get["\']?[^>]*>.*<\/form>/s nocase'
+            form_content_regexes = [
+                bbot_regexes.input_tag_regex,
+                bbot_regexes.select_tag_regex,
+                bbot_regexes.textarea_tag_regex,
+            ]
+            extraction_regex = bbot_regexes.get_form_regex
+            output_type = "GETPARAM"
 
-class JWTExtractor(BaseExtractor):
+            def extract(self):
+                forms = self.extraction_regex.findall(str(self.result))
+                for form_action, form_content in forms:
+                    form_parameters = {}
+                    for form_content_regex in self.form_content_regexes:
+                        input_tags = form_content_regex.findall(form_content)
 
-    regexes = {"JWT": r"eyJ(?:[\w-]*\.)(?:[\w-]*\.)[\w-]*"}
+                        for parameter_name, original_value in input_tags:
+                            form_parameters[parameter_name] = original_value
 
-    def report(self, result, name, event, **kwargs):
-        self.excavate.debug(f"Found JWT candidate [{result}]")
-        try:
-            j.decode(result, options={"verify_signature": False})
-            jwt_headers = j.get_unverified_header(result)
-            tags = []
-            if jwt_headers["alg"].upper()[0:2] == "HS":
-                tags = ["crackable"]
["crackable"] - description = f"JWT Identified [{result}]" - self.excavate.emit_event( - {"host": str(event.host), "url": event.data.get("url", ""), "description": description}, - "FINDING", - event, - tags=tags, + for parameter_name, original_value in form_parameters.items(): + yield ( + self.output_type, + parameter_name, + original_value, + form_action, + _exclude_key(form_parameters, parameter_name), + ) + + class PostForm(GetForm): + name = "POST Form" + discovery_regex = r'/]*\bmethod=["\']?post["\']?[^>]*>.*<\/form>/s nocase' + extraction_regex = bbot_regexes.post_form_regex + output_type = "POSTPARAM" + + def __init__(self, excavate): + super().__init__(excavate) + self.parameterExtractorCallbackDict = {} + regexes_component_list = [] + parameterExtractorRules = find_subclasses(self, self.ParameterExtractorRule) + for r in parameterExtractorRules: + self.excavate.verbose(f"Including ParameterExtractor Submodule: {r.__name__}") + self.parameterExtractorCallbackDict[r.__name__] = r + regexes_component_list.append(f"${r.__name__} = {r.discovery_regex}") + regexes_component = " ".join(regexes_component_list) + self.yara_rules["parameter_extraction"] = ( + rf'rule parameter_extraction {{meta: description = "contains POST form" strings: {regexes_component} condition: any of them}}' ) - except j.exceptions.DecodeError: - self.excavate.debug(f"Error decoding JWT candidate {result}") + async def process(self, yara_results, event, yara_rule_settings, discovery_context): + for identifier, results in yara_results.items(): + for result in results: + if identifier not in self.parameterExtractorCallbackDict.keys(): + raise ExcavateError("ParameterExtractor YaraRule identified reference non-existent submodule") + parameterExtractorSubModule = self.parameterExtractorCallbackDict[identifier]( + self.excavate, result + ) + extracted_params = parameterExtractorSubModule.extract() + if extracted_params: + for ( + parameter_type, + parameter_name, + original_value, + endpoint, + additional_params, + ) in extracted_params: + self.excavate.debug( + f"Found Parameter [{parameter_name}] in [{parameterExtractorSubModule.name}] ParameterExtractor Submodule" + ) + endpoint = event.data["url"] if not endpoint else endpoint + url = ( + endpoint + if endpoint.startswith(("http://", "https://")) + else f"{event.parsed_url.scheme}://{event.parsed_url.netloc}{endpoint}" + ) + if self.excavate.helpers.validate_parameter(parameter_name, parameter_type): + if self.excavate.in_bl(parameter_name) is False: + parsed_url = urlparse(url) + if not parsed_url.hostname: + self.excavate.warning( + f"Error Parsing reconstructed URL [{url}] during parameter extraction, missing hostname" + ) + continue + description = f"HTTP Extracted Parameter [{parameter_name}] ({parameterExtractorSubModule.name} Submodule)" + data = { + "host": parsed_url.hostname, + "type": parameter_type, + "name": parameter_name, + "original_value": original_value, + "url": self.excavate.url_unparse(parameter_type, parsed_url), + "additional_params": additional_params, + "assigned_cookies": self.excavate.assigned_cookies, + "description": description, + } + await self.report( + data, event, yara_rule_settings, discovery_context, event_type="WEB_PARAMETER" + ) + else: + self.excavate.debug(f"blocked parameter [{parameter_name}] due to BL match") + else: + self.excavate.debug(f"blocked parameter [{parameter_name}] due to validation failure") -class SerializationExtractor(BaseExtractor): - regexes = {"Java": r"(?:[^a-zA-Z0-9+/]|^)(rO0[a-zA-Z0-9+/]+={,2})"} + 
class CSPExtractor(ExcavateRule): + yara_rules = { + "csp": r'rule csp { meta: tags = "affiliate" description = "contains CSP Header" strings: $csp = /Content-Security-Policy:[^\r\n]+/ nocase condition: $csp }', + } - def report(self, result, name, event, **kwargs): - description = f"{name} serialized object found" - self.excavate.emit_event( - {"host": str(event.host), "url": event.data.get("url"), "description": description}, "FINDING", event - ) + async def process(self, yara_results, event, yara_rule_settings, discovery_context): + for identifier in yara_results.keys(): + for csp_str in yara_results[identifier]: + domains = await self.excavate.scan.extract_in_scope_hostnames(csp_str) + for domain in domains: + await self.report(domain, event, yara_rule_settings, discovery_context, event_type="DNS_NAME") + class EmailExtractor(ExcavateRule): + yara_rules = { + "email": 'rule email { meta: description = "contains email address" strings: $email = /[^\\W_][\\w\\-\\.\\+\']{0,100}@[a-zA-Z0-9\\-]{1,100}(\\.[a-zA-Z0-9\\-]{1,100})*\\.[a-zA-Z]{2,63}/ nocase fullword condition: $email }', + } -class JavascriptExtractor(BaseExtractor): - # based on on https://github.com/m4ll0k/SecretFinder/blob/master/SecretFinder.py - - regexes = { - "google_api": r"AIza[0-9A-Za-z-_]{35}", - "firebase": r"AAAA[A-Za-z0-9_-]{7}:[A-Za-z0-9_-]{140}", - "google_oauth": r"ya29\.[0-9A-Za-z\-_]+", - "amazon_aws_access_key_id": r"A[SK]IA[0-9A-Z]{16}", - "amazon_mws_auth_toke": r"amzn\\.mws\\.[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}", - "amazon_aws_url": r"s3\.amazonaws.com[/]+|[a-zA-Z0-9_-]*\.s3\.amazonaws.com", - "amazon_aws_url2": r"[a-zA-Z0-9-\.\_]+\.s3\.amazonaws\.com", - "amazon_aws_url3": r"s3://[a-zA-Z0-9-\.\_]+", - "amazon_aws_url4": r"s3.amazonaws.com/[a-zA-Z0-9-\.\_]+", - "amazon_aws_url5": r"s3.console.aws.amazon.com/s3/buckets/[a-zA-Z0-9-\.\_]+", - "facebook_access_token": r"EAACEdEose0cBA[0-9A-Za-z]+", - "authorization_basic": r"(?i)basic [a-zA-Z0-9:_\+\/-]{4,100}={0,2}", - "authorization_bearer": r"bearer [a-zA-Z0-9_\-\.=:_\+\/]{5,100}", - "apikey": r"api(?:key|_key)\s?=\s?[\'\"\`][a-zA-Z0-9_\-]{5,100}[\'\"\`]", - "mailgun_api_key": r"key-[0-9a-zA-Z]{32}", - "paypal_braintree_access_token": r"access_token\$production\$[0-9a-z]{16}\$[0-9a-f]{32}", - "square_oauth_secret": r"sq0csp-[ 0-9A-Za-z\-_]{43}|sq0[a-z]{3}-[0-9A-Za-z\-_]{22,43}", - "square_access_token": r"sqOatp-[0-9A-Za-z\-_]{22}", - "stripe_standard_api": r"sk_live_[0-9a-zA-Z]{24}", - "stripe_restricted_api": r"rk_live_[0-9a-zA-Z]{24}", - "github_access_token": r"[a-zA-Z0-9_-]*:[a-zA-Z0-9_\-]+@github\.com*", - "rsa_private_key": r"-----BEGIN RSA PRIVATE KEY-----", - "ssh_dsa_private_key": r"-----BEGIN DSA PRIVATE KEY-----", - "ssh_dc_private_key": r"-----BEGIN EC PRIVATE KEY-----", - "pgp_private_block": r"-----BEGIN PGP PRIVATE KEY BLOCK-----", - "json_web_token": r"ey[A-Za-z0-9-_=]+\.[A-Za-z0-9-_=]+\.?[A-Za-z0-9-_.+/=]*$", - "slack_token": r"\"api_token\":\"(xox[a-zA-Z]-[a-zA-Z0-9-]+)\"", - "SSH_privKey": r"([-]+BEGIN [^\s]+ PRIVATE KEY[-]+[\s]*[^-]*[-]+END [^\s]+ PRIVATE KEY[-]+)", - "possible_creds_var": r"(?:password|passwd|pwd|pass)\s*=+\s*['\"][^\s'\"]{1,60}['\"]", - } + async def process(self, yara_results, event, yara_rule_settings, discovery_context): + for identifier in yara_results.keys(): + for email_str in yara_results[identifier]: + await self.report( + email_str, event, yara_rule_settings, discovery_context, event_type="EMAIL_ADDRESS" + ) - def report(self, result, name, event, **kwargs): + # Future Work: Emit a 
JWT Object, and make a new Module to ingest it. + class JWTExtractor(ExcavateRule): + yara_rules = { + "jwt": r'rule jwt { meta: emit_match = "True" description = "contains JSON Web Token (JWT)" strings: $jwt = /\beyJ[_a-zA-Z0-9\/+]*\.[_a-zA-Z0-9\/+]*\.[_a-zA-Z0-9\/+]*/ nocase condition: $jwt }', + } - # ensure that basic auth matches aren't false positives - if name == "authorization_basic": - try: - b64test = base64.b64decode(result.split(" ")[1].encode()) - if b":" not in b64test: - return - except (base64.binascii.Error, UnicodeDecodeError): - return + class ErrorExtractor(ExcavateRule): + signatures = { + "PHP_1": r"/\.php on line [0-9]+/", + "PHP_2": r"/\.php<\/b> on line <b>[0-9]+/", + "PHP_3": '"Fatal error:"', + "Microsoft_SQL_Server_1": r"/\[(ODBC SQL Server Driver|SQL Server|ODBC Driver Manager)\]/", + "Microsoft_SQL_Server_2": '"You have an error in your SQL syntax; check the manual"', + "Java_1": r"/\.java:[0-9]+/", + "Java_2": r"/\.java\((Inlined )?Compiled Code\)/", + "Perl": r"/at (\/[A-Za-z0-9\._]+)*\.pm line [0-9]+/", + "Python": r"/File \"[A-Za-z0-9\-_\.\/]*\", line [0-9]+, in/", + "Ruby": r"/\.rb:[0-9]+:in/", + "ASPNET_1": '"Exception of type"', + "ASPNET_2": '"--- End of inner exception stack trace ---"', + "ASPNET_3": '"Microsoft OLE DB Provider"', + "ASPNET_4": r"/Error ([\d-]+) \([\dA-F]+\)/", + } + yara_rules = {} - self.excavate.debug(f"Found Possible Secret in Javascript [{result}]") - description = f"Possible secret in JS [{result}] Signature [{name}]" - self.excavate.emit_event( - {"host": str(event.host), "url": event.data.get("url", ""), "description": description}, "FINDING", event - ) + def __init__(self, excavate): + super().__init__(excavate) + signature_component_list = [] + for signature_name, signature in self.signatures.items(): + signature_component_list.append(rf"${signature_name} = {signature}") + signature_component = " ".join(signature_component_list) + self.yara_rules["error_detection"] = ( + f'rule error_detection {{meta: description = "contains a verbose error message" strings: {signature_component} condition: any of them}}' + ) + async def process(self, yara_results, event, yara_rule_settings, discovery_context): + for identifier in yara_results.keys(): + for findings in yara_results[identifier]: + event_data = { + "description": f"{discovery_context} {yara_rule_settings.description} ({identifier})" + } + await self.report(event_data, event, yara_rule_settings, discovery_context, event_type="FINDING") -class excavate(BaseInternalModule): + class SerializationExtractor(ExcavateRule): + regexes = { + "Java": re.compile(r"[^a-zA-Z0-9\/+]rO0[a-zA-Z0-9+\/]+={0,2}"), + "DOTNET": re.compile(r"[^a-zA-Z0-9\/+]AAEAAAD\/\/[a-zA-Z0-9\/+]+={0,2}"), + "PHP_Array": re.compile(r"[^a-zA-Z0-9\/+]YTo[xyz0123456][a-zA-Z0-9+\/]+={0,2}"), + "PHP_String": re.compile(r"[^a-zA-Z0-9\/+]czo[xyz0123456][a-zA-Z0-9+\/]+={0,2}"), + "PHP_Object": re.compile(r"[^a-zA-Z0-9\/+]Tzo[xyz0123456][a-zA-Z0-9+\/]+={0,2}"), + "Possible_Compressed": re.compile(r"[^a-zA-Z0-9\/+]H4sIAAAAAAAA[a-zA-Z0-9+\/]+={0,2}"), + } + yara_rules = {} - watched_events = ["HTTP_RESPONSE"] - produced_events = ["URL_UNVERIFIED"] - flags = ["passive"] - meta = {"description": "Passively extract juicy tidbits from scan data"} + def __init__(self, excavate): + super().__init__(excavate) + regexes_component_list = [] + for regex_name, regex in self.regexes.items(): + regexes_component_list.append(rf"${regex_name} = /\b{regex.pattern}/ nocase") + regexes_component = " ".join(regexes_component_list) + self.yara_rules["serialization_detection"] = ( + f'rule serialization_detection {{meta: description = "contains a possible serialized object" strings: {regexes_component} condition: any of them}}' + ) - scope_distance_modifier = None + async def process(self, yara_results, event, yara_rule_settings, discovery_context): + for identifier in yara_results.keys(): + for findings in yara_results[identifier]: + event_data = { + "description": f"{discovery_context} {yara_rule_settings.description} ({identifier})" + } + await self.report(event_data, event, yara_rule_settings, discovery_context, event_type="FINDING") + + class FunctionalityExtractor(ExcavateRule): + yara_rules = { + "File_Upload_Functionality": r'rule File_Upload_Functionality { meta: description = "contains file upload functionality" strings: $fileuploadfunc = /<input[^>]+type=["\']?file["\']?[^>]+>/ nocase condition: $fileuploadfunc }', + "Web_Service_WSDL": r'rule Web_Service_WSDL { meta: emit_match = "True" description = "contains a web service WSDL URL" strings: $wsdl = /https?:\/\/[^\s]*\.(wsdl)/ nocase condition: $wsdl }', + } + + class NonHttpSchemeExtractor(ExcavateRule): + yara_rules = { + "Non_HTTP_Scheme": r'rule Non_HTTP_Scheme { meta: description = "contains non-http scheme URL" strings: $nonhttpscheme = /\b\w{2,35}:\/\/[\w.-]+(:\d+)?\b/ nocase fullword condition: $nonhttpscheme }' + } + + scheme_blacklist = ["javascript", "mailto", "tel", "data", "vbscript", "about", "file"] + + async def process(self, yara_results, event, yara_rule_settings, discovery_context): + for results in yara_results.values(): + for url_str in results: + scheme = url_str.split("://")[0] + if scheme in self.scheme_blacklist: + continue + if scheme not in self.excavate.valid_schemes: + continue + try: + parsed_url = urlparse(url_str) + except Exception as e: + self.excavate.debug(f"Error parsing URI {url_str}: {e}") + continue + netloc = getattr(parsed_url, "netloc", None) + if netloc is None: + continue + try: + host, port = self.excavate.helpers.split_host_port(parsed_url.netloc) + except ValueError as e: + self.excavate.debug(f"Failed to parse netloc: {e}") + continue + if parsed_url.scheme in ["http", "https"]: + continue + + def abort_if(e): + return e.scope_distance > 0 + + finding_data = {"host": str(host), "description": f"Non-HTTP URI: {parsed_url.geturl()}"} + await self.report(finding_data, event, yara_rule_settings, discovery_context, abort_if=abort_if) + protocol_data = {"protocol": parsed_url.scheme, "host": str(host)} + if port: + protocol_data["port"] = port + await self.report( + protocol_data, + event, + yara_rule_settings, + discovery_context, + event_type="PROTOCOL", + abort_if=abort_if, + ) + + class URLExtractor(ExcavateRule): + yara_rules = { + "url_full": ( + r""" + rule url_full { + meta: + tags = "spider-danger" + description = "contains full URL" + strings: + $url_full = /https?:\/\/([\w\.-]+)(:\d{1,5})?([\/\w\.-]*)/ + condition: + $url_full + } + """ + ), + "url_attr": ( + r""" + rule url_attr { + meta: + tags = "spider-danger" + description = "contains tag with src or href attribute" + strings: + $url_attr = /<[^>]+(href|src)=["\'][^"\']*["\'][^>]*>/ + condition: + $url_attr + } + """ + ), + } + full_url_regex = re.compile(r"(https?)://(\w(?:[\w-]+\.?)+(?::\d{1,5})?(?:/[-\w\.\(\)]*[-\w\.]+)*/?)") + full_url_regex_strict = re.compile(r"^(https?):\/\/([\w.-]+)(?::\d{1,5})?(\/[\w\/\.-]*)?(\?[^\s]+)?$") + tag_attribute_regex = bbot_regexes.tag_attribute_regex + + async def process(self, yara_results, event, yara_rule_settings, discovery_context): + for identifier, results in yara_results.items(): + urls_found = 0 + final_url = "" + for url_str in results: + if identifier == "url_full": + if not await self.helpers.re.search(self.full_url_regex, url_str): + self.excavate.debug( + f"Rejecting potential full URL [{url_str}] as did not match full_url_regex" + ) + continue + final_url = url_str + + self.excavate.debug(f"Discovered Full URL [{final_url}]") + elif identifier == "url_attr" and hasattr(event, "parsed_url"): + m = await self.helpers.re.search(self.tag_attribute_regex, url_str) + if not m: + self.excavate.debug( + f"Rejecting potential attribute URL [{url_str}] as did not match tag_attribute_regex" + ) + continue + unescaped_url = html.unescape(m.group(1)) + source_url = event.parsed_url.geturl() + final_url = urldefrag(urljoin(source_url, unescaped_url)).url + if not await self.helpers.re.search(self.full_url_regex_strict, final_url): + self.excavate.debug( + f"Rejecting reconstructed URL [{final_url}] as did not match full_url_regex_strict" + ) + continue + self.excavate.debug( + f"Reconstructed Full URL [{final_url}] from extracted relative URL [{unescaped_url}] " + ) + + if final_url: + if self.excavate.scan.in_scope(final_url): + urls_found += 1 + + await self.report( + final_url, + event, + yara_rule_settings, + discovery_context, + event_type="URL_UNVERIFIED", + urls_found=urls_found, + ) + + async def report_prep(self, event_data, event_type, event, tags, **kwargs): + event_draft = self.excavate.make_event(event_data, event_type, parent=event) + if not event_draft: + return None + url_in_scope = self.excavate.scan.in_scope(event_draft) + urls_found = kwargs.get("urls_found", None) + if urls_found: + exceeds_max_links = urls_found > self.excavate.scan.web_spider_links_per_page and url_in_scope + if exceeds_max_links: + tags.append("spider-max") + event_draft.add_tags(tags) + return event_draft + + class HostnameExtractor(ExcavateRule): + yara_rules = {} + + def __init__(self, excavate): + super().__init__(excavate) + self.yara_rules.update(excavate.scan.dns_yara_rules_uncompiled) + + async def process(self, yara_results, event, yara_rule_settings, discovery_context): + for identifier in yara_results.keys(): + for domain_str in yara_results[identifier]: + await self.report(domain_str, event, yara_rule_settings, discovery_context, event_type="DNS_NAME") - deps_pip = ["pyjwt"] + class LoginPageExtractor(ExcavateRule): + yara_rules = { + "login_page": r""" + rule login_page { + meta: + description = "Detects login pages with username and password fields" + strings: + $username_field = /<input[^>]+name=["']?(user|login|email)/ nocase + $password_field = /<input[^>]+name=["']?passw?/ nocase + condition: + $username_field and $password_field + } + """ + } - def setup(self): + async def process(self, yara_results, event, yara_rule_settings, discovery_context): + if yara_results: + event.add_tag("login-page") - self.hostname = HostnameExtractor(self) - self.url = URLExtractor(self) - self.email = EmailExtractor(self) - self.error = ErrorExtractor(self) - self.jwt = JWTExtractor(self) - self.javascript = JavascriptExtractor(self) - self.serialization = SerializationExtractor(self) + def add_yara_rule(self, rule_name, rule_content, rule_instance): + rule_instance.name = rule_name + self.yara_rules_dict[rule_name] = rule_content + self.yara_preprocess_dict[rule_name] = rule_instance.preprocess + + async def extract_yara_rules(self, rules_content): + for r in await self.helpers.re.findall(self.yara_rule_regex, rules_content): +
yield r + + async def setup(self): + self.yara_rules_dict = {} + self.yara_preprocess_dict = {} + + modules_WEB_PARAMETER = [ + module_name + for module_name, module in self.scan.modules.items() + if "WEB_PARAMETER" in module.watched_events + ] + + self.parameter_extraction = bool(modules_WEB_PARAMETER) + + self.retain_querystring = False + if self.config.get("retain_querystring", False) is True: + self.retain_querystring = True + + for module in self.scan.modules.values(): + if not str(module).startswith("_"): + ExcavateRules = find_subclasses(module, ExcavateRule) + for e in ExcavateRules: + self.debug(f"Including Submodule {e.__name__}") + if e.__name__ == "ParameterExtractor": + message = ( + "Parameter Extraction disabled because no modules consume WEB_PARAMETER events" + if not self.parameter_extraction + else f"Parameter Extraction enabled because the following modules consume WEB_PARAMETER events: [{', '.join(modules_WEB_PARAMETER)}]" + ) + self.debug(message) if not self.parameter_extraction else self.hugeinfo(message) + # do not add parameter extraction yara rules if it's disabled + if not self.parameter_extraction: + continue + excavateRule = e(self) + for rule_name, rule_content in excavateRule.yara_rules.items(): + self.add_yara_rule(rule_name, rule_content, excavateRule) + + self.custom_yara_rules = str(self.config.get("custom_yara_rules", "")) + if self.custom_yara_rules: + custom_rules_count = 0 + if Path(self.custom_yara_rules).is_file(): + with open(self.custom_yara_rules) as f: + rules_content = f.read() + self.debug(f"Successfully loaded custom yara rules file [{self.custom_yara_rules}]") + else: + self.debug("Custom yara rules file is NOT a file. Will attempt to treat it as rule content") + rules_content = self.custom_yara_rules + + self.debug(f"Final combined yara rule contents: {rules_content}") + custom_yara_rule_processed = self.extract_yara_rules(rules_content) + async for rule_content in custom_yara_rule_processed: + try: + yara.compile(source=rule_content) + except yara.SyntaxError as e: + return False, f"Custom Yara rule failed to compile: {e}" + + rule_match = await self.helpers.re.search(self.yara_rule_name_regex, rule_content) + if not rule_match: + return False, "Custom Yara formatted incorrectly: could not find rule name" + + rule_name = rule_match.groups(1)[0] + c = CustomExtractor(self) + self.add_yara_rule(rule_name, rule_content, c) + custom_rules_count += 1 + if custom_rules_count > 0: + self.hugeinfo(f"Successfully added {str(custom_rules_count)} custom Yara rule(s)") + + yara_max_match_data = self.config.get("yara_max_match_data", 2000) + + yara.set_config(max_match_data=yara_max_match_data) + yara_rules_combined = "\n".join(self.yara_rules_dict.values()) + try: + start = time.time() + self.verbose(f"Compiling {len(self.yara_rules_dict):,} YARA rules") + for rule_name, rule_content in self.yara_rules_dict.items(): + self.debug(f" - {rule_name}") + self.yara_rules = yara.compile(source=yara_rules_combined) + self.verbose(f"{len(self.yara_rules_dict):,} YARA rules compiled in {time.time() - start:.2f} seconds") + except yara.SyntaxError as e: + self.debug(yara_rules_combined) + return False, f"Yara Rules failed to compile with error: [{e}]" + + # pre-load valid URL schemes + valid_schemes_filename = self.helpers.wordlist_dir / "valid_url_schemes.txt" + self.valid_schemes = set(self.helpers.read_file(valid_schemes_filename)) + + self.url_querystring_remove = self.scan.config.get("url_querystring_remove", True) return True - def search(self, source, 
extractors, event, **kwargs): - for e in extractors: - e.search(source, event, **kwargs) + async def search(self, data, event, content_type, discovery_context="HTTP response"): + if not data: + return None + + decoded_data = await self.helpers.re.recursive_decode(data) - def handle_event(self, event): + if self.parameter_extraction: + content_type_lower = content_type.lower() if content_type else "" + extraction_map = { + "json": self.helpers.extract_params_json, + "xml": self.helpers.extract_params_xml, + } + + for source_type, extract_func in extraction_map.items(): + if source_type in content_type_lower: + results = extract_func(data) + if results: + for parameter_name, original_value in results: + description = ( + f"HTTP Extracted Parameter (speculative from {source_type} content) [{parameter_name}]" + ) + data = { + "host": str(event.host), + "type": "SPECULATIVE", + "name": parameter_name, + "original_value": original_value, + "url": str(event.data["url"]), + "additional_params": {}, + "assigned_cookies": self.assigned_cookies, + "description": description, + } + context = f"excavate's Parameter extractor found a speculative WEB_PARAMETER: {parameter_name} by parsing {source_type} data from {str(event.host)}" + await self.emit_event(data, "WEB_PARAMETER", event, context=context) + return - data = event.data + for result in self.yara_rules.match(data=f"{data}\n{decoded_data}"): + rule_name = result.rule + if rule_name in self.yara_preprocess_dict: + await self.yara_preprocess_dict[rule_name](result, event, discovery_context) + else: + self.hugewarning(f"YARA Rule {rule_name} not found in pre-compiled rules") - # HTTP_RESPONSE is a special case + async def handle_event(self, event): if event.type == "HTTP_RESPONSE": + # Harvest GET parameters from URL, if it came directly from the target, and parameter extraction is enabled + if ( + self.parameter_extraction is True + and self.url_querystring_remove is False + and str(event.parent.parent.module) == "TARGET" + ): + self.debug(f"Processing target URL [{urlunparse(event.parsed_url)}] for GET parameters") + for ( + method, + parsed_url, + parameter_name, + original_value, + regex_name, + additional_params, + ) in extract_params_url(event.parsed_url): + if self.in_bl(parameter_name) is False: + description = f"HTTP Extracted Parameter [{parameter_name}] (Target URL)" + data = { + "host": parsed_url.hostname, + "type": "GETPARAM", + "name": parameter_name, + "original_value": original_value, + "url": self.url_unparse("GETPARAM", parsed_url), + "description": description, + "additional_params": additional_params, + } + context = f"Excavate parsed a URL directly from the scan target for parameters and found [GETPARAM] Parameter Name: [{parameter_name}] and emitted a WEB_PARAMETER for it" + await self.emit_event(data, "WEB_PARAMETER", event, context=context) + + data = event.data + + # process response data + body = event.data.get("body", "") + headers = event.data.get("header-dict", {}) + if body == "" and headers == {}: + return - # handle redirects - location = event.data.get("location", "") - if location: - if not location.lower().startswith("http"): - location = event.parsed._replace(path=location).geturl() - self.emit_event(location, "URL_UNVERIFIED", event) + self.assigned_cookies = {} + content_type = None + reported_location_header = False - body = event.data.get("response-body", "") - self.search( + for header, header_values in headers.items(): + for header_value in header_values: + if header.lower() == "set-cookie" and 
self.parameter_extraction: + if "=" not in header_value: + self.debug(f"Cookie found without '=': {header_value}") + continue + else: + cookie_name = header_value.split("=")[0] + cookie_value = header_value.split("=")[1].split(";")[0] + + if self.in_bl(cookie_value) is False: + self.assigned_cookies[cookie_name] = cookie_value + description = f"Set-Cookie Assigned Cookie [{cookie_name}]" + data = { + "host": str(event.host), + "type": "COOKIE", + "name": cookie_name, + "original_value": cookie_value, + "url": self.url_unparse("COOKIE", event.parsed_url), + "description": description, + } + context = f"Excavate noticed a set-cookie header for cookie [{cookie_name}] and emitted a WEB_PARAMETER for it" + await self.emit_event(data, "WEB_PARAMETER", event, context=context) + else: + self.debug(f"blocked cookie parameter [{cookie_name}] due to BL match") + if header.lower() == "location": + redirect_location = getattr(event, "redirect_location", "") + if redirect_location: + scheme = self.helpers.is_uri(redirect_location, return_scheme=True) + if scheme in ("http", "https"): + web_spider_distance = getattr(event, "web_spider_distance", 0) + num_redirects = max(getattr(event, "num_redirects", 0), web_spider_distance) + if num_redirects <= self.scan.web_max_redirects: + # we do not want to allow the web_spider_distance to be incremented on redirects, so we do not add spider-danger tag + url_event = self.make_event( + redirect_location, "URL_UNVERIFIED", event, tags="affiliate" + ) + if url_event is not None: + reported_location_header = True + await self.emit_event( + url_event, + context=f'excavate looked in "Location" header and found {url_event.type}: {url_event.data}', + ) + + # Try to extract parameters from the redirect URL + if self.parameter_extraction: + for ( + method, + parsed_url, + parameter_name, + original_value, + regex_name, + additional_params, + ) in extract_params_location(header_value, event.parsed_url): + if self.in_bl(parameter_name) is False: + description = f"HTTP Extracted Parameter [{parameter_name}] (Location Header)" + data = { + "host": parsed_url.hostname, + "type": "GETPARAM", + "name": parameter_name, + "original_value": original_value, + "url": self.url_unparse("GETPARAM", parsed_url), + "description": description, + "additional_params": additional_params, + } + context = f"Excavate parsed a location header for parameters and found [GETPARAM] Parameter Name: [{parameter_name}] and emitted a WEB_PARAMETER for it" + await self.emit_event(data, "WEB_PARAMETER", event, context=context) + else: + self.warning("location header found but missing redirect_location in HTTP_RESPONSE") + if header.lower() == "content-type": + content_type = headers["content-type"][0] + + await self.search( body, - [self.hostname, self.url, self.email, self.error, self.jwt, self.javascript, self.serialization], event, - spider_danger=True, + content_type, + discovery_context="HTTP response (body)", ) - headers = event.data.get("response-header", "") - self.search( - headers, - [self.hostname, self.url, self.email, self.error, self.jwt, self.serialization], + if reported_location_header: + # Location header should be removed if we already found and emitted a result. + # Failure to do so results in a race against the same URL extracted by the URLExtractor submodule + # If the extracted URL wins, it will cause the manual one to be a dupe, but it will have a higher web_spider_distance. 
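(Editor's aside: the pattern threaded through this diff is one uncompiled YARA rule per extractor submodule, all compiled into a single ruleset whose matches are routed back to the owning submodule by rule name. Below is a minimal, self-contained sketch of that compile-and-dispatch flow using the yara-python package; the rules and handlers are illustrative stand-ins, not BBOT's actual ones.)

```python
# Minimal sketch of the compile-and-dispatch pattern used by excavate.
# Rules and handlers here are illustrative, not BBOT's real ones.
import yara

# One uncompiled rule per "submodule", keyed by rule name
# (mirrors excavate's yara_rules_dict).
RULES = {
    "email": r'rule email { strings: $e = /[\w.+-]+@[\w-]+\.[a-z]{2,}/ nocase condition: $e }',
    "jwt": r'rule jwt { strings: $j = /\beyJ[\w\/+]*\.[\w\/+]*\.[\w\/+]*/ condition: $j }',
}

def handle_email(match):
    print(f"[{match.rule}] matched")

def handle_jwt(match):
    print(f"[{match.rule}] matched")

# Rule name -> callback (mirrors excavate's yara_preprocess_dict).
DISPATCH = {"email": handle_email, "jwt": handle_jwt}

# Compile everything into one ruleset, as setup() does above.
rules = yara.compile(source="\n".join(RULES.values()))

body = "contact admin@example.com token=eyJhbGciOi.eyJzdWIi.sflKxwRJ"
for match in rules.match(data=body):
    DISPATCH[match.rule](match)  # route each hit to its owning handler
```

The real module layers async processing, per-rule meta settings, and event emission on top, but the routing core is the same name-keyed lookup.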
+ headers.pop("location") + headers_str = "\n".join(f"{k}: {v}" for k, values in headers.items() for v in values) + + await self.search( + headers_str, event, - spider_danger=False, + content_type, + discovery_context="HTTP response (headers)", ) - else: - - self.search( - str(data), [self.hostname, self.url, self.email, self.error, self.jwt, self.serialization], event + await self.search( + event.data, + event, + content_type="", + discovery_context="Parsed file content", ) diff --git a/bbot/modules/internal/speculate.py b/bbot/modules/internal/speculate.py index e67024d7ce..2555cd7d7e 100644 --- a/bbot/modules/internal/speculate.py +++ b/bbot/modules/internal/speculate.py @@ -1,5 +1,7 @@ +import random import ipaddress +from bbot.core.helpers import validators from bbot.modules.internal.base import BaseInternalModule @@ -9,69 +11,197 @@ class speculate(BaseInternalModule): in situations where e.g. a port scanner isn't enabled """ - watched_events = ["IP_RANGE", "URL", "URL_UNVERIFIED", "DNS_NAME", "IP_ADDRESS", "HTTP_RESPONSE"] - produced_events = ["DNS_NAME", "OPEN_TCP_PORT", "IP_ADDRESS"] + watched_events = [ + "IP_RANGE", + "URL", + "URL_UNVERIFIED", + "DNS_NAME", + "DNS_NAME_UNRESOLVED", + "IP_ADDRESS", + "HTTP_RESPONSE", + "STORAGE_BUCKET", + "SOCIAL", + "AZURE_TENANT", + "USERNAME", + ] + produced_events = ["DNS_NAME", "OPEN_TCP_PORT", "IP_ADDRESS", "FINDING", "ORG_STUB"] flags = ["passive"] - meta = {"description": "Derive certain event types from others by common sense"} + meta = { + "description": "Derive certain event types from others by common sense", + "created_date": "2022-05-03", + "author": "@liquidsec", + } - options = {"max_hosts": 65536} - options_desc = {"max_hosts": "Max number of IP_RANGE hosts to convert into IP_ADDRESS events"} - max_event_handlers = 5 - scope_distance_modifier = 0 - _scope_shepherding = False + options = {"max_hosts": 65536, "ports": "80,443", "essential_only": False} + options_desc = { + "max_hosts": "Max number of IP_RANGE hosts to convert into IP_ADDRESS events", + "ports": "The set of ports to speculate on", + "essential_only": "Only enable essential speculate features (no extra discovery)", + } + scope_distance_modifier = 1 + _priority = 4 - def setup(self): - self.open_port_consumers = any(["OPEN_TCP_PORT" in m.watched_events for m in self.scan.modules.values()]) - self.portscanner_enabled = any(["portscan" in m.flags for m in self.scan.modules.values()]) + default_discovery_context = "speculated {event.type}: {event.data}" + + async def setup(self): + scan_modules = [m for m in self.scan.modules.values() if m._type == "scan"] + self.open_port_consumers = any("OPEN_TCP_PORT" in m.watched_events for m in scan_modules) + # only consider active portscanners (still speculate if only passive ones are enabled) + self.portscanner_enabled = any( + "portscan" in m.flags and "active" in m.flags for m in self.scan.modules.values() + ) + self.emit_open_ports = self.open_port_consumers and not self.portscanner_enabled self.range_to_ip = True - target_len = len(self.scan.target) + self.dns_disable = self.scan.config.get("dns", {}).get("disable", False) + self.essential_only = self.config.get("essential_only", False) + self.org_stubs_seen = set() + + port_string = self.config.get("ports", "80,443") + try: + self.ports = self.helpers.parse_port_string(str(port_string)) + except ValueError as e: + return False, f"Error parsing ports: {e}" + + if not self.portscanner_enabled: + self.info(f"No portscanner enabled. 
Assuming open ports: {', '.join(str(x) for x in self.ports)}") + + target_len = len(self.scan.target.seeds) if target_len > self.config.get("max_hosts", 65536): if not self.portscanner_enabled: self.hugewarning( f"Selected target ({target_len:,} hosts) is too large, skipping IP_RANGE --> IP_ADDRESS speculation" ) - self.hugewarning(f"Enabling a port scanner module is highly recommended") + self.hugewarning('Enabling the "portscan" module is highly recommended') self.range_to_ip = False + return True - def handle_event(self, event): + async def handle_event(self, event): + ### BEGIN ESSENTIAL SPECULATION ### + # These features are required for smooth operation of bbot + # I.e. they are not "osinty" or intended to discover anything, they only complement other modules + + # we speculate on distance-1 stuff too, because distance-1 open ports are needed by certain modules like sslcert + event_in_scope_distance = event.scope_distance <= (self.scan.scope_search_distance + 1) + speculate_open_ports = self.emit_open_ports and event_in_scope_distance + # generate individual IP addresses from IP range if event.type == "IP_RANGE" and self.range_to_ip: net = ipaddress.ip_network(event.data) - for x in net: - self.emit_event(x, "IP_ADDRESS", source=event, internal=True) + ips = list(net) + random.shuffle(ips) + for ip in ips: + await self.emit_event( + ip, + "IP_ADDRESS", + parent=event, + internal=True, + context=f"speculate converted range into individual IP_ADDRESS: {ip}", + ) + + # IP_ADDRESS / DNS_NAME --> OPEN_TCP_PORT + if speculate_open_ports: + # don't act on unresolved DNS_NAMEs + usable_dns = False + if event.type == "DNS_NAME": + if self.dns_disable or event.resolved_hosts: + usable_dns = True + + if event.type == "IP_ADDRESS" or usable_dns: + for port in self.ports: + await self.emit_event( + self.helpers.make_netloc(event.data, port), + "OPEN_TCP_PORT", + parent=event, + internal=True, + context="speculated {event.type}: {event.data}", + ) + + ### END ESSENTIAL SPECULATION ### + if self.essential_only: + return # parent domains - if event.type == "DNS_NAME": - parent = self.helpers.parent_domain(event.data) + if event.type.startswith("DNS_NAME"): + parent = self.helpers.parent_domain(event.host_original) if parent != event.data: - self.emit_event(parent, "DNS_NAME", source=event, internal=True) - - # generate open ports - emit_open_ports = self.open_port_consumers and not self.portscanner_enabled - # from URLs - if event.type == "URL" or (event.type == "URL_UNVERIFIED" and emit_open_ports): - if event.host and event.port not in (80, 443): - self.emit_event( - self.helpers.make_netloc(event.host, event.port), "OPEN_TCP_PORT", source=event, internal=True + await self.emit_event( + parent, "DNS_NAME", parent=event, context="speculated parent {event.type}: {event.data}" ) - # from hosts - if emit_open_ports: - # don't act on unresolved DNS_NAMEs - if event.type == "IP_ADDRESS" or ( - event.type == "DNS_NAME" and any([x in event.tags for x in ("a_record", "aaaa_record")]) - ): - self.emit_event(self.helpers.make_netloc(event.data, 80), "OPEN_TCP_PORT", source=event, internal=True) - self.emit_event( - self.helpers.make_netloc(event.data, 443), "OPEN_TCP_PORT", source=event, internal=True + + # URL --> OPEN_TCP_PORT + event_is_url = event.type == "URL" + if event_is_url or (event.type == "URL_UNVERIFIED" and self.open_port_consumers): + # only speculate port from a URL if it wouldn't be speculated naturally from the host + if event.host and (event.port not in self.ports or not
speculate_open_ports): + await self.emit_event( + self.helpers.make_netloc(event.host, event.port), + "OPEN_TCP_PORT", + parent=event, + internal=not event_is_url, # if the URL is verified, the port is definitely open + context=f"speculated {{event.type}} from {event.type}: {{event.data}}", ) - def filter_event(self, event): - # don't accept IP_RANGE --> IP_ADDRESS events from self - if str(event.module) == "speculate": - if not (event.type == "IP_ADDRESS" and str(getattr(event.source, "type")) == "IP_RANGE"): - return False - # don't accept errored DNS_NAMEs - if any(t in event.tags for t in ("dns-error", "unresolved")): - return False - return True + # speculate sub-directory URLS from URLS + if event.type == "URL": + url_parents = self.helpers.url_parents(event.data) + for up in url_parents: + url_event = self.make_event(f"{up}/", "URL_UNVERIFIED", parent=event) + if url_event is not None: + # inherit web spider distance from parent (don't increment) + parent_web_spider_distance = getattr(event, "web_spider_distance", 0) + url_event.web_spider_distance = parent_web_spider_distance + await self.emit_event(url_event, context="speculated web sub-directory {event.type}: {event.data}") + + # speculate URL_UNVERIFIED from URL or any event with "url" attribute + event_is_url = event.type == "URL" + event_has_url = isinstance(event.data, dict) and "url" in event.data + event_tags = ["httpx-safe"] if event.type in ("CODE_REPOSITORY", "SOCIAL") else [] + if event_is_url or event_has_url: + if event_is_url: + url = event.data + else: + url = event.data["url"] + # only emit the url if it's not already in the event's history + if not any(e.type == "URL_UNVERIFIED" and e.data == url for e in event.get_parents()): + await self.emit_event( + url, + "URL_UNVERIFIED", + tags=event_tags, + parent=event, + context="speculated {event.type}: {event.data}", + ) + + # ORG_STUB from TLD, SOCIAL, AZURE_TENANT + org_stubs = set() + if event.type == "DNS_NAME" and event.scope_distance == 0: + tldextracted = self.helpers.tldextract(event.data) + registered_domain = getattr(tldextracted, "registered_domain", "") + if registered_domain: + tld_stub = getattr(tldextracted, "domain", "") + if tld_stub: + decoded_tld_stub = self.helpers.smart_decode_punycode(tld_stub) + org_stubs.add(decoded_tld_stub) + org_stubs.add(self.helpers.unidecode(decoded_tld_stub)) + elif event.type == "SOCIAL": + stub = event.data.get("stub", "") + if stub: + org_stubs.add(stub.lower()) + elif event.type == "AZURE_TENANT": + tenant_names = event.data.get("tenant-names", []) + org_stubs.update(set(tenant_names)) + for stub in org_stubs: + stub_hash = hash(stub) + if stub_hash not in self.org_stubs_seen: + self.org_stubs_seen.add(stub_hash) + stub_event = self.make_event(stub, "ORG_STUB", parent=event) + if stub_event: + await self.emit_event(stub_event, context="speculated {event.type}: {event.data}") + + # USERNAME --> EMAIL + if event.type == "USERNAME": + email = event.data.split(":", 1)[-1] + if validators.soft_validate(email, "email"): + email_event = self.make_event(email, "EMAIL_ADDRESS", parent=event, tags=["affiliate"]) + if email_event: + await self.emit_event(email_event, context="detected {event.type}: {event.data}") diff --git a/bbot/modules/internal/unarchive.py b/bbot/modules/internal/unarchive.py new file mode 100644 index 0000000000..f95cb0765f --- /dev/null +++ b/bbot/modules/internal/unarchive.py @@ -0,0 +1,82 @@ +from pathlib import Path +from bbot.modules.internal.base import BaseInternalModule +from 
bbot.core.helpers.libmagic import get_magic_info, get_compression + + +class unarchive(BaseInternalModule): + watched_events = ["FILESYSTEM"] + produced_events = ["FILESYSTEM"] + flags = ["passive", "safe"] + meta = { + "description": "Extract different types of files into folders on the filesystem", + "created_date": "2024-12-08", + "author": "@domwhewell-sage", + } + + async def setup(self): + self.ignore_compressions = ["application/java-archive", "application/vnd.android.package-archive"] + self.compression_methods = { + "zip": ["7z", "x", '-p""', "-aoa", "{filename}", "-o{extract_dir}/"], + "bzip2": ["tar", "--overwrite", "-xvjf", "{filename}", "-C", "{extract_dir}/"], + "xz": ["tar", "--overwrite", "-xvJf", "{filename}", "-C", "{extract_dir}/"], + "7z": ["7z", "x", '-p""', "-aoa", "{filename}", "-o{extract_dir}/"], + # "rar": ["7z", "x", '-p""', "-aoa", "{filename}", "-o{extract_dir}/"], + # "lzma": ["7z", "x", '-p""', "-aoa", "{filename}", "-o{extract_dir}/"], + "tar": ["tar", "--overwrite", "-xvf", "{filename}", "-C", "{extract_dir}/"], + "gzip": ["tar", "--overwrite", "-xvzf", "{filename}", "-C", "{extract_dir}/"], + } + return True + + async def filter_event(self, event): + if "file" in event.tags: + if event.data["magic_mime_type"] in self.ignore_compressions: + return False, f"Ignoring file type: {event.data['magic_mime_type']}, {event.data['path']}" + if "compression" in event.data: + if not event.data["compression"] in self.compression_methods: + return ( + False, + f"Extract unable to handle file type: {event.data['compression']}, {event.data['path']}", + ) + else: + return False, f"Event is not a compressed file: {event.data['path']}" + else: + return False, "Event is not a file" + return True + + async def handle_event(self, event): + path = Path(event.data["path"]) + output_dir = path.parent / path.name.replace(".", "_") + + # Use the appropriate extraction method based on the file type + self.info(f"Extracting {path} to {output_dir}") + success = await self.extract_file(path, output_dir) + + # If the extraction was successful, emit the event + if success: + await self.emit_event( + {"path": str(output_dir)}, + "FILESYSTEM", + tags=["folder", "unarchived-folder"], + parent=event, + context=f'extracted "{path}" to: {output_dir}', + ) + else: + output_dir.rmdir() + + async def extract_file(self, path, output_dir): + extension, mime_type, description, confidence = get_magic_info(path) + compression_format = get_compression(mime_type) + cmd_list = self.compression_methods.get(compression_format, []) + if cmd_list: + if not output_dir.exists(): + self.helpers.mkdir(output_dir) + command = [s.format(filename=path, extract_dir=output_dir) for s in cmd_list] + try: + await self.run_process(command, check=True) + for item in output_dir.iterdir(): + if item.is_file(): + await self.extract_file(item, output_dir / item.stem) + except Exception as e: + self.warning(f"Error extracting {path}. 
Error: {e}") + return False + return True diff --git a/bbot/modules/internetdb.py b/bbot/modules/internetdb.py new file mode 100644 index 0000000000..52c5040b28 --- /dev/null +++ b/bbot/modules/internetdb.py @@ -0,0 +1,147 @@ +from bbot.modules.base import BaseModule + + +class internetdb(BaseModule): + """ + Query IP in Shodan InternetDB, returning open ports, discovered technologies, and findings/vulnerabilities + API reference: https://internetdb.shodan.io/docs + + Example API response: + + { + "cpes": [ + "cpe:/a:microsoft:internet_information_services", + "cpe:/a:microsoft:outlook_web_access:15.0.1367", + ], + "hostnames": [ + "autodiscover.evilcorp.com", + "mail.evilcorp.com", + ], + "ip": "1.2.3.4", + "ports": [ + 25, + 80, + 443, + ], + "tags": [ + "starttls", + "self-signed", + "eol-os" + ], + "vulns": [ + "CVE-2021-26857", + "CVE-2021-26855" + ] + } + """ + + watched_events = ["IP_ADDRESS", "DNS_NAME"] + produced_events = ["TECHNOLOGY", "VULNERABILITY", "FINDING", "OPEN_TCP_PORT", "DNS_NAME"] + flags = ["passive", "safe", "portscan", "subdomain-enum"] + meta = { + "description": "Query Shodan's InternetDB for open ports, hostnames, technologies, and vulnerabilities", + "created_date": "2023-12-22", + "author": "@TheTechromancer", + } + options = {"show_open_ports": False} + options_desc = { + "show_open_ports": "Display OPEN_TCP_PORT events in output, even if they didn't lead to an interesting discovery" + } + + # we get lots of 404s, that's normal + _api_failure_abort_threshold = 9999999999 + + _qsize = 500 + + base_url = "https://internetdb.shodan.io" + + async def setup(self): + self.show_open_ports = self.config.get("show_open_ports", False) + return True + + def _incoming_dedup_hash(self, event): + return hash(self.get_ip(event)) + + async def handle_event(self, event): + ip = self.get_ip(event) + if ip is None: + return + url = f"{self.base_url}/{ip}" + r = await self.api_request(url) + if r is None: + self.debug(f"No response for {event.data}") + return + try: + data = r.json() + except Exception as e: + self.verbose(f"Error parsing JSON response from {url}: {e}") + self.trace() + return + if data: + if r.status_code == 200: + await self._parse_response(data=data, event=event, ip=ip) + elif r.status_code == 404: + detail = data.get("detail", "") + if detail: + self.debug(f"404 response for {url}: {detail}") + else: + err_data = data.get("type", "") + err_msg = data.get("msg", "") + self.verbose(f"Shodan error for {ip}: {err_data}: {err_msg}") + + async def _parse_response(self, data: dict, event, ip): + """Handles emitting events from returned JSON""" + data: dict # has keys: cpes, hostnames, ip, ports, tags, vulns + ip = str(ip) + query_host = ip if event.data == ip else f"{event.data} ({ip})" + # ip is a string, ports is a list of ports, the rest is a list of strings + for hostname in data.get("hostnames", []): + if hostname != event.data: + await self.emit_event( + hostname, + "DNS_NAME", + parent=event, + context=f'{{module}} queried Shodan\'s InternetDB API for "{query_host}" and found {{event.type}}: {{event.data}}', + ) + for cpe in data.get("cpes", []): + await self.emit_event( + {"technology": cpe, "host": str(event.host)}, + "TECHNOLOGY", + parent=event, + context=f'{{module}} queried Shodan\'s InternetDB API for "{query_host}" and found {{event.type}}: {{event.data}}', + ) + for port in data.get("ports", []): + await self.emit_event( + self.helpers.make_netloc(event.data, port), + "OPEN_TCP_PORT", + parent=event, + internal=(not self.show_open_ports), + 
context=f'{{module}} queried Shodan\'s InternetDB API for "{query_host}" and found {{event.type}}: {{event.data}}', + ) + vulns = data.get("vulns", []) + if vulns: + vulns_str = ", ".join([str(v) for v in vulns]) + await self.emit_event( + {"description": f"Shodan reported possible vulnerabilities: {vulns_str}", "host": str(event.host)}, + "FINDING", + parent=event, + context=f'{{module}} queried Shodan\'s InternetDB API for "{query_host}" and found potential {{event.type}}: {vulns_str}', + ) + + def get_ip(self, event): + """ + Get the first available IP address from an event (IP_ADDRESS or DNS_NAME) + """ + if event.type == "IP_ADDRESS": + return event.host + elif event.type == "DNS_NAME": + # always try IPv4 first + ipv6 = [] + ips = [h for h in event.resolved_hosts if self.helpers.is_ip(h)] + for ip in sorted([str(ip) for ip in ips]): + if self.helpers.is_ip(ip, version=4): + return ip + elif self.helpers.is_ip(ip, version=6): + ipv6.append(ip) + for ip in ipv6: + return ip diff --git a/bbot/modules/ip2location.py b/bbot/modules/ip2location.py new file mode 100644 index 0000000000..2a4b387f45 --- /dev/null +++ b/bbot/modules/ip2location.py @@ -0,0 +1,74 @@ +from bbot.modules.base import BaseModule + + +class IP2Location(BaseModule): + """ + IP2Location.io Geolocation API. + """ + + watched_events = ["IP_ADDRESS"] + produced_events = ["GEOLOCATION"] + flags = ["passive", "safe"] + meta = { + "description": "Query IP2location.io's API for geolocation information. ", + "created_date": "2023-09-12", + "author": "@TheTechromancer", + "auth_required": True, + } + options = {"api_key": "", "lang": ""} + options_desc = { + "api_key": "IP2location.io API Key", + "lang": "Translation information(ISO639-1). The translation is only applicable for continent, country, region and city name.", + } + scope_distance_modifier = 1 + _priority = 2 + suppress_dupes = False + + base_url = "http://api.ip2location.io" + + async def setup(self): + self.lang = self.config.get("lang", "") + return await self.require_api_key() + + async def ping(self): + url = self.build_url("8.8.8.8") + await super().ping(url) + + def build_url(self, data): + url = f"{self.base_url}/?key={{api_key}}&ip={data}&format=json&source=bbot" + if self.lang: + url = f"{url}&lang={self.lang}" + return url + + async def handle_event(self, event): + try: + url = self.build_url(event.data) + result = await self.api_request(url) + if result: + geo_data = result.json() + if not geo_data: + self.verbose(f"No JSON response from {url}") + else: + self.verbose(f"No response from {url}") + except Exception: + self.verbose(f"Error retrieving results for {event.data}", trace=True) + return + + geo_data = {k: v for k, v in geo_data.items() if v is not None} + if "error" in geo_data: + error_msg = geo_data.get("error").get("error_message", "") + if error_msg: + self.warning(error_msg) + elif geo_data: + country = geo_data.get("country_name", "unknown country") + region = geo_data.get("region_name", "unknown region") + city = geo_data.get("city_name", "unknown city") + lat = geo_data.get("latitude", "") + long = geo_data.get("longitude", "") + description = f"{city}, {region}, {country} ({lat}, {long})" + await self.emit_event( + geo_data, + "GEOLOCATION", + event, + context=f'{{module}} queried IP2Location API for "{event.data}" and found {{event.type}}: {description}', + ) diff --git a/bbot/modules/ipneighbor.py b/bbot/modules/ipneighbor.py index 0f139227ac..3bae28a37f 100644 --- a/bbot/modules/ipneighbor.py +++ b/bbot/modules/ipneighbor.py @@ -4,33 
+4,40 @@ class ipneighbor(BaseModule): - watched_events = ["IP_ADDRESS"] produced_events = ["IP_ADDRESS"] flags = ["passive", "subdomain-enum", "aggressive"] - meta = {"description": "Look beside IPs in their surrounding subnet"} + meta = { + "description": "Look beside IPs in their surrounding subnet", + "created_date": "2022-06-08", + "author": "@TheTechromancer", + } options = {"num_bits": 4} options_desc = {"num_bits": "Netmask size (in CIDR notation) to check. Default is 4 bits (16 hosts)"} - scope_distance_modifier = 0 - _scope_shepherding = False + scope_distance_modifier = 1 - def setup(self): + async def setup(self): self.processed = set() self.num_bits = max(1, int(self.config.get("num_bits", 4))) return True - def filter_event(self, event): + async def filter_event(self, event): if str(event.module) in ("speculate", "ipneighbor"): return False return True - def handle_event(self, event): + async def handle_event(self, event): main_ip = event.host netmask = main_ip.max_prefixlen - min(main_ip.max_prefixlen, self.num_bits) network = ipaddress.ip_network(f"{main_ip}/{netmask}", strict=False) subnet_hash = hash(network) - if not subnet_hash in self.processed: + if subnet_hash not in self.processed: self.processed.add(subnet_hash) for ip in network: if ip != main_ip: - self.emit_event(str(ip), "IP_ADDRESS", event, internal=True) + ip_event = self.make_event(str(ip), "IP_ADDRESS", event, internal=True) + if ip_event: + await self.emit_event( + ip_event, + context="{module} produced {event.type}: {event.data}", + ) diff --git a/bbot/modules/ipstack.py b/bbot/modules/ipstack.py new file mode 100644 index 0000000000..02cfe0f3dc --- /dev/null +++ b/bbot/modules/ipstack.py @@ -0,0 +1,61 @@ +from bbot.modules.base import BaseModule + + +class Ipstack(BaseModule): + """ + Ipstack GeoIP + Leverages the ipstack.com API to geolocate a host by IP address. 
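(For context on the docstring above: a hedged, stdlib-only sketch of the lookup this module wraps. The endpoint format and response fields mirror the handler code that follows; the API key and IP are placeholders.)

```python
# Rough sketch of an ipstack.com geolocation lookup (stdlib only).
# Field names mirror those the module reads below; key/IP are placeholders.
import json
import urllib.request

def ipstack_lookup(ip: str, api_key: str) -> str:
    url = f"http://api.ipstack.com/{ip}?access_key={api_key}"
    with urllib.request.urlopen(url) as resp:
        geo = json.load(resp)
    city = geo.get("city", "unknown city")
    region = geo.get("region_name", "unknown region")
    country = geo.get("country_name", "unknown country")
    return f"{city}, {region}, {country} ({geo.get('latitude')}, {geo.get('longitude')})"

# Example (requires a valid key):
# print(ipstack_lookup("8.8.8.8", "YOUR_API_KEY"))
```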
+ """ + + watched_events = ["IP_ADDRESS"] + produced_events = ["GEOLOCATION"] + flags = ["passive", "safe"] + meta = { + "description": "Query IPStack's GeoIP API", + "created_date": "2022-11-26", + "author": "@tycoonslive", + "auth_required": True, + } + options = {"api_key": ""} + options_desc = {"api_key": "IPStack GeoIP API Key"} + scope_distance_modifier = 1 + _priority = 2 + suppress_dupes = False + + base_url = "http://api.ipstack.com" + ping_url = f"{base_url}/check?access_key={{api_key}}" + + async def setup(self): + return await self.require_api_key() + + async def handle_event(self, event): + try: + url = f"{self.base_url}/{event.data}?access_key={{api_key}}" + result = await self.api_request(url) + if result: + geo_data = result.json() + if not geo_data: + self.verbose(f"No JSON response from {url}") + else: + self.verbose(f"No response from {url}") + except Exception: + self.verbose(f"Error retrieving results for {event.data}", trace=True) + return + geo_data = {k: v for k, v in geo_data.items() if v is not None} + if "error" in geo_data: + error_msg = geo_data.get("error").get("info", "") + if error_msg: + self.warning(error_msg) + elif geo_data: + country = geo_data.get("country_name", "unknown country") + region = geo_data.get("region_name", "unknown region") + city = geo_data.get("city", "unknown city") + lat = geo_data.get("latitude", "") + long = geo_data.get("longitude", "") + description = f"{city}, {region}, {country} ({lat}, {long})" + await self.emit_event( + geo_data, + "GEOLOCATION", + event, + context=f'{{module}} queried ipstack.com\'s API for "{event.data}" and found {{event.type}}: {description}', + ) diff --git a/bbot/modules/jadx.py b/bbot/modules/jadx.py new file mode 100644 index 0000000000..33722fc3d7 --- /dev/null +++ b/bbot/modules/jadx.py @@ -0,0 +1,87 @@ +from pathlib import Path +from subprocess import CalledProcessError +from bbot.modules.internal.base import BaseModule + + +class jadx(BaseModule): + watched_events = ["FILESYSTEM"] + produced_events = ["FILESYSTEM"] + flags = ["passive", "safe", "code-enum"] + meta = { + "description": "Decompile APKs and XAPKs using JADX", + "created_date": "2024-11-04", + "author": "@domwhewell-sage", + } + options = { + "threads": 4, + } + options_desc = { + "threads": "Maximum jadx threads for extracting apk's, default: 4", + } + deps_common = ["java"] + deps_ansible = [ + { + "name": "Create jadx directory", + "file": {"path": "#{BBOT_TOOLS}/jadx", "state": "directory", "mode": "0755"}, + }, + { + "name": "Download jadx", + "unarchive": { + "src": "https://github.com/skylot/jadx/releases/download/v1.5.0/jadx-1.5.0.zip", + "include": ["lib/jadx-1.5.0-all.jar", "bin/jadx"], + "dest": "#{BBOT_TOOLS}/jadx", + "remote_src": True, + }, + }, + ] + + allowed_file_types = ["java archive", "android application package"] + + async def setup(self): + self.threads = self.config.get("threads", 4) + return True + + async def filter_event(self, event): + if "file" in event.tags: + if event.data["magic_description"].lower() not in self.allowed_file_types: + return False, f"Jadx is not able to decompile this file type: {event.data['magic_description']}" + else: + return False, "Event is not a file" + return True + + async def handle_event(self, event): + path = Path(event.data["path"]) + output_dir = path.parent / path.name.replace(".", "_") + self.helpers.mkdir(output_dir) + success = await self.decompile_apk(path, output_dir) + + # If jadx was able to decompile the java archive, emit an event + if success: + await 
self.emit_event( + {"path": str(output_dir)}, + "FILESYSTEM", + tags=["folder", "unarchived-folder"], + parent=event, + context=f'extracted "{path}" to: {output_dir}', + ) + else: + output_dir.rmdir() + + async def decompile_apk(self, path, output_dir): + command = [ + f"{self.scan.helpers.tools_dir}/jadx/bin/jadx", + "--threads-count", + self.threads, + "--output-dir", + str(output_dir), + str(path), + ] + try: + output = await self.run_process(command, check=True) + except CalledProcessError as e: + self.warning(f"Error decompiling {path}. STDOUT: {e.stdout} STDERR: {repr(e.stderr)}") + return False + if not (output_dir / "resources").exists() and not (output_dir / "sources").exists(): + self.warning(f"JADX was unable to decompile {path}: (STDOUT: {output.stdout} STDERR: {output.stderr})") + return False + return True diff --git a/bbot/modules/leakix.py b/bbot/modules/leakix.py index e5900cea2f..ac9e81f87b 100644 --- a/bbot/modules/leakix.py +++ b/bbot/modules/leakix.py @@ -1,55 +1,46 @@ -from .crobat import crobat +from bbot.modules.templates.subdomain_enum import subdomain_enum_apikey -class leakix(crobat): +class leakix(subdomain_enum_apikey): watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] flags = ["subdomain-enum", "passive", "safe"] - meta = {"description": "Query leakix.net for subdomains"} + options = {"api_key": ""} + # NOTE: API key is not required (but having one will get you more results) + options_desc = {"api_key": "LeakIX API Key"} + meta = { + "description": "Query leakix.net for subdomains", + "created_date": "2022-07-11", + "author": "@TheTechromancer", + } base_url = "https://leakix.net" + ping_url = f"{base_url}/host/1.1.1.1" - def handle_event(self, event): - query = self.make_query(event) - headers = {"Accept": "application/json"} - r = self.helpers.request(f"{self.base_url}/domain/{self.helpers.quote(query)}", headers=headers) - if not r: - return - try: - j = r.json() - except Exception: - self.warning(f"Error decoding JSON") - return - services = j.get("Services", []) - if services: - for s in services: - if s.get("event_type", "") != "service": - continue - host = s.get("host", "") - if not host: - continue - source_event = self.make_event(host, "DNS_NAME", source=event) - self.emit_event(source_event) - ssl = s.get("ssl", {}) - if not ssl: - continue - certificate = ssl.get("certificate", {}) - if not certificate: - continue - cert_domains = set() - cn = self.clean_dns_name(certificate.get("cn", "")) - if cn: - cert_domains.add(cn) - domains = certificate.get("domain", []) - if domains: - for d in domains: - d = self.clean_dns_name(d) - if d: - cert_domains.add(d) - for d in cert_domains: - if d != host: - self.emit_event(d, "DNS_NAME", source=source_event) + async def setup(self): + ret = await super(subdomain_enum_apikey, self).setup() + self.api_key = self.config.get("api_key", "") + if self.api_key: + return await self.require_api_key() + return ret - @staticmethod - def clean_dns_name(dns_name): - return str(dns_name).strip().lower().lstrip(".*") + def prepare_api_request(self, url, kwargs): + if self.api_key: + kwargs["headers"]["api-key"] = self.api_key + kwargs["headers"]["Accept"] = "application/json" + return url, kwargs + + async def request_url(self, query): + url = f"{self.base_url}/api/subdomains/{self.helpers.quote(query)}" + response = await self.api_request(url) + return response + + async def parse_results(self, r, query=None): + results = set() + json = r.json() + if json: + for entry in json: + subdomain = entry.get("subdomain", 
"") + if subdomain: + results.add(subdomain) + return results diff --git a/bbot/modules/massdns.py b/bbot/modules/massdns.py deleted file mode 100644 index 8bdf2f6611..0000000000 --- a/bbot/modules/massdns.py +++ /dev/null @@ -1,178 +0,0 @@ -import json -import subprocess - -from .crobat import crobat - - -class massdns(crobat): - - flags = ["brute-force", "subdomain-enum", "passive", "slow", "aggressive"] - watched_events = ["DNS_NAME"] - produced_events = ["DNS_NAME"] - meta = {"description": "Brute-force subdomains with massdns (highly effective)"} - options = { - "wordlist": "https://raw.githubusercontent.com/danielmiessler/SecLists/master/Discovery/DNS/subdomains-top1million-5000.txt", - "max_resolvers": 500, - } - options_desc = {"wordlist": "Subdomain wordlist URL", "max_resolvers": "Number of concurrent massdns resolvers"} - subdomain_file = None - deps_ansible = [ - {"name": "install dev tools", "package": {"name": ["gcc", "git", "make"], "state": "present"}, "become": True}, - { - "name": "Download massdns source code", - "git": { - "repo": "https://github.com/blechschmidt/massdns.git", - "dest": "{BBOT_TEMP}/massdns", - "single_branch": True, - "version": "master", - }, - }, - { - "name": "Build massdns", - "command": {"chdir": "{BBOT_TEMP}/massdns", "cmd": "make", "creates": "{BBOT_TEMP}/massdns/bin/massdns"}, - }, - { - "name": "Install massdns", - "copy": {"src": "{BBOT_TEMP}/massdns/bin/massdns", "dest": "{BBOT_TOOLS}/", "mode": "u+x,g+x,o+x"}, - }, - ] - - def setup(self): - self.found = dict() - self.mutations_tried = set() - self.source_events = dict() - self.subdomain_file = self.helpers.wordlist(self.config.get("wordlist")) - return super().setup() - - def handle_event(self, event): - query = self.make_query(event) - h = hash(query) - if not h in self.source_events: - self.source_events[h] = event - - # wildcard sanity check - is_wildcard, _ = self.helpers.is_wildcard(f"{self.helpers.rand_string()}.{query}") - if is_wildcard: - self.debug(f"Skipping wildcard: {query}") - return - - for hostname in self.massdns(query, self.helpers.read_file(self.subdomain_file)): - self.emit_result(hostname, event, query) - - def emit_result(self, result, source_event, query): - if not result == source_event: - kwargs = {"abort_if": lambda e: any([x in e.tags for x in ("wildcard", "unresolved")])} - if result.endswith(f".{query}"): - kwargs["on_success_callback"] = self.add_found - self.emit_event(result, "DNS_NAME", source_event, **kwargs) - - def massdns(self, domain, subdomains): - """ - { - "name": "www.blacklanternsecurity.com.", - "type": "A", - "class": "IN", - "status": "NOERROR", - "data": { - "answers": [ - { - "ttl": 3600, - "type": "CNAME", - "class": "IN", - "name": "www.blacklanternsecurity.com.", - "data": "blacklanternsecurity.github.io." 
- }, - { - "ttl": 3600, - "type": "A", - "class": "IN", - "name": "blacklanternsecurity.github.io.", - "data": "185.199.108.153" - } - ] - }, - "resolver": "168.215.165.186:53" - } - """ - if self.scan.stopping: - return - - self.debug(f"Brute-forcing subdomains for {domain}") - command = ( - "massdns", - "-r", - self.helpers.dns.mass_resolver_file, - "-s", - self.config.get("max_resolvers", 1000), - "-t", - "A", - "-t", - "AAAA", - "-o", - "J", - "-q", - ) - subdomains = self.gen_subdomains(subdomains, domain) - for line in self.helpers.run_live(command, stderr=subprocess.DEVNULL, input=subdomains): - try: - j = json.loads(line) - except json.decoder.JSONDecodeError: - self.debug(f"Failed to decode line: {line}") - continue - answers = j.get("data", {}).get("answers", []) - if type(answers) == list: - for answer in answers: - hostname = answer.get("name", "") - if hostname: - data = answer.get("data", "") - # avoid garbage answers like this: - # 8AAAA queries have been locally blocked by dnscrypt-proxy/Set block_ipv6 to false to disable this feature - if " " not in data: - yield hostname.rstrip(".") - - def finish(self): - found = list(self.found.items()) - - base_mutations = set() - for domain, subdomains in found: - base_mutations.update(set(subdomains)) - - for i, (domain, subdomains) in enumerate(found): - query = domain - domain_hash = hash(domain) - if self.scan.stopping: - return - mutations = set(base_mutations) - for mutation in self.helpers.word_cloud.mutations(subdomains): - h = hash((domain_hash, mutation)) - if h not in self.mutations_tried: - self.mutations_tried.add(h) - for delimiter in ("", ".", "-"): - m = delimiter.join(mutation).lower() - mutations.add(m) - self.verbose(f"Trying {len(mutations):,} mutations against {domain} ({i+1}/{len(found)})") - for hostname in self.massdns(query, mutations): - source_event = self.get_source_event(hostname) - if source_event is None: - self.debug(f"Could not correlate source event from: {hostname}") - continue - self.emit_result(hostname, source_event, query) - - def add_found(self, event): - if self.helpers.is_subdomain(event.data): - subdomain, domain = event.data.split(".", 1) - try: - self.found[domain].add(subdomain) - except KeyError: - self.found[domain] = set((subdomain,)) - - def gen_subdomains(self, prefixes, domain): - for p in prefixes: - yield f"{p}.{domain}" - - def get_source_event(self, hostname): - for p in self.helpers.domain_parents(hostname): - try: - return self.source_events[hash(p)] - except KeyError: - continue diff --git a/bbot/modules/myssl.py b/bbot/modules/myssl.py new file mode 100644 index 0000000000..1a04364bcf --- /dev/null +++ b/bbot/modules/myssl.py @@ -0,0 +1,29 @@ +from bbot.modules.templates.subdomain_enum import subdomain_enum + + +class myssl(subdomain_enum): + flags = ["subdomain-enum", "passive", "safe"] + watched_events = ["DNS_NAME"] + produced_events = ["DNS_NAME"] + meta = { + "description": "Query myssl.com's API for subdomains", + "created_date": "2023-07-10", + "author": "@TheTechromancer", + } + + base_url = "https://myssl.com/api/v1/discover_sub_domain" + + async def request_url(self, query): + url = f"{self.base_url}?domain={self.helpers.quote(query)}" + return await self.api_request(url) + + async def parse_results(self, r, query): + results = set() + json = r.json() + if json and isinstance(json, dict): + data = json.get("data", []) + for d in data: + hostname = d.get("domain", "").lower() + if hostname: + results.add(hostname) + return results diff --git a/bbot/modules/naabu.py 
b/bbot/modules/naabu.py deleted file mode 100644 index d81f5d5491..0000000000 --- a/bbot/modules/naabu.py +++ /dev/null @@ -1,109 +0,0 @@ -import json -import subprocess -from bbot.modules.base import BaseModule - - -class naabu(BaseModule): - - watched_events = ["IP_ADDRESS", "DNS_NAME", "IP_RANGE"] - produced_events = ["OPEN_TCP_PORT"] - flags = ["active", "portscan", "aggressive"] - meta = {"description": "Execute port scans with naabu"} - options = { - "ports": "", - "top_ports": 100, - "version": "2.0.7", - } - options_desc = { - "ports": "ports to scan", - "top_ports": "top ports to scan", - "version": "naabu version", - } - max_event_handlers = 2 - batch_size = 100 - - deps_ansible = [ - { - "name": "install libpcap (Debian)", - "package": {"name": "libpcap0.8", "state": "present"}, - "become": True, - "when": """ansible_facts['os_family'] == 'Debian'""", - }, - { - "name": "install libpcap (others)", - "package": {"name": "libpcap", "state": "present"}, - "become": True, - "when": """ansible_facts['os_family'] != 'Debian'""", - }, - { - "name": "symlink libpcap", - "file": {"src": "/usr/lib/libpcap.so", "dest": "{BBOT_LIB}/libpcap.so.0.8", "state": "link"}, - "ignore_errors": "yes", - "when": """ansible_facts['os_family'] != 'Debian'""", - }, - { - "name": "Download naabu", - "unarchive": { - "src": "https://github.com/projectdiscovery/naabu/releases/download/v{BBOT_MODULES_NAABU_VERSION}/naabu_{BBOT_MODULES_NAABU_VERSION}_linux_amd64.zip", - "include": "naabu", - "dest": "{BBOT_TOOLS}", - "remote_src": True, - }, - }, - ] - - def handle_batch(self, *events): - - _input = [str(e.data) for e in events] - command = self.construct_command() - for line in self.helpers.run_live(command, input=_input, stderr=subprocess.DEVNULL): - try: - j = json.loads(line) - except Exception as e: - self.debug(f'Error parsing line "{line}" as JSON: {e}') - break - host = j.get("host", j.get("ip")) - port = j.get("port") - - source_event = None - # check exact matches first - for event in events: - if host == str(event.host): - source_event = event - break - # then make a broader check, for cidrs etc. - if source_event is None: - intermediary_event = None - for event in events: - if host in event: - intermediary_event = event - break - if intermediary_event is not None: - source_event = self.make_event(host, "IP_ADDRESS", source=intermediary_event) - self.emit_event(source_event) - - if source_event is None: - self.warning(f'Failed to correlate source event for host "{host}"') - continue - - self.emit_event(f"{host}:{port}", "OPEN_TCP_PORT", source=source_event) - - def construct_command(self): - ports = self.config.get("ports", "") - top_ports = self.config.get("top_ports", "") - command = [ - "naabu", - "-silent", - "-json", - # "-r", - # self.helpers.resolver_file - ] - if ports: - command += ["-p", ports] - else: - command += ["-top-ports", top_ports] - return command - - def cleanup(self): - resume_file = self.helpers.current_dir / "resume.cfg" - resume_file.unlink(missing_ok=True) diff --git a/bbot/modules/newsletters.py b/bbot/modules/newsletters.py new file mode 100644 index 0000000000..114f7d66fd --- /dev/null +++ b/bbot/modules/newsletters.py @@ -0,0 +1,60 @@ +# Created a new module called 'newsletters' that will scrape the websites (or recursive websites, +# thanks to BBOT's sub-domain enumeration) looking for the presence of an 'email type' that also +# contains a 'placeholder'. 
The combination of these two HTML items usually signifies the presence
+# of an "Enter Your Email Here" type Newsletter Subscription service. This module could be used
+# to find newsletters for a future email bombing attack.
+
+from .base import BaseModule
+import re
+
+# Known Websites with Newsletters
+# https://futureparty.com/
+# https://www.marketingbrew.com/
+# https://buffer.com/
+# https://www.milkkarten.net/
+# https://geekout.mattnavarra.com/
+
+
+class newsletters(BaseModule):
+    watched_events = ["HTTP_RESPONSE"]
+    produced_events = ["FINDING"]
+    flags = ["active", "safe"]
+    meta = {
+        "description": "Searches for Newsletter Submission Entry Fields on Websites",
+        "created_date": "2024-02-02",
+        "author": "@stryker2k2",
+    }
+
+    # Parse through Website to find a Text Entry Box of 'type = email'
+    # and ensure that there is placeholder text within it.
+    def find_type(self, soup):
+        email_type = soup.find(type="email")
+        if email_type:
+            regex = re.compile(r"placeholder")
+            if regex.search(str(email_type)):
+                return True
+        return False
+
+    async def handle_event(self, event):
+        _event = event
+
+        # Call the find_type function if the webpage returns Status Code 200 && "body" is found in event.data
+        # Ex: 'bbot -m httpx newsletters -t https://apf-api.eng.vn.cloud.tesla.com' returns
+        # Status Code 200 but does NOT have event.data["body"]
+        if _event.data["status_code"] == 200:
+            if "body" in _event.data:
+                body = _event.data["body"]
+                soup = self.helpers.beautifulsoup(body, "html.parser")
+                if soup is False:
+                    self.debug("BeautifulSoup returned False")
+                    return
+                result = self.find_type(soup)
+                if result:
+                    description = "Found a Newsletter Submission Form that could be used for email bombing attacks"
+                    data = {"host": str(_event.host), "description": description, "url": _event.data["url"]}
+                    await self.emit_event(
+                        data,
+                        "FINDING",
+                        _event,
+                        context="{module} searched HTTP_RESPONSE and identified {event.type}: a Newsletter Submission Form that could be used for email bombing attacks",
+                    )
diff --git a/bbot/modules/ntlm.py b/bbot/modules/ntlm.py
index 4e7074ce10..67268616de 100644
--- a/bbot/modules/ntlm.py
+++ b/bbot/modules/ntlm.py
@@ -1,11 +1,10 @@
-from threading import Lock
-
+from bbot.errors import NTLMError
 from bbot.modules.base import BaseModule
-from bbot.core.errors import NTLMError, RequestException
 
 ntlm_discovery_endpoints = [
     "",
-    "autodiscover/autodiscover.xml" "ecp/",
+    "autodiscover/autodiscover.xml",
+    "ecp/",
     "ews/",
     "ews/exchange.asmx",
     "exchange/",
@@ -61,101 +60,85 @@ class ntlm(BaseModule):
+    """
+    Todo:
+        Cancel pending requests and break out of loop when valid endpoint is found
+        (waiting on https://github.com/encode/httpcore/discussions/783/ to be fixed first)
+    """
     watched_events = ["URL", "HTTP_RESPONSE"]
     produced_events = ["FINDING", "DNS_NAME"]
-    flags = ["active", "safe", "web"]
-    meta = {"description": "Watch for HTTP endpoints that support NTLM authentication"}
-    options = {"max_threads": 10, "try_all": False}
-    options_desc = {"max_threads": "Maximum concurrent requests", "try_all": "Try every NTLM endpoint"}
+    flags = ["active", "safe", "web-basic"]
+    meta = {
+        "description": "Watch for HTTP endpoints that support NTLM authentication",
+        "created_date": "2022-07-25",
+        "author": "@liquidsec",
+    }
+    options = {"try_all": False}
+    options_desc = {"try_all": "Try every NTLM endpoint"}
 
     in_scope_only = True
 
-    def setup(self):
-        self.processed = set()
-        self.processed_lock = Lock()
+    async def setup(self):
         self.found = set()
+        self.try_all =
self.config.get("try_all", False) return True - def handle_event(self, event): + async def handle_event(self, event): found_hash = hash(f"{event.host}:{event.port}") - if found_hash not in self.found: - result_FQDN, request_url = self.handle_url(event) - if result_FQDN and request_url: + if event.type == "URL": + url = event.data + else: + url = event.data["url"] + if found_hash in self.found: + return + + urls = {url} + if self.try_all: + for endpoint in ntlm_discovery_endpoints: + urls.add(f"{event.parsed_url.scheme}://{event.parsed_url.netloc}/{endpoint}") + + num_urls = len(urls) + agen = self.helpers.request_batch( + urls, headers=NTLM_test_header, allow_redirects=False, timeout=self.http_timeout + ) + async for url, response in agen: + ntlm_resp = response.headers.get("WWW-Authenticate", "") + if not ntlm_resp: + continue + ntlm_resp_b64 = max(ntlm_resp.split(","), key=lambda x: len(x)).split()[-1] + try: + ntlm_resp_decoded = self.helpers.ntlm.ntlmdecode(ntlm_resp_b64) + if not ntlm_resp_decoded: + continue + + await agen.aclose() self.found.add(found_hash) - self.emit_event( + fqdn = ntlm_resp_decoded.get("FQDN", "") + await self.emit_event( { "host": str(event.host), - "url": request_url, - "description": f"NTLM AUTH: {result_FQDN}", + "url": url, + "description": f"NTLM AUTH: {ntlm_resp_decoded}", }, "FINDING", - source=event, + parent=event, + context=f"{{module}} tried {num_urls:,} NTLM endpoints against {url} and identified NTLM auth ({{event.type}}): {fqdn}", ) - self.emit_event(result_FQDN, "DNS_NAME", source=event) + fqdn = ntlm_resp_decoded.get("FQDN", "") + if fqdn: + await self.emit_event(fqdn, "DNS_NAME", parent=event) + break + + except NTLMError as e: + self.verbose(str(e)) - def filter_event(self, event): + async def filter_event(self, event): if self.try_all: return True if event.type == "HTTP_RESPONSE": if "www-authenticate" in event.data["header-dict"]: - header_value = event.data["header-dict"]["www-authenticate"].lower() + header_value = event.data["header-dict"]["www-authenticate"][0].lower() if "ntlm" in header_value or "negotiate" in header_value: return True return False - - def handle_url(self, event): - if event.type == "URL": - urls = { - event.data, - } - else: - urls = { - event.data["url"], - } - if self.try_all: - for endpoint in ntlm_discovery_endpoints: - urls.add(f"{event.parsed.scheme}://{event.parsed.netloc}/{endpoint}") - - futures = {} - for url in urls: - future = self.submit_task(self.check_ntlm, url) - futures[future] = url - - for future in self.helpers.as_completed(futures): - url = futures[future] - try: - result = future.result() - if result: - for future in futures: - future.cancel() - return str(result["FQDN"]), url - except RequestException as e: - self.warning(str(e)) - - return None, None - - def check_ntlm(self, test_url): - - url_hash = hash(test_url) - - with self.processed_lock: - if url_hash in self.processed: - return - self.processed.add(url_hash) - - # use lower timeout value - http_timeout = self.config.get("httpx_timeout", 5) - r = self.helpers.request( - test_url, headers=NTLM_test_header, raise_error=True, allow_redirects=False, timeout=http_timeout - ) - ntlm_resp = r.headers.get("WWW-Authenticate", "") - if ntlm_resp: - ntlm_resp_b64 = max(ntlm_resp.split(","), key=lambda x: len(x)).split()[-1] - try: - ntlm_resp_decoded = self.helpers.ntlm.ntlmdecode(ntlm_resp_b64) - if ntlm_resp_decoded: - return ntlm_resp_decoded - except NTLMError as e: - self.verbose(str(e)) diff --git a/bbot/modules/oauth.py 
b/bbot/modules/oauth.py
new file mode 100644
index 0000000000..58c0507c09
--- /dev/null
+++ b/bbot/modules/oauth.py
@@ -0,0 +1,164 @@
+from bbot.core.helpers.regexes import url_regexes
+
+from .base import BaseModule
+
+
+class OAUTH(BaseModule):
+    watched_events = ["DNS_NAME", "URL_UNVERIFIED"]
+    produced_events = ["DNS_NAME"]
+    flags = ["affiliates", "subdomain-enum", "cloud-enum", "web-basic", "active", "safe"]
+    meta = {
+        "description": "Enumerate OAUTH and OpenID Connect services",
+        "created_date": "2023-07-12",
+        "author": "@TheTechromancer",
+    }
+    options = {"try_all": False}
+    options_desc = {"try_all": "Check for OAUTH/OIDC on every subdomain and URL."}
+
+    in_scope_only = False
+    scope_distance_modifier = 1
+    _module_threads = 2
+
+    async def setup(self):
+        self.processed = set()
+        self.regexes = list(url_regexes) + list(self.scan.dns_regexes)
+        self.try_all = self.config.get("try_all", False)
+        return True
+
+    async def filter_event(self, event):
+        if event.module == self or any(t in event.tags for t in ("target", "domain", "ms-auth-url")):
+            return True
+        elif self.try_all and event.scope_distance == 0:
+            return True
+        return False
+
+    async def handle_event(self, event):
+        _, domain = self.helpers.split_domain(event.data)
+        source_domain = getattr(event, "source_domain", domain)
+        if not self.scan.in_scope(source_domain):
+            return
+
+        oidc_tasks = []
+        if event.scope_distance == 0:
+            domain_hash = hash(domain)
+            if domain_hash not in self.processed:
+                self.processed.add(domain_hash)
+                oidc_tasks.append(self.helpers.create_task(self.getoidc(f"https://login.windows.net/{domain}")))
+
+        if event.type == "URL_UNVERIFIED":
+            url = event.data
+        else:
+            url = f"https://{event.data}"
+
+        oauth_tasks = []
+        if self.try_all or any(t in event.tags for t in ("oauth-token-endpoint",)):
+            oauth_tasks.append(self.helpers.create_task(self.getoauth(url)))
+        if self.try_all or any(t in event.tags for t in ("ms-auth-url",)):
+            for u in self.url_and_base(url):
+                oidc_tasks.append(self.helpers.create_task(self.getoidc(u)))
+
+        for oidc_task in oidc_tasks:
+            url, token_endpoint, oidc_results = await oidc_task
+            if token_endpoint:
+                finding_event = self.make_event(
+                    {
+                        "description": f"OpenID Connect Endpoint (domain: {source_domain}) found at {url}",
+                        "host": event.host,
+                        "url": url,
+                    },
+                    "FINDING",
+                    parent=event,
+                )
+                if finding_event:
+                    finding_event.source_domain = source_domain
+                    await self.emit_event(
+                        finding_event,
+                        context=f'{{module}} identified {{event.type}}: OpenID Connect Endpoint for "{source_domain}" at {url}',
+                    )
+                url_event = self.make_event(
+                    token_endpoint, "URL_UNVERIFIED", parent=event, tags=["affiliate", "oauth-token-endpoint"]
+                )
+                if url_event:
+                    url_event.source_domain = source_domain
+                    await self.emit_event(
+                        url_event,
+                        context=f'{{module}} identified OpenID Connect Endpoint for "{source_domain}" at {{event.type}}: {url}',
+                    )
+            for result in oidc_results:
+                if result not in (domain, event.data):
+                    event_type = "URL_UNVERIFIED" if self.helpers.is_url(result) else "DNS_NAME"
+                    await self.emit_event(
+                        result,
+                        event_type,
+                        parent=event,
+                        tags=["affiliate"],
+                        context=f'{{module}} analyzed OpenID configuration for "{source_domain}" and found {{event.type}}: {{event.data}}',
+                    )
+
+        for oauth_task in oauth_tasks:
+            url = await oauth_task
+            if url:
+                description = f"Potentially Sprayable OAUTH Endpoint (domain: {source_domain}) at {url}"
+                oauth_finding = self.make_event(
+                    {
+                        "description": description,
+                        "host": event.host,
+                        "url": url,
+                    },
+
"FINDING", + parent=event, + ) + if oauth_finding: + oauth_finding.source_domain = source_domain + await self.emit_event( + oauth_finding, + context=f"{{module}} identified {{event.type}}: {description}", + ) + + def url_and_base(self, url): + yield url + parsed = self.helpers.urlparse(url) + baseurl = f"{parsed.scheme}://{parsed.netloc}/" + if baseurl != url: + yield baseurl + + async def getoidc(self, url): + results = set() + if not url.endswith("openid-configuration"): + url = url.strip("/") + "/.well-known/openid-configuration" + url_hash = hash("OIDC:" + url) + token_endpoint = "" + if url_hash not in self.processed: + self.processed.add(url_hash) + r = await self.helpers.request(url) + if r is None: + return url, token_endpoint, results + try: + json = r.json() + except Exception: + return url, token_endpoint, results + if json and isinstance(json, dict): + token_endpoint = json.get("token_endpoint", "") + for found in await self.helpers.re.search_dict_values(json, *self.regexes): + results.add(found) + results -= {token_endpoint} + return url, token_endpoint, results + + async def getoauth(self, url): + data = { + "grant_type": "authorization_code", + "client_id": "xxx", + "redirect_uri": "https://example.com", + "code": "xxx", + "client_secret": "xxx", + } + url_hash = hash("OAUTH:" + url) + if url_hash not in self.processed: + self.processed.add(url_hash) + r = await self.helpers.request(url, method="POST", data=data) + if r is None: + return + if r.status_code in (400, 401): + if "json" in r.headers.get("content-type", "").lower(): + if any(x in r.text.lower() for x in ("invalid_grant", "invalid_client")): + return url diff --git a/bbot/modules/otx.py b/bbot/modules/otx.py new file mode 100644 index 0000000000..f0075bfc1c --- /dev/null +++ b/bbot/modules/otx.py @@ -0,0 +1,28 @@ +from bbot.modules.templates.subdomain_enum import subdomain_enum + + +class otx(subdomain_enum): + flags = ["subdomain-enum", "passive", "safe"] + watched_events = ["DNS_NAME"] + produced_events = ["DNS_NAME"] + meta = { + "description": "Query otx.alienvault.com for subdomains", + "created_date": "2022-08-24", + "author": "@TheTechromancer", + } + + base_url = "https://otx.alienvault.com" + + def request_url(self, query): + url = f"{self.base_url}/api/v1/indicators/domain/{self.helpers.quote(query)}/passive_dns" + return self.api_request(url) + + async def parse_results(self, r, query): + results = set() + j = r.json() + if isinstance(j, dict): + for entry in j.get("passive_dns", []): + subdomain = entry.get("hostname", "") + if subdomain: + results.add(subdomain) + return results diff --git a/bbot/modules/output/asset_inventory.py b/bbot/modules/output/asset_inventory.py new file mode 100644 index 0000000000..63fefd0ac8 --- /dev/null +++ b/bbot/modules/output/asset_inventory.py @@ -0,0 +1,373 @@ +import csv +import ipaddress +from contextlib import suppress + +from .csv import CSV +from bbot.core.helpers.misc import make_ip_type, is_ip, is_port, best_http_status + +severity_map = { + "INFO": 0, + 0: "N/A", + 1: "LOW", + 2: "MEDIUM", + 3: "HIGH", + 4: "CRITICAL", + "N/A": 0, + "LOW": 1, + "MEDIUM": 2, + "HIGH": 3, + "CRITICAL": 4, +} + + +class asset_inventory(CSV): + watched_events = [ + "OPEN_TCP_PORT", + "DNS_NAME", + "URL", + "FINDING", + "VULNERABILITY", + "TECHNOLOGY", + "IP_ADDRESS", + "WAF", + "HTTP_RESPONSE", + ] + produced_events = ["IP_ADDRESS", "OPEN_TCP_PORT"] + meta = { + "description": "Merge hosts, open ports, technologies, findings, etc. 
into a single asset inventory CSV", + "created_date": "2022-09-30", + "author": "@liquidsec", + } + options = {"output_file": "", "use_previous": False, "recheck": False, "summary_netmask": 16} + options_desc = { + "output_file": "Set a custom output file", + "use_previous": "Emit previous asset inventory as new events (use in conjunction with -n )", + "recheck": "When use_previous=True, don't retain past details like open ports or findings. Instead, allow them to be rediscovered by the new scan", + "summary_netmask": "Subnet mask to use when summarizing IP addresses at end of scan", + } + + header_row = [ + "Host", + "Provider", + "IP (External)", + "IP (Internal)", + "Open Ports", + "HTTP Status", + "HTTP Title", + "Risk Rating", + "Findings", + "Technologies", + "WAF", + "DNS Records", + ] + filename = "asset-inventory.csv" + + async def setup(self): + self.assets = {} + self.use_previous = self.config.get("use_previous", False) + self.recheck = self.config.get("recheck", False) + self.summary_netmask = self.config.get("summary_netmask", 16) + self.emitted_contents = False + self._ran_hooks = False + ret = await super().setup() + return ret + + async def filter_event(self, event): + if event._internal: + return False, "event is internal" + if event.type not in self.watched_events: + return False, "event type is not in watched_events" + if not self.scan.in_scope(event): + return False, "event is not in scope" + if "unresolved" in event.tags: + return False, "event is unresolved" + return True, "" + + async def handle_event(self, event): + if (await self.filter_event(event))[0]: + hostkey = _make_hostkey(event.host, event.resolved_hosts) + if hostkey not in self.assets: + self.assets[hostkey] = Asset(event.host, self.recheck) + self.assets[hostkey].absorb_event(event) + + async def report(self): + stats = {} + totals = {} + + def increment_stat(stat, value): + try: + totals[stat] += 1 + except KeyError: + totals[stat] = 1 + if stat not in stats: + stats[stat] = {} + try: + stats[stat][value] += 1 + except KeyError: + stats[stat][value] = 1 + + def sort_key(asset): + host = str(asset.host) + is_digit = False + with suppress(IndexError): + is_digit = host[0].isdigit() + return (is_digit, host) + + for asset in sorted(self.assets.values(), key=sort_key): + findings_and_vulns = asset.findings.union(asset.vulnerabilities) + ports = getattr(asset, "ports", set()) + ports = [str(p) for p in sorted([int(p) for p in asset.ports])] + ips_all = getattr(asset, "ip_addresses", []) + ips_external = sorted([str(ip) for ip in [i for i in ips_all if not i.is_private]]) + ips_internal = sorted([str(ip) for ip in [i for i in ips_all if i.is_private]]) + host = self.helpers.make_ip_type(getattr(asset, "host", "")) + if host and isinstance(host, str): + _, domain = self.helpers.split_domain(host) + if domain: + increment_stat("Domains", domain) + for ip in ips_all: + net = ipaddress.ip_network(f"{ip}/{self.summary_netmask}", strict=False) + increment_stat("IP Addresses", str(net)) + for port in ports: + increment_stat("Open Ports", port) + row = { + "Host": host, + "Provider": getattr(asset, "provider", ""), + "IP (External)": ", ".join(ips_external), + "IP (Internal)": ", ".join(ips_internal), + "Open Ports": ", ".join(ports), + "HTTP Status": asset.http_status_full, + "HTTP Title": str(getattr(asset, "http_title", "")), + "Risk Rating": severity_map[getattr(asset, "risk_rating", "")], + "Findings": "\n".join(findings_and_vulns), + "Technologies": "\n".join(str(x) for x in getattr(asset, "technologies", 
set())), + "WAF": getattr(asset, "waf", ""), + "DNS Records": ", ".join(sorted([str(r) for r in getattr(asset, "dns_records", [])])), + } + row.update(asset.custom_fields) + self.writerow(row) + + for header in ("Domains", "IP Addresses", "Open Ports"): + table_header = [header, ""] + if header in stats: + table = [] + stats_sorted = sorted(stats[header].items(), key=lambda x: x[-1], reverse=True) + total = totals[header] + for k, v in stats_sorted: + table.append([str(k), f"{v:,}/{total} ({v / total * 100:.1f}%)"]) + self.log_table(table, table_header, table_name=f"asset-inventory-{header}") + + if self._file is not None: + self.info(f"Saved asset-inventory output to {self.output_file}") + + async def finish(self): + if self.use_previous and not self.emitted_contents: + self.emitted_contents = True + if self.output_file.is_file(): + self.info(f"Emitting previous results from {self.output_file}") + with open(self.output_file, newline="") as f: + c = csv.DictReader(f) + for row in c: + # yield to event loop to make sure we don't hold up the scan + await self.helpers.sleep(0) + host = row.get("Host", "").strip() + ips = row.get("IP (External)", "") + "," + row.get("IP (Internal)", "") + if not host or not ips: + continue + hostkey = _make_hostkey(host, ips) + asset = self.assets.get(hostkey, None) + if asset is None: + asset = Asset(host, self.recheck) + self.assets[hostkey] = asset + asset.absorb_csv_row(row) + self.add_custom_headers(list(asset.custom_fields)) + if not is_ip(asset.host): + host_event = self.make_event( + asset.host, "DNS_NAME", parent=self.scan.root_event, raise_error=True + ) + await self.emit_event( + host_event, context="{module} emitted previous result: {event.type}: {event.data}" + ) + for port in asset.ports: + netloc = self.helpers.make_netloc(asset.host, port) + open_port_event = self.make_event(netloc, "OPEN_TCP_PORT", parent=host_event) + if open_port_event: + await self.emit_event( + open_port_event, + context="{module} emitted previous result: {event.type}: {event.data}", + ) + else: + for ip in asset.ip_addresses: + ip_event = self.make_event( + ip, "IP_ADDRESS", parent=self.scan.root_event, raise_error=True + ) + await self.emit_event( + ip_event, context="{module} emitted previous result: {event.type}: {event.data}" + ) + for port in asset.ports: + netloc = self.helpers.make_netloc(ip, port) + open_port_event = self.make_event(netloc, "OPEN_TCP_PORT", parent=ip_event) + if open_port_event: + await self.emit_event( + open_port_event, + context="{module} emitted previous result: {event.type}: {event.data}", + ) + else: + self.warning( + f"use_previous=True was set but no previous asset inventory was found at {self.output_file}" + ) + else: + self._run_hooks() + + def _run_hooks(self): + """ + modules can use self.asset_inventory_hook() to add custom functionality to asset_inventory + the asset inventory module is passed in as the first argument to the method. 
+ """ + if not self._ran_hooks: + self._ran_hooks = True + for module in self.scan.modules.values(): + hook = getattr(module, "asset_inventory_hook", None) + if hook is not None and callable(hook): + hook(self) + + +class Asset: + def __init__(self, host, recheck): + self.host = host + self.ip_addresses = set() + self.dns_records = set() + self.ports = set() + self.findings = set() + self.vulnerabilities = set() + self.status = "UNKNOWN" + self.risk_rating = 0 + self.provider = "" + self.waf = "" + self.technologies = set() + self.custom_fields = {} + self.http_status = 0 + self.http_title = "" + self.redirect_location = "" + self.recheck = recheck + + def absorb_csv_row(self, row): + # host + host = make_ip_type(row.get("Host", "").strip()) + if host and not is_ip(host): + self.host = host + # ips + self.ip_addresses = set(_make_ip_list(row.get("IP (External)", ""))) + self.ip_addresses.update(set(_make_ip_list(row.get("IP (Internal)", "")))) + # If user requests a recheck dont import the following fields to force them to be rechecked + if not self.recheck: + # ports + ports = [i.strip() for i in row.get("Open Ports", "").split(",")] + self.ports.update({i for i in ports if i and is_port(i)}) + # findings + findings = [i.strip() for i in row.get("Findings", "").splitlines()] + self.findings.update({i for i in findings if i}) + # technologies + technologies = [i.strip() for i in row.get("Technologies", "").splitlines()] + self.technologies.update({i for i in technologies if i}) + # risk rating + risk_rating = row.get("Risk Rating", "").strip() + if risk_rating and risk_rating.isdigit() and int(risk_rating) > self.risk_rating: + self.risk_rating = int(risk_rating) + # provider + provider = row.get("Provider", "").strip() + if provider: + self.provider = provider + # custom fields + for k, v in row.items(): + v = str(v) + # update the custom field if it doesn't clash with our main fields + # and if the new value isn't blank + if v and k not in asset_inventory.header_row: + self.custom_fields[k] = v + + def absorb_event(self, event): + if not is_ip(event.host): + self.host = event.host + + dns_children = getattr(event, "_dns_children", {}) + for rdtype, records in sorted(dns_children.items(), key=lambda x: x[0]): + for record in sorted([str(r) for r in records]): + self.dns_records.add(f"{rdtype}:{record}") + + http_status = getattr(event, "http_status", 0) + update_http_status = bool(http_status) and best_http_status(http_status, self.http_status) == http_status + if update_http_status: + self.http_status = http_status + if str(http_status).startswith("3"): + if event.type == "HTTP_RESPONSE": + redirect_location = getattr(event, "redirect_location", "") + if redirect_location: + self.redirect_location = redirect_location + else: + self.redirect_location = "" + + if event.resolved_hosts: + self.ip_addresses.update(set(_make_ip_list(event.resolved_hosts))) + + if event.port: + self.ports.add(str(event.port)) + + if event.type == "FINDING": + location = event.data.get("url", event.data.get("host", "")) + if location: + self.findings.add(f"{location}:{event.data['description']}") + + if event.type == "VULNERABILITY": + location = event.data.get("url", event.data.get("host", "")) + if location: + self.findings.add(f"{location}:{event.data['description']}:{event.data['severity']}") + severity_int = severity_map.get(event.data.get("severity", "N/A"), 0) + if severity_int > self.risk_rating: + self.risk_rating = severity_int + + if event.type == "TECHNOLOGY": + 
self.technologies.add(event.data["technology"]) + + if event.type == "WAF": + if waf := event.data.get("waf", ""): + if update_http_status or not self.waf: + self.waf = waf + + if event.type == "HTTP_RESPONSE": + if title := event.data.get("title", ""): + if update_http_status or not self.http_title: + self.http_title = title + + for tag in event.tags: + if tag.startswith("cdn-") or tag.startswith("cloud-"): + self.provider = tag + break + + @property + def hostkey(self): + return _make_hostkey(self.host, self.ip_addresses) + + @property + def http_status_full(self): + return str(self.http_status) + (f" -> {self.redirect_location}" if self.redirect_location else "") + + +def _make_hostkey(host, ips): + """ + We handle public and private IPs differently + If the IPs are public, we dedupe by host + If they're private, we dedupe by the IPs themselves + """ + ips = _make_ip_list(ips) + is_private = ips and all(is_ip(i) and i.is_private for i in ips) + if is_private: + return ",".join(sorted([str(i) for i in ips])) + return str(host) + + +def _make_ip_list(ips): + if isinstance(ips, str): + ips = [i.strip() for i in ips.split(",")] + ips = [make_ip_type(i) for i in ips if i and is_ip(i)] + return ips diff --git a/bbot/modules/output/base.py b/bbot/modules/output/base.py index 74de187844..da80d4d0aa 100644 --- a/bbot/modules/output/base.py +++ b/bbot/modules/output/base.py @@ -1,36 +1,97 @@ import logging - +from pathlib import Path from bbot.modules.base import BaseModule class BaseOutputModule(BaseModule): accept_dupes = True _type = "output" - emit_graph_trail = True scope_distance_modifier = None _stats_exclude = True + _shuffle_incoming_queue = False + + def human_event_str(self, event): + event_type = f"[{event.type}]" + event_tags = "" + if getattr(event, "tags", []): + event_tags = f"\t({', '.join(sorted(getattr(event, 'tags', [])))})" + event_str = f"{event_type:<20}\t{event.data_human}\t{event.module_sequence}{event_tags}" + return event_str + + def _event_precheck(self, event): + reason = "precheck succeeded" + # special signal event types + if event.type in ("FINISHED",): + return True, "its type is FINISHED" + if self.errored: + return False, "module is in error state" + # exclude non-watched types + if not any(t in self.get_watched_events() for t in ("*", event.type)): + return False, "its type is not in watched_events" + if self.target_only: + if "target" not in event.tags: + return False, "it did not meet target_only filter criteria" + + ### begin output-module specific ### + + # force-output certain events to the graph + if self._is_graph_important(event): + return True, "event is critical to the graph" + + # exclude certain URLs (e.g. 
javascript): + # TODO: revisit this after httpx rework + if event.type.startswith("URL") and self.name != "httpx" and "httpx-only" in event.tags: + return False, (f"Omitting {event} from output because it's marked as httpx-only") - def _filter_event(self, e): - # special "FINISHED" event - if type(e) == str: - if e == "FINISHED": - return True + # omit certain event types + if event._omit: + if "target" in event.tags: + reason = "it's a target" + self.debug(f"Allowing omitted event: {event} because {reason}") + elif event.type in self.get_watched_events(): + reason = "its type is explicitly in watched_events" + self.debug(f"Allowing omitted event: {event} because {reason}") else: - return False - if e._omit: - return False - if e._force_output: - return True - if e._internal: - return False - return True + return False, "_omit is True" + + # internal events like those from speculate, ipneighbor + # or events that are over our report distance + if event._internal: + return False, "_internal is True" + + return True, reason + + async def _event_postcheck(self, event): + acceptable, reason = await super()._event_postcheck(event) + if acceptable and not event._stats_recorded and event.type not in ("FINISHED",): + event._stats_recorded = True + self.scan.stats.event_produced(event) + return acceptable, reason + + def is_incoming_duplicate(self, event, add=False): + is_incoming_duplicate, reason = super().is_incoming_duplicate(event, add=add) + # make exception for graph-important events + if self._is_graph_important(event): + return False, "event is graph-important" + return is_incoming_duplicate, reason + + def _prep_output_dir(self, filename): + self.output_file = self.config.get("output_file", "") + if self.output_file: + self.output_file = Path(self.output_file) + else: + self.output_file = self.scan.home / str(filename) + self.helpers.mkdir(self.output_file.parent) + self._file = None + + def _scope_distance_check(self, event): + return True, "" @property - def config(self): - config = self.scan.config.get("output_modules", {}).get(self.name, {}) - if config is None: - config = {} - return config + def file(self): + if getattr(self, "_file", None) is None: + self._file = open(self.output_file, mode="a") + return self._file @property def log(self): diff --git a/bbot/modules/output/csv.py b/bbot/modules/output/csv.py index a559c855a0..9b7d4b4bd9 100644 --- a/bbot/modules/output/csv.py +++ b/bbot/modules/output/csv.py @@ -1,5 +1,4 @@ import csv -from pathlib import Path from contextlib import suppress from bbot.modules.output.base import BaseOutputModule @@ -7,51 +6,82 @@ class CSV(BaseOutputModule): watched_events = ["*"] - meta = {"description": "Output to CSV"} + meta = {"description": "Output to CSV", "created_date": "2022-04-07", "author": "@TheTechromancer"} options = {"output_file": ""} options_desc = {"output_file": "Output to CSV file"} - emit_graph_trail = False - - def setup(self): - self.output_file = self.config.get("output_file", "") - if self.output_file: - self.output_file = Path(self.output_file) - else: - self.output_file = self.scan.home / "output.csv" - self.helpers.mkdir(self.output_file.parent) - self._file = None + + header_row = [ + "Event type", + "Event data", + "IP Address", + "Source Module", + "Scope Distance", + "Event Tags", + "Discovery Path", + ] + filename = "output.csv" + accept_dupes = False + + async def setup(self): + self.custom_headers = [] + self._headers_set = set() self._writer = None + self._prep_output_dir(self.filename) return True @property def 
writer(self): if self._writer is None: - self._writer = csv.writer(self.file) - self._writer.writerow(["Event type", "Event data", "Source Module", "Scope Distance", "Event Tags"]) + self._writer = csv.DictWriter(self.file, fieldnames=self.fieldnames) + self._writer.writeheader() return self._writer @property def file(self): if self._file is None: + if self.output_file.is_file(): + self.helpers.backup_file(self.output_file) self._file = open(self.output_file, mode="a", newline="") return self._file + @property + def fieldnames(self): + return self.header_row + list(self.custom_headers) + def writerow(self, row): self.writer.writerow(row) self.file.flush() - def handle_event(self, event): + async def handle_event(self, event): + # ["Event type", "Event data", "IP Address", "Source Module", "Scope Distance", "Event Tags"] + discovery_path = getattr(event, "discovery_path", []) self.writerow( - [ - getattr(event, "type", ""), - getattr(event, "data", ""), - str(getattr(event, "module", "")), - str(getattr(event, "scope_distance", "")), - ",".join(sorted(list(getattr(event, "tags", [])))), - ] + { + "Event type": getattr(event, "type", ""), + "Event data": getattr(event, "data", ""), + "IP Address": ",".join( + str(x) for x in getattr(event, "resolved_hosts", set()) if self.helpers.is_ip(x) + ), + "Source Module": str(getattr(event, "module_sequence", "")), + "Scope Distance": str(getattr(event, "scope_distance", "")), + "Event Tags": ",".join(sorted(getattr(event, "tags", []))), + "Discovery Path": " --> ".join(discovery_path), + } ) - def cleanup(self): - if self._file is not None: + async def cleanup(self): + if getattr(self, "_file", None) is not None: with suppress(Exception): self.file.close() + + async def report(self): + if self._file is not None: + self.info(f"Saved CSV output to {self.output_file}") + + def add_custom_headers(self, headers): + if isinstance(headers, str): + headers = [headers] + for header in headers: + if header not in self._headers_set: + self._headers_set.add(header) + self.custom_headers.append(header) diff --git a/bbot/modules/output/discord.py b/bbot/modules/output/discord.py new file mode 100644 index 0000000000..2aa4d21f84 --- /dev/null +++ b/bbot/modules/output/discord.py @@ -0,0 +1,17 @@ +from bbot.modules.templates.webhook import WebhookOutputModule + + +class Discord(WebhookOutputModule): + watched_events = ["*"] + meta = { + "description": "Message a Discord channel when certain events are encountered", + "created_date": "2023-08-14", + "author": "@TheTechromancer", + } + options = {"webhook_url": "", "event_types": ["VULNERABILITY", "FINDING"], "min_severity": "LOW", "retries": 10} + options_desc = { + "webhook_url": "Discord webhook URL", + "event_types": "Types of events to send", + "min_severity": "Only allow VULNERABILITY events of this severity or higher", + "retries": "Number of times to retry sending the message before skipping the event", + } diff --git a/bbot/modules/output/emails.py b/bbot/modules/output/emails.py new file mode 100644 index 0000000000..60d9a153c5 --- /dev/null +++ b/bbot/modules/output/emails.py @@ -0,0 +1,35 @@ +from bbot.modules.output.txt import TXT +from bbot.modules.base import BaseModule + + +class Emails(TXT): + watched_events = ["EMAIL_ADDRESS"] + flags = ["email-enum"] + meta = { + "description": "Output any email addresses found belonging to the target domain", + "created_date": "2023-12-23", + "author": "@domwhewell-sage", + } + options = {"output_file": ""} + options_desc = {"output_file": "Output to file"} + 
in_scope_only = True + accept_dupes = False + + output_filename = "emails.txt" + + async def setup(self): + self.emails_written = 0 + return await super().setup() + + def _scope_distance_check(self, event): + return BaseModule._scope_distance_check(self, event) + + async def handle_event(self, event): + if self.file is not None: + self.emails_written += 1 + self.file.write(f"{event.data}\n") + self.file.flush() + + async def report(self): + if getattr(self, "_file", None) is not None: + self.info(f"Saved {self.emails_written:,} email addresses to {self.output_file}") diff --git a/bbot/modules/output/http.py b/bbot/modules/output/http.py index 26d2a407ed..9d9241da0b 100644 --- a/bbot/modules/output/http.py +++ b/bbot/modules/output/http.py @@ -1,13 +1,13 @@ -import requests -from requests.auth import HTTPBasicAuth -from requests.exceptions import RequestException - from bbot.modules.output.base import BaseOutputModule class HTTP(BaseOutputModule): watched_events = ["*"] - meta = {"description": "Output to HTTP"} + meta = { + "description": "Send every event to a custom URL via a web request", + "created_date": "2022-04-13", + "author": "@TheTechromancer", + } options = { "url": "", "method": "POST", @@ -15,6 +15,7 @@ class HTTP(BaseOutputModule): "username": "", "password": "", "timeout": 10, + "siem_friendly": False, } options_desc = { "url": "Web URL", @@ -23,37 +24,50 @@ class HTTP(BaseOutputModule): "username": "Username (basic auth)", "password": "Password (basic auth)", "timeout": "HTTP timeout", + "siem_friendly": "Format JSON in a SIEM-friendly way for ingestion into Elastic, Splunk, etc.", } - def setup(self): - self.session = requests.Session() - if not self.config.get("url", ""): + async def setup(self): + self.url = self.config.get("url", "") + self.method = self.config.get("method", "POST") + self.timeout = self.config.get("timeout", 10) + self.siem_friendly = self.config.get("siem_friendly", False) + self.headers = {} + bearer = self.config.get("bearer", "") + if bearer: + self.headers["Authorization"] = f"Bearer {bearer}" + username = self.config.get("username", "") + password = self.config.get("password", "") + self.auth = None + if username: + self.auth = (username, password) + if not self.url: self.warning("Must set URL") return False - if not self.config.get("method", ""): + if not self.method: self.warning("Must set HTTP method") return False return True - def handle_event(self, event): - r = requests.Request( - url=self.config.get("url"), - method=self.config.get("method", "POST"), - ) - r.headers["User-Agent"] = self.scan.useragent - r.json = dict(event) - username = self.config.get("username", "") - password = self.config.get("password", "") - if username: - r.auth = HTTPBasicAuth(username, password) - bearer = self.config.get("bearer", "") - if bearer: - r.headers["Authorization"] = f"Bearer {bearer}" - try: - timeout = self.config.get("timeout", 10) - self.session.send(r.prepare(), timeout=timeout) - except RequestException as e: - import traceback - - self.warning(f"Error sending {event}: {e}") - self.debug(traceback.format_exc()) + async def handle_event(self, event): + while 1: + response = await self.helpers.request( + url=self.url, + method=self.method, + auth=self.auth, + headers=self.headers, + json=event.json(siem_friendly=self.siem_friendly), + ) + is_success = False if response is None else response.is_success + if not is_success: + status_code = getattr(response, "status_code", 0) + self.warning(f"Error sending {event} (HTTP status code: {status_code}), 
retrying...") + body = getattr(response, "text", "") + self.debug(body) + if status_code == 429: + sleep_interval = 10 + else: + sleep_interval = 1 + await self.helpers.sleep(sleep_interval) + continue + break diff --git a/bbot/modules/output/human.py b/bbot/modules/output/human.py deleted file mode 100644 index 45fb413814..0000000000 --- a/bbot/modules/output/human.py +++ /dev/null @@ -1,45 +0,0 @@ -from pathlib import Path -from contextlib import suppress - -from bbot.modules.output.base import BaseOutputModule - - -class Human(BaseOutputModule): - watched_events = ["*"] - meta = {"description": "Output to text"} - options = {"output_file": "", "console": True} - options_desc = {"output_file": "Output to file", "console": "Output to console"} - emit_graph_trail = False - - def setup(self): - self.output_file = self.config.get("output_file", "") - if self.output_file: - self.output_file = Path(self.output_file) - else: - self.output_file = self.scan.home / "output.txt" - self.helpers.mkdir(self.output_file.parent) - self._file = None - return True - - @property - def file(self): - if self._file is None: - self._file = open(self.output_file, mode="a") - return self._file - - def handle_event(self, event): - event_type = f"[{event.type}]" - event_tags = "" - if getattr(event, "tags", []): - event_tags = f'\t({", ".join(sorted(getattr(event, "tags", [])))})' - event_str = f"{event_type:<20}\t{event.data_human}\t{event.module}{event_tags}" - if self.file is not None: - self.file.write(event_str + "\n") - self.file.flush() - if self.config.get("console", True): - self.stdout(event_str) - - def cleanup(self): - if self._file is not None: - with suppress(Exception): - self.file.close() diff --git a/bbot/modules/output/json.py b/bbot/modules/output/json.py index a0242937b7..a35fa6aed7 100644 --- a/bbot/modules/output/json.py +++ b/bbot/modules/output/json.py @@ -1,5 +1,4 @@ import json -from pathlib import Path from contextlib import suppress from bbot.modules.output.base import BaseOutputModule @@ -7,35 +6,35 @@ class JSON(BaseOutputModule): watched_events = ["*"] - meta = {"description": "Output to JSON"} - options = {"output_file": "", "console": False} - options_desc = {"output_file": "Output to file", "console": "Output to console"} + meta = { + "description": "Output to Newline-Delimited JSON (NDJSON)", + "created_date": "2022-04-07", + "author": "@TheTechromancer", + } + options = {"output_file": "", "siem_friendly": False} + options_desc = { + "output_file": "Output to file", + "siem_friendly": "Output JSON in a SIEM-friendly format for ingestion into Elastic, Splunk, etc.", + } + _preserve_graph = True - def setup(self): - self.output_file = self.config.get("output_file", "") - if self.output_file: - self.output_file = Path(self.output_file) - else: - self.output_file = self.scan.home / "output.json" - self.helpers.mkdir(self.output_file.parent) - self._file = None + async def setup(self): + self._prep_output_dir("output.json") + self.siem_friendly = self.config.get("siem_friendly", False) return True - @property - def file(self): - if self._file is None: - self._file = open(self.output_file, mode="a") - return self._file - - def handle_event(self, event): - event_str = json.dumps(dict(event)) + async def handle_event(self, event): + event_json = event.json(siem_friendly=self.siem_friendly) + event_str = json.dumps(event_json) if self.file is not None: self.file.write(event_str + "\n") self.file.flush() - if self.config.get("console", False) or "human" not in self.scan.modules: - 
self.stdout(event_str)
-    def cleanup(self):
-        if self._file is not None:
+    async def cleanup(self):
+        if getattr(self, "_file", None) is not None:
         with suppress(Exception):
             self.file.close()
+
+    async def report(self):
+        if self._file is not None:
+            self.info(f"Saved JSON output to {self.output_file}")
diff --git a/bbot/modules/output/mysql.py b/bbot/modules/output/mysql.py
new file mode 100644
index 0000000000..8d9a1f7f4c
--- /dev/null
+++ b/bbot/modules/output/mysql.py
@@ -0,0 +1,55 @@
+from bbot.modules.templates.sql import SQLTemplate
+
+
+class MySQL(SQLTemplate):
+    watched_events = ["*"]
+    meta = {
+        "description": "Output scan data to a MySQL database",
+        "created_date": "2024-11-13",
+        "author": "@TheTechromancer",
+    }
+    options = {
+        "username": "root",
+        "password": "bbotislife",
+        "host": "localhost",
+        "port": 3306,
+        "database": "bbot",
+    }
+    options_desc = {
+        "username": "The username to connect to MySQL",
+        "password": "The password to connect to MySQL",
+        "host": "The server running MySQL",
+        "port": "The port to connect to MySQL",
+        "database": "The database name to connect to",
+    }
+    deps_pip = ["sqlmodel", "aiomysql"]
+    protocol = "mysql+aiomysql"
+
+    async def create_database(self):
+        from sqlalchemy import text
+        from sqlalchemy.ext.asyncio import create_async_engine
+
+        # Create the engine for the initial connection to the server
+        initial_engine = create_async_engine(self.connection_string().rsplit("/", 1)[0])
+
+        async with initial_engine.connect() as conn:
+            # Check if the database exists
+            result = await conn.execute(text(f"SHOW DATABASES LIKE '{self.database}'"))
+            database_exists = result.scalar() is not None
+
+            # Create the database if it does not exist
+            if not database_exists:
+                # Use aiomysql directly to create the database
+                import aiomysql
+
+                raw_conn = await aiomysql.connect(
+                    user=self.username,
+                    password=self.password,
+                    host=self.host,
+                    port=self.port,
+                )
+                try:
+                    async with raw_conn.cursor() as cursor:
+                        await cursor.execute(f"CREATE DATABASE {self.database}")
+                finally:
+                    await raw_conn.ensure_closed()
diff --git a/bbot/modules/output/neo4j.py b/bbot/modules/output/neo4j.py
index b140d897b9..b859af516f 100644
--- a/bbot/modules/output/neo4j.py
+++ b/bbot/modules/output/neo4j.py
@@ -1,42 +1,163 @@
-from bbot.db.neo4j import Neo4j
+import json
+import logging
+from contextlib import suppress
+from neo4j import AsyncGraphDatabase
 
 from bbot.modules.output.base import BaseOutputModule
 
+# silence annoying neo4j logger
+logging.getLogger("neo4j").setLevel(logging.CRITICAL)
+
+
 class neo4j(BaseOutputModule):
     """
-    docker run --rm -p 7687:7687 -p 7474:7474 --env NEO4J_AUTH=neo4j/bbotislife neo4j
+    # start Neo4j in the background with docker
+    docker run -d -p 7687:7687 -p 7474:7474 -v "$(pwd)/neo4j/:/data/" -e NEO4J_AUTH=neo4j/bbotislife neo4j
+
+    # view all running docker containers
+    > docker ps
+
+    # view all docker containers
+    > docker ps -a
+
+    # stop a docker container
+    > docker stop <container ID>
+
+    # remove a docker container
+    > docker rm <container ID>
+
+    # start a stopped container
+    > docker start <container ID>
     """
 
     watched_events = ["*"]
-    meta = {"description": "Output to Neo4j"}
+    meta = {"description": "Output to Neo4j", "created_date": "2022-04-07", "author": "@TheTechromancer"}
     options = {"uri": "bolt://localhost:7687", "username": "neo4j", "password": "bbotislife"}
     options_desc = {
         "uri": "Neo4j server + port",
         "username": "Neo4j username",
         "password": "Neo4j password",
     }
-    deps_pip = ["py2neo"]
-    batch_size = 50
+    deps_pip = ["neo4j"]
+    _batch_size = 500
+
_preserve_graph = True - def setup(self): + async def setup(self): try: - self.neo4j = Neo4j( + self.driver = AsyncGraphDatabase.driver( uri=self.config.get("uri", self.options["uri"]), - username=self.config.get("username", self.options["username"]), - password=self.config.get("password", self.options["password"]), + auth=( + self.config.get("username", self.options["username"]), + self.config.get("password", self.options["password"]), + ), ) - self.neo4j.insert_event(self.scan.root_event) + self.session = self.driver.session() + await self.session.run("Match () Return 1 Limit 1") except Exception as e: - self.warning(f"Error setting up Neo4j: {e}") - import traceback - - self.debug(traceback.format_exc()) - return False + return False, f"Error setting up Neo4j: {e}" return True - def handle_event(self, event): - self.neo4j.insert_event(event) + async def handle_batch(self, *all_events): + # group events by type, since cypher doesn't allow dynamic labels + events_by_type = {} + parents_by_type = {} + relationships = [] + for event in all_events: + parent = event.get_parent() + try: + events_by_type[event.type].append(event) + except KeyError: + events_by_type[event.type] = [event] + try: + parents_by_type[parent.type].append(parent) + except KeyError: + parents_by_type[parent.type] = [parent] + + module = str(event.module) + timestamp = event.timestamp + relationships.append((parent, module, timestamp, event)) + + all_ids = {} + for event_type, events in events_by_type.items(): + self.debug(f"{len(events):,} events of type {event_type}") + all_ids.update(await self.merge_events(events, event_type)) + for event_type, parents in parents_by_type.items(): + self.debug(f"{len(parents):,} parents of type {event_type}") + all_ids.update(await self.merge_events(parents, event_type, id_only=True)) + + rel_ids = [] + for parent, module, timestamp, event in relationships: + try: + src_id = all_ids[parent.id] + dst_id = all_ids[event.id] + except KeyError as e: + self.error(f'Error "{e}" correlating {parent.id}:{parent.data} --> {event.id}:{event.data}') + continue + rel_ids.append((src_id, module, timestamp, dst_id)) + + await self.merge_relationships(rel_ids) + + async def merge_events(self, events, event_type, id_only=False): + if id_only: + insert_data = [{"data": str(e.data), "type": e.type, "id": e.id} for e in events] + else: + insert_data = [] + for e in events: + event_json = e.json(mode="graph") + # we pop the timestamp because it belongs on the relationship + event_json.pop("timestamp") + # nested data types aren't supported in neo4j + for key in ("dns_children", "discovery_path"): + if key in event_json: + event_json[key] = json.dumps(event_json[key]) + insert_data.append(event_json) + + cypher = f"""UNWIND $events AS event + MERGE (_:{event_type} {{ id: event.id }}) + SET _ += properties(event) + RETURN event.data as event_data, event.id as event_id, elementId(_) as neo4j_id""" + neo4j_ids = {} + # insert events + try: + results = await self.session.run(cypher, events=insert_data) + # get Neo4j ids + for result in await results.data(): + event_id = result["event_id"] + neo4j_id = result["neo4j_id"] + neo4j_ids[event_id] = neo4j_id + except Exception as e: + self.error(f"Error inserting Neo4j nodes (label:{event_type}): {e}") + self.trace(insert_data) + self.trace(cypher) + return neo4j_ids + + async def merge_relationships(self, relationships): + rels_by_module = {} + # group by module + for src_id, module, timestamp, dst_id in relationships: + data = {"src_id": src_id, "timestamp": 
timestamp, "dst_id": dst_id} + try: + rels_by_module[module].append(data) + except KeyError: + rels_by_module[module] = [data] + + for module, rels in rels_by_module.items(): + self.debug(f"{len(rels):,} relationships of type {module}") + cypher = f""" + UNWIND $rels AS rel + MATCH (a) WHERE elementId(a) = rel.src_id + MATCH (b) WHERE elementId(b) = rel.dst_id + MERGE (a)-[_:{module}]->(b) + SET _.timestamp = rel.timestamp""" + try: + await self.session.run(cypher, rels=rels) + except Exception as e: + self.error(f"Error inserting Neo4j relationship (label:{module}): {e}") + self.trace(cypher) - def handle_batch(self, *events): - self.neo4j.insert_events(events) + async def cleanup(self): + with suppress(Exception): + await self.session.close() + with suppress(Exception): + await self.driver.close() diff --git a/bbot/modules/output/nmap_xml.py b/bbot/modules/output/nmap_xml.py new file mode 100644 index 0000000000..52698e0de8 --- /dev/null +++ b/bbot/modules/output/nmap_xml.py @@ -0,0 +1,171 @@ +import sys +from xml.dom import minidom +from datetime import datetime +from xml.etree.ElementTree import Element, SubElement, tostring + +from bbot import __version__ +from bbot.modules.output.base import BaseOutputModule + + +class NmapHost: + __slots__ = ["hostnames", "open_ports"] + + def __init__(self): + self.hostnames = set() + # a dict of {port: {protocol: banner}} + self.open_ports = dict() + + +class Nmap_XML(BaseOutputModule): + watched_events = ["OPEN_TCP_PORT", "DNS_NAME", "IP_ADDRESS", "PROTOCOL", "HTTP_RESPONSE"] + meta = {"description": "Output to Nmap XML", "created_date": "2024-11-16", "author": "@TheTechromancer"} + output_filename = "output.nmap.xml" + in_scope_only = True + + async def setup(self): + self.hosts = {} + self._prep_output_dir(self.output_filename) + return True + + async def handle_event(self, event): + event_host = event.host + + # we always record by IP + ips = [] + for ip in event.resolved_hosts: + try: + ips.append(self.helpers.make_ip_type(ip)) + except ValueError: + continue + if not ips and self.helpers.is_ip(event_host): + ips = [event_host] + + for ip in ips: + try: + nmap_host = self.hosts[ip] + except KeyError: + nmap_host = NmapHost() + self.hosts[ip] = nmap_host + + event_port = getattr(event, "port", None) + if event.type == "OPEN_TCP_PORT": + if event_port not in nmap_host.open_ports: + nmap_host.open_ports[event.port] = {} + elif event.type in ("PROTOCOL", "HTTP_RESPONSE"): + if event_port is not None: + try: + existing_services = nmap_host.open_ports[event.port] + except KeyError: + existing_services = {} + nmap_host.open_ports[event.port] = existing_services + if event.type == "PROTOCOL": + protocol = event.data["protocol"].lower() + banner = event.data.get("banner", None) + elif event.type == "HTTP_RESPONSE": + protocol = event.parsed_url.scheme.lower() + banner = event.http_title + if protocol not in existing_services: + existing_services[protocol] = banner + + if self.helpers.is_ip(event_host): + if str(event.module) == "PTR": + nmap_host.hostnames.add(event.parent.data) + else: + nmap_host.hostnames.add(event_host) + + async def report(self): + scan_start_time = str(int(self.scan.start_time.timestamp())) + scan_start_time_str = self.scan.start_time.strftime("%a %b %d %H:%M:%S %Y") + scan_end_time = datetime.now() + scan_end_time_str = scan_end_time.strftime("%a %b %d %H:%M:%S %Y") + scan_end_time_timestamp = str(scan_end_time.timestamp()) + scan_duration = scan_end_time - self.scan.start_time + num_hosts_up = len(self.hosts) + + # Create the 
diff --git a/bbot/modules/output/nmap_xml.py b/bbot/modules/output/nmap_xml.py
new file mode 100644
index 0000000000..52698e0de8
--- /dev/null
+++ b/bbot/modules/output/nmap_xml.py
@@ -0,0 +1,171 @@
+import sys
+from xml.dom import minidom
+from datetime import datetime
+from xml.etree.ElementTree import Element, SubElement, tostring
+
+from bbot import __version__
+from bbot.modules.output.base import BaseOutputModule
+
+
+class NmapHost:
+    __slots__ = ["hostnames", "open_ports"]
+
+    def __init__(self):
+        self.hostnames = set()
+        # a dict of {port: {protocol: banner}}
+        self.open_ports = dict()
+
+
+class Nmap_XML(BaseOutputModule):
+    watched_events = ["OPEN_TCP_PORT", "DNS_NAME", "IP_ADDRESS", "PROTOCOL", "HTTP_RESPONSE"]
+    meta = {"description": "Output to Nmap XML", "created_date": "2024-11-16", "author": "@TheTechromancer"}
+    output_filename = "output.nmap.xml"
+    in_scope_only = True
+
+    async def setup(self):
+        self.hosts = {}
+        self._prep_output_dir(self.output_filename)
+        return True
+
+    async def handle_event(self, event):
+        event_host = event.host
+
+        # we always record by IP
+        ips = []
+        for ip in event.resolved_hosts:
+            try:
+                ips.append(self.helpers.make_ip_type(ip))
+            except ValueError:
+                continue
+        if not ips and self.helpers.is_ip(event_host):
+            ips = [event_host]
+
+        for ip in ips:
+            try:
+                nmap_host = self.hosts[ip]
+            except KeyError:
+                nmap_host = NmapHost()
+                self.hosts[ip] = nmap_host
+
+            event_port = getattr(event, "port", None)
+            if event.type == "OPEN_TCP_PORT":
+                if event_port not in nmap_host.open_ports:
+                    nmap_host.open_ports[event.port] = {}
+            elif event.type in ("PROTOCOL", "HTTP_RESPONSE"):
+                if event_port is not None:
+                    try:
+                        existing_services = nmap_host.open_ports[event.port]
+                    except KeyError:
+                        existing_services = {}
+                        nmap_host.open_ports[event.port] = existing_services
+                    if event.type == "PROTOCOL":
+                        protocol = event.data["protocol"].lower()
+                        banner = event.data.get("banner", None)
+                    elif event.type == "HTTP_RESPONSE":
+                        protocol = event.parsed_url.scheme.lower()
+                        banner = event.http_title
+                    if protocol not in existing_services:
+                        existing_services[protocol] = banner
+
+            if self.helpers.is_ip(event_host):
+                if str(event.module) == "PTR":
+                    nmap_host.hostnames.add(event.parent.data)
+            else:
+                nmap_host.hostnames.add(event_host)
+
+    async def report(self):
+        scan_start_time = str(int(self.scan.start_time.timestamp()))
+        scan_start_time_str = self.scan.start_time.strftime("%a %b %d %H:%M:%S %Y")
+        scan_end_time = datetime.now()
+        scan_end_time_str = scan_end_time.strftime("%a %b %d %H:%M:%S %Y")
+        scan_end_time_timestamp = str(scan_end_time.timestamp())
+        scan_duration = scan_end_time - self.scan.start_time
+        num_hosts_up = len(self.hosts)
+
+        # Create the root element
+        nmaprun = Element(
+            "nmaprun",
+            {
+                "scanner": "bbot",
+                "args": " ".join(sys.argv),
+                "start": scan_start_time,
+                "startstr": scan_start_time_str,
+                "version": str(__version__),
+                "xmloutputversion": "1.05",
+            },
+        )
+
+        ports_scanned = []
+        speculate_module = self.scan.modules.get("speculate", None)
+        if speculate_module is not None:
+            ports_scanned = speculate_module.ports
+        portscan_module = self.scan.modules.get("portscan", None)
+        if portscan_module is not None:
+            ports_scanned = self.helpers.parse_port_string(str(portscan_module.ports))
+        num_ports_scanned = len(ports_scanned)
+        ports_scanned = ",".join(str(x) for x in sorted(ports_scanned))
+
+        # Add scaninfo
+        SubElement(
+            nmaprun,
+            "scaninfo",
+            {"type": "syn", "protocol": "tcp", "numservices": str(num_ports_scanned), "services": ports_scanned},
+        )
+
+        # Add host information
+        for ip, nmap_host in self.hosts.items():
+            hostnames = sorted(nmap_host.hostnames)
+            open_ports = sorted(nmap_host.open_ports)
+
+            host_elem = SubElement(nmaprun, "host")
+            SubElement(host_elem, "status", {"state": "up", "reason": "user-set", "reason_ttl": "0"})
+            SubElement(host_elem, "address", {"addr": str(ip), "addrtype": f"ipv{ip.version}"})
+
+            if hostnames:
+                hostnames_elem = SubElement(host_elem, "hostnames")
+                for hostname in hostnames:
+                    SubElement(hostnames_elem, "hostname", {"name": hostname, "type": "user"})
+
+            ports_elem = SubElement(host_elem, "ports")
+            for port in open_ports:
+                protocols = nmap_host.open_ports[port]
+                port_elem = SubElement(ports_elem, "port", {"protocol": "tcp", "portid": str(port)})
+                SubElement(port_elem, "state", {"state": "open", "reason": "syn-ack", "reason_ttl": "0"})
+                for protocol, banner in protocols.items():
+                    attrs = {"name": protocol, "method": "probed", "conf": "10"}
+                    if banner is not None:
+                        attrs["product"] = banner
+                        attrs["extrainfo"] = banner
+                    SubElement(port_elem, "service", attrs)
+
+        # Add runstats
+        runstats = SubElement(nmaprun, "runstats")
+        SubElement(
+            runstats,
+            "finished",
+            {
+                "time": scan_end_time_timestamp,
+                "timestr": scan_end_time_str,
+                "summary": f"BBOT done at {scan_end_time_str}; {num_hosts_up} hosts scanned in {scan_duration.total_seconds():.2f} seconds",
+                "elapsed": str(scan_duration.total_seconds()),
+                "exit": "success",
+            },
+        )
+        SubElement(runstats, "hosts", {"up": str(num_hosts_up), "down": "0", "total": str(num_hosts_up)})
+
+        # make a backup of the file
+        self.helpers.backup_file(self.output_file)
+
+        # Pretty-format the XML
+        rough_string = tostring(nmaprun, encoding="utf-8")
+        reparsed = minidom.parseString(rough_string)
+
+        # Create a new document with the doctype
+        doctype = minidom.DocumentType("nmaprun")
+        reparsed.insertBefore(doctype, reparsed.documentElement)
+
+        pretty_xml = reparsed.toprettyxml(indent=" ")
+
+        with open(self.output_file, "w") as f:
+            f.write(pretty_xml)
+        self.info(f"Saved Nmap XML output to {self.output_file}")
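Because the file mimics nmap's own schema, anything that consumes nmap XML (Metasploit's db_import, for example) should be able to ingest it. A quick sanity check using only the standard library; the path is an assumption, since by default the file lands in the scan's output directory:

# a minimal sketch; assumes the scan wrote output.nmap.xml to the current directory
import xml.etree.ElementTree as ET

tree = ET.parse("output.nmap.xml")
for host in tree.getroot().iter("host"):
    addr = host.find("address").get("addr")
    for port in host.iter("port"):
        state = port.find("state").get("state")
        print(addr, port.get("portid"), state)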
"The password to connect to Postgres", + "host": "The server running Postgres", + "port": "The port to connect to Postgres", + "database": "The database name to connect to", + } + deps_pip = ["sqlmodel", "asyncpg"] + protocol = "postgresql+asyncpg" + + async def create_database(self): + import asyncpg + from sqlalchemy import text + from sqlalchemy.ext.asyncio import create_async_engine + + # Create the engine for the initial connection to the server + initial_engine = create_async_engine(self.connection_string().rsplit("/", 1)[0]) + + async with initial_engine.connect() as conn: + # Check if the database exists + result = await conn.execute(text(f"SELECT 1 FROM pg_database WHERE datname = '{self.database}'")) + database_exists = result.scalar() is not None + + # Create the database if it does not exist + if not database_exists: + # Use asyncpg directly to create the database + raw_conn = await asyncpg.connect( + user=self.username, + password=self.password, + host=self.host, + port=self.port, + ) + try: + await raw_conn.execute(f"CREATE DATABASE {self.database}") + finally: + await raw_conn.close() diff --git a/bbot/modules/output/python.py b/bbot/modules/output/python.py new file mode 100644 index 0000000000..81ceb360ed --- /dev/null +++ b/bbot/modules/output/python.py @@ -0,0 +1,9 @@ +from bbot.modules.output.base import BaseOutputModule + + +class python(BaseOutputModule): + watched_events = ["*"] + meta = {"description": "Output via Python API", "created_date": "2022-09-13", "author": "@TheTechromancer"} + + async def _worker(self): + pass diff --git a/bbot/modules/output/slack.py b/bbot/modules/output/slack.py new file mode 100644 index 0000000000..d65c816b3e --- /dev/null +++ b/bbot/modules/output/slack.py @@ -0,0 +1,32 @@ +import yaml + +from bbot.modules.templates.webhook import WebhookOutputModule + + +class Slack(WebhookOutputModule): + watched_events = ["*"] + meta = { + "description": "Message a Slack channel when certain events are encountered", + "created_date": "2023-08-14", + "author": "@TheTechromancer", + } + options = {"webhook_url": "", "event_types": ["VULNERABILITY", "FINDING"], "min_severity": "LOW", "retries": 10} + options_desc = { + "webhook_url": "Discord webhook URL", + "event_types": "Types of events to send", + "min_severity": "Only allow VULNERABILITY events of this severity or higher", + "retries": "Number of times to retry sending the message before skipping the event", + } + content_key = "text" + + def format_message_str(self, event): + event_tags = ",".join(sorted(event.tags)) + return f"`[{event.type}]`\t*`{event.data}`*\t`{event_tags}`" + + def format_message_other(self, event): + event_yaml = yaml.dump(event.data) + event_type = f"*`[{event.type}]`*" + if event.type in ("VULNERABILITY", "FINDING"): + event_str, color = self.get_severity_color(event) + event_type = f"{color} `{event_str}` {color}" + return f"""*{event_type}*\n```\n{event_yaml}```""" diff --git a/bbot/modules/output/splunk.py b/bbot/modules/output/splunk.py new file mode 100644 index 0000000000..0c0a0dd804 --- /dev/null +++ b/bbot/modules/output/splunk.py @@ -0,0 +1,62 @@ +from bbot.errors import WebError +from bbot.modules.output.base import BaseOutputModule + + +class Splunk(BaseOutputModule): + watched_events = ["*"] + meta = { + "description": "Send every event to a splunk instance through HTTP Event Collector", + "created_date": "2024-02-17", + "author": "@w0Tx", + } + options = { + "url": "", + "hectoken": "", + "index": "", + "source": "", + "timeout": 10, + } + options_desc = { 
+ "url": "Web URL", + "hectoken": "HEC Token", + "index": "Index to send data to", + "source": "Source path to be added to the metadata", + "timeout": "HTTP timeout", + } + + async def setup(self): + self.url = self.config.get("url", "") + self.source = self.config.get("source", "bbot") + self.index = self.config.get("index", "main") + self.timeout = self.config.get("timeout", 10) + self.headers = {} + + hectoken = self.config.get("hectoken", "") + if hectoken: + self.headers["Authorization"] = f"Splunk {hectoken}" + if not self.url: + return False, "Must set URL" + if not self.source: + self.warning("Please provide a source") + return True + + async def handle_event(self, event): + while 1: + try: + data = { + "index": self.index, + "source": self.source, + "sourcetype": "_json", + "event": event.json(), + } + await self.helpers.request( + url=self.url, + method="POST", + headers=self.headers, + json=data, + raise_error=True, + ) + break + except WebError as e: + self.warning(f"Error sending {event}: {e}, retrying...") + await self.helpers.sleep(1) diff --git a/bbot/modules/output/sqlite.py b/bbot/modules/output/sqlite.py new file mode 100644 index 0000000000..261b13b6e2 --- /dev/null +++ b/bbot/modules/output/sqlite.py @@ -0,0 +1,33 @@ +from pathlib import Path + +from bbot.modules.templates.sql import SQLTemplate + + +class SQLite(SQLTemplate): + watched_events = ["*"] + meta = { + "description": "Output scan data to a SQLite database", + "created_date": "2024-11-07", + "author": "@TheTechromancer", + } + options = { + "database": "", + } + options_desc = { + "database": "The path to the sqlite database file", + } + deps_pip = ["sqlmodel", "aiosqlite"] + + async def setup(self): + db_file = self.config.get("database", "") + if not db_file: + db_file = self.scan.home / "output.sqlite" + db_file = Path(db_file) + if not db_file.is_absolute(): + db_file = self.scan.home / db_file + self.db_file = db_file + self.db_file.parent.mkdir(parents=True, exist_ok=True) + return await super().setup() + + def connection_string(self, mask_password=False): + return f"sqlite+aiosqlite:///{self.db_file}" diff --git a/bbot/modules/output/stdout.py b/bbot/modules/output/stdout.py new file mode 100644 index 0000000000..59a121bd47 --- /dev/null +++ b/bbot/modules/output/stdout.py @@ -0,0 +1,69 @@ +import json + +from bbot.logger import log_to_stderr +from bbot.modules.output.base import BaseOutputModule + + +class Stdout(BaseOutputModule): + watched_events = ["*"] + meta = {"description": "Output to text", "created_date": "2024-04-03", "author": "@TheTechromancer"} + options = {"format": "text", "event_types": [], "event_fields": [], "in_scope_only": False, "accept_dupes": True} + options_desc = { + "format": "Which text format to display, choices: text,json", + "event_types": "Which events to display, default all event types", + "event_fields": "Which event fields to display", + "in_scope_only": "Whether to only show in-scope events", + "accept_dupes": "Whether to show duplicate events, default True", + } + vuln_severity_map = {"LOW": "HUGEWARNING", "MEDIUM": "HUGEWARNING", "HIGH": "CRITICAL", "CRITICAL": "CRITICAL"} + format_choices = ["text", "json"] + + async def setup(self): + self.text_format = self.config.get("format", "text").strip().lower() + if self.text_format not in self.format_choices: + return ( + False, + f'Invalid text format choice, "{self.text_format}" (choices: {",".join(self.format_choices)})', + ) + self.accept_event_types = [str(s).upper() for s in self.config.get("event_types", [])] + 
self.show_event_fields = [str(s) for s in self.config.get("event_fields", [])] + self.in_scope_only = self.config.get("in_scope_only", False) + self.accept_dupes = self.config.get("accept_dupes", False) + return True + + async def filter_event(self, event): + if self.accept_event_types: + if event.type not in self.accept_event_types: + return False, f'Event type "{event.type}" is not in the allowed event_types' + return True + + async def handle_event(self, event): + json_mode = "human" if self.text_format == "text" else "json" + event_json = event.json(mode=json_mode) + if self.show_event_fields: + event_json = {k: str(event_json.get(k, "")) for k in self.show_event_fields} + + if self.text_format == "text": + await self.handle_text(event, event_json) + elif self.text_format == "json": + await self.handle_json(event, event_json) + + async def handle_text(self, event, event_json): + if self.show_event_fields: + event_str = "\t".join([str(s) for s in event_json.values()]) + else: + event_str = self.human_event_str(event) + + # log vulnerabilities in vivid colors + if event.type == "VULNERABILITY": + severity = event.data.get("severity", "INFO") + if severity in self.vuln_severity_map: + loglevel = self.vuln_severity_map[severity] + log_to_stderr(event_str, level=loglevel, logname=False) + elif event.type == "FINDING": + log_to_stderr(event_str, level="HUGEINFO", logname=False) + + print(event_str) + + async def handle_json(self, event, event_json): + print(json.dumps(event_json)) diff --git a/bbot/modules/output/subdomains.py b/bbot/modules/output/subdomains.py new file mode 100644 index 0000000000..6c2bfb0b02 --- /dev/null +++ b/bbot/modules/output/subdomains.py @@ -0,0 +1,41 @@ +from bbot.modules.output.txt import TXT +from bbot.modules.base import BaseModule + + +class Subdomains(TXT): + watched_events = ["DNS_NAME", "DNS_NAME_UNRESOLVED"] + flags = ["subdomain-enum"] + meta = { + "description": "Output only resolved, in-scope subdomains", + "created_date": "2023-07-31", + "author": "@TheTechromancer", + } + options = {"output_file": "", "include_unresolved": False} + options_desc = {"output_file": "Output to file", "include_unresolved": "Include unresolved subdomains in output"} + accept_dupes = False + in_scope_only = True + + output_filename = "subdomains.txt" + + async def setup(self): + self.include_unresolved = self.config.get("include_unresolved", False) + self.subdomains_written = 0 + return await super().setup() + + async def filter_event(self, event): + if event.type == "DNS_NAME_UNRESOLVED" and not self.include_unresolved: + return False, "Not accepting unresolved subdomain (include_unresolved=False)" + return True + + def _scope_distance_check(self, event): + return BaseModule._scope_distance_check(self, event) + + async def handle_event(self, event): + if self.file is not None: + self.subdomains_written += 1 + self.file.write(f"{event.data}\n") + self.file.flush() + + async def report(self): + if getattr(self, "_file", None) is not None: + self.info(f"Saved {self.subdomains_written:,} subdomains to {self.output_file}") diff --git a/bbot/modules/output/teams.py b/bbot/modules/output/teams.py new file mode 100644 index 0000000000..c9a7cf1820 --- /dev/null +++ b/bbot/modules/output/teams.py @@ -0,0 +1,109 @@ +from bbot.modules.templates.webhook import WebhookOutputModule + + +class Teams(WebhookOutputModule): + watched_events = ["*"] + meta = { + "description": "Message a Teams channel when certain events are encountered", + "created_date": "2023-08-14", + "author": 
"@TheTechromancer", + } + options = {"webhook_url": "", "event_types": ["VULNERABILITY", "FINDING"], "min_severity": "LOW", "retries": 10} + options_desc = { + "webhook_url": "Teams webhook URL", + "event_types": "Types of events to send", + "min_severity": "Only allow VULNERABILITY events of this severity or higher", + "retries": "Number of times to retry sending the message before skipping the event", + } + + async def handle_event(self, event): + data = self.format_message(event) + await self.api_request( + url=self.webhook_url, + method="POST", + json=data, + ) + + def trim_message(self, message): + if len(message) > self.message_size_limit: + message = message[: self.message_size_limit - 3] + "..." + return message + + def format_message_str(self, event): + items = [] + msg = self.trim_message(event.data) + items.append({"type": "TextBlock", "text": f"{msg}", "wrap": True}) + items.append({"type": "FactSet", "facts": [{"title": "Tags:", "value": ", ".join(event.tags)}]}) + return items + + def format_message_other(self, event): + items = [{"type": "FactSet", "facts": []}] + for key, value in event.data.items(): + if key != "severity": + msg = self.trim_message(str(value)) + items[0]["facts"].append({"title": f"{key}:", "value": msg}) + return items + + def get_severity_color(self, event): + color = "Accent" + if event.type == "VULNERABILITY": + severity = event.data.get("severity", "INFO") + if severity == "CRITICAL": + color = "Attention" + elif severity == "HIGH": + color = "Attention" + elif severity == "MEDIUM": + color = "Warning" + elif severity == "LOW": + color = "Good" + return color + + def format_message(self, event): + adaptive_card = { + "type": "message", + "attachments": [ + { + "contentType": "application/vnd.microsoft.card.adaptive", + "contentUrl": None, + "content": { + "$schema": "http://adaptivecards.io/schemas/adaptive-card.json", + "type": "AdaptiveCard", + "version": "1.2", + "msteams": {"width": "full"}, + "body": [], + }, + } + ], + } + heading = {"type": "TextBlock", "text": f"{event.type}", "wrap": True, "size": "Large", "style": "heading"} + body = adaptive_card["attachments"][0]["content"]["body"] + body.append(heading) + if event.type in ("VULNERABILITY", "FINDING"): + subheading = { + "type": "TextBlock", + "text": event.data.get("severity", "INFO"), + "spacing": "None", + "size": "Large", + "wrap": True, + } + subheading["color"] = self.get_severity_color(event) + body.append(subheading) + main_text = { + "type": "ColumnSet", + "separator": True, + "spacing": "Medium", + "columns": [ + { + "type": "Column", + "width": "stretch", + "items": [], + } + ], + } + if isinstance(event.data, str): + items = self.format_message_str(event) + else: + items = self.format_message_other(event) + main_text["columns"][0]["items"] = items + body.append(main_text) + return adaptive_card diff --git a/bbot/modules/output/txt.py b/bbot/modules/output/txt.py new file mode 100644 index 0000000000..2dfb14c106 --- /dev/null +++ b/bbot/modules/output/txt.py @@ -0,0 +1,32 @@ +from contextlib import suppress + +from bbot.modules.output.base import BaseOutputModule + + +class TXT(BaseOutputModule): + watched_events = ["*"] + meta = {"description": "Output to text", "created_date": "2024-04-03", "author": "@TheTechromancer"} + options = {"output_file": ""} + options_desc = {"output_file": "Output to file"} + + output_filename = "output.txt" + + async def setup(self): + self._prep_output_dir(self.output_filename) + return True + + async def handle_event(self, event): + event_str = 
self.human_event_str(event) + + if self.file is not None: + self.file.write(event_str + "\n") + self.file.flush() + + async def cleanup(self): + if getattr(self, "_file", None) is not None: + with suppress(Exception): + self.file.close() + + async def report(self): + if getattr(self, "_file", None) is not None: + self.info(f"Saved TXT output to {self.output_file}") diff --git a/bbot/modules/output/web_parameters.py b/bbot/modules/output/web_parameters.py new file mode 100644 index 0000000000..634a623720 --- /dev/null +++ b/bbot/modules/output/web_parameters.py @@ -0,0 +1,55 @@ +from contextlib import suppress +from collections import defaultdict + +from bbot.modules.output.base import BaseOutputModule + + +class Web_parameters(BaseOutputModule): + watched_events = ["WEB_PARAMETER"] + meta = { + "description": "Output WEB_PARAMETER names to a file", + "created_date": "2025-01-25", + "author": "@liquidsec", + } + options = {"output_file": "", "include_count": False} + options_desc = { + "output_file": "Output to file", + "include_count": "Include the count of each parameter in the output", + } + + output_filename = "web_parameters.txt" + + async def setup(self): + self._prep_output_dir(self.output_filename) + self.parameter_counts = defaultdict(int) + return True + + async def handle_event(self, event): + parameter_name = event.data.get("name", "") + if parameter_name: + self.parameter_counts[parameter_name] += 1 + + async def cleanup(self): + if getattr(self, "_file", None) is not None: + with suppress(Exception): + self.file.close() + + async def report(self): + include_count = self.config.get("include_count", False) + + # Sort behavior: + # - If include_count is True, sort by count (descending) and then alphabetically by name + # - If include_count is False, sort alphabetically by name only + sorted_parameters = sorted( + self.parameter_counts.items(), key=lambda x: (-x[1], x[0]) if include_count else x[0] + ) + for param, count in sorted_parameters: + if include_count: + # Include the count of each parameter in the output + self.file.write(f"{count}\t{param}\n") + else: + # Only include the parameter name, effectively deduplicating by name + self.file.write(f"{param}\n") + self.file.flush() + if getattr(self, "_file", None) is not None: + self.info(f"Saved web parameters to {self.output_file}") diff --git a/bbot/modules/output/web_report.py b/bbot/modules/output/web_report.py new file mode 100644 index 0000000000..92ff98289f --- /dev/null +++ b/bbot/modules/output/web_report.py @@ -0,0 +1,100 @@ +from bbot.modules.output.base import BaseOutputModule +import markdown +import html + + +class web_report(BaseOutputModule): + watched_events = ["URL", "TECHNOLOGY", "FINDING", "VULNERABILITY", "VHOST"] + meta = { + "description": "Create a markdown report with web assets", + "created_date": "2023-02-08", + "author": "@liquidsec", + } + options = { + "output_file": "", + "css_theme_file": "https://cdnjs.cloudflare.com/ajax/libs/github-markdown-css/5.1.0/github-markdown.min.css", + } + options_desc = {"output_file": "Output to file", "css_theme_file": "CSS theme URL for HTML output"} + deps_pip = ["markdown~=3.4.3"] + + async def setup(self): + html_css_file = self.config.get("css_theme_file", "") + + self.html_header = f""" + + + + + + + """ + + self.html_footer = "" + self.web_assets = {} + self.markdown = "" + + self._prep_output_dir("web_report.html") + return True + + async def handle_event(self, event): + if event.type == "URL": + parsed = event.parsed_url + host = 
f"{parsed.scheme}://{parsed.netloc}/" + if host not in self.web_assets.keys(): + self.web_assets[host] = {"URL": []} + parent_chain = [] + + current_parent = event.parent + while not current_parent.type == "SCAN": + parent_chain.append( + f" ({current_parent.module})---> [{current_parent.type}]:{html.escape(current_parent.pretty_string)}" + ) + current_parent = current_parent.parent + + parent_chain.reverse() + parent_chain_text = ( + "".join(parent_chain) + + f" ({event.module})---> " + + f"[{event.type}]:{html.escape(event.pretty_string)}" + ) + self.web_assets[host]["URL"].append(f"**{html.escape(event.data)}**: {parent_chain_text}") + + else: + current_parent = event.parent + parsed = None + while 1: + if current_parent.type == "URL": + parsed = current_parent.parsed_url + break + current_parent = current_parent.parent + if current_parent.parent.type == "SCAN": + break + if parsed: + host = f"{parsed.scheme}://{parsed.netloc}/" + if host not in self.web_assets.keys(): + self.web_assets[host] = {"URL": []} + if event.type not in self.web_assets[host].keys(): + self.web_assets[host][event.type] = [html.escape(event.pretty_string)] + else: + self.web_assets[host][event.type].append(html.escape(event.pretty_string)) + + async def report(self): + for host in self.web_assets.keys(): + self.markdown += f"# {host}\n\n" + + for event_type in self.web_assets[host].keys(): + self.markdown += f"### {event_type}\n" + dedupe = [] + for e in self.web_assets[host][event_type]: + if e in dedupe: + continue + dedupe.append(e) + self.markdown += f"\n* {e}\n" + self.markdown += "\n" + + if self.file is not None: + self.file.write(self.html_header) + self.file.write(markdown.markdown(self.markdown)) + self.file.write(self.html_footer) + self.file.flush() + self.info(f"Web Report saved to {self.output_file}") diff --git a/bbot/modules/output/websocket.py b/bbot/modules/output/websocket.py index eddad862c7..c8f54097ac 100644 --- a/bbot/modules/output/websocket.py +++ b/bbot/modules/output/websocket.py @@ -1,53 +1,63 @@ import json -import threading -import websocket -from time import sleep +import asyncio +import websockets from bbot.modules.output.base import BaseOutputModule class Websocket(BaseOutputModule): watched_events = ["*"] - meta = {"description": "Output to websockets"} - options = {"url": "", "token": ""} - options_desc = {"url": "Web URL", "token": "Authorization Bearer token"} - - def setup(self): + meta = {"description": "Output to websockets", "created_date": "2022-04-15", "author": "@TheTechromancer"} + options = {"url": "", "token": "", "preserve_graph": True} + options_desc = { + "url": "Web URL", + "token": "Authorization Bearer token", + "preserve_graph": "Preserve full chains of events in the graph (prevents orphans)", + } + + async def setup(self): self.url = self.config.get("url", "") if not self.url: - self.warning("Must set URL") - return False - kwargs = {} + return False, "Must set URL" self.token = self.config.get("token", "") - if self.token: - kwargs.update({"header": {"Authorization": f"Bearer {self.token}"}}) - self.ws = websocket.WebSocketApp(self.url, **kwargs) - self.thread = threading.Thread(target=self.start_websocket, daemon=True) - self.thread.start() + self._ws = None return True - def start_websocket(self): - not_keyboardinterrupt = False - while not self.scan.stopping: - not_keyboardinterrupt = self.ws.run_forever() - if not not_keyboardinterrupt: - break - sleep(1) - - def handle_event(self, event): - event_json = event.json - self.send(event_json) - - def 
send(self, message):
-        while self.ws is not None:
+    async def handle_event(self, event):
+        event_json = event.json()
+        await self.send(event_json)
+
+    async def ws(self, rebuild=False):
+        if self._ws is None or rebuild:
+            kwargs = {"close_timeout": 0.5}
+            if self.token:
+                kwargs.update({"extra_headers": {"Authorization": f"Bearer {self.token}"}})
+            verbs = ("Building", "Built")
+            if rebuild:
+                verbs = ("Rebuilding", "Rebuilt")
+            self.debug(f"{verbs[0]} websocket connection to {self.url}")
+            self._ws = await websockets.connect(self.url, **kwargs)
+            self.debug(f"{verbs[1]} websocket connection to {self.url}")
+        return self._ws
+
+    async def send(self, message):
+        rebuild = False
+        while not self.scan.stopped:
             try:
-                self.ws.send(json.dumps(message))
+                ws = await self.ws(rebuild=rebuild)
+                message_str = json.dumps(message)
+                self.debug(f"Sending message of length {len(message_str)}")
+                await ws.send(message_str)
+                rebuild = False
                 break
             except Exception as e:
                 self.warning(f"Error sending message: {e}, retrying")
-                sleep(1)
-                continue
-
-    def cleanup(self):
-        self.ws.close()
-        self.ws = None
+                await asyncio.sleep(1)
+                rebuild = True
+
+    async def cleanup(self):
+        if self._ws is not None:
+            self.debug(f"Closing connection to {self.url}")
+            await self._ws.close()
+            self.debug(f"Closed connection to {self.url}")
+            self._ws = None
diff --git a/bbot/modules/paramminer_cookies.py b/bbot/modules/paramminer_cookies.py
new file mode 100644
index 0000000000..a3b4619d45
--- /dev/null
+++ b/bbot/modules/paramminer_cookies.py
@@ -0,0 +1,45 @@
+from .paramminer_headers import paramminer_headers
+
+
+class paramminer_cookies(paramminer_headers):
+    """
+    Inspired by https://github.com/PortSwigger/param-miner
+    """
+
+    watched_events = ["HTTP_RESPONSE", "WEB_PARAMETER"]
+    produced_events = ["WEB_PARAMETER"]
+    flags = ["active", "aggressive", "slow", "web-paramminer"]
+    meta = {
+        "description": "Smart brute-force to check for common HTTP cookie parameters",
+        "created_date": "2022-06-27",
+        "author": "@liquidsec",
+    }
+    options = {
+        "wordlist": "",  # default is defined within setup function
+        "recycle_words": False,
+        "skip_boring_words": True,
+    }
+    options_desc = {
+        "wordlist": "Define the wordlist to be used to derive cookies",
+        "recycle_words": "Attempt to use words found during the scan on all other endpoints",
+        "skip_boring_words": "Remove commonly uninteresting words from the wordlist",
+    }
+    scanned_hosts = []
+    boring_words = set()
+    _module_threads = 12
+    in_scope_only = True
+    compare_mode = "cookie"
+    default_wordlist = "paramminer_parameters.txt"
+
+    async def check_batch(self, compare_helper, url, cookie_list):
+        cookies = {p: self.rand_string(14) for p in cookie_list}
+        return await compare_helper.compare(url, cookies=cookies, check_reflection=(len(cookie_list) == 1))
+
+    def gen_count_args(self, url):
+        cookie_count = 40
+        while 1:
+            if cookie_count < 0:
+                break
+            fake_cookies = {self.rand_string(14): self.rand_string(14) for _ in range(0, cookie_count)}
+            yield cookie_count, (url,), {"cookies": fake_cookies}
+            cookie_count -= 5
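Both the cookie and GET-parameter variants reuse the machinery from paramminer_headers further down in this diff: gen_count_args first walks the batch size down from a ceiling until the server stops rejecting requests, then binary_search halves each word group whenever the response diverges from the baseline. A rough standalone sketch of that halving recursion, where compare() is a stand-in for BBOT's HttpCompare helper (which returns False when a batch changed the response):

# a minimal sketch of the batch-halving idea; compare() is hypothetical here
def find_params(compare, group):
    if len(group) == 1:
        return list(group)  # narrowed down to a single responsible parameter
    mid = len(group) // 2
    found = []
    for half in (group[:mid], group[mid:]):
        if half and compare(half) is False:  # response differed -> recurse into this half
            found += find_params(compare, half)
    return found

# e.g. find_params(lambda g: "debug" not in g, ["a", "debug", "b", "c"]) -> ["debug"]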
diff --git a/bbot/modules/paramminer_getparams.py b/bbot/modules/paramminer_getparams.py
new file mode 100644
index 0000000000..e6f35f6235
--- /dev/null
+++ b/bbot/modules/paramminer_getparams.py
@@ -0,0 +1,46 @@
+from .paramminer_headers import paramminer_headers
+
+
+class paramminer_getparams(paramminer_headers):
+    """
+    Inspired by https://github.com/PortSwigger/param-miner
+    """
+
+    watched_events = ["HTTP_RESPONSE", "WEB_PARAMETER"]
+    produced_events = ["WEB_PARAMETER"]
+    flags = ["active", "aggressive", "slow", "web-paramminer"]
+    meta = {
+        "description": "Use smart brute-force to check for common HTTP GET parameters",
+        "created_date": "2022-06-28",
+        "author": "@liquidsec",
+    }
+    scanned_hosts = []
+    options = {
+        "wordlist": "",  # default is defined within setup function
+        "recycle_words": False,
+        "skip_boring_words": True,
+    }
+    options_desc = {
+        "wordlist": "Define the wordlist to be used to derive GET parameters",
+        "recycle_words": "Attempt to use words found during the scan on all other endpoints",
+        "skip_boring_words": "Remove commonly uninteresting words from the wordlist",
+    }
+    boring_words = {"utm_source", "utm_campaign", "utm_medium", "utm_term", "utm_content"}
+    in_scope_only = True
+    compare_mode = "getparam"
+    default_wordlist = "paramminer_parameters.txt"
+
+    async def check_batch(self, compare_helper, url, getparam_list):
+        test_getparams = {p: self.rand_string(14) for p in getparam_list}
+        return await compare_helper.compare(
+            self.helpers.add_get_params(url, test_getparams).geturl(), check_reflection=(len(getparam_list) == 1)
+        )
+
+    def gen_count_args(self, url):
+        getparam_count = 40
+        while 1:
+            if getparam_count < 0:
+                break
+            fake_getparams = {self.rand_string(14): self.rand_string(14) for _ in range(0, getparam_count)}
+            yield getparam_count, (self.helpers.add_get_params(url, fake_getparams).geturl(),), {}
+            getparam_count -= 5
diff --git a/bbot/modules/paramminer_headers.py b/bbot/modules/paramminer_headers.py
new file mode 100644
index 0000000000..cd3648ada0
--- /dev/null
+++ b/bbot/modules/paramminer_headers.py
@@ -0,0 +1,269 @@
+import re
+
+from bbot.errors import HttpCompareError
+from bbot.modules.base import BaseModule
+
+
+class paramminer_headers(BaseModule):
+    """
+    Inspired by https://github.com/PortSwigger/param-miner
+    """
+
+    watched_events = ["HTTP_RESPONSE", "WEB_PARAMETER"]
+    produced_events = ["WEB_PARAMETER"]
+    flags = ["active", "aggressive", "slow", "web-paramminer"]
+    meta = {
+        "description": "Use smart brute-force to check for common HTTP header parameters",
+        "created_date": "2022-04-15",
+        "author": "@liquidsec",
+    }
+    options = {
+        "wordlist": "",  # default is defined within setup function
+        "recycle_words": False,
+        "skip_boring_words": True,
+    }
+    options_desc = {
+        "wordlist": "Define the wordlist to be used to derive headers",
+        "recycle_words": "Attempt to use words found during the scan on all other endpoints",
+        "skip_boring_words": "Remove commonly uninteresting words from the wordlist",
+    }
+    scanned_hosts = []
+    boring_words = {
+        "accept",
+        "accept-encoding",
+        "accept-language",
+        "action",
+        "authorization",
+        "cf-connecting-ip",
+        "connection",
+        "content-encoding",
+        "content-length",
+        "content-range",
+        "content-type",
+        "cookie",
+        "date",
+        "expect",
+        "host",
+        "if",
+        "if-match",
+        "if-modified-since",
+        "if-none-match",
+        "if-unmodified-since",
+        "javascript",
+        "keep-alive",
+        "label",
+        "negotiate",
+        "proxy",
+        "range",
+        "referer",
+        "start",
+        "trailer",
+        "transfer-encoding",
+        "upgrade",
+        "user-agent",
+        "vary",
+        "waf-stuff-below",
+        "x-scanner",
+        "x_alto_ajax_key",
+        "zaccess-control-request-headers",
+        "zaccess-control-request-method",
+        "zmax-forwards",
+        "zorigin",
+        "zreferrer",
+        "zvia",
+        "zx-request-id",
+        "zx-timer",
+    }
+    _module_threads = 12
+    in_scope_only = True
+    compare_mode =
"header" + default_wordlist = "paramminer_headers.txt" + + header_regex = re.compile(r"^[!#$%&\'*+\-.^_`|~0-9a-zA-Z]+: [^\r\n]+$") + + async def setup(self): + self.recycle_words = self.config.get("recycle_words", True) + self.event_dict = {} + self.already_checked = set() + wordlist = self.config.get("wordlist", "") + if not wordlist: + wordlist = f"{self.helpers.wordlist_dir}/{self.default_wordlist}" + self.debug(f"Using wordlist: [{wordlist}]") + self.wl = { + h.strip().lower() + for h in self.helpers.read_file(await self.helpers.wordlist(wordlist)) + if len(h) > 0 and "%" not in h + } + + # check against the boring list (if the option is set) + if self.config.get("skip_boring_words", True): + self.wl -= self.boring_words + self.extracted_words_master = set() + + return True + + def rand_string(self, *args, **kwargs): + return self.helpers.rand_string(*args, **kwargs) + + async def do_mining(self, wl, url, batch_size, compare_helper): + for i in wl: + if i not in self.wl: + h = hash(i + url) + self.already_checked.add(h) + + results = set() + abort_threshold = 15 + try: + for group in self.helpers.grouper(wl, batch_size): + async for result, reasons, reflection in self.binary_search(compare_helper, url, group): + results.add((result, ",".join(reasons), reflection)) + if len(results) >= abort_threshold: + self.warning( + f"Abort threshold ({abort_threshold}) reached, too many {self.compare_mode}s found for url: {url}" + ) + results.clear() + assert False + except AssertionError: + pass + return results + + async def process_results(self, event, results): + url = event.data.get("url") + for result, reasons, reflection in results: + paramtype = self.compare_mode.upper() + if paramtype == "HEADER": + if self.header_regex.match(result): + self.debug("rejecting parameter as it is not a valid header") + continue + tags = [] + if reflection: + tags = ["http_reflection"] + description = f"[Paramminer] {self.compare_mode.capitalize()}: [{result}] Reasons: [{reasons}] Reflection: [{str(reflection)}]" + reflected = "reflected " if reflection else "" + + await self.emit_event( + { + "host": str(event.host), + "url": url, + "type": paramtype, + "description": description, + "name": result, + }, + "WEB_PARAMETER", + event, + tags=tags, + context=f'{{module}} scanned {url} and identified {{event.type}}: {reflected}{self.compare_mode} parameter: "{result}"', + ) + + async def handle_event(self, event): + # If recycle words is enabled, we will collect WEB_PARAMETERS we find to build our list in finish() + # We also collect any parameters of type "SPECULATIVE" + if event.type == "WEB_PARAMETER": + parameter_name = event.data.get("name") + if self.recycle_words or (event.data.get("type") == "SPECULATIVE"): + if self.config.get("skip_boring_words", True) and parameter_name in self.boring_words: + return + if parameter_name not in self.wl: # Ensure it's not already in the wordlist + self.debug(f"Adding {parameter_name} to wordlist") + self.extracted_words_master.add(parameter_name) + + elif event.type == "HTTP_RESPONSE": + url = event.data.get("url") + try: + compare_helper = self.helpers.http_compare(url) + except HttpCompareError as e: + self.debug(f"Error initializing compare helper: {e}") + return + batch_size = await self.count_test(url) + if batch_size is None or batch_size <= 0: + self.debug(f"Failed to get baseline max {self.compare_mode} count, aborting") + return + self.debug(f"Resolved batch_size at {str(batch_size)}") + + self.event_dict[url] = (event, batch_size) + try: + if not await 
compare_helper.canary_check(url, mode=self.compare_mode): + raise HttpCompareError("failed canary check") + except HttpCompareError as e: + self.verbose(f'Aborting "{url}" ({e})') + return + + try: + results = await self.do_mining(self.wl, url, batch_size, compare_helper) + except HttpCompareError as e: + self.debug(f"Encountered HttpCompareError: [{e}] for URL [{event.data}]") + await self.process_results(event, results) + + async def count_test(self, url): + baseline = await self.helpers.request(url) + if baseline is None: + return + if str(baseline.status_code)[0] in {"4", "5"}: + return + for count, args, kwargs in self.gen_count_args(url): + r = await self.helpers.request(*args, **kwargs) + if r is not None and str(r.status_code)[0] not in {"4", "5"}: + return count + + def gen_count_args(self, url): + header_count = 95 + while 1: + if header_count < 0: + break + fake_headers = {} + for i in range(0, header_count): + fake_headers[self.rand_string(14)] = self.rand_string(14) + yield header_count, (url,), {"headers": fake_headers} + header_count -= 5 + + async def binary_search(self, compare_helper, url, group, reasons=None, reflection=False): + if reasons is None: + reasons = [] + self.debug(f"Entering recursive binary_search with {len(group):,} sized group") + if len(group) == 1 and len(reasons) > 0: + yield group[0], reasons, reflection + elif len(group) > 1 or (len(group) == 1 and len(reasons) == 0): + for group_slice in self.helpers.split_list(group): + match, reasons, reflection, subject_response = await self.check_batch(compare_helper, url, group_slice) + if match is False: + async for r in self.binary_search(compare_helper, url, group_slice, reasons, reflection): + yield r + else: + self.debug( + f"binary_search() failed to start with group of size {str(len(group))} and {str(len(reasons))} length reasons" + ) + + async def check_batch(self, compare_helper, url, header_list): + rand = self.rand_string() + test_headers = {} + for header in header_list: + test_headers[header] = rand + return await compare_helper.compare(url, headers=test_headers, check_reflection=(len(header_list) == 1)) + + async def finish(self): + for url, (event, batch_size) in list(self.event_dict.items()): + try: + compare_helper = self.helpers.http_compare(url) + except HttpCompareError as e: + self.debug(f"Error initializing compare helper: {e}") + continue + words_to_process = { + i for i in self.extracted_words_master.copy() if hash(i + url) not in self.already_checked + } + try: + results = await self.do_mining(words_to_process, url, batch_size, compare_helper) + except HttpCompareError as e: + self.debug(f"Encountered HttpCompareError: [{e}] for URL [{url}]") + continue + await self.process_results(event, results) + + async def filter_event(self, event): + # Filter out static endpoints + if event.data.get("url").endswith(tuple(f".{ext}" for ext in self.config.get("url_extension_static", []))): + return False + + # We don't need to look at WEB_PARAMETERS that we produced + if str(event.module).startswith("paramminer"): + return False + + return True diff --git a/bbot/modules/passivetotal.py b/bbot/modules/passivetotal.py index 21510115cd..b20c7bbac0 100644 --- a/bbot/modules/passivetotal.py +++ b/bbot/modules/passivetotal.py @@ -1,39 +1,46 @@ -from bbot.modules.shodan_dns import shodan_dns +from bbot.modules.templates.subdomain_enum import subdomain_enum_apikey -class passivetotal(shodan_dns): +class passivetotal(subdomain_enum_apikey): watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] flags = 
["subdomain-enum", "passive", "safe"] - meta = {"description": "Query the PassiveTotal API for subdomains", "auth_required": True} - options = {"username": "", "api_key": ""} - options_desc = {"username": "RiskIQ Username", "api_key": "RiskIQ API Key"} + meta = { + "description": "Query the PassiveTotal API for subdomains", + "created_date": "2022-08-08", + "author": "@TheTechromancer", + "auth_required": True, + } + options = {"api_key": ""} + options_desc = {"api_key": "PassiveTotal API Key in the format of 'username:api_key'"} base_url = "https://api.passivetotal.org/v2" - def setup(self): - self.username = self.config.get("username", "") - self.api_key = self.config.get("api_key", "") - self.auth = (self.username, self.api_key) - return super().setup() + async def setup(self): + return await super().setup() - def ping(self): + async def ping(self): url = f"{self.base_url}/account/quota" - j = self.helpers.request(url, auth=self.auth).json() + j = (await self.api_request(url)).json() limit = j["user"]["limits"]["search_api"] used = j["user"]["counts"]["search_api"] assert used < limit, "No quota remaining" - def abort_if(self, event): + def prepare_api_request(self, url, kwargs): + api_username, api_key = self.api_key.split(":", 1) + kwargs["auth"] = (api_username, api_key) + return url, kwargs + + async def abort_if(self, event): # RiskIQ is famous for their junk data - return super().abort_if(event) or "unresolved" in event.tags + return await super().abort_if(event) or "unresolved" in event.tags - def query(self, query): + async def request_url(self, query): url = f"{self.base_url}/enrichment/subdomains?query={self.helpers.quote(query)}" - j = self.helpers.request(url, auth=self.auth).json() - for subdomain in j.get("subdomains", []): - yield f"{subdomain}.{query}" + return await self.api_request(url) - @property - def api_secret(self): - return self.username and self.api_key + async def parse_results(self, r, query): + results = set() + for subdomain in r.json().get("subdomains", []): + results.add(f"{subdomain}.{query}") + return results diff --git a/bbot/modules/pgp.py b/bbot/modules/pgp.py index fe967cbe22..0c53c2ad42 100644 --- a/bbot/modules/pgp.py +++ b/bbot/modules/pgp.py @@ -1,33 +1,49 @@ -from bbot.modules.crobat import crobat +from bbot.modules.templates.subdomain_enum import subdomain_enum -class pgp(crobat): +class pgp(subdomain_enum): watched_events = ["DNS_NAME"] produced_events = ["EMAIL_ADDRESS"] flags = ["passive", "email-enum", "safe"] - meta = {"description": "Query common PGP servers for email addresses"} + meta = { + "description": "Query common PGP servers for email addresses", + "created_date": "2022-08-10", + "author": "@TheTechromancer", + } + # TODO: scan for Web Key Directory (/.well-known/openpgpkey/) options = { "search_urls": [ "https://keyserver.ubuntu.com/pks/lookup?fingerprint=on&op=vindex&search=", "http://the.earth.li:11371/pks/lookup?fingerprint=on&op=vindex&search=", + "https://pgpkeys.eu/pks/lookup?search=&op=index", + "https://pgp.mit.edu/pks/lookup?search=&op=index", ] } options_desc = {"search_urls": "PGP key servers to search"} - def handle_event(self, event): + async def handle_event(self, event): query = self.make_query(event) - results = self.query(query) + results = await self.query(query) if results: - for hostname in results: - if not hostname == event: - self.emit_event(hostname, "EMAIL_ADDRESS", event, abort_if=self.abort_if) + for email, keyserver in results: + await self.emit_event( + email, + "EMAIL_ADDRESS", + event, + 
+                    abort_if=self.abort_if,
+                    context=f'{{module}} queried PGP keyserver {keyserver} for "{query}" and found {{event.type}}: {{event.data}}',
+                )
 
-    def query(self, query):
-        for url in self.config.get("search_urls", []):
-            url = url.replace("<query>", self.helpers.quote(query))
-            response = self.helpers.request(url)
+    async def query(self, query):
+        results = set()
+        urls = self.config.get("search_urls", [])
+        urls = [url.replace("<query>", self.helpers.quote(query)) for url in urls]
+        async for url, response in self.helpers.request_batch(urls):
+            keyserver = self.helpers.urlparse(url).netloc
             if response is not None:
-                for email in self.helpers.extract_emails(response.text):
+                for email in await self.helpers.re.extract_emails(response.text):
                     email = email.lower()
-                    if email.lower().endswith(query):
-                        yield email
+                    if email.endswith(query):
+                        results.add((email, keyserver))
+        return results
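The <query> substitution and suffix filtering in pgp.query() are easy to exercise in isolation. A hedged sketch using plain httpx and a naive email regex as stand-ins for BBOT's helpers (the keyserver URL is copied from the defaults above):

# a minimal sketch, not the module's actual helpers; the regex is a
# simplification of what bbot's email extraction provides
import re
import httpx

query = "example.com"
url = "https://keyserver.ubuntu.com/pks/lookup?fingerprint=on&op=vindex&search=<query>"
resp = httpx.get(url.replace("<query>", query))
emails = {
    e.lower()
    for e in re.findall(r"[\w.+-]+@[\w-]+\.[\w.-]+", resp.text)
    if e.lower().endswith(query)
}
print(emails)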
diff --git a/bbot/modules/portfilter.py b/bbot/modules/portfilter.py
new file mode 100644
index 0000000000..5ee51e295b
--- /dev/null
+++ b/bbot/modules/portfilter.py
@@ -0,0 +1,43 @@
+from bbot.modules.base import BaseInterceptModule
+
+
+class portfilter(BaseInterceptModule):
+    watched_events = ["OPEN_TCP_PORT", "URL_UNVERIFIED", "URL"]
+    flags = ["passive", "safe"]
+    meta = {
+        "description": "Filter out unwanted open ports from cloud/CDN targets",
+        "created_date": "2025-01-06",
+        "author": "@TheTechromancer",
+    }
+    options = {
+        "cdn_tags": "cdn-",
+        "allowed_cdn_ports": "80,443",
+    }
+    options_desc = {
+        "cdn_tags": "Comma-separated list of tag prefixes that mark CDN hosts, e.g. 'cdn-,cloud-'",
+        "allowed_cdn_ports": "Comma-separated list of ports that are allowed to be scanned for CDNs",
+    }
+
+    _priority = 4
+
+    async def setup(self):
+        self.cdn_tags = [t.strip() for t in self.config.get("cdn_tags", "").split(",")]
+        self.allowed_cdn_ports = self.config.get("allowed_cdn_ports", "").strip()
+        if self.allowed_cdn_ports:
+            try:
+                self.allowed_cdn_ports = [int(p.strip()) for p in self.allowed_cdn_ports.split(",")]
+            except Exception as e:
+                return False, f"Error parsing allowed CDN ports '{self.allowed_cdn_ports}': {e}"
+        return True
+
+    async def handle_event(self, event):
+        # if the port isn't in our list of allowed CDN ports
+        if event.port not in self.allowed_cdn_ports:
+            for cdn_tag in self.cdn_tags:
+                # and if any of the event's tags match our CDN filter
+                if any(t.startswith(str(cdn_tag)) for t in event.tags):
+                    return (
+                        False,
+                        f"one of the event's tags matches the tag '{cdn_tag}' and the port is not in the allowed list",
+                    )
+        return True
diff --git a/bbot/modules/portscan.py b/bbot/modules/portscan.py
new file mode 100644
index 0000000000..ed77193ea3
--- /dev/null
+++ b/bbot/modules/portscan.py
@@ -0,0 +1,323 @@
+import json
+import ipaddress
+from contextlib import suppress
+from radixtarget import RadixTarget
+
+from bbot.modules.base import BaseModule
+
+
+# TODO: this module is getting big. It should probably be two modules: one for ping and one for SYN.
+
+
+class portscan(BaseModule):
+    flags = ["active", "portscan", "safe"]
+    watched_events = ["IP_ADDRESS", "IP_RANGE", "DNS_NAME"]
+    produced_events = ["OPEN_TCP_PORT"]
+    meta = {
+        "description": "Port scan with masscan. By default, scans top 100 ports.",
+        "created_date": "2024-05-15",
+        "author": "@TheTechromancer",
+    }
+    options = {
+        "top_ports": 100,
+        "ports": "",
+        # ping scan at 600 packets/s ~= private IP space in 8 hours
+        "rate": 300,
+        "wait": 5,
+        "ping_first": False,
+        "ping_only": False,
+        "adapter": "",
+        "adapter_ip": "",
+        "adapter_mac": "",
+        "router_mac": "",
+    }
+    options_desc = {
+        "top_ports": "Top ports to scan (default 100) (to override, specify 'ports')",
+        "ports": "Ports to scan",
+        "rate": "Rate in packets per second",
+        "wait": "Seconds to wait for replies after scan is complete",
+        "ping_first": "Only portscan hosts that reply to pings",
+        "ping_only": "Ping sweep only, no portscan",
+        "adapter": 'Manually specify a network interface, such as "eth0" or "tun0". If not specified, the first network interface found with a default gateway will be used.',
+        "adapter_ip": "Send packets using this IP address. Not needed unless masscan's autodetection fails",
+        "adapter_mac": "Send packets using this as the source MAC address. Not needed unless masscan's autodetection fails",
+        "router_mac": "Send packets to this MAC address as the destination. Not needed unless masscan's autodetection fails",
+    }
+    deps_common = ["masscan"]
+    batch_size = 1000000
+    _shuffle_incoming_queue = False
+
+    async def setup(self):
+        self.top_ports = self.config.get("top_ports", 100)
+        self.rate = self.config.get("rate", 300)
+        self.wait = self.config.get("wait", 5)
+        self.ping_first = self.config.get("ping_first", False)
+        self.ping_only = self.config.get("ping_only", False)
+        self.ping_scan = self.ping_first or self.ping_only
+        self.adapter = self.config.get("adapter", "")
+        self.adapter_ip = self.config.get("adapter_ip", "")
+        self.adapter_mac = self.config.get("adapter_mac", "")
+        self.router_mac = self.config.get("router_mac", "")
+        self.ports = self.config.get("ports", "")
+        if self.ports:
+            try:
+                self.helpers.parse_port_string(self.ports)
+            except ValueError as e:
+                return False, f"Error parsing ports '{self.ports}': {e}"
+
+        # whether we've finished scanning our original scan targets
+        self.scanned_initial_targets = False
+        # keeps track of individual scanned IPs and their open ports
+        # this is necessary because we may encounter more hosts with the same IP
+        # and we want to avoid scanning them again
+        self.open_port_cache = {}
+        # keeps track of which IPs/subnets have already been scanned
+        self.syn_scanned = self.helpers.make_target(acl_mode=True)
+        self.ping_scanned = self.helpers.make_target(acl_mode=True)
+        self.prep_blacklist()
+        self.helpers.depsinstaller.ensure_root(message="Masscan requires root privileges")
+        # check if we're set up for IPv6
+        self.ipv6_support = True
+        dry_run_command = self._build_masscan_command(target_file=self.helpers.tempfile(["::1"], pipe=False), wait=0)
+        ipv6_result = await self.run_process(
+            dry_run_command,
+            sudo=True,
+            _log_stderr=False,
+        )
+        if ipv6_result is None:
+            return False, "Masscan failed to run"
+        returncode = getattr(ipv6_result, "returncode", 0)
+        if returncode and "failed to detect IPv6 address" in ipv6_result.stderr:
+            self.warning("It looks like you are not set up for IPv6.
IPv6 targets will not be scanned.") + self.ipv6_support = False + return True + + async def handle_batch(self, *events): + # on our first run, we automatically include all our initial scan targets + if not self.scanned_initial_targets: + self.scanned_initial_targets = True + events = set(events) + events.update( + {e for e in self.scan.target.seeds.events if e.type in ("DNS_NAME", "IP_ADDRESS", "IP_RANGE")} + ) + + # ping scan + if self.ping_scan: + ping_targets, ping_correlator = await self.make_targets(events, self.ping_scanned) + ping_events = [] + async for alive_host, _, parent_event in self.masscan(ping_targets, ping_correlator, ping=True): + # port 0 means icmp ping response + ping_event = await self.emit_open_port(alive_host, 0, parent_event) + ping_events.append(ping_event) + syn_targets, syn_correlator = await self.make_targets(ping_events, self.syn_scanned) + else: + syn_targets, syn_correlator = await self.make_targets(events, self.syn_scanned) + + # TCP SYN scan + if not self.ping_only: + async for ip, port, parent_event in self.masscan(syn_targets, syn_correlator): + await self.emit_open_port(ip, port, parent_event) + else: + self.debug("Only ping sweep was requested, skipping TCP SYN scan") + + async def masscan(self, targets, correlator, ping=False): + scan_type = "ping" if ping else "SYN" + self.debug(f"Starting masscan {scan_type} scan") + if not targets: + self.debug("No targets specified, aborting.") + return + + target_file = self.helpers.tempfile(targets, pipe=False) + command = self._build_masscan_command(target_file, ping=ping) + stats_file = self.helpers.tempfile_tail(callback=self.log_masscan_status) + try: + with open(stats_file, "w") as stats_fh: + async for line in self.run_process_live(command, sudo=True, stderr=stats_fh): + for ip, port in self.parse_json_line(line): + parent_events = correlator.search(ip) + # masscan gets the occasional junk result. 
this is harmless and + # seems to be a side effect of it having its own TCP stack + # see https://github.com/robertdavidgraham/masscan/issues/397 + if parent_events is None: + self.debug(f"Failed to correlate {ip} to targets") + continue + emitted_hosts = set() + for parent_event in parent_events: + if parent_event.type == "DNS_NAME": + host = parent_event.host + else: + host = ip + if host not in emitted_hosts: + yield host, port, parent_event + emitted_hosts.add(host) + finally: + for file in (stats_file, target_file): + file.unlink() + + async def make_targets(self, events, scanned_tracker): + """ + Convert events into a list of targets, skipping ones that have already been scanned + """ + correlator = RadixTarget() + targets = set() + for event in sorted(events, key=lambda e: e._host_size): + # skip events without host + if not event.host: + continue + ips = set() + try: + # first assume it's an ip address / ip range + # False == it's not a hostname + ips.add(ipaddress.ip_network(event.host, strict=False)) + except Exception: + # if it's a hostname, get its IPs from resolved_hosts + for h in event.resolved_hosts: + try: + ips.add(ipaddress.ip_network(h, strict=False)) + except Exception: + continue + + for ip in ips: + # remove IPv6 addresses if we're not scanning IPv6 + if not self.ipv6_support and ip.version == 6: + self.debug(f"Not scanning IPv6 address {ip} because we aren't set up for IPv6") + continue + + # check if we already found open ports on this IP + if event.type != "IP_RANGE": + ip_hash = hash(ip.network_address) + already_found_ports = self.open_port_cache.get(ip_hash, None) + if already_found_ports is not None: + # if so, emit them + for port in already_found_ports: + await self.emit_open_port(event.host, port, event) + + # build a correlation from the IP back to its original parent event + events_set = correlator.search(ip) + if events_set is None: + correlator.insert(ip, {event}) + else: + events_set.add(event) + + # has this IP already been scanned? + if not scanned_tracker.get(ip): + # if not, add it to targets! 
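+ # (the tracker is an ACL-mode target: adding a network here should mark
+ # every address inside it as scanned for subsequent batches)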
+ scanned_tracker.add(ip) + targets.add(ip) + else: + self.debug(f"Skipping {ip} because it's already been scanned") + + return targets, correlator + + async def emit_open_port(self, ip, port, parent_event): + parent_is_dns_name = parent_event.type == "DNS_NAME" + if parent_is_dns_name: + host = parent_event.host + else: + host = ip + + if port == 0: + event_data = host + event_type = "DNS_NAME" if parent_is_dns_name else "IP_ADDRESS" + scan_type = "ping" + else: + event_data = self.helpers.make_netloc(host, port) + event_type = "OPEN_TCP_PORT" + scan_type = "TCP SYN" + + event = self.make_event( + event_data, + event_type, + parent=parent_event, + context=f"{{module}} executed a {scan_type} scan against {parent_event.data} and found: {{event.type}}: {{event.data}}", + ) + + await self.emit_event(event) + return event + + def parse_json_line(self, line): + try: + j = json.loads(line) + except Exception: + return + ip = j.get("ip", "") + if not ip: + return + ip = self.helpers.make_ip_type(ip) + ip_hash = hash(ip) + ports = j.get("ports", []) + if not ports: + return + for p in ports: + proto = p.get("proto", "") + port_number = p.get("port", 0) + try: + self.open_port_cache[ip_hash].add(port_number) + except KeyError: + self.open_port_cache[ip_hash] = {port_number} + if proto == "" or port_number == "": + continue + yield ip, port_number + + def prep_blacklist(self): + exclude = [] + for t in self.scan.blacklist: + t = self.helpers.make_ip_type(t.data) + if not isinstance(t, str): + if self.helpers.is_ip(t): + exclude.append(str(ipaddress.ip_network(t))) + else: + exclude.append(str(t)) + if not exclude: + exclude = ["255.255.255.255/32"] + self.exclude_file = self.helpers.tempfile(exclude, pipe=False) + + def _build_masscan_command(self, target_file=None, ping=False, dry_run=False, wait=None): + if wait is None: + wait = self.wait + command = ( + "masscan", + "--excludefile", + str(self.exclude_file), + "--rate", + self.rate, + "--wait", + wait, + "--open-only", + "-oJ", + "-", + ) + if target_file is not None: + command += ("-iL", str(target_file)) + if dry_run: + command += ("-p1", "--wait", "0") + else: + if self.adapter: + command += ("--adapter", self.adapter) + if self.adapter_ip: + command += ("--adapter-ip", self.adapter_ip) + if self.adapter_mac: + command += ("--adapter-mac", self.adapter_mac) + if self.router_mac: + command += ("--router-mac", self.router_mac) + if ping: + command += ("--ping",) + else: + if self.ports: + command += ("-p", self.ports) + else: + command += ("-p", self.helpers.top_tcp_ports(self.top_ports, as_string=True)) + return command + + def log_masscan_status(self, s): + if "FAIL" in s: + self.warning(s) + self.warning( + 'Masscan failed to detect interface. Recommend passing "adapter_ip", "adapter_mac", and "router_mac" config options to portscan module.' 
+ ) + else: + self.verbose(s) + + async def cleanup(self): + with suppress(Exception): + self.exclude_file.unlink() diff --git a/bbot/modules/postman.py b/bbot/modules/postman.py new file mode 100644 index 0000000000..f6eafc6ff9 --- /dev/null +++ b/bbot/modules/postman.py @@ -0,0 +1,106 @@ +from bbot.modules.templates.postman import postman + + +class postman(postman): + watched_events = ["ORG_STUB", "SOCIAL"] + produced_events = ["CODE_REPOSITORY"] + flags = ["passive", "subdomain-enum", "safe", "code-enum"] + meta = { + "description": "Query Postman's API for related workspaces, collections, requests and download them", + "created_date": "2024-09-07", + "author": "@domwhewell-sage", + } + options = {"api_key": ""} + options_desc = {"api_key": "Postman API Key"} + reject_wildcards = False + + async def handle_event(self, event): + # Handle postman profile + if event.type == "SOCIAL": + owner = event.data.get("profile_name", "") + in_scope_workspaces = await self.process_workspaces(user=owner) + elif event.type == "ORG_STUB": + owner = event.data + in_scope_workspaces = await self.process_workspaces(org=owner) + if in_scope_workspaces: + for workspace in in_scope_workspaces: + repo_url = workspace["url"] + repo_name = workspace["repo_name"] + if event.type == "SOCIAL": + context = f'{{module}} searched postman.com for workspaces belonging to "{owner}" and found "{repo_name}" at {{event.type}}: {repo_url}' + elif event.type == "ORG_STUB": + context = f'{{module}} searched postman.com for "{owner}" and found matching workspace "{repo_name}" at {{event.type}}: {repo_url}' + await self.emit_event( + {"url": repo_url}, + "CODE_REPOSITORY", + tags="postman", + parent=event, + context=context, + ) + + async def process_workspaces(self, user=None, org=None): + in_scope_workspaces = [] + owner = user or org + if owner: + self.verbose(f"Searching for postman workspaces, collections, requests for {owner}") + for item in await self.query(owner): + workspace = item["document"] + slug = workspace["slug"] + profile = workspace["publisherHandle"] + repo_url = f"{self.html_url}/{profile}/{slug}" + workspace_id = await self.get_workspace_id(repo_url) + if (org and workspace_id) or (user and owner.lower() == profile.lower()): + self.verbose(f"Found workspace ID {workspace_id} for {repo_url}") + data = await self.request_workspace(workspace_id) + in_scope = await self.validate_workspace( + data["workspace"], data["environments"], data["collections"] + ) + if in_scope: + in_scope_workspaces.append({"url": repo_url, "repo_name": slug}) + else: + self.verbose( + f"Failed to validate {repo_url} is in our scope as it does not contain any in-scope dns_names / emails" + ) + return in_scope_workspaces + + async def query(self, query): + def api_page_iter(url, page, page_size, offset, **kwargs): + kwargs["json"]["body"]["from"] = offset + return url, kwargs + + data = [] + url = f"{self.base_url}/ws/proxy" + json = { + "service": "search", + "method": "POST", + "path": "/search-all", + "body": { + "queryIndices": [ + "collaboration.workspace", + ], + "queryText": self.helpers.quote(query), + "size": 25, + "from": 0, + "clientTraceId": "", + "requestOrigin": "srp", + "mergeEntities": "true", + "nonNestedRequests": "true", + "domain": "public", + }, + } + + agen = self.api_page_iter( + url, page_size=25, method="POST", iter_key=api_page_iter, json=json, _json=False, headers=self.headers + ) + async for r in agen: + status_code = getattr(r, "status_code", 0) + if status_code != 200: + self.debug(f"Reached end of postman 
search results (url: {r.url}) with status code {status_code}")
+                break
+            try:
+                data.extend(r.json().get("data", []))
+            except Exception as e:
+                self.warning(f"Failed to decode JSON for {r.url} (HTTP status: {status_code}): {e}")
+                break
+
+        return data
diff --git a/bbot/modules/postman_download.py b/bbot/modules/postman_download.py
new file mode 100644
index 0000000000..727c655772
--- /dev/null
+++ b/bbot/modules/postman_download.py
@@ -0,0 +1,83 @@
+import zipfile
+import json
+from pathlib import Path
+from bbot.modules.templates.postman import postman
+
+
+class postman_download(postman):
+    watched_events = ["CODE_REPOSITORY"]
+    produced_events = ["FILESYSTEM"]
+    flags = ["passive", "subdomain-enum", "safe", "code-enum"]
+    meta = {
+        "description": "Download workspaces, collections, requests from Postman",
+        "created_date": "2024-09-07",
+        "author": "@domwhewell-sage",
+    }
+    options = {"output_folder": "", "api_key": ""}
+    options_desc = {"output_folder": "Folder to download postman workspaces to", "api_key": "Postman API Key"}
+    scope_distance_modifier = 2
+
+    async def setup(self):
+        output_folder = self.config.get("output_folder")
+        if output_folder:
+            self.output_dir = Path(output_folder) / "postman_workspaces"
+        else:
+            self.output_dir = self.scan.home / "postman_workspaces"
+        self.helpers.mkdir(self.output_dir)
+        return await super().setup()
+
+    async def filter_event(self, event):
+        if event.type == "CODE_REPOSITORY":
+            if "postman" not in event.tags:
+                return False, "event is not a postman workspace"
+        return True
+
+    async def handle_event(self, event):
+        repo_url = event.data.get("url")
+        workspace_id = await self.get_workspace_id(repo_url)
+        if workspace_id:
+            self.verbose(f"Found workspace ID {workspace_id} for {repo_url}")
+            data = await self.request_workspace(workspace_id)
+            workspace = data["workspace"]
+            environments = data["environments"]
+            collections = data["collections"]
+            workspace_path = self.save_workspace(workspace, environments, collections)
+            if workspace_path:
+                self.verbose(f"Downloaded workspace from {repo_url} to {workspace_path}")
+                codebase_event = self.make_event(
+                    {"path": str(workspace_path)}, "FILESYSTEM", tags=["postman", "workspace"], parent=event
+                )
+                await self.emit_event(
+                    codebase_event,
+                    context=f"{{module}} downloaded postman workspace at {repo_url} to {{event.type}}: {workspace_path}",
+                )
+
+    def save_workspace(self, workspace, environments, collections):
+        zip_path = None
+        # Create a folder for the workspace
+        name = workspace["name"]
+        workspace_id = workspace["id"]
+        folder = self.output_dir / name
+        self.helpers.mkdir(folder)
+        zip_path = folder / f"{workspace_id}.zip"
+
+        # Main Workspace
+        self.add_json_to_zip(zip_path, workspace, f"{name}.postman_workspace.json")
+
+        # Workspace Environments
+        if environments:
+            for environment in environments:
+                environment_id = environment["id"]
+                self.add_json_to_zip(zip_path, environment, f"{environment_id}.postman_environment.json")
+
+        # Workspace Collections
+        if collections:
+            for collection in collections:
+                collection_name = collection["info"]["name"]
+                self.add_json_to_zip(zip_path, collection, f"{collection_name}.postman_collection.json")
+        return zip_path
+
+    def add_json_to_zip(self, zip_path, data, filename):
+        with zipfile.ZipFile(zip_path, "a") as zipf:
+            json_content = json.dumps(data, indent=4)
+            zipf.writestr(filename, json_content)
diff --git a/bbot/modules/rapiddns.py b/bbot/modules/rapiddns.py
new file mode 100644
index 0000000000..150728eca3
--- /dev/null
+++ b/bbot/modules/rapiddns.py
@@ -0,0 +1,23 @@ +from bbot.modules.templates.subdomain_enum import subdomain_enum + + +class rapiddns(subdomain_enum): + flags = ["subdomain-enum", "passive", "safe"] + watched_events = ["DNS_NAME"] + produced_events = ["DNS_NAME"] + meta = { + "description": "Query rapiddns.io for subdomains", + "created_date": "2022-08-24", + "author": "@TheTechromancer", + } + + base_url = "https://rapiddns.io" + + async def request_url(self, query): + url = f"{self.base_url}/subdomain/{self.helpers.quote(query)}?full=1#result" + response = await self.api_request(url, timeout=self.http_timeout + 10) + return response + + async def parse_results(self, r, query): + text = getattr(r, "text", "") + return await self.scan.extract_in_scope_hostnames(text) diff --git a/bbot/modules/report/affiliates.py b/bbot/modules/report/affiliates.py index cc0f4ccb39..a67c665504 100644 --- a/bbot/modules/report/affiliates.py +++ b/bbot/modules/report/affiliates.py @@ -1,22 +1,26 @@ -from bbot.modules.report.base import ReportModule +from bbot.modules.report.base import BaseReportModule -class affiliates(ReportModule): +class affiliates(BaseReportModule): watched_events = ["*"] produced_events = [] - flags = ["passive", "safe"] - meta = {"description": "Summarize affiliate domains at the end of a scan"} + flags = ["passive", "safe", "affiliates"] + meta = { + "description": "Summarize affiliate domains at the end of a scan", + "created_date": "2022-07-25", + "author": "@TheTechromancer", + } scope_distance_modifier = None accept_dupes = True - def setup(self): + async def setup(self): self.affiliates = {} return True - def handle_event(self, event): + async def handle_event(self, event): self.add_affiliate(event) - def report(self): + async def report(self): affiliates = sorted(self.affiliates.items(), key=lambda x: x[-1]["weight"], reverse=True) header = ["Affiliate", "Score", "Count"] table = [] @@ -24,13 +28,12 @@ def report(self): count = stats["count"] weight = stats["weight"] table.append([domain, f"{weight:.2f}", f"{count:,}"]) - for row in self.helpers.make_table(table, header).splitlines(): - self.info(row) + self.log_table(table, header, table_name="affiliates", max_log_entries=50) def add_affiliate(self, event): if event.scope_distance > 0 and event.host and isinstance(event.host, str): subdomain, domain = self.helpers.split_domain(event.host) - weight = 1 / event.scope_distance + (1 if "affiliate" in event.tags else 0) + weight = (1 / event.scope_distance) + (1 if "affiliate" in event.tags else 0) if domain and not self.scan.in_scope(domain): try: self.affiliates[domain]["weight"] += weight diff --git a/bbot/modules/report/asn.py b/bbot/modules/report/asn.py index 5974ffa5e1..3b3c488d15 100644 --- a/bbot/modules/report/asn.py +++ b/bbot/modules/report/asn.py @@ -1,105 +1,252 @@ -import ipaddress -import traceback +from bbot.modules.report.base import BaseReportModule -from bbot.modules.report.base import ReportModule - -class asn(ReportModule): +class asn(BaseReportModule): watched_events = ["IP_ADDRESS"] produced_events = ["ASN"] flags = ["passive", "subdomain-enum", "safe"] - meta = {"description": "Query bgpview.io for ASNs"} - scope_distance_modifier = 0 - - base_url = "https://api.bgpview.io" + meta = { + "description": "Query ripe and bgpview.io for ASNs", + "created_date": "2022-07-25", + "author": "@TheTechromancer", + } + scope_distance_modifier = 1 + # we accept dupes to avoid missing data + # because sometimes IP addresses are re-emitted with lower scope distances + accept_dupes = True - def 
setup(self): + async def setup(self): self.asn_counts = {} - self.asn_data = {} - self.asn_metadata = {} + self.asn_cache = {} + self.ripe_cache = {} + self.sources = ["bgpview", "ripe"] + self.unknown_asn = { + "asn": "UNKNOWN", + "subnet": "0.0.0.0/32", + "name": "unknown", + "description": "unknown", + "country": "", + } return True - def filter_event(self, event): - if "private" in event.tags: + async def filter_event(self, event): + if str(event.module) == "ipneighbor": + return False + if getattr(event.host, "is_private", False): return False return True - def handle_event(self, event): - if self.cache_get(event.host) == False: - asns = self.get_asn(event.host) - if asns: + async def handle_event(self, event): + host = event.host + if self.cache_get(host) is False: + asns, source = await self.get_asn(host) + if not asns: + self.cache_put(self.unknown_asn) + else: for asn in asns: - if asn not in self.asn_metadata: - contacts = self.get_asn_metadata(asn) - if not contacts: - continue - for c in contacts: - self.emit_event(c, "EMAIL_ADDRESS", source=event) - self.asn_metadata[asn] = True + emails = asn.pop("emails", []) + self.cache_put(asn) + asn_event = self.make_event(asn, "ASN", parent=event) + asn_number = asn.get("asn", "") + asn_desc = asn.get("description", "") + asn_name = asn.get("name", "") + asn_subnet = asn.get("subnet", "") + if not asn_event: + continue + await self.emit_event( + asn_event, + context=f"{{module}} checked {event.data} against {source} API and got {{event.type}}: AS{asn_number} ({asn_name}, {asn_desc}, {asn_subnet})", + ) + for email in emails: + await self.emit_event( + email, + "EMAIL_ADDRESS", + parent=asn_event, + context=f"{{module}} retrieved details for AS{asn_number} and found {{event.type}}: {{event.data}}", + ) - def report(self): - asn_data = sorted(self.asn_data.items(), key=lambda x: self.asn_counts[x[0]], reverse=True) - header = ["ASN", "Subnet", "Host Count", "Name", "Description"] + async def report(self): + asn_data = sorted(self.asn_cache.items(), key=lambda x: self.asn_counts[x[0]], reverse=True) + if not asn_data: + return + header = ["ASN", "Subnet", "Host Count", "Name", "Description", "Country"] table = [] - for subnet, prefix in asn_data: + for subnet, asn in asn_data: count = self.asn_counts[subnet] - name = prefix.get("name", "") - description = prefix.get("description", "") - asn = "AS" + str(prefix.get("asn", {}).get("asn", "")) - table.append([asn, subnet, f"{count:,}", name, description]) - event_str = f"{asn} - {subnet} ({count:,} hosts): {name}, {description}" - self.emit_event(event_str, "ASN", source=self.scan.root_event, quick=True) - for row in self.helpers.make_table(table, header).splitlines(): - self.info(row) + number = asn["asn"] + if number != "UNKNOWN": + number = "AS" + number + name = asn["name"] + country = asn["country"] + description = asn["description"] + table.append([number, str(subnet), f"{count:,}", name, description, country]) + self.log_table(table, header, table_name="asns") + + def cache_put(self, asn): + asn = dict(asn) + subnet = self.helpers.make_ip_type(asn.pop("subnet")) + self.asn_cache[subnet] = asn + try: + self.asn_counts[subnet] += 1 + except KeyError: + self.asn_counts[subnet] = 1 def cache_get(self, ip): ret = False for p in self.helpers.ip_network_parents(ip): try: self.asn_counts[p] += 1 - if ret == False: + if ret is False: ret = p except KeyError: continue return ret - def get_asn(self, ip): - url = f"{self.base_url}/ip/{ip}" - r = self.helpers.request(url, retries=5) + async def 
get_asn(self, ip, retries=1):
+        """
+        Takes in an IP
+        returns a list of ASNs, e.g.:
+            [{'asn': '54113', 'subnet': '2606:50c0:8000::/48', 'name': 'FASTLY', 'description': 'Fastly', 'country': 'US', 'emails': []}, {'asn': '54113', 'subnet': '2606:50c0:8000::/46', 'name': 'FASTLY', 'description': 'Fastly', 'country': 'US', 'emails': []}]
+        """
+        for attempt in range(retries + 1):
+            for i, source in enumerate(list(self.sources)):
+                get_asn_fn = getattr(self, f"get_asn_{source}")
+                res = await get_asn_fn(ip)
+                if res is False:
+                    # demote the current source to lowest priority since it just failed
+                    self.sources.append(self.sources.pop(i))
+                    self.verbose(f"Failed to contact {source}, trying next source")
+                    continue
+                return res, source
+        self.warning(f"Error retrieving ASN for {ip}")
+        return [], ""
+
+    async def get_asn_ripe(self, ip):
+        url = f"https://stat.ripe.net/data/network-info/data.json?resource={ip}"
+        response = await self.get_url(url, "ASN")
+        asns = []
+        if response is False:
+            return False
+        data = response.get("data", {})
+        if not data:
+            data = {}
+        prefix = data.get("prefix", "")
+        asn_numbers = data.get("asns", [])
+        if not prefix or not asn_numbers:
+            return []
+        for number in asn_numbers:
+            asn = await self.get_asn_metadata_ripe(number)
+            if asn is False:
+                return False
+            asn["subnet"] = prefix
+            asns.append(asn)
+        return asns
+
+    async def get_asn_metadata_ripe(self, asn_number):
+        try:
+            return self.ripe_cache[asn_number]
+        except KeyError:
+            metadata_keys = {
+                "name": ["ASName", "OrgId"],
+                "description": ["OrgName", "OrgTechName", "RTechName"],
+                "country": ["Country"],
+            }
+            url = f"https://stat.ripe.net/data/whois/data.json?resource={asn_number}"
+            response = await self.get_url(url, "ASN Metadata", cache=True)
+            if response is False:
+                return False
+            data = response.get("data", {})
+            if not data:
+                data = {}
+            records = data.get("records", [])
+            if not records:
+                records = []
+            emails = set()
+            asn = {k: "" for k in metadata_keys.keys()}
+            for record in records:
+                for item in record:
+                    key = item.get("key", "")
+                    value = item.get("value", "")
+                    for email in await self.helpers.re.extract_emails(value):
+                        emails.add(email.lower())
+                    if not key:
+                        continue
+                    if value:
+                        for keyname, keyvals in metadata_keys.items():
+                            if key in keyvals and not asn.get(keyname, ""):
+                                asn[keyname] = value
+            asn["emails"] = list(emails)
+            asn["asn"] = str(asn_number)
+            self.ripe_cache[asn_number] = asn
+            return asn
+
+    async def get_asn_bgpview(self, ip):
+        url = f"https://api.bgpview.io/ip/{ip}"
+        data = await self.get_url(url, "ASN")
+        asns = []
+        asns_tried = set()
+        if data is False:
+            return False
+        data = data.get("data", {})
+        prefixes = data.get("prefixes", [])
+        for prefix in prefixes:
+            details = prefix.get("asn", {})
+            asn = str(details.get("asn", ""))
+            subnet = prefix.get("prefix", "")
+            if not (asn or subnet):
+                continue
+            name = details.get("name") or prefix.get("name") or ""
+            description = 
details.get("description") or prefix.get("description") or "" + country = details.get("country_code") or prefix.get("country_code") or "" + emails = [] + if asn not in asns_tried: + emails = await self.get_emails_bgpview(asn) + if emails is False: + return False + asns_tried.add(asn) + asns.append( + { + "asn": asn, + "subnet": subnet, + "name": name, + "description": description, + "country": country, + "emails": emails, + } + ) + if not asns: + self.debug(f'No results for "{ip}"') + return asns - def get_asn_metadata(self, asn): - url = f"{self.base_url}/asn/{asn}" - r = self.helpers.request(url, retries=5) + async def get_emails_bgpview(self, asn): + contacts = [] + url = f"https://api.bgpview.io/asn/{asn}" + data = await self.get_url(url, "ASN metadata", cache=True) + if data is False: + return False + data = data.get("data", {}) + if not data: + self.debug(f'No results for "{asn}"') + return + email_contacts = data.get("email_contacts", []) + abuse_contacts = data.get("abuse_contacts", []) + contacts = [l.strip().lower() for l in email_contacts + abuse_contacts] + return list(set(contacts)) + + async def get_url(self, url, data_type, cache=False): + kwargs = {} + if cache: + kwargs["cache_for"] = 60 * 60 * 24 + r = await self.helpers.request(url, **kwargs) + data = {} try: j = r.json() - data = j.get("data", {}) - if not data: - self.debug(f'No results for "{asn}"') - return - email_contacts = data.get("email_contacts", []) - abuse_contacts = data.get("abuse_contacts", []) - contacts = [l.strip().lower() for l in email_contacts + abuse_contacts] - return list(set(contacts)) + if not isinstance(j, dict): + return data + return j except Exception as e: - self.warning(f"Error retrieving ASN metadata for {asn}: {e}") - self.debug(traceback.format_exc()) + self.verbose(f"Error retrieving {data_type} at {url}: {e}", trace=True) + self.debug(f"Got data: {getattr(r, 'content', '')}") + return False diff --git a/bbot/modules/report/base.py b/bbot/modules/report/base.py index 832c35345a..8a97cddd56 100644 --- a/bbot/modules/report/base.py +++ b/bbot/modules/report/base.py @@ -1,5 +1,5 @@ from bbot.modules.base import BaseModule -class ReportModule(BaseModule): +class BaseReportModule(BaseModule): _stats_exclude = True diff --git a/bbot/modules/robots.py b/bbot/modules/robots.py new file mode 100644 index 0000000000..e41b3119fb --- /dev/null +++ b/bbot/modules/robots.py @@ -0,0 +1,53 @@ +from bbot.modules.base import BaseModule + + +class robots(BaseModule): + watched_events = ["URL"] + produced_events = ["URL_UNVERIFIED"] + flags = ["active", "safe", "web-basic"] + meta = {"description": "Look for and parse robots.txt", "created_date": "2023-02-01", "author": "@liquidsec"} + + options = {"include_sitemap": False, "include_allow": True, "include_disallow": True} + options_desc = { + "include_sitemap": "Include 'sitemap' entries", + "include_allow": "Include 'Allow' Entries", + "include_disallow": "Include 'Disallow' Entries", + } + + in_scope_only = True + per_hostport_only = True + + async def setup(self): + return True + + async def handle_event(self, event): + host = f"{event.parsed_url.scheme}://{event.parsed_url.netloc}/" + result = None + url = f"{host}robots.txt" + result = await self.helpers.request(url) + if result: + body = result.text + + if body: + lines = body.split("\n") + for l in lines: + if len(l) > 0: + split_l = l.split(": ") + if (split_l[0].lower() == "allow" and self.config.get("include_allow") is True) or ( + split_l[0].lower() == "disallow" and 
self.config.get("include_disallow") is True + ): + unverified_url = f"{host}{split_l[1].lstrip('/')}".replace( + "*", self.helpers.rand_string(4) + ) + + elif split_l[0].lower() == "sitemap" and self.config.get("include_sitemap") is True: + unverified_url = split_l[1] + else: + continue + await self.emit_event( + unverified_url, + "URL_UNVERIFIED", + parent=event, + tags=["spider-danger"], + context=f"{{module}} found robots.txt at {url} and extracted {{event.type}}: {{event.data}}", + ) diff --git a/bbot/modules/securitytrails.py b/bbot/modules/securitytrails.py index d28270d7d5..b92ac07dc1 100644 --- a/bbot/modules/securitytrails.py +++ b/bbot/modules/securitytrails.py @@ -1,38 +1,35 @@ -from .shodan_dns import shodan_dns +from bbot.modules.templates.subdomain_enum import subdomain_enum_apikey -class securitytrails(shodan_dns): - +class securitytrails(subdomain_enum_apikey): watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] flags = ["subdomain-enum", "passive", "safe"] - meta = {"description": "Query the SecurityTrails API for subdomains", "auth_required": True} + meta = { + "description": "Query the SecurityTrails API for subdomains", + "created_date": "2022-07-03", + "author": "@TheTechromancer", + "auth_required": True, + } options = {"api_key": ""} options_desc = {"api_key": "SecurityTrails API key"} base_url = "https://api.securitytrails.com/v1" + ping_url = f"{base_url}/ping?apikey={{api_key}}" - def setup(self): + async def setup(self): self.limit = 100 - return super().setup() - - def ping(self): - r = self.helpers.request(f"{self.base_url}/ping?apikey={self.api_key}") - resp_content = getattr(r, "text", "") - assert getattr(r, "status_code", 0) == 200, resp_content - - def query(self, query): - url = f"{self.base_url}/domain/{query}/subdomains?apikey={self.api_key}" - r = self.helpers.request(url) - try: - j = r.json() - if type(j) == dict: - for host in j.get("subdomains", []): - yield f"{host}.{query}" - else: - self.debug(f'No results for "{query}"') - except Exception: - import traceback - - self.warning(f'Error retrieving subdomains for "{query}"') - self.debug(traceback.format_exc()) + return await super().setup() + + async def request_url(self, query): + url = f"{self.base_url}/domain/{query}/subdomains?apikey={{api_key}}" + response = await self.api_request(url) + return response + + async def parse_results(self, r, query): + results = set() + j = r.json() + if isinstance(j, dict): + for host in j.get("subdomains", []): + results.add(f"{host}.{query}") + return results diff --git a/bbot/modules/securitytxt.py b/bbot/modules/securitytxt.py new file mode 100644 index 0000000000..880865c9b8 --- /dev/null +++ b/bbot/modules/securitytxt.py @@ -0,0 +1,128 @@ +# securitytxt.py +# +# Checks for/parses https://target.domain/.well-known/security.txt +# +# Refer to: https://securitytxt.org/ +# +# security.txt may contain email addresses and URL's, and possibly IP addresses. 
+#
+# Example security.txt:
+#
+# Contact: mailto:security.reports@example.com
+# Expires: 2028-05-31T14:00:00.000Z
+# Encryption: https://example.com/security.pgp
+# Preferred-Languages: en, es
+# Canonical: https://example.com/.well-known/security.txt
+# Canonical: https://www.example.com/.well-known/security.txt
+# Policy: https://example.com/security-policy.html
+# Hiring: https://example.com/jobs.html
+#
+# Example security.txt with PGP signature:
+#
+# -----BEGIN PGP SIGNED MESSAGE-----
+# Hash: SHA512
+#
+# Contact: https://vdp.example.com
+# Expires: 2025-01-01T00:00:00.000Z
+# Preferred-Languages: fr, en
+# Canonical: https://example.com/.well-known/security.txt
+# Policy: https://example.com/cert
+# Hiring: https://www.careers.example.com
+# -----BEGIN PGP SIGNATURE-----
+#
+# iQIzBAEBCgAdFiEELC1a63jHPhyV60KPsvWy9dDkrigFAmJBypcACgkQsvWy9dDk
+# rijXHQ//Qya3hUSy5PYW+fI3eFP1+ak6gYq3Cbzkf57cqiBhxGetIGIGNJ6mxgjS
+# KAuvXLMUWgZD73r//fjZ5v1lpuWmpt54+ecat4DgcVCvFKYpaH+KBlay8SX7XtQH
+# 9T2NXMcez353TMR3EUOdLwdBzGZprf0Ekg9EzaHKMk0k+A4D9CnSb8Y6BKDPC7wr
+# eadwDIR9ESo0va4sjjcllCG9MF5hqK25SfsKriCSEAMhse2FToEBbw8ImkPKowMN
+# whJ4MIVlBxybu6XoIyk3n7HRRduijywy7uV80pAkhk/hL6wiW3M956FiahfRI6ad
+# +Gky/Ri5TjwAE/x5DhUH8O2toPsn71DeIE4geKfz5d/v41K0yncdrHjzbj0CAHu3
+# wVWLKnEp8RVqTlOR8jU0HqQUQy8iZk4LY91ROv+QjG/jUTWlwun8Ljh+YUeJTMRp
+# MGftCdCrrYjIy5aEQqWztt+dXKac/9e1plq3yyfuW1L+wG3zS7X+NpIJgygMvEwT
+# L3dqfQf63sjk8kWIZMVnicHBlc6BiLqUn020l+pkIOr4MuuJmIlByhlnfqH7YM8k
+# VShwDx7rs4Hj08C7NVCYIySaM2jM4eNKGt9V5k1F1sklCVfYaT8OqOhJrzhcisOC
+# YcQDhjt/iZTR8SzrHO7kFZbaskIp2P7JMaPax2fov15AnNHQQq8=
+# =8vfR
+# -----END PGP SIGNATURE-----
+
+from bbot.modules.base import BaseModule
+
+import re
+
+from bbot.core.helpers.regexes import email_regex, url_regexes
+
+_securitytxt_regex = r"^(?P<k>\w+): *(?P<v>.*)$"
+securitytxt_regex = re.compile(_securitytxt_regex, re.I | re.M)
+
+
+class securitytxt(BaseModule):
+    watched_events = ["DNS_NAME"]
+    produced_events = ["EMAIL_ADDRESS", "URL_UNVERIFIED"]
+    flags = ["subdomain-enum", "cloud-enum", "active", "web-basic", "safe"]
+    meta = {
+        "description": "Check for security.txt content",
+        "author": "@colin-stubbs",
+        "created_date": "2024-05-26",
+    }
+    options = {
+        "emails": True,
+        "urls": True,
+    }
+    options_desc = {
+        "emails": "emit EMAIL_ADDRESS events",
+        "urls": "emit URL_UNVERIFIED events",
+    }
+
+    async def setup(self):
+        self._emails = self.config.get("emails", True)
+        self._urls = self.config.get("urls", True)
+        return await super().setup()
+
+    def _incoming_dedup_hash(self, event):
+        # dedupe by parent
+        parent_domain = self.helpers.parent_domain(event.data)
+        return hash(parent_domain), "already processed parent domain"
+
+    async def filter_event(self, event):
+        if "_wildcard" in str(event.host).split("."):
+            return False, "event is wildcard"
+        return True
+
+    async def handle_event(self, event):
+        tags = ["securitytxt-policy"]
+        url = f"https://{event.host}/.well-known/security.txt"
+
+        r = await self.helpers.request(url, method="GET")
+
+        if r is None or r.status_code != 200:
+            # it doesn't look like we got a valid response...
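+            # (a None response usually means the request itself failed, e.g. a timeout;
+            # a non-200 status typically means no security.txt is published on this host)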
+ return + + try: + s = r.text + except Exception: + s = "" + + # avoid parsing the response unless it looks, at a very basic level, like an actual security.txt + s_lower = s.lower() + if "contact: " in s_lower or "expires: " in s_lower: + for securitytxt_match in securitytxt_regex.finditer(s): + v = securitytxt_match.group("v") + + for match in email_regex.finditer(v): + start, end = match.span() + email = v[start:end] + + if self._emails: + await self.emit_event(email, "EMAIL_ADDRESS", parent=event, tags=tags) + + for url_regex in url_regexes: + for match in url_regex.finditer(v): + start, end = match.span() + found_url = v[start:end] + + if found_url != url and self._urls is True: + await self.emit_event(found_url, "URL_UNVERIFIED", parent=event, tags=tags) + + +# EOF diff --git a/bbot/modules/shodan_dns.py b/bbot/modules/shodan_dns.py index 8bade6c4cd..2ad0bc5057 100644 --- a/bbot/modules/shodan_dns.py +++ b/bbot/modules/shodan_dns.py @@ -1,49 +1,26 @@ -from .crobat import crobat +from bbot.modules.templates.shodan import shodan -class shodan_dns(crobat): - """ - A typical module for authenticated, API-based subdomain enumeration - Inherited by several other modules including securitytrails, c99.nl, etc. - """ - +class shodan_dns(shodan): watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] flags = ["subdomain-enum", "passive", "safe"] - meta = {"description": "Query Shodan for subdomains", "auth_required": True} + meta = { + "description": "Query Shodan for subdomains", + "created_date": "2022-07-03", + "author": "@TheTechromancer", + "auth_required": True, + } options = {"api_key": ""} options_desc = {"api_key": "Shodan API key"} base_url = "https://api.shodan.io" - def setup(self): - super().setup() - self.api_key = self.config.get("api_key", "") - if self.auth_secret: - try: - self.ping() - self.hugesuccess(f"API is ready") - return True - except Exception as e: - return None, f"Error with API ({str(e).strip()})" - else: - return None, "No API key set" - - def ping(self): - r = self.helpers.request(f"{self.base_url}/api-info?key={self.api_key}") - resp_content = getattr(r, "text", "") - assert getattr(r, "status_code", 0) == 200, resp_content - - def request_url(self, query): - url = f"{self.base_url}/dns/domain/{self.helpers.quote(query)}?key={self.api_key}" - return self.helpers.request(url) + async def handle_event(self, event): + await self.handle_event_paginated(event) - def parse_results(self, r, query): - json = r.json() - if json: - for hostname in json.get("subdomains"): - yield f"{hostname}.{query}" + def make_url(self, query): + return f"{self.base_url}/dns/domain/{self.helpers.quote(query)}?key={{api_key}}&page={{page}}" - @property - def auth_secret(self): - return self.api_key + async def parse_results(self, json, query): + return [f"{sub}.{query}" for sub in json.get("subdomains", [])] diff --git a/bbot/modules/sitedossier.py b/bbot/modules/sitedossier.py new file mode 100644 index 0000000000..187aae1941 --- /dev/null +++ b/bbot/modules/sitedossier.py @@ -0,0 +1,56 @@ +from bbot.modules.templates.subdomain_enum import subdomain_enum + + +class sitedossier(subdomain_enum): + flags = ["subdomain-enum", "passive", "safe"] + watched_events = ["DNS_NAME"] + produced_events = ["DNS_NAME"] + meta = { + "description": "Query sitedossier.com for subdomains", + "created_date": "2023-08-04", + "author": "@TheTechromancer", + } + + base_url = "http://www.sitedossier.com/parentdomain" + max_pages = 10 + + async def handle_event(self, event): + query = 
self.make_query(event)
+        async for hostname in self.query(query):
+            try:
+                hostname = self.helpers.validators.validate_host(hostname)
+            except ValueError as e:
+                self.verbose(e)
+                continue
+            if hostname and hostname.endswith(f".{query}") and not hostname == event.data:
+                await self.emit_event(
+                    hostname,
+                    "DNS_NAME",
+                    event,
+                    abort_if=self.abort_if,
+                    context=f'{{module}} searched sitedossier.com for "{query}" and found {{event.type}}: {{event.data}}',
+                )
+
+    async def query(self, query, parse_fn=None, request_fn=None):
+        results = set()
+        base_url = f"{self.base_url}/{self.helpers.quote(query)}"
+        url = str(base_url)
+        for i, page in enumerate(range(1, 100 * self.max_pages + 2, 100)):
+            self.verbose(f"Fetching page #{i + 1} for {query}")
+            if page > 1:
+                url = f"{base_url}/{page}"
+            response = await self.helpers.request(url)
+            if response is None:
+                self.info(f'Query "{query}" failed (no response)')
+                break
+            if response.status_code == 302:
+                self.verbose("Hit rate limit captcha")
+                break
+            for match in await self.helpers.re.finditer_multi(self.scan.dns_regexes, response.text):
+                hostname = match.group().lower()
+                if hostname and hostname not in results:
+                    results.add(hostname)
+                    yield hostname
+            if '<a href="/parentdomain/' not in response.text:
+                break
diff --git a/bbot/modules/skymem.py b/bbot/modules/skymem.py
--- a/bbot/modules/skymem.py
+++ b/bbot/modules/skymem.py
-                if page >= last_page:
-                    break
+            domain_ids = await self.helpers.re.findall(self.next_page_regex, r.text)
+            if domain_ids:
+                domain_id = domain_ids[0]
+                for page in range(2, 22):
+                    r2 = await self.api_request(f"{self.base_url}/domain/{domain_id}?p={page}")
+                    if not r2:
+                        continue
+                    responses.append(r2)
+                    pages = re.findall(r"/domain/" + domain_id + r"\?p=(\d+)", r2.text)
+                    if not pages:
+                        break
+                    last_page = max([int(p) for p in pages])
+                    if page >= last_page:
+                        break
+
+        for i, r in enumerate(responses):
+            for email in await self.helpers.re.extract_emails(r.text):
+                await self.emit_event(
+                    email,
+                    "EMAIL_ADDRESS",
+                    parent=event,
+                    context=f'{{module}} searched skymem.info for "{query}" and found {{event.type}} on page {i + 1}: {{event.data}}',
+                )
diff --git a/bbot/modules/smuggler.py b/bbot/modules/smuggler.py
index eef64e1bb0..357fec1885 100644
--- a/bbot/modules/smuggler.py
+++ b/bbot/modules/smuggler.py
@@ -1,3 +1,5 @@
+import sys
+
 from bbot.modules.base import BaseModule
@@ -7,48 +9,39 @@
 class smuggler(BaseModule):
-
     watched_events = ["URL"]
     produced_events = ["FINDING"]
-    flags = ["active", "aggressive", "web", "slow", "brute-force"]
-    meta = {"description": "Check for HTTP smuggling"}
+    flags = ["active", "aggressive", "slow", "web-thorough"]
+    meta = {"description": "Check for HTTP smuggling", "created_date": "2022-07-06", "author": "@liquidsec"}

     in_scope_only = True
+    per_hostport_only = True

     deps_ansible = [
         {
             "name": "Get smuggler repo",
-            "git": {"repo": "https://github.com/defparam/smuggler.git", "dest": "{BBOT_TOOLS}/smuggler"},
+            "git": {"repo": "https://github.com/defparam/smuggler.git", "dest": "#{BBOT_TOOLS}/smuggler"},
         }
     ]

-    def setup(self):
-        self.scanned_hosts = set()
-        return True
-
-    def handle_event(self, event):
-
-        host = f"{event.parsed.scheme}://{event.parsed.netloc}/"
-        host_hash = hash(host)
-        if host_hash in self.scanned_hosts:
-            self.debug(f"Host {host} was already scanned, exiting")
-            return
-        else:
-            self.scanned_hosts.add(host_hash)
-
+    async def handle_event(self, event):
         command = [
-            "python",
+            sys.executable,
             f"{self.scan.helpers.tools_dir}/smuggler/smuggler.py",
             "--no-color",
             "-q",
             "-u",
             event.data,
         ]
-        for f in self.helpers.run_live(command):
-            if "Issue Found" in f:
-                technique = f.split(":")[0].rstrip()
-                text = 
f.split(":")[1].split("-")[0].strip() - description = f"[HTTP SMUGGLER] [{text}] Technique: {technique}" - self.emit_event( - {"host": str(event.host), "url": event.data, "description": description}, "FINDING", source=event - ) + async for line in self.run_process_live(command): + for f in line.split("\r"): + if "Issue Found" in f: + technique = f.split(":")[0].rstrip() + text = f.split(":")[1].split("-")[0].strip() + description = f"[HTTP SMUGGLER] [{text}] Technique: {technique}" + await self.emit_event( + {"host": str(event.host), "url": event.data, "description": description}, + "FINDING", + parent=event, + context=f"{{module}} scanned {event.data} and found HTTP smuggling ({{event.type}}): {text}", + ) diff --git a/bbot/modules/social.py b/bbot/modules/social.py new file mode 100644 index 0000000000..fb46dd3870 --- /dev/null +++ b/bbot/modules/social.py @@ -0,0 +1,57 @@ +import re +from bbot.modules.base import BaseModule + + +class social(BaseModule): + watched_events = ["URL_UNVERIFIED"] + produced_events = ["SOCIAL"] + meta = { + "description": "Look for social media links in webpages", + "created_date": "2023-03-28", + "author": "@TheTechromancer", + } + flags = ["passive", "safe", "social-enum"] + + # platform name : (regex, case_sensitive) + social_media_platforms = { + "linkedin": (r"linkedin.com/(?:in|company)/([a-zA-Z0-9-]+)", False), + "facebook": (r"facebook.com/([a-zA-Z0-9.]+)", False), + "twitter": (r"twitter.com/([a-zA-Z0-9_]{1,15})", False), + "github": (r"github.com/([a-zA-Z0-9_-]+)", False), + "instagram": (r"instagram.com/([a-zA-Z0-9_.]+)", False), + "youtube": (r"youtube.com/@([a-zA-Z0-9_]+)", False), + "bitbucket": (r"bitbucket.org/([a-zA-Z0-9_-]+)", False), + "gitlab": (r"gitlab.(?:com|org)/([a-zA-Z0-9_-]+)", False), + "discord": (r"discord.gg/([a-zA-Z0-9_-]+)", True), + "docker": (r"hub.docker.com/[ru]/([a-zA-Z0-9_-]+)", False), + "huggingface": (r"huggingface.co/([a-zA-Z0-9_-]+)", False), + "postman": (r"www.postman.com/([a-zA-Z0-9_-]+)", False), + } + + scope_distance_modifier = 1 + + async def setup(self): + self.compiled_regexes = {k: (re.compile(v), c) for k, (v, c) in self.social_media_platforms.items()} + return True + + async def handle_event(self, event): + for platform, (regex, case_sensitive) in self.compiled_regexes.items(): + for match in regex.finditer(event.data): + url = match.group() + profile_name = match.groups()[0] + if not case_sensitive: + url = url.lower() + profile_name = profile_name.lower() + url = f"https://{url}" + event_data = {"platform": platform, "url": url, "profile_name": profile_name} + # only emit if the same event isn't already in the parent chain + if not any(e.type == "SOCIAL" and e.data == event_data for e in event.get_parents()): + social_event = self.make_event( + event_data, + "SOCIAL", + parent=event, + ) + await self.emit_event( + social_event, + context=f"{{module}} detected {platform} {{event.type}} at {url}", + ) diff --git a/bbot/modules/sslcert.py b/bbot/modules/sslcert.py index 68042d9968..814068f03f 100644 --- a/bbot/modules/sslcert.py +++ b/bbot/modules/sslcert.py @@ -1,37 +1,51 @@ -import select -import socket -import threading -from OpenSSL import SSL -from ssl import PROTOCOL_TLSv1 +import asyncio +from OpenSSL import crypto +from contextlib import suppress +from bbot.errors import ValidationError from bbot.modules.base import BaseModule -from bbot.core.errors import ValidationError -from bbot.core.helpers.threadpool import NamedLock +from bbot.core.helpers.async_helpers import NamedLock +from 
bbot.core.helpers.web.ssl_context import ssl_context_noverify class sslcert(BaseModule): watched_events = ["OPEN_TCP_PORT"] produced_events = ["DNS_NAME", "EMAIL_ADDRESS"] - flags = ["subdomain-enum", "email-enum", "active", "safe"] + flags = ["affiliates", "subdomain-enum", "email-enum", "active", "safe", "web-basic"] meta = { "description": "Visit open ports and retrieve SSL certificates", + "created_date": "2022-03-30", + "author": "@TheTechromancer", } - options = {"timeout": 5.0} - options_desc = {"timeout": "Socket connect timeout in seconds"} + options = {"timeout": 5.0, "skip_non_ssl": True} + options_desc = {"timeout": "Socket connect timeout in seconds", "skip_non_ssl": "Don't try common non-SSL ports"} deps_apt = ["openssl"] - deps_pip = ["pyOpenSSL"] - max_event_handlers = 20 - scope_distance_modifier = 0 + deps_pip = ["pyOpenSSL~=24.0.0"] + _module_threads = 25 + scope_distance_modifier = 1 _priority = 2 - def setup(self): + async def setup(self): + self.timeout = self.config.get("timeout", 5.0) + self.skip_non_ssl = self.config.get("skip_non_ssl", True) + self.non_ssl_ports = (22, 53, 80) + + # sometimes we run into a server with A LOT of SANs + # these are usually stupid and useless, so we abort based on a different threshold + # depending on whether the parent event is in scope + self.in_scope_abort_threshold = 50 + self.out_of_scope_abort_threshold = 10 + self.hosts_visited = set() - self.hosts_visited_lock = threading.Lock() self.ip_lock = NamedLock() return True - def handle_event(self, event): + async def filter_event(self, event): + if self.skip_non_ssl and event.port in self.non_ssl_ports: + return False, f"Port {event.port} doesn't typically use SSL" + return True + async def handle_event(self, event): _host = event.host if event.port: port = event.port @@ -42,85 +56,127 @@ def handle_event(self, event): if self.helpers.is_ip(_host): hosts = [_host] else: - hosts = list(self.helpers.resolve(_host)) - - for host in hosts: - for event_type, event_data in self.visit_host(host, port): - if event_data is not None and event_data != event: - self.debug(f"Discovered new {event_type} via SSL certificate parsing: [{event_data}]") - try: - ssl_event = self.make_event(event_data, event_type, source=event, raise_error=True) - if ssl_event: - self.emit_event(ssl_event) - except ValidationError as e: - self.hugeinfo(f'Malformed {event_type} "{event_data}" at {event.data}') - self.debug(f"Invalid data at {host}:{port}: {e}") - - def visit_host(self, host, port): + hosts = list(await self.helpers.resolve(_host)) + + if event.scope_distance == 0: + abort_threshold = self.in_scope_abort_threshold + else: + abort_threshold = self.out_of_scope_abort_threshold + + tasks = [self.visit_host(host, port) for host in hosts] + async for task in self.helpers.as_completed(tasks): + result = await task + if not isinstance(result, tuple) or not len(result) == 3: + continue + dns_names, emails, (host, port) = result + if len(dns_names) > abort_threshold: + netloc = self.helpers.make_netloc(host, port) + self.verbose( + f"Skipping Subject Alternate Names (SANs) on {netloc} because number of hostnames ({len(dns_names):,}) exceeds threshold ({abort_threshold})" + ) + dns_names = dns_names[:1] + [n for n in dns_names[1:] if self.scan.in_scope(n)] + for event_type, results in (("DNS_NAME", set(dns_names)), ("EMAIL_ADDRESS", emails)): + for event_data in results: + if event_data is not None and event_data != event: + self.debug(f"Discovered new {event_type} via SSL certificate parsing: [{event_data}]") + 
try: + ssl_event = self.make_event(event_data, event_type, parent=event, raise_error=True) + parent_event = ssl_event.get_parent() + if parent_event.scope_distance == 0: + tags = ["affiliate"] + else: + tags = None + if ssl_event: + await self.emit_event( + ssl_event, + tags=tags, + context=f"{{module}} parsed SSL certificate at {event.data} and found {{event.type}}: {{event.data}}", + ) + except ValidationError as e: + self.hugeinfo(f'Malformed {event_type} "{event_data}" at {event.data}') + self.debug(f"Invalid data at {host}:{port}: {e}") + + def on_success_callback(self, event): + parent_scope_distance = event.get_parent().scope_distance + if parent_scope_distance == 0 and event.scope_distance > 0: + event.add_tag("affiliate") + + async def visit_host(self, host, port): host = self.helpers.make_ip_type(host) + netloc = self.helpers.make_netloc(host, port) host_hash = hash((host, port)) - with self.ip_lock.get_lock(host_hash): - with self.hosts_visited_lock: - if host_hash in self.hosts_visited: - self.debug(f"Already processed {host} on port {port}, skipping") - return None, None - else: - self.hosts_visited.add(host_hash) - - socket_type = socket.AF_INET - if self.helpers.is_ip(host): - if host.version == 6: - socket_type = socket.AF_INET6 + dns_names = [] + emails = set() + async with self.ip_lock.lock(host_hash): + if host_hash in self.hosts_visited: + self.debug(f"Already processed {host} on port {port}, skipping") + return [], [], (host, port) + else: + self.hosts_visited.add(host_hash) + host = str(host) - sock = socket.socket(socket_type, socket.SOCK_STREAM) - timeout = self.config.get("timeout", 5.0) - sock.settimeout(timeout) - context = SSL.Context(PROTOCOL_TLSv1) - self.debug(f"Connecting to {host} on port {port}") + + # Connect to the host + try: + transport, _ = await asyncio.wait_for( + self.helpers.loop.create_connection( + lambda: asyncio.Protocol(), host, port, ssl=ssl_context_noverify + ), + timeout=self.timeout, + ) + except asyncio.TimeoutError: + self.debug(f"Timed out after {self.timeout} seconds while connecting to {netloc}") + return [], [], (host, port) + except Exception as e: + log_fn = self.warning + if isinstance(e, OSError): + log_fn = self.debug + log_fn(f"Error connecting to {netloc}: {e}") + return [], [], (host, port) + finally: + with suppress(Exception): + transport.close() + + # Get the SSL object try: - sock.connect((host, port)) + ssl_object = transport.get_extra_info("ssl_object") except Exception as e: - self.debug(f"Error connecting to {host} on port {port}: {e}") - return None, None - connection = SSL.Connection(context, sock) - connection.set_tlsext_host_name(self.helpers.smart_encode(host)) - connection.set_connect_state() + self.verbose(f"Error getting ssl_object: {e}", trace=True) + return [], [], (host, port) + + # Get the certificate + try: + der = ssl_object.getpeercert(binary_form=True) + except Exception as e: + self.verbose(f"Error getting peer cert: {e}", trace=True) + return [], [], (host, port) try: - while True: - try: - connection.do_handshake() - except SSL.WantReadError: - rd, _, _ = select.select([sock], [], [], sock.gettimeout()) - if not rd: - raise timeout("select timed out") - continue - break + cert = crypto.load_certificate(crypto.FILETYPE_ASN1, der) except Exception as e: - self.debug(f"Error with SSL handshake on {host} port {port}: {e}") - return None, None - cert = connection.get_peer_certificate() - sock.close() + self.verbose(f"Error loading certificate: {e}", trace=True) + return [], [], (host, port) issuer = 
cert.get_issuer()
         if issuer.emailAddress and self.helpers.regexes.email_regex.match(issuer.emailAddress):
-            yield "EMAIL_ADDRESS", issuer.emailAddress
+            emails.add(issuer.emailAddress)
         subject = cert.get_subject()
         if subject.emailAddress and self.helpers.regexes.email_regex.match(subject.emailAddress):
-            yield "EMAIL_ADDRESS", subject.emailAddress
-        common_name = subject.commonName
-        cert_results = self.get_cert_sans(cert)
-        cert_results.append(str(common_name).lstrip("*.").lower())
-        for c in set(cert_results):
-            yield "DNS_NAME", c
+            emails.add(subject.emailAddress)
+        common_name = str(subject.commonName).lstrip("*.").lower()
+        dns_names = set(self.get_cert_sans(cert))
+        with suppress(KeyError):
+            dns_names.remove(common_name)
+        dns_names = [common_name] + list(dns_names)
+        return dns_names, list(emails), (host, port)

     @staticmethod
     def get_cert_sans(cert):
-        sans = []
         raw_sans = None
         ext_count = cert.get_extension_count()
         for i in range(0, ext_count):
             ext = cert.get_extension(i)
-            if "subjectAltName" in str(ext.get_short_name()):
+            short_name = str(ext.get_short_name())
+            if "subjectAltName" in short_name:
                 raw_sans = str(ext)
         if raw_sans is not None:
             for raw_san in raw_sans.split(","):
diff --git a/bbot/modules/subdomaincenter.py b/bbot/modules/subdomaincenter.py
new file mode 100644
index 0000000000..077ccf1a6c
--- /dev/null
+++ b/bbot/modules/subdomaincenter.py
@@ -0,0 +1,41 @@
+from bbot.modules.templates.subdomain_enum import subdomain_enum
+
+
+class subdomaincenter(subdomain_enum):
+    flags = ["subdomain-enum", "passive", "safe"]
+    watched_events = ["DNS_NAME"]
+    produced_events = ["DNS_NAME"]
+    meta = {
+        "description": "Query subdomain.center's API for subdomains",
+        "created_date": "2023-07-26",
+        "author": "@TheTechromancer",
+    }
+
+    base_url = "https://api.subdomain.center"
+    retries = 2
+
+    async def sleep(self, time_to_wait):
+        self.info(f"Sleeping for {time_to_wait} seconds to avoid rate limit")
+        await self.helpers.sleep(time_to_wait)
+
+    async def request_url(self, query):
+        url = f"{self.base_url}/?domain={self.helpers.quote(query)}"
+        response = None
+        status_code = 0
+        for i in range(self.retries + 1):
+            if i > 0:
+                self.verbose(f"Retry #{i} for {query} after response code {status_code}")
+            response = await self.helpers.request(url, timeout=self.http_timeout + 30)
+            status_code = getattr(response, "status_code", 0)
+            if status_code == 429:
+                await self.sleep(20)
+            else:
+                break
+        return response
+
+    async def parse_results(self, r, query):
+        results = set()
+        json = r.json()
+        if json and isinstance(json, list):
+            results = set(json)
+        return results
diff --git a/bbot/modules/subdomainradar.py b/bbot/modules/subdomainradar.py
new file mode 100644
index 0000000000..2a4c987948
--- /dev/null
+++ b/bbot/modules/subdomainradar.py
@@ -0,0 +1,160 @@
+import time
+import asyncio
+
+from bbot.modules.templates.subdomain_enum import subdomain_enum_apikey
+
+
+class SubdomainRadar(subdomain_enum_apikey):
+    watched_events = ["DNS_NAME"]
+    produced_events = ["DNS_NAME"]
+    flags = ["subdomain-enum", "passive", "safe"]
+    meta = {
+        "description": "Query the SubDomainRadar.io API for subdomains",
+        "created_date": "2022-07-08",
+        "author": "@TheTechromancer",
+        "auth_required": True,
+    }
+    options = {"api_key": "", "group": "fast", "timeout": 120}
+    options_desc = {
+        "api_key": "SubDomainRadar.io API key",
+        "group": "The enumeration group to use. 
Choose from fast, medium, deep", + "timeout": "Timeout in seconds", + } + + base_url = "https://api.subdomainradar.io" + ping_url = f"{base_url}/profile" + group_choices = ("fast", "medium", "deep") + + # set this really high so the poll loop finishes as soon as possible + _qsize = 9999999 + + async def setup(self): + self.group = self.config.get("group", "fast").strip().lower() + self.timeout = self.config.get("timeout", 120) + if self.group not in self.group_choices: + return False, f'Invalid group: "{self.group}", please choose from {",".join(self.group_choices)}' + success, reason = await self.require_api_key() + if not success: + return success, reason + # convert groups to enumerators + enumerators = {} + response = await self.api_request(f"{self.base_url}/enumerators/groups") + status_code = getattr(response, "status_code", 0) + if status_code != 200: + return False, f"Failed to get enumerators: (HTTP status code: {status_code})" + else: + try: + j = response.json() + except Exception: + return False, "Failed to get enumerators: failed to parse response as JSON" + for group in j: + group_name = group.get("name", "").strip().lower() + if group_name: + group_enumerators = [] + for enumerator in group.get("enumerators", []): + enumerator_name = enumerator.get("display_name", "") + if enumerator_name: + group_enumerators.append(enumerator_name) + if group_enumerators: + enumerators[group_name] = group_enumerators + + self.enumerators = enumerators.get(self.group, []) + if not self.enumerators: + return False, f'No enumerators found for group: "{self.group}" ({self.enumerators})' + + self.enum_tasks = {} + self.poll_task = asyncio.create_task(self.task_poll_loop()) + + return True + + def prepare_api_request(self, url, kwargs): + if self.api_key: + kwargs["headers"] = {"Authorization": f"Bearer {self.api_key}"} + return url, kwargs + + async def handle_event(self, event): + query = self.make_query(event) + # start enumeration task + url = f"{self.base_url}/enumerate" + response = await self.api_request( + url, method="POST", json={"domains": [query], "enumerators": self.enumerators} + ) + try: + j = response.json() + except Exception: + self.warning(f"Failed to parse response as JSON: {getattr(response, 'text', '')}") + return + task_id = j.get("tasks", {}).get(query, "") + if not task_id: + self.warning(f"Failed to start enumeration for {query}") + return + self.enum_tasks[query] = (task_id, time.time(), event) + self.debug(f"Started enumeration task for {query}; task id: {task_id}") + + async def task_poll_loop(self): + # async with self._task_counter.count(f"{self.name}.task_poll_loop()"): + while 1: + for query, (task_id, start_time, event) in list(self.enum_tasks.items()): + url = f"{self.base_url}/tasks/{task_id}" + response = await self.api_request(url) + if getattr(response, "status_code", 0) == 200: + finished = await self.parse_response(response, query, event) + if finished: + self.enum_tasks.pop(query) + continue + # if scan is finishing, consider timeout + if self.scan.status == "FINISHING": + if start_time + self.timeout < time.time(): + self.enum_tasks.pop(query) + self.info(f"Enumeration task for {query} timed out") + + if self.scan.status == "FINISHING" and not self.enum_tasks: + break + await self.helpers.sleep(5) + + async def parse_response(self, response, query, event): + j = response.json() + status = j.get("status", "") + if status.lower() == "completed": + for subdomain in j.get("subdomains", []): + hostname = subdomain.get("subdomain", "") + if hostname and 
hostname.endswith(f".{query}") and not hostname == event.data: + await self.emit_event( + hostname, + "DNS_NAME", + event, + abort_if=self.abort_if, + context=f'{{module}} searched SubDomainRadar.io API for "{query}" and found {{event.type}}: {{event.data}}', + ) + return True + return False + + async def finish(self): + start_time = time.time() + while self.enum_tasks and not self.poll_task.done(): + elapsed_time = time.time() - start_time + if elapsed_time >= self.timeout: + self.warning(f"Timed out waiting for the following tasks to finish: {self.enum_tasks}") + for query, (task_id, _, _) in list(self.enum_tasks.items()): + url = f"{self.base_url}/tasks/{task_id}" + self.warning(f" - {query} ({url})") + break + + self.verbose( + f"Waiting for enumeration task poll loop to finish ({int(elapsed_time)}/{self.timeout} seconds)" + ) + + try: + # Wait for the task to complete or for 10 seconds, whichever comes first + await asyncio.wait_for(asyncio.shield(self.poll_task), timeout=10) + except asyncio.TimeoutError: + # This just means our 10-second check has elapsed, not that the task failed + pass + + # Cancel the poll_task if it's still running + if not self.poll_task.done(): + self.poll_task.cancel() + try: + await self.poll_task + except asyncio.CancelledError: + pass diff --git a/bbot/modules/sublist3r.py b/bbot/modules/sublist3r.py deleted file mode 100644 index a278c11266..0000000000 --- a/bbot/modules/sublist3r.py +++ /dev/null @@ -1,21 +0,0 @@ -from .crobat import crobat - - -class sublist3r(crobat): - watched_events = ["DNS_NAME"] - produced_events = ["DNS_NAME"] - flags = ["subdomain-enum", "passive", "safe"] - meta = { - "description": "Query sublist3r's API for subdomains", - } - - base_url = "https://api.sublist3r.com/search.php" - - def request_url(self, query): - return self.helpers.request(f"{self.base_url}?domain={query}") - - def parse_results(self, r, query): - json = r.json() - if json: - for hostname in json: - yield hostname diff --git a/bbot/modules/telerik.py b/bbot/modules/telerik.py index bf2782201d..78874e303f 100644 --- a/bbot/modules/telerik.py +++ b/bbot/modules/telerik.py @@ -1,15 +1,32 @@ -from bbot.modules.base import BaseModule -from urllib.parse import urlparse from sys import executable +from urllib.parse import urlparse + +from bbot.modules.base import BaseModule class telerik(BaseModule): + """ + Test for endpoints associated with Telerik.Web.UI.dll + + Telerik.Web.UI.WebResource.axd (CVE-2017-11317) + Telerik.Web.UI.DialogHandler.aspx (CVE-2017-9248) + Telerik.Web.UI.SpellCheckHandler.axd (associated with CVE-2017-9248) + ChartImage.axd (CVE-2019-19790) + + For the Telerik Report Server vulnerability (CVE-2024-4358) Use the Nuclei Template: (https://github.com/projectdiscovery/nuclei-templates/blob/main/http/cves/2024/CVE-2024-4358.yaml) - watched_events = ["URL"] + With exploit_RAU_crypto enabled, the module will attempt to exploit CVE-2017-11317. THIS WILL UPLOAD A (benign) FILE IF SUCCESSFUL. + + Will dedupe to host by default (running against first received URL). With include_subdirs enabled, will run against every directory. 
+ """ + + watched_events = ["URL", "HTTP_RESPONSE"] produced_events = ["VULNERABILITY", "FINDING"] - flags = ["active", "aggressive", "web"] + flags = ["active", "aggressive", "web-thorough"] meta = { "description": "Scan for critical Telerik vulnerabilities", + "created_date": "2022-04-10", + "author": "@liquidsec", } telerikVersions = [ @@ -91,6 +108,7 @@ class telerik(BaseModule): "2016.3.914", "2016.3.1018", "2016.3.1027", + "2016.1.1213", "2017.1.118", "2017.1.228", "2017.2.503", @@ -98,132 +116,280 @@ class telerik(BaseModule): "2017.2.711", "2017.3.913", ] + + DialogHandlerUrls = [ + "Telerik.Web.UI.DialogHandler.aspx", + "Telerik.Web.UI.DialogHandler.axd", + "Admin/ServerSide/Telerik.Web.UI.DialogHandler.aspx", + "App_Master/Telerik.Web.UI.DialogHandler.aspx", + "AsiCommon/Controls/ContentManagement/ContentDesigner/Telerik.Web.UI.DialogHandler.aspx", + "cms/portlets/telerik.web.ui.dialoghandler.aspx", + "common/admin/Calendar/Telerik.Web.UI.DialogHandler.aspx", + "common/admin/Jobs2/Telerik.Web.UI.DialogHandler.aspx", + "common/admin/PhotoGallery2/Telerik.Web.UI.DialogHandler.aspx", + "dashboard/UserControl/CMS/Page/Telerik.Web.UI.DialogHandler.aspx", + "DesktopModule/UIQuestionControls/UIAskQuestion/Telerik.Web.UI.DialogHandler.aspx", + "Desktopmodules/Admin/dnnWerk.Users/DialogHandler.aspx", + "DesktopModules/Admin/RadEditorProvider/DialogHandler.aspx", + "desktopmodules/base/editcontrols/telerik.web.ui.dialoghandler.aspx", + "desktopmodules/dnnwerk.radeditorprovider/dialoghandler.aspx", + "DesktopModules/RadEditorProvider/telerik.web.ui.dialoghandler.aspx", + "desktopmodules/tcmodules/tccategory/telerik.web.ui.dialoghandler.aspx", + "desktopmodules/telerikwebui/radeditorprovider/telerik.web.ui.dialoghandler.aspx", + "DesktopModules/TNComments/Telerik.Web.UI.DialogHandler.aspx", + "dotnetnuke/DesktopModules/Admin/RadEditorProvider/DialogHandler.aspx", + "Modules/CMS/Telerik.Web.UI.DialogHandler.aspx", + "modules/shop/manage/telerik.web.ui.dialoghandler.aspx", + "portal/channels/fa/Cms_HtmlText_Manage/Telerik.Web.UI.DialogHandler.aspx", + "providers/htmleditorproviders/telerik/telerik.web.ui.dialoghandler.aspx", + "Resources/Telerik.Web.UI.DialogHandler.aspx", + "sitecore/shell/applications/contentmanager/telerik.web.ui.dialoghandler.aspx", + "sitecore/shell/Controls/RichTextEditor/Telerik.Web.UI.DialogHandler.aspx", + "Sitefinity/ControlTemplates/Blogs/Telerik.Web.UI.DialogHandler.aspx", + "SiteTemplates/Telerik.Web.UI.DialogHandler.aspx", + "static/usercontrols/Telerik.Web.UI.DialogHandler.aspx", + "system/providers/htmleditor/Telerik.Web.UI.DialogHandler.aspx", + "WebUIDialogs/Telerik.Web.UI.DialogHandler.aspx", + ] + RAUConfirmed = [] - options = {"exploit_RAU_crypto": False} - options_desc = {"exploit_RAU_crypto": "Ettempt to confirm any RAU AXD detections are vulnerable"} + options = {"exploit_RAU_crypto": False, "include_subdirs": False} + options_desc = { + "exploit_RAU_crypto": "Attempt to confirm any RAU AXD detections are vulnerable", + "include_subdirs": "Include subdirectories in the scan (off by default)", # will create many finding events if used in conjunction with web spider or ffuf + } in_scope_only = True - deps_pip = ["pycryptodome"] + deps_pip = ["pycryptodome~=3.17"] deps_ansible = [ - {"name": "Create telerik dir", "file": {"state": "directory", "path": "{BBOT_TOOLS}/telerik/"}}, - {"file": {"state": "touch", "path": "{BBOT_TOOLS}/telerik/testfile.txt"}}, + {"name": "Create telerik dir", "file": {"state": "directory", "path": "#{BBOT_TOOLS}/telerik/"}}, + 
{"file": {"state": "touch", "path": "#{BBOT_TOOLS}/telerik/testfile.txt"}}, { "name": "Download RAU_crypto", "unarchive": { "src": "https://github.com/bao7uo/RAU_crypto/archive/refs/heads/master.zip", "include": "RAU_crypto-master/RAU_crypto.py", - "dest": "{BBOT_TOOLS}/telerik/", + "dest": "#{BBOT_TOOLS}/telerik/", "remote_src": True, }, }, ] - def handle_event(self, event): - - webresource = "Telerik.Web.UI.WebResource.axd?type=rau" - result = self.test_detector(event.data, webresource) - if result: - self.debug(result.text) - if "RadAsyncUpload handler is registered succesfully" in result.text: - self.debug(f"Detected Telerik instance (Telerik.Web.UI.WebResource.axd?type=rau)") - description = f"Telerik RAU AXD Handler detected" - self.emit_event( - {"host": str(event.host), "url": f"{event.data}{webresource}", "description": description}, - "FINDING", - event, - ) - if self.config.get("exploit_RAU_crypto") == True: - hostname = urlparse(event.data).netloc - if hostname not in self.RAUConfirmed: - self.RAUConfirmed.append(hostname) - root_tool_path = self.scan.helpers.tools_dir / "telerik" - self.debug(root_tool_path) - - for version in self.telerikVersions: - command = [ - executable, - str(root_tool_path / "RAU_crypto-master/RAU_crypto.py"), - "-P", - "C:\\\\Windows\\\\Temp", - version, - str(root_tool_path / "testfile.txt"), - result.url, - ] - output = self.helpers.run(command) - description = f"[CVE-2017-11317] [{str(version)}] {webresource}" - if "fileInfo" in output.stdout: - self.debug(f"Confirmed Vulnerable Telerik (version: {str(version)}") - self.emit_event( - { - "severity": "CRITICAL", - "description": description, - "host": str(event.host), - "url": f"{event.data}{webresource}", - }, - "VULNERABILITY", - event, - ) - break - - DialogHandlerUrls = [ - "Telerik.Web.UI.DialogHandler.aspx?dp=1", - "DesktopModules/Admin/RadEditorProvider/DialogHandler.aspx?dp=1", - "providers/htmleditorproviders/telerik/telerik.web.ui.dialoghandler.aspx", - "desktopmodules/telerikwebui/radeditorprovider/telerik.web.ui.dialoghandler.aspx", - "desktopmodules/dnnwerk.radeditorprovider/dialoghandler.aspx", - ] - - for dh in DialogHandlerUrls: - result = self.test_detector(event.data, dh) + _module_threads = 5 + + @staticmethod + def normalize_url(url): + return str(url.rstrip("/") + "/").lower() + + def _incoming_dedup_hash(self, event): + if event.type == "URL": + if self.config.get("include_subdirs") is True: + return hash(f"{event.type}{self.normalize_url(event.data)}") + else: + return hash(f"{event.type}{event.netloc}") + else: # HTTP_RESPONSE + return hash(f"{event.type}{event.data['url']}") + + async def handle_event(self, event): + if event.type == "URL": + if self.config.get("include_subdirs"): + base_url = self.normalize_url(event.data) # Use the entire URL including subdirectories + + else: + base_url = f"{event.parsed_url.scheme}://{event.parsed_url.netloc}/" # path will be omitted + + # Check for RAU AXD Handler + webresource = "Telerik.Web.UI.WebResource.axd?type=rau" + result, _ = await self.test_detector(base_url, webresource) if result: - if "Cannot deserialize dialog parameters" in result.text: - self.debug(f"Detected Telerik UI instance ({dh})") - description = f"Telerik DialogHandler detected" - self.emit_event( - {"host": str(event.host), "url": f"{event.data}{dh}", "description": description}, + if "RadAsyncUpload handler is registered successfully" in result.text: + self.verbose("Detected Telerik instance (Telerik.Web.UI.WebResource.axd?type=rau)") + + probe_data = { + 
"rauPostData": ( + None, + "mQheol55IDiQWWSxl+Atkc68JXWUJ6QSirwLhEwleMiw3vN4cwABE74V2fWsLGg8CFXHOP6np90M+sLrLDqFACGNvonxmgT8aBsTZPWbXErewMGNWBP34aX0DmMvXVyTEpQ6FkFhZi19cTtdYfRLI8Uc04uNSsdWnltDMQ2CX/sSLOXUFNnZdAwAXgUuprYhU28Zwh/GdgYh447ksXfAC2fuPqEJqKDDwBlltxsS/zSq8ipIg326ymB2dmOpH/P3hcAmTKOyzB0dW6a6pmJvqNVU+50DlrUC00RbBbTJwlV6Xm4s4XTvgXLvMQ6czz2OAYY18HI+HYX5uvajctj/25UR8edwu68ZCgedsD7EZHRSSthjxohxfAyrfshjcu1LnhCEd0ClowKxBS4eiaLxVxhJAdB7XcbbXxIS9WWKa7gtRMNc/jUAOlIpvOZ3N+bOQ6rsNMHv7TZk1g0bxPl99yBn9qvtAwDMNPDoADxoBSisAkIIl9mImKv7y7nAiKoj7ukApdu5XQuVo10SxwkLkqHcvEEgjxTrOlCbEbxK2/du9TgXxD9iqKyaPLHPzNZsnzCsG6qNXv0fNkeASP9tZAyvi/y1eLrpScE+J7blfT+kBkGPTTFc6Z4z6lN7GqSHofq/CDHC2S2+qdoRdC3C25V74j+Ae6MkpSfqYx4KZYNtxBAxjf9Uf3JVSiZh3X2W/7aFeimFft0h/liybSjJTzO+AwNJluI4kXqemFoHnjVFfUQViaIuk4UP0D861kCU6KIGLZLpOaa0g0KM8hmu3OjwVOy8QVXYtbx5lOmSX9h3imRzMDFRTXK25YpUJgD0/LFMgCeZLA8SCYzkThyN2d8f8n5l8iOScR47o8i8sqCp/fd3JTogSbwD7LxnHudpiw2W/OfpMGipgc6loQFoX4klQaYwKkA4w+GUzahfAJmIiukZuTLOPCPQvX4wKtLqw1YiHtuaLHvLYq2/F66QQXNrZ4SucUNED0p5TUVTvHGUbuA0zxAyYSfYVgTNZjXGguQBY7DsN1SkpCa/ltvIiGtCbHQR86OrvjJMACe0wdpMCqEg7JiGym3RrLqvmjpS&sbZRwxJ96gmXFBSbSvT0ve7jpvDoieqd6RbG+GIP0H7sO5/0ZnvheosB9jQAifuMabY7lW4UzZgr5o2iqE0tBl4SGhfWyYW7iCFXnd3aIuCnUvhT58Rp8g7kGkA/eU/s68E66KOBXNuBnokZR9cIsjE0Tt3Jfxrk018+CmVcXpjXp/RmhRwCJTgEAXQuNplb/KdkLxqDn519iRtbiU6aLZX8YctdFQBqyKVgkk8WYXxcXQ8wYnxtpEtGuBcsndUi1iPp4Od8rYY1HPWg+FIquW17YPHjfP4gO4dhZe4sd7gH0ARyGDjiYVj7ODDE0wGmwmFVdQTrDX5AaxKuJy0NbQ==", + ), + "file": ("blob", b"e1daf48a", "application/octet-stream"), + "fileName": (None, "df8dbc7a"), + "contentType": (None, "text/html"), + "lastModifiedDate": (None, "2020-01-02T08:02:01.067Z"), + "metadata": ( + None, + '{"TotalChunks":1,"ChunkIndex":0,"TotalFileSize":1,"UploadID":"3ea7b19db6c5.txt"}', + ), + } + + version = "unknown" + verbose_errors = False + # send probe + probe_response = await self.helpers.request( + f"{event.data}{webresource}", method="POST", files=probe_data + ) + + if probe_response: + if "Exception Details: " in probe_response.text: + verbose_errors = True + if ( + "Telerik.Web.UI.CryptoExceptionThrower.ThrowGenericCryptoException" + in probe_response.text + ): + version = "Post-2020 (Encrypt-Then-Mac Enabled, with Generic Crypto Failure Message)" + elif "Padding is invalid and cannot be removed" in probe_response.text: + version = "<= 2019 (Either Pre-2017 (vulnerable), or 2017-2019 w/ Encrypt-Then-Mac)" + + description = f"Telerik RAU AXD Handler detected. 
Verbose Errors Enabled: [{str(verbose_errors)}] Version Guess: [{version}]" + await self.emit_event( + {"host": str(event.host), "url": f"{base_url}{webresource}", "description": description}, "FINDING", event, + context=f"{{module}} scanned {base_url} and identified {{event.type}}: Telerik RAU AXD Handler", ) + if self.config.get("exploit_RAU_crypto") is True: + if base_url not in self.RAUConfirmed: + self.RAUConfirmed.append(base_url) + root_tool_path = self.scan.helpers.tools_dir / "telerik" + self.debug(root_tool_path) + + for version in self.telerikVersions: + command = [ + executable, + str(root_tool_path / "RAU_crypto-master/RAU_crypto.py"), + "-P", + "C:\\\\Windows\\\\Temp", + version, + str(root_tool_path / "testfile.txt"), + result.url, + ] + output = await self.run_process(command) + description = f"[CVE-2017-11317] [{str(version)}] {webresource}" + if "fileInfo" in output.stdout: + self.debug(f"Confirmed Vulnerable Telerik (version: {str(version)}") + await self.emit_event( + { + "severity": "CRITICAL", + "description": description, + "host": str(event.host), + "url": f"{base_url}{webresource}", + }, + "VULNERABILITY", + event, + context=f"{{module}} scanned {base_url} and identified critical {{event.type}}: {description}", + ) + break + + urls = {} + for dh in self.DialogHandlerUrls: + url = self.create_url(base_url, f"{dh}?dp=1") + urls[url] = dh - spellcheckhandler = "Telerik.Web.UI.SpellCheckHandler.axd" - result = self.test_detector(event.data, spellcheckhandler) - try: + gen = self.helpers.request_batch(list(urls)) + fail_count = 0 + async for url, response in gen: + # cancel if we run into timeouts etc. + if response is None: + fail_count += 1 + + # tolerate some random errors + if fail_count < 2: + continue + self.debug(f"Cancelling run against {base_url} due to failed request") + await gen.aclose() + else: + if "Cannot deserialize dialog parameters" in response.text: + self.debug(f"Detected Telerik UI instance ({dh})") + description = "Telerik DialogHandler detected" + await self.emit_event( + {"host": str(event.host), "url": f"{base_url}{dh}", "description": description}, + "FINDING", + event, + ) + # Once we have a match we need to stop, because the basic handler (Telerik.Web.UI.DialogHandler.aspx) usually works with a path wildcard + await gen.aclose() + + spellcheckhandler = "Telerik.Web.UI.SpellCheckHandler.axd" + result, _ = await self.test_detector(base_url, spellcheckhandler) + status_code = getattr(result, "status_code", 0) # The standard behavior for the spellcheck handler without parameters is a 500 - if result.status_code == 500: + if status_code == 500: # Sometimes webapps will just return 500 for everything, so rule out the false positive - validate_result = self.test_detector(event.data, self.helpers.rand_string()) + validate_result, _ = await self.test_detector(base_url, f"{self.helpers.rand_string()}.axd") self.debug(validate_result) - if validate_result.status_code != 500: - self.debug(f"Detected Telerik UI instance (Telerik.Web.UI.SpellCheckHandler.axd)") - description = f"Telerik SpellCheckHandler detected" - self.emit_event( + validate_status_code = getattr(validate_result, "status_code", 0) + if validate_status_code not in (0, 500): + self.debug("Detected Telerik UI instance (Telerik.Web.UI.SpellCheckHandler.axd)") + description = "Telerik SpellCheckHandler detected" + await self.emit_event( { "host": str(event.host), - "url": f"{event.data}{spellcheckhandler}", + "url": f"{base_url}{spellcheckhandler}", "description": description, }, "FINDING", 
                        event,
+                        context=f"{{module}} scanned {base_url} and identified {{event.type}}: Telerik SpellCheckHandler",
                    )
-        except Exception:
-            pass

-    def test_detector(self, baseurl, detector):
+            chartimagehandler = "ChartImage.axd?ImageName=bqYXJAqm315eEd6b%2bY4%2bGqZpe7a1kY0e89gfXli%2bjFw%3d"
+            result, _ = await self.test_detector(base_url, chartimagehandler)
+            status_code = getattr(result, "status_code", 0)
+            if status_code == 200:
+                chartimagehandler_error = "ChartImage.axd?ImageName="
+                result_error, _ = await self.test_detector(base_url, chartimagehandler_error)
+                error_status_code = getattr(result_error, "status_code", 0)
+                if error_status_code not in (0, 200):
+                    await self.emit_event(
+                        {
+                            "host": str(event.host),
+                            "url": f"{base_url}{chartimagehandler}",
+                            "description": "Telerik ChartImage AXD Handler Detected",
+                        },
+                        "FINDING",
+                        event,
+                        context=f"{{module}} scanned {base_url} and identified {{event.type}}: Telerik ChartImage AXD Handler",
+                    )

-        result = None
-        if "/" != baseurl[-1]:
-            url = f"{baseurl}/{detector}"
-        else:
-            url = f"{baseurl}{detector}"
-        result = self.helpers.request(url)
-        return result
+        elif event.type == "HTTP_RESPONSE":
+            resp_body = event.data.get("body", None)
+            url = event.data["url"]
+            if resp_body:
+                if '":{"SerializedParameters":"' in resp_body:
+                    await self.emit_event(
+                        {
+                            "host": str(event.host),
+                            "url": url,
+                            "description": "Telerik DialogHandler [SerializedParameters] Detected in HTTP Response",
+                        },
+                        "FINDING",
+                        event,
+                        context="{module} searched HTTP_RESPONSE and identified {event.type}: Telerik DialogHandler [SerializedParameters]",
+                    )
+                elif '"_serializedConfiguration":"' in resp_body:
+                    await self.emit_event(
+                        {
+                            "host": str(event.host),
+                            "url": url,
+                            "description": "Telerik AsyncUpload [serializedConfiguration] Detected in HTTP Response",
+                        },
+                        "FINDING",
+                        event,
+                        context="{module} searched HTTP_RESPONSE and identified {event.type}: Telerik AsyncUpload",
+                    )
+
+    def create_url(self, baseurl, detector):
+        return f"{baseurl}{detector}"

-    def filter_event(self, event):
+    async def test_detector(self, baseurl, detector):
+        result = None
+        url = self.create_url(baseurl, detector)
+        result = await self.helpers.request(url, timeout=self.scan.httpx_timeout)
+        return result, detector

-        if "endpoint" in event.tags:
+    async def filter_event(self, event):
+        if event.type == "URL" and "endpoint" in event.tags:
             return False
         else:
             return True
diff --git a/bbot/modules/templates/bucket.py b/bbot/modules/templates/bucket.py
new file mode 100644
index 0000000000..3cd899d71c
--- /dev/null
+++ b/bbot/modules/templates/bucket.py
@@ -0,0 +1,174 @@
+from bbot.modules.base import BaseModule
+
+
+class bucket_template(BaseModule):
+    watched_events = ["DNS_NAME", "STORAGE_BUCKET"]
+    produced_events = ["STORAGE_BUCKET", "FINDING"]
+    flags = ["active", "safe", "cloud-enum", "web-basic"]
+    options = {"permutations": False}
+    options_desc = {
+        "permutations": "Whether to try permutations",
+    }
+    scope_distance_modifier = 3
+
+    cloud_helper_name = "amazon|google|digitalocean|etc"
+    delimiters = ("", ".", "-")
+    base_domains = ["s3.amazonaws.com|digitaloceanspaces.com|etc"]
+    regions = [None]
+    supports_open_check = True
+
+    async def setup(self):
+        self.buckets_tried = set()
+        self.cloud_helper = self.helpers.cloud.providers[self.cloud_helper_name]
+        self.permutations = self.config.get("permutations", False)
+        return True
+
+    async def filter_event(self, event):
+        if event.type == "DNS_NAME" and event.scope_distance > 0:
+            return False, "only accepts in-scope DNS_NAMEs"
+        if
event.type == "STORAGE_BUCKET": + filter_result, reason = self.filter_bucket(event) + if not filter_result: + return (filter_result, reason) + return True + + def filter_bucket(self, event): + if f"cloud-{self.cloud_helper_name}" not in event.tags: + return False, "bucket belongs to a different cloud provider" + return True, "" + + async def handle_event(self, event): + if event.type == "DNS_NAME": + await self.handle_dns_name(event) + elif event.type == "STORAGE_BUCKET": + await self.handle_storage_bucket(event) + + async def handle_dns_name(self, event): + buckets = set() + base = self.helpers.unidecode(self.helpers.smart_decode_punycode(event.data)) + stem = self.helpers.domain_stem(base) + for b in [base, stem]: + split = b.split(".") + for d in self.delimiters: + bucket_name = d.join(split) + buckets.add(bucket_name) + async for bucket_name, url, tags, num_buckets in self.brute_buckets(buckets, permutations=self.permutations): + await self.emit_storage_bucket( + {"name": bucket_name, "url": url}, + "STORAGE_BUCKET", + parent=event, + tags=tags, + context=f"{{module}} tried {num_buckets:,} bucket variations of {event.data} and found {{event.type}} at {url}", + ) + + async def handle_storage_bucket(self, event): + url = event.data["url"] + bucket_name = event.data["name"] + if self.supports_open_check: + description, tags = await self._check_bucket_open(bucket_name, url) + if description: + event_data = {"host": event.host, "url": url, "description": description} + await self.emit_event( + event_data, + "FINDING", + parent=event, + tags=tags, + context=f"{{module}} scanned {event.type} and identified {{event.type}}: {description}", + ) + + async for bucket_name, new_url, tags, num_buckets in self.brute_buckets( + [bucket_name], permutations=self.permutations, omit_base=True + ): + await self.emit_storage_bucket( + {"name": bucket_name, "url": new_url}, + "STORAGE_BUCKET", + parent=event, + tags=tags, + context=f"{{module}} tried {num_buckets:,} variations of {url} and found {{event.type}} at {new_url}", + ) + + async def emit_storage_bucket(self, event_data, event_type, parent, tags, context): + event_data["url"] = self.clean_bucket_url(event_data["url"]) + await self.emit_event( + event_data, + event_type, + parent=parent, + tags=tags, + context=context, + ) + + async def brute_buckets(self, buckets, permutations=False, omit_base=False): + buckets = set(buckets) + new_buckets = set(buckets) + if permutations: + for b in buckets: + for mutation in self.helpers.word_cloud.mutations(b, cloud=False): + for d in self.delimiters: + new_buckets.add(d.join(mutation)) + if omit_base: + new_buckets = new_buckets - buckets + new_buckets = [b for b in new_buckets if self.valid_bucket_name(b)] + num_buckets = len(new_buckets) + bucket_urls_kwargs = [] + for base_domain in self.base_domains: + for region in self.regions: + for bucket_name in new_buckets: + url, kwargs = self.build_bucket_request(bucket_name, base_domain, region) + bucket_urls_kwargs.append((url, kwargs, (bucket_name, base_domain, region))) + async for url, kwargs, (bucket_name, base_domain, region), response in self.helpers.request_custom_batch( + bucket_urls_kwargs + ): + existent_bucket, tags = self._check_bucket_exists(bucket_name, response) + if existent_bucket: + yield bucket_name, url, tags, num_buckets + + def clean_bucket_url(self, url): + # if needed, modify the bucket url before emitting it + return url + + def build_bucket_request(self, bucket_name, base_domain, region): + url = self.build_url(bucket_name, base_domain, 
region)
+        return url, {}
+
+    def _check_bucket_exists(self, bucket_name, response):
+        self.debug(f'Checking if bucket exists: "{bucket_name}"')
+        return self.check_bucket_exists(bucket_name, response)
+
+    def check_bucket_exists(self, bucket_name, response):
+        tags = self.gen_tags_exists(response)
+        status_code = getattr(response, "status_code", 404)
+        existent_bucket = status_code != 404
+        return (existent_bucket, tags)
+
+    async def _check_bucket_open(self, bucket_name, url):
+        self.debug(f'Checking if bucket is misconfigured: "{bucket_name}"')
+        return await self.check_bucket_open(bucket_name, url)
+
+    async def check_bucket_open(self, bucket_name, url):
+        response = await self.helpers.request(url)
+        tags = self.gen_tags_exists(response)
+        status_code = getattr(response, "status_code", 404)
+        content = getattr(response, "text", "")
+        open_bucket = status_code == 200 and "Contents" in content
+        msg = ""
+        if open_bucket:
+            msg = "Open storage bucket"
+        return (msg, tags)
+
+    def valid_bucket_name(self, bucket_name):
+        valid = self.cloud_helper.is_valid_bucket_name(bucket_name)
+        if valid and not self.helpers.is_ip(bucket_name):
+            bucket_hash = hash(bucket_name)
+            if bucket_hash not in self.buckets_tried:
+                self.buckets_tried.add(bucket_hash)
+                return True
+        return False
+
+    def build_url(self, bucket_name, base_domain, region):
+        return f"https://{bucket_name}.{base_domain}/"
+
+    def gen_tags_exists(self, response):
+        return set()
+
+    def gen_tags_open(self, response):
+        return set()
diff --git a/bbot/modules/templates/github.py b/bbot/modules/templates/github.py
new file mode 100644
index 0000000000..6bd5b70e4d
--- /dev/null
+++ b/bbot/modules/templates/github.py
@@ -0,0 +1,46 @@
+import traceback
+
+from bbot.modules.base import BaseModule
+
+
+class github(BaseModule):
+    """
+    A template module for use of the GitHub API
+    Inherited by several other github modules.
+    """
+
+    _qsize = 1
+    base_url = "https://api.github.com"
+    ping_url = f"{base_url}/zen"
+
+    def prepare_api_request(self, url, kwargs):
+        kwargs["headers"]["Authorization"] = f"token {self.api_key}"
+        return url, kwargs
+
+    async def setup(self):
+        await super().setup()
+        self.headers = {}
+        api_keys = set()
+        modules_config = self.scan.config.get("modules", {})
+        git_modules = [m for m in modules_config if str(m).startswith("git")]
+        for module_name in git_modules:
+            module_config = modules_config.get(module_name, {})
+            api_key = module_config.get("api_key", "")
+            if isinstance(api_key, str):
+                api_key = [api_key]
+            for key in api_key:
+                key = key.strip()
+                if key:
+                    api_keys.add(key)
+        if not api_keys:
+            if self.auth_required:
+                return None, "No API key set"
+        self.api_key = api_keys
+        try:
+            await self.ping()
+            self.hugesuccess("API is ready")
+            return True
+        except Exception as e:
+            self.trace(traceback.format_exc())
+            return None, f"Error with API ({str(e).strip()})"
+        return True
diff --git a/bbot/modules/templates/postman.py b/bbot/modules/templates/postman.py
new file mode 100644
index 0000000000..490c6e62eb
--- /dev/null
+++ b/bbot/modules/templates/postman.py
@@ -0,0 +1,183 @@
+from bbot.modules.base import BaseModule
+
+
+class postman(BaseModule):
+    """
+    A template module for use of the Postman API
+    Inherited by several other postman modules.
+ """ + + base_url = "https://www.postman.com/_api" + api_url = "https://api.getpostman.com" + html_url = "https://www.postman.com" + ping_url = f"{api_url}/me" + + headers = { + "Content-Type": "application/json", + "X-App-Version": "11.27.4-250109-2338", + "X-Entity-Team-Id": "0", + "Origin": "https://www.postman.com", + "Referer": "https://www.postman.com/search?q=&scope=public&type=all", + } + auth_required = True + + async def setup(self): + await super().setup() + self.headers = {} + api_keys = set() + modules_config = self.scan.config.get("modules", {}) + postman_modules = [m for m in modules_config if str(m).startswith("postman")] + for module_name in postman_modules: + module_config = modules_config.get(module_name, {}) + api_key = module_config.get("api_key", "") + if isinstance(api_key, str): + api_key = [api_key] + for key in api_key: + key = key.strip() + if key: + api_keys.add(key) + if not api_keys: + if self.auth_required: + return None, "No API key set" + self.api_key = api_keys + if self.api_key: + try: + await self.ping() + self.hugesuccess("API is ready") + return True + except Exception as e: + self.trace() + return None, f"Error with API ({str(e).strip()})" + return True + + def prepare_api_request(self, url, kwargs): + if self.api_key: + kwargs["headers"]["X-Api-Key"] = self.api_key + return url, kwargs + + async def get_workspace_id(self, repo_url): + workspace_id = "" + profile = repo_url.split("/")[-2] + name = repo_url.split("/")[-1] + url = f"{self.base_url}/ws/proxy" + json = { + "service": "workspaces", + "method": "GET", + "path": f"/workspaces?handle={profile}&slug={name}", + } + r = await self.helpers.request(url, method="POST", json=json, headers=self.headers) + if r is None: + return workspace_id + status_code = getattr(r, "status_code", 0) + try: + json = r.json() + except Exception as e: + self.warning(f"Failed to decode JSON for {r.url} (HTTP status: {status_code}): {e}") + return workspace_id + data = json.get("data", []) + if len(data) == 1: + workspace_id = data[0]["id"] + return workspace_id + + async def request_workspace(self, id): + data = {"workspace": {}, "environments": [], "collections": []} + workspace = await self.get_workspace(id) + if workspace: + # Main Workspace + name = workspace["name"] + data["workspace"] = workspace + + # Workspace global variables + self.verbose(f"Searching globals for workspace {name}") + globals = await self.get_globals(id) + data["environments"].append(globals) + + # Workspace Environments + workspace_environments = workspace.get("environments", []) + if workspace_environments: + self.verbose(f"Searching environments for workspace {name}") + for _ in workspace_environments: + environment_id = _["uid"] + environment = await self.get_environment(environment_id) + data["environments"].append(environment) + + # Workspace Collections + workspace_collections = workspace.get("collections", []) + if workspace_collections: + self.verbose(f"Searching collections for workspace {name}") + for _ in workspace_collections: + collection_id = _["uid"] + collection = await self.get_collection(collection_id) + data["collections"].append(collection) + return data + + async def get_workspace(self, workspace_id): + workspace = {} + workspace_url = f"{self.api_url}/workspaces/{workspace_id}" + r = await self.api_request(workspace_url) + if r is None: + return workspace + status_code = getattr(r, "status_code", 0) + try: + json = r.json() + except Exception as e: + self.warning(f"Failed to decode JSON for {r.url} (HTTP status: 
{status_code}): {e}") + return workspace + workspace = json.get("workspace", {}) + return workspace + + async def get_globals(self, workspace_id): + globals = {} + globals_url = f"{self.base_url}/workspace/{workspace_id}/globals" + r = await self.helpers.request(globals_url, headers=self.headers) + if r is None: + return globals + status_code = getattr(r, "status_code", 0) + try: + json = r.json() + except Exception as e: + self.warning(f"Failed to decode JSON for {r.url} (HTTP status: {status_code}): {e}") + return globals + globals = json.get("data", {}) + return globals + + async def get_environment(self, environment_id): + environment = {} + environment_url = f"{self.api_url}/environments/{environment_id}" + r = await self.api_request(environment_url) + if r is None: + return environment + status_code = getattr(r, "status_code", 0) + try: + json = r.json() + except Exception as e: + self.warning(f"Failed to decode JSON for {r.url} (HTTP status: {status_code}): {e}") + return environment + environment = json.get("environment", {}) + return environment + + async def get_collection(self, collection_id): + collection = {} + collection_url = f"{self.api_url}/collections/{collection_id}" + r = await self.api_request(collection_url) + if r is None: + return collection + status_code = getattr(r, "status_code", 0) + try: + json = r.json() + except Exception as e: + self.warning(f"Failed to decode JSON for {r.url} (HTTP status: {status_code}): {e}") + return collection + collection = json.get("collection", {}) + return collection + + async def validate_workspace(self, workspace, environments, collections): + name = workspace.get("name", "") + full_wks = str([workspace, environments, collections]) + in_scope_hosts = await self.scan.extract_in_scope_hostnames(full_wks) + if in_scope_hosts: + self.verbose( + f'Found in-scope hostname(s): "{in_scope_hosts}" in workspace {name}, it appears to be in-scope' + ) + return True + return False diff --git a/bbot/modules/templates/shodan.py b/bbot/modules/templates/shodan.py new file mode 100644 index 0000000000..a38dbb1f83 --- /dev/null +++ b/bbot/modules/templates/shodan.py @@ -0,0 +1,35 @@ +import traceback + +from bbot.modules.templates.subdomain_enum import subdomain_enum + + +class shodan(subdomain_enum): + options = {"api_key": ""} + options_desc = {"api_key": "Shodan API key"} + + base_url = "https://api.shodan.io" + ping_url = f"{base_url}/api-info?key={{api_key}}" + + async def setup(self): + await super().setup() + api_keys = set() + for module_name in ("shodan", "shodan_dns", "shodan_port"): + module_config = self.scan.config.get("modules", {}).get(module_name, {}) + api_key = module_config.get("api_key", "") + if isinstance(api_key, str): + api_key = [api_key] + for key in api_key: + key = key.strip() + if key: + api_keys.add(key) + if not api_keys: + if self.auth_required: + return None, "No API key set" + self.api_key = api_keys + try: + await self.ping() + self.hugesuccess("API is ready") + return True + except Exception as e: + self.trace(traceback.format_exc()) + return None, f"Error with API ({str(e).strip()})" diff --git a/bbot/modules/templates/sql.py b/bbot/modules/templates/sql.py new file mode 100644 index 0000000000..39b4e6f00e --- /dev/null +++ b/bbot/modules/templates/sql.py @@ -0,0 +1,95 @@ +from contextlib import suppress +from sqlmodel import SQLModel +from sqlalchemy.orm import sessionmaker +from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession + +from bbot.db.sql.models import Event, Scan, Target +from 
bbot.modules.output.base import BaseOutputModule + + +class SQLTemplate(BaseOutputModule): + meta = {"description": "SQL output module template"} + options = { + "database": "bbot", + "username": "", + "password": "", + "host": "127.0.0.1", + "port": 0, + } + options_desc = { + "database": "The database to use", + "username": "The username to use to connect to the database", + "password": "The password to use to connect to the database", + "host": "The host to use to connect to the database", + "port": "The port to use to connect to the database", + } + + protocol = "" + + async def setup(self): + self.database = self.config.get("database", "bbot") + self.username = self.config.get("username", "") + self.password = self.config.get("password", "") + self.host = self.config.get("host", "127.0.0.1") + self.port = self.config.get("port", 0) + + await self.init_database() + return True + + async def handle_event(self, event): + event_obj = Event(**event.json()).validated + + async with self.async_session() as session: + async with session.begin(): + # insert event + session.add(event_obj) + + # if it's a SCAN event, create/update the scan and target + if event_obj.type == "SCAN": + event_data = event_obj.get_data() + if not isinstance(event_data, dict): + raise ValueError(f"Invalid data for SCAN event: {event_data}") + scan = Scan(**event_data).validated + await session.merge(scan) # Insert or update scan + + target_data = event_data.get("target", {}) + if not isinstance(target_data, dict): + raise ValueError(f"Invalid target for SCAN event: {target_data}") + target = Target(**target_data).validated + await session.merge(target) # Insert or update target + + await session.commit() + + async def create_database(self): + pass + + async def init_database(self): + await self.create_database() + + # Now create the engine for the actual database + self.engine = create_async_engine(self.connection_string()) + # Create a session factory bound to the engine + self.async_session = sessionmaker(self.engine, expire_on_commit=False, class_=AsyncSession) + + # Use the engine directly to create all tables + async with self.engine.begin() as conn: + await conn.run_sync(SQLModel.metadata.create_all) + + def connection_string(self, mask_password=False): + connection_string = f"{self.protocol}://" + if self.username: + password = self.password + if mask_password: + password = "****" + connection_string += f"{self.username}:{password}" + if self.host: + connection_string += f"@{self.host}" + if self.port: + connection_string += f":{self.port}" + if self.database: + connection_string += f"/{self.database}" + return connection_string + + async def cleanup(self): + with suppress(Exception): + await self.engine.dispose() diff --git a/bbot/modules/templates/subdomain_enum.py b/bbot/modules/templates/subdomain_enum.py new file mode 100644 index 0000000000..a65d08f315 --- /dev/null +++ b/bbot/modules/templates/subdomain_enum.py @@ -0,0 +1,208 @@ +from bbot.modules.base import BaseModule + + +class subdomain_enum(BaseModule): + """ + A typical free API-based subdomain enumeration module + Inherited by many other modules including sublist3r, dnsdumpster, etc. 
+    """
+
+    watched_events = ["DNS_NAME"]
+    produced_events = ["DNS_NAME"]
+    flags = ["subdomain-enum", "passive", "safe"]
+    meta = {"description": "Query an API for subdomains"}
+
+    base_url = "https://api.example.com"
+
+    # set module error state after this many failed requests in a row
+    abort_after_failures = 5
+
+    # whether to reject wildcard DNS_NAMEs
+    reject_wildcards = "strict"
+
+    # set qsize to 10. this helps combat rate limiting by ensuring the next query doesn't execute
+    # until the results from the previous query have been consumed by the scan
+    # we don't use 1 because it causes delays due to the asyncio.sleep; 10 gives us reasonable buffer room
+    _qsize = 10
+
+    # how to deduplicate incoming events
+    # options:
+    #   "highest_parent": dedupe by highest parent (highest parent of www.api.test.evilcorp.com is evilcorp.com)
+    #   "lowest_parent": dedupe by lowest parent (lowest parent of www.api.test.evilcorp.com is api.test.evilcorp.com)
+    dedup_strategy = "highest_parent"
+
+    # how many results to request per API call
+    page_size = 100
+    # arguments to pass to api_page_iter
+    api_page_iter_kwargs = {}
+
+    @property
+    def source_pretty_name(self):
+        return f"{self.__class__.__name__} API"
+
+    def _incoming_dedup_hash(self, event):
+        """
+        Determines the criteria for what is considered to be a duplicate event if `accept_dupes` is False.
+        """
+        return hash(self.make_query(event)), f"dedup_strategy={self.dedup_strategy}"
+
+    async def handle_event(self, event):
+        query = self.make_query(event)
+        results = await self.query(query)
+        if results:
+            for hostname in set(results):
+                if hostname:
+                    try:
+                        hostname = self.helpers.validators.validate_host(hostname)
+                    except ValueError as e:
+                        self.verbose(e)
+                        continue
+                    if hostname and hostname.endswith(f".{query}") and not hostname == event.data:
+                        await self.emit_event(
+                            hostname,
+                            "DNS_NAME",
+                            event,
+                            abort_if=self.abort_if,
+                            context=f'{{module}} searched {self.source_pretty_name} for "{query}" and found {{event.type}}: {{event.data}}',
+                        )
+
+    async def handle_event_paginated(self, event):
+        query = self.make_query(event)
+        async for result_batch in self.query_paginated(query):
+            for hostname in set(result_batch):
+                try:
+                    hostname = self.helpers.validators.validate_host(hostname)
+                except ValueError as e:
+                    self.verbose(e)
+                    continue
+                if hostname and hostname.endswith(f".{query}") and not hostname == event.data:
+                    await self.emit_event(
+                        hostname,
+                        "DNS_NAME",
+                        event,
+                        abort_if=self.abort_if,
+                        context=f'{{module}} searched {self.source_pretty_name} for "{query}" and found {{event.type}}: {{event.data}}',
+                    )
+
+    async def request_url(self, query):
+        url = self.make_url(query)
+        return await self.api_request(url)
+
+    def make_url(self, query):
+        return f"{self.base_url}/subdomains/{self.helpers.quote(query)}"
+
+    def make_query(self, event):
+        query = event.data
+        parents = list(self.helpers.domain_parents(event.data))
+        if self.dedup_strategy == "highest_parent":
+            parents = list(reversed(parents))
+        elif self.dedup_strategy == "lowest_parent":
+            pass
+        else:
+            raise ValueError('self.dedup_strategy attribute must be set to either "highest_parent" or "lowest_parent"')
+        for p in parents:
+            if self.scan.in_scope(p):
+                query = p
+                break
+        return ".".join([s for s in query.split(".") if s != "_wildcard"])
+
+    async def parse_results(self, r, query=None):
+        # return a collection rather than yielding, since query() and
+        # query_paginated() both await this call
+        results = set()
+        json = r.json()
+        if json:
+            for hostname in json:
+                results.add(hostname)
+        return results
+
+    async def query(self, query, request_fn=None, parse_fn=None):
+        if request_fn is None:
+            request_fn =
self.request_url + if parse_fn is None: + parse_fn = self.parse_results + try: + response = await request_fn(query) + if response is None: + self.info(f'Query "{query}" failed (no response)') + return [] + try: + results = list(await parse_fn(response, query)) + except Exception as e: + if response: + self.info( + f'Error parsing results for query "{query}" (status code {response.status_code})', trace=True + ) + self.log.trace(repr(response.text)) + else: + self.info(f'Error parsing results for "{query}": {e}', trace=True) + return + if results: + return results + self.debug(f'No results for "{query}"') + except Exception as e: + self.info(f"Error retrieving results for {query}: {e}", trace=True) + + async def query_paginated(self, query): + url = self.make_url(query) + agen = self.api_page_iter(url, page_size=self.page_size, **self.api_page_iter_kwargs) + try: + async for response in agen: + subdomains = await self.parse_results(response, query) + self.verbose(f'Got {len(subdomains):,} subdomains for "{query}"') + if not subdomains: + break + yield subdomains + finally: + await agen.aclose() + + async def _is_wildcard(self, query): + rdtypes = ("A", "AAAA", "CNAME") + if self.helpers.is_dns_name(query): + for wildcard_rdtypes in (await self.helpers.is_wildcard_domain(query, rdtypes=rdtypes)).values(): + if any(t in wildcard_rdtypes for t in rdtypes): + return True + return False + + async def filter_event(self, event): + query = self.make_query(event) + # check if wildcard + is_wildcard = await self._is_wildcard(query) + # check if cloud + is_cloud = False + if any(t.startswith("cloud-") for t in event.tags): + is_cloud = True + # reject if it's a cloud resource and not in our target + if is_cloud and event not in self.scan.target.whitelist: + return False, "Event is a cloud resource and not a direct target" + # optionally reject events with wildcards / errors + if self.reject_wildcards: + if any(t in event.tags for t in ("a-error", "aaaa-error")): + return False, "Event has a DNS resolution error" + if self.reject_wildcards == "strict": + if is_wildcard: + return False, "Event is a wildcard domain" + elif self.reject_wildcards == "cloud_only": + if is_wildcard and is_cloud: + return False, "Event is both a cloud resource and a wildcard domain" + return True, "" + + async def abort_if(self, event): + # this helps weed out unwanted results when scanning IP_RANGES and wildcard domains + if "in-scope" not in event.tags: + return True + return False + + +class subdomain_enum_apikey(subdomain_enum): + """ + A typical module for authenticated, API-based subdomain enumeration + Inherited by several other modules including securitytrails, c99.nl, etc. 
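Because the template does the heavy lifting (dedup, scope checks, wildcard filtering, event emission), concrete modules usually only override `base_url`, `make_url()`, and `parse_results()`. A hypothetical minimal subclass is sketched below; the API name, URL, and response shape are invented for illustration, but real subclasses such as trickest and virustotal appear later in this diff and follow the same pattern.

```python
# Editor's sketch: a minimal consumer of the subdomain_enum template.
# "exampleapi" and its endpoint are hypothetical.
from bbot.modules.templates.subdomain_enum import subdomain_enum


class exampleapi(subdomain_enum):
    watched_events = ["DNS_NAME"]
    produced_events = ["DNS_NAME"]
    flags = ["subdomain-enum", "passive", "safe"]
    meta = {"description": "Query example.com's API for subdomains"}

    base_url = "https://api.example.com"

    def make_url(self, query):
        # one GET per query; the base class handles everything else
        return f"{self.base_url}/v1/subdomains/{self.helpers.quote(query)}"

    async def parse_results(self, r, query=None):
        # assumed response shape: {"subdomains": ["a.example.com", ...]}
        return set(r.json().get("subdomains", []))
```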
+ """ + + watched_events = ["DNS_NAME"] + produced_events = ["DNS_NAME"] + flags = ["subdomain-enum", "passive", "safe"] + meta = {"description": "Query API for subdomains", "auth_required": True} + options = {"api_key": ""} + options_desc = {"api_key": "API key"} + + async def setup(self): + await super().setup() + return await self.require_api_key() diff --git a/bbot/modules/templates/webhook.py b/bbot/modules/templates/webhook.py new file mode 100644 index 0000000000..30168f01c3 --- /dev/null +++ b/bbot/modules/templates/webhook.py @@ -0,0 +1,87 @@ +import yaml + +from bbot.modules.output.base import BaseOutputModule + + +class WebhookOutputModule(BaseOutputModule): + """ + A template for webhook output modules such as Discord, Teams, and Slack + """ + + accept_dupes = False + message_size_limit = 2000 + content_key = "content" + vuln_severities = ["UNKNOWN", "LOW", "MEDIUM", "HIGH", "CRITICAL"] + + # abort module after 10 failed requests (not including retries) + _api_failure_abort_threshold = 10 + # retry each request up to 10 times, respecting the Retry-After header + _api_retries = 10 + + async def setup(self): + self._api_retries = self.config.get("retries", 10) + self.webhook_url = self.config.get("webhook_url", "") + self.min_severity = self.config.get("min_severity", "LOW").strip().upper() + assert self.min_severity in self.vuln_severities, ( + f"min_severity must be one of the following: {','.join(self.vuln_severities)}" + ) + self.allowed_severities = self.vuln_severities[self.vuln_severities.index(self.min_severity) :] + if not self.webhook_url: + self.warning("Must set Webhook URL") + return False + return await super().setup() + + async def handle_event(self, event): + message = self.format_message(event) + data = {self.content_key: message} + await self.api_request( + url=self.webhook_url, + method="POST", + json=data, + ) + + def get_watched_events(self): + if self._watched_events is None: + event_types = self.config.get("event_types", ["VULNERABILITY"]) + if isinstance(event_types, str): + event_types = [event_types] + self._watched_events = set(event_types) + return self._watched_events + + async def filter_event(self, event): + if event.type == "VULNERABILITY": + severity = event.data.get("severity", "UNKNOWN") + if severity not in self.allowed_severities: + return False, f"{severity} is below min_severity threshold" + return True + + def format_message_str(self, event): + event_tags = ",".join(event.tags) + return f"`[{event.type}]`\t**`{event.data}`**\ttags:{event_tags}" + + def format_message_other(self, event): + event_yaml = yaml.dump(event.data) + event_type = f"**`[{event.type}]`**" + if event.type in ("VULNERABILITY", "FINDING"): + event_str, color = self.get_severity_color(event) + event_type = f"{color} {event_str} {color}" + return f"""**`{event_type}`**\n```yaml\n{event_yaml}```""" + + def get_severity_color(self, event): + if event.type == "VULNERABILITY": + severity = event.data.get("severity", "UNKNOWN") + return f"{event.type} ({severity})", event.severity_colors[severity] + else: + return event.type, "🟦" + + def format_message(self, event): + if isinstance(event.data, str): + msg = self.format_message_str(event) + else: + msg = self.format_message_other(event) + if len(msg) > self.message_size_limit: + msg = msg[: self.message_size_limit - 3] + "..." 
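The severity gating in the webhook template is compact enough to restate in isolation: `min_severity` selects a suffix of `vuln_severities`, and `filter_event` drops any vulnerability below it. A small sketch of that slice logic, using the same severity list as the module:

```python
# Editor's sketch of the webhook template's min_severity handling.
vuln_severities = ["UNKNOWN", "LOW", "MEDIUM", "HIGH", "CRITICAL"]

def allowed(min_severity: str):
    # everything at or above the configured threshold is kept
    return vuln_severities[vuln_severities.index(min_severity.strip().upper()):]

assert allowed("medium") == ["MEDIUM", "HIGH", "CRITICAL"]
```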
+ return msg + + def evaluate_response(self, response): + return getattr(response, "is_success", False) diff --git a/bbot/modules/threatminer.py b/bbot/modules/threatminer.py deleted file mode 100644 index e152c8c12e..0000000000 --- a/bbot/modules/threatminer.py +++ /dev/null @@ -1,19 +0,0 @@ -from bbot.modules.crobat import crobat - - -class threatminer(crobat): - watched_events = ["DNS_NAME"] - produced_events = ["DNS_NAME"] - flags = ["subdomain-enum", "passive", "safe"] - meta = { - "description": "Query threatminer's API for subdomains", - } - - base_url = "https://api.threatminer.org/v2" - - def request_url(self, query): - return self.helpers.request(f"{self.base_url}/domain.php?q={self.helpers.quote(query)}&rt=5") - - def parse_results(self, r, query): - j = r.json() - yield from j.get("results", []) diff --git a/bbot/modules/trickest.py b/bbot/modules/trickest.py new file mode 100644 index 0000000000..246fdcfdec --- /dev/null +++ b/bbot/modules/trickest.py @@ -0,0 +1,46 @@ +from bbot.modules.templates.subdomain_enum import subdomain_enum_apikey + + +class Trickest(subdomain_enum_apikey): + watched_events = ["DNS_NAME"] + produced_events = ["DNS_NAME"] + flags = ["affiliates", "subdomain-enum", "passive", "safe"] + meta = { + "description": "Query Trickest's API for subdomains", + "author": "@amiremami", + "created_date": "2024-07-27", + "auth_required": True, + } + options = { + "api_key": "", + } + options_desc = { + "api_key": "Trickest API key", + } + + base_url = "https://api.trickest.io/solutions/v1/public/solution/a7cba1f1-df07-4a5c-876a-953f178996be" + ping_url = f"{base_url}/dataset" + dataset_id = "a0a49ca9-03bb-45e0-aa9a-ad59082ebdfc" + page_size = 50 + + def prepare_api_request(self, url, kwargs): + kwargs["headers"]["Authorization"] = f"Token {self.api_key}" + return url, kwargs + + async def handle_event(self, event): + await self.handle_event_paginated(event) + + def make_url(self, query): + url = f"{self.base_url}/view?q=hostname%20~%20%22.{self.helpers.quote(query)}%22" + url += f"&dataset_id={self.dataset_id}" + url += "&limit={page_size}&offset={offset}&select=hostname&orderby=hostname" + return url + + async def parse_results(self, j, query): + results = j.get("results", []) + subdomains = set() + for item in results: + hostname = item.get("hostname", "") + if hostname: + subdomains.add(hostname) + return subdomains diff --git a/bbot/modules/trufflehog.py b/bbot/modules/trufflehog.py new file mode 100644 index 0000000000..41fe0ae8a6 --- /dev/null +++ b/bbot/modules/trufflehog.py @@ -0,0 +1,214 @@ +import json +from functools import partial +from bbot.modules.base import BaseModule + + +class trufflehog(BaseModule): + watched_events = ["CODE_REPOSITORY", "FILESYSTEM", "HTTP_RESPONSE", "RAW_TEXT"] + produced_events = ["FINDING", "VULNERABILITY"] + flags = ["passive", "safe", "code-enum"] + meta = { + "description": "TruffleHog is a tool for finding credentials", + "created_date": "2024-03-12", + "author": "@domwhewell-sage", + } + + options = { + "version": "3.88.12", + "config": "", + "only_verified": True, + "concurrency": 8, + "deleted_forks": False, + } + options_desc = { + "version": "trufflehog version", + "config": "File path or URL to YAML trufflehog config", + "only_verified": "Only report credentials that have been verified", + "concurrency": "Number of concurrent workers", + "deleted_forks": "Scan for deleted github forks. WARNING: This is SLOW. For a smaller repository, this process can take 20 minutes. 
For a larger repository, it could take hours.", + } + deps_ansible = [ + { + "name": "Download trufflehog", + "unarchive": { + "src": "https://github.com/trufflesecurity/trufflehog/releases/download/v#{BBOT_MODULES_TRUFFLEHOG_VERSION}/trufflehog_#{BBOT_MODULES_TRUFFLEHOG_VERSION}_#{BBOT_OS_PLATFORM}_#{BBOT_CPU_ARCH}.tar.gz", + "include": "trufflehog", + "dest": "#{BBOT_TOOLS}", + "remote_src": True, + }, + } + ] + + scope_distance_modifier = 2 + + async def setup(self): + self.verified = self.config.get("only_verified", True) + self.config_file = self.config.get("config", "") + if self.config_file: + self.config_file = await self.helpers.wordlist(self.config_file) + self.concurrency = int(self.config.get("concurrency", 8)) + + self.deleted_forks = self.config.get("deleted_forks", False) + self.github_token = "" + if self.deleted_forks: + self.warning( + "Deleted forks is enabled. Scanning for deleted forks is slooooooowwwww. For a smaller repository, this process can take 20 minutes. For a larger repository, it could take hours." + ) + for module_name in ("github", "github_codesearch", "github_org", "git_clone"): + module_config = self.scan.config.get("modules", {}).get(module_name, {}) + api_key = module_config.get("api_key", "") + if api_key: + self.github_token = api_key + break + + # soft-fail if we don't have a github token as well + if not self.github_token: + self.deleted_forks = False + return None, "A github api_key must be provided to the github modules for deleted forks to be scanned" + return True + + async def filter_event(self, event): + if event.type == "CODE_REPOSITORY": + if self.deleted_forks: + if "git" not in event.tags: + return False, "Module only accepts git CODE_REPOSITORY events" + if "github" not in event.data["url"]: + return False, "Module only accepts github CODE_REPOSITORY events" + else: + return False, "Deleted forks is not enabled" + else: + if "unarchived-folder" in event.tags: + return False, "Not accepting unarchived-folder events" + return True + + async def handle_event(self, event): + description = "" + if isinstance(event.data, dict): + description = event.data.get("description", "") + + if event.type == "CODE_REPOSITORY": + path = event.data["url"] + if "git" in event.tags: + module = "github-experimental" + elif event.type == "FILESYSTEM": + path = event.data["path"] + if "git" in event.tags: + module = "git" + elif "docker" in event.tags: + module = "docker" + elif "postman" in event.tags: + module = "postman" + else: + module = "filesystem" + elif event.type in ("HTTP_RESPONSE", "RAW_TEXT"): + module = "filesystem" + file_data = event.raw_response if event.type == "HTTP_RESPONSE" else event.data + # write the response to a tempfile + # this is necessary because trufflehog doesn't yet support reading from stdin + # https://github.com/trufflesecurity/trufflehog/issues/162 + path = self.helpers.tempfile(file_data, pipe=False) + + if event.type == "CODE_REPOSITORY": + host = event.host + else: + host = str(event.parent.host) + async for ( + decoder_name, + detector_name, + raw_result, + rawv2_result, + verified, + source_metadata, + ) in self.execute_trufflehog(module, path): + verified_str = "Verified" if verified else "Possible" + finding_type = "VULNERABILITY" if verified else "FINDING" + data = { + "description": f"{verified_str} Secret Found. 
Detector Type: [{detector_name}] Decoder Type: [{decoder_name}] Details: [{source_metadata}]", + } + if host: + data["host"] = host + if finding_type == "VULNERABILITY": + data["severity"] = "High" + if description: + data["description"] += f" Description: [{description}]" + data["description"] += f" Raw result: [{raw_result}]" + if rawv2_result: + data["description"] += f" RawV2 result: [{rawv2_result}]" + await self.emit_event( + data, + finding_type, + event, + context=f'{{module}} searched {event.type} using "{module}" method and found {verified_str.lower()} secret ({{event.type}}): {raw_result}', + ) + + # clean up the tempfile when we're done with it + if event.type in ("HTTP_RESPONSE", "RAW_TEXT"): + path.unlink(missing_ok=True) + + async def execute_trufflehog(self, module, path=None, string=None): + command = [ + "trufflehog", + "--json", + "--no-update", + ] + if self.verified: + command.append("--only-verified") + if self.config_file: + command.append("--config=" + str(self.config_file)) + command.append("--concurrency=" + str(self.concurrency)) + if module == "git": + command.append("git") + command.append("file://" + path) + elif module == "docker": + command.append("docker") + command.append("--image=file://" + path) + elif module == "postman": + command.append("postman") + command.append("--workspace-paths=" + path) + elif module == "filesystem": + command.append("filesystem") + command.append(path) + elif module == "github-experimental": + command.append("github-experimental") + command.append("--repo=" + path) + command.append("--object-discovery") + command.append("--delete-cached-data") + command.append("--token=" + self.github_token) + + stats_file = self.helpers.tempfile_tail(callback=partial(self.log_trufflehog_status, path)) + try: + with open(stats_file, "w") as stats_fh: + async for line in self.helpers.run_live(command, stderr=stats_fh): + try: + j = json.loads(line) + except json.decoder.JSONDecodeError: + self.debug(f"Failed to decode line: {line}") + continue + + decoder_name = j.get("DecoderName", "") + + detector_name = j.get("DetectorName", "") + + raw_result = j.get("Raw", "") + + rawv2_result = j.get("RawV2", "") + + verified = j.get("Verified", False) + + source_metadata = j.get("SourceMetadata", {}) + + yield (decoder_name, detector_name, raw_result, rawv2_result, verified, source_metadata) + finally: + stats_file.unlink() + + def log_trufflehog_status(self, path, line): + try: + line = json.loads(line) + except Exception: + self.info(str(line)) + return + message = line.get("msg", "") + ts = line.get("ts", "") + status = f"Message: {message} | Timestamp: {ts}" + self.verbose(f"Current scan target: {path}") + self.verbose(status) diff --git a/bbot/modules/url_manipulation.py b/bbot/modules/url_manipulation.py new file mode 100644 index 0000000000..c36b7c39d5 --- /dev/null +++ b/bbot/modules/url_manipulation.py @@ -0,0 +1,108 @@ +from bbot.errors import HttpCompareError +from bbot.modules.base import BaseModule + + +class url_manipulation(BaseModule): + watched_events = ["URL"] + produced_events = ["FINDING"] + flags = ["active", "aggressive", "web-thorough"] + meta = { + "description": "Attempt to identify URL parsing/routing based vulnerabilities", + "created_date": "2022-09-27", + "author": "@liquidsec", + } + in_scope_only = True + + options = {"allow_redirects": True} + options_desc = { + "allow_redirects": "Allowing redirects will sometimes create false positives. Disallowing will sometimes create false negatives. Allowed by default." 
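The signatures in url_manipulation are built in two stages: the random parameter name and the file extension are interpolated once at setup time with `%`-formatting, while `{scheme}`, `{netloc}`, and `{path}` are left as placeholders for `format_signature()` to fill per event. A sketch of the two stages, with an invented random string standing in for `self.helpers.rand_string()`:

```python
# Editor's sketch of url_manipulation's two-stage signature expansion.
rand_string = "v7v0y8k2"  # stand-in for self.helpers.rand_string()

# stage 1 (setup): bake in the random param name and extension
template = "{scheme}://{netloc}/{path}?%s=%s" % (rand_string, ".css")

# stage 2 (per event): fill in the parsed URL components
url = template.format(scheme="https", netloc="evilcorp.com", path="admin/login")
assert url == "https://evilcorp.com/admin/login?v7v0y8k2=.css"
```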
+    }
+
+    async def setup(self):
+        # ([string]method,[string]path,[bool]strip trailing slash)
+        self.signatures = []
+
+        self.rand_string = self.helpers.rand_string()
+
+        # Test for abuse of extension based routing
+        extensions = [
+            ".css",
+            ".js",
+            ".xls",
+            ".png",
+            ".jpg",
+            ".swf",
+            ".xml",
+            ".pdf",
+            ".gif",
+        ]
+        for ext in extensions:
+            self.signatures.append(("GET", "{scheme}://{netloc}/{path}?%s=%s" % (self.rand_string, ext), False))
+
+        self.allow_redirects = self.config.get("allow_redirects", True)
+        return True
+
+    async def handle_event(self, event):
+        try:
+            compare_helper = self.helpers.http_compare(
+                event.data, allow_redirects=self.allow_redirects, include_cache_buster=False
+            )
+        except HttpCompareError as e:
+            self.debug(e)
+            return
+
+        try:
+            if not await compare_helper.canary_check(event.data, mode="getparam"):
+                raise HttpCompareError()
+        except HttpCompareError:
+            self.verbose(f'Aborting "{event.data}" due to failed canary check')
+            return
+
+        for sig in self.signatures:
+            sig = self.format_signature(sig, event)
+            try:
+                match, reasons, reflection, subject_response = await compare_helper.compare(
+                    sig[1], method=sig[0], allow_redirects=self.allow_redirects
+                )
+            except HttpCompareError as e:
+                self.debug(f"Encountered HttpCompareError: [{e}] for URL [{event.data}]")
+                # without this, subject_response may be unbound below
+                continue
+
+            if subject_response:
+                subject_content = "".join([str(x) for x in subject_response.headers])
+                if subject_response.text is not None:
+                    subject_content += subject_response.text
+
+                if self.rand_string not in subject_content:
+                    if match is False:
+                        if str(subject_response.status_code).startswith("2"):
+                            if "body" in reasons:
+                                reported_signature = f"Modified URL: {sig[1]}"
+                                description = f"Url Manipulation: [{','.join(reasons)}] Sig: [{reported_signature}]"
+                                await self.emit_event(
+                                    {"description": description, "host": str(event.host), "url": event.data},
+                                    "FINDING",
+                                    parent=event,
+                                    context=f"{{module}} probed {event.data} and identified {{event.type}}: {description}",
+                                )
+                        else:
+                            self.debug(f"Status code changed to {str(subject_response.status_code)}, ignoring")
+                else:
+                    self.debug("Ignoring positive result due to presence of parameter name in result")
+
+    async def filter_event(self, event):
+        accepted_status_codes = ["200", "301", "302"]
+
+        for c in accepted_status_codes:
+            if f"status-{c}" in event.tags:
+                return True
+        return False
+
+    def format_signature(self, sig, event):
+        if sig[2] is True:
+            cleaned_path = event.parsed_url.path.strip("/")
+        else:
+            cleaned_path = event.parsed_url.path.lstrip("/")
+
+        kwargs = {"scheme": event.parsed_url.scheme, "netloc": event.parsed_url.netloc, "path": cleaned_path}
+        formatted_url = sig[1].format(**kwargs)
+        return (sig[0], formatted_url)
diff --git a/bbot/modules/urlscan.py b/bbot/modules/urlscan.py
index 22d80176c1..5c7c78f478 100644
--- a/bbot/modules/urlscan.py
+++ b/bbot/modules/urlscan.py
@@ -1,45 +1,65 @@
-from .crobat import crobat
+from bbot.modules.templates.subdomain_enum import subdomain_enum


-class urlscan(crobat):
+class urlscan(subdomain_enum):
     flags = ["subdomain-enum", "passive", "safe"]
     watched_events = ["DNS_NAME"]
     produced_events = ["DNS_NAME", "URL_UNVERIFIED"]
     meta = {
         "description": "Query urlscan.io for subdomains",
+        "created_date": "2022-06-09",
+        "author": "@TheTechromancer",
     }
     options = {"urls": False}
     options_desc = {"urls": "Emit URLs in addition to DNS_NAMEs"}

     base_url = "https://urlscan.io/api/v1"

-    def setup(self):
+    async def setup(self):
         self.urls = self.config.get("urls", False)
-        return super().setup()
+
return await super().setup() - def handle_event(self, event): + async def handle_event(self, event): query = self.make_query(event) - for domain, url in self.query(query): - source_event = event + for domain, url in await self.query(query): + parent_event = event if domain and domain != query: - domain_event = self.make_event(domain, "DNS_NAME", source=event) - if str(domain_event.host).endswith(query) and not str(domain_event.host) == str(event.host): - self.emit_event(domain_event, abort_if=self.abort_if) - source_event = domain_event + domain_event = self.make_event(domain, "DNS_NAME", parent=event) + if domain_event: + if str(domain_event.host).endswith(query) and not str(domain_event.host) == str(event.host): + await self.emit_event( + domain_event, + abort_if=self.abort_if, + context=f'{{module}} searched urlscan.io API for "{query}" and found {{event.type}}: {{event.data}}', + ) + parent_event = domain_event if url: - url_event = self.make_event(url, "URL_UNVERIFIED", source=source_event) - if str(url_event.host).endswith(query): - if self.urls: - self.emit_event(url_event, abort_if=self.abort_if) + url_event = self.make_event(url, "URL_UNVERIFIED", parent=parent_event) + if url_event: + if str(url_event.host).endswith(query): + if self.urls: + await self.emit_event( + url_event, + abort_if=self.abort_if, + context=f'{{module}} searched urlscan.io API for "{query}" and found {{event.type}}: {{event.data}}', + ) + else: + await self.emit_event( + str(url_event.host), + "DNS_NAME", + parent=event, + abort_if=self.abort_if, + context=f'{{module}} searched urlscan.io API for "{query}" and found {{event.type}}: {{event.data}}', + ) else: - self.emit_event(str(url_event.host), "DNS_NAME", source=event, abort_if=self.abort_if) - else: - self.debug(f"{url_event.host} does not match {query}") + self.debug(f"{url_event.host} does not match {query}") - def query(self, query): - results = self.helpers.request(f"{self.base_url}/search/?q={self.helpers.quote(query)}") + async def query(self, query): + results = set() + url = f"{self.base_url}/search/?q={self.helpers.quote(query)}" + r = await self.helpers.request(url) try: - json = results.json() + json = r.json() if json and type(json) == dict: for result in json.get("results", []): if result and type(result) == dict: @@ -48,17 +68,15 @@ def query(self, query): domain = task.get("domain", "") url = task.get("url", "") if domain or url: - yield domain, url + results.add((domain, url)) page = result.get("page", {}) if page and type(page) == dict: domain = page.get("domain", "") url = page.get("url", "") if domain or url: - yield domain, url + results.add((domain, url)) else: self.debug(f'No results for "{query}"') except Exception: - import traceback - - self.warning(f"Error retrieving urlscan results") - self.debug(traceback.format_exc()) + self.verbose("Error retrieving urlscan results") + return results diff --git a/bbot/modules/viewdns.py b/bbot/modules/viewdns.py index 127c91a757..7a60b721ba 100644 --- a/bbot/modules/viewdns.py +++ b/bbot/modules/viewdns.py @@ -1,49 +1,56 @@ import re -from bs4 import BeautifulSoup from bbot.modules.base import BaseModule class viewdns(BaseModule): + """ + Todo: Also retrieve registrar? 
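Unlike the API-backed modules, viewdns scrapes an HTML table, validating each row by its date cell before taking the domain and registrar. A standalone sketch of that row parsing with invented table data; it uses bs4's `find_all` (the module uses the legacy `findAll` alias) and the same `html.parser` backend:

```python
# Editor's sketch of viewdns-style reverse-whois table parsing.
import re
from bs4 import BeautifulSoup

date_regex = re.compile(r"\d{4}-\d{2}-\d{2}")
html = "<table><tr><td>evilcorp-holdings.com</td><td>2021-03-04</td><td>GoDaddy</td></tr></table>"
for row in BeautifulSoup(html, "html.parser").find_all("tr"):
    cells = row.find_all("td")
    # a valid result row has a date in its second cell; registrar is the last cell
    if len(cells) > 1 and date_regex.match(cells[1].text.strip()):
        print(cells[0].text.strip(), cells[-1].text.strip())
```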
+ """ watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] - flags = ["subdomain-enum", "passive", "safe"] + flags = ["affiliates", "passive", "safe"] meta = { "description": "Query viewdns.info's reverse whois for related domains", + "created_date": "2022-07-04", + "author": "@TheTechromancer", } - deps_pip = ["beautifulsoup4", "lxml"] base_url = "https://viewdns.info" in_scope_only = True + per_domain_only = True + _qsize = 1 - def setup(self): - self.processed = set() + async def setup(self): self.date_regex = re.compile(r"\d{4}-\d{2}-\d{2}") return True - def filter_event(self, event): - _, domain = self.helpers.split_domain(event.data) - if hash(domain) in self.processed: - return False - self.processed.add(hash(domain)) - return True - - def handle_event(self, event): + async def handle_event(self, event): _, query = self.helpers.split_domain(event.data) - for domain, _ in self.query(query): - self.emit_event(domain, "DNS_NAME", source=event, tags=["affiliate"]) - # todo: registrar? + for domain, _ in await self.query(query): + await self.emit_event( + domain, + "DNS_NAME", + parent=event, + tags=["affiliate"], + context=f'{{module}} searched viewdns.info for "{query}" and found {{event.type}}: {{event.data}}', + ) - def query(self, query): + async def query(self, query): + results = set() url = f"{self.base_url}/reversewhois/?q={query}" - r = self.helpers.request(url) + r = await self.helpers.request(url) status_code = getattr(r, "status_code", 0) if status_code not in (200,): - self.warning(f"Error retrieving reverse whois results (status code: {status_code})") + self.verbose(f"Error retrieving reverse whois results (status code: {status_code})") content = getattr(r, "content", b"") - html = BeautifulSoup(content, features="lxml") - yielded = set() + + html = self.helpers.beautifulsoup(content, "html.parser") + if html is False: + self.debug("BeautifulSoup returned False") + return results + found = set() for table_row in html.findAll("tr"): table_cells = table_row.findAll("td") # make double-sure we're in the right table by checking the date field @@ -54,10 +61,12 @@ def query(self, query): # registrar == last cell registrar = table_cells[-1].text.strip() if domain and not domain == query: - to_yield = (domain, registrar) - to_yield_hash = hash(to_yield) - if to_yield_hash not in yielded: - yield to_yield + result = (domain, registrar) + result_hash = hash(result) + if result_hash not in found: + found.add(result_hash) + results.add(result) except IndexError: self.debug(f"Invalid row {str(table_row)[:40]}...") continue + return results diff --git a/bbot/modules/virustotal.py b/bbot/modules/virustotal.py new file mode 100644 index 0000000000..b932419450 --- /dev/null +++ b/bbot/modules/virustotal.py @@ -0,0 +1,29 @@ +from bbot.modules.templates.subdomain_enum import subdomain_enum_apikey + + +class virustotal(subdomain_enum_apikey): + watched_events = ["DNS_NAME"] + produced_events = ["DNS_NAME"] + flags = ["subdomain-enum", "passive", "safe"] + meta = { + "description": "Query VirusTotal's API for subdomains", + "created_date": "2022-08-25", + "author": "@TheTechromancer", + "auth_required": True, + } + options = {"api_key": ""} + options_desc = {"api_key": "VirusTotal API Key"} + + base_url = "https://www.virustotal.com/api/v3" + api_page_iter_kwargs = {"json": False, "next_key": lambda r: r.json().get("links", {}).get("next", "")} + + def make_url(self, query): + return f"{self.base_url}/domains/{self.helpers.quote(query)}/subdomains" + + def prepare_api_request(self, 
url, kwargs): + kwargs["headers"]["x-apikey"] = self.api_key + return url, kwargs + + async def parse_results(self, r, query): + text = getattr(r, "text", "") + return await self.scan.extract_in_scope_hostnames(text) diff --git a/bbot/modules/wafw00f.py b/bbot/modules/wafw00f.py new file mode 100644 index 0000000000..760cf32fae --- /dev/null +++ b/bbot/modules/wafw00f.py @@ -0,0 +1,69 @@ +from bbot.modules.base import BaseModule +from wafw00f import main as wafw00f_main + +# disable wafw00f logging +import logging + +wafw00f_logger = logging.getLogger("wafw00f") +wafw00f_logger.setLevel(logging.CRITICAL + 100) + + +class wafw00f(BaseModule): + """ + https://github.com/EnableSecurity/wafw00f + """ + + watched_events = ["URL"] + produced_events = ["WAF"] + flags = ["active", "aggressive"] + meta = { + "description": "Web Application Firewall Fingerprinting Tool", + "created_date": "2023-02-15", + "author": "@liquidsec", + } + + deps_pip = ["wafw00f~=2.2.0"] + + options = {"generic_detect": True} + options_desc = {"generic_detect": "When no specific WAF detections are made, try to perform a generic detect"} + + in_scope_only = True + per_hostport_only = True + + async def filter_event(self, event): + http_status = getattr(event, "http_status", 0) + if not http_status or str(http_status).startswith("3"): + return False, f"Invalid HTTP status code: {http_status}" + return True, "" + + def _incoming_dedup_hash(self, event): + return hash(f"{event.parsed_url.scheme}://{event.parsed_url.netloc}/") + + async def handle_event(self, event): + url = f"{event.parsed_url.scheme}://{event.parsed_url.netloc}/" + WW = await self.helpers.run_in_executor(wafw00f_main.WAFW00F, url, followredirect=False) + waf_detections = await self.helpers.run_in_executor(WW.identwaf) + if waf_detections: + for waf in waf_detections: + await self.emit_event( + {"host": str(event.host), "url": url, "waf": waf}, + "WAF", + parent=event, + context=f"{{module}} scanned {url} and identified {{event.type}}: {waf}", + ) + else: + if self.config.get("generic_detect") is True: + generic = await self.helpers.run_in_executor(WW.genericdetect) + if generic: + waf = "generic detection" + await self.emit_event( + { + "host": str(event.host), + "url": url, + "waf": waf, + "info": WW.knowledge["generic"]["reason"], + }, + "WAF", + parent=event, + context=f"{{module}} scanned {url} and identified {{event.type}}: {waf}", + ) diff --git a/bbot/modules/wappalyzer.py b/bbot/modules/wappalyzer.py index 50b31accd0..747d2ce972 100644 --- a/bbot/modules/wappalyzer.py +++ b/bbot/modules/wappalyzer.py @@ -11,28 +11,50 @@ class wappalyzer(BaseModule): - watched_events = ["HTTP_RESPONSE"] produced_events = ["TECHNOLOGY"] - flags = ["active", "safe", "web"] + flags = ["active", "safe", "web-basic"] meta = { "description": "Extract technologies from web responses", + "created_date": "2022-04-15", + "author": "@liquidsec", } - deps_pip = ["python-Wappalyzer"] + deps_pip = ["python-Wappalyzer~=0.3.1", "aiohttp~=3.9.0b0", "setuptools"] # accept all events regardless of scope distance scope_distance_modifier = None - max_event_handlers = 5 - - def setup(self): - self.wappalyzer = Wappalyzer.latest() + _module_threads = 5 + + @staticmethod + def process_headers(headers): + unique_headers = {} + count = {} + for k, v in headers.items(): + values = v if isinstance(v, list) else [v] + for item in values: + unique_key = k if k not in count else f"{k}_{count[k]}" + while unique_key in unique_headers: + count[k] = count.get(k, 0) + 1 + unique_key = f"{k}_{count[k]}" 
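+ # the while-loop above has found a free suffixed key, so repeated headers (e.g. multiple Set-Cookie) are preserved as separate entries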
+ unique_headers[unique_key] = item + count[k] = count.get(k, 0) + 1 + return unique_headers + + async def setup(self): + self.wappalyzer = await self.helpers.run_in_executor(Wappalyzer.latest) return True - def handle_event(self, event): - for res in self.wappalyze(event.data): - self.emit_event( - {"technology": res.lower(), "url": event.data["url"], "host": str(event.host)}, "TECHNOLOGY", event + async def handle_event(self, event): + for res in await self.helpers.run_in_executor(self.wappalyze, event.data): + res = res.lower() + await self.emit_event( + {"technology": res, "url": event.data["url"], "host": str(event.host)}, + "TECHNOLOGY", + event, + context=f"{{module}} analyzed HTTP_RESPONSE and identified {{event.type}}: {res}", ) def wappalyze(self, data): - w = WebPage(url=data["url"], html=data.get("response-body", ""), headers=data.get("header-dict", {})) + # Convert dictionary of lists to a dictionary of strings + header_dict = self.process_headers(data.get("header-dict", {})) + w = WebPage(url=data["url"], html=data.get("body", ""), headers=header_dict) return self.wappalyzer.analyze(w) diff --git a/bbot/modules/wayback.py b/bbot/modules/wayback.py index fef2ecd11f..fbb30da7a9 100644 --- a/bbot/modules/wayback.py +++ b/bbot/modules/wayback.py @@ -1,12 +1,16 @@ -from .crobat import crobat +from datetime import datetime +from bbot.modules.templates.subdomain_enum import subdomain_enum -class wayback(crobat): + +class wayback(subdomain_enum): flags = ["passive", "subdomain-enum", "safe"] watched_events = ["DNS_NAME"] produced_events = ["URL_UNVERIFIED", "DNS_NAME"] meta = { "description": "Query archive.org's API for subdomains", + "created_date": "2022-04-01", + "author": "@liquidsec", } options = {"urls": False, "garbage_threshold": 10} options_desc = { @@ -17,28 +21,35 @@ class wayback(crobat): base_url = "http://web.archive.org" - def setup(self): + async def setup(self): self.urls = self.config.get("urls", False) self.garbage_threshold = self.config.get("garbage_threshold", 10) - return super().setup() + return await super().setup() - def handle_event(self, event): + async def handle_event(self, event): query = self.make_query(event) - for result, event_type in self.query(query): - self.emit_event(result, event_type, event, abort_if=self.abort_if) + for result, event_type in await self.query(query): + await self.emit_event( + result, + event_type, + event, + abort_if=self.abort_if, + context=f'{{module}} queried archive.org for "{query}" and found {{event.type}}: {{event.data}}', + ) - def query(self, query): + async def query(self, query): + results = set() waybackurl = f"{self.base_url}/cdx/search/cdx?url={self.helpers.quote(query)}&matchType=domain&output=json&fl=original&collapse=original" - r = self.helpers.request(waybackurl) + r = await self.helpers.request(waybackurl, timeout=self.http_timeout + 10) if not r: self.warning(f'Error connecting to archive.org for query "{query}"') - return + return results try: j = r.json() assert type(j) == list except Exception: self.warning(f'Error JSON-decoding archive.org response for query "{query}"') - return + return results urls = [] for result in j[1:]: @@ -48,13 +59,29 @@ def query(self, query): except KeyError: continue + self.verbose(f"Found {len(urls):,} URLs for {query}") + dns_names = set() - for parsed_url in self.helpers.collapse_urls(urls, threshold=self.garbage_threshold): + collapsed_urls = 0 + start_time = datetime.now() + # we consolidate URLs to cut down on garbage data + # this is CPU-intensive, so we do it in 
its own core. + parsed_urls = await self.helpers.run_in_executor_mp( + self.helpers.validators.collapse_urls, + urls, + threshold=self.garbage_threshold, + ) + for parsed_url in parsed_urls: + collapsed_urls += 1 if not self.urls: dns_name = parsed_url.hostname h = hash(dns_name) if h not in dns_names: dns_names.add(h) - yield dns_name, "DNS_NAME" + results.add((dns_name, "DNS_NAME")) else: - yield parsed_url.geturl(), "URL_UNVERIFIED" + results.add((parsed_url.geturl(), "URL_UNVERIFIED")) + end_time = datetime.now() + duration = self.helpers.human_timedelta(end_time - start_time) + self.verbose(f"Collapsed {len(urls):,} -> {collapsed_urls:,} URLs in {duration}") + return results diff --git a/bbot/modules/wpscan.py b/bbot/modules/wpscan.py new file mode 100644 index 0000000000..4f1a63a1b5 --- /dev/null +++ b/bbot/modules/wpscan.py @@ -0,0 +1,285 @@ +import json +from bbot.modules.base import BaseModule + + +class wpscan(BaseModule): + watched_events = ["HTTP_RESPONSE", "TECHNOLOGY"] + produced_events = ["URL_UNVERIFIED", "FINDING", "VULNERABILITY", "TECHNOLOGY"] + flags = ["active", "aggressive"] + meta = { + "description": "Wordpress security scanner. Highly recommended to use an API key for better results.", + "created_date": "2024-05-29", + "author": "@domwhewell-sage", + } + + options = { + "api_key": "", + "enumerate": "vp,vt,cb,dbe", + "threads": 5, + "request_timeout": 5, + "connection_timeout": 2, + "disable_tls_checks": True, + "force": False, + } + options_desc = { + "api_key": "WPScan API Key", + "enumerate": "Enumeration Process see wpscan help documentation (default: vp,vt,cb,dbe)", + "threads": "How many wpscan threads to spawn (default is 5)", + "request_timeout": "The request timeout in seconds (default 5)", + "connection_timeout": "The connection timeout in seconds (default 2)", + "disable_tls_checks": "Disables the SSL/TLS certificate verification (Default True)", + "force": "Do not check if the target is running WordPress or returns a 403", + } + deps_apt = ["curl", "make", "gcc"] + deps_ansible = [ + { + "name": "Install Ruby Deps (Debian)", + "package": {"name": ["ruby-rubygems", "ruby-dev"], "state": "present"}, + "become": True, + "when": "ansible_facts['os_family'] == 'Debian'", + }, + { + "name": "Install Ruby Deps (Arch)", + "package": {"name": ["rubygems"], "state": "present"}, + "become": True, + "when": "ansible_facts['os_family'] == 'Archlinux'", + }, + { + "name": "Install Ruby Deps (Fedora)", + "package": {"name": ["rubygems", "ruby-devel"], "state": "present"}, + "become": True, + "when": "ansible_facts['os_family'] == 'RedHat'", + }, + { + "name": "Install Ruby Deps (Alpine)", + "package": {"name": ["ruby-dev", "ruby-bundler"], "state": "present"}, + "become": True, + "when": "ansible_facts['os_family'] == 'Alpine'", + }, + { + "name": "Install wpscan gem", + "gem": {"name": "wpscan", "state": "latest", "user_install": False}, + "become": True, + }, + ] + + async def setup(self): + self.processed = set() + self.ignore_events = ["xmlrpc", "readme"] + self.api_key = self.config.get("api_key", "") + self.enumerate = self.config.get("enumerate", "vp,vt,cb,dbe") + self.proxy = self.scan.web_config.get("http_proxy", "") + self.threads = self.config.get("threads", 5) + self.request_timeout = self.config.get("request_timeout", 5) + self.connection_timeout = self.config.get("connection_timeout", 2) + self.disable_tls_checks = self.config.get("disable_tls_checks", True) + self.force = self.config.get("force", False) + return True + + async def filter_event(self, 
event): + host_hash = hash(event.host) + if host_hash in self.processed: + return False, "Host has already been processed" + if event.type == "HTTP_RESPONSE": + is_redirect = str(event.data["status_code"]).startswith("30") + if is_redirect: + return False, "URL is a redirect" + elif event.type == "TECHNOLOGY": + if not event.data["technology"].lower().startswith("wordpress"): + return False, "technology is not wordpress" + self.processed.add(host_hash) + return True + + async def handle_event(self, event): + if event.type == "HTTP_RESPONSE": + await self.handle_http_response(event) + elif event.type == "TECHNOLOGY": + await self.handle_technology(event) + + async def handle_http_response(self, source_event): + url = source_event.parsed_url._replace(path="/").geturl() + command = self.construct_command(url) + output = await self.run_process(command) + for new_event in self.parse_wpscan_output(output.stdout, url, source_event): + await self.emit_event(new_event) + + async def handle_technology(self, source_event): + url = self.get_base_url(source_event) + command = self.construct_command(url) + output = await self.run_process(command) + for new_event in self.parse_wpscan_output(output.stdout, url, source_event): + await self.emit_event(new_event) + + def construct_command(self, url): + # base executable + command = ["wpscan", "--url", url] + # proxy + if self.proxy: + command += ["--proxy", str(self.proxy)] + # user agent + command += ["--user-agent", f"'{self.scan.useragent}'"] + # threads + command += ["--max-threads", str(self.threads)] + # request timeout + command += ["--request-timeout", str(self.request_timeout)] + # connection timeout + command += ["--connect-timeout", str(self.connection_timeout)] + # api key + if self.api_key: + command += ["--api-token", f"{self.api_key}"] + # enumerate + command += ["--enumerate", self.enumerate] + # disable tls checks + if self.disable_tls_checks: + command += ["--disable-tls-checks"] + # force + if self.force: + command += ["--force"] + # output format + command += ["--format", "json"] + return command + + def parse_wpscan_output(self, output, base_url, source_event): + json_output = json.loads(output) + interesting_json = json_output.get("interesting_findings", {}) or {} + version_json = json_output.get("version", {}) or {} + theme_json = json_output.get("main_theme", {}) or {} + plugins_json = json_output.get("plugins", {}) or {} + if interesting_json: + yield from self.parse_wp_misc(interesting_json, base_url, source_event) + if version_json: + yield from self.parse_wp_version(version_json, base_url, source_event) + if theme_json: + yield from self.parse_wp_themes(theme_json, base_url, source_event) + if plugins_json: + yield from self.parse_wp_plugins(plugins_json, base_url, source_event) + + def parse_wp_misc(self, interesting_json, base_url, source_event): + for finding in interesting_json: + url = finding.get("url", base_url) + type = finding["type"] + if type in self.ignore_events: + continue + description_string = finding["to_s"] + interesting_entries = finding["interesting_entries"] + if type == "headers": + for header in interesting_entries: + yield self.make_event( + {"technology": str(header).lower(), "url": url, "host": str(source_event.host)}, + "TECHNOLOGY", + source_event, + ) + else: + url_event = self.make_event(url, "URL_UNVERIFIED", parent=source_event, tags=["httpx-safe"]) + if url_event: + yield url_event + yield self.make_event( + {"description": description_string, "url": url, "host": str(source_event.host)}, + 
"FINDING", + source_event, + ) + + def parse_wp_version(self, version_json, url, source_event): + version = version_json.get("number", "") + if version: + technology = f"wordpress {version}" + else: + technology = "wordpress detect" + yield self.make_event( + {"technology": str(technology).lower(), "url": url, "host": str(source_event.host)}, + "TECHNOLOGY", + source_event, + ) + for wp_vuln in version_json.get("vulnerabilities", []): + yield self.make_event( + { + "severity": "HIGH", + "host": str(source_event.host), + "url": url, + "description": self.vulnerability_to_s(wp_vuln), + }, + "VULNERABILITY", + source_event, + ) + + def parse_wp_themes(self, theme_json, url, source_event): + name = theme_json.get("slug", "") + version = theme_json.get("version", {}).get("number", "") + if name: + if version: + technology = f"{name} v{version}" + else: + technology = name + yield self.make_event( + {"technology": str(technology).lower(), "url": url, "host": str(source_event.host)}, + "TECHNOLOGY", + source_event, + ) + for theme_vuln in theme_json.get("vulnerabilities", []): + yield self.make_event( + { + "severity": "HIGH", + "host": str(source_event.host), + "url": url, + "description": self.vulnerability_to_s(theme_vuln), + }, + "VULNERABILITY", + source_event, + ) + + def parse_wp_plugins(self, plugins_json, base_url, source_event): + for name, plugin in plugins_json.items(): + url = plugin.get("location", base_url) + if url != base_url: + url_event = self.make_event(url, "URL_UNVERIFIED", parent=source_event, tags=["httpx-safe"]) + if url_event: + yield url_event + version = plugin.get("version", {}).get("number", "") + if version: + technology = f"{name} {version}" + else: + technology = name + yield self.make_event( + {"technology": str(technology).lower(), "url": url, "host": str(source_event.host)}, + "TECHNOLOGY", + source_event, + ) + for vuln in plugin.get("vulnerabilities", []): + yield self.make_event( + { + "severity": "HIGH", + "host": str(source_event.host), + "url": url, + "description": self.vulnerability_to_s(vuln), + }, + "VULNERABILITY", + source_event, + ) + + def vulnerability_to_s(self, vuln_json): + string = [] + title = vuln_json.get("title", "") + string.append(f"Title: {title}") + fixed_in = vuln_json.get("fixed_in", "") + string.append(f"Fixed in: {fixed_in}") + references = vuln_json.get("references", {}) + if references: + cves = references.get("cve", []) + urls = references.get("url", []) + youtube_urls = references.get("youtube", []) + cves_list = [] + for cve in cves: + cves_list.append(f"CVE-{cve}") + if cves_list: + string.append(f"CVEs: [{', '.join(cves_list)}]") + if urls: + string.append(f"References: [{', '.join(urls)}]") + if youtube_urls: + string.append(f"Youtube Links: [{', '.join(youtube_urls)}]") + return " ".join(string) + + def get_base_url(self, event): + base_url = event.data.get("url", "") + if not base_url: + base_url = f"https://{event.host}" + return self.helpers.urlparse(base_url)._replace(path="/").geturl() diff --git a/bbot/modules/zoomeye.py b/bbot/modules/zoomeye.py index 5eb02c800f..c6cb256df0 100644 --- a/bbot/modules/zoomeye.py +++ b/bbot/modules/zoomeye.py @@ -1,11 +1,16 @@ -from bbot.modules.shodan_dns import shodan_dns +from bbot.modules.templates.subdomain_enum import subdomain_enum_apikey -class zoomeye(shodan_dns): +class zoomeye(subdomain_enum_apikey): watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] - flags = ["subdomain-enum", "passive", "safe"] - meta = {"description": "Query ZoomEye's API for subdomains", 
"auth_required": True} + flags = ["affiliates", "subdomain-enum", "passive", "safe"] + meta = { + "description": "Query ZoomEye's API for subdomains", + "created_date": "2022-08-03", + "author": "@TheTechromancer", + "auth_required": True, + } options = {"api_key": "", "max_pages": 20, "include_related": False} options_desc = { "api_key": "ZoomEye API key", @@ -13,21 +18,25 @@ class zoomeye(shodan_dns): "include_related": "Include domains which may be related to the target", } - base_url = "https://api.zoomeye.org" + base_url = "https://api.zoomeye.hk" - def setup(self): + async def setup(self): self.max_pages = self.config.get("max_pages", 20) - self.headers = {"API-KEY": self.config.get("api_key", "")} self.include_related = self.config.get("include_related", False) - return super().setup() + return await super().setup() + + def prepare_api_request(self, url, kwargs): + kwargs["headers"]["API-KEY"] = self.api_key + return url, kwargs - def ping(self): - r = self.helpers.request(f"{self.base_url}/resources-info", headers=self.headers) + async def ping(self): + url = f"{self.base_url}/resources-info" + r = await self.api_request(url) assert int(r.json()["quota_info"]["remain_total_quota"]) > 0, "No quota remaining" - def handle_event(self, event): + async def handle_event(self, event): query = self.make_query(event) - results = self.query(query) + results = await self.query(query) if results: for hostname in results: if hostname == event: @@ -35,18 +44,34 @@ def handle_event(self, event): tags = [] if not hostname.endswith(f".{query}"): tags = ["affiliate"] - self.emit_event(hostname, "DNS_NAME", event, tags=tags) + await self.emit_event( + hostname, + "DNS_NAME", + event, + tags=tags, + context=f'{{module}} searched ZoomEye API for "{query}" and found {{event.type}}: {{event.data}}', + ) - def query(self, query): + async def query(self, query): + results = set() query_type = 0 if self.include_related else 1 url = f"{self.base_url}/domain/search?q={self.helpers.quote(query)}&type={query_type}&page=" + "{page}" - for i, j in enumerate(self.helpers.api_page_iter(url, headers=self.headers)): - results = list(self.parse_results(j)) - if results: - yield from results - if not results or i >= (self.max_pages - 1) or self.scan.stopping: - break - - def parse_results(self, r): + i = 0 + agen = self.api_page_iter(url) + try: + async for j in agen: + r = list(await self.parse_results(j)) + if r: + results.update(set(r)) + if not r or i >= (self.max_pages - 1): + break + i += 1 + finally: + await agen.aclose() + return results + + async def parse_results(self, r): + results = set() for entry in r.get("list", []): - yield entry["name"] + results.add(entry["name"]) + return results diff --git a/bbot/presets/baddns-intense.yml b/bbot/presets/baddns-intense.yml new file mode 100644 index 0000000000..8afeebd3d9 --- /dev/null +++ b/bbot/presets/baddns-intense.yml @@ -0,0 +1,12 @@ +description: Run all baddns modules and submodules. + + +modules: + - baddns + - baddns_zone + - baddns_direct + +config: + modules: + baddns: + enabled_submodules: [CNAME,references,MX,NS,TXT] diff --git a/bbot/presets/cloud-enum.yml b/bbot/presets/cloud-enum.yml new file mode 100644 index 0000000000..6f19c5c35c --- /dev/null +++ b/bbot/presets/cloud-enum.yml @@ -0,0 +1,7 @@ +description: Enumerate cloud resources such as storage buckets, etc. 
+ +include: + - subdomain-enum + +flags: + - cloud-enum diff --git a/bbot/presets/code-enum.yml b/bbot/presets/code-enum.yml new file mode 100644 index 0000000000..8e91e56745 --- /dev/null +++ b/bbot/presets/code-enum.yml @@ -0,0 +1,4 @@ +description: Enumerate Git repositories, Docker images, etc. + +flags: + - code-enum diff --git a/bbot/presets/email-enum.yml b/bbot/presets/email-enum.yml new file mode 100644 index 0000000000..a5ffd6e3c8 --- /dev/null +++ b/bbot/presets/email-enum.yml @@ -0,0 +1,7 @@ +description: Enumerate email addresses from APIs, web crawling, etc. + +flags: + - email-enum + +output_modules: + - emails diff --git a/bbot/presets/fast.yml b/bbot/presets/fast.yml new file mode 100644 index 0000000000..675082b2a7 --- /dev/null +++ b/bbot/presets/fast.yml @@ -0,0 +1,16 @@ +description: Scan only the provided targets as fast as possible - no extra discovery + +exclude_modules: + - excavate + +config: + # only scan the exact targets specified + scope: + strict: true + # speed up dns resolution by doing A/AAAA only - not MX/NS/SRV/etc + dns: + minimal: true + # essential speculation only + modules: + speculate: + essential_only: true diff --git a/bbot/presets/kitchen-sink.yml b/bbot/presets/kitchen-sink.yml new file mode 100644 index 0000000000..c01039099e --- /dev/null +++ b/bbot/presets/kitchen-sink.yml @@ -0,0 +1,18 @@ +description: Everything everywhere all at once + +include: + - subdomain-enum + - cloud-enum + - code-enum + - email-enum + - spider + - web-basic + - paramminer + - dirbust-light + - web-screenshots + - baddns-intense + +config: + modules: + baddns: + enable_references: True diff --git a/bbot/presets/nuclei/nuclei-budget.yml b/bbot/presets/nuclei/nuclei-budget.yml new file mode 100644 index 0000000000..d4ac5c8163 --- /dev/null +++ b/bbot/presets/nuclei/nuclei-budget.yml @@ -0,0 +1,19 @@ +description: Run nuclei scans against all discovered targets, using budget mode to look for low-hanging fruit with a greatly reduced number of requests + +modules: + - httpx + - nuclei + - portfilter + +config: + modules: + nuclei: + mode: budget + budget: 10 + directory_only: true # Do not run nuclei on individual non-directory URLs + +conditions: + - | + {% if config.web.spider_distance != 0 %} + {{ warn("Running nuclei with spider enabled is generally not recommended. Consider removing 'spider' preset.") }} + {% endif %} diff --git a/bbot/presets/nuclei/nuclei-intense.yml b/bbot/presets/nuclei/nuclei-intense.yml new file mode 100644 index 0000000000..27f833c387 --- /dev/null +++ b/bbot/presets/nuclei/nuclei-intense.yml @@ -0,0 +1,28 @@ +description: Run nuclei scans against all discovered targets, allowing for spidering, running against ALL URLs, and using additional discovery modules. + +modules: + - httpx + - nuclei + - robots + - urlscan + - portfilter + - wayback + +config: + modules: + nuclei: + directory_only: False # Will run nuclei on ALL discovered URLs - Be careful! + wayback: + urls: true + +conditions: + - | + {% if config.web.spider_distance == 0 and config.modules.nuclei.directory_only == False %} + {{ warn("The 'nuclei-intense' preset turns the 'directory_only' limitation off on the nuclei module. 
To make the best use of this, you may want to enable spidering with 'spider' or 'spider-intense' preset.") }} + {% endif %} + + +# Example for also running a dirbust + +#include: +# - dirbust-light \ No newline at end of file diff --git a/bbot/presets/nuclei/nuclei-technology.yml b/bbot/presets/nuclei/nuclei-technology.yml new file mode 100644 index 0000000000..c2c4c8cf7a --- /dev/null +++ b/bbot/presets/nuclei/nuclei-technology.yml @@ -0,0 +1,23 @@ +description: Run nuclei scans against all discovered targets, running templates which match discovered technologies + +modules: + - httpx + - nuclei + - portfilter + +config: + modules: + nuclei: + mode: technology + directory_only: True # Do not run nuclei on individual non-directory URLs. This is less unsafe to disable with technology mode. + +conditions: + - | + {% if config.web.spider_distance != 0 %} + {{ warn("Running nuclei with spider enabled is generally not recommended. Consider removing 'spider' preset.") }} + {% endif %} + +# Example for also running a dirbust + +#include: +# - dirbust-light diff --git a/bbot/presets/nuclei/nuclei.yml b/bbot/presets/nuclei/nuclei.yml new file mode 100644 index 0000000000..d0d77978eb --- /dev/null +++ b/bbot/presets/nuclei/nuclei.yml @@ -0,0 +1,34 @@ +description: Run nuclei scans against all discovered targets + +modules: + - httpx + - nuclei + - portfilter + +config: + modules: + nuclei: + directory_only: True # Do not run nuclei on individual non-directory URLs + + +conditions: + - | + {% if config.web.spider_distance != 0 %} + {{ warn("Running nuclei with spider enabled is generally not recommended. Consider removing 'spider' preset.") }} + {% endif %} + + + +# Additional Examples: + +# Slowing Down Scan + +#config: +# modules: +# nuclei: +# ratelimit: 10 +# concurrency: 5 + + + + diff --git a/bbot/presets/spider-intense.yml b/bbot/presets/spider-intense.yml new file mode 100644 index 0000000000..18aff50a03 --- /dev/null +++ b/bbot/presets/spider-intense.yml @@ -0,0 +1,13 @@ +description: Recursive web spider with more aggressive settings + +include: + - spider + +config: + web: + # how many links to follow in a row + spider_distance: 4 + # don't follow links whose directory depth is higher than 6 + spider_depth: 6 + # maximum number of links to follow per page + spider_links_per_page: 50 diff --git a/bbot/presets/spider.yml b/bbot/presets/spider.yml new file mode 100644 index 0000000000..9e98ff4539 --- /dev/null +++ b/bbot/presets/spider.yml @@ -0,0 +1,17 @@ +description: Recursive web spider + +modules: + - httpx + +blacklist: + # Prevent spider from invalidating sessions by logging out + - "RE:/.*(sign|log)[_-]?out" + +config: + web: + # how many links to follow in a row + spider_distance: 2 + # don't follow links whose directory depth is higher than 4 + spider_depth: 4 + # maximum number of links to follow per page + spider_links_per_page: 25 diff --git a/bbot/presets/subdomain-enum.yml b/bbot/presets/subdomain-enum.yml new file mode 100644 index 0000000000..d4230c3fc6 --- /dev/null +++ b/bbot/presets/subdomain-enum.yml @@ -0,0 +1,22 @@ +description: Enumerate subdomains via APIs, brute-force + +flags: + # enable every module with the subdomain-enum flag + - subdomain-enum + +output_modules: + # output unique subdomains to TXT file + - subdomains + +config: + dns: + threads: 25 + brute_threads: 1000 + # put your API keys here + # modules: + # github: + # api_key: "" + # chaos: + # api_key: "" + # securitytrails: + # api_key: "" diff --git a/bbot/presets/tech-detect.yml 
b/bbot/presets/tech-detect.yml new file mode 100644 index 0000000000..5943dacae9 --- /dev/null +++ b/bbot/presets/tech-detect.yml @@ -0,0 +1,11 @@ +description: Detect technologies via Wappalyzer, Nuclei, and FingerprintX + +modules: + - nuclei + - wappalyzer + - fingerprintx + +config: + modules: + nuclei: + tags: tech diff --git a/bbot/presets/web-basic.yml b/bbot/presets/web-basic.yml new file mode 100644 index 0000000000..166d973e94 --- /dev/null +++ b/bbot/presets/web-basic.yml @@ -0,0 +1,7 @@ +description: Quick web scan + +include: + - iis-shortnames + +flags: + - web-basic diff --git a/bbot/presets/web-screenshots.yml b/bbot/presets/web-screenshots.yml new file mode 100644 index 0000000000..4641e7be3a --- /dev/null +++ b/bbot/presets/web-screenshots.yml @@ -0,0 +1,14 @@ +description: Take screenshots of webpages + +flags: + - web-screenshots + +config: + modules: + gowitness: + resolution_x: 1440 + resolution_y: 900 + # folder to output web screenshots (default is inside ~/.bbot/scans/scan_name) + output_path: "" + # whether to take screenshots of social media pages + social: True diff --git a/bbot/presets/web-thorough.yml b/bbot/presets/web-thorough.yml new file mode 100644 index 0000000000..0294614f76 --- /dev/null +++ b/bbot/presets/web-thorough.yml @@ -0,0 +1,8 @@ +description: Aggressive web scan + +include: + # include the web-basic preset + - web-basic + +flags: + - web-thorough diff --git a/bbot/presets/web/dirbust-heavy.yml b/bbot/presets/web/dirbust-heavy.yml new file mode 100644 index 0000000000..effba2554a --- /dev/null +++ b/bbot/presets/web/dirbust-heavy.yml @@ -0,0 +1,39 @@ +description: Recursive web directory brute-force (aggressive) + +include: + - spider + +flags: + - iis-shortnames + +modules: + - ffuf + - wayback + +config: + modules: + iis_shortnames: + # we exploit the shortnames vulnerability to produce URL_HINTs which are consumed by ffuf_shortnames + detect_only: False + ffuf: + depth: 3 + lines: 5000 + extensions: + - php + - asp + - aspx + - ashx + - asmx + - jsp + - jspx + - cfm + - zip + - conf + - config + - xml + - json + - yml + - yaml + # emit URLs from wayback + wayback: + urls: True diff --git a/bbot/presets/web/dirbust-light.yml b/bbot/presets/web/dirbust-light.yml new file mode 100644 index 0000000000..d088ee24ee --- /dev/null +++ b/bbot/presets/web/dirbust-light.yml @@ -0,0 +1,13 @@ +description: Basic web directory brute-force (surface-level directories only) + +include: + - iis-shortnames + +modules: + - ffuf + +config: + modules: + ffuf: + # wordlist size = 1000 + lines: 1000 diff --git a/bbot/presets/web/dotnet-audit.yml b/bbot/presets/web/dotnet-audit.yml new file mode 100644 index 0000000000..993d6dd198 --- /dev/null +++ b/bbot/presets/web/dotnet-audit.yml @@ -0,0 +1,25 @@ +description: Comprehensive scan for all IIS/.NET specific modules and module settings + + +include: + - iis-shortnames + +modules: + - httpx + - badsecrets + - ffuf_shortnames + - ffuf + - telerik + - ajaxpro + - dotnetnuke + +config: + modules: + ffuf: + extensions: asp,aspx,ashx,asmx,ascx + extensions_ignore_case: True + ffuf_shortnames: + find_subwords: True + telerik: + exploit_RAU_crypto: True + include_subdirs: True # Run against every directory, not the default first received URL per-host diff --git a/bbot/presets/web/iis-shortnames.yml b/bbot/presets/web/iis-shortnames.yml new file mode 100644 index 0000000000..bae21c040e --- /dev/null +++ b/bbot/presets/web/iis-shortnames.yml @@ -0,0 +1,10 @@ +description: Recursively enumerate IIS shortnames + +flags: + - 
iis-shortnames + +config: + modules: + iis_shortnames: + # exploit the vulnerability + detect_only: false diff --git a/bbot/presets/web/paramminer.yml b/bbot/presets/web/paramminer.yml new file mode 100644 index 0000000000..7d36e3a849 --- /dev/null +++ b/bbot/presets/web/paramminer.yml @@ -0,0 +1,12 @@ +description: Discover new web parameters via brute-force + +flags: + - web-paramminer + +modules: + - httpx + +config: + web: + spider_distance: 1 + spider_depth: 4 diff --git a/bbot/scanner/__init__.py b/bbot/scanner/__init__.py index cc993af8a4..1622f4c208 100644 --- a/bbot/scanner/__init__.py +++ b/bbot/scanner/__init__.py @@ -1 +1,2 @@ +from .preset import Preset from .scanner import Scanner diff --git a/bbot/scanner/dispatcher.py b/bbot/scanner/dispatcher.py index a06fa770e3..a9c56c2b72 100644 --- a/bbot/scanner/dispatcher.py +++ b/bbot/scanner/dispatcher.py @@ -1,3 +1,9 @@ +import logging +import traceback + +log = logging.getLogger("bbot.scanner.dispatcher") + + class Dispatcher: """ Enables custom hooks/callbacks on certain scan events @@ -6,14 +12,21 @@ class Dispatcher: def set_scan(self, scan): self.scan = scan - def on_start(self, scan): + async def on_start(self, scan): return - def on_finish(self, scan): + async def on_finish(self, scan): return - def on_status(self, status, scan_id): + async def on_status(self, status, scan_id): """ Execute an event when the scan's status is updated """ - return + self.scan.debug(f"Setting scan status to {status}") + + async def catch(self, callback, *args, **kwargs): + try: + return await callback(*args, **kwargs) + except Exception as e: + log.error(f"Error in {callback.__qualname__}(): {e}") + log.trace(traceback.format_exc()) diff --git a/bbot/scanner/manager.py b/bbot/scanner/manager.py index ec0eadbc48..4b129d5243 100644 --- a/bbot/scanner/manager.py +++ b/bbot/scanner/manager.py @@ -1,441 +1,249 @@ -import queue -import logging -import threading -from time import sleep +import asyncio from contextlib import suppress -from datetime import datetime, timedelta -from ..core.errors import ScanCancelledError, ValidationError +from bbot.modules.base import BaseInterceptModule -log = logging.getLogger("bbot.scanner.manager") - -class ScanManager: +class ScanIngress(BaseInterceptModule): """ - Manages modules and events during a scan + This is always the first intercept module in the chain, responsible for basic scope checks + + It has its own incoming queue, but will also pull events from modules' outgoing queues """ - def __init__(self, scan): - self.scan = scan - self.event_queue = queue.PriorityQueue() - self.events_distributed = set() - self.events_distributed_lock = threading.Lock() - self.events_accepted = set() - self.events_accepted_lock = threading.Lock() - self.events_resolved = dict() - self.events_resolved_lock = threading.Lock() - self.dns_resolution = self.scan.config.get("dns_resolution", False) - - def init_events(self): + watched_events = ["*"] + # accept all events regardless of scope distance + scope_distance_modifier = None + _name = "_scan_ingress" + _qsize = -1 + + @property + def priority(self): + # we are the highest priority + return -99 + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self._module_priority_weights = None + self._non_intercept_modules = None + # track incoming duplicates module-by-module (for `suppress_dupes` attribute of modules) + self.incoming_dup_tracker = set() + + async def init_events(self, events=None): """ - seed scanner with target events + Initializes events by 
seeding the scanner with target events and distributing them for further processing. + + Notes: + - This method populates the event queue with initial target events. + - It also marks the Scan object as finished with initialization by setting `_finished_init` to True. """ - self.queue_event(self.scan.root_event) - for event in self.scan.target.events: - self.scan.verbose(f"Target: {event}") - self.emit_event(event) - # force submit batches - for mod in self.scan.modules.values(): - mod._handle_batch(force=True) + if events is None: + events = self.scan.target.seeds.events + async with self.scan._acatch(self.init_events), self._task_counter.count(self.init_events): + sorted_events = sorted(events, key=lambda e: len(e.data)) + for event in [self.scan.root_event] + sorted_events: + event._dummy = False + event.web_spider_distance = 0 + event.scan = self.scan + if event.parent is None: + event.parent = self.scan.root_event + if event.module is None: + event.module = self.scan._make_dummy_module(name="TARGET", _type="TARGET") + if event != self.scan.root_event: + event.discovery_context = f"Scan {self.scan.name} seeded with " + "{event.type}: {event.data}" + self.verbose(f"Target: {event}") + await self.queue_event(event, {}) + await asyncio.sleep(0.1) + self.scan._finished_init = True + + async def handle_event(self, event, **kwargs): + # don't accept dummy events + if event._dummy: + return False, "cannot emit dummy event" + + # don't accept events with self as parent + if not event.type == "SCAN": + if event == event.get_parent(): + return False, "event's parent is itself" + if not event.discovery_context: + self.warning(f"Event {event} has no discovery context") + + # don't accept duplicates + if self.is_incoming_duplicate(event, add=True): + if not event._graph_important: + return False, "event was already emitted by its module" + else: + self.debug( + f"Event {event} was already emitted by its module, but it's graph-important so it gets a pass" + ) - def emit_event(self, event, *args, **kwargs): - quick = kwargs.pop("quick", False) - release = kwargs.get("release", True) - if quick: - try: - kwargs.pop("abort_if") - kwargs.pop("on_success_callback") - self.queue_event(event, *args, **kwargs) - finally: - if release: - with suppress(Exception): - event.module._event_semaphore.release() - else: - # don't raise an exception if the thread pool has been shutdown - with suppress(RuntimeError): - self.scan._event_thread_pool.submit_task(self.catch, self._emit_event, event, *args, **kwargs) - - def _emit_event(self, event, *args, **kwargs): - release = kwargs.pop("release", True) - emit_event = True - try: - on_success_callback = kwargs.pop("on_success_callback", None) - abort_if = kwargs.pop("abort_if", None) - log.debug(f'module "{event.module}" raised {event}') + # update event's scope distance based on its parent + event.scope_distance = event.parent.scope_distance + 1 - if event._dummy: - log.warning(f"Cannot emit dummy event: {event}") - return + # special handling of URL extensions + url_extension = getattr(event, "url_extension", None) + if url_extension is not None: + if url_extension in self.scan.url_extension_httpx_only: + event.add_tag("httpx-only") + event._omit = True - if event == event.get_source(): - log.debug(f"Omitting event with self as source: {event}") - return + # blacklist by extension + if url_extension in self.scan.url_extension_blacklist: + self.debug( + f"Blacklisting {event} because its extension (.{url_extension}) is blacklisted in the config" + ) + 
event.add_tag("blacklisted") - # DNS resolution - dns_children, dns_tags, event_whitelisted_dns, event_blacklisted_dns = self.scan.helpers.dns.resolve_event( - event - ) - event_whitelisted = event_whitelisted_dns | self.scan.whitelisted(event) - event_blacklisted = event_blacklisted_dns | self.scan.blacklisted(event) - if event.type in ("DNS_NAME", "IP_ADDRESS"): - event.tags.update(dns_tags) - if event_blacklisted: - event.tags.add("blacklisted") - - # Blacklist purging - if "blacklisted" in event.tags: - reason = "event host" - if event_blacklisted_dns: - reason = "DNS associations" - log.debug(f"Omitting due to blacklisted {reason}: {event}") - emit_event = False - - # Wait for parent event to resolve (in case its scope distance changes) - while 1: - if self.scan.stopping: - raise ScanCancelledError() - resolved = event._resolved.wait(timeout=0.1) - if resolved: - # update event's scope distance based on its parent - event.scope_distance = event.source.scope_distance + 1 - break - - # Scope shepherding - event_is_duplicate = self.is_duplicate_event(event) - event_in_report_distance = event.scope_distance <= self.scan.scope_report_distance - set_scope_distance = event.scope_distance + # main scan blacklist + event_blacklisted = self.scan.blacklisted(event) + + # reject all blacklisted events + if event_blacklisted or "blacklisted" in event.tags: + return False, "event is blacklisted" + + # Scope shepherding + # here is where we make sure in-scope events are set to their proper scope distance + if event.host: + event_whitelisted = self.scan.whitelisted(event) if event_whitelisted: - set_scope_distance = 0 - if event.host: - if (event_whitelisted or event_in_report_distance) and not event_is_duplicate: - if set_scope_distance == 0: - log.debug(f"Making {event} in-scope") - event.make_in_scope(set_scope_distance) - else: - if event.scope_distance > self.scan.scope_report_distance: - log.debug( - f"Making {event} internal because its scope_distance ({event.scope_distance}) > scope_report_distance ({self.scan.scope_report_distance})" - ) - event.make_internal() - else: - log.debug(f"Making {event} in-scope because it does not have identifying scope information") - event.make_in_scope(0) - - # now that the event is properly tagged, we can finally make decisions about it - if callable(abort_if) and abort_if(event): - log.debug(f"{event.module}: not raising event {event} due to custom criteria in abort_if()") - return - - if not self.accept_event(event): - return - - # queue the event before emitting its DNS children - if emit_event: - self.queue_event(event) - - if callable(on_success_callback): - self.catch(on_success_callback, event) - - ### Emit DNS children ### - emit_children = -1 < event.scope_distance < self.scan.dns_search_distance - # speculate DNS_NAMES and IP_ADDRESSes from other event types - source_event = event - if event.host and event.type not in ("DNS_NAME", "IP_ADDRESS", "IP_RANGE"): - source_module = self.scan.helpers._make_dummy_module("host", _type="internal") - source_event = self.scan.make_event(event.host, "DNS_NAME", module=source_module, source=event) - source_event.scope_distance = event.scope_distance - if "target" in event.tags: - source_event.tags.add("target") - if not str(event.module) == "speculate": - self.emit_event(source_event) - if self.dns_resolution and emit_children: - dns_child_events = [] - if dns_children: - for record, rdtype in dns_children: - module = self.scan.helpers.dns._get_dummy_module(rdtype) - try: - child_event = 
self.scan.make_event(record, "DNS_NAME", module=module, source=source_event) - dns_child_events.append(child_event) - except ValidationError as e: - log.warning( - f'Event validation failed for DNS child of {source_event}: "{record}" ({rdtype}): {e}' - ) - for child_event in dns_child_events: - self.emit_event(child_event) - - except ValidationError as e: - log.warning(f"Event validation failed with args={args}, kwargs={kwargs}: {e}") - import traceback - - log.debug(traceback.format_exc()) - - finally: - if release: - with suppress(Exception): - event.module._event_semaphore.release() - if emit_event: - self.scan.stats.event_emitted(event) - - def hash_event(self, event): - """ - Hash an event for duplicate detection + self.debug(f"Making {event} in-scope because its main host matches the scan target") + event.scope_distance = 0 + + # nerf event's priority if it's not in scope + event.module_priority += event.scope_distance + + @property + def non_intercept_modules(self): + if self._non_intercept_modules is None: + self._non_intercept_modules = [m for m in self.scan.modules.values() if not m._intercept] + return self._non_intercept_modules + + @property + def incoming_queues(self): + queues = [self.incoming_event_queue] + [m.outgoing_event_queue for m in self.non_intercept_modules] + return [q for q in queues if q is not False] + + @property + def module_priority_weights(self): + if not self._module_priority_weights: + # we subtract from six because lower priorities == higher weights + priorities = [5] + [6 - m.priority for m in self.non_intercept_modules] + self._module_priority_weights = priorities + return self._module_priority_weights + + async def get_incoming_event(self): + for q in self.helpers.weighted_shuffle(self.incoming_queues, self.module_priority_weights): + try: + return q.get_nowait() + except (asyncio.queues.QueueEmpty, AttributeError): + continue + raise asyncio.queues.QueueEmpty() - This is necessary because duplicate events from certain sources (e.g. DNS) - need to be allowed in order to preserve their relationship trail + def is_incoming_duplicate(self, event, add=False): """ - module_type = getattr(event.module, "_type", "") - if module_type == "DNS": - # allow duplicate events from dns resolution as long as their source event is unique - return hash((event, str(event.module), event.source_id)) - else: - return hash((event, str(event.module))) - - def is_duplicate_event(self, event, add=False): - event_hash = self.hash_event(event) - suppress_dupes = getattr(event.module, "suppress_dupes", True) - with self.events_accepted_lock: - duplicate_event = suppress_dupes and event_hash in self.events_accepted - if add: - self.events_accepted.add(event_hash) - return duplicate_event and not event._force_output - - def accept_event(self, event): - if self.is_duplicate_event(event, add=True): - log.debug(f"{event.module}: not raising duplicate event {event}") - return False - return True - - def catch(self, callback, *args, **kwargs): - """ - Wrapper to ensure error messages get surfaced to the user + Calculate whether an event is a duplicate in the context of the module that emitted it + This will return True if the event's parent module has raised the event before. 
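+ (The caller, handle_event, still lets graph-important duplicates through.)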
""" - ret = None - on_finish_callback = kwargs.pop("_on_finish_callback", None) - force = kwargs.pop("_force", False) try: - if not self.scan.stopping or force: - ret = callback(*args, **kwargs) - except ScanCancelledError as e: - log.debug(f"ScanCancelledError in {callback.__qualname__}(): {e}") - except BrokenPipeError as e: - log.debug(f"BrokenPipeError in {callback.__qualname__}(): {e}") - except Exception as e: - import traceback - - log.error(f"Error in {callback.__qualname__}(): {e}") - log.debug(traceback.format_exc()) - except KeyboardInterrupt: - log.debug(f"Interrupted") - self.scan.stop() - if callable(on_finish_callback): - try: - on_finish_callback() - except Exception as e: - import traceback + event_hash = event.module._outgoing_dedup_hash(event) + except AttributeError: + module_name = str(getattr(event, "module", "")) + event_hash = hash((event, module_name)) + is_dup = event_hash in self.incoming_dup_tracker + if add: + self.incoming_dup_tracker.add(event_hash) + suppress_dupes = getattr(event.module, "suppress_dupes", True) + if suppress_dupes and is_dup: + return True + return False - log.error( - f"Error in on_finish_callback {on_finish_callback.__qualname__}() after {callback.__qualname__}(): {e}" - ) - log.debug(traceback.format_exc()) - return ret - def queue_event(self, *args, **kwargs): - """ - Queue event with manager - """ - event = self.scan.make_event(*args, **kwargs) - self.event_queue.put(event) +class ScanEgress(BaseInterceptModule): + """ + This is always the last intercept module in the chain, responsible for executing and acting on the + `abort_if` and `on_success_callback` functions. + """ + + watched_events = ["*"] + # accept all events regardless of scope distance + scope_distance_modifier = None + _name = "_scan_egress" + + @property + def priority(self): + # we are the lowest priority + return 99 + + async def handle_event(self, event, **kwargs): + abort_if = kwargs.pop("abort_if", None) + on_success_callback = kwargs.pop("on_success_callback", None) + + # omit certain event types + if event.type in self.scan.omitted_event_types: + if "target" in event.tags: + self.debug(f"Allowing omitted event: {event} because it's a target") + else: + event._omit = True + + # make event internal if it's above our configured report distance + event_in_report_distance = event.scope_distance <= self.scan.scope_report_distance + event_will_be_output = event.always_emit or event_in_report_distance - def distribute_event(self, event): + if not event_will_be_output: + self.debug( + f"Making {event} internal because its scope_distance ({event.scope_distance}) > scope_report_distance ({self.scan.scope_report_distance})" + ) + event.internal = True + + if event.type in self.scan.omitted_event_types: + self.debug(f"Omitting {event} because its type is omitted in the config") + event._omit = True + + # if we discovered something interesting from an internal event, + # make sure we preserve its chain of parents + parent = event.parent + event_is_graph_worthy = (not event.internal) or event._graph_important + parent_is_graph_worthy = (not parent.internal) or parent._graph_important + if event_is_graph_worthy and not parent_is_graph_worthy: + parent_in_report_distance = parent.scope_distance <= self.scan.scope_report_distance + if parent_in_report_distance: + parent.internal = False + if not parent._graph_important: + parent._graph_important = True + self.debug(f"Re-queuing internal event {parent} with parent {event} to prevent graph orphan") + await self.emit_event(parent) + 
+ if event._suppress_chain_dupes: + for parent in event.get_parents(): + if parent == event: + return False, f"an identical parent {event} was found, and _suppress_chain_dupes=True" + + # custom callback - abort event emission if it returns true + abort_result = False + if callable(abort_if): + async with self.scan._acatch(context=abort_if): + abort_result = await self.scan.helpers.execute_sync_or_async(abort_if, event) + msg = f"{event.module}: not raising event {event} due to custom criteria in abort_if()" + with suppress(ValueError, TypeError): + abort_result, reason = abort_result + msg += f": {reason}" + if abort_result: + return False, msg + + # run success callback before distributing event (so it can add tags, etc.) + if callable(on_success_callback): + async with self.scan._acatch(context=on_success_callback): + await self.scan.helpers.execute_sync_or_async(on_success_callback, event) + + async def forward_event(self, event, kwargs): """ Queue event with modules """ - # TODO: save memory by removing reference to source object (this causes bugs) - # if not event._internal: - # event._source = None - - dup = False - event_hash = hash(event) - # with self.events_distributed_lock: - if event_hash in self.events_distributed: - self.scan.verbose(f"{event.module}: Duplicate event: {event}") - dup = True - else: - self.events_distributed.add(event_hash) # absorb event into the word cloud if it's in scope - if not dup and -1 < event.scope_distance < 1: + if -1 < event.scope_distance < 1: self.scan.word_cloud.absorb_event(event) - stats_recorded = False - for mod in self.scan.modules.values(): - if not dup or mod.accept_dupes: - event_within_scope_distance = -1 < event.scope_distance <= self.scan.scope_search_distance - event_within_report_distance = -1 < event.scope_distance <= self.scan.scope_report_distance - if mod._type == "output": - if event_within_report_distance or (event._force_output and mod.emit_graph_trail): - mod.queue_event(event) - if not stats_recorded: - stats_recorded = True - self.scan.stats.event_produced(event) - else: - if event_within_scope_distance: - mod.queue_event(event) - - def loop_until_finished(self): - - counter = 0 - event_counter = 0 - timedelta_2secs = timedelta(seconds=2) - last_log_time = datetime.now() - - reported = False - try: - self.scan.dispatcher.on_start(self.scan) - - # watch for newly-generated events - while 1: - - if self.scan.status == "ABORTING": - while 1: - try: - # Empty event queue - self.event_queue.get_nowait() - except queue.Empty: - break - break - - # print status every 2 seconds - now = datetime.now() - time_since_last_log = now - last_log_time - if time_since_last_log > timedelta_2secs: - self.modules_status(_log=True, passes=1) - last_log_time = now - - try: - event = self.event_queue.get_nowait() - event_counter += 1 - except queue.Empty: - finished = self.modules_status().get("finished", False) - if finished and reported: - break - # If the scan finished - if finished: - # If new events were generated in the last iteration - if event_counter > 0: - self.scan.status = "FINISHING" - # Trigger .finished() on every module and start over - for mod in self.scan.modules.values(): - mod.queue_event("FINISHED") - event_counter = 0 - elif not reported: - # Run .report() on every module and start over - for mod in self.scan.modules.values(): - self.catch(mod.report) - reported = True - else: - # Otherwise stop the scan if no new events were generated in this iteration - break - else: - # save on CPU - sleep(0.01) - counter += 1 - continue 
- - # distribute event to modules - self.distribute_event(event) - - except KeyboardInterrupt: - self.scan.stop() - - except Exception: - import traceback - - log.critical(traceback.format_exc()) - - def modules_status(self, _log=False, passes=None): - - # If scan looks to be finished, check an additional five times to ensure that it really is - # There is a tiny chance of a race condition, which this helps to avoid - if passes is None: - passes = 5 - else: - passes = max(1, int(passes)) - - finished = True - while passes > 0: - - status = {"modules": {}, "scan": self.scan.status_detailed} - - for num_events in status["scan"]["queued_events"].values(): - if num_events > 0: - finished = False - - for num_tasks in status["scan"]["queued_tasks"].values(): - if num_tasks > 0: - finished = False - - for m in self.scan.modules.values(): - mod_status = m.status - if mod_status["running"]: - finished = False - status["modules"][m.name] = mod_status - - for mod in self.scan.modules.values(): - if mod.errored and mod.event_queue not in [None, False]: - with suppress(Exception): - mod.set_error_state() - - passes -= 1 - if finished and passes > 0: - sleep(0.1) - else: - break - - status["finished"] = finished - - modules_running = [m for m, s in status["modules"].items() if s["running"]] - modules_errored = [m for m, s in status["modules"].items() if s["errored"]] - - if _log: - events_queued = [ - (m, (s["events"]["incoming"], s["events"]["outgoing"])) for m, s in status["modules"].items() - ] - events_queued.sort(key=lambda x: sum(x[-1]), reverse=True) - events_queued = [(m, q) for m, q in events_queued if sum(q) > 0][:5] - events_queued_str = "" - if events_queued: - events_queued_str = " (" + ", ".join([f"{m}: I:{i:,} O:{o:,}" for m, (i, o) in events_queued]) + ")" - tasks_queued = [(m, s["tasks"]["total"]) for m, s in status["modules"].items()] - tasks_queued.sort(key=lambda x: x[-1], reverse=True) - tasks_queued = [(m, q) for m, q in tasks_queued if q > 0][:5] - tasks_queued_str = "" - if tasks_queued: - tasks_queued_str = " (" + ", ".join([f"{m}: {q:,}" for m, q in tasks_queued]) + ")" - - num_events_queued = sum([sum(m[-1]) for m in events_queued]) - self.scan.hugeverbose(f"Events queued: {num_events_queued:,}{events_queued_str}") - - num_tasks_queued = sum([m[-1] for m in tasks_queued]) - self.scan.hugeverbose(f"Module tasks queued: {num_tasks_queued:,}{tasks_queued_str}") - - num_scan_tasks = status["scan"]["queued_tasks"]["total"] - dns_tasks = status["scan"]["queued_tasks"]["dns"] - event_tasks = status["scan"]["queued_tasks"]["event"] - main_tasks = status["scan"]["queued_tasks"]["main"] - internal_tasks = status["scan"]["queued_tasks"]["internal"] - manager_events_queued = status["scan"]["queued_events"]["manager"] - self.scan.hugeverbose( - f"Scan tasks queued: {num_scan_tasks:,} (Main: {main_tasks:,}, Events: {event_tasks:,} waiting, {manager_events_queued:,} in queue, DNS: {dns_tasks:,}, Internal: {internal_tasks:,})" - ) - if modules_running: - self.scan.hugeverbose( - f'Modules running: {len(modules_running):,} ({", ".join([m for m in modules_running])})' - ) - if modules_errored: - self.scan.hugeverbose( - f'Modules errored: {len(modules_errored):,} ({", ".join([m for m in modules_errored])})' - ) - - status.update({"modules_running": len(modules_running), "modules_errored": len(modules_errored)}) - - return status + for mod in self.scan.modules.values(): + # don't distribute events to intercept modules + if not mod._intercept: + await mod.queue_event(event) diff --git 
a/bbot/scanner/preset/__init__.py b/bbot/scanner/preset/__init__.py new file mode 100644 index 0000000000..a6fbc24bb3 --- /dev/null +++ b/bbot/scanner/preset/__init__.py @@ -0,0 +1 @@ +from .preset import Preset diff --git a/bbot/scanner/preset/args.py b/bbot/scanner/preset/args.py new file mode 100644 index 0000000000..a1bf8a693c --- /dev/null +++ b/bbot/scanner/preset/args.py @@ -0,0 +1,427 @@ +import re +import logging +import argparse +from omegaconf import OmegaConf + +from bbot.errors import * +from bbot.core.helpers.misc import chain_lists, get_closest_match, get_keys_in_dot_syntax + +log = logging.getLogger("bbot.presets.args") + + +class BBOTArgs: + # module config options to exclude from validation + exclude_from_validation = re.compile(r".*modules\.[a-z0-9_]+\.(?:batch_size|module_threads)$") + + scan_examples = [ + ( + "Subdomains", + "Perform a full subdomain enumeration on evilcorp.com", + "bbot -t evilcorp.com -p subdomain-enum", + ), + ( + "Subdomains (passive only)", + "Perform a passive-only subdomain enumeration on evilcorp.com", + "bbot -t evilcorp.com -p subdomain-enum -rf passive", + ), + ( + "Subdomains + port scan + web screenshots", + "Port-scan every subdomain, screenshot every webpage, output to current directory", + "bbot -t evilcorp.com -p subdomain-enum -m portscan gowitness -n my_scan -o .", + ), + ( + "Subdomains + basic web scan", + "A basic web scan includes wappalyzer, robots.txt, and other non-intrusive web modules", + "bbot -t evilcorp.com -p subdomain-enum web-basic", + ), + ( + "Web spider", + "Crawl www.evilcorp.com up to a max depth of 2, automatically extracting emails, secrets, etc.", + "bbot -t www.evilcorp.com -p spider -c web.spider_distance=2 web.spider_depth=2", + ), + ( + "Everything everywhere all at once", + "Subdomains, emails, cloud buckets, port scan, basic web, web screenshots, nuclei", + "bbot -t evilcorp.com -p kitchen-sink", + ), + ] + + usage_examples = [ + ( + "List modules", + "", + "bbot -l", + ), + ( + "List output modules", + "", + "bbot -lo", + ), + ( + "List presets", + "", + "bbot -lp", + ), + ( + "List flags", + "", + "bbot -lf", + ), + ] + + epilog = "EXAMPLES\n" + for example in (scan_examples, usage_examples): + for title, description, command in example: + epilog += f"\n {title}:\n {command}\n" + + def __init__(self, preset): + self.preset = preset + self._config = None + + self.parser = self.create_parser() + self._parsed = None + + @property + def parsed(self): + if self._parsed is None: + self._parsed = self.parser.parse_args() + self.sanitize_args() + return self._parsed + + def preset_from_args(self): + # the order here is important + # first we make the preset + args_preset = self.preset.__class__( + *self.parsed.targets, + whitelist=self.parsed.whitelist, + blacklist=self.parsed.blacklist, + name="args_preset", + ) + + # then we load requested preset + # this is important so we can load custom module directories, pull in custom flags, module config options, etc. 
+ for preset_arg in self.parsed.preset: + try: + args_preset.include_preset(preset_arg) + except BBOTArgumentError: + raise + except Exception as e: + raise BBOTArgumentError(f'Error parsing preset "{preset_arg}": {e}') + + # then we set verbosity levels (so if the user enables -d they can see debug output) + if self.parsed.silent: + args_preset.silent = True + if self.parsed.verbose: + args_preset.verbose = True + if self.parsed.debug: + args_preset.debug = True + + # modules + flags + args_preset.exclude_modules.update(set(self.parsed.exclude_modules)) + args_preset.exclude_flags.update(set(self.parsed.exclude_flags)) + args_preset.require_flags.update(set(self.parsed.require_flags)) + args_preset.explicit_scan_modules.update(set(self.parsed.modules)) + args_preset.explicit_output_modules.update(set(self.parsed.output_modules)) + args_preset.flags.update(set(self.parsed.flags)) + + # output + if self.parsed.json: + args_preset.core.merge_custom({"modules": {"stdout": {"format": "json"}}}) + if self.parsed.brief: + args_preset.core.merge_custom( + {"modules": {"stdout": {"event_fields": ["type", "scope_description", "data"]}}} + ) + if self.parsed.event_types: + args_preset.core.merge_custom({"modules": {"stdout": {"event_types": self.parsed.event_types}}}) + if self.parsed.exclude_cdn: + args_preset.explicit_scan_modules.add("portfilter") + + # dependencies + deps_config = args_preset.core.custom_config.get("deps", {}) + if self.parsed.retry_deps: + deps_config["behavior"] = "retry_failed" + elif self.parsed.force_deps: + deps_config["behavior"] = "force_install" + elif self.parsed.no_deps: + deps_config["behavior"] = "disable" + elif self.parsed.ignore_failed_deps: + deps_config["behavior"] = "ignore_failed" + if deps_config: + args_preset.core.merge_custom({"deps": deps_config}) + + # other scan options + if self.parsed.name is not None: + args_preset.scan_name = self.parsed.name + if self.parsed.output_dir is not None: + args_preset.output_dir = self.parsed.output_dir + if self.parsed.force: + args_preset.force_start = self.parsed.force + + if self.parsed.proxy: + args_preset.core.merge_custom({"web": {"http_proxy": self.parsed.proxy}}) + + if self.parsed.custom_headers: + args_preset.core.merge_custom({"web": {"http_headers": self.parsed.custom_headers}}) + + if self.parsed.custom_yara_rules: + args_preset.core.merge_custom( + {"modules": {"excavate": {"custom_yara_rules": self.parsed.custom_yara_rules}}} + ) + + # Check if both user_agent and user_agent_suffix are set. 
If so combine them and merge into the config + if self.parsed.user_agent and self.parsed.user_agent_suffix: + modified_user_agent = f"{self.parsed.user_agent} {self.parsed.user_agent_suffix}" + args_preset.core.merge_custom({"web": {"user_agent": modified_user_agent}}) + + # If only user_agent_suffix is set, retrieve the existing user_agent from the merged config and append the suffix + elif self.parsed.user_agent_suffix: + existing_user_agent = args_preset.core.config.get("web", {}).get("user_agent", "") + modified_user_agent = f"{existing_user_agent} {self.parsed.user_agent_suffix}" + args_preset.core.merge_custom({"web": {"user_agent": modified_user_agent}}) + + # If only user_agent is set, merge it directly + elif self.parsed.user_agent: + args_preset.core.merge_custom({"web": {"user_agent": self.parsed.user_agent}}) + + # CLI config options (dot-syntax) + for config_arg in self.parsed.config: + try: + # if that fails, try to parse as key=value syntax + args_preset.core.merge_custom(OmegaConf.from_cli([config_arg])) + except Exception as e: + raise BBOTArgumentError(f'Error parsing command-line config option: "{config_arg}": {e}') + + # strict scope + if self.parsed.strict_scope: + args_preset.core.merge_custom({"scope": {"strict": True}}) + + return args_preset + + def create_parser(self, *args, **kwargs): + kwargs.update( + { + "description": "Bighuge BLS OSINT Tool", + "formatter_class": argparse.RawTextHelpFormatter, + "epilog": self.epilog, + } + ) + p = argparse.ArgumentParser(*args, **kwargs) + + target = p.add_argument_group(title="Target") + target.add_argument( + "-t", "--targets", nargs="+", default=[], help="Targets to seed the scan", metavar="TARGET" + ) + target.add_argument( + "-w", + "--whitelist", + nargs="+", + default=None, + help="What's considered in-scope (by default it's the same as --targets)", + ) + target.add_argument("-b", "--blacklist", nargs="+", default=[], help="Don't touch these things") + target.add_argument( + "--strict-scope", + action="store_true", + help="Don't consider subdomains of target/whitelist to be in-scope", + ) + presets = p.add_argument_group(title="Presets") + presets.add_argument( + "-p", + "--preset", + nargs="*", + help="Enable BBOT preset(s)", + metavar="PRESET", + default=[], + ) + presets.add_argument( + "-c", + "--config", + nargs="*", + help="Custom config options in key=value format: e.g. 'modules.shodan.api_key=1234'", + metavar="CONFIG", + default=[], + ) + presets.add_argument("-lp", "--list-presets", action="store_true", help="List available presets.") + + modules = p.add_argument_group(title="Modules") + modules.add_argument( + "-m", + "--modules", + nargs="+", + default=[], + help=f"Modules to enable. Choices: {','.join(sorted(self.preset.module_loader.scan_module_choices))}", + metavar="MODULE", + ) + modules.add_argument("-l", "--list-modules", action="store_true", help="List available modules.") + modules.add_argument( + "-lmo", "--list-module-options", action="store_true", help="Show all module config options" + ) + modules.add_argument( + "-em", "--exclude-modules", nargs="+", default=[], help="Exclude these modules.", metavar="MODULE" + ) + modules.add_argument( + "-f", + "--flags", + nargs="+", + default=[], + help=f"Enable modules by flag. 
Choices: {','.join(sorted(self.preset.module_loader.flag_choices))}", + metavar="FLAG", + ) + modules.add_argument("-lf", "--list-flags", action="store_true", help="List available flags.") + modules.add_argument( + "-rf", + "--require-flags", + nargs="+", + default=[], + help="Only enable modules with these flags (e.g. -rf passive)", + metavar="FLAG", + ) + modules.add_argument( + "-ef", + "--exclude-flags", + nargs="+", + default=[], + help="Disable modules with these flags. (e.g. -ef aggressive)", + metavar="FLAG", + ) + modules.add_argument("--allow-deadly", action="store_true", help="Enable the use of highly aggressive modules") + + scan = p.add_argument_group(title="Scan") + scan.add_argument("-n", "--name", help="Name of scan (default: random)", metavar="SCAN_NAME") + scan.add_argument("-v", "--verbose", action="store_true", help="Be more verbose") + scan.add_argument("-d", "--debug", action="store_true", help="Enable debugging") + scan.add_argument("-s", "--silent", action="store_true", help="Be quiet") + scan.add_argument( + "--force", + action="store_true", + help="Run scan even in the case of condition violations or failed module setups", + ) + scan.add_argument("-y", "--yes", action="store_true", help="Skip scan confirmation prompt") + scan.add_argument( + "--fast-mode", + action="store_true", + help="Scan only the provided targets as fast as possible, with no extra discovery", + ) + scan.add_argument("--dry-run", action="store_true", help="Abort before executing scan") + scan.add_argument( + "--current-preset", + action="store_true", + help="Show the current preset in YAML format", + ) + scan.add_argument( + "--current-preset-full", + action="store_true", + help="Show the current preset in its full form, including defaults", + ) + + output = p.add_argument_group(title="Output") + output.add_argument( + "-o", + "--output-dir", + help="Directory to output scan results", + metavar="DIR", + ) + output.add_argument( + "-om", + "--output-modules", + nargs="+", + default=[], + help=f"Output module(s). 
Choices: {','.join(sorted(self.preset.module_loader.output_module_choices))}", + metavar="MODULE", + ) + output.add_argument("-lo", "--list-output-modules", action="store_true", help="List available output modules") + output.add_argument("--json", "-j", action="store_true", help="Output scan data in JSON format") + output.add_argument("--brief", "-br", action="store_true", help="Output only the data itself") + output.add_argument("--event-types", nargs="+", default=[], help="Choose which event types to display") + output.add_argument( + "--exclude-cdn", + "-ec", + action="store_true", + help="Filter out unwanted open ports on CDNs/WAFs (80,443 only)", + ) + + deps = p.add_argument_group( + title="Module dependencies", description="Control how modules install their dependencies" + ) + g2 = deps.add_mutually_exclusive_group() + g2.add_argument("--no-deps", action="store_true", help="Don't install module dependencies") + g2.add_argument("--force-deps", action="store_true", help="Force install all module dependencies") + g2.add_argument("--retry-deps", action="store_true", help="Try again to install failed module dependencies") + g2.add_argument( + "--ignore-failed-deps", action="store_true", help="Run modules even if they have failed dependencies" + ) + g2.add_argument("--install-all-deps", action="store_true", help="Install dependencies for all modules") + + misc = p.add_argument_group(title="Misc") + misc.add_argument("--version", action="store_true", help="show BBOT version and exit") + misc.add_argument("--proxy", help="Use this proxy for all HTTP requests", metavar="HTTP_PROXY") + misc.add_argument( + "-H", + "--custom-headers", + nargs="+", + default=[], + help="List of custom headers as key value pairs (header=value).", + ) + misc.add_argument("--custom-yara-rules", "-cy", help="Add custom yara rules to excavate") + + misc.add_argument("--user-agent", "-ua", help="Set the user-agent for all HTTP requests") + misc.add_argument("--user-agent-suffix", "-uas", help=argparse.SUPPRESS, metavar="SUFFIX", default=None) + return p + + def sanitize_args(self): + # silent implies -y + if self.parsed.silent: + self.parsed.yes = True + # chain_lists allows either comma-separated or space-separated lists + self.parsed.modules = chain_lists(self.parsed.modules) + self.parsed.exclude_modules = chain_lists(self.parsed.exclude_modules) + self.parsed.output_modules = chain_lists(self.parsed.output_modules) + self.parsed.targets = chain_lists( + self.parsed.targets, try_files=True, msg="Reading targets from file: {filename}" + ) + if self.parsed.whitelist is not None: + self.parsed.whitelist = chain_lists( + self.parsed.whitelist, try_files=True, msg="Reading whitelist from file: {filename}" + ) + self.parsed.blacklist = chain_lists( + self.parsed.blacklist, try_files=True, msg="Reading blacklist from file: {filename}" + ) + self.parsed.flags = chain_lists(self.parsed.flags) + self.parsed.exclude_flags = chain_lists(self.parsed.exclude_flags) + self.parsed.require_flags = chain_lists(self.parsed.require_flags) + self.parsed.event_types = [t.upper() for t in chain_lists(self.parsed.event_types)] + + # Custom Header Parsing / Validation + custom_headers_dict = {} + custom_header_example = "Example: --custom-headers foo=bar foo2=bar2" + + for i in self.parsed.custom_headers: + parts = i.split("=", 1) + if len(parts) != 2: + raise ValidationError(f"Custom headers not formatted correctly (missing '='). 
{custom_header_example}") + k, v = parts + if not k or not v: + raise ValidationError( + f"Custom headers not formatted correctly (missing header name or value). {custom_header_example}" + ) + custom_headers_dict[k] = v + self.parsed.custom_headers = custom_headers_dict + + # --fast-mode + if self.parsed.fast_mode: + self.parsed.preset += ["fast"] + + def validate(self): + # validate config options + sentinel = object() + all_options = set(get_keys_in_dot_syntax(self.preset.core.default_config)) + for c in self.parsed.config: + c = c.split("=")[0].strip() + v = OmegaConf.select(self.preset.core.default_config, c, default=sentinel) + # if option isn't in the default config + if v is sentinel: + # skip if it's excluded from validation + if self.exclude_from_validation.match(c): + continue + # otherwise, ensure it exists as a module option + raise ValidationError(get_closest_match(c, all_options, msg="config option")) diff --git a/bbot/scanner/preset/conditions.py b/bbot/scanner/preset/conditions.py new file mode 100644 index 0000000000..261a5c76ee --- /dev/null +++ b/bbot/scanner/preset/conditions.py @@ -0,0 +1,54 @@ +import logging + +from bbot.errors import * + +log = logging.getLogger("bbot.preset.conditions") + +JINJA_ENV = None + + +class ConditionEvaluator: + def __init__(self, preset): + self.preset = preset + + @property + def context(self): + return { + "preset": self.preset, + "config": self.preset.config, + "abort": self.abort, + "warn": self.warn, + } + + def abort(self, message): + if not self.preset.force_start: + raise PresetAbortError(message) + + def warn(self, message): + log.warning(message) + + def evaluate(self): + context = self.context + already_evaluated = set() + for preset_name, condition in self.preset.conditions: + condition_str = str(condition) + if condition_str not in already_evaluated: + already_evaluated.add(condition_str) + try: + self.check_condition(condition_str, context) + except PresetAbortError as e: + raise PresetAbortError(f'Preset "{preset_name}" requested abort: {e} (--force to override)') + + @property + def jinja_env(self): + from jinja2.sandbox import SandboxedEnvironment + + global JINJA_ENV + if JINJA_ENV is None: + JINJA_ENV = SandboxedEnvironment() + return JINJA_ENV + + def check_condition(self, condition_str, context): + log.debug(f'Evaluating condition "{repr(condition_str)}"') + template = self.jinja_env.from_string(condition_str) + template.render(context) diff --git a/bbot/scanner/preset/environ.py b/bbot/scanner/preset/environ.py new file mode 100644 index 0000000000..6dc5d8adae --- /dev/null +++ b/bbot/scanner/preset/environ.py @@ -0,0 +1,141 @@ +import os +import sys +import omegaconf +from pathlib import Path + +from bbot.core.helpers.misc import cpu_architecture, os_platform, os_platform_friendly + + +REQUESTS_PATCHED = False + + +def increase_limit(new_limit): + try: + import resource + + # Get current limit + soft_limit, hard_limit = resource.getrlimit(resource.RLIMIT_NOFILE) + + new_limit = min(new_limit, hard_limit) + + # Attempt to set new limit + resource.setrlimit(resource.RLIMIT_NOFILE, (new_limit, hard_limit)) + except Exception as e: + sys.stderr.write(f"Failed to set new ulimit: {e}\n") + + +increase_limit(65535) + + +# Custom custom omegaconf resolver to get environment variables +def env_resolver(env_name, default=None): + return os.getenv(env_name, default) + + +def add_to_path(v, k="PATH", environ=None): + """ + Add an entry to a colon-separated PATH variable. 
+ If it's already contained in the value, shift it to be in first position. + """ + if environ is None: + environ = os.environ + var_list = os.environ.get(k, "").split(":") + deduped_var_list = [] + for _ in var_list: + if _ != v and _ not in deduped_var_list: + deduped_var_list.append(_) + deduped_var_list = [v] + deduped_var_list + new_var_str = ":".join(deduped_var_list).strip(":") + environ[k] = new_var_str + + +# if we're running in a virtual environment, make sure to include its /bin in PATH +if sys.prefix != sys.base_prefix: + bin_dir = str(Path(sys.prefix) / "bin") + add_to_path(bin_dir) + +# add ~/.local/bin to PATH +local_bin_dir = str(Path.home() / ".local" / "bin") +add_to_path(local_bin_dir) + + +# Register the new resolver +# this allows you to substitute environment variables in your config like "${env:PATH}"" +omegaconf.OmegaConf.register_new_resolver("env", env_resolver) + + +class BBOTEnviron: + def __init__(self, preset): + self.preset = preset + + def flatten_config(self, config, base="bbot"): + """ + Flatten a JSON-like config into a list of environment variables: + {"modules": [{"httpx": {"timeout": 5}}]} --> "BBOT_MODULES_HTTPX_TIMEOUT=5" + """ + if type(config) == omegaconf.dictconfig.DictConfig: + for k, v in config.items(): + new_base = f"{base}_{k}" + if type(v) == omegaconf.dictconfig.DictConfig: + yield from self.flatten_config(v, base=new_base) + elif type(v) != omegaconf.listconfig.ListConfig: + yield (new_base.upper(), str(v)) + + def prepare(self): + """ + Sync config to OS environment variables + """ + environ = dict(os.environ) + + # ensure bbot_tools + environ["BBOT_TOOLS"] = str(self.preset.core.tools_dir) + add_to_path(str(self.preset.core.tools_dir), environ=environ) + # ensure bbot_cache + environ["BBOT_CACHE"] = str(self.preset.core.cache_dir) + # ensure bbot_temp + environ["BBOT_TEMP"] = str(self.preset.core.temp_dir) + # ensure bbot_lib + environ["BBOT_LIB"] = str(self.preset.core.lib_dir) + # export LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:~/.bbot/lib/ + add_to_path(str(self.preset.core.lib_dir), k="LD_LIBRARY_PATH", environ=environ) + + # platform variables + environ["BBOT_OS_PLATFORM"] = os_platform() + environ["BBOT_OS"] = os_platform_friendly() + environ["BBOT_CPU_ARCH"] = cpu_architecture() + + # copy config to environment + bbot_environ = self.flatten_config(self.preset.config) + environ.update(bbot_environ) + + # handle HTTP proxy + http_proxy = self.preset.config.get("web", {}).get("http_proxy", "") + if http_proxy: + environ["HTTP_PROXY"] = http_proxy + environ["HTTPS_PROXY"] = http_proxy + else: + environ.pop("HTTP_PROXY", None) + environ.pop("HTTPS_PROXY", None) + + # ssl verification + import urllib3 + + urllib3.disable_warnings() + ssl_verify = self.preset.config.get("ssl_verify", False) + + global REQUESTS_PATCHED + if not ssl_verify and not REQUESTS_PATCHED: + REQUESTS_PATCHED = True + import requests + import functools + + requests.adapters.BaseAdapter.send = functools.partialmethod( + requests.adapters.BaseAdapter.send, verify=False + ) + requests.adapters.HTTPAdapter.send = functools.partialmethod( + requests.adapters.HTTPAdapter.send, verify=False + ) + requests.Session.request = functools.partialmethod(requests.Session.request, verify=False) + requests.request = functools.partial(requests.request, verify=False) + + return environ diff --git a/bbot/scanner/preset/path.py b/bbot/scanner/preset/path.py new file mode 100644 index 0000000000..9b84566124 --- /dev/null +++ b/bbot/scanner/preset/path.py @@ -0,0 +1,68 @@ +import logging 
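+ # For reference, flatten_config() in environ.py above maps nested config keys to
+ # BBOT_* environment variables; a minimal sketch of the transformation, assuming an
+ # illustrative config (flatten_config only reads the config it is passed):
+ #   cfg = omegaconf.OmegaConf.create({"modules": {"httpx": {"timeout": 5}}, "web": {"debug": True}})
+ #   dict(BBOTEnviron(preset).flatten_config(cfg))
+ #   # -> {"BBOT_MODULES_HTTPX_TIMEOUT": "5", "BBOT_WEB_DEBUG": "True"}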
+from pathlib import Path + +from bbot.errors import * + +log = logging.getLogger("bbot.presets.path") + +DEFAULT_PRESET_PATH = Path(__file__).parent.parent.parent / "presets" + + +class PresetPath: + """ + Keeps track of where to look for preset .yaml files + """ + + def __init__(self): + self.paths = [DEFAULT_PRESET_PATH] + + def find(self, filename): + filename_path = Path(filename).resolve() + extension = filename_path.suffix.lower() + file_candidates = set() + extension_candidates = {".yaml", ".yml"} + if extension: + extension_candidates.add(extension.lower()) + else: + file_candidates.add(filename_path.stem) + for ext in extension_candidates: + file_candidates.add(f"{filename_path.stem}{ext}") + file_candidates = sorted(file_candidates) + file_candidates_str = ",".join([str(s) for s in file_candidates]) + paths_to_search = self.paths + if "/" in str(filename): + if filename_path.parent not in paths_to_search: + paths_to_search.append(filename_path.parent) + log.debug( + f"Searching for preset in {[str(p) for p in paths_to_search]}, file candidates: {file_candidates_str}" + ) + for path in paths_to_search: + for candidate in file_candidates: + for file in path.rglob(candidate): + if file.is_file(): + log.verbose(f'Found preset matching "{filename}" at {file}') + self.add_path(file.parent) + return file.resolve() + raise ValidationError( + f'Could not find preset at "{filename}" - file does not exist. Use -lp to list available presets' + ) + + def __str__(self): + return ":".join([str(s) for s in self.paths]) + + def add_path(self, path): + path = Path(path).resolve() + if path in self.paths: + return + if any(path.is_relative_to(p) for p in self.paths): + return + if not path.is_dir(): + log.debug(f'Path "{path.resolve()}" is not a directory') + return + self.paths.append(path) + + def __iter__(self): + yield from self.paths + + +PRESET_PATH = PresetPath() diff --git a/bbot/scanner/preset/preset.py b/bbot/scanner/preset/preset.py new file mode 100644 index 0000000000..1cdddfa0bd --- /dev/null +++ b/bbot/scanner/preset/preset.py @@ -0,0 +1,1004 @@ +import os +import yaml +import logging +import omegaconf +import traceback +from copy import copy +from pathlib import Path +from contextlib import suppress + +from .path import PRESET_PATH + +from bbot.errors import * +from bbot.core import CORE +from bbot.core.helpers.misc import make_table, mkdir, get_closest_match + + +log = logging.getLogger("bbot.presets") + + +_preset_cache = {} + + +# cache default presets to prevent having to reload from disk +DEFAULT_PRESETS = None + + +class BasePreset(type): + def __call__(cls, *args, include=None, presets=None, name=None, description=None, _exclude=None, **kwargs): + """ + Handles loading of "included" presets, while preserving the proper load order + + Overriding __call__() allows us to reuse the logic from .merge() without duplicating functionality in __init__(). + """ + include_preset = None + + # "presets" is alias to "include" + if presets and include: + raise ValueError( + 'Cannot use both "presets" and "include" args at the same time (presets is an alias to include). 
Please pick one or the other :)' + ) + if presets and not include: + include = presets + # include other presets + if include and not isinstance(include, (list, tuple, set)): + include = [include] + + main_preset = type.__call__(cls, *args, name=name, description=description, _exclude=_exclude, **kwargs) + + if include: + include_preset = type.__call__(cls, name=name, description=description, _exclude=_exclude) + for included_preset in include: + include_preset.include_preset(included_preset) + include_preset.merge(main_preset) + return include_preset + + return main_preset + + +class Preset(metaclass=BasePreset): + """ + A preset is the central config for a BBOT scan. It contains everything a scan needs to run -- + targets, modules, flags, config options like API keys, etc. + + You can create a preset manually and pass it into `Scanner(preset=preset)`. + Or, you can pass `Preset`'s kwargs into `Scanner()` and it will create the preset for you implicitly. + + Presets can include other presets (which can in turn include other presets, and so on). + This works by merging each preset in turn using `Preset.merge()`. + The order matters. In case of a conflict, the last preset to be merged wins priority. + + Presets can be loaded from or saved to YAML. BBOT has a number of ready-made presets for common tasks like + subdomain enumeration, web spidering, dirbusting, etc. + + Presets are highly customizable via `conditions`, which use the Jinja2 templating engine. + Using `conditions`, you can define custom logic to inspect the final preset before the scan starts, and change it if need be. + Based on the state of the preset, you can print a warning message, abort the scan, enable/disable modules, etc.. + + Attributes: + target (Target): Target(s) of scan. + whitelist (Target): Scan whitelist (by default this is the same as `target`). + blacklist (Target): Scan blacklist (this takes ultimate precedence). + helpers (ConfigAwareHelper): Helper containing various reusable functions, regexes, etc. + output_dir (pathlib.Path): Output directory for scan. + scan_name (str): Name of scan. Defaults to random value, e.g. "demonic_jimmy". + name (str): Human-friendly name of preset. Used mainly for logging purposes. + description (str): Description of preset. + modules (set): Combined modules to enable for the scan. Includes scan modules, internal modules, and output modules. + scan_modules (set): Modules to enable for the scan. + output_modules (set): Output modules to enable for the scan. (note: if no output modules are specified, this is not populated until .bake()) + internal_modules (set): Internal modules for the scan. (note: not populated until .bake()) + exclude_modules (set): Modules to exclude from the scan. When set, automatically removes excluded modules. + flags (set): Flags to enable for the scan. When set, automatically enables modules. + require_flags (set): Require modules to have these flags. When set, automatically removes offending modules. + exclude_flags (set): Exclude modules that have any of these flags. When set, automatically removes offending modules. + module_dirs (set): Custom directories from which to load modules (alias to `self.module_loader.module_dirs`). When set, automatically preloads contained modules. + config (omegaconf.dictconfig.DictConfig): BBOT config (alias to `core.config`) + core (BBOTCore): Local copy of BBOTCore object. + verbose (bool): Whether log level is currently set to verbose. When set, updates log level for all BBOT log handlers. 
+ debug (bool): Whether log level is currently set to debug. When set, updates log level for all BBOT log handlers. + silent (bool): Whether logging is currently disabled. When set to True, silences all stderr. + + Examples: + >>> preset = Preset( + "evilcorp.com", + "1.2.3.0/24", + flags=["subdomain-enum"], + modules=["nuclei"], + config={"web": {"http_proxy": "http://127.0.0.1"}} + ) + >>> scan = Scanner(preset=preset) + + >>> preset = Preset.from_yaml_file("my_preset.yml") + >>> scan = Scanner(preset=preset) + """ + + def __init__( + self, + *targets, + whitelist=None, + blacklist=None, + modules=None, + output_modules=None, + exclude_modules=None, + flags=None, + require_flags=None, + exclude_flags=None, + config=None, + module_dirs=None, + output_dir=None, + name=None, + description=None, + scan_name=None, + conditions=None, + force_start=False, + verbose=False, + debug=False, + silent=False, + _exclude=None, + _log=True, + ): + """ + Initializes the Preset class. + + Args: + *targets (str): Target(s) to scan. Types supported: hostnames, IPs, CIDRs, emails, open ports. + whitelist (list, optional): Whitelisted target(s) to scan. Defaults to the same as `targets`. + blacklist (list, optional): Blacklisted target(s). Takes ultimate precedence. Defaults to empty. + modules (list[str], optional): List of scan modules to enable for the scan. Defaults to empty list. + output_modules (list[str], optional): List of output modules to use. Defaults to csv, human, and json. + exclude_modules (list[str], optional): List of modules to exclude from the scan. + require_flags (list[str], optional): Only enable modules if they have these flags. + exclude_flags (list[str], optional): Don't enable modules if they have any of these flags. + module_dirs (list[str], optional): additional directories to load modules from. + config (dict, optional): Additional scan configuration settings. + include (list[str], optional): names or filenames of other presets to include. + presets (list[str], optional): an alias for `include`. + output_dir (str or Path, optional): Directory to store scan output. Defaults to BBOT home directory (`~/.bbot`). + scan_name (str, optional): Human-readable name of the scan. If not specified, it will be random, e.g. "demonic_jimmy". + name (str, optional): Human-readable name of the preset. Used mainly for logging. + description (str, optional): Description of the preset. + conditions (list[str], optional): Custom conditions to be executed before scan start. Written in Jinja2. + force_start (bool, optional): If True, ignore conditional aborts and failed module setups. Just run the scan! + verbose (bool, optional): Set the BBOT logger to verbose mode. + debug (bool, optional): Set the BBOT logger to debug mode. + silent (bool, optional): Silence all stderr (effectively disables the BBOT logger). + _exclude (list[Path], optional): Preset filenames to exclude from inclusion. Used internally to prevent infinite recursion in circular or self-referencing presets. + _log (bool, optional): Whether to enable logging for the preset. This will record which modules/flags are enabled, etc. 
+ """ + # internal variables + self._cli = False + self._log = _log + self.scan = None + self._args = None + self._environ = None + self._helpers = None + self._module_loader = None + self._yaml_str = "" + self._baked = False + + self._default_output_modules = None + self._default_internal_modules = None + + # modules / flags + self.modules = set() + self.exclude_modules = set() + self.flags = set() + self.exclude_flags = set() + self.require_flags = set() + + # modules + flags + if modules is None: + modules = [] + if isinstance(modules, str): + modules = [modules] + if output_modules is None: + output_modules = [] + if isinstance(output_modules, str): + output_modules = [output_modules] + if exclude_modules is None: + exclude_modules = [] + if isinstance(exclude_modules, str): + exclude_modules = [exclude_modules] + if flags is None: + flags = [] + if isinstance(flags, str): + flags = [flags] + if exclude_flags is None: + exclude_flags = [] + if isinstance(exclude_flags, str): + exclude_flags = [exclude_flags] + if require_flags is None: + require_flags = [] + if isinstance(require_flags, str): + require_flags = [require_flags] + + # these are used only for preserving the modules as specified in the original preset + # this is to ensure the preset looks the same when reserialized + self.explicit_scan_modules = set() if modules is None else set(modules) + self.explicit_output_modules = set() if output_modules is None else set(output_modules) + + # whether to force-start the scan (ignoring conditional aborts and failed module setups) + self.force_start = force_start + + # scan output directory + self.output_dir = output_dir + # name of scan + self.scan_name = scan_name + + # name of preset, default blank + self.name = name or "" + # preset description, default blank + self.description = description or "" + + # custom conditions, evaluated during .bake() + self.conditions = [] + if conditions is not None: + for condition in conditions: + self.conditions.append((self.name, condition)) + + # keeps track of loaded preset files to prevent infinite circular inclusions + self._preset_files_loaded = set() + if _exclude is not None: + for _filename in _exclude: + self._preset_files_loaded.add(Path(_filename).resolve()) + + # bbot core config + self.core = CORE.copy() + if config is None: + config = omegaconf.OmegaConf.create({}) + # merge custom configs if specified by the user + self.core.merge_custom(config) + + # log verbosity + # actual log verbosity isn't set until .bake() + self.verbose = verbose + self.debug = debug + self.silent = silent + + # custom module directories + self._module_dirs = set() + self.module_dirs = module_dirs + + # target / whitelist / blacklist + # these are temporary receptacles until they all get .baked() together + self._seeds = set(targets if targets else []) + self._whitelist = set(whitelist) if whitelist else whitelist + self._blacklist = set(blacklist if blacklist else []) + + self._target = None + + # we don't fill self.modules yet (that happens in .bake()) + self.explicit_scan_modules.update(set(modules)) + self.explicit_output_modules.update(set(output_modules)) + self.exclude_modules.update(set(exclude_modules)) + self.flags.update(set(flags)) + self.exclude_flags.update(set(exclude_flags)) + self.require_flags.update(set(require_flags)) + + # log.critical(f"{self.name}: verbose: {self.verbose}, debug: {self.debug}, silent: {self.silent}") + + @property + def bbot_home(self): + return Path(self.config.get("home", "~/.bbot")).expanduser().resolve() + + 
@property + def target(self): + if self._target is None: + raise ValueError("Cannot access target before preset is baked (use ._seeds instead)") + return self._target + + @property + def seeds(self): + if self._seeds is None: + raise ValueError("Cannot access target before preset is baked (use ._seeds instead)") + return self.target.seeds + + @property + def whitelist(self): + if self._target is None: + raise ValueError("Cannot access whitelist before preset is baked (use ._whitelist instead)") + return self.target.whitelist + + @property + def blacklist(self): + if self._target is None: + raise ValueError("Cannot access blacklist before preset is baked (use ._blacklist instead)") + return self.target.blacklist + + @property + def preset_dir(self): + return self.bbot_home / "presets" + + @property + def default_output_modules(self): + if self._default_output_modules is not None: + output_modules = self._default_output_modules + else: + output_modules = ["python", "csv", "txt", "json"] + if self._cli: + output_modules.append("stdout") + return output_modules + + @property + def default_internal_modules(self): + preloaded_internal = self.module_loader.preloaded(type="internal") + if self._default_internal_modules is not None: + internal_modules = self._default_internal_modules + else: + internal_modules = list(preloaded_internal) + return {k: preloaded_internal[k] for k in internal_modules} + + def merge(self, other): + """ + Merge another preset into this one. + + If there are any config conflicts, `other` will win over `self`. + + Args: + other (Preset): The preset to merge into this one. + + Examples: + >>> preset1 = Preset(modules=["portscan"]) + >>> preset1.scan_modules + ['portscan'] + >>> preset2 = Preset(modules=["sslcert"]) + >>> preset2.scan_modules + ['sslcert'] + >>> preset1.merge(preset2) + >>> preset1.scan_modules + ['portscan', 'sslcert'] + """ + self.log_debug(f'Merging preset "{other.name}" into "{self.name}"') + + # config + self.core.merge_custom(other.core.custom_config) + self.module_loader.core = self.core + # module dirs + # modules + flags + # establish requirements / exclusions first + self.exclude_modules.update(other.exclude_modules) + self.require_flags.update(other.require_flags) + self.exclude_flags.update(other.exclude_flags) + # then it's okay to start enabling modules + self.explicit_scan_modules.update(other.explicit_scan_modules) + self.explicit_output_modules.update(other.explicit_output_modules) + self.flags.update(other.flags) + + # target / scope + self._seeds.update(other._seeds) + # leave whitelist as None until we encounter one + if other._whitelist is not None: + if self._whitelist is None: + self._whitelist = set(other._whitelist) + else: + self._whitelist.update(other._whitelist) + self._blacklist.update(other._blacklist) + + # module dirs + self.module_dirs = self.module_dirs.union(other.module_dirs) + + # log verbosity + if other.silent: + self.silent = other.silent + if other.verbose: + self.verbose = other.verbose + if other.debug: + self.debug = other.debug + # scan name + if other.scan_name is not None: + self.scan_name = other.scan_name + if other.output_dir is not None: + self.output_dir = other.output_dir + # conditions + if other.conditions: + self.conditions.extend(other.conditions) + # misc + self.force_start = self.force_start | other.force_start + self._cli = self._cli | other._cli + # transfer args + if other._args is not None: + self._args = other._args + + def bake(self, scan=None): + """ + Return a "baked" copy of this preset, 
ready for use by a BBOT scan. + + Baking a preset finalizes it by populating `preset.modules` based on flags, + performing final validations, and substituting environment variables in preloaded modules. + It also evaluates custom `conditions` as specified in the preset. + + This function is automatically called in Scanner.__init__(). There is no need to call it manually. + """ + self.log_debug("Getting baked") + # create a copy of self + baked_preset = copy(self) + baked_preset.scan = scan + # copy core + baked_preset.core = self.core.copy() + # copy module loader + baked_preset._module_loader = self.module_loader.copy() + # prepare os environment + os_environ = baked_preset.environ.prepare() + # find and replace preloaded modules with os environ + # this is different from the config variable substitution because it modifies + # the preloaded modules, i.e. their ansible playbooks + baked_preset.module_loader.find_and_replace(**os_environ) + # update os environ + os.environ.clear() + os.environ.update(os_environ) + + # validate flags, config options + baked_preset.validate() + + # validate log level options + baked_preset.apply_log_level(apply_core=scan is not None) + + # assign baked preset to our scan + if scan is not None: + scan.preset = baked_preset + + # now that our requirements / exclusions are validated, we can start enabling modules + # enable scan modules + for module in baked_preset.explicit_scan_modules: + baked_preset.add_module(module, module_type="scan") + + # enable output modules + output_modules_to_enable = set(baked_preset.explicit_output_modules) + default_output_modules = self.default_output_modules + output_module_override = any(m in default_output_modules for m in output_modules_to_enable) + # if none of the default output modules have been explicitly specified, enable them all + if not output_module_override: + output_modules_to_enable.update(self.default_output_modules) + for module in output_modules_to_enable: + baked_preset.add_module(module, module_type="output", raise_error=False) + + # enable internal modules + for internal_module, preloaded in self.default_internal_modules.items(): + is_enabled = baked_preset.config.get(internal_module, True) + is_excluded = internal_module in baked_preset.exclude_modules + if is_enabled and not is_excluded: + baked_preset.add_module(internal_module, module_type="internal", raise_error=False) + + # disable internal modules if requested + for internal_module in baked_preset.internal_modules: + if baked_preset.config.get(internal_module, True) is False: + baked_preset.exclude_modules.add(internal_module) + + # enable modules by flag + for flag in baked_preset.flags: + for module, preloaded in baked_preset.module_loader.preloaded().items(): + module_flags = preloaded.get("flags", []) + module_type = preloaded.get("type", "scan") + if flag in module_flags: + self.log_debug(f'Enabling module "{module}" because it has flag "{flag}"') + baked_preset.add_module(module, module_type, raise_error=False) + + # ensure we have output modules + if not baked_preset.output_modules: + for output_module in self.default_output_modules: + baked_preset.add_module(output_module, module_type="output", raise_error=False) + + # create target object + from bbot.scanner.target import BBOTTarget + + baked_preset._target = BBOTTarget( + *list(self._seeds), + whitelist=self._whitelist, + blacklist=self._blacklist, + strict_scope=self.strict_scope, + scan=scan, + ) + + # evaluate conditions + if baked_preset.conditions: + from .conditions import 
ConditionEvaluator + + evaluator = ConditionEvaluator(baked_preset) + evaluator.evaluate() + + self._baked = True + return baked_preset + + def parse_args(self): + """ + Parse CLI arguments, and merge them into this preset. + + Used in `cli.py`. + """ + self._cli = True + self.merge(self.args.preset_from_args()) + + @property + def module_dirs(self): + return self.module_loader.module_dirs + + @module_dirs.setter + def module_dirs(self, module_dirs): + if module_dirs: + if isinstance(module_dirs, str): + module_dirs = [module_dirs] + for m in module_dirs: + self.module_loader.add_module_dir(m) + self._module_dirs.add(m) + + @property + def scan_modules(self): + return [m for m in self.modules if self.preloaded_module(m).get("type", "scan") == "scan"] + + @property + def output_modules(self): + return [m for m in self.modules if self.preloaded_module(m).get("type", "scan") == "output"] + + @property + def internal_modules(self): + return [m for m in self.modules if self.preloaded_module(m).get("type", "scan") == "internal"] + + def add_module(self, module_name, module_type="scan", raise_error=True): + self.log_debug(f'Adding module "{module_name}" of type "{module_type}"') + is_valid, reason, preloaded = self._is_valid_module(module_name, module_type, raise_error=raise_error) + if not is_valid: + self.log_debug(f'Unable to add {module_type} module "{module_name}": {reason}') + return + self.modules.add(module_name) + for module_dep in preloaded.get("deps", {}).get("modules", []): + if module_dep != module_name and module_dep not in self.modules: + self.log_verbose(f'Adding module "{module_dep}" because {module_name} depends on it') + self.add_module(module_dep, raise_error=False) + + def preloaded_module(self, module): + return self.module_loader.preloaded()[module] + + @property + def config(self): + return self.core.config + + @property + def web_config(self): + return self.core.config.get("web", {}) + + @property + def scope_config(self): + return self.config.get("scope", {}) + + @property + def strict_scope(self): + return self.scope_config.get("strict", False) + + def apply_log_level(self, apply_core=False): + # silent takes precedence + if self.silent: + self.verbose = False + self.debug = False + if apply_core: + self.core.logger.log_level = "CRITICAL" + for key in ("verbose", "debug"): + with suppress(omegaconf.errors.ConfigKeyError): + del self.core.custom_config[key] + else: + # then debug + if self.debug: + self.verbose = False + if apply_core: + self.core.logger.log_level = "DEBUG" + with suppress(omegaconf.errors.ConfigKeyError): + del self.core.custom_config["verbose"] + else: + # finally verbose + if self.verbose and apply_core: + self.core.logger.log_level = "VERBOSE" + + @property + def helpers(self): + if self._helpers is None: + from bbot.core.helpers.helper import ConfigAwareHelper + + self._helpers = ConfigAwareHelper(preset=self) + return self._helpers + + @property + def module_loader(self): + self.environ + if self._module_loader is None: + from bbot.core.modules import MODULE_LOADER + + self._module_loader = MODULE_LOADER + self._module_loader.ensure_config_files() + + return self._module_loader + + @property + def environ(self): + if self._environ is None: + from .environ import BBOTEnviron + + self._environ = BBOTEnviron(self) + return self._environ + + @property + def args(self): + if self._args is None: + from .args import BBOTArgs + + self._args = BBOTArgs(self) + return self._args + + def in_scope(self, host): + return self.target.in_scope(host) + + def 
blacklisted(self, host): + return self.target.blacklisted(host) + + def whitelisted(self, host): + return self.target.whitelisted(host) + + @classmethod + def from_dict(cls, preset_dict, name=None, _exclude=None, _log=False): + """ + Create a preset from a Python dictionary object. + + Args: + preset_dict (dict): Preset in dictionary form + name (str, optional): Name of preset + _exclude (list[Path], optional): Preset filenames to exclude from inclusion. Used internally to prevent infinite recursion in circular or self-referencing presets. + _log (bool, optional): Whether to enable logging for the preset. This will record which modules/flags are enabled, etc. + + Returns: + Preset: The loaded preset + + Examples: + >>> preset = Preset.from_dict({"target": ["evilcorp.com"], "modules": ["portscan"]}) + """ + new_preset = cls( + *preset_dict.get("target", []), + whitelist=preset_dict.get("whitelist"), + blacklist=preset_dict.get("blacklist"), + modules=preset_dict.get("modules"), + output_modules=preset_dict.get("output_modules"), + exclude_modules=preset_dict.get("exclude_modules"), + flags=preset_dict.get("flags"), + require_flags=preset_dict.get("require_flags"), + exclude_flags=preset_dict.get("exclude_flags"), + verbose=preset_dict.get("verbose", False), + debug=preset_dict.get("debug", False), + silent=preset_dict.get("silent", False), + config=preset_dict.get("config"), + module_dirs=preset_dict.get("module_dirs", []), + include=list(preset_dict.get("include", [])), + scan_name=preset_dict.get("scan_name"), + output_dir=preset_dict.get("output_dir"), + name=preset_dict.get("name", name), + description=preset_dict.get("description"), + conditions=preset_dict.get("conditions", []), + _exclude=_exclude, + _log=_log, + ) + return new_preset + + def include_preset(self, filename): + """ + Load a preset from a yaml file and merge it into this one. + + If the full path is not specified, BBOT will look in all the usual places for it. + + The file extension is optional. + + Args: + filename (Path): The preset YAML file to merge + + Examples: + >>> preset.include_preset("/home/user/my_preset.yml") + """ + self.log_debug(f'Including preset "{filename}"') + preset_from_yaml = self.from_yaml_file(filename, _exclude=self._preset_files_loaded) + if preset_from_yaml is not False: + self.merge(preset_from_yaml) + self._preset_files_loaded.add(preset_from_yaml.filename) + + @classmethod + def from_yaml_file(cls, filename, _exclude=None, _log=False): + """ + Create a preset from a YAML file. If the full path is not specified, BBOT will look in all the usual places for it. + + The file extension is optional. 
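+ Presets loaded this way are cached by filename; loading the same file twice returns the cached preset object.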
+ + Examples: + >>> preset = Preset.from_yaml_file("/home/user/my_preset.yml") + """ + filename = PRESET_PATH.find(filename) + try: + return _preset_cache[filename] + except KeyError: + if _exclude is None: + _exclude = set() + if _exclude is not None and filename in _exclude: + log.debug(f"Not loading {filename} because it was already loaded {_exclude}") + return False + log.debug(f"Loading {filename} because it's not in excluded list ({_exclude})") + _exclude = set(_exclude) + _exclude.add(filename) + try: + yaml_str = open(filename).read() + except FileNotFoundError: + raise PresetNotFoundError(f'Could not find preset at "{filename}" - file does not exist') + preset = cls.from_dict( + omegaconf.OmegaConf.create(yaml_str), name=filename.stem, _exclude=_exclude, _log=_log + ) + preset._yaml_str = yaml_str + preset.filename = filename + _preset_cache[filename] = preset + return preset + + @classmethod + def from_yaml_string(cls, yaml_preset): + """ + Create a preset from a YAML string. + + The file extension is optional. + + Examples: + >>> yaml_string = ''' + >>> target: + >>> - evilcorp.com + >>> modules: + >>> - portscan''' + >>> preset = Preset.from_yaml_string(yaml_string) + """ + return cls.from_dict(omegaconf.OmegaConf.create(yaml_preset)) + + def to_dict(self, include_target=False, full_config=False, redact_secrets=False): + """ + Convert this preset into a Python dictionary. + + Args: + include_target (bool, optional): If True, include target, whitelist, and blacklist in the dictionary + full_config (bool, optional): If True, include the entire config, not just what's changed from the defaults. + + Returns: + dict: The preset in dictionary form + + Examples: + >>> preset = Preset(flags=["subdomain-enum"], modules=["portscan"]) + >>> preset.to_dict() + {"flags": ["subdomain-enum"], "modules": ["portscan"]} + """ + preset_dict = {} + + if self.description: + preset_dict["description"] = self.description + + # config + if full_config: + config = self.core.config + else: + config = self.core.custom_config + config = omegaconf.OmegaConf.to_object(config) + if redact_secrets: + config = self.core.no_secrets_config(config) + if config: + preset_dict["config"] = config + + # scope + if include_target: + target = sorted(self.target.seeds.inputs) + whitelist = [] + if self.target.whitelist is not None: + whitelist = sorted(self.target.whitelist.inputs) + blacklist = sorted(self.target.blacklist.inputs) + if target: + preset_dict["target"] = target + if whitelist and whitelist != target: + preset_dict["whitelist"] = whitelist + if blacklist: + preset_dict["blacklist"] = blacklist + + # flags + modules + if self.require_flags: + preset_dict["require_flags"] = sorted(self.require_flags) + if self.exclude_flags: + preset_dict["exclude_flags"] = sorted(self.exclude_flags) + if self.exclude_modules: + preset_dict["exclude_modules"] = sorted(self.exclude_modules) + if self.flags: + preset_dict["flags"] = sorted(self.flags) + if self.explicit_scan_modules: + preset_dict["modules"] = sorted(self.explicit_scan_modules) + if self.explicit_output_modules: + preset_dict["output_modules"] = sorted(self.explicit_output_modules) + + # log verbosity + if self.verbose: + preset_dict["verbose"] = True + if self.debug: + preset_dict["debug"] = True + if self.silent: + preset_dict["silent"] = True + + # misc scan options + if self.scan_name: + preset_dict["scan_name"] = self.scan_name + if self.scan_name and self.output_dir is not None: + preset_dict["output_dir"] = self.output_dir + + # conditions + if 
self.conditions: + preset_dict["conditions"] = [c[-1] for c in self.conditions] + + return preset_dict + + def to_yaml(self, include_target=False, full_config=False, sort_keys=False): + """ + Return the preset in the form of a YAML string. + + Args: + include_target (bool, optional): If True, include target, whitelist, and blacklist in the dictionary + full_config (bool, optional): If True, include the entire config, not just what's changed from the defaults. + sort_keys (bool, optional): If True, sort YAML keys alphabetically + + Returns: + str: The preset in the form of a YAML string + + Examples: + >>> preset = Preset(flags=["subdomain-enum"], modules=["portscan"]) + >>> print(preset.to_yaml()) + flags: + - subdomain-enum + modules: + - portscan + """ + preset_dict = self.to_dict(include_target=include_target, full_config=full_config) + return yaml.dump(preset_dict, sort_keys=sort_keys) + + def _is_valid_module(self, module, module_type, name_only=False, raise_error=True): + if module_type == "scan": + module_choices = self.module_loader.scan_module_choices + elif module_type == "output": + module_choices = self.module_loader.output_module_choices + elif module_type == "internal": + module_choices = self.module_loader.internal_module_choices + else: + raise ValidationError(f'Unknown module type "{module_type}"') + + if module not in module_choices: + raise ValidationError(get_closest_match(module, module_choices, msg=f"{module_type} module")) + + try: + preloaded = self.module_loader.preloaded()[module] + except KeyError: + raise ValidationError(f'Unknown module "{module}"') + + if name_only: + return True, "", preloaded + + if module in self.exclude_modules: + reason = "the module has been excluded" + return False, reason, {} + + module_flags = preloaded.get("flags", []) + _module_type = preloaded.get("type", "scan") + if module_type: + if _module_type != module_type: + reason = f'its type ({_module_type}) is not "{module_type}"' + if raise_error: + raise ValidationError(f'Unable to add {module_type} module "{module}" because {reason}') + return False, reason, preloaded + + if _module_type == "scan": + if self.exclude_flags: + for f in module_flags: + if f in self.exclude_flags: + return False, f'it has the excluded flag "{f}"', preloaded + if self.require_flags and not all(f in module_flags for f in self.require_flags): + return False, f"it doesn't have the required flags ({','.join(self.require_flags)})", preloaded + + return True, "", preloaded + + def validate(self): + """ + Validate module/flag exclusions/requirements, and CLI config options if applicable. 
+ """ + if self._cli: + self.args.validate() + + # validate excluded modules + for excluded_module in self.exclude_modules: + if excluded_module not in self.module_loader.all_module_choices: + raise ValidationError( + get_closest_match(excluded_module, self.module_loader.all_module_choices, msg="module") + ) + # validate excluded flags + for excluded_flag in self.exclude_flags: + if excluded_flag not in self.module_loader.flag_choices: + raise ValidationError(get_closest_match(excluded_flag, self.module_loader.flag_choices, msg="flag")) + # validate required flags + for required_flag in self.require_flags: + if required_flag not in self.module_loader.flag_choices: + raise ValidationError(get_closest_match(required_flag, self.module_loader.flag_choices, msg="flag")) + # validate flags + for flag in self.flags: + if flag not in self.module_loader.flag_choices: + raise ValidationError(get_closest_match(flag, self.module_loader.flag_choices, msg="flag")) + + @property + def all_presets(self): + """ + Recursively find all the presets and return them as a dictionary + """ + preset_dir = self.preset_dir + home_dir = Path.home() + + # first, add local preset dir to PRESET_PATH + PRESET_PATH.add_path(self.preset_dir) + + # ensure local preset directory exists + mkdir(preset_dir) + + global DEFAULT_PRESETS + if DEFAULT_PRESETS is None: + presets = {} + for ext in ("yml", "yaml"): + for preset_path in PRESET_PATH: + # for every yaml file + for original_filename in preset_path.rglob(f"**/*.{ext}"): + # not including symlinks + if original_filename.is_symlink(): + continue + + # try to load it as a preset + try: + loaded_preset = self.from_yaml_file(original_filename, _log=True) + if loaded_preset is False: + continue + except Exception as e: + log.warning(f'Failed to load preset at "{original_filename}": {e}') + log.trace(traceback.format_exc()) + continue + + # category is the parent folder(s), if any + category = str(original_filename.relative_to(preset_path).parent) + if category == ".": + category = "" + + local_preset = original_filename + # populate symlinks in local preset dir + if not original_filename.is_relative_to(preset_dir): + relative_preset = original_filename.relative_to(preset_path) + local_preset = preset_dir / relative_preset + mkdir(local_preset.parent, check_writable=False) + if not local_preset.exists(): + local_preset.symlink_to(original_filename) + + # collapse home directory into "~" + if local_preset.is_relative_to(home_dir): + local_preset = Path("~") / local_preset.relative_to(home_dir) + + presets[local_preset] = (loaded_preset, category, preset_path, original_filename) + + # sort by name + DEFAULT_PRESETS = dict(sorted(presets.items(), key=lambda x: x[-1][0].name)) + return DEFAULT_PRESETS + + def presets_table(self, include_modules=True): + """ + Return a table of all the presets in the form of a string + """ + table = [] + header = ["Preset", "Category", "Description", "# Modules"] + if include_modules: + header.append("Modules") + for loaded_preset, category, preset_path, original_file in self.all_presets.values(): + loaded_preset = loaded_preset.bake() + num_modules = f"{len(loaded_preset.scan_modules):,}" + row = [loaded_preset.name, category, loaded_preset.description, num_modules] + if include_modules: + row.append(", ".join(sorted(loaded_preset.scan_modules))) + table.append(row) + return make_table(table, header) + + def log_verbose(self, msg): + if self._log: + log.verbose(f"Preset {self.name}: {msg}") + + def log_debug(self, msg): + if self._log: + 
log.debug(f"Preset {self.name}: {msg}") diff --git a/bbot/scanner/scanner.py b/bbot/scanner/scanner.py index d7aefe7996..01d9654c87 100644 --- a/bbot/scanner/scanner.py +++ b/bbot/scanner/scanner.py @@ -1,30 +1,87 @@ +import sys +import asyncio import logging -import threading +import traceback +import contextlib +import regex as re from pathlib import Path -import concurrent.futures -from omegaconf import OmegaConf -from contextlib import suppress +from sys import exc_info +from datetime import datetime from collections import OrderedDict -from .stats import ScanStats -from .target import ScanTarget -from .manager import ScanManager -from .dispatcher import Dispatcher -from bbot.modules import module_loader +from bbot import __version__ from bbot.core.event import make_event -from bbot.core.logger import init_logging +from .manager import ScanIngress, ScanEgress from bbot.core.helpers.misc import sha1, rand_string -from bbot.core.helpers.helper import ConfigAwareHelper from bbot.core.helpers.names_generator import random_name -from bbot.core.helpers.threadpool import ThreadPoolWrapper -from bbot.core.errors import BBOTError, ScanError, ScanCancelledError, ValidationError +from bbot.core.multiprocess import SHARED_INTERPRETER_STATE +from bbot.core.helpers.async_helpers import async_to_sync_gen +from bbot.errors import BBOTError, ScanError, ValidationError log = logging.getLogger("bbot.scanner") -init_logging() - class Scanner: + """A class representing a single BBOT scan + + Examples: + Create scan with multiple targets: + >>> my_scan = Scanner("evilcorp.com", "1.2.3.0/24", modules=["portscan", "sslcert", "httpx"]) + + Create scan with custom config: + >>> config = {"http_proxy": "http://127.0.0.1:8080", "modules": {"portscan": {"top_ports": 2000}}} + >>> my_scan = Scanner("www.evilcorp.com", modules=["portscan", "httpx"], config=config) + + Start the scan, iterating over events as they're discovered (synchronous): + >>> for event in my_scan.start(): + >>> print(event) + + Start the scan, iterating over events as they're discovered (asynchronous): + >>> async for event in my_scan.async_start(): + >>> print(event) + + Start the scan without consuming events (synchronous): + >>> my_scan.start_without_generator() + + Start the scan without consuming events (asynchronous): + >>> await my_scan.async_start_without_generator() + + Attributes: + status (str): Status of scan, representing its current state. It can take on the following string values, each of which is mapped to an integer code in `_status_codes`: + ```markdown + - "NOT_STARTED" (0): Initial status before the scan starts. + - "STARTING" (1): Status when the scan is initializing. + - "RUNNING" (2): Status when the scan is in progress. + - "FINISHING" (3): Status when the scan is in the process of finalizing. + - "CLEANING_UP" (4): Status when the scan is cleaning up resources. + - "ABORTING" (5): Status when the scan is in the process of being aborted. + - "ABORTED" (6): Status when the scan has been aborted. + - "FAILED" (7): Status when the scan has encountered a failure. + - "FINISHED" (8): Status when the scan has successfully completed. + ``` + _status_code (int): The numerical representation of the current scan status, stored for internal use. It is mapped according to the values in `_status_codes`. + target (Target): Target of scan (alias to `self.preset.target`). + preset (Preset): The main scan Preset in its baked form. + config (omegaconf.dictconfig.DictConfig): BBOT config (alias to `self.preset.config`). 
+ whitelist (Target): Scan whitelist (by default this is the same as `target`) (alias to `self.preset.whitelist`). + blacklist (Target): Scan blacklist (this takes ultimate precedence) (alias to `self.preset.blacklist`). + helpers (ConfigAwareHelper): Helper containing various reusable functions, regexes, etc. (alias to `self.preset.helpers`). + output_dir (pathlib.Path): Output directory for scan (alias to `self.preset.output_dir`). + name (str): Name of scan (alias to `self.preset.scan_name`). + dispatcher (Dispatcher): Triggers certain events when the scan `status` changes. + modules (dict): Holds all loaded modules in this format: `{"module_name": Module()}`. + stats (ScanStats): Holds high-level scan statistics such as how many events have been produced and consumed by each module. + home (pathlib.Path): Base output directory of the scan (default: `~/.bbot/scans/`). + running (bool): Whether the scan is currently running. + stopping (bool): Whether the scan is currently stopping. + stopped (bool): Whether the scan is currently stopped. + aborting (bool): Whether the scan is aborted or currently aborting. + + Notes: + - The status is read-only once set to "ABORTING" until it transitions to "ABORTED." + - Invalid statuses are logged but not applied. + - Setting a status will trigger the `on_status` event in the dispatcher. + """ _status_codes = { "NOT_STARTED": 0, @@ -41,99 +98,179 @@ class Scanner: def __init__( self, *targets, - whitelist=None, - blacklist=None, scan_id=None, - name=None, - modules=None, - output_modules=None, - config=None, dispatcher=None, - strict_scope=False, - force_start=False, + **kwargs, ): - if modules is None: - modules = [] - if output_modules is None: - output_modules = ["human"] - if config is None: - config = OmegaConf.create({}) - self.config = config - if name is None: - self.name = random_name() - else: - self.name = str(name) - self.strict_scope = strict_scope - self.force_start = force_start + """ + Initializes the Scanner class. + + If a premade `preset` is specified, it will be used for the scan. + Otherwise, `Scan` accepts the same arguments as `Preset`, which are passed through and used to create a new preset. + + Args: + *targets (list[str], optional): Scan targets (passed through to `Preset`). + preset (Preset, optional): Preset to use for the scan. + scan_id (str, optional): Unique identifier for the scan. Auto-generates if None. + dispatcher (Dispatcher, optional): Dispatcher object to use. Defaults to new Dispatcher. + **kwargs (list[str], optional): Additional keyword arguments (passed through to `Preset`). + """ + self._root_event = None + self._finish_event = None + self.start_time = None + self.end_time = None + self.duration = None + self.duration_human = None + self.duration_seconds = None + + self._success = False + self._scan_finish_status_message = None if scan_id is not None: self.id = str(scan_id) else: self.id = f"SCAN:{sha1(rand_string(20)).hexdigest()}" - self._status = "NOT_STARTED" - self._status_code = 0 - # Set up thread pools - max_workers = max(1, self.config.get("max_threads", 100)) - # Shared thread pool, for module use - self._thread_pool = ThreadPoolWrapper(concurrent.futures.ThreadPoolExecutor(max_workers=max_workers)) - # Event thread pool, for event construction, initialization - self._event_thread_pool = ThreadPoolWrapper(concurrent.futures.ThreadPoolExecutor(max_workers=max_workers * 2)) - # Internal thread pool, for handle_event(), module setup, cleanup callbacks, etc. 
- self._internal_thread_pool = ThreadPoolWrapper(concurrent.futures.ThreadPoolExecutor(max_workers=max_workers)) - self.process_pool = ThreadPoolWrapper(concurrent.futures.ProcessPoolExecutor()) - - self.helpers = ConfigAwareHelper(config=self.config, scan=self) - output_dir = self.config.get("output_dir", "") - if output_dir: - self.home = Path(output_dir).resolve() / self.name + custom_preset = kwargs.pop("preset", None) + kwargs["_log"] = True + + from .preset import Preset + + base_preset = Preset(*targets, **kwargs) + + if custom_preset is not None: + if not isinstance(custom_preset, Preset): + raise ValidationError(f'Preset must be of type Preset, not "{type(custom_preset).__name__}"') + base_preset.merge(custom_preset) + + self.preset = base_preset.bake(self) + + # scan name + if self.preset.scan_name is None: + tries = 0 + while 1: + if tries > 5: + scan_name = f"{rand_string(4)}_{rand_string(4)}" + break + scan_name = random_name() + if self.preset.output_dir is not None: + home_path = Path(self.preset.output_dir).resolve() / scan_name + else: + home_path = self.preset.bbot_home / "scans" / scan_name + if not home_path.exists(): + break + tries += 1 else: - self.home = self.helpers.bbot_home / "scans" / self.name + scan_name = str(self.preset.scan_name) + self.name = scan_name.replace("/", "_") - self.target = ScanTarget(self, *targets, strict_scope=strict_scope) + # make sure the preset has a description + if not self.preset.description: + self.preset.description = self.name + + # scan output dir + if self.preset.output_dir is not None: + self.home = Path(self.preset.output_dir).resolve() / self.name + else: + self.home = self.preset.bbot_home / "scans" / self.name + + self._status = "NOT_STARTED" + self._status_code = 0 self.modules = OrderedDict({}) - self._scan_modules = modules - self._internal_modules = list(self._internal_modules()) - self._output_modules = output_modules self._modules_loaded = False - - if not whitelist: - self.whitelist = self.target.copy() - else: - self.whitelist = ScanTarget(self, *whitelist, strict_scope=strict_scope) - if not blacklist: - blacklist = [] - self.blacklist = ScanTarget(self, *blacklist) + self.dummy_modules = {} if dispatcher is None: + from .dispatcher import Dispatcher + self.dispatcher = Dispatcher() else: self.dispatcher = dispatcher self.dispatcher.set_scan(self) - self.manager = ScanManager(self) - self.stats = ScanStats(self) + # scope distance + self.scope_config = self.config.get("scope", {}) + self.scope_search_distance = max(0, int(self.scope_config.get("search_distance", 0))) + self.scope_report_distance = int(self.scope_config.get("report_distance", 1)) + + # web config + self.web_config = self.config.get("web", {}) + self.web_spider_distance = self.web_config.get("spider_distance", 0) + self.web_spider_depth = self.web_config.get("spider_depth", 1) + self.web_spider_links_per_page = self.web_config.get("spider_links_per_page", 20) + max_redirects = self.web_config.get("http_max_redirects", 5) + self.web_max_redirects = max(max_redirects, self.web_spider_distance) + self.http_proxy = self.web_config.get("http_proxy", "") + self.http_timeout = self.web_config.get("http_timeout", 10) + self.httpx_timeout = self.web_config.get("httpx_timeout", 5) + self.http_retries = self.web_config.get("http_retries", 1) + self.httpx_retries = self.web_config.get("httpx_retries", 1) + self.useragent = self.web_config.get("user_agent", "BBOT") + # custom HTTP headers warning + self.custom_http_headers = self.web_config.get("http_headers", {}) 
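+        # custom headers are attached to every in-scope request (and every httpx request), so warn loudly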
+ if self.custom_http_headers: + self.warning( + "You have enabled custom HTTP headers. These will be attached to all in-scope requests and all requests made by httpx." + ) - # prevent too many brute force modules from running at one time - # because they can bypass the global thread limit - self.max_brute_forcers = int(self.config.get("max_brute_forcers", 1)) - self._brute_lock = threading.Semaphore(self.max_brute_forcers) + # url file extensions + self.url_extension_blacklist = {e.lower() for e in self.config.get("url_extension_blacklist", [])} + self.url_extension_httpx_only = {e.lower() for e in self.config.get("url_extension_httpx_only", [])} - # scope distance - self.scope_search_distance = max(0, int(self.config.get("scope_search_distance", 1))) - self.dns_search_distance = max( - self.scope_search_distance, int(self.config.get("scope_dns_search_distance", 3)) - ) - self.scope_report_distance = int(self.config.get("scope_report_distance", 1)) + # url querystring behavior + self.url_querystring_remove = self.config.get("url_querystring_remove", True) + + # blob inclusion + self._file_blobs = self.config.get("file_blobs", False) + self._folder_blobs = self.config.get("folder_blobs", False) + + # how often to print scan status + self.status_frequency = self.config.get("status_frequency", 15) + + from .stats import ScanStats + + self.stats = ScanStats(self) self._prepped = False + self._finished_init = False + self._new_activity = False self._cleanedup = False + self._omitted_event_types = None + + self.__loop = None + self._manager_worker_loop_tasks = [] + self.init_events_task = None + self.ticker_task = None + self.dispatcher_tasks = [] + + self._stopping = False + + self._dns_strings = None + self._dns_regexes = None + self._dns_regexes_yara = None + self._dns_yara_rules_uncompiled = None + self._dns_yara_rules = None + + self.__log_handlers = None + self._log_handler_backup = [] + + async def _prep(self): + """ + Creates the scan's output folder, loads its modules, and calls their .setup() methods. 
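+        Also saves the scan's preset to "preset.yml" in the scan's home directory.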
+ """ + + # update the master PID + SHARED_INTERPRETER_STATE.update_scan_pid() - def prep(self): self.helpers.mkdir(self.home) if not self._prepped: - start_msg = f"Scan with {len(self._scan_modules):,} modules seeded with {len(self.target)} targets" + # save scan preset + with open(self.home / "preset.yml", "w") as f: + f.write(self.preset.to_yaml()) + + # log scan overview + start_msg = f"Scan seeded with {len(self.seeds):,} targets" details = [] if self.whitelist != self.target: details.append(f"{len(self.whitelist):,} in whitelist") @@ -143,193 +280,648 @@ def prep(self): start_msg += f" ({', '.join(details)})" self.hugeinfo(start_msg) - self.load_modules() + # load scan modules (this imports and instantiates them) + # up to this point they were only preloaded + await self.load_modules() - self.info(f"Setting up modules...") - self.setup_modules() + # run each module's .setup() method + succeeded, hard_failed, soft_failed = await self.setup_modules() - self.success(f"Setup succeeded for {len(self.modules):,} modules.") + # intercept modules get sewn together like human centipede + self.intercept_modules = [m for m in self.modules.values() if m._intercept] + self.intercept_modules.sort(key=lambda x: x.priority) + for i, intercept_module in enumerate(self.intercept_modules[1:]): + prev_intercept_module = self.intercept_modules[i] + self.debug( + f"Setting intercept module {intercept_module.name}._incoming_event_queue to previous intercept module {prev_intercept_module.name}.outgoing_event_queue" + ) + interqueue = asyncio.Queue() + intercept_module._incoming_event_queue = interqueue + prev_intercept_module._outgoing_event_queue = interqueue + + # abort if there are no output modules + num_output_modules = len([m for m in self.modules.values() if m._type == "output"]) + if num_output_modules < 1: + raise ScanError("Failed to load output modules. Aborting.") + # abort if any of the module .setup()s hard-failed (i.e. they errored or returned False) + total_failed = len(hard_failed + soft_failed) + if hard_failed: + msg = f"Setup hard-failed for {len(hard_failed):,} modules ({','.join(hard_failed)})" + self._fail_setup(msg) + + total_modules = total_failed + len(self.modules) + success_msg = f"Setup succeeded for {len(self.modules):,}/{total_modules:,} modules." 
+ + self.success(success_msg) self._prepped = True def start(self): + for event in async_to_sync_gen(self.async_start()): + yield event - self.prep() + def start_without_generator(self): + for event in async_to_sync_gen(self.async_start()): + pass - failed = True - - if not self.target: - self.warning(f"No scan targets specified") + async def async_start_without_generator(self): + async for event in self.async_start(): + pass + async def async_start(self): + """ """ + self.start_time = datetime.now() + self.root_event.data["started_at"] = self.start_time.isoformat() try: + await self._prep() + + self._start_log_handlers() + self.trace(f"Ran BBOT {__version__} at {self.start_time}, command: {' '.join(sys.argv)}") + self.trace(f"Target: {self.preset.target.json}") + self.trace(f"Preset: {self.preset.to_dict(redact_secrets=True)}") + + if not self.target: + self.warning("No scan targets specified") + + # start status ticker + self.ticker_task = asyncio.create_task( + self._status_ticker(self.status_frequency), name=f"{self.name}._status_ticker()" + ) + self.status = "STARTING" if not self.modules: - self.error(f"No modules loaded") + self.error("No modules loaded") self.status = "FAILED" return else: self.hugesuccess(f"Starting scan {self.name}") - if self.stopping: - return + await self.dispatcher.on_start(self) + + self.status = "RUNNING" + self._start_modules() + self.verbose(f"{len(self.modules):,} modules started") # distribute seed events - self.manager.init_events() + self.init_events_task = asyncio.create_task( + self.ingress_module.init_events(self.target.seeds.events), + name=f"{self.name}.ingress_module.init_events()", + ) - if self.stopping: - return + # main scan loop + while 1: + # abort if we're aborting + if self.aborting: + self._drain_queues() + break + + # yield events as they come (async for event in scan.async_start()) + if "python" in self.modules: + events, finish = await self.modules["python"]._events_waiting(batch_size=-1) + for e in events: + yield e + if events: + continue + + # break if initialization finished and the scan is no longer active + if self._finished_init and self.modules_finished: + new_activity = await self.finish() + if not new_activity: + self._success = True + scan_finish_event = await self._mark_finished() + yield scan_finish_event + break + + await asyncio.sleep(0.1) + + self._success = True + + except BaseException as e: + if self.helpers.in_exception_chain(e, (KeyboardInterrupt, asyncio.CancelledError)): + self.stop() + self._success = True + else: + try: + raise + except ScanError as e: + self.error(f"{e}") - self.status = "RUNNING" - self.start_modules() - self.verbose(f"{len(self.modules):,} modules started") + except BBOTError as e: + self.critical(f"Error during scan: {e}") - if self.stopping: - return + except Exception: + self.critical(f"Unexpected error during scan:\n{traceback.format_exc()}") - self.manager.loop_until_finished() - failed = False + finally: + tasks = self._cancel_tasks() + self.debug(f"Awaiting {len(tasks):,} tasks") + for task in tasks: + # self.debug(f"Awaiting {task}") + with contextlib.suppress(BaseException): + await asyncio.wait_for(task, timeout=0.1) + self.debug(f"Awaited {len(tasks):,} tasks") + await self._report() + await self._cleanup() + + await self.dispatcher.on_finish(self) + + self._stop_log_handlers() + + if self._scan_finish_status_message: + log_fn = self.hugesuccess + if self.status.startswith("ABORT"): + log_fn = self.hugewarning + elif not self._success: + log_fn = self.critical + 
                log_fn(self._scan_finish_status_message)
+
+    async def _mark_finished(self):
+        if self.status == "ABORTING":
+            status = "ABORTED"
+        elif not self._success:
+            status = "FAILED"
+        else:
+            status = "FINISHED"
+
+        self.end_time = datetime.now()
+        self.duration = self.end_time - self.start_time
+        self.duration_seconds = self.duration.total_seconds()
+        self.duration_human = self.helpers.human_timedelta(self.duration)
+
+        self._scan_finish_status_message = f"Scan {self.name} completed in {self.duration_human} with status {status}"
+
+        scan_finish_event = self.finish_event(self._scan_finish_status_message, status)
+
+        # queue final scan event with output modules
+        output_modules = [m for m in self.modules.values() if m._type == "output" and m.name != "python"]
+        for m in output_modules:
+            await m.queue_event(scan_finish_event)
+        # wait until output modules are flushed
+        while 1:
+            modules_finished = all(m.finished for m in output_modules)
+            if modules_finished:
+                break
+            await asyncio.sleep(0.05)
+
+        self.status = status
+        return scan_finish_event
+
+    def _start_modules(self):
+        self.verbose("Starting module worker loops")
+        for module in self.modules.values():
+            module.start()
-        except KeyboardInterrupt:
-            self.stop()
-            failed = False
+    async def setup_modules(self, remove_failed=True):
+        """Asynchronously initializes all loaded modules by invoking their `setup()` methods.
-        except ScanCancelledError:
-            self.debug("Scan cancelled")
+        Args:
+            remove_failed (bool): Flag indicating whether to remove modules that fail setup.
-        except ScanError as e:
-            self.error(f"{e}")
+        Returns:
+            tuple:
+                succeeded - List of modules that successfully set up.
+                hard_failed - List of modules that encountered a hard failure during setup.
+                soft_failed - List of modules that encountered a soft failure during setup.
-        except BBOTError as e:
-            import traceback
+        Raises:
+            ScanError: If no output modules could be loaded.
-            self.critical(f"Error during scan: {e}")
-            self.debug(traceback.format_exc())
+        Notes:
+            Hard-failed modules are set to an error state and removed if `remove_failed` is True.
+            Soft-failed modules are not set to an error state but are also removed if `remove_failed` is True.
+        """
+        await self.load_modules()
+        self.verbose("Setting up modules")
+        succeeded = []
+        hard_failed = []
+        soft_failed = []
-        except Exception:
-            import traceback
+        async for task in self.helpers.as_completed([m._setup() for m in self.modules.values()]):
+            module, status, msg = await task
+            if status is True:
+                self.debug(f"Setup succeeded for {module.name} ({msg})")
+                succeeded.append(module.name)
+            elif status is False:
+                self.warning(f"Setup hard-failed for {module.name}: {msg}")
+                self.modules[module.name].set_error_state()
+                hard_failed.append(module.name)
+            else:
+                self.info(f"Setup soft-failed for {module.name}: {msg}")
+                soft_failed.append(module.name)
+            if (not status) and (module._intercept or remove_failed):
+                # if an intercept module fails setup, we always remove it
+                self.modules.pop(module.name)
-            self.critical(f"Unexpected error during scan:\n{traceback.format_exc()}")
+        return succeeded, hard_failed, soft_failed
-        finally:
+    async def load_modules(self):
+        """Asynchronously import and instantiate all scan modules, including internal and output modules.
-            self.cleanup()
-            self.shutdown_threadpools(wait=True)
+        This method is automatically invoked by `setup_modules()`.
+        It performs several key tasks in the following sequence:
-        log_fn = self.hugesuccess
-        if self.status == "ABORTING":
-            self.status = "ABORTED"
-            log_fn = self.hugewarning
-        elif failed:
-            self.status = "FAILED"
-            log_fn = self.critical
-        else:
-            self.status = "FINISHED"
+        1. Install dependencies for each module via `self.helpers.depsinstaller.install()`.
+        2. Load scan modules and update the `modules` dictionary.
+        3. Load internal modules and update the `modules` dictionary.
+        4. Load output modules and update the `modules` dictionary.
+        5. Sort modules by their `_priority` attribute.
-        log_fn(f"Scan {self.name} completed with status {self.status}")
+        If any modules fail to load or their dependencies fail to install, a ScanError will be raised (unless `self.force_start` is True).
-        self.dispatcher.on_finish(self)
+        Attributes:
+            succeeded, failed (tuple): A tuple containing lists of modules that succeeded or failed during the dependency installation.
+            loaded_modules, loaded_internal_modules, loaded_output_modules (dict): Dictionaries of successfully loaded modules.
+            failed, failed_internal, failed_output (list): Lists of module names that failed to load.
-    def start_modules(self):
-        self.verbose(f"Starting module threads")
-        for module_name, module in self.modules.items():
-            module.start()
+        Raises:
+            ScanError: If any module dependencies fail to install or modules fail to load, and if `self.force_start` is False.
-    def setup_modules(self, remove_failed=True):
-        self.load_modules()
-        self.verbose(f"Setting up modules")
-        hard_failed = []
-        soft_failed = []
-        setup_futures = dict()
+        Returns:
+            None
+
+        Note:
+            After all modules are loaded, they are sorted by `_priority` and stored in the `modules` dictionary.
+        """
+        if not self._modules_loaded:
+            if not self.preset.modules:
+                self.warning("No modules to load")
+                return
+
+            if not self.preset.scan_modules:
+                self.warning("No scan modules to load")
+
+            # install module dependencies
+            succeeded, failed = await self.helpers.depsinstaller.install(*self.preset.modules)
+            if failed:
+                msg = f"Failed to install dependencies for {len(failed):,} modules: {','.join(failed)}"
+                self._fail_setup(msg)
+            modules = sorted([m for m in self.preset.scan_modules if m in succeeded])
+            output_modules = sorted([m for m in self.preset.output_modules if m in succeeded])
+            internal_modules = sorted([m for m in self.preset.internal_modules if m in succeeded])
+
+            # Load scan modules
+            self.verbose(f"Loading {len(modules):,} scan modules: {','.join(modules)}")
+            loaded_modules, failed = self._load_modules(modules)
+            self.modules.update(loaded_modules)
+            if len(failed) > 0:
+                msg = f"Failed to load {len(failed):,} scan modules: {','.join(failed)}"
+                self._fail_setup(msg)
+            if loaded_modules:
+                self.info(
+                    f"Loaded {len(loaded_modules):,}/{len(self.preset.scan_modules):,} scan modules ({','.join(loaded_modules)})"
+                )
+
+            # Load internal modules
+            self.verbose(f"Loading {len(internal_modules):,} internal modules: {','.join(internal_modules)}")
+            loaded_internal_modules, failed_internal = self._load_modules(internal_modules)
+            self.modules.update(loaded_internal_modules)
+            if len(failed_internal) > 0:
+                msg = f"Failed to load {len(failed_internal):,} internal modules: {','.join(failed_internal)}"
+                self._fail_setup(msg)
+            if loaded_internal_modules:
+                self.info(
+                    f"Loaded {len(loaded_internal_modules):,}/{len(self.preset.internal_modules):,} internal modules ({','.join(loaded_internal_modules)})"
+                )
+
+            # Load output modules
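+            # (if no output modules load successfully, _prep() later aborts the scan with a ScanError)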
self.verbose(f"Loading {len(output_modules):,} output modules: {','.join(output_modules)}") + loaded_output_modules, failed_output = self._load_modules(output_modules) + self.modules.update(loaded_output_modules) + if len(failed_output) > 0: + msg = f"Failed to load {len(failed_output):,} output modules: {','.join(failed_output)}" + self._fail_setup(msg) + if loaded_output_modules: + self.info( + f"Loaded {len(loaded_output_modules):,}/{len(self.preset.output_modules):,} output modules, ({','.join(loaded_output_modules)})" + ) + + # builtin intercept modules + self.ingress_module = ScanIngress(self) + self.egress_module = ScanEgress(self) + self.modules[self.ingress_module.name] = self.ingress_module + self.modules[self.egress_module.name] = self.egress_module + + # sort modules by priority + self.modules = OrderedDict(sorted(self.modules.items(), key=lambda x: getattr(x[-1], "priority", 3))) + + self._modules_loaded = True + + @property + def modules_finished(self): + finished_modules = [m.finished for m in self.modules.values()] + return all(finished_modules) + + def kill_module(self, module_name, message=None): + from signal import SIGINT + + module = self.modules[module_name] + if module._intercept: + self.warning(f'Cannot kill module "{module_name}" because it is critical to the scan') + return + module.set_error_state(message=message, clear_outgoing_queue=True) + for proc in module._proc_tracker: + with contextlib.suppress(Exception): + proc.send_signal(SIGINT) + self.helpers.cancel_tasks_sync(module._tasks) + + @property + def incoming_event_queues(self): + return self.ingress_module.incoming_queues + + @property + def num_queued_events(self): + total = 0 + for q in self.incoming_event_queues: + total += len(q._queue) + return total + + def modules_status(self, _log=False): + finished = True + status = {"modules": {}} + sorted_modules = [] for module_name, module in self.modules.items(): - future = self._internal_thread_pool.submit_task(module._setup) - setup_futures[future] = module_name - for future in self.helpers.as_completed(setup_futures): - module_name = setup_futures[future] - status, msg = future.result() - if status == True: - self.debug(f"Setup succeeded for {module_name} ({msg})") - elif status == False: - self.error(f"Setup hard-failed for {module_name}: {msg}") - self.modules[module_name].set_error_state() - hard_failed.append(module_name) + if module_name.startswith("_"): + continue + sorted_modules.append(module) + mod_status = module.status + if mod_status["running"]: + finished = False + status["modules"][module_name] = mod_status + + # sort modules by name + sorted_modules.sort(key=lambda m: m.name) + + status["finished"] = finished + + modules_errored = [m for m, s in status["modules"].items() if s["errored"]] + + max_mem_percent = 90 + mem_status = self.helpers.memory_status() + # abort if we don't have the memory + mem_percent = mem_status.percent + if mem_percent > max_mem_percent: + free_memory = mem_status.available + free_memory_human = self.helpers.bytes_to_human(free_memory) + self.warning(f"System memory is at {mem_percent:.1f}% ({free_memory_human} remaining)") + + if _log: + modules_status = [] + for m, s in status["modules"].items(): + running = s["running"] + incoming = s["events"]["incoming"] + outgoing = s["events"]["outgoing"] + tasks = s["tasks"] + total = sum([incoming, outgoing, tasks]) + if running or total > 0: + modules_status.append((m, running, incoming, outgoing, tasks, total)) + modules_status.sort(key=lambda x: x[-1], reverse=True) + + 
if modules_status: + modules_status_str = ", ".join([f"{m}({i:,}:{t:,}:{o:,})" for m, r, i, o, t, _ in modules_status]) + self.info(f"{self.name}: Modules running (incoming:processing:outgoing) {modules_status_str}") else: - self.warning(f"Setup soft-failed for {module_name}: {msg}") - soft_failed.append(module_name) - if not status and remove_failed: - self.modules.pop(module_name) - - num_output_modules = len([m for m in self.modules.values() if m._type == "output"]) - if num_output_modules < 1: - raise ScanError("Failed to load output modules. Aborting.") - total_failed = len(hard_failed + soft_failed) - if hard_failed: - msg = f"Setup hard-failed for {len(hard_failed):,} modules ({','.join(hard_failed)})" - self.fail_setup(msg) - elif total_failed > 0: - self.warning(f"Setup failed for {total_failed:,} modules") - - def stop(self, wait=False): - if self.status != "ABORTING": + self.info(f"{self.name}: No modules running") + event_type_summary = sorted(self.stats.events_emitted_by_type.items(), key=lambda x: x[-1], reverse=True) + if event_type_summary: + self.info( + f"{self.name}: Events produced so far: {', '.join([f'{k}: {v}' for k, v in event_type_summary])}" + ) + else: + self.info(f"{self.name}: No events produced yet") + + if modules_errored: + self.verbose( + f"{self.name}: Modules errored: {len(modules_errored):,} ({', '.join(list(modules_errored))})" + ) + + num_queued_events = self.num_queued_events + if num_queued_events: + self.info( + f"{self.name}: {num_queued_events:,} events in queue ({self.stats.speedometer.speed:,} processed in the past {self.status_frequency} seconds)" + ) + else: + self.info( + f"{self.name}: No events in queue ({self.stats.speedometer.speed:,} processed in the past {self.status_frequency} seconds)" + ) + + if self.log_level <= logging.DEBUG: + # status debugging + scan_active_status = [] + scan_active_status.append(f"scan._finished_init: {self._finished_init}") + scan_active_status.append(f"scan.modules_finished: {self.modules_finished}") + for m in sorted_modules: + running = m.running + scan_active_status.append(f" {m}:") + # scan_active_status.append(f" running: {running}") + if running: + # scan_active_status.append(f" tasks:") + for task in list(m._task_counter.tasks.values()): + scan_active_status.append(f" - {task}:") + # scan_active_status.append(f" incoming_queue_size: {m.num_incoming_events}") + # scan_active_status.append(f" outgoing_queue_size: {m.outgoing_event_queue.qsize()}") + for line in scan_active_status: + self.debug(line) + + # log module memory usage + module_memory_usage = [] + for module in sorted_modules: + memory_usage = module.memory_usage + module_memory_usage.append((module.name, memory_usage)) + module_memory_usage.sort(key=lambda x: x[-1], reverse=True) + self.debug("MODULE MEMORY USAGE:") + for module_name, usage in module_memory_usage: + self.debug(f" - {module_name}: {self.helpers.bytes_to_human(usage)}") + + status.update({"modules_errored": len(modules_errored)}) + + return status + + def stop(self): + """Stops the in-progress scan and performs necessary cleanup. + + This method sets the scan's status to "ABORTING," cancels any pending tasks, and drains event queues. It also kills child processes spawned during the scan. 
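+
+        Examples:
+            >>> scan.stop()  # idempotent; only the first call performs the abort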
+ + Returns: + None + """ + if not self._stopping: + self._stopping = True self.status = "ABORTING" - self.hugewarning(f"Aborting scan") - for i in range(max(10, self.max_brute_forcers * 10)): - self._brute_lock.release() + self.hugewarning("Aborting scan") + self.trace() + self._cancel_tasks() + self._drain_queues() self.helpers.kill_children() - self.shutdown_threadpools(wait=False) + self._drain_queues() self.helpers.kill_children() + self.debug("Finished aborting scan") + + async def finish(self): + """Finalizes the scan by invoking the `finished()` method on all active modules if new activity is detected. + + The method is idempotent and will return False if no new activity has been recorded since the last invocation. + + Returns: + bool: True if new activity has been detected and the `finished()` method is invoked on all modules. + False if no new activity has been detected since the last invocation. - def shutdown_threadpools(self, wait=True): - pools = [ - self.process_pool, - self._internal_thread_pool, - self.helpers.dns._thread_pool, - self._event_thread_pool, - self._thread_pool, - ] - self.debug(f"Shutting down thread pools with wait={wait}") - threads = [] - for pool in pools: - t = threading.Thread(target=pool.shutdown, kwargs={"wait": False, "cancel_futures": True}, daemon=True) - t.start() - threads.append(t) - if wait: - for t in threads: - t.join() - if wait: - for pool in pools: - pool.shutdown(wait=True) - self.debug("Finished shutting down thread pools") - - def cleanup(self): - # clean up modules - self.status = "CLEANING_UP" + Notes: + This method alters the scan's status to "FINISHING" if new activity is detected. + """ + # if new events were generated since last time we were here + if self._new_activity: + self._new_activity = False + self.status = "FINISHING" + # Trigger .finished() on every module and start over + log.info("Finishing scan") + for module in self.modules.values(): + finished_event = self.make_event("FINISHED", "FINISHED", dummy=True, tags={module.name}) + await module.queue_event(finished_event) + self.verbose("Completed finish()") + return True + self.verbose("Completed final finish()") + # Return False if no new events were generated since last time + return False + + def _drain_queues(self): + """Empties all the event queues for each loaded module and the manager's incoming event queue. + + This method iteratively empties both the incoming and outgoing event queues of each module, as well as the incoming event queue of the scan manager. + + Returns: + None + """ + self.debug("Draining queues") + for module in self.modules.values(): + with contextlib.suppress(asyncio.queues.QueueEmpty): + while 1: + if module.incoming_event_queue not in (None, False): + module.incoming_event_queue.get_nowait() + with contextlib.suppress(asyncio.queues.QueueEmpty): + while 1: + if module.outgoing_event_queue not in (None, False): + module.outgoing_event_queue.get_nowait() + self.debug("Finished draining queues") + + def _cancel_tasks(self): + """Cancels all asynchronous tasks and shuts down the process pool. + + This method collects all pending tasks from each module, the dispatcher, + and the scan manager. After collecting these tasks, it cancels them synchronously + using a helper function. Finally, it shuts down the process pool, canceling any + pending futures. 
+
+        Returns:
+            list: The scan tasks that were cancelled.
+        """
+        self.debug("Cancelling all scan tasks")
+        tasks = []
+        # module workers
+        for m in self.modules.values():
+            tasks += getattr(m, "_tasks", [])
+        # init events
+        if self.init_events_task:
+            tasks.append(self.init_events_task)
+        # ticker
+        if self.ticker_task:
+            tasks.append(self.ticker_task)
+        # dispatcher
+        tasks += self.dispatcher_tasks
+        # manager worker loops
+        tasks += self._manager_worker_loop_tasks
+        self.helpers.cancel_tasks_sync(tasks)
+        # process pool
+        self.helpers.process_pool.shutdown(cancel_futures=True)
+        self.debug("Finished cancelling all scan tasks")
+        return tasks
+
+    async def _report(self):
+        """Asynchronously executes the `report()` method for each module in the scan.
+
+        This method is called once at the end of each scan and is responsible for
+        triggering the `report()` function for each module. It executes irrespective
+        of whether the scan was aborted or completed successfully. The method makes
+        use of an asynchronous context manager (`_acatch`) to handle exceptions and
+        a task counter to keep track of the task's context.
+
+        Returns:
+            None
+        """
+        for mod in self.modules.values():
+            context = f"{mod.name}.report()"
+            async with self._acatch(context), mod._task_counter.count(context):
+                await mod.report()
+
+    async def _cleanup(self):
+        """Asynchronously executes the `cleanup()` method for each module in the scan.
+
+        This method is called once at the end of the scan to perform resource cleanup
+        tasks. It is executed regardless of whether the scan was aborted or completed
+        successfully. The scan status is set to "CLEANING_UP" during the execution.
+        After calling the `cleanup()` method for each module, it performs additional
+        cleanup tasks such as removing the scan's home directory if empty and cleaning
+        old scans.
+ + Returns: + None + """ + # clean up self if not self._cleanedup: self._cleanedup = True - with suppress(Exception): + self.status = "CLEANING_UP" + # clean up dns engine + if self.helpers._dns is not None: + await self.helpers.dns.shutdown() + # clean up web engine + if self.helpers._web is not None: + await self.helpers.web.shutdown() + # clean up modules + for mod in self.modules.values(): + await mod._cleanup() + with contextlib.suppress(Exception): self.home.rmdir() self.helpers.clean_old_scans() - def in_scope(self, e): - """ - Checks whitelist and blacklist, also taking scope_distance into account - """ - try: - e = make_event(e, dummy=True) - except ValidationError: - return False - in_scope = e.scope_distance == 0 or self.whitelisted(e) - return in_scope and not self.blacklisted(e) + def in_scope(self, *args, **kwargs): + return self.preset.in_scope(*args, **kwargs) - def blacklisted(self, e): - e = make_event(e, dummy=True) - return e in self.blacklist + def whitelisted(self, *args, **kwargs): + return self.preset.whitelisted(*args, **kwargs) - def whitelisted(self, e): - e = make_event(e, dummy=True) - return e in self.whitelist + def blacklisted(self, *args, **kwargs): + return self.preset.blacklisted(*args, **kwargs) + + @property + def core(self): + return self.preset.core + + @property + def config(self): + return self.preset.core.config + + @property + def target(self): + return self.preset.target + + @property + def seeds(self): + return self.preset.seeds + + @property + def whitelist(self): + return self.preset.whitelist + + @property + def blacklist(self): + return self.preset.blacklist + + @property + def helpers(self): + return self.preset.helpers + + @property + def force_start(self): + return self.preset.force_start @property def word_cloud(self): @@ -339,14 +931,28 @@ def word_cloud(self): def stopping(self): return not self.running + @property + def stopped(self): + return self._status_code > 5 + @property def running(self): return 0 < self._status_code < 4 + @property + def aborting(self): + return 5 <= self._status_code <= 6 + @property def status(self): return self._status + @property + def omitted_event_types(self): + if self._omitted_event_types is None: + self._omitted_event_types = self.config.get("omit_event_types", []) + return self._omitted_event_types + @status.setter def status(self, status): """ @@ -357,204 +963,388 @@ def status(self, status): if self.status == "ABORTING" and not status == "ABORTED": self.debug(f'Attempt to set invalid status "{status}" on aborted scan') else: - self._status = status - self._status_code = self._status_codes[status] - self.dispatcher.on_status(self._status, self.id) + if status != self._status: + self._status = status + self._status_code = self._status_codes[status] + self.dispatcher_tasks.append( + asyncio.create_task( + self.dispatcher.catch(self.dispatcher.on_status, self._status, self.id), + name=f"{self.name}.dispatcher.on_status({status})", + ) + ) + else: + self.debug(f'Scan status is already "{status}"') else: self.debug(f'Attempt to set invalid status "{status}" on scan') - @property - def status_detailed(self): - main_tasks = self._thread_pool.num_tasks - dns_tasks = self.helpers.dns._thread_pool.num_tasks - event_threadpool_tasks = self._event_thread_pool.num_tasks - event_tasks = self.manager.event_queue.qsize() - internal_tasks = self._internal_thread_pool.num_tasks - process_tasks = self.process_pool.num_tasks - total_tasks = main_tasks + dns_tasks + event_tasks + internal_tasks - status = { - 
"queued_tasks": { - "main": main_tasks, - "dns": dns_tasks, - "internal": internal_tasks, - "process": process_tasks, - "event": event_threadpool_tasks, - "total": total_tasks, - }, - "queued_events": { - "manager": event_tasks, - }, - } - return status - def make_event(self, *args, **kwargs): kwargs["scan"] = self event = make_event(*args, **kwargs) return event - @property - def log(self): - if self._log is None: - self._log = logging.getLogger(f"bbot.agent.scanner") - return self._log - @property def root_event(self): - root_event = self.make_event(data=f"{self.name} ({self.id})", event_type="SCAN", dummy=True) + """ + The root scan event, e.g.: + ```json + { + "type": "SCAN", + "id": "SCAN:1188928d942ace8e3befae0bdb9c3caa22705f54", + "data": "pixilated_kathryn (SCAN:1188928d942ace8e3befae0bdb9c3caa22705f54)", + "scope_distance": 0, + "scan": "SCAN:1188928d942ace8e3befae0bdb9c3caa22705f54", + "timestamp": 1694548779.616255, + "parent": "SCAN:1188928d942ace8e3befae0bdb9c3caa22705f54", + "tags": [ + "distance-0" + ], + "module": "TARGET", + "module_sequence": "TARGET" + } + ``` + """ + if self._root_event is None: + self._root_event = self.make_root_event(f"Scan {self.name} started at {self.start_time}") + self._root_event.data["status"] = self.status + return self._root_event + + def finish_event(self, context=None, status=None): + if self._finish_event is None: + if context is None or status is None: + raise ValueError("Must specify context and status") + self._finish_event = self.make_root_event(context) + self._finish_event.data["status"] = status + return self._finish_event + + def make_root_event(self, context): + root_event = self.make_event(data=self.json, event_type="SCAN", dummy=True, context=context) root_event._id = self.id root_event.scope_distance = 0 - root_event._resolved.set() - root_event.source = root_event - root_event.module = self.helpers._make_dummy_module(name="TARGET", _type="TARGET") + root_event.parent = root_event + root_event.module = self._make_dummy_module(name="TARGET", _type="TARGET") return root_event @property - def useragent(self): - return self.config.get("user_agent", "BBOT") + def dns_strings(self): + """ + A list of DNS hostname strings generated from the scan target + """ + if self._dns_strings is None: + dns_whitelist = {t.host for t in self.whitelist if t.host and isinstance(t.host, str)} + dns_whitelist = sorted(dns_whitelist, key=len) + dns_whitelist_set = set() + dns_strings = [] + for t in dns_whitelist: + if not any(x in dns_whitelist_set for x in self.helpers.domain_parents(t, include_self=True)): + dns_whitelist_set.add(t) + dns_strings.append(t) + self._dns_strings = dns_strings + return self._dns_strings + + def _generate_dns_regexes(self, pattern): + """ + Generates a list of compiled DNS hostname regexes based on the provided pattern. + This method centralizes the regex compilation to avoid redundancy in the dns_regexes and dns_regexes_yara methods. + + Args: + pattern (str): + Returns: + list[re.Pattern]: A list of compiled regex patterns if enabled, otherwise an empty list. 
+ """ + + dns_regexes = [] + for t in self.dns_strings: + regex_pattern = re.compile(f"{pattern}{re.escape(t)})", re.I) + log.debug(f"Generated Regex [{regex_pattern.pattern}] for domain {t}") + dns_regexes.append(regex_pattern) + return dns_regexes + + @property + def dns_regexes(self): + """ + A list of DNS hostname regexes generated from the scan target + For the purpose of extracting hostnames + + Examples: + Extract hostnames from text: + >>> for regex in scan.dns_regexes: + ... for match in regex.finditer(response.text): + ... hostname = match.group().lower() + """ + if self._dns_regexes is None: + self._dns_regexes = self._generate_dns_regexes(r"((?:(?:[\w-]+)\.)+") + return self._dns_regexes + + @property + def dns_regexes_yara(self): + """ + Returns a list of DNS hostname regexes formatted specifically for compatibility with YARA rules. + """ + if self._dns_regexes_yara is None: + self._dns_regexes_yara = self._generate_dns_regexes(r"(([a-z0-9-]+\.)*") + return self._dns_regexes_yara + + @property + def dns_yara_rules_uncompiled(self): + if self._dns_yara_rules_uncompiled is None: + regexes_component_list = [] + for i, r in enumerate(self.dns_regexes_yara): + regexes_component_list.append(rf"$dns_name_{i} = /\b{r.pattern}/ nocase") + + # Chunk the regexes into groups of 10,000 + chunk_size = 10000 + rules = {} + for chunk_index in range(0, len(regexes_component_list), chunk_size): + chunk = regexes_component_list[chunk_index : chunk_index + chunk_size] + if chunk: + regexes_component = " ".join(chunk) + rule_name = f"hostname_extraction_{chunk_index // chunk_size}" + rule = f'rule {rule_name} {{meta: description = "matches DNS hostname pattern derived from target(s)" strings: {regexes_component} condition: any of them}}' + rules[rule_name] = rule + + self._dns_yara_rules_uncompiled = rules + return self._dns_yara_rules_uncompiled + + async def dns_yara_rules(self): + if self._dns_yara_rules is None: + if self.dns_yara_rules_uncompiled is not None: + import yara + + self._dns_yara_rules = await self.helpers.run_in_executor( + yara.compile, source="\n".join(self.dns_yara_rules_uncompiled.values()) + ) + return self._dns_yara_rules + + async def extract_in_scope_hostnames(self, s): + """ + Given a string, uses yara to extract hostnames matching scan targets + + Examples: + >>> await self.scan.extract_in_scope_hostnames("http://www.evilcorp.com") + ... 
{"www.evilcorp.com"} + """ + matches = set() + dns_yara_rules = await self.dns_yara_rules() + if dns_yara_rules is not None: + for match in await self.helpers.run_in_executor(dns_yara_rules.match, data=s): + for string in match.strings: + for instance in string.instances: + matches.add(str(instance)) + return matches @property def json(self): - j = dict() + """ + A dictionary representation of the scan including its name, ID, targets, whitelist, blacklist, and modules + """ + j = {} for i in ("id", "name"): v = getattr(self, i, "") if v: j.update({i: v}) - if self.target: - j.update({"targets": [str(e.data) for e in self.target]}) - if self.whitelist: - j.update({"whitelist": [str(e.data) for e in self.whitelist]}) - if self.blacklist: - j.update({"blacklist": [str(e.data) for e in self.blacklist]}) - if self.modules: - j.update({"modules": [str(m) for m in self.modules]}) + j["target"] = self.preset.target.json + j["preset"] = self.preset.to_dict(redact_secrets=True) + if self.start_time is not None: + j["started_at"] = self.start_time.isoformat() + if self.end_time is not None: + j["finished_at"] = self.end_time.isoformat() + if self.duration is not None: + j["duration_seconds"] = self.duration_seconds + if self.duration_human is not None: + j["duration"] = self.duration_human return j - def debug(self, *args, **kwargs): + def debug(self, *args, trace=False, **kwargs): log.debug(*args, extra={"scan_id": self.id}, **kwargs) + if trace: + self.trace() - def verbose(self, *args, **kwargs): + def verbose(self, *args, trace=False, **kwargs): log.verbose(*args, extra={"scan_id": self.id}, **kwargs) + if trace: + self.trace() - def hugeverbose(self, *args, **kwargs): + def hugeverbose(self, *args, trace=False, **kwargs): log.hugeverbose(*args, extra={"scan_id": self.id}, **kwargs) + if trace: + self.trace() - def info(self, *args, **kwargs): + def info(self, *args, trace=False, **kwargs): log.info(*args, extra={"scan_id": self.id}, **kwargs) + if trace: + self.trace() - def hugeinfo(self, *args, **kwargs): + def hugeinfo(self, *args, trace=False, **kwargs): log.hugeinfo(*args, extra={"scan_id": self.id}, **kwargs) + if trace: + self.trace() - def success(self, *args, **kwargs): + def success(self, *args, trace=False, **kwargs): log.success(*args, extra={"scan_id": self.id}, **kwargs) + if trace: + self.trace() - def hugesuccess(self, *args, **kwargs): + def hugesuccess(self, *args, trace=False, **kwargs): log.hugesuccess(*args, extra={"scan_id": self.id}, **kwargs) + if trace: + self.trace() - def warning(self, *args, **kwargs): + def warning(self, *args, trace=True, **kwargs): log.warning(*args, extra={"scan_id": self.id}, **kwargs) + if trace: + self.trace() - def hugewarning(self, *args, **kwargs): + def hugewarning(self, *args, trace=True, **kwargs): log.hugewarning(*args, extra={"scan_id": self.id}, **kwargs) + if trace: + self.trace() - def error(self, *args, **kwargs): + def error(self, *args, trace=True, **kwargs): log.error(*args, extra={"scan_id": self.id}, **kwargs) + if trace: + self.trace() + + def trace(self, msg=None): + if msg is None: + e_type, e_val, e_traceback = exc_info() + if e_type is not None: + log.trace(traceback.format_exc()) + else: + log.trace(msg) - def critical(self, *args, **kwargs): + def critical(self, *args, trace=True, **kwargs): log.critical(*args, extra={"scan_id": self.id}, **kwargs) + if trace: + self.trace() - def _internal_modules(self): - for modname in module_loader.preloaded(type="internal"): - if self.config.get(modname, True): - yield modname - - 
def load_modules(self): - - if not self._modules_loaded: - - all_modules = list(set(self._scan_modules + self._output_modules + self._internal_modules)) - if not all_modules: - self.warning(f"No modules to load") - return - - if not self._scan_modules: - self.warning(f"No scan modules to load") + @property + def log_level(self): + """ + Return the current log level, e.g. logging.INFO + """ + return self.core.logger.log_level - # install module dependencies - succeeded, failed = self.helpers.depsinstaller.install( - *self._scan_modules, *self._output_modules, *self._internal_modules + @property + def _log_handlers(self): + if self.__log_handlers is None: + self.helpers.mkdir(self.home) + main_handler = logging.handlers.TimedRotatingFileHandler( + str(self.home / "scan.log"), when="d", interval=1, backupCount=14 ) - if failed: - msg = f"Failed to install dependencies for {len(failed):,} modules: {','.join(failed)}" - self.fail_setup(msg) - modules = [m for m in self._scan_modules if m in succeeded] - output_modules = [m for m in self._output_modules if m in succeeded] - internal_modules = [m for m in self._internal_modules if m in succeeded] - - # Load scan modules - self.verbose(f"Loading {len(modules):,} scan modules: {','.join(list(modules))}") - loaded_modules, failed = self._load_modules(modules) - self.modules.update(loaded_modules) - if len(failed) > 0: - msg = f"Failed to load {len(failed):,} scan modules: {','.join(failed)}" - self.fail_setup(msg) - if loaded_modules: - self.info( - f"Loaded {len(loaded_modules):,}/{len(self._scan_modules):,} scan modules ({','.join(list(loaded_modules))})" - ) - - # Load internal modules - self.verbose(f"Loading {len(internal_modules):,} internal modules: {','.join(list(internal_modules))}") - loaded_internal_modules, failed_internal = self._load_modules(internal_modules) - self.modules.update(loaded_internal_modules) - if len(failed_internal) > 0: - msg = f"Failed to load {len(loaded_internal_modules):,} internal modules: {','.join(loaded_internal_modules)}" - self.fail_setup(msg) - if loaded_internal_modules: - self.info( - f"Loaded {len(loaded_internal_modules):,}/{len(self._internal_modules):,} internal modules ({','.join(list(loaded_internal_modules))})" - ) - - # Load output modules - self.verbose(f"Loading {len(output_modules):,} output modules: {','.join(list(output_modules))}") - loaded_output_modules, failed_output = self._load_modules(output_modules) - self.modules.update(loaded_output_modules) - if len(failed_output) > 0: - msg = f"Failed to load {len(failed_output):,} output modules: {','.join(failed_output)}" - self.fail_setup(msg) - if loaded_output_modules: - self.info( - f"Loaded {len(loaded_output_modules):,}/{len(self._output_modules):,} output modules, ({','.join(list(loaded_output_modules))})" - ) - - self.modules = OrderedDict(sorted(self.modules.items(), key=lambda x: getattr(x[-1], "_priority", 0))) - self._modules_loaded = True - - def fail_setup(self, msg): + main_handler.addFilter(lambda x: x.levelno != logging.TRACE and x.levelno >= logging.VERBOSE) + debug_handler = logging.handlers.TimedRotatingFileHandler( + str(self.home / "debug.log"), when="d", interval=1, backupCount=14 + ) + debug_handler.addFilter(lambda x: x.levelno >= logging.DEBUG) + self.__log_handlers = [main_handler, debug_handler] + return self.__log_handlers + + def _start_log_handlers(self): + # add log handlers + for handler in self._log_handlers: + self.core.logger.add_log_handler(handler) + # temporarily disable main ones + for handler_name in 
("file_main", "file_debug"): + handler = self.core.logger.log_handlers.get(handler_name, None) + if handler is not None and handler not in self._log_handler_backup: + self._log_handler_backup.append(handler) + self.core.logger.remove_log_handler(handler) + + def _stop_log_handlers(self): + # remove log handlers + for handler in self._log_handlers: + self.core.logger.remove_log_handler(handler) + # restore main ones + for handler in self._log_handler_backup: + self.core.logger.add_log_handler(handler) + + def _fail_setup(self, msg): msg = str(msg) - if not self.force_start: - msg += " (--force to run module anyway)" if self.force_start: self.error(msg) else: + msg += " (--force to run module anyway)" raise ScanError(msg) def _load_modules(self, modules): - modules = [str(m) for m in modules] loaded_modules = {} failed = set() - for module_name, module_class in module_loader.load_modules(modules).items(): + for module_name, module_class in self.preset.module_loader.load_modules(modules).items(): if module_class: try: loaded_modules[module_name] = module_class(self) self.verbose(f'Loaded module "{module_name}"') continue except Exception: - import traceback - self.warning(f"Failed to load module {module_class}") - self.debug(traceback.format_exc()) else: self.warning(f'Failed to load unknown module "{module_name}"') failed.add(module_name) return loaded_modules, failed + + async def _status_ticker(self, interval=15): + async with self._acatch(): + while 1: + await asyncio.sleep(interval) + self.modules_status(_log=True) + + @contextlib.asynccontextmanager + async def _acatch(self, context="scan", finally_callback=None, unhandled_is_critical=False): + """ + Async version of catch() + + async with catch(): + await do_stuff() + """ + try: + yield + except BaseException as e: + self._handle_exception(e, context=context, unhandled_is_critical=unhandled_is_critical) + + def _handle_exception(self, e, context="scan", finally_callback=None, unhandled_is_critical=False): + if callable(context): + context = f"{context.__qualname__}()" + filename, lineno, funcname = self.helpers.get_traceback_details(e) + if self.helpers.in_exception_chain(e, (KeyboardInterrupt,)): + log.debug("Interrupted") + self.stop() + elif isinstance(e, BrokenPipeError): + log.debug(f"BrokenPipeError in {filename}:{lineno}:{funcname}(): {e}") + elif isinstance(e, asyncio.CancelledError): + raise + elif isinstance(e, Exception): + traceback_str = getattr(e, "engine_traceback", None) + if traceback_str is None: + traceback_str = traceback.format_exc() + if unhandled_is_critical: + log.critical(f"Error in {context}: {filename}:{lineno}:{funcname}(): {e}") + log.critical(traceback_str) + else: + log.error(f"Error in {context}: {filename}:{lineno}:{funcname}(): {e}") + log.trace(traceback_str) + if callable(finally_callback): + finally_callback(e) + + def _make_dummy_module(self, name, _type="scan"): + """ + Construct a dummy module, for attachment to events + """ + try: + return self.dummy_modules[name] + except KeyError: + dummy = DummyModule(scan=self, name=name, _type=_type) + self.dummy_modules[name] = dummy + return dummy + + +from bbot.modules.base import BaseModule + + +class DummyModule(BaseModule): + _priority = 4 + + def __init__(self, *args, **kwargs): + self._name = kwargs.pop("name") + self._type = kwargs.pop("_type") + super().__init__(*args, **kwargs) diff --git a/bbot/scanner/stats.py b/bbot/scanner/stats.py index fc6f24a557..38d95032f7 100644 --- a/bbot/scanner/stats.py +++ b/bbot/scanner/stats.py @@ -1,24 +1,59 @@ 
+import time import logging +from collections import deque log = logging.getLogger("bbot.scanner.stats") +def _increment(d, k): + try: + d[k] += 1 + except KeyError: + d[k] = 1 + + +class SpeedCounter: + """ + A simple class for keeping a rolling tally of the number of events inside a specific time window + """ + + def __init__(self, window=60): + self.timestamps = deque() + self.window = window + + def tick(self): + current_time = time.time() + self.timestamps.append(current_time) + self.remove_old_timestamps(current_time) + + def remove_old_timestamps(self, current_time): + while self.timestamps and current_time - self.timestamps[0] > self.window: + self.timestamps.popleft() + + @property + def speed(self): + self.remove_old_timestamps(time.time()) + return len(self.timestamps) + + class ScanStats: def __init__(self, scan): self.scan = scan self.module_stats = {} - - def event_emitted(self, event): - module_stat = self.get(event.module) - if module_stat is not None: - module_stat.increment_emitted(event) + self.events_emitted_by_type = {} + self.speedometer = SpeedCounter(scan.status_frequency) def event_produced(self, event): + _increment(self.events_emitted_by_type, event.type) module_stat = self.get(event.module) if module_stat is not None: module_stat.increment_produced(event) def event_consumed(self, event, module): + self.speedometer.tick() + # skip ingress/egress modules, etc. + if module.name.startswith("_"): + return module_stat = self.get(module) if module_stat is not None: module_stat.increment_consumed(event) @@ -49,43 +84,32 @@ def table(self): consumed_str = f"{mstat.consumed_total:,}" consumed = sorted(mstat.consumed.items(), key=lambda x: x[0]) if consumed: - consumed_str = " (" + ", ".join(f"{c:,} {t}" for t, c in consumed) + ")" + consumed_str += " (" + ", ".join(f"{c:,} {t}" for t, c in consumed) + ")" table_row.append(consumed_str) table.append(table_row) table.sort(key=lambda x: self.module_stats[x[0]].produced_total, reverse=True) return [header] + table - def __str__(self): + def _make_table(self): table = self.table() - return self.scan.helpers.make_table(table[1:], table[0]) + if len(table) == 1: + table += [["None", "None", "None"]] + return table[1:], table[0] class ModuleStat: def __init__(self, module): self.module = module - - self.emitted = {} - self.emitted_total = 0 self.produced = {} self.produced_total = 0 self.consumed = {} self.consumed_total = 0 - def increment_emitted(self, event): - self.emitted_total += 1 - self._increment(self.emitted, event.type) - def increment_produced(self, event): self.produced_total += 1 - self._increment(self.produced, event.type) + _increment(self.produced, event.type) def increment_consumed(self, event): - self.consumed_total += 1 - self._increment(self.consumed, event.type) - - @staticmethod - def _increment(d, k): - try: - d[k] += 1 - except KeyError: - d[k] = 1 + if event.type not in ("FINISHED",): + self.consumed_total += 1 + _increment(self.consumed, event.type) diff --git a/bbot/scanner/target.py b/bbot/scanner/target.py index 8d84bd6631..bdd9edd107 100644 --- a/bbot/scanner/target.py +++ b/bbot/scanner/target.py @@ -1,112 +1,364 @@ import logging -import ipaddress +import regex as re +from hashlib import sha1 +from radixtarget import RadixTarget +from radixtarget.helpers import host_size_key + +from bbot.errors import * +from bbot.core.event import make_event, is_event +from bbot.core.helpers.misc import is_dns_name, is_ip -from bbot.core.errors import * -from bbot.core.event import make_event -from 
bbot.modules.base import BaseModule log = logging.getLogger("bbot.core.target") -class ScanTarget: - def __init__(self, scan, *targets, strict_scope=False): +def special_target_type(regex_pattern): + def decorator(func): + func._regex = re.compile(regex_pattern, re.IGNORECASE) + return func + + return decorator + + +class BaseTarget(RadixTarget): + """ + A collection of BBOT events that represent a scan target. + + Based on radixtarget, which allows extremely fast IP and DNS lookups. + + This class is inherited by all three components of the BBOT target: + - Whitelist + - Blacklist + - Seeds + """ + + special_target_types = { + # regex-callback pairs for handling special target types + # these aren't defined explicitly; instead they are decorated with @special_target_type + # the function must return a list of events + } + tags = [] + + def __init__(self, *targets, scan=None, **kwargs): self.scan = scan - self.dummy_module = ScanTargetDummyModule(scan) - self._events = dict() - if len(targets) > 0: - log.verbose(f"Creating events from {len(targets):,} targets") - for t in targets: - self.add_target(t) + self.events = set() + self.inputs = set() + # Register decorated methods + for method in dir(self): + if callable(getattr(self, method, None)): + func = getattr(self, method) + if hasattr(func, "_regex"): + self.special_target_types[func._regex] = func - self.strict_scope = strict_scope - self._hash = None + super().__init__(*targets, **kwargs) - def add_target(self, t): - if type(t) == self.__class__: - for k, v in t._events.items(): - self._events[k].update(v) + def get(self, event, **kwargs): + """ + Override default .get() to accept events + """ + if is_event(event): + host = event.host + # save resources by checking if the event is an IP or DNS name + elif is_ip(event, include_network=True) or is_dns_name(event): + host = event + elif isinstance(event, str): + event = self.make_event(event) + host = event.host else: - event = self.scan.make_event(t, source=self.scan.root_event, module=self.dummy_module, tags=["target"]) - event.make_in_scope() + raise ValueError(f"Invalid host/event: {event} ({type(event)})") + if not host: + if kwargs.get("raise_error", False): + raise KeyError(f"Host not found: '{event}'") + return None + results = super().get(host, **kwargs) + return results + + def make_event(self, *args, **kwargs): + # if it's already an event, return it + if args and is_event(args[0]): + return args[0] + # otherwise make a new one + if "tags" not in kwargs: + kwargs["tags"] = set() + kwargs["tags"].update(self.tags) + return make_event(*args, dummy=True, scan=self.scan, **kwargs) + + def add(self, targets): + if not isinstance(targets, (list, set, tuple)): + targets = [targets] + events = set() + for target in targets: + _events = [] + special_target_type, _events = self.check_special_target_types(str(target)) + if special_target_type: + self.inputs.add(str(target)) + else: + event = self.make_event(target) + if event: + self.inputs.add(target) + _events = [event] + for event in _events: + events.add(event) + + # sort by host size to ensure consistency + events = sorted(events, key=lambda e: ((0, 0) if not e.host else host_size_key(e.host))) + for event in events: + self.events.add(event) + self._add(event.host, data=event) + + def check_special_target_types(self, target): + for regex, callback in self.special_target_types.items(): + match = regex.match(target) + if match: + return True, callback(match) + return False, [] + + def __iter__(self): + yield from self.events + + 
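# ---------------------------------------------------------------------------
# Illustrative aside (not part of the patch): a minimal, self-contained sketch
# of the @special_target_type dispatch pattern that BaseTarget uses above.
# Decorated methods carry a compiled regex; the constructor discovers them via
# introspection and builds a regex -> callback table. The class and method
# names below are hypothetical, and the stdlib `re` module stands in for the
# third-party `regex` module used by the real code.
import re


def special_target_type(pattern):
    def decorator(func):
        func._regex = re.compile(pattern, re.IGNORECASE)
        return func

    return decorator


class MiniTarget:
    def __init__(self, *targets):
        # discover decorated methods and build the regex -> callback table
        self.special_target_types = {}
        for name in dir(self):
            func = getattr(self, name, None)
            if callable(func) and hasattr(func, "_regex"):
                self.special_target_types[func._regex] = func
        self.events = [e for t in targets for e in self.add(str(t))]

    @special_target_type(r"^(?:ORG|ORG_STUB):(.*)")
    def handle_org_stub(self, match):
        return [("ORG_STUB", match.group(1))]

    def add(self, target):
        # special targets are routed to their decorated callback;
        # everything else falls through to the default handler
        for regex, callback in self.special_target_types.items():
            match = regex.match(target)
            if match:
                return callback(match)
        return [("DNS_NAME", target)]


# MiniTarget("evilcorp.com", "ORG:evilcorp").events
# -> [("DNS_NAME", "evilcorp.com"), ("ORG_STUB", "evilcorp")]
# ---------------------------------------------------------------------------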
+class ScanSeeds(BaseTarget): + """ + Initial events used to seed a scan. + + These are the targets specified by the user, e.g. via `-t` on the CLI. + """ + + tags = ["target"] + + @special_target_type(r"^(?:ORG|ORG_STUB):(.*)") + def handle_org_stub(self, match): + org_stub_event = self.make_event(match.group(1), event_type="ORG_STUB") + if org_stub_event: + return [org_stub_event] + return [] + + @special_target_type(r"^(?:USER|USERNAME):(.*)") + def handle_username(self, match): + username_event = self.make_event(match.group(1), event_type="USERNAME") + if username_event: + return [username_event] + return [] + + @special_target_type(r"^(?:FILESYSTEM|FILE|FOLDER|DIR|PATH):(.*)") + def handle_filesystem(self, match): + filesystem_event = self.make_event({"path": match.group(1)}, event_type="FILESYSTEM") + if filesystem_event: + return [filesystem_event] + return [] + + @special_target_type(r"^(?:MOBILE_APP|APK|IPA|APP):(.*)") + def handle_mobile_app(self, match): + mobile_app_event = self.make_event({"url": match.group(1)}, event_type="MOBILE_APP") + if mobile_app_event: + return [mobile_app_event] + return [] + + def get(self, event, single=True, **kwargs): + results = super().get(event, **kwargs) + if results and single: + return next(iter(results)) + return results + + def _add(self, host, data): + """ + Overrides the base method to enable having multiple events for the same host. + + The "data" attribute of the node is now a set of events. + """ + if host: try: - self._events[event.host].add(event) + event_set = self.get(host, raise_error=True, single=False) + event_set.add(data) except KeyError: - self._events[event.host] = { - event, - } + event_set = {data} + super()._add(host, data=event_set) + + def _hash_value(self): + # seeds get hashed by event data + return sorted(str(e.data).encode() for e in self.events) + + +class ACLTarget(BaseTarget): + def __init__(self, *args, **kwargs): + # ACL mode dedupes by host (and skips adding already-contained hosts) for efficiency + kwargs["acl_mode"] = True + super().__init__(*args, **kwargs) + + +class ScanWhitelist(ACLTarget): + """ + A collection of BBOT events that represent a scan's whitelist. + """ + + pass + + +class ScanBlacklist(ACLTarget): + """ + A collection of BBOT events that represent a scan's blacklist. 
+ """ + + def __init__(self, *args, **kwargs): + self.blacklist_regexes = set() + super().__init__(*args, **kwargs) + + @special_target_type(r"^(?:RE|REGEX):(.*)") + def handle_regex(self, match): + pattern = match.group(1) + blacklist_regex = re.compile(pattern, re.IGNORECASE) + self.blacklist_regexes.add(blacklist_regex) + return [] + + def get(self, event, **kwargs): + """ + Here, for the blacklist, we modify this method to also consider any special regex patterns specified by the user + """ + event = self.make_event(event) + # first, check event's host against blacklist + try: + event_result = super().get(event, raise_error=True) + except KeyError: + event_result = None + if event_result is not None: + return event_result + # next, check event's host against regexes + host_or_url = event.host_filterable + if host_or_url: + for regex in self.blacklist_regexes: + if regex.search(str(host_or_url)): + return event + if kwargs.get("raise_error", False): + raise KeyError(f"Host not found: '{event.data}'") + return None + + def _hash_value(self): + # regexes are included in blacklist hash + regex_patterns = [str(r.pattern).encode() for r in self.blacklist_regexes] + hosts = [str(h).encode() for h in self.sorted_hosts] + return hosts + regex_patterns + + def __len__(self): + return super().__len__() + len(self.blacklist_regexes) + + def __bool__(self): + return bool(len(self)) + + +class BBOTTarget: + """ + A convenient abstraction of a scan target that contains three subtargets: + - seeds + - whitelist + - blacklist + + Provides high-level functions like in_scope(), which includes both whitelist and blacklist checks. + """ + + def __init__(self, *seeds, whitelist=None, blacklist=None, strict_scope=False, scan=None): + self.scan = scan + self.strict_scope = strict_scope + self.seeds = ScanSeeds(*seeds, strict_dns_scope=strict_scope, scan=scan) + if whitelist is None: + whitelist = self.seeds.hosts + self.whitelist = ScanWhitelist(*whitelist, strict_dns_scope=strict_scope, scan=scan) + if blacklist is None: + blacklist = [] + self.blacklist = ScanBlacklist(*blacklist, scan=scan) @property - def events(self): - for _events in self._events.values(): - yield from _events + def json(self): + return { + "seeds": sorted([e.data for e in self.seeds]), + "whitelist": sorted([e.data for e in self.whitelist]), + "blacklist": sorted([e.data for e in self.blacklist]), + "strict_scope": self.strict_scope, + "hash": self.hash.hex(), + "seed_hash": self.seeds.hash.hex(), + "whitelist_hash": self.whitelist.hash.hex(), + "blacklist_hash": self.blacklist.hash.hex(), + "scope_hash": self.scope_hash.hex(), + } - def copy(self): - self_copy = self.__class__(self.scan, strict_scope=self.strict_scope) - self_copy._events = dict(self._events) - return self_copy + @property + def hash(self): + sha1_hash = sha1() + for target_hash in [t.hash for t in (self.seeds, self.whitelist, self.blacklist)]: + sha1_hash.update(target_hash) + return sha1_hash.digest() + + @property + def scope_hash(self): + sha1_hash = sha1() + # Consider only the hash values of the whitelist and blacklist + for target_hash in [t.hash for t in (self.whitelist, self.blacklist)]: + sha1_hash.update(target_hash) + return sha1_hash.digest() + + def in_scope(self, host): + """ + Check whether a hostname, url, IP, etc. is in scope. + Accepts either events or string data. - def _contains(self, other): + Checks whitelist and blacklist. + If `host` is an event and its scope distance is zero, it will automatically be considered in-scope. 
+ + Examples: + Check if a URL is in scope: + >>> preset.in_scope("http://www.evilcorp.com") + True + """ try: - other = make_event(other, dummy=True) + e = make_event(host, dummy=True) except ValidationError: return False - if other in self.events: - return True - if other.host: - if other.host in self._events: - return True - if self.scan.helpers.is_ip_type(other.host): - for n in self.scan.helpers.ip_network_parents(other.host, include_self=True): - if n in self._events: - return True - elif not self.strict_scope: - for h in self.scan.helpers.domain_parents(other.host): - if h in self._events: - return True - return False - - def __str__(self): - return ",".join([str(e.data) for e in self.events][:5]) - - def __iter__(self): - yield from self.events + in_scope = e.scope_distance == 0 or self.whitelisted(e) + return in_scope and not self.blacklisted(e) - def __contains__(self, other): - # if "other" is a ScanTarget - if type(other) == self.__class__: - contained_in_self = [self._contains(e) for e in other.events] - return all(contained_in_self) - else: - return self._contains(other) + def blacklisted(self, host): + """ + Check whether a hostname, url, IP, etc. is blacklisted. - def __bool__(self): - return bool(self._events) + Note that `host` can be a hostname, IP address, CIDR, email address, or any BBOT `Event` with the `host` attribute. - def __eq__(self, other): - return hash(self) == hash(other) + Args: + host (str or IPAddress or Event): The host to check against the blacklist - def __hash__(self): - if self._hash is None: - events = tuple(sorted(list(self.events), key=lambda e: hash(e))) - self._hash = hash(events) - return self._hash + Examples: + Check if a URL's host is blacklisted: + >>> preset.blacklisted("http://www.evilcorp.com") + True + """ + return host in self.blacklist - def __len__(self): + def whitelisted(self, host): """ - Returns the total number of HOSTS (not events) in the target + Check whether a hostname, url, IP, etc. is whitelisted. + + Note that `host` can be a hostname, IP address, CIDR, email address, or any BBOT `Event` with the `host` attribute. + + Args: + host (str or IPAddress or Event): The host to check against the whitelist + + Examples: + Check if a URL's host is whitelisted: + >>> preset.whitelisted("http://www.evilcorp.com") + True """ - num_hosts = 0 - for host, _events in self._events.items(): - if type(host) in (ipaddress.IPv4Network, ipaddress.IPv6Network): - num_hosts += host.num_addresses - else: - num_hosts += len(_events) - return num_hosts + return host in self.whitelist + @property + def minimal(self): + """ + A slimmer, serializable version of the target designed for simple scope checks -class ScanTargetDummyModule(BaseModule): - _type = "TARGET" - name = "TARGET" + This version doesn't have the events, only their hosts. This allows it to be passed across process boundaries. 
+ """ + return self.__class__( + whitelist=self.whitelist.inputs, + blacklist=self.blacklist.inputs, + strict_scope=self.strict_scope, + ) - def __init__(self, scan): - self.scan = scan + def __eq__(self, other): + return self.hash == other.hash diff --git a/bbot/scripts/docs.py b/bbot/scripts/docs.py new file mode 100755 index 0000000000..708102f7ca --- /dev/null +++ b/bbot/scripts/docs.py @@ -0,0 +1,296 @@ +#!/usr/bin/env python3 + +import os +import re +import json +import yaml +from pathlib import Path + +from bbot import Preset +from bbot.core.modules import MODULE_LOADER + + +DEFAULT_PRESET = Preset() + +os.environ["BBOT_TABLE_FORMAT"] = "github" + + +# Make a regex pattern which will match any group of non-space characters that include a blacklisted character +blacklist_chars = ["<", ">"] +blacklist_re = re.compile(r"\|([^|]*[" + re.escape("".join(blacklist_chars)) + r"][^|]*)\|") + +bbot_code_dir = Path(__file__).parent.parent.parent + + +def gen_chord_data(): + # This function generates the dataset for the chord graph in the documentation + # showing relationships between BBOT modules and their consumed/produced event types + preloaded_mods = sorted(MODULE_LOADER.preloaded().items(), key=lambda x: x[0]) + + entity_lookup_table = {} + rels = [] + entities = {} + entity_counter = 1 + + def add_entity(entity, parent_id): + if entity not in entity_lookup_table: + nonlocal entity_counter + e_id = entity_counter + entity_counter += 1 + entity_lookup_table[entity] = e_id + entity_lookup_table[e_id] = entity + entities[e_id] = {"id": e_id, "name": entity, "parent": parent_id, "consumes": [], "produces": []} + return entity_lookup_table[entity] + + # create entities for all the modules and event types + for module, preloaded in preloaded_mods: + watched = [e for e in preloaded["watched_events"] if e != "*"] + produced = [e for e in preloaded["produced_events"] if e != "*"] + if watched or produced: + m_id = add_entity(module, 99999999) + for event_type in watched: + e_id = add_entity(event_type, 88888888) + entities[m_id]["consumes"].append(e_id) + entities[e_id]["consumes"].append(m_id) + for event_type in produced: + e_id = add_entity(event_type, 88888888) + entities[m_id]["produces"].append(e_id) + entities[e_id]["produces"].append(m_id) + + def add_rel(incoming, outgoing, t): + if incoming == "*" or outgoing == "*": + return + i_id = entity_lookup_table[incoming] + o_id = entity_lookup_table[outgoing] + rels.append({"source": i_id, "target": o_id, "type": t}) + + # create all the module <--> event type relationships + for module, preloaded in preloaded_mods: + for event_type in preloaded["watched_events"]: + add_rel(module, event_type, "consumes") + for event_type in preloaded["produced_events"]: + add_rel(event_type, module, "produces") + + # write them to JSON files + data_dir = Path(__file__).parent.parent.parent / "docs" / "data" / "chord_graph" + data_dir.mkdir(parents=True, exist_ok=True) + entity_file = data_dir / "entities.json" + rels_file = data_dir / "rels.json" + + entities = [ + {"id": 77777777, "name": "root"}, + {"id": 99999999, "name": "module", "parent": 77777777}, + {"id": 88888888, "name": "event_type", "parent": 77777777}, + ] + sorted(entities.values(), key=lambda x: x["name"]) + + with open(entity_file, "w") as f: + json.dump(entities, f, indent=4) + + with open(rels_file, "w") as f: + json.dump(rels, f, indent=4) + + +def homedir_collapseuser(f): + f = Path(f) + home_dir = Path.home() + if f.is_relative_to(home_dir): + return Path("~") / f.relative_to(home_dir) + 
return f
+
+
+def enclose_tags(text):
+    # Use re.sub() to replace matched words with the same words enclosed in backticks
+    result = blacklist_re.sub(r"|`\1`|", text)
+    return result
+
+
+def find_replace_markdown(content, keyword, replace):
+    begin_re = re.compile(r"<!--\s*" + keyword + r"\s*-->", re.I)
+    end_re = re.compile(r"<!--\s*END\s*" + keyword + r"\s*-->", re.I)
+
+    begin_match = begin_re.search(content)
+    end_match = end_re.search(content)
+
+    new_content = str(content)
+    if begin_match and end_match:
+        start_index = begin_match.span()[-1] + 1
+        end_index = end_match.span()[0] - 1
+        new_content = new_content[:start_index] + enclose_tags(replace) + new_content[end_index:]
+    return new_content
+
+
+def find_replace_file(file, keyword, replace):
+    with open(file) as f:
+        content = f.read()
+        new_content = find_replace_markdown(content, keyword, replace)
+    if new_content != content:
+        if "BBOT_TESTING" not in os.environ:
+            with open(file, "w") as f:
+                f.write(new_content)
+
+
+def update_docs():
+    md_files = [p for p in bbot_code_dir.glob("**/*.md") if p.is_file()]
+
+    def update_md_files(keyword, s):
+        for file in md_files:
+            find_replace_file(file, keyword, s)
+
+    def update_individual_module_options():
+        regex = re.compile("BBOT MODULE OPTIONS ([A-Z_]+)")
+        for file in md_files:
+            with open(file) as f:
+                content = f.read()
+            for match in regex.finditer(content):
+                module_name = match.groups()[0].lower()
+                bbot_module_options_table = DEFAULT_PRESET.module_loader.modules_options_table(modules=[module_name])
+                find_replace_file(file, f"BBOT MODULE OPTIONS {module_name.upper()}", bbot_module_options_table)
+
+    # Example commands
+    bbot_example_commands = []
+    for title, description, command in DEFAULT_PRESET.args.scan_examples:
+        example = ""
+        example += f"**{title}:**\n\n"
+        # example += f"{description}\n"
+        example += f"```bash\n# {description}\n{command}\n```"
+        bbot_example_commands.append(example)
+    bbot_example_commands = "\n\n".join(bbot_example_commands)
+    assert len(bbot_example_commands.splitlines()) > 10
+    update_md_files("BBOT EXAMPLE COMMANDS", bbot_example_commands)
+
+    # Help output
+    bbot_help_output = DEFAULT_PRESET.args.parser.format_help().replace("docs.py", "bbot")
+    bbot_help_output = f"```text\n{bbot_help_output}\n```"
+    assert len(bbot_help_output.splitlines()) > 50
+    update_md_files("BBOT HELP OUTPUT", bbot_help_output)
+
+    # BBOT events
+    bbot_event_table = DEFAULT_PRESET.module_loader.events_table()
+    assert len(bbot_event_table.splitlines()) > 10
+    update_md_files("BBOT EVENTS", bbot_event_table)
+
+    # BBOT modules
+    bbot_module_table = DEFAULT_PRESET.module_loader.modules_table(include_author=True, include_created_date=True)
+    assert len(bbot_module_table.splitlines()) > 50
+    update_md_files("BBOT MODULES", bbot_module_table)
+
+    # BBOT output modules
+    bbot_output_module_table = DEFAULT_PRESET.module_loader.modules_table(
+        mod_type="output", include_author=True, include_created_date=True
+    )
+    assert len(bbot_output_module_table.splitlines()) > 10
+    update_md_files("BBOT OUTPUT MODULES", bbot_output_module_table)
+
+    # BBOT module options
+    bbot_module_options_table = DEFAULT_PRESET.module_loader.modules_options_table()
+    assert len(bbot_module_options_table.splitlines()) > 100
+    update_md_files("BBOT MODULE OPTIONS", bbot_module_options_table)
+    update_individual_module_options()
+
+    # BBOT module flags
+    bbot_module_flags_table = DEFAULT_PRESET.module_loader.flags_table()
+    assert len(bbot_module_flags_table.splitlines()) > 10
+    update_md_files("BBOT MODULE FLAGS", bbot_module_flags_table)
+
+    # BBOT presets
+    
bbot_presets_table = DEFAULT_PRESET.presets_table(include_modules=True) + assert len(bbot_presets_table.splitlines()) > 5 + update_md_files("BBOT PRESETS", bbot_presets_table) + + # BBOT presets + for yaml_file, (loaded_preset, category, preset_path, original_filename) in DEFAULT_PRESET.all_presets.items(): + preset_yaml = f""" +```yaml title={yaml_file.name} +{loaded_preset._yaml_str} +``` +""" + preset_yaml_expandable = f""" +
+<details>
+<summary><b>{yaml_file.name}</b></summary>
+
+```yaml
+{loaded_preset._yaml_str}
+```
+
+</details>
+""" + update_md_files(f"BBOT {loaded_preset.name.upper()} PRESET", preset_yaml) + update_md_files(f"BBOT {loaded_preset.name.upper()} PRESET EXPANDABLE", preset_yaml_expandable) + + content = [] + for yaml_file, (loaded_preset, category, preset_path, original_filename) in DEFAULT_PRESET.all_presets.items(): + yaml_str = loaded_preset._yaml_str + indent = " " * 4 + yaml_str = f"\n{indent}".join(yaml_str.splitlines()) + filename = homedir_collapseuser(yaml_file) + + num_modules = len(loaded_preset.scan_modules) + modules = ", ".join(sorted([f"`{m}`" for m in loaded_preset.scan_modules])) + category = f"Category: {category}" if category else "" + + content.append( + f"""## **{loaded_preset.name}** + +{loaded_preset.description} + +??? note "`{filename.name}`" + ```yaml title="{filename}" + {yaml_str} + ``` + +{category} + +Modules: [{num_modules:,}]("{modules}")""" + ) + assert len(content) > 5 + update_md_files("BBOT PRESET YAML", "\n\n".join(content)) + + # Default config + default_config_file = bbot_code_dir / "bbot" / "defaults.yml" + with open(default_config_file) as f: + default_config_yml = f.read() + default_config_yml = f'```yaml title="defaults.yml"\n{default_config_yml}\n```' + assert len(default_config_yml.splitlines()) > 20 + update_md_files("BBOT DEFAULT CONFIG", default_config_yml) + + # Table of Contents + base_url = "https://www.blacklanternsecurity.com/bbot/Stable" + + def format_section(section_title, section_path): + path = section_path.split("index.md")[0] + path = path.split(".md")[0] + return f"- [{section_title}]({base_url}/{path})\n" + + bbot_docs_toc = "" + + def update_toc(section, level=0): + nonlocal bbot_docs_toc + indent = " " * 4 * level + if isinstance(section, dict): + for section_title, subsections in section.items(): + if isinstance(subsections, str): + bbot_docs_toc += f"{indent}{format_section(section_title, subsections)}" + else: + bbot_docs_toc += f"{indent}- **{section_title}**\n" + for subsection in subsections: + update_toc(subsection, level=level + 1) + + mkdocs_yml_file = bbot_code_dir / "mkdocs.yml" + yaml.SafeLoader.add_constructor( + "tag:yaml.org,2002:python/name:pymdownx.superfences.fence_code_format", lambda x, y: {} + ) + + with open(mkdocs_yml_file, "r") as f: + mkdocs_yaml = yaml.safe_load(f) + nav = mkdocs_yaml["nav"] + for section in nav: + update_toc(section) + bbot_docs_toc = bbot_docs_toc.strip() + # assert len(bbot_docs_toc.splitlines()) == 2 + update_md_files("BBOT DOCS TOC", bbot_docs_toc) + + # generate data for chord graph + gen_chord_data() + + +update_docs() diff --git a/bbot/test/bbot_fixtures.py b/bbot/test/bbot_fixtures.py index f2bad4e100..86bc83433f 100644 --- a/bbot/test/bbot_fixtures.py +++ b/bbot/test/bbot_fixtures.py @@ -1,79 +1,96 @@ +import os # noqa +import sys import pytest -import urllib3 -import requests +import shutil # noqa +import asyncio # noqa +import logging +import subprocess import tldextract +import pytest_httpserver from pathlib import Path -from omegaconf import OmegaConf +from omegaconf import OmegaConf # noqa -# make the necessary web requests before nuking them to high heaven -example_url = "https://api.publicapis.org/health" -http = urllib3.PoolManager() -urllib_response = http.request("GET", example_url) -requests_response = requests.get(example_url) -tldextract.extract("www.evilcorp.com") +from werkzeug.wrappers import Request +from bbot.errors import * # noqa: F401 +from bbot.core import CORE +from bbot.scanner import Preset +from bbot.core.helpers.async_helpers import get_event_loop +from 
bbot.core.helpers.misc import mkdir, rand_string, get_python_constraints -@pytest.fixture -def neograph(monkeypatch, helpers): - helpers.depsinstaller.pip_install(["py2neo"]) - class NeoGraph: - def __init__(self, *args, **kwargs): - pass +log = logging.getLogger("bbot.test.fixtures") - def merge(self, *args, **kwargs): - return True - import py2neo +bbot_test_dir = Path("/tmp/.bbot_test") +mkdir(bbot_test_dir) - monkeypatch.setattr(py2neo, "Graph", NeoGraph) - from bbot.db.neo4j import Neo4j - return Neo4j(uri="bolt://127.0.0.1:1111") +DEFAULT_PRESET = Preset() +available_modules = list(DEFAULT_PRESET.module_loader.configs(type="scan")) +available_output_modules = list(DEFAULT_PRESET.module_loader.configs(type="output")) +available_internal_modules = list(DEFAULT_PRESET.module_loader.configs(type="internal")) -@pytest.fixture -def neuter_ansible(monkeypatch): - from ansible_runner.interface import run - class AnsibleRunnerResult: - status = "successful" - rc = 0 - events = [] +def tempwordlist(content): + filename = bbot_test_dir / f"{rand_string(8)}" + with open(filename, "w", errors="ignore") as f: + for c in content: + line = f"{c}\n" + f.write(line) + return filename - def ansible_run(*args, **kwargs): - module = kwargs.get("module", "") - if module != "pip": - return AnsibleRunnerResult() - else: - return run(*args, **kwargs) - from bbot.core.helpers.depsinstaller import installer +def tempapkfile(): + current_dir = Path(__file__).parent + with open(current_dir / "owasp_mastg.apk", "rb") as f: + apk_file = f.read() + return apk_file - ensure_root = installer.DepsInstaller.ensure_root - monkeypatch.setattr(installer, "run", ansible_run) - monkeypatch.setattr(installer.DepsInstaller, "ensure_root", lambda *args, **kwargs: None) +@pytest.fixture +def clean_default_config(monkeypatch): + clean_config = OmegaConf.merge( + CORE.files_config.get_default_config(), {"modules": DEFAULT_PRESET.module_loader.configs()} + ) + with monkeypatch.context() as m: + m.setattr("bbot.core.core.DEFAULT_CONFIG", clean_config) + yield + + +class SubstringRequestMatcher(pytest_httpserver.httpserver.RequestMatcher): + def match_data(self, request: Request) -> bool: + if self.data is None: + return True + return self.data in request.data - return run, ensure_root + +pytest_httpserver.httpserver.RequestMatcher = SubstringRequestMatcher + +# silence pytest_httpserver +log = logging.getLogger("werkzeug") +log.setLevel(logging.CRITICAL) + +tldextract.extract("www.evilcorp.com") @pytest.fixture -def bbot_config(): - from bbot import config as default_config +def bbot_scanner(): + from bbot.scanner import Scanner - test_config = OmegaConf.load(Path(__file__).parent / "test.conf") - config = OmegaConf.merge(default_config, test_config) - return config + return Scanner @pytest.fixture -def scan(neuter_ansible, patch_requests, patch_commands, bbot_config): +def scan(): from bbot.scanner import Scanner - bbot_scan = Scanner("127.0.0.1", modules=["ipneighbor"], config=bbot_config) - bbot_scan.status = "RUNNING" - return bbot_scan + bbot_scan = Scanner("127.0.0.1", modules=["ipneighbor"]) + yield bbot_scan + + loop = get_event_loop() + loop.run_until_complete(bbot_scan._cleanup()) @pytest.fixture @@ -82,68 +99,96 @@ def helpers(scan): httpx_response = { - "timestamp": "2022-06-29T09:56:19.927240577-04:00", - "request": "GET / HTTP/1.1\r\nHost: example.com\r\nUser-Agent: Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/77.0.3865.35 Safari/537.36\r\nAccept-Charset: utf-8\r\nAccept-Encoding: 
gzip\r\n\r\n", - "response-header": 'HTTP/1.1 200 OK\r\nConnection: close\r\nAccept-Ranges: bytes\r\nAge: 557710\r\nCache-Control: max-age=604800\r\nContent-Type: text/html; charset=UTF-8\r\nDate: Wed, 29 Jun 2022 13:56:16 GMT\r\nEtag: "3147526947"\r\nExpires: Wed, 06 Jul 2022 13:56:16 GMT\r\nLast-Modified: Thu, 17 Oct 2019 07:18:26 GMT\r\nServer: ECS (agb/A438)\r\nVary: Accept-Encoding\r\nX-Cache: HIT\r\n\r\n', - "scheme": "http", + "timestamp": "2022-11-14T12:14:27.377566416-05:00", + "hash": { + "body_md5": "84238dfc8092e5d9c0dac8ef93371a07", + "body_mmh3": "-1139337416", + "body_sha256": "ea8fac7c65fb589b0d53560f5251f74f9e9b243478dcb6b3ea79b5e36449c8d9", + "body_simhash": "9899951357530060719", + "header_md5": "6e483c85c3b9b96f0e33d84237ca651e", + "header_mmh3": "-957156428", + "header_sha256": "5a809d8a53aded843179237365bb6dd069fba75ff8603ac2f6dc6c05d6bf0e76", + "header_simhash": "15614709017155972941", + }, "port": "80", - "path": "/", "url": "http://example.com:80", - "input": "http://example.com", + "input": "http://example.com:80", "title": "Example Domain", - "webserver": "ECS (agb/A438)", - "response-body": '\n\n\n Example Domain\n\n \n \n \n \n\n\n\n
\n\n\n', - "content-type": "text/html", + "scheme": "http", + "webserver": "ECS (agb/A445)", + "body": '\n\n\n Example Domain\n\n \n \n \n \n\n\n\n
\n

Example Domain

\n

This domain is for use in illustrative examples in documents. You may use this\n domain in literature without prior coordination or asking for permission.

\n

More information...

\n
\n\n\n', + "content_type": "text/html", "method": "GET", "host": "93.184.216.34", - "content-length": 1256, - "status-code": 200, - "response-time": "95.343985ms", - "failed": False, - "hashes": { - "body-md5": "84238dfc8092e5d9c0dac8ef93371a07", - "body-mmh3": "-1139337416", - "body-sha256": "ea8fac7c65fb589b0d53560f5251f74f9e9b243478dcb6b3ea79b5e36449c8d9", - "body-simhash": "9899951357530060719", - "header-md5": "99b650ea40a9e95550d7540996b67b60", - "header-mmh3": "1831947040", - "header-sha256": "eecbd4d9798c44295df0c5f2beebd939e7e51d9c6c16842dd73be83273f406bd", - "header-simhash": "15614709017155964779", + "path": "/", + "header": { + "age": "526111", + "cache_control": "max-age=604800", + "content_type": "text/html; charset=UTF-8", + "date": "Mon, 14 Nov 2022 17:14:27 GMT", + "etag": '"3147526947+ident+gzip"', + "expires": "Mon, 21 Nov 2022 17:14:27 GMT", + "last_modified": "Thu, 17 Oct 2019 07:18:26 GMT", + "server": "ECS (agb/A445)", + "vary": "Accept-Encoding", + "x_cache": "HIT", }, - "lines": 47, + "raw_header": 'HTTP/1.1 200 OK\r\nConnection: close\r\nAge: 526111\r\nCache-Control: max-age=604800\r\nContent-Type: text/html; charset=UTF-8\r\nDate: Mon, 14 Nov 2022 17:14:27 GMT\r\nEtag: "3147526947+ident+gzip"\r\nExpires: Mon, 21 Nov 2022 17:14:27 GMT\r\nLast-Modified: Thu, 17 Oct 2019 07:18:26 GMT\r\nServer: ECS (agb/A445)\r\nVary: Accept-Encoding\r\nX-Cache: HIT\r\n\r\n', + "request": "GET / HTTP/1.1\r\nHost: example.com\r\nUser-Agent: Mozilla/5.0 (SymbianOS/9.1; U; de) AppleWebKit/413 (KHTML, like Gecko) Safari/413\r\nAccept-Charset: utf-8\r\nAccept-Encoding: gzip\r\n\r\n", + "time": "112.128324ms", + "a": ["93.184.216.34", "2606:2800:220:1:248:1893:25c8:1946"], "words": 298, + "lines": 47, + "status_code": 200, + "content_length": 1256, + "failed": False, } @pytest.fixture def events(scan): class bbot_events: - localhost = scan.make_event("127.0.0.1", dummy=True) - ipv4 = scan.make_event("8.8.8.8", dummy=True) - netv4 = scan.make_event("8.8.8.8/30", dummy=True) - ipv6 = scan.make_event("2001:4860:4860::8888", dummy=True) - netv6 = scan.make_event("2001:4860:4860::8888/126", dummy=True) - domain = scan.make_event("publicAPIs.org", dummy=True) - subdomain = scan.make_event("api.publicAPIs.org", dummy=True) - email = scan.make_event("bob@evilcorp.co.uk", "EMAIL_ADDRESS", dummy=True) - open_port = scan.make_event("api.publicAPIs.org:443", dummy=True) - protocol = scan.make_event({"host": "api.publicAPIs.org:443", "protocol": "HTTP"}, "PROTOCOL", dummy=True) - ipv4_open_port = scan.make_event("8.8.8.8:443", dummy=True) - ipv6_open_port = scan.make_event("[2001:4860:4860::8888]:443", "OPEN_TCP_PORT", dummy=True) - url_unverified = scan.make_event("https://api.publicAPIs.org:443/hellofriend", dummy=True) - ipv4_url_unverified = scan.make_event("https://8.8.8.8:443/hellofriend", dummy=True) - ipv6_url_unverified = scan.make_event("https://[2001:4860:4860::8888]:443/hellofriend", dummy=True) - url = scan.make_event("https://api.publicAPIs.org:443/hellofriend", "URL", dummy=True) - ipv4_url = scan.make_event("https://8.8.8.8:443/hellofriend", "URL", dummy=True) - ipv6_url = scan.make_event("https://[2001:4860:4860::8888]:443/hellofriend", "URL", dummy=True) - url_hint = scan.make_event("https://api.publicAPIs.org:443/hello.ash", "URL_HINT", dummy=True) + localhost = scan.make_event("127.0.0.1", parent=scan.root_event) + ipv4 = scan.make_event("8.8.8.8", parent=scan.root_event) + netv4 = scan.make_event("8.8.8.8/30", parent=scan.root_event) + ipv6 = 
scan.make_event("2001:4860:4860::8888", parent=scan.root_event) + netv6 = scan.make_event("2001:4860:4860::8888/126", parent=scan.root_event) + domain = scan.make_event("publicAPIs.org", parent=scan.root_event) + subdomain = scan.make_event("api.publicAPIs.org", parent=scan.root_event) + email = scan.make_event("bob@evilcorp.co.uk", "EMAIL_ADDRESS", parent=scan.root_event) + open_port = scan.make_event("api.publicAPIs.org:443", parent=scan.root_event) + protocol = scan.make_event( + {"host": "api.publicAPIs.org", "port": 443, "protocol": "HTTP"}, "PROTOCOL", parent=scan.root_event + ) + ipv4_open_port = scan.make_event("8.8.8.8:443", parent=scan.root_event) + ipv6_open_port = scan.make_event("[2001:4860:4860::8888]:443", "OPEN_TCP_PORT", parent=scan.root_event) + url_unverified = scan.make_event("https://api.publicAPIs.org:443/hellofriend", parent=scan.root_event) + ipv4_url_unverified = scan.make_event("https://8.8.8.8:443/hellofriend", parent=scan.root_event) + ipv6_url_unverified = scan.make_event("https://[2001:4860:4860::8888]:443/hellofriend", parent=scan.root_event) + url = scan.make_event( + "https://api.publicAPIs.org:443/hellofriend", "URL", tags=["status-200"], parent=scan.root_event + ) + ipv4_url = scan.make_event( + "https://8.8.8.8:443/hellofriend", "URL", tags=["status-200"], parent=scan.root_event + ) + ipv6_url = scan.make_event( + "https://[2001:4860:4860::8888]:443/hellofriend", "URL", tags=["status-200"], parent=scan.root_event + ) + url_hint = scan.make_event("https://api.publicAPIs.org:443/hello.ash", "URL_HINT", parent=url) vulnerability = scan.make_event( - {"host": "evilcorp.com", "severity": "INFO", "description": "asdf"}, "VULNERABILITY", dummy=True + {"host": "evilcorp.com", "severity": "INFO", "description": "asdf"}, + "VULNERABILITY", + parent=scan.root_event, ) - finding = scan.make_event({"host": "evilcorp.com", "description": "asdf"}, "FINDING", dummy=True) - vhost = scan.make_event({"host": "evilcorp.com", "vhost": "www.evilcorp.com"}, "VHOST", dummy=True) - http_response = scan.make_event(httpx_response, "HTTP_RESPONSE", dummy=True) - emoji = scan.make_event("💩", "WHERE_IS_YOUR_GOD_NOW", dummy=True) + finding = scan.make_event({"host": "evilcorp.com", "description": "asdf"}, "FINDING", parent=scan.root_event) + vhost = scan.make_event({"host": "evilcorp.com", "vhost": "www.evilcorp.com"}, "VHOST", parent=scan.root_event) + http_response = scan.make_event(httpx_response, "HTTP_RESPONSE", parent=scan.root_event) + storage_bucket = scan.make_event( + {"name": "storage", "url": "https://storage.blob.core.windows.net"}, + "STORAGE_BUCKET", + parent=scan.root_event, + ) + emoji = scan.make_event("💩", "WHERE_IS_YOUR_GOD_NOW", parent=scan.root_event) bbot_events.all = [ # noqa: F841 bbot_events.localhost, @@ -169,90 +214,22 @@ class bbot_events: bbot_events.finding, bbot_events.vhost, bbot_events.http_response, + bbot_events.storage_bucket, bbot_events.emoji, ] for e in bbot_events.all: - e.make_in_scope() + e.scope_distance = 0 return bbot_events -@pytest.fixture -def patch_requests(monkeypatch): - from bbot.core.helpers.web import request, download - - monkeypatch.setattr("urllib3.connectionpool.HTTPConnectionPool.urlopen", lambda *args, **kwargs: urllib_response) - monkeypatch.setattr("urllib3.poolmanager.PoolManager.urlopen", lambda *args, **kwargs: urllib_response) - monkeypatch.setattr("requests.adapters.HTTPAdapter.send", lambda *args, **kwargs: requests_response) - monkeypatch.setattr("bbot.core.helpers.web.request", lambda *args, **kwargs: 
requests_response) - current_dir = Path(__file__).resolve().parent - downloaded_file = current_dir / "test_output.json" - monkeypatch.setattr("bbot.core.helpers.web.download", lambda *args, **kwargs: downloaded_file) - return request, download - - -@pytest.fixture -def patch_commands(monkeypatch): - import subprocess - - sample_output = [ - # massdns - """{"name":"www.blacklanternsecurity.com.","type":"A","class":"IN","status":"NOERROR","rx_ts":1659985004071981831,"data":{"answers":[{"ttl":3580,"type":"CNAME","class":"IN","name":"www.blacklanternsecurity.com.","data":"blacklanternsecurity.github.io."},{"ttl":3580,"type":"A","class":"IN","name":"blacklanternsecurity.github.io.","data":"185.199.108.153"},{"ttl":3580,"type":"A","class":"IN","name":"blacklanternsecurity.github.io.","data":"185.199.109.153"},{"ttl":3580,"type":"A","class":"IN","name":"blacklanternsecurity.github.io.","data":"185.199.110.153"},{"ttl":3580,"type":"A","class":"IN","name":"blacklanternsecurity.github.io.","data":"185.199.111.153"}]},"flags":["rd","ra"],"resolver":"8.8.8.8:53"}""", - # httpx - """{"timestamp":"2022-04-15T17:08:29.436778586-04:00","request":"GET /health HTTP/1.1\\r\\nHost: api.publicapis.org\\r\\nUser-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.131 Safari/537.36\\r\\nAccept-Charset: utf-8\\r\\nAccept-Encoding: gzip\\r\\n\\r\\n","response-header":"HTTP/1.1 200 OK\\r\\nConnection: close\\r\\nContent-Length: 15\\r\\nContent-Type: text/plain; charset=utf-8\\r\\nDate: Fri, 15 Apr 2022 21:08:29 GMT\\r\\nServer: Caddy\\r\\nX-Rate-Limit-Duration: 1\\r\\nX-Rate-Limit-Limit: 10.00\\r\\nX-Rate-Limit-Request-Forwarded-For: 50.240.76.25\\r\\nX-Rate-Limit-Request-Remote-Addr: 172.17.0.1:32910\\r\\n\\r\\n","scheme":"https","port":"443","path":"/health","body-sha256":"6c63d4b385b07fe0e09a8a1f95b826e8a7d0401dfd12d649fe7c64b8a785023e","header-sha256":"161187846622dc97219392ab70195f4a477457e55dadf4b39f1b6c734e396120","url":"https://api.publicapis.org:443/health","input":"https://api.publicapis.org/health","webserver":"Caddy","response-body":"{\\"alive\\": true}","content-type":"text/plain","method":"GET","host":"138.197.231.124","content-length":15,"status-code":200,"response-time":"412.587433ms","failed":false,"lines":1,"words":2}""", - # nuclei - """{"template":"technologies/tech-detect.yaml","template-url":"https://github.com/projectdiscovery/nuclei-templates/blob/master/technologies/tech-detect.yaml","template-id":"tech-detect","info":{"name":"Wappalyzer Technology Detection","author":["hakluke"],"tags":["tech"],"reference":null,"severity":"info"},"matcher-name":"caddy","type":"http","host":"https://api.publicapis.org/health","matched-at":"https://api.publicapis.org:443/health","ip":"138.197.231.124","timestamp":"2022-04-15T17:09:01.021589723-04:00","curl-command":"curl -X 'GET' -d '' -H 'Accept: */*' -H 'Accept-Language: en' -H 'User-Agent: Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2226.0 Safari/537.36' 'https://api.publicapis.org/health'","matcher-status":true,"matched-line":null}""", - # naabu - """{"ip":"8.8.8.8","port":443,"timestamp":"2022-08-03T16:01:15.684442081Z"}""" - # ffuf - """{"input":{"FUZZ":"L2luZGV4Lmh0bWw="},"position":1,"status":200,"length":1256,"words":298,"lines":47,"content-type":"text/html;charset=UTF-8","redirectlocation":"","url":"http://example.com:80//index.html","duration":101243249,"resultfile":"","host":"example.com:80"}""", - "https://api.publicapis.org:443/health", - # 
open port - "api.publicapis.org:443", - # host - "api.publicapis.org", - # url - "https://8.8.8.8", - ] - - def run(*args, **kwargs): - text = kwargs.get("text", True) - return subprocess.run(["echo", "\n".join(sample_output)], text=text, stdout=subprocess.PIPE) - - def run_live(*args, **kwargs): - for line in sample_output: - yield line - - from bbot.core.helpers.command import run as old_run, run_live as old_run_live - - monkeypatch.setattr("bbot.core.helpers.command.run", run) - monkeypatch.setattr("bbot.core.helpers.command.run_live", run_live) - - return old_run, old_run_live - - -@pytest.fixture -def agent(monkeypatch): - class WebSocketApp: - def __init__(*args, **kwargs): - return - - def send(self, message): - assert type(message) == str - - def run_forever(*args, **kwargs): - return False - - def close(self): - return - - from bbot import agent - from bbot.modules.output.websocket import Websocket +# @pytest.fixture(scope="session", autouse=True) +# def install_all_python_deps(): +# deps_pip = set() +# for module in DEFAULT_PRESET.module_loader.preloaded().values(): +# deps_pip.update(set(module.get("deps", {}).get("pip", []))) - monkeypatch.setattr(Websocket, "send", lambda *args, **kwargs: True) +# constraint_file = tempwordlist(get_python_constraints()) - test_agent = agent.Agent({"agent_url": "test", "agent_token": "test"}) - test_agent.setup() - monkeypatch.setattr(test_agent, "ws", WebSocketApp()) - return test_agent +# subprocess.run([sys.executable, "-m", "pip", "install", "--constraint", constraint_file] + list(deps_pip)) diff --git a/bbot/test/conftest.py b/bbot/test/conftest.py index 2dc475bf13..ed9aec159d 100644 --- a/bbot/test/conftest.py +++ b/bbot/test/conftest.py @@ -1,6 +1,335 @@ +import os +import ssl +import shutil import pytest +import asyncio import logging -import shutil +from pathlib import Path +from contextlib import suppress +from omegaconf import OmegaConf +from pytest_httpserver import HTTPServer +import time +import queue + +from bbot.core import CORE +from bbot.core.helpers.misc import execute_sync_or_async +from bbot.core.helpers.interactsh import server_list as interactsh_servers + +# silence stdout + trace +root_logger = logging.getLogger() +pytest_debug_file = Path(__file__).parent.parent.parent / "pytest_debug.log" +debug_handler = logging.FileHandler(pytest_debug_file) +debug_handler.setLevel(logging.DEBUG) +debug_format = logging.Formatter("%(asctime)s [%(levelname)s] %(name)s %(filename)s:%(lineno)s %(message)s") +debug_handler.setFormatter(debug_format) +root_logger.addHandler(debug_handler) + +test_config = OmegaConf.load(Path(__file__).parent / "test.conf") + +os.environ["BBOT_DEBUG"] = "True" +CORE.logger.log_level = logging.DEBUG + +# silence all stderr output: +stderr_handler = CORE.logger.log_handlers["stderr"] +stderr_handler.setLevel(logging.CRITICAL) +handlers = list(CORE.logger.listener.handlers) +handlers.remove(stderr_handler) +CORE.logger.listener.handlers = tuple(handlers) + +for h in root_logger.handlers: + h.addFilter(lambda x: x.levelname not in ("STDOUT", "TRACE")) + + +CORE.merge_default(test_config) + + +@pytest.fixture +def assert_all_responses_were_requested() -> bool: + return False + + +@pytest.fixture(autouse=True) +def silence_live_logging(): + for handler in logging.getLogger().handlers: + if type(handler).__name__ == "_LiveLoggingStreamHandler": + handler.setLevel(logging.CRITICAL) + + +def stop_server(server): + server.stop() + while server.is_running(): + time.sleep(0.1) # Wait a bit before checking again + + 
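# ---------------------------------------------------------------------------
# Illustrative aside (not part of the patch): a hypothetical sketch of how a
# test might consume the bbot_httpserver fixture defined just below. The
# /health endpoint and payload are invented for illustration;
# expect_request(), respond_with_json(), and url_for() are standard
# pytest_httpserver API, and `requests` is assumed to be available in the
# test environment.
import requests


def test_mock_endpoint(bbot_httpserver):
    # queue a canned JSON response on the local mock server
    bbot_httpserver.expect_request("/health").respond_with_json({"alive": True})
    url = bbot_httpserver.url_for("/health")
    assert requests.get(url).json() == {"alive": True}
# ---------------------------------------------------------------------------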
+@pytest.fixture +def bbot_httpserver(): + server = HTTPServer(host="127.0.0.1", port=8888, threaded=True) + server.start() + + yield server + + server.clear() + stop_server(server) # Ensure the server is fully stopped + + server.check_assertions() + server.clear() + + +@pytest.fixture +def bbot_httpserver_ssl(): + context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER) + current_dir = Path(__file__).parent + keyfile = str(current_dir / "testsslkey.pem") + certfile = str(current_dir / "testsslcert.pem") + context.load_cert_chain(certfile, keyfile) + server = HTTPServer(host="127.0.0.1", port=9999, ssl_context=context, threaded=True) + server.start() + + yield server + + server.clear() + stop_server(server) # Ensure the server is fully stopped + + server.check_assertions() + server.clear() + + +def should_mock(request): + return request.url.host not in ["127.0.0.1", "localhost", "raw.githubusercontent.com"] + interactsh_servers + + +def pytest_collection_modifyitems(config, items): + # make sure all tests have the httpx_mock marker + for item in items: + item.add_marker( + pytest.mark.httpx_mock( + should_mock=should_mock, + assert_all_requests_were_expected=False, + assert_all_responses_were_requested=False, + can_send_already_matched_responses=True, + ) + ) + + +@pytest.fixture +def bbot_httpserver_allinterfaces(): + server = HTTPServer(host="0.0.0.0", port=5556, threaded=True) + server.start() + + yield server + + server.clear() + if server.is_running(): + server.stop() + server.check_assertions() + server.clear() + + +class Interactsh_mock: + def __init__(self, name): + self.name = name + self.log = logging.getLogger(f"bbot.interactsh.{self.name}") + self.interactions = asyncio.Queue() # Use an asyncio queue for async access + self.correlation_id = "deadbeef-dead-beef-dead-beefdeadbeef" + self.stop = False + self.poll_task = None + + def mock_interaction(self, subdomain_tag, msg=None): + self.log.info(f"Mocking interaction to subdomain tag: {subdomain_tag}") + if msg is not None: + self.log.info(msg) + self.interactions.put_nowait(subdomain_tag) # Add to the thread-safe queue + + async def register(self, callback=None): + if callable(callback): + self.poll_task = asyncio.create_task(self.poll_loop(callback)) + return "fakedomain.fakeinteractsh.com" + + async def deregister(self, callback=None): + await asyncio.sleep(1) + self.stop = True + if self.poll_task is not None: + self.poll_task.cancel() + with suppress(asyncio.CancelledError): + await self.poll_task + + async def poll_loop(self, callback=None): + while not self.stop: + data_list = await self.poll(callback) + if not data_list: + await asyncio.sleep(0.5) + continue + await asyncio.sleep(1) + await self.poll(callback) + + async def poll(self, callback=None): + poll_results = [] + while not self.interactions.empty(): + subdomain_tag = await self.interactions.get() # Get the first element from the asyncio queue + for protocol in ["HTTP", "DNS"]: + result = {"full-id": f"{subdomain_tag}.fakedomain.fakeinteractsh.com", "protocol": protocol} + poll_results.append(result) + if callback is not None: + await execute_sync_or_async(callback, result) + await asyncio.sleep(0.1) + return poll_results + + +import threading +import http.server +import socketserver +import urllib.request + + +class Proxy(http.server.SimpleHTTPRequestHandler): + protocol_version = "HTTP/1.0" + server_version = "Proxy" + urls = [] + + def do_GET(self): + self.urls.append(self.path) + + # Extract host and port from path + netloc = urllib.parse.urlparse(self.path).netloc + 
host, _, port = netloc.partition(":") + + # Fetch the content + conn = http.client.HTTPConnection(host, port if port else 80) + conn.request("GET", self.path, headers=self.headers) + response = conn.getresponse() + + # Send the response back to the client + self.send_response(response.status) + for header, value in response.getheaders(): + self.send_header(header, value) + self.end_headers() + self.copyfile(response, self.wfile) + + response.close() + conn.close() + + +@pytest.fixture +def proxy_server(): + # Set up an HTTP server that acts as a simple proxy. + server = socketserver.ThreadingTCPServer(("localhost", 0), Proxy) + + # Start the server in a new thread. + server_thread = threading.Thread(target=server.serve_forever, daemon=True) + server_thread.start() + + yield server + + # Stop the server. + server.shutdown() + server_thread.join() + + +def pytest_terminal_summary(terminalreporter, exitstatus, config): # pragma: no cover + RED = "\033[1;31m" + GREEN = "\033[1;32m" + YELLOW = "\033[1;33m" + BLUE = "\033[1;34m" + CYAN = "\033[1;36m" + RESET = "\033[0m" + stats = terminalreporter.stats + total_tests = len(terminalreporter._session.items) + passed = len(stats.get("passed", [])) + skipped = len(stats.get("skipped", [])) + errors = len(stats.get("error", [])) + failed = stats.get("failed", []) + + terminalreporter.write("\nTest Session Summary:") + terminalreporter.write(f"\nTotal tests run: {total_tests}") + terminalreporter.write( + f"\n{GREEN}Passed: {passed}{RESET}, {RED}Failed: {len(failed)}{RESET}, {YELLOW}Skipped: {skipped}{RESET}, Errors: {errors}" + ) + + if failed: + terminalreporter.write(f"\n{RED}Detailed failed test report:{RESET}") + for item in failed: + test_name = item.nodeid.split("::")[-1] if "::" in item.nodeid else item.nodeid + file_and_line = f"{item.location[0]}:{item.location[1]}" # File path and line number + terminalreporter.write(f"\n{BLUE}Test Name: {test_name}{RESET} {CYAN}({file_and_line}){RESET}") + terminalreporter.write(f"\n{RED}Location: {item.nodeid} at {item.location[0]}:{item.location[1]}{RESET}") + terminalreporter.write(f"\n{RED}Failure details:\n{item.longreprtext}{RESET}") + + +# BELOW: debugging for frozen/hung tests +import psutil +import traceback +import inspect + + +def _print_detailed_info(): # pragma: no cover + """ + Debugging pytests hanging + """ + print("=== Detailed Thread and Process Information ===\n") + try: + print("=== Threads ===") + for thread in threading.enumerate(): + print(f"Thread Name: {thread.name}") + print(f"Thread ID: {thread.ident}") + print(f"Is Alive: {thread.is_alive()}") + print(f"Daemon: {thread.daemon}") + + if hasattr(thread, "_target"): + target = thread._target + if target: + qualname = ( + f"{target.__module__}.{target.__qualname__}" + if hasattr(target, "__qualname__") + else str(target) + ) + print(f"Target Function: {qualname}") + + if hasattr(thread, "_args"): + args = thread._args + kwargs = thread._kwargs if hasattr(thread, "_kwargs") else {} + arg_spec = inspect.getfullargspec(target) + + all_args = list(args) + [f"{k}={v}" for k, v in kwargs.items()] + + if inspect.ismethod(target) and arg_spec.args[0] == "self": + arg_spec.args.pop(0) + + named_args = list(zip(arg_spec.args, all_args)) + if arg_spec.varargs: + named_args.extend((f"*{arg_spec.varargs}", arg) for arg in all_args[len(arg_spec.args) :]) + + print("Arguments:") + for name, value in named_args: + print(f" {name}: {value}") + else: + print("Target Function: None") + else: + print("Target Function: Unknown") + + print() + + 
print("=== Processes ===") + current_process = psutil.Process() + for child in current_process.children(recursive=True): + print(f"Process ID: {child.pid}") + print(f"Name: {child.name()}") + print(f"Status: {child.status()}") + print(f"CPU Times: {child.cpu_times()}") + print(f"Memory Info: {child.memory_info()}") + print() + + print("=== Current Process ===") + print(f"Process ID: {current_process.pid}") + print(f"Name: {current_process.name()}") + print(f"Status: {current_process.status()}") + print(f"CPU Times: {current_process.cpu_times()}") + print(f"Memory Info: {current_process.memory_info()}") + print() + + except Exception as e: + print(f"An error occurred: {str(e)}") + print("Traceback:") + traceback.print_exc() @pytest.hookimpl(tryfirst=True, hookwrapper=True) @@ -16,3 +345,13 @@ def pytest_sessionfinish(session, exitstatus): shutil.rmtree("/tmp/.bbot_test", ignore_errors=True) yield + + # temporarily suspend stdout capture and print detailed thread info + capmanager = session.config.pluginmanager.get_plugin("capturemanager") + if capmanager: + capmanager.suspend_global_capture(in_=True) + + _print_detailed_info() + + if capmanager: + capmanager.resume_global_capture() diff --git a/bbot/test/coverage.cfg b/bbot/test/coverage.cfg new file mode 100644 index 0000000000..f029098983 --- /dev/null +++ b/bbot/test/coverage.cfg @@ -0,0 +1,2 @@ +[coverage:run] +parallel = true diff --git a/bbot/test/fastapi_test.py b/bbot/test/fastapi_test.py new file mode 100644 index 0000000000..f0c7b2d789 --- /dev/null +++ b/bbot/test/fastapi_test.py @@ -0,0 +1,17 @@ +from typing import List +from bbot import Scanner +from fastapi import FastAPI, Query + +app = FastAPI() + + +@app.get("/start") +async def start(targets: List[str] = Query(...)): + scanner = Scanner(*targets, modules=["httpx"]) + events = [e async for e in scanner.async_start()] + return [e.json() for e in events] + + +@app.get("/ping") +async def ping(): + return {"status": "ok"} diff --git a/bbot/test/owasp_mastg.apk b/bbot/test/owasp_mastg.apk new file mode 100644 index 0000000000..9a4f638f1c Binary files /dev/null and b/bbot/test/owasp_mastg.apk differ diff --git a/bbot/test/pytest.ini b/bbot/test/pytest.ini deleted file mode 100644 index 3396baacfc..0000000000 --- a/bbot/test/pytest.ini +++ /dev/null @@ -1 +0,0 @@ -log_level=DEBUG \ No newline at end of file diff --git a/bbot/test/run_tests.sh b/bbot/test/run_tests.sh index 0388e27aee..55e7b430bb 100755 --- a/bbot/test/run_tests.sh +++ b/bbot/test/run_tests.sh @@ -3,15 +3,23 @@ bbot_dir="$( realpath "$(dirname "$(dirname "${BASH_SOURCE[0]}")")")" echo -e "[+] BBOT dir: $bbot_dir\n" -echo "[+] Checking code formatting with black" +echo "[+] Checking code formatting with ruff" echo "=======================================" -black --check "$bbot_dir" || exit 1 +ruff format "$bbot_dir" || exit 1 echo -echo "[+] Linting with flake8" +echo "[+] Linting with ruff" echo "=======================" -flake8 --select F,E722 --ignore F403,F405,F541 --per-file-ignores="*/__init__.py:F401,F403" "$bbot_dir" || exit 1 +ruff check "$bbot_dir" || exit 1 echo -echo "[+] Testing with pytest" -pytest --disable-warnings --log-cli-level=ERROR "$bbot_dir" \ No newline at end of file +if [ "${1}x" != "x" ] ; then + MODULES=`echo ${1} | sed -e 's/,/ /g'` + for MODULE in ${MODULES} ; do + echo "[+] Testing ${MODULE} with pytest" + pytest --exitfirst --disable-warnings --log-cli-level=ERROR "$bbot_dir" --cov=bbot/test/test_step_2/test_cli.py --cov-report="term-missing" 
--cov-config="$bbot_dir/test/coverage.cfg" -k ${MODULE} + done +else + echo "[+] Testing all modules with pytest" + pytest --exitfirst --disable-warnings --log-cli-level=ERROR "$bbot_dir" --cov=bbot/test/test_step_2/test_cli.py --cov-report="term-missing" --cov-config="$bbot_dir/test/coverage.cfg" +fi diff --git a/bbot/test/test.conf b/bbot/test/test.conf index 21d253f209..1c6a19dbf7 100644 --- a/bbot/test/test.conf +++ b/bbot/test/test.conf @@ -4,32 +4,47 @@ modules: wordlist: https://raw.githubusercontent.com/danielmiessler/SecLists/master/Discovery/DNS/deepmagic.com-prefixes-top500.txt ffuf: prefix_busting: true - ipneighbor: - test_option: ipneighbor - gowitness: - output_path: /tmp/.bbot_test/gowitness -output_modules: http: url: http://127.0.0.1:11111 username: username password: password bearer: bearer websocket: - url: http://127.0.0.1/ws:11111 + url: ws://127.0.0.1/ws:11111 token: asdf - neo4j: - uri: bolt://127.0.0.1:11111 - human: - test_option: human -internal_modules: - speculate: - test_option: speculate -http_proxy: -ssl_verify: false -scope_search_distance: 1 -scope_report_distance: 1 -scope_dns_search_distance: 1 -plumbus: asdf -dns_debug: true -http_debug: true -keep_scans: 1 \ No newline at end of file +web: + http_proxy: + http_headers: { "test": "header" } + ssl_verify: false + user_agent: "BBOT Test User-Agent" + debug: false +scope: + search_distance: 0 + report_distance: 0 +dns: + disable: false + minimal: true + search_distance: 1 + debug: false + timeout: 1 + wildcard_ignore: + - blacklanternsecurity.com + - fakedomain + - notreal + - google + - google.com + - example.com + - evilcorp.com + - one +deps: + behavior: retry_failed +engine: + debug: true +agent_url: ws://127.0.0.1:8765 +agent_token: test +speculate: false +excavate: false +aggregate: false +cloudcheck: false +omit_event_types: [] +debug: true diff --git a/bbot/test/test_bbot.py b/bbot/test/test_bbot.py deleted file mode 100644 index 5fa3a225a6..0000000000 --- a/bbot/test/test_bbot.py +++ /dev/null @@ -1,1251 +0,0 @@ -import os -import sys -import json -import logging -import ipaddress -from time import sleep - -import bbot.core.logger # noqa: F401 -from bbot.core.errors import * -from .bbot_fixtures import * # noqa: F401 -from bbot.modules import module_loader - -log = logging.getLogger(f"bbot.test") - -# silence stdout -root_logger = logging.getLogger() -for h in root_logger.handlers: - h.addFilter(lambda x: x.levelno != 100) - -os.environ["BBOT_SUDO_PASS"] = "nah" - -available_modules = list(module_loader.configs(type="scan")) -available_output_modules = list(module_loader.configs(type="output")) - - -def test_events(events, scan, helpers, bbot_config): - - assert events.ipv4.type == "IP_ADDRESS" - assert events.ipv6.type == "IP_ADDRESS" - assert events.netv4.type == "IP_RANGE" - assert events.netv6.type == "IP_RANGE" - assert events.domain.type == "DNS_NAME" - assert "domain" in events.domain.tags - assert events.subdomain.type == "DNS_NAME" - assert "subdomain" in events.subdomain.tags - assert events.open_port.type == "OPEN_TCP_PORT" - assert events.url_unverified.type == "URL_UNVERIFIED" - assert events.ipv4_url_unverified.type == "URL_UNVERIFIED" - assert events.ipv6_url_unverified.type == "URL_UNVERIFIED" - assert "" not in events.ipv4 - assert None not in events.ipv4 - assert 1 not in events.ipv4 - assert False not in events.ipv4 - - # ip tests - assert events.ipv4 == scan.make_event("8.8.8.8", dummy=True) - assert "8.8.8.8" in events.ipv4 - assert "8.8.8.8" == events.ipv4 - assert "8.8.8.8" 
in events.netv4 - assert "8.8.8.9" not in events.ipv4 - assert "8.8.9.8" not in events.netv4 - assert "8.8.8.8/31" in events.netv4 - assert "8.8.8.8/30" in events.netv4 - assert "8.8.8.8/29" not in events.netv4 - assert "2001:4860:4860::8888" in events.ipv6 - assert "2001:4860:4860::8888" in events.netv6 - assert "2001:4860:4860::8889" not in events.ipv6 - assert "2002:4860:4860::8888" not in events.netv6 - assert "2001:4860:4860::8888/127" in events.netv6 - assert "2001:4860:4860::8888/126" in events.netv6 - assert "2001:4860:4860::8888/125" not in events.netv6 - assert events.emoji not in events.ipv4 - assert events.emoji not in events.netv6 - assert events.netv6 not in events.emoji - assert "dead::c0de" == scan.make_event(" [DEaD::c0De]:88", "DNS_NAME", dummy=True) - - # hostname tests - assert events.domain.host == "publicapis.org" - assert events.subdomain.host == "api.publicapis.org" - assert events.domain.host_stem == "publicapis" - assert events.subdomain.host_stem == "api.publicapis" - assert "api.publicapis.org" in events.domain - assert "api.publicapis.org" in events.subdomain - assert "fsocie.ty" not in events.domain - assert "fsocie.ty" not in events.subdomain - assert events.subdomain in events.domain - assert events.domain not in events.subdomain - assert not events.ipv4 in events.domain - assert not events.netv6 in events.domain - assert events.emoji not in events.domain - assert events.domain not in events.emoji - assert "evilcorp.com" == scan.make_event(" eViLcorp.COM.:88", "DNS_NAME", dummy=True) - - # url tests - assert scan.make_event("http://evilcorp.com", dummy=True) == scan.make_event("http://evilcorp.com/", dummy=True) - assert events.url_unverified.host == "api.publicapis.org" - assert events.url_unverified in events.domain - assert events.url_unverified in events.subdomain - assert "api.publicapis.org:443" in events.url_unverified - assert "publicapis.org" not in events.url_unverified - assert events.ipv4_url_unverified in events.ipv4 - assert events.ipv4_url_unverified in events.netv4 - assert events.ipv6_url_unverified in events.ipv6 - assert events.ipv6_url_unverified in events.netv6 - assert events.emoji not in events.url_unverified - assert events.emoji not in events.ipv6_url_unverified - assert events.url_unverified not in events.emoji - assert "https://evilcorp.com" == scan.make_event("https://evilcorp.com:443", dummy=True) - assert "http://evilcorp.com" == scan.make_event("http://evilcorp.com:80", dummy=True) - assert "http://evilcorp.com:80/asdf.js" in scan.make_event("http://evilcorp.com/asdf.js", dummy=True) - assert "http://evilcorp.com/asdf.js" in scan.make_event("http://evilcorp.com:80/asdf.js", dummy=True) - assert "https://evilcorp.com:443" == scan.make_event("https://evilcorp.com", dummy=True) - assert "http://evilcorp.com:80" == scan.make_event("http://evilcorp.com", dummy=True) - assert "https://evilcorp.com:80" == scan.make_event("https://evilcorp.com:80", dummy=True) - assert "http://evilcorp.com:443" == scan.make_event("http://evilcorp.com:443", dummy=True) - assert scan.make_event("https://evilcorp.com", dummy=True).with_port().geturl() == "https://evilcorp.com:443/" - assert scan.make_event("https://evilcorp.com:666", dummy=True).with_port().geturl() == "https://evilcorp.com:666/" - assert scan.make_event("https://[bad::c0de]", dummy=True).with_port().geturl() == "https://[bad::c0de]:443/" - assert scan.make_event("https://[bad::c0de]:666", dummy=True).with_port().geturl() == "https://[bad::c0de]:666/" - assert "status-200" in 
scan.make_event("https://evilcorp.com", "URL", events.ipv4_url, tags=["status-200"]).tags - with pytest.raises(ValidationError, match=".*status tag.*"): - scan.make_event("https://evilcorp.com", "URL", events.ipv4_url) - - # http response - assert events.http_response.host == "example.com" - assert events.http_response.port == 80 - assert events.http_response.parsed.scheme == "http" - assert events.http_response.with_port().geturl() == "http://example.com:80/" - - # open port tests - assert events.open_port in events.domain - assert "api.publicapis.org:443" in events.open_port - assert "bad.publicapis.org:443" not in events.open_port - assert "publicapis.org:443" not in events.open_port - assert events.ipv4_open_port in events.ipv4 - assert events.ipv4_open_port in events.netv4 - assert "8.8.8.9" not in events.ipv4_open_port - assert events.ipv6_open_port in events.ipv6 - assert events.ipv6_open_port in events.netv6 - assert "2002:4860:4860::8888" not in events.ipv6_open_port - assert events.emoji not in events.ipv6_open_port - assert events.ipv6_open_port not in events.emoji - - # attribute tests - assert events.ipv4.host == ipaddress.ip_address("8.8.8.8") - assert events.ipv4.port is None - assert events.ipv6.host == ipaddress.ip_address("2001:4860:4860::8888") - assert events.ipv6.port is None - assert events.domain.port is None - assert events.subdomain.port is None - assert events.open_port.host == "api.publicapis.org" - assert events.open_port.port == 443 - assert events.ipv4_open_port.host == ipaddress.ip_address("8.8.8.8") - assert events.ipv4_open_port.port == 443 - assert events.ipv6_open_port.host == ipaddress.ip_address("2001:4860:4860::8888") - assert events.ipv6_open_port.port == 443 - assert events.url_unverified.host == "api.publicapis.org" - assert events.url_unverified.port == 443 - assert events.ipv4_url_unverified.host == ipaddress.ip_address("8.8.8.8") - assert events.ipv4_url_unverified.port == 443 - assert events.ipv6_url_unverified.host == ipaddress.ip_address("2001:4860:4860::8888") - assert events.ipv6_url_unverified.port == 443 - - javascript_event = scan.make_event("http://evilcorp.com/asdf/a.js?b=c#d", "URL_UNVERIFIED", dummy=True) - assert "extension-js" in javascript_event.tags - assert "httpx-only" in javascript_event.tags - - # scope distance - event1 = scan.make_event("1.2.3.4", dummy=True) - assert event1._scope_distance == -1 - event1.make_in_scope() - assert event1._scope_distance == 0 - event2 = scan.make_event("2.3.4.5", source=event1) - assert event2._scope_distance == 1 - event3 = scan.make_event("3.4.5.6", source=event2) - assert event3._scope_distance == 2 - - # internal event tracking - root_event = scan.make_event("0.0.0.0", dummy=True) - internal_event1 = scan.make_event("1.2.3.4", source=root_event, internal=True) - assert internal_event1._internal == True - assert internal_event1._made_internal == True - internal_event1.make_in_scope() - assert internal_event1._internal == False - assert internal_event1._made_internal == False - internal_event2 = scan.make_event("2.3.4.5", source=internal_event1, internal=True) - internal_event3 = scan.make_event("3.4.5.6", source=internal_event2, internal=True) - internal_event4 = scan.make_event("4.5.6.7", source=internal_event3) - source_trail = internal_event4.make_in_scope() - assert internal_event4._internal == False - assert internal_event3._internal == False - assert internal_event2._internal == False - assert len(source_trail) == 2 - assert internal_event2 in source_trail - assert internal_event3 in 
source_trail - - # event sorting - sort1 = scan.make_event("127.0.0.1", dummy=True) - sort1._priority = 1 - sort2 = scan.make_event("127.0.0.1", dummy=True) - sort2._priority = 2 - sort3 = scan.make_event("127.0.0.1", dummy=True) - sort3._priority = 3 - mod1 = helpers._make_dummy_module(name="MOD1", _type="ASDF") - mod1._priority = 1 - mod2 = helpers._make_dummy_module(name="MOD2", _type="ASDF") - mod2._priority = 2 - mod3 = helpers._make_dummy_module(name="MOD3", _type="ASDF") - mod3._priority = 3 - sort1.module = mod1 - sort2.module = mod2 - sort3.module = mod3 - assert 2 < sort1.priority < 2.01 - assert sort1 < sort2 - assert sort1 < sort3 - assert 4 < sort2.priority < 4.01 - assert sort2 > sort1 - assert sort2 < sort3 - assert 6 < sort3.priority < 6.01 - assert sort3 > sort1 - assert sort3 > sort2 - assert tuple(sorted([sort3, sort2, sort1])) == (sort1, sort2, sort3) - - # test validation - test_vuln = scan.make_event( - {"host": "EVILcorp.com", "severity": "iNfo ", "description": "asdf"}, "VULNERABILITY", dummy=True - ) - assert test_vuln.data["host"] == "evilcorp.com" - assert test_vuln.data["severity"] == "INFO" - test_vuln2 = scan.make_event( - {"host": "192.168.1.1", "severity": "iNfo ", "description": "asdf"}, "VULNERABILITY", dummy=True - ) - assert json.loads(test_vuln2.data_human)["severity"] == "INFO" - assert test_vuln2.host.is_private - with pytest.raises(ValidationError, match=".*severity.*\n.*field required.*"): - test_vuln = scan.make_event({"host": "evilcorp.com", "description": "asdf"}, "VULNERABILITY", dummy=True) - with pytest.raises(ValidationError, match=".*host.*\n.*Invalid host.*"): - test_vuln = scan.make_event( - {"host": "!@#$", "severity": "INFO", "description": "asdf"}, "VULNERABILITY", dummy=True - ) - with pytest.raises(ValidationError, match=".*severity.*\n.*Invalid severity.*"): - test_vuln = scan.make_event( - {"host": "evilcorp.com", "severity": "WACK", "description": "asdf"}, "VULNERABILITY", dummy=True - ) - - -def test_manager(bbot_config): - from bbot.scanner import Scanner - - # test _emit_event - results = [] - success_callback = lambda e: results.append("success") - scan1 = Scanner("127.0.0.1", config=bbot_config) - scan1.status = "RUNNING" - scan1.manager.queue_event = lambda e: results.append(e) - manager = scan1.manager - localhost = scan1.make_event("127.0.0.1", source=scan1.root_event) - - class DummyModule1: - _type = "output" - suppress_dupes = True - - class DummyModule2: - _type = "DNS" - suppress_dupes = True - - localhost.module = DummyModule1() - # test abort_if - manager._emit_event(localhost, abort_if=lambda e: e.module._type == "output") - assert len(results) == 0 - manager._emit_event( - localhost, on_success_callback=success_callback, abort_if=lambda e: e.module._type == "plumbus" - ) - assert localhost in results - assert "success" in results - results.clear() - # test deduplication - manager._emit_event(localhost, on_success_callback=success_callback) - assert len(results) == 0 - # test dns resolution - googledns = scan1.make_event("8.8.8.8", source=scan1.root_event) - googledns.module = DummyModule2() - googledns.source = "asdf" - googledns.make_in_scope() - event_children = [] - manager.emit_event = lambda e, *args, **kwargs: event_children.append(e) - manager._emit_event(googledns) - assert len(event_children) > 0 - assert googledns in results - results.clear() - event_children.clear() - # same dns event - manager._emit_event(googledns) - assert len(results) == 0 - assert len(event_children) == 0 - # same dns event but with 
_force_output - googledns._force_output = True - manager._emit_event(googledns) - assert googledns in results - assert len(event_children) == 0 - googledns._force_output = False - results.clear() - # same dns event but different source - googledns.source_id = "fdsa" - manager._emit_event(googledns) - assert len(event_children) == 0 - assert googledns in results - - # event filtering based on scope_distance - output_queue = [] - module_queue = [] - manager_queue = [] - scan1 = Scanner("127.0.0.1", modules=["ipneighbor"], output_modules=["json"], config=bbot_config) - scan1.status = "RUNNING" - scan1.load_modules() - manager = scan1.manager - test_event1 = scan1.make_event("1.2.3.4", source=scan1.root_event) - test_event1.make_in_scope() - test_event2 = scan1.make_event("2.3.4.5", source=test_event1) - test_event3 = scan1.make_event("3.4.5.6", source=test_event2) - - scan1.manager.queue_event = lambda e: manager_queue.append(e) - - scan1.scope_search_distance = 1 - scan1.scope_report_distance = 0 - assert test_event1.scope_distance == 0 - manager._emit_event(test_event1) - assert test_event1 in manager_queue - assert test_event1._internal == False - assert test_event2.scope_distance == 1 - manager._emit_event(test_event2) - assert test_event2 in manager_queue - assert test_event2._internal == True - manager_queue.clear() - manager.events_accepted.clear() - - scan1.modules["json"].queue_event = lambda e: output_queue.append(e) - scan1.modules["ipneighbor"].queue_event = lambda e: module_queue.append(e) - - # in-scope event - assert test_event1.scope_distance == 0 - manager.distribute_event(test_event1) - assert hash(test_event1) in manager.events_distributed - assert test_event1 in module_queue - assert test_event1 in output_queue - module_queue.clear() - output_queue.clear() - # duplicate event - manager.distribute_event(test_event1) - assert test_event1 not in module_queue - assert test_event1 in output_queue - module_queue.clear() - output_queue.clear() - manager.events_distributed.clear() - # event.scope_distance == 1 - assert test_event2.scope_distance == 1 - manager.distribute_event(test_event2) - assert test_event2 in module_queue - assert test_event2 not in output_queue - assert test_event2._internal == True - assert test_event2._force_output == False - assert scan1.modules["json"]._filter_event(test_event2) == False - module_queue.clear() - output_queue.clear() - manager.events_distributed.clear() - # event.scope_distance == 2 - assert test_event3.scope_distance == 2 - manager.distribute_event(test_event3) - assert test_event3 not in module_queue - assert test_event3 not in output_queue - module_queue.clear() - output_queue.clear() - manager.events_distributed.clear() - # event.scope_distance == 2 and _force_output == True - test_event3._force_output = True - assert test_event3.scope_distance == 2 - manager.distribute_event(test_event3) - assert test_event3 not in module_queue - assert test_event3 in output_queue - - -def test_curl(helpers): - - helpers.curl() - helpers.curl(url="http://www.example.com", ignore_bbot_global_settings=True) - helpers.curl(url="http://www.example.com", head_mode=True) - helpers.curl(url="http://www.example.com", raw_body=True) - helpers.curl( - url="http://www.example.com", - raw_path=True, - headers={"test": "test", "test2": ["test2"]}, - ignore_bbot_global_settings=False, - post_data={"test": "test"}, - method="POST", - cookies={"test": "test"}, - path_override="/index.html", - ) - - -def test_helpers(patch_requests, patch_commands, helpers, scan): - - 
old_run, old_run_live = patch_commands - request, download = patch_requests - - ### URL ### - bad_urls = ( - "http://e.co/index.html", - "http://e.co/u/1111/info", - "http://e.co/u/2222/info", - "http://e.co/u/3333/info", - "http://e.co/u/4444/info", - "http://e.co/u/5555/info", - ) - new_urls = tuple(helpers.collapse_urls(bad_urls, threshold=4)) - assert len(new_urls) == 2 - new_urls = tuple(sorted([u.geturl() for u in helpers.collapse_urls(bad_urls, threshold=5)])) - assert new_urls == bad_urls - - new_url = helpers.add_get_params("http://evilcorp.com/a?p=1&q=2", {"r": 3, "s": "asdf"}).geturl() - query = dict(s.split("=") for s in new_url.split("?")[-1].split("&")) - query = tuple(sorted(query.items(), key=lambda x: x[0])) - assert query == ( - ("p", "1"), - ("q", "2"), - ("r", "3"), - ("s", "asdf"), - ) - assert tuple(sorted(helpers.get_get_params("http://evilcorp.com/a?p=1&q=2#frag").items())) == ( - ("p", ["1"]), - ("q", ["2"]), - ) - - assert helpers.clean_url("http://evilcorp.com:80").geturl() == "http://evilcorp.com/" - assert helpers.clean_url("http://evilcorp.com/asdf?a=asdf#frag").geturl() == "http://evilcorp.com/asdf" - assert helpers.clean_url("http://evilcorp.com//asdf").geturl() == "http://evilcorp.com/asdf" - - assert helpers.url_depth("http://evilcorp.com/asdf/user/") == 2 - assert helpers.url_depth("http://evilcorp.com/asdf/user") == 2 - assert helpers.url_depth("http://evilcorp.com/asdf/") == 1 - assert helpers.url_depth("http://evilcorp.com/asdf") == 1 - assert helpers.url_depth("http://evilcorp.com/") == 0 - assert helpers.url_depth("http://evilcorp.com") == 0 - - ### HTTP COMPARE ### - compare_helper = helpers.http_compare("http://www.example.com") - compare_helper.compare("http://www.example.com", headers={"asdf": "asdf"}) - compare_helper.compare("http://www.example.com", cookies={"asdf": "asdf"}) - compare_helper.compare("http://www.example.com", check_reflection=True) - compare_helper.compare_body({"asdf": "fdsa"}, {"fdsa": "asdf"}) - for mode in ("getparam", "header", "cookie"): - compare_helper.canary_check("http://www.example.com", mode=mode) == True - - ### MISC ### - assert helpers.is_domain("evilcorp.co.uk") - assert not helpers.is_domain("www.evilcorp.co.uk") - assert helpers.is_subdomain("www.evilcorp.co.uk") - assert not helpers.is_subdomain("evilcorp.co.uk") - assert helpers.is_url("http://evilcorp.co.uk/asdf?a=b&c=d#asdf") - assert helpers.is_url("https://evilcorp.co.uk/asdf?a=b&c=d#asdf") - assert not helpers.is_url("https:/evilcorp.co.uk/asdf?a=b&c=d#asdf") - assert not helpers.is_url("/evilcorp.co.uk/asdf?a=b&c=d#asdf") - assert not helpers.is_url("ftp://evilcorp.co.uk") - assert helpers.parent_domain("www.evilcorp.co.uk") == "evilcorp.co.uk" - assert helpers.parent_domain("evilcorp.co.uk") == "evilcorp.co.uk" - assert helpers.parent_domain("localhost") == "localhost" - assert list(helpers.domain_parents("test.www.evilcorp.co.uk")) == ["www.evilcorp.co.uk", "evilcorp.co.uk"] - assert list(helpers.domain_parents("www.evilcorp.co.uk", include_self=True)) == [ - "www.evilcorp.co.uk", - "evilcorp.co.uk", - ] - assert list(helpers.domain_parents("evilcorp.co.uk", include_self=True)) == ["evilcorp.co.uk"] - assert list(helpers.ip_network_parents("0.0.0.0/2")) == [ - ipaddress.ip_network("0.0.0.0/1"), - ipaddress.ip_network("0.0.0.0/0"), - ] - assert list(helpers.ip_network_parents("0.0.0.0/1", include_self=True)) == [ - ipaddress.ip_network("0.0.0.0/1"), - ipaddress.ip_network("0.0.0.0/0"), - ] - assert helpers.is_ip("127.0.0.1") - assert not 
helpers.is_ip("127.0.0.0.1") - - assert helpers.domain_stem("evilcorp.co.uk") == "evilcorp" - assert helpers.domain_stem("www.evilcorp.co.uk") == "www.evilcorp" - - assert helpers.host_in_host("www.evilcorp.com", "evilcorp.com") == True - assert helpers.host_in_host("asdf.www.evilcorp.com", "evilcorp.com") == True - assert helpers.host_in_host("evilcorp.com", "www.evilcorp.com") == False - assert helpers.host_in_host("evilcorp.com", "evilcorp.com") == True - assert helpers.host_in_host("evilcorp.com", "eevilcorp.com") == False - assert helpers.host_in_host("eevilcorp.com", "evilcorp.com") == False - assert helpers.host_in_host("evilcorp.com", "evilcorp") == False - assert helpers.host_in_host("evilcorp", "evilcorp.com") == False - assert helpers.host_in_host("evilcorp.com", "com") == True - - assert tuple(helpers.extract_emails("asdf@asdf.com\nT@t.Com&a=a@a.com__ b@b.com")) == ( - "asdf@asdf.com", - "t@t.com", - "a@a.com", - "b@b.com", - ) - - assert helpers.split_host_port("https://evilcorp.co.uk") == ("evilcorp.co.uk", 443) - assert helpers.split_host_port("http://evilcorp.co.uk:666") == ("evilcorp.co.uk", 666) - assert helpers.split_host_port("evilcorp.co.uk:666") == ("evilcorp.co.uk", 666) - assert helpers.split_host_port("evilcorp.co.uk") == ("evilcorp.co.uk", None) - assert helpers.split_host_port("d://wat:wat") == ("wat", None) - assert helpers.split_host_port("https://[dead::beef]:8338") == (ipaddress.ip_address("dead::beef"), 8338) - extracted_words = helpers.extract_words("blacklanternsecurity") - assert "black" in extracted_words - # assert "blacklantern" in extracted_words - # assert "lanternsecurity" in extracted_words - # assert "blacklanternsecurity" in extracted_words - assert "bls" in extracted_words - ipv4_netloc = helpers.make_netloc("192.168.1.1", 80) - assert ipv4_netloc == "192.168.1.1:80" - ipv6_netloc = helpers.make_netloc("dead::beef", "443") - assert ipv6_netloc == "[dead::beef]:443" - - assert helpers.get_file_extension("https://evilcorp.com/evilcorp.com/test/asdf.TXT") == "txt" - assert helpers.get_file_extension("/etc/conf/test.tar.gz") == "gz" - assert helpers.get_file_extension("/etc/passwd") == "" - - assert list(helpers.search_dict_by_key("asdf", {"asdf": "fdsa", 4: [{"asdf": 5}]})) == ["fdsa", 5] - assert list(helpers.search_dict_by_key("asdf", {"wat": {"asdf": "fdsa"}})) == ["fdsa"] - assert list(helpers.search_dict_by_key("asdf", [{"wat": {"nope": 1}}, {"wat": [{"asdf": "fdsa"}]}])) == ["fdsa"] - assert not list(helpers.search_dict_by_key("asdf", [{"wat": {"nope": 1}}, {"wat": [{"fdsa": "asdf"}]}])) - assert not list(helpers.search_dict_by_key("asdf", "asdf")) - - filtered_dict = helpers.filter_dict( - {"modules": {"c99": {"api_key": "1234", "filterme": "asdf"}, "ipneighbor": {"test": "test"}}}, "api_key" - ) - assert "api_key" in filtered_dict["modules"]["c99"] - assert "filterme" not in filtered_dict["modules"]["c99"] - assert "ipneighbor" not in filtered_dict["modules"] - - filtered_dict2 = helpers.filter_dict( - {"modules": {"c99": {"api_key": "1234", "filterme": "asdf"}, "ipneighbor": {"test": "test"}}}, "c99" - ) - assert "api_key" in filtered_dict2["modules"]["c99"] - assert "filterme" in filtered_dict2["modules"]["c99"] - assert "ipneighbor" not in filtered_dict2["modules"] - - filtered_dict3 = helpers.filter_dict( - {"modules": {"c99": {"api_key": "1234", "filterme": "asdf"}, "ipneighbor": {"test": "test"}}}, - "key", - fuzzy=True, - ) - assert "api_key" in filtered_dict3["modules"]["c99"] - assert "filterme" not in 
filtered_dict3["modules"]["c99"] - assert "ipneighbor" not in filtered_dict3["modules"] - - cleaned_dict = helpers.clean_dict( - {"modules": {"c99": {"api_key": "1234", "filterme": "asdf"}, "ipneighbor": {"test": "test"}}}, "api_key" - ) - assert "api_key" not in cleaned_dict["modules"]["c99"] - assert "filterme" in cleaned_dict["modules"]["c99"] - assert "ipneighbor" in cleaned_dict["modules"] - - cleaned_dict2 = helpers.clean_dict( - {"modules": {"c99": {"api_key": "1234", "filterme": "asdf"}, "ipneighbor": {"test": "test"}}}, "c99" - ) - assert "c99" not in cleaned_dict2["modules"] - assert "ipneighbor" in cleaned_dict2["modules"] - - cleaned_dict3 = helpers.clean_dict( - {"modules": {"c99": {"api_key": "1234", "filterme": "asdf"}, "ipneighbor": {"test": "test"}}}, - "key", - fuzzy=True, - ) - assert "api_key" not in cleaned_dict3["modules"]["c99"] - assert "filterme" in cleaned_dict3["modules"]["c99"] - assert "ipneighbor" in cleaned_dict3["modules"] - - replaced = helpers.search_format_dict({"asdf": [{"wat": {"here": "{replaceme}!"}}, {500: True}]}, replaceme="asdf") - assert replaced["asdf"][1][500] == True - assert replaced["asdf"][0]["wat"]["here"] == "asdf!" - - assert helpers.split_list([1, 2, 3, 4, 5]) == [[1, 2], [3, 4, 5]] - assert list(helpers.grouper("ABCDEFG", 3)) == [["A", "B", "C"], ["D", "E", "F"], ["G"]] - - assert len(helpers.rand_string(3)) == 3 - assert len(helpers.rand_string(1)) == 1 - assert len(helpers.rand_string(0)) == 0 - assert type(helpers.rand_string(0)) == str - - test_file = Path(scan.config["home"]) / "testfile.asdf" - test_file.touch() - - assert test_file.is_file() - backup = helpers.backup_file(test_file) - assert backup.name == "testfile.1.asdf" - assert not test_file.exists() - assert backup.is_file() - test_file.touch() - backup2 = helpers.backup_file(test_file) - assert backup2.name == "testfile.1.asdf" - assert not test_file.exists() - assert backup2.is_file() - older_backup = Path(scan.config["home"]) / "testfile.2.asdf" - assert older_backup.is_file() - older_backup.unlink() - backup.unlink() - - with open(test_file, "w") as f: - f.write("asdf\nfdsa") - - assert "asdf" in helpers.str_or_file(str(test_file)) - assert "nope" in helpers.str_or_file("nope") - assert tuple(helpers.chain_lists([str(test_file), "nope"], try_files=True)) == ("asdf", "fdsa", "nope") - assert test_file.is_file() - - with pytest.raises(DirectoryCreationError, match="Failed to create.*"): - helpers.mkdir(test_file) - - helpers._rm_at_exit(test_file) - assert not test_file.exists() - - ### VALIDATORS ### - # hosts - assert helpers.validators.validate_host(" evilCorp.COM") == "evilcorp.com" - assert helpers.validators.validate_host("LOCALHOST ") == "localhost" - assert helpers.validators.validate_host(" 192.168.1.1") == "192.168.1.1" - assert helpers.validators.validate_host(" Dead::c0dE ") == "dead::c0de" - assert helpers.validators.soft_validate(" evilCorp.COM", "host") == True - assert helpers.validators.soft_validate("!@#$", "host") == False - with pytest.raises(ValueError): - assert helpers.validators.validate_host("!@#$") - # ports - assert helpers.validators.validate_port(666) == 666 - assert helpers.validators.validate_port(666666) == 65535 - assert helpers.validators.soft_validate(666, "port") == True - assert helpers.validators.soft_validate("!@#$", "port") == False - with pytest.raises(ValueError): - helpers.validators.validate_port("asdf") - # urls - assert helpers.validators.validate_url(" httP://evilcorP.com/asdf?a=b&c=d#e") == "http://evilcorp.com/asdf" - 
assert ( - helpers.validators.validate_url_parsed(" httP://evilcorP.com/asdf?a=b&c=d#e").geturl() - == "http://evilcorp.com/asdf" - ) - assert helpers.validators.soft_validate(" httP://evilcorP.com/asdf?a=b&c=d#e", "url") == True - assert helpers.validators.soft_validate("!@#$", "url") == False - with pytest.raises(ValueError): - helpers.validators.validate_url("!@#$") - # severities - assert helpers.validators.validate_severity(" iNfo") == "INFO" - assert helpers.validators.soft_validate(" iNfo", "severity") == True - assert helpers.validators.soft_validate("NOPE", "severity") == False - with pytest.raises(ValueError): - helpers.validators.validate_severity("NOPE") - # emails - assert helpers.validators.validate_email(" bOb@eViLcorp.COM") == "bob@evilcorp.com" - assert helpers.validators.soft_validate(" bOb@eViLcorp.COM", "email") == True - assert helpers.validators.soft_validate("!@#$", "email") == False - with pytest.raises(ValueError): - helpers.validators.validate_email("!@#$") - - assert type(helpers.make_date()) == str - - def raise_filenotfound(): - raise FileNotFoundError("asdf") - - def raise_brokenpipe(): - raise BrokenPipeError("asdf") - - from bbot.core.helpers import command - - command.catch(raise_filenotfound) - command.catch(raise_brokenpipe) - - ### COMMAND ### - assert "plumbus\n" in old_run(helpers, ["echo", "plumbus"], text=True).stdout - assert "plumbus\n" in list(old_run_live(helpers, ["echo", "plumbus"])) - expected_output = ["lumbus\n", "plumbus\n", "rumbus\n"] - assert list(old_run_live(helpers, ["cat"], input="lumbus\nplumbus\nrumbus")) == expected_output - - def plumbus_generator(): - yield "lumbus" - yield "plumbus" - - assert "plumbus\n" in list(old_run_live(helpers, ["cat"], input=plumbus_generator())) - tempfile = helpers.tempfile(("lumbus", "plumbus"), pipe=True) - with open(tempfile) as f: - assert "plumbus\n" in list(f) - tempfile = helpers.tempfile(("lumbus", "plumbus"), pipe=False) - with open(tempfile) as f: - assert "plumbus\n" in list(f) - - ### CACHE ### - helpers.cache_put("string", "wat") - helpers.cache_put("binary", b"wat") - assert helpers.cache_get("string") == "wat" - assert helpers.cache_get("binary") == "wat" - assert helpers.cache_get("binary", text=False) == b"wat" - cache_filename = helpers.cache_filename("string") - (m, i, d, n, u, g, sz, atime, mtime, ctime) = os.stat(str(cache_filename)) - # change modified time to be 10 days in the past - os.utime(str(cache_filename), times=(atime, mtime - (3600 * 24 * 10))) - assert helpers.cache_get("string", cache_hrs=24 * 7) is None - assert helpers.cache_get("string", cache_hrs=24 * 14) == "wat" - - cache_dict = helpers.CacheDict(max_size=10) - cache_dict.put("1", 2) - assert cache_dict["1"] == 2 - assert cache_dict.get("1") == 2 - assert len(cache_dict) == 1 - cache_dict["2"] = 3 - assert cache_dict["2"] == 3 - assert cache_dict.get("2") == 3 - assert len(cache_dict) == 2 - for i in range(20): - cache_dict[str(i)] = i + 1 - assert len(cache_dict) == 10 - assert tuple(cache_dict) == tuple(hash(str(x)) for x in range(10, 20)) - - ### WEB ### - assert getattr(request(helpers, "https://api.publicapis.org/health"), "text", "").startswith("{") - assert getattr(request(helpers, "https://api.publicapis.org/health", cache_for=60), "text", "").startswith("{") - filename = download(helpers, "https://api.publicapis.org/health", cache_hrs=1) - assert Path(str(filename)).is_file() - assert helpers.is_cached("https://api.publicapis.org/health") - - assert 
helpers.wordlist("https://api.publicapis.org/healthasdf").is_file() - test_file = Path(scan.config["home"]) / "testfile.asdf" - with open(test_file, "w") as f: - for i in range(100): - f.write(f"{i}\n") - assert len(list(open(test_file).readlines())) == 100 - assert helpers.wordlist(test_file).is_file() - truncated_file = helpers.wordlist(test_file, lines=10) - assert truncated_file.is_file() - assert len(list(open(truncated_file).readlines())) == 10 - with pytest.raises(WordlistError): - helpers.wordlist("/tmp/a9pseoysadf/asdkgjaosidf") - test_file.unlink() - - ### DNS ### - # resolution - assert all([helpers.is_ip(i) for i in helpers.resolve("scanme.nmap.org")]) - assert "dns.google" in helpers.resolve("8.8.8.8") - assert "dns.google" in helpers.resolve("2001:4860:4860::8888") - resolved_ips = helpers.resolve("dns.google") - assert "2001:4860:4860::8888" in resolved_ips - assert "8.8.8.8" in resolved_ips - assert any([helpers.is_subdomain(h) for h in helpers.resolve("google.com", type="mx")]) - v6_ips = helpers.resolve("www.google.com", type="AAAA") - assert all([i.version == 6 for i in [ipaddress.ip_address(_) for _ in v6_ips]]) - assert not helpers.resolve(f"{helpers.rand_string(length=30)}.com") - # batch resolution - batch_results = list(helpers.resolve_batch(["8.8.8.8", "dns.google"])) - assert len(batch_results) == 2 - batch_results = dict(batch_results) - assert any([x in batch_results["dns.google"] for x in ("8.8.8.8", "8.8.4.4")]) - assert "dns.google" in batch_results["8.8.8.8"] - # "any" type - resolved = helpers.resolve("google.com", type="any") - assert any([helpers.is_subdomain(h) for h in resolved]) - # wildcards - assert helpers.is_wildcard("asdf.wat.blacklanternsecurity.github.io") == (True, "_wildcard.github.io") - assert hash("github.io") in helpers.dns._wildcard_cache - assert helpers.dns._wildcard_cache[hash("github.io")] == True - assert helpers.is_wildcard("asdf.asdf.asdf.github.io") == (True, "_wildcard.github.io") - assert helpers.is_wildcard("github.io") == (False, "github.io") - assert helpers.is_wildcard("mail.google.com") == (False, "mail.google.com") - wildcard_event1 = scan.make_event("wat.asdf.fdsa.github.io", "DNS_NAME", dummy=True) - wildcard_event2 = scan.make_event("wats.asd.fdsa.github.io", "DNS_NAME", dummy=True) - children, event_tags1, event_whitelisted1, event_blacklisted1 = scan.helpers.resolve_event(wildcard_event1) - children, event_tags2, event_whitelisted2, event_blacklisted2 = scan.helpers.resolve_event(wildcard_event2) - assert "wildcard" in event_tags1 - assert "wildcard" in event_tags2 - assert wildcard_event1.data == "_wildcard.github.io" - assert wildcard_event2.data == "_wildcard.github.io" - assert event_tags1 == event_tags2 - assert event_whitelisted1 == event_whitelisted2 - assert event_blacklisted1 == event_blacklisted2 - - msg = "Ignore this error, it belongs here" - - def raise_e(): - raise Exception(msg) - - def raise_k(): - raise KeyboardInterrupt(msg) - - def raise_s(): - raise ScanCancelledError(msg) - - def raise_b(): - raise BrokenPipeError(msg) - - helpers.dns._catch_keyboardinterrupt(raise_e) - helpers.dns._catch_keyboardinterrupt(raise_k) - scan.manager.catch(raise_e, _on_finish_callback=raise_e) - scan.manager.catch(raise_k) - scan.manager.catch(raise_s) - scan.manager.catch(raise_b) - - ## NTLM - testheader = 
"TlRMTVNTUAACAAAAHgAeADgAAAAVgorilwL+bvnVipUAAAAAAAAAAJgAmABWAAAACgBjRQAAAA9XAEkATgAtAFMANAAyAE4ATwBCAEQAVgBUAEsAOAACAB4AVwBJAE4ALQBTADQAMgBOAE8AQgBEAFYAVABLADgAAQAeAFcASQBOAC0AUwA0ADIATgBPAEIARABWAFQASwA4AAQAHgBXAEkATgAtAFMANAAyAE4ATwBCAEQAVgBUAEsAOAADAB4AVwBJAE4ALQBTADQAMgBOAE8AQgBEAFYAVABLADgABwAIAHUwOZlfoNgBAAAAAA==" - decoded = helpers.ntlm.ntlmdecode(testheader) - assert decoded["NetBIOS_Domain_Name"] == "WIN-S42NOBDVTK8" - assert decoded["NetBIOS_Computer_Name"] == "WIN-S42NOBDVTK8" - assert decoded["DNS_Domain_name"] == "WIN-S42NOBDVTK8" - assert decoded["FQDN"] == "WIN-S42NOBDVTK8" - assert decoded["Timestamp"] == b"u09\x99_\xa0\xd8\x01" - with pytest.raises(NTLMError): - helpers.ntlm.ntlmdecode("asdf") - - # interact.sh - interactsh_client = helpers.interactsh() - with pytest.raises(InteractshError): - interactsh_client.register() - assert not list(interactsh_client.poll()) - with pytest.raises(InteractshError): - interactsh_client.deregister() - - -def test_dns_resolvers(patch_requests, helpers): - assert type(helpers.dns.resolvers) == set - assert hasattr(helpers.dns.resolver_file, "is_file") - assert hasattr(helpers.dns.mass_resolver_file, "is_file") - - -def test_word_cloud(helpers, bbot_config): - number_mutations = helpers.word_cloud.get_number_mutations("base2_p013", n=5, padding=2) - assert "base0_p013" in number_mutations - assert "base7_p013" in number_mutations - assert "base8_p013" not in number_mutations - assert "base2_p008" in number_mutations - assert "base2_p007" not in number_mutations - assert "base2_p018" in number_mutations - assert "base2_p0134" in number_mutations - assert "base2_p0135" not in number_mutations - - permutations = helpers.word_cloud.mutations("_base", numbers=1) - assert ("_base", "dev") in permutations - assert ("dev", "_base") in permutations - - # saving and loading - from bbot.scanner.scanner import Scanner - - scan1 = Scanner("127.0.0.1", config=bbot_config) - word_cloud = scan1.helpers.word_cloud - word_cloud.add_word("lantern") - word_cloud.add_word("black") - word_cloud.add_word("black") - word_cloud.save() - with open(word_cloud.default_filename) as f: - word_cloud_content = [l.rstrip() for l in f.read().splitlines()] - assert len(word_cloud_content) == 2 - assert "2\tblack" in word_cloud_content - assert "1\tlantern" in word_cloud_content - word_cloud.save(limit=1) - with open(word_cloud.default_filename) as f: - word_cloud_content = [l.rstrip() for l in f.read().splitlines()] - assert len(word_cloud_content) == 1 - assert "2\tblack" in word_cloud_content - assert "1\tlantern" not in word_cloud_content - word_cloud.clear() - with open(word_cloud.default_filename, "w") as f: - f.write("plumbus\nrumbus") - word_cloud.load() - assert word_cloud["plumbus"] == 1 - assert word_cloud["rumbus"] == 1 - - -def test_modules(patch_requests, patch_commands, scan, helpers, events, bbot_config): - - # base module _filter_event() - from bbot.modules.base import BaseModule - - base_module = BaseModule(scan) - localhost2 = scan.make_event("127.0.0.2", source=events.subdomain) - localhost2.make_in_scope() - # base cases - assert base_module._filter_event("FINISHED") == True - assert base_module._filter_event("WAT") == False - base_module._watched_events = None - base_module.watched_events = ["*"] - assert base_module._filter_event("WAT") == False - assert base_module._filter_event(events.emoji) == True - base_module._watched_events = None - base_module.watched_events = ["IP_ADDRESS"] - assert base_module._filter_event(events.ipv4) == True - assert 
base_module._filter_event(events.domain) == False - assert base_module._filter_event(events.localhost) == True - assert base_module._filter_event(localhost2) == True - # target only - base_module.target_only = True - assert base_module._filter_event(localhost2) == False - localhost2.tags.add("target") - assert base_module._filter_event(localhost2) == True - base_module.target_only = False - # in scope only - localhost3 = scan.make_event("127.0.0.2", source=events.subdomain) - base_module.in_scope_only = True - assert base_module._filter_event(events.localhost) == True - assert base_module._filter_event(localhost3) == False - base_module.in_scope_only = False - # scope distance - base_module.scope_distance_modifier = 0 - localhost2._scope_distance = 0 - assert base_module._filter_event(localhost2) == True - localhost2._scope_distance = 1 - assert base_module._filter_event(localhost2) == True - localhost2._scope_distance = 2 - assert base_module._filter_event(localhost2) == False - localhost2._scope_distance = -1 - assert base_module._filter_event(localhost2) == False - base_module.scope_distance_modifier = -1 - # special case for IPs and ranges - base_module.watched_events = ["IP_ADDRESS", "IP_RANGE"] - ip_range = scan.make_event("127.0.0.0/24", dummy=True) - localhost4 = scan.make_event("127.0.0.1", source=ip_range) - localhost4.make_in_scope() - localhost4.module = "plumbus" - assert base_module._filter_event(localhost4) == True - localhost4.module = "speculate" - assert base_module._filter_event(localhost4) == False - - from bbot.scanner.scanner import Scanner - - scan2 = Scanner(modules=list(available_modules), output_modules=list(available_output_modules), config=bbot_config) - scan2.load_modules() - scan2.status = "RUNNING" - - # attributes, descriptions, etc. 
- for module_name, module in scan2.modules.items(): - # flags - assert module._type in ("internal", "output", "scan") - - # module preloading - all_preloaded = module_loader.preloaded() - assert "massdns" in all_preloaded - assert "DNS_NAME" in all_preloaded["massdns"]["watched_events"] - assert "DNS_NAME" in all_preloaded["massdns"]["produced_events"] - assert "subdomain-enum" in all_preloaded["massdns"]["flags"] - assert "wordlist" in all_preloaded["massdns"]["config"] - assert type(all_preloaded["massdns"]["config"]["max_resolvers"]) == int - assert all_preloaded["sslcert"]["deps"]["pip"] - assert all_preloaded["sslcert"]["deps"]["apt"] - assert all_preloaded["massdns"]["deps"]["ansible"] - - for module_name, preloaded in all_preloaded.items(): - # either active or passive and never both - flags = preloaded.get("flags", []) - if preloaded["type"] == "scan": - assert ("active" in flags and not "passive" in flags) or ( - not "active" in flags and "passive" in flags - ), f'module "{module_name}" must have either "active" or "passive" flag' - assert preloaded["meta"]["description"], f"{module_name} must have a description" - - # attribute checks - watched_events = preloaded.get("watched_events") - produced_events = preloaded.get("produced_events") - - assert type(watched_events) == list - assert type(produced_events) == list - assert watched_events, f"{module_name}.watched_events must not be empty" - assert type(watched_events) == list, f"{module_name}.watched_events must be of type list" - assert type(produced_events) == list, f"{module_name}.produced_events must be of type list" - assert all( - [type(t) == str for t in watched_events] - ), f"{module_name}.watched_events entries must be of type string" - assert all( - [type(t) == str for t in produced_events] - ), f"{module_name}.produced_events entries must be of type string" - - assert type(preloaded.get("deps_pip", [])) == list, f"{module_name}.deps_pipe must be of type list" - assert type(preloaded.get("deps_apt", [])) == list, f"{module_name}.deps_apt must be of type list" - assert type(preloaded.get("deps_shell", [])) == list, f"{module_name}.deps_shell must be of type list" - assert type(preloaded.get("options", {})) == dict, f"{module_name}.options must be of type list" - assert type(preloaded.get("options_desc", {})) == dict, f"{module_name}.options_desc must be of type list" - # options must have descriptions - assert set(preloaded.get("options", {})) == set( - preloaded.get("options_desc", {}) - ), f"{module_name}.options do not match options_desc" - # descriptions most not be blank - assert all( - o for o in preloaded.get("options_desc", {}).values() - ), f"{module_name}.options_desc descriptions must not be blank" - - # setups - futures = {} - for module_name, module in scan2.modules.items(): - log.info(f"Testing {module_name}.setup()") - future = scan2._thread_pool.submit_task(module.setup) - futures[future] = module - for future in helpers.as_completed(futures): - module = futures[future] - result = future.result() - if type(result) == tuple: - assert len(result) == 2, f"if tuple, {module.name}.setup() return value must have length of 2" - status, msg = result - assert status in ( - True, - False, - None, - ), f"if tuple, the first element of {module.name}.setup()'s return value must be either True, False, or None" - assert ( - type(msg) == str - ), f"if tuple, the second element of {module.name}.setup()'s return value must be a message of type str" - else: - assert result in ( - True, - False, - None, - ), 
f"{module.name}.setup() must return a status of either True, False, or None" - if result == False: - module.set_error_state() - - futures.clear() - - # handle_event / handle_batch - futures = {} - for module_name, module in scan2.modules.items(): - module.emit_event = lambda *args, **kwargs: None - module._filter = lambda *args, **kwargs: True - events_to_submit = [e for e in events.all if e.type in module.watched_events] - if module.batch_size > 1: - log.info(f"Testing {module_name}.handle_batch()") - future = scan2._thread_pool.submit_task(module.handle_batch, *events_to_submit) - futures[future] = module - else: - for e in events_to_submit: - log.info(f"Testing {module_name}.handle_event()") - future = scan2._thread_pool.submit_task(module.handle_event, e) - futures[future] = module - for future in helpers.as_completed(futures): - try: - assert future.result() == None - except Exception as e: - import traceback - - module = futures[future] - assert module.errored == True, f'Error in module "{module}": {e}\n{traceback.format_exc()}' - futures.clear() - - # finishes - futures = {} - for module_name, module in scan2.modules.items(): - log.info(f"Testing {module_name}.finish()") - future = scan2._thread_pool.submit_task(module.finish) - futures[future] = module - for future in helpers.as_completed(futures): - assert future.result() == None - futures.clear() - - # cleanups - futures = {} - for module_name, module in scan2.modules.items(): - log.info(f"Testing {module_name}.cleanup()") - future = scan2._thread_pool.submit_task(module.cleanup) - futures[future] = module - for future in helpers.as_completed(futures): - assert future.result() == None - futures.clear() - - # event filters - for module_name, module in scan2.modules.items(): - log.info(f"Testing {module_name}.filter_event()") - assert module.filter_event(events.emoji) in (True, False) - - -def test_config(bbot_config): - from bbot.scanner.scanner import Scanner - - scan1 = Scanner("127.0.0.1", modules=["ipneighbor"], config=bbot_config) - scan1.load_modules() - assert scan1.config.plumbus == "asdf" - assert scan1.modules["ipneighbor"].config.test_option == "ipneighbor" - assert scan1.modules["human"].config.test_option == "human" - assert scan1.modules["speculate"].config.test_option == "speculate" - - -def test_target(neuter_ansible, patch_requests, patch_commands, bbot_config): - from bbot.scanner.scanner import Scanner - - scan1 = Scanner("api.publicapis.org", "8.8.8.8/30", "2001:4860:4860::8888/126", config=bbot_config) - scan2 = Scanner("8.8.8.8/29", "publicapis.org", "2001:4860:4860::8888/125", config=bbot_config) - scan3 = Scanner("8.8.8.8/29", "publicapis.org", "2001:4860:4860::8888/125", config=bbot_config) - scan4 = Scanner("8.8.8.8/29", config=bbot_config) - scan5 = Scanner(config=bbot_config) - assert not scan5.target - assert len(scan1.target) == 9 - assert len(scan4.target) == 8 - assert "8.8.8.9" in scan1.target - assert "8.8.8.12" not in scan1.target - assert "8.8.8.8/31" in scan1.target - assert "8.8.8.8/30" in scan1.target - assert "8.8.8.8/29" not in scan1.target - assert "2001:4860:4860::8889" in scan1.target - assert "2001:4860:4860::888c" not in scan1.target - assert "www.api.publicapis.org" in scan1.target - assert "api.publicapis.org" in scan1.target - assert "publicapis.org" not in scan1.target - assert "bob@www.api.publicapis.org" in scan1.target - assert "https://www.api.publicapis.org" in scan1.target - assert "www.api.publicapis.org:80" in scan1.target - assert 
scan1.make_event("https://[2001:4860:4860::8888]:80", dummy=True) in scan1.target - assert scan1.make_event("[2001:4860:4860::8888]:80", "OPEN_TCP_PORT", dummy=True) in scan1.target - assert scan1.make_event("[2001:4860:4860::888c]:80", "OPEN_TCP_PORT", dummy=True) not in scan1.target - assert scan1.target in scan2.target - assert scan2.target not in scan1.target - assert scan3.target in scan2.target - assert scan2.target == scan3.target - assert scan4.target != scan1.target - - -def test_scan(neuter_ansible, patch_requests, patch_commands, events, bbot_config, helpers, neograph): - from bbot.scanner.scanner import Scanner - - scan0 = Scanner("8.8.8.8/31", "evilcorp.com", blacklist=["8.8.8.8/28", "www.evilcorp.com"], config=bbot_config) - assert scan0.whitelisted("8.8.8.8") - assert scan0.whitelisted("8.8.8.9") - assert scan0.blacklisted("8.8.8.15") - assert not scan0.blacklisted("8.8.8.16") - assert scan0.blacklisted("8.8.8.8/30") - assert not scan0.blacklisted("8.8.8.8/27") - assert not scan0.in_scope("8.8.8.8") - assert scan0.whitelisted("api.evilcorp.com") - assert scan0.whitelisted("www.evilcorp.com") - assert not scan0.blacklisted("api.evilcorp.com") - assert scan0.blacklisted("asdf.www.evilcorp.com") - assert scan0.in_scope("test.api.evilcorp.com") - assert not scan0.in_scope("test.www.evilcorp.com") - assert not scan0.in_scope("www.evilcorp.co.uk") - - scan1 = Scanner("8.8.8.8", whitelist=["8.8.4.4"], config=bbot_config) - assert not scan1.blacklisted("8.8.8.8") - assert not scan1.blacklisted("8.8.4.4") - assert not scan1.whitelisted("8.8.8.8") - assert scan1.whitelisted("8.8.4.4") - assert scan1.in_scope("8.8.4.4") - assert not scan1.in_scope("8.8.8.8") - - scan2 = Scanner("8.8.8.8", config=bbot_config) - assert not scan2.blacklisted("8.8.8.8") - assert not scan2.blacklisted("8.8.4.4") - assert scan2.whitelisted("8.8.8.8") - assert not scan2.whitelisted("8.8.4.4") - assert scan2.in_scope("8.8.8.8") - assert not scan2.in_scope("8.8.4.4") - - scan3 = Scanner( - "127.0.0.0/30", - "127.0.0.2:8443", - "https://localhost", - "[::1]:80", - "http://[::1]:8080", - modules=["ipneighbor"], - output_modules=list(available_output_modules), - config=bbot_config, - blacklist=["http://127.0.0.3:8000/asdf"], - whitelist=["127.0.0.0/29"], - ) - assert "targets" in scan3.json - assert "127.0.0.3" in scan3.target - assert "127.0.0.4" not in scan3.target - assert "127.0.0.4" in scan3.whitelist - assert scan3.whitelisted("127.0.0.4") - assert "127.0.0.3" in scan3.blacklist - assert scan3.blacklisted("127.0.0.3") - assert scan3.in_scope("127.0.0.1") - assert not scan3.in_scope("127.0.0.3") - scan3.start() - - -def test_threadpool(): - from concurrent.futures import ThreadPoolExecutor - from bbot.core.helpers.threadpool import ThreadPoolWrapper, NamedLock, as_completed - - with ThreadPoolExecutor(max_workers=3) as executor: - pool = ThreadPoolWrapper(executor) - add_one = lambda x: x + 1 - futures = [pool.submit_task(add_one, y) for y in [0, 1, 2, 3, 4]] - results = [] - for f in as_completed(futures): - results.append(f.result()) - assert tuple(sorted(results)) == (1, 2, 3, 4, 5) - - nl = NamedLock(max_size=5) - for i in range(50): - nl.get_lock(str(i)) - assert len(nl._cache) == 5 - assert tuple(nl._cache.keys()) == tuple(hash(str(x)) for x in [45, 46, 47, 48, 49]) - - -def test_agent(agent): - agent.start() - agent.on_error(agent.ws, "test") - agent.on_close(agent.ws, "test", "test") - agent.on_open(agent.ws) - agent.on_message( - agent.ws, - '{"conversation": "90196cc1-299f-4555-82a0-bc22a4247590", 
"command": "start_scan", "arguments": {"scan_id": "90196cc1-299f-4555-82a0-bc22a4247590", "targets": ["www.blacklanternsecurity.com"], "modules": ["ipneighbor"], "output_modules": ["human"]}}', - ) - sleep(0.5) - agent.scan_status() - agent.stop_scan() - - -def test_cli(monkeypatch, bbot_config): - - from bbot import cli - - monkeypatch.setattr(sys, "exit", lambda *args, **kwargs: True) - monkeypatch.setattr(cli, "config", bbot_config) - monkeypatch.setattr(sys, "argv", ["bbot", "-y", "--current-config", "-t", "127.0.0.1", "-m", "ipneighbor"]) - cli.main() - - home_dir = Path(bbot_config["home"]) - monkeypatch.setattr( - sys, - "argv", - ["bbot", "-y", "-t", "localhost", "-m", "ipneighbor", "-om", "human", "csv", "json", "-n", "test_scan"], - ) - cli.main() - scan_home = home_dir / "scans" / "test_scan" - assert (scan_home / "wordcloud.tsv").is_file() - assert (scan_home / "output.txt").is_file() - assert (scan_home / "output.csv").is_file() - assert (scan_home / "output.json").is_file() - with open(scan_home / "output.csv") as f: - lines = f.readlines() - assert lines[0] == "Event type,Event data,Source Module,Scope Distance,Event Tags\n" - assert len(lines) > 1 - - -def test_depsinstaller(monkeypatch, neuter_ansible, bbot_config): - # un-neuter ansible - from bbot.core.helpers.depsinstaller import installer - - run, ensure_root = neuter_ansible - ensure_root = installer.DepsInstaller.ensure_root - monkeypatch.setattr(installer, "run", run) - monkeypatch.setattr(installer.DepsInstaller, "ensure_root", ensure_root) - - from bbot.scanner.scanner import Scanner - - scan = Scanner( - "127.0.0.1", - modules=["dnsresolve"], - config=bbot_config, - ) - - # test shell - test_file = Path("/tmp/test_file") - test_file.unlink(missing_ok=True) - scan.helpers.depsinstaller.shell(module="plumbus", commands=[f"touch {test_file}"]) - assert test_file.is_file() - test_file.unlink(missing_ok=True) - - # test tasks - scan.helpers.depsinstaller.tasks( - module="plumbus", - tasks=[{"name": "test task execution", "ansible.builtin.shell": {"cmd": f"touch {test_file}"}}], - ) - assert test_file.is_file() - test_file.unlink(missing_ok=True) diff --git a/bbot/test/test_output.json b/bbot/test/test_output.ndjson similarity index 99% rename from bbot/test/test_output.json rename to bbot/test/test_output.ndjson index 94312708ce..1048b2d1e6 100644 --- a/bbot/test/test_output.json +++ b/bbot/test/test_output.ndjson @@ -13,4 +13,4 @@ "checked_at": "2022-04-17T08:03:50.419919Z", "created_at": "2020-07-16T14:19:04.514857Z" } -] \ No newline at end of file +] diff --git a/bbot/test/test_step_1/__init__.py b/bbot/test/test_step_1/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/bbot/test/test_step_1/test__module__tests.py b/bbot/test/test_step_1/test__module__tests.py new file mode 100644 index 0000000000..e50f67a910 --- /dev/null +++ b/bbot/test/test_step_1/test__module__tests.py @@ -0,0 +1,37 @@ +import logging +import importlib +from pathlib import Path + +from bbot import Preset +from ..test_step_2.module_tests.base import ModuleTestBase + +log = logging.getLogger("bbot.test.modules") + +module_tests_dir = Path(__file__).parent.parent / "test_step_2" / "module_tests" + +_module_test_files = list(module_tests_dir.glob("test_module_*.py")) +_module_test_files.sort(key=lambda p: p.name) +module_test_files = [m.name.split("test_module_")[-1].split(".")[0] for m in _module_test_files] + + +def test__module__tests(): + preset = Preset() + + # make sure each module has a .py file + for module_name in 
preset.module_loader.preloaded(): + module_name = module_name.lower() + assert module_name in module_test_files, f'No test file found for module "{module_name}"' + + # make sure each test file has a test class + for file in _module_test_files: + module_name = file.stem + import_path = f"bbot.test.test_step_2.module_tests.{module_name}" + module_test_variables = importlib.import_module(import_path, "bbot") + module_pass = False + for var_name in dir(module_test_variables): + if var_name.startswith("Test"): + test_class = getattr(module_test_variables, var_name) + if ModuleTestBase in getattr(test_class, "__mro__", ()): + module_pass = True + break + assert module_pass, f"Couldn't find a test class for {module_name} in {file}" diff --git a/bbot/test/test_step_1/test_bbot_fastapi.py b/bbot/test/test_step_1/test_bbot_fastapi.py new file mode 100644 index 0000000000..1136963a3d --- /dev/null +++ b/bbot/test/test_step_1/test_bbot_fastapi.py @@ -0,0 +1,79 @@ +import time +import httpx +import multiprocessing +from pathlib import Path +from subprocess import Popen +from contextlib import suppress + +cwd = Path(__file__).parent.parent.parent + + +def run_bbot_multiprocess(queue): + from bbot import Scanner + + scan = Scanner("http://127.0.0.1:8888", "blacklanternsecurity.com", modules=["httpx"]) + events = [e.json() for e in scan.start()] + queue.put(events) + + +def test_bbot_multiprocess(bbot_httpserver): + bbot_httpserver.expect_request("/").respond_with_data("test@blacklanternsecurity.com") + + queue = multiprocessing.Queue() + events_process = multiprocessing.Process(target=run_bbot_multiprocess, args=(queue,)) + events_process.start() + events_process.join() + events = queue.get() + assert len(events) >= 3 + scan_events = [e for e in events if e["type"] == "SCAN"] + assert len(scan_events) == 2 + assert any(e["data"] == "test@blacklanternsecurity.com" for e in events) + + +def test_bbot_fastapi(bbot_httpserver): + bbot_httpserver.expect_request("/").respond_with_data("test@blacklanternsecurity.com") + fastapi_process = start_fastapi_server() + + try: + # wait for the server to start with a timeout of 60 seconds + start_time = time.time() + while True: + try: + response = httpx.get("http://127.0.0.1:8978/ping") + response.raise_for_status() + break + except httpx.HTTPError: + if time.time() - start_time > 60: + raise TimeoutError("Server did not start within 60 seconds.") + time.sleep(0.1) + continue + + # run a scan + response = httpx.get( + "http://127.0.0.1:8978/start", + params={"targets": ["http://127.0.0.1:8888", "blacklanternsecurity.com"]}, + timeout=100, + ) + events = response.json() + assert len(events) >= 3 + scan_events = [e for e in events if e["type"] == "SCAN"] + assert len(scan_events) == 2 + assert any(e["data"] == "test@blacklanternsecurity.com" for e in events) + + finally: + with suppress(Exception): + fastapi_process.terminate() + + +def start_fastapi_server(): + import os + import sys + + env = os.environ.copy() + with suppress(KeyError): + del env["BBOT_TESTING"] + python_executable = str(sys.executable) + process = Popen( + [python_executable, "-m", "uvicorn", "bbot.test.fastapi_test:app", "--port", "8978"], cwd=cwd, env=env + ) + return process diff --git a/bbot/test/test_step_1/test_bloom_filter.py b/bbot/test/test_step_1/test_bloom_filter.py new file mode 100644 index 0000000000..0a43f34157 --- /dev/null +++ b/bbot/test/test_step_1/test_bloom_filter.py @@ -0,0 +1,70 @@ +import time +import pytest +import string +import random + + +@pytest.mark.asyncio +async def 
test_bloom_filter(): + def generate_random_strings(n, length=10): + """Generate a list of n random strings.""" + return ["".join(random.choices(string.ascii_letters + string.digits, k=length)) for _ in range(n)] + + from bbot.scanner import Scanner + + scan = Scanner() + + n_items_to_add = 100000 + n_items_to_test = 100000 + bloom_filter_size = 8000000 + + # Initialize the simple bloom filter and the set + bloom_filter = scan.helpers.bloom_filter(size=bloom_filter_size) + + test_set = set() + + # Generate random strings to add + print(f"Generating {n_items_to_add:,} items to add") + items_to_add = set(generate_random_strings(n_items_to_add)) + + # Generate random strings to test + print(f"Generating {n_items_to_test:,} items to test") + items_to_test = generate_random_strings(n_items_to_test) + + print("Adding items") + start = time.time() + for item in items_to_add: + bloom_filter.add(item) + test_set.add(hash(item)) + end = time.time() + elapsed = end - start + print(f"elapsed: {elapsed:.2f} ({int(n_items_to_test / elapsed)}/s)") + # this shouldn't take longer than 5 seconds + assert elapsed < 5 + + # make sure we have 100% accuracy + start = time.time() + for item in items_to_add: + assert item in bloom_filter + end = time.time() + elapsed = end - start + print(f"elapsed: {elapsed:.2f} ({int(n_items_to_test / elapsed)}/s)") + # this shouldn't take longer than 5 seconds + assert elapsed < 5 + + print("Measuring false positives") + # Check for false positives + false_positives = 0 + for item in items_to_test: + if bloom_filter.check(item) and hash(item) not in test_set: + false_positives += 1 + false_positive_percent = false_positives / len(items_to_test) * 100 + + print(f"False positive rate: {false_positive_percent:.2f}% ({false_positives}/{len(items_to_test)})") + + # ensure false positives are less than .02 percent + assert false_positive_percent < 0.02 + + bloom_filter.close() + + await scan._cleanup() diff --git a/bbot/test/test_step_1/test_cli.py b/bbot/test/test_step_1/test_cli.py new file mode 100644 index 0000000000..a8e457b11f --- /dev/null +++ b/bbot/test/test_step_1/test_cli.py @@ -0,0 +1,764 @@ +import yaml + +from ..bbot_fixtures import * + +from bbot import cli + + +@pytest.mark.asyncio +async def test_cli_scope(monkeypatch, capsys): + import json + + monkeypatch.setattr(sys, "exit", lambda *args, **kwargs: True) + monkeypatch.setattr(os, "_exit", lambda *args, **kwargs: True) + + # basic target without whitelist + monkeypatch.setattr( + "sys.argv", + ["bbot", "-t", "one.one.one.one", "-c", "scope.report_distance=10", "dns.minimal=false", "--json"], + ) + result = await cli._main() + out, err = capsys.readouterr() + assert result is True + lines = [json.loads(l) for l in out.splitlines()] + dns_events = [l for l in lines if l["type"] == "DNS_NAME" and l["data"] == "one.one.one.one"] + assert dns_events + assert all(l["scope_distance"] == 0 and "in-scope" in l["tags"] for l in dns_events) + assert 1 == len( + [ + l + for l in dns_events + if l["module"] == "TARGET" + and l["scope_distance"] == 0 + and "in-scope" in l["tags"] + and "target" in l["tags"] + ] + ) + ip_events = [l for l in lines if l["type"] == "IP_ADDRESS" and l["data"] == "1.1.1.1"] + assert ip_events + assert all(l["scope_distance"] == 1 and "distance-1" in l["tags"] for l in ip_events) + ip_events = [l for l in lines if l["type"] == "IP_ADDRESS" and l["data"] == "1.0.0.1"] + assert ip_events + assert all(l["scope_distance"] == 1 and "distance-1" in l["tags"] for l in ip_events) + + # with whitelist + 
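# (the whitelist below deliberately excludes the target, so events for the + # target itself are expected at scope distance 1 rather than 0) + 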
monkeypatch.setattr( + "sys.argv", + [ + "bbot", + "-t", + "one.one.one.one", + "-w", + "192.168.0.1", + "-c", + "scope.report_distance=10", + "dns.minimal=false", + "dns.search_distance=2", + "--json", + ], + ) + result = await cli._main() + out, err = capsys.readouterr() + assert result is True + lines = [json.loads(l) for l in out.splitlines()] + lines = [l for l in lines if l["type"] != "SCAN"] + assert lines + assert not any(l["scope_distance"] == 0 for l in lines) + dns_events = [l for l in lines if l["type"] == "DNS_NAME" and l["data"] == "one.one.one.one"] + assert dns_events + assert all(l["scope_distance"] == 1 and "distance-1" in l["tags"] for l in dns_events) + assert 1 == len( + [ + l + for l in dns_events + if l["module"] == "TARGET" + and l["scope_distance"] == 1 + and "distance-1" in l["tags"] + and "target" in l["tags"] + ] + ) + ip_events = [l for l in lines if l["type"] == "IP_ADDRESS" and l["data"] == "1.1.1.1"] + assert ip_events + assert all(l["scope_distance"] == 2 and "distance-2" in l["tags"] for l in ip_events) + ip_events = [l for l in lines if l["type"] == "IP_ADDRESS" and l["data"] == "1.0.0.1"] + assert ip_events + assert all(l["scope_distance"] == 2 and "distance-2" in l["tags"] for l in ip_events) + + +@pytest.mark.asyncio +async def test_cli_scan(monkeypatch): + monkeypatch.setattr(sys, "exit", lambda *args, **kwargs: True) + monkeypatch.setattr(os, "_exit", lambda *args, **kwargs: True) + + scans_home = bbot_test_dir / "scans" + + # basic scan + monkeypatch.setattr( + sys, + "argv", + ["bbot", "-y", "-t", "127.0.0.1", "www.example.com", "-n", "test_cli_scan", "-c", "dns.disable=true"], + ) + result = await cli._main() + assert result is True + + scan_home = scans_home / "test_cli_scan" + assert (scan_home / "preset.yml").is_file(), "preset.yml not found" + assert (scan_home / "wordcloud.tsv").is_file(), "wordcloud.tsv not found" + assert (scan_home / "output.txt").is_file(), "output.txt not found" + assert (scan_home / "output.csv").is_file(), "output.csv not found" + assert (scan_home / "output.json").is_file(), "output.json not found" + + with open(scan_home / "preset.yml") as f: + text = f.read() + assert " dns:\n disable: true" in text + + with open(scan_home / "output.csv") as f: + lines = f.readlines() + assert lines[0] == "Event type,Event data,IP Address,Source Module,Scope Distance,Event Tags,Discovery Path\n" + assert len(lines) > 1, "output.csv is not long enough" + + ip_success = False + dns_success = False + output_filename = scan_home / "output.txt" + with open(output_filename) as f: + lines = f.read().splitlines() + for line in lines: + if "[IP_ADDRESS] \t127.0.0.1\tTARGET" in line: + ip_success = True + if "[DNS_NAME] \twww.example.com\tTARGET" in line: + dns_success = True + assert ip_success and dns_success, "IP_ADDRESS and/or DNS_NAME are not present in output.txt" + + +@pytest.mark.asyncio +async def test_cli_args(monkeypatch, caplog, capsys, clean_default_config): + caplog.set_level(logging.INFO) + + monkeypatch.setattr(sys, "exit", lambda *args, **kwargs: True) + monkeypatch.setattr(os, "_exit", lambda *args, **kwargs: True) + + # show version + monkeypatch.setattr("sys.argv", ["bbot", "--version"]) + result = await cli._main() + out, err = capsys.readouterr() + assert result is None + assert len(out.splitlines()) == 1 + assert out.count(".") > 1 + + # deps behavior + monkeypatch.setattr("sys.argv", ["bbot", "-n", "depstest", "--retry-deps", "--current-preset"]) + result = await cli._main() + assert result is None + out, err = 
capsys.readouterr() + print(out) + # parse YAML output + preset = yaml.safe_load(out) + assert preset == { + "description": "depstest", + "scan_name": "depstest", + "config": {"deps": {"behavior": "retry_failed"}}, + } + + # list modules + monkeypatch.setattr("sys.argv", ["bbot", "--list-modules"]) + result = await cli._main() + assert result is None + out, err = capsys.readouterr() + # internal modules + assert "| excavate " in out + # no output modules + assert "| csv " not in out + # scan modules + assert "| wayback " in out + + # list output modules + monkeypatch.setattr("sys.argv", ["bbot", "--list-output-modules"]) + result = await cli._main() + assert result is None + out, err = capsys.readouterr() + # no internal modules + assert "| excavate " not in out + # output modules + assert "| csv " in out + # no scan modules + assert "| wayback " not in out + + # output dir and scan name + output_dir = bbot_test_dir / "bbot_cli_args_output" + scan_name = "bbot_cli_args_scan_name" + scan_dir = output_dir / scan_name + assert not output_dir.exists() + monkeypatch.setattr("sys.argv", ["bbot", "-o", str(output_dir), "-n", scan_name, "-y"]) + result = await cli._main() + assert result is True + assert output_dir.is_dir() + assert scan_dir.is_dir() + assert "[SCAN]" in open(scan_dir / "output.txt").read() + assert "[INFO]" in open(scan_dir / "scan.log").read() + shutil.rmtree(output_dir) + + # list module options + monkeypatch.setattr("sys.argv", ["bbot", "--list-module-options"]) + result = await cli._main() + out, err = capsys.readouterr() + assert result is None + assert "| modules.wayback.urls" in out + assert "| bool" in out + assert "| emit URLs in addition to DNS_NAMEs" in out + assert "| False" in out + assert "| modules.dnsbrute.wordlist" in out + assert "| modules.robots.include_allow" in out + + # list module options by flag + monkeypatch.setattr("sys.argv", ["bbot", "-f", "subdomain-enum", "--list-module-options"]) + result = await cli._main() + out, err = capsys.readouterr() + assert result is None + assert "| modules.wayback.urls" in out + assert "| bool" in out + assert "| emit URLs in addition to DNS_NAMEs" in out + assert "| False" in out + assert "| modules.dnsbrute.wordlist" in out + assert "| modules.robots.include_allow" not in out + + # list module options by module + monkeypatch.setattr("sys.argv", ["bbot", "-m", "dnsbrute", "-lmo"]) + result = await cli._main() + out, err = capsys.readouterr() + assert result is None + assert out.count("modules.") == out.count("modules.dnsbrute.") + assert "| modules.wayback.urls" not in out + assert "| modules.dnsbrute.wordlist" in out + assert "| modules.robots.include_allow" not in out + + # list output module options by module + monkeypatch.setattr("sys.argv", ["bbot", "-om", "stdout", "-lmo"]) + result = await cli._main() + out, err = capsys.readouterr() + assert result is None + assert out.count("modules.") == out.count("modules.stdout.") + + # list flags + monkeypatch.setattr("sys.argv", ["bbot", "--list-flags"]) + result = await cli._main() + out, err = capsys.readouterr() + assert result is None + assert "| safe " in out + assert "| Non-intrusive, safe to run " in out + assert "| active " in out + assert "| passive " in out + + # list only a single flag + monkeypatch.setattr("sys.argv", ["bbot", "-f", "active", "--list-flags"]) + result = await cli._main() + out, err = capsys.readouterr() + assert result is None + assert "| safe " not in out + assert "| active " in out + assert "| passive " not in out + + # list multiple flags +
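# several flags at once should act as a union: a flag's row is listed if it + # matches any of the requested flags +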
monkeypatch.setattr("sys.argv", ["bbot", "-f", "active", "safe", "--list-flags"]) + result = await cli._main() + out, err = capsys.readouterr() + assert result is None + assert "| safe " in out + assert "| active " in out + assert "| passive " not in out + + # no args + monkeypatch.setattr("sys.argv", ["bbot"]) + result = await cli._main() + out, err = capsys.readouterr() + assert result is None + assert "-t TARGET [TARGET ...]" in out + + # list modules + monkeypatch.setattr("sys.argv", ["bbot", "-l"]) + result = await cli._main() + out, err = capsys.readouterr() + assert result is None + assert "| dnsbrute " in out + assert "| httpx " in out + assert "| robots " in out + + # list modules by flag + monkeypatch.setattr("sys.argv", ["bbot", "-f", "subdomain-enum", "-l"]) + result = await cli._main() + out, err = capsys.readouterr() + assert result is None + assert "| dnsbrute " in out + assert "| httpx " in out + assert "| robots " not in out + + # list modules by flag + required flag + monkeypatch.setattr("sys.argv", ["bbot", "-f", "subdomain-enum", "-rf", "passive", "-l"]) + result = await cli._main() + out, err = capsys.readouterr() + assert result is None + assert "| chaos " in out + assert "| httpx " not in out + + # list modules by flag + excluded flag + monkeypatch.setattr("sys.argv", ["bbot", "-f", "subdomain-enum", "-ef", "active", "-l"]) + result = await cli._main() + out, err = capsys.readouterr() + assert result is None + assert "| chaos " in out + assert "| httpx " not in out + + # list modules by flag + excluded module + monkeypatch.setattr("sys.argv", ["bbot", "-f", "subdomain-enum", "-em", "dnsbrute", "-l"]) + result = await cli._main() + out, err = capsys.readouterr() + assert result is None + assert "| dnsbrute " not in out + assert "| httpx " in out + + # output modules override + caplog.clear() + assert not caplog.text + monkeypatch.setattr("sys.argv", ["bbot", "-om", "csv,json", "-y"]) + result = await cli._main() + assert result is True + assert "Loaded 2/2 output modules, (csv,json)" in caplog.text + caplog.clear() + monkeypatch.setattr("sys.argv", ["bbot", "-em", "csv,json", "-y"]) + result = await cli._main() + assert result is True + assert "Loaded 3/3 output modules, (python,stdout,txt)" in caplog.text + + # output modules override + caplog.clear() + assert not caplog.text + monkeypatch.setattr("sys.argv", ["bbot", "-om", "subdomains", "-y"]) + result = await cli._main() + assert result is True + assert "Loaded 6/6 output modules, (csv,json,python,stdout,subdomains,txt)" in caplog.text + + # internal modules override + caplog.clear() + assert not caplog.text + monkeypatch.setattr("sys.argv", ["bbot", "-y"]) + result = await cli._main() + assert result is True + assert "Loaded 6/6 internal modules (aggregate,cloudcheck,dnsresolve,excavate,speculate,unarchive)" in caplog.text + caplog.clear() + monkeypatch.setattr("sys.argv", ["bbot", "-em", "excavate", "speculate", "-y"]) + result = await cli._main() + assert result is True + assert "Loaded 4/4 internal modules (aggregate,cloudcheck,dnsresolve,unarchive)" in caplog.text + caplog.clear() + monkeypatch.setattr("sys.argv", ["bbot", "-c", "speculate=false", "-y"]) + result = await cli._main() + assert result is True + assert "Loaded 5/5 internal modules (aggregate,cloudcheck,dnsresolve,excavate,unarchive)" in caplog.text + + # custom target type + out, err = capsys.readouterr() + monkeypatch.setattr("sys.argv", ["bbot", "-t", "ORG:evilcorp", "-y"]) + result = await cli._main() + out, err = capsys.readouterr() + assert 
result is True + assert "[ORG_STUB] evilcorp TARGET" in out + + # activate modules by flag + caplog.clear() + assert not caplog.text + monkeypatch.setattr("sys.argv", ["bbot", "-f", "passive"]) + result = await cli._main() + assert result is True + + # unconsoleable output module + monkeypatch.setattr("sys.argv", ["bbot", "-om", "web_report"]) + result = await cli._main() + assert result is True + + # unresolved dependency + monkeypatch.setattr("sys.argv", ["bbot", "-m", "wappalyzer"]) + result = await cli._main() + assert result is True + + # require flags + monkeypatch.setattr("sys.argv", ["bbot", "-f", "active", "-rf", "passive"]) + result = await cli._main() + assert result is True + + # excluded flags + monkeypatch.setattr("sys.argv", ["bbot", "-f", "active", "-ef", "active"]) + result = await cli._main() + assert result is True + + # slow modules + monkeypatch.setattr("sys.argv", ["bbot", "-m", "bucket_digitalocean"]) + result = await cli._main() + assert result is True + + # deadly modules + caplog.clear() + assert not caplog.text + monkeypatch.setattr("sys.argv", ["bbot", "-m", "nuclei"]) + result = await cli._main() + assert result is False, "-m nuclei ran without --allow-deadly" + assert "Please specify --allow-deadly to continue" in caplog.text + + # --allow-deadly + monkeypatch.setattr("sys.argv", ["bbot", "-m", "nuclei", "--allow-deadly"]) + result = await cli._main() + assert result is True, "-m nuclei failed to run with --allow-deadly" + + # install all deps + monkeypatch.setattr("sys.argv", ["bbot", "--install-all-deps"]) + success = await cli._main() + assert success is True, "--install-all-deps failed for at least one module" + + +@pytest.mark.asyncio +async def test_cli_customheaders(monkeypatch, caplog, capsys): + monkeypatch.setattr(sys, "exit", lambda *args, **kwargs: True) + monkeypatch.setattr(os, "_exit", lambda *args, **kwargs: True) + + # test custom headers + monkeypatch.setattr( + "sys.argv", ["bbot", "--custom-headers", "foo=bar", "foo2=bar2", "foo3=bar=3", "--current-preset"] + ) + success = await cli._main() + assert success is None, "setting custom headers on command line failed" + captured = capsys.readouterr() + stdout_preset = yaml.safe_load(captured.out) + assert stdout_preset["config"]["web"]["http_headers"] == {"foo": "bar", "foo2": "bar2", "foo3": "bar=3"} + + # test custom headers invalid (no "=") + monkeypatch.setattr("sys.argv", ["bbot", "--custom-headers", "justastring", "--current-preset"]) + result = await cli._main() + assert result is None + assert "Custom headers not formatted correctly (missing '=')" in caplog.text + caplog.clear() + + # test custom headers invalid (missing key) + monkeypatch.setattr("sys.argv", ["bbot", "--custom-headers", "=nokey", "--current-preset"]) + result = await cli._main() + assert result is None + assert "Custom headers not formatted correctly (missing header name or value)" in caplog.text + caplog.clear() + + # test custom headers invalid (missing value) + monkeypatch.setattr("sys.argv", ["bbot", "--custom-headers", "missingvalue=", "--current-preset"]) + result = await cli._main() + assert result is None + assert "Custom headers not formatted correctly (missing header name or value)" in caplog.text + + +def test_cli_config_validation(monkeypatch, caplog): + monkeypatch.setattr(sys, "exit", lambda *args, **kwargs: True) + monkeypatch.setattr(os, "_exit", lambda *args, **kwargs: True) + + # incorrect module option + caplog.clear() + assert not caplog.text + monkeypatch.setattr("sys.argv", ["bbot", "-c", 
"modules.ipnegibhor.num_bits=4"]) + cli.main() + assert 'Could not find config option "modules.ipnegibhor.num_bits"' in caplog.text + assert 'Did you mean "modules.ipneighbor.num_bits"?' in caplog.text + + # incorrect global option + caplog.clear() + assert not caplog.text + monkeypatch.setattr("sys.argv", ["bbot", "-c", "web_spier_distance=4"]) + cli.main() + assert 'Could not find config option "web_spier_distance"' in caplog.text + assert 'Did you mean "web.spider_distance"?' in caplog.text + + +def test_cli_module_validation(monkeypatch, caplog): + monkeypatch.setattr(sys, "exit", lambda *args, **kwargs: True) + monkeypatch.setattr(os, "_exit", lambda *args, **kwargs: True) + + # incorrect module + caplog.clear() + assert not caplog.text + monkeypatch.setattr("sys.argv", ["bbot", "-m", "dnsbrutes"]) + cli.main() + assert 'Could not find scan module "dnsbrutes"' in caplog.text + assert 'Did you mean "dnsbrute"?' in caplog.text + + # incorrect excluded module + caplog.clear() + assert not caplog.text + monkeypatch.setattr("sys.argv", ["bbot", "-em", "dnsbrutes"]) + cli.main() + assert 'Could not find module "dnsbrutes"' in caplog.text + assert 'Did you mean "dnsbrute"?' in caplog.text + + # incorrect output module + caplog.clear() + assert not caplog.text + monkeypatch.setattr("sys.argv", ["bbot", "-om", "neoo4j"]) + cli.main() + assert 'Could not find output module "neoo4j"' in caplog.text + assert 'Did you mean "neo4j"?' in caplog.text + + # output module setup failed + caplog.clear() + assert not caplog.text + monkeypatch.setattr("sys.argv", ["bbot", "-om", "websocket", "-c", "modules.websocket.url=", "-y"]) + cli.main() + lines = caplog.text.splitlines() + assert "Loaded 6/6 output modules, (csv,json,python,stdout,txt,websocket)" in caplog.text + assert 1 == len( + [ + l + for l in lines + if l.startswith("WARNING bbot.scanner:scanner.py") + and l.endswith("Setup hard-failed for websocket: Must set URL") + ] + ) + assert 1 == len( + [ + l + for l in lines + if l.startswith("WARNING bbot.modules.output.websocket:base.py") and l.endswith("Setting error state") + ] + ) + assert 1 == len( + [ + l + for l in lines + if l.startswith("ERROR bbot.cli:cli.py") + and l.endswith("Setup hard-failed for 1 modules (websocket) (--force to run module anyway)") + ] + ) + + # only output module setup failed + caplog.clear() + assert not caplog.text + monkeypatch.setattr( + "sys.argv", + ["bbot", "-om", "websocket", "-em", "python,stdout,csv,json,txt", "-c", "modules.websocket.url=", "-y"], + ) + cli.main() + lines = caplog.text.splitlines() + assert "Loaded 1/1 output modules, (websocket)" in caplog.text + assert 1 == len( + [ + l + for l in lines + if l.startswith("WARNING bbot.scanner:scanner.py") + and l.endswith("Setup hard-failed for websocket: Must set URL") + ] + ) + assert 1 == len( + [ + l + for l in lines + if l.startswith("WARNING bbot.modules.output.websocket:base.py") and l.endswith("Setting error state") + ] + ) + assert 1 == len( + [ + l + for l in lines + if l.startswith("ERROR bbot.cli:cli.py") and l.endswith("Failed to load output modules. 
Aborting.") + ] + ) + + # bad target + caplog.clear() + assert not caplog.text + monkeypatch.setattr("sys.argv", ["bbot", "-t", "asdf:::sdf"]) + cli.main() + assert 'Unable to autodetect event type from "asdf:::sdf"' in caplog.text + + # incorrect flag + caplog.clear() + assert not caplog.text + monkeypatch.setattr("sys.argv", ["bbot", "-f", "subdomainenum"]) + cli.main() + assert 'Could not find flag "subdomainenum"' in caplog.text + assert 'Did you mean "subdomain-enum"?' in caplog.text + + # incorrect excluded flag + caplog.clear() + assert not caplog.text + monkeypatch.setattr("sys.argv", ["bbot", "-ef", "subdomainenum"]) + cli.main() + assert 'Could not find flag "subdomainenum"' in caplog.text + assert 'Did you mean "subdomain-enum"?' in caplog.text + + # incorrect required flag + caplog.clear() + assert not caplog.text + monkeypatch.setattr("sys.argv", ["bbot", "-rf", "subdomainenum"]) + cli.main() + assert 'Could not find flag "subdomainenum"' in caplog.text + assert 'Did you mean "subdomain-enum"?' in caplog.text + + +def test_cli_presets(monkeypatch, capsys, caplog): + import yaml + + monkeypatch.setattr(sys, "exit", lambda *args, **kwargs: True) + monkeypatch.setattr(os, "_exit", lambda *args, **kwargs: True) + + # show current preset + monkeypatch.setattr("sys.argv", ["bbot", "-c", "web.http_proxy=currentpresettest", "--current-preset"]) + cli.main() + captured = capsys.readouterr() + assert " http_proxy: currentpresettest" in captured.out + + # show current preset (full) + monkeypatch.setattr("sys.argv", ["bbot", "-cmodules.c99.api_key=asdf", "--current-preset-full"]) + cli.main() + captured = capsys.readouterr() + assert " api_key: asdf" in captured.out + + preset_dir = bbot_test_dir / "test_cli_presets" + preset_dir.mkdir(exist_ok=True) + + preset1_file = preset_dir / "cli_preset1.conf" + with open(preset1_file, "w") as f: + f.write( + """ +config: + web: + http_proxy: http://proxy1 + """ + ) + + preset2_file = preset_dir / "cli_preset2.yml" + with open(preset2_file, "w") as f: + f.write( + """ +config: + web: + http_proxy: http://proxy2 + """ + ) + + # test reading single preset + monkeypatch.setattr("sys.argv", ["bbot", "-p", str(preset1_file.resolve()), "--current-preset"]) + cli.main() + captured = capsys.readouterr() + stdout_preset = yaml.safe_load(captured.out) + assert stdout_preset["config"]["web"]["http_proxy"] == "http://proxy1" + + # preset overrides preset + monkeypatch.setattr( + "sys.argv", ["bbot", "-p", str(preset2_file.resolve()), str(preset1_file.resolve()), "--current-preset"] + ) + cli.main() + captured = capsys.readouterr() + stdout_preset = yaml.safe_load(captured.out) + assert stdout_preset["config"]["web"]["http_proxy"] == "http://proxy1" + + # override other way + monkeypatch.setattr( + "sys.argv", ["bbot", "-p", str(preset1_file.resolve()), str(preset2_file.resolve()), "--current-preset"] + ) + cli.main() + captured = capsys.readouterr() + stdout_preset = yaml.safe_load(captured.out) + assert stdout_preset["config"]["web"]["http_proxy"] == "http://proxy2" + + # --fast-mode + monkeypatch.setattr("sys.argv", ["bbot", "--current-preset"]) + cli.main() + captured = capsys.readouterr() + stdout_preset = yaml.safe_load(captured.out) + assert list(stdout_preset) == ["description"] + + monkeypatch.setattr("sys.argv", ["bbot", "--fast", "--current-preset"]) + cli.main() + captured = capsys.readouterr() + stdout_preset = yaml.safe_load(captured.out) + stdout_preset.pop("description") + assert stdout_preset == { + "config": { + "scope": {"strict": True}, + 
"dns": {"minimal": True}, + "modules": {"speculate": {"essential_only": True}}, + }, + "exclude_modules": ["excavate"], + } + + # --proxy + monkeypatch.setattr("sys.argv", ["bbot", "--proxy", "http://127.0.0.1:8080", "--current-preset"]) + cli.main() + captured = capsys.readouterr() + stdout_preset = yaml.safe_load(captured.out) + stdout_preset.pop("description") + assert stdout_preset == {"config": {"web": {"http_proxy": "http://127.0.0.1:8080"}}} + + # cli config overrides all presets + monkeypatch.setattr( + "sys.argv", + [ + "bbot", + "-p", + str(preset1_file.resolve()), + str(preset2_file.resolve()), + "-c", + "web.http_proxy=asdf", + "--current-preset", + ], + ) + cli.main() + captured = capsys.readouterr() + stdout_preset = yaml.safe_load(captured.out) + assert stdout_preset["config"]["web"]["http_proxy"] == "asdf" + + # invalid preset + caplog.clear() + assert not caplog.text + monkeypatch.setattr("sys.argv", ["bbot", "-p", "asdfasdfasdf", "-y"]) + cli.main() + assert "file does not exist. Use -lp to list available presets" in caplog.text + + preset1_file.unlink() + preset2_file.unlink() + + # test output dir preset + output_dir_preset_file = bbot_test_dir / "output_dir_preset.yml" + scan_name = "cli_output_dir_test" + output_dir = bbot_test_dir / "cli_output_dir_preset" + scan_dir = output_dir / scan_name + output_file = scan_dir / "output.txt" + + with open(output_dir_preset_file, "w") as f: + f.write( + f""" +output_dir: {output_dir} +scan_name: {scan_name} + """ + ) + + assert not output_dir.exists() + assert not scan_dir.exists() + assert not output_file.exists() + + monkeypatch.setattr("sys.argv", ["bbot", "-p", str(output_dir_preset_file.resolve()), "--current-preset"]) + cli.main() + captured = capsys.readouterr() + stdout_preset = yaml.safe_load(captured.out) + assert stdout_preset["output_dir"] == str(output_dir) + assert stdout_preset["scan_name"] == scan_name + + shutil.rmtree(output_dir, ignore_errors=True) + shutil.rmtree(scan_dir, ignore_errors=True) + shutil.rmtree(output_file, ignore_errors=True) + + assert not output_dir.exists() + assert not scan_dir.exists() + assert not output_file.exists() + + monkeypatch.setattr("sys.argv", ["bbot", "-p", str(output_dir_preset_file.resolve())]) + cli.main() + captured = capsys.readouterr() + assert output_dir.is_dir() + assert scan_dir.is_dir() + assert output_file.is_file() + + shutil.rmtree(output_dir, ignore_errors=True) + shutil.rmtree(scan_dir, ignore_errors=True) + shutil.rmtree(output_file, ignore_errors=True) + output_dir_preset_file.unlink() diff --git a/bbot/test/test_step_1/test_command.py b/bbot/test/test_step_1/test_command.py new file mode 100644 index 0000000000..7a99aed9bc --- /dev/null +++ b/bbot/test/test_step_1/test_command.py @@ -0,0 +1,147 @@ +import time +from ..bbot_fixtures import * +from subprocess import CalledProcessError + + +@pytest.mark.asyncio +async def test_command(bbot_scanner): + scan1 = bbot_scanner() + + # test timeouts + command = ["sleep", "3"] + start = time.time() + with pytest.raises(asyncio.exceptions.TimeoutError): + await scan1.helpers.run(command, idle_timeout=1) + end = time.time() + elapsed = end - start + assert 0 < elapsed < 2 + + start = time.time() + with pytest.raises(asyncio.exceptions.TimeoutError): + async for line in scan1.helpers.run_live(command, idle_timeout=1): + print(line) + end = time.time() + elapsed = end - start + assert 0 < elapsed < 2 + + # run + assert "plumbus\n" == (await scan1.helpers.run(["echo", "plumbus"])).stdout + assert b"plumbus\n" == (await 
scan1.helpers.run(["echo", "plumbus"], text=False)).stdout + result = (await scan1.helpers.run(["cat"], input="some\nrandom\nstdin")).stdout + assert result.splitlines() == ["some", "random", "stdin"] + result = (await scan1.helpers.run(["cat"], input=b"some\nrandom\nstdin", text=False)).stdout + assert result.splitlines() == [b"some", b"random", b"stdin"] + result = (await scan1.helpers.run(["cat"], input=["some", "random", "stdin"])).stdout + assert result.splitlines() == ["some", "random", "stdin"] + result = (await scan1.helpers.run(["cat"], input=[b"some", b"random", b"stdin"], text=False)).stdout + assert result.splitlines() == [b"some", b"random", b"stdin"] + + # test overflow - run + tmpfile_path = Path("/tmp/test_bigfile") + with open(tmpfile_path, "w") as f: + # write 2MB + f.write("A" * 1024 * 1024 * 2) + result = (await scan1.helpers.run(["cat", str(tmpfile_path)], limit=1024 * 64, text=False)).stdout + assert len(result) == 1024 * 1024 * 2 + tmpfile_path.unlink(missing_ok=True) + # test overflow - run_live + tmpfile_path = Path("/tmp/test_bigfile") + with open(tmpfile_path, "w") as f: + # write 2MB + f.write("A" * 10 + "\n") + f.write("B" * 1024 * 1024 * 2 + "\n") + f.write("C" * 10 + "\n") + lines = [] + async for line in scan1.helpers.run_live(["cat", str(tmpfile_path)], limit=1024 * 64): + lines.append(line) + # only a small bit of the overflowed line survives, that's okay. + assert lines == ["AAAAAAAAAA", "BBBBBBBBBBB", "CCCCCCCCCC"] + tmpfile_path.unlink(missing_ok=True) + + # run_live + lines = [] + async for line in scan1.helpers.run_live(["echo", "plumbus"]): + lines.append(line) + assert lines == ["plumbus"] + lines = [] + async for line in scan1.helpers.run_live(["echo", "plumbus"], text=False): + lines.append(line) + assert lines == [b"plumbus"] + lines = [] + async for line in scan1.helpers.run_live(["cat"], input="some\nrandom\nstdin"): + lines.append(line) + assert lines == ["some", "random", "stdin"] + lines = [] + async for line in scan1.helpers.run_live(["cat"], input=["some", "random", "stdin"]): + lines.append(line) + assert lines == ["some", "random", "stdin"] + + # test check=True + with pytest.raises(CalledProcessError) as excinfo: + lines = [line async for line in scan1.helpers.run_live(["ls", "/aslkdjflasdkfsd"], check=True)] + assert "No such file or directory" in excinfo.value.stderr + with pytest.raises(CalledProcessError) as excinfo: + lines = [line async for line in scan1.helpers.run_live(["ls", "/aslkdjflasdkfsd"], check=True, text=False)] + assert b"No such file or directory" in excinfo.value.stderr + with pytest.raises(CalledProcessError) as excinfo: + await scan1.helpers.run(["ls", "/aslkdjflasdkfsd"], check=True) + assert "No such file or directory" in excinfo.value.stderr + with pytest.raises(CalledProcessError) as excinfo: + await scan1.helpers.run(["ls", "/aslkdjflasdkfsd"], check=True, text=False) + assert b"No such file or directory" in excinfo.value.stderr + + # test piping + lines = [] + async for line in scan1.helpers.run_live( + ["cat"], input=scan1.helpers.run_live(["echo", "-en", r"some\nrandom\nstdin"]) + ): + lines.append(line) + assert lines == ["some", "random", "stdin"] + lines = [] + async for line in scan1.helpers.run_live( + ["cat"], input=scan1.helpers.run_live(["echo", "-en", r"some\nrandom\nstdin"], text=False), text=False + ): + lines.append(line) + assert lines == [b"some", b"random", b"stdin"] + + # test missing executable + result = await scan1.helpers.run(["sgkjlskdfsdf"]) + assert result is None + lines = [l async 
for l in scan1.helpers.run_live(["ljhsdghsdf"])] + assert not lines + # test stderr + result = await scan1.helpers.run(["ls", "/sldikgjasldkfsdf"]) + assert "No such file or directory" in result.stderr + lines = [l async for l in scan1.helpers.run_live(["ls", "/sldikgjasldkfsdf"])] + assert not lines + + # test sudo + existence of environment variables + await scan1.load_modules() + path_parts = os.environ.get("PATH", "").split(":") + assert "/tmp/.bbot_test/tools" in path_parts + run_lines = (await scan1.helpers.run(["env"])).stdout.splitlines() + assert "BBOT_WEB_USER_AGENT=BBOT Test User-Agent" in run_lines + for line in run_lines: + if line.startswith("PATH="): + path_parts = line.split("=", 1)[-1].split(":") + assert "/tmp/.bbot_test/tools" in path_parts + run_lines_sudo = (await scan1.helpers.run(["env"], sudo=True)).stdout.splitlines() + assert "BBOT_WEB_USER_AGENT=BBOT Test User-Agent" in run_lines_sudo + for line in run_lines_sudo: + if line.startswith("PATH="): + path_parts = line.split("=", 1)[-1].split(":") + assert "/tmp/.bbot_test/tools" in path_parts + run_live_lines = [l async for l in scan1.helpers.run_live(["env"])] + assert "BBOT_WEB_USER_AGENT=BBOT Test User-Agent" in run_live_lines + for line in run_live_lines: + if line.startswith("PATH="): + path_parts = line.strip().split("=", 1)[-1].split(":") + assert "/tmp/.bbot_test/tools" in path_parts + run_live_lines_sudo = [l async for l in scan1.helpers.run_live(["env"], sudo=True)] + assert "BBOT_WEB_USER_AGENT=BBOT Test User-Agent" in run_live_lines_sudo + for line in run_live_lines_sudo: + if line.startswith("PATH="): + path_parts = line.strip().split("=", 1)[-1].split(":") + assert "/tmp/.bbot_test/tools" in path_parts + + await scan1._cleanup() diff --git a/bbot/test/test_step_1/test_config.py b/bbot/test/test_step_1/test_config.py new file mode 100644 index 0000000000..72f7961379 --- /dev/null +++ b/bbot/test/test_step_1/test_config.py @@ -0,0 +1,25 @@ +from ..bbot_fixtures import * # noqa: F401 + + +@pytest.mark.asyncio +async def test_config(bbot_scanner): + config = OmegaConf.create( + { + "plumbus": "asdf", + "speculate": True, + "modules": { + "ipneighbor": {"test_option": "ipneighbor"}, + "python": {"test_option": "asdf"}, + "speculate": {"test_option": "speculate"}, + }, + } + ) + scan1 = bbot_scanner("127.0.0.1", modules=["ipneighbor"], config=config) + await scan1.load_modules() + assert scan1.config.web.user_agent == "BBOT Test User-Agent" + assert scan1.config.plumbus == "asdf" + assert scan1.modules["ipneighbor"].config.test_option == "ipneighbor" + assert scan1.modules["python"].config.test_option == "asdf" + assert scan1.modules["speculate"].config.test_option == "speculate" + + await scan1._cleanup() diff --git a/bbot/test/test_step_1/test_depsinstaller.py b/bbot/test/test_step_1/test_depsinstaller.py new file mode 100644 index 0000000000..9dff1c0281 --- /dev/null +++ b/bbot/test/test_step_1/test_depsinstaller.py @@ -0,0 +1,25 @@ +from ..bbot_fixtures import * + + +@pytest.mark.asyncio +async def test_depsinstaller(monkeypatch, bbot_scanner): + scan = bbot_scanner( + "127.0.0.1", + ) + + # test shell + test_file = Path("/tmp/test_file") + test_file.unlink(missing_ok=True) + scan.helpers.depsinstaller.shell(module="plumbus", commands=[f"touch {test_file}"]) + assert test_file.is_file() + test_file.unlink(missing_ok=True) + + # test tasks + scan.helpers.depsinstaller.tasks( + module="plumbus", + tasks=[{"name": "test task execution", "ansible.builtin.shell": {"cmd": f"touch {test_file}"}}], + ) + assert 
test_file.is_file() + test_file.unlink(missing_ok=True) + + await scan._cleanup() diff --git a/bbot/test/test_step_1/test_dns.py b/bbot/test/test_step_1/test_dns.py new file mode 100644 index 0000000000..a8bfefa3a1 --- /dev/null +++ b/bbot/test/test_step_1/test_dns.py @@ -0,0 +1,831 @@ +from ..bbot_fixtures import * + +from bbot.core.helpers.dns.helpers import extract_targets, service_record, common_srvs + + +mock_records = { + "one.one.one.one": { + "A": ["1.1.1.1", "1.0.0.1"], + "AAAA": ["2606:4700:4700::1111", "2606:4700:4700::1001"], + "TXT": [ + '"v=spf1 ip4:103.151.192.0/23 ip4:185.12.80.0/22 ip4:188.172.128.0/20 ip4:192.161.144.0/20 ip4:216.198.0.0/18 ~all"' + ], + }, + "1.1.1.1.in-addr.arpa": {"PTR": ["one.one.one.one."]}, +} + + +@pytest.mark.asyncio +async def test_dns_engine(bbot_scanner): + scan = bbot_scanner() + await scan.helpers._mock_dns( + {"one.one.one.one": {"A": ["1.1.1.1"]}, "1.1.1.1.in-addr.arpa": {"PTR": ["one.one.one.one"]}} + ) + result = await scan.helpers.resolve("one.one.one.one") + assert "1.1.1.1" in result + assert "2606:4700:4700::1111" not in result + + results = [_ async for _ in scan.helpers.resolve_batch(("one.one.one.one", "1.1.1.1"))] + pass_1 = False + pass_2 = False + for query, result in results: + if query == "one.one.one.one" and "1.1.1.1" in result: + pass_1 = True + elif query == "1.1.1.1" and "one.one.one.one" in result: + pass_2 = True + assert pass_1 and pass_2 + + results = [_ async for _ in scan.helpers.resolve_raw_batch((("one.one.one.one", "A"), ("1.1.1.1", "PTR")))] + pass_1 = False + pass_2 = False + for (query, rdtype), (answers, errors) in results: + results = [] + for answer in answers: + for t in extract_targets(answer): + results.append(t[1]) + if query == "one.one.one.one" and "1.1.1.1" in results: + pass_1 = True + elif query == "1.1.1.1" and "one.one.one.one" in results: + pass_2 = True + assert pass_1 and pass_2 + + from bbot.core.helpers.dns.mock import MockResolver + + # ensure dns records are being properly cleaned + mockresolver = MockResolver({"evilcorp.com": {"MX": ["0 ."]}}) + mx_records = await mockresolver.resolve("evilcorp.com", rdtype="MX") + results = set() + for r in mx_records: + results.update(extract_targets(r)) + assert not results + + await scan._cleanup() + + +@pytest.mark.asyncio +async def test_dns_resolution(bbot_scanner): + scan = bbot_scanner("1.1.1.1") + + from bbot.core.helpers.dns.engine import DNSEngine + + dnsengine = DNSEngine(None) + await dnsengine._mock_dns(mock_records) + + # lowest level functions + a_responses = await dnsengine._resolve_hostname("one.one.one.one") + aaaa_responses = await dnsengine._resolve_hostname("one.one.one.one", rdtype="AAAA") + ip_responses = await dnsengine._resolve_ip("1.1.1.1") + assert a_responses[0].response.answer[0][0].address in ("1.1.1.1", "1.0.0.1") + assert aaaa_responses[0].response.answer[0][0].address in ("2606:4700:4700::1111", "2606:4700:4700::1001") + assert ip_responses[0].response.answer[0][0].target.to_text() in ("one.one.one.one.",) + + # mid level functions + answers, errors = await dnsengine.resolve_raw("one.one.one.one", type="A") + responses = [] + for answer in answers: + responses += list(extract_targets(answer)) + assert ("A", "1.1.1.1") in responses + assert ("AAAA", "2606:4700:4700::1111") not in responses + answers, errors = await dnsengine.resolve_raw("one.one.one.one", type="AAAA") + responses = [] + for answer in answers: + responses += list(extract_targets(answer)) + assert ("A", "1.1.1.1") not in responses + assert ("AAAA", 
"2606:4700:4700::1111") in responses + answers, errors = await dnsengine.resolve_raw("1.1.1.1") + responses = [] + for answer in answers: + responses += list(extract_targets(answer)) + assert ("PTR", "one.one.one.one") in responses + + await dnsengine._shutdown() + + # high level functions + dnsengine = DNSEngine(None) + assert "1.1.1.1" in await dnsengine.resolve("one.one.one.one") + assert "2606:4700:4700::1111" in await dnsengine.resolve("one.one.one.one", type="AAAA") + assert "one.one.one.one" in await dnsengine.resolve("1.1.1.1") + for rdtype in ("NS", "SOA", "MX", "TXT"): + results = await dnsengine.resolve("google.com", type=rdtype) + assert len(results) > 0 + + # batch resolution + batch_results = [r async for r in dnsengine.resolve_batch(["1.1.1.1", "one.one.one.one"])] + assert len(batch_results) == 2 + batch_results = dict(batch_results) + assert any(x in batch_results["one.one.one.one"] for x in ("1.1.1.1", "1.0.0.1")) + assert "one.one.one.one" in batch_results["1.1.1.1"] + + # custom batch resolution + batch_results = [r async for r in dnsengine.resolve_raw_batch([("1.1.1.1", "PTR"), ("one.one.one.one", "A")])] + batch_results_new = [] + for query, (answers, errors) in batch_results: + for answer in answers: + batch_results_new.append((answer.to_text(), answer.rdtype.name)) + assert len(batch_results_new) == 3 + assert any(answer == "1.0.0.1" and rdtype == "A" for answer, rdtype in batch_results_new) + assert any(answer == "one.one.one.one." and rdtype == "PTR" for answer, rdtype in batch_results_new) + + # dns cache + dnsengine._dns_cache.clear() + assert hash(("1.1.1.1", "PTR")) not in dnsengine._dns_cache + assert hash(("one.one.one.one", "A")) not in dnsengine._dns_cache + assert hash(("one.one.one.one", "AAAA")) not in dnsengine._dns_cache + await dnsengine.resolve("1.1.1.1", use_cache=False) + await dnsengine.resolve("one.one.one.one", use_cache=False) + assert hash(("1.1.1.1", "PTR")) not in dnsengine._dns_cache + assert hash(("one.one.one.one", "A")) not in dnsengine._dns_cache + assert hash(("one.one.one.one", "AAAA")) not in dnsengine._dns_cache + + await dnsengine.resolve("1.1.1.1") + assert hash(("1.1.1.1", "PTR")) in dnsengine._dns_cache + await dnsengine.resolve("one.one.one.one", type="A") + assert hash(("one.one.one.one", "A")) in dnsengine._dns_cache + assert hash(("one.one.one.one", "AAAA")) not in dnsengine._dns_cache + dnsengine._dns_cache.clear() + await dnsengine.resolve("one.one.one.one", type="AAAA") + assert hash(("one.one.one.one", "AAAA")) in dnsengine._dns_cache + assert hash(("one.one.one.one", "A")) not in dnsengine._dns_cache + + await dnsengine._shutdown() + + # Ensure events with hosts have resolved_hosts attribute populated + await scan._prep() + resolved_hosts_event1 = scan.make_event("one.one.one.one", "DNS_NAME", parent=scan.root_event) + resolved_hosts_event2 = scan.make_event("http://one.one.one.one/", "URL_UNVERIFIED", parent=scan.root_event) + dnsresolve = scan.modules["dnsresolve"] + await dnsresolve.handle_event(resolved_hosts_event1) + await dnsresolve.handle_event(resolved_hosts_event2) + assert "1.1.1.1" in resolved_hosts_event2.resolved_hosts + # URL event should not have dns_children + assert not resolved_hosts_event2.dns_children + assert resolved_hosts_event1.resolved_hosts == resolved_hosts_event2.resolved_hosts + # DNS_NAME event should have dns_children + assert "1.1.1.1" in resolved_hosts_event1.dns_children["A"] + assert "A" in resolved_hosts_event1.raw_dns_records + assert "AAAA" in resolved_hosts_event1.raw_dns_records 
+ assert "a-record" in resolved_hosts_event1.tags + assert "a-record" not in resolved_hosts_event2.tags + + scan2 = bbot_scanner("evilcorp.com", config={"dns": {"minimal": False}}) + await scan2.helpers.dns._mock_dns( + { + "evilcorp.com": {"TXT": ['"v=spf1 include:cloudprovider.com ~all"']}, + "cloudprovider.com": {"A": ["1.2.3.4"]}, + }, + ) + events = [e async for e in scan2.async_start()] + assert 1 == len( + [e for e in events if e.type == "DNS_NAME" and e.data == "cloudprovider.com" and "affiliate" in e.tags] + ) + + await scan._cleanup() + await scan2._cleanup() + + +@pytest.mark.asyncio +async def test_wildcards(bbot_scanner): + scan = bbot_scanner("1.1.1.1") + helpers = scan.helpers + + from bbot.core.helpers.dns.engine import DNSEngine, all_rdtypes + + dnsengine = DNSEngine(None, debug=True) + + # is_wildcard_domain + wildcard_domains = await dnsengine.is_wildcard_domain("asdf.github.io", all_rdtypes) + assert len(dnsengine._wildcard_cache) == len(all_rdtypes) + (len(all_rdtypes) - 2) + for rdtype in all_rdtypes: + assert hash(("github.io", rdtype)) in dnsengine._wildcard_cache + if rdtype not in ("A", "AAAA"): + assert hash(("asdf.github.io", rdtype)) in dnsengine._wildcard_cache + assert "github.io" in wildcard_domains + assert "A" in wildcard_domains["github.io"] + assert "SRV" not in wildcard_domains["github.io"] + assert wildcard_domains["github.io"]["A"] and all(helpers.is_ip(r) for r in wildcard_domains["github.io"]["A"][0]) + dnsengine._wildcard_cache.clear() + + # is_wildcard + for test_domain in ("blacklanternsecurity.github.io", "asdf.asdf.asdf.github.io"): + wildcard_rdtypes = await dnsengine.is_wildcard(test_domain, all_rdtypes) + assert "A" in wildcard_rdtypes + assert "SRV" not in wildcard_rdtypes + assert wildcard_rdtypes["A"] == (True, "github.io") + assert wildcard_rdtypes["AAAA"] == (True, "github.io") + assert len(dnsengine._wildcard_cache) == 2 + for rdtype in ("A", "AAAA"): + assert hash(("github.io", rdtype)) in dnsengine._wildcard_cache + assert len(dnsengine._wildcard_cache[hash(("github.io", rdtype))]) == 2 + assert len(dnsengine._wildcard_cache[hash(("github.io", rdtype))][0]) > 0 + assert len(dnsengine._wildcard_cache[hash(("github.io", rdtype))][1]) > 0 + dnsengine._wildcard_cache.clear() + + ### wildcard TXT record ### + + custom_lookup = """ +def custom_lookup(query, rdtype): + if rdtype == "TXT" and query.strip(".").endswith("test.evilcorp.com"): + return {""} +""" + + mock_data = { + "evilcorp.com": {"A": ["127.0.0.1"]}, + "test.evilcorp.com": {"A": ["127.0.0.2"]}, + "www.test.evilcorp.com": {"AAAA": ["dead::beef"]}, + } + + # basic sanity checks + + await dnsengine._mock_dns(mock_data, custom_lookup_fn=custom_lookup) + + a_result = await dnsengine.resolve("evilcorp.com") + assert a_result == {"127.0.0.1"} + aaaa_result = await dnsengine.resolve("www.test.evilcorp.com", type="AAAA") + assert aaaa_result == {"dead::beef"} + txt_result = await dnsengine.resolve("asdf.www.test.evilcorp.com", type="TXT") + assert txt_result == set() + txt_result_raw, errors = await dnsengine.resolve_raw("asdf.www.test.evilcorp.com", type="TXT") + txt_result_raw = list(txt_result_raw) + assert txt_result_raw + + await dnsengine._shutdown() + + # first, we check with wildcard detection disabled + + scan = bbot_scanner( + "bbot.fdsa.www.test.evilcorp.com", + whitelist=["evilcorp.com"], + config={ + "dns": {"minimal": False, "disable": False, "search_distance": 5, "wildcard_ignore": ["evilcorp.com"]}, + "speculate": True, + }, + ) + await 
scan.helpers.dns._mock_dns(mock_data, custom_lookup_fn=custom_lookup) + + events = [e async for e in scan.async_start()] + assert len(events) == 12 + assert len([e for e in events if e.type == "DNS_NAME"]) == 5 + assert len([e for e in events if e.type == "RAW_DNS_RECORD"]) == 4 + assert sorted([e.data for e in events if e.type == "DNS_NAME"]) == [ + "bbot.fdsa.www.test.evilcorp.com", + "evilcorp.com", + "fdsa.www.test.evilcorp.com", + "test.evilcorp.com", + "www.test.evilcorp.com", + ] + + dns_names_by_host = {e.host: e for e in events if e.type == "DNS_NAME"} + assert dns_names_by_host["evilcorp.com"].tags == {"domain", "private-ip", "in-scope", "a-record"} + assert dns_names_by_host["evilcorp.com"].resolved_hosts == {"127.0.0.1"} + assert dns_names_by_host["test.evilcorp.com"].tags == { + "subdomain", + "private-ip", + "in-scope", + "a-record", + "txt-record", + } + assert dns_names_by_host["test.evilcorp.com"].resolved_hosts == {"127.0.0.2"} + assert dns_names_by_host["www.test.evilcorp.com"].tags == {"subdomain", "in-scope", "aaaa-record", "txt-record"} + assert dns_names_by_host["www.test.evilcorp.com"].resolved_hosts == {"dead::beef"} + assert dns_names_by_host["fdsa.www.test.evilcorp.com"].tags == {"subdomain", "in-scope", "txt-record"} + assert dns_names_by_host["fdsa.www.test.evilcorp.com"].resolved_hosts == set() + assert dns_names_by_host["bbot.fdsa.www.test.evilcorp.com"].tags == { + "target", + "subdomain", + "in-scope", + "txt-record", + } + assert dns_names_by_host["bbot.fdsa.www.test.evilcorp.com"].resolved_hosts == set() + + raw_records_by_host = {e.host: e for e in events if e.type == "RAW_DNS_RECORD"} + assert raw_records_by_host["test.evilcorp.com"].tags == {"subdomain", "in-scope", "txt-record"} + assert raw_records_by_host["test.evilcorp.com"].resolved_hosts == {"127.0.0.2"} + assert raw_records_by_host["www.test.evilcorp.com"].tags == {"subdomain", "in-scope", "txt-record"} + assert raw_records_by_host["www.test.evilcorp.com"].resolved_hosts == {"dead::beef"} + assert raw_records_by_host["fdsa.www.test.evilcorp.com"].tags == {"subdomain", "in-scope", "txt-record"} + assert raw_records_by_host["fdsa.www.test.evilcorp.com"].resolved_hosts == set() + assert raw_records_by_host["bbot.fdsa.www.test.evilcorp.com"].tags == {"subdomain", "in-scope", "txt-record"} + assert raw_records_by_host["bbot.fdsa.www.test.evilcorp.com"].resolved_hosts == set() + + # then we run it again with wildcard detection enabled + + scan = bbot_scanner( + "bbot.fdsa.www.test.evilcorp.com", + whitelist=["evilcorp.com"], + config={ + "dns": {"minimal": False, "disable": False, "search_distance": 5, "wildcard_ignore": []}, + "speculate": True, + }, + ) + await scan.helpers.dns._mock_dns(mock_data, custom_lookup_fn=custom_lookup) + + events = [e async for e in scan.async_start()] + assert len(events) == 12 + assert len([e for e in events if e.type == "DNS_NAME"]) == 5 + assert len([e for e in events if e.type == "RAW_DNS_RECORD"]) == 4 + assert sorted([e.data for e in events if e.type == "DNS_NAME"]) == [ + "_wildcard.test.evilcorp.com", + "bbot.fdsa.www.test.evilcorp.com", + "evilcorp.com", + "test.evilcorp.com", + "www.test.evilcorp.com", + ] + + dns_names_by_host = {e.host: e for e in events if e.type == "DNS_NAME"} + assert dns_names_by_host["evilcorp.com"].tags == {"domain", "private-ip", "in-scope", "a-record"} + assert dns_names_by_host["evilcorp.com"].resolved_hosts == {"127.0.0.1"} + assert dns_names_by_host["test.evilcorp.com"].tags == { + "subdomain", + "private-ip", + "in-scope", + 
"a-record", + "txt-record", + } + assert dns_names_by_host["test.evilcorp.com"].resolved_hosts == {"127.0.0.2"} + assert dns_names_by_host["_wildcard.test.evilcorp.com"].tags == { + "subdomain", + "in-scope", + "txt-record", + "txt-wildcard", + "wildcard", + } + assert dns_names_by_host["_wildcard.test.evilcorp.com"].resolved_hosts == set() + assert dns_names_by_host["www.test.evilcorp.com"].tags == { + "subdomain", + "in-scope", + "aaaa-record", + "txt-record", + "txt-wildcard", + "wildcard", + } + assert dns_names_by_host["www.test.evilcorp.com"].resolved_hosts == {"dead::beef"} + assert dns_names_by_host["bbot.fdsa.www.test.evilcorp.com"].tags == { + "target", + "subdomain", + "in-scope", + "txt-record", + "txt-wildcard", + "wildcard", + } + assert dns_names_by_host["bbot.fdsa.www.test.evilcorp.com"].resolved_hosts == set() + + raw_records_by_host = {e.host: e for e in events if e.type == "RAW_DNS_RECORD"} + assert raw_records_by_host["test.evilcorp.com"].tags == {"subdomain", "in-scope", "txt-record"} + assert raw_records_by_host["test.evilcorp.com"].resolved_hosts == {"127.0.0.2"} + assert raw_records_by_host["www.test.evilcorp.com"].tags == {"subdomain", "in-scope", "txt-record", "txt-wildcard"} + assert raw_records_by_host["www.test.evilcorp.com"].resolved_hosts == {"dead::beef"} + assert raw_records_by_host["_wildcard.test.evilcorp.com"].tags == { + "subdomain", + "in-scope", + "txt-record", + "txt-wildcard", + } + assert raw_records_by_host["_wildcard.test.evilcorp.com"].resolved_hosts == set() + assert raw_records_by_host["bbot.fdsa.www.test.evilcorp.com"].tags == { + "subdomain", + "in-scope", + "txt-record", + "txt-wildcard", + } + assert raw_records_by_host["bbot.fdsa.www.test.evilcorp.com"].resolved_hosts == set() + + ### runaway SRV wildcard ### + + custom_lookup = """ +def custom_lookup(query, rdtype): + if rdtype == "SRV" and query.strip(".").endswith("evilcorp.com"): + return {f"0 100 389 test.{query}"} +""" + + mock_data = { + "evilcorp.com": {"A": ["127.0.0.1"]}, + "test.evilcorp.com": {"AAAA": ["dead::beef"]}, + } + + scan = bbot_scanner( + "evilcorp.com", + config={ + "dns": { + "minimal": False, + "disable": False, + "search_distance": 5, + "wildcard_ignore": [], + "runaway_limit": 3, + }, + }, + ) + await scan.helpers.dns._mock_dns(mock_data, custom_lookup_fn=custom_lookup) + + events = [e async for e in scan.async_start()] + + assert len(events) == 11 + assert len([e for e in events if e.type == "DNS_NAME"]) == 5 + assert len([e for e in events if e.type == "RAW_DNS_RECORD"]) == 4 + assert sorted([e.data for e in events if e.type == "DNS_NAME"]) == [ + "evilcorp.com", + "test.evilcorp.com", + "test.test.evilcorp.com", + "test.test.test.evilcorp.com", + "test.test.test.test.evilcorp.com", + ] + + dns_names_by_host = {e.host: e for e in events if e.type == "DNS_NAME"} + assert dns_names_by_host["evilcorp.com"].tags == { + "target", + "a-record", + "in-scope", + "domain", + "srv-record", + "private-ip", + } + assert dns_names_by_host["test.evilcorp.com"].tags == { + "in-scope", + "srv-record", + "aaaa-record", + "srv-wildcard-possible", + "wildcard-possible", + "subdomain", + } + assert dns_names_by_host["test.test.evilcorp.com"].tags == { + "in-scope", + "srv-record", + "srv-wildcard-possible", + "wildcard-possible", + "subdomain", + } + assert dns_names_by_host["test.test.test.evilcorp.com"].tags == { + "in-scope", + "srv-record", + "srv-wildcard-possible", + "wildcard-possible", + "subdomain", + } + assert dns_names_by_host["test.test.test.test.evilcorp.com"].tags 
== { + "in-scope", + "srv-record", + "srv-wildcard-possible", + "wildcard-possible", + "subdomain", + "runaway-dns-3", + } + + raw_records_by_host = {e.host: e for e in events if e.type == "RAW_DNS_RECORD"} + assert raw_records_by_host["evilcorp.com"].tags == {"in-scope", "srv-record", "domain"} + assert raw_records_by_host["test.evilcorp.com"].tags == { + "in-scope", + "srv-record", + "srv-wildcard-possible", + "subdomain", + } + assert raw_records_by_host["test.test.evilcorp.com"].tags == { + "in-scope", + "srv-record", + "srv-wildcard-possible", + "subdomain", + } + assert raw_records_by_host["test.test.test.evilcorp.com"].tags == { + "in-scope", + "srv-record", + "srv-wildcard-possible", + "subdomain", + } + + scan = bbot_scanner("1.1.1.1") + helpers = scan.helpers + + # event resolution + wildcard_event1 = scan.make_event("wat.asdf.fdsa.github.io", "DNS_NAME", parent=scan.root_event) + wildcard_event1.scope_distance = 0 + wildcard_event2 = scan.make_event("wats.asd.fdsa.github.io", "DNS_NAME", parent=scan.root_event) + wildcard_event2.scope_distance = 0 + wildcard_event3 = scan.make_event("github.io", "DNS_NAME", parent=scan.root_event) + wildcard_event3.scope_distance = 0 + + await scan._prep() + dnsresolve = scan.modules["dnsresolve"] + await dnsresolve.handle_event(wildcard_event1) + await dnsresolve.handle_event(wildcard_event2) + await dnsresolve.handle_event(wildcard_event3) + assert "wildcard" in wildcard_event1.tags + assert "a-wildcard" in wildcard_event1.tags + assert "srv-wildcard" not in wildcard_event1.tags + assert "wildcard" in wildcard_event2.tags + assert "a-wildcard" in wildcard_event2.tags + assert "srv-wildcard" not in wildcard_event2.tags + assert wildcard_event1.data == "_wildcard.github.io" + assert wildcard_event2.data == "_wildcard.github.io" + assert wildcard_event3.data == "github.io" + + # dns resolve distance + event_distance_0 = scan.make_event( + "8.8.8.8", module=scan.modules["dnsresolve"]._make_dummy_module("PTR"), parent=scan.root_event + ) + assert event_distance_0.dns_resolve_distance == 0 + event_distance_1 = scan.make_event( + "evilcorp.com", module=scan.modules["dnsresolve"]._make_dummy_module("A"), parent=event_distance_0 + ) + assert event_distance_1.dns_resolve_distance == 1 + event_distance_2 = scan.make_event( + "1.2.3.4", module=scan.modules["dnsresolve"]._make_dummy_module("PTR"), parent=event_distance_1 + ) + assert event_distance_2.dns_resolve_distance == 1 + event_distance_3 = scan.make_event( + "evilcorp.org", module=scan.modules["dnsresolve"]._make_dummy_module("A"), parent=event_distance_2 + ) + assert event_distance_3.dns_resolve_distance == 2 + + await scan._cleanup() + + from bbot.scanner import Scanner + + # test with full scan + scan2 = Scanner("asdfl.gashdgkjsadgsdf.github.io", whitelist=["github.io"], config={"dns": {"minimal": False}}) + await scan2._prep() + other_event = scan2.make_event( + "lkjg.sdfgsg.jgkhajshdsadf.github.io", module=scan2.modules["dnsresolve"], parent=scan2.root_event + ) + await scan2.ingress_module.queue_event(other_event, {}) + events = [e async for e in scan2.async_start()] + assert len(events) == 4 + assert 2 == len([e for e in events if e.type == "SCAN"]) + unmodified_wildcard_events = [ + e for e in events if e.type == "DNS_NAME" and e.data == "asdfl.gashdgkjsadgsdf.github.io" + ] + assert len(unmodified_wildcard_events) == 1 + assert unmodified_wildcard_events[0].tags.issuperset( + { + "a-record", + "target", + "aaaa-wildcard", + "in-scope", + "subdomain", + "aaaa-record", + "wildcard", + 
"a-wildcard", + } + ) + modified_wildcard_events = [e for e in events if e.type == "DNS_NAME" and e.data == "_wildcard.github.io"] + assert len(modified_wildcard_events) == 1 + assert modified_wildcard_events[0].tags.issuperset( + { + "a-record", + "aaaa-wildcard", + "in-scope", + "subdomain", + "aaaa-record", + "wildcard", + "a-wildcard", + } + ) + assert modified_wildcard_events[0].host_original == "lkjg.sdfgsg.jgkhajshdsadf.github.io" + + # test with full scan (wildcard detection disabled for domain) + scan2 = Scanner( + "asdfl.gashdgkjsadgsdf.github.io", + whitelist=["github.io"], + config={"dns": {"wildcard_ignore": ["github.io"]}}, + exclude_modules=["cloudcheck"], + ) + await scan2._prep() + other_event = scan2.make_event( + "lkjg.sdfgsg.jgkhajshdsadf.github.io", module=scan2.modules["dnsresolve"], parent=scan2.root_event + ) + await scan2.ingress_module.queue_event(other_event, {}) + events = [e async for e in scan2.async_start()] + assert len(events) == 4 + assert 2 == len([e for e in events if e.type == "SCAN"]) + unmodified_wildcard_events = [e for e in events if e.type == "DNS_NAME" and "_wildcard" not in e.data] + assert len(unmodified_wildcard_events) == 2 + assert 1 == len( + [ + e + for e in unmodified_wildcard_events + if e.data == "asdfl.gashdgkjsadgsdf.github.io" + and e.tags.issuperset( + { + "target", + "a-record", + "in-scope", + "subdomain", + "aaaa-record", + } + ) + ] + ) + assert 1 == len( + [ + e + for e in unmodified_wildcard_events + if e.data == "lkjg.sdfgsg.jgkhajshdsadf.github.io" + and e.tags.issuperset( + { + "a-record", + "in-scope", + "subdomain", + "aaaa-record", + } + ) + ] + ) + modified_wildcard_events = [e for e in events if e.type == "DNS_NAME" and e.data == "_wildcard.github.io"] + assert len(modified_wildcard_events) == 0 + + +@pytest.mark.asyncio +async def test_wildcard_deduplication(bbot_scanner): + custom_lookup = """ +def custom_lookup(query, rdtype): + if rdtype == "TXT" and query.strip(".").endswith("evilcorp.com"): + return {""} +""" + + mock_data = { + "evilcorp.com": {"A": ["127.0.0.1"]}, + } + + from bbot.modules.base import BaseModule + + class DummyModule(BaseModule): + watched_events = ["DNS_NAME"] + per_domain_only = True + + async def handle_event(self, event): + for i in range(30): + await self.emit_event(f"www{i}.evilcorp.com", "DNS_NAME", parent=event) + + # scan without omitted event type + scan = bbot_scanner( + "evilcorp.com", config={"dns": {"minimal": False, "wildcard_ignore": []}, "omit_event_types": []} + ) + await scan.helpers.dns._mock_dns(mock_data, custom_lookup_fn=custom_lookup) + dummy_module = DummyModule(scan) + scan.modules["dummy_module"] = dummy_module + events = [e async for e in scan.async_start()] + dns_name_events = [e for e in events if e.type == "DNS_NAME"] + assert len(dns_name_events) == 2 + assert 1 == len([e for e in dns_name_events if e.data == "_wildcard.evilcorp.com"]) + + +@pytest.mark.asyncio +async def test_dns_raw_records(bbot_scanner): + from bbot.modules.base import BaseModule + + class DummyModule(BaseModule): + watched_events = ["*"] + + async def setup(self): + self.events = [] + return True + + async def handle_event(self, event): + self.events.append(event) + + # scan without omitted event type + scan = bbot_scanner("one.one.one.one", "1.1.1.1", config={"dns": {"minimal": False}, "omit_event_types": []}) + await scan.helpers.dns._mock_dns(mock_records) + dummy_module = DummyModule(scan) + scan.modules["dummy_module"] = dummy_module + events = [e async for e in scan.async_start()] + 
assert 1 == len([e for e in events if e.type == "RAW_DNS_RECORD"]) + assert 1 == len( + [ + e + for e in events + if e.type == "RAW_DNS_RECORD" + and e.host == "one.one.one.one" + and e.data["host"] == "one.one.one.one" + and e.data["type"] == "TXT" + and e.data["answer"] + == '"v=spf1 ip4:103.151.192.0/23 ip4:185.12.80.0/22 ip4:188.172.128.0/20 ip4:192.161.144.0/20 ip4:216.198.0.0/18 ~all"' + and e.discovery_context == "TXT lookup on one.one.one.one produced RAW_DNS_RECORD" + ] + ) + assert 1 == len( + [ + e + for e in dummy_module.events + if e.type == "RAW_DNS_RECORD" + and e.host == "one.one.one.one" + and e.data["host"] == "one.one.one.one" + and e.data["type"] == "TXT" + and e.data["answer"] + == '"v=spf1 ip4:103.151.192.0/23 ip4:185.12.80.0/22 ip4:188.172.128.0/20 ip4:192.161.144.0/20 ip4:216.198.0.0/18 ~all"' + and e.discovery_context == "TXT lookup on one.one.one.one produced RAW_DNS_RECORD" + ] + ) + # scan with omitted event type + scan = bbot_scanner("one.one.one.one", config={"dns": {"minimal": False}, "omit_event_types": ["RAW_DNS_RECORD"]}) + await scan.helpers.dns._mock_dns(mock_records) + dummy_module = DummyModule(scan) + scan.modules["dummy_module"] = dummy_module + events = [e async for e in scan.async_start()] + # no raw records should be emitted + assert 0 == len([e for e in events if e.type == "RAW_DNS_RECORD"]) + assert 0 == len([e for e in dummy_module.events if e.type == "RAW_DNS_RECORD"]) + + # scan with watching module + DummyModule.watched_events = ["RAW_DNS_RECORD"] + scan = bbot_scanner("one.one.one.one", config={"dns": {"minimal": False}, "omit_event_types": ["RAW_DNS_RECORD"]}) + await scan.helpers.dns._mock_dns(mock_records) + dummy_module = DummyModule(scan) + scan.modules["dummy_module"] = dummy_module + events = [e async for e in scan.async_start()] + # no raw records should be output + assert 0 == len([e for e in events if e.type == "RAW_DNS_RECORD"]) + # but they should still make it to the module + assert 1 == len( + [ + e + for e in dummy_module.events + if e.type == "RAW_DNS_RECORD" + and e.host == "one.one.one.one" + and e.data["host"] == "one.one.one.one" + and e.data["type"] == "TXT" + and e.data["answer"] + == '"v=spf1 ip4:103.151.192.0/23 ip4:185.12.80.0/22 ip4:188.172.128.0/20 ip4:192.161.144.0/20 ip4:216.198.0.0/18 ~all"' + and e.discovery_context == "TXT lookup on one.one.one.one produced RAW_DNS_RECORD" + ] + ) + + +@pytest.mark.asyncio +async def test_dns_graph_structure(bbot_scanner): + scan = bbot_scanner("https://evilcorp.com", config={"dns": {"search_distance": 1, "minimal": False}}) + await scan.helpers.dns._mock_dns( + { + "evilcorp.com": { + "CNAME": [ + "www.evilcorp.com", + ] + }, + "www.evilcorp.com": {"CNAME": ["test.evilcorp.com"]}, + "test.evilcorp.com": {"A": ["127.0.0.1"]}, + } + ) + events = [e async for e in scan.async_start()] + assert len(events) == 6 + non_scan_events = [e for e in events if e.type != "SCAN"] + assert sorted([e.type for e in non_scan_events]) == ["DNS_NAME", "DNS_NAME", "DNS_NAME", "URL_UNVERIFIED"] + events_by_data = {e.data: e for e in non_scan_events} + assert set(events_by_data) == {"https://evilcorp.com/", "evilcorp.com", "www.evilcorp.com", "test.evilcorp.com"} + assert events_by_data["test.evilcorp.com"].parent.data == "www.evilcorp.com" + assert str(events_by_data["test.evilcorp.com"].module) == "CNAME" + assert events_by_data["www.evilcorp.com"].parent.data == "evilcorp.com" + assert str(events_by_data["www.evilcorp.com"].module) == "CNAME" + assert events_by_data["evilcorp.com"].parent.data 
== "https://evilcorp.com/" + assert str(events_by_data["evilcorp.com"].module) == "host" + + +@pytest.mark.asyncio +async def test_hostname_extraction(bbot_scanner): + scan = bbot_scanner("evilcorp.com", config={"dns": {"minimal": False}}) + await scan.helpers.dns._mock_dns( + { + "evilcorp.com": { + "A": ["127.0.0.1"], + "TXT": [ + "v=spf1 include:spf-a.evilcorp.com include:spf-b.evilcorp.com include:icpbounce.com include:shops.shopify.com include:_spf.qemailserver.com include:spf.mandrillapp.com include:spf.protection.office365.us include:spf-003ea501.gpphosted.com 127.0.0.1 -all" + ], + } + } + ) + events = [e async for e in scan.async_start()] + dns_name_events = [e for e in events if e.type == "DNS_NAME"] + main_dns_event = [e for e in dns_name_events if e.data == "evilcorp.com"] + assert len(main_dns_event) == 1 + main_dns_event = main_dns_event[0] + dns_children = main_dns_event.dns_children + assert dns_children["A"] == {"127.0.0.1"} + assert dns_children["TXT"] == { + "spf-a.evilcorp.com", + "spf-b.evilcorp.com", + "icpbounce.com", + "shops.shopify.com", + "_spf.qemailserver.com", + "spf.mandrillapp.com", + "spf.protection.office365.us", + "spf-003ea501.gpphosted.com", + "127.0.0.1", + } + + +@pytest.mark.asyncio +async def test_dns_helpers(bbot_scanner): + assert service_record("") is False + assert service_record("localhost") is False + assert service_record("www.example.com") is False + assert service_record("www.example.com", "SRV") is True + assert service_record("_custom._service.example.com", "SRV") is True + assert service_record("_custom._service.example.com", "A") is False + # top 100 most common SRV records + for srv_record in common_srvs[:100]: + hostname = f"{srv_record}.example.com" + assert service_record(hostname) is True + + # make sure system nameservers are excluded from use by DNS brute force + brute_nameservers = tempwordlist(["1.2.3.4", "8.8.4.4", "4.3.2.1", "8.8.8.8"]) + scan = bbot_scanner(config={"dns": {"brute_nameservers": brute_nameservers}}) + scan.helpers.dns.system_resolvers = ["8.8.8.8", "8.8.4.4"] + resolver_file = await scan.helpers.dns.brute.resolver_file() + resolvers = set(scan.helpers.read_file(resolver_file)) + assert resolvers == {"1.2.3.4", "4.3.2.1"} diff --git a/bbot/test/test_step_1/test_docs.py b/bbot/test/test_step_1/test_docs.py new file mode 100644 index 0000000000..a86947ff02 --- /dev/null +++ b/bbot/test/test_step_1/test_docs.py @@ -0,0 +1,4 @@ +def test_docs(): + from bbot.scripts.docs import update_docs + + update_docs() diff --git a/bbot/test/test_step_1/test_engine.py b/bbot/test/test_step_1/test_engine.py new file mode 100644 index 0000000000..653c3dcd6c --- /dev/null +++ b/bbot/test/test_step_1/test_engine.py @@ -0,0 +1,146 @@ +from ..bbot_fixtures import * + + +@pytest.mark.asyncio +async def test_engine(): + from bbot.core.engine import EngineClient, EngineServer + + counter = 0 + yield_cancelled = False + yield_errored = False + return_started = False + return_finished = False + return_cancelled = False + return_errored = False + + class TestEngineServer(EngineServer): + CMDS = { + 0: "return_thing", + 1: "yield_stuff", + } + + async def return_thing(self, n): + nonlocal return_started + nonlocal return_finished + nonlocal return_cancelled + nonlocal return_errored + try: + return_started = True + await asyncio.sleep(n) + return_finished = True + return f"thing{n}" + except asyncio.CancelledError: + return_cancelled = True + raise + except Exception: + return_errored = True + raise + + async def yield_stuff(self, n): + 
nonlocal counter + nonlocal yield_cancelled + nonlocal yield_errored + try: + for i in range(n): + yield f"thing{i}" + counter += 1 + await asyncio.sleep(0.1) + except asyncio.CancelledError: + yield_cancelled = True + raise + except Exception: + yield_errored = True + raise + + class TestEngineClient(EngineClient): + SERVER_CLASS = TestEngineServer + + async def return_thing(self, n): + return await self.run_and_return("return_thing", n) + + async def yield_stuff(self, n): + async for _ in self.run_and_yield("yield_stuff", n): + yield _ + + test_engine = TestEngineClient() + + # test return functionality + return_res = await test_engine.return_thing(1) + assert return_res == "thing1" + + # test async generator + assert counter == 0 + assert yield_cancelled is False + yield_res = [r async for r in test_engine.yield_stuff(13)] + assert yield_res == [f"thing{i}" for i in range(13)] + assert len(yield_res) == 13 + assert counter == 13 + + # test async generator with cancellation + counter = 0 + yield_cancelled = False + yield_errored = False + agen = test_engine.yield_stuff(1000) + async for r in agen: + if counter > 10: + await agen.aclose() + break + await asyncio.sleep(5) + assert yield_cancelled is True + assert yield_errored is False + assert counter < 15 + + # test async generator with error + yield_cancelled = False + yield_errored = False + agen = test_engine.yield_stuff(None) + with pytest.raises(BBOTEngineError): + async for _ in agen: + pass + assert yield_cancelled is False + assert yield_errored is True + + # test return with cancellation + return_started = False + return_finished = False + return_cancelled = False + return_errored = False + task = asyncio.create_task(test_engine.return_thing(2)) + await asyncio.sleep(1) + task.cancel() + with pytest.raises(asyncio.CancelledError): + await task + await asyncio.sleep(0.1) + assert return_started is True + assert return_finished is False + assert return_cancelled is True + assert return_errored is False + + # test return with late cancellation + return_started = False + return_finished = False + return_cancelled = False + return_errored = False + task = asyncio.create_task(test_engine.return_thing(1)) + await asyncio.sleep(2) + task.cancel() + result = await task + assert result == "thing1" + assert return_started is True + assert return_finished is True + assert return_cancelled is False + assert return_errored is False + + # test return with error + return_started = False + return_finished = False + return_cancelled = False + return_errored = False + with pytest.raises(BBOTEngineError): + result = await test_engine.return_thing(None) + assert return_started is True + assert return_finished is False + assert return_cancelled is False + assert return_errored is True + + await test_engine.shutdown() diff --git a/bbot/test/test_step_1/test_events.py b/bbot/test/test_step_1/test_events.py new file mode 100644 index 0000000000..6c9d58003d --- /dev/null +++ b/bbot/test/test_step_1/test_events.py @@ -0,0 +1,1097 @@ +import json +import random +import ipaddress + +from ..bbot_fixtures import * +from bbot.scanner import Scanner +from bbot.core.helpers.regexes import event_uuid_regex + + +@pytest.mark.asyncio +async def test_events(events, helpers): + scan = Scanner() + await scan._prep() + + assert events.ipv4.type == "IP_ADDRESS" + assert events.ipv4.netloc == "8.8.8.8" + assert events.ipv4.port is None + assert events.ipv6.type == "IP_ADDRESS" + assert events.ipv6.netloc == "[2001:4860:4860::8888]" + assert events.ipv6.port is None + 
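# netloc renders as host:port, with IPv6 hosts bracketed per RFC 3986; a quick sketch of the same rule on an ad-hoc dummy event: + assert scan.make_event("https://[dead::beef]:8443/", dummy=True).netloc == "[dead::beef]:8443" +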
assert events.ipv6_open_port.netloc == "[2001:4860:4860::8888]:443" + assert events.netv4.type == "IP_RANGE" + assert events.netv4.netloc is None + assert "netloc" not in events.netv4.json() + assert events.netv6.type == "IP_RANGE" + assert events.domain.type == "DNS_NAME" + assert events.domain.netloc == "publicapis.org" + assert events.domain.port is None + assert "domain" in events.domain.tags + assert events.subdomain.type == "DNS_NAME" + assert "subdomain" in events.subdomain.tags + assert events.open_port.type == "OPEN_TCP_PORT" + assert events.url_unverified.type == "URL_UNVERIFIED" + assert events.ipv4_url_unverified.type == "URL_UNVERIFIED" + assert events.ipv6_url_unverified.type == "URL_UNVERIFIED" + assert "" not in events.ipv4 + assert None not in events.ipv4 + assert 1 not in events.ipv4 + assert False not in events.ipv4 + + # ip tests + assert events.ipv4 == scan.make_event("8.8.8.8", dummy=True) + assert "8.8.8.8" in events.ipv4 + assert events.ipv4.host_filterable == "8.8.8.8" + assert "8.8.8.8" == events.ipv4 + assert "8.8.8.8" in events.netv4 + assert "8.8.8.9" not in events.ipv4 + assert "8.8.9.8" not in events.netv4 + assert "8.8.8.8/31" in events.netv4 + assert "8.8.8.8/30" in events.netv4 + assert "8.8.8.8/29" not in events.netv4 + assert "2001:4860:4860::8888" in events.ipv6 + assert "2001:4860:4860::8888" in events.netv6 + assert "2001:4860:4860::8889" not in events.ipv6 + assert "2002:4860:4860::8888" not in events.netv6 + assert "2001:4860:4860::8888/127" in events.netv6 + assert "2001:4860:4860::8888/126" in events.netv6 + assert "2001:4860:4860::8888/125" not in events.netv6 + assert events.emoji not in events.ipv4 + assert events.emoji not in events.netv6 + assert events.netv6 not in events.emoji + ipv6_event = scan.make_event(" [DEaD::c0De]:88", "DNS_NAME", dummy=True) + assert "dead::c0de" == ipv6_event + assert ipv6_event.host_filterable == "dead::c0de" + range_to_ip = scan.make_event("1.2.3.4/32", dummy=True) + assert range_to_ip.type == "IP_ADDRESS" + range_to_ip = scan.make_event("dead::beef/128", dummy=True) + assert range_to_ip.type == "IP_ADDRESS" + + # hostname tests + assert events.domain.host == "publicapis.org" + assert events.domain.host_filterable == "publicapis.org" + assert events.subdomain.host == "api.publicapis.org" + assert events.subdomain.host_filterable == "api.publicapis.org" + assert events.domain.host_stem == "publicapis" + assert events.subdomain.host_stem == "api.publicapis" + assert "api.publicapis.org" in events.domain + assert "api.publicapis.org" in events.subdomain + assert "fsocie.ty" not in events.domain + assert "fsocie.ty" not in events.subdomain + assert events.subdomain in events.domain + assert events.domain not in events.subdomain + assert events.ipv4 not in events.domain + assert events.netv6 not in events.domain + assert events.emoji not in events.domain + assert events.domain not in events.emoji + open_port_event = scan.make_event(" eViLcorp.COM.:88", "DNS_NAME", dummy=True) + dns_event = scan.make_event("evilcorp.com.", "DNS_NAME", dummy=True) + for e in (open_port_event, dns_event): + assert "evilcorp.com" == e + assert e.netloc == "evilcorp.com" + assert e.json()["netloc"] == "evilcorp.com" + assert e.port is None + assert "port" not in e.json() + + # url tests + url_no_trailing_slash = scan.make_event("http://evilcorp.com", dummy=True) + url_trailing_slash = scan.make_event("http://evilcorp.com/", dummy=True) + assert url_no_trailing_slash == url_trailing_slash + assert url_no_trailing_slash.host_filterable == 
"http://evilcorp.com/" + assert url_trailing_slash.host_filterable == "http://evilcorp.com/" + assert events.url_unverified.host == "api.publicapis.org" + assert events.url_unverified in events.domain + assert events.url_unverified in events.subdomain + assert "api.publicapis.org:443" in events.url_unverified + assert "publicapis.org" not in events.url_unverified + assert events.ipv4_url_unverified in events.ipv4 + assert events.ipv4_url_unverified.netloc == "8.8.8.8:443" + assert events.ipv4_url_unverified.port == 443 + assert events.ipv4_url_unverified.json()["port"] == 443 + assert events.ipv4_url_unverified in events.netv4 + assert events.ipv6_url_unverified in events.ipv6 + assert events.ipv6_url_unverified.netloc == "[2001:4860:4860::8888]:443" + assert events.ipv6_url_unverified.port == 443 + assert events.ipv6_url_unverified.json()["port"] == 443 + assert events.ipv6_url_unverified in events.netv6 + assert events.emoji not in events.url_unverified + assert events.emoji not in events.ipv6_url_unverified + assert events.url_unverified not in events.emoji + assert "https://evilcorp.com" == scan.make_event("https://evilcorp.com:443", dummy=True) + assert "http://evilcorp.com" == scan.make_event("http://evilcorp.com:80", dummy=True) + assert "http://evilcorp.com:80/asdf.js" in scan.make_event("http://evilcorp.com/asdf.js", dummy=True) + assert "http://evilcorp.com/asdf.js" in scan.make_event("http://evilcorp.com:80/asdf.js", dummy=True) + assert "https://evilcorp.com:443" == scan.make_event("https://evilcorp.com", dummy=True) + assert "http://evilcorp.com:80" == scan.make_event("http://evilcorp.com", dummy=True) + assert "https://evilcorp.com:80" == scan.make_event("https://evilcorp.com:80", dummy=True) + assert "http://evilcorp.com:443" == scan.make_event("http://evilcorp.com:443", dummy=True) + assert scan.make_event("https://evilcorp.com", dummy=True).with_port().geturl() == "https://evilcorp.com:443/" + assert scan.make_event("https://evilcorp.com:666", dummy=True).with_port().geturl() == "https://evilcorp.com:666/" + assert scan.make_event("https://evilcorp.com.:666", dummy=True) == "https://evilcorp.com:666/" + assert scan.make_event("https://[bad::c0de]", dummy=True).with_port().geturl() == "https://[bad::c0de]:443/" + assert scan.make_event("https://[bad::c0de]:666", dummy=True).with_port().geturl() == "https://[bad::c0de]:666/" + url_event = scan.make_event("https://evilcorp.com", "URL", events.ipv4_url, tags=["status-200"]) + assert "status-200" in url_event.tags + assert url_event.http_status == 200 + with pytest.raises(ValidationError, match=".*status tag.*"): + scan.make_event("https://evilcorp.com", "URL", events.ipv4_url) + + # http response + assert events.http_response.host == "example.com" + assert events.http_response.port == 80 + assert events.http_response.parsed_url.scheme == "http" + assert events.http_response.with_port().geturl() == "http://example.com:80/" + assert events.http_response.host_filterable == "http://example.com/" + + http_response = scan.make_event( + { + "port": "80", + "title": "HTTP%20RESPONSE", + "url": "http://www.evilcorp.com:80", + "input": "http://www.evilcorp.com:80", + "raw_header": "HTTP/1.1 301 Moved Permanently\r\nLocation: http://www.evilcorp.com/asdf\r\n\r\n", + "location": "/asdf", + "status_code": 301, + }, + "HTTP_RESPONSE", + dummy=True, + ) + assert http_response.http_status == 301 + assert http_response.http_title == "HTTP RESPONSE" + assert http_response.redirect_location == "http://www.evilcorp.com/asdf" + + # http response 
url validation + http_response_2 = scan.make_event( + { + "port": "80", + "url": "http://evilcorp.com:80/asdf", + "raw_header": "HTTP/1.1 301 Moved Permanently\r\nLocation: http://www.evilcorp.com/asdf\r\n\r\n", + }, + "HTTP_RESPONSE", + dummy=True, + ) + assert http_response_2.data["url"] == "http://evilcorp.com/asdf" + + # open port tests + assert events.open_port in events.domain + assert "api.publicapis.org:443" in events.open_port + assert "bad.publicapis.org:443" not in events.open_port + assert "publicapis.org:443" not in events.open_port + assert events.ipv4_open_port in events.ipv4 + assert events.ipv4_open_port in events.netv4 + assert "8.8.8.9" not in events.ipv4_open_port + assert events.ipv6_open_port in events.ipv6 + assert events.ipv6_open_port in events.netv6 + assert "2002:4860:4860::8888" not in events.ipv6_open_port + assert events.emoji not in events.ipv6_open_port + assert events.ipv6_open_port not in events.emoji + + # attribute tests + assert events.ipv4.host == ipaddress.ip_address("8.8.8.8") + assert events.ipv4.port is None + assert events.ipv6.host == ipaddress.ip_address("2001:4860:4860::8888") + assert events.ipv6.port is None + assert events.domain.port is None + assert events.subdomain.port is None + assert events.open_port.host == "api.publicapis.org" + assert events.open_port.port == 443 + assert events.ipv4_open_port.host == ipaddress.ip_address("8.8.8.8") + assert events.ipv4_open_port.port == 443 + assert events.ipv6_open_port.host == ipaddress.ip_address("2001:4860:4860::8888") + assert events.ipv6_open_port.port == 443 + assert events.url_unverified.host == "api.publicapis.org" + assert events.url_unverified.port == 443 + assert events.ipv4_url_unverified.host == ipaddress.ip_address("8.8.8.8") + assert events.ipv4_url_unverified.port == 443 + assert events.ipv6_url_unverified.host == ipaddress.ip_address("2001:4860:4860::8888") + assert events.ipv6_url_unverified.port == 443 + + javascript_event = scan.make_event("http://evilcorp.com/asdf/a.js?b=c#d", "URL_UNVERIFIED", parent=scan.root_event) + assert "extension-js" in javascript_event.tags + await scan.ingress_module.handle_event(javascript_event) + assert "httpx-only" in javascript_event.tags + + # scope distance + event1 = scan.make_event("1.2.3.4", dummy=True) + assert event1._scope_distance is None + event1.scope_distance = 0 + assert event1._scope_distance == 0 + event2 = scan.make_event("2.3.4.5", parent=event1) + assert event2._scope_distance == 1 + event3 = scan.make_event("3.4.5.6", parent=event2) + assert event3._scope_distance == 2 + event4 = scan.make_event("3.4.5.6", parent=event3) + assert event4._scope_distance == 2 + event5 = scan.make_event("4.5.6.7", parent=event4) + assert event5._scope_distance == 3 + + url_1 = scan.make_event("https://127.0.0.1/asdf", "URL_UNVERIFIED", parent=scan.root_event) + assert url_1.scope_distance == 1 + url_2 = scan.make_event("https://127.0.0.1/test", "URL_UNVERIFIED", parent=url_1) + assert url_2.scope_distance == 1 + url_3 = scan.make_event("https://127.0.0.2/asdf", "URL_UNVERIFIED", parent=url_1) + assert url_3.scope_distance == 2 + + org_stub_1 = scan.make_event("STUB1", "ORG_STUB", parent=scan.root_event) + assert org_stub_1.scope_distance == 1 + assert org_stub_1.netloc is None + assert "netloc" not in org_stub_1.json() + org_stub_2 = scan.make_event("STUB2", "ORG_STUB", parent=org_stub_1) + assert org_stub_2.scope_distance == 2 + + # internal event tracking + root_event = scan.make_event("0.0.0.0", dummy=True) + root_event.scope_distance = 0 + internal_event1
= scan.make_event("1.2.3.4", parent=root_event, internal=True) + assert internal_event1._internal is True + assert "internal" in internal_event1.tags + + # tag inheritance + for tag in ("affiliate", "mutation-1"): + affiliate_event = scan.make_event("1.2.3.4", parent=root_event, tags=tag) + assert tag in affiliate_event.tags + affiliate_event2 = scan.make_event("1.2.3.4:88", parent=affiliate_event) + affiliate_event3 = scan.make_event("4.3.2.1:88", parent=affiliate_event) + assert tag in affiliate_event2.tags + assert tag not in affiliate_event3.tags + + # discovery context + event = scan.make_event( + "127.0.0.1", parent=scan.root_event, context="something discovered {event.type}: {event.data}" + ) + assert event.discovery_context == "something discovered IP_ADDRESS: 127.0.0.1" + + # updating an already-created event with make_event() + # updating tags + event1 = scan.make_event("127.0.0.1", parent=scan.root_event) + updated_event = scan.make_event(event1, tags="asdf") + assert "asdf" not in event1.tags + assert "asdf" in updated_event.tags + # updating parent + event2 = scan.make_event("127.0.0.1", parent=scan.root_event) + updated_event = scan.make_event(event2, parent=event1) + assert event2.parent == scan.root_event + assert updated_event.parent == event1 + # updating module + event3 = scan.make_event("127.0.0.1", parent=scan.root_event) + updated_event = scan.make_event(event3, internal=True) + assert event3.internal is False + assert updated_event.internal is True + + # event sorting + parent1 = scan.make_event("127.0.0.1", parent=scan.root_event) + parent2 = scan.make_event("127.0.0.1", parent=scan.root_event) + parent2_child1 = scan.make_event("127.0.0.1", parent=parent2) + parent1_child1 = scan.make_event("127.0.0.1", parent=parent1) + parent1_child2 = scan.make_event("127.0.0.1", parent=parent1) + parent1_child2_child1 = scan.make_event("127.0.0.1", parent=parent1_child2) + parent1_child2_child2 = scan.make_event("127.0.0.1", parent=parent1_child2) + parent1_child1_child1 = scan.make_event("127.0.0.1", parent=parent1_child1) + parent2_child2 = scan.make_event("127.0.0.1", parent=parent2) + parent1_child2_child1_child1 = scan.make_event("127.0.0.1", parent=parent1_child2_child1) + + sortable_events = { + "parent1": parent1, + "parent2": parent2, + "parent2_child1": parent2_child1, + "parent1_child1": parent1_child1, + "parent1_child2": parent1_child2, + "parent1_child2_child1": parent1_child2_child1, + "parent1_child2_child2": parent1_child2_child2, + "parent1_child1_child1": parent1_child1_child1, + "parent2_child2": parent2_child2, + "parent1_child2_child1_child1": parent1_child2_child1_child1, + } + + ordered_list = [ + parent1, + parent1_child1, + parent1_child1_child1, + parent1_child2, + parent1_child2_child1, + parent1_child2_child1_child1, + parent1_child2_child2, + parent2, + parent2_child1, + parent2_child2, + ] + + shuffled_list = list(sortable_events.values()) + random.shuffle(shuffled_list) + + sorted_events = sorted(shuffled_list) + assert sorted_events == ordered_list + + # test validation + corrected_event1 = scan.make_event("asdf@asdf.com", "DNS_NAME", dummy=True) + assert corrected_event1.type == "EMAIL_ADDRESS" + corrected_event2 = scan.make_event("127.0.0.1", "DNS_NAME", dummy=True) + assert corrected_event2.type == "IP_ADDRESS" + corrected_event3 = scan.make_event("wat.asdf.com", "IP_ADDRESS", dummy=True) + assert corrected_event3.type == "DNS_NAME" + + corrected_event4 = scan.make_event("bob@evilcorp.com", "USERNAME", dummy=True) + assert 
corrected_event4.type == "EMAIL_ADDRESS" + assert "affiliate" in corrected_event4.tags + + test_vuln = scan.make_event( + {"host": "EVILcorp.com", "severity": "iNfo ", "description": "asdf"}, "VULNERABILITY", dummy=True + ) + assert test_vuln.data["host"] == "evilcorp.com" + assert test_vuln.data["severity"] == "INFO" + test_vuln2 = scan.make_event( + {"host": "192.168.1.1", "severity": "iNfo ", "description": "asdf"}, "VULNERABILITY", dummy=True + ) + assert json.loads(test_vuln2.data_human)["severity"] == "INFO" + assert test_vuln2.host.is_private + with pytest.raises(ValidationError, match=".*validation error.*\nseverity\n.*Field required.*"): + test_vuln = scan.make_event({"host": "evilcorp.com", "description": "asdf"}, "VULNERABILITY", dummy=True) + with pytest.raises(ValidationError, match=".*host.*\n.*Invalid host.*"): + test_vuln = scan.make_event( + {"host": "!@#$", "severity": "INFO", "description": "asdf"}, "VULNERABILITY", dummy=True + ) + with pytest.raises(ValidationError, match=".*severity.*\n.*Invalid severity.*"): + test_vuln = scan.make_event( + {"host": "evilcorp.com", "severity": "WACK", "description": "asdf"}, "VULNERABILITY", dummy=True + ) + + # test tagging + ip_event_1 = scan.make_event("8.8.8.8", dummy=True) + assert "private-ip" not in ip_event_1.tags + ip_event_2 = scan.make_event("192.168.0.1", dummy=True) + assert "private-ip" in ip_event_2.tags + dns_event_1 = scan.make_event("evilcorp.com", dummy=True) + assert "domain" in dns_event_1.tags + dns_event_2 = scan.make_event("www.evilcorp.com", dummy=True) + assert "subdomain" in dns_event_2.tags + + # punycode - event type detection + + # japanese + assert scan.make_event("ドメイン.テスト", dummy=True).type == "DNS_NAME" + assert scan.make_event("bob@ドメイン.テスト", dummy=True).type == "EMAIL_ADDRESS" + assert scan.make_event("テスト@ドメイン.テスト", dummy=True).type == "EMAIL_ADDRESS" + assert scan.make_event("ドメイン.テスト:80", dummy=True).type == "OPEN_TCP_PORT" + assert scan.make_event("http://ドメイン.テスト:80", dummy=True).type == "URL_UNVERIFIED" + assert scan.make_event("http://ドメイン.テスト:80/テスト", dummy=True).type == "URL_UNVERIFIED" + + assert scan.make_event("xn--eckwd4c7c.xn--zckzah", dummy=True).type == "DNS_NAME" + assert scan.make_event("bob@xn--eckwd4c7c.xn--zckzah", dummy=True).type == "EMAIL_ADDRESS" + assert scan.make_event("テスト@xn--eckwd4c7c.xn--zckzah", dummy=True).type == "EMAIL_ADDRESS" + assert scan.make_event("xn--eckwd4c7c.xn--zckzah:80", dummy=True).type == "OPEN_TCP_PORT" + assert scan.make_event("http://xn--eckwd4c7c.xn--zckzah:80", dummy=True).type == "URL_UNVERIFIED" + assert scan.make_event("http://xn--eckwd4c7c.xn--zckzah:80/テスト", dummy=True).type == "URL_UNVERIFIED" + + # thai + assert scan.make_event("เราเที่ยวด้วยกัน.com", dummy=True).type == "DNS_NAME" + assert scan.make_event("bob@เราเที่ยวด้วยกัน.com", dummy=True).type == "EMAIL_ADDRESS" + assert scan.make_event("ทดสอบ@เราเที่ยวด้วยกัน.com", dummy=True).type == "EMAIL_ADDRESS" + assert scan.make_event("เราเที่ยวด้วยกัน.com:80", dummy=True).type == "OPEN_TCP_PORT" + assert scan.make_event("http://เราเที่ยวด้วยกัน.com:80", dummy=True).type == "URL_UNVERIFIED" + assert scan.make_event("http://เราเที่ยวด้วยกัน.com:80/ทดสอบ", dummy=True).type == "URL_UNVERIFIED" + + assert scan.make_event("xn--12c1bik6bbd8ab6hd1b5jc6jta.com", dummy=True).type == "DNS_NAME" + assert scan.make_event("bob@xn--12c1bik6bbd8ab6hd1b5jc6jta.com", dummy=True).type == "EMAIL_ADDRESS" + assert scan.make_event("ทดสอบ@xn--12c1bik6bbd8ab6hd1b5jc6jta.com", dummy=True).type == "EMAIL_ADDRESS" + 
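# the xn-- spelling is the IDNA (punycode) ACE form of the unicode name, so type detection + # should agree between the two spellings of the same host; a sanity sketch built from the cases above: + assert ( + scan.make_event("เราเที่ยวด้วยกัน.com", dummy=True).type + == scan.make_event("xn--12c1bik6bbd8ab6hd1b5jc6jta.com", dummy=True).type + ) +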
assert scan.make_event("xn--12c1bik6bbd8ab6hd1b5jc6jta.com:80", dummy=True).type == "OPEN_TCP_PORT" + assert scan.make_event("http://xn--12c1bik6bbd8ab6hd1b5jc6jta.com:80", dummy=True).type == "URL_UNVERIFIED" + assert scan.make_event("http://xn--12c1bik6bbd8ab6hd1b5jc6jta.com:80/ทดสอบ", dummy=True).type == "URL_UNVERIFIED" + + # punycode - encoding / decoding tests + + # japanese + assert scan.make_event("xn--eckwd4c7c.xn--zckzah", dummy=True).data == "xn--eckwd4c7c.xn--zckzah" + assert scan.make_event("bob@xn--eckwd4c7c.xn--zckzah", dummy=True).data == "bob@xn--eckwd4c7c.xn--zckzah" + assert scan.make_event("テスト@xn--eckwd4c7c.xn--zckzah", dummy=True).data == "テスト@xn--eckwd4c7c.xn--zckzah" + assert scan.make_event("xn--eckwd4c7c.xn--zckzah:80", dummy=True).data == "xn--eckwd4c7c.xn--zckzah:80" + assert scan.make_event("http://xn--eckwd4c7c.xn--zckzah:80", dummy=True).data == "http://xn--eckwd4c7c.xn--zckzah/" + assert ( + scan.make_event("http://xn--eckwd4c7c.xn--zckzah:80/テスト", dummy=True).data + == "http://xn--eckwd4c7c.xn--zckzah/テスト" + ) + + assert scan.make_event("ドメイン.テスト", dummy=True).data == "xn--eckwd4c7c.xn--zckzah" + assert scan.make_event("bob@ドメイン.テスト", dummy=True).data == "bob@xn--eckwd4c7c.xn--zckzah" + assert scan.make_event("テスト@ドメイン.テスト", dummy=True).data == "テスト@xn--eckwd4c7c.xn--zckzah" + assert scan.make_event("ドメイン.テスト:80", dummy=True).data == "xn--eckwd4c7c.xn--zckzah:80" + assert scan.make_event("http://ドメイン.テスト:80", dummy=True).data == "http://xn--eckwd4c7c.xn--zckzah/" + assert ( + scan.make_event("http://ドメイン.テスト:80/テスト", dummy=True).data + == "http://xn--eckwd4c7c.xn--zckzah/テスト" + ) + # thai + assert ( + scan.make_event("xn--12c1bik6bbd8ab6hd1b5jc6jta.com", dummy=True).data == "xn--12c1bik6bbd8ab6hd1b5jc6jta.com" + ) + assert ( + scan.make_event("bob@xn--12c1bik6bbd8ab6hd1b5jc6jta.com", dummy=True).data + == "bob@xn--12c1bik6bbd8ab6hd1b5jc6jta.com" + ) + assert ( + scan.make_event("ทดสอบ@xn--12c1bik6bbd8ab6hd1b5jc6jta.com", dummy=True).data + == "ทดสอบ@xn--12c1bik6bbd8ab6hd1b5jc6jta.com" + ) + assert ( + scan.make_event("xn--12c1bik6bbd8ab6hd1b5jc6jta.com:80", dummy=True).data + == "xn--12c1bik6bbd8ab6hd1b5jc6jta.com:80" + ) + assert ( + scan.make_event("http://xn--12c1bik6bbd8ab6hd1b5jc6jta.com:80", dummy=True).data + == "http://xn--12c1bik6bbd8ab6hd1b5jc6jta.com/" + ) + assert ( + scan.make_event("http://xn--12c1bik6bbd8ab6hd1b5jc6jta.com:80/ทดสอบ", dummy=True).data + == "http://xn--12c1bik6bbd8ab6hd1b5jc6jta.com/ทดสอบ" + ) + + assert scan.make_event("เราเที่ยวด้วยกัน.com", dummy=True).data == "xn--12c1bik6bbd8ab6hd1b5jc6jta.com" + assert scan.make_event("bob@เราเที่ยวด้วยกัน.com", dummy=True).data == "bob@xn--12c1bik6bbd8ab6hd1b5jc6jta.com" + assert scan.make_event("ทดสอบ@เราเที่ยวด้วยกัน.com", dummy=True).data == "ทดสอบ@xn--12c1bik6bbd8ab6hd1b5jc6jta.com" + assert scan.make_event("เราเที่ยวด้วยกัน.com:80", dummy=True).data == "xn--12c1bik6bbd8ab6hd1b5jc6jta.com:80" + assert ( + scan.make_event("http://เราเที่ยวด้วยกัน.com:80", dummy=True).data == "http://xn--12c1bik6bbd8ab6hd1b5jc6jta.com/" + ) + assert ( + scan.make_event("http://เราเที่ยวด้วยกัน.com:80/ทดสอบ", dummy=True).data + == "http://xn--12c1bik6bbd8ab6hd1b5jc6jta.com/ทดสอบ" + ) + + # test event uuid + import uuid + + parent_event1 = scan.make_event("evilcorp.com", parent=scan.root_event, context="test context") + parent_event2 = scan.make_event("evilcorp.com", parent=scan.root_event, context="test context") + + event1 = scan.make_event("evilcorp.com:80", parent=parent_event1, context="test 
context") + assert hasattr(event1, "_uuid") + assert hasattr(event1, "uuid") + assert isinstance(event1._uuid, uuid.UUID) + assert isinstance(event1.uuid, str) + assert event1.uuid == f"{event1.type}:{event1._uuid}" + event2 = scan.make_event("evilcorp.com:80", parent=parent_event2, context="test context") + assert hasattr(event2, "_uuid") + assert hasattr(event2, "uuid") + assert isinstance(event2._uuid, uuid.UUID) + assert isinstance(event2.uuid, str) + assert event2.uuid == f"{event2.type}:{event2._uuid}" + # ids should match because the event type + data is the same + assert event1.id == event2.id + # but uuids should be unique! + assert event1.uuid != event2.uuid + # parent ids should match + assert event1.parent_id == event2.parent_id == parent_event1.id == parent_event2.id + # uuids should not + assert event1.parent_uuid == parent_event1.uuid + assert event2.parent_uuid == parent_event2.uuid + assert event1.parent_uuid != event2.parent_uuid + + # test event serialization + from bbot.core.event import event_from_json + + db_event = scan.make_event("evilcorp.com:80", parent=scan.root_event, context="test context") + assert db_event.parent == scan.root_event + assert db_event.parent is scan.root_event + db_event._resolved_hosts = {"127.0.0.1"} + db_event.scope_distance = 1 + assert db_event.discovery_context == "test context" + assert db_event.discovery_path == ["test context"] + assert len(db_event.parent_chain) == 1 + assert all(event_uuid_regex.match(u) for u in db_event.parent_chain) + assert db_event.parent_chain[0] == str(db_event.uuid) + assert db_event.parent.uuid == scan.root_event.uuid + assert db_event.parent_uuid == scan.root_event.uuid + timestamp = db_event.timestamp.isoformat() + json_event = db_event.json() + assert isinstance(json_event["uuid"], str) + assert json_event["uuid"] == str(db_event.uuid) + assert json_event["parent_uuid"] == str(scan.root_event.uuid) + assert json_event["scope_distance"] == 1 + assert json_event["data"] == "evilcorp.com:80" + assert json_event["type"] == "OPEN_TCP_PORT" + assert json_event["host"] == "evilcorp.com" + assert json_event["timestamp"] == timestamp + assert json_event["discovery_context"] == "test context" + assert json_event["discovery_path"] == ["test context"] + assert json_event["parent_chain"] == db_event.parent_chain + assert json_event["parent_chain"][0] == str(db_event.uuid) + reconstituted_event = event_from_json(json_event) + assert isinstance(reconstituted_event._uuid, uuid.UUID) + assert str(reconstituted_event.uuid) == json_event["uuid"] + assert str(reconstituted_event.parent_uuid) == json_event["parent_uuid"] + assert reconstituted_event.uuid == db_event.uuid + assert reconstituted_event.parent_uuid == scan.root_event.uuid + assert reconstituted_event.scope_distance == 1 + assert reconstituted_event.timestamp.isoformat() == timestamp + assert reconstituted_event.data == "evilcorp.com:80" + assert reconstituted_event.type == "OPEN_TCP_PORT" + assert reconstituted_event.host == "evilcorp.com" + assert reconstituted_event.discovery_context == "test context" + assert reconstituted_event.discovery_path == ["test context"] + assert reconstituted_event.parent_chain == db_event.parent_chain + assert "127.0.0.1" in reconstituted_event.resolved_hosts + hostless_event = scan.make_event("asdf", "ASDF", dummy=True) + hostless_event_json = hostless_event.json() + assert hostless_event_json["type"] == "ASDF" + assert hostless_event_json["data"] == "asdf" + assert "host" not in hostless_event_json + + # SIEM-friendly 
serialize/deserialize + json_event_siemfriendly = db_event.json(siem_friendly=True) + assert json_event_siemfriendly["scope_distance"] == 1 + assert json_event_siemfriendly["data"] == {"OPEN_TCP_PORT": "evilcorp.com:80"} + assert json_event_siemfriendly["type"] == "OPEN_TCP_PORT" + assert json_event_siemfriendly["host"] == "evilcorp.com" + assert json_event_siemfriendly["timestamp"] == timestamp + reconstituted_event2 = event_from_json(json_event_siemfriendly, siem_friendly=True) + assert reconstituted_event2.scope_distance == 1 + assert reconstituted_event2.timestamp.isoformat() == timestamp + assert reconstituted_event2.data == "evilcorp.com:80" + assert reconstituted_event2.type == "OPEN_TCP_PORT" + assert reconstituted_event2.host == "evilcorp.com" + assert "127.0.0.1" in reconstituted_event2.resolved_hosts + + http_response = scan.make_event(httpx_response, "HTTP_RESPONSE", parent=scan.root_event) + assert http_response.parent_id == scan.root_event.id + assert http_response.data["input"] == "http://example.com:80" + assert ( + http_response.raw_response + == 'HTTP/1.1 200 OK\r\nConnection: close\r\nAge: 526111\r\nCache-Control: max-age=604800\r\nContent-Type: text/html; charset=UTF-8\r\nDate: Mon, 14 Nov 2022 17:14:27 GMT\r\nEtag: "3147526947+ident+gzip"\r\nExpires: Mon, 21 Nov 2022 17:14:27 GMT\r\nLast-Modified: Thu, 17 Oct 2019 07:18:26 GMT\r\nServer: ECS (agb/A445)\r\nVary: Accept-Encoding\r\nX-Cache: HIT\r\n\r\n<!doctype html>\n<html>\n<head>\n    <title>Example Domain</title>\n\n    <meta charset="utf-8" />\n    <meta http-equiv="Content-type" content="text/html; charset=utf-8" />\n    <meta name="viewport" content="width=device-width, initial-scale=1" />\n</head>\n\n<body>\n<div>\n    <h1>Example Domain</h1>\n    <p>This domain is for use in illustrative examples in documents. You may use this\n    domain in literature without prior coordination or asking for permission.</p>\n    <p><a href="https://www.iana.org/domains/example">More information...</a></p>\n</div>\n</body>\n</html>
\n\n\n' + ) + json_event = http_response.json(mode="graph") + assert isinstance(json_event["data"], str) + json_event = http_response.json() + assert isinstance(json_event["data"], dict) + assert json_event["type"] == "HTTP_RESPONSE" + assert json_event["host"] == "example.com" + assert json_event["parent"] == scan.root_event.id + reconstituted_event = event_from_json(json_event) + assert isinstance(reconstituted_event.data, dict) + assert reconstituted_event.data["input"] == "http://example.com:80" + assert reconstituted_event.host == "example.com" + assert reconstituted_event.type == "HTTP_RESPONSE" + assert reconstituted_event.parent_id == scan.root_event.id + + event_1 = scan.make_event("127.0.0.1", parent=scan.root_event) + event_2 = scan.make_event("127.0.0.2", parent=event_1) + event_3 = scan.make_event("127.0.0.3", parent=event_2) + event_3._omit = True + event_4 = scan.make_event("127.0.0.4", parent=event_3) + event_5 = scan.make_event("127.0.0.5", parent=event_4) + assert event_5.get_parents() == [event_4, event_3, event_2, event_1, scan.root_event] + assert event_5.get_parents(omit=True) == [event_4, event_2, event_1, scan.root_event] + assert event_5.get_parents(include_self=True) == [event_5, event_4, event_3, event_2, event_1, scan.root_event] + + # test host backup + host_event = scan.make_event("asdf.evilcorp.com", "DNS_NAME", parent=scan.root_event) + assert host_event.host_original == "asdf.evilcorp.com" + host_event.host = "_wildcard.evilcorp.com" + assert host_event.host == "_wildcard.evilcorp.com" + assert host_event.host_original == "asdf.evilcorp.com" + + # test storage bucket validation + bucket_event = scan.make_event( + {"name": "ASDF.s3.amazonaws.com", "url": "https://ASDF.s3.amazonaws.com"}, + "STORAGE_BUCKET", + parent=scan.root_event, + ) + assert bucket_event.data["name"] == "asdf.s3.amazonaws.com" + assert bucket_event.data["url"] == "https://asdf.s3.amazonaws.com/" + + # test module sequence + module = scan._make_dummy_module("mymodule") + parent_event_1 = scan.make_event("127.0.0.1", module=module, parent=scan.root_event) + assert str(parent_event_1.module) == "mymodule" + assert str(parent_event_1.module_sequence) == "mymodule" + parent_event_2 = scan.make_event("127.0.0.2", module=module, parent=parent_event_1) + assert str(parent_event_2.module) == "mymodule" + assert str(parent_event_2.module_sequence) == "mymodule" + parent_event_3 = scan.make_event("127.0.0.3", module=module, parent=parent_event_2) + assert str(parent_event_3.module) == "mymodule" + assert str(parent_event_3.module_sequence) == "mymodule" + + module = scan._make_dummy_module("mymodule") + parent_event_1 = scan.make_event("127.0.0.1", module=module, parent=scan.root_event) + parent_event_1._omit = True + assert str(parent_event_1.module) == "mymodule" + assert str(parent_event_1.module_sequence) == "mymodule" + parent_event_2 = scan.make_event("127.0.0.2", module=module, parent=parent_event_1) + parent_event_2._omit = True + assert str(parent_event_2.module) == "mymodule" + assert str(parent_event_2.module_sequence) == "mymodule->mymodule" + parent_event_3 = scan.make_event("127.0.0.3", module=module, parent=parent_event_2) + assert str(parent_event_3.module) == "mymodule" + assert str(parent_event_3.module_sequence) == "mymodule->mymodule->mymodule" + + # event with no data + with pytest.raises(ValidationError): + event = scan.make_event(None, "DNS_NAME", parent=scan.root_event) + + await scan._cleanup() + + +@pytest.mark.asyncio +async def test_event_discovery_context(): + from 
bbot.modules.base import BaseModule + + scan = Scanner("evilcorp.com") + await scan.helpers.dns._mock_dns( + { + "evilcorp.com": {"A": ["1.2.3.4"]}, + "one.evilcorp.com": {"A": ["1.2.3.4"]}, + "two.evilcorp.com": {"A": ["1.2.3.4"]}, + "three.evilcorp.com": {"A": ["1.2.3.4"]}, + "four.evilcorp.com": {"A": ["1.2.3.4"]}, + } + ) + await scan._prep() + + dummy_module_1 = scan._make_dummy_module("module_1") + dummy_module_2 = scan._make_dummy_module("module_2") + + class DummyModule(BaseModule): + watched_events = ["DNS_NAME"] + _name = "dummy_module" + + async def handle_event(self, event): + new_event = None + if event.data == "evilcorp.com": + new_event = scan.make_event( + "one.evilcorp.com", + "DNS_NAME", + event, + context="{module} invoked forbidden magick to discover {event.type} {event.data}", + module=dummy_module_1, + ) + elif event.data == "one.evilcorp.com": + new_event = scan.make_event( + "two.evilcorp.com", + "DNS_NAME", + event, + context="{module} pledged its allegiance to cthulu and was awarded {event.type} {event.data}", + module=dummy_module_1, + ) + elif event.data == "two.evilcorp.com": + new_event = scan.make_event( + "three.evilcorp.com", + "DNS_NAME", + event, + context="{module} asked nicely and was given {event.type} {event.data}", + module=dummy_module_2, + ) + elif event.data == "three.evilcorp.com": + new_event = scan.make_event( + "four.evilcorp.com", + "DNS_NAME", + event, + context="{module} used brute force to obtain {event.type} {event.data}", + module=dummy_module_2, + ) + if new_event is not None: + await self.emit_event(new_event) + + dummy_module = DummyModule(scan) + + scan.modules["dummy_module"] = dummy_module + + # test discovery context + test_event = dummy_module.make_event("evilcorp.com", "DNS_NAME", parent=scan.root_event) + assert test_event.discovery_context == "dummy_module discovered DNS_NAME: evilcorp.com" + + test_event2 = dummy_module.make_event( + "evilcorp.com", "DNS_NAME", parent=scan.root_event, context="{module} {found} {event.host}" + ) + assert test_event2.discovery_context == "dummy_module {found} evilcorp.com" + # jank input + test_event3 = dummy_module.make_event( + "http://evilcorp.com/{http://evilcorp.org!@#%@#$:,,,}", "URL_UNVERIFIED", parent=scan.root_event + ) + assert ( + test_event3.discovery_context + == "dummy_module discovered URL_UNVERIFIED: http://evilcorp.com/{http:/evilcorp.org!@" + ) + + events = [e async for e in scan.async_start()] + assert len(events) == 7 + + assert 1 == len( + [ + e + for e in events + if e.type == "DNS_NAME" + and e.data == "evilcorp.com" + and e.discovery_context == f"Scan {scan.name} seeded with DNS_NAME: evilcorp.com" + and e.discovery_path == [f"Scan {scan.name} seeded with DNS_NAME: evilcorp.com"] + ] + ) + assert 1 == len( + [ + e + for e in events + if e.type == "DNS_NAME" + and e.data == "one.evilcorp.com" + and e.discovery_context == "module_1 invoked forbidden magick to discover DNS_NAME one.evilcorp.com" + and e.discovery_path + == [ + f"Scan {scan.name} seeded with DNS_NAME: evilcorp.com", + "module_1 invoked forbidden magick to discover DNS_NAME one.evilcorp.com", + ] + ] + ) + assert 1 == len( + [ + e + for e in events + if e.type == "DNS_NAME" + and e.data == "two.evilcorp.com" + and e.discovery_context + == "module_1 pledged its allegiance to cthulu and was awarded DNS_NAME two.evilcorp.com" + and e.discovery_path + == [ + f"Scan {scan.name} seeded with DNS_NAME: evilcorp.com", + "module_1 invoked forbidden magick to discover DNS_NAME one.evilcorp.com", + "module_1 pledged its 
allegiance to cthulu and was awarded DNS_NAME two.evilcorp.com", + ] + ] + ) + assert 1 == len( + [ + e + for e in events + if e.type == "DNS_NAME" + and e.data == "three.evilcorp.com" + and e.discovery_context == "module_2 asked nicely and was given DNS_NAME three.evilcorp.com" + and e.discovery_path + == [ + f"Scan {scan.name} seeded with DNS_NAME: evilcorp.com", + "module_1 invoked forbidden magick to discover DNS_NAME one.evilcorp.com", + "module_1 pledged its allegiance to cthulu and was awarded DNS_NAME two.evilcorp.com", + "module_2 asked nicely and was given DNS_NAME three.evilcorp.com", + ] + ] + ) + final_path = [ + f"Scan {scan.name} seeded with DNS_NAME: evilcorp.com", + "module_1 invoked forbidden magick to discover DNS_NAME one.evilcorp.com", + "module_1 pledged its allegiance to cthulu and was awarded DNS_NAME two.evilcorp.com", + "module_2 asked nicely and was given DNS_NAME three.evilcorp.com", + "module_2 used brute force to obtain DNS_NAME four.evilcorp.com", + ] + final_event = [ + e + for e in events + if e.type == "DNS_NAME" + and e.data == "four.evilcorp.com" + and e.discovery_context == "module_2 used brute force to obtain DNS_NAME four.evilcorp.com" + and e.discovery_path == final_path + ] + assert 1 == len(final_event) + j = final_event[0].json() + assert j["discovery_path"] == final_path + + await scan._cleanup() + + # test to make sure this doesn't come back + # https://github.com/blacklanternsecurity/bbot/issues/1498 + scan = Scanner("http://blacklanternsecurity.com", config={"dns": {"minimal": False}}) + await scan.helpers.dns._mock_dns( + {"blacklanternsecurity.com": {"TXT": ["blsops.com"], "A": ["127.0.0.1"]}, "blsops.com": {"A": ["127.0.0.1"]}} + ) + events = [e async for e in scan.async_start()] + blsops_event = [e for e in events if e.type == "DNS_NAME" and e.data == "blsops.com"] + assert len(blsops_event) == 1 + assert blsops_event[0].discovery_path[1] == "URL_UNVERIFIED has host DNS_NAME: blacklanternsecurity.com" + + await scan._cleanup() + + +@pytest.mark.asyncio +async def test_event_web_spider_distance(bbot_scanner): + # make sure web spider distance inheritance works as intended + # and we don't have any runaway situations with SOCIAL events + URLs + + # URL_UNVERIFIED events should not increment web spider distance + scan = bbot_scanner(config={"web": {"spider_distance": 1}}) + url_event_1 = scan.make_event("http://www.evilcorp.com/test1", "URL_UNVERIFIED", parent=scan.root_event) + assert url_event_1.web_spider_distance == 0 + url_event_2 = scan.make_event("http://www.evilcorp.com/test2", "URL_UNVERIFIED", parent=url_event_1) + assert url_event_2.web_spider_distance == 0 + url_event_3 = scan.make_event( + "http://www.evilcorp.com/test3", "URL_UNVERIFIED", parent=url_event_2, tags=["spider-danger"] + ) + assert url_event_3.web_spider_distance == 0 + assert "spider-danger" in url_event_3.tags + assert "spider-max" not in url_event_3.tags + + # URL events should increment web spider distance + scan = bbot_scanner(config={"web": {"spider_distance": 1}}) + url_event_1 = scan.make_event("http://www.evilcorp.com/test1", "URL", parent=scan.root_event, tags="status-200") + assert url_event_1.web_spider_distance == 0 + url_event_2 = scan.make_event("http://www.evilcorp.com/test2", "URL", parent=url_event_1, tags="status-200") + assert url_event_2.web_spider_distance == 0 + url_event_3 = scan.make_event( + "http://www.evilcorp.com/test3", "URL_UNVERIFIED", parent=url_event_2, tags=["spider-danger"] + ) + assert url_event_3.web_spider_distance == 1 + assert 
"spider-danger" in url_event_3.tags + assert "spider-max" not in url_event_3.tags + + # SOCIAL events should inherit spider distance + social_event = scan.make_event( + {"platform": "github", "url": "http://www.evilcorp.com/test4"}, "SOCIAL", parent=url_event_3 + ) + assert social_event.web_spider_distance == 1 + assert "spider-danger" in social_event.tags + url_event_4 = scan.make_event("http://www.evilcorp.com/test4", "URL_UNVERIFIED", parent=social_event) + assert url_event_4.web_spider_distance == 2 + assert "spider-danger" in url_event_4.tags + assert "spider-max" in url_event_4.tags + social_event_2 = scan.make_event( + {"platform": "github", "url": "http://www.evilcorp.com/test5"}, "SOCIAL", parent=url_event_4 + ) + assert social_event_2.web_spider_distance == 2 + assert "spider-danger" in social_event_2.tags + assert "spider-max" in social_event_2.tags + url_event_5 = scan.make_event("http://www.evilcorp.com/test5", "URL_UNVERIFIED", parent=social_event_2) + assert url_event_5.web_spider_distance == 3 + assert "spider-danger" in url_event_5.tags + assert "spider-max" in url_event_5.tags + + url_event = scan.make_event("http://www.evilcorp.com", "URL_UNVERIFIED", parent=scan.root_event) + assert url_event.web_spider_distance == 0 + assert "spider-danger" not in url_event.tags + assert "spider-max" not in url_event.tags + url_event_2 = scan.make_event( + "http://www.evilcorp.com", "URL_UNVERIFIED", parent=scan.root_event, tags="spider-danger" + ) + url_event_2b = scan.make_event("http://www.evilcorp.com", "URL", parent=url_event_2, tags="status-200") + assert url_event_2b.web_spider_distance == 0 + assert "spider-danger" in url_event_2b.tags + assert "spider-max" not in url_event_2b.tags + url_event_3 = scan.make_event( + "http://www.evilcorp.com/3", "URL_UNVERIFIED", parent=url_event_2b, tags="spider-danger" + ) + assert url_event_3.web_spider_distance == 1 + assert "spider-danger" in url_event_3.tags + assert "spider-max" not in url_event_3.tags + url_event_4 = scan.make_event("http://evilcorp.com", "URL", parent=url_event_3, tags="status-200") + assert url_event_4.web_spider_distance == 0 + assert "spider-danger" not in url_event_4.tags + assert "spider-max" not in url_event_4.tags + url_event_4.add_tag("spider-danger") + assert url_event_4.web_spider_distance == 0 + assert "spider-danger" in url_event_4.tags + assert "spider-max" not in url_event_4.tags + url_event_4.remove_tag("spider-danger") + assert url_event_4.web_spider_distance == 0 + assert "spider-danger" not in url_event_4.tags + assert "spider-max" not in url_event_4.tags + url_event_5 = scan.make_event("http://evilcorp.com/5", "URL_UNVERIFIED", parent=url_event_4) + assert url_event_5.web_spider_distance == 0 + assert "spider-danger" not in url_event_5.tags + assert "spider-max" not in url_event_5.tags + url_event_5.add_tag("spider-danger") + # if host is the same as parent, web spider distance should auto-increment after adding spider-danger tag + assert url_event_5.web_spider_distance == 1 + assert "spider-danger" in url_event_5.tags + assert "spider-max" not in url_event_5.tags + + +def test_event_confidence(): + scan = Scanner() + # default 100 + event1 = scan.make_event("evilcorp.com", "DNS_NAME", dummy=True) + assert event1.confidence == 100 + assert event1.cumulative_confidence == 100 + # custom confidence + event2 = scan.make_event("evilcorp.com", "DNS_NAME", confidence=90, dummy=True) + assert event2.confidence == 90 + assert event2.cumulative_confidence == 90 + # max 100 + event3 = 
scan.make_event("evilcorp.com", "DNS_NAME", confidence=999, dummy=True) + assert event3.confidence == 100 + assert event3.cumulative_confidence == 100 + # min 1 + event4 = scan.make_event("evilcorp.com", "DNS_NAME", confidence=0, dummy=True) + assert event4.confidence == 1 + assert event4.cumulative_confidence == 1 + # first event in chain + event5 = scan.make_event("evilcorp.com", "DNS_NAME", confidence=90, parent=scan.root_event) + assert event5.confidence == 90 + assert event5.cumulative_confidence == 90 + # compounding confidence + event6 = scan.make_event("evilcorp.com", "DNS_NAME", confidence=50, parent=event5) + assert event6.confidence == 50 + assert event6.cumulative_confidence == 45 + event7 = scan.make_event("evilcorp.com", "DNS_NAME", confidence=50, parent=event6) + assert event7.confidence == 50 + assert event7.cumulative_confidence == 22 + # 100 confidence resets + event8 = scan.make_event("evilcorp.com", "DNS_NAME", confidence=100, parent=event7) + assert event8.confidence == 100 + assert event8.cumulative_confidence == 100 + + +def test_event_closest_host(): + scan = Scanner() + # first event has a host + event1 = scan.make_event("evilcorp.com", "DNS_NAME", parent=scan.root_event) + assert event1.host == "evilcorp.com" + # second event has a host + url + event2 = scan.make_event( + { + "method": "GET", + "url": "http://www.evilcorp.com/asdf", + "hash": {"header_mmh3": "1", "body_mmh3": "2"}, + "raw_header": "HTTP/1.1 301 Moved Permanently\r\nLocation: http://www.evilcorp.com/asdf\r\n\r\n", + }, + "HTTP_RESPONSE", + parent=event1, + ) + assert event2.host == "www.evilcorp.com" + # third event has a path + event3 = scan.make_event({"path": "/tmp/asdf.txt"}, "FILESYSTEM", parent=event2) + assert not event3.host + # finding automatically uses the host from the second event + finding = scan.make_event({"description": "test"}, "FINDING", parent=event3) + assert finding.data["host"] == "www.evilcorp.com" + assert finding.data["url"] == "http://www.evilcorp.com/asdf" + assert finding.data["path"] == "/tmp/asdf.txt" + assert finding.host == "www.evilcorp.com" + # same with vuln + vuln = scan.make_event({"description": "test", "severity": "HIGH"}, "VULNERABILITY", parent=event3) + assert vuln.data["host"] == "www.evilcorp.com" + assert vuln.data["url"] == "http://www.evilcorp.com/asdf" + assert vuln.data["path"] == "/tmp/asdf.txt" + assert vuln.host == "www.evilcorp.com" + + # no host and no path == not allowed + event3 = scan.make_event("wat", "ASDF", parent=scan.root_event) + assert not event3.host + with pytest.raises(ValueError): + finding = scan.make_event({"description": "test"}, "FINDING", parent=event3) + finding = scan.make_event({"path": "/tmp/asdf.txt", "description": "test"}, "FINDING", parent=event3) + assert finding is not None + finding = scan.make_event({"host": "evilcorp.com", "description": "test"}, "FINDING", parent=event3) + assert finding is not None + with pytest.raises(ValueError): + vuln = scan.make_event({"description": "test", "severity": "HIGH"}, "VULNERABILITY", parent=event3) + vuln = scan.make_event( + {"path": "/tmp/asdf.txt", "description": "test", "severity": "HIGH"}, "VULNERABILITY", parent=event3 + ) + assert vuln is not None + vuln = scan.make_event( + {"host": "evilcorp.com", "description": "test", "severity": "HIGH"}, "VULNERABILITY", parent=event3 + ) + assert vuln is not None + + +def test_event_magic(): + from bbot.core.helpers.libmagic import get_magic_info, get_compression + + import base64 + + zip_base64 = 
"UEsDBAoDAAAAAOMmZ1lR4FaHBQAAAAUAAAAIAAAAYXNkZi50eHRhc2RmClBLAQI/AwoDAAAAAOMmZ1lR4FaHBQAAAAUAAAAIACQAAAAAAAAAIICkgQAAAABhc2RmLnR4dAoAIAAAAAAAAQAYAICi2B77MNsBgKLYHvsw2wGAotge+zDbAVBLBQYAAAAAAQABAFoAAAArAAAAAAA=" + zip_bytes = base64.b64decode(zip_base64) + zip_file = Path("/tmp/.bbottestzipasdkfjalsdf.zip") + with open(zip_file, "wb") as f: + f.write(zip_bytes) + + # test magic helpers + extension, mime_type, description, confidence = get_magic_info(zip_file) + assert extension == ".zip" + assert mime_type == "application/zip" + assert description == "PKZIP Archive file" + assert confidence > 0 + assert get_compression(mime_type) == "zip" + + # test filesystem event - file + scan = Scanner() + event = scan.make_event({"path": zip_file}, "FILESYSTEM", parent=scan.root_event) + assert event.data == { + "path": "/tmp/.bbottestzipasdkfjalsdf.zip", + "magic_extension": ".zip", + "magic_mime_type": "application/zip", + "magic_description": "PKZIP Archive file", + "magic_confidence": 0.9, + "compression": "zip", + } + assert event.tags == {"file", "zip-archive", "compressed"} + + # test filesystem event - folder + scan = Scanner() + event = scan.make_event({"path": "/tmp"}, "FILESYSTEM", parent=scan.root_event) + assert event.data == {"path": "/tmp"} + assert event.tags == {"folder"} + + zip_file.unlink() + + +@pytest.mark.asyncio +async def test_mobile_app(): + scan = Scanner() + with pytest.raises(ValidationError): + scan.make_event("com.evilcorp.app", "MOBILE_APP", parent=scan.root_event) + with pytest.raises(ValidationError): + scan.make_event({"id": "com.evilcorp.app"}, "MOBILE_APP", parent=scan.root_event) + with pytest.raises(ValidationError): + scan.make_event({"url": "https://play.google.com/store/apps/details"}, "MOBILE_APP", parent=scan.root_event) + mobile_app = scan.make_event( + {"url": "https://play.google.com/store/apps/details?id=com.evilcorp.app"}, "MOBILE_APP", parent=scan.root_event + ) + assert sorted(mobile_app.data.items()) == [ + ("id", "com.evilcorp.app"), + ("url", "https://play.google.com/store/apps/details?id=com.evilcorp.app"), + ] + + scan = Scanner("MOBILE_APP:https://play.google.com/store/apps/details?id=com.evilcorp.app") + events = [e async for e in scan.async_start()] + assert len(events) == 3 + mobile_app_event = [e for e in events if e.type == "MOBILE_APP"][0] + assert mobile_app_event.type == "MOBILE_APP" + assert sorted(mobile_app_event.data.items()) == [ + ("id", "com.evilcorp.app"), + ("url", "https://play.google.com/store/apps/details?id=com.evilcorp.app"), + ] + + +@pytest.mark.asyncio +async def test_filesystem(): + scan = Scanner("FILESYSTEM:/tmp/asdf") + events = [e async for e in scan.async_start()] + assert len(events) == 3 + filesystem_events = [e for e in events if e.type == "FILESYSTEM"] + assert len(filesystem_events) == 1 + assert filesystem_events[0].type == "FILESYSTEM" + assert filesystem_events[0].data == {"path": "/tmp/asdf"} + + +def test_event_hashing(): + scan = Scanner("example.com") + url_event = scan.make_event("https://api.example.com/", "URL_UNVERIFIED", parent=scan.root_event) + host_event_1 = scan.make_event("www.example.com", "DNS_NAME", parent=url_event) + host_event_2 = scan.make_event("test.example.com", "DNS_NAME", parent=url_event) + finding_data = {"description": "Custom Yara Rule [find_string] Matched via identifier [str1]"} + finding1 = scan.make_event(finding_data, "FINDING", parent=host_event_1) + finding2 = scan.make_event(finding_data, "FINDING", parent=host_event_2) + finding3 = scan.make_event(finding_data, 
"FINDING", parent=host_event_2) + + assert finding1.data == { + "description": "Custom Yara Rule [find_string] Matched via identifier [str1]", + "host": "www.example.com", + } + assert finding2.data == { + "description": "Custom Yara Rule [find_string] Matched via identifier [str1]", + "host": "test.example.com", + } + assert finding3.data == { + "description": "Custom Yara Rule [find_string] Matched via identifier [str1]", + "host": "test.example.com", + } + assert finding1.id != finding2.id + assert finding2.id == finding3.id + assert finding1.data_id != finding2.data_id + assert finding2.data_id == finding3.data_id + assert finding1.data_hash != finding2.data_hash + assert finding2.data_hash == finding3.data_hash + assert hash(finding1) != hash(finding2) + assert hash(finding2) == hash(finding3) diff --git a/bbot/test/test_step_1/test_files.py b/bbot/test/test_step_1/test_files.py new file mode 100644 index 0000000000..feb6b928c3 --- /dev/null +++ b/bbot/test/test_step_1/test_files.py @@ -0,0 +1,24 @@ +import asyncio + +from ..bbot_fixtures import * + + +@pytest.mark.asyncio +async def test_files(bbot_scanner): + scan1 = bbot_scanner() + + # tempfile + tempfile = scan1.helpers.tempfile(("line1", "line2"), pipe=False) + assert list(scan1.helpers.read_file(tempfile)) == ["line1", "line2"] + tempfile = scan1.helpers.tempfile(("line1", "line2"), pipe=True) + assert list(scan1.helpers.read_file(tempfile)) == ["line1", "line2"] + + # tempfile tail + results = [] + tempfile = scan1.helpers.tempfile_tail(callback=lambda x: results.append(x)) + with open(tempfile, "w") as f: + f.write("asdf\n") + await asyncio.sleep(0.1) + assert "asdf" in results + + await scan1._cleanup() diff --git a/bbot/test/test_step_1/test_helpers.py b/bbot/test/test_step_1/test_helpers.py new file mode 100644 index 0000000000..9cec291941 --- /dev/null +++ b/bbot/test/test_step_1/test_helpers.py @@ -0,0 +1,949 @@ +import asyncio +import datetime +import ipaddress + +from ..bbot_fixtures import * + + +@pytest.mark.asyncio +async def test_helpers_misc(helpers, scan, bbot_scanner, bbot_httpserver): + ### URL ### + bad_urls = ( + "http://e.co/index.html", + "http://e.co/u/1111/info", + "http://e.co/u/2222/info", + "http://e.co/u/3333/info", + "http://e.co/u/4444/info", + "http://e.co/u/5555/info", + ) + new_urls = tuple(helpers.validators.collapse_urls(bad_urls, threshold=4)) + assert len(new_urls) == 2 + new_urls = tuple(sorted([u.geturl() for u in helpers.validators.collapse_urls(bad_urls, threshold=5)])) + assert new_urls == bad_urls + + new_url = helpers.add_get_params("http://evilcorp.com/a?p=1&q=2", {"r": 3, "s": "asdf"}).geturl() + query = dict(s.split("=") for s in new_url.split("?")[-1].split("&")) + query = tuple(sorted(query.items(), key=lambda x: x[0])) + assert query == ( + ("p", "1"), + ("q", "2"), + ("r", "3"), + ("s", "asdf"), + ) + assert tuple(sorted(helpers.get_get_params("http://evilcorp.com/a?p=1&q=2#frag").items())) == ( + ("p", ["1"]), + ("q", ["2"]), + ) + + assert helpers.validators.clean_url("http://evilcorp.com:80").geturl() == "http://evilcorp.com/" + assert helpers.validators.clean_url("http://evilcorp.com/asdf?a=asdf#frag").geturl() == "http://evilcorp.com/asdf" + assert helpers.validators.clean_url("http://evilcorp.com//asdf").geturl() == "http://evilcorp.com/asdf" + assert helpers.validators.clean_url("http://evilcorp.com.").geturl() == "http://evilcorp.com/" + with pytest.raises(ValueError): + helpers.validators.clean_url("http://evilcorp,com") + + assert 
helpers.url_depth("http://evilcorp.com/asdf/user/") == 2 + assert helpers.url_depth("http://evilcorp.com/asdf/user") == 2 + assert helpers.url_depth("http://evilcorp.com/asdf/") == 1 + assert helpers.url_depth("http://evilcorp.com/asdf") == 1 + assert helpers.url_depth("http://evilcorp.com/") == 0 + assert helpers.url_depth("http://evilcorp.com") == 0 + + assert helpers.parent_url("http://evilcorp.com/subdir1/subdir2?foo=bar") == "http://evilcorp.com/subdir1" + + ### MISC ### + assert helpers.is_domain("evilcorp.co.uk") + assert not helpers.is_domain("www.evilcorp.co.uk") + assert helpers.is_domain("evilcorp.notreal") + assert not helpers.is_domain("asdf.evilcorp.notreal") + assert not helpers.is_domain("notreal") + assert helpers.is_subdomain("www.evilcorp.co.uk") + assert not helpers.is_subdomain("evilcorp.co.uk") + assert helpers.is_subdomain("www.evilcorp.notreal") + assert not helpers.is_subdomain("evilcorp.notreal") + assert not helpers.is_subdomain("notreal") + assert helpers.is_url("http://evilcorp.co.uk/asdf?a=b&c=d#asdf") + assert helpers.is_url("https://evilcorp.co.uk/asdf?a=b&c=d#asdf") + assert helpers.is_uri("ftp://evilcorp.co.uk") is True + assert helpers.is_uri("http://evilcorp.co.uk") is True + assert helpers.is_uri("evilcorp.co.uk", return_scheme=True) == "" + assert helpers.is_uri("ftp://evilcorp.co.uk", return_scheme=True) == "ftp" + assert helpers.is_uri("FTP://evilcorp.co.uk", return_scheme=True) == "ftp" + assert not helpers.is_url("https:/evilcorp.co.uk/asdf?a=b&c=d#asdf") + assert not helpers.is_url("/evilcorp.co.uk/asdf?a=b&c=d#asdf") + assert not helpers.is_url("ftp://evilcorp.co.uk") + assert helpers.parent_domain("www.evilcorp.co.uk") == "evilcorp.co.uk" + assert helpers.parent_domain("evilcorp.co.uk") == "evilcorp.co.uk" + assert helpers.parent_domain("localhost") == "localhost" + assert helpers.parent_domain("www.evilcorp.notreal") == "evilcorp.notreal" + assert helpers.parent_domain("evilcorp.notreal") == "evilcorp.notreal" + assert helpers.parent_domain("notreal") == "notreal" + assert list(helpers.domain_parents("test.www.evilcorp.co.uk")) == ["www.evilcorp.co.uk", "evilcorp.co.uk"] + assert list(helpers.domain_parents("www.evilcorp.co.uk", include_self=True)) == [ + "www.evilcorp.co.uk", + "evilcorp.co.uk", + ] + assert list(helpers.domain_parents("evilcorp.co.uk", include_self=True)) == ["evilcorp.co.uk"] + assert list(helpers.ip_network_parents("0.0.0.0/2")) == [ + ipaddress.ip_network("0.0.0.0/1"), + ipaddress.ip_network("0.0.0.0/0"), + ] + assert list(helpers.ip_network_parents("0.0.0.0/1", include_self=True)) == [ + ipaddress.ip_network("0.0.0.0/1"), + ipaddress.ip_network("0.0.0.0/0"), + ] + assert helpers.is_ip("127.0.0.1") + assert helpers.is_ip("127.0.0.1", include_network=True) + assert helpers.is_ip("127.0.0.1", version=4) + assert not helpers.is_ip("127.0.0.1", version=6) + assert not helpers.is_ip("127.0.0.0.1") + + assert helpers.is_ip("dead::beef") + assert helpers.is_ip("dead::beef", include_network=True) + assert not helpers.is_ip("dead::beef", version=4) + assert helpers.is_ip("dead::beef", version=6) + assert not helpers.is_ip("dead:::beef") + + assert not helpers.is_ip("1.2.3.4/24") + assert helpers.is_ip("1.2.3.4/24", include_network=True) + assert not helpers.is_ip("1.2.3.4/24", version=4) + assert helpers.is_ip("1.2.3.4/24", include_network=True, version=4) + assert not helpers.is_ip("1.2.3.4/24", include_network=True, version=6) + + assert not helpers.is_ip_type("127.0.0.1") + assert 
helpers.is_ip_type(ipaddress.ip_address("127.0.0.1")) + assert not helpers.is_ip_type(ipaddress.ip_address("127.0.0.1"), network=True) + assert helpers.is_ip_type(ipaddress.ip_address("127.0.0.1"), network=False) + assert helpers.is_ip_type(ipaddress.ip_network("127.0.0.0/8")) + assert helpers.is_ip_type(ipaddress.ip_network("127.0.0.0/8"), network=True) + assert not helpers.is_ip_type(ipaddress.ip_network("127.0.0.0/8"), network=False) + + assert helpers.is_dns_name("evilcorp.com") + assert not helpers.is_dns_name("evilcorp.com:80") + assert not helpers.is_dns_name("http://evilcorp.com:80") + assert helpers.is_dns_name("evilcorp") + assert helpers.is_dns_name("evilcorp.") + assert helpers.is_dns_name("ドメイン.テスト") + assert not helpers.is_dns_name("127.0.0.1") + assert not helpers.is_dns_name("dead::beef") + assert not helpers.is_dns_name("bob@evilcorp.com") + + assert helpers.domain_stem("evilcorp.co.uk") == "evilcorp" + assert helpers.domain_stem("www.evilcorp.co.uk") == "www.evilcorp" + + assert tuple(await helpers.re.extract_emails("asdf@asdf.com\nT@t.Com&a=a@a.com__ b@b.com")) == ( + "asdf@asdf.com", + "t@t.com", + "a@a.com", + "b@b.com", + ) + + assert helpers.extract_host("evilcorp.com:80") == ("evilcorp.com", "", ":80") + assert helpers.extract_host("http://evilcorp.com:80/asdf.php?a=b") == ( + "evilcorp.com", + "http://", + ":80/asdf.php?a=b", + ) + assert helpers.extract_host("http://evilcorp.com:80/asdf.php?a=b@a.com") == ( + "evilcorp.com", + "http://", + ":80/asdf.php?a=b@a.com", + ) + assert helpers.extract_host("bob@evilcorp.com") == ("evilcorp.com", "bob@", "") + assert helpers.extract_host("[dead::beef]:22") == ("dead::beef", "[", "]:22") + assert helpers.extract_host("scp://[dead::beef]:22") == ("dead::beef", "scp://[", "]:22") + assert helpers.extract_host("https://[dead::beef]:22?a=b") == ("dead::beef", "https://[", "]:22?a=b") + assert helpers.extract_host("https://[dead::beef]/?a=b") == ("dead::beef", "https://[", "]/?a=b") + assert helpers.extract_host("https://[dead::beef]?a=b") == ("dead::beef", "https://[", "]?a=b") + assert helpers.extract_host("ftp://username:password@my-ftp.com/my-file.csv") == ( + "my-ftp.com", + "ftp://username:password@", + "/my-file.csv", + ) + assert helpers.extract_host("ftp://username:p@ssword@my-ftp.com/my-file.csv") == ( + "my-ftp.com", + "ftp://username:p@ssword@", + "/my-file.csv", + ) + assert helpers.extract_host("ftp://username:password:/@my-ftp.com/my-file.csv") == ( + "my-ftp.com", + "ftp://username:password:/@", + "/my-file.csv", + ) + assert helpers.extract_host("ftp://username:password:/@dead::beef/my-file.csv") == ( + None, + "ftp://username:password:/@dead::beef/my-file.csv", + "", + ) + assert helpers.extract_host("ftp://username:password:/@[dead::beef]/my-file.csv") == ( + "dead::beef", + "ftp://username:password:/@[", + "]/my-file.csv", + ) + assert helpers.extract_host("ftp://username:password:/@[dead::beef]:22/my-file.csv") == ( + "dead::beef", + "ftp://username:password:/@[", + "]:22/my-file.csv", + ) + + assert helpers.best_http_status(200, 404) == 200 + assert helpers.best_http_status(500, 400) == 400 + assert helpers.best_http_status(301, 302) == 301 + assert helpers.best_http_status(0, 302) == 302 + assert helpers.best_http_status(500, 0) == 500 + + assert helpers.split_domain("www.evilcorp.co.uk") == ("www", "evilcorp.co.uk") + assert helpers.split_domain("asdf.www.test.notreal") == ("asdf.www", "test.notreal") + assert helpers.split_domain("www.test.notreal") == ("www", "test.notreal") + assert 
helpers.split_domain("test.notreal") == ("", "test.notreal") + assert helpers.split_domain("notreal") == ("", "notreal") + assert helpers.split_domain("192.168.0.1") == ("", "192.168.0.1") + assert helpers.split_domain("dead::beef") == ("", "dead::beef") + + assert helpers.subdomain_depth("a.s.d.f.evilcorp.co.uk") == 4 + assert helpers.subdomain_depth("a.s.d.f.evilcorp.com") == 4 + assert helpers.subdomain_depth("evilcorp.com") == 0 + assert helpers.subdomain_depth("a.evilcorp.com") == 1 + assert helpers.subdomain_depth("a.s.d.f.evilcorp.notreal") == 4 + + assert helpers.split_host_port("http://evilcorp.co.uk") == ("evilcorp.co.uk", 80) + assert helpers.split_host_port("https://evilcorp.co.uk") == ("evilcorp.co.uk", 443) + assert helpers.split_host_port("ws://evilcorp.co.uk") == ("evilcorp.co.uk", 80) + assert helpers.split_host_port("wss://evilcorp.co.uk") == ("evilcorp.co.uk", 443) + assert helpers.split_host_port("WSS://evilcorp.co.uk") == ("evilcorp.co.uk", 443) + assert helpers.split_host_port("http://evilcorp.co.uk:666") == ("evilcorp.co.uk", 666) + assert helpers.split_host_port("evilcorp.co.uk:666") == ("evilcorp.co.uk", 666) + assert helpers.split_host_port("evilcorp.co.uk") == ("evilcorp.co.uk", None) + assert helpers.split_host_port("192.168.0.1") == (ipaddress.ip_address("192.168.0.1"), None) + assert helpers.split_host_port("192.168.0.1:80") == (ipaddress.ip_address("192.168.0.1"), 80) + assert helpers.split_host_port("[e]:80") == ("e", 80) + assert helpers.split_host_port("d://wat:wat") == ("wat", None) + assert helpers.split_host_port("https://[dead::beef]:8338") == (ipaddress.ip_address("dead::beef"), 8338) + assert helpers.split_host_port("[dead::beef]") == (ipaddress.ip_address("dead::beef"), None) + assert helpers.split_host_port("dead::beef") == (ipaddress.ip_address("dead::beef"), None) + extracted_words = helpers.extract_words("blacklanternsecurity") + assert "black" in extracted_words + # assert "blacklantern" in extracted_words + # assert "lanternsecurity" in extracted_words + # assert "blacklanternsecurity" in extracted_words + assert "bls" in extracted_words + + choices = ["asdf.fdsa", "asdf.1234", "4321.5678"] + best_match = helpers.closest_match("asdf.123a", choices) + assert best_match == "asdf.1234" + best_matches = helpers.closest_match("asdf.123a", choices, n=2) + assert len(best_matches) == 2 + assert best_matches[0] == "asdf.1234" + assert best_matches[1] == "asdf.fdsa" + + ipv4_netloc = helpers.make_netloc("192.168.1.1", 80) + assert ipv4_netloc == "192.168.1.1:80" + assert helpers.make_netloc("192.168.1.1") == "192.168.1.1" + assert helpers.make_netloc(ipaddress.ip_address("192.168.1.1"), None) == "192.168.1.1" + assert helpers.make_netloc("dead::beef", "443") == "[dead::beef]:443" + assert helpers.make_netloc(ipaddress.ip_address("dead::beef"), 443) == "[dead::beef]:443" + assert helpers.make_netloc("dead::beef", None) == "[dead::beef]" + assert helpers.make_netloc(ipaddress.ip_address("dead::beef"), None) == "[dead::beef]" + + assert helpers.get_file_extension("https://evilcorp.com/evilcorp.com/test/asdf.TXT") == "txt" + assert helpers.get_file_extension("/etc/conf/test.tar.gz") == "gz" + assert helpers.get_file_extension("/etc/passwd") == "" + + assert helpers.tagify("HttP -_Web Title-- ") == "http-web-title" + tagged_event = scan.make_event("127.0.0.1", parent=scan.root_event, tags=["HttP web -__- title "]) + assert "http-web-title" in tagged_event.tags + tagged_event.remove_tag("http-web-title") + assert "http-web-title" not in tagged_event.tags + 
tagged_event.add_tag("Another tag ") + assert "another-tag" in tagged_event.tags + tagged_event.tags = ["Some other tag "] + assert isinstance(tagged_event._tags, set) + assert "another-tag" not in tagged_event.tags + assert "some-other-tag" in tagged_event.tags + + assert list(helpers.search_dict_by_key("asdf", {"asdf": "fdsa", 4: [{"asdf": 5}]})) == ["fdsa", 5] + assert list(helpers.search_dict_by_key("asdf", {"wat": {"asdf": "fdsa"}})) == ["fdsa"] + assert list(helpers.search_dict_by_key("asdf", [{"wat": {"nope": 1}}, {"wat": [{"asdf": "fdsa"}]}])) == ["fdsa"] + assert not list(helpers.search_dict_by_key("asdf", [{"wat": {"nope": 1}}, {"wat": [{"fdsa": "asdf"}]}])) + assert not list(helpers.search_dict_by_key("asdf", "asdf")) + + from bbot.core.helpers.regexes import url_regexes + + dict_to_search = { + "key1": { + "key2": [{"key3": "A url of some kind: https://www.evilcorp.com/asdf"}], + "key4": "A url of some kind: https://www.evilcorp.com/fdsa", + } + } + assert set(helpers.search_dict_values(dict_to_search, *url_regexes)) == { + "https://www.evilcorp.com/asdf", + "https://www.evilcorp.com/fdsa", + } + + replaced = helpers.search_format_dict( + {"asdf": [{"wat": {"here": "#{replaceme}!"}}, {500: True}]}, replaceme="asdf" + ) + assert replaced["asdf"][1][500] is True + assert replaced["asdf"][0]["wat"]["here"] == "asdf!" + + filtered_dict = helpers.filter_dict( + {"modules": {"c99": {"api_key": "1234", "filterme": "asdf"}, "ipneighbor": {"test": "test"}}}, "api_key" + ) + assert "api_key" in filtered_dict["modules"]["c99"] + assert "filterme" not in filtered_dict["modules"]["c99"] + assert "ipneighbor" not in filtered_dict["modules"] + + filtered_dict2 = helpers.filter_dict( + {"modules": {"c99": {"api_key": "1234", "filterme": "asdf"}, "ipneighbor": {"test": "test"}}}, "c99" + ) + assert "api_key" in filtered_dict2["modules"]["c99"] + assert "filterme" in filtered_dict2["modules"]["c99"] + assert "ipneighbor" not in filtered_dict2["modules"] + + filtered_dict3 = helpers.filter_dict( + {"modules": {"c99": {"api_key": "1234", "filterme": "asdf"}, "ipneighbor": {"test": "test"}}}, + "key", + fuzzy=True, + ) + assert "api_key" in filtered_dict3["modules"]["c99"] + assert "filterme" not in filtered_dict3["modules"]["c99"] + assert "ipneighbor" not in filtered_dict3["modules"] + + filtered_dict4 = helpers.filter_dict( + {"modules": {"secrets_db": {"api_key": "1234"}, "ipneighbor": {"secret": "test", "asdf": "1234"}}}, + "secret", + fuzzy=True, + exclude_keys="modules", + ) + assert "secrets_db" not in filtered_dict4["modules"] + assert "ipneighbor" in filtered_dict4["modules"] + assert "secret" in filtered_dict4["modules"]["ipneighbor"] + assert "asdf" not in filtered_dict4["modules"]["ipneighbor"] + + cleaned_dict = helpers.clean_dict( + {"modules": {"c99": {"api_key": "1234", "filterme": "asdf"}, "ipneighbor": {"test": "test"}}}, "api_key" + ) + assert "api_key" not in cleaned_dict["modules"]["c99"] + assert "filterme" in cleaned_dict["modules"]["c99"] + assert "ipneighbor" in cleaned_dict["modules"] + + cleaned_dict2 = helpers.clean_dict( + {"modules": {"c99": {"api_key": "1234", "filterme": "asdf"}, "ipneighbor": {"test": "test"}}}, "c99" + ) + assert "c99" not in cleaned_dict2["modules"] + assert "ipneighbor" in cleaned_dict2["modules"] + + cleaned_dict3 = helpers.clean_dict( + {"modules": {"c99": {"api_key": "1234", "filterme": "asdf"}, "ipneighbor": {"test": "test"}}}, + "key", + fuzzy=True, + ) + assert "api_key" not in cleaned_dict3["modules"]["c99"] + assert "filterme" in 
cleaned_dict3["modules"]["c99"] + assert "ipneighbor" in cleaned_dict3["modules"] + + cleaned_dict4 = helpers.clean_dict( + {"modules": {"secrets_db": {"api_key": "1234"}, "ipneighbor": {"secret": "test", "asdf": "1234"}}}, + "secret", + fuzzy=True, + exclude_keys="modules", + ) + assert "secrets_db" in cleaned_dict4["modules"] + assert "ipneighbor" in cleaned_dict4["modules"] + assert "secret" not in cleaned_dict4["modules"]["ipneighbor"] + assert "asdf" in cleaned_dict4["modules"]["ipneighbor"] + + assert helpers.split_list([1, 2, 3, 4, 5]) == [[1, 2], [3, 4, 5]] + assert list(helpers.grouper("ABCDEFG", 3)) == [["A", "B", "C"], ["D", "E", "F"], ["G"]] + + assert len(helpers.rand_string(3)) == 3 + assert len(helpers.rand_string(1)) == 1 + assert len(helpers.rand_string(0)) == 0 + assert type(helpers.rand_string(0)) == str + + test_file = Path(scan.config["home"]) / "testfile.asdf" + test_file.touch() + + assert test_file.is_file() + backup = helpers.backup_file(test_file) + assert backup.name == "testfile.1.asdf" + assert not test_file.exists() + assert backup.is_file() + test_file.touch() + backup2 = helpers.backup_file(test_file) + assert backup2.name == "testfile.1.asdf" + assert not test_file.exists() + assert backup2.is_file() + older_backup = Path(scan.config["home"]) / "testfile.2.asdf" + assert older_backup.is_file() + older_backup.unlink() + backup.unlink() + + with open(test_file, "w") as f: + f.write("asdf\nfdsa") + + assert "asdf" in helpers.str_or_file(str(test_file)) + assert "nope" in helpers.str_or_file("nope") + assert tuple(helpers.chain_lists([str(test_file), "nope"], try_files=True)) == ("asdf", "fdsa", "nope") + assert tuple(helpers.chain_lists("one, two", try_files=True)) == ("one", "two") + assert tuple(helpers.chain_lists("one, two three ,four five")) == ("one", "two", "three", "four", "five") + assert test_file.is_file() + + with pytest.raises(DirectoryCreationError, match="Failed to create.*"): + helpers.mkdir(test_file) + + helpers.delete_file(test_file) + assert not test_file.exists() + + timedelta = datetime.timedelta(hours=1, minutes=2, seconds=3) + assert helpers.human_timedelta(timedelta) == "1 hour, 2 minutes, 3 seconds" + timedelta = datetime.timedelta(hours=3, seconds=1) + assert helpers.human_timedelta(timedelta) == "3 hours, 1 second" + timedelta = datetime.timedelta(seconds=2) + assert helpers.human_timedelta(timedelta) == "2 seconds" + + ### VALIDATORS ### + # hosts + assert helpers.validators.validate_host(" evilCorp.COM.") == "evilcorp.com" + assert helpers.validators.validate_host("LOCALHOST ") == "localhost" + assert helpers.validators.validate_host(" 192.168.1.1") == "192.168.1.1" + assert helpers.validators.validate_host(" Dead::c0dE ") == "dead::c0de" + assert helpers.validators.soft_validate(" evilCorp.COM", "host") is True + assert helpers.validators.soft_validate("!@#$", "host") is False + with pytest.raises(ValueError): + assert helpers.validators.validate_host("!@#$") + # ports + assert helpers.validators.validate_port(666) == 666 + assert helpers.validators.validate_port(666666) == 65535 + assert helpers.validators.soft_validate(666, "port") is True + assert helpers.validators.soft_validate("!@#$", "port") is False + with pytest.raises(ValueError): + helpers.validators.validate_port("asdf") + # top tcp ports + top_tcp_ports = helpers.top_tcp_ports(100) + assert len(top_tcp_ports) == 100 + assert len(set(top_tcp_ports)) == 100 + top_tcp_ports = helpers.top_tcp_ports(800000) + assert top_tcp_ports[:10] == [80, 23, 443, 21, 22, 25, 3389, 
110, 445, 139] + assert top_tcp_ports[-10:] == [65526, 65527, 65528, 65529, 65530, 65531, 65532, 65533, 65534, 65535] + assert len(top_tcp_ports) == 65535 + assert len(set(top_tcp_ports)) == 65535 + assert all(isinstance(i, int) for i in top_tcp_ports) + top_tcp_ports = helpers.top_tcp_ports(10, as_string=True) + assert top_tcp_ports == "80,23,443,21,22,25,3389,110,445,139" + # urls + assert helpers.validators.validate_url(" httP://evilcorP.com/asdf?a=b&c=d#e") == "http://evilcorp.com/asdf" + assert ( + helpers.validators.validate_url_parsed(" httP://evilcorP.com/asdf?a=b&c=d#e").geturl() + == "http://evilcorp.com/asdf" + ) + assert helpers.validators.soft_validate(" httP://evilcorP.com/asdf?a=b&c=d#e", "url") is True + assert helpers.validators.soft_validate("!@#$", "url") is False + with pytest.raises(ValueError): + helpers.validators.validate_url("!@#$") + # severities + assert helpers.validators.validate_severity(" iNfo") == "INFO" + assert helpers.validators.soft_validate(" iNfo", "severity") is True + assert helpers.validators.soft_validate("NOPE", "severity") is False + with pytest.raises(ValueError): + helpers.validators.validate_severity("NOPE") + # emails + assert helpers.validators.validate_email(" bOb@eViLcorp.COM") == "bob@evilcorp.com" + assert helpers.validators.soft_validate(" bOb@eViLcorp.COM", "email") is True + assert helpers.validators.soft_validate("!@#$", "email") is False + with pytest.raises(ValueError): + helpers.validators.validate_email("!@#$") + + assert type(helpers.make_date()) == str + + # string formatter + s = "asdf {unused} {used}" + assert helpers.safe_format(s, used="fdsa") == "asdf {unused} fdsa" + + # punycode + assert helpers.smart_encode_punycode("ドメイン.テスト") == "xn--eckwd4c7c.xn--zckzah" + assert helpers.smart_decode_punycode("xn--eckwd4c7c.xn--zckzah") == "ドメイン.テスト" + assert helpers.smart_encode_punycode("evilcorp.com") == "evilcorp.com" + assert helpers.smart_decode_punycode("evilcorp.com") == "evilcorp.com" + assert helpers.smart_encode_punycode("bob_smith@ドメイン.テスト") == "bob_smith@xn--eckwd4c7c.xn--zckzah" + assert helpers.smart_decode_punycode("bob_smith@xn--eckwd4c7c.xn--zckzah") == "bob_smith@ドメイン.テスト" + assert helpers.smart_encode_punycode("ドメイン.テスト:80") == "xn--eckwd4c7c.xn--zckzah:80" + assert helpers.smart_decode_punycode("xn--eckwd4c7c.xn--zckzah:80") == "ドメイン.テスト:80" + + assert await helpers.re.recursive_decode("Hello%20world%21") == "Hello world!" + assert ( + await helpers.re.recursive_decode("Hello%20%5Cu041f%5Cu0440%5Cu0438%5Cu0432%5Cu0435%5Cu0442") == "Hello Привет" + ) + assert ( + await helpers.re.recursive_decode("%5Cu0020%5Cu041f%5Cu0440%5Cu0438%5Cu0432%5Cu0435%5Cu0442%5Cu0021") + == " Привет!" + ) + assert await helpers.re.recursive_decode("Hello%2520world%2521") == "Hello world!" + assert ( + await helpers.re.recursive_decode( + "Hello%255Cu0020%255Cu041f%255Cu0440%255Cu0438%255Cu0432%255Cu0435%255Cu0442" + ) + == "Hello Привет" + ) + assert ( + await helpers.re.recursive_decode( + "%255Cu0020%255Cu041f%255Cu0440%255Cu0438%255Cu0432%255Cu0435%255Cu0442%255Cu0021" + ) + == " Привет!" 
+ ) + assert ( + await helpers.re.recursive_decode(r"Hello\\nWorld\\\tGreetings\\\\nMore\nText") + == "Hello\nWorld\tGreetings\nMore\nText" + ) + + ### CACHE ### + helpers.cache_put("string", "wat") + helpers.cache_put("binary", b"wat") + assert helpers.cache_get("string") == "wat" + assert helpers.cache_get("binary") == "wat" + assert helpers.cache_get("binary", text=False) == b"wat" + cache_filename = helpers.cache_filename("string") + (m, i, d, n, u, g, sz, atime, mtime, ctime) = os.stat(str(cache_filename)) + # change modified time to be 10 days in the past + os.utime(str(cache_filename), times=(atime, mtime - (3600 * 24 * 10))) + assert helpers.cache_get("string", cache_hrs=24 * 7) is None + assert helpers.cache_get("string", cache_hrs=24 * 14) == "wat" + + test_file = Path(scan.config["home"]) / "testfile.asdf" + with open(test_file, "w") as f: + for i in range(100): + f.write(f"{i}\n") + assert len(list(open(test_file).readlines())) == 100 + assert (await helpers.wordlist(test_file)).is_file() + truncated_file = await helpers.wordlist(test_file, lines=10) + assert truncated_file.is_file() + assert len(list(open(truncated_file).readlines())) == 10 + with pytest.raises(WordlistError): + await helpers.wordlist("/tmp/a9pseoysadf/asdkgjaosidf") + test_file.unlink() + + # filename truncation + super_long_filename = "/tmp/" + ("a" * 1024) + ".txt" + with pytest.raises(OSError): + with open(super_long_filename, "w") as f: + f.write("wat") + truncated_filename = helpers.truncate_filename(super_long_filename) + with open(truncated_filename, "w") as f: + f.write("wat") + truncated_filename.unlink() + + # misc DNS helpers + assert helpers.is_ptr("wsc-11-22-33-44-wat.evilcorp.com") is True + assert helpers.is_ptr("wsc-11-22-33-wat.evilcorp.com") is False + assert helpers.is_ptr("11wat.evilcorp.com") is False + + ## NTLM + testheader = "TlRMTVNTUAACAAAAHgAeADgAAAAVgorilwL+bvnVipUAAAAAAAAAAJgAmABWAAAACgBjRQAAAA9XAEkATgAtAFMANAAyAE4ATwBCAEQAVgBUAEsAOAACAB4AVwBJAE4ALQBTADQAMgBOAE8AQgBEAFYAVABLADgAAQAeAFcASQBOAC0AUwA0ADIATgBPAEIARABWAFQASwA4AAQAHgBXAEkATgAtAFMANAAyAE4ATwBCAEQAVgBUAEsAOAADAB4AVwBJAE4ALQBTADQAMgBOAE8AQgBEAFYAVABLADgABwAIAHUwOZlfoNgBAAAAAA==" + decoded = helpers.ntlm.ntlmdecode(testheader) + assert decoded["NetBIOS_Domain_Name"] == "WIN-S42NOBDVTK8" + assert decoded["NetBIOS_Computer_Name"] == "WIN-S42NOBDVTK8" + assert decoded["DNS_Domain_name"] == "WIN-S42NOBDVTK8" + assert decoded["FQDN"] == "WIN-S42NOBDVTK8" + assert decoded["Timestamp"] == b"u09\x99_\xa0\xd8\x01" + with pytest.raises(NTLMError): + helpers.ntlm.ntlmdecode("asdf") + + test_filesize = bbot_test_dir / "test_filesize" + test_filesize.touch() + assert test_filesize.is_file() + assert helpers.filesize(test_filesize) == 0 + assert helpers.filesize(bbot_test_dir / "glkasjdlgksadlkfsdf") == 0 + + # memory stuff + int(helpers.memory_status().available) + int(helpers.swap_status().total) + + assert helpers.bytes_to_human(459819198709) == "428.24GB" + assert helpers.human_to_bytes("428.24GB") == 459819198709 + + # ordinals + assert helpers.integer_to_ordinal(1) == "1st" + assert helpers.integer_to_ordinal(2) == "2nd" + assert helpers.integer_to_ordinal(3) == "3rd" + assert helpers.integer_to_ordinal(4) == "4th" + assert helpers.integer_to_ordinal(11) == "11th" + assert helpers.integer_to_ordinal(12) == "12th" + assert helpers.integer_to_ordinal(13) == "13th" + assert helpers.integer_to_ordinal(21) == "21st" + assert helpers.integer_to_ordinal(22) == "22nd" + assert helpers.integer_to_ordinal(23) == "23rd" + assert 
helpers.integer_to_ordinal(101) == "101st" + assert helpers.integer_to_ordinal(111) == "111th" + assert helpers.integer_to_ordinal(112) == "112th" + assert helpers.integer_to_ordinal(113) == "113th" + assert helpers.integer_to_ordinal(0) == "0th" + + await scan._cleanup() + + scan1 = bbot_scanner(modules="ipneighbor") + await scan1.load_modules() + assert int(helpers.get_size(scan1.modules["ipneighbor"])) > 0 + + await scan1._cleanup() + + # weighted shuffle (used for module queues) + items = ["a", "b", "c", "d", "e"] + first_frequencies = {i: 0 for i in items} + weights = [1, 2, 3, 4, 5] + for i in range(10000): + shuffled = helpers.weighted_shuffle(items, weights) + first = shuffled[0] + first_frequencies[first] += 1 + assert ( + first_frequencies["a"] + < first_frequencies["b"] + < first_frequencies["c"] + < first_frequencies["d"] + < first_frequencies["e"] + ) + + # error handling helpers + test_ran = False + try: + try: + raise KeyboardInterrupt("asdf") + except KeyboardInterrupt: + raise ValueError("asdf") + except Exception as e: + assert len(helpers.get_exception_chain(e)) == 2 + assert len([_ for _ in helpers.get_exception_chain(e) if isinstance(_, KeyboardInterrupt)]) == 1 + assert len([_ for _ in helpers.get_exception_chain(e) if isinstance(_, ValueError)]) == 1 + assert helpers.in_exception_chain(e, (KeyboardInterrupt, asyncio.CancelledError)) is True + assert helpers.in_exception_chain(e, (TypeError, OSError)) is False + test_ran = True + assert test_ran + test_ran = False + try: + try: + raise AttributeError("asdf") + except AttributeError: + raise ValueError("asdf") + except Exception as e: + assert len(helpers.get_exception_chain(e)) == 2 + assert len([_ for _ in helpers.get_exception_chain(e) if isinstance(_, AttributeError)]) == 1 + assert len([_ for _ in helpers.get_exception_chain(e) if isinstance(_, ValueError)]) == 1 + assert helpers.in_exception_chain(e, (KeyboardInterrupt, asyncio.CancelledError)) is False + assert helpers.in_exception_chain(e, (KeyboardInterrupt, AttributeError)) is True + assert helpers.in_exception_chain(e, (AttributeError,)) is True + test_ran = True + assert test_ran + + +@pytest.mark.asyncio +async def test_word_cloud(helpers, bbot_scanner): + number_mutations = helpers.word_cloud.get_number_mutations("base2_p013", n=5, padding=2) + assert "base0_p013" in number_mutations + assert "base7_p013" in number_mutations + assert "base8_p013" not in number_mutations + assert "base2_p008" in number_mutations + assert "base2_p007" not in number_mutations + assert "base2_p018" in number_mutations + assert "base2_p0134" in number_mutations + assert "base2_p0135" not in number_mutations + + permutations = helpers.word_cloud.mutations("_base", numbers=1) + assert ("_base", "dev") in permutations + assert ("dev", "_base") in permutations + + # saving and loading + scan1 = bbot_scanner("127.0.0.1") + word_cloud = scan1.helpers.word_cloud + word_cloud.add_word("lantern") + word_cloud.add_word("black") + word_cloud.add_word("black") + word_cloud.save() + with open(word_cloud.default_filename) as f: + word_cloud_content = [l.rstrip() for l in f.read().splitlines()] + assert len(word_cloud_content) == 2 + assert "2\tblack" in word_cloud_content + assert "1\tlantern" in word_cloud_content + word_cloud.save(limit=1) + with open(word_cloud.default_filename) as f: + word_cloud_content = [l.rstrip() for l in f.read().splitlines()] + assert len(word_cloud_content) == 1 + assert "2\tblack" in word_cloud_content + assert "1\tlantern" not in word_cloud_content + 
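# The word cloud file checked above holds one tab-separated "<count>\t<word>" row + # per word, highest count first, and save(limit=N) keeps only the top N rows; a + # hypothetical sketch of that serialization (not BBOT's actual implementation): + # def save_sketch(path, counts, limit=None): + #     ranked = sorted(counts.items(), key=lambda kv: kv[1], reverse=True)[:limit] + #     with open(path, "w") as f: + #         f.writelines(f"{count}\t{word}\n" for word, count in ranked) +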
word_cloud.clear() + with open(word_cloud.default_filename, "w") as f: + f.write("plumbus\nrumbus") + word_cloud.load() + assert word_cloud["plumbus"] == 1 + assert word_cloud["rumbus"] == 1 + + # mutators + from bbot.core.helpers.wordcloud import DNSMutator + + m = DNSMutator() + m.add_word("blacklantern-security237") + mutations = set(m) + assert mutations == { + (None,), + (None, "237"), + (None, "-security237"), + (None, "lanternsecurity237"), + (None, "lantern-security237"), + ("blacklantern-", None), + ("blacklantern", None, "237"), + ("blacklantern-", None, "237"), + ("black", None, "security237"), + ("black", None, "-security237"), + } + + m = DNSMutator() + m.add_word("blacklantern-security") + m.add_word("sec") + m.add_word("sec2") + m.add_word("black2") + mutations = sorted(m.mutations("whitebasket")) + assert mutations == sorted( + [ + "basket", + "basket-security", + "basket2", + "basketlantern-security", + "basketlanternsecurity", + "blackbasket-security", + "blackbasketsecurity", + "blacklantern-basket", + "blacklantern-white", + "blacklantern-whitebasket", + "blacklanternbasket", + "blacklanternwhite", + "blacklanternwhitebasket", + "blackwhite-security", + "blackwhitebasket-security", + "blackwhitebasketsecurity", + "blackwhitesecurity", + "white", + "white-security", + "white2", + "whitebasket", + "whitebasket-security", + "whitebasket2", + "whitebasketlantern-security", + "whitebasketlanternsecurity", + "whitelantern-security", + "whitelanternsecurity", + ] + ) + top_mutations = sorted(m.top_mutations().items(), key=lambda x: x[-1], reverse=True) + assert top_mutations[:2] == [((None,), 4), ((None, "2"), 2)] + + await scan1._cleanup() + + +def test_names(helpers): + assert helpers.names == sorted(helpers.names) + assert helpers.adjectives == sorted(helpers.adjectives) + + +@pytest.mark.asyncio +async def test_ratelimiter(helpers): + from bbot.core.helpers.ratelimiter import RateLimiter + + results = [] + + async def web_request(r): + async with r: + await asyncio.sleep(0.12345) + results.append(None) + + # allow 10 requests per second + r = RateLimiter(10, "Test") + tasks = [] + # start 500 requests + for i in range(500): + tasks.append(asyncio.create_task(web_request(r))) + # sleep for 5 seconds + await asyncio.sleep(5) + await helpers.cancel_tasks(tasks) + # 5 seconds * 10 requests per second == 50 + assert 45 <= len(results) <= 55 + + +def test_sync_to_async(): + from bbot.core.helpers.async_helpers import async_to_sync_gen + + # async to sync generator converter + async def async_gen(): + for i in range(5): + await asyncio.sleep(0.1) + yield i + + sync_gen = async_to_sync_gen(async_gen()) + + l = [] + while 1: + try: + l.append(next(sync_gen)) + except StopIteration: + break + assert l == [0, 1, 2, 3, 4] + + +@pytest.mark.asyncio +async def test_async_helpers(): + import random + from bbot.core.helpers.misc import as_completed + + async def do_stuff(r): + await asyncio.sleep(r) + return r + + random_ints = [random.random() for _ in range(1000)] + tasks = [do_stuff(r) for r in random_ints] + results = set() + async for t in as_completed(tasks): + results.add(await t) + assert len(results) == 1000 + assert sorted(random_ints) == sorted(results) + + +def test_portparse(helpers): + assert helpers.parse_port_string("80,443,22") == [80, 443, 22] + assert helpers.parse_port_string(80) == [80] + + assert helpers.parse_port_string("80,443,22,1000-1002") == [80, 443, 22, 1000, 1001, 1002] + + with pytest.raises(ValueError) as e: + helpers.parse_port_string("80,443,22,70000") + 
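# parse_port_string splits on commas, expands "lo-hi" ranges, and rejects anything + # outside 0-65535; a hypothetical sketch of the single-port path (not BBOT's actual + # implementation): + # def parse_port_sketch(p): + #     port = int(p)  # non-numeric input raises ValueError + #     if not 0 <= port <= 65535: + #         raise ValueError(f"Invalid port: {p}") + #     return port +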
assert str(e.value) == "Invalid port: 70000" + + with pytest.raises(ValueError) as e: + helpers.parse_port_string("80,443,22,1000-70000") + assert str(e.value) == "Invalid port range: 1000-70000" + + with pytest.raises(ValueError) as e: + helpers.parse_port_string("80,443,22,1000-1001-1002") + assert str(e.value) == "Invalid port or port range: 1000-1001-1002" + + with pytest.raises(ValueError) as e: + helpers.parse_port_string("80,443,22,1002-1000") + assert str(e.value) == "Invalid port range: 1002-1000" + + with pytest.raises(ValueError) as e: + helpers.parse_port_string("80,443,22,foo") + assert str(e.value) == "Invalid port or port range: foo" + + +# test chain_lists helper + + +def test_liststring_valid_strings(helpers): + assert helpers.chain_lists("hello,world,bbot") == ["hello", "world", "bbot"] + + +def test_liststring_invalid_string(helpers): + with pytest.raises(ValueError) as e: + helpers.chain_lists("hello,world,\x01", validate=True) + assert str(e.value) == "Invalid character in string: \x01" + + +def test_liststring_singleitem(helpers): + assert helpers.chain_lists("hello") == ["hello"] + + +def test_liststring_invalidfnchars(helpers): + with pytest.raises(ValueError) as e: + helpers.chain_lists("hello,world,bbot|test", validate=True) + assert str(e.value) == "Invalid character in string: bbot|test" + + +# test parameter validation +@pytest.mark.asyncio +async def test_parameter_validation(helpers): + getparam_valid_params = { + "name", + "age", + "valid_name", + "valid-name", + "session_token", + "user.id", + "user-name", + "client.id", + "auth-token", + "access_token", + "abcd", + "jqueryget", + " + + + + + + + """ + + async def setup_before_prep(self, module_test): + # Simulate HTTP_RESPONSE detection + expect_args = {"method": "GET", "uri": "/"} + respond_args = {"response_data": self.http_response_data} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + def check(self, module_test, events): + ajaxpro_httpresponse_detection = False + for e in events: + if e.type == "TECHNOLOGY" and e.data["technology"] == "ajaxpro": + ajaxpro_httpresponse_detection = True + assert ajaxpro_httpresponse_detection, "Ajaxpro HTTP_RESPONSE detection failed" diff --git a/bbot/test/test_step_2/module_tests/test_module_anubisdb.py b/bbot/test/test_step_2/module_tests/test_module_anubisdb.py new file mode 100644 index 0000000000..7b1bc6659d --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_anubisdb.py @@ -0,0 +1,13 @@ +from .base import ModuleTestBase + + +class TestAnubisdb(ModuleTestBase): + async def setup_after_prep(self, module_test): + module_test.module.abort_if = lambda e: False + module_test.httpx_mock.add_response( + url="https://jldc.me/anubis/subdomains/blacklanternsecurity.com", + json=["asdf.blacklanternsecurity.com", "zzzz.blacklanternsecurity.com"], + ) + + def check(self, module_test, events): + assert any(e.data == "asdf.blacklanternsecurity.com" for e in events), "Failed to detect subdomain" diff --git a/bbot/test/test_step_2/module_tests/test_module_apkpure.py b/bbot/test/test_step_2/module_tests/test_module_apkpure.py new file mode 100644 index 0000000000..65919c62e3 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_apkpure.py @@ -0,0 +1,71 @@ +from pathlib import Path +from .base import ModuleTestBase, tempapkfile + + +class TestAPKPure(ModuleTestBase): + modules_overrides = ["apkpure", "google_playstore", "speculate"] + apk_file = tempapkfile() + + async def setup_after_prep(self, module_test): + 
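# Every network touchpoint of the apkpure -> google_playstore chain is stubbed below: + # mock_dns() pins the target's DNS resolution and each httpx_mock.add_response() + # returns canned data for exactly one URL, so the whole test runs offline. +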
await module_test.mock_dns({"blacklanternsecurity.com": {"A": ["127.0.0.99"]}}) + module_test.httpx_mock.add_response( + url="https://play.google.com/store/search?q=blacklanternsecurity&c=apps", + text=""" + + + "blacklanternsecurity" - Android Apps on Google Play + + + + + """, + ) + module_test.httpx_mock.add_response( + url="https://play.google.com/store/apps/details?id=com.bbot.test", + text=""" + + + BBOT + + + + + + + """, + ) + module_test.httpx_mock.add_response( + url="https://d.apkpure.com/b/XAPK/com.bbot.test?version=latest", + content=self.apk_file, + headers={ + "Content-Type": "application/vnd.android.package-archive", + "Content-Disposition": "attachment; filename=com.bbot.test.apk", + }, + ) + + def check(self, module_test, events): + assert len(events) == 6 + assert 1 == len( + [ + e + for e in events + if e.type == "DNS_NAME" and e.data == "blacklanternsecurity.com" and e.scope_distance == 0 + ] + ), "Failed to emit target DNS_NAME" + assert 1 == len( + [e for e in events if e.type == "ORG_STUB" and e.data == "blacklanternsecurity" and e.scope_distance == 0] + ), "Failed to find ORG_STUB" + assert 1 == len( + [ + e + for e in events + if e.type == "MOBILE_APP" + and "android" in e.tags + and e.data["id"] == "com.bbot.test" + and e.data["url"] == "https://play.google.com/store/apps/details?id=com.bbot.test" + ] + ), "Failed to find bbot android app" + filesystem_event = [e for e in events if e.type == "FILESYSTEM" and "com.bbot.test.apk" in e.data["path"]] + assert 1 == len(filesystem_event), "Failed to download apk" + file = Path(filesystem_event[0].data["path"]) + assert file.is_file(), "Destination apk doesn't exist" diff --git a/bbot/test/test_step_2/module_tests/test_module_asn.py b/bbot/test/test_step_2/module_tests/test_module_asn.py new file mode 100644 index 0000000000..fbd3558a43 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_asn.py @@ -0,0 +1,239 @@ +from .base import ModuleTestBase + + +class TestASNBGPView(ModuleTestBase): + targets = ["8.8.8.8"] + module_name = "asn" + config_overrides = {"scope": {"report_distance": 2}} + + response_get_asn_bgpview = { + "status": "ok", + "status_message": "Query was successful", + "data": { + "ip": "8.8.8.8", + "ptr_record": "dns.google", + "prefixes": [ + { + "prefix": "8.8.8.0/24", + "ip": "8.8.8.0", + "cidr": 24, + "asn": {"asn": 15169, "name": "GOOGLE", "description": "Google LLC", "country_code": "US"}, + "name": "LVLT-GOGL-8-8-8", + "description": "Google LLC", + "country_code": "US", + } + ], + "rir_allocation": { + "rir_name": "ARIN", + "country_code": None, + "ip": "8.0.0.0", + "cidr": 9, + "prefix": "8.0.0.0/9", + "date_allocated": "1992-12-01 00:00:00", + "allocation_status": "allocated", + }, + "iana_assignment": { + "assignment_status": "legacy", + "description": "Administered by ARIN", + "whois_server": "whois.arin.net", + "date_assigned": None, + }, + "maxmind": {"country_code": None, "city": None}, + }, + "@meta": {"time_zone": "UTC", "api_version": 1, "execution_time": "567.18 ms"}, + } + response_get_emails_bgpview = { + "status": "ok", + "status_message": "Query was successful", + "data": { + "asn": 15169, + "name": "GOOGLE", + "description_short": "Google LLC", + "description_full": ["Google LLC"], + "country_code": "US", + "website": "https://about.google/intl/en/", + "email_contacts": ["network-abuse@google.com", "arin-contact@google.com"], + "abuse_contacts": ["network-abuse@google.com"], + "looking_glass": None, + "traffic_estimation": None, + "traffic_ratio": "Mostly Outbound", + 
"owner_address": ["1600 Amphitheatre Parkway", "Mountain View", "CA", "94043", "US"], + "rir_allocation": { + "rir_name": "ARIN", + "country_code": "US", + "date_allocated": "2000-03-30 00:00:00", + "allocation_status": "assigned", + }, + "iana_assignment": { + "assignment_status": None, + "description": None, + "whois_server": None, + "date_assigned": None, + }, + "date_updated": "2023-02-07 06:39:11", + }, + "@meta": {"time_zone": "UTC", "api_version": 1, "execution_time": "56.55 ms"}, + } + + async def setup_after_prep(self, module_test): + module_test.httpx_mock.add_response( + url="https://api.bgpview.io/ip/8.8.8.8", json=self.response_get_asn_bgpview + ) + module_test.httpx_mock.add_response( + url="https://api.bgpview.io/asn/15169", json=self.response_get_emails_bgpview + ) + module_test.module.sources = ["bgpview"] + + def check(self, module_test, events): + assert any(e.type == "ASN" for e in events) + assert any(e.type == "EMAIL_ADDRESS" for e in events) + + +class TestASNRipe(ModuleTestBase): + targets = ["8.8.8.8"] + module_name = "asn" + config_overrides = {"scope": {"report_distance": 2}} + + response_get_asn_ripe = { + "messages": [], + "see_also": [], + "version": "1.1", + "data_call_name": "network-info", + "data_call_status": "supported", + "cached": False, + "data": {"asns": ["15169"], "prefix": "8.8.8.0/24"}, + "query_id": "20230217212133-f278ff23-d940-4634-8115-a64dee06997b", + "process_time": 5, + "server_id": "app139", + "build_version": "live.2023.2.1.142", + "status": "ok", + "status_code": 200, + "time": "2023-02-17T21:21:33.428469", + } + response_get_asn_metadata_ripe = { + "messages": [], + "see_also": [], + "version": "4.1", + "data_call_name": "whois", + "data_call_status": "supported - connecting to ursa", + "cached": False, + "data": { + "records": [ + [ + {"key": "ASNumber", "value": "15169", "details_link": None}, + {"key": "ASName", "value": "GOOGLE", "details_link": None}, + {"key": "ASHandle", "value": "15169", "details_link": "https://stat.ripe.net/AS15169"}, + {"key": "RegDate", "value": "2000-03-30", "details_link": None}, + { + "key": "Ref", + "value": "https://rdap.arin.net/registry/autnum/15169", + "details_link": "https://rdap.arin.net/registry/autnum/15169", + }, + {"key": "source", "value": "ARIN", "details_link": None}, + ], + [ + {"key": "OrgAbuseHandle", "value": "ABUSE5250-ARIN", "details_link": None}, + {"key": "OrgAbuseName", "value": "Abuse", "details_link": None}, + {"key": "OrgAbusePhone", "value": "+1-650-253-0000", "details_link": None}, + { + "key": "OrgAbuseEmail", + "value": "network-abuse@google.com", + "details_link": "mailto:network-abuse@google.com", + }, + { + "key": "OrgAbuseRef", + "value": "https://rdap.arin.net/registry/entity/ABUSE5250-ARIN", + "details_link": "https://rdap.arin.net/registry/entity/ABUSE5250-ARIN", + }, + {"key": "source", "value": "ARIN", "details_link": None}, + ], + [ + {"key": "OrgName", "value": "Google LLC", "details_link": None}, + {"key": "OrgId", "value": "GOGL", "details_link": None}, + {"key": "Address", "value": "1600 Amphitheatre Parkway", "details_link": None}, + {"key": "City", "value": "Mountain View", "details_link": None}, + {"key": "StateProv", "value": "CA", "details_link": None}, + {"key": "PostalCode", "value": "94043", "details_link": None}, + {"key": "Country", "value": "US", "details_link": None}, + {"key": "RegDate", "value": "2000-03-30", "details_link": None}, + { + "key": "Comment", + "value": "Please note that the recommended way to file abuse complaints are located in the 
following links.", + "details_link": None, + }, + { + "key": "Comment", + "value": "To report abuse and illegal activity: https://www.google.com/contact/", + "details_link": None, + }, + { + "key": "Comment", + "value": "For legal requests: http://support.google.com/legal", + "details_link": None, + }, + {"key": "Comment", "value": "Regards,", "details_link": None}, + {"key": "Comment", "value": "The Google Team", "details_link": None}, + { + "key": "Ref", + "value": "https://rdap.arin.net/registry/entity/GOGL", + "details_link": "https://rdap.arin.net/registry/entity/GOGL", + }, + {"key": "source", "value": "ARIN", "details_link": None}, + ], + [ + {"key": "OrgTechHandle", "value": "ZG39-ARIN", "details_link": None}, + {"key": "OrgTechName", "value": "Google LLC", "details_link": None}, + {"key": "OrgTechPhone", "value": "+1-650-253-0000", "details_link": None}, + { + "key": "OrgTechEmail", + "value": "arin-contact@google.com", + "details_link": "mailto:arin-contact@google.com", + }, + { + "key": "OrgTechRef", + "value": "https://rdap.arin.net/registry/entity/ZG39-ARIN", + "details_link": "https://rdap.arin.net/registry/entity/ZG39-ARIN", + }, + {"key": "source", "value": "ARIN", "details_link": None}, + ], + [ + {"key": "RTechHandle", "value": "ZG39-ARIN", "details_link": None}, + {"key": "RTechName", "value": "Google LLC", "details_link": None}, + {"key": "RTechPhone", "value": "+1-650-253-0000", "details_link": None}, + {"key": "RTechEmail", "value": "arin-contact@google.com", "details_link": None}, + { + "key": "RTechRef", + "value": "https://rdap.arin.net/registry/entity/ZG39-ARIN", + "details_link": None, + }, + {"key": "source", "value": "ARIN", "details_link": None}, + ], + ], + "irr_records": [], + "authorities": ["arin"], + "resource": "15169", + "query_time": "2023-02-17T21:25:00", + }, + "query_id": "20230217212529-75f57efd-59f4-473f-8bdd-803062e94290", + "process_time": 268, + "server_id": "app143", + "build_version": "live.2023.2.1.142", + "status": "ok", + "status_code": 200, + "time": "2023-02-17T21:25:29.417812", + } + + async def setup_after_prep(self, module_test): + module_test.httpx_mock.add_response( + url="https://stat.ripe.net/data/network-info/data.json?resource=8.8.8.8", + json=self.response_get_asn_ripe, + ) + module_test.httpx_mock.add_response( + url="https://stat.ripe.net/data/whois/data.json?resource=15169", + json=self.response_get_asn_metadata_ripe, + ) + module_test.module.sources = ["ripe"] + + def check(self, module_test, events): + assert any(e.type == "ASN" for e in events) + assert any(e.type == "EMAIL_ADDRESS" for e in events) diff --git a/bbot/test/test_step_2/module_tests/test_module_asset_inventory.py b/bbot/test/test_step_2/module_tests/test_module_asset_inventory.py new file mode 100644 index 0000000000..5cb2f36033 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_asset_inventory.py @@ -0,0 +1,83 @@ +from .base import ModuleTestBase + + +class TestAsset_Inventory(ModuleTestBase): + targets = ["127.0.0.1", "bbottest.notreal"] + scan_name = "asset_inventory_test" + config_overrides = {"dns": {"minimal": False}, "modules": {"portscan": {"ports": "9999"}}} + modules_overrides = ["asset_inventory", "portscan", "sslcert"] + + masscan_output = """{ "ip": "127.0.0.1", "timestamp": "1680197558", "ports": [ {"port": 9999, "proto": "tcp", "status": "open", "reason": "syn-ack", "ttl": 54} ] }""" + + async def setup_before_prep(self, module_test): + async def run_masscan(command, *args, **kwargs): + if "masscan" in command[:2]: + targets = 
open(command[11]).read().splitlines() + yield "[" + for l in self.masscan_output.splitlines(): + if "127.0.0.1/32" in targets: + yield self.masscan_output + yield "]" + else: + async for l in module_test.scan.helpers.run_live(command, *args, **kwargs): + yield l + + module_test.monkeypatch.setattr(module_test.scan.helpers, "run_live", run_masscan) + + await module_test.mock_dns( + { + "1.0.0.127.in-addr.arpa": {"PTR": ["www.bbottest.notreal"]}, + "www.bbottest.notreal": {"A": ["127.0.0.1"]}, + } + ) + + def check(self, module_test, events): + assert any(e.data == "127.0.0.1:9999" for e in events), "No open port found" + assert any(e.data == "www.bbottest.notreal" for e in events), "No DNS name found" + filename = next(module_test.scan.home.glob("asset-inventory.csv")) + with open(filename) as f: + content = f.read() + assert "www.bbottest.notreal,,,127.0.0.1" in content + filename = next(module_test.scan.home.glob("asset-inventory-ip-addresses-table*.txt")) + with open(filename) as f: + assert "127.0.0.0/16" in f.read() + filename = next(module_test.scan.home.glob("asset-inventory-domains-table*.txt")) + with open(filename) as f: + content = f.read() + assert "bbottest.notreal" in content + + +class TestAsset_InventoryEmitPrevious(TestAsset_Inventory): + config_overrides = {"dns": {"minimal": False}, "modules": {"asset_inventory": {"use_previous": True}}} + modules_overrides = ["asset_inventory"] + + def check(self, module_test, events): + assert any(e.data == "www.bbottest.notreal:9999" for e in events), "No open port found" + assert any(e.data == "www.bbottest.notreal" for e in events), "No DNS name found" + filename = next(module_test.scan.home.glob("asset-inventory.csv")) + with open(filename) as f: + content = f.read() + assert "www.bbottest.notreal,,,127.0.0.1" in content + filename = next(module_test.scan.home.glob("asset-inventory-ip-addresses-table*.txt")) + with open(filename) as f: + assert "127.0.0.0/16" in f.read() + filename = next(module_test.scan.home.glob("asset-inventory-domains-table*.txt")) + with open(filename) as f: + content = f.read() + assert "bbottest.notreal" in content + + +class TestAsset_InventoryRecheck(TestAsset_Inventory): + config_overrides = { + "dns": {"minimal": False}, + "modules": {"asset_inventory": {"use_previous": True, "recheck": True}}, + } + modules_overrides = ["asset_inventory"] + + def check(self, module_test, events): + assert not any(e.type == "OPEN_TCP_PORT" for e in events), "Open port was emitted" + assert any(e.data == "www.bbottest.notreal" for e in events), "No DNS name found" + filename = next(module_test.scan.home.glob("asset-inventory.csv")) + with open(filename) as f: + content = f.read() + assert "www.bbottest.notreal,,,127.0.0.1" in content diff --git a/bbot/test/test_step_2/module_tests/test_module_azure_realm.py b/bbot/test/test_step_2/module_tests/test_module_azure_realm.py new file mode 100644 index 0000000000..2b1317629f --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_azure_realm.py @@ -0,0 +1,32 @@ +from .base import ModuleTestBase + + +class TestAzure_Realm(ModuleTestBase): + targets = ["evilcorp.com"] + config_overrides = {"scope": {"report_distance": 1}} + + response_json = { + "State": 3, + "UserState": 2, + "Login": "test@evilcorp.com", + "NameSpaceType": "Federated", + "DomainName": "evilcorp.com", + "FederationGlobalVersion": -1, + "AuthURL": 
"https://evilcorp.okta.com/app/office365/deadbeef/sso/wsfed/passive?username=test%40evilcorp.com&wa=wsignin1.0&wtrevilcorplm=urn%3afederation%3aMicrosoftOnline&wctx=", + "FederationBrandName": "EvilCorp", + "AuthNForwardType": 1, + "CloudInstanceName": "microsoftonline.com", + "CloudInstanceIssuerUri": "urn:federation:MicrosoftOnline", + } + + async def setup_after_prep(self, module_test): + await module_test.mock_dns({"evilcorp.com": {"A": ["127.0.0.5"]}}) + module_test.httpx_mock.add_response( + url="https://login.microsoftonline.com/getuserrealm.srf?login=test@evilcorp.com", + json=self.response_json, + ) + + def check(self, module_test, events): + assert any(e.data == "https://evilcorp.okta.com/app/office365/deadbeef/sso/wsfed/passive" for e in events), ( + "Failed to detect URL" + ) diff --git a/bbot/test/test_step_2/module_tests/test_module_azure_tenant.py b/bbot/test/test_step_2/module_tests/test_module_azure_tenant.py new file mode 100644 index 0000000000..b7986d3a11 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_azure_tenant.py @@ -0,0 +1,107 @@ +from .base import ModuleTestBase + + +class TestAzure_Tenant(ModuleTestBase): + tenant_response = """ + + + + http://schemas.microsoft.com/exchange/2010/Autodiscover/Autodiscover/GetFederationInformationResponse + + 15 + 20 + 6411 + 14 + Exchange2015 + + + + + + NoError + + outlook.com + + blacklanternsecurity.onmicrosoft.com + + + + https://login.microsoftonline.com/extSTS.srf + urn:federation:MicrosoftOnline + + + + + +""" + + openid_config_azure = { + "token_endpoint": "https://login.windows.net/cc74fc12-4142-400e-a653-f98bdeadbeef/oauth2/token", + "token_endpoint_auth_methods_supported": ["client_secret_post", "private_key_jwt", "client_secret_basic"], + "jwks_uri": "https://login.windows.net/common/discovery/keys", + "response_modes_supported": ["query", "fragment", "form_post"], + "subject_types_supported": ["pairwise"], + "id_token_signing_alg_values_supported": ["RS256"], + "response_types_supported": ["code", "id_token", "code id_token", "token id_token", "token"], + "scopes_supported": ["openid"], + "issuer": "https://sts.windows.net/cc74fc12-4142-400e-a653-f98bdeadbeef/", + "microsoft_multi_refresh_token": True, + "authorization_endpoint": "https://login.windows.net/cc74fc12-4142-400e-a653-f98bdeadbeef/oauth2/authorize", + "device_authorization_endpoint": "https://login.windows.net/cc74fc12-4142-400e-a653-f98bdeadbeef/oauth2/devicecode", + "http_logout_supported": True, + "frontchannel_logout_supported": True, + "end_session_endpoint": "https://login.windows.net/cc74fc12-4142-400e-a653-f98bdeadbeef/oauth2/logout", + "claims_supported": [ + "sub", + "iss", + "cloud_instance_name", + "cloud_instance_host_name", + "cloud_graph_host_name", + "msgraph_host", + "aud", + "exp", + "iat", + "auth_time", + "acr", + "amr", + "nonce", + "email", + "given_name", + "family_name", + "nickname", + ], + "check_session_iframe": "https://login.windows.net/cc74fc12-4142-400e-a653-f98bdeadbeef/oauth2/checksession", + "userinfo_endpoint": "https://login.windows.net/cc74fc12-4142-400e-a653-f98bdeadbeef/openid/userinfo", + "kerberos_endpoint": "https://login.windows.net/cc74fc12-4142-400e-a653-f98bdeadbeef/kerberos", + "tenant_region_scope": "NA", + "cloud_instance_name": "microsoftonline.com", + "cloud_graph_host_name": "graph.windows.net", + "msgraph_host": "graph.microsoft.com", + "rbac_url": "https://pas.windows.net", + } + + async def setup_after_prep(self, module_test): + module_test.httpx_mock.add_response( + method="POST", 
+ url="https://autodiscover-s.outlook.com/autodiscover/autodiscover.svc", + text=self.tenant_response, + ) + module_test.httpx_mock.add_response( + url="https://login.windows.net/blacklanternsecurity.com/.well-known/openid-configuration", + json=self.openid_config_azure, + ) + + def check(self, module_test, events): + assert any( + e.type.startswith("DNS_NAME") + and e.data == "blacklanternsecurity.onmicrosoft.com" + and "affiliate" in e.tags + for e in events + ) + assert any( + e.type == "AZURE_TENANT" + and e.data["tenant-id"] == "cc74fc12-4142-400e-a653-f98bdeadbeef" + and "blacklanternsecurity.onmicrosoft.com" in e.data["domains"] + and "blacklanternsecurity" in e.data["tenant-names"] + for e in events + ) diff --git a/bbot/test/test_step_2/module_tests/test_module_baddns.py b/bbot/test/test_step_2/module_tests/test_module_baddns.py new file mode 100644 index 0000000000..877e973b2b --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_baddns.py @@ -0,0 +1,67 @@ +from .base import ModuleTestBase + + +class BaseTestBaddns(ModuleTestBase): + modules_overrides = ["baddns"] + targets = ["bad.dns"] + config_overrides = {"dns": {"minimal": False}} + + async def dispatchWHOIS(x): + return None + + def select_modules(self): + from baddns.base import get_all_modules + + selected_modules = [] + for m in get_all_modules(): + if m.name in ["CNAME"]: + selected_modules.append(m) + return selected_modules + + +class TestBaddns_cname_nxdomain(BaseTestBaddns): + async def setup_after_prep(self, module_test): + from bbot.modules import baddns as baddns_module + from baddns.lib.whoismanager import WhoisManager + + await module_test.mock_dns( + {"bad.dns": {"CNAME": ["baddns.azurewebsites.net."]}, "_NXDOMAIN": ["baddns.azurewebsites.net"]} + ) + module_test.monkeypatch.setattr(baddns_module.baddns, "select_modules", self.select_modules) + module_test.monkeypatch.setattr(WhoisManager, "dispatchWHOIS", self.dispatchWHOIS) + + def check(self, module_test, events): + assert any(e.data == "baddns.azurewebsites.net" for e in events), "CNAME detection failed" + assert any(e.type == "VULNERABILITY" for e in events), "Failed to emit VULNERABILITY" + assert any("baddns-cname" in e.tags for e in events), "Failed to add baddns tag" + + +class TestBaddns_cname_signature(BaseTestBaddns): + targets = ["bad.dns:8888"] + modules_overrides = ["baddns", "speculate"] + + async def setup_after_prep(self, module_test): + from bbot.modules import baddns as baddns_module + from baddns.base import BadDNS_base + from baddns.lib.whoismanager import WhoisManager + + def set_target(self, target): + return "127.0.0.1:8888" + + expect_args = {"method": "GET", "uri": "/"} + respond_args = {"response_data": "
<html><body>Oops! We couldn’t find that page.</body></html>
", "status": 200} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + await module_test.mock_dns( + {"bad.dns": {"CNAME": ["baddns.bigcartel.com."]}, "baddns.bigcartel.com": {"A": ["127.0.0.1"]}} + ) + module_test.monkeypatch.setattr(baddns_module.baddns, "select_modules", self.select_modules) + module_test.monkeypatch.setattr(BadDNS_base, "set_target", set_target) + module_test.monkeypatch.setattr(WhoisManager, "dispatchWHOIS", self.dispatchWHOIS) + + def check(self, module_test, events): + assert any(e for e in events) + assert any(e.type == "VULNERABILITY" and "bigcartel.com" in e.data["description"] for e in events), ( + "Failed to emit VULNERABILITY" + ) + assert any("baddns-cname" in e.tags for e in events), "Failed to add baddns tag" diff --git a/bbot/test/test_step_2/module_tests/test_module_baddns_direct.py b/bbot/test/test_step_2/module_tests/test_module_baddns_direct.py new file mode 100644 index 0000000000..b2b49717c8 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_baddns_direct.py @@ -0,0 +1,62 @@ +from .base import ModuleTestBase +from bbot.modules.base import BaseModule + + +class BaseTestBaddns(ModuleTestBase): + modules_overrides = ["baddns_direct"] + targets = ["bad.dns"] + config_overrides = {"dns": {"minimal": False}, "cloudcheck": True} + + +class TestBaddns_direct_cloudflare(BaseTestBaddns): + targets = ["bad.dns:8888"] + modules_overrides = ["baddns_direct"] + + async def dispatchWHOIS(self): + return None + + class DummyModule(BaseModule): + watched_events = ["DNS_NAME"] + _name = "dummy_module" + events_seen = [] + + async def handle_event(self, event): + if event.data == "bad.dns": + await self.helpers.sleep(0.5) + self.events_seen.append(event.data) + url = "http://bad.dns:8888/" + url_event = self.scan.make_event( + url, "URL", parent=self.scan.root_event, tags=["cdn-cloudflare", "in-scope", "status-401"] + ) + if url_event is not None: + await self.emit_event(url_event) + + async def setup_after_prep(self, module_test): + from baddns.base import BadDNS_base + from baddns.lib.whoismanager import WhoisManager + + def set_target(self, target): + return "127.0.0.1:8888" + + self.module_test = module_test + + self.dummy_module = self.DummyModule(module_test.scan) + module_test.scan.modules["dummy_module"] = self.dummy_module + + expect_args = {"method": "GET", "uri": "/"} + respond_args = {"response_data": "The specified bucket does not exist", "status": 401} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + await module_test.mock_dns({"bad.dns": {"A": ["127.0.0.1"]}}) + + module_test.monkeypatch.setattr(BadDNS_base, "set_target", set_target) + module_test.monkeypatch.setattr(WhoisManager, "dispatchWHOIS", self.dispatchWHOIS) + + def check(self, module_test, events): + assert any( + e.type == "FINDING" + and "Possible [AWS Bucket Takeover Detection] via direct BadDNS analysis. 
Indicator: [[Words: The specified bucket does not exist | Condition: and | Part: body] Matchers-Condition: and] Trigger: [self] baddns Module: [CNAME]" + in e.data["description"] + for e in events + ), "Failed to emit FINDING" + assert any("baddns-cname" in e.tags for e in events), "Failed to add baddns tag" diff --git a/bbot/test/test_step_2/module_tests/test_module_baddns_zone.py b/bbot/test/test_step_2/module_tests/test_module_baddns_zone.py new file mode 100644 index 0000000000..d8138a3f7c --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_baddns_zone.py @@ -0,0 +1,62 @@ +import dns +from .base import ModuleTestBase + + +class BaseTestBaddns_zone(ModuleTestBase): + modules_overrides = ["baddns_zone"] + targets = ["bad.dns"] + config_overrides = {"dns": {"minimal": False}} + + async def dispatchWHOIS(x): + return None + + +class TestBaddns_zone_zonetransfer(BaseTestBaddns_zone): + async def setup_after_prep(self, module_test): + from baddns.lib.whoismanager import WhoisManager + + def from_xfr(*args, **kwargs): + zone_text = """ +@ 600 IN SOA ns.bad.dns. admin.bad.dns. ( + 1 ; Serial + 3600 ; Refresh + 900 ; Retry + 604800 ; Expire + 86400 ) ; Minimum TTL +@ 600 IN NS ns.bad.dns. +@ 600 IN A 127.0.0.1 +asdf 600 IN A 127.0.0.1 +zzzz 600 IN AAAA dead::beef +""" + zone = dns.zone.from_text(zone_text, origin="bad.dns.") + return zone + + await module_test.mock_dns({"bad.dns": {"NS": ["ns1.bad.dns."]}, "ns1.bad.dns": {"A": ["127.0.0.1"]}}) + module_test.monkeypatch.setattr("dns.zone.from_xfr", from_xfr) + module_test.monkeypatch.setattr(WhoisManager, "dispatchWHOIS", self.dispatchWHOIS) + + def check(self, module_test, events): + assert any(e.data == "zzzz.bad.dns" for e in events), "Zone transfer failed (1)" + assert any(e.data == "asdf.bad.dns" for e in events), "Zone transfer failed (2)" + assert any(e.type == "VULNERABILITY" for e in events), "Failed to emit VULNERABILITY" + assert any("baddns-zonetransfer" in e.tags for e in events), "Failed to add baddns tag" + + +class TestBaddns_zone_nsec(BaseTestBaddns_zone): + async def setup_after_prep(self, module_test): + from baddns.lib.whoismanager import WhoisManager + + await module_test.mock_dns( + { + "bad.dns": {"A": ["127.0.0.5"], "NSEC": ["asdf.bad.dns"]}, + "asdf.bad.dns": {"NSEC": ["zzzz.bad.dns"]}, + "zzzz.bad.dns": {"NSEC": ["xyz.bad.dns"]}, + } + ) + module_test.monkeypatch.setattr(WhoisManager, "dispatchWHOIS", self.dispatchWHOIS) + + def check(self, module_test, events): + assert any(e.data == "zzzz.bad.dns" for e in events), "NSEC Walk Failed (1)" + assert any(e.data == "xyz.bad.dns" for e in events), "NSEC Walk Failed (2)" + assert any(e.type == "VULNERABILITY" for e in events), "Failed to emit VULNERABILITY" + assert any("baddns-nsec" in e.tags for e in events), "Failed to add baddns tag" diff --git a/bbot/test/test_step_2/module_tests/test_module_badsecrets.py b/bbot/test/test_step_2/module_tests/test_module_badsecrets.py new file mode 100644 index 0000000000..9eda654eb6 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_badsecrets.py @@ -0,0 +1,164 @@ +from .base import ModuleTestBase, tempwordlist + + +class TestBadSecrets(ModuleTestBase): + targets = [ + "http://127.0.0.1:8888/", + "http://127.0.0.1:8888/test.aspx", + "http://127.0.0.1:8888/cookie.aspx", + "http://127.0.0.1:8888/cookie2.aspx", + "http://127.0.0.1:8888/cookie3.aspx", + ] + + sample_viewstate = """ +
+
+ +
+ +
+ + + +
+
+ + +""" + + sample_jsf_notvuln = """ +
+<html><body><form method="POST"><input type="hidden" name="javax.faces.ViewState" value="AHo0wmLu5ceItIi+I7XkEi1GAb4h12WZ894pA+Z4OH7bco2jXEy1RSCWwjtJcZNbWPcvPqL5zzfl03DoeMZfGGX7a9PSv+fUT8MAeKNouAGj1dZuO8srXt8xZIGg+wPCWWCzcX6IhWOtgWUwiXeSojCDTKXklsYt+kAAAAk5wOsXvb2lTJoO0Q==" /></form></body></html>
+""" + + modules_overrides = ["badsecrets", "httpx"] + + async def setup_after_prep(self, module_test): + expect_args = {"uri": "/test.aspx"} + respond_args = {"response_data": self.sample_viewstate} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + respond_args = {"response_data": self.sample_jsf_notvuln} + module_test.set_expect_requests(respond_args=respond_args) + + expect_args = {"uri": "/cookie.aspx"} + respond_args = { + "response_data": "
<html><body><p>JWT Cookie Test</p></body></html>
", + "headers": { + "set-cookie": "vulnjwt=eyJhbGciOiJIUzI1NiJ9.eyJJc3N1ZXIiOiJJc3N1ZXIiLCJVc2VybmFtZSI6IkJhZFNlY3JldHMiLCJleHAiOjE1OTMxMzM0ODMsImlhdCI6MTQ2NjkwMzA4M30.ovqRikAo_0kKJ0GVrAwQlezymxrLGjcEiW_s3UJMMCo; secure" + }, + } + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + expect_args = {"uri": "/cookie2.aspx"} + respond_args = { + "response_data": "
<html><body><p>Express Cookie Test (ES)</p></body></html>
", + "headers": { + "set-cookie": "connect.sid=s%3A8FnPwdeM9kdGTZlWvdaVtQ0S1BCOhY5G.qys7H2oGSLLdRsEq7sqh7btOohHsaRKqyjV4LiVnBvc; Path=/; Expires=Wed, 05 Apr 2023 04:47:29 GMT; HttpOnly" + }, + } + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + expect_args = {"uri": "/cookie3.aspx"} + respond_args = { + "response_data": "
<html><body><p>Express Cookie Test (CS)</p></body></html>
", + "headers": { + "set-cookie": [ + "foo=eyJ1c2VybmFtZSI6IkJib3RJc0xpZmUifQ==; path=/; HttpOnly", + "foo.sig=zOQU7v7aTe_3zu7tnVuHi1MJ2DU; path=/; HttpOnly", + ], + }, + } + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + def check(self, module_test, events): + SecretFound = False + IdentifyOnly = False + CookieBasedDetection = False + CookieBasedDetection_2 = False + CookieBasedDetection_3 = False + + for e in events: + if ( + e.type == "VULNERABILITY" + and "Known Secret Found." in e.data["description"] + and "validationKey: 0F97BAE23F6F36801ABDB5F145124E00A6F795A97093D778EE5CD24F35B78B6FC4C0D0D4420657689C4F321F8596B59E83F02E296E970C4DEAD2DFE226294979 validationAlgo: SHA1 encryptionKey: 8CCFBC5B7589DD37DC3B4A885376D7480A69645DAEEC74F418B4877BEC008156 encryptionAlgo: AES" + in e.data["description"] + ): + SecretFound = True + + if ( + e.type == "FINDING" + and "AHo0wmLu5ceItIi+I7XkEi1GAb4h12WZ894pA+Z4OH7bco2jXEy1RSCWwjtJcZNbWPcvPqL5zzfl03DoeMZfGGX7a9PSv+fUT8MAeKNouAGj1dZuO8srXt8xZIGg+wPCWWCzcX6IhWOtgWUwiXeSojCDTKXklsYt+kAAAAk5wOsXvb2lTJoO0Q==" + in e.data["description"] + ): + IdentifyOnly = True + + if ( + e.type == "VULNERABILITY" + and "1234" in e.data["description"] + and "eyJhbGciOiJIUzI1NiJ9.eyJJc3N1ZXIiOiJJc3N1ZXIiLCJVc2VybmFtZSI6IkJhZFNlY3JldHMiLCJleHAiOjE1OTMxMzM0ODMsImlhdCI6MTQ2NjkwMzA4M30.ovqRikAo_0kKJ0GVrAwQlezymxrLGjcEiW_s3UJMMCo" + in e.data["description"] + ): + CookieBasedDetection = True + + if ( + e.type == "VULNERABILITY" + and "keyboard cat" in e.data["description"] + and "s%3A8FnPwdeM9kdGTZlWvdaVtQ0S1BCOhY5G.qys7H2oGSLLdRsEq7sqh7btOohHsaRKqyjV4LiVnBvc" + in e.data["description"] + ): + CookieBasedDetection_2 = True + + if ( + e.type == "VULNERABILITY" + and "Express.js Secret (cookie-session)" in e.data["description"] + and "zOQU7v7aTe_3zu7tnVuHi1MJ2DU" in e.data["description"] + ): + CookieBasedDetection_3 = True + + assert SecretFound, "No secret found" + assert IdentifyOnly, "No crypto product identified" + assert CookieBasedDetection, "No JWT cookie vuln detected" + assert CookieBasedDetection_2, "No Express.js cookie vuln detected" + assert CookieBasedDetection_3, "No Express.js (cs dual cookies) vuln detected" + + +class TestBadSecrets_customsecrets(TestBadSecrets): + config_overrides = { + "modules": { + "badsecrets": { + "custom_secrets": tempwordlist( + [ + "DEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEF,DEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEF" + ] + ) + } + } + } + + sample_viewstate = """ +
+
+ +
+ +
+ + + +
+
+ + +""" + + def check(self, module_test, events): + SecretFound = False + for e in events: + if ( + e.type == "VULNERABILITY" + and "Known Secret Found." in e.data["description"] + and "DEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEF" in e.data["description"] + ): + SecretFound = True + assert SecretFound, "No secret found" diff --git a/bbot/test/test_step_2/module_tests/test_module_bevigil.py b/bbot/test/test_step_2/module_tests/test_module_bevigil.py new file mode 100644 index 0000000000..7e616752fa --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_bevigil.py @@ -0,0 +1,51 @@ +import random + +from .base import ModuleTestBase + + +class TestBeVigil(ModuleTestBase): + module_name = "bevigil" + config_overrides = {"modules": {"bevigil": {"api_key": "asdf", "urls": True}}} + + async def setup_after_prep(self, module_test): + module_test.httpx_mock.add_response( + url="https://osint.bevigil.com/api/blacklanternsecurity.com/subdomains/", + match_headers={"X-Access-Token": "asdf"}, + json={ + "domain": "blacklanternsecurity.com", + "subdomains": [ + "asdf.blacklanternsecurity.com", + ], + }, + ) + module_test.httpx_mock.add_response( + url="https://osint.bevigil.com/api/blacklanternsecurity.com/urls/", + json={"domain": "blacklanternsecurity.com", "urls": ["https://asdf.blacklanternsecurity.com"]}, + ) + + def check(self, module_test, events): + assert any(e.data == "asdf.blacklanternsecurity.com" for e in events), "Failed to detect subdomain" + assert any(e.data == "https://asdf.blacklanternsecurity.com/" for e in events), "Failed to detect url" + + +class TestBeVigilMultiKey(TestBeVigil): + api_keys = ["1234", "4321", "asdf", "fdsa"] + random.shuffle(api_keys) + config_overrides = {"modules": {"bevigil": {"api_key": api_keys, "urls": True}}} + + async def setup_after_prep(self, module_test): + module_test.httpx_mock.add_response( + url="https://osint.bevigil.com/api/blacklanternsecurity.com/subdomains/", + match_headers={"X-Access-Token": "fdsa"}, + json={ + "domain": "blacklanternsecurity.com", + "subdomains": [ + "asdf.blacklanternsecurity.com", + ], + }, + ) + module_test.httpx_mock.add_response( + match_headers={"X-Access-Token": "asdf"}, + url="https://osint.bevigil.com/api/blacklanternsecurity.com/urls/", + json={"domain": "blacklanternsecurity.com", "urls": ["https://asdf.blacklanternsecurity.com"]}, + ) diff --git a/bbot/test/test_step_2/module_tests/test_module_binaryedge.py b/bbot/test/test_step_2/module_tests/test_module_binaryedge.py new file mode 100644 index 0000000000..348e2efb24 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_binaryedge.py @@ -0,0 +1,33 @@ +from .base import ModuleTestBase + + +class TestBinaryEdge(ModuleTestBase): + config_overrides = {"modules": {"binaryedge": {"api_key": "asdf"}}} + + async def setup_before_prep(self, module_test): + module_test.httpx_mock.add_response( + url="https://api.binaryedge.io/v2/query/domains/subdomain/blacklanternsecurity.com", + match_headers={"X-Key": "asdf"}, + json={ + "query": "blacklanternsecurity.com", + "page": 1, + "pagesize": 100, + "total": 1, + "events": [ + "asdf.blacklanternsecurity.com", + ], + }, + ) + module_test.httpx_mock.add_response( + url="https://api.binaryedge.io/v2/user/subscription", + match_headers={"X-Key": "asdf"}, + json={ + "subscription": {"name": "Free"}, + "end_date": "2023-06-17", + "requests_left": 249, + "requests_plan": 250, + }, + ) + + def check(self, module_test, events): + assert any(e.data == "asdf.blacklanternsecurity.com" for e in 
events), "Failed to detect subdomain" diff --git a/bbot/test/test_step_2/module_tests/test_module_bucket_amazon.py b/bbot/test/test_step_2/module_tests/test_module_bucket_amazon.py new file mode 100644 index 0000000000..5015d863b6 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_bucket_amazon.py @@ -0,0 +1,97 @@ +import re + +from .base import ModuleTestBase +from bbot.core.helpers.misc import rand_string + +__all__ = ["random_bucket_name_1", "random_bucket_name_2", "random_bucket_name_3", "Bucket_Amazon_Base"] + +# first one is a normal bucket +random_bucket_name_1 = rand_string(15, digits=False) +# second one is open/vulnerable +random_bucket_name_2 = rand_string(15, digits=False) +# third one is a mutation +random_bucket_name_3 = f"{random_bucket_name_2}-dev" + + +class Bucket_Amazon_Base(ModuleTestBase): + targets = ["http://127.0.0.1:8888"] + provider = "amazon" + + random_bucket_1 = f"{random_bucket_name_1}.s3.amazonaws.com" + random_bucket_2 = f"{random_bucket_name_2}.s3-ap-southeast-2.amazonaws.com" + random_bucket_3 = f"{random_bucket_name_3}.s3.amazonaws.com" + + open_bucket_body = """ + vpn-static1000falsestyle.css2017-03-18T06:41:59.000Z"bf9e72bdab09b785f05ff0395023cc35"429STANDARD""" + + @property + def config_overrides(self): + return {"modules": {self.module_name: {"permutations": True}}} + + @property + def module_name(self): + return self.__class__.__name__.lower().split("test")[-1] + + @property + def modules_overrides(self): + return ["excavate", "speculate", "httpx", self.module_name, "cloudcheck"] + + def url_setup(self): + self.url_1 = f"https://{self.random_bucket_1}/" + self.url_2 = f"https://{self.random_bucket_2}/" + self.url_3 = f"https://{self.random_bucket_3}/" + + def bucket_setup(self): + self.url_setup() + self.website_body = f""" +
+ + """ + + async def setup_after_prep(self, module_test): + self.bucket_setup() + # patch mutations + module_test.scan.helpers.word_cloud.mutations = lambda b, cloud=False: [ + (b, "dev"), + ] + module_test.set_expect_requests( + expect_args={"method": "GET", "uri": "/"}, respond_args={"response_data": self.website_body} + ) + if module_test.module.supports_open_check: + module_test.httpx_mock.add_response( + url=self.url_2, + text=self.open_bucket_body, + ) + module_test.httpx_mock.add_response( + url=self.url_3, + text="", + ) + module_test.httpx_mock.add_response(url=re.compile(".*"), text="", status_code=404) + + def check(self, module_test, events): + # make sure buckets were excavated + assert any(e.type == "STORAGE_BUCKET" and str(e.module) == f"cloud_{self.provider}" for e in events), ( + f'bucket not found for module "{self.module_name}"' + ) + # make sure open buckets were found + if module_test.module.supports_open_check: + assert any(e.type == "FINDING" and str(e.module) == self.module_name for e in events), ( + f'open bucket not found for module "{self.module_name}"' + ) + for e in events: + if e.type == "FINDING" and str(e.module) == self.module_name: + url = e.data.get("url", "") + assert self.random_bucket_2 in url + assert self.random_bucket_1 not in url + assert self.random_bucket_3 not in url + # make sure bucket mutations were found + assert any( + e.type == "STORAGE_BUCKET" + and str(e.module) == self.module_name + and f"{random_bucket_name_3}" in e.data["url"] + for e in events + ), f'bucket (dev mutation: {self.random_bucket_3}) not found for module "{self.module_name}"' + + +class TestBucket_Amazon(Bucket_Amazon_Base): + pass diff --git a/bbot/test/test_step_2/module_tests/test_module_bucket_azure.py b/bbot/test/test_step_2/module_tests/test_module_bucket_azure.py new file mode 100644 index 0000000000..3b172eaaba --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_bucket_azure.py @@ -0,0 +1,56 @@ +from .test_module_bucket_amazon import * +from .base import ModuleTestBase + + +class TestBucket_Azure(Bucket_Amazon_Base): + provider = "azure" + random_bucket_1 = f"{random_bucket_name_1}.blob.core.windows.net" + random_bucket_2 = f"{random_bucket_name_2}.blob.core.windows.net" + random_bucket_3 = f"{random_bucket_name_3}.blob.core.windows.net" + + def url_setup(self): + self.url_1 = f"https://{self.random_bucket_1}" + self.url_2 = f"https://{self.random_bucket_2}" + self.url_3 = f"https://{self.random_bucket_3}/{random_bucket_name_3}?restype=container" + + +class TestBucket_Azure_NoDup(ModuleTestBase): + targets = ["tesla.com"] + module_name = "bucket_azure" + config_overrides = {"cloudcheck": True} + + async def setup_before_prep(self, module_test): + module_test.httpx_mock.add_response( + url="https://tesla.blob.core.windows.net/tesla?restype=container", + text="", + ) + await module_test.mock_dns( + { + "tesla.com": {"A": ["1.2.3.4"]}, + "tesla.blob.core.windows.net": {"A": ["1.2.3.4"]}, + } + ) + + def check(self, module_test, events): + assert 1 == len([e for e in events if e.type == "STORAGE_BUCKET"]) + bucket_event = [e for e in events if e.type == "STORAGE_BUCKET"][0] + assert bucket_event.data["name"] == "tesla" + assert bucket_event.data["url"] == "https://tesla.blob.core.windows.net/" + assert ( + bucket_event.discovery_context + == "bucket_azure tried bucket variations of tesla.com and found STORAGE_BUCKET at https://tesla.blob.core.windows.net/" + ) + + +class TestBucket_Azure_Dup(TestBucket_Azure_NoDup): + """ + This tests _suppress_chain_dupes functionality to
make sure it works as expected + """ + + async def setup_after_prep(self, module_test): + from bbot.core.event.base import STORAGE_BUCKET + + module_test.monkeypatch.setattr(STORAGE_BUCKET, "_suppress_chain_dupes", False) + + def check(self, module_test, events): + assert 2 == len([e for e in events if e.type == "STORAGE_BUCKET"]) diff --git a/bbot/test/test_step_2/module_tests/test_module_bucket_digitalocean.py b/bbot/test/test_step_2/module_tests/test_module_bucket_digitalocean.py new file mode 100644 index 0000000000..4d6e1ca490 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_bucket_digitalocean.py @@ -0,0 +1,10 @@ +from .test_module_bucket_amazon import * + + +class TestBucket_DigitalOcean(Bucket_Amazon_Base): + provider = "digitalocean" + random_bucket_1 = f"{random_bucket_name_1}.fra1.digitaloceanspaces.com" + random_bucket_2 = f"{random_bucket_name_2}.fra1.digitaloceanspaces.com" + random_bucket_3 = f"{random_bucket_name_3}.fra1.digitaloceanspaces.com" + + open_bucket_body = """cloud011000falsetest.doc2020-10-14T15:23:37.545Z"4d25c8699f7347acc9f41e57148c62c0"13362425STANDARD19578831957883Normal""" diff --git a/bbot/test/test_step_2/module_tests/test_module_bucket_file_enum.py b/bbot/test/test_step_2/module_tests/test_module_bucket_file_enum.py new file mode 100644 index 0000000000..d28d44aad8 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_bucket_file_enum.py @@ -0,0 +1,40 @@ +from .base import ModuleTestBase + + +class TestBucket_File_Enum(ModuleTestBase): + targets = ["http://127.0.0.1:8888"] + modules_overrides = ["bucket_file_enum", "filedownload", "httpx", "excavate", "cloudcheck"] + config_overrides = {"scope": {"report_distance": 5}} + + open_bucket_url = "https://testbucket.s3.amazonaws.com/" + open_bucket_body = """testbucket1000falseindex.html2023-05-22T23:04:38.000Z"4a2d2d114f3abf90f8bd127c1f25095a"5STANDARDtest.pdf2022-04-30T21:13:40.000Z"723b0018c2f5a7ef06a34f84f6fa97e4"388901STANDARD""" + + pdf_data = """%PDF-1. 
+1 0 obj<>endobj +2 0 obj<>endobj +3 0 obj<>endobj +trailer <>""" + + async def setup_before_prep(self, module_test): + module_test.httpserver.expect_request("/").respond_with_data(f'') + module_test.httpx_mock.add_response( + url=self.open_bucket_url, + text=self.open_bucket_body, + ) + module_test.httpx_mock.add_response( + url=f"{self.open_bucket_url}test.pdf", + text=self.pdf_data, + headers={"Content-Type": "application/pdf"}, + ) + module_test.httpx_mock.add_response( + url=f"{self.open_bucket_url}test.css", + text="", + ) + + def check(self, module_test, events): + download_dir = module_test.scan.home / "filedownload" + files = list(download_dir.glob("*.pdf")) + assert any(e.type == "URL_UNVERIFIED" and e.data.endswith("test.pdf") for e in events) + assert not any(e.type == "URL_UNVERIFIED" and e.data.endswith("test.css") for e in events) + assert any(f.name.endswith("test.pdf") for f in files), "Failed to download PDF file from open bucket" + assert not any(f.name.endswith("test.css") for f in files), "Unwanted CSS file was downloaded" diff --git a/bbot/test/test_step_2/module_tests/test_module_bucket_firebase.py b/bbot/test/test_step_2/module_tests/test_module_bucket_firebase.py new file mode 100644 index 0000000000..c63655f73c --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_bucket_firebase.py @@ -0,0 +1,13 @@ +from .test_module_bucket_amazon import * + + +class TestBucket_Firebase(Bucket_Amazon_Base): + provider = "google" + random_bucket_1 = f"{random_bucket_name_1}.firebaseio.com" + random_bucket_2 = f"{random_bucket_name_2}.firebaseio.com" + random_bucket_3 = f"{random_bucket_name_3}.firebaseio.com" + + def url_setup(self): + self.url_1 = f"https://{self.random_bucket_1}" + self.url_2 = f"https://{self.random_bucket_2}/.json" + self.url_3 = f"https://{self.random_bucket_3}/.json" diff --git a/bbot/test/test_step_2/module_tests/test_module_bucket_google.py b/bbot/test/test_step_2/module_tests/test_module_bucket_google.py new file mode 100644 index 0000000000..ee1f32ab0d --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_bucket_google.py @@ -0,0 +1,27 @@ +from .test_module_bucket_amazon import * + + +class TestBucket_Google(Bucket_Amazon_Base): + provider = "google" + random_bucket_1 = f"{random_bucket_name_1}.storage.googleapis.com" + random_bucket_2 = f"{random_bucket_name_2}.storage.googleapis.com" + random_bucket_3 = f"{random_bucket_name_3}.storage.googleapis.com" + open_bucket_body = """{ + "kind": "storage#testIamPermissionsResponse", + "permissions": [ + "storage.objects.create", + "storage.objects.list" + ] +}""" + + def bucket_setup(self): + self.url_setup() + self.website_body = f""" + + + """ + + def url_setup(self): + self.url_1 = f"{random_bucket_name_1}.storage.googleapis.com" + self.url_2 = f"https://www.googleapis.com/storage/v1/b/{random_bucket_name_2}/iam/testPermissions?&permissions=storage.buckets.get&permissions=storage.buckets.list&permissions=storage.buckets.create&permissions=storage.buckets.delete&permissions=storage.buckets.setIamPolicy&permissions=storage.objects.get&permissions=storage.objects.list&permissions=storage.objects.create&permissions=storage.objects.delete&permissions=storage.objects.setIamPolicy" + self.url_3 = f"https://www.googleapis.com/storage/v1/b/{random_bucket_name_3}" diff --git a/bbot/test/test_step_2/module_tests/test_module_bufferoverrun.py b/bbot/test/test_step_2/module_tests/test_module_bufferoverrun.py new file mode 100644 index 0000000000..e77127bc38 --- /dev/null +++ 
b/bbot/test/test_step_2/module_tests/test_module_bufferoverrun.py @@ -0,0 +1,35 @@ +from .base import ModuleTestBase + + +class TestBufferOverrun(ModuleTestBase): + config_overrides = {"modules": {"bufferoverrun": {"api_key": "asdf", "commercial": False}}} + + async def setup_before_prep(self, module_test): + # Mock response for non-commercial API + module_test.httpx_mock.add_response( + url="https://tls.bufferover.run/dns?q=.blacklanternsecurity.com", + match_headers={"x-api-key": "asdf"}, + json={"Results": ["1.2.3.4,example.com,*,*,sub.blacklanternsecurity.com"]}, + ) + + def check(self, module_test, events): + assert any(e.data == "sub.blacklanternsecurity.com" for e in events), "Failed to detect subdomain for free API" + + +class TestBufferOverrunCommercial(ModuleTestBase): + modules_overrides = ["bufferoverrun"] + module_name = "bufferoverrun" + config_overrides = {"modules": {"bufferoverrun": {"api_key": "asdf", "commercial": True}}} + + async def setup_before_prep(self, module_test): + # Mock response for commercial API + module_test.httpx_mock.add_response( + url="https://bufferover-run-tls.p.rapidapi.com/ipv4/dns?q=.blacklanternsecurity.com", + match_headers={"x-rapidapi-host": "bufferover-run-tls.p.rapidapi.com", "x-rapidapi-key": "asdf"}, + json={"Results": ["5.6.7.8,blacklanternsecurity.com,*,*,sub.blacklanternsecurity.com"]}, + ) + + def check(self, module_test, events): + assert any(e.data == "sub.blacklanternsecurity.com" for e in events), ( + "Failed to detect subdomain for commercial API" + ) diff --git a/bbot/test/test_step_2/module_tests/test_module_builtwith.py b/bbot/test/test_step_2/module_tests/test_module_builtwith.py new file mode 100644 index 0000000000..d11c8940d2 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_builtwith.py @@ -0,0 +1,110 @@ +from .base import ModuleTestBase + + +class TestBuiltWith(ModuleTestBase): + config_overrides = {"modules": {"builtwith": {"api_key": "asdf"}}} + + async def setup_after_prep(self, module_test): + module_test.httpx_mock.add_response( + url="https://api.builtwith.com/v20/api.json?KEY=asdf&LOOKUP=blacklanternsecurity.com&NOMETA=yes&NOATTR=yes&HIDETEXT=yes&HIDEDL=yes", + json={ + "Results": [ + { + "Result": { + "IsDB": "True", + "Spend": 734, + "Paths": [ + { + "Technologies": [ + { + "Name": "nginx", + "Tag": "Web Server", + "FirstDetected": 1533510000000, + "LastDetected": 1559516400000, + "IsPremium": "no", + }, + { + "Parent": "nginx", + "Name": "Nginx 1.14", + "Tag": "Web Server", + "FirstDetected": 1555542000000, + "LastDetected": 1559516400000, + "IsPremium": "no", + }, + { + "Name": "Domain Not Resolving", + "Tag": "hosting", + "FirstDetected": 1613894400000, + "LastDetected": 1633244400000, + "IsPremium": "no", + }, + ], + "FirstIndexed": 1533510000000, + "LastIndexed": 1633244400000, + "Domain": "blacklanternsecurity.com", + "Url": "", + "SubDomain": "asdf", + } + ], + }, + "Meta": { + "Majestic": 0, + "Umbrella": 0, + "Vertical": "", + "Social": None, + "CompanyName": None, + "Telephones": None, + "Emails": [], + "City": None, + "State": None, + "Postcode": None, + "Country": "US", + "Names": None, + "ARank": 6249242, + "QRank": -1, + }, + "Attributes": { + "Employees": 0, + "MJRank": 0, + "MJTLDRank": 0, + "RefSN": 0, + "RefIP": 0, + "Followers": 0, + "Sitemap": 0, + "GTMTags": 0, + "QubitTags": 0, + "TealiumTags": 0, + "AdobeTags": 0, + "CDimensions": 0, + "CGoals": 0, + "CMetrics": 0, + "ProductCount": 0, + }, + "FirstIndexed": 1389481200000, + "LastIndexed": 1684220400000, + "Lookup": 
"blacklanternsecurity.com", + "SalesRevenue": 0, + } + ], + "Errors": [], + "Trust": None, + }, + ) + module_test.httpx_mock.add_response( + url="https://api.builtwith.com/redirect1/api.json?KEY=asdf&LOOKUP=blacklanternsecurity.com", + json={ + "Lookup": "blacklanternsecurity.com", + "Inbound": [ + { + "Domain": "blacklanternsecurity.github.io", + "FirstDetected": 1564354800000, + "LastDetected": 1683783431121, + } + ], + "Outbound": None, + }, + ) + + def check(self, module_test, events): + assert any(e.data == "asdf.blacklanternsecurity.com" for e in events), "Failed to detect subdomain" + assert any(e.data == "blacklanternsecurity.github.io" for e in events), "Failed to detect redirect" diff --git a/bbot/test/test_step_2/module_tests/test_module_bypass403.py b/bbot/test/test_step_2/module_tests/test_module_bypass403.py new file mode 100644 index 0000000000..57c1a8bedf --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_bypass403.py @@ -0,0 +1,101 @@ +import re +from .base import ModuleTestBase + + +class TestBypass403(ModuleTestBase): + targets = ["http://127.0.0.1:8888/test"] + modules_overrides = ["bypass403", "httpx"] + + async def setup_after_prep(self, module_test): + expect_args = {"method": "GET", "uri": "/test..;/"} + respond_args = {"response_data": "alive"} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + module_test.httpserver.no_handler_status_code = 403 + + def check(self, module_test, events): + findings = [e for e in events if e.type == "FINDING"] + assert len(findings) == 1 + finding = findings[0] + assert "http://127.0.0.1:8888/test..;/" in finding.data["description"] + + +class TestBypass403_collapsethreshold(ModuleTestBase): + targets = ["http://127.0.0.1:8888/test"] + modules_overrides = ["bypass403", "httpx"] + + async def setup_after_prep(self, module_test): + respond_args = {"response_data": "alive"} + + # some of these won't work outside of the module because of the complex logic. This doesn't matter, we just need to get more alerts than the threshold. 
+ + query_payloads = [ + "%09", + "%20", + "%23", + "%2e", + "%2f", + ".", + "?", + ";", + "..;", + ";%09", + ";%09..", + ";%09..;", + ";%2f..", + "*", + "/*", + "..;/", + ";/", + "/..;/", + "/;/", + "/./", + "//", + "/.", + "/?anything", + ".php", + ".json", + ".html", + ] + + for qp in query_payloads: + expect_args = {"method": "GET", "uri": f"/test{qp}"} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + module_test.httpserver.no_handler_status_code = 403 + + def check(self, module_test, events): + findings = [e for e in events if e.type == "FINDING"] + assert len(findings) == 1 + finding = findings[0] + assert "403 Bypass MULTIPLE SIGNATURES (exceeded threshold" in finding.data["description"] + + +class TestBypass403_aspnetcookieless(ModuleTestBase): + targets = ["http://127.0.0.1:8888/admin.aspx"] + modules_overrides = ["bypass403", "httpx"] + + async def setup_after_prep(self, module_test): + expect_args = {"method": "GET", "uri": re.compile(r"\/\([sS]\(\w+\)\)\/.+\.aspx")} + respond_args = {"response_data": "alive"} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + module_test.httpserver.no_handler_status_code = 403 + + def check(self, module_test, events): + findings = [e for e in events if e.type == "FINDING"] + assert len(findings) == 2 + assert all("(S(X))/admin.aspx" in e.data["description"] for e in findings) + + +class TestBypass403_waf(ModuleTestBase): + targets = ["http://127.0.0.1:8888/test"] + modules_overrides = ["bypass403", "httpx"] + + async def setup_after_prep(self, module_test): + expect_args = {"method": "GET", "uri": "/test..;/"} + respond_args = {"response_data": "The requested URL was rejected"} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + module_test.httpserver.no_handler_status_code = 403 + + def check(self, module_test, events): + findings = [e for e in events if e.type == "FINDING"] + assert not any(findings) diff --git a/bbot/test/test_step_2/module_tests/test_module_c99.py b/bbot/test/test_step_2/module_tests/test_module_c99.py new file mode 100644 index 0000000000..ce9c7c8878 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_c99.py @@ -0,0 +1,151 @@ +import httpx + +from .base import ModuleTestBase + + +class TestC99(ModuleTestBase): + module_name = "c99" + config_overrides = {"modules": {"c99": {"api_key": "asdf"}}} + + async def setup_before_prep(self, module_test): + module_test.httpx_mock.add_response( + url="https://api.c99.nl/randomnumber?key=asdf&between=1,100&json", + json={"success": True, "output": 65}, + ) + module_test.httpx_mock.add_response( + url="https://api.c99.nl/subdomainfinder?key=asdf&domain=blacklanternsecurity.com&json", + json={ + "success": True, + "subdomains": [ + {"subdomain": "asdf.blacklanternsecurity.com", "ip": "1.2.3.4", "cloudflare": True}, + ], + "cached": True, + "cache_time": "2023-05-19 03:13:05", + }, + ) + + def check(self, module_test, events): + assert any(e.data == "asdf.blacklanternsecurity.com" for e in events), "Failed to detect subdomain" + + +class TestC99AbortThreshold1(TestC99): + config_overrides = {"modules": {"c99": {"api_key": ["6789", "fdsa", "1234", "4321"]}}} + + async def setup_before_prep(self, module_test): + module_test.httpx_mock.add_response( + url="https://api.c99.nl/randomnumber?key=fdsa&between=1,100&json", + json={"success": True, "output": 65}, + ) + + self.url_count = {} + + async def custom_callback(request): + url = str(request.url) + try: + 
self.url_count[url] += 1 + except KeyError: + self.url_count[url] = 1 + raise httpx.TimeoutException("timeout") + + module_test.httpx_mock.add_callback(custom_callback) + + def check(self, module_test, events): + assert module_test.module.api_failure_abort_threshold == 13 + assert module_test.module.errored is False + # assert module_test.module._api_request_failures == 4 + assert module_test.module.api_retries == 4 + assert {e.data for e in events if e.type == "DNS_NAME"} == {"blacklanternsecurity.com"} + assert self.url_count == { + "https://api.c99.nl/randomnumber?key=6789&between=1,100&json": 1, + "https://api.c99.nl/randomnumber?key=4321&between=1,100&json": 1, + "https://api.c99.nl/randomnumber?key=1234&between=1,100&json": 1, + "https://api.c99.nl/subdomainfinder?key=fdsa&domain=blacklanternsecurity.com&json": 1, + "https://api.c99.nl/subdomainfinder?key=6789&domain=blacklanternsecurity.com&json": 1, + "https://api.c99.nl/subdomainfinder?key=4321&domain=blacklanternsecurity.com&json": 1, + "https://api.c99.nl/subdomainfinder?key=1234&domain=blacklanternsecurity.com&json": 1, + } + + +class TestC99AbortThreshold2(TestC99AbortThreshold1): + targets = ["blacklanternsecurity.com", "evilcorp.com"] + + async def setup_before_prep(self, module_test): + await super().setup_before_prep(module_test) + await module_test.mock_dns( + { + "blacklanternsecurity.com": {"A": ["127.0.0.88"]}, + "evilcorp.com": {"A": ["127.0.0.11"]}, + "evilcorp.net": {"A": ["127.0.0.22"]}, + "evilcorp.co.uk": {"A": ["127.0.0.33"]}, + } + ) + + def check(self, module_test, events): + assert module_test.module.api_failure_abort_threshold == 13 + assert module_test.module.errored is False + assert module_test.module._api_request_failures == 8 + assert module_test.module.api_retries == 4 + assert {e.data for e in events if e.type == "DNS_NAME"} == {"blacklanternsecurity.com", "evilcorp.com"} + assert self.url_count == { + "https://api.c99.nl/randomnumber?key=6789&between=1,100&json": 1, + "https://api.c99.nl/randomnumber?key=4321&between=1,100&json": 1, + "https://api.c99.nl/randomnumber?key=1234&between=1,100&json": 1, + "https://api.c99.nl/subdomainfinder?key=fdsa&domain=blacklanternsecurity.com&json": 1, + "https://api.c99.nl/subdomainfinder?key=6789&domain=blacklanternsecurity.com&json": 1, + "https://api.c99.nl/subdomainfinder?key=4321&domain=blacklanternsecurity.com&json": 1, + "https://api.c99.nl/subdomainfinder?key=1234&domain=blacklanternsecurity.com&json": 1, + "https://api.c99.nl/subdomainfinder?key=fdsa&domain=evilcorp.com&json": 1, + "https://api.c99.nl/subdomainfinder?key=6789&domain=evilcorp.com&json": 1, + "https://api.c99.nl/subdomainfinder?key=4321&domain=evilcorp.com&json": 1, + "https://api.c99.nl/subdomainfinder?key=1234&domain=evilcorp.com&json": 1, + } + + +class TestC99AbortThreshold3(TestC99AbortThreshold2): + targets = ["blacklanternsecurity.com", "evilcorp.com", "evilcorp.net"] + + def check(self, module_test, events): + assert module_test.module.api_failure_abort_threshold == 13 + assert module_test.module.errored is False + assert module_test.module._api_request_failures == 12 + assert module_test.module.api_retries == 4 + assert {e.data for e in events if e.type == "DNS_NAME"} == { + "blacklanternsecurity.com", + "evilcorp.com", + "evilcorp.net", + } + assert self.url_count == { + "https://api.c99.nl/randomnumber?key=6789&between=1,100&json": 1, + "https://api.c99.nl/randomnumber?key=4321&between=1,100&json": 1, + "https://api.c99.nl/randomnumber?key=1234&between=1,100&json": 1, + 
"https://api.c99.nl/subdomainfinder?key=fdsa&domain=blacklanternsecurity.com&json": 1, + "https://api.c99.nl/subdomainfinder?key=6789&domain=blacklanternsecurity.com&json": 1, + "https://api.c99.nl/subdomainfinder?key=4321&domain=blacklanternsecurity.com&json": 1, + "https://api.c99.nl/subdomainfinder?key=1234&domain=blacklanternsecurity.com&json": 1, + "https://api.c99.nl/subdomainfinder?key=fdsa&domain=evilcorp.com&json": 1, + "https://api.c99.nl/subdomainfinder?key=6789&domain=evilcorp.com&json": 1, + "https://api.c99.nl/subdomainfinder?key=4321&domain=evilcorp.com&json": 1, + "https://api.c99.nl/subdomainfinder?key=1234&domain=evilcorp.com&json": 1, + "https://api.c99.nl/subdomainfinder?key=fdsa&domain=evilcorp.net&json": 1, + "https://api.c99.nl/subdomainfinder?key=6789&domain=evilcorp.net&json": 1, + "https://api.c99.nl/subdomainfinder?key=4321&domain=evilcorp.net&json": 1, + "https://api.c99.nl/subdomainfinder?key=1234&domain=evilcorp.net&json": 1, + } + + +class TestC99AbortThreshold4(TestC99AbortThreshold3): + targets = ["blacklanternsecurity.com", "evilcorp.com", "evilcorp.net", "evilcorp.co.uk"] + + def check(self, module_test, events): + assert module_test.module.api_failure_abort_threshold == 13 + assert module_test.module.errored is True + assert module_test.module._api_request_failures == 13 + assert module_test.module.api_retries == 4 + assert {e.data for e in events if e.type == "DNS_NAME"} == { + "blacklanternsecurity.com", + "evilcorp.com", + "evilcorp.net", + "evilcorp.co.uk", + } + assert len(self.url_count) == 16 + assert all(v == 1 for v in self.url_count.values()) diff --git a/bbot/test/test_step_2/module_tests/test_module_censys.py b/bbot/test/test_step_2/module_tests/test_module_censys.py new file mode 100644 index 0000000000..14e72921e1 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_censys.py @@ -0,0 +1,81 @@ +from .base import ModuleTestBase + + +class TestCensys(ModuleTestBase): + config_overrides = {"modules": {"censys": {"api_key": "api_id:api_secret"}}} + + async def setup_before_prep(self, module_test): + module_test.httpx_mock.add_response( + url="https://search.censys.io/api/v1/account", + match_headers={"Authorization": "Basic YXBpX2lkOmFwaV9zZWNyZXQ="}, + json={ + "email": "info@blacklanternsecurity.com", + "login": "nope", + "first_login": "1917-08-03 20:03:55", + "last_login": "1918-05-19 01:15:22", + "quota": {"used": 26, "allowance": 250, "resets_at": "1919-06-03 16:30:32"}, + }, + ) + module_test.httpx_mock.add_response( + url="https://search.censys.io/api/v2/certificates/search", + match_headers={"Authorization": "Basic YXBpX2lkOmFwaV9zZWNyZXQ="}, + match_content=b'{"q": "names: blacklanternsecurity.com", "per_page": 100}', + json={ + "code": 200, + "status": "OK", + "result": { + "query": "names: blacklanternsecurity.com", + "total": 196, + "duration_ms": 1046, + "hits": [ + { + "parsed": { + "validity_period": { + "not_before": "2021-11-18T00:09:46Z", + "not_after": "2022-11-18T00:09:46Z", + }, + "issuer_dn": "C=US, ST=Arizona, L=Scottsdale, O=GoDaddy.com\\, Inc., OU=http://certs.godaddy.com/repository/, CN=Go Daddy Secure Certificate Authority - G2", + "subject_dn": "CN=asdf.blacklanternsecurity.com", + }, + "fingerprint_sha256": "590ad51b8db62925f0fd3f300264c6a36692e20ceec2b5a22e7e4b41c1575cdc", + "names": ["asdf.blacklanternsecurity.com", "asdf2.blacklanternsecurity.com"], + }, + ], + "links": {"next": "NextToken", "prev": ""}, + }, + }, + ) + module_test.httpx_mock.add_response( + 
url="https://search.censys.io/api/v2/certificates/search", + match_headers={"Authorization": "Basic YXBpX2lkOmFwaV9zZWNyZXQ="}, + match_content=b'{"q": "names: blacklanternsecurity.com", "per_page": 100, "cursor": "NextToken"}', + json={ + "code": 200, + "status": "OK", + "result": { + "query": "names: blacklanternsecurity.com", + "total": 196, + "duration_ms": 1046, + "hits": [ + { + "parsed": { + "validity_period": { + "not_before": "2021-11-18T00:09:46Z", + "not_after": "2022-11-18T00:09:46Z", + }, + "issuer_dn": "C=US, ST=Arizona, L=Scottsdale, O=GoDaddy.com\\, Inc., OU=http://certs.godaddy.com/repository/, CN=Go Daddy Secure Certificate Authority - G2", + "subject_dn": "CN=zzzz.blacklanternsecurity.com", + }, + "fingerprint_sha256": "590ad51b8db62925f0fd3f300264c6a36692e20ceec2b5a22e7e4b41c1575cdc", + "names": ["zzzz.blacklanternsecurity.com"], + }, + ], + "links": {"next": "", "prev": ""}, + }, + }, + ) + + def check(self, module_test, events): + assert any(e.data == "asdf.blacklanternsecurity.com" for e in events), "Failed to detect asdf subdomain" + assert any(e.data == "asdf2.blacklanternsecurity.com" for e in events), "Failed to detect asdf2 subdomain" + assert any(e.data == "zzzz.blacklanternsecurity.com" for e in events), "Failed to detect zzzz subdomain" diff --git a/bbot/test/test_step_2/module_tests/test_module_certspotter.py b/bbot/test/test_step_2/module_tests/test_module_certspotter.py new file mode 100644 index 0000000000..a9ab7eb36b --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_certspotter.py @@ -0,0 +1,14 @@ +from .base import ModuleTestBase + + +class TestCertspotter(ModuleTestBase): + async def setup_after_prep(self, module_test): + module_test.module.abort_if = lambda e: False + for t in self.targets: + module_test.httpx_mock.add_response( + url="https://api.certspotter.com/v1/issuances?domain=blacklanternsecurity.com&include_subdomains=true&expand=dns_names", + json=[{"dns_names": ["*.asdf.blacklanternsecurity.com"]}], + ) + + def check(self, module_test, events): + assert any(e.data == "asdf.blacklanternsecurity.com" for e in events), "Failed to detect subdomain" diff --git a/bbot/test/test_step_2/module_tests/test_module_chaos.py b/bbot/test/test_step_2/module_tests/test_module_chaos.py new file mode 100644 index 0000000000..193bded584 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_chaos.py @@ -0,0 +1,25 @@ +from .base import ModuleTestBase + + +class TestChaos(ModuleTestBase): + config_overrides = {"modules": {"chaos": {"api_key": "asdf"}}} + + async def setup_before_prep(self, module_test): + module_test.httpx_mock.add_response( + url="https://dns.projectdiscovery.io/dns/example.com", + match_headers={"Authorization": "asdf"}, + json={"domain": "example.com", "subdomains": 65}, + ) + module_test.httpx_mock.add_response( + url="https://dns.projectdiscovery.io/dns/blacklanternsecurity.com/subdomains", + match_headers={"Authorization": "asdf"}, + json={ + "domain": "blacklanternsecurity.com", + "subdomains": [ + "*.asdf.cloud", + ], + }, + ) + + def check(self, module_test, events): + assert any(e.data == "asdf.cloud.blacklanternsecurity.com" for e in events), "Failed to detect subdomain" diff --git a/bbot/test/test_step_2/module_tests/test_module_cloudcheck.py b/bbot/test/test_step_2/module_tests/test_module_cloudcheck.py new file mode 100644 index 0000000000..0c8f9fe620 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_cloudcheck.py @@ -0,0 +1,100 @@ +from .base import ModuleTestBase + +from bbot.scanner 
import Scanner + + +class TestCloudCheck(ModuleTestBase): + targets = ["http://127.0.0.1:8888", "asdf2.storage.googleapis.com"] + modules_overrides = ["httpx", "excavate", "cloudcheck"] + + async def setup_after_prep(self, module_test): + module_test.set_expect_requests({"uri": "/"}, {"response_data": ""}) + + scan = Scanner(config={"cloudcheck": True}) + await scan._prep() + module = scan.modules["cloudcheck"] + providers = scan.helpers.cloud.providers + # make sure we have all the providers + provider_names = ( + "amazon", + "google", + "azure", + "digitalocean", + "oracle", + "akamai", + "cloudflare", + "github", + "zoho", + "fastly", + ) + for provider_name in provider_names: + assert provider_name in providers + + amazon_ranges = list(providers["amazon"].ranges) + assert amazon_ranges + amazon_range = next(iter(amazon_ranges)) + amazon_address = amazon_range.broadcast_address + + ip_event = scan.make_event(amazon_address, parent=scan.root_event) + aws_event1 = scan.make_event("amazonaws.com", parent=scan.root_event) + aws_event2 = scan.make_event("asdf.amazonaws.com", parent=scan.root_event) + aws_event3 = scan.make_event("asdfamazonaws.com", parent=scan.root_event) + aws_event4 = scan.make_event("test.asdf.aws", parent=scan.root_event) + + other_event1 = scan.make_event("cname.evilcorp.com", parent=scan.root_event) + other_event2 = scan.make_event("cname2.evilcorp.com", parent=scan.root_event) + other_event3 = scan.make_event("cname3.evilcorp.com", parent=scan.root_event) + other_event2._resolved_hosts = {amazon_address} + other_event3._resolved_hosts = {"asdf.amazonaws.com"} + + for event in (ip_event, aws_event1, aws_event2, aws_event4, other_event2, other_event3): + await module.handle_event(event) + assert "cloud-amazon" in event.tags, f"{event} was not properly cloud-tagged" + + assert "cloud-domain" in aws_event1.tags + assert "cloud-ip" in other_event2.tags + assert "cloud-cname" in other_event3.tags + + for event in (aws_event3, other_event1): + await module.handle_event(event) + assert "cloud-amazon" not in event.tags, f"{event} was improperly cloud-tagged" + assert not any(t for t in event.tags if t.startswith("cloud-") or t.startswith("cdn-")), ( + f"{event} was improperly cloud-tagged" + ) + + google_event1 = scan.make_event("asdf.googleapis.com", parent=scan.root_event) + google_event2 = scan.make_event("asdf.google", parent=scan.root_event) + google_event3 = scan.make_event("asdf.evilcorp.com", parent=scan.root_event) + google_event3._resolved_hosts = {"asdf.storage.googleapis.com"} + + for event in (google_event1, google_event2, google_event3): + await module.handle_event(event) + assert "cloud-google" in event.tags, f"{event} was not properly cloud-tagged" + assert "cloud-storage-bucket" in google_event3.tags + + await scan._cleanup() + + def check(self, module_test, events): + for e in events: + self.log.debug(e) + assert 2 == len([e for e in events if e.type == "STORAGE_BUCKET"]) + assert 1 == len( + [ + e + for e in events + if e.type == "STORAGE_BUCKET" + and e.data["name"] == "asdf" + and "cloud-amazon" in e.tags + and "cloud-storage-bucket" in e.tags + ] + ) + assert 1 == len( + [ + e + for e in events + if e.type == "STORAGE_BUCKET" + and e.data["name"] == "asdf2" + and "cloud-google" in e.tags + and "cloud-storage-bucket" in e.tags + ] + ) diff --git a/bbot/test/test_step_2/module_tests/test_module_code_repository.py b/bbot/test/test_step_2/module_tests/test_module_code_repository.py new file mode 100644 index 0000000000..bfb01ef03f --- /dev/null +++ 
b/bbot/test/test_step_2/module_tests/test_module_code_repository.py @@ -0,0 +1,69 @@ +from .base import ModuleTestBase + + +class TestCodeRepository(ModuleTestBase): + targets = ["http://127.0.0.1:8888"] + modules_overrides = ["httpx", "excavate", "code_repository"] + + async def setup_after_prep(self, module_test): + expect_args = {"method": "GET", "uri": "/"} + respond_args = { + "response_data": """ + + + + + + + + """ + } + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + def check(self, module_test, events): + assert 5 == len([e for e in events if e.type == "CODE_REPOSITORY"]) + assert 1 == len( + [ + e + for e in events + if e.type == "CODE_REPOSITORY" + and "git" in e.tags + and e.data["url"] == "https://github.com/blacklanternsecurity/bbot" + ] + ) + assert 1 == len( + [ + e + for e in events + if e.type == "CODE_REPOSITORY" + and "git" in e.tags + and e.data["url"] == "https://gitlab.com/blacklanternsecurity/bbot" + ] + ) + assert 1 == len( + [ + e + for e in events + if e.type == "CODE_REPOSITORY" + and "git" in e.tags + and e.data["url"] == "https://gitlab.org/blacklanternsecurity/bbot" + ] + ) + assert 1 == len( + [ + e + for e in events + if e.type == "CODE_REPOSITORY" + and "docker" in e.tags + and e.data["url"] == "https://hub.docker.com/r/blacklanternsecurity/bbot" + ] + ) + assert 1 == len( + [ + e + for e in events + if e.type == "CODE_REPOSITORY" + and "postman" in e.tags + and e.data["url"] == "https://www.postman.com/blacklanternsecurity/bbot" + ] + ) diff --git a/bbot/test/test_step_2/module_tests/test_module_credshed.py b/bbot/test/test_step_2/module_tests/test_module_credshed.py new file mode 100644 index 0000000000..a6b1e65c51 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_credshed.py @@ -0,0 +1,104 @@ +from .base import ModuleTestBase + + +credshed_auth_response = { + "access_token": "big_access_token", + "login": True, +} + + +credshed_response = { + "accounts": [ + { + "e": "bob@blacklanternsecurity.com", + "h": [], + "m": "hello my name is bob", + "p": "", + "s": [121562], + "u": "", + }, + { + "e": "judy@blacklanternsecurity.com", + "h": [ + "539FE8942DEADBEEFBC49E6EB2F175AC", + "D2D8F0E9A4A2DEADBEEF1AC80F36D61F", + "$2a$12$SHIC49jLIwsobdeadbeefuWb2BKWHUOk2yhpD77A0itiZI1vJqXHm", + ], + "m": "hello my name is judy", + "p": "", + "s": [80437], + "u": "", + }, + { + "e": "tim@blacklanternsecurity.com", + "h": [], + "m": "hello my name is tim", + "p": "TimTamSlam69", + "s": [80437], + "u": "tim", + }, + ], + "stats": { + "accounts_searched": 9820758365, + "elapsed": "0.00", + "limit": 1000, + "query": "blacklanternsecurity.com", + "query_type": "domain", + "sources_searched": 129957, + "total_count": 3, + "unique_count": 3, + }, +} + + +class TestCredshed(ModuleTestBase): + config_overrides = { + "modules": {"credshed": {"username": "admin", "password": "password", "credshed_url": "https://credshed.com"}} + } + + async def setup_before_prep(self, module_test): + module_test.httpx_mock.add_response( + url="https://credshed.com/api/auth", + json=credshed_auth_response, + method="POST", + ) + module_test.httpx_mock.add_response( + url="https://credshed.com/api/search", + json=credshed_response, + method="POST", + ) + + def check(self, module_test, events): + assert len(events) == 11 + assert 1 == len([e for e in events if e.type == "EMAIL_ADDRESS" and e.data == "bob@blacklanternsecurity.com"]) + assert 1 == len([e for e in events if e.type == "EMAIL_ADDRESS" and e.data == "judy@blacklanternsecurity.com"]) + 
assert 1 == len([e for e in events if e.type == "EMAIL_ADDRESS" and e.data == "tim@blacklanternsecurity.com"]) + assert 1 == len( + [ + e + for e in events + if e.type == "HASHED_PASSWORD" + and e.data == "judy@blacklanternsecurity.com:539FE8942DEADBEEFBC49E6EB2F175AC" + ] + ) + assert 1 == len( + [ + e + for e in events + if e.type == "HASHED_PASSWORD" + and e.data == "judy@blacklanternsecurity.com:D2D8F0E9A4A2DEADBEEF1AC80F36D61F" + ] + ) + assert 1 == len( + [ + e + for e in events + if e.type == "HASHED_PASSWORD" + and e.data + == "judy@blacklanternsecurity.com:$2a$12$SHIC49jLIwsobdeadbeefuWb2BKWHUOk2yhpD77A0itiZI1vJqXHm" + ] + ) + assert 1 == len( + [e for e in events if e.type == "PASSWORD" and e.data == "tim@blacklanternsecurity.com:TimTamSlam69"] + ) + assert 1 == len([e for e in events if e.type == "USERNAME" and e.data == "tim@blacklanternsecurity.com:tim"]) diff --git a/bbot/test/test_step_2/module_tests/test_module_crt.py b/bbot/test/test_step_2/module_tests/test_module_crt.py new file mode 100644 index 0000000000..327cde6689 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_crt.py @@ -0,0 +1,25 @@ +from .base import ModuleTestBase + + +class TestCRT(ModuleTestBase): + async def setup_after_prep(self, module_test): + class AsyncMock: + async def fetch(self, *args, **kwargs): + print("mock_fetch", args, kwargs) + return [ + {"name_value": "asdf.blacklanternsecurity.com"}, + {"name_value": "zzzz.blacklanternsecurity.com"}, + ] + + async def close(self): + pass + + async def mock_connect(*args, **kwargs): + print("mock_connect", args, kwargs) + return AsyncMock() + + module_test.monkeypatch.setattr("asyncpg.connect", mock_connect) + + def check(self, module_test, events): + assert any(e.data == "asdf.blacklanternsecurity.com" for e in events), "Failed to detect subdomain" + assert any(e.data == "zzzz.blacklanternsecurity.com" for e in events), "Failed to detect subdomain" diff --git a/bbot/test/test_step_2/module_tests/test_module_csv.py b/bbot/test/test_step_2/module_tests/test_module_csv.py new file mode 100644 index 0000000000..5a9575372d --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_csv.py @@ -0,0 +1,15 @@ +from .base import ModuleTestBase + + +class TestCSV(ModuleTestBase): + async def setup_after_prep(self, module_test): + await module_test.mock_dns({"blacklanternsecurity.com": {"A": ["127.0.0.5"]}}) + + def check(self, module_test, events): + csv_file = module_test.scan.home / "output.csv" + context_data = f"Scan {module_test.scan.name} seeded with DNS_NAME: blacklanternsecurity.com" + + with open(csv_file) as f: + data = f.read() + assert "blacklanternsecurity.com,127.0.0.5,TARGET" in data + assert context_data in data diff --git a/bbot/test/test_step_2/module_tests/test_module_dastardly.py b/bbot/test/test_step_2/module_tests/test_module_dastardly.py new file mode 100644 index 0000000000..a3b59ef13e --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_dastardly.py @@ -0,0 +1,70 @@ +import json +from werkzeug import Response + +from .base import ModuleTestBase + + +class TestDastardly(ModuleTestBase): + targets = ["http://127.0.0.1:5556/"] + modules_overrides = ["httpx", "dastardly"] + skip_distro_tests = True + + web_response = """ + + + visit this + + """ + + def xss_handler(self, request): + response = f""" + + + Email Form + + + {request.args.get("test", "")} + + """ + return Response(response, content_type="text/html") + + async def get_docker_ip(self, module_test): + docker_ip = "172.17.0.1" + try: + ip_output = 
await module_test.scan.helpers.run(["ip", "-j", "-4", "a", "show", "dev", "docker0"]) + interface_json = json.loads(ip_output.stdout) + docker_ip = interface_json[0]["addr_info"][0]["local"] + except Exception: + pass + return docker_ip + + async def setup_after_prep(self, module_test): + httpserver = module_test.request_fixture.getfixturevalue("bbot_httpserver_allinterfaces") + httpserver.expect_request("/").respond_with_data(self.web_response) + httpserver.expect_request("/test").respond_with_handler(self.xss_handler) + + # get docker IP + docker_ip = await self.get_docker_ip(module_test) + module_test.scan.target.seeds.add(docker_ip) + + # replace 127.0.0.1 with docker host IP to allow dastardly access to local http server + old_filter_event = module_test.module.filter_event + + def new_filter_event(event): + self.new_url = f"http://{docker_ip}:5556/" + event.data["url"] = self.new_url + event.parsed_url = module_test.scan.helpers.urlparse(self.new_url) + return old_filter_event(event) + + module_test.monkeypatch.setattr(module_test.module, "filter_event", new_filter_event) + + def check(self, module_test, events): + assert 1 == len( + [ + e + for e in events + if e.type == "VULNERABILITY" + and f"{self.new_url}test" in e.data["description"] + and "Cross-site scripting".lower() in e.data["description"].lower() + ] + ) diff --git a/bbot/test/test_step_2/module_tests/test_module_dehashed.py b/bbot/test/test_step_2/module_tests/test_module_dehashed.py new file mode 100644 index 0000000000..f642a444b6 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_dehashed.py @@ -0,0 +1,102 @@ +from .base import ModuleTestBase + +dehashed_domain_response = { + "balance": 10000, + "entries": [ + { + "id": "4363462346", + "email": "bob@blacklanternsecurity.com", + "ip_address": "", + "username": "bob@bob.com", + "password": "", + "hashed_password": "$2a$12$pVmwJ7pXEr3mE.DmCCE4fOUDdeadbeefd2KuCy/tq1ZUFyEOH2bve", + "name": "Bob Smith", + "vin": "", + "address": "", + "phone": "+91283423839", + "database_name": "eatstreet", + }, + { + "id": "234623453454", + "email": "tim@blacklanternsecurity.com", + "ip_address": "", + "username": "timmy", + "password": "TimTamSlam69", + "hashed_password": "", + "name": "Tim Tam", + "vin": "", + "address": "", + "phone": "+123455667", + "database_name": "eatstreet", + }, + ], + "success": True, + "took": "61µs", + "total": 2, +} + + +class TestDehashed(ModuleTestBase): + modules_overrides = ["dehashed", "speculate"] + config_overrides = { + "scope": {"report_distance": 2}, + "modules": {"dehashed": {"username": "admin", "api_key": "deadbeef"}}, + } + + async def setup_before_prep(self, module_test): + module_test.httpx_mock.add_response( + url="https://api.dehashed.com/search?query=domain:blacklanternsecurity.com&size=10000&page=1", + json=dehashed_domain_response, + ) + await module_test.mock_dns( + { + "bob.com": {"A": ["127.0.0.1"]}, + "blacklanternsecurity.com": {"A": ["127.0.0.1"]}, + } + ) + + def check(self, module_test, events): + assert len(events) == 12 + assert 1 == len([e for e in events if e.type == "DNS_NAME" and e.data == "blacklanternsecurity.com"]) + assert 1 == len([e for e in events if e.type == "ORG_STUB" and e.data == "blacklanternsecurity"]) + assert 1 == len( + [ + e + for e in events + if e.type == "EMAIL_ADDRESS" + and e.data == "bob@bob.com" + and e.scope_distance == 1 + and "affiliate" in e.tags + ] + ) + assert 1 == len( + [ + e + for e in events + if e.type == "DNS_NAME" and e.data == "bob.com" and e.scope_distance == 1 and 
"affiliate" in e.tags + ] + ) + assert 1 == len([e for e in events if e.type == "EMAIL_ADDRESS" and e.data == "bob@blacklanternsecurity.com"]) + assert 1 == len( + [ + e + for e in events + if e.type == "USERNAME" + and e.data == "bob@blacklanternsecurity.com:bob@bob.com" + and e.parent.data == "bob@blacklanternsecurity.com" + ] + ) + assert 1 == len([e for e in events if e.type == "EMAIL_ADDRESS" and e.data == "tim@blacklanternsecurity.com"]) + assert 1 == len( + [ + e + for e in events + if e.type == "HASHED_PASSWORD" + and e.data + == "bob@blacklanternsecurity.com:$2a$12$pVmwJ7pXEr3mE.DmCCE4fOUDdeadbeefd2KuCy/tq1ZUFyEOH2bve" + ] + ) + assert 1 == len( + [e for e in events if e.type == "PASSWORD" and e.data == "tim@blacklanternsecurity.com:TimTamSlam69"] + ) + assert 1 == len([e for e in events if e.type == "USERNAME" and e.data == "tim@blacklanternsecurity.com:timmy"]) diff --git a/bbot/test/test_step_2/module_tests/test_module_digitorus.py b/bbot/test/test_step_2/module_tests/test_module_digitorus.py new file mode 100644 index 0000000000..a683a17d8f --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_digitorus.py @@ -0,0 +1,20 @@ +from .base import ModuleTestBase + + +class TestDigitorus(ModuleTestBase): + web_response = """www.blacklanternsecurity.com
+chat.blacklanternsecurity.com
+www.blacklanternsecurity.com
+tasks.blacklanternsecurity.com
+gitlab.blacklanternsecurity.com
+asdf.blacklanternsecurity.com
+zzzz.blacklanternsecurity.com
+""" + + async def setup_after_prep(self, module_test): + module_test.httpx_mock.add_response( + url="https://certificatedetails.com/blacklanternsecurity.com", + text=self.web_response, + ) + + def check(self, module_test, events): + assert any(e.data == "asdf.blacklanternsecurity.com" for e in events), "Failed to detect subdomain" + assert any(e.data == "zzzz.blacklanternsecurity.com" for e in events), "Failed to detect subdomain" diff --git a/bbot/test/test_step_2/module_tests/test_module_discord.py b/bbot/test/test_step_2/module_tests/test_module_discord.py new file mode 100644 index 0000000000..d1aeb5c60f --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_discord.py @@ -0,0 +1,41 @@ +import httpx + +from .base import ModuleTestBase + + +class TestDiscord(ModuleTestBase): + targets = ["http://127.0.0.1:8888/cookie.aspx", "http://127.0.0.1:8888/cookie2.aspx", "foo.bar"] + modules_overrides = ["discord", "excavate", "badsecrets", "httpx"] + + webhook_url = "https://discord.com/api/webhooks/1234/deadbeef-P-uF-asdf" + config_overrides = {"modules": {"discord": {"webhook_url": webhook_url}}} + + def custom_setup(self, module_test): + respond_args = { + "response_data": '
<html><body><p>Express Cookie Test</p></body></html>
', + "headers": { + "set-cookie": "connect.sid=s%3A8FnPwdeM9kdGTZlWvdaVtQ0S1BCOhY5G.qys7H2oGSLLdRsEq7sqh7btOohHsaRKqyjV4LiVnBvc; Path=/; Expires=Wed, 05 Apr 2023 04:47:29 GMT; HttpOnly" + }, + } + module_test.set_expect_requests(expect_args={"uri": "/cookie.aspx"}, respond_args=respond_args) + module_test.set_expect_requests(expect_args={"uri": "/cookie2.aspx"}, respond_args=respond_args) + module_test.request_count = 0 + + async def setup_after_prep(self, module_test): + self.custom_setup(module_test) + + def custom_response(request: httpx.Request): + module_test.request_count += 1 + if module_test.request_count == 2: + return httpx.Response(status_code=429, json={"retry_after": 0.01}) + else: + return httpx.Response(status_code=200) + + module_test.httpx_mock.add_callback(custom_response, url=self.webhook_url) + + def check(self, module_test, events): + vulns = [e for e in events if e.type == "VULNERABILITY"] + findings = [e for e in events if e.type == "FINDING"] + assert len(findings) == 1 + assert len(vulns) == 2 + assert module_test.request_count == 4 diff --git a/bbot/test/test_step_2/module_tests/test_module_dnsbimi.py b/bbot/test/test_step_2/module_tests/test_module_dnsbimi.py new file mode 100644 index 0000000000..8079cda82a --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_dnsbimi.py @@ -0,0 +1,103 @@ +from .base import ModuleTestBase + +raw_bimi_txt_default = ( + '"v=BIMI1;l=https://bimi.test.localdomain/logo.svg; a=https://bimi.test.localdomain/certificate.pem"' +) +raw_bimi_txt_nondefault = '"v=BIMI1; l=https://nondefault.thirdparty.tld/brand/logo.svg;a=https://nondefault.thirdparty.tld/brand/certificate.pem;"' + + +class TestBIMI(ModuleTestBase): + targets = ["test.localdomain"] + modules_overrides = ["dnsbimi", "speculate"] + config_overrides = { + "modules": {"dnsbimi": {"emit_raw_dns_records": True, "selectors": "default,nondefault"}}, + } + + async def setup_after_prep(self, module_test): + await module_test.mock_dns( + { + "test.localdomain": { + "A": ["127.0.0.11"], + }, + "bimi.test.localdomain": { + "A": ["127.0.0.22"], + }, + "_bimi.test.localdomain": { + "A": ["127.0.0.33"], + }, + "default._bimi.test.localdomain": { + "A": ["127.0.0.44"], + "TXT": [raw_bimi_txt_default], + }, + "nondefault._bimi.test.localdomain": { + "A": ["127.0.0.44"], + "TXT": [raw_bimi_txt_nondefault], + }, + "_bimi.default._bimi.test.localdomain": { + "A": ["127.0.0.44"], + "TXT": [raw_bimi_txt_default], + }, + "_bimi.nondefault._bimi.test.localdomain": { + "A": ["127.0.0.44"], + "TXT": [raw_bimi_txt_default], + }, + "default._bimi.default._bimi.test.localdomain": { + "A": ["127.0.0.44"], + "TXT": [raw_bimi_txt_default], + }, + "nondefault._bimi.nondefault._bimi.test.localdomain": { + "A": ["127.0.0.44"], + "TXT": [raw_bimi_txt_nondefault], + }, + } + ) + + def check(self, module_test, events): + assert any( + e.type == "RAW_DNS_RECORD" + and e.data["host"] == "default._bimi.test.localdomain" + and e.data["type"] == "TXT" + and e.data["answer"] == raw_bimi_txt_default + for e in events + ), "Failed to emit RAW_DNS_RECORD" + assert any( + e.type == "RAW_DNS_RECORD" + and e.data["host"] == "nondefault._bimi.test.localdomain" + and e.data["type"] == "TXT" + and e.data["answer"] == raw_bimi_txt_nondefault + for e in events + ), "Failed to emit RAW_DNS_RECORD" + + assert any(e.type == "DNS_NAME" and e.data == "bimi.test.localdomain" for e in events), ( + "Failed to emit DNS_NAME" + ) + + # This should be filtered by a default BBOT configuration + assert not any(str(e.data) == 
"https://nondefault.thirdparty.tld/brand/logo.svg" for e in events) + + # This should not be filtered by a default BBOT configuration + assert any( + e.type == "URL_UNVERIFIED" and e.data == "https://bimi.test.localdomain/certificate.pem" for e in events + ), "Failed to emit URL_UNVERIFIED" + + # These should be filtered simply due to distance + assert not any(str(e.data) == "https://nondefault.thirdparty.tld/brand/logo.svg" for e in events) + assert not any(str(e.data) == "https://nondefault.thirdparty.tld/certificate.pem" for e in events) + + # These should have been filtered via filter_event() + assert not any( + e.type == "RAW_DNS_RECORD" and e.data["host"] == "default._bimi.default._bimi.test.localdomain" + for e in events + ), "Unwanted recursion occurring" + assert not any( + e.type == "RAW_DNS_RECORD" and e.data["host"] == "nondefault._bimi.nondefault._bimi.test.localdomain" + for e in events + ), "Unwanted recursion occurring" + assert not any( + e.type == "RAW_DNS_RECORD" and e.data["host"] == "nondefault._bimi.default._bimi.test.localdomain" + for e in events + ), "Unwanted recursion occurring" + assert not any( + e.type == "RAW_DNS_RECORD" and e.data["host"] == "default._bimi.nondefault._bimi.test.localdomain" + for e in events + ), "Unwanted recursion occurring" diff --git a/bbot/test/test_step_2/module_tests/test_module_dnsbrute.py b/bbot/test/test_step_2/module_tests/test_module_dnsbrute.py new file mode 100644 index 0000000000..0cbee8440e --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_dnsbrute.py @@ -0,0 +1,97 @@ +from .base import ModuleTestBase, tempwordlist + + +class TestDnsbrute(ModuleTestBase): + subdomain_wordlist = tempwordlist(["www", "asdf"]) + blacklist = ["api.asdf.blacklanternsecurity.com"] + config_overrides = {"modules": {"dnsbrute": {"wordlist": str(subdomain_wordlist), "max_depth": 3}}} + + async def setup_after_prep(self, module_test): + old_run_live = module_test.scan.helpers.run_live + + async def new_run_live(*command, check=False, text=True, **kwargs): + if "massdns" in command[:2]: + _input = [l async for l in kwargs["input"]] + if "asdf.blacklanternsecurity.com" in _input: + yield """{"name": "asdf.blacklanternsecurity.com.", "type": "A", "class": "IN", "status": "NOERROR", "rx_ts": 1713974911725326170, "data": {"answers": [{"ttl": 86400, "type": "A", "class": "IN", "name": "asdf.blacklanternsecurity.com.", "data": "1.2.3.4."}]}, "flags": ["rd", "ra"], "resolver": "195.226.187.130:53", "proto": "UDP"}""" + else: + async for _ in old_run_live(*command, check=False, text=True, **kwargs): + yield _ + + module_test.monkeypatch.setattr(module_test.scan.helpers, "run_live", new_run_live) + + await module_test.mock_dns( + { + "blacklanternsecurity.com": {"A": ["4.3.2.1"]}, + "asdf.blacklanternsecurity.com": {"A": ["1.2.3.4"]}, + } + ) + + module = module_test.module + scan = module_test.scan + + # test query logic + event = scan.make_event("blacklanternsecurity.com", "DNS_NAME", dummy=True) + assert module.make_query(event) == "blacklanternsecurity.com" + event = scan.make_event("asdf.blacklanternsecurity.com", "DNS_NAME", dummy=True) + assert module.make_query(event) == "blacklanternsecurity.com" + event = scan.make_event("api.asdf.blacklanternsecurity.com", "DNS_NAME", dummy=True) + assert module.make_query(event) == "asdf.blacklanternsecurity.com" + event = scan.make_event("test.api.asdf.blacklanternsecurity.com", "DNS_NAME", dummy=True) + assert module.make_query(event) == "asdf.blacklanternsecurity.com" + + assert 
module.dedup_strategy == "lowest_parent" + module.dedup_strategy = "highest_parent" + event = scan.make_event("blacklanternsecurity.com", "DNS_NAME", dummy=True) + assert module.make_query(event) == "blacklanternsecurity.com" + event = scan.make_event("asdf.blacklanternsecurity.com", "DNS_NAME", dummy=True) + assert module.make_query(event) == "blacklanternsecurity.com" + event = scan.make_event("api.asdf.blacklanternsecurity.com", "DNS_NAME", dummy=True) + assert module.make_query(event) == "blacklanternsecurity.com" + event = scan.make_event("test.api.asdf.blacklanternsecurity.com", "DNS_NAME", dummy=True) + assert module.make_query(event) == "blacklanternsecurity.com" + module.dedup_strategy = "lowest_parent" + + # test recursive brute-force event filtering + event = module_test.scan.make_event("blacklanternsecurity.com", "DNS_NAME", parent=module_test.scan.root_event) + event.scope_distance = 0 + result, reason = await module_test.module.filter_event(event) + assert result is True + event = module_test.scan.make_event( + "www.blacklanternsecurity.com", "DNS_NAME", parent=module_test.scan.root_event + ) + event.scope_distance = 0 + result, reason = await module_test.module.filter_event(event) + assert result is True + event = module_test.scan.make_event( + "test.www.blacklanternsecurity.com", "DNS_NAME", parent=module_test.scan.root_event + ) + event.scope_distance = 0 + result, reason = await module_test.module.filter_event(event) + assert result is True + event = module_test.scan.make_event( + "asdf.test.www.blacklanternsecurity.com", "DNS_NAME", parent=module_test.scan.root_event + ) + event.scope_distance = 0 + result, reason = await module_test.module.filter_event(event) + assert result is True + event = module_test.scan.make_event( + "wat.asdf.test.www.blacklanternsecurity.com", "DNS_NAME", parent=module_test.scan.root_event + ) + event.scope_distance = 0 + result, reason = await module_test.module.filter_event(event) + assert result is False + assert reason == "subdomain depth of *.asdf.test.www.blacklanternsecurity.com (4) > max_depth (3)" + event = module_test.scan.make_event( + "hmmm.ptr1234.blacklanternsecurity.com", "DNS_NAME", parent=module_test.scan.root_event + ) + event.scope_distance = 0 + result, reason = await module_test.module.filter_event(event) + assert result is False + assert reason == '"ptr1234.blacklanternsecurity.com" looks like an autogenerated PTR' + + def check(self, module_test, events): + assert len(events) == 4 + assert 1 == len( + [e for e in events if e.data == "asdf.blacklanternsecurity.com" and str(e.module) == "dnsbrute"] + ) diff --git a/bbot/test/test_step_2/module_tests/test_module_dnsbrute_mutations.py b/bbot/test/test_step_2/module_tests/test_module_dnsbrute_mutations.py new file mode 100644 index 0000000000..4f4009825f --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_dnsbrute_mutations.py @@ -0,0 +1,70 @@ +from .base import ModuleTestBase + + +class TestDnsbrute_mutations(ModuleTestBase): + targets = [ + "blacklanternsecurity.com", + "rrrr.blacklanternsecurity.com", + "asdff-ffdsa.blacklanternsecurity.com", + "hmmmm.test1.blacklanternsecurity.com", + "uuuuu.test2.blacklanternsecurity.com", + ] + + async def setup_after_prep(self, module_test): + old_run_live = module_test.scan.helpers.run_live + + async def new_run_live(*command, check=False, text=True, **kwargs): + if "massdns" in command[:2]: + _input = [l async for l in kwargs["input"]] + if "rrrr-test.blacklanternsecurity.com" in _input: + yield """{"name": 
"rrrr-test.blacklanternsecurity.com.", "type": "A", "class": "IN", "status": "NOERROR", "rx_ts": 1713974911725326170, "data": {"answers": [{"ttl": 86400, "type": "A", "class": "IN", "name": "rrrr-test.blacklanternsecurity.com.", "data": "1.2.3.4."}]}, "flags": ["rd", "ra"], "resolver": "195.226.187.130:53", "proto": "UDP"}""" + if "rrrr-ffdsa.blacklanternsecurity.com" in _input: + yield """{"name": "rrrr-ffdsa.blacklanternsecurity.com.", "type": "A", "class": "IN", "status": "NOERROR", "rx_ts": 1713974911725326170, "data": {"answers": [{"ttl": 86400, "type": "A", "class": "IN", "name": "rrrr-ffdsa.blacklanternsecurity.com.", "data": "1.2.3.4."}]}, "flags": ["rd", "ra"], "resolver": "195.226.187.130:53", "proto": "UDP"}""" + if "hmmmm.test2.blacklanternsecurity.com" in _input: + yield """{"name": "hmmmm.test2.blacklanternsecurity.com.", "type": "A", "class": "IN", "status": "NOERROR", "rx_ts": 1713974911725326170, "data": {"answers": [{"ttl": 86400, "type": "A", "class": "IN", "name": "hmmmm.test2.blacklanternsecurity.com.", "data": "1.2.3.4."}]}, "flags": ["rd", "ra"], "resolver": "195.226.187.130:53", "proto": "UDP"}""" + else: + async for _ in old_run_live(*command, check=False, text=True, **kwargs): + yield _ + + module_test.monkeypatch.setattr(module_test.scan.helpers, "run_live", new_run_live) + + await module_test.mock_dns( + { + "blacklanternsecurity.com": {"A": ["1.2.3.4"]}, + # targets + "rrrr.blacklanternsecurity.com": {"A": ["1.2.3.4"]}, + "asdff-ffdsa.blacklanternsecurity.com": {"A": ["1.2.3.4"]}, + "hmmmm.test1.blacklanternsecurity.com": {"A": ["1.2.3.4"]}, + "uuuuu.test2.blacklanternsecurity.com": {"A": ["1.2.3.4"]}, + # devops mutation + "rrrr-test.blacklanternsecurity.com": {"A": ["1.2.3.4"]}, + # target-specific dns mutation + "rrrr-ffdsa.blacklanternsecurity.com": {"A": ["1.2.3.4"]}, + # subdomain from one subdomain on a different subdomain + "hmmmm.test2.blacklanternsecurity.com": {"A": ["1.2.3.4"]}, + } + ) + + def check(self, module_test, events): + assert len(events) == 10 + assert 1 == len( + [ + e + for e in events + if e.data == "rrrr-test.blacklanternsecurity.com" and str(e.module) == "dnsbrute_mutations" + ] + ), "Failed to find devops mutation (word_cloud)" + assert 1 == len( + [ + e + for e in events + if e.data == "rrrr-ffdsa.blacklanternsecurity.com" and str(e.module) == "dnsbrute_mutations" + ] + ), "Failed to find target-specific mutation (word_cloud.dns_mutator)" + assert 1 == len( + [ + e + for e in events + if e.data == "hmmmm.test2.blacklanternsecurity.com" and str(e.module) == "dnsbrute_mutations" + ] + ), "Failed to find subdomain taken from another subdomain" diff --git a/bbot/test/test_step_2/module_tests/test_module_dnscaa.py b/bbot/test/test_step_2/module_tests/test_module_dnscaa.py new file mode 100644 index 0000000000..cd1546fb1b --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_dnscaa.py @@ -0,0 +1,62 @@ +from .base import ModuleTestBase + + +class TestDNSCAA(ModuleTestBase): + targets = ["blacklanternsecurity.notreal"] + modules_overrides = ["dnscaa", "speculate"] + config_overrides = { + "scope": { + "report_distance": 1, + } + } + + async def setup_after_prep(self, module_test): + await module_test.mock_dns( + { + "blacklanternsecurity.notreal": { + "A": ["127.0.0.11"], + "CAA": [ + '0 iodef "https://caa.blacklanternsecurity.notreal"', + '128 iodef "mailto:caa@blacklanternsecurity.notreal"', + '0 issue "comodoca.com"', + '1 issue "digicert.com; cansignhttpexchanges=yes"', + '0 issuewild "letsencrypt.org"', + '128 issuewild 
"pki.goog; cansignhttpexchanges=yes"', + ], + }, + "caa.blacklanternsecurity.notreal": {"A": ["127.0.0.22"]}, + "comodoca.com": { + "A": ["127.0.0.33"], + "CAA": [ + '0 iodef "https://caa.comodoca.com"', + ], + }, + "caa.comodoca.com": {"A": ["127.0.0.33"]}, + "digicert.com": {"A": ["127.0.0.44"]}, + "letsencrypt.org": {"A": ["127.0.0.55"]}, + "pki.goog": {"A": ["127.0.0.66"]}, + } + ) + + def check(self, module_test, events): + assert any(e.type == "DNS_NAME" and e.data == "comodoca.com" for e in events), "Failed to detect CA DNS name" + assert any(e.type == "DNS_NAME" and e.data == "digicert.com" for e in events), "Failed to detect CA DNS name" + assert any(e.type == "DNS_NAME" and e.data == "letsencrypt.org" for e in events), ( + "Failed to detect CA DNS name" + ) + assert any(e.type == "DNS_NAME" and e.data == "pki.goog" for e in events), "Failed to detect CA DNS name" + assert any( + e.type == "URL_UNVERIFIED" and e.data == "https://caa.blacklanternsecurity.notreal/" for e in events + ), "Failed to detect URL" + assert any(e.type == "EMAIL_ADDRESS" and e.data == "caa@blacklanternsecurity.notreal" for e in events), ( + "Failed to detect email address" + ) + # make sure we're not checking CAA records for out-of-scope hosts + assert not any(str(e.host) == "caa.comodoca.com" for e in events) + + +class TestDNSCAAInScopeFalse(TestDNSCAA): + config_overrides = {"scope": {"report_distance": 3}, "modules": {"dnscaa": {"in_scope_only": False}}} + + def check(self, module_test, events): + assert any(str(e.host) == "caa.comodoca.com" for e in events) diff --git a/bbot/test/test_step_2/module_tests/test_module_dnscommonsrv.py b/bbot/test/test_step_2/module_tests/test_module_dnscommonsrv.py new file mode 100644 index 0000000000..53c6ff21be --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_dnscommonsrv.py @@ -0,0 +1,114 @@ +from .base import ModuleTestBase + + +class TestDNSCommonSRV(ModuleTestBase): + targets = ["media.www.test.api.blacklanternsecurity.com"] + whitelist = ["blacklanternsecurity.com"] + modules_overrides = ["dnscommonsrv", "speculate"] + config_overrides = {"dns": {"minimal": False}} + + async def setup_after_prep(self, module_test): + old_run_live = module_test.scan.helpers.run_live + + async def new_run_live(*command, check=False, text=True, **kwargs): + if "massdns" in command[:2]: + _input = [l async for l in kwargs["input"]] + if "_ldap._tcp.gc._msdcs.blacklanternsecurity.com" in _input: + yield """{"name":"_ldap._tcp.gc._msdcs.blacklanternsecurity.com.","type":"SRV","class":"IN","status":"NOERROR","rx_ts":1713974911725326170,"data":{"answers":[{"ttl":86400,"type":"SRV","class":"IN","name":"_ldap._tcp.gc._msdcs.blacklanternsecurity.com.","data":"10 10 1720 asdf.blacklanternsecurity.com."},{"ttl":86400,"type":"SRV","class":"IN","name":"_ldap._tcp.gc._msdcs.blacklanternsecurity.com.","data":"10 10 1720 asdf.blacklanternsecurity.com."}]},"flags":["rd","ra"],"resolver":"195.226.187.130:53","proto":"UDP"}""" + if "_ldap._tcp.gc._msdcs.api.blacklanternsecurity.com" in _input: + yield """{"name":"_ldap._tcp.gc._msdcs.api.blacklanternsecurity.com.","type":"SRV","class":"IN","status":"NOERROR","rx_ts":1713974911725326170,"data":{"answers":[{"ttl":86400,"type":"SRV","class":"IN","name":"_ldap._tcp.gc._msdcs.api.blacklanternsecurity.com.","data":"10 10 1720 asdf.blacklanternsecurity.com."},{"ttl":86400,"type":"SRV","class":"IN","name":"_ldap._tcp.gc._msdcs.blacklanternsecurity.com.","data":"10 10 1720 
asdf.blacklanternsecurity.com."}]},"flags":["rd","ra"],"resolver":"195.226.187.130:53","proto":"UDP"}""" + if "_ldap._tcp.gc._msdcs.test.api.blacklanternsecurity.com" in _input: + yield """{"name":"_ldap._tcp.gc._msdcs.test.api.blacklanternsecurity.com.","type":"SRV","class":"IN","status":"NOERROR","rx_ts":1713974911725326170,"data":{"answers":[{"ttl":86400,"type":"SRV","class":"IN","name":"_ldap._tcp.gc._msdcs.test.api.blacklanternsecurity.com.","data":"10 10 1720 asdf.blacklanternsecurity.com."},{"ttl":86400,"type":"SRV","class":"IN","name":"_ldap._tcp.gc._msdcs.blacklanternsecurity.com.","data":"10 10 1720 asdf.blacklanternsecurity.com."}]},"flags":["rd","ra"],"resolver":"195.226.187.130:53","proto":"UDP"}""" + if "_ldap._tcp.gc._msdcs.www.test.api.blacklanternsecurity.com" in _input: + yield """{"name":"_ldap._tcp.gc._msdcs.www.test.api.blacklanternsecurity.com.","type":"SRV","class":"IN","status":"NOERROR","rx_ts":1713974911725326170,"data":{"answers":[{"ttl":86400,"type":"SRV","class":"IN","name":"_ldap._tcp.gc._msdcs.www.test.api.blacklanternsecurity.com.","data":"10 10 1720 asdf.blacklanternsecurity.com."},{"ttl":86400,"type":"SRV","class":"IN","name":"_ldap._tcp.gc._msdcs.blacklanternsecurity.com.","data":"10 10 1720 asdf.blacklanternsecurity.com."}]},"flags":["rd","ra"],"resolver":"195.226.187.130:53","proto":"UDP"}""" + if "_ldap._tcp.gc._msdcs.media.www.test.api.blacklanternsecurity.com" in _input: + yield """{"name":"_ldap._tcp.gc._msdcs.www.test.api.blacklanternsecurity.com.","type":"SRV","class":"IN","status":"NOERROR","rx_ts":1713974911725326170,"data":{"answers":[{"ttl":86400,"type":"SRV","class":"IN","name":"_ldap._tcp.gc._msdcs.media.www.test.api.blacklanternsecurity.com.","data":"10 10 1720 asdf.blacklanternsecurity.com."},{"ttl":86400,"type":"SRV","class":"IN","name":"_ldap._tcp.gc._msdcs.blacklanternsecurity.com.","data":"10 10 1720 asdf.blacklanternsecurity.com."}]},"flags":["rd","ra"],"resolver":"195.226.187.130:53","proto":"UDP"}""" + else: + async for _ in old_run_live(*command, check=False, text=True, **kwargs): + yield _ + + module_test.monkeypatch.setattr(module_test.scan.helpers, "run_live", new_run_live) + + await module_test.mock_dns( + { + "blacklanternsecurity.com": {"A": ["1.2.3.4"]}, + "api.blacklanternsecurity.com": {"A": ["1.2.3.4"]}, + "test.api.blacklanternsecurity.com": {"A": ["1.2.3.4"]}, + "www.test.api.blacklanternsecurity.com": {"A": ["1.2.3.4"]}, + "media.www.test.api.blacklanternsecurity.com": {"A": ["1.2.3.4"]}, + "_ldap._tcp.gc._msdcs.blacklanternsecurity.com": {"SRV": ["0 100 3268 asdf.blacklanternsecurity.com"]}, + "_ldap._tcp.gc._msdcs.api.blacklanternsecurity.com": { + "SRV": ["0 100 3268 asdf.blacklanternsecurity.com"] + }, + "_ldap._tcp.gc._msdcs.test.api.blacklanternsecurity.com": { + "SRV": ["0 100 3268 asdf.blacklanternsecurity.com"] + }, + "_ldap._tcp.gc._msdcs.www.test.api.blacklanternsecurity.com": { + "SRV": ["0 100 3268 asdf.blacklanternsecurity.com"] + }, + "_ldap._tcp.gc._msdcs.media.www.test.api.blacklanternsecurity.com": { + "SRV": ["0 100 3268 asdf.blacklanternsecurity.com"] + }, + "asdf.blacklanternsecurity.com": {"A": ["1.2.3.5"]}, + "_msdcs.api.blacklanternsecurity.com": {"A": ["1.2.3.5"]}, + } + ) + + def check(self, module_test, events): + assert len(events) == 20 + assert 1 == len([e for e in events if e.type == "DNS_NAME" and e.data == "blacklanternsecurity.com"]) + assert 1 == len( + [ + e + for e in events + if e.type == "DNS_NAME" + and e.data == "_ldap._tcp.gc._msdcs.blacklanternsecurity.com" + and str(e.module) 
== "dnscommonsrv" + ] + ), "Failed to detect subdomain 1" + assert 1 == len( + [ + e + for e in events + if e.type == "DNS_NAME" + and e.data == "_ldap._tcp.gc._msdcs.api.blacklanternsecurity.com" + and str(e.module) == "dnscommonsrv" + ] + ), "Failed to detect subdomain 2" + assert 2 == len([e for e in events if e.type == "DNS_NAME" and e.data == "asdf.blacklanternsecurity.com"]), ( + "Failed to detect subdomain 3" + ) + assert 1 == len([e for e in events if e.type == "DNS_NAME" and e.data == "api.blacklanternsecurity.com"]), ( + "Failed to detect subdomain 4" + ) + assert 1 == len( + [e for e in events if e.type == "DNS_NAME" and e.data == "test.api.blacklanternsecurity.com"] + ), "Failed to detect subdomain 5" + assert 1 == len( + [e for e in events if e.type == "DNS_NAME" and e.data == "_msdcs.api.blacklanternsecurity.com"] + ), "Failed to detect subdomain 5" + assert 1 == len([e for e in events if e.type == "DNS_NAME" and e.data == "blacklanternsecurity.com"]), ( + "Failed to detect main domain" + ) + assert 1 == len( + [ + e + for e in events + if e.type == "RAW_DNS_RECORD" + and e.data["host"] == "_ldap._tcp.gc._msdcs.api.blacklanternsecurity.com" + and e.data["answer"] == "0 100 3268 asdf.blacklanternsecurity.com" + ] + ), "Failed to emit RAW_DNS_RECORD for _ldap._tcp.gc._msdcs.api.blacklanternsecurity.com" + assert 1 == len( + [ + e + for e in events + if e.type == "RAW_DNS_RECORD" + and e.data["host"] == "_ldap._tcp.gc._msdcs.blacklanternsecurity.com" + and e.data["answer"] == "0 100 3268 asdf.blacklanternsecurity.com" + ] + ), "Failed to emit RAW_DNS_RECORD for _ldap._tcp.gc._msdcs.blacklanternsecurity.com" + assert 2 == len([e for e in events if e.type == "RAW_DNS_RECORD"]) + assert 10 == len([e for e in events if e.type == "DNS_NAME"]) + assert 5 == len([e for e in events if e.type == "DNS_NAME_UNRESOLVED"]) + assert 5 == len([e for e in events if e.type == "DNS_NAME_UNRESOLVED" and str(e.module) == "speculate"]) diff --git a/bbot/test/test_step_2/module_tests/test_module_dnsdumpster.py b/bbot/test/test_step_2/module_tests/test_module_dnsdumpster.py new file mode 100644 index 0000000000..714a610c0c --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_dnsdumpster.py @@ -0,0 +1,18 @@ +from .base import ModuleTestBase + + +class TestDNSDumpster(ModuleTestBase): + async def setup_after_prep(self, module_test): + module_test.httpx_mock.add_response( + url="https://dnsdumpster.com", + headers={"Set-Cookie": "csrftoken=asdf"}, + content=b'\n\n \n\n \n \n\n \n \n \n \n DNSdumpster.com - dns recon and research, find and lookup dns records\n\n\n \n \n \n\n \n \n\n \n\n \n\n
[... truncated mock HTML: the DNSdumpster homepage (search form, FAQ, and marketing copy; contains no host data) ...]
\n \n \n \n \n\n\n\n\n\n\n \n \n \n \n\n\n\n\n\n\n\n\n\n \n\n', + ) + module_test.httpx_mock.add_response( + url="https://dnsdumpster.com/", + method="POST", + content=b'\n\n \n\n \n \n\n \n \n \n \n DNSdumpster.com - dns recon and research, find and lookup dns records\n\n\n \n \n \n\n \n \n\n \n\n \n\n
[... truncated mock HTML: the DNSdumpster results page, "Showing results for blacklanternsecurity.com", listing DNS servers (ns01/ns02.domaincontrol.com, GODADDY-DNS), an MX record (asdf.blacklanternsecurity.com.mail.protection.outlook.com), TXT/SPF records, and A host records for blacklanternsecurity.com (185.199.108.153, FASTLY) and asdf.blacklanternsecurity.com (multiple DIGITALOCEAN-ASN IPs) ...]
\n \n \n \n \n\n\n\n\n\n\n \n \n \n \n\n\n\n \n \n \n\n \n\n\n\n\n\n\n\n\n\n\n\n\n \n\n', + ) + + def check(self, module_test, events): + assert any(e.data == "asdf.blacklanternsecurity.com" for e in events), "Failed to detect subdomain" diff --git a/bbot/test/test_step_2/module_tests/test_module_dnsresolve.py b/bbot/test/test_step_2/module_tests/test_module_dnsresolve.py new file mode 100644 index 0000000000..7940b7aea6 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_dnsresolve.py @@ -0,0 +1,60 @@ +from .base import ModuleTestBase + + +class TestDNSREsolve(ModuleTestBase): + config_overrides = {"dns": {"minimal": False}, "scope": {"report_distance": 1}} + + async def setup_after_prep(self, module_test): + await module_test.mock_dns( + { + "blacklanternsecurity.com": { + "A": ["192.168.0.7"], + "AAAA": ["::1"], + "CNAME": ["www.blacklanternsecurity.com"], + }, + "www.blacklanternsecurity.com": {"A": ["192.168.0.8"]}, + } + ) + + def check(self, module_test, events): + assert 1 == len( + [ + e + for e in events + if e.type == "DNS_NAME" + and e.data == "blacklanternsecurity.com" + and "a-record" in e.tags + and "aaaa-record" in e.tags + and "cname-record" in e.tags + and "private-ip" in e.tags + and e.scope_distance == 0 + and "192.168.0.7" in e.resolved_hosts + and "::1" in e.resolved_hosts + and "www.blacklanternsecurity.com" in e.resolved_hosts + and e.dns_children + == {"A": {"192.168.0.7"}, "AAAA": {"::1"}, "CNAME": {"www.blacklanternsecurity.com"}} + ] + ) + assert 1 == len( + [ + e + for e in events + if e.type == "DNS_NAME" + and e.data == "www.blacklanternsecurity.com" + and "a-record" in e.tags + and "private-ip" in e.tags + and e.scope_distance == 0 + and "192.168.0.8" in e.resolved_hosts + and e.dns_children == {"A": {"192.168.0.8"}} + ] + ) + assert 1 == len( + [ + e + for e in events + if e.type == "IP_ADDRESS" + and e.data == "192.168.0.7" + and "private-ip" in e.tags + and e.scope_distance == 1 + ] + ) diff --git a/bbot/test/test_step_2/module_tests/test_module_dnstlsrpt.py b/bbot/test/test_step_2/module_tests/test_module_dnstlsrpt.py new file mode 100644 index 0000000000..61900b9189 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_dnstlsrpt.py @@ -0,0 +1,64 @@ +from .base import ModuleTestBase + +raw_smtp_tls_txt = '"v=TLSRPTv1; rua=mailto:tlsrpt@sub.blacklanternsecurity.notreal,mailto:test@on.thirdparty.com, https://tlspost.example.com;"' + + +class TestDNSTLSRPT(ModuleTestBase): + targets = ["blacklanternsecurity.notreal"] + modules_overrides = ["dnstlsrpt", "speculate"] + config_overrides = {"modules": {"dnstlsrpt": {"emit_raw_dns_records": True}}, "scope": {"report_distance": 1}} + + async def setup_after_prep(self, module_test): + await module_test.mock_dns( + { + "blacklanternsecurity.notreal": { + "A": ["127.0.0.11"], + }, + "_tls.blacklanternsecurity.notreal": { + "A": ["127.0.0.22"], + }, + "_smtp._tls.blacklanternsecurity.notreal": { + "A": ["127.0.0.33"], + "TXT": [raw_smtp_tls_txt], + }, + "_tls._smtp._tls.blacklanternsecurity.notreal": { + "A": ["127.0.0.44"], + }, + "_smtp._tls._smtp._tls.blacklanternsecurity.notreal": { + "TXT": [raw_smtp_tls_txt], + }, + "sub.blacklanternsecurity.notreal": { + "A": ["127.0.0.55"], + }, + } + ) + + def check(self, module_test, events): + assert any(e.type == "RAW_DNS_RECORD" and e.data["answer"] == raw_smtp_tls_txt for e in events), ( + "Failed to emit RAW_DNS_RECORD" + ) + assert any(e.type == "DNS_NAME" and e.data == "sub.blacklanternsecurity.notreal" for e in events), ( + "Failed to 
detect sub-domain" + ) + assert any( + e.type == "EMAIL_ADDRESS" and e.data == "tlsrpt@sub.blacklanternsecurity.notreal" for e in events + ), "Failed to detect email address" + assert any(e.type == "EMAIL_ADDRESS" and e.data == "test@on.thirdparty.com" for e in events), ( + "Failed to detect third party email address" + ) + assert any(e.type == "URL_UNVERIFIED" and e.data == "https://tlspost.example.com/" for e in events), ( + "Failed to detect third party URL" + ) + + +class TestDNSTLSRPTRecursiveRecursion(TestDNSTLSRPT): + config_overrides = { + "scope": {"report_distance": 1}, + "modules": {"dnstlsrpt": {"emit_raw_dns_records": True}}, + } + + def check(self, module_test, events): + assert not any( + e.type == "RAW_DNS_RECORD" and e.data["host"] == "_mta-sts._mta-sts.blacklanternsecurity.notreal" + for e in events + ), "Unwanted recursion occurring" diff --git a/bbot/test/test_step_2/module_tests/test_module_docker_pull.py b/bbot/test/test_step_2/module_tests/test_module_docker_pull.py new file mode 100644 index 0000000000..36d1da80a0 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_docker_pull.py @@ -0,0 +1,447 @@ +import io +import tarfile +from pathlib import Path + +from .base import ModuleTestBase + + +class TestDockerPull(ModuleTestBase): + modules_overrides = ["speculate", "dockerhub", "docker_pull"] + + async def setup_before_prep(self, module_test): + module_test.httpx_mock.add_response( + url="https://hub.docker.com/v2/users/blacklanternsecurity", + json={ + "id": "f90895d9cf484d9182c6dbbef2632329", + "uuid": "f90895d9-cf48-4d91-82c6-dbbef2632329", + "username": "blacklanternsecurity", + "full_name": "", + "location": "", + "company": "Black Lantern Security", + "profile_url": "https://github.com/blacklanternsecurity", + "date_joined": "2022-08-29T15:27:10.227081Z", + "gravatar_url": "", + "gravatar_email": "", + "type": "User", + }, + ) + module_test.httpx_mock.add_response( + url="https://hub.docker.com/v2/repositories/blacklanternsecurity?page_size=25&page=1", + json={ + "count": 2, + "next": None, + "previous": None, + "results": [ + { + "name": "helloworld", + "namespace": "blacklanternsecurity", + "repository_type": "image", + "status": 1, + "status_description": "active", + "description": "", + "is_private": False, + "star_count": 0, + "pull_count": 1, + "last_updated": "2021-12-20T17:19:58.88296Z", + "date_registered": "2021-12-20T17:19:58.507614Z", + "affiliation": "", + "media_types": ["application/vnd.docker.container.image.v1+json"], + "content_types": ["image"], + "categories": [], + }, + { + "name": "testimage", + "namespace": "blacklanternsecurity", + "repository_type": "image", + "status": 1, + "status_description": "active", + "description": "", + "is_private": False, + "star_count": 0, + "pull_count": 1, + "last_updated": "2022-01-10T20:16:46.170738Z", + "date_registered": "2022-01-07T13:28:59.756641Z", + "affiliation": "", + "media_types": ["application/vnd.docker.container.image.v1+json"], + "content_types": ["image"], + "categories": [], + }, + ], + }, + ) + module_test.httpx_mock.add_response( + url="https://registry-1.docker.io/v2/blacklanternsecurity/helloworld/tags/list", + json={ + "errors": [ + { + "code": "UNAUTHORIZED", + "message": "authentication required", + "detail": [ + { + "Type": "repository", + "Class": "", + "Name": "blacklanternsecurity/helloworld", + "Action": "pull", + } + ], + } + ] + }, + headers={ + "www-authenticate": 'Bearer 
realm="https://auth.docker.io/token",service="registry.docker.io",scope="blacklanternsecurity/helloworld:pull"' + }, + status_code=401, + ) + module_test.httpx_mock.add_response( + url="https://registry-1.docker.io/v2/blacklanternsecurity/testimage/tags/list", + json={ + "errors": [ + { + "code": "UNAUTHORIZED", + "message": "authentication required", + "detail": [ + { + "Type": "repository", + "Class": "", + "Name": "blacklanternsecurity/testimage", + "Action": "pull", + } + ], + } + ] + }, + headers={ + "www-authenticate": 'Bearer realm="https://auth.docker.io/token",service="registry.docker.io",scope="blacklanternsecurity/testimage:pull"' + }, + status_code=401, + ) + module_test.httpx_mock.add_response( + url="https://auth.docker.io/token?service=registry.docker.io&scope=blacklanternsecurity/helloworld:pull", + json={ + "token": "QWERTYUIOPASDFGHJKLZXCBNM", + }, + ) + module_test.httpx_mock.add_response( + url="https://auth.docker.io/token?service=registry.docker.io&scope=blacklanternsecurity/testimage:pull", + json={ + "token": "QWERTYUIOPASDFGHJKLZXCBNM", + }, + ) + module_test.httpx_mock.add_response( + url="https://registry-1.docker.io/v2/blacklanternsecurity/helloworld/tags/list", + json={ + "name": "blacklanternsecurity/helloworld", + "tags": [ + "dev", + "latest", + ], + }, + ) + module_test.httpx_mock.add_response( + url="https://registry-1.docker.io/v2/blacklanternsecurity/testimage/tags/list", + json={ + "name": "blacklanternsecurity/testimage", + "tags": [ + "dev", + "latest", + ], + }, + ) + module_test.httpx_mock.add_response( + url="https://registry-1.docker.io/v2/blacklanternsecurity/helloworld/manifests/latest", + json={ + "schemaVersion": 2, + "mediaType": "application/vnd.docker.distribution.manifest.v2+json", + "config": { + "mediaType": "application/vnd.docker.container.image.v1+json", + "size": 8614, + "digest": "sha256:a9910947b74a4f0606cfc8669ae8808d2c328beaee9e79f489dc17df14cd50b1", + }, + "layers": [ + { + "mediaType": "application/vnd.docker.image.rootfs.diff.tar.gzip", + "size": 29124181, + "digest": "sha256:8a1e25ce7c4f75e372e9884f8f7b1bedcfe4a7a7d452eb4b0a1c7477c9a90345", + }, + ], + }, + ) + module_test.httpx_mock.add_response( + url="https://registry-1.docker.io/v2/blacklanternsecurity/testimage/manifests/latest", + json={ + "mediaType": "application/vnd.docker.distribution.manifest.list.v2+json", + "schemaVersion": 2, + "manifests": [ + { + "mediaType": "application/vnd.docker.distribution.manifest.v2+json", + "platform": {"os": "linux", "architecture": "s390x"}, + "digest": "sha256:3e8a8b63afab946f4a64c1dc63563d91b2cb1e5eadadac1eff20231695c53d24", + "size": 1953, + }, + { + "mediaType": "application/vnd.docker.distribution.manifest.v2+json", + "platform": {"os": "linux", "architecture": "amd64"}, + "digest": "sha256:7c75331408141f1e3ef37eac7c45938fbfb0d421a86201ad45d2ab8b70ddd527", + "size": 1953, + }, + { + "mediaType": "application/vnd.docker.distribution.manifest.v2+json", + "platform": {"os": "linux", "architecture": "ppc64le"}, + "digest": "sha256:33d30a60996db4bc8158151ce516a8503cc56ce8d146e450e117a57ca5bf06e7", + "size": 1953, + }, + { + "mediaType": "application/vnd.docker.distribution.manifest.v2+json", + "platform": {"os": "linux", "architecture": "arm64", "variant": "v8"}, + "digest": "sha256:d0eacd0089db7309a5ce40ec3334fcdd4ce7d67324f1ccc4433dd4fae4a771a4", + "size": 1953, + }, + ], + }, + ) + module_test.httpx_mock.add_response( + 
url="https://registry-1.docker.io/v2/blacklanternsecurity/helloworld/blobs/sha256:a9910947b74a4f0606cfc8669ae8808d2c328beaee9e79f489dc17df14cd50b1", + json={ + "architecture": "amd64", + "config": { + "Env": [ + "PATH=/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin", + "LANG=C.UTF-8", + "GPG_KEY=QWERTYUIOPASDFGHJKLZXCBNM", + "PYTHON_VERSION=3.10.14", + "PYTHON_PIP_VERSION=23.0.1", + "PYTHON_SETUPTOOLS_VERSION=65.5.1", + "PYTHON_GET_PIP_URL=https://github.com/pypa/get-pip/raw/dbf0c85f76fb6e1ab42aa672ffca6f0a675d9ee4/public/get-pip.py", + "PYTHON_GET_PIP_SHA256=dfe9fd5c28dc98b5ac17979a953ea550cec37ae1b47a5116007395bfacff2ab9", + "LC_ALL=C.UTF-8", + "PIP_NO_CACHE_DIR=off", + ], + "Entrypoint": ["helloworld"], + "WorkingDir": "/root", + "ArgsEscaped": True, + "OnBuild": None, + }, + "created": "2024-03-24T03:46:29.788993495Z", + "history": [ + { + "created": "2024-03-12T01:21:01.529814652Z", + "created_by": "/bin/sh -c #(nop) ADD file:b86ae1c7ca3586d8feedcd9ff1b2b1e8ab872caf6587618f1da689045a5d7ae4 in / ", + }, + { + "created": "2024-03-12T01:21:01.866693306Z", + "created_by": '/bin/sh -c #(nop) CMD ["bash"]', + "empty_layer": True, + }, + { + "created": "2024-03-20T18:33:29Z", + "created_by": "ENV PATH=/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin", + "comment": "buildkit.dockerfile.v0", + "empty_layer": True, + }, + { + "created": "2024-03-20T18:33:29Z", + "created_by": "ENV LANG=C.UTF-8", + "comment": "buildkit.dockerfile.v0", + "empty_layer": True, + }, + { + "created": "2024-03-20T18:33:29Z", + "created_by": "RUN /bin/sh -c set -eux; \tapt-get update; \tapt-get install -y --no-install-recommends \t\tca-certificates \t\tnetbase \t\ttzdata \t; \trm -rf /var/lib/apt/lists/* # buildkit", + "comment": "buildkit.dockerfile.v0", + }, + { + "created": "2024-03-20T18:33:29Z", + "created_by": "ENV GPG_KEY=QWERTYUIOPASDFGHJKLZXCBNM", + "comment": "buildkit.dockerfile.v0", + "empty_layer": True, + }, + { + "created": "2024-03-20T18:33:29Z", + "created_by": "ENV PYTHON_VERSION=3.10.14", + "comment": "buildkit.dockerfile.v0", + "empty_layer": True, + }, + { + "created": "2024-03-20T18:33:29Z", + "created_by": 'RUN /bin/sh -c set -eux; \t\tsavedAptMark="$(apt-mark showmanual)"; \tapt-get update; \tapt-get install -y --no-install-recommends \t\tdpkg-dev \t\tgcc \t\tgnupg \t\tlibbluetooth-dev \t\tlibbz2-dev \t\tlibc6-dev \t\tlibdb-dev \t\tlibexpat1-dev \t\tlibffi-dev \t\tlibgdbm-dev \t\tliblzma-dev \t\tlibncursesw5-dev \t\tlibreadline-dev \t\tlibsqlite3-dev \t\tlibssl-dev \t\tmake \t\ttk-dev \t\tuuid-dev \t\twget \t\txz-utils \t\tzlib1g-dev \t; \t\twget -O python.tar.xz "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz"; \twget -O python.tar.xz.asc "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz.asc"; \tGNUPGHOME="$(mktemp -d)"; export GNUPGHOME; \tgpg --batch --keyserver hkps://keys.openpgp.org --recv-keys "$GPG_KEY"; \tgpg --batch --verify python.tar.xz.asc python.tar.xz; \tgpgconf --kill all; \trm -rf "$GNUPGHOME" python.tar.xz.asc; \tmkdir -p /usr/src/python; \ttar --extract --directory /usr/src/python --strip-components=1 --file python.tar.xz; \trm python.tar.xz; \t\tcd /usr/src/python; \tgnuArch="$(dpkg-architecture --query DEB_BUILD_GNU_TYPE)"; \t./configure \t\t--build="$gnuArch" \t\t--enable-loadable-sqlite-extensions \t\t--enable-optimizations \t\t--enable-option-checking=fatal \t\t--enable-shared \t\t--with-lto \t\t--with-system-expat 
\t\t--without-ensurepip \t; \tnproc="$(nproc)"; \tEXTRA_CFLAGS="$(dpkg-buildflags --get CFLAGS)"; \tLDFLAGS="$(dpkg-buildflags --get LDFLAGS)"; \tLDFLAGS="${LDFLAGS:--Wl},--strip-all"; \tmake -j "$nproc" \t\t"EXTRA_CFLAGS=${EXTRA_CFLAGS:-}" \t\t"LDFLAGS=${LDFLAGS:-}" \t\t"PROFILE_TASK=${PROFILE_TASK:-}" \t; \trm python; \tmake -j "$nproc" \t\t"EXTRA_CFLAGS=${EXTRA_CFLAGS:-}" \t\t"LDFLAGS=${LDFLAGS:--Wl},-rpath=\'\\$\\$ORIGIN/../lib\'" \t\t"PROFILE_TASK=${PROFILE_TASK:-}" \t\tpython \t; \tmake install; \t\tcd /; \trm -rf /usr/src/python; \t\tfind /usr/local -depth \t\t\\( \t\t\t\\( -type d -a \\( -name test -o -name tests -o -name idle_test \\) \\) \t\t\t-o \\( -type f -a \\( -name \'*.pyc\' -o -name \'*.pyo\' -o -name \'libpython*.a\' \\) \\) \t\t\\) -exec rm -rf \'{}\' + \t; \t\tldconfig; \t\tapt-mark auto \'.*\' > /dev/null; \tapt-mark manual $savedAptMark; \tfind /usr/local -type f -executable -not \\( -name \'*tkinter*\' \\) -exec ldd \'{}\' \';\' \t\t| awk \'/=>/ { so = $(NF-1); if (index(so, "/usr/local/") == 1) { next }; gsub("^/(usr/)?", "", so); printf "*%s\\n", so }\' \t\t| sort -u \t\t| xargs -r dpkg-query --search \t\t| cut -d: -f1 \t\t| sort -u \t\t| xargs -r apt-mark manual \t; \tapt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false; \trm -rf /var/lib/apt/lists/*; \t\tpython3 --version # buildkit', + "comment": "buildkit.dockerfile.v0", + }, + { + "created": "2024-03-20T18:33:29Z", + "created_by": 'RUN /bin/sh -c set -eux; \tfor src in idle3 pydoc3 python3 python3-config; do \t\tdst="$(echo "$src" | tr -d 3)"; \t\t[ -s "/usr/local/bin/$src" ]; \t\t[ ! -e "/usr/local/bin/$dst" ]; \t\tln -svT "$src" "/usr/local/bin/$dst"; \tdone # buildkit', + "comment": "buildkit.dockerfile.v0", + }, + { + "created": "2024-03-20T18:33:29Z", + "created_by": "ENV PYTHON_PIP_VERSION=23.0.1", + "comment": "buildkit.dockerfile.v0", + "empty_layer": True, + }, + { + "created": "2024-03-20T18:33:29Z", + "created_by": "ENV PYTHON_SETUPTOOLS_VERSION=65.5.1", + "comment": "buildkit.dockerfile.v0", + "empty_layer": True, + }, + { + "created": "2024-03-20T18:33:29Z", + "created_by": "ENV PYTHON_GET_PIP_URL=https://github.com/pypa/get-pip/raw/dbf0c85f76fb6e1ab42aa672ffca6f0a675d9ee4/public/get-pip.py", + "comment": "buildkit.dockerfile.v0", + "empty_layer": True, + }, + { + "created": "2024-03-20T18:33:29Z", + "created_by": "ENV PYTHON_GET_PIP_SHA256=dfe9fd5c28dc98b5ac17979a953ea550cec37ae1b47a5116007395bfacff2ab9", + "comment": "buildkit.dockerfile.v0", + "empty_layer": True, + }, + { + "created": "2024-03-20T18:33:29Z", + "created_by": 'RUN /bin/sh -c set -eux; \t\tsavedAptMark="$(apt-mark showmanual)"; \tapt-get update; \tapt-get install -y --no-install-recommends wget; \t\twget -O get-pip.py "$PYTHON_GET_PIP_URL"; \techo "$PYTHON_GET_PIP_SHA256 *get-pip.py" | sha256sum -c -; \t\tapt-mark auto \'.*\' > /dev/null; \t[ -z "$savedAptMark" ] || apt-mark manual $savedAptMark > /dev/null; \tapt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false; \trm -rf /var/lib/apt/lists/*; \t\texport PYTHONDONTWRITEBYTECODE=1; \t\tpython get-pip.py \t\t--disable-pip-version-check \t\t--no-cache-dir \t\t--no-compile \t\t"pip==$PYTHON_PIP_VERSION" \t\t"setuptools==$PYTHON_SETUPTOOLS_VERSION" \t; \trm -f get-pip.py; \t\tpip --version # buildkit', + "comment": "buildkit.dockerfile.v0", + }, + { + "created": "2024-03-20T18:33:29Z", + "created_by": 'CMD ["python3"]', + "comment": "buildkit.dockerfile.v0", + "empty_layer": True, + }, + { + "created": "2024-03-24T03:45:39.322168741Z", + 
"created_by": "ENV LANG=C.UTF-8", + "comment": "buildkit.dockerfile.v0", + "empty_layer": True, + }, + { + "created": "2024-03-24T03:45:39.322168741Z", + "created_by": "ENV LC_ALL=C.UTF-8", + "comment": "buildkit.dockerfile.v0", + "empty_layer": True, + }, + { + "created": "2024-03-24T03:45:39.322168741Z", + "created_by": "ENV PIP_NO_CACHE_DIR=off", + "comment": "buildkit.dockerfile.v0", + "empty_layer": True, + }, + { + "created": "2024-03-24T03:45:39.322168741Z", + "created_by": "WORKDIR /usr/src/helloworld", + "comment": "buildkit.dockerfile.v0", + }, + { + "created": "2024-03-24T03:45:52.226201188Z", + "created_by": "RUN /bin/sh -c apt-get update && apt-get install -y openssl gcc git make unzip curl wget vim nano sudo # buildkit", + "comment": "buildkit.dockerfile.v0", + }, + { + "created": "2024-03-24T03:45:52.391597947Z", + "created_by": "COPY . . # buildkit", + "comment": "buildkit.dockerfile.v0", + }, + { + "created": "2024-03-24T03:46:29.76589069Z", + "created_by": "RUN /bin/sh -c pip install . # buildkit", + "comment": "buildkit.dockerfile.v0", + }, + { + "created": "2024-03-24T03:46:29.788993495Z", + "created_by": "WORKDIR /root", + "comment": "buildkit.dockerfile.v0", + }, + { + "created": "2024-03-24T03:46:29.788993495Z", + "created_by": 'ENTRYPOINT ["helloworld"]', + "comment": "buildkit.dockerfile.v0", + "empty_layer": True, + }, + ], + "os": "linux", + "rootfs": { + "type": "layers", + "diff_ids": [ + "sha256:a483da8ab3e941547542718cacd3258c6c705a63e94183c837c9bc44eb608999", + "sha256:c8f253aef5606f6716778771171c3fdf6aa135b76a5fa8bf66ba45c12c15b540", + "sha256:b4a9dcc697d250c7be53887bb8e155c8f7a06f9c63a3aa627c647bb4a426d3f0", + "sha256:120fda24c420b4e5d52f1c288b35c75b07969057bce41ec34cfb05606b2d7c11", + "sha256:c2287f03e33f4896b2720f0cb64e6b6050759a3eb5914e531e98fc3499b4e687", + "sha256:afe6e55a5cf240c050a4d2b72ec7b7d009a131cba8fe2753e453a8e62ef7e45c", + "sha256:ae6df275ba2e8f40c598e30588afe43f6bfa92e4915e8450b77cb5db5c89dfd5", + "sha256:621ab22fb386a9e663178637755b651beddc0eb4762804e74d8996cce0ddd441", + "sha256:4c534ad16bd2df668c0b8f637616517746ede530ba8546d85f28772bc748e06f", + "sha256:5f70bf18a086007016e948b04aed3b82103a36bea41755b6cddfaf10ace3c6ef", + ], + }, + }, + ) + module_test.httpx_mock.add_response( + url="https://registry-1.docker.io/v2/blacklanternsecurity/testimage/manifests/sha256:7c75331408141f1e3ef37eac7c45938fbfb0d421a86201ad45d2ab8b70ddd527", + json={ + "name": "testimage", + "tag": "latest", + "architecture": "amd64", + "fsLayers": [ + {"blobSum": "sha256:5f70bf18a086007016e948b04aed3b82103a36bea41755b6cddfaf10ace3c6ef"}, + ], + "history": [ + { + "v1Compatibility": '{"id":"e45a5af57b00862e5ef5782a9925979a02ba2b12dff832fd0991335f4a11e5c5","parent":"31cbccb51277105ba3ae35ce33c22b69c9e3f1002e76e4c736a2e8ebff9d7b5d","created":"2014-12-31T22:57:59.178729048Z","container":"27b45f8fb11795b52e9605b686159729b0d9ca92f76d40fb4f05a62e19c46b4f","container_config":{"Hostname":"8ce6509d66e2","Domainname":"","User":"","Memory":0,"MemorySwap":0,"CpuShares":0,"Cpuset":"","AttachStdin":false,"AttachStdout":false,"AttachStderr":false,"PortSpecs":null,"ExposedPorts":null,"Tty":false,"OpenStdin":false,"StdinOnce":false,"Env":["PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"],"Cmd":["/bin/sh","-c","#(nop) CMD 
[/hello]"],"Image":"31cbccb51277105ba3ae35ce33c22b69c9e3f1002e76e4c736a2e8ebff9d7b5d","Volumes":null,"WorkingDir":"","Entrypoint":null,"NetworkDisabled":false,"MacAddress":"","OnBuild":[],"SecurityOpt":null,"Labels":null},"docker_version":"1.4.1","config":{"Hostname":"8ce6509d66e2","Domainname":"","User":"","Memory":0,"MemorySwap":0,"CpuShares":0,"Cpuset":"","AttachStdin":false,"AttachStdout":false,"AttachStderr":false,"PortSpecs":null,"ExposedPorts":null,"Tty":false,"OpenStdin":false,"StdinOnce":false,"Env":["PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"],"Cmd":["/hello"],"Image":"31cbccb51277105ba3ae35ce33c22b69c9e3f1002e76e4c736a2e8ebff9d7b5d","Volumes":null,"WorkingDir":"","Entrypoint":null,"NetworkDisabled":false,"MacAddress":"","OnBuild":[],"SecurityOpt":null,"Labels":null},"architecture":"amd64","os":"linux","Size":0}\n' + }, + { + "v1Compatibility": '{"id":"e45a5af57b00862e5ef5782a9925979a02ba2b12dff832fd0991335f4a11e5c5","parent":"31cbccb51277105ba3ae35ce33c22b69c9e3f1002e76e4c736a2e8ebff9d7b5d","created":"2014-12-31T22:57:59.178729048Z","container":"27b45f8fb11795b52e9605b686159729b0d9ca92f76d40fb4f05a62e19c46b4f","container_config":{"Hostname":"8ce6509d66e2","Domainname":"","User":"","Memory":0,"MemorySwap":0,"CpuShares":0,"Cpuset":"","AttachStdin":false,"AttachStdout":false,"AttachStderr":false,"PortSpecs":null,"ExposedPorts":null,"Tty":false,"OpenStdin":false,"StdinOnce":false,"Env":["PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"],"Cmd":["/bin/sh","-c","#(nop) CMD [/hello]"],"Image":"31cbccb51277105ba3ae35ce33c22b69c9e3f1002e76e4c736a2e8ebff9d7b5d","Volumes":null,"WorkingDir":"","Entrypoint":null,"NetworkDisabled":false,"MacAddress":"","OnBuild":[],"SecurityOpt":null,"Labels":null},"docker_version":"1.4.1","config":{"Hostname":"8ce6509d66e2","Domainname":"","User":"","Memory":0,"MemorySwap":0,"CpuShares":0,"Cpuset":"","AttachStdin":false,"AttachStdout":false,"AttachStderr":false,"PortSpecs":null,"ExposedPorts":null,"Tty":false,"OpenStdin":false,"StdinOnce":false,"Env":["PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"],"Cmd":["/hello"],"Image":"31cbccb51277105ba3ae35ce33c22b69c9e3f1002e76e4c736a2e8ebff9d7b5d","Volumes":null,"WorkingDir":"","Entrypoint":null,"NetworkDisabled":false,"MacAddress":"","OnBuild":[],"SecurityOpt":null,"Labels":null},"architecture":"amd64","os":"linux","Size":0}\n' + }, + ], + "schemaVersion": 1, + "signatures": [ + { + "header": { + "jwk": { + "crv": "P-256", + "kid": "OD6I:6DRK:JXEJ:KBM4:255X:NSAA:MUSF:E4VM:ZI6W:CUN2:L4Z6:LSF4", + "kty": "EC", + "x": "3gAwX48IQ5oaYQAYSxor6rYYc_6yjuLCjtQ9LUakg4A", + "y": "t72ge6kIA1XOjqjVoEOiPPAURltJFBMGDSQvEGVB010", + }, + "alg": "ES256", + }, + "signature": "XREm0L8WNn27Ga_iE_vRnTxVMhhYY0Zst_FfkKopg6gWSoTOZTuW4rK0fg_IqnKkEKlbD83tD46LKEGi5aIVFg", + "protected": "eyJmb3JtYXRMZW5ndGgiOjY2MjgsImZvcm1hdFRhaWwiOiJDbjAiLCJ0aW1lIjoiMjAxNS0wNC0wOFQxODo1Mjo1OVoifQ", + } + ], + }, + ) + temp_path = Path("/tmp/.bbot_test") + tar_path = temp_path / "docker_pull_test.tar.gz" + with tarfile.open(tar_path, "w:gz") as tar: + file_io = io.BytesIO("This is a test file".encode()) + file_info = tarfile.TarInfo(name="file.txt") + file_info.size = len(file_io.getvalue()) + file_io.seek(0) + tar.addfile(file_info, file_io) + with open(tar_path, "rb") as file: + layer_file = file.read() + module_test.httpx_mock.add_response( + url="https://registry-1.docker.io/v2/blacklanternsecurity/helloworld/blobs/sha256:8a1e25ce7c4f75e372e9884f8f7b1bedcfe4a7a7d452eb4b0a1c7477c9a90345", + 
content=layer_file, + ) + module_test.httpx_mock.add_response( + url="https://registry-1.docker.io/v2/blacklanternsecurity/testimage/blobs/sha256:5f70bf18a086007016e948b04aed3b82103a36bea41755b6cddfaf10ace3c6ef", + content=layer_file, + ) + + def check(self, module_test, events): + filesystem_events = [ + e + for e in events + if e.type == "FILESYSTEM" + and ( + "blacklanternsecurity_helloworld_latest.tar" in e.data["path"] + or "blacklanternsecurity_testimage_latest.tar" in e.data["path"] + ) + and "docker" in e.tags + and e.scope_distance == 1 + ] + assert 2 == len(filesystem_events), "Failed to download docker images" + filesystem_event = filesystem_events[0] + folder = Path(filesystem_event.data["path"]) + assert folder.is_file(), "Destination tar doesn't exist" diff --git a/bbot/test/test_step_2/module_tests/test_module_dockerhub.py b/bbot/test/test_step_2/module_tests/test_module_dockerhub.py new file mode 100644 index 0000000000..67b75ac6e3 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_dockerhub.py @@ -0,0 +1,96 @@ +from .base import ModuleTestBase + + +class TestDockerhub(ModuleTestBase): + modules_overrides = ["dockerhub", "speculate"] + + async def setup_before_prep(self, module_test): + module_test.httpx_mock.add_response( + url="https://hub.docker.com/v2/users/blacklanternsecurity", + json={ + "id": "f90895d9cf484d9182c6dbbef2632329", + "uuid": "f90895d9-cf48-4d91-82c6-dbbef2632329", + "username": "blacklanternsecurity", + "full_name": "", + "location": "", + "company": "Black Lantern Security", + "profile_url": "https://github.com/blacklanternsecurity", + "date_joined": "2022-08-29T15:27:10.227081Z", + "gravatar_url": "", + "gravatar_email": "", + "type": "User", + }, + ) + module_test.httpx_mock.add_response( + url="https://hub.docker.com/v2/repositories/blacklanternsecurity?page_size=25&page=1", + json={ + "count": 2, + "next": None, + "previous": None, + "results": [ + { + "name": "helloworld", + "namespace": "blacklanternsecurity", + "repository_type": "image", + "status": 1, + "status_description": "active", + "description": "", + "is_private": False, + "star_count": 0, + "pull_count": 1, + "last_updated": "2021-12-20T17:19:58.88296Z", + "date_registered": "2021-12-20T17:19:58.507614Z", + "affiliation": "", + "media_types": ["application/vnd.docker.container.image.v1+json"], + "content_types": ["image"], + "categories": [], + }, + { + "name": "testimage", + "namespace": "blacklanternsecurity", + "repository_type": "image", + "status": 1, + "status_description": "active", + "description": "", + "is_private": False, + "star_count": 0, + "pull_count": 1, + "last_updated": "2022-01-10T20:16:46.170738Z", + "date_registered": "2022-01-07T13:28:59.756641Z", + "affiliation": "", + "media_types": ["application/vnd.docker.container.image.v1+json"], + "content_types": ["image"], + "categories": [], + }, + ], + }, + ) + + def check(self, module_test, events): + assert 1 == len( + [ + e + for e in events + if e.type == "SOCIAL" + and e.data["platform"] == "docker" + and e.data["profile_name"] == "blacklanternsecurity" + ] + ), "Failed to find blacklanternsecurity docker" + assert 1 == len( + [ + e + for e in events + if e.type == "CODE_REPOSITORY" + and e.data["url"] == "https://hub.docker.com/r/blacklanternsecurity/helloworld" + and "docker" in e.tags + ] + ), "Failed to find helloworld docker repo" + assert 1 == len( + [ + e + for e in events + if e.type == "CODE_REPOSITORY" + and e.data["url"] == "https://hub.docker.com/r/blacklanternsecurity/testimage" + and 
"docker" in e.tags + ] + ), "Failed to find testimage docker repo" diff --git a/bbot/test/test_step_2/module_tests/test_module_dotnetnuke.py b/bbot/test/test_step_2/module_tests/test_module_dotnetnuke.py new file mode 100644 index 0000000000..8035316de7 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_dotnetnuke.py @@ -0,0 +1,176 @@ +import asyncio +import re +from .base import ModuleTestBase +from werkzeug.wrappers import Response + + +dotnetnuke_http_response = """ + + + + + + + + + +""" + + +class TestDotnetnuke(ModuleTestBase): + targets = ["http://127.0.0.1:8888"] + modules_overrides = ["httpx", "dotnetnuke"] + config_overrides = {"interactsh_disable": "True"} + + exploit_probe = { + "Cookie": r'DNNPersonalization=<profile><item key="name1: key1" type="System.Data.Services.Internal.ExpandedWrapper`2[[DotNetNuke.Common.Utilities.FileSystemUtils],[System.Windows.Data.ObjectDataProvider, PresentationFramework, Version=4.0.0.0, Culture=neutral, PublicKeyToken=31bf3856ad364e35]], System.Data.Services, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089"><ExpandedWrapperOfFileSystemUtilsObjectDataProvider xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"><ExpandedElement/><ProjectedProperty0><MethodName>WriteFile</MethodName><MethodParameters><anyType xsi:type="xsd:string">C:\Windows\win.ini</anyType></MethodParameters><ObjectInstance xsi:type="FileSystemUtils"></ObjectInstance></ProjectedProperty0></ExpandedWrapperOfFileSystemUtilsObjectDataProvider></item></profile>' + } + + exploit_response = """ + ; for 16-bit app support +[fonts] +[extensions] +[mci extensions] +[files] +[Mail] +MAPI=1 +""" + + webconfig_response = """ + <?xml version="1.0" encoding="utf-8"?> +<configuration> + <!-- register local configuration handlers --> + <configSections> + <sectionGroup name="dotnetnuke"> + </sectionGroup> + </configSections> +</configuration> + """ + + async def setup_before_prep(self, module_test): + # Simulate DotNetNuke Instance + expect_args = {"method": "GET", "uri": "/"} + respond_args = {"response_data": dotnetnuke_http_response} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + # DNNPersonalization Deserialization Detection + expect_args = {"method": "GET", "uri": "/__", "headers": self.exploit_probe} + respond_args = {"response_data": self.exploit_response} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + # NewsArticlesSlider ImageHandler.ashx File Read + expect_args = { + "method": "GET", + "uri": "/DesktopModules/dnnUI_NewsArticlesSlider/ImageHandler.ashx", + "query_string": b"img=~/web.config", + } + respond_args = {"response_data": self.webconfig_response} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + # DNNArticle GetCSS.ashx File Read + expect_args = { + "method": "GET", + "uri": "/DesktopModules/DNNArticle/getcss.ashx", + "query_string": b"CP=%2fweb.config&smid=512&portalid=3", + } + respond_args = {"response_data": self.webconfig_response} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + # InstallWizard SuperUser Privilege Escalation + expect_args = {"method": "GET", "uri": "/Install/InstallWizard.aspx", "query_string": b"__viewstate=1"} + respond_args = {"status": 500} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + expect_args = {"method": "GET", "uri": "/Install/InstallWizard.aspx"} + 
respond_args = {"status": 200} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + def check(self, module_test, events): + dnn_technology_detection = False + dnn_personalization_deserialization_detection = False + dnn_getcss_fileread_detection = False + dnn_imagehandler_fileread_detection = False + dnn_installwizard_privesc_detection = False + + for e in events: + if e.type == "TECHNOLOGY" and "DotNetNuke" in e.data["technology"]: + dnn_technology_detection = True + + if ( + e.type == "VULNERABILITY" + and "DotNetNuke Personalization Cookie Deserialization" in e.data["description"] + ): + dnn_personalization_deserialization_detection = True + + if ( + e.type == "VULNERABILITY" + and "DotNetNuke DNNArticle Module GetCSS.ashx Arbitrary File Read" in e.data["description"] + ): + dnn_getcss_fileread_detection = True + + if ( + e.type == "VULNERABILITY" + and "DotNetNuke dnnUI_NewsArticlesSlider Module Arbitrary File Read" in e.data["description"] + ): + dnn_imagehandler_fileread_detection = True + + if ( + e.type == "VULNERABILITY" + and "DotNetNuke InstallWizard SuperUser Privilege Escalation" in e.data["description"] + ): + dnn_installwizard_privesc_detection = True + + assert dnn_technology_detection, "DNN Technology Detection Failed" + assert dnn_personalization_deserialization_detection, "DNN Personalization Deserialization Detection Failed" + assert dnn_getcss_fileread_detection, "getcss.ashx File Read Detection Failed" + assert dnn_imagehandler_fileread_detection, "imagehandler.ashx File Read Detection Failed" + assert dnn_installwizard_privesc_detection, "InstallWizard privesc Detection Failed" + + +def extract_subdomain_tag(data): + pattern = r"([a-z0-9]{4})\.fakedomain\.fakeinteractsh\.com" + match = re.search(pattern, data) + if match: + return match.group(1) + + +class TestDotnetnuke_blindssrf(ModuleTestBase): + targets = ["http://127.0.0.1:8888"] + module_name = "dotnetnuke" + modules_overrides = ["httpx", "dotnetnuke"] + + def request_handler(self, request): + subdomain_tag = None + subdomain_tag = extract_subdomain_tag(request.full_path) + if subdomain_tag: + self.interactsh_mock_instance.mock_interaction(subdomain_tag) + return Response("alive", status=200) + + async def setup_before_prep(self, module_test): + self.interactsh_mock_instance = module_test.mock_interactsh("dotnetnuke_blindssrf") + module_test.monkeypatch.setattr( + module_test.scan.helpers, "interactsh", lambda *args, **kwargs: self.interactsh_mock_instance + ) + + async def setup_after_prep(self, module_test): + # Simulate DotNetNuke Instance + expect_args = {"method": "GET", "uri": "/"} + respond_args = {"response_data": dotnetnuke_http_response} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + expect_args = re.compile("/") + module_test.set_expect_requests_handler(expect_args=expect_args, request_handler=self.request_handler) + + def check(self, module_test, events): + dnn_technology_detection = False + dnn_dnnimagehandler_blindssrf = False + + for e in events: + if e.type == "TECHNOLOGY" and "DotNetNuke" in e.data["technology"]: + dnn_technology_detection = True + + if e.type == "VULNERABILITY" and "DotNetNuke Blind-SSRF (CVE 2017-0929)" in e.data["description"]: + dnn_dnnimagehandler_blindssrf = True + + assert dnn_technology_detection, "DNN Technology Detection Failed" + assert dnn_dnnimagehandler_blindssrf, "dnnimagehandler.ashx Blind SSRF Detection Failed" diff --git 
a/bbot/test/test_step_2/module_tests/test_module_emailformat.py b/bbot/test/test_step_2/module_tests/test_module_emailformat.py
new file mode 100644
index 0000000000..fdac8cb42a
--- /dev/null
+++ b/bbot/test/test_step_2/module_tests/test_module_emailformat.py
@@ -0,0 +1,12 @@
+from .base import ModuleTestBase
+
+
+class TestEmailFormat(ModuleTestBase):
+    async def setup_before_prep(self, module_test):
+        module_test.httpx_mock.add_response(
+            url="https://www.email-format.com/d/blacklanternsecurity.com/",
+            text="<p>info@blacklanternsecurity.com</p>",
+        )
+
+    def check(self, module_test, events):
+        assert any(e.data == "info@blacklanternsecurity.com" for e in events), "Failed to detect email"
diff --git a/bbot/test/test_step_2/module_tests/test_module_emails.py b/bbot/test/test_step_2/module_tests/test_module_emails.py
new file mode 100644
index 0000000000..44b9ab0787
--- /dev/null
+++ b/bbot/test/test_step_2/module_tests/test_module_emails.py
@@ -0,0 +1,23 @@
+from .base import ModuleTestBase
+
+
+class TestEmails(ModuleTestBase):
+    modules_overrides = ["emails", "emailformat", "skymem"]
+
+    async def setup_before_prep(self, module_test):
+        module_test.httpx_mock.add_response(
+            url="https://www.email-format.com/d/blacklanternsecurity.com/",
+            text="<p>info@blacklanternsecurity.com</p>",
+        )
+        module_test.httpx_mock.add_response(
+            url="https://www.skymem.info/srch?q=blacklanternsecurity.com",
+            text="<p>info@blacklanternsecurity.com</p>",
+        )
+
+    def check(self, module_test, events):
+        assert 2 == len([e for e in events if e.data == "info@blacklanternsecurity.com"])
+        email_file = module_test.scan.home / "emails.txt"
+        emails = open(email_file).read().splitlines()
+        # make sure deduping works as intended
+        assert len(emails) == 1
+        assert set(emails) == {"info@blacklanternsecurity.com"}
diff --git a/bbot/test/test_step_2/module_tests/test_module_excavate.py b/bbot/test/test_step_2/module_tests/test_module_excavate.py
new file mode 100644
index 0000000000..1e1e8db436
--- /dev/null
+++ b/bbot/test/test_step_2/module_tests/test_module_excavate.py
@@ -0,0 +1,1045 @@
+from bbot.modules.base import BaseModule
+from .base import ModuleTestBase, tempwordlist
+
+from bbot.modules.internal.excavate import ExcavateRule
+
+from pathlib import Path
+import yara
+
+
+class TestExcavate(ModuleTestBase):
+    targets = ["http://127.0.0.1:8888/", "test.notreal", "http://127.0.0.1:8888/subdir/links.html"]
+    modules_overrides = ["excavate", "httpx"]
+    config_overrides = {"web": {"spider_distance": 1, "spider_depth": 1}}
+
+    async def setup_before_prep(self, module_test):
+        response_data = """
+        ftp://ftp.test.notreal
+        \\nhttps://www1.test.notreal
+        \\x3dhttps://www2.test.notreal
+        %0ahttps://www3.test.notreal
+        \\u000ahttps://www4.test.notreal:
+        \nwww5.test.notreal
+        \\x3dwww6.test.notreal
+        %0awww7.test.notreal
+        \\u000awww8.test.notreal
+        # these ones shouldn't get emitted because they're .js (url_extension_httpx_only)
+        <a href="/a_relative.js">
+        <link href="/link_relative.js">
+        # these ones should
+        <a href="/a_relative.txt">
+        <link href="/link_relative.txt">
+        <a href="mailto:bob@evilcorp.org?subject=help">Help</a>
+        <li class="toctree-l3"><a class="reference internal" href="miscellaneous.html#x50-uart-driver">16x50 UART Driver</a></li>
+        """
+        expect_args = {"method": "GET", "uri": "/"}
+        respond_args = {"response_data": response_data}
+        module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args)
+
+        # verify relative-path a-tag parsing is working correctly
+
+        expect_args = 
{"method": "GET", "uri": "/subdir/links.html"} + respond_args = {"response_data": "<a href='../relative.html'/><a href='/2/depth2.html'/>"} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + expect_args = {"method": "GET", "uri": "/relative.html"} + respond_args = {"response_data": "<a href='/distance2.html'/>"} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + module_test.httpserver.no_handler_status_code = 404 + + def check(self, module_test, events): + event_data = [e.data for e in events] + assert "https://www1.test.notreal/" in event_data + assert "https://www2.test.notreal/" in event_data + assert "https://www3.test.notreal/" in event_data + assert "https://www4.test.notreal/" in event_data + assert "www1.test.notreal" in event_data + assert "www2.test.notreal" in event_data + assert "www3.test.notreal" in event_data + assert "www4.test.notreal" in event_data + assert "www5.test.notreal" in event_data + assert "www6.test.notreal" in event_data + assert "www7.test.notreal" in event_data + assert "www8.test.notreal" in event_data + assert "http://127.0.0.1:8888/a_relative.js" not in event_data + assert "http://127.0.0.1:8888/link_relative.js" not in event_data + assert "http://127.0.0.1:8888/a_relative.txt" in event_data + assert "http://127.0.0.1:8888/link_relative.txt" in event_data + + assert "nhttps://www1.test.notreal/" not in event_data + assert "x3dhttps://www2.test.notreal/" not in event_data + assert "a2https://www3.test.notreal/" not in event_data + assert "uac20https://www4.test.notreal/" not in event_data + + assert any( + e.type == "FINDING" and e.data.get("description", "") == "Non-HTTP URI: ftp://ftp.test.notreal" + for e in events + ) + assert any( + e.type == "PROTOCOL" + and e.data.get("protocol", "") == "FTP" + and e.data.get("host", "") == "ftp.test.notreal" + for e in events + ) + + assert any( + e.type == "URL_UNVERIFIED" + and e.data == "http://127.0.0.1:8888/relative.html" + and "spider-max" not in e.tags + and "endpoint" in e.tags + and "extension-html" in e.tags + and "in-scope" in e.tags + and e.scope_distance == 0 + for e in events + ) + + assert any( + e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.1:8888/2/depth2.html" and "spider-max" in e.tags + for e in events + ) + + assert any( + e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.1:8888/distance2.html" and "spider-max" in e.tags + for e in events + ) + + assert any( + e.type == "URL_UNVERIFIED" and "miscellaneous.html" in e.data and "x50-uart-driver" not in e.data + for e in events + ) + + +class TestExcavate2(TestExcavate): + targets = ["http://127.0.0.1:8888/", "test.notreal", "http://127.0.0.1:8888/subdir/"] + + async def setup_before_prep(self, module_test): + # root relative + expect_args = {"method": "GET", "uri": "/rootrelative.html"} + respond_args = {"response_data": "alive"} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + # page relative + expect_args = {"method": "GET", "uri": "/subdir/pagerelative.html"} + respond_args = {"response_data": "alive"} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + expect_args = {"method": "GET", "uri": "/subdir/"} + respond_args = { + "response_data": """ + <a href='/rootrelative.html'>root relative</a> + <a href='pagerelative1.html'>page relative 1</a> + <a href='./pagerelative2.html'>page relative 2</a> + """ + } + module_test.set_expect_requests(expect_args=expect_args, 
respond_args=respond_args)
+
+        module_test.httpserver.no_handler_status_code = 404
+
+    def check(self, module_test, events):
+        root_relative_detection = False
+        page_relative_detection_1 = False
+        page_relative_detection_2 = False
+        root_page_confusion_1 = False
+        root_page_confusion_2 = False
+
+        for e in events:
+            if e.type == "URL_UNVERIFIED":
+                # these cases represent the desired behavior for parsing relative links
+                if e.data == "http://127.0.0.1:8888/rootrelative.html":
+                    root_relative_detection = True
+                if e.data == "http://127.0.0.1:8888/subdir/pagerelative1.html":
+                    page_relative_detection_1 = True
+                if e.data == "http://127.0.0.1:8888/subdir/pagerelative2.html":
+                    page_relative_detection_2 = True
+
+                # these cases indicate that excavate parsed the relative links incorrectly
+                if e.data == "http://127.0.0.1:8888/pagerelative.html":
+                    root_page_confusion_1 = True
+                if e.data == "http://127.0.0.1:8888/subdir/rootrelative.html":
+                    root_page_confusion_2 = True
+
+        assert root_relative_detection, "Failed to properly excavate root-relative URL"
+        assert page_relative_detection_1, "Failed to properly excavate page-relative URL"
+        assert page_relative_detection_2, "Failed to properly excavate page-relative URL"
+        assert not root_page_confusion_1, "Incorrectly detected page-relative URL"
+        assert not root_page_confusion_2, "Incorrectly detected root-relative URL"
+
+
+class TestExcavateRedirect(TestExcavate):
+    targets = ["http://127.0.0.1:8888/", "http://127.0.0.1:8888/relative/", "http://127.0.0.1:8888/nonhttpredirect/"]
+    config_overrides = {"scope": {"report_distance": 1}}
+
+    async def setup_before_prep(self, module_test):
+        # absolute redirect
+        module_test.httpserver.expect_request("/").respond_with_data(
+            "", status=302, headers={"Location": "https://www.test.notreal/yep"}
+        )
+        module_test.httpserver.expect_request("/relative/").respond_with_data(
+            "", status=302, headers={"Location": "./owa/"}
+        )
+        module_test.httpserver.expect_request("/relative/owa/").respond_with_data(
+            "ftp://127.0.0.1:2121\nsmb://127.0.0.1\nssh://127.0.0.2"
+        )
+        module_test.httpserver.expect_request("/nonhttpredirect/").respond_with_data(
+            "", status=302, headers={"Location": "awb://127.0.0.1:7777"}
+        )
+        module_test.httpserver.no_handler_status_code = 404
+
+    def check(self, module_test, events):
+        assert 1 == len(
+            [
+                e
+                for e in events
+                if e.type == "URL_UNVERIFIED" and e.data == "https://www.test.notreal/yep" and e.scope_distance == 1
+            ]
+        )
+        assert 1 == len([e for e in events if e.type == "URL" and e.data == "http://127.0.0.1:8888/relative/owa/"])
+        assert 1 == len(
+            [
+                e
+                for e in events
+                if e.type == "FINDING" and e.data["description"] == "Non-HTTP URI: awb://127.0.0.1:7777"
+            ]
+        )
+        assert 1 == len(
+            [
+                e
+                for e in events
+                if e.type == "PROTOCOL" and e.data["protocol"] == "AWB" and e.data.get("port", 0) == 7777
+            ]
+        )
+        assert 1 == len(
+            [
+                e
+                for e in events
+                if e.type == "FINDING" and e.data["description"] == "Non-HTTP URI: ftp://127.0.0.1:2121"
+            ]
+        )
+        assert 1 == len(
+            [
+                e
+                for e in events
+                if e.type == "PROTOCOL" and e.data["protocol"] == "FTP" and e.data.get("port", 0) == 2121
+            ]
+        )
+        assert 1 == len(
+            [e for e in events if e.type == "FINDING" and e.data["description"] == "Non-HTTP URI: smb://127.0.0.1"]
+        )
+        assert 1 == len(
+            [e for e in events if e.type == "PROTOCOL" and e.data["protocol"] == "SMB" and "port" not in e.data]
+        )
+        assert 0 == len([e for e in events if e.type == "FINDING" and "ssh://127.0.0.1" in e.data["description"]])
+        assert 0 == 
len([e for e in events if e.type == "PROTOCOL" and e.data["protocol"] == "SSH"]) + + +class TestExcavateQuerystringRemoveTrue(TestExcavate): + targets = ["http://127.0.0.1:8888/"] + config_overrides = {"url_querystring_remove": True, "url_querystring_collapse": True} + lots_of_params = """ + <a href="http://127.0.0.1:8888/endpoint?foo=1"/> + <a href="http://127.0.0.1:8888/endpoint?foo=2"/> + <a href="http://127.0.0.1:8888/endpoint?foo=3"/> + <a href="http://127.0.0.1:8888/endpoint?foo=4"/> + <a href="http://127.0.0.1:8888/endpoint?foo=5"/> + <a href="http://127.0.0.1:8888/endpoint?foo=6"/> + <a href="http://127.0.0.1:8888/endpoint?foo=7"/> + <a href="http://127.0.0.1:8888/endpoint?foo=8"/> + <a href="http://127.0.0.1:8888/endpoint?foo=9"/> + <a href="http://127.0.0.1:8888/endpoint?foo=10"/> + """ + + async def setup_before_prep(self, module_test): + module_test.httpserver.expect_request("/").respond_with_data(self.lots_of_params) + + def check(self, module_test, events): + assert len([e for e in events if e.type == "URL_UNVERIFIED"]) == 2 + assert ( + len([e for e in events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.1:8888/endpoint"]) == 1 + ) + + +class TestExcavateQuerystringRemoveFalse(TestExcavateQuerystringRemoveTrue): + config_overrides = {"url_querystring_remove": False, "url_querystring_collapse": True} + + def check(self, module_test, events): + assert ( + len( + [ + e + for e in events + if e.type == "URL_UNVERIFIED" and e.data.startswith("http://127.0.0.1:8888/endpoint?") + ] + ) + == 1 + ) + + +class TestExcavateQuerystringCollapseFalse(TestExcavateQuerystringRemoveTrue): + config_overrides = {"url_querystring_remove": False, "url_querystring_collapse": False} + + def check(self, module_test, events): + assert ( + len( + [ + e + for e in events + if e.type == "URL_UNVERIFIED" and e.data.startswith("http://127.0.0.1:8888/endpoint?") + ] + ) + == 10 + ) + + +class TestExcavateMaxLinksPerPage(TestExcavate): + targets = ["http://127.0.0.1:8888/"] + config_overrides = {"web": {"spider_links_per_page": 10, "spider_distance": 1}} + + lots_of_links = """ + <a href="http://127.0.0.1:8888/1"/> + <a href="http://127.0.0.1:8888/2"/> + <a href="http://127.0.0.1:8888/3"/> + <a href="http://127.0.0.1:8888/4"/> + <a href="http://127.0.0.1:8888/5"/> + <a href="http://127.0.0.1:8888/6"/> + <a href="http://127.0.0.1:8888/7"/> + <a href="http://127.0.0.1:8888/8"/> + <a href="http://127.0.0.1:8888/9"/> + <a href="http://127.0.0.1:8888/10"/> + <a href="http://127.0.0.1:8888/11"/> + <a href="http://127.0.0.1:8888/12"/> + <a href="http://127.0.0.1:8888/13"/> + <a href="http://127.0.0.1:8888/14"/> + <a href="http://127.0.0.1:8888/15"/> + <a href="http://127.0.0.1:8888/16"/> + <a href="http://127.0.0.1:8888/17"/> + <a href="http://127.0.0.1:8888/18"/> + <a href="http://127.0.0.1:8888/19"/> + <a href="http://127.0.0.1:8888/20"/> + <a href="http://127.0.0.1:8888/21"/> + <a href="http://127.0.0.1:8888/22"/> + <a href="http://127.0.0.1:8888/23"/> + <a href="http://127.0.0.1:8888/24"/> + <a href="http://127.0.0.1:8888/25"/> + """ + + async def setup_before_prep(self, module_test): + module_test.httpserver.expect_request("/").respond_with_data(self.lots_of_links) + + def check(self, module_test, events): + url_unverified_events = [e for e in events if e.type == "URL_UNVERIFIED"] + # base URL + 25 links = 26 + assert len(url_unverified_events) == 26 + url_data = [e.data for e in url_unverified_events if "spider-max" not in e.tags and "spider-danger" in e.tags] + assert len(url_data) >= 10 and 
len(url_data) <= 12 + url_events = [e for e in events if e.type == "URL"] + assert len(url_events) == 11 + + +class TestExcavateCSP(TestExcavate): + csp_test_header = "default-src 'self'; script-src asdf.test.notreal; object-src 'none';" + + async def setup_before_prep(self, module_test): + expect_args = {"method": "GET", "uri": "/"} + respond_args = {"headers": {"Content-Security-Policy": self.csp_test_header}} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + def check(self, module_test, events): + assert any(e.data == "asdf.test.notreal" for e in events) + + +class TestExcavateURL(TestExcavate): + async def setup_before_prep(self, module_test): + module_test.httpserver.expect_request("/").respond_with_data( + "SomeSMooshedDATAhttps://asdffoo.test.notreal/some/path" + ) + + def check(self, module_test, events): + assert any(e.data == "asdffoo.test.notreal" for e in events) + assert any(e.data == "https://asdffoo.test.notreal/some/path" for e in events) + + +class TestExcavateURL_IP(TestExcavate): + targets = ["http://127.0.0.1:8888/", "127.0.0.2"] + + async def setup_before_prep(self, module_test): + module_test.httpserver.expect_request("/").respond_with_data("SomeSMooshedDATAhttps://127.0.0.2/some/path") + + def check(self, module_test, events): + assert any(e.data == "127.0.0.2" for e in events) + assert any(e.data == "https://127.0.0.2/some/path" for e in events) + + +class TestExcavateSerializationNegative(TestExcavate): + async def setup_before_prep(self, module_test): + module_test.httpserver.expect_request("/").respond_with_data( + "<html><p>llsdtVVFlJxhcGGYTo2PMGTRNFVKZxeKTVbhyosM3Sm/5apoY1/yUmN6HVcn+Xt798SPzgXQlZMttsqp1U1iJFmFO2aCGL/v3tmm/fs7itYsoNnJCelWvm9P4ic1nlKTBOpMjT5B5NmriZwTAzZ5ASjCKcmN8Vh=</p></html>" + ) + + def check(self, module_test, events): + assert not any(e.type == "FINDING" for e in events), "Found Results without word boundary" + + +class TestExcavateSerializationPositive(TestExcavate): + async def setup_before_prep(self, module_test): + module_test.httpserver.expect_request("/").respond_with_data( + """<html> +<h1>.NET</h1> +<p>AAEAAAD/////AQAAAAAAAAAMAgAAAFJTeXN0ZW0uQ29sbGVjdGlvbnMuR2VuZXJpYy5MaXN0YDFbW1N5c3RlbS5TdHJpbmddXSwgU3lzdGVtLCBWZXJzaW9uPTQuMC4wLjAsIEN1bHR1cmU9bmV1dHJhbCwgUHVibGljS2V5VG9rZW49YjAzZjVmN2YxMWQ1MGFlMwEAAAAIQ29tcGFyZXIQSXRlbUNvdW50AQMAAAAJAwAAAAlTeXN0ZW0uU3RyaW5nW10FAAAACQIAAAAJBAAAAAkFAAAACRcAAAAJCgAAAAkLAAAACQwAAAAJDQAAAAkOAAAACQ8AAAAJEAAAAAkRAAAACRIAAAAJEwAAAA==</p> +<h1>Java</h1> +<p>rO0ABXQADUhlbGxvLCB3b3JsZCE=</p> +<h1>PHP (string)</h1> +<p>czoyNDoiSGVsbG8sIHdvcmxkISBNb3JlIHRleHQuIjs=</p> +<h1>PHP (array)</h1> +<p>YTo0OntpOjA7aToxO2k6MTtzOjE0OiJzZWNvbmQgZWxlbWVudCI7aToyO2k6MztpOjM7czoxODoiTW9yZSB0ZXh0IGluIGFycmF5Ijt9</p> +<h1>PHP (object)</h1> +<p>TzoxMjoiU2FtcGxlT2JqZWN0IjoyOntzOjg6InByb3BlcnR5IjtzOjEzOiJJbml0aWFsIHZhbHVlIjtzOjE2OiJhZGRpdGlvbmFsU3RyaW5nIjtzOjIxOiJFeHRyYSB0ZXh0IGluIG9iamVjdC4iO30=</p> +<h1>Compression</h1> +<p>H4sIAAAAAAAA/yu2MjS2UvJIzcnJ11Eozy/KSVFUsgYAZN5upRUAAAA=</p> +</html> +""" + ) + + def check(self, module_test, events): + for serialize_type in ["Java", "DOTNET", "PHP_Array", "PHP_String", "PHP_Object", "Possible_Compressed"]: + assert any(e.type == "FINDING" and serialize_type in e.data["description"] for e in events), ( + f"Did not find {serialize_type} Serialized Object" + ) + + +class TestExcavateNonHttpScheme(TestExcavate): + targets = ["http://127.0.0.1:8888/", "test.notreal"] + + non_http_scheme_html = """ + + <html> + <head> + </head> + <body> + 
<p>hxxp://test.notreal</p>
+    <p>ftp://test.notreal</p>
+    <p>nonsense://test.notreal</p>
+    </body>
+    </html>
+    """

+    async def setup_before_prep(self, module_test):
+        module_test.httpserver.expect_request("/").respond_with_data(self.non_http_scheme_html)
+
+    def check(self, module_test, events):
+        found_hxxp_url = False
+        found_ftp_url = False
+        found_nonsense_url = False
+
+        for e in events:
+            if e.type == "FINDING":
+                if e.data["description"] == "Non-HTTP URI: hxxp://test.notreal":
+                    found_hxxp_url = True
+                if e.data["description"] == "Non-HTTP URI: ftp://test.notreal":
+                    found_ftp_url = True
+                if "nonsense" in e.data["description"]:
+                    found_nonsense_url = True
+        assert found_hxxp_url
+        assert found_ftp_url
+        assert not found_nonsense_url
+
+
+class TestExcavateParameterExtraction(TestExcavate):
+    # hunt is added as parameter extraction is only activated by one or more modules that consume WEB_PARAMETER
+    modules_overrides = ["excavate", "httpx", "hunt"]
+    targets = ["http://127.0.0.1:8888/"]
+    # fixture reconstructed from the assertions in check() below; the endpoint paths are illustrative,
+    # the parameter names/values are the ones the test actually asserts
+    parameter_extraction_html = """
+    <html>
+    <head>
+    <title>Get extract</title>
+    <script src="https://code.jquery.com/jquery-3.6.0.min.js"></script>
+    <script>
+    $.get("/jquery_get.php", {jqueryget: "value1"});
+    $.post("/jquery_post.php", {jquerypost: "value2"});
+    </script>
+    </head>
+    <body>
+    <h2>Simple GET Form</h2>
+    <p>Use the form below to submit a GET request:</p>
+    <form action="/search" method="get">
+        <input type="text" name="q" value="flowers">
+        <input type="submit" value="Submit">
+    </form>
+    <h2>Simple POST Form</h2>
+    <p>Use the form below to submit a POST request:</p>
+    <form action="/search" method="post">
+        <input type="text" name="q" value="boats">
+        <input type="submit" value="Submit">
+    </form>
+    <h2>Links</h2>
+    <a href="http://127.0.0.1:8888/endpoint?age=456&id=444">href</a>
+    <img src="http://127.0.0.1:8888/endpoint?size=m&fit=slim">img</img>
+    </body>
+    </html>
+    """
+
+    async def setup_before_prep(self, module_test):
+        module_test.httpserver.expect_request("/").respond_with_data(self.parameter_extraction_html)
+
+    def check(self, module_test, events):
+        found_jquery_get = False
+        found_jquery_post = False
+        found_form_get = False
+        found_form_post = False
+        found_jquery_get_original_value = False
+        found_jquery_post_original_value = False
+        found_form_get_original_value = False
+        found_form_post_original_value = False
+        found_htmltags_a = False
+        found_htmltags_img = False
+
+        for e in events:
+            if e.type == "WEB_PARAMETER":
+                if e.data["description"] == "HTTP Extracted Parameter [jqueryget] (GET jquery Submodule)":
+                    found_jquery_get = True
+                    if e.data["original_value"] == "value1":
+                        found_jquery_get_original_value = True
+
+                if e.data["description"] == "HTTP Extracted Parameter [jquerypost] (POST jquery Submodule)":
+                    found_jquery_post = True
+                    if e.data["original_value"] == "value2":
+                        found_jquery_post_original_value = True
+
+                if e.data["description"] == "HTTP Extracted Parameter [q] (GET Form Submodule)":
+                    found_form_get = True
+                    if e.data["original_value"] == "flowers":
+                        found_form_get_original_value = True
+
+                if e.data["description"] == "HTTP Extracted Parameter [q] (POST Form Submodule)":
+                    found_form_post = True
+                    if e.data["original_value"] == "boats":
+                        found_form_post_original_value = True
+
+                if e.data["description"] == "HTTP Extracted Parameter [age] (HTML Tags Submodule)":
+                    if e.data["original_value"] == "456":
+                        if "id" in e.data["additional_params"].keys():
+                            found_htmltags_a = True
+
+                if e.data["description"] == "HTTP Extracted Parameter [size] (HTML Tags Submodule)":
+                    if e.data["original_value"] == "m":
+                        if "fit" in e.data["additional_params"].keys():
+                            found_htmltags_img = True
+
+        assert found_jquery_get, "Did not extract Jquery GET parameters"
+        assert found_jquery_post, "Did not extract Jquery POST parameters"
+        assert found_form_get, "Did not extract Form GET parameters"
+        assert found_form_post, "Did not extract Form POST parameters"
+        assert found_jquery_get_original_value, "Did not extract Jquery GET parameter original_value"
+        assert found_jquery_post_original_value, "Did not extract Jquery POST parameter original_value"
+        assert found_form_get_original_value, "Did not extract Form GET parameter original_value"
+        assert found_form_post_original_value, "Did not extract Form POST parameter original_value"
+        assert found_htmltags_a, "Did not extract parameter(s) from a-tag"
+        assert found_htmltags_img, "Did not extract parameter(s) from img-tag"
+
+
+class TestExcavateParameterExtraction_getparam(ModuleTestBase):
+    targets = ["http://127.0.0.1:8888/"]
+
+    # hunt is added as parameter extraction is only activated by one or more modules that consume WEB_PARAMETER
+    modules_overrides = ["httpx", "excavate", "hunt"]
+    # fixture reconstructed from check() below; the path is illustrative, the [hack] parameter is what the test asserts
+    getparam_extract_html = """
+    <html><a href="/hello.php?hack=1">ping</a></html>
+    """
+
+    async def setup_after_prep(self, module_test):
+        respond_args = {"response_data": self.getparam_extract_html, "headers": {"Content-Type": "text/html"}}
+        module_test.set_expect_requests(respond_args=respond_args)
+
+    def check(self, module_test, events):
+        excavate_getparam_extraction = False
+        for e in events:
+            if e.type == "WEB_PARAMETER":
+                if "HTTP Extracted Parameter [hack] (HTML Tags Submodule)" in e.data["description"]:
+                    excavate_getparam_extraction = True
+        assert excavate_getparam_extraction, "Excavate failed to extract web parameter"
+
+
+class TestExcavateParameterExtraction_json(ModuleTestBase):
+    targets = 
["http://127.0.0.1:8888/"] + modules_overrides = ["httpx", "excavate", "paramminer_getparams"] + config_overrides = {"modules": {"paramminer_getparams": {"wordlist": tempwordlist([]), "recycle_words": True}}} + getparam_extract_json = """ + { + "obscureParameter": 1, + "common": 1 +} + """ + + async def setup_after_prep(self, module_test): + respond_args = {"response_data": self.getparam_extract_json, "headers": {"Content-Type": "application/json"}} + module_test.set_expect_requests(respond_args=respond_args) + + def check(self, module_test, events): + excavate_json_extraction = False + for e in events: + if e.type == "WEB_PARAMETER": + if ( + "HTTP Extracted Parameter (speculative from json content) [obscureParameter]" + in e.data["description"] + ): + excavate_json_extraction = True + assert excavate_json_extraction, "Excavate failed to extract json parameter" + + +class TestExcavateParameterExtraction_xml(ModuleTestBase): + targets = ["http://127.0.0.1:8888/"] + modules_overrides = ["httpx", "excavate", "paramminer_getparams"] + config_overrides = {"modules": {"paramminer_getparams": {"wordlist": tempwordlist([]), "recycle_words": True}}} + getparam_extract_xml = """ + + 1 + 1 + + """ + + async def setup_after_prep(self, module_test): + respond_args = {"response_data": self.getparam_extract_xml, "headers": {"Content-Type": "application/xml"}} + module_test.set_expect_requests(respond_args=respond_args) + + def check(self, module_test, events): + excavate_xml_extraction = False + for e in events: + if e.type == "WEB_PARAMETER": + if ( + "HTTP Extracted Parameter (speculative from xml content) [obscureParameter]" + in e.data["description"] + ): + excavate_xml_extraction = True + assert excavate_xml_extraction, "Excavate failed to extract xml parameter" + + +class excavateTestRule(ExcavateRule): + yara_rules = { + "SearchForText": 'rule SearchForText { meta: description = "Contains the text AAAABBBBCCCC" strings: $text = "AAAABBBBCCCC" condition: $text }', + "SearchForText2": 'rule SearchForText2 { meta: description = "Contains the text DDDDEEEEFFFF" strings: $text2 = "DDDDEEEEFFFF" condition: $text2 }', + } + + +class TestExcavateYara(TestExcavate): + targets = ["http://127.0.0.1:8888/"] + yara_test_html = """ + + + + +

+    <p>AAAABBBBCCCC</p>
+    <p>filler</p>
+    <p>DDDDEEEEFFFF</p>
+ + +""" + + async def setup_before_prep(self, module_test): + self.modules_overrides = ["excavate", "httpx"] + module_test.httpserver.expect_request("/").respond_with_data(self.yara_test_html) + + async def setup_after_prep(self, module_test): + excavate_module = module_test.scan.modules["excavate"] + excavateruleinstance = excavateTestRule(excavate_module) + excavate_module.add_yara_rule( + "SearchForText", + 'rule SearchForText { meta: description = "Contains the text AAAABBBBCCCC" strings: $text = "AAAABBBBCCCC" condition: $text }', + excavateruleinstance, + ) + excavate_module.add_yara_rule( + "SearchForText2", + 'rule SearchForText2 { meta: description = "Contains the text DDDDEEEEFFFF" strings: $text2 = "DDDDEEEEFFFF" condition: $text2 }', + excavateruleinstance, + ) + excavate_module.yara_rules = yara.compile(source="\n".join(excavate_module.yara_rules_dict.values())) + + def check(self, module_test, events): + found_yara_string_1 = False + found_yara_string_2 = False + for e in events: + if e.type == "FINDING": + if e.data["description"] == "HTTP response (body) Contains the text AAAABBBBCCCC": + found_yara_string_1 = True + if e.data["description"] == "HTTP response (body) Contains the text DDDDEEEEFFFF": + found_yara_string_2 = True + + assert found_yara_string_1, "Did not extract Match YARA rule (1)" + assert found_yara_string_2, "Did not extract Match YARA rule (2)" + + +class TestExcavateYaraCustom(TestExcavateYara): + rule_file = [ + 'rule SearchForText { meta: description = "Contains the text AAAABBBBCCCC" strings: $text = "AAAABBBBCCCC" condition: $text }', + 'rule SearchForText2 { meta: description = "Contains the text DDDDEEEEFFFF" strings: $text2 = "DDDDEEEEFFFF" condition: $text2 }', + ] + f = tempwordlist(rule_file) + config_overrides = {"modules": {"excavate": {"custom_yara_rules": f}}} + + +class TestExcavateSpiderDedupe(ModuleTestBase): + class DummyModule(BaseModule): + watched_events = ["URL_UNVERIFIED"] + _name = "dummy_module" + + events_seen = [] + + async def handle_event(self, event): + await self.helpers.sleep(0.5) + self.events_seen.append(event.data) + new_event = self.scan.make_event(event.data, "URL_UNVERIFIED", self.scan.root_event) + if new_event is not None: + await self.emit_event(new_event) + + dummy_text = "spider" + modules_overrides = ["excavate", "httpx"] + targets = ["http://127.0.0.1:8888/"] + + async def setup_after_prep(self, module_test): + self.dummy_module = self.DummyModule(module_test.scan) + module_test.scan.modules["dummy_module"] = self.dummy_module + module_test.httpserver.expect_request("/").respond_with_data(self.dummy_text) + module_test.httpserver.expect_request("/spider").respond_with_data("hi") + + def check(self, module_test, events): + found_url_unverified_spider_max = False + found_url_unverified_dummy = False + found_url_event = False + + assert sorted(self.dummy_module.events_seen) == ["http://127.0.0.1:8888/", "http://127.0.0.1:8888/spider"] + + for e in events: + if e.type == "URL_UNVERIFIED": + if e.data == "http://127.0.0.1:8888/spider": + if str(e.module) == "excavate" and "spider-danger" in e.tags and "spider-max" in e.tags: + found_url_unverified_spider_max = True + if ( + str(e.module) == "dummy_module" + and "spider-danger" not in e.tags + and "spider-max" not in e.tags + ): + found_url_unverified_dummy = True + if e.type == "URL" and e.data == "http://127.0.0.1:8888/spider": + found_url_event = True + + assert found_url_unverified_spider_max, "Excavate failed to find /spider link" + assert 
found_url_unverified_dummy, "Dummy module did not correctly re-emit" + assert found_url_event, "URL was not emitted from non-spider-max URL_UNVERIFIED" + + +class TestExcavateParameterExtraction_targeturl(ModuleTestBase): + targets = ["http://127.0.0.1:8888/?foo=1"] + modules_overrides = ["httpx", "excavate", "hunt"] + config_overrides = { + "url_querystring_remove": False, + "url_querystring_collapse": False, + "interactsh_disable": True, + "modules": { + "excavate": { + "retain_querystring": True, + } + }, + } + + async def setup_after_prep(self, module_test): + expect_args = {"method": "GET", "uri": "/", "query_string": "foo=1"} + respond_args = { + "response_data": "alive", + "status": 200, + } + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + def check(self, module_test, events): + web_parameter_emit = False + for e in events: + if e.type == "WEB_PARAMETER" and "HTTP Extracted Parameter [foo] (Target URL)" in e.data["description"]: + web_parameter_emit = True + + assert web_parameter_emit + + +class TestExcavate_retain_querystring(ModuleTestBase): + targets = ["http://127.0.0.1:8888/?foo=1"] + modules_overrides = ["httpx", "excavate", "hunt"] + config_overrides = { + "url_querystring_remove": False, + "url_querystring_collapse": False, + "interactsh_disable": True, + "web_spider_depth": 4, + "web_spider_distance": 4, + "modules": { + "excavate": { + "retain_querystring": True, + } + }, + } + + async def setup_after_prep(self, module_test): + expect_args = {"method": "GET", "uri": "/", "query_string": "foo=1"} + respond_args = { + "response_data": "alive", + "headers": {"Set-Cookie": "a=b"}, + "status": 200, + } + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + def check(self, module_test, events): + web_parameter_emit = False + for e in events: + if e.type == "WEB_PARAMETER" and "foo" in e.data["url"]: + web_parameter_emit = True + + assert web_parameter_emit + + +class TestExcavate_retain_querystring_not(TestExcavate_retain_querystring): + config_overrides = { + "url_querystring_remove": False, + "url_querystring_collapse": False, + "interactsh_disable": True, + "web_spider_depth": 4, + "web_spider_distance": 4, + "modules": { + "excavate": { + "retain_querystring": True, + } + }, + } + + def check(self, module_test, events): + web_parameter_emit = False + for e in events: + if e.type == "WEB_PARAMETER" and "foo" not in e.data["url"]: + web_parameter_emit = True + + assert web_parameter_emit + + +class TestExcavate_webparameter_outofscope(ModuleTestBase): + html_body = "" + + targets = ["http://127.0.0.1:8888", "socialmediasite.com"] + modules_overrides = ["httpx", "excavate", "hunt"] + config_overrides = {"interactsh_disable": True} + + async def setup_after_prep(self, module_test): + expect_args = {"method": "GET", "uri": "/"} + respond_args = { + "response_data": self.html_body, + "status": 200, + } + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + def check(self, module_test, events): + web_parameter_differentsite = False + web_parameter_outofscope = False + + for e in events: + if e.type == "WEB_PARAMETER" and "in-scope" in e.tags and e.host == "socialmediasite.com": + web_parameter_differentsite = True + + if e.type == "WEB_PARAMETER" and e.host == "outofscope.com": + web_parameter_outofscope = True + + assert web_parameter_differentsite, "WEB_PARAMETER was not emitted" + assert not web_parameter_outofscope, "Out of scope domain was emitted" + + +class 
TestExcavateHeaders(ModuleTestBase): + targets = ["http://127.0.0.1:8888/"] + modules_overrides = ["excavate", "httpx", "hunt"] + config_overrides = {"web": {"spider_distance": 1, "spider_depth": 1}} + + async def setup_before_prep(self, module_test): + module_test.httpserver.expect_request("/").respond_with_data( + "

<html><body>test</body></html>
", + status=200, + headers={ + "Set-Cookie": [ + "COOKIE1=aaaa; Secure; HttpOnly", + "COOKIE2=bbbb; Secure; HttpOnly; SameSite=None", + ] + }, + ) + + def check(self, module_test, events): + found_first_cookie = False + found_second_cookie = False + + for e in events: + if e.type == "WEB_PARAMETER": + if e.data["name"] == "COOKIE1": + found_first_cookie = True + if e.data["name"] == "COOKIE2": + found_second_cookie = True + + assert found_first_cookie is True + assert found_second_cookie is True + + +class TestExcavateRAWTEXT(ModuleTestBase): + targets = ["http://127.0.0.1:8888/", "test.notreal"] + modules_overrides = ["excavate", "httpx", "filedownload", "extractous"] + config_overrides = {"scope": {"report_distance": 1}, "web": {"spider_distance": 2, "spider_depth": 2}} + + pdf_data = r"""%PDF-1.3 +%���� ReportLab Generated PDF document http://www.reportlab.com +1 0 obj +<< +/F1 2 0 R +>> +endobj +2 0 obj +<< +/BaseFont /Helvetica /Encoding /WinAnsiEncoding /Name /F1 /Subtype /Type1 /Type /Font +>> +endobj +3 0 obj +<< +/Contents 7 0 R /MediaBox [ 0 0 595.2756 841.8898 ] /Parent 6 0 R /Resources << +/Font 1 0 R /ProcSet [ /PDF /Text /ImageB /ImageC /ImageI ] +>> /Rotate 0 /Trans << + +>> + /Type /Page +>> +endobj +4 0 obj +<< +/PageMode /UseNone /Pages 6 0 R /Type /Catalog +>> +endobj +5 0 obj +<< +/Author (anonymous) /CreationDate (D:20240807182842+00'00') /Creator (ReportLab PDF Library - www.reportlab.com) /Keywords () /ModDate (D:20240807182842+00'00') /Producer (ReportLab PDF Library - www.reportlab.com) + /Subject (unspecified) /Title (untitled) /Trapped /False +>> +endobj +6 0 obj +<< +/Count 1 /Kids [ 3 0 R ] /Type /Pages +>> +endobj +7 0 obj +<< +/Filter [ /ASCII85Decode /FlateDecode ] /Length 742 +>> +stream +Gas2F;0/Hc'SYHA/+V9II1V!>b>-epMEjN4$Udfu3WXha!?H`crq_UNGP5IS$'WT'SF]Hm/eEhd_JY>@!1knV$j`L/E!kN:0EQJ+FF:uKph>GV#ju48hu\;DS#c\h,:/udaV^[@;X>;"'ep>>)(B?I-n?2pLTEZKb$BFgKRF(b#Pc?SYeqN_Q<+X%64E)"g-fPCbq][OcNlQLW_hs%Z%g83]3b]0V$sluS:l]fd*^-UdD=#bCpInTen.cfe189iIh6\.p.U0GF:oK9b'->\lOqObp&ppaGMoCcp"4SVDq!<>6ZV]FD>,rrdc't<[N2!Ai12-2bU`S*gNOt?NS4WgtN@KuL)HOb>`9L>S$_ert"UNW*,("+*>]m)4`k"8SUOCpM7`cEe!(7?`JV*GMajff(^atd&EX#qdMBmI'Q(YYb&m.O>0MYJ4XfJH@("`jPF^W5.*84$HY?2JY[WU48,IqkD_]b:_615)BA3RM*]q4>2Gf_1aMGFGu.Zt]!p5h;`XYO/FCmQ4/3ZX09kH$X+QI/JJh`lb\dBu:d$%Ld1=H=-UbKXP_&26H00T.?":f@40#m]NM5JYq@VFSk+#OR+sc4eX`Oq]N([T/;kQ>>WZOJNWnM"#msq:#?Km~>endstream +endobj +xref +0 8 +0000000000 65535 f +0000000073 00000 n +0000000104 00000 n +0000000211 00000 n +0000000414 00000 n +0000000482 00000 n +0000000778 00000 n +0000000837 00000 n +trailer +<< +/ID +[<3c7340500fa2fe72523c5e6f07511599><3c7340500fa2fe72523c5e6f07511599>] +% ReportLab generated PDF document -- digest (http://www.reportlab.com) + +/Info 5 0 R +/Root 4 0 R +/Size 8 +>> +startxref +1669 +%%EOF""" + extractous_response = """This is an email example@blacklanternsecurity.notreal + +An example JWT eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaWF0IjoxNTE2MjM5MDIyfQ.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c + +A serialized DOTNET object AAEAAAD/////AQAAAAAAAAAMAgAAAFJTeXN0ZW0uQ29sbGVjdGlvbnMuR2VuZXJpYy5MaXN0YDFbW1N5c3RlbS5TdHJpbmddXSwgU3lzdGVtLCBWZXJzaW9uPTQuMC4wLjAsIEN1bHR1cmU9bmV1dHJhbCwgUHVibGljS2V5VG9rZW49YjAzZjVmN2YxMWQ1MGFlMwEAAAAIQ29tcGFyZXIQSXRlbUNvdW50AQMAAAAJAwAAAAlTeXN0ZW0uU3RyaW5nW10FAAAACQIAAAAJBAAAAAkFAAAACRcAAAAJCgAAAAkLAAAACQwAAAAJDQAAAAkOAAAACQ8AAAAJEAAAAAkRAAAACRIAAAAJEwAAAA== + +A full url https://www.test.notreal/about + +A href
Click me""" + + async def setup_after_prep(self, module_test): + module_test.set_expect_requests( + {"uri": "/"}, + {"response_data": ''}, + ) + module_test.set_expect_requests( + {"uri": "/Test_PDF"}, + {"response_data": self.pdf_data, "headers": {"Content-Type": "application/pdf"}}, + ) + + def check(self, module_test, events): + filesystem_events = [e for e in events if e.type == "FILESYSTEM"] + assert 1 == len(filesystem_events), filesystem_events + filesystem_event = filesystem_events[0] + file = Path(filesystem_event.data["path"]) + assert file.is_file(), "Destination file doesn't exist" + assert open(file).read() == self.pdf_data, f"File at {file} does not contain the correct content" + raw_text_events = [e for e in events if e.type == "RAW_TEXT"] + assert 1 == len(raw_text_events), "Failed to emit RAW_TEXT event" + assert raw_text_events[0].data == self.extractous_response, ( + f"Text extracted from PDF is incorrect, got {raw_text_events[0].data}" + ) + email_events = [e for e in events if e.type == "EMAIL_ADDRESS"] + assert 1 == len(email_events), "Failed to emit EMAIL_ADDRESS event" + assert email_events[0].data == "example@blacklanternsecurity.notreal", ( + f"Email extracted from extractous text is incorrect, got {email_events[0].data}" + ) + finding_events = [e for e in events if e.type == "FINDING"] + assert 2 == len(finding_events), "Failed to emit FINDING events" + assert any( + e.type == "FINDING" + and "JWT" in e.data["description"] + and e.data["url"] == "http://127.0.0.1:8888/Test_PDF" + and e.data["host"] == "127.0.0.1" + and e.data["path"].endswith("http-127-0-0-1-8888-test-pdf.pdf") + and str(e.host) == "127.0.0.1" + for e in finding_events + ), f"Failed to emit JWT event got {finding_events}" + assert any( + e.type == "FINDING" + and "DOTNET" in e.data["description"] + and e.data["url"] == "http://127.0.0.1:8888/Test_PDF" + and e.data["host"] == "127.0.0.1" + and e.data["path"].endswith("http-127-0-0-1-8888-test-pdf.pdf") + and str(e.host) == "127.0.0.1" + for e in finding_events + ), f"Failed to emit serialized event got {finding_events}" + assert finding_events[0].data["path"] == str(file), "File path not included in finding event" + url_events = [e.data for e in events if e.type == "URL_UNVERIFIED"] + assert "https://www.test.notreal/about" in url_events, ( + f"URL extracted from extractous text is incorrect, got {url_events}" + ) + assert "/donot_detect.js" not in url_events, ( + f"URL extracted from extractous text is incorrect, got {url_events}" + ) + + +class TestExcavateBadURLs(ModuleTestBase): + targets = ["http://127.0.0.1:8888/"] + modules_overrides = ["excavate", "httpx", "hunt"] + config_overrides = {"interactsh_disable": True, "scope": {"report_distance": 10}} + + bad_url_data = """ +Help +Help +""" + + async def setup_after_prep(self, module_test): + module_test.set_expect_requests({"uri": "/"}, {"response_data": self.bad_url_data}) + + def check(self, module_test, events): + log_file = module_test.scan.home / "debug.log" + log_text = log_file.read_text() + # make sure our logging is working + assert "Setting scan status to STARTING" in log_text + # make sure we don't have any URL validation errors + assert "Error Parsing reconstructed URL" not in log_text + assert "Error sanitizing event data" not in log_text + + url_events = [e for e in events if e.type == "URL_UNVERIFIED"] + assert sorted([e.data for e in url_events]) == sorted(["https://ssl/", "http://127.0.0.1:8888/"]) diff --git a/bbot/test/test_step_2/module_tests/test_module_extractous.py 
b/bbot/test/test_step_2/module_tests/test_module_extractous.py new file mode 100644 index 0000000000..c32173163d --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_extractous.py @@ -0,0 +1,54 @@ +import base64 +from pathlib import Path +from .base import ModuleTestBase + + +class TestExtractous(ModuleTestBase): + targets = ["http://127.0.0.1:8888"] + modules_overrides = ["extractous", "filedownload", "httpx", "excavate", "speculate"] + config_overrides = {"web": {"spider_distance": 2, "spider_depth": 2}} + + pdf_data = base64.b64decode( + "JVBERi0xLjMKJe+/ve+/ve+/ve+/vSBSZXBvcnRMYWIgR2VuZXJhdGVkIFBERiBkb2N1bWVudCBodHRwOi8vd3d3LnJlcG9ydGxhYi5jb20KMSAwIG9iago8PAovRjEgMiAwIFIKPj4KZW5kb2JqCjIgMCBvYmoKPDwKL0Jhc2VGb250IC9IZWx2ZXRpY2EgL0VuY29kaW5nIC9XaW5BbnNpRW5jb2RpbmcgL05hbWUgL0YxIC9TdWJ0eXBlIC9UeXBlMSAvVHlwZSAvRm9udAo+PgplbmRvYmoKMyAwIG9iago8PAovQ29udGVudHMgNyAwIFIgL01lZGlhQm94IFsgMCAwIDU5NS4yNzU2IDg0MS44ODk4IF0gL1BhcmVudCA2IDAgUiAvUmVzb3VyY2VzIDw8Ci9Gb250IDEgMCBSIC9Qcm9jU2V0IFsgL1BERiAvVGV4dCAvSW1hZ2VCIC9JbWFnZUMgL0ltYWdlSSBdCj4+IC9Sb3RhdGUgMCAvVHJhbnMgPDwKCj4+IAogIC9UeXBlIC9QYWdlCj4+CmVuZG9iago0IDAgb2JqCjw8Ci9QYWdlTW9kZSAvVXNlTm9uZSAvUGFnZXMgNiAwIFIgL1R5cGUgL0NhdGFsb2cKPj4KZW5kb2JqCjUgMCBvYmoKPDwKL0F1dGhvciAoYW5vbnltb3VzKSAvQ3JlYXRpb25EYXRlIChEOjIwMjQwNjAzMTg1ODE2KzAwJzAwJykgL0NyZWF0b3IgKFJlcG9ydExhYiBQREYgTGlicmFyeSAtIHd3dy5yZXBvcnRsYWIuY29tKSAvS2V5d29yZHMgKCkgL01vZERhdGUgKEQ6MjAyNDA2MDMxODU4MTYrMDAnMDAnKSAvUHJvZHVjZXIgKFJlcG9ydExhYiBQREYgTGlicmFyeSAtIHd3dy5yZXBvcnRsYWIuY29tKSAKICAvU3ViamVjdCAodW5zcGVjaWZpZWQpIC9UaXRsZSAodW50aXRsZWQpIC9UcmFwcGVkIC9GYWxzZQo+PgplbmRvYmoKNiAwIG9iago8PAovQ291bnQgMSAvS2lkcyBbIDMgMCBSIF0gL1R5cGUgL1BhZ2VzCj4+CmVuZG9iago3IDAgb2JqCjw8Ci9GaWx0ZXIgWyAvQVNDSUk4NURlY29kZSAvRmxhdGVEZWNvZGUgXSAvTGVuZ3RoIDEwNwo+PgpzdHJlYW0KR2FwUWgwRT1GLDBVXEgzVFxwTllUXlFLaz90Yz5JUCw7VyNVMV4yM2loUEVNXz9DVzRLSVNpOTBNakdeMixGUyM8UkM1K2MsbilaOyRiSyRiIjVJWzwhXlREI2dpXSY9NVgsWzVAWUBWfj5lbmRzdHJlYW0KZW5kb2JqCnhyZWYKMCA4CjAwMDAwMDAwMDAgNjU1MzUgZiAKMDAwMDAwMDA3MyAwMDAwMCBuIAowMDAwMDAwMTA0IDAwMDAwIG4gCjAwMDAwMDAyMTEgMDAwMDAgbiAKMDAwMDAwMDQxNCAwMDAwMCBuIAowMDAwMDAwNDgyIDAwMDAwIG4gCjAwMDAwMDA3NzggMDAwMDAgbiAKMDAwMDAwMDgzNyAwMDAwMCBuIAp0cmFpbGVyCjw8Ci9JRCAKWzw4MGQ5ZjViOTY0ZmM5OTI4NDUwMWRlYjdhNmE2MzdmNz48ODBkOWY1Yjk2NGZjOTkyODQ1MDFkZWI3YTZhNjM3Zjc+XQolIFJlcG9ydExhYiBnZW5lcmF0ZWQgUERGIGRvY3VtZW50IC0tIGRpZ2VzdCAoaHR0cDovL3d3dy5yZXBvcnRsYWIuY29tKQoKL0luZm8gNSAwIFIKL1Jvb3QgNCAwIFIKL1NpemUgOAo+PgpzdGFydHhyZWYKMTAzNAolJUVPRg==" + ) + + docx_data = base64.b64decode( + 
"UEsDBBQAAAAIAK+YSUqNEzDqOgEAAKcCAAAQAAAAZG9jUHJvcHMvYXBwLnhtbK2SzU4CMRSFX6Xp3ungghjCDDGwcKHGBMR1be8wjf1Le0Hm2Vz4SL6C7SAM6s7YXc/5eu5P+vH2Pp3tjSY7CFE5W9FRUVICVjip7KaiW2wuriiJyK3k2lmoaAeRzuop95OH4DwEVBBJyrCxoi2inzAWRQuGxyLZNjmNC4ZjuoYNc02jBCyc2BqwyC7Lcsxgj2AlyAt/CqSHxMkO/xoqncj9xfWq80Me9//ZZL+Fa++1EhzT+uo7JYKLrkHy5IIkKZNgC+QVnqfsB5qfpgpLENugsKvLnjhXMrEUXMM8VawbriP0zKBlYu6M57Yj7MC3PIBMKef8ScvETVpH0Mq+xHnL7QbkGfnb+xpwffge9WhclOkchznKmVqB8Zoj1Pd5kbqQDk3PnYxM3ebwR79yi6wMlb/rvTT8rvoTUEsDBBQAAAAIAAVuZllQ34JjWAEAAIwCAAARAAAAZG9jUHJvcHMvY29yZS54bWyNUt1OgzAUfhXSeyiFDWcDLFHjhXGJiUs03tVytuGAkvZMtmfzwkfyFey6wZzxQq4O5/tpvw++Pj7T6bauvHfQplRNRlgQEg8aqYqyWWZkgwt/QqZ5KpWGB61a0FiC8aymMbyQGVkhtpzSdqOrQOklLSSFCmpo0FAWMEoGLoKuzZ8ChwzMrSkHVtd1QRc7XhSGjD7P7h/lCmrhl41B0Ug4qgaFcbAJ7FUbiyyUrgUa59AKuRZL2DsltAYUhUBB98n8dohG8rSQHEusIE/t5frRTmbz+gYSD+vhZQ27TunC2PVptIQCjNRli7bWg+JscayDSw0CofBsaI67FjLSI0/x9c38luRRGI18xvwwmUeMjyac2VrjywsWxi973zOfk3Ftv+Ci/LdzxFnCx+Pg0j7jMPnh3Bu5UO4YpfM7BZU3U7Y6F61fp5UwODsKrnZntF9Q6oo//VL5N1BLAwQUAAAACAAFbmZZnlZ1fjgCAAACCQAAEgAAAHdvcmQvZm9udFRhYmxlLnhtbOWV0W7aMBRAf8Xye4mTAKWoaUWhSJOmPUz7AeM4xFpsR76GwLftYZ+0X9hNSAAVoTaVxsuClIR7fY/t44v48+v34/NOF2QrHShrEhoOGCXSCJsqs07oxmd3E0rAc5PywhqZ0L0E+vz0WE0zazwQrDYw1SKhufflNAhA5FJzGNhSGkxm1mnu8atbB5q7n5vyTlhdcq9WqlB+H0SMjWmLcR+h2CxTQi6s2GhpfFMfOFkg0RrIVQkdrfoIrbIuLZ0VEgB3rIsDT3NljphweAHSSjgLNvMD3Ey7ogaF5SFr3nRxAoz6AaK3gPJzSzjta+F4hY/TisYg+xFH7ZoC2Gu5OwMJlfYjjTsSVp5x+kEmF47HQu4+xwiw8txM6tO8FynqTjyoa7nnOYecEi2mX9bGOr4qUDa2EcFOIPVhkuYA6js6qB/Nq9yRbnra/cBINTVcY/mcF2rlVJMoubEgQ8xteZFQnH/JRnivP0MW13dKgnqkyLkD6Y8jWRvPuFbFvgtDpQDaTKm8yLvEljtVr77NgVpjZgMrltBXhle0XNJDJEzoEAOz+TES1dM1V9hG4mMEl4FraziHEQ/LNhKej8FJg4OGCx0/lJZAvsmKfLeamytaIjZGHSOUUuuJe2pxDbm/lmh2rmWOkfvJML7Q8vC+lmVfLW2XkK9qnfurvRLfuFdmTa+8vumViN2/XEhh/6BXZqW3QBYKyoLvr0h5Qciw1RLdRAr+z6CDyf1JSruX+HZS/lsZ7Qs8/QVQSwMEFAAAAAgABW5mWcFNiUYeBAAADgwAABEAAAB3b3JkL3NldHRpbmdzLnhtbLVWXW7bOBC+iqDndWxJtpMITQv/xNsW8XYRZw9AiSObCH8EkrLjFnuyfdgj7RV2KImRnRhBkqIvNjnfzDcz5HBG//3z74dPD4IHW9CGKXkVRmeDMACZK8rk+iqsbNG7CANjiaSEKwlX4R5M+Onjh11qwFpUMgESSJOK/CrcWFum/b7JNyCIOVMlSAQLpQWxuNXrviD6vip7uRIlsSxjnNl9Px4MxmFLo9CplmlL0RMs18qowjqTVBUFy6H98xb6NX4bk7nKKwHS1h77GjjGoKTZsNJ4NvFeNgQ3nmT7UhJbwb3eLhq8It2d0vTR4jXhOYNSqxyMwQsS3AfIZOd4+Izo0fcZ+m5TrKnQPBrUq8PIR28jiJ8QGP6aTBrohmWa6P2JNMr35dEdzlyTHf51aY0NvI1x1CbWN3sBDwdEOaNvYxp7JrQ84HkbycWzixrn8PA+jj5aHp4MtXTzJqbYl03f2RJLNsTgIxF5+mUtlSYZx8PGWgywnAJ3mUF9Ae4Xz8D91Ut4CLz70HWe70qJYJeWoHN8fti0Bti0+g6xmuT3t7BlrpsZ1NkSrLOCcAOtBoWCVNzekWxlVek1zmPPkG8IcljQq5LkWBkzJa1W3CtS9YeyM+xeGuvHm9TNrFutmsaIJpIITPCo2S0VxVB2aaXZ648y9O6j0ZHPp54U9nHNKGB2HFZ2z2GB4a/Yd5hI+rUyliFl3fN+IoQXIwDpXH/Dl3y3L2EBxFZ4Ur/KW30bC87KJdNa6S+SYj38Om+sKECjB0YsLLGImFa7+qg/A6E4QX/Wcf+wlnAgU+MXt0pZr5sk0/PRYHzRxupgDw0W15NJMr8+Ab1gNVwk04tZkpyAxuPr6XQ6n52Azi/iZLaYDH3kbbwidYPwT+1XrgAD0ZjMiMg0I8GyHpVoJtJM30+Z9AoZYPOHI2hVZR7t9VrECML5Ap+pR5rHK1LKTDmHotnwJdHrjtvr6NNi7AtfH/lcWwH9u1ZV2cI7TcqmvLxONBx6WybtDRMeMFW2erSTOLcOsErSb1vdHFl3Uti3sFDqt3pD6oKrlUH2/lr5iuR65aoJlqQsm6LM1tFVyNl6Y7F6kAJ3FL+u6k22jlssrrG4weoNyV16qN0uOhlqtYtOlnhZ0slw5raLToafA+2ik+HoahdOtsGGoDmT9/g+/NLJC8W52gH93OHPRO0pmA0pYd50cKw11Qjalm6CbQoPOA6AMovfrCWjguDkiwbxuLZv1TnZq8oeKTvMaZfHFG5iPb7NI+u64p9E42ZLzrA0V3uRdRPjrI2dM4MdpcTpYpX24G8NGA1TqvIvbuINTz3XaFTPJXvnxhve/i0UU2KAetAbjxrjH5NoMru8XiS9y+Q87g3Pk6h3OR7Pe1izl4vF5SCeRbO//cP13/Ef/wdQSwMEFAAAAAgABW5mWX+NfPRJDgAA56IAAA8AAAB3b3JkL3N0eWxlcy54bWztXdt227gV/RUuPbUPHlm8Scoaz6zESZpMc/HEns4zREIWY4pUeYnt/lof+kn9hQIgSFEiIZPEtuzMdGWtWLxgAzz77IMrif/++z8//ny3Do1vNEmDODobTX44HRk08mI/iK7PRnm2PJmNjDQjkU/COKJno3uajn7+6cfbF2l2H9LUYMmj9MXtZmKfjVZZtnkxHqfeiq5J+s
M68JI4jZfZD168HsfLZeDR8W2c+GPzdHIqfm2S2KNpyjJ7nZBb9mckAddeAy7e0IhdXMbJmmTsMLker0lyk29OGPyGZMEiCIPsnoGfuiVM0gWlKNnr2MvXNMpE+nFCQ4YYR+kq2KQl2m0XtN3HWocF3poEUQUzzFbrcAvg9AMwGwBuSvtBOBJinN6v6V0NyAv8fkhuicRS1nD6gcyaT+TRu2EYY5aybhk/81e9kMySoDFPSzKyIulqZKy9F++vozghi5AZm7FuMOIMLhVDEMD/Zzbgf8RPemeU2Y+4wvzYe02XJA+zlB8mF4k8lEfiz9s4ylLj9gVJvSC4YmVlWa0Dluu7l1EajNiVFf/ReoWSNHuZBqR+8Y08x697aVa78irwWaqx0P6/2NVvJDwbmXZ16jxtnAxJdF2epNHJb5f1XM9GX8nJLxf81IJBn41IcnL5UqQcy+cb7z/1Zv9IZL0hHpMaN8Iyo0zyE5dFMZZ7wAOWOZ2XB19yTgTJs7jMRSCMd3HHDcuzUMACw2UR8NhVuvwQezfUv8zYhbORyIyd/O39RRLECYtBZ6P5XJ68pOvgXeD7NKrdGK0Cn/6+otFvKfW35399K+KIPOHFecR+W9OJ8IYw9d/ceXTDoxK7GhFOzCeegIng9kUebDMXyf9Zgk1KMtoAVpTwUG9M9jHm/THMVoy0ZoAil72nn/TPyTpaTsyTj5STc7ScWO14pJymR8uJtVKOlNP80XMKIp9VBZOusA8BCVkigITqEEBCVAggoRkEkJAEAkh4PAJIODQCaK4PlMVes4KwQMCNWgMF3KgkUMCNOgEF3KgCUMCNiI8CbgR4FHAjnqOA548BXDTDjPdMcFGmD7eM4yyKM2pk9A4ARyIGJnqzIEBeFdIE85wInCLQyQpaH84j4rjhKA66os94z9CIl8YyuM4TNrCiXXQafaMhG5QwiO8zQCRiQrM8iYDOndAlTdhYE4V6OBCVdxmNKF8vED66Idc4MBr5aBOWkJgIUXk262yvuH4ChHevCRuDQVQDBBcsPgQpwF4cxXiVhyFFgX0CuZoAA3QhBA6gByFwAB0IgeNAmYOZScKhrCXhUEaTcA7UUWG2k3Ao20k4lO0kHMB2V0EW0v0myqTHyN95GPMJCv2SXAbXEWFtA0AlJAddjQuSkOuEbFYGH95uPKV+Rq9i/964glR1FRSs+S885Zw9eBDlAKPuwMF0VgGilFYBorRWAQLU9pG1pXkD7h2o53OZL7JWAffoPVySMC8avQDhsYkMpBTeBkmKE0Q7LsKVP/EmLycVEgm35QQUbQtm4YMUtoASE1HOkE2sgQLzu/sNTVgf7kYf6m0chvEt9YGQl1kSFz5X179pdtf/m/WGzTMHaQOjRyOgXPRgfCQb/We6CNkqBxB7b07YkonQADYu3l19/GBcxRveLeXGASG+irMsXuNA5VjiX36ni7+CiviSdZuje9QDv0QNLQm08wBR8xRQsY+CYg3RIAowdasA/Du9X8Qk8UFwF2zkR+g7oyjIS7LehDCZsUB5y8IRoq0kAP9BkoCPKcH0dYVBq408pvniK/UAoe9TbGBGlT7nmRjDFM1hQK9pBw/QgtjBA7QeBKesyuCOjHjeHTzA8+7gwZ73PCRsqaGcoUUCwp64BIQ/sg0DjMM4WeYh0IglIs6KJSLOjHGYr6MU+tACEPnMAhD+yEjPEYAOCvBvCVsSCmNEoMHoEGgwLgQajAiBhmUBsCqohgZYGlRDm6HQUI2DGhrM37ANA9TUUQ0N5m8CDeZvAg3mbwIN5m/Wa4Mul6yhDKx3apgw36thAmufKKPrTZyQ5B6F+Sak1wQxylrAXSTxkr+6EkfFunJIi5eNdkNb5AUejGo21IIrHAeDlgwxrErY+GWMGprb1kJta+keSideNoGMNXp0FYdsOkb1WAd72JfFSyP7TzDpPnb6IbheZcblqpo8qOPwN1AeSlp28nfSdciyzfKueXj6yg/ydVnW5lpe1+qRurFg17U7pN42M3aSOl2TNnN1OyTdNqZ3kk67Jm3mOuuatLH82D0ojtckuWn1iOlBT6o6hQo/nE46pW7N2OyUtM0bp1Zn4bDBaY9PQEyGKkgN0FFKaoBemlLD9BKXGqa7ytQYB+X2hX4LeMXfK5SKHKv1Go0Kwe4eT3/N2WTsPoDZ4z2096xxFaXUaAWyesyK7cQdtTG7ByA1RvdIpMboHpLUGN1ikzJ9vyClhukerdQY3cOWGqN//DJ145epG79MTPwyMfFLp5WgxujeXFBj9JetCZCtTktCjdFPtiZGtiZAtiZAtiZAtpaubC1d2VoY2VoY2VoA2VoA2VoA2VoA2VoA2Q7tCSjTD5OtBZCtBZCtBZCtrStbW1e2Nka2Nka2NkC2NkC2NkC2NkC2NkC2tqZsbYxsbYBsbYBsbYBsHV3ZOrqydTCydTCydQCydQCydQCydQCydQCydTRl62Bk6wBk6wBk6wBk6+rK1tWVrYuRrYuRrQuQrQuQrQuQrQuQrQuQraspWxcjWxcgWxcg2ybGQU+VM6KqVwImA0ZRla8X9Jgik8X6Un9NfWdQdtK/XGqwHu9OvIrjG6P1FUrL6oESLMIgFgPf9w0cxPKLz+f1l5OGfbWk68PIlzfEHG1jQNTunLQxKGObXZM2Ooa21TVpo3Fq212TNipI+2AgFiIt18WwaqqR+rRj6okivdsxfdPQ044pm3aedUzZNPO8Y0rH4BF7P7nT1Vhutfq1ATHpCDFVQ5j9KFNOG3TnTg3RmUQ1RGc21RD9aFXiDOBXjdWfaDXWQMZNfcY1ZKuG6M24CWLcBDJuAhk3UYxb+oxb+oxrRGw1xDDGLSDjFpBxC8W4rc+4rc+4rc+4bmWtxNFg3AYybqMYd/QZd/QZd/QZd0CMO0DGHSDjDopxV59xV59xV59xF8S4C2TcBTLu9mNcjMJodK9q6Xu202ope1bWtZQ9I3Yt5ZDuVS350O5VDWJo96pJ2cDuVZ27gd2rOokDu1d1Ngd2rxq0DuxetfI7sHvVSvTA7pWacVOfcQ3ZDuxetTFughg3gYybQMZNFOOWPuOWPuMaEXtg90rJuAVk3AIybqEYt/UZt/UZt/UZ162sB3avDjJuAxm3UYw7+ow7+ow7+ow7IMYdIOMOkHEHxbirz7irz7irz7gLYtwFMu4CGVd1r8a7e15V2/2xu7P7DUPd1F/4EZfe+/XdqPziQ658ypEn5kUp9wErbxJFllOTMk8B1MzMW7HcvPJbUmVm8lux1atH5ZdiD2St+rysKMrWDOXtpV23s6zyzp1J1sNlFx9C3ym34KKHpcovVSkKyTca61hKVqZFWGyZxn68j3yGcSt3CytK698RicZuOKdh+JEUt8ebA/eGdJkVlyens7YbFsUn8tQIiYgeaojxboGKQ7l1m8Lwxaf2y896bj20fNPxoN3l+5D6Ju/r1HK2f3LOrvILXp4yywkVtpVU3s7ib5GgsDZhmX+O9px+TycFcUF0swc1UT91ydWhjQfJV9XGgztX9jce5BfbNx7kV2obD3o8gJUlOn1rT
/lCMmZSfrcIbmcjIkLb9jRf4MPXarxt7F1YTdXX9y6UJ2s7ECoIbA+BlRn3qapttNfG0k5UjPhHVdsutBFWY15NWi3OVtsm3lC6+cRzGpdHH4KIptIk1Z6KC/6lQfa8VrGpotxicVbaLi6+4fbhW1jRUhpQ5vN/h+mgeLOn4k2Y4s0/keL5ErGG4uVJTcWbSsWbYMVLV3mAtLbqHxAFJl2jwOQPGwX0nOhgFLB6RgELFgWs3lHgeZBhztr2H54hFG0pFW2BFW19F4o2H1D0H8EhDqrT7qlOG6ZO+3jqDOQfZjwMOZoqtJUqtMEqtJ9ShbO6CG21CK3HEuET8H5QbE5PsTkwsTnfUVWoKS5HKS4HLC7nexCX/WxrOE0xuT3F5MLE5D6TmsuZ83/7RuebXW5NfhVEbDzwpYtQlqtUlgtWlvuUyrLrylILy3mSWusROD+osmlPlU1hKps+UZV1bFVNlaqaglU1/Q5U5R6lujq2imY9VTSDqWj2TOoqc8r/dbH4a8hAx0ypqhlYVbOnUNWDOpo+Se30CCwf1NW8p67mMF3Nn6h2OraO5kodzcE6mj9LHc2OUh8dTTfi8wAdRSPu1ReM/CKBgle+u/P4yedStg4hCnVSleqGJlHLKGzVjnBbBmblyaHCK+zVSgZKcTUveIiWNm015FOMRXDlMCvZ1cGXnDsWybO48vmI+3ROQvml+sPa+hO4QLtKy52UOwq1vF1fq9stnFV+US71eCbt8gZvE+co02mVoVRcoKS66woPsdKmVrZorPgRhC0z2fLqs+twPT6x7dKTX+SpPhS0z2/zS0I95dZkz2xlT/LwbKYwxAfoOwYkca9+NJLfvFeZba+hfdBS9in/18X/NCchijK3GgQVEmpMPGSaw7X3zmy5uOWrV0JwF+Ie0FZBH9vSB5Xaxy93NlPQ9896CZRc8D0anljQ7Z66U/qDlkI5bpOxh2zW5r+bV35t/ba4P2X+XKxIF9YcYEdeh4i5skIf3HtO9xd6P3ZW4+2zPbRSVRwVfiRWvPPV6qwZzj/ZKBeeyyOUqo9Z00g32X4TT+Wcta/maVfC1QK4tkp4IfFL06TMv8NzskFZqtHYqeaW9gxY/kp/+h9QSwMEFAAAAAgAAAAhAFtt/ZMDAQAA8QEAABQAAAB3b3JkL3dlYlNldHRpbmdzLnhtbJXRwUoDMRAG4LvgOyy5t9kWFVm6LYhUvIigPkCazrbBTCbMpK716R1rrUgv9ZZJMh8z/JPZO8bqDVgCpdaMhrWpIHlahrRqzcvzfHBtKikuLV2kBK3ZgpjZ9Pxs0jc9LJ6gFP0plSpJGvStWZeSG2vFrwGdDClD0seOGF3RklcWHb9u8sATZlfCIsRQtnZc11dmz/ApCnVd8HBLfoOQyq7fMkQVKck6ZPnR+lO0nniZmTyI6D4Yvz10IR2Y0cURhMEzCXVlqMvsJ9pR2j6qdyeMv8Dl/4DxAUDf3K8SsVtEjUAnqRQzU82AcgkYPmBOfMPUC7D9unYxUv/4cKeF/RPU9BNQSwMEFAAAAAgAM2tQVmndDRX5BQAASxsAABUAAAB3b3JkL3RoZW1lL3RoZW1lMS54bWztWV2v0zYY/itW7ks+mqQJoqB+wgYHEOeMiUs3cRNznDiK3XNOhZAmuJw0aRqbdjGk3e1i2oYE0m7Yrzkb08Yk/sIcN22T1oGxlQkkWukcfzzP68fva7920nMXThICjlDOME27mnnG0ABKAxriNOpqMz5teRpgHKYhJDRFXW2OmHbh/Dl4lscoQUCwUybKiel0tZjz7Kyus0B0QXYmwUFOGZ3yMwFNdDqd4gDpkpYQ3TJMS08gTrXSBtzi0wylom9K8wRyUc0jPczhsVAm+YZb8lOYCGHXpH1wUNjXVgJHRPxJOSsaApLvF6ZRjSGx4aFZ/GNzNiA5OIKkq4lxQnp8gE64BghkXHR0NUN+NKCfP6evWIQ3kCvEsfwsiSUjPLQkMY8mK6YxsjzbXI8gEYRvA0de8V1blAgYBGK25hbYdFzDs5bgCmpRVFj3O2Z7g1AZob09gu/2LbtOkKhF0d6e6NgfDZ06QaIWRWeL0DOsvt+uEyRqUXS3CPao17FGdYJExQSnh9twt+N57hK+wkwpuaTE+65rdIZL/BqmV1bawkDKm9ZdAm/TfCwAMsqQ4xTweYamMBC4XsYpA0PMMgLnGshgSploNizTFIvQNqzVd+F3eBbBCr1sC9h2WyEJsCDHGe9qHwrDWgXz4ukPL54+Bqf3npze+/n0/v3Tez+paJdgGlVpz7/7/K+Hn4A/H3/7/MGXDQRWJfz246e//vJFA5JXkc++evT7k0fPvv7sj+8fqPC9HE6q+AOcIAauomNwgybF5BRDoEn+mpSDGOIqpZdGDKawIKngIx7X4FfnkEAVsI/qjryZi+ShRF6c3a6J3o/zGccq5OU4qSH3KCV9mqsndlkOV/HFLI0axs9nVeANCI+Uww82Qj2aZWL9Y6XRQYxqUq8TEX0YoRRxUPTRQ4RUvFsY1/y7tzxtwC0M+hCrHXOAJ1zNuoQTEaA5bAh9zUN7N0GfEuUAQ3RUh4ptAonSKCI1b16EMw4TtWqYkCr0CuSxUuj+PA9qjmdcBD1ChIJRiBhTkq7lxazXpMtQJDL1Ctgj86QOzTk+VEKvQEqr0CE9HMQwydS6cRpXwR+wQ7FiIbhOuVoHre+Zoi4CAtPmyN/EiL/mjv8IR7F6sRQ9s1y5RxCt79E5mUK0MK9vJPwEp6/I/v971hdJ9tk3D9+xfN/LsXqLbWb5RuBmbh/QPMTvRmofwll6HRXb531mf5/Z32f2l+zyN5HP1ylcr171pZ2k8d4/xYTs8zlBV5hM/kwcXuFYNMqKJK2eM7JYFJfj1YBRDmUZ5JR/jHm8H8NMjGPKISJW2o4YyCgTJ4jWaFyeP7Nkj4blw5y5eswVDMjXHYaz7hDnFV80u53KU/FqBFmLWFVDwX4dHdXh6jraKh2d9j/UIee3GyG+SohnvlSIXgmPuGsBcbQK39jlywUWQILCImClgWWcdx7zRpfW526ppujbu4t5TUd17dV1VBdlDEO01b7jqPuV2NYkWmolHe/NRF3fThgkrdfAsdiFbUeQA5h1tam4TYpikgmDrLiDQBKJ13sBL/39r9JNljM+hCxe4GRX6YMEc5QDghOx8mvRIOlanmmJLPE26/PFJn8L9emb0UbTKQp4Q8u6KvpKK8ru/4ouKnQmdO/H4TGYkFl+AwpvOR2z8GKIGV+5NMR5ZaGvXbmRw8qdWXtJuN6xkGQxLI+bWppf4GV5pacyESl1c1r1ejmbSTTeybH8atZGJm06W4pTtSGfvLl7QEVXu0GXo05/vvfKA+S/HxUVeV6DvHaDvMZzZZe3hsqA62XaeHzs/JzYXMN65Roqa1u/itDJbbEPhuJ6OyOclW8UTsRrIyFpwStTg2xeJpwTDmY57mp3DKdnDyxn0DI8Z9Sy27bR8pxeu9VznLY5ckxj2LfuCs/In4gWo4/FWy4y38lPR4qffgAWzrnjWmO/7ffdlt/ujVv2sO+1
/IHbbw3dQWc4Hg4czx/f1cCRBNu99sB2R17LNQeDlu0ahXzPb3Vsy+rZnZ43snsCXKbHkzKflM5Y+vT831BLAwQUAAAACAAFbmZZ8IgaroYCAAA2CAAAEQAcAHdvcmQvZG9jdW1lbnQueG1sIKIYACigFAAAAAAAAAAAAAAAAAAAAAAAAAAAAKWVS27bMBBAr6Jq3YT6OLYrxAla59MsCgTNomuaoiQiEocgacvu1brokXqFDinLVhIgsOOFRA7JefOThv/+/L28Xjd1sOLaCJCzMD6PwoBLBrmQ5Sxc2uJsGgbGUpnTGiSfhRtuwuuryzbLgS0bLm2AAGmyVrFZWFmrMkIMq3hDzXkjmAYDhT1n0BAoCsE4aUHnJIniyM+UBsaNQWtzKlfUhFtc85YGikvcLEA31KKoS9JQ/bxUZ0hX1IqFqIXdIDsa9xjAGLTMtoiznUNOJesc2g69hj7Ebqdys82At0g0r9EHkKYSah/GR2m4WfWQ1XtBrJq6P9eqeHRaDW40bXHYAw9xP++Umrrz/H1iHB1QEYfYaRziwkubvScNFXJv+EOpGSQ3vjgOkLwGqPK04txrWKo9TZxGe5DPO5b7r49gbYs8DM2c5sxTRdXuD2zjsTnOofhi6xAxm4avByAm8uNIfWgENQec4yDTN9/OmPH1xxgENYeZyW1eHUVK+i+ZOF1qaUUNtpaGZQ+lBE0XNSYbf48Av/DAtZDAF8C9MQdu8FO+DnrzoWv/C8g3blReJ1NU0wfMdTKPJtPbcXpqT3LJ82DL19aDvyTj9DaaeOM6wMeI/OcsHN2l36bzNO3WH3VA3MRefed1DZ+DX6Dr/NMlcUvurV+pT6ZJOr/7Onqt/lIF38qtG87so0eo8uk3UrA7xUkywkuzzbAs8cW0m4MW2MpnoQJtNRU27Liq/EGdcQvYWeNRd1aLssKjvbgAawHvjV6ueTHYrTjNOd5Rk8SLBYAdiOXSehEFb49BbXDZKMqwyP6QX8e7+1674ma1kPxRWIbOp+NuG4Pt48RpV2ec9Pf91X9QSwMEFAAAAAgABW5mWXz5ghLhAAAAQQIAAAsAAABfcmVscy8ucmVsc52SS04DMQyGrxJ53/G0SAihpt100x1CvYCVeGYimocS98HZWHAkrkDoBiLxUpe2f3/6HOXt5XW5Pvu9OnIuLgYN864HxcFE68Ko4SDD7A7Wq+Uj70lqokwuFVVXQtEwiaR7xGIm9lS6mDjUyRCzJ6llHjGReaKRcdH3t5i/MqBlqt1z4v8Q4zA4w5toDp6DfANGPgsHy3aWct3P4riA2lEeWTTYaB5quyCl1FU0qK3VkLf2BhReqfTzkehZyJIQmpj5d6GPRGO0uN7o70dqE586p5gtVqdLu9GZX3Sw+Qird1BLAwQUAAAACAAFbmZZvn2nPeMAAAAmAwAAHAAAAHdvcmQvX3JlbHMvZG9jdW1lbnQueG1sLnJlbHO1kj1uwzAMha8icK9lpz8oiihZumRNfQFFpmwjtiSITNqcrUOP1CtUcIHWQjN08fgeyfe+gZ/vH+vt2ziIM0bqvVNQFSUIdMY3vWsVnNjePMJ2s97joDltUNcHEunEkYKOOTxJSabDUVPhA7o0sT6OmpOMrQzaHHWLclWWDzLOMyDPFPUl4H8SvbW9wWdvTiM6vhIsX/HwgsyJn0DUOrbICmZmkRJB7BoFcdfcgpCLkdAfDLrGsFqUgS8DzgkmnfVXS/ZzusXf+kl+m1UGcb8khPWOa30YZiA/VkZxN1HI7Ns3X1BLAwQUAAAACAAccmZZUlo+hkwBAAAaBQAAEwAAAFtDb250ZW50X1R5cGVzXS54bWy1lE1OwzAQha9ieVslblkghJp2AWyhEr2A60xSC8e27Onf2VhwJK7AJGkjhEqDaLuJlMy8972xMv58/xhPt5VhawhRO5vxUTrkDKxyubZlxldYJHd8OhnPdx4io1YbM75E9PdCRLWESsbUebBUKVyoJNJrKIWX6k2WIG6Gw1uhnEWwmGDtwSfjRyjkyiB72tLnFhvARM4e2saalXHpvdFKItXF2uY/KMmekJKy6YlL7eOAGjgTRxFN6VfCQfhCJxF0DmwmAz7LitrExoVc5E6tKpKmp32OJHVFoRV0+trNB6cgRjriyqRdpZLaDnqDRNwZiJeP0fr+gQ+IpLhGgr1zf4YNLF6vFuObeX+SgsBzuTBw+RyddX8KpEWE9jk6O0hjc5JJrbPgfKTNDv8Y/LC6tTqhkT0E1D2/Xock77MnhPpWyCE/BhfNTTf5AlBLAQIUABQAAAAIAK+YSUqNEzDqOgEAAKcCAAAQAAAAAAAAAAAAAAAAAAAAAABkb2NQcm9wcy9hcHAueG1sUEsBAhQAFAAAAAgABW5mWVDfgmNYAQAAjAIAABEAAAAAAAAAAAAAAAAAaAEAAGRvY1Byb3BzL2NvcmUueG1sUEsBAhQAFAAAAAgABW5mWZ5WdX44AgAAAgkAABIAAAAAAAAAAAAAAAAA7wIAAHdvcmQvZm9udFRhYmxlLnhtbFBLAQIUABQAAAAIAAVuZlnBTYlGHgQAAA4MAAARAAAAAAAAAAAAAAAAAFcFAAB3b3JkL3NldHRpbmdzLnhtbFBLAQIUABQAAAAIAAVuZll/jXz0SQ4AAOeiAAAPAAAAAAAAAAAAAAAAAKQJAAB3b3JkL3N0eWxlcy54bWxQSwECFAAUAAAACAAAACEAW239kwMBAADxAQAAFAAAAAAAAAAAAAAAAAAaGAAAd29yZC93ZWJTZXR0aW5ncy54bWxQSwECFAAUAAAACAAza1BWad0NFfkFAABLGwAAFQAAAAAAAAAAAAAAAABPGQAAd29yZC90aGVtZS90aGVtZTEueG1sUEsBAhQAFAAAAAgABW5mWfCIGq6GAgAANggAABEAAAAAAAAAAAAAAAAAex8AAHdvcmQvZG9jdW1lbnQueG1sUEsBAhQAFAAAAAgABW5mWXz5ghLhAAAAQQIAAAsAAAAAAAAAAAAAAAAATCIAAF9yZWxzLy5yZWxzUEsBAhQAFAAAAAgABW5mWb59pz3jAAAAJgMAABwAAAAAAAAAAAAAAAAAViMAAHdvcmQvX3JlbHMvZG9jdW1lbnQueG1sLnJlbHNQSwECFAAUAAAACAAccmZZUlo+hkwBAAAaBQAAEwAAAAAAAAAAAAAAAABzJAAAW0NvbnRlbnRfVHlwZXNdLnhtbFBLBQYAAAAACwALAMECAADwJQAAAAA=" + ) + + expected_result_pdf = "Hello, World!" + expected_result_docx = "Hello, World!!" 
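+
+    # The fixtures above are real "Hello, World!" documents served as /Test_PDF and /Test_DOCX below;
+    # the check expects one FILESYSTEM and one RAW_TEXT event per file, with the extracted text
+    # matching expected_result_pdf / expected_result_docx exactly.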
+ + async def setup_after_prep(self, module_test): + module_test.set_expect_requests( + {"uri": "/"}, + {"response_data": '<a href="/Test_PDF"/><a href="/Test_DOCX"/>'}, + ) + module_test.set_expect_requests( + {"uri": "/Test_PDF"}, + {"response_data": self.pdf_data, "headers": {"Content-Type": "application/pdf"}}, + ) + module_test.set_expect_requests( + {"uri": "/Test_DOCX"}, + { + "response_data": self.docx_data, + "headers": {"Content-Type": "application/vnd.openxmlformats-officedocument.wordprocessingml.document"}, + }, + ) + + def check(self, module_test, events): + filesystem_events = [e for e in events if e.type == "FILESYSTEM"] + assert 2 == len(filesystem_events), filesystem_events + for filesystem_event in filesystem_events: + file = Path(filesystem_event.data["path"]) + assert file.is_file(), "Destination file doesn't exist" + assert open(file, "rb").read() == self.pdf_data or open(file, "rb").read() == self.docx_data, ( + f"File at {file} does not contain the correct content" + ) + raw_text_events = [e for e in events if e.type == "RAW_TEXT"] + assert 2 == len(raw_text_events), "Failed to emit RAW_TEXT event" + for raw_text_event in raw_text_events: + assert raw_text_event.data in [ + self.expected_result_pdf, + self.expected_result_docx, + ], f"Text extracted is incorrect, got {raw_text_event.data}" diff --git a/bbot/test/test_step_2/module_tests/test_module_ffuf.py b/bbot/test/test_step_2/module_tests/test_module_ffuf.py new file mode 100644 index 0000000000..3df659e159 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_ffuf.py @@ -0,0 +1,90 @@ +from .base import ModuleTestBase, tempwordlist + + +class TestFFUF(ModuleTestBase): + targets = ["http://127.0.0.1:8888"] + test_wordlist = ["11111111", "admin", "junkword1", "zzzjunkword2"] + config_overrides = { + "modules": { + "ffuf": { + "wordlist": tempwordlist(test_wordlist), + } + } + } + modules_overrides = ["ffuf", "httpx"] + + async def setup_before_prep(self, module_test): + expect_args = {"method": "GET", "uri": "/admin"} + respond_args = {"response_data": "alive admin page"} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + expect_args = {"method": "GET", "uri": "/"} + respond_args = {"response_data": "alive"} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + def check(self, module_test, events): + assert any(e.type == "URL_UNVERIFIED" and "admin" in e.data for e in events) + assert not any(e.type == "URL_UNVERIFIED" and "11111111" in e.data for e in events) + + +class TestFFUF2(TestFFUF): + test_wordlist = ["11111111", "console", "junkword1", "zzzjunkword2"] + config_overrides = {"modules": {"ffuf": {"wordlist": tempwordlist(test_wordlist), "extensions": "php"}}} + + async def setup_before_prep(self, module_test): + expect_args = {"method": "GET", "uri": "/console.php"} + respond_args = {"response_data": "alive admin page"} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + expect_args = {"method": "GET", "uri": "/"} + respond_args = {"response_data": "alive"} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + def check(self, module_test, events): + assert any(e.type == "URL_UNVERIFIED" and "console" in e.data for e in events) + assert not any(e.type == "URL_UNVERIFIED" and "11111111" in e.data for e in events) + + +class TestFFUF_ignorecase(TestFFUF): + test_wordlist = ["11111111", "Admin", "admin", "zzzjunkword2"] + config_overrides = { + "modules":
{"ffuf": {"wordlist": tempwordlist(test_wordlist), "extensions": "php", "ignore_case": True}} + } + + async def setup_before_prep(self, module_test): + expect_args = {"method": "GET", "uri": "/admin"} + respond_args = {"response_data": "alive admin page"} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + expect_args = {"method": "GET", "uri": "/Admin"} + respond_args = {"response_data": "alive admin page"} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + expect_args = {"method": "GET", "uri": "/"} + respond_args = {"response_data": "alive"} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + def check(self, module_test, events): + assert any(e.type == "URL_UNVERIFIED" and "admin" in e.data for e in events) + assert not any(e.type == "URL_UNVERIFIED" and "Admin" in e.data for e in events) + + +class TestFFUFHeaders(TestFFUF): + test_wordlist = ["11111111", "console", "junkword1", "zzzjunkword2"] + config_overrides = { + "modules": {"ffuf": {"wordlist": tempwordlist(test_wordlist), "extensions": "php"}}, + "web": {"http_headers": {"test": "test2"}}, + } + + async def setup_before_prep(self, module_test): + expect_args = {"method": "GET", "headers": {"test": "test2"}, "uri": "/console.php"} + respond_args = {"response_data": "alive admin page"} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + expect_args = {"method": "GET", "uri": "/"} + respond_args = {"response_data": "alive"} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + def check(self, module_test, events): + assert any(e.type == "URL_UNVERIFIED" and "console" in e.data for e in events) + assert not any(e.type == "URL_UNVERIFIED" and "11111111" in e.data for e in events) diff --git a/bbot/test/test_step_2/module_tests/test_module_ffuf_shortnames.py b/bbot/test/test_step_2/module_tests/test_module_ffuf_shortnames.py new file mode 100644 index 0000000000..a10d8a1eae --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_ffuf_shortnames.py @@ -0,0 +1,226 @@ +from .base import ModuleTestBase, tempwordlist + + +class TestFFUFShortnames(ModuleTestBase): + targets = ["http://127.0.0.1:8888"] + test_wordlist = ["11111111", "administrator", "portal", "console", "junkword1", "zzzjunkword2", "directory"] + config_overrides = { + "modules": { + "ffuf_shortnames": { + "find_common_prefixes": True, + "find_subwords": True, + "wordlist": tempwordlist(test_wordlist), + } + } + } + modules_overrides = ["ffuf_shortnames", "httpx"] + + async def setup_after_prep(self, module_test): + module_test.httpserver.no_handler_status_code = 404 + + seed_events = [] + parent_event = module_test.scan.make_event( + "http://127.0.0.1:8888/", + "URL", + module_test.scan.root_event, + module="httpx", + tags=["status-200", "distance-0"], + ) + seed_events.append( + module_test.scan.make_event( + "http://127.0.0.1:8888/ADMINI~1.ASP", + "URL_HINT", + parent_event, + module="iis_shortnames", + tags=["shortname-endpoint"], + ) + ) + seed_events.append( + module_test.scan.make_event( + "http://127.0.0.1:8888/ADM_PO~1.ASP", + "URL_HINT", + parent_event, + module="iis_shortnames", + tags=["shortname-endpoint"], + ) + ) + seed_events.append( + module_test.scan.make_event( + "http://127.0.0.1:8888/ABCZZZ~1.ASP", + "URL_HINT", + parent_event, + module="iis_shortnames", + tags=["shortname-endpoint"], + ) + ) + seed_events.append( + module_test.scan.make_event( + 
"http://127.0.0.1:8888/ABCXXX~1.ASP", + "URL_HINT", + parent_event, + module="iis_shortnames", + tags=["shortname-endpoint"], + ) + ) + seed_events.append( + module_test.scan.make_event( + "http://127.0.0.1:8888/ABCYYY~1.ASP", + "URL_HINT", + parent_event, + module="iis_shortnames", + tags=["shortname-endpoint"], + ) + ) + seed_events.append( + module_test.scan.make_event( + "http://127.0.0.1:8888/ABCCON~1.ASP", + "URL_HINT", + parent_event, + module="iis_shortnames", + tags=["shortname-endpoint"], + ) + ) + seed_events.append( + module_test.scan.make_event( + "http://127.0.0.1:8888/DIRECT~1", + "URL_HINT", + parent_event, + module="iis_shortnames", + tags=["shortname-directory"], + ) + ) + seed_events.append( + module_test.scan.make_event( + "http://127.0.0.1:8888/ADM_DI~1", + "URL_HINT", + parent_event, + module="iis_shortnames", + tags=["shortname-directory"], + ) + ) + seed_events.append( + module_test.scan.make_event( + "http://127.0.0.1:8888/XYZDIR~1", + "URL_HINT", + parent_event, + module="iis_shortnames", + tags=["shortname-directory"], + ) + ) + seed_events.append( + module_test.scan.make_event( + "http://127.0.0.1:8888/XYZAAA~1", + "URL_HINT", + parent_event, + module="iis_shortnames", + tags=["shortname-directory"], + ) + ) + seed_events.append( + module_test.scan.make_event( + "http://127.0.0.1:8888/XYZBBB~1", + "URL_HINT", + parent_event, + module="iis_shortnames", + tags=["shortname-directory"], + ) + ) + seed_events.append( + module_test.scan.make_event( + "http://127.0.0.1:8888/XYZCCC~1", + "URL_HINT", + parent_event, + module="iis_shortnames", + tags=["shortname-directory"], + ) + ) + seed_events.append( + module_test.scan.make_event( + "http://127.0.0.1:8888/SHORT~1.PL", + "URL_HINT", + parent_event, + module="iis_shortnames", + tags=["shortname-endpoint"], + ) + ) + + seed_events.append( + module_test.scan.make_event( + "http://127.0.0.1:8888/newpro~1.asp", + "URL_HINT", + parent_event, + module="iis_shortnames", + tags=["shortname-endpoint"], + ) + ) + module_test.scan.target.seeds.events = set(seed_events) + + expect_args = {"method": "GET", "uri": "/administrator.aspx"} + respond_args = {"response_data": "alive"} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + expect_args = {"method": "GET", "uri": "/adm_portal.aspx"} + respond_args = {"response_data": "alive"} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + expect_args = {"method": "GET", "uri": "/abcconsole.aspx"} + respond_args = {"response_data": "alive"} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + expect_args = {"method": "GET", "uri": "/directory/"} + respond_args = {"response_data": "alive"} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + expect_args = {"method": "GET", "uri": "/adm_directory/"} + respond_args = {"response_data": "alive"} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + expect_args = {"method": "GET", "uri": "/xyzdirectory/"} + respond_args = {"response_data": "alive"} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + expect_args = {"method": "GET", "uri": "/short.pl"} + respond_args = {"response_data": "alive"} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + expect_args = {"method": "GET", "uri": "/newproxy.aspx"} + respond_args = {"response_data": "alive"} + 
module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + def check(self, module_test, events): + basic_detection = False + directory_detection = False + prefix_detection = False + delimiter_detection = False + directory_delimiter_detection = False + prefix_delimiter_detection = False + short_extensions_detection = False + subword_detection = False + + for e in events: + if e.type == "URL_UNVERIFIED": + if e.data == "http://127.0.0.1:8888/administrator.aspx": + basic_detection = True + if e.data == "http://127.0.0.1:8888/directory/": + directory_detection = True + if e.data == "http://127.0.0.1:8888/adm_portal.aspx": + prefix_detection = True + if e.data == "http://127.0.0.1:8888/abcconsole.aspx": + delimiter_detection = True + if e.data == "http://127.0.0.1:8888/adm_directory/": + directory_delimiter_detection = True + if e.data == "http://127.0.0.1:8888/xyzdirectory/": + prefix_delimiter_detection = True + if e.data == "http://127.0.0.1:8888/short.pl": + short_extensions_detection = True + if e.data == "http://127.0.0.1:8888/newproxy.aspx": + subword_detection = True + + assert basic_detection + assert directory_detection + assert prefix_detection + assert delimiter_detection + assert directory_delimiter_detection + assert prefix_delimiter_detection + assert short_extensions_detection + assert subword_detection diff --git a/bbot/test/test_step_2/module_tests/test_module_filedownload.py b/bbot/test/test_step_2/module_tests/test_module_filedownload.py new file mode 100644 index 0000000000..6e046aa473 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_filedownload.py @@ -0,0 +1,87 @@ +from pathlib import Path +from .base import ModuleTestBase + + +class TestFileDownload(ModuleTestBase): + targets = ["http://127.0.0.1:8888"] + modules_overrides = ["filedownload", "httpx", "excavate", "speculate"] + config_overrides = {"web": {"spider_distance": 2, "spider_depth": 2}} + + pdf_data = """%PDF-1. 
+1 0 obj<</Type/Catalog/Pages 2 0 R>>endobj +2 0 obj<</Type/Pages/Kids[3 0 R]/Count 1>>endobj +3 0 obj<</Type/Page/MediaBox[0 0 3 3]>>endobj +trailer <</Root 1 0 R>>""" + + async def setup_after_prep(self, module_test): + module_test.set_expect_requests( + {"uri": "/"}, + { + "response_data": '<a href="/Test_File.txt"/><a href="/Test_PDF"/><a href="/test.html"/><a href="/test2"/>' + }, + ) + module_test.set_expect_requests( + {"uri": "/Test_File.txt"}, + { + "response_data": "juicy stuff", + }, + ) + module_test.set_expect_requests( + {"uri": "/Test_PDF"}, + {"response_data": self.pdf_data, "headers": {"Content-Type": "application/pdf"}}, + ) + module_test.set_expect_requests( + {"uri": "/test.html"}, + {"response_data": "", "headers": {"Content-Type": "text/html"}}, + ) + module_test.set_expect_requests( + {"uri": "/test2"}, + {"response_data": "", "headers": {"Content-Type": "text/html"}}, + ) + + def check(self, module_test, events): + filesystem_events = [e for e in events if e.type == "FILESYSTEM"] + download_dir = module_test.scan.home / "filedownload" + + # text file + text_file_event = [e for e in filesystem_events if "test-file.txt" in e.data["path"]] + assert 1 == len(text_file_event), f"No text file found at {download_dir}" + file = Path(text_file_event[0].data["path"]) + assert file.is_file(), f"File not found at {file}" + assert open(file).read() == "juicy stuff", f"File at {file} does not contain the correct content" + + # PDF file (no extension) + pdf_file_event = [e for e in filesystem_events if "test-pdf.pdf" in e.data["path"]] + assert 1 == len(pdf_file_event), f"No PDF file found at {download_dir}" + file = Path(pdf_file_event[0].data["path"]) + assert file.is_file(), f"File not found at {file}" + assert open(file).read() == self.pdf_data, f"File at {file} does not contain the correct content" + + # we don't want html files + html_files = list(download_dir.glob("*.html")) + assert len(html_files) == 0, "HTML files were erroneously downloaded" + + +class TestFileDownloadLongFilename(TestFileDownload): + async def setup_after_prep(self, module_test): + module_test.set_expect_requests( + {"uri": "/"}, + { + "response_data": '<a href="/blacklanternsecurity/blacklanternsecurity/blacklanternsecurity/blacklanternsecurity/blacklanternsecurity/blacklanternsecurity/blacklanternsecurity/blacklanternsecurity/blacklanternsecurity/blacklanternsecurity/blacklanternsecurity/blacklanternsecurity/blacklanternsecurity/blacklanternsecurity/blacklanternsecurity/blacklanternsecurity/blacklanternsecurity/blacklanternsecurity/blacklanternsecurity/blacklanternsecurity.txt"/>'
 + }, + ) + module_test.set_expect_requests( + { + "uri": "/blacklanternsecurity/blacklanternsecurity/blacklanternsecurity/blacklanternsecurity/blacklanternsecurity/blacklanternsecurity/blacklanternsecurity/blacklanternsecurity/blacklanternsecurity/blacklanternsecurity/blacklanternsecurity/blacklanternsecurity/blacklanternsecurity/blacklanternsecurity/blacklanternsecurity/blacklanternsecurity/blacklanternsecurity/blacklanternsecurity/blacklanternsecurity/blacklanternsecurity.txt" + }, + { + "response_data": "juicy stuff", + }, + ) + + def check(self, module_test, events): + filesystem_events = [e for e in events if e.type == "FILESYSTEM"] + assert len(filesystem_events) == 1 + file_path = Path(filesystem_events[0].data["path"]) + assert file_path.is_file(), f"File not found at {file_path}" + assert file_path.read_text() == "juicy stuff", f"File at {file_path} does not contain the correct content" diff --git a/bbot/test/test_step_2/module_tests/test_module_fingerprintx.py b/bbot/test/test_step_2/module_tests/test_module_fingerprintx.py new file mode 100644 index 0000000000..7e0cc3a169 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_fingerprintx.py @@ -0,0 +1,14 @@ +from .base import ModuleTestBase + + +class TestFingerprintx(ModuleTestBase): + targets = ["127.0.0.1:8888"] + + def check(self, module_test, events): + assert any( + event.type == "PROTOCOL" + and event.host == module_test.scan.helpers.make_ip_type("127.0.0.1") + and event.port == 8888 + and event.data["protocol"] == "HTTP" + for event in events + ), "HTTP
protocol not detected" diff --git a/bbot/test/test_step_2/module_tests/test_module_fullhunt.py b/bbot/test/test_step_2/module_tests/test_module_fullhunt.py new file mode 100644 index 0000000000..547063495a --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_fullhunt.py @@ -0,0 +1,52 @@ +from .base import ModuleTestBase + + +class TestFullhunt(ModuleTestBase): + config_overrides = {"modules": {"fullhunt": {"api_key": "asdf"}}} + + async def setup_before_prep(self, module_test): + module_test.httpx_mock.add_response( + url="https://fullhunt.io/api/v1/auth/status", + match_headers={"x-api-key": "asdf"}, + json={ + "message": "", + "status": 200, + "user": { + "company": "nightwatch", + "email": "jonsnow@nightwatch.notreal", + "first_name": "Jon", + "last_name": "Snow", + "plan": "free", + }, + "user_credits": { + "credits_usage": 0, + "max_results_per_request": 3000, + "remaining_credits": 100, + "total_credits_per_month": 100, + }, + }, + ) + module_test.httpx_mock.add_response( + url="https://fullhunt.io/api/v1/domain/blacklanternsecurity.com/subdomains", + match_headers={"x-api-key": "asdf"}, + json={ + "domain": "blacklanternsecurity.com", + "hosts": [ + "asdf.blacklanternsecurity.com", + ], + "message": "", + "metadata": { + "all_results_count": 11, + "available_results_for_user": 11, + "domain": "blacklanternsecurity.com", + "last_scanned": 1647083421, + "max_results_for_user": 3000, + "timestamp": 1684541940, + "user_plan": "free", + }, + "status": 200, + }, + ) + + def check(self, module_test, events): + assert any(e.data == "asdf.blacklanternsecurity.com" for e in events), "Failed to detect subdomain" diff --git a/bbot/test/test_step_2/module_tests/test_module_generic_ssrf.py b/bbot/test/test_step_2/module_tests/test_module_generic_ssrf.py new file mode 100644 index 0000000000..c0911fd661 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_generic_ssrf.py @@ -0,0 +1,88 @@ +import re +import asyncio +from werkzeug.wrappers import Response + +from .base import ModuleTestBase + + +def extract_subdomain_tag(data): + pattern = r"http://([a-z0-9]{4})\.fakedomain\.fakeinteractsh\.com" + match = re.search(pattern, data) + if match: + return match.group(1) + + +class TestGeneric_SSRF(ModuleTestBase): + targets = ["http://127.0.0.1:8888"] + modules_overrides = ["httpx", "generic_ssrf"] + + def request_handler(self, request): + subdomain_tag = None + + if request.method == "GET": + subdomain_tag = extract_subdomain_tag(request.full_path) + elif request.method == "POST": + subdomain_tag = extract_subdomain_tag(request.data.decode()) + if subdomain_tag: + asyncio.run( + self.interactsh_mock_instance.mock_interaction( + subdomain_tag, msg=f"{request.method}: {request.data.decode()}" + ) + ) + + return Response("alive", status=200) + + async def setup_before_prep(self, module_test): + self.interactsh_mock_instance = module_test.mock_interactsh("generic_ssrf") + module_test.monkeypatch.setattr( + module_test.scan.helpers, "interactsh", lambda *args, **kwargs: self.interactsh_mock_instance + ) + + async def setup_after_prep(self, module_test): + expect_args = re.compile("/") + module_test.set_expect_requests_handler(expect_args=expect_args, request_handler=self.request_handler) + + def check(self, module_test, events): + total_vulnerabilities = 0 + total_findings = 0 + + for e in events: + if e.type == "VULNERABILITY": + total_vulnerabilities += 1 + elif e.type == "FINDING": + total_findings += 1 + + assert total_vulnerabilities == 30, "Incorrect number of 
vulnerabilities detected" + assert total_findings == 30, "Incorrect number of findings detected" + + assert any( + e.type == "VULNERABILITY" + and "Out-of-band interaction: [Generic SSRF (GET)]" + and "[Triggering Parameter: Dest]" in e.data["description"] + for e in events + ), "Failed to detect Generic SSRF (GET)" + assert any( + e.type == "VULNERABILITY" and "Out-of-band interaction: [Generic SSRF (POST)]" in e.data["description"] + for e in events + ), "Failed to detect Generic SSRF (POST)" + assert any( + e.type == "VULNERABILITY" and "Out-of-band interaction: [Generic XXE] [HTTP]" in e.data["description"] + for e in events + ), "Failed to detect Generic SSRF (XXE)" + + +class TestGeneric_SSRF_httponly(TestGeneric_SSRF): + config_overrides = {"modules": {"generic_ssrf": {"skip_dns_interaction": True}}} + + def check(self, module_test, events): + total_vulnerabilities = 0 + total_findings = 0 + + for e in events: + if e.type == "VULNERABILITY": + total_vulnerabilities += 1 + elif e.type == "FINDING": + total_findings += 1 + + assert total_vulnerabilities == 30, "Incorrect number of vulnerabilities detected" + assert total_findings == 0, "Incorrect number of findings detected" diff --git a/bbot/test/test_step_2/module_tests/test_module_git.py b/bbot/test/test_step_2/module_tests/test_module_git.py new file mode 100644 index 0000000000..6c986da55c --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_git.py @@ -0,0 +1,44 @@ +from .base import ModuleTestBase + + +class TestGit(ModuleTestBase): + targets = [ + "http://127.0.0.1:8888/", + "http://127.0.0.1:8888/test/asdf", + "http://127.0.0.1:8888/test2", + ] + + modules_overrides = ["git", "httpx"] + + git_config = """[core] + repositoryformatversion = 0 + filemode = true + bare = false + logallrefupdates = true""" + + async def setup_after_prep(self, module_test): + module_test.set_expect_requests( + expect_args={"uri": "/.git/config"}, respond_args={"response_data": self.git_config} + ) + module_test.set_expect_requests( + expect_args={"uri": "/test/.git/config"}, respond_args={"response_data": self.git_config} + ) + module_test.set_expect_requests( + expect_args={"uri": "/test/asdf/.git/config"}, respond_args={"response_data": self.git_config} + ) + module_test.set_expect_requests(expect_args={"uri": "/test2/.git/config"}, respond_args={"response_data": ""}) + + def check(self, module_test, events): + assert any( + e.type == "FINDING" and "http://127.0.0.1:8888/.git/config" in e.data["description"] for e in events + ) + assert any( + e.type == "FINDING" and "http://127.0.0.1:8888/test/.git/config" in e.data["description"] for e in events + ) + assert any( + e.type == "FINDING" and "http://127.0.0.1:8888/test/asdf/.git/config" in e.data["description"] + for e in events + ) + assert not any( + e.type == "FINDING" and "http://127.0.0.1:8888/test2/.git/config" in e.data["description"] for e in events + ) diff --git a/bbot/test/test_step_2/module_tests/test_module_git_clone.py b/bbot/test/test_step_2/module_tests/test_module_git_clone.py new file mode 100644 index 0000000000..d6a994402a --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_git_clone.py @@ -0,0 +1,226 @@ +import io +import base64 +import shutil +import tarfile +import subprocess +from pathlib import Path + +from .base import ModuleTestBase + + +class TestGit_Clone(ModuleTestBase): + config_overrides = {"modules": {"git_clone": {"api_key": "asdf"}}} + modules_overrides = ["github_org", "speculate", "git_clone"] + + file_content = 
"https://admin:admin@the-internet.herokuapp.com/basic_auth" + + async def setup_before_prep(self, module_test): + module_test.httpx_mock.add_response(url="https://api.github.com/zen") + module_test.httpx_mock.add_response( + url="https://api.github.com/orgs/blacklanternsecurity", + json={ + "login": "blacklanternsecurity", + "id": 25311592, + "node_id": "MDEyOk9yZ2FuaXphdGlvbjI1MzExNTky", + "url": "https://api.github.com/orgs/blacklanternsecurity", + "repos_url": "https://api.github.com/orgs/blacklanternsecurity/repos", + "events_url": "https://api.github.com/orgs/blacklanternsecurity/events", + "hooks_url": "https://api.github.com/orgs/blacklanternsecurity/hooks", + "issues_url": "https://api.github.com/orgs/blacklanternsecurity/issues", + "members_url": "https://api.github.com/orgs/blacklanternsecurity/members{/member}", + "public_members_url": "https://api.github.com/orgs/blacklanternsecurity/public_members{/member}", + "avatar_url": "https://avatars.githubusercontent.com/u/25311592?v=4", + "description": "Security Organization", + "name": "Black Lantern Security", + "company": None, + "blog": "www.blacklanternsecurity.com", + "location": "Charleston, SC", + "email": None, + "twitter_username": None, + "is_verified": False, + "has_organization_projects": True, + "has_repository_projects": True, + "public_repos": 70, + "public_gists": 0, + "followers": 415, + "following": 0, + "html_url": "https://github.com/blacklanternsecurity", + "created_at": "2017-01-24T00:14:46Z", + "updated_at": "2022-03-28T11:39:03Z", + "archived_at": None, + "type": "Organization", + }, + ) + module_test.httpx_mock.add_response( + url="https://api.github.com/orgs/blacklanternsecurity/repos?per_page=100&page=1", + json=[ + { + "id": 459780477, + "node_id": "R_kgDOG2exfQ", + "name": "test_keys", + "full_name": "blacklanternsecurity/test_keys", + "private": False, + "owner": { + "login": "blacklanternsecurity", + "id": 79229934, + "node_id": "MDEyOk9yZ2FuaXphdGlvbjc5MjI5OTM0", + "avatar_url": "https://avatars.githubusercontent.com/u/79229934?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/blacklanternsecurity", + "html_url": "https://github.com/blacklanternsecurity", + "followers_url": "https://api.github.com/users/blacklanternsecurity/followers", + "following_url": "https://api.github.com/users/blacklanternsecurity/following{/other_user}", + "gists_url": "https://api.github.com/users/blacklanternsecurity/gists{/gist_id}", + "starred_url": "https://api.github.com/users/blacklanternsecurity/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/blacklanternsecurity/subscriptions", + "organizations_url": "https://api.github.com/users/blacklanternsecurity/orgs", + "repos_url": "https://api.github.com/users/blacklanternsecurity/repos", + "events_url": "https://api.github.com/users/blacklanternsecurity/events{/privacy}", + "received_events_url": "https://api.github.com/users/blacklanternsecurity/received_events", + "type": "Organization", + "site_admin": False, + }, + "html_url": "https://github.com/blacklanternsecurity/test_keys", + "description": None, + "fork": False, + "url": "https://api.github.com/repos/blacklanternsecurity/test_keys", + "forks_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/forks", + "keys_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/keys{/key_id}", + "collaborators_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/collaborators{/collaborator}", + "teams_url": 
"https://api.github.com/repos/blacklanternsecurity/test_keys/teams", + "hooks_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/hooks", + "issue_events_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/issues/events{/number}", + "events_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/events", + "assignees_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/assignees{/user}", + "branches_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/branches{/branch}", + "tags_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/tags", + "blobs_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/git/blobs{/sha}", + "git_tags_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/git/tags{/sha}", + "git_refs_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/git/refs{/sha}", + "trees_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/git/trees{/sha}", + "statuses_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/statuses/{sha}", + "languages_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/languages", + "stargazers_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/stargazers", + "contributors_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/contributors", + "subscribers_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/subscribers", + "subscription_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/subscription", + "commits_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/commits{/sha}", + "git_commits_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/git/commits{/sha}", + "comments_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/comments{/number}", + "issue_comment_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/issues/comments{/number}", + "contents_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/contents/{+path}", + "compare_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/compare/{base}...{head}", + "merges_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/merges", + "archive_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/{archive_format}{/ref}", + "downloads_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/downloads", + "issues_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/issues{/number}", + "pulls_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/pulls{/number}", + "milestones_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/milestones{/number}", + "notifications_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/notifications{?since,all,participating}", + "labels_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/labels{/name}", + "releases_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/releases{/id}", + "deployments_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/deployments", + "created_at": "2022-02-15T23:10:51Z", + "updated_at": "2023-09-02T12:20:13Z", + "pushed_at": "2023-10-19T02:56:46Z", + "git_url": "git://github.com/blacklanternsecurity/test_keys.git", + "ssh_url": "git@github.com:blacklanternsecurity/test_keys.git", + "clone_url": "https://github.com/blacklanternsecurity/test_keys.git", + "svn_url": 
"https://github.com/blacklanternsecurity/test_keys", + "homepage": None, + "size": 2, + "stargazers_count": 2, + "watchers_count": 2, + "language": None, + "has_issues": True, + "has_projects": True, + "has_downloads": True, + "has_wiki": True, + "has_pages": False, + "has_discussions": False, + "forks_count": 32, + "mirror_url": None, + "archived": False, + "disabled": False, + "open_issues_count": 2, + "license": None, + "allow_forking": True, + "is_template": False, + "web_commit_signoff_required": False, + "topics": [], + "visibility": "public", + "forks": 32, + "open_issues": 2, + "watchers": 2, + "default_branch": "main", + "permissions": {"admin": False, "maintain": False, "push": False, "triage": False, "pull": True}, + } + ], + ) + + async def setup_after_prep(self, module_test): + temp_path = Path("/tmp/.bbot_test") + shutil.rmtree(temp_path / "test_keys", ignore_errors=True) + subprocess.run(["git", "init", "test_keys"], cwd=temp_path) + temp_repo_path = temp_path / "test_keys" + with open(temp_repo_path / "keys.txt", "w") as f: + f.write(self.file_content) + subprocess.run(["git", "add", "."], cwd=temp_repo_path) + subprocess.run( + [ + "git", + "-c", + "user.name='BBOT Test'", + "-c", + "user.email='bbot@blacklanternsecurity.com'", + "commit", + "-m", + "Initial commit", + ], + check=True, + cwd=temp_repo_path, + ) + + old_filter_event = module_test.scan.modules["git_clone"].filter_event + + def new_filter_event(event): + event.data["url"] = event.data["url"].replace( + "https://github.com/blacklanternsecurity", f"file://{temp_path}" + ) + return old_filter_event(event) + + module_test.monkeypatch.setattr(module_test.scan.modules["git_clone"], "filter_event", new_filter_event) + + def check(self, module_test, events): + filesystem_events = [ + e + for e in events + if e.type == "FILESYSTEM" + and "git_repos/.bbot_test/test_keys" in e.data["path"] + and "git" in e.tags + and e.scope_distance == 1 + ] + assert 1 == len(filesystem_events), "Failed to git clone CODE_REPOSITORY" + # make sure the binary blob isn't here + assert not any("blob" in e.data for e in [e for e in events if e.type == "FILESYSTEM"]) + filesystem_event = filesystem_events[0] + folder = Path(filesystem_event.data["path"]) + assert folder.is_dir(), "Destination folder doesn't exist" + with open(folder / "keys.txt") as f: + content = f.read() + assert content == self.file_content, "File content doesn't match" + + +class TestGit_CloneWithBlob(TestGit_Clone): + config_overrides = {"folder_blobs": True} + + def check(self, module_test, events): + filesystem_events = [e for e in events if e.type == "FILESYSTEM"] + assert len(filesystem_events) == 1 + assert all("blob" in e.data for e in filesystem_events) + filesystem_event = filesystem_events[0] + blob = filesystem_event.data["blob"] + tar_bytes = base64.b64decode(blob) + tar_stream = io.BytesIO(tar_bytes) + with tarfile.open(fileobj=tar_stream, mode="r:gz") as tar: + assert "test_keys/keys.txt" in tar.getnames() diff --git a/bbot/test/test_step_2/module_tests/test_module_gitdumper.py b/bbot/test/test_step_2/module_tests/test_module_gitdumper.py new file mode 100644 index 0000000000..ad8c237a6b --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_gitdumper.py @@ -0,0 +1,385 @@ +from pathlib import Path +from .base import ModuleTestBase + + +class TestGitDumper_Dirlisting(ModuleTestBase): + targets = [ + "http://127.0.0.1:8888/test", + ] + + modules_overrides = ["git", "gitdumper", "httpx"] + + index_html = """ + + Index of /.git + + +

Index of /.git

+ + + + + + + + + + + + + +
NameSize
<branches>
COMMIT_EDITMSG157B
config157B
description73B
HEAD23B
<hooks>
<info>
<objects>
index
<refs>
<logs>
+ + """ + + info_index = """ + + Index of /.git/info + + +

Index of /.git/info

+ + + + + +
NameSize
[..]
exclude240B
excludeme0B
+ + """ + + objects_index = """ + + Index of /.git/objects + + +

Index of /.git/objects

+ + + + + + + + +
NameSize
[..]
<05>
<34>
<c2>
<pack>
<info>
+ + """ + + objects_o5_index = """ + + Index of /.git/objects/05 + + +

Index of /.git/objects/05

+ + + + +
NameSize
[..]
27e6bd2d76b45e2933183f1b506c7ac49f5872
+ + """ + + objects_34_index = """ + + Index of /.git/objects/34 + + +

Index of /.git/objects/34

+ + + + +
NameSize
[..]
dc86f0247798892a89553e7c5c2d5aa06c2c5b
+ + """ + + objects_c2_index = """ + + Index of /.git/objects/c2 + + +

Index of /.git/objects/c2

+ + + + +
NameSize
[..]
69d751b8e2fd0be0d0dc7a6437a4dce4ec0200
+ + """ + + refs_index = """ + + Index of /.git/refs + + +

Index of /.git/refs

+ + + + + +
NameSize
[..]
<heads>
<tags>
+ + + """ + + refs_heads_index = """ + + Index of /.git/refs/heads + + +

Index of /.git/refs/heads

+ + + + +
NameSize
[..]
master
+ + + """ + + logs_index = """ + + Index of /.git/logs + + +

Index of /.git/logs

+ + + + + +
NameSize
[..]
HEAD
<tags>
+ + + """ + + logs_refs_index = """ + + Index of /.git/logs/refs + + +

Index of /.git/logs/refs

+ + + + +
NameSize
[..]
<heads>
+ + + """ + + logs_refs_heads_index = """ + + Index of /.git/logs/refs/heads + + +

Index of /.git/logs/refs/heads

+ + + + +
NameSize
[..]
master
+ + + """ + + empty_index = """ + + Index of /.git/... + + +

Index of /.git/...

+ + + +
NameSize
[..]
+ + """ + + git_head = "ref: refs/heads/master" + + refs_head = "34dc86f0247798892a89553e7c5c2d5aa06c2c5b" + + logs_head = "0000000000000000000000000000000000000000 34dc86f0247798892a89553e7c5c2d5aa06c2c5b Test 1738516534 +0000 commit (initial): Initial commit" + + logs_master_head = "0000000000000000000000000000000000000000 34dc86f0247798892a89553e7c5c2d5aa06c2c5b Test 1738516534 +0000 commit (initial): Initial commit" + + git_description = "Unnamed repository; edit this file 'description' to name the repository." + + git_commit_editmsg = "Initial commit" + + git_config = """[core] + repositoryformatversion = 0 + filemode = true + bare = false + logallrefupdates = true""" + + git_exclude = """# git ls-files --others --exclude-from=.git/info/exclude + # Lines that start with '#' are comments. + # For a project mostly in C, the following would be a good set of + # exclude patterns (uncomment them if you want to use them): + # *.[oa] + # *~""" + + filebytes_gitindex = b"DIRC\x00\x00\x00\x02\x00\x00\x00\x01g\x9f\xbe\x04\x14\xfcb\xd1g\x9f\xbe\x04\x14\xfcb\xd1\x00\x00\x08 \x00\x04aD\x00\x00\x81\xa4\x00\x00\x03\xe8\x00\x00\x03\xe8\x00\x00\x00\x0f\x05'\xe6\xbd-v\xb4^)3\x18?\x1bPlz\xc4\x9fXr\x00\x08test.txt\x00\x00TREE\x00\x00\x00\x19\x001 0\n\xc2i\xd7Q\xb8\xe2\xfd\x0b\xe0\xd0\xdczd7\xa4\xdc\xe4\xec\x02\x00\xe8m|iw\xbb\xd6\x88;f\xdbW\x10yY\xd2\xb0G\xcfJ" + filebytes_27e6bd2d76b45e2933183f1b506c7ac49f5872 = ( + b"x\x01K\xca\xc9OR04e\x08\xc9\xc8,V\x00\xa2D\x85\x92\xd4\xe2\x12.\x00U\xab\x07%" + ) + filebytes_dc86f0247798892a89553e7c5c2d5aa06c2c5b = b"x\x01\x9d\x8dK\n\x021\x10D]\xe7\x14\xbd\x17\x86\xce?\x82\x88\x0b7\x9e\xc0u\xa6\xd3:\x81\xc4\xc0\x18\x99\xeb\x1b\x98\x1bX\xbbzP\xaf\xa8\xd5\x9a;\xc8\xa0\x0f}e\x06R\xee\x94\xbc\x95s`\xf5L83&L\xe4\xa33\xdaG\x93\x88\r\x13*D\x11\xbf}i+\xdcZ\x85\xc7\xc2\x1b\x97\x02\xe7\xd4\xea\xb4\xed\xe5\xfa\x89/\x9e\xa8\xd5\x0bH\xaf\x83\x95\xcej\x03G\x1c\x11\x83\x8e\xcf\xce\xff\xad\xc5\xfd\x9d{\x8e\x05v\x8d\xf8\x01\xfaF<\x05" + filebytes_69d751b8e2fd0be0d0dc7a6437a4dce4ec0200 = b"x\x01+)JMU06c040031Q(I-.\xd1+\xa9(a`U\x7f\xb6W\xb7lK\x9c\xa6\xb1\x84\xbdt@N\xd5\x91\xf9\x11E\x00*\x05\x0e\x8c" + + async def setup_after_prep(self, module_test): + module_test.set_expect_requests( + expect_args={"uri": "/test/.git/"}, respond_args={"response_data": self.index_html} + ) + module_test.set_expect_requests( + expect_args={"uri": "/test/.git/index"}, respond_args={"response_data": self.filebytes_gitindex} + ) + module_test.set_expect_requests( + expect_args={"uri": "/test/.git/COMMIT_EDITMSG"}, respond_args={"response_data": self.git_commit_editmsg} + ) + module_test.set_expect_requests( + expect_args={"uri": "/test/.git/config"}, respond_args={"response_data": self.git_config} + ) + module_test.set_expect_requests( + expect_args={"uri": "/test/.git/branches/"}, respond_args={"response_data": self.empty_index} + ) + module_test.set_expect_requests( + expect_args={"uri": "/test/.git/description"}, respond_args={"response_data": self.git_description} + ) + module_test.set_expect_requests( + expect_args={"uri": "/test/.git/HEAD"}, respond_args={"response_data": self.git_head} + ) + module_test.set_expect_requests( + expect_args={"uri": "/test/.git/hooks/"}, respond_args={"response_data": self.empty_index} + ) + module_test.set_expect_requests( + expect_args={"uri": "/test/.git/info/"}, respond_args={"response_data": self.info_index} + ) + module_test.set_expect_requests( + expect_args={"uri": "/test/.git/info/exclude"}, respond_args={"response_data": self.git_exclude} + ) + 
module_test.set_expect_requests( + expect_args={"uri": "/test/.git/objects/"}, respond_args={"response_data": self.objects_index} + ) + module_test.set_expect_requests( + expect_args={"uri": "/test/.git/objects/05/"}, respond_args={"response_data": self.objects_o5_index} + ) + module_test.set_expect_requests( + expect_args={"uri": "/test/.git/objects/05/27e6bd2d76b45e2933183f1b506c7ac49f5872"}, + respond_args={"response_data": self.filebytes_27e6bd2d76b45e2933183f1b506c7ac49f5872}, + ) + module_test.set_expect_requests( + expect_args={"uri": "/test/.git/objects/34/"}, respond_args={"response_data": self.objects_34_index} + ) + module_test.set_expect_requests( + expect_args={"uri": "/test/.git/objects/34/dc86f0247798892a89553e7c5c2d5aa06c2c5b"}, + respond_args={"response_data": self.filebytes_dc86f0247798892a89553e7c5c2d5aa06c2c5b}, + ) + module_test.set_expect_requests( + expect_args={"uri": "/test/.git/objects/c2/"}, respond_args={"response_data": self.objects_c2_index} + ) + module_test.set_expect_requests( + expect_args={"uri": "/test/.git/objects/c2/69d751b8e2fd0be0d0dc7a6437a4dce4ec0200"}, + respond_args={"response_data": self.filebytes_69d751b8e2fd0be0d0dc7a6437a4dce4ec0200}, + ) + module_test.set_expect_requests( + expect_args={"uri": "/test/.git/objects/info/"}, respond_args={"response_data": self.empty_index} + ) + module_test.set_expect_requests( + expect_args={"uri": "/test/.git/objects/pack/"}, respond_args={"response_data": self.empty_index} + ) + module_test.set_expect_requests( + expect_args={"uri": "/test/.git/refs/"}, respond_args={"response_data": self.refs_index} + ) + module_test.set_expect_requests( + expect_args={"uri": "/test/.git/refs/heads/"}, respond_args={"response_data": self.refs_heads_index} + ) + module_test.set_expect_requests( + expect_args={"uri": "/test/.git/refs/heads/master"}, respond_args={"response_data": self.refs_head} + ) + module_test.set_expect_requests( + expect_args={"uri": "/test/.git/refs/tags/"}, respond_args={"response_data": self.empty_index} + ) + module_test.set_expect_requests( + expect_args={"uri": "/test/.git/logs/"}, respond_args={"response_data": self.logs_index} + ) + module_test.set_expect_requests( + expect_args={"uri": "/test/.git/logs/refs/"}, respond_args={"response_data": self.logs_refs_index} + ) + module_test.set_expect_requests( + expect_args={"uri": "/test/.git/logs/refs/heads/"}, + respond_args={"response_data": self.logs_refs_heads_index}, + ) + module_test.set_expect_requests( + expect_args={"uri": "/test/.git/logs/refs/heads/master"}, + respond_args={"response_data": self.logs_master_head}, + ) + module_test.set_expect_requests( + expect_args={"uri": "/test/.git/logs/HEAD"}, respond_args={"response_data": self.logs_head} + ) + + def check(self, module_test, events): + assert any( + e.type == "CODE_REPOSITORY" + and "git-directory" in e.tags + and e.data["url"] == "http://127.0.0.1:8888/test/.git/" + for e in events + ) + filesystem_events = [ + e + for e in events + if e.type == "FILESYSTEM" and "http-127-0-0-1-8888-test-git" in e.data["path"] and "git" in e.tags + ] + assert 1 == len(filesystem_events), "Failed to git clone CODE_REPOSITORY" + filesystem_event = filesystem_events[0] + folder = Path(filesystem_event.data["path"]) + assert folder.is_dir(), "Destination folder doesn't exist" + with open(folder / "test.txt") as f: + content = f.read() + assert content == "This is a test\n", "File content doesn't match" + + +class TestGitDumper_NoDirlisting(TestGitDumper_Dirlisting): + async def setup_after_prep(self, 
module_test): + module_test.set_expect_requests( + expect_args={"uri": "/test/.git/index"}, respond_args={"response_data": self.filebytes_gitindex} + ) + module_test.set_expect_requests( + expect_args={"uri": "/test/.git/COMMIT_EDITMSG"}, respond_args={"response_data": self.git_commit_editmsg} + ) + module_test.set_expect_requests( + expect_args={"uri": "/test/.git/config"}, respond_args={"response_data": self.git_config} + ) + module_test.set_expect_requests( + expect_args={"uri": "/test/.git/description"}, respond_args={"response_data": self.git_description} + ) + module_test.set_expect_requests( + expect_args={"uri": "/test/.git/HEAD"}, respond_args={"response_data": self.git_head} + ) + module_test.set_expect_requests( + expect_args={"uri": "/test/.git/info/exclude"}, respond_args={"response_data": self.git_exclude} + ) + module_test.set_expect_requests( + expect_args={"uri": "/test/.git/objects/05/27e6bd2d76b45e2933183f1b506c7ac49f5872"}, + respond_args={"response_data": self.filebytes_27e6bd2d76b45e2933183f1b506c7ac49f5872}, + ) + module_test.set_expect_requests( + expect_args={"uri": "/test/.git/objects/34/dc86f0247798892a89553e7c5c2d5aa06c2c5b"}, + respond_args={"response_data": self.filebytes_dc86f0247798892a89553e7c5c2d5aa06c2c5b}, + ) + module_test.set_expect_requests( + expect_args={"uri": "/test/.git/objects/c2/69d751b8e2fd0be0d0dc7a6437a4dce4ec0200"}, + respond_args={"response_data": self.filebytes_69d751b8e2fd0be0d0dc7a6437a4dce4ec0200}, + ) + module_test.set_expect_requests( + expect_args={"uri": "/test/.git/refs/heads/master"}, respond_args={"response_data": self.refs_head} + ) + module_test.set_expect_requests( + expect_args={"uri": "/test/.git/logs/refs/heads/master"}, + respond_args={"response_data": self.logs_master_head}, + ) + module_test.set_expect_requests( + expect_args={"uri": "/test/.git/logs/HEAD"}, respond_args={"response_data": self.logs_head} + ) diff --git a/bbot/test/test_step_2/module_tests/test_module_github_codesearch.py b/bbot/test/test_step_2/module_tests/test_module_github_codesearch.py new file mode 100644 index 0000000000..2bd9993b20 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_github_codesearch.py @@ -0,0 +1,104 @@ +from .base import ModuleTestBase + + +class TestGithub_Codesearch(ModuleTestBase): + config_overrides = { + "modules": { + "github_codesearch": {"api_key": "asdf", "limit": 1}, + "trufflehog": {"only_verified": False}, + }, + "omit_event_types": [], + "scope": {"report_distance": 2}, + } + modules_overrides = ["github_codesearch", "httpx", "trufflehog"] + + github_file_endpoint = ( + "/projectdiscovery/nuclei/06f242e5fce3439b7418877676810cbf57934875/v2/cmd/cve-annotate/main.go" + ) + github_file_url = f"http://127.0.0.1:8888{github_file_endpoint}" + github_file_content = """-----BEGIN PRIVATE KEY----- +MIICdwIBADANBgkqhkiG9w0BAQEFAASCAmEwggJdAgEAAoGBAOBY2pd9PSQvuxqu +WXFNVgILTWuUc721Wc2sFNvp4beowhUe1lfxaq5ZfCJcz7z4QsqFhOeks69O9UIb +oiOTDocPDog9PHO8yZXopHm0StFZvSjjKSNuFvy/WopPTGpxUZ5boCaF1CXumY7W +FL+jIap5faimLL9prIwaQKBwv80lAgMBAAECgYEAxvpHtgCgD849tqZYMgOTevCn +U/kwxltoMOClB39icNA+gxj8prc6FTTMwnVq0oGmS5UskX8k1yHCqUV1AvRU9o+q +I8L8a3F3TQKQieI/YjiUNK8A87bKkaiN65ooOnhT+I3ZjZMPR5YEyycimMp22jsv +LyX/35J/wf1rNiBs/YECQQDvtxgmMhE+PeajXqw1w2C3Jds27hI3RPDnamEyWr/L +KkSplbKTF6FuFDYOFdJNPrfxm1tx2MZ2cBfs+h/GnCJVAkEA75Z9w7q8obbqGBHW +9bpuFvLjW7bbqO7HBuXYX9zQcZL6GSArFP0ba5lhgH1qsVQfxVWVyiV9/chme7xc +ljfvkQJBAJ7MpSPQcRnRefNp6R0ok+5gFqt55PlWI1y6XS81bO7Szm+laooE0n0Q +yIpmLE3dqY9VgquVlkupkD/9poU0s40CQD118ZVAVht1/N9n1Cj9RjiE3mYspnTT 
+rCLM25Db6Gz6M0Y2xlaAB4S2uBhqE/Chj/TjW6WbsJJl0kRzsZynhMECQFYKiM1C +T4LB26ynW00VE8z4tEWSoYt4/Vn/5wFhalVjzoSJ8Hm2qZiObRYLQ1m0X4KnkShk +Gnl54dJHT+EhlfY= +-----END PRIVATE KEY-----""" + + async def setup_before_prep(self, module_test): + expect_args = {"method": "GET", "uri": self.github_file_endpoint} + respond_args = {"response_data": self.github_file_content} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + module_test.httpx_mock.add_response(url="https://api.github.com/zen") + module_test.httpx_mock.add_response( + url="https://api.github.com/search/code?per_page=100&type=Code&q=blacklanternsecurity.com&page=1", + json={ + "total_count": 214, + "incomplete_results": False, + "items": [ + { + "html_url": "https://github.com/projectdiscovery/nuclei/blob/06f242e5fce3439b7418877676810cbf57934875/v2/cmd/cve-annotate/main.go", + "repository": { + "html_url": "https://github.com/projectdiscovery/nuclei", + }, + }, + { + "html_url": "https://github.com/projectdiscovery/nuclei/blob/06f242e5fce3439b7418877676810cbf57934875/v2/cmd/cve-annotate/main.go2", + "repository": { + "html_url": "https://github.com/projectdiscovery/nuclei", + }, + }, + { + "html_url": "https://github.com/projectdiscovery/nuclei/blob/06f242e5fce3439b7418877676810cbf57934875/v2/cmd/cve-annotate/main.go3", + "repository": { + "html_url": "https://github.com/projectdiscovery/nuclei", + }, + }, + ], + }, + ) + + async def setup_after_prep(self, module_test): + module_test.module.github_raw_url = "http://127.0.0.1:8888/" + + def check(self, module_test, events): + assert 1 == len([e for e in events if e.type == "URL_UNVERIFIED"]) + assert 1 == len( + [ + e + for e in events + if e.type == "URL_UNVERIFIED" and e.data == self.github_file_url and e.scope_distance == 2 + ] + ), "Failed to emit URL_UNVERIFIED" + assert 1 == len( + [ + e + for e in events + if e.type == "CODE_REPOSITORY" + and "git" in e.tags + and e.data["url"] == "https://github.com/projectdiscovery/nuclei" + and e.scope_distance == 1 + ] + ), "Failed to emit CODE_REPOSITORY" + assert 1 == len( + [e for e in events if e.type == "URL" and e.data == self.github_file_url and e.scope_distance == 2] + ), "Failed to visit URL" + assert 1 == len( + [ + e + for e in events + if e.type == "HTTP_RESPONSE" and e.data["url"] == self.github_file_url and e.scope_distance == 2 + ] + ), "Failed to visit URL" + assert [e for e in events if e.type == "FINDING" and str(e.module) == "trufflehog"], ( + "Failed to find secret in repo file" + ) diff --git a/bbot/test/test_step_2/module_tests/test_module_github_org.py b/bbot/test/test_step_2/module_tests/test_module_github_org.py new file mode 100644 index 0000000000..d8003fd2a5 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_github_org.py @@ -0,0 +1,441 @@ +from .base import ModuleTestBase + + +class TestGithub_Org(ModuleTestBase): + config_overrides = {"modules": {"github_org": {"api_key": "asdf"}}} + modules_overrides = ["github_org", "speculate"] + + async def setup_before_prep(self, module_test): + await module_test.mock_dns( + {"blacklanternsecurity.com": {"A": ["127.0.0.99"]}, "github.com": {"A": ["127.0.0.99"]}} + ) + + module_test.httpx_mock.add_response( + url="https://api.github.com/zen", match_headers={"Authorization": "token asdf"} + ) + module_test.httpx_mock.add_response( + url="https://api.github.com/orgs/blacklanternsecurity", + match_headers={"Authorization": "token asdf"}, + json={ + "login": "blacklanternsecurity", + "id": 25311592, + "node_id": 
"MDEyOk9yZ2FuaXphdGlvbjI1MzExNTky", + "url": "https://api.github.com/orgs/blacklanternsecurity", + "repos_url": "https://api.github.com/orgs/blacklanternsecurity/repos", + "events_url": "https://api.github.com/orgs/blacklanternsecurity/events", + "hooks_url": "https://api.github.com/orgs/blacklanternsecurity/hooks", + "issues_url": "https://api.github.com/orgs/blacklanternsecurity/issues", + "members_url": "https://api.github.com/orgs/blacklanternsecurity/members{/member}", + "public_members_url": "https://api.github.com/orgs/blacklanternsecurity/public_members{/member}", + "avatar_url": "https://avatars.githubusercontent.com/u/25311592?v=4", + "description": "Security Organization", + "name": "Black Lantern Security", + "company": None, + "blog": "www.blacklanternsecurity.com", + "location": "Charleston, SC", + "email": None, + "twitter_username": None, + "is_verified": False, + "has_organization_projects": True, + "has_repository_projects": True, + "public_repos": 70, + "public_gists": 0, + "followers": 415, + "following": 0, + "html_url": "https://github.com/blacklanternsecurity", + "created_at": "2017-01-24T00:14:46Z", + "updated_at": "2022-03-28T11:39:03Z", + "archived_at": None, + "type": "Organization", + }, + ) + module_test.httpx_mock.add_response( + url="https://api.github.com/orgs/blacklanternsecurity/repos?per_page=100&page=1", + match_headers={"Authorization": "token asdf"}, + json=[ + { + "id": 459780477, + "node_id": "R_kgDOG2exfQ", + "name": "test_keys", + "full_name": "blacklanternsecurity/test_keys", + "private": False, + "owner": { + "login": "blacklanternsecurity", + "id": 79229934, + "node_id": "MDEyOk9yZ2FuaXphdGlvbjc5MjI5OTM0", + "avatar_url": "https://avatars.githubusercontent.com/u/79229934?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/blacklanternsecurity", + "html_url": "https://github.com/blacklanternsecurity", + "followers_url": "https://api.github.com/users/blacklanternsecurity/followers", + "following_url": "https://api.github.com/users/blacklanternsecurity/following{/other_user}", + "gists_url": "https://api.github.com/users/blacklanternsecurity/gists{/gist_id}", + "starred_url": "https://api.github.com/users/blacklanternsecurity/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/blacklanternsecurity/subscriptions", + "organizations_url": "https://api.github.com/users/blacklanternsecurity/orgs", + "repos_url": "https://api.github.com/users/blacklanternsecurity/repos", + "events_url": "https://api.github.com/users/blacklanternsecurity/events{/privacy}", + "received_events_url": "https://api.github.com/users/blacklanternsecurity/received_events", + "type": "Organization", + "site_admin": False, + }, + "html_url": "https://github.com/blacklanternsecurity/test_keys", + "description": None, + "fork": False, + "url": "https://api.github.com/repos/blacklanternsecurity/test_keys", + "forks_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/forks", + "keys_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/keys{/key_id}", + "collaborators_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/collaborators{/collaborator}", + "teams_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/teams", + "hooks_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/hooks", + "issue_events_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/issues/events{/number}", + "events_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/events", 
+ "assignees_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/assignees{/user}", + "branches_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/branches{/branch}", + "tags_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/tags", + "blobs_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/git/blobs{/sha}", + "git_tags_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/git/tags{/sha}", + "git_refs_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/git/refs{/sha}", + "trees_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/git/trees{/sha}", + "statuses_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/statuses/{sha}", + "languages_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/languages", + "stargazers_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/stargazers", + "contributors_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/contributors", + "subscribers_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/subscribers", + "subscription_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/subscription", + "commits_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/commits{/sha}", + "git_commits_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/git/commits{/sha}", + "comments_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/comments{/number}", + "issue_comment_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/issues/comments{/number}", + "contents_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/contents/{+path}", + "compare_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/compare/{base}...{head}", + "merges_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/merges", + "archive_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/{archive_format}{/ref}", + "downloads_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/downloads", + "issues_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/issues{/number}", + "pulls_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/pulls{/number}", + "milestones_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/milestones{/number}", + "notifications_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/notifications{?since,all,participating}", + "labels_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/labels{/name}", + "releases_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/releases{/id}", + "deployments_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/deployments", + "created_at": "2022-02-15T23:10:51Z", + "updated_at": "2023-09-02T12:20:13Z", + "pushed_at": "2023-10-19T02:56:46Z", + "git_url": "git://github.com/blacklanternsecurity/test_keys.git", + "ssh_url": "git@github.com:blacklanternsecurity/test_keys.git", + "clone_url": "https://github.com/blacklanternsecurity/test_keys.git", + "svn_url": "https://github.com/blacklanternsecurity/test_keys", + "homepage": None, + "size": 2, + "stargazers_count": 2, + "watchers_count": 2, + "language": None, + "has_issues": True, + "has_projects": True, + "has_downloads": True, + "has_wiki": True, + "has_pages": False, + "has_discussions": False, + "forks_count": 32, + "mirror_url": None, + "archived": False, + "disabled": False, + 
"open_issues_count": 2, + "license": None, + "allow_forking": True, + "is_template": False, + "web_commit_signoff_required": False, + "topics": [], + "visibility": "public", + "forks": 32, + "open_issues": 2, + "watchers": 2, + "default_branch": "main", + "permissions": {"admin": False, "maintain": False, "push": False, "triage": False, "pull": True}, + } + ], + ) + module_test.httpx_mock.add_response( + url="https://api.github.com/orgs/blacklanternsecurity/members?per_page=100&page=1", + match_headers={"Authorization": "token asdf"}, + json=[ + { + "login": "TheTechromancer", + "id": 20261699, + "node_id": "MDQ6VXNlcjIwMjYxNjk5", + "avatar_url": "https://avatars.githubusercontent.com/u/20261699?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/TheTechromancer", + "html_url": "https://github.com/TheTechromancer", + "followers_url": "https://api.github.com/users/TheTechromancer/followers", + "following_url": "https://api.github.com/users/TheTechromancer/following{/other_user}", + "gists_url": "https://api.github.com/users/TheTechromancer/gists{/gist_id}", + "starred_url": "https://api.github.com/users/TheTechromancer/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/TheTechromancer/subscriptions", + "organizations_url": "https://api.github.com/users/TheTechromancer/orgs", + "repos_url": "https://api.github.com/users/TheTechromancer/repos", + "events_url": "https://api.github.com/users/TheTechromancer/events{/privacy}", + "received_events_url": "https://api.github.com/users/TheTechromancer/received_events", + "type": "User", + "site_admin": False, + } + ], + ) + module_test.httpx_mock.add_response( + url="https://api.github.com/users/TheTechromancer/repos?per_page=100&page=1", + match_headers={"Authorization": "token asdf"}, + json=[ + { + "id": 688270318, + "node_id": "R_kgDOKQYr7g", + "name": "websitedemo", + "full_name": "TheTechromancer/websitedemo", + "private": False, + "owner": { + "login": "TheTechromancer", + "id": 20261699, + "node_id": "MDQ6VXNlcjIwMjYxNjk5", + "avatar_url": "https://avatars.githubusercontent.com/u/20261699?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/TheTechromancer", + "html_url": "https://github.com/TheTechromancer", + "followers_url": "https://api.github.com/users/TheTechromancer/followers", + "following_url": "https://api.github.com/users/TheTechromancer/following{/other_user}", + "gists_url": "https://api.github.com/users/TheTechromancer/gists{/gist_id}", + "starred_url": "https://api.github.com/users/TheTechromancer/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/TheTechromancer/subscriptions", + "organizations_url": "https://api.github.com/users/TheTechromancer/orgs", + "repos_url": "https://api.github.com/users/TheTechromancer/repos", + "events_url": "https://api.github.com/users/TheTechromancer/events{/privacy}", + "received_events_url": "https://api.github.com/users/TheTechromancer/received_events", + "type": "User", + "site_admin": False, + }, + "html_url": "https://github.com/TheTechromancer/websitedemo", + "description": None, + "fork": False, + "url": "https://api.github.com/repos/TheTechromancer/websitedemo", + "forks_url": "https://api.github.com/repos/TheTechromancer/websitedemo/forks", + "keys_url": "https://api.github.com/repos/TheTechromancer/websitedemo/keys{/key_id}", + "collaborators_url": "https://api.github.com/repos/TheTechromancer/websitedemo/collaborators{/collaborator}", + "teams_url": 
"https://api.github.com/repos/TheTechromancer/websitedemo/teams", + "hooks_url": "https://api.github.com/repos/TheTechromancer/websitedemo/hooks", + "issue_events_url": "https://api.github.com/repos/TheTechromancer/websitedemo/issues/events{/number}", + "events_url": "https://api.github.com/repos/TheTechromancer/websitedemo/events", + "assignees_url": "https://api.github.com/repos/TheTechromancer/websitedemo/assignees{/user}", + "branches_url": "https://api.github.com/repos/TheTechromancer/websitedemo/branches{/branch}", + "tags_url": "https://api.github.com/repos/TheTechromancer/websitedemo/tags", + "blobs_url": "https://api.github.com/repos/TheTechromancer/websitedemo/git/blobs{/sha}", + "git_tags_url": "https://api.github.com/repos/TheTechromancer/websitedemo/git/tags{/sha}", + "git_refs_url": "https://api.github.com/repos/TheTechromancer/websitedemo/git/refs{/sha}", + "trees_url": "https://api.github.com/repos/TheTechromancer/websitedemo/git/trees{/sha}", + "statuses_url": "https://api.github.com/repos/TheTechromancer/websitedemo/statuses/{sha}", + "languages_url": "https://api.github.com/repos/TheTechromancer/websitedemo/languages", + "stargazers_url": "https://api.github.com/repos/TheTechromancer/websitedemo/stargazers", + "contributors_url": "https://api.github.com/repos/TheTechromancer/websitedemo/contributors", + "subscribers_url": "https://api.github.com/repos/TheTechromancer/websitedemo/subscribers", + "subscription_url": "https://api.github.com/repos/TheTechromancer/websitedemo/subscription", + "commits_url": "https://api.github.com/repos/TheTechromancer/websitedemo/commits{/sha}", + "git_commits_url": "https://api.github.com/repos/TheTechromancer/websitedemo/git/commits{/sha}", + "comments_url": "https://api.github.com/repos/TheTechromancer/websitedemo/comments{/number}", + "issue_comment_url": "https://api.github.com/repos/TheTechromancer/websitedemo/issues/comments{/number}", + "contents_url": "https://api.github.com/repos/TheTechromancer/websitedemo/contents/{+path}", + "compare_url": "https://api.github.com/repos/TheTechromancer/websitedemo/compare/{base}...{head}", + "merges_url": "https://api.github.com/repos/TheTechromancer/websitedemo/merges", + "archive_url": "https://api.github.com/repos/TheTechromancer/websitedemo/{archive_format}{/ref}", + "downloads_url": "https://api.github.com/repos/TheTechromancer/websitedemo/downloads", + "issues_url": "https://api.github.com/repos/TheTechromancer/websitedemo/issues{/number}", + "pulls_url": "https://api.github.com/repos/TheTechromancer/websitedemo/pulls{/number}", + "milestones_url": "https://api.github.com/repos/TheTechromancer/websitedemo/milestones{/number}", + "notifications_url": "https://api.github.com/repos/TheTechromancer/websitedemo/notifications{?since,all,participating}", + "labels_url": "https://api.github.com/repos/TheTechromancer/websitedemo/labels{/name}", + "releases_url": "https://api.github.com/repos/TheTechromancer/websitedemo/releases{/id}", + "deployments_url": "https://api.github.com/repos/TheTechromancer/websitedemo/deployments", + "created_at": "2023-09-07T02:18:28Z", + "updated_at": "2023-09-07T02:20:18Z", + "pushed_at": "2023-09-07T02:34:45Z", + "git_url": "git://github.com/TheTechromancer/websitedemo.git", + "ssh_url": "git@github.com:TheTechromancer/websitedemo.git", + "clone_url": "https://github.com/TheTechromancer/websitedemo.git", + "svn_url": "https://github.com/TheTechromancer/websitedemo", + "homepage": None, + "size": 1, + "stargazers_count": 0, + "watchers_count": 0, + "language": "HTML", 
+ "has_issues": True, + "has_projects": True, + "has_downloads": True, + "has_wiki": True, + "has_pages": True, + "has_discussions": False, + "forks_count": 0, + "mirror_url": None, + "archived": False, + "disabled": False, + "open_issues_count": 0, + "license": None, + "allow_forking": True, + "is_template": False, + "web_commit_signoff_required": False, + "topics": [], + "visibility": "public", + "forks": 0, + "open_issues": 0, + "watchers": 0, + "default_branch": "main", + } + ], + ) + + def check(self, module_test, events): + assert len(events) == 7 + assert 1 == len( + [ + e + for e in events + if e.type == "DNS_NAME" and e.data == "blacklanternsecurity.com" and e.scope_distance == 0 + ] + ), "Failed to emit target DNS_NAME" + assert 1 == len( + [e for e in events if e.type == "ORG_STUB" and e.data == "blacklanternsecurity" and e.scope_distance == 0] + ), "Failed to find ORG_STUB" + assert 1 == len( + [ + e + for e in events + if e.type == "SOCIAL" + and e.data["platform"] == "github" + and e.data["profile_name"] == "blacklanternsecurity" + and str(e.module) == "github_org" + and "github-org" in e.tags + and e.scope_distance == 1 + ] + ), "Failed to find blacklanternsecurity github" + assert 1 == len( + [ + e + for e in events + if e.type == "SOCIAL" + and e.data["platform"] == "github" + and e.data["profile_name"] == "TheTechromancer" + and str(e.module) == "github_org" + and "github-org-member" in e.tags + and e.scope_distance == 2 + ] + ), "Failed to find TheTechromancer github" + assert 1 == len( + [ + e + for e in events + if e.type == "CODE_REPOSITORY" + and "git" in e.tags + and e.data["url"] == "https://github.com/blacklanternsecurity/test_keys" + and e.scope_distance == 1 + ] + ), "Failed to find blacklanternsecurity github repo" + + +class TestGithub_Org_No_Members(TestGithub_Org): + config_overrides = {"modules": {"github_org": {"include_members": False}, "github": {"api_key": "asdf"}}} + + def check(self, module_test, events): + assert len(events) == 6 + assert 1 == len( + [ + e + for e in events + if e.type == "SOCIAL" + and e.data["platform"] == "github" + and e.data["profile_name"] == "blacklanternsecurity" + and str(e.module) == "github_org" + and "github-org" in e.tags + and e.scope_distance == 1 + ] + ), "Failed to find blacklanternsecurity github" + assert 0 == len( + [ + e + for e in events + if e.type == "SOCIAL" + and e.data["platform"] == "github" + and e.data["profile_name"] == "TheTechromancer" + ] + ), "Found TheTechromancer github" + + +class TestGithub_Org_MemberRepos(TestGithub_Org): + config_overrides = {"modules": {"github_org": {"include_member_repos": True}, "github": {"api_key": "asdf"}}} + + def check(self, module_test, events): + assert len(events) == 8 + assert 1 == len( + [ + e + for e in events + if e.type == "CODE_REPOSITORY" + and "git" in e.tags + and e.data["url"] == "https://github.com/TheTechromancer/websitedemo" + and e.scope_distance == 2 + ] + ), "Failed to find TheTechromancer github repo" + + +class TestGithub_Org_Custom_Target(TestGithub_Org): + targets = ["ORG:blacklanternsecurity"] + config_overrides = { + "scope": {"report_distance": 10}, + "omit_event_types": [], + "speculate": True, + "modules": {"github": {"api_key": "asdf"}}, + } + + def check(self, module_test, events): + assert len(events) == 8 + assert 1 == len( + [e for e in events if e.type == "ORG_STUB" and e.data == "blacklanternsecurity" and e.scope_distance == 0] + ) + assert 1 == len( + [ + e + for e in events + if e.type == "SOCIAL" + and e.data["platform"] == 
"github" + and e.data["profile_name"] == "blacklanternsecurity" + and e.scope_distance == 1 + and str(e.module) == "github_org" + and e.parent.type == "ORG_STUB" + ] + ) + assert 1 == len( + [e for e in events if e.type == "DNS_NAME" and e.data == "github.com" and e.scope_distance == 1] + ) + assert 1 == len( + [ + e + for e in events + if e.type == "URL_UNVERIFIED" + and e.data == "https://github.com/blacklanternsecurity" + and e.scope_distance == 1 + ] + ) + assert 1 == len( + [ + e + for e in events + if e.type == "CODE_REPOSITORY" + and e.data["url"] == "https://github.com/blacklanternsecurity/test_keys" + and e.scope_distance == 1 + ] + ) + assert 1 == len( + [ + e + for e in events + if e.type == "SOCIAL" + and e.data["platform"] == "github" + and e.data["profile_name"] == "TheTechromancer" + and e.scope_distance == 2 + ] + ) diff --git a/bbot/test/test_step_2/module_tests/test_module_github_workflows.py b/bbot/test/test_step_2/module_tests/test_module_github_workflows.py new file mode 100644 index 0000000000..621df5871e --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_github_workflows.py @@ -0,0 +1,517 @@ +import io +import zipfile +from pathlib import Path + +from .base import ModuleTestBase + + +class TestGithub_Workflows(ModuleTestBase): + config_overrides = {"modules": {"github_org": {"api_key": "asdf"}}} + modules_overrides = ["github_workflows", "github_org", "speculate"] + + data = io.BytesIO() + with zipfile.ZipFile(data, mode="w", compression=zipfile.ZIP_DEFLATED) as zipfile: + zipfile.writestr("test.txt", "This is some test data") + zipfile.writestr("test2.txt", "This is some more test data") + zipfile.writestr("folder/test3.txt", "This is yet more test data") + data.seek(0) + zip_content = data.getvalue() + + async def setup_before_prep(self, module_test): + module_test.httpx_mock.add_response(url="https://api.github.com/zen") + module_test.httpx_mock.add_response( + url="https://api.github.com/orgs/blacklanternsecurity", + json={ + "login": "blacklanternsecurity", + "id": 25311592, + "node_id": "MDEyOk9yZ2FuaXphdGlvbjI1MzExNTky", + "url": "https://api.github.com/orgs/blacklanternsecurity", + "repos_url": "https://api.github.com/orgs/blacklanternsecurity/repos", + "events_url": "https://api.github.com/orgs/blacklanternsecurity/events", + "hooks_url": "https://api.github.com/orgs/blacklanternsecurity/hooks", + "issues_url": "https://api.github.com/orgs/blacklanternsecurity/issues", + "members_url": "https://api.github.com/orgs/blacklanternsecurity/members{/member}", + "public_members_url": "https://api.github.com/orgs/blacklanternsecurity/public_members{/member}", + "avatar_url": "https://avatars.githubusercontent.com/u/25311592?v=4", + "description": "Security Organization", + "name": "Black Lantern Security", + "company": None, + "blog": "www.blacklanternsecurity.com", + "location": "Charleston, SC", + "email": None, + "twitter_username": None, + "is_verified": False, + "has_organization_projects": True, + "has_repository_projects": True, + "public_repos": 70, + "public_gists": 0, + "followers": 415, + "following": 0, + "html_url": "https://github.com/blacklanternsecurity", + "created_at": "2017-01-24T00:14:46Z", + "updated_at": "2022-03-28T11:39:03Z", + "archived_at": None, + "type": "Organization", + }, + ) + module_test.httpx_mock.add_response( + url="https://api.github.com/orgs/blacklanternsecurity/repos?per_page=100&page=1", + json=[ + { + "id": 459780477, + "node_id": "R_kgDOG2exfQ", + "name": "test_keys", + "full_name": 
"blacklanternsecurity/test_keys", + "private": False, + "owner": { + "login": "blacklanternsecurity", + "id": 79229934, + "node_id": "MDEyOk9yZ2FuaXphdGlvbjc5MjI5OTM0", + "avatar_url": "https://avatars.githubusercontent.com/u/79229934?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/blacklanternsecurity", + "html_url": "https://github.com/blacklanternsecurity", + "followers_url": "https://api.github.com/users/blacklanternsecurity/followers", + "following_url": "https://api.github.com/users/blacklanternsecurity/following{/other_user}", + "gists_url": "https://api.github.com/users/blacklanternsecurity/gists{/gist_id}", + "starred_url": "https://api.github.com/users/blacklanternsecurity/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/blacklanternsecurity/subscriptions", + "organizations_url": "https://api.github.com/users/blacklanternsecurity/orgs", + "repos_url": "https://api.github.com/users/blacklanternsecurity/repos", + "events_url": "https://api.github.com/users/blacklanternsecurity/events{/privacy}", + "received_events_url": "https://api.github.com/users/blacklanternsecurity/received_events", + "type": "Organization", + "site_admin": False, + }, + "html_url": "https://github.com/blacklanternsecurity/bbot", + "description": None, + "fork": False, + "url": "https://api.github.com/repos/blacklanternsecurity/test_keys", + "forks_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/forks", + "keys_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/keys{/key_id}", + "collaborators_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/collaborators{/collaborator}", + "teams_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/teams", + "hooks_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/hooks", + "issue_events_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/issues/events{/number}", + "events_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/events", + "assignees_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/assignees{/user}", + "branches_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/branches{/branch}", + "tags_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/tags", + "blobs_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/git/blobs{/sha}", + "git_tags_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/git/tags{/sha}", + "git_refs_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/git/refs{/sha}", + "trees_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/git/trees{/sha}", + "statuses_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/statuses/{sha}", + "languages_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/languages", + "stargazers_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/stargazers", + "contributors_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/contributors", + "subscribers_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/subscribers", + "subscription_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/subscription", + "commits_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/commits{/sha}", + "git_commits_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/git/commits{/sha}", + "comments_url": 
"https://api.github.com/repos/blacklanternsecurity/test_keys/comments{/number}", + "issue_comment_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/issues/comments{/number}", + "contents_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/contents/{+path}", + "compare_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/compare/{base}...{head}", + "merges_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/merges", + "archive_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/{archive_format}{/ref}", + "downloads_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/downloads", + "issues_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/issues{/number}", + "pulls_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/pulls{/number}", + "milestones_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/milestones{/number}", + "notifications_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/notifications{?since,all,participating}", + "labels_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/labels{/name}", + "releases_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/releases{/id}", + "deployments_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/deployments", + "created_at": "2022-02-15T23:10:51Z", + "updated_at": "2023-09-02T12:20:13Z", + "pushed_at": "2023-10-19T02:56:46Z", + "git_url": "git://github.com/blacklanternsecurity/test_keys.git", + "ssh_url": "git@github.com:blacklanternsecurity/test_keys.git", + "clone_url": "https://github.com/blacklanternsecurity/bbot.git", + "svn_url": "https://github.com/blacklanternsecurity/bbot", + "homepage": None, + "size": 2, + "stargazers_count": 2, + "watchers_count": 2, + "language": None, + "has_issues": True, + "has_projects": True, + "has_downloads": True, + "has_wiki": True, + "has_pages": False, + "has_discussions": False, + "forks_count": 32, + "mirror_url": None, + "archived": False, + "disabled": False, + "open_issues_count": 2, + "license": None, + "allow_forking": True, + "is_template": False, + "web_commit_signoff_required": False, + "topics": [], + "visibility": "public", + "forks": 32, + "open_issues": 2, + "watchers": 2, + "default_branch": "main", + "permissions": {"admin": False, "maintain": False, "push": False, "triage": False, "pull": True}, + } + ], + ) + module_test.httpx_mock.add_response( + url="https://api.github.com/repos/blacklanternsecurity/bbot/actions/workflows?per_page=100&page=1", + json={ + "total_count": 3, + "workflows": [ + { + "id": 22452226, + "node_id": "W_kwDOG_O3ns4BVpgC", + "name": "tests", + "path": ".github/workflows/tests.yml", + "state": "active", + "created_at": "2022-03-23T15:09:22.000Z", + "updated_at": "2022-09-27T17:49:34.000Z", + "url": "https://api.github.com/repos/blacklanternsecurity/bbot/actions/workflows/22452226", + "html_url": "https://github.com/blacklanternsecurity/bbot/blob/stable/.github/workflows/tests.yml", + "badge_url": "https://github.com/blacklanternsecurity/bbot/workflows/tests/badge.svg", + }, + ], + }, + ) + module_test.httpx_mock.add_response( + url="https://api.github.com/repos/blacklanternsecurity/bbot/actions/workflows/22452226/runs?status=success&per_page=1", + json={ + "total_count": 2993, + "workflow_runs": [ + { + "id": 8839360698, + "name": "tests", + "node_id": "WFR_kwLOG_O3ns8AAAACDt3wug", + "head_branch": "dnsbrute-helperify", + "head_sha": 
"c5de1360e8e5ccba04b23035f675a529282b7dc2", + "path": ".github/workflows/tests.yml", + "display_title": "Helperify Massdns", + "run_number": 4520, + "event": "pull_request", + "status": "completed", + "conclusion": "success", + "workflow_id": 22452226, + "check_suite_id": 23162098295, + "check_suite_node_id": "CS_kwDOG_O3ns8AAAAFZJGSdw", + "url": "https://api.github.com/repos/blacklanternsecurity/bbot/actions/runs/8839360698", + "html_url": "https://github.com/blacklanternsecurity/bbot/actions/runs/8839360698", + "pull_requests": [ + { + "url": "https://api.github.com/repos/blacklanternsecurity/bbot/pulls/1303", + "id": 1839332952, + "number": 1303, + "head": { + "ref": "dnsbrute-helperify", + "sha": "c5de1360e8e5ccba04b23035f675a529282b7dc2", + "repo": { + "id": 468957086, + "url": "https://api.github.com/repos/blacklanternsecurity/bbot", + "name": "bbot", + }, + }, + "base": { + "ref": "faster-regexes", + "sha": "7baf219c7f3a4ba165639c5ddb62322453a8aea8", + "repo": { + "id": 468957086, + "url": "https://api.github.com/repos/blacklanternsecurity/bbot", + "name": "bbot", + }, + }, + } + ], + "created_at": "2024-04-25T21:04:32Z", + "updated_at": "2024-04-25T21:19:43Z", + "actor": { + "login": "TheTechromancer", + "id": 20261699, + "node_id": "MDQ6VXNlcjIwMjYxNjk5", + "avatar_url": "https://avatars.githubusercontent.com/u/20261699?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/TheTechromancer", + "html_url": "https://github.com/TheTechromancer", + "followers_url": "https://api.github.com/users/TheTechromancer/followers", + "following_url": "https://api.github.com/users/TheTechromancer/following{/other_user}", + "gists_url": "https://api.github.com/users/TheTechromancer/gists{/gist_id}", + "starred_url": "https://api.github.com/users/TheTechromancer/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/TheTechromancer/subscriptions", + "organizations_url": "https://api.github.com/users/TheTechromancer/orgs", + "repos_url": "https://api.github.com/users/TheTechromancer/repos", + "events_url": "https://api.github.com/users/TheTechromancer/events{/privacy}", + "received_events_url": "https://api.github.com/users/TheTechromancer/received_events", + "type": "User", + "site_admin": False, + }, + "run_attempt": 1, + "referenced_workflows": [], + "run_started_at": "2024-04-25T21:04:32Z", + "triggering_actor": { + "login": "TheTechromancer", + "id": 20261699, + "node_id": "MDQ6VXNlcjIwMjYxNjk5", + "avatar_url": "https://avatars.githubusercontent.com/u/20261699?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/TheTechromancer", + "html_url": "https://github.com/TheTechromancer", + "followers_url": "https://api.github.com/users/TheTechromancer/followers", + "following_url": "https://api.github.com/users/TheTechromancer/following{/other_user}", + "gists_url": "https://api.github.com/users/TheTechromancer/gists{/gist_id}", + "starred_url": "https://api.github.com/users/TheTechromancer/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/TheTechromancer/subscriptions", + "organizations_url": "https://api.github.com/users/TheTechromancer/orgs", + "repos_url": "https://api.github.com/users/TheTechromancer/repos", + "events_url": "https://api.github.com/users/TheTechromancer/events{/privacy}", + "received_events_url": "https://api.github.com/users/TheTechromancer/received_events", + "type": "User", + "site_admin": False, + }, + "jobs_url": "https://api.github.com/repos/blacklanternsecurity/bbot/actions/runs/8839360698/jobs", + 
"logs_url": "https://api.github.com/repos/blacklanternsecurity/bbot/actions/runs/8839360698/logs", + "check_suite_url": "https://api.github.com/repos/blacklanternsecurity/bbot/check-suites/23162098295", + "artifacts_url": "https://api.github.com/repos/blacklanternsecurity/bbot/actions/runs/8839360698/artifacts", + "cancel_url": "https://api.github.com/repos/blacklanternsecurity/bbot/actions/runs/8839360698/cancel", + "rerun_url": "https://api.github.com/repos/blacklanternsecurity/bbot/actions/runs/8839360698/rerun", + "previous_attempt_url": None, + "workflow_url": "https://api.github.com/repos/blacklanternsecurity/bbot/actions/workflows/22452226", + "head_commit": { + "id": "c5de1360e8e5ccba04b23035f675a529282b7dc2", + "tree_id": "fe9b345c0745a5bbacb806225e92e1c48fccf35c", + "message": "remove debug message", + "timestamp": "2024-04-25T21:02:37Z", + "author": {"name": "TheTechromancer", "email": "thetechromancer@protonmail.com"}, + "committer": {"name": "TheTechromancer", "email": "thetechromancer@protonmail.com"}, + }, + "repository": { + "id": 468957086, + "node_id": "R_kgDOG_O3ng", + "name": "bbot", + "full_name": "blacklanternsecurity/bbot", + "private": False, + "owner": { + "login": "blacklanternsecurity", + "id": 25311592, + "node_id": "MDEyOk9yZ2FuaXphdGlvbjI1MzExNTky", + "avatar_url": "https://avatars.githubusercontent.com/u/25311592?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/blacklanternsecurity", + "html_url": "https://github.com/blacklanternsecurity", + "followers_url": "https://api.github.com/users/blacklanternsecurity/followers", + "following_url": "https://api.github.com/users/blacklanternsecurity/following{/other_user}", + "gists_url": "https://api.github.com/users/blacklanternsecurity/gists{/gist_id}", + "starred_url": "https://api.github.com/users/blacklanternsecurity/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/blacklanternsecurity/subscriptions", + "organizations_url": "https://api.github.com/users/blacklanternsecurity/orgs", + "repos_url": "https://api.github.com/users/blacklanternsecurity/repos", + "events_url": "https://api.github.com/users/blacklanternsecurity/events{/privacy}", + "received_events_url": "https://api.github.com/users/blacklanternsecurity/received_events", + "type": "Organization", + "site_admin": False, + }, + "html_url": "https://github.com/blacklanternsecurity/bbot", + "description": "A recursive internet scanner for hackers.", + "fork": False, + "url": "https://api.github.com/repos/blacklanternsecurity/bbot", + "forks_url": "https://api.github.com/repos/blacklanternsecurity/bbot/forks", + "keys_url": "https://api.github.com/repos/blacklanternsecurity/bbot/keys{/key_id}", + "collaborators_url": "https://api.github.com/repos/blacklanternsecurity/bbot/collaborators{/collaborator}", + "teams_url": "https://api.github.com/repos/blacklanternsecurity/bbot/teams", + "hooks_url": "https://api.github.com/repos/blacklanternsecurity/bbot/hooks", + "issue_events_url": "https://api.github.com/repos/blacklanternsecurity/bbot/issues/events{/number}", + "events_url": "https://api.github.com/repos/blacklanternsecurity/bbot/events", + "assignees_url": "https://api.github.com/repos/blacklanternsecurity/bbot/assignees{/user}", + "branches_url": "https://api.github.com/repos/blacklanternsecurity/bbot/branches{/branch}", + "tags_url": "https://api.github.com/repos/blacklanternsecurity/bbot/tags", + "blobs_url": "https://api.github.com/repos/blacklanternsecurity/bbot/git/blobs{/sha}", + "git_tags_url": 
"https://api.github.com/repos/blacklanternsecurity/bbot/git/tags{/sha}", + "git_refs_url": "https://api.github.com/repos/blacklanternsecurity/bbot/git/refs{/sha}", + "trees_url": "https://api.github.com/repos/blacklanternsecurity/bbot/git/trees{/sha}", + "statuses_url": "https://api.github.com/repos/blacklanternsecurity/bbot/statuses/{sha}", + "languages_url": "https://api.github.com/repos/blacklanternsecurity/bbot/languages", + "stargazers_url": "https://api.github.com/repos/blacklanternsecurity/bbot/stargazers", + "contributors_url": "https://api.github.com/repos/blacklanternsecurity/bbot/contributors", + "subscribers_url": "https://api.github.com/repos/blacklanternsecurity/bbot/subscribers", + "subscription_url": "https://api.github.com/repos/blacklanternsecurity/bbot/subscription", + "commits_url": "https://api.github.com/repos/blacklanternsecurity/bbot/commits{/sha}", + "git_commits_url": "https://api.github.com/repos/blacklanternsecurity/bbot/git/commits{/sha}", + "comments_url": "https://api.github.com/repos/blacklanternsecurity/bbot/comments{/number}", + "issue_comment_url": "https://api.github.com/repos/blacklanternsecurity/bbot/issues/comments{/number}", + "contents_url": "https://api.github.com/repos/blacklanternsecurity/bbot/contents/{+path}", + "compare_url": "https://api.github.com/repos/blacklanternsecurity/bbot/compare/{base}...{head}", + "merges_url": "https://api.github.com/repos/blacklanternsecurity/bbot/merges", + "archive_url": "https://api.github.com/repos/blacklanternsecurity/bbot/{archive_format}{/ref}", + "downloads_url": "https://api.github.com/repos/blacklanternsecurity/bbot/downloads", + "issues_url": "https://api.github.com/repos/blacklanternsecurity/bbot/issues{/number}", + "pulls_url": "https://api.github.com/repos/blacklanternsecurity/bbot/pulls{/number}", + "milestones_url": "https://api.github.com/repos/blacklanternsecurity/bbot/milestones{/number}", + "notifications_url": "https://api.github.com/repos/blacklanternsecurity/bbot/notifications{?since,all,participating}", + "labels_url": "https://api.github.com/repos/blacklanternsecurity/bbot/labels{/name}", + "releases_url": "https://api.github.com/repos/blacklanternsecurity/bbot/releases{/id}", + "deployments_url": "https://api.github.com/repos/blacklanternsecurity/bbot/deployments", + }, + "head_repository": { + "id": 468957086, + "node_id": "R_kgDOG_O3ng", + "name": "bbot", + "full_name": "blacklanternsecurity/bbot", + "private": False, + "owner": { + "login": "blacklanternsecurity", + "id": 25311592, + "node_id": "MDEyOk9yZ2FuaXphdGlvbjI1MzExNTky", + "avatar_url": "https://avatars.githubusercontent.com/u/25311592?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/blacklanternsecurity", + "html_url": "https://github.com/blacklanternsecurity", + "followers_url": "https://api.github.com/users/blacklanternsecurity/followers", + "following_url": "https://api.github.com/users/blacklanternsecurity/following{/other_user}", + "gists_url": "https://api.github.com/users/blacklanternsecurity/gists{/gist_id}", + "starred_url": "https://api.github.com/users/blacklanternsecurity/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/blacklanternsecurity/subscriptions", + "organizations_url": "https://api.github.com/users/blacklanternsecurity/orgs", + "repos_url": "https://api.github.com/users/blacklanternsecurity/repos", + "events_url": "https://api.github.com/users/blacklanternsecurity/events{/privacy}", + "received_events_url": 
"https://api.github.com/users/blacklanternsecurity/received_events", + "type": "Organization", + "site_admin": False, + }, + "html_url": "https://github.com/blacklanternsecurity/bbot", + "description": "A recursive internet scanner for hackers.", + "fork": False, + "url": "https://api.github.com/repos/blacklanternsecurity/bbot", + "forks_url": "https://api.github.com/repos/blacklanternsecurity/bbot/forks", + "keys_url": "https://api.github.com/repos/blacklanternsecurity/bbot/keys{/key_id}", + "collaborators_url": "https://api.github.com/repos/blacklanternsecurity/bbot/collaborators{/collaborator}", + "teams_url": "https://api.github.com/repos/blacklanternsecurity/bbot/teams", + "hooks_url": "https://api.github.com/repos/blacklanternsecurity/bbot/hooks", + "issue_events_url": "https://api.github.com/repos/blacklanternsecurity/bbot/issues/events{/number}", + "events_url": "https://api.github.com/repos/blacklanternsecurity/bbot/events", + "assignees_url": "https://api.github.com/repos/blacklanternsecurity/bbot/assignees{/user}", + "branches_url": "https://api.github.com/repos/blacklanternsecurity/bbot/branches{/branch}", + "tags_url": "https://api.github.com/repos/blacklanternsecurity/bbot/tags", + "blobs_url": "https://api.github.com/repos/blacklanternsecurity/bbot/git/blobs{/sha}", + "git_tags_url": "https://api.github.com/repos/blacklanternsecurity/bbot/git/tags{/sha}", + "git_refs_url": "https://api.github.com/repos/blacklanternsecurity/bbot/git/refs{/sha}", + "trees_url": "https://api.github.com/repos/blacklanternsecurity/bbot/git/trees{/sha}", + "statuses_url": "https://api.github.com/repos/blacklanternsecurity/bbot/statuses/{sha}", + "languages_url": "https://api.github.com/repos/blacklanternsecurity/bbot/languages", + "stargazers_url": "https://api.github.com/repos/blacklanternsecurity/bbot/stargazers", + "contributors_url": "https://api.github.com/repos/blacklanternsecurity/bbot/contributors", + "subscribers_url": "https://api.github.com/repos/blacklanternsecurity/bbot/subscribers", + "subscription_url": "https://api.github.com/repos/blacklanternsecurity/bbot/subscription", + "commits_url": "https://api.github.com/repos/blacklanternsecurity/bbot/commits{/sha}", + "git_commits_url": "https://api.github.com/repos/blacklanternsecurity/bbot/git/commits{/sha}", + "comments_url": "https://api.github.com/repos/blacklanternsecurity/bbot/comments{/number}", + "issue_comment_url": "https://api.github.com/repos/blacklanternsecurity/bbot/issues/comments{/number}", + "contents_url": "https://api.github.com/repos/blacklanternsecurity/bbot/contents/{+path}", + "compare_url": "https://api.github.com/repos/blacklanternsecurity/bbot/compare/{base}...{head}", + "merges_url": "https://api.github.com/repos/blacklanternsecurity/bbot/merges", + "archive_url": "https://api.github.com/repos/blacklanternsecurity/bbot/{archive_format}{/ref}", + "downloads_url": "https://api.github.com/repos/blacklanternsecurity/bbot/downloads", + "issues_url": "https://api.github.com/repos/blacklanternsecurity/bbot/issues{/number}", + "pulls_url": "https://api.github.com/repos/blacklanternsecurity/bbot/pulls{/number}", + "milestones_url": "https://api.github.com/repos/blacklanternsecurity/bbot/milestones{/number}", + "notifications_url": "https://api.github.com/repos/blacklanternsecurity/bbot/notifications{?since,all,participating}", + "labels_url": "https://api.github.com/repos/blacklanternsecurity/bbot/labels{/name}", + "releases_url": "https://api.github.com/repos/blacklanternsecurity/bbot/releases{/id}", + 
"deployments_url": "https://api.github.com/repos/blacklanternsecurity/bbot/deployments", + }, + }, + ], + }, + ) + module_test.httpx_mock.add_response( + url="https://api.github.com/repos/blacklanternsecurity/bbot/actions/runs/8839360698/logs", + headers={ + "location": "https://productionresultssa10.blob.core.windows.net/actions-results/7beb304e-f42c-4830-a027-4f5dec53107d/workflow-job-run-3a559e2a-952e-58d2-b8db-2e604a9266d7/logs/steps/step-logs-0e34a19a-18b0-4208-b27a-f8c031db2d17.txt?rsct=text%2Fplain&se=2024-04-26T16%3A25%3A39Z&sig=a%2FiN8dOw0e3tiBQZAfr80veI8OYChb9edJ1eFY136B4%3D&sp=r&spr=https&sr=b&st=2024-04-26T16%3A15%3A34Z&sv=2021-12-02" + }, + status_code=302, + ) + module_test.httpx_mock.add_response( + url="https://productionresultssa10.blob.core.windows.net/actions-results/7beb304e-f42c-4830-a027-4f5dec53107d/workflow-job-run-3a559e2a-952e-58d2-b8db-2e604a9266d7/logs/steps/step-logs-0e34a19a-18b0-4208-b27a-f8c031db2d17.txt?rsct=text%2Fplain&se=2024-04-26T16%3A25%3A39Z&sig=a%2FiN8dOw0e3tiBQZAfr80veI8OYChb9edJ1eFY136B4%3D&sp=r&spr=https&sr=b&st=2024-04-26T16%3A15%3A34Z&sv=2021-12-02", + content=self.zip_content, + ) + module_test.httpx_mock.add_response( + url="https://api.github.com/repos/blacklanternsecurity/bbot/actions/runs/8839360698/artifacts", + json={ + "total_count": 1, + "artifacts": [ + { + "id": 1829832535, + "node_id": "MDg6QXJ0aWZhY3QxODI5ODMyNTM1", + "name": "build.tar.gz", + "size_in_bytes": 245770648, + "url": "https://api.github.com/repos/blacklanternsecurity/bbot/actions/artifacts/1829832535", + "archive_download_url": "https://api.github.com/repos/blacklanternsecurity/bbot/actions/artifacts/1829832535/zip", + "expired": False, + "created_at": "2024-08-19T22:32:17Z", + "updated_at": "2024-08-19T22:32:18Z", + "expires_at": "2024-09-02T22:21:59Z", + "workflow_run": { + "id": 10461468466, + "repository_id": 89290483, + "head_repository_id": 799444840, + "head_branch": "not-a-real-branch", + "head_sha": "1eeb5354ab7b1e4141b8a6473846e2a5ea0dd2c6", + }, + } + ], + }, + ) + module_test.httpx_mock.add_response( + url="https://api.github.com/repos/blacklanternsecurity/bbot/actions/artifacts/1829832535/zip", + headers={ + "location": "https://pipelinesghubeus22.actions.githubusercontent.com/uYHz4cw2WwYcB2EU57uoCs3MaEDiz8veiVlAtReP3xevBriD1h/_apis/pipelines/1/runs/214601/signedartifactscontent?artifactName=build.tar.gz&urlExpires=2024-08-20T14%3A41%3A41.8000556Z&urlSigningMethod=HMACV2&urlSignature=OOBxLx4eE5A8uHjxOIvQtn3cLFQOBW927mg0hcTHO6U%3D" + }, + status_code=302, + ) + module_test.httpx_mock.add_response( + url="https://pipelinesghubeus22.actions.githubusercontent.com/uYHz4cw2WwYcB2EU57uoCs3MaEDiz8veiVlAtReP3xevBriD1h/_apis/pipelines/1/runs/214601/signedartifactscontent?artifactName=build.tar.gz&urlExpires=2024-08-20T14%3A41%3A41.8000556Z&urlSigningMethod=HMACV2&urlSignature=OOBxLx4eE5A8uHjxOIvQtn3cLFQOBW927mg0hcTHO6U%3D", + content=self.zip_content, + ) + + def check(self, module_test, events): + assert len(events) == 9 + assert 1 == len( + [ + e + for e in events + if e.type == "DNS_NAME" and e.data == "blacklanternsecurity.com" and e.scope_distance == 0 + ] + ), "Failed to emit target DNS_NAME" + assert 1 == len( + [e for e in events if e.type == "ORG_STUB" and e.data == "blacklanternsecurity" and e.scope_distance == 0] + ), "Failed to find ORG_STUB" + assert 1 == len( + [ + e + for e in events + if e.type == "SOCIAL" + and e.data["platform"] == "github" + and e.data["profile_name"] == "blacklanternsecurity" + and e.data["url"] == 
"https://github.com/blacklanternsecurity" + and str(e.module) == "github_org" + and e.scope_distance == 1 + ] + ), "Failed to find blacklanternsecurity github" + assert 1 == len( + [ + e + for e in events + if e.type == "CODE_REPOSITORY" + and "git" in e.tags + and e.data["url"] == "https://github.com/blacklanternsecurity/bbot" + and e.scope_distance == 1 + ] + ), "Failed to find blacklanternsecurity github repo" + filesystem_events = [e for e in events if e.type == "FILESYSTEM"] + assert 3 == len(filesystem_events), filesystem_events + for filesystem_event in filesystem_events: + file = Path(filesystem_event.data["path"]) + assert file.is_file(), "Destination file does not exist" diff --git a/bbot/test/test_step_2/module_tests/test_module_gitlab.py b/bbot/test/test_step_2/module_tests/test_module_gitlab.py new file mode 100644 index 0000000000..6d593adf65 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_gitlab.py @@ -0,0 +1,276 @@ +from .base import ModuleTestBase + + +class TestGitlab(ModuleTestBase): + targets = ["http://127.0.0.1:8888"] + modules_overrides = ["gitlab", "httpx"] + config_overrides = {"modules": {"gitlab": {"api_key": "asdf"}}} + + async def setup_before_prep(self, module_test): + module_test.httpserver.expect_request("/").respond_with_data(headers={"X-Gitlab-Meta": "asdf"}) + module_test.httpserver.expect_request( + "/api/v4/projects", query_string="simple=true", headers={"Authorization": "Bearer asdf"} + ).respond_with_json( + [ + { + "id": 33, + "description": None, + "name": "bbot", + "name_with_namespace": "bbot / BBOT", + "path": "bbot", + "path_with_namespace": "bbotgroup/bbot", + "created_at": "2023-09-07T15:14:05.540Z", + "default_branch": "master", + "tag_list": [], + "topics": [], + "ssh_url_to_repo": "git@127.0.0.1:8888:bbot/bbot.git", + "http_url_to_repo": "http://127.0.0.1:8888/bbotgroup/bbot.git", + "web_url": "http://127.0.0.1:8888/bbotgroup/bbot", + "readme_url": "http://127.0.0.1:8888/bbotgroup/bbot/-/blob/master/README.md", + "forks_count": 0, + "avatar_url": None, + "star_count": 1, + "last_activity_at": "2024-03-11T19:13:20.691Z", + "namespace": { + "id": 9, + "name": "bbotgroup", + "path": "bbotgroup", + "kind": "group", + "full_path": "bbotgroup", + "parent_id": None, + "avatar_url": "/uploads/-/system/group/avatar/9/index.png", + "web_url": "http://127.0.0.1:8888/groups/bbotgroup", + }, + }, + ], + ) + module_test.httpserver.expect_request( + "/api/v4/groups", query_string="simple=true", headers={"Authorization": "Bearer asdf"} + ).respond_with_json( + [ + { + "id": 9, + "web_url": "http://127.0.0.1:8888/groups/bbotgroup", + "name": "bbotgroup", + "path": "bbotgroup", + "description": "OSINT automation for hackers.", + "visibility": "public", + "share_with_group_lock": False, + "require_two_factor_authentication": False, + "two_factor_grace_period": 48, + "project_creation_level": "developer", + "auto_devops_enabled": None, + "subgroup_creation_level": "owner", + "emails_disabled": False, + "emails_enabled": True, + "mentions_disabled": None, + "lfs_enabled": True, + "math_rendering_limits_enabled": True, + "lock_math_rendering_limits_enabled": False, + "default_branch_protection": 2, + "default_branch_protection_defaults": { + "allowed_to_push": [{"access_level": 30}], + "allow_force_push": True, + "allowed_to_merge": [{"access_level": 30}], + }, + "avatar_url": "http://127.0.0.1:8888/uploads/-/system/group/avatar/9/index.png", + "request_access_enabled": False, + "full_name": "bbotgroup", + "full_path": "bbotgroup", + 
"created_at": "2018-05-15T14:31:12.027Z", + "parent_id": None, + "organization_id": 1, + "shared_runners_setting": "enabled", + "ldap_cn": None, + "ldap_access": None, + "marked_for_deletion_on": None, + "wiki_access_level": "enabled", + } + ] + ) + module_test.httpserver.expect_request( + "/api/v4/groups/bbotgroup/projects", query_string="simple=true", headers={"Authorization": "Bearer asdf"} + ).respond_with_json( + [ + { + "id": 33, + "description": None, + "name": "bbot2", + "name_with_namespace": "bbotgroup / bbot2", + "path": "bbot2", + "path_with_namespace": "bbotgroup/bbot2", + "created_at": "2023-09-07T15:14:05.540Z", + "default_branch": "master", + "tag_list": [], + "topics": [], + "ssh_url_to_repo": "git@blacklanternsecurity.com:bbotgroup/bbot2.git", + "http_url_to_repo": "http://127.0.0.1:8888/bbotgroup/bbot2.git", + "web_url": "http://127.0.0.1:8888/bbotgroup/bbot2", + "readme_url": "http://127.0.0.1:8888/bbotgroup/bbot2/-/blob/master/README.md", + "forks_count": 0, + "avatar_url": None, + "star_count": 1, + "last_activity_at": "2024-03-11T19:13:20.691Z", + "namespace": { + "id": 9, + "name": "bbotgroup", + "path": "bbotgroup", + "kind": "group", + "full_path": "bbotgroup", + "parent_id": None, + "avatar_url": "/uploads/-/system/group/avatar/9/index.png", + "web_url": "http://127.0.0.1:8888/groups/bbotgroup", + }, + }, + ] + ) + module_test.httpserver.expect_request( + "/api/v4/users/bbotgroup/projects", query_string="simple=true", headers={"Authorization": "Bearer asdf"} + ).respond_with_json( + [ + { + "id": 33, + "description": None, + "name": "bbot3", + "name_with_namespace": "bbotgroup / bbot3", + "path": "bbot3", + "path_with_namespace": "bbotgroup/bbot3", + "created_at": "2023-09-07T15:14:05.540Z", + "default_branch": "master", + "tag_list": [], + "topics": [], + "ssh_url_to_repo": "git@blacklanternsecurity.com:bbotgroup/bbot3.git", + "http_url_to_repo": "http://127.0.0.1:8888/bbotgroup/bbot3.git", + "web_url": "http://127.0.0.1:8888/bbotgroup/bbot3", + "readme_url": "http://127.0.0.1:8888/bbotgroup/bbot3/-/blob/master/README.md", + "forks_count": 0, + "avatar_url": None, + "star_count": 1, + "last_activity_at": "2024-03-11T19:13:20.691Z", + "namespace": { + "id": 9, + "name": "bbotgroup", + "path": "bbotgroup", + "kind": "group", + "full_path": "bbotgroup", + "parent_id": None, + "avatar_url": "/uploads/-/system/group/avatar/9/index.png", + "web_url": "http://127.0.0.1:8888/groups/bbotgroup", + }, + }, + ] + ) + + def check(self, module_test, events): + assert 1 == len( + [ + e + for e in events + if e.type == "TECHNOLOGY" + and e.data["technology"] == "GitLab" + and e.data["url"] == "http://127.0.0.1:8888/" + ] + ) + assert 1 == len( + [ + e + for e in events + if e.type == "SOCIAL" + and e.data["platform"] == "gitlab" + and e.data["profile_name"] == "bbotgroup" + and e.data["url"] == "http://127.0.0.1:8888/bbotgroup" + and str(e.module) == "gitlab" + ] + ) + assert 1 == len( + [ + e + for e in events + if e.type == "CODE_REPOSITORY" + and "git" in e.tags + and e.data["url"] == "http://127.0.0.1:8888/bbotgroup/bbot" + ] + ) + assert 1 == len( + [ + e + for e in events + if e.type == "CODE_REPOSITORY" + and "git" in e.tags + and e.data["url"] == "http://127.0.0.1:8888/bbotgroup/bbot2" + ] + ) + assert 1 == len( + [ + e + for e in events + if e.type == "CODE_REPOSITORY" + and "git" in e.tags + and e.data["url"] == "http://127.0.0.1:8888/bbotgroup/bbot3" + ] + ) + + +class TestGitlabDotOrg(ModuleTestBase): + targets = ["http://127.0.0.1:8888"] + modules_overrides = 
["gitlab", "httpx", "social", "excavate"] + + async def setup_before_prep(self, module_test): + module_test.httpserver.expect_request("/").respond_with_data("
") + module_test.httpx_mock.add_response( + url="https://gitlab.org/api/v4/groups/veilidgroup/projects?simple=true", + json=[ + { + "id": 55490429, + "description": None, + "name": "Veilid", + "name_with_namespace": "Veilid / Veilid", + "path": "veilid", + "path_with_namespace": "veilidgroup/veilid", + "created_at": "2024-03-03T05:22:53.169Z", + "default_branch": "master", + "tag_list": [], + "topics": [], + "ssh_url_to_repo": "git@gitlab.org:veilid/veilid.git", + "http_url_to_repo": "https://gitlab.org/veilidgroup/veilid.git", + "web_url": "https://gitlab.org/veilidgroup/veilid", + "readme_url": "https://gitlab.org/veilidgroup/veilid/-/blob/master/README.md", + "forks_count": 0, + "avatar_url": None, + "star_count": 0, + "last_activity_at": "2024-03-03T05:22:53.097Z", + "namespace": { + "id": 66882294, + "name": "veilidgroup", + "path": "veilidgroup", + "kind": "group", + "full_path": "veilidgroup", + "parent_id": None, + "avatar_url": "/uploads/-/system/group/avatar/66882294/signal-2023-07-04-192426_003.jpeg", + "web_url": "https://gitlab.org/groups/veilidgroup", + }, + }, + ], + ) + + def check(self, module_test, events): + assert 1 == len( + [ + e + for e in events + if e.type == "SOCIAL" + and e.data["platform"] == "gitlab" + and e.data["profile_name"] == "veilidgroup" + and e.data["url"] == "https://gitlab.org/veilidgroup" + ] + ) + assert 1 == len( + [ + e + for e in events + if e.type == "CODE_REPOSITORY" + and "git" in e.tags + and e.data["url"] == "https://gitlab.org/veilidgroup/veilid" + and str(e.module) == "gitlab" + ] + ) diff --git a/bbot/test/test_step_2/module_tests/test_module_google_playstore.py b/bbot/test/test_step_2/module_tests/test_module_google_playstore.py new file mode 100644 index 0000000000..f73a79bf39 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_google_playstore.py @@ -0,0 +1,83 @@ +from .base import ModuleTestBase + + +class TestGoogle_Playstore(ModuleTestBase): + modules_overrides = ["google_playstore", "speculate"] + + async def setup_after_prep(self, module_test): + await module_test.mock_dns({"blacklanternsecurity.com": {"A": ["127.0.0.99"]}}) + module_test.httpx_mock.add_response( + url="https://play.google.com/store/search?q=blacklanternsecurity&c=apps", + text=""" + + + "blacklanternsecurity" - Android Apps on Google Play + + + + + + """, + ) + module_test.httpx_mock.add_response( + url="https://play.google.com/store/apps/details?id=com.bbot.test", + text=""" + + + BBOT + + + + + + + """, + ) + module_test.httpx_mock.add_response( + url="https://play.google.com/store/apps/details?id=com.bbot.other", + text=""" + + + BBOT + + + + + + + + """, + ) + + def check(self, module_test, events): + assert len(events) == 6 + assert 1 == len( + [ + e + for e in events + if e.type == "DNS_NAME" and e.data == "blacklanternsecurity.com" and e.scope_distance == 0 + ] + ), "Failed to emit target DNS_NAME" + assert 1 == len( + [e for e in events if e.type == "ORG_STUB" and e.data == "blacklanternsecurity" and e.scope_distance == 0] + ), "Failed to find ORG_STUB" + assert 1 == len( + [ + e + for e in events + if e.type == "MOBILE_APP" + and "android" in e.tags + and e.data["id"] == "com.bbot.test" + and e.data["url"] == "https://play.google.com/store/apps/details?id=com.bbot.test" + ] + ), "Failed to find bbot android app" + assert 1 == len( + [ + e + for e in events + if e.type == "MOBILE_APP" + and "android" in e.tags + and e.data["id"] == "com.bbot.other" + and e.data["url"] == "https://play.google.com/store/apps/details?id=com.bbot.other" + ] + 
), "Failed to find other bbot android app" diff --git a/bbot/test/test_step_2/module_tests/test_module_gowitness.py b/bbot/test/test_step_2/module_tests/test_module_gowitness.py new file mode 100644 index 0000000000..47246f931e --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_gowitness.py @@ -0,0 +1,136 @@ +from pathlib import Path + +from .base import ModuleTestBase + + +class TestGowitness(ModuleTestBase): + targets = ["127.0.0.1:8888"] + modules_overrides = ["gowitness", "httpx", "social", "excavate"] + import shutil + from pathlib import Path + + home_dir = Path("/tmp/.bbot_gowitness_test") + shutil.rmtree(home_dir, ignore_errors=True) + config_overrides = { + "force_deps": True, + "home": str(home_dir), + "scope": {"report_distance": 2}, + "omit_event_types": [], + } + + async def setup_after_prep(self, module_test): + respond_args = { + "response_data": """BBOT is life + + + + +""", + "headers": {"Server": "Apache/2.4.41 (Ubuntu)"}, + } + module_test.set_expect_requests(respond_args=respond_args) + request_args = {"uri": "/blacklanternsecurity"} + respond_args = {"response_data": """blacklanternsecurity github BBOT is lifeBBOT is life", + "headers": {"Server": "Apache/2.4.41 (Ubuntu)"}, + } + module_test.set_expect_requests(request_args, respond_args) + + def check(self, module_test, events): + webscreenshots = [e for e in events if e.type == "WEBSCREENSHOT"] + assert webscreenshots, "failed to raise WEBSCREENSHOT events" + assert len(webscreenshots) == 1 + webscreenshot = webscreenshots[0] + filename = Path(webscreenshot.data["path"]) + # sadly this file doesn't exist because gowitness doesn't truncate properly + assert not filename.exists() diff --git a/bbot/test/test_step_2/module_tests/test_module_hackertarget.py b/bbot/test/test_step_2/module_tests/test_module_hackertarget.py new file mode 100644 index 0000000000..748b645c63 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_hackertarget.py @@ -0,0 +1,13 @@ +from .base import ModuleTestBase + + +class TestHackertarget(ModuleTestBase): + async def setup_before_prep(self, module_test): + module_test.httpx_mock.add_response( + url="https://api.hackertarget.com/hostsearch/?q=blacklanternsecurity.com", + text="asdf.blacklanternsecurity.com\nzzzz.blacklanternsecurity.com", + ) + + def check(self, module_test, events): + assert any(e.data == "asdf.blacklanternsecurity.com" for e in events), "Failed to detect subdomain" + assert any(e.data == "zzzz.blacklanternsecurity.com" for e in events), "Failed to detect subdomain" diff --git a/bbot/test/test_step_2/module_tests/test_module_host_header.py b/bbot/test/test_step_2/module_tests/test_module_host_header.py new file mode 100644 index 0000000000..2c4cf5a7d3 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_host_header.py @@ -0,0 +1,65 @@ +import asyncio +import re +from werkzeug.wrappers import Response + +from .base import ModuleTestBase + + +def extract_subdomain_tag(data): + pattern = r"([a-z0-9]{4})\.fakedomain\.fakeinteractsh\.com" + match = re.search(pattern, data) + if match: + return match.group(1) + + +class TestHost_Header(ModuleTestBase): + targets = ["http://127.0.0.1:8888"] + modules_overrides = ["httpx", "host_header"] + + fake_host = "fakedomain.fakeinteractsh.com" + + def request_handler(self, request): + subdomain_tag = None + subdomain_tag = extract_subdomain_tag(request.headers["Host"]) + + # Standard (with reflection) + if subdomain_tag: + self.interactsh_mock_instance.mock_interaction(subdomain_tag) + return 
Response(f"Alive, host is: {subdomain_tag}.{self.fake_host}", status=200) + + # Host Header Overrides + subdomain_tag_overrides = extract_subdomain_tag(request.headers["X-Forwarded-For"]) + if subdomain_tag_overrides: + return Response(f"Alive, host is: {subdomain_tag}.{self.fake_host}", status=200) + + return Response("Alive, host is: defaulthost.com", status=200) + + async def setup_before_prep(self, module_test): + self.interactsh_mock_instance = module_test.mock_interactsh("host_header") + module_test.monkeypatch.setattr( + module_test.scan.helpers, "interactsh", lambda *args, **kwargs: self.interactsh_mock_instance + ) + + async def setup_after_prep(self, module_test): + expect_args = re.compile("/") + module_test.set_expect_requests_handler(expect_args=expect_args, request_handler=self.request_handler) + + def check(self, module_test, events): + # We can't fully test all the use-cases because werkzeug abstracts away some of our RFC-violating tricks :/ + + for e in events: + assert any( + e.type == "FINDING" + and "Possible Host header injection. Injection technique: standard" in e.data["description"] + for e in events + ), "Failed to detect Possible Host Header Injection (standard)" + assert any( + e.type == "FINDING" + and "Possible Host header injection. Injection technique: host override headers" + in e.data["description"] + for e in events + ), "Failed to detect Possible Host Header Injection (host override headers)" + assert any( + e.type == "FINDING" and "Spoofed Host header (standard) [HTTP] interaction" in e.data["description"] + for e in events + ), "Failed to detect Spoofed Host header (standard) [HTTP] interaction" diff --git a/bbot/test/test_step_2/module_tests/test_module_http.py b/bbot/test/test_step_2/module_tests/test_module_http.py new file mode 100644 index 0000000000..2bc99f5ddf --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_http.py @@ -0,0 +1,63 @@ +import json +import httpx + +from .base import ModuleTestBase + + +class TestHTTP(ModuleTestBase): + downstream_url = "https://blacklanternsecurity.fakedomain:1234/events" + config_overrides = { + "modules": { + "http": { + "url": downstream_url, + "method": "PUT", + "bearer": "auth_token", + "username": "bbot_user", + "password": "bbot_password", + } + } + } + + def verify_data(self, j): + return j["data"] == "blacklanternsecurity.com" and j["type"] == "DNS_NAME" + + async def setup_after_prep(self, module_test): + self.got_event = False + self.headers_correct = False + self.method_correct = False + self.url_correct = False + + async def custom_callback(request): + j = json.loads(request.content) + if request.url == self.downstream_url: + self.url_correct = True + if request.method == "PUT": + self.method_correct = True + if "Authorization" in request.headers: + self.headers_correct = True + if self.verify_data(j): + self.got_event = True + return httpx.Response( + status_code=200, + ) + + module_test.httpx_mock.add_callback(custom_callback) + module_test.httpx_mock.add_callback(custom_callback) + module_test.httpx_mock.add_response( + method="PUT", headers={"Authorization": "bearer auth_token"}, url=self.downstream_url + ) + + def check(self, module_test, events): + assert self.got_event is True + assert self.headers_correct is True + assert self.method_correct is True + assert self.url_correct is True + + +class TestHTTPSIEMFriendly(TestHTTP): + modules_overrides = ["http"] + config_overrides = {"modules": {"http": dict(TestHTTP.config_overrides["modules"]["http"])}} + 
config_overrides["modules"]["http"]["siem_friendly"] = True + + def verify_data(self, j): + return j["data"] == {"DNS_NAME": "blacklanternsecurity.com"} and j["type"] == "DNS_NAME" diff --git a/bbot/test/test_step_2/module_tests/test_module_httpx.py b/bbot/test/test_step_2/module_tests/test_module_httpx.py new file mode 100644 index 0000000000..450de75050 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_httpx.py @@ -0,0 +1,146 @@ +from .base import ModuleTestBase + + +class TestHTTPXBase(ModuleTestBase): + targets = ["http://127.0.0.1:8888/url", "127.0.0.1:8888"] + module_name = "httpx" + modules_overrides = ["httpx", "excavate"] + config_overrides = {"modules": {"httpx": {"store_responses": True}}} + + # HTML for a page with a login form + html_with_login = """ + + +
+ <form action="/login" method="post">
+ <input type="text" name="username">
+ <input type="password" name="password">
+ <input type="submit" value="Login">
+ </form>
+ +""" + + # HTML for a page without a login form + html_without_login = """ + + +
+ <p>This is a simple page without a login form.</p>
+ +""" + + async def setup_after_prep(self, module_test): + request_args = {"uri": "/", "headers": {"test": "header"}} + respond_args = {"response_data": self.html_without_login} + module_test.set_expect_requests(request_args, respond_args) + request_args = {"uri": "/url", "headers": {"test": "header"}} + respond_args = {"response_data": self.html_with_login} + module_test.set_expect_requests(request_args, respond_args) + + def check(self, module_test, events): + url = False + open_port = False + for e in events: + if e.type == "HTTP_RESPONSE": + if e.data["path"] == "/": + assert "login-page" not in e.tags + open_port = True + elif e.data["path"] == "/url": + assert "login-page" in e.tags + url = True + assert url, "Failed to visit target URL" + assert open_port, "Failed to visit target OPEN_TCP_PORT" + saved_response = module_test.scan.home / "httpx" / "127.0.0.1.8888[slash]url.txt" + assert saved_response.is_file(), "Failed to save raw httpx response" + + +class TestHTTPX_404(ModuleTestBase): + targets = ["https://127.0.0.1:9999"] + modules_overrides = ["httpx", "speculate", "excavate"] + config_overrides = {"modules": {"speculate": {"ports": "8888,9999"}}} + + async def setup_after_prep(self, module_test): + module_test.httpserver.expect_request("/").respond_with_data( + "Redirecting...", status=301, headers={"Location": "https://127.0.0.1:9999"} + ) + module_test.httpserver_ssl.expect_request("/").respond_with_data("404 not found", status=404) + + def check(self, module_test, events): + assert 1 == len( + [e for e in events if e.type == "URL" and e.data == "http://127.0.0.1:8888/" and "status-301" in e.tags] + ) + assert 1 == len([e for e in events if e.type == "URL" and e.data == "https://127.0.0.1:9999/"]) + + +class TestHTTPX_Redirect(ModuleTestBase): + targets = ["http://127.0.0.1:8888"] + modules_overrides = ["httpx", "speculate", "excavate"] + + async def setup_after_prep(self, module_test): + module_test.httpserver.expect_request("/").respond_with_data( + "Redirecting...", status=301, headers={"Location": "http://www.evilcorp.com"} + ) + + def check(self, module_test, events): + assert 1 == len( + [e for e in events if e.type == "URL" and e.data == "http://127.0.0.1:8888/" and "status-301" in e.tags] + ) + assert 1 == len( + [ + e + for e in events + if e.type == "URL_UNVERIFIED" and e.data == "http://www.evilcorp.com/" and "affiliate" in e.tags + ] + ) + assert 1 == len( + [ + e + for e in events + if e.type.startswith("DNS_NAME") and e.data == "www.evilcorp.com" and "affiliate" in e.tags + ] + ) + + +class TestHTTPX_URLBlacklist(ModuleTestBase): + targets = ["http://127.0.0.1:8888"] + modules_overrides = ["httpx", "speculate", "excavate"] + config_overrides = {"web": {"spider_distance": 10, "spider_depth": 10}} + + async def setup_after_prep(self, module_test): + module_test.httpserver.expect_request("/").respond_with_data( + """ +
+ + + + """ + ) + + def check(self, module_test, events): + assert 4 == len([e for e in events if e.type == "URL_UNVERIFIED"]) + assert 3 == len([e for e in events if e.type == "HTTP_RESPONSE"]) + assert 3 == len([e for e in events if e.type == "URL"]) + assert 1 == len([e for e in events if e.type == "URL" and e.data == "http://127.0.0.1:8888/"]) + assert 1 == len([e for e in events if e.type == "URL" and e.data == "http://127.0.0.1:8888/test.aspx"]) + assert 1 == len([e for e in events if e.type == "URL" and e.data == "http://127.0.0.1:8888/test.txt"]) + assert not any(e for e in events if "URL" in e.type and ".svg" in e.data) + assert not any(e for e in events if "URL" in e.type and ".woff" in e.data) + + +class TestHTTPX_querystring_removed(ModuleTestBase): + targets = ["http://127.0.0.1:8888"] + modules_overrides = ["httpx", "speculate", "excavate"] + + async def setup_after_prep(self, module_test): + module_test.httpserver.expect_request("/").respond_with_data('') + + def check(self, module_test, events): + assert [e for e in events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.1:8888/test.php"] + + +class TestHTTPX_querystring_notremoved(TestHTTPX_querystring_removed): + config_overrides = {"url_querystring_remove": False} + + def check(self, module_test, events): + assert [e for e in events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.1:8888/test.php?foo=bar"] diff --git a/bbot/test/test_step_2/module_tests/test_module_hunt.py b/bbot/test/test_step_2/module_tests/test_module_hunt.py new file mode 100644 index 0000000000..ff5eed716e --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_hunt.py @@ -0,0 +1,20 @@ +from .base import ModuleTestBase + + +class TestHunt(ModuleTestBase): + targets = ["http://127.0.0.1:8888"] + modules_overrides = ["httpx", "hunt", "excavate"] + config_overrides = { + "interactsh_disable": True, + } + + async def setup_after_prep(self, module_test): + expect_args = {"method": "GET", "uri": "/"} + respond_args = {"response_data": 'ping'} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + def check(self, module_test, events): + assert any( + e.type == "FINDING" and e.data["description"] == "Found potential INSECURE CRYPTOGRAPHY parameter [cipher]" + for e in events + ) diff --git a/bbot/test/test_step_2/module_tests/test_module_hunterio.py b/bbot/test/test_step_2/module_tests/test_module_hunterio.py new file mode 100644 index 0000000000..ecd71957ab --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_hunterio.py @@ -0,0 +1,160 @@ +from .base import ModuleTestBase + + +class TestHunterio(ModuleTestBase): + config_overrides = {"modules": {"hunterio": {"api_key": ["asdf", "1234", "4321", "fdsa"]}}} + + async def setup_before_prep(self, module_test): + module_test.httpx_mock.add_response( + url="https://api.hunter.io/v2/account?api_key=asdf", + json={ + "data": { + "first_name": "jon", + "last_name": "snow", + "email": "jon@blacklanternsecurity.notreal", + "plan_name": "Starter", + "plan_level": 1, + "reset_date": "1917-05-23", + "team_id": 1234, + "calls": { + "_deprecation_notice": "Sums the searches and the verifications, giving an imprecise look of the available requests", + "used": 999, + "available": 2000, + }, + "requests": { + "searches": {"used": 998, "available": 1000}, + "verifications": {"used": 0, "available": 1000}, + }, + } + }, + ) + module_test.httpx_mock.add_response( + 
url="https://api.hunter.io/v2/domain-search?domain=blacklanternsecurity.com&api_key=fdsa&limit=100&offset=0", + json={ + "data": { + "domain": "blacklanternsecurity.com", + "disposable": False, + "webmail": False, + "accept_all": False, + "pattern": "{first}", + "organization": "Black Lantern Security", + "description": None, + "twitter": None, + "facebook": None, + "linkedin": "https://linkedin.com/company/black-lantern-security", + "instagram": None, + "youtube": None, + "technologies": ["jekyll", "nginx"], + "country": "US", + "state": "CA", + "city": "Night City", + "postal_code": "12345", + "street": "123 Any St", + "emails": [ + { + "value": "asdf@blacklanternsecurity.com", + "type": "generic", + "confidence": 77, + "sources": [ + { + "domain": "blacklanternsecurity.com", + "uri": "http://blacklanternsecurity.com", + "extracted_on": "2021-06-09", + "last_seen_on": "2023-03-21", + "still_on_page": True, + } + ], + "first_name": None, + "last_name": None, + "position": None, + "seniority": None, + "department": "support", + "linkedin": None, + "twitter": None, + "phone_number": None, + "verification": {"date": None, "status": None}, + } + ], + "linked_domains": [], + }, + "meta": { + "results": 1, + "limit": 100, + "offset": 0, + "params": { + "domain": "blacklanternsecurity.com", + "company": None, + "type": None, + "seniority": None, + "department": None, + }, + }, + }, + ) + module_test.httpx_mock.add_response( + url="https://api.hunter.io/v2/domain-search?domain=blacklanternsecurity.com&api_key=4321&limit=100&offset=100", + json={ + "data": { + "domain": "blacklanternsecurity.com", + "disposable": False, + "webmail": False, + "accept_all": False, + "pattern": "{first}", + "organization": "Black Lantern Security", + "description": None, + "twitter": None, + "facebook": None, + "linkedin": "https://linkedin.com/company/black-lantern-security", + "instagram": None, + "youtube": None, + "technologies": ["jekyll", "nginx"], + "country": "US", + "state": "CA", + "city": "Night City", + "postal_code": "12345", + "street": "123 Any St", + "emails": [ + { + "value": "fdsa@blacklanternsecurity.com", + "type": "generic", + "confidence": 77, + "sources": [ + { + "domain": "blacklanternsecurity.com", + "uri": "http://blacklanternsecurity.com", + "extracted_on": "2021-06-09", + "last_seen_on": "2023-03-21", + "still_on_page": True, + } + ], + "first_name": None, + "last_name": None, + "position": None, + "seniority": None, + "department": "support", + "linkedin": None, + "twitter": None, + "phone_number": None, + "verification": {"date": None, "status": None}, + } + ], + "linked_domains": [], + }, + "meta": { + "results": 1, + "limit": 100, + "offset": 0, + "params": { + "domain": "blacklanternsecurity.com", + "company": None, + "type": None, + "seniority": None, + "department": None, + }, + }, + }, + ) + + def check(self, module_test, events): + assert any(e.data == "asdf@blacklanternsecurity.com" for e in events), "Failed to detect email #1" + assert any(e.data == "fdsa@blacklanternsecurity.com" for e in events), "Failed to detect email #2" diff --git a/bbot/test/test_step_2/module_tests/test_module_iis_shortnames.py b/bbot/test/test_step_2/module_tests/test_module_iis_shortnames.py new file mode 100644 index 0000000000..36369cdca4 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_iis_shortnames.py @@ -0,0 +1,61 @@ +import re + +from .base import ModuleTestBase + + +class TestIIS_Shortnames(ModuleTestBase): + targets = ["http://127.0.0.1:8888"] + modules_overrides = ["httpx", 
"iis_shortnames"] + config_overrides = {"modules": {"iis_shortnames": {"detect_only": False}}} + + async def setup_after_prep(self, module_test): + module_test.httpserver.no_handler_status_code = 404 + + expect_args = {"method": "GET", "uri": "/"} + respond_args = {"response_data": "alive", "status": 200} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + expect_args = {"method": "GET", "uri": "/*~1*/a.aspx"} + respond_args = {"response_data": "", "status": 400} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + expect_args = {"method": "GET", "uri": re.compile(r"\/B\*~1\*.*$")} + respond_args = {"response_data": "", "status": 400} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + expect_args = {"method": "GET", "uri": re.compile(r"\/BL\*~1\*.*$")} + respond_args = {"response_data": "", "status": 400} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + expect_args = {"method": "GET", "uri": re.compile(r"\/BLS\*~1\*.*$")} + respond_args = {"response_data": "", "status": 400} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + expect_args = {"method": "GET", "uri": re.compile(r"\/BLSH\*~1\*.*$")} + respond_args = {"response_data": "", "status": 400} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + expect_args = {"method": "GET", "uri": re.compile(r"\/BLSHA\*~1\*.*$")} + respond_args = {"response_data": "", "status": 400} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + expect_args = {"method": "GET", "uri": re.compile(r"\/BLSHAX\*~1\*.*$")} + respond_args = {"response_data": "", "status": 400} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + for char in "BLSHAX": + expect_args = {"method": "GET", "uri": re.compile(rf"\/\*{char}\*~1\*.*$")} + respond_args = {"response_data": "", "status": 400} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + def check(self, module_test, events): + vulnerabilityEmitted = False + url_hintEmitted = False + for e in events: + if e.type == "VULNERABILITY" and "iis-magic-url" not in e.tags: + vulnerabilityEmitted = True + if e.type == "URL_HINT" and e.data == "http://127.0.0.1:8888/BLSHAX~1": + url_hintEmitted = True + + assert vulnerabilityEmitted + assert url_hintEmitted diff --git a/bbot/test/test_step_2/module_tests/test_module_internetdb.py b/bbot/test/test_step_2/module_tests/test_module_internetdb.py new file mode 100644 index 0000000000..786ea1f33d --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_internetdb.py @@ -0,0 +1,54 @@ +from .base import ModuleTestBase + + +class TestInternetDB(ModuleTestBase): + config_overrides = {"dns": {"minimal": False}} + + async def setup_before_prep(self, module_test): + await module_test.mock_dns( + { + "blacklanternsecurity.com": {"A": ["1.2.3.4"]}, + "autodiscover.blacklanternsecurity.com": {"A": ["2.3.4.5"]}, + "mail.blacklanternsecurity.com": {"A": ["3.4.5.6"]}, + } + ) + + module_test.httpx_mock.add_response( + url="https://internetdb.shodan.io/1.2.3.4", + json={ + "cpes": [ + "cpe:/a:microsoft:internet_information_services", + "cpe:/a:microsoft:outlook_web_access:15.0.1367", + ], + "hostnames": [ + "autodiscover.blacklanternsecurity.com", + "mail.blacklanternsecurity.com", + ], + "ip": "1.2.3.4", + "ports": [ + 25, + 80, + 443, + ], + "tags": 
["starttls", "self-signed", "eol-os"], + "vulns": ["CVE-2021-26857", "CVE-2021-26855"], + }, + ) + + def check(self, module_test, events): + assert 5 == len([e for e in events if str(e.module) == "internetdb"]) + assert 1 == len( + [e for e in events if e.type == "DNS_NAME" and e.data == "autodiscover.blacklanternsecurity.com"] + ) + assert 1 == len([e for e in events if e.type == "DNS_NAME" and e.data == "mail.blacklanternsecurity.com"]) + assert 0 == len([e for e in events if e.type == "OPEN_TCP_PORT"]) + assert 1 == len([e for e in events if e.type == "FINDING" and str(e.module) == "internetdb"]) + assert 1 == len([e for e in events if e.type == "FINDING" and "CVE-2021-26857" in e.data["description"]]) + assert 2 == len([e for e in events if e.type == "TECHNOLOGY" and str(e.module) == "internetdb"]) + assert 1 == len( + [ + e + for e in events + if e.type == "TECHNOLOGY" and e.data["technology"] == "cpe:/a:microsoft:outlook_web_access:15.0.1367" + ] + ) diff --git a/bbot/test/test_step_2/module_tests/test_module_ip2location.py b/bbot/test/test_step_2/module_tests/test_module_ip2location.py new file mode 100644 index 0000000000..2a63607207 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_ip2location.py @@ -0,0 +1,31 @@ +from .base import ModuleTestBase + + +class TestIP2Location(ModuleTestBase): + targets = ["8.8.8.8"] + config_overrides = {"modules": {"ip2location": {"api_key": "asdf"}}} + + async def setup_before_prep(self, module_test): + module_test.httpx_mock.add_response( + url="http://api.ip2location.io/?key=asdf&ip=8.8.8.8&format=json&source=bbot", + json={ + "ip": "8.8.8.8", + "country_code": "US", + "country_name": "United States of America", + "region_name": "California", + "city_name": "Mountain View", + "latitude": 37.405992, + "longitude": -122.078515, + "zip_code": "94043", + "time_zone": "-07:00", + "asn": "15169", + "as": "Google LLC", + "is_proxy": False, + }, + ) + + def check(self, module_test, events): + assert any( + e.type == "GEOLOCATION" and e.data["ip"] == "8.8.8.8" and e.data["city_name"] == "Mountain View" + for e in events + ), "Failed to geolocate IP" diff --git a/bbot/test/test_step_2/module_tests/test_module_ipneighbor.py b/bbot/test/test_step_2/module_tests/test_module_ipneighbor.py new file mode 100644 index 0000000000..8d8fdff416 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_ipneighbor.py @@ -0,0 +1,15 @@ +from .base import ModuleTestBase + + +class TestIPNeighbor(ModuleTestBase): + targets = ["127.0.0.15", "www.bls.notreal"] + config_overrides = {"scope": {"report_distance": 1}, "dns": {"minimal": False, "search_distance": 2}} + + async def setup_after_prep(self, module_test): + await module_test.mock_dns( + {"3.0.0.127.in-addr.arpa": {"PTR": ["asdf.www.bls.notreal"]}, "asdf.www.bls.notreal": {"A": ["127.0.0.3"]}} + ) + + def check(self, module_test, events): + assert any(e.data == "127.0.0.3" for e in events) + assert not any(e.data == "127.0.0.4" for e in events) diff --git a/bbot/test/test_step_2/module_tests/test_module_ipstack.py b/bbot/test/test_step_2/module_tests/test_module_ipstack.py new file mode 100644 index 0000000000..dea0b28657 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_ipstack.py @@ -0,0 +1,67 @@ +from .base import ModuleTestBase + + +class TestIPStack(ModuleTestBase): + targets = ["8.8.8.8"] + config_overrides = {"modules": {"ipstack": {"api_key": "asdf"}}} + + async def setup_before_prep(self, module_test): + module_test.httpx_mock.add_response( + 
url="http://api.ipstack.com/check?access_key=asdf", + json={ + "ip": "1.2.3.4", + "type": "ipv4", + "continent_code": "NA", + "continent_name": "North America", + "country_code": "US", + "country_name": "United States", + "region_code": "FL", + "region_name": "Florida", + "city": "Cape Canaveral", + "zip": "12345", + "latitude": 47.89263153076172, + "longitude": -97.04190063476562, + "location": { + "geoname_id": 5059429, + "capital": "Washington D.C.", + "languages": [{"code": "en", "name": "English", "native": "English"}], + "country_flag": "https://assets.ipstack.com/flags/us.svg", + "country_flag_emoji": "\ud83c\uddfa\ud83c\uddf8", + "country_flag_emoji_unicode": "U+1F1FA U+1F1F8", + "calling_code": "1", + "is_eu": False, + }, + }, + ) + module_test.httpx_mock.add_response( + url="http://api.ipstack.com/8.8.8.8?access_key=asdf", + json={ + "ip": "8.8.8.8", + "type": "ipv4", + "continent_code": "NA", + "continent_name": "North America", + "country_code": "US", + "country_name": "United States", + "region_code": "OH", + "region_name": "Ohio", + "city": "Glenmont", + "zip": "44628", + "latitude": 40.5369987487793, + "longitude": -82.12859344482422, + "location": { + "geoname_id": None, + "capital": "Washington D.C.", + "languages": [{"code": "en", "name": "English", "native": "English"}], + "country_flag": "https://assets.ipstack.com/flags/us.svg", + "country_flag_emoji": "\ud83c\uddfa\ud83c\uddf8", + "country_flag_emoji_unicode": "U+1F1FA U+1F1F8", + "calling_code": "1", + "is_eu": False, + }, + }, + ) + + def check(self, module_test, events): + assert any( + e.type == "GEOLOCATION" and e.data["ip"] == "8.8.8.8" and e.data["city"] == "Glenmont" for e in events + ), "Failed to geolocate IP" diff --git a/bbot/test/test_step_2/module_tests/test_module_jadx.py b/bbot/test/test_step_2/module_tests/test_module_jadx.py new file mode 100644 index 0000000000..f57dabad89 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_jadx.py @@ -0,0 +1,55 @@ +from pathlib import Path +from bbot.core.helpers.libmagic import get_magic_info +from bbot.test.test_step_2.module_tests.base import ModuleTestBase, tempapkfile + + +class TestJadx(ModuleTestBase): + modules_overrides = ["apkpure", "google_playstore", "speculate", "jadx"] + apk_file = tempapkfile() + + async def setup_after_prep(self, module_test): + await module_test.mock_dns({"blacklanternsecurity.com": {"A": ["127.0.0.99"]}}) + module_test.httpx_mock.add_response( + url="https://play.google.com/store/search?q=blacklanternsecurity&c=apps", + text=""" + + + "blacklanternsecurity" - Android Apps on Google Play + + + + + """, + ) + module_test.httpx_mock.add_response( + url="https://play.google.com/store/apps/details?id=com.bbot.test", + text=""" + + + BBOT + + + + + + + """, + ) + module_test.httpx_mock.add_response( + url="https://d.apkpure.com/b/XAPK/com.bbot.test?version=latest", + content=self.apk_file, + headers={ + "Content-Type": "application/vnd.android.package-archive", + "Content-Disposition": "attachment; filename=com.bbot.test.apk", + }, + ) + + def check(self, module_test, events): + filesystem_events = [e for e in events if e.type == "FILESYSTEM"] + apk_event = [e for e in filesystem_events if "file" in e.tags] + extension, mime_type, description, confidence = get_magic_info(apk_event[0].data["path"]) + assert description == "Android Application Package", f"Downloaded file was detected as {description}" + extract_event = [e for e in filesystem_events if "folder" in e.tags] + assert 1 == len(extract_event), "Failed to extract 
apk" + extract_path = Path(extract_event[0].data["path"]) + assert extract_path.is_dir(), "Destination apk doesn't exist" diff --git a/bbot/test/test_step_2/module_tests/test_module_json.py b/bbot/test/test_step_2/module_tests/test_module_json.py new file mode 100644 index 0000000000..27ed5a55e0 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_json.py @@ -0,0 +1,70 @@ +import json + +from .base import ModuleTestBase +from bbot.core.event.base import event_from_json + + +class TestJSON(ModuleTestBase): + def check(self, module_test, events): + dns_data = "blacklanternsecurity.com" + context_data = f"Scan {module_test.scan.name} seeded with DNS_NAME: blacklanternsecurity.com" + + scan_event = [e for e in events if e.type == "SCAN"][0] + dns_event = [e for e in events if e.type == "DNS_NAME"][0] + + # json events + txt_file = module_test.scan.home / "output.json" + lines = list(module_test.scan.helpers.read_file(txt_file)) + assert lines + json_events = [json.loads(line) for line in lines] + scan_json = [e for e in json_events if e["type"] == "SCAN"] + dns_json = [e for e in json_events if e["type"] == "DNS_NAME"] + assert len(scan_json) == 2 + assert len(dns_json) == 1 + dns_json = dns_json[0] + scan = scan_json[0] + assert scan["data"]["name"] == module_test.scan.name + assert scan["data"]["id"] == module_test.scan.id + assert scan["id"] == module_test.scan.id + assert scan["uuid"] == str(module_test.scan.root_event.uuid) + assert scan["parent_uuid"] == str(module_test.scan.root_event.uuid) + assert scan["data"]["target"]["seeds"] == ["blacklanternsecurity.com"] + assert scan["data"]["target"]["whitelist"] == ["blacklanternsecurity.com"] + assert dns_json["data"] == dns_data + assert dns_json["id"] == str(dns_event.id) + assert dns_json["uuid"] == str(dns_event.uuid) + assert dns_json["parent_uuid"] == str(module_test.scan.root_event.uuid) + assert dns_json["discovery_context"] == context_data + assert dns_json["discovery_path"] == [context_data] + assert dns_json["parent_chain"] == [dns_json["uuid"]] + + # event objects reconstructed from json + scan_reconstructed = event_from_json(scan_json[0]) + dns_reconstructed = event_from_json(dns_json) + assert scan_reconstructed.data["name"] == module_test.scan.name + assert scan_reconstructed.data["id"] == module_test.scan.id + assert scan_reconstructed.uuid == scan_event.uuid + assert scan_reconstructed.parent_uuid == scan_event.uuid + assert scan_reconstructed.data["target"]["seeds"] == ["blacklanternsecurity.com"] + assert scan_reconstructed.data["target"]["whitelist"] == ["blacklanternsecurity.com"] + assert dns_reconstructed.data == dns_data + assert dns_reconstructed.uuid == dns_event.uuid + assert dns_reconstructed.parent_uuid == module_test.scan.root_event.uuid + assert dns_reconstructed.discovery_context == context_data + assert dns_reconstructed.discovery_path == [context_data] + assert dns_reconstructed.parent_chain == [dns_json["uuid"]] + + +class TestJSONSIEMFriendly(ModuleTestBase): + modules_overrides = ["json"] + config_overrides = {"modules": {"json": {"siem_friendly": True}}} + + def check(self, module_test, events): + txt_file = module_test.scan.home / "output.json" + lines = list(module_test.scan.helpers.read_file(txt_file)) + passed = False + for line in lines: + e = json.loads(line) + if e["data"] == {"DNS_NAME": "blacklanternsecurity.com"}: + passed = True + assert passed diff --git a/bbot/test/test_step_2/module_tests/test_module_leakix.py b/bbot/test/test_step_2/module_tests/test_module_leakix.py new file 
mode 100644 index 0000000000..f87dba6b50 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_leakix.py @@ -0,0 +1,49 @@ +from .base import ModuleTestBase + + +class TestLeakIX(ModuleTestBase): + config_overrides = {"modules": {"leakix": {"api_key": "asdf"}}} + + async def setup_before_prep(self, module_test): + module_test.httpx_mock.add_response( + url="https://leakix.net/host/1.1.1.1", + match_headers={"api-key": "asdf"}, + json={"title": "Not Found", "description": "Host not found"}, + ) + module_test.httpx_mock.add_response( + url="https://leakix.net/api/subdomains/blacklanternsecurity.com", + match_headers={"api-key": "asdf"}, + json=[ + { + "subdomain": "asdf.blacklanternsecurity.com", + "distinct_ips": 3, + "last_seen": "2023-04-02T09:38:30.02Z", + }, + ], + ) + + def check(self, module_test, events): + assert any(e.data == "asdf.blacklanternsecurity.com" for e in events), "Failed to detect subdomain" + + +class TestLeakIX_NoAPIKey(ModuleTestBase): + modules_overrides = ["leakix"] + + async def setup_before_prep(self, module_test): + module_test.httpx_mock.add_response( + url="https://leakix.net/host/1.1.1.1", + json={"title": "Not Found", "description": "Host not found"}, + ) + module_test.httpx_mock.add_response( + url="https://leakix.net/api/subdomains/blacklanternsecurity.com", + json=[ + { + "subdomain": "asdf.blacklanternsecurity.com", + "distinct_ips": 3, + "last_seen": "2023-04-02T09:38:30.02Z", + }, + ], + ) + + def check(self, module_test, events): + assert any(e.data == "asdf.blacklanternsecurity.com" for e in events), "Failed to detect subdomain" diff --git a/bbot/test/test_step_2/module_tests/test_module_mysql.py b/bbot/test/test_step_2/module_tests/test_module_mysql.py new file mode 100644 index 0000000000..4867c568d5 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_mysql.py @@ -0,0 +1,76 @@ +import asyncio +import time + +from .base import ModuleTestBase + + +class TestMySQL(ModuleTestBase): + targets = ["evilcorp.com"] + skip_distro_tests = True + + async def setup_before_prep(self, module_test): + process = await asyncio.create_subprocess_exec( + "docker", + "run", + "--name", + "bbot-test-mysql", + "--rm", + "-e", + "MYSQL_ROOT_PASSWORD=bbotislife", + "-e", + "MYSQL_DATABASE=bbot", + "-p", + "3306:3306", + "-d", + "mysql", + stdout=asyncio.subprocess.PIPE, + stderr=asyncio.subprocess.PIPE, + ) + stdout, stderr = await process.communicate() + + import aiomysql + + # wait for the container to start + start_time = time.time() + while True: + try: + conn = await aiomysql.connect(user="root", password="bbotislife", db="bbot", host="localhost") + conn.close() + break + except Exception as e: + if time.time() - start_time > 60: # timeout after 60 seconds + self.log.error("MySQL server did not start in time.") + raise e + await asyncio.sleep(1) + + if process.returncode != 0: + self.log.error(f"Failed to start MySQL server: {stderr.decode()}") + + async def check(self, module_test, events): + import aiomysql + + # Connect to the MySQL database + conn = await aiomysql.connect(user="root", password="bbotislife", db="bbot", host="localhost") + + try: + async with conn.cursor() as cur: + await cur.execute("SELECT * FROM event") + events = await cur.fetchall() + assert len(events) == 3, "No events found in MySQL database" + + await cur.execute("SELECT * FROM scan") + scans = await cur.fetchall() + assert len(scans) == 1, "No scans found in MySQL database" + + await cur.execute("SELECT * FROM target") + targets = await cur.fetchall() + assert 
len(targets) == 1, "No targets found in MySQL database" + finally: + conn.close() + process = await asyncio.create_subprocess_exec( + "docker", "stop", "bbot-test-mysql", stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE + ) + stdout, stderr = await process.communicate() + + if process.returncode != 0: + raise Exception(f"Failed to stop MySQL server: {stderr.decode()}") diff --git a/bbot/test/test_step_2/module_tests/test_module_myssl.py b/bbot/test/test_step_2/module_tests/test_module_myssl.py new file mode 100644 index 0000000000..b39f2711d5 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_myssl.py @@ -0,0 +1,39 @@ +from .base import ModuleTestBase + + +class TestMySSL(ModuleTestBase): + async def setup_after_prep(self, module_test): + module_test.module.abort_if = lambda e: False + module_test.httpx_mock.add_response( + url="https://myssl.com/api/v1/discover_sub_domain?domain=blacklanternsecurity.com", + json={ + "code": 0, + "data": [ + { + "ip": "1.2.3.4", + "port": "443", + "tips": [], + "level": 2, + "title": "", + "domain": "asdf.blacklanternsecurity.com", + "is_ats": True, + "is_pci": False, + "server": "", + "is_tlcp": False, + "duration": 46, + "icon_url": "", + "is_sslvpn": False, + "level_str": "A", + "ip_location": "美国", + "is_enable_gm": False, + "evaluate_date": "2022-03-13T02:38:08Z", + "demotion_reason": [], + "ignore_trust_level": "A", + "meet_gm_double_cert_statndard": False, + } + ], + }, + ) + + def check(self, module_test, events): + assert any(e.data == "asdf.blacklanternsecurity.com" for e in events), "Failed to detect subdomain" diff --git a/bbot/test/test_step_2/module_tests/test_module_neo4j.py b/bbot/test/test_step_2/module_tests/test_module_neo4j.py new file mode 100644 index 0000000000..c5df1e4748 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_neo4j.py @@ -0,0 +1,44 @@ +from .base import ModuleTestBase + + +class TestNeo4j(ModuleTestBase): + config_overrides = {"modules": {"neo4j": {"uri": "bolt://127.0.0.1:11111"}}} + + async def setup_before_prep(self, module_test): + # install neo4j + deps_pip = module_test.preloaded["neo4j"]["deps"]["pip"] + await module_test.scan.helpers.depsinstaller.pip_install(deps_pip) + + self.neo4j_used = False + + class MockResult: + async def data(s): + self.neo4j_used = True + return [ + { + "neo4j_id": "4:ee79a477-5f5b-445a-9def-7c051b2a533c:115", + "event_id": "DNS_NAME:c8fab50640cb87f8712d1998ecc78caf92b90f71", + } + ] + + class MockSession: + async def run(s, *args, **kwargs): + return MockResult() + + async def close(self): + pass + + class MockDriver: + def __init__(self, *args, **kwargs): + pass + + def session(self, *args, **kwargs): + return MockSession() + + async def close(self): + pass + + module_test.monkeypatch.setattr("neo4j.AsyncGraphDatabase.driver", MockDriver) + + def check(self, module_test, events): + assert self.neo4j_used is True diff --git a/bbot/test/test_step_2/module_tests/test_module_newsletters.py b/bbot/test/test_step_2/module_tests/test_module_newsletters.py new file mode 100644 index 0000000000..c5edd25141 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_newsletters.py @@ -0,0 +1,57 @@ +from .base import ModuleTestBase + +# import logging + + +class TestNewsletters(ModuleTestBase): + found_tgt = "http://127.0.0.1:8888/found" + missing_tgt = "http://127.0.0.1:8888/missing" + targets = [found_tgt, missing_tgt] + modules_overrides = ["speculate", "httpx", "newsletters"] + + html_with_newsletter = """ + + """ + + 
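+    # the page above is expected to advertise a newsletter signup, i.e. an
+    # email-type input along the lines of the following (assumed markup, not
+    # the original fixture):
+    # <input id="newsletter-email" name="email" type="email" placeholder="Enter email address"/>
+    # the page below deliberately omits any such field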
html_without_newsletter = """ + + """ + + async def setup_after_prep(self, module_test): + request_args = {"uri": "/found", "headers": {"test": "header"}} + respond_args = {"response_data": self.html_with_newsletter} + module_test.set_expect_requests(request_args, respond_args) + request_args = {"uri": "/missing", "headers": {"test": "header"}} + respond_args = {"response_data": self.html_without_newsletter} + module_test.set_expect_requests(request_args, respond_args) + + def check(self, module_test, events): + found = False + missing = True + for event in events: + # self.log.info(f"event type: {event.type}") + if event.type == "FINDING": + # self.log.info(f"event data: {event.data}") + # Verify Positive Result + if event.data["url"] == self.found_tgt: + found = True + # Verify Negative Result (should skip this statement if correct) + elif event.data["url"] == self.missing_tgt: + missing = False + assert found, "NEWSLETTER 'Found' Error - Expect status of True but got False" + assert missing, "NEWSLETTER 'Missing' Error - Expect status of True but got False" diff --git a/bbot/test/test_step_2/module_tests/test_module_nmap_xml.py b/bbot/test/test_step_2/module_tests/test_module_nmap_xml.py new file mode 100644 index 0000000000..b88595be01 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_nmap_xml.py @@ -0,0 +1,85 @@ +import xml.etree.ElementTree as ET + +from bbot.modules.base import BaseModule +from .base import ModuleTestBase + + +class TestNmap_XML(ModuleTestBase): + modules_overrides = ["nmap_xml", "speculate"] + targets = ["blacklanternsecurity.com", "127.0.0.3"] + config_overrides = {"dns": {"minimal": False}} + + class DummyModule(BaseModule): + watched_events = ["OPEN_TCP_PORT"] + _name = "dummy_module" + + async def handle_event(self, event): + if event.port == 80: + await self.emit_event( + {"host": str(event.host), "port": event.port, "protocol": "http", "banner": "Apache"}, + "PROTOCOL", + parent=event, + ) + elif event.port == 443: + await self.emit_event( + {"host": str(event.host), "port": event.port, "protocol": "https"}, "PROTOCOL", parent=event + ) + + async def setup_before_prep(self, module_test): + self.dummy_module = self.DummyModule(module_test.scan) + module_test.scan.modules["dummy_module"] = self.dummy_module + await module_test.mock_dns( + { + "blacklanternsecurity.com": {"A": ["127.0.0.1", "127.0.0.2"]}, + "3.0.0.127.in-addr.arpa": {"PTR": ["www.blacklanternsecurity.com"]}, + "www.blacklanternsecurity.com": {"A": ["127.0.0.1"]}, + } + ) + + def check(self, module_test, events): + nmap_xml_file = module_test.scan.modules["nmap_xml"].output_file + nmap_xml = open(nmap_xml_file).read() + + # Parse the XML + root = ET.fromstring(nmap_xml) + + # Expected IP addresses + expected_ips = {"127.0.0.1", "127.0.0.2", "127.0.0.3"} + found_ips = set() + + # Iterate over each host in the XML + for host in root.findall("host"): + # Get the IP address + address = host.find("address").get("addr") + found_ips.add(address) + + # Get hostnames if available + hostnames = sorted([hostname.get("name") for hostname in host.findall(".//hostname")]) + + # Get open ports and services + ports = [] + for port in host.findall(".//port"): + port_id = port.get("portid") + state = port.find("state").get("state") + if state == "open": + service_name = port.find("service").get("name") + service_product = port.find("service").get("product", "") + service_extrainfo = port.find("service").get("extrainfo", "") + ports.append((port_id, service_name, service_product, 
service_extrainfo)) + + # Sort ports for consistency + ports.sort() + + # Assertions + if address == "127.0.0.1": + assert hostnames == ["blacklanternsecurity.com", "www.blacklanternsecurity.com"] + assert ports == sorted([("80", "http", "Apache", "Apache"), ("443", "https", "", "")]) + elif address == "127.0.0.2": + assert hostnames == sorted(["blacklanternsecurity.com"]) + assert ports == sorted([("80", "http", "Apache", "Apache"), ("443", "https", "", "")]) + elif address == "127.0.0.3": + assert hostnames == [] # No hostnames for this IP + assert ports == sorted([("80", "http", "Apache", "Apache"), ("443", "https", "", "")]) + + # Assert that all expected IPs were found + assert found_ips == expected_ips diff --git a/bbot/test/test_step_2/module_tests/test_module_ntlm.py b/bbot/test/test_step_2/module_tests/test_module_ntlm.py new file mode 100644 index 0000000000..7b834ef2f9 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_ntlm.py @@ -0,0 +1,24 @@ +from .base import ModuleTestBase + + +class TestNTLM(ModuleTestBase): + targets = ["http://127.0.0.1:8888"] + modules_overrides = ["httpx", "ntlm"] + config_overrides = {"modules": {"ntlm": {"try_all": True}}} + + async def setup_after_prep(self, module_test): + request_args = {"uri": "/", "headers": {"test": "header"}} + module_test.set_expect_requests(request_args, {}) + request_args = { + "uri": "/oab/", + "headers": {"Authorization": "NTLM TlRMTVNTUAABAAAAl4II4gAAAAAAAAAAAAAAAAAAAAAKAGFKAAAADw=="}, + } + respond_args = { + "headers": { + "WWW-Authenticate": "NTLM TlRMTVNTUAACAAAABgAGADgAAAAVgoni89aZT4Q0mH0AAAAAAAAAAHYAdgA+AAAABgGxHQAAAA9WAE4ATwACAAYAVgBOAE8AAQAKAEUAWABDADAAMQAEABIAdgBuAG8ALgBsAG8AYwBhAGwAAwAeAEUAWABDADAAMQAuAHYAbgBvAC4AbABvAGMAYQBsAAUAEgB2AG4AbwAuAGwAbwBjAGEAbAAHAAgAXxo0p/6L2QEAAAAA" + } + } + module_test.set_expect_requests(request_args, respond_args) + + def check(self, module_test, events): + assert any(e.type == "FINDING" and "EXC01.vno.local" in e.data["description"] for e in events) diff --git a/bbot/test/test_step_2/module_tests/test_module_nuclei.py b/bbot/test/test_step_2/module_tests/test_module_nuclei.py new file mode 100644 index 0000000000..fe511c9b60 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_nuclei.py @@ -0,0 +1,184 @@ +from .base import ModuleTestBase + + +class TestNucleiManual(ModuleTestBase): + targets = ["http://127.0.0.1:8888"] + modules_overrides = ["httpx", "excavate", "nuclei"] + config_overrides = { + "web": { + "spider_distance": 1, + "spider_depth": 1, + }, + "modules": { + "nuclei": { + "version": "2.9.4", + "mode": "manual", + "concurrency": 2, + "ratelimit": 10, + "templates": "/tmp/.bbot_test/tools/nuclei-templates/http/miscellaneous/", + "interactsh_disable": True, + "directory_only": False, + } + }, + } + + test_html = """ + html> + + Index of /test + + +

+ <h1>Index of /test</h1>
+ <table>
+ <tr><th>Name</th><th>Last modified</th><th>Size</th></tr>
+ <tr><td><a href="/">Parent Directory</a></td><td>&nbsp;</td><td align="right">-</td></tr>
+ </table>
+ <address>Apache/2.4.38 (Debian) Server at <a href="http://127.0.0.1:8888/testmultipleruns.html">http://127.0.0.1:8888/testmultipleruns.html</a></address>
+ +""" + + async def setup_after_prep(self, module_test): + expect_args = {"method": "GET", "uri": "/"} + respond_args = {"response_data": self.test_html} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + expect_args = {"method": "GET", "uri": "/testmultipleruns.html"} + respond_args = {"response_data": "Copyright 1984"} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + def check(self, module_test, events): + first_run_detect = False + second_run_detect = False + for e in events: + if e.type == "FINDING": + if "Directory listing enabled" in e.data["description"]: + first_run_detect = True + elif "Copyright" in e.data["description"]: + second_run_detect = True + assert first_run_detect + assert second_run_detect + + +class TestNucleiSevere(TestNucleiManual): + modules_overrides = ["httpx", "nuclei"] + config_overrides = { + "modules": { + "nuclei": { + "mode": "severe", + "concurrency": 1, + "templates": "/tmp/.bbot_test/tools/nuclei-templates/vulnerabilities/generic/generic-linux-lfi.yaml", + } + }, + "interactsh_disable": True, + } + + async def setup_after_prep(self, module_test): + expect_args = {"method": "GET", "uri": "/etc/passwd"} + respond_args = {"response_data": "root:.*:0:0:"} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + def check(self, module_test, events): + assert any( + e.type == "VULNERABILITY" and "Generic Linux - Local File Inclusion" in e.data["description"] + for e in events + ) + + +class TestNucleiTechnology(TestNucleiManual): + config_overrides = { + "interactsh_disable": True, + "modules": {"nuclei": {"mode": "technology", "concurrency": 2, "tags": "apache"}}, + } + + async def setup_before_prep(self, module_test): + expect_args = {"method": "GET", "uri": "/"} + respond_args = { + "response_data": "", + "headers": {"Server": "Apache/2.4.52 (Ubuntu)"}, + } + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + def check(self, module_test, events): + assert any(e.type == "TECHNOLOGY" and "apache" in e.data["technology"].lower() for e in events) + + with open(module_test.scan.home / "debug.log") as f: + assert "Using Interactsh Server" not in f.read() + + +class TestNucleiBudget(TestNucleiManual): + config_overrides = { + "modules": { + "nuclei": { + "mode": "budget", + "concurrency": 1, + "tags": "spiderfoot", + "templates": "/tmp/.bbot_test/tools/nuclei-templates/exposed-panels/spiderfoot.yaml", + "interactsh_disable": True, + } + } + } + + async def setup_before_prep(self, module_test): + expect_args = {"method": "GET", "uri": "/"} + respond_args = {"response_data": "SpiderFoot

+ <p>support@spiderfoot.net</p>

"} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + def check(self, module_test, events): + assert any(e.type == "TECHNOLOGY" and "spider" in e.data["technology"] for e in events) + + +class TestNucleiRetries(TestNucleiManual): + config_overrides = { + "interactsh_disable": True, + "modules": {"nuclei": {"tags": "musictraveler"}}, + } + + async def setup_before_prep(self, module_test): + expect_args = {"method": "GET", "uri": "/"} + respond_args = { + "response_data": "content", + } + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + def check(self, module_test, events): + with open(module_test.scan.home / "debug.log") as f: + assert "-retries 0" in f.read() + + +class TestNucleiRetriesCustom(TestNucleiRetries): + config_overrides = { + "interactsh_disable": True, + "modules": {"nuclei": {"tags": "musictraveler", "retries": 1}}, + } + + def check(self, module_test, events): + with open(module_test.scan.home / "debug.log") as f: + assert "-retries 1" in f.read() + + +class TestNucleiCustomHeaders(TestNucleiManual): + custom_headers = {"testheader1": "test1", "testheader2": "test2"} + config_overrides = TestNucleiManual.config_overrides + config_overrides["web"]["http_headers"] = custom_headers + + async def setup_after_prep(self, module_test): + expect_args = {"method": "GET", "uri": "/", "headers": self.custom_headers} + respond_args = {"response_data": self.test_html} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + expect_args = {"method": "GET", "uri": "/testmultipleruns.html", "headers": {"nonexistent": "nope"}} + respond_args = {"response_data": "Copyright 1984"} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + def check(self, module_test, events): + first_run_detect = False + second_run_detect = False + for e in events: + if e.type == "FINDING": + if "Directory listing enabled" in e.data["description"]: + first_run_detect = True + elif "Copyright" in e.data["description"]: + second_run_detect = True + # we should find the first one because it requires our custom headers + assert first_run_detect + # the second one requires different headers, so we shouldn't find it + assert not second_run_detect diff --git a/bbot/test/test_step_2/module_tests/test_module_oauth.py b/bbot/test/test_step_2/module_tests/test_module_oauth.py new file mode 100644 index 0000000000..1e7078e840 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_oauth.py @@ -0,0 +1,231 @@ +from .base import ModuleTestBase + +from .test_module_azure_realm import TestAzure_Realm as Azure_Realm + + +class TestOAUTH(ModuleTestBase): + targets = ["evilcorp.com"] + config_overrides = {"scope": {"report_distance": 1}, "omit_event_types": []} + modules_overrides = ["azure_realm", "oauth"] + openid_config_azure = { + "token_endpoint": "https://login.windows.net/cc74fc12-4142-400e-a653-f98bdeadbeef/oauth2/token", + "token_endpoint_auth_methods_supported": ["client_secret_post", "private_key_jwt", "client_secret_basic"], + "jwks_uri": "https://login.windows.net/common/discovery/keys", + "response_modes_supported": ["query", "fragment", "form_post"], + "subject_types_supported": ["pairwise"], + "id_token_signing_alg_values_supported": ["RS256"], + "response_types_supported": ["code", "id_token", "code id_token", "token id_token", "token"], + "scopes_supported": ["openid"], + "issuer": "https://sts.windows.net/cc74fc12-4142-400e-a653-f98bdeadbeef/", + 
"microsoft_multi_refresh_token": True, + "authorization_endpoint": "https://login.windows.net/cc74fc12-4142-400e-a653-f98bdeadbeef/oauth2/authorize", + "device_authorization_endpoint": "https://login.windows.net/cc74fc12-4142-400e-a653-f98bdeadbeef/oauth2/devicecode", + "http_logout_supported": True, + "frontchannel_logout_supported": True, + "end_session_endpoint": "https://login.windows.net/cc74fc12-4142-400e-a653-f98bdeadbeef/oauth2/logout", + "claims_supported": [ + "sub", + "iss", + "cloud_instance_name", + "cloud_instance_host_name", + "cloud_graph_host_name", + "msgraph_host", + "aud", + "exp", + "iat", + "auth_time", + "acr", + "amr", + "nonce", + "email", + "given_name", + "family_name", + "nickname", + ], + "check_session_iframe": "https://login.windows.net/cc74fc12-4142-400e-a653-f98bdeadbeef/oauth2/checksession", + "userinfo_endpoint": "https://login.windows.net/cc74fc12-4142-400e-a653-f98bdeadbeef/openid/userinfo", + "kerberos_endpoint": "https://login.windows.net/cc74fc12-4142-400e-a653-f98bdeadbeef/kerberos", + "tenant_region_scope": "NA", + "cloud_instance_name": "microsoftonline.com", + "cloud_graph_host_name": "graph.windows.net", + "msgraph_host": "graph.microsoft.com", + "rbac_url": "https://pas.windows.net", + } + openid_config_okta = { + "issuer": "https://evilcorp.okta.com", + "authorization_endpoint": "https://evilcorp.okta.com/oauth2/v1/authorize", + "token_endpoint": "https://evilcorp.okta.com/oauth2/v1/token", + "userinfo_endpoint": "https://evilcorp.okta.com/oauth2/v1/userinfo", + "registration_endpoint": "https://evilcorp.okta.com/oauth2/v1/clients", + "jwks_uri": "https://evilcorp.okta.com/oauth2/v1/keys", + "response_types_supported": [ + "code", + "id_token", + "code id_token", + "code token", + "id_token token", + "code id_token token", + ], + "response_modes_supported": ["query", "fragment", "form_post", "okta_post_message"], + "grant_types_supported": [ + "authorization_code", + "implicit", + "refresh_token", + "password", + "urn:ietf:params:oauth:grant-type:device_code", + "urn:openid:params:grant-type:ciba", + ], + "subject_types_supported": ["public"], + "id_token_signing_alg_values_supported": ["RS256"], + "scopes_supported": ["openid", "email", "profile", "address", "phone", "offline_access", "groups"], + "token_endpoint_auth_methods_supported": [ + "client_secret_basic", + "client_secret_post", + "client_secret_jwt", + "private_key_jwt", + "none", + ], + "claims_supported": [ + "iss", + "ver", + "sub", + "aud", + "iat", + "exp", + "jti", + "auth_time", + "amr", + "idp", + "nonce", + "name", + "nickname", + "preferred_username", + "given_name", + "middle_name", + "family_name", + "email", + "email_verified", + "profile", + "zoneinfo", + "locale", + "address", + "phone_number", + "picture", + "website", + "gender", + "birthdate", + "updated_at", + "at_hash", + "c_hash", + ], + "code_challenge_methods_supported": ["S256"], + "introspection_endpoint": "https://evilcorp.okta.com/oauth2/v1/introspect", + "introspection_endpoint_auth_methods_supported": [ + "client_secret_basic", + "client_secret_post", + "client_secret_jwt", + "private_key_jwt", + "none", + ], + "revocation_endpoint": "https://evilcorp.okta.com/oauth2/v1/revoke", + "revocation_endpoint_auth_methods_supported": [ + "client_secret_basic", + "client_secret_post", + "client_secret_jwt", + "private_key_jwt", + "none", + ], + "end_session_endpoint": "https://evilcorp.okta.com/oauth2/v1/logout", + "request_parameter_supported": True, + "request_object_signing_alg_values_supported": [ + "HS256", 
+ "HS384", + "HS512", + "RS256", + "RS384", + "RS512", + "ES256", + "ES384", + "ES512", + ], + "device_authorization_endpoint": "https://evilcorp.okta.com/oauth2/v1/device/authorize", + "pushed_authorization_request_endpoint": "https://evilcorp.okta.com/oauth2/v1/par", + "backchannel_token_delivery_modes_supported": ["poll"], + "backchannel_authentication_request_signing_alg_values_supported": [ + "HS256", + "HS384", + "HS512", + "RS256", + "RS384", + "RS512", + "ES256", + "ES384", + "ES512", + ], + } + + async def setup_after_prep(self, module_test): + await module_test.mock_dns({"evilcorp.com": {"A": ["127.0.0.1"]}}) + module_test.httpx_mock.add_response( + url="https://login.microsoftonline.com/getuserrealm.srf?login=test@evilcorp.com", + json=Azure_Realm.response_json, + ) + module_test.httpx_mock.add_response( + url="https://login.windows.net/evilcorp.com/.well-known/openid-configuration", + json=self.openid_config_azure, + ) + module_test.httpx_mock.add_response( + url="https://evilcorp.okta.com/.well-known/openid-configuration", + json=self.openid_config_okta, + ) + module_test.httpx_mock.add_response( + url="https://login.windows.net/cc74fc12-4142-400e-a653-f98bdeadbeef/oauth2/token", + json={ + "error": "invalid_grant", + "error_description": "AADSTS9002313: Invalid request. Request is malformed or invalid.\r\nTrace ID: a3618b0d-d3b2-4669-96bc-ce414e202300\r\nCorrelation ID: fc54afc5-6f9d-4488-90ba-d8213515b847\r\nTimestamp: 2023-07-12 20:39:45Z", + "error_codes": [9002313], + "timestamp": "2023-07-12 20:39:45Z", + "trace_id": "a3618b0d-d3b2-4669-96bc-ce414e202300", + "correlation_id": "fc54afc5-6f9d-4488-90ba-d8213515b847", + "error_uri": "https://login.windows.net/error?code=9002313", + }, + status_code=400, + ) + module_test.httpx_mock.add_response( + url="https://evilcorp.okta.com/oauth2/v1/token", + json={ + "errorCode": "invalid_client", + "errorSummary": "Invalid value for 'client_id' parameter.", + "errorLink": "invalid_client", + "errorId": "oae06YVQDq4Qz-WEuP3dU14XQ", + "errorCauses": [], + }, + status_code=400, + ) + + def check(self, module_test, events): + assert any( + e.type == "FINDING" + and e.data["description"] + == "OpenID Connect Endpoint (domain: evilcorp.com) found at https://login.windows.net/evilcorp.com/.well-known/openid-configuration" + for e in events + ) + assert any( + e.type == "FINDING" + and e.data["description"] + == "OpenID Connect Endpoint (domain: evilcorp.com) found at https://evilcorp.okta.com/.well-known/openid-configuration" + for e in events + ) + assert any( + e.type == "FINDING" + and e.data["description"] + == "Potentially Sprayable OAUTH Endpoint (domain: evilcorp.com) at https://login.windows.net/cc74fc12-4142-400e-a653-f98bdeadbeef/oauth2/token" + for e in events + ) + assert any( + e.type == "FINDING" + and e.data["description"] + == "Potentially Sprayable OAUTH Endpoint (domain: evilcorp.com) at https://evilcorp.okta.com/oauth2/v1/token" + for e in events + ) + assert any(e.data == "https://sts.windows.net/cc74fc12-4142-400e-a653-f98bdeadbeef/" for e in events) diff --git a/bbot/test/test_step_2/module_tests/test_module_otx.py b/bbot/test/test_step_2/module_tests/test_module_otx.py new file mode 100644 index 0000000000..9c533ca96e --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_otx.py @@ -0,0 +1,27 @@ +from .base import ModuleTestBase + + +class TestOTX(ModuleTestBase): + async def setup_after_prep(self, module_test): + module_test.httpx_mock.add_response( + 
url="https://otx.alienvault.com/api/v1/indicators/domain/blacklanternsecurity.com/passive_dns", + json={ + "passive_dns": [ + { + "address": "2606:50c0:8000::153", + "first": "2021-10-28T20:23:08", + "last": "2022-08-24T18:29:49", + "hostname": "asdf.blacklanternsecurity.com", + "record_type": "AAAA", + "indicator_link": "/indicator/hostname/www.blacklanternsecurity.com", + "flag_url": "assets/images/flags/us.png", + "flag_title": "United States", + "asset_type": "hostname", + "asn": "AS54113 fastly", + } + ] + }, + ) + + def check(self, module_test, events): + assert any(e.data == "asdf.blacklanternsecurity.com" for e in events), "Failed to detect subdomain" diff --git a/bbot/test/test_step_2/module_tests/test_module_paramminer_cookies.py b/bbot/test/test_step_2/module_tests/test_module_paramminer_cookies.py new file mode 100644 index 0000000000..6c4ecda526 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_paramminer_cookies.py @@ -0,0 +1,69 @@ +from .test_module_paramminer_headers import Paramminer_Headers, tempwordlist, helper + + +class TestParamminer_Cookies(Paramminer_Headers): + modules_overrides = ["httpx", "paramminer_cookies"] + config_overrides = {"modules": {"paramminer_cookies": {"wordlist": tempwordlist(["junkcookie", "admincookie"])}}} + + cookies_body = """ + + the title + +

+ <p>Hello null!</p>

'; + + + """ + + cookies_body_match = """ + + the title + +

+ <p>Hello AAAAAAAAAAAAAA!</p>

'; + + + """ + + async def setup_after_prep(self, module_test): + module_test.scan.modules["paramminer_cookies"].rand_string = lambda *args, **kwargs: "AAAAAAAAAAAAAA" + module_test.monkeypatch.setattr( + helper.HttpCompare, "gen_cache_buster", lambda *args, **kwargs: {"AAAAAA": "1"} + ) + expect_args = {"headers": {"Cookie": "admincookie=AAAAAAAAAAAAAA"}} + respond_args = {"response_data": self.cookies_body_match} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + respond_args = {"response_data": self.cookies_body} + module_test.set_expect_requests(respond_args=respond_args) + + def check(self, module_test, events): + found_reflected_cookie = False + false_positive_match = False + + for e in events: + if e.type == "WEB_PARAMETER": + if "[Paramminer] Cookie: [admincookie] Reasons: [body] Reflection: [True]" in e.data["description"]: + found_reflected_cookie = True + + if "junkcookie" in e.data["description"]: + false_positive_match = True + + assert found_reflected_cookie, "Failed to find hidden reflected cookie parameter" + assert not false_positive_match, "Found word which was in wordlist but not a real match" + + +class TestParamminer_Cookies_noreflection(TestParamminer_Cookies): + cookies_body_match = """ + + the title + +

+ <p>Hello ADMINISTRATOR!</p>

'; + + + """ + + def check(self, module_test, events): + assert any( + e.type == "WEB_PARAMETER" + and "[Paramminer] Cookie: [admincookie] Reasons: [body] Reflection: [False]" in e.data["description"] + for e in events + ) diff --git a/bbot/test/test_step_2/module_tests/test_module_paramminer_getparams.py b/bbot/test/test_step_2/module_tests/test_module_paramminer_getparams.py new file mode 100644 index 0000000000..a3acf0e6a9 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_paramminer_getparams.py @@ -0,0 +1,278 @@ +from .test_module_paramminer_headers import Paramminer_Headers, tempwordlist, helper + + +class TestParamminer_Getparams(Paramminer_Headers): + modules_overrides = ["httpx", "paramminer_getparams"] + config_overrides = {"modules": {"paramminer_getparams": {"wordlist": tempwordlist(["canary", "id"])}}} + + getparam_body = """ + + the title + +

+ <p>Hello null!</p>

'; + + + """ + + getparam_body_match = """ + + the title + +

+ <p>Hello AAAAAAAAAAAAAA!</p>

'; + + + """ + + async def setup_after_prep(self, module_test): + module_test.scan.modules["paramminer_getparams"].rand_string = lambda *args, **kwargs: "AAAAAAAAAAAAAA" + module_test.monkeypatch.setattr( + helper.HttpCompare, "gen_cache_buster", lambda *args, **kwargs: {"AAAAAA": "1"} + ) + expect_args = {"query_string": b"id=AAAAAAAAAAAAAA&AAAAAA=1"} + respond_args = {"response_data": self.getparam_body_match} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + respond_args = {"response_data": self.getparam_body} + module_test.set_expect_requests(respond_args=respond_args) + + def check(self, module_test, events): + assert any( + e.type == "WEB_PARAMETER" + and "[Paramminer] Getparam: [id] Reasons: [body] Reflection: [True]" in e.data["description"] + for e in events + ) + assert not any( + e.type == "WEB_PARAMETER" and "[Paramminer] Getparam: [canary] Reasons: [body]" in e.data["description"] + for e in events + ) + + +class TestParamminer_Getparams_noreflection(TestParamminer_Getparams): + getparam_body_match = """ + + the title + +

+ <p>Hello ADMINISTRATOR!</p>

'; + + + """ + + def check(self, module_test, events): + assert any( + e.type == "WEB_PARAMETER" + and "[Paramminer] Getparam: [id] Reasons: [body] Reflection: [False]" in e.data["description"] + for e in events + ) + + +class TestParamminer_Getparams_singlewordlist(TestParamminer_Getparams): + config_overrides = {"modules": {"paramminer_getparams": {"wordlist": tempwordlist(["id"])}}} + + +class TestParamminer_Getparams_boring_off(TestParamminer_Getparams): + config_overrides = { + "modules": { + "paramminer_getparams": {"skip_boring_words": False, "wordlist": tempwordlist(["canary", "utm_term"])} + } + } + + async def setup_after_prep(self, module_test): + module_test.scan.modules["paramminer_getparams"].rand_string = lambda *args, **kwargs: "AAAAAAAAAAAAAA" + module_test.monkeypatch.setattr( + helper.HttpCompare, "gen_cache_buster", lambda *args, **kwargs: {"AAAAAA": "1"} + ) + expect_args = {"query_string": b"utm_term=AAAAAAAAAAAAAA&AAAAAA=1"} + respond_args = {"response_data": self.getparam_body_match} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + respond_args = {"response_data": self.getparam_body} + module_test.set_expect_requests(respond_args=respond_args) + + def check(self, module_test, events): + emitted_boring_parameter = False + for e in events: + if e.type == "WEB_PARAMETER": + if "utm_term" in e.data["description"]: + emitted_boring_parameter = True + assert emitted_boring_parameter, "failed to emit boring parameter with skip_boring_words disabled" + + +class TestParamminer_Getparams_boring_on(TestParamminer_Getparams_boring_off): + config_overrides = { + "modules": { + "paramminer_getparams": {"skip_boring_words": True, "wordlist": tempwordlist(["canary", "boring"])} + } + } + + def check(self, module_test, events): + emitted_boring_parameter = False + + for e in events: + if e.type == "WEB_PARAMETER": + if "boring" in e.data["description"]: + emitted_boring_parameter = True + + assert not emitted_boring_parameter, "emitted boring parameter with skip_boring_words enabled" + + +class TestParamminer_Getparams_finish(Paramminer_Headers): + modules_overrides = ["httpx", "excavate", "paramminer_getparams"] + config_overrides = { + "modules": {"paramminer_getparams": {"wordlist": tempwordlist(["canary", "canary2"]), "recycle_words": True}} + } + + targets = ["http://127.0.0.1:8888/test1.php", "http://127.0.0.1:8888/test2.php"] + + test_1_html = """ +paramstest2 + """ + + test_2_html = """ +

+<p>Hello</p>

+ """ + + test_2_html_match = """ +

+<p>HackThePlanet!</p>

+ """ + + async def setup_after_prep(self, module_test): + module_test.scan.modules["paramminer_getparams"].rand_string = lambda *args, **kwargs: "AAAAAAAAAAAAAA" + module_test.monkeypatch.setattr( + helper.HttpCompare, "gen_cache_buster", lambda *args, **kwargs: {"AAAAAA": "1"} + ) + + expect_args = {"uri": "/test2.php", "query_string": b"abcd1234=AAAAAAAAAAAAAA&AAAAAA=1"} + respond_args = {"response_data": self.test_2_html_match} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + expect_args = {"uri": "/test2.php"} + respond_args = {"response_data": self.test_2_html} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + expect_args = {"uri": "/test1.php", "query_string": b"abcd1234=AAAAAAAAAAAAAA&AAAAAA=1"} + respond_args = {"response_data": self.test_2_html_match} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + expect_args = {"uri": "/test1.php"} + respond_args = {"response_data": self.test_1_html} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + def check(self, module_test, events): + excavate_extracted_web_parameter = False + found_hidden_getparam_recycled = False + emitted_excavate_paramminer_duplicate = False + + for e in events: + if e.type == "WEB_PARAMETER": + if ( + "http://127.0.0.1:8888/test2.php" in e.data["url"] + and "HTTP Extracted Parameter [abcd1234] (HTML Tags Submodule)" in e.data["description"] + ): + excavate_extracted_web_parameter = True + + if ( + "http://127.0.0.1:8888/test1.php" in e.data["url"] + and "[Paramminer] Getparam: [abcd1234] Reasons: [body] Reflection: [False]" + in e.data["description"] + ): + found_hidden_getparam_recycled = True + + if ( + "http://127.0.0.1:8888/test2.php" in e.data["url"] + and "[Paramminer] Getparam: [abcd1234] Reasons: [body] Reflection: [False]" + in e.data["description"] + ): + emitted_excavate_paramminer_duplicate = True + + assert excavate_extracted_web_parameter, "Excavate failed to extract GET parameter" + assert found_hidden_getparam_recycled, "Failed to find hidden GET parameter" + # the fact that it is a duplicate is OK, because it still won't be consumed mutltiple times. But we do want to make sure both modules try to emit it + assert emitted_excavate_paramminer_duplicate, "Paramminer emitted duplicate already found by excavate" + + +class TestParamminer_Getparams_xmlspeculative(Paramminer_Headers): + targets = ["http://127.0.0.1:8888/"] + modules_overrides = ["httpx", "excavate", "paramminer_getparams"] + config_overrides = { + "modules": {"paramminer_getparams": {"wordlist": tempwordlist(["data", "common"]), "recycle_words": False}} + } + getparam_extract_xml = """ + + 1 + 1 + 1 + + """ + + getparam_speculative_used = """ + +

+<p>secret parameter used</p>

+ + """ + + async def setup_after_prep(self, module_test): + module_test.scan.modules["paramminer_getparams"].rand_string = lambda *args, **kwargs: "AAAAAAAAAAAAAA" + module_test.monkeypatch.setattr( + helper.HttpCompare, "gen_cache_buster", lambda *args, **kwargs: {"AAAAAA": "1"} + ) + expect_args = {"query_string": b"obscureParameter=AAAAAAAAAAAAAA&AAAAAA=1"} + respond_args = {"response_data": self.getparam_speculative_used} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + expect_args = {"query_string": b"data=AAAAAAAAAAAAAA&obscureParameter=AAAAAAAAAAAAAA&AAAAAA=1"} + respond_args = {"response_data": self.getparam_speculative_used} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + respond_args = {"response_data": self.getparam_extract_xml, "headers": {"Content-Type": "application/xml"}} + module_test.set_expect_requests(respond_args=respond_args) + + def check(self, module_test, events): + excavate_discovered_speculative = False + paramminer_used_speculative = False + for e in events: + if e.type == "WEB_PARAMETER": + if ( + "HTTP Extracted Parameter (speculative from xml content) [obscureParameter]" + in e.data["description"] + ): + excavate_discovered_speculative = True + + if ( + "[Paramminer] Getparam: [obscureParameter] Reasons: [header,body] Reflection: [False]" + in e.data["description"] + ): + paramminer_used_speculative = True + + assert excavate_discovered_speculative, "Excavate failed to discover speculative xml parameter" + assert paramminer_used_speculative, "Paramminer failed to confirm speculative GET parameter" + + +class TestParamminer_Getparams_filter_static(TestParamminer_Getparams_finish): + targets = ["http://127.0.0.1:8888/test1.php", "http://127.0.0.1:8888/test2.pdf"] + + test_1_html = """ + paramstest2 + """ + + def check(self, module_test, events): + found_hidden_getparam_recycled = False + emitted_excavate_paramminer_duplicate = False + + for e in events: + if e.type == "WEB_PARAMETER": + if ( + "http://127.0.0.1:8888/test1.php" in e.data["url"] + and "[Paramminer] Getparam: [abcd1234] Reasons: [body] Reflection: [False]" + in e.data["description"] + ): + found_hidden_getparam_recycled = True + + if ( + "http://127.0.0.1:8888/test2.pdf" in e.data["url"] + and "[Paramminer] Getparam: [abcd1234] Reasons: [body] Reflection: [False]" + in e.data["description"] + ): + emitted_excavate_paramminer_duplicate = True + + assert found_hidden_getparam_recycled, "Failed to find hidden GET parameter" + assert not emitted_excavate_paramminer_duplicate, "Paramminer emitted parameter for static URL" diff --git a/bbot/test/test_step_2/module_tests/test_module_paramminer_headers.py b/bbot/test/test_step_2/module_tests/test_module_paramminer_headers.py new file mode 100644 index 0000000000..20499abc06 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_paramminer_headers.py @@ -0,0 +1,155 @@ +from bbot.core.helpers import helper + +from .base import ModuleTestBase, tempwordlist + + +class Paramminer_Headers(ModuleTestBase): + targets = ["http://127.0.0.1:8888"] + modules_overrides = ["httpx", "paramminer_headers"] + config_overrides = {"modules": {"paramminer_headers": {"wordlist": tempwordlist(["junkword1", "tracestate"])}}} + + headers_body = """ + + the title + +

+<p>Hello null!</p>

'; + + + """ + + headers_body_match = """ + + the title + +

+<p>Hello AAAAAAAAAAAAAA!</p>

'; + + + """ + + async def setup_after_prep(self, module_test): + module_test.scan.modules["paramminer_headers"].rand_string = lambda *args, **kwargs: "AAAAAAAAAAAAAA" + module_test.monkeypatch.setattr( + helper.HttpCompare, "gen_cache_buster", lambda *args, **kwargs: {"AAAAAA": "1"} + ) + expect_args = {"headers": {"tracestate": "AAAAAAAAAAAAAA"}} + respond_args = {"response_data": self.headers_body_match} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + respond_args = {"response_data": self.headers_body} + module_test.set_expect_requests(respond_args=respond_args) + + def check(self, module_test, events): + found_reflected_header = False + false_positive_match = False + + for e in events: + if e.type == "WEB_PARAMETER": + if "[Paramminer] Header: [tracestate] Reasons: [body] Reflection: [True]" in e.data["description"]: + found_reflected_header = True + + if "junkword1" in e.data["description"]: + false_positive_match = True + + assert found_reflected_header, "Failed to find hidden reflected header parameter" + assert not false_positive_match, "Found word which was in wordlist but not a real match" + + +class TestParamminer_Headers(Paramminer_Headers): + pass + + +class TestParamminer_Headers_noreflection(Paramminer_Headers): + found_nonreflected_header = False + + headers_body_match = """ + + the title + +

+<p>Hello Administrator!</p>

'; + + + """ + + def check(self, module_test, events): + for e in events: + if e.type == "WEB_PARAMETER": + if "[Paramminer] Header: [tracestate] Reasons: [body] Reflection: [False]" in e.data["description"]: + found_nonreflected_header = True + + assert found_nonreflected_header, "Failed to find hidden non-reflected header parameter" + + +class TestParamminer_Headers_extract(Paramminer_Headers): + modules_overrides = ["httpx", "paramminer_headers", "excavate"] + config_overrides = { + "modules": { + "paramminer_headers": {"wordlist": tempwordlist(["junkword1", "tracestate"]), "recycle_words": True} + } + } + + headers_body = """ + + the title + + Click Me + + + """ + + headers_body_match = """ + + the title + + Click Me + Click Me +

+<p>Secret param "foo" found with value: AAAAAAAAAAAAAA</p>

+ + + """ + + async def setup_after_prep(self, module_test): + module_test.scan.modules["paramminer_headers"].rand_string = lambda *args, **kwargs: "AAAAAAAAAAAAAA" + module_test.monkeypatch.setattr( + helper.HttpCompare, "gen_cache_buster", lambda *args, **kwargs: {"AAAAAA": "1"} + ) + expect_args = {"headers": {"foo": "AAAAAAAAAAAAAA"}} + respond_args = {"response_data": self.headers_body_match} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + respond_args = {"response_data": self.headers_body} + module_test.set_expect_requests(respond_args=respond_args) + + def check(self, module_test, events): + excavate_extracted_web_parameter = False + used_recycled_parameter = False + + for e in events: + if e.type == "WEB_PARAMETER": + if "HTTP Extracted Parameter [foo] (HTML Tags Submodule)" in e.data["description"]: + excavate_extracted_web_parameter = True + if "[Paramminer] Header: [foo] Reasons: [body] Reflection: [True]" in e.data["description"]: + used_recycled_parameter = True + + assert excavate_extracted_web_parameter, "Excavate failed to extract WEB_PARAMETER" + assert used_recycled_parameter, "Failed to find header with recycled parameter" + + +class TestParamminer_Headers_extract_norecycle(TestParamminer_Headers_extract): + modules_overrides = ["httpx", "excavate"] + config_overrides = {} + + async def setup_after_prep(self, module_test): + respond_args = {"response_data": self.headers_body} + module_test.set_expect_requests(respond_args=respond_args) + + def check(self, module_test, events): + excavate_extracted_web_parameter = False + + for e in events: + if e.type == "WEB_PARAMETER": + if "HTTP Extracted Parameter [foo] (HTML Tags Submodule)" in e.data["description"]: + excavate_extracted_web_parameter = True + + assert not excavate_extracted_web_parameter, ( + "Excavate extract WEB_PARAMETER despite disabling parameter extraction" + ) diff --git a/bbot/test/test_step_2/module_tests/test_module_passivetotal.py b/bbot/test/test_step_2/module_tests/test_module_passivetotal.py new file mode 100644 index 0000000000..55be613468 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_passivetotal.py @@ -0,0 +1,23 @@ +from .base import ModuleTestBase + + +class TestPassiveTotal(ModuleTestBase): + config_overrides = {"modules": {"passivetotal": {"api_key": "jon@bls.fakedomain:asdf"}}} + + async def setup_before_prep(self, module_test): + module_test.httpx_mock.add_response( + url="https://api.passivetotal.org/v2/account/quota", + match_headers={"Authorization": "Basic am9uQGJscy5mYWtlZG9tYWluOmFzZGY="}, + json={"user": {"counts": {"search_api": 10}, "limits": {"search_api": 20}}}, + ) + module_test.httpx_mock.add_response( + url="https://api.passivetotal.org/v2/enrichment/subdomains?query=blacklanternsecurity.com", + match_headers={"Authorization": "Basic am9uQGJscy5mYWtlZG9tYWluOmFzZGY="}, + json={"subdomains": ["asdf"]}, + ) + + async def setup_after_prep(self, module_test): + module_test.monkeypatch.setattr(module_test.scan.modules["passivetotal"], "abort_if", lambda e: False) + + def check(self, module_test, events): + assert any(e.data == "asdf.blacklanternsecurity.com" for e in events), "Failed to detect subdomain" diff --git a/bbot/test/test_step_2/module_tests/test_module_pgp.py b/bbot/test/test_step_2/module_tests/test_module_pgp.py new file mode 100644 index 0000000000..dc493d7b52 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_pgp.py @@ -0,0 +1,35 @@ +from .base import ModuleTestBase + + +class 
TestPGP(ModuleTestBase):
+    web_body = """<html>
+<head>
+<title>Search results for 'blacklanternsecurity.com'</title>
+</head>
+<body>
+<h1>Search results for 'blacklanternsecurity.com'</h1>
+<pre>Type bits/keyID            cr. time   exp time   key expir
+
+pub eddsa263/0xd4e98af823deadbeef 2022-09-14T15:11:31Z
+
+uid Asdf <asdf@blacklanternsecurity.com>
+sig  sig  0xd4e98af823deadbeef 2022-09-14T15:11:31Z 2024-09-14T17:00:00Z ____________________ [selfsig]
+</pre>
+</body>
+</html>
+""" + + async def setup_before_prep(self, module_test): + module_test.httpx_mock.add_response( + url="https://keyserver.ubuntu.com/pks/lookup?fingerprint=on&op=vindex&search=blacklanternsecurity.com", + text=self.web_body, + ) + + def check(self, module_test, events): + assert any(e.data == "asdf@blacklanternsecurity.com" for e in events), "Failed to detect email" diff --git a/bbot/test/test_step_2/module_tests/test_module_portfilter.py b/bbot/test/test_step_2/module_tests/test_module_portfilter.py new file mode 100644 index 0000000000..7ffd106a71 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_portfilter.py @@ -0,0 +1,48 @@ +from .base import ModuleTestBase + + +class TestPortfilter_disabled(ModuleTestBase): + modules_overrides = [] + + async def setup_before_prep(self, module_test): + from bbot.modules.base import BaseModule + + class DummyModule(BaseModule): + _name = "dummy_module" + watched_events = ["DNS_NAME"] + + async def handle_event(self, event): + if event.type == "DNS_NAME" and event.data == "blacklanternsecurity.com": + await self.emit_event( + "www.blacklanternsecurity.com:443", + "OPEN_TCP_PORT", + parent=event, + tags=["cdn-ip", "cdn-amazon"], + ) + # when portfilter is enabled, this should be filtered out + await self.emit_event( + "www.blacklanternsecurity.com:8080", + "OPEN_TCP_PORT", + parent=event, + tags=["cdn-ip", "cdn-amazon"], + ) + await self.emit_event("www.blacklanternsecurity.com:21", "OPEN_TCP_PORT", parent=event) + + module_test.scan.modules["dummy_module"] = DummyModule(module_test.scan) + + def check(self, module_test, events): + open_ports = {event.data for event in events if event.type == "OPEN_TCP_PORT"} + assert open_ports == { + "www.blacklanternsecurity.com:443", + "www.blacklanternsecurity.com:8080", + "www.blacklanternsecurity.com:21", + } + + +class TestPortfilter_enabled(TestPortfilter_disabled): + modules_overrides = ["portfilter"] + + def check(self, module_test, events): + open_ports = {event.data for event in events if event.type == "OPEN_TCP_PORT"} + # we should be missing the 8080 port because it's a CDN and not in portfilter's allowed list of open ports + assert open_ports == {"www.blacklanternsecurity.com:443", "www.blacklanternsecurity.com:21"} diff --git a/bbot/test/test_step_2/module_tests/test_module_portscan.py b/bbot/test/test_step_2/module_tests/test_module_portscan.py new file mode 100644 index 0000000000..06a2fcef40 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_portscan.py @@ -0,0 +1,157 @@ +from .base import ModuleTestBase + + +class TestPortscan(ModuleTestBase): + targets = [ + "www.evilcorp.com", + "evilcorp.com", + "8.8.8.8/32", + "8.8.8.8/24", + "8.8.4.4", + "asdf.evilcorp.net", + "8.8.4.4/24", + ] + scan_name = "test_portscan" + config_overrides = {"modules": {"portscan": {"ports": "443", "wait": 1}}, "dns": {"minimal": False}} + + masscan_output_1 = """{ "ip": "8.8.8.8", "timestamp": "1680197558", "ports": [ {"port": 443, "proto": "tcp", "status": "open", "reason": "syn-ack", "ttl": 54} ] }""" + masscan_output_2 = """{ "ip": "8.8.4.5", "timestamp": "1680197558", "ports": [ {"port": 80, "proto": "tcp", "status": "open", "reason": "syn-ack", "ttl": 54} ] }""" + masscan_output_3 = """{ "ip": "8.8.4.6", "timestamp": "1680197558", "ports": [ {"port": 631, "proto": "tcp", "status": "open", "reason": "syn-ack", "ttl": 54} ] }""" + + masscan_output_ping = """{ "ip": "8.8.8.8", "timestamp": "1719862594", "ports": [ {"port": 0, "proto": "icmp", "status": "open", "reason": "none", "ttl": 54} ] 
}""" + + async def setup_after_prep(self, module_test): + from bbot.modules.base import BaseModule + + class DummyModule(BaseModule): + _name = "dummy_module" + watched_events = ["*"] + + async def handle_event(self, event): + if event.type == "DNS_NAME": + if "dummy" not in event.host: + await self.emit_event(f"dummy.{event.data}", "DNS_NAME", parent=event) + + module_test.scan.modules["dummy_module"] = DummyModule(module_test.scan) + + await module_test.mock_dns( + { + "www.evilcorp.com": {"A": ["8.8.8.8"]}, + "evilcorp.com": {"A": ["8.8.8.8"]}, + "asdf.evilcorp.net": {"A": ["8.8.4.5"]}, + "dummy.asdf.evilcorp.net": {"A": ["8.8.4.5"]}, + "dummy.evilcorp.com": {"A": ["8.8.4.6"]}, + "dummy.www.evilcorp.com": {"A": ["8.8.4.4"]}, + } + ) + + self.syn_scanned = [] + self.ping_scanned = [] + self.syn_runs = 0 + self.ping_runs = 0 + + async def run_masscan(command, *args, **kwargs): + if "masscan" in command[:2]: + targets = open(command[11]).read().splitlines() + yield "[" + if "--ping" in command: + self.ping_runs += 1 + self.ping_scanned += targets + yield self.masscan_output_ping + else: + self.syn_runs += 1 + self.syn_scanned += targets + if "8.8.8.0/24" in targets or "8.8.8.8/32" in targets: + yield self.masscan_output_1 + if "8.8.4.0/24" in targets: + yield self.masscan_output_2 + yield self.masscan_output_3 + yield "]" + else: + async for l in module_test.scan.helpers.run_live(command, *args, **kwargs): + yield l + + module_test.monkeypatch.setattr(module_test.scan.helpers, "run_live", run_masscan) + + def check(self, module_test, events): + assert set(self.syn_scanned) == {"8.8.8.0/24", "8.8.4.0/24"} + assert set(self.ping_scanned) == set() + assert self.syn_runs == 1 + assert self.ping_runs == 0 + assert 1 == len( + [e for e in events if e.type == "DNS_NAME" and e.data == "evilcorp.com" and str(e.module) == "TARGET"] + ) + assert 1 == len( + [e for e in events if e.type == "DNS_NAME" and e.data == "www.evilcorp.com" and str(e.module) == "TARGET"] + ) + assert 1 == len( + [e for e in events if e.type == "DNS_NAME" and e.data == "asdf.evilcorp.net" and str(e.module) == "TARGET"] + ) + assert 1 == len( + [ + e + for e in events + if e.type == "DNS_NAME" and e.data == "dummy.evilcorp.com" and str(e.module) == "dummy_module" + ] + ) + assert 1 == len( + [ + e + for e in events + if e.type == "DNS_NAME" and e.data == "dummy.www.evilcorp.com" and str(e.module) == "dummy_module" + ] + ) + assert 1 == len( + [ + e + for e in events + if e.type == "DNS_NAME" and e.data == "dummy.asdf.evilcorp.net" and str(e.module) == "dummy_module" + ] + ) + # the reason these numbers aren't exactly predictable is because we can't predict which one arrives first + # to the portscan module. Sometimes, one that would normally be deduped is force-emitted because it led to a new open port. 
+ assert 2 <= len([e for e in events if e.type == "IP_ADDRESS" and e.data == "8.8.8.8"]) <= 4 + assert 2 <= len([e for e in events if e.type == "IP_ADDRESS" and e.data == "8.8.4.4"]) <= 4 + assert 2 <= len([e for e in events if e.type == "IP_ADDRESS" and e.data == "8.8.4.5"]) <= 4 + assert 2 <= len([e for e in events if e.type == "IP_ADDRESS" and e.data == "8.8.4.6"]) <= 4 + assert 1 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "8.8.8.8:443"]) + assert 1 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "8.8.4.5:80"]) + assert 1 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "8.8.4.6:631"]) + assert 1 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "evilcorp.com:443"]) + assert 1 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "www.evilcorp.com:443"]) + assert 1 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "asdf.evilcorp.net:80"]) + assert 1 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "dummy.asdf.evilcorp.net:80"]) + assert 1 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "dummy.evilcorp.com:631"]) + assert not any(e for e in events if e.type == "OPEN_TCP_PORT" and e.host == "dummy.www.evilcorp.com") + + +class TestPortscanPingFirst(TestPortscan): + modules_overrides = {"portscan"} + config_overrides = {"modules": {"portscan": {"ports": "443", "wait": 1, "ping_first": True}}} + + def check(self, module_test, events): + assert set(self.syn_scanned) == {"8.8.8.8/32"} + assert set(self.ping_scanned) == {"8.8.8.0/24", "8.8.4.0/24"} + assert self.syn_runs == 1 + assert self.ping_runs == 1 + open_port_events = [e for e in events if e.type == "OPEN_TCP_PORT"] + assert len(open_port_events) == 3 + assert {e.data for e in open_port_events} == {"8.8.8.8:443", "evilcorp.com:443", "www.evilcorp.com:443"} + + +class TestPortscanPingOnly(TestPortscan): + modules_overrides = {"portscan"} + config_overrides = {"modules": {"portscan": {"ports": "443", "wait": 1, "ping_only": True}}} + + targets = ["8.8.8.8/24", "8.8.4.4/24"] + + def check(self, module_test, events): + assert set(self.syn_scanned) == set() + assert set(self.ping_scanned) == {"8.8.8.0/24", "8.8.4.0/24"} + assert self.syn_runs == 0 + assert self.ping_runs == 1 + open_port_events = [e for e in events if e.type == "OPEN_TCP_PORT"] + assert len(open_port_events) == 0 + ip_events = [e for e in events if e.type == "IP_ADDRESS"] + assert len(ip_events) == 1 + assert {e.data for e in ip_events} == {"8.8.8.8"} diff --git a/bbot/test/test_step_2/module_tests/test_module_postgres.py b/bbot/test/test_step_2/module_tests/test_module_postgres.py new file mode 100644 index 0000000000..ea6c00210c --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_postgres.py @@ -0,0 +1,74 @@ +import time +import asyncio + +from .base import ModuleTestBase + + +class TestPostgres(ModuleTestBase): + targets = ["evilcorp.com"] + skip_distro_tests = True + + async def setup_before_prep(self, module_test): + process = await asyncio.create_subprocess_exec( + "docker", + "run", + "--name", + "bbot-test-postgres", + "--rm", + "-e", + "POSTGRES_PASSWORD=bbotislife", + "-e", + "POSTGRES_USER=postgres", + "-p", + "5432:5432", + "-d", + "postgres", + ) + + import asyncpg + + # wait for the container to start + start_time = time.time() + while True: + try: + # Connect to the default 'postgres' database to create 'bbot' + conn = await asyncpg.connect( + user="postgres", password="bbotislife", 
database="postgres", host="127.0.0.1" + ) + await conn.execute("CREATE DATABASE bbot") + await conn.close() + break + except asyncpg.exceptions.DuplicateDatabaseError: + # If the database already exists, break the loop + break + except Exception as e: + if time.time() - start_time > 60: # timeout after 60 seconds + self.log.error("PostgreSQL server did not start in time.") + raise e + await asyncio.sleep(1) + + if process.returncode != 0: + self.log.error("Failed to start PostgreSQL server") + + async def check(self, module_test, events): + import asyncpg + + # Connect to the PostgreSQL database + conn = await asyncpg.connect(user="postgres", password="bbotislife", database="bbot", host="127.0.0.1") + + try: + events = await conn.fetch("SELECT * FROM event") + assert len(events) == 3, "No events found in PostgreSQL database" + scans = await conn.fetch("SELECT * FROM scan") + assert len(scans) == 1, "No scans found in PostgreSQL database" + targets = await conn.fetch("SELECT * FROM target") + assert len(targets) == 1, "No targets found in PostgreSQL database" + finally: + await conn.close() + process = await asyncio.create_subprocess_exec( + "docker", "stop", "bbot-test-postgres", stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE + ) + stdout, stderr = await process.communicate() + + if process.returncode != 0: + raise Exception(f"Failed to stop PostgreSQL server: {stderr.decode()}") diff --git a/bbot/test/test_step_2/module_tests/test_module_postman.py b/bbot/test/test_step_2/module_tests/test_module_postman.py new file mode 100644 index 0000000000..d5b9cb3f2c --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_postman.py @@ -0,0 +1,448 @@ +from .base import ModuleTestBase + + +class TestPostman(ModuleTestBase): + config_overrides = {"modules": {"postman": {"api_key": "asdf"}}} + modules_overrides = ["postman", "speculate"] + + async def setup_before_prep(self, module_test): + module_test.httpx_mock.add_response( + url="https://api.getpostman.com/me", + match_headers={"X-Api-Key": "asdf"}, + json={ + "user": { + "id": 000000, + "username": "test_key", + "email": "blacklanternsecurity@test.com", + "fullName": "Test Key", + "avatar": "", + "isPublic": True, + "teamId": 0, + "teamDomain": "", + "roles": ["user"], + }, + "operations": [ + {"name": "api_object_usage", "limit": 3, "usage": 0, "overage": 0}, + {"name": "collection_run_limit", "limit": 25, "usage": 0, "overage": 0}, + {"name": "file_storage_limit", "limit": 20, "usage": 0, "overage": 0}, + {"name": "flow_count", "limit": 5, "usage": 0, "overage": 0}, + {"name": "flow_requests", "limit": 5000, "usage": 0, "overage": 0}, + {"name": "performance_test_limit", "limit": 25, "usage": 0, "overage": 0}, + {"name": "postbot_calls", "limit": 50, "usage": 0, "overage": 0}, + {"name": "reusable_packages", "limit": 3, "usage": 0, "overage": 0}, + {"name": "test_data_retrieval", "limit": 1000, "usage": 0, "overage": 0}, + {"name": "test_data_storage", "limit": 10, "usage": 0, "overage": 0}, + {"name": "mock_usage", "limit": 1000, "usage": 0, "overage": 0}, + {"name": "monitor_request_runs", "limit": 1000, "usage": 0, "overage": 0}, + {"name": "api_usage", "limit": 1000, "usage": 0, "overage": 0}, + ], + }, + ) + + async def setup_after_prep(self, module_test): + await module_test.mock_dns( + {"blacklanternsecurity.com": {"A": ["127.0.0.99"]}, "github.com": {"A": ["127.0.0.99"]}} + ) + module_test.httpx_mock.add_response( + url="https://www.postman.com/_api/ws/proxy", + match_content=b'{"service": "search", "method": 
"POST", "path": "/search-all", "body": {"queryIndices": ["collaboration.workspace"], "queryText": "blacklanternsecurity", "size": 25, "from": 0, "clientTraceId": "", "requestOrigin": "srp", "mergeEntities": "true", "nonNestedRequests": "true", "domain": "public"}}', + json={ + "data": [ + { + "score": 611.41156, + "normalizedScore": 23, + "document": { + "watcherCount": 6, + "apiCount": 0, + "forkCount": 0, + "isblacklisted": "false", + "createdAt": "2021-06-15T14:03:51", + "publishertype": "team", + "publisherHandle": "blacklanternsecurity", + "id": "11498add-357d-4bc5-a008-0a2d44fb8829", + "slug": "bbot-public", + "updatedAt": "2024-07-30T11:00:35", + "entityType": "workspace", + "visibilityStatus": "public", + "forkcount": "0", + "tags": [], + "createdat": "2021-06-15T14:03:51", + "forkLabel": "", + "publisherName": "blacklanternsecurity", + "name": "BlackLanternSecurity BBOT [Public]", + "dependencyCount": 7, + "collectionCount": 6, + "warehouse__updated_at": "2024-07-30 11:00:00", + "privateNetworkFolders": [], + "isPublisherVerified": False, + "publisherType": "team", + "curatedInList": [], + "creatorId": "6900157", + "description": "", + "forklabel": "", + "publisherId": "299401", + "publisherLogo": "", + "popularity": 5, + "isPublic": True, + "categories": [], + "universaltags": "", + "views": 5788, + "summary": "BLS public workspaces.", + "memberCount": 2, + "isBlacklisted": False, + "publisherid": "299401", + "isPrivateNetworkEntity": False, + "isDomainNonTrivial": True, + "privateNetworkMeta": "", + "updatedat": "2021-10-20T16:19:29", + "documentType": "workspace", + }, + "highlight": {"summary": "BLS BBOT api test."}, + }, + { + "score": 611.41156, + "normalizedScore": 23, + "document": { + "watcherCount": 6, + "apiCount": 0, + "forkCount": 0, + "isblacklisted": "false", + "createdAt": "2021-06-15T14:03:51", + "publishertype": "team", + "publisherHandle": "testteam", + "id": "11498add-357d-4bc5-a008-0a2d44fb8829", + "slug": "testing-bbot-api", + "updatedAt": "2024-07-30T11:00:35", + "entityType": "workspace", + "visibilityStatus": "public", + "forkcount": "0", + "tags": [], + "createdat": "2021-06-15T14:03:51", + "forkLabel": "", + "publisherName": "testteam", + "name": "Test BlackLanternSecurity API Team Workspace", + "dependencyCount": 7, + "collectionCount": 6, + "warehouse__updated_at": "2024-07-30 11:00:00", + "privateNetworkFolders": [], + "isPublisherVerified": False, + "publisherType": "team", + "curatedInList": [], + "creatorId": "6900157", + "description": "", + "forklabel": "", + "publisherId": "299401", + "publisherLogo": "", + "popularity": 5, + "isPublic": True, + "categories": [], + "universaltags": "", + "views": 5788, + "summary": "Private test of BBOTs public API", + "memberCount": 2, + "isBlacklisted": False, + "publisherid": "299401", + "isPrivateNetworkEntity": False, + "isDomainNonTrivial": True, + "privateNetworkMeta": "", + "updatedat": "2021-10-20T16:19:29", + "documentType": "workspace", + }, + "highlight": {"summary": "Private test of BBOTs Public API"}, + }, + ], + "meta": { + "queryText": "blacklanternsecurity", + "total": { + "collection": 0, + "request": 0, + "workspace": 2, + "api": 0, + "team": 0, + "user": 0, + "flow": 0, + "apiDefinition": 0, + "privateNetworkFolder": 0, + }, + "state": "AQ4", + "spellCorrection": {"count": {"all": 2, "workspace": 2}, "correctedQueryText": None}, + "featureFlags": { + "enabledPublicResultCuration": True, + "boostByPopularity": True, + "reRankPostNormalization": True, + "enableUrlBarHostNameSearch": True, + }, 
+ }, + }, + ) + module_test.httpx_mock.add_response( + url="https://www.postman.com/_api/ws/proxy", + match_content=b'{"service": "workspaces", "method": "GET", "path": "/workspaces?handle=blacklanternsecurity&slug=bbot-public"}', + json={ + "meta": {"model": "workspace", "action": "find", "nextCursor": ""}, + "data": [ + { + "id": "3a7e4bdc-7ff7-4dd4-8eaa-61ddce1c3d1b", + "name": "BlackLanternSecurity BBOT [Public]", + "description": None, + "summary": "BLS public workspaces.", + "createdBy": "299401", + "updatedBy": "299401", + "team": None, + "createdAt": "2021-10-20T16:19:29", + "updatedAt": "2021-10-20T16:19:29", + "visibilityStatus": "public", + "profileInfo": { + "slug": "bbot-public", + "profileType": "team", + "profileId": "000000", + "publicHandle": "https://www.postman.com/blacklanternsecurity", + "publicImageURL": "", + "publicName": "BlackLanternSecurity", + "isVerified": False, + }, + } + ], + }, + ) + module_test.httpx_mock.add_response( + url="https://www.postman.com/_api/ws/proxy", + match_content=b'{"service": "workspaces", "method": "GET", "path": "/workspaces?handle=testteam&slug=testing-bbot-api"}', + json={ + "meta": {"model": "workspace", "action": "find", "nextCursor": ""}, + "data": [ + { + "id": "a4dfe981-2593-4f0b-b4c3-5145e8640f7d", + "name": "Test BlackLanternSecurity API Team Workspace", + "description": None, + "summary": "Private test of BBOTs public API", + "createdBy": "299401", + "updatedBy": "299401", + "team": None, + "createdAt": "2021-10-20T16:19:29", + "updatedAt": "2021-10-20T16:19:29", + "visibilityStatus": "public", + "profileInfo": { + "slug": "bbot-public", + "profileType": "team", + "profileId": "000000", + "publicHandle": "https://www.postman.com/testteam", + "publicImageURL": "", + "publicName": "testteam", + "isVerified": False, + }, + } + ], + }, + ) + module_test.httpx_mock.add_response( + url="https://api.getpostman.com/workspaces/3a7e4bdc-7ff7-4dd4-8eaa-61ddce1c3d1b", + match_headers={"X-Api-Key": "asdf"}, + json={ + "workspace": { + "id": "3a7e4bdc-7ff7-4dd4-8eaa-61ddce1c3d1b", + "name": "BlackLanternSecurity BBOT [Public]", + "type": "personal", + "description": None, + "visibility": "public", + "createdBy": "00000000", + "updatedBy": "00000000", + "createdAt": "2021-11-17T06:09:01.000Z", + "updatedAt": "2021-11-17T08:57:16.000Z", + "collections": [ + { + "id": "2aab9fd0-3715-4abe-8bb0-8cb0264d023f", + "name": "BBOT Public", + "uid": "10197090-2aab9fd0-3715-4abe-8bb0-8cb0264d023f", + }, + ], + "environments": [ + { + "id": "f770f816-9c6a-40f7-bde3-c0855d2a1089", + "name": "BBOT Test", + "uid": "10197090-f770f816-9c6a-40f7-bde3-c0855d2a1089", + } + ], + "apis": [], + } + }, + ) + module_test.httpx_mock.add_response( + url="https://api.getpostman.com/workspaces/a4dfe981-2593-4f0b-b4c3-5145e8640f7d", + json={ + "workspace": { + "id": "a4dfe981-2593-4f0b-b4c3-5145e8640f7d", + "name": "Test BlackLanternSecurity API Team Workspace", + "type": "personal", + "description": None, + "visibility": "public", + "createdBy": "00000000", + "updatedBy": "00000000", + "createdAt": "2021-11-17T06:09:01.000Z", + "updatedAt": "2021-11-17T08:57:16.000Z", + "collections": [ + { + "id": "f46bebfd-420a-4adf-97d1-6fb5a02cf7fc", + "name": "BBOT Public", + "uid": "10197090-f46bebfd-420a-4adf-97d1-6fb5a02cf7fc", + }, + ], + "environments": [], + "apis": [], + } + }, + ) + module_test.httpx_mock.add_response( + url="https://www.postman.com/_api/workspace/3a7e4bdc-7ff7-4dd4-8eaa-61ddce1c3d1b/globals", + json={ + "model_id": "8be7574b-219f-49e0-8d25-da447a882e4e", + 
"meta": {"model": "globals", "action": "find"}, + "data": { + "workspace": "3a7e4bdc-7ff7-4dd4-8eaa-61ddce1c3d1b", + "lastUpdatedBy": "00000000", + "lastRevision": 1637239113000, + "id": "8be7574b-219f-49e0-8d25-da447a882e4e", + "values": [ + { + "key": "endpoint_url", + "value": "https://api.blacklanternsecurity.com/", + "enabled": True, + }, + ], + "createdAt": "2021-11-17T06:09:01.000Z", + "updatedAt": "2021-11-18T12:38:33.000Z", + }, + }, + ) + module_test.httpx_mock.add_response( + url="https://www.postman.com/_api/workspace/a4dfe981-2593-4f0b-b4c3-5145e8640f7d/globals", + json={ + "model_id": "8be7574b-219f-49e0-8d25-da447a882e4e", + "meta": {"model": "globals", "action": "find"}, + "data": { + "workspace": "a4dfe981-2593-4f0b-b4c3-5145e8640f7d", + "lastUpdatedBy": "00000000", + "lastRevision": 1637239113000, + "id": "8be7574b-219f-49e0-8d25-da447a882e4e", + "values": [], + "createdAt": "2021-11-17T06:09:01.000Z", + "updatedAt": "2021-11-18T12:38:33.000Z", + }, + }, + ) + module_test.httpx_mock.add_response( + url="https://api.getpostman.com/environments/10197090-f770f816-9c6a-40f7-bde3-c0855d2a1089", + match_headers={"X-Api-Key": "asdf"}, + json={ + "environment": { + "id": "f770f816-9c6a-40f7-bde3-c0855d2a1089", + "name": "BBOT Test", + "owner": "00000000", + "createdAt": "2021-11-17T06:29:54.000Z", + "updatedAt": "2021-11-23T07:06:53.000Z", + "values": [ + { + "key": "temp_session_endpoint", + "value": "https://api.blacklanternsecurity.com/", + "enabled": True, + }, + ], + "isPublic": True, + } + }, + ) + module_test.httpx_mock.add_response( + url="https://api.getpostman.com/collections/10197090-2aab9fd0-3715-4abe-8bb0-8cb0264d023f", + match_headers={"X-Api-Key": "asdf"}, + json={ + "collection": { + "info": { + "_postman_id": "62b91565-d2e2-4bcd-8248-4dba2e3452f0", + "name": "BBOT Public", + "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json", + "updatedAt": "2021-11-17T07:13:16.000Z", + "createdAt": "2021-11-17T07:13:15.000Z", + "lastUpdatedBy": "00000000", + "uid": "00000000-62b91565-d2e2-4bcd-8248-4dba2e3452f0", + }, + "item": [ + { + "name": "Generate API Session", + "id": "c1bac38c-dfc9-4cc0-9c19-828cbc8543b1", + "protocolProfileBehavior": {"disableBodyPruning": True}, + "request": { + "method": "POST", + "header": [{"key": "Content-Type", "value": "application/json"}], + "body": { + "mode": "raw", + "raw": '{"username": "test", "password": "Test"}', + }, + "url": {"raw": "{{endpoint_url}}", "host": ["{{endpoint_url}}"]}, + "description": "", + }, + "response": [], + "uid": "10197090-c1bac38c-dfc9-4cc0-9c19-828cbc8543b1", + }, + ], + } + }, + ) + module_test.httpx_mock.add_response( + url="https://api.getpostman.com/collections/10197090-f46bebfd-420a-4adf-97d1-6fb5a02cf7fc", + json={ + "collection": { + "info": { + "_postman_id": "f46bebfd-420a-4adf-97d1-6fb5a02cf7fc", + "name": "BBOT Public", + "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json", + "updatedAt": "2021-11-17T07:13:16.000Z", + "createdAt": "2021-11-17T07:13:15.000Z", + "lastUpdatedBy": "00000000", + "uid": "00000000-f46bebfd-420a-4adf-97d1-6fb5a02cf7fc", + }, + "item": [ + { + "name": "Out of Scope API request", + "id": "c1bac38c-dfc9-4cc0-9c19-828cbc8543b1", + "protocolProfileBehavior": {"disableBodyPruning": True}, + "request": { + "method": "POST", + "header": [{"key": "Content-Type", "value": "application/json"}], + "body": { + "mode": "raw", + "raw": '{"username": "test", "password": "Test"}', + }, + "url": {"raw": "https://www.outofscope.com", 
"host": ["www.outofscope.com"]}, + "description": "", + }, + "response": [], + "uid": "10197090-c1bac38c-dfc9-4cc0-9c19-828cbc8543b1", + }, + ], + } + }, + ) + + def check(self, module_test, events): + assert len(events) == 5 + assert 1 == len( + [ + e + for e in events + if e.type == "DNS_NAME" and e.data == "blacklanternsecurity.com" and e.scope_distance == 0 + ] + ), "Failed to emit target DNS_NAME" + assert 1 == len( + [e for e in events if e.type == "ORG_STUB" and e.data == "blacklanternsecurity" and e.scope_distance == 0] + ), "Failed to find ORG_STUB" + # Find only 1 in-scope workspace the other will be out of scope + assert 1 == len( + [ + e + for e in events + if e.type == "CODE_REPOSITORY" + and "postman" in e.tags + and e.data["url"] == "https://www.postman.com/blacklanternsecurity/bbot-public" + and e.scope_distance == 1 + ] + ), "Failed to find blacklanternsecurity postman workspace" diff --git a/bbot/test/test_step_2/module_tests/test_module_postman_download.py b/bbot/test/test_step_2/module_tests/test_module_postman_download.py new file mode 100644 index 0000000000..4a893601a7 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_postman_download.py @@ -0,0 +1,282 @@ +from .base import ModuleTestBase + + +class TestPostman_Download(ModuleTestBase): + config_overrides = {"modules": {"postman_download": {"api_key": "asdf"}}} + modules_overrides = ["postman", "postman_download", "speculate"] + + async def setup_before_prep(self, module_test): + module_test.httpx_mock.add_response( + url="https://api.getpostman.com/me", + match_headers={"X-Api-Key": "asdf"}, + json={ + "user": { + "id": 000000, + "username": "test_key", + "email": "blacklanternsecurity@test.com", + "fullName": "Test Key", + "avatar": "", + "isPublic": True, + "teamId": 0, + "teamDomain": "", + "roles": ["user"], + }, + "operations": [ + {"name": "api_object_usage", "limit": 3, "usage": 0, "overage": 0}, + {"name": "collection_run_limit", "limit": 25, "usage": 0, "overage": 0}, + {"name": "file_storage_limit", "limit": 20, "usage": 0, "overage": 0}, + {"name": "flow_count", "limit": 5, "usage": 0, "overage": 0}, + {"name": "flow_requests", "limit": 5000, "usage": 0, "overage": 0}, + {"name": "performance_test_limit", "limit": 25, "usage": 0, "overage": 0}, + {"name": "postbot_calls", "limit": 50, "usage": 0, "overage": 0}, + {"name": "reusable_packages", "limit": 3, "usage": 0, "overage": 0}, + {"name": "test_data_retrieval", "limit": 1000, "usage": 0, "overage": 0}, + {"name": "test_data_storage", "limit": 10, "usage": 0, "overage": 0}, + {"name": "mock_usage", "limit": 1000, "usage": 0, "overage": 0}, + {"name": "monitor_request_runs", "limit": 1000, "usage": 0, "overage": 0}, + {"name": "api_usage", "limit": 1000, "usage": 0, "overage": 0}, + ], + }, + ) + + async def setup_after_prep(self, module_test): + await module_test.mock_dns( + {"blacklanternsecurity.com": {"A": ["127.0.0.99"]}, "github.com": {"A": ["127.0.0.99"]}} + ) + module_test.httpx_mock.add_response( + url="https://www.postman.com/_api/ws/proxy", + match_content=b'{"service": "search", "method": "POST", "path": "/search-all", "body": {"queryIndices": ["collaboration.workspace"], "queryText": "blacklanternsecurity", "size": 25, "from": 0, "clientTraceId": "", "requestOrigin": "srp", "mergeEntities": "true", "nonNestedRequests": "true", "domain": "public"}}', + json={ + "data": [ + { + "score": 611.41156, + "normalizedScore": 23, + "document": { + "watcherCount": 6, + "apiCount": 0, + "forkCount": 0, + "isblacklisted": "false", + 
"createdAt": "2021-06-15T14:03:51", + "publishertype": "team", + "publisherHandle": "blacklanternsecurity", + "id": "11498add-357d-4bc5-a008-0a2d44fb8829", + "slug": "bbot-public", + "updatedAt": "2024-07-30T11:00:35", + "entityType": "workspace", + "visibilityStatus": "public", + "forkcount": "0", + "tags": [], + "createdat": "2021-06-15T14:03:51", + "forkLabel": "", + "publisherName": "blacklanternsecurity", + "name": "BlackLanternSecurity BBOT [Public]", + "dependencyCount": 7, + "collectionCount": 6, + "warehouse__updated_at": "2024-07-30 11:00:00", + "privateNetworkFolders": [], + "isPublisherVerified": False, + "publisherType": "team", + "curatedInList": [], + "creatorId": "6900157", + "description": "", + "forklabel": "", + "publisherId": "299401", + "publisherLogo": "", + "popularity": 5, + "isPublic": True, + "categories": [], + "universaltags": "", + "views": 5788, + "summary": "BLS public workspaces.", + "memberCount": 2, + "isBlacklisted": False, + "publisherid": "299401", + "isPrivateNetworkEntity": False, + "isDomainNonTrivial": True, + "privateNetworkMeta": "", + "updatedat": "2021-10-20T16:19:29", + "documentType": "workspace", + }, + "highlight": {"summary": "BLS BBOT api test."}, + }, + ], + "meta": { + "queryText": "blacklanternsecurity", + "total": { + "collection": 0, + "request": 0, + "workspace": 1, + "api": 0, + "team": 0, + "user": 0, + "flow": 0, + "apiDefinition": 0, + "privateNetworkFolder": 0, + }, + "state": "AQ4", + "spellCorrection": {"count": {"all": 1, "workspace": 1}, "correctedQueryText": None}, + "featureFlags": { + "enabledPublicResultCuration": True, + "boostByPopularity": True, + "reRankPostNormalization": True, + "enableUrlBarHostNameSearch": True, + }, + }, + }, + ) + module_test.httpx_mock.add_response( + url="https://www.postman.com/_api/ws/proxy", + match_content=b'{"service": "workspaces", "method": "GET", "path": "/workspaces?handle=blacklanternsecurity&slug=bbot-public"}', + json={ + "meta": {"model": "workspace", "action": "find", "nextCursor": ""}, + "data": [ + { + "id": "3a7e4bdc-7ff7-4dd4-8eaa-61ddce1c3d1b", + "name": "BlackLanternSecurity BBOT [Public]", + "description": None, + "summary": "BLS public workspaces.", + "createdBy": "299401", + "updatedBy": "299401", + "team": None, + "createdAt": "2021-10-20T16:19:29", + "updatedAt": "2021-10-20T16:19:29", + "visibilityStatus": "public", + "profileInfo": { + "slug": "bbot-public", + "profileType": "team", + "profileId": "000000", + "publicHandle": "https://www.postman.com/blacklanternsecurity", + "publicImageURL": "", + "publicName": "BlackLanternSecurity", + "isVerified": False, + }, + } + ], + }, + ) + module_test.httpx_mock.add_response( + url="https://api.getpostman.com/workspaces/3a7e4bdc-7ff7-4dd4-8eaa-61ddce1c3d1b", + match_headers={"X-Api-Key": "asdf"}, + json={ + "workspace": { + "id": "3a7e4bdc-7ff7-4dd4-8eaa-61ddce1c3d1b", + "name": "BlackLanternSecurity BBOT [Public]", + "type": "personal", + "description": None, + "visibility": "public", + "createdBy": "00000000", + "updatedBy": "00000000", + "createdAt": "2021-11-17T06:09:01.000Z", + "updatedAt": "2021-11-17T08:57:16.000Z", + "collections": [ + { + "id": "2aab9fd0-3715-4abe-8bb0-8cb0264d023f", + "name": "BBOT Public", + "uid": "10197090-2aab9fd0-3715-4abe-8bb0-8cb0264d023f", + }, + ], + "environments": [ + { + "id": "f770f816-9c6a-40f7-bde3-c0855d2a1089", + "name": "BBOT Test", + "uid": "10197090-f770f816-9c6a-40f7-bde3-c0855d2a1089", + } + ], + "apis": [], + } + }, + ) + module_test.httpx_mock.add_response( + 
url="https://www.postman.com/_api/workspace/3a7e4bdc-7ff7-4dd4-8eaa-61ddce1c3d1b/globals", + json={ + "model_id": "8be7574b-219f-49e0-8d25-da447a882e4e", + "meta": {"model": "globals", "action": "find"}, + "data": { + "workspace": "3a7e4bdc-7ff7-4dd4-8eaa-61ddce1c3d1b", + "lastUpdatedBy": "00000000", + "lastRevision": 1637239113000, + "id": "8be7574b-219f-49e0-8d25-da447a882e4e", + "values": [ + { + "key": "endpoint_url", + "value": "https://api.blacklanternsecurity.com/", + "enabled": True, + }, + ], + "createdAt": "2021-11-17T06:09:01.000Z", + "updatedAt": "2021-11-18T12:38:33.000Z", + }, + }, + ) + module_test.httpx_mock.add_response( + url="https://api.getpostman.com/environments/10197090-f770f816-9c6a-40f7-bde3-c0855d2a1089", + match_headers={"X-Api-Key": "asdf"}, + json={ + "environment": { + "id": "f770f816-9c6a-40f7-bde3-c0855d2a1089", + "name": "BBOT Test", + "owner": "00000000", + "createdAt": "2021-11-17T06:29:54.000Z", + "updatedAt": "2021-11-23T07:06:53.000Z", + "values": [ + { + "key": "temp_session_endpoint", + "value": "https://api.blacklanternsecurity.com/", + "enabled": True, + }, + ], + "isPublic": True, + } + }, + ) + module_test.httpx_mock.add_response( + url="https://api.getpostman.com/collections/10197090-2aab9fd0-3715-4abe-8bb0-8cb0264d023f", + match_headers={"X-Api-Key": "asdf"}, + json={ + "collection": { + "info": { + "_postman_id": "62b91565-d2e2-4bcd-8248-4dba2e3452f0", + "name": "BBOT Public", + "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json", + "updatedAt": "2021-11-17T07:13:16.000Z", + "createdAt": "2021-11-17T07:13:15.000Z", + "lastUpdatedBy": "00000000", + "uid": "00000000-62b91565-d2e2-4bcd-8248-4dba2e3452f0", + }, + "item": [ + { + "name": "Generate API Session", + "id": "c1bac38c-dfc9-4cc0-9c19-828cbc8543b1", + "protocolProfileBehavior": {"disableBodyPruning": True}, + "request": { + "method": "POST", + "header": [{"key": "Content-Type", "value": "application/json"}], + "body": { + "mode": "raw", + "raw": '{"username": "test", "password": "Test"}', + }, + "url": {"raw": "{{endpoint_url}}", "host": ["{{endpoint_url}}"]}, + "description": "", + }, + "response": [], + "uid": "10197090-c1bac38c-dfc9-4cc0-9c19-828cbc8543b1", + }, + ], + } + }, + ) + + def check(self, module_test, events): + assert 1 == len( + [e for e in events if e.type == "CODE_REPOSITORY" and "postman" in e.tags and e.scope_distance == 1] + ), "Failed to find blacklanternsecurity postman workspace" + assert 1 == len( + [ + e + for e in events + if e.type == "FILESYSTEM" + and "postman_workspaces/BlackLanternSecurity BBOT [Public]" in e.data["path"] + and "postman" in e.tags + and e.scope_distance == 1 + ] + ), "Failed to find blacklanternsecurity postman workspace" diff --git a/bbot/test/test_step_2/module_tests/test_module_python.py b/bbot/test/test_step_2/module_tests/test_module_python.py new file mode 100644 index 0000000000..eb1628437b --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_python.py @@ -0,0 +1,6 @@ +from .base import ModuleTestBase + + +class TestPython(ModuleTestBase): + def check(self, module_test, events): + assert any(e.data == "blacklanternsecurity.com" for e in events) diff --git a/bbot/test/test_step_2/module_tests/test_module_rapiddns.py b/bbot/test/test_step_2/module_tests/test_module_rapiddns.py new file mode 100644 index 0000000000..df8d45fbd8 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_rapiddns.py @@ -0,0 +1,108 @@ +import httpx + +from .base import ModuleTestBase + + +class 
TestRapidDNS(ModuleTestBase): + web_body = """12 +asdf.blacklanternsecurity.com +asdf.blacklanternsecurity.com.""" + + async def setup_after_prep(self, module_test): + module_test.module.abort_if = lambda e: False + module_test.httpx_mock.add_response( + url="https://rapiddns.io/subdomain/blacklanternsecurity.com?full=1#result", text=self.web_body + ) + + def check(self, module_test, events): + assert any(e.data == "asdf.blacklanternsecurity.com" for e in events), "Failed to detect subdomain" + + +class TestRapidDNSAbortThreshold1(TestRapidDNS): + module_name = "rapiddns" + + async def setup_after_prep(self, module_test): + self.url_count = {} + + async def custom_callback(request): + url = str(request.url) + try: + self.url_count[url] += 1 + except KeyError: + self.url_count[url] = 1 + raise httpx.TimeoutException("timeout") + + module_test.httpx_mock.add_callback(custom_callback) + + await module_test.mock_dns( + { + "blacklanternsecurity.com": {"A": ["127.0.0.88"]}, + "evilcorp.com": {"A": ["127.0.0.11"]}, + "evilcorp.net": {"A": ["127.0.0.22"]}, + "evilcorp.co.uk": {"A": ["127.0.0.33"]}, + } + ) + + def check(self, module_test, events): + assert module_test.module.api_failure_abort_threshold == 10 + assert module_test.module.errored is False + assert module_test.module._api_request_failures == 3 + assert module_test.module.api_retries == 3 + assert {e.data for e in events if e.type == "DNS_NAME"} == {"blacklanternsecurity.com"} + assert self.url_count == { + "https://rapiddns.io/subdomain/blacklanternsecurity.com?full=1#result": 3, + } + + +class TestRapidDNSAbortThreshold2(TestRapidDNSAbortThreshold1): + targets = ["blacklanternsecurity.com", "evilcorp.com"] + + def check(self, module_test, events): + assert module_test.module.api_failure_abort_threshold == 10 + assert module_test.module.errored is False + assert module_test.module._api_request_failures == 6 + assert module_test.module.api_retries == 3 + assert {e.data for e in events if e.type == "DNS_NAME"} == {"blacklanternsecurity.com", "evilcorp.com"} + assert self.url_count == { + "https://rapiddns.io/subdomain/blacklanternsecurity.com?full=1#result": 3, + "https://rapiddns.io/subdomain/evilcorp.com?full=1#result": 3, + } + + +class TestRapidDNSAbortThreshold3(TestRapidDNSAbortThreshold1): + targets = ["blacklanternsecurity.com", "evilcorp.com", "evilcorp.net"] + + def check(self, module_test, events): + assert module_test.module.api_failure_abort_threshold == 10 + assert module_test.module.errored is False + assert module_test.module._api_request_failures == 9 + assert module_test.module.api_retries == 3 + assert {e.data for e in events if e.type == "DNS_NAME"} == { + "blacklanternsecurity.com", + "evilcorp.com", + "evilcorp.net", + } + assert self.url_count == { + "https://rapiddns.io/subdomain/blacklanternsecurity.com?full=1#result": 3, + "https://rapiddns.io/subdomain/evilcorp.com?full=1#result": 3, + "https://rapiddns.io/subdomain/evilcorp.net?full=1#result": 3, + } + + +class TestRapidDNSAbortThreshold4(TestRapidDNSAbortThreshold1): + targets = ["blacklanternsecurity.com", "evilcorp.com", "evilcorp.net", "evilcorp.co.uk"] + + def check(self, module_test, events): + assert module_test.module.api_failure_abort_threshold == 10 + assert module_test.module.errored is True + assert module_test.module._api_request_failures == 10 + assert module_test.module.api_retries == 3 + assert {e.data for e in events if e.type == "DNS_NAME"} == { + "blacklanternsecurity.com", + "evilcorp.com", + "evilcorp.net", + "evilcorp.co.uk", + } + 
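+ # the first three targets each burn all 3 api_retries (9 failures); the 10th failure comes from the 4th target and trips api_failure_abort_threshold, so that URL is only attempted once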
assert len(self.url_count) == 4 + assert list(self.url_count.values()).count(3) == 3 + assert list(self.url_count.values()).count(1) == 1 diff --git a/bbot/test/test_step_2/module_tests/test_module_robots.py b/bbot/test/test_step_2/module_tests/test_module_robots.py new file mode 100644 index 0000000000..3d9156bb4c --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_robots.py @@ -0,0 +1,42 @@ +import re +from .base import ModuleTestBase + + +class TestRobots(ModuleTestBase): + targets = ["http://127.0.0.1:8888"] + modules_overrides = ["httpx", "robots"] + config_overrides = {"modules": {"robots": {"include_sitemap": True}}} + + async def setup_after_prep(self, module_test): + sample_robots = f"Allow: /allow/\nDisallow: /disallow/\nJunk: test.com\nDisallow: /*/wildcard.txt\nSitemap: {self.targets[0]}/sitemap.txt" + + expect_args = {"method": "GET", "uri": "/robots.txt"} + respond_args = {"response_data": sample_robots} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + def check(self, module_test, events): + allow_bool = False + disallow_bool = False + sitemap_bool = False + wildcard_bool = False + + for e in events: + if e.type == "URL_UNVERIFIED": + if str(e.module) != "TARGET": + assert "spider-danger" in e.tags, f"{e} doesn't have spider-danger tag" + if e.data == "http://127.0.0.1:8888/allow/": + allow_bool = True + + if e.data == "http://127.0.0.1:8888/disallow/": + disallow_bool = True + + if e.data == "http://127.0.0.1:8888/sitemap.txt": + sitemap_bool = True + + if re.match(r"http://127\.0\.0\.1:8888/\w+/wildcard\.txt", e.data): + wildcard_bool = True + + assert allow_bool + assert disallow_bool + assert sitemap_bool + assert wildcard_bool diff --git a/bbot/test/test_step_2/module_tests/test_module_securitytrails.py b/bbot/test/test_step_2/module_tests/test_module_securitytrails.py new file mode 100644 index 0000000000..5010f3bc49 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_securitytrails.py @@ -0,0 +1,21 @@ +from .base import ModuleTestBase + + +class TestSecurityTrails(ModuleTestBase): + config_overrides = {"modules": {"securitytrails": {"api_key": "asdf"}}} + + async def setup_before_prep(self, module_test): + module_test.httpx_mock.add_response( + url="https://api.securitytrails.com/v1/ping?apikey=asdf", + ) + module_test.httpx_mock.add_response( + url="https://api.securitytrails.com/v1/domain/blacklanternsecurity.com/subdomains?apikey=asdf", + json={ + "subdomains": [ + "asdf", + ], + }, + ) + + def check(self, module_test, events): + assert any(e.data == "asdf.blacklanternsecurity.com" for e in events), "Failed to detect subdomain" diff --git a/bbot/test/test_step_2/module_tests/test_module_securitytxt.py b/bbot/test/test_step_2/module_tests/test_module_securitytxt.py new file mode 100644 index 0000000000..0fa897c222 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_securitytxt.py @@ -0,0 +1,50 @@ +from .base import ModuleTestBase + + +class TestSecurityTxt(ModuleTestBase): + targets = ["blacklanternsecurity.notreal"] + modules_overrides = ["securitytxt", "speculate"] + + async def setup_before_prep(self, module_test): + module_test.httpx_mock.add_response( + url="https://blacklanternsecurity.notreal/.well-known/security.txt", + text="-----BEGIN PGP SIGNED MESSAGE-----\nHash: SHA512\n\nContact: mailto:joe.smith@blacklanternsecurity.notreal\nContact: mailto:vdp@example.com\nContact: https://vdp.example.com\nExpires: 2025-01-01T00:00:00.000Z\nPreferred-Languages: fr, en\nCanonical: 
https://blacklanternsecurity.notreal/.well-known/security.txt\nPolicy: https://example.com/cert\nHiring: https://www.careers.example.com\n-----BEGIN PGP SIGNATURE-----\n\nSIGNATURE\n\n-----END PGP SIGNATURE-----", + ) + + async def setup_after_prep(self, module_test): + await module_test.mock_dns( + { + "blacklanternsecurity.notreal": { + "A": ["127.0.0.11"], + }, + } + ) + + def check(self, module_test, events): + assert any(e.type == "EMAIL_ADDRESS" and e.data == "joe.smith@blacklanternsecurity.notreal" for e in events), ( + "Failed to detect email address" + ) + assert not any( + e.type == "URL_UNVERIFIED" and e.data == "https://blacklanternsecurity.notreal/.well-known/security.txt" + for e in events + ), "Failed to filter Canonical URL to self" + assert not any(str(e.data) == "vdp@example.com" for e in events) + + +class TestSecurityTxtEmailsFalse(TestSecurityTxt): + config_overrides = { + "scope": {"report_distance": 1}, + "modules": {"securitytxt": {"emails": False}}, + } + + def check(self, module_test, events): + assert not any(e.type == "EMAIL_ADDRESS" for e in events), "Detected email address when emails=False" + assert any(e.type == "URL_UNVERIFIED" and e.data == "https://vdp.example.com/" for e in events), ( + "Failed to detect URL" + ) + assert any(e.type == "URL_UNVERIFIED" and e.data == "https://example.com/cert" for e in events), ( + "Failed to detect URL" + ) + assert any(e.type == "URL_UNVERIFIED" and e.data == "https://www.careers.example.com/" for e in events), ( + "Failed to detect URL" + ) diff --git a/bbot/test/test_step_2/module_tests/test_module_shodan_dns.py b/bbot/test/test_step_2/module_tests/test_module_shodan_dns.py new file mode 100644 index 0000000000..3731220488 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_shodan_dns.py @@ -0,0 +1,40 @@ +from .base import ModuleTestBase + + +class TestShodan_DNS(ModuleTestBase): + config_overrides = {"modules": {"shodan": {"api_key": "asdf"}}} + + async def setup_before_prep(self, module_test): + module_test.httpx_mock.add_response( + url="https://api.shodan.io/api-info?key=asdf", + ) + module_test.httpx_mock.add_response( + url="https://api.shodan.io/dns/domain/blacklanternsecurity.com?key=asdf&page=1", + json={ + "subdomains": [ + "asdf", + ], + }, + ) + module_test.httpx_mock.add_response( + url="https://api.shodan.io/dns/domain/blacklanternsecurity.com?key=asdf&page=2", + json={ + "subdomains": [ + "www", + ], + }, + ) + await module_test.mock_dns( + { + "blacklanternsecurity.com": { + "A": ["127.0.0.11"], + }, + "www.blacklanternsecurity.com": {"A": ["127.0.0.22"]}, + "asdf.blacklanternsecurity.com": {"A": ["127.0.0.33"]}, + } + ) + + def check(self, module_test, events): + assert len([e for e in events if e.type == "DNS_NAME"]) == 3, "Failed to detect both subdomains" + assert any(e.data == "asdf.blacklanternsecurity.com" for e in events), "Failed to detect subdomain" + assert any(e.data == "www.blacklanternsecurity.com" for e in events), "Failed to detect subdomain" diff --git a/bbot/test/test_step_2/module_tests/test_module_sitedossier.py b/bbot/test/test_step_2/module_tests/test_module_sitedossier.py new file mode 100644 index 0000000000..ed93307664 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_sitedossier.py @@ -0,0 +1,151 @@ +from .base import ModuleTestBase + +page1 = """ + + + + +Parent domain: evilcorp.com + + + + +
+<body>
+<img src="/images/logo.gif" alt="logo">
+nwne
+Parent domain: <a href="/parentdomain/evilcorp.com">evilcorp.com</a>
+
+Displaying items 101 to 200, out of a total of 685
+
+&nbsp;&nbsp;1.&nbsp;&nbsp;&nbsp;<a href="http://asdf.evilcorp.com/">http://asdf.evilcorp.com/</a><br>
+&nbsp;&nbsp;2.&nbsp;&nbsp;&nbsp;<a href="http://zzzz.evilcorp.com/">http://zzzz.evilcorp.com/</a><br>
+
+<a href="/parentdomain/evilcorp.com/101"><i><b>Show next 100 items</b></i></a>
+swse
+</body>
+</html>
+"""
+
+page2 = """
+<html>
+<head><title>Parent domain: evilcorp.com</title></head>
+<body>
+<img src="/images/logo.gif" alt="logo">
+nwne
+Parent domain: <a href="/parentdomain/evilcorp.com">evilcorp.com</a>
+
+Displaying items 101 to 200, out of a total of 685
+
+&nbsp;&nbsp;1.&nbsp;&nbsp;&nbsp;<a href="http://xxxx.evilcorp.com/">http://xxxx.evilcorp.com/</a><br>
+&nbsp;&nbsp;2.&nbsp;&nbsp;&nbsp;<a href="http://ffff.evilcorp.com/">http://ffff.evilcorp.com/</a><br>
+swse
+</body>
+</html>
+ + +""" + + +class TestSitedossier(ModuleTestBase): + targets = ["evilcorp.com"] + + async def setup_after_prep(self, module_test): + await module_test.mock_dns( + { + "evilcorp.com": {"A": ["127.0.0.1"]}, + "asdf.evilcorp.com": {"A": ["127.0.0.1"]}, + "zzzz.evilcorp.com": {"A": ["127.0.0.1"]}, + "xxxx.evilcorp.com": {"A": ["127.0.0.1"]}, + "ffff.evilcorp.com": {"A": ["127.0.0.1"]}, + } + ) + module_test.httpx_mock.add_response( + url="http://www.sitedossier.com/parentdomain/evilcorp.com", + text=page1, + ) + module_test.httpx_mock.add_response( + url="http://www.sitedossier.com/parentdomain/evilcorp.com/101", + text=page2, + ) + + def check(self, module_test, events): + assert any(e.data == "asdf.evilcorp.com" for e in events), "Failed to detect subdomain" + assert any(e.data == "zzzz.evilcorp.com" for e in events), "Failed to detect subdomain" + assert any(e.data == "xxxx.evilcorp.com" for e in events), "Failed to detect subdomain" + assert any(e.data == "ffff.evilcorp.com" for e in events), "Failed to detect subdomain" diff --git a/bbot/test/test_step_2/module_tests/test_module_skymem.py b/bbot/test/test_step_2/module_tests/test_module_skymem.py new file mode 100644 index 0000000000..58ae02950e --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_skymem.py @@ -0,0 +1,43 @@ +from .base import ModuleTestBase + + +class TestSkymem(ModuleTestBase): + targets = ["blacklanternsecurity.com"] + + async def setup_before_prep(self, module_test): + module_test.httpx_mock.add_response( + url="https://www.skymem.info/srch?q=blacklanternsecurity.com", + text=page_1_body, + ) + module_test.httpx_mock.add_response( + url="https://www.skymem.info/domain/5679236812ad5b3f748a413d?p=2", + text=page_2_body, + ) + module_test.httpx_mock.add_response( + url="https://www.skymem.info/domain/5679236812ad5b3f748a413d?p=3", + text=page_3_body, + ) + + def check(self, module_test, events): + assert any(e.data == "page1email@blacklanternsecurity.com" for e in events), "Failed to detect first email" + assert any(e.data == "page2email@blacklanternsecurity.com" for e in events), "Failed to detect second email" + assert any(e.data == "page3email@blacklanternsecurity.com" for e in events), "Failed to detect third email" + + +page_1_body = """ +page1email@blacklanternsecurity.com + More emails for blacklanternsecurity.com ... + More emails for blacklanternsecurity.com ... +""" + +page_2_body = """ +page2email@blacklanternsecurity.com + More emails for blacklanternsecurity.com ... + More emails for blacklanternsecurity.com ... +""" + +page_3_body = """ +page3email@blacklanternsecurity.com + More emails for blacklanternsecurity.com ... + More emails for blacklanternsecurity.com ... 
+""" diff --git a/bbot/test/test_step_2/module_tests/test_module_slack.py b/bbot/test/test_step_2/module_tests/test_module_slack.py new file mode 100644 index 0000000000..1258ed5110 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_slack.py @@ -0,0 +1,7 @@ +from .test_module_discord import TestDiscord as DiscordBase + + +class TestSlack(DiscordBase): + modules_overrides = ["slack", "excavate", "badsecrets", "httpx"] + webhook_url = "https://hooks.slack.com/services/deadbeef/deadbeef/deadbeef" + config_overrides = {"modules": {"slack": {"webhook_url": webhook_url}}} diff --git a/bbot/test/test_step_2/module_tests/test_module_smuggler.py b/bbot/test/test_step_2/module_tests/test_module_smuggler.py new file mode 100644 index 0000000000..fb86b9ae92 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_smuggler.py @@ -0,0 +1,60 @@ +from .base import ModuleTestBase + +smuggler_text = r""" + ______ _ + / _____) | | + ( (____ ____ _ _ ____ ____| | _____ ____ + \____ \| \| | | |/ _ |/ _ | || ___ |/ ___) + _____) ) | | | |_| ( (_| ( (_| | || ____| | + (______/|_|_|_|____/ \___ |\___ |\_)_____)_| + (_____(_____| + + @defparam v1.1 + + [+] URL : http://127.0.0.1:8888 + [+] Method : POST + [+] Endpoint : / + [+] Configfile : default.py + [+] Timeout : 5.0 seconds + [+] Cookies : 1 (Appending to the attack) + [nameprefix1] : Checking TECL... + [nameprefix1] : Checking CLTE... + [nameprefix1] : OK (TECL: 0.61 - 405) (CLTE: 0.62 - 405) + [tabprefix1] : Checking TECL...git + [tabprefix1] : Checking CLTE... + [tabprefix1] : Checking TECL... + [tabprefix1] : Checking CLTE... + [tabprefix1] : Checking TECL... + [tabprefix1] : Checking CLTE... + [tabprefix1] : Potential CLTE Issue Found - POST @ http://127.0.0.1:8888 - default.py + [CRITICAL] : CLTE Payload: /home/user/.bbot/tools/smuggler/payloads/http_127.0.0.1_net_CLTE_tabprefix1.txt URL: http://127.0.0.1:8888/ + """ + + +class TestSmuggler(ModuleTestBase): + targets = ["http://127.0.0.1:8888"] + modules_overrides = ["httpx", "smuggler"] + + async def setup_after_prep(self, module_test): + old_run_live = module_test.scan.helpers.run_live + + async def smuggler_mock_run_live(*command, **kwargs): + if "smuggler" not in command[0][1]: + async for l in old_run_live(*command, **kwargs): + yield l + else: + for line in smuggler_text.splitlines(): + yield line + + module_test.monkeypatch.setattr(module_test.scan.helpers, "run_live", smuggler_mock_run_live) + + request_args = {"uri": "/"} + respond_args = {"response_data": "alive"} + module_test.set_expect_requests(request_args, respond_args) + + def check(self, module_test, events): + assert any( + e.type == "FINDING" + and "[HTTP SMUGGLER] [Potential CLTE Issue Found] Technique: [tabprefix1]" in e.data["description"] + for e in events + ), "Failed to parse mocked command output" diff --git a/bbot/test/test_step_2/module_tests/test_module_social.py b/bbot/test/test_step_2/module_tests/test_module_social.py new file mode 100644 index 0000000000..6b03c77ed6 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_social.py @@ -0,0 +1,58 @@ +from .base import ModuleTestBase + + +class TestSocial(ModuleTestBase): + targets = ["http://127.0.0.1:8888"] + modules_overrides = ["httpx", "excavate", "social"] + + async def setup_after_prep(self, module_test): + expect_args = {"method": "GET", "uri": "/"} + respond_args = { + "response_data": """ + + + + + + + + """ + } + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + def check(self, 
module_test, events): + assert 4 == len([e for e in events if e.type == "SOCIAL"]) + assert 1 == len( + [ + e + for e in events + if e.type == "SOCIAL" and e.data["platform"] == "discord" and e.data["profile_name"] == "asdf" + ] + ) + assert 1 == len( + [ + e + for e in events + if e.type == "SOCIAL" + and e.data["platform"] == "docker" + and e.data["profile_name"] == "blacklanternsecurity" + ] + ) + assert 1 == len( + [ + e + for e in events + if e.type == "SOCIAL" + and e.data["platform"] == "github" + and e.data["profile_name"] == "blacklanternsecurity" + ] + ) + assert 1 == len( + [ + e + for e in events + if e.type == "SOCIAL" + and e.data["platform"] == "postman" + and e.data["profile_name"] == "blacklanternsecurity" + ] + ) diff --git a/bbot/test/test_step_2/module_tests/test_module_speculate.py b/bbot/test/test_step_2/module_tests/test_module_speculate.py new file mode 100644 index 0000000000..55db777e7b --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_speculate.py @@ -0,0 +1,82 @@ +from .base import ModuleTestBase + + +class TestSpeculate_Subdirectories(ModuleTestBase): + targets = ["http://127.0.0.1:8888/subdir1/subdir2/"] + modules_overrides = ["httpx", "speculate"] + + async def setup_after_prep(self, module_test): + expect_args = {"method": "GET", "uri": "/"} + respond_args = {"response_data": "alive"} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + expect_args = {"method": "GET", "uri": "/subdir1/"} + respond_args = {"response_data": "alive"} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + expect_args = {"method": "GET", "uri": "/subdir1/subdir2/"} + respond_args = {"response_data": "alive"} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + def check(self, module_test, events): + assert any(e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.1:8888/subdir1/" for e in events) + + +class TestSpeculate_OpenPorts(ModuleTestBase): + targets = ["evilcorp.com"] + modules_overrides = ["speculate", "certspotter", "internetdb"] + config_overrides = {"speculate": True} + + async def setup_before_prep(self, module_test): + await module_test.mock_dns( + { + "evilcorp.com": {"A": ["127.0.254.1"]}, + "asdf.evilcorp.com": {"A": ["127.0.254.2"]}, + } + ) + + module_test.httpx_mock.add_response( + url="https://api.certspotter.com/v1/issuances?domain=evilcorp.com&include_subdomains=true&expand=dns_names", + json=[{"dns_names": ["*.asdf.evilcorp.com"]}], + ) + + from bbot.modules.base import BaseModule + + class DummyModule(BaseModule): + _name = "dummy" + watched_events = ["OPEN_TCP_PORT"] + scope_distance_modifier = 10 + accept_dupes = True + + async def setup(self): + self.events = [] + return True + + async def handle_event(self, event): + self.events.append(event) + + module_test.scan.modules["dummy"] = DummyModule(module_test.scan) + + def check(self, module_test, events): + events_data = set() + for e in module_test.scan.modules["dummy"].events: + events_data.add(e.data) + assert all( + x in events_data + for x in ("evilcorp.com:80", "evilcorp.com:443", "asdf.evilcorp.com:80", "asdf.evilcorp.com:443") + ) + + +class TestSpeculate_OpenPorts_Portscanner(TestSpeculate_OpenPorts): + targets = ["evilcorp.com"] + modules_overrides = ["speculate", "certspotter", "portscan"] + config_overrides = {"speculate": True} + + def check(self, module_test, events): + events_data = set() + for e in module_test.scan.modules["dummy"].events: + 
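+            # With a real portscan module in the preset, speculate should not
+            # produce its own speculative OPEN_TCP_PORT events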
+            events_data.add(e.data)
+        assert not any(
+            x in events_data
+            for x in ("evilcorp.com:80", "evilcorp.com:443", "asdf.evilcorp.com:80", "asdf.evilcorp.com:443")
+        )
diff --git a/bbot/test/test_step_2/module_tests/test_module_splunk.py b/bbot/test/test_step_2/module_tests/test_module_splunk.py
new file mode 100644
index 0000000000..8366a6289b
--- /dev/null
+++ b/bbot/test/test_step_2/module_tests/test_module_splunk.py
@@ -0,0 +1,58 @@
+import json
+import httpx
+
+from .base import ModuleTestBase
+
+
+class TestSplunk(ModuleTestBase):
+    downstream_url = "https://splunk.blacklanternsecurity.fakedomain:1234/services/collector"
+    config_overrides = {
+        "modules": {
+            "splunk": {
+                "url": downstream_url,
+                "hectoken": "HECTOKEN",
+                "index": "bbot_index",
+                "source": "bbot_source",
+            }
+        }
+    }
+
+    def verify_data(self, j):
+        if not j["source"] == "bbot_source":
+            return False
+        if not j["index"] == "bbot_index":
+            return False
+        data = j["event"]
+        # The forwarded event must be the target DNS_NAME itself
+        if not (data["data"] == "blacklanternsecurity.com" and data["type"] == "DNS_NAME"):
+            return False
+        return True
+
+    async def setup_after_prep(self, module_test):
+        self.url_correct = False
+        self.method_correct = False
+        self.got_event = False
+        self.headers_correct = False
+
+        async def custom_callback(request):
+            j = json.loads(request.content)
+            if request.url == self.downstream_url:
+                self.url_correct = True
+            if request.method == "POST":
+                self.method_correct = True
+            if "Authorization" in request.headers:
+                self.headers_correct = True
+            if self.verify_data(j):
+                self.got_event = True
+            return httpx.Response(
+                status_code=200,
+            )
+
+        module_test.httpx_mock.add_callback(custom_callback)
+        module_test.httpx_mock.add_response()
+
+    def check(self, module_test, events):
+        assert self.got_event is True
+        assert self.headers_correct is True
+        assert self.method_correct is True
+        assert self.url_correct is True
diff --git a/bbot/test/test_step_2/module_tests/test_module_sqlite.py b/bbot/test/test_step_2/module_tests/test_module_sqlite.py
new file mode 100644
index 0000000000..ec80b7555d
--- /dev/null
+++ b/bbot/test/test_step_2/module_tests/test_module_sqlite.py
@@ -0,0 +1,18 @@
+import sqlite3
+from .base import ModuleTestBase
+
+
+class TestSQLite(ModuleTestBase):
+    targets = ["evilcorp.com"]
+
+    def check(self, module_test, events):
+        sqlite_output_file = module_test.scan.home / "output.sqlite"
+        assert sqlite_output_file.exists(), "SQLite output file not found"
+        with sqlite3.connect(sqlite_output_file) as db:
+            cursor = db.cursor()
+            results = cursor.execute("SELECT * FROM event").fetchall()
+            assert len(results) == 3, "Expected exactly 3 events in SQLite database"
+            results = cursor.execute("SELECT * FROM scan").fetchall()
+            assert len(results) == 1, "Expected exactly 1 scan in SQLite database"
+            results = cursor.execute("SELECT * FROM target").fetchall()
+            assert len(results) == 1, "Expected exactly 1 target in SQLite database"
diff --git a/bbot/test/test_step_2/module_tests/test_module_sslcert.py b/bbot/test/test_step_2/module_tests/test_module_sslcert.py
new file mode 100644
index 0000000000..a81482ff5c
--- /dev/null
+++ b/bbot/test/test_step_2/module_tests/test_module_sslcert.py
@@ -0,0 +1,19 @@
+from .base import ModuleTestBase
+
+
+class TestSSLCert(ModuleTestBase):
+    targets = ["127.0.0.1:9999", "bbottest.notreal"]
+    config_overrides = {"scope": {"report_distance": 1}}
+
+    def check(self, module_test, events):
+        assert len(events) == 7
+        assert 1 == len(
+            [
+                e
+                for e in events
+                if e.data ==
"www.bbottest.notreal" and str(e.module) == "sslcert" and e.scope_distance == 0 + ] + ), "Failed to detect subject alternate name (SAN)" + assert 1 == len( + [e for e in events if e.data == "test.notreal" and str(e.module) == "sslcert" and e.scope_distance == 1] + ), "Failed to detect main subject" diff --git a/bbot/test/test_step_2/module_tests/test_module_stdout.py b/bbot/test/test_step_2/module_tests/test_module_stdout.py new file mode 100644 index 0000000000..27d8a30594 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_stdout.py @@ -0,0 +1,103 @@ +import json + +from .base import ModuleTestBase + + +class TestStdout(ModuleTestBase): + modules_overrides = ["stdout"] + + def check(self, module_test, events): + out, err = module_test.capsys.readouterr() + assert out.startswith("[SCAN] \tteststdout") + assert "[DNS_NAME] \tblacklanternsecurity.com\tTARGET" in out + + +class TestStdoutEventTypes(TestStdout): + config_overrides = {"modules": {"stdout": {"event_types": ["DNS_NAME"]}}} + + def check(self, module_test, events): + out, err = module_test.capsys.readouterr() + assert len(out.splitlines()) == 1 + assert out.startswith("[DNS_NAME] \tblacklanternsecurity.com\tTARGET") + + +class TestStdoutEventFields(TestStdout): + config_overrides = {"modules": {"stdout": {"event_types": ["DNS_NAME"], "event_fields": ["data"]}}} + + def check(self, module_test, events): + out, err = module_test.capsys.readouterr() + assert out == "blacklanternsecurity.com\n" + + +class TestStdoutJSON(TestStdout): + config_overrides = { + "modules": { + "stdout": { + "format": "json", + } + } + } + + def check(self, module_test, events): + out, err = module_test.capsys.readouterr() + lines = out.splitlines() + assert len(lines) == 3 + for i, line in enumerate(lines): + event = json.loads(line) + if i == 0: + assert event["type"] == "SCAN" + elif i == 1: + assert event["type"] == "DNS_NAME" and event["data"] == "blacklanternsecurity.com" + if i == 2: + assert event["type"] == "SCAN" + + +class TestStdoutJSONFields(TestStdout): + config_overrides = {"modules": {"stdout": {"format": "json", "event_fields": ["data", "module_sequence"]}}} + + def check(self, module_test, events): + out, err = module_test.capsys.readouterr() + lines = out.splitlines() + assert len(lines) == 3 + for line in lines: + event = json.loads(line) + assert set(event) == {"data", "module_sequence"} + + +class TestStdoutDupes(TestStdout): + targets = ["blacklanternsecurity.com", "127.0.0.2"] + config_overrides = { + "dns": {"minimal": False}, + "modules": { + "stdout": { + "event_types": ["DNS_NAME", "IP_ADDRESS"], + } + }, + } + + async def setup_after_prep(self, module_test): + await module_test.mock_dns({"blacklanternsecurity.com": {"A": ["127.0.0.2"]}}) + + def check(self, module_test, events): + out, err = module_test.capsys.readouterr() + lines = out.splitlines() + assert len(lines) == 3 + assert out.count("[IP_ADDRESS] \t127.0.0.2") == 2 + + +class TestStdoutNoDupes(TestStdoutDupes): + config_overrides = { + "dns": {"minimal": False}, + "modules": { + "stdout": { + "event_types": ["DNS_NAME", "IP_ADDRESS"], + "accept_dupes": False, + } + }, + } + + def check(self, module_test, events): + out, err = module_test.capsys.readouterr() + lines = out.splitlines() + assert len(lines) == 2 + assert out.count("[IP_ADDRESS] \t127.0.0.2") == 1 diff --git a/bbot/test/test_step_2/module_tests/test_module_subdomaincenter.py b/bbot/test/test_step_2/module_tests/test_module_subdomaincenter.py new file mode 100644 index 0000000000..aa95473a48 
--- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_subdomaincenter.py @@ -0,0 +1,13 @@ +from .base import ModuleTestBase + + +class TestSubdomainCenter(ModuleTestBase): + async def setup_after_prep(self, module_test): + module_test.httpx_mock.add_response( + url="https://api.subdomain.center/?domain=blacklanternsecurity.com", + json=["asdf.blacklanternsecurity.com", "zzzz.blacklanternsecurity.com"], + ) + + def check(self, module_test, events): + assert any(e.data == "asdf.blacklanternsecurity.com" for e in events), "Failed to detect subdomain" + assert any(e.data == "zzzz.blacklanternsecurity.com" for e in events), "Failed to detect subdomain" diff --git a/bbot/test/test_step_2/module_tests/test_module_subdomainradar.py b/bbot/test/test_step_2/module_tests/test_module_subdomainradar.py new file mode 100644 index 0000000000..c2bb827f35 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_subdomainradar.py @@ -0,0 +1,208 @@ +from .base import ModuleTestBase + + +class TestSubDomainRadar(ModuleTestBase): + config_overrides = {"modules": {"subdomainradar": {"api_key": "asdf"}}} + + async def setup_before_prep(self, module_test): + await module_test.mock_dns( + { + "blacklanternsecurity.com": {"A": ["127.0.0.88"]}, + "www.blacklanternsecurity.com": {"A": ["127.0.0.88"]}, + "asdf.blacklanternsecurity.com": {"A": ["127.0.0.88"]}, + } + ) + module_test.httpx_mock.add_response( + url="https://api.subdomainradar.io/profile", + match_headers={"Authorization": "Bearer asdf"}, + ) + module_test.httpx_mock.add_response( + url="https://api.subdomainradar.io/enumerate", + method="POST", + json={ + "tasks": {"blacklanternsecurity.com": "86de4531-0a67-41fe-b5e4-8ce8207d6245"}, + "message": "Tasks initiated", + }, + match_headers={"Authorization": "Bearer asdf"}, + ) + module_test.httpx_mock.add_response( + url="https://api.subdomainradar.io/tasks/86de4531-0a67-41fe-b5e4-8ce8207d6245", + match_headers={"Authorization": "Bearer asdf"}, + json={ + "task_id": "86de4531-0a67-41fe-b5e4-8ce8207d6245", + "status": "completed", + "domain": "blacklanternsecurity.com", + "subdomains": [ + { + "subdomain": "www.blacklanternsecurity.com", + "ip": None, + "reverse_dns": [], + "country": None, + "timestamp": None, + }, + { + "subdomain": "asdf.blacklanternsecurity.com", + "ip": None, + "reverse_dns": [], + "country": None, + "timestamp": None, + }, + ], + "total_subdomains": 2, + "rank": None, + "whois": { + "domain_name": ["BLACKLANTERNSECURITY.COM", "blacklanternsecurity.com"], + "registrar": "MarkMonitor, Inc.", + "creation_date": ["1992-11-04T05:00:00", "1992-11-04T05:00:00+00:00"], + "expiration_date": ["2026-11-03T05:00:00", "2026-11-03T00:00:00+00:00"], + "last_updated": ["2024-10-02T10:15:20", "2024-10-02T10:15:20+00:00"], + "status": [ + "clientDeleteProhibited https://icann.org/epp#clientDeleteProhibited", + "clientTransferProhibited https://icann.org/epp#clientTransferProhibited", + "clientUpdateProhibited https://icann.org/epp#clientUpdateProhibited", + "serverDeleteProhibited https://icann.org/epp#serverDeleteProhibited", + "serverTransferProhibited https://icann.org/epp#serverTransferProhibited", + "serverUpdateProhibited https://icann.org/epp#serverUpdateProhibited", + "clientUpdateProhibited (https://www.icann.org/epp#clientUpdateProhibited)", + "clientTransferProhibited (https://www.icann.org/epp#clientTransferProhibited)", + "clientDeleteProhibited (https://www.icann.org/epp#clientDeleteProhibited)", + "serverUpdateProhibited 
(https://www.icann.org/epp#serverUpdateProhibited)", + "serverTransferProhibited (https://www.icann.org/epp#serverTransferProhibited)", + "serverDeleteProhibited (https://www.icann.org/epp#serverDeleteProhibited)", + ], + "nameservers": [ + "A1-12.AKAM.NET", + "A10-67.AKAM.NET", + "A12-64.AKAM.NET", + "A28-65.AKAM.NET", + "A7-66.AKAM.NET", + "A9-67.AKAM.NET", + "EDNS69.ULTRADNS.BIZ", + "EDNS69.ULTRADNS.COM", + "EDNS69.ULTRADNS.NET", + "EDNS69.ULTRADNS.ORG", + "edns69.ultradns.biz", + "a12-64.akam.net", + "edns69.ultradns.net", + "edns69.ultradns.org", + "a10-67.akam.net", + "a28-65.akam.net", + "a9-67.akam.net", + "a1-12.akam.net", + "a7-66.akam.net", + "edns69.ultradns.com", + ], + "emails": [ + "abusecomplaints@markmonitor.com", + "admin@dnstinations.com", + "whoisrequest@markmonitor.com", + ], + "dnssec": "unsigned", + "org": "DNStination Inc.", + "address": "3450 Sacramento Street, Suite 405", + "city": "San Francisco", + "state": "CA", + "zipcode": None, + "country": "US", + }, + "enumerators": ["Aquarius Enumerator", "Beta Enumerator", "Chi Enumerator", "Eta Enumerator"], + "timestamp": "2024-10-06T02:48:10.075636", + "error": None, + "is_notification": False, + "notification_domain_id": None, + "demo": False, + "user_id": 49, + "time_to_finish": 41, + }, + ) + module_test.httpx_mock.add_response( + url="https://api.subdomainradar.io/enumerators/groups", + match_headers={"Authorization": "Bearer asdf"}, + json=[ + { + "id": "1", + "name": "Fast", + "description": "Enumerators optimized for high-speed scanning and rapid data collection", + "enumerators": [ + {"display_name": "Beta Enumerator"}, + {"display_name": "Chi Enumerator"}, + {"display_name": "Aquarius Enumerator"}, + {"display_name": "Eta Enumerator"}, + ], + }, + { + "id": "2", + "name": "Medium", + "description": "Enumerators balanced for moderate speed with a focus on thoroughness", + "enumerators": [ + {"display_name": "Kappa Enumerator"}, + {"display_name": "Lambda Enumerator"}, + {"display_name": "Mu Enumerator"}, + {"display_name": "Pi Enumerator"}, + {"display_name": "Tau Enumerator"}, + {"display_name": "Beta Enumerator"}, + {"display_name": "Chi Enumerator"}, + {"display_name": "Psi Enumerator"}, + {"display_name": "Aquarius Enumerator"}, + {"display_name": "Zeta Enumerator"}, + {"display_name": "Eta Enumerator"}, + ], + }, + { + "id": "3", + "name": "Deep", + "description": "Enumerators designed for exhaustive searches and in-depth data analysis", + "enumerators": [ + {"display_name": "Alpha Enumerator"}, + {"display_name": "Kappa Enumerator"}, + {"display_name": "Lambda Enumerator"}, + {"display_name": "Mu Enumerator"}, + {"display_name": "Nu Enumerator"}, + {"display_name": "Xi Enumerator"}, + {"display_name": "Pi Enumerator"}, + {"display_name": "Rho Enumerator"}, + {"display_name": "Sigma Enumerator"}, + {"display_name": "Tau Enumerator"}, + {"display_name": "Beta Enumerator"}, + {"display_name": "Chi Enumerator"}, + {"display_name": "Omega Enumerator"}, + {"display_name": "Psi Enumerator"}, + {"display_name": "Phi Enumerator"}, + {"display_name": "Axon Enumerator"}, + {"display_name": "Aquarius Enumerator"}, + {"display_name": "Pegasus Enumerator"}, + {"display_name": "Petra Enumerator"}, + {"display_name": "Oasis Enumerator"}, + {"display_name": "Mike Enumerator"}, + {"display_name": "Cat Enumerator"}, + {"display_name": "Brutus Enumerator"}, + {"display_name": "Dee Enumerator"}, + {"display_name": "Jul Enumerator"}, + {"display_name": "Eve Enumerator"}, + {"display_name": "Frank Enumerator"}, + 
{"display_name": "Gus Enumerator"}, + {"display_name": "Hank Enumerator"}, + {"display_name": "Delta Enumerator"}, + {"display_name": "Ivy Enumerator"}, + {"display_name": "Jack Enumerator"}, + {"display_name": "Karl Enumerator"}, + {"display_name": "Liam Enumerator"}, + {"display_name": "Nora Enumerator"}, + {"display_name": "Mars Enumerator"}, + {"display_name": "Neptune Enumerator"}, + {"display_name": "Orion Enumerator"}, + {"display_name": "Oedipus Enumerator"}, + {"display_name": "Pandora Enumerator"}, + {"display_name": "Epsilon Enumerator"}, + {"display_name": "Zeta Enumerator"}, + {"display_name": "Eta Enumerator"}, + {"display_name": "Theta Enumerator"}, + {"display_name": "Iota Enumerator"}, + ], + }, + ], + ) + + def check(self, module_test, events): + assert any(e.data == "www.blacklanternsecurity.com" for e in events), "Failed to detect subdomain #1" + assert any(e.data == "asdf.blacklanternsecurity.com" for e in events), "Failed to detect subdomain #2" diff --git a/bbot/test/test_step_2/module_tests/test_module_subdomains.py b/bbot/test/test_step_2/module_tests/test_module_subdomains.py new file mode 100644 index 0000000000..e7fb494591 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_subdomains.py @@ -0,0 +1,29 @@ +from .base import ModuleTestBase + + +class TestSubdomains(ModuleTestBase): + modules_overrides = ["subdomains", "subdomaincenter"] + + async def setup_after_prep(self, module_test): + module_test.httpx_mock.add_response( + url="https://api.subdomain.center/?domain=blacklanternsecurity.com", + json=["asdfasdf.blacklanternsecurity.com", "zzzzzzzz.blacklanternsecurity.com"], + ) + + def check(self, module_test, events): + sub_file = module_test.scan.home / "subdomains.txt" + subdomains = set(open(sub_file).read().splitlines()) + assert subdomains == {"blacklanternsecurity.com"} + + +class TestSubdomainsUnresolved(TestSubdomains): + config_overrides = {"modules": {"subdomains": {"include_unresolved": True}}} + + def check(self, module_test, events): + sub_file = module_test.scan.home / "subdomains.txt" + subdomains = set(open(sub_file).read().splitlines()) + assert subdomains == { + "blacklanternsecurity.com", + "asdfasdf.blacklanternsecurity.com", + "zzzzzzzz.blacklanternsecurity.com", + } diff --git a/bbot/test/test_step_2/module_tests/test_module_teams.py b/bbot/test/test_step_2/module_tests/test_module_teams.py new file mode 100644 index 0000000000..3f573dc21b --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_teams.py @@ -0,0 +1,39 @@ +import httpx + +from .test_module_discord import TestDiscord as DiscordBase + + +class TestTeams(DiscordBase): + modules_overrides = ["teams", "excavate", "badsecrets", "httpx"] + + webhook_url = "https://evilcorp.webhook.office.com/webhookb2/deadbeef@deadbeef/IncomingWebhook/deadbeef/deadbeef" + config_overrides = {"modules": {"teams": {"webhook_url": webhook_url, "retries": 5}}} + + async def setup_after_prep(self, module_test): + self.custom_setup(module_test) + + def custom_response(request: httpx.Request): + module_test.request_count += 1 + if module_test.request_count == 2: + return httpx.Response(status_code=429, headers={"Retry-After": "0.01"}) + elif module_test.request_count == 3: + return httpx.Response( + status_code=400, + json={ + "error": { + "code": "WorkflowTriggerIsNotEnabled", + "message": "Could not execute workflow 'xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx' trigger 'manual' with state 'Disabled': trigger is not enabled.", + } + }, + ) + else: + return 
httpx.Response(status_code=200) + + module_test.httpx_mock.add_callback(custom_response, url=self.webhook_url) + + def check(self, module_test, events): + vulns = [e for e in events if e.type == "VULNERABILITY"] + findings = [e for e in events if e.type == "FINDING"] + assert len(findings) == 1 + assert len(vulns) == 2 + assert module_test.request_count == 5 diff --git a/bbot/test/test_step_2/module_tests/test_module_telerik.py b/bbot/test/test_step_2/module_tests/test_module_telerik.py new file mode 100644 index 0000000000..5302d72573 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_telerik.py @@ -0,0 +1,177 @@ +import re +from .base import ModuleTestBase, tempwordlist + + +class TestTelerik(ModuleTestBase): + targets = ["http://127.0.0.1:8888", "http://127.0.0.1:8888/telerik.aspx"] + modules_overrides = ["httpx", "telerik"] + config_overrides = {"modules": {"telerik": {"exploit_RAU_crypto": True}}} + + async def setup_before_prep(self, module_test): + # Simulate Telerik.Web.UI.WebResource.axd?type=rau detection + expect_args = {"method": "GET", "uri": "/Telerik.Web.UI.WebResource.axd", "query_string": "type=rau"} + respond_args = { + "response_data": '{ "message" : "RadAsyncUpload handler is registered successfully, however, it may not be accessed directly." }' + } + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + # Simulate Vulnerable Telerik.Web.UI.WebResource.axd + vuln_data = "ATTu5i4R+ViNFYO6kst0jC11wM/1iqH+W/isjhaDjNuCI7eJ/BY5d1E9eqZK27CJCMuon9u8/hgRIM/cTlgLlv4qOYjPBjs81Y3dAZAdtIr3TXiCmZi9M09a1BYMxjvGKfVky3b7PoOppeWS/3rglTwL1e8oyqLGx2NKUH5y8Cd+kLKV2f31J1sV4I5HTDKgDmvziJp3zlDrCb0Fi9ilKH+O1cbVx6SdBop/U30FxLaB/QIbt2N1rQHREJ5Skpgo7dilPxzBaTObdBhCVyB/FiJhenS/0u3h0Mpi6+A40SylICcyyxQha7+Uh7lEJ8Ne+2eTs4WqcaaQbvIhy7oHc+D0soxRKMZRjo7Up+UWHQJJh6KtWSCxUESNSdNcxjPQZE9HqsPlldVlkeC+ehSGce5bR0Ylots6Iz1OoCgMEWwxByeG3VzgxF6XpitL61A1hFcNo9euSTnCfOWh0vrQHON7DN5LpM9xr7SoD0Dnu01hZ9NS1PHhPLyN5WS87u5qdZp/z3Sxwc3wawIdo62RNf4Iz2gAKJZnPfxrE1mRn5kBe7f6O44rcuv6lcdao/DGlwbERKwRI6/n+FxGmc7H5iEKyihIwS2XUoOgsYTx5CWCDM8CuOXTk+H5fPYp9APRPbkD1IS9I/vRmvNPwWsgv8/7DzttqdBsGxiZJfCw1uZ7KSVmbItgXPAcscNxGEMaHXyJzkAl/mlM5/t/YSejwYoSW6jFfQcLdaVx2dpIpl5UmmQjFedzKeiNqpZDCk4yzXFHX24XUODYMJDtIJK2Hz1KTZmFG+LAOJjB9QOI58hFAnytcKay+JWFrzah/IvoNZxJUtlYdxw0YEyKs/ExET7AXgYQN0S+8j2PfaMMpzDSctTqpp5XBFV4Mt718GiqVnQJtWQv2p9Xl8XXOerBthbzzAciVcB8AV2WfZ51W3e4aX4kcyT/sCJhm7NR5WrNG5mX/ns0TTnGnzlPYhJcbu8uMFjMGDpXuhVyroJ7wmZucaIvesg0h5Y9cMEFviqsdy15vjMzFh+v9uO9Vicf6n9Z9JGSpWKE8wer2JU5b53Zw0cTfulAAffLWXnzOnfu&6R/cGaqQeHVAzdJ9wTFOyCsrMSTtqcjLe8AHwiPckPDUwecnJyNlkDYwDQpxGYQ9hs6YxhupK310sbCbtXB4H6Dz5rGNL40nkkyo4j2clmRr08jtFsPQ0RpE5BGsulPT3l0MxyAvPFMs8bMybUyAP+9RB9LoHE3Xo8BqDadX3HQakpPfGtiDMp+wxkWRgaNpCnXeY1QewWTF6z/duLzbu6CT6s+H4HgBHrOLTpemC2PvP2bDm0ySPHLdpapLYxU8nIYjLKIyYJgwv9S9jNckIVpcGVTWVul7CauCKxAB2mMnM9jJi8zfFwKajT5d2d9XfpkiVMrdlmikSB/ehyX1wQ==" + expect_args = { + "method": "POST", + "uri": "/Telerik.Web.UI.WebResource.axd", + "query_string": "type=rau", + "data": vuln_data, + } + respond_args = { + "response_data": '{"fileInfo":{"FileName":"RAU_crypto.bypass","ContentType":"text/html","ContentLength":5,"DateJson":"2019-01-02T03:04:05.067Z","Index":0}, 
"metaData":"CS8S/Z0J/b2982DRxDin0BBslA7fI0cWMuWlPu4W3FkE4tKaVoIEiAOtVlJ6D+0RQsfu8ox6gvMYxceQ0LtWyTkQBaIUa8LgLQg05DMaQuufHNx0YQ2ACi5neqDBvduj2MGiSGC0hNKzSWsHystZGUfFPLTZuJXYnff+WXurecuRzSI7d4Q1aj0bcTKKvfyQtH+fsTEafWRRZ99X/xgi4ON2OsRZ738uQHw7pQT2e1v7AtN46mxO/BmhEuZQr6m6HEvxK0pJRNkBhFUiQ+poeu8j3JzicOjvPDwFE4Rjqf3RVILt83XZrju2VpRIJqAEtf//znhH8BhT5BWvhnRo+J3ML5qoZLa2joE/QK8Ctf3UPvAFkHIUMdOH2mLNgZ+U87tdVE6fYfzvphZsLxmJRG45H8ZTZuYhJbOfei2LQ4fqHmr7p8KpJNVqoz/ev1dnBclAf5ayb40qJKEVsGXIbWEbIZwg7TTsLFc29aP7DPg=" }' + } + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + # Simulate SpellCheckHandler detection + expect_args = {"method": "GET", "uri": "/Telerik.Web.UI.SpellCheckHandler.axd"} + respond_args = {"status": 500} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + # Simulate SpellCheckHandler false positive detection + expect_args = {"method": "GET", "uri": "/AAAAAAAAAAAAAA.axd"} + respond_args = {"status": 200} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + # Simulate DialogHandler detection + expect_args = {"method": "GET", "uri": "/App_Master/Telerik.Web.UI.DialogHandler.aspx"} + respond_args = { + "response_data": '
Cannot deserialize dialog parameters. Please refresh the editor page. Error Message:Invalid length for a Base-64 char array or string.'
+        }
+        module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args)
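+
+        # Two ChartImage.axd probes fingerprint the handler: a valid encrypted
+        # ImageName should return 200, while an empty ImageName should return 500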
+        # Simulate ChartImage.axd Detection
+        expect_args = {
+            "method": "GET",
+            "uri": "/ChartImage.axd",
+            "query_string": "ImageName=bqYXJAqm315eEd6b%2bY4%2bGqZpe7a1kY0e89gfXli%2bjFw%3d",
+        }
+        respond_args = {"status": 200}
+        module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args)
+
+        expect_args = {"method": "GET", "uri": "/ChartImage.axd", "query_string": "ImageName="}
+        respond_args = {"status": 500}
+        module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args)
+
+        # Simulate Dialog Parameters in URL
+        expect_args = {"method": "GET", "uri": "/telerik.aspx"}
+        respond_args = {"response_data": '{"ImageManager":{"SerializedParameters":"MBwZB"}'}
+        module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args)
+
+        # Fallback
+        expect_args = {"uri": re.compile(r"^/\w{10}$")}
+        respond_args = {"status": 200}
+        module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args)
+
+    async def setup_after_prep(self, module_test):
+        module_test.scan.modules["telerik"].helpers.rand_string = lambda *args, **kwargs: "AAAAAAAAAAAAAA"
+        module_test.scan.modules["telerik"].telerikVersions = ["2014.2.724", "2014.3.1024", "2015.1.204"]
+        module_test.scan.modules["telerik"].DialogHandlerUrls = [
+            "Admin/ServerSide/Telerik.Web.UI.DialogHandler.aspx",
+            "App_Master/Telerik.Web.UI.DialogHandler.aspx",
+            "AsiCommon/Controls/ContentManagement/ContentDesigner/Telerik.Web.UI.DialogHandler.aspx",
+        ]
+
+    def check(self, module_test, events):
+        telerik_axd_detection = False
+        telerik_axd_vulnerable = False
+        telerik_spellcheck_detection = False
+        telerik_dialoghandler_detection = False
+        telerik_chartimage_detection = False
+        telerik_http_response_parameters_detection = False
+
+        for e in events:
+            if e.type == "FINDING" and "Telerik RAU AXD Handler detected" in e.data["description"]:
+                telerik_axd_detection = True
+                continue
+
+            if (
+                e.type == "VULNERABILITY"
+                and "Confirmed Vulnerable Telerik (version: 2014.3.1024)" in e.data["description"]
+            ):
+                telerik_axd_vulnerable = True
+                continue
+
+            if e.type == "FINDING" and "Telerik DialogHandler detected" in e.data["description"]:
+                telerik_dialoghandler_detection = True
+                continue
+
+            if e.type == "FINDING" and "Telerik SpellCheckHandler detected" in e.data["description"]:
+                telerik_spellcheck_detection = True
+                continue
+
+            if e.type == "FINDING" and "Telerik ChartImage AXD Handler Detected" in e.data["description"]:
+                telerik_chartimage_detection = True
+                continue
+
+            if (
+                e.type == "FINDING"
+                and "Telerik DialogHandler [SerializedParameters] Detected in HTTP Response" in e.data["description"]
+            ):
+                telerik_http_response_parameters_detection = True
+                continue
+
+        assert telerik_axd_detection, "Telerik AXD detection failed"
+        assert telerik_axd_vulnerable, "Telerik vulnerable AXD detection failed"
+        assert telerik_spellcheck_detection, "Telerik spellcheck detection failed"
+        assert telerik_dialoghandler_detection, "Telerik dialoghandler detection failed"
+        assert telerik_chartimage_detection, "Telerik chartimage detection failed"
+        assert telerik_http_response_parameters_detection, "Telerik SerializedParameters detection failed"
+
+
+class TestTelerikDialogHandler_includesubdirs(TestTelerik):
+    targets = ["http://127.0.0.1:8888/", "http://127.0.0.1:8888/temp/"]
+    config_overrides = {
+        "modules": {
+            "telerik": {
+                "include_subdirs": True,
+            },
+        }
+    }
+    modules_overrides = ["httpx",
"telerik"] + + async def setup_before_prep(self, module_test): + # Simulate NO SpellCheckHandler detection (not testing for that with this test) + expect_args = {"method": "GET", "uri": "/Telerik.Web.UI.SpellCheckHandler.axd"} + respond_args = {"status": 404} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + # Simulate DialogHandler detection + expect_args = {"method": "GET", "uri": "/App_Master/Telerik.Web.UI.DialogHandler.aspx"} + respond_args = { + "response_data": '
Cannot deserialize dialog parameters. Please refresh the editor page. Error Message:Invalid length for a Base-64 char array or string.'
+        }
+        module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args)
+
+        # Simulate DialogHandler detection (in /temp)
+        expect_args = {"method": "GET", "uri": "/temp/App_Master/Telerik.Web.UI.DialogHandler.aspx"}
+        respond_args = {
+            "response_data": 'Cannot deserialize dialog parameters. Please refresh the editor page. Error Message:Invalid length for a Base-64 char array or string.'
+        }
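+        # The second target is http://127.0.0.1:8888/temp/, so httpx emits the /temp/
+        # URL, and include_subdirs=True makes telerik probe that subdirectory as well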
+        module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args)
+
+        # Simulate /temp directory detection
+        expect_args = {"method": "GET", "uri": "/temp/"}
+        respond_args = {"response_data": "Temporary directory found"}
+        module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args)
+
+        # Fallback
+        expect_args = {"method": "GET", "uri": "/"}
+        respond_args = {"response_data": "alive"}
+        module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args)
+
+    async def setup_after_prep(self, module_test):
+        module_test.scan.modules["telerik"].telerikVersions = ["2014.2.724", "2014.3.1024", "2015.1.204"]
+        module_test.scan.modules["telerik"].DialogHandlerUrls = [
+            "App_Master/Telerik.Web.UI.DialogHandler.aspx",
+        ]
+
+    def check(self, module_test, events):
+        # Both DialogHandler instances (root and /temp) should produce findings
+        finding_count = sum(
+            1 for e in events if e.type == "FINDING" and "Telerik DialogHandler detected" in e.data["description"]
+        )
+        assert finding_count == 2, f"Expected 2 FINDING events (root and /temp), got {finding_count}"
diff --git a/bbot/test/test_step_2/module_tests/test_module_trickest.py b/bbot/test/test_step_2/module_tests/test_module_trickest.py
new file mode 100644
index 0000000000..457e8deef2
--- /dev/null
+++ b/bbot/test/test_step_2/module_tests/test_module_trickest.py
@@ -0,0 +1,26 @@
+from .base import ModuleTestBase
+
+
+class TestTrickest(ModuleTestBase):
+    config_overrides = {"modules": {"trickest": {"api_key": "deadbeef"}}}
+
+    async def setup_before_prep(self, module_test):
+        module_test.httpx_mock.add_response(
+            url="https://api.trickest.io/solutions/v1/public/solution/a7cba1f1-df07-4a5c-876a-953f178996be/dataset",
+            match_headers={"Authorization": "Token deadbeef"},
+            json={},
+        )
+        module_test.httpx_mock.add_response(
+            url="https://api.trickest.io/solutions/v1/public/solution/a7cba1f1-df07-4a5c-876a-953f178996be/view?q=hostname%20~%20%22.blacklanternsecurity.com%22&dataset_id=a0a49ca9-03bb-45e0-aa9a-ad59082ebdfc&limit=50&offset=0&select=hostname&orderby=hostname",
+            match_headers={"Authorization": "Token deadbeef"},
+            json={"results": [{"hostname": "asdf.blacklanternsecurity.com"}]},
+        )
+        module_test.httpx_mock.add_response(
+            url="https://api.trickest.io/solutions/v1/public/solution/a7cba1f1-df07-4a5c-876a-953f178996be/view?q=hostname%20~%20%22.blacklanternsecurity.com%22&dataset_id=a0a49ca9-03bb-45e0-aa9a-ad59082ebdfc&limit=50&offset=50&select=hostname&orderby=hostname",
+            match_headers={"Authorization": "Token deadbeef"},
+            json={"results": [{"hostname": "www.blacklanternsecurity.com"}]},
+        )
+
+    def check(self, module_test, events):
+        assert any(e.data == "asdf.blacklanternsecurity.com" for e in events), "Failed to detect subdomain"
+        assert any(e.data == "www.blacklanternsecurity.com" for e in events), "Failed to detect subdomain"
diff --git a/bbot/test/test_step_2/module_tests/test_module_trufflehog.py b/bbot/test/test_step_2/module_tests/test_module_trufflehog.py
new file mode 100644
index 0000000000..80ae3e3e86
--- /dev/null
+++ b/bbot/test/test_step_2/module_tests/test_module_trufflehog.py
@@ -0,0 +1,1280 @@
+import subprocess
+import shutil
+import io
+import zipfile
+import tarfile
+from pathlib import Path
+
+from .base import ModuleTestBase
+
+
+class TestTrufflehog(ModuleTestBase):
+    config_overrides = {"modules": {"postman_download": {"api_key": "asdf"}, "github_org": {"api_key": "asdf"}}}
+    modules_overrides = [
+        "github_org",
+        "speculate",
+        "git_clone",
+        "github_workflows",
"dockerhub", + "docker_pull", + "postman", + "postman_download", + "trufflehog", + ] + + file_content = "Verifyable Secret:\nhttps://admin:admin@the-internet.herokuapp.com/basic_auth\n\nUnverifyable Secret:\nhttps://admin:admin@internal.host.com" + + async def setup_before_prep(self, module_test): + module_test.httpx_mock.add_response(url="https://api.github.com/zen") + module_test.httpx_mock.add_response( + url="https://api.getpostman.com/me", + json={ + "user": { + "id": 000000, + "username": "test_key", + "email": "blacklanternsecurity@test.com", + "fullName": "Test Key", + "avatar": "", + "isPublic": True, + "teamId": 0, + "teamDomain": "", + "roles": ["user"], + }, + "operations": [ + {"name": "api_object_usage", "limit": 3, "usage": 0, "overage": 0}, + {"name": "collection_run_limit", "limit": 25, "usage": 0, "overage": 0}, + {"name": "file_storage_limit", "limit": 20, "usage": 0, "overage": 0}, + {"name": "flow_count", "limit": 5, "usage": 0, "overage": 0}, + {"name": "flow_requests", "limit": 5000, "usage": 0, "overage": 0}, + {"name": "performance_test_limit", "limit": 25, "usage": 0, "overage": 0}, + {"name": "postbot_calls", "limit": 50, "usage": 0, "overage": 0}, + {"name": "reusable_packages", "limit": 3, "usage": 0, "overage": 0}, + {"name": "test_data_retrieval", "limit": 1000, "usage": 0, "overage": 0}, + {"name": "test_data_storage", "limit": 10, "usage": 0, "overage": 0}, + {"name": "mock_usage", "limit": 1000, "usage": 0, "overage": 0}, + {"name": "monitor_request_runs", "limit": 1000, "usage": 0, "overage": 0}, + {"name": "api_usage", "limit": 1000, "usage": 0, "overage": 0}, + ], + }, + ) + module_test.httpx_mock.add_response( + url="https://api.github.com/orgs/blacklanternsecurity", + json={ + "login": "blacklanternsecurity", + "id": 25311592, + "node_id": "MDEyOk9yZ2FuaXphdGlvbjI1MzExNTky", + "url": "https://api.github.com/orgs/blacklanternsecurity", + "repos_url": "https://api.github.com/orgs/blacklanternsecurity/repos", + "events_url": "https://api.github.com/orgs/blacklanternsecurity/events", + "hooks_url": "https://api.github.com/orgs/blacklanternsecurity/hooks", + "issues_url": "https://api.github.com/orgs/blacklanternsecurity/issues", + "members_url": "https://api.github.com/orgs/blacklanternsecurity/members{/member}", + "public_members_url": "https://api.github.com/orgs/blacklanternsecurity/public_members{/member}", + "avatar_url": "https://avatars.githubusercontent.com/u/25311592?v=4", + "description": "Security Organization", + "name": "Black Lantern Security", + "company": None, + "blog": "www.blacklanternsecurity.com", + "location": "Charleston, SC", + "email": None, + "twitter_username": None, + "is_verified": False, + "has_organization_projects": True, + "has_repository_projects": True, + "public_repos": 70, + "public_gists": 0, + "followers": 415, + "following": 0, + "html_url": "https://github.com/blacklanternsecurity", + "created_at": "2017-01-24T00:14:46Z", + "updated_at": "2022-03-28T11:39:03Z", + "archived_at": None, + "type": "Organization", + }, + ) + module_test.httpx_mock.add_response( + url="https://api.github.com/orgs/blacklanternsecurity/repos?per_page=100&page=1", + json=[ + { + "id": 459780477, + "node_id": "R_kgDOG2exfQ", + "name": "test_keys", + "full_name": "blacklanternsecurity/test_keys", + "private": False, + "owner": { + "login": "blacklanternsecurity", + "id": 79229934, + "node_id": "MDEyOk9yZ2FuaXphdGlvbjc5MjI5OTM0", + "avatar_url": "https://avatars.githubusercontent.com/u/79229934?v=4", + "gravatar_id": "", + "url": 
"https://api.github.com/users/blacklanternsecurity", + "html_url": "https://github.com/blacklanternsecurity", + "followers_url": "https://api.github.com/users/blacklanternsecurity/followers", + "following_url": "https://api.github.com/users/blacklanternsecurity/following{/other_user}", + "gists_url": "https://api.github.com/users/blacklanternsecurity/gists{/gist_id}", + "starred_url": "https://api.github.com/users/blacklanternsecurity/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/blacklanternsecurity/subscriptions", + "organizations_url": "https://api.github.com/users/blacklanternsecurity/orgs", + "repos_url": "https://api.github.com/users/blacklanternsecurity/repos", + "events_url": "https://api.github.com/users/blacklanternsecurity/events{/privacy}", + "received_events_url": "https://api.github.com/users/blacklanternsecurity/received_events", + "type": "Organization", + "site_admin": False, + }, + "html_url": "https://github.com/blacklanternsecurity/test_keys", + "description": None, + "fork": False, + "url": "https://api.github.com/repos/blacklanternsecurity/test_keys", + "forks_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/forks", + "keys_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/keys{/key_id}", + "collaborators_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/collaborators{/collaborator}", + "teams_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/teams", + "hooks_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/hooks", + "issue_events_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/issues/events{/number}", + "events_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/events", + "assignees_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/assignees{/user}", + "branches_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/branches{/branch}", + "tags_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/tags", + "blobs_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/git/blobs{/sha}", + "git_tags_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/git/tags{/sha}", + "git_refs_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/git/refs{/sha}", + "trees_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/git/trees{/sha}", + "statuses_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/statuses/{sha}", + "languages_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/languages", + "stargazers_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/stargazers", + "contributors_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/contributors", + "subscribers_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/subscribers", + "subscription_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/subscription", + "commits_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/commits{/sha}", + "git_commits_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/git/commits{/sha}", + "comments_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/comments{/number}", + "issue_comment_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/issues/comments{/number}", + "contents_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/contents/{+path}", + "compare_url": 
"https://api.github.com/repos/blacklanternsecurity/test_keys/compare/{base}...{head}", + "merges_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/merges", + "archive_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/{archive_format}{/ref}", + "downloads_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/downloads", + "issues_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/issues{/number}", + "pulls_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/pulls{/number}", + "milestones_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/milestones{/number}", + "notifications_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/notifications{?since,all,participating}", + "labels_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/labels{/name}", + "releases_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/releases{/id}", + "deployments_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/deployments", + "created_at": "2022-02-15T23:10:51Z", + "updated_at": "2023-09-02T12:20:13Z", + "pushed_at": "2023-10-19T02:56:46Z", + "git_url": "git://github.com/blacklanternsecurity/test_keys.git", + "ssh_url": "git@github.com:blacklanternsecurity/test_keys.git", + "clone_url": "https://github.com/blacklanternsecurity/test_keys.git", + "svn_url": "https://github.com/blacklanternsecurity/test_keys", + "homepage": None, + "size": 2, + "stargazers_count": 2, + "watchers_count": 2, + "language": None, + "has_issues": True, + "has_projects": True, + "has_downloads": True, + "has_wiki": True, + "has_pages": False, + "has_discussions": False, + "forks_count": 32, + "mirror_url": None, + "archived": False, + "disabled": False, + "open_issues_count": 2, + "license": None, + "allow_forking": True, + "is_template": False, + "web_commit_signoff_required": False, + "topics": [], + "visibility": "public", + "forks": 32, + "open_issues": 2, + "watchers": 2, + "default_branch": "main", + "permissions": {"admin": False, "maintain": False, "push": False, "triage": False, "pull": True}, + }, + { + "id": 459780477, + "node_id": "R_kgDOG2exfQ", + "name": "bbot", + "full_name": "blacklanternsecurity/bbot", + "private": False, + "owner": { + "login": "blacklanternsecurity", + "id": 79229934, + "node_id": "MDEyOk9yZ2FuaXphdGlvbjc5MjI5OTM0", + "avatar_url": "https://avatars.githubusercontent.com/u/79229934?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/blacklanternsecurity", + "html_url": "https://github.com/blacklanternsecurity", + "followers_url": "https://api.github.com/users/blacklanternsecurity/followers", + "following_url": "https://api.github.com/users/blacklanternsecurity/following{/other_user}", + "gists_url": "https://api.github.com/users/blacklanternsecurity/gists{/gist_id}", + "starred_url": "https://api.github.com/users/blacklanternsecurity/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/blacklanternsecurity/subscriptions", + "organizations_url": "https://api.github.com/users/blacklanternsecurity/orgs", + "repos_url": "https://api.github.com/users/blacklanternsecurity/repos", + "events_url": "https://api.github.com/users/blacklanternsecurity/events{/privacy}", + "received_events_url": "https://api.github.com/users/blacklanternsecurity/received_events", + "type": "Organization", + "site_admin": False, + }, + "html_url": "https://github.com/blacklanternsecurity/bbot", + "description": None, + "fork": False, + "url": 
"https://api.github.com/repos/blacklanternsecurity/bbot", + "forks_url": "https://api.github.com/repos/blacklanternsecurity/bbot/forks", + "keys_url": "https://api.github.com/repos/blacklanternsecurity/bbot/keys{/key_id}", + "collaborators_url": "https://api.github.com/repos/blacklanternsecurity/bbot/collaborators{/collaborator}", + "teams_url": "https://api.github.com/repos/blacklanternsecurity/bbot/teams", + "hooks_url": "https://api.github.com/repos/blacklanternsecurity/bbot/hooks", + "issue_events_url": "https://api.github.com/repos/blacklanternsecurity/bbot/issues/events{/number}", + "events_url": "https://api.github.com/repos/blacklanternsecurity/bbot/events", + "assignees_url": "https://api.github.com/repos/blacklanternsecurity/bbot/assignees{/user}", + "branches_url": "https://api.github.com/repos/blacklanternsecurity/bbot/branches{/branch}", + "tags_url": "https://api.github.com/repos/blacklanternsecurity/bbot/tags", + "blobs_url": "https://api.github.com/repos/blacklanternsecurity/bbot/git/blobs{/sha}", + "git_tags_url": "https://api.github.com/repos/blacklanternsecurity/bbot/git/tags{/sha}", + "git_refs_url": "https://api.github.com/repos/blacklanternsecurity/bbot/git/refs{/sha}", + "trees_url": "https://api.github.com/repos/blacklanternsecurity/bbot/git/trees{/sha}", + "statuses_url": "https://api.github.com/repos/blacklanternsecurity/bbot/statuses/{sha}", + "languages_url": "https://api.github.com/repos/blacklanternsecurity/bbot/languages", + "stargazers_url": "https://api.github.com/repos/blacklanternsecurity/bbot/stargazers", + "contributors_url": "https://api.github.com/repos/blacklanternsecurity/bbot/contributors", + "subscribers_url": "https://api.github.com/repos/blacklanternsecurity/bbot/subscribers", + "subscription_url": "https://api.github.com/repos/blacklanternsecurity/bbot/subscription", + "commits_url": "https://api.github.com/repos/blacklanternsecurity/bbot/commits{/sha}", + "git_commits_url": "https://api.github.com/repos/blacklanternsecurity/bbot/git/commits{/sha}", + "comments_url": "https://api.github.com/repos/blacklanternsecurity/bbot/comments{/number}", + "issue_comment_url": "https://api.github.com/repos/blacklanternsecurity/bbot/issues/comments{/number}", + "contents_url": "https://api.github.com/repos/blacklanternsecurity/bbot/contents/{+path}", + "compare_url": "https://api.github.com/repos/blacklanternsecurity/bbot/compare/{base}...{head}", + "merges_url": "https://api.github.com/repos/blacklanternsecurity/bbot/merges", + "archive_url": "https://api.github.com/repos/blacklanternsecurity/bbot/{archive_format}{/ref}", + "downloads_url": "https://api.github.com/repos/blacklanternsecurity/bbot/downloads", + "issues_url": "https://api.github.com/repos/blacklanternsecurity/bbot/issues{/number}", + "pulls_url": "https://api.github.com/repos/blacklanternsecurity/bbot/pulls{/number}", + "milestones_url": "https://api.github.com/repos/blacklanternsecurity/bbot/milestones{/number}", + "notifications_url": "https://api.github.com/repos/blacklanternsecurity/bbot/notifications{?since,all,participating}", + "labels_url": "https://api.github.com/repos/blacklanternsecurity/bbot/labels{/name}", + "releases_url": "https://api.github.com/repos/blacklanternsecurity/bbot/releases{/id}", + "deployments_url": "https://api.github.com/repos/blacklanternsecurity/bbot/deployments", + "created_at": "2022-02-15T23:10:51Z", + "updated_at": "2023-09-02T12:20:13Z", + "pushed_at": "2023-10-19T02:56:46Z", + "git_url": "git://github.com/blacklanternsecurity/bbot.git", + "ssh_url": 
"git@github.com:blacklanternsecurity/bbot.git", + "clone_url": "https://github.com/blacklanternsecurity/bbot.git", + "svn_url": "https://github.com/blacklanternsecurity/bbot", + "homepage": None, + "size": 2, + "stargazers_count": 2, + "watchers_count": 2, + "language": None, + "has_issues": True, + "has_projects": True, + "has_downloads": True, + "has_wiki": True, + "has_pages": False, + "has_discussions": False, + "forks_count": 32, + "mirror_url": None, + "archived": False, + "disabled": False, + "open_issues_count": 2, + "license": None, + "allow_forking": True, + "is_template": False, + "web_commit_signoff_required": False, + "topics": [], + "visibility": "public", + "forks": 32, + "open_issues": 2, + "watchers": 2, + "default_branch": "main", + "permissions": {"admin": False, "maintain": False, "push": False, "triage": False, "pull": True}, + }, + ], + ) + module_test.httpx_mock.add_response( + url="https://api.github.com/repos/blacklanternsecurity/bbot/actions/workflows?per_page=100&page=1", + json={ + "total_count": 3, + "workflows": [ + { + "id": 22452226, + "node_id": "W_kwDOG_O3ns4BVpgC", + "name": "tests", + "path": ".github/workflows/tests.yml", + "state": "active", + "created_at": "2022-03-23T15:09:22.000Z", + "updated_at": "2022-09-27T17:49:34.000Z", + "url": "https://api.github.com/repos/blacklanternsecurity/bbot/actions/workflows/22452226", + "html_url": "https://github.com/blacklanternsecurity/bbot/blob/stable/.github/workflows/tests.yml", + "badge_url": "https://github.com/blacklanternsecurity/bbot/workflows/tests/badge.svg", + }, + ], + }, + ) + module_test.httpx_mock.add_response( + url="https://api.github.com/repos/blacklanternsecurity/bbot/actions/workflows/22452226/runs?status=success&per_page=1", + json={ + "total_count": 2993, + "workflow_runs": [ + { + "id": 8839360698, + "name": "tests", + "node_id": "WFR_kwLOG_O3ns8AAAACDt3wug", + "head_branch": "dnsbrute-helperify", + "head_sha": "c5de1360e8e5ccba04b23035f675a529282b7dc2", + "path": ".github/workflows/tests.yml", + "display_title": "Helperify Massdns", + "run_number": 4520, + "event": "pull_request", + "status": "completed", + "conclusion": "success", + "workflow_id": 22452226, + "check_suite_id": 23162098295, + "check_suite_node_id": "CS_kwDOG_O3ns8AAAAFZJGSdw", + "url": "https://api.github.com/repos/blacklanternsecurity/bbot/actions/runs/8839360698", + "html_url": "https://github.com/blacklanternsecurity/bbot/actions/runs/8839360698", + "pull_requests": [ + { + "url": "https://api.github.com/repos/blacklanternsecurity/bbot/pulls/1303", + "id": 1839332952, + "number": 1303, + "head": { + "ref": "dnsbrute-helperify", + "sha": "c5de1360e8e5ccba04b23035f675a529282b7dc2", + "repo": { + "id": 468957086, + "url": "https://api.github.com/repos/blacklanternsecurity/bbot", + "name": "bbot", + }, + }, + "base": { + "ref": "faster-regexes", + "sha": "7baf219c7f3a4ba165639c5ddb62322453a8aea8", + "repo": { + "id": 468957086, + "url": "https://api.github.com/repos/blacklanternsecurity/bbot", + "name": "bbot", + }, + }, + } + ], + "created_at": "2024-04-25T21:04:32Z", + "updated_at": "2024-04-25T21:19:43Z", + "actor": { + "login": "TheTechromancer", + "id": 20261699, + "node_id": "MDQ6VXNlcjIwMjYxNjk5", + "avatar_url": "https://avatars.githubusercontent.com/u/20261699?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/TheTechromancer", + "html_url": "https://github.com/TheTechromancer", + "followers_url": "https://api.github.com/users/TheTechromancer/followers", + "following_url": 
"https://api.github.com/users/TheTechromancer/following{/other_user}", + "gists_url": "https://api.github.com/users/TheTechromancer/gists{/gist_id}", + "starred_url": "https://api.github.com/users/TheTechromancer/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/TheTechromancer/subscriptions", + "organizations_url": "https://api.github.com/users/TheTechromancer/orgs", + "repos_url": "https://api.github.com/users/TheTechromancer/repos", + "events_url": "https://api.github.com/users/TheTechromancer/events{/privacy}", + "received_events_url": "https://api.github.com/users/TheTechromancer/received_events", + "type": "User", + "site_admin": False, + }, + "run_attempt": 1, + "referenced_workflows": [], + "run_started_at": "2024-04-25T21:04:32Z", + "triggering_actor": { + "login": "TheTechromancer", + "id": 20261699, + "node_id": "MDQ6VXNlcjIwMjYxNjk5", + "avatar_url": "https://avatars.githubusercontent.com/u/20261699?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/TheTechromancer", + "html_url": "https://github.com/TheTechromancer", + "followers_url": "https://api.github.com/users/TheTechromancer/followers", + "following_url": "https://api.github.com/users/TheTechromancer/following{/other_user}", + "gists_url": "https://api.github.com/users/TheTechromancer/gists{/gist_id}", + "starred_url": "https://api.github.com/users/TheTechromancer/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/TheTechromancer/subscriptions", + "organizations_url": "https://api.github.com/users/TheTechromancer/orgs", + "repos_url": "https://api.github.com/users/TheTechromancer/repos", + "events_url": "https://api.github.com/users/TheTechromancer/events{/privacy}", + "received_events_url": "https://api.github.com/users/TheTechromancer/received_events", + "type": "User", + "site_admin": False, + }, + "jobs_url": "https://api.github.com/repos/blacklanternsecurity/bbot/actions/runs/8839360698/jobs", + "logs_url": "https://api.github.com/repos/blacklanternsecurity/bbot/actions/runs/8839360698/logs", + "check_suite_url": "https://api.github.com/repos/blacklanternsecurity/bbot/check-suites/23162098295", + "artifacts_url": "https://api.github.com/repos/blacklanternsecurity/bbot/actions/runs/8839360698/artifacts", + "cancel_url": "https://api.github.com/repos/blacklanternsecurity/bbot/actions/runs/8839360698/cancel", + "rerun_url": "https://api.github.com/repos/blacklanternsecurity/bbot/actions/runs/8839360698/rerun", + "previous_attempt_url": None, + "workflow_url": "https://api.github.com/repos/blacklanternsecurity/bbot/actions/workflows/22452226", + "head_commit": { + "id": "c5de1360e8e5ccba04b23035f675a529282b7dc2", + "tree_id": "fe9b345c0745a5bbacb806225e92e1c48fccf35c", + "message": "remove debug message", + "timestamp": "2024-04-25T21:02:37Z", + "author": {"name": "TheTechromancer", "email": "thetechromancer@protonmail.com"}, + "committer": {"name": "TheTechromancer", "email": "thetechromancer@protonmail.com"}, + }, + "repository": { + "id": 468957086, + "node_id": "R_kgDOG_O3ng", + "name": "bbot", + "full_name": "blacklanternsecurity/bbot", + "private": False, + "owner": { + "login": "blacklanternsecurity", + "id": 25311592, + "node_id": "MDEyOk9yZ2FuaXphdGlvbjI1MzExNTky", + "avatar_url": "https://avatars.githubusercontent.com/u/25311592?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/blacklanternsecurity", + "html_url": "https://github.com/blacklanternsecurity", + "followers_url": 
"https://api.github.com/users/blacklanternsecurity/followers", + "following_url": "https://api.github.com/users/blacklanternsecurity/following{/other_user}", + "gists_url": "https://api.github.com/users/blacklanternsecurity/gists{/gist_id}", + "starred_url": "https://api.github.com/users/blacklanternsecurity/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/blacklanternsecurity/subscriptions", + "organizations_url": "https://api.github.com/users/blacklanternsecurity/orgs", + "repos_url": "https://api.github.com/users/blacklanternsecurity/repos", + "events_url": "https://api.github.com/users/blacklanternsecurity/events{/privacy}", + "received_events_url": "https://api.github.com/users/blacklanternsecurity/received_events", + "type": "Organization", + "site_admin": False, + }, + "html_url": "https://github.com/blacklanternsecurity/bbot", + "description": "A recursive internet scanner for hackers.", + "fork": False, + "url": "https://api.github.com/repos/blacklanternsecurity/bbot", + "forks_url": "https://api.github.com/repos/blacklanternsecurity/bbot/forks", + "keys_url": "https://api.github.com/repos/blacklanternsecurity/bbot/keys{/key_id}", + "collaborators_url": "https://api.github.com/repos/blacklanternsecurity/bbot/collaborators{/collaborator}", + "teams_url": "https://api.github.com/repos/blacklanternsecurity/bbot/teams", + "hooks_url": "https://api.github.com/repos/blacklanternsecurity/bbot/hooks", + "issue_events_url": "https://api.github.com/repos/blacklanternsecurity/bbot/issues/events{/number}", + "events_url": "https://api.github.com/repos/blacklanternsecurity/bbot/events", + "assignees_url": "https://api.github.com/repos/blacklanternsecurity/bbot/assignees{/user}", + "branches_url": "https://api.github.com/repos/blacklanternsecurity/bbot/branches{/branch}", + "tags_url": "https://api.github.com/repos/blacklanternsecurity/bbot/tags", + "blobs_url": "https://api.github.com/repos/blacklanternsecurity/bbot/git/blobs{/sha}", + "git_tags_url": "https://api.github.com/repos/blacklanternsecurity/bbot/git/tags{/sha}", + "git_refs_url": "https://api.github.com/repos/blacklanternsecurity/bbot/git/refs{/sha}", + "trees_url": "https://api.github.com/repos/blacklanternsecurity/bbot/git/trees{/sha}", + "statuses_url": "https://api.github.com/repos/blacklanternsecurity/bbot/statuses/{sha}", + "languages_url": "https://api.github.com/repos/blacklanternsecurity/bbot/languages", + "stargazers_url": "https://api.github.com/repos/blacklanternsecurity/bbot/stargazers", + "contributors_url": "https://api.github.com/repos/blacklanternsecurity/bbot/contributors", + "subscribers_url": "https://api.github.com/repos/blacklanternsecurity/bbot/subscribers", + "subscription_url": "https://api.github.com/repos/blacklanternsecurity/bbot/subscription", + "commits_url": "https://api.github.com/repos/blacklanternsecurity/bbot/commits{/sha}", + "git_commits_url": "https://api.github.com/repos/blacklanternsecurity/bbot/git/commits{/sha}", + "comments_url": "https://api.github.com/repos/blacklanternsecurity/bbot/comments{/number}", + "issue_comment_url": "https://api.github.com/repos/blacklanternsecurity/bbot/issues/comments{/number}", + "contents_url": "https://api.github.com/repos/blacklanternsecurity/bbot/contents/{+path}", + "compare_url": "https://api.github.com/repos/blacklanternsecurity/bbot/compare/{base}...{head}", + "merges_url": "https://api.github.com/repos/blacklanternsecurity/bbot/merges", + "archive_url": 
"https://api.github.com/repos/blacklanternsecurity/bbot/{archive_format}{/ref}", + "downloads_url": "https://api.github.com/repos/blacklanternsecurity/bbot/downloads", + "issues_url": "https://api.github.com/repos/blacklanternsecurity/bbot/issues{/number}", + "pulls_url": "https://api.github.com/repos/blacklanternsecurity/bbot/pulls{/number}", + "milestones_url": "https://api.github.com/repos/blacklanternsecurity/bbot/milestones{/number}", + "notifications_url": "https://api.github.com/repos/blacklanternsecurity/bbot/notifications{?since,all,participating}", + "labels_url": "https://api.github.com/repos/blacklanternsecurity/bbot/labels{/name}", + "releases_url": "https://api.github.com/repos/blacklanternsecurity/bbot/releases{/id}", + "deployments_url": "https://api.github.com/repos/blacklanternsecurity/bbot/deployments", + }, + "head_repository": { + "id": 468957086, + "node_id": "R_kgDOG_O3ng", + "name": "bbot", + "full_name": "blacklanternsecurity/bbot", + "private": False, + "owner": { + "login": "blacklanternsecurity", + "id": 25311592, + "node_id": "MDEyOk9yZ2FuaXphdGlvbjI1MzExNTky", + "avatar_url": "https://avatars.githubusercontent.com/u/25311592?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/blacklanternsecurity", + "html_url": "https://github.com/blacklanternsecurity", + "followers_url": "https://api.github.com/users/blacklanternsecurity/followers", + "following_url": "https://api.github.com/users/blacklanternsecurity/following{/other_user}", + "gists_url": "https://api.github.com/users/blacklanternsecurity/gists{/gist_id}", + "starred_url": "https://api.github.com/users/blacklanternsecurity/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/blacklanternsecurity/subscriptions", + "organizations_url": "https://api.github.com/users/blacklanternsecurity/orgs", + "repos_url": "https://api.github.com/users/blacklanternsecurity/repos", + "events_url": "https://api.github.com/users/blacklanternsecurity/events{/privacy}", + "received_events_url": "https://api.github.com/users/blacklanternsecurity/received_events", + "type": "Organization", + "site_admin": False, + }, + "html_url": "https://github.com/blacklanternsecurity/bbot", + "description": "A recursive internet scanner for hackers.", + "fork": False, + "url": "https://api.github.com/repos/blacklanternsecurity/bbot", + "forks_url": "https://api.github.com/repos/blacklanternsecurity/bbot/forks", + "keys_url": "https://api.github.com/repos/blacklanternsecurity/bbot/keys{/key_id}", + "collaborators_url": "https://api.github.com/repos/blacklanternsecurity/bbot/collaborators{/collaborator}", + "teams_url": "https://api.github.com/repos/blacklanternsecurity/bbot/teams", + "hooks_url": "https://api.github.com/repos/blacklanternsecurity/bbot/hooks", + "issue_events_url": "https://api.github.com/repos/blacklanternsecurity/bbot/issues/events{/number}", + "events_url": "https://api.github.com/repos/blacklanternsecurity/bbot/events", + "assignees_url": "https://api.github.com/repos/blacklanternsecurity/bbot/assignees{/user}", + "branches_url": "https://api.github.com/repos/blacklanternsecurity/bbot/branches{/branch}", + "tags_url": "https://api.github.com/repos/blacklanternsecurity/bbot/tags", + "blobs_url": "https://api.github.com/repos/blacklanternsecurity/bbot/git/blobs{/sha}", + "git_tags_url": "https://api.github.com/repos/blacklanternsecurity/bbot/git/tags{/sha}", + "git_refs_url": "https://api.github.com/repos/blacklanternsecurity/bbot/git/refs{/sha}", + "trees_url": 
"https://api.github.com/repos/blacklanternsecurity/bbot/git/trees{/sha}", + "statuses_url": "https://api.github.com/repos/blacklanternsecurity/bbot/statuses/{sha}", + "languages_url": "https://api.github.com/repos/blacklanternsecurity/bbot/languages", + "stargazers_url": "https://api.github.com/repos/blacklanternsecurity/bbot/stargazers", + "contributors_url": "https://api.github.com/repos/blacklanternsecurity/bbot/contributors", + "subscribers_url": "https://api.github.com/repos/blacklanternsecurity/bbot/subscribers", + "subscription_url": "https://api.github.com/repos/blacklanternsecurity/bbot/subscription", + "commits_url": "https://api.github.com/repos/blacklanternsecurity/bbot/commits{/sha}", + "git_commits_url": "https://api.github.com/repos/blacklanternsecurity/bbot/git/commits{/sha}", + "comments_url": "https://api.github.com/repos/blacklanternsecurity/bbot/comments{/number}", + "issue_comment_url": "https://api.github.com/repos/blacklanternsecurity/bbot/issues/comments{/number}", + "contents_url": "https://api.github.com/repos/blacklanternsecurity/bbot/contents/{+path}", + "compare_url": "https://api.github.com/repos/blacklanternsecurity/bbot/compare/{base}...{head}", + "merges_url": "https://api.github.com/repos/blacklanternsecurity/bbot/merges", + "archive_url": "https://api.github.com/repos/blacklanternsecurity/bbot/{archive_format}{/ref}", + "downloads_url": "https://api.github.com/repos/blacklanternsecurity/bbot/downloads", + "issues_url": "https://api.github.com/repos/blacklanternsecurity/bbot/issues{/number}", + "pulls_url": "https://api.github.com/repos/blacklanternsecurity/bbot/pulls{/number}", + "milestones_url": "https://api.github.com/repos/blacklanternsecurity/bbot/milestones{/number}", + "notifications_url": "https://api.github.com/repos/blacklanternsecurity/bbot/notifications{?since,all,participating}", + "labels_url": "https://api.github.com/repos/blacklanternsecurity/bbot/labels{/name}", + "releases_url": "https://api.github.com/repos/blacklanternsecurity/bbot/releases{/id}", + "deployments_url": "https://api.github.com/repos/blacklanternsecurity/bbot/deployments", + }, + }, + ], + }, + ) + module_test.httpx_mock.add_response( + url="https://api.github.com/repos/blacklanternsecurity/bbot/actions/runs/8839360698/logs", + headers={ + "location": "https://productionresultssa10.blob.core.windows.net/actions-results/7beb304e-f42c-4830-a027-4f5dec53107d/workflow-job-run-3a559e2a-952e-58d2-b8db-2e604a9266d7/logs/steps/step-logs-0e34a19a-18b0-4208-b27a-f8c031db2d17.txt?rsct=text%2Fplain&se=2024-04-26T16%3A25%3A39Z&sig=a%2FiN8dOw0e3tiBQZAfr80veI8OYChb9edJ1eFY136B4%3D&sp=r&spr=https&sr=b&st=2024-04-26T16%3A15%3A34Z&sv=2021-12-02" + }, + status_code=302, + ) + data = io.BytesIO() + with zipfile.ZipFile(data, mode="w", compression=zipfile.ZIP_DEFLATED) as z: + z.writestr("test.txt", self.file_content) + z.writestr("folder/test2.txt", self.file_content) + data.seek(0) + zip_content = data.getvalue() + module_test.httpx_mock.add_response( + url="https://productionresultssa10.blob.core.windows.net/actions-results/7beb304e-f42c-4830-a027-4f5dec53107d/workflow-job-run-3a559e2a-952e-58d2-b8db-2e604a9266d7/logs/steps/step-logs-0e34a19a-18b0-4208-b27a-f8c031db2d17.txt?rsct=text%2Fplain&se=2024-04-26T16%3A25%3A39Z&sig=a%2FiN8dOw0e3tiBQZAfr80veI8OYChb9edJ1eFY136B4%3D&sp=r&spr=https&sr=b&st=2024-04-26T16%3A15%3A34Z&sv=2021-12-02", + content=zip_content, + ) + module_test.httpx_mock.add_response( + url="https://hub.docker.com/v2/users/blacklanternsecurity", + json={ + "id": 
"f90895d9cf484d9182c6dbbef2632329", + "uuid": "f90895d9-cf48-4d91-82c6-dbbef2632329", + "username": "blacklanternsecurity", + "full_name": "", + "location": "", + "company": "Black Lantern Security", + "profile_url": "https://github.com/blacklanternsecurity", + "date_joined": "2022-08-29T15:27:10.227081Z", + "gravatar_url": "", + "gravatar_email": "", + "type": "User", + }, + ) + module_test.httpx_mock.add_response( + url="https://hub.docker.com/v2/repositories/blacklanternsecurity?page_size=25&page=1", + json={ + "count": 2, + "next": None, + "previous": None, + "results": [ + { + "name": "helloworld", + "namespace": "blacklanternsecurity", + "repository_type": "image", + "status": 1, + "status_description": "active", + "description": "", + "is_private": False, + "star_count": 0, + "pull_count": 1, + "last_updated": "2021-12-20T17:19:58.88296Z", + "date_registered": "2021-12-20T17:19:58.507614Z", + "affiliation": "", + "media_types": ["application/vnd.docker.container.image.v1+json"], + "content_types": ["image"], + "categories": [], + }, + ], + }, + ) + module_test.httpx_mock.add_response( + url="https://registry-1.docker.io/v2/blacklanternsecurity/helloworld/tags/list", + json={ + "name": "blacklanternsecurity/helloworld", + "tags": [ + "dev", + "latest", + ], + }, + ) + module_test.httpx_mock.add_response( + url="https://registry-1.docker.io/v2/blacklanternsecurity/helloworld/manifests/latest", + json={ + "schemaVersion": 2, + "mediaType": "application/vnd.docker.distribution.manifest.v2+json", + "config": { + "mediaType": "application/vnd.docker.container.image.v1+json", + "size": 8614, + "digest": "sha256:a9910947b74a4f0606cfc8669ae8808d2c328beaee9e79f489dc17df14cd50b1", + }, + "layers": [ + { + "mediaType": "application/vnd.docker.image.rootfs.diff.tar.gzip", + "size": 29124181, + "digest": "sha256:8a1e25ce7c4f75e372e9884f8f7b1bedcfe4a7a7d452eb4b0a1c7477c9a90345", + }, + ], + }, + ) + module_test.httpx_mock.add_response( + url="https://registry-1.docker.io/v2/blacklanternsecurity/helloworld/blobs/sha256:a9910947b74a4f0606cfc8669ae8808d2c328beaee9e79f489dc17df14cd50b1", + json={ + "architecture": "amd64", + "config": { + "Env": [ + "PATH=/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin", + "LANG=C.UTF-8", + "GPG_KEY=QWERTYUIOPASDFGHJKLZXCBNM", + "PYTHON_VERSION=3.10.14", + "PYTHON_PIP_VERSION=23.0.1", + "PYTHON_SETUPTOOLS_VERSION=65.5.1", + "PYTHON_GET_PIP_URL=https://github.com/pypa/get-pip/raw/dbf0c85f76fb6e1ab42aa672ffca6f0a675d9ee4/public/get-pip.py", + "PYTHON_GET_PIP_SHA256=dfe9fd5c28dc98b5ac17979a953ea550cec37ae1b47a5116007395bfacff2ab9", + "LC_ALL=C.UTF-8", + "PIP_NO_CACHE_DIR=off", + ], + "Entrypoint": ["helloworld"], + "WorkingDir": "/root", + "ArgsEscaped": True, + "OnBuild": None, + }, + "created": "2024-03-24T03:46:29.788993495Z", + "history": [ + { + "created": "2024-03-12T01:21:01.529814652Z", + "created_by": "/bin/sh -c #(nop) ADD file:b86ae1c7ca3586d8feedcd9ff1b2b1e8ab872caf6587618f1da689045a5d7ae4 in / ", + }, + { + "created": "2024-03-12T01:21:01.866693306Z", + "created_by": '/bin/sh -c #(nop) CMD ["bash"]', + "empty_layer": True, + }, + { + "created": "2024-03-20T18:33:29Z", + "created_by": "ENV PATH=/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin", + "comment": "buildkit.dockerfile.v0", + "empty_layer": True, + }, + { + "created": "2024-03-20T18:33:29Z", + "created_by": "ENV LANG=C.UTF-8", + "comment": "buildkit.dockerfile.v0", + "empty_layer": True, + }, + { + "created": "2024-03-20T18:33:29Z", + 
"created_by": "RUN /bin/sh -c set -eux; \tapt-get update; \tapt-get install -y --no-install-recommends \t\tca-certificates \t\tnetbase \t\ttzdata \t; \trm -rf /var/lib/apt/lists/* # buildkit", + "comment": "buildkit.dockerfile.v0", + }, + { + "created": "2024-03-20T18:33:29Z", + "created_by": "ENV GPG_KEY=QWERTYUIOPASDFGHJKLZXCBNM", + "comment": "buildkit.dockerfile.v0", + "empty_layer": True, + }, + { + "created": "2024-03-20T18:33:29Z", + "created_by": "ENV PYTHON_VERSION=3.10.14", + "comment": "buildkit.dockerfile.v0", + "empty_layer": True, + }, + { + "created": "2024-03-20T18:33:29Z", + "created_by": 'RUN /bin/sh -c set -eux; \t\tsavedAptMark="$(apt-mark showmanual)"; \tapt-get update; \tapt-get install -y --no-install-recommends \t\tdpkg-dev \t\tgcc \t\tgnupg \t\tlibbluetooth-dev \t\tlibbz2-dev \t\tlibc6-dev \t\tlibdb-dev \t\tlibexpat1-dev \t\tlibffi-dev \t\tlibgdbm-dev \t\tliblzma-dev \t\tlibncursesw5-dev \t\tlibreadline-dev \t\tlibsqlite3-dev \t\tlibssl-dev \t\tmake \t\ttk-dev \t\tuuid-dev \t\twget \t\txz-utils \t\tzlib1g-dev \t; \t\twget -O python.tar.xz "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz"; \twget -O python.tar.xz.asc "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz.asc"; \tGNUPGHOME="$(mktemp -d)"; export GNUPGHOME; \tgpg --batch --keyserver hkps://keys.openpgp.org --recv-keys "$GPG_KEY"; \tgpg --batch --verify python.tar.xz.asc python.tar.xz; \tgpgconf --kill all; \trm -rf "$GNUPGHOME" python.tar.xz.asc; \tmkdir -p /usr/src/python; \ttar --extract --directory /usr/src/python --strip-components=1 --file python.tar.xz; \trm python.tar.xz; \t\tcd /usr/src/python; \tgnuArch="$(dpkg-architecture --query DEB_BUILD_GNU_TYPE)"; \t./configure \t\t--build="$gnuArch" \t\t--enable-loadable-sqlite-extensions \t\t--enable-optimizations \t\t--enable-option-checking=fatal \t\t--enable-shared \t\t--with-lto \t\t--with-system-expat \t\t--without-ensurepip \t; \tnproc="$(nproc)"; \tEXTRA_CFLAGS="$(dpkg-buildflags --get CFLAGS)"; \tLDFLAGS="$(dpkg-buildflags --get LDFLAGS)"; \tLDFLAGS="${LDFLAGS:--Wl},--strip-all"; \tmake -j "$nproc" \t\t"EXTRA_CFLAGS=${EXTRA_CFLAGS:-}" \t\t"LDFLAGS=${LDFLAGS:-}" \t\t"PROFILE_TASK=${PROFILE_TASK:-}" \t; \trm python; \tmake -j "$nproc" \t\t"EXTRA_CFLAGS=${EXTRA_CFLAGS:-}" \t\t"LDFLAGS=${LDFLAGS:--Wl},-rpath=\'\\$\\$ORIGIN/../lib\'" \t\t"PROFILE_TASK=${PROFILE_TASK:-}" \t\tpython \t; \tmake install; \t\tcd /; \trm -rf /usr/src/python; \t\tfind /usr/local -depth \t\t\\( \t\t\t\\( -type d -a \\( -name test -o -name tests -o -name idle_test \\) \\) \t\t\t-o \\( -type f -a \\( -name \'*.pyc\' -o -name \'*.pyo\' -o -name \'libpython*.a\' \\) \\) \t\t\\) -exec rm -rf \'{}\' + \t; \t\tldconfig; \t\tapt-mark auto \'.*\' > /dev/null; \tapt-mark manual $savedAptMark; \tfind /usr/local -type f -executable -not \\( -name \'*tkinter*\' \\) -exec ldd \'{}\' \';\' \t\t| awk \'/=>/ { so = $(NF-1); if (index(so, "/usr/local/") == 1) { next }; gsub("^/(usr/)?", "", so); printf "*%s\\n", so }\' \t\t| sort -u \t\t| xargs -r dpkg-query --search \t\t| cut -d: -f1 \t\t| sort -u \t\t| xargs -r apt-mark manual \t; \tapt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false; \trm -rf /var/lib/apt/lists/*; \t\tpython3 --version # buildkit', + "comment": "buildkit.dockerfile.v0", + }, + { + "created": "2024-03-20T18:33:29Z", + "created_by": 'RUN /bin/sh -c set -eux; \tfor src in idle3 pydoc3 python3 python3-config; do \t\tdst="$(echo "$src" | tr -d 3)"; \t\t[ -s 
"/usr/local/bin/$src" ]; \t\t[ ! -e "/usr/local/bin/$dst" ]; \t\tln -svT "$src" "/usr/local/bin/$dst"; \tdone # buildkit', + "comment": "buildkit.dockerfile.v0", + }, + { + "created": "2024-03-20T18:33:29Z", + "created_by": "ENV PYTHON_PIP_VERSION=23.0.1", + "comment": "buildkit.dockerfile.v0", + "empty_layer": True, + }, + { + "created": "2024-03-20T18:33:29Z", + "created_by": "ENV PYTHON_SETUPTOOLS_VERSION=65.5.1", + "comment": "buildkit.dockerfile.v0", + "empty_layer": True, + }, + { + "created": "2024-03-20T18:33:29Z", + "created_by": "ENV PYTHON_GET_PIP_URL=https://github.com/pypa/get-pip/raw/dbf0c85f76fb6e1ab42aa672ffca6f0a675d9ee4/public/get-pip.py", + "comment": "buildkit.dockerfile.v0", + "empty_layer": True, + }, + { + "created": "2024-03-20T18:33:29Z", + "created_by": "ENV PYTHON_GET_PIP_SHA256=dfe9fd5c28dc98b5ac17979a953ea550cec37ae1b47a5116007395bfacff2ab9", + "comment": "buildkit.dockerfile.v0", + "empty_layer": True, + }, + { + "created": "2024-03-20T18:33:29Z", + "created_by": 'RUN /bin/sh -c set -eux; \t\tsavedAptMark="$(apt-mark showmanual)"; \tapt-get update; \tapt-get install -y --no-install-recommends wget; \t\twget -O get-pip.py "$PYTHON_GET_PIP_URL"; \techo "$PYTHON_GET_PIP_SHA256 *get-pip.py" | sha256sum -c -; \t\tapt-mark auto \'.*\' > /dev/null; \t[ -z "$savedAptMark" ] || apt-mark manual $savedAptMark > /dev/null; \tapt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false; \trm -rf /var/lib/apt/lists/*; \t\texport PYTHONDONTWRITEBYTECODE=1; \t\tpython get-pip.py \t\t--disable-pip-version-check \t\t--no-cache-dir \t\t--no-compile \t\t"pip==$PYTHON_PIP_VERSION" \t\t"setuptools==$PYTHON_SETUPTOOLS_VERSION" \t; \trm -f get-pip.py; \t\tpip --version # buildkit', + "comment": "buildkit.dockerfile.v0", + }, + { + "created": "2024-03-20T18:33:29Z", + "created_by": 'CMD ["python3"]', + "comment": "buildkit.dockerfile.v0", + "empty_layer": True, + }, + { + "created": "2024-03-24T03:45:39.322168741Z", + "created_by": "ENV LANG=C.UTF-8", + "comment": "buildkit.dockerfile.v0", + "empty_layer": True, + }, + { + "created": "2024-03-24T03:45:39.322168741Z", + "created_by": "ENV LC_ALL=C.UTF-8", + "comment": "buildkit.dockerfile.v0", + "empty_layer": True, + }, + { + "created": "2024-03-24T03:45:39.322168741Z", + "created_by": "ENV PIP_NO_CACHE_DIR=off", + "comment": "buildkit.dockerfile.v0", + "empty_layer": True, + }, + { + "created": "2024-03-24T03:45:39.322168741Z", + "created_by": "WORKDIR /usr/src/helloworld", + "comment": "buildkit.dockerfile.v0", + }, + { + "created": "2024-03-24T03:45:52.226201188Z", + "created_by": "RUN /bin/sh -c apt-get update && apt-get install -y openssl gcc git make unzip curl wget vim nano sudo # buildkit", + "comment": "buildkit.dockerfile.v0", + }, + { + "created": "2024-03-24T03:45:52.391597947Z", + "created_by": "COPY . . # buildkit", + "comment": "buildkit.dockerfile.v0", + }, + { + "created": "2024-03-24T03:46:29.76589069Z", + "created_by": "RUN /bin/sh -c pip install . 
# buildkit", + "comment": "buildkit.dockerfile.v0", + }, + { + "created": "2024-03-24T03:46:29.788993495Z", + "created_by": "WORKDIR /root", + "comment": "buildkit.dockerfile.v0", + }, + { + "created": "2024-03-24T03:46:29.788993495Z", + "created_by": 'ENTRYPOINT ["helloworld"]', + "comment": "buildkit.dockerfile.v0", + "empty_layer": True, + }, + ], + "os": "linux", + "rootfs": { + "type": "layers", + "diff_ids": [ + "sha256:a483da8ab3e941547542718cacd3258c6c705a63e94183c837c9bc44eb608999", + "sha256:c8f253aef5606f6716778771171c3fdf6aa135b76a5fa8bf66ba45c12c15b540", + "sha256:b4a9dcc697d250c7be53887bb8e155c8f7a06f9c63a3aa627c647bb4a426d3f0", + "sha256:120fda24c420b4e5d52f1c288b35c75b07969057bce41ec34cfb05606b2d7c11", + "sha256:c2287f03e33f4896b2720f0cb64e6b6050759a3eb5914e531e98fc3499b4e687", + "sha256:afe6e55a5cf240c050a4d2b72ec7b7d009a131cba8fe2753e453a8e62ef7e45c", + "sha256:ae6df275ba2e8f40c598e30588afe43f6bfa92e4915e8450b77cb5db5c89dfd5", + "sha256:621ab22fb386a9e663178637755b651beddc0eb4762804e74d8996cce0ddd441", + "sha256:4c534ad16bd2df668c0b8f637616517746ede530ba8546d85f28772bc748e06f", + "sha256:5f70bf18a086007016e948b04aed3b82103a36bea41755b6cddfaf10ace3c6ef", + ], + }, + }, + ) + temp_path = Path("/tmp/.bbot_test") + tar_path = temp_path / "docker_pull_test.tar.gz" + shutil.rmtree(tar_path, ignore_errors=True) + with tarfile.open(tar_path, "w:gz") as tar: + file_io = io.BytesIO(self.file_content.encode()) + file_info = tarfile.TarInfo(name="file.txt") + file_info.size = len(file_io.getvalue()) + file_io.seek(0) + tar.addfile(file_info, file_io) + with open(tar_path, "rb") as file: + layer_file = file.read() + module_test.httpx_mock.add_response( + url="https://registry-1.docker.io/v2/blacklanternsecurity/helloworld/blobs/sha256:8a1e25ce7c4f75e372e9884f8f7b1bedcfe4a7a7d452eb4b0a1c7477c9a90345", + content=layer_file, + ) + + async def setup_after_prep(self, module_test): + module_test.httpx_mock.add_response( + url="https://www.postman.com/_api/ws/proxy", + match_content=b'{"service": "search", "method": "POST", "path": "/search-all", "body": {"queryIndices": ["collaboration.workspace"], "queryText": "blacklanternsecurity", "size": 25, "from": 0, "clientTraceId": "", "requestOrigin": "srp", "mergeEntities": "true", "nonNestedRequests": "true", "domain": "public"}}', + json={ + "data": [ + { + "score": 611.41156, + "normalizedScore": 23, + "document": { + "watcherCount": 6, + "apiCount": 0, + "forkCount": 0, + "isblacklisted": "false", + "createdAt": "2021-06-15T14:03:51", + "publishertype": "team", + "publisherHandle": "blacklanternsecurity", + "id": "11498add-357d-4bc5-a008-0a2d44fb8829", + "slug": "bbot-public", + "updatedAt": "2024-07-30T11:00:35", + "entityType": "workspace", + "visibilityStatus": "public", + "forkcount": "0", + "tags": [], + "createdat": "2021-06-15T14:03:51", + "forkLabel": "", + "publisherName": "blacklanternsecurity", + "name": "BlackLanternSecurity BBOT [Public]", + "dependencyCount": 7, + "collectionCount": 6, + "warehouse__updated_at": "2024-07-30 11:00:00", + "privateNetworkFolders": [], + "isPublisherVerified": False, + "publisherType": "team", + "curatedInList": [], + "creatorId": "6900157", + "description": "", + "forklabel": "", + "publisherId": "299401", + "publisherLogo": "", + "popularity": 5, + "isPublic": True, + "categories": [], + "universaltags": "", + "views": 5788, + "summary": "BLS public workspaces.", + "memberCount": 2, + "isBlacklisted": False, + "publisherid": "299401", + "isPrivateNetworkEntity": False, + "isDomainNonTrivial": 
True, + "privateNetworkMeta": "", + "updatedat": "2021-10-20T16:19:29", + "documentType": "workspace", + }, + "highlight": {"summary": "BLS BBOT api test."}, + }, + ], + "meta": { + "queryText": "blacklanternsecurity", + "total": { + "collection": 0, + "request": 0, + "workspace": 1, + "api": 0, + "team": 0, + "user": 0, + "flow": 0, + "apiDefinition": 0, + "privateNetworkFolder": 0, + }, + "state": "AQ4", + "spellCorrection": {"count": {"all": 1, "workspace": 1}, "correctedQueryText": None}, + "featureFlags": { + "enabledPublicResultCuration": True, + "boostByPopularity": True, + "reRankPostNormalization": True, + "enableUrlBarHostNameSearch": True, + }, + }, + }, + ) + module_test.httpx_mock.add_response( + url="https://www.postman.com/_api/ws/proxy", + match_content=b'{"service": "workspaces", "method": "GET", "path": "/workspaces?handle=blacklanternsecurity&slug=bbot-public"}', + json={ + "meta": {"model": "workspace", "action": "find", "nextCursor": ""}, + "data": [ + { + "id": "3a7e4bdc-7ff7-4dd4-8eaa-61ddce1c3d1b", + "name": "BlackLanternSecurity BBOT [Public]", + "description": None, + "summary": "BLS public workspaces.", + "createdBy": "299401", + "updatedBy": "299401", + "team": None, + "createdAt": "2021-10-20T16:19:29", + "updatedAt": "2021-10-20T16:19:29", + "visibilityStatus": "public", + "profileInfo": { + "slug": "bbot-public", + "profileType": "team", + "profileId": "000000", + "publicHandle": "https://www.postman.com/blacklanternsecurity", + "publicImageURL": "", + "publicName": "BlackLanternSecurity", + "isVerified": False, + }, + } + ], + }, + ) + module_test.httpx_mock.add_response( + url="https://api.getpostman.com/workspaces/3a7e4bdc-7ff7-4dd4-8eaa-61ddce1c3d1b", + json={ + "workspace": { + "id": "3a7e4bdc-7ff7-4dd4-8eaa-61ddce1c3d1b", + "name": "BlackLanternSecurity BBOT [Public]", + "type": "personal", + "description": None, + "visibility": "public", + "createdBy": "00000000", + "updatedBy": "00000000", + "createdAt": "2021-11-17T06:09:01.000Z", + "updatedAt": "2021-11-17T08:57:16.000Z", + "collections": [ + { + "id": "2aab9fd0-3715-4abe-8bb0-8cb0264d023f", + "name": "BBOT Public", + "uid": "10197090-2aab9fd0-3715-4abe-8bb0-8cb0264d023f", + }, + ], + "environments": [ + { + "id": "f770f816-9c6a-40f7-bde3-c0855d2a1089", + "name": "BBOT Test", + "uid": "10197090-f770f816-9c6a-40f7-bde3-c0855d2a1089", + } + ], + "apis": [], + } + }, + ) + module_test.httpx_mock.add_response( + url="https://www.postman.com/_api/workspace/3a7e4bdc-7ff7-4dd4-8eaa-61ddce1c3d1b/globals", + json={ + "model_id": "8be7574b-219f-49e0-8d25-da447a882e4e", + "meta": {"model": "globals", "action": "find"}, + "data": { + "workspace": "3a7e4bdc-7ff7-4dd4-8eaa-61ddce1c3d1b", + "lastUpdatedBy": "00000000", + "lastRevision": 1637239113000, + "id": "8be7574b-219f-49e0-8d25-da447a882e4e", + "values": [ + { + "key": "endpoint_url", + "value": "https://api.blacklanternsecurity.com/", + "enabled": True, + }, + ], + "createdAt": "2021-11-17T06:09:01.000Z", + "updatedAt": "2021-11-18T12:38:33.000Z", + }, + }, + ) + module_test.httpx_mock.add_response( + url="https://api.getpostman.com/environments/10197090-f770f816-9c6a-40f7-bde3-c0855d2a1089", + json={ + "environment": { + "id": "f770f816-9c6a-40f7-bde3-c0855d2a1089", + "name": "BBOT Test", + "owner": "00000000", + "createdAt": "2021-11-17T06:29:54.000Z", + "updatedAt": "2021-11-23T07:06:53.000Z", + "values": [ + { + "key": "temp_session_endpoint", + "value": "https://api.blacklanternsecurity.com/", + "enabled": True, + }, + ], + "isPublic": True, + } + }, + 
) + module_test.httpx_mock.add_response( + url="https://api.getpostman.com/collections/10197090-2aab9fd0-3715-4abe-8bb0-8cb0264d023f", + json={ + "collection": { + "info": { + "_postman_id": "62b91565-d2e2-4bcd-8248-4dba2e3452f0", + "name": "BBOT Public", + "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json", + "updatedAt": "2021-11-17T07:13:16.000Z", + "createdAt": "2021-11-17T07:13:15.000Z", + "lastUpdatedBy": "00000000", + "uid": "172983-62b91565-d2e2-4bcd-8248-4dba2e3452f0", + }, + "item": [ + { + "name": "Generate API Session", + "id": "c1bac38c-dfc9-4cc0-9c19-828cbc8543b1", + "protocolProfileBehavior": {"disableBodyPruning": True}, + "request": { + "method": "POST", + "header": [{"key": "Content-Type", "value": "application/json"}], + "body": { + "mode": "raw", + "raw": '{"username": "test", "password": "Test"}', + }, + "url": { + "raw": "https://admin:admin@the-internet.herokuapp.com/basic_auth", + "host": ["https://admin:admin@the-internet.herokuapp.com/basic_auth"], + }, + "description": "", + }, + "response": [], + "uid": "10197090-c1bac38c-dfc9-4cc0-9c19-828cbc8543b1", + }, + { + "name": "Generate API Session", + "id": "c1bac38c-dfc9-4cc0-9c19-828cbc8543b1", + "protocolProfileBehavior": {"disableBodyPruning": True}, + "request": { + "method": "POST", + "header": [{"key": "Content-Type", "value": "application/json"}], + "body": { + "mode": "raw", + "raw": '{"username": "test", "password": "Test"}', + }, + "url": { + "raw": "https://admin:admin@internal.host.com", + "host": ["https://admin:admin@internal.host.com"], + }, + "description": "", + }, + "response": [], + "uid": "10197090-c1bac38c-dfc9-4cc0-9c19-828cbc8543b1", + }, + ], + } + }, + ) + temp_path = Path("/tmp/.bbot_test") + temp_repo_path = temp_path / "test_keys" + shutil.rmtree(temp_repo_path, ignore_errors=True) + subprocess.run(["git", "init", "test_keys"], cwd=temp_path) + with open(temp_repo_path / "keys.txt", "w") as f: + f.write(self.file_content) + subprocess.run(["git", "add", "."], cwd=temp_repo_path) + subprocess.run( + [ + "git", + "-c", + "user.name='BBOT Test'", + "-c", + "user.email='bbot@blacklanternsecurity.com'", + "commit", + "-m", + "Initial commit", + ], + check=True, + cwd=temp_repo_path, + ) + + old_filter_event = module_test.scan.modules["git_clone"].filter_event + + def new_filter_event(event): + event.data["url"] = event.data["url"].replace( + "https://github.com/blacklanternsecurity", f"file://{temp_path}" + ) + return old_filter_event(event) + + module_test.monkeypatch.setattr(module_test.scan.modules["git_clone"], "filter_event", new_filter_event) + + def check(self, module_test, events): + vuln_events = [ + e + for e in events + if e.type == "VULNERABILITY" + and ( + e.data["host"] == "hub.docker.com" + or e.data["host"] == "github.com" + or e.data["host"] == "www.postman.com" + ) + and "Verified Secret Found." in e.data["description"] + and "Raw result: [https://admin:admin@the-internet.herokuapp.com]" in e.data["description"] + and "RawV2 result: [https://admin:admin@the-internet.herokuapp.com/basic_auth]" in e.data["description"] + ] + # Trufflehog should find 4 verifiable secrets, 1 from the github, 1 from the workflow log, 1 from the docker image and 1 from the postman. 
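+        # Each VULNERABILITY event's parent should be the FILESYSTEM event for the artifact trufflehog scanned (the cloned git repo, the workflow log, the docker image tarball, or the postman workspace zip); the assertions below verify each of the four.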
+ assert 4 == len(vuln_events), "Failed to find secret in events" + github_repo_event = [e for e in vuln_events if "test_keys" in e.data["description"]][0].parent + folder = Path(github_repo_event.data["path"]) + assert folder.is_dir(), "Destination folder doesn't exist" + with open(folder / "keys.txt") as f: + content = f.read() + assert content == self.file_content, "File content doesn't match" + filesystem_events = [e.parent for e in vuln_events] + assert len(filesystem_events) == 4 + assert all(e.type == "FILESYSTEM" for e in filesystem_events) + assert 1 == len( + [ + e + for e in filesystem_events + if e.data["path"].endswith("/git_repos/.bbot_test/test_keys") and Path(e.data["path"]).is_dir() + ] + ), "Test keys repo dir does not exist" + assert 1 == len( + [ + e + for e in filesystem_events + if e.data["path"].endswith("/workflow_logs/blacklanternsecurity/bbot/test.txt") + and Path(e.data["path"]).is_file() + ] + ), "Workflow log file does not exist" + assert 1 == len( + [ + e + for e in filesystem_events + if e.data["path"].endswith("/docker_images/blacklanternsecurity_helloworld_latest.tar") + and Path(e.data["path"]).is_file() + ] + ), "Docker image file does not exist" + assert 1 == len( + [ + e + for e in filesystem_events + if e.data["path"].endswith( + "/postman_workspaces/BlackLanternSecurity BBOT [Public]/3a7e4bdc-7ff7-4dd4-8eaa-61ddce1c3d1b.zip" + ) + and Path(e.data["path"]).is_file() + ] + ), "Failed to find blacklanternsecurity postman workspace" + + +class TestTrufflehog_NonVerified(TestTrufflehog): + config_overrides = { + "modules": { + "trufflehog": {"only_verified": False}, + "postman_download": {"api_key": "asdf"}, + "github_org": {"api_key": "asdf"}, + } + } + + def check(self, module_test, events): + finding_events = [ + e + for e in events + if e.type == "FINDING" + and ( + e.data["host"] == "hub.docker.com" + or e.data["host"] == "github.com" + or e.data["host"] == "www.postman.com" + ) + and "Possible Secret Found." in e.data["description"] + and "Raw result: [https://admin:admin@internal.host.com]" in e.data["description"] + ] + # Trufflehog should find 4 unverifiable secrets: 1 from the GitHub repo, 1 from the workflow log, 1 from the Docker image, and 1 from the Postman workspace.
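+        # Mirrors the verified check above: with only_verified=False, trufflehog emits FINDING events ("Possible Secret Found.") rather than VULNERABILITY events, but the parent FILESYSTEM layout is expected to be identical.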
+ assert 4 == len(finding_events), "Failed to find secret in events" + github_repo_event = [e for e in finding_events if "test_keys" in e.data["description"]][0].parent + folder = Path(github_repo_event.data["path"]) + assert folder.is_dir(), "Destination folder doesn't exist" + with open(folder / "keys.txt") as f: + content = f.read() + assert content == self.file_content, "File content doesn't match" + filesystem_events = [e.parent for e in finding_events] + assert len(filesystem_events) == 4 + assert all(e.type == "FILESYSTEM" for e in filesystem_events) + assert 1 == len( + [ + e + for e in filesystem_events + if e.data["path"].endswith("/git_repos/.bbot_test/test_keys") and Path(e.data["path"]).is_dir() + ] + ), "Test keys repo dir does not exist" + assert 1 == len( + [ + e + for e in filesystem_events + if e.data["path"].endswith("/workflow_logs/blacklanternsecurity/bbot/test.txt") + and Path(e.data["path"]).is_file() + ] + ), "Workflow log file does not exist" + assert 1 == len( + [ + e + for e in filesystem_events + if e.data["path"].endswith("/docker_images/blacklanternsecurity_helloworld_latest.tar") + and Path(e.data["path"]).is_file() + ] + ), "Docker image file does not exist" + assert 1 == len( + [ + e + for e in filesystem_events + if e.data["path"].endswith( + "/postman_workspaces/BlackLanternSecurity BBOT [Public]/3a7e4bdc-7ff7-4dd4-8eaa-61ddce1c3d1b.zip" + ) + and Path(e.data["path"]).is_file() + ] + ), "Failed to find blacklanternsecurity postman workspace" + + +class TestTrufflehog_HTTPResponse(ModuleTestBase): + targets = ["http://127.0.0.1:8888"] + modules_overrides = ["httpx", "trufflehog"] + config_overrides = {"modules": {"trufflehog": {"only_verified": False}}} + + async def setup_before_prep(self, module_test): + expect_args = {"method": "GET", "uri": "/"} + respond_args = {"response_data": "https://admin:admin@internal.host.com"} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + def check(self, module_test, events): + assert any(e.type == "FINDING" for e in events) + + +class TestTrufflehog_RAWText(ModuleTestBase): + targets = ["http://127.0.0.1:8888/test.pdf"] + modules_overrides = ["httpx", "trufflehog", "filedownload", "extractous"] + config_overrides = {"modules": {"trufflehog": {"only_verified": False}}} + + async def setup_before_prep(self, module_test): + expect_args = {"method": "GET", "uri": "/test.pdf"} + respond_args = { + "response_data": b"%PDF-1.4\n%\xc7\xec\x8f\xa2\n%%Invocation: path/gs -P- -dSAFER -dCompatibilityLevel=1.4 -dWriteXRefStm=false -dWriteObjStms=false -q -P- -dNOPAUSE -dBATCH -sDEVICE=pdfwrite -sstdout=? -sOutputFile=? 
-P- -dSAFER -dCompatibilityLevel=1.4 -dWriteXRefStm=false -dWriteObjStms=false -\n5 0 obj\n<>\nstream\nx\x9c-\x8c\xb1\x0e\x82@\x10D\xfb\xfd\x8a-\xa1\xe0\xd8\xe5@\xe1*c\xb4\xb1\xd3lba,\xc8\x81\x82\xf1@\xe4\xfe?\x02\x92If\x92\x97\x99\x19\x90\x14#\xcdZ\xd3: |\xc2\x00\xbcP\\\xc3:\xdc\x0b\xc4\x97\xed\x0c\xe4\x01\xff2\xe36\xc5\x9c6Jk\x8d\xe2\xe0\x16\\\xeb\n\x0f\xb5E\xce\x913\x93\x15F3&\x94\xa4a\x94fD\x01\x87w9M7\xc5z3Q\x8cx\xd9'(\x15\x04\x8d\xf7\x9f\xd1\xc4qY\xb9\xb63\x8b\xef\xda\xce\xd7\xdf\xae|\xab\xa6\x1f\xbd\xb2\xbd\x0b\xe5\x05G\x81\xf3\xa4\x1f~q-\xc7endstream\nendobj\n6 0 obj\n155\nendobj\n4 0 obj\n<>\n/Contents 5 0 R\n>>\nendobj\n3 0 obj\n<< /Type /Pages /Kids [\n4 0 R\n] /Count 1\n>>\nendobj\n1 0 obj\n<>\nendobj\n11 0 obj\n<>\nendobj\n9 0 obj\n<>\nendobj\n7 0 obj\n<>\nendobj\n10 0 obj\n<>\nendobj\n12 0 obj\n<>stream\nx\x9c\x9dT{TS\xf7\x1d\xbf\x91ps\x8f\xa0\xb2\xdc\x06\x1f\xe8\xbdX[|\xa0\x85\xaa\xad\xa7\xf4\x14P\x1eG9\x05\x9c\xa2\x08\xb4\xee@\x88\xc83\x08\x04\x84\x80\x84@B\xd3\x1f84!@\x12\x08\xe0\x8b\x97S\xe9\xc4U\xf4\x06\xb5\x15\xdd:5\xc8&j=\xb2\xad:'T9\xeb\xce\xbe\xb7\xe7\xban\xbf\x80\x16\xdb\xd3\xed\x8f\x9d\x93?n\xee\xe3\xf3\xfb~\x1e\xdf\x8f\x88\x10\xcf D\"\x11\x15\xa6T\xe5\xa5+\xf2\\\xd7\xabx\x1f\x11\xbfp\x06\xbf\xc8\r\tQ\xfc\xd8\xb7\xab\xdcy\xc6\x93\xa8\xf1\x14!O7\xe4)n_H\x19\xa4\xd0\xfb3\xa8\x9d\x03\xc5^\x84X$Z\x17\x9dd]\xb6mK\xfcr\x7f\xff\x95a\xca\xdc\xe2\xbc\xf4\xb4\xdd\x05\xbe\xab\x03\xdf\\\xeb\x9bR\xec\xfb\xfc\x89o\xb8\"?=-\xc7\xd7\x0f_\x14*\xb2\x94\xb9\xd9\x8a\x9c\x82\x98\xf4\xec\x14U\xbeo\xb42G\xe9\xbby\xab\xef\x16E\x9a*+9\xef\x87w\xa7\x11\xff\xbf3\x08\x82\x90\xe6(s\xf3\xf2\x0b\x92\xe5\xa9\x8a\xdd\xe9Y\xd9o\x04\x04\x85\x12D,\xb1\x99\xf89\xb1\x95\x88#\xb6\x11\x1b\x88p\"\x82\x88$6\x11QD4\x11C\xcc!\xbc\x08\x1fb1Acq\x081\xa1'\x06E\x1bE}3>\x9cq\xc1m\x93[\x9fx\x89\xb8P\x0c\xee\x91\xee\x95\xe4\xab\xe4zRIvJ\xd6\xf3\xe3\xb3\xf9q\xc4\xc1}N:\x08\xee\xf1\x0eht\xcc\xa5Ga=\xbfN\x16D\xaa**KJ\xcc\xdaV\x96\x1e\xe9\x10\x9crR\xa5\xd1\xaaK\x1a\xf0\x7f\x98G\xb6\x9aM6\xab\xc6T\xc8\xcaAG^\xf9\xe3a\xcb\x15t\x02\xb5\xe8\xda\x8a\x0f\x155\x14\xa0\\J\xa8PJ\xa6\xdf\x17\x91\xf6\x86\xe7\xef\xe7\xc0G\xe4\xed\x88\xc1\x00\x86\x1e\x8dAi\xc5\xdb\xb7Rx\x025\x07O9\xd15\x07\xfc\xdb\xe1\x06\x9f\xf1\x112a\xc1k\xcb\x05Z\xf0\xfaf)x\x83\xf7\xdf\x9f\x80\x14\xe6\xbc6!\xd0\xacn\x87\xec\x9b\xbb\xa1\xcb\xfc\xdf\r\xf6\xf3\x0b\x1a\x19\x7f|\xf7\xf6\x13\x16\x03\x08Q\x1c,\xe6`\x90\xdb\xc5Im0\x1f\x13\xf9\x1a\x13y\x04+0\x11\xbf\x97\x88|u\xeeYu\"I?*t\x8d\xe6\xba\x03\xdb\xc8\xb6)**\x96~\x18\x00\x05\xe4\xa7[.\xee\x19F\x14H\xc7\x1f\x81\x07K/\x00O\xff\x87\xc2+\xeb\x93\xf2cv0t\"\x04\x1f\x97=\xb9\x15\x11\xb8:$\xdc\x7fE\xc8\xd0\x83\xbf\xdc\xba\xf97vJC'\x97\xc2I\xe1\x17\xf8\xdc\x1b`\xc4\xe7\n\xb3\xc8\xc2r\xadZ\xddP\xd1\xca\xde\x10\x9c\x81\xf8_E\xe9\x94\x1e\xceI=,\xe5\xf5E\xac\xb0\x01RI:p\x1c\x88\x9e\xb6>\x1f;j\xd6\x1e\xca7V\xed7\x98\x10e1\x9b\xad\xf5:\xd3^\x0b\x9b\xdb\xae2e\xa1x\xf4\xc1\x9e5\xefM\xe9\xb5\xdb\x0e\xdfq\xe9v)x\\\x82\xc3\x97\xe6\xd2\xef\xc3\n\x98)\xb3j\xcc\xa5%ZM!\x13$)4ilV\x93\xd9\xce\xd0=Y\xa7\x06\xd4W|`\xe6\xfdKwN\x14\xfd*\xb3\x95\xcdh\xdbe\x8e>\xb0\xa6^_\xa3j,6k,\xa8\x89\xea\x1d\xe8\xb89|>7\xa5\x8e\xa9-6j-\x88\xb2\x99\xcc\xad\xecu\t\xbd\xb0UkV\x97UT\x94\x1a0\xd2\x91\xf4\x9d\x8d\xdb|\xfcB\x137f4gu\x16\xb3\x1d\xc5\x1dU\x7f\xa8\xba\xa8;\xa2;Rzx\x9fU\x85\n\xa9\xc4\xf7\xd3\xde~g\xe3\xf1\xd3\xcc\x94\xad\x7f\xe2D\xe0\x8bM\x8d\xc3\x82\x80X\xd2\xaa\xad/\xc1\x03\x161\x828\x12\xe7c\xd2\x966\xac\x8e\x99\x0c\xf9m\xc2\xd7g/\x99\x9b\xfb\x99\x93M\xd6Fd\xa1\x9a4\xe62}\xf5\xc7:-\x93\xaa\x8aT\xc7!jSJ\xe7Y\x16L\x90!q9f\xd3\x18U\xec\x94\x14\x1c\x
bc\xc5\x81\x07'\xc5\xf9\xe9w\xc4\xc3\xfc\xb9t\x1e\xbf\xda{b:\xa3ti\"\x98\xc8\xe1\xf0\x01\x7fE\xd4\xbe\xbdqL\x99\xbe\xaa\x12\x95SefMc\xdd\xfe\x9a_62\x9f5\x9f6v#\xca\xd9\x9f\xbd\x93\x8d\x96\xc4Z\xf2\xf6\xefD\x94\xe0\xbd6v5Kk\x83\xbf\xd8>v\xe3b\xdb\xc0U,\xc0eqTl|A$\xa26&w\xf5\x7f\xee\xfc\xe4\xe9\x99~}e\x0f\xfb\"\xc2\xd8\x90;.\xff\xf9]\xbcL&\xef\xdan\xdb\x8ca\x16-_)\xcc\x17dc\x01\xe0s\xed\xf7-'\x06\xd8N\xbb\xa5\x19K\xde\xa81\xef\xab\xd4\x1b\xb4Z&\xe1\xc3\x98\x820D-\x0euN\xfccx\xe8\x9f\xf7\xae)\x12\x0e\xb0\xb5E\xc6\xca)\x1f\xec\xec\x03\t\x1d\x88}()\xa9\xc4\xde\xbe }\x7f\x92\xf4\xe7\x0ehvQ>\xc7\xd7\xf1Oq\xd6\xbfO\xf69a\x17\xb9s0\xb6+\x1c\x8f0g\xd9R\xc1K\xf0z\xe2\x07\xb3\x87\xaev_>\x83\x15\t\x9d\x90|\xafO\")\x14\xc1}\x9c\xeb\xd0e,\xdd\xe3\x1f\x1c\x8c\xa3=2>vk\xe4\xf1s\x17\xd7r\xb0\x90\x13\xf1\xed\x10/3J\x0eJ\xe0\x95\xa5\x8f\x85\x05\xc2\xbc\xd7W\t\xb3\x84y z\x1d\xd8q\xf0\xe8?\xe5\xb2LWm\xd0U2\xf2\xec0U,Z\x82\xde\xfb]\xd9\x18\xc5\x89m\xf7n^\xf8+z\x88\x86\xe3\xacA\xd4\x8b\xc6\xc1\xd3\x8b\xc0\xc3\x01M8\x1e!?\x9a\xfd\x99\xe1Gu\xd3\xf0|G\xe5PM\x1e\xed\xb4\xb5\x1c\xa8\xeb8t\xb4\xfe\x14\xeaEvW\xe9\xec\xc5\xa5\xa3\xc4\xa5#\x97Lo\xf6\x0f\xbe\xaa\"\xefE\x0e\xae\x8cM)\xda\x9e\xc4\xbcX\xd7\x07\xe0.\x85\x83\xce\x84\xc9\xa6\xb8\xe3\xda\xd8w\xa6\xab\x02\xdc\x05\xa7\x100=\x12|7\r\x87\xef\xd3\x13\x06\xfe\xba,Bpw\x92\x93p\xbc\x01\x939\x8a\x99\xdc\xc1L\x84uS\xc3\xbb\xb2\rn\xcf\x0c\xff\x03\xc7\xf5\xb1k\x95\xa5\x07@\xbc\x83\x835\xae\x9f\xab\x81g\xe2q\xde}\xa9\xb8n\xe0\x06\xce!\xe9Q\x17\x0en\x94\x16W\xa7b\x1c\xabm\xb2\xb8\xbeT\x82\x91<1\xd0\xd9~\x1cQ]\xc72w\xb3\xc2\xf5\xbb\xd3\xf6\xe6L>\xech\xefAT\xcf\xb1\xectV\x18\xba+y\xa9\x8f\x0f\x91W\x12\xce\xc7\xa4d\x97$\xc9\x99\xfc3\x99\xad\xc9\x88\xa2G\xe5(G\x9d\xa5pyUj\x17A?x\xc9\x923\xb3SS\xbb\xb3N\xb3f\xf2tw\xe7'\xbd\x99\x9d\xc9\xae\xdc\xf3\xeao\xc5\xb2\xba\xfa\x9aZTG5\x96\x9b\xcb\xca\xab\xf4\xa5U\x8c\xf0\xe5\xbfB\xaa+?\xaeF\xfa\xf9\xfb\x1a4M\r\x07\xeb,\x07\x99I0~\xd1O\xe1u\xf5N\xe2i\xe0\xec\x7f;'\xe6<\x04p\xbc''z\xea\x18u\x80\x97\xc3\x8d\x7f\x13^\x95\xf5\xe2%767T\x99\xca\xf7\xb3`\x97<\nw\xbe!Po\x0bn\xc2JFX#Aa-\xd1'w\x9c\x8c\xffM\xfeUD\xdd\x1e\xe99\x8eW\xaeT\xa77T\xeb\xd9=\xf9\x19\x9aD\x94\x842l{Nf\xf7\xa9/\xa2\xcb\x14\x04J@z\xf5\xab?\x7fq\xf6\x83(F.Y\xf2QX,ZGm\x18\x8c\xbbg6\xd5\xd461\xe7\xc5j\x83\x1eU *N\xd1\xfd\xe9\x85\x81_\x0f\xd5\xb0\xb3\xd5V\xfe-+x7\x1ck$\x1d39\x8f>\x93\xa7g\x9f\xd1s\x16A\xfc\x07\xbe\x9e\x12\xf0\nendstream\nendobj\n8 0 obj\n<>\nendobj\n13 0 obj\n<>stream\nx\x9c\x9d\x93{PSg\x1a\xc6O\x80\x9c\x9c\xad\xb4\"\xd9S\xd4\xb6Iv\xba\xabh\x91\x11\xa4\xad\xbbu\xb7\xd3B\xcb\xb6\x16G\xc1\x16P\xa0\x18\x03$\x84\\ AHBX\x92p1\xbc\x04\xb9$\xe1\x12 
@@B@.\xca\x1dA\xb7\x8a\x80\x8e\x8b\xbb\x9d\xae\xb3\xf62\xbb\xba[;[hw\xc3\xd4\xef\x8cGg\xf6$\xe8t\xf7\xdf\xfd\xeb\x9cy\xbfs\xde\xf7\xf9~\xcf\xf3\xb2\xb0\xa0\x00\x8c\xc5b=\x1b\xab(,\x90d\x15\xecy[\x91'\xf2\x15\"\xa8\x17X\xd4\x8b\x01\xd4K\x81\xfa\x12\xea1\xf5\x98M\xf1\x82\xb1\x9a`\x16\x04\x07BpP\xc7\x8b\x9c\x0b\xa1\xc8\xb3\x05\xc1f\xa4\r\xc1\x82X\xac\xd7\xdfOi\x0e\xff01y\xd7+\xafD\xc4*\x94\x9a\x02I\x8eX-\x88\xde\x1b\x15#\x10j\x04ON\x04qY*I\x8e\\\xb0\x83y9\x95\x95\xa7P\xca\xb2\xe4\xeaC\x12\x99\xb0P%HP\xc8\x15\x82\xc3I\x02\x9f\x80\xff-\xfd\xd8\xee\xff\x1b\x80a\xd8\xe6\xb8\x93\xa2\xac\xe4\xbdQ\xd1\xfbb^\x15\xec\xff\xe5\xaf0\xec\x17X\x1c\xf6\x0e\xf6.\xb6\x1f\xdb\x82\x85b\\\xec\xa7\x18\x89=\x8f\xb1\xb0m\xd8v\xec\x05,\x84\x81\x82\x05aE\x18\xc5r\x07\x04\x04X\x03\x1e\x04&\x05^\tJ\x0bZ`\xc7\xb3\xdfg/\xe1\xb1\xb8\x86Z}\x8eZ\x05/z\xe8eQ\x89\x08\x0b\xfc\xa3\x97\xcc\xaaV\x17C\x1eh\xad\xbaf\xa3\xad\xbc\xf5\xb4\x0b\x08\x94\x89\xa3\xe8*\x14\xf8\xef\x1a\x14ALr\x00\xed\xa19h\x13\xbd\xd3L\xd0b\\\t\xa6jC\x85\xce`\xd0\x82\xd6\xf7W\x8b\xd1Z\xde`\xee\xaa&\x10F?$\xd1\xc3\x1f8\xf7\xcf\xac\xbck\t'28\x10\x91p$\xfc\x0c\xc1\x8c,\xf1\xa2j/k\x8e\x99H\x8dQ89\xad\xeb\xcc),3\x15\x97\xf3\xb2\xda\x8fY\x8f\x02A\xef\x11\xec\xa6\xf9\x87;S\xc6D\xfc\xb9\xb4\xebEk\xf0\x19\xdc\xb0\x8f9';\xbb{\xe1,\xd1\xa7r\xc9J\rU&\x03\xefd\xae\xd4\xf8\x06\xf3='q\xf4\xcf_,^\xfafb\xc8\xa4\xeb\xe17\x95\xd7\x9bjuu\x85\xb5\x15\x8d\xe5V\x93\xa3\xa2\x05\xda\xc0\xd1hon\xb4Yl\xd0\xeb\x13P\xea\x8dr\xa2\x15o\xa8\x1bah\x02aa\xdc)j\x80\xfa\x9e\xa4\x83\xf1\xfc\xa7\xf7\xd1\x81\x06\xb4\x8d%-\x06{\xb9\xed\xf4Y \x9a~\x86\x8b\xdc\xa9\xad\x89\xf0\x1bH,J\xcbL\xcbT%\xc1\x07p\xd0\x954\x939\x93y\xb5\xe86,\xc0\x85\xa6\x8b\x1e\x82[,C\xc1\x1c\x17\xd8-\xd6:\x87\xcd\xd6\x06\xed\xe009\xf4\xb6\xb2\x06\xa3E\x01\xc4\xefp\xba\x1e\x95\x90\xb3\xe0)\xeb\xcbw\x15\xb6HAFp\xa7\xde:\x9c\x1a\x93\x9e\xdb\xd4\xa3\xe4\xa9\xba\xf5\x1e\x18\x00O\x8b\xc7\xd5}\xb6w\xc0>\x0b\x1b\xc0n\xdf\xff\x0bc\xd2<\xdaO\x8eq\xd0v:p\x8d\x8e\xa0w\xd1\xecp\x9a\xa4\xc3P@$\x8a\xfe\xd4\xdb\xe6\x9c\xe2\xf5\xd8\x9aZ\xa1\x93p\x17v\xcb\xcb\xca\xcc\xa7KyQ\xea\xfc\xaat\xd8\x0f\xa9\xae\x82K\x84\xe5>\xe9\x98^\x18X\x81\x15\xb8*mK\xf7u\x06'\x95\xe0e\xa1\xcb\xc8F~M\xdb\xd8\x88\xc0\x17)a\x7f][\x07\x9c\xdd\xc6\x08o\xd5\xdb\x9f\x08\xa7\xc3\x9e\xb21\x1a4>\xaf\x1b\x19\xaf\xed&\xbb\xb9\x17\x88\x8bx.m\x8cE\x1f\xb3i\x0c\x8f\xa5?\xceEF\xf6\x04\xeeC`\xfb\x11A+\x83\xa0\xd1\xf0\xa4\x93\x12\xca\x99NZ\x83Q\x07E\xa0ph\xfb\xab\x96\x1f\t\xb7\xa2gpF\x91\xdeK\xfd\xda\xcb\xba\xc38s\xca\x17\x90v\xf4\x1d\t\xf7\xe4wR\xe7s\x86\x8e\xb7\x1f\x81#p\\\x93#NM\x91\x1f\x80}D\x14\x07b\xdco\xcc\xa5\x0e\x8bg5\x0b\x8c\x03\xb3\xed\xc3Css\xee\xcf\xe1.A\xdf]%\xd7&\xaf\xdf\xba5\xf9\xc1.\xde\xcf9\xbb3\x0e\xc6\xc7g\xdcX\xe5m$\xfe\xae\x93\x85\xaa\x99\xf6\xe8\x01\xf5\x98\xa4e\x1f\x9d0\xe8\xf5 \xdf&\xebR\xf5\xd9jk\xea\x9c\xbc/;\xd9\x8f\xb6\xec\xe6\xe4\xffw\xbcuV\xed\xc6Rt3K\xf1\t>\xedj?\xe7\xbf\x17\xdfw1%\x10\xbb}\xf2a\x9d\x8ad\x9cz\xd9\xd7\\\xbeN\xa2f\x94\xe5\x1e\x84\xaf\x88\x07\x91_\xd0!\x87\x92\x8a\xc4B\x9eX\xa6L\x03)\xa1\xecQ\xbb\xbb\x9dM\xed\xf5<\xbb\xa7\xc6b\xb5u\xb9\x06[\xce\x03q}V\x9c\x96\xa7+\xde\x19\xc3\x17\xe6\xbc\x93H\x13Q\x15\x95[\x05\x94\xf0\x1e\x07\\fk\x85\xcd\xd0\xaa\xb5\x16\x83\x14\xb4\xba*1\xe1\xc7\x85\xbes^\xf3\x86R;\x11\xf6\xaa/\xca\xdf 
7\xf5\x13R\xaa*\x94\xcb\x9d\xda!3\x7f\xcal7;M\xd3\x9a>)H\xe0T\x99ZW\x9a\xaf\xce1\xc6\xc3A\x90\xd7\xa9\x1cZ[\xa5\xa5\x14\x88<\xb5Z\x9e\xf2U.\n\xbdw\xb9yp\x8a?s\xce\xfd\t\\\x85\xc5\xec\xb9\xb8s\x04\xf7_\x8bC\xbd\xa3\xf3\xdba\xbcx\\\xea\x11\x8d$w\xc43&\x06\x86'\x1f\x91\xbb\xd4\xee\xd6\x96z\x9b\x95?0\xd8k\xfb=\x10\x7f\x18\xcf?!:)I\xe3\xfb)\xbb}\xd2X\xe8[\x9f\x8d\xc9\xd4\x1aI\xbf\x84\xd3U\x8fH\xf6\xeb\xa8G.\xe1\x14\x80\xd1l\xa8\xdc@KH\\\x9ai\x1e\xda\x8a\xcf\xf8\x99:\xf4V\xbe\xa1\xa1\xdcRXC\xb89\xe7k\xba:\x98\x8d\xf0/\x91\xa1\xde_\xa4\xb1\xe7i\x1e\x8ex(\x97\xbdA \xdf\xfbW&\xc4\x1c&3\x19>\xee*\xaa\x92D\xc7\xf0.h\xb14>M`\x9b?\x81\r~\xa3\xe8kt\x1f\x9e\xdb\xad\xf2\xd8\xcf\xd44\xb4\xf0\xc6\x9c\xd3\xcd\x1e nNd\xc4\xbf\x95.\xd9\xf1\x9e\xa2\xa1[\xc6/i6\xd5\x96\x00!/P+\x92\xee\x9f@!\xdf.t\xccL\xf1\x87G\x9d\xf3p\x85@[\xf6~M\x87\xc8\xf3*\rb_\xa06D\xbc\xb6\x8e\xf6yC\x99\xe0\x863:D\xfeG\x18w\x95z\x13-\x91W\x86\xddSp\x91\xf8>\xf2\x0e\xbd\x89\xde\x14y`g\xaa;\xf3J6\x8f\xebM\xc8\x96\xa6\x1c\xde\xfe\xf2\xdf\xe3P\x18\xda\xfa\x8f?\xad_\x93\xce'\x8c\xf0\xb8\xab4\x17\t\xc9\xa5\ti\xfa\xb1\x13\xd2\x84C\x99\x8333\xe3\x03\xcb|\xae\x97v\x04-\xcf\xe7d\x1cO\xcf\xfd\xed{i\x833\xd3\xf3\xc3\xcb>\xd6\xfa\x1fP\xe8::\xeae=\xf0\xb1\x8eC\xfd\xa4\x92f\xed{s\x07\x18\xe1t\x8d\xa1V[o\xb0\x18\x80\x90\x15\xa8e\xa2\xd9\xfcO\xff\xf9\xe5\x85\xcfW\xf8\x97\x96z?\x83\xbf\xc1-\xcdm\xe5\xb4\xe8\xe6\xa1\xc1\xd7 \x1eR\x8b\xb3E\x92\x9c\xe2T8\xca\x18|7\x1aa\xb3\xa3m\xe3\x93<\x13\xdaL\xe6g\x1c\xcb\x15\x02\x91,\x1c\xbf\xbc4<\xbcx\xe3\x9c\xf8@\xab\x7f4\xe3\xf0\xb2\x9e<\xefq\x8f\x8e\xe4\xf5\x8b\xf8\x1a>stream\n\n\n\n\n\n2024-12-18T15:59:31-05:00\n2024-12-18T15:59:31-05:00\nGNU Enscript 1.6.6\n\nEnscript Output\n\n\n \n \n\nendstream\nendobj\n2 0 obj\n<>endobj\nxref\n0 15\n0000000000 65535 f \n0000000711 00000 n \n0000007145 00000 n \n0000000652 00000 n \n0000000510 00000 n \n0000000266 00000 n \n0000000491 00000 n \n0000001145 00000 n \n0000003652 00000 n \n0000000815 00000 n \n0000001471 00000 n \n0000000776 00000 n \n0000001773 00000 n \n0000003974 00000 n \n0000005817 00000 n \ntrailer\n<< /Size 15 /Root 1 0 R /Info 2 0 R\n/ID [<9BB34E42BF7AF21FE61720F4EBDFCCF8><9BB34E42BF7AF21FE61720F4EBDFCCF8>]\n>>\nstartxref\n7334\n%%EOF\n" + } + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + def check(self, module_test, events): + finding_events = [e for e in events if e.type == "FINDING"] + assert len(finding_events) == 1 + assert "Possible Secret Found" in finding_events[0].data["description"] diff --git a/bbot/test/test_step_2/module_tests/test_module_txt.py b/bbot/test/test_step_2/module_tests/test_module_txt.py new file mode 100644 index 0000000000..5602c8664b --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_txt.py @@ -0,0 +1,8 @@ +from .base import ModuleTestBase + + +class TestTXT(ModuleTestBase): + def check(self, module_test, events): + txt_file = module_test.scan.home / "output.txt" + with open(txt_file) as f: + assert f.read().startswith("[SCAN]") diff --git a/bbot/test/test_step_2/module_tests/test_module_unarchive.py b/bbot/test/test_step_2/module_tests/test_module_unarchive.py new file mode 100644 index 0000000000..41c96c49c7 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_unarchive.py @@ -0,0 +1,229 @@ +import asyncio + +from pathlib import Path +from .base import ModuleTestBase + + +class TestUnarchive(ModuleTestBase): + targets = ["http://127.0.0.1:8888"] + modules_overrides = ["filedownload", "httpx", "excavate", "speculate", "unarchive"] + + async def 
setup_after_prep(self, module_test): + temp_path = Path("/tmp/.bbot_test") + + # Create a text file to compress + text_file = temp_path / "test.txt" + with open(text_file, "w") as f: + f.write("This is a test file") + zip_file = temp_path / "test.zip" + zip_zip_file = temp_path / "test_zip.zip" + bz2_file = temp_path / "test.bz2" + xz_file = temp_path / "test.xz" + zip7_file = temp_path / "test.7z" + # lzma_file = temp_path / "test.lzma" + tar_file = temp_path / "test.tar" + tgz_file = temp_path / "test.tgz" + commands = [ + ("7z", "a", '-p""', "-aoa", f"{zip_file}", f"{text_file}"), + ("7z", "a", '-p""', "-aoa", f"{zip_zip_file}", f"{zip_file}"), + ("tar", "-C", f"{temp_path}", "-cvjf", f"{bz2_file}", f"{text_file.name}"), + ("tar", "-C", f"{temp_path}", "-cvJf", f"{xz_file}", f"{text_file.name}"), + ("7z", "a", '-p""', "-aoa", f"{zip7_file}", f"{text_file}"), + # ("tar", "-C", f"{temp_path}", "--lzma", "-cvf", f"{lzma_file}", f"{text_file.name}"), + ("tar", "-C", f"{temp_path}", "-cvf", f"{tar_file}", f"{text_file.name}"), + ("tar", "-C", f"{temp_path}", "-cvzf", f"{tgz_file}", f"{text_file.name}"), + ] + + for command in commands: + process = await asyncio.create_subprocess_exec( + *command, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE + ) + stdout, stderr = await process.communicate() + assert process.returncode == 0, f"Command {command} failed with error: {stderr.decode()}" + + module_test.set_expect_requests( + dict(uri="/"), + dict( + response_data="""
+ + + + + + """, + ), + ) + ( + module_test.set_expect_requests( + dict(uri="/test.zip"), + dict( + response_data=zip_file.read_bytes(), + headers={"Content-Type": "application/zip"}, + ), + ), + ) + ( + module_test.set_expect_requests( + dict(uri="/test-zip.zip"), + dict( + response_data=zip_zip_file.read_bytes(), + headers={"Content-Type": "application/zip"}, + ), + ), + ) + ( + module_test.set_expect_requests( + dict(uri="/test.bz2"), + dict( + response_data=bz2_file.read_bytes(), + headers={"Content-Type": "application/x-bzip2"}, + ), + ), + ) + ( + module_test.set_expect_requests( + dict(uri="/test.xz"), + dict( + response_data=xz_file.read_bytes(), + headers={"Content-Type": "application/x-xz"}, + ), + ), + ) + ( + module_test.set_expect_requests( + dict(uri="/test.7z"), + dict( + response_data=zip7_file.read_bytes(), + headers={"Content-Type": "application/x-7z-compressed"}, + ), + ), + ) + # ( + # module_test.set_expect_requests( + # dict(uri="/test.rar"), + # dict( + # response_data=b"Rar!\x1a\x07\x01\x003\x92\xb5\xe5\n\x01\x05\x06\x00\x05\x01\x01\x80\x80\x00\xa2N\x8ec&\x02\x03\x0b\x93\x00\x04\x93\x00\xa4\x83\x02\xc9\x11f\x06\x80\x00\x01\x08test.txt\n\x03\x13S\x96ug\x96\xf3\x1b\x06This is a test file\x1dwVQ\x03\x05\x04\x00", + # headers={"Content-Type": "application/vnd.rar"}, + # ), + # ), + # ) + # ( + # module_test.set_expect_requests( + # dict(uri="/test.lzma"), + # dict( + # response_data=lzma_file.read_bytes(), + # headers={"Content-Type": "application/x-lzma"}, + # ), + # ), + # ) + ( + module_test.set_expect_requests( + dict(uri="/test.tar"), + dict( + response_data=tar_file.read_bytes(), + headers={"Content-Type": "application/x-tar"}, + ), + ), + ) + ( + module_test.set_expect_requests( + dict(uri="/test.tgz"), + dict( + response_data=tgz_file.read_bytes(), + headers={"Content-Type": "application/x-tgz"}, + ), + ), + ) + + def check(self, module_test, events): + filesystem_events = [e for e in events if e.type == "FILESYSTEM"] + + # ZIP + zip_file_event = [e for e in filesystem_events if "test.zip" in e.data["path"]] + assert 1 == len(zip_file_event), "No zip file found" + file = Path(zip_file_event[0].data["path"]) + assert file.is_file(), f"File not found at {file}" + extract_event = [e for e in filesystem_events if "test_zip" in e.data["path"] and "folder" in e.tags] + assert 1 == len(extract_event), "Failed to extract zip" + extract_path = Path(extract_event[0].data["path"]) / "test.txt" + assert extract_path.is_file(), "Failed to extract the test file" + + # Recursive ZIP + zip_zip_file_event = [e for e in filesystem_events if "test-zip.zip" in e.data["path"]] + assert 1 == len(zip_zip_file_event), "No recursive file found" + file = Path(zip_zip_file_event[0].data["path"]) + assert file.is_file(), f"File not found at {file}" + extract_event = [e for e in filesystem_events if "test-zip_zip" in e.data["path"] and "folder" in e.tags] + assert 1 == len(extract_event), "Failed to extract zip" + extract_path = Path(extract_event[0].data["path"]) / "test" / "test.txt" + assert extract_path.is_file(), "Failed to extract the test file" + + # BZ2 + bz2_file_event = [e for e in filesystem_events if "test.bz2" in e.data["path"]] + assert 1 == len(bz2_file_event), "No bz2 file found" + file = Path(bz2_file_event[0].data["path"]) + assert file.is_file(), f"File not found at {file}" + extract_event = [e for e in filesystem_events if "test_bz2" in e.data["path"] and "folder" in e.tags] + assert 1 == len(extract_event), "Failed to extract bz2" + extract_path = 
Path(extract_event[0].data["path"]) / "test.txt" + assert extract_path.is_file(), "Failed to extract the test file" + + # XZ + xz_file_event = [e for e in filesystem_events if "test.xz" in e.data["path"]] + assert 1 == len(xz_file_event), "No xz file found" + file = Path(xz_file_event[0].data["path"]) + assert file.is_file(), f"File not found at {file}" + extract_event = [e for e in filesystem_events if "test_xz" in e.data["path"] and "folder" in e.tags] + assert 1 == len(extract_event), "Failed to extract xz" + extract_path = Path(extract_event[0].data["path"]) / "test.txt" + assert extract_path.is_file(), "Failed to extract the test file" + + # 7z + zip7_file_event = [e for e in filesystem_events if "test.7z" in e.data["path"]] + assert 1 == len(zip7_file_event), "No 7z file found" + file = Path(zip7_file_event[0].data["path"]) + assert file.is_file(), f"File not found at {file}" + extract_event = [e for e in filesystem_events if "test_7z" in e.data["path"] and "folder" in e.tags] + assert 1 == len(extract_event), "Failed to extract 7z" + extract_path = Path(extract_event[0].data["path"]) / "test.txt" + assert extract_path.is_file(), "Failed to extract the test file" + + # RAR + # rar_file_event = [e for e in filesystem_events if "test.rar" in e.data["path"]] + # assert 1 == len(rar_file_event), "No rar file found" + # file = Path(rar_file_event[0].data["path"]) + # assert file.is_file(), f"File not found at {file}" + # extract_event = [e for e in filesystem_events if "test_rar" in e.data["path"] and "folder" in e.tags] + # assert 1 == len(extract_event), "Failed to extract rar" + # extract_path = Path(extract_event[0].data["path"]) / "test.txt" + # assert extract_path.is_file(), list(extract_path.parent.iterdir()) + + # LZMA + # lzma_file_event = [e for e in filesystem_events if "test.lzma" in e.data["path"]] + # assert 1 == len(lzma_file_event), "No lzma file found" + # file = Path(lzma_file_event[0].data["path"]) + # assert file.is_file(), f"File not found at {file}" + # extract_event = [e for e in filesystem_events if "test_lzma" in e.data["path"] and "folder" in e.tags] + # assert 1 == len(extract_event), "Failed to extract lzma" + # extract_path = Path(extract_event[0].data["path"]) / "test.txt" + # assert extract_path.is_file(), "Failed to extract the test file" + + # TAR + tar_file_event = [e for e in filesystem_events if "test.tar" in e.data["path"]] + assert 1 == len(tar_file_event), "No tar file found" + file = Path(tar_file_event[0].data["path"]) + assert file.is_file(), f"File not found at {file}" + extract_event = [e for e in filesystem_events if "test_tar" in e.data["path"] and "folder" in e.tags] + assert 1 == len(extract_event), "Failed to extract tar" + extract_path = Path(extract_event[0].data["path"]) / "test.txt" + assert extract_path.is_file(), "Failed to extract the test file" + + # TGZ + tgz_file_event = [e for e in filesystem_events if "test.tgz" in e.data["path"]] + assert 1 == len(tgz_file_event), "No tgz file found" + file = Path(tgz_file_event[0].data["path"]) + assert file.is_file(), f"File not found at {file}" + extract_event = [e for e in filesystem_events if "test_tgz" in e.data["path"] and "folder" in e.tags] + assert 1 == len(extract_event), "Failed to extract tgz" + extract_path = Path(extract_event[0].data["path"]) / "test.txt" + assert extract_path.is_file(), "Failed to extract the test file" diff --git a/bbot/test/test_step_2/module_tests/test_module_url_manipulation.py b/bbot/test/test_step_2/module_tests/test_module_url_manipulation.py new file 
mode 100644 index 0000000000..725a96fecf --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_url_manipulation.py @@ -0,0 +1,39 @@ +from .base import ModuleTestBase + + +class TestUrl_Manipulation(ModuleTestBase): + targets = ["http://127.0.0.1:8888"] + modules_overrides = ["httpx", "url_manipulation"] + body = """ + + the title + +

Hello null!

'; + + + """ + + body_match = """ + + the title + +

Hello AAAAAAAAAAAAAA!

'; + + + """ + + async def setup_after_prep(self, module_test): + expect_args = {"query_string": f"{module_test.module.rand_string}=.xml".encode()} + respond_args = {"response_data": self.body_match} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + respond_args = {"response_data": self.body} + module_test.set_expect_requests(respond_args=respond_args) + + def check(self, module_test, events): + assert any( + e.type == "FINDING" + and e.data["description"] + == f"Url Manipulation: [body] Sig: [Modified URL: http://127.0.0.1:8888/?{module_test.module.rand_string}=.xml]" + for e in events + ) diff --git a/bbot/test/test_step_2/module_tests/test_module_urlscan.py b/bbot/test/test_step_2/module_tests/test_module_urlscan.py new file mode 100644 index 0000000000..d108f2f565 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_urlscan.py @@ -0,0 +1,58 @@ +from .base import ModuleTestBase + + +class TestUrlScan(ModuleTestBase): + config_overrides = {"modules": {"urlscan": {"urls": True}}} + + async def setup_after_prep(self, module_test): + module_test.httpx_mock.add_response( + url="https://urlscan.io/api/v1/search/?q=blacklanternsecurity.com", + json={ + "results": [ + { + "task": { + "visibility": "public", + "method": "api", + "domain": "asdf.blacklanternsecurity.com", + "apexDomain": "blacklanternsecurity.com", + "time": "2023-05-17T01:45:11.391Z", + "uuid": "c558b3b3-b274-4339-99ef-301eb043741f", + "url": "https://asdf.blacklanternsecurity.com/cna.html", + }, + "stats": { + "uniqIPs": 6, + "uniqCountries": 3, + "dataLength": 926713, + "encodedDataLength": 332213, + "requests": 22, + }, + "page": { + "country": "US", + "server": "GitHub.com", + "ip": "2606:50c0:8002::153", + "mimeType": "text/html", + "title": "Vulnerability Program | Black Lantern Security", + "url": "https://asdf.blacklanternsecurity.com/cna.html", + "tlsValidDays": 89, + "tlsAgeDays": 25, + "tlsValidFrom": "2023-04-21T19:16:58.000Z", + "domain": "asdf.blacklanternsecurity.com", + "apexDomain": "blacklanternsecurity.com", + "asnname": "FASTLY, US", + "asn": "AS54113", + "tlsIssuer": "R3", + "status": "200", + }, + "_id": "c558b3b3-b274-4339-99ef-301eb043741f", + "_score": None, + "sort": [1684287911391, "c558b3b3-b274-4339-99ef-301eb043741f"], + "result": "https://urlscan.io/api/v1/result/c558b3b3-b274-4339-99ef-301eb043741f/", + "screenshot": "https://urlscan.io/screenshots/c558b3b3-b274-4339-99ef-301eb043741f.png", + } + ] + }, + ) + + def check(self, module_test, events): + assert any(e.data == "asdf.blacklanternsecurity.com" for e in events), "Failed to detect subdomain" + assert any(e.data == "https://asdf.blacklanternsecurity.com/cna.html" for e in events), "Failed to detect URL" diff --git a/bbot/test/test_step_2/module_tests/test_module_vhost.py b/bbot/test/test_step_2/module_tests/test_module_vhost.py new file mode 100644 index 0000000000..16f9991f6e --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_vhost.py @@ -0,0 +1,65 @@ +from .base import ModuleTestBase, tempwordlist + + +class TestVhost(ModuleTestBase): + targets = ["http://localhost:8888", "secret.localhost"] + modules_overrides = ["httpx", "vhost"] + test_wordlist = ["11111111", "admin", "cloud", "junkword1", "zzzjunkword2"] + config_overrides = { + "modules": { + "vhost": { + "wordlist": tempwordlist(test_wordlist), + } + } + } + + async def setup_after_prep(self, module_test): + expect_args = {"method": "GET", "uri": "/", "headers": {"Host": "admin.localhost:8888"}} + respond_args = 
{"response_data": "Alive vhost admin"} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + expect_args = {"method": "GET", "uri": "/", "headers": {"Host": "cloud.localhost:8888"}} + respond_args = {"response_data": "Alive vhost cloud"} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + expect_args = {"method": "GET", "uri": "/", "headers": {"Host": "q-cloud.localhost:8888"}} + respond_args = {"response_data": "Alive vhost q-cloud"} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + expect_args = {"method": "GET", "uri": "/", "headers": {"Host": "secret.localhost:8888"}} + respond_args = {"response_data": "Alive vhost secret"} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + expect_args = {"method": "GET", "uri": "/", "headers": {"Host": "host.docker.internal"}} + respond_args = {"response_data": "Alive vhost host.docker.internal"} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + expect_args = {"method": "GET", "uri": "/"} + respond_args = {"response_data": "alive"} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + def check(self, module_test, events): + basic_detection = False + mutaton_of_detected = False + basehost_mutation = False + special_vhost_list = False + wordcloud_detection = False + + for e in events: + if e.type == "VHOST": + if e.data["vhost"] == "admin": + basic_detection = True + if e.data["vhost"] == "cloud": + mutaton_of_detected = True + if e.data["vhost"] == "q-cloud": + basehost_mutation = True + if e.data["vhost"] == "host.docker.internal": + special_vhost_list = True + if e.data["vhost"] == "secret": + wordcloud_detection = True + + assert basic_detection + assert mutaton_of_detected + assert basehost_mutation + assert special_vhost_list + assert wordcloud_detection diff --git a/bbot/test/test_step_2/module_tests/test_module_viewdns.py b/bbot/test/test_step_2/module_tests/test_module_viewdns.py new file mode 100644 index 0000000000..45417b2dca --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_viewdns.py @@ -0,0 +1,151 @@ +from .base import ModuleTestBase + + +class TestViewDNS(ModuleTestBase): + async def setup_after_prep(self, module_test): + module_test.httpx_mock.add_response( + url="https://viewdns.info/reversewhois/?q=blacklanternsecurity.com", + text=web_body, + ) + + def check(self, module_test, events): + assert any(e.data == "hyperloop.com" and "affiliate" in e.tags for e in events), ( + "Failed to detect affiliate domain" + ) + + +web_body = """ + + + Reverse Whois Lookup - ViewDNS.info + + + + + + + + + + + +
ViewDNS.info - Your one source for DNS related tools! + + + + +
+ + + + + + + + + + + + + + + +
+ + ViewDNS.info > Tools > +

Reverse Whois Lookup

+

This free tool will allow you to find domain names owned by an individual person or company. Simply enter the email address or name of the person or company to find other domains registered using those same details. FAQ.

+ +
Registrant Name or Email Address:
+
+
+
+ Reverse Whois results for blacklanternsecurity.com
==============

There are 20 domains that matched this search query.
These are listed below:

+ + + + + + +
hyperloop.com 2003-12-04 NETWORK SOLUTIONS, LLC.
+
+
+ + + + + +
+ + + + +

+ +
+ + + + +
+
+ All content © 2023 ViewDNS.info
Feedback / Suggestions / Contact Us - Privacy Policy
+
+
+ + + + +
+ + + +
+
+
+
+
+ + +""" diff --git a/bbot/test/test_step_2/module_tests/test_module_virustotal.py b/bbot/test/test_step_2/module_tests/test_module_virustotal.py new file mode 100644 index 0000000000..a6325978f0 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_virustotal.py @@ -0,0 +1,51 @@ +from .base import ModuleTestBase + + +class TestVirusTotal(ModuleTestBase): + config_overrides = {"modules": {"virustotal": {"api_key": "asdf"}}} + + async def setup_before_prep(self, module_test): + module_test.httpx_mock.add_response( + url="https://www.virustotal.com/api/v3/domains/blacklanternsecurity.com/subdomains", + json={ + "meta": {"count": 25, "cursor": "eyJsaW1pdCI6IDEwLCAib2Zmc2V0IjogMTB9"}, + "data": [ + { + "attributes": { + "last_dns_records": [{"type": "A", "value": "168.62.180.225", "ttl": 3600}], + "whois": "Creation Date: 2013-07-30T20:14:50Z\nDNSSEC: unsigned\nDomain Name: BLACKLANTERNSECURITY.COM\nDomain Status: clientDeleteProhibited https://icann.org/epp#clientDeleteProhibited\nDomain Status: clientRenewProhibited https://icann.org/epp#clientRenewProhibited\nDomain Status: clientTransferProhibited https://icann.org/epp#clientTransferProhibited\nDomain Status: clientUpdateProhibited https://icann.org/epp#clientUpdateProhibited\nName Server: NS01.DOMAINCONTROL.COM\nName Server: NS02.DOMAINCONTROL.COM\nRegistrar Abuse Contact Email: abuse@godaddy.com\nRegistrar Abuse Contact Phone: 480-624-2505\nRegistrar IANA ID: 146\nRegistrar URL: http://www.godaddy.com\nRegistrar WHOIS Server: whois.godaddy.com\nRegistrar: GoDaddy.com, LLC\nRegistry Domain ID: 1818679075_DOMAIN_COM-VRSN\nRegistry Expiry Date: 2023-07-30T20:14:50Z\nUpdated Date: 2022-09-14T16:28:14Z", + "tags": [], + "popularity_ranks": {}, + "last_dns_records_date": 1657734301, + "last_analysis_stats": { + "harmless": 0, + "malicious": 0, + "suspicious": 0, + "undetected": 86, + "timeout": 0, + }, + "creation_date": 1375215290, + "reputation": 0, + "registrar": "GoDaddy.com, LLC", + "last_analysis_results": {}, + "last_update_date": 1663172894, + "last_modification_date": 1657734301, + "tld": "com", + "categories": {}, + "total_votes": {"harmless": 0, "malicious": 0}, + }, + "type": "domain", + "id": "asdf.blacklanternsecurity.com", + "links": {"self": "https://www.virustotal.com/api/v3/domains/asdf.blacklanternsecurity.com"}, + "context_attributes": {"timestamp": 1657734301}, + } + ], + "links": { + "self": "https://www.virustotal.com/api/v3/domains/blacklanternsecurity.com/subdomains?limit=10", + "next": "https://www.virustotal.com/api/v3/domains/blacklanternsecurity.com/subdomains?cursor=eyJsaW1pdCI6IDEwLCAib2Zmc2V0IjogMTB9&limit=10", + }, + }, + ) + + def check(self, module_test, events): + assert any(e.data == "asdf.blacklanternsecurity.com" for e in events), "Failed to detect subdomain" diff --git a/bbot/test/test_step_2/module_tests/test_module_wafw00f.py b/bbot/test/test_step_2/module_tests/test_module_wafw00f.py new file mode 100644 index 0000000000..892d892ff0 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_wafw00f.py @@ -0,0 +1,50 @@ +from .base import ModuleTestBase + +from werkzeug.wrappers import Response + + +class TestWafw00f(ModuleTestBase): + targets = ["http://127.0.0.1:8888"] + modules_overrides = ["httpx", "wafw00f"] + + async def setup_after_prep(self, module_test): + expect_args = {"method": "GET", "uri": "/"} + respond_args = {"response_data": "Proudly powered by litespeed web server"} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + def 
check(self, module_test, events): + assert any(e.type == "WAF" and "LiteSpeed" in e.data["waf"] for e in events) + + +class TestWafw00f_noredirect(ModuleTestBase): + targets = ["http://127.0.0.1:8888"] + modules_overrides = ["httpx", "wafw00f"] + + async def setup_after_prep(self, module_test): + expect_args = {"method": "GET", "uri": "/"} + respond_args = {"status": 301, "headers": {"Location": "/redirect"}} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + expect_args = {"method": "GET", "uri": "/redirect"} + respond_args = {"response_data": "Proudly powered by litespeed web server"} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + def check(self, module_test, events): + assert not any(e.type == "WAF" for e in events) + + +class TestWafw00f_genericdetection(ModuleTestBase): + targets = ["http://127.0.0.1:8888"] + modules_overrides = ["httpx", "wafw00f"] + + async def setup_after_prep(self, module_test): + def handler(request): + if "SLEEP" in request.url: + return Response("nope", status=403) + return Response("yep") + + module_test.httpserver.expect_request("/").respond_with_handler(handler) + + def check(self, module_test, events): + waf_events = [e for e in events if e.type == "WAF"] + assert len(waf_events) == 1 + assert waf_events[0].data["waf"] == "generic detection" diff --git a/bbot/test/test_step_2/module_tests/test_module_wappalyzer.py b/bbot/test/test_step_2/module_tests/test_module_wappalyzer.py new file mode 100644 index 0000000000..99376ec457 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_wappalyzer.py @@ -0,0 +1,20 @@ +from .base import ModuleTestBase + + +class TestWappalyzer(ModuleTestBase): + targets = ["http://127.0.0.1:8888"] + modules_overrides = ["httpx", "wappalyzer"] + + async def setup_after_prep(self, module_test): + respond_args = { + "response_data": """BBOT is life + + + +""", + "headers": {"Server": "Apache/2.4.41 (Ubuntu)"}, + } + module_test.set_expect_requests(respond_args=respond_args) + + def check(self, module_test, events): + assert any(e.type == "TECHNOLOGY" and e.data["technology"].lower() == "google font api" for e in events) diff --git a/bbot/test/test_step_2/module_tests/test_module_wayback.py b/bbot/test/test_step_2/module_tests/test_module_wayback.py new file mode 100644 index 0000000000..7582e54173 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_wayback.py @@ -0,0 +1,12 @@ +from .base import ModuleTestBase + + +class TestWayback(ModuleTestBase): + async def setup_after_prep(self, module_test): + module_test.httpx_mock.add_response( + url="http://web.archive.org/cdx/search/cdx?url=blacklanternsecurity.com&matchType=domain&output=json&fl=original&collapse=original", + json=[["original"], ["http://asdf.blacklanternsecurity.com"]], + ) + + def check(self, module_test, events): + assert any(e.data == "asdf.blacklanternsecurity.com" for e in events), "Failed to detect subdomain" diff --git a/bbot/test/test_step_2/module_tests/test_module_web_parameters.py b/bbot/test/test_step_2/module_tests/test_module_web_parameters.py new file mode 100644 index 0000000000..7e47775e46 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_web_parameters.py @@ -0,0 +1,59 @@ +from .test_module_excavate import TestExcavateParameterExtraction + + +class TestWebParameters(TestExcavateParameterExtraction): + modules_overrides = ["excavate", "httpx", "web_parameters"] + + def check(self, module_test, events): + parameters_file = 
module_test.scan.home / "web_parameters.txt" + with open(parameters_file) as f: + data = f.read() + + assert "age" in data + assert "fit" in data + assert "id" in data + assert "jqueryget" in data + assert "jquerypost" in data + assert "size" in data + + # after lightfuzz is merged uncomment these additional parameters + # assert "blog-post-author-display" in data + # assert "csrf" in data + # assert "q1" in data + # assert "q2" in data + # assert "q3" in data + # assert "test" in data + + +class TestWebParameters_include_count(TestWebParameters): + config_overrides = { + "web": {"spider_distance": 1, "spider_depth": 1}, + "modules": {"web_parameters": {"include_count": True}}, + } + + def check(self, module_test, events): + parameters_file = module_test.scan.home / "web_parameters.txt" + with open(parameters_file) as f: + data = f.read() + assert "2\tq" in data + assert "1\tage" in data + assert "1\tfit" in data + assert "1\tid" in data + assert "1\tjqueryget" in data + assert "1\tjquerypost" in data + assert "1\tsize" in data + + # after lightfuzz is merged, these will be the correct parameters to check + + # assert "3\ttest" in data + # assert "2\tblog-post-author-display" in data + # assert "2\tcsrf" in data + # assert "2\tq2" in data + # assert "1\tage" in data + # assert "1\tfit" in data + # assert "1\tid" in data + # assert "1\tjqueryget" in data + # assert "1\tjquerypost" in data + # assert "1\tq1" in data + # assert "1\tq3" in data + # assert "1\tsize" in data diff --git a/bbot/test/test_step_2/module_tests/test_module_web_report.py b/bbot/test/test_step_2/module_tests/test_module_web_report.py new file mode 100644 index 0000000000..cfaa90f217 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_web_report.py @@ -0,0 +1,67 @@ +from .base import ModuleTestBase + + +class TestWebReport(ModuleTestBase): + targets = ["http://127.0.0.1:8888"] + modules_overrides = ["httpx", "wappalyzer", "badsecrets", "web_report", "trufflehog"] + config_overrides = {"modules": {"trufflehog": {"only_verified": False}}} + + async def setup_before_prep(self, module_test): + # trufflehog --> FINDING + # wappalyzer --> TECHNOLOGY + # badsecrets --> VULNERABILITY + respond_args = {"response_data": web_body} + module_test.set_expect_requests(respond_args=respond_args) + + def check(self, module_test, events): + report_file = module_test.scan.home / "web_report.html" + with open(report_file) as f: + report_content = f.read() + assert "
  • [CRITICAL] Known Secret Found" in report_content + assert ( + """

    URL

    +
      +
    • http://127.0.0.1:8888/""" + in report_content + ) + assert """Possible Secret Found. Detector Type: [PrivateKey]""" in report_content + assert "

      TECHNOLOGY

      " in report_content + assert "

      flask

      " in report_content + + +web_body = """ + + + + + +
      +
      + +
      + +
      + + + +
      +
      +

      -----BEGIN PRIVATE KEY----- +MIICdwIBADANBgkqhkiG9w0BAQEFAASCAmEwggJdAgEAAoGBAOBY2pd9PSQvuxqu +WXFNVgILTWuUc721Wc2sFNvp4beowhUe1lfxaq5ZfCJcz7z4QsqFhOeks69O9UIb +oiOTDocPDog9PHO8yZXopHm0StFZvSjjKSNuFvy/WopPTGpxUZ5boCaF1CXumY7W +FL+jIap5faimLL9prIwaQKBwv80lAgMBAAECgYEAxvpHtgCgD849tqZYMgOTevCn +U/kwxltoMOClB39icNA+gxj8prc6FTTMwnVq0oGmS5UskX8k1yHCqUV1AvRU9o+q +I8L8a3F3TQKQieI/YjiUNK8A87bKkaiN65ooOnhT+I3ZjZMPR5YEyycimMp22jsv +LyX/35J/wf1rNiBs/YECQQDvtxgmMhE+PeajXqw1w2C3Jds27hI3RPDnamEyWr/L +KkSplbKTF6FuFDYOFdJNPrfxm1tx2MZ2cBfs+h/GnCJVAkEA75Z9w7q8obbqGBHW +9bpuFvLjW7bbqO7HBuXYX9zQcZL6GSArFP0ba5lhgH1qsVQfxVWVyiV9/chme7xc +ljfvkQJBAJ7MpSPQcRnRefNp6R0ok+5gFqt55PlWI1y6XS81bO7Szm+laooE0n0Q +yIpmLE3dqY9VgquVlkupkD/9poU0s40CQD118ZVAVht1/N9n1Cj9RjiE3mYspnTT +rCLM25Db6Gz6M0Y2xlaAB4S2uBhqE/Chj/TjW6WbsJJl0kRzsZynhMECQFYKiM1C +T4LB26ynW00VE8z4tEWSoYt4/Vn/5wFhalVjzoSJ8Hm2qZiObRYLQ1m0X4KnkShk +Gnl54dJHT+EhlfY= +-----END PRIVATE KEY-----

      + + +""" diff --git a/bbot/test/test_step_2/module_tests/test_module_websocket.py b/bbot/test/test_step_2/module_tests/test_module_websocket.py new file mode 100644 index 0000000000..0b09e4bc52 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_websocket.py @@ -0,0 +1,36 @@ +import json +import asyncio +import logging +import websockets +from websockets.asyncio.server import serve + +from .base import ModuleTestBase + +log = logging.getLogger("bbot.testing") + +results = {"events": []} + + +async def websocket_handler(websocket): + results["path"] = websocket.request.path + async for message in websocket: + results["events"].append(message) + + +# Define a coroutine for the server +async def server_coroutine(): + async with serve(websocket_handler, "127.0.0.1", 8765) as server: + await server.serve_forever() + + +class TestWebsocket(ModuleTestBase): + config_overrides = {"modules": {"websocket": {"url": "ws://127.0.0.1:8765/testing"}}} + + async def setup_before_prep(self, module_test): + self.server_task = asyncio.create_task(server_coroutine()) + + def check(self, module_test, events): + assert results["path"] == "/testing" + decoded_events = [json.loads(e) for e in results["events"]] + assert any(e["type"] == "SCAN" for e in decoded_events) + self.server_task.cancel() diff --git a/bbot/test/test_step_2/module_tests/test_module_wpscan.py b/bbot/test/test_step_2/module_tests/test_module_wpscan.py new file mode 100644 index 0000000000..7e65c1dcce --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_wpscan.py @@ -0,0 +1,1083 @@ +from subprocess import CompletedProcess +from .base import ModuleTestBase + + +class Testwpscan(ModuleTestBase): + targets = ["http://127.0.0.1:8888"] + modules_overrides = ["httpx", "wpscan"] + + wpscan_output_json = """{ + "banner": { + "description": "WordPress Security Scanner by the WPScan Team", + "version": "3.8.25", + "authors": [ + "@_WPScan_", + "@ethicalhack3r", + "@erwan_lr", + "@firefart" + ], + "sponsor": "Sponsored by Automattic - https://automattic.com/" + }, + "start_time": 1717183319, + "start_memory": 49950720, + "target_url": "http://127.0.0.1:8888/", + "target_ip": "172.29.64.1", + "effective_url": "http://127.0.0.1:8888/", + "interesting_findings": [ + { + "url": "http://127.0.0.1:8888/", + "to_s": "Headers", + "type": "headers", + "found_by": "Headers (Passive Detection)", + "confidence": 100, + "confirmed_by": { + + }, + "references": { + + }, + "interesting_entries": [ + "Server: Apache/2.4.38 (Debian)", + "X-Powered-By: PHP/7.1.33" + ] + }, + { + "url": "http://127.0.0.1:8888/xmlrpc.php", + "to_s": "XML-RPC seems to be enabled: http://127.0.0.1:8888/xmlrpc.php", + "type": "xmlrpc", + "found_by": "Direct Access (Aggressive Detection)", + "confidence": 100, + "confirmed_by": { + + }, + "references": { + "url": [ + "http://codex.wordpress.org/XML-RPC_Pingback_API" + ], + "metasploit": [ + "auxiliary/scanner/http/wordpress_ghost_scanner", + "auxiliary/dos/http/wordpress_xmlrpc_dos", + "auxiliary/scanner/http/wordpress_xmlrpc_login", + "auxiliary/scanner/http/wordpress_pingback_access" + ] + }, + "interesting_entries": [ + + ] + }, + { + "url": "http://127.0.0.1:8888/readme.html", + "to_s": "WordPress readme found: http://127.0.0.1:8888/readme.html", + "type": "readme", + "found_by": "Direct Access (Aggressive Detection)", + "confidence": 100, + "confirmed_by": { + + }, + "references": { + + }, + "interesting_entries": [ + "/wp-admin/", + "/wp-admin/admin-ajax.php", + " " + ] + }, + { + "url": 
"http://127.0.0.1:8888/wp-cron.php", + "to_s": "The external WP-Cron seems to be enabled: http://127.0.0.1:8888/wp-cron.php", + "type": "wp_cron", + "found_by": "Direct Access (Aggressive Detection)", + "confidence": 60, + "confirmed_by": { + + }, + "references": { + "url": [ + "https://www.iplocation.net/defend-wordpress-from-ddos", + "https://github.com/wpscanteam/wpscan/issues/1299" + ] + }, + "interesting_entries": [ + + ] + } + ], + "version": { + "number": "5.3", + "release_date": "2019-11-12", + "status": "insecure", + "found_by": "Emoji Settings (Passive Detection)", + "confidence": 100, + "interesting_entries": [ + "http://127.0.0.1:8888/, Match: 'wp-includes\\/js\\/wp-emoji-release.min.js?ver=5.3'" + ], + "confirmed_by": { + "Meta Generator (Passive Detection)": { + "confidence": 60, + "interesting_entries": [ + "http://127.0.0.1:8888/, Match: 'WordPress 5.3'" + ] + } + }, + "vulnerabilities": [ + { + "title": "WordPress <= 5.3 - Authenticated Improper Access Controls in REST API", + "fixed_in": "5.3.1", + "references": { + "cve": [ + "2019-20043", + "2019-16788" + ], + "url": [ + "https://wordpress.org/news/2019/12/wordpress-5-3-1-security-and-maintenance-release/", + "https://github.com/WordPress/wordpress-develop/security/advisories/GHSA-g7rg-hchx-c2gw" + ], + "wpvulndb": [ + "4a6de154-5fbd-4c80-acd3-8902ee431bd8" + ] + } + }, + { + "title": "WordPress <= 5.3 - Authenticated Stored XSS via Crafted Links", + "fixed_in": "5.3.1", + "references": { + "cve": [ + "2019-20042" + ], + "url": [ + "https://wordpress.org/news/2019/12/wordpress-5-3-1-security-and-maintenance-release/", + "https://hackerone.com/reports/509930", + "https://github.com/WordPress/wordpress-develop/commit/1f7f3f1f59567e2504f0fbebd51ccf004b3ccb1d", + "https://github.com/WordPress/wordpress-develop/security/advisories/GHSA-xvg2-m2f4-83m7" + ], + "wpvulndb": [ + "23553517-34e3-40a9-a406-f3ffbe9dd265" + ] + } + }, + { + "title": "WordPress <= 5.3 - Authenticated Stored XSS via Block Editor Content", + "fixed_in": "5.3.1", + "references": { + "cve": [ + "2019-16781", + "2019-16780" + ], + "url": [ + "https://wordpress.org/news/2019/12/wordpress-5-3-1-security-and-maintenance-release/", + "https://github.com/WordPress/wordpress-develop/security/advisories/GHSA-pg4x-64rh-3c9v" + ], + "wpvulndb": [ + "be794159-4486-4ae1-a5cc-5c190e5ddf5f" + ] + } + }, + { + "title": "WordPress <= 5.3 - wp_kses_bad_protocol() Colon Bypass", + "fixed_in": "5.3.1", + "references": { + "cve": [ + "2019-20041" + ], + "url": [ + "https://wordpress.org/news/2019/12/wordpress-5-3-1-security-and-maintenance-release/", + "https://github.com/WordPress/wordpress-develop/commit/b1975463dd995da19bb40d3fa0786498717e3c53" + ], + "wpvulndb": [ + "8fac612b-95d2-477a-a7d6-e5ec0bb9ca52" + ] + } + }, + { + "title": "WordPress < 5.4.1 - Password Reset Tokens Failed to Be Properly Invalidated", + "fixed_in": "5.3.3", + "references": { + "cve": [ + "2020-11027" + ], + "url": [ + "https://wordpress.org/news/2020/04/wordpress-5-4-1/", + "https://core.trac.wordpress.org/changeset/47634/", + "https://www.wordfence.com/blog/2020/04/unpacking-the-7-vulnerabilities-fixed-in-todays-wordpress-5-4-1-security-update/", + "https://github.com/WordPress/wordpress-develop/security/advisories/GHSA-ww7v-jg8c-q6jw" + ], + "wpvulndb": [ + "7db191c0-d112-4f08-a419-a1cd81928c4e" + ] + } + }, + { + "title": "WordPress < 5.4.1 - Unauthenticated Users View Private Posts", + "fixed_in": "5.3.3", + "references": { + "cve": [ + "2020-11028" + ], + "url": [ + 
"https://wordpress.org/news/2020/04/wordpress-5-4-1/", + "https://core.trac.wordpress.org/changeset/47635/", + "https://www.wordfence.com/blog/2020/04/unpacking-the-7-vulnerabilities-fixed-in-todays-wordpress-5-4-1-security-update/", + "https://github.com/WordPress/wordpress-develop/security/advisories/GHSA-xhx9-759f-6p2w" + ], + "wpvulndb": [ + "d1e1ba25-98c9-4ae7-8027-9632fb825a56" + ] + } + }, + { + "title": "WordPress < 5.4.1 - Authenticated Cross-Site Scripting (XSS) in Customizer", + "fixed_in": "5.3.3", + "references": { + "cve": [ + "2020-11025" + ], + "url": [ + "https://wordpress.org/news/2020/04/wordpress-5-4-1/", + "https://core.trac.wordpress.org/changeset/47633/", + "https://www.wordfence.com/blog/2020/04/unpacking-the-7-vulnerabilities-fixed-in-todays-wordpress-5-4-1-security-update/", + "https://github.com/WordPress/wordpress-develop/security/advisories/GHSA-4mhg-j6fx-5g3c" + ], + "wpvulndb": [ + "4eee26bd-a27e-4509-a3a5-8019dd48e429" + ] + } + }, + { + "title": "WordPress < 5.4.1 - Authenticated Cross-Site Scripting (XSS) in Search Block", + "fixed_in": "5.3.3", + "references": { + "cve": [ + "2020-11030" + ], + "url": [ + "https://wordpress.org/news/2020/04/wordpress-5-4-1/", + "https://core.trac.wordpress.org/changeset/47636/", + "https://www.wordfence.com/blog/2020/04/unpacking-the-7-vulnerabilities-fixed-in-todays-wordpress-5-4-1-security-update/", + "https://github.com/WordPress/wordpress-develop/security/advisories/GHSA-vccm-6gmc-qhjh" + ], + "wpvulndb": [ + "e4bda91b-067d-45e4-a8be-672ccf8b1a06" + ] + } + }, + { + "title": "WordPress < 5.4.1 - Cross-Site Scripting (XSS) in wp-object-cache", + "fixed_in": "5.3.3", + "references": { + "cve": [ + "2020-11029" + ], + "url": [ + "https://wordpress.org/news/2020/04/wordpress-5-4-1/", + "https://core.trac.wordpress.org/changeset/47637/", + "https://www.wordfence.com/blog/2020/04/unpacking-the-7-vulnerabilities-fixed-in-todays-wordpress-5-4-1-security-update/", + "https://github.com/WordPress/wordpress-develop/security/advisories/GHSA-568w-8m88-8g2c" + ], + "wpvulndb": [ + "e721d8b9-a38f-44ac-8520-b4a9ed6a5157" + ] + } + }, + { + "title": "WordPress < 5.4.1 - Authenticated Cross-Site Scripting (XSS) in File Uploads", + "fixed_in": "5.3.3", + "references": { + "cve": [ + "2020-11026" + ], + "url": [ + "https://wordpress.org/news/2020/04/wordpress-5-4-1/", + "https://core.trac.wordpress.org/changeset/47638/", + "https://www.wordfence.com/blog/2020/04/unpacking-the-7-vulnerabilities-fixed-in-todays-wordpress-5-4-1-security-update/", + "https://github.com/WordPress/wordpress-develop/security/advisories/GHSA-3gw2-4656-pfr2", + "https://hackerone.com/reports/179695" + ], + "wpvulndb": [ + "55438b63-5fc9-4812-afc4-2f1eff800d5f" + ] + } + }, + { + "title": "WordPress < 5.4.2 - Authenticated XSS in Block Editor", + "fixed_in": "5.3.4", + "references": { + "cve": [ + "2020-4046" + ], + "url": [ + "https://wordpress.org/news/2020/06/wordpress-5-4-2-security-and-maintenance-release/", + "https://github.com/WordPress/wordpress-develop/security/advisories/GHSA-rpwf-hrh2-39jf", + "https://pentest.co.uk/labs/research/subtle-stored-xss-wordpress-core/" + ], + "youtube": [ + "https://www.youtube.com/watch?v=tCh7Y8z8fb4" + ], + "wpvulndb": [ + "831e4a94-239c-4061-b66e-f5ca0dbb84fa" + ] + } + }, + { + "title": "WordPress < 5.4.2 - Authenticated XSS via Media Files", + "fixed_in": "5.3.4", + "references": { + "cve": [ + "2020-4047" + ], + "url": [ + "https://wordpress.org/news/2020/06/wordpress-5-4-2-security-and-maintenance-release/", + 
"https://github.com/WordPress/wordpress-develop/security/advisories/GHSA-8q2w-5m27-wm27" + ], + "wpvulndb": [ + "741d07d1-2476-430a-b82f-e1228a9343a4" + ] + } + }, + { + "title": "WordPress < 5.4.2 - Open Redirection", + "fixed_in": "5.3.4", + "references": { + "cve": [ + "2020-4048" + ], + "url": [ + "https://wordpress.org/news/2020/06/wordpress-5-4-2-security-and-maintenance-release/", + "https://github.com/WordPress/WordPress/commit/10e2a50c523cf0b9785555a688d7d36a40fbeccf", + "https://github.com/WordPress/wordpress-develop/security/advisories/GHSA-q6pw-gvf4-5fj5" + ], + "wpvulndb": [ + "12855f02-432e-4484-af09-7d0fbf596909" + ] + } + }, + { + "title": "WordPress < 5.4.2 - Authenticated Stored XSS via Theme Upload", + "fixed_in": "5.3.4", + "references": { + "cve": [ + "2020-4049" + ], + "exploitdb": [ + "48770" + ], + "url": [ + "https://wordpress.org/news/2020/06/wordpress-5-4-2-security-and-maintenance-release/", + "https://github.com/WordPress/wordpress-develop/security/advisories/GHSA-87h4-phjv-rm6p", + "https://hackerone.com/reports/406289" + ], + "wpvulndb": [ + "d8addb42-e70b-4439-b828-fd0697e5d9d4" + ] + } + }, + { + "title": "WordPress < 5.4.2 - Misuse of set-screen-option Leading to Privilege Escalation", + "fixed_in": "5.3.4", + "references": { + "cve": [ + "2020-4050" + ], + "url": [ + "https://wordpress.org/news/2020/06/wordpress-5-4-2-security-and-maintenance-release/", + "https://github.com/WordPress/WordPress/commit/dda0ccdd18f6532481406cabede19ae2ed1f575d", + "https://github.com/WordPress/wordpress-develop/security/advisories/GHSA-4vpv-fgg2-gcqc" + ], + "wpvulndb": [ + "b6f69ff1-4c11-48d2-b512-c65168988c45" + ] + } + }, + { + "title": "WordPress < 5.4.2 - Disclosure of Password-Protected Page/Post Comments", + "fixed_in": "5.3.4", + "references": { + "cve": [ + "2020-25286" + ], + "url": [ + "https://wordpress.org/news/2020/06/wordpress-5-4-2-security-and-maintenance-release/", + "https://github.com/WordPress/WordPress/commit/c075eec24f2f3214ab0d0fb0120a23082e6b1122" + ], + "wpvulndb": [ + "eea6dbf5-e298-44a7-9b0d-f078ad4741f9" + ] + } + }, + { + "title": "WordPress 4.7-5.7 - Authenticated Password Protected Pages Exposure", + "fixed_in": "5.3.7", + "references": { + "cve": [ + "2021-29450" + ], + "url": [ + "https://wordpress.org/news/2021/04/wordpress-5-7-1-security-and-maintenance-release/", + "https://blog.wpscan.com/2021/04/15/wordpress-571-security-vulnerability-release.html", + "https://github.com/WordPress/wordpress-develop/security/advisories/GHSA-pmmh-2f36-wvhq", + "https://core.trac.wordpress.org/changeset/50717/" + ], + "youtube": [ + "https://www.youtube.com/watch?v=J2GXmxAdNWs" + ], + "wpvulndb": [ + "6a3ec618-c79e-4b9c-9020-86b157458ac5" + ] + } + }, + { + "title": "WordPress 3.7 to 5.7.1 - Object Injection in PHPMailer", + "fixed_in": "5.3.8", + "references": { + "cve": [ + "2020-36326", + "2018-19296" + ], + "url": [ + "https://github.com/WordPress/WordPress/commit/267061c9595fedd321582d14c21ec9e7da2dcf62", + "https://wordpress.org/news/2021/05/wordpress-5-7-2-security-release/", + "https://github.com/PHPMailer/PHPMailer/commit/e2e07a355ee8ff36aba21d0242c5950c56e4c6f9", + "https://www.wordfence.com/blog/2021/05/wordpress-5-7-2-security-release-what-you-need-to-know/" + ], + "youtube": [ + "https://www.youtube.com/watch?v=HaW15aMzBUM" + ], + "wpvulndb": [ + "4cd46653-4470-40ff-8aac-318bee2f998d" + ] + } + }, + { + "title": "WordPress < 5.8.2 - Expired DST Root CA X3 Certificate", + "fixed_in": "5.3.10", + "references": { + "url": [ + 
"https://wordpress.org/news/2021/11/wordpress-5-8-2-security-and-maintenance-release/", + "https://core.trac.wordpress.org/ticket/54207" + ], + "wpvulndb": [ + "cc23344a-5c91-414a-91e3-c46db614da8d" + ] + } + }, + { + "title": "WordPress < 5.8 - Plugin Confusion", + "fixed_in": "5.8", + "references": { + "cve": [ + "2021-44223" + ], + "url": [ + "https://vavkamil.cz/2021/11/25/wordpress-plugin-confusion-update-can-get-you-pwned/" + ], + "wpvulndb": [ + "95e01006-84e4-4e95-b5d7-68ea7b5aa1a8" + ] + } + }, + { + "title": "WordPress < 5.8.3 - SQL Injection via WP_Query", + "fixed_in": "5.3.11", + "references": { + "cve": [ + "2022-21661" + ], + "url": [ + "https://github.com/WordPress/wordpress-develop/security/advisories/GHSA-6676-cqfm-gw84", + "https://hackerone.com/reports/1378209" + ], + "wpvulndb": [ + "7f768bcf-ed33-4b22-b432-d1e7f95c1317" + ] + } + }, + { + "title": "WordPress < 5.8.3 - Author+ Stored XSS via Post Slugs", + "fixed_in": "5.3.11", + "references": { + "cve": [ + "2022-21662" + ], + "url": [ + "https://github.com/WordPress/wordpress-develop/security/advisories/GHSA-699q-3hj9-889w", + "https://hackerone.com/reports/425342", + "https://blog.sonarsource.com/wordpress-stored-xss-vulnerability" + ], + "wpvulndb": [ + "dc6f04c2-7bf2-4a07-92b5-dd197e4d94c8" + ] + } + }, + { + "title": "WordPress 4.1-5.8.2 - SQL Injection via WP_Meta_Query", + "fixed_in": "5.3.11", + "references": { + "cve": [ + "2022-21664" + ], + "url": [ + "https://github.com/WordPress/wordpress-develop/security/advisories/GHSA-jp3p-gw8h-6x86" + ], + "wpvulndb": [ + "24462ac4-7959-4575-97aa-a6dcceeae722" + ] + } + }, + { + "title": "WordPress < 5.8.3 - Super Admin Object Injection in Multisites", + "fixed_in": "5.3.11", + "references": { + "cve": [ + "2022-21663" + ], + "url": [ + "https://github.com/WordPress/wordpress-develop/security/advisories/GHSA-jmmq-m8p8-332h", + "https://hackerone.com/reports/541469" + ], + "wpvulndb": [ + "008c21ab-3d7e-4d97-b6c3-db9d83f390a7" + ] + } + }, + { + "title": "WordPress < 5.9.2 - Prototype Pollution in jQuery", + "fixed_in": "5.3.12", + "references": { + "url": [ + "https://wordpress.org/news/2022/03/wordpress-5-9-2-security-maintenance-release/" + ], + "wpvulndb": [ + "1ac912c1-5e29-41ac-8f76-a062de254c09" + ] + } + }, + { + "title": "WP < 6.0.2 - Reflected Cross-Site Scripting", + "fixed_in": "5.3.13", + "references": { + "url": [ + "https://wordpress.org/news/2022/08/wordpress-6-0-2-security-and-maintenance-release/" + ], + "wpvulndb": [ + "622893b0-c2c4-4ee7-9fa1-4cecef6e36be" + ] + } + }, + { + "title": "WP < 6.0.2 - Authenticated Stored Cross-Site Scripting", + "fixed_in": "5.3.13", + "references": { + "url": [ + "https://wordpress.org/news/2022/08/wordpress-6-0-2-security-and-maintenance-release/" + ], + "wpvulndb": [ + "3b1573d4-06b4-442b-bad5-872753118ee0" + ] + } + }, + { + "title": "WP < 6.0.2 - SQLi via Link API", + "fixed_in": "5.3.13", + "references": { + "url": [ + "https://wordpress.org/news/2022/08/wordpress-6-0-2-security-and-maintenance-release/" + ], + "wpvulndb": [ + "601b0bf9-fed2-4675-aec7-fed3156a022f" + ] + } + }, + { + "title": "WP < 6.0.3 - Stored XSS via wp-mail.php", + "fixed_in": "5.3.14", + "references": { + "url": [ + "https://wordpress.org/news/2022/10/wordpress-6-0-3-security-release/", + "https://github.com/WordPress/wordpress-develop/commit/abf236fdaf94455e7bc6e30980cf70401003e283" + ], + "wpvulndb": [ + "713bdc8b-ab7c-46d7-9847-305344a579c4" + ] + } + }, + { + "title": "WP < 6.0.3 - Open Redirect via wp_nonce_ays", + "fixed_in": 
"5.3.14", + "references": { + "url": [ + "https://wordpress.org/news/2022/10/wordpress-6-0-3-security-release/", + "https://github.com/WordPress/wordpress-develop/commit/506eee125953deb658307bb3005417cb83f32095" + ], + "wpvulndb": [ + "926cd097-b36f-4d26-9c51-0dfab11c301b" + ] + } + }, + { + "title": "WP < 6.0.3 - Email Address Disclosure via wp-mail.php", + "fixed_in": "5.3.14", + "references": { + "url": [ + "https://wordpress.org/news/2022/10/wordpress-6-0-3-security-release/", + "https://github.com/WordPress/wordpress-develop/commit/5fcdee1b4d72f1150b7b762ef5fb39ab288c8d44" + ], + "wpvulndb": [ + "c5675b59-4b1d-4f64-9876-068e05145431" + ] + } + }, + { + "title": "WP < 6.0.3 - Reflected XSS via SQLi in Media Library", + "fixed_in": "5.3.14", + "references": { + "url": [ + "https://wordpress.org/news/2022/10/wordpress-6-0-3-security-release/", + "https://github.com/WordPress/wordpress-develop/commit/8836d4682264e8030067e07f2f953a0f66cb76cc" + ], + "wpvulndb": [ + "cfd8b50d-16aa-4319-9c2d-b227365c2156" + ] + } + }, + { + "title": "WP < 6.0.3 - CSRF in wp-trackback.php", + "fixed_in": "5.3.14", + "references": { + "url": [ + "https://wordpress.org/news/2022/10/wordpress-6-0-3-security-release/", + "https://github.com/WordPress/wordpress-develop/commit/a4f9ca17fae0b7d97ff807a3c234cf219810fae0" + ], + "wpvulndb": [ + "b60a6557-ae78-465c-95bc-a78cf74a6dd0" + ] + } + }, + { + "title": "WP < 6.0.3 - Stored XSS via the Customizer", + "fixed_in": "5.3.14", + "references": { + "url": [ + "https://wordpress.org/news/2022/10/wordpress-6-0-3-security-release/", + "https://github.com/WordPress/wordpress-develop/commit/2ca28e49fc489a9bb3c9c9c0d8907a033fe056ef" + ], + "wpvulndb": [ + "2787684c-aaef-4171-95b4-ee5048c74218" + ] + } + }, + { + "title": "WP < 6.0.3 - Stored XSS via Comment Editing", + "fixed_in": "5.3.14", + "references": { + "url": [ + "https://wordpress.org/news/2022/10/wordpress-6-0-3-security-release/", + "https://github.com/WordPress/wordpress-develop/commit/89c8f7919460c31c0f259453b4ffb63fde9fa955" + ], + "wpvulndb": [ + "02d76d8e-9558-41a5-bdb6-3957dc31563b" + ] + } + }, + { + "title": "WP < 6.0.3 - Content from Multipart Emails Leaked", + "fixed_in": "5.3.14", + "references": { + "url": [ + "https://wordpress.org/news/2022/10/wordpress-6-0-3-security-release/", + "https://github.com/WordPress/wordpress-develop/commit/3765886b4903b319764490d4ad5905bc5c310ef8" + ], + "wpvulndb": [ + "3f707e05-25f0-4566-88ed-d8d0aff3a872" + ] + } + }, + { + "title": "WP < 6.0.3 - SQLi in WP_Date_Query", + "fixed_in": "5.3.14", + "references": { + "url": [ + "https://wordpress.org/news/2022/10/wordpress-6-0-3-security-release/", + "https://github.com/WordPress/wordpress-develop/commit/d815d2e8b2a7c2be6694b49276ba3eee5166c21f" + ], + "wpvulndb": [ + "1da03338-557f-4cb6-9a65-3379df4cce47" + ] + } + }, + { + "title": "WP < 6.0.3 - Stored XSS via RSS Widget", + "fixed_in": "5.3.14", + "references": { + "url": [ + "https://wordpress.org/news/2022/10/wordpress-6-0-3-security-release/", + "https://github.com/WordPress/wordpress-develop/commit/929cf3cb9580636f1ae3fe944b8faf8cca420492" + ], + "wpvulndb": [ + "58d131f5-f376-4679-b604-2b888de71c5b" + ] + } + }, + { + "title": "WP < 6.0.3 - Data Exposure via REST Terms/Tags Endpoint", + "fixed_in": "5.3.14", + "references": { + "url": [ + "https://wordpress.org/news/2022/10/wordpress-6-0-3-security-release/", + "https://github.com/WordPress/wordpress-develop/commit/ebaac57a9ac0174485c65de3d32ea56de2330d8e" + ], + "wpvulndb": [ + 
"b27a8711-a0c0-4996-bd6a-01734702913e" + ] + } + }, + { + "title": "WP < 6.0.3 - Multiple Stored XSS via Gutenberg", + "fixed_in": "5.3.14", + "references": { + "url": [ + "https://wordpress.org/news/2022/10/wordpress-6-0-3-security-release/", + "https://github.com/WordPress/gutenberg/pull/45045/files" + ], + "wpvulndb": [ + "f513c8f6-2e1c-45ae-8a58-36b6518e2aa9" + ] + } + }, + { + "title": "WP <= 6.2 - Unauthenticated Blind SSRF via DNS Rebinding", + "fixed_in": null, + "references": { + "cve": [ + "2022-3590" + ], + "url": [ + "https://blog.sonarsource.com/wordpress-core-unauthenticated-blind-ssrf/" + ], + "wpvulndb": [ + "c8814e6e-78b3-4f63-a1d3-6906a84c1f11" + ] + } + }, + { + "title": "WP < 6.2.1 - Directory Traversal via Translation Files", + "fixed_in": "5.3.15", + "references": { + "cve": [ + "2023-2745" + ], + "url": [ + "https://wordpress.org/news/2023/05/wordpress-6-2-1-maintenance-security-release/" + ], + "wpvulndb": [ + "2999613a-b8c8-4ec0-9164-5dfe63adf6e6" + ] + } + }, + { + "title": "WP < 6.2.1 - Thumbnail Image Update via CSRF", + "fixed_in": "5.3.15", + "references": { + "url": [ + "https://wordpress.org/news/2023/05/wordpress-6-2-1-maintenance-security-release/" + ], + "wpvulndb": [ + "a03d744a-9839-4167-a356-3e7da0f1d532" + ] + } + }, + { + "title": "WP < 6.2.1 - Contributor+ Stored XSS via Open Embed Auto Discovery", + "fixed_in": "5.3.15", + "references": { + "url": [ + "https://wordpress.org/news/2023/05/wordpress-6-2-1-maintenance-security-release/" + ], + "wpvulndb": [ + "3b574451-2852-4789-bc19-d5cc39948db5" + ] + } + }, + { + "title": "WP < 6.2.2 - Shortcode Execution in User Generated Data", + "fixed_in": "5.3.15", + "references": { + "url": [ + "https://wordpress.org/news/2023/05/wordpress-6-2-1-maintenance-security-release/", + "https://wordpress.org/news/2023/05/wordpress-6-2-2-security-release/" + ], + "wpvulndb": [ + "ef289d46-ea83-4fa5-b003-0352c690fd89" + ] + } + }, + { + "title": "WP < 6.2.1 - Contributor+ Content Injection", + "fixed_in": "5.3.15", + "references": { + "url": [ + "https://wordpress.org/news/2023/05/wordpress-6-2-1-maintenance-security-release/" + ], + "wpvulndb": [ + "1527ebdb-18bc-4f9d-9c20-8d729a628670" + ] + } + }, + { + "title": "WP < 6.3.2 - Denial of Service via Cache Poisoning", + "fixed_in": "5.3.16", + "references": { + "url": [ + "https://wordpress.org/news/2023/10/wordpress-6-3-2-maintenance-and-security-release/" + ], + "wpvulndb": [ + "6d80e09d-34d5-4fda-81cb-e703d0e56e4f" + ] + } + }, + { + "title": "WP < 6.3.2 - Subscriber+ Arbitrary Shortcode Execution", + "fixed_in": "5.3.16", + "references": { + "url": [ + "https://wordpress.org/news/2023/10/wordpress-6-3-2-maintenance-and-security-release/" + ], + "wpvulndb": [ + "3615aea0-90aa-4f9a-9792-078a90af7f59" + ] + } + }, + { + "title": "WP < 6.3.2 - Contributor+ Comment Disclosure", + "fixed_in": "5.3.16", + "references": { + "cve": [ + "2023-39999" + ], + "url": [ + "https://wordpress.org/news/2023/10/wordpress-6-3-2-maintenance-and-security-release/" + ], + "wpvulndb": [ + "d35b2a3d-9b41-4b4f-8e87-1b8ccb370b9f" + ] + } + }, + { + "title": "WP < 6.3.2 - Unauthenticated Post Author Email Disclosure", + "fixed_in": "5.3.16", + "references": { + "cve": [ + "2023-5561" + ], + "url": [ + "https://wpscan.com/blog/email-leak-oracle-vulnerability-addressed-in-wordpress-6-3-2/", + "https://wordpress.org/news/2023/10/wordpress-6-3-2-maintenance-and-security-release/" + ], + "wpvulndb": [ + "19380917-4c27-4095-abf1-eba6f913b441" + ] + } + }, + { + "title": "WordPress < 6.4.3 - 
Deserialization of Untrusted Data", + "fixed_in": "5.3.17", + "references": { + "url": [ + "https://wordpress.org/news/2024/01/wordpress-6-4-3-maintenance-and-security-release/" + ], + "wpvulndb": [ + "5e9804e5-bbd4-4836-a5f0-b4388cc39225" + ] + } + }, + { + "title": "WordPress < 6.4.3 - Admin+ PHP File Upload", + "fixed_in": "5.3.17", + "references": { + "url": [ + "https://wordpress.org/news/2024/01/wordpress-6-4-3-maintenance-and-security-release/" + ], + "wpvulndb": [ + "a8e12fbe-c70b-4078-9015-cf57a05bdd4a" + ] + } + } + ] + }, + "main_theme": null, + "plugins": { + "social-warfare": { + "slug": "social-warfare", + "location": "http://127.0.0.1:8888/wp-content/plugins/social-warfare/", + "latest_version": "4.4.6.3", + "last_updated": "2024-04-07T19:32:00.000Z", + "outdated": true, + "readme_url": null, + "directory_listing": null, + "error_log_url": null, + "found_by": "Comment (Passive Detection)", + "confidence": 30, + "interesting_entries": [ + + ], + "confirmed_by": { + + }, + "vulnerabilities": [ + { + "title": "Social Warfare <= 3.5.2 - Unauthenticated Arbitrary Settings Update", + "fixed_in": "3.5.3", + "references": { + "cve": [ + "2019-9978" + ], + "url": [ + "https://wordpress.org/support/topic/malware-into-new-update/", + "https://www.wordfence.com/blog/2019/03/unpatched-zero-day-vulnerability-in-social-warfare-plugin-exploited-in-the-wild/", + "https://threatpost.com/wordpress-plugin-removed-after-zero-day-discovered/143051/", + "https://twitter.com/warfareplugins/status/1108826025188909057", + "https://www.wordfence.com/blog/2019/03/recent-social-warfare-vulnerability-allowed-remote-code-execution/" + ], + "wpvulndb": [ + "32085d2d-1235-42b4-baeb-bc43172a4972" + ] + } + }, + { + "title": "Social Warfare <= 3.5.2 - Unauthenticated Remote Code Execution (RCE)", + "fixed_in": "3.5.3", + "references": { + "url": [ + "https://www.webarxsecurity.com/social-warfare-vulnerability/" + ], + "wpvulndb": [ + "7b412469-cc03-4899-b397-38580ced5618" + ] + } + }, + { + "title": "Social Warfare < 4.3.1 - Subscriber+ Post Meta Deletion", + "fixed_in": "4.3.1", + "references": { + "cve": [ + "2023-0402" + ], + "wpvulndb": [ + "5116068f-4b84-42ad-a88d-03e46096b41c" + ] + } + }, + { + "title": "Social Warfare < 4.4.0 - Post Meta Deletion via CSRF", + "fixed_in": "4.4.0", + "references": { + "cve": [ + "2023-0403" + ], + "wpvulndb": [ + "7140abf5-5966-4361-bd51-ee29d3071a30" + ] + } + }, + { + "title": "Social Sharing Plugin - Social Warfare < 4.4.4 - Authenticated (Contributor+) Stored Cross-Site Scripting via Shortcode", + "fixed_in": "4.4.4", + "references": { + "cve": [ + "2023-4842" + ], + "url": [ + "https://www.wordfence.com/threat-intel/vulnerabilities/id/8f5b9aff-0833-4887-ae59-df5bc88c7f91" + ], + "wpvulndb": [ + "ab221b58-369e-4010-ae36-be099b2f4c9b" + ] + } + }, + { + "title": "Social Sharing Plugin – Social Warfare < 4.4.6.2 - Authenticated(Contributor+) Stored Cross-Site Scripting via Shortcode", + "fixed_in": "4.4.6.2", + "references": { + "cve": [ + "2024-1959" + ], + "url": [ + "https://www.wordfence.com/threat-intel/vulnerabilities/id/1016f16c-0ab2-4cac-a7a5-8d93a37e7894" + ], + "wpvulndb": [ + "26ad138e-990a-4401-84e4-ea694ccf6e7f" + ] + } + }, + { + "title": "Social Sharing Plugin – Social Warfare < 4.4.6 - Cross-Site Request Forgery", + "fixed_in": "4.4.6", + "references": { + "cve": [ + "2024-34825" + ], + "url": [ + "https://www.wordfence.com/threat-intel/vulnerabilities/id/f105bee6-21b2-4014-bb0a-9e53c49e29b0" + ], + "wpvulndb": [ + 
"acb8b33c-6b74-4d65-a3a5-5cad0c1ea8b0" + ] + } + } + ], + "version": { + "number": "3.5.2", + "confidence": 100, + "found_by": "Comment (Passive Detection)", + "interesting_entries": [ + "http://127.0.0.1:8888/, Match: 'Social Warfare v3.5.2'" + ], + "confirmed_by": { + "Readme - Stable Tag (Aggressive Detection)": { + "confidence": 80, + "interesting_entries": [ + "http://127.0.0.1:8888/wp-content/plugins/social-warfare/readme.txt" + ] + }, + "Readme - ChangeLog Section (Aggressive Detection)": { + "confidence": 50, + "interesting_entries": [ + "http://127.0.0.1:8888/wp-content/plugins/social-warfare/readme.txt" + ] + } + } + } + } + }, + "config_backups": { + + }, + "vuln_api": { + "plan": "free", + "requests_done_during_scan": 0, + "requests_remaining": 15 + }, + "stop_time": 1717183322, + "elapsed": 3, + "requests_done": 169, + "cached_requests": 6, + "data_sent": 59178, + "data_sent_humanised": "57.791 KB", + "data_received": 313184, + "data_received_humanised": "305.844 KB", + "used_memory": 225398784, + "used_memory_humanised": "214.957 MB" +}""" + + async def setup_after_prep(self, module_test): + async def wpscan_mock_run(*command, **kwargs): + return CompletedProcess(command, 0, self.wpscan_output_json, "") + + module_test.monkeypatch.setattr(module_test.scan.helpers, "run", wpscan_mock_run) + + def check(self, module_test, events): + findings = [e for e in events if e.type == "FINDING"] + vulnerabilities = [e for e in events if e.type == "VULNERABILITY"] + technologies = [e for e in events if e.type == "TECHNOLOGY"] + assert len(findings) == 1 + assert len(vulnerabilities) == 59 + assert len(technologies) == 4 diff --git a/bbot/test/test_step_2/module_tests/test_module_zoomeye.py b/bbot/test/test_step_2/module_tests/test_module_zoomeye.py new file mode 100644 index 0000000000..7b7c843b56 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_zoomeye.py @@ -0,0 +1,35 @@ +from .base import ModuleTestBase + + +class TestZoomEye(ModuleTestBase): + config_overrides = {"modules": {"zoomeye": {"api_key": "asdf", "include_related": True, "max_pages": 3}}} + + async def setup_before_prep(self, module_test): + module_test.httpx_mock.add_response( + url="https://api.zoomeye.hk/resources-info", + match_headers={"API-KEY": "asdf"}, + json={"quota_info": {"remain_total_quota": 5}}, + ) + module_test.httpx_mock.add_response( + url="https://api.zoomeye.hk/domain/search?q=blacklanternsecurity.com&type=0&page=1", + json={"list": [{"name": "asdf.blacklanternsecurity.com"}]}, + ) + module_test.httpx_mock.add_response( + url="https://api.zoomeye.hk/domain/search?q=blacklanternsecurity.com&type=0&page=2", + json={"list": [{"name": "zzzz.blacklanternsecurity.com"}]}, + ) + module_test.httpx_mock.add_response( + url="https://api.zoomeye.hk/domain/search?q=blacklanternsecurity.com&type=0&page=3", + json={"list": [{"name": "ffff.blacklanternsecurity.com"}, {"name": "affiliate.bls"}]}, + ) + module_test.httpx_mock.add_response( + url="https://api.zoomeye.hk/domain/search?q=blacklanternsecurity.com&type=0&page=4", + json={"list": [{"name": "nope.blacklanternsecurity.com"}]}, + ) + + def check(self, module_test, events): + assert any(e.data == "asdf.blacklanternsecurity.com" for e in events), "Failed to detect subdomain #1" + assert any(e.data == "zzzz.blacklanternsecurity.com" for e in events), "Failed to detect subdomain #2" + assert any(e.data == "ffff.blacklanternsecurity.com" for e in events), "Failed to detect subdomain #3" + assert any(e.data == "affiliate.bls" and "affiliate" in e.tags 
for e in events), "Failed to detect affiliate" + assert not any(e.data == "nope.blacklanternsecurity.com" for e in events), "Failed to obey max_pages" diff --git a/bbot/test/test_step_2/template_tests/__init__.py b/bbot/test/test_step_2/template_tests/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/bbot/test/test_step_2/template_tests/test_template_subdomain_enum.py b/bbot/test/test_step_2/template_tests/test_template_subdomain_enum.py new file mode 100644 index 0000000000..bfa186707b --- /dev/null +++ b/bbot/test/test_step_2/template_tests/test_template_subdomain_enum.py @@ -0,0 +1,221 @@ +from ..module_tests.base import ModuleTestBase + + +class TestSubdomainEnum(ModuleTestBase): + targets = ["blacklanternsecurity.com"] + modules_overrides = [] + config_overrides = {"dns": {"minimal": False}, "scope": {"report_distance": 10}} + dedup_strategy = "highest_parent" + + txt = [ + "www.blacklanternsecurity.com", + "asdf.www.blacklanternsecurity.com", + "test.asdf.www.blacklanternsecurity.com", + "api.test.asdf.www.blacklanternsecurity.com", + ] + + async def setup_after_prep(self, module_test): + dns_mock = { + "evilcorp.com": {"A": ["127.0.0.6"]}, + "blacklanternsecurity.com": {"A": ["127.0.0.5"]}, + "www.blacklanternsecurity.com": {"A": ["127.0.0.5"]}, + "asdf.www.blacklanternsecurity.com": {"A": ["127.0.0.5"]}, + "test.asdf.www.blacklanternsecurity.com": {"A": ["127.0.0.5"]}, + "api.test.asdf.www.blacklanternsecurity.com": {"A": ["127.0.0.5"]}, + } + if self.txt: + dns_mock["blacklanternsecurity.com"]["TXT"] = self.txt + await module_test.mock_dns(dns_mock) + + # load subdomain enum template as module + from bbot.modules.templates.subdomain_enum import subdomain_enum + + subdomain_enum_module = subdomain_enum(module_test.scan) + + self.queries = [] + + async def mock_query(query): + self.queries.append(query) + + subdomain_enum_module.query = mock_query + subdomain_enum_module.dedup_strategy = self.dedup_strategy + module_test.scan.modules["subdomain_enum"] = subdomain_enum_module + + def check(self, module_test, events): + in_scope_dns_names = [e for e in events if e.type == "DNS_NAME" and e.scope_distance == 0] + assert len(in_scope_dns_names) == 5 + assert 1 == len([e for e in in_scope_dns_names if e.data == "blacklanternsecurity.com"]) + assert 1 == len([e for e in in_scope_dns_names if e.data == "www.blacklanternsecurity.com"]) + assert 1 == len([e for e in in_scope_dns_names if e.data == "asdf.www.blacklanternsecurity.com"]) + assert 1 == len([e for e in in_scope_dns_names if e.data == "test.asdf.www.blacklanternsecurity.com"]) + assert 1 == len([e for e in in_scope_dns_names if e.data == "api.test.asdf.www.blacklanternsecurity.com"]) + assert len(self.queries) == 1 + assert self.queries[0] == "blacklanternsecurity.com" + + +class TestSubdomainEnumHighestParent(TestSubdomainEnum): + targets = ["api.test.asdf.www.blacklanternsecurity.com", "evilcorp.com"] + whitelist = ["www.blacklanternsecurity.com"] + modules_overrides = ["speculate"] + dedup_strategy = "highest_parent" + txt = None + + def check(self, module_test, events): + in_scope_dns_names = [e for e in events if e.type == "DNS_NAME" and e.scope_distance == 0] + distance_1_dns_names = [e for e in events if e.type == "DNS_NAME" and e.scope_distance == 1] + assert len(in_scope_dns_names) == 4 + assert 1 == len([e for e in in_scope_dns_names if e.data == "www.blacklanternsecurity.com"]) + assert 1 == len([e for e in in_scope_dns_names if e.data == "asdf.www.blacklanternsecurity.com"]) + assert 1 == len([e 
for e in in_scope_dns_names if e.data == "test.asdf.www.blacklanternsecurity.com"])
+        assert 1 == len([e for e in in_scope_dns_names if e.data == "api.test.asdf.www.blacklanternsecurity.com"])
+        assert len(distance_1_dns_names) == 2
+        assert 1 == len([e for e in distance_1_dns_names if e.data == "evilcorp.com"])
+        assert 1 == len([e for e in distance_1_dns_names if e.data == "blacklanternsecurity.com"])
+        assert len(self.queries) == 1
+        assert self.queries[0] == "www.blacklanternsecurity.com"
+
+
+class TestSubdomainEnumLowestParent(TestSubdomainEnumHighestParent):
+    dedup_strategy = "lowest_parent"
+
+    def check(self, module_test, events):
+        assert set(self.queries) == {
+            "test.asdf.www.blacklanternsecurity.com",
+            "asdf.www.blacklanternsecurity.com",
+            "www.blacklanternsecurity.com",
+        }
+
+
+class TestSubdomainEnumWildcardBaseline(ModuleTestBase):
+    # oh walmart.cn why are you like this
+    targets = ["www.walmart.cn"]
+    whitelist = ["walmart.cn"]
+    modules_overrides = []
+    config_overrides = {"dns": {"minimal": False}, "scope": {"report_distance": 10}, "omit_event_types": []}
+    dedup_strategy = "highest_parent"
+
+    dns_mock_data = {
+        "walmart.cn": {"A": ["127.0.0.1"]},
+        "www.walmart.cn": {"A": ["127.0.0.1"]},
+        "test.walmart.cn": {"A": ["127.0.0.1"]},
+    }
+
+    async def setup_before_prep(self, module_test):
+        await module_test.mock_dns(self.dns_mock_data)
+        self.queries = []
+
+        async def mock_query(query):
+            self.queries.append(query)
+            return ["walmart.cn", "www.walmart.cn", "test.walmart.cn", "asdf.walmart.cn"]
+
+        # load subdomain enum template as module
+        from bbot.modules.templates.subdomain_enum import subdomain_enum
+
+        subdomain_enum_module = subdomain_enum(module_test.scan)
+
+        subdomain_enum_module.query = mock_query
+        subdomain_enum_module._name = "subdomain_enum"
+        subdomain_enum_module.dedup_strategy = self.dedup_strategy
+        module_test.scan.modules["subdomain_enum"] = subdomain_enum_module
+
+    def check(self, module_test, events):
+        assert self.queries == ["walmart.cn"]
+        assert len(events) == 7
+        assert 2 == len(
+            [
+                e
+                for e in events
+                if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and str(e.module) == "A" and e.scope_distance == 1
+            ]
+        )
+        assert 1 == len(
+            [
+                e
+                for e in events
+                if e.type == "DNS_NAME"
+                and e.data == "www.walmart.cn"
+                and str(e.module) == "TARGET"
+                and e.scope_distance == 0
+            ]
+        )
+        assert 1 == len(
+            [
+                e
+                for e in events
+                if e.type == "DNS_NAME"
+                and e.data == "test.walmart.cn"
+                and str(e.module) == "subdomain_enum"
+                and e.scope_distance == 0
+            ]
+        )
+        assert 1 == len(
+            [
+                e
+                for e in events
+                if e.type == "DNS_NAME_UNRESOLVED"
+                and e.data == "asdf.walmart.cn"
+                and str(e.module) == "subdomain_enum"
+                and e.scope_distance == 0
+            ]
+        )
+
+
+class TestSubdomainEnumWildcardDefense(TestSubdomainEnumWildcardBaseline):
+    # oh walmart.cn why are you like this
+    targets = ["walmart.cn"]
+    modules_overrides = []
+    config_overrides = {"dns": {"minimal": False}, "scope": {"report_distance": 10}}
+    dedup_strategy = "highest_parent"
+
+    dns_mock_data = {
+        "walmart.cn": {"A": ["127.0.0.2"], "TXT": ["asdf.walmart.cn"]},
+    }
+
+    async def setup_after_prep(self, module_test):
+        # simulate wildcard
+        custom_lookup = """
+def custom_lookup(query, rdtype):
+    import random
+    if rdtype == "A" and query.endswith(".walmart.cn"):
+        # randint is inclusive on both ends; 255 is the largest valid octet
+        ip = ".".join([str(random.randint(0, 255)) for _ in range(4)])
+        return {ip}
+"""
+        await module_test.mock_dns(self.dns_mock_data, custom_lookup_fn=custom_lookup)
+
+    def check(self, module_test, events):
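+        # every A lookup under .walmart.cn resolves to a random IP, so the
+        # template should detect the wildcard and decline to query it: no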
subdomain enum should happen on this domain! + assert self.queries == [] + assert len(events) == 7 + assert 2 == len( + [e for e in events if e.type == "IP_ADDRESS" and str(e.module) == "A" and e.scope_distance == 1] + ) + assert 1 == len( + [ + e + for e in events + if e.type == "DNS_NAME" + and e.data == "walmart.cn" + and str(e.module) == "TARGET" + and e.scope_distance == 0 + ] + ) + assert 1 == len( + [ + e + for e in events + if e.type == "DNS_NAME" + and e.data == "asdf.walmart.cn" + and str(e.module) == "TXT" + and e.scope_distance == 0 + and "wildcard-possible" in e.tags + and "a-wildcard-possible" in e.tags + ] + ) + assert 1 == len( + [ + e + for e in events + if e.type == "RAW_DNS_RECORD" + and e.data == {"host": "walmart.cn", "type": "TXT", "answer": '"asdf.walmart.cn"'} + ] + ) diff --git a/bbot/test/testsslcert.pem b/bbot/test/testsslcert.pem new file mode 100644 index 0000000000..240034d218 --- /dev/null +++ b/bbot/test/testsslcert.pem @@ -0,0 +1,19 @@ +-----BEGIN CERTIFICATE----- +MIIDADCCAeigAwIBAgIUJnHoP2WYqS692n3bHQxkGlYlX1MwDQYJKoZIhvcNAQEL +BQAwFzEVMBMGA1UEAwwMdGVzdC5ub3RyZWFsMCAXDTIzMTAxMzE3NTM0NFoYDzIw +NTEwMjI3MTc1MzQ0WjAXMRUwEwYDVQQDDAx0ZXN0Lm5vdHJlYWwwggEiMA0GCSqG +SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDYDFf5yrTe23FF2zv2dQxQs+VdwxF7lCS/ +F6Tycuh/7+4aDLG9+3IQMeqFE7VlnaQb/M2QHsjMCeFlHUnd1jxXbmt+dWQ5Pxtz +A8Vi0ypDDM6flHoT/f4CTVdDd1sc99ExBHApDAvRi6yyEnu0DxaZqzNTIRP2ijQq +eHDTO4Hx+K/K/NvSCF05FnASS5EnOCx745lURtETatdAwa7HZADZ8NDgG9Dj8fa/ +uRq3FclBbbbmq9LWKTw3cAEXTz8+5N9F2/xSGk7NZpvIv5u15gtfbMfZcVADLSVe +HR6NCfzgd/ZiHAx8CJf/ZStlMYksxZDSkb7wpdm9KeWNUpTjVknhAgMBAAGjQjBA +MB8GA1UdEQQYMBaCFHd3dy5iYm90dGVzdC5ub3RyZWFsMB0GA1UdDgQWBBQC20kP +Jq3PPZoWef0lV+c/ckbocjANBgkqhkiG9w0BAQsFAAOCAQEAzTLHR72bt2Bxc0bF +aUQtumrX1rtuO3Cb2AiqKLPgb3nwnP5q+RZq991U1vMUFTiXUjplh86/Bh5IRJ8X +1HUnMwTo6Co/77Ezx3Na2L62ajg2TpLo5YDOkIgMlOI63cGuk0ahelyxcsFVYdgA +2/Jrh/xsybdKA5l1VG5jxzZ3s9d0Gd1wXpNe+bpwFR7gby52TkibPPviZ/CKF7NB +7UdVj+SREXuSWH5NIicNQ71MJNE4CNNCOwy+yVoGY2E7WzqZNE+KZW5K5Sxp4Pnb +Z9ZnCPq5m0RL7wBd+BhB2WxLVuvt0XdVS3H21cGuD/NR7r4OAsUNrf1nUwARNKPu +BgZQhw== +-----END CERTIFICATE----- diff --git a/bbot/test/testsslkey.pem b/bbot/test/testsslkey.pem new file mode 100644 index 0000000000..c5f8dd3ad7 --- /dev/null +++ b/bbot/test/testsslkey.pem @@ -0,0 +1,28 @@ +-----BEGIN PRIVATE KEY----- +MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQDYDFf5yrTe23FF +2zv2dQxQs+VdwxF7lCS/F6Tycuh/7+4aDLG9+3IQMeqFE7VlnaQb/M2QHsjMCeFl +HUnd1jxXbmt+dWQ5PxtzA8Vi0ypDDM6flHoT/f4CTVdDd1sc99ExBHApDAvRi6yy +Enu0DxaZqzNTIRP2ijQqeHDTO4Hx+K/K/NvSCF05FnASS5EnOCx745lURtETatdA +wa7HZADZ8NDgG9Dj8fa/uRq3FclBbbbmq9LWKTw3cAEXTz8+5N9F2/xSGk7NZpvI +v5u15gtfbMfZcVADLSVeHR6NCfzgd/ZiHAx8CJf/ZStlMYksxZDSkb7wpdm9KeWN +UpTjVknhAgMBAAECggEAKsJYqB7LKN9YHhoLllXoo9FS+DlrDKEPm8V3dyewZd/L +6VpxVDc/hj6G2qNBr9ShHgvs+FTra1yaQDupeq8Tvr8jJcJgnWbkzSDmME64StBu +VY2akrnei8CYYIkvHn7ap3+oHiuc7DJfcdfwJT0mPTAxxoZhr9X/CJfRRrE8oPG4 +6w9WNS0CuyoDZ++xYwbWkNsF4XXtoOfkVgyXgtZDlIEAyRLvzVymDE05JjkmLRWt +mmk4dmxJrYh/vd0DNAK3w1qmV3iaACs+1KG/TSNKeTrDipl+WyZE5KpJe/wPiOSV +KG5hm4pXHRkN250k/5xUWWv+zQEC+fLd/JJdBev6JQKBgQDs8a28ltKstxYyTX2j +W2L/C+jdQi1COCN7u3M0rFKw+vxFoTnvlCj1X9FLQ9lbgZr96vE2cpSMmskkmDDb +KVneR6xBNLdqa/S0of/Ax9ZXwxR4k5EPUYMh3yfxuISbQyGe7qqLCeTbNZldqSYO +igGpQ0nhxFwEJ7d9VzZrBTC7JQKBgQDpbHN8jlxzBQIox56wB+CS2ISj6c0dsIOI +76J0nEdJ4qoDK1k185xaUvUyn43LmQOF2UHfwKOwVz09YwX+vFRkVNFKs7KCtUqg +e0Z7C9oiSzeqKwxUope4yKz8MYtRFgUhAwrBa0WyRRLAyfRJQ6mPzie0GdVWt4pM +tZ889lvVDQKBgQCCNo70BS7iG/vmyQ8ypxZQc4sVjTiyG4fkh69YUxteh4/79A6S 
+yyl3L6Ela7QXxbIXuPW2pmFco/PGWJ0A1Ei/D0Rq0T27Dnj8i8qxdyEkOeEWIoKl +mHYoNysMfArkCJCBd0fiAR30GhCemEaB1vXyvzfrCq5G2kzMZRFS3xdYwQKBgFl0 +sp2dgVijJryyI+KaYjpkuBCJXY5vQzmLfNrruXZbY4RrbHj8r4L+H/ISq6jHL05w +gIpbrV+7T0DjXjzNuBnrV3ole9gT2lG+bLhjRmm2IdMZRFR7K2IppgHQiu+8XKLW +I50Um1VCm3k+7FvXjngKLbUb4WKmXF4hjLE0SOVRAoGAZIXumUjy26y1dLL0E9F2 +HC5YEQfokVylQCWV+Ws5yjAZnAij8i0DWPHf2zvLAJ2BbmeMQAuYj5bUN1AUIFpG +/ve/yLM0635dKgoH1Zlk83iQMrjXAuXkKc4gwfocPUnGFJ/LRXufodIKI2SP7Nff +iVrq/w6VxVfc7EK0a7bfzxo= +-----END PRIVATE KEY----- diff --git a/bbot/wordlists/devops_mutations.txt b/bbot/wordlists/devops_mutations.txt index 319792db4e..b3fc8deda1 100644 --- a/bbot/wordlists/devops_mutations.txt +++ b/bbot/wordlists/devops_mutations.txt @@ -1,113 +1,108 @@ -ad -adm -admin -api -app -archive -aws -azure -backend -backup -bak -beta -bucket -ci -cicd -client -cloud -cm -cms -connect -content -ctl -dashboard -data -db -demo +www +sub +com +test dev -developer -development -download -dr -dv +en +api +de +m +e +staging +metric +a +csg +cdn +host +web +net +us +the +jp +blog eu -ext -external -fail -failover -files -gateway -go -gw -home -id -info -int -internal -intranet -it -lab -legacy -lib -login -mail -media -mirror -mobile -mx -my -na -new -old -online -panel -partner -pass -portal -prd -preview -priv -private -pro prod -production -proxy qa -rest -sa -secure -server -service -space ssl -sso -staff +mail +online +demo +fr +app +init +s +fna +br +secure +careers +ppls +it +p +static +ru +health +free +sn +family stage -staging -stg -store -svc -system -team -test -testing -tr -train -training -trial -ts -tst -ua +admin +d +info +mvm +t +am +c +in +live +i +mobile uat -v1 -v2 -v3 +stg +ca +new +ri +account +of +support +pc +shop +dr +service +sasg +west +es +at +srv +media +h +portal +ar +my +id +sas +r +hss +video +org +server +login +la +cms +beta +update vpn -w3 -web -ws -ww1 -www -www1 -www2 -www3 \ No newline at end of file +services +int +b +assets +pro +east +uk +cloud +ip +mx +store +home +production +auto +cn diff --git a/bbot/wordlists/ms_on_prem_subdomains.txt b/bbot/wordlists/ms_on_prem_subdomains.txt new file mode 100644 index 0000000000..b323e4605f --- /dev/null +++ b/bbot/wordlists/ms_on_prem_subdomains.txt @@ -0,0 +1,101 @@ +adfs +adfs01 +adfs02 +adfs1 +adfs2 +adfs3 +adfsproxy +adfstest +auth +fed +federate +federated +federation +federationfs +fs +fs1 +fs2 +fs3 +fs4 +gateway +login +portal +saml +sso +sts +wap +webmail +owa +hybrid +hybrid-cloud +email +outlook +exchange +mail2 +webmail2 +mail1 +mailbox +mail01 +mailman +mailgate +mailbackup +mail3 +webmail1 +webmail3 +mailing +mailserver +mailhost +mailer +mailadmin +imap +pop3 +post +post1 +post2 +mail +remote +desktop +desktop1 +desktop2 +desktops +extranet +mydesktop +ra +rdesktop +rdgate +rdp +rdpweb +rds +rdsh +rdweb +remote01 +remote02 +remote1 +remote2 +remote3 +remote4 +remoteapp +remoteapps +remotedesktop +remotegateway +tsweb +vdesktop +vdi +dialin +meet +lync +lyncweb +sip +skype +sfbweb +scheduler +lyncext +lyncdiscoverinternal +access +lyncaccess01 +lyncaccess +lync10 +wac +_sipinternaltls +uc +lyncdiscover diff --git a/bbot/wordlists/nameservers.txt b/bbot/wordlists/nameservers.txt new file mode 100644 index 0000000000..9153631946 --- /dev/null +++ b/bbot/wordlists/nameservers.txt @@ -0,0 +1,2373 @@ +# validated DNS servers pulled from public-dns.info on 2022/09/01 +198.153.194.50 +172.64.37.44 +69.67.97.18 +184.155.36.194 +45.225.123.238 +103.3.252.5 +212.187.140.53 +103.47.134.195 +204.199.98.173 +8.14.62.67 +103.121.228.5 
+8.28.109.125 +45.90.28.13 +45.90.28.10 +64.158.240.81 +172.64.37.9 +4.15.141.202 +155.254.21.250 +172.64.37.76 +190.217.113.18 +195.186.4.110 +128.127.104.108 +50.204.42.225 +82.113.224.113 +195.74.68.2 +190.216.19.27 +190.216.125.220 +8.243.126.14 +217.160.70.42 +45.90.30.21 +162.159.36.125 +206.169.117.104 +209.12.133.148 +45.90.28.15 +103.196.38.38 +185.108.141.114 +172.64.37.131 +190.216.251.19 +146.190.6.13 +110.145.125.13 +162.159.36.132 +172.64.37.127 +173.163.85.121 +216.254.141.2 +77.88.8.3 +172.64.36.47 +211.115.194.5 +68.87.72.134 +172.64.37.168 +164.124.107.9 +190.216.250.222 +1.0.202.161 +103.85.107.99 +193.42.159.2 +74.203.74.105 +192.76.144.66 +68.105.46.149 +190.216.69.13 +172.64.46.9 +195.186.1.109 +12.204.162.62 +109.248.149.133 +172.64.46.22 +8.35.35.35 +4.4.53.164 +202.43.108.1 +172.64.36.104 +110.35.78.66 +162.159.36.46 +190.216.69.9 +209.234.212.26 +64.212.76.178 +194.225.73.141 +162.159.56.128 +194.98.65.165 +1.0.170.31 +162.159.51.23 +73.7.178.166 +204.199.106.78 +162.159.50.61 +4.7.75.194 +172.64.37.38 +172.64.36.235 +8.243.96.154 +12.71.143.33 +45.225.123.207 +172.64.36.79 +195.186.4.111 +173.244.51.54 +194.102.42.3 +172.64.37.75 +162.159.46.51 +66.162.169.190 +45.90.28.26 +8.243.96.156 +172.64.37.21 +8.28.109.70 +1.0.215.118 +172.64.36.182 +8.243.126.27 +113.161.116.150 +184.177.84.201 +64.132.21.189 +172.64.47.174 +172.64.36.44 +5.164.26.4 +116.118.119.167 +172.64.46.229 +209.200.100.151 +172.64.37.98 +73.128.218.47 +172.64.37.190 +8.243.104.162 +190.216.253.172 +193.230.161.3 +162.159.46.18 +103.160.248.44 +172.64.36.80 +200.221.11.101 +9.9.9.11 +162.159.56.43 +8.243.126.28 +109.228.22.126 +122.129.122.99 +222.255.167.61 +172.64.37.1 +64.105.199.74 +146.70.31.43 +162.159.36.181 +67.28.70.130 +67.73.141.62 +203.113.135.28 +88.198.92.222 +24.99.149.148 +172.64.36.164 +172.64.36.78 +101.102.103.104 +198.153.194.40 +149.112.122.20 +172.64.37.112 +174.69.40.212 +172.64.36.151 +63.209.155.118 +8.41.17.84 +45.90.30.19 +216.202.247.10 +1.0.209.99 +45.11.45.11 +162.159.50.152 +50.217.25.225 +1.0.169.175 +172.64.37.149 +66.193.38.100 +8.28.109.82 +162.159.56.84 +162.159.51.117 +172.64.37.182 +203.54.212.126 +156.154.70.16 +172.64.37.184 +172.64.37.234 +162.159.56.253 +204.199.122.5 +172.64.37.45 +216.229.0.25 +172.64.46.217 +45.125.208.8 +203.38.225.3 +172.64.37.20 +200.16.208.187 +8.28.109.252 +80.67.188.188 +172.64.37.83 +103.86.96.100 +172.64.36.45 +12.165.204.94 +1.0.133.90 +77.88.8.2 +200.41.78.209 +1.0.138.176 +190.216.67.53 +172.64.36.200 +1.0.170.113 +45.225.123.249 +201.184.230.34 +209.12.244.162 +75.103.115.94 +172.64.37.69 +190.216.203.224 +91.144.22.198 +1.0.170.39 +172.64.36.16 +174.48.45.128 +193.135.143.23 +159.69.114.157 +204.199.130.91 +4.2.167.65 +45.225.123.214 +71.58.100.49 +116.193.64.22 +172.64.36.96 +50.217.25.200 +204.199.157.70 +24.125.55.22 +109.224.233.174 +172.64.37.185 +198.153.192.40 +172.64.36.215 +172.64.36.166 +204.70.127.127 +67.187.17.182 +4.15.208.86 +45.225.123.239 +156.154.71.1 +195.186.1.111 +70.171.58.112 +172.64.37.253 +200.87.100.10 +41.225.236.101 +8.28.109.109 +4.4.26.135 +8.25.184.252 +172.64.37.245 +146.70.82.3 +172.64.36.152 +45.90.28.28 +159.203.187.29 +172.64.37.107 +172.64.36.30 +172.64.37.130 +8.20.247.7 +45.19.183.181 +172.64.36.223 +45.90.30.27 +162.159.57.6 +84.236.142.130 +103.23.150.89 +4.79.123.69 +172.64.36.203 +172.64.36.226 +98.232.103.71 +98.179.205.194 +149.112.112.10 +172.64.37.207 +195.77.235.10 +66.93.87.2 +172.64.36.155 +117.103.228.101 +97.65.124.6 +212.72.130.20 +92.255.164.166 +50.200.245.136 
+1.0.209.242 +8.18.4.19 +172.64.36.9 +65.91.52.25 +50.223.22.178 +165.87.201.244 +172.64.36.251 +8.33.239.234 +172.64.36.187 +12.231.169.28 +8.26.56.11 +172.64.36.121 +172.64.37.218 +193.135.143.21 +98.38.222.66 +8.20.45.6 +96.102.121.126 +172.64.37.77 +190.0.15.18 +46.147.195.82 +8.26.56.17 +172.64.36.2 +185.42.192.114 +45.225.123.178 +4.14.199.129 +99.99.99.193 +66.192.104.68 +210.23.129.34 +172.64.46.192 +23.226.134.242 +172.64.37.106 +162.159.46.166 +172.64.36.176 +162.159.56.66 +172.64.36.28 +113.161.182.253 +98.39.154.157 +162.159.50.85 +8.243.126.2 +172.64.37.251 +194.7.1.4 +77.235.219.211 +216.36.31.135 +8.28.109.110 +172.64.37.225 +209.164.189.56 +162.159.56.228 +119.17.138.116 +172.64.46.111 +8.9.163.237 +162.159.57.183 +103.239.32.81 +193.135.143.35 +204.194.234.200 +158.43.128.72 +201.234.44.129 +24.56.77.138 +172.64.36.146 +4.28.150.154 +204.199.116.45 +8.46.206.93 +172.64.36.83 +188.225.225.25 +8.242.215.91 +172.64.37.163 +211.115.194.2 +112.197.12.40 +172.64.47.91 +69.44.110.204 +37.120.232.43 +172.64.37.179 +190.93.189.30 +12.127.17.72 +12.121.118.9 +172.64.36.159 +77.88.8.88 +172.64.36.27 +162.159.46.23 +4.1.67.166 +172.64.36.22 +76.104.155.196 +50.234.132.241 +195.186.1.107 +213.211.50.2 +210.220.163.82 +216.194.28.33 +162.159.36.6 +204.152.204.100 +216.106.1.254 +172.64.37.2 +168.95.192.1 +172.64.47.200 +162.159.36.227 +75.103.95.14 +190.216.237.18 +8.242.172.200 +8.36.139.1 +118.68.218.173 +8.243.220.194 +205.151.222.251 +172.64.36.53 +8.243.126.118 +14.225.232.19 +122.2.65.202 +172.64.37.144 +190.216.69.0 +4.59.232.194 +45.90.30.29 +172.64.37.28 +164.163.1.90 +172.64.37.51 +8.26.56.16 +8.243.126.18 +162.159.46.177 +172.64.36.212 +172.64.36.64 +8.29.3.37 +142.103.1.1 +95.80.104.128 +8.20.247.3 +190.217.110.10 +209.200.100.150 +172.64.37.70 +91.121.157.83 +72.237.206.37 +8.20.247.16 +8.242.49.142 +50.221.57.204 +94.28.20.249 +204.199.194.28 +208.67.220.2 +50.217.25.205 +162.159.36.104 +8.243.126.135 +64.157.242.118 +172.64.36.239 +172.64.36.229 +172.64.37.154 +172.64.36.14 +172.64.36.249 +172.64.37.192 +210.87.253.60 +8.242.214.61 +208.67.222.220 +8.30.101.114 +195.168.91.238 +193.78.240.12 +195.76.233.2 +172.64.37.204 +156.154.70.11 +190.217.25.34 +162.159.46.120 +172.64.37.169 +202.248.20.133 +162.159.36.139 +81.163.3.1 +4.79.244.118 +172.64.36.15 +162.159.56.16 +172.64.46.29 +217.138.220.243 +14.225.24.83 +172.64.36.29 +210.87.253.35 +172.64.36.218 +208.91.112.220 +221.163.74.11 +1.0.215.158 +125.234.104.230 +216.175.203.51 +172.64.37.177 +107.0.74.232 +95.158.129.2 +181.224.160.11 +83.143.8.249 +162.159.51.205 +172.64.46.159 +172.64.36.169 +172.64.46.28 +172.64.37.139 +144.91.64.224 +50.235.228.46 +8.243.126.105 +172.64.37.71 +8.30.83.132 +109.195.187.172 +172.64.37.196 +172.64.36.177 +165.22.241.78 +1.0.0.2 +172.64.37.99 +208.91.112.52 +172.64.47.216 +8.28.109.115 +8.29.3.132 +172.64.36.186 +172.64.46.34 +173.184.62.167 +8.30.101.125 +109.228.24.15 +8.20.247.17 +199.44.194.2 +172.64.37.43 +8.29.2.132 +5.11.11.5 +172.64.36.248 +4.15.23.203 +195.129.111.49 +64.76.25.120 +209.234.212.184 +8.243.126.112 +12.97.174.103 +216.54.240.147 +70.171.60.6 +1.0.221.165 +207.138.37.4 +85.21.144.55 +190.216.19.16 +8.26.21.127 +151.80.145.143 +204.199.97.162 +172.64.37.49 +190.216.247.150 +23.19.67.116 +64.64.110.3 +216.84.166.166 +8.29.103.224 +172.64.37.3 +67.73.188.138 +172.64.37.114 +8.242.215.226 +203.54.152.226 +172.64.37.92 +162.159.46.1 +172.64.36.253 +82.197.214.133 +193.135.143.13 +167.250.65.246 +94.28.26.138 +8.243.126.71 +190.216.65.166 +172.64.37.133 +162.159.57.19 
+45.90.28.189 +172.64.36.69 +1.0.218.23 +172.64.36.85 +212.187.166.54 +62.149.132.2 +37.120.207.131 +98.180.23.77 +195.46.39.39 +8.242.215.202 +172.64.36.179 +162.159.46.28 +198.82.247.34 +172.64.37.121 +114.130.5.6 +162.159.57.78 +8.38.89.46 +149.156.12.250 +202.136.162.12 +172.64.36.99 +45.90.28.17 +45.90.28.23 +162.159.57.1 +8.242.173.2 +213.149.113.211 +172.64.37.59 +172.64.36.94 +45.90.30.10 +162.159.46.90 +12.97.174.104 +4.34.133.226 +45.90.28.22 +189.125.136.8 +108.175.22.60 +172.64.36.26 +96.102.76.175 +24.99.149.127 +209.51.161.14 +172.64.36.135 +172.64.47.195 +36.37.160.242 +209.136.31.102 +37.120.152.235 +109.194.17.191 +162.159.51.239 +172.64.36.174 +162.159.51.224 +172.64.36.76 +50.231.115.22 +216.244.192.3 +221.139.13.130 +172.64.36.67 +204.199.128.123 +208.51.60.81 +193.47.83.251 +172.64.36.12 +1.0.156.34 +45.90.28.193 +201.234.130.31 +172.64.36.72 +172.64.36.213 +172.64.36.18 +1.0.160.109 +203.21.196.20 +8.29.3.133 +139.134.2.190 +172.64.37.6 +45.90.30.17 +172.64.36.114 +192.71.166.92 +9.9.9.9 +172.64.37.187 +50.223.23.54 +199.227.106.122 +45.90.28.21 +73.31.121.3 +212.73.198.88 +67.99.197.123 +4.14.162.237 +190.216.19.21 +1.0.214.3 +172.64.37.25 +172.64.37.102 +172.64.36.175 +8.17.30.61 +172.64.37.46 +45.117.80.200 +172.64.37.157 +63.232.89.67 +220.239.164.49 +49.156.53.166 +189.126.192.4 +190.216.64.230 +4.15.7.161 +72.207.237.152 +172.64.36.170 +8.9.113.35 +216.146.35.35 +189.125.19.198 +212.187.156.31 +208.72.160.67 +89.163.221.181 +172.64.37.195 +190.216.241.71 +8.14.62.70 +72.237.212.20 +67.99.200.1 +172.64.37.242 +176.212.194.184 +85.214.91.66 +14.238.93.131 +209.244.104.184 +8.23.82.186 +4.79.140.163 +172.64.37.215 +194.69.194.3 +4.7.194.66 +45.90.30.28 +67.73.245.181 +209.244.104.187 +194.2.0.50 +63.209.154.102 +24.4.172.85 +222.255.167.73 +98.34.183.199 +172.64.36.74 +172.64.37.53 +209.0.191.6 +172.64.36.202 +162.159.46.202 +75.103.115.95 +172.64.46.72 +8.35.114.228 +172.64.36.52 +190.216.229.111 +8.243.120.54 +204.199.116.210 +113.161.86.104 +190.2.210.115 +64.120.5.251 +94.141.24.92 +8.20.247.4 +172.64.47.204 +172.64.37.123 +172.64.37.37 +24.250.147.79 +172.64.37.68 +172.64.46.103 +181.224.160.10 +172.64.37.136 +103.150.209.246 +8.28.113.202 +103.31.228.150 +8.28.109.247 +172.64.37.78 +176.103.130.130 +8.242.178.122 +162.159.57.251 +1.0.205.75 +172.64.37.105 +204.199.102.115 +162.159.57.139 +172.64.37.226 +24.116.92.101 +8.25.184.107 +8.242.48.97 +45.225.123.233 +1.0.170.69 +23.19.245.84 +50.216.25.75 +190.217.14.65 +45.90.28.169 +81.3.27.54 +172.64.36.6 +27.71.233.116 +66.192.104.191 +45.90.30.14 +8.29.3.211 +172.64.37.103 +51.15.69.236 +8.28.109.13 +205.214.45.10 +195.208.5.1 +162.159.57.36 +165.158.1.2 +216.84.166.42 +51.158.105.245 +172.64.36.158 +5.1.66.255 +50.238.53.122 +91.225.226.39 +209.244.104.189 +172.64.37.143 +172.64.46.198 +162.159.57.204 +201.234.130.26 +162.159.57.85 +64.129.104.46 +172.64.36.241 +172.64.36.66 +172.64.47.104 +58.186.80.18 +172.64.36.54 +45.225.123.101 +212.230.255.1 +8.243.113.190 +172.64.37.221 +1.0.150.32 +172.64.36.61 +8.0.7.0 +206.169.200.135 +4.49.73.138 +193.135.143.33 +172.64.36.144 +1.0.194.216 +172.64.37.148 +181.224.163.11 +210.181.1.24 +8.243.126.133 +168.205.99.11 +78.47.243.3 +172.64.37.62 +80.78.132.79 +162.159.51.17 +199.77.135.211 +172.64.37.115 +1.0.169.119 +190.216.69.8 +182.52.51.181 +172.64.37.167 +67.100.88.27 +180.211.158.90 +27.76.137.77 +103.196.16.2 +200.32.110.90 +8.30.101.115 +8.34.34.11 +201.132.162.254 +208.50.252.1 +77.37.232.237 +96.53.102.66 +204.199.99.99 +189.125.96.247 +172.64.46.177 
+64.76.25.125 +109.228.18.5 +161.200.96.9 +198.181.254.34 +1.0.130.68 +98.38.222.6 +178.161.150.190 +8.28.109.101 +190.216.69.14 +165.16.22.130 +91.205.230.224 +172.64.47.171 +172.64.37.12 +172.64.47.9 +212.12.28.126 +172.64.36.86 +8.34.34.101 +204.199.172.132 +63.209.154.99 +209.247.118.9 +204.199.121.162 +68.87.72.130 +172.64.36.130 +193.240.108.125 +204.199.73.4 +213.202.216.236 +195.129.12.114 +190.217.10.230 +8.243.126.129 +172.64.36.62 +172.64.37.104 +81.201.58.99 +166.102.165.32 +1.0.225.244 +8.28.109.58 +134.75.122.2 +172.64.36.198 +172.64.36.247 +67.30.143.54 +121.254.134.99 +8.242.159.66 +194.187.251.67 +72.207.238.183 +45.90.30.16 +45.225.123.199 +8.242.213.37 +4.1.226.201 +172.64.37.249 +172.64.37.243 +8.21.123.101 +64.76.25.127 +172.64.37.201 +63.211.67.251 +213.55.96.166 +113.53.29.228 +162.159.57.114 +174.64.35.164 +172.64.37.173 +172.64.37.84 +172.64.47.147 +1.0.203.107 +8.242.187.227 +1.0.166.233 +216.55.100.220 +50.58.111.74 +193.135.143.39 +51.15.78.17 +8.33.239.149 +54.37.242.17 +12.127.16.67 +162.159.24.69 +1.0.233.221 +66.193.240.4 +45.90.30.25 +209.163.152.186 +1.1.136.105 +172.64.46.161 +8.29.3.226 +172.64.36.98 +172.64.36.58 +202.136.162.11 +8.242.205.35 +172.64.36.59 +162.159.50.157 +172.64.37.219 +172.64.37.52 +8.28.109.246 +8.28.109.253 +204.199.129.38 +172.64.37.101 +172.64.36.128 +62.176.12.111 +172.64.37.124 +162.159.56.255 +45.90.28.19 +8.242.184.54 +1.0.154.199 +185.233.106.232 +172.64.36.122 +193.227.50.3 +172.64.37.181 +8.35.35.10 +204.199.98.172 +200.76.5.147 +172.64.37.22 +172.64.37.31 +185.74.5.1 +172.64.47.170 +172.64.36.55 +172.64.36.42 +208.48.253.142 +89.107.129.15 +172.64.37.246 +50.59.195.149 +45.225.123.234 +172.64.37.145 +172.64.37.189 +202.138.120.86 +38.242.202.141 +172.64.37.134 +51.15.88.152 +172.64.36.77 +213.202.216.12 +172.64.37.210 +203.129.31.67 +63.208.141.14 +172.64.37.24 +45.90.28.14 +98.244.8.17 +198.153.192.50 +70.191.189.96 +172.64.37.198 +4.1.67.145 +172.64.37.146 +8.29.2.130 +172.64.36.65 +162.159.50.124 +67.166.30.234 +222.255.206.232 +8.243.126.117 +201.238.224.203 +8.242.184.52 +207.17.190.5 +202.29.218.138 +172.64.37.214 +46.245.253.5 +198.54.117.11 +172.64.37.40 +162.159.56.149 +162.159.50.83 +64.42.181.227 +172.64.37.110 +24.136.58.6 +1.0.138.245 +172.64.37.176 +172.64.36.199 +8.243.126.66 +45.225.123.206 +88.208.209.92 +195.158.0.3 +162.159.46.144 +172.64.37.162 +172.64.36.25 +1.0.170.44 +24.99.148.85 +199.77.206.122 +172.64.36.219 +94.140.14.141 +172.64.46.213 +64.105.199.76 +172.64.36.68 +64.76.25.117 +1.0.170.83 +45.90.28.12 +149.112.122.10 +1.0.169.69 +172.64.47.12 +190.217.80.59 +185.183.106.83 +45.90.28.18 +162.159.56.75 +50.59.58.165 +41.207.186.166 +1.232.188.2 +194.25.0.60 +172.64.36.41 +172.64.46.36 +41.65.236.37 +8.242.26.195 +172.64.37.238 +202.78.97.41 +45.90.30.30 +172.64.37.188 +37.120.211.91 +8.34.34.34 +98.208.56.152 +172.64.47.93 +212.72.130.21 +162.159.56.111 +103.136.202.93 +62.113.113.34 +213.157.50.130 +45.90.30.226 +24.98.20.141 +64.105.97.90 +76.76.10.0 +67.97.247.52 +207.138.37.173 +172.64.37.33 +194.7.15.70 +80.83.162.11 +195.10.195.195 +4.79.241.45 +172.64.36.123 +204.199.165.43 +8.42.68.81 +172.64.36.246 +1.0.248.135 +172.64.36.191 +70.171.58.83 +66.163.0.161 +103.239.32.36 +1.0.224.105 +172.64.37.150 +203.119.8.106 +198.153.192.1 +118.69.174.70 +24.104.140.229 +172.64.46.137 +172.64.36.33 +202.53.95.14 +46.224.1.42 +162.159.56.173 +68.1.40.86 +190.216.68.130 +8.243.126.30 +190.216.19.18 +172.64.37.108 +195.99.66.220 +172.64.37.85 +110.78.164.234 +176.9.1.117 +202.87.214.253 +203.38.225.13 
+208.51.24.44 +8.242.6.242 +172.64.36.129 +172.64.36.254 +116.193.64.16 +203.38.225.45 +172.64.36.216 +162.159.56.1 +203.198.7.66 +73.206.234.153 +172.64.36.71 +8.242.172.206 +204.57.109.171 +190.216.69.5 +197.155.92.20 +162.159.57.109 +1.0.213.67 +172.64.37.194 +172.64.37.155 +123.30.108.151 +189.125.94.220 +40.70.57.226 +201.234.130.25 +190.216.69.12 +172.64.37.27 +162.159.56.132 +101.255.118.1 +73.137.96.252 +162.159.57.56 +172.64.36.225 +172.64.37.7 +8.29.87.186 +185.228.168.9 +45.90.30.15 +1.1.249.206 +195.153.19.5 +8.33.239.235 +1.0.240.7 +187.157.46.210 +172.64.37.90 +130.225.244.166 +172.64.46.203 +172.64.37.80 +209.87.64.70 +190.217.65.139 +1.0.248.182 +42.116.255.180 +8.30.97.161 +141.1.1.1 +45.90.30.23 +216.254.95.2 +50.58.155.130 +200.32.111.203 +162.159.50.109 +8.35.35.101 +172.64.37.128 +172.64.36.73 +190.216.125.81 +190.216.19.25 +50.219.55.167 +98.38.222.125 +216.21.128.22 +162.159.51.143 +162.159.46.48 +76.76.2.0 +193.202.121.50 +103.197.251.202 +200.41.102.254 +172.64.37.119 +37.209.219.30 +206.57.41.222 +162.159.50.116 +172.64.46.84 +162.159.51.100 +8.243.96.157 +8.19.132.50 +193.194.79.194 +8.29.2.42 +162.159.50.115 +80.248.48.14 +8.14.63.91 +193.135.143.15 +63.209.100.10 +64.192.52.197 +45.90.30.20 +103.139.14.2 +172.64.37.203 +8.243.126.109 +172.64.37.117 +165.87.13.129 +190.217.8.254 +190.216.69.6 +162.159.57.12 +1.0.163.94 +162.159.57.49 +5.152.215.29 +24.98.20.247 +8.243.126.9 +195.27.1.1 +76.120.201.96 +172.64.37.125 +8.29.3.67 +200.0.194.78 +172.64.36.110 +162.159.57.193 +8.243.217.24 +172.64.36.147 +8.43.56.38 +172.64.37.183 +8.36.160.49 +172.64.37.216 +178.136.2.208 +77.88.8.8 +193.240.207.200 +162.159.27.90 +1.0.152.224 +172.64.36.148 +8.243.126.11 +172.64.37.57 +8.26.56.26 +187.1.57.206 +45.90.30.18 +172.64.37.61 +64.192.69.159 +172.64.37.170 +172.64.37.88 +193.238.77.62 +92.60.50.40 +172.64.37.63 +194.177.199.1 +205.171.202.166 +8.28.109.122 +201.234.130.28 +172.64.36.209 +172.64.36.49 +118.69.109.45 +162.159.56.217 +172.64.36.92 +1.0.169.189 +172.64.37.32 +1.0.218.50 +1.0.170.33 +45.90.28.126 +162.159.46.218 +172.64.37.72 +210.4.2.61 +1.0.202.19 +103.196.16.3 +172.64.36.196 +162.159.56.156 +45.5.92.94 +172.64.36.167 +200.195.132.210 +54.37.138.118 +200.41.77.85 +172.64.37.4 +176.103.130.137 +208.67.222.2 +177.135.239.132 +172.64.37.109 +172.64.37.193 +103.147.187.246 +78.129.140.65 +110.142.40.60 +45.90.28.250 +162.159.46.47 +1.0.215.208 +172.64.46.176 +172.64.36.116 +8.242.48.100 +172.64.37.48 +172.64.37.197 +202.134.52.105 +45.90.28.1 +162.159.36.61 +86.106.74.219 +8.20.247.20 +172.64.36.211 +98.247.49.130 +1.0.203.136 +175.213.132.85 +204.152.204.10 +8.243.126.25 +193.135.143.3 +172.64.36.233 +189.125.148.1 +80.254.79.157 +94.140.14.140 +193.135.143.1 +194.108.42.253 +185.23.66.172 +185.184.222.222 +35.155.221.215 +190.90.86.81 +1.0.149.164 +172.64.36.160 +209.244.104.180 +177.54.145.131 +64.210.41.182 +8.243.126.19 +114.114.115.115 +103.106.112.18 +66.163.0.173 +190.216.69.2 +172.64.37.0 +8.9.160.30 +202.6.96.3 +8.243.126.108 +172.64.36.8 +172.64.46.45 +1.0.215.126 +172.64.37.55 +162.159.36.224 +213.211.50.1 +172.64.37.23 +211.25.11.15 +172.64.37.142 +45.191.130.26 +162.159.50.3 +172.64.37.247 +197.155.92.21 +118.69.197.57 +162.159.56.158 +172.64.36.193 +82.64.83.14 +149.112.122.30 +172.64.37.244 +50.59.195.147 +72.236.151.44 +172.64.36.190 +162.159.50.95 +8.8.8.8 +211.115.194.4 +172.64.37.222 +64.76.25.124 +210.87.250.59 +190.216.28.216 +162.159.46.53 +172.64.37.122 +8.28.109.84 +185.93.180.131 +204.117.214.10 +190.216.67.49 +65.220.42.38 +8.29.2.134 
+172.64.37.229 +4.79.241.44 +172.64.46.42 +172.64.36.142 +68.183.235.124 +172.64.37.26 +200.169.88.1 +198.7.58.227 +89.163.140.67 +172.64.37.100 +113.161.116.121 +8.27.77.55 +172.64.36.183 +195.12.48.171 +172.64.47.227 +209.136.8.87 +64.157.63.18 +78.31.67.99 +172.64.47.158 +91.219.215.227 +172.64.36.181 +172.64.46.253 +85.9.129.38 +24.125.54.118 +172.64.36.208 +219.250.36.130 +172.64.37.205 +94.247.43.254 +190.216.204.79 +202.6.96.4 +85.114.138.119 +203.113.130.221 +190.217.63.34 +1.0.170.50 +113.161.230.19 +196.3.132.153 +208.91.112.53 +174.47.212.70 +5.164.28.186 +1.0.173.252 +4.53.133.131 +185.199.98.236 +211.115.194.3 +185.237.204.130 +212.89.130.180 +201.234.236.126 +198.71.117.66 +8.26.56.6 +213.176.123.5 +158.43.240.4 +184.187.143.241 +193.19.103.4 +1.0.168.238 +64.76.25.115 +162.159.50.144 +172.64.36.125 +193.135.143.9 +75.103.55.4 +125.235.11.66 +162.159.50.31 +192.133.129.2 +193.37.255.227 +203.133.1.8 +174.46.204.211 +209.244.104.188 +162.159.51.80 +8.28.109.114 +209.212.110.0 +178.212.65.61 +198.245.51.147 +8.35.35.103 +186.215.128.142 +172.64.37.10 +172.64.36.171 +200.41.50.6 +185.233.107.4 +172.64.36.139 +67.97.247.50 +172.64.36.48 +172.64.36.56 +76.76.10.1 +172.64.37.165 +8.243.126.130 +4.7.43.98 +198.60.22.2 +50.201.138.220 +162.159.57.229 +45.90.30.24 +64.192.91.38 +8.26.56.3 +8.243.126.69 +172.64.36.35 +193.67.79.39 +172.64.36.39 +180.182.54.14 +172.64.37.172 +172.64.37.137 +204.199.66.75 +203.162.125.129 +24.229.250.113 +139.130.4.4 +203.2.193.68 +91.192.196.226 +8.243.126.7 +51.89.88.77 +50.59.195.146 +172.64.36.161 +172.64.47.168 +8.29.3.174 +65.56.156.242 +213.230.90.106 +45.90.28.30 +4.34.37.186 +172.64.36.255 +203.119.36.106 +1.4.214.148 +89.218.58.122 +172.64.36.17 +172.64.37.208 +69.44.110.203 +8.20.247.5 +218.102.23.228 +209.58.147.36 +172.64.37.239 +45.90.30.0 +162.159.51.229 +69.44.4.100 +172.64.37.199 +172.64.46.202 +172.64.36.133 +4.7.175.6 +50.59.232.9 +172.64.36.111 +45.225.123.177 +125.234.254.184 +50.200.192.113 +1.0.216.155 +63.210.61.30 +1.0.202.216 +172.64.37.54 +198.153.194.1 +8.41.124.193 +190.216.253.143 +137.82.1.1 +172.64.37.16 +192.156.214.175 +172.64.36.236 +1.0.163.29 +201.234.210.179 +217.144.6.6 +189.125.208.155 +203.113.135.26 +212.12.14.122 +175.213.132.56 +172.64.36.163 +58.186.80.17 +8.29.64.122 +172.64.36.126 +24.98.164.7 +46.147.193.104 +203.253.64.1 +8.28.109.251 +173.89.30.48 +172.64.37.66 +4.15.10.86 +4.1.226.202 +63.209.154.98 +109.70.189.51 +172.64.37.73 +2.56.220.2 +1.0.170.234 +8.43.56.34 +66.28.0.61 +185.38.27.139 +149.211.153.51 +172.64.37.15 +195.129.12.83 +45.125.211.11 +212.73.198.95 +162.159.46.119 +199.85.126.20 +8.29.2.133 +173.8.207.139 +8.28.109.102 +76.76.2.4 +73.120.98.178 +172.64.46.109 +1.0.169.235 +103.15.241.241 +63.134.244.54 +116.100.88.123 +88.80.64.8 +172.64.37.64 +217.18.206.22 +94.28.49.131 +73.59.66.235 +103.74.122.91 +193.135.143.11 +206.51.143.55 +64.208.105.1 +209.164.189.54 +204.199.128.122 +206.80.23.5 +173.234.56.115 +184.55.4.145 +95.9.194.13 +24.99.148.175 +88.208.211.65 +205.171.3.66 +162.159.50.18 +210.94.0.7 +172.64.37.180 +203.248.252.2 +190.128.225.58 +37.120.236.11 +172.64.37.152 +172.64.37.232 +193.58.204.59 +45.90.28.16 +162.159.57.237 +45.90.28.24 +200.55.59.101 +172.64.37.11 +46.224.1.43 +8.21.123.119 +45.90.28.25 +201.234.239.155 +172.64.37.86 +173.10.78.68 +64.238.96.12 +162.159.56.64 +8.21.123.98 +200.55.59.102 +193.135.143.31 +193.135.143.7 +69.169.190.211 +50.216.244.79 +222.255.144.115 +204.199.84.229 +172.64.36.34 +45.225.123.161 +141.95.6.51 +172.64.36.118 +172.64.36.70 
+197.210.211.1 +5.164.31.60 +1.0.167.125 +172.64.36.154 +37.120.235.187 +172.64.46.62 +172.64.37.224 +12.165.204.88 +8.242.155.74 +103.14.26.190 +193.135.143.19 +8.243.126.134 +8.28.109.124 +1.0.157.7 +207.191.5.59 +95.215.19.53 +189.125.73.13 +8.243.126.107 +176.103.130.136 +195.74.68.3 +172.64.36.136 +8.28.109.59 +172.64.37.42 +24.104.140.255 +172.64.37.60 +103.48.78.156 +12.51.21.245 +172.64.36.180 +172.64.36.143 +200.41.50.4 +172.64.36.238 +8.243.126.119 +14.161.2.38 +162.159.36.114 +199.85.126.10 +45.225.123.174 +208.51.60.215 +8.243.126.122 +189.125.136.9 +70.92.127.134 +76.76.10.2 +8.242.184.55 +8.243.126.10 +178.150.206.87 +210.87.253.61 +172.64.36.149 +172.64.36.150 +1.0.208.233 +180.182.54.12 +82.135.139.155 +50.222.131.40 +52.24.103.199 +8.243.217.66 +45.90.28.0 +172.64.36.221 +192.46.230.27 +180.182.54.11 +1.0.214.41 +31.7.37.37 +1.0.202.55 +162.159.56.223 +91.194.239.122 +45.90.28.20 +4.31.189.137 +193.135.143.29 +79.141.83.250 +209.203.82.54 +190.217.82.126 +172.64.36.252 +186.97.246.178 +97.65.32.114 +172.64.47.103 +172.64.37.160 +162.159.51.196 +200.222.51.208 +4.4.211.46 +80.80.218.218 +167.114.84.132 +81.16.18.228 +1.0.162.250 +84.54.64.35 +94.28.91.14 +50.201.178.63 +80.191.40.41 +50.201.178.29 +162.159.56.118 +50.59.65.117 +172.64.36.19 +1.0.212.99 +23.19.245.88 +64.210.72.157 +8.243.113.189 +162.159.36.243 +172.64.46.144 +172.64.37.132 +190.217.83.202 +200.41.12.163 +172.64.36.192 +172.64.36.210 +172.64.36.119 +8.27.215.70 +50.216.92.142 +190.216.66.178 +8.18.226.210 +172.64.46.179 +172.64.37.89 +80.254.77.39 +190.216.69.4 +172.64.36.107 +4.7.72.198 +162.159.51.90 +45.225.123.165 +64.76.25.119 +45.90.30.129 +204.199.80.58 +4.28.44.2 +172.64.36.250 +217.150.35.129 +73.176.118.83 +24.125.55.144 +118.69.172.151 +81.16.19.65 +213.154.80.203 +194.1.154.37 +210.245.87.16 +45.225.123.164 +73.69.70.235 +67.73.245.210 +172.64.37.81 +172.64.36.230 +50.229.154.179 +76.76.2.5 +162.159.51.140 +201.148.17.110 +190.216.111.247 +76.30.118.33 +4.2.237.6 +1.0.159.177 +190.216.230.18 +45.191.130.123 +95.158.128.2 +172.64.37.178 +156.154.71.25 +222.255.237.207 +172.64.37.126 +206.253.33.130 +189.125.108.13 +1.0.169.183 +8.243.126.21 +172.64.37.220 +204.199.86.147 +103.137.156.3 +172.64.37.164 +67.191.149.125 +8.28.109.42 +67.98.222.236 +8.26.56.12 +1.0.162.166 +172.107.199.19 +201.234.232.18 +8.14.63.82 +194.108.42.2 +210.80.58.66 +1.0.216.30 +62.76.62.76 +8.29.3.213 +190.216.73.163 +50.228.251.34 +98.212.26.255 +45.79.139.155 +194.25.0.52 +4.59.81.39 +45.231.223.250 +24.250.147.103 +8.40.240.36 +162.159.36.185 +162.159.50.119 +190.216.69.15 +201.234.159.214 +8.242.187.229 +194.61.59.25 +1.0.216.56 +172.64.46.13 +172.64.37.129 +130.226.161.34 +210.181.4.25 +8.28.109.77 +217.218.127.127 +204.199.194.24 +1.0.214.166 +204.199.85.178 +50.220.226.155 +172.64.36.127 +172.64.36.21 +4.26.50.74 +162.159.57.219 +172.64.36.217 +172.64.37.252 +172.64.36.197 +204.199.130.93 +62.94.0.42 +172.64.37.17 +109.224.233.190 +82.113.248.113 +73.32.84.89 +70.171.50.159 +8.243.126.77 +172.64.47.167 +172.64.37.241 +194.88.93.22 +162.159.36.134 +199.166.6.2 +8.243.126.72 +88.99.39.199 +172.64.47.133 +1.1.1.1 +8.28.109.227 +162.159.50.72 +204.57.114.215 +172.64.46.142 +203.27.182.202 +63.209.154.54 +8.29.3.214 +8.242.187.228 +113.161.230.20 +185.117.118.20 +4.79.132.219 +212.162.6.43 +172.64.46.230 +162.159.50.129 +162.159.46.117 +172.64.36.1 +82.99.242.155 +1.0.247.14 +8.26.56.13 +50.238.53.126 +45.90.28.129 +172.64.37.91 +103.7.172.7 +172.64.46.17 +162.159.46.38 +94.203.253.156 +190.216.229.78 +207.91.5.32 
+172.64.36.101 +172.64.37.19 +172.64.36.7 +204.199.121.154 +96.9.69.164 +45.225.123.163 +95.85.95.85 +162.159.36.96 +194.225.62.80 +8.42.68.209 +66.243.10.180 +8.28.109.244 +172.64.46.255 +149.211.153.50 +172.64.37.138 +64.81.79.2 +180.250.252.218 +172.64.46.52 +172.64.37.156 +98.232.103.167 +50.58.112.194 +154.236.189.27 +120.150.56.245 +217.151.251.10 +212.3.255.178 +162.159.36.237 +172.64.36.222 +73.31.207.113 +201.148.17.116 +209.244.104.181 +172.64.36.157 +172.64.36.5 +103.164.113.26 +8.30.101.117 +172.64.36.227 +204.199.194.26 +64.132.102.138 +8.27.177.12 +209.244.104.186 +8.243.126.70 +203.54.189.178 +172.64.36.204 +162.159.57.128 +176.121.9.144 +172.64.37.56 +1.0.170.243 +8.21.123.86 +64.154.28.51 +4.16.3.86 +73.6.85.165 +149.112.121.30 +52.3.100.184 +216.248.55.141 +208.51.24.53 +1.0.138.14 +190.216.229.245 +81.27.162.100 +204.194.232.200 +4.14.161.87 +89.233.43.71 +172.64.37.217 +216.136.12.98 +51.68.190.250 +101.53.12.102 +172.64.36.206 +209.136.132.139 +69.75.150.3 +8.20.247.9 +65.18.114.254 +200.41.50.5 +8.18.4.20 +110.145.182.154 +8.34.34.103 +45.191.130.18 +172.64.36.91 +172.64.36.224 +172.64.37.174 +172.64.37.8 +24.170.197.68 +172.64.37.65 +71.66.130.90 +51.15.42.162 +202.78.224.134 +190.90.154.194 +162.159.56.78 +212.211.132.4 +172.64.47.254 +193.95.93.243 +190.216.19.22 +172.64.36.117 +8.243.126.29 +14.225.246.17 +62.149.128.2 +162.159.36.11 +104.255.175.2 +172.64.36.172 +216.27.175.2 +64.76.25.113 +178.175.129.16 +209.244.104.185 +146.190.6.140 +172.64.36.138 +172.64.37.235 +8.29.3.66 +172.64.37.228 +8.14.62.69 +121.52.154.225 +162.159.50.27 +162.159.36.158 +177.92.1.35 +172.64.36.43 +8.242.148.75 +1.0.170.108 +109.228.1.132 +156.154.70.7 +190.93.189.28 +199.2.252.10 +1.0.170.100 +41.222.4.34 +162.159.36.123 +162.159.56.8 +172.64.36.173 +162.159.50.239 +190.152.5.126 +4.53.7.42 +45.90.30.13 +217.138.219.219 +172.108.131.83 +80.211.55.138 +158.43.240.3 +200.55.63.133 +8.242.187.226 +181.209.105.154 +172.64.37.227 +162.159.51.183 +77.88.8.7 +1.0.247.85 +1.0.226.36 +162.159.36.36 +162.159.46.134 +172.64.37.14 +162.159.51.8 +94.142.242.40 +172.64.37.212 +45.225.123.237 +190.216.252.2 +64.233.217.2 +76.76.10.4 +172.64.37.236 +172.64.37.113 +209.216.160.131 +172.64.36.90 +64.81.45.2 +172.64.37.18 +8.242.24.58 +200.31.4.97 +45.90.30.26 +143.0.226.116 +172.64.36.23 +172.64.36.134 +172.64.36.184 +208.48.51.30 +172.64.37.116 +172.64.47.107 +212.78.94.40 +50.222.112.82 +172.64.47.106 +172.64.36.189 +194.36.144.87 +110.145.237.30 +172.64.37.94 +162.159.36.199 +172.64.36.103 +172.64.37.13 +210.94.0.73 +110.77.149.20 +200.12.130.66 +118.70.203.68 +172.64.36.37 +65.70.23.44 +212.73.221.107 +172.64.37.41 +4.16.64.169 +162.159.36.252 +195.186.4.109 +209.136.132.136 +118.69.174.71 +8.36.139.129 +8.243.126.120 +8.27.177.11 +68.87.74.166 +94.140.15.15 +190.216.19.17 +162.159.51.37 +8.28.109.10 +8.243.126.5 +172.64.47.45 +204.199.66.76 +172.64.37.39 +190.216.19.24 +8.243.126.20 +162.159.36.64 +190.181.21.50 +172.64.47.186 +128.199.128.150 +139.134.5.51 +200.125.168.132 +8.28.109.11 +172.64.36.36 +162.159.57.131 +1.0.218.58 +202.248.37.74 +75.73.8.108 +162.159.57.180 +171.251.51.138 +123.31.40.97 +205.151.222.250 +162.159.56.210 +1.0.215.132 +80.67.169.40 +172.64.36.112 +51.75.69.222 +83.69.179.25 +172.64.36.82 +172.65.25.255 +162.159.36.86 +35.167.25.37 +14.225.232.26 +38.132.106.139 +67.73.245.163 +1.0.216.220 +1.0.214.16 +8.21.123.80 +216.55.99.220 +172.64.37.161 +162.159.51.95 +41.65.236.61 +98.194.45.193 +8.21.123.117 +172.64.36.240 +190.216.254.200 +189.125.208.154 +9.9.9.12 
+200.10.231.110 +49.231.140.120 +172.64.36.32 +144.76.83.104 +5.2.75.75 +123.30.184.141 +172.64.37.166 +172.64.36.237 +210.87.250.155 +201.234.119.52 +204.199.81.217 +193.111.144.145 +89.161.27.84 +8.28.109.60 +8.28.109.62 +45.225.123.235 +172.64.37.95 +200.55.19.247 +75.71.5.153 +172.64.36.168 +209.247.118.8 +76.76.2.3 +190.217.31.74 +172.64.36.100 +12.201.176.131 +162.159.50.42 +172.64.37.111 +172.64.36.207 +172.64.37.5 +8.12.246.73 +72.237.206.30 +64.156.223.254 +162.159.50.140 +172.64.36.102 +172.64.36.88 +172.64.37.87 +65.56.156.249 +45.225.123.253 +146.70.66.227 +5.164.31.108 +92.247.142.182 +202.136.163.11 +202.43.108.2 +24.125.55.212 +172.64.36.63 +172.64.36.124 +162.159.50.187 +27.76.137.76 +45.67.219.208 +172.64.37.35 +206.253.33.131 +174.69.40.233 +8.242.29.210 +162.159.51.116 +8.14.63.84 +172.64.37.211 +1.1.1.2 +172.64.37.200 +172.64.36.120 +172.64.36.0 +204.97.212.10 +172.64.37.30 +162.159.51.97 +172.64.36.140 +63.211.67.252 +64.195.220.221 +162.159.36.136 +185.222.222.222 +172.64.47.166 +172.64.36.51 +172.64.36.245 +45.90.30.169 +195.60.70.5 +1.0.218.57 +118.69.170.36 +81.116.141.156 +80.178.170.176 +5.11.11.11 +172.64.46.53 +200.41.50.3 +66.92.224.2 +203.146.237.237 +172.64.37.147 +8.224.34.74 +68.105.172.184 +172.64.37.29 +37.120.193.219 +45.90.30.12 +91.201.255.46 +1.0.0.19 +5.164.27.185 +66.162.13.184 +203.113.172.91 +72.237.212.21 +8.9.117.14 +172.64.46.127 +172.64.37.140 +110.35.78.65 +13.79.26.62 +162.159.51.248 +64.76.25.118 +50.233.102.227 +60.251.117.118 +172.64.36.93 +71.60.139.121 +190.216.19.30 +172.64.36.46 +162.159.50.138 +172.64.37.223 +8.243.126.3 +156.154.70.10 +204.199.35.164 +103.7.172.8 +216.146.36.36 +162.159.57.170 +4.0.0.53 +162.159.46.8 +172.64.36.81 +4.1.46.238 +88.208.244.225 +204.199.248.34 +202.78.224.129 +70.171.51.41 +162.159.46.214 +199.76.39.107 +4.7.43.100 +202.180.160.1 +172.64.47.44 +107.170.225.126 +82.221.128.44 +209.244.104.183 +98.195.112.169 +162.159.50.233 +85.132.85.85 +45.167.181.34 +1.0.214.195 +8.14.63.85 +45.225.123.162 +176.103.130.131 +8.26.56.15 +204.199.115.162 +198.77.228.151 +162.159.46.172 +209.247.111.148 +172.64.36.13 +172.64.46.27 +172.64.37.153 +172.64.36.165 +45.79.120.233 +109.228.0.226 +176.9.93.198 +193.135.143.37 +172.64.36.20 +209.234.196.12 +37.120.142.115 +50.220.47.51 +62.140.239.1 +85.9.129.36 +172.64.36.97 +156.154.70.5 +193.230.161.4 +172.64.36.105 +8.224.99.1 +204.199.122.6 +172.64.36.188 +185.51.92.108 +204.199.114.147 +204.199.97.171 +172.64.47.18 +172.64.36.162 +172.64.37.47 +190.216.73.28 +172.64.36.201 +24.113.32.30 +8.242.48.20 +8.29.3.219 +74.202.142.162 +70.171.61.101 +172.64.36.95 +64.105.172.26 +195.158.0.5 +165.231.253.163 +172.64.37.135 +45.90.30.11 +172.64.37.231 +185.5.17.19 +172.107.154.211 +162.159.51.11 +162.159.56.39 +172.64.36.178 +203.129.25.106 +45.225.123.179 +8.243.126.126 +8.29.3.222 +1.0.209.8 +193.238.77.61 +8.40.106.22 +172.64.37.50 +209.136.132.135 +172.64.36.153 +172.64.47.50 +8.30.101.118 +8.243.126.26 +1.0.247.215 +1.0.235.168 +8.24.104.109 +64.105.202.138 +172.64.47.224 +193.135.143.25 +204.199.130.156 +41.65.236.54 +8.243.126.73 +149.112.112.11 +172.64.37.175 +45.225.123.232 +172.64.37.58 +172.64.36.205 +72.198.188.68 +123.176.31.226 +172.64.36.185 +1.0.0.3 +172.64.37.97 +66.115.98.85 +103.48.78.157 +1.0.202.123 +149.156.132.100 +172.64.36.57 +103.160.248.45 +8.242.172.201 +172.64.37.209 +9.9.9.10 +162.159.50.221 +66.92.159.2 +162.159.50.133 +8.26.56.7 +172.64.36.156 +8.242.215.93 +172.64.37.191 +1.0.238.254 +8.243.126.116 +8.243.126.23 +176.214.35.182 +64.128.29.80 
+45.90.30.22 +172.64.37.206 +14.238.96.162 +172.64.47.85 +123.30.27.24 +73.54.161.169 +162.159.56.21 +184.187.144.144 +172.64.37.120 +172.64.37.74 +75.150.197.154 +8.243.126.127 +8.28.109.44 +203.2.193.67 +172.64.36.50 +200.125.171.220 +64.76.23.53 +1.0.170.91 +172.64.37.213 +1.0.216.157 +164.52.192.24 +69.174.153.224 +49.156.53.165 +103.85.104.42 +204.199.33.244 +8.29.3.228 +185.74.5.5 +172.64.47.242 +162.159.56.242 +1.0.215.109 +8.28.109.117 +176.58.126.9 +162.159.36.110 +50.201.178.59 +172.64.37.93 +1.0.212.74 +204.199.6.86 +70.35.213.226 +172.64.36.87 +8.243.126.123 +8.28.109.106 +95.143.220.5 +202.87.213.253 +162.159.50.46 +66.162.142.38 +88.204.203.34 +223.6.6.6 +213.249.127.70 +8.29.3.220 +149.112.112.112 +162.159.36.247 +1.0.218.46 +202.78.224.130 +72.52.104.74 +196.27.105.130 +172.64.37.34 +162.159.46.197 +24.170.199.20 +172.64.37.82 +205.171.3.65 +190.216.241.5 +202.44.52.1 +76.76.2.2 +172.64.36.145 +63.209.154.100 +8.29.3.70 +162.159.56.33 +31.7.36.36 +98.249.57.2 +211.115.194.1 +4.1.131.250 +45.225.123.213 +85.204.79.2 +162.159.46.167 +172.64.36.244 +162.159.50.248 +172.64.37.79 +180.182.54.2 +66.251.199.51 +162.159.51.69 +172.64.36.89 +172.64.47.102 +172.64.36.38 +193.135.143.27 +172.64.36.60 +4.7.98.154 +8.28.109.99 +4.14.233.222 +64.119.80.100 +14.225.24.84 +66.162.85.79 +8.21.24.71 +162.159.57.81 +45.90.28.27 +172.64.37.159 +1.0.169.118 +172.64.36.214 +172.64.47.178 +201.234.235.90 +110.145.178.74 +103.196.38.39 +172.64.37.250 +129.250.35.250 +172.64.37.118 +172.64.37.141 +24.119.106.138 +190.216.67.52 +190.217.8.247 +185.43.135.1 +209.200.84.27 +45.225.123.172 +76.76.10.5 +94.198.41.235 +77.88.8.1 +50.58.191.11 +165.246.10.2 +172.64.36.84 +172.64.37.230 +181.224.160.14 +76.76.2.1 +198.54.117.10 +172.64.37.151 +195.129.111.50 +172.64.36.131 +172.64.36.75 +195.243.99.35 +41.65.236.53 +8.243.126.131 +162.159.57.86 +172.64.37.237 +162.159.36.216 +37.120.217.75 +190.216.251.5 +8.29.2.36 +1.0.162.56 +162.159.51.155 +147.0.63.59 +68.1.86.231 +24.99.148.61 +1.0.168.129 +64.76.25.123 +193.135.143.5 +172.64.37.248 +98.38.222.51 +45.90.28.29 +1.0.246.54 +204.199.85.179 +212.73.221.104 +1.0.216.26 +201.234.86.130 +156.200.116.73 +96.64.201.177 +172.64.37.240 +1.0.136.237 +1.1.220.28 +98.255.2.112 +45.90.30.126 +203.89.200.6 +8.38.117.156 +45.225.123.236 +45.90.28.11 +66.28.0.45 +204.199.81.94 +12.127.17.71 +162.159.46.147 +193.138.92.130 +8.25.185.131 +203.39.3.133 +118.69.187.252 +108.56.80.135 diff --git a/bbot/wordlists/paramminer_headers.txt b/bbot/wordlists/paramminer_headers.txt new file mode 100644 index 0000000000..3fe2366059 --- /dev/null +++ b/bbot/wordlists/paramminer_headers.txt @@ -0,0 +1,1150 @@ +accept +accept-charset +accept-encoding +accept-language +accept-ranges +access-control-allow-credentials +access-control-allow-headers +access-control-allow-methods +access-control-allow-origin +access-control-expose-headers +access-control-max-age +access-control-request-headers +access-control-request-method +age +allow +authorization +authenticate +cache-control +connection +contact +content-disposition +content-encoding +content-language +content-length +content-location +content-range +content-security-policy +content-security-policy-report-only +content-type +cookie +cookie2 +dnt +date +destination +etag +expect +expires +forwarded +from +host~%h:%s +if-match +if-modified-since +if-none-match +if-range +if-unmodified-since +keep-alive +large-allocation +last-modified +location +origin~https://%s.%h +pragma +profile +proxy-authenticate +proxy-authorization +public-key-pins 
+public-key-pins-report-only +range +referer~http://%s.%h/ +referrer-policy +report-to +retry-after +server +set-cookie +set-cookie2 +sourcemap +strict-transport-security +te +timing-allow-origin +tk +trailer +transfer-encoding +upgrade-insecure-requests +user-agent +vary +via +www-authenticate +warning +x-content-type-options +x-dns-prefetch-control +x-forwarded-for +x-forwarded-host~%s.%h +x-forwarded-proto +x-forwarded-port +x-forwarded-prefix +front-end-https +x-forwarded-protocol +x-forwarded-ssl +x-url-scheme +x-cluster-client-ip +x-forwarded-server~%s.%h +proxy-host +x-wap-profile +x-original-url +x-rewrite-url +x-http-destinationurl +proxy-connection +x-uidh +true-client-ip +request-uri +orig_path_info +client-ip +x-real-ip +x-originating-ip +cf-ipcountry +cf-visitor +remote-userhttps +server-software +web-server-api +remote-addr +remote-host +remote-user +request-method +script-name +path-info +unencoded-url +x-arr-ssl +x-arr-log-id +soapaction +x-original-http-command +x-server-name +x-server-port +query-string +auth-password +auth-type +auth-user +cert-cookie +cert-flags +cert-issuer +cert-keysize +cert-secretkeysize +cert-serialnumber +cert-server-issuer +cert-server-subject +cert-subject +cf-template-path +context-path +gateway-interface +https-keysize +https-secretkeysize +https-server-issuer +https-server-subject +http-accept +http-accept-encoding +http-accept-language +http-connection +http-cookie +http-host +http-referer +http-url +http-user-agent +local-addr +path-translated +server-name +server-port +server-port-secure +server-protocol +cloudfront-viewer-country +x-scheme +x-cascade +x-http-method-override +x-http-path-override +x-http-host-override +x-http-method +x-method-override +x-cf-url +php-auth-user +php-auth-pw +error +post-vars +raw-post-data +proxy-request-fulluri +request +server-varsabantecart +accept-application +accept-auth +accept-encodxng +accept-version +action +admin +akamai-origin-hop +app +app-key +apply-to-redirect-ref +atcept-language +auth-digest-ie +auth-key +auth-realm +base-url +bearer-indication +browser-user-agent +case-files +category +ch +challenge-response +charset +client-address +client-bad-request +client-conflict +client-error-connect +client-expectation-failed +client-forbidden +client-gone +client-length-required +client-method-not-allowed +client-not-acceptable +client-not-found +client-payment-required +client-precondition-failed +client-proxy-auth-required +client-quirk-mode +client-requested-range-not-possible +client-request-timeout +client-request-too-large +client-request-uri-too-large +client-unauthorized +client-unsupported-media-type +cloudinary-name +cloudinary-public-id +cloudinaryurl +cloudinary-version +compress +connection-type +content +content-type-xhtml +cookies +core-base +credentials-filepath +curl +curl-multithreaded +custom-secret-header +dataserviceversion +destroy +devblocksproxybase +devblocksproxyhost +devblocksproxyssl +digest +dir +dir-name +dir-resource +disable-gzip +dkim-signature +download-bad-url +download-cut-short +download-mime-type +download-no-server +download-size +download-status-not-found +download-status-server-error +download-status-unauthorized +download-status-unknown +download-url +env-silla-environment +espo-authorization +espo-cgi-auth +eve-charid +eve-charname +eve-solarsystemid +eve-solarsystemname +ex-copy-movie +ext +fake-header +fastly-client-ip +fb-appid +fb-secret +filename +file-not-found +files +files-vars +foo-bar +force-language +force-local-xhprof +forwarded-proto +fromlink 
+givenname +global-all +global-cookie +global-get +global-post +google-code-project-hosting-hook-hmac +h0st +home +host-liveserver +host-name +host-unavailable +http-authorization +if-modified-since-version +if-posted-before +if-unmodified-since-version +images +info +ischedule-version +iv-groups +iv-user +jenkins +kiss-rpc +last-event-id +local-dir +mail +max-conn +maxdataserviceversion +max-request-size +max-uri-length +message +message-b +mode +mod-env +mod-security-message +module-class +module-class-path +module-name +ms-asprotocolversion +msisdn +my-header +mysqlport +native-sockets +nonce +not-exists +notification-template +onerror-return +organizer +params-get-catid +params-get-currentday +params-get-disposition +params-get-downwards +params-get-givendate +params-get-lang +params-get-type +passkey +path-base +path-themes +phpthreads +portsensor-auth +post-error +postredir-301 +postredir-302 +postredir-all +protocol +protocols +proxy-agent +proxy-http-1-0 +proxy-pwd +proxy-socks4a +proxy-socks5-hostname +proxy-url +pull +querystring +realip +real-ip +real-method +reason +reason-phrase +redirected-accept-language +redirection-found +redirection-multiple-choices +redirection-not-modified +redirection-permanent +redirection-see-other +redirection-temporary +redirection-unused +redirection-use-proxy +redirect-problem-withoutwww +redirect-problem-withwww +ref +referer +refresh +remix-hash +remote-host-wp +request-method- +response +rest-key +returned-error +rlnclientipaddr +safe-ports-list +safe-ports-ssl-list +schedule-reply +sec-websocket-accept +sec-websocket-extensions +sec-websocket-key1 +sec-websocket-key2 +sec-websocket-origin +sec-websocket-protocol +sec-websocket-version +self +send-x-frame-options +server-bad-gateway +server-error +server-gateway-timeout +server-internal +server-not-implemented +server-service-unavailable +server-unsupported-version +session-id-tag +shib- +shib-identity-provider +shib-logouturl +shopilex +sn +socketlog +somevar +sp-client +ssl-offloaded +sslsessionid +ssl-session-id +status- +status-403 +status-403-admin-del +status-404 +status-code +status-platform-403 +success-accepted +success-created +success-no-content +success-non-authoritative +success-ok +success-partial-content +success-reset-content +test +test-config +test-server-path +test-something-anything +ticket +time-out +tmp +translate +ua-color +ua-resolution +ua-voice +unit-test-mode +upgrade +uri +url-sanitize-path +use-gzip +useragent-via +user-email +user-id +user-photos +util +verbose +versioncode +x-aastra-expmod1 +x-aastra-expmod2 +x-aastra-expmod3 +x-accel-mapping +x-advertiser-id +x-ajax-real-method +x-alto-ajax-keyz +x-api-signature +x-api-timestamp +x-apple-client-application +x-apple-store-front +x-authentication +x-authentication-key +x-auth-mode +x-authorization +x-auth-password +x-auth-service-provider +x-auth-token +x-auth-userid +x-auth-username +x-avantgo-screensize +x-azc-remote-addr +x-bear-ajax-request +x-bluecoat-via +x-browser-height +x-browser-width +x-cache +x-cept-encoding +x-chrome-extension +x-cisco-bbsm-clientip +x-client-host +x-client-id +x-clientip +x-client-key +x-client-os +x-client-os-ver +x-collect-coverage +x-credentials-request +x-csrf-crumb +x-cuid +x-custom +x-dagd-proxy +x-davical-testcase +x-debug-test +x-dialog +x-drestcg +x-dsid +x-enable-coverage +x-environment-override +x-experience-api-version +x-fb-user-remote-addr +x-file-id +x-file-resume +x-foo-bar +x-forwarded-for-original +x-forwarder-for +x-forward-proto +x-from +x-gb-shared-secret 
+x-geoip-country +x-get-checksum +x-helpscout-event +x-hgarg- +x-host +x-https +x-htx-agent +x-if-unmodified-since +x-imbo-test-config +x-insight +x-ip +x-ip-trail +x-iwproxy-nesting +x-jphone-color +x-jphone-geocode +x-kaltura-remote-addr +x-known-signature +x-known-username +x-litmus-second +x-machine +x-mandrill-signature +x-mobile-ua +x-mosso-dt +x-msisdn +x-ms-policykey +x-myqee-system-debug +x-myqee-system-hash +x-myqee-system-isadmin +x-myqee-system-isrest +x-myqee-system-pathinfo +x-myqee-system-project +x-myqee-system-rstr +x-myqee-system-time +x-network-info +x-nfsn-https +x-ning-request-uri +x-nokia-connection-mode +x-nokia-msisdn +x-nokia-wia-accept-original +x-nokia-wtls +x-nuget-apikey +x-opera-info +x-operamini-features +x-orchestra-scheme +x-orig-client +x-original-host +x-originally-forwarded-for +x-originally-forwarded-proto +x-original-remote-addr +x-overlay +x-pagelet-fragment +x-password +xpdb-debugger +x-phabricator-csrf +x-phpbb-using-plupload +xproxy +x-proxy-url +x-pswd +x-qafoo-profiler +x-remote-protocol +x-render-partial +x-request +x-request-id +x-request-start +x-response-format +x-rest-cors +x-sakura-forwarded-for +x-scalr-auth-key +x-scalr-auth-token +x-scalr-env-id +x-screen-height +x-screen-width +x-sendfile-type +x-serialize +x-serial-number +x-server-id +x-sina-proxyuser +x-skyfire-screen +x-ssl +x-subdomain +x-teamsite-preremap +x-test-session-id +x-tine20-jsonkey +x-tine20-request-type +x-tomboy-client +x-tor +x-twilio-signature +x-uniquewcid +x-up-calling-line-id +x-up-devcap-screendepth +x-upload-content-type +x-upload-maxresolution +x-upload-name +x-upload-size +x-upload-type +x-user-agent +x-username +x-verify-credentials-authorization +x-wap-client-sdu-size +x-wap-gateway +x-wap-network-client-ip +x-wap-network-client-msisdn +x-wap-proxy-cookie +x-wap-session-id +x-wap-tod +x-wap-tod-coded +x-wopi-override +x-wikimedia-debug +x-wp-pjax-prefetch +x-ws-api-key +x-xc-schema-version +x-xhprof-debug +x-xhr-referer +x-xmlhttprequest +x-xpid +xxx-real-ip +xxxxxxxxxxxxxxx +x-zikula-ajax-token +x-zotero-version +x-ztgo-bearerinfo +y +zotero-api-version +zotero-write-token +access-token +ajax +app-env +bae-env-addr-bcms +bae-env-addr-bus +bae-env-addr-channel +bae-logid +basic +catalog +clientip +debug +delete +enable-gzip +enable-no-cache-headers +error-1 +error-2 +error-3 +error-4 +eve-trusted +fire-breathing-dragon +format +gzip-level +head +hosti +htaccess +image +incap-client-ip +local-content-sha1 +on-behalf-of +options +password +pink-pony +proxy-password +put +request2-tests-base-url +request2-tests-proxy-host +request-timeout +rest-sign +root +support-events +token +user +useragent +user-mail +user-name +version-none +viad +x +x-access-token +x-amz-date +x-amz-server-side-encryption +x-auth-key +x-auth-user +x-confirm-delete +x-do-not-track +x-elgg-nonce +x-expected-entity-length +x-filename +x-flash-version +x-flx-consumer-key +x-flx-consumer-secret +x-flx-redirect-url +x-forwarded-scheme +x-jphone-msname +x-options +x-os-prefs +x-pjax-container +x-request-timestamp +x-rest-password +x-rest-username +x-te +x-unique-id +x-up-devcap-iscolor +accesskey +auth-any +auth-basic +auth-digest +auth-gssneg +auth-ntlm +code +cookie-httponly +cookie-parse-raw +cookie-secure +deflate-level-def +deflate-level-max +deflate-level-min +deflate-strategy-def +deflate-strategy-filt +deflate-strategy-fixed +deflate-strategy-huff +deflate-strategy-rle +deflate-type-gzip +deflate-type-raw +deflate-type-zlib +e-encoding +e-header +e-invalid-param +e-malformed-headers 
+e-message-type +encoding-stream-flush-full +encoding-stream-flush-none +encoding-stream-flush-sync +e-querystring +e-request +e-request-method +e-request-pool +e-response +e-runtime +e-socket +e-url +get +header +http-phone-number +ipresolve-any +ipresolve-v4 +ipresolve-v6 +link +meth-acl +meth-baseline-control +meth-checkin +meth-checkout +meth-connect +meth-copy +meth-label +meth-lock +meth-merge +meth-mkactivity +meth-mkcol +meth-mkworkspace +meth-move +meth-options +meth-propfind +meth-proppatch +meth-report +meth-trace +meth-uncheckout +meth-unlock +meth-update +meth-version-control +msg-none +msg-request +msg-response +oc-chunked +ocs-apirequest +params-allow-comma +params-allow-failure +params-default +params-raise-error +path +phone-number +pragma-no-cache +proxy-http +proxy-socks4 +proxy-socks5 +querystring-type-array +querystring-type-bool +querystring-type-float +querystring-type-int +querystring-type-object +querystring-type-string +redirect +redirect-found +redirect-perm +redirect-post +redirect-proxy +redirect-temp +refferer +requesttoken +sec-ch-ua +sec-ch-ua-arch +sec-ch-ua-bitness +sec-ch-ua-full-version-list +sec-ch-ua-mobile +sec-ch-ua-model +sec-ch-ua-platform +sec-ch-ua-platform-version +sec-fetch-dest +sec-fetch-mode +sec-fetch-site +sec-fetch-user +sec-websocket-key +sp-host +ssl +ssl-version-any +status-bad-request +status-forbidden +support +support-encodings +support-magicmime +support-requests +support-sslrequests +surrogate-capability +ua +upload-default-chmod +url +url-from-env +verbose-throttle +version-1-0 +version-1-1 +version-any +webodf-member-id +webodf-session-id +webodf-session-revision +work-directory +x- +x-api-key +x-apitoken +x-csrftoken +x-elgg-apikey +x-elgg-hmac +x-elgg-hmac-algo +x-elgg-posthash +x-elgg-posthash-algo +x-elgg-time +x-foo +x-forwarded-by +x-json +x-litmus +x-locking +x-oc-mtime +x-remote-addr +x-request-signature +x-ua-device +x-update-range +x-varnish +x-wp-nonce +auth +brief +chunk-size +client +download-attachment +download-bz2 +download-e-headers-sent +download-e-invalid-archive-type +download-e-invalid-content-type +download-e-invalid-file +download-e-invalid-param +download-e-invalid-request +download-e-invalid-resource +download-e-no-ext-mmagic +download-e-no-ext-zlib +download-inline +download-tar +download-tgz +download-zip +header-lf +header-status-client-error +header-status-informational +header-status-redirect +header-status-server-error +header-status-successful +https-from-lb +meth-delete +meth-head +meth-post +multipart-boundary +originator +php +recipient +request-error +request-vars +secretkey +status-ok +xauthorization +x-codeception-codecoverage +x-codeception-codecoverage-config +x-codeception-codecoverage-debug +x-codeception-codecoverage-suite +x-csrf-token +x-dokuwiki-do +x-helpscout-signature +x-nokia-bearer +xonnection +x-purpose +xroxy-connection +x-user +bae-env-appid +catalog-server +cookie-path +custom-header +forwarded-for-ip +meth-get +meth-put +opencart +unless-modified-since +www-address +x-content-type +x-hub-signature +x-signature +bae-env-addr-sql-ip +bae-env-addr-sql-port +cache-info +client-error-cannot-access-local-file +client-error-cannot-connect +client-error-communication-failure +client-error-invalid-parameters +client-error-invalid-server-address +client-error-no-error +client-error-protocol-failure +client-error-unspecified-error +error-formatting-html +lock-token +onerror-continue +onerror-die +overwrite +prefer +shib-application-id +x-fireloggerauth +cookie-domain +https +meth- 
+modauth +port +post +read-state-begin +read-state-body +read-state-headers +socket-connection-err +str-match +transport-err +coming-from +nl +ua-pixels +x-coming-from +x-jphone-display +x-up-devcap-screenpixels +x-whatever +appname +proxy-port +version +x-forward-for +proxy-user +x-em-uid +x-file-type +bar +proxy +timeout +referrer +x-forwarded-ssl +x-jphone-uid +x-file-size +accepted +appcookie +bad-gateway +bae-env-addr-bcs +conflict +continue +created +expectation-failed +failed-dependency +gateway-time-out +gone +insufficient-storage +internal-server-error +length-required +locked +method-not-allowed +moved-permanently +moved-temporarily +multiple-choices +multi-status +no-content +non-authoritative +not-acceptable +not-extended +not-implemented +not-modified +partial-content +payment-required +precondition-failed +processing +proxy-authentication-required +range-not-satisfiable +request-entity-too-large +request-time-out +request-uri-too-large +reset-content +see-other +service-unavailable +switching-protocols +temporary-redirect +unprocessable-entity +unsupported-media-type +upgrade-required +use-proxy +variant-also-varies +version-not-supported +x-operamini-phone +bad-request +forbidden +unauthorized +user-agent-via +appversion +not-found +url-strip- +x-pjax +cf-connecting-ip +x-dcmguid +foo +info-download-size +info-download-time +info-return-code +info-total-request-stat +info-total-response-stat +x-firelogger +content-md5 +x-up-subno +bae-env-ak +bae-env-sk +if +ok +url-join-path +url-join-query +url-replace +url-strip-all +url-strip-auth +url-strip-fragment +url-strip-pass +url-strip-path +url-strip-port +url-strip-query +url-strip-user +depth +x-file-name +x-moz +x-ucbrowser-device-ua +device-stock-ua +mod-rewrite +x-nokia-ipaddress +x-bolt-phone-ua +x-original-user-agent +x-skyfire-phone +title +ssl-https +request-error-file +request-error-gzip-crc +request-error-gzip-data +request-error-gzip-method +request-error-gzip-read +request-error-proxy +request-error-redirects +request-error-response +request-error-url +slug +x-att-deviceid +authentication +x-firephp-version +x-mobile-gateway +request-mbstring +x-device-user-agent +x-huawei-userid +x-orange-id +x-vodafone-3gpdpcontext +x-wap-clientid +ua-cpu +wap-connection +x-nokia-gateway-id +ua-os +body-maxlength +body-truncated +max-forwards +mimetype +verify-cert +request-http-ver-1-0 +request-http-ver-1-1 +request-method-delete +request-method-get +request-method-head +request-method-options +request-method-post +request-method-put +request-method-trace +x-operamini-phone-ua +status +x-update +method +forwarded-for +x-forwarded +scheme +x-forwarded-server +origin +x-client-ip +x-prototype-version +clientaddress +base +pc-remote-addr +post-files +session-vars +cookie-vars +env-vars +get-vars +server-vars +x-forwarded-host +x-requested-with +referer +host +alt-used +x-original-url~/%s +x-rewrite-url~/%s +command +__requesturi +__requestverb +x-http-status-code-override +x-amzn-remapped-host +x-amz-website-redirect-location +x-up-devcap-post-charset +http_sm_authdirname +http_sm_authdirnamespace +http_sm_authdiroid +http_sm_authdirserver +http_sm_authreason +http_sm_authtype +http_sm_dominocn +http_sm_realm +http_sm_realmoid +http_sm_sdomain +http_sm_serveridentityspec +http_sm_serversessionid +http_sm_serversessionspec +http_sm_sessiondrift +http_sm_timetoexpire +http_sm_transactionid +http_sm_universalid +http_sm_user +http_sm_userdn +http_sm_usermsg +x-remote-ip +traceparent +tracestate diff --git 
a/bbot/wordlists/paramminer_parameters.txt b/bbot/wordlists/paramminer_parameters.txt new file mode 100644 index 0000000000..501878d987 --- /dev/null +++ b/bbot/wordlists/paramminer_parameters.txt @@ -0,0 +1,6523 @@ +id +user +account +number +order +no +doc +key +email +group +profile +edit +report +daemon +upload +dir +execute +download +log +ip +cli +cmd +file +document +folder +root +path +pg +style +pdf +template +php_path +select +role +update +query +name +sort +where +search +params +process +row +view +table +from +sel +results +sleep +fetch +keyword +column +field +delete +string +filter +dest +redirect +uri +continue +url +window +next +data +reference +site +html +val +validate +domain +callback +return +page +feed +host +port +to +out +show +navigation +open +preview +activity +content +access +admin +dbg +debug +grant +test +alter +clone +create +disable +enable +exec +load +make +modify +rename +reset +shell +utm_content +toggle +adm +cfg +config +action +_method +password +type +username +title +code +q +submit +token +message +t +c +mode +lang +p +status +start +charset +description +s +post +excerpt +login +comment +step +ajax +state +f +error +save +format +tab +offset +a +limit +do +plugin +theme +text +language +height +logout +pass +h +value +filename +year +version +subject +m +u +confirm +width +w +size +date +source +GLOBALS +op +method +uid +tag +category +target +ids +term +new +locale +author +paged +cat +msg +add +d +day +nonce +captcha +output +revision +i +xml +db +time +section +image +r +files +tags +users +send +updated +skips +n +check +orderby +num +import +prefix +fields +pwd +pid +month +module +parent +cancel +activate +checked +success +desc +case +remove +position +location +extra +count +b +rating +pass2 +hostname +move +hash +dry +cid +body +src +level +generate +g +dbname +option +userid +sql +options +address +activated +action2 +password2 +pass1 +meta +ID +deleted +act +e +taxonomy +ref +publish +secret +app +rememberme +country +phone +hidden +force +export +sticky +nickname +v +plugins +locked +command +returnUrl +item +amount +timestamp +server +signature +part +json +del +comments +visible +LoginForm +keywords +enabled +base +refresh +foo +y +media +info +guid +dt +x +testdata +list +visibility +User +thumb +stage +history +timezone +upgrade +menu +items +class +blog +link +end +dbhost +approved +stylesheet +sid +settings +postid +deactivate +closed +posted +noheader +ContactForm +tax +ss +inline +gid +attachments +added +replytocom +dismiss +clear +city +spam +request +all +sidebar +dbuser +checkbox +short +active +session +registration +hh +price +nsql +mm +loggedout +lastname +SMALLER +saved +rsd +ps +newcontent +mn +linkurl +jj +install +hidem +firstname +detached +color +clearsql +checkemail +BIGGER +aa +slug +remember +referrer +reason +o +note +referredby +l +deletepost +dbpass +attached +tid +testcookie +noredir +newcat +monthnum +metakeyinput +insertonlybutton +input +form +failure +down +deletemeta +deletecomment +context +backto +undismiss +sitename +service +resetheader +print +phperror +oitar +metavalue +metakeyselect +mail +liveupdate +linkcheck +deletebookmarks +changeit +answers +addmeta +trashed +fid +back +selection +mod +label +img +features +direction +uname +sidebars +hide +auth +untrashed +task +submitted +database +addnew +Submit +purge +notes +editwidget +removewidget +nrows +groups +disabled +zip +trash +repair +overwrite +referer +themes +mid +defaults +custom +ctype +widget +topic +main +js +blogname +untrash 
+unspammed +unspam +spammed +selectall +quantity +newuser +networkwide +invalid +index +function +screen +reply +lat +gender +find +display +directory +batch +alt +set +scrollto +fwidth +fheight +sub +same +rows +reauth +notify +confirmdelete +autosave +aid +vote +review +keys +destination +allusers +passwd +change +apage +allblogs +private +noapi +charsout +catslist +categories +up +subscribe +script +removeheader +pos +period +nocache +kill +columns +api +z +sortby +register +recovered +pagenum +last +event +customized +attachment +answer +welcome +timeout +scope +rid +result +public +payload +ns +mobile +css +align +what +rank +qqfile +max +createuser +background +avatar +alias +total +question +priority +days +cache +skin +schema +orientation +groupid +done +summary +skipped +range +go +dump +confirmation +CKEditorFuncNum +changes +ticket +pw +pointer +param +first +entry +drop +default +selected +popup +owner +nolog +nochange +length +goto +company +Comment +close +website +st +skip +restart +pages +node +localize +fname +except +Type +restore +profiler +previewed +password1 +NewFolderName +lng +left +layout +k +fn +flag +doaction2 +details +currency +copy +compare +broken +block +paper +line +jax +icon +flush +fileName +dl +controller +catid +PayerID +newname +flash +decomposition +confirmed +chromeless +bid +yes +weight +verify +values +run +route +replace +read +project +Post +PHPSESSID +nid +md5 +map +logopng +listInfo +letter +hour +fullname +exclude +dbprefix +authors +zoom +userId +trigger +setting +rs +provider +package +operation +ok +object +mark +lid +invoice +insertonly +full +forum +err +doit +backup +ac +sent +phpThumbDebug +photo +interval +editor +echostr +channel +args +agree +WPLANG +userspage +usersearch +triggers +insert +inc +homepage +hello +func +duration +did +cookie +contact +chunk +apply +terms +tables +startdate +shortcode +scale +reverse +required +origin +indexes +identifier +hashed +fontcolor +databases +approve +advanced +webfile +urls +types +toggledisplay +subaction +sortorder +sign +sEcho +searchtype +saveasdraft +rss +recipient +prev +notice +njlowercolor +njform +njfontcolor +members +member +md5s +init +hs +headerimage +header +fontdisplay +finish +fax +engine +current +client +cc +callf +article +ver +ts +roles +region +raw +qid +old +nick +model +lon +lock +iDisplayLength +ext +expire +enddate +empty +chunks +album +userselect +userName +telephone +stats +saveauthors +right +revert +response +news +lname +images +highlight +frob +embed +denied +dccharset +contents +compress +Command +area +aim +accept +vid +unit +undeleted +thread +textinputs +textcolor +store +sqlite +showall +rsargs +reload +record +posts +pagenow +override +opt +opname +job +idx +help +groupname +filters +fileid +expand +entity +cp +clean +caption +apikey +verbose +var +tpl +topics +top +tablename +sSearch +sex +separator +scripts +rules +rt +rate +product +prepopulate +pgtIou +pgtId +pgsql +permissions +oracle +oldpass +mssql +modules +labels +get +foldername +family +delimiter +CurrentFolder +choice +box +autologin +age +again +actions +wysiwyg +word +userID +unsort +uninstall +unfoldmenu +support +startDate +standalone +since +score +runtests +regex +published +proxy +points +phrase +oldpassword +oid +noajax +newpassword +newName +minute +mac +langCode +iDisplayStart +genre +From +font +emails +eid +dst +device +demo +deletefile +cropDetails +connection +collation +cms +attributes +attribute +as +adduser +zone +zipcode +words +viewtype +usr +To +ssl +single +sendmail 
+protocol +phpinfo +perpage +newsletter +newsid +names +Name +min +logoutRequest +logo +interface +frequency +firstName +dbName +criteria +by +button +break +bg +ban +authorize +artist +allow +un +stripeToken +resize +replyto +remote +random +products +pic +perms +parentid +original +opener +namespace +mime +loc +lastName +jabber +global +forums +foo1 +FileName +endpoint +Email +detail +descr +deny +delall +customer +copyright +compression +collection +address2 +yim +week +unsubscribe +truncate +tableName +speed +sortOrder +sig +share +servername +sections +room +resource +req +qty +perm +orderid +operator +noconfirmation +newFileName +makedoc +license +graph +frame +duplicate +discount +created +clearcache +CKEditor +auto +after +about +wsdl +video +uploaded +unban +thumbnail +subtitle +stop +startIndex +sorttype +snippet +silent +sessionid +sequence +sender +searchTerm +sd +sc +rule +reg +redir +quote +prune +productid +popupurl +popuptitle +pageid +oc +nom +newpass +memo +maxResults +iSortingCols +gateway +for +feedback +fcksource +extension +draft +dev +deleteall +csv +business +board +address1 +addr +addgroup +who +unread +ttl +temp +tagid +sure +subpage +stat +showThumbs +setup +res +queryType +postcode +permission +pending +pattern +passkey +nr +match +jsonp +itemid +invites +invite +foo6 +foo2 +filetype +fc +encoding +enc +em +element +discard +delay +def +dbpassword +currentFolder +course +commit +cols +challenge +call +branch +blogid +banned +array +archive +web +unlock +uniqid +txt +twitter +todo +threadid +team +system +storage +STATUS +sites +rollback +resettext +repeat +rem +receiver +rebuild +rebroadcast +re +quality +qq +Profile +privileges +primary +poll +Password +parameters +os +orderbydate +opauth +messages +maintenance +long +links +ignore +handler +forward +fileext +endDate +driver +docroot +deletepage +d2 +cron +control +configure +conditions +Collation +codepress +chart +bitrate +barcode +AuthItemForm +assign +adminpass +write +watch +switch +subtype +street +str +siteurl +shipping +salt +rev +returnto +repo +rel +RegistrationForm +r2 +pre +player +place +pk +person +permalink +pc +payment +pagename +other +openid +notifications +nojs +newPassword +newdir +network +multi +mailbox +lowercase +layer +jsoncallback +itemName +isbn +iid +grade +game +expires +expiration +encode +edited +dropped +domains +dept +dbtype +conf +col +cname +char +browse +bio +banner +balance +asc +anonymous +announcement +xmldump +UserRecoveryForm +UserLogin +UserChangePassword +USER +updates +tx +tweet +trust +track +topicid +tool +timeformat +tb +step2 +ssid +sendto +season +Search +schedule +scan +sa +repassword +reinstall +realname +radius +px +proxyuser +ProfileField +pmid +pm +picture +paymentType +param2 +nopass +newfolder +mysql +multiple +Message +longitude +logtype +loader +latitude +languages +join +ipaddress +instance +iframe +id2 +hours +home +groupId +gallery +ftp +friends +footer +fld +fieldtype +feature +fail +explain +episode +email2 +EaseTemplateVer +distance +dirname +depth +delfile +decode +dbport +crop +cost +connect +confirmpassword +com +co +chk +child +categoryid +Body +birthdate +begin +before +BackURL +avatars +autofocus +authenticate +at +aname +agreement +adminname +activkey +xajax +viewonline +unwatch +ui +typeid +th +templateid +targets +tagged +sw +super +subname +subform +subdir +strings +strict +statistics +starttime +spec +sord +snapshot +side +sh +serial +second +rewrite +retry +realm +rand +profiling +previous +preset +poster +policies +pn +platform 
+placement +pin +pID +php +parentID +pagination +pagesize +p2 +p1 +oldPassword +name2 +msn +moved +monitor +migrate +merge +maxage +mask +manufacturer +ls +loginname +ld +Lang +kid +include +idSelect +hook +goback +fs +frontpage +fontsize +filepath +Filename +filecontent +featured +fav +failed +extend +eventId +eventid +endtime +editid +div +delivery +dbUser +dbsize +dbPassword +DATA +dashboard +cursor +container +component +compact +colors +collapse +characters +ch +cats +cart +calendar +C +browser +brand +birthday +bcc +attr +apps +ad +zid +xajaxargs +which +warned +venue +uuid +usuario +usesubform +unique +undelete +uids +tz +torrent +titles +templates +templatename +targetid +TableList +syear +svg +suser +suffix +subtotal +submitorderby +submitoptions +State +staff +special +sortBy +sorder +sname +sm +sitemap +siteid +simpledb +signin +sidx +sID +ShowFunctionFields +shoutbox +sec +sample +revokeall +resume +resetpasskey +regenerate +recursive +recover +recipients +receipt +quota +quiet +queue +publisher +progress +program +problem +postsperpage +postId +pollid +playlist +paymentAmount +passphrase +pagetitle +pageSize +pageno +pageID +padding +otp +onserver +obfuscate +newvalue +newDir +mongo +moderator +modal +mimetype +mID +ma +lst +loop +lookup +loggedin +lastID +issue +intro +in +idp +head +handle +gz +groupID +gift +gID +funcs +fulltext +folderid +flags +fill +fieldname +feedurl +feeds +errors +entries +elastic +dontlimitchars +donor +dob +displayname +disp +des +department +delmarked +dbusername +dbstats +dateformat +crypt +credit +createview +cpu +cover +coppa +contentType +complete +Comments +commentid +cID +catorder +book +authkey +attach +articles +appname +appid +append +and +analyze +agreed +agent +adress +adminmail +addfolder +addcomment +accountid +y2 +x2 +WriteTags +with +wipe +why +wctx +vp +videoType +vcode +vbrmethod +userrole +userpass +Username +useremail +userdata +unsynchronizedtags +unstick +unsecuresubmit +unbookmark +ua +utm_source +utm_campaign +utm_medium +utm_term +typ +tv +tree +transfer +trackzero +TracksTotal +tracknoalbum +trackinalbum +Track +trace +tot +torrentid +Toolbar +TOKEN +todate +titlefeat +tipo +thumbs +tel +tc +tagtypes +tagname +TagFormatsToWrite +synchronizetagsfrom +sum +subdomain +stype +stub +struct +stock +stick +static +srv +split +sp +sn +smtp +sku +Skin +signout +showwysiwyg +showtagfiles +ShowMD5 +showfiles +shadow +selector +securesubmit +searchtext +searchKey +savemode +saveid +saveField +SAMLResponse +samemix +rpp +rolename +rights +returnURL +returnurl +restrict +resolve +rescanerrors +reorder +renamefileto +reminder +rememberMe +relative +recent +realName +radio +quickmod +qa +pw2 +psubmit +properties +projects +proceed +privacy +pretty +pname +phase +persistent +permanent +percent +pay +PASSWORD +passwd2 +partial +paid +orderId +oID +npassword +notmodrewrite +notapache +nonemptycomments +noalert +newUser +newscan +newpw +newpass2 +newpage +newfile +msgid +mrpage +more +money +moduleName +mlpage +mkdir +missingtrackvolume +minutes +minor +mensaje +md5datadupes +manager +m3utitle +m3ufilename +m3uartist +m3u +longurl +logs +Login +ln +lists +listid +listdirectory +linktype +lines +like +lib +KEY +itemType +itemId +isAjax +int +initial +grp +groupName +GenreOther +genredistribution +Genre +fullfolder +framed +formName +formid +formatdistribution +foldmenu +flip +fixid3v1padding +filetypelist +filesize +filenamepattern +filelist +fileextensions +fieldValue +fieldName +fieldid +fID +feid +extended +extAction +existing +ex +events 
+eventName +errorswarnings +encoderoptionsdistribution +encodedbydistribution +emptygenres +emailAddress +emailaddress +edituser +dp +displayName +disallow +dirs +dictionary +deleteid +defaultValue +deadfilescheck +deactivated +dd +dbType +dates +ctf +createdb +Country +correctcase +copied +cookies +convert +contactname +confirmPassword +configuration +condition +cluster +CKFinderFuncNum +CKFinderCommand +chmod +children +chat +cep +cd +cb +catname +catID +CardType +caching +bookmark +bodytext +bgcolor +baseurl +bar +autofixforcesource +autofixforcedest +autofix +authtype +audiobitrates +assignment +artisttitledupes +application +APICpictureType +ans +announce +anchor +amt +always +adv +addusers +accessType +y1 +xrds +x1 +wrap +work +way +warning +votes +vn +views +videoid +verifypeer +verifyhost +vendor +varValue +varName +variant +variable +utmr +utmp +utmdebug +utmac +uses +userEmail +use +uporder +updatedb +unbansubmit +ult +ul2 +ul +UA +u2 +u1 +type2 +txtDescription +transaction +tracker +tos +torrentsperpage +topicsperpage +toboard +Title +timeframe +tID +textarea +testing +testemail +tbl +tasks +taglist +Tag +tableprefix +tableId +t2 +t1 +survey +surname +supportfor +subtab +subscription +submit1 +subj +styles +storyid +step1 +stay +Status +start2 +standard +span +so +smtpPort +smiley +slogan +slide +sitetitle +signatures +SID +showqueries +showpage +shout +sha1 +sf +severity +sesskey +sessidpass +series +sectionid +searchText +searchid +searchField +sdb +sday +scheme +scene +scenario +savesettings +savepms +savefile +saveData +Save +sandbox +rotatefile +rotate +roleid +rn +revoke +returnID +resync +restock +resolution +resizetype +resizefile +resetkey +resend +requestid +reportid +renamefile +renameext +removeall +release +relation +recurring +RecordingUrl +recordid +reasontype +race +qs +push +pub +province +protection +property +pref +predefined +pp +play +plan +pl +ping +pf +permerror +passw +PASS +PaRes +parameter +organization +org +orderBy +online +oldusername +oldpwd +older +objects +nowarn +notification +newpw2 +NEWPASS +newlang +nav +myEditor +modname +modeextension +modcomment +metric +memberName +maxwidth +matchtype +mapping +mandatory +ls2 +local +lightbox +levels +langID +L +kick +karma +j +Itemid +isDuplicate +iphone +ipexclude +invitecode +inv +interests +interest +ins +inputH +industry +incldead +importance +imgurl +imgpath +IMG +imageid +ident +id1 +Id +icq +href +hostid +hl +hit +headline +heading +HeaderHexBytes +goodfiles +Generate +ft +fragment +forumid +foreign +followup +fm +fldr +fileType +filetotal +fileID +fg +fCancel +facebook +extUpload +extTID +extMethod +expiry +example +errorCode +eol +entityid +encoded +emphasis +emailnotif +elements +edition +editing +editfile +editaction +dupfiles +donated +doinstall +docid +dlt +dl2 +direct +dip +Digits +dict +delid +deletepms +deleteImage +decoded +datetime +dateStart +dateEnd +date2 +datatype +cut +currencyCodeType +ct +csrf +cs +cPath +courses +coupon +controllers +content1 +contacts +contactid +conn +commentId +cod +cm +clientid +clearLogs +classification +chosen +channelmode +chanid +chan +Category +campaign +callerid +caller +cached +bulk +bucket +boards +blogusers +blogs +billing +bID +bib +bbconfigloc +base64 +bansubmit +badfiles +authorID +attempt +arguments +anon +angle +alpha +alert +albumid +ageverify +agb +afilter +adminpassword +adminid +adminemail +AddAuthItemForm +activation +actionfile +Action +acceptpms +accepted +abstract +abort +a2 +zoneid +youtube +yourname +wwname +wmax +wiki +widgets +Widget 
+whitelist +wait +voucher +vol +vl +visualizationSettings +viewName +viewname +via +Version +varname +variables +validator +valid +utype +utf8 +usort +Users +USERNAME +url1 +URL +uploadpos +Upload +Update +upc +until +unset +unselectall +unpublished +undo +u9 +u8 +u7 +u6 +u50 +u5 +u49 +u48 +u47 +u46 +u45 +u44 +u43 +u42 +u41 +u40 +u4 +u39 +u38 +u37 +u36 +u35 +u34 +u33 +u32 +u31 +u30 +u3 +u29 +u28 +u27 +u26 +u25 +u24 +u23 +u22 +u21 +u20 +u19 +u18 +u17 +u16 +u15 +u14 +u13 +u12 +u11 +u10 +txtEmail +trid +transactionID +trackusers +totalProductCount +topicID +tokens +times +timer +timelimit +thumbnails +throttle +themename +testmethods +taskid +targetboard +tac +tableFields +tabid +sys +sy +suspend +supplierID +subwdata +suburb +substruc +substep +submit2 +sublogin +subjoin +subconst +subcat +subacc +student +STRUCTURE +structure +strReferrer +strProfileData +strId +strFormId +stream +steps +stdDateFilterField +stdDateFilter +station +startTime +startday +sserver +square +sqlquery +sq +spass +sound +sortKey +sortfield +sortDir +sort2 +song +smonth +skype +singleout +signup +SignatureValue +Signature +showtemplate +showSource +ShowFieldTypesInDataEditView +showAll +shortname +shop +ship +searchType +searchterm +searchbox +searchaction +searchable +school +saveToFile +runQuery +ruleid +rp +round +Role +rmFiles +rm +rID +responsecompression +Reset +requiredData +requestKey +requestcompression +repopulate +removeVariables +removeID +removeid +removeAll +remark +relmodule +RelayState +regSubmit +RegisterForm +refid +referral +records +rec +reboot +rc +ratio +ratings +r1 +quick +quest +queryPart +qtype +qr +purpose +pto +proxypwd +proxyport +proto +promote +probe +PRIVILEGES +printview +previewwrite +pressthis +prenom +posttext +pop +point +pms +pmnotif +plus +pkg +phpMyAdmin +phonenumber +phone2 +phone1 +pfrom +paypal +paste +passwrd +passwordConfirm +password3 +partner +parked +parenttab +ParentID +param1 +panel +pageTitle +PAGE +Page +pack +p2ajax +OutSum +OUTPUTFILETEXT +OUTPUT +orderNo +or +optimize +oldname +offline +occ +npw +np +nowarned +nombre +nn +nID +newuseremail +newtitle +newtext +newtag +newstatus +newpwd +NEWPRIVILEGES +newpassword2 +newPass2 +newpass1 +newPass +NEWNAME +NEWHOST +newdid +NEWCHOICE +nb +name1 +NAME +mytribe +mtime +mp +movie +movefile +mood +months +monitorconfig +modifier +modid +mirror +mhpw +metrics +methodpayload +membername +memberID +membergroups +mediaid +maxtime +markread +markdown +mailto +mailSubject +mailid +longtitle +logoff +loginguest +logid +locations +locationName +listPrice +linkname +limitTypes +lim +lID +legend +leap +lead +lcwidget +latest +languageID +labelName +keystring +keepHTML +keep +keepalive +ItemId +itemID +itemCode +ipp +IP +invoiceid +InvId +intTimestamp +intDatabaseIndex +institution +installmode +inst +INSERTTYPE +initdb +INDEXTYPE +INDEXCOLUMNLIST +imaptest +IGNOREFIRST +if +idstring +idlist +hosts +HOST +hdnProductId +gzip +grid +GRANTOPTION +google +gold +gids +getInfos +GenerateForm +generated +fullsite +frontend +fromdate +formSubmit +FormbuilderTestModel +FORMAT +follow +folders +folderID +foffset +focus +fldName +filtertype +filterText +filterName +fileFormat +Fields +FIELDNAMES +field2 +field1 +fee +f2 +EXPORTTABLE +exportImages +EXPORTDB +exception +exact +eventID +eval +endyear +en +email1 +EMAIL +elementId +eids +education +editParts +Edit +ec +dtstart +dtend +downloadpos +downloaded +dname +dm +dlconfig +distinct +displayVisualization +director +directmode +dipl +difficulty +DeviceId +design +descending +desact +deluser 
+DELIMITER +deleteUsers +deletefolder +deldir +decline +dbms +DBLIST +dbase +dayDelta +date1 +dataType +DATABASE +d1 +cvv +customers +currentid +curr +curfile +cur +ctid +credits +createclass +cr +countryName +countryCode +counter +core +coords +contactName +connectt +conflict +configfile +completed +comp +commenttext +colours +colName +CollectionId +Cmd +clientcookies +clickedon +clicked +cleanup +CHOICE +chartSettings +chars +charge +channelName +channelID +changed +cf +cert +cdone +catId +card +canvas +campaignid +cal +cainfo +build +btn +breakdown +border +bool +blocks +blockid +blacklist +birthDate +binary +bi +bbox +banreason +bank +bandwidth +backend +autodeltime +autodel +autocomplete +authorName +authorized +AuthItem +AuthChildForm +atype +AttachmentName +AssignmentForm +Artist +Article +aoe +allrows +alli2 +allDay +akey +ajxaction +ajaxRequest +aggregate +adminpwd +admid +addon +additional +ADAPTER +ACTION +ACCESSLEVEL +a1 +3 +1 + +png +ob +maxdays +aliases +SHIPTOZIP +SHIPTOSTATE +SHIPTOCOUNTRY +SHIPTOCITY +Delete +Address +zID +yeniyer +ww +wser +wq +wdir +vpn +voting +viewscount +verified +vPath +ux +ut +usrid +userspec +userpicpersonal +usefilename +urldown +uptime +uploadloc +upfile +ty +tradercap +todoAction +toaddress +toAdd +tmp +tickets +templateID +tarfile +sv +submitcollation +step4 +step3 +srcport +sqlf +shortcut +seqnum +searchlabel +searchip +searchClause2 +searchClause +scheduled +sameall +rw +rto +rmdir +reveal +resetVoteCount +renamefolder +remoteserver +regval +regtype +regname +registre +redirection +readregname +qaction +pu +prog +prepare +preference +precmd +power +postgroup +postRedirect +pool +pmsg +pipi +pids +phpvarname +phpexec +phpev +passwrd2 +passwrd1 +pa +ox +overmodsecurity +orderdir +orderByColumn +onserverover +oldpasswrd +oldemail +obgz +newver +newdirectory +netmask +nere +mysqlpass +mx +msgs +mquery +moderators +mkfile +missing +mip +minage +menuHashes +mem +mbname +maxPlotLimit +mass +lngfile +ldap +kind +jump +it +ispublic +ipaddr +inside +immediate +imagesize +iStart +iLength +iColumns +hp +hname +guestname +gf +getfile +generalgroup +fromname +fixErrors +finished +filterCategory +filterAlert +fileperm +fileact +fedit +fdownload +fdelete +fchmod +fallback +eventDate +erorr +ephp +ep +env +enquiry +emailto +emailActivate +eheight +ef +editform +editfilename +ed +dup +dstport +dosyaa +dontFormat +dolma +doi +displayAllColumns +dirupload +dif +delregname +delim +deleteuser +deleteAccount +dc +dbu +dbsession +dbp +dbh +dateFormat +dataLabel +cy +customerid +customWhereClause +curl +curdir +criteriaValues +criteriaTables +criteriaSort +criteriaShow +criteriaSearchType +criteriaSearchString +criteriaRowInsert +criteriaRowDelete +criteriaRowAdd +criteriaColumnTypes +criteriaColumnOperators +criteriaColumnNames +criteriaColumnName +criteriaColumnInsert +criteriaColumnDelete +criteriaColumnCount +criteriaColumnCollations +criteriaColumnAdd +criteriaColumn +criteriaAndOrRow +criteriaAndOrColumn +createfolder +cpy +coppaPost +coppaFax +coord +cookiename +cookielength +contactId +con +community +columnsToDisplay +cn +cl +chmod0 +checksum +changeusername +certificate +censortext +censortest +censorWholeWord +censorIgnoreCase +calname +calid +c99shcook +bug +brd +bport +boardurl +boardid +boardaccess +bgc +bday2 +backuptype +backconnectport +backcconnmsge +backcconnmsg +appId +animate +allday +actionfolder +aclid +absolute +aPath +TYPE +SHIPTOSTREET +ProfileForm +Mohajer22 +MD +M2 +F +ER +Direction +CURRENCYCODE +A +zrecord +zpage +zonetxt +zonet +zonesub 
+yearend +yPath +xsrf +wstype +woeid +weekdays +webid +watermark +vv +vpassword +viewed +viewall +viewUsers +viewResults +viewOption +ver2 +ver1 +variations +usertype +userlength +userip +usergroup +userGroup +userEnableRecovery +usepost +used +upsql +uploadfile +uploadForm +updateRecordID +updateFileID +updateData +updateBiblioID +upd +upage +unzip +untilDate +unstable +unhideNavItem +uitype +ue +typE +txtCommand +txtAddComment +tvid +tt +transactionId +transStatus +transId +tpp +tp +totaltopics +topicseen +tools +toolbar +tok +timezonedetection +timeUnit +timeIncrement +ti +threshold +thankyou +tftp +tfid +tests +testmode +tempLoanID +te +taxid +tagvalue +tabs +sync +symlinktarget +symlink +supplierPlace +supplierPhone +supplierName +supplierFax +supplierEmail +supplierContact +supplierAccount +subsection +subscribed +subs +submitok +subjectType +subid +subfiles +subdom +subcategory +subact +strategy +strHtml +story +stories +statusID +states +startval +starts +stars +star +stUpload +ssi +sshport +ssearch +sqluser +sqlpass +sqlhost +spoiler +specialchars +specDetailInfo +spage +smtpusername +smtpport +smtppassword +smodule +sl +skid +siteName +showsc +shown +showh +showevent +showdupes +showUnhideDialog +showCheckbox +shared +shareWith +shareType +setMetrics +setDefault +sessionId +sesc +services +serverurl +servertype +servers +serverid +serveR +seriesTitle +serialID +seqNumber +seq +separate +selectedDoc +security +sect +searchin +searchby +searchString +searchName +searchId +sea +scid +scdir +scalingup +savemsg +saveandnext +saveZ +saveNclose +saveLogs +saveKardexes +salesrank +saction +runcmd +ruletype +ruledefgroup +ruledef +rssfeed +rowspage +rownumber +rowid +routines +routes +rmver +rminstall +returnaction +resultXML +reshares +resetpassword +reserved +reserveLimit +reserveItemID +reserveID +reserveAlert +resent +requireAgreement +reqType +reportsent +reports +reportView +reportContentType +replies +replaceWith +repeatable +ren +removesess +removeFines +remotefile +remipp +remail +relpathinfo +releasedate +relatedmodule +regularity +regexp +regDate +refurl +recvDate +recsEachPage +recoveryPassword +recordSep +recordOffset +recordNum +recaptcha +recapBy +reborrowLimit +ready +rback +rawfilter +ranking +ragename +rage +r4 +quirks +quickReturnID +questionid +querY +qt +qindsub +qcontent +qact2 +qact +publisherName +publisherID +publicUpload +ptype +ptID +pt +pruningOptions +proxypass +proxyhostmsg +protect +prop +projectid +projectID +progresskey +profiles +producttype +processed +pro +priceCurrency +pr +postto +postgroups +postfrom +postal +portalauth +popuptype +pod +plug +plain +placeName +placeID +pipe +phpini +phpcode +pftext +personal +pd +pb +paymentStatus +pause +passwords +passwd1 +passlength +passWord +pasS +parentId +palette +pais +pageId +packageName +overrideID +outbox +ot +ordDate +optimization +opml +operations +opacHide +oldform +oldfilename +off +oauth +nzbpath +numbers +numExtended +null +ntp2 +ntp1 +noupdate +notsent +notificationType +notificationCode +noteid +notdeleted +notactivated +noredirect +noChangeGroup +nfid +nf +newowner +newgroupname +newf +newer +newemail +newdb +newWidth +newPassword2 +newLoanDate +newHeight +newDueDate +newDirectory +nentries +myip +msgfield +ms +movieview +mountType +mountPoint +modulename +moduleid +modulePath +moduleDesc +modifiedSince +misc +minuteDelta +minus +mins +minimum +mini +microhistory +methodsig +memory +memberTypeName +memberTypeID +memberPostal +memberPhone +memberPeriode +memberPIN +memberNotes +memberFax 
+memberEmail +memberAddress +me +md5sum +md5sig +maxentries +maxUploadSize +matchword +matchuser +matchname +matchcase +massupload +marked +makenote +makedir +mailtxt +mailsub +mailing +magic +logging +logfile +logdefaultblock +logMeIn +locationID +loanStatus +loanSessionID +loanPeriode +loanLimit +loanID +listprice +listname +listing +listarea +listShow +link2 +lineid +lifetime +library +len +leave +layoutType +layers +lasturl +lastmodified +lastid +lastQueryStr +languagePrefix +langName +labelDesc +labdef +kw +kstart +keyname +keydata +key2 +key1 +kb +k2 +jupart +jufinal +joindate +iv +itemname +itemStatusID +itemStatus +itemSourceName +itemSource +itemSite +itemShares +itemCollID +itemAction +iso +isdescending +isPersonal +isPending +invitepage +inverse +inventoryCode +invcDate +installpath +installed +installdata +installbind +instName +inputSearchVal +inheritperm +inherit +indxtxt +indx +incspeed +inXML +inUsername +inPopUp +inPassword +inNewPass +imdbid +imdb +ie +idtype +idc +htaccess +hot +holiday +holDesc +holDateEnd +holDate +hideNavItem +hex +headers +harm +harddiskstandby +gx +guest +gtype +grouptype +groupreason +groupr +groupfilter +graphid +gracePeriode +grabs +gpack +googleplus +gmdName +gmdID +gmdCode +gmd +giveout +getupdatestatus +getstatus +getprogress +getactivity +getDropdownValues +geoOption +generic +gen +gameid +fu +ftpuser +fstype +front +fromsearch +fromemail +frequencyName +frequencyID +free +fp +forgot +foreignTable +foreignDb +forceRefresh +folderpath +flow +fldname +fldlength +fldlabel +flddecimal +fldType +fldPickList +fldLength +fldLabel +fldDecimal +fix +firstday +finishID +finesDesc +finesDate +fineEachDay +findString +fileurl +fileto +fileold +filenew +filename2 +filefrom +fileframe +filecontents +fileURL +fileTitle +fileDir +fileDesc +fieldlabel +fieldType +fieldSep +fieldId +fieldEnc +fh +ffile +favicon +fam +external +extensions +exponent +expirationyear +expirationmonth +expDateYear +expDateMonth +expDate +exemplar +exe +exccat +evtitle +eta +errorstr +errormsg +errormail +errmsg +enroll +ends +endday +encryption +encrypted +encrypt +enclose +enableReserve +emailcomplete +emailId +editf +editable +editUserGroupSubmit +editUserGroup +eday +ecotax +dwld +due +dto +dos +documentID +doaction +doSearch +doImport +doExport +dnssec +dns2 +dns1 +dn +dmodule +disk +disablelocallogging +disabledBBC +dis +dirToken +dim +digest +dialog +dhcp +dfrom +df +depts +demolish +delsub +delrule +delrow +delgroup +deletesmiley +deleteip +deleteevent +deletecheck +deleteUserGroup +debet +dbserver +dbpw +dbid +dbPrefix +dbPort +dbHost +dayname +datetype +dateto +datefrom +dateReceived +dateExpected +dataurl +dataset +datadir +databaseloginpassword +databaseloginname +databasehost +dB +cw +cvv2Number +cvmodule +customfield +custid +cust +currentFolderPath +curpage +csid +crt +creditCardType +creditCardNumber +credentials +createpages +createmode +crdir +couponamount +counts +convertmode +conversation +conv +contest +contentTitle +contentPath +contentDesc +contbutt +contains +consumer +constraint +consoleview +confirmFinish +combine +columnIndex +color2 +colltype +collTypeName +collTypeID +collType +codes +cmspassword +cmsadminemail +cmsadmin +cls +clientId +cleared +classOptions +claim +chvalue +chpage +chkagree +checkprivstable +checkprivsdb +checkout +checking +checkboxes +checkShares +checkReshare +check1 +channels +changepassword +changecurrent +changeUserGroup +cfgval +cfgkey +categoryID +cardtype +cap +callbackPW +callNumber +calendarid +calcolor +bzipcode +buddies 
+btnSubmit +bstate +bridge +breadcrumb +bphone +boxes +box3 +box2 +box1 +bootstrap +bomb +boardtheme +boardseen +boardprofile +blocklabel +blastname +bits +birthyear +birthmonth +binding +bill +biblioTitle +biblioID +bfirstname +beta +bemail +beginner +bcountry +bconfirmemail +bcity +bbc +baza +batchID +batchExtend +basedn +baddress2 +baddress1 +backupnow +backdrop +baba +autoupdate +automatic +authorityType +authPin +authList +audioFolder +asin +arg +arch +applicable +appkey +appeal +aop +animal +altmethodpayload +alterview +alsoDeleteFile +allsignups +allflag +allfiles +allboards +aliasid +algorithm +afterupload +aemail +adopt +adminuser +adminpass2 +adminEnableRecovery +addcategory +addUserGroupSubmit +addUserGroup +addSpider +addReply +addMessage +addList +acttype +actors +actionName +acl +acct +accountnumber +accountname +abc +aID +WSDL +UserChangePassForm +UID +Test +Term +Tab +T +Submit1 +Settings +SaveInSent +SORT +SHIPTOSTREET2 +Review +ReturnUrl +RecordingDuration +Project +Product +PasswordResetForm +PasswordForm +Or +MenuItem +Menu +METHOD +Language +LOCALECODE +Issue +InstallForm +Group +ExpirationYear +ExpirationMonth +ERORR +DialCallStatus +DeviceType +DATE +D +Condition +CallSid +CVV +B +AudioPlayerSubmit +AudioPlayerReset +AccountNumber +zonefile +zipName +zhsd +yy +ystart +yellowtemp +yellowstales +yellowremfails +yellowrejects +yellowgetfails +yellowgessper +yellowfan +yellowdiscards +yellowavgmhper +years +yahoo +xxx +xx +xtype +xnum +xmode +xmldata +xjxmthd +xjxfun +xjxevt +xjxcls +xjxargs +xjxGenerateStyle +xjxGenerateJavascript +xhrLocation +xhprof +xdebug +wu +wstoken +writeSchema +wresult +wrcont +wpseo +wpnonce +wpas +workingdiR +workgroup +workflow +workerId +wordlist +wood +wlk +wli +withdraw +withCount +wins2 +wins1 +wins +wildcard +wikitext +wide +whw +whom +websiteId +webserver +webpage +webguiproto +webguiport +wbp +wbcp +warn +want +wakeall +wa +vuln +vrt +vpntype +vouchersyncusername +vouchersyncport +vouchersyncpass +vouchersyncdbip +vouchers +volume +void +vnutr +vlanprioset +vlanprio +vjcomp +villagename +viewweek +viewupgradelog +viewscope +viewMode +viewBag +videos +videopress +videoTitle +videoTags +videoId +videoDescription +videoCategory +vhostcontainer +vhid +vgrlf +versions +verse +verifycode +verification +verboselog +verb +vecdo +ve +vcheck +vbxsite +vbulletin +vbss +vbsq +vat +vars +variants +var2 +var1 +valor +validation +validateValue +validateId +ustsub +ustools +ustname +usrgroups +usetoken +usetcp +userrealname +usernamefld +username2 +usermail +userlogin +userlevel +userinfo +userids +userf +useraction +userPassword +userEdit +userDialogResult +userAgent +usepublicip +useicmp +usecurl +useR +uscmnds +urlup +urltype +urlf +urldd0 +url2 +urL +upports +uploadurl +uploading +uploadhd +uploadf +uploader +uploaddir +uploadPath +upl +upip +upin +upff +upf +updateurl +updatempd +updateme +updateid +updatefile +updateType +updateMsgCount +upcont +upcom +upchange +unverify +unscheduled +unreleased +unpubdate +unknown +units +unitprice +uniqueid +uniqueID +undodrag +unbanreason +ulang +uk +uf +ucd +uback +uN +uID +u1p +typeofdata +typename +typefilter +type6 +type1 +txtwebemail +txtsupport +txtUsername +txtRecallBuffer +txtPHPCommand +txtCaptcha +txtAddress +txpower +txkey +txantenna +tvname +tuser +tunable +tribe +tresc +trapstring +trapserverport +trapserver +trappercap +trapenable +transport +transient +traffic +tracks +trackback +tpshcook +tplName +tplID +town +touserid +touch +totalcount +totalTracks +totalItems +topsearch +toppool +tooltip 
+tomod +toid +toProcess +tn +tld +titulo +titre +tint +timeupdateinterval +timeservers +timeoffset +timeint +timedescr +timedd0 +timeFormat +tile +tids +ticketid +ticketbits +thumbWidth +thumbHeight +throwexception +threadID +thisX +themeName +tftpinterface +textonly +texto +textmail +textfield +textIn +text0Name +testvar +testdbpwd +testdb +testType +testMode +testID +templatefile +tempName +temat +teamid +teacher +tdir +td +tcpmssfix +tcpidletimeout +tcp +tbname +tbls +taxtype +taxrate +taskID +targetname +targetip +tagcloudview +tagId +tablo +tableList +tabla +tabAction +tab1 +ta +t3 +syslocation +sysevents +sysemail +syscontact +syscmd +syntax +synconupgrade +synchronize +syncfilter +symgo +symbol +svff +svdi +supprimer +suppr +sunrise +subsubaction +subset +subscriptionId +subscribers +subqcmnds +subop +subnetv6 +subnet +submode +submitv +submitrobots +submithtaccess +submitf +submitThemes +submitReset +submitFilter +submitFilesAdminSettings +submitEmail +submitAdd +submit4 +submit3 +submail +subjectid +subfolder +subdomains +subcanemaildomain +subId +subGenre +stuid +stuff +students +studentidx +sts +strukt +stringtoh +strin +strictcn +strictbind +streamMode +stp +storagegroup +stoptime +stoppool +stoppga +stopbtn +stime +stereo +stepid +step5 +stdlib +stderr +statut +statusid +statsgraph +staticarp +statetype +statetimeout +statetable +stateid +stateOrProvinceName +startyear +startpool +startpga +startnum +startmonth +startdisplayingat +startbtn +startMonth +starred +stamp +staffId +stack +sshdkeyonly +srname +srm +srctype +srctrack +srctext +srcnot +srcmask +srch +srcfmt +srcendport +srcbeginposrt +srcbeginport +sr +sqtid +sqsrv +sqquery +sqpwd +sqprt +sqlwxp +sqluser4 +sqluser3 +sqluser2 +sqluser1 +sqltype +sqlty +sqlportb4 +sqlportb3 +sqlportb2 +sqlportb1 +sqlport4 +sqlport3 +sqlport2 +sqlport1 +sqlport +sqlpass4 +sqlpass3 +sqlpass2 +sqlpass1 +sqlog +sqlite2 +sqlhost4 +sqlhost3 +sqlhost2 +sqlhost1 +sqlfile +sqldp +sqldebug +sqlcode +sqlaction +sqdbn +sqconf +spy +spots +spot +sport +spoofmac +spellstring +spelling +specs +specifiedpassword +specialsettings +speciallogfile +specialFiles +spammer +spamcheck +sourcetracking +sourceport +sourceip +sourcego +sorttable +sortname +sorting +sortdirection +sortdir +sortable +sortField +songid +soname +something +somestuff +some +solrsort +snn +snmpscanner +snatched +snaplen +smtptls +smtpssl +smtprelay +smtpnotifyemailaddress +smtpipaddress +smtpfromaddress +smtpPassword +sms +smode +smile +smfdbu +smfdbp +smfdbn +smfdbh +smf +smartpagebreak +smartmonemail +slot +slid +skiplang +skipIOS +skipANDROID +skinname +skinName +sk +sjid +sizey +sizes +sitter2 +sitter1 +sitedown +siteId +simpin +silver +showthumbs +showtext +showslow +showmessage +showinfo +showinactive +showbd +showact +showIndex +showFooterMessage +shorturl +shortseq +shopping +shiptobilling +sharing +sharednet +sh311 +sh3 +sfname +sfldr +sfilter +sfilename +setype +setupid +settype +settags +setrw +setoption +setname +setlanguage +setlang +setdefault +setUserAgent +setPublic +sessions +sessid +sess +servicestatusfilter +serviceName +serversdisabled +serverip +servercn +serverId +serie +serialspeed +serialport +serialize +serdir +ser +sentitems +senm +senha +sendtime +sendpassword +sendmsg +sendmethod +sendit +sendfile +senderEmail +sendemail +sendactivation +sendTo +selyear +selmonth +sellernick +selectvalues +selectop +selectlist +selectedmodule +selectedTable +selectcategory +selectAmount +sele +selday +selCountry +segment +seed +sedir +securityscanner +secu +secs +secretKey 
+searchval +searchuser +searchstring +searchfield +searchadvsizeto +searchadvsizefrom +searchadvr +searchadvposter +searchadvgroups +searchadvcat +searchUsername +searchQuery +searchOper +searcc +seC +scrubrnid +scrubnodf +scores +schooldatex +schedule0 +sched +scalepoints +sca +sbjct +savmode +savetest +saveoptions +savehostid +savegroup +savefolderurl +savefolder +savefilenameurl +savefilename +savedraft +saveconf +savePath +saveNedit +saveNcreate +saveNback +sat +sampledata +salutation +saleprice +safemodz +safefile +safecss +safe +sabsetting +sabapikeytype +sYear +sName +sColumns +s3key +s3bucket +rxantenna +rwenable +rwcommunity +rvm +runsnippet +runid +runer +runState +rtl +rstarget4 +rstarget3 +rstarget2 +rstarget1 +rsswidgettextlength +rsswidgetheight +rssurl +rssmaxitems +rrule +rrdbackup +rport +rpassword +rownum +rowId +routeid +rootpath +rollbits +rocommunity +robotsnew +rname +rmid +richtext +rfiletxt +rfile +rfc959workaround +rf +reverseacct +returnsession +returnpage +retries +ret +resultmatch +resultid +resubmit +restorefile +restorearea +restartchk +respuesta +responsive +resources +resourcefile +residence +resetwidgets +resetpass +resetlogs +resetlog +resetPassword +rescanwifi +requests +reqid +reqFor +req128 +repwd +repositoryurl +reportname +reportfun +reportType +replayMode +repeatMonth +repass +reopen +rensub +renold +rennew +renf +renderlinks +renderimages +renderforms +renderfields +render +rempool +removep +removemp +removefields +removeOldVisits +remoteserver3 +remoteserver2 +remotekey +remoteip +remot +remmin +remhrs +remdays +remarks +reloadfilter +relevance +relayd +relay +relationships +relationship +relations +related +rela +reglocation +registered +reginput +regid +regdomain +regdhcpstatic +regdhcp +regcountry +refuse +refund +refuid +refreshinterval +reflectiontimeout +refkod +referid +referer2 +redirurl +redirectto +redirectUri +redfi +reddi +recurse +recurringtype +recurrence +recreate +recordsArray +recordcount +recordType +recordID +reconstruct +recommend +recipientCurrency +recipientAmount +recherche +receipient +recache +reauthenticateacct +reauthenticate +realpath +readonly +readme +reading +rdata +rawAuthMessage +rasamednsasdhcp6 +rapriority +randkey +ramode +rainterface +radomainsearchlist +radns2 +radns1 +radiusvendor +radiusserverport +radiusserveracctport +radiusserver2port +radiusserver2acctport +radiusserver2 +radiusserver +radiussecret2 +radiussecret +radiussecenable +radiusport4 +radiusport3 +radiusport2 +radiusport +radiusnasid +radiuskey4 +radiuskey3 +radiuskey2 +radiuskey +radiusissueips +radiusip4 +radiusip3 +radiusip2 +radiusip +radiusenable +radiusacctport +radiobutton +radPostPage +rN +rM +r3 +r00t +qx +quoteid +qunfatmpname +quizid +quitchk +quietlogin +quickmanagertv +quickmanagerclose +quickmanager +quicklogin +questions +querytype +querysql +queryString +quantityBackup +qu +qtranslateincompatiblemessage +qsubject +qqfafile +qmrefresh +qact3 +q3 +q2 +purgedb +puremode +purchaseorderid +purchaseid +publickey +pubkey +pubdate +puT +ptpid +ptp +psk +psid +pseudo +ps2pdf +prv +proxyusername +proxyurl +proxypassword +proxyhost +prov +protocomp +protmode +promiscuous +projectionxy +projection +profileId +prof +productname +productlist +productcode +productDescription +processlogin +processlist +processing +procedure +probability +prj +privid +privatekey +priv +priority3 +priority2 +priority1 +prio +printer +principal +primarymodule +primaryconsole +prices +pri +prevpage +press +presence +prescription +prereq +preg +prefork +prefetchkey 
+prefetch +preauthurl +ppsstratum +ppsselect +ppsrefid +ppsport +ppsfudge1 +ppsflag4 +ppsflag3 +ppsflag2 +pppoeid +ppid +ppdebug +ppage +potentialid +potentalid +posttype +postedText +postback +postafterlogin +postData +post2 +post1 +portscanner +portbw +portbl +portbc +port1 +porder +pop3host +pools +poolopts +poolname +pollvote +pollport +pollQuestion +pollOptions +plusminus +plname +plid +playlistTitle +playlistDescription +plaintext +pkgs +pkgrepourl +pinned +pictitle +pics +pickfieldtable +pickfieldname +pickfieldlabel +pickfieldcolname +pick +piasS +pi +phpsettings +phpsettingid +phpenabled +phpbbkat +phpbbdbu +phpbbdbp +phpbbdbn +phpbbdbh +phpbb +photoid +phoneNr +phone3 +pguser +pgsqlcon +pgport +pgdb +peruserbw +personality +personId +persistcommonwireless +persist +perms9 +perms8 +perms7 +perms6 +perms5 +perms4 +perms3 +perms2 +perms1 +perms0 +permStatus +periodo +periodidx +perform +perPage +peerstats +peace +pdouser +pdopass +pdodsn +pdocon +pdnpipe +pcid +paypalListener +payments +paymentId +paymentData +paths +pathf +path2news +patch +passwordnotifymethod +passwordkey +passwordgenmethod +passwordfld2 +passwordfld1 +passwordfld +passwordconfirm +passwordc +passwdList +passthrumacaddusername +passthrumacadd +passgen +passf +passenger +passd +partition +parseSchema +parid +parentqueue +parentfieldid +paranoia +paporchap +pagestyle +pagestart +pageop +pagenumber +pageborder +pageType +pageOwner +padID +paID +pW +pUID +pPassConf +pPass +pPage +pName +pMail +pDesc +p4ssw0rD +p3 +p2p +p2index +p2entry +p1index +p1entry +overwriteconfigxml +overdue +ouT +ostlang +orionprofile +origname +organizationalUnitName +organizationName +orgajax +orders +ordering +orderType +orauser +oraclecon +opwd +optin +optimizer +oper +openings +opened +openbasedir +oof +onw +onlyforuser +onlyfind +ondemand +on +oldtime +olddir +oldaction +oldPlaylistTitle +oldMountPoint +oldEmail +odbcuser +odbcpass +odbcdsn +odbccon +odb +occupation +objectIDs +obj +nurlen +nurld +numwant +numlabel +numberposts +numail +nuked +nuf +ntporphan +nslookup +nrresults +npassworda +npage +noxml +nowmodule +nounce +notrap +notices +noti +not +nosync +noserve +noreload +nordr +noquery +nopfsync +nopeer +nopackages +noofrows +none +nonat +nomodify +nometool +nome +nomacfilter +nolimit +nolang +nohttpsforwards +nohttpreferercheck +nohtml +nogrants +noexpand +noedit +nodraft +nodnsrebindcheck +nodeid +noconcurrentlogins +noantilockout +noaction +noRedirect +noOfBytes +nmdf +nfile +nf4cs +nf4c +nf1 +nextserver +nextid +nextPage +newwin +newusername +newusergroup +newtype +newtime +newtheme +newtemplate +newrule +newprefix +newpref +newpath +newnick +newmessage +newids +newid +newgroup +newdocgroup +newcode +newcategory +newalbum +newaccount +newX10Monitor +newWindow +newVideoTitle +newVideoTags +newVideoDescription +newVideoCategory +newValue +newText +newSite +newProject +newPlaylistTitle +newPlaylistDescription +newPath +newMonitor +newGroup +newGame +newControl +netgraph +netboot +nested +neg +ncbase +nc +natreflection +natport +nameren +namelist +namefe +name3 +namE +n1 +myusername +mysqls +mysqlcon +mypassword +myname +mylogout +mycode +mybulletin +mybbindex +mybbdbu +mybbdbp +mybbdbn +mybbdbh +mw +mve +mvdi +mute +music +muser +multiplier +multifieldname +multifieldid +mtype +mtu +mto +mtext +msubj +mssqlcon +mss +msqur +msq1 +msid +msi +msgtype +msgnoaccess +msgno +msgexpired +msgcachesize +msg1 +mru +mpdconf +mpath +mpage +moveup +moveto +movedown +movd +mount +motivo +motd +moodlewsrestformat +mon +moduletype +moduleorder 
+moduleguid +moduleType +moduleId +modified +modfunc +modfile +moderate +modelId +modeid +modcat +modE +mobj +mobilephone +mnam +mmsg +mmail +mlist +ml +mkF +mkD +mito +minkills +minViewability +minJs +minCss +mimetypes +milw0 +mids +mibii +mhtc +mhost +mhash +mh +mg +mfrom +mfldr +mffw +metadata +messagesubject +messageid +messagebody +messageMultiplier +mess +meridiem +mergefile +merchantReference +menutitle +menus +menuindex +menuid +memtype +memday944 +memday942 +memberPasswd2 +memberPasswd +memberPassWord +mediatype +mediaopt +medalweek +medalid +mdp +md5q +md5pass +md5hash +md5crack +md +mcid +mc +mbox +mbadmin +maxtry +maxtemp +maxstore +maxstales +maxremfails +maxrejects +maxprocperip +maxproc +maxmss +maxleasetime +maximumtableentries +maximumstates +maxgetfails +maxgessper +maxfan +maxdiscards +maxcrop +maxaddr +maxZipInputSize +master +masssource +massedit +massdefaceurl +massdefacedir +massa +masdr +marker +markdefault +manual +managerlanguage +manage +man +makeupdate +maintitle +maint +mainmessage +mainGenre +mailsent +maillisttmpname +mailcontent +mailbodyid +mailbody +mailMethod +mailAuth +magicfields +macname +mV +mSendm +mKf +mKd +mD +lucky +lticket +lp +losslow +losshigh +loopstats +lookfornewversion +longlastingsession +logsys +logprivatenets +logpeer +logoutid +loglighttpd +loglevel +loginmessage +loginemail +loginautocomplete +logic +loggedAt +logfilesize +logfilE +logf +logeraser +logdefaultpass +logbogons +logall +logable +logType +logFile +lockid +locationid +localized +localityName +localip +localfile +localf +localbeginport +loan +lm +live +listorder +listmode +liste2 +liste1 +liste +listSubmitted +listItem +listId +linkedin +link1 +link0 +limitpage +limitless +limite +liked +lfilename +lemail +legendstyle +legendsize +legendfontsize +legendfontr +legendfontg +legendfontb +legendfont +lecture +leaptxt +leadval +leadsource +lbg +lbcp +latencylow +latencyhigh +lastactive +lastActive +langs +langname +lanes +lane +landscape +lan +laggif +l7container +kr +kod +king +kime +kim +killfilter +kil +keytype +keylen +keyid +keepslashes +keeppass +katid +jpeg +joingroup +joined +jid +jform +jenkins +jaxl +jahr +jCryption +itemkey +itemcount +isverify +issues +ispersis +isocode +isnano +isim +isenabled +isemaildomain +iscustomreport +iscomment +iscatchall +isbinddomain +isactive +isSwitch +isDev +iron +ipv6allow +ipsecpsk +ipscanner +iprestricted +ipprotocol +ipproto +iplist +ipandport +ipaddrv6 +invoiceId +inviteesid +invited +invitation +invest +invalidate +introeditor +interfaces +instanceId +installstep +installGoingOn +inputid +inputSize +injector +initstr +initialtext +initialise +initdelay +inifile +inid +inf3ct +ineligible +indent +includenoncache +incl +iname +inajax +inactive +inViewWarnings +inViewLogs +inViewErrors +inSessionSecuirty +inRemember +inNewUserName +inForgotPassword +inDownLoad +inConfEmail +inBindLog +importrobotsmeta +importonly +importmethod +importid +importfile +importer +important +importaioseo +importType +importFile +impersonate +imgtype +imgid +imdbID +imagename +imagefile +imagedetails +imageUrl +imageThumbID +imagE +ikesaid +ikeid +ignoresubjectmismatch +ignorephpver +ignorefatal +ignored +ignoreTV +ifnum +ifname +ieee8021x +idname +idletimeout +identity +identifiant +idb +idSite +idL +id9level +id9gid +id8level +id8gid +id7level +id7gid +id6level +id6gid +id5level +id5gid +id4level +id4gid +id40level +id40gid +id3level +id3gid +id39level +id39gid +id38level +id38gid +id37level +id37gid +id36level +id36gid +id35level +id35gid +id34level 
+id34gid +id33level +id33gid +id32level +id32gid +id31level +id31gid +id30level +id30gid +id2level +id2gid +id29level +id29gid +id28level +id28gid +id27level +id27gid +id26level +id26gid +id25level +id25gid +id24level +id24gid +id23level +id23gid +id22level +id22gid +id21level +id21gid +id20level +id20gid +id1level +id1gid +id19level +id19gid +id18level +id18gid +id17level +id17gid +id16level +id16gid +id15level +id15gid +id14level +id14gid +id13level +id13gid +id12level +id12gid +id11level +id11gid +id10level +id10gid +icp +icode +icmptype +icerik +ical +hwhy +htype +httpsverify +httpsname +httpscanner +https +httpbanner +htmlemail +html2xhtml +htcc +htc +htaccessnew +hrs +howmuch +howmany +howlong +how +hostres +hostipformat +hostapd +hostName +hosT +horario +holdcnt +hlp +hldb +hidrfile +hidid +hideversion +hidemenu +hideidentity +hidFileID +hid +hellotime +health +hd +hc +having +hashtoh +hashkey +hasAudio +hardenglue +ham +gtin +gt +gs +grupo +grps +grpage +grouped +groupdesc +groupdel +groupby +groupIDs +groupCounter +greif +graphtype +graphlot +granularity +grants +granted +gr +gpstype +gpssubsec +gpsstratum +gpsspeed +gpsselect +gpsrefid +gpsprefer +gpsport +gpsnmea +gpsinitcmd +gpsfudge2 +gpsfudge1 +gpsflag4 +gpsflag3 +gpsflag2 +gpsflag1 +gotod +goodsid +gomkf +godb +godashboard +goal +gn +github +gip +gifif +ggid +gfils +getpic +getm +getenv +getdyndnsstatus +getdb +getdate +getcfg +getThermalSensorsData +getOutputCompression +generatekey +generateKeypair +general +ged +geT +gdork +gd +gc +gbid +gatewayv6 +gameID +gadget +ga +fyear +fwdelay +fw +fvonly +fuzz +functionz +functionp +ftype +ftpscanner +ftps +ftppass +ftphost +fsOP +fromAddress +frm +friendlyiface +friend +fresh +frames +fqdn +fq +fpath +fpassw +forwarding +forwarderid +formname +formfactor +formdata +formatup +formats +formatdown +formage +formId +formAutosave +forgotPassword +forever +foreground +forceIcon +forceFormat +fontr +fontg +fonte +fontb +fontSize +following +folderId +fmt +flushcache +flowtable +floor +floating +fldMandatory +flashtype +flashpga +fl +fixmetadesc +firmwareurl +finds +findid +fin +filtre +filtertext +filterlogentriesinterfaces +filterlogentries +filterdescriptions +filled +filew +filetosave +filesend +fileoffset +filename64 +filename32 +filecreate +filecount +fileOffset +fileLength +fileExistsAction +fileEdit +fileDataName +fileContent +file2ch +filE +fieldkey +fieldCounter +fid2 +feedId +fe +fdo +fdel +fcsubmit +fcopy +fbclearall +favourite +favorites +favicons +fast +familyName +facility +facid +fType +ezID +eyear +extras +extractDir +extern +extdisplay +extdir +exportVideo +exportMisc +exportFrames +exportFormat +exportFile +exportDetail +exploit +expirationDate +expid +expertise +expanded +expandAll +exp +exitsql +exists +exif +executeForm +execmethod +execmassdeface +excludedRecords +exchange +exc +exTime +exT +ewidth +eventname +eventTitle +evap +evalsource +evalinfect +evalcode +evac +etag +et +eshopId +eshopAccount +esId +error500path +error404path +error403path +erne +epot +epoch +entryid +entryPoint +entryId +entryID +entityID +entire +enhanced +enforceHTTPS +endport +endmonth +encoder +encod +enablestp +enablesshd +enableserial +enablenatreflectionhelper +enablebinatreflection +emonth +eml +embedded +emailsubject +emailfrom +emailch +emailToken +emailList +emailID +emailBody +elementType +ee +edittxt +editprofile +editkey +editgroup +editedon +edge +ecraz +ealgo +dynamic +dxval +dxsqlsearch +dxportscan +dxparam +dxmode +dxinstant +dximg +dxfile +dxdirsimple +dxdir +dummy +dumd 
+duid +duedate +dsttype +dstnot +dstmask +dstip +dstendport +dstbeginport +dscp +dryrun +droptables +drilldown +dragtable +dragdroporder +dpgn +dpath +downloadid +downloadbtn +downloadbackup +downloadIndex +downloaD +downf +downchange +dosthisserver +dosearch +dopt +donotbackuprrd +domerge +domen +domainsearchlist +domainname +domaiN +doimage +documentroot +documentgroup +doctype +docs +docgroups +docgroup +doRegister +doDelete +dnssrcip +dnssecstripped +dnsquery +dnslocalhost +dnsallowoverride +dns4 +dns3 +dnpipe +dlgzip +dldone +dlPath +dkim +dizin +divider +diversity +distribution +diskspace +discipline +disapprove +disablevpnrules +disablesegmentationoffloading +disablescrub +disablereplyto +disablenegate +disablelargereceiveoffloading +disablehttpredirect +disablefilter +disableconsolemenu +disablechecksumoffloading +disablecheck +disablecarp +disablebeep +dirr +dirlisting +dirfree +directoryscanner +direccion +dire +dircreate +diract +dirList +dimensions +dig +diff +dhtc +dhcpv6leaseinlocaltime +dhcprejectfrom +dhcpleaseinlocaltime +dhcphostname +dhcpfirst +dhcpbackup +dhcp6usev4iface +dhcp6prefixonly +dfilename +devid +deviceid +detail0 +destslice +destino +destd +descripcion +desc2 +desc1 +deptid +deposit +depid +denyunknown +dend +demoData +deltype +deltpl +delstring +delsel +delpref +delmac +deliveries +deliver +delimeter +delfriend +delfolder +delfl +delfbadmin +delf +deleteweek +deletesubmit +deleterule +deletegrp +deleteg +deletedir +deletedSpecs +deletecntlist +deleteUser +deletePrices +deleteList +deleteIndex +deleteImages +deleteCategory +deldat +deld +delName +degrees +deftime +defaulttemplate +defaultqueue +defaultleasetime +defaultgw +deduction +decrypt +debugmethods +debugfailover +debugbox +debug3 +debug2 +debit +deathplace +deathdate +deadline +deact +deS +deL +ddo +ddnsupdate +ddnsdomainprimary +ddnsdomainkeyname +ddnsdomainkey +ddnsdomain +dbsocket +dbn +dbbase +dbUsername +dbTablePrefix +dbPwd +dbPass +dbOP +datestamp +datechange +datasrt +dataroot +dataofs +datagapradius +datagapangle +dataflt +databasename +dataangle +data2 +darezz +dare +dID +cx +customernumber +customcss +customaddtplid +customId +customFieldId +currentday +currentPassword +currentPage +currencyid +currencyCode +curpath +cuenta +ctx +ctrl +ctag +csvIDs +csspreview +csrftoken +csr +cs2 +cs1 +crypo +crtty +crrt +crefile +createstdsubdomain +createlist +createdon +createaccount +cre +crcf +crannycap +cracK +cpyto +cpw +cpath +cpass +cpage +coverage +courseId +couponcode +coupling +countryID +countonly +copyname +cop +contenttype +containerid +contactidlist +contactID +contactEmail +cont +consumerSecret +consumerKey +const +consent +connsub +connport +connections +connectionType +connectback +confirmEmail +confirm3 +confirm2 +configs +concepto +compr +compose +communication +commonName +commits +commex +commentaire +commander +combo +color1 +college +collectionto +collectionfrom +collectcolumn +coin +codetype +coded +codeblock +coauthors +coM +cnpj +cmspasswordconfirm +cmode +cmmd +cmediafix +cmdr +cmdir +cmdid +cmdex +cmde +closenotice +closedate +clockstats +clipboard +cleartokens +clearquery +clearlogs +clearSess +clearLog +cleancache +clay +classname +cktime +ckeditor +ck +cipher +cinterface +cids +choix +choice2 +chmodnow +chmodenum +chm +chkalldocs +chfl +checksumbits +checknum +checkmetadesc +checkid +checkconnect +checkaliasesurlcert +chdir +chats +chatmsg +chartsize +characterid +chapter +chapo +changestatus +changero +changeVisitAlpha +changePass +cfy +cfx +cfilename +cfile +cfil +cfed 
+certsubject +certref +certid +certdepth +cds +cdirname +cdir +cateid +categoryname +categoryName +catalogid +catalogName +casein +cartId +caref +cardno +capture +cantidad +canpreview +canned +caneditphpsettings +caneditdomain +cancelled +canceldelete +campo +cambio +callop +callerId +caid +cacheable +cable +cP +c37url +c2 +byws +bythis +bysyml +bypcu +bypassstaticroutes +byoc +byfc9 +byfc +byetc +bye +bycw +byapache +bwdefaultup +bwdefaultdn +bv +buy +buttonval +buttons +businessName +bulletin +budget +bs +broadcast +bridgeif +breakpoints +breakpoint +bps +bpg +bpage +bounce +bottom +bots +bootslice +boolean +bookings +bonus +bogonsinterval +boardmod +blogtitle +blogtags +blogbody +blockpriv +blockeduntil +blockedmacsurl +blockedafter +blockbogons +blatent +birthplace +birth +bip +bindpw +bindip +binddn +bgColor +benchmark +behaviour +be +bduss +bcip +baz +baslik +basket +basic +basemodule +bantype +bantime +bannedUser +banip +banid +backurl +backupcount +backupbeforeupgrade +backuparea +autorefresh +autoredirect +autoptp +autoplay +autogroup +autoenable +autoedge +autoassign +autoapprove +autoadjust +autoaddfields +authserver +authorship +authorizedkeys +authname +authmode +authlvl +authentication +authcn +authcfg +auid +attendance +attachmentsid +attachmentUploadDir +attachmentId +attachid +attaches +assigntype +assignedTo +assigned +asset +assertion +asid +asText +artistid +articleid +argv +argb +arg2 +archivo +archivedate +archiveDate +apple +apnum +apn +apinger +apiKey +aot +answerid +annotation +ann +animetitle +anidb +andor +ampm +amountup +amountdown +alturlenable +althostnames +alternate +allyid +allss +allqw +allowopts +allowinvalidsig +allowed +allowZipDownload +allids +allfields +allergies +allDepts +aliasimport +aliasesresolveinterval +aliA +algo +alertEmail +albumname +alI +al +ak +ajaxMode +ajaxCalendar +ajaxAction +aj +airdate +agentoption +affw +affiliate +af +advskew +advbase +advancedview +adsr +adresse +admins +adminlogin +adminUser +adminPass +adminPWD +adminEmail +adlr +addurl +addtype +addtxt +addtag +addsite +addrule +addressren +address0 +addpool +addonkey +additionalData +addfile +addevent +addcat +addacc +addUser +addOption +addComment +addBase +adaptivestart +adaptiveend +adapter +ad2syp +ad2syc +actreject +actpass +activityID +activationKey +actionadd +actionType +actid +actblock +act3 +act2 +acpage +ackqueue +ack +acfcomp +accLimit +acao +abbr +ZipName +Yol +Y +XL +WIDsubject +VerifyCode +VPSSignature +UserType +UserSettingsForm +UserName +UserLoginForm +UserForm +UserCreateForm +URI +TxAuthNo +TrYaG +Touchm +Toucha +Touch +Taxonomy +Task +Target +TO +TITL +SysMessage +SubsiteID +Submit2 +StoreCategory +StepID +Soups +ShareForm +SettingsForm +Setting +Service +SecurityKey +SearchForm +Sandwiches +Salads +SURN +SUBMIT +SPFX +SAMLRequest +Result +ResourceUploadForm +ResetRRD +Register +ReduxFrameworkPlugin +ROMN +RESULT +RESET +REPO +RECHECK +RC +Q +Public +ProjectUserForm +PostCodeResult +PostCode +Plain +Person +Perms +PayerStatus +ParentPage +Parent +PWD +PUBL +PHONE +Owner +Opt2 +Opt1 +OpenWith +Object +NetworkUserID +NetworkScreenName +NetworkPlatform +NSFX +NPFX +NOTE +NICK +N3tshcook +ModuleVar +LostPasswordForm +Lookup +LegendMode +Last4Digits +LATEST +KloutID +Joomla +Import +IPv6 +HowMany +Hkrkoz +Help +Heads +Hash +HMACKey +Good +GiftAid +GROUP +GRAPHS +GIVN +GENDER +Form +Flag +Filter +FileIDs +File +Field +FactoryName +FactoryId +FXuser +FXpass +FXimage +FONE +FILES +FIELDS +Export +ExpiryDate +Example +Event +EmailForm +EVEN +ENCRYPTION +E 
+Download +DevForceUpdate +Desserts +DUMP +DESC +Customer +CustomPage +Currency +Create +Coupon +ContentList +Contacts +City +Cancel +CallStatus +Calendar +CV2Result +CSalt +CID +CHIL +CAVV +CAPTCHA +CALN +Blog +Block +Beverages +AuthItemChild +Attachment +Albania +Admin +AddressStatus +AddressResult +Accounts +AVSCV2 +AUTH +AMOUNT +ALL +ABBR +4 +3DSecureStatus +23 +22 +21 +2 +17 +16 +15 +14 +13 +12 +11 +_escaped_fragment_ +__amp_source_origin +http_host +api-version +x-method-override +x-http-method-override +access_token +applicationid +assembly +assemblyPath +bloburl +buildid +checkin +checkno +classID +classid +classnames +codetext +connectionData +connectionId +connectionString +connectionToken +culture +customerNum +dataid +echo +FappId +flinkid +FLinkId +idNoticia +idUsuario +Usuario +Noticia +jobid +linkid +LocationPath +methodid +nextUrl +noscript +oauth_token +oauth_verifier +orderNumber +originalPath +OriginalUrl +OutputType +OverridePath +pagepath +PortalId +promoId +proxyRestUri +ReportPath +reqclient +requestType +resourceId +returnpath +searchword +secretcode +ServerPath +sourcePage +SourcePath +statuscode +suppress +tabname +tblname +testAction +uploadType +OAuthCookie +shell_path +user_token +adminCookie +fullapp +LandingUrl diff --git a/bbot/wordlists/raft-small-extensions-lowercase_CLEANED.txt b/bbot/wordlists/raft-small-extensions-lowercase_CLEANED.txt new file mode 100644 index 0000000000..b5f461182f --- /dev/null +++ b/bbot/wordlists/raft-small-extensions-lowercase_CLEANED.txt @@ -0,0 +1,833 @@ + +.0 +.0.0 +.0.1 +.0.2 +.0.3 +.0.4 +.0.5 +.0.8 +.0.html +.0.pdf +.00 +.00.8169 +.001 +.01 +.01.4511 +.025 +.03 +.04 +.06 +.07 +.075 +.077 +.08 +.083 +.09 +.1 +.1.0 +.1.1 +.1.2 +.1.3 +.1.5.swf +.1.6 +.1.html +.1.pdf +.10 +.10.html +.11 +.11.html +.112 +.12 +.125 +.13 +.134 +.14 +.15 +.156 +.16 +.17 +.18 +.19 +.1a +.1c +.2 +.2.0 +.2.1 +.2.2 +.2.3 +.2.6 +.2.9 +.2.html +.20 +.20.html +.2007 +.2008 +.2011 +.206 +.21 +.211 +.22 +.23 +.24 +.246 +.25 +.25.html +.26.13.391n35.50.38.816 +.26.24.165n35.50.24.134 +.26.56.247n35.52.03.605 +.26.html +.27.02.940n35.49.56.075 +.27.15.919n35.52.04.300 +.27.29.262n35.47.15.083 +.2a +.2ms2 +.3 +.3.0 +.3.1 +.3.2 +.3.2.min.js +.3.3 +.3.4 +.3.5 +.3.html +.30 +.30-i486 +.300 +.32 +.33 +.34 +.367 +.3gp +.4 +.4.0 +.4.1 +.4.2 +.4.6 +.4.7 +.4.9.php +.4.html +.40.00.573n35.42.57.445 +.403 +.43.58.040n35.38.35.826 +.44.04.344n35.38.35.077 +.44.08.714n35.39.08.499 +.44.10.892n35.38.49.246 +.44.27.243n35.41.29.367 +.44.29.976n35.37.51.790 +.44.32.445n35.36.10.206 +.44.34.800n35.38.08.156 +.44.37.128n35.40.54.403 +.44.40.556n35.40.53.025 +.44.45.013n35.38.36.211 +.44.46.104n35.38.22.970 +.44.48.130n35.38.25.969 +.44.52.162n35.38.50.456 +.44.58.315n35.38.53.455 +.445 +.45 +.45.01.562n35.38.38.778 +.45.04.359n35.38.39.112 +.45.06.789n35.38.22.556 +.45.10.717n35.38.41.989 +.4511 +.455 +.456 +.499 +.5 +.5.0 +.5.1 +.5.3 +.5.4 +.5.6 +.5.html +.5.php +.50 +.556 +.6 +.6.0 +.6.1 +.6.12 +.6.19 +.6.2 +.6.3 +.6.5 +.6.9 +.6.edu +.6.html +.605 +.7 +.7.0 +.7.1 +.7.2 +.7.3 +.7.html +.72 +.75.html +.778 +.790 +.7z +.8 +.8.1 +.8.2 +.8.3 +.816 +.8169 +.826 +.9 +.91 +.969 +.970 +.989 +.a +.access.login +.acgi +.action +.action2 +.adcode +.add +.admin +.adp +.ai +.ajax +.ajax.asp +.ajax.php +.alt +.app +.apsx +.aquery +.array-keys +.array-merge +.array-rand +.as +.asa +.asax +.asax.cs +.asax.resx +.asax.vb +.asc +.ascx +.ascx.cs +.ascx.vb +.asd +.asf +.ashx +.asm +.asmx +.asp +.aspx +.assets +.asx +.at +.atom +.au +.avi +.award +.awm +.axd +.b +.back +.backup +.bad +.bak +.bak2 
+.bat +.bck +.bhtml +.bin +.bk +.bkp +.blog +.bml +.bmp +.bok +.browse +.bsp +.btr +.bu +.bz2 +.c +.ca +.cab +.cache +.calendar +.call-user-func-array +.captcha +.captcha.aspx +.cart +.casino +.cat +.cc +.cdr +.cer +.cfc +.cfg +.cfg.php +.cfm +.cfm.cfm +.cfml +.cgi +.changelang.php +.children +.chm +.class +.class.php +.cmd +.cms +.cn +.cnf +.co.uk +.cocomore.txt +.code +.com +.com-redirect +.com.crt +.com.html +.com_backup_giornaliero +.com_backup_settimanale +.common.php +.conf +.config +.config.php +.content +.contrib +.controls +.copy +.core +.count +.cp +.crt +.cs +.csi +.csp +.csproj +.csproj.user +.css +.csv +.cur +.custom +.cz +.d +.dat +.data +.db +.dbf +.dcr +.de +.de.html +.de.txt +.deb +.default +.delete +.detail +.details.php +.dev +.dhtml +.dic +.dict.php +.diff +.dir +.disabled +.dist.php +.divx +.djvu +.dll +.dmg +.do +.doc +.docx +.dot +.ds +.dta +.dtd +.dwf +.dwg +.dwt +.e +.ece +.edit +.edu +.egov +.email +.eml +.en +.en.html +.en.php +.enfinity +.enu +.eot +.ep +.epc +.epl +.eps +.epub +.err +.error +.errors +.es +.eu +.exclude +.exe +.extract +.f4v +.faces +.fancybox +.fcgi +.feed +.fil +.file +.file-get-contents +.file-put-contents +.filemtime +.files +.filesize +.film +.fla +.flv +.fopen +.form +.fpl +.fr +.fr.html +.framework +.fread +.fsockopen +.functions.php +.g +.geo +.getimagesize +.getmapimage +.gif +.gif.php +.gif_var_de +.git +.go +.googlebook +.gpx +.grp +.gz +.h +.hml +.hmtl +.home +.hotelname +.hqx +.ht +.hta +.htaccess +.htc +.htlm +.htm +.html +.htmls +.htx +.i +.ice +.ico +.ics +.ida +.idq +.idx +.ihtml +.image +.images +.img +.implode +.in-array +.inc +.inc.asp +.inc.html +.inc.js +.inc.php +.include +.include-once +.includes +.index +.index.html +.index.php +.inf +.info +.ini +.ini.php +.ini.sample +.iso +.it +.it.html +.j +.jad +.jar +.java +.jbf +.jhtml +.jnlp +.jp +.jpe +.jpeg +.jpg +.js +.js2 +.jsf +.json +.jsp +.jspa +.jspf +.jspx +.kml +.kmz +.l +.lang-en.php +.lasso +.layer +.lbi +.lck +.letter +.lib +.lib.php +.lic +.licx +.link +.list +.listevents +.lnk +.load +.local +.local.php +.lock +.log +.log.0 +.login +.login.php +.lst +.m +.m3u +.m4v +.main +.maninfo +.map +.master +.master.cs +.master.vb +.mbox +.mc_id +.mdb +.media +.menu.php +.mgi +.mhtml +.mi +.mid +.min.js +.mkdir +.mno +.mod +.mov +.mp2 +.mp3 +.mp4 +.mpeg +.mpg +.mpl +.msg +.msi +.mso +.mspx +.mv +.mvc +.mysql +.mysql-connect +.mysql-pconnect +.mysql-query +.mysql-result +.mysql-select-db +.net +.net.html +.new +.new.html +.new.php +.news +.nl +.none +.nsf +.num +.o +.ocx +.odt +.off +.ogg +.old +.old.php +.old2 +.opendir +.opml +.org +.orig +.original +.oui +.out +.outcontrol +.p +.p3p +.p7b +.pac +.pad +.page +.pages +.parse.errors +.pd +.pdb +.pdf +.pem +.pfx +.pgp +.pgt +.ph +.php +.php-dist +.php1 +.php2 +.php3 +.php4 +.php5 +.php_files +.phpp +.phps +.phtm +.phtml +.pl +.plx +.pm +.png +.pnp +.po +.pop_3d_viewer +.pop_formata_viewer +.popup.php +.popup.pop_3d_viewer +.popup.pop_formata_viewer +.portal +.pot +.pps +.ppt +.pptx +.preg-match +.prep +.prev_next +.preview +.preview-content.php +.prg +.price +.print +.print.html +.print.php +.printable +.process +.product_details +.prt +.ps +.psd +.psp +.psql +.pub +.pvk +.pwd +.py +.pyc +.q +.query +.r +.ra +.ram +.randomhouse +.rar +.raw +.rb +.rc +.rdf +.read +.readme +.readme_var_de +.rec +.red +.reg +.registration +.require +.require-once +.results +.resx +.rhtml +.rm +.rpm +.rss +.rtf +.ru +.ru.html +.run +.run.adcode +.s +.s7 +.sample +.sav +.save +.scc +.scripts +.sdb +.se +.sea +.seam +.search +.search +.sema 
+.sendtoafriendform +.ser +.server +.session +.session-start +.settings.php +.setup +.sh +.shop +.shtm +.shtml +.simplexml-load-file +.sis +.sit +.site +.sitemap +.sitemap.xml +.sitx +.skins +.sln +.smi +.smil +.sponsors +.sql +.sql.gz +.squery +.src +.srv +.ssf +.ssi +.start +.static +.ste +.stm +.store +.storefront +.strpos +.subscribe +.suo +.svc +.svg +.svn +.swf +.swi +.swp +.sxw +.t +.taf +.tar +.tar.bz2 +.tar.gz +.tcl +.tem +.temp +.template +.template.php +.templates +.test +.text +.textsearch +.tgz +.thtml +.tif +.tiff +.tmp +.tmpl +.top +.torrent +.tpl +.trck +.ttf +.tv +.txt +.txt.gz +.txt.php +.types +.ua +.uguide +.uk +.unlink +.unsubscribe +.url +.us +.user +.userloginpopup.php +.v +.vb +.vbproj +.vbproj.webinfo +.vbs +.vcf +.vcs +.view +.visapopup.php +.visapopupvalid.php +.vm +.vorteil +.vspscc +.vssscc +.w +.war +.wav +.wbp +.wci +.web +.web.ui.webresource.axd +.webinfo +.wma +.wmf +.wml +.wmv +.woa +.work +.wpd +.ws +.wsdl +.wvx +.wws +.x +.x-affiliate +.x-affiliate_var_de +.x-aom +.x-aom_var_de +.x-fancycat +.x-fancycat_var_de +.x-fcomp +.x-fcomp_var_de +.x-giftreg +.x-giftreg_var_de +.x-magnifier +.x-magnifier_var_de +.x-offers +.x-offers_var_de +.x-pconf +.x-pconf_var_de +.x-rma +.x-rma_var_de +.x-survey +.xhtm +.xhtml +.xls +.xlsx +.xml +.xpi +.xpml +.xsd +.xsl +.xslt +.xspf +.y +.z +.zdat +.zif +.zip diff --git a/bbot/wordlists/top_open_ports_nmap.txt b/bbot/wordlists/top_open_ports_nmap.txt new file mode 100644 index 0000000000..30f1bcefd4 --- /dev/null +++ b/bbot/wordlists/top_open_ports_nmap.txt @@ -0,0 +1,8377 @@ +80 +23 +443 +21 +22 +25 +3389 +110 +445 +139 +143 +53 +135 +3306 +8080 +1723 +111 +995 +993 +5900 +1025 +587 +8888 +199 +1720 +465 +548 +113 +81 +6001 +10000 +514 +5060 +179 +1026 +2000 +8443 +8000 +32768 +554 +26 +1433 +49152 +2001 +515 +8008 +49154 +1027 +5666 +646 +5000 +5631 +631 +49153 +8081 +2049 +88 +79 +5800 +106 +2121 +1110 +49155 +6000 +513 +990 +5357 +427 +49156 +543 +544 +5101 +144 +7 +389 +8009 +3128 +444 +9999 +5009 +7070 +5190 +3000 +5432 +1900 +3986 +13 +1029 +9 +5051 +6646 +49157 +1028 +873 +1755 +2717 +4899 +9100 +119 +37 +1000 +3001 +5001 +82 +10010 +1030 +9090 +2107 +1024 +2103 +6004 +1801 +5050 +19 +8031 +1041 +255 +1049 +1048 +2967 +1053 +3703 +1056 +1065 +1064 +1054 +17 +808 +3689 +1031 +1044 +1071 +5901 +100 +9102 +8010 +2869 +1039 +5120 +4001 +9000 +2105 +636 +1038 +2601 +1 +7000 +1066 +1069 +625 +311 +280 +254 +4000 +1761 +5003 +2002 +2005 +1998 +1032 +1050 +6112 +3690 +1521 +2161 +6002 +1080 +2401 +4045 +902 +7937 +787 +1058 +2383 +32771 +1033 +1040 +1059 +50000 +5555 +10001 +1494 +593 +2301 +3 +3268 +7938 +1234 +1022 +1074 +8002 +1036 +1035 +9001 +1037 +464 +497 +1935 +6666 +2003 +6543 +1352 +24 +3269 +1111 +407 +500 +20 +2006 +3260 +15000 +1218 +1034 +4444 +264 +2004 +33 +1042 +42510 +999 +3052 +1023 +1068 +222 +7100 +888 +563 +1717 +2008 +992 +32770 +32772 +7001 +8082 +2007 +5550 +2009 +5801 +1043 +512 +2701 +7019 +50001 +1700 +4662 +2065 +2010 +42 +9535 +2602 +3333 +161 +5100 +5002 +2604 +4002 +6059 +1047 +8192 +8193 +2702 +6789 +9595 +1051 +9594 +9593 +16993 +16992 +5226 +5225 +32769 +3283 +1052 +8194 +1055 +1062 +9415 +8701 +8652 +8651 +8089 +65389 +65000 +64680 +64623 +55600 +55555 +52869 +35500 +33354 +23502 +20828 +1311 +1060 +4443 +1067 +13782 +5902 +366 +9050 +1002 +85 +5500 +5431 +1864 +1863 +8085 +51103 +49999 +45100 +10243 +49 +6667 +90 +27000 +1503 +6881 +1500 +8021 +340 +5566 +8088 +2222 +9071 +8899 +6005 +9876 +1501 +5102 +32774 +32773 +9101 +5679 +163 +648 +146 +1666 +901 +83 +9207 +8001 +8083 +8084 +5004 
+3476 +5214 +14238 +12345 +912 +30 +2605 +2030 +6 +541 +8007 +3005 +4 +1248 +2500 +880 +306 +4242 +1097 +9009 +2525 +1086 +1088 +8291 +52822 +6101 +900 +7200 +2809 +800 +32775 +12000 +1083 +211 +987 +705 +20005 +711 +13783 +6969 +3071 +5269 +5222 +1085 +1046 +5986 +5985 +5987 +5989 +5988 +2190 +3301 +11967 +8600 +3766 +7627 +8087 +30000 +9010 +7741 +14000 +3367 +1099 +1098 +3031 +2718 +6580 +15002 +4129 +6901 +3827 +3580 +2144 +8181 +3801 +1718 +2811 +9080 +2135 +1045 +2399 +3017 +10002 +1148 +9002 +8873 +2875 +9011 +5718 +8086 +20000 +3998 +2607 +11110 +4126 +9618 +2381 +1096 +3300 +3351 +1073 +8333 +3784 +5633 +15660 +6123 +3211 +1078 +5910 +5911 +3659 +3551 +2260 +2160 +2100 +16001 +3325 +3323 +1104 +9968 +9503 +9502 +9485 +9290 +9220 +8994 +8649 +8222 +7911 +7625 +7106 +65129 +63331 +6156 +6129 +60020 +5962 +5961 +5960 +5959 +5925 +5877 +5825 +5810 +58080 +57294 +50800 +50006 +50003 +49160 +49159 +49158 +48080 +40193 +34573 +34572 +34571 +3404 +33899 +32782 +32781 +31038 +30718 +28201 +27715 +25734 +24800 +22939 +21571 +20221 +20031 +19842 +19801 +19101 +17988 +1783 +16018 +16016 +15003 +14442 +13456 +10629 +10628 +10626 +10621 +10617 +10616 +10566 +10025 +10024 +10012 +1169 +5030 +5414 +1057 +6788 +1947 +1094 +1075 +1108 +4003 +1081 +1093 +4449 +1687 +1840 +1100 +1063 +1061 +9900 +1107 +1106 +9500 +20222 +7778 +1077 +1310 +2119 +2492 +1070 +8400 +1272 +6389 +7777 +1072 +1079 +1082 +8402 +89 +691 +1001 +32776 +1999 +212 +2020 +6003 +7002 +2998 +50002 +3372 +898 +5510 +32 +2033 +99 +749 +425 +5903 +43 +5405 +6106 +13722 +6502 +7007 +458 +9666 +8100 +3737 +5298 +1152 +8090 +2191 +3011 +1580 +9877 +5200 +3851 +3371 +3370 +3369 +7402 +5054 +3918 +3077 +7443 +3493 +3828 +1186 +2179 +1183 +19315 +19283 +3995 +5963 +1124 +8500 +1089 +10004 +2251 +1087 +5280 +3871 +3030 +62078 +5904 +9091 +4111 +1334 +3261 +2522 +5859 +1247 +9944 +9943 +9110 +8654 +8254 +8180 +8011 +7512 +7435 +7103 +61900 +61532 +5922 +5915 +5822 +56738 +55055 +51493 +50636 +50389 +49175 +49165 +49163 +3546 +32784 +27355 +27353 +27352 +24444 +19780 +18988 +16012 +15742 +10778 +4006 +2126 +4446 +3880 +1782 +1296 +9998 +9040 +32779 +1021 +32777 +2021 +32778 +616 +666 +700 +5802 +4321 +545 +1524 +1112 +49400 +84 +38292 +2040 +32780 +3006 +2111 +1084 +1600 +2048 +2638 +9111 +6699 +16080 +6547 +6007 +1533 +5560 +2106 +1443 +667 +720 +2034 +555 +801 +6025 +3221 +3826 +9200 +2608 +4279 +7025 +11111 +3527 +1151 +8200 +8300 +6689 +9878 +10009 +8800 +5730 +2394 +2393 +2725 +6566 +9081 +5678 +5906 +3800 +4550 +5080 +1201 +3168 +3814 +1862 +1114 +6510 +3905 +8383 +3914 +3971 +3809 +5033 +7676 +3517 +4900 +3869 +9418 +2909 +3878 +8042 +1091 +1090 +3920 +6567 +1138 +3945 +1175 +10003 +3390 +5907 +3889 +1131 +8292 +5087 +1119 +1117 +4848 +7800 +16000 +3324 +3322 +5221 +4445 +9917 +9575 +9099 +9003 +8290 +8099 +8093 +8045 +7921 +7920 +7496 +6839 +6792 +6779 +6692 +6565 +60443 +5952 +5950 +5862 +5850 +5815 +5811 +57797 +56737 +5544 +55056 +5440 +54328 +54045 +52848 +52673 +50500 +50300 +49176 +49167 +49161 +44501 +44176 +41511 +40911 +32785 +32783 +30951 +27356 +26214 +25735 +19350 +18101 +18040 +17877 +16113 +15004 +14441 +12265 +12174 +10215 +10180 +4567 +6100 +5061 +4004 +4005 +8022 +9898 +7999 +1271 +1199 +3003 +1122 +2323 +4224 +2022 +617 +777 +417 +714 +6346 +981 +722 +1009 +4998 +70 +1076 +5999 +10082 +765 +301 +524 +668 +2041 +6009 +1417 +1434 +259 +44443 +1984 +2068 +7004 +1007 +4343 +416 +2038 +6006 +109 +4125 +1461 +9103 +911 +726 +1010 +2046 +2035 +7201 +687 +2013 +481 +125 +6669 +6668 +903 +1455 +683 +1011 +2043 +2047 +256 +9929 
+5998 +406 +31337 +44442 +783 +843 +2042 +2045 +4040 +6060 +6051 +1145 +3916 +9443 +9444 +1875 +7272 +4252 +4200 +7024 +1556 +13724 +1141 +1233 +8765 +1137 +3963 +5938 +9191 +3808 +8686 +3981 +2710 +3852 +3849 +3944 +3853 +9988 +1163 +4164 +3820 +6481 +3731 +5081 +40000 +8097 +4555 +3863 +1287 +4430 +7744 +7913 +1166 +1164 +1165 +8019 +10160 +4658 +7878 +3304 +3307 +1259 +1092 +7278 +3872 +10008 +7725 +3410 +1971 +3697 +3859 +3514 +4949 +4147 +7900 +5353 +3931 +8675 +1277 +3957 +1213 +2382 +6600 +3700 +3007 +4080 +1113 +3969 +1132 +1309 +3848 +7281 +3907 +3972 +3968 +1126 +5223 +1217 +3870 +3941 +8293 +1719 +1300 +2099 +6068 +3013 +3050 +1174 +3684 +2170 +3792 +1216 +5151 +7123 +7080 +22222 +4143 +5868 +8889 +12006 +1121 +3119 +8015 +10023 +3824 +1154 +20002 +3888 +4009 +5063 +3376 +1185 +1198 +1192 +1972 +1130 +1149 +4096 +6500 +8294 +3990 +3993 +8016 +5242 +3846 +3929 +1187 +5074 +5909 +8766 +5905 +1102 +2800 +9941 +9914 +9815 +9673 +9643 +9621 +9501 +9409 +9198 +9197 +9098 +8996 +8987 +8877 +8676 +8648 +8540 +8481 +8385 +8189 +8098 +8095 +8050 +7929 +7770 +7749 +7438 +7241 +7051 +7050 +6896 +6732 +6711 +65310 +6520 +6504 +6247 +6203 +61613 +60642 +60146 +60123 +5981 +5940 +59202 +59201 +59200 +5918 +5914 +59110 +5899 +58838 +5869 +58632 +58630 +5823 +5818 +5812 +5807 +58002 +58001 +57665 +55576 +55020 +53535 +5339 +53314 +53313 +53211 +52853 +52851 +52850 +52849 +52847 +5279 +52735 +52710 +52660 +5212 +51413 +51191 +5040 +50050 +49401 +49236 +49195 +49186 +49171 +49168 +49164 +4875 +47544 +46996 +46200 +44709 +41523 +41064 +40811 +3994 +39659 +39376 +39136 +38188 +38185 +37839 +35513 +33554 +33453 +32835 +32822 +32816 +32803 +32792 +32791 +30704 +30005 +29831 +29672 +28211 +27357 +26470 +23796 +23052 +2196 +21792 +19900 +18264 +18018 +17595 +16851 +16800 +16705 +15402 +15001 +12452 +12380 +12262 +12215 +12059 +12021 +10873 +10058 +10034 +10022 +10011 +2910 +1594 +1658 +1583 +3162 +2920 +1812 +26000 +2366 +4600 +1688 +1322 +2557 +1095 +1839 +2288 +1123 +5968 +9600 +1244 +1641 +2200 +1105 +6550 +5501 +1328 +2968 +1805 +1914 +1974 +31727 +3400 +1301 +1147 +1721 +1236 +2501 +2012 +6222 +1220 +1109 +1347 +502 +701 +2232 +2241 +4559 +710 +10005 +5680 +623 +913 +1103 +780 +930 +803 +725 +639 +540 +102 +5010 +1222 +953 +8118 +9992 +1270 +27 +123 +86 +447 +1158 +442 +18000 +419 +931 +874 +856 +250 +475 +2044 +441 +210 +6008 +7003 +5803 +1008 +556 +6103 +829 +3299 +55 +713 +1550 +709 +2628 +223 +3025 +87 +57 +10083 +5520 +980 +251 +1013 +9152 +1212 +2433 +1516 +333 +2011 +748 +1350 +1526 +7010 +1241 +127 +157 +220 +1351 +2067 +684 +77 +4333 +674 +943 +904 +840 +825 +792 +732 +1020 +1006 +657 +557 +610 +1547 +523 +996 +2025 +602 +3456 +862 +600 +2903 +257 +1522 +1353 +6662 +998 +660 +729 +730 +731 +782 +1357 +3632 +3399 +6050 +2201 +971 +969 +905 +846 +839 +823 +822 +795 +790 +778 +757 +659 +225 +1015 +1014 +1012 +655 +786 +6017 +6670 +690 +388 +44334 +754 +5011 +98 +411 +1525 +3999 +740 +12346 +802 +1337 +1127 +2112 +1414 +2600 +621 +606 +59 +928 +924 +922 +921 +918 +878 +864 +859 +806 +805 +728 +252 +1005 +1004 +641 +758 +669 +38037 +715 +1413 +2104 +1229 +3817 +6063 +6062 +6055 +6052 +6030 +6021 +6015 +6010 +3220 +6115 +3940 +2340 +8006 +4141 +3810 +1565 +3511 +33000 +2723 +9202 +4036 +4035 +2312 +3652 +3280 +4243 +4298 +4297 +4294 +4262 +4234 +4220 +4206 +22555 +9300 +7121 +1927 +4433 +5070 +2148 +1168 +9979 +7998 +4414 +1823 +3653 +1223 +8201 +4876 +3240 +2644 +4020 +2436 +3906 +4375 +4024 +5581 +5580 +9694 +6251 +7345 +7325 +7320 +7300 +3121 +5473 +5475 +3600 +3943 +4912 +2142 +1976 +1975 
+5202 +5201 +4016 +5111 +9911 +10006 +3923 +3930 +1221 +2973 +3909 +5814 +3080 +4158 +3526 +1911 +5066 +2711 +2187 +3788 +3796 +3922 +2292 +16161 +4881 +3979 +3670 +4174 +3102 +3483 +2631 +1750 +3897 +7500 +5553 +5554 +9875 +4570 +3860 +3712 +8052 +2083 +8883 +2271 +4606 +1208 +3319 +3935 +3430 +1215 +3962 +3368 +3964 +1128 +5557 +4010 +9400 +1605 +3291 +7400 +5005 +1699 +1195 +5053 +3813 +1712 +3002 +3765 +3806 +43000 +2371 +3532 +3799 +3790 +3599 +3850 +4355 +4358 +4357 +4356 +5433 +3928 +4713 +4374 +3961 +9022 +3911 +3396 +7628 +3200 +1753 +3967 +2505 +5133 +3658 +8471 +1314 +2558 +6161 +4025 +3089 +9021 +30001 +8472 +5014 +9990 +1159 +1157 +1308 +5723 +3443 +4161 +1135 +9211 +9210 +4090 +7789 +6619 +9628 +12121 +4454 +3680 +3167 +3902 +3901 +3890 +3842 +16900 +4700 +4687 +8980 +1196 +4407 +3520 +3812 +5012 +10115 +1615 +2902 +4118 +2706 +2095 +2096 +3363 +5137 +3795 +8005 +10007 +3515 +8003 +3847 +3503 +5252 +27017 +2197 +4120 +1180 +5722 +1134 +1883 +1249 +3311 +27350 +3837 +2804 +4558 +4190 +2463 +1204 +4056 +1184 +19333 +9333 +3913 +3672 +4342 +4877 +3586 +8282 +1861 +1752 +9592 +1701 +6085 +2081 +4058 +2115 +8900 +4328 +2958 +2957 +7071 +3899 +2531 +2691 +5052 +1638 +3419 +2551 +5908 +4029 +3603 +1336 +2082 +1143 +3602 +1176 +4100 +3486 +6077 +4800 +2062 +1918 +12001 +12002 +9084 +7072 +1156 +2313 +3952 +4999 +5023 +2069 +28017 +27019 +27018 +3439 +6324 +1188 +1125 +3908 +7501 +8232 +1722 +2988 +10500 +1136 +1162 +10020 +22128 +1211 +3530 +12009 +9005 +3057 +3956 +4325 +1191 +3519 +5235 +1144 +4745 +1901 +1807 +2425 +3210 +32767 +5015 +5013 +3622 +4039 +10101 +5233 +5152 +3983 +3982 +9616 +4369 +3728 +3621 +2291 +5114 +7101 +1315 +2087 +5234 +1635 +3263 +4121 +4602 +2224 +3949 +9131 +3310 +3937 +2253 +3882 +3831 +2376 +2375 +3876 +3362 +3663 +3334 +47624 +1825 +4302 +5721 +1279 +2606 +1173 +22125 +17500 +12005 +6113 +1973 +3793 +3637 +8954 +3742 +9667 +41795 +41794 +4300 +8445 +12865 +3365 +4665 +3190 +3577 +3823 +2261 +2262 +2812 +1190 +22350 +3374 +4135 +2598 +2567 +1167 +8470 +10443 +8116 +3830 +8880 +2734 +3505 +3388 +3669 +1871 +8025 +1958 +3681 +3014 +8999 +4415 +3414 +4101 +6503 +9700 +3683 +1150 +18333 +4376 +3991 +3989 +3992 +2302 +3415 +1179 +3946 +2203 +4192 +4418 +2712 +25565 +4065 +5820 +3915 +2080 +3103 +2265 +8202 +2304 +8060 +4119 +4401 +1560 +3904 +4534 +1835 +1116 +8023 +8474 +3879 +4087 +4112 +6350 +9950 +3506 +3948 +3825 +2325 +1800 +1153 +6379 +3839 +4689 +47806 +5912 +3975 +3980 +4113 +2847 +2070 +3425 +6628 +3997 +3513 +3656 +2335 +1182 +1954 +3996 +4599 +2391 +3479 +5021 +5020 +1558 +1924 +4545 +2991 +6065 +1290 +1559 +1317 +5423 +1707 +5055 +9975 +9971 +9919 +9915 +9912 +9910 +9908 +9901 +9844 +9830 +9826 +9825 +9823 +9814 +9812 +9777 +9745 +9683 +9680 +9679 +9674 +9665 +9661 +9654 +9648 +9620 +9619 +9613 +9583 +9527 +9513 +9493 +9478 +9464 +9454 +9364 +9351 +9183 +9170 +9133 +9130 +9128 +9125 +9065 +9061 +9044 +9037 +9013 +9004 +8925 +8898 +8887 +8882 +8879 +8878 +8865 +8843 +8801 +8798 +8790 +8772 +8756 +8752 +8736 +8680 +8673 +8658 +8655 +8644 +8640 +8621 +8601 +8562 +8539 +8531 +8530 +8515 +8484 +8479 +8477 +8455 +8454 +8453 +8452 +8451 +8409 +8339 +8308 +8295 +8273 +8268 +8255 +8248 +8245 +8144 +8133 +8110 +8092 +8064 +8037 +8029 +8018 +8014 +7975 +7895 +7854 +7853 +7852 +7830 +7813 +7788 +7780 +7772 +7771 +7688 +7685 +7654 +7637 +7600 +7555 +7553 +7456 +7451 +7231 +7218 +7184 +7119 +7104 +7102 +7092 +7068 +7067 +7043 +7033 +6973 +6972 +6956 +6942 +6922 +6920 +6897 +6877 +6780 +6734 +6725 +6710 +6709 +6650 +6647 +6644 +6606 +65514 +65488 +6535 +65311 
+65048 +64890 +64727 +64726 +64551 +64507 +64438 +64320 +6412 +64127 +64080 +63803 +63675 +6349 +63423 +6323 +63156 +6310 +63105 +6309 +62866 +6274 +6273 +62674 +6259 +62570 +62519 +6250 +62312 +62188 +62080 +62042 +62006 +61942 +61851 +61827 +61734 +61722 +61669 +61617 +61616 +61516 +61473 +61402 +6126 +6120 +61170 +61169 +61159 +60989 +6091 +6090 +60794 +60789 +60783 +60782 +60753 +60743 +60728 +60713 +6067 +60628 +60621 +60612 +60579 +60544 +60504 +60492 +60485 +60403 +60401 +60377 +60279 +60243 +60227 +60177 +60111 +60086 +60055 +60003 +60002 +60000 +59987 +59841 +59829 +59810 +59778 +5975 +5974 +5971 +59684 +5966 +5958 +59565 +5954 +5953 +59525 +59510 +59509 +59504 +5949 +59499 +5948 +5945 +5939 +5936 +5934 +59340 +5931 +5927 +5926 +5924 +5923 +59239 +5921 +5920 +59191 +5917 +59160 +59149 +59122 +59107 +59087 +58991 +58970 +58908 +5888 +5887 +5881 +5878 +5875 +5874 +58721 +5871 +58699 +58634 +58622 +58610 +5860 +5858 +58570 +58562 +5854 +5853 +5852 +5849 +58498 +5848 +58468 +5845 +58456 +58446 +58430 +5840 +5839 +5838 +58374 +5836 +5834 +5831 +58310 +58305 +5827 +5826 +58252 +5824 +5821 +5817 +58164 +58109 +58107 +5808 +58072 +5806 +5804 +57999 +57988 +57928 +57923 +57896 +57891 +57733 +57730 +57702 +57681 +57678 +57576 +57479 +57398 +57387 +5737 +57352 +57350 +5734 +57347 +57335 +5732 +57325 +57123 +5711 +57103 +57020 +56975 +56973 +56827 +56822 +56810 +56725 +56723 +56681 +5667 +56668 +5665 +56591 +56535 +56507 +56293 +56259 +5622 +5621 +5620 +5612 +5611 +56055 +56016 +55948 +55910 +55907 +55901 +55781 +55773 +55758 +55721 +55684 +55652 +55635 +55579 +55569 +55568 +55556 +5552 +55527 +55479 +55426 +55400 +55382 +55350 +55312 +55227 +55187 +55183 +55000 +54991 +54987 +54907 +54873 +54741 +54722 +54688 +54658 +54605 +5458 +5457 +54551 +54514 +5444 +5442 +5441 +54323 +54321 +54276 +54263 +54235 +54127 +54101 +54075 +53958 +53910 +53852 +53827 +53782 +5377 +53742 +5370 +53690 +53656 +53639 +53633 +53491 +5347 +53469 +53460 +53370 +53361 +53319 +53240 +53212 +53189 +53178 +53085 +52948 +5291 +52893 +52675 +52665 +5261 +5259 +52573 +52506 +52477 +52391 +52262 +52237 +52230 +52226 +52225 +5219 +52173 +52071 +52046 +52025 +52003 +52002 +52001 +52000 +51965 +51961 +51909 +51906 +51809 +51800 +51772 +51771 +51658 +51582 +51515 +51488 +51485 +51484 +5147 +51460 +51423 +51366 +51351 +51343 +51300 +5125 +51240 +51235 +51234 +51233 +5122 +5121 +51139 +51118 +51067 +51037 +51020 +51011 +50997 +5098 +5096 +5095 +50945 +5090 +50903 +5088 +50887 +50854 +50849 +50836 +50835 +50834 +50833 +50831 +50815 +50809 +50787 +50733 +50692 +50585 +50577 +50576 +50545 +50529 +50513 +50356 +50277 +50258 +50246 +50224 +50205 +50202 +50198 +50189 +5017 +5016 +50101 +50040 +50019 +50016 +49927 +49803 +49765 +49762 +49751 +49678 +49603 +49597 +49522 +49521 +49520 +49519 +49500 +49498 +49452 +49398 +49372 +49352 +4931 +49302 +49275 +49241 +49235 +49232 +49228 +49216 +49213 +49211 +49204 +49203 +49202 +49201 +49197 +49196 +49191 +49190 +49189 +49179 +49173 +49172 +49170 +49169 +49166 +49132 +49048 +4903 +49002 +48973 +48967 +48966 +48925 +48813 +48783 +48682 +48648 +48631 +4860 +4859 +48434 +48356 +4819 +48167 +48153 +48127 +48083 +48067 +48009 +47969 +47966 +4793 +47860 +47858 +47850 +4778 +47777 +4771 +4770 +47700 +4767 +47634 +4760 +47595 +47581 +47567 +47448 +47372 +47348 +47267 +47197 +4712 +47119 +47029 +47012 +46992 +46813 +46593 +4649 +4644 +46436 +46418 +46372 +46310 +46182 +46171 +46115 +4609 +46069 +46034 +45960 +45864 +45777 +45697 +45624 +45602 +45463 +45438 +45413 +4530 +45226 +45220 +4517 +4516 +45164 
+45136 +45050 +45038 +44981 +44965 +4476 +4471 +44711 +44704 +4464 +44628 +44616 +44541 +44505 +44479 +44431 +44410 +44380 +44200 +44119 +44101 +44004 +4388 +43868 +4384 +43823 +43734 +43690 +43654 +43425 +43242 +43231 +43212 +43143 +43139 +43103 +43027 +43018 +43002 +42990 +42906 +42735 +42685 +42679 +42675 +42632 +42590 +42575 +42560 +42559 +42452 +42449 +42322 +42276 +42251 +42158 +42127 +42035 +42001 +41808 +41773 +41632 +41551 +41442 +41398 +41348 +41345 +41342 +41318 +41281 +41250 +41142 +41123 +40951 +40834 +40812 +40754 +40732 +40712 +40628 +40614 +40513 +40489 +40457 +40400 +40393 +40306 +40011 +40005 +40003 +40002 +40001 +39917 +39895 +39883 +39869 +39795 +39774 +39763 +39732 +39630 +39489 +39482 +39433 +39380 +39293 +39265 +39117 +39067 +38936 +38805 +38780 +38764 +38761 +38570 +38561 +38546 +38481 +38446 +38358 +38331 +38313 +38270 +38224 +38205 +38194 +38029 +37855 +37789 +37777 +37674 +37647 +37614 +37607 +37522 +37393 +37218 +37185 +37174 +37151 +37121 +36983 +36962 +36950 +36914 +36824 +36823 +36748 +36710 +36694 +36677 +36659 +36552 +36530 +36508 +36436 +36368 +36275 +36256 +36105 +36104 +36046 +35986 +35929 +35906 +35901 +35900 +35879 +35731 +35593 +35553 +35506 +35401 +35393 +35392 +35349 +35272 +35217 +35131 +35116 +35050 +35033 +34875 +34833 +34783 +34765 +34728 +34683 +34510 +34507 +34401 +34381 +34341 +34317 +34189 +34096 +34036 +34021 +33895 +33889 +33882 +33879 +33841 +33605 +33604 +33550 +33523 +33522 +33444 +33395 +33367 +33337 +33335 +33327 +33277 +33203 +33200 +33192 +33175 +33124 +33087 +33070 +33017 +33011 +32976 +32961 +32960 +32944 +32932 +32911 +32910 +32908 +32905 +32904 +32898 +32897 +32888 +32871 +32869 +32868 +32858 +32842 +32837 +32820 +32815 +32814 +32807 +32799 +32798 +32797 +32790 +32789 +32788 +32765 +32764 +32261 +32260 +32219 +32200 +32102 +32088 +32031 +32022 +32006 +31728 +31657 +31522 +31438 +31386 +31339 +31072 +31058 +31033 +30896 +30705 +30659 +30644 +30599 +30519 +30299 +30195 +30087 +29810 +29507 +29243 +29152 +29045 +28967 +28924 +28851 +28850 +28717 +28567 +28374 +28142 +28114 +27770 +27537 +27521 +27372 +27351 +27316 +27204 +27087 +27075 +27074 +27055 +27016 +27015 +26972 +26669 +26417 +26340 +26007 +26001 +25847 +25717 +25703 +25486 +25473 +25445 +25327 +25288 +25262 +25260 +25174 +24999 +24616 +24552 +24416 +24392 +24218 +23953 +23887 +23723 +23451 +23430 +23382 +23342 +23296 +23270 +23228 +23219 +23040 +23017 +22969 +22959 +22882 +22769 +22727 +22719 +22711 +22563 +22341 +22290 +22223 +22200 +22177 +22100 +22063 +22022 +21915 +21891 +21728 +21634 +21631 +21473 +21078 +21011 +20990 +20940 +20934 +20883 +20734 +20473 +20280 +20228 +20227 +20226 +20225 +20224 +20223 +20180 +20179 +20147 +20127 +20125 +20118 +20111 +20106 +20102 +20089 +20085 +20080 +20076 +20052 +20039 +20032 +20021 +20017 +20011 +19996 +19995 +19852 +19715 +19634 +19612 +19501 +19464 +19403 +19353 +19201 +19200 +19130 +19010 +18962 +18910 +18887 +18874 +18669 +18569 +18517 +18505 +18439 +18380 +18337 +18336 +18231 +18148 +18080 +18015 +18012 +17997 +17985 +17969 +17867 +17860 +17802 +17801 +17715 +17702 +17701 +17700 +17413 +17409 +17255 +17251 +17129 +17089 +17070 +17017 +17016 +16901 +16845 +16797 +16725 +16724 +16723 +16464 +16372 +16349 +16297 +16286 +16283 +16273 +16270 +16048 +15915 +15758 +15730 +15722 +15677 +15670 +15646 +15645 +15631 +15550 +15448 +15344 +15317 +15275 +15191 +15190 +15145 +15050 +15005 +14916 +14891 +14827 +14733 +14693 +14545 +14534 +14444 +14443 +14418 +14254 +14237 +14218 +14147 +13899 +13846 +13784 +13766 +13730 +13723 +13695 +13580 
+13502 +13359 +13340 +13318 +13306 +13265 +13264 +13261 +13250 +13229 +13194 +13193 +13192 +13188 +13167 +13149 +13142 +13140 +13132 +13130 +13093 +13017 +12962 +12955 +12892 +12891 +12766 +12702 +12699 +12414 +12340 +12296 +12275 +12271 +12251 +12243 +12240 +12225 +12192 +12171 +12156 +12146 +12137 +12132 +12097 +12096 +12090 +12080 +12077 +12034 +12031 +12019 +11940 +11863 +11862 +11813 +11735 +11697 +11552 +11401 +11296 +11288 +11250 +11224 +11200 +11180 +11100 +11089 +11033 +11032 +11031 +11026 +11019 +11007 +11003 +10900 +10878 +10852 +10842 +10754 +10699 +10602 +10601 +10567 +10565 +10556 +10555 +10554 +10553 +10552 +10551 +10550 +10535 +10529 +10509 +10494 +10414 +10387 +10357 +10347 +10338 +10280 +10255 +10246 +10245 +10238 +10093 +10064 +10045 +10042 +10035 +10019 +10018 +1327 +2330 +2580 +2700 +1584 +9020 +3281 +2439 +1250 +14001 +1607 +1736 +1330 +2270 +2728 +2888 +3803 +5250 +1645 +1303 +3636 +1251 +1243 +1291 +1297 +1200 +1811 +4442 +1118 +8401 +2101 +2889 +1694 +1730 +1912 +29015 +28015 +1745 +2250 +1306 +2997 +2449 +1262 +4007 +1101 +1268 +1735 +1858 +1264 +1711 +3118 +4601 +1321 +1598 +1305 +1632 +9995 +1307 +1981 +2532 +1808 +2435 +1194 +1622 +1239 +1799 +2882 +1683 +3063 +3062 +1340 +4447 +1806 +6888 +2438 +1261 +5969 +9343 +2583 +2031 +3798 +2269 +20001 +2622 +11001 +1207 +2850 +21201 +2908 +3936 +3023 +2280 +2623 +7099 +2372 +1318 +1339 +1276 +11000 +48619 +3497 +1209 +1331 +1240 +3856 +2987 +2326 +25001 +25000 +1792 +3919 +1299 +2984 +1715 +1703 +1677 +2086 +1708 +1228 +3787 +5502 +1620 +1316 +1569 +1210 +1691 +1282 +2124 +1791 +2150 +9909 +4022 +3868 +1324 +2584 +2300 +9287 +2806 +1566 +1713 +1592 +3749 +1302 +1709 +3485 +2418 +2472 +24554 +3146 +2134 +2898 +9161 +9160 +2930 +1319 +5672 +3811 +2456 +2901 +6579 +2550 +8403 +31416 +22273 +7005 +66 +32786 +32787 +706 +914 +635 +6105 +400 +47 +830 +4008 +5977 +1989 +1444 +3985 +678 +27001 +591 +642 +446 +1441 +54320 +11 +769 +983 +979 +973 +967 +965 +961 +942 +935 +926 +925 +863 +858 +844 +834 +817 +815 +811 +809 +789 +779 +743 +1019 +1507 +1492 +509 +762 +5632 +578 +1495 +5308 +52 +219 +525 +1420 +665 +620 +3064 +3045 +653 +158 +716 +861 +9991 +3049 +1366 +1364 +833 +91 +1680 +3398 +750 +615 +603 +6110 +101 +989 +27010 +510 +810 +1139 +4199 +76 +847 +649 +707 +68 +449 +664 +75 +104 +629 +1652 +682 +577 +985 +984 +974 +958 +952 +949 +946 +923 +916 +899 +897 +894 +889 +835 +824 +814 +807 +804 +798 +733 +727 +237 +12 +10 +501 +122 +440 +771 +1663 +828 +860 +695 +634 +538 +1359 +1358 +1517 +1370 +3900 +492 +268 +27374 +605 +8076 +1651 +1178 +6401 +761 +5145 +50 +2018 +1349 +2014 +7597 +2120 +1445 +1402 +1465 +9104 +627 +4660 +7273 +950 +1384 +1388 +760 +92 +831 +5978 +4557 +45 +112 +456 +1214 +3086 +702 +6665 +1404 +651 +5300 +6347 +5400 +1389 +647 +448 +1356 +5232 +1484 +450 +1991 +1988 +1523 +1400 +1399 +221 +1385 +5191 +1346 +2024 +2430 +988 +962 +948 +945 +941 +938 +936 +929 +927 +919 +906 +883 +881 +875 +872 +870 +866 +855 +851 +850 +841 +836 +826 +820 +819 +816 +813 +791 +745 +736 +735 +724 +719 +343 +334 +300 +28 +249 +230 +16 +1018 +1016 +658 +1474 +696 +630 +663 +2307 +1552 +609 +741 +353 +638 +1551 +661 +491 +640 +507 +673 +632 +1354 +9105 +6143 +676 +214 +14141 +182 +69 +27665 +1475 +97 +633 +560 +799 +7009 +2015 +628 +751 +4480 +1403 +8123 +1527 +723 +1466 +1486 +1650 +991 +832 +137 +1348 +685 +1762 +6701 +994 +4500 +194 +180 +1539 +1379 +51 +886 +2064 +1405 +1435 +11371 +1401 +1369 +402 +103 +1372 +704 +854 +8892 +47557 +624 +1387 +3397 +1996 +1995 +1997 +18182 +18184 +3264 +3292 +13720 +9107 +9106 +201 +1381 
+35 +6588 +5530 +3141 +670 +970 +968 +964 +963 +960 +959 +951 +947 +944 +939 +933 +909 +895 +891 +879 +869 +868 +867 +837 +821 +812 +797 +796 +794 +788 +756 +734 +721 +718 +708 +703 +60 +40 +253 +231 +14 +1017 +1003 +656 +975 +2026 +1497 +553 +511 +611 +689 +1668 +1664 +15 +561 +997 +505 +1496 +637 +213 +1412 +1515 +692 +694 +681 +680 +644 +675 +1467 +454 +622 +1476 +1373 +770 +262 +654 +1535 +58 +177 +26208 +677 +1519 +1398 +3457 +401 +412 +493 +13713 +94 +1498 +871 +1390 +6145 +133 +362 +118 +193 +115 +1549 +7008 +608 +1426 +1436 +915 +38 +74 +73 +71 +601 +136 +4144 +129 +16444 +1446 +4132 +308 +1528 +1365 +1393 +1394 +1493 +138 +5997 +397 +29 +31 +44 +2627 +6147 +1510 +568 +350 +2053 +6146 +6544 +1763 +3531 +399 +1537 +1992 +1355 +1454 +261 +887 +200 +1376 +1424 +6111 +1410 +1409 +686 +5301 +5302 +1513 +747 +9051 +1499 +7006 +1439 +1438 +8770 +853 +196 +93 +410 +462 +619 +1529 +1990 +1994 +1986 +1386 +18183 +18181 +6700 +1442 +95 +6400 +1432 +1548 +486 +1422 +114 +1397 +6142 +1827 +626 +422 +688 +206 +202 +204 +1483 +7634 +774 +699 +2023 +776 +672 +1545 +2431 +697 +982 +978 +972 +966 +957 +956 +934 +920 +908 +907 +892 +890 +885 +884 +882 +877 +876 +865 +857 +852 +849 +842 +838 +827 +818 +793 +785 +784 +755 +746 +738 +737 +717 +34 +336 +325 +303 +276 +273 +236 +235 +233 +181 +604 +1362 +712 +1437 +2027 +1368 +1531 +645 +65301 +260 +536 +764 +698 +607 +1667 +1662 +1661 +404 +224 +418 +176 +848 +315 +466 +403 +1456 +1479 +355 +763 +1472 +453 +759 +437 +2432 +120 +415 +1544 +1511 +1538 +346 +173 +54 +56 +265 +1462 +13701 +1518 +1457 +117 +1470 +13715 +13714 +267 +1419 +1418 +1407 +380 +518 +65 +391 +392 +413 +1391 +614 +1408 +162 +108 +4987 +1502 +598 +582 +487 +530 +1509 +72 +4672 +189 +209 +270 +7464 +408 +191 +1459 +5714 +5717 +5713 +564 +767 +583 +1395 +192 +1448 +428 +4133 +1416 +773 +1458 +526 +1363 +742 +1464 +1427 +1482 +569 +571 +6141 +351 +3984 +5490 +2 +13718 +373 +17300 +910 +148 +7326 +271 +423 +1451 +480 +1430 +1429 +781 +383 +2564 +613 +612 +652 +5303 +1383 +128 +19150 +1453 +190 +1505 +1371 +533 +27009 +27007 +27005 +27003 +27002 +744 +1423 +1374 +141 +1440 +1396 +352 +96 +48 +552 +570 +217 +528 +452 +451 +2766 +2108 +132 +1993 +1987 +130 +18187 +216 +3421 +142 +13721 +67 +15151 +364 +1411 +205 +6548 +124 +116 +5193 +258 +485 +599 +149 +1469 +775 +2019 +516 +986 +977 +976 +955 +954 +937 +932 +8 +896 +893 +845 +768 +766 +739 +337 +329 +326 +305 +295 +294 +293 +289 +288 +277 +238 +234 +229 +228 +226 +522 +2028 +150 +572 +596 +420 +460 +1543 +358 +361 +470 +360 +457 +643 +322 +168 +753 +369 +185 +43188 +1541 +1540 +752 +496 +662 +1449 +1480 +1473 +184 +1672 +1671 +1670 +435 +434 +1532 +1360 +174 +472 +1361 +17007 +414 +535 +432 +479 +473 +151 +1542 +438 +1488 +1508 +618 +316 +1367 +439 +284 +542 +370 +2016 +248 +1491 +44123 +41230 +7173 +5670 +18136 +3925 +7088 +1425 +17755 +17756 +4072 +5841 +2102 +4123 +2989 +10051 +10050 +31029 +3726 +5243 +9978 +9925 +6061 +6058 +6057 +6056 +6054 +6053 +6049 +6048 +6047 +6046 +6045 +6044 +6043 +6042 +6041 +6040 +6039 +6038 +6037 +6036 +6035 +6034 +6033 +6032 +6031 +6029 +6028 +6027 +6026 +6024 +6023 +6022 +6020 +6019 +6018 +6016 +6014 +6013 +6012 +6011 +36462 +5793 +3423 +3424 +4095 +3646 +3510 +3722 +2459 +3651 +14500 +3865 +15345 +3763 +38422 +3877 +9092 +5344 +3974 +2341 +6116 +2157 +165 +6936 +8041 +4888 +4889 +3074 +2165 +4389 +5770 +5769 +16619 +11876 +11877 +3741 +3633 +3840 +3717 +3716 +3590 +2805 +4537 +9762 +5007 +5006 +5358 +4879 +6114 +4185 +2784 +3724 +2596 +2595 +4417 +4845 +22321 +22289 +3219 +1338 +36411 +3861 +5166 +3674 
+1785 +534 +6602 +47001 +5363 +8912 +2231 +5747 +5748 +11208 +7236 +4049 +4050 +22347 +63 +3233 +3359 +8908 +4177 +48050 +3111 +3427 +5321 +5320 +3702 +2907 +8991 +8990 +2054 +4847 +9802 +9800 +4368 +5990 +3563 +5744 +5743 +12321 +12322 +9206 +9204 +9205 +9201 +9203 +2949 +2948 +6626 +37472 +8199 +4145 +3482 +2216 +13708 +3786 +3375 +7566 +2539 +2387 +3317 +2410 +2255 +3883 +4299 +4296 +4295 +4293 +4292 +4291 +4290 +4289 +4288 +4287 +4286 +4285 +4284 +4283 +4282 +4281 +4280 +4278 +4277 +4276 +4275 +4274 +4273 +4272 +4271 +4270 +4269 +4268 +4267 +4266 +4265 +4264 +4263 +4261 +4260 +4259 +4258 +4257 +4256 +4255 +4254 +4253 +4251 +4250 +4249 +4248 +4247 +4246 +4245 +4244 +4241 +4240 +4239 +4238 +4237 +4236 +4235 +4233 +4232 +4231 +4230 +4229 +4228 +4227 +4226 +4225 +4223 +4222 +4221 +4219 +4218 +4217 +4216 +4215 +4214 +4213 +4212 +4211 +4210 +4209 +4208 +4207 +4205 +4204 +4203 +4202 +4201 +2530 +5164 +28200 +3845 +3541 +4052 +21590 +1796 +25793 +8699 +8182 +4991 +2474 +5780 +3676 +24249 +1631 +6672 +6673 +3601 +5046 +3509 +1852 +2386 +8473 +7802 +4789 +3555 +12013 +12012 +3752 +3245 +3231 +16666 +6678 +17184 +9086 +9598 +3073 +2074 +1956 +2610 +3738 +2994 +2993 +2802 +1885 +14149 +13786 +10100 +9284 +14150 +10107 +4032 +2821 +3207 +14154 +24323 +2771 +5646 +2426 +18668 +2554 +4188 +3654 +8034 +5675 +15118 +4031 +2529 +2248 +1142 +19194 +433 +3534 +3664 +2537 +519 +2655 +4184 +1506 +3098 +7887 +37654 +1979 +9629 +2357 +1889 +3314 +3313 +4867 +2696 +3217 +6306 +1189 +5281 +8953 +1910 +13894 +372 +3720 +1382 +2542 +3584 +4034 +145 +27999 +3791 +21800 +2670 +3492 +24678 +34249 +39681 +1846 +5197 +5462 +5463 +2862 +2977 +2978 +3468 +2675 +3474 +4422 +12753 +13709 +2573 +3012 +4307 +4725 +3346 +3686 +4070 +9555 +4711 +4323 +4322 +10200 +7727 +3608 +3959 +2405 +3858 +3857 +24322 +6118 +4176 +6442 +8937 +17224 +17225 +7234 +33434 +1906 +22351 +2158 +5153 +3885 +24465 +3040 +20167 +8066 +474 +2739 +3308 +590 +3309 +7902 +7901 +7903 +20046 +5582 +5583 +7872 +13716 +13717 +13705 +6252 +2915 +1965 +3459 +3160 +3754 +3243 +10261 +7932 +7933 +5450 +11971 +379 +7548 +1832 +28080 +3805 +16789 +8320 +8321 +4423 +2296 +7359 +7358 +7357 +7356 +7355 +7354 +7353 +7352 +7351 +7350 +7349 +7348 +7347 +7346 +7344 +7343 +7342 +7341 +7340 +7339 +7338 +7337 +7336 +7335 +7334 +7333 +7332 +7331 +7330 +7329 +7328 +7327 +7324 +7323 +7322 +7321 +7319 +7318 +7317 +7316 +7315 +7314 +7313 +7312 +7311 +7310 +7309 +7308 +7307 +7306 +7305 +7304 +7303 +7302 +7301 +8140 +5196 +5195 +6130 +5474 +5471 +5472 +5470 +4146 +3713 +5048 +31457 +7631 +3544 +41121 +11600 +3696 +3549 +1380 +22951 +22800 +3521 +2060 +6083 +9668 +3552 +1814 +1977 +2576 +2729 +24680 +13710 +13712 +25900 +2403 +2402 +2470 +5203 +3579 +2306 +1450 +7015 +7012 +7011 +22763 +2156 +2493 +4019 +4018 +4017 +4015 +2392 +3175 +32249 +1627 +10104 +2609 +5406 +3251 +4094 +3241 +6514 +6418 +3734 +2679 +4953 +5008 +2880 +8243 +8280 +26133 +8555 +5629 +3547 +5639 +5638 +5637 +5115 +3723 +4950 +3895 +3894 +3491 +3318 +6419 +3185 +243 +3212 +9536 +1925 +11171 +8404 +8405 +8989 +6787 +6483 +3867 +3866 +1860 +1870 +5306 +3816 +7588 +6786 +2084 +11165 +11161 +11163 +11162 +11164 +3708 +4850 +7677 +16959 +247 +3478 +5349 +3854 +5397 +7411 +9612 +11173 +9293 +5027 +5026 +5705 +8778 +527 +1312 +8808 +6144 +4157 +4156 +3249 +7471 +3615 +5777 +2154 +45966 +17235 +3018 +38800 +2737 +156 +3807 +2876 +1759 +7981 +3606 +3647 +3438 +4683 +9306 +9312 +7016 +33334 +3413 +3834 +3835 +2440 +6121 +8668 +2568 +17185 +7982 +2290 +2569 +2863 +1964 +4738 +2132 +17777 +16162 +6551 +3230 +4538 +3884 
+9282 +9281 +4882 +5146 +580 +1967 +2659 +2409 +5416 +2657 +3380 +5417 +2658 +5161 +5162 +10162 +10161 +33656 +7560 +2599 +2704 +2703 +4170 +7734 +9522 +3158 +4426 +4786 +2721 +1608 +3516 +4988 +4408 +1847 +36423 +2826 +2827 +3556 +8111 +6456 +6455 +3874 +3611 +2629 +2630 +166 +5059 +3110 +1733 +40404 +2257 +2278 +4750 +4303 +3688 +4751 +5794 +4752 +7626 +16950 +3273 +3896 +3635 +1959 +4753 +2857 +4163 +1659 +2905 +2904 +2733 +4936 +5032 +3048 +29000 +28240 +2320 +4742 +22335 +22333 +5043 +4105 +1257 +3841 +43210 +4366 +5163 +11106 +5434 +6444 +6445 +5634 +5636 +5635 +6343 +4546 +3242 +5568 +4057 +24666 +21221 +6488 +6484 +6486 +6485 +6487 +6443 +6480 +6489 +7690 +2603 +4787 +2367 +9212 +9213 +5445 +45824 +8351 +13711 +4076 +5099 +2316 +3588 +5093 +9450 +8056 +8055 +8054 +8059 +8058 +8057 +8053 +3090 +3255 +2254 +2479 +2477 +2478 +4194 +3496 +3495 +2089 +38865 +9026 +9025 +9024 +9023 +3480 +1905 +3550 +7801 +2189 +5361 +32635 +3782 +3432 +3978 +6629 +3143 +7784 +2342 +2309 +2705 +2310 +2384 +6315 +5343 +9899 +5168 +5167 +3927 +266 +2577 +5307 +3838 +19007 +7708 +37475 +7701 +5435 +3499 +2719 +3352 +25576 +3942 +1644 +3755 +5574 +5573 +7542 +9310 +1129 +4079 +3038 +8768 +4033 +9401 +9402 +20012 +20013 +30832 +1606 +5410 +5422 +5409 +9801 +7743 +14034 +14033 +4952 +21801 +3452 +2760 +3153 +23272 +2578 +5156 +8554 +7401 +3771 +3138 +3137 +3500 +6900 +363 +3455 +1698 +13217 +2752 +3864 +10201 +6568 +2377 +3677 +520 +2258 +4124 +8051 +2223 +3194 +4041 +48653 +8270 +5693 +25471 +2416 +5994 +9208 +7810 +7870 +2249 +7473 +4664 +4590 +2777 +2776 +2057 +6148 +3296 +4410 +4684 +8230 +5842 +1431 +12109 +4756 +4336 +324 +323 +3019 +39 +2225 +4733 +30100 +2999 +3422 +107 +1232 +3418 +3537 +5 +8184 +3789 +5231 +4731 +4373 +45045 +12302 +2373 +6084 +16665 +16385 +18635 +18634 +10253 +7227 +3572 +3032 +5786 +2346 +2348 +2347 +2349 +45002 +3553 +43191 +5313 +3707 +3706 +3736 +32811 +1942 +44553 +35001 +35002 +35005 +35006 +35003 +35004 +532 +2214 +5569 +3142 +2332 +3768 +2774 +2773 +6099 +2167 +2714 +2713 +3533 +4037 +2457 +1953 +9345 +21553 +2408 +2736 +2188 +18104 +1813 +469 +1596 +3178 +5430 +5676 +2177 +4841 +5028 +7980 +3166 +3554 +3566 +3843 +5677 +7040 +2589 +8153 +10055 +5464 +2497 +4354 +9222 +5083 +5082 +45825 +2612 +6980 +5689 +6209 +2523 +2490 +2468 +3543 +5543 +7794 +4193 +4951 +3951 +4093 +7747 +7997 +8117 +6140 +2873 +4329 +320 +319 +597 +3453 +4457 +2303 +5360 +4487 +409 +344 +1460 +5716 +5715 +9640 +5798 +7663 +7798 +7797 +4352 +15999 +34962 +34963 +34964 +4749 +8032 +4182 +1283 +1778 +3248 +2722 +2039 +3650 +3133 +2618 +4168 +10631 +1392 +3910 +6716 +47809 +38638 +4690 +9280 +6163 +2315 +3607 +5630 +4455 +4456 +1587 +28001 +5134 +13224 +13223 +5507 +2443 +4150 +8432 +7172 +3710 +9889 +6464 +7787 +6771 +6770 +3055 +2487 +16310 +16311 +3540 +34379 +34378 +2972 +7633 +6355 +188 +2790 +32400 +4351 +3934 +3933 +4659 +1819 +5586 +5863 +17010 +9318 +318 +5318 +2634 +4416 +5078 +3189 +6924 +3010 +15740 +1603 +2787 +4390 +468 +4869 +4868 +3177 +3347 +6124 +2350 +3208 +2520 +2441 +3109 +3557 +281 +1916 +4313 +5312 +4066 +345 +9630 +9631 +6817 +3582 +9279 +9278 +8027 +3587 +4747 +2178 +5112 +3135 +5443 +7880 +1980 +6086 +3254 +4012 +9597 +3253 +2274 +2299 +8444 +6655 +44322 +44321 +5351 +5350 +5172 +4172 +1332 +2256 +8129 +8128 +4097 +8161 +2665 +2664 +6162 +4189 +1333 +3735 +586 +6581 +6582 +4681 +4312 +4989 +7216 +3348 +3095 +6657 +30002 +7237 +3435 +2246 +1675 +31400 +4311 +9559 +6671 +6679 +3034 +40853 +11103 +3274 +3355 +3078 +3075 +3076 +8070 +2484 +2483 +3891 +1571 +1830 +1630 +8997 +8102 
+2482 +2481 +5155 +5575 +3718 +22005 +22004 +22003 +22002 +2524 +1829 +2237 +3977 +3976 +3303 +19191 +3433 +5724 +2400 +7629 +6640 +2389 +30999 +2447 +3673 +7430 +7429 +7426 +7431 +7428 +7427 +9390 +4317 +35357 +7728 +8004 +5045 +8688 +1258 +5757 +5729 +5767 +5766 +5755 +5768 +4743 +9008 +9007 +3187 +20014 +4089 +3434 +4840 +4843 +3100 +314 +3154 +9994 +9993 +8767 +4304 +2428 +2199 +2198 +2185 +4428 +4429 +4162 +4395 +2056 +5402 +3340 +3339 +3341 +3338 +7275 +7274 +7277 +7276 +4359 +2077 +8769 +9966 +4732 +3320 +11175 +11174 +11172 +13706 +3523 +429 +2697 +18186 +3442 +3441 +29167 +36602 +7030 +1894 +28000 +126 +4420 +2184 +3780 +49001 +11235 +4128 +8711 +10810 +45001 +5415 +4453 +359 +3266 +36424 +2868 +7724 +396 +2645 +23402 +23400 +23401 +3016 +21010 +5215 +4663 +4803 +2338 +15126 +8433 +5209 +3406 +3405 +5627 +4088 +2210 +2244 +2817 +10111 +10110 +1242 +5299 +2252 +3649 +6421 +6420 +1617 +48001 +48002 +48003 +48005 +48004 +48000 +61 +8061 +4134 +38412 +20048 +7393 +4021 +178 +8457 +550 +2058 +2075 +2076 +3165 +6133 +2614 +2585 +4702 +4701 +2586 +3203 +3204 +4460 +16361 +16367 +16360 +16368 +4159 +170 +2293 +4703 +8981 +3409 +7549 +171 +20049 +1155 +537 +3196 +3195 +2411 +2788 +4127 +6777 +6778 +1879 +5421 +3440 +2128 +21846 +21849 +21847 +21848 +395 +154 +155 +4425 +2328 +3129 +3641 +3640 +1970 +2486 +2485 +6842 +6841 +3149 +3148 +3150 +3151 +1406 +218 +10116 +10114 +2219 +2735 +10117 +10113 +2220 +3725 +5229 +4350 +6513 +4335 +4334 +5681 +1676 +2971 +4409 +3131 +4441 +1612 +1616 +1613 +1614 +13785 +11104 +11105 +3829 +11095 +3507 +3213 +7474 +3886 +4043 +2730 +377 +378 +3024 +2738 +2528 +4844 +4842 +5979 +1888 +2093 +2094 +20034 +2163 +3159 +6317 +4361 +2895 +3753 +2343 +3015 +1790 +3950 +6363 +9286 +9285 +7282 +6446 +2273 +33060 +2388 +9119 +3733 +32801 +4421 +7420 +9903 +6622 +5354 +7742 +2305 +2791 +8115 +3122 +2855 +8276 +2871 +4554 +2171 +2172 +2173 +2174 +7680 +3343 +7392 +3958 +3358 +46 +6634 +8503 +3924 +2488 +10544 +10543 +10541 +10540 +10542 +4691 +8666 +1576 +4986 +6997 +3732 +4688 +7871 +9632 +7869 +2593 +3764 +5237 +4668 +4173 +4667 +8077 +4310 +7606 +5136 +4069 +21554 +7391 +9445 +2180 +3180 +2621 +4551 +3008 +7013 +7014 +5362 +6601 +1512 +5356 +6074 +5726 +5364 +5725 +6076 +6075 +2175 +3132 +5359 +2176 +5022 +4679 +4680 +6509 +2266 +6382 +2230 +6390 +6370 +6360 +393 +2311 +8787 +18 +8786 +47000 +19788 +1960 +9596 +4603 +4151 +4552 +11211 +3569 +4883 +3571 +2944 +2945 +2272 +7720 +5157 +3445 +2427 +2727 +2363 +46999 +2789 +13930 +3232 +2688 +3235 +5598 +3115 +3117 +3116 +3331 +3332 +3302 +3330 +3558 +8809 +3570 +4153 +2591 +4179 +4171 +3276 +5540 +4360 +8448 +4458 +7421 +49000 +7073 +3836 +5282 +8384 +36700 +4686 +269 +9255 +6201 +2544 +2516 +5092 +2243 +4902 +313 +3691 +2453 +4345 +44900 +36444 +36443 +4894 +3747 +3746 +5044 +6471 +3079 +4913 +4741 +10805 +3487 +3157 +3068 +8162 +4083 +4082 +4081 +7026 +1983 +2289 +1629 +1628 +1634 +8101 +6482 +5254 +5058 +4044 +3591 +3592 +1903 +5062 +6087 +2090 +2465 +2466 +6200 +8208 +8207 +8204 +31620 +8205 +8206 +3278 +2145 +2143 +2147 +2146 +3767 +46336 +10933 +4341 +1969 +10809 +12300 +8191 +517 +4670 +7365 +3028 +3027 +3029 +1203 +1886 +11430 +374 +2212 +3407 +2816 +2779 +2815 +2780 +3373 +3739 +3815 +4347 +11796 +3970 +4547 +1764 +2395 +4372 +4432 +9747 +4371 +3360 +3361 +4331 +40023 +27504 +2294 +5253 +7697 +35354 +186 +30260 +4566 +584 +5696 +6623 +6620 +6621 +2502 +3112 +36865 +2918 +4661 +31016 +26262 +26263 +3642 +48048 +5309 +3155 +4166 +27442 +6583 +3215 +3214 +8901 +19020 +4160 +3094 +3093 +3777 +1937 +1938 +1939 +1940 
+2097 +1936 +1810 +6244 +6243 +6242 +6241 +4107 +19541 +3529 +3528 +5230 +4327 +5883 +2205 +7095 +3794 +3473 +3472 +7181 +5034 +3627 +8091 +1578 +5673 +5049 +4880 +3258 +2828 +3719 +7478 +7280 +1636 +1637 +3775 +24321 +499 +3205 +1950 +1949 +3226 +8148 +5047 +4075 +17223 +21000 +3504 +3206 +2632 +529 +4073 +32034 +18769 +2527 +4593 +4792 +4791 +7031 +33435 +4740 +4739 +4068 +20202 +4737 +9214 +2215 +3743 +2088 +7410 +5728 +45054 +3614 +8020 +11751 +2202 +6697 +4744 +1884 +3699 +6714 +1611 +7202 +4569 +3508 +24386 +16995 +16994 +1674 +1673 +7128 +4746 +17234 +9215 +4486 +484 +5057 +5056 +7624 +2980 +4109 +49150 +215 +23005 +23004 +23003 +23002 +23001 +23000 +2716 +3560 +5597 +134 +38001 +38000 +4067 +1428 +2480 +5029 +8067 +5069 +3156 +3139 +244 +7675 +7673 +7672 +7674 +2637 +4139 +3783 +3657 +11320 +8615 +585 +48128 +2239 +3596 +2055 +3186 +19000 +5165 +3420 +17220 +17221 +19998 +2404 +2079 +4152 +4604 +25604 +5742 +5741 +4553 +2799 +4801 +4802 +2063 +14143 +14142 +4061 +4062 +4063 +4064 +31948 +31949 +2276 +2275 +1881 +2078 +3660 +3661 +1920 +1919 +9085 +424 +1933 +1934 +9089 +9088 +3667 +3666 +12003 +12004 +3539 +3538 +3267 +25100 +385 +3494 +4594 +4595 +4596 +3898 +9614 +4169 +5674 +2374 +5105 +8313 +44323 +5628 +2570 +2113 +4591 +4592 +5228 +5224 +5227 +2207 +4484 +3037 +2209 +2448 +3101 +382 +381 +3209 +7510 +2206 +2690 +2208 +7738 +5317 +3329 +5316 +3449 +2029 +1985 +10125 +2597 +3634 +8231 +3250 +43438 +4884 +4117 +2467 +4148 +18516 +7397 +22370 +8807 +3921 +4306 +10860 +6440 +3740 +1161 +2641 +7630 +3804 +4197 +11108 +9954 +6791 +3623 +3769 +3036 +5315 +5305 +3542 +5304 +11720 +2517 +3179 +2979 +2356 +3745 +18262 +2186 +35356 +3436 +2152 +2123 +1452 +4729 +3761 +3136 +28010 +9340 +9339 +8710 +30400 +6267 +6269 +6268 +3757 +4755 +4754 +4026 +5117 +9277 +2947 +3386 +2217 +37483 +16002 +5687 +2072 +1909 +9122 +9123 +4131 +3912 +3229 +1880 +5688 +4332 +10800 +4985 +3108 +3475 +6080 +4790 +23053 +6081 +8190 +7017 +7283 +4730 +2159 +3429 +2660 +14145 +3484 +3762 +3222 +8322 +1421 +1859 +31765 +2914 +3051 +38201 +8881 +4340 +8074 +2678 +2677 +4110 +2731 +286 +3402 +3272 +1514 +3382 +1904 +1902 +3648 +2975 +574 +8502 +3488 +9217 +4130 +7726 +5556 +7244 +4319 +41111 +4411 +4084 +2242 +4396 +4901 +7545 +7544 +27008 +27006 +27004 +5579 +2884 +3035 +1193 +5618 +7018 +2673 +4086 +8043 +8044 +3192 +3729 +1855 +1856 +1784 +24922 +1887 +7164 +4349 +7394 +16021 +16020 +6715 +4915 +4122 +3216 +14250 +3152 +1776 +36524 +4320 +4727 +3225 +2819 +4038 +6417 +347 +3047 +2495 +10081 +38202 +19790 +2515 +2514 +4353 +38472 +10102 +4085 +3953 +4788 +3088 +3134 +3639 +4309 +2755 +1928 +5075 +26486 +5401 +3759 +43440 +1926 +1982 +1798 +9981 +4536 +4535 +1504 +592 +1267 +6935 +2036 +6316 +2221 +44818 +34980 +2380 +2379 +6107 +1772 +8416 +8417 +8266 +4023 +3629 +9617 +3679 +3727 +4942 +4941 +4940 +43439 +3628 +3620 +5116 +3259 +4666 +4669 +3819 +37601 +5084 +5085 +3383 +5599 +5600 +5601 +3665 +1818 +3044 +1295 +7962 +7117 +121 +17754 +6636 +6635 +20480 +23333 +3585 +6322 +6321 +4091 +4092 +140 +6656 +3693 +11623 +11723 +13218 +3682 +3218 +9083 +3197 +3198 +394 +2526 +7700 +7707 +2916 +2917 +4370 +6515 +12010 +5398 +3564 +4346 +1378 +1893 +3525 +3638 +2228 +6632 +3392 +3671 +6159 +3462 +3461 +3464 +3465 +3460 +3463 +3123 +34567 +8149 +6703 +6702 +2263 +3477 +3524 +6160 +17729 +3711 +45678 +2168 +3328 +38462 +3932 +3295 +2164 +3395 +2874 +3246 +3247 +4191 +4028 +3489 +4556 +5684 +13929 +31685 +9987 +4060 +13819 +13820 +13821 +13818 +13822 +2420 +7547 +3685 +2193 +4427 +1930 +8913 +7021 +7020 +5719 +5565 +5245 
+6326 +6320 +6325 +3522 +44544 +13400 +6088 +3568 +8567 +3567 +5567 +7165 +4142 +3161 +5352 +195 +1172 +5993 +3199 +3574 +4059 +1177 +3624 +19999 +4646 +21212 +246 +5107 +14002 +7171 +3448 +3336 +3335 +3337 +198 +197 +3447 +5031 +4605 +2464 +2227 +3223 +1335 +2226 +33333 +2762 +2761 +3227 +3228 +33331 +2861 +2860 +2098 +4301 +3252 +547 +546 +6785 +8750 +4330 +3776 +24850 +8805 +2763 +4167 +2092 +3444 +8415 +3714 +1278 +5700 +3668 +7569 +365 +8894 +8893 +8891 +8890 +11202 +3988 +1160 +3938 +6117 +6624 +6625 +2073 +461 +3612 +3578 +11109 +2229 +1775 +2764 +3678 +6511 +1133 +29999 +2594 +3881 +3498 +8732 +2378 +3394 +3393 +2298 +2297 +9388 +9387 +3120 +3297 +1898 +8442 +9888 +4183 +4673 +3778 +5271 +3127 +1932 +4451 +2563 +4452 +9346 +7022 +3631 +3630 +105 +3271 +2699 +3004 +2129 +4187 +1724 +3113 +2314 +8380 +8377 +8376 +8379 +8378 +20810 +3818 +41797 +41796 +38002 +3364 +3366 +2824 +2823 +3609 +4055 +4054 +4053 +2654 +19220 +9093 +3183 +2565 +4078 +4774 +2153 +17222 +7551 +7563 +3072 +4047 +9695 +4846 +5992 +5683 +4692 +3191 +3417 +7169 +3973 +46998 +16384 +3947 +47100 +6970 +2491 +7023 +10321 +42508 +3822 +2417 +2555 +3257 +3256 +22343 +64 +7215 +20003 +4450 +3751 +3605 +2534 +3490 +4419 +7689 +21213 +7574 +3377 +3779 +44444 +3039 +2415 +2183 +26257 +3576 +3575 +2976 +7168 +8501 +164 +3384 +7550 +45514 +356 +2617 +3730 +6688 +6687 +6690 +7683 +2052 +3481 +4136 +4137 +9087 +172 +1729 +4980 +7229 +7228 +24754 +2897 +7279 +2512 +2513 +4870 +22305 +5787 +6633 +131 +15555 +4051 +4785 +43441 +5784 +7546 +8017 +3887 +5194 +1743 +2891 +3770 +1377 +4316 +4314 +3099 +1572 +39063 +1891 +1892 +3349 +18241 +18243 +18242 +18185 +5505 +6556 +562 +531 +3772 +5065 +5064 +2182 +3893 +2921 +2922 +13832 +4074 +4140 +4115 +3056 +3616 +3559 +4970 +4969 +3114 +3750 +12168 +2122 +7129 +7162 +7167 +5270 +1197 +9060 +3106 +12546 +5247 +5246 +3290 +4728 +8998 +8610 +8609 +3756 +8614 +8613 +8612 +8611 +1872 +3583 +24676 +4377 +5079 +4378 +1734 +3545 +7262 +3675 +2552 +22537 +3709 +14414 +5251 +1882 +42509 +2318 +4326 +1563 +7163 +1554 +7161 +595 +348 +282 +8026 +5249 +5248 +5154 +10880 +3626 +4990 +3107 +6410 +6409 +6408 +6407 +6406 +6405 +6404 +4677 +581 +4671 +2964 +2965 +28589 +47808 +3966 +2446 +1854 +1961 +2444 +2277 +4175 +3188 +3043 +9380 +3692 +5682 +2155 +4104 +4103 +4102 +3593 +2845 +2844 +4186 +2218 +4678 +2017 +2913 +7648 +4914 +7687 +6501 +9750 +3344 +1896 +4568 +10128 +6768 +6767 +3182 +1313 +3181 +2059 +3604 +6300 +10129 +3695 +6301 +2494 +2625 +48129 +8195 +2369 +2574 +5750 +13823 +13216 +4027 +5068 +25955 +25954 +6946 +3411 +24577 +5429 +2259 +4621 +6784 +4676 +4675 +4784 +3785 +5425 +5424 +4305 +3960 +3408 +5584 +5585 +1943 +3124 +6508 +6507 +4155 +1120 +1929 +4324 +10439 +6506 +6505 +6122 +4971 +3387 +152 +2635 +2169 +6696 +2204 +3512 +2071 +10260 +35100 +4195 +3277 +3502 +2066 +2238 +4413 +20057 +2992 +2050 +3965 +10990 +31020 +4685 +1140 +7508 +16003 +4071 +3104 +3437 +5067 +33123 +1146 +44600 +2264 +7543 +2419 +32896 +2317 +3821 +4937 +1520 +11367 +4154 +3617 +20999 +1170 +1171 +2864 +27876 +4485 +4704 +7235 +3087 +45000 +4405 +4404 +4406 +4402 +4403 +4400 +5727 +11489 +2192 +4077 +4448 +3581 +5150 +13702 +3451 +386 +8211 +7166 +3518 +27782 +3176 +9292 +3174 +9295 +9294 +3426 +8423 +3140 +7570 +421 +2114 +6344 +2581 +2582 +11321 +384 +23546 +1834 +1115 +4165 +1557 +3758 +7847 +5086 +4849 +2037 +1447 +3312 +187 +4488 +2336 +387 +208 +207 +203 +3454 +10548 +4674 +38203 +3239 +3236 +3237 +3238 +4573 +2758 +10252 +2759 +8121 +2754 +8122 +3184 +42999 +539 +6082 +18888 +9952 +9951 +7846 +7845 +6549 
+5456 +5455 +5454 +4851 +5913 +5072 +3939 +2247 +1206 +3715 +2646 +3054 +5671 +8040 +376 +2640 +30004 +30003 +5192 +4393 +4392 +4391 +4394 +1931 +5506 +8301 +4563 +35355 +4011 +7799 +3265 +9209 +693 +36001 +9956 +9955 +6627 +3234 +2667 +2668 +3613 +4804 +2887 +3416 +3833 +9216 +2846 +17555 +2786 +3316 +3021 +3026 +4878 +3917 +4362 +7775 +3224 +23457 +23456 +4549 +4431 +2295 +3573 +5073 +3760 +3357 +3954 +3705 +3704 +2692 +6769 +33890 +7170 +2521 +2085 +3096 +2810 +2859 +3431 +9389 +3655 +5106 +5103 +44445 +7509 +6801 +4013 +2476 +2475 +2334 +12007 +12008 +6868 +4046 +18463 +32483 +4030 +8793 +62 +1955 +3781 +3619 +3618 +28119 +4726 +4502 +4597 +4598 +3598 +3597 +3125 +4149 +9953 +23294 +2933 +2934 +5783 +5782 +5785 +5781 +15363 +48049 +2339 +5265 +5264 +1181 +3446 +3428 +15998 +3091 +2133 +3774 +317 +3832 +508 +3721 +1619 +1716 +2279 +3412 +2327 +6558 +2130 +1760 +5413 +2396 +2923 +3378 +3466 +2504 +2720 +4871 +7395 +3926 +1727 +1326 +2518 +1890 +2781 +565 +4984 +3342 +21845 +1963 +2851 +3748 +1739 +1269 +2455 +2547 +2548 +2546 +7779 +2695 +312 +2996 +2893 +1589 +2649 +1224 +1345 +3625 +2538 +3321 +175 +1868 +4344 +1853 +3058 +3802 +78 +2770 +3270 +575 +1771 +4839 +4838 +4837 +671 +430 +431 +2745 +2648 +3356 +1957 +2820 +1978 +2927 +2499 +2437 +2138 +2110 +1797 +1737 +483 +390 +1867 +1624 +1833 +2879 +2767 +2768 +2943 +1568 +2489 +1237 +2741 +2742 +8804 +1588 +6069 +1869 +2642 +20670 +594 +2885 +2669 +476 +2798 +3083 +3082 +3081 +2361 +5104 +1758 +7491 +1728 +5428 +1946 +559 +1610 +3144 +1922 +2726 +6149 +1838 +4014 +1274 +2647 +4106 +6102 +4548 +19540 +1866 +6965 +6966 +6964 +6963 +1751 +1625 +5453 +2709 +7967 +3354 +566 +4178 +2986 +1226 +1836 +1654 +2838 +1692 +3644 +6071 +477 +478 +2507 +1923 +3193 +2653 +2636 +1621 +3379 +2533 +2892 +2452 +1684 +2333 +22000 +1553 +3536 +11201 +2775 +2942 +2941 +2940 +2939 +2938 +2613 +426 +4116 +4412 +1966 +3065 +1225 +1705 +1618 +1660 +2545 +2676 +3687 +2756 +1599 +2832 +2831 +2830 +2829 +5461 +2974 +498 +1626 +3595 +160 +153 +3326 +1714 +3172 +3173 +3171 +3170 +3169 +2235 +6108 +169 +5399 +2471 +558 +2308 +1681 +2385 +3562 +5024 +5025 +5427 +3391 +3744 +1646 +3275 +3698 +2390 +1793 +1647 +1697 +1693 +1695 +1696 +2919 +9599 +2423 +3844 +2959 +2818 +1817 +521 +3147 +3163 +2886 +283 +2837 +2543 +2928 +2240 +1343 +2321 +3467 +9753 +1530 +2872 +1595 +2900 +1341 +2935 +3059 +2724 +3385 +2765 +368 +2461 +2462 +1253 +2680 +3009 +2434 +2694 +2351 +2353 +2354 +1788 +2352 +3662 +2355 +2091 +1732 +8183 +1678 +2588 +2924 +2687 +5071 +1777 +2899 +494 +3875 +2937 +5437 +5436 +3469 +3285 +1293 +5272 +2865 +321 +1280 +1779 +6432 +1230 +2843 +3033 +2566 +1562 +3085 +3892 +1246 +1564 +8160 +1633 +9997 +9996 +7511 +5236 +3955 +2956 +2954 +2953 +5310 +2951 +2936 +6951 +2413 +2407 +1597 +1570 +2398 +1809 +1575 +1754 +1748 +22001 +3855 +2368 +8764 +6653 +5314 +2267 +3244 +2661 +2364 +506 +2322 +2498 +3305 +183 +650 +2329 +5991 +1463 +159 +8450 +1917 +1921 +2839 +2503 +25903 +25901 +25902 +2556 +2672 +1690 +2360 +2671 +1669 +1665 +1286 +4138 +2592 +61441 +61439 +61440 +2983 +5465 +1843 +1842 +1841 +2061 +1329 +2451 +3701 +3066 +2442 +5771 +2450 +489 +8834 +1285 +3262 +2881 +2883 +43189 +6064 +1591 +1744 +405 +2397 +2683 +2162 +1288 +2286 +2236 +167 +1685 +1831 +2981 +467 +1574 +2743 +19398 +2469 +2460 +1477 +1478 +5720 +3535 +1582 +1731 +679 +2684 +2686 +2681 +2685 +1952 +9397 +9344 +2952 +2579 +2561 +1235 +367 +8665 +471 +2926 +1815 +7786 +8033 +1581 +7979 +1534 +490 +3070 +349 +1824 +2511 +1897 +6070 +2118 +2117 +1231 +24003 +24004 +24006 +24000 +3594 +24002 +24001 
+24005 +5418 +2698 +8763 +1820 +1899 +2587 +8911 +8910 +1593 +2535 +4181 +3565 +2559 +3069 +2620 +1298 +2540 +2541 +2125 +1487 +2283 +2284 +2285 +2281 +2282 +2813 +5355 +2814 +2795 +1555 +1968 +2611 +245 +4042 +1682 +1485 +2560 +2841 +2370 +2842 +2840 +398 +2424 +1773 +1649 +287 +2656 +2213 +2822 +1289 +3471 +3470 +3042 +4114 +6962 +6961 +1567 +2808 +1706 +2406 +2508 +2506 +1623 +13160 +2166 +2866 +2982 +1275 +1573 +4348 +1828 +3084 +1609 +2853 +3589 +147 +3501 +1643 +1642 +1245 +43190 +2962 +2963 +576 +2549 +1579 +1585 +503 +1907 +3202 +3548 +3060 +2652 +2633 +16991 +495 +1602 +1490 +2793 +18881 +2854 +2319 +2233 +3345 +2454 +8130 +8131 +2127 +2970 +2932 +3164 +1710 +11319 +27345 +2801 +1284 +2995 +3797 +2966 +2590 +549 +1725 +2337 +3130 +5813 +25008 +25007 +25006 +25005 +25004 +25003 +25002 +25009 +6850 +1344 +1604 +8733 +2572 +1260 +1586 +1726 +6999 +6998 +2140 +2139 +2141 +1577 +4180 +4827 +1877 +2715 +19412 +19410 +19411 +5404 +5403 +2985 +1803 +2744 +6790 +2575 +12172 +1789 +35000 +1281 +14937 +14936 +263 +375 +5094 +1816 +2245 +1238 +2778 +9321 +2643 +2421 +488 +1850 +2458 +41 +2519 +6109 +1774 +2833 +3862 +3381 +1590 +2626 +1738 +2732 +19539 +2849 +2358 +1786 +1787 +1657 +2429 +1747 +1746 +5408 +5407 +2359 +24677 +1874 +2946 +2509 +1873 +2747 +2751 +2750 +2748 +2749 +9396 +3067 +1848 +9374 +2510 +2615 +1689 +4682 +3350 +24242 +3401 +3294 +3293 +5503 +5504 +5746 +5745 +2344 +7437 +3353 +2689 +3873 +1561 +1915 +2792 +10103 +26260 +26261 +589 +1948 +2666 +26489 +26487 +2769 +2674 +6066 +1876 +2835 +2834 +2782 +16309 +2969 +2867 +2797 +2950 +1822 +1342 +5135 +2650 +2109 +2051 +2912 +309 +1865 +3289 +1804 +3286 +1740 +2211 +2707 +1273 +2181 +2553 +2896 +2858 +3610 +2651 +1325 +2445 +1265 +3053 +1292 +1878 +4098 +1780 +1795 +4099 +1821 +2151 +1227 +436 +2287 +32636 +1489 +1263 +5419 +3041 +2496 +3287 +6073 +2234 +242 +1844 +2362 +11112 +1941 +3046 +1945 +6072 +2960 +5426 +2753 +3298 +1702 +1256 +1254 +1266 +2562 +1656 +1655 +579 +1255 +1415 +2365 +2345 +6104 +8132 +1908 +3282 +1857 +1679 +2870 +3458 +5420 +772 +3645 +551 +1686 +3773 +4379 +1851 +3022 +2807 +2890 +1837 +2955 +3145 +1471 +1468 +40841 +40842 +40843 +2422 +6253 +455 +2746 +3201 +5984 +2324 +3288 +5412 +2137 +1648 +1802 +4308 +48556 +2757 +1757 +1294 +7174 +1944 +371 +504 +1741 +2931 +3020 +17219 +3903 +1768 +1767 +1766 +1765 +2856 +1640 +1639 +1794 +3987 +2571 +2412 +3315 +2116 +3061 +2836 +3450 +3105 +1756 +9283 +2906 +588 +1202 +1375 +2803 +2536 +1252 +2619 +1323 +2990 +1304 +2961 +6402 +6403 +3561 +1770 +1769 +2877 +10288 +2911 +2032 +2663 +2662 +1962 +310 +357 +354 +482 +2414 +2852 +1951 +1704 +3327 +573 +567 +2708 +2131 +2772 +3643 +1749 +5042 +1913 +2624 +1826 +2136 +2616 +9164 +9163 +9162 +1781 +2929 +1320 +2848 +2268 +459 +1536 +2639 +6831 +10080 +1845 +1653 +1849 +463 +2740 +2473 +2783 +1481 +2785 +2331 +7107 +1219 +3279 +5411 +2796 +2149 +7781 +1205 +4108 +4885 +1546 +2894 +1601 +2878 +5605 +5604 +5602 +5603 +3284 +1742 diff --git a/bbot/wordlists/valid_url_schemes.txt b/bbot/wordlists/valid_url_schemes.txt new file mode 100644 index 0000000000..721a854aee --- /dev/null +++ b/bbot/wordlists/valid_url_schemes.txt @@ -0,0 +1,382 @@ +aaa +awb +aaas +about +acap +acct +acd +acr +adiumxtra +adt +afp +afs +aim +amss +android +appdata +apt +ar +ark +at +attachment +aw +barion +bb +beshare +bitcoin +bitcoincash +blob +bolo +brid +browserext +cabal +calculator +callto +cap +cast +casts +chrome +chrome-extension +cid +coap +coap+tcp +coap+ws +coaps +coaps+tcp +coaps+ws +com-eventbrite-attendee +content +content-type +crid 
+cstr +cvs +dab +dat +data +dav +dhttp +diaspora +dict +did +dis +dlna-playcontainer +dlna-playsingle +dns +dntp +doi +dpp +drm +drop +dtmi +dtn +dvb +dvx +dweb +ed2k +eid +elsi +embedded +ens +ethereum +example +facetime +fax +feed +feedready +fido +file +filesystem +finger +first-run-pen-experience +fish +fm +ftp +fuchsia-pkg +geo +gg +git +gitoid +gizmoproject +go +gopher +graph +grd +gtalk +h323 +ham +hcap +hcp +hs20 +http +https +hxxp +hxxps +hydrazone +hyper +iax +icap +icon +im +imap +info +iotdisco +ipfs +ipn +ipns +ipp +ipps +irc +irc6 +ircs +iris +iris.beep +iris.lwz +iris.xpc +iris.xpcs +isostore +itms +jabber +jar +jms +keyparc +lastfm +lbry +ldap +ldaps +leaptofrogans +lid +lorawan +lpa +lvlt +machineProvisioningProgressReporter +magnet +mailserver +mailto +maps +market +matrix +message +microsoft.windows.camera +microsoft.windows.camera.multipicker +microsoft.windows.camera.picker +mid +mms +modem +mongodb +moz +ms-access +ms-appinstaller +ms-browser-extension +ms-calculator +ms-drive-to +ms-enrollment +ms-excel +ms-eyecontrolspeech +ms-gamebarservices +ms-gamingoverlay +ms-getoffice +ms-help +ms-infopath +ms-inputapp +ms-launchremotedesktop +ms-lockscreencomponent-config +ms-media-stream-id +ms-meetnow +ms-mixedrealitycapture +ms-mobileplans +ms-newsandinterests +ms-officeapp +ms-people +ms-project +ms-powerpoint +ms-publisher +ms-recall +ms-remotedesktop +ms-remotedesktop-launch +ms-restoretabcompanion +ms-screenclip +ms-screensketch +ms-search +ms-search-repair +ms-secondary-screen-controller +ms-secondary-screen-setup +ms-settings +ms-settings-airplanemode +ms-settings-bluetooth +ms-settings-camera +ms-settings-cellular +ms-settings-cloudstorage +ms-settings-connectabledevices +ms-settings-displays-topology +ms-settings-emailandaccounts +ms-settings-language +ms-settings-location +ms-settings-lock +ms-settings-nfctransactions +ms-settings-notifications +ms-settings-power +ms-settings-privacy +ms-settings-proximity +ms-settings-screenrotation +ms-settings-wifi +ms-settings-workplace +ms-spd +ms-stickers +ms-sttoverlay +ms-transit-to +ms-useractivityset +ms-virtualtouchpad +ms-visio +ms-walk-to +ms-whiteboard +ms-whiteboard-cmd +ms-word +msnim +msrp +msrps +mss +mt +mtqp +mumble +mupdate +mvn +mvrp +mvrps +news +nfs +ni +nih +nntp +notes +num +ocf +oid +onenote +onenote-cmd +opaquelocktoken +openid +openpgp4fpr +otpauth +p1 +pack +palm +paparazzi +payment +payto +pkcs11 +platform +pop +pres +prospero +proxy +pwid +psyc +pttp +qb +query +quic-transport +redis +rediss +reload +res +resource +rmi +rsync +rtmfp +rtmp +rtsp +rtsps +rtspu +sarif +secondlife +secret-token +service +session +sftp +sgn +shc +shttp +sieve +simpleledger +simplex +sip +sips +skype +smb +smp +sms +smtp +snews +snmp +soap.beep +soap.beeps +soldat +spiffe +spotify +ssb +ssh +starknet +steam +stun +stuns +submit +svn +swh +swid +swidpath +tag +taler +teamspeak +tel +teliaeid +telnet +tftp +things +thismessage +tip +tn3270 +tool +turn +turns +tv +udp +unreal +upt +urn +ut2004 +uuid-in-package +v-event +vemmi +ventrilo +ves +videotex +vnc +view-source +vscode +vscode-insiders +vsls +w3 +wais +web3 +wcr +webcal +web+ap +wifi +wpid +ws +wss +wtai +wyciwyg +xcon +xcon-userid +xfire +xmlrpc.beep +xmlrpc.beeps +xmpp +xftp +xrcp +xri +ymsgr +z39.50 +z39.50r +z39.50s diff --git a/bbot/wordlists/wordninja_dns.txt.gz b/bbot/wordlists/wordninja_dns.txt.gz new file mode 100644 index 0000000000..a038310fff Binary files /dev/null and b/bbot/wordlists/wordninja_dns.txt.gz differ diff --git a/docs/bbot.png b/docs/bbot.png 
new file mode 100644 index 0000000000..89e899a9b0 Binary files /dev/null and b/docs/bbot.png differ diff --git a/docs/comparison.md b/docs/comparison.md new file mode 100644 index 0000000000..183e84319d --- /dev/null +++ b/docs/comparison.md @@ -0,0 +1,21 @@ +# Comparison to Other Tools + +BBOT does a lot more than just subdomain enumeration. However, subdomain enumeration is arguably the most important part of OSINT, and since there are so many subdomain enumeration tools out there, they're the easiest class of tool to compare it against. + +Thanks to BBOT's recursive nature (and its `dnsbrute_mutations` module with its NLP-powered subdomain mutations), it typically finds about 20-25% more subdomains than other tools such as `Amass` or `theHarvester`. This holds especially true for larger targets like `delta.com` (1000+ subdomains): + +### Subdomains Found + +![subdomains](https://github.com/blacklanternsecurity/bbot/assets/20261699/0d7eb982-e68a-4a33-b33c-7c8ba8c7d6ad) + +### Runtimes (Lower is Better) + +![runtimes](https://github.com/blacklanternsecurity/bbot/assets/20261699/66cafb5f-045b-4d88-9ffa-7542b3dada4f) + +For a detailed analysis of this data, please see [Subdomain Enumeration Tool Face-Off](https://blog.blacklanternsecurity.com/p/subdomain-enumeration-tool-face-off-4e5). + +### Ebay.com (larger domain) + +![subdomain-stats-ebay](https://github.com/blacklanternsecurity/bbot/assets/20261699/53e07e9f-50b6-4b70-9e83-297dbfbcb436) + +_Note that in this benchmark, Spiderfoot crashed after ~20 minutes due to excessive memory usage. Amass never finished and had to be cancelled after 24h. All other tools finished successfully._ diff --git a/docs/contribution.md b/docs/contribution.md new file mode 100644 index 0000000000..b291cea68a --- /dev/null +++ b/docs/contribution.md @@ -0,0 +1,9 @@ +# Contribution + +We welcome contributions! If you have an idea for a new module, or are a Python developer who wants to get involved, please fork us or come talk to us on [Discord](https://discord.com/invite/PZqkgxu5SA).
+ +To get started devving, see the following links: + +- [Setting up a Dev Environment](./dev/dev_environment.md) +- [How to Write a BBOT Module](./dev/module_howto.md) +- [Discord Bot Example](./dev/discord_bot.md) diff --git a/docs/data/chord_graph/entities.json b/docs/data/chord_graph/entities.json new file mode 100644 index 0000000000..af91c060c2 --- /dev/null +++ b/docs/data/chord_graph/entities.json @@ -0,0 +1,2086 @@ +[ + { + "id": 77777777, + "name": "root" + }, + { + "id": 99999999, + "name": "module", + "parent": 77777777 + }, + { + "id": 88888888, + "name": "event_type", + "parent": 77777777 + }, + { + "id": 13, + "name": "ASN", + "parent": 88888888, + "consumes": [], + "produces": [ + 11 + ] + }, + { + "id": 131, + "name": "AZURE_TENANT", + "parent": 88888888, + "consumes": [ + 130 + ], + "produces": [] + }, + { + "id": 43, + "name": "CODE_REPOSITORY", + "parent": 88888888, + "consumes": [ + 62, + 82, + 83, + 86, + 88, + 120, + 138 + ], + "produces": [ + 42, + 63, + 81, + 84, + 85, + 87, + 119 + ] + }, + { + "id": 7, + "name": "DNS_NAME", + "parent": 88888888, + "consumes": [ + 6, + 14, + 18, + 20, + 21, + 25, + 27, + 28, + 29, + 30, + 31, + 33, + 34, + 35, + 36, + 38, + 39, + 40, + 41, + 44, + 49, + 51, + 52, + 53, + 55, + 56, + 57, + 58, + 59, + 61, + 67, + 79, + 84, + 91, + 95, + 97, + 103, + 104, + 106, + 109, + 110, + 114, + 115, + 117, + 121, + 123, + 124, + 125, + 126, + 127, + 130, + 133, + 134, + 135, + 137, + 141, + 144, + 145, + 148, + 152 + ], + "produces": [ + 6, + 20, + 27, + 28, + 35, + 36, + 38, + 39, + 40, + 41, + 49, + 52, + 55, + 56, + 57, + 58, + 59, + 60, + 79, + 91, + 95, + 97, + 103, + 104, + 107, + 109, + 110, + 114, + 121, + 123, + 125, + 126, + 130, + 132, + 133, + 134, + 137, + 141, + 142, + 144, + 145, + 148, + 152 + ] + }, + { + "id": 22, + "name": "DNS_NAME_UNRESOLVED", + "parent": 88888888, + "consumes": [ + 21, + 130, + 135 + ], + "produces": [] + }, + { + "id": 45, + "name": "EMAIL_ADDRESS", + "parent": 88888888, + "consumes": [ + 68 + ], + "produces": [ + 44, + 51, + 57, + 61, + 67, + 95, + 115, + 124, + 127, + 132 + ] + }, + { + "id": 10, + "name": "FILESYSTEM", + "parent": 88888888, + "consumes": [ + 72, + 102, + 138, + 139 + ], + "produces": [ + 8, + 62, + 76, + 82, + 83, + 86, + 102, + 120, + 139 + ] + }, + { + "id": 4, + "name": "FINDING", + "parent": 88888888, + "consumes": [ + 14, + 150 + ], + "produces": [ + 1, + 21, + 23, + 25, + 26, + 29, + 30, + 31, + 33, + 34, + 37, + 50, + 81, + 87, + 92, + 94, + 97, + 105, + 107, + 108, + 111, + 112, + 128, + 130, + 136, + 138, + 140, + 151 + ] + }, + { + "id": 99, + "name": "GEOLOCATION", + "parent": 88888888, + "consumes": [], + "produces": [ + 98, + 101 + ] + }, + { + "id": 46, + "name": "HASHED_PASSWORD", + "parent": 88888888, + "consumes": [], + "produces": [ + 44, + 51 + ] + }, + { + "id": 2, + "name": "HTTP_RESPONSE", + "parent": 88888888, + "consumes": [ + 1, + 14, + 26, + 50, + 66, + 69, + 76, + 87, + 92, + 105, + 106, + 107, + 111, + 112, + 113, + 130, + 136, + 138, + 147, + 151 + ], + "produces": [ + 93 + ] + }, + { + "id": 12, + "name": "IP_ADDRESS", + "parent": 88888888, + "consumes": [ + 11, + 14, + 97, + 98, + 100, + 101, + 106, + 117, + 130 + ], + "produces": [ + 14, + 60, + 100, + 130 + ] + }, + { + "id": 118, + "name": "IP_RANGE", + "parent": 88888888, + "consumes": [ + 117, + 130 + ], + "produces": [] + }, + { + "id": 9, + "name": "MOBILE_APP", + "parent": 88888888, + "consumes": [ + 8 + ], + "produces": [ + 88 + ] + }, + { + "id": 15, + "name": "OPEN_TCP_PORT", + "parent": 
88888888, + "consumes": [ + 14, + 77, + 93, + 106, + 116, + 132 + ], + "produces": [ + 14, + 97, + 117, + 130 + ] + }, + { + "id": 64, + "name": "ORG_STUB", + "parent": 88888888, + "consumes": [ + 63, + 85, + 88, + 119 + ], + "produces": [ + 130 + ] + }, + { + "id": 47, + "name": "PASSWORD", + "parent": 88888888, + "consumes": [], + "produces": [ + 44, + 51 + ] + }, + { + "id": 78, + "name": "PROTOCOL", + "parent": 88888888, + "consumes": [ + 106 + ], + "produces": [ + 77 + ] + }, + { + "id": 54, + "name": "RAW_DNS_RECORD", + "parent": 88888888, + "consumes": [], + "produces": [ + 53, + 60, + 61 + ] + }, + { + "id": 70, + "name": "RAW_TEXT", + "parent": 88888888, + "consumes": [ + 69, + 138 + ], + "produces": [ + 72 + ] + }, + { + "id": 65, + "name": "SOCIAL", + "parent": 88888888, + "consumes": [ + 63, + 85, + 87, + 89, + 119, + 130 + ], + "produces": [ + 63, + 87, + 129 + ] + }, + { + "id": 24, + "name": "STORAGE_BUCKET", + "parent": 88888888, + "consumes": [ + 23, + 29, + 30, + 31, + 32, + 33, + 34, + 130 + ], + "produces": [ + 29, + 30, + 31, + 33, + 34 + ] + }, + { + "id": 16, + "name": "TECHNOLOGY", + "parent": 88888888, + "consumes": [ + 14, + 87, + 150, + 151 + ], + "produces": [ + 26, + 66, + 87, + 89, + 97, + 108, + 147, + 151 + ] + }, + { + "id": 3, + "name": "URL", + "parent": 88888888, + "consumes": [ + 1, + 14, + 23, + 37, + 73, + 80, + 81, + 89, + 93, + 96, + 107, + 108, + 116, + 122, + 128, + 130, + 136, + 140, + 142, + 146, + 150 + ], + "produces": [ + 89, + 93 + ] + }, + { + "id": 75, + "name": "URL_HINT", + "parent": 88888888, + "consumes": [ + 74 + ], + "produces": [ + 96 + ] + }, + { + "id": 19, + "name": "URL_UNVERIFIED", + "parent": 88888888, + "consumes": [ + 42, + 76, + 93, + 109, + 116, + 129, + 130 + ], + "produces": [ + 18, + 27, + 32, + 53, + 57, + 61, + 63, + 69, + 73, + 74, + 84, + 89, + 95, + 122, + 124, + 141, + 148, + 151 + ] + }, + { + "id": 48, + "name": "USERNAME", + "parent": 88888888, + "consumes": [ + 130 + ], + "produces": [ + 44, + 51 + ] + }, + { + "id": 143, + "name": "VHOST", + "parent": 88888888, + "consumes": [ + 150 + ], + "produces": [ + 142 + ] + }, + { + "id": 5, + "name": "VULNERABILITY", + "parent": 88888888, + "consumes": [ + 14, + 150 + ], + "produces": [ + 1, + 21, + 23, + 25, + 26, + 50, + 66, + 80, + 97, + 108, + 136, + 138, + 151 + ] + }, + { + "id": 17, + "name": "WAF", + "parent": 88888888, + "consumes": [ + 14 + ], + "produces": [ + 146 + ] + }, + { + "id": 90, + "name": "WEBSCREENSHOT", + "parent": 88888888, + "consumes": [], + "produces": [ + 89 + ] + }, + { + "id": 71, + "name": "WEB_PARAMETER", + "parent": 88888888, + "consumes": [ + 94, + 111, + 112, + 113, + 149 + ], + "produces": [ + 69, + 111, + 112, + 113 + ] + }, + { + "id": 1, + "name": "ajaxpro", + "parent": 99999999, + "consumes": [ + 2, + 3 + ], + "produces": [ + 4, + 5 + ] + }, + { + "id": 6, + "name": "anubisdb", + "parent": 99999999, + "consumes": [ + 7 + ], + "produces": [ + 7 + ] + }, + { + "id": 8, + "name": "apkpure", + "parent": 99999999, + "consumes": [ + 9 + ], + "produces": [ + 10 + ] + }, + { + "id": 11, + "name": "asn", + "parent": 99999999, + "consumes": [ + 12 + ], + "produces": [ + 13 + ] + }, + { + "id": 14, + "name": "asset_inventory", + "parent": 99999999, + "consumes": [ + 7, + 4, + 2, + 12, + 15, + 16, + 3, + 5, + 17 + ], + "produces": [ + 12, + 15 + ] + }, + { + "id": 18, + "name": "azure_realm", + "parent": 99999999, + "consumes": [ + 7 + ], + "produces": [ + 19 + ] + }, + { + "id": 20, + "name": "azure_tenant", + "parent": 99999999, + 
"consumes": [ + 7 + ], + "produces": [ + 7 + ] + }, + { + "id": 21, + "name": "baddns", + "parent": 99999999, + "consumes": [ + 7, + 22 + ], + "produces": [ + 4, + 5 + ] + }, + { + "id": 23, + "name": "baddns_direct", + "parent": 99999999, + "consumes": [ + 24, + 3 + ], + "produces": [ + 4, + 5 + ] + }, + { + "id": 25, + "name": "baddns_zone", + "parent": 99999999, + "consumes": [ + 7 + ], + "produces": [ + 4, + 5 + ] + }, + { + "id": 26, + "name": "badsecrets", + "parent": 99999999, + "consumes": [ + 2 + ], + "produces": [ + 4, + 16, + 5 + ] + }, + { + "id": 27, + "name": "bevigil", + "parent": 99999999, + "consumes": [ + 7 + ], + "produces": [ + 7, + 19 + ] + }, + { + "id": 28, + "name": "binaryedge", + "parent": 99999999, + "consumes": [ + 7 + ], + "produces": [ + 7 + ] + }, + { + "id": 29, + "name": "bucket_amazon", + "parent": 99999999, + "consumes": [ + 7, + 24 + ], + "produces": [ + 4, + 24 + ] + }, + { + "id": 30, + "name": "bucket_azure", + "parent": 99999999, + "consumes": [ + 7, + 24 + ], + "produces": [ + 4, + 24 + ] + }, + { + "id": 31, + "name": "bucket_digitalocean", + "parent": 99999999, + "consumes": [ + 7, + 24 + ], + "produces": [ + 4, + 24 + ] + }, + { + "id": 32, + "name": "bucket_file_enum", + "parent": 99999999, + "consumes": [ + 24 + ], + "produces": [ + 19 + ] + }, + { + "id": 33, + "name": "bucket_firebase", + "parent": 99999999, + "consumes": [ + 7, + 24 + ], + "produces": [ + 4, + 24 + ] + }, + { + "id": 34, + "name": "bucket_google", + "parent": 99999999, + "consumes": [ + 7, + 24 + ], + "produces": [ + 4, + 24 + ] + }, + { + "id": 35, + "name": "bufferoverrun", + "parent": 99999999, + "consumes": [ + 7 + ], + "produces": [ + 7 + ] + }, + { + "id": 36, + "name": "builtwith", + "parent": 99999999, + "consumes": [ + 7 + ], + "produces": [ + 7 + ] + }, + { + "id": 37, + "name": "bypass403", + "parent": 99999999, + "consumes": [ + 3 + ], + "produces": [ + 4 + ] + }, + { + "id": 38, + "name": "c99", + "parent": 99999999, + "consumes": [ + 7 + ], + "produces": [ + 7 + ] + }, + { + "id": 39, + "name": "censys", + "parent": 99999999, + "consumes": [ + 7 + ], + "produces": [ + 7 + ] + }, + { + "id": 40, + "name": "certspotter", + "parent": 99999999, + "consumes": [ + 7 + ], + "produces": [ + 7 + ] + }, + { + "id": 41, + "name": "chaos", + "parent": 99999999, + "consumes": [ + 7 + ], + "produces": [ + 7 + ] + }, + { + "id": 42, + "name": "code_repository", + "parent": 99999999, + "consumes": [ + 19 + ], + "produces": [ + 43 + ] + }, + { + "id": 44, + "name": "credshed", + "parent": 99999999, + "consumes": [ + 7 + ], + "produces": [ + 45, + 46, + 47, + 48 + ] + }, + { + "id": 49, + "name": "crt", + "parent": 99999999, + "consumes": [ + 7 + ], + "produces": [ + 7 + ] + }, + { + "id": 50, + "name": "dastardly", + "parent": 99999999, + "consumes": [ + 2 + ], + "produces": [ + 4, + 5 + ] + }, + { + "id": 51, + "name": "dehashed", + "parent": 99999999, + "consumes": [ + 7 + ], + "produces": [ + 45, + 46, + 47, + 48 + ] + }, + { + "id": 52, + "name": "digitorus", + "parent": 99999999, + "consumes": [ + 7 + ], + "produces": [ + 7 + ] + }, + { + "id": 53, + "name": "dnsbimi", + "parent": 99999999, + "consumes": [ + 7 + ], + "produces": [ + 54, + 19 + ] + }, + { + "id": 55, + "name": "dnsbrute", + "parent": 99999999, + "consumes": [ + 7 + ], + "produces": [ + 7 + ] + }, + { + "id": 56, + "name": "dnsbrute_mutations", + "parent": 99999999, + "consumes": [ + 7 + ], + "produces": [ + 7 + ] + }, + { + "id": 57, + "name": "dnscaa", + "parent": 99999999, + "consumes": [ + 7 + ], + 
"produces": [ + 7, + 45, + 19 + ] + }, + { + "id": 58, + "name": "dnscommonsrv", + "parent": 99999999, + "consumes": [ + 7 + ], + "produces": [ + 7 + ] + }, + { + "id": 59, + "name": "dnsdumpster", + "parent": 99999999, + "consumes": [ + 7 + ], + "produces": [ + 7 + ] + }, + { + "id": 60, + "name": "dnsresolve", + "parent": 99999999, + "consumes": [], + "produces": [ + 7, + 12, + 54 + ] + }, + { + "id": 61, + "name": "dnstlsrpt", + "parent": 99999999, + "consumes": [ + 7 + ], + "produces": [ + 45, + 54, + 19 + ] + }, + { + "id": 62, + "name": "docker_pull", + "parent": 99999999, + "consumes": [ + 43 + ], + "produces": [ + 10 + ] + }, + { + "id": 63, + "name": "dockerhub", + "parent": 99999999, + "consumes": [ + 64, + 65 + ], + "produces": [ + 43, + 65, + 19 + ] + }, + { + "id": 66, + "name": "dotnetnuke", + "parent": 99999999, + "consumes": [ + 2 + ], + "produces": [ + 16, + 5 + ] + }, + { + "id": 67, + "name": "emailformat", + "parent": 99999999, + "consumes": [ + 7 + ], + "produces": [ + 45 + ] + }, + { + "id": 68, + "name": "emails", + "parent": 99999999, + "consumes": [ + 45 + ], + "produces": [] + }, + { + "id": 69, + "name": "excavate", + "parent": 99999999, + "consumes": [ + 2, + 70 + ], + "produces": [ + 19, + 71 + ] + }, + { + "id": 72, + "name": "extractous", + "parent": 99999999, + "consumes": [ + 10 + ], + "produces": [ + 70 + ] + }, + { + "id": 73, + "name": "ffuf", + "parent": 99999999, + "consumes": [ + 3 + ], + "produces": [ + 19 + ] + }, + { + "id": 74, + "name": "ffuf_shortnames", + "parent": 99999999, + "consumes": [ + 75 + ], + "produces": [ + 19 + ] + }, + { + "id": 76, + "name": "filedownload", + "parent": 99999999, + "consumes": [ + 2, + 19 + ], + "produces": [ + 10 + ] + }, + { + "id": 77, + "name": "fingerprintx", + "parent": 99999999, + "consumes": [ + 15 + ], + "produces": [ + 78 + ] + }, + { + "id": 79, + "name": "fullhunt", + "parent": 99999999, + "consumes": [ + 7 + ], + "produces": [ + 7 + ] + }, + { + "id": 80, + "name": "generic_ssrf", + "parent": 99999999, + "consumes": [ + 3 + ], + "produces": [ + 5 + ] + }, + { + "id": 81, + "name": "git", + "parent": 99999999, + "consumes": [ + 3 + ], + "produces": [ + 43, + 4 + ] + }, + { + "id": 82, + "name": "git_clone", + "parent": 99999999, + "consumes": [ + 43 + ], + "produces": [ + 10 + ] + }, + { + "id": 83, + "name": "gitdumper", + "parent": 99999999, + "consumes": [ + 43 + ], + "produces": [ + 10 + ] + }, + { + "id": 84, + "name": "github_codesearch", + "parent": 99999999, + "consumes": [ + 7 + ], + "produces": [ + 43, + 19 + ] + }, + { + "id": 85, + "name": "github_org", + "parent": 99999999, + "consumes": [ + 64, + 65 + ], + "produces": [ + 43 + ] + }, + { + "id": 86, + "name": "github_workflows", + "parent": 99999999, + "consumes": [ + 43 + ], + "produces": [ + 10 + ] + }, + { + "id": 87, + "name": "gitlab", + "parent": 99999999, + "consumes": [ + 2, + 65, + 16 + ], + "produces": [ + 43, + 4, + 65, + 16 + ] + }, + { + "id": 88, + "name": "google_playstore", + "parent": 99999999, + "consumes": [ + 43, + 64 + ], + "produces": [ + 9 + ] + }, + { + "id": 89, + "name": "gowitness", + "parent": 99999999, + "consumes": [ + 65, + 3 + ], + "produces": [ + 16, + 3, + 19, + 90 + ] + }, + { + "id": 91, + "name": "hackertarget", + "parent": 99999999, + "consumes": [ + 7 + ], + "produces": [ + 7 + ] + }, + { + "id": 92, + "name": "host_header", + "parent": 99999999, + "consumes": [ + 2 + ], + "produces": [ + 4 + ] + }, + { + "id": 93, + "name": "httpx", + "parent": 99999999, + "consumes": [ + 15, + 3, + 19 + ], + 
"produces": [ + 2, + 3 + ] + }, + { + "id": 94, + "name": "hunt", + "parent": 99999999, + "consumes": [ + 71 + ], + "produces": [ + 4 + ] + }, + { + "id": 95, + "name": "hunterio", + "parent": 99999999, + "consumes": [ + 7 + ], + "produces": [ + 7, + 45, + 19 + ] + }, + { + "id": 96, + "name": "iis_shortnames", + "parent": 99999999, + "consumes": [ + 3 + ], + "produces": [ + 75 + ] + }, + { + "id": 97, + "name": "internetdb", + "parent": 99999999, + "consumes": [ + 7, + 12 + ], + "produces": [ + 7, + 4, + 15, + 16, + 5 + ] + }, + { + "id": 98, + "name": "ip2location", + "parent": 99999999, + "consumes": [ + 12 + ], + "produces": [ + 99 + ] + }, + { + "id": 100, + "name": "ipneighbor", + "parent": 99999999, + "consumes": [ + 12 + ], + "produces": [ + 12 + ] + }, + { + "id": 101, + "name": "ipstack", + "parent": 99999999, + "consumes": [ + 12 + ], + "produces": [ + 99 + ] + }, + { + "id": 102, + "name": "jadx", + "parent": 99999999, + "consumes": [ + 10 + ], + "produces": [ + 10 + ] + }, + { + "id": 103, + "name": "leakix", + "parent": 99999999, + "consumes": [ + 7 + ], + "produces": [ + 7 + ] + }, + { + "id": 104, + "name": "myssl", + "parent": 99999999, + "consumes": [ + 7 + ], + "produces": [ + 7 + ] + }, + { + "id": 105, + "name": "newsletters", + "parent": 99999999, + "consumes": [ + 2 + ], + "produces": [ + 4 + ] + }, + { + "id": 106, + "name": "nmap_xml", + "parent": 99999999, + "consumes": [ + 7, + 2, + 12, + 15, + 78 + ], + "produces": [] + }, + { + "id": 107, + "name": "ntlm", + "parent": 99999999, + "consumes": [ + 2, + 3 + ], + "produces": [ + 7, + 4 + ] + }, + { + "id": 108, + "name": "nuclei", + "parent": 99999999, + "consumes": [ + 3 + ], + "produces": [ + 4, + 16, + 5 + ] + }, + { + "id": 109, + "name": "oauth", + "parent": 99999999, + "consumes": [ + 7, + 19 + ], + "produces": [ + 7 + ] + }, + { + "id": 110, + "name": "otx", + "parent": 99999999, + "consumes": [ + 7 + ], + "produces": [ + 7 + ] + }, + { + "id": 111, + "name": "paramminer_cookies", + "parent": 99999999, + "consumes": [ + 2, + 71 + ], + "produces": [ + 4, + 71 + ] + }, + { + "id": 112, + "name": "paramminer_getparams", + "parent": 99999999, + "consumes": [ + 2, + 71 + ], + "produces": [ + 4, + 71 + ] + }, + { + "id": 113, + "name": "paramminer_headers", + "parent": 99999999, + "consumes": [ + 2, + 71 + ], + "produces": [ + 71 + ] + }, + { + "id": 114, + "name": "passivetotal", + "parent": 99999999, + "consumes": [ + 7 + ], + "produces": [ + 7 + ] + }, + { + "id": 115, + "name": "pgp", + "parent": 99999999, + "consumes": [ + 7 + ], + "produces": [ + 45 + ] + }, + { + "id": 116, + "name": "portfilter", + "parent": 99999999, + "consumes": [ + 15, + 3, + 19 + ], + "produces": [] + }, + { + "id": 117, + "name": "portscan", + "parent": 99999999, + "consumes": [ + 7, + 12, + 118 + ], + "produces": [ + 15 + ] + }, + { + "id": 119, + "name": "postman", + "parent": 99999999, + "consumes": [ + 64, + 65 + ], + "produces": [ + 43 + ] + }, + { + "id": 120, + "name": "postman_download", + "parent": 99999999, + "consumes": [ + 43 + ], + "produces": [ + 10 + ] + }, + { + "id": 121, + "name": "rapiddns", + "parent": 99999999, + "consumes": [ + 7 + ], + "produces": [ + 7 + ] + }, + { + "id": 122, + "name": "robots", + "parent": 99999999, + "consumes": [ + 3 + ], + "produces": [ + 19 + ] + }, + { + "id": 123, + "name": "securitytrails", + "parent": 99999999, + "consumes": [ + 7 + ], + "produces": [ + 7 + ] + }, + { + "id": 124, + "name": "securitytxt", + "parent": 99999999, + "consumes": [ + 7 + ], + "produces": [ + 45, + 19 + ] 
+ }, + { + "id": 125, + "name": "shodan_dns", + "parent": 99999999, + "consumes": [ + 7 + ], + "produces": [ + 7 + ] + }, + { + "id": 126, + "name": "sitedossier", + "parent": 99999999, + "consumes": [ + 7 + ], + "produces": [ + 7 + ] + }, + { + "id": 127, + "name": "skymem", + "parent": 99999999, + "consumes": [ + 7 + ], + "produces": [ + 45 + ] + }, + { + "id": 128, + "name": "smuggler", + "parent": 99999999, + "consumes": [ + 3 + ], + "produces": [ + 4 + ] + }, + { + "id": 129, + "name": "social", + "parent": 99999999, + "consumes": [ + 19 + ], + "produces": [ + 65 + ] + }, + { + "id": 130, + "name": "speculate", + "parent": 99999999, + "consumes": [ + 131, + 7, + 22, + 2, + 12, + 118, + 65, + 24, + 3, + 19, + 48 + ], + "produces": [ + 7, + 4, + 12, + 15, + 64 + ] + }, + { + "id": 132, + "name": "sslcert", + "parent": 99999999, + "consumes": [ + 15 + ], + "produces": [ + 7, + 45 + ] + }, + { + "id": 133, + "name": "subdomaincenter", + "parent": 99999999, + "consumes": [ + 7 + ], + "produces": [ + 7 + ] + }, + { + "id": 134, + "name": "subdomainradar", + "parent": 99999999, + "consumes": [ + 7 + ], + "produces": [ + 7 + ] + }, + { + "id": 135, + "name": "subdomains", + "parent": 99999999, + "consumes": [ + 7, + 22 + ], + "produces": [] + }, + { + "id": 136, + "name": "telerik", + "parent": 99999999, + "consumes": [ + 2, + 3 + ], + "produces": [ + 4, + 5 + ] + }, + { + "id": 137, + "name": "trickest", + "parent": 99999999, + "consumes": [ + 7 + ], + "produces": [ + 7 + ] + }, + { + "id": 138, + "name": "trufflehog", + "parent": 99999999, + "consumes": [ + 43, + 10, + 2, + 70 + ], + "produces": [ + 4, + 5 + ] + }, + { + "id": 139, + "name": "unarchive", + "parent": 99999999, + "consumes": [ + 10 + ], + "produces": [ + 10 + ] + }, + { + "id": 140, + "name": "url_manipulation", + "parent": 99999999, + "consumes": [ + 3 + ], + "produces": [ + 4 + ] + }, + { + "id": 141, + "name": "urlscan", + "parent": 99999999, + "consumes": [ + 7 + ], + "produces": [ + 7, + 19 + ] + }, + { + "id": 142, + "name": "vhost", + "parent": 99999999, + "consumes": [ + 3 + ], + "produces": [ + 7, + 143 + ] + }, + { + "id": 144, + "name": "viewdns", + "parent": 99999999, + "consumes": [ + 7 + ], + "produces": [ + 7 + ] + }, + { + "id": 145, + "name": "virustotal", + "parent": 99999999, + "consumes": [ + 7 + ], + "produces": [ + 7 + ] + }, + { + "id": 146, + "name": "wafw00f", + "parent": 99999999, + "consumes": [ + 3 + ], + "produces": [ + 17 + ] + }, + { + "id": 147, + "name": "wappalyzer", + "parent": 99999999, + "consumes": [ + 2 + ], + "produces": [ + 16 + ] + }, + { + "id": 148, + "name": "wayback", + "parent": 99999999, + "consumes": [ + 7 + ], + "produces": [ + 7, + 19 + ] + }, + { + "id": 149, + "name": "web_parameters", + "parent": 99999999, + "consumes": [ + 71 + ], + "produces": [] + }, + { + "id": 150, + "name": "web_report", + "parent": 99999999, + "consumes": [ + 4, + 16, + 3, + 143, + 5 + ], + "produces": [] + }, + { + "id": 151, + "name": "wpscan", + "parent": 99999999, + "consumes": [ + 2, + 16 + ], + "produces": [ + 4, + 16, + 19, + 5 + ] + }, + { + "id": 152, + "name": "zoomeye", + "parent": 99999999, + "consumes": [ + 7 + ], + "produces": [ + 7 + ] + } +] \ No newline at end of file diff --git a/docs/data/chord_graph/rels.json b/docs/data/chord_graph/rels.json new file mode 100644 index 0000000000..d7834469ae --- /dev/null +++ b/docs/data/chord_graph/rels.json @@ -0,0 +1,1797 @@ +[ + { + "source": 1, + "target": 2, + "type": "consumes" + }, + { + "source": 1, + "target": 3, + "type": "consumes" + 
}, + { + "source": 4, + "target": 1, + "type": "produces" + }, + { + "source": 5, + "target": 1, + "type": "produces" + }, + { + "source": 6, + "target": 7, + "type": "consumes" + }, + { + "source": 7, + "target": 6, + "type": "produces" + }, + { + "source": 8, + "target": 9, + "type": "consumes" + }, + { + "source": 10, + "target": 8, + "type": "produces" + }, + { + "source": 11, + "target": 12, + "type": "consumes" + }, + { + "source": 13, + "target": 11, + "type": "produces" + }, + { + "source": 14, + "target": 7, + "type": "consumes" + }, + { + "source": 14, + "target": 4, + "type": "consumes" + }, + { + "source": 14, + "target": 2, + "type": "consumes" + }, + { + "source": 14, + "target": 12, + "type": "consumes" + }, + { + "source": 14, + "target": 15, + "type": "consumes" + }, + { + "source": 14, + "target": 16, + "type": "consumes" + }, + { + "source": 14, + "target": 3, + "type": "consumes" + }, + { + "source": 14, + "target": 5, + "type": "consumes" + }, + { + "source": 14, + "target": 17, + "type": "consumes" + }, + { + "source": 12, + "target": 14, + "type": "produces" + }, + { + "source": 15, + "target": 14, + "type": "produces" + }, + { + "source": 18, + "target": 7, + "type": "consumes" + }, + { + "source": 19, + "target": 18, + "type": "produces" + }, + { + "source": 20, + "target": 7, + "type": "consumes" + }, + { + "source": 7, + "target": 20, + "type": "produces" + }, + { + "source": 21, + "target": 7, + "type": "consumes" + }, + { + "source": 21, + "target": 22, + "type": "consumes" + }, + { + "source": 4, + "target": 21, + "type": "produces" + }, + { + "source": 5, + "target": 21, + "type": "produces" + }, + { + "source": 23, + "target": 24, + "type": "consumes" + }, + { + "source": 23, + "target": 3, + "type": "consumes" + }, + { + "source": 4, + "target": 23, + "type": "produces" + }, + { + "source": 5, + "target": 23, + "type": "produces" + }, + { + "source": 25, + "target": 7, + "type": "consumes" + }, + { + "source": 4, + "target": 25, + "type": "produces" + }, + { + "source": 5, + "target": 25, + "type": "produces" + }, + { + "source": 26, + "target": 2, + "type": "consumes" + }, + { + "source": 4, + "target": 26, + "type": "produces" + }, + { + "source": 16, + "target": 26, + "type": "produces" + }, + { + "source": 5, + "target": 26, + "type": "produces" + }, + { + "source": 27, + "target": 7, + "type": "consumes" + }, + { + "source": 7, + "target": 27, + "type": "produces" + }, + { + "source": 19, + "target": 27, + "type": "produces" + }, + { + "source": 28, + "target": 7, + "type": "consumes" + }, + { + "source": 7, + "target": 28, + "type": "produces" + }, + { + "source": 29, + "target": 7, + "type": "consumes" + }, + { + "source": 29, + "target": 24, + "type": "consumes" + }, + { + "source": 4, + "target": 29, + "type": "produces" + }, + { + "source": 24, + "target": 29, + "type": "produces" + }, + { + "source": 30, + "target": 7, + "type": "consumes" + }, + { + "source": 30, + "target": 24, + "type": "consumes" + }, + { + "source": 4, + "target": 30, + "type": "produces" + }, + { + "source": 24, + "target": 30, + "type": "produces" + }, + { + "source": 31, + "target": 7, + "type": "consumes" + }, + { + "source": 31, + "target": 24, + "type": "consumes" + }, + { + "source": 4, + "target": 31, + "type": "produces" + }, + { + "source": 24, + "target": 31, + "type": "produces" + }, + { + "source": 32, + "target": 24, + "type": "consumes" + }, + { + "source": 19, + "target": 32, + "type": "produces" + }, + { + "source": 33, + "target": 7, + "type": "consumes" + 
}, + { + "source": 33, + "target": 24, + "type": "consumes" + }, + { + "source": 4, + "target": 33, + "type": "produces" + }, + { + "source": 24, + "target": 33, + "type": "produces" + }, + { + "source": 34, + "target": 7, + "type": "consumes" + }, + { + "source": 34, + "target": 24, + "type": "consumes" + }, + { + "source": 4, + "target": 34, + "type": "produces" + }, + { + "source": 24, + "target": 34, + "type": "produces" + }, + { + "source": 35, + "target": 7, + "type": "consumes" + }, + { + "source": 7, + "target": 35, + "type": "produces" + }, + { + "source": 36, + "target": 7, + "type": "consumes" + }, + { + "source": 7, + "target": 36, + "type": "produces" + }, + { + "source": 37, + "target": 3, + "type": "consumes" + }, + { + "source": 4, + "target": 37, + "type": "produces" + }, + { + "source": 38, + "target": 7, + "type": "consumes" + }, + { + "source": 7, + "target": 38, + "type": "produces" + }, + { + "source": 39, + "target": 7, + "type": "consumes" + }, + { + "source": 7, + "target": 39, + "type": "produces" + }, + { + "source": 40, + "target": 7, + "type": "consumes" + }, + { + "source": 7, + "target": 40, + "type": "produces" + }, + { + "source": 41, + "target": 7, + "type": "consumes" + }, + { + "source": 7, + "target": 41, + "type": "produces" + }, + { + "source": 42, + "target": 19, + "type": "consumes" + }, + { + "source": 43, + "target": 42, + "type": "produces" + }, + { + "source": 44, + "target": 7, + "type": "consumes" + }, + { + "source": 45, + "target": 44, + "type": "produces" + }, + { + "source": 46, + "target": 44, + "type": "produces" + }, + { + "source": 47, + "target": 44, + "type": "produces" + }, + { + "source": 48, + "target": 44, + "type": "produces" + }, + { + "source": 49, + "target": 7, + "type": "consumes" + }, + { + "source": 7, + "target": 49, + "type": "produces" + }, + { + "source": 50, + "target": 2, + "type": "consumes" + }, + { + "source": 4, + "target": 50, + "type": "produces" + }, + { + "source": 5, + "target": 50, + "type": "produces" + }, + { + "source": 51, + "target": 7, + "type": "consumes" + }, + { + "source": 45, + "target": 51, + "type": "produces" + }, + { + "source": 46, + "target": 51, + "type": "produces" + }, + { + "source": 47, + "target": 51, + "type": "produces" + }, + { + "source": 48, + "target": 51, + "type": "produces" + }, + { + "source": 52, + "target": 7, + "type": "consumes" + }, + { + "source": 7, + "target": 52, + "type": "produces" + }, + { + "source": 53, + "target": 7, + "type": "consumes" + }, + { + "source": 54, + "target": 53, + "type": "produces" + }, + { + "source": 19, + "target": 53, + "type": "produces" + }, + { + "source": 55, + "target": 7, + "type": "consumes" + }, + { + "source": 7, + "target": 55, + "type": "produces" + }, + { + "source": 56, + "target": 7, + "type": "consumes" + }, + { + "source": 7, + "target": 56, + "type": "produces" + }, + { + "source": 57, + "target": 7, + "type": "consumes" + }, + { + "source": 7, + "target": 57, + "type": "produces" + }, + { + "source": 45, + "target": 57, + "type": "produces" + }, + { + "source": 19, + "target": 57, + "type": "produces" + }, + { + "source": 58, + "target": 7, + "type": "consumes" + }, + { + "source": 7, + "target": 58, + "type": "produces" + }, + { + "source": 59, + "target": 7, + "type": "consumes" + }, + { + "source": 7, + "target": 59, + "type": "produces" + }, + { + "source": 7, + "target": 60, + "type": "produces" + }, + { + "source": 12, + "target": 60, + "type": "produces" + }, + { + "source": 54, + "target": 60, + "type": 
"produces" + }, + { + "source": 61, + "target": 7, + "type": "consumes" + }, + { + "source": 45, + "target": 61, + "type": "produces" + }, + { + "source": 54, + "target": 61, + "type": "produces" + }, + { + "source": 19, + "target": 61, + "type": "produces" + }, + { + "source": 62, + "target": 43, + "type": "consumes" + }, + { + "source": 10, + "target": 62, + "type": "produces" + }, + { + "source": 63, + "target": 64, + "type": "consumes" + }, + { + "source": 63, + "target": 65, + "type": "consumes" + }, + { + "source": 43, + "target": 63, + "type": "produces" + }, + { + "source": 65, + "target": 63, + "type": "produces" + }, + { + "source": 19, + "target": 63, + "type": "produces" + }, + { + "source": 66, + "target": 2, + "type": "consumes" + }, + { + "source": 16, + "target": 66, + "type": "produces" + }, + { + "source": 5, + "target": 66, + "type": "produces" + }, + { + "source": 67, + "target": 7, + "type": "consumes" + }, + { + "source": 45, + "target": 67, + "type": "produces" + }, + { + "source": 68, + "target": 45, + "type": "consumes" + }, + { + "source": 69, + "target": 2, + "type": "consumes" + }, + { + "source": 69, + "target": 70, + "type": "consumes" + }, + { + "source": 19, + "target": 69, + "type": "produces" + }, + { + "source": 71, + "target": 69, + "type": "produces" + }, + { + "source": 72, + "target": 10, + "type": "consumes" + }, + { + "source": 70, + "target": 72, + "type": "produces" + }, + { + "source": 73, + "target": 3, + "type": "consumes" + }, + { + "source": 19, + "target": 73, + "type": "produces" + }, + { + "source": 74, + "target": 75, + "type": "consumes" + }, + { + "source": 19, + "target": 74, + "type": "produces" + }, + { + "source": 76, + "target": 2, + "type": "consumes" + }, + { + "source": 76, + "target": 19, + "type": "consumes" + }, + { + "source": 10, + "target": 76, + "type": "produces" + }, + { + "source": 77, + "target": 15, + "type": "consumes" + }, + { + "source": 78, + "target": 77, + "type": "produces" + }, + { + "source": 79, + "target": 7, + "type": "consumes" + }, + { + "source": 7, + "target": 79, + "type": "produces" + }, + { + "source": 80, + "target": 3, + "type": "consumes" + }, + { + "source": 5, + "target": 80, + "type": "produces" + }, + { + "source": 81, + "target": 3, + "type": "consumes" + }, + { + "source": 43, + "target": 81, + "type": "produces" + }, + { + "source": 4, + "target": 81, + "type": "produces" + }, + { + "source": 82, + "target": 43, + "type": "consumes" + }, + { + "source": 10, + "target": 82, + "type": "produces" + }, + { + "source": 83, + "target": 43, + "type": "consumes" + }, + { + "source": 10, + "target": 83, + "type": "produces" + }, + { + "source": 84, + "target": 7, + "type": "consumes" + }, + { + "source": 43, + "target": 84, + "type": "produces" + }, + { + "source": 19, + "target": 84, + "type": "produces" + }, + { + "source": 85, + "target": 64, + "type": "consumes" + }, + { + "source": 85, + "target": 65, + "type": "consumes" + }, + { + "source": 43, + "target": 85, + "type": "produces" + }, + { + "source": 86, + "target": 43, + "type": "consumes" + }, + { + "source": 10, + "target": 86, + "type": "produces" + }, + { + "source": 87, + "target": 2, + "type": "consumes" + }, + { + "source": 87, + "target": 65, + "type": "consumes" + }, + { + "source": 87, + "target": 16, + "type": "consumes" + }, + { + "source": 43, + "target": 87, + "type": "produces" + }, + { + "source": 4, + "target": 87, + "type": "produces" + }, + { + "source": 65, + "target": 87, + "type": "produces" + }, + { + "source": 16, 
+ "target": 87, + "type": "produces" + }, + { + "source": 88, + "target": 43, + "type": "consumes" + }, + { + "source": 88, + "target": 64, + "type": "consumes" + }, + { + "source": 9, + "target": 88, + "type": "produces" + }, + { + "source": 89, + "target": 65, + "type": "consumes" + }, + { + "source": 89, + "target": 3, + "type": "consumes" + }, + { + "source": 16, + "target": 89, + "type": "produces" + }, + { + "source": 3, + "target": 89, + "type": "produces" + }, + { + "source": 19, + "target": 89, + "type": "produces" + }, + { + "source": 90, + "target": 89, + "type": "produces" + }, + { + "source": 91, + "target": 7, + "type": "consumes" + }, + { + "source": 7, + "target": 91, + "type": "produces" + }, + { + "source": 92, + "target": 2, + "type": "consumes" + }, + { + "source": 4, + "target": 92, + "type": "produces" + }, + { + "source": 93, + "target": 15, + "type": "consumes" + }, + { + "source": 93, + "target": 3, + "type": "consumes" + }, + { + "source": 93, + "target": 19, + "type": "consumes" + }, + { + "source": 2, + "target": 93, + "type": "produces" + }, + { + "source": 3, + "target": 93, + "type": "produces" + }, + { + "source": 94, + "target": 71, + "type": "consumes" + }, + { + "source": 4, + "target": 94, + "type": "produces" + }, + { + "source": 95, + "target": 7, + "type": "consumes" + }, + { + "source": 7, + "target": 95, + "type": "produces" + }, + { + "source": 45, + "target": 95, + "type": "produces" + }, + { + "source": 19, + "target": 95, + "type": "produces" + }, + { + "source": 96, + "target": 3, + "type": "consumes" + }, + { + "source": 75, + "target": 96, + "type": "produces" + }, + { + "source": 97, + "target": 7, + "type": "consumes" + }, + { + "source": 97, + "target": 12, + "type": "consumes" + }, + { + "source": 7, + "target": 97, + "type": "produces" + }, + { + "source": 4, + "target": 97, + "type": "produces" + }, + { + "source": 15, + "target": 97, + "type": "produces" + }, + { + "source": 16, + "target": 97, + "type": "produces" + }, + { + "source": 5, + "target": 97, + "type": "produces" + }, + { + "source": 98, + "target": 12, + "type": "consumes" + }, + { + "source": 99, + "target": 98, + "type": "produces" + }, + { + "source": 100, + "target": 12, + "type": "consumes" + }, + { + "source": 12, + "target": 100, + "type": "produces" + }, + { + "source": 101, + "target": 12, + "type": "consumes" + }, + { + "source": 99, + "target": 101, + "type": "produces" + }, + { + "source": 102, + "target": 10, + "type": "consumes" + }, + { + "source": 10, + "target": 102, + "type": "produces" + }, + { + "source": 103, + "target": 7, + "type": "consumes" + }, + { + "source": 7, + "target": 103, + "type": "produces" + }, + { + "source": 104, + "target": 7, + "type": "consumes" + }, + { + "source": 7, + "target": 104, + "type": "produces" + }, + { + "source": 105, + "target": 2, + "type": "consumes" + }, + { + "source": 4, + "target": 105, + "type": "produces" + }, + { + "source": 106, + "target": 7, + "type": "consumes" + }, + { + "source": 106, + "target": 2, + "type": "consumes" + }, + { + "source": 106, + "target": 12, + "type": "consumes" + }, + { + "source": 106, + "target": 15, + "type": "consumes" + }, + { + "source": 106, + "target": 78, + "type": "consumes" + }, + { + "source": 107, + "target": 2, + "type": "consumes" + }, + { + "source": 107, + "target": 3, + "type": "consumes" + }, + { + "source": 7, + "target": 107, + "type": "produces" + }, + { + "source": 4, + "target": 107, + "type": "produces" + }, + { + "source": 108, + "target": 3, + "type": 
"consumes" + }, + { + "source": 4, + "target": 108, + "type": "produces" + }, + { + "source": 16, + "target": 108, + "type": "produces" + }, + { + "source": 5, + "target": 108, + "type": "produces" + }, + { + "source": 109, + "target": 7, + "type": "consumes" + }, + { + "source": 109, + "target": 19, + "type": "consumes" + }, + { + "source": 7, + "target": 109, + "type": "produces" + }, + { + "source": 110, + "target": 7, + "type": "consumes" + }, + { + "source": 7, + "target": 110, + "type": "produces" + }, + { + "source": 111, + "target": 2, + "type": "consumes" + }, + { + "source": 111, + "target": 71, + "type": "consumes" + }, + { + "source": 4, + "target": 111, + "type": "produces" + }, + { + "source": 71, + "target": 111, + "type": "produces" + }, + { + "source": 112, + "target": 2, + "type": "consumes" + }, + { + "source": 112, + "target": 71, + "type": "consumes" + }, + { + "source": 4, + "target": 112, + "type": "produces" + }, + { + "source": 71, + "target": 112, + "type": "produces" + }, + { + "source": 113, + "target": 2, + "type": "consumes" + }, + { + "source": 113, + "target": 71, + "type": "consumes" + }, + { + "source": 71, + "target": 113, + "type": "produces" + }, + { + "source": 114, + "target": 7, + "type": "consumes" + }, + { + "source": 7, + "target": 114, + "type": "produces" + }, + { + "source": 115, + "target": 7, + "type": "consumes" + }, + { + "source": 45, + "target": 115, + "type": "produces" + }, + { + "source": 116, + "target": 15, + "type": "consumes" + }, + { + "source": 116, + "target": 3, + "type": "consumes" + }, + { + "source": 116, + "target": 19, + "type": "consumes" + }, + { + "source": 117, + "target": 7, + "type": "consumes" + }, + { + "source": 117, + "target": 12, + "type": "consumes" + }, + { + "source": 117, + "target": 118, + "type": "consumes" + }, + { + "source": 15, + "target": 117, + "type": "produces" + }, + { + "source": 119, + "target": 64, + "type": "consumes" + }, + { + "source": 119, + "target": 65, + "type": "consumes" + }, + { + "source": 43, + "target": 119, + "type": "produces" + }, + { + "source": 120, + "target": 43, + "type": "consumes" + }, + { + "source": 10, + "target": 120, + "type": "produces" + }, + { + "source": 121, + "target": 7, + "type": "consumes" + }, + { + "source": 7, + "target": 121, + "type": "produces" + }, + { + "source": 122, + "target": 3, + "type": "consumes" + }, + { + "source": 19, + "target": 122, + "type": "produces" + }, + { + "source": 123, + "target": 7, + "type": "consumes" + }, + { + "source": 7, + "target": 123, + "type": "produces" + }, + { + "source": 124, + "target": 7, + "type": "consumes" + }, + { + "source": 45, + "target": 124, + "type": "produces" + }, + { + "source": 19, + "target": 124, + "type": "produces" + }, + { + "source": 125, + "target": 7, + "type": "consumes" + }, + { + "source": 7, + "target": 125, + "type": "produces" + }, + { + "source": 126, + "target": 7, + "type": "consumes" + }, + { + "source": 7, + "target": 126, + "type": "produces" + }, + { + "source": 127, + "target": 7, + "type": "consumes" + }, + { + "source": 45, + "target": 127, + "type": "produces" + }, + { + "source": 128, + "target": 3, + "type": "consumes" + }, + { + "source": 4, + "target": 128, + "type": "produces" + }, + { + "source": 129, + "target": 19, + "type": "consumes" + }, + { + "source": 65, + "target": 129, + "type": "produces" + }, + { + "source": 130, + "target": 131, + "type": "consumes" + }, + { + "source": 130, + "target": 7, + "type": "consumes" + }, + { + "source": 130, + "target": 22, + 
"type": "consumes" + }, + { + "source": 130, + "target": 2, + "type": "consumes" + }, + { + "source": 130, + "target": 12, + "type": "consumes" + }, + { + "source": 130, + "target": 118, + "type": "consumes" + }, + { + "source": 130, + "target": 65, + "type": "consumes" + }, + { + "source": 130, + "target": 24, + "type": "consumes" + }, + { + "source": 130, + "target": 3, + "type": "consumes" + }, + { + "source": 130, + "target": 19, + "type": "consumes" + }, + { + "source": 130, + "target": 48, + "type": "consumes" + }, + { + "source": 7, + "target": 130, + "type": "produces" + }, + { + "source": 4, + "target": 130, + "type": "produces" + }, + { + "source": 12, + "target": 130, + "type": "produces" + }, + { + "source": 15, + "target": 130, + "type": "produces" + }, + { + "source": 64, + "target": 130, + "type": "produces" + }, + { + "source": 132, + "target": 15, + "type": "consumes" + }, + { + "source": 7, + "target": 132, + "type": "produces" + }, + { + "source": 45, + "target": 132, + "type": "produces" + }, + { + "source": 133, + "target": 7, + "type": "consumes" + }, + { + "source": 7, + "target": 133, + "type": "produces" + }, + { + "source": 134, + "target": 7, + "type": "consumes" + }, + { + "source": 7, + "target": 134, + "type": "produces" + }, + { + "source": 135, + "target": 7, + "type": "consumes" + }, + { + "source": 135, + "target": 22, + "type": "consumes" + }, + { + "source": 136, + "target": 2, + "type": "consumes" + }, + { + "source": 136, + "target": 3, + "type": "consumes" + }, + { + "source": 4, + "target": 136, + "type": "produces" + }, + { + "source": 5, + "target": 136, + "type": "produces" + }, + { + "source": 137, + "target": 7, + "type": "consumes" + }, + { + "source": 7, + "target": 137, + "type": "produces" + }, + { + "source": 138, + "target": 43, + "type": "consumes" + }, + { + "source": 138, + "target": 10, + "type": "consumes" + }, + { + "source": 138, + "target": 2, + "type": "consumes" + }, + { + "source": 138, + "target": 70, + "type": "consumes" + }, + { + "source": 4, + "target": 138, + "type": "produces" + }, + { + "source": 5, + "target": 138, + "type": "produces" + }, + { + "source": 139, + "target": 10, + "type": "consumes" + }, + { + "source": 10, + "target": 139, + "type": "produces" + }, + { + "source": 140, + "target": 3, + "type": "consumes" + }, + { + "source": 4, + "target": 140, + "type": "produces" + }, + { + "source": 141, + "target": 7, + "type": "consumes" + }, + { + "source": 7, + "target": 141, + "type": "produces" + }, + { + "source": 19, + "target": 141, + "type": "produces" + }, + { + "source": 142, + "target": 3, + "type": "consumes" + }, + { + "source": 7, + "target": 142, + "type": "produces" + }, + { + "source": 143, + "target": 142, + "type": "produces" + }, + { + "source": 144, + "target": 7, + "type": "consumes" + }, + { + "source": 7, + "target": 144, + "type": "produces" + }, + { + "source": 145, + "target": 7, + "type": "consumes" + }, + { + "source": 7, + "target": 145, + "type": "produces" + }, + { + "source": 146, + "target": 3, + "type": "consumes" + }, + { + "source": 17, + "target": 146, + "type": "produces" + }, + { + "source": 147, + "target": 2, + "type": "consumes" + }, + { + "source": 16, + "target": 147, + "type": "produces" + }, + { + "source": 148, + "target": 7, + "type": "consumes" + }, + { + "source": 7, + "target": 148, + "type": "produces" + }, + { + "source": 19, + "target": 148, + "type": "produces" + }, + { + "source": 149, + "target": 71, + "type": "consumes" + }, + { + "source": 150, + "target": 
4, + "type": "consumes" + }, + { + "source": 150, + "target": 16, + "type": "consumes" + }, + { + "source": 150, + "target": 3, + "type": "consumes" + }, + { + "source": 150, + "target": 143, + "type": "consumes" + }, + { + "source": 150, + "target": 5, + "type": "consumes" + }, + { + "source": 151, + "target": 2, + "type": "consumes" + }, + { + "source": 151, + "target": 16, + "type": "consumes" + }, + { + "source": 4, + "target": 151, + "type": "produces" + }, + { + "source": 16, + "target": 151, + "type": "produces" + }, + { + "source": 19, + "target": 151, + "type": "produces" + }, + { + "source": 5, + "target": 151, + "type": "produces" + }, + { + "source": 152, + "target": 7, + "type": "consumes" + }, + { + "source": 7, + "target": 152, + "type": "produces" + } +] \ No newline at end of file diff --git a/docs/data/chord_graph/vega.json b/docs/data/chord_graph/vega.json new file mode 100644 index 0000000000..86ee688486 --- /dev/null +++ b/docs/data/chord_graph/vega.json @@ -0,0 +1,197 @@ +{ + "$schema": "https://vega.github.io/schema/vega/v5.json", + "description": "BBOT", + "padding": 20, + "width": 800, + "height": 800, + "autosize": "none", + + "signals": [ + { "name": "producesColor", "value": "#ff8400" }, + { "name": "consumesColor", "value": "white" }, + { "name": "originX", "update": "width / 2" }, + { "name": "originY", "update": "height / 2" }, + { + "name": "active", "value": "{id: 555555555, consumes: []}", + "on": [ + { "events": "text:pointerover", "update": "datum" }, + { "events": "pointerover[!event.item]", "update": "{id: 555555555, consumes: []}" } + ] + } + ], + + "data": [ + { + "name": "entities", + "url": "../data/chord_graph/entities.json", + "transform": [ + { + "type": "stratify", + "key": "id", + "parentKey": "parent" + }, + { + "type": "tree", + "method": "cluster", + "size": [1, 1], + "as": ["alpha", "beta", "depth", "children"] + }, + { + "type": "formula", + "expr": "(360 * datum.alpha + 270) % 360", + "as": "angle" + }, + { + "type": "formula", + "expr": "inrange(datum.angle, [90, 270])", + "as": "leftside" + }, + { + "type": "formula", + "expr": "originX + 280 * datum.beta * cos(PI * datum.angle / 180)", + "as": "x" + }, + { + "type": "formula", + "expr": "originY + 280 * datum.beta * sin(PI * datum.angle / 180)", + "as": "y" + } + ] + }, + { + "name": "leaves", + "source": "entities", + "transform": [ + { + "type": "filter", + "expr": "!datum.children" + } + ] + }, + { + "name": "rels", + "url": "../data/chord_graph/rels.json", + "transform": [ + { + "type": "formula", + "expr": "treePath('entities', datum.source, datum.target)", + "as": "treepath", + "initonly": true + } + ] + }, + { + "name": "selected_rels", + "source": "rels", + "transform": [ + { + "type": "filter", + "expr": "datum.source === active.id || datum.target === active.id" + } + ] + }, + { + "name": "selected_entities", + "source": "entities", + "transform": [ + { + "type": "filter", + "expr": "datum.id " + } + ] + } + ], + + "marks": [ + { + "type": "text", + "from": {"data": "leaves"}, + "encode": { + "enter": { + "text": {"field": "name"}, + "baseline": {"value": "middle"} + }, + "update": { + "x": {"field": "x"}, + "y": {"field": "y"}, + "dx": {"signal": "2 * (datum.leftside ? -1 : 1)"}, + "angle": {"signal": "datum.leftside ? datum.angle - 180 : datum.angle"}, + "align": {"signal": "datum.leftside ? 
'right' : 'left'"}, + "fontSize": [ + {"test": "indata('selected_rels', 'source', datum.id)", "value": 15}, + {"test": "indata('selected_rels', 'target', datum.id)", "value": 15}, + {"value": 11} + ], + "fontWeight": [ + {"test": "indata('selected_rels', 'source', datum.id)", "value": "bold"}, + {"test": "indata('selected_rels', 'target', datum.id)", "value": "bold"}, + {"value": null} + ], + "fill": [ + {"test": "datum.id === active.id", "value": "white"}, + {"test": "if(active && active.produces, active.produces.length > 0 && indexof(active.produces, datum.id) >= 0, false)", "signal": "producesColor"}, + {"test": "if(active && active.consumes, active.consumes.length > 0 && indexof(active.consumes, datum.id) >= 0, false)", "signal": "consumesColor"}, + {"value": "#aaa"} + ] + } + } + }, + { + "type": "group", + "from": { + "facet": { + "name": "path", + "data": "rels", + "field": "treepath" + } + }, + "marks": [ + { + "type": "line", + "interactive": false, + "from": {"data": "path"}, + "encode": { + "enter": { + "interpolate": {"value": "bundle"}, + "strokeWidth": {"value": 3} + }, + "update": { + "stroke": [ + {"test": "(parent.source === active.id || parent.target === active.id) && parent.type === 'consumes'", "signal": "consumesColor"}, + {"test": "(parent.source === active.id || parent.target === active.id) && parent.type === 'produces'", "signal": "producesColor"}, + {"value": "#ff8400"} + ], + "strokeOpacity": [ + {"test": "parent.source === active.id || parent.target === active.id", "value": 1}, + {"value": 0.2} + ], + "tension": {"value": 0.7}, + "x": {"field": "x"}, + "y": {"field": "y"} + } + } + } + ] + } + ], + + "scales": [ + { + "name": "color", + "type": "ordinal", + "domain": ["consumes", "produces"], + "range": [{"signal": "consumesColor"}, {"signal": "producesColor"}] + } + ], + + "legends": [ + { + "stroke": "color", + "labelColor": "white", + "labelFontSize": 20, + "symbolStrokeWidth": 20, + "orient": "bottom-right", + "symbolType": "stroke" + } + ] +} diff --git a/docs/dev/architecture.md b/docs/dev/architecture.md new file mode 100644 index 0000000000..a2547154f5 --- /dev/null +++ b/docs/dev/architecture.md @@ -0,0 +1,17 @@ +# BBOT Internal Architecture + +Here is a basic overview of BBOT's internal architecture. + +## Queues + +Being both ***recursive*** and ***event-driven***, BBOT makes heavy use of queues. These enable smooth communication between the modules, and ensure that large numbers of events can be produced without slowing down or clogging up the scan. + +Every module in BBOT has both an ***incoming*** and ***outgoing*** queue. Event types matching the module's `WATCHED_EVENTS` (e.g. `DNS_NAME`) are queued in its incoming queue, and processed by the module's `handle_event()` (or `handle_batch()` in the case of batched modules). If the module finds anything interesting, it creates an event and places it in its outgoing queue, to be processed by the scan and redistributed to other modules. + +## Event Flow + +Below is a graph showing the internal event flow in BBOT. White lines represent queues. Notice how some modules run in sequence, while others run in parallel. With the exception of a few specific modules, most BBOT modules are parallelized. + +![event-flow](https://github.com/blacklanternsecurity/bbot/assets/20261699/6cece76b-70bd-4690-a53f-02d42e6ed05b) + +For a higher-level overview, see [How it Works](../how_it_works.md). 
diff --git a/docs/dev/basemodule.md b/docs/dev/basemodule.md
new file mode 100644
index 0000000000..04e59042ec
--- /dev/null
+++ b/docs/dev/basemodule.md
@@ -0,0 +1 @@
+::: bbot.modules.base.BaseModule
diff --git a/docs/dev/core.md b/docs/dev/core.md
new file mode 100644
index 0000000000..d138681f98
--- /dev/null
+++ b/docs/dev/core.md
@@ -0,0 +1 @@
+::: bbot.core.core.BBOTCore
diff --git a/docs/dev/dev_environment.md b/docs/dev/dev_environment.md
new file mode 100644
index 0000000000..b73f660574
--- /dev/null
+++ b/docs/dev/dev_environment.md
@@ -0,0 +1,42 @@
+# Setting Up a Dev Environment
+
+The following will show you how to set up a fully functioning python environment for devving on BBOT.
+
+## Installation (Poetry)
+
+[Poetry](https://python-poetry.org/) is the recommended method of installation if you want to dev on BBOT. To set up a dev environment with Poetry, you can follow these steps:
+
+- Fork [BBOT](https://github.com/blacklanternsecurity/bbot) on GitHub
+- Clone your fork and set up a development environment with Poetry:
+
+```bash
+# clone your forked repo and cd into it
+git clone git@github.com:<your_username>/bbot.git
+cd bbot
+
+# install poetry
+curl -sSL https://install.python-poetry.org | python3 -
+
+# install pip dependencies
+poetry install
+# install pre-commit hooks, etc.
+poetry run pre-commit install
+
+# enter virtual environment
+poetry shell
+
+bbot --help
+```
+
+- Now, any changes you make in the code will be reflected in the `bbot` command.
+- After making your changes, run the tests locally to ensure they pass.
+
+```bash
+# auto-format code indentation, etc.
+ruff format
+
+# run tests
+./bbot/test/run_tests.sh
+```
+
+- Finally, commit and push your changes, and create a pull request to the `dev` branch of the main BBOT repo.
diff --git a/docs/dev/discord_bot.md b/docs/dev/discord_bot.md
new file mode 100644
index 0000000000..ff2aa860a1
--- /dev/null
+++ b/docs/dev/discord_bot.md
@@ -0,0 +1,8 @@
+
+![bbot-discord](https://github.com/blacklanternsecurity/bbot/assets/20261699/22b268a2-0dfd-4c2a-b7c5-548c0f2cc6f9)
+
+Below is a simple Discord bot designed to run BBOT scans.
+
+```python title="examples/discord_bot.py"
+--8<-- "examples/discord_bot.py"
+```
diff --git a/docs/dev/engine.md b/docs/dev/engine.md
new file mode 100644
index 0000000000..d77bd3970e
--- /dev/null
+++ b/docs/dev/engine.md
@@ -0,0 +1,5 @@
+::: bbot.core.engine.EngineBase
+
+::: bbot.core.engine.EngineClient
+
+::: bbot.core.engine.EngineServer
diff --git a/docs/dev/event.md b/docs/dev/event.md
new file mode 100644
index 0000000000..79f0cc7cb5
--- /dev/null
+++ b/docs/dev/event.md
@@ -0,0 +1,16 @@
+This is a developer reference. For a high-level description of BBOT events, including a full list of event types, see [Events](../../scanning/events).
+
+::: bbot.core.event.base.make_event
+::: bbot.core.event.base.event_from_json
+
+::: bbot.core.event.base.BaseEvent
+    options:
+      members:
+        - __init__
+        - json
+        - from_json
+        - pretty_string
+        - module_sequence
+        - make_internal
+        - unmake_internal
+        - set_scope_distance
diff --git a/docs/dev/helpers/command.md b/docs/dev/helpers/command.md
new file mode 100644
index 0000000000..3716d2037a
--- /dev/null
+++ b/docs/dev/helpers/command.md
@@ -0,0 +1,20 @@
+# Command Helpers
+
+These are helpers related to executing shell commands. They are used throughout BBOT and its modules for executing various binaries such as `masscan`, `nuclei`, etc.
+
+These helpers can be invoked directly from `self.helpers`, but inside a module you should always use `self.run_process()` or `self.run_process_live()`. These are light wrappers which ensure the running process is tracked by the module so that it can be easily terminated should the user need to kill the module:
+
+```python
+# simple subprocess
+ls_result = await self.run_process("ls", "-l")
+for line in ls_result.stdout.splitlines():
+    # ...
+
+# iterate through each line in real time
+async for line in self.run_process_live(["grep", "-R"]):
+    # ...
+```
+
+::: bbot.core.helpers.command
+    options:
+      show_root_heading: false
diff --git a/docs/dev/helpers/dns.md b/docs/dev/helpers/dns.md
new file mode 100644
index 0000000000..5a51d61168
--- /dev/null
+++ b/docs/dev/helpers/dns.md
@@ -0,0 +1,19 @@
+# DNS
+
+These are helpers related to DNS resolution. They are used throughout BBOT and its modules for performing DNS lookups and detecting DNS wildcards, etc.
+
+Note that these helpers can be invoked directly from `self.helpers`, e.g.:
+
+```python
+self.helpers.resolve("evilcorp.com")
+```
+
+::: bbot.core.helpers.dns.DNSHelper
+    handler: python
+    options:
+      members:
+        - resolve
+        - resolve_batch
+        - resolve_raw
+        - is_wildcard
+        - is_wildcard_domain
diff --git a/docs/dev/helpers/index.md b/docs/dev/helpers/index.md
new file mode 100644
index 0000000000..cc27ed1f2b
--- /dev/null
+++ b/docs/dev/helpers/index.md
@@ -0,0 +1,29 @@
+# BBOT Helpers
+
+In this section are various helper functions that are designed to make your life easier when devving on BBOT. Whether you're extending BBOT by writing a module or working on its core engine, these functions are designed to act as useful machine parts to perform essential tasks, such as making a web request or executing a DNS query.
+
+The vast majority of these helpers can be accessed directly from the `.helpers` attribute of a scan or module, like so:
+
+```python
+class MyModule(BaseModule):
+
+    ...
+
+    async def handle_event(self, event):
+        # Web Request
+        response = await self.helpers.request("https://www.evilcorp.com")
+
+        # DNS query
+        for ip in await self.helpers.resolve("www.evilcorp.com"):
+            self.hugesuccess(str(ip))
+
+        # Execute shell command
+        completed_process = await self.run_process("ls", "-l")
+        self.hugesuccess(completed_process.stdout)
+
+        # Split a DNS name into subdomain / domain
+        self.helpers.split_domain("www.internal.evilcorp.co.uk")
+        # ("www.internal", "evilcorp.co.uk")
+```
+
+[Next Up: Command Helpers -->](command.md){ .md-button .md-button--primary }
diff --git a/docs/dev/helpers/interactsh.md b/docs/dev/helpers/interactsh.md
new file mode 100644
index 0000000000..5431e63378
--- /dev/null
+++ b/docs/dev/helpers/interactsh.md
@@ -0,0 +1,5 @@
+# Interact.sh
+
+::: bbot.core.helpers.interactsh.Interactsh
+    options:
+      show_root_heading: false
diff --git a/docs/dev/helpers/misc.md b/docs/dev/helpers/misc.md
new file mode 100644
index 0000000000..3a95dc0d94
--- /dev/null
+++ b/docs/dev/helpers/misc.md
@@ -0,0 +1,7 @@
+# Misc Helpers
+
+These are miscellaneous helpers, used throughout BBOT and its modules for simple tasks such as parsing domains, ports, urls, etc.
+
+::: bbot.core.helpers.misc
+    options:
+      show_root_heading: false
diff --git a/docs/dev/helpers/web.md b/docs/dev/helpers/web.md
new file mode 100644
index 0000000000..b25f0e7cad
--- /dev/null
+++ b/docs/dev/helpers/web.md
@@ -0,0 +1,15 @@
+# Web
+
+These are helpers for making various web requests.
+ +Note that these helpers can be invoked directly from `self.helpers`, e.g.: + +```python +self.helpers.request("https://www.evilcorp.com") +``` + +::: bbot.core.helpers.web + options: + show_root_heading: false + members: + - WebHelper diff --git a/docs/dev/helpers/wordcloud.md b/docs/dev/helpers/wordcloud.md new file mode 100644 index 0000000000..cc2d6671ce --- /dev/null +++ b/docs/dev/helpers/wordcloud.md @@ -0,0 +1,13 @@ +# Word Cloud + +These are helpers related to BBOT's Word Cloud, a mechanism for storing target-specific keywords that are useful for custom wordlists, etc. + +Note that these helpers can be invoked directly from `self.helpers`, e.g.: + +```python +self.helpers.word_cloud +``` + +::: bbot.core.helpers.wordcloud + options: + show_root_heading: false diff --git a/docs/dev/index.md b/docs/dev/index.md new file mode 100644 index 0000000000..6315637f02 --- /dev/null +++ b/docs/dev/index.md @@ -0,0 +1,100 @@ +# BBOT Developer Reference + +BBOT exposes a Python API that allows you to create, start, and stop scans. + +Documented in this section are commonly-used classes and functions within BBOT, along with usage examples. + +## Adding BBOT to Your Python Project + +If you are using Poetry, you can add BBOT to your python environment like this: + +```bash +# stable +poetry add bbot + +# bleeding-edge (dev branch) +poetry add bbot --allow-prereleases +``` + +## Running a BBOT Scan from Python + +#### Synchronous +```python +from bbot.scanner import Scanner + +if __name__ == "__main__": + scan = Scanner("evilcorp.com", presets=["subdomain-enum"]) + for event in scan.start(): + print(event) +``` + +#### Asynchronous +```python +from bbot.scanner import Scanner + +async def main(): + scan = Scanner("evilcorp.com", presets=["subdomain-enum"]) + async for event in scan.async_start(): + print(event.json()) + +if __name__ == "__main__": + import asyncio + asyncio.run(main()) +``` + +For a full listing of `Scanner` attributes and functions, see the [`Scanner` Code Reference](./scanner.md). + +#### Multiple Targets + +You can specify any number of targets: + +```python +# create a scan against multiple targets +scan = Scanner( + "evilcorp.com", + "evilcorp.org", + "evilcorp.ce", + "4.3.2.1", + "1.2.3.4/24", + presets=["subdomain-enum"] +) + +# this is the same as: +targets = ["evilcorp.com", "evilcorp.org", "evilcorp.ce", "4.3.2.1", "1.2.3.4/24"] +scan = Scanner(*targets, presets=["subdomain-enum"]) +``` + +For more details, including which types of targets are valid, see [Targets](../scanning/index.md#targets) + +#### Other Custom Options + +In many cases, using a [Preset](../scanning/presets.md) like `subdomain-enum` is sufficient. However, the `Scanner` is flexible and accepts many other arguments that can override the default functionality. 
+You can specify [`flags`](../index.md#flags), [`modules`](../index.md#modules), [`output_modules`](../output.md), a [`whitelist` or `blacklist`](../scanning/index.md#whitelists-and-blacklists), and custom [`config` options](../scanning/configuration.md):
+
+```python
+# create a scan against multiple targets
+scan = Scanner(
+    # targets
+    "evilcorp.com",
+    "4.3.2.1",
+    # enable these presets
+    presets=["subdomain-enum"],
+    # whitelist these hosts
+    whitelist=["evilcorp.com", "evilcorp.org"],
+    # blacklist these hosts
+    blacklist=["prod.evilcorp.com"],
+    # also enable these individual modules
+    modules=["nuclei", "ipstack"],
+    # exclude modules with these flags
+    exclude_flags=["slow"],
+    # custom config options
+    config={
+        "modules": {
+            "nuclei": {
+                "tags": "apache,nginx"
+            }
+        }
+    }
+)
+```
+
+For a list of all the possible scan options, see the [`Presets` Code Reference](./presets.md).
diff --git a/docs/dev/module_howto.md b/docs/dev/module_howto.md
new file mode 100644
index 0000000000..de5bdd90b2
--- /dev/null
+++ b/docs/dev/module_howto.md
@@ -0,0 +1,203 @@
+# How to Write a BBOT Module
+
+Here we'll go over a basic example of writing a custom BBOT module.
+
+## Create the python file
+
+1. Create a new `.py` file in `bbot/modules` (or in a [custom module directory](#custom-module-directory))
+1. At the top of the file, import `BaseModule`
+1. Declare a class that inherits from `BaseModule`
+    - the class must have the same name as your file (case-insensitive)
+1. Define in `watched_events` what type of data your module will consume
+1. Define in `produced_events` what type of data your module will produce
+1. Define (via `flags`) whether your module is `active` or `passive`, and whether it's `safe` or `aggressive`
+1. **Put your main logic in `.handle_event()`**
+
+Here is an example of a simple module that performs whois lookups:
+
+```python title="bbot/modules/whois.py"
+from bbot.modules.base import BaseModule
+
+class whois(BaseModule):
+    watched_events = ["DNS_NAME"] # watch for DNS_NAME events
+    produced_events = ["WHOIS"] # we produce WHOIS events
+    flags = ["passive", "safe"]
+    meta = {"description": "Query WhoisXMLAPI for WHOIS data"}
+    options = {"api_key": ""} # module config options
+    options_desc = {"api_key": "WhoisXMLAPI Key"}
+    per_domain_only = True # only run once per domain
+
+    base_url = "https://www.whoisxmlapi.com/whoisserver/WhoisService"
+
+    # one-time setup - runs at the beginning of the scan
+    async def setup(self):
+        self.api_key = self.config.get("api_key")
+        if not self.api_key:
+            # soft-fail if no API key is set
+            return None, "Must set API key"
+        return True
+
+    async def handle_event(self, event):
+        self.hugesuccess(f"Got {event} (event.data: {event.data})")
+        _, domain = self.helpers.split_domain(event.data)
+        url = f"{self.base_url}?apiKey={self.api_key}&domainName={domain}&outputFormat=JSON"
+        self.hugeinfo(f"Visiting {url}")
+        response = await self.helpers.request(url)
+        if response is not None:
+            await self.emit_event(response.json(), "WHOIS", parent=event)
+```
+
+## Test your new module
+
+After saving the module, you can run it with `-m`:
+
+```bash
+# run a scan enabling the module in bbot/modules/whois.py
+bbot -t evilcorp.com -m whois
+```
+
+### Debugging Your Module
+
+BBOT has a variety of colorful logging functions like `self.hugesuccess()` that can be useful for debugging.
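+
+For example, inside `handle_event()` you might temporarily add something like this (illustrative snippet):
+
+```python
+async def handle_event(self, event):
+    # bright green banner in the scan output
+    self.hugesuccess(f"Got an event: {event.data}")
+    # only visible when BBOT is run with -d
+    self.debug("entering the slow code path")
+```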
+
+**BBOT log levels**:
+
+- `critical`: bright red
+- `hugesuccess`: bright green
+- `hugewarning`: bright orange
+- `hugeinfo`: bright blue
+- `error`: red
+- `warning`: orange
+- `info`: blue
+- `verbose`: grey (must enable `-v` to see)
+- `debug`: grey (must enable `-d` to see)
+
+For details on how tests are written, see [Unit Tests](./tests.md).
+
+## `handle_event()` and `emit_event()`
+
+The `handle_event()` method is the most important part of the module. By overriding this method, you control what the module does. During a scan, when an [event](../scanning/events.md) from your `watched_events` is encountered (a `DNS_NAME` in this example), `handle_event()` is automatically called with that event as its argument.
+
+The `emit_event()` method is how modules return data. When you call `emit_event()`, it creates an [event](../scanning/events.md) and outputs it, sending it to any modules that are interested in that data type.
+
+## `setup()`
+
+A module's `setup()` method is used for performing one-time setup at the start of the scan, like downloading a wordlist or checking to make sure an API key is valid. It needs to return either:
+
+1. `True` - module setup succeeded
+2. `None` - module setup soft-failed (scan will continue but module will be disabled)
+3. `False` - module setup hard-failed (scan will abort)
+
+Optionally, it can also return a reason. Here are some examples:
+
+```python
+async def setup(self):
+    if not self.config.get("api_key"):
+        # soft-fail
+        return None, "No API key specified"
+
+async def setup(self):
+    try:
+        wordlist = self.helpers.wordlist("https://raw.githubusercontent.com/user/wordlist.txt")
+    except WordlistError as e:
+        # hard-fail
+        return False, f"Error downloading wordlist: {e}"
+
+async def setup(self):
+    self.timeout = self.config.get("timeout", 5)
+    # success
+    return True
+```
+
+## Module Config Options
+
+Each module can have its own set of config options. These live in the `options` and `options_desc` attributes on your class. Both are dictionaries; `options` is for defaults and `options_desc` is for descriptions. Here is a typical example:
+
+```python title="bbot/modules/nmap.py"
+class nmap(BaseModule):
+    # ...
+    options = {
+        "top_ports": 100,
+        "ports": "",
+        "timing": "T4",
+        "skip_host_discovery": True,
+    }
+    options_desc = {
+        "top_ports": "Top ports to scan (default 100) (to override, specify 'ports')",
+        "ports": "Ports to scan",
+        "timing": "-T<0-5>: Set timing template (higher is faster)",
+        "skip_host_discovery": "skip host discovery (-Pn)",
+    }
+
+    async def setup(self):
+        self.ports = self.config.get("ports", "")
+        self.timing = self.config.get("timing", "T4")
+        self.top_ports = self.config.get("top_ports", 100)
+        self.skip_host_discovery = self.config.get("skip_host_discovery", True)
+        return True
+```
+
+Once you've defined these variables, you can pass the options via `-c`:
+
+```bash
+bbot -m nmap -c modules.nmap.top_ports=250
+```
+
+... or via the config:
+
+```yaml title="~/.config/bbot/bbot.yml"
+modules:
+  nmap:
+    top_ports: 250
+```
+
+Inside the module, you access them via `self.config`, e.g.:
+
+```python
+self.config.get("top_ports")
+```
+
+## Module Dependencies
+
+BBOT automates module dependencies with **Ansible**. If your module relies on a third-party binary, OS package, or python library, you can specify them in the `deps_*` attributes of your module.
+
+```python
+class MyModule(BaseModule):
+    ...
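+    # deps_apt installs OS packages; deps_ansible runs raw Ansible tasks.
+    # note: the #{BBOT_TEMP} and #{BBOT_TOOLS} strings below appear to be
+    # placeholders that BBOT expands to its temp and tools directories.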
+ deps_apt = ["chromium-browser"] + deps_ansible = [ + { + "name": "install dev tools", + "package": {"name": ["gcc", "git", "make"], "state": "present"}, + "become": True, + "ignore_errors": True, + }, + { + "name": "Download massdns source code", + "git": { + "repo": "https://github.com/blechschmidt/massdns.git", + "dest": "#{BBOT_TEMP}/massdns", + "single_branch": True, + "version": "master", + }, + }, + { + "name": "Build massdns", + "command": {"chdir": "#{BBOT_TEMP}/massdns", "cmd": "make", "creates": "#{BBOT_TEMP}/massdns/bin/massdns"}, + }, + { + "name": "Install massdns", + "copy": {"src": "#{BBOT_TEMP}/massdns/bin/massdns", "dest": "#{BBOT_TOOLS}/", "mode": "u+x,g+x,o+x"}, + }, + ] +``` + +## Load Modules from Custom Locations + +If you have a custom module and you want to use it with BBOT, you can add its parent folder to `module_dirs`. This saves you from having to copy it into the BBOT install location. To add a custom module directory, add it to `module_dirs` in your preset: + +```yaml title="my_preset.yml" +# load BBOT modules from these additional paths +module_dirs: + - /home/user/my_modules +``` diff --git a/docs/dev/presets.md b/docs/dev/presets.md new file mode 100644 index 0000000000..7bc7343e07 --- /dev/null +++ b/docs/dev/presets.md @@ -0,0 +1 @@ +::: bbot.scanner.Preset diff --git a/docs/dev/scanner.md b/docs/dev/scanner.md new file mode 100644 index 0000000000..a03de4e4bb --- /dev/null +++ b/docs/dev/scanner.md @@ -0,0 +1 @@ +::: bbot.scanner.Scanner diff --git a/docs/dev/target.md b/docs/dev/target.md new file mode 100644 index 0000000000..6740cfb744 --- /dev/null +++ b/docs/dev/target.md @@ -0,0 +1,9 @@ +::: bbot.scanner.target.BaseTarget + +::: bbot.scanner.target.ScanSeeds + +::: bbot.scanner.target.ScanWhitelist + +::: bbot.scanner.target.ScanBlacklist + +::: bbot.scanner.target.BBOTTarget diff --git a/docs/dev/tests.md b/docs/dev/tests.md new file mode 100644 index 0000000000..4381981812 --- /dev/null +++ b/docs/dev/tests.md @@ -0,0 +1,105 @@ +# Unit Tests + +BBOT takes tests seriously. Every module *must* have a custom-written test that *actually tests* its functionality. Don't worry if you want to contribute but you aren't used to writing tests. If you open a draft PR, we will help write them :) + +We use [ruff](https://docs.astral.sh/ruff/) for linting, and [pytest](https://docs.pytest.org/en/8.2.x/) for tests. + +## Running tests locally + +We have GitHub Actions that automatically run tests whenever you open a Pull Request. However, you can also run the tests locally with `pytest`: + +```bash +# lint with ruff +poetry run ruff check + +# format code with ruff +poetry run ruff format + +# run all tests with pytest (takes roughly 30 minutes) +poetry run pytest +``` + +### Running specific tests + +If you only want to run a single test, you can select it with `-k`: + +```bash +# run only the sslcert test +poetry run pytest -k test_module_sslcert +``` + +You can also filter like this: +```bash +# run all the module tests except for sslcert +poetry run pytest -k "test_module_ and not test_module_sslcert" +``` + +If you want to see the output of your module, you can enable `--log-cli-level`: +```bash +poetry run pytest --log-cli-level=DEBUG +``` + +## Example: Writing a Module Test + +To write a test for your module, create a new python file in `bbot/test/test_step_2/module_tests`. 
+Your filename must be `test_module_<module_name>`:
+
+```python title="test_module_mymodule.py"
+from .base import ModuleTestBase
+
+
+class TestMyModule(ModuleTestBase):
+    targets = ["blacklanternsecurity.com"]
+    config_overrides = {"modules": {"mymodule": {"api_key": "deadbeef"}}}
+
+    async def setup_after_prep(self, module_test):
+        # mock HTTP response
+        module_test.httpx_mock.add_response(
+            url="https://api.com/subdomains?apikey=deadbeef&domain=blacklanternsecurity.com",
+            json={
+                "subdomains": [
+                    "www.blacklanternsecurity.com",
+                    "dev.blacklanternsecurity.com"
+                ],
+            },
+        )
+        # mock DNS
+        await module_test.mock_dns(
+            {
+                "blacklanternsecurity.com": {"A": ["1.2.3.4"]},
+                "www.blacklanternsecurity.com": {"A": ["1.2.3.4"]},
+                "dev.blacklanternsecurity.com": {"A": ["1.2.3.4"]},
+            }
+        )
+
+    def check(self, module_test, events):
+        # here is where we check to make sure it worked
+        dns_names = [e.data for e in events if e.type == "DNS_NAME"]
+        # temporary log messages for debugging
+        for e in dns_names:
+            self.log.critical(e)
+        assert "www.blacklanternsecurity.com" in dns_names, "failed to find subdomain #1"
+        assert "dev.blacklanternsecurity.com" in dns_names, "failed to find subdomain #2"
+```
+
+### Debugging a test
+
+Similar to debugging from within a module, you can debug from within a test using `self.log.critical()`, etc:
+
+```python
+    def check(self, module_test, events):
+        for e in events:
+            # bright red
+            self.log.critical(e.type)
+            # bright green
+            self.log.hugesuccess(e.data)
+            # bright orange
+            self.log.hugewarning(e.tags)
+            # bright blue
+            self.log.hugeinfo(e.parent)
+```
+
+### More advanced tests
+
+If you have questions about tests or need to write a more advanced test, come talk to us on [GitHub](https://github.com/blacklanternsecurity/bbot/discussions) or [Discord](https://discord.com/invite/PZqkgxu5SA).
+
+It's also a good idea to look through our [existing tests](https://github.com/blacklanternsecurity/bbot/tree/stable/bbot/test/test_step_2/module_tests). BBOT has over a hundred of them, so you might find one that's similar to what you're trying to do.
diff --git a/docs/diagrams/engine-architecture-bbot-v1.png b/docs/diagrams/engine-architecture-bbot-v1.png
new file mode 100644
index 0000000000..93cf0ff36b
Binary files /dev/null and b/docs/diagrams/engine-architecture-bbot-v1.png differ
diff --git a/docs/diagrams/engine-architecture-bbot-v2.png b/docs/diagrams/engine-architecture-bbot-v2.png
new file mode 100644
index 0000000000..8cc764d639
Binary files /dev/null and b/docs/diagrams/engine-architecture-bbot-v2.png differ
diff --git a/docs/diagrams/engine-architecture.drawio b/docs/diagrams/engine-architecture.drawio
new file mode 100644
index 0000000000..50601ef0a1
--- /dev/null
+++ b/docs/diagrams/engine-architecture.drawio
@@ -0,0 +1,141 @@
+(draw.io diagram XML omitted)
diff --git a/docs/diagrams/event-flow.drawio b/docs/diagrams/event-flow.drawio
new file mode 100644
index 0000000000..e90f1f0516
--- /dev/null
+++ b/docs/diagrams/event-flow.drawio
@@ -0,0 +1,135 @@
+(draw.io diagram XML omitted)
diff --git a/docs/diagrams/event-flow.png b/docs/diagrams/event-flow.png
new file mode 100644
index 0000000000..8d36fd13ad
Binary files /dev/null and b/docs/diagrams/event-flow.png differ
diff --git a/docs/diagrams/module-recursion.drawio b/docs/diagrams/module-recursion.drawio
new file mode 100644
index 0000000000..9d7e92a001
--- /dev/null
+++ b/docs/diagrams/module-recursion.drawio
@@ -0,0 +1,57 @@
+(draw.io diagram XML omitted)
diff --git a/docs/diagrams/module-recursion.png b/docs/diagrams/module-recursion.png
new file mode 100644
index 0000000000..d276ad7222
Binary files /dev/null and b/docs/diagrams/module-recursion.png differ
diff --git a/docs/favicon.png b/docs/favicon.png
new file mode 100644
index 0000000000..bf843100be
Binary files /dev/null and b/docs/favicon.png differ
diff --git a/docs/how_it_works.md b/docs/how_it_works.md
new file mode 100644
index 0000000000..bcec765f81
--- /dev/null
+++ b/docs/how_it_works.md
@@ -0,0 +1,37 @@
+# How it Works
+
+## BBOT's Recursive Philosophy
+
+It's well-known that when you're doing recon, it's best to do it recursively. However, there are very few recursive tools, and the main reason is that making a recursive tool is hard. In particular, it's very difficult to build a large-scale recursive system that interacts with the internet, and to keep it stable. When we first set out to make BBOT, we didn't know this, and it was definitely a lesson we learned the hard way. BBOT's stability is thanks to its extensive [Unit Tests](./dev/tests.md).
+
+BBOT inherits its recursive philosophy from [Spiderfoot](https://github.com/smicallef/spiderfoot), which means it is also ***event-driven***. Each of BBOT's 100+ modules ***consumes*** a certain type of [Event](./scanning/events.md), uses it to discover something new, and ***produces*** new events, which get distributed to all the other modules. This happens again and again -- thousands of times during a scan -- spidering outwards in a recursive web of discovery.
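+
+To make that cycle concrete, here is a toy dispatch loop (a sketch with made-up names, not BBOT's real API). Every produced event goes back onto a shared queue and is handed to every module that watches its type; de-duplication is what lets the recursion terminate. The `portscan`/`sslcert` pairing described under "How BBOT Modules Work Together" below follows this same pattern:
+
+```python
+from collections import deque
+
+class ToyPortscan:
+    watched_events = ["DNS_NAME"]
+    def handle(self, event_type, data):
+        # pretend we found an open port on the host
+        return [("OPEN_TCP_PORT", f"{data}:443")]
+
+class ToySslcert:
+    watched_events = ["OPEN_TCP_PORT"]
+    def handle(self, event_type, data):
+        # pretend the SSL certificate revealed a new subdomain
+        return [("DNS_NAME", "mail.evilcorp.com")]
+
+def toy_scan(modules, seed_events):
+    queue = deque(seed_events)
+    seen = set()
+    while queue:
+        event = queue.popleft()
+        if event in seen:
+            continue  # skip already-processed events so the scan terminates
+        seen.add(event)
+        print(event)
+        event_type, data = event
+        for module in modules:
+            if event_type in module.watched_events:
+                # whatever the module produces goes back on the queue
+                queue.extend(module.handle(event_type, data))
+
+toy_scan([ToyPortscan(), ToySslcert()], [("DNS_NAME", "evilcorp.com")])
+```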
+
+Below is an interactive graph showing the relationships between modules and the event types they produce and consume.
+
+## How BBOT Modules Work Together
+
+Each BBOT module does one specific task, such as querying an API for subdomains, or running a tool like `nuclei`, and is carefully designed to work together with other modules inside BBOT's recursive system.
+
+For example, the `portscan` module consumes `DNS_NAME`, and produces `OPEN_TCP_PORT`. The `sslcert` module consumes `OPEN_TCP_PORT` and produces `DNS_NAME`. You can see how even these two modules, when enabled together, will feed each other recursively.
+
+![module-recursion](https://github.com/blacklanternsecurity/bbot/assets/20261699/10ff5fb4-b3e7-453d-9772-7a26808b071e)
+
+Because of this, enabling even one module has the potential to increase your results exponentially. This is exactly how BBOT is able to outperform other tools.
+
+To learn more about how events flow inside BBOT, see [BBOT Internal Architecture](./dev/architecture.md).
diff --git a/docs/index.md b/docs/index.md
new file mode 100644
index 0000000000..355d58e8b2
--- /dev/null
+++ b/docs/index.md
@@ -0,0 +1,149 @@
+# Getting Started
+
+_A BBOT scan in real-time - visualization with [VivaGraphJS](https://github.com/blacklanternsecurity/bbot-vivagraphjs)_
+
+## Installation
+
+!!! info "Supported Platforms"
+
+    Only **Linux** is supported at this time. **Windows** and **macOS** are *not* supported. If you use one of these platforms, consider using [Docker](#docker).
+
+BBOT offers multiple methods of installation, including **pipx** and **Docker**. If you plan to dev on BBOT, see [Installation (Poetry)](./contribution/#installation-poetry).
+
+### [Python (pip / pipx)](https://pypi.org/project/bbot/)
+
+???+ note inline end
+
+    `pipx` installs BBOT inside its own virtual environment.
+
+```bash
+# stable version
+pipx install bbot
+
+# bleeding edge (dev branch)
+pipx install --pip-args '\--pre' bbot
+
+# execute bbot command
+bbot --help
+```
+
+### [Docker](https://hub.docker.com/r/blacklanternsecurity/bbot)
+
+Docker images are provided, along with a helper script, `bbot-docker.sh`, to persist your scan data.
+
+Scans are output to `~/.bbot/scans` (the usual place for BBOT scan data).
+
+```bash
+# bleeding edge (dev)
+docker run -it blacklanternsecurity/bbot --help
+
+# stable
+docker run -it blacklanternsecurity/bbot:stable --help
+
+# helper script
+git clone https://github.com/blacklanternsecurity/bbot && cd bbot
+./bbot-docker.sh --help
+```
+
+Note: If you need to pass in a custom preset, you can do so by mapping the preset into the container:
+
+```bash
+# use the preset `my_preset.yml` from the current directory
+docker run --rm -it \
+  -v "$HOME/.bbot/scans:/root/.bbot/scans" \
+  -v "$PWD/my_preset.yml:/my_preset.yml" \
+  blacklanternsecurity/bbot -p /my_preset.yml
+```
+
+## Example Commands
+
+Below are some examples of common scans.
+
+**Subdomains:**
+
+```bash
+# Perform a full subdomain enumeration on evilcorp.com
+bbot -t evilcorp.com -p subdomain-enum
+```
+
+**Subdomains (passive only):**
+
+```bash
+# Perform a passive-only subdomain enumeration on evilcorp.com
+bbot -t evilcorp.com -p subdomain-enum -rf passive
+```
+
+**Subdomains + port scan + web screenshots:**
+
+```bash
+# Port-scan every subdomain, screenshot every webpage, output to current directory
+bbot -t evilcorp.com -p subdomain-enum -m portscan gowitness -n my_scan -o .
+``` + +**Subdomains + basic web scan:** + +```bash +# A basic web scan includes wappalyzer, robots.txt, and other non-intrusive web modules +bbot -t evilcorp.com -p subdomain-enum web-basic +``` + +**Web spider:** + +```bash +# Crawl www.evilcorp.com up to a max depth of 2, automatically extracting emails, secrets, etc. +bbot -t www.evilcorp.com -p spider -c web.spider_distance=2 web.spider_depth=2 +``` + +**Everything everywhere all at once:** + +```bash +# Subdomains, emails, cloud buckets, port scan, basic web, web screenshots, nuclei +bbot -t evilcorp.com -p kitchen-sink +``` + + +## API Keys + +BBOT works just fine without API keys. However, there are certain modules that need them to function. If you have API keys and want to make use of these modules, you can place them either in your preset: + +```yaml title="my_preset.yml" +description: My custom subdomain enum preset + +include: + - subdomain-enum + - cloud-enum + +config: + modules: + shodan_dns: + api_key: deadbeef + virustotal: + api_key: cafebabe +``` + +...in BBOT's global YAML config (`~/.config/bbot/bbot.yml`): + +Note: this will ensure the API keys are used in all scans, regardless of preset. + +```yaml title="~/.config/bbot/bbot.yml" +modules: + shodan_dns: + api_key: deadbeef + virustotal: + api_key: cafebabe +``` + +...or directly on the command-line: + +```bash +# specify API key with -c +bbot -t evilcorp.com -f subdomain-enum -c modules.shodan_dns.api_key=deadbeef modules.virustotal.api_key=cafebabe +``` + +For more information, see [Configuration](./scanning/configuration.md). For a full list of modules, including which ones require API keys, see [List of Modules](./modules/list_of_modules.md). + +[Next Up: Scanning -->](./scanning/index.md){ .md-button .md-button--primary } diff --git a/docs/javascripts/tablesort.js b/docs/javascripts/tablesort.js new file mode 100644 index 0000000000..4014300f8e --- /dev/null +++ b/docs/javascripts/tablesort.js @@ -0,0 +1,6 @@ +document$.subscribe(function () { + var tables = document.querySelectorAll("article table:not([class])"); + tables.forEach(function (table) { + new Tablesort(table); + }); +}); diff --git a/docs/javascripts/tablesort.min.js b/docs/javascripts/tablesort.min.js new file mode 100644 index 0000000000..fcd3b078ef --- /dev/null +++ b/docs/javascripts/tablesort.min.js @@ -0,0 +1,6 @@ +/*! 
+ * tablesort v5.2.1 (2021-10-30) + * http://tristen.ca/tablesort/demo/ + * Copyright (c) 2021 ; Licensed MIT +*/ +!function(){function a(b,c){if(!(this instanceof a))return new a(b,c);if(!b||"TABLE"!==b.tagName)throw new Error("Element must be a table");this.init(b,c||{})}var b=[],c=function(a){var b;return window.CustomEvent&&"function"==typeof window.CustomEvent?b=new CustomEvent(a):(b=document.createEvent("CustomEvent"),b.initCustomEvent(a,!1,!1,void 0)),b},d=function(a,b){return a.getAttribute(b.sortAttribute||"data-sort")||a.textContent||a.innerText||""},e=function(a,b){return a=a.trim().toLowerCase(),b=b.trim().toLowerCase(),a===b?0:a0)if(a.tHead&&a.tHead.rows.length>0){for(e=0;e0&&n.push(m),o++;if(!n)return}for(o=0;o=48&&t<=57))return!1;n++}return!0}function p(e){return-1===e.indexOf("/")&&-1===e.indexOf("~")?e:e.replace(/~/g,"~0").replace(/\//g,"~1")}function d(e){return e.replace(/~1/g,"/").replace(/~0/g,"~")}function u(e){if(void 0===e)return!0;if(e)if(Array.isArray(e)){for(var t=0,n=e.length;t0&&"constructor"==s[c-1]))throw new TypeError("JSON-Patch: modifying `__proto__` or `constructor/prototype` prop is banned for security reasons, if this was on purpose, please set `banPrototypeModifications` flag false and pass it to this function. More info in fast-json-patch README");if(n&&void 0===u&&(void 0===l[g]?u=s.slice(0,c).join("/"):c==p-1&&(u=t.path),void 0!==u&&m(t,0,e,u)),c++,Array.isArray(l)){if("-"===g)g=l.length;else{if(n&&!f(g))throw new v("Expected an unsigned base-10 integer value, making the new referenced value the array element with the zero-based index","OPERATION_PATH_ILLEGAL_ARRAY_INDEX",o,t,e);f(g)&&(g=~~g)}if(c>=p){if(n&&"add"===t.op&&g>l.length)throw new v("The specified index MUST NOT be greater than the number of elements in the array","OPERATION_VALUE_OUT_OF_BOUNDS",o,t,e);if(!1===(a=y[t.op].call(t,l,g,e)).test)throw new v("Test operation failed","TEST_OPERATION_FAILED",o,t,e);return a}}else if(c>=p){if(!1===(a=b[t.op].call(t,l,g,e)).test)throw new v("Test operation failed","TEST_OPERATION_FAILED",o,t,e);return a}if(l=l[g],n&&c0)throw new v('Operation `path` property must start with "/"',"OPERATION_PATH_INVALID",t,e,n);if(("move"===e.op||"copy"===e.op)&&"string"!=typeof e.from)throw new v("Operation `from` property is not present (applicable in `move` and `copy` operations)","OPERATION_FROM_REQUIRED",t,e,n);if(("add"===e.op||"replace"===e.op||"test"===e.op)&&void 0===e.value)throw new v("Operation `value` property is not present (applicable in `add`, `replace` and `test` operations)","OPERATION_VALUE_REQUIRED",t,e,n);if(("add"===e.op||"replace"===e.op||"test"===e.op)&&u(e.value))throw new v("Operation `value` property is not present (applicable in `add`, `replace` and `test` operations)","OPERATION_VALUE_CANNOT_CONTAIN_UNDEFINED",t,e,n);if(n)if("add"==e.op){var i=e.path.split("/").length,o=r.split("/").length;if(i!==o+1&&i!==o)throw new v("Cannot perform an `add` operation at the desired path","OPERATION_PATH_CANNOT_ADD",t,e,n)}else if("replace"===e.op||"remove"===e.op||"_get"===e.op){if(e.path!==r)throw new v("Cannot perform the operation at a path that does not exist","OPERATION_PATH_UNRESOLVABLE",t,e,n)}else if("move"===e.op||"copy"===e.op){var a=I([{op:"_get",path:e.from,value:void 0}],n);if(a&&"OPERATION_PATH_UNRESOLVABLE"===a.name)throw new v("Cannot perform the operation from a path that does not exist","OPERATION_FROM_UNRESOLVABLE",t,e,n)}}function I(e,t,n){try{if(!Array.isArray(e))throw new v("Patch sequence must be an 
array","SEQUENCE_NOT_AN_ARRAY");if(t)O(h(t),h(e),n||!0);else{n=n||x;for(var r=0;r0&&(e.patches=[],e.callback&&e.callback(r)),r}function C(e,t,n,r,i){if(t!==e){"function"==typeof t.toJSON&&(t=t.toJSON());for(var o=c(t),a=c(e),s=!1,f=a.length-1;f>=0;f--){var d=e[g=a[f]];if(!l(t,g)||void 0===t[g]&&void 0!==d&&!1===Array.isArray(t))Array.isArray(e)===Array.isArray(t)?(i&&n.push({op:"test",path:r+"/"+p(g),value:h(d)}),n.push({op:"remove",path:r+"/"+p(g)}),s=!0):(i&&n.push({op:"test",path:r,value:e}),n.push({op:"replace",path:r,value:t}));else{var u=t[g];"object"==typeof d&&null!=d&&"object"==typeof u&&null!=u&&Array.isArray(d)===Array.isArray(u)?C(d,u,n,r+"/"+p(g),i):d!==u&&(i&&n.push({op:"test",path:r+"/"+p(g),value:h(d)}),n.push({op:"replace",path:r+"/"+p(g),value:h(u)}))}}if(s||o.length!=a.length)for(f=0;f0)return[m,n+c.join(",\n"+u),s].join("\n"+o)}return v}(e,"",0)},j=F(M);var z=U;function U(e){var t=this;if(t instanceof U||(t=new U),t.tail=null,t.head=null,t.length=0,e&&"function"==typeof e.forEach)e.forEach((function(e){t.push(e)}));else if(arguments.length>0)for(var n=0,r=arguments.length;n1)n=t;else{if(!this.head)throw new TypeError("Reduce of empty list with no initial value");r=this.head.next,n=this.head.value}for(var i=0;null!==r;i++)n=e(n,r.value,i),r=r.next;return n},U.prototype.reduceReverse=function(e,t){var n,r=this.tail;if(arguments.length>1)n=t;else{if(!this.tail)throw new TypeError("Reduce of empty list with no initial value");r=this.tail.prev,n=this.tail.value}for(var i=this.length-1;null!==r;i--)n=e(n,r.value,i),r=r.prev;return n},U.prototype.toArray=function(){for(var e=new Array(this.length),t=0,n=this.head;null!==n;t++)e[t]=n.value,n=n.next;return e},U.prototype.toArrayReverse=function(){for(var e=new Array(this.length),t=0,n=this.tail;null!==n;t++)e[t]=n.value,n=n.prev;return e},U.prototype.slice=function(e,t){(t=t||this.length)<0&&(t+=this.length),(e=e||0)<0&&(e+=this.length);var n=new U;if(tthis.length&&(t=this.length);for(var r=0,i=this.head;null!==i&&rthis.length&&(t=this.length);for(var r=this.length,i=this.tail;null!==i&&r>t;r--)i=i.prev;for(;null!==i&&r>e;r--,i=i.prev)n.push(i.value);return n},U.prototype.splice=function(e,t,...n){e>this.length&&(e=this.length-1),e<0&&(e=this.length+e);for(var r=0,i=this.head;null!==i&&r1;const ie=(e,t,n)=>{const r=e[te].get(t);if(r){const t=r.value;if(oe(e,t)){if(se(e,r),!e[J])return}else n&&(e[ne]&&(r.value.now=Date.now()),e[ee].unshiftNode(r));return t.value}},oe=(e,t)=>{if(!t||!t.maxAge&&!e[Q])return!1;const n=Date.now()-t.now;return t.maxAge?n>t.maxAge:e[Q]&&n>e[Q]},ae=e=>{if(e[q]>e[H])for(let t=e[ee].tail;e[q]>e[H]&&null!==t;){const n=t.prev;se(e,t),t=n}},se=(e,t)=>{if(t){const n=t.value;e[Z]&&e[Z](n.key,n.value),e[q]-=n.length,e[te].delete(n.key),e[ee].removeNode(t)}};class le{constructor(e,t,n,r,i){this.key=e,this.value=t,this.length=n,this.now=r,this.maxAge=i||0}}const ce=(e,t,n,r)=>{let i=n.value;oe(e,i)&&(se(e,n),e[J]||(i=void 0)),i&&t.call(r,i.value,i.key,e)};var he=class{constructor(e){if("number"==typeof e&&(e={max:e}),e||(e={}),e.max&&("number"!=typeof e.max||e.max<0))throw new TypeError("max must be a non-negative number");this[H]=e.max||1/0;const t=e.length||re;if(this[Y]="function"!=typeof t?re:t,this[J]=e.stale||!1,e.maxAge&&"number"!=typeof e.maxAge)throw new TypeError("maxAge must be a number");this[Q]=e.maxAge||0,this[Z]=e.dispose,this[K]=e.noDisposeOnSet||!1,this[ne]=e.updateAgeOnGet||!1,this.reset()}set max(e){if("number"!=typeof e||e<0)throw new TypeError("max must be a non-negative 
number");this[H]=e||1/0,ae(this)}get max(){return this[H]}set allowStale(e){this[J]=!!e}get allowStale(){return this[J]}set maxAge(e){if("number"!=typeof e)throw new TypeError("maxAge must be a non-negative number");this[Q]=e,ae(this)}get maxAge(){return this[Q]}set lengthCalculator(e){"function"!=typeof e&&(e=re),e!==this[Y]&&(this[Y]=e,this[q]=0,this[ee].forEach((e=>{e.length=this[Y](e.value,e.key),this[q]+=e.length}))),ae(this)}get lengthCalculator(){return this[Y]}get length(){return this[q]}get itemCount(){return this[ee].length}rforEach(e,t){t=t||this;for(let n=this[ee].tail;null!==n;){const r=n.prev;ce(this,e,n,t),n=r}}forEach(e,t){t=t||this;for(let n=this[ee].head;null!==n;){const r=n.next;ce(this,e,n,t),n=r}}keys(){return this[ee].toArray().map((e=>e.key))}values(){return this[ee].toArray().map((e=>e.value))}reset(){this[Z]&&this[ee]&&this[ee].length&&this[ee].forEach((e=>this[Z](e.key,e.value))),this[te]=new Map,this[ee]=new V,this[q]=0}dump(){return this[ee].map((e=>!oe(this,e)&&{k:e.key,v:e.value,e:e.now+(e.maxAge||0)})).toArray().filter((e=>e))}dumpLru(){return this[ee]}set(e,t,n){if((n=n||this[Q])&&"number"!=typeof n)throw new TypeError("maxAge must be a number");const r=n?Date.now():0,i=this[Y](t,e);if(this[te].has(e)){if(i>this[H])return se(this,this[te].get(e)),!1;const o=this[te].get(e).value;return this[Z]&&(this[K]||this[Z](e,o.value)),o.now=r,o.maxAge=n,o.value=t,this[q]+=i-o.length,o.length=i,this.get(e),ae(this),!0}const o=new le(e,t,i,r,n);return o.length>this[H]?(this[Z]&&this[Z](e,t),!1):(this[q]+=o.length,this[ee].unshift(o),this[te].set(e,this[ee].head),ae(this),!0)}has(e){if(!this[te].has(e))return!1;const t=this[te].get(e).value;return!oe(this,t)}get(e){return ie(this,e,!0)}peek(e){return ie(this,e,!1)}pop(){const e=this[ee].tail;return e?(se(this,e),e.value):null}del(e){se(this,this[te].get(e))}load(e){this.reset();const t=Date.now();for(let n=e.length-1;n>=0;n--){const r=e[n],i=r.e||0;if(0===i)this.set(r.k,r.v);else{const e=i-t;e>0&&this.set(r.k,r.v,e)}}}prune(){this[te].forEach(((e,t)=>ie(this,t,!1)))}};const fe=Object.freeze({loose:!0}),pe=Object.freeze({});var de=e=>e?"object"!=typeof e?fe:e:pe,ue={exports:{}};var ge={MAX_LENGTH:256,MAX_SAFE_COMPONENT_LENGTH:16,MAX_SAFE_BUILD_LENGTH:250,MAX_SAFE_INTEGER:Number.MAX_SAFE_INTEGER||9007199254740991,RELEASE_TYPES:["major","premajor","minor","preminor","patch","prepatch","prerelease"],SEMVER_SPEC_VERSION:"2.0.0",FLAG_INCLUDE_PRERELEASE:1,FLAG_LOOSE:2};var me="object"==typeof process&&process.env&&process.env.NODE_DEBUG&&/\bsemver\b/i.test(process.env.NODE_DEBUG)?(...e)=>console.error("SEMVER",...e):()=>{};!function(e,t){const{MAX_SAFE_COMPONENT_LENGTH:n,MAX_SAFE_BUILD_LENGTH:r,MAX_LENGTH:i}=ge,o=me,a=(t=e.exports={}).re=[],s=t.safeRe=[],l=t.src=[],c=t.t={};let h=0;const f="[a-zA-Z0-9-]",p=[["\\s",1],["\\d",i],[f,r]],d=(e,t,n)=>{const r=(e=>{for(const[t,n]of p)e=e.split(`${t}*`).join(`${t}{0,${n}}`).split(`${t}+`).join(`${t}{1,${n}}`);return e})(t),i=h++;o(e,i,t),c[e]=i,l[i]=t,a[i]=new RegExp(t,n?"g":void 0),s[i]=new RegExp(r,n?"g":void 
0)};d("NUMERICIDENTIFIER","0|[1-9]\\d*"),d("NUMERICIDENTIFIERLOOSE","\\d+"),d("NONNUMERICIDENTIFIER",`\\d*[a-zA-Z-]${f}*`),d("MAINVERSION",`(${l[c.NUMERICIDENTIFIER]})\\.(${l[c.NUMERICIDENTIFIER]})\\.(${l[c.NUMERICIDENTIFIER]})`),d("MAINVERSIONLOOSE",`(${l[c.NUMERICIDENTIFIERLOOSE]})\\.(${l[c.NUMERICIDENTIFIERLOOSE]})\\.(${l[c.NUMERICIDENTIFIERLOOSE]})`),d("PRERELEASEIDENTIFIER",`(?:${l[c.NUMERICIDENTIFIER]}|${l[c.NONNUMERICIDENTIFIER]})`),d("PRERELEASEIDENTIFIERLOOSE",`(?:${l[c.NUMERICIDENTIFIERLOOSE]}|${l[c.NONNUMERICIDENTIFIER]})`),d("PRERELEASE",`(?:-(${l[c.PRERELEASEIDENTIFIER]}(?:\\.${l[c.PRERELEASEIDENTIFIER]})*))`),d("PRERELEASELOOSE",`(?:-?(${l[c.PRERELEASEIDENTIFIERLOOSE]}(?:\\.${l[c.PRERELEASEIDENTIFIERLOOSE]})*))`),d("BUILDIDENTIFIER",`${f}+`),d("BUILD",`(?:\\+(${l[c.BUILDIDENTIFIER]}(?:\\.${l[c.BUILDIDENTIFIER]})*))`),d("FULLPLAIN",`v?${l[c.MAINVERSION]}${l[c.PRERELEASE]}?${l[c.BUILD]}?`),d("FULL",`^${l[c.FULLPLAIN]}$`),d("LOOSEPLAIN",`[v=\\s]*${l[c.MAINVERSIONLOOSE]}${l[c.PRERELEASELOOSE]}?${l[c.BUILD]}?`),d("LOOSE",`^${l[c.LOOSEPLAIN]}$`),d("GTLT","((?:<|>)?=?)"),d("XRANGEIDENTIFIERLOOSE",`${l[c.NUMERICIDENTIFIERLOOSE]}|x|X|\\*`),d("XRANGEIDENTIFIER",`${l[c.NUMERICIDENTIFIER]}|x|X|\\*`),d("XRANGEPLAIN",`[v=\\s]*(${l[c.XRANGEIDENTIFIER]})(?:\\.(${l[c.XRANGEIDENTIFIER]})(?:\\.(${l[c.XRANGEIDENTIFIER]})(?:${l[c.PRERELEASE]})?${l[c.BUILD]}?)?)?`),d("XRANGEPLAINLOOSE",`[v=\\s]*(${l[c.XRANGEIDENTIFIERLOOSE]})(?:\\.(${l[c.XRANGEIDENTIFIERLOOSE]})(?:\\.(${l[c.XRANGEIDENTIFIERLOOSE]})(?:${l[c.PRERELEASELOOSE]})?${l[c.BUILD]}?)?)?`),d("XRANGE",`^${l[c.GTLT]}\\s*${l[c.XRANGEPLAIN]}$`),d("XRANGELOOSE",`^${l[c.GTLT]}\\s*${l[c.XRANGEPLAINLOOSE]}$`),d("COERCEPLAIN",`(^|[^\\d])(\\d{1,${n}})(?:\\.(\\d{1,${n}}))?(?:\\.(\\d{1,${n}}))?`),d("COERCE",`${l[c.COERCEPLAIN]}(?:$|[^\\d])`),d("COERCEFULL",l[c.COERCEPLAIN]+`(?:${l[c.PRERELEASE]})?`+`(?:${l[c.BUILD]})?(?:$|[^\\d])`),d("COERCERTL",l[c.COERCE],!0),d("COERCERTLFULL",l[c.COERCEFULL],!0),d("LONETILDE","(?:~>?)"),d("TILDETRIM",`(\\s*)${l[c.LONETILDE]}\\s+`,!0),t.tildeTrimReplace="$1~",d("TILDE",`^${l[c.LONETILDE]}${l[c.XRANGEPLAIN]}$`),d("TILDELOOSE",`^${l[c.LONETILDE]}${l[c.XRANGEPLAINLOOSE]}$`),d("LONECARET","(?:\\^)"),d("CARETTRIM",`(\\s*)${l[c.LONECARET]}\\s+`,!0),t.caretTrimReplace="$1^",d("CARET",`^${l[c.LONECARET]}${l[c.XRANGEPLAIN]}$`),d("CARETLOOSE",`^${l[c.LONECARET]}${l[c.XRANGEPLAINLOOSE]}$`),d("COMPARATORLOOSE",`^${l[c.GTLT]}\\s*(${l[c.LOOSEPLAIN]})$|^$`),d("COMPARATOR",`^${l[c.GTLT]}\\s*(${l[c.FULLPLAIN]})$|^$`),d("COMPARATORTRIM",`(\\s*)${l[c.GTLT]}\\s*(${l[c.LOOSEPLAIN]}|${l[c.XRANGEPLAIN]})`,!0),t.comparatorTrimReplace="$1$2$3",d("HYPHENRANGE",`^\\s*(${l[c.XRANGEPLAIN]})\\s+-\\s+(${l[c.XRANGEPLAIN]})\\s*$`),d("HYPHENRANGELOOSE",`^\\s*(${l[c.XRANGEPLAINLOOSE]})\\s+-\\s+(${l[c.XRANGEPLAINLOOSE]})\\s*$`),d("STAR","(<|>)?=?\\s*\\*"),d("GTE0","^\\s*>=\\s*0\\.0\\.0\\s*$"),d("GTE0PRE","^\\s*>=\\s*0\\.0\\.0-0\\s*$")}(ue,ue.exports);var ve=ue.exports;const Ee=/^[0-9]+$/,be=(e,t)=>{const n=Ee.test(e),r=Ee.test(t);return n&&r&&(e=+e,t=+t),e===t?0:n&&!r?-1:r&&!n?1:ebe(t,e)};const we=me,{MAX_LENGTH:Ae,MAX_SAFE_INTEGER:Oe}=ge,{safeRe:xe,t:Ie}=ve,Ne=de,{compareIdentifiers:Le}=ye;var Re=class e{constructor(t,n){if(n=Ne(n),t instanceof e){if(t.loose===!!n.loose&&t.includePrerelease===!!n.includePrerelease)return t;t=t.version}else if("string"!=typeof t)throw new TypeError(`Invalid version. Must be a string. 
Got type "${typeof t}".`);if(t.length>Ae)throw new TypeError(`version is longer than ${Ae} characters`);we("SemVer",t,n),this.options=n,this.loose=!!n.loose,this.includePrerelease=!!n.includePrerelease;const r=t.trim().match(n.loose?xe[Ie.LOOSE]:xe[Ie.FULL]);if(!r)throw new TypeError(`Invalid Version: ${t}`);if(this.raw=t,this.major=+r[1],this.minor=+r[2],this.patch=+r[3],this.major>Oe||this.major<0)throw new TypeError("Invalid major version");if(this.minor>Oe||this.minor<0)throw new TypeError("Invalid minor version");if(this.patch>Oe||this.patch<0)throw new TypeError("Invalid patch version");r[4]?this.prerelease=r[4].split(".").map((e=>{if(/^[0-9]+$/.test(e)){const t=+e;if(t>=0&&t=0;)"number"==typeof this.prerelease[r]&&(this.prerelease[r]++,r=-2);if(-1===r){if(t===this.prerelease.join(".")&&!1===n)throw new Error("invalid increment argument: identifier already exists");this.prerelease.push(e)}}if(t){let r=[t,e];!1===n&&(r=[t]),0===Le(this.prerelease[0],t)?isNaN(this.prerelease[1])&&(this.prerelease=r):this.prerelease=r}break}default:throw new Error(`invalid increment argument: ${e}`)}return this.raw=this.format(),this.build.length&&(this.raw+=`+${this.build.join(".")}`),this}};const $e=Re;var Se=(e,t,n)=>new $e(e,n).compare(new $e(t,n));const Te=Se;const Ce=Se;const De=Se;const Fe=Se;const ke=Se;const _e=Se;const Pe=(e,t,n)=>0===Te(e,t,n),Me=(e,t,n)=>0!==Ce(e,t,n),je=(e,t,n)=>De(e,t,n)>0,ze=(e,t,n)=>Fe(e,t,n)>=0,Ue=(e,t,n)=>ke(e,t,n)<0,Be=(e,t,n)=>_e(e,t,n)<=0;var Ge,We,Xe,Ve,He=(e,t,n,r)=>{switch(t){case"===":return"object"==typeof e&&(e=e.version),"object"==typeof n&&(n=n.version),e===n;case"!==":return"object"==typeof e&&(e=e.version),"object"==typeof n&&(n=n.version),e!==n;case"":case"=":case"==":return Pe(e,n,r);case"!=":return Me(e,n,r);case">":return je(e,n,r);case">=":return ze(e,n,r);case"<":return Ue(e,n,r);case"<=":return Be(e,n,r);default:throw new TypeError(`Invalid operator: ${t}`)}};function qe(){if(Ve)return Xe;Ve=1;class e{constructor(t,i){if(i=n(i),t instanceof e)return t.loose===!!i.loose&&t.includePrerelease===!!i.includePrerelease?t:new e(t.raw,i);if(t instanceof r)return this.raw=t.value,this.set=[[t]],this.format(),this;if(this.options=i,this.loose=!!i.loose,this.includePrerelease=!!i.includePrerelease,this.raw=t.trim().split(/\s+/).join(" "),this.set=this.raw.split("||").map((e=>this.parseRange(e.trim()))).filter((e=>e.length)),!this.set.length)throw new TypeError(`Invalid SemVer Range: ${this.raw}`);if(this.set.length>1){const e=this.set[0];if(this.set=this.set.filter((e=>!d(e[0]))),0===this.set.length)this.set=[e];else if(this.set.length>1)for(const e of this.set)if(1===e.length&&u(e[0])){this.set=[e];break}}this.format()}format(){return this.range=this.set.map((e=>e.join(" ").trim())).join("||").trim(),this.range}toString(){return this.range}parseRange(e){const n=((this.options.includePrerelease&&f)|(this.options.loose&&p))+":"+e,o=t.get(n);if(o)return o;const u=this.options.loose,g=u?a[s.HYPHENRANGELOOSE]:a[s.HYPHENRANGE];e=e.replace(g,N(this.options.includePrerelease)),i("hyphen replace",e),e=e.replace(a[s.COMPARATORTRIM],l),i("comparator trim",e),e=e.replace(a[s.TILDETRIM],c),i("tilde trim",e),e=e.replace(a[s.CARETTRIM],h),i("caret trim",e);let v=e.split(" ").map((e=>m(e,this.options))).join(" ").split(/\s+/).map((e=>I(e,this.options)));u&&(v=v.filter((e=>(i("loose invalid filter",e,this.options),!!e.match(a[s.COMPARATORLOOSE]))))),i("range list",v);const E=new Map,b=v.map((e=>new r(e,this.options)));for(const e of 
b){if(d(e))return[e];E.set(e.value,e)}E.size>1&&E.has("")&&E.delete("");const y=[...E.values()];return t.set(n,y),y}intersects(t,n){if(!(t instanceof e))throw new TypeError("a Range is required");return this.set.some((e=>g(e,n)&&t.set.some((t=>g(t,n)&&e.every((e=>t.every((t=>e.intersects(t,n)))))))))}test(e){if(!e)return!1;if("string"==typeof e)try{e=new o(e,this.options)}catch(e){return!1}for(let t=0;t")||!e.operator.startsWith(">"))&&(!this.operator.startsWith("<")||!e.operator.startsWith("<"))&&(this.semver.version!==e.semver.version||!this.operator.includes("=")||!e.operator.includes("="))&&!(o(this.semver,"<",e.semver,r)&&this.operator.startsWith(">")&&e.operator.startsWith("<"))&&!(o(this.semver,">",e.semver,r)&&this.operator.startsWith("<")&&e.operator.startsWith(">")))}}Ge=t;const n=de,{safeRe:r,t:i}=ve,o=He,a=me,s=Re,l=qe();return Ge}(),i=me,o=Re,{safeRe:a,t:s,comparatorTrimReplace:l,tildeTrimReplace:c,caretTrimReplace:h}=ve,{FLAG_INCLUDE_PRERELEASE:f,FLAG_LOOSE:p}=ge,d=e=>"<0.0.0-0"===e.value,u=e=>""===e.value,g=(e,t)=>{let n=!0;const r=e.slice();let i=r.pop();for(;n&&r.length;)n=r.every((e=>i.intersects(e,t))),i=r.pop();return n},m=(e,t)=>(i("comp",e,t),e=y(e,t),i("caret",e),e=E(e,t),i("tildes",e),e=A(e,t),i("xrange",e),e=x(e,t),i("stars",e),e),v=e=>!e||"x"===e.toLowerCase()||"*"===e,E=(e,t)=>e.trim().split(/\s+/).map((e=>b(e,t))).join(" "),b=(e,t)=>{const n=t.loose?a[s.TILDELOOSE]:a[s.TILDE];return e.replace(n,((t,n,r,o,a)=>{let s;return i("tilde",e,t,n,r,o,a),v(n)?s="":v(r)?s=`>=${n}.0.0 <${+n+1}.0.0-0`:v(o)?s=`>=${n}.${r}.0 <${n}.${+r+1}.0-0`:a?(i("replaceTilde pr",a),s=`>=${n}.${r}.${o}-${a} <${n}.${+r+1}.0-0`):s=`>=${n}.${r}.${o} <${n}.${+r+1}.0-0`,i("tilde return",s),s}))},y=(e,t)=>e.trim().split(/\s+/).map((e=>w(e,t))).join(" "),w=(e,t)=>{i("caret",e,t);const n=t.loose?a[s.CARETLOOSE]:a[s.CARET],r=t.includePrerelease?"-0":"";return e.replace(n,((t,n,o,a,s)=>{let l;return i("caret",e,t,n,o,a,s),v(n)?l="":v(o)?l=`>=${n}.0.0${r} <${+n+1}.0.0-0`:v(a)?l="0"===n?`>=${n}.${o}.0${r} <${n}.${+o+1}.0-0`:`>=${n}.${o}.0${r} <${+n+1}.0.0-0`:s?(i("replaceCaret pr",s),l="0"===n?"0"===o?`>=${n}.${o}.${a}-${s} <${n}.${o}.${+a+1}-0`:`>=${n}.${o}.${a}-${s} <${n}.${+o+1}.0-0`:`>=${n}.${o}.${a}-${s} <${+n+1}.0.0-0`):(i("no pr"),l="0"===n?"0"===o?`>=${n}.${o}.${a}${r} <${n}.${o}.${+a+1}-0`:`>=${n}.${o}.${a}${r} <${n}.${+o+1}.0-0`:`>=${n}.${o}.${a} <${+n+1}.0.0-0`),i("caret return",l),l}))},A=(e,t)=>(i("replaceXRanges",e,t),e.split(/\s+/).map((e=>O(e,t))).join(" ")),O=(e,t)=>{e=e.trim();const n=t.loose?a[s.XRANGELOOSE]:a[s.XRANGE];return e.replace(n,((n,r,o,a,s,l)=>{i("xRange",e,n,r,o,a,s,l);const c=v(o),h=c||v(a),f=h||v(s),p=f;return"="===r&&p&&(r=""),l=t.includePrerelease?"-0":"",c?n=">"===r||"<"===r?"<0.0.0-0":"*":r&&p?(h&&(a=0),s=0,">"===r?(r=">=",h?(o=+o+1,a=0,s=0):(a=+a+1,s=0)):"<="===r&&(r="<",h?o=+o+1:a=+a+1),"<"===r&&(l="-0"),n=`${r+o}.${a}.${s}${l}`):h?n=`>=${o}.0.0${l} <${+o+1}.0.0-0`:f&&(n=`>=${o}.${a}.0${l} <${o}.${+a+1}.0-0`),i("xRange return",n),n}))},x=(e,t)=>(i("replaceStars",e,t),e.trim().replace(a[s.STAR],"")),I=(e,t)=>(i("replaceGTE0",e,t),e.trim().replace(a[t.includePrerelease?s.GTE0PRE:s.GTE0],"")),N=e=>(t,n,r,i,o,a,s,l,c,h,f,p,d)=>`${n=v(r)?"":v(i)?`>=${r}.0.0${e?"-0":""}`:v(o)?`>=${r}.${i}.0${e?"-0":""}`:a?`>=${n}`:`>=${n}${e?"-0":""}`} ${l=v(c)?"":v(h)?`<${+c+1}.0.0-0`:v(f)?`<${c}.${+h+1}.0-0`:p?`<=${c}.${h}.${f}-${p}`:e?`<${c}.${h}.${+f+1}-0`:`<=${l}`}`.trim(),L=(e,t,n)=>{for(let n=0;n0){const 
r=e[n].semver;if(r.major===t.major&&r.minor===t.minor&&r.patch===t.patch)return!0}return!1}return!0};return Xe}const Ye=qe();var Je=(e,t,n)=>{try{t=new Ye(t,n)}catch(e){return!1}return t.test(e)},Qe=F(Je);var Ze={NaN:NaN,E:Math.E,LN2:Math.LN2,LN10:Math.LN10,LOG2E:Math.LOG2E,LOG10E:Math.LOG10E,PI:Math.PI,SQRT1_2:Math.SQRT1_2,SQRT2:Math.SQRT2,MIN_VALUE:Number.MIN_VALUE,MAX_VALUE:Number.MAX_VALUE},Ke={"*":(e,t)=>e*t,"+":(e,t)=>e+t,"-":(e,t)=>e-t,"/":(e,t)=>e/t,"%":(e,t)=>e%t,">":(e,t)=>e>t,"<":(e,t)=>ee<=t,">=":(e,t)=>e>=t,"==":(e,t)=>e==t,"!=":(e,t)=>e!=t,"===":(e,t)=>e===t,"!==":(e,t)=>e!==t,"&":(e,t)=>e&t,"|":(e,t)=>e|t,"^":(e,t)=>e^t,"<<":(e,t)=>e<>":(e,t)=>e>>t,">>>":(e,t)=>e>>>t},et={"+":e=>+e,"-":e=>-e,"~":e=>~e,"!":e=>!e};const tt=Array.prototype.slice,nt=(e,t,n)=>{const r=n?n(t[0]):t[0];return r[e].apply(r,tt.call(t,1))};var rt={isNaN:Number.isNaN,isFinite:Number.isFinite,abs:Math.abs,acos:Math.acos,asin:Math.asin,atan:Math.atan,atan2:Math.atan2,ceil:Math.ceil,cos:Math.cos,exp:Math.exp,floor:Math.floor,log:Math.log,max:Math.max,min:Math.min,pow:Math.pow,random:Math.random,round:Math.round,sin:Math.sin,sqrt:Math.sqrt,tan:Math.tan,clamp:(e,t,n)=>Math.max(t,Math.min(n,e)),now:Date.now,utc:Date.UTC,datetime:(e,t,n,r,i,o,a)=>new Date(e,t||0,null!=n?n:1,r||0,i||0,o||0,a||0),date:e=>new Date(e).getDate(),day:e=>new Date(e).getDay(),year:e=>new Date(e).getFullYear(),month:e=>new Date(e).getMonth(),hours:e=>new Date(e).getHours(),minutes:e=>new Date(e).getMinutes(),seconds:e=>new Date(e).getSeconds(),milliseconds:e=>new Date(e).getMilliseconds(),time:e=>new Date(e).getTime(),timezoneoffset:e=>new Date(e).getTimezoneOffset(),utcdate:e=>new Date(e).getUTCDate(),utcday:e=>new Date(e).getUTCDay(),utcyear:e=>new Date(e).getUTCFullYear(),utcmonth:e=>new Date(e).getUTCMonth(),utchours:e=>new Date(e).getUTCHours(),utcminutes:e=>new Date(e).getUTCMinutes(),utcseconds:e=>new Date(e).getUTCSeconds(),utcmilliseconds:e=>new Date(e).getUTCMilliseconds(),length:e=>e.length,join:function(){return nt("join",arguments)},indexof:function(){return nt("indexOf",arguments)},lastindexof:function(){return nt("lastIndexOf",arguments)},slice:function(){return nt("slice",arguments)},reverse:e=>e.slice().reverse(),parseFloat:parseFloat,parseInt:parseInt,upper:e=>String(e).toUpperCase(),lower:e=>String(e).toLowerCase(),substring:function(){return nt("substring",arguments,String)},split:function(){return nt("split",arguments,String)},replace:function(){return nt("replace",arguments,String)},trim:e=>String(e).trim(),regexp:RegExp,test:(e,t)=>RegExp(e).test(t)};const it=["view","item","group","xy","x","y"],ot=new Set([Function,eval,setTimeout,setInterval]);"function"==typeof setImmediate&&ot.add(setImmediate);const at={Literal:(e,t)=>t.value,Identifier:(e,t)=>{const n=t.name;return e.memberDepth>0?n:"datum"===n?e.datum:"event"===n?e.event:"item"===n?e.item:Ze[n]||e.params["$"+n]},MemberExpression:(e,t)=>{const n=!t.computed,r=e(t.object);n&&(e.memberDepth+=1);const i=e(t.property);if(n&&(e.memberDepth-=1),!ot.has(r[i]))return r[i];console.error(`Prevented interpretation of member "${i}" which could lead to insecure code execution`)},CallExpression:(e,t)=>{const n=t.arguments;let r=t.callee.name;return 
r.startsWith("_")&&(r=r.slice(1)),"if"===r?e(n[0])?e(n[1]):e(n[2]):(e.fn[r]||rt[r]).apply(e.fn,n.map(e))},ArrayExpression:(e,t)=>t.elements.map(e),BinaryExpression:(e,t)=>Ke[t.operator](e(t.left),e(t.right)),UnaryExpression:(e,t)=>et[t.operator](e(t.argument)),ConditionalExpression:(e,t)=>e(t.test)?e(t.consequent):e(t.alternate),LogicalExpression:(e,t)=>"&&"===t.operator?e(t.left)&&e(t.right):e(t.left)||e(t.right),ObjectExpression:(e,t)=>t.properties.reduce(((t,n)=>{e.memberDepth+=1;const r=e(n.key);return e.memberDepth-=1,ot.has(e(n.value))?console.error(`Prevented interpretation of property "${r}" which could lead to insecure code execution`):t[r]=e(n.value),t}),{})};function st(e,t,n,r,i,o){const a=e=>at[e.type](a,e);return a.memberDepth=0,a.fn=Object.create(t),a.params=n,a.datum=r,a.event=i,a.item=o,it.forEach((e=>a.fn[e]=function(){return i.vega[e](...arguments)})),a(e)}var lt={operator(e,t){const n=t.ast,r=e.functions;return e=>st(n,r,e)},parameter(e,t){const n=t.ast,r=e.functions;return(e,t)=>st(n,r,t,e)},event(e,t){const n=t.ast,r=e.functions;return e=>st(n,r,void 0,void 0,e)},handler(e,t){const n=t.ast,r=e.functions;return(e,t)=>{const i=t.item&&t.item.datum;return st(n,r,e,i,t)}},encode(e,t){const{marktype:n,channels:r}=t,i=e.functions,o="group"===n||"image"===n||"rect"===n;return(e,t)=>{const a=e.datum;let s,l=0;for(const n in r)s=st(r[n].ast,i,t,a,void 0,e),e[n]!==s&&(e[n]=s,l=1);return"rule"!==n&&function(e,t,n){let r;t.x2&&(t.x?(n&&e.x>e.x2&&(r=e.x,e.x=e.x2,e.x2=r),e.width=e.x2-e.x):e.x=e.x2-(e.width||0)),t.xc&&(e.x=e.xc-(e.width||0)/2),t.y2&&(t.y?(n&&e.y>e.y2&&(r=e.y,e.y=e.y2,e.y2=r),e.height=e.y2-e.y):e.y=e.y2-(e.height||0)),t.yc&&(e.y=e.yc-(e.height||0)/2)}(e,r,o),l}}};function ct(e){const[t,n]=/schema\/([\w-]+)\/([\w\.\-]+)\.json$/g.exec(e).slice(1,3);return{library:t,version:n}}var ht="2.14.0";const 
ft="#fff",pt="#888",dt={background:"#333",view:{stroke:pt},title:{color:ft,subtitleColor:ft},style:{"guide-label":{fill:ft},"guide-title":{fill:ft}},axis:{domainColor:ft,gridColor:pt,tickColor:ft}},ut="#4572a7",gt={background:"#fff",arc:{fill:ut},area:{fill:ut},line:{stroke:ut,strokeWidth:2},path:{stroke:ut},rect:{fill:ut},shape:{stroke:ut},symbol:{fill:ut,strokeWidth:1.5,size:50},axis:{bandPosition:.5,grid:!0,gridColor:"#000000",gridOpacity:1,gridWidth:.5,labelPadding:10,tickSize:5,tickWidth:.5},axisBand:{grid:!1,tickExtra:!0},legend:{labelBaseline:"middle",labelFontSize:11,symbolSize:50,symbolType:"square"},range:{category:["#4572a7","#aa4643","#8aa453","#71598e","#4598ae","#d98445","#94aace","#d09393","#b9cc98","#a99cbc"]}},mt="#30a2da",vt="#cbcbcb",Et="#f0f0f0",bt="#333",yt={arc:{fill:mt},area:{fill:mt},axis:{domainColor:vt,grid:!0,gridColor:vt,gridWidth:1,labelColor:"#999",labelFontSize:10,titleColor:"#333",tickColor:vt,tickSize:10,titleFontSize:14,titlePadding:10,labelPadding:4},axisBand:{grid:!1},background:Et,group:{fill:Et},legend:{labelColor:bt,labelFontSize:11,padding:1,symbolSize:30,symbolType:"square",titleColor:bt,titleFontSize:14,titlePadding:10},line:{stroke:mt,strokeWidth:2},path:{stroke:mt,strokeWidth:.5},rect:{fill:mt},range:{category:["#30a2da","#fc4f30","#e5ae38","#6d904f","#8b8b8b","#b96db8","#ff9e27","#56cc60","#52d2ca","#52689e","#545454","#9fe4f8"],diverging:["#cc0020","#e77866","#f6e7e1","#d6e8ed","#91bfd9","#1d78b5"],heatmap:["#d6e8ed","#cee0e5","#91bfd9","#549cc6","#1d78b5"]},point:{filled:!0,shape:"circle"},shape:{stroke:mt},bar:{binSpacing:2,fill:mt,stroke:null},title:{anchor:"start",fontSize:24,fontWeight:600,offset:20}},wt="#000",At={group:{fill:"#e5e5e5"},arc:{fill:wt},area:{fill:wt},line:{stroke:wt},path:{stroke:wt},rect:{fill:wt},shape:{stroke:wt},symbol:{fill:wt,size:40},axis:{domain:!1,grid:!0,gridColor:"#FFFFFF",gridOpacity:1,labelColor:"#7F7F7F",labelPadding:4,tickColor:"#7F7F7F",tickSize:5.67,titleFontSize:16,titleFontWeight:"normal"},legend:{labelBaseline:"middle",labelFontSize:11,symbolSize:40},range:{category:["#000000","#7F7F7F","#1A1A1A","#999999","#333333","#B0B0B0","#4D4D4D","#C9C9C9","#666666","#DCDCDC"]}},Ot="Benton Gothic, sans-serif",xt="#82c6df",It="Benton Gothic Bold, 
sans-serif",Nt="normal",Lt={"category-6":["#ec8431","#829eb1","#c89d29","#3580b1","#adc839","#ab7fb4"],"fire-7":["#fbf2c7","#f9e39c","#f8d36e","#f4bb6a","#e68a4f","#d15a40","#ab4232"],"fireandice-6":["#e68a4f","#f4bb6a","#f9e39c","#dadfe2","#a6b7c6","#849eae"],"ice-7":["#edefee","#dadfe2","#c4ccd2","#a6b7c6","#849eae","#607785","#47525d"]},Rt={background:"#ffffff",title:{anchor:"start",color:"#000000",font:It,fontSize:22,fontWeight:"normal"},arc:{fill:xt},area:{fill:xt},line:{stroke:xt,strokeWidth:2},path:{stroke:xt},rect:{fill:xt},shape:{stroke:xt},symbol:{fill:xt,size:30},axis:{labelFont:Ot,labelFontSize:11.5,labelFontWeight:"normal",titleFont:It,titleFontSize:13,titleFontWeight:Nt},axisX:{labelAngle:0,labelPadding:4,tickSize:3},axisY:{labelBaseline:"middle",maxExtent:45,minExtent:45,tickSize:2,titleAlign:"left",titleAngle:0,titleX:-45,titleY:-11},legend:{labelFont:Ot,labelFontSize:11.5,symbolType:"square",titleFont:It,titleFontSize:13,titleFontWeight:Nt},range:{category:Lt["category-6"],diverging:Lt["fireandice-6"],heatmap:Lt["fire-7"],ordinal:Lt["fire-7"],ramp:Lt["fire-7"]}},$t="#ab5787",St="#979797",Tt={background:"#f9f9f9",arc:{fill:$t},area:{fill:$t},line:{stroke:$t},path:{stroke:$t},rect:{fill:$t},shape:{stroke:$t},symbol:{fill:$t,size:30},axis:{domainColor:St,domainWidth:.5,gridWidth:.2,labelColor:St,tickColor:St,tickWidth:.2,titleColor:St},axisBand:{grid:!1},axisX:{grid:!0,tickSize:10},axisY:{domain:!1,grid:!0,tickSize:0},legend:{labelFontSize:11,padding:1,symbolSize:30,symbolType:"square"},range:{category:["#ab5787","#51b2e5","#703c5c","#168dd9","#d190b6","#00609f","#d365ba","#154866","#666666","#c4c4c4"]}},Ct="#3e5c69",Dt={background:"#fff",arc:{fill:Ct},area:{fill:Ct},line:{stroke:Ct},path:{stroke:Ct},rect:{fill:Ct},shape:{stroke:Ct},symbol:{fill:Ct},axis:{domainWidth:.5,grid:!0,labelPadding:2,tickSize:5,tickWidth:.5,titleFontWeight:"normal"},axisBand:{grid:!1},axisX:{gridWidth:.2},axisY:{gridDash:[3],gridWidth:.4},legend:{labelFontSize:11,padding:1,symbolType:"square"},range:{category:["#3e5c69","#6793a6","#182429","#0570b0","#3690c0","#74a9cf","#a6bddb","#e2ddf2"]}},Ft="#1696d2",kt="#000000",_t="Lato",Pt="Lato",Mt={"main-colors":["#1696d2","#d2d2d2","#000000","#fdbf11","#ec008b","#55b748","#5c5859","#db2b27"],"shades-blue":["#CFE8F3","#A2D4EC","#73BFE2","#46ABDB","#1696D2","#12719E","#0A4C6A","#062635"],"shades-gray":["#F5F5F5","#ECECEC","#E3E3E3","#DCDBDB","#D2D2D2","#9D9D9D","#696969","#353535"],"shades-yellow":["#FFF2CF","#FCE39E","#FDD870","#FCCB41","#FDBF11","#E88E2D","#CA5800","#843215"],"shades-magenta":["#F5CBDF","#EB99C2","#E46AA7","#E54096","#EC008B","#AF1F6B","#761548","#351123"],"shades-green":["#DCEDD9","#BCDEB4","#98CF90","#78C26D","#55B748","#408941","#2C5C2D","#1A2E19"],"shades-black":["#D5D5D4","#ADABAC","#848081","#5C5859","#332D2F","#262223","#1A1717","#0E0C0D"],"shades-red":["#F8D5D4","#F1AAA9","#E9807D","#E25552","#DB2B27","#A4201D","#6E1614","#370B0A"],"one-group":["#1696d2","#000000"],"two-groups-cat-1":["#1696d2","#000000"],"two-groups-cat-2":["#1696d2","#fdbf11"],"two-groups-cat-3":["#1696d2","#db2b27"],"two-groups-seq":["#a2d4ec","#1696d2"],"three-groups-cat":["#1696d2","#fdbf11","#000000"],"three-groups-seq":["#a2d4ec","#1696d2","#0a4c6a"],"four-groups-cat-1":["#000000","#d2d2d2","#fdbf11","#1696d2"],"four-groups-cat-2":["#1696d2","#ec0008b","#fdbf11","#5c5859"],"four-groups-seq":["#cfe8f3","#73bf42","#1696d2","#0a4c6a"],"five-groups-cat-1":["#1696d2","#fdbf11","#d2d2d2","#ec008b","#000000"],"five-groups-cat-2":["#1696d2","#0a4c6a","#d2d2d2","#fdbf1
1","#332d2f"],"five-groups-seq":["#cfe8f3","#73bf42","#1696d2","#0a4c6a","#000000"],"six-groups-cat-1":["#1696d2","#ec008b","#fdbf11","#000000","#d2d2d2","#55b748"],"six-groups-cat-2":["#1696d2","#d2d2d2","#ec008b","#fdbf11","#332d2f","#0a4c6a"],"six-groups-seq":["#cfe8f3","#a2d4ec","#73bfe2","#46abdb","#1696d2","#12719e"],"diverging-colors":["#ca5800","#fdbf11","#fdd870","#fff2cf","#cfe8f3","#73bfe2","#1696d2","#0a4c6a"]},jt={background:"#FFFFFF",title:{anchor:"start",fontSize:18,font:_t},axisX:{domain:!0,domainColor:kt,domainWidth:1,grid:!1,labelFontSize:12,labelFont:Pt,labelAngle:0,tickColor:kt,tickSize:5,titleFontSize:12,titlePadding:10,titleFont:_t},axisY:{domain:!1,domainWidth:1,grid:!0,gridColor:"#DEDDDD",gridWidth:1,labelFontSize:12,labelFont:Pt,labelPadding:8,ticks:!1,titleFontSize:12,titlePadding:10,titleFont:_t,titleAngle:0,titleY:-10,titleX:18},legend:{labelFontSize:12,labelFont:Pt,symbolSize:100,titleFontSize:12,titlePadding:10,titleFont:_t,orient:"right",offset:10},view:{stroke:"transparent"},range:{category:Mt["six-groups-cat-1"],diverging:Mt["diverging-colors"],heatmap:Mt["diverging-colors"],ordinal:Mt["six-groups-seq"],ramp:Mt["shades-blue"]},area:{fill:Ft},rect:{fill:Ft},line:{color:Ft,stroke:Ft,strokeWidth:5},trail:{color:Ft,stroke:Ft,strokeWidth:0,size:1},path:{stroke:Ft,strokeWidth:.5},point:{filled:!0},text:{font:"Lato",color:Ft,fontSize:11,align:"center",fontWeight:400,size:11},style:{bar:{fill:Ft,stroke:null}},arc:{fill:Ft},shape:{stroke:Ft},symbol:{fill:Ft,size:30}},zt="#3366CC",Ut="#ccc",Bt="Arial, sans-serif",Gt={arc:{fill:zt},area:{fill:zt},path:{stroke:zt},rect:{fill:zt},shape:{stroke:zt},symbol:{stroke:zt},circle:{fill:zt},background:"#fff",padding:{top:10,right:10,bottom:10,left:10},style:{"guide-label":{font:Bt,fontSize:12},"guide-title":{font:Bt,fontSize:12},"group-title":{font:Bt,fontSize:12}},title:{font:Bt,fontSize:14,fontWeight:"bold",dy:-3,anchor:"start"},axis:{gridColor:Ut,tickColor:Ut,domain:!1,grid:!0},range:{category:["#4285F4","#DB4437","#F4B400","#0F9D58","#AB47BC","#00ACC1","#FF7043","#9E9D24","#5C6BC0","#F06292","#00796B","#C2185B"],heatmap:["#c6dafc","#5e97f6","#2a56c6"]}},Wt=e=>e*(1/3+1),Xt=Wt(9),Vt=Wt(10),Ht=Wt(12),qt="Segoe UI",Yt="wf_standard-font, helvetica, arial, sans-serif",Jt="#252423",Qt="#605E5C",Zt="transparent",Kt="#118DFF",en="#DEEFFF",tn=[en,Kt],nn={view:{stroke:Zt},background:Zt,font:qt,header:{titleFont:Yt,titleFontSize:Ht,titleColor:Jt,labelFont:qt,labelFontSize:Vt,labelColor:Qt},axis:{ticks:!1,grid:!1,domain:!1,labelColor:Qt,labelFontSize:Xt,titleFont:Yt,titleColor:Jt,titleFontSize:Ht,titleFontWeight:"normal"},axisQuantitative:{tickCount:3,grid:!0,gridColor:"#C8C6C4",gridDash:[1,5],labelFlush:!1},axisBand:{tickExtra:!0},axisX:{labelPadding:5},axisY:{labelPadding:10},bar:{fill:Kt},line:{stroke:Kt,strokeWidth:3,strokeCap:"round",strokeJoin:"round"},text:{font:qt,fontSize:Xt,fill:Qt},arc:{fill:Kt},area:{fill:Kt,line:!0,opacity:.6},path:{stroke:Kt},rect:{fill:Kt},point:{fill:Kt,filled:!0,size:75},shape:{stroke:Kt},symbol:{fill:Kt,strokeWidth:1.5,size:50},legend:{titleFont:qt,titleFontWeight:"bold",titleColor:Qt,labelFont:qt,labelFontSize:Vt,labelColor:Qt,symbolType:"circle",symbolSize:75},range:{category:[Kt,"#12239E","#E66C37","#6B007B","#E044A7","#744EC2","#D9B300","#D64550"],diverging:tn,heatmap:tn,ordinal:[en,"#c7e4ff","#b0d9ff","#9aceff","#83c3ff","#6cb9ff","#55aeff","#3fa3ff","#2898ff",Kt]}},rn='IBM Plex 
Sans,system-ui,-apple-system,BlinkMacSystemFont,".sfnstext-regular",sans-serif',on=["#8a3ffc","#33b1ff","#007d79","#ff7eb6","#fa4d56","#fff1f1","#6fdc8c","#4589ff","#d12771","#d2a106","#08bdba","#bae6ff","#ba4e00","#d4bbff"],an=["#6929c4","#1192e8","#005d5d","#9f1853","#fa4d56","#570408","#198038","#002d9c","#ee538b","#b28600","#009d9a","#012749","#8a3800","#a56eff"];function sn({type:e,background:t}){const n="dark"===e?"#161616":"#ffffff",r="dark"===e?"#f4f4f4":"#161616",i="dark"===e?"#d4bbff":"#6929c4";return{background:t,arc:{fill:i},area:{fill:i},path:{stroke:i},rect:{fill:i},shape:{stroke:i},symbol:{stroke:i},circle:{fill:i},view:{fill:n,stroke:n},group:{fill:n},title:{color:r,anchor:"start",dy:-15,fontSize:16,font:rn,fontWeight:600},axis:{labelColor:r,labelFontSize:12,grid:!0,gridColor:"#525252",titleColor:r,labelAngle:0},style:{"guide-label":{font:rn,fill:r,fontWeight:400},"guide-title":{font:rn,fill:r,fontWeight:400}},range:{category:"dark"===e?on:an,diverging:["#750e13","#a2191f","#da1e28","#fa4d56","#ff8389","#ffb3b8","#ffd7d9","#fff1f1","#e5f6ff","#bae6ff","#82cfff","#33b1ff","#1192e8","#0072c3","#00539a","#003a6d"],heatmap:["#f6f2ff","#e8daff","#d4bbff","#be95ff","#a56eff","#8a3ffc","#6929c4","#491d8b","#31135e","#1c0f30"]}}}const ln=sn({type:"light",background:"#ffffff"}),cn=sn({type:"light",background:"#f4f4f4"}),hn=sn({type:"dark",background:"#262626"}),fn=sn({type:"dark",background:"#161616"}),pn=ht;var dn=Object.freeze({__proto__:null,carbong10:cn,carbong100:fn,carbong90:hn,carbonwhite:ln,dark:dt,excel:gt,fivethirtyeight:yt,ggplot2:At,googlecharts:Gt,latimes:Rt,powerbi:nn,quartz:Tt,urbaninstitute:jt,version:pn,vox:Dt});function un(e,t,n){return e.fields=t||[],e.fname=n,e}function gn(e){return 1===e.length?mn(e[0]):vn(e)}const mn=e=>function(t){return t[e]},vn=e=>{const t=e.length;return function(n){for(let r=0;rr&&c(),s=r=i+1):"]"===o&&(s||En("Access path missing open bracket: "+e),s>0&&c(),s=0,r=i+1):i>r?c():r=i+1}return s&&En("Access path missing closing bracket: "+e),a&&En("Access path missing closing quote: "+e),i>r&&(i++,c()),t}(e);e=1===r.length?r[0]:e,un((n&&n.get||gn)(r),[e],t||e)}("id"),un((e=>e),[],"identity"),un((()=>0),[],"zero"),un((()=>1),[],"one"),un((()=>!0),[],"true"),un((()=>!1),[],"false");var bn=Array.isArray;function yn(e){return e===Object(e)}function wn(e,t){return JSON.stringify(e,function(e){const t=[];return function(n,r){if("object"!=typeof r||null===r)return r;const i=t.indexOf(this)+1;return t.length=i,t.length>e?"[Object]":t.indexOf(r)>=0?"[Circular]":(t.push(r),r)}}(t))}var An="#vg-tooltip-element {\n visibility: hidden;\n padding: 8px;\n position: fixed;\n z-index: 1000;\n font-family: sans-serif;\n font-size: 11px;\n border-radius: 3px;\n box-shadow: 2px 2px 4px rgba(0, 0, 0, 0.1);\n /* The default theme is the light theme. 
*/\n background-color: rgba(255, 255, 255, 0.95);\n border: 1px solid #d9d9d9;\n color: black;\n}\n#vg-tooltip-element.visible {\n visibility: visible;\n}\n#vg-tooltip-element h2 {\n margin-top: 0;\n margin-bottom: 10px;\n font-size: 13px;\n}\n#vg-tooltip-element table {\n border-spacing: 0;\n}\n#vg-tooltip-element table tr {\n border: none;\n}\n#vg-tooltip-element table tr td {\n overflow: hidden;\n text-overflow: ellipsis;\n padding-top: 2px;\n padding-bottom: 2px;\n}\n#vg-tooltip-element table tr td.key {\n color: #808080;\n max-width: 150px;\n text-align: right;\n padding-right: 4px;\n}\n#vg-tooltip-element table tr td.value {\n display: block;\n max-width: 300px;\n max-height: 7em;\n text-align: left;\n}\n#vg-tooltip-element.dark-theme {\n background-color: rgba(32, 32, 32, 0.9);\n border: 1px solid #f5f5f5;\n color: white;\n}\n#vg-tooltip-element.dark-theme td.key {\n color: #bfbfbf;\n}\n";const On="vg-tooltip-element",xn={offsetX:10,offsetY:10,id:On,styleId:"vega-tooltip-style",theme:"light",disableDefaultStyle:!1,sanitize:function(e){return String(e).replace(/&/g,"&amp;").replace(/</g,"&lt;")},maxDepth:2,formatTooltip:function(e,t,n,r){if(bn(e))return`[${e.map((e=>t("string"==typeof e?e:wn(e,n)))).join(", ")}]`;if(yn(e)){let i="";const{title:o,image:a,...s}=e;o&&(i+=`<h2>${t(o)}</h2>`),a&&(i+=`<img src="${new URL(t(a),r||location.href).href}" alt="${t(o)}">`);const l=Object.keys(s);if(l.length>0){i+="<table>";for(const e of l){let r=s[e];void 0!==r&&(yn(r)&&(r=wn(r,n)),i+=`<tr><td class="key">${t(e)}</td><td class="value">${t(r)}</td></tr>`)}i+="</table>"}return i||"{}"}return t(e)},baseURL:""};class In{constructor(e){this.options={...xn,...e};const t=this.options.id;if(this.el=null,this.call=this.tooltipHandler.bind(this),!this.options.disableDefaultStyle&&!document.getElementById(this.options.styleId)){const e=document.createElement("style");e.setAttribute("id",this.options.styleId),e.innerHTML=function(e){if(!/^[A-Za-z]+[-:.\w]*$/.test(e))throw new Error("Invalid HTML ID");return An.toString().replace(On,e)}(t);const n=document.head;n.childNodes.length>0?n.insertBefore(e,n.childNodes[0]):n.appendChild(e)}}tooltipHandler(e,t,n,r){if(this.el=document.getElementById(this.options.id),!this.el){this.el=document.createElement("div"),this.el.setAttribute("id",this.options.id),this.el.classList.add("vg-tooltip");(document.fullscreenElement??document.body).appendChild(this.el)}if(null==r||""===r)return void this.el.classList.remove("visible",`${this.options.theme}-theme`);this.el.innerHTML=this.options.formatTooltip(r,this.options.sanitize,this.options.maxDepth,this.options.baseURL),this.el.classList.add("visible",`${this.options.theme}-theme`);const{x:i,y:o}=function(e,t,n,r){let i=e.clientX+n;i+t.width>window.innerWidth&&(i=+e.clientX-n-t.width);let o=e.clientY+r;return o+t.height>window.innerHeight&&(o=+e.clientY-r-t.height),{x:i,y:o}}(t,this.el.getBoundingClientRect(),this.options.offsetX,this.options.offsetY);this.el.style.top=`${o}px`,this.el.style.left=`${i}px`}}var Nn='.vega-embed {\n position: relative;\n display: inline-block;\n box-sizing: border-box;\n}\n.vega-embed.has-actions {\n padding-right: 38px;\n}\n.vega-embed details:not([open]) > :not(summary) {\n display: none !important;\n}\n.vega-embed summary {\n list-style: none;\n position: absolute;\n top: 0;\n right: 0;\n padding: 6px;\n z-index: 1000;\n background: white;\n box-shadow: 1px 1px 3px rgba(0, 0, 0, 0.1);\n color: #1b1e23;\n border: 1px solid #aaa;\n border-radius: 999px;\n opacity: 0.2;\n transition: opacity 0.4s ease-in;\n cursor: pointer;\n line-height: 0px;\n}\n.vega-embed summary::-webkit-details-marker {\n display: none;\n}\n.vega-embed summary:active {\n box-shadow: #aaa 0px 0px 0px 1px inset;\n}\n.vega-embed summary svg {\n width: 14px;\n height: 14px;\n}\n.vega-embed details[open] summary {\n opacity: 0.7;\n}\n.vega-embed:hover summary, .vega-embed:focus-within summary {\n opacity: 1 !important;\n transition: opacity 0.2s ease;\n}\n.vega-embed .vega-actions {\n position: absolute;\n z-index: 1001;\n top: 35px;\n right: -9px;\n display: flex;\n flex-direction: column;\n padding-bottom: 8px;\n padding-top: 8px;\n border-radius: 4px;\n box-shadow: 0 2px 8px 0 rgba(0, 0, 0, 0.2);\n border: 1px solid #d9d9d9;\n background: white;\n animation-duration: 0.15s;\n animation-name: scale-in;\n animation-timing-function: cubic-bezier(0.2, 0, 0.13, 1.5);\n text-align: left;\n}\n.vega-embed .vega-actions a {\n padding: 8px 16px;\n font-family: sans-serif;\n font-size: 14px;\n font-weight: 600;\n white-space: nowrap;\n color: #434a56;\n text-decoration: none;\n}\n.vega-embed .vega-actions a:hover, .vega-embed .vega-actions a:focus {\n background-color: #f7f7f9;\n color: black;\n}\n.vega-embed .vega-actions::before, .vega-embed .vega-actions::after {\n content: "";\n display: inline-block;\n position: absolute;\n}\n.vega-embed .vega-actions::before {\n left: auto;\n right: 14px;\n top: -16px;\n border: 8px solid rgba(0, 0, 0, 0);\n border-bottom-color: #d9d9d9;\n}\n.vega-embed .vega-actions::after {\n left: auto;\n right: 15px;\n top: -14px;\n border: 7px solid rgba(0, 
0, 0, 0);\n border-bottom-color: #fff;\n}\n.vega-embed .chart-wrapper.fit-x {\n width: 100%;\n}\n.vega-embed .chart-wrapper.fit-y {\n height: 100%;\n}\n\n.vega-embed-wrapper {\n max-width: 100%;\n overflow: auto;\n padding-right: 14px;\n}\n\n@keyframes scale-in {\n from {\n opacity: 0;\n transform: scale(0.6);\n }\n to {\n opacity: 1;\n transform: scale(1);\n }\n}\n';function Ln(e,...t){for(const n of t)Rn(e,n);return e}function Rn(t,n){for(const r of Object.keys(n))e.writeConfig(t,r,n[r],!0)}const $n="6.25.0",Sn=i;let Tn=o;const Cn="undefined"!=typeof window?window:void 0;void 0===Tn&&Cn?.vl?.compile&&(Tn=Cn.vl);const Dn={export:{svg:!0,png:!0},source:!0,compiled:!0,editor:!0},Fn={CLICK_TO_VIEW_ACTIONS:"Click to view actions",COMPILED_ACTION:"View Compiled Vega",EDITOR_ACTION:"Open in Vega Editor",PNG_ACTION:"Save as PNG",SOURCE_ACTION:"View Source",SVG_ACTION:"Save as SVG"},kn={vega:"Vega","vega-lite":"Vega-Lite"},_n={vega:Sn.version,"vega-lite":Tn?Tn.version:"not available"},Pn={vega:e=>e,"vega-lite":(e,t)=>Tn.compile(e,{config:t}).spec},Mn='\n<svg viewBox="0 0 16 16" fill="currentColor" stroke="none" xmlns="http://www.w3.org/2000/svg" stroke-width="1" stroke-linecap="round" stroke-linejoin="round">\n <circle r="2" cy="8" cx="2"></circle>\n <circle r="2" cy="8" cx="8"></circle>\n <circle r="2" cy="8" cx="14"></circle>\n</svg>',jn="chart-wrapper";function zn(e,t,n,r){const i=`<html><head>${t}</head><body><pre><code class="json">`,o=`</code></pre>${n}</body></html>`,a=window.open("");a.document.write(i+e+o),a.document.title=`${kn[r]} JSON Source`}function Un(e){return!(!e||!("load"in e))}function Bn(e){return Un(e)?e:Sn.loader(e)}async function Gn(t,n,r={}){let i,o;e.isString(n)?(o=Bn(r.loader),i=JSON.parse(await o.load(n))):i=n;const a=function(t){const n=t.usermeta?.embedOptions??{};return e.isString(n.defaultStyle)&&(n.defaultStyle=!1),n}(i),s=a.loader;o&&!s||(o=Bn(r.loader??s));const l=await Wn(a,o),c=await Wn(r,o),h={...Ln(c,l),config:e.mergeConfig(c.config??{},l.config??{})};return await async function(t,n,r={},i){const o=r.theme?e.mergeConfig(dn[r.theme],r.config??{}):r.config,a=e.isBoolean(r.actions)?r.actions:Ln({},Dn,r.actions??{}),s={...Fn,...r.i18n},l=r.renderer??"canvas",c=r.logLevel??Sn.Warn,h=r.downloadFileName??"visualization",f="string"==typeof t?document.querySelector(t):t;if(!f)throw new Error(`${t} does not exist`);if(!1!==r.defaultStyle){const e="vega-embed-style",{root:t,rootContainer:n}=function(e){const t=e.getRootNode?e.getRootNode():document;return t instanceof ShadowRoot?{root:t,rootContainer:t}:{root:document,rootContainer:document.head??document.body}}(f);if(!t.getElementById(e)){const t=document.createElement("style");t.id=e,t.innerHTML=void 0===r.defaultStyle||!0===r.defaultStyle?Nn.toString():r.defaultStyle,n.appendChild(t)}}const p=function(e,t){if(e.$schema){const n=ct(e.$schema);t&&t!==n.library&&console.warn(`The given visualization spec is written in ${kn[n.library]}, but mode argument sets ${kn[t]??t}.`);const r=n.library;return Qe(_n[r],`^${n.version.slice(1)}`)||console.warn(`The input spec uses ${kn[r]} ${n.version}, but the current version of ${kn[r]} is v${_n[r]}.`),r}return"mark"in e||"encoding"in e||"layer"in e||"hconcat"in e||"vconcat"in e||"facet"in e||"repeat"in e?"vega-lite":"marks"in e||"signals"in e||"scales"in e||"axes"in e?"vega":t??"vega"}(n,r.mode);let d=Pn[p](n,o);if("vega-lite"===p&&d.$schema){const e=ct(d.$schema);Qe(_n.vega,`^${e.version.slice(1)}`)||console.warn(`The compiled spec uses Vega ${e.version}, but current version is v${_n.vega}.`)}f.classList.add("vega-embed"),a&&f.classList.add("has-actions");f.innerHTML="";let u=f;if(a){const e=document.createElement("div");e.classList.add(jn),f.appendChild(e),u=e}const g=r.patch;g&&(d=g instanceof Function?g(d):O(d,g,!0,!1).newDocument);r.formatLocale&&Sn.formatLocale(r.formatLocale);r.timeFormatLocale&&Sn.timeFormatLocale(r.timeFormatLocale);if(r.expressionFunctions)for(const e in r.expressionFunctions){const t=r.expressionFunctions[e];"fn"in t?Sn.expressionFunction(e,t.fn,t.visitor):t instanceof Function&&Sn.expressionFunction(e,t)}const{ast:m}=r,v=Sn.parse(d,"vega-lite"===p?{}:o,{ast:m}),E=new(r.viewClass||Sn.View)(v,{loader:i,logLevel:c,renderer:l,...m?{expr:Sn.expressionInterpreter??r.expr??lt}:{}});if(E.addSignalListener("autosize",((e,t)=>{const{type:n}=t;"fit-x"==n?(u.classList.add("fit-x"),u.classList.remove("fit-y")):"fit-y"==n?(u.classList.remove("fit-x"),u.classList.add("fit-y")):"fit"==n?u.classList.add("fit-x","fit-y"):u.classList.remove("fit-x","fit-y")})),!1!==r.tooltip){const{loader:e,tooltip:t}=r,n=e&&!Un(e)?e?.baseURL:void 0,i="function"==typeof t?t:new In({baseURL:n,...!0===t?{}:t}).call;E.tooltip(i)}let b,{hover:y}=r;void 0===y&&(y="vega"===p);if(y){const{hoverSet:e,updateSet:t}="boolean"==typeof y?{}:y;E.hover(e,t)}r&&(null!=r.width&&E.width(r.width),null!=r.height&&E.height(r.height),null!=r.padding&&E.padding(r.padding));if(await E.initialize(u,r.bind).runAsync(),!1!==a){let
t=f;if(!1!==r.defaultStyle||r.forceActionsMenu){const e=document.createElement("details");e.title=s.CLICK_TO_VIEW_ACTIONS,f.append(e),t=e;const n=document.createElement("summary");n.innerHTML=Mn,e.append(n),b=t=>{e.contains(t.target)||e.removeAttribute("open")},document.addEventListener("click",b)}const i=document.createElement("div");if(t.append(i),i.classList.add("vega-actions"),!0===a||!1!==a.export)for(const t of["svg","png"])if(!0===a||!0===a.export||a.export[t]){const n=s[`${t.toUpperCase()}_ACTION`],o=document.createElement("a"),a=e.isObject(r.scaleFactor)?r.scaleFactor[t]:r.scaleFactor;o.text=n,o.href="#",o.target="_blank",o.download=`${h}.${t}`,o.addEventListener("mousedown",(async function(e){e.preventDefault();const n=await E.toImageURL(t,a);this.href=n})),i.append(o)}if(!0===a||!1!==a.source){const e=document.createElement("a");e.text=s.SOURCE_ACTION,e.href="#",e.addEventListener("click",(function(e){zn(j(n),r.sourceHeader??"",r.sourceFooter??"",p),e.preventDefault()})),i.append(e)}if("vega-lite"===p&&(!0===a||!1!==a.compiled)){const e=document.createElement("a");e.text=s.COMPILED_ACTION,e.href="#",e.addEventListener("click",(function(e){zn(j(d),r.sourceHeader??"",r.sourceFooter??"","vega"),e.preventDefault()})),i.append(e)}if(!0===a||!1!==a.editor){const e=r.editorUrl??"https://vega.github.io/editor/",t=document.createElement("a");t.text=s.EDITOR_ACTION,t.href="#",t.addEventListener("click",(function(t){!function(e,t,n){const r=e.open(t),{origin:i}=new URL(t);let o=40;e.addEventListener("message",(function t(n){n.source===r&&(o=0,e.removeEventListener("message",t,!1))}),!1),setTimeout((function e(){o<=0||(r.postMessage(n,i),setTimeout(e,250),o-=1)}),250)}(window,e,{config:o,mode:p,renderer:l,spec:j(n)}),t.preventDefault()})),i.append(t)}}function w(){b&&document.removeEventListener("click",b),E.finalize()}return{view:E,spec:n,vgSpec:d,finalize:w,embedOptions:r}}(t,i,h,o)}async function Wn(t,n){const r=e.isString(t.config)?JSON.parse(await n.load(t.config)):t.config??{},i=e.isString(t.patch)?JSON.parse(await n.load(t.patch)):t.patch;return{...t,...i?{patch:i}:{},...r?{config:r}:{}}}async function Xn(e,t={}){const n=document.createElement("div");n.classList.add("vega-embed-wrapper");const r=document.createElement("div");n.appendChild(r);const i=!0===t.actions||!1===t.actions?t.actions:{export:!0,source:!1,compiled:!0,editor:!0,...t.actions},o=await Gn(r,e,{actions:i,...t});return n.value=o.view,n}const Vn=(...t)=>{return t.length>1&&(e.isString(t[0])&&!((n=t[0]).startsWith("http://")||n.startsWith("https://")||n.startsWith("//"))||t[0]instanceof HTMLElement||3===t.length)?Gn(t[0],t[1],t[2]):Xn(t[0],t[1]);var n};return Vn.vegaLite=Tn,Vn.vl=Tn,Vn.container=Xn,Vn.embed=Gn,Vn.vega=Sn,Vn.default=Gn,Vn.version=$n,Vn})); +//# sourceMappingURL=vega-embed.min.js.map diff --git a/docs/javascripts/vega-lite@5.js b/docs/javascripts/vega-lite@5.js new file mode 100644 index 0000000000..f457480294 --- /dev/null +++ b/docs/javascripts/vega-lite@5.js @@ -0,0 +1,2 @@ +!function(e,t){"object"==typeof exports&&"undefined"!=typeof module?t(exports,require("vega")):"function"==typeof define&&define.amd?define(["exports","vega"],t):t((e="undefined"!=typeof globalThis?globalThis:e||self).vegaLite={},e.vega)}(this,(function(e,t){"use strict";var n="5.18.1";function i(e){return!!e.or}function r(e){return!!e.and}function o(e){return!!e.not}function a(e,t){if(o(e))a(e.not,t);else if(r(e))for(const n of e.and)a(n,t);else if(i(e))for(const n of e.or)a(n,t);else t(e)}function s(e,t){return 
o(e)?{not:s(e.not,t)}:r(e)?{and:e.and.map((e=>s(e,t)))}:i(e)?{or:e.or.map((e=>s(e,t)))}:t(e)}const l=structuredClone;function c(e){throw new Error(e)}function u(e,n){const i={};for(const r of n)t.hasOwnProperty(e,r)&&(i[r]=e[r]);return i}function f(e,t){const n={...e};for(const e of t)delete n[e];return n}function d(e){if(t.isNumber(e))return e;const n=t.isString(e)?e:X(e);if(n.length<250)return n;let i=0;for(let e=0;e1?t-1:0),i=1;i0===t?e:`[${e}]`)),r=e.map(((t,n)=>e.slice(0,n+1).join("")));for(const e of r)n.add(e)}return n}function k(e,t){return void 0===e||void 0===t||$(w(e),w(t))}function S(e){return 0===D(e).length}Set.prototype.toJSON=function(){return`Set(${[...this].map((e=>X(e))).join(",")})`};const D=Object.keys,F=Object.values,z=Object.entries;function O(e){return!0===e||!1===e}function _(e){const t=e.replace(/\W/g,"_");return(e.match(/^\d+/)?"_":"")+t}function C(e,t){return o(e)?`!(${C(e.not,t)})`:r(e)?`(${e.and.map((e=>C(e,t))).join(") && (")})`:i(e)?`(${e.or.map((e=>C(e,t))).join(") || (")})`:t(e)}function N(e,t){if(0===t.length)return!0;const n=t.shift();return n in e&&N(e[n],t)&&delete e[n],S(e)}function P(e){return e.charAt(0).toUpperCase()+e.substr(1)}function A(e){let n=arguments.length>1&&void 0!==arguments[1]?arguments[1]:"datum";const i=t.splitAccessPath(e),r=[];for(let e=1;e<=i.length;e++){const o=`[${i.slice(0,e).map(t.stringValue).join("][")}]`;r.push(`${n}${o}`)}return r.join(" && ")}function j(e){return`${arguments.length>1&&void 0!==arguments[1]?arguments[1]:"datum"}[${t.stringValue(t.splitAccessPath(e).join("."))}]`}function T(e){return e.replace(/(\[|\]|\.|'|")/g,"\\$1")}function E(e){return`${t.splitAccessPath(e).map(T).join("\\.")}`}function M(e,t,n){return e.replace(new RegExp(t.replace(/[-/\\^$*+?.()|[\]{}]/g,"\\$&"),"g"),n)}function L(e){return`${t.splitAccessPath(e).join(".")}`}function q(e){return e?t.splitAccessPath(e).length:0}function U(){for(var e=arguments.length,t=new Array(e),n=0;nfn(e[t])?_(`_${t}_${z(e[t])}`):_(`_${t}_${e[t]}`))).join("")}function ln(e){return!0===e||un(e)&&!e.binned}function cn(e){return"binned"===e||un(e)&&!0===e.binned}function un(e){return t.isObject(e)}function fn(e){return e?.param}function dn(e){switch(e){case Q:case J:case ye:case me:case pe:case ge:case we:case be:case xe:case $e:case he:return 6;case ke:return 4;default:return 10}}function mn(e){return!!e?.expr}function pn(e){const t=D(e||{}),n={};for(const i of t)n[i]=Sn(e[i]);return n}function gn(e){const{anchor:t,frame:n,offset:i,orient:r,angle:o,limit:a,color:s,subtitleColor:l,subtitleFont:c,subtitleFontSize:f,subtitleFontStyle:d,subtitleFontWeight:m,subtitleLineHeight:p,subtitlePadding:g,...h}=e,y={...t?{anchor:t}:{},...n?{frame:n}:{},...i?{offset:i}:{},...r?{orient:r}:{},...void 0!==o?{angle:o}:{},...void 0!==a?{limit:a}:{}},v={...l?{subtitleColor:l}:{},...c?{subtitleFont:c}:{},...f?{subtitleFontSize:f}:{},...d?{subtitleFontStyle:d}:{},...m?{subtitleFontWeight:m}:{},...p?{subtitleLineHeight:p}:{},...g?{subtitlePadding:g}:{}};return{titleMarkConfig:{...h,...s?{fill:s}:{}},subtitleMarkConfig:u(e,["align","baseline","dx","dy","limit"]),nonMarkTitleProperties:y,subtitle:v}}function hn(e){return t.isString(e)||t.isArray(e)&&t.isString(e[0])}function yn(e){return!!e?.signal}function vn(e){return!!e.step}function bn(e){return!t.isArray(e)&&("field"in e&&"data"in e)}const 
xn=D({aria:1,description:1,ariaRole:1,ariaRoleDescription:1,blend:1,opacity:1,fill:1,fillOpacity:1,stroke:1,strokeCap:1,strokeWidth:1,strokeOpacity:1,strokeDash:1,strokeDashOffset:1,strokeJoin:1,strokeOffset:1,strokeMiterLimit:1,startAngle:1,endAngle:1,padAngle:1,innerRadius:1,outerRadius:1,size:1,shape:1,interpolate:1,tension:1,orient:1,align:1,baseline:1,text:1,dir:1,dx:1,dy:1,ellipsis:1,limit:1,radius:1,theta:1,angle:1,font:1,fontSize:1,fontWeight:1,fontStyle:1,lineBreak:1,lineHeight:1,cursor:1,href:1,tooltip:1,cornerRadius:1,cornerRadiusTopLeft:1,cornerRadiusTopRight:1,cornerRadiusBottomLeft:1,cornerRadiusBottomRight:1,aspect:1,width:1,height:1,url:1,smooth:1}),$n={arc:1,area:1,group:1,image:1,line:1,path:1,rect:1,rule:1,shape:1,symbol:1,text:1,trail:1},wn=["cornerRadius","cornerRadiusTopLeft","cornerRadiusTopRight","cornerRadiusBottomLeft","cornerRadiusBottomRight"];function kn(e){const n=t.isArray(e.condition)?e.condition.map(Dn):Dn(e.condition);return{...Sn(e),condition:n}}function Sn(e){if(mn(e)){const{expr:t,...n}=e;return{signal:t,...n}}return e}function Dn(e){if(mn(e)){const{expr:t,...n}=e;return{signal:t,...n}}return e}function Fn(e){if(mn(e)){const{expr:t,...n}=e;return{signal:t,...n}}return yn(e)?e:void 0!==e?{value:e}:void 0}function zn(e){return yn(e)?e.signal:t.stringValue(e.value)}function On(e){return yn(e)?e.signal:null==e?null:t.stringValue(e)}function _n(e,t,n){for(const i of n){const n=Pn(i,t.markDef,t.config);void 0!==n&&(e[i]=Fn(n))}return e}function Cn(e){return[].concat(e.type,e.style??[])}function Nn(e,t,n){let i=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{};const{vgChannel:r,ignoreVgConfig:o}=i;return r&&void 0!==t[r]?t[r]:void 0!==t[e]?t[e]:!o||r&&r!==e?Pn(e,t,n,i):void 0}function Pn(e,t,n){let{vgChannel:i}=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{};return U(i?An(e,t,n.style):void 0,An(e,t,n.style),i?n[t.type][i]:void 0,n[t.type][e],i?n.mark[i]:n.mark[e])}function An(e,t,n){return jn(e,Cn(t),n)}function jn(e,n,i){let r;n=t.array(n);for(const t of n){const n=i[t];n&&void 0!==n[e]&&(r=n[e])}return r}function Tn(e,n){return t.array(e).reduce(((e,t)=>(e.field.push(ta(t,n)),e.order.push(t.sort??"ascending"),e)),{field:[],order:[]})}function En(e,t){const n=[...e];return t.forEach((e=>{for(const t of n)if(Y(t,e))return;n.push(e)})),n}function Mn(e,n){return Y(e,n)||!n?e:e?[...t.array(e),...t.array(n)].join(", "):n}function Ln(e,t){const n=e.value,i=t.value;if(null==n||null===i)return{explicit:e.explicit,value:null};if((hn(n)||yn(n))&&(hn(i)||yn(i)))return{explicit:e.explicit,value:Mn(n,i)};if(hn(n)||yn(n))return{explicit:e.explicit,value:n};if(hn(i)||yn(i))return{explicit:e.explicit,value:i};if(!(hn(n)||yn(n)||hn(i)||yn(i)))return{explicit:e.explicit,value:En(n,i)};throw new Error("It should never reach here")}function qn(e){return`Invalid specification ${X(e)}. Make sure the specification includes at least one of the following properties: "mark", "layer", "facet", "hconcat", "vconcat", "concat", or "repeat".`}const Un='Autosize "fit" only works for single views and layered views.';function Rn(e){return`${"width"==e?"Width":"Height"} "container" only works for single views and layered views.`}function Wn(e){return`${"width"==e?"Width":"Height"} "container" only works well with autosize "fit" or "fit-${"width"==e?"x":"y"}".`}function Bn(e){return e?`Dropping "fit-${e}" because spec has discrete ${rt(e)}.`:'Dropping "fit" because spec has discrete size.'}function In(e){return`Unknown field for ${e}. 
Cannot calculate view size.`}function Hn(e){return`Cannot project a selection on encoding channel "${e}", which has no field.`}function Vn(e,t){return`Cannot project a selection on encoding channel "${e}" as it uses an aggregate function ("${t}").`}function Gn(e){return`Selection not supported for ${e} yet.`}const Yn="The same selection must be used to override scale domains in a layered view.";function Xn(e){return`The "columns" property cannot be used when "${e}" has nested row/column.`}function Qn(e,t,n){return`An ancestor parsed field "${e}" as ${n} but a child wants to parse the field as ${t}.`}function Jn(e){return`Config.customFormatTypes is not true, thus custom format type and format for channel ${e} are dropped.`}function Kn(e){return`${e}Offset dropped because ${e} is continuous`}function Zn(e){return`Invalid field type "${e}".`}function ei(e,t){const{fill:n,stroke:i}=t;return`Dropping color ${e} as the plot also has ${n&&i?"fill and stroke":n?"fill":"stroke"}.`}function ti(e,t){return`Dropping ${X(e)} from channel "${t}" since it does not contain any data field, datum, value, or signal.`}function ni(e,t,n){return`${e} dropped as it is incompatible with "${t}".`}function ii(e){return`${e} encoding should be discrete (ordinal / nominal / binned).`}function ri(e){return`${e} encoding should be discrete (ordinal / nominal / binned) or use a discretizing scale (e.g. threshold).`}function oi(e,t){return`Using discrete channel "${e}" to encode "${t}" field can be misleading as it does not encode ${"ordinal"===t?"order":"magnitude"}.`}function ai(e){return`Using unaggregated domain with raw field has no effect (${X(e)}).`}function si(e){return`Unaggregated domain not applicable for "${e}" since it produces values outside the origin domain of the source data.`}function li(e){return`Unaggregated domain is currently unsupported for log scale (${X(e)}).`}function ci(e,t,n){return`${n}-scale's "${t}" is dropped as it does not work with ${e} scale.`}function ui(e){return`The step for "${e}" is dropped because the ${"width"===e?"x":"y"} is continuous.`}const fi="Domains that should be unioned has conflicting sort properties. 
Sort will be set to true.";function di(e,t){return`Invalid ${e}: ${X(t)}.`}function mi(e){return`1D error band does not support ${e}.`}function pi(e){return`Channel ${e} is required for "binned" bin.`}const gi=t.logger(t.Warn);let hi=gi;function yi(){hi.warn(...arguments)}function vi(e){if(e&&t.isObject(e))for(const t of Fi)if(t in e)return!0;return!1}const bi=["january","february","march","april","may","june","july","august","september","october","november","december"],xi=bi.map((e=>e.substr(0,3))),$i=["sunday","monday","tuesday","wednesday","thursday","friday","saturday"],wi=$i.map((e=>e.substr(0,3)));function ki(e,n){const i=[];if(n&&void 0!==e.day&&D(e).length>1&&(yi(function(e){return`Dropping day from datetime ${X(e)} as day cannot be combined with other units.`}(e)),delete(e=l(e)).day),void 0!==e.year?i.push(e.year):i.push(2012),void 0!==e.month){const r=n?function(e){if(V(e)&&(e=+e),t.isNumber(e))return e-1;{const t=e.toLowerCase(),n=bi.indexOf(t);if(-1!==n)return n;const i=t.substr(0,3),r=xi.indexOf(i);if(-1!==r)return r;throw new Error(di("month",e))}}(e.month):e.month;i.push(r)}else if(void 0!==e.quarter){const r=n?function(e){if(V(e)&&(e=+e),t.isNumber(e))return e>4&&yi(di("quarter",e)),e-1;throw new Error(di("quarter",e))}(e.quarter):e.quarter;i.push(t.isNumber(r)?3*r:`${r}*3`)}else i.push(0);if(void 0!==e.date)i.push(e.date);else if(void 0!==e.day){const r=n?function(e){if(V(e)&&(e=+e),t.isNumber(e))return e%7;{const t=e.toLowerCase(),n=$i.indexOf(t);if(-1!==n)return n;const i=t.substr(0,3),r=wi.indexOf(i);if(-1!==r)return r;throw new Error(di("day",e))}}(e.day):e.day;i.push(t.isNumber(r)?r+1:`${r}+1`)}else i.push(1);for(const t of["hours","minutes","seconds","milliseconds"]){const n=e[t];i.push(void 0===n?0:n)}return i}function Si(e){const t=ki(e,!0).join(", ");return e.utc?`utc(${t})`:`datetime(${t})`}const Di={year:1,quarter:1,month:1,week:1,day:1,dayofyear:1,date:1,hours:1,minutes:1,seconds:1,milliseconds:1},Fi=D(Di);function zi(e){return t.isObject(e)?e.binned:Oi(e)}function Oi(e){return e&&e.startsWith("binned")}function _i(e){return e.startsWith("utc")}const Ci={"year-month":"%b %Y ","year-month-date":"%b %d, %Y "};function Ni(e){return Fi.filter((t=>Ai(e,t)))}function Pi(e){const t=Ni(e);return t[t.length-1]}function Ai(e,t){const n=e.indexOf(t);return!(n<0)&&(!(n>0&&"seconds"===t&&"i"===e.charAt(n-1))&&(!(e.length>n+3&&"day"===t&&"o"===e.charAt(n+3))&&!(n>0&&"year"===t&&"f"===e.charAt(n-1))))}function ji(e,t){let{end:n}=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{end:!1};const i=A(t),r=_i(e)?"utc":"";let o;const a={};for(const t of Fi)Ai(e,t)&&(a[t]="quarter"===(s=t)?`(${r}quarter(${i})-1)`:`${r}${s}(${i})`,o=t);var s;return n&&(a[o]+="+1"),function(e){const t=ki(e,!1).join(", ");return e.utc?`utc(${t})`:`datetime(${t})`}(a)}function Ti(e){if(!e)return;return`timeUnitSpecifier(${X(Ni(e))}, ${X(Ci)})`}function Ei(e){if(!e)return;let n;return t.isString(e)?n=Oi(e)?{unit:e.substring(6),binned:!0}:{unit:e}:t.isObject(e)&&(n={...e,...e.unit?{unit:e.unit}:{}}),_i(n.unit)&&(n.utc=!0,n.unit=n.unit.substring(3)),n}function Mi(e){let t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:e=>e;const n=Ei(e),i=Pi(n.unit);if(i&&"day"!==i){const e={year:2001,month:1,date:1,hours:0,minutes:0,seconds:0,milliseconds:0},{step:r,part:o}=qi(i,n.step);return`${t(Si({...e,[o]:+e[o]+r}))} - ${t(Si(e))}`}}const Li={year:1,month:1,date:1,hours:1,minutes:1,seconds:1,milliseconds:1};function qi(e){let t=arguments.length>1&&void 
0!==arguments[1]?arguments[1]:1;if(function(e){return!!Li[e]}(e))return{part:e,step:t};switch(e){case"day":case"dayofyear":return{part:"date",step:t};case"quarter":return{part:"month",step:3*t};case"week":return{part:"date",step:7*t}}}function Ui(e){return!!e?.field&&void 0!==e.equal}function Ri(e){return!!e?.field&&void 0!==e.lt}function Wi(e){return!!e?.field&&void 0!==e.lte}function Bi(e){return!!e?.field&&void 0!==e.gt}function Ii(e){return!!e?.field&&void 0!==e.gte}function Hi(e){if(e?.field){if(t.isArray(e.range)&&2===e.range.length)return!0;if(yn(e.range))return!0}return!1}function Vi(e){return!!e?.field&&(t.isArray(e.oneOf)||t.isArray(e.in))}function Gi(e){return Vi(e)||Ui(e)||Hi(e)||Ri(e)||Bi(e)||Wi(e)||Ii(e)}function Yi(e,t){return va(e,{timeUnit:t,wrapTime:!0})}function Xi(e){let t=!(arguments.length>1&&void 0!==arguments[1])||arguments[1];const{field:n}=e,i=Ei(e.timeUnit),{unit:r,binned:o}=i||{},a=ta(e,{expr:"datum"}),s=r?`time(${o?a:ji(r,n)})`:a;if(Ui(e))return`${s}===${Yi(e.equal,r)}`;if(Ri(e)){return`${s}<${Yi(e.lt,r)}`}if(Bi(e)){return`${s}>${Yi(e.gt,r)}`}if(Wi(e)){return`${s}<=${Yi(e.lte,r)}`}if(Ii(e)){return`${s}>=${Yi(e.gte,r)}`}if(Vi(e))return`indexof([${function(e,t){return e.map((e=>Yi(e,t)))}(e.oneOf,r).join(",")}], ${s}) !== -1`;if(function(e){return!!e?.field&&void 0!==e.valid}(e))return Qi(s,e.valid);if(Hi(e)){const{range:n}=e,i=yn(n)?{signal:`${n.signal}[0]`}:n[0],o=yn(n)?{signal:`${n.signal}[1]`}:n[1];if(null!==i&&null!==o&&t)return"inrange("+s+", ["+Yi(i,r)+", "+Yi(o,r)+"])";const a=[];return null!==i&&a.push(`${s} >= ${Yi(i,r)}`),null!==o&&a.push(`${s} <= ${Yi(o,r)}`),a.length>0?a.join(" && "):"true"}throw new Error(`Invalid field predicate: ${X(e)}`)}function Qi(e){return!(arguments.length>1&&void 0!==arguments[1])||arguments[1]?`isValid(${e}) && isFinite(+${e})`:`!isValid(${e}) || !isFinite(+${e})`}function Ji(e){return Gi(e)&&e.timeUnit?{...e,timeUnit:Ei(e.timeUnit)}:e}function Ki(e){return"quantitative"===e||"temporal"===e}function Zi(e){return"ordinal"===e||"nominal"===e}const er="quantitative",tr="ordinal",nr="temporal",ir="nominal",rr="geojson";const or={LINEAR:"linear",LOG:"log",POW:"pow",SQRT:"sqrt",SYMLOG:"symlog",IDENTITY:"identity",SEQUENTIAL:"sequential",TIME:"time",UTC:"utc",QUANTILE:"quantile",QUANTIZE:"quantize",THRESHOLD:"threshold",BIN_ORDINAL:"bin-ordinal",ORDINAL:"ordinal",POINT:"point",BAND:"band"},ar={linear:"numeric",log:"numeric",pow:"numeric",sqrt:"numeric",symlog:"numeric",identity:"numeric",sequential:"numeric",time:"time",utc:"time",ordinal:"ordinal","bin-ordinal":"bin-ordinal",point:"ordinal-position",band:"ordinal-position",quantile:"discretizing",quantize:"discretizing",threshold:"discretizing"};function sr(e,t){const n=ar[e],i=ar[t];return n===i||"ordinal-position"===n&&"time"===i||"ordinal-position"===i&&"time"===n}const lr={linear:0,log:1,pow:1,sqrt:1,symlog:1,identity:1,sequential:1,time:0,utc:0,point:10,band:11,ordinal:0,"bin-ordinal":0,quantile:0,quantize:0,threshold:0};function cr(e){return lr[e]}const ur=new Set(["linear","log","pow","sqrt","symlog"]),fr=new Set([...ur,"time","utc"]);function dr(e){return ur.has(e)}const mr=new Set(["quantile","quantize","threshold"]),pr=new Set([...fr,...mr,"sequential","identity"]),gr=new Set(["ordinal","bin-ordinal","point","band"]);function hr(e){return gr.has(e)}function yr(e){return pr.has(e)}function vr(e){return fr.has(e)}function br(e){return mr.has(e)}function xr(e){return 
e?.param}const{type:$r,domain:wr,range:kr,rangeMax:Sr,rangeMin:Dr,scheme:Fr,...zr}={type:1,domain:1,domainMax:1,domainMin:1,domainMid:1,domainRaw:1,align:1,range:1,rangeMax:1,rangeMin:1,scheme:1,bins:1,reverse:1,round:1,clamp:1,nice:1,base:1,exponent:1,constant:1,interpolate:1,zero:1,padding:1,paddingInner:1,paddingOuter:1},Or=D(zr);function _r(e,t){switch(t){case"type":case"domain":case"reverse":case"range":return!0;case"scheme":case"interpolate":return!["point","band","identity"].includes(e);case"bins":return!["point","band","identity","ordinal"].includes(e);case"round":return vr(e)||"band"===e||"point"===e;case"padding":case"rangeMin":case"rangeMax":return vr(e)||["point","band"].includes(e);case"paddingOuter":case"align":return["point","band"].includes(e);case"paddingInner":return"band"===e;case"domainMax":case"domainMid":case"domainMin":case"domainRaw":case"clamp":return vr(e);case"nice":return vr(e)||"quantize"===e||"threshold"===e;case"exponent":return"pow"===e;case"base":return"log"===e;case"constant":return"symlog"===e;case"zero":return yr(e)&&!p(["log","time","utc","threshold","quantile"],e)}}function Cr(e,t){switch(t){case"interpolate":case"scheme":case"domainMid":return qe(e)?void 0:`Cannot use the scale property "${t}" with non-color channel.`;case"align":case"type":case"bins":case"domain":case"domainMax":case"domainMin":case"domainRaw":case"range":case"base":case"exponent":case"constant":case"nice":case"padding":case"paddingInner":case"paddingOuter":case"rangeMax":case"rangeMin":case"reverse":case"round":case"clamp":case"zero":return}}function Nr(e){const{channel:t,channelDef:n,markDef:i,scale:r,config:o}=e,a=Er(e);return Ro(n)&&!rn(n.aggregate)&&r&&vr(r.get("type"))?function(e){let{fieldDef:t,channel:n,markDef:i,ref:r,config:o}=e;const a=Nn("invalid",i,o);if(null===a)return[Pr(t,n),r];return r}({fieldDef:n,channel:t,markDef:i,ref:a,config:o}):a}function Pr(e,t){return{test:Ar(e,!0),..."y"===tt(t)?{field:{group:"height"}}:{value:0}}}function Ar(e){let n=!(arguments.length>1&&void 0!==arguments[1])||arguments[1];return Qi(t.isString(e)?e:ta(e,{expr:"datum"}),!n)}function jr(e,t,n,i){const r={};if(t&&(r.scale=t),Bo(e)){const{datum:t}=e;vi(t)?r.signal=Si(t):yn(t)?r.signal=t.signal:mn(t)?r.signal=t.expr:r.value=t}else r.field=ta(e,n);if(i){const{offset:e,band:t}=i;e&&(r.offset=e),t&&(r.band=t)}return r}function Tr(e){let{scaleName:t,fieldOrDatumDef:n,fieldOrDatumDef2:i,offset:r,startSuffix:o,endSuffix:a="end",bandPosition:s=.5}=e;const l=!yn(s)&&01&&void 0!==arguments[1]?arguments[1]:{},n=e.field;const i=t.prefix;let r=t.suffix,o="";if(function(e){return"count"===e.aggregate}(e))n=B("count");else{let i;if(!t.nofn)if(function(e){return"op"in e}(e))i=e.op;else{const{bin:a,aggregate:s,timeUnit:l}=e;ln(a)?(i=sn(a),r=(t.binSuffix??"")+(t.suffix??"")):s?en(s)?(o=`["${n}"]`,n=`argmax_${s.argmax}`):Zt(s)?(o=`["${n}"]`,n=`argmin_${s.argmin}`):i=String(s):l&&!zi(l)&&(i=function(e){const{utc:t,...n}=Ei(e);return n.unit?(t?"utc":"")+D(n).map((e=>_(`${"unit"===e?"":`_${e}_`}${n[e]}`))).join(""):(t?"utc":"")+"timeunit"+D(n).map((e=>_(`_${e}_${n[e]}`))).join("")}(l),r=(!["range","mid"].includes(t.binSuffix)&&t.binSuffix||"")+(t.suffix??""))}i&&(n=n?`${i}_${n}`:i)}return r&&(n=`${n}_${r}`),i&&(n=`${i}_${n}`),t.forAs?L(n):t.expr?j(n,t.expr)+o:E(n)+o}function na(e){switch(e.type){case"nominal":case"ordinal":case"geojson":return!0;case"quantitative":return Ro(e)&&!!e.bin;case"temporal":return!1}throw new Error(Zn(e.type))}const ia=(e,t)=>{switch(t.fieldTitle){case"plain":return 
e.field;case"functional":return function(e){const{aggregate:t,bin:n,timeUnit:i,field:r}=e;if(en(t))return`${r} for argmax(${t.argmax})`;if(Zt(t))return`${r} for argmin(${t.argmin})`;const o=i&&!zi(i)?Ei(i):void 0,a=t||o?.unit||o?.maxbins&&"timeunit"||ln(n)&&"bin";return a?`${a.toUpperCase()}(${r})`:r}(e);default:return function(e,t){const{field:n,bin:i,timeUnit:r,aggregate:o}=e;if("count"===o)return t.countTitle;if(ln(i))return`${n} (binned)`;if(r&&!zi(r)){const e=Ei(r)?.unit;if(e)return`${n} (${Ni(e).join("-")})`}else if(o)return en(o)?`${n} for max ${o.argmax}`:Zt(o)?`${n} for min ${o.argmin}`:`${P(o)} of ${n}`;return n}(e,t)}};let ra=ia;function oa(e){ra=e}function aa(e,t,n){let{allowDisabling:i,includeDefault:r=!0}=n;const o=sa(e)?.title;if(!Ro(e))return o??e.title;const a=e,s=r?la(a,t):void 0;return i?U(o,a.title,s):o??a.title??s}function sa(e){return Jo(e)&&e.axis?e.axis:Ko(e)&&e.legend?e.legend:Co(e)&&e.header?e.header:void 0}function la(e,t){return ra(e,t)}function ca(e){if(Zo(e)){const{format:t,formatType:n}=e;return{format:t,formatType:n}}{const t=sa(e)??{},{format:n,formatType:i}=t;return{format:n,formatType:i}}}function ua(e){return Ro(e)?e:qo(e)?e.condition:void 0}function fa(e){return Go(e)?e:Uo(e)?e.condition:void 0}function da(e,n,i){let r=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{};if(t.isString(e)||t.isNumber(e)||t.isBoolean(e)){return yi(function(e,t,n){return`Channel ${e} is a ${t}. Converted to {value: ${X(n)}}.`}(n,t.isString(e)?"string":t.isNumber(e)?"number":"boolean",e)),{value:e}}return Go(e)?ma(e,n,i,r):Uo(e)?{...e,condition:ma(e.condition,n,i,r)}:e}function ma(e,n,i,r){if(Zo(e)){const{format:t,formatType:o,...a}=e;if(Lr(o)&&!i.customFormatTypes)return yi(Jn(n)),ma(a,n,i,r)}else{const t=Jo(e)?"axis":Ko(e)?"legend":Co(e)?"header":null;if(t&&e[t]){const{format:o,formatType:a,...s}=e[t];if(Lr(a)&&!i.customFormatTypes)return yi(Jn(n)),ma({...e,[t]:s},n,i,r)}}return Ro(e)?pa(e,n,r):function(e){let n=e.type;if(n)return e;const{datum:i}=e;return n=t.isNumber(i)?"quantitative":t.isString(i)?"nominal":vi(i)?"temporal":void 0,{...e,type:n}}(e)}function pa(e,n){let{compositeMark:i=!1}=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{};const{aggregate:r,timeUnit:o,bin:a,field:s}=e,l={...e};if(i||!r||tn(r)||en(r)||Zt(r)||(yi(function(e){return`Invalid aggregation operator "${e}".`}(r)),delete l.aggregate),o&&(l.timeUnit=Ei(o)),s&&(l.field=`${s}`),ln(a)&&(l.bin=ga(a,n)),cn(a)&&!zt(n)&&yi(function(e){return`Channel ${e} should not be used with "binned" bin.`}(n)),Yo(l)){const{type:e}=l,t=function(e){if(e)switch(e=e.toLowerCase()){case"q":case er:return"quantitative";case"t":case nr:return"temporal";case"o":case tr:return"ordinal";case"n":case ir:return"nominal";case rr:return"geojson"}}(e);e!==t&&(l.type=t),"quantitative"!==e&&rn(r)&&(yi(function(e,t){return`Invalid field type "${e}" for aggregate: "${t}", using "quantitative" instead.`}(e,r)),l.type="quantitative")}else if(!et(n)){const e=function(e,n){switch(n){case"latitude":case"longitude":return"quantitative";case"row":case"column":case"facet":case"shape":case"strokeDash":return"nominal";case"order":return"ordinal"}if(Ao(e)&&t.isArray(e.sort))return"ordinal";const{aggregate:i,bin:r,timeUnit:o}=e;if(o)return"temporal";if(r||i&&!en(i)&&!Zt(i))return"quantitative";if(Qo(e)&&e.scale?.type)switch(ar[e.scale.type]){case"numeric":case"discretizing":return"quantitative";case"time":return"temporal"}return"nominal"}(l,n);l.type=e}if(Yo(l)){const{compatible:e,warning:t}=function(e,t){const 
n=e.type;if("geojson"===n&&"shape"!==t)return{compatible:!1,warning:`Channel ${t} should not be used with a geojson data.`};switch(t){case Q:case J:case K:return na(e)?ha:{compatible:!1,warning:ii(t)};case Z:case ee:case ie:case re:case me:case pe:case ge:case Se:case Fe:case ze:case Oe:case _e:case Ce:case ve:case se:case oe:case Ne:return ha;case ue:case de:case ce:case fe:return n!==er?{compatible:!1,warning:`Channel ${t} should be used with a quantitative field only, not ${e.type} field.`}:ha;case be:case xe:case $e:case we:case ye:case le:case ae:case te:case ne:return"nominal"!==n||e.sort?ha:{compatible:!1,warning:`Channel ${t} should not be used with an unsorted discrete field.`};case he:case ke:return na(e)||Qo(i=e)&&br(i.scale?.type)?ha:{compatible:!1,warning:ri(t)};case De:return"nominal"!==e.type||"sort"in e?ha:{compatible:!1,warning:"Channel order is inappropriate for nominal field, which has no inherent order."}}var i}(l,n)||{};!1===e&&yi(t)}if(Ao(l)&&t.isString(l.sort)){const{sort:e}=l;if(Do(e))return{...l,sort:{encoding:e}};const t=e.substr(1);if("-"===e.charAt(0)&&Do(t))return{...l,sort:{encoding:t,order:"descending"}}}if(Co(l)){const{header:e}=l;if(e){const{orient:t,...n}=e;if(t)return{...l,header:{...n,labelOrient:e.labelOrient||t,titleOrient:e.titleOrient||t}}}}return l}function ga(e,n){return t.isBoolean(e)?{maxbins:dn(n)}:"binned"===e?{binned:!0}:e.maxbins||e.step?e:{...e,maxbins:dn(n)}}const ha={compatible:!0};function ya(e){const{formatType:t}=ca(e);return"time"===t||!t&&((n=e)&&("temporal"===n.type||Ro(n)&&!!n.timeUnit));var n}function va(e,n){let{timeUnit:i,type:r,wrapTime:o,undefinedIfExprNotRequired:a}=n;const s=i&&Ei(i)?.unit;let l,c=s||"temporal"===r;return mn(e)?l=e.expr:yn(e)?l=e.signal:vi(e)?(c=!0,l=Si(e)):(t.isString(e)||t.isNumber(e))&&c&&(l=`datetime(${X(e)})`,function(e){return!!Di[e]}(s)&&(t.isNumber(e)&&e<1e4||t.isString(e)&&isNaN(Date.parse(e)))&&(l=Si({[s]:e}))),l?o&&c?`time(${l})`:l:a?void 0:X(e)}function ba(e,t){const{type:n}=e;return t.map((t=>{const i=va(t,{timeUnit:Ro(e)&&!zi(e.timeUnit)?e.timeUnit:void 0,type:n,undefinedIfExprNotRequired:!0});return void 0!==i?{signal:i}:t}))}function xa(e,t){return ln(e.bin)?Ht(t)&&["ordinal","nominal"].includes(e.type):(console.warn("Only call this method for binned field defs."),!1)}const $a={labelAlign:{part:"labels",vgProp:"align"},labelBaseline:{part:"labels",vgProp:"baseline"},labelColor:{part:"labels",vgProp:"fill"},labelFont:{part:"labels",vgProp:"font"},labelFontSize:{part:"labels",vgProp:"fontSize"},labelFontStyle:{part:"labels",vgProp:"fontStyle"},labelFontWeight:{part:"labels",vgProp:"fontWeight"},labelOpacity:{part:"labels",vgProp:"opacity"},labelOffset:null,labelPadding:null,gridColor:{part:"grid",vgProp:"stroke"},gridDash:{part:"grid",vgProp:"strokeDash"},gridDashOffset:{part:"grid",vgProp:"strokeDashOffset"},gridOpacity:{part:"grid",vgProp:"opacity"},gridWidth:{part:"grid",vgProp:"strokeWidth"},tickColor:{part:"ticks",vgProp:"stroke"},tickDash:{part:"ticks",vgProp:"strokeDash"},tickDashOffset:{part:"ticks",vgProp:"strokeDashOffset"},tickOpacity:{part:"ticks",vgProp:"opacity"},tickSize:null,tickWidth:{part:"ticks",vgProp:"strokeWidth"}};function wa(e){return e?.condition}const 
ka=["domain","grid","labels","ticks","title"],Sa={grid:"grid",gridCap:"grid",gridColor:"grid",gridDash:"grid",gridDashOffset:"grid",gridOpacity:"grid",gridScale:"grid",gridWidth:"grid",orient:"main",bandPosition:"both",aria:"main",description:"main",domain:"main",domainCap:"main",domainColor:"main",domainDash:"main",domainDashOffset:"main",domainOpacity:"main",domainWidth:"main",format:"main",formatType:"main",labelAlign:"main",labelAngle:"main",labelBaseline:"main",labelBound:"main",labelColor:"main",labelFlush:"main",labelFlushOffset:"main",labelFont:"main",labelFontSize:"main",labelFontStyle:"main",labelFontWeight:"main",labelLimit:"main",labelLineHeight:"main",labelOffset:"main",labelOpacity:"main",labelOverlap:"main",labelPadding:"main",labels:"main",labelSeparation:"main",maxExtent:"main",minExtent:"main",offset:"both",position:"main",tickCap:"main",tickColor:"main",tickDash:"main",tickDashOffset:"main",tickMinStep:"both",tickOffset:"both",tickOpacity:"main",tickRound:"both",ticks:"main",tickSize:"main",tickWidth:"both",title:"main",titleAlign:"main",titleAnchor:"main",titleAngle:"main",titleBaseline:"main",titleColor:"main",titleFont:"main",titleFontSize:"main",titleFontStyle:"main",titleFontWeight:"main",titleLimit:"main",titleLineHeight:"main",titleOpacity:"main",titlePadding:"main",titleX:"main",titleY:"main",encode:"both",scale:"both",tickBand:"both",tickCount:"both",tickExtra:"both",translate:"both",values:"both",zindex:"both"},Da={orient:1,aria:1,bandPosition:1,description:1,domain:1,domainCap:1,domainColor:1,domainDash:1,domainDashOffset:1,domainOpacity:1,domainWidth:1,format:1,formatType:1,grid:1,gridCap:1,gridColor:1,gridDash:1,gridDashOffset:1,gridOpacity:1,gridWidth:1,labelAlign:1,labelAngle:1,labelBaseline:1,labelBound:1,labelColor:1,labelFlush:1,labelFlushOffset:1,labelFont:1,labelFontSize:1,labelFontStyle:1,labelFontWeight:1,labelLimit:1,labelLineHeight:1,labelOffset:1,labelOpacity:1,labelOverlap:1,labelPadding:1,labels:1,labelSeparation:1,maxExtent:1,minExtent:1,offset:1,position:1,tickBand:1,tickCap:1,tickColor:1,tickCount:1,tickDash:1,tickDashOffset:1,tickExtra:1,tickMinStep:1,tickOffset:1,tickOpacity:1,tickRound:1,ticks:1,tickSize:1,tickWidth:1,title:1,titleAlign:1,titleAnchor:1,titleAngle:1,titleBaseline:1,titleColor:1,titleFont:1,titleFontSize:1,titleFontStyle:1,titleFontWeight:1,titleLimit:1,titleLineHeight:1,titleOpacity:1,titlePadding:1,titleX:1,titleY:1,translate:1,values:1,zindex:1},Fa={...Da,style:1,labelExpr:1,encoding:1};function za(e){return!!Fa[e]}const Oa=D({axis:1,axisBand:1,axisBottom:1,axisDiscrete:1,axisLeft:1,axisPoint:1,axisQuantitative:1,axisRight:1,axisTemporal:1,axisTop:1,axisX:1,axisXBand:1,axisXDiscrete:1,axisXPoint:1,axisXQuantitative:1,axisXTemporal:1,axisY:1,axisYBand:1,axisYDiscrete:1,axisYPoint:1,axisYQuantitative:1,axisYTemporal:1});function _a(e){return"mark"in e}class Ca{constructor(e,t){this.name=e,this.run=t}hasMatchingType(e){return!!_a(e)&&(go(t=e.mark)?t.type:t)===this.name;var t}}function Na(e,n){const i=e&&e[n];return!!i&&(t.isArray(i)?g(i,(e=>!!e.field)):Ro(i)||qo(i))}function Pa(e,n){const i=e&&e[n];return!!i&&(t.isArray(i)?g(i,(e=>!!e.field)):Ro(i)||Bo(i)||Uo(i))}function Aa(e,t){if(zt(t)){const n=e[t];if((Ro(n)||Bo(n))&&(Zi(n.type)||Ro(n)&&n.timeUnit)){return Pa(e,at(t))}}return!1}function ja(e){return g(Be,(n=>{if(Na(e,n)){const i=e[n];if(t.isArray(i))return g(i,(e=>!!e.aggregate));{const e=ua(i);return e&&!!e.aggregate}}return!1}))}function Ta(e,t){const n=[],i=[],r=[],o=[],a={};return 
La(e,((s,l)=>{if(Ro(s)){const{field:c,aggregate:u,bin:f,timeUnit:d,...m}=s;if(u||d||f){const e=sa(s),p=e?.title;let g=ta(s,{forAs:!0});const h={...p?[]:{title:aa(s,t,{allowDisabling:!0})},...m,field:g};if(u){let e;if(en(u)?(e="argmax",g=ta({op:"argmax",field:u.argmax},{forAs:!0}),h.field=`${g}.${c}`):Zt(u)?(e="argmin",g=ta({op:"argmin",field:u.argmin},{forAs:!0}),h.field=`${g}.${c}`):"boxplot"!==u&&"errorbar"!==u&&"errorband"!==u&&(e=u),e){const t={op:e,as:g};c&&(t.field=c),o.push(t)}}else if(n.push(g),Yo(s)&&ln(f)){if(i.push({bin:f,field:c,as:g}),n.push(ta(s,{binSuffix:"end"})),xa(s,l)&&n.push(ta(s,{binSuffix:"range"})),zt(l)){const e={field:`${g}_end`};a[`${l}2`]=e}h.bin="binned",et(l)||(h.type=er)}else if(d&&!zi(d)){r.push({timeUnit:d,field:c,as:g});const e=Yo(s)&&s.type!==nr&&"time";e&&(l===Se||l===Oe?h.formatType=e:!function(e){return!!kt[e]}(l)?zt(l)&&(h.axis={formatType:e,...h.axis}):h.legend={formatType:e,...h.legend})}a[l]=h}else n.push(c),a[l]=e[l]}else a[l]=e[l]})),{bins:i,timeUnits:r,aggregate:o,groupby:n,encoding:a}}function Ea(e,t,n){const i=Vt(t,n);if(!i)return!1;if("binned"===i){const n=e[t===te?Z:ee];return!!(Ro(n)&&Ro(e[t])&&cn(n.bin))}return!0}function Ma(e,t){const n={};for(const i of D(e)){const r=da(e[i],i,t,{compositeMark:!0});n[i]=r}return n}function La(e,n,i){if(e)for(const r of D(e)){const o=e[r];if(t.isArray(o))for(const e of o)n.call(i,e,r);else n.call(i,o,r)}}function qa(e,n){return D(n).reduce(((i,r)=>{switch(r){case Z:case ee:case _e:case Ne:case Ce:case te:case ne:case ie:case re:case se:case le:case oe:case ae:case ce:case ue:case fe:case de:case Se:case he:case ve:case Oe:return i;case De:if("line"===e||"trail"===e)return i;case Fe:case ze:{const e=n[r];if(t.isArray(e)||Ro(e))for(const n of t.array(e))n.aggregate||i.push(ta(n,{}));return i}case ye:if("trail"===e)return i;case me:case pe:case ge:case be:case xe:case $e:case ke:case we:{const e=ua(n[r]);return e&&!e.aggregate&&i.push(ta(e,{})),i}}}),[])}function Ua(e,n,i){let r=!(arguments.length>3&&void 0!==arguments[3])||arguments[3];if("tooltip"in i)return{tooltip:i.tooltip};return{tooltip:[...e.map((e=>{let{fieldPrefix:t,titlePrefix:i}=e;const o=r?` of ${Ra(n)}`:"";return{field:t+n.field,type:n.type,title:yn(i)?{signal:`${i}"${escape(o)}"`}:i+o}})),...b(function(e){const n=[];for(const i of D(e))if(Na(e,i)){const r=e[i],o=t.array(r);for(const e of o)Ro(e)?n.push(e):qo(e)&&n.push(e.condition)}return n}(i).map(ea),d)]}}function Ra(e){const{title:t,field:n}=e;return U(t,n)}function Wa(e,n,i,r,o){const{scale:a,axis:s}=i;return l=>{let{partName:c,mark:u,positionPrefix:f,endPositionPrefix:d,extraEncoding:m={}}=l;const p=Ra(i);return Ba(e,c,o,{mark:u,encoding:{[n]:{field:`${f}_${i.field}`,type:i.type,...void 0!==p?{title:p}:{},...void 0!==a?{scale:a}:{},...void 0!==s?{axis:s}:{}},...t.isString(d)?{[`${n}2`]:{field:`${d}_${i.field}`}}:{},...r,...m}})}}function Ba(e,n,i,r){const{clip:o,color:a,opacity:s}=e,l=e.type;return e[n]||void 0===e[n]&&i[n]?[{...r,mark:{...i[n],...o?{clip:o}:{},...a?{color:a}:{},...s?{opacity:s}:{},...go(r.mark)?r.mark:{type:r.mark},style:`${l}-${String(n)}`,...t.isBoolean(e[n])?{}:e[n]}}]:[]}function Ia(e,t,n){const{encoding:i}=e,r="vertical"===t?"y":"x",o=i[r],a=i[`${r}2`],s=i[`${r}Error`],l=i[`${r}Error2`];return{continuousAxisChannelDef:Ha(o,n),continuousAxisChannelDef2:Ha(a,n),continuousAxisChannelDefError:Ha(s,n),continuousAxisChannelDefError2:Ha(l,n),continuousAxis:r}}function Ha(e,t){if(e?.aggregate){const{aggregate:n,...i}=e;return n!==t&&yi(function(e,t){return`Continuous axis 
should not have customized aggregation function ${e}; ${t} already agregates the axis.`}(n,t)),i}return e}function Va(e,t){const{mark:n,encoding:i}=e,{x:r,y:o}=i;if(go(n)&&n.orient)return n.orient;if(Io(r)){if(Io(o)){const e=Ro(r)&&r.aggregate,n=Ro(o)&&o.aggregate;if(e||n!==t){if(n||e!==t){if(e===t&&n===t)throw new Error("Both x and y cannot have aggregate");return ya(o)&&!ya(r)?"horizontal":"vertical"}return"horizontal"}return"vertical"}return"horizontal"}if(Io(o))return"vertical";throw new Error(`Need a valid continuous axis for ${t}s`)}const Ga="boxplot",Ya=new Ca(Ga,Qa);function Xa(e){return t.isNumber(e)?"tukey":e}function Qa(e,n){let{config:i}=n;e={...e,encoding:Ma(e.encoding,i)};const{mark:r,encoding:o,params:a,projection:s,...l}=e,c=go(r)?r:{type:r};a&&yi(Gn("boxplot"));const u=c.extent??i.boxplot.extent,d=Nn("size",c,i),m=c.invalid,p=Xa(u),{bins:g,timeUnits:h,transform:y,continuousAxisChannelDef:v,continuousAxis:b,groupby:x,aggregate:$,encodingWithoutContinuousAxis:w,ticksOrient:k,boxOrient:D,customTooltipWithoutAggregatedField:F}=function(e,n,i){const r=Va(e,Ga),{continuousAxisChannelDef:o,continuousAxis:a}=Ia(e,r,Ga),s=o.field,l=L(s),c=Xa(n),u=[...Ja(s),{op:"median",field:s,as:`mid_box_${l}`},{op:"min",field:s,as:("min-max"===c?"lower_whisker_":"min_")+l},{op:"max",field:s,as:("min-max"===c?"upper_whisker_":"max_")+l}],f="min-max"===c||"tukey"===c?[]:[{calculate:`datum["upper_box_${l}"] - datum["lower_box_${l}"]`,as:`iqr_${l}`},{calculate:`min(datum["upper_box_${l}"] + datum["iqr_${l}"] * ${n}, datum["max_${l}"])`,as:`upper_whisker_${l}`},{calculate:`max(datum["lower_box_${l}"] - datum["iqr_${l}"] * ${n}, datum["min_${l}"])`,as:`lower_whisker_${l}`}],{[a]:d,...m}=e.encoding,{customTooltipWithoutAggregatedField:p,filteredEncoding:g}=function(e){const{tooltip:n,...i}=e;if(!n)return{filteredEncoding:i};let r,o;if(t.isArray(n)){for(const e of n)e.aggregate?(r||(r=[]),r.push(e)):(o||(o=[]),o.push(e));r&&(i.tooltip=r)}else n.aggregate?i.tooltip=n:o=n;return t.isArray(o)&&1===o.length&&(o=o[0]),{customTooltipWithoutAggregatedField:o,filteredEncoding:i}}(m),{bins:h,timeUnits:y,aggregate:v,groupby:b,encoding:x}=Ta(g,i),$="vertical"===r?"horizontal":"vertical",w=r,k=[...h,...y,{aggregate:[...v,...u],groupby:b},...f];return{bins:h,timeUnits:y,transform:k,groupby:b,aggregate:v,continuousAxisChannelDef:o,continuousAxis:a,encodingWithoutContinuousAxis:x,ticksOrient:$,boxOrient:w,customTooltipWithoutAggregatedField:p}}(e,u,i),z=L(v.field),{color:O,size:_,...C}=w,N=e=>Wa(c,b,v,e,i.boxplot),P=N(C),A=N(w),j=(t.isObject(i.boxplot.box)?i.boxplot.box.color:i.mark.color)||"#4c78a8",T=N({...C,..._?{size:_}:{},color:{condition:{test:`datum['lower_box_${v.field}'] >= datum['upper_box_${v.field}']`,...O||{value:j}}}}),E=Ua([{fieldPrefix:"min-max"===p?"upper_whisker_":"max_",titlePrefix:"Max"},{fieldPrefix:"upper_box_",titlePrefix:"Q3"},{fieldPrefix:"mid_box_",titlePrefix:"Median"},{fieldPrefix:"lower_box_",titlePrefix:"Q1"},{fieldPrefix:"min-max"===p?"lower_whisker_":"min_",titlePrefix:"Min"}],v,w),M={type:"tick",color:"black",opacity:1,orient:k,invalid:m,aria:!1},q="min-max"===p?E:Ua([{fieldPrefix:"upper_whisker_",titlePrefix:"Upper Whisker"},{fieldPrefix:"lower_whisker_",titlePrefix:"Lower 
Whisker"}],v,w),U=[...P({partName:"rule",mark:{type:"rule",invalid:m,aria:!1},positionPrefix:"lower_whisker",endPositionPrefix:"lower_box",extraEncoding:q}),...P({partName:"rule",mark:{type:"rule",invalid:m,aria:!1},positionPrefix:"upper_box",endPositionPrefix:"upper_whisker",extraEncoding:q}),...P({partName:"ticks",mark:M,positionPrefix:"lower_whisker",extraEncoding:q}),...P({partName:"ticks",mark:M,positionPrefix:"upper_whisker",extraEncoding:q})],R=[..."tukey"!==p?U:[],...A({partName:"box",mark:{type:"bar",...d?{size:d}:{},orient:D,invalid:m,ariaRoleDescription:"box"},positionPrefix:"lower_box",endPositionPrefix:"upper_box",extraEncoding:E}),...T({partName:"median",mark:{type:"tick",invalid:m,...t.isObject(i.boxplot.median)&&i.boxplot.median.color?{color:i.boxplot.median.color}:{},...d?{size:d}:{},orient:k,aria:!1},positionPrefix:"mid_box",extraEncoding:E})];if("min-max"===p)return{...l,transform:(l.transform??[]).concat(y),layer:R};const W=`datum["lower_box_${v.field}"]`,B=`datum["upper_box_${v.field}"]`,I=`(${B} - ${W})`,H=`${W} - ${u} * ${I}`,V=`${B} + ${u} * ${I}`,G=`datum["${v.field}"]`,Y={joinaggregate:Ja(v.field),groupby:x},X={transform:[{filter:`(${H} <= ${G}) && (${G} <= ${V})`},{aggregate:[{op:"min",field:v.field,as:`lower_whisker_${z}`},{op:"max",field:v.field,as:`upper_whisker_${z}`},{op:"min",field:`lower_box_${v.field}`,as:`lower_box_${z}`},{op:"max",field:`upper_box_${v.field}`,as:`upper_box_${z}`},...$],groupby:x}],layer:U},{tooltip:Q,...J}=C,{scale:K,axis:Z}=v,ee=Ra(v),te=f(Z,["title"]),ne=Ba(c,"outliers",i.boxplot,{transform:[{filter:`(${G} < ${H}) || (${G} > ${V})`}],mark:"point",encoding:{[b]:{field:v.field,type:v.type,...void 0!==ee?{title:ee}:{},...void 0!==K?{scale:K}:{},...S(te)?{}:{axis:te}},...J,...O?{color:O}:{},...F?{tooltip:F}:{}}})[0];let ie;const re=[...g,...h,Y];return ne?ie={transform:re,layer:[ne,X]}:(ie=X,ie.transform.unshift(...re)),{...l,layer:[ie,{transform:y,layer:R}]}}function Ja(e){const t=L(e);return[{op:"q1",field:e,as:`lower_box_${t}`},{op:"q3",field:e,as:`upper_box_${t}`}]}const Ka="errorbar",Za=new Ca(Ka,es);function es(e,t){let{config:n}=t;e={...e,encoding:Ma(e.encoding,n)};const{transform:i,continuousAxisChannelDef:r,continuousAxis:o,encodingWithoutContinuousAxis:a,ticksOrient:s,markDef:l,outerSpec:c,tooltipEncoding:u}=ns(e,Ka,n);delete a.size;const f=Wa(l,o,r,a,n.errorbar),d=l.thickness,m=l.size,p={type:"tick",orient:s,aria:!1,...void 0!==d?{thickness:d}:{},...void 0!==m?{size:m}:{}},g=[...f({partName:"ticks",mark:p,positionPrefix:"lower",extraEncoding:u}),...f({partName:"ticks",mark:p,positionPrefix:"upper",extraEncoding:u}),...f({partName:"rule",mark:{type:"rule",ariaRoleDescription:"errorbar",...void 0!==d?{size:d}:{}},positionPrefix:"lower",endPositionPrefix:"upper",extraEncoding:u})];return{...c,transform:i,...g.length>1?{layer:g}:{...g[0]}}}function ts(e,t){const{encoding:n}=e;if(function(e){return(Go(e.x)||Go(e.y))&&!Go(e.x2)&&!Go(e.y2)&&!Go(e.xError)&&!Go(e.xError2)&&!Go(e.yError)&&!Go(e.yError2)}(n))return{orient:Va(e,t),inputType:"raw"};const i=function(e){return Go(e.x2)||Go(e.y2)}(n),r=function(e){return Go(e.xError)||Go(e.xError2)||Go(e.yError)||Go(e.yError2)}(n),o=n.x,a=n.y;if(i){if(r)throw new Error(`${t} cannot be both type aggregated-upper-lower and aggregated-error`);const e=n.x2,i=n.y2;if(Go(e)&&Go(i))throw new Error(`${t} cannot have both x2 and y2`);if(Go(e)){if(Io(o))return{orient:"horizontal",inputType:"aggregated-upper-lower"};throw new Error(`Both x and x2 have to be quantitative in 
${t}`)}if(Go(i)){if(Io(a))return{orient:"vertical",inputType:"aggregated-upper-lower"};throw new Error(`Both y and y2 have to be quantitative in ${t}`)}throw new Error("No ranged axis")}{const e=n.xError,i=n.xError2,r=n.yError,s=n.yError2;if(Go(i)&&!Go(e))throw new Error(`${t} cannot have xError2 without xError`);if(Go(s)&&!Go(r))throw new Error(`${t} cannot have yError2 without yError`);if(Go(e)&&Go(r))throw new Error(`${t} cannot have both xError and yError with both are quantiative`);if(Go(e)){if(Io(o))return{orient:"horizontal",inputType:"aggregated-error"};throw new Error("All x, xError, and xError2 (if exist) have to be quantitative")}if(Go(r)){if(Io(a))return{orient:"vertical",inputType:"aggregated-error"};throw new Error("All y, yError, and yError2 (if exist) have to be quantitative")}throw new Error("No ranged axis")}}function ns(e,t,n){const{mark:i,encoding:r,params:o,projection:a,...s}=e,l=go(i)?i:{type:i};o&&yi(Gn(t));const{orient:c,inputType:u}=ts(e,t),{continuousAxisChannelDef:f,continuousAxisChannelDef2:d,continuousAxisChannelDefError:m,continuousAxisChannelDefError2:p,continuousAxis:g}=Ia(e,c,t),{errorBarSpecificAggregate:h,postAggregateCalculates:y,tooltipSummary:v,tooltipTitleWithFieldName:b}=function(e,t,n,i,r,o,a,s){let l=[],c=[];const u=t.field;let f,d=!1;if("raw"===o){const t=e.center?e.center:e.extent?"iqr"===e.extent?"median":"mean":s.errorbar.center,n=e.extent?e.extent:"mean"===t?"stderr":"iqr";if("median"===t!=("iqr"===n)&&yi(function(e,t,n){return`${e} is not usually used with ${t} for ${n}.`}(t,n,a)),"stderr"===n||"stdev"===n)l=[{op:n,field:u,as:`extent_${u}`},{op:t,field:u,as:`center_${u}`}],c=[{calculate:`datum["center_${u}"] + datum["extent_${u}"]`,as:`upper_${u}`},{calculate:`datum["center_${u}"] - datum["extent_${u}"]`,as:`lower_${u}`}],f=[{fieldPrefix:"center_",titlePrefix:P(t)},{fieldPrefix:"upper_",titlePrefix:is(t,n,"+")},{fieldPrefix:"lower_",titlePrefix:is(t,n,"-")}],d=!0;else{let e,t,i;"ci"===n?(e="mean",t="ci0",i="ci1"):(e="median",t="q1",i="q3"),l=[{op:t,field:u,as:`lower_${u}`},{op:i,field:u,as:`upper_${u}`},{op:e,field:u,as:`center_${u}`}],f=[{fieldPrefix:"upper_",titlePrefix:aa({field:u,aggregate:i,type:"quantitative"},s,{allowDisabling:!1})},{fieldPrefix:"lower_",titlePrefix:aa({field:u,aggregate:t,type:"quantitative"},s,{allowDisabling:!1})},{fieldPrefix:"center_",titlePrefix:aa({field:u,aggregate:e,type:"quantitative"},s,{allowDisabling:!1})}]}}else{(e.center||e.extent)&&yi((m=e.center,`${(p=e.extent)?"extent ":""}${p&&m?"and ":""}${m?"center ":""}${p&&m?"are ":"is "}not needed when data are aggregated.`)),"aggregated-upper-lower"===o?(f=[],c=[{calculate:`datum["${n.field}"]`,as:`upper_${u}`},{calculate:`datum["${u}"]`,as:`lower_${u}`}]):"aggregated-error"===o&&(f=[{fieldPrefix:"",titlePrefix:u}],c=[{calculate:`datum["${u}"] + datum["${i.field}"]`,as:`upper_${u}`}],r?c.push({calculate:`datum["${u}"] + datum["${r.field}"]`,as:`lower_${u}`}):c.push({calculate:`datum["${u}"] - datum["${i.field}"]`,as:`lower_${u}`}));for(const e of c)f.push({fieldPrefix:e.as.substring(0,6),titlePrefix:M(M(e.calculate,'datum["',""),'"]',"")})}var 
m,p;return{postAggregateCalculates:c,errorBarSpecificAggregate:l,tooltipSummary:f,tooltipTitleWithFieldName:d}}(l,f,d,m,p,u,t,n),{[g]:x,["x"===g?"x2":"y2"]:$,["x"===g?"xError":"yError"]:w,["x"===g?"xError2":"yError2"]:k,...S}=r,{bins:D,timeUnits:F,aggregate:z,groupby:O,encoding:_}=Ta(S,n),C=[...z,...h],N="raw"!==u?[]:O,A=Ua(v,f,_,b);return{transform:[...s.transform??[],...D,...F,...0===C.length?[]:[{aggregate:C,groupby:N}],...y],groupby:N,continuousAxisChannelDef:f,continuousAxis:g,encodingWithoutContinuousAxis:_,ticksOrient:"vertical"===c?"horizontal":"vertical",markDef:l,outerSpec:s,tooltipEncoding:A}}function is(e,t,n){return`${P(e)} ${n} ${t}`}const rs="errorband",os=new Ca(rs,as);function as(e,t){let{config:n}=t;e={...e,encoding:Ma(e.encoding,n)};const{transform:i,continuousAxisChannelDef:r,continuousAxis:o,encodingWithoutContinuousAxis:a,markDef:s,outerSpec:l,tooltipEncoding:c}=ns(e,rs,n),u=s,f=Wa(u,o,r,a,n.errorband),d=void 0!==e.encoding.x&&void 0!==e.encoding.y;let m={type:d?"area":"rect"},p={type:d?"line":"rule"};const g={...u.interpolate?{interpolate:u.interpolate}:{},...u.tension&&u.interpolate?{tension:u.tension}:{}};return d?(m={...m,...g,ariaRoleDescription:"errorband"},p={...p,...g,aria:!1}):u.interpolate?yi(mi("interpolate")):u.tension&&yi(mi("tension")),{...l,transform:i,layer:[...f({partName:"band",mark:m,positionPrefix:"lower",endPositionPrefix:"upper",extraEncoding:c}),...f({partName:"borders",mark:p,positionPrefix:"lower",extraEncoding:c}),...f({partName:"borders",mark:p,positionPrefix:"upper",extraEncoding:c})]}}const ss={};function ls(e,t,n){const i=new Ca(e,t);ss[e]={normalizer:i,parts:n}}ls(Ga,Qa,["box","median","outliers","rule","ticks"]),ls(Ka,es,["ticks","rule"]),ls(rs,as,["band","borders"]);const cs=["gradientHorizontalMaxLength","gradientHorizontalMinLength","gradientVerticalMaxLength","gradientVerticalMinLength","unselectedOpacity"],us={titleAlign:"align",titleAnchor:"anchor",titleAngle:"angle",titleBaseline:"baseline",titleColor:"color",titleFont:"font",titleFontSize:"fontSize",titleFontStyle:"fontStyle",titleFontWeight:"fontWeight",titleLimit:"limit",titleLineHeight:"lineHeight",titleOrient:"orient",titlePadding:"offset"},fs={labelAlign:"align",labelAnchor:"anchor",labelAngle:"angle",labelBaseline:"baseline",labelColor:"color",labelFont:"font",labelFontSize:"fontSize",labelFontStyle:"fontStyle",labelFontWeight:"fontWeight",labelLimit:"limit",labelLineHeight:"lineHeight",labelOrient:"orient",labelPadding:"offset"},ds=D(us),ms=D(fs),ps=D({header:1,headerRow:1,headerColumn:1,headerFacet:1}),gs=["size","shape","fill","stroke","strokeDash","strokeWidth","opacity"],hs="_vgsid_",ys={point:{on:"click",fields:[hs],toggle:"event.shiftKey",resolve:"global",clear:"dblclick"},interval:{on:"[pointerdown, window:pointerup] > window:pointermove!",encodings:["x","y"],translate:"[pointerdown, window:pointerup] > window:pointermove!",zoom:"wheel!",mark:{fill:"#333",fillOpacity:.125,stroke:"white"},resolve:"global",clear:"dblclick"}};function vs(e){return"legend"===e||!!e?.legend}function bs(e){return vs(e)&&t.isObject(e)}function xs(e){return!!e?.select}function $s(e){const t=[];for(const n of e||[]){if(xs(n))continue;const{expr:e,bind:i,...r}=n;if(i&&e){const n={...r,bind:i,init:e};t.push(n)}else{const n={...r,...e?{update:e}:{},...i?{bind:i}:{}};t.push(n)}}return t}function ws(e){return"concat"in e}function ks(e){return"vconcat"in e}function Ss(e){return"hconcat"in e}function Ds(e){let{step:t,offsetIsDiscrete:n}=e;return n?t.for??"offset":"position"}function Fs(e){return 
t.isObject(e)&&void 0!==e.step}function zs(e){return e.view||e.width||e.height}const Os=D({align:1,bounds:1,center:1,columns:1,spacing:1});function _s(e,t){return e[t]??e["width"===t?"continuousWidth":"continuousHeight"]}function Cs(e,t){const n=Ns(e,t);return Fs(n)?n.step:Ps}function Ns(e,t){return U(e[t]??e["width"===t?"discreteWidth":"discreteHeight"],{step:e.step})}const Ps=20,As={background:"white",padding:5,timeFormat:"%b %d, %Y",countTitle:"Count of Records",view:{continuousWidth:200,continuousHeight:200,step:Ps},mark:{color:"#4c78a8",invalid:"filter",timeUnitBandSize:1},arc:{},area:{},bar:$o,circle:{},geoshape:{},image:{},line:{},point:{},rect:wo,rule:{color:"black"},square:{},text:{color:"black"},tick:{thickness:1},trail:{},boxplot:{size:14,extent:1.5,box:{},median:{color:"white"},outliers:{},rule:{},ticks:null},errorbar:{center:"mean",rule:!0,ticks:!1},errorband:{band:{opacity:.3},borders:!1},scale:{pointPadding:.5,barBandPaddingInner:.1,rectBandPaddingInner:0,bandWithNestedOffsetPaddingInner:.2,bandWithNestedOffsetPaddingOuter:.2,minBandSize:2,minFontSize:8,maxFontSize:40,minOpacity:.3,maxOpacity:.8,minSize:9,minStrokeWidth:1,maxStrokeWidth:4,quantileCount:4,quantizeCount:4,zero:!0},projection:{},legend:{gradientHorizontalMaxLength:200,gradientHorizontalMinLength:100,gradientVerticalMaxLength:200,gradientVerticalMinLength:64,unselectedOpacity:.35},header:{titlePadding:10,labelPadding:10},headerColumn:{},headerRow:{},headerFacet:{},selection:ys,style:{},title:{},facet:{spacing:20},concat:{spacing:20},normalizedNumberFormat:".0%"},js=["#4c78a8","#f58518","#e45756","#72b7b2","#54a24b","#eeca3b","#b279a2","#ff9da6","#9d755d","#bab0ac"],Ts={text:11,guideLabel:10,guideTitle:11,groupTitle:13,groupSubtitle:12},Es={blue:js[0],orange:js[1],red:js[2],teal:js[3],green:js[4],yellow:js[5],purple:js[6],pink:js[7],brown:js[8],gray0:"#000",gray1:"#111",gray2:"#222",gray3:"#333",gray4:"#444",gray5:"#555",gray6:"#666",gray7:"#777",gray8:"#888",gray9:"#999",gray10:"#aaa",gray11:"#bbb",gray12:"#ccc",gray13:"#ddd",gray14:"#eee",gray15:"#fff"};function Ms(e){const t=D(e||{}),n={};for(const i of t){const t=e[i];n[i]=wa(t)?kn(t):Sn(t)}return n}const Ls=[...vo,...Oa,...ps,"background","padding","legend","lineBreak","scale","style","title","view"];function qs(){let e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{};const{color:n,font:i,fontSize:r,selection:o,...a}=e,s=t.mergeConfig({},l(As),i?function(e){return{text:{font:e},style:{"guide-label":{font:e},"guide-title":{font:e},"group-title":{font:e},"group-subtitle":{font:e}}}}(i):{},n?function(){let e=arguments.length>0&&void 
0!==arguments[0]?arguments[0]:{};return{signals:[{name:"color",value:t.isObject(e)?{...Es,...e}:Es}],mark:{color:{signal:"color.blue"}},rule:{color:{signal:"color.gray0"}},text:{color:{signal:"color.gray0"}},style:{"guide-label":{fill:{signal:"color.gray0"}},"guide-title":{fill:{signal:"color.gray0"}},"group-title":{fill:{signal:"color.gray0"}},"group-subtitle":{fill:{signal:"color.gray0"}},cell:{stroke:{signal:"color.gray8"}}},axis:{domainColor:{signal:"color.gray13"},gridColor:{signal:"color.gray8"},tickColor:{signal:"color.gray13"}},range:{category:[{signal:"color.blue"},{signal:"color.orange"},{signal:"color.red"},{signal:"color.teal"},{signal:"color.green"},{signal:"color.yellow"},{signal:"color.purple"},{signal:"color.pink"},{signal:"color.brown"},{signal:"color.grey8"}]}}}(n):{},r?function(e){return{signals:[{name:"fontSize",value:t.isObject(e)?{...Ts,...e}:Ts}],text:{fontSize:{signal:"fontSize.text"}},style:{"guide-label":{fontSize:{signal:"fontSize.guideLabel"}},"guide-title":{fontSize:{signal:"fontSize.guideTitle"}},"group-title":{fontSize:{signal:"fontSize.groupTitle"}},"group-subtitle":{fontSize:{signal:"fontSize.groupSubtitle"}}}}}(r):{},a||{});o&&t.writeConfig(s,"selection",o,!0);const c=f(s,Ls);for(const e of["background","lineBreak","padding"])s[e]&&(c[e]=Sn(s[e]));for(const e of vo)s[e]&&(c[e]=pn(s[e]));for(const e of Oa)s[e]&&(c[e]=Ms(s[e]));for(const e of ps)s[e]&&(c[e]=pn(s[e]));return s.legend&&(c.legend=pn(s.legend)),s.scale&&(c.scale=pn(s.scale)),s.style&&(c.style=function(e){const t=D(e),n={};for(const i of t)n[i]=Ms(e[i]);return n}(s.style)),s.title&&(c.title=pn(s.title)),s.view&&(c.view=pn(s.view)),c}const Us=new Set(["view",...po]),Rs=["color","fontSize","background","padding","facet","concat","numberFormat","numberFormatType","normalizedNumberFormat","normalizedNumberFormatType","timeFormat","countTitle","header","axisQuantitative","axisTemporal","axisDiscrete","axisPoint","axisXBand","axisXPoint","axisXDiscrete","axisXQuantitative","axisXTemporal","axisYBand","axisYPoint","axisYDiscrete","axisYQuantitative","axisYTemporal","scale","selection","overlay"],Ws={view:["continuousWidth","continuousHeight","discreteWidth","discreteHeight","step"],area:["line","point"],bar:["binSpacing","continuousBandSize","discreteBandSize","minBandSize"],rect:["binSpacing","continuousBandSize","discreteBandSize","minBandSize"],line:["point"],tick:["bandSize","thickness"]};function Bs(e){e=l(e);for(const t of Rs)delete e[t];if(e.axis)for(const t in e.axis)wa(e.axis[t])&&delete e.axis[t];if(e.legend)for(const t of cs)delete e.legend[t];if(e.mark){for(const t of yo)delete e.mark[t];e.mark.tooltip&&t.isObject(e.mark.tooltip)&&delete e.mark.tooltip}e.params&&(e.signals=(e.signals||[]).concat($s(e.params)),delete e.params);for(const t of Us){for(const n of yo)delete e[t][n];const n=Ws[t];if(n)for(const i of n)delete e[t][i];Is(e,t)}for(const t of D(ss))delete e[t];!function(e){const{titleMarkConfig:t,subtitleMarkConfig:n,subtitle:i}=gn(e.title);S(t)||(e.style["group-title"]={...e.style["group-title"],...t});S(n)||(e.style["group-subtitle"]={...e.style["group-subtitle"],...n});S(i)?delete e.title:e.title=i}(e);for(const n in e)t.isObject(e[n])&&S(e[n])&&delete e[n];return S(e)?void 0:e}function Is(e,t,n,i){"view"===t&&(n="cell");const r={...e[t],...e.style[n??t]};S(r)||(e.style[n??t]=r),delete e[t]}function Hs(e){return"layer"in e}class Vs{map(e,t){return No(e)?this.mapFacet(e,t):function(e){return"repeat"in 
e}(e)?this.mapRepeat(e,t):Ss(e)?this.mapHConcat(e,t):ks(e)?this.mapVConcat(e,t):ws(e)?this.mapConcat(e,t):this.mapLayerOrUnit(e,t)}mapLayerOrUnit(e,t){if(Hs(e))return this.mapLayer(e,t);if(_a(e))return this.mapUnit(e,t);throw new Error(qn(e))}mapLayer(e,t){return{...e,layer:e.layer.map((e=>this.mapLayerOrUnit(e,t)))}}mapHConcat(e,t){return{...e,hconcat:e.hconcat.map((e=>this.map(e,t)))}}mapVConcat(e,t){return{...e,vconcat:e.vconcat.map((e=>this.map(e,t)))}}mapConcat(e,t){const{concat:n,...i}=e;return{...i,concat:n.map((e=>this.map(e,t)))}}mapFacet(e,t){return{...e,spec:this.map(e.spec,t)}}mapRepeat(e,t){return{...e,spec:this.map(e.spec,t)}}}const Gs={zero:1,center:1,normalize:1};const Ys=new Set([Jr,Zr,Kr,ro,no,lo,co,to,oo,ao]),Xs=new Set([Zr,Kr,Jr]);function Qs(e){return Ro(e)&&"quantitative"===Wo(e)&&!e.bin}function Js(e,t,n){let{orient:i,type:r}=n;const o="x"===t?"y":"radius",a="x"===t&&["bar","area"].includes(r),s=e[t],l=e[o];if(Ro(s)&&Ro(l))if(Qs(s)&&Qs(l)){if(s.stack)return t;if(l.stack)return o;const e=Ro(s)&&!!s.aggregate;if(e!==(Ro(l)&&!!l.aggregate))return e?t:o;if(a){if("vertical"===i)return o;if("horizontal"===i)return t}}else{if(Qs(s))return t;if(Qs(l))return o}else{if(Qs(s)){if(a&&"vertical"===i)return;return t}if(Qs(l)){if(a&&"horizontal"===i)return;return o}}}function Ks(e,n){const i=go(e)?e:{type:e},r=i.type;if(!Ys.has(r))return null;const o=Js(n,"x",i)||Js(n,"theta",i);if(!o)return null;const a=n[o],s=Ro(a)?ta(a,{}):void 0,l=function(e){switch(e){case"x":return"y";case"y":return"x";case"theta":return"radius";case"radius":return"theta"}}(o),c=[],u=new Set;if(n[l]){const e=n[l],t=Ro(e)?ta(e,{}):void 0;t&&t!==s&&(c.push(l),u.add(t))}const f="x"===l?"xOffset":"yOffset",d=n[f],m=Ro(d)?ta(d,{}):void 0;m&&m!==s&&(c.push(f),u.add(m));const p=St.reduce(((e,i)=>{if("tooltip"!==i&&Na(n,i)){const r=n[i];for(const n of t.array(r)){const t=ua(n);if(t.aggregate)continue;const r=ta(t,{});r&&u.has(r)||e.push({channel:i,fieldDef:t})}}return e}),[]);let g;return void 0!==a.stack?g=t.isBoolean(a.stack)?a.stack?"zero":null:a.stack:Xs.has(r)&&(g="zero"),g&&g in Gs?ja(n)&&0===p.length?null:(a?.scale?.type&&a?.scale?.type!==or.LINEAR&&a?.stack&&yi(function(e){return`Stack is applied to a non-linear scale (${e}).`}(a.scale.type)),Go(n[it(o)])?(void 0!==a.stack&&yi(`Cannot stack "${h=o}" if there is already "${h}2".`),null):(Ro(a)&&a.aggregate&&!on.has(a.aggregate)&&yi(`Stacking is applied even though the aggregate function is non-summative ("${a.aggregate}").`),{groupbyChannels:c,groupbyFields:u,fieldChannel:o,impute:null!==a.impute&&fo(r),stackBy:p,offset:g})):null;var h}function Zs(e,t,n){const i=pn(e),r=Nn("orient",i,n);if(i.orient=function(e,t,n){switch(e){case no:case lo:case co:case oo:case io:case eo:return}const{x:i,y:r,x2:o,y2:a}=t;switch(e){case Zr:if(Ro(i)&&(cn(i.bin)||Ro(r)&&r.aggregate&&!i.aggregate))return"vertical";if(Ro(r)&&(cn(r.bin)||Ro(i)&&i.aggregate&&!r.aggregate))return"horizontal";if(a||o){if(n)return n;if(!o)return(Ro(i)&&i.type===er&&!ln(i.bin)||Vo(i))&&Ro(r)&&cn(r.bin)?"horizontal":"vertical";if(!a)return(Ro(r)&&r.type===er&&!ln(r.bin)||Vo(r))&&Ro(i)&&cn(i.bin)?"vertical":"horizontal"}case ro:if(o&&(!Ro(i)||!cn(i.bin))&&a&&(!Ro(r)||!cn(r.bin)))return;case Kr:if(a)return Ro(r)&&cn(r.bin)?"horizontal":"vertical";if(o)return Ro(i)&&cn(i.bin)?"vertical":"horizontal";if(e===ro){if(i&&!r)return"vertical";if(r&&!i)return"horizontal"}case to:case ao:{const t=Ho(i),o=Ho(r);if(n)return 
n;if(t&&!o)return"tick"!==e?"horizontal":"vertical";if(!t&&o)return"tick"!==e?"vertical":"horizontal";if(t&&o)return"vertical";{const e=Yo(i)&&i.type===nr,t=Yo(r)&&r.type===nr;if(e&&!t)return"vertical";if(!e&&t)return"horizontal"}return}}return"vertical"}(i.type,t,r),void 0!==r&&r!==i.orient&&yi(`Specified orient "${i.orient}" overridden with "${r}".`),"bar"===i.type&&i.orient){const e=Nn("cornerRadiusEnd",i,n);if(void 0!==e){const n="horizontal"===i.orient&&t.x2||"vertical"===i.orient&&t.y2?["cornerRadius"]:xo[i.orient];for(const t of n)i[t]=e;void 0!==i.cornerRadiusEnd&&delete i.cornerRadiusEnd}}const o=Nn("opacity",i,n),a=Nn("fillOpacity",i,n);void 0===o&&void 0===a&&(i.opacity=function(e,t){if(p([no,ao,lo,co],e)&&!ja(t))return.7;return}(i.type,t));return void 0===Nn("cursor",i,n)&&(i.cursor=function(e,t,n){if(t.href||e.href||Nn("href",e,n))return"pointer";return e.cursor}(i,t,n)),i}function el(e){const{point:t,line:n,...i}=e;return D(i).length>1?i:i.type}function tl(e){for(const t of["line","area","rule","trail"])e[t]&&(e={...e,[t]:f(e[t],["point","line"])});return e}function nl(e){let n=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},i=arguments.length>2?arguments[2]:void 0;return"transparent"===e.point?{opacity:0}:e.point?t.isObject(e.point)?e.point:{}:void 0!==e.point?null:n.point||i.shape?t.isObject(n.point)?n.point:{}:void 0}function il(e){let t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{};return e.line?!0===e.line?{}:e.line:void 0!==e.line?null:t.line?!0===t.line?{}:t.line:void 0}class rl{name="path-overlay";hasMatchingType(e,t){if(_a(e)){const{mark:n,encoding:i}=e,r=go(n)?n:{type:n};switch(r.type){case"line":case"rule":case"trail":return!!nl(r,t[r.type],i);case"area":return!!nl(r,t[r.type],i)||!!il(r,t[r.type])}}return!1}run(e,t,n){const{config:i}=t,{params:r,projection:o,mark:a,name:s,encoding:l,...c}=e,d=Ma(l,i),m=go(a)?a:{type:a},p=nl(m,i[m.type],d),g="area"===m.type&&il(m,i[m.type]),h=[{name:s,...r?{params:r}:{},mark:el({..."area"===m.type&&void 0===m.opacity&&void 0===m.fillOpacity?{opacity:.7}:{},...m}),encoding:f(d,["shape"])}],y=Ks(Zs(m,d,i),d);let v=d;if(y){const{fieldChannel:e,offset:t}=y;v={...d,[e]:{...d[e],...t?{stack:t}:{}}}}return v=f(v,["y2","x2"]),g&&h.push({...o?{projection:o}:{},mark:{type:"line",...u(m,["clip","interpolate","tension","tooltip"]),...g},encoding:v}),p&&h.push({...o?{projection:o}:{},mark:{type:"point",opacity:1,filled:!0,...u(m,["clip","tooltip"]),...p},encoding:v}),n({...c,layer:h},{...t,config:tl(i)})}}function ol(e,t){return t?_o(e)?fl(e,t):ll(e,t):e}function al(e,t){return t?fl(e,t):e}function sl(e,n,i){const r=n[e];return(o=r)&&!t.isString(o)&&"repeat"in o?r.repeat in i?{...n,[e]:i[r.repeat]}:void yi(function(e){return`Unknown repeated value "${e}".`}(r.repeat)):n;var o}function ll(e,t){if(void 0!==(e=sl("field",e,t))){if(null===e)return null;if(Ao(e)&&zo(e.sort)){const n=sl("field",e.sort,t);e={...e,...n?{sort:n}:{}}}return e}}function cl(e,t){if(Ro(e))return ll(e,t);{const n=sl("datum",e,t);return n===e||n.type||(n.type="nominal"),n}}function ul(e,t){if(!Go(e)){if(Uo(e)){const n=cl(e.condition,t);if(n)return{...e,condition:n};{const{condition:t,...n}=e;return n}}return e}{const n=cl(e,t);if(n)return n;if(Lo(e))return{condition:e.condition}}}function fl(e,n){const i={};for(const r in e)if(t.hasOwnProperty(e,r)){const o=e[r];if(t.isArray(o))i[r]=o.map((e=>ul(e,n))).filter((e=>e));else{const e=ul(o,n);void 0!==e&&(i[r]=e)}}return i}class 
dl{name="RuleForRangedLine";hasMatchingType(e){if(_a(e)){const{encoding:t,mark:n}=e;if("line"===n||go(n)&&"line"===n.type)for(const e of Ze){const n=t[tt(e)];if(t[e]&&(Ro(n)&&!cn(n.bin)||Bo(n)))return!0}}return!1}run(e,n,i){const{encoding:r,mark:o}=e;var a,s;return yi((a=!!r.x2,s=!!r.y2,`Line mark is for continuous lines and thus cannot be used with ${a&&s?"x2 and y2":a?"x2":"y2"}. We will use the rule mark (line segments) instead.`)),i({...e,mark:t.isObject(o)?{...o,type:"rule"}:"rule"},n)}}function ml(e){let{parentEncoding:n,encoding:i={},layer:r}=e,o={};if(n){const e=new Set([...D(n),...D(i)]);for(const a of e){const e=i[a],s=n[a];if(Go(e)){const t={...s,...e};o[a]=t}else Uo(e)?o[a]={...e,condition:{...s,...e.condition}}:e||null===e?o[a]=e:(r||Xo(s)||yn(s)||Go(s)||t.isArray(s))&&(o[a]=s)}}else o=i;return!o||S(o)?void 0:o}function pl(e){const{parentProjection:t,projection:n}=e;return t&&n&&yi(function(e){const{parentProjection:t,projection:n}=e;return`Layer's shared projection ${X(t)} is overridden by a child projection ${X(n)}.`}({parentProjection:t,projection:n})),n??t}function gl(e){return"filter"in e}function hl(e){return"lookup"in e}function yl(e){return"pivot"in e}function vl(e){return"density"in e}function bl(e){return"quantile"in e}function xl(e){return"regression"in e}function $l(e){return"loess"in e}function wl(e){return"sample"in e}function kl(e){return"window"in e}function Sl(e){return"joinaggregate"in e}function Dl(e){return"flatten"in e}function Fl(e){return"calculate"in e}function zl(e){return"bin"in e}function Ol(e){return"impute"in e}function _l(e){return"timeUnit"in e}function Cl(e){return"aggregate"in e}function Nl(e){return"stack"in e}function Pl(e){return"fold"in e}function Al(e){return"extent"in e&&!("density"in e)&&!("regression"in e)}function jl(e,t){const{transform:n,...i}=e;if(n){return{...i,transform:n.map((e=>{if(gl(e))return{filter:Ml(e,t)};if(zl(e)&&un(e.bin))return{...e,bin:El(e.bin)};if(hl(e)){const{selection:t,...n}=e.from;return t?{...e,from:{param:t,...n}}:e}return e}))}}return e}function Tl(e,n){const i=l(e);if(Ro(i)&&un(i.bin)&&(i.bin=El(i.bin)),Qo(i)&&i.scale?.domain?.selection){const{selection:e,...t}=i.scale.domain;i.scale.domain={...t,...e?{param:e}:{}}}if(Lo(i))if(t.isArray(i.condition))i.condition=i.condition.map((e=>{const{selection:t,param:i,test:r,...o}=e;return i?e:{...o,test:Ml(e,n)}}));else{const{selection:e,param:t,test:r,...o}=Tl(i.condition,n);i.condition=t?i.condition:{...o,test:Ml(i.condition,n)}}return i}function El(e){const t=e.extent;if(t?.selection){const{selection:n,...i}=t;return{...e,extent:{...i,param:n}}}return e}function Ml(e,t){const n=e=>s(e,(e=>{const n={param:e,empty:t.emptySelections[e]??!0};return t.selectionPredicates[e]??=[],t.selectionPredicates[e].push(n),n}));return e.selection?n(e.selection):s(e.test||e.filter,(e=>e.selection?n(e.selection):e))}class Ll extends Vs{map(e,t){const n=t.selections??[];if(e.params&&!_a(e)){const t=[];for(const i of e.params)xs(i)?n.push(i):t.push(i);e.params=t}return t.selections=n,super.map(e,t)}mapUnit(e,n){const i=n.selections;if(!i||!i.length)return e;const r=(n.path??[]).concat(e.name),o=[];for(const n of i)if(n.views&&n.views.length)for(const i of n.views)(t.isString(i)&&(i===e.name||r.includes(i))||t.isArray(i)&&i.map((e=>r.indexOf(e))).every(((e,t,n)=>-1!==e&&(0===t||e>n[t-1]))))&&o.push(n);else o.push(n);return o.length&&(e.params=o),e}}for(const e of["mapFacet","mapRepeat","mapHConcat","mapVConcat","mapLayer"]){const t=Ll.prototype[e];Ll.prototype[e]=function(e,n){return 
t.call(this,e,ql(e,n))}}function ql(e,t){return e.name?{...t,path:(t.path??[]).concat(e.name)}:t}function Ul(e,t){void 0===t&&(t=qs(e.config));const n=function(e){let t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{};const n={config:t};return Bl.map(Rl.map(Wl.map(e,n),n),n)}(e,t),{width:i,height:r}=e,o=function(e,t,n){let{width:i,height:r}=t;const o=_a(e)||Hs(e),a={};o?"container"==i&&"container"==r?(a.type="fit",a.contains="padding"):"container"==i?(a.type="fit-x",a.contains="padding"):"container"==r&&(a.type="fit-y",a.contains="padding"):("container"==i&&(yi(Rn("width")),i=void 0),"container"==r&&(yi(Rn("height")),r=void 0));const s={type:"pad",...a,...n?Il(n.autosize):{},...Il(e.autosize)};"fit"!==s.type||o||(yi(Un),s.type="pad");"container"==i&&"fit"!=s.type&&"fit-x"!=s.type&&yi(Wn("width"));"container"==r&&"fit"!=s.type&&"fit-y"!=s.type&&yi(Wn("height"));if(Y(s,{type:"pad"}))return;return s}(n,{width:i,height:r,autosize:e.autosize},t);return{...n,...o?{autosize:o}:{}}}const Rl=new class extends Vs{nonFacetUnitNormalizers=[Ya,Za,os,new rl,new dl];map(e,t){if(_a(e)){const n=Na(e.encoding,Q),i=Na(e.encoding,J),r=Na(e.encoding,K);if(n||i||r)return this.mapFacetedUnit(e,t)}return super.map(e,t)}mapUnit(e,t){const{parentEncoding:n,parentProjection:i}=t,r=al(e.encoding,t.repeater),o={...e,...e.name?{name:[t.repeaterPrefix,e.name].filter((e=>e)).join("_")}:{},...r?{encoding:r}:{}};if(n||i)return this.mapUnitWithParentEncodingOrProjection(o,t);const a=this.mapLayerOrUnit.bind(this);for(const e of this.nonFacetUnitNormalizers)if(e.hasMatchingType(o,t.config))return e.run(o,t,a);return o}mapRepeat(e,n){return function(e){return!t.isArray(e.repeat)&&e.repeat.layer}(e)?this.mapLayerRepeat(e,n):this.mapNonLayerRepeat(e,n)}mapLayerRepeat(e,t){const{repeat:n,spec:i,...r}=e,{row:o,column:a,layer:s}=n,{repeater:l={},repeaterPrefix:c=""}=t;return o||a?this.mapRepeat({...e,repeat:{...o?{row:o}:{},...a?{column:a}:{}},spec:{repeat:{layer:s},spec:i}},t):{...r,layer:s.map((e=>{const n={...l,layer:e},r=`${(i.name?`${i.name}_`:"")+c}child__layer_${_(e)}`,o=this.mapLayerOrUnit(i,{...t,repeater:n,repeaterPrefix:r});return o.name=r,o}))}}mapNonLayerRepeat(e,n){const{repeat:i,spec:r,data:o,...a}=e;!t.isArray(i)&&e.columns&&(e=f(e,["columns"]),yi(Xn("repeat")));const s=[],{repeater:l={},repeaterPrefix:c=""}=n,u=!t.isArray(i)&&i.row||[l?l.row:null],d=!t.isArray(i)&&i.column||[l?l.column:null],m=t.isArray(i)&&i||[l?l.repeat:null];for(const e of m)for(const o of u)for(const a of d){const u={repeat:e,row:o,column:a,layer:l.layer},d=(r.name?`${r.name}_`:"")+c+"child__"+(t.isArray(i)?`${_(e)}`:(i.row?`row_${_(o)}`:"")+(i.column?`column_${_(a)}`:"")),m=this.map(r,{...n,repeater:u,repeaterPrefix:d});m.name=d,s.push(f(m,["data"]))}const p=t.isArray(i)?e.columns:i.column?i.column.length:1;return{data:r.data??o,align:"all",...a,columns:p,concat:s}}mapFacet(e,t){const{facet:n}=e;return _o(n)&&e.columns&&(e=f(e,["columns"]),yi(Xn("facet"))),super.mapFacet(e,t)}mapUnitWithParentEncodingOrProjection(e,t){const{encoding:n,projection:i}=e,{parentEncoding:r,parentProjection:o,config:a}=t,s=pl({parentProjection:o,projection:i}),l=ml({parentEncoding:r,encoding:al(n,t.repeater)});return this.mapUnit({...e,...s?{projection:s}:{},...l?{encoding:l}:{}},{config:a})}mapFacetedUnit(e,t){const{row:n,column:i,facet:r,...o}=e.encoding,{mark:a,width:s,projection:l,height:c,view:u,params:f,encoding:d,...m}=e,{facetMapping:p,layout:g}=this.getFacetMappingAndLayout({row:n,column:i,facet:r},t),h=al(o,t.repeater);return 
this.mapFacet({...m,...g,facet:p,spec:{...s?{width:s}:{},...c?{height:c}:{},...u?{view:u}:{},...l?{projection:l}:{},mark:a,encoding:h,...f?{params:f}:{}}},t)}getFacetMappingAndLayout(e,t){const{row:n,column:i,facet:r}=e;if(n||i){r&&yi(`Facet encoding dropped as ${(o=[...n?[Q]:[],...i?[J]:[]]).join(" and ")} ${o.length>1?"are":"is"} also specified.`);const t={},a={};for(const n of[Q,J]){const i=e[n];if(i){const{align:e,center:r,spacing:o,columns:s,...l}=i;t[n]=l;for(const e of["align","center","spacing"])void 0!==i[e]&&(a[e]??={},a[e][n]=i[e])}}return{facetMapping:t,layout:a}}{const{align:e,center:n,spacing:i,columns:o,...a}=r;return{facetMapping:ol(a,t.repeater),layout:{...e?{align:e}:{},...n?{center:n}:{},...i?{spacing:i}:{},...o?{columns:o}:{}}}}var o}mapLayer(e,t){let{parentEncoding:n,parentProjection:i,...r}=t;const{encoding:o,projection:a,...s}=e,l={...r,parentEncoding:ml({parentEncoding:n,encoding:o,layer:!0}),parentProjection:pl({parentProjection:i,projection:a})};return super.mapLayer({...s,...e.name?{name:[l.repeaterPrefix,e.name].filter((e=>e)).join("_")}:{}},l)}},Wl=new class extends Vs{map(e,t){return t.emptySelections??={},t.selectionPredicates??={},e=jl(e,t),super.map(e,t)}mapLayerOrUnit(e,t){if((e=jl(e,t)).encoding){const n={};for(const[i,r]of z(e.encoding))n[i]=Tl(r,t);e={...e,encoding:n}}return super.mapLayerOrUnit(e,t)}mapUnit(e,t){const{selection:n,...i}=e;return n?{...i,params:z(n).map((e=>{let[n,i]=e;const{init:r,bind:o,empty:a,...s}=i;"single"===s.type?(s.type="point",s.toggle=!1):"multi"===s.type&&(s.type="point"),t.emptySelections[n]="none"!==a;for(const e of F(t.selectionPredicates[n]??{}))e.empty="none"!==a;return{name:n,value:r,select:s,bind:o}}))}:e}},Bl=new Ll;function Il(e){return t.isString(e)?{type:e}:e??{}}const Hl=["background","padding"];function Vl(e,t){const n={};for(const t of Hl)e&&void 0!==e[t]&&(n[t]=Sn(e[t]));return t&&(n.params=e.params),n}class Gl{constructor(){let e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{},t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{};this.explicit=e,this.implicit=t}clone(){return new Gl(l(this.explicit),l(this.implicit))}combine(){return{...this.explicit,...this.implicit}}get(e){return U(this.explicit[e],this.implicit[e])}getWithExplicit(e){return void 0!==this.explicit[e]?{explicit:!0,value:this.explicit[e]}:void 0!==this.implicit[e]?{explicit:!1,value:this.implicit[e]}:{explicit:!1,value:void 0}}setWithExplicit(e,t){let{value:n,explicit:i}=t;void 0!==n&&this.set(e,n,i)}set(e,t,n){return delete this[n?"implicit":"explicit"][e],this[n?"explicit":"implicit"][e]=t,this}copyKeyFromSplit(e,t){let{explicit:n,implicit:i}=t;void 0!==n[e]?this.set(e,n[e],!0):void 0!==i[e]&&this.set(e,i[e],!1)}copyKeyFromObject(e,t){void 0!==t[e]&&this.set(e,t[e],!0)}copyAll(e){for(const t of D(e.combine())){const n=e.getWithExplicit(t);this.setWithExplicit(t,n)}}}function Yl(e){return{explicit:!0,value:e}}function Xl(e){return{explicit:!1,value:e}}function Ql(e){return(t,n,i,r)=>{const o=e(t.value,n.value);return o>0?t:o<0?n:Jl(t,n,i,r)}}function Jl(e,t,n,i){return e.explicit&&t.explicit&&yi(function(e,t,n,i){return`Conflicting ${t.toString()} property "${e.toString()}" (${X(n)} and ${X(i)}). 
Using ${X(n)}.`}(n,i,e.value,t.value)),e}function Kl(e,t,n,i){let r=arguments.length>4&&void 0!==arguments[4]?arguments[4]:Jl;return void 0===e||void 0===e.value?t:e.explicit&&!t.explicit?e:t.explicit&&!e.explicit?t:Y(e.value,t.value)?e:r(e,t,n,i)}class Zl extends Gl{constructor(){let e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{},t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},n=arguments.length>2&&void 0!==arguments[2]&&arguments[2];super(e,t),this.explicit=e,this.implicit=t,this.parseNothing=n}clone(){const e=super.clone();return e.parseNothing=this.parseNothing,e}}function ec(e){return"url"in e}function tc(e){return"values"in e}function nc(e){return"name"in e&&!ec(e)&&!tc(e)&&!ic(e)}function ic(e){return e&&(rc(e)||oc(e)||ac(e))}function rc(e){return"sequence"in e}function oc(e){return"sphere"in e}function ac(e){return"graticule"in e}let sc=function(e){return e[e.Raw=0]="Raw",e[e.Main=1]="Main",e[e.Row=2]="Row",e[e.Column=3]="Column",e[e.Lookup=4]="Lookup",e}({});function lc(e){const{signals:t,hasLegend:n,index:i,...r}=e;return r.field=E(r.field),r}function cc(e){let n=!(arguments.length>1&&void 0!==arguments[1])||arguments[1],i=arguments.length>2&&void 0!==arguments[2]?arguments[2]:t.identity;if(t.isArray(e)){const t=e.map((e=>cc(e,n,i)));return n?`[${t.join(", ")}]`:t}return vi(e)?i(n?Si(e):function(e){const t=ki(e,!0);return e.utc?+new Date(Date.UTC(...t)):+new Date(...t)}(e)):n?i(X(e)):e}function uc(e,n){for(const i of F(e.component.selection??{})){const r=i.name;let o=`${r}${_u}, ${"global"===i.resolve?"true":`{unit: ${Au(e)}}`}`;for(const t of Pu)t.defined(i)&&(t.signals&&(n=t.signals(e,i,n)),t.modifyExpr&&(o=t.modifyExpr(e,i,o)));n.push({name:r+Cu,on:[{events:{signal:i.name+_u},update:`modify(${t.stringValue(i.name+Ou)}, ${o})`}]})}return mc(n)}function fc(e,n){if(e.component.selection&&D(e.component.selection).length){const i=t.stringValue(e.getName("cell"));n.unshift({name:"facet",value:{},on:[{events:t.parseSelector("pointermove","scope"),update:`isTuple(facet) ? 
facet : group(${i}).datum`}]})}return mc(n)}function dc(e,t){for(const n of F(e.component.selection??{}))for(const i of Pu)i.defined(n)&&i.marks&&(t=i.marks(e,n,t));return t}function mc(e){return e.map((e=>(e.on&&!e.on.length&&delete e.on,e)))}class pc{_children=[];_parent=null;constructor(e,t){this.debugName=t,e&&(this.parent=e)}clone(){throw new Error("Cannot clone node")}get parent(){return this._parent}set parent(e){this._parent=e,e&&e.addChild(this)}get children(){return this._children}numChildren(){return this._children.length}addChild(e,t){this._children.includes(e)?yi("Attempt to add the same child twice."):void 0!==t?this._children.splice(t,0,e):this._children.push(e)}removeChild(e){const t=this._children.indexOf(e);return this._children.splice(t,1),t}remove(){let e=this._parent.removeChild(this);for(const t of this._children)t._parent=this._parent,this._parent.addChild(t,e++)}insertAsParentOf(e){const t=e.parent;t.removeChild(this),this.parent=t,e.parent=this}swapWithParent(){const e=this._parent,t=e.parent;for(const t of this._children)t.parent=e;this._children=[],e.removeChild(this);const n=e.parent.removeChild(e);this._parent=t,t.addChild(this,n),e.parent=this}}class gc extends pc{clone(){const e=new this.constructor;return e.debugName=`clone_${this.debugName}`,e._source=this._source,e._name=`clone_${this._name}`,e.type=this.type,e.refCounts=this.refCounts,e.refCounts[e._name]=0,e}constructor(e,t,n,i){super(e,t),this.type=n,this.refCounts=i,this._source=this._name=t,this.refCounts&&!(this._name in this.refCounts)&&(this.refCounts[this._name]=0)}dependentFields(){return new Set}producedFields(){return new Set}hash(){return void 0===this._hash&&(this._hash=`Output ${W()}`),this._hash}getSource(){return this.refCounts[this._name]++,this._source}isRequired(){return!!this.refCounts[this._name]}setSource(e){this._source=e}}function hc(e){return void 0!==e.as}function yc(e){return`${e}_end`}class vc extends pc{clone(){return new vc(null,l(this.timeUnits))}constructor(e,t){super(e),this.timeUnits=t}static makeFromEncoding(e,t){const n=t.reduceFieldDef(((e,n,i)=>{const{field:r,timeUnit:o}=n;if(o){let a;if(zi(o)){if(gm(t)){const{mark:e,markDef:i,config:s}=t,l=jo({fieldDef:n,markDef:i,config:s});(mo(e)||l)&&(a={timeUnit:Ei(o),field:r})}}else a={as:ta(n,{forAs:!0}),field:r,timeUnit:o};if(gm(t)){const{mark:e,markDef:r,config:o}=t,s=jo({fieldDef:n,markDef:r,config:o});mo(e)&&zt(i)&&.5!==s&&(a.rectBandPosition=s)}a&&(e[d(a)]=a)}return e}),{});return S(n)?null:new vc(e,n)}static makeFromTransform(e,t){const{timeUnit:n,...i}={...t},r={...i,timeUnit:Ei(n)};return new vc(e,{[d(r)]:r})}merge(e){this.timeUnits={...this.timeUnits};for(const t in e.timeUnits)this.timeUnits[t]||(this.timeUnits[t]=e.timeUnits[t]);for(const t of e.children)e.removeChild(t),t.parent=this;e.remove()}removeFormulas(e){const t={};for(const[n,i]of z(this.timeUnits)){const r=hc(i)?i.as:`${i.field}_end`;e.has(r)||(t[n]=i)}this.timeUnits=t}producedFields(){return new Set(F(this.timeUnits).map((e=>hc(e)?e.as:yc(e.field))))}dependentFields(){return new Set(F(this.timeUnits).map((e=>e.field)))}hash(){return`TimeUnit ${d(this.timeUnits)}`}assemble(){const e=[];for(const t of F(this.timeUnits)){const{rectBandPosition:n}=t,i=Ei(t.timeUnit);if(hc(t)){const{field:r,as:o}=t,{unit:a,utc:s,...l}=i,c=[o,`${o}_end`];e.push({field:E(r),type:"timeunit",...a?{units:Ni(a)}:{},...s?{timezone:"utc"}:{},...l,as:c}),e.push(...wc(c,n,i))}else 
if(t){const{field:r}=t,o=r.replaceAll("\\.","."),a=$c({timeUnit:i,field:o}),s=yc(o);e.push({type:"formula",expr:a,as:s}),e.push(...wc([o,s],n,i))}}return e}}const bc="offsetted_rect_start",xc="offsetted_rect_end";function $c(e){let{timeUnit:t,field:n,reverse:i}=e;const{unit:r,utc:o}=t,a=Pi(r),{part:s,step:l}=qi(a,t.step);return`${o?"utcOffset":"timeOffset"}('${s}', datum['${n}'], ${i?-l:l})`}function wc(e,t,n){let[i,r]=e;if(void 0!==t&&.5!==t){const e=`datum['${i}']`,o=`datum['${r}']`;return[{type:"formula",expr:kc([$c({timeUnit:n,field:i,reverse:!0}),e],t+.5),as:`${i}_${bc}`},{type:"formula",expr:kc([e,o],t+.5),as:`${i}_${xc}`}]}return[]}function kc(e,t){let[n,i]=e;return`${1-t} * ${n} + ${t} * ${i}`}const Sc="_tuple_fields";class Dc{constructor(){for(var e=arguments.length,t=new Array(e),n=0;n!0,parse:(e,n,i)=>{const r=n.name,o=n.project??=new Dc,a={},s={},l=new Set,c=(e,t)=>{const n="visual"===t?e.channel:e.field;let i=_(`${r}_${n}`);for(let e=1;l.has(i);e++)i=_(`${r}_${n}_${e}`);return l.add(i),{[t]:i}},u=n.type,f=e.config.selection[u],m=void 0!==i.value?t.array(i.value):null;let{fields:p,encodings:g}=t.isObject(i.select)?i.select:{};if(!p&&!g&&m)for(const e of m)if(t.isObject(e))for(const t of D(e))Je[t]?(g||(g=[])).push(t):"interval"===u?(yi('Interval selections should be initialized using "x", "y", "longitude", or "latitude" keys.'),g=f.encodings):(p??=[]).push(t);p||g||(g=f.encodings,"fields"in f&&(p=f.fields));for(const t of g??[]){const n=e.fieldDef(t);if(n){let i=n.field;if(n.aggregate){yi(Vn(t,n.aggregate));continue}if(!i){yi(Hn(t));continue}if(n.timeUnit&&!zi(n.timeUnit)){i=e.vgField(t);const r={timeUnit:n.timeUnit,as:i,field:n.field};s[d(r)]=r}if(!a[i]){const r={field:i,channel:t,type:"interval"===u&&Ht(t)&&yr(e.getScaleComponent(t).get("type"))?"R":n.bin?"R-RE":"E",index:o.items.length};r.signals={...c(r,"data"),...c(r,"visual")},o.items.push(a[i]=r),o.hasField[i]=a[i],o.hasSelectionId=o.hasSelectionId||i===hs,Ee(t)?(r.geoChannel=t,r.channel=Te(t),o.hasChannel[r.channel]=a[i]):o.hasChannel[t]=a[i]}}else yi(Hn(t))}for(const e of p??[]){if(o.hasField[e])continue;const t={type:"E",field:e,index:o.items.length};t.signals={...c(t,"data")},o.items.push(t),o.hasField[e]=t,o.hasSelectionId=o.hasSelectionId||e===hs}m&&(n.init=m.map((e=>o.items.map((n=>t.isObject(e)?void 0!==e[n.geoChannel||n.channel]?e[n.geoChannel||n.channel]:e[n.field]:e))))),S(s)||(o.timeUnit=new vc(null,s))},signals:(e,t,n)=>{const i=t.name+Sc;return n.filter((e=>e.name===i)).length>0||t.project.hasSelectionId?n:n.concat({name:i,value:t.project.items.map(lc)})}},zc={defined:e=>"interval"===e.type&&"global"===e.resolve&&e.bind&&"scales"===e.bind,parse:(e,t)=>{const n=t.scales=[];for(const i of t.project.items){const r=i.channel;if(!Ht(r))continue;const o=e.getScaleComponent(r),a=o?o.get("type"):void 0;"sequential"==a&&yi("Sequntial scales are deprecated. 
The available quantitative scale type values are linear, log, pow, sqrt, symlog, time and utc"),o&&yr(a)?(o.set("selectionExtent",{param:t.name,field:i.field},!0),n.push(i)):yi("Scale bindings are currently only supported for scales with unbinned, continuous domains.")}},topLevelSignals:(e,n,i)=>{const r=n.scales.filter((e=>0===i.filter((t=>t.name===e.signals.data)).length));if(!e.parent||_c(e)||0===r.length)return i;const o=i.filter((e=>e.name===n.name))[0];let a=o.update;if(a.indexOf(Nu)>=0)o.update=`{${r.map((e=>`${t.stringValue(E(e.field))}: ${e.signals.data}`)).join(", ")}}`;else{for(const e of r){const n=`${t.stringValue(E(e.field))}: ${e.signals.data}`;a.includes(n)||(a=`${a.substring(0,a.length-1)}, ${n}}`)}o.update=a}return i.concat(r.map((e=>({name:e.signals.data}))))},signals:(e,t,n)=>{if(e.parent&&!_c(e))for(const e of t.scales){const t=n.find((t=>t.name===e.signals.data));t.push="outer",delete t.value,delete t.update}return n}};function Oc(e,n){return`domain(${t.stringValue(e.scaleName(n))})`}function _c(e){return e.parent&&vm(e.parent)&&(!e.parent.parent??_c(e.parent.parent))}const Cc="_brush",Nc="_scale_trigger",Pc="geo_interval_init_tick",Ac="_init",jc={defined:e=>"interval"===e.type,parse:(e,n,i)=>{if(e.hasProjection){const e={...t.isObject(i.select)?i.select:{}};e.fields=[hs],e.encodings||(e.encodings=i.value?D(i.value):[ue,ce]),i.select={type:"interval",...e}}if(n.translate&&!zc.defined(n)){const e=`!event.item || event.item.mark.name !== ${t.stringValue(n.name+Cc)}`;for(const i of n.events){if(!i.between){yi(`${i} is not an ordered event stream for interval selections.`);continue}const n=t.array(i.between[0].filter??=[]);n.indexOf(e)<0&&n.push(e)}}},signals:(e,n,i)=>{const r=n.name,o=r+_u,a=F(n.project.hasChannel).filter((e=>e.channel===Z||e.channel===ee)),s=n.init?n.init[0]:null;if(i.push(...a.reduce(((i,r)=>i.concat(function(e,n,i,r){const o=!e.hasProjection,a=i.channel,s=i.signals.visual,l=t.stringValue(o?e.scaleName(a):e.projectionName()),c=e=>`scale(${l}, ${e})`,u=e.getSizeSignalRef(a===Z?"width":"height").signal,f=`${a}(unit)`,d=n.events.reduce(((e,t)=>[...e,{events:t.between[0],update:`[${f}, ${f}]`},{events:t,update:`[${s}[0], clamp(${f}, 0, ${u})]`}]),[]);if(o){const t=i.signals.data,o=zc.defined(n),u=e.getScaleComponent(a),f=u?u.get("type"):void 0,m=r?{init:cc(r,!0,c)}:{value:[]};return d.push({events:{signal:n.name+Nc},update:yr(f)?`[${c(`${t}[0]`)}, ${c(`${t}[1]`)}]`:"[0, 0]"}),o?[{name:t,on:[]}]:[{name:s,...m,on:d},{name:t,...r?{init:cc(r)}:{},on:[{events:{signal:s},update:`${s}[0] === ${s}[1] ? 
null : invert(${l}, ${s})`}]}]}{const e=a===Z?0:1,t=n.name+Ac;return[{name:s,...r?{init:`[${t}[0][${e}], ${t}[1][${e}]]`}:{value:[]},on:d}]}}(e,n,r,s&&s[r.index]))),[])),e.hasProjection){const l=t.stringValue(e.projectionName()),c=e.projectionName()+"_center",{x:u,y:f}=n.project.hasChannel,d=u&&u.signals.visual,m=f&&f.signals.visual,p=u?s&&s[u.index]:`${c}[0]`,g=f?s&&s[f.index]:`${c}[1]`,h=t=>e.getSizeSignalRef(t).signal,y=`[[${d?d+"[0]":"0"}, ${m?m+"[0]":"0"}],[${d?d+"[1]":h("width")}, ${m?m+"[1]":h("height")}]]`;if(s&&(i.unshift({name:r+Ac,init:`[scale(${l}, [${u?p[0]:p}, ${f?g[0]:g}]), scale(${l}, [${u?p[1]:p}, ${f?g[1]:g}])]`}),!u||!f)){i.find((e=>e.name===c))||i.unshift({name:c,update:`invert(${l}, [${h("width")}/2, ${h("height")}/2])`})}const v=`vlSelectionTuples(${`intersect(${y}, {markname: ${t.stringValue(e.getName("marks"))}}, unit.mark)`}, ${`{unit: ${Au(e)}}`})`,b=a.map((e=>e.signals.visual));return i.concat({name:o,on:[{events:[...b.length?[{signal:b.join(" || ")}]:[],...s?[{signal:Pc}]:[]],update:v}]})}{if(!zc.defined(n)){const n=r+Nc,o=a.map((n=>{const i=n.channel,{data:r,visual:o}=n.signals,a=t.stringValue(e.scaleName(i)),s=yr(e.getScaleComponent(i).get("type"))?"+":"";return`(!isArray(${r}) || (${s}invert(${a}, ${o})[0] === ${s}${r}[0] && ${s}invert(${a}, ${o})[1] === ${s}${r}[1]))`}));o.length&&i.push({name:n,value:{},on:[{events:a.map((t=>({scale:e.scaleName(t.channel)}))),update:o.join(" && ")+` ? ${n} : {}`}]})}const l=a.map((e=>e.signals.data)),c=`unit: ${Au(e)}, fields: ${r+Sc}, values`;return i.concat({name:o,...s?{init:`{${c}: ${cc(s)}}`}:{},...l.length?{on:[{events:[{signal:l.join(" || ")}],update:`${l.join(" && ")} ? {${c}: [${l}]} : null`}]}:{}})}},topLevelSignals:(e,t,n)=>{if(gm(e)&&e.hasProjection&&t.init){n.filter((e=>e.name===Pc)).length||n.unshift({name:Pc,value:null,on:[{events:"timer{1}",update:`${Pc} === null ? {} : ${Pc}`}]})}return n},marks:(e,n,i)=>{const r=n.name,{x:o,y:a}=n.project.hasChannel,s=o?.signals.visual,l=a?.signals.visual,c=`data(${t.stringValue(n.name+Ou)})`;if(zc.defined(n)||!o&&!a)return i;const u={x:void 0!==o?{signal:`${s}[0]`}:{value:0},y:void 0!==a?{signal:`${l}[0]`}:{value:0},x2:void 0!==o?{signal:`${s}[1]`}:{field:{group:"width"}},y2:void 0!==a?{signal:`${l}[1]`}:{field:{group:"height"}}};if("global"===n.resolve)for(const t of D(u))u[t]=[{test:`${c}.length && ${c}[0].unit === ${Au(e)}`,...u[t]},{value:0}];const{fill:f,fillOpacity:d,cursor:m,...p}=n.mark,g=D(p).reduce(((e,t)=>(e[t]=[{test:[void 0!==o&&`${s}[0] !== ${s}[1]`,void 0!==a&&`${l}[0] !== ${l}[1]`].filter((e=>e)).join(" && "),value:p[t]},{value:null}],e)),{}),h=m??(n.translate?"move":null);return[{name:`${r+Cc}_bg`,type:"rect",clip:!0,encode:{enter:{fill:{value:f},fillOpacity:{value:d}},update:u}},...i,{name:r+Cc,type:"rect",clip:!0,encode:{enter:{...h?{cursor:{value:h}}:{},fill:{value:"transparent"}},update:{...u,...g}}}]}};const Tc={defined:e=>"point"===e.type,signals:(e,n,i)=>{const r=n.name,o=r+Sc,a=n.project,s="(item().isVoronoi ? 
datum.datum : datum)",l=F(e.component.selection??{}).reduce(((e,t)=>"interval"===t.type?e.concat(t.name+Cc):e),[]).map((e=>`indexof(item().mark.name, '${e}') < 0`)).join(" && "),c="datum && item().mark.marktype !== 'group' && indexof(item().mark.role, 'legend') < 0"+(l?` && ${l}`:"");let u=`unit: ${Au(e)}, `;if(n.project.hasSelectionId)u+=`${hs}: ${s}[${t.stringValue(hs)}]`;else{u+=`fields: ${o}, values: [${a.items.map((n=>{const i=e.fieldDef(n.channel);return i?.bin?`[${s}[${t.stringValue(e.vgField(n.channel,{}))}], ${s}[${t.stringValue(e.vgField(n.channel,{binSuffix:"end"}))}]]`:`${s}[${t.stringValue(n.field)}]`})).join(", ")}]`}const f=n.events;return i.concat([{name:r+_u,on:f?[{events:f,update:`${c} ? {${u}} : null`,force:!0}]:[]}])}};function Ec(e,n,i,r){const o=Lo(n)&&n.condition,a=r(n);if(o){const n=t.array(o).map((t=>{const n=r(t);if(function(e){return e.param}(t)){const{param:i,empty:r}=t;return{test:Uu(e,{param:i,empty:r}),...n}}return{test:Wu(e,t.test),...n}}));return{[i]:[...n,...void 0!==a?[a]:[]]}}return void 0!==a?{[i]:a}:{}}function Mc(e){let t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:"text";const n=e.encoding[t];return Ec(e,n,t,(t=>Lc(t,e.config)))}function Lc(e,t){let n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:"datum";if(e){if(Xo(e))return Fn(e.value);if(Go(e)){const{format:i,formatType:r}=ca(e);return Rr({fieldOrDatumDef:e,format:i,formatType:r,expr:n,config:t})}}}function qc(e){let n=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{};const{encoding:i,markDef:r,config:o,stack:a}=e,s=i.tooltip;if(t.isArray(s))return{tooltip:Rc({tooltip:s},a,o,n)};{const l=n.reactiveGeom?"datum.datum":"datum";return Ec(e,s,"tooltip",(e=>{const s=Lc(e,o,l);if(s)return s;if(null===e)return;let c=Nn("tooltip",r,o);return!0===c&&(c={content:"encoding"}),t.isString(c)?{value:c}:t.isObject(c)?yn(c)?c:"encoding"===c.content?Rc(i,a,o,n):{signal:l}:void 0}))}}function Uc(e,n,i){let{reactiveGeom:r}=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{};const o={...i,...i.tooltipFormat},a={},s=r?"datum.datum":"datum",l=[];function c(i,r){const c=tt(r),u=Yo(i)?i:{...i,type:e[c].type},f=u.title||la(u,o),d=t.array(f).join(", ").replaceAll(/"/g,'\\"');let m;if(zt(r)){const t="x"===r?"x2":"y2",n=ua(e[t]);if(cn(u.bin)&&n){const e=ta(u,{expr:s}),i=ta(n,{expr:s}),{format:r,formatType:l}=ca(u);m=Xr(e,i,r,l,o),a[t]=!0}}if((zt(r)||r===se||r===oe)&&n&&n.fieldChannel===r&&"normalize"===n.offset){const{format:e,formatType:t}=ca(u);m=Rr({fieldOrDatumDef:u,format:e,formatType:t,expr:s,config:o,normalizeStack:!0}).signal}m??=Lc(u,o,s).signal,l.push({channel:r,key:d,value:m})}La(e,((e,t)=>{Ro(e)?c(e,t):qo(e)&&c(e.condition,t)}));const u={};for(const{channel:e,key:t,value:n}of l)a[e]||u[t]||(u[t]=n);return u}function Rc(e,t,n){let{reactiveGeom:i}=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{};const r=Uc(e,t,n,{reactiveGeom:i}),o=z(r).map((e=>{let[t,n]=e;return`"${t}": ${n}`}));return o.length>0?{signal:`{${o.join(", ")}}`}:void 0}function Wc(e){const{markDef:t,config:n}=e,i=Nn("aria",t,n);return!1===i?{}:{...i?{aria:i}:{},...Bc(e),...Ic(e)}}function Bc(e){const{mark:t,markDef:n,config:i}=e;if(!1===i.aria)return{};const r=Nn("ariaRoleDescription",n,i);return null!=r?{ariaRoleDescription:{value:r}}:t in $n?{}:{ariaRoleDescription:{value:t}}}function Ic(e){const{encoding:t,markDef:n,config:i,stack:r}=e,o=t.description;if(o)return Ec(e,o,"description",(t=>Lc(t,e.config)));const a=Nn("description",n,i);if(null!=a)return{description:Fn(a)};if(!1===i.aria)return{};const 
s=Uc(t,r,i);return S(s)?void 0:{description:{signal:z(s).map(((e,t)=>{let[n,i]=e;return`"${t>0?"; ":""}${n}: " + (${i})`})).join(" + ")}}}function Hc(e,t){let n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{};const{markDef:i,encoding:r,config:o}=t,{vgChannel:a}=n;let{defaultRef:s,defaultValue:l}=n;void 0===s&&(l??=Nn(e,i,o,{vgChannel:a,ignoreVgConfig:!0}),void 0!==l&&(s=Fn(l)));const c=r[e];return Ec(t,c,a??e,(n=>Er({channel:e,channelDef:n,markDef:i,config:o,scaleName:t.scaleName(e),scale:t.getScaleComponent(e),stack:null,defaultRef:s})))}function Vc(e){let t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{filled:void 0};const{markDef:n,encoding:i,config:r}=e,{type:o}=n,a=t.filled??Nn("filled",n,r),s=p(["bar","point","circle","square","geoshape"],o)?"transparent":void 0,l=Nn(!0===a?"color":void 0,n,r,{vgChannel:"fill"})??r.mark[!0===a&&"color"]??s,c=Nn(!1===a?"color":void 0,n,r,{vgChannel:"stroke"})??r.mark[!1===a&&"color"],u=a?"fill":"stroke",f={...l?{fill:Fn(l)}:{},...c?{stroke:Fn(c)}:{}};return n.color&&(a?n.fill:n.stroke)&&yi(ei("property",{fill:"fill"in n,stroke:"stroke"in n})),{...f,...Hc("color",e,{vgChannel:u,defaultValue:a?l:c}),...Hc("fill",e,{defaultValue:i.fill?l:void 0}),...Hc("stroke",e,{defaultValue:i.stroke?c:void 0})}}function Gc(e){const{encoding:t,mark:n}=e,i=t.order;return!fo(n)&&Xo(i)?Ec(e,i,"zindex",(e=>Fn(e.value))):{}}function Yc(e){let{channel:t,markDef:n,encoding:i={},model:r,bandPosition:o}=e;const a=`${t}Offset`,s=n[a],l=i[a];if(("xOffset"===a||"yOffset"===a)&&l){return{offsetType:"encoding",offset:Er({channel:a,channelDef:l,markDef:n,config:r?.config,scaleName:r.scaleName(a),scale:r.getScaleComponent(a),stack:null,defaultRef:Fn(s),bandPosition:o})}}const c=n[a];return c?{offsetType:"visual",offset:c}:{}}function Xc(e,t,n){let{defaultPos:i,vgChannel:r}=n;const{encoding:o,markDef:a,config:s,stack:l}=t,c=o[e],u=o[it(e)],f=t.scaleName(e),d=t.getScaleComponent(e),{offset:m,offsetType:p}=Yc({channel:e,markDef:a,encoding:o,model:t,bandPosition:.5}),g=Qc({model:t,defaultPos:i,channel:e,scaleName:f,scale:d}),h=!c&&zt(e)&&(o.latitude||o.longitude)?{field:t.getName(e)}:function(e){const{channel:t,channelDef:n,scaleName:i,stack:r,offset:o,markDef:a}=e;if(Go(n)&&r&&t===r.fieldChannel){if(Ro(n)){let e=n.bandPosition;if(void 0!==e||"text"!==a.type||"radius"!==t&&"theta"!==t||(e=.5),void 0!==e)return Tr({scaleName:i,fieldOrDatumDef:n,startSuffix:"start",bandPosition:e,offset:o})}return jr(n,i,{suffix:"end"},{offset:o})}return Nr(e)}({channel:e,channelDef:c,channel2Def:u,markDef:a,config:s,scaleName:f,scale:d,stack:l,offset:m,defaultRef:g,bandPosition:"encoding"===p?0:void 0});return h?{[r||e]:h}:void 0}function Qc(e){let{model:t,defaultPos:n,channel:i,scaleName:r,scale:o}=e;const{markDef:a,config:s}=t;return()=>{const e=tt(i),l=nt(i),c=Nn(i,a,s,{vgChannel:l});if(void 0!==c)return Mr(i,c);switch(n){case"zeroOrMin":case"zeroOrMax":if(r){const e=o.get("type");if(p([or.LOG,or.TIME,or.UTC],e));else if(o.domainDefinitelyIncludesZero())return{scale:r,value:0}}if("zeroOrMin"===n)return"y"===e?{field:{group:"height"}}:{value:0};switch(e){case"radius":return{signal:`min(${t.width.signal},${t.height.signal})/2`};case"theta":return{signal:"2*PI"};case"x":return{field:{group:"width"}};case"y":return{value:0}}break;case"mid":return{...t[rt(i)],mult:.5}}}}const Jc={left:"x",center:"xc",right:"x2"},Kc={top:"y",middle:"yc",bottom:"y2"};function Zc(e,t,n){let i=arguments.length>3&&void 0!==arguments[3]?arguments[3]:"middle";if("radius"===e||"theta"===e)return nt(e);const 
r="x"===e?"align":"baseline",o=Nn(r,t,n);let a;return yn(o)?(yi(function(e){return`The ${e} for range marks cannot be an expression`}(r)),a=void 0):a=o,"x"===e?Jc[a||("top"===i?"left":"center")]:Kc[a||i]}function eu(e,t,n){let{defaultPos:i,defaultPos2:r,range:o}=n;return o?tu(e,t,{defaultPos:i,defaultPos2:r}):Xc(e,t,{defaultPos:i})}function tu(e,t,n){let{defaultPos:i,defaultPos2:r}=n;const{markDef:o,config:a}=t,s=it(e),l=rt(e),c=function(e,t,n){const{encoding:i,mark:r,markDef:o,stack:a,config:s}=e,l=tt(n),c=rt(n),u=nt(n),f=i[l],d=e.scaleName(l),m=e.getScaleComponent(l),{offset:p}=Yc(n in i||n in o?{channel:n,markDef:o,encoding:i,model:e}:{channel:l,markDef:o,encoding:i,model:e});if(!f&&("x2"===n||"y2"===n)&&(i.latitude||i.longitude)){const t=rt(n),i=e.markDef[t];return null!=i?{[t]:{value:i}}:{[u]:{field:e.getName(n)}}}const g=function(e){let{channel:t,channelDef:n,channel2Def:i,markDef:r,config:o,scaleName:a,scale:s,stack:l,offset:c,defaultRef:u}=e;if(Go(n)&&l&&t.charAt(0)===l.fieldChannel.charAt(0))return jr(n,a,{suffix:"start"},{offset:c});return Nr({channel:t,channelDef:i,scaleName:a,scale:s,stack:l,markDef:r,config:o,offset:c,defaultRef:u})}({channel:n,channelDef:f,channel2Def:i[n],markDef:o,config:s,scaleName:d,scale:m,stack:a,offset:p,defaultRef:void 0});if(void 0!==g)return{[u]:g};return nu(n,o)||nu(n,{[n]:An(n,o,s.style),[c]:An(c,o,s.style)})||nu(n,s[r])||nu(n,s.mark)||{[u]:Qc({model:e,defaultPos:t,channel:n,scaleName:d,scale:m})()}}(t,r,s);return{...Xc(e,t,{defaultPos:i,vgChannel:c[l]?Zc(e,o,a):nt(e)}),...c}}function nu(e,t){const n=rt(e),i=nt(e);if(void 0!==t[i])return{[i]:Mr(e,t[i])};if(void 0!==t[e])return{[i]:Mr(e,t[e])};if(t[n]){const i=t[n];if(!bo(i))return{[n]:Mr(e,i)};yi(function(e){return`Position range does not support relative band size for ${e}.`}(n))}}function iu(e,n){const{config:i,encoding:r,markDef:o}=e,a=o.type,s=it(n),l=rt(n),c=r[n],u=r[s],f=e.getScaleComponent(n),d=f?f.get("type"):void 0,m=o.orient,p=r[l]??r.size??Nn("size",o,i,{vgChannel:l}),g=ot(n),h="bar"===a&&("x"===n?"vertical"===m:"horizontal"===m);return!Ro(c)||!(ln(c.bin)||cn(c.bin)||c.timeUnit&&!u)||p&&!bo(p)||r[g]||hr(d)?(Go(c)&&hr(d)||h)&&!u?function(e,n,i){const{markDef:r,encoding:o,config:a,stack:s}=i,l=r.orient,c=i.scaleName(n),u=i.getScaleComponent(n),f=rt(n),d=it(n),m=ot(n),p=i.scaleName(m),g=i.getScaleComponent(at(n)),h="horizontal"===l&&"y"===n||"vertical"===l&&"x"===n;let y;(o.size||r.size)&&(h?y=Hc("size",i,{vgChannel:f,defaultRef:Fn(r.size)}):yi(function(e){return`Cannot apply size to non-oriented mark "${e}".`}(r.type)));const v=!!y,b=To({channel:n,fieldDef:e,markDef:r,config:a,scaleType:(u||g)?.get("type"),useVlSizeChannel:h});y=y||{[f]:ru(f,p||c,g||u,a,b,!!e,r.type)};const x="band"===(u||g)?.get("type")&&bo(b)&&!v?"top":"middle",$=Zc(n,r,a,x),w="xc"===$||"yc"===$,{offset:k,offsetType:S}=Yc({channel:n,markDef:r,encoding:o,model:i,bandPosition:w?.5:0}),D=Nr({channel:n,channelDef:e,markDef:r,config:a,scaleName:c,scale:u,stack:s,offset:k,defaultRef:Qc({model:i,defaultPos:"mid",channel:n,scaleName:c,scale:u}),bandPosition:w?"encoding"===S?0:.5:yn(b)?{signal:`(1-${b})/2`}:bo(b)?(1-b.band)/2:0});if(f)return{[$]:D,...y};{const e=nt(d),n=y[f],i=k?{...n,offset:k}:n;return{[$]:D,[e]:t.isArray(D)?[D[0],{...D[1],offset:i}]:{...D,offset:i}}}}(c,n,e):tu(n,e,{defaultPos:"zeroOrMax",defaultPos2:"zeroOrMin"}):function(e){let{fieldDef:t,fieldDef2:n,channel:i,model:r}=e;const{config:o,markDef:a,encoding:s}=r,l=r.getScaleComponent(i),c=r.scaleName(i),u=l?l.get("type"):void 
0,f=l.get("reverse"),d=To({channel:i,fieldDef:t,markDef:a,config:o,scaleType:u}),m=r.component.axes[i]?.[0],p=m?.get("translate")??.5,g=zt(i)?Nn("binSpacing",a,o)??0:0,h=it(i),y=nt(i),v=nt(h),b=Pn("minBandSize",a,o),{offset:x}=Yc({channel:i,markDef:a,encoding:s,model:r,bandPosition:0}),{offset:$}=Yc({channel:h,markDef:a,encoding:s,model:r,bandPosition:0}),w=function(e){let{scaleName:t,fieldDef:n}=e;const i=ta(n,{expr:"datum"});return`abs(scale("${t}", ${ta(n,{expr:"datum",suffix:"end"})}) - scale("${t}", ${i}))`}({fieldDef:t,scaleName:c}),k=ou(i,g,f,p,x,b,w),S=ou(h,g,f,p,$??x,b,w),D=yn(d)?{signal:`(1-${d.signal})/2`}:bo(d)?(1-d.band)/2:.5,F=jo({fieldDef:t,fieldDef2:n,markDef:a,config:o});if(ln(t.bin)||t.timeUnit){const e=t.timeUnit&&.5!==F;return{[v]:au({fieldDef:t,scaleName:c,bandPosition:D,offset:S,useRectOffsetField:e}),[y]:au({fieldDef:t,scaleName:c,bandPosition:yn(D)?{signal:`1-${D.signal}`}:1-D,offset:k,useRectOffsetField:e})}}if(cn(t.bin)){const e=jr(t,c,{},{offset:S});if(Ro(n))return{[v]:e,[y]:jr(n,c,{},{offset:k})};if(un(t.bin)&&t.bin.step)return{[v]:e,[y]:{signal:`scale("${c}", ${ta(t,{expr:"datum"})} + ${t.bin.step})`,offset:k}}}return void yi(pi(h))}({fieldDef:c,fieldDef2:u,channel:n,model:e})}function ru(e,n,i,r,o,a,s){if(bo(o)){if(!i)return{mult:o.band,field:{group:e}};{const e=i.get("type");if("band"===e){let e=`bandwidth('${n}')`;1!==o.band&&(e=`${o.band} * ${e}`);const t=Pn("minBandSize",{type:s},r);return{signal:t?`max(${On(t)}, ${e})`:e}}1!==o.band&&(yi(function(e){return`Cannot use the relative band size with ${e} scale.`}(e)),o=void 0)}}else{if(yn(o))return o;if(o)return{value:o}}if(i){const e=i.get("range");if(vn(e)&&t.isNumber(e.step))return{value:e.step-2}}if(!a){const{bandPaddingInner:n,barBandPaddingInner:i,rectBandPaddingInner:o}=r.scale,a=U(n,"bar"===s?i:o);if(yn(a))return{signal:`(1 - (${a.signal})) * ${e}`};if(t.isNumber(a))return{signal:`${1-a} * ${e}`}}return{value:Cs(r.view,e)-2}}function ou(e,t,n,i,r,o,a){if(Ae(e))return 0;const s="x"===e||"y2"===e,l=s?-t/2:t/2;if(yn(n)||yn(r)||yn(i)||o){const e=On(n),t=On(r),c=On(i),u=On(o),f=o?`(${a} < ${u} ? ${s?"":"-"}0.5 * (${u} - (${a})) : ${l})`:l;return{signal:(c?`${c} + `:"")+(e?`(${e} ? 
-1 : 1) * `:"")+(t?`(${t} + ${f})`:f)}}return r=r||0,i+(n?-r-l:+r+l)}function au(e){let{fieldDef:t,scaleName:n,bandPosition:i,offset:r,useRectOffsetField:o}=e;return Tr({scaleName:n,fieldOrDatumDef:t,bandPosition:i,offset:r,...o?{startSuffix:bc,endSuffix:xc}:{}})}const su=new Set(["aria","width","height"]);function lu(e,t){const{fill:n,stroke:i}="include"===t.color?Vc(e):{};return{...uu(e.markDef,t),...cu(e,"fill",n),...cu(e,"stroke",i),...Hc("opacity",e),...Hc("fillOpacity",e),...Hc("strokeOpacity",e),...Hc("strokeWidth",e),...Hc("strokeDash",e),...Gc(e),...qc(e),...Mc(e,"href"),...Wc(e)}}function cu(e,n,i){const{config:r,mark:o,markDef:a}=e;if("hide"===Nn("invalid",a,r)&&i&&!fo(o)){const r=function(e,t){let{invalid:n=!1,channels:i}=t;const r=i.reduce(((t,n)=>{const i=e.getScaleComponent(n);if(i){const r=i.get("type"),o=e.vgField(n,{expr:"datum"});o&&yr(r)&&(t[o]=!0)}return t}),{}),o=D(r);if(o.length>0){const e=n?"||":"&&";return o.map((e=>Ar(e,n))).join(` ${e} `)}return}(e,{invalid:!0,channels:It});if(r)return{[n]:[{test:r,value:null},...t.array(i)]}}return i?{[n]:i}:{}}function uu(e,t){return xn.reduce(((n,i)=>(su.has(i)||void 0===e[i]||"ignore"===t[i]||(n[i]=Fn(e[i])),n)),{})}function fu(e){const{config:t,markDef:n}=e;if(Nn("invalid",n,t)){const t=function(e,t){let{invalid:n=!1,channels:i}=t;const r=i.reduce(((t,n)=>{const i=e.getScaleComponent(n);if(i){const r=i.get("type"),o=e.vgField(n,{expr:"datum",binSuffix:e.stack?.impute?"mid":void 0});o&&yr(r)&&(t[o]=!0)}return t}),{}),o=D(r);if(o.length>0){const e=n?"||":"&&";return o.map((e=>Ar(e,n))).join(` ${e} `)}return}(e,{channels:Ft});if(t)return{defined:{signal:t}}}return{}}function du(e,t){if(void 0!==t)return{[e]:Fn(t)}}const mu="voronoi",pu={defined:e=>"point"===e.type&&e.nearest,parse:(e,t)=>{if(t.events)for(const n of t.events)n.markname=e.getName(mu)},marks:(e,t,n)=>{const{x:i,y:r}=t.project.hasChannel,o=e.mark;if(fo(o))return yi(`The "nearest" transform is not supported for ${o} marks.`),n;const a={name:e.getName(mu),type:"path",interactive:!0,from:{data:e.getName("marks")},encode:{update:{fill:{value:"transparent"},strokeWidth:{value:.35},stroke:{value:"transparent"},isVoronoi:{value:!0},...qc(e,{reactiveGeom:!0})}},transform:[{type:"voronoi",x:{expr:i||!r?"datum.datum.x || 0":"0"},y:{expr:r||!i?"datum.datum.y || 0":"0"},size:[e.getSizeSignalRef("width"),e.getSizeSignalRef("height")]}]};let s=0,l=!1;return n.forEach(((t,n)=>{const i=t.name??"";i===e.component.mark[0].name?s=n:i.indexOf(mu)>=0&&(l=!0)})),l||n.splice(s+1,0,a),n}},gu={defined:e=>"point"===e.type&&"global"===e.resolve&&e.bind&&"scales"!==e.bind&&!vs(e.bind),parse:(e,t,n)=>Tu(t,n),topLevelSignals:(e,n,i)=>{const r=n.name,o=n.project,a=n.bind,s=n.init&&n.init[0],l=pu.defined(n)?"(item().isVoronoi ? datum.datum : datum)":"datum";return o.items.forEach(((e,o)=>{const c=_(`${r}_${e.field}`);i.filter((e=>e.name===c)).length||i.unshift({name:c,...s?{init:cc(s[o])}:{value:null},on:n.events?[{events:n.events,update:`datum && item().mark.marktype !== 'group' ? ${l}[${t.stringValue(e.field)}] : null`}]:[],bind:a[e.field]??a[e.channel]??a})})),i},signals:(e,t,n)=>{const i=t.name,r=t.project,o=n.filter((e=>e.name===i+_u))[0],a=i+Sc,s=r.items.map((e=>_(`${i}_${e.field}`))),l=s.map((e=>`${e} !== null`)).join(" && ");return s.length&&(o.update=`${l} ? 
{fields: ${a}, values: [${s.join(", ")}]} : null`),delete o.value,delete o.on,n}},hu="_toggle",yu={defined:e=>"point"===e.type&&!!e.toggle,signals:(e,t,n)=>n.concat({name:t.name+hu,value:!1,on:[{events:t.events,update:t.toggle}]}),modifyExpr:(e,t)=>{const n=t.name+_u,i=t.name+hu;return`${i} ? null : ${n}, `+("global"===t.resolve?`${i} ? null : true, `:`${i} ? null : {unit: ${Au(e)}}, `)+`${i} ? ${n} : null`}},vu={defined:e=>void 0!==e.clear&&!1!==e.clear,parse:(e,n)=>{n.clear&&(n.clear=t.isString(n.clear)?t.parseSelector(n.clear,"view"):n.clear)},topLevelSignals:(e,t,n)=>{if(gu.defined(t))for(const e of t.project.items){const i=n.findIndex((n=>n.name===_(`${t.name}_${e.field}`)));-1!==i&&n[i].on.push({events:t.clear,update:"null"})}return n},signals:(e,t,n)=>{function i(e,i){-1!==e&&n[e].on&&n[e].on.push({events:t.clear,update:i})}if("interval"===t.type)for(const e of t.project.items){const t=n.findIndex((t=>t.name===e.signals.visual));if(i(t,"[0, 0]"),-1===t){i(n.findIndex((t=>t.name===e.signals.data)),"null")}}else{let e=n.findIndex((e=>e.name===t.name+_u));i(e,"null"),yu.defined(t)&&(e=n.findIndex((e=>e.name===t.name+hu)),i(e,"false"))}return n}},bu={defined:e=>{const t="global"===e.resolve&&e.bind&&vs(e.bind),n=1===e.project.items.length&&e.project.items[0].field!==hs;return t&&!n&&yi("Legend bindings are only supported for selections over an individual field or encoding channel."),t&&n},parse:(e,n,i)=>{const r=l(i);if(r.select=t.isString(r.select)?{type:r.select,toggle:n.toggle}:{...r.select,toggle:n.toggle},Tu(n,r),t.isObject(i.select)&&(i.select.on||i.select.clear)){const e='event.item && indexof(event.item.mark.role, "legend") < 0';for(const i of n.events)i.filter=t.array(i.filter??[]),i.filter.includes(e)||i.filter.push(e)}const o=bs(n.bind)?n.bind.legend:"click",a=t.isString(o)?t.parseSelector(o,"view"):t.array(o);n.bind={legend:{merge:a}}},topLevelSignals:(e,t,n)=>{const i=t.name,r=bs(t.bind)&&t.bind.legend,o=e=>t=>{const n=l(t);return n.markname=e,n};for(const e of t.project.items){if(!e.hasLegend)continue;const a=`${_(e.field)}_legend`,s=`${i}_${a}`;if(0===n.filter((e=>e.name===s)).length){const e=r.merge.map(o(`${a}_symbols`)).concat(r.merge.map(o(`${a}_labels`))).concat(r.merge.map(o(`${a}_entries`)));n.unshift({name:s,...t.init?{}:{value:null},on:[{events:e,update:"isDefined(datum.value) ? datum.value : item().items[0].items[0].datum.value",force:!0},{events:r.merge,update:`!event.item || !datum ? null : ${s}`,force:!0}]})}}return n},signals:(e,t,n)=>{const i=t.name,r=t.project,o=n.find((e=>e.name===i+_u)),a=i+Sc,s=r.items.filter((e=>e.hasLegend)).map((e=>_(`${i}_${_(e.field)}_legend`))),l=`${s.map((e=>`${e} !== null`)).join(" && ")} ? 
{fields: ${a}, values: [${s.join(", ")}]} : null`;t.events&&s.length>0?o.on.push({events:s.map((e=>({signal:e}))),update:l}):s.length>0&&(o.update=l,delete o.value,delete o.on);const c=n.find((e=>e.name===i+hu)),u=bs(t.bind)&&t.bind.legend;return c&&(t.events?c.on.push({...c.on[0],events:u}):c.on[0].events=u),n}};const xu="_translate_anchor",$u="_translate_delta",wu={defined:e=>"interval"===e.type&&e.translate,signals:(e,n,i)=>{const r=n.name,o=zc.defined(n),a=r+xu,{x:s,y:l}=n.project.hasChannel;let c=t.parseSelector(n.translate,"scope");return o||(c=c.map((e=>(e.between[0].markname=r+Cc,e)))),i.push({name:a,value:{},on:[{events:c.map((e=>e.between[0])),update:"{x: x(unit), y: y(unit)"+(void 0!==s?`, extent_x: ${o?Oc(e,Z):`slice(${s.signals.visual})`}`:"")+(void 0!==l?`, extent_y: ${o?Oc(e,ee):`slice(${l.signals.visual})`}`:"")+"}"}]},{name:r+$u,value:{},on:[{events:c,update:`{x: ${a}.x - x(unit), y: ${a}.y - y(unit)}`}]}),void 0!==s&&ku(e,n,s,"width",i),void 0!==l&&ku(e,n,l,"height",i),i}};function ku(e,t,n,i,r){const o=t.name,a=o+xu,s=o+$u,l=n.channel,c=zc.defined(t),u=r.filter((e=>e.name===n.signals[c?"data":"visual"]))[0],f=e.getSizeSignalRef(i).signal,d=e.getScaleComponent(l),m=d&&d.get("type"),p=d&&d.get("reverse"),g=`${a}.extent_${l}`,h=`${c&&d?"log"===m?"panLog":"symlog"===m?"panSymlog":"pow"===m?"panPow":"panLinear":"panLinear"}(${g}, ${`${c?l===Z?p?"":"-":p?"-":"":""}${s}.${l} / ${c?`${f}`:`span(${g})`}`}${c?"pow"===m?`, ${d.get("exponent")??1}`:"symlog"===m?`, ${d.get("constant")??1}`:"":""})`;u.on.push({events:{signal:s},update:c?h:`clampRange(${h}, 0, ${f})`})}const Su="_zoom_anchor",Du="_zoom_delta",Fu={defined:e=>"interval"===e.type&&e.zoom,signals:(e,n,i)=>{const r=n.name,o=zc.defined(n),a=r+Du,{x:s,y:l}=n.project.hasChannel,c=t.stringValue(e.scaleName(Z)),u=t.stringValue(e.scaleName(ee));let f=t.parseSelector(n.zoom,"scope");return o||(f=f.map((e=>(e.markname=r+Cc,e)))),i.push({name:r+Su,on:[{events:f,update:o?"{"+[c?`x: invert(${c}, x(unit))`:"",u?`y: invert(${u}, y(unit))`:""].filter((e=>e)).join(", ")+"}":"{x: x(unit), y: y(unit)}"}]},{name:a,on:[{events:f,force:!0,update:"pow(1.001, event.deltaY * pow(16, event.deltaMode))"}]}),void 0!==s&&zu(e,n,s,"width",i),void 0!==l&&zu(e,n,l,"height",i),i}};function zu(e,t,n,i,r){const o=t.name,a=n.channel,s=zc.defined(t),l=r.filter((e=>e.name===n.signals[s?"data":"visual"]))[0],c=e.getSizeSignalRef(i).signal,u=e.getScaleComponent(a),f=u&&u.get("type"),d=s?Oc(e,a):l.name,m=o+Du,p=`${s&&u?"log"===f?"zoomLog":"symlog"===f?"zoomSymlog":"pow"===f?"zoomPow":"zoomLinear":"zoomLinear"}(${d}, ${`${o}${Su}.${a}`}, ${m}${s?"pow"===f?`, ${u.get("exponent")??1}`:"symlog"===f?`, ${u.get("constant")??1}`:"":""})`;l.on.push({events:{signal:m},update:s?p:`clampRange(${p}, 0, ${c})`})}const Ou="_store",_u="_tuple",Cu="_modify",Nu="vlSelectionResolve",Pu=[Tc,jc,Fc,yu,gu,zc,bu,vu,wu,Fu,pu];function Au(e){let{escape:n}=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{escape:!0},i=n?t.stringValue(e.name):e.name;const r=function(e){let t=e.parent;for(;t&&!hm(t);)t=t.parent;return t}(e);if(r){const{facet:e}=r;for(const n of Re)e[n]&&(i+=` + '__facet_${n}_' + (facet[${t.stringValue(r.vgField(n))}])`)}return i}function ju(e){return F(e.component.selection??{}).reduce(((e,t)=>e||t.project.hasSelectionId),!1)}function Tu(e,n){!t.isString(n.select)&&n.select.on||delete e.events,!t.isString(n.select)&&n.select.clear||delete e.clear,!t.isString(n.select)&&n.select.toggle||delete e.toggle}function Eu(e){const 
t=[];return"Identifier"===e.type?[e.name]:"Literal"===e.type?[e.value]:("MemberExpression"===e.type&&(t.push(...Eu(e.object)),t.push(...Eu(e.property))),t)}function Mu(e){return"MemberExpression"===e.object.type?Mu(e.object):"datum"===e.object.name}function Lu(e){const n=t.parseExpression(e),i=new Set;return n.visit((e=>{"MemberExpression"===e.type&&Mu(e)&&i.add(Eu(e).slice(1).join("."))})),i}class qu extends pc{clone(){return new qu(null,this.model,l(this.filter))}constructor(e,t,n){super(e),this.model=t,this.filter=n,this.expr=Wu(this.model,this.filter,this),this._dependentFields=Lu(this.expr)}dependentFields(){return this._dependentFields}producedFields(){return new Set}assemble(){return{type:"filter",expr:this.expr}}hash(){return`Filter ${this.expr}`}}function Uu(e,n,i){let r=arguments.length>3&&void 0!==arguments[3]?arguments[3]:"datum";const o=t.isString(n)?n:n.param,a=_(o),s=t.stringValue(a+Ou);let l;try{l=e.getSelectionComponent(a,o)}catch(e){return`!!${a}`}if(l.project.timeUnit){const t=i??e.component.data.raw,n=l.project.timeUnit.clone();t.parent?n.insertAsParentOf(t):t.parent=n}const c=`${l.project.hasSelectionId?"vlSelectionIdTest(":"vlSelectionTest("}${s}, ${r}${"global"===l.resolve?")":`, ${t.stringValue(l.resolve)})`}`,u=`length(data(${s}))`;return!1===n.empty?`${u} && ${c}`:`!${u} || ${c}`}function Ru(e,n,i){const r=_(n),o=i.encoding;let a,s=i.field;try{a=e.getSelectionComponent(r,n)}catch(e){return r}if(o||s){if(o&&!s){const e=a.project.items.filter((e=>e.channel===o));!e.length||e.length>1?(s=a.project.items[0].field,yi((e.length?"Multiple ":"No ")+`matching ${t.stringValue(o)} encoding found for selection ${t.stringValue(i.param)}. `+`Using "field": ${t.stringValue(s)}.`)):s=e[0].field}}else s=a.project.items[0].field,a.project.items.length>1&&yi(`A "field" or "encoding" must be specified when using a selection as a scale domain. Using "field": ${t.stringValue(s)}.`);return`${a.name}[${t.stringValue(E(s))}]`}function Wu(e,n,i){return C(n,(n=>t.isString(n)?n:function(e){return e?.param}(n)?Uu(e,n,i):Xi(n)))}function Bu(e,t,n,i){e.encode??={},e.encode[t]??={},e.encode[t].update??={},e.encode[t].update[n]=i}function Iu(e,n,i){let r=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{header:!1};const{disable:o,orient:a,scale:s,labelExpr:l,title:c,zindex:u,...f}=e.combine();if(!o){for(const e in f){const i=Sa[e],r=f[e];if(i&&i!==n&&"both"!==i)delete f[e];else if(wa(r)){const{condition:n,...i}=r,o=t.array(n),a=$a[e];if(a){const{vgProp:t,part:n}=a;Bu(f,n,t,[...o.map((e=>{const{test:t,...n}=e;return{test:Wu(null,t),...n}})),i]),delete f[e]}else if(null===a){const t={signal:o.map((e=>{const{test:t,...n}=e;return`${Wu(null,t)} ? 
${zn(n)} : `})).join("")+zn(i)};f[e]=t}}else if(yn(r)){const t=$a[e];if(t){const{vgProp:n,part:i}=t;Bu(f,i,n,r),delete f[e]}}p(["labelAlign","labelBaseline"],e)&&null===f[e]&&delete f[e]}if("grid"===n){if(!f.grid)return;if(f.encode){const{grid:e}=f.encode;f.encode={...e?{grid:e}:{}},S(f.encode)&&delete f.encode}return{scale:s,orient:a,...f,domain:!1,labels:!1,aria:!1,maxExtent:0,minExtent:0,ticks:!1,zindex:U(u,0)}}{if(!r.header&&e.mainExtracted)return;if(void 0!==l){let e=l;f.encode?.labels?.update&&yn(f.encode.labels.update.text)&&(e=M(l,"datum.label",f.encode.labels.update.text.signal)),Bu(f,"labels","text",{signal:e})}if(null===f.labelAlign&&delete f.labelAlign,f.encode){for(const t of ka)e.hasAxisPart(t)||delete f.encode[t];S(f.encode)&&delete f.encode}const n=function(e,n){if(e)return t.isArray(e)&&!hn(e)?e.map((e=>la(e,n))).join(", "):e}(c,i);return{scale:s,orient:a,grid:!1,...n?{title:n}:{},...f,...!1===i.aria?{aria:!1}:{},zindex:U(u,0)}}}}function Hu(e){const{axes:t}=e.component,n=[];for(const i of Ft)if(t[i])for(const r of t[i])if(!r.get("disable")&&!r.get("gridScale")){const t="x"===i?"height":"width",r=e.getSizeSignalRef(t).signal;t!==r&&n.push({name:t,update:r})}return n}function Vu(e,t,n,i){return Object.assign.apply(null,[{},...e.map((e=>{if("axisOrient"===e){const e="x"===n?"bottom":"left",r=t["x"===n?"axisBottom":"axisLeft"]||{},o=t["x"===n?"axisTop":"axisRight"]||{},a=new Set([...D(r),...D(o)]),s={};for(const t of a.values())s[t]={signal:`${i.signal} === "${e}" ? ${On(r[t])} : ${On(o[t])}`};return s}return t[e]}))])}function Gu(e,n){const i=[{}];for(const r of e){let e=n[r]?.style;if(e){e=t.array(e);for(const t of e)i.push(n.style[t])}}return Object.assign.apply(null,i)}function Yu(e,t,n){let i=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{};const r=jn(e,n,t);if(void 0!==r)return{configFrom:"style",configValue:r};for(const t of["vlOnlyAxisConfig","vgAxisConfig","axisConfigStyle"])if(void 0!==i[t]?.[e])return{configFrom:t,configValue:i[t][e]};return{}}const Xu={scale:e=>{let{model:t,channel:n}=e;return t.scaleName(n)},format:e=>{let{format:t}=e;return t},formatType:e=>{let{formatType:t}=e;return t},grid:e=>{let{fieldOrDatumDef:t,axis:n,scaleType:i}=e;return n.grid??function(e,t){return!hr(e)&&Ro(t)&&!ln(t?.bin)&&!cn(t?.bin)}(i,t)},gridScale:e=>{let{model:t,channel:n}=e;return function(e,t){const n="x"===t?"y":"x";if(e.getScaleComponent(n))return e.scaleName(n);return}(t,n)},labelAlign:e=>{let{axis:t,labelAngle:n,orient:i,channel:r}=e;return t.labelAlign||Ku(n,i,r)},labelAngle:e=>{let{labelAngle:t}=e;return t},labelBaseline:e=>{let{axis:t,labelAngle:n,orient:i,channel:r}=e;return t.labelBaseline||Ju(n,i,r)},labelFlush:e=>{let{axis:t,fieldOrDatumDef:n,channel:i}=e;return t.labelFlush??function(e,t){if("x"===t&&p(["quantitative","temporal"],e))return!0;return}(n.type,i)},labelOverlap:e=>{let{axis:n,fieldOrDatumDef:i,scaleType:r}=e;return n.labelOverlap??function(e,n,i,r){if(i&&!t.isObject(r)||"nominal"!==e&&"ordinal"!==e)return"log"!==n&&"symlog"!==n||"greedy";return}(i.type,r,Ro(i)&&!!i.timeUnit,Ro(i)?i.sort:void 0)},orient:e=>{let{orient:t}=e;return t},tickCount:e=>{let{channel:t,model:n,axis:i,fieldOrDatumDef:r,scaleType:o}=e;const a="x"===t?"width":"y"===t?"height":void 0,s=a?n.getSizeSignalRef(a):void 0;return 
i.tickCount??function(e){let{fieldOrDatumDef:t,scaleType:n,size:i,values:r}=e;if(!r&&!hr(n)&&"log"!==n){if(Ro(t)){if(ln(t.bin))return{signal:`ceil(${i.signal}/10)`};if(t.timeUnit&&p(["month","hours","day","quarter"],Ei(t.timeUnit)?.unit))return}return{signal:`ceil(${i.signal}/40)`}}return}({fieldOrDatumDef:r,scaleType:o,size:s,values:i.values})},tickMinStep:function(e){let{format:t,fieldOrDatumDef:n}=e;if("d"===t)return 1;if(Ro(n)){const{timeUnit:e}=n;if(e){const t=Mi(e);if(t)return{signal:t}}}return},title:e=>{let{axis:t,model:n,channel:i}=e;if(void 0!==t.title)return t.title;const r=Zu(n,i);if(void 0!==r)return r;const o=n.typedFieldDef(i),a="x"===i?"x2":"y2",s=n.fieldDef(a);return En(o?[Po(o)]:[],Ro(s)?[Po(s)]:[])},values:e=>{let{axis:n,fieldOrDatumDef:i}=e;return function(e,n){const i=e.values;if(t.isArray(i))return ba(n,i);if(yn(i))return i;return}(n,i)},zindex:e=>{let{axis:t,fieldOrDatumDef:n,mark:i}=e;return t.zindex??function(e,t){if("rect"===e&&na(t))return 1;return 0}(i,n)}};function Qu(e){return`(((${e.signal} % 360) + 360) % 360)`}function Ju(e,t,n,i){if(void 0!==e){if("x"===n){if(yn(e)){const n=Qu(e);return{signal:`(45 < ${n} && ${n} < 135) || (225 < ${n} && ${n} < 315) ? "middle" :(${n} <= 45 || 315 <= ${n}) === ${yn(t)?`(${t.signal} === "top")`:"top"===t} ? "bottom" : "top"`}}if(45{if(Qo(t)&&Oo(t.sort)){const{field:i,timeUnit:r}=t,o=t.sort,a=o.map(((e,t)=>`${Xi({field:i,timeUnit:r,equal:e})} ? ${t} : `)).join("")+o.length;e=new ef(e,{calculate:a,as:tf(t,n,{forAs:!0})})}})),e}producedFields(){return new Set([this.transform.as])}dependentFields(){return this._dependentFields}assemble(){return{type:"formula",expr:this.transform.calculate,as:this.transform.as}}hash(){return`Calculate ${d(this.transform)}`}}function tf(e,t,n){return ta(e,{prefix:t,suffix:"sort_index",...n})}function nf(e,t){return p(["top","bottom"],t)?"column":p(["left","right"],t)||"row"===e?"row":"column"}function rf(e,t,n,i){const r="row"===i?n.headerRow:"column"===i?n.headerColumn:n.headerFacet;return U((t||{})[e],r[e],n.header[e])}function of(e,t,n,i){const r={};for(const o of e){const e=rf(o,t||{},n,i);void 0!==e&&(r[o]=e)}return r}const af=["row","column"],sf=["header","footer"];function lf(e,t){const n=e.component.layoutHeaders[t].title,i=e.config?e.config:void 0,r=e.component.layoutHeaders[t].facetFieldDef?e.component.layoutHeaders[t].facetFieldDef:void 0,{titleAnchor:o,titleAngle:a,titleOrient:s}=of(["titleAnchor","titleAngle","titleOrient"],r.header,i,t),l=nf(t,s),c=H(a);return{name:`${t}-title`,type:"group",role:`${l}-title`,title:{text:n,..."row"===t?{orient:"left"}:{},style:"guide-title",...uf(c,l),...cf(l,c,o),...yf(i,r,t,ds,us)}}}function cf(e,t){switch(arguments.length>2&&void 0!==arguments[2]?arguments[2]:"middle"){case"start":return{align:"left"};case"end":return{align:"right"}}const n=Ku(t,"row"===e?"left":"top","row"===e?"y":"x");return n?{align:n}:{}}function uf(e,t){const n=Ju(e,"row"===t?"left":"top","row"===t?"y":"x",!0);return n?{baseline:n}:{}}function ff(e,t){const n=e.component.layoutHeaders[t],i=[];for(const r of sf)if(n[r])for(const o of n[r]){const a=pf(e,t,r,n,o);null!=a&&i.push(a)}return i}function df(e,n){const{sort:i}=e;return zo(i)?{field:ta(i,{expr:"datum"}),order:i.order??"ascending"}:t.isArray(i)?{field:tf(e,n,{expr:"datum"}),order:"ascending"}:{field:ta(e,{expr:"datum"}),order:i??"ascending"}}function 
mf(e,t,n){const{format:i,formatType:r,labelAngle:o,labelAnchor:a,labelOrient:s,labelExpr:l}=of(["format","formatType","labelAngle","labelAnchor","labelOrient","labelExpr"],e.header,n,t),c=Rr({fieldOrDatumDef:e,format:i,formatType:r,expr:"parent",config:n}).signal,u=nf(t,s);return{text:{signal:l?M(M(l,"datum.label",c),"datum.value",ta(e,{expr:"parent"})):c},..."row"===t?{orient:"left"}:{},style:"guide-label",frame:"group",...uf(o,u),...cf(u,o,a),...yf(n,e,t,ms,fs)}}function pf(e,t,n,i,r){if(r){let o=null;const{facetFieldDef:a}=i,s=e.config?e.config:void 0;if(a&&r.labels){const{labelOrient:e}=of(["labelOrient"],a.header,s,t);("row"===t&&!p(["top","bottom"],e)||"column"===t&&!p(["left","right"],e))&&(o=mf(a,t,s))}const l=hm(e)&&!_o(e.facet),c=r.axes,u=c?.length>0;if(o||u){const s="row"===t?"height":"width";return{name:e.getName(`${t}_${n}`),type:"group",role:`${t}-${n}`,...i.facetFieldDef?{from:{data:e.getName(`${t}_domain`)},sort:df(a,t)}:{},...u&&l?{from:{data:e.getName(`facet_domain_${t}`)}}:{},...o?{title:o}:{},...r.sizeSignal?{encode:{update:{[s]:r.sizeSignal}}}:{},...u?{axes:c}:{}}}}return null}const gf={column:{start:0,end:1},row:{start:1,end:0}};function hf(e,t){return gf[t][e]}function yf(e,t,n,i,r){const o={};for(const a of i){if(!r[a])continue;const i=rf(a,t?.header,e,n);void 0!==i&&(o[r[a]]=i)}return o}function vf(e){return[...bf(e,"width"),...bf(e,"height"),...bf(e,"childWidth"),...bf(e,"childHeight")]}function bf(e,t){const n="width"===t?"x":"y",i=e.component.layoutSize.get(t);if(!i||"merged"===i)return[];const r=e.getSizeSignalRef(t).signal;if("step"===i){const t=e.getScaleComponent(n);if(t){const i=t.get("type"),o=t.get("range");if(hr(i)&&vn(o)){const i=e.scaleName(n);if(hm(e.parent)){if("independent"===e.parent.component.resolve.scale[n])return[xf(i,o)]}return[xf(i,o),{name:r,update:$f(i,t,`domain('${i}').length`)}]}}throw new Error("layout size is step although width/height is not step.")}if("container"==i){const t=r.endsWith("width"),n=t?"containerSize()[0]":"containerSize()[1]",i=`isFinite(${n}) ? 
${n} : ${_s(e.config.view,t?"width":"height")}`;return[{name:r,init:i,on:[{update:i,events:"window:resize"}]}]}return[{name:r,value:i}]}function xf(e,t){const n=`${e}_step`;return yn(t.step)?{name:n,update:t.step.signal}:{name:n,value:t.step}}function $f(e,t,n){const i=t.get("type"),r=t.get("padding"),o=U(t.get("paddingOuter"),r);let a=t.get("paddingInner");return a="band"===i?void 0!==a?a:r:1,`bandspace(${n}, ${On(a)}, ${On(o)}) * ${e}_step`}function wf(e){return"childWidth"===e?"width":"childHeight"===e?"height":e}function kf(e,t){return D(e).reduce(((n,i)=>{const r=e[i];return{...n,...Ec(t,r,i,(e=>Fn(e.value)))}}),{})}function Sf(e,t){if(hm(t))return"theta"===e?"independent":"shared";if(vm(t))return"shared";if(ym(t))return zt(e)||"theta"===e||"radius"===e?"independent":"shared";throw new Error("invalid model type for resolve")}function Df(e,t){const n=e.scale[t],i=zt(t)?"axis":"legend";return"independent"===n?("shared"===e[i][t]&&yi(function(e){return`Setting the scale to be independent for "${e}" means we also have to set the guide (axis or legend) to be independent.`}(t)),"independent"):e[i][t]||"shared"}const Ff=D({aria:1,clipHeight:1,columnPadding:1,columns:1,cornerRadius:1,description:1,direction:1,fillColor:1,format:1,formatType:1,gradientLength:1,gradientOpacity:1,gradientStrokeColor:1,gradientStrokeWidth:1,gradientThickness:1,gridAlign:1,labelAlign:1,labelBaseline:1,labelColor:1,labelFont:1,labelFontSize:1,labelFontStyle:1,labelFontWeight:1,labelLimit:1,labelOffset:1,labelOpacity:1,labelOverlap:1,labelPadding:1,labelSeparation:1,legendX:1,legendY:1,offset:1,orient:1,padding:1,rowPadding:1,strokeColor:1,symbolDash:1,symbolDashOffset:1,symbolFillColor:1,symbolLimit:1,symbolOffset:1,symbolOpacity:1,symbolSize:1,symbolStrokeColor:1,symbolStrokeWidth:1,symbolType:1,tickCount:1,tickMinStep:1,title:1,titleAlign:1,titleAnchor:1,titleBaseline:1,titleColor:1,titleFont:1,titleFontSize:1,titleFontStyle:1,titleFontWeight:1,titleLimit:1,titleLineHeight:1,titleOpacity:1,titleOrient:1,titlePadding:1,type:1,values:1,zindex:1,disable:1,labelExpr:1,selections:1,opacity:1,shape:1,stroke:1,fill:1,size:1,strokeWidth:1,strokeDash:1,encode:1});class zf extends Gl{}const Of={symbols:function(e,n){let{fieldOrDatumDef:i,model:r,channel:o,legendCmpt:a,legendType:s}=n;if("symbol"!==s)return;const{markDef:l,encoding:c,config:u,mark:f}=r,d=l.filled&&"trail"!==f;let m={..._n({},r,ho),...Vc(r,{filled:d})};const p=a.get("symbolOpacity")??u.legend.symbolOpacity,g=a.get("symbolFillColor")??u.legend.symbolFillColor,h=a.get("symbolStrokeColor")??u.legend.symbolStrokeColor,y=void 0===p?_f(c.opacity)??l.opacity:void 0;if(m.fill)if("fill"===o||d&&o===me)delete m.fill;else if(m.fill.field)g?delete m.fill:(m.fill=Fn(u.legend.symbolBaseFillColor??"black"),m.fillOpacity=Fn(y??1));else if(t.isArray(m.fill)){const e=Cf(c.fill??c.color)??l.fill??(d&&l.color);e&&(m.fill=Fn(e))}if(m.stroke)if("stroke"===o||!d&&o===me)delete m.stroke;else if(m.stroke.field||h)delete m.stroke;else if(t.isArray(m.stroke)){const e=U(Cf(c.stroke||c.color),l.stroke,d?l.color:void 0);e&&(m.stroke={value:e})}if(o!==be){const e=Ro(i)&&Pf(r,a,i);e?m.opacity=[{test:e,...Fn(y??1)},Fn(u.legend.unselectedOpacity)]:y&&(m.opacity=Fn(y))}return m={...m,...e},S(m)?void 0:m},gradient:function(e,t){let{model:n,legendType:i,legendCmpt:r}=t;if("gradient"!==i)return;const{config:o,markDef:a,encoding:s}=n;let l={};const c=void 0===(r.get("gradientOpacity")??o.legend.gradientOpacity)?_f(s.opacity)||a.opacity:void 0;c&&(l.opacity=Fn(c));return l={...l,...e},S(l)?void 
0:l},labels:function(e,t){let{fieldOrDatumDef:n,model:i,channel:r,legendCmpt:o}=t;const a=i.legend(r)||{},s=i.config,l=Ro(n)?Pf(i,o,n):void 0,c=l?[{test:l,value:1},{value:s.legend.unselectedOpacity}]:void 0,{format:u,formatType:f}=a;let d;Lr(f)?d=Br({fieldOrDatumDef:n,field:"datum.value",format:u,formatType:f,config:s}):void 0===u&&void 0===f&&s.customFormatTypes&&("quantitative"===n.type&&s.numberFormatType?d=Br({fieldOrDatumDef:n,field:"datum.value",format:s.numberFormat,formatType:s.numberFormatType,config:s}):"temporal"===n.type&&s.timeFormatType&&Ro(n)&&void 0===n.timeUnit&&(d=Br({fieldOrDatumDef:n,field:"datum.value",format:s.timeFormat,formatType:s.timeFormatType,config:s})));const m={...c?{opacity:c}:{},...d?{text:d}:{},...e};return S(m)?void 0:m},entries:function(e,t){let{legendCmpt:n}=t;const i=n.get("selections");return i?.length?{...e,fill:{value:"transparent"}}:e}};function _f(e){return Nf(e,((e,t)=>Math.max(e,t.value)))}function Cf(e){return Nf(e,((e,t)=>U(e,t.value)))}function Nf(e,n){return function(e){const n=e?.condition;return!!n&&(t.isArray(n)||Xo(n))}(e)?t.array(e.condition).reduce(n,e.value):Xo(e)?e.value:void 0}function Pf(e,n,i){const r=n.get("selections");if(!r?.length)return;const o=t.stringValue(i.field);return r.map((e=>`(!length(data(${t.stringValue(_(e)+Ou)})) || (${e}[${o}] && indexof(${e}[${o}], datum.value) >= 0))`)).join(" || ")}const Af={direction:e=>{let{direction:t}=e;return t},format:e=>{let{fieldOrDatumDef:t,legend:n,config:i}=e;const{format:r,formatType:o}=n;return Ir(t,t.type,r,o,i,!1)},formatType:e=>{let{legend:t,fieldOrDatumDef:n,scaleType:i}=e;const{formatType:r}=t;return Hr(r,n,i)},gradientLength:e=>{const{legend:t,legendConfig:n}=e;return t.gradientLength??n.gradientLength??function(e){let{legendConfig:t,model:n,direction:i,orient:r,scaleType:o}=e;const{gradientHorizontalMaxLength:a,gradientHorizontalMinLength:s,gradientVerticalMaxLength:l,gradientVerticalMinLength:c}=t;if(vr(o))return"horizontal"===i?"top"===r||"bottom"===r?Ef(n,"width",s,a):s:Ef(n,"height",c,l);return}(e)},labelOverlap:e=>{let{legend:t,legendConfig:n,scaleType:i}=e;return t.labelOverlap??n.labelOverlap??function(e){if(p(["quantile","threshold","log","symlog"],e))return"greedy";return}(i)},symbolType:e=>{let{legend:t,markDef:n,channel:i,encoding:r}=e;return t.symbolType??function(e,t,n,i){if("shape"!==t){const e=Cf(n)??i;if(e)return e}switch(e){case"bar":case"rect":case"image":case"square":return"square";case"line":case"trail":case"rule":return"stroke";case"arc":case"point":case"circle":case"tick":case"geoshape":case"area":case"text":return"circle"}}(n.type,i,r.shape,n.shape)},title:e=>{let{fieldOrDatumDef:t,config:n}=e;return aa(t,n,{allowDisabling:!0})},type:e=>{let{legendType:t,scaleType:n,channel:i}=e;if(qe(i)&&vr(n)){if("gradient"===t)return}else if("symbol"===t)return;return t},values:e=>{let{fieldOrDatumDef:n,legend:i}=e;return function(e,n){const i=e.values;if(t.isArray(i))return ba(n,i);if(yn(i))return i;return}(i,n)}};function jf(e){const{legend:t}=e;return U(t.type,function(e){let{channel:t,timeUnit:n,scaleType:i}=e;if(qe(t)){if(p(["quarter","month","day"],n))return"symbol";if(vr(i))return"gradient"}return"symbol"}(e))}function Tf(e){let{legendConfig:t,legendType:n,orient:i,legend:r}=e;return r.direction??t[n?"gradientDirection":"symbolDirection"]??function(e,t){switch(e){case"top":case"bottom":return"horizontal";case"left":case"right":case"none":case void 0:return;default:return"gradient"===t?"horizontal":void 0}}(i,n)}function 
Ef(e,t,n,i){return{signal:`clamp(${e.getSizeSignalRef(t).signal}, ${n}, ${i})`}}function Mf(e){const t=gm(e)?function(e){const{encoding:t}=e,n={};for(const i of[me,...gs]){const r=fa(t[i]);r&&e.getScaleComponent(i)&&(i===he&&Ro(r)&&r.type===rr||(n[i]=qf(e,i)))}return n}(e):function(e){const{legends:t,resolve:n}=e.component;for(const i of e.children){Mf(i);for(const r of D(i.component.legends))n.legend[r]=Df(e.component.resolve,r),"shared"===n.legend[r]&&(t[r]=Uf(t[r],i.component.legends[r]),t[r]||(n.legend[r]="independent",delete t[r]))}for(const i of D(t))for(const t of e.children)t.component.legends[i]&&"shared"===n.legend[i]&&delete t.component.legends[i];return t}(e);return e.component.legends=t,t}function Lf(e,t,n,i){switch(t){case"disable":return void 0!==n;case"values":return!!n?.values;case"title":if("title"===t&&e===i?.title)return!0}return e===(n||{})[t]}function qf(e,t){let n=e.legend(t);const{markDef:i,encoding:r,config:o}=e,a=o.legend,s=new zf({},function(e,t){const n=e.scaleName(t);if("trail"===e.mark){if("color"===t)return{stroke:n};if("size"===t)return{strokeWidth:n}}return"color"===t?e.markDef.filled?{fill:n}:{stroke:n}:{[t]:n}}(e,t));!function(e,t,n){const i=e.fieldDef(t)?.field;for(const r of F(e.component.selection??{})){const e=r.project.hasField[i]??r.project.hasChannel[t];if(e&&bu.defined(r)){const t=n.get("selections")??[];t.push(r.name),n.set("selections",t,!1),e.hasLegend=!0}}}(e,t,s);const l=void 0!==n?!n:a.disable;if(s.set("disable",l,void 0!==n),l)return s;n=n||{};const c=e.getScaleComponent(t).get("type"),u=fa(r[t]),f=Ro(u)?Ei(u.timeUnit)?.unit:void 0,d=n.orient||o.legend.orient||"right",m=jf({legend:n,channel:t,timeUnit:f,scaleType:c}),p={legend:n,channel:t,model:e,markDef:i,encoding:r,fieldOrDatumDef:u,legendConfig:a,config:o,scaleType:c,orient:d,legendType:m,direction:Tf({legend:n,legendType:m,orient:d,legendConfig:a})};for(const i of Ff){if("gradient"===m&&i.startsWith("symbol")||"symbol"===m&&i.startsWith("gradient"))continue;const r=i in Af?Af[i](p):n[i];if(void 0!==r){const a=Lf(r,i,n,e.fieldDef(t));(a||void 0===o.legend[i])&&s.set(i,r,a)}}const g=n?.encoding??{},h=s.get("selections"),y={},v={fieldOrDatumDef:u,model:e,channel:t,legendCmpt:s,legendType:m};for(const t of["labels","legend","title","symbols","gradient","entries"]){const n=kf(g[t]??{},e),i=t in Of?Of[t](n,v):n;void 0===i||S(i)||(y[t]={...h?.length&&Ro(u)?{name:`${_(u.field)}_legend_${t}`}:{},...h?.length?{interactive:!!h}:{},update:i})}return S(y)||s.set("encode",y,!!n?.encoding),s}function Uf(e,t){if(!e)return t.clone();const n=e.getWithExplicit("orient"),i=t.getWithExplicit("orient");if(n.explicit&&i.explicit&&n.value!==i.value)return;let r=!1;for(const n of Ff){const i=Kl(e.getWithExplicit(n),t.getWithExplicit(n),n,"legend",((e,t)=>{switch(n){case"symbolType":return Rf(e,t);case"title":return Ln(e,t);case"type":return r=!0,Xl("symbol")}return Jl(e,t,n,"legend")}));e.setWithExplicit(n,i)}return r&&(e.implicit?.encode?.gradient&&N(e.implicit,["encode","gradient"]),e.explicit?.encode?.gradient&&N(e.explicit,["encode","gradient"])),e}function Rf(e,t){return"circle"===t.value?t:e}function Wf(e){const t=e.component.legends,n={};for(const i of D(t)){const r=X(e.getScaleComponent(i).get("domains"));if(n[r])for(const e of n[r]){Uf(e,t[i])||n[r].push(t[i])}else n[r]=[t[i].clone()]}return F(n).flat().map((t=>function(e,t){const{disable:n,labelExpr:i,selections:r,...o}=e.combine();if(n)return;!1===t.aria&&null==o.aria&&(o.aria=!1);if(o.encode?.symbols){const 
e=o.encode.symbols.update;!e.fill||"transparent"===e.fill.value||e.stroke||o.stroke||(e.stroke={value:"transparent"});for(const t of gs)o[t]&&delete e[t]}o.title||delete o.title;if(void 0!==i){let e=i;o.encode?.labels?.update&&yn(o.encode.labels.update.text)&&(e=M(i,"datum.label",o.encode.labels.update.text.signal)),function(e,t,n,i){e.encode??={},e.encode[t]??={},e.encode[t].update??={},e.encode[t].update[n]=i}(o,"labels","text",{signal:e})}return o}(t,e.config))).filter((e=>void 0!==e))}function Bf(e){return vm(e)||ym(e)?function(e){return e.children.reduce(((e,t)=>e.concat(t.assembleProjections())),If(e))}(e):If(e)}function If(e){const t=e.component.projection;if(!t||t.merged)return[];const n=t.combine(),{name:i}=n;if(t.data){const r={signal:`[${t.size.map((e=>e.signal)).join(", ")}]`},o=t.data.reduce(((t,n)=>{const i=yn(n)?n.signal:`data('${e.lookupDataSource(n)}')`;return p(t,i)||t.push(i),t}),[]);if(o.length<=0)throw new Error("Projection's fit didn't find any data sources");return[{name:i,size:r,fit:{signal:o.length>1?`[${o.join(", ")}]`:o[0]},...n}]}return[{name:i,translate:{signal:"[width / 2, height / 2]"},...n}]}const Hf=["type","clipAngle","clipExtent","center","rotate","precision","reflectX","reflectY","coefficient","distance","fraction","lobes","parallel","radius","ratio","spacing","tilt"];class Vf extends Gl{merged=!1;constructor(e,t,n,i){super({...t},{name:e}),this.specifiedProjection=t,this.size=n,this.data=i}get isFit(){return!!this.data}}function Gf(e){e.component.projection=gm(e)?function(e){if(e.hasProjection){const t=pn(e.specifiedProjection),n=!(t&&(null!=t.scale||null!=t.translate)),i=n?[e.getSizeSignalRef("width"),e.getSizeSignalRef("height")]:void 0,r=n?function(e){const t=[],{encoding:n}=e;for(const i of[[ue,ce],[de,fe]])(fa(n[i[0]])||fa(n[i[1]]))&&t.push({signal:e.getName(`geojson_${t.length}`)});e.channelHasField(he)&&e.typedFieldDef(he).type===rr&&t.push({signal:e.getName(`geojson_${t.length}`)});0===t.length&&t.push(e.requestDataName(sc.Main));return t}(e):void 0,o=new Vf(e.projectionName(!0),{...pn(e.config.projection),...t},i,r);return o.get("type")||o.set("type","equalEarth",!1),o}return}(e):function(e){if(0===e.children.length)return;let n;for(const t of e.children)Gf(t);const i=h(e.children,(e=>{const i=e.component.projection;if(i){if(n){const e=function(e,n){const i=h(Hf,(i=>!t.hasOwnProperty(e.explicit,i)&&!t.hasOwnProperty(n.explicit,i)||!!(t.hasOwnProperty(e.explicit,i)&&t.hasOwnProperty(n.explicit,i)&&Y(e.get(i),n.get(i)))));if(Y(e.size,n.size)){if(i)return e;if(Y(e.explicit,{}))return n;if(Y(n.explicit,{}))return e}return null}(n,i);return e&&(n=e),!!e}return n=i,!0}return!0}));if(n&&i){const t=e.projectionName(!0),i=new Vf(t,n.specifiedProjection,n.size,l(n.data));for(const n of e.children){const e=n.component.projection;e&&(e.isFit&&i.data.push(...n.component.projection.data),n.renameProjection(e.get("name"),t),e.merged=!0)}return i}return}(e)}function Yf(e,t,n,i){if(xa(t,n)){const r=gm(e)?e.axis(n)??e.legend(n)??{}:{},o=ta(t,{expr:"datum"}),a=ta(t,{expr:"datum",binSuffix:"end"});return{formulaAs:ta(t,{binSuffix:"range",forAs:!0}),formula:Xr(o,a,r.format,r.formatType,i)}}return{}}function Xf(e,t){return`${sn(e)}_${t}`}function Qf(e,t,n){const i=Xf(ga(n,void 0)??{},t);return e.getName(`${i}_bins`)}function Jf(e,n,i){let r,o;r=function(e){return"as"in e}(e)?t.isString(e.as)?[e.as,`${e.as}_end`]:[e.as[0],e.as[1]]:[ta(e,{forAs:!0}),ta(e,{binSuffix:"end",forAs:!0})];const a={...ga(n,void 
0)},s=Xf(a,e.field),{signal:l,extentSignal:c}=function(e,t){return{signal:e.getName(`${t}_bins`),extentSignal:e.getName(`${t}_extent`)}}(i,s);if(fn(a.extent)){const e=a.extent;o=Ru(i,e.param,e),delete a.extent}return{key:s,binComponent:{bin:a,field:e.field,as:[r],...l?{signal:l}:{},...c?{extentSignal:c}:{},...o?{span:o}:{}}}}class Kf extends pc{clone(){return new Kf(null,l(this.bins))}constructor(e,t){super(e),this.bins=t}static makeFromEncoding(e,t){const n=t.reduceFieldDef(((e,n,i)=>{if(Yo(n)&&ln(n.bin)){const{key:r,binComponent:o}=Jf(n,n.bin,t);e[r]={...o,...e[r],...Yf(t,n,i,t.config)}}return e}),{});return S(n)?null:new Kf(e,n)}static makeFromTransform(e,t,n){const{key:i,binComponent:r}=Jf(t,t.bin,n);return new Kf(e,{[i]:r})}merge(e,t){for(const n of D(e.bins))n in this.bins?(t(e.bins[n].signal,this.bins[n].signal),this.bins[n].as=b([...this.bins[n].as,...e.bins[n].as],d)):this.bins[n]=e.bins[n];for(const t of e.children)e.removeChild(t),t.parent=this;e.remove()}producedFields(){return new Set(F(this.bins).map((e=>e.as)).flat(2))}dependentFields(){return new Set(F(this.bins).map((e=>e.field)))}hash(){return`Bin ${d(this.bins)}`}assemble(){return F(this.bins).flatMap((e=>{const t=[],[n,...i]=e.as,{extent:r,...o}=e.bin,a={type:"bin",field:E(e.field),as:n,signal:e.signal,...fn(r)?{extent:null}:{extent:r},...e.span?{span:{signal:`span(${e.span})`}}:{},...o};!r&&e.extentSignal&&(t.push({type:"extent",field:E(e.field),signal:e.extentSignal}),a.extent={signal:e.extentSignal}),t.push(a);for(const e of i)for(let i=0;i<2;i++)t.push({type:"formula",expr:ta({field:n[i]},{expr:"datum"}),as:e[i]});return e.formula&&t.push({type:"formula",expr:e.formula,as:e.formulaAs}),t}))}}function Zf(e,n,i,r){const o=gm(r)?r.encoding[it(n)]:void 0;if(Yo(i)&&gm(r)&&Eo(i,o,r.markDef,r.config)){e.add(ta(i,{})),e.add(ta(i,{suffix:"end"}));const{mark:t,markDef:o,config:a}=r,s=jo({fieldDef:i,markDef:o,config:a});mo(t)&&.5!==s&&zt(n)&&(e.add(ta(i,{suffix:bc})),e.add(ta(i,{suffix:xc}))),i.bin&&xa(i,n)&&e.add(ta(i,{binSuffix:"range"}))}else if(Ee(n)){const t=Te(n);e.add(r.getName(t))}else e.add(ta(i));return Qo(i)&&function(e){return t.isObject(e)&&"field"in e}(i.scale?.range)&&e.add(i.scale.range.field),e}class ed extends pc{clone(){return new ed(null,new Set(this.dimensions),l(this.measures))}constructor(e,t,n){super(e),this.dimensions=t,this.measures=n}get groupBy(){return this.dimensions}static makeFromEncoding(e,t){let n=!1;t.forEachFieldDef((e=>{e.aggregate&&(n=!0)}));const i={},r=new Set;return n?(t.forEachFieldDef(((e,n)=>{const{aggregate:o,field:a}=e;if(o)if("count"===o)i["*"]??={},i["*"].count=new Set([ta(e,{forAs:!0})]);else{if(Zt(o)||en(o)){const e=Zt(o)?"argmin":"argmax",t=o[e];i[t]??={},i[t][e]=new Set([ta({op:e,field:t},{forAs:!0})])}else i[a]??={},i[a][o]=new Set([ta(e,{forAs:!0})]);Ht(n)&&"unaggregated"===t.scaleDomain(n)&&(i[a]??={},i[a].min=new Set([ta({field:a,aggregate:"min"},{forAs:!0})]),i[a].max=new Set([ta({field:a,aggregate:"max"},{forAs:!0})]))}else Zf(r,n,e,t)})),r.size+D(i).length===0?null:new ed(e,r,i)):null}static makeFromTransform(e,t){const n=new Set,i={};for(const e of t.aggregate){const{op:t,field:n,as:r}=e;t&&("count"===t?(i["*"]??={},i["*"].count=new Set([r||ta(e,{forAs:!0})])):(i[n]??={},i[n][t]=new Set([r||ta(e,{forAs:!0})])))}for(const e of t.groupby??[])n.add(e);return n.size+D(i).length===0?null:new ed(e,n,i)}merge(e){return x(this.dimensions,e.dimensions)?(function(e,t){for(const n of D(t)){const i=t[n];for(const t of D(i))n in e?e[n][t]=new 
Set([...e[n][t]??[],...i[t]]):e[n]={[t]:i[t]}}}(this.measures,e.measures),!0):(function(){hi.debug(...arguments)}("different dimensions, cannot merge"),!1)}addDimensions(e){e.forEach(this.dimensions.add,this.dimensions)}dependentFields(){return new Set([...this.dimensions,...D(this.measures)])}producedFields(){const e=new Set;for(const t of D(this.measures))for(const n of D(this.measures[t])){const i=this.measures[t][n];0===i.size?e.add(`${n}_${t}`):i.forEach(e.add,e)}return e}hash(){return`Aggregate ${d({dimensions:this.dimensions,measures:this.measures})}`}assemble(){const e=[],t=[],n=[];for(const i of D(this.measures))for(const r of D(this.measures[i]))for(const o of this.measures[i][r])n.push(o),e.push(r),t.push("*"===i?null:E(i));return{type:"aggregate",groupby:[...this.dimensions].map(E),ops:e,fields:t,as:n}}}class td extends pc{constructor(e,n,i,r){super(e),this.model=n,this.name=i,this.data=r;for(const e of Re){const i=n.facet[e];if(i){const{bin:r,sort:o}=i;this[e]={name:n.getName(`${e}_domain`),fields:[ta(i),...ln(r)?[ta(i,{binSuffix:"end"})]:[]],...zo(o)?{sortField:o}:t.isArray(o)?{sortIndexField:tf(i,e)}:{}}}}this.childModel=n.child}hash(){let e="Facet";for(const t of Re)this[t]&&(e+=` ${t.charAt(0)}:${d(this[t])}`);return e}get fields(){const e=[];for(const t of Re)this[t]?.fields&&e.push(...this[t].fields);return e}dependentFields(){const e=new Set(this.fields);for(const t of Re)this[t]&&(this[t].sortField&&e.add(this[t].sortField.field),this[t].sortIndexField&&e.add(this[t].sortIndexField));return e}producedFields(){return new Set}getSource(){return this.name}getChildIndependentFieldsWithStep(){const e={};for(const t of Ft){const n=this.childModel.component.scales[t];if(n&&!n.merged){const i=n.get("type"),r=n.get("range");if(hr(i)&&vn(r)){const n=Hd(Vd(this.childModel,t));n?e[t]=n:yi(In(t))}}}return e}assembleRowColumnHeaderData(e,t,n){const i={row:"y",column:"x",facet:void 0}[e],r=[],o=[],a=[];i&&n&&n[i]&&(t?(r.push(`distinct_${n[i]}`),o.push("max")):(r.push(n[i]),o.push("distinct")),a.push(`distinct_${n[i]}`));const{sortField:s,sortIndexField:l}=this[e];if(s){const{op:e=ko,field:t}=s;r.push(t),o.push(e),a.push(ta(s,{forAs:!0}))}else l&&(r.push(l),o.push("max"),a.push(l));return{name:this[e].name,source:t??this.data,transform:[{type:"aggregate",groupby:this[e].fields,...r.length?{fields:r,ops:o,as:a}:{}}]}}assembleFacetHeaderData(e){const{columns:t}=this.model.layout,{layoutHeaders:n}=this.model.component,i=[],r={};for(const e of af){for(const t of sf){const i=(n[e]&&n[e][t])??[];for(const t of i)if(t.axes?.length>0){r[e]=!0;break}}if(r[e]){const n=`length(data("${this.facet.name}"))`,r="row"===e?t?{signal:`ceil(${n} / ${t})`}:1:t?{signal:`min(${n}, ${t})`}:{signal:n};i.push({name:`${this.facet.name}_${e}`,transform:[{type:"sequence",start:0,stop:r}]})}}const{row:o,column:a}=r;return(o||a)&&i.unshift(this.assembleRowColumnHeaderData("facet",null,e)),i}assemble(){const e=[];let t=null;const n=this.getChildIndependentFieldsWithStep(),{column:i,row:r,facet:o}=this;if(i&&r&&(n.x||n.y)){t=`cross_${this.column.name}_${this.row.name}`;const i=[].concat(n.x??[],n.y??[]),r=i.map((()=>"distinct"));e.push({name:t,source:this.data,transform:[{type:"aggregate",groupby:this.fields,fields:i,ops:r}]})}for(const i of[J,Q])this[i]&&e.push(this.assembleRowColumnHeaderData(i,t,n));if(o){const t=this.assembleFacetHeaderData(n);t&&e.push(...t)}return e}}function nd(e){return e.startsWith("'")&&e.endsWith("'")||e.startsWith('"')&&e.endsWith('"')?e.slice(1,-1):e}function id(e){const n={};return 
a(e.filter,(e=>{if(Gi(e)){let i=null;Ui(e)?i=Sn(e.equal):Wi(e)?i=Sn(e.lte):Ri(e)?i=Sn(e.lt):Bi(e)?i=Sn(e.gt):Ii(e)?i=Sn(e.gte):Hi(e)?i=e.range[0]:Vi(e)&&(i=(e.oneOf??e.in)[0]),i&&(vi(i)?n[e.field]="date":t.isNumber(i)?n[e.field]="number":t.isString(i)&&(n[e.field]="string")),e.timeUnit&&(n[e.field]="date")}})),n}function rd(e){const n={};function i(e){var i;ya(e)?n[e.field]="date":"quantitative"===e.type&&(i=e.aggregate,t.isString(i)&&p(["min","max"],i))?n[e.field]="number":q(e.field)>1?e.field in n||(n[e.field]="flatten"):Qo(e)&&zo(e.sort)&&q(e.sort.field)>1&&(e.sort.field in n||(n[e.sort.field]="flatten"))}if((gm(e)||hm(e))&&e.forEachFieldDef(((t,n)=>{if(Yo(t))i(t);else{const r=tt(n),o=e.fieldDef(r);i({...t,type:o.type})}})),gm(e)){const{mark:t,markDef:i,encoding:r}=e;if(fo(t)&&!e.encoding.order){const e=r["horizontal"===i.orient?"y":"x"];Ro(e)&&"quantitative"===e.type&&!(e.field in n)&&(n[e.field]="number")}}return n}class od extends pc{clone(){return new od(null,l(this._parse))}constructor(e,t){super(e),this._parse=t}hash(){return`Parse ${d(this._parse)}`}static makeExplicit(e,t,n){let i={};const r=t.data;return!ic(r)&&r?.format?.parse&&(i=r.format.parse),this.makeWithAncestors(e,i,{},n)}static makeWithAncestors(e,t,n,i){for(const e of D(n)){const t=i.getWithExplicit(e);void 0!==t.value&&(t.explicit||t.value===n[e]||"derived"===t.value||"flatten"===n[e]?delete n[e]:yi(Qn(e,n[e],t.value)))}for(const e of D(t)){const n=i.get(e);void 0!==n&&(n===t[e]?delete t[e]:yi(Qn(e,t[e],n)))}const r=new Gl(t,n);i.copyAll(r);const o={};for(const e of D(r.combine())){const t=r.get(e);null!==t&&(o[e]=t)}return 0===D(o).length||i.parseNothing?null:new od(e,o)}get parse(){return this._parse}merge(e){this._parse={...this._parse,...e.parse},e.remove()}assembleFormatParse(){const e={};for(const t of D(this._parse)){const n=this._parse[t];1===q(t)&&(e[t]=n)}return e}producedFields(){return new Set(D(this._parse))}dependentFields(){return new Set(D(this._parse))}assembleTransforms(){let e=arguments.length>0&&void 0!==arguments[0]&&arguments[0];return D(this._parse).filter((t=>!e||q(t)>1)).map((e=>{const t=function(e,t){const n=A(e);if("number"===t)return`toNumber(${n})`;if("boolean"===t)return`toBoolean(${n})`;if("string"===t)return`toString(${n})`;if("date"===t)return`toDate(${n})`;if("flatten"===t)return n;if(t.startsWith("date:"))return`timeParse(${n},'${nd(t.slice(5,t.length))}')`;if(t.startsWith("utc:"))return`utcParse(${n},'${nd(t.slice(4,t.length))}')`;return yi(`Unrecognized parse "${t}".`),null}(e,this._parse[e]);if(!t)return null;return{type:"formula",expr:t,as:L(e)}})).filter((e=>null!==e))}}class ad extends pc{clone(){return new ad(null)}constructor(e){super(e)}dependentFields(){return new Set}producedFields(){return new Set([hs])}hash(){return"Identifier"}assemble(){return{type:"identifier",as:hs}}}class sd extends pc{clone(){return new sd(null,this.params)}constructor(e,t){super(e),this.params=t}dependentFields(){return new Set}producedFields(){}hash(){return`Graticule ${d(this.params)}`}assemble(){return{type:"graticule",...!0===this.params?{}:this.params}}}class ld extends pc{clone(){return new ld(null,this.params)}constructor(e,t){super(e),this.params=t}dependentFields(){return new Set}producedFields(){return new Set([this.params.as??"data"])}hash(){return`Hash ${d(this.params)}`}assemble(){return{type:"sequence",...this.params}}}class cd extends pc{constructor(e){let t;if(super(null),e??={name:"source"},ic(e)||(t=e.format?{...f(e.format,["parse"])}:{}),tc(e))this._data={values:e.values};else 
if(ec(e)){if(this._data={url:e.url},!t.type){let n=/(?:\.([^.]+))?$/.exec(e.url)[1];p(["json","csv","tsv","dsv","topojson"],n)||(n="json"),t.type=n}}else oc(e)?this._data={values:[{type:"Sphere"}]}:(nc(e)||ic(e))&&(this._data={});this._generator=ic(e),e.name&&(this._name=e.name),t&&!S(t)&&(this._data.format=t)}dependentFields(){return new Set}producedFields(){}get data(){return this._data}hasName(){return!!this._name}get isGenerator(){return this._generator}get dataName(){return this._name}set dataName(e){this._name=e}set parent(e){throw new Error("Source nodes have to be roots.")}remove(){throw new Error("Source nodes are roots and cannot be removed.")}hash(){throw new Error("Cannot hash sources")}assemble(){return{name:this._name,...this._data,transform:[]}}}function ud(e){return e instanceof cd||e instanceof sd||e instanceof ld}class fd{#e;constructor(){this.#e=!1}setModified(){this.#e=!0}get modifiedFlag(){return this.#e}}class dd extends fd{getNodeDepths(e,t,n){n.set(e,t);for(const i of e.children)this.getNodeDepths(i,t+1,n);return n}optimize(e){const t=[...this.getNodeDepths(e,0,new Map).entries()].sort(((e,t)=>t[1]-e[1]));for(const e of t)this.run(e[0]);return this.modifiedFlag}}class md extends fd{optimize(e){this.run(e);for(const t of e.children)this.optimize(t);return this.modifiedFlag}}class pd extends md{mergeNodes(e,t){const n=t.shift();for(const i of t)e.removeChild(i),i.parent=n,i.remove()}run(e){const t=e.children.map((e=>e.hash())),n={};for(let i=0;i1&&(this.setModified(),this.mergeNodes(e,n[t]))}}class gd extends md{constructor(e){super(),this.requiresSelectionId=e&&ju(e)}run(e){e instanceof ad&&(this.requiresSelectionId&&(ud(e.parent)||e.parent instanceof ed||e.parent instanceof od)||(this.setModified(),e.remove()))}}class hd extends fd{optimize(e){return this.run(e,new Set),this.modifiedFlag}run(e,t){let n=new Set;e instanceof vc&&(n=e.producedFields(),$(n,t)&&(this.setModified(),e.removeFormulas(t),0===e.producedFields.length&&e.remove()));for(const i of e.children)this.run(i,new Set([...t,...n]))}}class yd extends md{constructor(){super()}run(e){e instanceof gc&&!e.isRequired()&&(this.setModified(),e.remove())}}class vd extends dd{run(e){if(!(ud(e)||e.numChildren()>1))for(const t of e.children)if(t instanceof od)if(e instanceof od)this.setModified(),e.merge(t);else{if(k(e.producedFields(),t.dependentFields()))continue;this.setModified(),t.swapWithParent()}}}class bd extends dd{run(e){const t=[...e.children],n=e.children.filter((e=>e instanceof od));if(e.numChildren()>1&&n.length>=1){const i={},r=new Set;for(const e of n){const t=e.parse;for(const e of D(t))e in i?i[e]!==t[e]&&r.add(e):i[e]=t[e]}for(const e of r)delete i[e];if(!S(i)){this.setModified();const n=new od(e,i);for(const r of t){if(r instanceof od)for(const e of D(i))delete r.parse[e];e.removeChild(r),r.parent=n,r instanceof od&&0===D(r.parse).length&&r.remove()}}}}}class xd extends dd{run(e){e instanceof gc||e.numChildren()>0||e instanceof td||e instanceof cd||(this.setModified(),e.remove())}}class $d extends dd{run(e){const t=e.children.filter((e=>e instanceof vc)),n=t.pop();for(const e of t)this.setModified(),n.merge(e)}}class wd extends dd{run(e){const t=e.children.filter((e=>e instanceof ed)),n={};for(const e of t){const t=d(e.groupBy);t in n||(n[t]=[]),n[t].push(e)}for(const t of D(n)){const i=n[t];if(i.length>1){const t=i.pop();for(const n of i)t.merge(n)&&(e.removeChild(n),n.parent=t,n.remove(),this.setModified())}}}}class kd extends dd{constructor(e){super(),this.model=e}run(e){const t=!(ud(e)||e 
instanceof qu||e instanceof od||e instanceof ad),n=[],i=[];for(const r of e.children)r instanceof Kf&&(t&&!k(e.producedFields(),r.dependentFields())?n.push(r):i.push(r));if(n.length>0){const t=n.pop();for(const e of n)t.merge(e,this.model.renameSignal.bind(this.model));this.setModified(),e instanceof Kf?e.merge(t,this.model.renameSignal.bind(this.model)):t.swapWithParent()}if(i.length>1){const e=i.pop();for(const t of i)e.merge(t,this.model.renameSignal.bind(this.model));this.setModified()}}}class Sd extends dd{run(e){const t=[...e.children];if(!g(t,(e=>e instanceof gc))||e.numChildren()<=1)return;const n=[];let i;for(const r of t)if(r instanceof gc){let t=r;for(;1===t.numChildren();){const[e]=t.children;if(!(e instanceof gc))break;t=e}n.push(...t.children),i?(e.removeChild(r),r.parent=i.parent,i.parent.removeChild(i),i.parent=t,this.setModified()):i=t}else n.push(r);if(n.length){this.setModified();for(const e of n)e.parent.removeChild(e),e.parent=i}}}class Dd extends pc{clone(){return new Dd(null,l(this.transform))}constructor(e,t){super(e),this.transform=t}addDimensions(e){this.transform.groupby=b(this.transform.groupby.concat(e),(e=>e))}dependentFields(){const e=new Set;return this.transform.groupby&&this.transform.groupby.forEach(e.add,e),this.transform.joinaggregate.map((e=>e.field)).filter((e=>void 0!==e)).forEach(e.add,e),e}producedFields(){return new Set(this.transform.joinaggregate.map(this.getDefaultName))}getDefaultName(e){return e.as??ta(e)}hash(){return`JoinAggregateTransform ${d(this.transform)}`}assemble(){const e=[],t=[],n=[];for(const i of this.transform.joinaggregate)t.push(i.op),n.push(this.getDefaultName(i)),e.push(void 0===i.field?null:i.field);const i=this.transform.groupby;return{type:"joinaggregate",as:n,ops:t,fields:e,...void 0!==i?{groupby:i}:{}}}}class Fd extends pc{clone(){return new Fd(null,l(this._stack))}constructor(e,t){super(e),this._stack=t}static makeFromTransform(e,n){const{stack:i,groupby:r,as:o,offset:a="zero"}=n,s=[],l=[];if(void 0!==n.sort)for(const e of n.sort)s.push(e.field),l.push(U(e.order,"ascending"));const c={field:s,order:l};let u;return u=function(e){return t.isArray(e)&&e.every((e=>t.isString(e)))&&e.length>1}(o)?o:t.isString(o)?[o,`${o}_end`]:[`${n.stack}_start`,`${n.stack}_end`],new Fd(e,{dimensionFieldDefs:[],stackField:i,groupby:r,offset:a,sort:c,facetby:[],as:u})}static makeFromEncoding(e,n){const i=n.stack,{encoding:r}=n;if(!i)return null;const{groupbyChannels:o,fieldChannel:a,offset:s,impute:l}=i,c=o.map((e=>ua(r[e]))).filter((e=>!!e)),u=function(e){return e.stack.stackBy.reduce(((e,t)=>{const n=ta(t.fieldDef);return n&&e.push(n),e}),[])}(n),f=n.encoding.order;let d;if(t.isArray(f)||Ro(f))d=Tn(f);else{const e=Mo(f)?f.sort:"y"===a?"descending":"ascending";d=u.reduce(((t,n)=>(t.field.includes(n)||(t.field.push(n),t.order.push(e)),t)),{field:[],order:[]})}return new Fd(e,{dimensionFieldDefs:c,stackField:n.vgField(a),facetby:[],stackby:u,sort:d,offset:s,impute:l,as:[n.vgField(a,{suffix:"start",forAs:!0}),n.vgField(a,{suffix:"end",forAs:!0})]})}get stack(){return this._stack}addDimensions(e){this._stack.facetby.push(...e)}dependentFields(){const e=new Set;return e.add(this._stack.stackField),this.getGroupbyFields().forEach(e.add,e),this._stack.facetby.forEach(e.add,e),this._stack.sort.field.forEach(e.add,e),e}producedFields(){return new Set(this._stack.as)}hash(){return`Stack ${d(this._stack)}`}getGroupbyFields(){const{dimensionFieldDefs:e,impute:t,groupby:n}=this._stack;return 
e.length>0?e.map((e=>e.bin?t?[ta(e,{binSuffix:"mid"})]:[ta(e,{}),ta(e,{binSuffix:"end"})]:[ta(e)])).flat():n??[]}assemble(){const e=[],{facetby:t,dimensionFieldDefs:n,stackField:i,stackby:r,sort:o,offset:a,impute:s,as:l}=this._stack;if(s)for(const o of n){const{bandPosition:n=.5,bin:a}=o;if(a){const t=ta(o,{expr:"datum"}),i=ta(o,{expr:"datum",binSuffix:"end"});e.push({type:"formula",expr:`${n}*${t}+${1-n}*${i}`,as:ta(o,{binSuffix:"mid",forAs:!0})})}e.push({type:"impute",field:i,groupby:[...r,...t],key:ta(o,{binSuffix:"mid"}),method:"value",value:0})}return e.push({type:"stack",groupby:[...this.getGroupbyFields(),...t],field:i,sort:o,as:l,offset:a}),e}}class zd extends pc{clone(){return new zd(null,l(this.transform))}constructor(e,t){super(e),this.transform=t}addDimensions(e){this.transform.groupby=b(this.transform.groupby.concat(e),(e=>e))}dependentFields(){const e=new Set;return(this.transform.groupby??[]).forEach(e.add,e),(this.transform.sort??[]).forEach((t=>e.add(t.field))),this.transform.window.map((e=>e.field)).filter((e=>void 0!==e)).forEach(e.add,e),e}producedFields(){return new Set(this.transform.window.map(this.getDefaultName))}getDefaultName(e){return e.as??ta(e)}hash(){return`WindowTransform ${d(this.transform)}`}assemble(){const e=[],t=[],n=[],i=[];for(const r of this.transform.window)t.push(r.op),n.push(this.getDefaultName(r)),i.push(void 0===r.param?null:r.param),e.push(void 0===r.field?null:r.field);const r=this.transform.frame,o=this.transform.groupby;if(r&&null===r[0]&&null===r[1]&&t.every((e=>tn(e))))return{type:"joinaggregate",as:n,ops:t,fields:e,...void 0!==o?{groupby:o}:{}};const a=[],s=[];if(void 0!==this.transform.sort)for(const e of this.transform.sort)a.push(e.field),s.push(e.order??"ascending");const l={field:a,order:s},c=this.transform.ignorePeers;return{type:"window",params:i,as:n,ops:t,fields:e,sort:l,...void 0!==c?{ignorePeers:c}:{},...void 0!==o?{groupby:o}:{},...void 0!==r?{frame:r}:{}}}}function Od(e){if(e instanceof td)if(1!==e.numChildren()||e.children[0]instanceof gc){const n=e.model.component.data.main;_d(n);const i=(t=e,function e(n){if(!(n instanceof td)){const i=n.clone();if(i instanceof gc){const e=Cd+i.getSource();i.setSource(e),t.model.component.data.outputNodes[e]=i}else(i instanceof ed||i instanceof Fd||i instanceof zd||i instanceof Dd)&&i.addDimensions(t.fields);for(const t of n.children.flatMap(e))t.parent=i;return[i]}return n.children.flatMap(e)}),r=e.children.map(i).flat();for(const e of r)e.parent=n}else{const t=e.children[0];(t instanceof ed||t instanceof Fd||t instanceof zd||t instanceof Dd)&&t.addDimensions(e.fields),t.swapWithParent(),Od(e)}else e.children.map(Od);var t}function _d(e){if(e instanceof gc&&e.type===sc.Main&&1===e.numChildren()){const t=e.children[0];t instanceof td||(t.swapWithParent(),_d(e))}}const Cd="scale_",Nd=5;function Pd(e){for(const t of e){for(const e of t.children)if(e.parent!==t)return!1;if(!Pd(t.children))return!1}return!0}function Ad(e,t){let n=!1;for(const i of t)n=e.optimize(i)||n;return n}function jd(e,t,n){let i=e.sources,r=!1;return r=Ad(new yd,i)||r,r=Ad(new gd(t),i)||r,i=i.filter((e=>e.numChildren()>0)),r=Ad(new xd,i)||r,i=i.filter((e=>e.numChildren()>0)),n||(r=Ad(new vd,i)||r,r=Ad(new kd(t),i)||r,r=Ad(new hd,i)||r,r=Ad(new bd,i)||r,r=Ad(new wd,i)||r,r=Ad(new $d,i)||r,r=Ad(new pd,i)||r,r=Ad(new Sd,i)||r),e.sources=i,r}class Td{constructor(e){Object.defineProperty(this,"signal",{enumerable:!0,get:e})}static fromName(e,t){return new Td((()=>e(t)))}}function Ed(e){gm(e)?function(e){const 
t=e.component.scales;for(const n of D(t)){const i=Md(e,n);if(t[n].setWithExplicit("domains",i),Rd(e,n),e.component.data.isFaceted){let t=e;for(;!hm(t)&&t.parent;)t=t.parent;if("shared"===t.component.resolve.scale[n])for(const e of i.value)bn(e)&&(e.data=Cd+e.data.replace(Cd,""))}}}(e):function(e){for(const t of e.children)Ed(t);const t=e.component.scales;for(const n of D(t)){let i,r=null;for(const t of e.children){const e=t.component.scales[n];if(e){i=void 0===i?e.getWithExplicit("domains"):Kl(i,e.getWithExplicit("domains"),"domains","scale",Bd);const t=e.get("selectionExtent");r&&t&&r.param!==t.param&&yi(Yn),r=t}}t[n].setWithExplicit("domains",i),r&&t[n].set("selectionExtent",r,!0)}}(e)}function Md(e,t){const n=e.getScaleComponent(t).get("type"),{encoding:i}=e,r=function(e,t,n,i){if("unaggregated"===e){const{valid:e,reason:i}=Wd(t,n);if(!e)return void yi(i)}else if(void 0===e&&i.useUnaggregatedDomain){const{valid:e}=Wd(t,n);if(e)return"unaggregated"}return e}(e.scaleDomain(t),e.typedFieldDef(t),n,e.config.scale);return r!==e.scaleDomain(t)&&(e.specifiedScales[t]={...e.specifiedScales[t],domain:r}),"x"===t&&fa(i.x2)?fa(i.x)?Kl(qd(n,r,e,"x"),qd(n,r,e,"x2"),"domain","scale",Bd):qd(n,r,e,"x2"):"y"===t&&fa(i.y2)?fa(i.y)?Kl(qd(n,r,e,"y"),qd(n,r,e,"y2"),"domain","scale",Bd):qd(n,r,e,"y2"):qd(n,r,e,t)}function Ld(e,t,n){const i=Ei(n)?.unit;return"temporal"===t||i?function(e,t,n){return e.map((e=>({signal:`{data: ${va(e,{timeUnit:n,type:t})}}`})))}(e,t,i):[e]}function qd(e,n,i,r){const{encoding:o,markDef:a,mark:s,config:l,stack:c}=i,u=fa(o[r]),{type:f}=u,d=u.timeUnit;if(function(e){return e?.unionWith}(n)){const t=qd(e,void 0,i,r);return Yl([...Ld(n.unionWith,f,d),...t.value])}if(yn(n))return Yl([n]);if(n&&"unaggregated"!==n&&!xr(n))return Yl(Ld(n,f,d));if(c&&r===c.fieldChannel){if("normalize"===c.offset)return Xl([[0,1]]);const e=i.requestDataName(sc.Main);return Xl([{data:e,field:i.vgField(r,{suffix:"start"})},{data:e,field:i.vgField(r,{suffix:"end"})}])}const m=Ht(r)&&Ro(u)?function(e,t,n){if(!hr(n))return;const i=e.fieldDef(t),r=i.sort;if(Oo(r))return{op:"min",field:tf(i,t),order:"ascending"};const{stack:o}=e,a=o?new Set([...o.groupbyFields,...o.stackBy.map((e=>e.fieldDef.field))]):void 0;if(zo(r)){return Ud(r,o&&!a.has(r.field))}if(Fo(r)){const{encoding:t,order:n}=r,i=e.fieldDef(t),{aggregate:s,field:l}=i,c=o&&!a.has(l);if(Zt(s)||en(s))return Ud({field:ta(i),order:n},c);if(tn(s)||!s)return Ud({op:s,field:l,order:n},c)}else{if("descending"===r)return{op:"min",field:e.vgField(t),order:"descending"};if(p(["ascending",void 0],r))return!0}return}(i,r,e):void 0;if(Bo(u)){return Xl(Ld([u.datum],f,d))}const g=u;if("unaggregated"===n){const e=i.requestDataName(sc.Main),{field:t}=u;return Xl([{data:e,field:ta({field:t,aggregate:"min"})},{data:e,field:ta({field:t,aggregate:"max"})}])}if(ln(g.bin)){if(hr(e))return Xl("bin-ordinal"===e?[]:[{data:O(m)?i.requestDataName(sc.Main):i.requestDataName(sc.Raw),field:i.vgField(r,xa(g,r)?{binSuffix:"range"}:{}),sort:!0!==m&&t.isObject(m)?m:{field:i.vgField(r,{}),op:"min"}}]);{const{bin:e}=g;if(ln(e)){const t=Qf(i,g.field,e);return Xl([new Td((()=>{const e=i.getSignalName(t);return`[${e}.start, ${e}.stop]`}))])}return Xl([{data:i.requestDataName(sc.Main),field:i.vgField(r,{})}])}}if(g.timeUnit&&p(["time","utc"],e)){const e=o[it(r)];if(Eo(g,e,a,l)){const t=i.requestDataName(sc.Main),n=jo({fieldDef:g,fieldDef2:e,markDef:a,config:l}),o=mo(s)&&.5!==n&&zt(r);return Xl([{data:t,field:i.vgField(r,o?{suffix:bc}:{})},{data:t,field:i.vgField(r,{suffix:o?xc:"end"})}])}}return 
Xl(m?[{data:O(m)?i.requestDataName(sc.Main):i.requestDataName(sc.Raw),field:i.vgField(r),sort:m}]:[{data:i.requestDataName(sc.Main),field:i.vgField(r)}])}function Ud(e,t){const{op:n,field:i,order:r}=e;return{op:n??(t?"sum":ko),...i?{field:E(i)}:{},...r?{order:r}:{}}}function Rd(e,t){const n=e.component.scales[t],i=e.specifiedScales[t].domain,r=e.fieldDef(t)?.bin,o=xr(i)&&i,a=un(r)&&fn(r.extent)&&r.extent;(o||a)&&n.set("selectionExtent",o??a,!0)}function Wd(e,n){const{aggregate:i,type:r}=e;return i?t.isString(i)&&!an.has(i)?{valid:!1,reason:si(i)}:"quantitative"===r&&"log"===n?{valid:!1,reason:li(e)}:{valid:!0}:{valid:!1,reason:ai(e)}}function Bd(e,t,n,i){return e.explicit&&t.explicit&&yi(function(e,t,n,i){return`Conflicting ${t.toString()} property "${e.toString()}" (${X(n)} and ${X(i)}). Using the union of the two domains.`}(n,i,e.value,t.value)),{explicit:e.explicit,value:[...e.value,...t.value]}}function Id(e){const n=b(e.map((e=>{if(bn(e)){const{sort:t,...n}=e;return n}return e})),d),i=b(e.map((e=>{if(bn(e)){const t=e.sort;return void 0===t||O(t)||("op"in t&&"count"===t.op&&delete t.field,"ascending"===t.order&&delete t.order),t}})).filter((e=>void 0!==e)),d);if(0===n.length)return;if(1===n.length){const n=e[0];if(bn(n)&&i.length>0){let e=i[0];if(i.length>1){yi(fi);const n=i.filter((e=>t.isObject(e)&&"op"in e&&"min"!==e.op));e=!i.every((e=>t.isObject(e)&&"op"in e))||1!==n.length||n[0]}else if(t.isObject(e)&&"field"in e){const t=e.field;n.field===t&&(e=!e.order||{order:e.order})}return{...n,sort:e}}return n}const r=b(i.map((e=>O(e)||!("op"in e)||t.isString(e.op)&&e.op in Kt?e:(yi(function(e){return`Dropping sort property ${X(e)} as unioned domains only support boolean or op "count", "min", and "max".`}(e)),!0))),d);let o;1===r.length?o=r[0]:r.length>1&&(yi(fi),o=!0);const a=b(e.map((e=>bn(e)?e.data:null)),(e=>e));if(1===a.length&&null!==a[0]){return{data:a[0],fields:n.map((e=>e.field)),...o?{sort:o}:{}}}return{fields:n,...o?{sort:o}:{}}}function Hd(e){if(bn(e)&&t.isString(e.field))return e.field;if(function(e){return!t.isArray(e)&&"fields"in e&&!("data"in e)}(e)){let n;for(const i of e.fields)if(bn(i)&&t.isString(i.field))if(n){if(n!==i.field)return yi("Detected faceted independent scales that union domain of multiple fields from different data sources. We will use the first field. The result view size may be incorrect."),n}else n=i.field;return yi("Detected faceted independent scales that union domain of the same fields from different source. We will assume that this is the same field from a different fork of the same data source. However, if this is not the case, the result view size may be incorrect."),n}if(function(e){return!t.isArray(e)&&"fields"in e&&"data"in e}(e)){yi("Detected faceted independent scales that union domain of multiple fields from the same data source. We will use the first field. 
The result view size may be incorrect.");const n=e.fields[0];return t.isString(n)?n:void 0}}function Vd(e,t){const n=e.component.scales[t].get("domains").map((t=>(bn(t)&&(t.data=e.lookupDataSource(t.data)),t)));return Id(n)}function Gd(e){return vm(e)||ym(e)?e.children.reduce(((e,t)=>e.concat(Gd(t))),Yd(e)):Yd(e)}function Yd(e){return D(e.component.scales).reduce(((n,i)=>{const r=e.component.scales[i];if(r.merged)return n;const o=r.combine(),{name:a,type:s,selectionExtent:l,domains:c,range:u,reverse:f,...d}=o,m=function(e,n,i,r){if(zt(i)){if(vn(e))return{step:{signal:`${n}_step`}}}else if(t.isObject(e)&&bn(e))return{...e,data:r.lookupDataSource(e.data)};return e}(o.range,a,i,e),p=Vd(e,i),g=l?function(e,n,i,r){const o=Ru(e,n.param,n);return{signal:yr(i.get("type"))&&t.isArray(r)&&r[0]>r[1]?`isValid(${o}) && reverse(${o})`:o}}(e,l,r,p):null;return n.push({name:a,type:s,...p?{domain:p}:{},...g?{domainRaw:g}:{},range:m,...void 0!==f?{reverse:f}:{},...d}),n}),[])}class Xd extends Gl{merged=!1;constructor(e,t){super({},{name:e}),this.setWithExplicit("type",t)}domainDefinitelyIncludesZero(){return!1!==this.get("zero")||g(this.get("domains"),(e=>t.isArray(e)&&2===e.length&&t.isNumber(e[0])&&e[0]<=0&&t.isNumber(e[1])&&e[1]>=0))}}const Qd=["range","scheme"];function Jd(e,n){const i=e.fieldDef(n);if(i?.bin){const{bin:r,field:o}=i,a=rt(n),s=e.getName(a);if(t.isObject(r)&&r.binned&&void 0!==r.step)return new Td((()=>{const t=e.scaleName(n),i=`(domain("${t}")[1] - domain("${t}")[0]) / ${r.step}`;return`${e.getSignalName(s)} / (${i})`}));if(ln(r)){const t=Qf(e,o,r);return new Td((()=>{const n=e.getSignalName(t),i=`(${n}.stop - ${n}.start) / ${n}.step`;return`${e.getSignalName(s)} / (${i})`}))}}}function Kd(e,n){const i=n.specifiedScales[e],{size:r}=n,o=n.getScaleComponent(e).get("type");for(const r of Qd)if(void 0!==i[r]){const a=_r(o,r),s=Cr(e,r);if(a)if(s)yi(s);else switch(r){case"range":{const r=i.range;if(t.isArray(r)){if(zt(e))return Yl(r.map((e=>{if("width"===e||"height"===e){const t=n.getName(e),i=n.getSignalName.bind(n);return Td.fromName(i,t)}return e})))}else if(t.isObject(r))return Yl({data:n.requestDataName(sc.Main),field:r.field,sort:{op:"min",field:n.vgField(e)}});return Yl(r)}case"scheme":return Yl(Zd(i[r]))}else yi(ci(o,r,e))}const a=e===Z||"xOffset"===e?"width":"height",s=r[a];if(Fs(s))if(zt(e))if(hr(o)){const t=tm(s,n,e);if(t)return Yl({step:t})}else yi(ui(a));else if(Pt(e)){const t=e===ie?"x":"y";if("band"===n.getScaleComponent(t).get("type")){const e=nm(s,o);if(e)return Yl(e)}}const{rangeMin:l,rangeMax:u}=i,f=function(e,n){const{size:i,config:r,mark:o,encoding:a}=n,{type:s}=fa(a[e]),l=n.getScaleComponent(e),u=l.get("type"),{domain:f,domainMid:d}=n.specifiedScales[e];switch(e){case Z:case ee:if(p(["point","band"],u)){const t=im(e,i,r.view);if(Fs(t)){return{step:tm(t,n,e)}}}return em(e,n,u);case ie:case re:return function(e,t,n){const i=e===ie?"x":"y",r=t.getScaleComponent(i);if(!r)return em(i,t,n,{center:!0});const o=r.get("type"),a=t.scaleName(i),{markDef:s,config:l}=t;if("band"===o){const e=im(i,t.size,t.config.view);if(Fs(e)){const t=nm(e,n);if(t)return t}return[0,{signal:`bandwidth('${a}')`}]}{const n=t.encoding[i];if(Ro(n)&&n.timeUnit){const e=Mi(n.timeUnit,(e=>`scale('${a}', ${e})`)),i=t.config.scale.bandWithNestedOffsetPaddingInner,r=jo({fieldDef:n,markDef:s,config:l})-.5,o=0!==r?` + ${r}`:"";if(i){return[{signal:`${yn(i)?`${i.signal}/2`+o:`${i/2+r}`} * (${e})`},{signal:`${yn(i)?`(1 - ${i.signal}/2)`+o:`${1-i/2+r}`} * (${e})`}]}return[0,{signal:e}]}return c(`Cannot use ${e} 
scale if ${i} scale is not discrete.`)}}(e,n,u);case ye:{const a=rm(o,n.component.scales[e].get("zero"),r),s=function(e,n,i,r){const o={x:Jd(i,"x"),y:Jd(i,"y")};switch(e){case"bar":case"tick":{if(void 0!==r.scale.maxBandSize)return r.scale.maxBandSize;const e=am(n,o,r.view);return t.isNumber(e)?e-1:new Td((()=>`${e.signal} - 1`))}case"line":case"trail":case"rule":return r.scale.maxStrokeWidth;case"text":return r.scale.maxFontSize;case"point":case"square":case"circle":{if(r.scale.maxSize)return r.scale.maxSize;const e=am(n,o,r.view);return t.isNumber(e)?Math.pow(om*e,2):new Td((()=>`pow(${om} * ${e.signal}, 2)`))}}throw new Error(ni("size",e))}(o,i,n,r);return br(u)?function(e,t,n){const i=()=>{const i=On(t),r=On(e),o=`(${i} - ${r}) / (${n} - 1)`;return`sequence(${r}, ${i} + ${o}, ${o})`};return yn(t)?new Td(i):{signal:i()}}(a,s,function(e,n,i,r){switch(e){case"quantile":return n.scale.quantileCount;case"quantize":return n.scale.quantizeCount;case"threshold":return void 0!==i&&t.isArray(i)?i.length+1:(yi(function(e){return`Domain for ${e} is required for threshold scale.`}(r)),3)}}(u,r,f,e)):[a,s]}case se:return[0,2*Math.PI];case ve:return[0,360];case oe:return[0,new Td((()=>`min(${n.getSignalName(hm(n.parent)?"child_width":"width")},${n.getSignalName(hm(n.parent)?"child_height":"height")})/2`))];case we:return[r.scale.minStrokeWidth,r.scale.maxStrokeWidth];case ke:return[[1,0],[4,2],[2,1],[1,1],[1,2,4,2]];case he:return"symbol";case me:case pe:case ge:return"ordinal"===u?"nominal"===s?"category":"ordinal":void 0!==d?"diverging":"rect"===o||"geoshape"===o?"heatmap":"ramp";case be:case xe:case $e:return[r.scale.minOpacity,r.scale.maxOpacity]}}(e,n);return(void 0!==l||void 0!==u)&&_r(o,"rangeMin")&&t.isArray(f)&&2===f.length?Yl([l??f[0],u??f[1]]):Xl(f)}function Zd(e){return function(e){return!t.isString(e)&&!!e.name}(e)?{scheme:e.name,...f(e,["name"])}:{scheme:e}}function em(e,t,n){let{center:i}=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{};const r=rt(e),o=t.getName(r),a=t.getSignalName.bind(t);return e===ee&&yr(n)?i?[Td.fromName((e=>`${a(e)}/2`),o),Td.fromName((e=>`-${a(e)}/2`),o)]:[Td.fromName(a,o),0]:i?[Td.fromName((e=>`-${a(e)}/2`),o),Td.fromName((e=>`${a(e)}/2`),o)]:[0,Td.fromName(a,o)]}function tm(e,n,i){const{encoding:r}=n,o=n.getScaleComponent(i),a=at(i),s=r[a];if("offset"===Ds({step:e,offsetIsDiscrete:Go(s)&&Zi(s.type)})&&Pa(r,a)){const i=n.getScaleComponent(a);let r=`domain('${n.scaleName(a)}').length`;if("band"===i.get("type")){r=`bandspace(${r}, ${i.get("paddingInner")??i.get("padding")??0}, ${i.get("paddingOuter")??i.get("padding")??0})`}const s=o.get("paddingInner")??o.get("padding");return{signal:`${e.step} * ${r} / (1-${l=s,yn(l)?l.signal:t.stringValue(l)})`}}return e.step;var l}function nm(e,t){if("offset"===Ds({step:e,offsetIsDiscrete:hr(t)}))return{step:e.step}}function im(e,t,n){const i=e===Z?"width":"height",r=t[i];return r||Ns(n,i)}function rm(e,t,n){if(t)return yn(t)?{signal:`${t.signal} ? 
0 : ${rm(e,!1,n)}`}:0;switch(e){case"bar":case"tick":return n.scale.minBandSize;case"line":case"trail":case"rule":return n.scale.minStrokeWidth;case"text":return n.scale.minFontSize;case"point":case"square":case"circle":return n.scale.minSize}throw new Error(ni("size",e))}const om=.95;function am(e,t,n){const i=Fs(e.width)?e.width.step:Cs(n,"width"),r=Fs(e.height)?e.height.step:Cs(n,"height");return t.x||t.y?new Td((()=>`min(${[t.x?t.x.signal:i,t.y?t.y.signal:r].join(", ")})`)):Math.min(i,r)}function sm(e,t){gm(e)?function(e,t){const n=e.component.scales,{config:i,encoding:r,markDef:o,specifiedScales:a}=e;for(const s of D(n)){const l=a[s],c=n[s],u=e.getScaleComponent(s),f=fa(r[s]),d=l[t],m=u.get("type"),p=u.get("padding"),g=u.get("paddingInner"),h=_r(m,t),y=Cr(s,t);if(void 0!==d&&(h?y&&yi(y):yi(ci(m,t,s))),h&&void 0===y)if(void 0!==d){const e=f.timeUnit,n=f.type;switch(t){case"domainMax":case"domainMin":vi(l[t])||"temporal"===n||e?c.set(t,{signal:va(l[t],{type:n,timeUnit:e})},!0):c.set(t,l[t],!0);break;default:c.copyKeyFromObject(t,l)}}else{const n=t in lm?lm[t]({model:e,channel:s,fieldOrDatumDef:f,scaleType:m,scalePadding:p,scalePaddingInner:g,domain:l.domain,domainMin:l.domainMin,domainMax:l.domainMax,markDef:o,config:i,hasNestedOffsetScale:Aa(r,s),hasSecondaryRangeChannel:!!r[it(s)]}):i.scale[t];void 0!==n&&c.set(t,n,!1)}}}(e,t):um(e,t)}const lm={bins:e=>{let{model:t,fieldOrDatumDef:n}=e;return Ro(n)?function(e,t){const n=t.bin;if(ln(n)){const i=Qf(e,t.field,n);return new Td((()=>e.getSignalName(i)))}if(cn(n)&&un(n)&&void 0!==n.step)return{step:n.step};return}(t,n):void 0},interpolate:e=>{let{channel:t,fieldOrDatumDef:n}=e;return function(e,t){if(p([me,pe,ge],e)&&"nominal"!==t)return"hcl";return}(t,n.type)},nice:e=>{let{scaleType:n,channel:i,domain:r,domainMin:o,domainMax:a,fieldOrDatumDef:s}=e;return function(e,n,i,r,o,a){if(ua(a)?.bin||t.isArray(i)||null!=o||null!=r||p([or.TIME,or.UTC],e))return;return!!zt(n)||void 0}(n,i,r,o,a,s)},padding:e=>{let{channel:t,scaleType:n,fieldOrDatumDef:i,markDef:r,config:o}=e;return function(e,t,n,i,r,o){if(zt(e)){if(vr(t)){if(void 0!==n.continuousPadding)return n.continuousPadding;const{type:t,orient:a}=r;if("bar"===t&&(!Ro(i)||!i.bin&&!i.timeUnit)&&("vertical"===a&&"x"===e||"horizontal"===a&&"y"===e))return o.continuousBandSize}if(t===or.POINT)return n.pointPadding}return}(t,n,o.scale,i,r,o.bar)},paddingInner:e=>{let{scalePadding:t,channel:n,markDef:i,scaleType:r,config:o,hasNestedOffsetScale:a}=e;return function(e,t,n,i,r){let o=arguments.length>5&&void 0!==arguments[5]&&arguments[5];if(void 0!==e)return;if(zt(t)){const{bandPaddingInner:e,barBandPaddingInner:t,rectBandPaddingInner:i,bandWithNestedOffsetPaddingInner:a}=r;return o?a:U(e,"bar"===n?t:i)}if(Pt(t)&&i===or.BAND)return r.offsetBandPaddingInner;return}(t,n,i.type,r,o.scale,a)},paddingOuter:e=>{let{scalePadding:t,channel:n,scaleType:i,scalePaddingInner:r,config:o,hasNestedOffsetScale:a}=e;return function(e,t,n,i,r){let o=arguments.length>5&&void 0!==arguments[5]&&arguments[5];if(void 0!==e)return;if(zt(t)){const{bandPaddingOuter:e,bandWithNestedOffsetPaddingOuter:t}=r;if(o)return t;if(n===or.BAND)return U(e,yn(i)?{signal:`${i.signal}/2`}:i/2)}else if(Pt(t)){if(n===or.POINT)return.5;if(n===or.BAND)return r.offsetBandPaddingOuter}return}(t,n,i,r,o.scale,a)},reverse:e=>{let{fieldOrDatumDef:t,scaleType:n,channel:i,config:r}=e;return function(e,t,n,i){if("x"===n&&void 0!==i.xReverse)return 
yr(e)&&"descending"===t?yn(i.xReverse)?{signal:`!${i.xReverse.signal}`}:!i.xReverse:i.xReverse;if(yr(e)&&"descending"===t)return!0;return}(n,Ro(t)?t.sort:void 0,i,r.scale)},zero:e=>{let{channel:n,fieldOrDatumDef:i,domain:r,markDef:o,scaleType:a,config:s,hasSecondaryRangeChannel:l}=e;return function(e,n,i,r,o,a,s){if(i&&"unaggregated"!==i&&yr(o)){if(t.isArray(i)){const e=i[0],n=i[i.length-1];if(t.isNumber(e)&&e<=0&&t.isNumber(n)&&n>=0)return!0}return!1}if("size"===e&&"quantitative"===n.type&&!br(o))return!0;if((!Ro(n)||!n.bin)&&p([...Ft,..._t],e)){const{orient:t,type:n}=r;return(!p(["bar","area","line","trail"],n)||!("horizontal"===t&&"y"===e||"vertical"===t&&"x"===e))&&(!(!p(["bar","area"],n)||s)||a?.zero)}return!1}(n,i,r,o,a,s.scale,l)}};function cm(e){gm(e)?function(e){const t=e.component.scales;for(const n of It){const i=t[n];if(!i)continue;const r=Kd(n,e);i.setWithExplicit("range",r)}}(e):um(e,"range")}function um(e,t){const n=e.component.scales;for(const n of e.children)"range"===t?cm(n):sm(n,t);for(const i of D(n)){let r;for(const n of e.children){const e=n.component.scales[i];if(e){r=Kl(r,e.getWithExplicit(t),t,"scale",Ql(((e,n)=>"range"===t&&e.step&&n.step?e.step-n.step:0)))}}n[i].setWithExplicit(t,r)}}function fm(e,t,n,i){const r=function(e,t,n,i){switch(t.type){case"nominal":case"ordinal":if(qe(e)||"discrete"===Qt(e))return"shape"===e&&"ordinal"===t.type&&yi(oi(e,"ordinal")),"ordinal";if(zt(e)||Pt(e)){if(p(["rect","bar","image","rule"],n.type))return"band";if(i)return"band"}else if("arc"===n.type&&e in Ot)return"band";return bo(n[rt(e)])||Jo(t)&&t.axis?.tickBand?"band":"point";case"temporal":return qe(e)?"time":"discrete"===Qt(e)?(yi(oi(e,"temporal")),"ordinal"):Ro(t)&&t.timeUnit&&Ei(t.timeUnit).utc?"utc":"time";case"quantitative":return qe(e)?Ro(t)&&ln(t.bin)?"bin-ordinal":"linear":"discrete"===Qt(e)?(yi(oi(e,"quantitative")),"ordinal"):"linear";case"geojson":return}throw new Error(Zn(t.type))}(t,n,i,arguments.length>4&&void 0!==arguments[4]&&arguments[4]),{type:o}=e;return Ht(t)?void 0!==o?function(e,t){let n=arguments.length>2&&void 0!==arguments[2]&&arguments[2];if(!Ht(e))return!1;switch(e){case Z:case ee:case ie:case re:case se:case oe:return!!vr(t)||"band"===t||"point"===t&&!n;case ye:case we:case be:case xe:case $e:case ve:return vr(t)||br(t)||p(["band","point","ordinal"],t);case me:case pe:case ge:return"band"!==t;case ke:case he:return"ordinal"===t||br(t)}}(t,o)?Ro(n)&&(a=o,s=n.type,!(p([tr,ir],s)?void 0===a||hr(a):s===nr?p([or.TIME,or.UTC,void 0],a):s!==er||dr(a)||br(a)||void 0===a))?(yi(function(e,t){return`FieldDef does not work with "${e}" scale. We are using "${t}" scale instead.`}(o,r)),r):o:(yi(function(e,t,n){return`Channel "${e}" does not work with "${t}" scale. 
We are using "${n}" scale instead.`}(t,o,r)),r):r:null;var a,s}function dm(e){gm(e)?e.component.scales=function(e){const{encoding:t,mark:n,markDef:i}=e,r={};for(const o of It){const a=fa(t[o]);if(a&&n===uo&&o===he&&a.type===rr)continue;let s=a&&a.scale;if(a&&null!==s&&!1!==s){s??={};const n=fm(s,o,a,i,Aa(t,o));r[o]=new Xd(e.scaleName(`${o}`,!0),{value:n,explicit:s.type===n})}}return r}(e):e.component.scales=function(e){const t=e.component.scales={},n={},i=e.component.resolve;for(const t of e.children){dm(t);for(const r of D(t.component.scales))if(i.scale[r]??=Sf(r,e),"shared"===i.scale[r]){const e=n[r],o=t.component.scales[r].getWithExplicit("type");e?sr(e.value,o.value)?n[r]=Kl(e,o,"type","scale",mm):(i.scale[r]="independent",delete n[r]):n[r]=o}}for(const i of D(n)){const r=e.scaleName(i,!0),o=n[i];t[i]=new Xd(r,o);for(const t of e.children){const e=t.component.scales[i];e&&(t.renameScale(e.get("name"),r),e.merged=!0)}}return t}(e)}const mm=Ql(((e,t)=>cr(e)-cr(t)));class pm{constructor(){this.nameMap={}}rename(e,t){this.nameMap[e]=t}has(e){return void 0!==this.nameMap[e]}get(e){for(;this.nameMap[e]&&e!==this.nameMap[e];)e=this.nameMap[e];return e}}function gm(e){return"unit"===e?.type}function hm(e){return"facet"===e?.type}function ym(e){return"concat"===e?.type}function vm(e){return"layer"===e?.type}class bm{constructor(e,n,i,r,o,a,c){this.type=n,this.parent=i,this.config=o,this.parent=i,this.config=o,this.view=pn(c),this.name=e.name??r,this.title=hn(e.title)?{text:e.title}:e.title?pn(e.title):void 0,this.scaleNameMap=i?i.scaleNameMap:new pm,this.projectionNameMap=i?i.projectionNameMap:new pm,this.signalNameMap=i?i.signalNameMap:new pm,this.data=e.data,this.description=e.description,this.transforms=(e.transform??[]).map((e=>gl(e)?{filter:s(e.filter,Ji)}:e)),this.layout="layer"===n||"unit"===n?{}:function(e,n,i){const r=i[n],o={},{spacing:a,columns:s}=r;void 0!==a&&(o.spacing=a),void 0!==s&&(No(e)&&!_o(e.facet)||ws(e))&&(o.columns=s),ks(e)&&(o.columns=1);for(const n of Os)if(void 0!==e[n])if("spacing"===n){const i=e[n];o[n]=t.isNumber(i)?i:{row:i.row??a,column:i.column??a}}else o[n]=e[n];return o}(e,n,o),this.component={data:{sources:i?i.component.data.sources:[],outputNodes:i?i.component.data.outputNodes:{},outputNodeRefCounts:i?i.component.data.outputNodeRefCounts:{},isFaceted:No(e)||i?.component.data.isFaceted&&void 0===e.data},layoutSize:new Gl,layoutHeaders:{row:{},column:{},facet:{}},mark:null,resolve:{scale:{},axis:{},legend:{},...a?l(a):{}},selection:null,scales:null,projection:null,axes:{},legends:{}}}get width(){return this.getSizeSignalRef("width")}get height(){return this.getSizeSignalRef("height")}parse(){this.parseScale(),this.parseLayoutSize(),this.renameTopLevelLayoutSizeSignal(),this.parseSelections(),this.parseProjection(),this.parseData(),this.parseAxesAndHeaders(),this.parseLegends(),this.parseMarkGroup()}parseScale(){!function(e){let{ignoreRange:t}=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{};dm(e),Ed(e);for(const t of Or)sm(e,t);t||cm(e)}(this)}parseProjection(){Gf(this)}renameTopLevelLayoutSizeSignal(){"width"!==this.getName("width")&&this.renameSignal(this.getName("width"),"width"),"height"!==this.getName("height")&&this.renameSignal(this.getName("height"),"height")}parseLegends(){Mf(this)}assembleEncodeFromView(e){const{style:t,...n}=e,i={};for(const e of D(n)){const t=n[e];void 0!==t&&(i[e]=Fn(t))}return i}assembleGroupEncodeEntry(e){let t={};return 
this.view&&(t=this.assembleEncodeFromView(this.view)),e||(this.description&&(t.description=Fn(this.description)),"unit"!==this.type&&"layer"!==this.type)?S(t)?void 0:t:{width:this.getSizeSignalRef("width"),height:this.getSizeSignalRef("height"),...t}}assembleLayout(){if(!this.layout)return;const{spacing:e,...t}=this.layout,{component:n,config:i}=this,r=function(e,t){const n={};for(const i of Re){const r=e[i];if(r?.facetFieldDef){const{titleAnchor:e,titleOrient:o}=of(["titleAnchor","titleOrient"],r.facetFieldDef.header,t,i),a=nf(i,o),s=hf(e,a);void 0!==s&&(n[a]=s)}}return S(n)?void 0:n}(n.layoutHeaders,i);return{padding:e,...this.assembleDefaultLayout(),...t,...r?{titleBand:r}:{}}}assembleDefaultLayout(){return{}}assembleHeaderMarks(){const{layoutHeaders:e}=this.component;let t=[];for(const n of Re)e[n].title&&t.push(lf(this,n));for(const e of af)t=t.concat(ff(this,e));return t}assembleAxes(){return function(e,t){const{x:n=[],y:i=[]}=e;return[...n.map((e=>Iu(e,"grid",t))),...i.map((e=>Iu(e,"grid",t))),...n.map((e=>Iu(e,"main",t))),...i.map((e=>Iu(e,"main",t)))].filter((e=>e))}(this.component.axes,this.config)}assembleLegends(){return Wf(this)}assembleProjections(){return Bf(this)}assembleTitle(){const{encoding:e,...t}=this.title??{},n={...gn(this.config.title).nonMarkTitleProperties,...t,...e?{encode:{update:e}}:{}};if(n.text)return p(["unit","layer"],this.type)?p(["middle",void 0],n.anchor)&&(n.frame??="group"):n.anchor??="start",S(n)?void 0:n}assembleGroup(){let e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:[];const t={};e=e.concat(this.assembleSignals()),e.length>0&&(t.signals=e);const n=this.assembleLayout();n&&(t.layout=n),t.marks=[].concat(this.assembleHeaderMarks(),this.assembleMarks());const i=!this.parent||hm(this.parent)?Gd(this):[];i.length>0&&(t.scales=i);const r=this.assembleAxes();r.length>0&&(t.axes=r);const o=this.assembleLegends();return o.length>0&&(t.legends=o),t}getName(e){return _((this.name?`${this.name}_`:"")+e)}getDataName(e){return this.getName(sc[e].toLowerCase())}requestDataName(e){const t=this.getDataName(e),n=this.component.data.outputNodeRefCounts;return n[t]=(n[t]||0)+1,t}getSizeSignalRef(e){if(hm(this.parent)){const t=Ct(wf(e)),n=this.component.scales[t];if(n&&!n.merged){const e=n.get("type"),i=n.get("range");if(hr(e)&&vn(i)){const e=n.get("name"),i=Hd(Vd(this,t));if(i){return{signal:$f(e,n,ta({aggregate:"distinct",field:i},{expr:"datum"}))}}return yi(In(t)),null}}}return{signal:this.signalNameMap.get(this.getName(e))}}lookupDataSource(e){const t=this.component.data.outputNodes[e];return t?t.getSource():e}getSignalName(e){return this.signalNameMap.get(e)}renameSignal(e,t){this.signalNameMap.rename(e,t)}renameScale(e,t){this.scaleNameMap.rename(e,t)}renameProjection(e,t){this.projectionNameMap.rename(e,t)}scaleName(e,t){return t?this.getName(e):Ke(e)&&Ht(e)&&this.component.scales[e]||this.scaleNameMap.has(this.getName(e))?this.scaleNameMap.get(this.getName(e)):void 0}projectionName(e){return e?this.getName("projection"):this.component.projection&&!this.component.projection.merged||this.projectionNameMap.has(this.getName("projection"))?this.projectionNameMap.get(this.getName("projection")):void 0}correctDataNames=e=>(e.from?.data&&(e.from.data=this.lookupDataSource(e.from.data)),e.from?.facet?.data&&(e.from.facet.data=this.lookupDataSource(e.from.facet.data)),e);getScaleComponent(e){if(!this.component.scales)throw new Error("getScaleComponent cannot be called before parseScale(). 
Make sure you have called parseScale or use parseUnitModelWithScale().");const t=this.component.scales[e];return t&&!t.merged?t:this.parent?this.parent.getScaleComponent(e):void 0}getSelectionComponent(e,t){let n=this.component.selection[e];if(!n&&this.parent&&(n=this.parent.getSelectionComponent(e,t)),!n)throw new Error(function(e){return`Cannot find a selection named "${e}".`}(t));return n}hasAxisOrientSignalRef(){return this.component.axes.x?.some((e=>e.hasOrientSignalRef()))||this.component.axes.y?.some((e=>e.hasOrientSignalRef()))}}class xm extends bm{vgField(e){let t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{};const n=this.fieldDef(e);if(n)return ta(n,t)}reduceFieldDef(e,n){return function(e,n,i,r){return e?D(e).reduce(((i,o)=>{const a=e[o];return t.isArray(a)?a.reduce(((e,t)=>n.call(r,e,t,o)),i):n.call(r,i,a,o)}),i):i}(this.getMapping(),((t,n,i)=>{const r=ua(n);return r?e(t,r,i):t}),n)}forEachFieldDef(e,t){La(this.getMapping(),((t,n)=>{const i=ua(t);i&&e(i,n)}),t)}}class $m extends pc{clone(){return new $m(null,l(this.transform))}constructor(e,t){super(e),this.transform=t,this.transform=l(t);const n=this.transform.as??[void 0,void 0];this.transform.as=[n[0]??"value",n[1]??"density"];const i=this.transform.resolve??"shared";this.transform.resolve=i}dependentFields(){return new Set([this.transform.density,...this.transform.groupby??[]])}producedFields(){return new Set(this.transform.as)}hash(){return`DensityTransform ${d(this.transform)}`}assemble(){const{density:e,...t}=this.transform,n={type:"kde",field:e,...t};return n.resolve=this.transform.resolve,n}}class wm extends pc{clone(){return new wm(null,l(this.transform))}constructor(e,t){super(e),this.transform=t,this.transform=l(t)}dependentFields(){return new Set([this.transform.extent])}producedFields(){return new Set([])}hash(){return`ExtentTransform ${d(this.transform)}`}assemble(){const{extent:e,param:t}=this.transform;return{type:"extent",field:e,signal:t}}}class km extends pc{clone(){return new km(null,{...this.filter})}constructor(e,t){super(e),this.filter=t}static make(e,t){const{config:n,mark:i,markDef:r}=t;if("filter"!==Nn("invalid",r,n))return null;const o=t.reduceFieldDef(((e,n,r)=>{const o=Ht(r)&&t.getScaleComponent(r);if(o){yr(o.get("type"))&&"count"!==n.aggregate&&!fo(i)&&(e[n.field]=n)}return e}),{});return D(o).length?new km(e,o):null}dependentFields(){return new Set(D(this.filter))}producedFields(){return new Set}hash(){return`FilterInvalid ${d(this.filter)}`}assemble(){const e=D(this.filter).reduce(((e,t)=>{const n=this.filter[t],i=ta(n,{expr:"datum"});return null!==n&&("temporal"===n.type?e.push(`(isDate(${i}) || (isValid(${i}) && isFinite(+${i})))`):"quantitative"===n.type&&(e.push(`isValid(${i})`),e.push(`isFinite(+${i})`))),e}),[]);return e.length>0?{type:"filter",expr:e.join(" && ")}:null}}class Sm extends pc{clone(){return new Sm(this.parent,l(this.transform))}constructor(e,t){super(e),this.transform=t,this.transform=l(t);const{flatten:n,as:i=[]}=this.transform;this.transform.as=n.map(((e,t)=>i[t]??e))}dependentFields(){return new Set(this.transform.flatten)}producedFields(){return new Set(this.transform.as)}hash(){return`FlattenTransform ${d(this.transform)}`}assemble(){const{flatten:e,as:t}=this.transform;return{type:"flatten",fields:e,as:t}}}class Dm extends pc{clone(){return new Dm(null,l(this.transform))}constructor(e,t){super(e),this.transform=t,this.transform=l(t);const n=this.transform.as??[void 0,void 0];this.transform.as=[n[0]??"key",n[1]??"value"]}dependentFields(){return new 
Set(this.transform.fold)}producedFields(){return new Set(this.transform.as)}hash(){return`FoldTransform ${d(this.transform)}`}assemble(){const{fold:e,as:t}=this.transform;return{type:"fold",fields:e,as:t}}}class Fm extends pc{clone(){return new Fm(null,l(this.fields),this.geojson,this.signal)}static parseAll(e,t){if(t.component.projection&&!t.component.projection.isFit)return e;let n=0;for(const i of[[ue,ce],[de,fe]]){const r=i.map((e=>{const n=fa(t.encoding[e]);return Ro(n)?n.field:Bo(n)?{expr:`${n.datum}`}:Xo(n)?{expr:`${n.value}`}:void 0}));(r[0]||r[1])&&(e=new Fm(e,r,null,t.getName("geojson_"+n++)))}if(t.channelHasField(he)){const i=t.typedFieldDef(he);i.type===rr&&(e=new Fm(e,null,i.field,t.getName("geojson_"+n++)))}return e}constructor(e,t,n,i){super(e),this.fields=t,this.geojson=n,this.signal=i}dependentFields(){const e=(this.fields??[]).filter(t.isString);return new Set([...this.geojson?[this.geojson]:[],...e])}producedFields(){return new Set}hash(){return`GeoJSON ${this.geojson} ${this.signal} ${d(this.fields)}`}assemble(){return[...this.geojson?[{type:"filter",expr:`isValid(datum["${this.geojson}"])`}]:[],{type:"geojson",...this.fields?{fields:this.fields}:{},...this.geojson?{geojson:this.geojson}:{},signal:this.signal}]}}class zm extends pc{clone(){return new zm(null,this.projection,l(this.fields),l(this.as))}constructor(e,t,n,i){super(e),this.projection=t,this.fields=n,this.as=i}static parseAll(e,t){if(!t.projectionName())return e;for(const n of[[ue,ce],[de,fe]]){const i=n.map((e=>{const n=fa(t.encoding[e]);return Ro(n)?n.field:Bo(n)?{expr:`${n.datum}`}:Xo(n)?{expr:`${n.value}`}:void 0})),r=n[0]===de?"2":"";(i[0]||i[1])&&(e=new zm(e,t.projectionName(),i,[t.getName(`x${r}`),t.getName(`y${r}`)]))}return e}dependentFields(){return new Set(this.fields.filter(t.isString))}producedFields(){return new Set(this.as)}hash(){return`Geopoint ${this.projection} ${d(this.fields)} ${d(this.as)}`}assemble(){return{type:"geopoint",projection:this.projection,fields:this.fields,as:this.as}}}class Om extends pc{clone(){return new Om(null,l(this.transform))}constructor(e,t){super(e),this.transform=t}dependentFields(){return new Set([this.transform.impute,this.transform.key,...this.transform.groupby??[]])}producedFields(){return new Set([this.transform.impute])}processSequence(e){const{start:t=0,stop:n,step:i}=e;return{signal:`sequence(${[t,n,...i?[i]:[]].join(",")})`}}static makeFromTransform(e,t){return new Om(e,t)}static makeFromEncoding(e,t){const n=t.encoding,i=n.x,r=n.y;if(Ro(i)&&Ro(r)){const o=i.impute?i:r.impute?r:void 0;if(void 0===o)return;const a=i.impute?r:r.impute?i:void 0,{method:s,value:l,frame:c,keyvals:u}=o.impute,f=qa(t.mark,n);return new Om(e,{impute:o.field,key:a.field,...s?{method:s}:{},...void 0!==l?{value:l}:{},...c?{frame:c}:{},...void 0!==u?{keyvals:u}:{},...f.length?{groupby:f}:{}})}return null}hash(){return`Impute ${d(this.transform)}`}assemble(){const{impute:e,key:t,keyvals:n,method:i,groupby:r,value:o,frame:a=[null,null]}=this.transform,s={type:"impute",field:e,key:t,...n?{keyvals:(l=n,void 0!==l?.stop?this.processSequence(n):n)}:{},method:"value",...r?{groupby:r}:{},value:i&&"value"!==i?null:o};var l;if(i&&"value"!==i){return[s,{type:"window",as:[`imputed_${e}_value`],ops:[i],fields:[e],frame:a,ignorePeers:!1,...r?{groupby:r}:{}},{type:"formula",expr:`datum.${e} === null ? 
datum.imputed_${e}_value : datum.${e}`,as:e}]}return[s]}}class _m extends pc{clone(){return new _m(null,l(this.transform))}constructor(e,t){super(e),this.transform=t,this.transform=l(t);const n=this.transform.as??[void 0,void 0];this.transform.as=[n[0]??t.on,n[1]??t.loess]}dependentFields(){return new Set([this.transform.loess,this.transform.on,...this.transform.groupby??[]])}producedFields(){return new Set(this.transform.as)}hash(){return`LoessTransform ${d(this.transform)}`}assemble(){const{loess:e,on:t,...n}=this.transform;return{type:"loess",x:t,y:e,...n}}}class Cm extends pc{clone(){return new Cm(null,l(this.transform),this.secondary)}constructor(e,t,n){super(e),this.transform=t,this.secondary=n}static make(e,t,n,i){const r=t.component.data.sources,{from:o}=n;let a=null;if(function(e){return"data"in e}(o)){let e=Hm(o.data,r);e||(e=new cd(o.data),r.push(e));const n=t.getName(`lookup_${i}`);a=new gc(e,n,sc.Lookup,t.component.data.outputNodeRefCounts),t.component.data.outputNodes[n]=a}else if(function(e){return"param"in e}(o)){const e=o.param;let i;n={as:e,...n};try{i=t.getSelectionComponent(_(e),e)}catch(t){throw new Error(function(e){return`Lookups can only be performed on selection parameters. "${e}" is a variable parameter.`}(e))}if(a=i.materialized,!a)throw new Error(function(e){return`Cannot define and lookup the "${e}" selection in the same view. Try moving the lookup into a second, layered view?`}(e))}return new Cm(e,n,a.getSource())}dependentFields(){return new Set([this.transform.lookup])}producedFields(){return new Set(this.transform.as?t.array(this.transform.as):this.transform.from.fields)}hash(){return`Lookup ${d({transform:this.transform,secondary:this.secondary})}`}assemble(){let e;if(this.transform.from.fields)e={values:this.transform.from.fields,...this.transform.as?{as:t.array(this.transform.as)}:{}};else{let n=this.transform.as;t.isString(n)||(yi('If "from.fields" is not specified, "as" has to be a string that specifies the key to be used for the data from the secondary source.'),n="_lookup"),e={as:[n]}}return{type:"lookup",from:this.secondary,key:this.transform.from.key,fields:[this.transform.lookup],...e,...this.transform.default?{default:this.transform.default}:{}}}}class Nm extends pc{clone(){return new Nm(null,l(this.transform))}constructor(e,t){super(e),this.transform=t,this.transform=l(t);const n=this.transform.as??[void 0,void 0];this.transform.as=[n[0]??"prob",n[1]??"value"]}dependentFields(){return new Set([this.transform.quantile,...this.transform.groupby??[]])}producedFields(){return new Set(this.transform.as)}hash(){return`QuantileTransform ${d(this.transform)}`}assemble(){const{quantile:e,...t}=this.transform;return{type:"quantile",field:e,...t}}}class Pm extends pc{clone(){return new Pm(null,l(this.transform))}constructor(e,t){super(e),this.transform=t,this.transform=l(t);const n=this.transform.as??[void 0,void 0];this.transform.as=[n[0]??t.on,n[1]??t.regression]}dependentFields(){return new Set([this.transform.regression,this.transform.on,...this.transform.groupby??[]])}producedFields(){return new Set(this.transform.as)}hash(){return`RegressionTransform ${d(this.transform)}`}assemble(){const{regression:e,on:t,...n}=this.transform;return{type:"regression",x:t,y:e,...n}}}class Am extends pc{clone(){return new Am(null,l(this.transform))}constructor(e,t){super(e),this.transform=t}addDimensions(e){this.transform.groupby=b((this.transform.groupby??[]).concat(e),(e=>e))}producedFields(){}dependentFields(){return new 
Set([this.transform.pivot,this.transform.value,...this.transform.groupby??[]])}hash(){return`PivotTransform ${d(this.transform)}`}assemble(){const{pivot:e,value:t,groupby:n,limit:i,op:r}=this.transform;return{type:"pivot",field:e,value:t,...void 0!==i?{limit:i}:{},...void 0!==r?{op:r}:{},...void 0!==n?{groupby:n}:{}}}}class jm extends pc{clone(){return new jm(null,l(this.transform))}constructor(e,t){super(e),this.transform=t}dependentFields(){return new Set}producedFields(){return new Set}hash(){return`SampleTransform ${d(this.transform)}`}assemble(){return{type:"sample",size:this.transform.sample}}}function Tm(e){let t=0;return function n(i,r){if(i instanceof cd&&!i.isGenerator&&!ec(i.data)){e.push(r);r={name:null,source:r.name,transform:[]}}if(i instanceof od&&(i.parent instanceof cd&&!r.source?(r.format={...r.format,parse:i.assembleFormatParse()},r.transform.push(...i.assembleTransforms(!0))):r.transform.push(...i.assembleTransforms())),i instanceof td)return r.name||(r.name="data_"+t++),!r.source||r.transform.length>0?(e.push(r),i.data=r.name):i.data=r.source,void e.push(...i.assemble());if((i instanceof sd||i instanceof ld||i instanceof km||i instanceof qu||i instanceof ef||i instanceof zm||i instanceof ed||i instanceof Cm||i instanceof zd||i instanceof Dd||i instanceof Dm||i instanceof Sm||i instanceof $m||i instanceof _m||i instanceof Nm||i instanceof Pm||i instanceof ad||i instanceof jm||i instanceof Am||i instanceof wm)&&r.transform.push(i.assemble()),(i instanceof Kf||i instanceof vc||i instanceof Om||i instanceof Fd||i instanceof Fm)&&r.transform.push(...i.assemble()),i instanceof gc)if(r.source&&0===r.transform.length)i.setSource(r.source);else if(i.parent instanceof gc)i.setSource(r.name);else if(r.name||(r.name="data_"+t++),i.setSource(r.name),1===i.numChildren()){e.push(r);r={name:null,source:r.name,transform:[]}}switch(i.numChildren()){case 0:i instanceof gc&&(!r.source||r.transform.length>0)&&e.push(r);break;case 1:n(i.children[0],r);break;default:{r.name||(r.name="data_"+t++);let o=r.name;!r.source||r.transform.length>0?e.push(r):o=r.source;for(const e of i.children){n(e,{name:null,source:o,transform:[]})}break}}}}function Em(e){return"top"===e||"left"===e||yn(e)?"header":"footer"}function Mm(e,n){const{facet:i,config:r,child:o,component:a}=e;if(e.channelHasField(n)){const s=i[n],l=rf("title",null,r,n);let c=aa(s,r,{allowDisabling:!0,includeDefault:void 0===l||!!l});o.component.layoutHeaders[n].title&&(c=t.isArray(c)?c.join(", "):c,c+=` / ${o.component.layoutHeaders[n].title}`,o.component.layoutHeaders[n].title=null);const u=rf("labelOrient",s.header,r,n),f=null!==s.header&&U(s.header?.labels,r.header.labels,!0),d=p(["bottom","right"],u)?"footer":"header";a.layoutHeaders[n]={title:null!==s.header?c:null,facetFieldDef:s,[d]:"facet"===n?[]:[Lm(e,n,f)]}}}function Lm(e,t,n){const i="row"===t?"height":"width";return{labels:n,sizeSignal:e.child.component.layoutSize.get(i)?e.child.getSizeSignalRef(i):void 0,axes:[]}}function qm(e,t){const{child:n}=e;if(n.component.axes[t]){const{layoutHeaders:i,resolve:r}=e.component;if(r.axis[t]=Df(r,t),"shared"===r.axis[t]){const r="x"===t?"column":"row",o=i[r];for(const i of n.component.axes[t]){const t=Em(i.get("orient"));o[t]??=[Lm(e,r,!1)];const n=Iu(i,"main",e.config,{header:!0});n&&o[t][0].axes.push(n),i.mainExtracted=!0}}}}function Um(e){for(const t of e.children)t.parseLayoutSize()}function Rm(e,t){const n=wf(t),i=Ct(n),r=e.component.resolve,o=e.component.layoutSize;let a;for(const t of e.children){const 
o=t.component.layoutSize.getWithExplicit(n),s=r.scale[i]??Sf(i,e);if("independent"===s&&"step"===o.value){a=void 0;break}if(a){if("independent"===s&&a.value!==o.value){a=void 0;break}a=Kl(a,o,n,"")}else a=o}if(a){for(const i of e.children)e.renameSignal(i.getName(n),e.getName(t)),i.component.layoutSize.set(n,"merged",!1);o.setWithExplicit(t,a)}else o.setWithExplicit(t,{explicit:!1,value:void 0})}function Wm(e,t){const n="width"===t?"x":"y",i=e.config,r=e.getScaleComponent(n);if(r){const e=r.get("type"),n=r.get("range");if(hr(e)){const e=Ns(i.view,t);return vn(n)||Fs(e)?"step":e}return _s(i.view,t)}if(e.hasProjection||"arc"===e.mark)return _s(i.view,t);{const e=Ns(i.view,t);return Fs(e)?e.step:e}}function Bm(e,t,n){return ta(t,{suffix:`by_${ta(e)}`,...n})}class Im extends xm{constructor(e,t,n,i){super(e,"facet",t,n,i,e.resolve),this.child=vp(e.spec,this,this.getName("child"),void 0,i),this.children=[this.child],this.facet=this.initFacet(e.facet)}initFacet(e){if(!_o(e))return{facet:this.initFacetFieldDef(e,"facet")};const t=D(e),n={};for(const i of t){if(![Q,J].includes(i)){yi(ni(i,"facet"));break}const t=e[i];if(void 0===t.field){yi(ti(t,i));break}n[i]=this.initFacetFieldDef(t,i)}return n}initFacetFieldDef(e,t){const n=pa(e,t);return n.header?n.header=pn(n.header):null===n.header&&(n.header=null),n}channelHasField(e){return!!this.facet[e]}fieldDef(e){return this.facet[e]}parseData(){this.component.data=Vm(this),this.child.parseData()}parseLayoutSize(){Um(this)}parseSelections(){this.child.parseSelections(),this.component.selection=this.child.component.selection}parseMarkGroup(){this.child.parseMarkGroup()}parseAxesAndHeaders(){this.child.parseAxesAndHeaders(),function(e){for(const t of Re)Mm(e,t);qm(e,"x"),qm(e,"y")}(this)}assembleSelectionTopLevelSignals(e){return this.child.assembleSelectionTopLevelSignals(e)}assembleSignals(){return this.child.assembleSignals(),[]}assembleSelectionData(e){return this.child.assembleSelectionData(e)}getHeaderLayoutMixins(){const e={};for(const t of Re)for(const n of sf){const i=this.component.layoutHeaders[t],r=i[n],{facetFieldDef:o}=i;if(o){const n=rf("titleOrient",o.header,this.config,t);if(["right","bottom"].includes(n)){const i=nf(t,n);e.titleAnchor??={},e.titleAnchor[i]="end"}}if(r?.[0]){const r="row"===t?"height":"width",o="header"===n?"headerBand":"footerBand";"facet"===t||this.child.component.layoutSize.get(r)||(e[o]??={},e[o][t]=.5),i.title&&(e.offset??={},e.offset["row"===t?"rowTitle":"columnTitle"]=10)}}return e}assembleDefaultLayout(){const{column:e,row:t}=this.facet,n=e?this.columnDistinctSignal():t?1:void 0;let i="all";return(t||"independent"!==this.component.resolve.scale.x)&&(e||"independent"!==this.component.resolve.scale.y)||(i="none"),{...this.getHeaderLayoutMixins(),...n?{columns:n}:{},bounds:"full",align:i}}assembleLayoutSignals(){return this.child.assembleLayoutSignals()}columnDistinctSignal(){if(!(this.parent&&this.parent instanceof Im)){return{signal:`length(data('${this.getName("column_domain")}'))`}}}assembleGroupStyle(){}assembleGroup(e){return this.parent&&this.parent instanceof Im?{...this.channelHasField("column")?{encode:{update:{columns:{field:ta(this.facet.column,{prefix:"distinct"})}}}}:{},...super.assembleGroup(e)}:super.assembleGroup(e)}getCardinalityAggregateForChild(){const e=[],t=[],n=[];if(this.child instanceof Im){if(this.child.channelHasField("column")){const i=ta(this.child.facet.column);e.push(i),t.push("distinct"),n.push(`distinct_${i}`)}}else for(const i of Ft){const 
r=this.child.component.scales[i];if(r&&!r.merged){const o=r.get("type"),a=r.get("range");if(hr(o)&&vn(a)){const r=Hd(Vd(this.child,i));r?(e.push(r),t.push("distinct"),n.push(`distinct_${r}`)):yi(In(i))}}}return{fields:e,ops:t,as:n}}assembleFacet(){const{name:e,data:n}=this.component.data.facetRoot,{row:i,column:r}=this.facet,{fields:o,ops:a,as:s}=this.getCardinalityAggregateForChild(),l=[];for(const e of Re){const n=this.facet[e];if(n){l.push(ta(n));const{bin:c,sort:u}=n;if(ln(c)&&l.push(ta(n,{binSuffix:"end"})),zo(u)){const{field:e,op:t=ko}=u,l=Bm(n,u);i&&r?(o.push(l),a.push("max"),s.push(l)):(o.push(e),a.push(t),s.push(l))}else if(t.isArray(u)){const t=tf(n,e);o.push(t),a.push("max"),s.push(t)}}}const c=!!i&&!!r;return{name:e,data:n,groupby:l,...c||o.length>0?{aggregate:{...c?{cross:c}:{},...o.length?{fields:o,ops:a,as:s}:{}}}:{}}}facetSortFields(e){const{facet:n}=this,i=n[e];return i?zo(i.sort)?[Bm(i,i.sort,{expr:"datum"})]:t.isArray(i.sort)?[tf(i,e,{expr:"datum"})]:[ta(i,{expr:"datum"})]:[]}facetSortOrder(e){const{facet:n}=this,i=n[e];if(i){const{sort:e}=i;return[(zo(e)?e.order:!t.isArray(e)&&e)||"ascending"]}return[]}assembleLabelTitle(){const{facet:e,config:t}=this;if(e.facet)return mf(e.facet,"facet",t);const n={row:["top","bottom"],column:["left","right"]};for(const i of af)if(e[i]){const r=rf("labelOrient",e[i]?.header,t,i);if(n[i].includes(r))return mf(e[i],i,t)}}assembleMarks(){const{child:e}=this,t=function(e){const t=[],n=Tm(t);for(const t of e.children)n(t,{source:e.name,name:null,transform:[]});return t}(this.component.data.facetRoot),n=e.assembleGroupEncodeEntry(!1),i=this.assembleLabelTitle()||e.assembleTitle(),r=e.assembleGroupStyle();return[{name:this.getName("cell"),type:"group",...i?{title:i}:{},...r?{style:r}:{},from:{facet:this.assembleFacet()},sort:{field:Re.map((e=>this.facetSortFields(e))).flat(),order:Re.map((e=>this.facetSortOrder(e))).flat()},...t.length>0?{data:t}:{},...n?{encode:{update:n}}:{},...e.assembleGroup(fc(this,[]))}]}getMapping(){return this.facet}}function Hm(e,t){for(const n of t){const t=n.data;if(e.name&&n.hasName()&&e.name!==n.dataName)continue;const i=e.format?.mesh,r=t.format?.feature;if(i&&r)continue;const o=e.format?.feature;if((o||r)&&o!==r)continue;const a=t.format?.mesh;if(!i&&!a||i===a)if(tc(e)&&tc(t)){if(Y(e.values,t.values))return n}else if(ec(e)&&ec(t)){if(e.url===t.url)return n}else if(nc(e)&&e.name===n.dataName)return n}return null}function Vm(e){let t=function(e,t){if(e.data||!e.parent){if(null===e.data){const e=new cd({values:[]});return t.push(e),e}const n=Hm(e.data,t);if(n)return ic(e.data)||(n.data.format=y({},e.data.format,n.data.format)),!n.hasName()&&e.data.name&&(n.dataName=e.data.name),n;{const n=new cd(e.data);return t.push(n),n}}return e.parent.component.data.facetRoot?e.parent.component.data.facetRoot:e.parent.component.data.main}(e,e.component.data.sources);const{outputNodes:n,outputNodeRefCounts:i}=e.component.data,r=e.data,o=!(r&&(ic(r)||ec(r)||tc(r)))&&e.parent?e.parent.component.data.ancestorParse.clone():new Zl;ic(r)?(rc(r)?t=new ld(t,r.sequence):ac(r)&&(t=new sd(t,r.graticule)),o.parseNothing=!0):null===r?.format?.parse&&(o.parseNothing=!0),t=od.makeExplicit(t,e,o)??t,t=new ad(t);const a=e.parent&&vm(e.parent);(gm(e)||hm(e))&&a&&(t=Kf.makeFromEncoding(t,e)??t),e.transforms.length>0&&(t=function(e,t,n){let i=0;for(const r of t.transforms){let o,a;if(Fl(r))a=e=new ef(e,r),o="derived";else if(gl(r)){const i=id(r);a=e=od.makeWithAncestors(e,{},i,n)??e,e=new qu(e,t,r.filter)}else 
if(zl(r))a=e=Kf.makeFromTransform(e,r,t),o="number";else if(_l(r))o="date",void 0===n.getWithExplicit(r.field).value&&(e=new od(e,{[r.field]:o}),n.set(r.field,o,!1)),a=e=vc.makeFromTransform(e,r);else if(Cl(r))a=e=ed.makeFromTransform(e,r),o="number",ju(t)&&(e=new ad(e));else if(hl(r))a=e=Cm.make(e,t,r,i++),o="derived";else if(kl(r))a=e=new zd(e,r),o="number";else if(Sl(r))a=e=new Dd(e,r),o="number";else if(Nl(r))a=e=Fd.makeFromTransform(e,r),o="derived";else if(Pl(r))a=e=new Dm(e,r),o="derived";else if(Al(r))a=e=new wm(e,r),o="derived";else if(Dl(r))a=e=new Sm(e,r),o="derived";else if(yl(r))a=e=new Am(e,r),o="derived";else if(wl(r))e=new jm(e,r);else if(Ol(r))a=e=Om.makeFromTransform(e,r),o="derived";else if(vl(r))a=e=new $m(e,r),o="derived";else if(bl(r))a=e=new Nm(e,r),o="derived";else if(xl(r))a=e=new Pm(e,r),o="derived";else{if(!$l(r)){yi(`Ignoring an invalid transform: ${X(r)}.`);continue}a=e=new _m(e,r),o="derived"}if(a&&void 0!==o)for(const e of a.producedFields()??[])n.set(e,o,!1)}return e}(t,e,o));const s=function(e){const t={};if(gm(e)&&e.component.selection)for(const n of D(e.component.selection)){const i=e.component.selection[n];for(const e of i.project.items)!e.channel&&q(e.field)>1&&(t[e.field]="flatten")}return t}(e),l=rd(e);t=od.makeWithAncestors(t,{},{...s,...l},o)??t,gm(e)&&(t=Fm.parseAll(t,e),t=zm.parseAll(t,e)),(gm(e)||hm(e))&&(a||(t=Kf.makeFromEncoding(t,e)??t),t=vc.makeFromEncoding(t,e)??t,t=ef.parseAllForSortIndex(t,e));const c=t=Gm(sc.Raw,e,t);if(gm(e)){const n=ed.makeFromEncoding(t,e);n&&(t=n,ju(e)&&(t=new ad(t))),t=Om.makeFromEncoding(t,e)??t,t=Fd.makeFromEncoding(t,e)??t}gm(e)&&(t=km.make(t,e)??t);const u=t=Gm(sc.Main,e,t);gm(e)&&function(e,t){for(const[n,i]of z(e.component.selection??{})){const r=e.getName(`lookup_${n}`);e.component.data.outputNodes[r]=i.materialized=new gc(new qu(t,e,{param:n}),r,sc.Lookup,e.component.data.outputNodeRefCounts)}}(e,u);let f=null;if(hm(e)){const i=e.getName("facet");t=function(e,t){const{row:n,column:i}=t;if(n&&i){let t=null;for(const r of[n,i])if(zo(r.sort)){const{field:n,op:i=ko}=r.sort;e=t=new Dd(e,{joinaggregate:[{op:i,field:n,as:Bm(r,r.sort,{forAs:!0})}],groupby:[ta(r)]})}return t}return null}(t,e.facet)??t,f=new td(t,e,i,u.getSource()),n[i]=f}return{...e.component.data,outputNodes:n,outputNodeRefCounts:i,raw:c,main:u,facetRoot:f,ancestorParse:o}}function Gm(e,t,n){const{outputNodes:i,outputNodeRefCounts:r}=t.component.data,o=t.getDataName(e),a=new gc(n,o,e,r);return i[o]=a,a}class Ym extends bm{constructor(e,t,n,i){super(e,"concat",t,n,i,e.resolve),"shared"!==e.resolve?.axis?.x&&"shared"!==e.resolve?.axis?.y||yi("Axes cannot be shared in concatenated or repeated views yet (https://github.com/vega/vega-lite/issues/2415)."),this.children=this.getChildren(e).map(((e,t)=>vp(e,this,this.getName(`concat_${t}`),void 0,i)))}parseData(){this.component.data=Vm(this);for(const e of this.children)e.parseData()}parseSelections(){this.component.selection={};for(const e of this.children){e.parseSelections();for(const t of D(e.component.selection))this.component.selection[t]=e.component.selection[t]}}parseMarkGroup(){for(const e of this.children)e.parseMarkGroup()}parseAxesAndHeaders(){for(const e of this.children)e.parseAxesAndHeaders()}getChildren(e){return ks(e)?e.vconcat:Ss(e)?e.hconcat:e.concat}parseLayoutSize(){!function(e){Um(e);const t=1===e.layout.columns?"width":"childWidth",n=void 0===e.layout.columns?"height":"childHeight";Rm(e,t),Rm(e,n)}(this)}parseAxisGroup(){return null}assembleSelectionTopLevelSignals(e){return 
this.children.reduce(((e,t)=>t.assembleSelectionTopLevelSignals(e)),e)}assembleSignals(){return this.children.forEach((e=>e.assembleSignals())),[]}assembleLayoutSignals(){const e=vf(this);for(const t of this.children)e.push(...t.assembleLayoutSignals());return e}assembleSelectionData(e){return this.children.reduce(((e,t)=>t.assembleSelectionData(e)),e)}assembleMarks(){return this.children.map((e=>{const t=e.assembleTitle(),n=e.assembleGroupStyle(),i=e.assembleGroupEncodeEntry(!1);return{type:"group",name:e.getName("group"),...t?{title:t}:{},...n?{style:n}:{},...i?{encode:{update:i}}:{},...e.assembleGroup()}}))}assembleGroupStyle(){}assembleDefaultLayout(){const e=this.layout.columns;return{...null!=e?{columns:e}:{},bounds:"full",align:"each"}}}const Xm={disable:1,gridScale:1,scale:1,...Da,labelExpr:1,encode:1},Qm=D(Xm);class Jm extends Gl{constructor(){let e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{},t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},n=arguments.length>2&&void 0!==arguments[2]&&arguments[2];super(),this.explicit=e,this.implicit=t,this.mainExtracted=n}clone(){return new Jm(l(this.explicit),l(this.implicit),this.mainExtracted)}hasAxisPart(e){return"axis"===e||("grid"===e||"title"===e?!!this.get(e):!(!1===(t=this.get(e))||null===t));var t}hasOrientSignalRef(){return yn(this.explicit.orient)}}const Km={bottom:"top",top:"bottom",left:"right",right:"left"};function Zm(e,t){if(!e)return t.map((e=>e.clone()));{if(e.length!==t.length)return;const n=e.length;for(let i=0;i<n;i++){const n=e[i],r=t[i];if(!!n!==!!r)return;if(n&&r){const o=n.getWithExplicit("orient"),a=r.getWithExplicit("orient");if(o.explicit&&a.explicit&&o.value!==a.value||n.hasOrientSignalRef()&&r.hasOrientSignalRef())return;e[i]=ep(n,r)}}}return e}function ep(e,t){for(const n of Qm){const i=Kl(e.getWithExplicit(n),t.getWithExplicit(n),n,"axis",((e,t)=>{switch(n){case"title":return Ln(e,t);case"gridScale":return{explicit:e.explicit,value:U(e.value,t.value)}}return Jl(e,t,n,"axis")}));e.setWithExplicit(n,i)}return e}function tp(e,t,n,i,r){if("disable"===t)return void 0!==n;switch(n=n||{},t){case"titleAngle":case"labelAngle":return e===(yn(n.labelAngle)?n.labelAngle:H(n.labelAngle));case"values":return!!n.values;case"encode":return!!n.encoding||!!n.labelAngle;case"title":if(e===Zu(i,r))return!0}return e===n[t]}const np=new Set(["grid","translate","format","formatType","orient","labelExpr","tickCount","position","tickMinStep"]);function ip(e,t){let n=t.axis(e);const i=new Jm,r=fa(t.encoding[e]),{mark:o,config:a}=t,s=n?.orient||a["x"===e?"axisX":"axisY"]?.orient||a.axis?.orient||function(e){return"x"===e?"bottom":"left"}(e),l=t.getScaleComponent(e).get("type"),c=function(e,t,n,i){const r="band"===t?["axisDiscrete","axisBand"]:"point"===t?["axisDiscrete","axisPoint"]:dr(t)?["axisQuantitative"]:"time"===t||"utc"===t?["axisTemporal"]:[],o="x"===e?"axisX":"axisY",a=yn(n)?"axisOrient":`axis${P(n)}`,s=[...r,...r.map((e=>o+e.substr(4)))],l=["axis",a,o];return{vlOnlyAxisConfig:Vu(s,i,e,n),vgAxisConfig:Vu(l,i,e,n),axisConfigStyle:Gu([...l,...s],i)}}(e,l,s,t.config),u=void 0!==n?!n:Yu("disable",a.style,n?.style,c).configValue;if(i.set("disable",u,void 0!==n),u)return i;n=n||{};const f=function(e,t,n,i,r){const o=t?.labelAngle;if(void 0!==o)return yn(o)?o:H(o);{const{configValue:o}=Yu("labelAngle",i,t?.style,r);return void 0!==o?H(o):n!==Z||!p([ir,tr],e.type)||Ro(e)&&e.timeUnit?void 0:270}}(r,n,e,a.style,c),d=Hr(n.formatType,r,l),m=Ir(r,r.type,n.format,n.formatType,a,!0),g={fieldOrDatumDef:r,axis:n,channel:e,model:t,scaleType:l,orient:s,labelAngle:f,format:m,formatType:d,mark:o,config:a};for(const r of Qm){const o=r in Xu?Xu[r](g):za(r)?n[r]:void 0,s=void 0!==o,l=tp(o,r,n,t,e);if(s&&l)i.set(r,o,l);else{const{configValue:e,configFrom:t}=za(r)&&"values"!==r?Yu(r,a.style,n.style,c):{},u=void 
0!==e;s&&!u?i.set(r,o,l):("vgAxisConfig"!==t||np.has(r)&&u||wa(e)||yn(e))&&i.set(r,e,!1)}}const h=n.encoding??{},y=ka.reduce(((n,r)=>{if(!i.hasAxisPart(r))return n;const o=kf(h[r]??{},t),a="labels"===r?function(e,t,n){const{encoding:i,config:r}=e,o=fa(i[t])??fa(i[it(t)]),a=e.axis(t)||{},{format:s,formatType:l}=a;if(Lr(l))return{text:Br({fieldOrDatumDef:o,field:"datum.value",format:s,formatType:l,config:r}),...n};if(void 0===s&&void 0===l&&r.customFormatTypes){if("quantitative"===Wo(o)){if(Jo(o)&&"normalize"===o.stack&&r.normalizedNumberFormatType)return{text:Br({fieldOrDatumDef:o,field:"datum.value",format:r.normalizedNumberFormat,formatType:r.normalizedNumberFormatType,config:r}),...n};if(r.numberFormatType)return{text:Br({fieldOrDatumDef:o,field:"datum.value",format:r.numberFormat,formatType:r.numberFormatType,config:r}),...n}}if("temporal"===Wo(o)&&r.timeFormatType&&Ro(o)&&!o.timeUnit)return{text:Br({fieldOrDatumDef:o,field:"datum.value",format:r.timeFormat,formatType:r.timeFormatType,config:r}),...n}}return n}(t,e,o):o;return void 0===a||S(a)||(n[r]={update:a}),n}),{});return S(y)||i.set("encode",y,!!n.encoding||void 0!==n.labelAngle),i}function rp(e,t){const{config:n}=e;return{...lu(e,{align:"ignore",baseline:"ignore",color:"include",size:"include",orient:"ignore",theta:"ignore"}),...Xc("x",e,{defaultPos:"mid"}),...Xc("y",e,{defaultPos:"mid"}),...Hc("size",e),...Hc("angle",e),...op(e,n,t)}}function op(e,t,n){return n?{shape:{value:n}}:Hc("shape",e)}const ap={vgMark:"rule",encodeEntry:e=>{const{markDef:t}=e,n=t.orient;return e.encoding.x||e.encoding.y||e.encoding.latitude||e.encoding.longitude?{...lu(e,{align:"ignore",baseline:"ignore",color:"include",orient:"ignore",size:"ignore",theta:"ignore"}),...eu("x",e,{defaultPos:"horizontal"===n?"zeroOrMax":"mid",defaultPos2:"zeroOrMin",range:"vertical"!==n}),...eu("y",e,{defaultPos:"vertical"===n?"zeroOrMax":"mid",defaultPos2:"zeroOrMin",range:"horizontal"!==n}),...Hc("size",e,{vgChannel:"strokeWidth"})}:{}}};function sp(e,t,n){if(void 0===Nn("align",e,n))return"center"}function lp(e,t,n){if(void 0===Nn("baseline",e,n))return"middle"}const cp={vgMark:"rect",encodeEntry:e=>{const{config:t,markDef:n}=e,i=n.orient,r="horizontal"===i?"width":"height",o="horizontal"===i?"height":"width";return{...lu(e,{align:"ignore",baseline:"ignore",color:"include",orient:"ignore",size:"ignore",theta:"ignore"}),...Xc("x",e,{defaultPos:"mid",vgChannel:"xc"}),...Xc("y",e,{defaultPos:"mid",vgChannel:"yc"}),...Hc("size",e,{defaultValue:up(e),vgChannel:r}),[o]:Fn(Nn("thickness",n,t))}}};function up(e){const{config:n,markDef:i}=e,{orient:r}=i,o="horizontal"===r?"width":"height",a=e.getScaleComponent("horizontal"===r?"x":"y"),s=Nn("size",i,n,{vgChannel:o})??n.tick.bandSize;if(void 0!==s)return s;{const e=a?a.get("range"):void 0;if(e&&vn(e)&&t.isNumber(e.step))return 3*e.step/4;return 3*Cs(n.view,o)/4}}const 
fp={arc:{vgMark:"arc",encodeEntry:e=>({...lu(e,{align:"ignore",baseline:"ignore",color:"include",size:"ignore",orient:"ignore",theta:"ignore"}),...Xc("x",e,{defaultPos:"mid"}),...Xc("y",e,{defaultPos:"mid"}),...iu(e,"radius"),...iu(e,"theta")})},area:{vgMark:"area",encodeEntry:e=>({...lu(e,{align:"ignore",baseline:"ignore",color:"include",orient:"include",size:"ignore",theta:"ignore"}),...eu("x",e,{defaultPos:"zeroOrMin",defaultPos2:"zeroOrMin",range:"horizontal"===e.markDef.orient}),...eu("y",e,{defaultPos:"zeroOrMin",defaultPos2:"zeroOrMin",range:"vertical"===e.markDef.orient}),...fu(e)})},bar:{vgMark:"rect",encodeEntry:e=>({...lu(e,{align:"ignore",baseline:"ignore",color:"include",orient:"ignore",size:"ignore",theta:"ignore"}),...iu(e,"x"),...iu(e,"y")})},circle:{vgMark:"symbol",encodeEntry:e=>rp(e,"circle")},geoshape:{vgMark:"shape",encodeEntry:e=>({...lu(e,{align:"ignore",baseline:"ignore",color:"include",size:"ignore",orient:"ignore",theta:"ignore"})}),postEncodingTransform:e=>{const{encoding:t}=e,n=t.shape;return[{type:"geoshape",projection:e.projectionName(),...n&&Ro(n)&&n.type===rr?{field:ta(n,{expr:"datum"})}:{}}]}},image:{vgMark:"image",encodeEntry:e=>({...lu(e,{align:"ignore",baseline:"ignore",color:"ignore",orient:"ignore",size:"ignore",theta:"ignore"}),...iu(e,"x"),...iu(e,"y"),...Mc(e,"url")})},line:{vgMark:"line",encodeEntry:e=>({...lu(e,{align:"ignore",baseline:"ignore",color:"include",size:"ignore",orient:"ignore",theta:"ignore"}),...Xc("x",e,{defaultPos:"mid"}),...Xc("y",e,{defaultPos:"mid"}),...Hc("size",e,{vgChannel:"strokeWidth"}),...fu(e)})},point:{vgMark:"symbol",encodeEntry:e=>rp(e)},rect:{vgMark:"rect",encodeEntry:e=>({...lu(e,{align:"ignore",baseline:"ignore",color:"include",orient:"ignore",size:"ignore",theta:"ignore"}),...iu(e,"x"),...iu(e,"y")})},rule:ap,square:{vgMark:"symbol",encodeEntry:e=>rp(e,"square")},text:{vgMark:"text",encodeEntry:e=>{const{config:t,encoding:n}=e;return{...lu(e,{align:"include",baseline:"include",color:"include",size:"ignore",orient:"ignore",theta:"include"}),...Xc("x",e,{defaultPos:"mid"}),...Xc("y",e,{defaultPos:"mid"}),...Mc(e),...Hc("size",e,{vgChannel:"fontSize"}),...Hc("angle",e),...du("align",sp(e.markDef,n,t)),...du("baseline",lp(e.markDef,n,t)),...Xc("radius",e,{defaultPos:null}),...Xc("theta",e,{defaultPos:null})}}},tick:cp,trail:{vgMark:"trail",encodeEntry:e=>({...lu(e,{align:"ignore",baseline:"ignore",color:"include",size:"include",orient:"ignore",theta:"ignore"}),...Xc("x",e,{defaultPos:"mid"}),...Xc("y",e,{defaultPos:"mid"}),...Hc("size",e),...fu(e)})}};function dp(e){if(p([to,Kr,so],e.mark)){const t=qa(e.mark,e.encoding);if(t.length>0)return function(e,t){return[{name:e.getName("pathgroup"),type:"group",from:{facet:{name:mp+e.requestDataName(sc.Main),data:e.requestDataName(sc.Main),groupby:t}},encode:{update:{width:{field:{group:"width"}},height:{field:{group:"height"}}}},marks:gp(e,{fromPrefix:mp})}]}(e,t)}else if(e.mark===Zr){const t=wn.some((t=>Nn(t,e.markDef,e.config)));if(e.stack&&!e.fieldDef("size")&&t)return function(e){const[t]=gp(e,{fromPrefix:pp}),n=e.scaleName(e.stack.fieldChannel),i=function(){let t=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{};return e.vgField(e.stack.fieldChannel,t)},r=(e,t)=>`${e}(${[i({prefix:"min",suffix:"start",expr:t}),i({prefix:"max",suffix:"start",expr:t}),i({prefix:"min",suffix:"end",expr:t}),i({prefix:"max",suffix:"end",expr:t})].map((e=>`scale('${n}',${e})`)).join(",")})`;let 
o,a;"x"===e.stack.fieldChannel?(o={...u(t.encode.update,["y","yc","y2","height",...wn]),x:{signal:r("min","datum")},x2:{signal:r("max","datum")},clip:{value:!0}},a={x:{field:{group:"x"},mult:-1},height:{field:{group:"height"}}},t.encode.update={...f(t.encode.update,["y","yc","y2"]),height:{field:{group:"height"}}}):(o={...u(t.encode.update,["x","xc","x2","width"]),y:{signal:r("min","datum")},y2:{signal:r("max","datum")},clip:{value:!0}},a={y:{field:{group:"y"},mult:-1},width:{field:{group:"width"}}},t.encode.update={...f(t.encode.update,["x","xc","x2"]),width:{field:{group:"width"}}});for(const n of wn){const i=Pn(n,e.markDef,e.config);t.encode.update[n]?(o[n]=t.encode.update[n],delete t.encode.update[n]):i&&(o[n]=Fn(i)),i&&(t.encode.update[n]={value:0})}const s=[];if(e.stack.groupbyChannels?.length>0)for(const t of e.stack.groupbyChannels){const n=e.fieldDef(t),i=ta(n);i&&s.push(i),(n?.bin||n?.timeUnit)&&s.push(ta(n,{binSuffix:"end"}))}o=["stroke","strokeWidth","strokeJoin","strokeCap","strokeDash","strokeDashOffset","strokeMiterLimit","strokeOpacity"].reduce(((n,i)=>{if(t.encode.update[i])return{...n,[i]:t.encode.update[i]};{const t=Pn(i,e.markDef,e.config);return void 0!==t?{...n,[i]:Fn(t)}:n}}),o),o.stroke&&(o.strokeForeground={value:!0},o.strokeOffset={value:0});return[{type:"group",from:{facet:{data:e.requestDataName(sc.Main),name:pp+e.requestDataName(sc.Main),groupby:s,aggregate:{fields:[i({suffix:"start"}),i({suffix:"start"}),i({suffix:"end"}),i({suffix:"end"})],ops:["min","max","min","max"]}}},encode:{update:o},marks:[{type:"group",encode:{update:a},marks:[t]}]}]}(e)}return gp(e)}const mp="faceted_path_";const pp="stack_group_";function gp(e){let n=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{fromPrefix:""};const{mark:i,markDef:r,encoding:o,config:a}=e,s=U(r.clip,function(e){const t=e.getScaleComponent("x"),n=e.getScaleComponent("y");return!(!t?.get("selectionExtent")&&!n?.get("selectionExtent"))||void 0}(e),function(e){const t=e.component.projection;return!(!t||t.isFit)||void 0}(e)),l=Cn(r),c=o.key,u=function(e){const{encoding:n,stack:i,mark:r,markDef:o,config:a}=e,s=n.order;if(!(!t.isArray(s)&&Xo(s)&&m(s.value)||!s&&m(Nn("order",o,a)))){if((t.isArray(s)||Ro(s))&&!i)return Tn(s,{expr:"datum"});if(fo(r)){const i="horizontal"===o.orient?"y":"x",r=n[i];if(Ro(r)){const n=r.sort;return t.isArray(n)?{field:ta(r,{prefix:i,suffix:"sort_index",expr:"datum"})}:zo(n)?{field:ta({aggregate:ja(e.encoding)?n.op:void 0,field:n.field},{expr:"datum"})}:Fo(n)?{field:ta(e.fieldDef(n.encoding),{expr:"datum"}),order:n.order}:null===n?void 0:{field:ta(r,{binSuffix:e.stack?.impute?"mid":void 0,expr:"datum"})}}}}}(e),f=function(e){if(!e.component.selection)return null;const t=D(e.component.selection).length;let n=t,i=e.parent;for(;i&&0===n;)n=D(i.component.selection).length,i=i.parent;return n?{interactive:t>0||"geoshape"===e.mark||!!e.encoding.tooltip||!!e.markDef.tooltip}:null}(e),d=Nn("aria",r,a),p=fp[i].postEncodingTransform?fp[i].postEncodingTransform(e):null;return[{name:e.getName("marks"),type:fp[i].vgMark,...s?{clip:s}:{},...l?{style:l}:{},...c?{key:c.field}:{},...u?{sort:u}:{},...f||{},...!1===d?{aria:d}:{},from:{data:n.fromPrefix+e.requestDataName(sc.Main)},encode:{update:fp[i].encodeEntry(e)},...p?{transform:p}:{}}]}class hp extends xm{specifiedScales={};specifiedAxes={};specifiedLegends={};specifiedProjection={};selection=[];children=[];constructor(e,n,i){let r=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},o=arguments.length>4?arguments[4]:void 
0;super(e,"unit",n,i,o,void 0,zs(e)?e.view:void 0);const a=go(e.mark)?{...e.mark}:{type:e.mark},s=a.type;void 0===a.filled&&(a.filled=function(e,t,n){let{graticule:i}=n;if(i)return!1;const r=Pn("filled",e,t),o=e.type;return U(r,o!==no&&o!==to&&o!==ro)}(a,o,{graticule:e.data&&ac(e.data)}));const l=this.encoding=function(e,n,i,r){const o={};for(const t of D(e))Ke(t)||yi(`${a=t}-encoding is dropped as ${a} is not a valid encoding channel.`);var a;for(let a of lt){if(!e[a])continue;const s=e[a];if(Pt(a)){const e=st(a),t=o[e];if(Ro(t)&&Ki(t.type)&&Ro(s)&&!t.timeUnit){yi(Kn(e));continue}}if("angle"!==a||"arc"!==n||e.theta||(yi("Arc marks uses theta channel rather than angle, replacing angle with theta."),a=se),Ea(e,a,n)){if(a===ye&&"line"===n){const t=ua(e[a]);if(t?.aggregate){yi("Line marks cannot encode size with a non-groupby field. You may want to use trail marks instead.");continue}}if(a===me&&(i?"fill"in e:"stroke"in e))yi(ei("encoding",{fill:"fill"in e,stroke:"stroke"in e}));else if(a===Fe||a===De&&!t.isArray(s)&&!Xo(s)||a===Oe&&t.isArray(s)){if(s){if(a===De){const t=e[a];if(Mo(t)){o[a]=t;continue}}o[a]=t.array(s).reduce(((e,t)=>(Ro(t)?e.push(pa(t,a)):yi(ti(t,a)),e)),[])}}else{if(a===Oe&&null===s)o[a]=null;else if(!(Ro(s)||Bo(s)||Xo(s)||Lo(s)||yn(s))){yi(ti(s,a));continue}o[a]=da(s,a,r)}}else yi(ni(a,n))}return o}(e.encoding||{},s,a.filled,o);this.markDef=Zs(a,l,o),this.size=function(e){let{encoding:t,size:n}=e;for(const e of Ft){const i=rt(e);Fs(n[i])&&Io(t[e])&&(delete n[i],yi(ui(i)))}return n}({encoding:l,size:zs(e)?{...r,...e.width?{width:e.width}:{},...e.height?{height:e.height}:{}}:r}),this.stack=Ks(this.markDef,l),this.specifiedScales=this.initScales(s,l),this.specifiedAxes=this.initAxes(l),this.specifiedLegends=this.initLegends(l),this.specifiedProjection=e.projection,this.selection=(e.params??[]).filter((e=>xs(e)))}get hasProjection(){const{encoding:e}=this,t=this.mark===uo,n=e&&Me.some((t=>Go(e[t])));return t||n}scaleDomain(e){const t=this.specifiedScales[e];return t?t.domain:void 0}axis(e){return this.specifiedAxes[e]}legend(e){return this.specifiedLegends[e]}initScales(e,t){return It.reduce(((e,n)=>{const i=fa(t[n]);return i&&(e[n]=this.initScale(i.scale??{})),e}),{})}initScale(e){const{domain:n,range:i}=e,r=pn(e);return t.isArray(n)&&(r.domain=n.map(Sn)),t.isArray(i)&&(r.range=i.map(Sn)),r}initAxes(e){return Ft.reduce(((t,n)=>{const i=e[n];if(Go(i)||n===Z&&Go(e.x2)||n===ee&&Go(e.y2)){const e=Go(i)?i.axis:void 0;t[n]=e?this.initAxis({...e}):e}return t}),{})}initAxis(e){const t=D(e),n={};for(const i of t){const t=e[i];n[i]=wa(t)?kn(t):Sn(t)}return n}initLegends(e){return Wt.reduce(((t,n)=>{const i=fa(e[n]);if(i&&function(e){switch(e){case me:case pe:case ge:case ye:case he:case be:case we:case ke:return!0;case xe:case $e:case ve:return!1}}(n)){const e=i.legend;t[n]=e?pn(e):e}return t}),{})}parseData(){this.component.data=Vm(this)}parseLayoutSize(){!function(e){const{size:t,component:n}=e;for(const i of Ft){const r=rt(i);if(t[r]){const e=t[r];n.layoutSize.set(r,Fs(e)?"step":e,!0)}else{const t=Wm(e,r);n.layoutSize.set(r,t,!1)}}}(this)}parseSelections(){this.component.selection=function(e,n){const i={},r=e.config.selection;if(!n||!n.length)return i;for(const o of n){const n=_(o.name),a=o.select,s=t.isString(a)?a:a.type,c=t.isObject(a)?l(a):{type:s},u=r[s];for(const e in u)"fields"!==e&&"encodings"!==e&&("mark"===e&&(c[e]={...u[e],...c[e]}),void 0!==c[e]&&!0!==c[e]||(c[e]=l(u[e]??c[e])));const 
f=i[n]={...c,name:n,type:s,init:o.value,bind:o.bind,events:t.isString(c.on)?t.parseSelector(c.on,"scope"):t.array(l(c.on))},d=l(o);for(const t of Pu)t.defined(f)&&t.parse&&t.parse(e,f,d)}return i}(this,this.selection)}parseMarkGroup(){this.component.mark=dp(this)}parseAxesAndHeaders(){var e;this.component.axes=(e=this,Ft.reduce(((t,n)=>(e.component.scales[n]&&(t[n]=[ip(n,e)]),t)),{}))}assembleSelectionTopLevelSignals(e){return function(e,n){let i=!1;for(const r of F(e.component.selection??{})){const o=r.name,a=t.stringValue(o+Ou);if(0===n.filter((e=>e.name===o)).length){const e="global"===r.resolve?"union":r.resolve,i="point"===r.type?", true, true)":")";n.push({name:r.name,update:`${Nu}(${a}, ${t.stringValue(e)}${i}`})}i=!0;for(const t of Pu)t.defined(r)&&t.topLevelSignals&&(n=t.topLevelSignals(e,r,n))}i&&0===n.filter((e=>"unit"===e.name)).length&&n.unshift({name:"unit",value:{},on:[{events:"pointermove",update:"isTuple(group()) ? group() : unit"}]});return mc(n)}(this,e)}assembleSignals(){return[...Hu(this),...uc(this,[])]}assembleSelectionData(e){return function(e,t){const n=[...t],i=Au(e,{escape:!1});for(const t of F(e.component.selection??{})){const e={name:t.name+Ou};if(t.project.hasSelectionId&&(e.transform=[{type:"collect",sort:{field:hs}}]),t.init){const n=t.project.items.map(lc);e.values=t.project.hasSelectionId?t.init.map((e=>({unit:i,[hs]:cc(e,!1)[0]}))):t.init.map((e=>({unit:i,fields:n,values:cc(e,!1)})))}n.filter((e=>e.name===t.name+Ou)).length||n.push(e)}return n}(this,e)}assembleLayout(){return null}assembleLayoutSignals(){return vf(this)}assembleMarks(){let e=this.component.mark??[];return this.parent&&vm(this.parent)||(e=dc(this,e)),e.map(this.correctDataNames)}assembleGroupStyle(){const{style:e}=this.view||{};return void 0!==e?e:this.encoding.x||this.encoding.y?"cell":"view"}getMapping(){return this.encoding}get mark(){return this.markDef.type}channelHasField(e){return Na(this.encoding,e)}fieldDef(e){return ua(this.encoding[e])}typedFieldDef(e){const t=this.fieldDef(e);return Yo(t)?t:null}}class yp extends bm{constructor(e,t,n,i,r){super(e,"layer",t,n,r,e.resolve,e.view);const o={...i,...e.width?{width:e.width}:{},...e.height?{height:e.height}:{}};this.children=e.layer.map(((e,t)=>{if(Hs(e))return new yp(e,this,this.getName(`layer_${t}`),o,r);if(_a(e))return new hp(e,this,this.getName(`layer_${t}`),o,r);throw new Error(qn(e))}))}parseData(){this.component.data=Vm(this);for(const e of this.children)e.parseData()}parseLayoutSize(){var e;Um(e=this),Rm(e,"width"),Rm(e,"height")}parseSelections(){this.component.selection={};for(const e of this.children){e.parseSelections();for(const t of D(e.component.selection))this.component.selection[t]=e.component.selection[t]}}parseMarkGroup(){for(const e of this.children)e.parseMarkGroup()}parseAxesAndHeaders(){!function(e){const{axes:t,resolve:n}=e.component,i={top:0,bottom:0,right:0,left:0};for(const i of e.children){i.parseAxesAndHeaders();for(const r of D(i.component.axes))n.axis[r]=Df(e.component.resolve,r),"shared"===n.axis[r]&&(t[r]=Zm(t[r],i.component.axes[r]),t[r]||(n.axis[r]="independent",delete t[r]))}for(const r of Ft){for(const o of e.children)if(o.component.axes[r]){if("independent"===n.axis[r]){t[r]=(t[r]??[]).concat(o.component.axes[r]);for(const e of o.component.axes[r]){const{value:t,explicit:n}=e.getWithExplicit("orient");if(!yn(t)){if(i[t]>0&&!n){const n=Km[t];i[t]>i[n]&&e.set("orient",n,!1)}i[t]++}}}delete 
o.component.axes[r]}if("independent"===n.axis[r]&&t[r]&&t[r].length>1)for(const[e,n]of(t[r]||[]).entries())e>0&&n.get("grid")&&!n.explicit.grid&&(n.implicit.grid=!1)}}(this)}assembleSelectionTopLevelSignals(e){return this.children.reduce(((e,t)=>t.assembleSelectionTopLevelSignals(e)),e)}assembleSignals(){return this.children.reduce(((e,t)=>e.concat(t.assembleSignals())),Hu(this))}assembleLayoutSignals(){return this.children.reduce(((e,t)=>e.concat(t.assembleLayoutSignals())),vf(this))}assembleSelectionData(e){return this.children.reduce(((e,t)=>t.assembleSelectionData(e)),e)}assembleGroupStyle(){const e=new Set;for(const n of this.children)for(const i of t.array(n.assembleGroupStyle()))e.add(i);const n=Array.from(e);return n.length>1?n:1===n.length?n[0]:void 0}assembleTitle(){let e=super.assembleTitle();if(e)return e;for(const t of this.children)if(e=t.assembleTitle(),e)return e}assembleLayout(){return null}assembleMarks(){return function(e,t){for(const n of e.children)gm(n)&&(t=dc(n,t));return t}(this,this.children.flatMap((e=>e.assembleMarks())))}assembleLegends(){return this.children.reduce(((e,t)=>e.concat(t.assembleLegends())),Wf(this))}}function vp(e,t,n,i,r){if(No(e))return new Im(e,t,n,r);if(Hs(e))return new yp(e,t,n,i,r);if(_a(e))return new hp(e,t,n,i,r);if(function(e){return ks(e)||Ss(e)||ws(e)}(e))return new Ym(e,t,n,r);throw new Error(qn(e))}const bp=n;e.accessPathDepth=q,e.accessPathWithDatum=A,e.compile=function(e){let n=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{};var i;n.logger&&(i=n.logger,hi=i),n.fieldTitle&&oa(n.fieldTitle);try{const i=qs(t.mergeConfig(n.config,e.config)),r=Ul(e,i),o=vp(r,null,"",void 0,i);o.parse(),function(e,t){Pd(e.sources);let n=0,i=0;for(let i=0;i2&&void 0!==arguments[2]?arguments[2]:{},i=arguments.length>3?arguments[3]:void 0;const r=e.config?Bs(e.config):void 0,o=[].concat(e.assembleSelectionData([]),function(e,t){const n=[],i=Tm(n);let r=0;for(const t of e.sources){t.hasName()||(t.dataName="source_"+r++);const e=t.assemble();i(t,e)}for(const e of n)0===e.transform.length&&delete e.transform;let o=0;for(const[e,t]of n.entries())0!==(t.transform??[]).length||t.source||n.splice(o++,0,n.splice(e,1)[0]);for(const t of n)for(const n of t.transform??[])"lookup"===n.type&&(n.from=e.outputNodes[n.from].getSource());for(const e of n)e.name in t&&(e.values=t[e.name]);return n}(e.component.data,n)),a=e.assembleProjections(),s=e.assembleTitle(),l=e.assembleGroupStyle(),c=e.assembleGroupEncodeEntry(!0);let u=e.assembleLayoutSignals();u=u.filter((e=>"width"!==e.name&&"height"!==e.name||void 0===e.value||(t[e.name]=+e.value,!1)));const{params:f,...d}=t;return{$schema:"https://vega.github.io/schema/vega/v5.json",...e.description?{description:e.description}:{},...d,...s?{title:s}:{},...l?{style:l}:{},...c?{encode:{update:c}}:{},data:o,...a.length>0?{projections:a}:{},...e.assembleGroup([...u,...e.assembleSelectionTopLevelSignals([]),...$s(f)]),...r?{config:r}:{},...i?{usermeta:i}:{}}}(o,function(e,n,i,r){const o=r.component.layoutSize.get("width"),a=r.component.layoutSize.get("height");void 0===n?(n={type:"pad"},r.hasAxisOrientSignalRef()&&(n.resize=!0)):t.isString(n)&&(n={type:n});if(o&&a&&(s=n.type,"fit"===s||"fit-x"===s||"fit-y"===s))if("step"===o&&"step"===a)yi(Bn()),n.type="pad";else if("step"===o||"step"===a){const e="step"===o?"width":"height";yi(Bn(Ct(e)));const t="width"===e?"height":"width";n.type=function(e){return e?`fit-${Ct(e)}`:"fit"}(t)}var 
s;return{...1===D(n).length&&n.type?"pad"===n.type?{}:{autosize:n.type}:{autosize:n},...Vl(i,!1),...Vl(e,!0)}}(e,r.autosize,i,o),e.datasets,e.usermeta);return{spec:a,normalized:r}}finally{n.logger&&(hi=gi),n.fieldTitle&&oa(ia)}},e.contains=p,e.deepEqual=Y,e.deleteNestedProperty=N,e.duplicate=l,e.entries=z,e.every=h,e.fieldIntersection=k,e.flatAccessWithDatum=j,e.getFirstDefined=U,e.hasIntersection=$,e.hash=d,e.internalField=B,e.isBoolean=O,e.isEmpty=S,e.isEqual=function(e,t){const n=D(e),i=D(t);if(n.length!==i.length)return!1;for(const i of n)if(e[i]!==t[i])return!1;return!0},e.isInternalField=I,e.isNullOrFalse=m,e.isNumeric=V,e.keys=D,e.logicalExpr=C,e.mergeDeep=y,e.never=c,e.normalize=Ul,e.normalizeAngle=H,e.omit=f,e.pick=u,e.prefixGenerator=w,e.removePathFromField=L,e.replaceAll=M,e.replacePathInField=E,e.resetIdCounter=function(){R=42},e.setEqual=x,e.some=g,e.stringify=X,e.titleCase=P,e.unique=b,e.uniqueId=W,e.vals=F,e.varName=_,e.version=bp})); +//# sourceMappingURL=vega-lite.min.js.map diff --git a/docs/javascripts/vega@5.js b/docs/javascripts/vega@5.js new file mode 100644 index 0000000000..b0bd348924 --- /dev/null +++ b/docs/javascripts/vega@5.js @@ -0,0 +1,2 @@ +!function(t,e){"object"==typeof exports&&"undefined"!=typeof module?e(exports):"function"==typeof define&&define.amd?define(["exports"],e):e((t="undefined"!=typeof globalThis?globalThis:t||self).vega={})}(this,(function(t){"use strict";function e(t,e,n){return t.fields=e||[],t.fname=n,t}function n(t){return null==t?null:t.fname}function r(t){return null==t?null:t.fields}function i(t){return 1===t.length?o(t[0]):a(t)}const o=t=>function(e){return e[t]},a=t=>{const e=t.length;return function(n){for(let r=0;rr&&c(),u=r=i+1):"]"===o&&(u||s("Access path missing open bracket: "+t),u>0&&c(),u=0,r=i+1):i>r?c():r=i+1}return u&&s("Access path missing closing bracket: "+t),a&&s("Access path missing closing quote: "+t),i>r&&(i++,c()),e}function l(t,n,r){const o=u(t);return t=1===o.length?o[0]:t,e((r&&r.get||i)(o),[t],n||t)}const c=l("id"),f=e((t=>t),[],"identity"),h=e((()=>0),[],"zero"),d=e((()=>1),[],"one"),p=e((()=>!0),[],"true"),g=e((()=>!1),[],"false");function m(t,e,n){const r=[e].concat([].slice.call(n));console[t].apply(console,r)}const y=0,v=1,_=2,x=3,b=4;function w(t,e){let n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:m,r=t||y;return{level(t){return arguments.length?(r=+t,this):r},error(){return r>=v&&n(e||"error","ERROR",arguments),this},warn(){return r>=_&&n(e||"warn","WARN",arguments),this},info(){return r>=x&&n(e||"log","INFO",arguments),this},debug(){return r>=b&&n(e||"log","DEBUG",arguments),this}}}var k=Array.isArray;function A(t){return t===Object(t)}const M=t=>"__proto__"!==t;function E(){for(var t=arguments.length,e=new Array(t),n=0;n{for(const n in e)if("signals"===n)t.signals=C(t.signals,e.signals);else{const r="legend"===n?{layout:1}:"style"===n||null;D(t,n,e[n],r)}return t}),{})}function D(t,e,n,r){if(!M(e))return;let i,o;if(A(n)&&!k(n))for(i in o=A(t[e])?t[e]:t[e]={},n)r&&(!0===r||r[i])?D(o,i,n[i]):M(i)&&(o[i]=n[i]);else t[e]=n}function C(t,e){if(null==t)return e;const n={},r=[];function i(t){n[t.name]||(n[t.name]=1,r.push(t))}return e.forEach(i),t.forEach(i),r}function F(t){return t[t.length-1]}function S(t){return null==t||""===t?null:+t}const $=t=>e=>t*Math.exp(e),T=t=>e=>Math.log(t*e),B=t=>e=>Math.sign(e)*Math.log1p(Math.abs(e/t)),z=t=>e=>Math.sign(e)*Math.expm1(Math.abs(e))*t,N=t=>e=>e<0?-Math.pow(-e,t):Math.pow(e,t);function O(t,e,n,r){const 
i=n(t[0]),o=n(F(t)),a=(o-i)*e;return[r(i-a),r(o-a)]}function R(t,e){return O(t,e,S,f)}function U(t,e){var n=Math.sign(t[0]);return O(t,e,T(n),$(n))}function L(t,e,n){return O(t,e,N(n),N(1/n))}function q(t,e,n){return O(t,e,B(n),z(n))}function P(t,e,n,r,i){const o=r(t[0]),a=r(F(t)),s=null!=e?r(e):(o+a)/2;return[i(s+(o-s)*n),i(s+(a-s)*n)]}function j(t,e,n){return P(t,e,n,S,f)}function I(t,e,n){const r=Math.sign(t[0]);return P(t,e,n,T(r),$(r))}function W(t,e,n,r){return P(t,e,n,N(r),N(1/r))}function H(t,e,n,r){return P(t,e,n,B(r),z(r))}function Y(t){return 1+~~(new Date(t).getMonth()/3)}function G(t){return 1+~~(new Date(t).getUTCMonth()/3)}function V(t){return null!=t?k(t)?t:[t]:[]}function X(t,e,n){let r,i=t[0],o=t[1];return o=n-e?[e,n]:[i=Math.min(Math.max(i,e),n-r),i+r]}function J(t){return"function"==typeof t}const Z="descending";function Q(t,n,i){i=i||{},n=V(n)||[];const o=[],a=[],s={},u=i.comparator||tt;return V(t).forEach(((t,e)=>{null!=t&&(o.push(n[e]===Z?-1:1),a.push(t=J(t)?t:l(t,null,i)),(r(t)||[]).forEach((t=>s[t]=1)))})),0===a.length?null:e(u(a,o),Object.keys(s))}const K=(t,e)=>(te||null==e)&&null!=t?1:(e=e instanceof Date?+e:e,(t=t instanceof Date?+t:t)!==t&&e==e?-1:e!=e&&t==t?1:0),tt=(t,e)=>1===t.length?et(t[0],e[0]):nt(t,e,t.length),et=(t,e)=>function(n,r){return K(t(n),t(r))*e},nt=(t,e,n)=>(e.push(0),function(r,i){let o,a=0,s=-1;for(;0===a&&++st}function it(t,e){let n;return r=>{n&&clearTimeout(n),n=setTimeout((()=>(e(r),n=null)),t)}}function ot(t){for(let e,n,r=1,i=arguments.length;ro&&(o=r))}else{for(r=e(t[a]);ao&&(o=r))}return[i,o]}function st(t,e){const n=t.length;let r,i,o,a,s,u=-1;if(null==e){for(;++u=i){r=o=i;break}if(u===n)return[-1,-1];for(a=s=u;++ui&&(r=i,a=u),o=i){r=o=i;break}if(u===n)return[-1,-1];for(a=s=u;++ui&&(r=i,a=u),or(t)?n[t]:void 0,set(t,e){return r(t)||(++i.size,n[t]===ct&&--i.empty),n[t]=e,this},delete(t){return r(t)&&(--i.size,++i.empty,n[t]=ct),this},clear(){i.size=i.empty=0,i.object=n={}},test(t){return arguments.length?(e=t,i):e},clean(){const t={};let r=0;for(const i in n){const o=n[i];o===ct||e&&e(o)||(t[i]=o,++r)}i.size=r,i.empty=0,i.object=n=t}};return t&&Object.keys(t).forEach((e=>{i.set(e,t[e])})),i}function ht(t,e,n,r,i,o){if(!n&&0!==n)return o;const a=+n;let s,u=t[0],l=F(t);la&&(i=o,o=a,a=i),r=void 0===r||r,((n=void 0===n||n)?o<=t:ot.replace(/\\(.)/g,"$1"))):V(t));const o=t&&t.length,a=r&&r.get||i,s=t=>a(n?[t]:u(t));let l;if(o)if(1===o){const e=s(t[0]);l=function(t){return""+e(t)}}else{const e=t.map(s);l=function(t){let n=""+e[0](t),r=0;for(;++r{e={},n={},r=0},o=(i,o)=>(++r>t&&(n=e,e={},r=1),e[i]=o);return i(),{clear:i,has:t=>lt(e,t)||lt(n,t),get:t=>lt(e,t)?e[t]:lt(n,t)?o(t,n[t]):void 0,set:(t,n)=>lt(e,t)?e[t]=n:o(t,n)}}function At(t,e,n,r){const i=e.length,o=n.length;if(!o)return e;if(!i)return n;const a=r||new e.constructor(i+o);let s=0,u=0,l=0;for(;s0?n[u++]:e[s++];for(;s=0;)n+=t;return n}function Et(t,e,n,r){const i=n||" ",o=t+"",a=e-o.length;return a<=0?o:"left"===r?Mt(i,a)+o:"center"===r?Mt(i,~~(a/2))+o+Mt(i,Math.ceil(a/2)):o+Mt(i,a)}function Dt(t){return t&&F(t)-t[0]||0}function Ct(t){return k(t)?"["+t.map(Ct)+"]":A(t)||xt(t)?JSON.stringify(t).replace("\u2028","\\u2028").replace("\u2029","\\u2029"):t}function Ft(t){return null==t||""===t?null:!(!t||"false"===t||"0"===t)&&!!t}const St=t=>vt(t)||mt(t)?t:Date.parse(t);function $t(t,e){return e=e||St,null==t||""===t?null:e(t)}function Tt(t){return null==t||""===t?null:t+""}function Bt(t){const e={},n=t.length;for(let 
r=0;r9999?"+"+It(e,6):It(e,4))+"-"+It(t.getUTCMonth()+1,2)+"-"+It(t.getUTCDate(),2)+(o?"T"+It(n,2)+":"+It(r,2)+":"+It(i,2)+"."+It(o,3)+"Z":i?"T"+It(n,2)+":"+It(r,2)+":"+It(i,2)+"Z":r||n?"T"+It(n,2)+":"+It(r,2)+"Z":"")}function Ht(t){var e=new RegExp('["'+t+"\n\r]"),n=t.charCodeAt(0);function r(t,e){var r,i=[],o=t.length,a=0,s=0,u=o<=0,l=!1;function c(){if(u)return Rt;if(l)return l=!1,Ot;var e,r,i=a;if(t.charCodeAt(i)===Ut){for(;a++=o?u=!0:(r=t.charCodeAt(a++))===Lt?l=!0:r===qt&&(l=!0,t.charCodeAt(a)===Lt&&++a),t.slice(i+1,e-1).replace(/""/g,'"')}for(;a1)r=function(t,e,n){var r,i=[],o=[];function a(t){var e=t<0?~t:t;(o[e]||(o[e]=[])).push({i:t,g:r})}function s(t){t.forEach(a)}function u(t){t.forEach(s)}function l(t){t.forEach(u)}function c(t){switch(r=t,t.type){case"GeometryCollection":t.geometries.forEach(c);break;case"LineString":s(t.arcs);break;case"MultiLineString":case"Polygon":u(t.arcs);break;case"MultiPolygon":l(t.arcs)}}return c(e),o.forEach(null==n?function(t){i.push(t[0].i)}:function(t){n(t[0].g,t[t.length-1].g)&&i.push(t[0].i)}),i}(0,e,n);else for(i=0,r=new Array(o=t.arcs.length);ie?1:t>=e?0:NaN}function te(t,e){return null==t||null==e?NaN:et?1:e>=t?0:NaN}function ee(t){let e,n,r;function i(t,r){let i=arguments.length>2&&void 0!==arguments[2]?arguments[2]:0,o=arguments.length>3&&void 0!==arguments[3]?arguments[3]:t.length;if(i>>1;n(t[e],r)<0?i=e+1:o=e}while(iKt(t(e),n),r=(e,n)=>t(e)-n):(e=t===Kt||t===te?t:ne,n=t,r=t),{left:i,center:function(t,e){let n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:0;const o=i(t,e,n,(arguments.length>3&&void 0!==arguments[3]?arguments[3]:t.length)-1);return o>n&&r(t[o-1],e)>-r(t[o],e)?o-1:o},right:function(t,r){let i=arguments.length>2&&void 0!==arguments[2]?arguments[2]:0,o=arguments.length>3&&void 0!==arguments[3]?arguments[3]:t.length;if(i>>1;n(t[e],r)<=0?i=e+1:o=e}while(i0){for(o=t[--i];i>0&&(e=o,n=t[--i],o=e+n,r=n-(o-e),!r););i>0&&(r<0&&t[i-1]<0||r>0&&t[i-1]>0)&&(n=2*r,e=o+n,n==e-o&&(o=e))}return o}}class ue extends Map{constructor(t){let e=arguments.length>1&&void 0!==arguments[1]?arguments[1]:de;if(super(),Object.defineProperties(this,{_intern:{value:new Map},_key:{value:e}}),null!=t)for(const[e,n]of t)this.set(e,n)}get(t){return super.get(ce(this,t))}has(t){return super.has(ce(this,t))}set(t,e){return super.set(fe(this,t),e)}delete(t){return super.delete(he(this,t))}}class le extends Set{constructor(t){let e=arguments.length>1&&void 0!==arguments[1]?arguments[1]:de;if(super(),Object.defineProperties(this,{_intern:{value:new Map},_key:{value:e}}),null!=t)for(const e of t)this.add(e)}has(t){return super.has(ce(this,t))}add(t){return super.add(fe(this,t))}delete(t){return super.delete(he(this,t))}}function ce(t,e){let{_intern:n,_key:r}=t;const i=r(e);return n.has(i)?n.get(i):e}function fe(t,e){let{_intern:n,_key:r}=t;const i=r(e);return n.has(i)?n.get(i):(n.set(i,e),e)}function he(t,e){let{_intern:n,_key:r}=t;const i=r(e);return n.has(i)&&(e=n.get(i),n.delete(i)),e}function de(t){return null!==t&&"object"==typeof t?t.valueOf():t}function pe(t,e){return(null==t||!(t>=t))-(null==e||!(e>=e))||(te?1:0)}const ge=Math.sqrt(50),me=Math.sqrt(10),ye=Math.sqrt(2);function ve(t,e,n){const r=(e-t)/Math.max(0,n),i=Math.floor(Math.log10(r)),o=r/Math.pow(10,i),a=o>=ge?10:o>=me?5:o>=ye?2:1;let s,u,l;return i<0?(l=Math.pow(10,-i)/a,s=Math.round(t*l),u=Math.round(e*l),s/le&&--u,l=-l):(l=Math.pow(10,i)*a,s=Math.round(t/l),u=Math.round(e/l),s*le&&--u),u0))return[];if((t=+t)===(e=+e))return[t];const r=e=i))return[];const s=o-i+1,u=new 
Array(s);if(r)if(a<0)for(let t=0;t=e)&&(n=e);else{let r=-1;for(let i of t)null!=(i=e(i,++r,t))&&(n=i)&&(n=i)}return n}function ke(t,e){let n;if(void 0===e)for(const e of t)null!=e&&(n>e||void 0===n&&e>=e)&&(n=e);else{let r=-1;for(let i of t)null!=(i=e(i,++r,t))&&(n>i||void 0===n&&i>=i)&&(n=i)}return n}function Ae(t,e){let n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:0,r=arguments.length>3&&void 0!==arguments[3]?arguments[3]:1/0,i=arguments.length>4?arguments[4]:void 0;if(e=Math.floor(e),n=Math.floor(Math.max(0,n)),r=Math.floor(Math.min(t.length-1,r)),!(n<=e&&e<=r))return t;for(i=void 0===i?pe:function(){let t=arguments.length>0&&void 0!==arguments[0]?arguments[0]:Kt;if(t===Kt)return pe;if("function"!=typeof t)throw new TypeError("compare is not a function");return(e,n)=>{const r=t(e,n);return r||0===r?r:(0===t(n,n))-(0===t(e,e))}}(i);r>n;){if(r-n>600){const o=r-n+1,a=e-n+1,s=Math.log(o),u=.5*Math.exp(2*s/3),l=.5*Math.sqrt(s*u*(o-u)/o)*(a-o/2<0?-1:1);Ae(t,e,Math.max(n,Math.floor(e-a*u/o+l)),Math.min(r,Math.floor(e+(o-a)*u/o+l)),i)}const o=t[e];let a=n,s=r;for(Me(t,n,e),i(t[r],o)>0&&Me(t,n,r);a0;)--s}0===i(t[n],o)?Me(t,n,s):(++s,Me(t,s,r)),s<=e&&(n=s+1),e<=s&&(r=s-1)}return t}function Me(t,e,n){const r=t[e];t[e]=t[n],t[n]=r}function Ee(t,e,n){if(t=Float64Array.from(function*(t,e){if(void 0===e)for(let e of t)null!=e&&(e=+e)>=e&&(yield e);else{let n=-1;for(let r of t)null!=(r=e(r,++n,t))&&(r=+r)>=r&&(yield r)}}(t,n)),(r=t.length)&&!isNaN(e=+e)){if(e<=0||r<2)return ke(t);if(e>=1)return we(t);var r,i=(r-1)*e,o=Math.floor(i),a=we(Ae(t,o).subarray(0,o+1));return a+(ke(t.subarray(o+1))-a)*(i-o)}}function De(t,e){let n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:re;if((r=t.length)&&!isNaN(e=+e)){if(e<=0||r<2)return+n(t[0],0,t);if(e>=1)return+n(t[r-1],r-1,t);var r,i=(r-1)*e,o=Math.floor(i),a=+n(t[o],o,t);return a+(+n(t[o+1],o+1,t)-a)*(i-o)}}function Ce(t,e){return Ee(t,.5,e)}function Fe(t){return Array.from(function*(t){for(const e of t)yield*e}(t))}function Se(t,e,n){t=+t,e=+e,n=(i=arguments.length)<2?(e=t,t=0,1):i<3?1:+n;for(var r=-1,i=0|Math.max(0,Math.ceil((e-t)/n)),o=new Array(i);++r1?r[0]+r.slice(2):r,+t.slice(n+1)]}function ze(t){return(t=Be(Math.abs(t)))?t[1]:NaN}var Ne,Oe=/^(?:(.)?([<>=^]))?([+\-( ])?([$#])?(0)?(\d+)?(,)?(\.\d+)?(~)?([a-z%])?$/i;function Re(t){if(!(e=Oe.exec(t)))throw new Error("invalid format: "+t);var e;return new Ue({fill:e[1],align:e[2],sign:e[3],symbol:e[4],zero:e[5],width:e[6],comma:e[7],precision:e[8]&&e[8].slice(1),trim:e[9],type:e[10]})}function Ue(t){this.fill=void 0===t.fill?" 
":t.fill+"",this.align=void 0===t.align?">":t.align+"",this.sign=void 0===t.sign?"-":t.sign+"",this.symbol=void 0===t.symbol?"":t.symbol+"",this.zero=!!t.zero,this.width=void 0===t.width?void 0:+t.width,this.comma=!!t.comma,this.precision=void 0===t.precision?void 0:+t.precision,this.trim=!!t.trim,this.type=void 0===t.type?"":t.type+""}function Le(t,e){var n=Be(t,e);if(!n)return t+"";var r=n[0],i=n[1];return i<0?"0."+new Array(-i).join("0")+r:r.length>i+1?r.slice(0,i+1)+"."+r.slice(i+1):r+new Array(i-r.length+2).join("0")}Re.prototype=Ue.prototype,Ue.prototype.toString=function(){return this.fill+this.align+this.sign+this.symbol+(this.zero?"0":"")+(void 0===this.width?"":Math.max(1,0|this.width))+(this.comma?",":"")+(void 0===this.precision?"":"."+Math.max(0,0|this.precision))+(this.trim?"~":"")+this.type};var qe={"%":(t,e)=>(100*t).toFixed(e),b:t=>Math.round(t).toString(2),c:t=>t+"",d:function(t){return Math.abs(t=Math.round(t))>=1e21?t.toLocaleString("en").replace(/,/g,""):t.toString(10)},e:(t,e)=>t.toExponential(e),f:(t,e)=>t.toFixed(e),g:(t,e)=>t.toPrecision(e),o:t=>Math.round(t).toString(8),p:(t,e)=>Le(100*t,e),r:Le,s:function(t,e){var n=Be(t,e);if(!n)return t+"";var r=n[0],i=n[1],o=i-(Ne=3*Math.max(-8,Math.min(8,Math.floor(i/3))))+1,a=r.length;return o===a?r:o>a?r+new Array(o-a+1).join("0"):o>0?r.slice(0,o)+"."+r.slice(o):"0."+new Array(1-o).join("0")+Be(t,Math.max(0,e+o-1))[0]},X:t=>Math.round(t).toString(16).toUpperCase(),x:t=>Math.round(t).toString(16)};function Pe(t){return t}var je,Ie,We,He=Array.prototype.map,Ye=["y","z","a","f","p","n","µ","m","","k","M","G","T","P","E","Z","Y"];function Ge(t){var e,n,r=void 0===t.grouping||void 0===t.thousands?Pe:(e=He.call(t.grouping,Number),n=t.thousands+"",function(t,r){for(var i=t.length,o=[],a=0,s=e[0],u=0;i>0&&s>0&&(u+s+1>r&&(s=Math.max(1,r-u)),o.push(t.substring(i-=s,i+s)),!((u+=s+1)>r));)s=e[a=(a+1)%e.length];return o.reverse().join(n)}),i=void 0===t.currency?"":t.currency[0]+"",o=void 0===t.currency?"":t.currency[1]+"",a=void 0===t.decimal?".":t.decimal+"",s=void 0===t.numerals?Pe:function(t){return function(e){return e.replace(/[0-9]/g,(function(e){return t[+e]}))}}(He.call(t.numerals,String)),u=void 0===t.percent?"%":t.percent+"",l=void 0===t.minus?"−":t.minus+"",c=void 0===t.nan?"NaN":t.nan+"";function f(t){var e=(t=Re(t)).fill,n=t.align,f=t.sign,h=t.symbol,d=t.zero,p=t.width,g=t.comma,m=t.precision,y=t.trim,v=t.type;"n"===v?(g=!0,v="g"):qe[v]||(void 0===m&&(m=12),y=!0,v="g"),(d||"0"===e&&"="===n)&&(d=!0,e="0",n="=");var _="$"===h?i:"#"===h&&/[boxX]/.test(v)?"0"+v.toLowerCase():"",x="$"===h?o:/[%p]/.test(v)?u:"",b=qe[v],w=/[defgprs%]/.test(v);function k(t){var i,o,u,h=_,k=x;if("c"===v)k=b(t)+k,t="";else{var A=(t=+t)<0||1/t<0;if(t=isNaN(t)?c:b(Math.abs(t),m),y&&(t=function(t){t:for(var e,n=t.length,r=1,i=-1;r0&&(i=0)}return i>0?t.slice(0,i)+t.slice(e+1):t}(t)),A&&0==+t&&"+"!==f&&(A=!1),h=(A?"("===f?f:l:"-"===f||"("===f?"":f)+h,k=("s"===v?Ye[8+Ne/3]:"")+k+(A&&"("===f?")":""),w)for(i=-1,o=t.length;++i(u=t.charCodeAt(i))||u>57){k=(46===u?a+t.slice(i+1):t.slice(i))+k,t=t.slice(0,i);break}}g&&!d&&(t=r(t,1/0));var M=h.length+t.length+k.length,E=M>1)+h+t+k+E.slice(M);break;default:t=E+h+t+k}return s(t)}return m=void 0===m?6:/[gprs]/.test(v)?Math.max(1,Math.min(21,m)):Math.max(0,Math.min(20,m)),k.toString=function(){return t+""},k}return{format:f,formatPrefix:function(t,e){var n=f(((t=Re(t)).type="f",t)),r=3*Math.max(-8,Math.min(8,Math.floor(ze(e)/3))),i=Math.pow(10,-r),o=Ye[8+r/3];return function(t){return n(i*t)+o}}}}function 
Ve(t){return Math.max(0,-ze(Math.abs(t)))}function Xe(t,e){return Math.max(0,3*Math.max(-8,Math.min(8,Math.floor(ze(e)/3)))-ze(Math.abs(t)))}function Je(t,e){return t=Math.abs(t),e=Math.abs(e)-t,Math.max(0,ze(e)-ze(t))+1}!function(t){je=Ge(t),Ie=je.format,We=je.formatPrefix}({thousands:",",grouping:[3],currency:["$",""]});const Ze=new Date,Qe=new Date;function Ke(t,e,n,r){function i(e){return t(e=0===arguments.length?new Date:new Date(+e)),e}return i.floor=e=>(t(e=new Date(+e)),e),i.ceil=n=>(t(n=new Date(n-1)),e(n,1),t(n),n),i.round=t=>{const e=i(t),n=i.ceil(t);return t-e(e(t=new Date(+t),null==n?1:Math.floor(n)),t),i.range=(n,r,o)=>{const a=[];if(n=i.ceil(n),o=null==o?1:Math.floor(o),!(n0))return a;let s;do{a.push(s=new Date(+n)),e(n,o),t(n)}while(sKe((e=>{if(e>=e)for(;t(e),!n(e);)e.setTime(e-1)}),((t,r)=>{if(t>=t)if(r<0)for(;++r<=0;)for(;e(t,-1),!n(t););else for(;--r>=0;)for(;e(t,1),!n(t););})),n&&(i.count=(e,r)=>(Ze.setTime(+e),Qe.setTime(+r),t(Ze),t(Qe),Math.floor(n(Ze,Qe))),i.every=t=>(t=Math.floor(t),isFinite(t)&&t>0?t>1?i.filter(r?e=>r(e)%t==0:e=>i.count(0,e)%t==0):i:null)),i}const tn=Ke((()=>{}),((t,e)=>{t.setTime(+t+e)}),((t,e)=>e-t));tn.every=t=>(t=Math.floor(t),isFinite(t)&&t>0?t>1?Ke((e=>{e.setTime(Math.floor(e/t)*t)}),((e,n)=>{e.setTime(+e+n*t)}),((e,n)=>(n-e)/t)):tn:null),tn.range;const en=1e3,nn=6e4,rn=36e5,on=864e5,an=6048e5,sn=2592e6,un=31536e6,ln=Ke((t=>{t.setTime(t-t.getMilliseconds())}),((t,e)=>{t.setTime(+t+e*en)}),((t,e)=>(e-t)/en),(t=>t.getUTCSeconds()));ln.range;const cn=Ke((t=>{t.setTime(t-t.getMilliseconds()-t.getSeconds()*en)}),((t,e)=>{t.setTime(+t+e*nn)}),((t,e)=>(e-t)/nn),(t=>t.getMinutes()));cn.range;const fn=Ke((t=>{t.setUTCSeconds(0,0)}),((t,e)=>{t.setTime(+t+e*nn)}),((t,e)=>(e-t)/nn),(t=>t.getUTCMinutes()));fn.range;const hn=Ke((t=>{t.setTime(t-t.getMilliseconds()-t.getSeconds()*en-t.getMinutes()*nn)}),((t,e)=>{t.setTime(+t+e*rn)}),((t,e)=>(e-t)/rn),(t=>t.getHours()));hn.range;const dn=Ke((t=>{t.setUTCMinutes(0,0,0)}),((t,e)=>{t.setTime(+t+e*rn)}),((t,e)=>(e-t)/rn),(t=>t.getUTCHours()));dn.range;const pn=Ke((t=>t.setHours(0,0,0,0)),((t,e)=>t.setDate(t.getDate()+e)),((t,e)=>(e-t-(e.getTimezoneOffset()-t.getTimezoneOffset())*nn)/on),(t=>t.getDate()-1));pn.range;const gn=Ke((t=>{t.setUTCHours(0,0,0,0)}),((t,e)=>{t.setUTCDate(t.getUTCDate()+e)}),((t,e)=>(e-t)/on),(t=>t.getUTCDate()-1));gn.range;const mn=Ke((t=>{t.setUTCHours(0,0,0,0)}),((t,e)=>{t.setUTCDate(t.getUTCDate()+e)}),((t,e)=>(e-t)/on),(t=>Math.floor(t/on)));function yn(t){return Ke((e=>{e.setDate(e.getDate()-(e.getDay()+7-t)%7),e.setHours(0,0,0,0)}),((t,e)=>{t.setDate(t.getDate()+7*e)}),((t,e)=>(e-t-(e.getTimezoneOffset()-t.getTimezoneOffset())*nn)/an))}mn.range;const vn=yn(0),_n=yn(1),xn=yn(2),bn=yn(3),wn=yn(4),kn=yn(5),An=yn(6);function Mn(t){return Ke((e=>{e.setUTCDate(e.getUTCDate()-(e.getUTCDay()+7-t)%7),e.setUTCHours(0,0,0,0)}),((t,e)=>{t.setUTCDate(t.getUTCDate()+7*e)}),((t,e)=>(e-t)/an))}vn.range,_n.range,xn.range,bn.range,wn.range,kn.range,An.range;const En=Mn(0),Dn=Mn(1),Cn=Mn(2),Fn=Mn(3),Sn=Mn(4),$n=Mn(5),Tn=Mn(6);En.range,Dn.range,Cn.range,Fn.range,Sn.range,$n.range,Tn.range;const Bn=Ke((t=>{t.setDate(1),t.setHours(0,0,0,0)}),((t,e)=>{t.setMonth(t.getMonth()+e)}),((t,e)=>e.getMonth()-t.getMonth()+12*(e.getFullYear()-t.getFullYear())),(t=>t.getMonth()));Bn.range;const zn=Ke((t=>{t.setUTCDate(1),t.setUTCHours(0,0,0,0)}),((t,e)=>{t.setUTCMonth(t.getUTCMonth()+e)}),((t,e)=>e.getUTCMonth()-t.getUTCMonth()+12*(e.getUTCFullYear()-t.getUTCFullYear())),(t=>t.getUTCMonth()));zn.range;const 
Nn=Ke((t=>{t.setMonth(0,1),t.setHours(0,0,0,0)}),((t,e)=>{t.setFullYear(t.getFullYear()+e)}),((t,e)=>e.getFullYear()-t.getFullYear()),(t=>t.getFullYear()));Nn.every=t=>isFinite(t=Math.floor(t))&&t>0?Ke((e=>{e.setFullYear(Math.floor(e.getFullYear()/t)*t),e.setMonth(0,1),e.setHours(0,0,0,0)}),((e,n)=>{e.setFullYear(e.getFullYear()+n*t)})):null,Nn.range;const On=Ke((t=>{t.setUTCMonth(0,1),t.setUTCHours(0,0,0,0)}),((t,e)=>{t.setUTCFullYear(t.getUTCFullYear()+e)}),((t,e)=>e.getUTCFullYear()-t.getUTCFullYear()),(t=>t.getUTCFullYear()));function Rn(t,e,n,r,i,o){const a=[[ln,1,en],[ln,5,5e3],[ln,15,15e3],[ln,30,3e4],[o,1,nn],[o,5,3e5],[o,15,9e5],[o,30,18e5],[i,1,rn],[i,3,108e5],[i,6,216e5],[i,12,432e5],[r,1,on],[r,2,1728e5],[n,1,an],[e,1,sn],[e,3,7776e6],[t,1,un]];function s(e,n,r){const i=Math.abs(n-e)/r,o=ee((t=>{let[,,e]=t;return e})).right(a,i);if(o===a.length)return t.every(be(e/un,n/un,r));if(0===o)return tn.every(Math.max(be(e,n,r),1));const[s,u]=a[i/a[o-1][2]isFinite(t=Math.floor(t))&&t>0?Ke((e=>{e.setUTCFullYear(Math.floor(e.getUTCFullYear()/t)*t),e.setUTCMonth(0,1),e.setUTCHours(0,0,0,0)}),((e,n)=>{e.setUTCFullYear(e.getUTCFullYear()+n*t)})):null,On.range;const[Un,Ln]=Rn(On,zn,En,mn,dn,fn),[qn,Pn]=Rn(Nn,Bn,vn,pn,hn,cn),jn="year",In="quarter",Wn="month",Hn="week",Yn="date",Gn="day",Vn="dayofyear",Xn="hours",Jn="minutes",Zn="seconds",Qn="milliseconds",Kn=[jn,In,Wn,Hn,Yn,Gn,Vn,Xn,Jn,Zn,Qn],tr=Kn.reduce(((t,e,n)=>(t[e]=1+n,t)),{});function er(t){const e=V(t).slice(),n={};e.length||s("Missing time unit."),e.forEach((t=>{lt(tr,t)?n[t]=1:s(`Invalid time unit: ${t}.`)}));return(n[Hn]||n[Gn]?1:0)+(n[In]||n[Wn]||n[Yn]?1:0)+(n[Vn]?1:0)>1&&s(`Incompatible time units: ${t}`),e.sort(((t,e)=>tr[t]-tr[e])),e}const nr={[jn]:"%Y ",[In]:"Q%q ",[Wn]:"%b ",[Yn]:"%d ",[Hn]:"W%U ",[Gn]:"%a ",[Vn]:"%j ",[Xn]:"%H:00",[Jn]:"00:%M",[Zn]:":%S",[Qn]:".%L",[`${jn}-${Wn}`]:"%Y-%m ",[`${jn}-${Wn}-${Yn}`]:"%Y-%m-%d ",[`${Xn}-${Jn}`]:"%H:%M"};function rr(t,e){const n=ot({},nr,e),r=er(t),i=r.length;let o,a,s="",u=0;for(u=0;uu;--o)if(a=r.slice(u,o).join("-"),null!=n[a]){s+=n[a],u=o;break}return s.trim()}const ir=new Date;function or(t){return ir.setFullYear(t),ir.setMonth(0),ir.setDate(1),ir.setHours(0,0,0,0),ir}function ar(t){return ur(new Date(t))}function sr(t){return lr(new Date(t))}function ur(t){return pn.count(or(t.getFullYear())-1,t)}function lr(t){return vn.count(or(t.getFullYear())-1,t)}function cr(t){return or(t).getDay()}function fr(t,e,n,r,i,o,a){if(0<=t&&t<100){const s=new Date(-1,e,n,r,i,o,a);return s.setFullYear(t),s}return new Date(t,e,n,r,i,o,a)}function hr(t){return pr(new Date(t))}function dr(t){return gr(new Date(t))}function pr(t){const e=Date.UTC(t.getUTCFullYear(),0,1);return gn.count(e-1,t)}function gr(t){const e=Date.UTC(t.getUTCFullYear(),0,1);return En.count(e-1,t)}function mr(t){return ir.setTime(Date.UTC(t,0,1)),ir.getUTCDay()}function yr(t,e,n,r,i,o,a){if(0<=t&&t<100){const t=new Date(Date.UTC(-1,e,n,r,i,o,a));return t.setUTCFullYear(n.y),t}return new Date(Date.UTC(t,e,n,r,i,o,a))}function vr(t,e,n,r,i){const o=e||1,a=F(t),s=(t,e,i)=>function(t,e,n,r){const i=n<=1?t:r?(e,i)=>r+n*Math.floor((t(e,i)-r)/n):(e,r)=>n*Math.floor(t(e,r)/n);return e?(t,n)=>e(i(t,n),n):i}(n[i=i||t],r[i],t===a&&o,e),u=new Date,l=Bt(t),c=l[jn]?s(jn):rt(2012),f=l[Wn]?s(Wn):l[In]?s(In):h,p=l[Hn]&&l[Gn]?s(Gn,1,Hn+Gn):l[Hn]?s(Hn,1):l[Gn]?s(Gn,1):l[Yn]?s(Yn,1):l[Vn]?s(Vn,1):d,g=l[Xn]?s(Xn):h,m=l[Jn]?s(Jn):h,y=l[Zn]?s(Zn):h,v=l[Qn]?s(Qn):h;return function(t){u.setTime(+t);const e=c(u);return 
i(e,f(u),p(u,e),g(u),m(u),y(u),v(u))}}function _r(t,e,n){return e+7*t-(n+6)%7}const xr={[jn]:t=>t.getFullYear(),[In]:t=>Math.floor(t.getMonth()/3),[Wn]:t=>t.getMonth(),[Yn]:t=>t.getDate(),[Xn]:t=>t.getHours(),[Jn]:t=>t.getMinutes(),[Zn]:t=>t.getSeconds(),[Qn]:t=>t.getMilliseconds(),[Vn]:t=>ur(t),[Hn]:t=>lr(t),[Hn+Gn]:(t,e)=>_r(lr(t),t.getDay(),cr(e)),[Gn]:(t,e)=>_r(1,t.getDay(),cr(e))},br={[In]:t=>3*t,[Hn]:(t,e)=>_r(t,0,cr(e))};function wr(t,e){return vr(t,e||1,xr,br,fr)}const kr={[jn]:t=>t.getUTCFullYear(),[In]:t=>Math.floor(t.getUTCMonth()/3),[Wn]:t=>t.getUTCMonth(),[Yn]:t=>t.getUTCDate(),[Xn]:t=>t.getUTCHours(),[Jn]:t=>t.getUTCMinutes(),[Zn]:t=>t.getUTCSeconds(),[Qn]:t=>t.getUTCMilliseconds(),[Vn]:t=>pr(t),[Hn]:t=>gr(t),[Gn]:(t,e)=>_r(1,t.getUTCDay(),mr(e)),[Hn+Gn]:(t,e)=>_r(gr(t),t.getUTCDay(),mr(e))},Ar={[In]:t=>3*t,[Hn]:(t,e)=>_r(t,0,mr(e))};function Mr(t,e){return vr(t,e||1,kr,Ar,yr)}const Er={[jn]:Nn,[In]:Bn.every(3),[Wn]:Bn,[Hn]:vn,[Yn]:pn,[Gn]:pn,[Vn]:pn,[Xn]:hn,[Jn]:cn,[Zn]:ln,[Qn]:tn},Dr={[jn]:On,[In]:zn.every(3),[Wn]:zn,[Hn]:En,[Yn]:gn,[Gn]:gn,[Vn]:gn,[Xn]:dn,[Jn]:fn,[Zn]:ln,[Qn]:tn};function Cr(t){return Er[t]}function Fr(t){return Dr[t]}function Sr(t,e,n){return t?t.offset(e,n):void 0}function $r(t,e,n){return Sr(Cr(t),e,n)}function Tr(t,e,n){return Sr(Fr(t),e,n)}function Br(t,e,n,r){return t?t.range(e,n,r):void 0}function zr(t,e,n,r){return Br(Cr(t),e,n,r)}function Nr(t,e,n,r){return Br(Fr(t),e,n,r)}const Or=1e3,Rr=6e4,Ur=36e5,Lr=864e5,qr=2592e6,Pr=31536e6,jr=[jn,Wn,Yn,Xn,Jn,Zn,Qn],Ir=jr.slice(0,-1),Wr=Ir.slice(0,-1),Hr=Wr.slice(0,-1),Yr=Hr.slice(0,-1),Gr=[jn,Wn],Vr=[jn],Xr=[[Ir,1,Or],[Ir,5,5e3],[Ir,15,15e3],[Ir,30,3e4],[Wr,1,Rr],[Wr,5,3e5],[Wr,15,9e5],[Wr,30,18e5],[Hr,1,Ur],[Hr,3,108e5],[Hr,6,216e5],[Hr,12,432e5],[Yr,1,Lr],[[jn,Hn],1,6048e5],[Gr,1,qr],[Gr,3,7776e6],[Vr,1,Pr]];function Jr(t){const e=t.extent,n=t.maxbins||40,r=Math.abs(Dt(e))/n;let i,o,a=ee((t=>t[2])).right(Xr,r);return a===Xr.length?(i=Vr,o=be(e[0]/Pr,e[1]/Pr,n)):a?(a=Xr[r/Xr[a-1][2]=12)]},q:function(t){return 1+~~(t.getMonth()/3)},Q:wo,s:ko,S:ji,u:Ii,U:Wi,V:Yi,w:Gi,W:Vi,x:null,X:null,y:Xi,Y:Zi,Z:Ki,"%":bo},x={a:function(t){return a[t.getUTCDay()]},A:function(t){return o[t.getUTCDay()]},b:function(t){return u[t.getUTCMonth()]},B:function(t){return s[t.getUTCMonth()]},c:null,d:to,e:to,f:oo,g:yo,G:_o,H:eo,I:no,j:ro,L:io,m:ao,M:so,p:function(t){return i[+(t.getUTCHours()>=12)]},q:function(t){return 1+~~(t.getUTCMonth()/3)},Q:wo,s:ko,S:uo,u:lo,U:co,V:ho,w:po,W:go,x:null,X:null,y:mo,Y:vo,Z:xo,"%":bo},b={a:function(t,e,n){var r=d.exec(e.slice(n));return r?(t.w=p.get(r[0].toLowerCase()),n+r[0].length):-1},A:function(t,e,n){var r=f.exec(e.slice(n));return r?(t.w=h.get(r[0].toLowerCase()),n+r[0].length):-1},b:function(t,e,n){var r=y.exec(e.slice(n));return r?(t.m=v.get(r[0].toLowerCase()),n+r[0].length):-1},B:function(t,e,n){var r=g.exec(e.slice(n));return r?(t.m=m.get(r[0].toLowerCase()),n+r[0].length):-1},c:function(t,n,r){return A(t,e,n,r)},d:Ai,e:Ai,f:Si,g:xi,G:_i,H:Ei,I:Ei,j:Mi,L:Fi,m:ki,M:Di,p:function(t,e,n){var r=l.exec(e.slice(n));return r?(t.p=c.get(r[0].toLowerCase()),n+r[0].length):-1},q:wi,Q:Ti,s:Bi,S:Ci,u:gi,U:mi,V:yi,w:pi,W:vi,x:function(t,e,r){return A(t,n,e,r)},X:function(t,e,n){return A(t,r,e,n)},y:xi,Y:_i,Z:bi,"%":$i};function w(t,e){return function(n){var r,i,o,a=[],s=-1,u=0,l=t.length;for(n instanceof Date||(n=new Date(+n));++s53)return null;"w"in o||(o.w=1),"Z"in 
o?(i=(r=Qr(Kr(o.y,0,1))).getUTCDay(),r=i>4||0===i?Dn.ceil(r):Dn(r),r=gn.offset(r,7*(o.V-1)),o.y=r.getUTCFullYear(),o.m=r.getUTCMonth(),o.d=r.getUTCDate()+(o.w+6)%7):(i=(r=Zr(Kr(o.y,0,1))).getDay(),r=i>4||0===i?_n.ceil(r):_n(r),r=pn.offset(r,7*(o.V-1)),o.y=r.getFullYear(),o.m=r.getMonth(),o.d=r.getDate()+(o.w+6)%7)}else("W"in o||"U"in o)&&("w"in o||(o.w="u"in o?o.u%7:"W"in o?1:0),i="Z"in o?Qr(Kr(o.y,0,1)).getUTCDay():Zr(Kr(o.y,0,1)).getDay(),o.m=0,o.d="W"in o?(o.w+6)%7+7*o.W-(i+5)%7:o.w+7*o.U-(i+6)%7);return"Z"in o?(o.H+=o.Z/100|0,o.M+=o.Z%100,Qr(o)):Zr(o)}}function A(t,e,n,r){for(var i,o,a=0,s=e.length,u=n.length;a=u)return-1;if(37===(i=e.charCodeAt(a++))){if(i=e.charAt(a++),!(o=b[i in ai?e.charAt(a++):i])||(r=o(t,n,r))<0)return-1}else if(i!=n.charCodeAt(r++))return-1}return r}return _.x=w(n,_),_.X=w(r,_),_.c=w(e,_),x.x=w(n,x),x.X=w(r,x),x.c=w(e,x),{format:function(t){var e=w(t+="",_);return e.toString=function(){return t},e},parse:function(t){var e=k(t+="",!1);return e.toString=function(){return t},e},utcFormat:function(t){var e=w(t+="",x);return e.toString=function(){return t},e},utcParse:function(t){var e=k(t+="",!0);return e.toString=function(){return t},e}}}var ei,ni,ri,ii,oi,ai={"-":"",_:" ",0:"0"},si=/^\s*\d+/,ui=/^%/,li=/[\\^$*+?|[\]().{}]/g;function ci(t,e,n){var r=t<0?"-":"",i=(r?-t:t)+"",o=i.length;return r+(o[t.toLowerCase(),e])))}function pi(t,e,n){var r=si.exec(e.slice(n,n+1));return r?(t.w=+r[0],n+r[0].length):-1}function gi(t,e,n){var r=si.exec(e.slice(n,n+1));return r?(t.u=+r[0],n+r[0].length):-1}function mi(t,e,n){var r=si.exec(e.slice(n,n+2));return r?(t.U=+r[0],n+r[0].length):-1}function yi(t,e,n){var r=si.exec(e.slice(n,n+2));return r?(t.V=+r[0],n+r[0].length):-1}function vi(t,e,n){var r=si.exec(e.slice(n,n+2));return r?(t.W=+r[0],n+r[0].length):-1}function _i(t,e,n){var r=si.exec(e.slice(n,n+4));return r?(t.y=+r[0],n+r[0].length):-1}function xi(t,e,n){var r=si.exec(e.slice(n,n+2));return r?(t.y=+r[0]+(+r[0]>68?1900:2e3),n+r[0].length):-1}function bi(t,e,n){var r=/^(Z)|([+-]\d\d)(?::?(\d\d))?/.exec(e.slice(n,n+6));return r?(t.Z=r[1]?0:-(r[2]+(r[3]||"00")),n+r[0].length):-1}function wi(t,e,n){var r=si.exec(e.slice(n,n+1));return r?(t.q=3*r[0]-3,n+r[0].length):-1}function ki(t,e,n){var r=si.exec(e.slice(n,n+2));return r?(t.m=r[0]-1,n+r[0].length):-1}function Ai(t,e,n){var r=si.exec(e.slice(n,n+2));return r?(t.d=+r[0],n+r[0].length):-1}function Mi(t,e,n){var r=si.exec(e.slice(n,n+3));return r?(t.m=0,t.d=+r[0],n+r[0].length):-1}function Ei(t,e,n){var r=si.exec(e.slice(n,n+2));return r?(t.H=+r[0],n+r[0].length):-1}function Di(t,e,n){var r=si.exec(e.slice(n,n+2));return r?(t.M=+r[0],n+r[0].length):-1}function Ci(t,e,n){var r=si.exec(e.slice(n,n+2));return r?(t.S=+r[0],n+r[0].length):-1}function Fi(t,e,n){var r=si.exec(e.slice(n,n+3));return r?(t.L=+r[0],n+r[0].length):-1}function Si(t,e,n){var r=si.exec(e.slice(n,n+6));return r?(t.L=Math.floor(r[0]/1e3),n+r[0].length):-1}function $i(t,e,n){var r=ui.exec(e.slice(n,n+1));return r?n+r[0].length:-1}function Ti(t,e,n){var r=si.exec(e.slice(n));return r?(t.Q=+r[0],n+r[0].length):-1}function Bi(t,e,n){var r=si.exec(e.slice(n));return r?(t.s=+r[0],n+r[0].length):-1}function zi(t,e){return ci(t.getDate(),e,2)}function Ni(t,e){return ci(t.getHours(),e,2)}function Oi(t,e){return ci(t.getHours()%12||12,e,2)}function Ri(t,e){return ci(1+pn.count(Nn(t),t),e,3)}function Ui(t,e){return ci(t.getMilliseconds(),e,3)}function Li(t,e){return Ui(t,e)+"000"}function qi(t,e){return ci(t.getMonth()+1,e,2)}function Pi(t,e){return 
[minified Vega JavaScript bundle: vendored, machine-generated asset content omitted — the extracted text of this span was corrupted (chunks stripped between "<" and ">") and is not recoverable]
RegExp(`^hsla\\(${Ic},${Wc},${Wc},${Ic}\\)$`),Qc={aliceblue:15792383,antiquewhite:16444375,aqua:65535,aquamarine:8388564,azure:15794175,beige:16119260,bisque:16770244,black:0,blanchedalmond:16772045,blue:255,blueviolet:9055202,brown:10824234,burlywood:14596231,cadetblue:6266528,chartreuse:8388352,chocolate:13789470,coral:16744272,cornflowerblue:6591981,cornsilk:16775388,crimson:14423100,cyan:65535,darkblue:139,darkcyan:35723,darkgoldenrod:12092939,darkgray:11119017,darkgreen:25600,darkgrey:11119017,darkkhaki:12433259,darkmagenta:9109643,darkolivegreen:5597999,darkorange:16747520,darkorchid:10040012,darkred:9109504,darksalmon:15308410,darkseagreen:9419919,darkslateblue:4734347,darkslategray:3100495,darkslategrey:3100495,darkturquoise:52945,darkviolet:9699539,deeppink:16716947,deepskyblue:49151,dimgray:6908265,dimgrey:6908265,dodgerblue:2003199,firebrick:11674146,floralwhite:16775920,forestgreen:2263842,fuchsia:16711935,gainsboro:14474460,ghostwhite:16316671,gold:16766720,goldenrod:14329120,gray:8421504,green:32768,greenyellow:11403055,grey:8421504,honeydew:15794160,hotpink:16738740,indianred:13458524,indigo:4915330,ivory:16777200,khaki:15787660,lavender:15132410,lavenderblush:16773365,lawngreen:8190976,lemonchiffon:16775885,lightblue:11393254,lightcoral:15761536,lightcyan:14745599,lightgoldenrodyellow:16448210,lightgray:13882323,lightgreen:9498256,lightgrey:13882323,lightpink:16758465,lightsalmon:16752762,lightseagreen:2142890,lightskyblue:8900346,lightslategray:7833753,lightslategrey:7833753,lightsteelblue:11584734,lightyellow:16777184,lime:65280,limegreen:3329330,linen:16445670,magenta:16711935,maroon:8388608,mediumaquamarine:6737322,mediumblue:205,mediumorchid:12211667,mediumpurple:9662683,mediumseagreen:3978097,mediumslateblue:8087790,mediumspringgreen:64154,mediumturquoise:4772300,mediumvioletred:13047173,midnightblue:1644912,mintcream:16121850,mistyrose:16770273,moccasin:16770229,navajowhite:16768685,navy:128,oldlace:16643558,olive:8421376,olivedrab:7048739,orange:16753920,orangered:16729344,orchid:14315734,palegoldenrod:15657130,palegreen:10025880,paleturquoise:11529966,palevioletred:14381203,papayawhip:16773077,peachpuff:16767673,peru:13468991,pink:16761035,plum:14524637,powderblue:11591910,purple:8388736,rebeccapurple:6697881,red:16711680,rosybrown:12357519,royalblue:4286945,saddlebrown:9127187,salmon:16416882,sandybrown:16032864,seagreen:3050327,seashell:16774638,sienna:10506797,silver:12632256,skyblue:8900331,slateblue:6970061,slategray:7372944,slategrey:7372944,snow:16775930,springgreen:65407,steelblue:4620980,tan:13808780,teal:32896,thistle:14204888,tomato:16737095,turquoise:4251856,violet:15631086,wheat:16113331,white:16777215,whitesmoke:16119285,yellow:16776960,yellowgreen:10145074};function Kc(){return this.rgb().formatHex()}function tf(){return this.rgb().formatRgb()}function ef(t){var e,n;return t=(t+"").trim().toLowerCase(),(e=Hc.exec(t))?(n=e[1].length,e=parseInt(e[1],16),6===n?nf(e):3===n?new sf(e>>8&15|e>>4&240,e>>4&15|240&e,(15&e)<<4|15&e,1):8===n?rf(e>>24&255,e>>16&255,e>>8&255,(255&e)/255):4===n?rf(e>>12&15|e>>8&240,e>>8&15|e>>4&240,e>>4&15|240&e,((15&e)<<4|15&e)/255):null):(e=Yc.exec(t))?new sf(e[1],e[2],e[3],1):(e=Gc.exec(t))?new sf(255*e[1]/100,255*e[2]/100,255*e[3]/100,1):(e=Vc.exec(t))?rf(e[1],e[2],e[3],e[4]):(e=Xc.exec(t))?rf(255*e[1]/100,255*e[2]/100,255*e[3]/100,e[4]):(e=Jc.exec(t))?df(e[1],e[2]/100,e[3]/100,1):(e=Zc.exec(t))?df(e[1],e[2]/100,e[3]/100,e[4]):Qc.hasOwnProperty(t)?nf(Qc[t]):"transparent"===t?new sf(NaN,NaN,NaN,0):null}function nf(t){return new 
sf(t>>16&255,t>>8&255,255&t,1)}function rf(t,e,n,r){return r<=0&&(t=e=n=NaN),new sf(t,e,n,r)}function of(t){return t instanceof Lc||(t=ef(t)),t?new sf((t=t.rgb()).r,t.g,t.b,t.opacity):new sf}function af(t,e,n,r){return 1===arguments.length?of(t):new sf(t,e,n,null==r?1:r)}function sf(t,e,n,r){this.r=+t,this.g=+e,this.b=+n,this.opacity=+r}function uf(){return`#${hf(this.r)}${hf(this.g)}${hf(this.b)}`}function lf(){const t=cf(this.opacity);return`${1===t?"rgb(":"rgba("}${ff(this.r)}, ${ff(this.g)}, ${ff(this.b)}${1===t?")":`, ${t})`}`}function cf(t){return isNaN(t)?1:Math.max(0,Math.min(1,t))}function ff(t){return Math.max(0,Math.min(255,Math.round(t)||0))}function hf(t){return((t=ff(t))<16?"0":"")+t.toString(16)}function df(t,e,n,r){return r<=0?t=e=n=NaN:n<=0||n>=1?t=e=NaN:e<=0&&(t=NaN),new mf(t,e,n,r)}function pf(t){if(t instanceof mf)return new mf(t.h,t.s,t.l,t.opacity);if(t instanceof Lc||(t=ef(t)),!t)return new mf;if(t instanceof mf)return t;var e=(t=t.rgb()).r/255,n=t.g/255,r=t.b/255,i=Math.min(e,n,r),o=Math.max(e,n,r),a=NaN,s=o-i,u=(o+i)/2;return s?(a=e===o?(n-r)/s+6*(n0&&u<1?0:a,new mf(a,s,u,t.opacity)}function gf(t,e,n,r){return 1===arguments.length?pf(t):new mf(t,e,n,null==r?1:r)}function mf(t,e,n,r){this.h=+t,this.s=+e,this.l=+n,this.opacity=+r}function yf(t){return(t=(t||0)%360)<0?t+360:t}function vf(t){return Math.max(0,Math.min(1,t||0))}function _f(t,e,n){return 255*(t<60?e+(n-e)*t/60:t<180?n:t<240?e+(n-e)*(240-t)/60:e)}Rc(Lc,ef,{copy(t){return Object.assign(new this.constructor,this,t)},displayable(){return this.rgb().displayable()},hex:Kc,formatHex:Kc,formatHex8:function(){return this.rgb().formatHex8()},formatHsl:function(){return pf(this).formatHsl()},formatRgb:tf,toString:tf}),Rc(sf,af,Uc(Lc,{brighter(t){return t=null==t?Pc:Math.pow(Pc,t),new sf(this.r*t,this.g*t,this.b*t,this.opacity)},darker(t){return t=null==t?qc:Math.pow(qc,t),new sf(this.r*t,this.g*t,this.b*t,this.opacity)},rgb(){return this},clamp(){return new sf(ff(this.r),ff(this.g),ff(this.b),cf(this.opacity))},displayable(){return-.5<=this.r&&this.r<255.5&&-.5<=this.g&&this.g<255.5&&-.5<=this.b&&this.b<255.5&&0<=this.opacity&&this.opacity<=1},hex:uf,formatHex:uf,formatHex8:function(){return`#${hf(this.r)}${hf(this.g)}${hf(this.b)}${hf(255*(isNaN(this.opacity)?1:this.opacity))}`},formatRgb:lf,toString:lf})),Rc(mf,gf,Uc(Lc,{brighter(t){return t=null==t?Pc:Math.pow(Pc,t),new mf(this.h,this.s,this.l*t,this.opacity)},darker(t){return t=null==t?qc:Math.pow(qc,t),new mf(this.h,this.s,this.l*t,this.opacity)},rgb(){var t=this.h%360+360*(this.h<0),e=isNaN(t)||isNaN(this.s)?0:this.s,n=this.l,r=n+(n<.5?n:1-n)*e,i=2*n-r;return new sf(_f(t>=240?t-240:t+120,i,r),_f(t,i,r),_f(t<120?t+240:t-120,i,r),this.opacity)},clamp(){return new mf(yf(this.h),vf(this.s),vf(this.l),cf(this.opacity))},displayable(){return(0<=this.s&&this.s<=1||isNaN(this.s))&&0<=this.l&&this.l<=1&&0<=this.opacity&&this.opacity<=1},formatHsl(){const t=cf(this.opacity);return`${1===t?"hsl(":"hsla("}${yf(this.h)}, ${100*vf(this.s)}%, ${100*vf(this.l)}%${1===t?")":`, ${t})`}`}}));const xf=Math.PI/180,bf=180/Math.PI,wf=.96422,kf=1,Af=.82521,Mf=4/29,Ef=6/29,Df=3*Ef*Ef,Cf=Ef*Ef*Ef;function Ff(t){if(t instanceof $f)return new $f(t.l,t.a,t.b,t.opacity);if(t instanceof Rf)return Uf(t);t instanceof sf||(t=of(t));var e,n,r=Nf(t.r),i=Nf(t.g),o=Nf(t.b),a=Tf((.2225045*r+.7168786*i+.0606169*o)/kf);return r===i&&i===o?e=n=a:(e=Tf((.4360747*r+.3850649*i+.1430804*o)/wf),n=Tf((.0139322*r+.0971045*i+.7141733*o)/Af)),new $f(116*a-16,500*(e-a),200*(a-n),t.opacity)}function 
Sf(t,e,n,r){return 1===arguments.length?Ff(t):new $f(t,e,n,null==r?1:r)}function $f(t,e,n,r){this.l=+t,this.a=+e,this.b=+n,this.opacity=+r}function Tf(t){return t>Cf?Math.pow(t,1/3):t/Df+Mf}function Bf(t){return t>Ef?t*t*t:Df*(t-Mf)}function zf(t){return 255*(t<=.0031308?12.92*t:1.055*Math.pow(t,1/2.4)-.055)}function Nf(t){return(t/=255)<=.04045?t/12.92:Math.pow((t+.055)/1.055,2.4)}function Of(t,e,n,r){return 1===arguments.length?function(t){if(t instanceof Rf)return new Rf(t.h,t.c,t.l,t.opacity);if(t instanceof $f||(t=Ff(t)),0===t.a&&0===t.b)return new Rf(NaN,0=1?(n=1,e-1):Math.floor(n*e),i=t[r],o=t[r+1],a=r>0?t[r-1]:2*i-o,s=r()=>t;function Kf(t,e){return function(n){return t+n*e}}function th(t,e){var n=e-t;return n?Kf(t,n>180||n<-180?n-360*Math.round(n/360):n):Qf(isNaN(t)?e:t)}function eh(t){return 1==(t=+t)?nh:function(e,n){return n-e?function(t,e,n){return t=Math.pow(t,n),e=Math.pow(e,n)-t,n=1/n,function(r){return Math.pow(t+r*e,n)}}(e,n,t):Qf(isNaN(e)?n:e)}}function nh(t,e){var n=e-t;return n?Kf(t,n):Qf(isNaN(t)?e:t)}var rh=function t(e){var n=eh(e);function r(t,e){var r=n((t=af(t)).r,(e=af(e)).r),i=n(t.g,e.g),o=n(t.b,e.b),a=nh(t.opacity,e.opacity);return function(e){return t.r=r(e),t.g=i(e),t.b=o(e),t.opacity=a(e),t+""}}return r.gamma=t,r}(1);function ih(t){return function(e){var n,r,i=e.length,o=new Array(i),a=new Array(i),s=new Array(i);for(n=0;no&&(i=e.slice(o,i),s[a]?s[a]+=i:s[++a]=i),(n=n[0])===(r=r[0])?s[a]?s[a]+=r:s[++a]=r:(s[++a]=null,u.push({i:a,x:fh(n,r)})),o=ph.lastIndex;return o180?e+=360:e-t>180&&(t+=360),o.push({i:n.push(i(n)+"rotate(",null,r)-2,x:fh(t,e)})):e&&n.push(i(n)+"rotate("+e+r)}(o.rotate,a.rotate,s,u),function(t,e,n,o){t!==e?o.push({i:n.push(i(n)+"skewX(",null,r)-2,x:fh(t,e)}):e&&n.push(i(n)+"skewX("+e+r)}(o.skewX,a.skewX,s,u),function(t,e,n,r,o,a){if(t!==n||e!==r){var s=o.push(i(o)+"scale(",null,",",null,")");a.push({i:s-4,x:fh(t,n)},{i:s-2,x:fh(e,r)})}else 1===n&&1===r||o.push(i(o)+"scale("+n+","+r+")")}(o.scaleX,o.scaleY,a.scaleX,a.scaleY,s,u),o=a=null,function(t){for(var e,n=-1,r=u.length;++ne&&(n=t,t=e,e=n),function(n){return Math.max(t,Math.min(e,n))}}(a[0],a[t-1])),r=t>2?Ih:jh,i=o=null,f}function f(e){return null==e||isNaN(e=+e)?n:(i||(i=r(a.map(t),s,u)))(t(l(e)))}return f.invert=function(n){return l(e((o||(o=r(s,a.map(t),fh)))(n)))},f.domain=function(t){return arguments.length?(a=Array.from(t,Uh),c()):a.slice()},f.range=function(t){return arguments.length?(s=Array.from(t),c()):s.slice()},f.rangeRound=function(t){return s=Array.from(t),u=yh,c()},f.clamp=function(t){return arguments.length?(l=!!t||qh,c()):l!==qh},f.interpolate=function(t){return arguments.length?(u=t,c()):u},f.unknown=function(t){return arguments.length?(n=t,f):n},function(n,r){return t=n,e=r,c()}}function Yh(){return Hh()(qh,qh)}function Gh(t,e,n,r){var i,o=be(t,e,n);switch((r=Re(null==r?",f":r)).type){case"s":var a=Math.max(Math.abs(t),Math.abs(e));return null!=r.precision||isNaN(i=Xe(o,a))||(r.precision=i),We(r,a);case"":case"e":case"g":case"p":case"r":null!=r.precision||isNaN(i=Je(o,Math.max(Math.abs(t),Math.abs(e))))||(r.precision=i-("e"===r.type));break;case"f":case"%":null!=r.precision||isNaN(i=Ve(o))||(r.precision=i-2*("%"===r.type))}return Ie(r)}function Vh(t){var e=t.domain;return t.ticks=function(t){var n=e();return _e(n[0],n[n.length-1],null==t?10:t)},t.tickFormat=function(t,n){var r=e();return Gh(r[0],r[r.length-1],null==t?10:t,n)},t.nice=function(n){null==n&&(n=10);var r,i,o=e(),a=0,s=o.length-1,u=o[a],l=o[s],c=10;for(l0;){if((i=xe(u,l,n))===r)return 
o[a]=u,o[s]=l,e(o);if(i>0)u=Math.floor(u/i)*i,l=Math.ceil(l/i)*i;else{if(!(i<0))break;u=Math.ceil(u*i)/i,l=Math.floor(l*i)/i}r=i}return t},t}function Xh(t,e){var n,r=0,i=(t=t.slice()).length-1,o=t[r],a=t[i];return a-t(-e,n)}function nd(t){const e=t(Jh,Zh),n=e.domain;let r,i,o=10;function a(){return r=function(t){return t===Math.E?Math.log:10===t&&Math.log10||2===t&&Math.log2||(t=Math.log(t),e=>Math.log(e)/t)}(o),i=function(t){return 10===t?td:t===Math.E?Math.exp:e=>Math.pow(t,e)}(o),n()[0]<0?(r=ed(r),i=ed(i),t(Qh,Kh)):t(Jh,Zh),e}return e.base=function(t){return arguments.length?(o=+t,a()):o},e.domain=function(t){return arguments.length?(n(t),a()):n()},e.ticks=t=>{const e=n();let a=e[0],s=e[e.length-1];const u=s0){for(;f<=h;++f)for(l=1;ls)break;p.push(c)}}else for(;f<=h;++f)for(l=o-1;l>=1;--l)if(c=f>0?l/i(-f):l*i(f),!(cs)break;p.push(c)}2*p.length{if(null==t&&(t=10),null==n&&(n=10===o?"s":","),"function"!=typeof n&&(o%1||null!=(n=Re(n)).precision||(n.trim=!0),n=Ie(n)),t===1/0)return n;const a=Math.max(1,o*t/e.ticks().length);return t=>{let e=t/i(Math.round(r(t)));return e*on(Xh(n(),{floor:t=>i(Math.floor(r(t))),ceil:t=>i(Math.ceil(r(t)))})),e}function rd(t){return function(e){return Math.sign(e)*Math.log1p(Math.abs(e/t))}}function id(t){return function(e){return Math.sign(e)*Math.expm1(Math.abs(e))*t}}function od(t){var e=1,n=t(rd(e),id(e));return n.constant=function(n){return arguments.length?t(rd(e=+n),id(e)):e},Vh(n)}function ad(t){return function(e){return e<0?-Math.pow(-e,t):Math.pow(e,t)}}function sd(t){return t<0?-Math.sqrt(-t):Math.sqrt(t)}function ud(t){return t<0?-t*t:t*t}function ld(t){var e=t(qh,qh),n=1;return e.exponent=function(e){return arguments.length?1===(n=+e)?t(qh,qh):.5===n?t(sd,ud):t(ad(n),ad(1/n)):n},Vh(e)}function cd(){var t=ld(Hh());return t.copy=function(){return Wh(t,cd()).exponent(t.exponent())},Bc.apply(t,arguments),t}function fd(t){return new Date(t)}function hd(t){return t instanceof Date?+t:+new Date(+t)}function dd(t,e,n,r,i,o,a,s,u,l){var c=Yh(),f=c.invert,h=c.domain,d=l(".%L"),p=l(":%S"),g=l("%I:%M"),m=l("%I %p"),y=l("%a %d"),v=l("%b %d"),_=l("%B"),x=l("%Y");function b(t){return(u(t)0?r:1:0}const Td="linear",Bd="log",zd="pow",Nd="sqrt",Od="symlog",Rd="time",Ud="utc",Ld="sequential",qd="diverging",Pd="quantile",jd="quantize",Id="threshold",Wd="ordinal",Hd="point",Yd="band",Gd="bin-ordinal",Vd="continuous",Xd="discrete",Jd="discretizing",Zd="interpolating",Qd="temporal";function Kd(){const t=Oc().unknown(void 0),e=t.domain,n=t.range;let r,i,o=[0,1],a=!1,s=0,u=0,l=.5;function c(){const t=e().length,c=o[1]d+r*t));return n(c?p.reverse():p)}return delete t.unknown,t.domain=function(t){return arguments.length?(e(t),c()):e()},t.range=function(t){return arguments.length?(o=[+t[0],+t[1]],c()):o.slice()},t.rangeRound=function(t){return o=[+t[0],+t[1]],a=!0,c()},t.bandwidth=function(){return i},t.step=function(){return r},t.round=function(t){return arguments.length?(a=!!t,c()):a},t.padding=function(t){return arguments.length?(u=Math.max(0,Math.min(1,t)),s=u,c()):s},t.paddingInner=function(t){return arguments.length?(s=Math.max(0,Math.min(1,t)),c()):s},t.paddingOuter=function(t){return arguments.length?(u=Math.max(0,Math.min(1,t)),c()):u},t.align=function(t){return arguments.length?(l=Math.max(0,Math.min(1,t)),c()):l},t.invertRange=function(t){if(null==t[0]||null==t[1])return;const r=o[1]o[1-r])?void 0:(u=Math.max(0,oe(a,f)-1),l=f===h?u:oe(a,h)-1,f-a[u]>i+1e-10&&++u,r&&(c=u,u=s-l,l=s-c),u>l?void 0:e().slice(u,l+1))},t.invert=function(e){const 
n=t.invertRange([e,e]);return n?n[0]:n},t.copy=function(){return Kd().domain(e()).range(o).round(a).paddingInner(s).paddingOuter(u).align(l)},c()}function tp(t){const e=t.copy;return t.padding=t.paddingOuter,delete t.paddingInner,t.copy=function(){return tp(e())},t}var ep=Array.prototype.map;const np=Array.prototype.slice;const rp=new Map,ip=Symbol("vega_scale");function op(t){return t[ip]=!0,t}function ap(t,e,n){return arguments.length>1?(rp.set(t,function(t,e,n){const r=function(){const n=e();return n.invertRange||(n.invertRange=n.invert?function(t){return function(e){let n,r=e[0],i=e[1];return i=s&&n[o]<=u&&(l<0&&(l=o),r=o);if(!(l<0))return s=t.invertExtent(n[l]),u=t.invertExtent(n[r]),[void 0===s[0]?s[1]:s[0],void 0===u[1]?u[0]:u[1]]}}(n):void 0),n.type=t,op(n)};return r.metadata=Bt(V(n)),r}(t,e,n)),this):sp(t)?rp.get(t):void 0}function sp(t){return rp.has(t)}function up(t,e){const n=rp.get(t);return n&&n.metadata[e]}function lp(t){return up(t,Vd)}function cp(t){return up(t,Xd)}function fp(t){return up(t,Jd)}function hp(t){return up(t,Bd)}function dp(t){return up(t,Zd)}function pp(t){return up(t,Pd)}ap("identity",(function t(e){var n;function r(t){return null==t||isNaN(t=+t)?n:t}return r.invert=r,r.domain=r.range=function(t){return arguments.length?(e=Array.from(t,Uh),r):e.slice()},r.unknown=function(t){return arguments.length?(n=t,r):n},r.copy=function(){return t(e).unknown(n)},e=arguments.length?Array.from(e,Uh):[0,1],Vh(r)})),ap(Td,(function t(){var e=Yh();return e.copy=function(){return Wh(e,t())},Bc.apply(e,arguments),Vh(e)}),Vd),ap(Bd,(function t(){const e=nd(Hh()).domain([1,10]);return e.copy=()=>Wh(e,t()).base(e.base()),Bc.apply(e,arguments),e}),[Vd,Bd]),ap(zd,cd,Vd),ap(Nd,(function(){return cd.apply(null,arguments).exponent(.5)}),Vd),ap(Od,(function t(){var e=od(Hh());return e.copy=function(){return Wh(e,t()).constant(e.constant())},Bc.apply(e,arguments)}),Vd),ap(Rd,(function(){return Bc.apply(dd(qn,Pn,Nn,Bn,vn,pn,hn,cn,ln,ni).domain([new Date(2e3,0,1),new Date(2e3,0,2)]),arguments)}),[Vd,Qd]),ap(Ud,(function(){return Bc.apply(dd(Un,Ln,On,zn,En,gn,dn,fn,ln,ii).domain([Date.UTC(2e3,0,1),Date.UTC(2e3,0,2)]),arguments)}),[Vd,Qd]),ap(Ld,md,[Vd,Zd]),ap(`${Ld}-${Td}`,md,[Vd,Zd]),ap(`${Ld}-${Bd}`,(function t(){var e=nd(pd()).domain([1,10]);return e.copy=function(){return gd(e,t()).base(e.base())},zc.apply(e,arguments)}),[Vd,Zd,Bd]),ap(`${Ld}-${zd}`,yd,[Vd,Zd]),ap(`${Ld}-${Nd}`,(function(){return yd.apply(null,arguments).exponent(.5)}),[Vd,Zd]),ap(`${Ld}-${Od}`,(function t(){var e=od(pd());return e.copy=function(){return gd(e,t()).constant(e.constant())},zc.apply(e,arguments)}),[Vd,Zd]),ap(`${qd}-${Td}`,(function t(){var e=Vh(vd()(qh));return e.copy=function(){return gd(e,t())},zc.apply(e,arguments)}),[Vd,Zd]),ap(`${qd}-${Bd}`,(function t(){var e=nd(vd()).domain([.1,1,10]);return e.copy=function(){return gd(e,t()).base(e.base())},zc.apply(e,arguments)}),[Vd,Zd,Bd]),ap(`${qd}-${zd}`,_d,[Vd,Zd]),ap(`${qd}-${Nd}`,(function(){return _d.apply(null,arguments).exponent(.5)}),[Vd,Zd]),ap(`${qd}-${Od}`,(function t(){var e=od(vd());return e.copy=function(){return gd(e,t()).constant(e.constant())},zc.apply(e,arguments)}),[Vd,Zd]),ap(Pd,(function t(){var e,n=[],r=[],i=[];function o(){var t=0,e=Math.max(1,r.length);for(i=new Array(e-1);++t0?i[e-1]:n[0],e=i?[o[i-1],r]:[o[e-1],o[e]]},s.unknown=function(t){return arguments.length?(e=t,s):s},s.thresholds=function(){return o.slice()},s.copy=function(){return t().domain([n,r]).range(a).unknown(e)},Bc.apply(Vh(s),arguments)}),Jd),ap(Id,(function t(){var 
e,n=[.5],r=[0,1],i=1;function o(t){return null!=t&&t<=t?r[oe(n,t,0,i)]:e}return o.domain=function(t){return arguments.length?(n=Array.from(t),i=Math.min(n.length,r.length-1),o):n.slice()},o.range=function(t){return arguments.length?(r=Array.from(t),i=Math.min(n.length,r.length-1),o):r.slice()},o.invertExtent=function(t){var e=r.indexOf(t);return[n[e-1],n[e]]},o.unknown=function(t){return arguments.length?(e=t,o):e},o.copy=function(){return t().domain(n).range(r).unknown(e)},Bc.apply(o,arguments)}),Jd),ap(Gd,(function t(){let e=[],n=[];function r(t){return null==t||t!=t?void 0:n[(oe(e,t)-1)%n.length]}return r.domain=function(t){return arguments.length?(e=function(t){return ep.call(t,S)}(t),r):e.slice()},r.range=function(t){return arguments.length?(n=np.call(t),r):n.slice()},r.tickFormat=function(t,n){return Gh(e[0],F(e),null==t?10:t,n)},r.copy=function(){return t().domain(r.domain()).range(r.range())},r}),[Xd,Jd]),ap(Wd,Oc,Xd),ap(Yd,Kd,Xd),ap(Hd,(function(){return tp(Kd().paddingInner(1))}),Xd);const gp=["clamp","base","constant","exponent"];function mp(t,e){const n=e[0],r=F(e)-n;return function(e){return t(n+e*r)}}function yp(t,e,n){return Oh(xp(e||"rgb",n),t)}function vp(t,e){const n=new Array(e),r=e+1;for(let i=0;it[e]?a[e](t[e]()):0)),a):rt(.5)}function xp(t,e){const n=Rh[function(t){return"interpolate"+t.toLowerCase().split("-").map((t=>t[0].toUpperCase()+t.slice(1))).join("")}(t)];return null!=e&&n&&n.gamma?n.gamma(e):n}function bp(t){if(k(t))return t;const e=t.length/6|0,n=new Array(e);for(let r=0;r1?(kp[t]=e,this):kp[t]}wp({accent:wd,category10:bd,category20:"1f77b4aec7e8ff7f0effbb782ca02c98df8ad62728ff98969467bdc5b0d58c564bc49c94e377c2f7b6d27f7f7fc7c7c7bcbd22dbdb8d17becf9edae5",category20b:"393b795254a36b6ecf9c9ede6379398ca252b5cf6bcedb9c8c6d31bd9e39e7ba52e7cb94843c39ad494ad6616be7969c7b4173a55194ce6dbdde9ed6",category20c:"3182bd6baed69ecae1c6dbefe6550dfd8d3cfdae6bfdd0a231a35474c476a1d99bc7e9c0756bb19e9ac8bcbddcdadaeb636363969696bdbdbdd9d9d9",dark2:kd,observable10:Ad,paired:Md,pastel1:Ed,pastel2:Dd,set1:Cd,set2:Fd,set3:Sd,tableau10:"4c78a8f58518e4575672b7b254a24beeca3bb279a2ff9da69d755dbab0ac",tableau20:"4c78a89ecae9f58518ffbf7954a24b88d27ab79a20f2cf5b43989483bcb6e45756ff9d9879706ebab0acd67195fcbfd2b279a2d6a5c99e765fd8b5a5"},bp),wp({blues:"cfe1f2bed8eca8cee58fc1de74b2d75ba3cf4592c63181bd206fb2125ca40a4a90",greens:"d3eecdc0e6baabdda594d3917bc77d60ba6c46ab5e329a512089430e7735036429",greys:"e2e2e2d4d4d4c4c4c4b1b1b19d9d9d8888887575756262624d4d4d3535351e1e1e",oranges:"fdd8b3fdc998fdb87bfda55efc9244f87f2cf06b18e4580bd14904b93d029f3303",purples:"e2e1efd4d4e8c4c5e0b4b3d6a3a0cc928ec3827cb97566ae684ea25c3696501f8c",reds:"fdc9b4fcb49afc9e80fc8767fa7051f6573fec3f2fdc2a25c81b1db21218970b13",blueGreen:"d5efedc1e8e0a7ddd18bd2be70c6a958ba9144ad77319c5d2089460e7736036429",bluePurple:"ccddecbad0e4a8c2dd9ab0d4919cc98d85be8b6db28a55a6873c99822287730f71",greenBlue:"d3eecec5e8c3b1e1bb9bd8bb82cec269c2ca51b2cd3c9fc7288abd1675b10b60a1",orangeRed:"fddcaffdcf9bfdc18afdad77fb9562f67d53ee6545e24932d32d1ebf130da70403",purpleBlue:"dbdaebc8cee4b1c3de97b7d87bacd15b9fc93a90c01e7fb70b70ab056199045281",purpleBlueGreen:"dbd8eac8cee4b0c3de93b7d872acd1549fc83892bb1c88a3097f8702736b016353",purpleRed:"dcc9e2d3b3d7ce9eccd186c0da6bb2e14da0e23189d91e6fc61159ab07498f023a",redPurple:"fccfccfcbec0faa9b8f98faff571a5ec539ddb3695c41b8aa908808d0179700174",yellowGreen:"e4f4acd1eca0b9e2949ed68880c97c62bb6e47aa5e3297502083440e723b036034",yellowOrangeBrown:"feeaa1fedd84fecc63feb746fca031f68921eb7215db5e0bc54c05ab3d038f3204",yellowOrange
Red:"fee087fed16ffebd59fea849fd903efc7335f9522bee3423de1b20ca0b22af0225",blueOrange:"134b852f78b35da2cb9dcae1d2e5eff2f0ebfce0bafbbf74e8932fc5690d994a07",brownBlueGreen:"704108a0651ac79548e3c78af3e6c6eef1eac9e9e48ed1c74da79e187a72025147",purpleGreen:"5b1667834792a67fb6c9aed3e6d6e8eff0efd9efd5aedda971bb75368e490e5e29",purpleOrange:"4114696647968f83b7b9b4d6dadbebf3eeeafce0bafbbf74e8932fc5690d994a07",redBlue:"8c0d25bf363adf745ef4ae91fbdbc9f2efeed2e5ef9dcae15da2cb2f78b3134b85",redGrey:"8c0d25bf363adf745ef4ae91fcdccbfaf4f1e2e2e2c0c0c0969696646464343434",yellowGreenBlue:"eff9bddbf1b4bde5b594d5b969c5be45b4c22c9ec02182b82163aa23479c1c3185",redYellowBlue:"a50026d4322cf16e43fcac64fedd90faf8c1dcf1ecabd6e875abd04a74b4313695",redYellowGreen:"a50026d4322cf16e43fcac63fedd8df9f7aed7ee8ea4d86e64bc6122964f006837",pinkYellowGreen:"8e0152c0267edd72adf0b3d6faddedf5f3efe1f2cab6de8780bb474f9125276419",spectral:"9e0142d13c4bf0704afcac63fedd8dfbf8b0e0f3a1a9dda269bda94288b55e4fa2",viridis:"440154470e61481a6c482575472f7d443a834144873d4e8a39568c35608d31688e2d708e2a788e27818e23888e21918d1f988b1fa08822a8842ab07f35b77943bf7154c56866cc5d7ad1518fd744a5db36bcdf27d2e21be9e51afde725",magma:"0000040404130b0924150e3720114b2c11603b0f704a107957157e651a80721f817f24828c29819a2e80a8327db6377ac43c75d1426fde4968e95462f1605df76f5cfa7f5efc8f65fe9f6dfeaf78febf84fece91fddea0fcedaffcfdbf",inferno:"0000040403130c0826170c3b240c4f330a5f420a68500d6c5d126e6b176e781c6d86216b932667a12b62ae305cbb3755c73e4cd24644dd513ae65c30ed6925f3771af8850ffb9506fca50afcb519fac62df6d645f2e661f3f484fcffa4",plasma:"0d088723069033059742039d5002a25d01a66a00a87801a88405a7900da49c179ea72198b12a90ba3488c33d80cb4779d35171da5a69e16462e76e5bed7953f2834cf68f44fa9a3dfca636fdb32ffec029fcce25f9dc24f5ea27f0f921",cividis:"00205100235800265d002961012b65042e670831690d346b11366c16396d1c3c6e213f6e26426e2c456e31476e374a6e3c4d6e42506e47536d4c566d51586e555b6e5a5e6e5e616e62646f66676f6a6a706e6d717270717573727976737c79747f7c75827f758682768985778c8877908b78938e789691789a94789e9778a19b78a59e77a9a177aea575b2a874b6ab73bbaf71c0b26fc5b66dc9b96acebd68d3c065d8c462ddc85fe2cb5ce7cf58ebd355f0d652f3da4ff7de4cfae249fce647",rainbow:"6e40aa883eb1a43db3bf3cafd83fa4ee4395fe4b83ff576eff6659ff7847ff8c38f3a130e2b72fcfcc36bee044aff05b8ff4576ff65b52f6673af27828ea8d1ddfa319d0b81cbecb23abd82f96e03d82e14c6edb5a5dd0664dbf6e40aa",sinebow:"ff4040fc582af47218e78d0bd5a703bfbf00a7d5038de70b72f41858fc2a40ff402afc5818f4720be78d03d5a700bfbf03a7d50b8de71872f42a58fc4040ff582afc7218f48d0be7a703d5bf00bfd503a7e70b8df41872fc2a58ff4040",turbo:"23171b32204a3e2a71453493493eae4b49c54a53d7485ee44569ee4074f53c7ff8378af93295f72e9ff42ba9ef28b3e926bce125c5d925cdcf27d5c629dcbc2de3b232e9a738ee9d3ff39347f68950f9805afc7765fd6e70fe667cfd5e88fc5795fb51a1f84badf545b9f140c5ec3cd0e637dae034e4d931ecd12ef4c92bfac029ffb626ffad24ffa223ff9821ff8d1fff821dff771cfd6c1af76118f05616e84b14df4111d5380fcb2f0dc0260ab61f07ac1805a313029b0f00950c00910b00",browns:"eedbbdecca96e9b97ae4a865dc9856d18954c7784cc0673fb85536ad44339f3632",tealBlues:"bce4d89dd3d181c3cb65b3c245a2b9368fae347da0306a932c5985",teals:"bbdfdfa2d4d58ac9c975bcbb61b0af4da5a43799982b8b8c1e7f7f127273006667",warmGreys:"dcd4d0cec5c1c0b8b4b3aaa7a59c9998908c8b827f7e7673726866665c5a59504e",goldGreen:"f4d166d5ca60b6c35c98bb597cb25760a6564b9c533f8f4f33834a257740146c36",goldOrange:"f4d166f8be5cf8aa4cf5983bf3852aef701be2621fd65322c54923b142239e3a26",goldRed:"f4d166f6be59f9aa51fc964ef6834bee734ae56249db5247cf4244c43141b71d3e",lightGreyRed:"efe9e6e1dad7d5cbc8c8bdb9bbaea9cd967ddc7b43e15f19df4011dc000b",lightGreyT
eal:"e4eaead6dcddc8ced2b7c2c7a6b4bc64b0bf22a6c32295c11f85be1876bc",lightMulti:"e0f1f2c4e9d0b0de9fd0e181f6e072f6c053f3993ef77440ef4a3c",lightOrange:"f2e7daf7d5baf9c499fab184fa9c73f68967ef7860e8645bde515bd43d5b",lightTealBlue:"e3e9e0c0dccf9aceca7abfc859afc0389fb9328dad2f7ca0276b95255988",darkBlue:"3232322d46681a5c930074af008cbf05a7ce25c0dd38daed50f3faffffff",darkGold:"3c3c3c584b37725e348c7631ae8b2bcfa424ecc31ef9de30fff184ffffff",darkGreen:"3a3a3a215748006f4d048942489e4276b340a6c63dd2d836ffeb2cffffaa",darkMulti:"3737371f5287197d8c29a86995ce3fffe800ffffff",darkRed:"3434347036339e3c38cc4037e75d1eec8620eeab29f0ce32ffeb2c"},(t=>yp(bp(t))));const Mp="symbol",Ep="discrete",Dp=t=>k(t)?t.map((t=>String(t))):String(t),Cp=(t,e)=>t[1]-e[1],Fp=(t,e)=>e[1]-t[1];function Sp(t,e,n){let r;return vt(e)&&(t.bins&&(e=Math.max(e,t.bins.length)),null!=n&&(e=Math.min(e,Math.floor(Dt(t.domain())/n||1)+1))),A(e)&&(r=e.step,e=e.interval),xt(e)&&(e=t.type===Rd?Cr(e):t.type==Ud?Fr(e):s("Only time and utc scales accept interval strings."),r&&(e=e.every(r))),e}function $p(t,e,n){let r=t.range(),i=r[0],o=F(r),a=Cp;if(i>o&&(r=o,o=i,i=r,a=Fp),i=Math.floor(i),o=Math.ceil(o),e=e.map((e=>[e,t(e)])).filter((t=>i<=t[1]&&t[1]<=o)).sort(a).map((t=>t[0])),n>0&&e.length>1){const t=[e[0],F(e)];for(;e.length>n&&e.length>=3;)e=e.filter(((t,e)=>!(e%2)));e.length<3&&(e=t)}return e}function Tp(t,e){return t.bins?$p(t,t.bins,e):t.ticks?t.ticks(e):t.domain()}function Bp(t,e,n,r,i,o){const a=e.type;let s=Dp;if(a===Rd||i===Rd)s=t.timeFormat(r);else if(a===Ud||i===Ud)s=t.utcFormat(r);else if(hp(a)){const i=t.formatFloat(r);if(o||e.bins)s=i;else{const t=zp(e,n,!1);s=e=>t(e)?i(e):""}}else if(e.tickFormat){const i=e.domain();s=t.formatSpan(i[0],i[i.length-1],n,r)}else r&&(s=t.format(r));return s}function zp(t,e,n){const r=Tp(t,e),i=t.base(),o=Math.log(i),a=Math.max(1,i*e/r.length),s=t=>{let e=t/Math.pow(i,Math.round(Math.log(t)/o));return e*iNp[t.type]||t.bins;function Lp(t,e,n,r,i,o,a){const s=Op[e.type]&&o!==Rd&&o!==Ud?function(t,e,n){const r=e[Op[e.type]](),i=r.length;let o,a=i>1?r[1]-r[0]:r[0];for(o=1;o(e,n,r)=>{const i=Pp(r[n+1],Pp(r.max,1/0)),o=Wp(e,t),a=Wp(i,t);return o&&a?o+" – "+a:a?"< "+a:"≥ "+o},Pp=(t,e)=>null!=t?t:e,jp=t=>(e,n)=>n?t(e):null,Ip=t=>e=>t(e),Wp=(t,e)=>Number.isFinite(t)?e(t):null;function Hp(t,e,n,r){const i=r||e.type;return xt(n)&&function(t){return up(t,Qd)}(i)&&(n=n.replace(/%a/g,"%A").replace(/%b/g,"%B")),n||i!==Rd?n||i!==Ud?Lp(t,e,5,null,n,r,!0):t.utcFormat("%A, %d %B %Y, %X UTC"):t.timeFormat("%A, %d %B %Y, %X")}function Yp(t,e,n){n=n||{};const r=Math.max(3,n.maxlen||7),i=Hp(t,e,n.format,n.formatType);if(fp(e.type)){const t=Rp(e).slice(1).map(i),n=t.length;return`${n} boundar${1===n?"y":"ies"}: ${t.join(", ")}`}if(cp(e.type)){const t=e.domain(),n=t.length;return`${n} value${1===n?"":"s"}: ${n>r?t.slice(0,r-2).map(i).join(", ")+", ending with "+t.slice(-1).map(i):t.map(i).join(", ")}`}{const t=e.domain();return`values from ${i(t[0])} to ${i(F(t))}`}}let Gp=0;const Vp="p_";function Xp(t){return t&&t.gradient}function Jp(t,e,n){const r=t.gradient;let i=t.id,o="radial"===r?Vp:"";return i||(i=t.id="gradient_"+Gp++,"radial"===r?(t.x1=Zp(t.x1,.5),t.y1=Zp(t.y1,.5),t.r1=Zp(t.r1,0),t.x2=Zp(t.x2,.5),t.y2=Zp(t.y2,.5),t.r2=Zp(t.r2,.5),o=Vp):(t.x1=Zp(t.x1,0),t.y1=Zp(t.y1,0),t.x2=Zp(t.x2,1),t.y2=Zp(t.y2,0))),e[i]=t,"url("+(n||"")+"#"+o+i+")"}function Zp(t,e){return null!=t?t:e}function Qp(t,e){var n,r=[];return n={gradient:"linear",x1:t?t[0]:0,y1:t?t[1]:0,x2:e?e[0]:1,y2:e?e[1]:0,stops:r,stop:function(t,e){return 
r.push({offset:t,color:e}),n}}}const Kp={basis:{curve:function(t){return new ec(t)}},"basis-closed":{curve:function(t){return new nc(t)}},"basis-open":{curve:function(t){return new rc(t)}},bundle:{curve:oc,tension:"beta",value:.85},cardinal:{curve:uc,tension:"tension",value:0},"cardinal-open":{curve:hc,tension:"tension",value:0},"cardinal-closed":{curve:cc,tension:"tension",value:0},"catmull-rom":{curve:gc,tension:"alpha",value:.5},"catmull-rom-closed":{curve:yc,tension:"alpha",value:.5},"catmull-rom-open":{curve:_c,tension:"alpha",value:.5},linear:{curve:Gl},"linear-closed":{curve:function(t){return new xc(t)}},monotone:{horizontal:function(t){return new Ec(t)},vertical:function(t){return new Mc(t)}},natural:{curve:function(t){return new Cc(t)}},step:{curve:function(t){return new Sc(t,.5)}},"step-after":{curve:function(t){return new Sc(t,1)}},"step-before":{curve:function(t){return new Sc(t,0)}}};function tg(t,e,n){var r=lt(Kp,t)&&Kp[t],i=null;return r&&(i=r.curve||r[e||"vertical"],r.tension&&null!=n&&(i=i[r.tension](n))),i}const eg={m:2,l:2,h:1,v:1,z:0,c:6,s:4,q:4,t:2,a:7},ng=/[mlhvzcsqta]([^mlhvzcsqta]+|$)/gi,rg=/^[+-]?(([0-9]*\.[0-9]+)|([0-9]+\.)|([0-9]+))([eE][+-]?[0-9]+)?/,ig=/^((\s+,?\s*)|(,\s*))/,og=/^[01]/;function ag(t){const e=[];return(t.match(ng)||[]).forEach((t=>{let n=t[0];const r=n.toLowerCase(),i=eg[r],o=function(t,e,n){const r=[];for(let i=0;e&&i1&&(g=Math.sqrt(g),n*=g,r*=g);const m=h/n,y=f/n,v=-f/r,_=h/r,x=m*s+y*u,b=v*s+_*u,w=m*t+y*e,k=v*t+_*e;let A=1/((w-x)*(w-x)+(k-b)*(k-b))-.25;A<0&&(A=0);let M=Math.sqrt(A);o==i&&(M=-M);const E=.5*(x+w)-M*(k-b),D=.5*(b+k)+M*(w-x),C=Math.atan2(b-D,x-E);let F=Math.atan2(k-D,w-E)-C;F<0&&1===o?F+=lg:F>0&&0===o&&(F-=lg);const S=Math.ceil(Math.abs(F/(ug+.001))),$=[];for(let t=0;t+t}function Fg(t,e,n){return Math.max(e,Math.min(t,n))}function Sg(){var t=Ag,e=Mg,n=Eg,r=Dg,i=Cg(0),o=i,a=i,s=i,u=null;function l(l,c,f){var h,d=null!=c?c:+t.call(this,l),p=null!=f?f:+e.call(this,l),g=+n.call(this,l),m=+r.call(this,l),y=Math.min(g,m)/2,v=Fg(+i.call(this,l),0,y),_=Fg(+o.call(this,l),0,y),x=Fg(+a.call(this,l),0,y),b=Fg(+s.call(this,l),0,y);if(u||(u=h=Rl()),v<=0&&_<=0&&x<=0&&b<=0)u.rect(d,p,g,m);else{var w=d+g,k=p+m;u.moveTo(d+v,p),u.lineTo(w-_,p),u.bezierCurveTo(w-kg*_,p,w,p+kg*_,w,p+_),u.lineTo(w,k-b),u.bezierCurveTo(w,k-kg*b,w-kg*b,k,w-b,k),u.lineTo(d+x,k),u.bezierCurveTo(d+kg*x,k,d,k-kg*x,d,k-x),u.lineTo(d,p+v),u.bezierCurveTo(d,p+kg*v,d+kg*v,p,d+v,p),u.closePath()}if(h)return u=null,h+""||null}return l.x=function(e){return arguments.length?(t=Cg(e),l):t},l.y=function(t){return arguments.length?(e=Cg(t),l):e},l.width=function(t){return arguments.length?(n=Cg(t),l):n},l.height=function(t){return arguments.length?(r=Cg(t),l):r},l.cornerRadius=function(t,e,n,r){return arguments.length?(i=Cg(t),o=null!=e?Cg(e):i,s=null!=n?Cg(n):i,a=null!=r?Cg(r):o,l):i},l.context=function(t){return arguments.length?(u=null==t?null:t,l):u},l}function $g(){var t,e,n,r,i,o,a,s,u=null;function l(t,e,n){const r=n/2;if(i){var l=a-e,c=t-o;if(l||c){var f=Math.hypot(l,c),h=(l/=f)*s,d=(c/=f)*s,p=Math.atan2(c,l);u.moveTo(o-h,a-d),u.lineTo(t-l*r,e-c*r),u.arc(t,e,r,p-Math.PI,p),u.lineTo(o+h,a+d),u.arc(o,a,s,p,p+Math.PI)}else u.arc(t,e,r,0,lg);u.closePath()}else i=1;o=t,a=e,s=r}function c(o){var a,s,c,f=o.length,h=!1;for(null==u&&(u=c=Rl()),a=0;a<=f;++a)!(at.x||0,zg=t=>t.y||0,Ng=t=>!(!1===t.defined),Og=function(){var t=Ll,e=ql,n=vl(0),r=null,i=Pl,o=jl,a=Il,s=null,u=Ul(l);function l(){var 
l,c,f=+t.apply(this,arguments),h=+e.apply(this,arguments),d=i.apply(this,arguments)-Cl,p=o.apply(this,arguments)-Cl,g=_l(p-d),m=p>d;if(s||(s=l=u()),hEl)if(g>Fl-El)s.moveTo(h*bl(d),h*Al(d)),s.arc(0,0,h,d,p,!m),f>El&&(s.moveTo(f*bl(p),f*Al(p)),s.arc(0,0,f,p,d,m));else{var y,v,_=d,x=p,b=d,w=p,k=g,A=g,M=a.apply(this,arguments)/2,E=M>El&&(r?+r.apply(this,arguments):Ml(f*f+h*h)),D=kl(_l(h-f)/2,+n.apply(this,arguments)),C=D,F=D;if(E>El){var S=Sl(E/f*Al(M)),$=Sl(E/h*Al(M));(k-=2*S)>El?(b+=S*=m?1:-1,w-=S):(k=0,b=w=(d+p)/2),(A-=2*$)>El?(_+=$*=m?1:-1,x-=$):(A=0,_=x=(d+p)/2)}var T=h*bl(_),B=h*Al(_),z=f*bl(w),N=f*Al(w);if(D>El){var O,R=h*bl(x),U=h*Al(x),L=f*bl(b),q=f*Al(b);if(g1?0:t<-1?Dl:Math.acos(t)}((P*I+j*W)/(Ml(P*P+j*j)*Ml(I*I+W*W)))/2),Y=Ml(O[0]*O[0]+O[1]*O[1]);C=kl(D,(f-Y)/(H-1)),F=kl(D,(h-Y)/(H+1))}else C=F=0}A>El?F>El?(y=Wl(L,q,T,B,h,F,m),v=Wl(R,U,z,N,h,F,m),s.moveTo(y.cx+y.x01,y.cy+y.y01),FEl&&k>El?C>El?(y=Wl(z,N,R,U,f,-C,m),v=Wl(T,B,L,q,f,-C,m),s.lineTo(y.cx+y.x01,y.cy+y.y01),Ct.startAngle||0)).endAngle((t=>t.endAngle||0)).padAngle((t=>t.padAngle||0)).innerRadius((t=>t.innerRadius||0)).outerRadius((t=>t.outerRadius||0)).cornerRadius((t=>t.cornerRadius||0)),Rg=Zl().x(Bg).y1(zg).y0((t=>(t.y||0)+(t.height||0))).defined(Ng),Ug=Zl().y(zg).x1(Bg).x0((t=>(t.x||0)+(t.width||0))).defined(Ng),Lg=Jl().x(Bg).y(zg).defined(Ng),qg=Sg().x(Bg).y(zg).width((t=>t.width||0)).height((t=>t.height||0)).cornerRadius((t=>Tg(t.cornerRadiusTopLeft,t.cornerRadius)||0),(t=>Tg(t.cornerRadiusTopRight,t.cornerRadius)||0),(t=>Tg(t.cornerRadiusBottomRight,t.cornerRadius)||0),(t=>Tg(t.cornerRadiusBottomLeft,t.cornerRadius)||0)),Pg=function(t,e){let n=null,r=Ul(i);function i(){let i;if(n||(n=i=r()),t.apply(this,arguments).draw(n,+e.apply(this,arguments)),i)return n=null,i+""||null}return t="function"==typeof t?t:vl(t||Ql),e="function"==typeof e?e:vl(void 0===e?64:+e),i.type=function(e){return arguments.length?(t="function"==typeof e?e:vl(e),i):t},i.size=function(t){return arguments.length?(e="function"==typeof t?t:vl(+t),i):e},i.context=function(t){return arguments.length?(n=null==t?null:t,i):n},i}().type((t=>bg(t.shape||"circle"))).size((t=>Tg(t.size,64))),jg=$g().x(Bg).y(zg).defined(Ng).size((t=>t.size||1));function Ig(t){return t.cornerRadius||t.cornerRadiusTopLeft||t.cornerRadiusTopRight||t.cornerRadiusBottomRight||t.cornerRadiusBottomLeft}function Wg(t,e,n,r){return qg.context(t)(e,n,r)}var Hg=1;function Yg(){Hg=1}function Gg(t,e,n){var r=e.clip,i=t._defs,o=e.clip_id||(e.clip_id="clip"+Hg++),a=i.clipping[o]||(i.clipping[o]={id:o});return J(r)?a.path=r(null):Ig(n)?a.path=Wg(null,n,0,0):(a.width=n.width||0,a.height=n.height||0),"url(#"+o+")"}function Vg(t){this.clear(),t&&this.union(t)}function Xg(t){this.mark=t,this.bounds=this.bounds||new Vg}function Jg(t){Xg.call(this,t),this.items=this.items||[]}Vg.prototype={clone(){return new Vg(this)},clear(){return this.x1=+Number.MAX_VALUE,this.y1=+Number.MAX_VALUE,this.x2=-Number.MAX_VALUE,this.y2=-Number.MAX_VALUE,this},empty(){return this.x1===+Number.MAX_VALUE&&this.y1===+Number.MAX_VALUE&&this.x2===-Number.MAX_VALUE&&this.y2===-Number.MAX_VALUE},equals(t){return this.x1===t.x1&&this.y1===t.y1&&this.x2===t.x2&&this.y2===t.y2},set(t,e,n,r){return nthis.x2&&(this.x2=t),e>this.y2&&(this.y2=e),this},expand(t){return this.x1-=t,this.y1-=t,this.x2+=t,this.y2+=t,this},round(){return this.x1=Math.floor(this.x1),this.y1=Math.floor(this.y1),this.x2=Math.ceil(this.x2),this.y2=Math.ceil(this.y2),this},scale(t){return this.x1*=t,this.y1*=t,this.x2*=t,this.y2*=t,this},translate(t,e){return 
this.x1+=t,this.x2+=t,this.y1+=e,this.y2+=e,this},rotate(t,e,n){const r=this.rotatedPoints(t,e,n);return this.clear().add(r[0],r[1]).add(r[2],r[3]).add(r[4],r[5]).add(r[6],r[7])},rotatedPoints(t,e,n){var{x1:r,y1:i,x2:o,y2:a}=this,s=Math.cos(t),u=Math.sin(t),l=e-e*s+n*u,c=n-e*u-n*s;return[s*r-u*i+l,u*r+s*i+c,s*r-u*a+l,u*r+s*a+c,s*o-u*i+l,u*o+s*i+c,s*o-u*a+l,u*o+s*a+c]},union(t){return t.x1this.x2&&(this.x2=t.x2),t.y2>this.y2&&(this.y2=t.y2),this},intersect(t){return t.x1>this.x1&&(this.x1=t.x1),t.y1>this.y1&&(this.y1=t.y1),t.x2=t.x2&&this.y1<=t.y1&&this.y2>=t.y2},alignsWith(t){return t&&(this.x1==t.x1||this.x2==t.x2||this.y1==t.y1||this.y2==t.y2)},intersects(t){return t&&!(this.x2t.x2||this.y2t.y2)},contains(t,e){return!(tthis.x2||ethis.y2)},width(){return this.x2-this.x1},height(){return this.y2-this.y1}},dt(Jg,Xg);class Zg{constructor(t){this._pending=0,this._loader=t||fa()}pending(){return this._pending}sanitizeURL(t){const e=this;return Qg(e),e._loader.sanitize(t,{context:"href"}).then((t=>(Kg(e),t))).catch((()=>(Kg(e),null)))}loadImage(t){const e=this,n=Tc();return Qg(e),e._loader.sanitize(t,{context:"image"}).then((t=>{const r=t.href;if(!r||!n)throw{url:r};const i=new n,o=lt(t,"crossOrigin")?t.crossOrigin:"anonymous";return null!=o&&(i.crossOrigin=o),i.onload=()=>Kg(e),i.onerror=()=>Kg(e),i.src=r,i})).catch((t=>(Kg(e),{complete:!1,width:0,height:0,src:t&&t.url||""})))}ready(){const t=this;return new Promise((e=>{!function n(r){t.pending()?setTimeout((()=>{n(!0)}),10):e(r)}(!1)}))}}function Qg(t){t._pending+=1}function Kg(t){t._pending-=1}function tm(t,e,n){if(e.stroke&&0!==e.opacity&&0!==e.strokeOpacity){const r=null!=e.strokeWidth?+e.strokeWidth:1;t.expand(r+(n?function(t,e){return t.strokeJoin&&"miter"!==t.strokeJoin?0:e}(e,r):0))}return t}const em=lg-1e-8;let nm,rm,im,om,am,sm,um,lm;const cm=(t,e)=>nm.add(t,e),fm=(t,e)=>cm(rm=t,im=e),hm=t=>cm(t,nm.y1),dm=t=>cm(nm.x1,t),pm=(t,e)=>am*t+um*e,gm=(t,e)=>sm*t+lm*e,mm=(t,e)=>cm(pm(t,e),gm(t,e)),ym=(t,e)=>fm(pm(t,e),gm(t,e));function vm(t,e){return nm=t,e?(om=e*sg,am=lm=Math.cos(om),sm=Math.sin(om),um=-sm):(am=lm=1,om=sm=um=0),_m}const _m={beginPath(){},closePath(){},moveTo:ym,lineTo:ym,rect(t,e,n,r){om?(mm(t+n,e),mm(t+n,e+r),mm(t,e+r),ym(t,e)):(cm(t+n,e+r),fm(t,e))},quadraticCurveTo(t,e,n,r){const i=pm(t,e),o=gm(t,e),a=pm(n,r),s=gm(n,r);xm(rm,i,a,hm),xm(im,o,s,dm),fm(a,s)},bezierCurveTo(t,e,n,r,i,o){const a=pm(t,e),s=gm(t,e),u=pm(n,r),l=gm(n,r),c=pm(i,o),f=gm(i,o);bm(rm,a,u,c,hm),bm(im,s,l,f,dm),fm(c,f)},arc(t,e,n,r,i,o){if(r+=om,i+=om,rm=n*Math.cos(i)+t,im=n*Math.sin(i)+e,Math.abs(i-r)>em)cm(t-n,e-n),cm(t+n,e+n);else{const a=r=>cm(n*Math.cos(r)+t,n*Math.sin(r)+e);let s,u;if(a(r),a(i),i!==r)if((r%=lg)<0&&(r+=lg),(i%=lg)<0&&(i+=lg),ii;++u,s-=ug)a(s);else for(s=r-r%ug+ug,u=0;u<4&&s1e-14?(u=a*a+s*o,u>=0&&(u=Math.sqrt(u),l=(-a+u)/o,c=(-a-u)/o)):l=.5*s/a,0m)return!1;d>g&&(g=d)}else if(f>0){if(d0&&(t.globalAlpha=n,t.fillStyle=Bm(t,e,e.fill),!0)}var Nm=[];function Om(t,e,n){var r=null!=(r=e.strokeWidth)?r:1;return!(r<=0)&&((n*=null==e.strokeOpacity?1:e.strokeOpacity)>0&&(t.globalAlpha=n,t.strokeStyle=Bm(t,e,e.stroke),t.lineWidth=r,t.lineCap=e.strokeCap||"butt",t.lineJoin=e.strokeJoin||"miter",t.miterLimit=e.strokeMiterLimit||10,t.setLineDash&&(t.setLineDash(e.strokeDash||Nm),t.lineDashOffset=e.strokeDashOffset||0),!0))}function Rm(t,e){return t.zindex-e.zindex||t.index-e.index}function Um(t){if(!t.zdirty)return t.zitems;var e,n,r,i=t.items,o=[];for(n=0,r=i.length;n=0;)if(n=e(i[r]))return 
n;if(i===o)for(r=(i=t.items).length;--r>=0;)if(!i[r].zindex&&(n=e(i[r])))return n;return null}function Pm(t){return function(e,n,r){Lm(n,(n=>{r&&!r.intersects(n.bounds)||Im(t,e,n,n)}))}}function jm(t){return function(e,n,r){!n.items.length||r&&!r.intersects(n.bounds)||Im(t,e,n.items[0],n.items)}}function Im(t,e,n,r){var i=null==n.opacity?1:n.opacity;0!==i&&(t(e,r)||(Sm(e,n),n.fill&&zm(e,n,i)&&e.fill(),n.stroke&&Om(e,n,i)&&e.stroke()))}function Wm(t){return t=t||p,function(e,n,r,i,o,a){return r*=e.pixelRatio,i*=e.pixelRatio,qm(n,(n=>{const s=n.bounds;if((!s||s.contains(o,a))&&s)return t(e,n,r,i,o,a)?n:void 0}))}}function Hm(t,e){return function(n,r,i,o){var a,s,u=Array.isArray(r)?r[0]:r,l=null==e?u.fill:e,c=u.stroke&&n.isPointInStroke;return c&&(a=u.strokeWidth,s=u.strokeCap,n.lineWidth=null!=a?a:1,n.lineCap=null!=s?s:"butt"),!t(n,r)&&(l&&n.isPointInPath(i,o)||c&&n.isPointInStroke(i,o))}}function Ym(t){return Wm(Hm(t))}function Gm(t,e){return"translate("+t+","+e+")"}function Vm(t){return"rotate("+t+")"}function Xm(t){return Gm(t.x||0,t.y||0)}function Jm(t,e,n){function r(t,n){var r=n.x||0,i=n.y||0,o=n.angle||0;t.translate(r,i),o&&t.rotate(o*=sg),t.beginPath(),e(t,n),o&&t.rotate(-o),t.translate(-r,-i)}return{type:t,tag:"path",nested:!1,attr:function(t,n){t("transform",function(t){return Gm(t.x||0,t.y||0)+(t.angle?" "+Vm(t.angle):"")}(n)),t("d",e(null,n))},bound:function(t,n){return e(vm(t,n.angle),n),tm(t,n).translate(n.x||0,n.y||0)},draw:Pm(r),pick:Ym(r),isect:n||Mm(r)}}var Zm=Jm("arc",(function(t,e){return Og.context(t)(e)}));function Qm(t,e,n){function r(t,n){t.beginPath(),e(t,n)}const i=Hm(r);return{type:t,tag:"path",nested:!0,attr:function(t,n){var r=n.mark.items;r.length&&t("d",e(null,r))},bound:function(t,n){var r=n.items;return 0===r.length?t:(e(vm(t),r),tm(t,r[0]))},draw:jm(r),pick:function(t,e,n,r,o,a){var s=e.items,u=e.bounds;return!s||!s.length||u&&!u.contains(o,a)?null:(n*=t.pixelRatio,r*=t.pixelRatio,i(t,s,n,r)?s[0]:null)},isect:Em,tip:n}}var Km=Qm("area",(function(t,e){const n=e[0],r=n.interpolate||"linear";return("horizontal"===n.orient?Ug:Rg).curve(tg(r,n.orient,n.tension)).context(t)(e)}),(function(t,e){for(var n,r,i="horizontal"===t[0].orient?e[1]:e[0],o="horizontal"===t[0].orient?"y":"x",a=t.length,s=1/0;--a>=0;)!1!==t[a].defined&&(r=Math.abs(t[a][o]-i)).5&&e<1.5?.5-Math.abs(e-1):0}function ny(t,e){const n=ey(e);t("d",Wg(null,e,n,n))}function ry(t,e,n,r){const i=ey(e);t.beginPath(),Wg(t,e,(n||0)+i,(r||0)+i)}const iy=Hm(ry),oy=Hm(ry,!1),ay=Hm(ry,!0);var sy={type:"group",tag:"g",nested:!1,attr:function(t,e){t("transform",Xm(e))},bound:function(t,e){if(!e.clip&&e.items){const n=e.items,r=n.length;for(let e=0;e{const i=e.x||0,o=e.y||0,a=e.strokeForeground,s=null==e.opacity?1:e.opacity;(e.stroke||e.fill)&&s&&(ry(t,e,i,o),Sm(t,e),e.fill&&zm(t,e,s)&&t.fill(),e.stroke&&!a&&Om(t,e,s)&&t.stroke()),t.save(),t.translate(i,o),e.clip&&ty(t,e),n&&n.translate(-i,-o),Lm(e,(e=>{("group"===e.marktype||null==r||r.includes(e.marktype))&&this.draw(t,e,n,r)})),n&&n.translate(i,o),t.restore(),a&&e.stroke&&s&&(ry(t,e,i,o),Sm(t,e),Om(t,e,s)&&t.stroke())}))},pick:function(t,e,n,r,i,o){if(e.bounds&&!e.bounds.contains(i,o)||!e.items)return null;const a=n*t.pixelRatio,s=r*t.pixelRatio;return qm(e,(u=>{let l,c,f;const h=u.bounds;if(h&&!h.contains(i,o))return;c=u.x||0,f=u.y||0;const d=c+(u.width||0),p=f+(u.height||0),g=u.clip;if(g&&(id||op))return;if(t.save(),t.translate(c,f),c=i-c,f=o-f,g&&Ig(u)&&!ay(t,u,a,s))return t.restore(),null;const m=u.strokeForeground,y=!1!==e.interactive;return 
y&&m&&u.stroke&&oy(t,u,a,s)?(t.restore(),u):(l=qm(u,(t=>function(t,e,n){return(!1!==t.interactive||"group"===t.marktype)&&t.bounds&&t.bounds.contains(e,n)}(t,c,f)?this.pick(t,n,r,c,f):null)),!l&&y&&(u.fill||!m&&u.stroke)&&iy(t,u,a,s)&&(l=u),t.restore(),l||null)}))},isect:Dm,content:function(t,e,n){t("clip-path",e.clip?Gg(n,e,e):null)},background:function(t,e){t("class","background"),t("aria-hidden",!0),ny(t,e)},foreground:function(t,e){t("class","foreground"),t("aria-hidden",!0),e.strokeForeground?ny(t,e):t("d","")}},uy={xmlns:"http://www.w3.org/2000/svg","xmlns:xlink":"http://www.w3.org/1999/xlink",version:"1.1"};function ly(t,e){var n=t.image;return(!n||t.url&&t.url!==n.url)&&(n={complete:!1,width:0,height:0},e.loadImage(t.url).then((e=>{t.image=e,t.image.url=t.url}))),n}function cy(t,e){return null!=t.width?t.width:e&&e.width?!1!==t.aspect&&t.height?t.height*e.width/e.height:e.width:0}function fy(t,e){return null!=t.height?t.height:e&&e.height?!1!==t.aspect&&t.width?t.width*e.height/e.width:e.height:0}function hy(t,e){return"center"===t?e/2:"right"===t?e:0}function dy(t,e){return"middle"===t?e/2:"bottom"===t?e:0}var py={type:"image",tag:"image",nested:!1,attr:function(t,e,n){const r=ly(e,n),i=cy(e,r),o=fy(e,r),a=(e.x||0)-hy(e.align,i),s=(e.y||0)-dy(e.baseline,o);t("href",!r.src&&r.toDataURL?r.toDataURL():r.src||"",uy["xmlns:xlink"],"xlink:href"),t("transform",Gm(a,s)),t("width",i),t("height",o),t("preserveAspectRatio",!1===e.aspect?"none":"xMidYMid")},bound:function(t,e){const n=e.image,r=cy(e,n),i=fy(e,n),o=(e.x||0)-hy(e.align,r),a=(e.y||0)-dy(e.baseline,i);return t.set(o,a,o+r,a+i)},draw:function(t,e,n){Lm(e,(e=>{if(n&&!n.intersects(e.bounds))return;const r=ly(e,this);let i=cy(e,r),o=fy(e,r);if(0===i||0===o)return;let a,s,u,l,c=(e.x||0)-hy(e.align,i),f=(e.y||0)-dy(e.baseline,o);!1!==e.aspect&&(s=r.width/r.height,u=e.width/e.height,s==s&&u==u&&s!==u&&(u=0;)if(!1!==t[o].defined&&(n=t[o].x-e[0])*n+(r=t[o].y-e[1])*r{if(!n||n.intersects(e.bounds)){var r=null==e.opacity?1:e.opacity;r&&xy(t,e,r)&&(Sm(t,e),t.stroke())}}))},pick:Wm((function(t,e,n,r){return!!t.isPointInStroke&&(xy(t,e,1)&&t.isPointInStroke(n,r))})),isect:Cm},wy=Jm("shape",(function(t,e){return(e.mark.shape||e.shape).context(t)(e)})),ky=Jm("symbol",(function(t,e){return Pg.context(t)(e)}),Em);const Ay=kt();var My={height:$y,measureWidth:Fy,estimateWidth:Dy,width:Dy,canvas:Ey};function Ey(t){My.width=t&&km?Fy:Dy}function Dy(t,e){return Cy(Ny(t,e),$y(t))}function Cy(t,e){return~~(.8*t.length*e)}function Fy(t,e){return $y(t)<=0||!(e=Ny(t,e))?0:Sy(e,Ry(t))}function Sy(t,e){const n=`(${e}) ${t}`;let r=Ay.get(n);return void 0===r&&(km.font=e,r=km.measureText(t).width,Ay.set(n,r)),r}function $y(t){return null!=t.fontSize?+t.fontSize||0:11}function Ty(t){return null!=t.lineHeight?t.lineHeight:$y(t)+2}function By(t){return e=t.lineBreak&&t.text&&!k(t.text)?t.text.split(t.lineBreak):t.text,k(e)?e.length>1?e:e[0]:e;var e}function zy(t){const e=By(t);return(k(e)?e.length-1:0)*Ty(t)}function Ny(t,e){const n=null==e?"":(e+"").trim();return t.limit>0&&n.length?function(t,e){var n=+t.limit,r=function(t){if(My.width===Fy){const e=Ry(t);return t=>Sy(t,e)}if(My.width===Dy){const e=$y(t);return t=>Cy(t,e)}return e=>My.width(t,e)}(t);if(r(e)>>1,r(e.slice(i))>n?s=i+1:u=i;return o+e.slice(s)}for(;s>>1),r(e.slice(0,i))Math.max(t,My.width(e,n))),0)):r=My.width(e,f),"center"===o?l-=r/2:"right"===o&&(l-=r),t.set(l+=s,c+=u,l+r,c+i),e.angle&&!n)t.rotate(e.angle*sg,s,u);else if(2===n)return t.rotatedPoints(e.angle*sg,s,u);return t}var 
Iy={type:"text",tag:"text",nested:!1,attr:function(t,e){var n,r=e.dx||0,i=(e.dy||0)+Uy(e),o=Py(e),a=o.x1,s=o.y1,u=e.angle||0;t("text-anchor",Ly[e.align]||"start"),u?(n=Gm(a,s)+" "+Vm(u),(r||i)&&(n+=" "+Gm(r,i))):n=Gm(a+r,s+i),t("transform",n)},bound:jy,draw:function(t,e,n){Lm(e,(e=>{var r,i,o,a,s,u,l,c=null==e.opacity?1:e.opacity;if(!(n&&!n.intersects(e.bounds)||0===c||e.fontSize<=0||null==e.text||0===e.text.length)){if(t.font=Ry(e),t.textAlign=e.align||"left",i=(r=Py(e)).x1,o=r.y1,e.angle&&(t.save(),t.translate(i,o),t.rotate(e.angle*sg),i=o=0),i+=e.dx||0,o+=(e.dy||0)+Uy(e),u=By(e),Sm(t,e),k(u))for(s=Ty(e),a=0;a=0;)if(!1!==t[i].defined&&(n=t[i].x-e[0])*n+(r=t[i].y-e[1])*r<(n=t[i].size||1)*n)return t[i];return null})),Hy={arc:Zm,area:Km,group:sy,image:py,line:gy,path:yy,rect:_y,rule:by,shape:wy,symbol:ky,text:Iy,trail:Wy};function Yy(t,e,n){var r=Hy[t.mark.marktype],i=e||r.bound;return r.nested&&(t=t.mark),i(t.bounds||(t.bounds=new Vg),t,n)}var Gy={mark:null};function Vy(t,e,n){var r,i,o,a,s=Hy[t.marktype],u=s.bound,l=t.items,c=l&&l.length;if(s.nested)return c?o=l[0]:(Gy.mark=t,o=Gy),a=Yy(o,u,n),e=e&&e.union(a)||a;if(e=e||t.bounds&&t.bounds.clear()||new Vg,c)for(r=0,i=l.length;re;)t.removeChild(n[--r]);return t}function ov(t){return"mark-"+t.marktype+(t.role?" role-"+t.role:"")+(t.name?" "+t.name:"")}function av(t,e){const n=e.getBoundingClientRect();return[t.clientX-n.left-(e.clientLeft||0),t.clientY-n.top-(e.clientTop||0)]}class sv{constructor(t,e){this._active=null,this._handlers={},this._loader=t||fa(),this._tooltip=e||uv}initialize(t,e,n){return this._el=t,this._obj=n||null,this.origin(e)}element(){return this._el}canvas(){return this._el&&this._el.firstChild}origin(t){return arguments.length?(this._origin=t||[0,0],this):this._origin.slice()}scene(t){return arguments.length?(this._scene=t,this):this._scene}on(){}off(){}_handlerIndex(t,e,n){for(let r=t?t.length:0;--r>=0;)if(t[r].type===e&&(!n||t[r].handler===n))return r;return-1}handlers(t){const e=this._handlers,n=[];if(t)n.push(...e[this.eventName(t)]);else for(const t in e)n.push(...e[t]);return n}eventName(t){const e=t.indexOf(".");return e<0?t:t.slice(0,e)}handleHref(t,e,n){this._loader.sanitize(n,{context:"href"}).then((e=>{const n=new MouseEvent(t.type,t),r=ev(null,"a");for(const t in e)r.setAttribute(t,e[t]);r.dispatchEvent(n)})).catch((()=>{}))}handleTooltip(t,e,n){if(e&&null!=e.tooltip){e=function(t,e,n,r){var i,o,a=t&&t.mark;if(a&&(i=Hy[a.marktype]).tip){for((o=av(e,n))[0]-=r[0],o[1]-=r[1];t=t.mark.group;)o[0]-=t.x||0,o[1]-=t.y||0;t=i.tip(a.items,o)}return t}(e,t,this.canvas(),this._origin);const r=n&&e&&e.tooltip||null;this._tooltip.call(this._obj,this,t,e,r)}}getItemBoundingClientRect(t){const e=this.canvas();if(!e)return;const n=e.getBoundingClientRect(),r=this._origin,i=t.bounds,o=i.width(),a=i.height();let s=i.x1+r[0]+n.left,u=i.y1+r[1]+n.top;for(;t.mark&&(t=t.mark.group);)s+=t.x||0,u+=t.y||0;return{x:s,y:u,width:o,height:a,left:s,top:u,right:s+o,bottom:u+a}}}function uv(t,e,n,r){t.element().setAttribute("title",r||"")}class lv{constructor(t){this._el=null,this._bgcolor=null,this._loader=new Zg(t)}initialize(t,e,n,r,i){return this._el=t,this.resize(e,n,r,i)}element(){return this._el}canvas(){return this._el&&this._el.firstChild}background(t){return 0===arguments.length?this._bgcolor:(this._bgcolor=t,this)}resize(t,e,n,r){return this._width=t,this._height=e,this._origin=n||[0,0],this._scale=r||1,this}dirty(){}render(t,e){const n=this;return 
n._call=function(){n._render(t,e)},n._call(),n._call=null,n}_render(){}renderAsync(t,e){const n=this.render(t,e);return this._ready?this._ready.then((()=>n)):Promise.resolve(n)}_load(t,e){var n=this,r=n._loader[t](e);if(!n._ready){const t=n._call;n._ready=n._loader.ready().then((e=>{e&&t(),n._ready=null}))}return r}sanitizeURL(t){return this._load("sanitizeURL",t)}loadImage(t){return this._load("loadImage",t)}}const cv="dragenter",fv="dragleave",hv="dragover",dv="pointerdown",pv="pointermove",gv="pointerout",mv="pointerover",yv="mousedown",vv="mousemove",_v="mouseout",xv="mouseover",bv="click",wv="mousewheel",kv="touchstart",Av="touchmove",Mv="touchend",Ev=["keydown","keypress","keyup",cv,fv,hv,dv,"pointerup",pv,gv,mv,yv,"mouseup",vv,_v,xv,bv,"dblclick","wheel",wv,kv,Av,Mv],Dv=pv,Cv=_v,Fv=bv;class Sv extends sv{constructor(t,e){super(t,e),this._down=null,this._touch=null,this._first=!0,this._events={},this.events=Ev,this.pointermove=zv([pv,vv],[mv,xv],[gv,_v]),this.dragover=zv([hv],[cv],[fv]),this.pointerout=Nv([gv,_v]),this.dragleave=Nv([fv])}initialize(t,e,n){return this._canvas=t&&nv(t,"canvas"),[bv,yv,dv,pv,gv,fv].forEach((t=>Tv(this,t))),super.initialize(t,e,n)}canvas(){return this._canvas}context(){return this._canvas.getContext("2d")}DOMMouseScroll(t){this.fire(wv,t)}pointerdown(t){this._down=this._active,this.fire(dv,t)}mousedown(t){this._down=this._active,this.fire(yv,t)}click(t){this._down===this._active&&(this.fire(bv,t),this._down=null)}touchstart(t){this._touch=this.pickEvent(t.changedTouches[0]),this._first&&(this._active=this._touch,this._first=!1),this.fire(kv,t,!0)}touchmove(t){this.fire(Av,t,!0)}touchend(t){this.fire(Mv,t,!0),this._touch=null}fire(t,e,n){const r=n?this._touch:this._active,i=this._handlers[t];if(e.vegaType=t,t===Fv&&r&&r.href?this.handleHref(e,r,r.href):t!==Dv&&t!==Cv||this.handleTooltip(e,r,t!==Cv),i)for(let t=0,n=i.length;t=0&&r.splice(i,1),this}pickEvent(t){const e=av(t,this._canvas),n=this._origin;return this.pick(this._scene,e[0],e[1],e[0]-n[0],e[1]-n[1])}pick(t,e,n,r,i){const o=this.context();return Hy[t.marktype].pick.call(this,o,t,e,n,r,i)}}const $v=t=>t===kv||t===Av||t===Mv?[kv,Av,Mv]:[t];function Tv(t,e){$v(e).forEach((e=>function(t,e){const n=t.canvas();n&&!t._events[e]&&(t._events[e]=1,n.addEventListener(e,t[e]?n=>t[e](n):n=>t.fire(e,n)))}(t,e)))}function Bv(t,e,n){e.forEach((e=>t.fire(e,n)))}function zv(t,e,n){return function(r){const i=this._active,o=this.pickEvent(r);o===i||(i&&i.exit||Bv(this,n,r),this._active=o,Bv(this,e,r)),Bv(this,t,r)}}function Nv(t){return function(e){Bv(this,t,e),this._active=null}}function Ov(t,e,n,r,i,o){const a="undefined"!=typeof HTMLElement&&t instanceof HTMLElement&&null!=t.parentNode,s=t.getContext("2d"),u=a?"undefined"!=typeof window&&window.devicePixelRatio||1:i;t.width=e*u,t.height=n*u;for(const t in o)s[t]=o[t];return a&&1!==u&&(t.style.width=e+"px",t.style.height=n+"px"),s.pixelRatio=u,s.setTransform(u,0,0,u,u*r[0],u*r[1]),t}class Rv extends lv{constructor(t){super(t),this._options={},this._redraw=!1,this._dirty=new Vg,this._tempb=new Vg}initialize(t,e,n,r,i,o){return this._options=o||{},this._canvas=this._options.externalContext?null:$c(1,1,this._options.type),t&&this._canvas&&(iv(t,0).appendChild(this._canvas),this._canvas.setAttribute("class","marks")),super.initialize(t,e,n,r,i)}resize(t,e,n,r){if(super.resize(t,e,n,r),this._canvas)Ov(this._canvas,this._width,this._height,this._origin,this._scale,this._options.context);else{const t=this._options.externalContext;t||s("CanvasRenderer is missing a valid 
canvas or context"),t.scale(this._scale,this._scale),t.translate(this._origin[0],this._origin[1])}return this._redraw=!0,this}canvas(){return this._canvas}context(){return this._options.externalContext||(this._canvas?this._canvas.getContext("2d"):null)}dirty(t){const e=this._tempb.clear().union(t.bounds);let n=t.mark.group;for(;n;)e.translate(n.x||0,n.y||0),n=n.mark.group;this._dirty.union(e)}_render(t,e){const n=this.context(),r=this._origin,i=this._width,o=this._height,a=this._dirty,s=Uv(r,i,o);n.save();const u=this._redraw||a.empty()?(this._redraw=!1,s.expand(1)):function(t,e,n){e.expand(1).round(),t.pixelRatio%1&&e.scale(t.pixelRatio).round().scale(1/t.pixelRatio);return e.translate(-n[0]%1,-n[1]%1),t.beginPath(),t.rect(e.x1,e.y1,e.width(),e.height()),t.clip(),e}(n,s.intersect(a),r);return this.clear(-r[0],-r[1],i,o),this.draw(n,t,u,e),n.restore(),a.clear(),this}draw(t,e,n,r){if("group"!==e.marktype&&null!=r&&!r.includes(e.marktype))return;const i=Hy[e.marktype];e.clip&&function(t,e){var n=e.clip;t.save(),J(n)?(t.beginPath(),n(t),t.clip()):ty(t,e.group)}(t,e),i.draw.call(this,t,e,n,r),e.clip&&t.restore()}clear(t,e,n,r){const i=this._options,o=this.context();"pdf"===i.type||i.externalContext||o.clearRect(t,e,n,r),null!=this._bgcolor&&(o.fillStyle=this._bgcolor,o.fillRect(t,e,n,r))}}const Uv=(t,e,n)=>(new Vg).set(0,0,e,n).translate(-t[0],-t[1]);class Lv extends sv{constructor(t,e){super(t,e);const n=this;n._hrefHandler=qv(n,((t,e)=>{e&&e.href&&n.handleHref(t,e,e.href)})),n._tooltipHandler=qv(n,((t,e)=>{n.handleTooltip(t,e,t.type!==Cv)}))}initialize(t,e,n){let r=this._svg;return r&&(r.removeEventListener(Fv,this._hrefHandler),r.removeEventListener(Dv,this._tooltipHandler),r.removeEventListener(Cv,this._tooltipHandler)),this._svg=r=t&&nv(t,"svg"),r&&(r.addEventListener(Fv,this._hrefHandler),r.addEventListener(Dv,this._tooltipHandler),r.addEventListener(Cv,this._tooltipHandler)),super.initialize(t,e,n)}canvas(){return this._svg}on(t,e){const n=this.eventName(t),r=this._handlers;if(this._handlerIndex(r[n],t,e)<0){const i={type:t,handler:e,listener:qv(this,e)};(r[n]||(r[n]=[])).push(i),this._svg&&this._svg.addEventListener(n,i.listener)}return this}off(t,e){const n=this.eventName(t),r=this._handlers[n],i=this._handlerIndex(r,t,e);return i>=0&&(this._svg&&this._svg.removeEventListener(n,r[i].listener),r.splice(i,1)),this}}const qv=(t,e)=>n=>{let r=n.target.__data__;r=Array.isArray(r)?r[0]:r,n.vegaType=n.type,e.call(t._obj,n,r)},Pv="aria-hidden",jv="aria-label",Iv="role",Wv="aria-roledescription",Hv="graphics-object",Yv="graphics-symbol",Gv=(t,e,n)=>({[Iv]:t,[Wv]:e,[jv]:n||void 0}),Vv=Bt(["axis-domain","axis-grid","axis-label","axis-tick","axis-title","legend-band","legend-entry","legend-gradient","legend-label","legend-title","legend-symbol","title"]),Xv={axis:{desc:"axis",caption:function(t){const e=t.datum,n=t.orient,r=e.title?t_(t):null,i=t.context,o=i.scales[e.scale].value,a=i.dataflow.locale(),s=o.type;return("left"===n||"right"===n?"Y":"X")+"-axis"+(r?` titled '${r}'`:"")+` for a ${cp(s)?"discrete":s} scale`+` with ${Yp(a,o,t)}`}},legend:{desc:"legend",caption:function(t){const e=t.datum,n=e.title?t_(t):null,r=`${e.type||""} legend`.trim(),i=e.scales,o=Object.keys(i),a=t.context,s=a.scales[i[o[0]]].value,u=a.dataflow.locale();return l=r,(l.length?l[0].toUpperCase()+l.slice(1):l)+(n?` titled '${n}'`:"")+` for ${function(t){return t=t.map((t=>t+("fill"===t||"stroke"===t?" 
color":""))),t.length<2?t[0]:t.slice(0,-1).join(", ")+" and "+F(t)}(o)}`+` with ${Yp(u,s,t)}`;var l}},"title-text":{desc:"title",caption:t=>`Title text '${Kv(t)}'`},"title-subtitle":{desc:"subtitle",caption:t=>`Subtitle text '${Kv(t)}'`}},Jv={ariaRole:Iv,ariaRoleDescription:Wv,description:jv};function Zv(t,e){const n=!1===e.aria;if(t(Pv,n||void 0),n||null==e.description)for(const e in Jv)t(Jv[e],void 0);else{const n=e.mark.marktype;t(jv,e.description),t(Iv,e.ariaRole||("group"===n?Hv:Yv)),t(Wv,e.ariaRoleDescription||`${n} mark`)}}function Qv(t){return!1===t.aria?{[Pv]:!0}:Vv[t.role]?null:Xv[t.role]?function(t,e){try{const n=t.items[0],r=e.caption||(()=>"");return Gv(e.role||Yv,e.desc,n.description||r(n))}catch(t){return null}}(t,Xv[t.role]):function(t){const e=t.marktype,n="group"===e||"text"===e||t.items.some((t=>null!=t.description&&!1!==t.aria));return Gv(n?Hv:Yv,`${e} mark container`,t.description)}(t)}function Kv(t){return V(t.text).join(" ")}function t_(t){try{return V(F(t.items).items[0].text).join(" ")}catch(t){return null}}const e_=t=>(t+"").replace(/&/g,"&").replace(//g,">");function n_(){let t="",e="",n="";const r=[],i=()=>e=n="",o=(t,n)=>{var r;return null!=n&&(e+=` ${t}="${r=n,e_(r).replace(/"/g,""").replace(/\t/g," ").replace(/\n/g," ").replace(/\r/g," ")}"`),a},a={open(s){(o=>{e&&(t+=`${e}>${n}`,i()),r.push(o)})(s),e="<"+s;for(var u=arguments.length,l=new Array(u>1?u-1:0),c=1;c${n}`:"/>"):``,i(),a},attr:o,text:t=>(n+=e_(t),a),toString:()=>t};return a}const r_=t=>i_(n_(),t)+"";function i_(t,e){if(t.open(e.tagName),e.hasAttributes()){const n=e.attributes,r=n.length;for(let e=0;e{t.dirty=e}))),r.zdirty||(n.exit?(o.nested&&r.items.length?(u=r.items[0],u._svg&&this._update(o,u._svg,u)):n._svg&&(u=n._svg.parentNode,u&&u.removeChild(n._svg)),n._svg=null):(n=o.nested?r.items[0]:n,n._update!==e&&(n._svg&&n._svg.ownerSVGElement?this._update(o,n._svg,n):(this._dirtyAll=!1,f_(n,e)),n._update=e)));return!this._dirtyAll}mark(t,e,n,r){if(!this.isDirty(e))return e._svg;const i=this._svg,o=e.marktype,a=Hy[o],s=!1===e.interactive?"none":null,u="g"===a.tag,l=p_(e,t,n,"g",i);if("group"!==o&&null!=r&&!r.includes(o))return iv(l,0),e._svg;l.setAttribute("class",ov(e));const c=Qv(e);for(const t in c)b_(l,t,c[t]);u||b_(l,"pointer-events",s),b_(l,"clip-path",e.clip?Gg(this,e,e.group):null);let f=null,h=0;const d=t=>{const e=this.isDirty(t),n=p_(t,l,f,a.tag,i);e&&(this._update(a,n,t),u&&function(t,e,n,r){e=e.lastChild.previousSibling;let i,o=0;Lm(n,(n=>{i=t.mark(e,n,i,r),++o})),iv(e,1+o)}(this,n,t,r)),f=n,++h};return a.nested?e.items.length&&d(e.items[0]):Lm(e,d),iv(l,h),l}_update(t,e,n){g_=e,m_=e.__values__,Zv(v_,n),t.attr(v_,n,this);const r=y_[t.type];r&&r.call(this,t,e,n),g_&&this.style(g_,n)}style(t,e){if(null!=e){for(const n in o_){let r="font"===n?Oy(e):e[n];if(r===m_[n])continue;const i=o_[n];null==r?t.removeAttribute(i):(Xp(r)&&(r=Jp(r,this._defs.gradient,w_())),t.setAttribute(i,r+"")),m_[n]=r}for(const n in a_)__(t,a_[n],e[n])}}defs(){const t=this._svg,e=this._defs;let n=e.el,r=0;for(const i in e.gradient)n||(e.el=n=rv(t,1,"defs",l_)),r=h_(n,e.gradient[i],r);for(const i in e.clipping)n||(e.el=n=rv(t,1,"defs",l_)),r=d_(n,e.clipping[i],r);n&&(0===r?(t.removeChild(n),e.el=null):iv(n,r))}_clearDefs(){const t=this._defs;t.gradient={},t.clipping={}}}function f_(t,e){for(;t&&t.dirty!==e;t=t.mark.group){if(t.dirty=e,!t.mark||t.mark.dirty===e)return;t.mark.dirty=e}}function h_(t,e,n){let r,i,o;if("radial"===e.gradient){let 
r=rv(t,n++,"pattern",l_);x_(r,{id:Vp+e.id,viewBox:"0,0,1,1",width:"100%",height:"100%",preserveAspectRatio:"xMidYMid slice"}),r=rv(r,0,"rect",l_),x_(r,{width:1,height:1,fill:`url(${w_()}#${e.id})`}),x_(t=rv(t,n++,"radialGradient",l_),{id:e.id,fx:e.x1,fy:e.y1,fr:e.r1,cx:e.x2,cy:e.y2,r:e.r2})}else x_(t=rv(t,n++,"linearGradient",l_),{id:e.id,x1:e.x1,x2:e.x2,y1:e.y1,y2:e.y2});for(r=0,i=e.stops.length;r1&&t.previousSibling!=e}(a,n))&&e.insertBefore(a,n?n.nextSibling:e.firstChild),a}let g_=null,m_=null;const y_={group(t,e,n){const r=g_=e.childNodes[2];m_=r.__values__,t.foreground(v_,n,this),m_=e.__values__,g_=e.childNodes[1],t.content(v_,n,this);const i=g_=e.childNodes[0];t.background(v_,n,this);const o=!1===n.mark.interactive?"none":null;if(o!==m_.events&&(b_(r,"pointer-events",o),b_(i,"pointer-events",o),m_.events=o),n.strokeForeground&&n.stroke){const t=n.fill;b_(r,"display",null),this.style(i,n),b_(i,"stroke",null),t&&(n.fill=null),m_=r.__values__,this.style(r,n),t&&(n.fill=t),g_=null}else b_(r,"display","none")},image(t,e,n){!1===n.smooth?(__(e,"image-rendering","optimizeSpeed"),__(e,"image-rendering","pixelated")):__(e,"image-rendering",null)},text(t,e,n){const r=By(n);let i,o,a,s;k(r)?(o=r.map((t=>Ny(n,t))),i=o.join("\n"),i!==m_.text&&(iv(e,0),a=e.ownerDocument,s=Ty(n),o.forEach(((t,r)=>{const i=ev(a,"tspan",l_);i.__data__=n,i.textContent=t,r&&(i.setAttribute("x",0),i.setAttribute("dy",s)),e.appendChild(i)})),m_.text=i)):(o=Ny(n,r),o!==m_.text&&(e.textContent=o,m_.text=o)),b_(e,"font-family",Oy(n)),b_(e,"font-size",$y(n)+"px"),b_(e,"font-style",n.fontStyle),b_(e,"font-variant",n.fontVariant),b_(e,"font-weight",n.fontWeight)}};function v_(t,e,n){e!==m_[t]&&(n?function(t,e,n,r){null!=n?t.setAttributeNS(r,e,n):t.removeAttributeNS(r,e)}(g_,t,e,n):b_(g_,t,e),m_[t]=e)}function __(t,e,n){n!==m_[e]&&(null==n?t.style.removeProperty(e):t.style.setProperty(e,n+""),m_[e]=n)}function x_(t,e){for(const n in e)b_(t,n,e[n])}function b_(t,e,n){null!=n?t.setAttribute(e,n):t.removeAttribute(e)}function w_(){let t;return"undefined"==typeof window?"":(t=window.location).hash?t.href.slice(0,-t.hash.length):t.href}class k_ extends lv{constructor(t){super(t),this._text=null,this._defs={gradient:{},clipping:{}}}svg(){return this._text}_render(t){const e=n_();e.open("svg",ot({},uy,{class:"marks",width:this._width*this._scale,height:this._height*this._scale,viewBox:`0 0 ${this._width} ${this._height}`}));const n=this._bgcolor;return n&&"transparent"!==n&&"none"!==n&&e.open("rect",{width:this._width,height:this._height,fill:n}).close(),e.open("g",s_,{transform:"translate("+this._origin+")"}),this.mark(e,t),e.close(),this.defs(e),this._text=e.close()+"",this}mark(t,e){const n=Hy[e.marktype],r=n.tag,i=[Zv,n.attr];t.open("g",{class:ov(e),"clip-path":e.clip?Gg(this,e,e.group):null},Qv(e),{"pointer-events":"g"!==r&&!1===e.interactive?"none":null});const o=o=>{const a=this.href(o);if(a&&t.open("a",a),t.open(r,this.attr(e,o,i,"g"!==r?r:null)),"text"===r){const e=By(o);if(k(e)){const n={x:0,dy:Ty(o)};for(let r=0;rthis.mark(t,e))),t.close(),r&&a?(i&&(o.fill=null),o.stroke=a,t.open("path",this.attr(e,o,n.foreground,"bgrect")).close(),i&&(o.fill=i)):t.open("path",this.attr(e,o,n.foreground,"bgfore")).close()}t.close(),a&&t.close()};return n.nested?e.items&&e.items.length&&o(e.items[0]):Lm(e,o),t.close()}href(t){const e=t.href;let n;if(e){if(n=this._hrefs&&this._hrefs[e])return n;this.sanitizeURL(e).then((t=>{t["xlink:href"]=t.href,t.href=null,(this._hrefs||(this._hrefs={}))[e]=t}))}return null}attr(t,e,n,r){const 
i={},o=(t,e,n,r)=>{i[r||t]=e};return Array.isArray(n)?n.forEach((t=>t(o,e,this))):n(o,e,this),r&&function(t,e,n,r,i){let o;if(null==e)return t;"bgrect"===r&&!1===n.interactive&&(t["pointer-events"]="none");if("bgfore"===r&&(!1===n.interactive&&(t["pointer-events"]="none"),t.display="none",null!==e.fill))return t;"image"===r&&!1===e.smooth&&(o=["image-rendering: optimizeSpeed;","image-rendering: pixelated;"]);"text"===r&&(t["font-family"]=Oy(e),t["font-size"]=$y(e)+"px",t["font-style"]=e.fontStyle,t["font-variant"]=e.fontVariant,t["font-weight"]=e.fontWeight);for(const n in o_){let r=e[n];const o=o_[n];("transparent"!==r||"fill"!==o&&"stroke"!==o)&&null!=r&&(Xp(r)&&(r=Jp(r,i.gradient,"")),t[o]=r)}for(const t in a_){const n=e[t];null!=n&&(o=o||[],o.push(`${a_[t]}: ${n};`))}o&&(t.style=o.join(" "))}(i,e,t,r,this._defs),i}defs(t){const e=this._defs.gradient,n=this._defs.clipping;if(0!==Object.keys(e).length+Object.keys(n).length){t.open("defs");for(const n in e){const r=e[n],i=r.stops;"radial"===r.gradient?(t.open("pattern",{id:Vp+n,viewBox:"0,0,1,1",width:"100%",height:"100%",preserveAspectRatio:"xMidYMid slice"}),t.open("rect",{width:"1",height:"1",fill:"url(#"+n+")"}).close(),t.close(),t.open("radialGradient",{id:n,fx:r.x1,fy:r.y1,fr:r.r1,cx:r.x2,cy:r.y2,r:r.r2})):t.open("linearGradient",{id:n,x1:r.x1,x2:r.x2,y1:r.y1,y2:r.y2});for(let e=0;e!A_.svgMarkTypes.includes(t)));this._svgRenderer.render(t,A_.svgMarkTypes),this._canvasRenderer.render(t,n)}resize(t,e,n,r){return super.resize(t,e,n,r),this._svgRenderer.resize(t,e,n,r),this._canvasRenderer.resize(t,e,n,r),this}background(t){return A_.svgOnTop?this._canvasRenderer.background(t):this._svgRenderer.background(t),this}}class E_ extends Sv{constructor(t,e){super(t,e)}initialize(t,e,n){const r=rv(rv(t,0,"div"),A_.svgOnTop?0:1,"div");return super.initialize(r,e,n)}}const D_="canvas",C_="hybrid",F_="none",S_={Canvas:D_,PNG:"png",SVG:"svg",Hybrid:C_,None:F_},$_={};function T_(t,e){return t=String(t||"").toLowerCase(),arguments.length>1?($_[t]=e,this):$_[t]}function B_(t,e,n){const r=[],i=(new Vg).union(e),o=t.marktype;return o?z_(t,i,n,r):"group"===o?N_(t,i,n,r):s("Intersect scene must be mark node or group item.")}function z_(t,e,n,r){if(function(t,e,n){return t.bounds&&e.intersects(t.bounds)&&("group"===t.marktype||!1!==t.interactive&&(!n||n(t)))}(t,e,n)){const i=t.items,o=t.marktype,a=i.length;let s=0;if("group"===o)for(;s=0;r--)if(i[r]!=o[r])return!1;for(r=i.length-1;r>=0;r--)if(!q_(t[n=i[r]],e[n],n))return!1;return typeof t==typeof e}(t,e):t==e)}function P_(t,e){return q_(ag(t),ag(e))}const j_="top",I_="left",W_="right",H_="bottom",Y_="top-left",G_="top-right",V_="bottom-left",X_="bottom-right",J_="start",Z_="middle",Q_="end",K_="x",tx="y",ex="group",nx="axis",rx="title",ix="frame",ox="scope",ax="legend",sx="row-header",ux="row-footer",lx="row-title",cx="column-header",fx="column-footer",hx="column-title",dx="padding",px="symbol",gx="fit",mx="fit-x",yx="fit-y",vx="pad",_x="none",xx="all",bx="each",wx="flush",kx="column",Ax="row";function Mx(t){Ja.call(this,null,t)}function Ex(t,e,n){return e(t.bounds.clear(),t,n)}dt(Mx,Ja,{transform(t,e){const n=e.dataflow,r=t.mark,i=r.marktype,o=Hy[i],a=o.bound;let s,u=r.bounds;if(o.nested)r.items.length&&n.dirty(r.items[0]),u=Ex(r,a),r.items.forEach((t=>{t.bounds.clear().union(u)}));else if(i===ex||t.modified())switch(e.visit(e.MOD,(t=>n.dirty(t))),u.clear(),r.items.forEach((t=>u.union(Ex(t,a)))),r.role){case nx:case ax:case rx:e.reflow()}else 
s=e.changed(e.REM),e.visit(e.ADD,(t=>{u.union(Ex(t,a))})),e.visit(e.MOD,(t=>{s=s||u.alignsWith(t.bounds),n.dirty(t),u.union(Ex(t,a))})),s&&(u.clear(),r.items.forEach((t=>u.union(t.bounds))));return U_(r),e.modifies("bounds")}});const Dx=":vega_identifier:";function Cx(t){Ja.call(this,0,t)}function Fx(t){Ja.call(this,null,t)}function Sx(t){Ja.call(this,null,t)}Cx.Definition={type:"Identifier",metadata:{modifies:!0},params:[{name:"as",type:"string",required:!0}]},dt(Cx,Ja,{transform(t,e){const n=(i=e.dataflow)._signals[Dx]||(i._signals[Dx]=i.add(0)),r=t.as;var i;let o=n.value;return e.visit(e.ADD,(t=>t[r]=t[r]||++o)),n.set(this.value=o),e}}),dt(Fx,Ja,{transform(t,e){let n=this.value;n||(n=e.dataflow.scenegraph().mark(t.markdef,function(t){const e=t.groups,n=t.parent;return e&&1===e.size?e.get(Object.keys(e.object)[0]):e&&n?e.lookup(n):null}(t),t.index),n.group.context=t.context,t.context.group||(t.context.group=n.group),n.source=this.source,n.clip=t.clip,n.interactive=t.interactive,this.value=n);const r=n.marktype===ex?Jg:Xg;return e.visit(e.ADD,(t=>r.call(t,n))),(t.modified("clip")||t.modified("interactive"))&&(n.clip=t.clip,n.interactive=!!t.interactive,n.zdirty=!0,e.reflow()),n.items=e.source,e}});const $x={parity:t=>t.filter(((t,e)=>e%2?t.opacity=0:1)),greedy:(t,e)=>{let n;return t.filter(((t,r)=>r&&Tx(n.bounds,t.bounds,e)?t.opacity=0:(n=t,1)))}},Tx=(t,e,n)=>n>Math.max(e.x1-t.x2,t.x1-e.x2,e.y1-t.y2,t.y1-e.y2),Bx=(t,e)=>{for(var n,r=1,i=t.length,o=t[0].bounds;r{const e=t.bounds;return e.width()>1&&e.height()>1},Nx=t=>(t.forEach((t=>t.opacity=1)),t),Ox=(t,e)=>t.reflow(e.modified()).modifies("opacity");function Rx(t){Ja.call(this,null,t)}dt(Sx,Ja,{transform(t,e){const n=$x[t.method]||$x.parity,r=t.separation||0;let i,o,a=e.materialize(e.SOURCE).source;if(!a||!a.length)return;if(!t.method)return t.modified("method")&&(Nx(a),e=Ox(e,t)),e;if(a=a.filter(zx),!a.length)return;if(t.sort&&(a=a.slice().sort(t.sort)),i=Nx(a),e=Ox(e,t),i.length>=3&&Bx(i,r)){do{i=n(i,r)}while(i.length>=3&&Bx(i,r));i.length<3&&!F(a).opacity&&(i.length>1&&(F(i).opacity=0),F(a).opacity=1)}t.boundScale&&t.boundTolerance>=0&&(o=((t,e,n)=>{var r=t.range(),i=new Vg;return e===j_||e===H_?i.set(r[0],-1/0,r[1],1/0):i.set(-1/0,r[0],1/0,r[1]),i.expand(n||1),t=>i.encloses(t.bounds)})(t.boundScale,t.boundOrient,+t.boundTolerance),a.forEach((t=>{o(t)||(t.opacity=0)})));const s=i[0].mark.bounds.clear();return a.forEach((t=>{t.opacity&&s.union(t.bounds)})),e}}),dt(Rx,Ja,{transform(t,e){const n=e.dataflow;if(e.visit(e.ALL,(t=>n.dirty(t))),e.fields&&e.fields.zindex){const t=e.source&&e.source[0];t&&(t.mark.zdirty=!0)}}});const Ux=new Vg;function Lx(t,e,n){return t[e]===n?0:(t[e]=n,1)}function qx(t){var e=t.items[0].orient;return e===I_||e===W_}function Px(t,e,n,r){var i,o,a=e.items[0],s=a.datum,u=null!=a.translate?a.translate:.5,l=a.orient,c=function(t){let e=+t.grid;return[t.ticks?e++:-1,t.labels?e++:-1,e+ +t.domain]}(s),f=a.range,h=a.offset,d=a.position,p=a.minExtent,g=a.maxExtent,m=s.title&&a.items[c[2]].items[0],y=a.titlePadding,v=a.bounds,_=m&&zy(m),x=0,b=0;switch(Ux.clear().union(v),v.clear(),(i=c[0])>-1&&v.union(a.items[i].bounds),(i=c[1])>-1&&v.union(a.items[i].bounds),l){case j_:x=d||0,b=-h,o=Math.max(p,Math.min(g,-v.y1)),v.add(0,-o).add(f,0),m&&jx(t,m,o,y,_,0,-1,v);break;case I_:x=-h,b=d||0,o=Math.max(p,Math.min(g,-v.x1)),v.add(-o,0).add(0,f),m&&jx(t,m,o,y,_,1,-1,v);break;case W_:x=n+h,b=d||0,o=Math.max(p,Math.min(g,v.x2)),v.add(0,0).add(o,f),m&&jx(t,m,o,y,_,1,1,v);break;case 
H_:x=d||0,b=r+h,o=Math.max(p,Math.min(g,v.y2)),v.add(0,0).add(f,o),m&&jx(t,m,o,y,0,0,1,v);break;default:x=a.x,b=a.y}return tm(v.translate(x,b),a),Lx(a,"x",x+u)|Lx(a,"y",b+u)&&(a.bounds=Ux,t.dirty(a),a.bounds=v,t.dirty(a)),a.mark.bounds.clear().union(v)}function jx(t,e,n,r,i,o,a,s){const u=e.bounds;if(e.auto){const s=a*(n+i+r);let l=0,c=0;t.dirty(e),o?l=(e.x||0)-(e.x=s):c=(e.y||0)-(e.y=s),e.mark.bounds.clear().union(u.translate(-l,-c)),t.dirty(e)}s.union(u)}const Ix=(t,e)=>Math.floor(Math.min(t,e)),Wx=(t,e)=>Math.ceil(Math.max(t,e));function Hx(t){return(new Vg).set(0,0,t.width||0,t.height||0)}function Yx(t){const e=t.bounds.clone();return e.empty()?e.set(0,0,0,0):e.translate(-(t.x||0),-(t.y||0))}function Gx(t,e,n){const r=A(t)?t[e]:t;return null!=r?r:void 0!==n?n:0}function Vx(t){return t<0?Math.ceil(-t):0}function Xx(t,e,n){var r,i,o,a,s,u,l,c,f,h,d,p=!n.nodirty,g=n.bounds===wx?Hx:Yx,m=Ux.set(0,0,0,0),y=Gx(n.align,kx),v=Gx(n.align,Ax),_=Gx(n.padding,kx),x=Gx(n.padding,Ax),b=n.columns||e.length,w=b<=0?1:Math.ceil(e.length/b),k=e.length,A=Array(k),M=Array(b),E=0,D=Array(k),C=Array(w),F=0,S=Array(k),$=Array(k),T=Array(k);for(i=0;i1)for(i=0;i0&&(S[i]+=f/2);if(v&&Gx(n.center,Ax)&&1!==b)for(i=0;i0&&($[i]+=h/2);for(i=0;ii&&(t.warn("Grid headers exceed limit: "+i),e=e.slice(0,i)),A+=o,g=0,y=e.length;g=0&&null==(x=n[m]);m-=h);s?(b=null==d?x.x:Math.round(x.bounds.x1+d*x.bounds.width()),w=A):(b=A,w=null==d?x.y:Math.round(x.bounds.y1+d*x.bounds.height())),v.union(_.bounds.translate(b-(_.x||0),w-(_.y||0))),_.x=b,_.y=w,t.dirty(_),M=a(M,v[l])}return M}function tb(t,e,n,r,i,o){if(e){t.dirty(e);var a=n,s=n;r?a=Math.round(i.x1+o*i.width()):s=Math.round(i.y1+o*i.height()),e.bounds.translate(a-(e.x||0),s-(e.y||0)),e.mark.bounds.clear().union(e.bounds),e.x=a,e.y=s,t.dirty(e)}}function eb(t,e,n,r,i,o,a){const s=function(t,e){const n=t[e]||{};return(e,r)=>null!=n[e]?n[e]:null!=t[e]?t[e]:r}(n,e),u=function(t,e){let n=-1/0;return t.forEach((t=>{null!=t.offset&&(n=Math.max(n,t.offset))})),n>-1/0?n:e}(t,s("offset",0)),l=s("anchor",J_),c=l===Q_?1:l===Z_?.5:0,f={align:bx,bounds:s("bounds",wx),columns:"vertical"===s("direction")?1:t.length,padding:s("margin",8),center:s("center"),nodirty:!0};switch(e){case I_:f.anchor={x:Math.floor(r.x1)-u,column:Q_,y:c*(a||r.height()+2*r.y1),row:l};break;case W_:f.anchor={x:Math.ceil(r.x2)+u,y:c*(a||r.height()+2*r.y1),row:l};break;case j_:f.anchor={y:Math.floor(i.y1)-u,row:Q_,x:c*(o||i.width()+2*i.x1),column:l};break;case H_:f.anchor={y:Math.ceil(i.y2)+u,x:c*(o||i.width()+2*i.x1),column:l};break;case Y_:f.anchor={x:u,y:u};break;case G_:f.anchor={x:o-u,y:u,column:Q_};break;case V_:f.anchor={x:u,y:a-u,row:Q_};break;case X_:f.anchor={x:o-u,y:a-u,column:Q_,row:Q_}}return f}function nb(t,e){var n,r,i=e.items[0],o=i.datum,a=i.orient,s=i.bounds,u=i.x,l=i.y;return i._bounds?i._bounds.clear().union(s):i._bounds=s.clone(),s.clear(),function(t,e,n){var r=e.padding,i=r-n.x,o=r-n.y;if(e.datum.title){var a=e.items[1].items[0],s=a.anchor,u=e.titlePadding||0,l=r-a.x,c=r-a.y;switch(a.orient){case I_:i+=Math.ceil(a.bounds.width())+u;break;case W_:case H_:break;default:o+=a.bounds.height()+u}switch((i||o)&&ib(t,n,i,o),a.orient){case I_:c+=rb(e,n,a,s,1,1);break;case W_:l+=rb(e,n,a,Q_,0,0)+u,c+=rb(e,n,a,s,1,1);break;case H_:l+=rb(e,n,a,s,0,0),c+=rb(e,n,a,Q_,-1,0,1)+u;break;default:l+=rb(e,n,a,s,0,0)}(l||c)&&ib(t,a,l,c),(l=Math.round(a.bounds.x1-r))<0&&(ib(t,n,-l,0),ib(t,a,-l,0))}else(i||o)&&ib(t,n,i,o)}(t,i,i.items[0].items[0]),s=function(t,e){return 
t.items.forEach((t=>e.union(t.bounds))),e.x1=t.padding,e.y1=t.padding,e}(i,s),n=2*i.padding,r=2*i.padding,s.empty()||(n=Math.ceil(s.width()+n),r=Math.ceil(s.height()+r)),o.type===px&&function(t){const e=t.reduce(((t,e)=>(t[e.column]=Math.max(e.bounds.x2-e.x,t[e.column]||0),t)),{});t.forEach((t=>{t.width=e[t.column],t.height=t.bounds.y2-t.y}))}(i.items[0].items[0].items[0].items),a!==_x&&(i.x=u=0,i.y=l=0),i.width=n,i.height=r,tm(s.set(u,l,u+n,l+r),i),i.mark.bounds.clear().union(s),i}function rb(t,e,n,r,i,o,a){const s="symbol"!==t.datum.type,u=n.datum.vgrad,l=(!s||!o&&u||a?e:e.items[0]).bounds[i?"y2":"x2"]-t.padding,c=u&&o?l:0,f=u&&o?0:l,h=i<=0?0:zy(n);return Math.round(r===J_?c:r===Q_?f-h:.5*(l-h))}function ib(t,e,n,r){e.x+=n,e.y+=r,e.bounds.translate(n,r),e.mark.bounds.translate(n,r),t.dirty(e)}function ob(t){Ja.call(this,null,t)}dt(ob,Ja,{transform(t,e){const n=e.dataflow;return t.mark.items.forEach((e=>{t.layout&&Jx(n,e,t.layout),function(t,e,n){var r,i,o,a,s,u=e.items,l=Math.max(0,e.width||0),c=Math.max(0,e.height||0),f=(new Vg).set(0,0,l,c),h=f.clone(),d=f.clone(),p=[];for(a=0,s=u.length;a{(o=t.orient||W_)!==_x&&(e[o]||(e[o]=[])).push(t)}));for(const r in e){const i=e[r];Xx(t,i,eb(i,r,n.legends,h,d,l,c))}p.forEach((e=>{const r=e.bounds;if(r.equals(e._bounds)||(e.bounds=e._bounds,t.dirty(e),e.bounds=r,t.dirty(e)),!n.autosize||n.autosize.type!==gx&&n.autosize.type!==mx&&n.autosize.type!==yx)f.union(r);else switch(e.orient){case I_:case W_:f.add(r.x1,0).add(r.x2,0);break;case j_:case H_:f.add(0,r.y1).add(0,r.y2)}}))}f.union(h).union(d),r&&f.union(function(t,e,n,r,i){var o,a=e.items[0],s=a.frame,u=a.orient,l=a.anchor,c=a.offset,f=a.padding,h=a.items[0].items[0],d=a.items[1]&&a.items[1].items[0],p=u===I_||u===W_?r:n,g=0,m=0,y=0,v=0,_=0;if(s!==ex?u===I_?(g=i.y2,p=i.y1):u===W_?(g=i.y1,p=i.y2):(g=i.x1,p=i.x2):u===I_&&(g=r,p=0),o=l===J_?g:l===Q_?p:(g+p)/2,d&&d.text){switch(u){case j_:case H_:_=h.bounds.height()+f;break;case I_:v=h.bounds.width()+f;break;case W_:v=-h.bounds.width()-f}Ux.clear().union(d.bounds),Ux.translate(v-(d.x||0),_-(d.y||0)),Lx(d,"x",v)|Lx(d,"y",_)&&(t.dirty(d),d.bounds.clear().union(Ux),d.mark.bounds.clear().union(Ux),t.dirty(d)),Ux.clear().union(d.bounds)}else Ux.clear();switch(Ux.union(h.bounds),u){case j_:m=o,y=i.y1-Ux.height()-c;break;case I_:m=i.x1-Ux.width()-c,y=o;break;case W_:m=i.x2+Ux.width()+c,y=o;break;case H_:m=o,y=i.y2+c;break;default:m=a.x,y=a.y}return Lx(a,"x",m)|Lx(a,"y",y)&&(Ux.translate(m,y),t.dirty(a),a.bounds.clear().union(Ux),e.bounds.clear().union(Ux),t.dirty(a)),a.bounds}(t,r,l,c,f));e.clip&&f.set(0,0,e.width||0,e.height||0);!function(t,e,n,r){const i=r.autosize||{},o=i.type;if(t._autosize<1||!o)return;let a=t._width,s=t._height,u=Math.max(0,e.width||0),l=Math.max(0,Math.ceil(-n.x1)),c=Math.max(0,e.height||0),f=Math.max(0,Math.ceil(-n.y1));const h=Math.max(0,Math.ceil(n.x2-u)),d=Math.max(0,Math.ceil(n.y2-c));if(i.contains===dx){const e=t.padding();a-=e.left+e.right,s-=e.top+e.bottom}o===_x?(l=0,f=0,u=a,c=s):o===gx?(u=Math.max(0,a-l-h),c=Math.max(0,s-f-d)):o===mx?(u=Math.max(0,a-l-h),s=c+f+d):o===yx?(a=u+l+h,c=Math.max(0,s-f-d)):o===vx&&(a=u+l+h,s=c+f+d);t._resizeView(a,s,u,c,[l,f],i.resize)}(t,e,f,n)}(n,e,t)})),function(t){return t&&"legend-entry"!==t.mark.role}(t.mark.group)?e.reflow():e}});var ab=Object.freeze({__proto__:null,bound:Mx,identifier:Cx,mark:Fx,overlap:Sx,render:Rx,viewlayout:ob});function sb(t){Ja.call(this,null,t)}function ub(t){Ja.call(this,null,t)}function lb(){return _a({})}function cb(t){Ja.call(this,null,t)}function 
fb(t){Ja.call(this,[],t)}dt(sb,Ja,{transform(t,e){if(this.value&&!t.modified())return e.StopPropagation;var n=e.dataflow.locale(),r=e.fork(e.NO_SOURCE|e.NO_FIELDS),i=this.value,o=t.scale,a=Sp(o,null==t.count?t.values?t.values.length:10:t.count,t.minstep),s=t.format||Bp(n,o,a,t.formatSpecifier,t.formatType,!!t.values),u=t.values?$p(o,t.values,a):Tp(o,a);return i&&(r.rem=i),i=u.map(((t,e)=>_a({index:e/(u.length-1||1),value:t,label:s(t)}))),t.extra&&i.length&&i.push(_a({index:-1,extra:{value:i[0].value},label:""})),r.source=i,r.add=i,this.value=i,r}}),dt(ub,Ja,{transform(t,e){var n=e.dataflow,r=e.fork(e.NO_SOURCE|e.NO_FIELDS),i=t.item||lb,o=t.key||ya,a=this.value;return k(r.encode)&&(r.encode=null),a&&(t.modified("key")||e.modified(o))&&s("DataJoin does not support modified key function or fields."),a||(e=e.addAll(),this.value=a=function(t){const e=ft().test((t=>t.exit));return e.lookup=n=>e.get(t(n)),e}(o)),e.visit(e.ADD,(t=>{const e=o(t);let n=a.get(e);n?n.exit?(a.empty--,r.add.push(n)):r.mod.push(n):(n=i(t),a.set(e,n),r.add.push(n)),n.datum=t,n.exit=!1})),e.visit(e.MOD,(t=>{const e=o(t),n=a.get(e);n&&(n.datum=t,r.mod.push(n))})),e.visit(e.REM,(t=>{const e=o(t),n=a.get(e);t!==n.datum||n.exit||(r.rem.push(n),n.exit=!0,++a.empty)})),e.changed(e.ADD_MOD)&&r.modifies("datum"),(e.clean()||t.clean&&a.empty>n.cleanThreshold)&&n.runAfter(a.clean),r}}),dt(cb,Ja,{transform(t,e){var n=e.fork(e.ADD_REM),r=t.mod||!1,i=t.encoders,o=e.encode;if(k(o)){if(!n.changed()&&!o.every((t=>i[t])))return e.StopPropagation;o=o[0],n.encode=null}var a="enter"===o,s=i.update||g,u=i.enter||g,l=i.exit||g,c=(o&&!a?i[o]:s)||g;if(e.changed(e.ADD)&&(e.visit(e.ADD,(e=>{u(e,t),s(e,t)})),n.modifies(u.output),n.modifies(s.output),c!==g&&c!==s&&(e.visit(e.ADD,(e=>{c(e,t)})),n.modifies(c.output))),e.changed(e.REM)&&l!==g&&(e.visit(e.REM,(e=>{l(e,t)})),n.modifies(l.output)),a||c!==g){const i=e.MOD|(t.modified()?e.REFLOW:0);a?(e.visit(i,(e=>{const i=u(e,t)||r;(c(e,t)||i)&&n.mod.push(e)})),n.mod.length&&n.modifies(u.output)):e.visit(i,(e=>{(c(e,t)||r)&&n.mod.push(e)})),n.mod.length&&n.modifies(c.output)}return n.changed()?n:e.StopPropagation}}),dt(fb,Ja,{transform(t,e){if(null!=this.value&&!t.modified())return e.StopPropagation;var n,r,i,o,a,s=e.dataflow.locale(),u=e.fork(e.NO_SOURCE|e.NO_FIELDS),l=this.value,c=t.type||Mp,f=t.scale,h=+t.limit,d=Sp(f,null==t.count?5:t.count,t.minstep),p=!!t.values||c===Mp,g=t.format||Lp(s,f,d,c,t.formatSpecifier,t.formatType,p),m=t.values||Rp(f,d);return l&&(u.rem=l),c===Mp?(h&&m.length>h?(e.dataflow.warn("Symbol legend count exceeds limit, filtering items."),l=m.slice(0,h-1),a=!0):l=m,J(i=t.size)?(t.values||0!==f(l[0])||(l=l.slice(1)),o=l.reduce(((e,n)=>Math.max(e,i(n,t))),0)):i=rt(o=i||8),l=l.map(((e,n)=>_a({index:n,label:g(e,n,l),value:e,offset:o,size:i(e,t)}))),a&&(a=m[l.length],l.push(_a({index:l.length,label:`…${m.length-l.length} entries`,value:a,offset:o,size:i(a,t)})))):"gradient"===c?(n=f.domain(),r=_p(f,n[0],F(n)),m.length<3&&!t.values&&n[0]!==F(n)&&(m=[n[0],F(n)]),l=m.map(((t,e)=>_a({index:e,label:g(t,e,m),value:t,perc:r(t)})))):(i=m.length-1,r=function(t){const e=t.domain(),n=e.length-1;let r=+e[0],i=+F(e),o=i-r;if(t.type===Id){const t=n?o/n:.1;r-=t,i+=t,o=i-r}return t=>(t-r)/o}(f),l=m.map(((t,e)=>_a({index:e,label:g(t,e,m),value:t,perc:e?r(t):0,perc2:e===i?1:r(m[e+1])})))),u.source=l,u.add=l,this.value=l,u}});const hb=t=>t.source.x,db=t=>t.source.y,pb=t=>t.target.x,gb=t=>t.target.y;function 
mb(t){Ja.call(this,{},t)}mb.Definition={type:"LinkPath",metadata:{modifies:!0},params:[{name:"sourceX",type:"field",default:"source.x"},{name:"sourceY",type:"field",default:"source.y"},{name:"targetX",type:"field",default:"target.x"},{name:"targetY",type:"field",default:"target.y"},{name:"orient",type:"enum",default:"vertical",values:["horizontal","vertical","radial"]},{name:"shape",type:"enum",default:"line",values:["line","arc","curve","diagonal","orthogonal"]},{name:"require",type:"signal"},{name:"as",type:"string",default:"path"}]},dt(mb,Ja,{transform(t,e){var n=t.sourceX||hb,r=t.sourceY||db,i=t.targetX||pb,o=t.targetY||gb,a=t.as||"path",u=t.orient||"vertical",l=t.shape||"line",c=xb.get(l+"-"+u)||xb.get(l);return c||s("LinkPath unsupported type: "+t.shape+(t.orient?"-"+t.orient:"")),e.visit(e.SOURCE,(t=>{t[a]=c(n(t),r(t),i(t),o(t))})),e.reflow(t.modified()).modifies(a)}});const yb=(t,e,n,r)=>"M"+t+","+e+"L"+n+","+r,vb=(t,e,n,r)=>{var i=n-t,o=r-e,a=Math.hypot(i,o)/2;return"M"+t+","+e+"A"+a+","+a+" "+180*Math.atan2(o,i)/Math.PI+" 0 1 "+n+","+r},_b=(t,e,n,r)=>{const i=n-t,o=r-e,a=.2*(i+o),s=.2*(o-i);return"M"+t+","+e+"C"+(t+a)+","+(e+s)+" "+(n+s)+","+(r-a)+" "+n+","+r},xb=ft({line:yb,"line-radial":(t,e,n,r)=>yb(e*Math.cos(t),e*Math.sin(t),r*Math.cos(n),r*Math.sin(n)),arc:vb,"arc-radial":(t,e,n,r)=>vb(e*Math.cos(t),e*Math.sin(t),r*Math.cos(n),r*Math.sin(n)),curve:_b,"curve-radial":(t,e,n,r)=>_b(e*Math.cos(t),e*Math.sin(t),r*Math.cos(n),r*Math.sin(n)),"orthogonal-horizontal":(t,e,n,r)=>"M"+t+","+e+"V"+r+"H"+n,"orthogonal-vertical":(t,e,n,r)=>"M"+t+","+e+"H"+n+"V"+r,"orthogonal-radial":(t,e,n,r)=>{const i=Math.cos(t),o=Math.sin(t),a=Math.cos(n),s=Math.sin(n);return"M"+e*i+","+e*o+"A"+e+","+e+" 0 0,"+((Math.abs(n-t)>Math.PI?n<=t:n>t)?1:0)+" "+e*a+","+e*s+"L"+r*a+","+r*s},"diagonal-horizontal":(t,e,n,r)=>{const i=(t+n)/2;return"M"+t+","+e+"C"+i+","+e+" "+i+","+r+" "+n+","+r},"diagonal-vertical":(t,e,n,r)=>{const i=(e+r)/2;return"M"+t+","+e+"C"+t+","+i+" "+n+","+i+" "+n+","+r},"diagonal-radial":(t,e,n,r)=>{const i=Math.cos(t),o=Math.sin(t),a=Math.cos(n),s=Math.sin(n),u=(e+r)/2;return"M"+e*i+","+e*o+"C"+u*i+","+u*o+" "+u*a+","+u*s+" "+r*a+","+r*s}});function bb(t){Ja.call(this,null,t)}bb.Definition={type:"Pie",metadata:{modifies:!0},params:[{name:"field",type:"field"},{name:"startAngle",type:"number",default:0},{name:"endAngle",type:"number",default:6.283185307179586},{name:"sort",type:"boolean",default:!1},{name:"as",type:"string",array:!0,length:2,default:["startAngle","endAngle"]}]},dt(bb,Ja,{transform(t,e){var n,r,i,o=t.as||["startAngle","endAngle"],a=o[0],s=o[1],u=t.field||d,l=t.startAngle||0,c=null!=t.endAngle?t.endAngle:2*Math.PI,f=e.source,h=f.map(u),p=h.length,g=l,m=(c-l)/$e(h),y=Se(p);for(t.sort&&y.sort(((t,e)=>h[t]-h[e])),n=0;nt+(e<0?-1:e>0?1:0)),0))!==e.length&&n.warn("Log scale domain includes zero: "+Ct(e)));return e}function Db(t,e,n){return J(t)&&(e||n)?mp(t,Cb(e||[0,1],n)):t}function Cb(t,e){return e?t.slice().reverse():t}function Fb(t){Ja.call(this,null,t)}dt(Mb,Ja,{transform(t,e){var n=e.dataflow,r=this.value,i=function(t){var e,n=t.type,r="";if(n===Ld)return Ld+"-"+Td;(function(t){const e=t.type;return lp(e)&&e!==Rd&&e!==Ud&&(t.scheme||t.range&&t.range.length&&t.range.every(xt))})(t)&&(r=2===(e=t.rawDomain?t.rawDomain.length:t.domain?t.domain.length+ +(null!=t.domainMid):0)?Ld+"-":3===e?qd+"-":"");return(r+n||Td).toLowerCase()}(t);for(i in r&&i===r.type||(this.value=r=ap(i)()),t)if(!Ab[i]){if("padding"===i&&kb(r.type))continue;J(r[i])?r[i](t[i]):n.warn("Unsupported scale 
property: "+i)}return function(t,e,n){var r=t.type,i=e.round||!1,o=e.range;if(null!=e.rangeStep)o=function(t,e,n){t!==Yd&&t!==Hd&&s("Only band and point scales support rangeStep.");var r=(null!=e.paddingOuter?e.paddingOuter:e.padding)||0,i=t===Hd?1:(null!=e.paddingInner?e.paddingInner:e.padding)||0;return[0,e.rangeStep*$d(n,i,r)]}(r,e,n);else if(e.scheme&&(o=function(t,e,n){var r,i=e.schemeExtent;k(e.scheme)?r=yp(e.scheme,e.interpolate,e.interpolateGamma):(r=Ap(e.scheme.toLowerCase()))||s(`Unrecognized scheme name: ${e.scheme}`);return n=t===Id?n+1:t===Gd?n-1:t===Pd||t===jd?+e.schemeCount||wb:n,dp(t)?Db(r,i,e.reverse):J(r)?vp(Db(r,i),n):t===Wd?r:r.slice(0,n)}(r,e,n),J(o))){if(t.interpolator)return t.interpolator(o);s(`Scale type ${r} does not support interpolating color schemes.`)}if(o&&dp(r))return t.interpolator(yp(Cb(o,e.reverse),e.interpolate,e.interpolateGamma));o&&e.interpolate&&t.interpolate?t.interpolate(xp(e.interpolate,e.interpolateGamma)):J(t.round)?t.round(i):J(t.rangeRound)&&t.interpolate(i?yh:mh);o&&t.range(Cb(o,e.reverse))}(r,t,function(t,e,n){let r=e.bins;if(r&&!k(r)){const e=t.domain(),n=e[0],i=F(e),o=r.step;let a=null==r.start?n:r.start,u=null==r.stop?i:r.stop;o||s("Scale bins parameter missing step property."),ai&&(u=o*Math.floor(i/o)),r=Se(a,u+o/2,o)}r?t.bins=r:t.bins&&delete t.bins;t.type===Gd&&(r?e.domain||e.domainRaw||(t.domain(r),n=r.length):t.bins=t.domain());return n}(r,t,function(t,e,n){const r=function(t,e,n){return e?(t.domain(Eb(t.type,e,n)),e.length):-1}(t,e.domainRaw,n);if(r>-1)return r;var i,o,a=e.domain,s=t.type,u=e.zero||void 0===e.zero&&function(t){const e=t.type;return!t.bins&&(e===Td||e===zd||e===Nd)}(t);if(!a)return 0;if((u||null!=e.domainMin||null!=e.domainMax||null!=e.domainMid)&&(i=(a=a.slice()).length-1||1,u&&(a[0]>0&&(a[0]=0),a[i]<0&&(a[i]=0)),null!=e.domainMin&&(a[0]=e.domainMin),null!=e.domainMax&&(a[i]=e.domainMax),null!=e.domainMid)){const t=(o=e.domainMid)>a[i]?i+1:ot(u);if(null==e)d.push(t.slice());else for(i={},o=0,a=t.length;oh&&(h=f),n&&c.sort(n)}return d.max=h,d}(e.source,t.groupby,l,c),r=0,i=n.length,o=n.max;r0?1:t<0?-1:0},iw=Math.sqrt,ow=Math.tan;function aw(t){return t>1?0:t<-1?Pb:Math.acos(t)}function sw(t){return t>1?jb:t<-1?-jb:Math.asin(t)}function uw(){}function lw(t,e){t&&fw.hasOwnProperty(t.type)&&fw[t.type](t,e)}var cw={Feature:function(t,e){lw(t.geometry,e)},FeatureCollection:function(t,e){for(var n=t.features,r=-1,i=n.length;++r=0?1:-1,i=r*n,o=Jb(e=(e*=Yb)/2+Ib),a=nw(e),s=_w*a,u=vw*o+s*Jb(i),l=s*r*nw(i);$w.add(Xb(l,u)),yw=t,vw=o,_w=a}function Uw(t){return[Xb(t[1],t[0]),sw(t[2])]}function Lw(t){var e=t[0],n=t[1],r=Jb(n);return[r*Jb(e),r*nw(e),nw(n)]}function qw(t,e){return t[0]*e[0]+t[1]*e[1]+t[2]*e[2]}function Pw(t,e){return[t[1]*e[2]-t[2]*e[1],t[2]*e[0]-t[0]*e[2],t[0]*e[1]-t[1]*e[0]]}function jw(t,e){t[0]+=e[0],t[1]+=e[1],t[2]+=e[2]}function Iw(t,e){return[t[0]*e,t[1]*e,t[2]*e]}function Ww(t){var e=iw(t[0]*t[0]+t[1]*t[1]+t[2]*t[2]);t[0]/=e,t[1]/=e,t[2]/=e}var Hw,Yw,Gw,Vw,Xw,Jw,Zw,Qw,Kw,tk,ek,nk,rk,ik,ok,ak,sk={point:uk,lineStart:ck,lineEnd:fk,polygonStart:function(){sk.point=hk,sk.lineStart=dk,sk.lineEnd=pk,Cw=new se,Bw.polygonStart()},polygonEnd:function(){Bw.polygonEnd(),sk.point=uk,sk.lineStart=ck,sk.lineEnd=fk,$w<0?(xw=-(ww=180),bw=-(kw=90)):Cw>Lb?kw=90:Cw<-Lb&&(bw=-90),Sw[0]=xw,Sw[1]=ww},sphere:function(){xw=-(ww=180),bw=-(kw=90)}};function uk(t,e){Fw.push(Sw=[xw=t,ww=t]),ekw&&(kw=e)}function lk(t,e){var n=Lw([t*Yb,e*Yb]);if(Dw){var r=Pw(Dw,n),i=Pw([r[1],-r[0],0],r);Ww(i),i=Uw(i);var 
o,a=t-Aw,s=a>0?1:-1,u=i[0]*Hb*s,l=Gb(a)>180;l^(s*Awkw&&(kw=o):l^(s*Aw<(u=(u+360)%360-180)&&ukw&&(kw=e)),l?tgk(xw,ww)&&(ww=t):gk(t,ww)>gk(xw,ww)&&(xw=t):ww>=xw?(tww&&(ww=t)):t>Aw?gk(xw,t)>gk(xw,ww)&&(ww=t):gk(t,ww)>gk(xw,ww)&&(xw=t)}else Fw.push(Sw=[xw=t,ww=t]);ekw&&(kw=e),Dw=n,Aw=t}function ck(){sk.point=lk}function fk(){Sw[0]=xw,Sw[1]=ww,sk.point=uk,Dw=null}function hk(t,e){if(Dw){var n=t-Aw;Cw.add(Gb(n)>180?n+(n>0?360:-360):n)}else Mw=t,Ew=e;Bw.point(t,e),lk(t,e)}function dk(){Bw.lineStart()}function pk(){hk(Mw,Ew),Bw.lineEnd(),Gb(Cw)>Lb&&(xw=-(ww=180)),Sw[0]=xw,Sw[1]=ww,Dw=null}function gk(t,e){return(e-=t)<0?e+360:e}function mk(t,e){return t[0]-e[0]}function yk(t,e){return t[0]<=t[1]?t[0]<=e&&e<=t[1]:ePb&&(t-=Math.round(t/Wb)*Wb),[t,e]}function $k(t,e,n){return(t%=Wb)?e||n?Fk(Bk(t),zk(e,n)):Bk(t):e||n?zk(e,n):Sk}function Tk(t){return function(e,n){return Gb(e+=t)>Pb&&(e-=Math.round(e/Wb)*Wb),[e,n]}}function Bk(t){var e=Tk(t);return e.invert=Tk(-t),e}function zk(t,e){var n=Jb(t),r=nw(t),i=Jb(e),o=nw(e);function a(t,e){var a=Jb(e),s=Jb(t)*a,u=nw(t)*a,l=nw(e),c=l*n+s*r;return[Xb(u*i-c*o,s*n-l*r),sw(c*i+u*o)]}return a.invert=function(t,e){var a=Jb(e),s=Jb(t)*a,u=nw(t)*a,l=nw(e),c=l*i-u*o;return[Xb(u*i+l*o,s*n+c*r),sw(c*n-s*r)]},a}function Nk(t,e){(e=Lw(e))[0]-=t,Ww(e);var n=aw(-e[1]);return((-e[2]<0?-n:n)+Wb-Lb)%Wb}function Ok(){var t,e=[];return{point:function(e,n,r){t.push([e,n,r])},lineStart:function(){e.push(t=[])},lineEnd:uw,rejoin:function(){e.length>1&&e.push(e.pop().concat(e.shift()))},result:function(){var n=e;return e=[],t=null,n}}}function Rk(t,e){return Gb(t[0]-e[0])=0;--o)i.point((c=l[o])[0],c[1]);else r(h.x,h.p.x,-1,i);h=h.p}l=(h=h.o).z,d=!d}while(!h.v);i.lineEnd()}}}function qk(t){if(e=t.length){for(var e,n,r=0,i=t[0];++r=0?1:-1,E=M*A,D=E>Pb,C=m*w;if(u.add(Xb(C*M*nw(E),y*k+C*Jb(E))),a+=D?A+M*Wb:A,D^p>=n^x>=n){var F=Pw(Lw(d),Lw(_));Ww(F);var S=Pw(o,F);Ww(S);var $=(D^A>=0?-1:1)*sw(S[2]);(r>$||r===$&&(F[0]||F[1]))&&(s+=D^A>=0?1:-1)}}return(a<-Lb||a0){for(f||(i.polygonStart(),f=!0),i.lineStart(),t=0;t1&&2&u&&h.push(h.pop().concat(h.shift())),a.push(h.filter(Ik))}return h}}function Ik(t){return t.length>1}function Wk(t,e){return((t=t.x)[0]<0?t[1]-jb-Lb:jb-t[1])-((e=e.x)[0]<0?e[1]-jb-Lb:jb-e[1])}Sk.invert=Sk;var Hk=jk((function(){return!0}),(function(t){var e,n=NaN,r=NaN,i=NaN;return{lineStart:function(){t.lineStart(),e=1},point:function(o,a){var s=o>0?Pb:-Pb,u=Gb(o-n);Gb(u-Pb)0?jb:-jb),t.point(i,r),t.lineEnd(),t.lineStart(),t.point(s,r),t.point(o,r),e=0):i!==s&&u>=Pb&&(Gb(n-i)Lb?Vb((nw(e)*(o=Jb(r))*nw(n)-nw(r)*(i=Jb(e))*nw(t))/(i*o*a)):(e+r)/2}(n,r,o,a),t.point(i,r),t.lineEnd(),t.lineStart(),t.point(s,r),e=0),t.point(n=o,r=a),i=s},lineEnd:function(){t.lineEnd(),n=r=NaN},clean:function(){return 2-e}}}),(function(t,e,n,r){var i;if(null==t)i=n*jb,r.point(-Pb,i),r.point(0,i),r.point(Pb,i),r.point(Pb,0),r.point(Pb,-i),r.point(0,-i),r.point(-Pb,-i),r.point(-Pb,0),r.point(-Pb,i);else if(Gb(t[0]-e[0])>Lb){var o=t[0]0,i=Gb(e)>Lb;function o(t,n){return Jb(t)*Jb(n)>e}function a(t,n,r){var i=[1,0,0],o=Pw(Lw(t),Lw(n)),a=qw(o,o),s=o[0],u=a-s*s;if(!u)return!r&&t;var l=e*a/u,c=-e*s/u,f=Pw(i,o),h=Iw(i,l);jw(h,Iw(o,c));var d=f,p=qw(h,d),g=qw(d,d),m=p*p-g*(qw(h,h)-1);if(!(m<0)){var y=iw(m),v=Iw(d,(-p-y)/g);if(jw(v,h),v=Uw(v),!r)return v;var _,x=t[0],b=n[0],w=t[1],k=n[1];b0^v[1]<(Gb(v[0]-x)Pb^(x<=v[0]&&v[0]<=b)){var E=Iw(d,(-p+y)/g);return jw(E,h),[v,Uw(E)]}}}function s(e,n){var i=r?t:Pb-t,o=0;return e<-i?o|=1:e>i&&(o|=2),n<-i?o|=4:n>i&&(o|=8),o}return jk(o,(function(t){var 
e,n,u,l,c;return{lineStart:function(){l=u=!1,c=1},point:function(f,h){var d,p=[f,h],g=o(f,h),m=r?g?0:s(f,h):g?s(f+(f<0?Pb:-Pb),h):0;if(!e&&(l=u=g)&&t.lineStart(),g!==u&&(!(d=a(e,p))||Rk(e,d)||Rk(p,d))&&(p[2]=1),g!==u)c=0,g?(t.lineStart(),d=a(p,e),t.point(d[0],d[1])):(d=a(e,p),t.point(d[0],d[1],2),t.lineEnd()),e=d;else if(i&&e&&r^g){var y;m&n||!(y=a(p,e,!0))||(c=0,r?(t.lineStart(),t.point(y[0][0],y[0][1]),t.point(y[1][0],y[1][1]),t.lineEnd()):(t.point(y[1][0],y[1][1]),t.lineEnd(),t.lineStart(),t.point(y[0][0],y[0][1],3)))}!g||e&&Rk(e,p)||t.point(p[0],p[1]),e=p,u=g,n=m},lineEnd:function(){u&&t.lineEnd(),e=null},clean:function(){return c|(l&&u)<<1}}}),(function(e,r,i,o){!function(t,e,n,r,i,o){if(n){var a=Jb(e),s=nw(e),u=r*n;null==i?(i=e+r*Wb,o=e-u/2):(i=Nk(a,i),o=Nk(a,o),(r>0?io)&&(i+=r*Wb));for(var l,c=i;r>0?c>o:c0)do{l.point(0===c||3===c?t:n,c>1?r:e)}while((c=(c+s+4)%4)!==f);else l.point(o[0],o[1])}function a(r,i){return Gb(r[0]-t)0?0:3:Gb(r[0]-n)0?2:1:Gb(r[1]-e)0?1:0:i>0?3:2}function s(t,e){return u(t.x,e.x)}function u(t,e){var n=a(t,1),r=a(e,1);return n!==r?n-r:0===n?e[1]-t[1]:1===n?t[0]-e[0]:2===n?t[1]-e[1]:e[0]-t[0]}return function(a){var u,l,c,f,h,d,p,g,m,y,v,_=a,x=Ok(),b={point:w,lineStart:function(){b.point=k,l&&l.push(c=[]);y=!0,m=!1,p=g=NaN},lineEnd:function(){u&&(k(f,h),d&&m&&x.rejoin(),u.push(x.result()));b.point=w,m&&_.lineEnd()},polygonStart:function(){_=x,u=[],l=[],v=!0},polygonEnd:function(){var e=function(){for(var e=0,n=0,i=l.length;nr&&(h-o)*(r-a)>(d-a)*(t-o)&&++e:d<=r&&(h-o)*(r-a)<(d-a)*(t-o)&&--e;return e}(),n=v&&e,i=(u=Fe(u)).length;(n||i)&&(a.polygonStart(),n&&(a.lineStart(),o(null,null,1,a),a.lineEnd()),i&&Lk(u,s,e,o,a),a.polygonEnd());_=a,u=l=c=null}};function w(t,e){i(t,e)&&_.point(t,e)}function k(o,a){var s=i(o,a);if(l&&c.push([o,a]),y)f=o,h=a,d=s,y=!1,s&&(_.lineStart(),_.point(o,a));else if(s&&m)_.point(o,a);else{var u=[p=Math.max(Vk,Math.min(Gk,p)),g=Math.max(Vk,Math.min(Gk,g))],x=[o=Math.max(Vk,Math.min(Gk,o)),a=Math.max(Vk,Math.min(Gk,a))];!function(t,e,n,r,i,o){var a,s=t[0],u=t[1],l=0,c=1,f=e[0]-s,h=e[1]-u;if(a=n-s,f||!(a>0)){if(a/=f,f<0){if(a0){if(a>c)return;a>l&&(l=a)}if(a=i-s,f||!(a<0)){if(a/=f,f<0){if(a>c)return;a>l&&(l=a)}else if(f>0){if(a0)){if(a/=h,h<0){if(a0){if(a>c)return;a>l&&(l=a)}if(a=o-u,h||!(a<0)){if(a/=h,h<0){if(a>c)return;a>l&&(l=a)}else if(h>0){if(a0&&(t[0]=s+l*f,t[1]=u+l*h),c<1&&(e[0]=s+c*f,e[1]=u+c*h),!0}}}}}(u,x,t,e,n,r)?s&&(_.lineStart(),_.point(o,a),v=!1):(m||(_.lineStart(),_.point(u[0],u[1])),_.point(x[0],x[1]),s||_.lineEnd(),v=!1)}p=o,g=a,m=s}return b}}function Jk(t,e,n){var r=Se(t,e-Lb,n).concat(e);return function(t){return r.map((function(e){return[t,e]}))}}function Zk(t,e,n){var r=Se(t,e-Lb,n).concat(e);return function(t){return r.map((function(e){return[e,t]}))}}var Qk,Kk,tA,eA,nA=t=>t,rA=new se,iA=new se,oA={point:uw,lineStart:uw,lineEnd:uw,polygonStart:function(){oA.lineStart=aA,oA.lineEnd=lA},polygonEnd:function(){oA.lineStart=oA.lineEnd=oA.point=uw,rA.add(Gb(iA)),iA=new se},result:function(){var t=rA/2;return rA=new se,t}};function aA(){oA.point=sA}function sA(t,e){oA.point=uA,Qk=tA=t,Kk=eA=e}function uA(t,e){iA.add(eA*t-tA*e),tA=t,eA=e}function lA(){uA(Qk,Kk)}var cA=1/0,fA=cA,hA=-cA,dA=hA,pA={point:function(t,e){thA&&(hA=t);edA&&(dA=e)},lineStart:uw,lineEnd:uw,polygonStart:uw,polygonEnd:uw,result:function(){var t=[[cA,fA],[hA,dA]];return hA=dA=-(fA=cA=1/0),t}};var 
gA,mA,yA,vA,_A=0,xA=0,bA=0,wA=0,kA=0,AA=0,MA=0,EA=0,DA=0,CA={point:FA,lineStart:SA,lineEnd:BA,polygonStart:function(){CA.lineStart=zA,CA.lineEnd=NA},polygonEnd:function(){CA.point=FA,CA.lineStart=SA,CA.lineEnd=BA},result:function(){var t=DA?[MA/DA,EA/DA]:AA?[wA/AA,kA/AA]:bA?[_A/bA,xA/bA]:[NaN,NaN];return _A=xA=bA=wA=kA=AA=MA=EA=DA=0,t}};function FA(t,e){_A+=t,xA+=e,++bA}function SA(){CA.point=$A}function $A(t,e){CA.point=TA,FA(yA=t,vA=e)}function TA(t,e){var n=t-yA,r=e-vA,i=iw(n*n+r*r);wA+=i*(yA+t)/2,kA+=i*(vA+e)/2,AA+=i,FA(yA=t,vA=e)}function BA(){CA.point=FA}function zA(){CA.point=OA}function NA(){RA(gA,mA)}function OA(t,e){CA.point=RA,FA(gA=yA=t,mA=vA=e)}function RA(t,e){var n=t-yA,r=e-vA,i=iw(n*n+r*r);wA+=i*(yA+t)/2,kA+=i*(vA+e)/2,AA+=i,MA+=(i=vA*t-yA*e)*(yA+t),EA+=i*(vA+e),DA+=3*i,FA(yA=t,vA=e)}function UA(t){this._context=t}UA.prototype={_radius:4.5,pointRadius:function(t){return this._radius=t,this},polygonStart:function(){this._line=0},polygonEnd:function(){this._line=NaN},lineStart:function(){this._point=0},lineEnd:function(){0===this._line&&this._context.closePath(),this._point=NaN},point:function(t,e){switch(this._point){case 0:this._context.moveTo(t,e),this._point=1;break;case 1:this._context.lineTo(t,e);break;default:this._context.moveTo(t+this._radius,e),this._context.arc(t,e,this._radius,0,Wb)}},result:uw};var LA,qA,PA,jA,IA,WA=new se,HA={point:uw,lineStart:function(){HA.point=YA},lineEnd:function(){LA&&GA(qA,PA),HA.point=uw},polygonStart:function(){LA=!0},polygonEnd:function(){LA=null},result:function(){var t=+WA;return WA=new se,t}};function YA(t,e){HA.point=GA,qA=jA=t,PA=IA=e}function GA(t,e){jA-=t,IA-=e,WA.add(iw(jA*jA+IA*IA)),jA=t,IA=e}let VA,XA,JA,ZA;class QA{constructor(t){this._append=null==t?KA:function(t){const e=Math.floor(t);if(!(e>=0))throw new RangeError(`invalid digits: ${t}`);if(e>15)return KA;if(e!==VA){const t=10**e;VA=e,XA=function(e){let n=1;this._+=e[0];for(const r=e.length;n=0))throw new RangeError(`invalid digits: ${t}`);i=e}return null===e&&(r=new QA(i)),a},a.projection(t).digits(i).context(e)}function eM(t){return function(e){var n=new nM;for(var r in t)n[r]=t[r];return n.stream=e,n}}function nM(){}function rM(t,e,n){var r=t.clipExtent&&t.clipExtent();return t.scale(150).translate([0,0]),null!=r&&t.clipExtent(null),pw(n,t.stream(pA)),e(pA.result()),null!=r&&t.clipExtent(r),t}function iM(t,e,n){return rM(t,(function(n){var r=e[1][0]-e[0][0],i=e[1][1]-e[0][1],o=Math.min(r/(n[1][0]-n[0][0]),i/(n[1][1]-n[0][1])),a=+e[0][0]+(r-o*(n[1][0]+n[0][0]))/2,s=+e[0][1]+(i-o*(n[1][1]+n[0][1]))/2;t.scale(150*o).translate([a,s])}),n)}function oM(t,e,n){return iM(t,[[0,0],e],n)}function aM(t,e,n){return rM(t,(function(n){var r=+e,i=r/(n[1][0]-n[0][0]),o=(r-i*(n[1][0]+n[0][0]))/2,a=-i*n[0][1];t.scale(150*i).translate([o,a])}),n)}function sM(t,e,n){return rM(t,(function(n){var r=+e,i=r/(n[1][1]-n[0][1]),o=-i*n[0][0],a=(r-i*(n[1][1]+n[0][1]))/2;t.scale(150*i).translate([o,a])}),n)}nM.prototype={constructor:nM,point:function(t,e){this.stream.point(t,e)},sphere:function(){this.stream.sphere()},lineStart:function(){this.stream.lineStart()},lineEnd:function(){this.stream.lineEnd()},polygonStart:function(){this.stream.polygonStart()},polygonEnd:function(){this.stream.polygonEnd()}};var uM=16,lM=Jb(30*Yb);function cM(t,e){return+e?function(t,e){function n(r,i,o,a,s,u,l,c,f,h,d,p,g,m){var y=l-r,v=c-i,_=y*y+v*v;if(_>4*e&&g--){var 
x=a+h,b=s+d,w=u+p,k=iw(x*x+b*b+w*w),A=sw(w/=k),M=Gb(Gb(w)-1)e||Gb((y*F+v*S)/_-.5)>.3||a*h+s*d+u*p2?t[2]%360*Yb:0,F()):[m*Hb,y*Hb,v*Hb]},D.angle=function(t){return arguments.length?(_=t%360*Yb,F()):_*Hb},D.reflectX=function(t){return arguments.length?(x=t?-1:1,F()):x<0},D.reflectY=function(t){return arguments.length?(b=t?-1:1,F()):b<0},D.precision=function(t){return arguments.length?(a=cM(s,E=t*t),S()):iw(E)},D.fitExtent=function(t,e){return iM(D,t,e)},D.fitSize=function(t,e){return oM(D,t,e)},D.fitWidth=function(t,e){return aM(D,t,e)},D.fitHeight=function(t,e){return sM(D,t,e)},function(){return e=t.apply(this,arguments),D.invert=e.invert&&C,F()}}function gM(t){var e=0,n=Pb/3,r=pM(t),i=r(e,n);return i.parallels=function(t){return arguments.length?r(e=t[0]*Yb,n=t[1]*Yb):[e*Hb,n*Hb]},i}function mM(t,e){var n=nw(t),r=(n+nw(e))/2;if(Gb(r)2?t[2]*Yb:0),e.invert=function(e){return(e=t.invert(e[0]*Yb,e[1]*Yb))[0]*=Hb,e[1]*=Hb,e},e}(i.rotate()).invert([0,0]));return u(null==l?[[s[0]-o,s[1]-o],[s[0]+o,s[1]+o]]:t===kM?[[Math.max(s[0]-o,l),e],[Math.min(s[0]+o,n),r]]:[[l,Math.max(s[1]-o,e)],[n,Math.min(s[1]+o,r)]])}return i.scale=function(t){return arguments.length?(a(t),c()):a()},i.translate=function(t){return arguments.length?(s(t),c()):s()},i.center=function(t){return arguments.length?(o(t),c()):o()},i.clipExtent=function(t){return arguments.length?(null==t?l=e=n=r=null:(l=+t[0][0],e=+t[0][1],n=+t[1][0],r=+t[1][1]),c()):null==l?null:[[l,e],[n,r]]},c()}function MM(t){return ow((jb+t)/2)}function EM(t,e){var n=Jb(t),r=t===e?nw(t):tw(n/Jb(e))/tw(MM(e)/MM(t)),i=n*ew(MM(t),r)/r;if(!r)return kM;function o(t,e){i>0?e<-jb+Lb&&(e=-jb+Lb):e>jb-Lb&&(e=jb-Lb);var n=i/ew(MM(e),r);return[n*nw(r*t),i-n*Jb(r*t)]}return o.invert=function(t,e){var n=i-e,o=rw(r)*iw(t*t+n*n),a=Xb(t,Gb(n))*rw(n);return n*r<0&&(a-=Pb*rw(t)*rw(n)),[a/r,2*Vb(ew(i/o,1/r))-jb]},o}function DM(t,e){return[t,e]}function CM(t,e){var n=Jb(t),r=t===e?nw(t):(n-Jb(e))/(e-t),i=n/r+t;if(Gb(r)Lb&&--i>0);return[t/(.8707+(o=r*r)*(o*(o*o*o*(.003971-.001529*o)-.013791)-.131979)),r]},RM.invert=xM(sw),UM.invert=xM((function(t){return 2*Vb(t)})),LM.invert=function(t,e){return[-e,2*Vb(Qb(t))-jb]};var qM=Math.abs,PM=Math.cos,jM=Math.sin,IM=1e-6,WM=Math.PI,HM=WM/2,YM=function(t){return t>0?Math.sqrt(t):0}(2);function GM(t){return t>1?HM:t<-1?-HM:Math.asin(t)}function VM(t,e){var n,r=t*jM(e),i=30;do{e-=n=(e+jM(e)-r)/(1+PM(e))}while(qM(n)>IM&&--i>0);return e/2}var XM=function(t,e,n){function r(r,i){return[t*r*PM(i=VM(n,i)),e*jM(i)]}return r.invert=function(r,i){return i=GM(i/e),[r/(t*PM(i)),GM((2*i+jM(2*i))/n)]},r}(YM/HM,YM,WM);const JM=tM(),ZM=["clipAngle","clipExtent","scale","translate","center","rotate","parallels","precision","reflectX","reflectY","coefficient","distance","fraction","lobes","parallel","radius","ratio","spacing","tilt"];function QM(t,e){if(!t||"string"!=typeof t)throw new Error("Projection type must be a name string.");return t=t.toLowerCase(),arguments.length>1?(tE[t]=function(t,e){return function n(){const r=e();return r.type=t,r.path=tM().projection(r),r.copy=r.copy||function(){const t=n();return ZM.forEach((e=>{r[e]&&t[e](r[e]())})),t.path.pointRadius(r.path.pointRadius()),t},op(r)}}(t,e),this):tE[t]||null}function KM(t){return t&&t.path||JM}const tE={albers:vM,albersusa:function(){var t,e,n,r,i,o,a=vM(),s=yM().rotate([154,0]).center([-2,58.5]).parallels([55,65]),u=yM().rotate([157,0]).center([-3,19.9]).parallels([8,18]),l={point:function(t,e){o=[t,e]}};function c(t){var e=t[0],a=t[1];return 
o=null,n.point(e,a),o||(r.point(e,a),o)||(i.point(e,a),o)}function f(){return t=e=null,c}return c.invert=function(t){var e=a.scale(),n=a.translate(),r=(t[0]-n[0])/e,i=(t[1]-n[1])/e;return(i>=.12&&i<.234&&r>=-.425&&r<-.214?s:i>=.166&&i<.234&&r>=-.214&&r<-.115?u:a).invert(t)},c.stream=function(n){return t&&e===n?t:(r=[a.stream(e=n),s.stream(n),u.stream(n)],i=r.length,t={point:function(t,e){for(var n=-1;++n2?t[2]+90:90]):[(t=n())[0],t[1],t[2]-90]},n([0,0,90]).scale(159.155)}};for(const t in tE)QM(t,tE[t]);function eE(){}const nE=[[],[[[1,1.5],[.5,1]]],[[[1.5,1],[1,1.5]]],[[[1.5,1],[.5,1]]],[[[1,.5],[1.5,1]]],[[[1,1.5],[.5,1]],[[1,.5],[1.5,1]]],[[[1,.5],[1,1.5]]],[[[1,.5],[.5,1]]],[[[.5,1],[1,.5]]],[[[1,1.5],[1,.5]]],[[[.5,1],[1,.5]],[[1.5,1],[1,1.5]]],[[[1.5,1],[1,.5]]],[[[.5,1],[1.5,1]]],[[[1,1.5],[1.5,1]]],[[[.5,1],[1,1.5]]],[]];function rE(){var t=1,e=1,n=a;function r(t,e){return e.map((e=>i(t,e)))}function i(r,i){var a=[],s=[];return function(n,r,i){var a,s,u,l,c,f,h=[],d=[];a=s=-1,l=n[0]>=r,nE[l<<1].forEach(p);for(;++a=r,nE[u|l<<1].forEach(p);nE[l<<0].forEach(p);for(;++s=r,c=n[s*t]>=r,nE[l<<1|c<<2].forEach(p);++a=r,f=c,c=n[s*t+a+1]>=r,nE[u|l<<1|c<<2|f<<3].forEach(p);nE[l|c<<3].forEach(p)}a=-1,c=n[s*t]>=r,nE[c<<2].forEach(p);for(;++a=r,nE[c<<2|f<<3].forEach(p);function p(t){var e,n,r=[t[0][0]+a,t[0][1]+s],u=[t[1][0]+a,t[1][1]+s],l=o(r),c=o(u);(e=d[l])?(n=h[c])?(delete d[e.end],delete h[n.start],e===n?(e.ring.push(u),i(e.ring)):h[e.start]=d[n.end]={start:e.start,end:n.end,ring:e.ring.concat(n.ring)}):(delete d[e.end],e.ring.push(u),d[e.end=c]=e):(e=h[c])?(n=d[l])?(delete h[e.start],delete d[n.end],e===n?(e.ring.push(u),i(e.ring)):h[n.start]=d[e.end]={start:n.start,end:e.end,ring:n.ring.concat(e.ring)}):(delete h[e.start],e.ring.unshift(r),h[e.start=l]=e):h[l]=d[c]={start:l,end:c,ring:[r,u]}}nE[c<<3].forEach(p)}(r,i,(t=>{n(t,r,i),function(t){var e=0,n=t.length,r=t[n-1][1]*t[0][0]-t[n-1][0]*t[0][1];for(;++e0?a.push([t]):s.push(t)})),s.forEach((t=>{for(var e,n=0,r=a.length;n{var o,a=n[0],s=n[1],u=0|a,l=0|s,c=r[l*t+u];a>0&&a0&&s=0&&o>=0||s("invalid size"),t=i,e=o,r},r.smooth=function(t){return arguments.length?(n=t?a:eE,r):n===a},r}function iE(t,e){for(var n,r=-1,i=e.length;++rr!=d>r&&n<(h-l)*(r-c)/(d-c)+l&&(i=-i)}return i}function aE(t,e,n){var r,i,o,a;return function(t,e,n){return(e[0]-t[0])*(n[1]-t[1])==(n[0]-t[0])*(e[1]-t[1])}(t,e,n)&&(i=t[r=+(t[0]===e[0])],o=n[r],a=e[r],i<=o&&o<=a||a<=o&&o<=i)}function sE(t,e,n){return function(r){var i=at(r),o=n?Math.min(i[0],0):i[0],a=i[1],s=a-o,u=e?be(o,a,t):s/(t+1);return Se(o+u,a,u)}}function uE(t){Ja.call(this,null,t)}function lE(t,e,n,r,i){const o=t.x1||0,a=t.y1||0,s=e*n<0;function u(t){t.forEach(l)}function l(t){s&&t.reverse(),t.forEach(c)}function c(t){t[0]=(t[0]-o)*e+r,t[1]=(t[1]-a)*n+i}return function(t){return t.coordinates.forEach(u),t}}function cE(t,e,n){const r=t>=0?t:rs(e,n);return Math.round((Math.sqrt(4*r*r+1)-1)/2)}function fE(t){return J(t)?t:rt(+t)}function hE(){var t=t=>t[0],e=t=>t[1],n=d,r=[-1,-1],i=960,o=500,a=2;function u(s,u){const l=cE(r[0],s,t)>>a,c=cE(r[1],s,e)>>a,f=l?l+2:0,h=c?c+2:0,d=2*f+(i>>a),p=2*h+(o>>a),g=new Float32Array(d*p),m=new Float32Array(d*p);let y=g;s.forEach((r=>{const i=f+(+t(r)>>a),o=h+(+e(r)>>a);i>=0&&i=0&&o0&&c>0?(dE(d,p,g,m,l),pE(d,p,m,g,c),dE(d,p,g,m,l),pE(d,p,m,g,c),dE(d,p,g,m,l),pE(d,p,m,g,c)):l>0?(dE(d,p,g,m,l),dE(d,p,m,g,l),dE(d,p,g,m,l),y=m):c>0&&(pE(d,p,g,m,c),pE(d,p,m,g,c),pE(d,p,g,m,c),y=m);const v=u?Math.pow(2,-2*a):1/$e(y);for(let t=0,e=d*p;t>a),y2:h+(o>>a)}}return u.x=function(e){return 
[minified Vega (d3-based) JavaScript bundle — vendored asset; damaged, unrecoverable contents omitted. Identifiable definitions include the Isocontour, KDE2D, Contour, GeoJSON, GeoPath, GeoPoint, GeoShape, Graticule, Heatmap, and Projection transforms; the Force simulation transform; the hierarchy layouts (Nest, Pack, Partition, Stratify, Tree, TreeLinks, Treemap); the Label, Loess, and Regression transforms; CrossFilter and ResolveFilter; Delaunay/Voronoi geometry helpers; a word-cloud layout; and the Vega expression parser/code generator with its Unicode identifier tables.]
xB={random:()=>t.random(),cumulativeNormal:hs,cumulativeLogNormal:vs,cumulativeUniform:As,densityNormal:fs,densityLogNormal:ys,densityUniform:ks,quantileNormal:ds,quantileLogNormal:_s,quantileUniform:Ms,sampleNormal:cs,sampleLogNormal:ms,sampleUniform:ws,isArray:k,isBoolean:gt,isDate:mt,isDefined:t=>void 0!==t,isNumber:vt,isObject:A,isRegExp:_t,isString:xt,isTuple:ma,isValid:t=>null!=t&&t==t,toBoolean:Ft,toDate:t=>$t(t),toNumber:S,toString:Tt,indexof:function(t){for(var e=arguments.length,n=new Array(e>1?e-1:0),r=1;r1?e-1:0),r=1;r1?e-1:0),r=1;r1?e-1:0),r=1;rat(t),inScope:function(t){const e=this.context.group;let n=!1;if(e)for(;t;){if(t===e){n=!0;break}t=t.mark.group}return n},intersect:_B,clampRange:X,pinchDistance:function(t){const e=t.touches,n=e[0].clientX-e[1].clientX,r=e[0].clientY-e[1].clientY;return Math.hypot(n,r)},pinchAngle:function(t){const e=t.touches;return Math.atan2(e[0].clientY-e[1].clientY,e[0].clientX-e[1].clientX)},screen:function(){const t=vB();return t?t.screen:{}},containerSize:function(){const t=this.context.dataflow,e=t.container&&t.container();return e?[e.clientWidth,e.clientHeight]:[void 0,void 0]},windowSize:function(){const t=vB();return t?[t.innerWidth,t.innerHeight]:[void 0,void 0]},bandspace:function(t,e,n){return $d(t||0,e||0,n||0)},setdata:function(t,e){const n=this.context.dataflow,r=this.context.data[t].input;return n.pulse(r,n.changeset().remove(p).insert(e)),1},pathShape:function(t){let e=null;return function(n){return n?yg(n,e=e||ag(t)):t}},panLinear:R,panLog:U,panPow:L,panSymlog:q,zoomLinear:j,zoomLog:I,zoomPow:W,zoomSymlog:H,encode:function(t,e,n){if(t){const n=this.context.dataflow,r=t.mark.source;n.pulse(r,n.changeset().encode(t,e))}return void 0!==n?n:t},modify:function(t,e,n,r,i,o){const a=this.context.dataflow,s=this.context.data[t],u=s.input,l=a.stamp();let c,f,h=s.changes;if(!1===a._trigger||!(u.value.length||e||r))return 0;if((!h||h.stamp{s.modified=!0,a.pulse(u,h).run()}),!0,1)),n&&(c=!0===n?p:k(n)||ma(n)?n:hB(n),h.remove(c)),e&&h.insert(e),r&&(c=hB(r),u.value.some(c)?h.remove(c):h.insert(r)),i)for(f in o)h.modify(i,f,o[f]);return 1},lassoAppend:function(t,e,n){let r=arguments.length>3&&void 0!==arguments[3]?arguments[3]:5;const i=(t=V(t))[t.length-1];return void 0===i||Math.hypot(i[0]-e,i[1]-n)>r?[...t,[e,n]]:t},lassoPath:function(t){return V(t).reduce(((e,n,r)=>{let[i,o]=n;return e+(0==r?`M ${i},${o} `:r===t.length-1?" 
Z":`L ${i},${o} `)}),"")},intersectLasso:function(t,e,n){const{x:r,y:i,mark:o}=n,a=(new Vg).set(Number.MAX_SAFE_INTEGER,Number.MAX_SAFE_INTEGER,Number.MIN_SAFE_INTEGER,Number.MIN_SAFE_INTEGER);for(const[t,n]of e)ta.x2&&(a.x2=t),na.y2&&(a.y2=n);return a.translate(r,i),_B([[a.x1,a.y1],[a.x2,a.y2]],t,o).filter((t=>function(t,e,n){let r=0;for(let i=0,o=n.length-1;ie!=s>e&&t<(a-u)*(e-l)/(s-l)+u&&r++}return 1&r}(t.x,t.y,e)))}},bB=["view","item","group","xy","x","y"],wB="this.",kB={},AB={forbidden:["_"],allowed:["datum","event","item"],fieldvar:"datum",globalvar:t=>`_[${Ct(ZT+t)}]`,functions:function(t){const e=bT(t);bB.forEach((t=>e[t]="event.vega."+t));for(const t in xB)e[t]=wB+t;return ot(e,nB(t,xB,kB)),e},constants:xT,visitors:kB},MB=wT(AB);function EB(t,e,n){return 1===arguments.length?xB[t]:(xB[t]=e,n&&(kB[t]=n),MB&&(MB.functions[t]=wB+t),this)}function DB(t,e){const n={};let r;try{r=_T(t=xt(t)?t:Ct(t)+"")}catch(e){s("Expression parse error: "+t)}r.visit((t=>{if(t.type!==RS)return;const r=t.callee.name,i=AB.visitors[r];i&&i(r,t.arguments,e,n)}));const i=MB(r);return i.globals.forEach((t=>{const r=ZT+t;!lt(n,r)&&e.getSignal(t)&&(n[r]=e.signalRef(t))})),{$expr:ot({code:i.code},e.options.ast?{ast:r}:null),$fields:i.fields,$params:n}}EB("bandwidth",(function(t,e){const n=eB(t,(e||this).context);return n&&n.bandwidth?n.bandwidth():0}),KT),EB("copy",(function(t,e){const n=eB(t,(e||this).context);return n?n.copy():void 0}),KT),EB("domain",(function(t,e){const n=eB(t,(e||this).context);return n?n.domain():[]}),KT),EB("range",(function(t,e){const n=eB(t,(e||this).context);return n&&n.range?n.range():[]}),KT),EB("invert",(function(t,e,n){const r=eB(t,(n||this).context);return r?k(e)?(r.invertRange||r.invert)(e):(r.invert||r.invertExtent)(e):void 0}),KT),EB("scale",(function(t,e,n){const r=eB(t,(n||this).context);return r?r(e):void 0}),KT),EB("gradient",(function(t,e,n,r,i){t=eB(t,(i||this).context);const o=Qp(e,n);let a=t.domain(),s=a[0],u=F(a),l=f;return u-s?l=_p(t,s,u):t=(t.interpolator?ap("sequential")().interpolator(t.interpolator()):ap("linear")().interpolate(t.interpolate()).range(t.range())).domain([s=0,u=1]),t.ticks&&(a=t.ticks(+r||15),s!==a[0]&&a.unshift(s),u!==F(a)&&a.push(u)),a.forEach((e=>o.stop(l(e),t(e)))),o}),KT),EB("geoArea",iB,KT),EB("geoBounds",oB,KT),EB("geoCentroid",aB,KT),EB("geoShape",(function(t,e,n){const r=eB(t,(n||this).context);return function(t){return r?r.path.context(t)(e):""}}),KT),EB("geoScale",(function(t,e){const n=eB(t,(e||this).context);return n&&n.scale()}),KT),EB("indata",(function(t,e,n){const r=this.context.data[t]["index:"+e],i=r?r.value.get(n):void 0;return i?i.count:i}),(function(t,e,n,r){e[0].type!==BS&&s("First argument to indata must be a string literal."),e[1].type!==BS&&s("Second argument to indata must be a string literal.");const i=e[0].value,o=e[1].value,a="@"+o;lt(a,r)||(r[a]=n.getData(i).indataRef(n,o))})),EB("data",PT,QT),EB("treePath",(function(t,e,n){const r=yB(t,this),i=r[e],o=r[n];return i&&o?i.path(o).map(mB):void 0}),QT),EB("treeAncestors",(function(t,e){const n=yB(t,this)[e];return n?n.ancestors().map(mB):void 0}),QT),EB("vlSelectionTest",(function(t,e,n){for(var r,i,o,a,s,u=this.context.data[t],l=u?u.values.value:[],c=u?u[zT]&&u[zT].value:void 0,f=n===MT,h=l.length,d=0;d(t[o[n].field]=e,t)),{}))}else 
u=DT,l=CT(i),(f=(c=v[u]||(v[u]={}))[s]||(c[s]=[])).push(l),n&&(f=_[s]||(_[s]=[])).push({[DT]:l});if(e=e||ET,v[DT]?v[DT]=LT[`${DT}_${e}`](...Object.values(v[DT])):Object.keys(v).forEach((t=>{v[t]=Object.keys(v[t]).map((e=>v[t][e])).reduce(((n,r)=>void 0===n?r:LT[`${x[t]}_${e}`](n,r)))})),y=Object.keys(_),n&&y.length){v[r?"vlPoint":"vlMulti"]=e===ET?{or:y.reduce(((t,e)=>(t.push(..._[e]),t)),[])}:{and:y.map((t=>({or:_[t]})))}}return v}),qT),EB("vlSelectionTuples",(function(t,e){return t.map((t=>ot(e.fields?{values:e.fields.map((e=>AT(e)(t.datum)))}:{[DT]:CT(t.datum)},e)))}));const CB=Bt(["rule"]),FB=Bt(["group","image","rect"]);function SB(t){return(t+"").toLowerCase()}function $B(t,e,n){n.endsWith(";")||(n="return("+n+");");const r=Function(...e.concat(n));return t&&t.functions?r.bind(t.functions):r}var TB={operator:(t,e)=>$B(t,["_"],e.code),parameter:(t,e)=>$B(t,["datum","_"],e.code),event:(t,e)=>$B(t,["event"],e.code),handler:(t,e)=>$B(t,["_","event"],`var datum=event.item&&event.item.datum;return ${e.code};`),encode:(t,e)=>{const{marktype:n,channels:r}=e;let i="var o=item,datum=o.datum,m=0,$;";for(const t in r){const e="o["+Ct(t)+"]";i+=`$=${r[t].code};if(${e}!==$)${e}=$,m=1;`}return i+=function(t,e){let n="";return CB[e]||(t.x2&&(t.x?(FB[e]&&(n+="if(o.x>o.x2)$=o.x,o.x=o.x2,o.x2=$;"),n+="o.width=o.x2-o.x;"):n+="o.x=o.x2-(o.width||0);"),t.xc&&(n+="o.x=o.xc-(o.width||0)/2;"),t.y2&&(t.y?(FB[e]&&(n+="if(o.y>o.y2)$=o.y,o.y=o.y2,o.y2=$;"),n+="o.height=o.y2-o.y;"):n+="o.y=o.y2-(o.height||0);"),t.yc&&(n+="o.y=o.yc-(o.height||0)/2;")),n}(r,n),i+="return m;",$B(t,["item","_"],i)},codegen:{get(t){const e=`[${t.map(Ct).join("][")}]`,n=Function("_",`return _${e};`);return n.path=e,n},comparator(t,e){let n;const r=Function("a","b","var u, v; return "+t.map(((t,r)=>{const i=e[r];let o,a;return t.path?(o=`a${t.path}`,a=`b${t.path}`):((n=n||{})["f"+r]=t,o=`this.f${r}(a)`,a=`this.f${r}(b)`),function(t,e,n,r){return`((u = ${t}) < (v = ${e}) || u == null) && v != null ? ${n}\n : (u > v || v == null) && u != null ? ${r}\n : ((v = v instanceof Date ? +v : v), (u = u instanceof Date ? +u : u)) !== u && v === v ? ${n}\n : v !== v && u === u ? 
${r} : `}(o,a,-i,i)})).join("")+"0;");return n?r.bind(n):r}}};function BB(t,e,n){if(!t||!A(t))return t;for(let r,i=0,o=zB.length;it&&t.$tupleid?ya:t));return e.fn[n]||(e.fn[n]=Q(r,t.$order,e.expr.codegen))}},{key:"$context",parse:function(t,e){return e}},{key:"$subflow",parse:function(t,e){const n=t.$subflow;return function(t,r,i){const o=e.fork().parse(n),a=o.get(n.operators[0].id),s=o.signals.parent;return s&&s.set(i),a.detachSubflow=()=>e.detach(o),a}}},{key:"$tupleid",parse:function(){return ya}}];const NB={skip:!0};function OB(t,e,n,r){return new RB(t,e,n,r)}function RB(t,e,n,r){this.dataflow=t,this.transforms=e,this.events=t.events.bind(t),this.expr=r||TB,this.signals={},this.scales={},this.nodes={},this.data={},this.fn={},n&&(this.functions=Object.create(n),this.functions.context=this)}function UB(t){this.dataflow=t.dataflow,this.transforms=t.transforms,this.events=t.events,this.expr=t.expr,this.signals=Object.create(t.signals),this.scales=Object.create(t.scales),this.nodes=Object.create(t.nodes),this.data=Object.create(t.data),this.fn=Object.create(t.fn),t.functions&&(this.functions=Object.create(t.functions),this.functions.context=this)}function LB(t,e){t&&(null==e?t.removeAttribute("aria-label"):t.setAttribute("aria-label",e))}RB.prototype=UB.prototype={fork(){const t=new UB(this);return(this.subcontext||(this.subcontext=[])).push(t),t},detach(t){this.subcontext=this.subcontext.filter((e=>e!==t));const e=Object.keys(t.nodes);for(const n of e)t.nodes[n]._targets=null;for(const n of e)t.nodes[n].detach();t.nodes=null},get(t){return this.nodes[t]},set(t,e){return this.nodes[t]=e},add(t,e){const n=this,r=n.dataflow,i=t.value;if(n.set(t.id,e),function(t){return"collect"===SB(t)}(t.type)&&i&&(i.$ingest?r.ingest(e,i.$ingest,i.$format):i.$request?r.preload(e,i.$request,i.$format):r.pulse(e,r.changeset().insert(i))),t.root&&(n.root=e),t.parent){let i=n.get(t.parent.$ref);i?(r.connect(i,[e]),e.targets().add(i)):(n.unresolved=n.unresolved||[]).push((()=>{i=n.get(t.parent.$ref),r.connect(i,[e]),e.targets().add(i)}))}if(t.signal&&(n.signals[t.signal]=e),t.scale&&(n.scales[t.scale]=e),t.data)for(const r in t.data){const i=n.data[r]||(n.data[r]={});t.data[r].forEach((t=>i[t]=e))}},resolve(){return(this.unresolved||[]).forEach((t=>t())),delete this.unresolved,this},operator(t,e){this.add(t,this.dataflow.add(t.value,e))},transform(t,e){this.add(t,this.dataflow.add(this.transforms[SB(e)]))},stream(t,e){this.set(t.id,e)},update(t,e,n,r,i){this.dataflow.on(e,n,r,i,t.options)},operatorExpression(t){return this.expr.operator(this,t)},parameterExpression(t){return this.expr.parameter(this,t)},eventExpression(t){return this.expr.event(this,t)},handlerExpression(t){return this.expr.handler(this,t)},encodeExpression(t){return this.expr.encode(this,t)},parse:function(t){const e=this,n=t.operators||[];return t.background&&(e.background=t.background),t.eventConfig&&(e.eventConfig=t.eventConfig),t.locale&&(e.locale=t.locale),n.forEach((t=>e.parseOperator(t))),n.forEach((t=>e.parseOperatorParameters(t))),(t.streams||[]).forEach((t=>e.parseStream(t))),(t.updates||[]).forEach((t=>e.parseUpdate(t))),e.resolve()},parseOperator:function(t){const e=this;!function(t){return"operator"===SB(t)}(t.type)&&t.type?e.transform(t,t.type):e.operator(t,t.update?e.operatorExpression(t.update):null)},parseOperatorParameters:function(t){const e=this;if(t.params){const n=e.get(t.id);n||s("Invalid operator id: 
"+t.id),e.dataflow.connect(n,n.parameters(e.parseParameters(t.params),t.react,t.initonly))}},parseParameters:function(t,e){e=e||{};const n=this;for(const r in t){const i=t[r];e[r]=k(i)?i.map((t=>BB(t,n,e))):BB(i,n,e)}return e},parseStream:function(t){var e,n=this,r=null!=t.filter?n.eventExpression(t.filter):void 0,i=null!=t.stream?n.get(t.stream):void 0;t.source?i=n.events(t.source,t.type,r):t.merge&&(i=(e=t.merge.map((t=>n.get(t))))[0].merge.apply(e[0],e.slice(1))),t.between&&(e=t.between.map((t=>n.get(t))),i=i.between(e[0],e[1])),t.filter&&(i=i.filter(r)),null!=t.throttle&&(i=i.throttle(+t.throttle)),null!=t.debounce&&(i=i.debounce(+t.debounce)),null==i&&s("Invalid stream definition: "+JSON.stringify(t)),t.consume&&i.consume(!0),n.stream(t,i)},parseUpdate:function(t){var e,n=this,r=A(r=t.source)?r.$ref:r,i=n.get(r),o=t.update,a=void 0;i||s("Source not defined: "+t.source),e=t.target&&t.target.$expr?n.eventExpression(t.target.$expr):n.get(t.target),o&&o.$expr&&(o.$params&&(a=n.parseParameters(o.$params)),o=n.handlerExpression(o.$expr)),n.update(t,i,e,o,a)},getState:function(t){var e=this,n={};if(t.signals){var r=n.signals={};Object.keys(e.signals).forEach((n=>{const i=e.signals[n];t.signals(n,i)&&(r[n]=i.value)}))}if(t.data){var i=n.data={};Object.keys(e.data).forEach((n=>{const r=e.data[n];t.data(n,r)&&(i[n]=r.input.value)}))}return e.subcontext&&!1!==t.recurse&&(n.subcontext=e.subcontext.map((e=>e.getState(t)))),n},setState:function(t){var e=this,n=e.dataflow,r=t.data,i=t.signals;Object.keys(i||{}).forEach((t=>{n.update(e.signals[t],i[t],NB)})),Object.keys(r||{}).forEach((t=>{n.pulse(e.data[t].input,n.changeset().remove(p).insert(r[t]))})),(t.subcontext||[]).forEach(((t,n)=>{const r=e.subcontext[n];r&&r.setState(t)}))}};const qB="default";function PB(t,e){const n=t.globalCursor()?"undefined"!=typeof document&&document.body:t.container();if(n)return null==e?n.style.removeProperty("cursor"):n.style.cursor=e}function jB(t,e){var n=t._runtime.data;return lt(n,e)||s("Unrecognized data set: "+e),n[e]}function IB(t,e){Aa(e)||s("Second argument to changes must be a changeset.");const n=jB(this,t);return n.modified=!0,this.pulse(n.input,e)}function WB(t){var e=t.padding();return Math.max(0,t._viewWidth+e.left+e.right)}function HB(t){var e=t.padding();return Math.max(0,t._viewHeight+e.top+e.bottom)}function YB(t){var e=t.padding(),n=t._origin;return[e.left+n[0],e.top+n[1]]}function GB(t,e,n){var r,i,o=t._renderer,a=o&&o.canvas();return a&&(i=YB(t),(r=av(e.changedTouches?e.changedTouches[0]:e,a))[0]-=i[0],r[1]-=i[1]),e.dataflow=t,e.item=n,e.vega=function(t,e,n){const r=e?"group"===e.mark.marktype?e:e.mark.group:null;function i(t){var n,i=r;if(t)for(n=e;n;n=n.mark.group)if(n.mark.name===t){i=n;break}return i&&i.mark&&i.mark.interactive?i:{}}function o(t){if(!t)return n;xt(t)&&(t=i(t));const e=n.slice();for(;t;)e[0]-=t.x||0,e[1]-=t.y||0,t=t.mark&&t.mark.group;return e}return{view:rt(t),item:rt(e||{}),group:i,xy:o,x:t=>o(t)[0],y:t=>o(t)[1]}}(t,n,r),e}const VB="view",XB={trap:!1};function JB(t,e,n,r){t._eventListeners.push({type:n,sources:V(e),handler:r})}function ZB(t,e,n){const r=t._eventConfig&&t._eventConfig[e];return!(!1===r||A(r)&&!r[n])||(t.warn(`Blocked ${e} ${n} event listener.`),!1)}function QB(t){return t.item}function KB(t){return t.item.mark.source}function tz(t){return function(e,n){return n.vega.view().changeset().encode(n.item,t)}}function ez(t,e,n){const r=document.createElement(t);for(const t in e)r.setAttribute(t,e[t]);return null!=n&&(r.textContent=n),r}const 
nz="vega-bind",rz="vega-bind-name",iz="vega-bind-radio";function oz(t,e,n,r){const i=n.event||"input",o=()=>t.update(e.value);r.signal(n.signal,e.value),e.addEventListener(i,o),JB(r,e,i,o),t.set=t=>{e.value=t,e.dispatchEvent(function(t){return"undefined"!=typeof Event?new Event(t):{type:t}}(i))}}function az(t,e,n,r){const i=r.signal(n.signal),o=ez("div",{class:nz}),a="radio"===n.input?o:o.appendChild(ez("label"));a.appendChild(ez("span",{class:rz},n.name||n.signal)),e.appendChild(o);let s=sz;switch(n.input){case"checkbox":s=uz;break;case"select":s=lz;break;case"radio":s=cz;break;case"range":s=fz}s(t,a,n,i)}function sz(t,e,n,r){const i=ez("input");for(const t in n)"signal"!==t&&"element"!==t&&i.setAttribute("input"===t?"type":t,n[t]);i.setAttribute("name",n.signal),i.value=r,e.appendChild(i),i.addEventListener("input",(()=>t.update(i.value))),t.elements=[i],t.set=t=>i.value=t}function uz(t,e,n,r){const i={type:"checkbox",name:n.signal};r&&(i.checked=!0);const o=ez("input",i);e.appendChild(o),o.addEventListener("change",(()=>t.update(o.checked))),t.elements=[o],t.set=t=>o.checked=!!t||null}function lz(t,e,n,r){const i=ez("select",{name:n.signal}),o=n.labels||[];n.options.forEach(((t,e)=>{const n={value:t};hz(t,r)&&(n.selected=!0),i.appendChild(ez("option",n,(o[e]||t)+""))})),e.appendChild(i),i.addEventListener("change",(()=>{t.update(n.options[i.selectedIndex])})),t.elements=[i],t.set=t=>{for(let e=0,r=n.options.length;e{const s={type:"radio",name:n.signal,value:e};hz(e,r)&&(s.checked=!0);const u=ez("input",s);u.addEventListener("change",(()=>t.update(e)));const l=ez("label",{},(o[a]||e)+"");return l.prepend(u),i.appendChild(l),u})),t.set=e=>{const n=t.elements,r=n.length;for(let t=0;t{u.textContent=s.value,t.update(+s.value)};s.addEventListener("input",l),s.addEventListener("change",l),t.elements=[s],t.set=t=>{s.value=t,u.textContent=t}}function hz(t,e){return t===e||t+""==e+""}function dz(t,e,n,r,i,o){return(e=e||new r(t.loader())).initialize(n,WB(t),HB(t),YB(t),i,o).background(t.background())}function pz(t,e){return e?function(){try{e.apply(this,arguments)}catch(e){t.error(e)}}:null}function gz(t,e,n){if("string"==typeof e){if("undefined"==typeof document)return t.error("DOM document instance not found."),null;if(!(e=document.querySelector(e)))return t.error("Signal bind element not found: "+e),null}if(e&&n)try{e.textContent=""}catch(n){e=null,t.error(n)}return e}const mz=t=>+t||0;function yz(t){return A(t)?{top:mz(t.top),bottom:mz(t.bottom),left:mz(t.left),right:mz(t.right)}:(t=>({top:t,bottom:t,left:t,right:t}))(mz(t))}async function vz(t,e,n,r){const i=T_(e),o=i&&i.headless;return o||s("Unrecognized renderer type: "+e),await t.runAsync(),dz(t,null,null,o,n,r).renderAsync(t._scenegraph.root)}var _z="width",xz="height",bz="padding",wz={skip:!0};function kz(t,e){var n=t.autosize(),r=t.padding();return e-(n&&n.contains===bz?r.left+r.right:0)}function Az(t,e){var n=t.autosize(),r=t.padding();return e-(n&&n.contains===bz?r.top+r.bottom:0)}function Mz(t,e){return e.modified&&k(e.input.value)&&!t.startsWith("_:vega:_")}function Ez(t,e){return!("parent"===t||e instanceof Za.proxy)}function Dz(t,e,n,r){const i=t.element();i&&i.setAttribute("title",function(t){return null==t?"":k(t)?Cz(t):A(t)&&!mt(t)?(e=t,Object.keys(e).map((t=>{const n=e[t];return t+": "+(k(n)?Cz(n):Fz(n))})).join("\n")):t+"";var e}(r))}function Cz(t){return"["+t.map(Fz).join(", ")+"]"}function Fz(t){return k(t)?"[…]":A(t)&&!mt(t)?"{…}":t}function Sz(t,e){const 
n=this;if(e=e||{},Va.call(n),e.loader&&n.loader(e.loader),e.logger&&n.logger(e.logger),null!=e.logLevel&&n.logLevel(e.logLevel),e.locale||t.locale){const r=ot({},t.locale,e.locale);n.locale(Ro(r.number,r.time))}n._el=null,n._elBind=null,n._renderType=e.renderer||S_.Canvas,n._scenegraph=new Ky;const r=n._scenegraph.root;n._renderer=null,n._tooltip=e.tooltip||Dz,n._redraw=!0,n._handler=(new Sv).scene(r),n._globalCursor=!1,n._preventDefault=!1,n._timers=[],n._eventListeners=[],n._resizeListeners=[],n._eventConfig=function(t){const e=ot({defaults:{}},t),n=(t,e)=>{e.forEach((e=>{k(t[e])&&(t[e]=Bt(t[e]))}))};return n(e.defaults,["prevent","allow"]),n(e,["view","window","selector"]),e}(t.eventConfig),n.globalCursor(n._eventConfig.globalCursor);const i=function(t,e,n){return OB(t,Za,xB,n).parse(e)}(n,t,e.expr);n._runtime=i,n._signals=i.signals,n._bind=(t.bindings||[]).map((t=>({state:null,param:ot({},t)}))),i.root&&i.root.set(r),r.source=i.data.root.input,n.pulse(i.data.root.input,n.changeset().insert(r.items)),n._width=n.width(),n._height=n.height(),n._viewWidth=kz(n,n._width),n._viewHeight=Az(n,n._height),n._origin=[0,0],n._resize=0,n._autosize=1,function(t){var e=t._signals,n=e[_z],r=e[xz],i=e[bz];function o(){t._autosize=t._resize=1}t._resizeWidth=t.add(null,(e=>{t._width=e.size,t._viewWidth=kz(t,e.size),o()}),{size:n}),t._resizeHeight=t.add(null,(e=>{t._height=e.size,t._viewHeight=Az(t,e.size),o()}),{size:r});const a=t.add(null,o,{pad:i});t._resizeWidth.rank=n.rank+1,t._resizeHeight.rank=r.rank+1,a.rank=i.rank+1}(n),function(t){t.add(null,(e=>(t._background=e.bg,t._resize=1,e.bg)),{bg:t._signals.background})}(n),function(t){const e=t._signals.cursor||(t._signals.cursor=t.add({user:qB,item:null}));t.on(t.events("view","pointermove"),e,((t,n)=>{const r=e.value,i=r?xt(r)?r:r.user:qB,o=n.item&&n.item.cursor||null;return r&&i===r.user&&o==r.item?r:{user:i,item:o}})),t.add(null,(function(e){let n=e.cursor,r=this.value;return xt(n)||(r=n.item,n=n.user),PB(t,n&&n!==qB?n:r||n),r}),{cursor:e})}(n),n.description(t.description),e.hover&&n.hover(),e.container&&n.initialize(e.container,e.bind),e.watchPixelRatio&&n._watchPixelRatio()}function $z(t,e){return lt(t._signals,e)?t._signals[e]:s("Unrecognized signal name: "+Ct(e))}function Tz(t,e){const n=(t._targets||[]).filter((t=>t._update&&t._update.handler===e));return n.length?n[0]:null}function Bz(t,e,n,r){let i=Tz(n,r);return i||(i=pz(t,(()=>r(e,n.value))),i.handler=r,t.on(n,null,i)),t}function zz(t,e,n){const r=Tz(e,n);return r&&e._targets.remove(r),t}dt(Sz,Va,{async evaluate(t,e,n){if(await Va.prototype.evaluate.call(this,t,e),this._redraw||this._resize)try{this._renderer&&(this._resize&&(this._resize=0,function(t){var e=YB(t),n=WB(t),r=HB(t);t._renderer.background(t.background()),t._renderer.resize(n,r,e),t._handler.origin(e),t._resizeListeners.forEach((e=>{try{e(n,r)}catch(e){t.error(e)}}))}(this)),await this._renderer.renderAsync(this._scenegraph.root)),this._redraw=!1}catch(t){this.error(t)}return n&&da(this,n),this},dirty(t){this._redraw=!0,this._renderer&&this._renderer.dirty(t)},description(t){if(arguments.length){const e=null!=t?t+"":null;return e!==this._desc&&LB(this._el,this._desc=e),this}return this._desc},container(){return this._el},scenegraph(){return this._scenegraph},origin(){return this._origin.slice()},signal(t,e,n){const r=$z(this,t);return 1===arguments.length?r.value:this.update(r,e,n)},width(t){return arguments.length?this.signal("width",t):this.signal("width")},height(t){return 
arguments.length?this.signal("height",t):this.signal("height")},padding(t){return arguments.length?this.signal("padding",yz(t)):yz(this.signal("padding"))},autosize(t){return arguments.length?this.signal("autosize",t):this.signal("autosize")},background(t){return arguments.length?this.signal("background",t):this.signal("background")},renderer(t){return arguments.length?(T_(t)||s("Unrecognized renderer type: "+t),t!==this._renderType&&(this._renderType=t,this._resetRenderer()),this):this._renderType},tooltip(t){return arguments.length?(t!==this._tooltip&&(this._tooltip=t,this._resetRenderer()),this):this._tooltip},loader(t){return arguments.length?(t!==this._loader&&(Va.prototype.loader.call(this,t),this._resetRenderer()),this):this._loader},resize(){return this._autosize=1,this.touch($z(this,"autosize"))},_resetRenderer(){this._renderer&&(this._renderer=null,this.initialize(this._el,this._elBind))},_resizeView:function(t,e,n,r,i,o){this.runAfter((a=>{let s=0;a._autosize=0,a.width()!==n&&(s=1,a.signal(_z,n,wz),a._resizeWidth.skip(!0)),a.height()!==r&&(s=1,a.signal(xz,r,wz),a._resizeHeight.skip(!0)),a._viewWidth!==t&&(a._resize=1,a._viewWidth=t),a._viewHeight!==e&&(a._resize=1,a._viewHeight=e),a._origin[0]===i[0]&&a._origin[1]===i[1]||(a._resize=1,a._origin=i),s&&a.run("enter"),o&&a.runAfter((t=>t.resize()))}),!1,1)},addEventListener(t,e,n){let r=e;return n&&!1===n.trap||(r=pz(this,e),r.raw=e),this._handler.on(t,r),this},removeEventListener(t,e){for(var n,r,i=this._handler.handlers(t),o=i.length;--o>=0;)if(r=i[o].type,n=i[o].handler,t===r&&(e===n||e===n.raw)){this._handler.off(r,n);break}return this},addResizeListener(t){const e=this._resizeListeners;return e.includes(t)||e.push(t),this},removeResizeListener(t){var e=this._resizeListeners,n=e.indexOf(t);return n>=0&&e.splice(n,1),this},addSignalListener(t,e){return Bz(this,t,$z(this,t),e)},removeSignalListener(t,e){return zz(this,$z(this,t),e)},addDataListener(t,e){return Bz(this,t,jB(this,t).values,e)},removeDataListener(t,e){return zz(this,jB(this,t).values,e)},globalCursor(t){if(arguments.length){if(this._globalCursor!==!!t){const e=PB(this,null);this._globalCursor=!!t,e&&PB(this,e)}return this}return this._globalCursor},preventDefault(t){return arguments.length?(this._preventDefault=t,this):this._preventDefault},timer:function(t,e){this._timers.push(function(t,e,n){var r=new lD,i=e;return null==e?(r.restart(t,e,n),r):(r._restart=r.restart,r.restart=function(t,e,n){e=+e,n=null==n?sD():+n,r._restart((function o(a){a+=i,r._restart(o,i+=e,n),t(a)}),e,n)},r.restart(t,e,n),r)}((function(e){t({timestamp:Date.now(),elapsed:e})}),e))},events:function(t,e,n){var r,i=this,o=new Ba(n),a=function(n,r){i.runAsync(null,(()=>{t===VB&&function(t,e){var n=t._eventConfig.defaults,r=n.prevent,i=n.allow;return!1!==r&&!0!==i&&(!0===r||!1===i||(r?r[e]:i?!i[e]:t.preventDefault()))}(i,e)&&n.preventDefault(),o.receive(GB(i,n,r))}))};if("timer"===t)ZB(i,"timer",e)&&i.timer(a,e);else if(t===VB)ZB(i,"view",e)&&i.addEventListener(e,a,XB);else if("window"===t?ZB(i,"window",e)&&"undefined"!=typeof window&&(r=[window]):"undefined"!=typeof document&&ZB(i,"selector",e)&&(r=Array.from(document.querySelectorAll(t))),r){for(var s=0,u=r.length;s=0;)a[t].stop();for(t=u.length;--t>=0;)for(e=(n=u[t]).sources.length;--e>=0;)n.sources[e].removeEventListener(n.type,n.handler);for(o&&o.call(this,this._handler,null,null,null),t=s.length;--t>=0;)i=s[t].type,r=s[t].handler,this._handler.off(i,r);return this},hover:function(t,e){return 
e=[e||"update",(t=[t||"hover"])[0]],this.on(this.events("view","pointerover",QB),KB,tz(t)),this.on(this.events("view","pointerout",QB),KB,tz(e)),this},data:function(t,e){return arguments.length<2?jB(this,t).values.value:IB.call(this,t,Ma().remove(p).insert(e))},change:IB,insert:function(t,e){return IB.call(this,t,Ma().insert(e))},remove:function(t,e){return IB.call(this,t,Ma().remove(e))},scale:function(t){var e=this._runtime.scales;return lt(e,t)||s("Unrecognized scale or projection: "+t),e[t].value},initialize:function(t,e){const n=this,r=n._renderType,i=n._eventConfig.bind,o=T_(r);t=n._el=t?gz(n,t,!0):null,function(t){const e=t.container();e&&(e.setAttribute("role","graphics-document"),e.setAttribute("aria-roleDescription","visualization"),LB(e,t.description()))}(n),o||n.error("Unrecognized renderer type: "+r);const a=o.handler||Sv,s=t?o.renderer:o.headless;return n._renderer=s?dz(n,n._renderer,t,s):null,n._handler=function(t,e,n,r){const i=new r(t.loader(),pz(t,t.tooltip())).scene(t.scenegraph().root).initialize(n,YB(t),t);return e&&e.handlers().forEach((t=>{i.on(t.type,t.handler)})),i}(n,n._handler,t,a),n._redraw=!0,t&&"none"!==i&&(e=e?n._elBind=gz(n,e,!0):t.appendChild(ez("form",{class:"vega-bindings"})),n._bind.forEach((t=>{t.param.element&&"container"!==i&&(t.element=gz(n,t.param.element,!!t.param.input))})),n._bind.forEach((t=>{!function(t,e,n){if(!e)return;const r=n.param;let i=n.state;i||(i=n.state={elements:null,active:!1,set:null,update:e=>{e!=t.signal(r.signal)&&t.runAsync(null,(()=>{i.source=!0,t.signal(r.signal,e)}))}},r.debounce&&(i.update=it(r.debounce,i.update))),(null==r.input&&r.element?oz:az)(i,e,r,t),i.active||(t.on(t._signals[r.signal],null,(()=>{i.source?i.source=!1:i.set(t.signal(r.signal))})),i.active=!0)}(n,t.element||e,t)}))),n},toImageURL:async function(t,e){t!==S_.Canvas&&t!==S_.SVG&&t!==S_.PNG&&s("Unrecognized image type: "+t);const n=await vz(this,t,e);return t===S_.SVG?function(t,e){const n=new Blob([t],{type:e});return window.URL.createObjectURL(n)}(n.svg(),"image/svg+xml"):n.canvas().toDataURL("image/png")},toCanvas:async function(t,e){return(await vz(this,S_.Canvas,t,e)).canvas()},toSVG:async function(t){return(await vz(this,S_.SVG,t)).svg()},getState:function(t){return this._runtime.getState(t||{data:Mz,signals:Ez,recurse:!0})},setState:function(t){return this.runAsync(null,(e=>{e._trigger=!1,e._runtime.setState(t)}),(t=>{t._trigger=!0})),this},_watchPixelRatio:function(){if("canvas"===this.renderer()&&this._renderer._canvas){let t=null;const e=()=>{null!=t&&t();const n=matchMedia(`(resolution: ${window.devicePixelRatio}dppx)`);n.addEventListener("change",e),t=()=>{n.removeEventListener("change",e)},this._renderer._canvas.getContext("2d").pixelRatio=window.devicePixelRatio||1,this._redraw=!0,this._resize=1,this.resize().runAsync()};e()}}});const Nz="view",Oz="[",Rz="]",Uz="{",Lz="}",qz=":",Pz=",",jz="@",Iz=">",Wz=/[[\]{}]/,Hz={"*":1,arc:1,area:1,group:1,image:1,line:1,path:1,rect:1,rule:1,shape:1,symbol:1,text:1,trail:1};let Yz,Gz;function Vz(t,e,n){return Yz=e||Nz,Gz=n||Hz,Jz(t.trim()).map(Zz)}function Xz(t,e,n,r,i){const o=t.length;let a,s=0;for(;e' after between selector: "+t;n=n.map(Zz);const i=Zz(t.slice(1).trim());if(i.between)return{between:n,stream:i};i.between=n;return i}(t):function(t){const e={source:Yz},n=[];let r,i,o=[0,0],a=0,s=0,u=t.length,l=0;if(t[u-1]===Lz){if(l=t.lastIndexOf(Uz),!(l>=0))throw"Unmatched right brace: "+t;try{o=function(t){const e=t.split(Pz);if(!t.length||e.length>2)throw t;return e.map((e=>{const n=+e;if(n!=n)throw 
t;return n}))}(t.substring(l+1,u-1))}catch(e){throw"Invalid throttle specification: "+t}u=(t=t.slice(0,l).trim()).length,l=0}if(!u)throw t;t[0]===jz&&(a=++l);r=Xz(t,l,qz),r1?(e.type=n[1],a?e.markname=n[0].slice(1):!function(t){return Gz[t]}(n[0])?e.source=n[0]:e.marktype=n[0]):e.type=n[0];"!"===e.type.slice(-1)&&(e.consume=!0,e.type=e.type.slice(0,-1));null!=i&&(e.filter=i);o[0]&&(e.throttle=o[0]);o[1]&&(e.debounce=o[1]);return e}(t)}function Qz(t){return A(t)?t:{type:t||"pad"}}const Kz=t=>+t||0,tN=t=>({top:t,bottom:t,left:t,right:t});function eN(t){return A(t)?t.signal?t:{top:Kz(t.top),bottom:Kz(t.bottom),left:Kz(t.left),right:Kz(t.right)}:tN(Kz(t))}const nN=t=>A(t)&&!k(t)?ot({},t):{value:t};function rN(t,e,n,r){if(null!=n){return A(n)&&!k(n)||k(n)&&n.length&&A(n[0])?t.update[e]=n:t[r||"enter"][e]={value:n},1}return 0}function iN(t,e,n){for(const n in e)rN(t,n,e[n]);for(const e in n)rN(t,e,n[e],"update")}function oN(t,e,n){for(const r in e)n&<(n,r)||(t[r]=ot(t[r]||{},e[r]));return t}function aN(t,e){return e&&(e.enter&&e.enter[t]||e.update&&e.update[t])}const sN="mark",uN="frame",lN="scope",cN="axis",fN="axis-domain",hN="axis-grid",dN="axis-label",pN="axis-tick",gN="axis-title",mN="legend",yN="legend-band",vN="legend-entry",_N="legend-gradient",xN="legend-label",bN="legend-symbol",wN="legend-title",kN="title",AN="title-text",MN="title-subtitle";function EN(t,e,n){t[e]=n&&n.signal?{signal:n.signal}:{value:n}}const DN=t=>xt(t)?Ct(t):t.signal?`(${t.signal})`:$N(t);function CN(t){if(null!=t.gradient)return function(t){const e=[t.start,t.stop,t.count].map((t=>null==t?null:Ct(t)));for(;e.length&&null==F(e);)e.pop();return e.unshift(DN(t.gradient)),`gradient(${e.join(",")})`}(t);let e=t.signal?`(${t.signal})`:t.color?function(t){return t.c?FN("hcl",t.h,t.c,t.l):t.h||t.s?FN("hsl",t.h,t.s,t.l):t.l||t.a?FN("lab",t.l,t.a,t.b):t.r||t.g||t.b?FN("rgb",t.r,t.g,t.b):null}(t.color):null!=t.field?$N(t.field):void 0!==t.value?Ct(t.value):void 0;return null!=t.scale&&(e=function(t,e){const n=DN(t.scale);null!=t.range?e=`lerp(_range(${n}), ${+t.range})`:(void 0!==e&&(e=`_scale(${n}, ${e})`),t.band&&(e=(e?e+"+":"")+`_bandwidth(${n})`+(1==+t.band?"":"*"+SN(t.band)),t.extra&&(e=`(datum.extra ? 
_scale(${n}, datum.extra.value) : ${e})`)),null==e&&(e="0"));return e}(t,e)),void 0===e&&(e=null),null!=t.exponent&&(e=`pow(${e},${SN(t.exponent)})`),null!=t.mult&&(e+=`*${SN(t.mult)}`),null!=t.offset&&(e+=`+${SN(t.offset)}`),t.round&&(e=`round(${e})`),e}const FN=(t,e,n,r)=>`(${t}(${[e,n,r].map(CN).join(",")})+'')`;function SN(t){return A(t)?"("+CN(t)+")":t}function $N(t){return TN(A(t)?t:{datum:t})}function TN(t){let e,n,r;if(t.signal)e="datum",r=t.signal;else if(t.group||t.parent){for(n=Math.max(1,t.level||1),e="item";n-- >0;)e+=".mark.group";t.parent?(r=t.parent,e+=".datum"):r=t.group}else t.datum?(e="datum",r=t.datum):s("Invalid field reference: "+Ct(t));return t.signal||(r=xt(r)?u(r).map(Ct).join("]["):TN(r)),e+"["+r+"]"}function BN(t,e,n,r,i,o){const a={};(o=o||{}).encoders={$encode:a},t=function(t,e,n,r,i){const o={},a={};let s,u,l,c;for(u in u="lineBreak","text"!==e||null==i[u]||aN(u,t)||EN(o,u,i[u]),("legend"==n||String(n).startsWith("axis"))&&(n=null),c=n===uN?i.group:n===sN?ot({},i.mark,i[e]):null,c)l=aN(u,t)||("fill"===u||"stroke"===u)&&(aN("fill",t)||aN("stroke",t)),l||EN(o,u,c[u]);for(u in V(r).forEach((e=>{const n=i.style&&i.style[e];for(const e in n)aN(e,t)||EN(o,e,n[e])})),t=ot({},t),o)c=o[u],c.signal?(s=s||{})[u]=c:a[u]=c;return t.enter=ot(a,t.enter),s&&(t.update=ot(s,t.update)),t}(t,e,n,r,i.config);for(const n in t)a[n]=zN(t[n],e,o,i);return o}function zN(t,e,n,r){const i={},o={};for(const e in t)null!=t[e]&&(i[e]=NN((a=t[e],k(a)?function(t){let e="";return t.forEach((t=>{const n=CN(t);e+=t.test?`(${t.test})?${n}:`:n})),":"===F(e)&&(e+="null"),e}(a):CN(a)),r,n,o));var a;return{$expr:{marktype:e,channels:i},$fields:Object.keys(o),$output:Object.keys(t)}}function NN(t,e,n,r){const i=DB(t,e);return i.$fields.forEach((t=>r[t]=1)),ot(n,i.$params),i.$expr}const ON="outer",RN=["value","update","init","react","bind"];function UN(t,e){s(t+' for "outer" push: '+Ct(e))}function LN(t,e){const n=t.name;if(t.push===ON)e.signals[n]||UN("No prior signal definition",n),RN.forEach((e=>{void 0!==t[e]&&UN("Invalid property ",e)}));else{const r=e.addSignal(n,t.value);!1===t.react&&(r.react=!1),t.bind&&e.addBinding(n,t.bind)}}function qN(t,e,n,r){this.id=-1,this.type=t,this.value=e,this.params=n,r&&(this.parent=r)}function PN(t,e,n,r){return new qN(t,e,n,r)}function jN(t,e){return PN("operator",t,e)}function IN(t){const e={$ref:t.id};return t.id<0&&(t.refs=t.refs||[]).push(e),e}function WN(t,e){return e?{$field:t,$name:e}:{$field:t}}const HN=WN("key");function YN(t,e){return{$compare:t,$order:e}}const GN="descending";function VN(t,e){return(t&&t.signal?"$"+t.signal:t||"")+(t&&e?"_":"")+(e&&e.signal?"$"+e.signal:e||"")}const XN="scope",JN="view";function ZN(t){return t&&t.signal}function QN(t){if(ZN(t))return!0;if(A(t))for(const e in t)if(QN(t[e]))return!0;return!1}function KN(t,e){return null!=t?t:e}function tO(t){return t&&t.signal||t}const eO="timer";function nO(t,e){return(t.merge?rO:t.stream?iO:t.type?oO:s("Invalid stream specification: "+Ct(t)))(t,e)}function rO(t,e){const n=aO({merge:t.merge.map((t=>nO(t,e)))},t,e);return e.addStream(n).id}function iO(t,e){const n=aO({stream:nO(t.stream,e)},t,e);return e.addStream(n).id}function oO(t,e){let n;t.type===eO?(n=e.event(eO,t.throttle),t={between:t.between,filter:t.filter}):n=e.event(function(t){return t===XN?JN:t||JN}(t.source),t.type);const r=aO({stream:n},t,e);return 1===Object.keys(r).length?n:e.addStream(r).id}function aO(t,e,n){let r=e.between;return r&&(2!==r.length&&s('Stream "between" parameter must have 2 entries: 
'+Ct(e)),t.between=[nO(r[0],n),nO(r[1],n)]),r=e.filter?[].concat(e.filter):[],(e.marktype||e.markname||e.markrole)&&r.push(function(t,e,n){const r="event.item";return r+(t&&"*"!==t?"&&"+r+".mark.marktype==='"+t+"'":"")+(n?"&&"+r+".mark.role==='"+n+"'":"")+(e?"&&"+r+".mark.name==='"+e+"'":"")}(e.marktype,e.markname,e.markrole)),e.source===XN&&r.push("inScope(event.item)"),r.length&&(t.filter=DB("("+r.join(")&&(")+")",n).$expr),null!=(r=e.throttle)&&(t.throttle=+r),null!=(r=e.debounce)&&(t.debounce=+r),e.consume&&(t.consume=!0),t}const sO={code:"_.$value",ast:{type:"Identifier",value:"value"}};function uO(t,e,n){const r=t.encode,i={target:n};let o=t.events,a=t.update,u=[];o||s("Signal update missing events specification."),xt(o)&&(o=Vz(o,e.isSubscope()?XN:JN)),o=V(o).filter((t=>t.signal||t.scale?(u.push(t),0):1)),u.length>1&&(u=[lO(u)]),o.length&&u.push(o.length>1?{merge:o}:o[0]),null!=r&&(a&&s("Signal encode and update are mutually exclusive."),a="encode(item(),"+Ct(r)+")"),i.update=xt(a)?DB(a,e):null!=a.expr?DB(a.expr,e):null!=a.value?a.value:null!=a.signal?{$expr:sO,$params:{$value:e.signalRef(a.signal)}}:s("Invalid signal update specification."),t.force&&(i.options={force:!0}),u.forEach((t=>e.addUpdate(ot(function(t,e){return{source:t.signal?e.signalRef(t.signal):t.scale?e.scaleRef(t.scale):nO(t,e)}}(t,e),i))))}function lO(t){return{signal:"["+t.map((t=>t.scale?'scale("'+t.scale+'")':t.signal))+"]"}}const cO=t=>(e,n,r)=>PN(t,n,e||void 0,r),fO=cO("aggregate"),hO=cO("axisticks"),dO=cO("bound"),pO=cO("collect"),gO=cO("compare"),mO=cO("datajoin"),yO=cO("encode"),vO=cO("expression"),_O=cO("facet"),xO=cO("field"),bO=cO("key"),wO=cO("legendentries"),kO=cO("load"),AO=cO("mark"),MO=cO("multiextent"),EO=cO("multivalues"),DO=cO("overlap"),CO=cO("params"),FO=cO("prefacet"),SO=cO("projection"),$O=cO("proxy"),TO=cO("relay"),BO=cO("render"),zO=cO("scale"),NO=cO("sieve"),OO=cO("sortitems"),RO=cO("viewlayout"),UO=cO("values");let LO=0;const qO={min:"min",max:"max",count:"sum"};function PO(t,e){const n=e.getScale(t.name).params;let r;for(r in n.domain=HO(t.domain,t,e),null!=t.range&&(n.range=KO(t,e,n)),null!=t.interpolate&&function(t,e){e.interpolate=jO(t.type||t),null!=t.gamma&&(e.interpolateGamma=jO(t.gamma))}(t.interpolate,n),null!=t.nice&&(n.nice=function(t,e){return t.signal?e.signalRef(t.signal):A(t)?{interval:jO(t.interval),step:jO(t.step)}:jO(t)}(t.nice,e)),null!=t.bins&&(n.bins=function(t,e){return t.signal||k(t)?IO(t,e):e.objectProperty(t)}(t.bins,e)),t)lt(n,r)||"name"===r||(n[r]=jO(t[r],e))}function jO(t,e){return A(t)?t.signal?e.signalRef(t.signal):s("Unsupported object: "+Ct(t)):t}function IO(t,e){return t.signal?e.signalRef(t.signal):t.map((t=>jO(t,e)))}function WO(t){s("Can not find data set: "+Ct(t))}function HO(t,e,n){if(t)return t.signal?n.signalRef(t.signal):(k(t)?YO:t.fields?VO:GO)(t,e,n);null==e.domainMin&&null==e.domainMax||s("No scale domain defined for domainMin/domainMax to override.")}function YO(t,e,n){return t.map((t=>jO(t,n)))}function GO(t,e,n){const r=n.getData(t.data);return r||WO(t.data),cp(e.type)?r.valuesRef(n,t.field,JO(t.sort,!1)):pp(e.type)?r.domainRef(n,t.field):r.extentRef(n,t.field)}function VO(t,e,n){const r=t.data,i=t.fields.reduce(((t,e)=>(e=xt(e)?{data:r,field:e}:k(e)||e.signal?function(t,e){const n="_:vega:_"+LO++,r=pO({});if(k(t))r.value={$ingest:t};else if(t.signal){const i="setdata("+Ct(n)+","+t.signal+")";r.params.input=e.signalRef(i)}return 
e.addDataPipeline(n,[r,NO({})]),{data:n,field:"data"}}(e,n):e,t.push(e),t)),[]);return(cp(e.type)?XO:pp(e.type)?ZO:QO)(t,n,i)}function XO(t,e,n){const r=JO(t.sort,!0);let i,o;const a=n.map((t=>{const n=e.getData(t.data);return n||WO(t.data),n.countsRef(e,t.field,r)})),s={groupby:HN,pulse:a};r&&(i=r.op||"count",o=r.field?VN(i,r.field):"count",s.ops=[qO[i]],s.fields=[e.fieldRef(o)],s.as=[o]),i=e.add(fO(s));const u=e.add(pO({pulse:IN(i)}));return o=e.add(UO({field:HN,sort:e.sortRef(r),pulse:IN(u)})),IN(o)}function JO(t,e){return t&&(t.field||t.op?t.field||"count"===t.op?e&&t.field&&t.op&&!qO[t.op]&&s("Multiple domain scales can not be sorted using "+t.op):s("No field provided for sort aggregate op: "+t.op):A(t)?t.field="key":t={field:"key"}),t}function ZO(t,e,n){const r=n.map((t=>{const n=e.getData(t.data);return n||WO(t.data),n.domainRef(e,t.field)}));return IN(e.add(EO({values:r})))}function QO(t,e,n){const r=n.map((t=>{const n=e.getData(t.data);return n||WO(t.data),n.extentRef(e,t.field)}));return IN(e.add(MO({extents:r})))}function KO(t,e,n){const r=e.config.range;let i=t.range;if(i.signal)return e.signalRef(i.signal);if(xt(i)){if(r&<(r,i))return KO(t=ot({},t,{range:r[i]}),e,n);"width"===i?i=[0,{signal:"width"}]:"height"===i?i=cp(t.type)?[0,{signal:"height"}]:[{signal:"height"},0]:s("Unrecognized scale range value: "+Ct(i))}else{if(i.scheme)return n.scheme=k(i.scheme)?IO(i.scheme,e):jO(i.scheme,e),i.extent&&(n.schemeExtent=IO(i.extent,e)),void(i.count&&(n.schemeCount=jO(i.count,e)));if(i.step)return void(n.rangeStep=jO(i.step,e));if(cp(t.type)&&!k(i))return HO(i,t,e);k(i)||s("Unsupported range type: "+Ct(i))}return i.map((t=>(k(t)?IO:jO)(t,e)))}function tR(t,e,n){return k(t)?t.map((t=>tR(t,e,n))):A(t)?t.signal?n.signalRef(t.signal):"fit"===e?t:s("Unsupported parameter object: "+Ct(t)):t}const eR="top",nR="left",rR="right",iR="bottom",oR="center",aR="vertical",sR="start",uR="end",lR="index",cR="label",fR="offset",hR="perc",dR="perc2",pR="value",gR="guide-label",mR="guide-title",yR="group-title",vR="group-subtitle",_R="symbol",xR="gradient",bR="discrete",wR="size",kR=[wR,"shape","fill","stroke","strokeWidth","strokeDash","opacity"],AR={name:1,style:1,interactive:1},MR={value:0},ER={value:1},DR="group",CR="rect",FR="rule",SR="symbol",$R="text";function TR(t){return t.type=DR,t.interactive=t.interactive||!1,t}function BR(t,e){const n=(n,r)=>KN(t[n],KN(e[n],r));return n.isVertical=n=>aR===KN(t.direction,e.direction||(n?e.symbolDirection:e.gradientDirection)),n.gradientLength=()=>KN(t.gradientLength,e.gradientLength||e.gradientWidth),n.gradientThickness=()=>KN(t.gradientThickness,e.gradientThickness||e.gradientHeight),n.entryColumns=()=>KN(t.columns,KN(e.columns,+n.isVertical(!0))),n}function zR(t,e){const n=e&&(e.update&&e.update[t]||e.enter&&e.enter[t]);return n&&n.signal?n:n?n.value:null}function NR(t,e,n){return`item.anchor === '${sR}' ? ${t} : item.anchor === '${uR}' ? 
${e} : ${n}`}const OR=NR(Ct(nR),Ct(rR),Ct(oR));function RR(t,e){return e?t?A(t)?Object.assign({},t,{offset:RR(t.offset,e)}):{value:t,offset:e}:e:t}function UR(t,e){return e?(t.name=e.name,t.style=e.style||t.style,t.interactive=!!e.interactive,t.encode=oN(t.encode,e,AR)):t.interactive=!1,t}function LR(t,e,n,r){const i=BR(t,n),o=i.isVertical(),a=i.gradientThickness(),s=i.gradientLength();let u,l,c,f,h;o?(l=[0,1],c=[0,0],f=a,h=s):(l=[0,0],c=[1,0],f=s,h=a);const d={enter:u={opacity:MR,x:MR,y:MR,width:nN(f),height:nN(h)},update:ot({},u,{opacity:ER,fill:{gradient:e,start:l,stop:c}}),exit:{opacity:MR}};return iN(d,{stroke:i("gradientStrokeColor"),strokeWidth:i("gradientStrokeWidth")},{opacity:i("gradientOpacity")}),UR({type:CR,role:_N,encode:d},r)}function qR(t,e,n,r,i){const o=BR(t,n),a=o.isVertical(),s=o.gradientThickness(),u=o.gradientLength();let l,c,f,h,d="";a?(l="y",f="y2",c="x",h="width",d="1-"):(l="x",f="x2",c="y",h="height");const p={opacity:MR,fill:{scale:e,field:pR}};p[l]={signal:d+"datum."+hR,mult:u},p[c]=MR,p[f]={signal:d+"datum."+dR,mult:u},p[h]=nN(s);const g={enter:p,update:ot({},p,{opacity:ER}),exit:{opacity:MR}};return iN(g,{stroke:o("gradientStrokeColor"),strokeWidth:o("gradientStrokeWidth")},{opacity:o("gradientOpacity")}),UR({type:CR,role:yN,key:pR,from:i,encode:g},r)}const PR=`datum.${hR}<=0?"${nR}":datum.${hR}>=1?"${rR}":"${oR}"`,jR=`datum.${hR}<=0?"${iR}":datum.${hR}>=1?"${eR}":"middle"`;function IR(t,e,n,r){const i=BR(t,e),o=i.isVertical(),a=nN(i.gradientThickness()),s=i.gradientLength();let u,l,c,f,h=i("labelOverlap"),d="";const p={enter:u={opacity:MR},update:l={opacity:ER,text:{field:cR}},exit:{opacity:MR}};return iN(p,{fill:i("labelColor"),fillOpacity:i("labelOpacity"),font:i("labelFont"),fontSize:i("labelFontSize"),fontStyle:i("labelFontStyle"),fontWeight:i("labelFontWeight"),limit:KN(t.labelLimit,e.gradientLabelLimit)}),o?(u.align={value:"left"},u.baseline=l.baseline={signal:jR},c="y",f="x",d="1-"):(u.align=l.align={signal:PR},u.baseline={value:"top"},c="x",f="y"),u[c]=l[c]={signal:d+"datum."+hR,mult:s},u[f]=l[f]=a,a.offset=KN(t.labelOffset,e.gradientLabelOffset)||0,h=h?{separation:i("labelSeparation"),method:h,order:"datum."+lR}:void 0,UR({type:$R,role:xN,style:gR,key:pR,from:r,encode:p,overlap:h},n)}function WR(t,e,n,r,i){const o=BR(t,e),a=n.entries,s=!(!a||!a.interactive),u=a?a.name:void 0,l=o("clipHeight"),c=o("symbolOffset"),f={data:"value"},h=`(${i}) ? 
datum.${fR} : datum.${wR}`,d=l?nN(l):{field:wR},p=`datum.${lR}`,g=`max(1, ${i})`;let m,y,v,_,x;d.mult=.5,m={enter:y={opacity:MR,x:{signal:h,mult:.5,offset:c},y:d},update:v={opacity:ER,x:y.x,y:y.y},exit:{opacity:MR}};let b=null,w=null;t.fill||(b=e.symbolBaseFillColor,w=e.symbolBaseStrokeColor),iN(m,{fill:o("symbolFillColor",b),shape:o("symbolType"),size:o("symbolSize"),stroke:o("symbolStrokeColor",w),strokeDash:o("symbolDash"),strokeDashOffset:o("symbolDashOffset"),strokeWidth:o("symbolStrokeWidth")},{opacity:o("symbolOpacity")}),kR.forEach((e=>{t[e]&&(v[e]=y[e]={scale:t[e],field:pR})}));const k=UR({type:SR,role:bN,key:pR,from:f,clip:!!l||void 0,encode:m},n.symbols),A=nN(c);A.offset=o("labelOffset"),m={enter:y={opacity:MR,x:{signal:h,offset:A},y:d},update:v={opacity:ER,text:{field:cR},x:y.x,y:y.y},exit:{opacity:MR}},iN(m,{align:o("labelAlign"),baseline:o("labelBaseline"),fill:o("labelColor"),fillOpacity:o("labelOpacity"),font:o("labelFont"),fontSize:o("labelFontSize"),fontStyle:o("labelFontStyle"),fontWeight:o("labelFontWeight"),limit:o("labelLimit")});const M=UR({type:$R,role:xN,style:gR,key:pR,from:f,encode:m},n.labels);return m={enter:{noBound:{value:!l},width:MR,height:l?nN(l):MR,opacity:MR},exit:{opacity:MR},update:v={opacity:ER,row:{signal:null},column:{signal:null}}},o.isVertical(!0)?(_=`ceil(item.mark.items.length / ${g})`,v.row.signal=`${p}%${_}`,v.column.signal=`floor(${p} / ${_})`,x={field:["row",p]}):(v.row.signal=`floor(${p} / ${g})`,v.column.signal=`${p} % ${g}`,x={field:p}),v.column.signal=`(${i})?${v.column.signal}:${p}`,TR({role:lN,from:r={facet:{data:r,name:"value",groupby:lR}},encode:oN(m,a,AR),marks:[k,M],name:u,interactive:s,sort:x})}const HR='item.orient === "left"',YR='item.orient === "right"',GR=`(${HR} || ${YR})`,VR=`datum.vgrad && ${GR}`,XR=NR('"top"','"bottom"','"middle"'),JR=`datum.vgrad && ${YR} ? (${NR('"right"','"left"','"center"')}) : (${GR} && !(datum.vgrad && ${HR})) ? "left" : ${OR}`,ZR=`item._anchor || (${GR} ? "middle" : "start")`,QR=`${VR} ? (${HR} ? -90 : 90) : 0`,KR=`${GR} ? (datum.vgrad ? (${YR} ? 
"bottom" : "top") : ${XR}) : "top"`;function tU(t,e){let n;return A(t)&&(t.signal?n=t.signal:t.path?n="pathShape("+eU(t.path)+")":t.sphere&&(n="geoShape("+eU(t.sphere)+', {type: "Sphere"})')),n?e.signalRef(n):!!t}function eU(t){return A(t)&&t.signal?t.signal:Ct(t)}function nU(t){const e=t.role||"";return e.startsWith("axis")||e.startsWith("legend")||e.startsWith("title")?e:t.type===DR?lN:e||sN}function rU(t){return{marktype:t.type,name:t.name||void 0,role:t.role||nU(t),zindex:+t.zindex||void 0,aria:t.aria,description:t.description}}function iU(t,e){return t&&t.signal?e.signalRef(t.signal):!1!==t}function oU(t,e){const n=Qa(t.type);n||s("Unrecognized transform type: "+Ct(t.type));const r=PN(n.type.toLowerCase(),null,aU(n,t,e));return t.signal&&e.addSignal(t.signal,e.proxy(r)),r.metadata=n.metadata||{},r}function aU(t,e,n){const r={},i=t.params.length;for(let o=0;olU(t,e,n)))):lU(t,r,n)}(t,e,n):"projection"===r?n.projectionRef(e[t.name]):t.array&&!ZN(i)?i.map((e=>uU(t,e,n))):uU(t,i,n):void(t.required&&s("Missing required "+Ct(e.type)+" parameter: "+Ct(t.name)))}function uU(t,e,n){const r=t.type;if(ZN(e))return dU(r)?s("Expression references can not be signals."):pU(r)?n.fieldRef(e):gU(r)?n.compareRef(e):n.signalRef(e.signal);{const i=t.expr||pU(r);return i&&cU(e)?n.exprRef(e.expr,e.as):i&&fU(e)?WN(e.field,e.as):dU(r)?DB(e,n):hU(r)?IN(n.getData(e).values):pU(r)?WN(e):gU(r)?n.compareRef(e):e}}function lU(t,e,n){const r=t.params.length;let i;for(let n=0;nt&&t.expr,fU=t=>t&&t.field,hU=t=>"data"===t,dU=t=>"expr"===t,pU=t=>"field"===t,gU=t=>"compare"===t;function mU(t,e){return t.$ref?t:t.data&&t.data.$ref?t.data:IN(e.getData(t.data).output)}function yU(t,e,n,r,i){this.scope=t,this.input=e,this.output=n,this.values=r,this.aggregate=i,this.index={}}function vU(t){return xt(t)?t:null}function _U(t,e,n){const r=VN(n.op,n.field);let i;if(e.ops){for(let t=0,n=e.as.length;tnull==t?"null":t)).join(",")+"),0)",e);u.update=l.$expr,u.params=l.$params}function wU(t,e){const n=nU(t),r=t.type===DR,i=t.from&&t.from.facet,o=t.overlap;let a,u,l,c,f,h,d,p=t.layout||n===lN||n===uN;const g=n===sN||p||i,m=function(t,e,n){let r,i,o,a,u;return t?(r=t.facet)&&(e||s("Only group marks can be faceted."),null!=r.field?a=u=mU(r,n):(t.data?u=IN(n.getData(t.data).aggregate):(o=oU(ot({type:"aggregate",groupby:V(r.groupby)},r.aggregate),n),o.params.key=n.keyRef(r.groupby),o.params.pulse=mU(r,n),a=u=IN(n.add(o))),i=n.keyRef(r.groupby,!0))):a=IN(n.add(pO(null,[{}]))),a||(a=mU(t,n)),{key:i,pulse:a,parent:u}}(t.from,r,e);u=e.add(mO({key:m.key||(t.key?WN(t.key):void 0),pulse:m.pulse,clean:!r}));const y=IN(u);u=l=e.add(pO({pulse:y})),u=e.add(AO({markdef:rU(t),interactive:iU(t.interactive,e),clip:tU(t.clip,e),context:{$context:!0},groups:e.lookup(),parent:e.signals.parent?e.signalRef("parent"):null,index:e.markpath(),pulse:IN(u)}));const v=IN(u);u=c=e.add(yO(BN(t.encode,t.type,n,t.style,e,{mod:!1,pulse:v}))),u.params.parent=e.encode(),t.transform&&t.transform.forEach((t=>{const n=oU(t,e),r=n.metadata;(r.generates||r.changes)&&s("Mark transforms should not generate new data."),r.nomod||(c.params.mod=!0),n.params.pulse=IN(u),e.add(u=n)})),t.sort&&(u=e.add(OO({sort:e.compareRef(t.sort),pulse:IN(u)})));const _=IN(u);(i||p)&&(p=e.add(RO({layout:e.objectProperty(t.layout),legends:e.legends,mark:v,pulse:_})),h=IN(p));const x=e.add(dO({mark:v,pulse:h||_}));d=IN(x),r&&(g&&(a=e.operators,a.pop(),p&&a.pop()),e.pushState(_,h||d,y),i?function(t,e,n){const r=t.from.facet,i=r.name,o=mU(r,e);let a;r.name||s("Facet must have a name: 
"+Ct(r)),r.data||s("Facet must reference a data set: "+Ct(r)),r.field?a=e.add(FO({field:e.fieldRef(r.field),pulse:o})):r.groupby?a=e.add(_O({key:e.keyRef(r.groupby),group:IN(e.proxy(n.parent)),pulse:o})):s("Facet must specify groupby or field: "+Ct(r));const u=e.fork(),l=u.add(pO()),c=u.add(NO({pulse:IN(l)}));u.addData(i,new yU(u,l,l,c)),u.addSignal("parent",null),a.params.subflow={$subflow:u.parse(t).toRuntime()}}(t,e,m):g?function(t,e,n){const r=e.add(FO({pulse:n.pulse})),i=e.fork();i.add(NO()),i.addSignal("parent",null),r.params.subflow={$subflow:i.parse(t).toRuntime()}}(t,e,m):e.parse(t),e.popState(),g&&(p&&a.push(p),a.push(x))),o&&(d=function(t,e,n){const r=t.method,i=t.bound,o=t.separation,a={separation:ZN(o)?n.signalRef(o.signal):o,method:ZN(r)?n.signalRef(r.signal):r,pulse:e};t.order&&(a.sort=n.compareRef({field:t.order}));if(i){const t=i.tolerance;a.boundTolerance=ZN(t)?n.signalRef(t.signal):+t,a.boundScale=n.scaleRef(i.scale),a.boundOrient=i.orient}return IN(n.add(DO(a)))}(o,d,e));const b=e.add(BO({pulse:d})),w=e.add(NO({pulse:IN(b)},void 0,e.parent()));null!=t.name&&(f=t.name,e.addData(f,new yU(e,l,b,w)),t.on&&t.on.forEach((t=>{(t.insert||t.remove||t.toggle)&&s("Marks only support modify triggers."),bU(t,e,f)})))}function kU(t,e){const n=e.config.legend,r=t.encode||{},i=BR(t,n),o=r.legend||{},a=o.name||void 0,u=o.interactive,l=o.style,c={};let f,h,d,p=0;kR.forEach((e=>t[e]?(c[e]=t[e],p=p||t[e]):0)),p||s("Missing valid scale for legend.");const g=function(t,e){let n=t.type||_R;t.type||1!==function(t){return kR.reduce(((e,n)=>e+(t[n]?1:0)),0)}(t)||!t.fill&&!t.stroke||(n=lp(e)?xR:fp(e)?bR:_R);return n!==xR?n:fp(e)?bR:xR}(t,e.scaleType(p)),m={title:null!=t.title,scales:c,type:g,vgrad:"symbol"!==g&&i.isVertical()},y=IN(e.add(pO(null,[m]))),v=IN(e.add(wO(h={type:g,scale:e.scaleRef(p),count:e.objectProperty(i("tickCount")),limit:e.property(i("symbolLimit")),values:e.objectProperty(t.values),minstep:e.property(t.tickMinStep),formatType:e.property(t.formatType),formatSpecifier:e.property(t.format)})));return g===xR?(d=[LR(t,p,n,r.gradient),IR(t,n,r.labels,v)],h.count=h.count||e.signalRef(`max(2,2*floor((${tO(i.gradientLength())})/100))`)):g===bR?d=[qR(t,p,n,r.gradient,v),IR(t,n,r.labels,v)]:(f=function(t,e){const n=BR(t,e);return{align:n("gridAlign"),columns:n.entryColumns(),center:{row:!0,column:!1},padding:{row:n("rowPadding"),column:n("columnPadding")}}}(t,n),d=[WR(t,n,r,v,tO(f.columns))],h.size=function(t,e,n){const r=tO(MU("size",t,n)),i=tO(MU("strokeWidth",t,n)),o=tO(function(t,e,n){return zR("fontSize",t)||function(t,e,n){const r=e.config.style[n];return r&&r[t]}("fontSize",e,n)}(n[1].encode,e,gR));return DB(`max(ceil(sqrt(${r})+${i}),${o})`,e)}(t,e,d[0].marks)),d=[TR({role:vN,from:y,encode:{enter:{x:{value:0},y:{value:0}}},marks:d,layout:f,interactive:u})],m.title&&d.push(function(t,e,n,r){const i=BR(t,e),o={enter:{opacity:MR},update:{opacity:ER,x:{field:{group:"padding"}},y:{field:{group:"padding"}}},exit:{opacity:MR}};return 
iN(o,{orient:i("titleOrient"),_anchor:i("titleAnchor"),anchor:{signal:ZR},angle:{signal:QR},align:{signal:JR},baseline:{signal:KR},text:t.title,fill:i("titleColor"),fillOpacity:i("titleOpacity"),font:i("titleFont"),fontSize:i("titleFontSize"),fontStyle:i("titleFontStyle"),fontWeight:i("titleFontWeight"),limit:i("titleLimit"),lineHeight:i("titleLineHeight")},{align:i("titleAlign"),baseline:i("titleBaseline")}),UR({type:$R,role:wN,style:mR,from:r,encode:o},n)}(t,n,r.title,y)),wU(TR({role:mN,from:y,encode:oN(AU(i,t,n),o,AR),marks:d,aria:i("aria"),description:i("description"),zindex:i("zindex"),name:a,interactive:u,style:l}),e)}function AU(t,e,n){const r={enter:{},update:{}};return iN(r,{orient:t("orient"),offset:t("offset"),padding:t("padding"),titlePadding:t("titlePadding"),cornerRadius:t("cornerRadius"),fill:t("fillColor"),stroke:t("strokeColor"),strokeWidth:n.strokeWidth,strokeDash:n.strokeDash,x:t("legendX"),y:t("legendY"),format:e.format,formatType:e.formatType}),r}function MU(t,e,n){return e[t]?`scale("${e[t]}",datum)`:zR(t,n[0].encode)}yU.fromEntries=function(t,e){const n=e.length,r=e[n-1],i=e[n-2];let o=e[0],a=null,s=1;for(o&&"load"===o.type&&(o=e[1]),t.add(e[0]);s{n.push(oU(t,e))})),t.on&&t.on.forEach((n=>{bU(n,e,t.name)})),e.addDataPipeline(t.name,function(t,e,n){const r=[];let i,o,a,s,u,l=null,c=!1,f=!1;t.values?ZN(t.values)||QN(t.format)?(r.push($U(e,t)),r.push(l=SU())):r.push(l=SU({$ingest:t.values,$format:t.format})):t.url?QN(t.url)||QN(t.format)?(r.push($U(e,t)),r.push(l=SU())):r.push(l=SU({$request:t.url,$format:t.format})):t.source&&(l=i=V(t.source).map((t=>IN(e.getData(t).output))),r.push(null));for(o=0,a=n.length;ot===iR||t===eR,BU=(t,e,n)=>ZN(t)?qU(t.signal,e,n):t===nR||t===eR?e:n,zU=(t,e,n)=>ZN(t)?UU(t.signal,e,n):TU(t)?e:n,NU=(t,e,n)=>ZN(t)?LU(t.signal,e,n):TU(t)?n:e,OU=(t,e,n)=>ZN(t)?PU(t.signal,e,n):t===eR?{value:e}:{value:n},RU=(t,e,n)=>ZN(t)?jU(t.signal,e,n):t===rR?{value:e}:{value:n},UU=(t,e,n)=>IU(`${t} === '${eR}' || ${t} === '${iR}'`,e,n),LU=(t,e,n)=>IU(`${t} !== '${eR}' && ${t} !== '${iR}'`,e,n),qU=(t,e,n)=>HU(`${t} === '${nR}' || ${t} === '${eR}'`,e,n),PU=(t,e,n)=>HU(`${t} === '${eR}'`,e,n),jU=(t,e,n)=>HU(`${t} === '${rR}'`,e,n),IU=(t,e,n)=>(e=null!=e?nN(e):e,n=null!=n?nN(n):n,WU(e)&&WU(n)?{signal:`${t} ? (${e=e?e.signal||Ct(e.value):null}) : (${n=n?n.signal||Ct(n.value):null})`}:[ot({test:t},e)].concat(n||[])),WU=t=>null==t||1===Object.keys(t).length,HU=(t,e,n)=>({signal:`${t} ? (${GU(e)}) : (${GU(n)})`}),YU=(t,e,n,r,i)=>({signal:(null!=r?`${t} === '${nR}' ? (${GU(r)}) : `:"")+(null!=n?`${t} === '${iR}' ? (${GU(n)}) : `:"")+(null!=i?`${t} === '${rR}' ? (${GU(i)}) : `:"")+(null!=e?`${t} === '${eR}' ? 
(${GU(e)}) : `:"")+"(null)"}),GU=t=>ZN(t)?t.signal:null==t?null:Ct(t),VU=(t,e)=>0===e?0:ZN(t)?{signal:`(${t.signal}) * ${e}`}:{value:t*e},XU=(t,e)=>{const n=t.signal;return n&&n.endsWith("(null)")?{signal:n.slice(0,-6)+e.signal}:t};function JU(t,e,n,r){let i;if(e&<(e,t))return e[t];if(lt(n,t))return n[t];if(t.startsWith("title")){switch(t){case"titleColor":i="fill";break;case"titleFont":case"titleFontSize":case"titleFontWeight":i=t[5].toLowerCase()+t.slice(6)}return r[mR][i]}if(t.startsWith("label")){switch(t){case"labelColor":i="fill";break;case"labelFont":case"labelFontSize":i=t[5].toLowerCase()+t.slice(6)}return r[gR][i]}return null}function ZU(t){const e={};for(const n of t)if(n)for(const t in n)e[t]=1;return Object.keys(e)}function QU(t,e){return{scale:t.scale,range:e}}function KU(t,e,n,r,i){const o=BR(t,e),a=t.orient,s=t.gridScale,u=BU(a,1,-1),l=function(t,e){if(1===e);else if(A(t)){let n=t=ot({},t);for(;null!=n.mult;){if(!A(n.mult))return n.mult=ZN(e)?{signal:`(${n.mult}) * (${e.signal})`}:n.mult*e,t;n=n.mult=ot({},n.mult)}n.mult=e}else t=ZN(e)?{signal:`(${e.signal}) * (${t||0})`}:e*(t||0);return t}(t.offset,u);let c,f,h;const d={enter:c={opacity:MR},update:h={opacity:ER},exit:f={opacity:MR}};iN(d,{stroke:o("gridColor"),strokeCap:o("gridCap"),strokeDash:o("gridDash"),strokeDashOffset:o("gridDashOffset"),strokeOpacity:o("gridOpacity"),strokeWidth:o("gridWidth")});const p={scale:t.scale,field:pR,band:i.band,extra:i.extra,offset:i.offset,round:o("tickRound")},g=zU(a,{signal:"height"},{signal:"width"}),m=s?{scale:s,range:0,mult:u,offset:l}:{value:0,offset:l},y=s?{scale:s,range:1,mult:u,offset:l}:ot(g,{mult:u,offset:l});return c.x=h.x=zU(a,p,m),c.y=h.y=NU(a,p,m),c.x2=h.x2=NU(a,y),c.y2=h.y2=zU(a,y),f.x=zU(a,p),f.y=NU(a,p),UR({type:FR,role:hN,key:pR,from:r,encode:d},n)}function tL(t,e,n,r,i){return{signal:'flush(range("'+t+'"), scale("'+t+'", datum.value), '+e+","+n+","+r+","+i+")"}}function eL(t,e,n,r){const i=BR(t,e),o=t.orient,a=BU(o,-1,1);let s,u;const l={enter:s={opacity:MR,anchor:nN(i("titleAnchor",null)),align:{signal:OR}},update:u=ot({},s,{opacity:ER,text:nN(t.title)}),exit:{opacity:MR}},c={signal:`lerp(range("${t.scale}"), ${NR(0,1,.5)})`};return u.x=zU(o,c),u.y=NU(o,c),s.angle=zU(o,MR,VU(a,90)),s.baseline=zU(o,OU(o,iR,eR),{value:iR}),u.angle=s.angle,u.baseline=s.baseline,iN(l,{fill:i("titleColor"),fillOpacity:i("titleOpacity"),font:i("titleFont"),fontSize:i("titleFontSize"),fontStyle:i("titleFontStyle"),fontWeight:i("titleFontWeight"),limit:i("titleLimit"),lineHeight:i("titleLineHeight")},{align:i("titleAlign"),angle:i("titleAngle"),baseline:i("titleBaseline")}),function(t,e,n,r){const i=(t,e)=>null!=t?(n.update[e]=XU(nN(t),n.update[e]),!1):!aN(e,r),o=i(t("titleX"),"x"),a=i(t("titleY"),"y");n.enter.auto=a===o?nN(a):zU(e,nN(a),nN(o))}(i,o,l,n),l.update.align=XU(l.update.align,s.align),l.update.angle=XU(l.update.angle,s.angle),l.update.baseline=XU(l.update.baseline,s.baseline),UR({type:$R,role:gN,style:mR,from:r,encode:l},n)}function nL(t,e){const n=function(t,e){var n,r,i,o=e.config,a=o.style,s=o.axis,u="band"===e.scaleType(t.scale)&&o.axisBand,l=t.orient;if(ZN(l)){const t=ZU([o.axisX,o.axisY]),e=ZU([o.axisTop,o.axisBottom,o.axisLeft,o.axisRight]);for(i of(n={},t))n[i]=zU(l,JU(i,o.axisX,s,a),JU(i,o.axisY,s,a));for(i of(r={},e))r[i]=YU(l.signal,JU(i,o.axisTop,s,a),JU(i,o.axisBottom,s,a),JU(i,o.axisLeft,s,a),JU(i,o.axisRight,s,a))}else n=l===eR||l===iR?o.axisX:o.axisY,r=o["axis"+l[0].toUpperCase()+l.slice(1)];return 
n||r||u?ot({},s,n,r,u):s}(t,e),r=t.encode||{},i=r.axis||{},o=i.name||void 0,a=i.interactive,s=i.style,u=BR(t,n),l=function(t){const e=t("tickBand");let n,r,i=t("tickOffset");return e?e.signal?(n={signal:`(${e.signal}) === 'extent' ? 1 : 0.5`},r={signal:`(${e.signal}) === 'extent'`},A(i)||(i={signal:`(${e.signal}) === 'extent' ? 0 : ${i}`})):"extent"===e?(n=1,r=!0,i=0):(n=.5,r=!1):(n=t("bandPosition"),r=t("tickExtra")),{extra:r,band:n,offset:i}}(u),c={scale:t.scale,ticks:!!u("ticks"),labels:!!u("labels"),grid:!!u("grid"),domain:!!u("domain"),title:null!=t.title},f=IN(e.add(pO({},[c]))),h=IN(e.add(hO({scale:e.scaleRef(t.scale),extra:e.property(l.extra),count:e.objectProperty(t.tickCount),values:e.objectProperty(t.values),minstep:e.property(t.tickMinStep),formatType:e.property(t.formatType),formatSpecifier:e.property(t.format)}))),d=[];let p;return c.grid&&d.push(KU(t,n,r.grid,h,l)),c.ticks&&(p=u("tickSize"),d.push(function(t,e,n,r,i,o){const a=BR(t,e),s=t.orient,u=BU(s,-1,1);let l,c,f;const h={enter:l={opacity:MR},update:f={opacity:ER},exit:c={opacity:MR}};iN(h,{stroke:a("tickColor"),strokeCap:a("tickCap"),strokeDash:a("tickDash"),strokeDashOffset:a("tickDashOffset"),strokeOpacity:a("tickOpacity"),strokeWidth:a("tickWidth")});const d=nN(i);d.mult=u;const p={scale:t.scale,field:pR,band:o.band,extra:o.extra,offset:o.offset,round:a("tickRound")};return f.y=l.y=zU(s,MR,p),f.y2=l.y2=zU(s,d),c.x=zU(s,p),f.x=l.x=NU(s,MR,p),f.x2=l.x2=NU(s,d),c.y=NU(s,p),UR({type:FR,role:pN,key:pR,from:r,encode:h},n)}(t,n,r.ticks,h,p,l))),c.labels&&(p=c.ticks?p:0,d.push(function(t,e,n,r,i,o){const a=BR(t,e),s=t.orient,u=t.scale,l=BU(s,-1,1),c=tO(a("labelFlush")),f=tO(a("labelFlushOffset")),h=a("labelAlign"),d=a("labelBaseline");let p,g=0===c||!!c;const m=nN(i);m.mult=l,m.offset=nN(a("labelPadding")||0),m.offset.mult=l;const y={scale:u,field:pR,band:.5,offset:RR(o.offset,a("labelOffset"))},v=zU(s,g?tL(u,c,'"left"','"right"','"center"'):{value:"center"},RU(s,"left","right")),_=zU(s,OU(s,"bottom","top"),g?tL(u,c,'"top"','"bottom"','"middle"'):{value:"middle"}),x=tL(u,c,`-(${f})`,f,0);g=g&&f;const b={opacity:MR,x:zU(s,y,m),y:NU(s,y,m)},w={enter:b,update:p={opacity:ER,text:{field:cR},x:b.x,y:b.y,align:v,baseline:_},exit:{opacity:MR,x:b.x,y:b.y}};iN(w,{dx:!h&&g?zU(s,x):null,dy:!d&&g?NU(s,x):null}),iN(w,{angle:a("labelAngle"),fill:a("labelColor"),fillOpacity:a("labelOpacity"),font:a("labelFont"),fontSize:a("labelFontSize"),fontWeight:a("labelFontWeight"),fontStyle:a("labelFontStyle"),limit:a("labelLimit"),lineHeight:a("labelLineHeight")},{align:h,baseline:d});const k=a("labelBound");let A=a("labelOverlap");return A=A||k?{separation:a("labelSeparation"),method:A,order:"datum.index",bound:k?{scale:u,orient:s,tolerance:k}:null}:void 0,p.align!==v&&(p.align=XU(p.align,v)),p.baseline!==_&&(p.baseline=XU(p.baseline,_)),UR({type:$R,role:dN,style:gR,key:pR,from:r,encode:w,overlap:A},n)}(t,n,r.labels,h,p,l))),c.domain&&d.push(function(t,e,n,r){const i=BR(t,e),o=t.orient;let a,s;const u={enter:a={opacity:MR},update:s={opacity:ER},exit:{opacity:MR}};iN(u,{stroke:i("domainColor"),strokeCap:i("domainCap"),strokeDash:i("domainDash"),strokeDashOffset:i("domainDashOffset"),strokeWidth:i("domainWidth"),strokeOpacity:i("domainOpacity")});const l=QU(t,0),c=QU(t,1);return 
a.x=s.x=zU(o,l,MR),a.x2=s.x2=zU(o,c),a.y=s.y=NU(o,l,MR),a.y2=s.y2=NU(o,c),UR({type:FR,role:fN,from:r,encode:u},n)}(t,n,r.domain,f)),c.title&&d.push(eL(t,n,r.title,f)),wU(TR({role:cN,from:f,encode:oN(rL(u,t),i,AR),marks:d,aria:u("aria"),description:u("description"),zindex:u("zindex"),name:o,interactive:a,style:s}),e)}function rL(t,e){const n={enter:{},update:{}};return iN(n,{orient:t("orient"),offset:t("offset")||0,position:KN(e.position,0),titlePadding:t("titlePadding"),minExtent:t("minExtent"),maxExtent:t("maxExtent"),range:{signal:`abs(span(range("${e.scale}")))`},translate:t("translate"),format:e.format,formatType:e.formatType}),n}function iL(t,e,n){const r=V(t.signals),i=V(t.scales);return n||r.forEach((t=>LN(t,e))),V(t.projections).forEach((t=>function(t,e){const n=e.config.projection||{},r={};for(const n in t)"name"!==n&&(r[n]=tR(t[n],n,e));for(const t in n)null==r[t]&&(r[t]=tR(n[t],t,e));e.addProjection(t.name,r)}(t,e))),i.forEach((t=>function(t,e){const n=t.type||"linear";sp(n)||s("Unrecognized scale type: "+Ct(n)),e.addScale(t.name,{type:n,domain:void 0})}(t,e))),V(t.data).forEach((t=>FU(t,e))),i.forEach((t=>PO(t,e))),(n||r).forEach((t=>function(t,e){const n=e.getSignal(t.name);let r=t.update;t.init&&(r?s("Signals can not include both init and update expressions."):(r=t.init,n.initonly=!0)),r&&(r=DB(r,e),n.update=r.$expr,n.params=r.$params),t.on&&t.on.forEach((t=>uO(t,e,n.id)))}(t,e))),V(t.axes).forEach((t=>nL(t,e))),V(t.marks).forEach((t=>wU(t,e))),V(t.legends).forEach((t=>kU(t,e))),t.title&&DU(t.title,e),e.parseLambdas(),e}const oL=t=>oN({enter:{x:{value:0},y:{value:0}},update:{width:{signal:"width"},height:{signal:"height"}}},t);function aL(t,e){const n=e.config,r=IN(e.root=e.add(jN())),i=function(t,e){const n=n=>KN(t[n],e[n]),r=[sL("background",n("background")),sL("autosize",Qz(n("autosize"))),sL("padding",eN(n("padding"))),sL("width",n("width")||0),sL("height",n("height")||0)],i=r.reduce(((t,e)=>(t[e.name]=e,t)),{}),o={};return V(t.signals).forEach((t=>{lt(i,t.name)?t=ot(i[t.name],t):r.push(t),o[t.name]=t})),V(e.signals).forEach((t=>{lt(o,t.name)||lt(i,t.name)||r.push(t)})),r}(t,n);i.forEach((t=>LN(t,e))),e.description=t.description||n.description,e.eventConfig=n.events,e.legends=e.objectProperty(n.legend&&n.legend.layout),e.locale=n.locale;const o=e.add(pO()),a=e.add(yO(BN(oL(t.encode),DR,uN,t.style,e,{pulse:IN(o)}))),s=e.add(RO({layout:e.objectProperty(t.layout),legends:e.legends,autosize:e.signalRef("autosize"),mark:r,pulse:IN(a)}));e.operators.pop(),e.pushState(IN(a),IN(s),null),iL(t,e,i),e.operators.push(s);let u=e.add(dO({mark:r,pulse:IN(s)}));return u=e.add(BO({pulse:IN(u)})),u=e.add(NO({pulse:IN(u)})),e.addData("root",new yU(e,o,o,u)),e}function sL(t,e){return e&&e.signal?{name:t,update:e.signal}:{name:t,value:e}}function uL(t,e){this.config=t||{},this.options=e||{},this.bindings=[],this.field={},this.signals={},this.lambdas={},this.scales={},this.events={},this.data={},this.streams=[],this.updates=[],this.operators=[],this.eventConfig=null,this.locale=null,this._id=0,this._subid=0,this._nextsub=[0],this._parent=[],this._encode=[],this._lookup=[],this._markpath=[]}function 
lL(t){this.config=t.config,this.options=t.options,this.legends=t.legends,this.field=Object.create(t.field),this.signals=Object.create(t.signals),this.lambdas=Object.create(t.lambdas),this.scales=Object.create(t.scales),this.events=Object.create(t.events),this.data=Object.create(t.data),this.streams=[],this.updates=[],this.operators=[],this._id=0,this._subid=++t._nextsub[0],this._nextsub=t._nextsub,this._parent=t._parent.slice(),this._encode=t._encode.slice(),this._lookup=t._lookup.slice(),this._markpath=t._markpath}function cL(t){return(k(t)?fL:hL)(t)}function fL(t){const e=t.length;let n="[";for(let r=0;r0?",":"")+(A(e)?e.signal||cL(e):Ct(e))}return n+"]"}function hL(t){let e,n,r="{",i=0;for(e in t)n=t[e],r+=(++i>1?",":"")+Ct(e)+":"+(A(n)?n.signal||cL(n):Ct(n));return r+"}"}uL.prototype=lL.prototype={parse(t){return iL(t,this)},fork(){return new lL(this)},isSubscope(){return this._subid>0},toRuntime(){return this.finish(),{description:this.description,operators:this.operators,streams:this.streams,updates:this.updates,bindings:this.bindings,eventConfig:this.eventConfig,locale:this.locale}},id(){return(this._subid?this._subid+":":0)+this._id++},add(t){return this.operators.push(t),t.id=this.id(),t.refs&&(t.refs.forEach((e=>{e.$ref=t.id})),t.refs=null),t},proxy(t){const e=t instanceof qN?IN(t):t;return this.add($O({value:e}))},addStream(t){return this.streams.push(t),t.id=this.id(),t},addUpdate(t){return this.updates.push(t),t},finish(){let t,e;for(t in this.root&&(this.root.root=!0),this.signals)this.signals[t].signal=t;for(t in this.scales)this.scales[t].scale=t;function n(t,e,n){let r,i;t&&(r=t.data||(t.data={}),i=r[e]||(r[e]=[]),i.push(n))}for(t in this.data){e=this.data[t],n(e.input,t,"input"),n(e.output,t,"output"),n(e.values,t,"values");for(const r in e.index)n(e.index[r],t,"index:"+r)}return this},pushState(t,e,n){this._encode.push(IN(this.add(NO({pulse:t})))),this._parent.push(e),this._lookup.push(n?IN(this.proxy(n)):null),this._markpath.push(-1)},popState(){this._encode.pop(),this._parent.pop(),this._lookup.pop(),this._markpath.pop()},parent(){return F(this._parent)},encode(){return F(this._encode)},lookup(){return F(this._lookup)},markpath(){const t=this._markpath;return++t[t.length-1]},fieldRef(t,e){if(xt(t))return WN(t,e);t.signal||s("Unsupported field reference: "+Ct(t));const n=t.signal;let r=this.field[n];if(!r){const t={name:this.signalRef(n)};e&&(t.as=e),this.field[n]=r=IN(this.add(xO(t)))}return r},compareRef(t){let e=!1;const n=t=>ZN(t)?(e=!0,this.signalRef(t.signal)):function(t){return t&&t.expr}(t)?(e=!0,this.exprRef(t.expr)):t,r=V(t.field).map(n),i=V(t.order).map(n);return e?IN(this.add(gO({fields:r,orders:i}))):YN(r,i)},keyRef(t,e){let n=!1;const r=this.signals;return t=V(t).map((t=>ZN(t)?(n=!0,IN(r[t.signal])):t)),n?IN(this.add(bO({fields:t,flat:e}))):function(t,e){const n={$key:t};return e&&(n.$flat=!0),n}(t,e)},sortRef(t){if(!t)return t;const e=VN(t.op,t.field),n=t.order||"ascending";return n.signal?IN(this.add(gO({fields:e,orders:this.signalRef(n.signal)}))):YN(e,n)},event(t,e){const n=t+":"+e;if(!this.events[n]){const r=this.id();this.streams.push({id:r,source:t,type:e}),this.events[n]=r}return this.events[n]},hasOwnSignal(t){return lt(this.signals,t)},addSignal(t,e){this.hasOwnSignal(t)&&s("Duplicate signal name: "+Ct(t));const n=e instanceof qN?e:this.add(jN(e));return this.signals[t]=n},getSignal(t){return this.signals[t]||s("Unrecognized signal name: "+Ct(t)),this.signals[t]},signalRef(t){return 
this.signals[t]?IN(this.signals[t]):(lt(this.lambdas,t)||(this.lambdas[t]=this.add(jN(null))),IN(this.lambdas[t]))},parseLambdas(){const t=Object.keys(this.lambdas);for(let e=0,n=t.length;er+Math.floor(o*t.random()),pdf:t=>t===Math.floor(t)&&t>=r&&t=i?1:(e-r+1)/o},icdf:t=>t>=0&&t<=1?r-1+Math.floor(t*o):NaN};return a.min(e).max(n)},t.randomKDE=gs,t.randomLCG=function(t){return function(){return(t=(1103515245*t+12345)%2147483647)/2147483647}},t.randomLogNormal=xs,t.randomMixture=bs,t.randomNormal=ps,t.randomUniform=Es,t.read=ca,t.regressionConstant=Ds,t.regressionExp=zs,t.regressionLinear=Ts,t.regressionLoess=Ls,t.regressionLog=Bs,t.regressionPoly=Rs,t.regressionPow=Ns,t.regressionQuad=Os,t.renderModule=T_,t.repeat=Mt,t.resetDefaultLocale=function(){return Co(),Bo(),Uo()},t.resetSVGClipId=Yg,t.resetSVGDefIds=function(){Yg(),Gp=0},t.responseType=la,t.runtimeContext=OB,t.sampleCurve=Is,t.sampleLogNormal=ms,t.sampleNormal=cs,t.sampleUniform=ws,t.scale=ap,t.sceneEqual=q_,t.sceneFromJSON=Zy,t.scenePickVisit=qm,t.sceneToJSON=Jy,t.sceneVisit=Lm,t.sceneZOrder=Um,t.scheme=Ap,t.serializeXML=r_,t.setHybridRendererOptions=function(t){A_.svgMarkTypes=t.svgMarkTypes??["text"],A_.svgOnTop=t.svgOnTop??!0,A_.debug=t.debug??!1},t.setRandom=function(e){t.random=e},t.span=Dt,t.splitAccessPath=u,t.stringValue=Ct,t.textMetrics=My,t.timeBin=Jr,t.timeFloor=wr,t.timeFormatLocale=No,t.timeInterval=Cr,t.timeOffset=$r,t.timeSequence=zr,t.timeUnitSpecifier=rr,t.timeUnits=er,t.toBoolean=Ft,t.toDate=$t,t.toNumber=S,t.toSet=Bt,t.toString=Tt,t.transform=Ka,t.transforms=Za,t.truncate=zt,t.truthy=p,t.tupleid=ya,t.typeParsers=Zo,t.utcFloor=Mr,t.utcInterval=Fr,t.utcOffset=Tr,t.utcSequence=Nr,t.utcdayofyear=hr,t.utcquarter=G,t.utcweek=dr,t.version="5.29.0",t.visitArray=Nt,t.week=sr,t.writeConfig=D,t.zero=h,t.zoomLinear=j,t.zoomLog=I,t.zoomPow=W,t.zoomSymlog=H}));
+//# sourceMappingURL=vega.min.js.map
diff --git a/docs/modules/custom_yara_rules.md b/docs/modules/custom_yara_rules.md
new file mode 100644
index 0000000000..3a7ab005cd
--- /dev/null
+++ b/docs/modules/custom_yara_rules.md
@@ -0,0 +1,149 @@
+# Custom Yara Rules
+
+### Overview
+Through the `excavate` internal module, BBOT supports searching through HTTP response data using custom YARA rules.
+
+This feature can be used with the command-line option `--custom-yara-rules` or `-cy`, followed by a file containing the YARA rules.
+
+Example:
+
+```
+bbot -m httpx --custom-yara-rules=test.yara -t http://example.com/
+```
+
+Where `test.yara` is a file on the filesystem. The file can contain multiple YARA rules, separated by newlines.
+
+YARA rules can be quite simple, the simplest example being a single string search:
+
+```
+rule find_string {
+    strings:
+        $str1 = "AAAABBBB"
+
+    condition:
+        $str1
+}
+```
+
+To look for multiple strings and match if any of them hit:
+
+```
+rule find_string {
+    strings:
+        $str1 = "AAAABBBB"
+        $str2 = "CCCCDDDD"
+
+    condition:
+        any of them
+}
+```
+
+One of the most important capabilities is the use of regexes within the rule, as shown in the following example:
+
+```
+rule find_AAAABBBB_regex {
+    strings:
+        $regex = /A{1,4}B{1,4}/
+
+    condition:
+        $regex
+}
+```
+
+*Note: YARA uses its own regex engine, which is not a 1:1 match with Python regexes. This means many existing regexes will have to be modified before they will work with YARA. The good news: YARA's regex engine is FAST, far faster than Python's!*
+
+Further discussion of the art of writing complex YARA rules goes far beyond the scope of this documentation.
A good place to start learning more is the [official YARA documentation](https://yara.readthedocs.io/en/stable/writingrules.html).
+
+The YARA engine provides plenty of room to make highly complex signatures possible, with various conditional operators available. Multiple signatures can be linked together to create sophisticated detection rules that can identify a wide range of specific content. This flexibility allows the crafting of efficient rules for detecting security vulnerabilities, leveraging logical operators, regular expressions, and other powerful features. Additionally, YARA's modular structure supports easy updates and maintenance of signature sets.
+
+### Custom options
+
+BBOT supports the use of a few custom `meta` attributes within YARA rules, which will alter the behavior of the rule and the post-processing of the results.
+
+#### description
+
+The description of the rule. If defined, it will end up in the description of any produced events.
+
+Example with no description provided:
+
+```
+[FINDING] {"description": "Custom Yara Rule [find_string] Matched via identifier [str1]", "host": "example.com", "url": "http://example.com"} excavate
+```
+
+Example with the description added:
+
+```
+[FINDING] {"description": "Custom Yara Rule [AAAABBBB] with description: [contains our test string] Matched via identifier [str1]", "host": "example.com", "url": "http://example.com"} excavate
+```
+
+That FINDING was produced with the following signature:
+
+```
+rule AAAABBBB {
+    meta:
+        description = "contains our test string"
+    strings:
+        $str1 = "AAAABBBB"
+    condition:
+        $str1
+}
+```
+
+#### tags
+
+Tags specified with this option will be passed on to any resulting emitted events. Tags are provided as a comma-separated string. Let's expand on the previous example:
+
+```
+rule AAAABBBB {
+    meta:
+        description = "contains our test string"
+        tags = "tag1,tag2,tag3"
+    strings:
+        $str1 = "AAAABBBB"
+    condition:
+        $str1
+}
+```
+
+Now, the BBOT FINDING includes these custom tags, as shown in the following output:
+
+```
+[FINDING] {"description": "Custom Yara Rule [AAAABBBB] with description: [contains our test string] Matched via identifier [str1]", "host": "example.com", "url": "http://example.com/"} excavate (tag1, tag2, tag3)
+```
+
+#### emit_match
+
+When set to `true`, the contents returned from a successful extraction via a YARA regex will be included in the emitted FINDING event.
+
+Consider the following example YARA rule:
+
+```
+rule SubstackLink
+{
+    meta:
+        description = "contains a Substack link"
+        emit_match = true
+    strings:
+        $substack_link = /https?:\/\/[a-zA-Z0-9.-]+\.substack\.com/
+    condition:
+        $substack_link
+}
+```
+
+When run against the Black Lantern Security homepage with the following BBOT command:
+
+```
+bbot -m httpx --custom-yara-rules=substack.yara -t http://www.blacklanternsecurity.com/
+```
+
+We get the following result. Note that the finding now contains the actual link that was identified with the regex.
+
+```
+[FINDING] {"description": "Custom Yara Rule [SubstackLink] with description: [contains a Substack link] Matched via identifier [substack_link] and extracted [https://blacklanternsecurity.substack.com]", "host": "www.blacklanternsecurity.com", "url": "https://www.blacklanternsecurity.com/"} excavate
+```
diff --git a/docs/modules/internal_modules.md b/docs/modules/internal_modules.md
new file mode 100644
index 0000000000..cb189ae196
--- /dev/null
+++ b/docs/modules/internal_modules.md
@@ -0,0 +1,85 @@
+# Internal Modules
+
+## What are internal modules?
+
+Internal modules are just like regular modules, except that they run all the time. They do not have to be explicitly enabled. They can, however, be explicitly disabled if needed.
+
+Turning them off is simple: each one has a root-level config option that can be set to False to disable it:
+
+```
+# Infer certain events from others, e.g. IPs from IP ranges, DNS_NAMEs from URLs, etc.
+speculate: True
+# Passively search event data for URLs, hostnames, emails, etc.
+excavate: True
+# Summarize activity at the end of a scan
+aggregate: True
+# DNS resolution
+dnsresolve: True
+# Cloud provider tagging
+cloudcheck: True
+```
+
+These modules execute core functionality that is essential for a typical BBOT scan. Let's take a quick look at each one's functionality:
+
+### aggregate
+
+Summarize statistics at the end of a scan. Disable if you don't want to see this table.
+
+### cloud
+
+The cloud module looks at events, tries to determine whether they are associated with a cloud provider, and tags them as such. It can also identify certain cloud resources.
+
+### dns
+
+The DNS internal module controls the basic DNS resolution BBOT performs, along with all of the supporting machinery like wildcard detection, etc.
+
+### excavate
+
+The excavate internal module is designed to passively extract valuable information from HTTP response data. It primarily uses YARA regexes to extract information, with various events being produced from the post-processing of the YARA results.
+
+Here is a summary of the data it produces:
+
+#### URLs
+
+Excavate extracts URLs from all visited pages, which is already half of a web spider. The other half is recursion, which is baked into BBOT from the ground up. Therefore, protections are in place by default in the form of the `web_spider_distance` and `web_spider_depth` settings. These settings restrict which URLs recursively harvested from HTTP responses are followed, preventing endless runaway scans. However, in the right situation the controlled use of a web spider is extremely powerful.
+
+#### Parameter Extraction
+
+The parameter extraction functionality identifies and extracts key web parameters from HTTP responses, and produces `WEB_PARAMETER` events. This includes parameters found in GET and POST requests, HTML forms, and jQuery requests. Currently, these are only used by the `hunt` module, and by the `paramminer` modules, to a limited degree. However, future functionality will make extensive use of these events.
+
+#### Email Extraction
+
+Detect email addresses within HTTP_RESPONSE data.
+
+#### Error Detection
+
+Scans for verbose error messages in HTTP responses and raw text data. By identifying specific error signatures from various programming languages and frameworks, this feature helps uncover misconfigurations, debugging information, and potential vulnerabilities. This insight is invaluable for identifying weak points or anomalies in web applications.
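+
+As an illustration only (excavate's real error detection is implemented as YARA rules), a minimal Python sketch of the same signature-matching idea might look like this. The signatures shown are hypothetical examples, not BBOT's actual rule set:
+
+```python
+import re
+
+# A few hypothetical error signatures, keyed by the technology they suggest
+ERROR_SIGNATURES = {
+    "PHP": re.compile(r"Fatal error: .+ on line \d+"),
+    "Python": re.compile(r"Traceback \(most recent call last\):"),
+    "ASP.NET": re.compile(r"Server Error in '/' Application"),
+}
+
+def find_verbose_errors(http_body: str) -> list[tuple[str, str]]:
+    """Return (technology, matched_text) pairs for any signatures found."""
+    hits = []
+    for tech, pattern in ERROR_SIGNATURES.items():
+        match = pattern.search(http_body)
+        if match:
+            hits.append((tech, match.group(0)))
+    return hits
+
+print(find_verbose_errors("Fatal error: Uncaught Error in /var/www/index.php on line 42"))
+# [('PHP', 'Fatal error: Uncaught Error in /var/www/index.php on line 42')]
+```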
+
+#### Content Security Policy (CSP) Extraction
+The CSP extraction capability focuses on extracting domains from Content-Security-Policy headers. By analyzing these headers, BBOT can identify additional domains, which are fed back into the scan.
+
+#### Serialization Detection
+Serialized objects are a common source of serious security vulnerabilities. Excavate aims to detect those used in Java, .NET, and PHP applications.
+
+#### Functionality Detection
+Looks for specific web functionalities such as file upload fields and WSDL URLs. By identifying these elements, BBOT can pinpoint areas of the application that may require further scrutiny for security vulnerabilities.
+
+#### Non-HTTP Scheme Detection
+The non-HTTP scheme detection capability extracts URLs with non-HTTP schemes, such as ftp, mailto, and javascript. By identifying these URLs, BBOT can uncover additional vectors for attack or information leakage.
+
+#### Custom Yara Rules
+
+Excavate supports the use of custom YARA rules, which will be added to the other rules before the scan starts. For more info, see [Custom Yara Rules](custom_yara_rules.md).
+
+### speculate
+
+Speculate is all about inferring one data type from another, particularly when certain tools like port scanners are not enabled. This is essential functionality for most BBOT scans, allowing for the discovery of web resources when starting with a DNS-only target list without a port scanner. It bridges gaps in the data, providing a more comprehensive view of the target by leveraging existing information. Two of these inferences are sketched in code after the list below.
+
+* IP_RANGE: Converts an IP range into individual IP addresses and emits them as IP_ADDRESS events.
+* DNS_NAME: Generates parent domains from DNS names.
+* URL and URL_UNVERIFIED: Infers open TCP ports from URLs and speculates on sub-directory URLs.
+* General URL Speculation: Emits URL_UNVERIFIED events for URLs not already in the event's history.
+* IP_ADDRESS / DNS_NAME: Infers open TCP ports if active port scanning is not enabled.
+* ORG_STUB: Derives organization stubs from TLDs, social stubs, or Azure tenant names and emits them as ORG_STUB events.
+* USERNAME: Converts usernames to email addresses if they validate as such.
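+
+To make the first two inferences concrete, here is a simplified, illustrative sketch (not BBOT's actual code) of how an IP range and a DNS name might be expanded:
+
+```python
+import ipaddress
+
+def ips_from_range(ip_range: str) -> list[str]:
+    """IP_RANGE -> IP_ADDRESS: expand a CIDR range into individual addresses."""
+    return [str(ip) for ip in ipaddress.ip_network(ip_range, strict=False)]
+
+def parent_domains(dns_name: str) -> list[str]:
+    """DNS_NAME -> DNS_NAME: generate parent domains from a subdomain."""
+    parts = dns_name.split(".")
+    # Stop before the bare TLD; the real implementation is more careful
+    # about multi-part public suffixes like .co.uk.
+    return [".".join(parts[i:]) for i in range(1, len(parts) - 1)]
+
+print(ips_from_range("192.0.2.0/30"))
+# ['192.0.2.0', '192.0.2.1', '192.0.2.2', '192.0.2.3']
+print(parent_domains("www.api.evilcorp.com"))
+# ['api.evilcorp.com', 'evilcorp.com']
+```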
diff --git a/docs/modules/list_of_modules.md b/docs/modules/list_of_modules.md new file mode 100644 index 0000000000..f7989889b0 --- /dev/null +++ b/docs/modules/list_of_modules.md @@ -0,0 +1,146 @@ +# List of Modules + + +| Module | Type | Needs API Key | Description | Flags | Consumed Events | Produced Events | Author | Created Date | +|----------------------|----------|-----------------|-------------------------------------------------------------------------------------------------------------------------------------------------------|------------------------------------------------------------------|-----------------------------------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------|------------------|----------------| +| ajaxpro | scan | No | Check for potentially vulnerable Ajaxpro instances | active, safe, web-thorough | HTTP_RESPONSE, URL | FINDING, VULNERABILITY | @liquidsec | 2024-01-18 | +| baddns | scan | No | Check hosts for domain/subdomain takeovers | active, baddns, cloud-enum, safe, subdomain-hijack, web-basic | DNS_NAME, DNS_NAME_UNRESOLVED | FINDING, VULNERABILITY | @liquidsec | 2024-01-18 | +| baddns_direct | scan | No | Check for unusual subdomain / service takeover edge cases that require direct detection | active, baddns, cloud-enum, safe, subdomain-enum | STORAGE_BUCKET, URL | FINDING, VULNERABILITY | @liquidsec | 2024-01-29 | +| baddns_zone | scan | No | Check hosts for DNS zone transfers and NSEC walks | active, baddns, cloud-enum, safe, subdomain-enum | DNS_NAME | FINDING, VULNERABILITY | @liquidsec | 2024-01-29 | +| badsecrets | scan | No | Library for detecting known or weak secrets across many web frameworks | active, safe, web-basic | HTTP_RESPONSE | FINDING, TECHNOLOGY, VULNERABILITY | @liquidsec | 2022-11-19 | +| bucket_amazon | scan | No | Check for S3 buckets related to target | active, cloud-enum, safe, web-basic | DNS_NAME, STORAGE_BUCKET | FINDING, STORAGE_BUCKET | @TheTechromancer | 2022-11-04 | +| bucket_azure | scan | No | Check for Azure storage blobs related to target | active, cloud-enum, safe, web-basic | DNS_NAME, STORAGE_BUCKET | FINDING, STORAGE_BUCKET | @TheTechromancer | 2022-11-04 | +| bucket_digitalocean | scan | No | Check for DigitalOcean spaces related to target | active, cloud-enum, safe, slow, web-thorough | DNS_NAME, STORAGE_BUCKET | FINDING, STORAGE_BUCKET | @TheTechromancer | 2022-11-08 | +| bucket_firebase | scan | No | Check for open Firebase databases related to target | active, cloud-enum, safe, web-basic | DNS_NAME, STORAGE_BUCKET | FINDING, STORAGE_BUCKET | @TheTechromancer | 2023-03-20 | +| bucket_google | scan | No | Check for Google object storage related to target | active, cloud-enum, safe, web-basic | DNS_NAME, STORAGE_BUCKET | FINDING, STORAGE_BUCKET | @TheTechromancer | 2022-11-04 | +| bypass403 | scan | No | Check 403 pages for common bypasses | active, aggressive, web-thorough | URL | FINDING | @liquidsec | 2022-07-05 | +| dastardly | scan | No | Lightweight web application security scanner | active, aggressive, deadly, slow, web-thorough | HTTP_RESPONSE | FINDING, VULNERABILITY | @domwhewell-sage | 2023-12-11 | +| dnsbrute | scan | No | Brute-force subdomains with massdns + static wordlist | active, aggressive, subdomain-enum | DNS_NAME | DNS_NAME | @TheTechromancer | 2024-04-24 | +| dnsbrute_mutations | scan | No | Brute-force subdomains with massdns + target-specific mutations | active, aggressive, 
slow, subdomain-enum | DNS_NAME | DNS_NAME | @TheTechromancer | 2024-04-25 |
+| dnscommonsrv | scan | No | Check for common SRV records | active, safe, subdomain-enum | DNS_NAME | DNS_NAME | @TheTechromancer | 2022-05-15 |
+| dotnetnuke | scan | No | Scan for critical DotNetNuke (DNN) vulnerabilities | active, aggressive, web-thorough | HTTP_RESPONSE | TECHNOLOGY, VULNERABILITY | @liquidsec | 2023-11-21 |
+| ffuf | scan | No | A fast web fuzzer written in Go | active, aggressive, deadly | URL | URL_UNVERIFIED | @liquidsec | 2022-04-10 |
+| ffuf_shortnames | scan | No | Use ffuf in combination with IIS shortnames | active, aggressive, iis-shortnames, web-thorough | URL_HINT | URL_UNVERIFIED | @liquidsec | 2022-07-05 |
+| filedownload | scan | No | Download common filetypes such as PDF, DOCX, PPTX, etc. | active, safe, web-basic | HTTP_RESPONSE, URL_UNVERIFIED | FILESYSTEM | @TheTechromancer | 2023-10-11 |
+| fingerprintx | scan | No | Fingerprint exposed services like RDP, SSH, MySQL, etc. | active, safe, service-enum, slow | OPEN_TCP_PORT | PROTOCOL | @TheTechromancer | 2023-01-30 |
+| generic_ssrf | scan | No | Check for generic SSRFs | active, aggressive, web-thorough | URL | VULNERABILITY | @liquidsec | 2022-07-30 |
+| git | scan | No | Check for exposed .git repositories | active, code-enum, safe, web-basic | URL | CODE_REPOSITORY, FINDING | @TheTechromancer | 2023-05-30 |
+| gitlab | scan | No | Detect GitLab instances and query them for repositories | active, code-enum, safe | HTTP_RESPONSE, SOCIAL, TECHNOLOGY | CODE_REPOSITORY, FINDING, SOCIAL, TECHNOLOGY | @TheTechromancer | 2024-03-11 |
+| gowitness | scan | No | Take screenshots of webpages | active, safe, web-screenshots | SOCIAL, URL | TECHNOLOGY, URL, URL_UNVERIFIED, WEBSCREENSHOT | @TheTechromancer | 2022-07-08 |
+| host_header | scan | No | Try common HTTP Host header spoofing techniques | active, aggressive, web-thorough | HTTP_RESPONSE | FINDING | @liquidsec | 2022-07-27 |
+| httpx | scan | No | Visit webpages.
Many other modules rely on httpx | active, cloud-enum, safe, social-enum, subdomain-enum, web-basic | OPEN_TCP_PORT, URL, URL_UNVERIFIED | HTTP_RESPONSE, URL | @TheTechromancer | 2022-07-08 | +| hunt | scan | No | Watch for commonly-exploitable HTTP parameters | active, safe, web-thorough | WEB_PARAMETER | FINDING | @liquidsec | 2022-07-20 | +| iis_shortnames | scan | No | Check for IIS shortname vulnerability | active, iis-shortnames, safe, web-basic | URL | URL_HINT | @liquidsec | 2022-04-15 | +| newsletters | scan | No | Searches for Newsletter Submission Entry Fields on Websites | active, safe | HTTP_RESPONSE | FINDING | @stryker2k2 | 2024-02-02 | +| ntlm | scan | No | Watch for HTTP endpoints that support NTLM authentication | active, safe, web-basic | HTTP_RESPONSE, URL | DNS_NAME, FINDING | @liquidsec | 2022-07-25 | +| nuclei | scan | No | Fast and customisable vulnerability scanner | active, aggressive, deadly | URL | FINDING, TECHNOLOGY, VULNERABILITY | @TheTechromancer | 2022-03-12 | +| oauth | scan | No | Enumerate OAUTH and OpenID Connect services | active, affiliates, cloud-enum, safe, subdomain-enum, web-basic | DNS_NAME, URL_UNVERIFIED | DNS_NAME | @TheTechromancer | 2023-07-12 | +| paramminer_cookies | scan | No | Smart brute-force to check for common HTTP cookie parameters | active, aggressive, slow, web-paramminer | HTTP_RESPONSE, WEB_PARAMETER | FINDING, WEB_PARAMETER | @liquidsec | 2022-06-27 | +| paramminer_getparams | scan | No | Use smart brute-force to check for common HTTP GET parameters | active, aggressive, slow, web-paramminer | HTTP_RESPONSE, WEB_PARAMETER | FINDING, WEB_PARAMETER | @liquidsec | 2022-06-28 | +| paramminer_headers | scan | No | Use smart brute-force to check for common HTTP header parameters | active, aggressive, slow, web-paramminer | HTTP_RESPONSE, WEB_PARAMETER | WEB_PARAMETER | @liquidsec | 2022-04-15 | +| portscan | scan | No | Port scan with masscan. By default, scans top 100 ports. 
| active, portscan, safe | DNS_NAME, IP_ADDRESS, IP_RANGE | OPEN_TCP_PORT | @TheTechromancer | 2024-05-15 | +| robots | scan | No | Look for and parse robots.txt | active, safe, web-basic | URL | URL_UNVERIFIED | @liquidsec | 2023-02-01 | +| securitytxt | scan | No | Check for security.txt content | active, cloud-enum, safe, subdomain-enum, web-basic | DNS_NAME | EMAIL_ADDRESS, URL_UNVERIFIED | @colin-stubbs | 2024-05-26 | +| smuggler | scan | No | Check for HTTP smuggling | active, aggressive, slow, web-thorough | URL | FINDING | @liquidsec | 2022-07-06 | +| sslcert | scan | No | Visit open ports and retrieve SSL certificates | active, affiliates, email-enum, safe, subdomain-enum, web-basic | OPEN_TCP_PORT | DNS_NAME, EMAIL_ADDRESS | @TheTechromancer | 2022-03-30 | +| telerik | scan | No | Scan for critical Telerik vulnerabilities | active, aggressive, web-thorough | HTTP_RESPONSE, URL | FINDING, VULNERABILITY | @liquidsec | 2022-04-10 | +| url_manipulation | scan | No | Attempt to identify URL parsing/routing based vulnerabilities | active, aggressive, web-thorough | URL | FINDING | @liquidsec | 2022-09-27 | +| vhost | scan | No | Fuzz for virtual hosts | active, aggressive, deadly, slow | URL | DNS_NAME, VHOST | @liquidsec | 2022-05-02 | +| wafw00f | scan | No | Web Application Firewall Fingerprinting Tool | active, aggressive | URL | WAF | @liquidsec | 2023-02-15 | +| wappalyzer | scan | No | Extract technologies from web responses | active, safe, web-basic | HTTP_RESPONSE | TECHNOLOGY | @liquidsec | 2022-04-15 | +| wpscan | scan | No | Wordpress security scanner. Highly recommended to use an API key for better results. | active, aggressive | HTTP_RESPONSE, TECHNOLOGY | FINDING, TECHNOLOGY, URL_UNVERIFIED, VULNERABILITY | @domwhewell-sage | 2024-05-29 | +| affiliates | scan | No | Summarize affiliate domains at the end of a scan | affiliates, passive, report, safe | * | | @TheTechromancer | 2022-07-25 | +| anubisdb | scan | No | Query jldc.me's database for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | @TheTechromancer | 2022-10-04 | +| apkpure | scan | No | Download android applications from apkpure.com | code-enum, passive, safe | MOBILE_APP | FILESYSTEM | @domwhewell-sage | 2024-10-11 | +| asn | scan | No | Query ripe and bgpview.io for ASNs | passive, report, safe, subdomain-enum | IP_ADDRESS | ASN | @TheTechromancer | 2022-07-25 | +| azure_realm | scan | No | Retrieves the "AuthURL" from login.microsoftonline.com/getuserrealm | affiliates, cloud-enum, passive, safe, subdomain-enum, web-basic | DNS_NAME | URL_UNVERIFIED | @TheTechromancer | 2023-07-12 | +| azure_tenant | scan | No | Query Azure for tenant sister domains | affiliates, cloud-enum, passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | @TheTechromancer | 2024-07-04 | +| bevigil | scan | Yes | Retrieve OSINT data from mobile applications using BeVigil | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME, URL_UNVERIFIED | @alt-glitch | 2022-10-26 | +| binaryedge | scan | Yes | Query the BinaryEdge API | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | @TheTechromancer | 2022-08-17 | +| bucket_file_enum | scan | No | Works in conjunction with the filedownload module to download files from open storage buckets. 
Currently supported cloud providers: AWS, DigitalOcean | cloud-enum, passive, safe | STORAGE_BUCKET | URL_UNVERIFIED | @TheTechromancer | 2023-11-14 | +| bufferoverrun | scan | Yes | Query BufferOverrun's TLS API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | @TheTechromancer | 2024-10-23 | +| builtwith | scan | Yes | Query Builtwith.com for subdomains | affiliates, passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | @TheTechromancer | 2022-08-23 | +| c99 | scan | Yes | Query the C99 API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | @TheTechromancer | 2022-07-08 | +| censys | scan | Yes | Query the Censys API | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | @TheTechromancer | 2022-08-04 | +| certspotter | scan | No | Query Certspotter's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | @TheTechromancer | 2022-07-28 | +| chaos | scan | Yes | Query ProjectDiscovery's Chaos API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | @TheTechromancer | 2022-08-14 | +| code_repository | scan | No | Look for code repository links in webpages | code-enum, passive, safe | URL_UNVERIFIED | CODE_REPOSITORY | @domwhewell-sage | 2024-05-15 | +| credshed | scan | Yes | Send queries to your own credshed server to check for known credentials of your targets | passive, safe | DNS_NAME | EMAIL_ADDRESS, HASHED_PASSWORD, PASSWORD, USERNAME | @SpamFaux | 2023-10-12 | +| crt | scan | No | Query crt.sh (certificate transparency) for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | @TheTechromancer | 2022-05-13 | +| dehashed | scan | Yes | Execute queries against dehashed.com for exposed credentials | email-enum, passive, safe | DNS_NAME | EMAIL_ADDRESS, HASHED_PASSWORD, PASSWORD, USERNAME | @SpamFaux | 2023-10-12 | +| digitorus | scan | No | Query certificatedetails.com for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | @TheTechromancer | 2023-07-25 | +| dnsbimi | scan | No | Check DNS_NAME's for BIMI records to find image and certificate hosting URL's | cloud-enum, passive, safe, subdomain-enum | DNS_NAME | RAW_DNS_RECORD, URL_UNVERIFIED | @colin-stubbs | 2024-11-15 | +| dnscaa | scan | No | Check for CAA records | email-enum, passive, safe, subdomain-enum | DNS_NAME | DNS_NAME, EMAIL_ADDRESS, URL_UNVERIFIED | @colin-stubbs | 2024-05-26 | +| dnsdumpster | scan | No | Query dnsdumpster for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | @TheTechromancer | 2022-03-12 | +| dnstlsrpt | scan | No | Check for TLS-RPT records | cloud-enum, email-enum, passive, safe, subdomain-enum | DNS_NAME | EMAIL_ADDRESS, RAW_DNS_RECORD, URL_UNVERIFIED | @colin-stubbs | 2024-07-26 | +| docker_pull | scan | No | Download images from a docker repository | code-enum, passive, safe, slow | CODE_REPOSITORY | FILESYSTEM | @domwhewell-sage | 2024-03-24 | +| dockerhub | scan | No | Search for docker repositories of discovered orgs/usernames | code-enum, passive, safe | ORG_STUB, SOCIAL | CODE_REPOSITORY, SOCIAL, URL_UNVERIFIED | @domwhewell-sage | 2024-03-12 | +| emailformat | scan | No | Query email-format.com for email addresses | email-enum, passive, safe | DNS_NAME | EMAIL_ADDRESS | @TheTechromancer | 2022-07-11 | +| extractous | scan | No | Module to extract data from files | passive, safe | FILESYSTEM | RAW_TEXT | @domwhewell-sage | 2024-06-03 | +| fullhunt | scan | Yes | Query the fullhunt.io API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | 
@TheTechromancer | 2022-08-24 | +| git_clone | scan | No | Clone code github repositories | code-enum, passive, safe, slow | CODE_REPOSITORY | FILESYSTEM | @domwhewell-sage | 2024-03-08 | +| gitdumper | scan | No | Download a leaked .git folder recursively or by fuzzing common names | code-enum, passive, safe, slow | CODE_REPOSITORY | FILESYSTEM | @domwhewell-sage | 2025-02-11 | +| github_codesearch | scan | Yes | Query Github's API for code containing the target domain name | code-enum, passive, safe, subdomain-enum | DNS_NAME | CODE_REPOSITORY, URL_UNVERIFIED | @domwhewell-sage | 2023-12-14 | +| github_org | scan | No | Query Github's API for organization and member repositories | code-enum, passive, safe, subdomain-enum | ORG_STUB, SOCIAL | CODE_REPOSITORY | @domwhewell-sage | 2023-12-14 | +| github_workflows | scan | No | Download a github repositories workflow logs and workflow artifacts | code-enum, passive, safe | CODE_REPOSITORY | FILESYSTEM | @domwhewell-sage | 2024-04-29 | +| google_playstore | scan | No | Search for android applications on play.google.com | code-enum, passive, safe | CODE_REPOSITORY, ORG_STUB | MOBILE_APP | @domwhewell-sage | 2024-10-08 | +| hackertarget | scan | No | Query the hackertarget.com API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | @TheTechromancer | 2022-07-28 | +| hunterio | scan | Yes | Query hunter.io for emails | email-enum, passive, safe, subdomain-enum | DNS_NAME | DNS_NAME, EMAIL_ADDRESS, URL_UNVERIFIED | @TheTechromancer | 2022-04-25 | +| internetdb | scan | No | Query Shodan's InternetDB for open ports, hostnames, technologies, and vulnerabilities | passive, portscan, safe, subdomain-enum | DNS_NAME, IP_ADDRESS | DNS_NAME, FINDING, OPEN_TCP_PORT, TECHNOLOGY, VULNERABILITY | @TheTechromancer | 2023-12-22 | +| ip2location | scan | Yes | Query IP2location.io's API for geolocation information. 
| passive, safe | IP_ADDRESS | GEOLOCATION | @TheTechromancer | 2023-09-12 | +| ipneighbor | scan | No | Look beside IPs in their surrounding subnet | aggressive, passive, subdomain-enum | IP_ADDRESS | IP_ADDRESS | @TheTechromancer | 2022-06-08 | +| ipstack | scan | Yes | Query IPStack's GeoIP API | passive, safe | IP_ADDRESS | GEOLOCATION | @tycoonslive | 2022-11-26 | +| jadx | scan | No | Decompile APKs and XAPKs using JADX | code-enum, passive, safe | FILESYSTEM | FILESYSTEM | @domwhewell-sage | 2024-11-04 | +| leakix | scan | No | Query leakix.net for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | @TheTechromancer | 2022-07-11 | +| myssl | scan | No | Query myssl.com's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | @TheTechromancer | 2023-07-10 | +| otx | scan | No | Query otx.alienvault.com for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | @TheTechromancer | 2022-08-24 | +| passivetotal | scan | Yes | Query the PassiveTotal API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | @TheTechromancer | 2022-08-08 | +| pgp | scan | No | Query common PGP servers for email addresses | email-enum, passive, safe | DNS_NAME | EMAIL_ADDRESS | @TheTechromancer | 2022-08-10 | +| portfilter | scan | No | Filter out unwanted open ports from cloud/CDN targets | passive, safe | OPEN_TCP_PORT, URL, URL_UNVERIFIED | | @TheTechromancer | 2025-01-06 | +| postman | scan | No | Query Postman's API for related workspaces, collections, requests and download them | code-enum, passive, safe, subdomain-enum | ORG_STUB, SOCIAL | CODE_REPOSITORY | @domwhewell-sage | 2024-09-07 | +| postman_download | scan | No | Download workspaces, collections, requests from Postman | code-enum, passive, safe, subdomain-enum | CODE_REPOSITORY | FILESYSTEM | @domwhewell-sage | 2024-09-07 | +| rapiddns | scan | No | Query rapiddns.io for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | @TheTechromancer | 2022-08-24 | +| securitytrails | scan | Yes | Query the SecurityTrails API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | @TheTechromancer | 2022-07-03 | +| shodan_dns | scan | Yes | Query Shodan for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | @TheTechromancer | 2022-07-03 | +| sitedossier | scan | No | Query sitedossier.com for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | @TheTechromancer | 2023-08-04 | +| skymem | scan | No | Query skymem.info for email addresses | email-enum, passive, safe | DNS_NAME | EMAIL_ADDRESS | @TheTechromancer | 2022-07-11 | +| social | scan | No | Look for social media links in webpages | passive, safe, social-enum | URL_UNVERIFIED | SOCIAL | @TheTechromancer | 2023-03-28 | +| subdomaincenter | scan | No | Query subdomain.center's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | @TheTechromancer | 2023-07-26 | +| subdomainradar | scan | Yes | Query the Subdomain API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | @TheTechromancer | 2022-07-08 | +| trickest | scan | Yes | Query Trickest's API for subdomains | affiliates, passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | @amiremami | 2024-07-27 | +| trufflehog | scan | No | TruffleHog is a tool for finding credentials | code-enum, passive, safe | CODE_REPOSITORY, FILESYSTEM, HTTP_RESPONSE, RAW_TEXT | FINDING, VULNERABILITY | @domwhewell-sage | 2024-03-12 | +| urlscan | scan | No | Query urlscan.io for subdomains | passive, safe, 
subdomain-enum | DNS_NAME | DNS_NAME, URL_UNVERIFIED | @TheTechromancer | 2022-06-09 |
+| viewdns | scan | No | Query viewdns.info's reverse whois for related domains | affiliates, passive, safe | DNS_NAME | DNS_NAME | @TheTechromancer | 2022-07-04 |
+| virustotal | scan | Yes | Query VirusTotal's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | @TheTechromancer | 2022-08-25 |
+| wayback | scan | No | Query archive.org's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME, URL_UNVERIFIED | @liquidsec | 2022-04-01 |
+| zoomeye | scan | Yes | Query ZoomEye's API for subdomains | affiliates, passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | @TheTechromancer | 2022-08-03 |
+| asset_inventory | output | No | Merge hosts, open ports, technologies, findings, etc. into a single asset inventory CSV | | DNS_NAME, FINDING, HTTP_RESPONSE, IP_ADDRESS, OPEN_TCP_PORT, TECHNOLOGY, URL, VULNERABILITY, WAF | IP_ADDRESS, OPEN_TCP_PORT | @liquidsec | 2022-09-30 |
+| csv | output | No | Output to CSV | | * | | @TheTechromancer | 2022-04-07 |
+| discord | output | No | Message a Discord channel when certain events are encountered | | * | | @TheTechromancer | 2023-08-14 |
+| emails | output | No | Output any email addresses found belonging to the target domain | email-enum | EMAIL_ADDRESS | | @domwhewell-sage | 2023-12-23 |
+| http | output | No | Send every event to a custom URL via a web request | | * | | @TheTechromancer | 2022-04-13 |
+| json | output | No | Output to Newline-Delimited JSON (NDJSON) | | * | | @TheTechromancer | 2022-04-07 |
+| mysql | output | No | Output scan data to a MySQL database | | * | | @TheTechromancer | 2024-11-13 |
+| neo4j | output | No | Output to Neo4j | | * | | @TheTechromancer | 2022-04-07 |
+| nmap_xml | output | No | Output to Nmap XML | | DNS_NAME, HTTP_RESPONSE, IP_ADDRESS, OPEN_TCP_PORT, PROTOCOL | | @TheTechromancer | 2024-11-16 |
+| postgres | output | No | Output scan data to a PostgreSQL database | | * | | @TheTechromancer | 2024-11-08 |
+| python | output | No | Output via Python API | | * | | @TheTechromancer | 2022-09-13 |
+| slack | output | No | Message a Slack channel when certain events are encountered | | * | | @TheTechromancer | 2023-08-14 |
+| splunk | output | No | Send every event to a Splunk instance through HTTP Event Collector | | * | | @w0Tx | 2024-02-17 |
+| sqlite | output | No | Output scan data to a SQLite database | | * | | @TheTechromancer | 2024-11-07 |
+| stdout | output | No | Output to text | | * | | @TheTechromancer | 2024-04-03 |
+| subdomains | output | No | Output only resolved, in-scope subdomains | subdomain-enum | DNS_NAME, DNS_NAME_UNRESOLVED | | @TheTechromancer | 2023-07-31 |
+| teams | output | No | Message a Teams channel when certain events are encountered | | * | | @TheTechromancer | 2023-08-14 |
+| txt | output | No | Output to text | | * | | @TheTechromancer | 2024-04-03 |
+| web_parameters | output | No | Output WEB_PARAMETER names to a file | | WEB_PARAMETER | | @liquidsec | 2025-01-25 |
+| web_report | output | No | Create a markdown report with web assets | | FINDING, TECHNOLOGY, URL, VHOST, VULNERABILITY | | @liquidsec | 2023-02-08 |
+| websocket | output | No | Output to websockets | | * | | @TheTechromancer | 2022-04-15 |
+| cloudcheck | internal | No | Tag events by cloud provider, identify cloud resources like storage buckets | | * | | @TheTechromancer | 2024-07-07 |
+| dnsresolve | internal | No | Perform DNS resolution | | * | DNS_NAME, IP_ADDRESS, RAW_DNS_RECORD |
@TheTechromancer | 2022-04-08 | +| aggregate | internal | No | Summarize statistics at the end of a scan | passive, safe | | | @TheTechromancer | 2022-07-25 | +| excavate | internal | No | Passively extract juicy tidbits from scan data | passive | HTTP_RESPONSE, RAW_TEXT | URL_UNVERIFIED, WEB_PARAMETER | @liquidsec | 2022-06-27 | +| speculate | internal | No | Derive certain event types from others by common sense | passive | AZURE_TENANT, DNS_NAME, DNS_NAME_UNRESOLVED, HTTP_RESPONSE, IP_ADDRESS, IP_RANGE, SOCIAL, STORAGE_BUCKET, URL, URL_UNVERIFIED, USERNAME | DNS_NAME, FINDING, IP_ADDRESS, OPEN_TCP_PORT, ORG_STUB | @liquidsec | 2022-05-03 | +| unarchive | internal | No | Extract different types of files into folders on the filesystem | passive, safe | FILESYSTEM | FILESYSTEM | @domwhewell-sage | 2024-12-08 | + + +For a list of module config options, see [Module Options](../scanning/configuration.md#module-config-options). diff --git a/docs/modules/nuclei.md b/docs/modules/nuclei.md new file mode 100644 index 0000000000..e9cb66c0d9 --- /dev/null +++ b/docs/modules/nuclei.md @@ -0,0 +1,122 @@ +# Nuclei + +## Overview + +BBOT integrates with [Nuclei](https://github.com/projectdiscovery/nuclei), an open-source web vulnerability scanner by Project Discovery. This is one of the ways BBOT makes it possible to go from a single target domain/IP all the way to confirmed vulnerabilities, in one scan. + +![Nuclei Killchain](https://github.com/blacklanternsecurity/bbot/assets/24899338/7174c4ba-4a6e-4596-bb89-5a0c5f5abe74) + + +* The BBOT Nuclei module ingests **[URL]** events and emits events of type **[VULNERABILITY]** or **[FINDING]** +* Vulnerabilities will inherit their severity from the Nuclei templates +* Nuclei templates of severity INFO will be emitted as **[FINDINGS]** + +## Default Behavior + +* By default, only "directory URLs" (URLs ending in a slash) will be scanned, but ALL templates will be used (**BE CAREFUL!**) +* Because it's so aggressive, Nuclei is considered a **deadly** module. This means you need to use the flag **--allow-deadly** to turn it on. 
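+
+To make the "directory URL" distinction concrete, here is an illustrative sketch (an assumption about the filtering logic, not BBOT's actual code; the real behavior is controlled by the `directory_only` option described below):
+
+```python
+from urllib.parse import urlparse
+
+def is_directory_url(url: str) -> bool:
+    """True for URLs whose path ends in a slash, e.g. http://evilcorp.com/admin/."""
+    path = urlparse(url).path
+    # Treat an empty path as the root directory (an assumption for this sketch)
+    return path == "" or path.endswith("/")
+
+print(is_directory_url("http://evilcorp.com/admin/"))     # True
+print(is_directory_url("http://evilcorp.com/admin.php"))  # False
+```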
+
+## Specifying custom templates
+
+You can specify individual Nuclei templates by setting `modules.nuclei.templates` to a comma-separated list of their filenames:
+
+```bash
+bbot -m nuclei -c modules.nuclei.templates=http/takeovers/airee-takeover.yaml,http/takeovers/cargo-takeover.yaml
+```
+
+...or via the config:
+
+```yaml
+modules:
+  nuclei:
+    templates: http/takeovers/airee-takeover.yaml,http/takeovers/cargo-takeover.yaml
+```
+
+## Configuration and Options
+
+The Nuclei module has many configuration options:
+
+| Config Option | Type | Description | Default |
+|-------------------------------|--------|--------------------------------------------------------------|-----------|
+| modules.nuclei.batch_size | int | Number of targets to send to Nuclei per batch (default 200) | 200 |
+| modules.nuclei.budget | int | Used in budget mode to set the number of requests which will be allotted to the nuclei scan | 1 |
+| modules.nuclei.concurrency | int | maximum number of templates to be executed in parallel (default 25) | 25 |
+| modules.nuclei.directory_only | bool | Filter out 'file' URL events (default True) | True |
+| modules.nuclei.etags | str | tags to exclude from the scan | |
+| modules.nuclei.mode | str | manual | technology | severe | budget. Technology: Only activate based on technology events that match nuclei tags (nuclei -as mode). Manual (DEFAULT): Fully manual settings. Severe: Only critical and high severity templates without intrusive. Budget: Limit Nuclei to a specified number of HTTP requests | manual |
+| modules.nuclei.ratelimit | int | maximum number of requests to send per second (default 150) | 150 |
+| modules.nuclei.retries | int | number of times to retry a failed request (default 0) | 0 |
+| modules.nuclei.severity | str | Filter based on severity field available in the template. | |
+| modules.nuclei.silent | bool | Don't display nuclei's banner or status messages | False |
+| modules.nuclei.tags | str | execute a subset of templates that contain the provided tags | |
+| modules.nuclei.templates | str | template or template directory paths to include in the scan | |
+| modules.nuclei.version | str | nuclei version | 3.3.9 |
+
+Most of these you probably will **NOT** want to change. In particular, we advise against changing the version of Nuclei, as it's possible the latest version won't work right with BBOT.
+
+We also do not recommend disabling **directory_only** mode, as doing so will cause Nuclei to process every URL. Because BBOT is recursive, this can get very out-of-hand very quickly, depending on which other modules are in use.
+
+### Modes
+
+The Nuclei module's modes are generally in place to help you limit the number of templates you are scanning with, to make your scans quicker.
+
+#### Manual
+
+This is the default setting, and will use all templates. However, if you're looking to do something particular, you might pair this with some of the pass-through options described below.
+
+#### Severe
+
+**severe** mode uses only high/critical severity templates. It also excludes the intrusive tag. This is intended to be a shortcut for times when you need to rapidly identify high-severity vulnerabilities but can't afford the full scan.
Because most templates are INFO, LOW, or MEDIUM severity, your scan will finish much faster.
+
+#### Technology
+
+This is equivalent to Nuclei's '-as' scan option. It only uses templates that match detected technologies, using wappalyzer-based signatures. This can be a nice way to run a lightweight scan that still has a chance to find some good vulnerabilities.
+
+#### Budget
+
+Budget mode is unique to BBOT.
+
+For larger scans with thousands of targets, doing a FULL Nuclei scan (thousands of requests) against each one is not realistic.
+As an alternative to the other modes, you can take advantage of Nuclei's "collapsible" template feature.
+
+For only the cost of one (or more) "extra" request(s) per host, it can activate several hundred templates. These are templates which happen to look at a BaseUrl and typically look for a specific string or other attribute. Nuclei is smart about reusing the request data when it can, and we can use this to our advantage.
+
+The budget parameter is the number of extra requests per host you are willing to send to "feed" Nuclei templates (it defaults to 1).
+This is for those times when vulnerability scanning isn't the main focus, but you still want to look for easy wins.
+
+Of course, there is a rapidly diminishing return when you set the value to more than a handful. Eventually, each increase of the budget buys only a single additional template. However, in the 1-10 range there is a lot of value. This graphic should give you a rough visual idea of the concept.
+
+![Nuclei Budget Mode](https://github.com/blacklanternsecurity/bbot/assets/24899338/08a3429c-5a73-437b-84de-27c07d85a529)
+
+
+### Nuclei pass-through options
+
+Most of the remaining options are passed straight through to Nuclei when it's executed. You can do things like set specific **tags** to include (or exclude with **etags**), exactly as you would with Nuclei directly. You can also limit the templates with **severity**.
+
+The **ratelimit** and **concurrency** settings default to the same values Nuclei itself uses. These are relatively sane settings, but if you are in a sensitive environment, it can certainly help to turn them down.
+
+**templates** lets you set your own templates directory. This can be very useful if you have custom templates that you want to use with BBOT.
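+
+As a quick sketch of combining these pass-throughs (the tag and severity values below are illustrative, not recommendations):
+
+```bash
+# high/critical templates only, excluding the intrusive tag, at a reduced rate
+bbot -t evilcorp.com -m nuclei --allow-deadly \
+  -c modules.nuclei.severity=high,critical \
+     modules.nuclei.etags=intrusive \
+     modules.nuclei.ratelimit=50
+```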
+
+### Example Commands
+
+```bash
+# Scan a SINGLE target with a basic port scan and web modules
+bbot -f web-basic -m portscan nuclei --allow-deadly -t app.evilcorp.com
+```
+
+```bash
+# Scanning MULTIPLE targets
+bbot -f web-basic -m portscan nuclei --allow-deadly -t app1.evilcorp.com app2.evilcorp.com app3.evilcorp.com
+```
+
+```bash
+# Scanning MULTIPLE targets while performing subdomain enumeration
+bbot -f subdomain-enum web-basic -m portscan nuclei --allow-deadly -t app1.evilcorp.com app2.evilcorp.com app3.evilcorp.com
+```
+
+```bash
+# Scanning MULTIPLE targets on a BUDGET
+bbot -f subdomain-enum web-basic -m portscan nuclei --allow-deadly -c modules.nuclei.mode=budget -t app1.evilcorp.com app2.evilcorp.com app3.evilcorp.com
+```
diff --git a/docs/release_history.md b/docs/release_history.md
new file mode 100644
index 0000000000..cf1f140688
--- /dev/null
+++ b/docs/release_history.md
@@ -0,0 +1,48 @@
+### 2.2.0 - Nov 18, 2024
+- [https://github.com/blacklanternsecurity/bbot/pull/1919](https://github.com/blacklanternsecurity/bbot/pull/1919)
+
+### 2.1.2 - Nov 1, 2024
+- [https://github.com/blacklanternsecurity/bbot/pull/1909](https://github.com/blacklanternsecurity/bbot/pull/1909)
+
+### 2.1.1 - Oct 31, 2024
+- [https://github.com/blacklanternsecurity/bbot/pull/1885](https://github.com/blacklanternsecurity/bbot/pull/1885)
+
+### 2.1.0 - Oct 18, 2024
+- [https://github.com/blacklanternsecurity/bbot/pull/1724](https://github.com/blacklanternsecurity/bbot/pull/1724)
+
+### 2.0.1 - Aug 29, 2024
+- [https://github.com/blacklanternsecurity/bbot/pull/1650](https://github.com/blacklanternsecurity/bbot/pull/1650)
+
+### 2.0.0 - Aug 9, 2024
+- [https://github.com/blacklanternsecurity/bbot/pull/1424](https://github.com/blacklanternsecurity/bbot/pull/1424)
+- [https://github.com/blacklanternsecurity/bbot/pull/1235](https://github.com/blacklanternsecurity/bbot/pull/1235)
+
+### 1.1.8 - May 29, 2024
+- [https://github.com/blacklanternsecurity/bbot/pull/1382](https://github.com/blacklanternsecurity/bbot/pull/1382)
+
+### 1.1.7 - May 15, 2024
+- [https://github.com/blacklanternsecurity/bbot/pull/1119](https://github.com/blacklanternsecurity/bbot/pull/1119)
+
+### 1.1.6 - Feb 21, 2024
+- [https://github.com/blacklanternsecurity/bbot/pull/1002](https://github.com/blacklanternsecurity/bbot/pull/1002)
+
+### 1.1.5 - Jan 15, 2024
+- [https://github.com/blacklanternsecurity/bbot/pull/996](https://github.com/blacklanternsecurity/bbot/pull/996)
+
+### 1.1.4 - Jan 11, 2024
+- [https://github.com/blacklanternsecurity/bbot/pull/837](https://github.com/blacklanternsecurity/bbot/pull/837)
+
+### 1.1.3 - Nov 4, 2023
+- [https://github.com/blacklanternsecurity/bbot/pull/823](https://github.com/blacklanternsecurity/bbot/pull/823)
+
+### 1.1.2 - Nov 3, 2023
+- [https://github.com/blacklanternsecurity/bbot/pull/777](https://github.com/blacklanternsecurity/bbot/pull/777)
+
+### 1.1.1 - Oct 11, 2023
+- [https://github.com/blacklanternsecurity/bbot/pull/668](https://github.com/blacklanternsecurity/bbot/pull/668)
+
+### 1.1.0 - Aug 4, 2023
+- [https://github.com/blacklanternsecurity/bbot/pull/598](https://github.com/blacklanternsecurity/bbot/pull/598)
+
+### 1.0.5 - Mar 10, 2023
+- [https://github.com/blacklanternsecurity/bbot/pull/352](https://github.com/blacklanternsecurity/bbot/pull/352)
diff --git a/docs/scanning/advanced.md b/docs/scanning/advanced.md
new file mode 100644
index 0000000000..ce6891773a --- /dev/null +++ b/docs/scanning/advanced.md @@ -0,0 +1,166 @@ +# Advanced + +Below you can find some advanced uses of BBOT. + +## BBOT as a Python library + +#### Synchronous +```python +from bbot.scanner import Scanner + +if __name__ == "__main__": + scan = Scanner("evilcorp.com", presets=["subdomain-enum"]) + for event in scan.start(): + print(event) +``` + +#### Asynchronous +```python +from bbot.scanner import Scanner + +async def main(): + scan = Scanner("evilcorp.com", presets=["subdomain-enum"]) + async for event in scan.async_start(): + print(event.json()) + +if __name__ == "__main__": + import asyncio + asyncio.run(main()) +``` + +## Command-Line Help + + +```text +usage: bbot [-h] [-t TARGET [TARGET ...]] [-w WHITELIST [WHITELIST ...]] + [-b BLACKLIST [BLACKLIST ...]] [--strict-scope] + [-p [PRESET ...]] [-c [CONFIG ...]] [-lp] + [-m MODULE [MODULE ...]] [-l] [-lmo] [-em MODULE [MODULE ...]] + [-f FLAG [FLAG ...]] [-lf] [-rf FLAG [FLAG ...]] + [-ef FLAG [FLAG ...]] [--allow-deadly] [-n SCAN_NAME] [-v] [-d] + [-s] [--force] [-y] [--fast-mode] [--dry-run] + [--current-preset] [--current-preset-full] [-o DIR] + [-om MODULE [MODULE ...]] [-lo] [--json] [--brief] + [--event-types EVENT_TYPES [EVENT_TYPES ...]] [--exclude-cdn] + [--no-deps | --force-deps | --retry-deps | + --ignore-failed-deps | --install-all-deps] [--version] + [--proxy HTTP_PROXY] [-H CUSTOM_HEADERS [CUSTOM_HEADERS ...]] + [--custom-yara-rules CUSTOM_YARA_RULES] + [--user-agent USER_AGENT] + +Bighuge BLS OSINT Tool + +options: + -h, --help show this help message and exit + +Target: + -t, --targets TARGET [TARGET ...] + Targets to seed the scan + -w, --whitelist WHITELIST [WHITELIST ...] + What's considered in-scope (by default it's the same as --targets) + -b, --blacklist BLACKLIST [BLACKLIST ...] + Don't touch these things + --strict-scope Don't consider subdomains of target/whitelist to be in-scope + +Presets: + -p, --preset [PRESET ...] + Enable BBOT preset(s) + -c, --config [CONFIG ...] + Custom config options in key=value format: e.g. 'modules.shodan.api_key=1234' + -lp, --list-presets List available presets. + +Modules: + -m, --modules MODULE [MODULE ...] + Modules to enable. Choices: affiliates,ajaxpro,anubisdb,apkpure,asn,azure_realm,azure_tenant,baddns,baddns_direct,baddns_zone,badsecrets,bevigil,binaryedge,bucket_amazon,bucket_azure,bucket_digitalocean,bucket_file_enum,bucket_firebase,bucket_google,bufferoverrun,builtwith,bypass403,c99,censys,certspotter,chaos,code_repository,credshed,crt,dastardly,dehashed,digitorus,dnsbimi,dnsbrute,dnsbrute_mutations,dnscaa,dnscommonsrv,dnsdumpster,dnstlsrpt,docker_pull,dockerhub,dotnetnuke,emailformat,extractous,ffuf,ffuf_shortnames,filedownload,fingerprintx,fullhunt,generic_ssrf,git,git_clone,gitdumper,github_codesearch,github_org,github_workflows,gitlab,google_playstore,gowitness,hackertarget,host_header,httpx,hunt,hunterio,iis_shortnames,internetdb,ip2location,ipneighbor,ipstack,jadx,leakix,myssl,newsletters,ntlm,nuclei,oauth,otx,paramminer_cookies,paramminer_getparams,paramminer_headers,passivetotal,pgp,portfilter,portscan,postman,postman_download,rapiddns,robots,securitytrails,securitytxt,shodan_dns,sitedossier,skymem,smuggler,social,sslcert,subdomaincenter,subdomainradar,telerik,trickest,trufflehog,url_manipulation,urlscan,vhost,viewdns,virustotal,wafw00f,wappalyzer,wayback,wpscan,zoomeye + -l, --list-modules List available modules. 
+ -lmo, --list-module-options + Show all module config options + -em, --exclude-modules MODULE [MODULE ...] + Exclude these modules. + -f, --flags FLAG [FLAG ...] + Enable modules by flag. Choices: active,affiliates,aggressive,baddns,cloud-enum,code-enum,deadly,email-enum,iis-shortnames,passive,portscan,report,safe,service-enum,slow,social-enum,subdomain-enum,subdomain-hijack,web-basic,web-paramminer,web-screenshots,web-thorough + -lf, --list-flags List available flags. + -rf, --require-flags FLAG [FLAG ...] + Only enable modules with these flags (e.g. -rf passive) + -ef, --exclude-flags FLAG [FLAG ...] + Disable modules with these flags. (e.g. -ef aggressive) + --allow-deadly Enable the use of highly aggressive modules + +Scan: + -n, --name SCAN_NAME Name of scan (default: random) + -v, --verbose Be more verbose + -d, --debug Enable debugging + -s, --silent Be quiet + --force Run scan even in the case of condition violations or failed module setups + -y, --yes Skip scan confirmation prompt + --fast-mode Scan only the provided targets as fast as possible, with no extra discovery + --dry-run Abort before executing scan + --current-preset Show the current preset in YAML format + --current-preset-full + Show the current preset in its full form, including defaults + +Output: + -o, --output-dir DIR Directory to output scan results + -om, --output-modules MODULE [MODULE ...] + Output module(s). Choices: asset_inventory,csv,discord,emails,http,json,mysql,neo4j,nmap_xml,postgres,python,slack,splunk,sqlite,stdout,subdomains,teams,txt,web_parameters,web_report,websocket + -lo, --list-output-modules + List available output modules + --json, -j Output scan data in JSON format + --brief, -br Output only the data itself + --event-types EVENT_TYPES [EVENT_TYPES ...] + Choose which event types to display + --exclude-cdn, -ec Filter out unwanted open ports on CDNs/WAFs (80,443 only) + +Module dependencies: + Control how modules install their dependencies + + --no-deps Don't install module dependencies + --force-deps Force install all module dependencies + --retry-deps Try again to install failed module dependencies + --ignore-failed-deps Run modules even if they have failed dependencies + --install-all-deps Install dependencies for all modules + +Misc: + --version show BBOT version and exit + --proxy HTTP_PROXY Use this proxy for all HTTP requests + -H, --custom-headers CUSTOM_HEADERS [CUSTOM_HEADERS ...] + List of custom headers as key value pairs (header=value). + --custom-yara-rules, -cy CUSTOM_YARA_RULES + Add custom yara rules to excavate + --user-agent, -ua USER_AGENT + Set the user-agent for all HTTP requests + +EXAMPLES + + Subdomains: + bbot -t evilcorp.com -p subdomain-enum + + Subdomains (passive only): + bbot -t evilcorp.com -p subdomain-enum -rf passive + + Subdomains + port scan + web screenshots: + bbot -t evilcorp.com -p subdomain-enum -m portscan gowitness -n my_scan -o . 
+
+    Subdomains + basic web scan:
+        bbot -t evilcorp.com -p subdomain-enum web-basic
+
+    Web spider:
+        bbot -t www.evilcorp.com -p spider -c web.spider_distance=2 web.spider_depth=2
+
+    Everything everywhere all at once:
+        bbot -t evilcorp.com -p kitchen-sink
+
+    List modules:
+        bbot -l
+
+    List output modules:
+        bbot -lo
+
+    List presets:
+        bbot -lp
+
+    List flags:
+        bbot -lf
+
+```
+
diff --git a/docs/scanning/configuration.md b/docs/scanning/configuration.md
new file mode 100644
index 0000000000..85fb55e485
--- /dev/null
+++ b/docs/scanning/configuration.md
@@ -0,0 +1,564 @@
+# Configuration Overview
+
+Normally, [Presets](presets.md) are used to configure a scan. However, there may be cases where you want to change BBOT's global defaults so a certain option is always set, even if it's not specified in a preset.
+
+BBOT has a YAML config at `~/.config/bbot/bbot.yml`. This is the first config that BBOT loads, so it's a good place to put default settings like `http_proxy`, `max_threads`, or `http_user_agent`. You can also put any module settings here, including **API keys**.
+
+For a list of all possible config options, see:
+
+- [Global Options](#global-config-options)
+- [Module Options](#module-config-options)
+
+For examples of common config changes, see [Tips and Tricks](tips_and_tricks.md).
+
+## Configuration Files
+
+BBOT loads its config from the following files, in this order (last one loaded == highest priority):
+
+- `~/.config/bbot/bbot.yml` <-- Global BBOT config
+- presets (`-p`) <-- Presets are good for scan-specific settings
+- command line (`-c`) <-- CLI overrides everything
+
+`bbot.yml` will be automatically created for you when you first run BBOT.
+
+## YAML Config vs Command Line
+
+You can specify config options either via the command line or the config. For example, if you want to proxy your BBOT scan through a local proxy like [Burp Suite](https://portswigger.net/burp), you could either do:
+
+```bash
+# send BBOT traffic through an HTTP proxy
+bbot -t evilcorp.com -c http_proxy=http://127.0.0.1:8080
+```
+
+Or, in `~/.config/bbot/bbot.yml`:
+
+```yaml title="~/.config/bbot/bbot.yml"
+http_proxy: http://127.0.0.1:8080
+```
+
+These two are equivalent.
+
+Config options specified via the command-line take precedence over all others. You can give BBOT a custom config file with `-c myconf.yml`, or individual arguments like this: `-c modules.shodan_dns.api_key=deadbeef`. To display the full and current BBOT config, including any command-line arguments, add `--current-preset` to your command.
+
+Note that placing the following in `bbot.yml`:
+```yaml title="~/.config/bbot/bbot.yml"
+modules:
+  shodan_dns:
+    api_key: deadbeef
+```
+Is the same as:
+```bash
+bbot -c modules.shodan_dns.api_key=deadbeef
+```
+
+## Global Config Options
+
+Below is a full list of the config options supported, along with their defaults.
+
+
+```yaml title="defaults.yml"
+### BASIC OPTIONS ###
+
+# BBOT working directory
+home: ~/.bbot
+# How many scan results to keep before cleaning up the older ones
+keep_scans: 20
+# Interval for displaying status messages
+status_frequency: 15
+# Include the raw data of files (i.e. PDFs, web screenshots) as base64 in the event
+file_blobs: false
+# Include the raw data of directories (i.e.
git repos) as tar.gz base64 in the event +folder_blobs: false + +### SCOPE ### + +scope: + # strict scope means only exact DNS names are considered in-scope + # subdomains are not included unless they are explicitly provided in the target list + strict: false + # Filter by scope distance which events are displayed in the output + # 0 == show only in-scope events (affiliates are always shown) + # 1 == show all events up to distance-1 (1 hop from target) + report_distance: 0 + # How far out from the main scope to search + # Do not change this setting unless you know what you're doing + search_distance: 0 + +### DNS ### + +dns: + # Completely disable DNS resolution (careful if you have IP whitelists/blacklists, consider using minimal=true instead) + disable: false + # Speed up scan by not creating any new DNS events, and only resolving A and AAAA records + minimal: false + # How many instances of the dns module to run concurrently + threads: 25 + # How many concurrent DNS resolvers to use when brute-forcing + # (under the hood this is passed through directly to massdns -s) + brute_threads: 1000 + # nameservers to use for DNS brute-forcing + # default is updated weekly and contains ~10K high-quality public servers + brute_nameservers: https://raw.githubusercontent.com/blacklanternsecurity/public-dns-servers/master/nameservers.txt + # How far away from the main target to explore via DNS resolution (independent of scope.search_distance) + # This is safe to change + search_distance: 1 + # Limit how many DNS records can be followed in a row (stop malicious/runaway DNS records) + runaway_limit: 5 + # DNS query timeout + timeout: 5 + # How many times to retry DNS queries + retries: 1 + # Completely disable BBOT's DNS wildcard detection + wildcard_disable: False + # Disable BBOT's DNS wildcard detection for select domains + wildcard_ignore: [] + # How many sanity checks to make when verifying wildcard DNS + # Increase this value if BBOT's wildcard detection isn't working + wildcard_tests: 10 + # Skip DNS requests for a certain domain and rdtype after encountering this many timeouts or SERVFAILs + # This helps prevent faulty DNS servers from hanging up the scan + abort_threshold: 50 + # Don't show PTR records containing IP addresses + filter_ptrs: true + # Enable/disable debug messages for DNS queries + debug: false + # For performance reasons, always skip these DNS queries + # Microsoft's DNS infrastructure is misconfigured so that certain queries to mail.protection.outlook.com always time out + omit_queries: + - SRV:mail.protection.outlook.com + - CNAME:mail.protection.outlook.com + - TXT:mail.protection.outlook.com + +### WEB ### + +web: + # HTTP proxy + http_proxy: + # Web user-agent + user_agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36 Edg/119.0.2151.97 + # Set the maximum number of HTTP links that can be followed in a row (0 == no spidering allowed) + spider_distance: 0 + # Set the maximum directory depth for the web spider + spider_depth: 1 + # Set the maximum number of links that can be followed per page + spider_links_per_page: 25 + # HTTP timeout (for Python requests; API calls, etc.) + http_timeout: 10 + # HTTP timeout (for httpx) + httpx_timeout: 5 + # Custom HTTP headers (e.g. cookies, etc.) + # in the format { "Header-Key": "header_value" } + # These are attached to all in-scope HTTP requests + # Note that some modules (e.g. 
github) may end up sending these to out-of-scope resources + http_headers: {} + # HTTP retries (for Python requests; API calls, etc.) + http_retries: 1 + # HTTP retries (for httpx) + httpx_retries: 1 + # Enable/disable debug messages for web requests/responses + debug: false + # Maximum number of HTTP redirects to follow + http_max_redirects: 5 + # Whether to verify SSL certificates + ssl_verify: false + +### ENGINE ### + +engine: + debug: false + +# Tool dependencies +deps: + ffuf: + version: "2.1.0" + # How to handle installation of module dependencies + # Choices are: + # - abort_on_failure (default) - if a module dependency fails to install, abort the scan + # - retry_failed - try again to install failed dependencies + # - ignore_failed - run the scan regardless of what happens with dependency installation + # - disable - completely disable BBOT's dependency system (you are responsible for installing tools, pip packages, etc.) + behavior: abort_on_failure + +### ADVANCED OPTIONS ### + +# Load BBOT modules from these custom paths +module_dirs: [] + +# Infer certain events from others, e.g. IPs from IP ranges, DNS_NAMEs from URLs, etc. +speculate: True +# Passively search event data for URLs, hostnames, emails, etc. +excavate: True +# Summarize activity at the end of a scan +aggregate: True +# DNS resolution, wildcard detection, etc. +dnsresolve: True +# Cloud provider tagging +cloudcheck: True + +# Strip querystring from URLs by default +url_querystring_remove: True +# When query string is retained, by default collapse parameter values down to a single value per parameter +url_querystring_collapse: True + +# Completely ignore URLs with these extensions +url_extension_blacklist: + # images + - png + - jpg + - bmp + - ico + - jpeg + - gif + - svg + - webp + # web/fonts + - css + - woff + - woff2 + - ttf + - eot + - sass + - scss + # audio + - mp3 + - m4a + - wav + - flac + # video + - mp4 + - mkv + - avi + - wmv + - mov + - flv + - webm +# Distribute URLs with these extensions only to httpx (these are omitted from output) +url_extension_httpx_only: + - js + +# These url extensions are almost always static, so we exclude them from modules that fuzz things +url_extension_static: + - pdf + - doc + - docx + - xls + - xlsx + - ppt + - pptx + - txt + - csv + - xml + - yaml + - ini + - log + - conf + - cfg + - env + - md + - rtf + - tiff + - bmp + - jpg + - jpeg + - png + - gif + - svg + - ico + - mp3 + - wav + - flac + - mp4 + - mov + - avi + - mkv + - webm + - zip + - tar + - gz + - bz2 + - 7z + - rar + +# Don't output these types of events (they are still distributed to modules) +omit_event_types: + - HTTP_RESPONSE + - RAW_TEXT + - URL_UNVERIFIED + - DNS_NAME_UNRESOLVED + - FILESYSTEM + - WEB_PARAMETER + - RAW_DNS_RECORD + # - IP_ADDRESS + +# Custom interactsh server settings +interactsh_server: null +interactsh_token: null +interactsh_disable: false + +``` + + +## Module Config Options + +Many modules accept their own configuration options. These options have the ability to change their behavior. For example, the `portscan` module accepts options for `ports`, `rate`, etc. Below is a list of all possible module config options. 
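+
+Module options are set exactly like global options; as a quick sketch, here is how you might adjust two of the `portscan` options from the table below:
+
+```bash
+# limit the port scanner to a few ports at a gentler packet rate
+bbot -t evilcorp.com -m portscan \
+  -c modules.portscan.ports=80,443,8080 \
+     modules.portscan.rate=100
+```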
+ + +| Config Option | Type | Description | Default | +|------------------------------------------------|----------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| modules.baddns.custom_nameservers | list | Force BadDNS to use a list of custom nameservers | [] | +| modules.baddns.enabled_submodules | list | A list of submodules to enable. Empty list (default) enables CNAME, TXT and MX Only | [] | +| modules.baddns.only_high_confidence | bool | Do not emit low-confidence or generic detections | False | +| modules.baddns_direct.custom_nameservers | list | Force BadDNS to use a list of custom nameservers | [] | +| modules.baddns_zone.custom_nameservers | list | Force BadDNS to use a list of custom nameservers | [] | +| modules.baddns_zone.only_high_confidence | bool | Do not emit low-confidence or generic detections | False | +| modules.badsecrets.custom_secrets | NoneType | Include custom secrets loaded from a local file | None | +| modules.bucket_amazon.permutations | bool | Whether to try permutations | False | +| modules.bucket_azure.permutations | bool | Whether to try permutations | False | +| modules.bucket_digitalocean.permutations | bool | Whether to try permutations | False | +| modules.bucket_firebase.permutations | bool | Whether to try permutations | False | +| modules.bucket_google.permutations | bool | Whether to try permutations | False | +| modules.dnsbrute.max_depth | int | How many subdomains deep to brute force, i.e. 
5.4.3.2.1.evilcorp.com | 5 | +| modules.dnsbrute.wordlist | str | Subdomain wordlist URL | https://raw.githubusercontent.com/danielmiessler/SecLists/master/Discovery/DNS/subdomains-top1million-5000.txt | +| modules.dnsbrute_mutations.max_mutations | int | Maximum number of target-specific mutations to try per subdomain | 100 | +| modules.dnscommonsrv.max_depth | int | The maximum subdomain depth to brute-force SRV records | 2 | +| modules.ffuf.extensions | str | Optionally include a list of extensions to extend the keyword with (comma separated) | | +| modules.ffuf.ignore_case | bool | Only put lowercase words into the wordlist | False | +| modules.ffuf.lines | int | take only the first N lines from the wordlist when finding directories | 5000 | +| modules.ffuf.max_depth | int | the maximum directory depth to attempt to solve | 0 | +| modules.ffuf.wordlist | str | Specify wordlist to use when finding directories | https://raw.githubusercontent.com/danielmiessler/SecLists/master/Discovery/Web-Content/raft-small-directories.txt | +| modules.ffuf_shortnames.extensions | str | Optionally include a list of extensions to extend the keyword with (comma separated) | | +| modules.ffuf_shortnames.find_common_prefixes | bool | Attempt to automatically detect common prefixes and make additional ffuf runs against them | False | +| modules.ffuf_shortnames.find_delimiters | bool | Attempt to detect common delimiters and make additional ffuf runs against them | True | +| modules.ffuf_shortnames.find_subwords | bool | Attempt to detect subwords and make additional ffuf runs against them | False | +| modules.ffuf_shortnames.ignore_redirects | bool | Explicitly ignore redirects (301,302) | True | +| modules.ffuf_shortnames.max_depth | int | the maximum directory depth to attempt to solve | 1 | +| modules.ffuf_shortnames.max_predictions | int | The maximum number of predictions to generate per shortname prefix | 250 | +| modules.ffuf_shortnames.version | str | ffuf version | 2.0.0 | +| modules.ffuf_shortnames.wordlist_extensions | str | Specify wordlist to use when making extension lists | | +| modules.filedownload.base_64_encoded_file | str | Stream the bytes of a file and encode them in base 64 for event data. | false | +| modules.filedownload.extensions | list | File extensions to download | ['bak', 'bash', 'bashrc', 'cfg', 'conf', 'crt', 'csv', 'db', 'dll', 'doc', 'docx', 'exe', 'ica', 'indd', 'ini', 'jar', 'json', 'key', 'log', 'markdown', 'md', 'msi', 'odg', 'odp', 'ods', 'odt', 'pdf', 'pem', 'pps', 'ppsx', 'ppt', 'pptx', 'ps1', 'pub', 'raw', 'rdp', 'rsa', 'sh', 'sql', 'sqlite', 'swp', 'sxw', 'tar.gz', 'tgz', 'tar', 'txt', 'vbs', 'war', 'wpd', 'xls', 'xlsx', 'xml', 'yaml', 'yml', 'zip', 'lzma', 'rar', '7z', 'xz', 'bz2'] | +| modules.filedownload.max_filesize | str | Cancel download if filesize is greater than this size | 10MB | +| modules.fingerprintx.skip_common_web | bool | Skip common web ports such as 80, 443, 8080, 8443, etc. 
| True |
+| modules.fingerprintx.version | str | fingerprintx version | 1.1.4 |
+| modules.generic_ssrf.skip_dns_interaction | bool | Do not report DNS interactions (only HTTP interaction) | False |
+| modules.gitlab.api_key | str | Gitlab access token | |
+| modules.gowitness.idle_timeout | int | Skip the current gowitness batch if it stalls for longer than this many seconds | 1800 |
+| modules.gowitness.output_path | str | Where to save screenshots | |
+| modules.gowitness.resolution_x | int | Screenshot resolution x | 1440 |
+| modules.gowitness.resolution_y | int | Screenshot resolution y | 900 |
+| modules.gowitness.social | bool | Whether to screenshot social media webpages | False |
+| modules.gowitness.threads | int | How many gowitness threads to spawn (default is number of CPUs x 2) | 0 |
+| modules.gowitness.timeout | int | Preflight check timeout | 10 |
+| modules.gowitness.version | str | Gowitness version | 2.4.2 |
+| modules.httpx.in_scope_only | bool | Only visit web resources that are in scope. | True |
+| modules.httpx.max_response_size | int | Max response size in bytes | 5242880 |
+| modules.httpx.probe_all_ips | bool | Probe all the IPs associated with the same host | False |
+| modules.httpx.store_responses | bool | Save raw HTTP responses to scan folder | False |
+| modules.httpx.threads | int | Number of httpx threads to use | 50 |
+| modules.httpx.version | str | httpx version | 1.2.5 |
+| modules.iis_shortnames.detect_only | bool | Only detect the vulnerability and do not run the shortname scanner | True |
+| modules.iis_shortnames.max_node_count | int | Limit how many nodes to attempt to resolve on any given recursion branch | 50 |
+| modules.iis_shortnames.speculate_magic_urls | bool | Attempt to discover IIS 'magic' special folders | True |
+| modules.ntlm.try_all | bool | Try every NTLM endpoint | False |
+| modules.nuclei.batch_size | int | Number of targets to send to Nuclei per batch (default 200) | 200 |
+| modules.nuclei.budget | int | Used in budget mode to set the number of requests which will be allotted to the nuclei scan | 1 |
+| modules.nuclei.concurrency | int | maximum number of templates to be executed in parallel (default 25) | 25 |
+| modules.nuclei.directory_only | bool | Filter out 'file' URL events (default True) | True |
+| modules.nuclei.etags | str | tags to exclude from the scan | |
+| modules.nuclei.mode | str | manual \| technology \| severe \| budget. Technology: Only activate based on technology events that match nuclei tags (nuclei -as mode). Manual (DEFAULT): Fully manual settings. Severe: Only critical and high severity templates without intrusive. Budget: Limit Nuclei to a specified number of HTTP requests | manual |
+| modules.nuclei.ratelimit | int | maximum number of requests to send per second (default 150) | 150 |
+| modules.nuclei.retries | int | number of times to retry a failed request (default 0) | 0 |
+| modules.nuclei.severity | str | Filter based on severity field available in the template. | |
+| modules.nuclei.silent | bool | Don't display nuclei's banner or status messages | False |
+| modules.nuclei.tags | str | execute a subset of templates that contain the provided tags | |
+| modules.nuclei.templates | str | template or template directory paths to include in the scan | |
+| modules.nuclei.version | str | nuclei version | 3.3.9 |
+| modules.oauth.try_all | bool | Check for OAUTH/OIDC on every subdomain and URL.
| False |
+| modules.paramminer_cookies.recycle_words | bool | Attempt to use words found during the scan on all other endpoints | False |
+| modules.paramminer_cookies.skip_boring_words | bool | Remove commonly uninteresting words from the wordlist | True |
+| modules.paramminer_cookies.wordlist | str | Define the wordlist to be used to derive cookies | |
+| modules.paramminer_getparams.recycle_words | bool | Attempt to use words found during the scan on all other endpoints | False |
+| modules.paramminer_getparams.skip_boring_words | bool | Remove commonly uninteresting words from the wordlist | True |
+| modules.paramminer_getparams.wordlist | str | Define the wordlist to be used to derive GET parameters | |
+| modules.paramminer_headers.recycle_words | bool | Attempt to use words found during the scan on all other endpoints | False |
+| modules.paramminer_headers.skip_boring_words | bool | Remove commonly uninteresting words from the wordlist | True |
+| modules.paramminer_headers.wordlist | str | Define the wordlist to be used to derive headers | |
+| modules.portscan.adapter | str | Manually specify a network interface, such as "eth0" or "tun0". If not specified, the first network interface found with a default gateway will be used. | |
+| modules.portscan.adapter_ip | str | Send packets using this IP address. Not needed unless masscan's autodetection fails | |
+| modules.portscan.adapter_mac | str | Send packets using this as the source MAC address. Not needed unless masscan's autodetection fails | |
+| modules.portscan.ping_first | bool | Only portscan hosts that reply to pings | False |
+| modules.portscan.ping_only | bool | Ping sweep only, no portscan | False |
+| modules.portscan.ports | str | Ports to scan | |
+| modules.portscan.rate | int | Rate in packets per second | 300 |
+| modules.portscan.router_mac | str | Send packets to this MAC address as the destination. Not needed unless masscan's autodetection fails | |
+| modules.portscan.top_ports | int | Top ports to scan (default 100) (to override, specify 'ports') | 100 |
+| modules.portscan.wait | int | Seconds to wait for replies after scan is complete | 5 |
+| modules.robots.include_allow | bool | Include 'Allow' Entries | True |
+| modules.robots.include_disallow | bool | Include 'Disallow' Entries | True |
+| modules.robots.include_sitemap | bool | Include 'sitemap' entries | False |
+| modules.securitytxt.emails | bool | emit EMAIL_ADDRESS events | True |
+| modules.securitytxt.urls | bool | emit URL_UNVERIFIED events | True |
+| modules.sslcert.skip_non_ssl | bool | Don't try common non-SSL ports | True |
+| modules.sslcert.timeout | float | Socket connect timeout in seconds | 5.0 |
+| modules.telerik.exploit_RAU_crypto | bool | Attempt to confirm any RAU AXD detections are vulnerable | False |
+| modules.telerik.include_subdirs | bool | Include subdirectories in the scan (off by default) | False |
+| modules.url_manipulation.allow_redirects | bool | Allowing redirects will sometimes create false positives. Disallowing will sometimes create false negatives. Allowed by default. | True |
+| modules.vhost.force_basehost | str | Use a custom base host (e.g.
evilcorp.com) instead of the default behavior of using the current URL | | +| modules.vhost.lines | int | take only the first N lines from the wordlist when finding directories | 5000 | +| modules.vhost.wordlist | str | Wordlist containing subdomains | https://raw.githubusercontent.com/danielmiessler/SecLists/master/Discovery/DNS/subdomains-top1million-5000.txt | +| modules.wafw00f.generic_detect | bool | When no specific WAF detections are made, try to perform a generic detect | True | +| modules.wpscan.api_key | str | WPScan API Key | | +| modules.wpscan.connection_timeout | int | The connection timeout in seconds (default 2) | 2 | +| modules.wpscan.disable_tls_checks | bool | Disables the SSL/TLS certificate verification (Default True) | True | +| modules.wpscan.enumerate | str | Enumeration Process see wpscan help documentation (default: vp,vt,cb,dbe) | vp,vt,cb,dbe | +| modules.wpscan.force | bool | Do not check if the target is running WordPress or returns a 403 | False | +| modules.wpscan.request_timeout | int | The request timeout in seconds (default 5) | 5 | +| modules.wpscan.threads | int | How many wpscan threads to spawn (default is 5) | 5 | +| modules.anubisdb.limit | int | Limit the number of subdomains returned per query (increasing this may slow the scan due to garbage results from this API) | 1000 | +| modules.apkpure.output_folder | str | Folder to download apk's to | | +| modules.bevigil.api_key | str | BeVigil OSINT API Key | | +| modules.bevigil.urls | bool | Emit URLs in addition to DNS_NAMEs | False | +| modules.binaryedge.api_key | str | BinaryEdge API key | | +| modules.binaryedge.max_records | int | Limit results to help prevent exceeding API quota | 1000 | +| modules.bucket_file_enum.file_limit | int | Limit the number of files downloaded per bucket | 50 | +| modules.bufferoverrun.api_key | str | BufferOverrun API key | | +| modules.bufferoverrun.commercial | bool | Use commercial API | False | +| modules.builtwith.api_key | str | Builtwith API key | | +| modules.builtwith.redirects | bool | Also look up inbound and outbound redirects | True | +| modules.c99.api_key | str | c99.nl API key | | +| modules.censys.api_key | str | Censys.io API Key in the format of 'key:secret' | | +| modules.censys.max_pages | int | Maximum number of pages to fetch (100 results per page) | 5 | +| modules.chaos.api_key | str | Chaos API key | | +| modules.credshed.credshed_url | str | URL of credshed server | | +| modules.credshed.password | str | Credshed password | | +| modules.credshed.username | str | Credshed username | | +| modules.dehashed.api_key | str | DeHashed API Key | | +| modules.dehashed.username | str | Email Address associated with your API key | | +| modules.dnsbimi.emit_raw_dns_records | bool | Emit RAW_DNS_RECORD events | False | +| modules.dnsbimi.emit_urls | bool | Emit URL_UNVERIFIED events | True | +| modules.dnsbimi.selectors | str | CSV list of BIMI selectors to check | default,email,mail,bimi | +| modules.dnscaa.dns_names | bool | emit DNS_NAME events | True | +| modules.dnscaa.emails | bool | emit EMAIL_ADDRESS events | True | +| modules.dnscaa.in_scope_only | bool | Only check in-scope domains | True | +| modules.dnscaa.urls | bool | emit URL_UNVERIFIED events | True | +| modules.dnstlsrpt.emit_emails | bool | Emit EMAIL_ADDRESS events | True | +| modules.dnstlsrpt.emit_raw_dns_records | bool | Emit RAW_DNS_RECORD events | False | +| modules.dnstlsrpt.emit_urls | bool | Emit URL_UNVERIFIED events | True | +| modules.dnstlsrpt.emit_vulnerabilities | bool | 
Emit VULNERABILITY events | True |
+| modules.docker_pull.all_tags | bool | Download all tags from each registry (Default False) | False |
+| modules.docker_pull.output_folder | str | Folder to download docker repositories to | |
+| modules.extractous.extensions | list | File extensions to parse | ['bak', 'bash', 'bashrc', 'conf', 'cfg', 'crt', 'csv', 'db', 'sqlite', 'doc', 'docx', 'ica', 'indd', 'ini', 'json', 'key', 'pub', 'log', 'markdown', 'md', 'odg', 'odp', 'ods', 'odt', 'pdf', 'pem', 'pps', 'ppsx', 'ppt', 'pptx', 'ps1', 'rdp', 'rsa', 'sh', 'sql', 'swp', 'sxw', 'txt', 'vbs', 'wpd', 'xls', 'xlsx', 'xml', 'yml', 'yaml'] |
+| modules.fullhunt.api_key | str | FullHunt API Key | |
+| modules.git_clone.api_key | str | Github token | |
+| modules.git_clone.output_folder | str | Folder to clone repositories to | |
+| modules.gitdumper.fuzz_tags | bool | Fuzz for common git tag names (v0.0.1, 0.0.2, etc.) up to the max_semanic_version | False |
+| modules.gitdumper.max_semanic_version | int |` Maximum version number to fuzz for (default < v10.10.10) `| 10 |
+| modules.gitdumper.output_folder | str | Folder to download repositories to | |
+| modules.github_codesearch.api_key | str | Github token | |
+| modules.github_codesearch.limit | int | Limit code search to this many results | 100 |
+| modules.github_org.api_key | str | Github token | |
+| modules.github_org.include_member_repos | bool | Also enumerate organization members' repositories | False |
+| modules.github_org.include_members | bool | Enumerate organization members | True |
+| modules.github_workflows.api_key | str | Github token | |
+| modules.github_workflows.num_logs | int | For each workflow, fetch the logs of the last N successful runs (max 100) | 1 |
+| modules.hunterio.api_key | str | Hunter.IO API key | |
+| modules.internetdb.show_open_ports | bool | Display OPEN_TCP_PORT events in output, even if they didn't lead to an interesting discovery | False |
+| modules.ip2location.api_key | str | IP2location.io API Key | |
+| modules.ip2location.lang | str | Translation information (ISO639-1). The translation is only applicable for continent, country, region and city name. | |
+| modules.ipneighbor.num_bits | int | Netmask size (in CIDR notation) to check. Default is 4 bits (16 hosts) | 4 |
+| modules.ipstack.api_key | str | IPStack GeoIP API Key | |
+| modules.jadx.threads | int | Maximum jadx threads for extracting apk's, default: 4 | 4 |
+| modules.leakix.api_key | str | LeakIX API Key | |
+| modules.passivetotal.api_key | str | PassiveTotal API Key in the format of 'username:api_key' | |
+| modules.pgp.search_urls | list | PGP key servers to search |` ['https://keyserver.ubuntu.com/pks/lookup?fingerprint=on&op=vindex&search=', 'http://the.earth.li:11371/pks/lookup?fingerprint=on&op=vindex&search=', 'https://pgpkeys.eu/pks/lookup?search=&op=index', 'https://pgp.mit.edu/pks/lookup?search=&op=index'] `| |
+| modules.portfilter.allowed_cdn_ports | str | Comma-separated list of ports that are allowed to be scanned for CDNs | 80,443 |
+| modules.portfilter.cdn_tags | str | Comma-separated list of tags to skip, e.g.
'cdn,cloud' | cdn- | +| modules.postman.api_key | str | Postman API Key | | +| modules.postman_download.api_key | str | Postman API Key | | +| modules.postman_download.output_folder | str | Folder to download postman workspaces to | | +| modules.securitytrails.api_key | str | SecurityTrails API key | | +| modules.shodan_dns.api_key | str | Shodan API key | | +| modules.subdomainradar.api_key | str | SubDomainRadar.io API key | | +| modules.subdomainradar.group | str | The enumeration group to use. Choose from fast, medium, deep | fast | +| modules.subdomainradar.timeout | int | Timeout in seconds | 120 | +| modules.trickest.api_key | str | Trickest API key | | +| modules.trufflehog.concurrency | int | Number of concurrent workers | 8 | +| modules.trufflehog.config | str | File path or URL to YAML trufflehog config | | +| modules.trufflehog.deleted_forks | bool | Scan for deleted github forks. WARNING: This is SLOW. For a smaller repository, this process can take 20 minutes. For a larger repository, it could take hours. | False | +| modules.trufflehog.only_verified | bool | Only report credentials that have been verified | True | +| modules.trufflehog.version | str | trufflehog version | 3.88.12 | +| modules.urlscan.urls | bool | Emit URLs in addition to DNS_NAMEs | False | +| modules.virustotal.api_key | str | VirusTotal API Key | | +| modules.wayback.garbage_threshold | int | Dedupe similar urls if they are in a group of this size or higher (lower values == less garbage data) | 10 | +| modules.wayback.urls | bool | emit URLs in addition to DNS_NAMEs | False | +| modules.zoomeye.api_key | str | ZoomEye API key | | +| modules.zoomeye.include_related | bool | Include domains which may be related to the target | False | +| modules.zoomeye.max_pages | int | How many pages of results to fetch | 20 | +| modules.asset_inventory.output_file | str | Set a custom output file | | +| modules.asset_inventory.recheck | bool | When use_previous=True, don't retain past details like open ports or findings. Instead, allow them to be rediscovered by the new scan | False | +| modules.asset_inventory.summary_netmask | int | Subnet mask to use when summarizing IP addresses at end of scan | 16 | +| modules.asset_inventory.use_previous | bool |` Emit previous asset inventory as new events (use in conjunction with -n ) `| False | +| modules.csv.output_file | str | Output to CSV file | | +| modules.discord.event_types | list | Types of events to send | ['VULNERABILITY', 'FINDING'] | +| modules.discord.min_severity | str | Only allow VULNERABILITY events of this severity or higher | LOW | +| modules.discord.retries | int | Number of times to retry sending the message before skipping the event | 10 | +| modules.discord.webhook_url | str | Discord webhook URL | | +| modules.emails.output_file | str | Output to file | | +| modules.http.bearer | str | Authorization Bearer token | | +| modules.http.method | str | HTTP method | POST | +| modules.http.password | str | Password (basic auth) | | +| modules.http.siem_friendly | bool | Format JSON in a SIEM-friendly way for ingestion into Elastic, Splunk, etc. | False | +| modules.http.timeout | int | HTTP timeout | 10 | +| modules.http.url | str | Web URL | | +| modules.http.username | str | Username (basic auth) | | +| modules.json.output_file | str | Output to file | | +| modules.json.siem_friendly | bool | Output JSON in a SIEM-friendly format for ingestion into Elastic, Splunk, etc. 
| False |
+| modules.mysql.database | str | The database name to connect to | bbot |
+| modules.mysql.host | str | The server running MySQL | localhost |
+| modules.mysql.password | str | The password to connect to MySQL | bbotislife |
+| modules.mysql.port | int | The port to connect to MySQL | 3306 |
+| modules.mysql.username | str | The username to connect to MySQL | root |
+| modules.neo4j.password | str | Neo4j password | bbotislife |
+| modules.neo4j.uri | str | Neo4j server + port | bolt://localhost:7687 |
+| modules.neo4j.username | str | Neo4j username | neo4j |
+| modules.postgres.database | str | The database name to connect to | bbot |
+| modules.postgres.host | str | The server running Postgres | localhost |
+| modules.postgres.password | str | The password to connect to Postgres | bbotislife |
+| modules.postgres.port | int | The port to connect to Postgres | 5432 |
+| modules.postgres.username | str | The username to connect to Postgres | postgres |
+| modules.slack.event_types | list | Types of events to send | ['VULNERABILITY', 'FINDING'] |
+| modules.slack.min_severity | str | Only allow VULNERABILITY events of this severity or higher | LOW |
+| modules.slack.retries | int | Number of times to retry sending the message before skipping the event | 10 |
+| modules.slack.webhook_url | str | Slack webhook URL | |
+| modules.splunk.hectoken | str | HEC Token | |
+| modules.splunk.index | str | Index to send data to | |
+| modules.splunk.source | str | Source path to be added to the metadata | |
+| modules.splunk.timeout | int | HTTP timeout | 10 |
+| modules.splunk.url | str | Web URL | |
+| modules.sqlite.database | str | The path to the sqlite database file | |
+| modules.stdout.accept_dupes | bool | Whether to show duplicate events, default True | True |
+| modules.stdout.event_fields | list | Which event fields to display | [] |
+| modules.stdout.event_types | list | Which events to display, default all event types | [] |
+| modules.stdout.format | str | Which text format to display, choices: text,json | text |
+| modules.stdout.in_scope_only | bool | Whether to only show in-scope events | False |
+| modules.subdomains.include_unresolved | bool | Include unresolved subdomains in output | False |
+| modules.subdomains.output_file | str | Output to file | |
+| modules.teams.event_types | list | Types of events to send | ['VULNERABILITY', 'FINDING'] |
+| modules.teams.min_severity | str | Only allow VULNERABILITY events of this severity or higher | LOW |
+| modules.teams.retries | int | Number of times to retry sending the message before skipping the event | 10 |
+| modules.teams.webhook_url | str | Teams webhook URL | |
+| modules.txt.output_file | str | Output to file | |
+| modules.web_parameters.include_count | bool | Include the count of each parameter in the output | False |
+| modules.web_parameters.output_file | str | Output to file | |
+| modules.web_report.css_theme_file | str | CSS theme URL for HTML output | https://cdnjs.cloudflare.com/ajax/libs/github-markdown-css/5.1.0/github-markdown.min.css |
+| modules.web_report.output_file | str | Output to file | |
+| modules.websocket.preserve_graph | bool | Preserve full chains of events in the graph (prevents orphans) | True |
+| modules.websocket.token | str | Authorization Bearer token | |
+| modules.websocket.url | str | Web URL | |
+| modules.excavate.custom_yara_rules | str | Include custom Yara rules | |
+| modules.excavate.retain_querystring | bool | Keep the querystring intact on emitted WEB_PARAMETERS |
False |
+| modules.excavate.yara_max_match_data | int | Sets the maximum amount of text that can be extracted from a YARA regex | 2000 |
+| modules.speculate.essential_only | bool | Only enable essential speculate features (no extra discovery) | False |
+| modules.speculate.max_hosts | int | Max number of IP_RANGE hosts to convert into IP_ADDRESS events | 65536 |
+| modules.speculate.ports | str | The set of ports to speculate on | 80,443 |
+
diff --git a/docs/scanning/events.md b/docs/scanning/events.md
new file mode 100644
index 0000000000..77aae0531c
--- /dev/null
+++ b/docs/scanning/events.md
@@ -0,0 +1,158 @@
+# Events
+
+An Event is a piece of data discovered by BBOT. Examples include `IP_ADDRESS`, `DNS_NAME`, `EMAIL_ADDRESS`, `URL`, etc. When you run a BBOT scan, events are constantly being exchanged between modules. They are also output to the console:
+
+```text
+[DNS_NAME]      www.evilcorp.com    sslcert          (distance-0, in-scope, resolved, subdomain, a-record)
+ ^^^^^^^^       ^^^^^^^^^^^^^^^^    ^^^^^^^           ^^^^^^^^^^
+event type      event data          source module     tags
+```
+
+## Event Attributes
+
+Each BBOT event has the following attributes. Not all of these attributes are visible in the terminal output. However, they are always saved in `output.json` in the scan output folder. If you want to see them on the terminal, you can use `--json`.
+
+- `.type`: the event type (e.g. `DNS_NAME`, `IP_ADDRESS`, `OPEN_TCP_PORT`, etc.)
+- `.id`: an identifier representing the event type + a SHA1 hash of its data (note: multiple events can have the same `.id`)
+- `.uuid`: a universally unique identifier for the event (e.g. `DNS_NAME:6c96d512-090a-47f0-82e4-6860e46aac13`)
+- `.scope_description`: describes the scope of the event (e.g. `in-scope`, `affiliate`, `distance-2`)
+- `.data`: the actual discovered data (for some events like `DNS_NAME` or `IP_ADDRESS`, this is a string. For other more complex events like `HTTP_RESPONSE`, it's a dictionary)
+- `.host`: the hostname or IP address (e.g. `evilcorp.com` or `1.2.3.4`)
+- `.port`: the port number (e.g. `80`, `443`)
+- `.netloc`: the network location, including both the hostname and port (e.g. `www.evilcorp.com:443`)
+- `.resolved_hosts`: a list of all resolved hosts for the event (`A`, `AAAA`, and `CNAME` records)
+- `.dns_children`: a dictionary of all DNS records for the event (typically only present on `DNS_NAME`)
+- `.web_spider_distance`: a count of how many URL links have been followed in a row to get to this event
+- `.scope_distance`: a count of how many hops it is from the main scope (0 == in-scope)
+- `.scan`: the ID of the scan that produced the event
+- `.timestamp`: the date/time when the event was discovered
+- `.parent`: the ID of the parent event that led to the discovery of this event
+- `.parent_uuid`: the universally unique identifier for the parent event
+- `.tags`: a list of tags describing the event (e.g. `mx-record`, `http-title`, etc.)
+- `.module`: the module that discovered the event
+- `.module_sequence`: the recent sequence of modules that were executed to discover the event (including omitted events)
+- `.discovery_context`: a description of the context in which the event was discovered
+- `.discovery_path`: a list of every discovery context leading to this event
+- `.parent_chain`: a list of every event UUID leading to the discovery of this event (corresponds exactly to `.discovery_path`)
+
+These attributes allow us to construct a visual graph of events (e.g. in [Neo4j](../output#neo4j)) and query/filter/grep them more easily.
Here is what a typical event looks like in JSON format:
+
+```json
+{
+  "type": "DNS_NAME",
+  "id": "DNS_NAME:33bc005c2bdfea4d73e07db733bd11861cf6520e",
+  "uuid": "DNS_NAME:6c96d512-090a-47f0-82e4-6860e46aac13",
+  "scope_description": "in-scope",
+  "data": "link.evilcorp.com",
+  "host": "link.evilcorp.com",
+  "resolved_hosts": [
+    "184.31.52.65",
+    "2600:1402:b800:d82::700",
+    "2600:1402:b800:d87::700",
+    "link.evilcorp.com.edgekey.net"
+  ],
+  "dns_children": {
+    "A": [
+      "184.31.52.65"
+    ],
+    "AAAA": [
+      "2600:1402:b800:d82::700",
+      "2600:1402:b800:d87::700"
+    ],
+    "CNAME": [
+      "link.evilcorp.com.edgekey.net"
+    ]
+  },
+  "web_spider_distance": 0,
+  "scope_distance": 0,
+  "scan": "SCAN:b6ef48bc036bc8d001595ae5061846a7e6beadb6",
+  "timestamp": "2024-10-18T15:40:13.716880+00:00",
+  "parent": "DNS_NAME:94c92b7eaed431b37ae2a757fec4e678cc3bd213",
+  "parent_uuid": "DNS_NAME:c737dffa-d4f0-4b6e-a72d-cc8c05bd892e",
+  "tags": [
+    "subdomain",
+    "a-record",
+    "cdn-akamai",
+    "in-scope",
+    "cname-record",
+    "aaaa-record"
+  ],
+  "module": "speculate",
+  "module_sequence": "speculate->speculate",
+  "discovery_context": "speculated parent DNS_NAME: link.evilcorp.com",
+  "discovery_path": [
+    "Scan insidious_frederick seeded with DNS_NAME: evilcorp.com",
+    "TXT record for evilcorp.com contains IP_ADDRESS: 149.72.247.52",
+    "PTR record for 149.72.247.52 contains DNS_NAME: o1.ptr2410.link.evilcorp.com",
+    "speculated parent DNS_NAME: ptr2410.link.evilcorp.com",
+    "speculated parent DNS_NAME: link.evilcorp.com"
+  ],
+  "parent_chain": [
+    "DNS_NAME:34c657a3-0bfa-457e-9e6e-0f22f04b8da5",
+    "IP_ADDRESS:efc0fb3b-1b42-44da-916e-83db2360e10e",
+    "DNS_NAME:c737dffa-d4f0-4b6e-a72d-cc8c05bd892e",
+    "DNS_NAME_UNRESOLVED:722a3473-30c6-40f1-90aa-908d47105d5a",
+    "DNS_NAME:6c96d512-090a-47f0-82e4-6860e46aac13"
+  ]
+}
+```
+
+For a more detailed description of BBOT events, see [Developer Documentation - Event](../../dev/event).
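+
+Since every one of these attributes is saved with the event in `output.json`, you can also query scan output from the command line; a quick sketch using `jq` (assuming a scan that was named `my_scan`):
+
+```bash
+# print the host of every in-scope DNS_NAME in the scan output
+jq -r 'select(.type == "DNS_NAME" and (.tags | index("in-scope"))) | .host' ~/.bbot/scans/my_scan/output.json
+```
+
+Below is a full list of event types along with which modules produce/consume them.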
+ +## List of Event Types + + +| Event Type | # Consuming Modules | # Producing Modules | Consuming Modules | Producing Modules | +|---------------------|-----------------------|-----------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| * | 18 | 0 | affiliates, cloudcheck, csv, discord, dnsresolve, http, json, mysql, neo4j, postgres, python, slack, splunk, sqlite, stdout, teams, txt, websocket | | +| ASN | 0 | 1 | | asn | +| AZURE_TENANT | 1 | 0 | speculate | | +| CODE_REPOSITORY | 7 | 7 | docker_pull, git_clone, gitdumper, github_workflows, google_playstore, postman_download, trufflehog | code_repository, dockerhub, git, github_codesearch, github_org, gitlab, postman | +| DNS_NAME | 60 | 43 | anubisdb, asset_inventory, azure_realm, azure_tenant, baddns, baddns_zone, bevigil, binaryedge, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, bufferoverrun, builtwith, c99, censys, certspotter, chaos, credshed, crt, dehashed, digitorus, dnsbimi, dnsbrute, dnsbrute_mutations, dnscaa, dnscommonsrv, dnsdumpster, dnstlsrpt, emailformat, fullhunt, github_codesearch, hackertarget, hunterio, internetdb, leakix, myssl, nmap_xml, oauth, otx, passivetotal, pgp, portscan, rapiddns, securitytrails, securitytxt, shodan_dns, sitedossier, skymem, speculate, subdomaincenter, subdomainradar, subdomains, trickest, urlscan, viewdns, virustotal, wayback, zoomeye | anubisdb, azure_tenant, bevigil, binaryedge, bufferoverrun, builtwith, c99, censys, certspotter, chaos, crt, digitorus, dnsbrute, dnsbrute_mutations, dnscaa, dnscommonsrv, dnsdumpster, dnsresolve, fullhunt, hackertarget, hunterio, internetdb, leakix, myssl, ntlm, oauth, otx, passivetotal, rapiddns, securitytrails, shodan_dns, sitedossier, speculate, sslcert, subdomaincenter, subdomainradar, trickest, urlscan, vhost, viewdns, virustotal, wayback, zoomeye | +| DNS_NAME_UNRESOLVED | 3 | 0 | baddns, speculate, subdomains | | +| EMAIL_ADDRESS | 1 | 10 | emails | credshed, dehashed, dnscaa, dnstlsrpt, emailformat, hunterio, pgp, securitytxt, skymem, sslcert | +| FILESYSTEM | 4 | 9 | extractous, jadx, trufflehog, unarchive | apkpure, docker_pull, filedownload, git_clone, gitdumper, github_workflows, jadx, postman_download, unarchive | +| FINDING | 2 | 28 | asset_inventory, web_report | ajaxpro, baddns, baddns_direct, baddns_zone, badsecrets, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, bypass403, 
dastardly, git, gitlab, host_header, hunt, internetdb, newsletters, ntlm, nuclei, paramminer_cookies, paramminer_getparams, smuggler, speculate, telerik, trufflehog, url_manipulation, wpscan | +| GEOLOCATION | 0 | 2 | | ip2location, ipstack | +| HASHED_PASSWORD | 0 | 2 | | credshed, dehashed | +| HTTP_RESPONSE | 20 | 1 | ajaxpro, asset_inventory, badsecrets, dastardly, dotnetnuke, excavate, filedownload, gitlab, host_header, newsletters, nmap_xml, ntlm, paramminer_cookies, paramminer_getparams, paramminer_headers, speculate, telerik, trufflehog, wappalyzer, wpscan | httpx | +| IP_ADDRESS | 9 | 4 | asn, asset_inventory, internetdb, ip2location, ipneighbor, ipstack, nmap_xml, portscan, speculate | asset_inventory, dnsresolve, ipneighbor, speculate | +| IP_RANGE | 2 | 0 | portscan, speculate | | +| MOBILE_APP | 1 | 1 | apkpure | google_playstore | +| OPEN_TCP_PORT | 6 | 4 | asset_inventory, fingerprintx, httpx, nmap_xml, portfilter, sslcert | asset_inventory, internetdb, portscan, speculate | +| ORG_STUB | 4 | 1 | dockerhub, github_org, google_playstore, postman | speculate | +| PASSWORD | 0 | 2 | | credshed, dehashed | +| PROTOCOL | 1 | 1 | nmap_xml | fingerprintx | +| RAW_DNS_RECORD | 0 | 3 | | dnsbimi, dnsresolve, dnstlsrpt | +| RAW_TEXT | 2 | 1 | excavate, trufflehog | extractous | +| SOCIAL | 6 | 3 | dockerhub, github_org, gitlab, gowitness, postman, speculate | dockerhub, gitlab, social | +| STORAGE_BUCKET | 8 | 5 | baddns_direct, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_file_enum, bucket_firebase, bucket_google, speculate | bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google | +| TECHNOLOGY | 4 | 8 | asset_inventory, gitlab, web_report, wpscan | badsecrets, dotnetnuke, gitlab, gowitness, internetdb, nuclei, wappalyzer, wpscan | +| URL | 21 | 2 | ajaxpro, asset_inventory, baddns_direct, bypass403, ffuf, generic_ssrf, git, gowitness, httpx, iis_shortnames, ntlm, nuclei, portfilter, robots, smuggler, speculate, telerik, url_manipulation, vhost, wafw00f, web_report | gowitness, httpx | +| URL_HINT | 1 | 1 | ffuf_shortnames | iis_shortnames | +| URL_UNVERIFIED | 7 | 18 | code_repository, filedownload, httpx, oauth, portfilter, social, speculate | azure_realm, bevigil, bucket_file_enum, dnsbimi, dnscaa, dnstlsrpt, dockerhub, excavate, ffuf, ffuf_shortnames, github_codesearch, gowitness, hunterio, robots, securitytxt, urlscan, wayback, wpscan | +| USERNAME | 1 | 2 | speculate | credshed, dehashed | +| VHOST | 1 | 1 | web_report | vhost | +| VULNERABILITY | 2 | 13 | asset_inventory, web_report | ajaxpro, baddns, baddns_direct, baddns_zone, badsecrets, dastardly, dotnetnuke, generic_ssrf, internetdb, nuclei, telerik, trufflehog, wpscan | +| WAF | 1 | 1 | asset_inventory | wafw00f | +| WEBSCREENSHOT | 0 | 1 | | gowitness | +| WEB_PARAMETER | 5 | 4 | hunt, paramminer_cookies, paramminer_getparams, paramminer_headers, web_parameters | excavate, paramminer_cookies, paramminer_getparams, paramminer_headers | + + +## Findings Vs. Vulnerabilities + +BBOT has a sharp distinction between Findings and Vulnerabilities: + +**VULNERABILITY** + +* There's a higher standard for what is allowed to be a vulnerability. They should be considered **confirmed** and **actionable** - no additional confirmation required +* They are always assigned a severity. 
The possible severities are: LOW, MEDIUM, HIGH, or CRITICAL.

**FINDING**

* Findings can range anywhere from "slightly interesting behavior" to "likely, but unconfirmed vulnerability"
* They are often false positives

By making this separation, actionable vulnerabilities can be identified quickly in the midst of a large scan. diff --git a/docs/scanning/index.md b/docs/scanning/index.md new file mode 100644 index 0000000000..a25007ff64 --- /dev/null +++ b/docs/scanning/index.md @@ -0,0 +1,280 @@ +# Scanning Overview

## Scan Names

Every BBOT scan gets a random, mildly-entertaining name like **`demonic_jimmy`**. Output for that scan, including scan stats and any web screenshots, is saved to a folder by that name in `~/.bbot/scans`. The most recent 20 scans are kept, and older ones are removed.

If you don't want a random name, you can change it with `-n`. You can also change the location of BBOT's output with `-o`:

```bash
# save everything to the folder "my_scan" in the current directory
bbot -t evilcorp.com -f subdomain-enum -m gowitness -n my_scan -o .
```

If you reuse a scan name, BBOT will automatically append to your previous output files.

## Targets (`-t`)

Targets declare what's in-scope and seed a scan with initial data. BBOT accepts an unlimited number of targets. They can be any of the following:

- `DNS_NAME` (`evilcorp.com`)
- `IP_ADDRESS` (`1.2.3.4`)
- `IP_RANGE` (`1.2.3.0/24`)
- `OPEN_TCP_PORT` (`192.168.0.1:80`)
- `URL` (`https://www.evilcorp.com`)
- `EMAIL_ADDRESS` (`bob@evilcorp.com`)
- `ORG_STUB` (`ORG:evilcorp`)
- `USER_STUB` (`USER:bobsmith`)
- `FILESYSTEM` (`FILESYSTEM:/tmp/asdf`)
- `MOBILE_APP` (`MOBILE_APP:https://play.google.com/store/apps/details?id=com.evilcorp.app`)

Note that BBOT only discriminates down to the host level. This means, for example, if you specify a URL `https://www.evilcorp.com` as the target, the scan will be *seeded* with that URL, but the scope of the scan will be the entire host, `www.evilcorp.com`. Other ports/URLs on that same host may also be scanned.

You can specify targets directly on the command line, load them from files, or both! For example:

```bash
$ cat targets.txt
4.3.2.1
10.0.0.2:80
1.2.3.0/24
evilcorp.com
evilcorp.co.uk
https://www.evilcorp.co.uk

# load targets from a file and from the command-line
$ bbot -t targets.txt fsociety.com 5.6.7.0/24 -m nmap
```

On start, BBOT automatically converts Targets into [Events](events.md).

## Modules (`-m`)

To see a full list of modules and their descriptions, use `bbot -l` or see [List of Modules](../modules/list_of_modules.md).

Modules are the part of BBOT that does the work -- port scanning, subdomain brute-forcing, API querying, etc. Modules consume [Events](events.md) (`IP_ADDRESS`, `DNS_NAME`, etc.) from each other, process the data in a useful way, then emit the results as new events. You can enable individual modules with `-m`.

```bash
# Enable modules: nmap, sslcert, and httpx
bbot -t www.evilcorp.com -m nmap sslcert httpx
```

### Types of Modules

Modules fall into three categories:

- **Scan Modules**:
    - These make up the majority of modules. Examples are `nmap`, `sslcert`, `httpx`, etc. Enable with `-m`.
- **Output Modules**:
    - These output scan data to different formats/destinations. `human`, `json`, and `csv` are enabled by default. Enable others with `-om`. (See: [Output](output.md))
- **Internal Modules**:
    - These modules perform essential, common-sense tasks.
      They are always enabled, unless explicitly disabled via the config (e.g. `-c speculate=false`).
    - `aggregate`: Summarizes results at the end of a scan
    - `excavate`: Extracts useful data such as subdomains from webpages, etc.
    - `speculate`: Intelligently infers new events, e.g. `OPEN_TCP_PORT` from `URL` or `IP_ADDRESS` from `IP_NETWORK`.

For details on the inner workings of modules, see [Creating a Module](../contribution.md#creating-a-module).

## Flags (`-f`)

Flags are how BBOT categorizes its modules. In a way, you can think of them as groups. Flags let you enable a bunch of similar modules at the same time without having to specify them each individually. For example, `-f subdomain-enum` would enable every module with the `subdomain-enum` flag.

```bash
# list all subdomain-enum modules
bbot -f subdomain-enum -l
```

### Filtering Modules

Modules can be easily enabled/disabled based on their flags:

- `-f` Enable these flags (e.g. `-f subdomain-enum`)
- `-rf` Require modules to have this flag (e.g. `-rf safe`)
- `-ef` Exclude these flags (e.g. `-ef slow`)
- `-em` Exclude these individual modules (e.g. `-em ipneighbor`)
- `-lf` List all available flags

Every module is either `safe` or `aggressive`, and either `active` or `passive`. These can be useful for filtering. For example, if you wanted to enable all the `safe` modules, but exclude active ones, you could do:

```bash
# Enable safe modules but exclude active ones
bbot -t evilcorp.com -f safe -ef active
```

This is equivalent to requiring the passive flag:

```bash
# Enable safe modules but only if they're also passive
bbot -t evilcorp.com -f safe -rf passive
```

A single module can have multiple flags. For example, the `securitytrails` module is `passive`, `safe`, and `subdomain-enum`. Below is a full list of flags and their associated modules.
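Since these filters compose freely, you can mix them to carve out exactly the module set you want. A hypothetical (but valid) combination of the flags and options documented above:

```bash
# passive subdomain enumeration only, minus the ipneighbor module
bbot -t evilcorp.com -f subdomain-enum -rf passive -em ipneighbor
```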
+ +### List of Flags + + +| Flag | # Modules | Description | Modules | +|------------------|-------------|----------------------------------------------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| safe | 93 | Non-intrusive, safe to run | affiliates, aggregate, ajaxpro, anubisdb, apkpure, asn, azure_realm, azure_tenant, baddns, baddns_direct, baddns_zone, badsecrets, bevigil, binaryedge, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_file_enum, bucket_firebase, bucket_google, bufferoverrun, builtwith, c99, censys, certspotter, chaos, code_repository, credshed, crt, dehashed, digitorus, dnsbimi, dnscaa, dnscommonsrv, dnsdumpster, dnstlsrpt, docker_pull, dockerhub, emailformat, extractous, filedownload, fingerprintx, fullhunt, git, git_clone, gitdumper, github_codesearch, github_org, github_workflows, gitlab, google_playstore, gowitness, hackertarget, httpx, hunt, hunterio, iis_shortnames, internetdb, ip2location, ipstack, jadx, leakix, myssl, newsletters, ntlm, oauth, otx, passivetotal, pgp, portfilter, portscan, postman, postman_download, rapiddns, robots, securitytrails, securitytxt, shodan_dns, sitedossier, skymem, social, sslcert, subdomaincenter, subdomainradar, trickest, trufflehog, unarchive, urlscan, viewdns, virustotal, wappalyzer, wayback, zoomeye | +| passive | 69 | Never connects to target systems | affiliates, aggregate, anubisdb, apkpure, asn, azure_realm, azure_tenant, bevigil, binaryedge, bucket_file_enum, bufferoverrun, builtwith, c99, censys, certspotter, chaos, code_repository, credshed, crt, dehashed, digitorus, dnsbimi, dnscaa, dnsdumpster, dnstlsrpt, docker_pull, dockerhub, emailformat, excavate, extractous, fullhunt, git_clone, gitdumper, github_codesearch, github_org, github_workflows, google_playstore, hackertarget, hunterio, internetdb, ip2location, ipneighbor, ipstack, jadx, leakix, myssl, otx, passivetotal, pgp, portfilter, postman, postman_download, rapiddns, securitytrails, shodan_dns, sitedossier, skymem, social, speculate, subdomaincenter, subdomainradar, trickest, trufflehog, unarchive, urlscan, viewdns, virustotal, wayback, zoomeye | +| subdomain-enum | 52 | Enumerates subdomains | anubisdb, asn, azure_realm, azure_tenant, baddns_direct, baddns_zone, bevigil, binaryedge, bufferoverrun, builtwith, c99, censys, certspotter, chaos, crt, digitorus, dnsbimi, dnsbrute, dnsbrute_mutations, dnscaa, dnscommonsrv, dnsdumpster, dnstlsrpt, fullhunt, github_codesearch, github_org, hackertarget, httpx, hunterio, internetdb, ipneighbor, leakix, myssl, 
oauth, otx, passivetotal, postman, postman_download, rapiddns, securitytrails, securitytxt, shodan_dns, sitedossier, sslcert, subdomaincenter, subdomainradar, subdomains, trickest, urlscan, virustotal, wayback, zoomeye | +| active | 46 | Makes active connections to target systems | ajaxpro, baddns, baddns_direct, baddns_zone, badsecrets, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, bypass403, dastardly, dnsbrute, dnsbrute_mutations, dnscommonsrv, dotnetnuke, ffuf, ffuf_shortnames, filedownload, fingerprintx, generic_ssrf, git, gitlab, gowitness, host_header, httpx, hunt, iis_shortnames, newsletters, ntlm, nuclei, oauth, paramminer_cookies, paramminer_getparams, paramminer_headers, portscan, robots, securitytxt, smuggler, sslcert, telerik, url_manipulation, vhost, wafw00f, wappalyzer, wpscan | +| aggressive | 20 | Generates a large amount of network traffic | bypass403, dastardly, dnsbrute, dnsbrute_mutations, dotnetnuke, ffuf, ffuf_shortnames, generic_ssrf, host_header, ipneighbor, nuclei, paramminer_cookies, paramminer_getparams, paramminer_headers, smuggler, telerik, url_manipulation, vhost, wafw00f, wpscan | +| web-basic | 17 | Basic, non-intrusive web scan functionality | azure_realm, baddns, badsecrets, bucket_amazon, bucket_azure, bucket_firebase, bucket_google, filedownload, git, httpx, iis_shortnames, ntlm, oauth, robots, securitytxt, sslcert, wappalyzer | +| cloud-enum | 16 | Enumerates cloud resources | azure_realm, azure_tenant, baddns, baddns_direct, baddns_zone, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_file_enum, bucket_firebase, bucket_google, dnsbimi, dnstlsrpt, httpx, oauth, securitytxt | +| code-enum | 16 | Find public code repositories and search them for secrets etc. | apkpure, code_repository, docker_pull, dockerhub, git, git_clone, gitdumper, github_codesearch, github_org, github_workflows, gitlab, google_playstore, jadx, postman, postman_download, trufflehog | +| slow | 12 | May take a long time to complete | bucket_digitalocean, dastardly, dnsbrute_mutations, docker_pull, fingerprintx, git_clone, gitdumper, paramminer_cookies, paramminer_getparams, paramminer_headers, smuggler, vhost | +| web-thorough | 12 | More advanced web scanning functionality | ajaxpro, bucket_digitalocean, bypass403, dastardly, dotnetnuke, ffuf_shortnames, generic_ssrf, host_header, hunt, smuggler, telerik, url_manipulation | +| affiliates | 9 | Discovers affiliated hostnames/domains | affiliates, azure_realm, azure_tenant, builtwith, oauth, sslcert, trickest, viewdns, zoomeye | +| email-enum | 9 | Enumerates email addresses | dehashed, dnscaa, dnstlsrpt, emailformat, emails, hunterio, pgp, skymem, sslcert | +| deadly | 4 | Highly aggressive | dastardly, ffuf, nuclei, vhost | +| baddns | 3 | Runs all modules from the DNS auditing tool BadDNS | baddns, baddns_direct, baddns_zone | +| web-paramminer | 3 | Discovers HTTP parameters through brute-force | paramminer_cookies, paramminer_getparams, paramminer_headers | +| iis-shortnames | 2 | Scans for IIS Shortname vulnerability | ffuf_shortnames, iis_shortnames | +| portscan | 2 | Discovers open ports | internetdb, portscan | +| report | 2 | Generates a report at the end of the scan | affiliates, asn | +| social-enum | 2 | Enumerates social media | httpx, social | +| service-enum | 1 | Identifies protocols running on open ports | fingerprintx | +| subdomain-hijack | 1 | Detects hijackable subdomains | baddns | +| web-screenshots | 1 | Takes screenshots of web pages | gowitness | + + +## 
Dependencies

BBOT modules have external dependencies ranging from OS packages (`openssl`) to binaries (`nmap`) to Python libraries (`wappalyzer`). When a module is enabled, installation of its dependencies happens at runtime with [Ansible](https://github.com/ansible/ansible). BBOT provides several command-line flags to control how dependencies are installed.

- `--no-deps` - Don't install module dependencies
- `--force-deps` - Force install all module dependencies
- `--retry-deps` - Try again to install failed module dependencies
- `--ignore-failed-deps` - Run modules even if they have failed dependencies
- `--install-all-deps` - Install dependencies for all modules (useful if you are provisioning a pentest system and want to install everything ahead of time)

For details on how Ansible playbooks are attached to BBOT modules, see [How to Write a Module](../contribution.md#module-dependencies).

## Scope

For pentesters and bug bounty hunters, staying in scope is extremely important. BBOT takes this seriously, meaning that active modules (e.g. `nuclei`) will only touch in-scope resources.

By default, scope is whatever you specify with `-t`. This includes child subdomains. For example, if you specify `-t evilcorp.com`, all its subdomains (`www.evilcorp.com`, `mail.evilcorp.com`, etc.) also become in-scope.

### Scope Distance

Since BBOT is recursive, it would quickly resort to scanning the entire internet without some kind of restraining mechanism. To solve this problem, every [event](events.md) discovered by BBOT is assigned a **Scope Distance**. Scope distance represents how far out from the main scope that data was discovered.

For example, if your target is `evilcorp.com`, `www.evilcorp.com` would have a scope distance of `0` (i.e. in-scope). If BBOT discovers that `www.evilcorp.com` resolves to `1.2.3.4`, `1.2.3.4` is one hop away, which means it would have a scope distance of `1`. If `1.2.3.4` has a PTR record that points to `ecorp.blob.core.windows.net`, `ecorp.blob.core.windows.net` is two hops away, so its scope distance is `2`.

Scope distance continues to increase the further out you get. Most modules (e.g. `nuclei` and `nmap`) only consume in-scope events. Certain other passive modules such as `asn` accept events out to distance `1`. By default, DNS resolution happens out to a distance of `2`. Upon its discovery, any [event](events.md) that's determined to be in-scope (e.g. `www.evilcorp.com`) immediately becomes distance `0`, and the cycle starts over.

#### Displaying Out-of-scope Events

By default, BBOT only displays in-scope events (with a few exceptions such as `STORAGE_BUCKET`s). If you want to see more, you must increase the [config](configuration.md) value of `scope.report_distance`:

```bash
# display out-of-scope events up to one hop away from the main scope
bbot -t evilcorp.com -f subdomain-enum -c scope.report_distance=1
```

### Strict Scope

If you want to scan **_only_** a specific target hostname and none of its children, you can specify `--strict-scope`.

Note that `--strict-scope` only applies to targets and whitelists, but not blacklists. This means that if you put `internal.evilcorp.com` in your blacklist, you can be sure none of its subdomains will be scanned, even when using `--strict-scope`.

### Whitelists and Blacklists

BBOT allows precise control over scope with whitelists and blacklists.
These both use the same syntax as `--target`, meaning they accept the same event types, and you can specify an unlimited number of them, via a file, the CLI, or both.

#### Whitelists

`--whitelist` enables you to override what's in scope. For example, if you want to run nuclei against `evilcorp.com`, but stay only inside their corporate IP range of `1.2.3.0/24`, you can accomplish this like so:

```bash
# Seed scan with evilcorp.com, but restrict scope to 1.2.3.0/24
bbot -t evilcorp.com --whitelist 1.2.3.0/24 -f subdomain-enum -m nmap nuclei --allow-deadly
```

#### Blacklists

`--blacklist` takes ultimate precedence. Anything in the blacklist is completely excluded from the scan, even if it's in the whitelist.

```bash
# Scan evilcorp.com, but exclude internal.evilcorp.com and its children
bbot -t evilcorp.com --blacklist internal.evilcorp.com -f subdomain-enum -m nmap nuclei --allow-deadly
```

#### Blacklist by Regex

Blacklists also accept regex patterns. These regexes are checked against the full URL, including the host and path.

To specify a regex, prefix the pattern with `RE:`. For example, to exclude all events containing "signout", you could do:

```bash
bbot -t evilcorp.com --blacklist "RE:signout"
```

Note that this would blacklist both of the following events:

- `[URL] http://evilcorp.com/signout.aspx`
- `[DNS_NAME] signout.evilcorp.com`

If you only want to blacklist the URL, you could narrow the regex like so:

```bash
bbot -t evilcorp.com --blacklist 'RE:signout\.aspx$'
```

Similar to targets and whitelists, blacklists can be specified in your preset. The `spider` preset makes use of this to prevent the spider from following logout links:

```yaml title="spider.yml"
description: Recursive web spider

modules:
  - httpx

blacklist:
  # Prevent spider from invalidating sessions by logging out
  - "RE:/.*(sign|log)[_-]?out"

config:
  web:
    # how many links to follow in a row
    spider_distance: 2
    # don't follow links whose directory depth is higher than 4
    spider_depth: 4
    # maximum number of links to follow per page
    spider_links_per_page: 25
```

## DNS Wildcards

BBOT has robust wildcard detection built-in. It can reliably detect wildcard domains, and will tag them accordingly:

```text
[DNS_NAME] github.io TARGET (a-record, a-wildcard-domain, aaaa-wildcard-domain, wildcard-domain)
                             ^^^^^^^^^^^^^^^^^  ^^^^^^^^^^^^^^^^^^^^  ^^^^^^^^^^^^^^^
```

Wildcard hosts are collapsed into a single host beginning with `_wildcard`:

```text
[DNS_NAME] _wildcard.github.io TARGET (a-record, a-wildcard, a-wildcard-domain, aaaa-record, aaaa-wildcard, aaaa-wildcard-domain, wildcard, wildcard-domain)
           ^^^^^^^^^
```

If you don't want this, you can disable wildcard detection on a domain-to-domain basis in the [config](configuration.md):

```yaml title="~/.bbot/config/bbot.yml"
dns:
  wildcard_ignore:
    - evilcorp.com
    - evilcorp.co.uk
```

There are certain edge cases (such as with dynamic DNS rules) where BBOT's wildcard detection fails. In these cases, you can try increasing the number of wildcard checks in the config:

```yaml title="~/.bbot/config/bbot.yml"
# default == 10
dns:
  wildcard_tests: 20
```

If that doesn't work, you can consider [blacklisting](#whitelists-and-blacklists) the offending domain.
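For example, if a dynamic-DNS zone keeps slipping past wildcard detection, you can exclude it from the scan entirely (the hostname below is hypothetical):

```bash
# treat the offending wildcard domain as out-of-scope
bbot -t evilcorp.com --blacklist dyn.evilcorp.com
```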
diff --git a/docs/scanning/output.md b/docs/scanning/output.md new file mode 100644 index 0000000000..b46eb40c86 --- /dev/null +++ b/docs/scanning/output.md @@ -0,0 +1,352 @@ +# Output

By default, BBOT saves its output in TXT, JSON, and CSV formats. The filenames are logged at the end of each scan:
![bbot output](https://github.com/blacklanternsecurity/bbot/assets/20261699/bb3da441-2682-408f-b955-19b268823b82)

Every BBOT scan gets a unique and mildly-entertaining name like **`demonic_jimmy`**. Output for that scan, including scan stats, web screenshots, etc., is saved to a folder by that name in `~/.bbot/scans`. The most recent 20 scans are kept, and older ones are removed. You can change the location of BBOT's output with `--output`, and you can also pick a custom scan name with `--name`.

If you reuse a scan name, BBOT will automatically append to your previous output files.

## Output Modules

Multiple simultaneous output formats are possible because of **output modules**. Output modules are similar to normal modules except they are enabled with `-om`.

### STDOUT

The `stdout` output module is what you see when you execute BBOT in the terminal. By default it looks the same as the [`txt`](#txt) module, but it has options you can customize. You can filter by event type, choose the data format (`text`, `json`), and select which fields you want to see:


| Config Option                 | Type   | Description                                       | Default   |
|-------------------------------|--------|---------------------------------------------------|-----------|
| modules.stdout.accept_dupes   | bool   | Whether to show duplicate events, default True    | True      |
| modules.stdout.event_fields   | list   | Which event fields to display                     | []        |
| modules.stdout.event_types    | list   | Which events to display, default all event types  | []        |
| modules.stdout.format         | str    | Which text format to display, choices: text,json  | text      |
| modules.stdout.in_scope_only  | bool   | Whether to only show in-scope events              | False     |


### TXT

`txt` output is tab-delimited, so it's easy to grep:

```bash
# grep out only the DNS_NAMEs (escape the brackets so grep matches them literally)
cat ~/.bbot/scans/extreme_johnny/output.txt | grep '\[DNS_NAME\]' | cut -f2
evilcorp.com
www.evilcorp.com
mail.evilcorp.com
```

### CSV

The `csv` output module produces a CSV like this:

| Event type | Event data              | IP Address | Source Module | Scope Distance | Event Tags |
| ---------- | ----------------------- | ---------- | ------------- | -------------- | ---------- |
| DNS_NAME   | evilcorp.com            | 1.2.3.4    | TARGET        | 0              | a-record,cdn-github,distance-0,domain,in-scope,mx-record,ns-record,resolved,soa-record,target,txt-record |
| DNS_NAME   | www.evilcorp.com        | 2.3.4.5    | certspotter   | 0              | a-record,aaaa-record,cdn-github,cname-record,distance-0,in-scope,resolved,subdomain |
| URL        | http://www.evilcorp.com | 2.3.4.5    | httpx         | 0              | a-record,aaaa-record,cdn-github,cname-record,distance-0,in-scope,resolved,subdomain |
| DNS_NAME   | admin.evilcorp.com      | 5.6.7.8    | otx           | 0              | a-record,aaaa-record,cloud-azure,cname-record,distance-0,in-scope,resolved,subdomain |

### JSON

If you manually enable the `json` output module, it will go to stdout:

```bash
bbot -t evilcorp.com -om json | jq
```

You will then see [events](events.md) like this:

```json
{
  "type": "IP_ADDRESS",
  "id": "IP_ADDRESS:13cd09c2adf0860a582240229cd7ad1dccdb5eb1",
  "data": "1.2.3.4",
  "scope_distance": 1,
  "scan": "SCAN:64c0e076516ae7aa6502fd99489693d0d5ec26cc",
"SCAN:64c0e076516ae7aa6502fd99489693d0d5ec26cc", + "timestamp": 1688518967.740472, + "resolved_hosts": ["1.2.3.4"], + "parent": "DNS_NAME:2da045542abbf86723f22383d04eb453e573723c", + "tags": ["distance-1", "ipv4", "internal"], + "module": "A", + "module_sequence": "A" +} +``` + +You can filter on the JSON output with `jq`: + +```bash +# pull out only the .data attribute of every DNS_NAME +$ jq -r 'select(.type=="DNS_NAME") | .data' ~/.bbot/scans/extreme_johnny/output.json +evilcorp.com +www.evilcorp.com +mail.evilcorp.com +``` + +### Discord / Slack / Teams + +![bbot-discord](https://github.com/blacklanternsecurity/bbot/assets/20261699/6d88045c-8eac-43b6-8de9-c621ecf60c2d) + +BBOT supports output via webhooks to `discord`, `slack`, and `teams`. To use them, you must specify a webhook URL either in the config: + +```yaml title="discord_preset.yml" +config: + modules: + discord: + webhook_url: https://discord.com/api/webhooks/1234/deadbeef +``` + +...or on the command line: +```bash +bbot -t evilcorp.com -om discord -c modules.discord.webhook_url=https://discord.com/api/webhooks/1234/deadbeef +``` + +By default, only `VULNERABILITY` and `FINDING` events are sent, but this can be customized by setting `event_types` in the config like so: + +```yaml title="discord_preset.yml" +config: + modules: + discord: + event_types: + - VULNERABILITY + - FINDING + - STORAGE_BUCKET +``` + +...or on the command line: +```bash +bbot -t evilcorp.com -om discord -c modules.discord.event_types=["STORAGE_BUCKET","FINDING","VULNERABILITY"] +``` + +You can also filter on the severity of `VULNERABILITY` events by setting `min_severity`: + + +```yaml title="discord_preset.yml" +config: + modules: + discord: + min_severity: HIGH +``` + +### HTTP + +The `http` output module sends [events](events.md) in JSON format to a desired HTTP endpoint. + +```bash +# POST scan results to localhost +bbot -t evilcorp.com -om http -c modules.http.url=http://localhost:8000 +``` + +You can customize the HTTP method if needed. Authentication is also supported: + +```yaml title="http_preset.yml" +config: + modules: + http: + url: https://localhost:8000 + method: PUT + # Authorization: Bearer + bearer: + # OR + username: bob + password: P@ssw0rd +``` + +### Elasticsearch + +When outputting to Elastic, use the `http` output module with the following settings (replace `` with your desired index, e.g. `bbot`): + +```bash +# send scan results directly to elasticsearch +bbot -t evilcorp.com -om http -c \ + modules.http.url=http://localhost:8000//_doc \ + modules.http.siem_friendly=true \ + modules.http.username=elastic \ + modules.http.password=changeme +``` + +Alternatively, via a preset: + +```yaml title="elastic_preset.yml" +config: + modules: + http: + url: http://localhost:8000//_doc + siem_friendly: true + username: elastic + password: changeme +``` + +### Splunk + +The `splunk` output module sends [events](events.md) in JSON format to a desired splunk instance via [HEC](https://docs.splunk.com/Documentation/Splunk/9.2.0/Data/UsetheHTTPEventCollector). 
You can customize this output with the following config options:

```yaml title="splunk_preset.yml"
config:
  modules:
    splunk:
      # The full URL with the URI `/services/collector/event`
      url: https://localhost:8088/services/collector/event
      # Generated from the Splunk web UI
      hectoken: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
      # Defaults to `main` if not set
      index: my-specific-index
      # Defaults to `bbot` if not set
      source: /my/source.json
```

### Asset Inventory

The `asset_inventory` module produces a CSV like this:

| Host               | Provider    | IP(s)   | Status | Open Ports |
| ------------------ | ----------- | ------- | ------ | ---------- |
| evilcorp.com       | cdn-github  | 1.2.3.4 | Active | 80,443     |
| www.evilcorp.com   | cdn-github  | 2.3.4.5 | Active | 22,80,443  |
| admin.evilcorp.com | cloud-azure | 5.6.7.8 | N/A    |            |

### SQLite

The `sqlite` output module produces a SQLite database containing all events, scans, and targets. By default, it will be saved in the scan directory as `output.sqlite`.

```bash
# specifying a custom database path
bbot -t evilcorp.com -om sqlite -c modules.sqlite.database=/tmp/bbot.sqlite
```

### Postgres

The `postgres` output module allows you to ingest events, scans, and targets into a Postgres database. By default, it will connect to the server on `localhost` with a username of `postgres` and password of `bbotislife`. You can change this behavior in the config.

```bash
# specifying an alternate database
bbot -t evilcorp.com -om postgres -c modules.postgres.database=custom_bbot_db
```

```yaml title="postgres_preset.yml"
config:
  modules:
    postgres:
      host: psq.fsociety.local
      database: custom_bbot_db
      port: 5432
      username: postgres
      password: bbotislife
```

### MySQL

The `mysql` output module allows you to ingest events, scans, and targets into a MySQL database. By default, it will connect to the server on `localhost` with a username of `root` and password of `bbotislife`. You can change this behavior in the config.

```bash
# specifying an alternate database
bbot -t evilcorp.com -om mysql -c modules.mysql.database=custom_bbot_db
```

```yaml title="mysql_preset.yml"
config:
  modules:
    mysql:
      host: mysql.fsociety.local
      database: custom_bbot_db
      port: 3306
      username: root
      password: bbotislife
```

### Subdomains

The `subdomains` output module produces a simple text file containing only in-scope and resolved subdomains:

```text title="subdomains.txt"
evilcorp.com
www.evilcorp.com
mail.evilcorp.com
portal.evilcorp.com
```

### Neo4j

Neo4j is the funnest (and prettiest) way to view and interact with BBOT data.

![neo4j](https://github.com/blacklanternsecurity/bbot/assets/20261699/0192d548-5c60-42b6-9a1e-32ba7b921cdf)

- You can get Neo4j up and running with a single docker command:

```bash
# start Neo4j in the background with docker
docker run -d -p 7687:7687 -p 7474:7474 -v "$(pwd)/neo4j/:/data/" -e NEO4J_AUTH=neo4j/bbotislife neo4j
```

- After that, run BBOT with `-om neo4j`:

```bash
bbot -f subdomain-enum -t evilcorp.com -om neo4j
```

- Log in at [http://localhost:7474](http://localhost:7474) with `neo4j` / `bbotislife`

#### Cypher Queries and Tips

Neo4j uses the Cypher Query Language. Cypher uses common clauses to craft relational queries and present the desired data in multiple formats.

Cypher queries can be broken down into three required pieces: selection, filter, and presentation.
The selection piece identifies what data will be searched against - 90% of the time the "MATCH" clause will be enough, but there are also means to read from CSV or JSON data files. In all of these examples the "MATCH" clause will be used. The filter piece helps to focus in on the required data, and uses the "WHERE" clause to accomplish this (most basic operators can be used). Finally, the presentation piece identifies how the data should be presented back to the querier. While Neo4j is a graph database, it can also present data in a traditional table view.

A simple query to grab every URL event with ".com" in the BBOT data field would look like this:
`MATCH (u:URL) WHERE u.data contains ".com" RETURN u`

In this query, the following can be identified:
- Within the MATCH statement, "u" is a variable and can be any value needed by the user, while the "URL" label maps directly to the BBOT event type.
- The WHERE statement allows the query to filter on any of the BBOT event properties, like data, tag, or even the label itself.
- The RETURN statement is a general presentation of the whole URL event, but it can be narrowed down to present any of the specific properties of the BBOT event (`RETURN u.data, u.tags`).

The following are a few recommended queries to get started with:

```cypher
// Get all "in-scope" DNS Nodes and return just data and tags properties
MATCH (n:DNS_NAME)
WHERE "in-scope" IN n.tags
RETURN n.data, n.tags
```

```cypher
// Get the count of labels/BBOT events in the Neo4j Database
MATCH (n)
RETURN labels(n), count(n)
```

```cypher
// Get a graph of open ports associated with each domain
MATCH z = ((n:DNS_NAME) --> (p:OPEN_TCP_PORT))
RETURN z
```

```cypher
// Get all domains and IP addresses with open TCP ports
MATCH (n) --> (p:OPEN_TCP_PORT)
WHERE "in-scope" in n.tags and (n:DNS_NAME or n:IP_ADDRESS)
WITH *, TAIL(SPLIT(p.data, ':')) AS port
RETURN n.data, collect(distinct port)
```

```cypher
// Clear the database
MATCH (n) DETACH DELETE n
```

This is not an exhaustive list of clauses, filters, or other ways to use Cypher; it should be considered a starting point. To build more advanced queries, consider reading Neo4j's Cypher [documentation](https://neo4j.com/docs/cypher-manual/current/introduction/).

Additional note: these sample queries depend on the corresponding data existing in the target Neo4j database.

### Web_parameters

The `web_parameters` output module utilizes BBOT's web parameter extraction capabilities and outputs the resulting parameters to a file (`web_parameters.txt` by default). Web parameter extraction is disabled by default, but will automatically be enabled when a module is included that consumes WEB_PARAMETER events (including the `web_parameters` output module itself).

This can be useful for those who want to discover new common web parameters, or ones which may be associated with a specific target or organization. The results can be useful for further parameter brute-forcing, or even fed back into BBOT via the paramminer modules. For example:

```bash
bbot -t evilcorp.com -m paramminer_getparams -c modules.paramminer_getparams.wordlist=/path/to/your/new/wordlist.txt
``` \ No newline at end of file diff --git a/docs/scanning/presets.md b/docs/scanning/presets.md new file mode 100644 index 0000000000..7fa8f8c93b --- /dev/null +++ b/docs/scanning/presets.md @@ -0,0 +1,199 @@ +# Presets

Once you start customizing BBOT, your commands can start to get really long.
Presets let you put all your scan settings in a single file: + +```bash +bbot -p ./my_preset.yml +``` + +A Preset is a YAML file that can include scan targets, modules, and config options like API keys. + +A typical preset looks like this: + + +```yaml title="subdomain-enum.yml" +description: Enumerate subdomains via APIs, brute-force + +flags: + - subdomain-enum + +output_modules: + - subdomains + +``` + + +## How to use Presets (`-p`) + +BBOT has a ready-made collection of presets for common tasks like subdomain enumeration and web spidering. They live in `~/.bbot/presets`. + +To list them, you can do: + +```bash +# list available presets +bbot -lp +``` + +Enable them with `-p`: + +```bash +# do a subdomain enumeration +bbot -t evilcorp.com -p subdomain-enum + +# multiple presets - subdomain enumeration + web spider +bbot -t evilcorp.com -p subdomain-enum spider + +# start with a preset but only enable modules that have the 'passive' flag +bbot -t evilcorp.com -p subdomain-enum -rf passive + +# preset + manual config override +bbot -t www.evilcorp.com -p spider -c web.spider_distance=10 +``` + +You can build on the default presets, or create your own. Here's an example of a custom preset that builds on `subdomain-enum`: + +```yaml title="my_subdomains.yml" +description: Do a subdomain enumeration + basic web scan + nuclei + +target: + - evilcorp.com + +include: + # include these default presets + - subdomain-enum + - web-basic + +modules: + # enable nuclei in addition to the other modules + - nuclei + +config: + # global config options + web: + http_proxy: http://127.0.0.1:8080 + # module config options + modules: + # api keys + securitytrails: + api_key: 21a270d5f59c9b05813a72bb41707266 + virustotal: + # multiple API keys are allowed + api_key: + - 4f41243847da693a4f356c0486114bc6 + - 5bc6ed268ab6488270e496d3183a1a27 +``` + +To execute your custom preset, you do: + +```bash +bbot -p ./my_subdomains.yml +``` + +## Preset Load Order + +When you enable multiple presets, the order matters. In the case of a conflict, the last preset will always win. This means, for example, if you have a custom preset called `my_spider` that sets `web.spider_distance` to 1: + +```yaml title="my_spider.yml" +config: + web: + spider_distance: 1 +``` + +...and you enable it alongside the default `spider` preset in this order: + +```bash +bbot -t evilcorp.com -p ./my_spider.yml spider +``` + +...the value of `web.spider_distance` will be overridden by `spider`. To ensure this doesn't happen, you would want to switch the order of the presets: + +```bash +bbot -t evilcorp.com -p spider ./my_spider.yml +``` + +## Validating Presets + +To make sure BBOT is configured the way you expect, you can always check the `--current-preset` to show the final version of the config that will be used when BBOT executes: + +```bash +# verify the preset is what you want +bbot -p ./mypreset.yml --current-preset +``` + +## Advanced Usage + +BBOT Presets support advanced features like environment variable substitution and custom conditions. 
### Custom Modules

If you want to use a custom BBOT `.py` module, you can either move it into `bbot/modules` where BBOT is installed, or add its parent folder to `module_dirs` like so:

```yaml title="custom_modules.yml"
# load extra BBOT modules from this location
module_dirs:
  - /home/user/custom_modules
```

### Environment Variables

You can insert environment variables into your preset like this: `${env:<variable>}`:

```yaml title="my_nuclei.yml"
description: Do a nuclei scan

target:
  - evilcorp.com

modules:
  - nuclei

config:
  modules:
    nuclei:
      # allow the nuclei templates to be specified at runtime via an environment variable
      tags: ${env:NUCLEI_TAGS}
```

```bash
NUCLEI_TAGS=apache,nginx bbot -p ./my_nuclei.yml
```

### Conditions

Sometimes, you might need to add custom logic to a preset. BBOT supports this via `conditions`. The `conditions` attribute allows you to specify a list of custom conditions that will be evaluated before the scan starts. This is useful for performing last-minute sanity checks, or for changing the behavior of the scan based on custom criteria.

```yaml title="my_preset.yml"
description: Abort if nuclei templates aren't specified

modules:
  - nuclei

conditions:
  - |
    {% if not config.modules.nuclei.templates %}
      {{ abort("Don't forget to set your templates!") }}
    {% endif %}
```

```yaml title="my_preset.yml"
description: Enable ffuf but only when the web spider isn't also enabled

modules:
  - ffuf

conditions:
  - |
    {% if config.web.spider_distance > 0 and config.web.spider_depth > 0 %}
      {{ warn("Disabling ffuf because the web spider is enabled") }}
      {{ preset.exclude_module("ffuf") }}
    {% endif %}
```

Conditions use [Jinja](https://palletsprojects.com/p/jinja/), which means they can contain Python code. They run inside a sandboxed environment which has access to the following variables:

- `preset` - the current preset object
- `config` - the current config (an alias for `preset.config`)
- `warn(message)` - display a custom warning message to the user
- `abort(message)` - abort the scan with an optional message

If you aren't able to accomplish what you want with conditions, or if you need access to a new variable/function, please let us know on [GitHub](https://github.com/blacklanternsecurity/bbot/issues/new/choose). diff --git a/docs/scanning/presets_list.md b/docs/scanning/presets_list.md new file mode 100644 index 0000000000..f28f8feb24 --- /dev/null +++ b/docs/scanning/presets_list.md @@ -0,0 +1,977 @@ +Below is a list of every default BBOT preset, including its YAML.


## **baddns-intense**

Run all baddns modules and submodules.

??? note "`baddns-intense.yml`"
    ```yaml title="~/.bbot/presets/baddns-intense.yml"
    description: Run all baddns modules and submodules.


    modules:
      - baddns
      - baddns_zone
      - baddns_direct

    config:
      modules:
        baddns:
          enabled_submodules: [CNAME,references,MX,NS,TXT]
    ```



Modules: [4]("`baddns_direct`, `baddns_zone`, `baddns`, `httpx`")

## **cloud-enum**

Enumerate cloud resources such as storage buckets, etc.

??? note "`cloud-enum.yml`"
    ```yaml title="~/.bbot/presets/cloud-enum.yml"
    description: Enumerate cloud resources such as storage buckets, etc.
+ + include: + - subdomain-enum + + flags: + - cloud-enum + ``` + + + +Modules: [59]("`anubisdb`, `asn`, `azure_realm`, `azure_tenant`, `baddns_direct`, `baddns_zone`, `baddns`, `bevigil`, `binaryedge`, `bucket_amazon`, `bucket_azure`, `bucket_digitalocean`, `bucket_file_enum`, `bucket_firebase`, `bucket_google`, `bufferoverrun`, `builtwith`, `c99`, `censys`, `certspotter`, `chaos`, `crt`, `digitorus`, `dnsbimi`, `dnsbrute_mutations`, `dnsbrute`, `dnscaa`, `dnscommonsrv`, `dnsdumpster`, `dnstlsrpt`, `fullhunt`, `github_codesearch`, `github_org`, `hackertarget`, `httpx`, `hunterio`, `internetdb`, `ipneighbor`, `leakix`, `myssl`, `oauth`, `otx`, `passivetotal`, `postman_download`, `postman`, `rapiddns`, `securitytrails`, `securitytxt`, `shodan_dns`, `sitedossier`, `social`, `sslcert`, `subdomaincenter`, `subdomainradar`, `trickest`, `urlscan`, `virustotal`, `wayback`, `zoomeye`") + +## **code-enum** + +Enumerate Git repositories, Docker images, etc. + +??? note "`code-enum.yml`" + ```yaml title="~/.bbot/presets/code-enum.yml" + description: Enumerate Git repositories, Docker images, etc. + + flags: + - code-enum + ``` + + + +Modules: [18]("`apkpure`, `code_repository`, `docker_pull`, `dockerhub`, `git_clone`, `git`, `gitdumper`, `github_codesearch`, `github_org`, `github_workflows`, `gitlab`, `google_playstore`, `httpx`, `jadx`, `postman_download`, `postman`, `social`, `trufflehog`") + +## **dirbust-heavy** + +Recursive web directory brute-force (aggressive) + +??? note "`dirbust-heavy.yml`" + ```yaml title="~/.bbot/presets/web/dirbust-heavy.yml" + description: Recursive web directory brute-force (aggressive) + + include: + - spider + + flags: + - iis-shortnames + + modules: + - ffuf + - wayback + + config: + modules: + iis_shortnames: + # we exploit the shortnames vulnerability to produce URL_HINTs which are consumed by ffuf_shortnames + detect_only: False + ffuf: + depth: 3 + lines: 5000 + extensions: + - php + - asp + - aspx + - ashx + - asmx + - jsp + - jspx + - cfm + - zip + - conf + - config + - xml + - json + - yml + - yaml + # emit URLs from wayback + wayback: + urls: True + ``` + +Category: web + +Modules: [5]("`ffuf_shortnames`, `ffuf`, `httpx`, `iis_shortnames`, `wayback`") + +## **dirbust-heavy** + +Recursive web directory brute-force (aggressive) + +??? note "`dirbust-heavy.yml`" + ```yaml title="~/.bbot/presets/dirbust-heavy.yml" + description: Recursive web directory brute-force (aggressive) + + include: + - spider + + flags: + - iis-shortnames + + modules: + - ffuf + - wayback + + config: + modules: + iis_shortnames: + # we exploit the shortnames vulnerability to produce URL_HINTs which are consumed by ffuf_shortnames + detect_only: False + ffuf: + depth: 3 + lines: 5000 + extensions: + - php + - asp + - aspx + - ashx + - asmx + - jsp + - jspx + - cfm + - zip + - conf + - config + - xml + - json + - yml + - yaml + # emit URLs from wayback + wayback: + urls: True + ``` + + + +Modules: [5]("`ffuf_shortnames`, `ffuf`, `httpx`, `iis_shortnames`, `wayback`") + +## **dirbust-light** + +Basic web directory brute-force (surface-level directories only) + +??? 
note "`dirbust-light.yml`" + ```yaml title="~/.bbot/presets/web/dirbust-light.yml" + description: Basic web directory brute-force (surface-level directories only) + + include: + - iis-shortnames + + modules: + - ffuf + + config: + modules: + ffuf: + # wordlist size = 1000 + lines: 1000 + ``` + +Category: web + +Modules: [4]("`ffuf_shortnames`, `ffuf`, `httpx`, `iis_shortnames`") + +## **dirbust-light** + +Basic web directory brute-force (surface-level directories only) + +??? note "`dirbust-light.yml`" + ```yaml title="~/.bbot/presets/dirbust-light.yml" + description: Basic web directory brute-force (surface-level directories only) + + include: + - iis-shortnames + + modules: + - ffuf + + config: + modules: + ffuf: + # wordlist size = 1000 + lines: 1000 + ``` + + + +Modules: [4]("`ffuf_shortnames`, `ffuf`, `httpx`, `iis_shortnames`") + +## **dotnet-audit** + +Comprehensive scan for all IIS/.NET specific modules and module settings + +??? note "`dotnet-audit.yml`" + ```yaml title="~/.bbot/presets/web/dotnet-audit.yml" + description: Comprehensive scan for all IIS/.NET specific modules and module settings + + + include: + - iis-shortnames + + modules: + - httpx + - badsecrets + - ffuf_shortnames + - ffuf + - telerik + - ajaxpro + - dotnetnuke + + config: + modules: + ffuf: + extensions: asp,aspx,ashx,asmx,ascx + extensions_ignore_case: True + ffuf_shortnames: + find_subwords: True + telerik: + exploit_RAU_crypto: True + include_subdirs: True # Run against every directory, not the default first received URL per-host + ``` + +Category: web + +Modules: [8]("`ajaxpro`, `badsecrets`, `dotnetnuke`, `ffuf_shortnames`, `ffuf`, `httpx`, `iis_shortnames`, `telerik`") + +## **dotnet-audit** + +Comprehensive scan for all IIS/.NET specific modules and module settings + +??? note "`dotnet-audit.yml`" + ```yaml title="~/.bbot/presets/dotnet-audit.yml" + description: Comprehensive scan for all IIS/.NET specific modules and module settings + + + include: + - iis-shortnames + + modules: + - httpx + - badsecrets + - ffuf_shortnames + - ffuf + - telerik + - ajaxpro + - dotnetnuke + + config: + modules: + ffuf: + extensions: asp,aspx,ashx,asmx,ascx + extensions_ignore_case: True + ffuf_shortnames: + find_subwords: True + telerik: + exploit_RAU_crypto: True + include_subdirs: True # Run against every directory, not the default first received URL per-host + ``` + + + +Modules: [8]("`ajaxpro`, `badsecrets`, `dotnetnuke`, `ffuf_shortnames`, `ffuf`, `httpx`, `iis_shortnames`, `telerik`") + +## **email-enum** + +Enumerate email addresses from APIs, web crawling, etc. + +??? note "`email-enum.yml`" + ```yaml title="~/.bbot/presets/email-enum.yml" + description: Enumerate email addresses from APIs, web crawling, etc. + + flags: + - email-enum + + output_modules: + - emails + ``` + + + +Modules: [8]("`dehashed`, `dnscaa`, `dnstlsrpt`, `emailformat`, `hunterio`, `pgp`, `skymem`, `sslcert`") + +## **fast** + +Scan only the provided targets as fast as possible - no extra discovery + +??? note "`fast.yml`" + ```yaml title="~/.bbot/presets/fast.yml" + description: Scan only the provided targets as fast as possible - no extra discovery + + exclude_modules: + - excavate + + config: + # only scan the exact targets specified + scope: + strict: true + # speed up dns resolution by doing A/AAAA only - not MX/NS/SRV/etc + dns: + minimal: true + # essential speculation only + modules: + speculate: + essential_only: true + ``` + + + +Modules: [0]("") + +## **iis-shortnames** + +Recursively enumerate IIS shortnames + +??? 
note "`iis-shortnames.yml`" + ```yaml title="~/.bbot/presets/web/iis-shortnames.yml" + description: Recursively enumerate IIS shortnames + + flags: + - iis-shortnames + + config: + modules: + iis_shortnames: + # exploit the vulnerability + detect_only: false + ``` + +Category: web + +Modules: [3]("`ffuf_shortnames`, `httpx`, `iis_shortnames`") + +## **iis-shortnames** + +Recursively enumerate IIS shortnames + +??? note "`iis-shortnames.yml`" + ```yaml title="~/.bbot/presets/iis-shortnames.yml" + description: Recursively enumerate IIS shortnames + + flags: + - iis-shortnames + + config: + modules: + iis_shortnames: + # exploit the vulnerability + detect_only: false + ``` + + + +Modules: [3]("`ffuf_shortnames`, `httpx`, `iis_shortnames`") + +## **kitchen-sink** + +Everything everywhere all at once + +??? note "`kitchen-sink.yml`" + ```yaml title="~/.bbot/presets/kitchen-sink.yml" + description: Everything everywhere all at once + + include: + - subdomain-enum + - cloud-enum + - code-enum + - email-enum + - spider + - web-basic + - paramminer + - dirbust-light + - web-screenshots + - baddns-intense + + config: + modules: + baddns: + enable_references: True + ``` + + + +Modules: [87]("`anubisdb`, `apkpure`, `asn`, `azure_realm`, `azure_tenant`, `baddns_direct`, `baddns_zone`, `baddns`, `badsecrets`, `bevigil`, `binaryedge`, `bucket_amazon`, `bucket_azure`, `bucket_digitalocean`, `bucket_file_enum`, `bucket_firebase`, `bucket_google`, `bufferoverrun`, `builtwith`, `c99`, `censys`, `certspotter`, `chaos`, `code_repository`, `crt`, `dehashed`, `digitorus`, `dnsbimi`, `dnsbrute_mutations`, `dnsbrute`, `dnscaa`, `dnscommonsrv`, `dnsdumpster`, `dnstlsrpt`, `docker_pull`, `dockerhub`, `emailformat`, `ffuf_shortnames`, `ffuf`, `filedownload`, `fullhunt`, `git_clone`, `git`, `gitdumper`, `github_codesearch`, `github_org`, `github_workflows`, `gitlab`, `google_playstore`, `gowitness`, `hackertarget`, `httpx`, `hunterio`, `iis_shortnames`, `internetdb`, `ipneighbor`, `jadx`, `leakix`, `myssl`, `ntlm`, `oauth`, `otx`, `paramminer_cookies`, `paramminer_getparams`, `paramminer_headers`, `passivetotal`, `pgp`, `postman_download`, `postman`, `rapiddns`, `robots`, `securitytrails`, `securitytxt`, `shodan_dns`, `sitedossier`, `skymem`, `social`, `sslcert`, `subdomaincenter`, `subdomainradar`, `trickest`, `trufflehog`, `urlscan`, `virustotal`, `wappalyzer`, `wayback`, `zoomeye`") + +## **nuclei** + +Run nuclei scans against all discovered targets + +??? note "`nuclei.yml`" + ```yaml title="~/.bbot/presets/nuclei/nuclei.yml" + description: Run nuclei scans against all discovered targets + + modules: + - httpx + - nuclei + - portfilter + + config: + modules: + nuclei: + directory_only: True # Do not run nuclei on individual non-directory URLs + + + conditions: + - | + {% if config.web.spider_distance != 0 %} + {{ warn("Running nuclei with spider enabled is generally not recommended. Consider removing 'spider' preset.") }} + {% endif %} + + + + # Additional Examples: + + # Slowing Down Scan + + #config: + # modules: + # nuclei: + # ratelimit: 10 + # concurrency: 5 + + + + + ``` + +Category: nuclei + +Modules: [3]("`httpx`, `nuclei`, `portfilter`") + +## **nuclei** + +Run nuclei scans against all discovered targets + +??? 
note "`nuclei.yml`" + ```yaml title="~/.bbot/presets/nuclei.yml" + description: Run nuclei scans against all discovered targets + + modules: + - httpx + - nuclei + - portfilter + + config: + modules: + nuclei: + directory_only: True # Do not run nuclei on individual non-directory URLs + + + conditions: + - | + {% if config.web.spider_distance != 0 %} + {{ warn("Running nuclei with spider enabled is generally not recommended. Consider removing 'spider' preset.") }} + {% endif %} + + + + # Additional Examples: + + # Slowing Down Scan + + #config: + # modules: + # nuclei: + # ratelimit: 10 + # concurrency: 5 + + + + + ``` + + + +Modules: [3]("`httpx`, `nuclei`, `portfilter`") + +## **nuclei-budget** + +Run nuclei scans against all discovered targets, using budget mode to look for low hanging fruit with greatly reduced number of requests + +??? note "`nuclei-budget.yml`" + ```yaml title="~/.bbot/presets/nuclei/nuclei-budget.yml" + description: Run nuclei scans against all discovered targets, using budget mode to look for low hanging fruit with greatly reduced number of requests + + modules: + - httpx + - nuclei + - portfilter + + config: + modules: + nuclei: + mode: budget + budget: 10 + directory_only: true # Do not run nuclei on individual non-directory URLs + + conditions: + - | + {% if config.web.spider_distance != 0 %} + {{ warn("Running nuclei with spider enabled is generally not recommended. Consider removing 'spider' preset.") }} + {% endif %} + ``` + +Category: nuclei + +Modules: [3]("`httpx`, `nuclei`, `portfilter`") + +## **nuclei-budget** + +Run nuclei scans against all discovered targets, using budget mode to look for low hanging fruit with greatly reduced number of requests + +??? note "`nuclei-budget.yml`" + ```yaml title="~/.bbot/presets/nuclei-budget.yml" + description: Run nuclei scans against all discovered targets, using budget mode to look for low hanging fruit with greatly reduced number of requests + + modules: + - httpx + - nuclei + - portfilter + + config: + modules: + nuclei: + mode: budget + budget: 10 + directory_only: true # Do not run nuclei on individual non-directory URLs + + conditions: + - | + {% if config.web.spider_distance != 0 %} + {{ warn("Running nuclei with spider enabled is generally not recommended. Consider removing 'spider' preset.") }} + {% endif %} + ``` + + + +Modules: [3]("`httpx`, `nuclei`, `portfilter`") + +## **nuclei-intense** + +Run nuclei scans against all discovered targets, allowing for spidering, against ALL URLs, and with additional discovery modules. + +??? note "`nuclei-intense.yml`" + ```yaml title="~/.bbot/presets/nuclei/nuclei-intense.yml" + description: Run nuclei scans against all discovered targets, allowing for spidering, against ALL URLs, and with additional discovery modules. + + modules: + - httpx + - nuclei + - robots + - urlscan + - portfilter + - wayback + + config: + modules: + nuclei: + directory_only: False # Will run nuclei on ALL discovered URLs - Be careful! + wayback: + urls: true + + conditions: + - | + {% if config.web.spider_distance == 0 and config.modules.nuclei.directory_only == False %} + {{ warn("The 'nuclei-intense' preset turns the 'directory_only' limitation off on the nuclei module. 
To make the best use of this, you may want to enable spidering with 'spider' or 'spider-intense' preset.") }} + {% endif %} + + + # Example for also running a dirbust + + #include: + # - dirbust-light + ``` + +Category: nuclei + +Modules: [6]("`httpx`, `nuclei`, `portfilter`, `robots`, `urlscan`, `wayback`") + +## **nuclei-intense** + +Run nuclei scans against all discovered targets, allowing for spidering, against ALL URLs, and with additional discovery modules. + +??? note "`nuclei-intense.yml`" + ```yaml title="~/.bbot/presets/nuclei-intense.yml" + description: Run nuclei scans against all discovered targets, allowing for spidering, against ALL URLs, and with additional discovery modules. + + modules: + - httpx + - nuclei + - robots + - urlscan + - portfilter + - wayback + + config: + modules: + nuclei: + directory_only: False # Will run nuclei on ALL discovered URLs - Be careful! + wayback: + urls: true + + conditions: + - | + {% if config.web.spider_distance == 0 and config.modules.nuclei.directory_only == False %} + {{ warn("The 'nuclei-intense' preset turns the 'directory_only' limitation off on the nuclei module. To make the best use of this, you may want to enable spidering with 'spider' or 'spider-intense' preset.") }} + {% endif %} + + + # Example for also running a dirbust + + #include: + # - dirbust-light + ``` + + + +Modules: [6]("`httpx`, `nuclei`, `portfilter`, `robots`, `urlscan`, `wayback`") + +## **nuclei-technology** + +Run nuclei scans against all discovered targets, running templates which match discovered technologies + +??? note "`nuclei-technology.yml`" + ```yaml title="~/.bbot/presets/nuclei/nuclei-technology.yml" + description: Run nuclei scans against all discovered targets, running templates which match discovered technologies + + modules: + - httpx + - nuclei + - portfilter + + config: + modules: + nuclei: + mode: technology + directory_only: True # Do not run nuclei on individual non-directory URLs. This is less unsafe to disable with technology mode. + + conditions: + - | + {% if config.web.spider_distance != 0 %} + {{ warn("Running nuclei with spider enabled is generally not recommended. Consider removing 'spider' preset.") }} + {% endif %} + + # Example for also running a dirbust + + #include: + # - dirbust-light + ``` + +Category: nuclei + +Modules: [3]("`httpx`, `nuclei`, `portfilter`") + +## **nuclei-technology** + +Run nuclei scans against all discovered targets, running templates which match discovered technologies + +??? note "`nuclei-technology.yml`" + ```yaml title="~/.bbot/presets/nuclei-technology.yml" + description: Run nuclei scans against all discovered targets, running templates which match discovered technologies + + modules: + - httpx + - nuclei + - portfilter + + config: + modules: + nuclei: + mode: technology + directory_only: True # Do not run nuclei on individual non-directory URLs. This is less unsafe to disable with technology mode. + + conditions: + - | + {% if config.web.spider_distance != 0 %} + {{ warn("Running nuclei with spider enabled is generally not recommended. Consider removing 'spider' preset.") }} + {% endif %} + + # Example for also running a dirbust + + #include: + # - dirbust-light + ``` + + + +Modules: [3]("`httpx`, `nuclei`, `portfilter`") + +## **paramminer** + +Discover new web parameters via brute-force + +??? 
note "`paramminer.yml`" + ```yaml title="~/.bbot/presets/web/paramminer.yml" + description: Discover new web parameters via brute-force + + flags: + - web-paramminer + + modules: + - httpx + + config: + web: + spider_distance: 1 + spider_depth: 4 + ``` + +Category: web + +Modules: [4]("`httpx`, `paramminer_cookies`, `paramminer_getparams`, `paramminer_headers`") + +## **paramminer** + +Discover new web parameters via brute-force + +??? note "`paramminer.yml`" + ```yaml title="~/.bbot/presets/paramminer.yml" + description: Discover new web parameters via brute-force + + flags: + - web-paramminer + + modules: + - httpx + + config: + web: + spider_distance: 1 + spider_depth: 4 + ``` + + + +Modules: [4]("`httpx`, `paramminer_cookies`, `paramminer_getparams`, `paramminer_headers`") + +## **spider** + +Recursive web spider + +??? note "`spider.yml`" + ```yaml title="~/.bbot/presets/spider.yml" + description: Recursive web spider + + modules: + - httpx + + blacklist: + # Prevent spider from invalidating sessions by logging out + - "RE:/.*(sign|log)[_-]?out" + + config: + web: + # how many links to follow in a row + spider_distance: 2 + # don't follow links whose directory depth is higher than 4 + spider_depth: 4 + # maximum number of links to follow per page + spider_links_per_page: 25 + ``` + + + +Modules: [1]("`httpx`") + +## **spider-intense** + +Recursive web spider with more aggressive settings + +??? note "`spider-intense.yml`" + ```yaml title="~/.bbot/presets/spider-intense.yml" + description: Recursive web spider with more aggressive settings + + include: + - spider + + config: + web: + # how many links to follow in a row + spider_distance: 4 + # don't follow links whose directory depth is higher than 6 + spider_depth: 6 + # maximum number of links to follow per page + spider_links_per_page: 50 + ``` + + + +Modules: [1]("`httpx`") + +## **subdomain-enum** + +Enumerate subdomains via APIs, brute-force + +??? note "`subdomain-enum.yml`" + ```yaml title="~/.bbot/presets/subdomain-enum.yml" + description: Enumerate subdomains via APIs, brute-force + + flags: + # enable every module with the subdomain-enum flag + - subdomain-enum + + output_modules: + # output unique subdomains to TXT file + - subdomains + + config: + dns: + threads: 25 + brute_threads: 1000 + # put your API keys here + # modules: + # github: + # api_key: "" + # chaos: + # api_key: "" + # securitytrails: + # api_key: "" + ``` + + + +Modules: [52]("`anubisdb`, `asn`, `azure_realm`, `azure_tenant`, `baddns_direct`, `baddns_zone`, `bevigil`, `binaryedge`, `bufferoverrun`, `builtwith`, `c99`, `censys`, `certspotter`, `chaos`, `crt`, `digitorus`, `dnsbimi`, `dnsbrute_mutations`, `dnsbrute`, `dnscaa`, `dnscommonsrv`, `dnsdumpster`, `dnstlsrpt`, `fullhunt`, `github_codesearch`, `github_org`, `hackertarget`, `httpx`, `hunterio`, `internetdb`, `ipneighbor`, `leakix`, `myssl`, `oauth`, `otx`, `passivetotal`, `postman_download`, `postman`, `rapiddns`, `securitytrails`, `securitytxt`, `shodan_dns`, `sitedossier`, `social`, `sslcert`, `subdomaincenter`, `subdomainradar`, `trickest`, `urlscan`, `virustotal`, `wayback`, `zoomeye`") + +## **tech-detect** + +Detect technologies via Wappalyzer, Nuclei, and FingerprintX + +??? 
+
+## **tech-detect**
+
+Detect technologies via Wappalyzer, Nuclei, and FingerprintX
+
+??? note "`tech-detect.yml`"
+    ```yaml title="~/.bbot/presets/tech-detect.yml"
+    description: Detect technologies via Wappalyzer, Nuclei, and FingerprintX
+
+    modules:
+      - nuclei
+      - wappalyzer
+      - fingerprintx
+
+    config:
+      modules:
+        nuclei:
+          tags: tech
+    ```
+
+
+Modules: [4]("`fingerprintx`, `httpx`, `nuclei`, `wappalyzer`")
+
+## **web-basic**
+
+Quick web scan
+
+??? note "`web-basic.yml`"
+    ```yaml title="~/.bbot/presets/web-basic.yml"
+    description: Quick web scan
+
+    include:
+      - iis-shortnames
+
+    flags:
+      - web-basic
+    ```
+
+
+Modules: [18]("`azure_realm`, `baddns`, `badsecrets`, `bucket_amazon`, `bucket_azure`, `bucket_firebase`, `bucket_google`, `ffuf_shortnames`, `filedownload`, `git`, `httpx`, `iis_shortnames`, `ntlm`, `oauth`, `robots`, `securitytxt`, `sslcert`, `wappalyzer`")
+
+## **web-screenshots**
+
+Take screenshots of webpages
+
+??? note "`web-screenshots.yml`"
+    ```yaml title="~/.bbot/presets/web-screenshots.yml"
+    description: Take screenshots of webpages
+
+    flags:
+      - web-screenshots
+
+    config:
+      modules:
+        gowitness:
+          resolution_x: 1440
+          resolution_y: 900
+          # folder to output web screenshots (default is inside ~/.bbot/scans/scan_name)
+          output_path: ""
+          # whether to take screenshots of social media pages
+          social: True
+    ```
+
+
+Modules: [3]("`gowitness`, `httpx`, `social`")
+
+## **web-thorough**
+
+Aggressive web scan
+
+??? note "`web-thorough.yml`"
+    ```yaml title="~/.bbot/presets/web-thorough.yml"
+    description: Aggressive web scan
+
+    include:
+      # include the web-basic preset
+      - web-basic
+
+    flags:
+      - web-thorough
+    ```
+
+
+Modules: [29]("`ajaxpro`, `azure_realm`, `baddns`, `badsecrets`, `bucket_amazon`, `bucket_azure`, `bucket_digitalocean`, `bucket_firebase`, `bucket_google`, `bypass403`, `dastardly`, `dotnetnuke`, `ffuf_shortnames`, `filedownload`, `generic_ssrf`, `git`, `host_header`, `httpx`, `hunt`, `iis_shortnames`, `ntlm`, `oauth`, `robots`, `securitytxt`, `smuggler`, `sslcert`, `telerik`, `url_manipulation`, `wappalyzer`")
+
+
+## Table of Default Presets
+
+Here is the same data, but in a table:
+
+
+| Preset            | Category | Description                                                                                                                              | # Modules | Modules |
+|-------------------|----------|------------------------------------------------------------------------------------------------------------------------------------------|-----------|---------|
+| baddns-intense    |          | Run all baddns modules and submodules.                                                                                                     | 4         | baddns, baddns_direct, baddns_zone, httpx |
+| cloud-enum        |          | Enumerate cloud resources such as storage buckets, etc.                                                                                    | 59        | anubisdb, asn, azure_realm, azure_tenant, baddns, baddns_direct, baddns_zone, bevigil, binaryedge, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_file_enum, bucket_firebase, bucket_google, bufferoverrun, builtwith, c99, censys, certspotter, chaos, crt, digitorus, dnsbimi, dnsbrute, dnsbrute_mutations, dnscaa, dnscommonsrv, dnsdumpster, dnstlsrpt, fullhunt, github_codesearch, github_org, hackertarget, httpx, hunterio, internetdb, ipneighbor, leakix, myssl, oauth, otx, passivetotal, postman, postman_download, rapiddns, securitytrails, securitytxt, shodan_dns, sitedossier, social, sslcert, subdomaincenter, subdomainradar, trickest, urlscan, virustotal, wayback, zoomeye |
+| code-enum         |          | Enumerate Git repositories, Docker images, etc.                                                                                            | 18        | apkpure, code_repository, docker_pull, dockerhub, git, git_clone, gitdumper, github_codesearch, github_org, github_workflows, gitlab, google_playstore, httpx, jadx, postman, postman_download, social, trufflehog |
+| dirbust-heavy     | web      | Recursive web directory brute-force (aggressive)                                                                                           | 5         | ffuf, ffuf_shortnames, httpx, iis_shortnames, wayback |
+| dirbust-light     | web      | Basic web directory brute-force (surface-level directories only)                                                                           | 4         | ffuf, ffuf_shortnames, httpx, iis_shortnames |
+| dotnet-audit      | web      | Comprehensive scan for all IIS/.NET specific modules and module settings                                                                   | 8         | ajaxpro, badsecrets, dotnetnuke, ffuf, ffuf_shortnames, httpx, iis_shortnames, telerik |
+| email-enum        |          | Enumerate email addresses from APIs, web crawling, etc.                                                                                    | 8         | dehashed, dnscaa, dnstlsrpt, emailformat, hunterio, pgp, skymem, sslcert |
+| fast              |          | Scan only the provided targets as fast as possible - no extra discovery                                                                    | 0         | |
+| iis-shortnames    | web      | Recursively enumerate IIS shortnames                                                                                                       | 3         | ffuf_shortnames, httpx, iis_shortnames |
+| kitchen-sink      |          | Everything everywhere all at once                                                                                                          | 87        | anubisdb, apkpure, asn, azure_realm, azure_tenant, baddns, baddns_direct, baddns_zone, badsecrets, bevigil, binaryedge, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_file_enum, bucket_firebase, bucket_google, bufferoverrun, builtwith, c99, censys, certspotter, chaos, code_repository, crt, dehashed, digitorus, dnsbimi, dnsbrute, dnsbrute_mutations, dnscaa, dnscommonsrv, dnsdumpster, dnstlsrpt, docker_pull, dockerhub, emailformat, ffuf, ffuf_shortnames, filedownload, fullhunt, git, git_clone, gitdumper, github_codesearch, github_org, github_workflows, gitlab, google_playstore, gowitness, hackertarget, httpx, hunterio, iis_shortnames, internetdb, ipneighbor, jadx, leakix, myssl, ntlm, oauth, otx, paramminer_cookies, paramminer_getparams, paramminer_headers, passivetotal, pgp, postman, postman_download, rapiddns, robots, securitytrails, securitytxt, shodan_dns, sitedossier, skymem, social, sslcert, subdomaincenter, subdomainradar, trickest, trufflehog, urlscan, virustotal, wappalyzer, wayback, zoomeye |
+| nuclei            | nuclei   | Run nuclei scans against all discovered targets                                                                                            | 3         | httpx, nuclei, portfilter |
+| nuclei-budget     | nuclei   | Run nuclei scans against all discovered targets, using budget mode to look for low hanging fruit with greatly reduced number of requests   | 3         | httpx, nuclei, portfilter |
+| nuclei-intense    | nuclei   | Run nuclei scans against all discovered targets, allowing for spidering, against ALL URLs, and with additional discovery modules.          | 6         | httpx, nuclei, portfilter, robots, urlscan, wayback |
+| nuclei-technology | nuclei   | Run nuclei scans against all discovered targets, running templates which match discovered technologies                                     | 3         | httpx, nuclei, portfilter |
+| paramminer        | web      | Discover new web parameters via brute-force                                                                                                | 4         | httpx, paramminer_cookies, paramminer_getparams, paramminer_headers |
+| spider            |          | Recursive web spider                                                                                                                       | 1         | httpx |
+| spider-intense    |          | Recursive web spider with more aggressive settings                                                                                         | 1         | httpx |
+| subdomain-enum    |          | Enumerate subdomains via APIs, brute-force                                                                                                 | 52        | anubisdb, asn, azure_realm, azure_tenant, baddns_direct, baddns_zone, bevigil, binaryedge, bufferoverrun, builtwith, c99, censys, certspotter, chaos, crt, digitorus, dnsbimi, dnsbrute, dnsbrute_mutations, dnscaa, dnscommonsrv, dnsdumpster, dnstlsrpt, fullhunt, github_codesearch, github_org, hackertarget, httpx, hunterio, internetdb, ipneighbor, leakix, myssl, oauth, otx, passivetotal, postman, postman_download, rapiddns, securitytrails, securitytxt, shodan_dns, sitedossier, social, sslcert, subdomaincenter, subdomainradar, trickest, urlscan, virustotal, wayback, zoomeye |
+| tech-detect       |          | Detect technologies via Wappalyzer, Nuclei, and FingerprintX                                                                               | 4         | fingerprintx, httpx, nuclei, wappalyzer |
+| web-basic         |          | Quick web scan                                                                                                                             | 18        | azure_realm, baddns, badsecrets, bucket_amazon, bucket_azure, bucket_firebase, bucket_google, ffuf_shortnames, filedownload, git, httpx, iis_shortnames, ntlm, oauth, robots, securitytxt, sslcert, wappalyzer |
+| web-screenshots   |          | Take screenshots of webpages                                                                                                               | 3         | gowitness, httpx, social |
+| web-thorough      |          | Aggressive web scan                                                                                                                        | 29        | ajaxpro, azure_realm, baddns, badsecrets, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, bypass403, dastardly, dotnetnuke, ffuf_shortnames, filedownload, generic_ssrf, git, host_header, httpx, hunt, iis_shortnames, ntlm, oauth, robots, securitytxt, smuggler, sslcert, telerik, url_manipulation, wappalyzer |
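+
+Any preset in this table can be loaded at scan time with `-p`, the same flag used with preset files elsewhere in these docs. A minimal sketch, using the docs' `evilcorp.com` placeholder target:
+
+```bash
+# load the subdomain-enum preset against a target
+bbot -t evilcorp.com -p subdomain-enum
+```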
+
diff --git a/docs/scanning/tips_and_tricks.md b/docs/scanning/tips_and_tricks.md
new file mode 100644
index 0000000000..4fbe9e0363
--- /dev/null
+++ b/docs/scanning/tips_and_tricks.md
@@ -0,0 +1,184 @@
+# Tips and Tricks
+
+Below are some helpful tricks for your adventures.
+
+## Change Verbosity During Scan
+Press enter during a BBOT scan to change the log level. This will allow you to see debugging messages, etc.
+
+
+## Kill Individual Module During Scan
+Sometimes a certain module can get stuck or slow down the scan. If this happens and you want to kill it, just type "`kill <module>`" in the terminal and press enter. This will kill and disable the module for the rest of the scan.
+
+You can also kill multiple modules at a time by specifying them in a space or comma-separated list:
+
+```bash
+kill httpx sslcert
+```
+
+
+## Common Config Changes
+
+### Speed Up Slow Modules
+
+BBOT modules can be parallelized so that more than one instance runs at a time. By default, many modules are already set to reasonable defaults:
+
+```python
+class baddns(BaseModule):
+    module_threads = 8
+```
+
+To override this, you can set a module's `module_threads` in the config:
+
+```bash
+# increase baddns threads to 20
+bbot -t evilcorp.com -m baddns -c modules.baddns.module_threads=20
+```
+
+### Boost DNS Brute-force Speed
+
+If you have a fast internet connection or are running BBOT from a cloud VM, you can speed up subdomain enumeration by cranking the threads for `massdns`. The default is `1000`, which is about 1MB/s of DNS traffic:
+
+```bash
+# massdns with 5000 resolvers, about 5MB/s
+bbot -t evilcorp.com -f subdomain-enum -c dns.brute_threads=5000
+```
+
+### Web Spider
+
+The web spider is great for finding juicy data like subdomains, email addresses, and javascript secrets buried in webpages. However, since it can lengthen the duration of a scan, it's disabled by default. To enable the web spider, you must increase the value of `web.spider_distance`.
+
+The web spider is controlled with three config values:
+
+- `web.spider_depth` (default: `1`): the maximum directory depth allowed. This is to prevent the spider from delving too deep into a website.
+- `web.spider_distance` (`0` == all spidering disabled, default: `0`): the maximum number of links that can be followed in a row. This is designed to limit the spider in cases where `web.spider_depth` fails (e.g. for an ecommerce website with thousands of base-level URLs).
+- `web.spider_links_per_page` (default: `25`): the maximum number of links per page that can be followed. This is designed to save you in cases where a single page has hundreds or thousands of links.
+
+Here is a typical example:
+
+```yaml title="spider.yml"
+config:
+  web:
+    spider_depth: 2
+    spider_distance: 2
+    spider_links_per_page: 25
+```
+
+```bash
+# run the web spider against www.evilcorp.com
+bbot -t www.evilcorp.com -m httpx -c spider.yml
+```
+
+You can also pair the web spider with subdomain enumeration:
+
+```bash
+# spider every subdomain of evilcorp.com
+bbot -t evilcorp.com -f subdomain-enum -c spider.yml
+```
+
+### Exclude CDNs from Port Scan
+
+Use `--exclude-cdns` to filter out unwanted open ports from CDNs and WAFs, e.g. Cloudflare. You can also customize the criteria by setting `modules.portfilter.cdn_tags`. By default, only open ports with `cdn-*` tags are filtered, but you can include all cloud providers by setting `cdn_tags` to `cdn,cloud`:
+
+```bash
+bbot -t evilcorp.com --exclude-cdns -c modules.portfilter.cdn_tags=cdn,cloud
+```
+
+Additionally, you can customize the allowed ports by setting `modules.portfilter.allowed_cdn_ports`:
+
+```bash
+bbot -t evilcorp.com --exclude-cdns -c modules.portfilter.allowed_cdn_ports=80,443,8443
+```
+
+Example preset:
+
+```yaml title="skip_cdns.yml"
+modules:
+  - portfilter
+
+config:
+  modules:
+    portfilter:
+      cdn_tags: cdn-,cloud-
+      allowed_cdn_ports: 80,443,8443
+```
+
+```bash
+bbot -t evilcorp.com -p skip_cdns.yml
+```
+
+### Ingest BBOT Data Into SIEM (Elastic, Splunk)
+
+If your goal is to run a BBOT scan and later feed its data into a SIEM such as Elastic, be sure to enable this option when scanning:
+
+```bash
+bbot -t evilcorp.com -c modules.json.siem_friendly=true
+```
+
+This ensures the `.data` event attribute is always the same type (a dictionary) by nesting it like so:
+```json
+{
+  "type": "DNS_NAME",
+  "data": {
+    "DNS_NAME": "blacklanternsecurity.com"
+  }
+}
+```
+
+### Custom HTTP Proxy
+
+Web pentesters may appreciate BBOT's ability to quickly populate Burp Suite site maps for all subdomains in a target. If your scan includes gowitness, this will capture the traffic as if you manually visited each website in your browser -- including auxiliary web resources and javascript API calls. To accomplish this, set the `web.http_proxy` config option like so:
+
+```bash
+# enumerate subdomains, take web screenshots, proxy through Burp
+bbot -t evilcorp.com -f subdomain-enum -m gowitness -c web.http_proxy=http://127.0.0.1:8080
+```
+
+### Display `HTTP_RESPONSE` Events
+
+BBOT's `httpx` module emits `HTTP_RESPONSE` events, but by default they're hidden from output. These events contain the full raw HTTP body along with headers, etc. If you want to see them, you can modify `omit_event_types` in the config:
+
+```yaml title="~/.bbot/config/bbot.yml"
+omit_event_types:
+  - URL_UNVERIFIED
+  # - HTTP_RESPONSE
+```
+
+### Display Out-of-scope Events
+By default, BBOT only shows in-scope events (with a few exceptions for things like storage buckets). If you want to see events that BBOT is emitting internally (such as for DNS resolution, etc.), you can increase `scope.report_distance` in the config or on the command line like so:
+~~~bash
+# display events up to scope distance 2 (default == 0)
+bbot -f subdomain-enum -t evilcorp.com -c scope.report_distance=2
+~~~
+
+### Speed Up Scans By Disabling DNS Resolution
+
+If you already have a list of discovered targets (e.g. URLs), you can speed up the scan by skipping BBOT's DNS resolution. You can do this by setting `dns.disable` to `true`:
+
+~~~bash
+# completely disable DNS resolution
+bbot -m httpx gowitness wappalyzer -t urls.txt -c dns.disable=true
+~~~
+
+Note that the above setting _completely_ disables DNS resolution, meaning even `A` and `AAAA` records are not resolved. This can cause problems if you're using an IP whitelist or blacklist. In this case, you'll want to use `dns.minimal` instead:
+
+~~~bash
+# only resolve A and AAAA records
+bbot -m httpx gowitness wappalyzer -t urls.txt -c dns.minimal=true
+~~~
+
+## FAQ
+
+### What is `URL_UNVERIFIED`?
+
+`URL_UNVERIFIED` events are URLs that haven't yet been visited by `httpx`. Once `httpx` visits them, it reraises them as `URL`s, tagged with their resulting status code.
+
+For example, when [`excavate`](index.md/#types-of-modules) gets an `HTTP_RESPONSE` event, it extracts links from the raw HTTP response as `URL_UNVERIFIED`s and then passes them back to `httpx` to be visited.
+
+By default, `URL_UNVERIFIED`s are hidden from output. If you want to see all of them, including the out-of-scope ones, you can do so by changing `omit_event_types` and `scope.report_distance` in the config like so:
+
+```bash
+# visit www.evilcorp.com and extract all the links
+bbot -t www.evilcorp.com -m httpx -c omit_event_types=[] scope.report_distance=2
+```
diff --git a/docs/troubleshooting.md b/docs/troubleshooting.md
new file mode 100644
index 0000000000..0ccfa8627e
--- /dev/null
+++ b/docs/troubleshooting.md
@@ -0,0 +1,35 @@
+# Troubleshooting
+
+## Installation troubleshooting
+- `Fatal error from pip prevented installation.`
+- `ERROR: No matching distribution found for bbot`
+- `bash: /home/user/.local/bin/bbot: /home/user/.local/pipx/venvs/bbot/bin/python: bad interpreter`
+
+If you get errors resembling any of the above, it's probably because your Python version is too old. To install a newer version (3.9+ is required), you will need to do something like this:
+```bash
+# install a newer version of python
+sudo apt install python3.9 python3.9-venv
+# install pipx
+python3.9 -m pip install --user pipx
+# add pipx to your path
+python3.9 -m pipx ensurepath
+# reboot
+reboot
+# install bbot
+python3.9 -m pipx install bbot
+# run bbot
+bbot --help
+```
+
+## `ModuleNotFoundError`
+If you run into a `ModuleNotFoundError`, try running your `bbot` command again with `--force-deps`. This will repair your modules' Python dependencies.
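+
+For example, if the failing command was the hypothetical scan below, simply re-run the same command with the flag appended:
+
+```bash
+# re-run the same scan with --force-deps to repair module dependencies
+bbot -t evilcorp.com -m httpx sslcert --force-deps
+```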
+
+## Regenerate Config
+As a troubleshooting step it is sometimes useful to clear out your older configs and let BBOT generate new ones. This will ensure that new defaults are properly restored, etc.
+```bash
+# make a backup of the old configs
+mv ~/.config/bbot ~/.config/bbot.bak
+
+# generate new configs
+bbot
+```
diff --git a/examples/discord_bot.py b/examples/discord_bot.py
new file mode 100644
index 0000000000..f435b0301c
--- /dev/null
+++ b/examples/discord_bot.py
@@ -0,0 +1,71 @@
+import discord
+from discord.ext import commands
+
+from bbot.scanner import Scanner
+from bbot.modules.output.discord import Discord
+
+
+class BBOTDiscordBot(commands.Cog):
+    """
+    A simple Discord bot capable of running a BBOT scan.
+
+    To set up:
+    1. Go to Discord Developer Portal (https://discord.com/developers)
+    2. Create a new application
+    3. Create an invite link for the bot, visit the link to invite it to your server
+        - Your Application --> OAuth2 --> URL Generator
+        - For Scopes, select "bot"
+        - For Bot Permissions, select:
+            - Read Messages/View Channels
+            - Send Messages
+    4. Turn on "Message Content Intent"
+        - Your Application --> Bot --> Privileged Gateway Intents --> Message Content Intent
+    5. Copy your Discord Bot Token and put it at the top of this file
+        - Your Application --> Bot --> Reset Token
+    6. Run this script
+
+    To scan evilcorp.com, you would type:
+
+        /scan evilcorp.com
+
+    Results will be output to the same channel.
+ """ + + def __init__(self): + self.current_scan = None + + @commands.command(name="scan", description="Scan a target with BBOT.") + async def scan(self, ctx, target: str): + if self.current_scan is not None: + self.current_scan.stop() + await ctx.send(f"Starting scan against {target}.") + + # creates scan instance + self.current_scan = Scanner(target, flags="subdomain-enum") + discord_module = Discord(self.current_scan) + + seen = set() + num_events = 0 + # start scan and iterate through results + async for event in self.current_scan.async_start(): + if hash(event) in seen: + continue + seen.add(hash(event)) + await ctx.send(discord_module.format_message(event)) + num_events += 1 + + await ctx.send(f"Finished scan against {target}. {num_events:,} results.") + self.current_scan = None + + +if __name__ == "__main__": + intents = discord.Intents.default() + intents.message_content = True + bot = commands.Bot(command_prefix="/", intents=intents) + + @bot.event + async def on_ready(): + print(f"We have logged in as {bot.user}") + await bot.add_cog(BBOTDiscordBot()) + + bot.run("DISCORD_BOT_TOKEN_HERE") diff --git a/extra_sass/style.css.scss b/extra_sass/style.css.scss new file mode 100644 index 0000000000..c6a4514c42 --- /dev/null +++ b/extra_sass/style.css.scss @@ -0,0 +1,99 @@ +/* GLOBAL STYLES */ + +:root { + --bbot-orange: #ff8400; +} + +// .md-grid { +// margin-left: unset; +// margin-right: unset; +// max-width: unset; +// } + +p img { + max-width: 60em !important; +} + +.demonic-jimmy { + color: var(--bbot-orange); +} + +.md-nav__link--active { + font-weight: bold; +} + +.md-typeset__table td:first-child { + font-weight: bold; +} + +a.md-source, +.md-header__topic > span, +a:hover { + color: var(--bbot-orange); +} + +article.md-content__inner { + h1 { + font-weight: 500; + color: var(--bbot-orange); + } + h1, + h2 { + color: var(--bbot-orange); + } + h2, + h3, + h4, + h5 { + font-weight: 300; + } + div.highlight { + background-color: unset !important; + } +} + +table { + font-family: monospace; + + td { + max-width: 100em; + } +} + +/* DARK MODE SPECIFIC */ + +[data-md-color-primary=black] p a.md-button--primary { + background-color: black; + border: none; +} + +[data-md-color-primary=black] p a.md-button--primary:hover { + background-color: var(--bbot-orange); +} + +[data-md-color-scheme="slate"] { + div.md-source__repository ul { + color: white; + } + + .md-nav__link { + color: white; + } + + .md-nav__link--active { + font-weight: bold; + } + + .md-typeset__table tr { + background-color: #202027; + } + + .md-nav__link.md-nav__link--active { + color: var(--bbot-orange); + } + + .md-typeset__table thead tr { + color: var(--bbot-orange); + background-color: var(--md-primary-fg-color--dark); + } +} diff --git a/funding.yml b/funding.yml new file mode 100644 index 0000000000..de70a36af0 --- /dev/null +++ b/funding.yml @@ -0,0 +1 @@ +github: blacklanternsecurity diff --git a/mkdocs.yml b/mkdocs.yml new file mode 100644 index 0000000000..4413fac487 --- /dev/null +++ b/mkdocs.yml @@ -0,0 +1,130 @@ +# Project information +site_name: BBOT Docs +site_url: https://blacklanternsecurity.github.io/bbot/ +site_author: TheTechromancer +site_description: >- + OSINT automation for hackers +# Repository +repo_name: blacklanternsecurity/bbot +repo_url: https://github.com/blacklanternsecurity/bbot +watch: + - "mkdocs.yml" + - "bbot" + - "docs" + +# Page tree +nav: +- User Manual: + - Basics: + - Getting Started: index.md + - How it Works: how_it_works.md + - Comparison to Other Tools: comparison.md + - 
Scanning: + - Scanning Overview: scanning/index.md + - Presets: + - Overview: scanning/presets.md + - List of Presets: scanning/presets_list.md + - Events: scanning/events.md + - Output: scanning/output.md + - Tips and Tricks: scanning/tips_and_tricks.md + - Advanced Usage: scanning/advanced.md + - Configuration: scanning/configuration.md + - Modules: + - List of Modules: modules/list_of_modules.md + - Nuclei: modules/nuclei.md + - Custom YARA Rules: modules/custom_yara_rules.md + - Misc: + - Contribution: contribution.md + - Release History: release_history.md + - Troubleshooting: troubleshooting.md +- Developer Manual: + - Development Overview: dev/index.md + - Setting Up a Dev Environment: dev/dev_environment.md + - BBOT Internal Architecture: dev/architecture.md + - How to Write a BBOT Module: dev/module_howto.md + - Unit Tests: dev/tests.md + - Discord Bot Example: dev/discord_bot.md + - Code Reference: + - Scanner: dev/scanner.md + - Presets: dev/presets.md + - Event: dev/event.md + - Target: dev/target.md + - BaseModule: dev/basemodule.md + - BBOTCore: dev/core.md + - Engine: dev/engine.md + - Helpers: + - Overview: dev/helpers/index.md + - Command: dev/helpers/command.md + - DNS: dev/helpers/dns.md + - Interactsh: dev/helpers/interactsh.md + - Miscellaneous: dev/helpers/misc.md + - Web: dev/helpers/web.md + - Word Cloud: dev/helpers/wordcloud.md + +theme: + name: material + logo: bbot.png + favicon: favicon.png + features: + - content.code.copy + - content.tooltips + - navigation.tabs + - navigation.sections + - navigation.expand + - toc.integrate + palette: + - scheme: slate + primary: black + accent: deep orange + +plugins: + - mike + - search + - extra-sass + - mkdocstrings: + enable_inventory: true + handlers: + python: + options: + heading_level: 1 + show_signature_annotations: true + show_root_toc_entry: false + show_root_heading: true + show_root_full_path: false + separate_signature: true + docstring_section_style: "list" + filters: + - "!^_" + - "^__init__$" + import: + - https://docs.python.org/3.11/objects.inv + - https://omegaconf.readthedocs.io/en/latest/objects.inv + +extra: + version: + provider: mike + default: Stable + +markdown_extensions: + - tables + - attr_list + - admonition + - pymdownx.details + - pymdownx.snippets + - pymdownx.superfences + - pymdownx.highlight: + use_pygments: True + noclasses: True + pygments_style: github-dark + - pymdownx.superfences: + custom_fences: + - name: mermaid + class: mermaid + format: !!python/name:pymdownx.superfences.fence_code_format + +extra_javascript: + - javascripts/tablesort.js + - javascripts/tablesort.min.js + - javascripts/vega@5.js + - javascripts/vega-lite@5.js + - javascripts/vega-embed@6.js diff --git a/poetry.lock b/poetry.lock index f9168cae19..f21dfc610f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,172 +1,447 @@ +# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand. 
+ [[package]] -name = "ansible" -version = "5.10.0" -description = "Radically simple IT automation" -category = "main" +name = "annotated-types" +version = "0.7.0" +description = "Reusable constraint types to use with typing.Annotated" optional = false python-versions = ">=3.8" - -[package.dependencies] -ansible-core = ">=2.12.7,<2.13.0" +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] [[package]] name = "ansible-core" -version = "2.12.8" +version = "2.15.13" description = "Radically simple IT automation" -category = "main" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +files = [ + {file = "ansible_core-2.15.13-py3-none-any.whl", hash = "sha256:e7f50bbb61beae792f5ecb86eff82149d3948d078361d70aedb01d76bc483c30"}, + {file = "ansible_core-2.15.13.tar.gz", hash = "sha256:f542e702ee31fb049732143aeee6b36311ca48b7d13960a0685afffa0d742d7f"}, +] [package.dependencies] cryptography = "*" -jinja2 = "*" +importlib-resources = {version = ">=5.0,<5.1", markers = "python_version < \"3.10\""} +jinja2 = ">=3.0.0" packaging = "*" -PyYAML = "*" -resolvelib = ">=0.5.3,<0.6.0" +PyYAML = ">=5.1" +resolvelib = ">=0.5.3,<1.1.0" [[package]] name = "ansible-runner" -version = "2.2.1" +version = "2.4.0" description = "\"Consistent Ansible Python API and CLI with container and process isolation runtime capabilities\"" -category = "main" optional = false -python-versions = "*" +python-versions = ">=3.9" +files = [ + {file = "ansible-runner-2.4.0.tar.gz", hash = "sha256:82d02b2548830f37a53517b65c823c4af371069406c7d213b5c9041d45e0c5b6"}, + {file = "ansible_runner-2.4.0-py3-none-any.whl", hash = "sha256:a3f592ae4cdfa62a72ad15de60da9c8210f376d67f495c4a78d4cf1dc7ccdf89"}, +] [package.dependencies] +importlib-metadata = {version = ">=4.6,<6.3", markers = "python_version < \"3.10\""} packaging = "*" pexpect = ">=4.5" python-daemon = "*" pyyaml = "*" -six = "*" [[package]] name = "antlr4-python3-runtime" version = "4.9.3" description = "ANTLR 4.9.3 runtime for Python 3.7" -category = "main" optional = false python-versions = "*" +files = [ + {file = "antlr4-python3-runtime-4.9.3.tar.gz", hash = "sha256:f224469b4168294902bb1efa80a8bf7855f24c99aef99cbefc1bcd3cce77881b"}, +] [[package]] -name = "appdirs" -version = "1.4.4" -description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." -category = "main" +name = "anyio" +version = "4.8.0" +description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false -python-versions = "*" +python-versions = ">=3.9" +files = [ + {file = "anyio-4.8.0-py3-none-any.whl", hash = "sha256:b5011f270ab5eb0abf13385f851315585cc37ef330dd88e27ec3d34d651fd47a"}, + {file = "anyio-4.8.0.tar.gz", hash = "sha256:1d9fe889df5212298c0c0723fa20479d1b94883a2df44bd3897aa91083316f7a"}, +] -[[package]] -name = "atomicwrites" -version = "1.4.1" -description = "Atomic file writes." 
-category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +[package.dependencies] +exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} +idna = ">=2.8" +sniffio = ">=1.1" +typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} + +[package.extras] +doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21)"] +trio = ["trio (>=0.26.1)"] [[package]] -name = "attrs" -version = "21.4.0" -description = "Classes Without Boilerplate" -category = "main" +name = "babel" +version = "2.17.0" +description = "Internationalization utilities" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.8" +files = [ + {file = "babel-2.17.0-py3-none-any.whl", hash = "sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2"}, + {file = "babel-2.17.0.tar.gz", hash = "sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d"}, +] [package.extras] -dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"] -docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] -tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] -tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"] +dev = ["backports.zoneinfo", "freezegun (>=1.0,<2.0)", "jinja2 (>=3.0)", "pytest (>=6.0)", "pytest-cov", "pytz", "setuptools", "tzdata"] [[package]] -name = "black" -version = "22.6.0" -description = "The uncompromising code formatter." -category = "dev" +name = "beautifulsoup4" +version = "4.13.3" +description = "Screen-scraping library" optional = false -python-versions = ">=3.6.2" +python-versions = ">=3.7.0" +files = [ + {file = "beautifulsoup4-4.13.3-py3-none-any.whl", hash = "sha256:99045d7d3f08f91f0d656bc9b7efbae189426cd913d830294a15eefa0ea4df16"}, + {file = "beautifulsoup4-4.13.3.tar.gz", hash = "sha256:1bd32405dacc920b42b83ba01644747ed77456a65760e285fbc47633ceddaf8b"}, +] [package.dependencies] -click = ">=8.0.0" -mypy-extensions = ">=0.4.3" -pathspec = ">=0.9.0" -platformdirs = ">=2" -tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""} -typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} +soupsieve = ">1.2" +typing-extensions = ">=4.0.0" [package.extras] -colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)"] -jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] -uvloop = ["uvloop (>=0.15.2)"] +cchardet = ["cchardet"] +chardet = ["chardet"] +charset-normalizer = ["charset-normalizer"] +html5lib = ["html5lib"] +lxml = ["lxml"] [[package]] -name = "cattrs" -version = "22.1.0" -description = "Composable complex class support for attrs and dataclasses." 
-category = "main" +name = "cachetools" +version = "5.5.2" +description = "Extensible memoizing collections and decorators" optional = false -python-versions = ">=3.7,<4.0" - -[package.dependencies] -attrs = ">=20" -exceptiongroup = {version = "*", markers = "python_version <= \"3.10\""} +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.5.2-py3-none-any.whl", hash = "sha256:d26a22bcc62eb95c3beabd9f1ee5e820d3d2704fe2967cbe350e20c8ffcd3f0a"}, + {file = "cachetools-5.5.2.tar.gz", hash = "sha256:1a661caa9175d26759571b2e19580f9d6393969e5dfca11fdb1f947a23e640d4"}, +] [[package]] name = "certifi" -version = "2022.6.15" +version = "2025.1.31" description = "Python package for providing Mozilla's CA Bundle." -category = "main" optional = false python-versions = ">=3.6" +files = [ + {file = "certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe"}, + {file = "certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651"}, +] [[package]] name = "cffi" -version = "1.15.1" +version = "1.17.1" description = "Foreign Function Interface for Python calling C code." -category = "main" optional = false -python-versions = "*" +python-versions = ">=3.8" +files = [ + {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, + {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, + {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, + {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, + {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, + {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, + {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, + {file = 
"cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, + {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, + {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, + {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, + {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, + {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, + {file = 
"cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, + {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, + {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, + {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, +] [package.dependencies] pycparser = "*" +[[package]] +name = "cfgv" +version = "3.4.0" +description = "Validate configuration and produce human readable error messages." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, + {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, +] + [[package]] name = "charset-normalizer" -version = "2.1.1" +version = "3.4.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
-category = "main" optional = false -python-versions = ">=3.6.0" - -[package.extras] -unicode_backport = ["unicodedata2"] +python-versions = ">=3.7" +files = [ + {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-win32.whl", hash = "sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313"}, + {file = 
"charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa"}, + {file = 
"charset_normalizer-3.4.1-cp37-cp37m-win32.whl", hash = "sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-win32.whl", hash = "sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537"}, + {file = 
"charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-win32.whl", hash = "sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765"}, + {file = "charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85"}, + {file = "charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3"}, +] [[package]] name = "click" -version = "8.1.3" +version = "8.1.8" description = "Composable command line interface toolkit" -category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, + {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, +] [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} +[[package]] +name = "cloudcheck" +version = "7.0.47" +description = "Check whether an IP address belongs to a cloud provider" +optional = false +python-versions = "<4.0,>=3.9" +files = [ + {file = "cloudcheck-7.0.47-py3-none-any.whl", hash = "sha256:71faaf5c090e9ae1501b692b0c7c2ed1f5efb88d02b190187d9d410f7a823d87"}, + {file = "cloudcheck-7.0.47.tar.gz", hash = "sha256:61c4a3b70dcd86349c72e3179e427e7db6ee046cc88ba0d76ada1bea84223242"}, +] + +[package.dependencies] +httpx = ">=0.26,<0.29" +pydantic = ">=2.4.2,<3.0.0" +radixtarget = ">=3.0.13,<4.0.0" +regex = ">=2024.4.16,<2025.0.0" + [[package]] name = "colorama" -version = "0.4.5" +version = "0.4.6" description = "Cross-platform colored terminal text." 
-category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] [[package]] name = "coverage" -version = "6.4.4" +version = "7.6.12" description = "Code coverage measurement for Python" -category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.9" +files = [ + {file = "coverage-7.6.12-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:704c8c8c6ce6569286ae9622e534b4f5b9759b6f2cd643f1c1a61f666d534fe8"}, + {file = "coverage-7.6.12-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ad7525bf0241e5502168ae9c643a2f6c219fa0a283001cee4cf23a9b7da75879"}, + {file = "coverage-7.6.12-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06097c7abfa611c91edb9e6920264e5be1d6ceb374efb4986f38b09eed4cb2fe"}, + {file = "coverage-7.6.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:220fa6c0ad7d9caef57f2c8771918324563ef0d8272c94974717c3909664e674"}, + {file = "coverage-7.6.12-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3688b99604a24492bcfe1c106278c45586eb819bf66a654d8a9a1433022fb2eb"}, + {file = "coverage-7.6.12-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d1a987778b9c71da2fc8948e6f2656da6ef68f59298b7e9786849634c35d2c3c"}, + {file = "coverage-7.6.12-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:cec6b9ce3bd2b7853d4a4563801292bfee40b030c05a3d29555fd2a8ee9bd68c"}, + {file = "coverage-7.6.12-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ace9048de91293e467b44bce0f0381345078389814ff6e18dbac8fdbf896360e"}, + {file = "coverage-7.6.12-cp310-cp310-win32.whl", hash = "sha256:ea31689f05043d520113e0552f039603c4dd71fa4c287b64cb3606140c66f425"}, + {file = "coverage-7.6.12-cp310-cp310-win_amd64.whl", hash = "sha256:676f92141e3c5492d2a1596d52287d0d963df21bf5e55c8b03075a60e1ddf8aa"}, + {file = "coverage-7.6.12-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e18aafdfb3e9ec0d261c942d35bd7c28d031c5855dadb491d2723ba54f4c3015"}, + {file = "coverage-7.6.12-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:66fe626fd7aa5982cdebad23e49e78ef7dbb3e3c2a5960a2b53632f1f703ea45"}, + {file = "coverage-7.6.12-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ef01d70198431719af0b1f5dcbefc557d44a190e749004042927b2a3fed0702"}, + {file = "coverage-7.6.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e92ae5a289a4bc4c0aae710c0948d3c7892e20fd3588224ebe242039573bf0"}, + {file = "coverage-7.6.12-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e695df2c58ce526eeab11a2e915448d3eb76f75dffe338ea613c1201b33bab2f"}, + {file = "coverage-7.6.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d74c08e9aaef995f8c4ef6d202dbd219c318450fe2a76da624f2ebb9c8ec5d9f"}, + {file = "coverage-7.6.12-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e995b3b76ccedc27fe4f477b349b7d64597e53a43fc2961db9d3fbace085d69d"}, + {file = "coverage-7.6.12-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:b1f097878d74fe51e1ddd1be62d8e3682748875b461232cf4b52ddc6e6db0bba"}, + {file = "coverage-7.6.12-cp311-cp311-win32.whl", hash = "sha256:1f7ffa05da41754e20512202c866d0ebfc440bba3b0ed15133070e20bf5aeb5f"}, + {file = "coverage-7.6.12-cp311-cp311-win_amd64.whl", hash = "sha256:e216c5c45f89ef8971373fd1c5d8d1164b81f7f5f06bbf23c37e7908d19e8558"}, + {file = "coverage-7.6.12-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b172f8e030e8ef247b3104902cc671e20df80163b60a203653150d2fc204d1ad"}, + {file = "coverage-7.6.12-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:641dfe0ab73deb7069fb972d4d9725bf11c239c309ce694dd50b1473c0f641c3"}, + {file = "coverage-7.6.12-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e549f54ac5f301e8e04c569dfdb907f7be71b06b88b5063ce9d6953d2d58574"}, + {file = "coverage-7.6.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:959244a17184515f8c52dcb65fb662808767c0bd233c1d8a166e7cf74c9ea985"}, + {file = "coverage-7.6.12-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bda1c5f347550c359f841d6614fb8ca42ae5cb0b74d39f8a1e204815ebe25750"}, + {file = "coverage-7.6.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1ceeb90c3eda1f2d8c4c578c14167dbd8c674ecd7d38e45647543f19839dd6ea"}, + {file = "coverage-7.6.12-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f16f44025c06792e0fb09571ae454bcc7a3ec75eeb3c36b025eccf501b1a4c3"}, + {file = "coverage-7.6.12-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b076e625396e787448d27a411aefff867db2bffac8ed04e8f7056b07024eed5a"}, + {file = "coverage-7.6.12-cp312-cp312-win32.whl", hash = "sha256:00b2086892cf06c7c2d74983c9595dc511acca00665480b3ddff749ec4fb2a95"}, + {file = "coverage-7.6.12-cp312-cp312-win_amd64.whl", hash = "sha256:7ae6eabf519bc7871ce117fb18bf14e0e343eeb96c377667e3e5dd12095e0288"}, + {file = "coverage-7.6.12-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:488c27b3db0ebee97a830e6b5a3ea930c4a6e2c07f27a5e67e1b3532e76b9ef1"}, + {file = "coverage-7.6.12-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5d1095bbee1851269f79fd8e0c9b5544e4c00c0c24965e66d8cba2eb5bb535fd"}, + {file = "coverage-7.6.12-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0533adc29adf6a69c1baa88c3d7dbcaadcffa21afbed3ca7a225a440e4744bf9"}, + {file = "coverage-7.6.12-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53c56358d470fa507a2b6e67a68fd002364d23c83741dbc4c2e0680d80ca227e"}, + {file = "coverage-7.6.12-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64cbb1a3027c79ca6310bf101014614f6e6e18c226474606cf725238cf5bc2d4"}, + {file = "coverage-7.6.12-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:79cac3390bfa9836bb795be377395f28410811c9066bc4eefd8015258a7578c6"}, + {file = "coverage-7.6.12-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:9b148068e881faa26d878ff63e79650e208e95cf1c22bd3f77c3ca7b1d9821a3"}, + {file = "coverage-7.6.12-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8bec2ac5da793c2685ce5319ca9bcf4eee683b8a1679051f8e6ec04c4f2fd7dc"}, + {file = "coverage-7.6.12-cp313-cp313-win32.whl", hash = "sha256:200e10beb6ddd7c3ded322a4186313d5ca9e63e33d8fab4faa67ef46d3460af3"}, + {file = "coverage-7.6.12-cp313-cp313-win_amd64.whl", hash = "sha256:2b996819ced9f7dbb812c701485d58f261bef08f9b85304d41219b1496b591ef"}, + {file = 
"coverage-7.6.12-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:299cf973a7abff87a30609879c10df0b3bfc33d021e1adabc29138a48888841e"}, + {file = "coverage-7.6.12-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4b467a8c56974bf06e543e69ad803c6865249d7a5ccf6980457ed2bc50312703"}, + {file = "coverage-7.6.12-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2458f275944db8129f95d91aee32c828a408481ecde3b30af31d552c2ce284a0"}, + {file = "coverage-7.6.12-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a9d8be07fb0832636a0f72b80d2a652fe665e80e720301fb22b191c3434d924"}, + {file = "coverage-7.6.12-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14d47376a4f445e9743f6c83291e60adb1b127607a3618e3185bbc8091f0467b"}, + {file = "coverage-7.6.12-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b95574d06aa9d2bd6e5cc35a5bbe35696342c96760b69dc4287dbd5abd4ad51d"}, + {file = "coverage-7.6.12-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:ecea0c38c9079570163d663c0433a9af4094a60aafdca491c6a3d248c7432827"}, + {file = "coverage-7.6.12-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2251fabcfee0a55a8578a9d29cecfee5f2de02f11530e7d5c5a05859aa85aee9"}, + {file = "coverage-7.6.12-cp313-cp313t-win32.whl", hash = "sha256:eb5507795caabd9b2ae3f1adc95f67b1104971c22c624bb354232d65c4fc90b3"}, + {file = "coverage-7.6.12-cp313-cp313t-win_amd64.whl", hash = "sha256:f60a297c3987c6c02ffb29effc70eadcbb412fe76947d394a1091a3615948e2f"}, + {file = "coverage-7.6.12-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e7575ab65ca8399c8c4f9a7d61bbd2d204c8b8e447aab9d355682205c9dd948d"}, + {file = "coverage-7.6.12-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8161d9fbc7e9fe2326de89cd0abb9f3599bccc1287db0aba285cb68d204ce929"}, + {file = "coverage-7.6.12-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a1e465f398c713f1b212400b4e79a09829cd42aebd360362cd89c5bdc44eb87"}, + {file = "coverage-7.6.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f25d8b92a4e31ff1bd873654ec367ae811b3a943583e05432ea29264782dc32c"}, + {file = "coverage-7.6.12-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a936309a65cc5ca80fa9f20a442ff9e2d06927ec9a4f54bcba9c14c066323f2"}, + {file = "coverage-7.6.12-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:aa6f302a3a0b5f240ee201297fff0bbfe2fa0d415a94aeb257d8b461032389bd"}, + {file = "coverage-7.6.12-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f973643ef532d4f9be71dd88cf7588936685fdb576d93a79fe9f65bc337d9d73"}, + {file = "coverage-7.6.12-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:78f5243bb6b1060aed6213d5107744c19f9571ec76d54c99cc15938eb69e0e86"}, + {file = "coverage-7.6.12-cp39-cp39-win32.whl", hash = "sha256:69e62c5034291c845fc4df7f8155e8544178b6c774f97a99e2734b05eb5bed31"}, + {file = "coverage-7.6.12-cp39-cp39-win_amd64.whl", hash = "sha256:b01a840ecc25dce235ae4c1b6a0daefb2a203dba0e6e980637ee9c2f6ee0df57"}, + {file = "coverage-7.6.12-pp39.pp310-none-any.whl", hash = "sha256:7e39e845c4d764208e7b8f6a21c541ade741e2c41afabdfa1caa28687a3c98cf"}, + {file = "coverage-7.6.12-py3-none-any.whl", hash = "sha256:eb8668cfbc279a536c633137deeb9435d2962caec279c3f8cf8b91fff6ff8953"}, + {file = "coverage-7.6.12.tar.gz", hash = 
"sha256:48cfc4641d95d34766ad41d9573cc0f22a48aa88d22657a1fe01dca0dbae4de2"}, +] [package.dependencies] tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} @@ -176,478 +451,2205 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "37.0.4" +version = "43.0.3" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." -category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" +files = [ + {file = "cryptography-43.0.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bf7a1932ac4176486eab36a19ed4c0492da5d97123f1406cf15e41b05e787d2e"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63efa177ff54aec6e1c0aefaa1a241232dcd37413835a9b674b6e3f0ae2bfd3e"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e1ce50266f4f70bf41a2c6dc4358afadae90e2a1e5342d3c08883df1675374f"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:443c4a81bb10daed9a8f334365fe52542771f25aedaf889fd323a853ce7377d6"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:74f57f24754fe349223792466a709f8e0c093205ff0dca557af51072ff47ab18"}, + {file = "cryptography-43.0.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9762ea51a8fc2a88b70cf2995e5675b38d93bf36bd67d91721c309df184f49bd"}, + {file = "cryptography-43.0.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:81ef806b1fef6b06dcebad789f988d3b37ccaee225695cf3e07648eee0fc6b73"}, + {file = "cryptography-43.0.3-cp37-abi3-win32.whl", hash = "sha256:cbeb489927bd7af4aa98d4b261af9a5bc025bd87f0e3547e11584be9e9427be2"}, + {file = "cryptography-43.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:f46304d6f0c6ab8e52770addfa2fc41e6629495548862279641972b6215451cd"}, + {file = "cryptography-43.0.3-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:8ac43ae87929a5982f5948ceda07001ee5e83227fd69cf55b109144938d96984"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:846da004a5804145a5f441b8530b4bf35afbf7da70f82409f151695b127213d5"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f996e7268af62598f2fc1204afa98a3b5712313a55c4c9d434aef49cadc91d4"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f7b178f11ed3664fd0e995a47ed2b5ff0a12d893e41dd0494f406d1cf555cab7"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:c2e6fc39c4ab499049df3bdf567f768a723a5e8464816e8f009f121a5a9f4405"}, + {file = "cryptography-43.0.3-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e1be4655c7ef6e1bbe6b5d0403526601323420bcf414598955968c9ef3eb7d16"}, + {file = "cryptography-43.0.3-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:df6b6c6d742395dd77a23ea3728ab62f98379eff8fb61be2744d4679ab678f73"}, + {file = "cryptography-43.0.3-cp39-abi3-win32.whl", hash = "sha256:d56e96520b1020449bbace2b78b603442e7e378a9b3bd68de65c782db1507995"}, + {file = "cryptography-43.0.3-cp39-abi3-win_amd64.whl", hash = "sha256:0c580952eef9bf68c4747774cde7ec1d85a6e61de97281f2dba83c7d2c806362"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d03b5621a135bffecad2c73e9f4deb1a0f977b9a8ffe6f8e002bf6c9d07b918c"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", 
hash = "sha256:a2a431ee15799d6db9fe80c82b055bae5a752bef645bba795e8e52687c69efe3"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:281c945d0e28c92ca5e5930664c1cefd85efe80e5c0d2bc58dd63383fda29f83"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f18c716be16bc1fea8e95def49edf46b82fccaa88587a45f8dc0ff6ab5d8e0a7"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4a02ded6cd4f0a5562a8887df8b3bd14e822a90f97ac5e544c162899bc467664"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:53a583b6637ab4c4e3591a15bc9db855b8d9dee9a669b550f311480acab6eb08"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1ec0bcf7e17c0c5669d881b1cd38c4972fade441b27bda1051665faaa89bdcaa"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2ce6fae5bdad59577b44e4dfed356944fbf1d925269114c28be377692643b4ff"}, + {file = "cryptography-43.0.3.tar.gz", hash = "sha256:315b9001266a492a6ff443b61238f956b214dbec9910a081ba5b6646a055a805"}, +] [package.dependencies] -cffi = ">=1.12" +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} [package.extras] -docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx-rtd-theme"] -docstest = ["pyenchant (>=1.6.11)", "twine (>=1.12.0)", "sphinxcontrib-spelling (>=4.0.1)"] -pep8test = ["black", "flake8", "flake8-import-order", "pep8-naming"] -sdist = ["setuptools_rust (>=0.11.4)"] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] +docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] +nox = ["nox"] +pep8test = ["check-sdist", "click", "mypy", "ruff"] +sdist = ["build"] ssh = ["bcrypt (>=3.1.5)"] -test = ["pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-subtests", "pytest-xdist", "pretend", "iso8601", "pytz", "hypothesis (>=1.11.4,!=3.79.2)"] +test = ["certifi", "cryptography-vectors (==43.0.3)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test-randomorder = ["pytest-randomly"] [[package]] name = "deepdiff" -version = "5.8.1" -description = "Deep Difference and Search of any Python object/data." -category = "main" +version = "8.2.0" +description = "Deep Difference and Search of any Python object/data. Recreate objects by adding adding deltas to each other." 
optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" +files = [ + {file = "deepdiff-8.2.0-py3-none-any.whl", hash = "sha256:5091f2cdfd372b1b9f6bfd8065ba323ae31118dc4e42594371b38c8bea3fd0a4"}, + {file = "deepdiff-8.2.0.tar.gz", hash = "sha256:6ec78f65031485735545ffbe7a61e716c3c2d12ca6416886d5e9291fc76c46c3"}, +] [package.dependencies] -ordered-set = ">=4.1.0,<4.2.0" +orderly-set = ">=5.3.0,<6" [package.extras] -cli = ["click (==8.0.3)", "pyyaml (==5.4.1)", "toml (==0.10.2)", "clevercsv (==0.7.1)"] +cli = ["click (==8.1.8)", "pyyaml (==6.0.2)"] +optimize = ["orjson"] + +[[package]] +name = "distlib" +version = "0.3.9" +description = "Distribution utilities" +optional = false +python-versions = "*" +files = [ + {file = "distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87"}, + {file = "distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403"}, +] [[package]] name = "dnspython" -version = "2.2.1" +version = "2.7.0" description = "DNS toolkit" -category = "main" optional = false -python-versions = ">=3.6,<4.0" +python-versions = ">=3.9" +files = [ + {file = "dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86"}, + {file = "dnspython-2.7.0.tar.gz", hash = "sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1"}, +] [package.extras] -dnssec = ["cryptography (>=2.6,<37.0)"] -curio = ["curio (>=1.2,<2.0)", "sniffio (>=1.1,<2.0)"] -doh = ["h2 (>=4.1.0)", "httpx (>=0.21.1)", "requests (>=2.23.0,<3.0.0)", "requests-toolbelt (>=0.9.1,<0.10.0)"] -idna = ["idna (>=2.1,<4.0)"] -trio = ["trio (>=0.14,<0.20)"] -wmi = ["wmi (>=1.5.1,<2.0.0)"] +dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "hypercorn (>=0.16.0)", "mypy (>=1.8)", "pylint (>=3)", "pytest (>=7.4)", "pytest-cov (>=4.1.0)", "quart-trio (>=0.11.0)", "sphinx (>=7.2.0)", "sphinx-rtd-theme (>=2.0.0)", "twine (>=4.0.0)", "wheel (>=0.42.0)"] +dnssec = ["cryptography (>=43)"] +doh = ["h2 (>=4.1.0)", "httpcore (>=1.0.0)", "httpx (>=0.26.0)"] +doq = ["aioquic (>=1.0.0)"] +idna = ["idna (>=3.7)"] +trio = ["trio (>=0.23)"] +wmi = ["wmi (>=1.5.1)"] [[package]] -name = "docutils" -version = "0.19" -description = "Docutils -- Python Documentation Utilities" -category = "main" +name = "dunamai" +version = "1.23.0" +description = "Dynamic version generation" optional = false -python-versions = ">=3.7" +python-versions = ">=3.5" +files = [ + {file = "dunamai-1.23.0-py3-none-any.whl", hash = "sha256:a0906d876e92441793c6a423e16a4802752e723e9c9a5aabdc5535df02dbe041"}, + {file = "dunamai-1.23.0.tar.gz", hash = "sha256:a163746de7ea5acb6dacdab3a6ad621ebc612ed1e528aaa8beedb8887fccd2c4"}, +] + +[package.dependencies] +packaging = ">=20.9" [[package]] name = "exceptiongroup" -version = "1.0.0rc8" +version = "1.2.2" description = "Backport of PEP 654 (exception groups)" -category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, + {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, +] [package.extras] test = ["pytest (>=6)"] +[[package]] +name = "fastapi" +version = "0.115.8" +description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" +optional = false +python-versions = ">=3.8" +files = [ + 
{file = "fastapi-0.115.8-py3-none-any.whl", hash = "sha256:753a96dd7e036b34eeef8babdfcfe3f28ff79648f86551eb36bfc1b0bf4a8cbf"}, + {file = "fastapi-0.115.8.tar.gz", hash = "sha256:0ce9111231720190473e222cdf0f07f7206ad7e53ea02beb1d2dc36e2f0741e9"}, +] + +[package.dependencies] +pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0" +starlette = ">=0.40.0,<0.46.0" +typing-extensions = ">=4.8.0" + +[package.extras] +all = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.5)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=3.1.5)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.18)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] +standard = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.5)", "httpx (>=0.23.0)", "jinja2 (>=3.1.5)", "python-multipart (>=0.0.18)", "uvicorn[standard] (>=0.12.0)"] + [[package]] name = "filelock" -version = "3.8.0" +version = "3.17.0" description = "A platform independent file lock." -category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.9" +files = [ + {file = "filelock-3.17.0-py3-none-any.whl", hash = "sha256:533dc2f7ba78dc2f0f531fc6c4940addf7b70a481e269a5a3b93be94ffbe8338"}, + {file = "filelock-3.17.0.tar.gz", hash = "sha256:ee4e77401ef576ebb38cd7f13b9b28893194acc20a8e68e18730ba9c0e54660e"}, +] [package.extras] -docs = ["furo (>=2022.6.21)", "sphinx (>=5.1.1)", "sphinx-autodoc-typehints (>=1.19.1)"] -testing = ["covdefaults (>=2.2)", "coverage (>=6.4.2)", "pytest (>=7.1.2)", "pytest-cov (>=3)", "pytest-timeout (>=2.1)"] +docs = ["furo (>=2024.8.6)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.6.10)", "diff-cover (>=9.2.1)", "pytest (>=8.3.4)", "pytest-asyncio (>=0.25.2)", "pytest-cov (>=6)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.28.1)"] +typing = ["typing-extensions (>=4.12.2)"] [[package]] -name = "flake8" -version = "4.0.1" -description = "the modular source code checker: pep8 pyflakes and co" -category = "dev" +name = "ghp-import" +version = "2.1.0" +description = "Copy your docs directly to the gh-pages branch." optional = false -python-versions = ">=3.6" +python-versions = "*" +files = [ + {file = "ghp-import-2.1.0.tar.gz", hash = "sha256:9c535c4c61193c2df8871222567d7fd7e5014d835f97dc7b7439069e2413d343"}, + {file = "ghp_import-2.1.0-py3-none-any.whl", hash = "sha256:8337dd7b50877f163d4c0289bc1f1c7f127550241988d568c1db512c4324a619"}, +] [package.dependencies] -mccabe = ">=0.6.0,<0.7.0" -pycodestyle = ">=2.8.0,<2.9.0" -pyflakes = ">=2.4.0,<2.5.0" +python-dateutil = ">=2.8.1" + +[package.extras] +dev = ["flake8", "markdown", "twine", "wheel"] [[package]] -name = "idna" -version = "3.3" -description = "Internationalized Domain Names in Applications (IDNA)" -category = "main" +name = "griffe" +version = "1.5.7" +description = "Signatures for entire Python programs. Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API." 
optional = false -python-versions = ">=3.5" +python-versions = ">=3.9" +files = [ + {file = "griffe-1.5.7-py3-none-any.whl", hash = "sha256:4af8ec834b64de954d447c7b6672426bb145e71605c74a4e22d510cc79fe7d8b"}, + {file = "griffe-1.5.7.tar.gz", hash = "sha256:465238c86deaf1137761f700fb343edd8ffc846d72f6de43c3c345ccdfbebe92"}, +] + +[package.dependencies] +colorama = ">=0.4" [[package]] -name = "iniconfig" -version = "1.1.1" -description = "iniconfig: brain-dead simple config-ini parsing" -category = "dev" +name = "h11" +version = "0.14.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" optional = false -python-versions = "*" +python-versions = ">=3.7" +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] [[package]] -name = "jinja2" -version = "3.1.2" -description = "A very fast and expressive template engine." -category = "main" +name = "httpcore" +version = "1.0.7" +description = "A minimal low-level HTTP client." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" +files = [ + {file = "httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd"}, + {file = "httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c"}, +] [package.dependencies] -MarkupSafe = ">=2.0" +certifi = "*" +h11 = ">=0.13,<0.15" [package.extras] -i18n = ["Babel (>=2.7)"] +asyncio = ["anyio (>=4.0,<5.0)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +trio = ["trio (>=0.22.0,<1.0)"] [[package]] -name = "lockfile" -version = "0.12.2" -description = "Platform-independent file locking module" -category = "main" +name = "httpx" +version = "0.27.2" +description = "The next generation HTTP client." optional = false -python-versions = "*" +python-versions = ">=3.8" +files = [ + {file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"}, + {file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"}, +] -[[package]] -name = "markupsafe" -version = "2.1.1" -description = "Safely add untrusted strings to HTML/XML markup." -category = "main" -optional = false -python-versions = ">=3.7" +[package.dependencies] +anyio = "*" +certifi = "*" +httpcore = "==1.*" +idna = "*" +sniffio = "*" -[[package]] -name = "mccabe" -version = "0.6.1" -description = "McCabe checker, plugin for flake8" -category = "dev" -optional = false -python-versions = "*" +[package.extras] +brotli = ["brotli", "brotlicffi"] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +zstd = ["zstandard (>=0.18.0)"] [[package]] -name = "mypy-extensions" -version = "0.4.3" -description = "Experimental type system extensions for programs checked with the mypy typechecker." 
-category = "dev" +name = "identify" +version = "2.6.7" +description = "File identification library for Python" optional = false -python-versions = "*" +python-versions = ">=3.9" +files = [ + {file = "identify-2.6.7-py2.py3-none-any.whl", hash = "sha256:155931cb617a401807b09ecec6635d6c692d180090a1cedca8ef7d58ba5b6aa0"}, + {file = "identify-2.6.7.tar.gz", hash = "sha256:3fa266b42eba321ee0b2bb0936a6a6b9e36a1351cbb69055b3082f4193035684"}, +] + +[package.extras] +license = ["ukkonen"] [[package]] -name = "omegaconf" -version = "2.2.3" -description = "A flexible configuration library" -category = "main" +name = "idna" +version = "3.10" +description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.6" +files = [ + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, +] -[package.dependencies] -antlr4-python3-runtime = ">=4.9.0,<4.10.0" -PyYAML = ">=5.1.0" +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] [[package]] -name = "ordered-set" -version = "4.1.0" -description = "An OrderedSet is a custom MutableSet that remembers its order, so that every" -category = "main" +name = "importlib-metadata" +version = "6.2.1" +description = "Read metadata from Python packages" optional = false python-versions = ">=3.7" +files = [ + {file = "importlib_metadata-6.2.1-py3-none-any.whl", hash = "sha256:f65e478a7c2177bd19517a3a15dac094d253446d8690c5f3e71e735a04312374"}, + {file = "importlib_metadata-6.2.1.tar.gz", hash = "sha256:5a66966b39ff1c14ef5b2d60c1d842b0141fefff0f4cc6365b4bc9446c652807"}, +] + +[package.dependencies] +zipp = ">=0.5" [package.extras] -dev = ["pytest", "black", "mypy"] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +perf = ["ipython"] +testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] [[package]] -name = "packaging" -version = "21.3" -description = "Core utilities for Python packages" -category = "main" +name = "importlib-resources" +version = "5.0.7" +description = "Read resources from Python packages" optional = false python-versions = ">=3.6" +files = [ + {file = "importlib_resources-5.0.7-py3-none-any.whl", hash = "sha256:2238159eb743bd85304a16e0536048b3e991c531d1cd51c4a834d1ccf2829057"}, + {file = "importlib_resources-5.0.7.tar.gz", hash = "sha256:4df460394562b4581bb4e4087ad9447bd433148fba44241754ec3152499f1d1b"}, +] -[package.dependencies] -pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" +[package.extras] +docs = ["jaraco.packaging (>=8.2)", "rst.linker (>=1.9)", "sphinx"] +testing = ["pytest (>=3.5,!=3.7.3)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=1.2.3)", "pytest-cov", "pytest-enabler", "pytest-flake8", "pytest-mypy"] [[package]] -name = "pathspec" -version = "0.9.0" -description = "Utility library for gitignore style pattern matching of file paths." 
-category = "dev" +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] [[package]] -name = "pexpect" -version = "4.8.0" -description = "Pexpect allows easy control of interactive console applications." -category = "main" +name = "jinja2" +version = "3.1.5" +description = "A very fast and expressive template engine." optional = false -python-versions = "*" +python-versions = ">=3.7" +files = [ + {file = "jinja2-3.1.5-py3-none-any.whl", hash = "sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb"}, + {file = "jinja2-3.1.5.tar.gz", hash = "sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb"}, +] [package.dependencies] -ptyprocess = ">=0.5" - -[[package]] -name = "platformdirs" -version = "2.5.2" -description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." -category = "dev" -optional = false -python-versions = ">=3.7" +MarkupSafe = ">=2.0" [package.extras] -docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)", "sphinx (>=4)"] -test = ["appdirs (==1.4.4)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)", "pytest (>=6)"] +i18n = ["Babel (>=2.7)"] [[package]] -name = "pluggy" -version = "1.0.0" -description = "plugin and hook calling mechanisms for python" -category = "dev" +name = "libsass" +version = "0.23.0" +description = "Sass for Python: A straightforward binding of libsass for Python." optional = false -python-versions = ">=3.6" - -[package.extras] -dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] +python-versions = ">=3.8" +files = [ + {file = "libsass-0.23.0-cp38-abi3-macosx_11_0_x86_64.whl", hash = "sha256:34cae047cbbfc4ffa832a61cbb110f3c95f5471c6170c842d3fed161e40814dc"}, + {file = "libsass-0.23.0-cp38-abi3-macosx_14_0_arm64.whl", hash = "sha256:ea97d1b45cdc2fc3590cb9d7b60f1d8915d3ce17a98c1f2d4dd47ee0d9c68ce6"}, + {file = "libsass-0.23.0-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:4a218406d605f325d234e4678bd57126a66a88841cb95bee2caeafdc6f138306"}, + {file = "libsass-0.23.0-cp38-abi3-win32.whl", hash = "sha256:31e86d92a5c7a551df844b72d83fc2b5e50abc6fbbb31e296f7bebd6489ed1b4"}, + {file = "libsass-0.23.0-cp38-abi3-win_amd64.whl", hash = "sha256:a2ec85d819f353cbe807432d7275d653710d12b08ec7ef61c124a580a8352f3c"}, + {file = "libsass-0.23.0.tar.gz", hash = "sha256:6f209955ede26684e76912caf329f4ccb57e4a043fd77fe0e7348dd9574f1880"}, +] [[package]] -name = "psutil" -version = "5.9.1" -description = "Cross-platform lib for process and system monitoring in Python." 
-category = "main" +name = "livereload" +version = "2.7.1" +description = "Python LiveReload is an awesome tool for web developers" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.7" +files = [ + {file = "livereload-2.7.1-py3-none-any.whl", hash = "sha256:5201740078c1b9433f4b2ba22cd2729a39b9d0ec0a2cc6b4d3df257df5ad0564"}, + {file = "livereload-2.7.1.tar.gz", hash = "sha256:3d9bf7c05673df06e32bea23b494b8d36ca6d10f7d5c3c8a6989608c09c986a9"}, +] -[package.extras] -test = ["ipaddress", "mock", "enum34", "pywin32", "wmi"] +[package.dependencies] +tornado = "*" [[package]] -name = "ptyprocess" -version = "0.7.0" -description = "Run a subprocess in a pseudo terminal" -category = "main" +name = "lockfile" +version = "0.12.2" +description = "Platform-independent file locking module" optional = false python-versions = "*" +files = [ + {file = "lockfile-0.12.2-py2.py3-none-any.whl", hash = "sha256:6c3cb24f344923d30b2785d5ad75182c8ea7ac1b6171b08657258ec7429d50fa"}, + {file = "lockfile-0.12.2.tar.gz", hash = "sha256:6aed02de03cba24efabcd600b30540140634fc06cfa603822d508d5361e9f799"}, +] [[package]] -name = "py" -version = "1.11.0" -description = "library with cross-python path, ini-parsing, io, code, log facilities" -category = "dev" +name = "lxml" +version = "5.3.1" +description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.6" +files = [ + {file = "lxml-5.3.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a4058f16cee694577f7e4dd410263cd0ef75644b43802a689c2b3c2a7e69453b"}, + {file = "lxml-5.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:364de8f57d6eda0c16dcfb999af902da31396949efa0e583e12675d09709881b"}, + {file = "lxml-5.3.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:528f3a0498a8edc69af0559bdcf8a9f5a8bf7c00051a6ef3141fdcf27017bbf5"}, + {file = "lxml-5.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db4743e30d6f5f92b6d2b7c86b3ad250e0bad8dee4b7ad8a0c44bfb276af89a3"}, + {file = "lxml-5.3.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:17b5d7f8acf809465086d498d62a981fa6a56d2718135bb0e4aa48c502055f5c"}, + {file = "lxml-5.3.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:928e75a7200a4c09e6efc7482a1337919cc61fe1ba289f297827a5b76d8969c2"}, + {file = "lxml-5.3.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a997b784a639e05b9d4053ef3b20c7e447ea80814a762f25b8ed5a89d261eac"}, + {file = "lxml-5.3.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:7b82e67c5feb682dbb559c3e6b78355f234943053af61606af126df2183b9ef9"}, + {file = "lxml-5.3.1-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = "sha256:f1de541a9893cf8a1b1db9bf0bf670a2decab42e3e82233d36a74eda7822b4c9"}, + {file = "lxml-5.3.1-cp310-cp310-manylinux_2_28_s390x.whl", hash = "sha256:de1fc314c3ad6bc2f6bd5b5a5b9357b8c6896333d27fdbb7049aea8bd5af2d79"}, + {file = "lxml-5.3.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:7c0536bd9178f754b277a3e53f90f9c9454a3bd108b1531ffff720e082d824f2"}, + {file = "lxml-5.3.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:68018c4c67d7e89951a91fbd371e2e34cd8cfc71f0bb43b5332db38497025d51"}, + {file = "lxml-5.3.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = 
"sha256:aa826340a609d0c954ba52fd831f0fba2a4165659ab0ee1a15e4aac21f302406"}, + {file = "lxml-5.3.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:796520afa499732191e39fc95b56a3b07f95256f2d22b1c26e217fb69a9db5b5"}, + {file = "lxml-5.3.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3effe081b3135237da6e4c4530ff2a868d3f80be0bda027e118a5971285d42d0"}, + {file = "lxml-5.3.1-cp310-cp310-win32.whl", hash = "sha256:a22f66270bd6d0804b02cd49dae2b33d4341015545d17f8426f2c4e22f557a23"}, + {file = "lxml-5.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:0bcfadea3cdc68e678d2b20cb16a16716887dd00a881e16f7d806c2138b8ff0c"}, + {file = "lxml-5.3.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e220f7b3e8656ab063d2eb0cd536fafef396829cafe04cb314e734f87649058f"}, + {file = "lxml-5.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0f2cfae0688fd01f7056a17367e3b84f37c545fb447d7282cf2c242b16262607"}, + {file = "lxml-5.3.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:67d2f8ad9dcc3a9e826bdc7802ed541a44e124c29b7d95a679eeb58c1c14ade8"}, + {file = "lxml-5.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db0c742aad702fd5d0c6611a73f9602f20aec2007c102630c06d7633d9c8f09a"}, + {file = "lxml-5.3.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:198bb4b4dd888e8390afa4f170d4fa28467a7eaf857f1952589f16cfbb67af27"}, + {file = "lxml-5.3.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d2a3e412ce1849be34b45922bfef03df32d1410a06d1cdeb793a343c2f1fd666"}, + {file = "lxml-5.3.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2b8969dbc8d09d9cd2ae06362c3bad27d03f433252601ef658a49bd9f2b22d79"}, + {file = "lxml-5.3.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:5be8f5e4044146a69c96077c7e08f0709c13a314aa5315981185c1f00235fe65"}, + {file = "lxml-5.3.1-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:133f3493253a00db2c870d3740bc458ebb7d937bd0a6a4f9328373e0db305709"}, + {file = "lxml-5.3.1-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:52d82b0d436edd6a1d22d94a344b9a58abd6c68c357ed44f22d4ba8179b37629"}, + {file = "lxml-5.3.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:1b6f92e35e2658a5ed51c6634ceb5ddae32053182851d8cad2a5bc102a359b33"}, + {file = "lxml-5.3.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:203b1d3eaebd34277be06a3eb880050f18a4e4d60861efba4fb946e31071a295"}, + {file = "lxml-5.3.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:155e1a5693cf4b55af652f5c0f78ef36596c7f680ff3ec6eb4d7d85367259b2c"}, + {file = "lxml-5.3.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:22ec2b3c191f43ed21f9545e9df94c37c6b49a5af0a874008ddc9132d49a2d9c"}, + {file = "lxml-5.3.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7eda194dd46e40ec745bf76795a7cccb02a6a41f445ad49d3cf66518b0bd9cff"}, + {file = "lxml-5.3.1-cp311-cp311-win32.whl", hash = "sha256:fb7c61d4be18e930f75948705e9718618862e6fc2ed0d7159b2262be73f167a2"}, + {file = "lxml-5.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:c809eef167bf4a57af4b03007004896f5c60bd38dc3852fcd97a26eae3d4c9e6"}, + {file = "lxml-5.3.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:e69add9b6b7b08c60d7ff0152c7c9a6c45b4a71a919be5abde6f98f1ea16421c"}, + {file = "lxml-5.3.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4e52e1b148867b01c05e21837586ee307a01e793b94072d7c7b91d2c2da02ffe"}, + {file = 
"lxml-5.3.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a4b382e0e636ed54cd278791d93fe2c4f370772743f02bcbe431a160089025c9"}, + {file = "lxml-5.3.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c2e49dc23a10a1296b04ca9db200c44d3eb32c8d8ec532e8c1fd24792276522a"}, + {file = "lxml-5.3.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4399b4226c4785575fb20998dc571bc48125dc92c367ce2602d0d70e0c455eb0"}, + {file = "lxml-5.3.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5412500e0dc5481b1ee9cf6b38bb3b473f6e411eb62b83dc9b62699c3b7b79f7"}, + {file = "lxml-5.3.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c93ed3c998ea8472be98fb55aed65b5198740bfceaec07b2eba551e55b7b9ae"}, + {file = "lxml-5.3.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:63d57fc94eb0bbb4735e45517afc21ef262991d8758a8f2f05dd6e4174944519"}, + {file = "lxml-5.3.1-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:b450d7cabcd49aa7ab46a3c6aa3ac7e1593600a1a0605ba536ec0f1b99a04322"}, + {file = "lxml-5.3.1-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:4df0ec814b50275ad6a99bc82a38b59f90e10e47714ac9871e1b223895825468"}, + {file = "lxml-5.3.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:d184f85ad2bb1f261eac55cddfcf62a70dee89982c978e92b9a74a1bfef2e367"}, + {file = "lxml-5.3.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b725e70d15906d24615201e650d5b0388b08a5187a55f119f25874d0103f90dd"}, + {file = "lxml-5.3.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:a31fa7536ec1fb7155a0cd3a4e3d956c835ad0a43e3610ca32384d01f079ea1c"}, + {file = "lxml-5.3.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:3c3c8b55c7fc7b7e8877b9366568cc73d68b82da7fe33d8b98527b73857a225f"}, + {file = "lxml-5.3.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d61ec60945d694df806a9aec88e8f29a27293c6e424f8ff91c80416e3c617645"}, + {file = "lxml-5.3.1-cp312-cp312-win32.whl", hash = "sha256:f4eac0584cdc3285ef2e74eee1513a6001681fd9753b259e8159421ed28a72e5"}, + {file = "lxml-5.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:29bfc8d3d88e56ea0a27e7c4897b642706840247f59f4377d81be8f32aa0cfbf"}, + {file = "lxml-5.3.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:c093c7088b40d8266f57ed71d93112bd64c6724d31f0794c1e52cc4857c28e0e"}, + {file = "lxml-5.3.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b0884e3f22d87c30694e625b1e62e6f30d39782c806287450d9dc2fdf07692fd"}, + {file = "lxml-5.3.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1637fa31ec682cd5760092adfabe86d9b718a75d43e65e211d5931809bc111e7"}, + {file = "lxml-5.3.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a364e8e944d92dcbf33b6b494d4e0fb3499dcc3bd9485beb701aa4b4201fa414"}, + {file = "lxml-5.3.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:779e851fd0e19795ccc8a9bb4d705d6baa0ef475329fe44a13cf1e962f18ff1e"}, + {file = "lxml-5.3.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c4393600915c308e546dc7003d74371744234e8444a28622d76fe19b98fa59d1"}, + {file = "lxml-5.3.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:673b9d8e780f455091200bba8534d5f4f465944cbdd61f31dc832d70e29064a5"}, + {file = "lxml-5.3.1-cp313-cp313-manylinux_2_28_aarch64.whl", hash = 
"sha256:2e4a570f6a99e96c457f7bec5ad459c9c420ee80b99eb04cbfcfe3fc18ec6423"}, + {file = "lxml-5.3.1-cp313-cp313-manylinux_2_28_ppc64le.whl", hash = "sha256:71f31eda4e370f46af42fc9f264fafa1b09f46ba07bdbee98f25689a04b81c20"}, + {file = "lxml-5.3.1-cp313-cp313-manylinux_2_28_s390x.whl", hash = "sha256:42978a68d3825eaac55399eb37a4d52012a205c0c6262199b8b44fcc6fd686e8"}, + {file = "lxml-5.3.1-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:8b1942b3e4ed9ed551ed3083a2e6e0772de1e5e3aca872d955e2e86385fb7ff9"}, + {file = "lxml-5.3.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:85c4f11be9cf08917ac2a5a8b6e1ef63b2f8e3799cec194417e76826e5f1de9c"}, + {file = "lxml-5.3.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:231cf4d140b22a923b1d0a0a4e0b4f972e5893efcdec188934cc65888fd0227b"}, + {file = "lxml-5.3.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:5865b270b420eda7b68928d70bb517ccbe045e53b1a428129bb44372bf3d7dd5"}, + {file = "lxml-5.3.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:dbf7bebc2275016cddf3c997bf8a0f7044160714c64a9b83975670a04e6d2252"}, + {file = "lxml-5.3.1-cp313-cp313-win32.whl", hash = "sha256:d0751528b97d2b19a388b302be2a0ee05817097bab46ff0ed76feeec24951f78"}, + {file = "lxml-5.3.1-cp313-cp313-win_amd64.whl", hash = "sha256:91fb6a43d72b4f8863d21f347a9163eecbf36e76e2f51068d59cd004c506f332"}, + {file = "lxml-5.3.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:016b96c58e9a4528219bb563acf1aaaa8bc5452e7651004894a973f03b84ba81"}, + {file = "lxml-5.3.1-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82a4bb10b0beef1434fb23a09f001ab5ca87895596b4581fd53f1e5145a8934a"}, + {file = "lxml-5.3.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d68eeef7b4d08a25e51897dac29bcb62aba830e9ac6c4e3297ee7c6a0cf6439"}, + {file = "lxml-5.3.1-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:f12582b8d3b4c6be1d298c49cb7ae64a3a73efaf4c2ab4e37db182e3545815ac"}, + {file = "lxml-5.3.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:2df7ed5edeb6bd5590914cd61df76eb6cce9d590ed04ec7c183cf5509f73530d"}, + {file = "lxml-5.3.1-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:585c4dc429deebc4307187d2b71ebe914843185ae16a4d582ee030e6cfbb4d8a"}, + {file = "lxml-5.3.1-cp36-cp36m-win32.whl", hash = "sha256:06a20d607a86fccab2fc15a77aa445f2bdef7b49ec0520a842c5c5afd8381576"}, + {file = "lxml-5.3.1-cp36-cp36m-win_amd64.whl", hash = "sha256:057e30d0012439bc54ca427a83d458752ccda725c1c161cc283db07bcad43cf9"}, + {file = "lxml-5.3.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4867361c049761a56bd21de507cab2c2a608c55102311d142ade7dab67b34f32"}, + {file = "lxml-5.3.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3dddf0fb832486cc1ea71d189cb92eb887826e8deebe128884e15020bb6e3f61"}, + {file = "lxml-5.3.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bcc211542f7af6f2dfb705f5f8b74e865592778e6cafdfd19c792c244ccce19"}, + {file = "lxml-5.3.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aaca5a812f050ab55426c32177091130b1e49329b3f002a32934cd0245571307"}, + {file = "lxml-5.3.1-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:236610b77589faf462337b3305a1be91756c8abc5a45ff7ca8f245a71c5dab70"}, + {file = "lxml-5.3.1-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:aed57b541b589fa05ac248f4cb1c46cbb432ab82cbd467d1c4f6a2bdc18aecf9"}, + {file = 
"lxml-5.3.1-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:75fa3d6946d317ffc7016a6fcc44f42db6d514b7fdb8b4b28cbe058303cb6e53"}, + {file = "lxml-5.3.1-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:96eef5b9f336f623ffc555ab47a775495e7e8846dde88de5f941e2906453a1ce"}, + {file = "lxml-5.3.1-cp37-cp37m-win32.whl", hash = "sha256:ef45f31aec9be01379fc6c10f1d9c677f032f2bac9383c827d44f620e8a88407"}, + {file = "lxml-5.3.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0611da6b07dd3720f492db1b463a4d1175b096b49438761cc9f35f0d9eaaef5"}, + {file = "lxml-5.3.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b2aca14c235c7a08558fe0a4786a1a05873a01e86b474dfa8f6df49101853a4e"}, + {file = "lxml-5.3.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae82fce1d964f065c32c9517309f0c7be588772352d2f40b1574a214bd6e6098"}, + {file = "lxml-5.3.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7aae7a3d63b935babfdc6864b31196afd5145878ddd22f5200729006366bc4d5"}, + {file = "lxml-5.3.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8e0d177b1fe251c3b1b914ab64135475c5273c8cfd2857964b2e3bb0fe196a7"}, + {file = "lxml-5.3.1-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:6c4dd3bfd0c82400060896717dd261137398edb7e524527438c54a8c34f736bf"}, + {file = "lxml-5.3.1-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:f1208c1c67ec9e151d78aa3435aa9b08a488b53d9cfac9b699f15255a3461ef2"}, + {file = "lxml-5.3.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:c6aacf00d05b38a5069826e50ae72751cb5bc27bdc4d5746203988e429b385bb"}, + {file = "lxml-5.3.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:5881aaa4bf3a2d086c5f20371d3a5856199a0d8ac72dd8d0dbd7a2ecfc26ab73"}, + {file = "lxml-5.3.1-cp38-cp38-win32.whl", hash = "sha256:45fbb70ccbc8683f2fb58bea89498a7274af1d9ec7995e9f4af5604e028233fc"}, + {file = "lxml-5.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:7512b4d0fc5339d5abbb14d1843f70499cab90d0b864f790e73f780f041615d7"}, + {file = "lxml-5.3.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5885bc586f1edb48e5d68e7a4b4757b5feb2a496b64f462b4d65950f5af3364f"}, + {file = "lxml-5.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1b92fe86e04f680b848fff594a908edfa72b31bfc3499ef7433790c11d4c8cd8"}, + {file = "lxml-5.3.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a091026c3bf7519ab1e64655a3f52a59ad4a4e019a6f830c24d6430695b1cf6a"}, + {file = "lxml-5.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8ffb141361108e864ab5f1813f66e4e1164181227f9b1f105b042729b6c15125"}, + {file = "lxml-5.3.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3715cdf0dd31b836433af9ee9197af10e3df41d273c19bb249230043667a5dfd"}, + {file = "lxml-5.3.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88b72eb7222d918c967202024812c2bfb4048deeb69ca328363fb8e15254c549"}, + {file = "lxml-5.3.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa59974880ab5ad8ef3afaa26f9bda148c5f39e06b11a8ada4660ecc9fb2feb3"}, + {file = "lxml-5.3.1-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:3bb8149840daf2c3f97cebf00e4ed4a65a0baff888bf2605a8d0135ff5cf764e"}, + {file = "lxml-5.3.1-cp39-cp39-manylinux_2_28_ppc64le.whl", hash = "sha256:0d6b2fa86becfa81f0a0271ccb9eb127ad45fb597733a77b92e8a35e53414914"}, + {file = "lxml-5.3.1-cp39-cp39-manylinux_2_28_s390x.whl", hash = 
"sha256:136bf638d92848a939fd8f0e06fcf92d9f2e4b57969d94faae27c55f3d85c05b"}, + {file = "lxml-5.3.1-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:89934f9f791566e54c1d92cdc8f8fd0009447a5ecdb1ec6b810d5f8c4955f6be"}, + {file = "lxml-5.3.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:a8ade0363f776f87f982572c2860cc43c65ace208db49c76df0a21dde4ddd16e"}, + {file = "lxml-5.3.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:bfbbab9316330cf81656fed435311386610f78b6c93cc5db4bebbce8dd146675"}, + {file = "lxml-5.3.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:172d65f7c72a35a6879217bcdb4bb11bc88d55fb4879e7569f55616062d387c2"}, + {file = "lxml-5.3.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e3c623923967f3e5961d272718655946e5322b8d058e094764180cdee7bab1af"}, + {file = "lxml-5.3.1-cp39-cp39-win32.whl", hash = "sha256:ce0930a963ff593e8bb6fda49a503911accc67dee7e5445eec972668e672a0f0"}, + {file = "lxml-5.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:f7b64fcd670bca8800bc10ced36620c6bbb321e7bc1214b9c0c0df269c1dddc2"}, + {file = "lxml-5.3.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:afa578b6524ff85fb365f454cf61683771d0170470c48ad9d170c48075f86725"}, + {file = "lxml-5.3.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67f5e80adf0aafc7b5454f2c1cb0cde920c9b1f2cbd0485f07cc1d0497c35c5d"}, + {file = "lxml-5.3.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dd0b80ac2d8f13ffc906123a6f20b459cb50a99222d0da492360512f3e50f84"}, + {file = "lxml-5.3.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:422c179022ecdedbe58b0e242607198580804253da220e9454ffe848daa1cfd2"}, + {file = "lxml-5.3.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:524ccfded8989a6595dbdda80d779fb977dbc9a7bc458864fc9a0c2fc15dc877"}, + {file = "lxml-5.3.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:48fd46bf7155def2e15287c6f2b133a2f78e2d22cdf55647269977b873c65499"}, + {file = "lxml-5.3.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:05123fad495a429f123307ac6d8fd6f977b71e9a0b6d9aeeb8f80c017cb17131"}, + {file = "lxml-5.3.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a243132767150a44e6a93cd1dde41010036e1cbc63cc3e9fe1712b277d926ce3"}, + {file = "lxml-5.3.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c92ea6d9dd84a750b2bae72ff5e8cf5fdd13e58dda79c33e057862c29a8d5b50"}, + {file = "lxml-5.3.1-pp37-pypy37_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:2f1be45d4c15f237209bbf123a0e05b5d630c8717c42f59f31ea9eae2ad89394"}, + {file = "lxml-5.3.1-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:a83d3adea1e0ee36dac34627f78ddd7f093bb9cfc0a8e97f1572a949b695cb98"}, + {file = "lxml-5.3.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:3edbb9c9130bac05d8c3fe150c51c337a471cc7fdb6d2a0a7d3a88e88a829314"}, + {file = "lxml-5.3.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2f23cf50eccb3255b6e913188291af0150d89dab44137a69e14e4dcb7be981f1"}, + {file = "lxml-5.3.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df7e5edac4778127f2bf452e0721a58a1cfa4d1d9eac63bdd650535eb8543615"}, + {file = "lxml-5.3.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:094b28ed8a8a072b9e9e2113a81fda668d2053f2ca9f2d202c2c8c7c2d6516b1"}, + {file = "lxml-5.3.1-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = 
"sha256:514fe78fc4b87e7a7601c92492210b20a1b0c6ab20e71e81307d9c2e377c64de"}, + {file = "lxml-5.3.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:8fffc08de02071c37865a155e5ea5fce0282e1546fd5bde7f6149fcaa32558ac"}, + {file = "lxml-5.3.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:4b0d5cdba1b655d5b18042ac9c9ff50bda33568eb80feaaca4fc237b9c4fbfde"}, + {file = "lxml-5.3.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:3031e4c16b59424e8d78522c69b062d301d951dc55ad8685736c3335a97fc270"}, + {file = "lxml-5.3.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb659702a45136c743bc130760c6f137870d4df3a9e14386478b8a0511abcfca"}, + {file = "lxml-5.3.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a11b16a33656ffc43c92a5343a28dc71eefe460bcc2a4923a96f292692709f6"}, + {file = "lxml-5.3.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c5ae125276f254b01daa73e2c103363d3e99e3e10505686ac7d9d2442dd4627a"}, + {file = "lxml-5.3.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c76722b5ed4a31ba103e0dc77ab869222ec36efe1a614e42e9bcea88a36186fe"}, + {file = "lxml-5.3.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:33e06717c00c788ab4e79bc4726ecc50c54b9bfb55355eae21473c145d83c2d2"}, + {file = "lxml-5.3.1.tar.gz", hash = "sha256:106b7b5d2977b339f1e97efe2778e2ab20e99994cbb0ec5e55771ed0795920c8"}, +] + +[package.extras] +cssselect = ["cssselect (>=0.7)"] +html-clean = ["lxml_html_clean"] +html5 = ["html5lib"] +htmlsoup = ["BeautifulSoup4"] +source = ["Cython (>=3.0.11,<3.1.0)"] [[package]] -name = "pycodestyle" -version = "2.8.0" -description = "Python style guide checker" -category = "dev" +name = "markdown" +version = "3.7" +description = "Python implementation of John Gruber's Markdown." optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.8" +files = [ + {file = "Markdown-3.7-py3-none-any.whl", hash = "sha256:7eb6df5690b81a1d7942992c97fad2938e956e79df20cbc6186e9c3a77b1c803"}, + {file = "markdown-3.7.tar.gz", hash = "sha256:2ae2471477cfd02dbbf038d5d9bc226d40def84b4fe2986e49b59b6b472bbed2"}, +] + +[package.dependencies] +importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} + +[package.extras] +docs = ["mdx-gh-links (>=0.2)", "mkdocs (>=1.5)", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-nature (>=0.6)", "mkdocs-section-index", "mkdocstrings[python]"] +testing = ["coverage", "pyyaml"] [[package]] -name = "pycparser" -version = "2.21" -description = "C parser in Python" -category = "main" +name = "markupsafe" +version = "3.0.2" +description = "Safely add untrusted strings to HTML/XML markup." 
optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.9" +files = [ + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"}, + {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"}, +] [[package]] -name = "pycryptodome" -version = "3.15.0" -description = "Cryptographic library for Python" -category = "main" +name = "mergedeep" +version = "1.3.4" +description = "A deep merge function for 🐍." 
optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.6" +files = [ + {file = "mergedeep-1.3.4-py3-none-any.whl", hash = "sha256:70775750742b25c0d8f36c55aed03d24c3384d17c951b3175d898bd778ef0307"}, + {file = "mergedeep-1.3.4.tar.gz", hash = "sha256:0096d52e9dad9939c3d975a774666af186eda617e6ca84df4c94dec30004f2a8"}, +] [[package]] -name = "pydantic" -version = "1.9.2" -description = "Data validation and settings management using python type hints" -category = "main" +name = "mike" +version = "2.1.3" +description = "Manage multiple versions of your MkDocs-powered documentation" optional = false -python-versions = ">=3.6.1" +python-versions = "*" +files = [ + {file = "mike-2.1.3-py3-none-any.whl", hash = "sha256:d90c64077e84f06272437b464735130d380703a76a5738b152932884c60c062a"}, + {file = "mike-2.1.3.tar.gz", hash = "sha256:abd79b8ea483fb0275b7972825d3082e5ae67a41820f8d8a0dc7a3f49944e810"}, +] [package.dependencies] -typing-extensions = ">=3.7.4.3" +importlib-metadata = "*" +importlib-resources = "*" +jinja2 = ">=2.7" +mkdocs = ">=1.0" +pyparsing = ">=3.0" +pyyaml = ">=5.1" +pyyaml-env-tag = "*" +verspec = "*" [package.extras] -dotenv = ["python-dotenv (>=0.10.4)"] -email = ["email-validator (>=1.0.3)"] +dev = ["coverage", "flake8 (>=3.0)", "flake8-quotes", "shtab"] +test = ["coverage", "flake8 (>=3.0)", "flake8-quotes", "shtab"] [[package]] -name = "pyflakes" -version = "2.4.0" -description = "passive checker of Python programs" -category = "dev" +name = "mkdocs" +version = "1.6.1" +description = "Project documentation with Markdown." optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.8" +files = [ + {file = "mkdocs-1.6.1-py3-none-any.whl", hash = "sha256:db91759624d1647f3f34aa0c3f327dd2601beae39a366d6e064c03468d35c20e"}, + {file = "mkdocs-1.6.1.tar.gz", hash = "sha256:7b432f01d928c084353ab39c57282f29f92136665bdd6abf7c1ec8d822ef86f2"}, +] -[[package]] -name = "pyparsing" -version = "3.0.9" -description = "pyparsing module - Classes and methods to define and execute parsing grammars" -category = "main" -optional = false -python-versions = ">=3.6.8" +[package.dependencies] +click = ">=7.0" +colorama = {version = ">=0.4", markers = "platform_system == \"Windows\""} +ghp-import = ">=1.0" +importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} +jinja2 = ">=2.11.1" +markdown = ">=3.3.6" +markupsafe = ">=2.0.1" +mergedeep = ">=1.3.4" +mkdocs-get-deps = ">=0.2.0" +packaging = ">=20.5" +pathspec = ">=0.11.1" +pyyaml = ">=5.1" +pyyaml-env-tag = ">=0.1" +watchdog = ">=2.0" [package.extras] -diagrams = ["railroad-diagrams", "jinja2"] +i18n = ["babel (>=2.9.0)"] +min-versions = ["babel (==2.9.0)", "click (==7.0)", "colorama (==0.4)", "ghp-import (==1.0)", "importlib-metadata (==4.4)", "jinja2 (==2.11.1)", "markdown (==3.3.6)", "markupsafe (==2.0.1)", "mergedeep (==1.3.4)", "mkdocs-get-deps (==0.2.0)", "packaging (==20.5)", "pathspec (==0.11.1)", "pyyaml (==5.1)", "pyyaml-env-tag (==0.1)", "watchdog (==2.0)"] [[package]] -name = "pytest" -version = "7.1.2" -description = "pytest: simple powerful testing with Python" -category = "dev" +name = "mkdocs-autorefs" +version = "1.3.1" +description = "Automatically link across pages in MkDocs." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.9" +files = [ + {file = "mkdocs_autorefs-1.3.1-py3-none-any.whl", hash = "sha256:18c504ae4d3ee7f344369bb26cb31d4105569ee252aab7d75ec2734c2c8b0474"}, + {file = "mkdocs_autorefs-1.3.1.tar.gz", hash = "sha256:a6d30cbcccae336d622a66c2418a3c92a8196b69782774529ad441abb23c0902"}, +] [package.dependencies] -atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} -attrs = ">=19.2.0" -colorama = {version = "*", markers = "sys_platform == \"win32\""} -iniconfig = "*" -packaging = "*" -pluggy = ">=0.12,<2.0" -py = ">=1.8.2" -tomli = ">=1.0.0" - -[package.extras] -testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] +Markdown = ">=3.3" +markupsafe = ">=2.0.1" +mkdocs = ">=1.1" [[package]] -name = "pytest-cov" -version = "3.0.0" -description = "Pytest plugin for measuring coverage." -category = "dev" +name = "mkdocs-extra-sass-plugin" +version = "0.1.0" +description = "This plugin adds stylesheets to your mkdocs site from `Sass`/`SCSS`." optional = false python-versions = ">=3.6" +files = [ + {file = "mkdocs-extra-sass-plugin-0.1.0.tar.gz", hash = "sha256:cca7ae778585514371b22a63bcd69373d77e474edab4b270cf2924e05c879219"}, + {file = "mkdocs_extra_sass_plugin-0.1.0-py3-none-any.whl", hash = "sha256:10aa086fa8ef1fc4650f7bb6927deb7bf5bbf5a2dd3178f47e4ef44546b156db"}, +] [package.dependencies] -coverage = {version = ">=5.2.1", extras = ["toml"]} -pytest = ">=4.6" +beautifulsoup4 = ">=4.6.3" +libsass = ">=0.15" +mkdocs = ">=1.1" -[package.extras] -testing = ["fields", "hunter", "process-tests", "six", "pytest-xdist", "virtualenv"] +[[package]] +name = "mkdocs-get-deps" +version = "0.2.0" +description = "MkDocs extension that lists all dependencies according to a mkdocs.yml file" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mkdocs_get_deps-0.2.0-py3-none-any.whl", hash = "sha256:2bf11d0b133e77a0dd036abeeb06dec8775e46efa526dc70667d8863eefc6134"}, + {file = "mkdocs_get_deps-0.2.0.tar.gz", hash = "sha256:162b3d129c7fad9b19abfdcb9c1458a651628e4b1dea628ac68790fb3061c60c"}, +] + +[package.dependencies] +importlib-metadata = {version = ">=4.3", markers = "python_version < \"3.10\""} +mergedeep = ">=1.3.4" +platformdirs = ">=2.2.0" +pyyaml = ">=5.1" [[package]] -name = "python-daemon" -version = "2.3.1" -description = "Library to implement a well-behaved Unix daemon process." 
-category = "main" +name = "mkdocs-material" +version = "9.6.5" +description = "Documentation that simply works" optional = false -python-versions = "*" +python-versions = ">=3.8" +files = [ + {file = "mkdocs_material-9.6.5-py3-none-any.whl", hash = "sha256:aad3e6fb860c20870f75fb2a69ef901f1be727891e41adb60b753efcae19453b"}, + {file = "mkdocs_material-9.6.5.tar.gz", hash = "sha256:b714679a8c91b0ffe2188e11ed58c44d2523e9c2ae26a29cc652fa7478faa21f"}, +] [package.dependencies] -docutils = "*" -lockfile = ">=0.10" +babel = ">=2.10,<3.0" +colorama = ">=0.4,<1.0" +jinja2 = ">=3.0,<4.0" +markdown = ">=3.2,<4.0" +mkdocs = ">=1.6,<2.0" +mkdocs-material-extensions = ">=1.3,<2.0" +paginate = ">=0.5,<1.0" +pygments = ">=2.16,<3.0" +pymdown-extensions = ">=10.2,<11.0" +regex = ">=2022.4" +requests = ">=2.26,<3.0" [package.extras] -test = ["coverage", "docutils", "testscenarios (>=0.4)", "testtools"] +git = ["mkdocs-git-committers-plugin-2 (>=1.1,<3)", "mkdocs-git-revision-date-localized-plugin (>=1.2.4,<2.0)"] +imaging = ["cairosvg (>=2.6,<3.0)", "pillow (>=10.2,<11.0)"] +recommended = ["mkdocs-minify-plugin (>=0.7,<1.0)", "mkdocs-redirects (>=1.2,<2.0)", "mkdocs-rss-plugin (>=1.6,<2.0)"] [[package]] -name = "pyyaml" -version = "6.0" -description = "YAML parser and emitter for Python" -category = "main" +name = "mkdocs-material-extensions" +version = "1.3.1" +description = "Extension pack for Python Markdown and MkDocs Material." optional = false -python-versions = ">=3.6" - +python-versions = ">=3.8" +files = [ + {file = "mkdocs_material_extensions-1.3.1-py3-none-any.whl", hash = "sha256:adff8b62700b25cb77b53358dad940f3ef973dd6db797907c49e3c2ef3ab4e31"}, + {file = "mkdocs_material_extensions-1.3.1.tar.gz", hash = "sha256:10c9511cea88f568257f960358a467d12b970e1f7b2c0e5fb2bb48cab1928443"}, +] + [[package]] -name = "requests" -version = "2.28.1" -description = "Python HTTP for Humans." -category = "main" +name = "mkdocstrings" +version = "0.28.1" +description = "Automatic documentation from sources, for MkDocs." optional = false -python-versions = ">=3.7, <4" +python-versions = ">=3.9" +files = [ + {file = "mkdocstrings-0.28.1-py3-none-any.whl", hash = "sha256:a5878ae5cd1e26f491ff084c1f9ab995687d52d39a5c558e9b7023d0e4e0b740"}, + {file = "mkdocstrings-0.28.1.tar.gz", hash = "sha256:fb64576906771b7701e8e962fd90073650ff689e95eb86e86751a66d65ab4489"}, +] [package.dependencies] -certifi = ">=2017.4.17" -charset-normalizer = ">=2,<3" -idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<1.27" +importlib-metadata = {version = ">=4.6", markers = "python_version < \"3.10\""} +Jinja2 = ">=2.11.1" +Markdown = ">=3.6" +MarkupSafe = ">=1.1" +mkdocs = ">=1.4" +mkdocs-autorefs = ">=1.3" +mkdocs-get-deps = ">=0.2" +pymdown-extensions = ">=6.3" +typing-extensions = {version = ">=4.1", markers = "python_version < \"3.10\""} [package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"] +crystal = ["mkdocstrings-crystal (>=0.3.4)"] +python = ["mkdocstrings-python (>=0.5.2)"] +python-legacy = ["mkdocstrings-python-legacy (>=0.2.1)"] + +[[package]] +name = "mkdocstrings-python" +version = "1.16.1" +description = "A Python handler for mkdocstrings." 
+optional = false +python-versions = ">=3.9" +files = [ + {file = "mkdocstrings_python-1.16.1-py3-none-any.whl", hash = "sha256:b88ff6fc6a293cee9cb42313f1cba37a2c5cdf37bcc60b241ec7ab66b5d41b58"}, + {file = "mkdocstrings_python-1.16.1.tar.gz", hash = "sha256:d7152d17da74d3616a0f17df5d2da771ecf7340518c158650e5a64a0a95973f4"}, +] + +[package.dependencies] +griffe = ">=0.49" +mkdocs-autorefs = ">=1.2" +mkdocstrings = ">=0.28" +typing-extensions = {version = ">=4.0", markers = "python_version < \"3.11\""} + +[[package]] +name = "mmh3" +version = "5.1.0" +description = "Python extension for MurmurHash (MurmurHash3), a set of fast and robust hash functions." +optional = false +python-versions = ">=3.9" +files = [ + {file = "mmh3-5.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:eaf4ac5c6ee18ca9232238364d7f2a213278ae5ca97897cafaa123fcc7bb8bec"}, + {file = "mmh3-5.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:48f9aa8ccb9ad1d577a16104834ac44ff640d8de8c0caed09a2300df7ce8460a"}, + {file = "mmh3-5.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d4ba8cac21e1f2d4e436ce03a82a7f87cda80378691f760e9ea55045ec480a3d"}, + {file = "mmh3-5.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d69281c281cb01994f054d862a6bb02a2e7acfe64917795c58934b0872b9ece4"}, + {file = "mmh3-5.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4d05ed3962312fbda2a1589b97359d2467f677166952f6bd410d8c916a55febf"}, + {file = "mmh3-5.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78ae6a03f4cff4aa92ddd690611168856f8c33a141bd3e5a1e0a85521dc21ea0"}, + {file = "mmh3-5.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:95f983535b39795d9fb7336438faae117424c6798f763d67c6624f6caf2c4c01"}, + {file = "mmh3-5.1.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d46fdd80d4c7ecadd9faa6181e92ccc6fe91c50991c9af0e371fdf8b8a7a6150"}, + {file = "mmh3-5.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0f16e976af7365ea3b5c425124b2a7f0147eed97fdbb36d99857f173c8d8e096"}, + {file = "mmh3-5.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:6fa97f7d1e1f74ad1565127229d510f3fd65d931fdedd707c1e15100bc9e5ebb"}, + {file = "mmh3-5.1.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:4052fa4a8561bd62648e9eb993c8f3af3bdedadf3d9687aa4770d10e3709a80c"}, + {file = "mmh3-5.1.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:3f0e8ae9f961037f812afe3cce7da57abf734285961fffbeff9a4c011b737732"}, + {file = "mmh3-5.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:99297f207db967814f1f02135bb7fe7628b9eacb046134a34e1015b26b06edce"}, + {file = "mmh3-5.1.0-cp310-cp310-win32.whl", hash = "sha256:2e6c8dc3631a5e22007fbdb55e993b2dbce7985c14b25b572dd78403c2e79182"}, + {file = "mmh3-5.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:e4e8c7ad5a4dddcfde35fd28ef96744c1ee0f9d9570108aa5f7e77cf9cfdf0bf"}, + {file = "mmh3-5.1.0-cp310-cp310-win_arm64.whl", hash = "sha256:45da549269883208912868a07d0364e1418d8292c4259ca11699ba1b2475bd26"}, + {file = "mmh3-5.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0b529dcda3f951ff363a51d5866bc6d63cf57f1e73e8961f864ae5010647079d"}, + {file = "mmh3-5.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4db1079b3ace965e562cdfc95847312f9273eb2ad3ebea983435c8423e06acd7"}, + {file = "mmh3-5.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:22d31e3a0ff89b8eb3b826d6fc8e19532998b2aa6b9143698043a1268da413e1"}, + {file = "mmh3-5.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2139bfbd354cd6cb0afed51c4b504f29bcd687a3b1460b7e89498329cc28a894"}, + {file = "mmh3-5.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8c8105c6a435bc2cd6ea2ef59558ab1a2976fd4a4437026f562856d08996673a"}, + {file = "mmh3-5.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57730067174a7f36fcd6ce012fe359bd5510fdaa5fe067bc94ed03e65dafb769"}, + {file = "mmh3-5.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bde80eb196d7fdc765a318604ded74a4378f02c5b46c17aa48a27d742edaded2"}, + {file = "mmh3-5.1.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9c8eddcb441abddeb419c16c56fd74b3e2df9e57f7aa2903221996718435c7a"}, + {file = "mmh3-5.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:99e07e4acafbccc7a28c076a847fb060ffc1406036bc2005acb1b2af620e53c3"}, + {file = "mmh3-5.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:9e25ba5b530e9a7d65f41a08d48f4b3fedc1e89c26486361166a5544aa4cad33"}, + {file = "mmh3-5.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:bb9bf7475b4d99156ce2f0cf277c061a17560c8c10199c910a680869a278ddc7"}, + {file = "mmh3-5.1.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:2a1b0878dd281ea3003368ab53ff6f568e175f1b39f281df1da319e58a19c23a"}, + {file = "mmh3-5.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:25f565093ac8b8aefe0f61f8f95c9a9d11dd69e6a9e9832ff0d293511bc36258"}, + {file = "mmh3-5.1.0-cp311-cp311-win32.whl", hash = "sha256:1e3554d8792387eac73c99c6eaea0b3f884e7130eb67986e11c403e4f9b6d372"}, + {file = "mmh3-5.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:8ad777a48197882492af50bf3098085424993ce850bdda406a358b6ab74be759"}, + {file = "mmh3-5.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:f29dc4efd99bdd29fe85ed6c81915b17b2ef2cf853abf7213a48ac6fb3eaabe1"}, + {file = "mmh3-5.1.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:45712987367cb9235026e3cbf4334670522a97751abfd00b5bc8bfa022c3311d"}, + {file = "mmh3-5.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b1020735eb35086ab24affbea59bb9082f7f6a0ad517cb89f0fc14f16cea4dae"}, + {file = "mmh3-5.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:babf2a78ce5513d120c358722a2e3aa7762d6071cd10cede026f8b32452be322"}, + {file = "mmh3-5.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4f47f58cd5cbef968c84a7c1ddc192fef0a36b48b0b8a3cb67354531aa33b00"}, + {file = "mmh3-5.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2044a601c113c981f2c1e14fa33adc9b826c9017034fe193e9eb49a6882dbb06"}, + {file = "mmh3-5.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c94d999c9f2eb2da44d7c2826d3fbffdbbbbcde8488d353fee7c848ecc42b968"}, + {file = "mmh3-5.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a015dcb24fa0c7a78f88e9419ac74f5001c1ed6a92e70fd1803f74afb26a4c83"}, + {file = "mmh3-5.1.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:457da019c491a2d20e2022c7d4ce723675e4c081d9efc3b4d8b9f28a5ea789bd"}, + {file = "mmh3-5.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:71408579a570193a4ac9c77344d68ddefa440b00468a0b566dcc2ba282a9c559"}, + {file = "mmh3-5.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:8b3a04bc214a6e16c81f02f855e285c6df274a2084787eeafaa45f2fbdef1b63"}, + {file = "mmh3-5.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:832dae26a35514f6d3c1e267fa48e8de3c7b978afdafa0529c808ad72e13ada3"}, + {file = "mmh3-5.1.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:bf658a61fc92ef8a48945ebb1076ef4ad74269e353fffcb642dfa0890b13673b"}, + {file = "mmh3-5.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3313577453582b03383731b66447cdcdd28a68f78df28f10d275d7d19010c1df"}, + {file = "mmh3-5.1.0-cp312-cp312-win32.whl", hash = "sha256:1d6508504c531ab86c4424b5a5ff07c1132d063863339cf92f6657ff7a580f76"}, + {file = "mmh3-5.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:aa75981fcdf3f21759d94f2c81b6a6e04a49dfbcdad88b152ba49b8e20544776"}, + {file = "mmh3-5.1.0-cp312-cp312-win_arm64.whl", hash = "sha256:a4c1a76808dfea47f7407a0b07aaff9087447ef6280716fd0783409b3088bb3c"}, + {file = "mmh3-5.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7a523899ca29cfb8a5239618474a435f3d892b22004b91779fcb83504c0d5b8c"}, + {file = "mmh3-5.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:17cef2c3a6ca2391ca7171a35ed574b5dab8398163129a3e3a4c05ab85a4ff40"}, + {file = "mmh3-5.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:52e12895b30110f3d89dae59a888683cc886ed0472dd2eca77497edef6161997"}, + {file = "mmh3-5.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0d6719045cda75c3f40397fc24ab67b18e0cb8f69d3429ab4c39763c4c608dd"}, + {file = "mmh3-5.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d19fa07d303a91f8858982c37e6939834cb11893cb3ff20e6ee6fa2a7563826a"}, + {file = "mmh3-5.1.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:31b47a620d622fbde8ca1ca0435c5d25de0ac57ab507209245e918128e38e676"}, + {file = "mmh3-5.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00f810647c22c179b6821079f7aa306d51953ac893587ee09cf1afb35adf87cb"}, + {file = "mmh3-5.1.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6128b610b577eed1e89ac7177ab0c33d06ade2aba93f5c89306032306b5f1c6"}, + {file = "mmh3-5.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1e550a45d2ff87a1c11b42015107f1778c93f4c6f8e731bf1b8fa770321b8cc4"}, + {file = "mmh3-5.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:785ae09276342f79fd8092633e2d52c0f7c44d56e8cfda8274ccc9b76612dba2"}, + {file = "mmh3-5.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:0f4be3703a867ef976434afd3661a33884abe73ceb4ee436cac49d3b4c2aaa7b"}, + {file = "mmh3-5.1.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:e513983830c4ff1f205ab97152a0050cf7164f1b4783d702256d39c637b9d107"}, + {file = "mmh3-5.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b9135c300535c828c0bae311b659f33a31c941572eae278568d1a953c4a57b59"}, + {file = "mmh3-5.1.0-cp313-cp313-win32.whl", hash = "sha256:c65dbd12885a5598b70140d24de5839551af5a99b29f9804bb2484b29ef07692"}, + {file = "mmh3-5.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:10db7765201fc65003fa998faa067417ef6283eb5f9bba8f323c48fd9c33e91f"}, + {file = "mmh3-5.1.0-cp313-cp313-win_arm64.whl", hash = "sha256:b22fe2e54be81f6c07dcb36b96fa250fb72effe08aa52fbb83eade6e1e2d5fd7"}, + {file = "mmh3-5.1.0-cp39-cp39-macosx_10_9_universal2.whl", 
hash = "sha256:166b67749a1d8c93b06f5e90576f1ba838a65c8e79f28ffd9dfafba7c7d0a084"}, + {file = "mmh3-5.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:adba83c7ba5cc8ea201ee1e235f8413a68e7f7b8a657d582cc6c6c9d73f2830e"}, + {file = "mmh3-5.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a61f434736106804eb0b1612d503c4e6eb22ba31b16e6a2f987473de4226fa55"}, + {file = "mmh3-5.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba9ce59816b30866093f048b3312c2204ff59806d3a02adee71ff7bd22b87554"}, + {file = "mmh3-5.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd51597bef1e503363b05cb579db09269e6e6c39d419486626b255048daf545b"}, + {file = "mmh3-5.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d51a1ed642d3fb37b8f4cab966811c52eb246c3e1740985f701ef5ad4cdd2145"}, + {file = "mmh3-5.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:709bfe81c53bf8a3609efcbd65c72305ade60944f66138f697eefc1a86b6e356"}, + {file = "mmh3-5.1.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e01a9b0092b6f82e861137c8e9bb9899375125b24012eb5219e61708be320032"}, + {file = "mmh3-5.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:27e46a2c13c9a805e03c9ec7de0ca8e096794688ab2125bdce4229daf60c4a56"}, + {file = "mmh3-5.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:5766299c1d26f6bfd0a638e070bd17dbd98d4ccb067d64db3745bf178e700ef0"}, + {file = "mmh3-5.1.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:7785205e3e4443fdcbb73766798c7647f94c2f538b90f666688f3e757546069e"}, + {file = "mmh3-5.1.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:8e574fbd39afb433b3ab95683b1b4bf18313dc46456fc9daaddc2693c19ca565"}, + {file = "mmh3-5.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1b6727a5a20e32cbf605743749f3862abe5f5e097cbf2afc7be5aafd32a549ae"}, + {file = "mmh3-5.1.0-cp39-cp39-win32.whl", hash = "sha256:d6eaa711d4b9220fe5252032a44bf68e5dcfb7b21745a96efc9e769b0dd57ec2"}, + {file = "mmh3-5.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:49d444913f6c02980e5241a53fe9af2338f2043d6ce5b6f5ea7d302c52c604ac"}, + {file = "mmh3-5.1.0-cp39-cp39-win_arm64.whl", hash = "sha256:0daaeaedd78773b70378f2413c7d6b10239a75d955d30d54f460fb25d599942d"}, + {file = "mmh3-5.1.0.tar.gz", hash = "sha256:136e1e670500f177f49ec106a4ebf0adf20d18d96990cc36ea492c651d2b406c"}, +] + +[package.extras] +benchmark = ["pymmh3 (==0.0.5)", "pyperf (==2.8.1)", "xxhash (==3.5.0)"] +docs = ["myst-parser (==4.0.0)", "shibuya (==2024.12.21)", "sphinx (==8.1.3)", "sphinx-copybutton (==0.5.2)"] +lint = ["black (==24.10.0)", "clang-format (==19.1.7)", "isort (==5.13.2)", "pylint (==3.3.3)"] +plot = ["matplotlib (==3.10.0)", "pandas (==2.2.3)"] +test = ["pytest (==8.3.4)", "pytest-sugar (==1.0.0)"] +type = ["mypy (==1.14.1)"] + +[[package]] +name = "nodeenv" +version = "1.9.1" +description = "Node.js virtual environment builder" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"}, + {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, +] + +[[package]] +name = "omegaconf" +version = "2.3.0" +description = "A flexible configuration library" +optional = false +python-versions = ">=3.6" +files = [ + {file = 
"omegaconf-2.3.0-py3-none-any.whl", hash = "sha256:7b4df175cdb08ba400f45cae3bdcae7ba8365db4d165fc65fd04b050ab63b46b"}, + {file = "omegaconf-2.3.0.tar.gz", hash = "sha256:d5d4b6d29955cc50ad50c46dc269bcd92c6e00f5f90d23ab5fee7bfca4ba4cc7"}, +] + +[package.dependencies] +antlr4-python3-runtime = "==4.9.*" +PyYAML = ">=5.1.0" + +[[package]] +name = "orderly-set" +version = "5.3.0" +description = "Orderly set" +optional = false +python-versions = ">=3.8" +files = [ + {file = "orderly_set-5.3.0-py3-none-any.whl", hash = "sha256:c2c0bfe604f5d3d9b24e8262a06feb612594f37aa3845650548befd7772945d1"}, + {file = "orderly_set-5.3.0.tar.gz", hash = "sha256:80b3d8fdd3d39004d9aad389eaa0eab02c71f0a0511ba3a6d54a935a6c6a0acc"}, +] + +[[package]] +name = "orjson" +version = "3.10.15" +description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" +optional = false +python-versions = ">=3.8" +files = [ + {file = "orjson-3.10.15-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:552c883d03ad185f720d0c09583ebde257e41b9521b74ff40e08b7dec4559c04"}, + {file = "orjson-3.10.15-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:616e3e8d438d02e4854f70bfdc03a6bcdb697358dbaa6bcd19cbe24d24ece1f8"}, + {file = "orjson-3.10.15-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7c2c79fa308e6edb0ffab0a31fd75a7841bf2a79a20ef08a3c6e3b26814c8ca8"}, + {file = "orjson-3.10.15-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cb85490aa6bf98abd20607ab5c8324c0acb48d6da7863a51be48505646c814"}, + {file = "orjson-3.10.15-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:763dadac05e4e9d2bc14938a45a2d0560549561287d41c465d3c58aec818b164"}, + {file = "orjson-3.10.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a330b9b4734f09a623f74a7490db713695e13b67c959713b78369f26b3dee6bf"}, + {file = "orjson-3.10.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a61a4622b7ff861f019974f73d8165be1bd9a0855e1cad18ee167acacabeb061"}, + {file = "orjson-3.10.15-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:acd271247691574416b3228db667b84775c497b245fa275c6ab90dc1ffbbd2b3"}, + {file = "orjson-3.10.15-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:e4759b109c37f635aa5c5cc93a1b26927bfde24b254bcc0e1149a9fada253d2d"}, + {file = "orjson-3.10.15-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:9e992fd5cfb8b9f00bfad2fd7a05a4299db2bbe92e6440d9dd2fab27655b3182"}, + {file = "orjson-3.10.15-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f95fb363d79366af56c3f26b71df40b9a583b07bbaaf5b317407c4d58497852e"}, + {file = "orjson-3.10.15-cp310-cp310-win32.whl", hash = "sha256:f9875f5fea7492da8ec2444839dcc439b0ef298978f311103d0b7dfd775898ab"}, + {file = "orjson-3.10.15-cp310-cp310-win_amd64.whl", hash = "sha256:17085a6aa91e1cd70ca8533989a18b5433e15d29c574582f76f821737c8d5806"}, + {file = "orjson-3.10.15-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:c4cc83960ab79a4031f3119cc4b1a1c627a3dc09df125b27c4201dff2af7eaa6"}, + {file = "orjson-3.10.15-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ddbeef2481d895ab8be5185f2432c334d6dec1f5d1933a9c83014d188e102cef"}, + {file = "orjson-3.10.15-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9e590a0477b23ecd5b0ac865b1b907b01b3c5535f5e8a8f6ab0e503efb896334"}, + {file = 
"orjson-3.10.15-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a6be38bd103d2fd9bdfa31c2720b23b5d47c6796bcb1d1b598e3924441b4298d"}, + {file = "orjson-3.10.15-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ff4f6edb1578960ed628a3b998fa54d78d9bb3e2eb2cfc5c2a09732431c678d0"}, + {file = "orjson-3.10.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b0482b21d0462eddd67e7fce10b89e0b6ac56570424662b685a0d6fccf581e13"}, + {file = "orjson-3.10.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bb5cc3527036ae3d98b65e37b7986a918955f85332c1ee07f9d3f82f3a6899b5"}, + {file = "orjson-3.10.15-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d569c1c462912acdd119ccbf719cf7102ea2c67dd03b99edcb1a3048651ac96b"}, + {file = "orjson-3.10.15-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:1e6d33efab6b71d67f22bf2962895d3dc6f82a6273a965fab762e64fa90dc399"}, + {file = "orjson-3.10.15-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:c33be3795e299f565681d69852ac8c1bc5c84863c0b0030b2b3468843be90388"}, + {file = "orjson-3.10.15-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:eea80037b9fae5339b214f59308ef0589fc06dc870578b7cce6d71eb2096764c"}, + {file = "orjson-3.10.15-cp311-cp311-win32.whl", hash = "sha256:d5ac11b659fd798228a7adba3e37c010e0152b78b1982897020a8e019a94882e"}, + {file = "orjson-3.10.15-cp311-cp311-win_amd64.whl", hash = "sha256:cf45e0214c593660339ef63e875f32ddd5aa3b4adc15e662cdb80dc49e194f8e"}, + {file = "orjson-3.10.15-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9d11c0714fc85bfcf36ada1179400862da3288fc785c30e8297844c867d7505a"}, + {file = "orjson-3.10.15-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dba5a1e85d554e3897fa9fe6fbcff2ed32d55008973ec9a2b992bd9a65d2352d"}, + {file = "orjson-3.10.15-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7723ad949a0ea502df656948ddd8b392780a5beaa4c3b5f97e525191b102fff0"}, + {file = "orjson-3.10.15-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6fd9bc64421e9fe9bd88039e7ce8e58d4fead67ca88e3a4014b143cec7684fd4"}, + {file = "orjson-3.10.15-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dadba0e7b6594216c214ef7894c4bd5f08d7c0135f4dd0145600be4fbcc16767"}, + {file = "orjson-3.10.15-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b48f59114fe318f33bbaee8ebeda696d8ccc94c9e90bc27dbe72153094e26f41"}, + {file = "orjson-3.10.15-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:035fb83585e0f15e076759b6fedaf0abb460d1765b6a36f48018a52858443514"}, + {file = "orjson-3.10.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d13b7fe322d75bf84464b075eafd8e7dd9eae05649aa2a5354cfa32f43c59f17"}, + {file = "orjson-3.10.15-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:7066b74f9f259849629e0d04db6609db4cf5b973248f455ba5d3bd58a4daaa5b"}, + {file = "orjson-3.10.15-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:88dc3f65a026bd3175eb157fea994fca6ac7c4c8579fc5a86fc2114ad05705b7"}, + {file = "orjson-3.10.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b342567e5465bd99faa559507fe45e33fc76b9fb868a63f1642c6bc0735ad02a"}, + {file = "orjson-3.10.15-cp312-cp312-win32.whl", hash = "sha256:0a4f27ea5617828e6b58922fdbec67b0aa4bb844e2d363b9244c47fa2180e665"}, + {file = "orjson-3.10.15-cp312-cp312-win_amd64.whl", hash = 
"sha256:ef5b87e7aa9545ddadd2309efe6824bd3dd64ac101c15dae0f2f597911d46eaa"}, + {file = "orjson-3.10.15-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:bae0e6ec2b7ba6895198cd981b7cca95d1487d0147c8ed751e5632ad16f031a6"}, + {file = "orjson-3.10.15-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f93ce145b2db1252dd86af37d4165b6faa83072b46e3995ecc95d4b2301b725a"}, + {file = "orjson-3.10.15-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7c203f6f969210128af3acae0ef9ea6aab9782939f45f6fe02d05958fe761ef9"}, + {file = "orjson-3.10.15-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8918719572d662e18b8af66aef699d8c21072e54b6c82a3f8f6404c1f5ccd5e0"}, + {file = "orjson-3.10.15-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f71eae9651465dff70aa80db92586ad5b92df46a9373ee55252109bb6b703307"}, + {file = "orjson-3.10.15-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e117eb299a35f2634e25ed120c37c641398826c2f5a3d3cc39f5993b96171b9e"}, + {file = "orjson-3.10.15-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:13242f12d295e83c2955756a574ddd6741c81e5b99f2bef8ed8d53e47a01e4b7"}, + {file = "orjson-3.10.15-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7946922ada8f3e0b7b958cc3eb22cfcf6c0df83d1fe5521b4a100103e3fa84c8"}, + {file = "orjson-3.10.15-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:b7155eb1623347f0f22c38c9abdd738b287e39b9982e1da227503387b81b34ca"}, + {file = "orjson-3.10.15-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:208beedfa807c922da4e81061dafa9c8489c6328934ca2a562efa707e049e561"}, + {file = "orjson-3.10.15-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eca81f83b1b8c07449e1d6ff7074e82e3fd6777e588f1a6632127f286a968825"}, + {file = "orjson-3.10.15-cp313-cp313-win32.whl", hash = "sha256:c03cd6eea1bd3b949d0d007c8d57049aa2b39bd49f58b4b2af571a5d3833d890"}, + {file = "orjson-3.10.15-cp313-cp313-win_amd64.whl", hash = "sha256:fd56a26a04f6ba5fb2045b0acc487a63162a958ed837648c5781e1fe3316cfbf"}, + {file = "orjson-3.10.15-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:5e8afd6200e12771467a1a44e5ad780614b86abb4b11862ec54861a82d677746"}, + {file = "orjson-3.10.15-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da9a18c500f19273e9e104cca8c1f0b40a6470bcccfc33afcc088045d0bf5ea6"}, + {file = "orjson-3.10.15-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb00b7bfbdf5d34a13180e4805d76b4567025da19a197645ca746fc2fb536586"}, + {file = "orjson-3.10.15-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:33aedc3d903378e257047fee506f11e0833146ca3e57a1a1fb0ddb789876c1e1"}, + {file = "orjson-3.10.15-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd0099ae6aed5eb1fc84c9eb72b95505a3df4267e6962eb93cdd5af03be71c98"}, + {file = "orjson-3.10.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c864a80a2d467d7786274fce0e4f93ef2a7ca4ff31f7fc5634225aaa4e9e98c"}, + {file = "orjson-3.10.15-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c25774c9e88a3e0013d7d1a6c8056926b607a61edd423b50eb5c88fd7f2823ae"}, + {file = "orjson-3.10.15-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:e78c211d0074e783d824ce7bb85bf459f93a233eb67a5b5003498232ddfb0e8a"}, + {file = "orjson-3.10.15-cp38-cp38-musllinux_1_2_armv7l.whl", hash = 
"sha256:43e17289ffdbbac8f39243916c893d2ae41a2ea1a9cbb060a56a4d75286351ae"}, + {file = "orjson-3.10.15-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:781d54657063f361e89714293c095f506c533582ee40a426cb6489c48a637b81"}, + {file = "orjson-3.10.15-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:6875210307d36c94873f553786a808af2788e362bd0cf4c8e66d976791e7b528"}, + {file = "orjson-3.10.15-cp38-cp38-win32.whl", hash = "sha256:305b38b2b8f8083cc3d618927d7f424349afce5975b316d33075ef0f73576b60"}, + {file = "orjson-3.10.15-cp38-cp38-win_amd64.whl", hash = "sha256:5dd9ef1639878cc3efffed349543cbf9372bdbd79f478615a1c633fe4e4180d1"}, + {file = "orjson-3.10.15-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:ffe19f3e8d68111e8644d4f4e267a069ca427926855582ff01fc012496d19969"}, + {file = "orjson-3.10.15-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d433bf32a363823863a96561a555227c18a522a8217a6f9400f00ddc70139ae2"}, + {file = "orjson-3.10.15-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:da03392674f59a95d03fa5fb9fe3a160b0511ad84b7a3914699ea5a1b3a38da2"}, + {file = "orjson-3.10.15-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3a63bb41559b05360ded9132032239e47983a39b151af1201f07ec9370715c82"}, + {file = "orjson-3.10.15-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3766ac4702f8f795ff3fa067968e806b4344af257011858cc3d6d8721588b53f"}, + {file = "orjson-3.10.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a1c73dcc8fadbd7c55802d9aa093b36878d34a3b3222c41052ce6b0fc65f8e8"}, + {file = "orjson-3.10.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b299383825eafe642cbab34be762ccff9fd3408d72726a6b2a4506d410a71ab3"}, + {file = "orjson-3.10.15-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:abc7abecdbf67a173ef1316036ebbf54ce400ef2300b4e26a7b843bd446c2480"}, + {file = "orjson-3.10.15-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:3614ea508d522a621384c1d6639016a5a2e4f027f3e4a1c93a51867615d28829"}, + {file = "orjson-3.10.15-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:295c70f9dc154307777ba30fe29ff15c1bcc9dfc5c48632f37d20a607e9ba85a"}, + {file = "orjson-3.10.15-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:63309e3ff924c62404923c80b9e2048c1f74ba4b615e7584584389ada50ed428"}, + {file = "orjson-3.10.15-cp39-cp39-win32.whl", hash = "sha256:a2f708c62d026fb5340788ba94a55c23df4e1869fec74be455e0b2f5363b8507"}, + {file = "orjson-3.10.15-cp39-cp39-win_amd64.whl", hash = "sha256:efcf6c735c3d22ef60c4aa27a5238f1a477df85e9b15f2142f9d669beb2d13fd"}, + {file = "orjson-3.10.15.tar.gz", hash = "sha256:05ca7fe452a2e9d8d9d706a2984c95b9c2ebc5db417ce0b7a49b91d50642a23e"}, +] + +[[package]] +name = "packaging" +version = "24.2" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, + {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, +] + +[[package]] +name = "paginate" +version = "0.5.7" +description = "Divides large result sets into pages for easier browsing" +optional = false +python-versions = "*" +files = [ + {file = "paginate-0.5.7-py2.py3-none-any.whl", hash = "sha256:b885e2af73abcf01d9559fd5216b57ef722f8c42affbb63942377668e35c7591"}, + {file = "paginate-0.5.7.tar.gz", hash = 
"sha256:22bd083ab41e1a8b4f3690544afb2c60c25e5c9a63a30fa2f483f6c60c8e5945"}, +] + +[package.extras] +dev = ["pytest", "tox"] +lint = ["black"] + +[[package]] +name = "pathspec" +version = "0.12.1" +description = "Utility library for gitignore style pattern matching of file paths." +optional = false +python-versions = ">=3.8" +files = [ + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, +] + +[[package]] +name = "pexpect" +version = "4.9.0" +description = "Pexpect allows easy control of interactive console applications." +optional = false +python-versions = "*" +files = [ + {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"}, + {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"}, +] + +[package.dependencies] +ptyprocess = ">=0.5" + +[[package]] +name = "platformdirs" +version = "4.3.6" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." +optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, + {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, +] + +[package.extras] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.11.2)"] + +[[package]] +name = "pluggy" +version = "1.5.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "poetry-dynamic-versioning" +version = "1.7.1" +description = "Plugin for Poetry to enable dynamic versioning based on VCS tags" +optional = false +python-versions = "<4.0,>=3.7" +files = [ + {file = "poetry_dynamic_versioning-1.7.1-py3-none-any.whl", hash = "sha256:70a4a54bee89aef276e3f2f8841f10a6f140b19c5aeb371a1a6095f84fcbe7b1"}, + {file = "poetry_dynamic_versioning-1.7.1.tar.gz", hash = "sha256:7304b8459af7b7114cd83429827c4d3d8b7d29df4129dde8dff61c76f93faaa3"}, +] + +[package.dependencies] +dunamai = ">=1.21.0,<2.0.0" +jinja2 = ">=2.11.1,<4" +tomlkit = ">=0.4" + +[package.extras] +plugin = ["poetry (>=1.2.0)"] + +[[package]] +name = "pre-commit" +version = "4.1.0" +description = "A framework for managing and maintaining multi-language pre-commit hooks." 
+optional = false +python-versions = ">=3.9" +files = [ + {file = "pre_commit-4.1.0-py2.py3-none-any.whl", hash = "sha256:d29e7cb346295bcc1cc75fc3e92e343495e3ea0196c9ec6ba53f49f10ab6ae7b"}, + {file = "pre_commit-4.1.0.tar.gz", hash = "sha256:ae3f018575a588e30dfddfab9a05448bfbd6b73d78709617b5a2b853549716d4"}, +] + +[package.dependencies] +cfgv = ">=2.0.0" +identify = ">=1.0.0" +nodeenv = ">=0.11.1" +pyyaml = ">=5.1" +virtualenv = ">=20.10.0" + +[[package]] +name = "psutil" +version = "7.0.0" +description = "Cross-platform lib for process and system monitoring in Python. NOTE: the syntax of this script MUST be kept compatible with Python 2.7." +optional = false +python-versions = ">=3.6" +files = [ + {file = "psutil-7.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:101d71dc322e3cffd7cea0650b09b3d08b8e7c4109dd6809fe452dfd00e58b25"}, + {file = "psutil-7.0.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:39db632f6bb862eeccf56660871433e111b6ea58f2caea825571951d4b6aa3da"}, + {file = "psutil-7.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fcee592b4c6f146991ca55919ea3d1f8926497a713ed7faaf8225e174581e91"}, + {file = "psutil-7.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b1388a4f6875d7e2aff5c4ca1cc16c545ed41dd8bb596cefea80111db353a34"}, + {file = "psutil-7.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5f098451abc2828f7dc6b58d44b532b22f2088f4999a937557b603ce72b1993"}, + {file = "psutil-7.0.0-cp36-cp36m-win32.whl", hash = "sha256:84df4eb63e16849689f76b1ffcb36db7b8de703d1bc1fe41773db487621b6c17"}, + {file = "psutil-7.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:1e744154a6580bc968a0195fd25e80432d3afec619daf145b9e5ba16cc1d688e"}, + {file = "psutil-7.0.0-cp37-abi3-win32.whl", hash = "sha256:ba3fcef7523064a6c9da440fc4d6bd07da93ac726b5733c29027d7dc95b39d99"}, + {file = "psutil-7.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:4cf3d4eb1aa9b348dec30105c55cd9b7d4629285735a102beb4441e38db90553"}, + {file = "psutil-7.0.0.tar.gz", hash = "sha256:7be9c3eba38beccb6495ea33afd982a44074b78f28c434a1f51cc07fd315c456"}, +] + +[package.extras] +dev = ["abi3audit", "black (==24.10.0)", "check-manifest", "coverage", "packaging", "pylint", "pyperf", "pypinfo", "pytest", "pytest-cov", "pytest-xdist", "requests", "rstcheck", "ruff", "setuptools", "sphinx", "sphinx_rtd_theme", "toml-sort", "twine", "virtualenv", "vulture", "wheel"] +test = ["pytest", "pytest-xdist", "setuptools"] + +[[package]] +name = "ptyprocess" +version = "0.7.0" +description = "Run a subprocess in a pseudo terminal" +optional = false +python-versions = "*" +files = [ + {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, + {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, +] + +[[package]] +name = "puremagic" +version = "1.28" +description = "Pure python implementation of magic file detection" +optional = false +python-versions = "*" +files = [ + {file = "puremagic-1.28-py3-none-any.whl", hash = "sha256:e16cb9708ee2007142c37931c58f07f7eca956b3472489106a7245e5c3aa1241"}, + {file = "puremagic-1.28.tar.gz", hash = "sha256:195893fc129657f611b86b959aab337207d6df7f25372209269ed9e303c1a8c0"}, +] + +[[package]] +name = "pycparser" +version = "2.22" +description = "C parser in Python" +optional = false +python-versions = ">=3.8" +files = [ 
+ {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, + {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, +] + +[[package]] +name = "pycryptodome" +version = "3.21.0" +description = "Cryptographic library for Python" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +files = [ + {file = "pycryptodome-3.21.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:dad9bf36eda068e89059d1f07408e397856be9511d7113ea4b586642a429a4fd"}, + {file = "pycryptodome-3.21.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:a1752eca64c60852f38bb29e2c86fca30d7672c024128ef5d70cc15868fa10f4"}, + {file = "pycryptodome-3.21.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:3ba4cc304eac4d4d458f508d4955a88ba25026890e8abff9b60404f76a62c55e"}, + {file = "pycryptodome-3.21.0-cp27-cp27m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7cb087b8612c8a1a14cf37dd754685be9a8d9869bed2ffaaceb04850a8aeef7e"}, + {file = "pycryptodome-3.21.0-cp27-cp27m-musllinux_1_1_aarch64.whl", hash = "sha256:26412b21df30b2861424a6c6d5b1d8ca8107612a4cfa4d0183e71c5d200fb34a"}, + {file = "pycryptodome-3.21.0-cp27-cp27m-win32.whl", hash = "sha256:cc2269ab4bce40b027b49663d61d816903a4bd90ad88cb99ed561aadb3888dd3"}, + {file = "pycryptodome-3.21.0-cp27-cp27m-win_amd64.whl", hash = "sha256:0fa0a05a6a697ccbf2a12cec3d6d2650b50881899b845fac6e87416f8cb7e87d"}, + {file = "pycryptodome-3.21.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:6cce52e196a5f1d6797ff7946cdff2038d3b5f0aba4a43cb6bf46b575fd1b5bb"}, + {file = "pycryptodome-3.21.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:a915597ffccabe902e7090e199a7bf7a381c5506a747d5e9d27ba55197a2c568"}, + {file = "pycryptodome-3.21.0-cp27-cp27mu-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4e74c522d630766b03a836c15bff77cb657c5fdf098abf8b1ada2aebc7d0819"}, + {file = "pycryptodome-3.21.0-cp27-cp27mu-musllinux_1_1_aarch64.whl", hash = "sha256:a3804675283f4764a02db05f5191eb8fec2bb6ca34d466167fc78a5f05bbe6b3"}, + {file = "pycryptodome-3.21.0-cp36-abi3-macosx_10_9_universal2.whl", hash = "sha256:2480ec2c72438430da9f601ebc12c518c093c13111a5c1644c82cdfc2e50b1e4"}, + {file = "pycryptodome-3.21.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:de18954104667f565e2fbb4783b56667f30fb49c4d79b346f52a29cb198d5b6b"}, + {file = "pycryptodome-3.21.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2de4b7263a33947ff440412339cb72b28a5a4c769b5c1ca19e33dd6cd1dcec6e"}, + {file = "pycryptodome-3.21.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0714206d467fc911042d01ea3a1847c847bc10884cf674c82e12915cfe1649f8"}, + {file = "pycryptodome-3.21.0-cp36-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d85c1b613121ed3dbaa5a97369b3b757909531a959d229406a75b912dd51dd1"}, + {file = "pycryptodome-3.21.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:8898a66425a57bcf15e25fc19c12490b87bd939800f39a03ea2de2aea5e3611a"}, + {file = "pycryptodome-3.21.0-cp36-abi3-musllinux_1_2_i686.whl", hash = "sha256:932c905b71a56474bff8a9c014030bc3c882cee696b448af920399f730a650c2"}, + {file = "pycryptodome-3.21.0-cp36-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:18caa8cfbc676eaaf28613637a89980ad2fd96e00c564135bf90bc3f0b34dd93"}, + {file = "pycryptodome-3.21.0-cp36-abi3-win32.whl", hash = 
"sha256:280b67d20e33bb63171d55b1067f61fbd932e0b1ad976b3a184303a3dad22764"}, + {file = "pycryptodome-3.21.0-cp36-abi3-win_amd64.whl", hash = "sha256:b7aa25fc0baa5b1d95b7633af4f5f1838467f1815442b22487426f94e0d66c53"}, + {file = "pycryptodome-3.21.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:2cb635b67011bc147c257e61ce864879ffe6d03342dc74b6045059dfbdedafca"}, + {file = "pycryptodome-3.21.0-pp27-pypy_73-win32.whl", hash = "sha256:4c26a2f0dc15f81ea3afa3b0c87b87e501f235d332b7f27e2225ecb80c0b1cdd"}, + {file = "pycryptodome-3.21.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:d5ebe0763c982f069d3877832254f64974139f4f9655058452603ff559c482e8"}, + {file = "pycryptodome-3.21.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ee86cbde706be13f2dec5a42b52b1c1d1cbb90c8e405c68d0755134735c8dc6"}, + {file = "pycryptodome-3.21.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fd54003ec3ce4e0f16c484a10bc5d8b9bd77fa662a12b85779a2d2d85d67ee0"}, + {file = "pycryptodome-3.21.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5dfafca172933506773482b0e18f0cd766fd3920bd03ec85a283df90d8a17bc6"}, + {file = "pycryptodome-3.21.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:590ef0898a4b0a15485b05210b4a1c9de8806d3ad3d47f74ab1dc07c67a6827f"}, + {file = "pycryptodome-3.21.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f35e442630bc4bc2e1878482d6f59ea22e280d7121d7adeaedba58c23ab6386b"}, + {file = "pycryptodome-3.21.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff99f952db3db2fbe98a0b355175f93ec334ba3d01bbde25ad3a5a33abc02b58"}, + {file = "pycryptodome-3.21.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:8acd7d34af70ee63f9a849f957558e49a98f8f1634f86a59d2be62bb8e93f71c"}, + {file = "pycryptodome-3.21.0.tar.gz", hash = "sha256:f7787e0d469bdae763b876174cf2e6c0f7be79808af26b1da96f1a64bcf47297"}, +] + +[[package]] +name = "pydantic" +version = "2.10.6" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584"}, + {file = "pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236"}, +] + +[package.dependencies] +annotated-types = ">=0.6.0" +pydantic-core = "2.27.2" +typing-extensions = ">=4.12.2" + +[package.extras] +email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata"] + +[[package]] +name = "pydantic-core" +version = "2.27.2" +description = "Core functionality for Pydantic validation and serialization" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa"}, + {file = "pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7969e133a6f183be60e9f6f56bfae753585680f3b7307a8e555a948d443cc05a"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3de9961f2a346257caf0aa508a4da705467f53778e9ef6fe744c038119737ef5"}, + {file = 
"pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e2bb4d3e5873c37bb3dd58714d4cd0b0e6238cebc4177ac8fe878f8b3aa8e74c"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:280d219beebb0752699480fe8f1dc61ab6615c2046d76b7ab7ee38858de0a4e7"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47956ae78b6422cbd46f772f1746799cbb862de838fd8d1fbd34a82e05b0983a"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:14d4a5c49d2f009d62a2a7140d3064f686d17a5d1a268bc641954ba181880236"}, + {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:337b443af21d488716f8d0b6164de833e788aa6bd7e3a39c005febc1284f4962"}, + {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:03d0f86ea3184a12f41a2d23f7ccb79cdb5a18e06993f8a45baa8dfec746f0e9"}, + {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7041c36f5680c6e0f08d922aed302e98b3745d97fe1589db0a3eebf6624523af"}, + {file = "pydantic_core-2.27.2-cp310-cp310-win32.whl", hash = "sha256:50a68f3e3819077be2c98110c1f9dcb3817e93f267ba80a2c05bb4f8799e2ff4"}, + {file = "pydantic_core-2.27.2-cp310-cp310-win_amd64.whl", hash = "sha256:e0fd26b16394ead34a424eecf8a31a1f5137094cabe84a1bcb10fa6ba39d3d31"}, + {file = "pydantic_core-2.27.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:8e10c99ef58cfdf2a66fc15d66b16c4a04f62bca39db589ae8cba08bc55331bc"}, + {file = "pydantic_core-2.27.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:26f32e0adf166a84d0cb63be85c562ca8a6fa8de28e5f0d92250c6b7e9e2aff7"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c19d1ea0673cd13cc2f872f6c9ab42acc4e4f492a7ca9d3795ce2b112dd7e15"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e68c4446fe0810e959cdff46ab0a41ce2f2c86d227d96dc3847af0ba7def306"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9640b0059ff4f14d1f37321b94061c6db164fbe49b334b31643e0528d100d99"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:40d02e7d45c9f8af700f3452f329ead92da4c5f4317ca9b896de7ce7199ea459"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c1fd185014191700554795c99b347d64f2bb637966c4cfc16998a0ca700d048"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d81d2068e1c1228a565af076598f9e7451712700b673de8f502f0334f281387d"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1a4207639fb02ec2dbb76227d7c751a20b1a6b4bc52850568e52260cae64ca3b"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:3de3ce3c9ddc8bbd88f6e0e304dea0e66d843ec9de1b0042b0911c1663ffd474"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:30c5f68ded0c36466acede341551106821043e9afaad516adfb6e8fa80a4e6a6"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win32.whl", hash = "sha256:c70c26d2c99f78b125a3459f8afe1aed4d9687c24fd677c6a4436bc042e50d6c"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win_amd64.whl", hash = "sha256:08e125dbdc505fa69ca7d9c499639ab6407cfa909214d500897d02afb816e7cc"}, + {file = 
"pydantic_core-2.27.2-cp311-cp311-win_arm64.whl", hash = "sha256:26f0d68d4b235a2bae0c3fc585c585b4ecc51382db0e3ba402a22cbc440915e4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9e0c8cfefa0ef83b4da9588448b6d8d2a2bf1a53c3f1ae5fca39eb3061e2f0b0"}, + {file = "pydantic_core-2.27.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83097677b8e3bd7eaa6775720ec8e0405f1575015a463285a92bfdfe254529ef"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:172fce187655fece0c90d90a678424b013f8fbb0ca8b036ac266749c09438cb7"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:519f29f5213271eeeeb3093f662ba2fd512b91c5f188f3bb7b27bc5973816934"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05e3a55d124407fffba0dd6b0c0cd056d10e983ceb4e5dbd10dda135c31071d6"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c3ed807c7b91de05e63930188f19e921d1fe90de6b4f5cd43ee7fcc3525cb8c"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fb4aadc0b9a0c063206846d603b92030eb6f03069151a625667f982887153e2"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28ccb213807e037460326424ceb8b5245acb88f32f3d2777427476e1b32c48c4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:de3cd1899e2c279b140adde9357c4495ed9d47131b4a4eaff9052f23398076b3"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:220f892729375e2d736b97d0e51466252ad84c51857d4d15f5e9692f9ef12be4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a0fcd29cd6b4e74fe8ddd2c90330fd8edf2e30cb52acda47f06dd615ae72da57"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win32.whl", hash = "sha256:1e2cb691ed9834cd6a8be61228471d0a503731abfb42f82458ff27be7b2186fc"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win_amd64.whl", hash = "sha256:cc3f1a99a4f4f9dd1de4fe0312c114e740b5ddead65bb4102884b384c15d8bc9"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win_arm64.whl", hash = "sha256:3911ac9284cd8a1792d3cb26a2da18f3ca26c6908cc434a18f730dc0db7bfa3b"}, + {file = "pydantic_core-2.27.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7d14bd329640e63852364c306f4d23eb744e0f8193148d4044dd3dacdaacbd8b"}, + {file = "pydantic_core-2.27.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82f91663004eb8ed30ff478d77c4d1179b3563df6cdb15c0817cd1cdaf34d154"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71b24c7d61131bb83df10cc7e687433609963a944ccf45190cfc21e0887b08c9"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa8e459d4954f608fa26116118bb67f56b93b209c39b008277ace29937453dc9"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce8918cbebc8da707ba805b7fd0b382816858728ae7fe19a942080c24e5b7cd1"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eda3f5c2a021bbc5d976107bb302e0131351c2ba54343f8a496dc8783d3d3a6a"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:bd8086fa684c4775c27f03f062cbb9eaa6e17f064307e86b21b9e0abc9c0f02e"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8d9b3388db186ba0c099a6d20f0604a44eabdeef1777ddd94786cdae158729e4"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7a66efda2387de898c8f38c0cf7f14fca0b51a8ef0b24bfea5849f1b3c95af27"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:18a101c168e4e092ab40dbc2503bdc0f62010e95d292b27827871dc85450d7ee"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ba5dd002f88b78a4215ed2f8ddbdf85e8513382820ba15ad5ad8955ce0ca19a1"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win32.whl", hash = "sha256:1ebaf1d0481914d004a573394f4be3a7616334be70261007e47c2a6fe7e50130"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win_amd64.whl", hash = "sha256:953101387ecf2f5652883208769a79e48db18c6df442568a0b5ccd8c2723abee"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win_arm64.whl", hash = "sha256:ac4dbfd1691affb8f48c2c13241a2e3b60ff23247cbcf981759c768b6633cf8b"}, + {file = "pydantic_core-2.27.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d3e8d504bdd3f10835468f29008d72fc8359d95c9c415ce6e767203db6127506"}, + {file = "pydantic_core-2.27.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:521eb9b7f036c9b6187f0b47318ab0d7ca14bd87f776240b90b21c1f4f149320"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85210c4d99a0114f5a9481b44560d7d1e35e32cc5634c656bc48e590b669b145"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d716e2e30c6f140d7560ef1538953a5cd1a87264c737643d481f2779fc247fe1"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f66d89ba397d92f840f8654756196d93804278457b5fbede59598a1f9f90b228"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:669e193c1c576a58f132e3158f9dfa9662969edb1a250c54d8fa52590045f046"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdbe7629b996647b99c01b37f11170a57ae675375b14b8c13b8518b8320ced5"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d262606bf386a5ba0b0af3b97f37c83d7011439e3dc1a9298f21efb292e42f1a"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cabb9bcb7e0d97f74df8646f34fc76fbf793b7f6dc2438517d7a9e50eee4f14d"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:d2d63f1215638d28221f664596b1ccb3944f6e25dd18cd3b86b0a4c408d5ebb9"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bca101c00bff0adb45a833f8451b9105d9df18accb8743b08107d7ada14bd7da"}, + {file = "pydantic_core-2.27.2-cp38-cp38-win32.whl", hash = "sha256:f6f8e111843bbb0dee4cb6594cdc73e79b3329b526037ec242a3e49012495b3b"}, + {file = "pydantic_core-2.27.2-cp38-cp38-win_amd64.whl", hash = "sha256:fd1aea04935a508f62e0d0ef1f5ae968774a32afc306fb8545e06f5ff5cdf3ad"}, + {file = "pydantic_core-2.27.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c10eb4f1659290b523af58fa7cffb452a61ad6ae5613404519aee4bfbf1df993"}, + {file = "pydantic_core-2.27.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef592d4bad47296fb11f96cd7dc898b92e795032b4894dfb4076cfccd43a9308"}, + {file = 
"pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c61709a844acc6bf0b7dce7daae75195a10aac96a596ea1b776996414791ede4"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c5f762659e47fdb7b16956c71598292f60a03aa92f8b6351504359dbdba6cf"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c9775e339e42e79ec99c441d9730fccf07414af63eac2f0e48e08fd38a64d76"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57762139821c31847cfb2df63c12f725788bd9f04bc2fb392790959b8f70f118"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d1e85068e818c73e048fe28cfc769040bb1f475524f4745a5dc621f75ac7630"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:097830ed52fd9e427942ff3b9bc17fab52913b2f50f2880dc4a5611446606a54"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:044a50963a614ecfae59bb1eaf7ea7efc4bc62f49ed594e18fa1e5d953c40e9f"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:4e0b4220ba5b40d727c7f879eac379b822eee5d8fff418e9d3381ee45b3b0362"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e4f4bb20d75e9325cc9696c6802657b58bc1dbbe3022f32cc2b2b632c3fbb96"}, + {file = "pydantic_core-2.27.2-cp39-cp39-win32.whl", hash = "sha256:cca63613e90d001b9f2f9a9ceb276c308bfa2a43fafb75c8031c4f66039e8c6e"}, + {file = "pydantic_core-2.27.2-cp39-cp39-win_amd64.whl", hash = "sha256:77d1bca19b0f7021b3a982e6f903dcd5b2b06076def36a652e3907f596e29f67"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2bf14caea37e91198329b828eae1618c068dfb8ef17bb33287a7ad4b61ac314e"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b0cb791f5b45307caae8810c2023a184c74605ec3bcbb67d13846c28ff731ff8"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:688d3fd9fcb71f41c4c015c023d12a79d1c4c0732ec9eb35d96e3388a120dcf3"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d591580c34f4d731592f0e9fe40f9cc1b430d297eecc70b962e93c5c668f15f"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:82f986faf4e644ffc189a7f1aafc86e46ef70372bb153e7001e8afccc6e54133"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:bec317a27290e2537f922639cafd54990551725fc844249e64c523301d0822fc"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:0296abcb83a797db256b773f45773da397da75a08f5fcaef41f2044adec05f50"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0d75070718e369e452075a6017fbf187f788e17ed67a3abd47fa934d001863d9"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7e17b560be3c98a8e3aa66ce828bdebb9e9ac6ad5466fba92eb74c4c95cb1151"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c33939a82924da9ed65dab5a65d427205a73181d8098e79b6b426bdf8ad4e656"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:00bad2484fa6bda1e216e7345a798bd37c68fb2d97558edd584942aa41b7d278"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c817e2b40aba42bac6f457498dacabc568c3b7a986fc9ba7c8d9d260b71485fb"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:251136cdad0cb722e93732cb45ca5299fb56e1344a833640bf93b2803f8d1bfd"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2088237af596f0a524d3afc39ab3b036e8adb054ee57cbb1dcf8e09da5b29cc"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d4041c0b966a84b4ae7a09832eb691a35aec90910cd2dbe7a208de59be77965b"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:8083d4e875ebe0b864ffef72a4304827015cff328a1be6e22cc850753bfb122b"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f141ee28a0ad2123b6611b6ceff018039df17f32ada8b534e6aa039545a3efb2"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7d0c8399fcc1848491f00e0314bd59fb34a9c008761bcb422a057670c3f65e35"}, + {file = "pydantic_core-2.27.2.tar.gz", hash = "sha256:eb026e5a4c1fee05726072337ff51d1efb6f59090b7da90d30ea58625b1ffb39"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + +[[package]] +name = "pygments" +version = "2.19.1" +description = "Pygments is a syntax highlighting package written in Python." +optional = false +python-versions = ">=3.8" +files = [ + {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, + {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, +] + +[package.extras] +windows-terminal = ["colorama (>=0.4.6)"] + +[[package]] +name = "pyjwt" +version = "2.10.1" +description = "JSON Web Token implementation in Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb"}, + {file = "pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953"}, +] + +[package.extras] +crypto = ["cryptography (>=3.4.0)"] +dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx", "sphinx-rtd-theme", "zope.interface"] +docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] +tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] + +[[package]] +name = "pymdown-extensions" +version = "10.14.3" +description = "Extension pack for Python Markdown." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "pymdown_extensions-10.14.3-py3-none-any.whl", hash = "sha256:05e0bee73d64b9c71a4ae17c72abc2f700e8bc8403755a00580b49a4e9f189e9"}, + {file = "pymdown_extensions-10.14.3.tar.gz", hash = "sha256:41e576ce3f5d650be59e900e4ceff231e0aed2a88cf30acaee41e02f063a061b"}, +] + +[package.dependencies] +markdown = ">=3.6" +pyyaml = "*" + +[package.extras] +extra = ["pygments (>=2.19.1)"] + +[[package]] +name = "pyparsing" +version = "3.2.1" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" +optional = false +python-versions = ">=3.9" +files = [ + {file = "pyparsing-3.2.1-py3-none-any.whl", hash = "sha256:506ff4f4386c4cec0590ec19e6302d3aedb992fdc02c761e90416f158dacf8e1"}, + {file = "pyparsing-3.2.1.tar.gz", hash = "sha256:61980854fd66de3a90028d679a954d5f2623e83144b5afe5ee86f43d762e5f0a"}, +] + +[package.extras] +diagrams = ["jinja2", "railroad-diagrams"] + +[[package]] +name = "pytest" +version = "8.3.4" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6"}, + {file = "pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=1.5,<2" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} + +[package.extras] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-asyncio" +version = "0.25.3" +description = "Pytest support for asyncio" +optional = false +python-versions = ">=3.9" +files = [ + {file = "pytest_asyncio-0.25.3-py3-none-any.whl", hash = "sha256:9e89518e0f9bd08928f97a3482fdc4e244df17529460bc038291ccaf8f85c7c3"}, + {file = "pytest_asyncio-0.25.3.tar.gz", hash = "sha256:fc1da2cf9f125ada7e710b4ddad05518d4cee187ae9412e9ac9271003497f07a"}, +] + +[package.dependencies] +pytest = ">=8.2,<9" + +[package.extras] +docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1)"] +testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] + +[[package]] +name = "pytest-cov" +version = "6.0.0" +description = "Pytest plugin for measuring coverage." +optional = false +python-versions = ">=3.9" +files = [ + {file = "pytest-cov-6.0.0.tar.gz", hash = "sha256:fde0b595ca248bb8e2d76f020b465f3b107c9632e6a1d1705f17834c89dcadc0"}, + {file = "pytest_cov-6.0.0-py3-none-any.whl", hash = "sha256:eee6f1b9e61008bd34975a4d5bab25801eb31898b032dd55addc93e96fcaaa35"}, +] + +[package.dependencies] +coverage = {version = ">=7.5", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"] + +[[package]] +name = "pytest-env" +version = "1.1.5" +description = "pytest plugin that allows you to add environment variables." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest_env-1.1.5-py3-none-any.whl", hash = "sha256:ce90cf8772878515c24b31cd97c7fa1f4481cd68d588419fd45f10ecaee6bc30"}, + {file = "pytest_env-1.1.5.tar.gz", hash = "sha256:91209840aa0e43385073ac464a554ad2947cc2fd663a9debf88d03b01e0cc1cf"}, +] + +[package.dependencies] +pytest = ">=8.3.3" +tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "pytest-mock (>=3.14)"] + +[[package]] +name = "pytest-httpserver" +version = "1.1.1" +description = "pytest-httpserver is a httpserver for pytest" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest_httpserver-1.1.1-py3-none-any.whl", hash = "sha256:aadc744bfac773a2ea93d05c2ef51fa23c087e3cc5dace3ea9d45cdd4bfe1fe8"}, + {file = "pytest_httpserver-1.1.1.tar.gz", hash = "sha256:e5c46c62c0aa65e5d4331228cb2cb7db846c36e429c3e74ca806f284806bf7c6"}, +] + +[package.dependencies] +Werkzeug = ">=2.0.0" + +[[package]] +name = "pytest-httpx" +version = "0.34.0" +description = "Send responses to httpx." +optional = false +python-versions = ">=3.9" +files = [ + {file = "pytest_httpx-0.34.0-py3-none-any.whl", hash = "sha256:42cf0a66f7b71b9111db2897e8b38a903abd33a27b11c48aff4a3c7650313af2"}, + {file = "pytest_httpx-0.34.0.tar.gz", hash = "sha256:3ca4b0975c0f93b985f17df19e76430c1086b5b0cce32b1af082d8901296a735"}, +] + +[package.dependencies] +httpx = "==0.27.*" +pytest = "==8.*" + +[package.extras] +testing = ["pytest-asyncio (==0.24.*)", "pytest-cov (==5.*)"] + +[[package]] +name = "pytest-rerunfailures" +version = "15.0" +description = "pytest plugin to re-run tests to eliminate flaky failures" +optional = false +python-versions = ">=3.9" +files = [ + {file = "pytest-rerunfailures-15.0.tar.gz", hash = "sha256:2d9ac7baf59f4c13ac730b47f6fa80e755d1ba0581da45ce30b72fb3542b4474"}, + {file = "pytest_rerunfailures-15.0-py3-none-any.whl", hash = "sha256:dd150c4795c229ef44320adc9a0c0532c51b78bb7a6843a8c53556b9a611df1a"}, +] + +[package.dependencies] +packaging = ">=17.1" +pytest = ">=7.4,<8.2.2 || >8.2.2" + +[[package]] +name = "pytest-timeout" +version = "2.3.1" +description = "pytest plugin to abort hanging tests" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-timeout-2.3.1.tar.gz", hash = "sha256:12397729125c6ecbdaca01035b9e5239d4db97352320af155b3f5de1ba5165d9"}, + {file = "pytest_timeout-2.3.1-py3-none-any.whl", hash = "sha256:68188cb703edfc6a18fad98dc25a3c61e9f24d644b0b70f33af545219fc7813e"}, +] + +[package.dependencies] +pytest = ">=7.0.0" + +[[package]] +name = "python-daemon" +version = "3.1.2" +description = "Library to implement a well-behaved Unix daemon process." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "python_daemon-3.1.2-py3-none-any.whl", hash = "sha256:b906833cef63502994ad48e2eab213259ed9bb18d54fa8774dcba2ff7864cec6"}, + {file = "python_daemon-3.1.2.tar.gz", hash = "sha256:f7b04335adc473de877f5117e26d5f1142f4c9f7cd765408f0877757be5afbf4"}, +] + +[package.dependencies] +lockfile = ">=0.10" + +[package.extras] +build = ["build", "changelog-chug", "docutils", "python-daemon[doc]", "wheel"] +devel = ["python-daemon[dist,test]"] +dist = ["python-daemon[build]", "twine"] +static-analysis = ["isort (>=5.13,<6.0)", "pip-check", "pycodestyle (>=2.12,<3.0)", "pydocstyle (>=6.3,<7.0)", "pyupgrade (>=3.17,<4.0)"] +test = ["coverage", "python-daemon[build,static-analysis]", "testscenarios (>=0.4)", "testtools"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" [[package]] -name = "requests-cache" -version = "0.9.5" -description = "A transparent persistent cache for the requests library" -category = "main" +name = "pyyaml" +version = "6.0.2" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = 
"PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, +] + +[[package]] +name = "pyyaml-env-tag" +version = "0.1" +description = "A custom YAML tag for referencing environment variables in YAML files. 
" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pyyaml_env_tag-0.1-py3-none-any.whl", hash = "sha256:af31106dec8a4d68c60207c1886031cbf839b68aa7abccdb19868200532c2069"}, + {file = "pyyaml_env_tag-0.1.tar.gz", hash = "sha256:70092675bda14fdec33b31ba77e7543de9ddc88f2e5b99160396572d11525bdb"}, +] + +[package.dependencies] +pyyaml = "*" + +[[package]] +name = "pyzmq" +version = "26.2.1" +description = "Python bindings for 0MQ" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pyzmq-26.2.1-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:f39d1227e8256d19899d953e6e19ed2ccb689102e6d85e024da5acf410f301eb"}, + {file = "pyzmq-26.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a23948554c692df95daed595fdd3b76b420a4939d7a8a28d6d7dea9711878641"}, + {file = "pyzmq-26.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95f5728b367a042df146cec4340d75359ec6237beebf4a8f5cf74657c65b9257"}, + {file = "pyzmq-26.2.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:95f7b01b3f275504011cf4cf21c6b885c8d627ce0867a7e83af1382ebab7b3ff"}, + {file = "pyzmq-26.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80a00370a2ef2159c310e662c7c0f2d030f437f35f478bb8b2f70abd07e26b24"}, + {file = "pyzmq-26.2.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:8531ed35dfd1dd2af95f5d02afd6545e8650eedbf8c3d244a554cf47d8924459"}, + {file = "pyzmq-26.2.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:cdb69710e462a38e6039cf17259d328f86383a06c20482cc154327968712273c"}, + {file = "pyzmq-26.2.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e7eeaef81530d0b74ad0d29eec9997f1c9230c2f27242b8d17e0ee67662c8f6e"}, + {file = "pyzmq-26.2.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:361edfa350e3be1f987e592e834594422338d7174364763b7d3de5b0995b16f3"}, + {file = "pyzmq-26.2.1-cp310-cp310-win32.whl", hash = "sha256:637536c07d2fb6a354988b2dd1d00d02eb5dd443f4bbee021ba30881af1c28aa"}, + {file = "pyzmq-26.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:45fad32448fd214fbe60030aa92f97e64a7140b624290834cc9b27b3a11f9473"}, + {file = "pyzmq-26.2.1-cp310-cp310-win_arm64.whl", hash = "sha256:d9da0289d8201c8a29fd158aaa0dfe2f2e14a181fd45e2dc1fbf969a62c1d594"}, + {file = "pyzmq-26.2.1-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:c059883840e634a21c5b31d9b9a0e2b48f991b94d60a811092bc37992715146a"}, + {file = "pyzmq-26.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ed038a921df836d2f538e509a59cb638df3e70ca0fcd70d0bf389dfcdf784d2a"}, + {file = "pyzmq-26.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9027a7fcf690f1a3635dc9e55e38a0d6602dbbc0548935d08d46d2e7ec91f454"}, + {file = "pyzmq-26.2.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6d75fcb00a1537f8b0c0bb05322bc7e35966148ffc3e0362f0369e44a4a1de99"}, + {file = "pyzmq-26.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0019cc804ac667fb8c8eaecdb66e6d4a68acf2e155d5c7d6381a5645bd93ae4"}, + {file = "pyzmq-26.2.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:f19dae58b616ac56b96f2e2290f2d18730a898a171f447f491cc059b073ca1fa"}, + {file = "pyzmq-26.2.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f5eeeb82feec1fc5cbafa5ee9022e87ffdb3a8c48afa035b356fcd20fc7f533f"}, + {file = "pyzmq-26.2.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:000760e374d6f9d1a3478a42ed0c98604de68c9e94507e5452951e598ebecfba"}, + {file = 
"pyzmq-26.2.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:817fcd3344d2a0b28622722b98500ae9c8bfee0f825b8450932ff19c0b15bebd"}, + {file = "pyzmq-26.2.1-cp311-cp311-win32.whl", hash = "sha256:88812b3b257f80444a986b3596e5ea5c4d4ed4276d2b85c153a6fbc5ca457ae7"}, + {file = "pyzmq-26.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:ef29630fde6022471d287c15c0a2484aba188adbfb978702624ba7a54ddfa6c1"}, + {file = "pyzmq-26.2.1-cp311-cp311-win_arm64.whl", hash = "sha256:f32718ee37c07932cc336096dc7403525301fd626349b6eff8470fe0f996d8d7"}, + {file = "pyzmq-26.2.1-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:a6549ecb0041dafa55b5932dcbb6c68293e0bd5980b5b99f5ebb05f9a3b8a8f3"}, + {file = "pyzmq-26.2.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:0250c94561f388db51fd0213cdccbd0b9ef50fd3c57ce1ac937bf3034d92d72e"}, + {file = "pyzmq-26.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36ee4297d9e4b34b5dc1dd7ab5d5ea2cbba8511517ef44104d2915a917a56dc8"}, + {file = "pyzmq-26.2.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c2a9cb17fd83b7a3a3009901aca828feaf20aa2451a8a487b035455a86549c09"}, + {file = "pyzmq-26.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:786dd8a81b969c2081b31b17b326d3a499ddd1856e06d6d79ad41011a25148da"}, + {file = "pyzmq-26.2.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:2d88ba221a07fc2c5581565f1d0fe8038c15711ae79b80d9462e080a1ac30435"}, + {file = "pyzmq-26.2.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1c84c1297ff9f1cd2440da4d57237cb74be21fdfe7d01a10810acba04e79371a"}, + {file = "pyzmq-26.2.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:46d4ebafc27081a7f73a0f151d0c38d4291656aa134344ec1f3d0199ebfbb6d4"}, + {file = "pyzmq-26.2.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:91e2bfb8e9a29f709d51b208dd5f441dc98eb412c8fe75c24ea464734ccdb48e"}, + {file = "pyzmq-26.2.1-cp312-cp312-win32.whl", hash = "sha256:4a98898fdce380c51cc3e38ebc9aa33ae1e078193f4dc641c047f88b8c690c9a"}, + {file = "pyzmq-26.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:a0741edbd0adfe5f30bba6c5223b78c131b5aa4a00a223d631e5ef36e26e6d13"}, + {file = "pyzmq-26.2.1-cp312-cp312-win_arm64.whl", hash = "sha256:e5e33b1491555843ba98d5209439500556ef55b6ab635f3a01148545498355e5"}, + {file = "pyzmq-26.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:099b56ef464bc355b14381f13355542e452619abb4c1e57a534b15a106bf8e23"}, + {file = "pyzmq-26.2.1-cp313-cp313-macosx_10_15_universal2.whl", hash = "sha256:651726f37fcbce9f8dd2a6dab0f024807929780621890a4dc0c75432636871be"}, + {file = "pyzmq-26.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57dd4d91b38fa4348e237a9388b4423b24ce9c1695bbd4ba5a3eada491e09399"}, + {file = "pyzmq-26.2.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d51a7bfe01a48e1064131f3416a5439872c533d756396be2b39e3977b41430f9"}, + {file = "pyzmq-26.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c7154d228502e18f30f150b7ce94f0789d6b689f75261b623f0fdc1eec642aab"}, + {file = "pyzmq-26.2.1-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:f1f31661a80cc46aba381bed475a9135b213ba23ca7ff6797251af31510920ce"}, + {file = "pyzmq-26.2.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:290c96f479504439b6129a94cefd67a174b68ace8a8e3f551b2239a64cfa131a"}, + {file = "pyzmq-26.2.1-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:f2c307fbe86e18ab3c885b7e01de942145f539165c3360e2af0f094dd440acd9"}, + 
{file = "pyzmq-26.2.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:b314268e716487bfb86fcd6f84ebbe3e5bec5fac75fdf42bc7d90fdb33f618ad"}, + {file = "pyzmq-26.2.1-cp313-cp313-win32.whl", hash = "sha256:edb550616f567cd5603b53bb52a5f842c0171b78852e6fc7e392b02c2a1504bb"}, + {file = "pyzmq-26.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:100a826a029c8ef3d77a1d4c97cbd6e867057b5806a7276f2bac1179f893d3bf"}, + {file = "pyzmq-26.2.1-cp313-cp313-win_arm64.whl", hash = "sha256:6991ee6c43e0480deb1b45d0c7c2bac124a6540cba7db4c36345e8e092da47ce"}, + {file = "pyzmq-26.2.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:25e720dba5b3a3bb2ad0ad5d33440babd1b03438a7a5220511d0c8fa677e102e"}, + {file = "pyzmq-26.2.1-cp313-cp313t-macosx_10_15_universal2.whl", hash = "sha256:9ec6abfb701437142ce9544bd6a236addaf803a32628d2260eb3dbd9a60e2891"}, + {file = "pyzmq-26.2.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e1eb9d2bfdf5b4e21165b553a81b2c3bd5be06eeddcc4e08e9692156d21f1f6"}, + {file = "pyzmq-26.2.1-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:90dc731d8e3e91bcd456aa7407d2eba7ac6f7860e89f3766baabb521f2c1de4a"}, + {file = "pyzmq-26.2.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b6a93d684278ad865fc0b9e89fe33f6ea72d36da0e842143891278ff7fd89c3"}, + {file = "pyzmq-26.2.1-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:c1bb37849e2294d519117dd99b613c5177934e5c04a5bb05dd573fa42026567e"}, + {file = "pyzmq-26.2.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:632a09c6d8af17b678d84df442e9c3ad8e4949c109e48a72f805b22506c4afa7"}, + {file = "pyzmq-26.2.1-cp313-cp313t-musllinux_1_1_i686.whl", hash = "sha256:fc409c18884eaf9ddde516d53af4f2db64a8bc7d81b1a0c274b8aa4e929958e8"}, + {file = "pyzmq-26.2.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:17f88622b848805d3f6427ce1ad5a2aa3cf61f12a97e684dab2979802024d460"}, + {file = "pyzmq-26.2.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3ef584f13820d2629326fe20cc04069c21c5557d84c26e277cfa6235e523b10f"}, + {file = "pyzmq-26.2.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:160194d1034902937359c26ccfa4e276abffc94937e73add99d9471e9f555dd6"}, + {file = "pyzmq-26.2.1-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:574b285150afdbf0a0424dddf7ef9a0d183988eb8d22feacb7160f7515e032cb"}, + {file = "pyzmq-26.2.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44dba28c34ce527cf687156c81f82bf1e51f047838d5964f6840fd87dfecf9fe"}, + {file = "pyzmq-26.2.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:9fbdb90b85c7624c304f72ec7854659a3bd901e1c0ffb2363163779181edeb68"}, + {file = "pyzmq-26.2.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a7ad34a2921e8f76716dc7205c9bf46a53817e22b9eec2e8a3e08ee4f4a72468"}, + {file = "pyzmq-26.2.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:866c12b7c90dd3a86983df7855c6f12f9407c8684db6aa3890fc8027462bda82"}, + {file = "pyzmq-26.2.1-cp37-cp37m-win32.whl", hash = "sha256:eeb37f65350d5c5870517f02f8bbb2ac0fbec7b416c0f4875219fef305a89a45"}, + {file = "pyzmq-26.2.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4eb3197f694dfb0ee6af29ef14a35f30ae94ff67c02076eef8125e2d98963cd0"}, + {file = "pyzmq-26.2.1-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:36d4e7307db7c847fe37413f333027d31c11d5e6b3bacbb5022661ac635942ba"}, + {file = "pyzmq-26.2.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:1c6ae0e95d0a4b0cfe30f648a18e764352d5415279bdf34424decb33e79935b8"}, + {file = "pyzmq-26.2.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5b4fc44f5360784cc02392f14235049665caaf7c0fe0b04d313e763d3338e463"}, + {file = "pyzmq-26.2.1-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:51431f6b2750eb9b9d2b2952d3cc9b15d0215e1b8f37b7a3239744d9b487325d"}, + {file = "pyzmq-26.2.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bdbc78ae2065042de48a65f1421b8af6b76a0386bb487b41955818c3c1ce7bed"}, + {file = "pyzmq-26.2.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d14f50d61a89b0925e4d97a0beba6053eb98c426c5815d949a43544f05a0c7ec"}, + {file = "pyzmq-26.2.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:004837cb958988c75d8042f5dac19a881f3d9b3b75b2f574055e22573745f841"}, + {file = "pyzmq-26.2.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0b2007f28ce1b8acebdf4812c1aab997a22e57d6a73b5f318b708ef9bcabbe95"}, + {file = "pyzmq-26.2.1-cp38-cp38-win32.whl", hash = "sha256:269c14904da971cb5f013100d1aaedb27c0a246728c341d5d61ddd03f463f2f3"}, + {file = "pyzmq-26.2.1-cp38-cp38-win_amd64.whl", hash = "sha256:31fff709fef3b991cfe7189d2cfe0c413a1d0e82800a182cfa0c2e3668cd450f"}, + {file = "pyzmq-26.2.1-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:a4bffcadfd40660f26d1b3315a6029fd4f8f5bf31a74160b151f5c577b2dc81b"}, + {file = "pyzmq-26.2.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e76ad4729c2f1cf74b6eb1bdd05f6aba6175999340bd51e6caee49a435a13bf5"}, + {file = "pyzmq-26.2.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:8b0f5bab40a16e708e78a0c6ee2425d27e1a5d8135c7a203b4e977cee37eb4aa"}, + {file = "pyzmq-26.2.1-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e8e47050412f0ad3a9b2287779758073cbf10e460d9f345002d4779e43bb0136"}, + {file = "pyzmq-26.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f18ce33f422d119b13c1363ed4cce245b342b2c5cbbb76753eabf6aa6f69c7d"}, + {file = "pyzmq-26.2.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ceb0d78b7ef106708a7e2c2914afe68efffc0051dc6a731b0dbacd8b4aee6d68"}, + {file = "pyzmq-26.2.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ebdd96bd637fd426d60e86a29ec14b8c1ab64b8d972f6a020baf08a30d1cf46"}, + {file = "pyzmq-26.2.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:03719e424150c6395b9513f53a5faadcc1ce4b92abdf68987f55900462ac7eec"}, + {file = "pyzmq-26.2.1-cp39-cp39-win32.whl", hash = "sha256:ef5479fac31df4b304e96400fc67ff08231873ee3537544aa08c30f9d22fce38"}, + {file = "pyzmq-26.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:f92a002462154c176dac63a8f1f6582ab56eb394ef4914d65a9417f5d9fde218"}, + {file = "pyzmq-26.2.1-cp39-cp39-win_arm64.whl", hash = "sha256:1fd4b3efc6f62199886440d5e27dd3ccbcb98dfddf330e7396f1ff421bfbb3c2"}, + {file = "pyzmq-26.2.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:380816d298aed32b1a97b4973a4865ef3be402a2e760204509b52b6de79d755d"}, + {file = "pyzmq-26.2.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97cbb368fd0debdbeb6ba5966aa28e9a1ae3396c7386d15569a6ca4be4572b99"}, + {file = "pyzmq-26.2.1-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abf7b5942c6b0dafcc2823ddd9154f419147e24f8df5b41ca8ea40a6db90615c"}, + {file = "pyzmq-26.2.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3fe6e28a8856aea808715f7a4fc11f682b9d29cac5d6262dd8fe4f98edc12d53"}, + 
{file = "pyzmq-26.2.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:bd8fdee945b877aa3bffc6a5a8816deb048dab0544f9df3731ecd0e54d8c84c9"}, + {file = "pyzmq-26.2.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ee7152f32c88e0e1b5b17beb9f0e2b14454235795ef68c0c120b6d3d23d12833"}, + {file = "pyzmq-26.2.1-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:baa1da72aecf6a490b51fba7a51f1ce298a1e0e86d0daef8265c8f8f9848eb77"}, + {file = "pyzmq-26.2.1-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:49135bb327fca159262d8fd14aa1f4a919fe071b04ed08db4c7c37d2f0647162"}, + {file = "pyzmq-26.2.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8bacc1a10c150d58e8a9ee2b2037a70f8d903107e0f0b6e079bf494f2d09c091"}, + {file = "pyzmq-26.2.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:09dac387ce62d69bec3f06d51610ca1d660e7849eb45f68e38e7f5cf1f49cbcb"}, + {file = "pyzmq-26.2.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:70b3a46ecd9296e725ccafc17d732bfc3cdab850b54bd913f843a0a54dfb2c04"}, + {file = "pyzmq-26.2.1-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:59660e15c797a3b7a571c39f8e0b62a1f385f98ae277dfe95ca7eaf05b5a0f12"}, + {file = "pyzmq-26.2.1-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0f50db737d688e96ad2a083ad2b453e22865e7e19c7f17d17df416e91ddf67eb"}, + {file = "pyzmq-26.2.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a003200b6cd64e89b5725ff7e284a93ab24fd54bbac8b4fa46b1ed57be693c27"}, + {file = "pyzmq-26.2.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:f9ba5def063243793dec6603ad1392f735255cbc7202a3a484c14f99ec290705"}, + {file = "pyzmq-26.2.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1238c2448c58b9c8d6565579393148414a42488a5f916b3f322742e561f6ae0d"}, + {file = "pyzmq-26.2.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8eddb3784aed95d07065bcf94d07e8c04024fdb6b2386f08c197dfe6b3528fda"}, + {file = "pyzmq-26.2.1-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f0f19c2097fffb1d5b07893d75c9ee693e9cbc809235cf3f2267f0ef6b015f24"}, + {file = "pyzmq-26.2.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0995fd3530f2e89d6b69a2202e340bbada3191014352af978fa795cb7a446331"}, + {file = "pyzmq-26.2.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:7c6160fe513654e65665332740f63de29ce0d165e053c0c14a161fa60dd0da01"}, + {file = "pyzmq-26.2.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:8ec8e3aea6146b761d6c57fcf8f81fcb19f187afecc19bf1701a48db9617a217"}, + {file = "pyzmq-26.2.1.tar.gz", hash = "sha256:17d72a74e5e9ff3829deb72897a175333d3ef5b5413948cae3cf7ebf0b02ecca"}, +] + +[package.dependencies] +cffi = {version = "*", markers = "implementation_name == \"pypy\""} + +[[package]] +name = "radixtarget" +version = "3.0.15" +description = "Check whether an IP address belongs to a cloud provider" +optional = false +python-versions = "<4.0,>=3.9" +files = [ + {file = "radixtarget-3.0.15-py3-none-any.whl", hash = "sha256:1e1d0dd3e8742ffcfc42084eb238f31f6785626b876ab63a9f28a29e97bd3bb0"}, + {file = "radixtarget-3.0.15.tar.gz", hash = "sha256:dedfad3aea1e973f261b7bc0d8936423f59ae4d082648fd496c6cdfdfa069fea"}, +] + +[[package]] +name = "regex" +version = "2024.11.6" +description = "Alternative regular expression module, to replace re." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "regex-2024.11.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff590880083d60acc0433f9c3f713c51f7ac6ebb9adf889c79a261ecf541aa91"}, + {file = "regex-2024.11.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:658f90550f38270639e83ce492f27d2c8d2cd63805c65a13a14d36ca126753f0"}, + {file = "regex-2024.11.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:164d8b7b3b4bcb2068b97428060b2a53be050085ef94eca7f240e7947f1b080e"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3660c82f209655a06b587d55e723f0b813d3a7db2e32e5e7dc64ac2a9e86fde"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d22326fcdef5e08c154280b71163ced384b428343ae16a5ab2b3354aed12436e"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1ac758ef6aebfc8943560194e9fd0fa18bcb34d89fd8bd2af18183afd8da3a2"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:997d6a487ff00807ba810e0f8332c18b4eb8d29463cfb7c820dc4b6e7562d0cf"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:02a02d2bb04fec86ad61f3ea7f49c015a0681bf76abb9857f945d26159d2968c"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f02f93b92358ee3f78660e43b4b0091229260c5d5c408d17d60bf26b6c900e86"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:06eb1be98df10e81ebaded73fcd51989dcf534e3c753466e4b60c4697a003b67"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:040df6fe1a5504eb0f04f048e6d09cd7c7110fef851d7c567a6b6e09942feb7d"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:fdabbfc59f2c6edba2a6622c647b716e34e8e3867e0ab975412c5c2f79b82da2"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:8447d2d39b5abe381419319f942de20b7ecd60ce86f16a23b0698f22e1b70008"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:da8f5fc57d1933de22a9e23eec290a0d8a5927a5370d24bda9a6abe50683fe62"}, + {file = "regex-2024.11.6-cp310-cp310-win32.whl", hash = "sha256:b489578720afb782f6ccf2840920f3a32e31ba28a4b162e13900c3e6bd3f930e"}, + {file = "regex-2024.11.6-cp310-cp310-win_amd64.whl", hash = "sha256:5071b2093e793357c9d8b2929dfc13ac5f0a6c650559503bb81189d0a3814519"}, + {file = "regex-2024.11.6-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5478c6962ad548b54a591778e93cd7c456a7a29f8eca9c49e4f9a806dcc5d638"}, + {file = "regex-2024.11.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2c89a8cc122b25ce6945f0423dc1352cb9593c68abd19223eebbd4e56612c5b7"}, + {file = "regex-2024.11.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:94d87b689cdd831934fa3ce16cc15cd65748e6d689f5d2b8f4f4df2065c9fa20"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1062b39a0a2b75a9c694f7a08e7183a80c63c0d62b301418ffd9c35f55aaa114"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:167ed4852351d8a750da48712c3930b031f6efdaa0f22fa1933716bfcd6bf4a3"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:2d548dafee61f06ebdb584080621f3e0c23fff312f0de1afc776e2a2ba99a74f"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2a19f302cd1ce5dd01a9099aaa19cae6173306d1302a43b627f62e21cf18ac0"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bec9931dfb61ddd8ef2ebc05646293812cb6b16b60cf7c9511a832b6f1854b55"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9714398225f299aa85267fd222f7142fcb5c769e73d7733344efc46f2ef5cf89"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:202eb32e89f60fc147a41e55cb086db2a3f8cb82f9a9a88440dcfc5d37faae8d"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:4181b814e56078e9b00427ca358ec44333765f5ca1b45597ec7446d3a1ef6e34"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:068376da5a7e4da51968ce4c122a7cd31afaaec4fccc7856c92f63876e57b51d"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ac10f2c4184420d881a3475fb2c6f4d95d53a8d50209a2500723d831036f7c45"}, + {file = "regex-2024.11.6-cp311-cp311-win32.whl", hash = "sha256:c36f9b6f5f8649bb251a5f3f66564438977b7ef8386a52460ae77e6070d309d9"}, + {file = "regex-2024.11.6-cp311-cp311-win_amd64.whl", hash = "sha256:02e28184be537f0e75c1f9b2f8847dc51e08e6e171c6bde130b2687e0c33cf60"}, + {file = "regex-2024.11.6-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:52fb28f528778f184f870b7cf8f225f5eef0a8f6e3778529bdd40c7b3920796a"}, + {file = "regex-2024.11.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fdd6028445d2460f33136c55eeb1f601ab06d74cb3347132e1c24250187500d9"}, + {file = "regex-2024.11.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:805e6b60c54bf766b251e94526ebad60b7de0c70f70a4e6210ee2891acb70bf2"}, + {file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b85c2530be953a890eaffde05485238f07029600e8f098cdf1848d414a8b45e4"}, + {file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb26437975da7dc36b7efad18aa9dd4ea569d2357ae6b783bf1118dabd9ea577"}, + {file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:abfa5080c374a76a251ba60683242bc17eeb2c9818d0d30117b4486be10c59d3"}, + {file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b7fa6606c2881c1db9479b0eaa11ed5dfa11c8d60a474ff0e095099f39d98e"}, + {file = "regex-2024.11.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c32f75920cf99fe6b6c539c399a4a128452eaf1af27f39bce8909c9a3fd8cbe"}, + {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:982e6d21414e78e1f51cf595d7f321dcd14de1f2881c5dc6a6e23bbbbd68435e"}, + {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a7c2155f790e2fb448faed6dd241386719802296ec588a8b9051c1f5c481bc29"}, + {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:149f5008d286636e48cd0b1dd65018548944e495b0265b45e1bffecce1ef7f39"}, + {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:e5364a4502efca094731680e80009632ad6624084aff9a23ce8c8c6820de3e51"}, + {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0a86e7eeca091c09e021db8eb72d54751e527fa47b8d5787caf96d9831bd02ad"}, + {file = 
"regex-2024.11.6-cp312-cp312-win32.whl", hash = "sha256:32f9a4c643baad4efa81d549c2aadefaeba12249b2adc5af541759237eee1c54"}, + {file = "regex-2024.11.6-cp312-cp312-win_amd64.whl", hash = "sha256:a93c194e2df18f7d264092dc8539b8ffb86b45b899ab976aa15d48214138e81b"}, + {file = "regex-2024.11.6-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a6ba92c0bcdf96cbf43a12c717eae4bc98325ca3730f6b130ffa2e3c3c723d84"}, + {file = "regex-2024.11.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:525eab0b789891ac3be914d36893bdf972d483fe66551f79d3e27146191a37d4"}, + {file = "regex-2024.11.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:086a27a0b4ca227941700e0b31425e7a28ef1ae8e5e05a33826e17e47fbfdba0"}, + {file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bde01f35767c4a7899b7eb6e823b125a64de314a8ee9791367c9a34d56af18d0"}, + {file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b583904576650166b3d920d2bcce13971f6f9e9a396c673187f49811b2769dc7"}, + {file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c4de13f06a0d54fa0d5ab1b7138bfa0d883220965a29616e3ea61b35d5f5fc7"}, + {file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3cde6e9f2580eb1665965ce9bf17ff4952f34f5b126beb509fee8f4e994f143c"}, + {file = "regex-2024.11.6-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0d7f453dca13f40a02b79636a339c5b62b670141e63efd511d3f8f73fba162b3"}, + {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:59dfe1ed21aea057a65c6b586afd2a945de04fc7db3de0a6e3ed5397ad491b07"}, + {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b97c1e0bd37c5cd7902e65f410779d39eeda155800b65fc4d04cc432efa9bc6e"}, + {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f9d1e379028e0fc2ae3654bac3cbbef81bf3fd571272a42d56c24007979bafb6"}, + {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:13291b39131e2d002a7940fb176e120bec5145f3aeb7621be6534e46251912c4"}, + {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4f51f88c126370dcec4908576c5a627220da6c09d0bff31cfa89f2523843316d"}, + {file = "regex-2024.11.6-cp313-cp313-win32.whl", hash = "sha256:63b13cfd72e9601125027202cad74995ab26921d8cd935c25f09c630436348ff"}, + {file = "regex-2024.11.6-cp313-cp313-win_amd64.whl", hash = "sha256:2b3361af3198667e99927da8b84c1b010752fa4b1115ee30beaa332cabc3ef1a"}, + {file = "regex-2024.11.6-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:3a51ccc315653ba012774efca4f23d1d2a8a8f278a6072e29c7147eee7da446b"}, + {file = "regex-2024.11.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ad182d02e40de7459b73155deb8996bbd8e96852267879396fb274e8700190e3"}, + {file = "regex-2024.11.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ba9b72e5643641b7d41fa1f6d5abda2c9a263ae835b917348fc3c928182ad467"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40291b1b89ca6ad8d3f2b82782cc33807f1406cf68c8d440861da6304d8ffbbd"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cdf58d0e516ee426a48f7b2c03a332a4114420716d55769ff7108c37a09951bf"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:a36fdf2af13c2b14738f6e973aba563623cb77d753bbbd8d414d18bfaa3105dd"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1cee317bfc014c2419a76bcc87f071405e3966da434e03e13beb45f8aced1a6"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50153825ee016b91549962f970d6a4442fa106832e14c918acd1c8e479916c4f"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ea1bfda2f7162605f6e8178223576856b3d791109f15ea99a9f95c16a7636fb5"}, + {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:df951c5f4a1b1910f1a99ff42c473ff60f8225baa1cdd3539fe2819d9543e9df"}, + {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:072623554418a9911446278f16ecb398fb3b540147a7828c06e2011fa531e773"}, + {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:f654882311409afb1d780b940234208a252322c24a93b442ca714d119e68086c"}, + {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:89d75e7293d2b3e674db7d4d9b1bee7f8f3d1609428e293771d1a962617150cc"}, + {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:f65557897fc977a44ab205ea871b690adaef6b9da6afda4790a2484b04293a5f"}, + {file = "regex-2024.11.6-cp38-cp38-win32.whl", hash = "sha256:6f44ec28b1f858c98d3036ad5d7d0bfc568bdd7a74f9c24e25f41ef1ebfd81a4"}, + {file = "regex-2024.11.6-cp38-cp38-win_amd64.whl", hash = "sha256:bb8f74f2f10dbf13a0be8de623ba4f9491faf58c24064f32b65679b021ed0001"}, + {file = "regex-2024.11.6-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5704e174f8ccab2026bd2f1ab6c510345ae8eac818b613d7d73e785f1310f839"}, + {file = "regex-2024.11.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:220902c3c5cc6af55d4fe19ead504de80eb91f786dc102fbd74894b1551f095e"}, + {file = "regex-2024.11.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5e7e351589da0850c125f1600a4c4ba3c722efefe16b297de54300f08d734fbf"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5056b185ca113c88e18223183aa1a50e66507769c9640a6ff75859619d73957b"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e34b51b650b23ed3354b5a07aab37034d9f923db2a40519139af34f485f77d0"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5670bce7b200273eee1840ef307bfa07cda90b38ae56e9a6ebcc9f50da9c469b"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:08986dce1339bc932923e7d1232ce9881499a0e02925f7402fb7c982515419ef"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93c0b12d3d3bc25af4ebbf38f9ee780a487e8bf6954c115b9f015822d3bb8e48"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:764e71f22ab3b305e7f4c21f1a97e1526a25ebdd22513e251cf376760213da13"}, + {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:f056bf21105c2515c32372bbc057f43eb02aae2fda61052e2f7622c801f0b4e2"}, + {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:69ab78f848845569401469da20df3e081e6b5a11cb086de3eed1d48f5ed57c95"}, + {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = 
"sha256:86fddba590aad9208e2fa8b43b4c098bb0ec74f15718bb6a704e3c63e2cef3e9"}, + {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:684d7a212682996d21ca12ef3c17353c021fe9de6049e19ac8481ec35574a70f"}, + {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a03e02f48cd1abbd9f3b7e3586d97c8f7a9721c436f51a5245b3b9483044480b"}, + {file = "regex-2024.11.6-cp39-cp39-win32.whl", hash = "sha256:41758407fc32d5c3c5de163888068cfee69cb4c2be844e7ac517a52770f9af57"}, + {file = "regex-2024.11.6-cp39-cp39-win_amd64.whl", hash = "sha256:b2837718570f95dd41675328e111345f9b7095d821bac435aac173ac80b19983"}, + {file = "regex-2024.11.6.tar.gz", hash = "sha256:7ab159b063c52a0333c884e4679f8d7a85112ee3078fe3d9004b2dd875585519"}, +] + +[[package]] +name = "requests" +version = "2.32.3" +description = "Python HTTP for Humans." optional = false -python-versions = ">=3.7,<4.0" +python-versions = ">=3.8" +files = [ + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, +] [package.dependencies] -appdirs = ">=1.4.4,<2.0.0" -attrs = ">=21.2,<22.0" -cattrs = ">=1.8,<=22.2" -exceptiongroup = {version = ">=1.0.0-rc.3", markers = "python_full_version >= \"3.10.0\" and python_full_version < \"4.0.0\""} -requests = ">=2.22,<3.0" -url-normalize = ">=1.4,<2.0" -urllib3 = ">=1.25.5,<2.0.0" +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" [package.extras] -dynamodb = ["boto3 (>=1.15,<2.0)", "botocore (>=1.18,<2.0)"] -all = ["boto3 (>=1.15,<2.0)", "botocore (>=1.18,<2.0)", "pymongo (>=3)", "redis (>=3)", "itsdangerous (>=2.0,<3.0)", "pyyaml (>=5.4)", "ujson (>=4.0)"] -mongodb = ["pymongo (>=3)"] -redis = ["redis (>=3)"] -bson = ["bson (>=0.5)"] -security = ["itsdangerous (>=2.0,<3.0)"] -yaml = ["pyyaml (>=5.4)"] -json = ["ujson (>=4.0)"] -docs = ["furo (>=2021.9.8)", "linkify-it-py (>=1.0.1,<2.0.0)", "myst-parser (>=0.15.1,<0.16.0)", "sphinx (==4.3.0)", "sphinx-autodoc-typehints (>=1.11,<2.0)", "sphinx-automodapi (>=0.13,<0.15)", "sphinx-copybutton (>=0.3,<0.5)", "sphinx-inline-tabs (>=2022.1.2b11)", "sphinx-notfound-page", "sphinx-panels (>=0.6,<0.7)", "sphinxcontrib-apidoc (>=0.3,<0.4)"] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-file" -version = "1.5.1" +version = "2.1.0" description = "File transport adapter for Requests" -category = "main" optional = false python-versions = "*" +files = [ + {file = "requests_file-2.1.0-py2.py3-none-any.whl", hash = "sha256:cf270de5a4c5874e84599fc5778303d496c10ae5e870bfa378818f35d21bda5c"}, + {file = "requests_file-2.1.0.tar.gz", hash = "sha256:0f549a3f3b0699415ac04d167e9cb39bccfb730cb832b4d20be3d9867356e658"}, +] [package.dependencies] requests = ">=1.0.0" -six = "*" [[package]] name = "resolvelib" -version = "0.5.5" +version = "1.0.1" description = "Resolve abstract dependencies into concrete ones" -category = "main" optional = false python-versions = "*" +files = [ + {file = "resolvelib-1.0.1-py2.py3-none-any.whl", hash = "sha256:d2da45d1a8dfee81bdd591647783e340ef3bcb104b54c383f70d422ef5cc7dbf"}, + {file = "resolvelib-1.0.1.tar.gz", hash = "sha256:04ce76cbd63fded2078ce224785da6ecd42b9564b1390793f64ddecbe997b309"}, +] [package.extras] examples = ["html5lib", "packaging", "pygraphviz", "requests"] -lint = ["black", "flake8"] -release = 
["setl", "towncrier"] +lint = ["black", "flake8", "isort", "mypy", "types-requests"] +release = ["build", "towncrier", "twine"] test = ["commentjson", "packaging", "pytest"] +[[package]] +name = "ruff" +version = "0.9.7" +description = "An extremely fast Python linter and code formatter, written in Rust." +optional = false +python-versions = ">=3.7" +files = [ + {file = "ruff-0.9.7-py3-none-linux_armv6l.whl", hash = "sha256:99d50def47305fe6f233eb8dabfd60047578ca87c9dcb235c9723ab1175180f4"}, + {file = "ruff-0.9.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:d59105ae9c44152c3d40a9c40d6331a7acd1cdf5ef404fbe31178a77b174ea66"}, + {file = "ruff-0.9.7-py3-none-macosx_11_0_arm64.whl", hash = "sha256:f313b5800483770bd540cddac7c90fc46f895f427b7820f18fe1822697f1fec9"}, + {file = "ruff-0.9.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:042ae32b41343888f59c0a4148f103208bf6b21c90118d51dc93a68366f4e903"}, + {file = "ruff-0.9.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:87862589373b33cc484b10831004e5e5ec47dc10d2b41ba770e837d4f429d721"}, + {file = "ruff-0.9.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a17e1e01bee0926d351a1ee9bc15c445beae888f90069a6192a07a84af544b6b"}, + {file = "ruff-0.9.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:7c1f880ac5b2cbebd58b8ebde57069a374865c73f3bf41f05fe7a179c1c8ef22"}, + {file = "ruff-0.9.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e63fc20143c291cab2841dbb8260e96bafbe1ba13fd3d60d28be2c71e312da49"}, + {file = "ruff-0.9.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:91ff963baed3e9a6a4eba2a02f4ca8eaa6eba1cc0521aec0987da8d62f53cbef"}, + {file = "ruff-0.9.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88362e3227c82f63eaebf0b2eff5b88990280fb1ecf7105523883ba8c3aaf6fb"}, + {file = "ruff-0.9.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:0372c5a90349f00212270421fe91874b866fd3626eb3b397ede06cd385f6f7e0"}, + {file = "ruff-0.9.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:d76b8ab60e99e6424cd9d3d923274a1324aefce04f8ea537136b8398bbae0a62"}, + {file = "ruff-0.9.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:0c439bdfc8983e1336577f00e09a4e7a78944fe01e4ea7fe616d00c3ec69a3d0"}, + {file = "ruff-0.9.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:115d1f15e8fdd445a7b4dc9a30abae22de3f6bcabeb503964904471691ef7606"}, + {file = "ruff-0.9.7-py3-none-win32.whl", hash = "sha256:e9ece95b7de5923cbf38893f066ed2872be2f2f477ba94f826c8defdd6ec6b7d"}, + {file = "ruff-0.9.7-py3-none-win_amd64.whl", hash = "sha256:3770fe52b9d691a15f0b87ada29c45324b2ace8f01200fb0c14845e499eb0c2c"}, + {file = "ruff-0.9.7-py3-none-win_arm64.whl", hash = "sha256:b075a700b2533feb7a01130ff656a4ec0d5f340bb540ad98759b8401c32c2037"}, + {file = "ruff-0.9.7.tar.gz", hash = "sha256:643757633417907510157b206e490c3aa11cab0c087c912f60e07fbafa87a4c6"}, +] + +[[package]] +name = "setproctitle" +version = "1.3.5" +description = "A Python module to customize the process title" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setproctitle-1.3.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:02870e0cb0de7f68a7a8a5b23c2bc0ce63821cab3d9b126f9be80bb6cd674c80"}, + {file = "setproctitle-1.3.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:55b278135be742b8901067479626d909f6613bd2d2c4fd0de6bb46f80e07a919"}, + {file = "setproctitle-1.3.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash 
= "sha256:53fc971f7bf7a674f571a23cdec70f2f0ac88152c59c06aa0808d0be6d834046"}, + {file = "setproctitle-1.3.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fb0500e1bc6f00b8ba696c3743ddff14c8679e3c2ca9d292c008ac51488d17cf"}, + {file = "setproctitle-1.3.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:995b3ac1b5fe510f4e1d1c19ebf19f4bceb448f2d6e8d99ea23f33cb6f1a277e"}, + {file = "setproctitle-1.3.5-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5a05e2c3fdfbda32b9c9da72d0506398d1efb5bd2c5981b9e12d3622eb3d4f9"}, + {file = "setproctitle-1.3.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:310c7f4ca4c8476a9840b2cd4b22ee602a49a3c902fdcd2dd8284685abd10a9a"}, + {file = "setproctitle-1.3.5-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:867af4a5c3d85484fbcc50ea88bcd375acf709cff88a3259575361849c0da351"}, + {file = "setproctitle-1.3.5-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:8ec0a7fe9f1ba90900144489bc93ce7dd4dec3f3df1e7f188c9e58364fe4a4c5"}, + {file = "setproctitle-1.3.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:aaee7acba2733a14a886488b7495bfec4a8d6407124c04a0946dbde1684230a3"}, + {file = "setproctitle-1.3.5-cp310-cp310-win32.whl", hash = "sha256:bd2cccd972e4282af4ce2c13cd9ebdf07be157eabafd8ce648fffdc8ae6fbe28"}, + {file = "setproctitle-1.3.5-cp310-cp310-win_amd64.whl", hash = "sha256:81f2328ac34c9584e1e5f87eea916c0bc48476a06606a07debae07acdd7ab5ea"}, + {file = "setproctitle-1.3.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1c8dcc250872385f2780a5ea58050b58cbc8b6a7e8444952a5a65c359886c593"}, + {file = "setproctitle-1.3.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ca82fae9eb4800231dd20229f06e8919787135a5581da245b8b05e864f34cc8b"}, + {file = "setproctitle-1.3.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0424e1d33232322541cb36fb279ea5242203cd6f20de7b4fb2a11973d8e8c2ce"}, + {file = "setproctitle-1.3.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fec8340ab543144d04a9d805d80a0aad73fdeb54bea6ff94e70d39a676ea4ec0"}, + {file = "setproctitle-1.3.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eab441c89f181271ab749077dcc94045a423e51f2fb0b120a1463ef9820a08d0"}, + {file = "setproctitle-1.3.5-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2c371550a2288901a0dcd84192691ebd3197a43c95f3e0b396ed6d1cedf5c6c"}, + {file = "setproctitle-1.3.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:78288ff5f9c415c56595b2257ad218936dd9fa726b36341b373b31ca958590fe"}, + {file = "setproctitle-1.3.5-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f1f13a25fc46731acab518602bb1149bfd8b5fabedf8290a7c0926d61414769d"}, + {file = "setproctitle-1.3.5-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:1534d6cd3854d035e40bf4c091984cbdd4d555d7579676d406c53c8f187c006f"}, + {file = "setproctitle-1.3.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:62a01c76708daac78b9688ffb95268c57cb57fa90b543043cda01358912fe2db"}, + {file = "setproctitle-1.3.5-cp311-cp311-win32.whl", hash = "sha256:ea07f29735d839eaed985990a0ec42c8aecefe8050da89fec35533d146a7826d"}, + {file = "setproctitle-1.3.5-cp311-cp311-win_amd64.whl", hash = "sha256:ab3ae11e10d13d514d4a5a15b4f619341142ba3e18da48c40e8614c5a1b5e3c3"}, + {file = 
"setproctitle-1.3.5-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:523424b9be4dea97d95b8a584b183f35c7bab2d0a3d995b01febf5b8a8de90e4"}, + {file = "setproctitle-1.3.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b6ec1d86c1b4d7b5f2bdceadf213310cf24696b82480a2a702194b8a0bfbcb47"}, + {file = "setproctitle-1.3.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea6c505264275a43e9b2acd2acfc11ac33caf52bc3167c9fced4418a810f6b1c"}, + {file = "setproctitle-1.3.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0b91e68e6685998e6353f296100ecabc313a6cb3e413d66a03d74b988b61f5ff"}, + {file = "setproctitle-1.3.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bc1fda208ae3a2285ad27aeab44c41daf2328abe58fa3270157a739866779199"}, + {file = "setproctitle-1.3.5-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:828727d220e46f048b82289018300a64547b46aaed96bf8810c05fe105426b41"}, + {file = "setproctitle-1.3.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:83b016221cf80028b2947be20630faa14e3e72a403e35f0ba29550b4e856767b"}, + {file = "setproctitle-1.3.5-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:6d8a411e752e794d052434139ca4234ffeceeb8d8d8ddc390a9051d7942b2726"}, + {file = "setproctitle-1.3.5-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:50cfbf86b9c63a2c2903f1231f0a58edeb775e651ae1af84eec8430b0571f29b"}, + {file = "setproctitle-1.3.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f3b5e2eacd572444770026c9dd3ddc7543ce427cdf452d40a408d1e95beefb30"}, + {file = "setproctitle-1.3.5-cp312-cp312-win32.whl", hash = "sha256:cf4e3ded98027de2596c6cc5bbd3302adfb3ca315c848f56516bb0b7e88de1e9"}, + {file = "setproctitle-1.3.5-cp312-cp312-win_amd64.whl", hash = "sha256:f7a8c01ffd013dda2bed6e7d5cb59fbb609e72f805abf3ee98360f38f7758d9b"}, + {file = "setproctitle-1.3.5-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:162fd76781f57f42ddf27c475e5fef6a8df4fdd69b28dd554e53e2eb2bfe0f95"}, + {file = "setproctitle-1.3.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4969d996bdfbe23bbd023cd0bae6c73a27371615c4ec5296a60cecce268659ef"}, + {file = "setproctitle-1.3.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd70c95a94473216e7c7a7a1f7d8ecbaca5b16d4ba93ddbfd32050fc485a8451"}, + {file = "setproctitle-1.3.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7a887582bfdb6dcbc482db0ef9e630ad23ca95875806ef2b444bf6fbd7b7d7ca"}, + {file = "setproctitle-1.3.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:755671c39a9e70834eeec6dc6b61e344399c49881d2e7ea3534a1c69669dd9cc"}, + {file = "setproctitle-1.3.5-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ab52b4c2ce056a1b60d439991a81ca90f019488d4b4f64b2779e6badd3677e6"}, + {file = "setproctitle-1.3.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:36178b944019ec7fc52bb967ffeee296a11d373734a7be276755bedb3db5c141"}, + {file = "setproctitle-1.3.5-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:269d41cd4f085b69821d1ee6599124f02dbbc79962b256e260b6c9021d037994"}, + {file = "setproctitle-1.3.5-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:d880630fd81d1b3bde121c352ca7ea2f2ff507ef40c3c011d0928ed491f912c9"}, + {file = "setproctitle-1.3.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash 
= "sha256:8a7fed67ab49f60bd51f3b4cffff3f8d754d1bb0a40e42869911301ec6519b65"}, + {file = "setproctitle-1.3.5-cp313-cp313-win32.whl", hash = "sha256:e9c0d0cfcf715631b10d5950d04a9978f63bc46535724ef7c2eaf1dca9988642"}, + {file = "setproctitle-1.3.5-cp313-cp313-win_amd64.whl", hash = "sha256:e1d28eb98c91fbebd3e443a45c7da5d84974959851ef304c330eabd654a386f1"}, + {file = "setproctitle-1.3.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8995a1217b52d11d92bafd069961a47c5e13d8751ca976a32b3ecbbd471eaf9b"}, + {file = "setproctitle-1.3.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ae2ce64ea87837c4e3e65a7a232ff80cf09aa7d916e74cb34a245c47fcd87981"}, + {file = "setproctitle-1.3.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20b84de1780bbb0adc67560a113a0ea57e6ecfce2325680de8efe6c2a2f781ac"}, + {file = "setproctitle-1.3.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1b1d2628ac9868f960d7e87b3a9b2bb337104c3644b699e52e01efd7e106e4fe"}, + {file = "setproctitle-1.3.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fa912c4d08c66afda30dd5af8f2e9c59065dfc36a51edbd5419c3a7c962875aa"}, + {file = "setproctitle-1.3.5-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc4f783e100f8b451cd92fcabd3b831edfb1f7cb02be4a79b972f138e0001885"}, + {file = "setproctitle-1.3.5-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:8ca56e39d10b6758046694a84950e5c5570a034c409ef3337595f64fc2cfa94d"}, + {file = "setproctitle-1.3.5-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:8915d69260ba6a6aaf9a48f6b53dbf9f8e4dc0cb4ae25bc5edb16a1666b6e47c"}, + {file = "setproctitle-1.3.5-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:7edd4fbb9fd17ed0e5a7f8bde9fa61c3987a34372084c45bab4eab6a2e554762"}, + {file = "setproctitle-1.3.5-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:d0b19fd76d46b8096a463724739c3b09cf5ce38317f559f56f424f6ce7158de3"}, + {file = "setproctitle-1.3.5-cp38-cp38-win32.whl", hash = "sha256:53ce572cdbd43a0bed2aa24299cd823ebf233a7fa720cc7f8634728c213679c0"}, + {file = "setproctitle-1.3.5-cp38-cp38-win_amd64.whl", hash = "sha256:a58f00f35d6038ce1e8a9e5f87cb5ecce13ce118c5977a603566ad1fccc8d2cb"}, + {file = "setproctitle-1.3.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c4b299b5bbadf00034978b8d741c85af25173146747eb9dab22596ec805a52d6"}, + {file = "setproctitle-1.3.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d57e7626329d4fb138da5ce15270b08a91326969956fb19c7a8fec2639066704"}, + {file = "setproctitle-1.3.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4272295721cf1fd2acf960b674d6dc09bec87f2a1e48995817b4ec4a3d483faf"}, + {file = "setproctitle-1.3.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f8305b6e6c203222c61318f338f1de08269ec66c247bf251593c215ff1fbeaf9"}, + {file = "setproctitle-1.3.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:becc9f3f605936506d2bd63d9cf817b7ee66b10d204184c4a633064dbed579d6"}, + {file = "setproctitle-1.3.5-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4629de80c47155a26e8d87a0a92d9428aa8d79ccfe2c20fd18888580619704e1"}, + {file = "setproctitle-1.3.5-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:f1af1d310b5b6cda692da52bd862a9833086c0a3f8380fa92505dd23857dcf60"}, + {file = 
"setproctitle-1.3.5-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3bb6ea3d6e690677619508050bc681d86223723bdf67e4e8a8dffc3d04ca3044"}, + {file = "setproctitle-1.3.5-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:322067ef1ffe70d297b00bee8a3862fed96021aa4318e3bce2d7c3bfa7a8d1e7"}, + {file = "setproctitle-1.3.5-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1b58d49c32a46c48dcc2812635a89e6bee31139b03818da49a0bbaeaf01edef9"}, + {file = "setproctitle-1.3.5-cp39-cp39-win32.whl", hash = "sha256:707c23d4a88f5e66f1005d93558bf84eb45fc0fb0c4f33480a0c7d0895e8e848"}, + {file = "setproctitle-1.3.5-cp39-cp39-win_amd64.whl", hash = "sha256:c64199a73d442a06d372b5286942229a43e86fa41bf36f317dcc60c036aff0bb"}, + {file = "setproctitle-1.3.5-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:dc66b84beb0d5eb03abf0c3140c6d2cbe3d67ae9f0824a09dfa8c6ff164319a6"}, + {file = "setproctitle-1.3.5-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:31dc9b330e7cac7685bdef790747c07914081c11ee1066eb0c597303dfb52010"}, + {file = "setproctitle-1.3.5-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4028639b511f5e641d116b3b54ad70c637ebd1b4baac0948283daf11b104119f"}, + {file = "setproctitle-1.3.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:6bddef4e27d0ed74e44b58bf050bc3108591bf17d20d461fc59cd141282f849c"}, + {file = "setproctitle-1.3.5-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:9996be1d1df399c3cdc6d72ce0064e46bc74fc6e29fe16a328511a303dd4d418"}, + {file = "setproctitle-1.3.5-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5cefc2dbdc48121022c3c05644cd3706f08e0b3c0ce07814d3c04daba0617936"}, + {file = "setproctitle-1.3.5-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cef63879c79a570aabf7c158f453bf8d1285f0fda4b6b9b7a52d64b49c084d40"}, + {file = "setproctitle-1.3.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:a863296a31fb578726c570314cb78ff3a3fddb65963dc01ea33731760f20a92c"}, + {file = "setproctitle-1.3.5-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:b63bda3cb4b6526720dc7c6940b891c593f41771d119aeb8763875801ce2296d"}, + {file = "setproctitle-1.3.5-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:95913af603da5b4c7635bf1fb67ecc5df7c18360b6cfb6740fd743bb150a6e17"}, + {file = "setproctitle-1.3.5-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36b130cf8fe76dc05ad1d48cc9ff3699eb1f0d8edbf6f46a3ce46a7041e49d7b"}, + {file = "setproctitle-1.3.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:fe3bfd5e51c24349d022e062a96c316a1b8862ea9a0cf5ea2a8b2ae008b77cec"}, + {file = "setproctitle-1.3.5.tar.gz", hash = "sha256:1e6eaeaf8a734d428a95d8c104643b39af7d247d604f40a7bebcf3960a853c5e"}, +] + +[package.extras] +test = ["pytest"] + [[package]] name = "six" -version = "1.16.0" +version = "1.17.0" description = "Python 2 and 3 compatibility utilities" -category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, + {file = "six-1.17.0.tar.gz", hash = 
"sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +description = "Sniff out which async library your code is running under" +optional = false +python-versions = ">=3.7" +files = [ + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, +] + +[[package]] +name = "socksio" +version = "1.0.0" +description = "Sans-I/O implementation of SOCKS4, SOCKS4A, and SOCKS5." +optional = false +python-versions = ">=3.6" +files = [ + {file = "socksio-1.0.0-py3-none-any.whl", hash = "sha256:95dc1f15f9b34e8d7b16f06d74b8ccf48f609af32ab33c608d08761c5dcbb1f3"}, + {file = "socksio-1.0.0.tar.gz", hash = "sha256:f88beb3da5b5c38b9890469de67d0cb0f9d494b78b106ca1845f96c10b91c4ac"}, +] + +[[package]] +name = "soupsieve" +version = "2.6" +description = "A modern CSS selector implementation for Beautiful Soup." +optional = false +python-versions = ">=3.8" +files = [ + {file = "soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9"}, + {file = "soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb"}, +] + +[[package]] +name = "starlette" +version = "0.45.3" +description = "The little ASGI library that shines." +optional = false +python-versions = ">=3.9" +files = [ + {file = "starlette-0.45.3-py3-none-any.whl", hash = "sha256:dfb6d332576f136ec740296c7e8bb8c8a7125044e7c6da30744718880cdd059d"}, + {file = "starlette-0.45.3.tar.gz", hash = "sha256:2cbcba2a75806f8a41c722141486f37c28e30a0921c5f6fe4346cb0dcee1302f"}, +] + +[package.dependencies] +anyio = ">=3.6.2,<5" +typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""} + +[package.extras] +full = ["httpx (>=0.27.0,<0.29.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.18)", "pyyaml"] [[package]] name = "tabulate" version = "0.8.10" description = "Pretty-print tabular data" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "tabulate-0.8.10-py3-none-any.whl", hash = "sha256:0ba055423dbaa164b9e456abe7920c5e8ed33fcc16f6d1b2f2d152c8e1e8b4fc"}, + {file = "tabulate-0.8.10.tar.gz", hash = "sha256:6c57f3f3dd7ac2782770155f3adb2db0b1a269637e42f27599925e64b114f519"}, +] [package.extras] widechars = ["wcwidth"] [[package]] name = "tldextract" -version = "3.3.1" +version = "5.1.3" description = "Accurately separates a URL's subdomain, domain, and public suffix, using the Public Suffix List (PSL). By default, this includes the public ICANN TLDs and their exceptions. You can optionally support the Public Suffix List's private domains as well." 
-category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.9" +files = [ + {file = "tldextract-5.1.3-py3-none-any.whl", hash = "sha256:78de310cc2ca018692de5ddf320f9d6bd7c5cf857d0fd4f2175f0cdf4440ea75"}, + {file = "tldextract-5.1.3.tar.gz", hash = "sha256:d43c7284c23f5dc8a42fd0fee2abede2ff74cc622674e4cb07f514ab3330c338"}, +] [package.dependencies] filelock = ">=3.0.8" @@ -655,482 +2657,455 @@ idna = "*" requests = ">=2.1.0" requests-file = ">=1.4" +[package.extras] +release = ["build", "twine"] +testing = ["mypy", "pytest", "pytest-gitignore", "pytest-mock", "responses", "ruff", "syrupy", "tox", "tox-uv", "types-filelock", "types-requests"] + [[package]] name = "tomli" -version = "2.0.1" +version = "2.2.1" description = "A lil' TOML parser" -category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" +files = [ + {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, + {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}, + {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}, + {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}, + {file = 
"tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}, + {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}, + {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}, + {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}, + {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}, + {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, + {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, +] [[package]] -name = "typing-extensions" -version = "4.3.0" -description = "Backported and Experimental Type Hints for Python 3.7+" -category = "main" +name = "tomlkit" +version = "0.13.2" +description = "Style preserving TOML library" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" +files = [ + {file = "tomlkit-0.13.2-py3-none-any.whl", hash = "sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde"}, + {file = "tomlkit-0.13.2.tar.gz", hash = "sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79"}, +] [[package]] -name = "url-normalize" -version = "1.4.3" -description = "URL normalization for Python" -category = "main" +name = "tornado" +version = "6.4.2" +description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." 
optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +python-versions = ">=3.8" +files = [ + {file = "tornado-6.4.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e828cce1123e9e44ae2a50a9de3055497ab1d0aeb440c5ac23064d9e44880da1"}, + {file = "tornado-6.4.2-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:072ce12ada169c5b00b7d92a99ba089447ccc993ea2143c9ede887e0937aa803"}, + {file = "tornado-6.4.2-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a017d239bd1bb0919f72af256a970624241f070496635784d9bf0db640d3fec"}, + {file = "tornado-6.4.2-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c36e62ce8f63409301537222faffcef7dfc5284f27eec227389f2ad11b09d946"}, + {file = "tornado-6.4.2-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bca9eb02196e789c9cb5c3c7c0f04fb447dc2adffd95265b2c7223a8a615ccbf"}, + {file = "tornado-6.4.2-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:304463bd0772442ff4d0f5149c6f1c2135a1fae045adf070821c6cdc76980634"}, + {file = "tornado-6.4.2-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:c82c46813ba483a385ab2a99caeaedf92585a1f90defb5693351fa7e4ea0bf73"}, + {file = "tornado-6.4.2-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:932d195ca9015956fa502c6b56af9eb06106140d844a335590c1ec7f5277d10c"}, + {file = "tornado-6.4.2-cp38-abi3-win32.whl", hash = "sha256:2876cef82e6c5978fde1e0d5b1f919d756968d5b4282418f3146b79b58556482"}, + {file = "tornado-6.4.2-cp38-abi3-win_amd64.whl", hash = "sha256:908b71bf3ff37d81073356a5fadcc660eb10c1476ee6e2725588626ce7e5ca38"}, + {file = "tornado-6.4.2.tar.gz", hash = "sha256:92bad5b4746e9879fd7bf1eb21dce4e3fc5128d71601f80005afa39237ad620b"}, +] -[package.dependencies] -six = "*" +[[package]] +name = "typing-extensions" +version = "4.12.2" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, +] + +[[package]] +name = "unidecode" +version = "1.3.8" +description = "ASCII transliterations of Unicode text" +optional = false +python-versions = ">=3.5" +files = [ + {file = "Unidecode-1.3.8-py3-none-any.whl", hash = "sha256:d130a61ce6696f8148a3bd8fe779c99adeb4b870584eeb9526584e9aa091fd39"}, + {file = "Unidecode-1.3.8.tar.gz", hash = "sha256:cfdb349d46ed3873ece4586b96aa75258726e2fa8ec21d6f00a591d98806c2f4"}, +] [[package]] name = "urllib3" -version = "1.26.12" +version = "2.3.0" description = "HTTP library with thread-safe connection pooling, file post, and more." 
-category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4" +python-versions = ">=3.9" +files = [ + {file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"}, + {file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"}, +] [package.extras] -brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"] -secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "urllib3-secure-extra", "ipaddress"] -socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] [[package]] -name = "websocket-client" -version = "1.3.3" -description = "WebSocket client for Python with low level API options" -category = "main" +name = "uvicorn" +version = "0.34.0" +description = "The lightning-fast ASGI server." optional = false -python-versions = ">=3.7" +python-versions = ">=3.9" +files = [ + {file = "uvicorn-0.34.0-py3-none-any.whl", hash = "sha256:023dc038422502fa28a09c7a30bf2b6991512da7dcdb8fd35fe57cfc154126f4"}, + {file = "uvicorn-0.34.0.tar.gz", hash = "sha256:404051050cd7e905de2c9a7e61790943440b3416f49cb409f965d9dcd0fa73e9"}, +] + +[package.dependencies] +click = ">=7.0" +h11 = ">=0.8" +typing-extensions = {version = ">=4.0", markers = "python_version < \"3.11\""} [package.extras] -docs = ["Sphinx (>=3.4)", "sphinx-rtd-theme (>=0.5)"] -optional = ["python-socks", "wsaccel"] -test = ["websockets"] +standard = ["colorama (>=0.4)", "httptools (>=0.6.3)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"] + +[[package]] +name = "verspec" +version = "0.1.0" +description = "Flexible version handling" +optional = false +python-versions = "*" +files = [ + {file = "verspec-0.1.0-py3-none-any.whl", hash = "sha256:741877d5633cc9464c45a469ae2a31e801e6dbbaa85b9675d481cda100f11c31"}, + {file = "verspec-0.1.0.tar.gz", hash = "sha256:c4504ca697b2056cdb4bfa7121461f5a0e81809255b41c03dda4ba823637c01e"}, +] + +[package.extras] +test = ["coverage", "flake8 (>=3.7)", "mypy", "pretend", "pytest"] + +[[package]] +name = "virtualenv" +version = "20.29.2" +description = "Virtual Python Environment builder" +optional = false +python-versions = ">=3.8" +files = [ + {file = "virtualenv-20.29.2-py3-none-any.whl", hash = "sha256:febddfc3d1ea571bdb1dc0f98d7b45d24def7428214d4fb73cc486c9568cce6a"}, + {file = "virtualenv-20.29.2.tar.gz", hash = "sha256:fdaabebf6d03b5ba83ae0a02cfe96f48a716f4fae556461d180825866f75b728"}, +] + +[package.dependencies] +distlib = ">=0.3.7,<1" +filelock = ">=3.12.2,<4" +platformdirs = ">=3.9.1,<5" + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] + +[[package]] +name = "watchdog" +version = "6.0.0" +description = "Filesystem events monitoring" +optional = false +python-versions = ">=3.9" +files = [ + {file = 
"watchdog-6.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d1cdb490583ebd691c012b3d6dae011000fe42edb7a82ece80965b42abd61f26"}, + {file = "watchdog-6.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bc64ab3bdb6a04d69d4023b29422170b74681784ffb9463ed4870cf2f3e66112"}, + {file = "watchdog-6.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c897ac1b55c5a1461e16dae288d22bb2e412ba9807df8397a635d88f671d36c3"}, + {file = "watchdog-6.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6eb11feb5a0d452ee41f824e271ca311a09e250441c262ca2fd7ebcf2461a06c"}, + {file = "watchdog-6.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ef810fbf7b781a5a593894e4f439773830bdecb885e6880d957d5b9382a960d2"}, + {file = "watchdog-6.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:afd0fe1b2270917c5e23c2a65ce50c2a4abb63daafb0d419fde368e272a76b7c"}, + {file = "watchdog-6.0.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:bdd4e6f14b8b18c334febb9c4425a878a2ac20efd1e0b231978e7b150f92a948"}, + {file = "watchdog-6.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c7c15dda13c4eb00d6fb6fc508b3c0ed88b9d5d374056b239c4ad1611125c860"}, + {file = "watchdog-6.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6f10cb2d5902447c7d0da897e2c6768bca89174d0c6e1e30abec5421af97a5b0"}, + {file = "watchdog-6.0.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:490ab2ef84f11129844c23fb14ecf30ef3d8a6abafd3754a6f75ca1e6654136c"}, + {file = "watchdog-6.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:76aae96b00ae814b181bb25b1b98076d5fc84e8a53cd8885a318b42b6d3a5134"}, + {file = "watchdog-6.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a175f755fc2279e0b7312c0035d52e27211a5bc39719dd529625b1930917345b"}, + {file = "watchdog-6.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e6f0e77c9417e7cd62af82529b10563db3423625c5fce018430b249bf977f9e8"}, + {file = "watchdog-6.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:90c8e78f3b94014f7aaae121e6b909674df5b46ec24d6bebc45c44c56729af2a"}, + {file = "watchdog-6.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e7631a77ffb1f7d2eefa4445ebbee491c720a5661ddf6df3498ebecae5ed375c"}, + {file = "watchdog-6.0.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:c7ac31a19f4545dd92fc25d200694098f42c9a8e391bc00bdd362c5736dbf881"}, + {file = "watchdog-6.0.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9513f27a1a582d9808cf21a07dae516f0fab1cf2d7683a742c498b93eedabb11"}, + {file = "watchdog-6.0.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7a0e56874cfbc4b9b05c60c8a1926fedf56324bb08cfbc188969777940aef3aa"}, + {file = "watchdog-6.0.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:e6439e374fc012255b4ec786ae3c4bc838cd7309a540e5fe0952d03687d8804e"}, + {file = "watchdog-6.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:7607498efa04a3542ae3e05e64da8202e58159aa1fa4acddf7678d34a35d4f13"}, + {file = "watchdog-6.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:9041567ee8953024c83343288ccc458fd0a2d811d6a0fd68c4c22609e3490379"}, + {file = "watchdog-6.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:82dc3e3143c7e38ec49d61af98d6558288c415eac98486a5c581726e0737c00e"}, + {file = "watchdog-6.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:212ac9b8bf1161dc91bd09c048048a95ca3a4c4f5e5d4a7d1b1a7d5752a7f96f"}, + {file = "watchdog-6.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:e3df4cbb9a450c6d49318f6d14f4bbc80d763fa587ba46ec86f99f9e6876bb26"}, + {file = 
"watchdog-6.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:2cce7cfc2008eb51feb6aab51251fd79b85d9894e98ba847408f662b3395ca3c"}, + {file = "watchdog-6.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:20ffe5b202af80ab4266dcd3e91aae72bf2da48c0d33bdb15c66658e685e94e2"}, + {file = "watchdog-6.0.0-py3-none-win32.whl", hash = "sha256:07df1fdd701c5d4c8e55ef6cf55b8f0120fe1aef7ef39a1c6fc6bc2e606d517a"}, + {file = "watchdog-6.0.0-py3-none-win_amd64.whl", hash = "sha256:cbafb470cf848d93b5d013e2ecb245d4aa1c8fd0504e863ccefa32445359d680"}, + {file = "watchdog-6.0.0-py3-none-win_ia64.whl", hash = "sha256:a1914259fa9e1454315171103c6a30961236f508b9b623eae470268bbcc6a22f"}, + {file = "watchdog-6.0.0.tar.gz", hash = "sha256:9ddf7c82fda3ae8e24decda1338ede66e1c99883db93711d8fb941eaa2d8c282"}, +] + +[package.extras] +watchmedo = ["PyYAML (>=3.10)"] + +[[package]] +name = "websockets" +version = "15.0" +description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" +optional = false +python-versions = ">=3.9" +files = [ + {file = "websockets-15.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5e6ee18a53dd5743e6155b8ff7e8e477c25b29b440f87f65be8165275c87fef0"}, + {file = "websockets-15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ee06405ea2e67366a661ed313e14cf2a86e84142a3462852eb96348f7219cee3"}, + {file = "websockets-15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8711682a629bbcaf492f5e0af72d378e976ea1d127a2d47584fa1c2c080b436b"}, + {file = "websockets-15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94c4a9b01eede952442c088d415861b0cf2053cbd696b863f6d5022d4e4e2453"}, + {file = "websockets-15.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:45535fead66e873f411c1d3cf0d3e175e66f4dd83c4f59d707d5b3e4c56541c4"}, + {file = "websockets-15.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e389efe46ccb25a1f93d08c7a74e8123a2517f7b7458f043bd7529d1a63ffeb"}, + {file = "websockets-15.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:67a04754d121ea5ca39ddedc3f77071651fb5b0bc6b973c71c515415b44ed9c5"}, + {file = "websockets-15.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:bd66b4865c8b853b8cca7379afb692fc7f52cf898786537dfb5e5e2d64f0a47f"}, + {file = "websockets-15.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:a4cc73a6ae0a6751b76e69cece9d0311f054da9b22df6a12f2c53111735657c8"}, + {file = "websockets-15.0-cp310-cp310-win32.whl", hash = "sha256:89da58e4005e153b03fe8b8794330e3f6a9774ee9e1c3bd5bc52eb098c3b0c4f"}, + {file = "websockets-15.0-cp310-cp310-win_amd64.whl", hash = "sha256:4ff380aabd7a74a42a760ee76c68826a8f417ceb6ea415bd574a035a111fd133"}, + {file = "websockets-15.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:dd24c4d256558429aeeb8d6c24ebad4e982ac52c50bc3670ae8646c181263965"}, + {file = "websockets-15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f83eca8cbfd168e424dfa3b3b5c955d6c281e8fc09feb9d870886ff8d03683c7"}, + {file = "websockets-15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4095a1f2093002c2208becf6f9a178b336b7572512ee0a1179731acb7788e8ad"}, + {file = "websockets-15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb915101dfbf318486364ce85662bb7b020840f68138014972c08331458d41f3"}, + {file = "websockets-15.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:45d464622314973d78f364689d5dbb9144e559f93dca11b11af3f2480b5034e1"}, + {file = "websockets-15.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ace960769d60037ca9625b4c578a6f28a14301bd2a1ff13bb00e824ac9f73e55"}, + {file = "websockets-15.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c7cd4b1015d2f60dfe539ee6c95bc968d5d5fad92ab01bb5501a77393da4f596"}, + {file = "websockets-15.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4f7290295794b5dec470867c7baa4a14182b9732603fd0caf2a5bf1dc3ccabf3"}, + {file = "websockets-15.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3abd670ca7ce230d5a624fd3d55e055215d8d9b723adee0a348352f5d8d12ff4"}, + {file = "websockets-15.0-cp311-cp311-win32.whl", hash = "sha256:110a847085246ab8d4d119632145224d6b49e406c64f1bbeed45c6f05097b680"}, + {file = "websockets-15.0-cp311-cp311-win_amd64.whl", hash = "sha256:8d7bbbe2cd6ed80aceef2a14e9f1c1b61683194c216472ed5ff33b700e784e37"}, + {file = "websockets-15.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:cccc18077acd34c8072578394ec79563664b1c205f7a86a62e94fafc7b59001f"}, + {file = "websockets-15.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d4c22992e24f12de340ca5f824121a5b3e1a37ad4360b4e1aaf15e9d1c42582d"}, + {file = "websockets-15.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1206432cc6c644f6fc03374b264c5ff805d980311563202ed7fef91a38906276"}, + {file = "websockets-15.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d3cc75ef3e17490042c47e0523aee1bcc4eacd2482796107fd59dd1100a44bc"}, + {file = "websockets-15.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b89504227a5311610e4be16071465885a0a3d6b0e82e305ef46d9b064ce5fb72"}, + {file = "websockets-15.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56e3efe356416bc67a8e093607315951d76910f03d2b3ad49c4ade9207bf710d"}, + {file = "websockets-15.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0f2205cdb444a42a7919690238fb5979a05439b9dbb73dd47c863d39640d85ab"}, + {file = "websockets-15.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:aea01f40995fa0945c020228ab919b8dfc93fc8a9f2d3d705ab5b793f32d9e99"}, + {file = "websockets-15.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a9f8e33747b1332db11cf7fcf4a9512bef9748cb5eb4d3f7fbc8c30d75dc6ffc"}, + {file = "websockets-15.0-cp312-cp312-win32.whl", hash = "sha256:32e02a2d83f4954aa8c17e03fe8ec6962432c39aca4be7e8ee346b05a3476904"}, + {file = "websockets-15.0-cp312-cp312-win_amd64.whl", hash = "sha256:ffc02b159b65c05f2ed9ec176b715b66918a674bd4daed48a9a7a590dd4be1aa"}, + {file = "websockets-15.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:d2244d8ab24374bed366f9ff206e2619345f9cd7fe79aad5225f53faac28b6b1"}, + {file = "websockets-15.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:3a302241fbe825a3e4fe07666a2ab513edfdc6d43ce24b79691b45115273b5e7"}, + {file = "websockets-15.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:10552fed076757a70ba2c18edcbc601c7637b30cdfe8c24b65171e824c7d6081"}, + {file = "websockets-15.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c53f97032b87a406044a1c33d1e9290cc38b117a8062e8a8b285175d7e2f99c9"}, + {file = "websockets-15.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:1caf951110ca757b8ad9c4974f5cac7b8413004d2f29707e4d03a65d54cedf2b"}, + {file = "websockets-15.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8bf1ab71f9f23b0a1d52ec1682a3907e0c208c12fef9c3e99d2b80166b17905f"}, + {file = "websockets-15.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bfcd3acc1a81f106abac6afd42327d2cf1e77ec905ae11dc1d9142a006a496b6"}, + {file = "websockets-15.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:c8c5c8e1bac05ef3c23722e591ef4f688f528235e2480f157a9cfe0a19081375"}, + {file = "websockets-15.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:86bfb52a9cfbcc09aba2b71388b0a20ea5c52b6517c0b2e316222435a8cdab72"}, + {file = "websockets-15.0-cp313-cp313-win32.whl", hash = "sha256:26ba70fed190708551c19a360f9d7eca8e8c0f615d19a574292b7229e0ae324c"}, + {file = "websockets-15.0-cp313-cp313-win_amd64.whl", hash = "sha256:ae721bcc8e69846af00b7a77a220614d9b2ec57d25017a6bbde3a99473e41ce8"}, + {file = "websockets-15.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c348abc5924caa02a62896300e32ea80a81521f91d6db2e853e6b1994017c9f6"}, + {file = "websockets-15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5294fcb410ed0a45d5d1cdedc4e51a60aab5b2b3193999028ea94afc2f554b05"}, + {file = "websockets-15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c24ba103ecf45861e2e1f933d40b2d93f5d52d8228870c3e7bf1299cd1cb8ff1"}, + {file = "websockets-15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc8821a03bcfb36e4e4705316f6b66af28450357af8a575dc8f4b09bf02a3dee"}, + {file = "websockets-15.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc5ae23ada6515f31604f700009e2df90b091b67d463a8401c1d8a37f76c1d7"}, + {file = "websockets-15.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ac67b542505186b3bbdaffbc303292e1ee9c8729e5d5df243c1f20f4bb9057e"}, + {file = "websockets-15.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c86dc2068f1c5ca2065aca34f257bbf4f78caf566eb230f692ad347da191f0a1"}, + {file = "websockets-15.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:30cff3ef329682b6182c01c568f551481774c476722020b8f7d0daacbed07a17"}, + {file = "websockets-15.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:98dcf978d4c6048965d1762abd534c9d53bae981a035bfe486690ba11f49bbbb"}, + {file = "websockets-15.0-cp39-cp39-win32.whl", hash = "sha256:37d66646f929ae7c22c79bc73ec4074d6db45e6384500ee3e0d476daf55482a9"}, + {file = "websockets-15.0-cp39-cp39-win_amd64.whl", hash = "sha256:24d5333a9b2343330f0f4eb88546e2c32a7f5c280f8dd7d3cc079beb0901781b"}, + {file = "websockets-15.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:b499caef4bca9cbd0bd23cd3386f5113ee7378094a3cb613a2fa543260fe9506"}, + {file = "websockets-15.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:17f2854c6bd9ee008c4b270f7010fe2da6c16eac5724a175e75010aacd905b31"}, + {file = "websockets-15.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89f72524033abbfde880ad338fd3c2c16e31ae232323ebdfbc745cbb1b3dcc03"}, + {file = "websockets-15.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1657a9eecb29d7838e3b415458cc494e6d1b194f7ac73a34aa55c6fb6c72d1f3"}, + {file = 
"websockets-15.0-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e413352a921f5ad5d66f9e2869b977e88d5103fc528b6deb8423028a2befd842"}, + {file = "websockets-15.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:8561c48b0090993e3b2a54db480cab1d23eb2c5735067213bb90f402806339f5"}, + {file = "websockets-15.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:190bc6ef8690cd88232a038d1b15714c258f79653abad62f7048249b09438af3"}, + {file = "websockets-15.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:327adab7671f3726b0ba69be9e865bba23b37a605b585e65895c428f6e47e766"}, + {file = "websockets-15.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bd8ef197c87afe0a9009f7a28b5dc613bfc585d329f80b7af404e766aa9e8c7"}, + {file = "websockets-15.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:789c43bf4a10cd067c24c321238e800b8b2716c863ddb2294d2fed886fa5a689"}, + {file = "websockets-15.0-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7394c0b7d460569c9285fa089a429f58465db930012566c03046f9e3ab0ed181"}, + {file = "websockets-15.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2ea4f210422b912ebe58ef0ad33088bc8e5c5ff9655a8822500690abc3b1232d"}, + {file = "websockets-15.0-py3-none-any.whl", hash = "sha256:51ffd53c53c4442415b613497a34ba0aa7b99ac07f1e4a62db5dcd640ae6c3c3"}, + {file = "websockets-15.0.tar.gz", hash = "sha256:ca36151289a15b39d8d683fd8b7abbe26fc50be311066c5f8dcf3cb8cee107ab"}, +] + +[[package]] +name = "werkzeug" +version = "3.1.3" +description = "The comprehensive WSGI web application library." +optional = false +python-versions = ">=3.9" +files = [ + {file = "werkzeug-3.1.3-py3-none-any.whl", hash = "sha256:54b78bf3716d19a65be4fceccc0d1d7b89e608834989dfae50ea87564639213e"}, + {file = "werkzeug-3.1.3.tar.gz", hash = "sha256:60723ce945c19328679790e3282cc758aa4a6040e4bb330f53d30fa546d44746"}, +] + +[package.dependencies] +MarkupSafe = ">=2.1.1" + +[package.extras] +watchdog = ["watchdog (>=2.3)"] [[package]] name = "wordninja" version = "2.0.0" description = "Probabilistically split concatenated words using NLP based on English Wikipedia uni-gram frequencies." 
-category = "main" optional = false python-versions = "*" +files = [ + {file = "wordninja-2.0.0.tar.gz", hash = "sha256:1a1cc7ec146ad19d6f71941ee82aef3d31221700f0d8bf844136cf8df79d281a"}, +] [[package]] name = "xmltodict" -version = "0.12.0" +version = "0.14.2" description = "Makes working with XML feel like you are working with JSON" -category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.6" +files = [ + {file = "xmltodict-0.14.2-py2.py3-none-any.whl", hash = "sha256:20cc7d723ed729276e808f26fb6b3599f786cbc37e06c65e192ba77c40f20aac"}, + {file = "xmltodict-0.14.2.tar.gz", hash = "sha256:201e7c28bb210e374999d1dde6382923ab0ed1a8a5faeece48ab525b7810a553"}, +] [[package]] name = "xmltojson" -version = "2.0.1" +version = "2.0.3" description = "A Python module and cli tool to quickly convert xml text or files into json" -category = "main" optional = false -python-versions = ">=3.7,<4.0" +python-versions = "<4.0,>=3.7" +files = [ + {file = "xmltojson-2.0.3-py3-none-any.whl", hash = "sha256:1b68519bd14fbf3e28baa630b8c9116b5d3aa8976648f277a78ae3448498889a"}, + {file = "xmltojson-2.0.3.tar.gz", hash = "sha256:68a0022272adf70b8f2639186172c808e9502cd03c0b851a65e0760561c7801d"}, +] [package.dependencies] -xmltodict = ">=0.12.0,<0.13.0" - -[metadata] -lock-version = "1.1" -python-versions = "^3.9" -content-hash = "e9644d4f02777a8391eb2fb812831c5f0dd0a30079630e3991ac551ec6a40b14" +xmltodict = "0.14.2" -[metadata.files] -ansible = [ - {file = "ansible-5.10.0.tar.gz", hash = "sha256:c77f556a7c3d9948f86639c5742aa885be25a7cdbda3bfb41a8314b60a3341e8"}, -] -ansible-core = [] -ansible-runner = [ - {file = "ansible-runner-2.2.1.tar.gz", hash = "sha256:cd9b6cb1174011b4e2e0a5993d4d04d928cde5fe22a8993aed4438493387c182"}, - {file = "ansible_runner-2.2.1-py3-none-any.whl", hash = "sha256:bc3348580b0cbb8b5831039dadad6cce115e155b91c7ad0f66d751d54a0586a0"}, -] -antlr4-python3-runtime = [ - {file = "antlr4-python3-runtime-4.9.3.tar.gz", hash = "sha256:f224469b4168294902bb1efa80a8bf7855f24c99aef99cbefc1bcd3cce77881b"}, -] -appdirs = [ - {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, - {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, -] -atomicwrites = [] -attrs = [ - {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"}, - {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"}, -] -black = [ - {file = "black-22.6.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f586c26118bc6e714ec58c09df0157fe2d9ee195c764f630eb0d8e7ccce72e69"}, - {file = "black-22.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b270a168d69edb8b7ed32c193ef10fd27844e5c60852039599f9184460ce0807"}, - {file = "black-22.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6797f58943fceb1c461fb572edbe828d811e719c24e03375fd25170ada53825e"}, - {file = "black-22.6.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c85928b9d5f83b23cee7d0efcb310172412fbf7cb9d9ce963bd67fd141781def"}, - {file = "black-22.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:f6fe02afde060bbeef044af7996f335fbe90b039ccf3f5eb8f16df8b20f77666"}, - {file = "black-22.6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:cfaf3895a9634e882bf9d2363fed5af8888802d670f58b279b0bece00e9a872d"}, - {file = 
"black-22.6.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94783f636bca89f11eb5d50437e8e17fbc6a929a628d82304c80fa9cd945f256"}, - {file = "black-22.6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:2ea29072e954a4d55a2ff58971b83365eba5d3d357352a07a7a4df0d95f51c78"}, - {file = "black-22.6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e439798f819d49ba1c0bd9664427a05aab79bfba777a6db94fd4e56fae0cb849"}, - {file = "black-22.6.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:187d96c5e713f441a5829e77120c269b6514418f4513a390b0499b0987f2ff1c"}, - {file = "black-22.6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:074458dc2f6e0d3dab7928d4417bb6957bb834434516f21514138437accdbe90"}, - {file = "black-22.6.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a218d7e5856f91d20f04e931b6f16d15356db1c846ee55f01bac297a705ca24f"}, - {file = "black-22.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:568ac3c465b1c8b34b61cd7a4e349e93f91abf0f9371eda1cf87194663ab684e"}, - {file = "black-22.6.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6c1734ab264b8f7929cef8ae5f900b85d579e6cbfde09d7387da8f04771b51c6"}, - {file = "black-22.6.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9a3ac16efe9ec7d7381ddebcc022119794872abce99475345c5a61aa18c45ad"}, - {file = "black-22.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:b9fd45787ba8aa3f5e0a0a98920c1012c884622c6c920dbe98dbd05bc7c70fbf"}, - {file = "black-22.6.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7ba9be198ecca5031cd78745780d65a3f75a34b2ff9be5837045dce55db83d1c"}, - {file = "black-22.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a3db5b6409b96d9bd543323b23ef32a1a2b06416d525d27e0f67e74f1446c8f2"}, - {file = "black-22.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:560558527e52ce8afba936fcce93a7411ab40c7d5fe8c2463e279e843c0328ee"}, - {file = "black-22.6.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b154e6bbde1e79ea3260c4b40c0b7b3109ffcdf7bc4ebf8859169a6af72cd70b"}, - {file = "black-22.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:4af5bc0e1f96be5ae9bd7aaec219c901a94d6caa2484c21983d043371c733fc4"}, - {file = "black-22.6.0-py3-none-any.whl", hash = "sha256:ac609cf8ef5e7115ddd07d85d988d074ed00e10fbc3445aee393e70164a2219c"}, - {file = "black-22.6.0.tar.gz", hash = "sha256:6c6d39e28aed379aec40da1c65434c77d75e65bb59a1e1c283de545fb4e7c6c9"}, -] -cattrs = [ - {file = "cattrs-22.1.0-py3-none-any.whl", hash = "sha256:d55c477b4672f93606e992049f15d526dc7867e6c756cd6256d4af92e2b1e364"}, - {file = "cattrs-22.1.0.tar.gz", hash = "sha256:94b67b64cf92c994f8784c40c082177dc916e0489a73a9a36b24eb18a9db40c6"}, -] -certifi = [ - {file = "certifi-2022.6.15-py3-none-any.whl", hash = "sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412"}, - {file = "certifi-2022.6.15.tar.gz", hash = "sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d"}, -] -cffi = [ - {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, - {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, - {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, - {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"}, - {file = 
"cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"}, - {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"}, - {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"}, - {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, - {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, - {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, - {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, - {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, - {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, - {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, - {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, - {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, - {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, - {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, - {file = 
"cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, - {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"}, - {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"}, - {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"}, - {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"}, - {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"}, - {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"}, - {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"}, - {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"}, - {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"}, - {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, - {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, - {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, - {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, - {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, - {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, - {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, -] -charset-normalizer = [] -click = [ - {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, - {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, -] -colorama = [ - {file = "colorama-0.4.5-py2.py3-none-any.whl", hash = "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da"}, - {file = "colorama-0.4.5.tar.gz", hash = "sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4"}, -] -coverage = [] -cryptography = [] -deepdiff = [ - {file = "deepdiff-5.8.1-py3-none-any.whl", hash = "sha256:e9aea49733f34fab9a0897038d8f26f9d94a97db1790f1b814cced89e9e0d2b7"}, - {file = "deepdiff-5.8.1.tar.gz", hash = "sha256:8d4eb2c4e6cbc80b811266419cb71dd95a157094a3947ccf937a94d44943c7b8"}, -] -dnspython = [ - {file = "dnspython-2.2.1-py3-none-any.whl", hash = "sha256:a851e51367fb93e9e1361732c1d60dab63eff98712e503ea7d92e6eccb109b4f"}, - {file = "dnspython-2.2.1.tar.gz", hash = "sha256:0f7569a4a6ff151958b64304071d370daa3243d15941a7beedf0c9fe5105603e"}, -] -docutils = [] -exceptiongroup = [ - {file = "exceptiongroup-1.0.0rc8-py3-none-any.whl", hash = "sha256:ab0a968e1ef769e55d9a596f4a89f7be9ffedbc9fdefdb77cc68cf5c33ce1035"}, - {file = "exceptiongroup-1.0.0rc8.tar.gz", hash = "sha256:6990c24f06b8d33c8065cfe43e5e8a4bfa384e0358be036af9cc60b6321bd11a"}, -] 
-filelock = [] -flake8 = [ - {file = "flake8-4.0.1-py2.py3-none-any.whl", hash = "sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d"}, - {file = "flake8-4.0.1.tar.gz", hash = "sha256:806e034dda44114815e23c16ef92f95c91e4c71100ff52813adf7132a6ad870d"}, -] -idna = [ - {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, - {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, -] -iniconfig = [ - {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, - {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, -] -jinja2 = [ - {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, - {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, -] -lockfile = [ - {file = "lockfile-0.12.2-py2.py3-none-any.whl", hash = "sha256:6c3cb24f344923d30b2785d5ad75182c8ea7ac1b6171b08657258ec7429d50fa"}, - {file = "lockfile-0.12.2.tar.gz", hash = "sha256:6aed02de03cba24efabcd600b30540140634fc06cfa603822d508d5361e9f799"}, -] -markupsafe = [ - {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-win32.whl", hash = "sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-win32.whl", hash = "sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-win32.whl", hash = "sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-win32.whl", hash = "sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247"}, - {file = "MarkupSafe-2.1.1.tar.gz", hash = "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b"}, -] -mccabe = [ - {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, - {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, -] -mypy-extensions = [ - {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, - {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, -] -omegaconf = [] -ordered-set = [ - {file = "ordered-set-4.1.0.tar.gz", hash = "sha256:694a8e44c87657c59292ede72891eb91d34131f6531463aab3009191c77364a8"}, - {file = "ordered_set-4.1.0-py3-none-any.whl", hash = "sha256:046e1132c71fcf3330438a539928932caf51ddbc582496833e23de611de14562"}, -] -packaging = [ - {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, - {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, -] -pathspec = [ - {file = "pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"}, - {file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"}, -] -pexpect = [ - {file = "pexpect-4.8.0-py2.py3-none-any.whl", hash = "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"}, - {file = "pexpect-4.8.0.tar.gz", hash = "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"}, -] -platformdirs = [ - {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"}, - {file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"}, -] -pluggy = [ - {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, - {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, -] -psutil = [ - {file = "psutil-5.9.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:799759d809c31aab5fe4579e50addf84565e71c1dc9f1c31258f159ff70d3f87"}, - {file = "psutil-5.9.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:9272167b5f5fbfe16945be3db475b3ce8d792386907e673a209da686176552af"}, - {file = "psutil-5.9.1-cp27-cp27m-win32.whl", hash = "sha256:0904727e0b0a038830b019551cf3204dd48ef5c6868adc776e06e93d615fc5fc"}, - {file = "psutil-5.9.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e7e10454cb1ab62cc6ce776e1c135a64045a11ec4c6d254d3f7689c16eb3efd2"}, - {file = "psutil-5.9.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:56960b9e8edcca1456f8c86a196f0c3d8e3e361320071c93378d41445ffd28b0"}, - {file = "psutil-5.9.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:44d1826150d49ffd62035785a9e2c56afcea66e55b43b8b630d7706276e87f22"}, - {file = "psutil-5.9.1-cp310-cp310-macosx_10_9_x86_64.whl", 
hash = "sha256:c7be9d7f5b0d206f0bbc3794b8e16fb7dbc53ec9e40bbe8787c6f2d38efcf6c9"}, - {file = "psutil-5.9.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd9246e4cdd5b554a2ddd97c157e292ac11ef3e7af25ac56b08b455c829dca8"}, - {file = "psutil-5.9.1-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:29a442e25fab1f4d05e2655bb1b8ab6887981838d22effa2396d584b740194de"}, - {file = "psutil-5.9.1-cp310-cp310-win32.whl", hash = "sha256:20b27771b077dcaa0de1de3ad52d22538fe101f9946d6dc7869e6f694f079329"}, - {file = "psutil-5.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:58678bbadae12e0db55186dc58f2888839228ac9f41cc7848853539b70490021"}, - {file = "psutil-5.9.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:3a76ad658641172d9c6e593de6fe248ddde825b5866464c3b2ee26c35da9d237"}, - {file = "psutil-5.9.1-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a6a11e48cb93a5fa606306493f439b4aa7c56cb03fc9ace7f6bfa21aaf07c453"}, - {file = "psutil-5.9.1-cp36-cp36m-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:068935df39055bf27a29824b95c801c7a5130f118b806eee663cad28dca97685"}, - {file = "psutil-5.9.1-cp36-cp36m-win32.whl", hash = "sha256:0f15a19a05f39a09327345bc279c1ba4a8cfb0172cc0d3c7f7d16c813b2e7d36"}, - {file = "psutil-5.9.1-cp36-cp36m-win_amd64.whl", hash = "sha256:db417f0865f90bdc07fa30e1aadc69b6f4cad7f86324b02aa842034efe8d8c4d"}, - {file = "psutil-5.9.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:91c7ff2a40c373d0cc9121d54bc5f31c4fa09c346528e6a08d1845bce5771ffc"}, - {file = "psutil-5.9.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fea896b54f3a4ae6f790ac1d017101252c93f6fe075d0e7571543510f11d2676"}, - {file = "psutil-5.9.1-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3054e923204b8e9c23a55b23b6df73a8089ae1d075cb0bf711d3e9da1724ded4"}, - {file = "psutil-5.9.1-cp37-cp37m-win32.whl", hash = "sha256:d2d006286fbcb60f0b391741f520862e9b69f4019b4d738a2a45728c7e952f1b"}, - {file = "psutil-5.9.1-cp37-cp37m-win_amd64.whl", hash = "sha256:b14ee12da9338f5e5b3a3ef7ca58b3cba30f5b66f7662159762932e6d0b8f680"}, - {file = "psutil-5.9.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:19f36c16012ba9cfc742604df189f2f28d2720e23ff7d1e81602dbe066be9fd1"}, - {file = "psutil-5.9.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:944c4b4b82dc4a1b805329c980f270f170fdc9945464223f2ec8e57563139cf4"}, - {file = "psutil-5.9.1-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b6750a73a9c4a4e689490ccb862d53c7b976a2a35c4e1846d049dcc3f17d83b"}, - {file = "psutil-5.9.1-cp38-cp38-win32.whl", hash = "sha256:a8746bfe4e8f659528c5c7e9af5090c5a7d252f32b2e859c584ef7d8efb1e689"}, - {file = "psutil-5.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:79c9108d9aa7fa6fba6e668b61b82facc067a6b81517cab34d07a84aa89f3df0"}, - {file = "psutil-5.9.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:28976df6c64ddd6320d281128817f32c29b539a52bdae5e192537bc338a9ec81"}, - {file = "psutil-5.9.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:b88f75005586131276634027f4219d06e0561292be8bd6bc7f2f00bdabd63c4e"}, - {file = "psutil-5.9.1-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:645bd4f7bb5b8633803e0b6746ff1628724668681a434482546887d22c7a9537"}, - {file = "psutil-5.9.1-cp39-cp39-win32.whl", hash = "sha256:32c52611756096ae91f5d1499fe6c53b86f4a9ada147ee42db4991ba1520e574"}, - {file = "psutil-5.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:f65f9a46d984b8cd9b3750c2bdb419b2996895b005aefa6cbaba9a143b1ce2c5"}, - {file = "psutil-5.9.1.tar.gz", hash = "sha256:57f1819b5d9e95cdfb0c881a8a5b7d542ed0b7c522d575706a80bedc848c8954"}, -] -ptyprocess = [ - {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, - {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, -] -py = [ - {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, - {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, -] -pycodestyle = [ - {file = "pycodestyle-2.8.0-py2.py3-none-any.whl", hash = "sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20"}, - {file = "pycodestyle-2.8.0.tar.gz", hash = "sha256:eddd5847ef438ea1c7870ca7eb78a9d47ce0cdb4851a5523949f2601d0cbbe7f"}, -] -pycparser = [ - {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, - {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, -] -pycryptodome = [] -pydantic = [] -pyflakes = [ - {file = "pyflakes-2.4.0-py2.py3-none-any.whl", hash = "sha256:3bb3a3f256f4b7968c9c788781e4ff07dce46bdf12339dcda61053375426ee2e"}, - {file = "pyflakes-2.4.0.tar.gz", hash = "sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c"}, -] -pyparsing = [ - {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, - {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, -] -pytest = [ - {file = "pytest-7.1.2-py3-none-any.whl", hash = "sha256:13d0e3ccfc2b6e26be000cb6568c832ba67ba32e719443bfe725814d3c42433c"}, - {file = "pytest-7.1.2.tar.gz", hash = "sha256:a06a0425453864a270bc45e71f783330a7428defb4230fb5e6a731fde06ecd45"}, -] -pytest-cov = [ - {file = "pytest-cov-3.0.0.tar.gz", hash = "sha256:e7f0f5b1617d2210a2cabc266dfe2f4c75a8d32fb89eafb7ad9d06f6d076d470"}, - {file = "pytest_cov-3.0.0-py3-none-any.whl", hash = "sha256:578d5d15ac4a25e5f961c938b85a05b09fdaae9deef3bb6de9a6e766622ca7a6"}, -] -python-daemon = [] -pyyaml = [ - {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, - {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"}, - {file = 
"PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, - {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, - {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, - {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"}, - {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"}, - {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"}, - {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"}, - {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"}, - {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"}, - {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"}, - {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"}, - {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"}, - {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"}, - {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"}, - {file = 
"PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"}, - {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"}, - {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, - {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, -] -requests = [ - {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"}, - {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"}, -] -requests-cache = [ - {file = "requests-cache-0.9.5.tar.gz", hash = "sha256:bd67575f541f9c10f44f8b49d8d449fb55db8af2e27d93c56349f227b78e4b70"}, - {file = "requests_cache-0.9.5-py3-none-any.whl", hash = "sha256:5343132acc8ca4d7810305aa1e8e46367e9c7c8379f7aea4e8c05f401499fc43"}, -] -requests-file = [ - {file = "requests-file-1.5.1.tar.gz", hash = "sha256:07d74208d3389d01c38ab89ef403af0cfec63957d53a0081d8eca738d0247d8e"}, - {file = "requests_file-1.5.1-py2.py3-none-any.whl", hash = "sha256:dfe5dae75c12481f68ba353183c53a65e6044c923e64c24b2209f6c7570ca953"}, -] -resolvelib = [ - {file = "resolvelib-0.5.5-py2.py3-none-any.whl", hash = "sha256:b0143b9d074550a6c5163a0f587e49c49017434e3cdfe853941725f5455dd29c"}, - {file = "resolvelib-0.5.5.tar.gz", hash = "sha256:123de56548c90df85137425a3f51eb93df89e2ba719aeb6a8023c032758be950"}, -] -six = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] -tabulate = [] -tldextract = [] -tomli = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, -] -typing-extensions = [ - {file = "typing_extensions-4.3.0-py3-none-any.whl", hash = "sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02"}, - {file = "typing_extensions-4.3.0.tar.gz", hash = "sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6"}, -] -url-normalize = [ - {file = "url-normalize-1.4.3.tar.gz", hash = "sha256:d23d3a070ac52a67b83a1c59a0e68f8608d1cd538783b401bc9de2c0fac999b2"}, - {file = "url_normalize-1.4.3-py2.py3-none-any.whl", hash = "sha256:ec3c301f04e5bb676d333a7fa162fa977ad2ca04b7e652bfc9fac4e405728eed"}, -] -urllib3 = [] -websocket-client = [ - {file = "websocket-client-1.3.3.tar.gz", hash = "sha256:d58c5f284d6a9bf8379dab423259fe8f85b70d5fa5d2916d5791a84594b122b1"}, - {file = "websocket_client-1.3.3-py3-none-any.whl", hash = "sha256:5d55652dc1d0b3c734f044337d929aaf83f4f9138816ec680c1aefefb4dc4877"}, -] -wordninja = [ - {file = "wordninja-2.0.0.tar.gz", hash = "sha256:1a1cc7ec146ad19d6f71941ee82aef3d31221700f0d8bf844136cf8df79d281a"}, -] 
-xmltodict = [ - {file = "xmltodict-0.12.0-py2.py3-none-any.whl", hash = "sha256:8bbcb45cc982f48b2ca8fe7e7827c5d792f217ecf1792626f808bf41c3b86051"}, - {file = "xmltodict-0.12.0.tar.gz", hash = "sha256:50d8c638ed7ecb88d90561beedbf720c9b4e851a9fa6c47ebd64e99d166d8a21"}, +[[package]] +name = "yara-python" +version = "4.5.1" +description = "Python interface for YARA" +optional = false +python-versions = "*" +files = [ + {file = "yara_python-4.5.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7c92219bf91caea277bc2736df70dda3709834c297a4a5906f1d9a46cd03579a"}, + {file = "yara_python-4.5.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6e8e9eb5a49a70a013bf45e0ec97210b7cb124813271fddc666c3cfb1308a2d5"}, + {file = "yara_python-4.5.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffb48e853f107f2e6e0e29a97ce1185e9cc7a15a6c860dc65eb8ec431d1b6d3e"}, + {file = "yara_python-4.5.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2c6a4e181de457a5de74982b82ab01c89a06bcd66820ca1671f22e984be1be78"}, + {file = "yara_python-4.5.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:155ef1a9ca2aeeb57441fa99b6d8bd2cb67787f0d62b3c1670512e36c97ec02f"}, + {file = "yara_python-4.5.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:264fdc2953c635131112a2cef6208b52d35731a6cc902cc62fe82508d9051afd"}, + {file = "yara_python-4.5.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1a3e6b610e7131353cfea80ba119db3e96f7ad7befcd9d5a51df8786c806403"}, + {file = "yara_python-4.5.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:aec3dda6b173c4be4d972058ee41fb019c866b82861f12a1ac2b01035cea34b9"}, + {file = "yara_python-4.5.1-cp310-cp310-win32.whl", hash = "sha256:8c3935da45ce283e02a86c9120240524e352add64c5cbccd616885937801ac67"}, + {file = "yara_python-4.5.1-cp310-cp310-win_amd64.whl", hash = "sha256:59fd46cc8c5a77e5e4942c7e403ac738f5c64154dcbc67bd8c9af453d7bb2539"}, + {file = "yara_python-4.5.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3044359876921e26370f7b646d84a65681811df577be7d4d09c7de21b33d9130"}, + {file = "yara_python-4.5.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:49ad70b6b65ed1c591c3bfb3d5d6da0fc6a73b1f979604feead450f348ad67c4"}, + {file = "yara_python-4.5.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a6a185d2ec8fbbffa89d0f7949b84f76860d0e3a74192825dbf53d6a5069b83"}, + {file = "yara_python-4.5.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2560dd27f63cdb395d9d77d6a74d1f0d6b7aa0ea18394f44d650e5abb6e377a3"}, + {file = "yara_python-4.5.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:471e4070bf7e3b9b132f1c0134d1172d9dae353b04f2fce9bc31431ae785595e"}, + {file = "yara_python-4.5.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f533848781f0e46e44eda77055eae4ec934cf56c1f473e787704f1a348e90094"}, + {file = "yara_python-4.5.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3aaf259ed162d2de5db70ae1ba057307efdeb7f4697d74cc5b3313caa7647923"}, + {file = "yara_python-4.5.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:90374acc38086447a668580a9aecceb11964f08deb05bfaced6f43e9e67955a1"}, + {file = "yara_python-4.5.1-cp311-cp311-win32.whl", hash = "sha256:721422a14d18a81d75397df51481f5b5f3ab8d0a5220087e5306570877cab4e4"}, + {file = "yara_python-4.5.1-cp311-cp311-win_amd64.whl", hash = "sha256:dac13dc77a5f21c119104ae4e6ad837589eace0505e9daf38af0bd2d4ccd7cfa"}, + {file = 
"yara_python-4.5.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:7eb27c1cd2f6f93f68e23e676ede28357c1fc8b9ec7deefe86f2cfef4abd877c"}, + {file = "yara_python-4.5.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4c7ac7c1ae5e25bd5bf67ce752ac82568c2cdc157c9af50ba28d7cbab4421175"}, + {file = "yara_python-4.5.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77011bed905f3786755da7de7ba9082790db654a241e13746fa3fc325b9ad966"}, + {file = "yara_python-4.5.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0ddedd9bfcfc37ffddceefd9dbf9bbba137c979b3effc9c1e9aeb08d77c6858c"}, + {file = "yara_python-4.5.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3431154fac7f41b4657edad91632717b5f1bab5be4ed6ce28d6e17e441d5c947"}, + {file = "yara_python-4.5.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7d5dc091235ded00b30f04a51d70e08352e44976122f8d45e63d25e96eae27d9"}, + {file = "yara_python-4.5.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:97d30a483d195e6b695f072086cf1234317a650727844bac7bf85cf98dd960a3"}, + {file = "yara_python-4.5.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bb65c17657b4cdbe5adee7a6e617ee05e214e8afdbc82b195885354a72a16476"}, + {file = "yara_python-4.5.1-cp312-cp312-win32.whl", hash = "sha256:4f368d057e0865278444c948a65802f7c92008a1b59bf629bdc9efa1b0120a22"}, + {file = "yara_python-4.5.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ccd73466d7ad1a50cd06f38fdb7a023fee87dd185d3fcf67cc5c55d82cc34dd"}, + {file = "yara_python-4.5.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:37ff0e6256d75521e5ac52b45671647bd6f6a7aa49259b13c19db424d9fdb795"}, + {file = "yara_python-4.5.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c17d1555dbd99f4872ca289ee92b9630331def0df864f88ced1665efa3cabdac"}, + {file = "yara_python-4.5.1-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfae9eac6a65d25799aecd21cb43f3552a86552c57e90e85e03a1e95e100fb35"}, + {file = "yara_python-4.5.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8c8cfbdc33cbcf78afd6e11149e406dfe558bbd497ff0c9b001753545a326e7"}, + {file = "yara_python-4.5.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:bb767f5c9c67d0b5de4d916c92130303d02d07d5a96a160aa5d7aa6c45883b1f"}, + {file = "yara_python-4.5.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:e14d43aba8a8d66268cd45ce534bb7b608ca08d97d4ffb9f0205ef5554e317fb"}, + {file = "yara_python-4.5.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4c2d81727e24c224b0003770f2548f2eb75d9a95d5aa03b65d5ccf8ab3112d8d"}, + {file = "yara_python-4.5.1-cp37-cp37m-win32.whl", hash = "sha256:da5848e64fdde37529e6ebd8e5778e4665a7dee8cdff2f347ec47a39b453f298"}, + {file = "yara_python-4.5.1-cp37-cp37m-win_amd64.whl", hash = "sha256:0fc8a450b662a0235ab7cee59ad2e366207c97bb99a80db9ffb68f865abd4ac9"}, + {file = "yara_python-4.5.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0324175b06c440eb754b7ff3845b6eb426b5870bbbebbeae32f2e5281fd35860"}, + {file = "yara_python-4.5.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f408668aab84a0f42b78784d948a69a99bf95300536edd4ab771bb4a06d92f50"}, + {file = "yara_python-4.5.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a885ec2800b3ee8c4ba9e6634005e041afad33998d59fa6c76bea60c1bd9c73b"}, + {file = "yara_python-4.5.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:153d459a2382a28d08edb84a74f27d8ef2cc8154f7822dadf744c5797e8e6f25"}, + {file = "yara_python-4.5.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:509ca2000c9f76c3304f9fdbb886b1d403231a6a76ec9b4aeb18c67ee8279917"}, + {file = "yara_python-4.5.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b03d2ffe24a13d69d14b12517aac7a4ea5f0df41ac725f282ebdc729f4365a3d"}, + {file = "yara_python-4.5.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8e90cc9bee1340dec0e9dab95e056dec08e6ac67945ad20f537d65457845f2f1"}, + {file = "yara_python-4.5.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:37f6e85ee2fe458b52d4984bc2327cd33d69a10579dd708e29d6fbd371aceafe"}, + {file = "yara_python-4.5.1-cp38-cp38-win32.whl", hash = "sha256:90aa56a3e27fdc5751550fe136a8d815c55a1a1db025b28d1f7d146493751310"}, + {file = "yara_python-4.5.1-cp38-cp38-win_amd64.whl", hash = "sha256:4cc7d5220a488fa0470f7c7ea303d1174e3b7e88dc6eef539ab048c8590257a8"}, + {file = "yara_python-4.5.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:6e8566034b9c24a12a8fd8b0ff580b078add7f9e9719e633ad1adcbb33be534a"}, + {file = "yara_python-4.5.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:934f08ca197a645977749ca1163262abcec9bdbcb54cd47ffb2452c3edc4c5e4"}, + {file = "yara_python-4.5.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a41992c45fcad39ad05016eafc3c3632b3a11ede2440ba9c1250c5e5d484687a"}, + {file = "yara_python-4.5.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:70eb3f84b6e57f7f52676ae9c11dccde2867f49bac6e9a042ef2d027a8afb9f1"}, + {file = "yara_python-4.5.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d21efeb69d83c48419beccda4aeb415c4c993387e6dee64d8eac4b33af8ac58"}, + {file = "yara_python-4.5.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:98b780fe880cb219b9a92957a1f9863e53908a2dd75483976265d256b3b69b84"}, + {file = "yara_python-4.5.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:04c414472b0e3c4a2998ae247c0215bbb52c7808d09a7ca3899ef86ad1df7a7b"}, + {file = "yara_python-4.5.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0502328eeb18aa6e50af7e31df91b1dd23db0d47a0744383d90ff5cb38ff8d30"}, + {file = "yara_python-4.5.1-cp39-cp39-win32.whl", hash = "sha256:5c266ce1a9f6f783f565d0687a052e0a76c287495452a92d495809f8f6c32a44"}, + {file = "yara_python-4.5.1-cp39-cp39-win_amd64.whl", hash = "sha256:cc08a46630373bf194dc560e422622d45a3cbefec334650a96777f4c5f31f637"}, + {file = "yara_python-4.5.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f23ea9893efd676eb2727e869b486d71e7cb7839789a36c80b726258365b39b6"}, + {file = "yara_python-4.5.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edf490994334b00933f7bc37fdd255451f12db741b15c2917fceb31e11bb698d"}, + {file = "yara_python-4.5.1-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:038dcec1728233144ab0ab7ea4ed060f642c5f3152742c9ee71b493f571d6fd5"}, + {file = "yara_python-4.5.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:146f2fbdeb043c32a6a7d08a4e37a0bb1c3c0a16d2ad97d957627f6158360569"}, + {file = "yara_python-4.5.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:389aa3a655c94885399e290bd74703273d7a1ecb33593b62801abee91efdfc86"}, + {file = "yara_python-4.5.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:df67822be9430066f76604421f79b8d1446d749d925376c82c3e7649064899e3"}, + {file = 
"yara_python-4.5.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ecd0fa98a66e58be6a1d679e8679fc39029a4afa66d5310943d9180b90e57baf"}, + {file = "yara_python-4.5.1-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8a073a26d1b081942fc741da8eeefe59c6fec5bf7f2adb3e80df1d73f57a7ea3"}, + {file = "yara_python-4.5.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:92af5596aa4af20d7f81260dc72b989dfd4b7672c5492f13e9b71fe2b24c936f"}, + {file = "yara_python-4.5.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:984f67c08f945acb78d2548aaf5ffa19d27288b48979eb0652dd3a89c7b7747b"}, + {file = "yara_python-4.5.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2756fe1121fdd45b29d0d21fea66f762ef50d9e636bae8fd94217f0dc4c32a3a"}, + {file = "yara_python-4.5.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c27dd8bdf1bbd946a82d1717c3dcc2efa449abb04018d186dca6b412ed93eba6"}, + {file = "yara_python-4.5.1-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:382fd997999cfd83d7c2087f8b73c55dde8193473ff2a78643b5c69d3a39e084"}, + {file = "yara_python-4.5.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:024c477f182c26265fc447051e09099016e3562ac7f2255e05de2a506dd4d6dc"}, + {file = "yara_python-4.5.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:2add91c1f2c7c6bd82affffd864f7e7a96285c80b97906f81584be3b3b448b74"}, + {file = "yara_python-4.5.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7ae8411ae68a9f8911781bdc4393fc21ab48372ed3605c64265d08d57394ff5f"}, + {file = "yara_python-4.5.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc81d88d3fa54f2a019e716f715a18e0c2c7c03816fef926b07b4ab3ba698e69"}, + {file = "yara_python-4.5.1-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8765e387652f9354ca705ea8692e5e24424f7c20aaec857b40c13b18fe7862ad"}, + {file = "yara_python-4.5.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c1acc3fd1b4634a4b438b6129f3b52a306d40e44c7fd950e7154f147a12e4de"}, + {file = "yara_python-4.5.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d64e300925d56b3cf7f430b3bf86e133b14aaf578cfe827c08aec8869b8375e9"}, + {file = "yara_python-4.5.1.tar.gz", hash = "sha256:52ab24422b021ae648be3de25090cbf9e6c6caa20488f498860d07f7be397930"}, ] -xmltojson = [ - {file = "xmltojson-2.0.1-py3-none-any.whl", hash = "sha256:21c7a62c16c2f8bfcbd8583b99b417e9179b7b2338704a44e92169acf94eebe5"}, - {file = "xmltojson-2.0.1.tar.gz", hash = "sha256:6138ec4fb71842d6018905f120233e8ee512db8175bf5bbdc83f06d63a7d427e"}, + +[[package]] +name = "zipp" +version = "3.21.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.9" +files = [ + {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"}, + {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"}, ] + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", 
"pytest-ignore-flaky"] +type = ["pytest-mypy"] + +[metadata] +lock-version = "2.0" +python-versions = "^3.9" +content-hash = "11833ae9ddc8cf7db5c1257e7d4e99d1c21a2b740b1b22e5651a2ccbfa4e3265" diff --git a/pyproject.toml b/pyproject.toml index 8d024e502e..fb58fceb98 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,48 +1,115 @@ [tool.poetry] name = "bbot" -version = "1.0.3" +version = "2.3.2" description = "OSINT automation for hackers." -authors = ["TheTechromancer"] +authors = [ + "TheTechromancer", + "Paul Mueller", +] license = "GPL-3.0" readme = "README.md" repository = "https://github.com/blacklanternsecurity/bbot" homepage = "https://github.com/blacklanternsecurity/bbot" +documentation = "https://www.blacklanternsecurity.com/bbot/" +keywords = ["python", "cli", "automation", "osint", "threat-intel", "intelligence", "neo4j", "scanner", "python-library", "hacking", "recursion", "pentesting", "recon", "command-line-tool", "bugbounty", "subdomains", "security-tools", "subdomain-scanner", "osint-framework", "attack-surface", "subdomain-enumeration", "osint-tool"] +classifiers = [ + "Operating System :: POSIX :: Linux", + "Topic :: Security", +] + +[tool.poetry.urls] +"Discord" = "https://discord.com/invite/PZqkgxu5SA" +"Docker Hub" = "https://hub.docker.com/r/blacklanternsecurity/bbot" [tool.poetry.scripts] bbot = 'bbot.cli:main' [tool.poetry.dependencies] python = "^3.9" -omegaconf = "^2.1.1" -tldextract = "^3.2.0" -psutil = "^5.9.0" +omegaconf = "^2.3.0" +psutil = ">=5.9.4,<8.0.0" wordninja = "^2.0.0" -requests-cache = "^0.9.3" -dnspython = "^2.2.1" -websocket-client = "^1.3.2" -pydantic = "^1.9.0" -ansible-runner = "^2.2.0" -ansible = "^5.7.1" -deepdiff = "^5.8.1" -xmltojson = "^2.0.1" -pycryptodome = "^3.15.0" -tabulate = "^0.8.10" -pip = "^22.2.2" - -[tool.poetry.dev-dependencies] -pytest = "^7.1.1" -flake8 = "^4.0.1" -black = "^22.3.0" -pytest-cov = "^3.0.0" +ansible-runner = "^2.3.2" +deepdiff = "^8.0.0" +xmltojson = "^2.0.2" +pycryptodome = "^3.17" +idna = "^3.4" +tabulate = "0.8.10" +websockets = ">=11.0.2,<16.0.0" +pyjwt = "^2.7.0" +beautifulsoup4 = "^4.12.2" +lxml = ">=4.9.2,<6.0.0" +dnspython = "^2.4.2" +tldextract = "^5.1.1" +cachetools = "^5.3.2" +socksio = "^1.0.0" +jinja2 = "^3.1.3" +regex = "^2024.4.16" +unidecode = "^1.3.8" +mmh3 = ">=4.1,<6.0" +setproctitle = "^1.3.3" +yara-python = "^4.5.1" +pyzmq = "^26.0.3" +httpx = "^0.27.0" +puremagic = "^1.28" +pydantic = "^2.9.2" +radixtarget = "^3.0.13" +cloudcheck = "^7.0.12" +orjson = "^3.10.12" +ansible-core = "^2.15.13" + +[tool.poetry.group.dev.dependencies] +poetry-dynamic-versioning = ">=0.21.4,<1.8.0" +urllib3 = "^2.0.2" +werkzeug = ">=2.3.4,<4.0.0" +pytest-env = ">=0.8.2,<1.2.0" +pre-commit = ">=3.4,<5.0" +pytest-cov = ">=5,<7" +pytest-rerunfailures = ">=14,<16" +pytest-timeout = "^2.3.1" +pytest-httpserver = "^1.0.11" +pytest = "^8.3.1" +pytest-asyncio = "0.25.3" +uvicorn = ">=0.32,<0.35" +fastapi = "^0.115.5" +pytest-httpx = ">=0.33,<0.35" +ruff = "0.9.7" + +[tool.poetry.group.docs.dependencies] +mkdocs = "^1.5.2" +mkdocs-extra-sass-plugin = "^0.1.0" +mkdocs-material = "^9.2.5" +mkdocs-material-extensions = "^1.1.1" +mkdocstrings = ">=0.22,<0.29" +mkdocstrings-python = "^1.6.0" +livereload = "^2.6.3" +mike = "^2.1.2" + +[tool.pytest.ini_options] +env = [ + "BBOT_TESTING = True", +] +asyncio_mode = "auto" +asyncio_default_fixture_loop_scope = "session" [build-system] requires = ["poetry-core>=1.0.0", "poetry-dynamic-versioning"] -build-backend = "poetry.core.masonry.api" +build-backend = 
"poetry_dynamic_versioning.backend" + +[tool.codespell] +ignore-words-list = "bu,cna,couldn,dialin,nd,ned,thirdparty" +skip = "./docs/javascripts/vega*.js,./bbot/wordlists/*" -[tool.black] +[tool.ruff] line-length = 119 +format.exclude = ["bbot/test/test_step_1/test_manager_*"] +lint.select = ["E", "F"] +lint.ignore = ["E402", "E711", "E713", "E721", "E741", "F401", "F403", "F405", "E501"] [tool.poetry-dynamic-versioning] enable = true metadata = false -format = 'v1.0.3.{distance}' +format-jinja = 'v2.3.2{% if branch == "dev" %}.{{ distance }}rc{% endif %}' + +[tool.poetry-dynamic-versioning.substitution] +files = ["*/__init__.py"]