├── .coderabbit.yaml ├── .docker ├── Dockerfile.alpine ├── Dockerfile.arch ├── Dockerfile.debian ├── Dockerfile.kali ├── Dockerfile.osx ├── Dockerfile.ubuntu └── build_all.sh ├── .dockerignore ├── .flake8 ├── .github ├── actions │ └── install │ │ └── action.yml └── workflows │ ├── docker.yml │ ├── publish.yml │ ├── release-please.yml │ ├── test_tasks.yml │ ├── tests.yml │ ├── update_tools_table.yml │ └── update_tools_version.yml ├── .gitignore ├── CHANGELOG.md ├── CONTRIBUTING.md ├── Dockerfile ├── LICENSE ├── README.md ├── SECURITY.md ├── cloudbuild.yaml ├── docker-compose.yml ├── helm ├── .helmignore ├── Chart.yaml ├── templates │ ├── redis-service.yaml │ ├── redis.yaml │ ├── secator-manager.yaml │ └── secator-worker.yaml └── values.yaml ├── images ├── aliases.cast ├── aliases.gif ├── demo.gif ├── demo.tap ├── fmt.cast ├── fmt.gif ├── help.png ├── input.cast ├── input.gif ├── pipe.cast ├── pipe.gif ├── short_demo.cast └── short_demo.gif ├── package.json ├── pyproject.toml ├── scripts ├── download_cves.sh ├── generate_tools_md_table.py ├── install.sh ├── install_asciinema.sh ├── install_go.sh ├── install_ruby.sh ├── msf │ ├── exploit_cve.rc │ ├── ftp_anonymous.rc │ ├── ftp_version.rc │ ├── ftp_vsftpd_234_backdoor.rc │ └── redis.rc ├── revshells.json ├── stories │ ├── STORY.md │ ├── aliases.sh │ ├── demo.sh │ ├── fmt.sh │ ├── input.sh │ ├── pipe.sh │ └── short_demo.sh └── update_tools.sh ├── secator ├── .gitignore ├── __init__.py ├── celery.py ├── celery_signals.py ├── celery_utils.py ├── cli.py ├── cli_helper.py ├── click.py ├── config.py ├── configs │ ├── __init__.py │ ├── profiles │ │ ├── __init__.py │ │ ├── aggressive.yaml │ │ ├── http_headless.yaml │ │ ├── http_record.yaml │ │ ├── insane.yaml │ │ ├── paranoid.yaml │ │ ├── polite.yaml │ │ ├── sneaky.yaml │ │ └── tor.yaml │ ├── scans │ │ ├── __init__.py │ │ ├── domain.yaml │ │ ├── host.yaml │ │ ├── network.yaml │ │ ├── subdomain.yaml │ │ └── url.yaml │ └── workflows │ │ ├── __init__.py │ │ ├── cidr_recon.yaml │ │ ├── code_scan.yaml │ │ ├── host_recon.yaml │ │ ├── subdomain_recon.yaml │ │ ├── url_bypass.yaml │ │ ├── url_crawl.yaml │ │ ├── url_dirsearch.yaml │ │ ├── url_fuzz.yaml │ │ ├── url_params_fuzz.yaml │ │ ├── url_vuln.yaml │ │ ├── user_hunt.yaml │ │ └── wordpress.yaml ├── cve.py ├── decorators.py ├── definitions.py ├── exporters │ ├── __init__.py │ ├── _base.py │ ├── console.py │ ├── csv.py │ ├── gdrive.py │ ├── json.py │ ├── table.py │ └── txt.py ├── hooks │ ├── __init__.py │ ├── gcs.py │ └── mongodb.py ├── installer.py ├── loader.py ├── output_types │ ├── __init__.py │ ├── _base.py │ ├── certificate.py │ ├── error.py │ ├── exploit.py │ ├── info.py │ ├── ip.py │ ├── port.py │ ├── progress.py │ ├── record.py │ ├── stat.py │ ├── state.py │ ├── subdomain.py │ ├── tag.py │ ├── target.py │ ├── url.py │ ├── user_account.py │ ├── vulnerability.py │ └── warning.py ├── report.py ├── rich.py ├── runners │ ├── __init__.py │ ├── _base.py │ ├── _helpers.py │ ├── celery.py │ ├── command.py │ ├── scan.py │ ├── task.py │ └── workflow.py ├── scans │ └── __init__.py ├── serializers │ ├── __init__.py │ ├── dataclass.py │ ├── json.py │ └── regex.py ├── tasks │ ├── __init__.py │ ├── _categories.py │ ├── arjun.py │ ├── bbot.py │ ├── bup.py │ ├── cariddi.py │ ├── dalfox.py │ ├── dirsearch.py │ ├── dnsx.py │ ├── feroxbuster.py │ ├── ffuf.py │ ├── fping.py │ ├── gau.py │ ├── gf.py │ ├── gitleaks.py │ ├── gospider.py │ ├── grype.py │ ├── h8mail.py │ ├── httpx.py │ ├── katana.py │ ├── maigret.py │ ├── mapcidr.py │ ├── msfconsole.py │ ├── naabu.py │ ├── nmap.py │ ├── nuclei.py │ ├── searchsploit.py │ ├── subfinder.py │ ├── testssl.py │ ├── trivy.py │ ├── wafw00f.py │ ├── wpprobe.py │ └── wpscan.py ├── template.py ├── thread.py ├── tree.py ├── utils.py ├── utils_test.py └── workflows │ └── __init__.py └── tests ├── __init__.py ├── fixtures ├── arjun_output.json ├── bup_output.json ├── cve_circle_output.json ├── dalfox_output.json ├── dirsearch_output.json ├── dnsx_output.json ├── dnsxbrute_output.json ├── feroxbuster_output.json ├── ffuf_output.json ├── gau_output.json ├── gitleaks_output.json ├── gospider_output.json ├── h8mail_breach.txt ├── h8mail_output.json ├── httpx_output.json ├── katana_output.json ├── ls.py ├── ls.yml ├── maigret_output.json ├── msfconsole_input.rc ├── naabu_output.json ├── nmap_output.json ├── nmap_output.xml ├── nmap_output_converted.json ├── nuclei_output.json ├── subfinder_output.json ├── testssl_output.json ├── trivy_output.json ├── wafw00f_output.json ├── wpprobe_output.json └── wpscan_output.json ├── integration ├── __init__.py ├── all.yaml ├── docker-compose.yml ├── inputs.py ├── outputs.py ├── setup.sh ├── teardown.sh ├── test_addons.py ├── test_celery.py ├── test_scans.py ├── test_tasks.py ├── test_tasks_categories.py ├── test_worker.py ├── test_workflows.py ├── wordlist.txt ├── wordlist_dns.txt └── wordpress_toolbox │ ├── Dockerfile │ └── Makefile ├── performance ├── __init__.py ├── loadtester.py └── test_worker.py ├── template └── test_templates.py └── unit ├── __init__.py ├── test_celery.py ├── test_cli.py ├── test_command.py ├── test_config.py ├── test_offline.py ├── test_runners.py ├── test_runners_helpers.py ├── test_scans.py ├── test_serializers.py ├── test_tasks.py ├── test_tasks_categories.py ├── test_template.py └── test_utils.py /.docker/Dockerfile.alpine: -------------------------------------------------------------------------------- 1 | FROM alpine:3.21 AS builder 2 | 3 | ENV PATH="${PATH}:/root/.local/bin" 4 | RUN apk add --no-cache \ 5 | flock \ 6 | gcc \ 7 | musl-dev \ 8 | linux-headers \ 9 | pipx \ 10 | python3-dev 11 | COPY . /code 12 | WORKDIR /code 13 | 14 | RUN pipx install --pip-args="--no-cache-dir" . && \ 15 | secator install addons worker && \ 16 | secator install addons gdrive && \ 17 | secator install addons gcs && \ 18 | secator install addons mongodb && \ 19 | secator install addons redis && \ 20 | secator install addons dev 21 | 22 | FROM python:3.12-alpine3.21 23 | ARG flavor=full 24 | ARG build_from_source=false 25 | ENV TERM="xterm-256color" 26 | ENV PATH="${PATH}:/root/.local/bin" 27 | ENV GOBIN="/root/.local/bin" 28 | COPY --from=builder /root/.local /root/.local 29 | RUN apk add --no-cache \ 30 | flock \ 31 | pipx \ 32 | sudo 33 | RUN if [ "$build_from_source" = "true" ]; then secator config set security.force_source_install 1; fi 34 | RUN if [ "$flavor" != "lite" ]; then secator install tools --cleanup --fail-fast; fi 35 | ENTRYPOINT ["secator"] 36 | -------------------------------------------------------------------------------- /.docker/Dockerfile.arch: -------------------------------------------------------------------------------- 1 | FROM archlinux:latest 2 | 3 | ENV PATH="${PATH}:/root/.local/bin" 4 | ENV GOBIN="/root/.local/bin" 5 | RUN pacman -Syu --noconfirm && \ 6 | pacman -S --noconfirm \ 7 | base-devel \ 8 | bash \ 9 | curl \ 10 | git \ 11 | go \ 12 | jq \ 13 | openssl \ 14 | proxychains \ 15 | proxychains-ng \ 16 | python \ 17 | python-pip \ 18 | python-pipx \ 19 | ruby \ 20 | rubygems \ 21 | sudo \ 22 | unzip \ 23 | vim \ 24 | wget 25 | COPY . /code 26 | WORKDIR /code 27 | RUN pipx install . && \ 28 | secator install addons worker && \ 29 | secator install addons gdrive && \ 30 | secator install addons gcs && \ 31 | secator install addons mongodb && \ 32 | secator install addons redis && \ 33 | secator install addons dev 34 | RUN if [ "$build_from_source" = "true" ]; then secator config set security.force_source_install 1; fi 35 | RUN if [ "$flavor" != "lite" ]; then secator install tools --cleanup --fail-fast; fi 36 | ENTRYPOINT ["secator"] 37 | -------------------------------------------------------------------------------- /.docker/Dockerfile.debian: -------------------------------------------------------------------------------- 1 | FROM debian:latest 2 | 3 | ENV PATH="${PATH}:/root/.local/bin" 4 | ENV GOBIN="/root/.local/bin" 5 | RUN apt update -y && \ 6 | apt install -y \ 7 | bash \ 8 | build-essential \ 9 | curl \ 10 | git \ 11 | golang-go \ 12 | jq \ 13 | openssl \ 14 | pipx \ 15 | python3 \ 16 | python3-pip \ 17 | python3-venv \ 18 | proxychains \ 19 | proxychains-ng \ 20 | ruby-full \ 21 | rubygems \ 22 | sudo \ 23 | unzip \ 24 | vim \ 25 | wget 26 | COPY . /code 27 | WORKDIR /code 28 | RUN pipx install . && \ 29 | secator install addons worker && \ 30 | secator install addons gdrive && \ 31 | secator install addons gcs && \ 32 | secator install addons mongodb && \ 33 | secator install addons redis && \ 34 | secator install addons dev 35 | RUN secator install langs go 36 | RUN if [ "$build_from_source" = "true" ]; then secator config set security.force_source_install 1; fi 37 | RUN if [ "$flavor" != "lite" ]; then secator install tools --cleanup --fail-fast; fi 38 | ENTRYPOINT ["secator"] 39 | -------------------------------------------------------------------------------- /.docker/Dockerfile.kali: -------------------------------------------------------------------------------- 1 | FROM kalilinux/kali-rolling:latest 2 | 3 | ENV PATH="${PATH}:/root/.local/bin" 4 | ENV GOBIN="/root/.local/bin" 5 | RUN apt update -y && \ 6 | apt install -y \ 7 | bash \ 8 | build-essential \ 9 | curl \ 10 | git \ 11 | golang-go \ 12 | jq \ 13 | openssl \ 14 | pipx \ 15 | python3 \ 16 | python3-pip \ 17 | python3-venv \ 18 | proxychains \ 19 | proxychains-ng \ 20 | ruby-full \ 21 | rubygems \ 22 | sudo \ 23 | unzip \ 24 | vim \ 25 | wget 26 | COPY . /code 27 | WORKDIR /code 28 | RUN pipx install . && \ 29 | secator install addons worker && \ 30 | secator install addons gdrive && \ 31 | secator install addons gcs && \ 32 | secator install addons mongodb && \ 33 | secator install addons redis && \ 34 | secator install addons dev 35 | RUN if [ "$build_from_source" = "true" ]; then secator config set security.force_source_install 1; fi 36 | RUN if [ "$flavor" != "lite" ]; then secator install tools --cleanup --fail-fast; fi 37 | ENTRYPOINT ["secator"] 38 | -------------------------------------------------------------------------------- /.docker/Dockerfile.osx: -------------------------------------------------------------------------------- 1 | FROM sickcodes/docker-osx:latest 2 | 3 | ENV PATH="${PATH}:/home/arch/.local/bin" 4 | ENV GOBIN="/home/arch/.local/bin" 5 | RUN /bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)" 6 | RUN echo >> /home/arch/.bashrc 7 | RUN echo 'eval "$(/home/linuxbrew/.linuxbrew/bin/brew shellenv)"' >> /home/arch/.bashrc 8 | RUN sudo pacman -Syu --noconfirm && \ 9 | sudo pacman -S --noconfirm -y \ 10 | base-devel \ 11 | bash \ 12 | curl \ 13 | git \ 14 | go \ 15 | jq \ 16 | openssl \ 17 | proxychains \ 18 | proxychains-ng \ 19 | python \ 20 | python-pip \ 21 | python-pipx \ 22 | ruby \ 23 | rubygems \ 24 | sudo \ 25 | unzip \ 26 | vim \ 27 | wget 28 | COPY . /code 29 | WORKDIR /code 30 | USER arch 31 | RUN pipx install . && \ 32 | secator install addons worker && \ 33 | secator install addons gdrive && \ 34 | secator install addons gcs && \ 35 | secator install addons mongodb && \ 36 | secator install addons redis && \ 37 | secator install addons dev 38 | RUN if [ "$build_from_source" = "true" ]; then secator config set security.force_source_install 1; fi 39 | RUN if [ "$flavor" != "lite" ]; then secator install tools --cleanup --fail-fast; fi 40 | ENTRYPOINT ["secator"] 41 | -------------------------------------------------------------------------------- /.docker/Dockerfile.ubuntu: -------------------------------------------------------------------------------- 1 | FROM ubuntu:latest 2 | 3 | ENV PATH="${PATH}:/root/.local/bin" 4 | ENV GOBIN="/root/.local/bin" 5 | RUN apt update -y && \ 6 | apt install -y \ 7 | bash \ 8 | build-essential \ 9 | curl \ 10 | git \ 11 | golang-go \ 12 | jq \ 13 | openssl \ 14 | pipx \ 15 | python3 \ 16 | python3-pip \ 17 | python3-venv \ 18 | proxychains \ 19 | proxychains-ng \ 20 | ruby-full \ 21 | rubygems \ 22 | sudo \ 23 | unzip \ 24 | vim \ 25 | wget 26 | COPY . /code 27 | WORKDIR /code 28 | RUN pipx install . && \ 29 | secator install addons worker && \ 30 | secator install addons gdrive && \ 31 | secator install addons gcs && \ 32 | secator install addons mongodb && \ 33 | secator install addons redis && \ 34 | secator install addons dev 35 | RUN if [ "$build_from_source" = "true" ]; then secator config set security.force_source_install 1; fi 36 | RUN if [ "$flavor" != "lite" ]; then secator install tools --cleanup --fail-fast; fi 37 | ENTRYPOINT ["secator"] 38 | -------------------------------------------------------------------------------- /.docker/build_all.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Define an array of distributions 4 | DISTROS=("alpine" "arch" "debian" "kali" "osx" "ubuntu") 5 | 6 | # Function to build an image 7 | build_image() { 8 | local DISTRO=$1 9 | local DOCKERFILE=".docker/Dockerfile.${DISTRO}" 10 | local IMAGE_NAME="secator-${DISTRO}" 11 | 12 | if [ -f "$DOCKERFILE" ]; then 13 | echo "🚀 Building $IMAGE_NAME using $DOCKERFILE..." 14 | docker build -t "$IMAGE_NAME" -f "$DOCKERFILE" . && \ 15 | echo "✅ Successfully built $IMAGE_NAME" || \ 16 | echo "❌ Failed to build $IMAGE_NAME" 17 | else 18 | echo "⚠️ Dockerfile $DOCKERFILE not found, skipping..." 19 | fi 20 | } 21 | 22 | # Iterate through the distributions and build in parallel 23 | for DISTRO in "${DISTROS[@]}"; do 24 | build_image "$DISTRO" & 25 | done 26 | 27 | # Wait for all background jobs to finish 28 | wait 29 | 30 | echo "🎉 All parallel builds completed!" 31 | -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | .gitignore -------------------------------------------------------------------------------- /.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | extend-ignore = W191,E101,E128,E265,W605 3 | max-line-length = 120 4 | -------------------------------------------------------------------------------- /.github/actions/install/action.yml: -------------------------------------------------------------------------------- 1 | name: Install secator 2 | description: Installs secator 3 | inputs: 4 | python-version: 5 | description: "Python version" 6 | required: true 7 | runs: 8 | using: "composite" 9 | steps: 10 | 11 | - name: Set up Python ${{ inputs.python-version }} 12 | uses: actions/setup-python@v3 13 | with: 14 | python-version: ${{ inputs.python-version }} 15 | 16 | - name: Install secator with pipx 17 | shell: bash 18 | run: pipx install -e .[dev] 19 | 20 | - name: Add secator to $PATH 21 | shell: bash 22 | run: echo "$HOME/.local/bin" >> "$GITHUB_PATH" 23 | -------------------------------------------------------------------------------- /.github/workflows/docker.yml: -------------------------------------------------------------------------------- 1 | name: docker 2 | 3 | on: 4 | push: 5 | branches: 6 | - release-please* 7 | pull_request: 8 | types: [labeled, synchronize] 9 | 10 | permissions: 11 | contents: write 12 | pull-requests: write 13 | 14 | concurrency: 15 | group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} 16 | cancel-in-progress: true 17 | 18 | env: 19 | FORCE_COLOR: 1 20 | 21 | jobs: 22 | platform: 23 | runs-on: ubuntu-latest 24 | if: ${{ github.event_name == 'push' || (github.event_name == 'pull_request' && contains(github.event.pull_request.labels.*.name, 'ci-docker')) }} 25 | strategy: 26 | fail-fast: false 27 | matrix: 28 | distribution: ["alpine", "arch", "debian", "kali", "osx", "ubuntu"] 29 | steps: 30 | 31 | - name: Checkout repository 32 | uses: actions/checkout@v3 33 | 34 | - name: Set up QEMU 35 | uses: docker/setup-qemu-action@v3 36 | 37 | - name: Set up Docker Buildx 38 | uses: docker/setup-buildx-action@v3 39 | 40 | - name: Build Docker image 41 | run: "docker build -t freelabz/secator:${{ matrix.distribution }} -f .docker/Dockerfile.${{ matrix.distribution }} ." 42 | 43 | - name: Run secator health check (strict) 44 | run: docker run --privileged freelabz/secator:${{ matrix.distribution }} health --strict 45 | 46 | - name: Run Docker image ls to view size 47 | run: docker image ls 48 | -------------------------------------------------------------------------------- /.github/workflows/publish.yml: -------------------------------------------------------------------------------- 1 | name: publish 2 | 3 | on: 4 | push: 5 | tags: 6 | - v*.*.* 7 | 8 | env: 9 | FORCE_COLOR: 1 10 | 11 | jobs: 12 | publish-pypi: 13 | runs-on: ubuntu-latest 14 | strategy: 15 | fail-fast: false 16 | matrix: 17 | python-version: ["3.11"] 18 | steps: 19 | - name: Checkout repository 20 | uses: actions/checkout@v3 21 | 22 | - name: Install secator 23 | uses: ./.github/actions/install 24 | with: 25 | python-version: ${{ matrix.python-version }} 26 | 27 | - name: Install secator build addon 28 | run: secator install addons build 29 | 30 | - name: Extract version from tag 31 | run: echo "VERSION=${GITHUB_REF#refs/tags/v}" >> $GITHUB_ENV 32 | 33 | - name: Build PyPI package 34 | run: secator u build --version ${VERSION} 35 | 36 | - name: Publish PyPI package 37 | run: secator u publish 38 | env: 39 | HATCH_INDEX_AUTH: ${{ secrets.PYPI_TOKEN }} 40 | 41 | publish-docker: 42 | runs-on: ubuntu-latest 43 | strategy: 44 | fail-fast: false 45 | matrix: 46 | python-version: ["3.11"] 47 | steps: 48 | - name: Checkout repository 49 | uses: actions/checkout@v3 50 | 51 | - name: Install secator 52 | uses: ./.github/actions/install 53 | with: 54 | python-version: ${{ matrix.python-version }} 55 | 56 | - name: Set up QEMU 57 | uses: docker/setup-qemu-action@v3 58 | 59 | - name: Set up Docker Buildx 60 | uses: docker/setup-buildx-action@v3 61 | 62 | - name: Login to Docker Hub 63 | uses: docker/login-action@v3 64 | with: 65 | username: ${{ secrets.DOCKERHUB_USERNAME }} 66 | password: ${{ secrets.DOCKERHUB_TOKEN }} 67 | 68 | - name: Extract version from tag 69 | run: echo "VERSION=${GITHUB_REF#refs/tags/v}" >> $GITHUB_ENV 70 | 71 | - name: Build Docker image (full) 72 | run: docker build -t freelabz/secator:${VERSION} . 73 | 74 | - name: Build Docker image (lite) 75 | run: docker build -t freelabz/secator:${VERSION}-lite --build-arg flavor=lite . 76 | 77 | - name: Tag image with 'latest' 78 | run: docker tag freelabz/secator:${VERSION} freelabz/secator:latest 79 | if: ${{ !contains(github.ref_name, 'alpha') && !contains(github.ref_name, 'beta') && !contains(github.ref_name, 'a') && !contains(github.ref_name, 'b') }} 80 | 81 | - name: Push Docker images 82 | run: | 83 | docker push freelabz/secator:${VERSION}-lite 84 | docker push freelabz/secator:${VERSION} 85 | 86 | - name: Push Docker image 'latest' 87 | run: docker push freelabz/secator:latest 88 | if: ${{ !contains(github.ref_name, 'alpha') && !contains(github.ref_name, 'beta') && !contains(github.ref_name, 'a') && !contains(github.ref_name, 'b') }} 89 | 90 | -------------------------------------------------------------------------------- /.github/workflows/release-please.yml: -------------------------------------------------------------------------------- 1 | name: release-please 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | 8 | permissions: 9 | contents: write 10 | pull-requests: write 11 | 12 | jobs: 13 | release-please: 14 | runs-on: ubuntu-latest 15 | steps: 16 | - uses: GoogleCloudPlatform/release-please-action@v4 17 | with: 18 | token: ${{ secrets.RELEASE_TOKEN }} 19 | release-type: python 20 | -------------------------------------------------------------------------------- /.github/workflows/test_tasks.yml: -------------------------------------------------------------------------------- 1 | name: Test tasks 2 | 3 | on: 4 | push: 5 | paths: 6 | - 'secator/tasks/*.py' 7 | branches: 8 | - release-please* 9 | pull_request: 10 | types: [labeled, synchronize] 11 | paths: 12 | - 'secator/tasks/*.py' 13 | 14 | concurrency: 15 | group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.event.pull_request.number || github.ref }} 16 | cancel-in-progress: true 17 | 18 | jobs: 19 | test-tasks: 20 | runs-on: ${{ matrix.os }} 21 | permissions: 22 | contents: write 23 | strategy: 24 | fail-fast: false 25 | matrix: 26 | # os: ["ubuntu-latest", "macos-latest"] 27 | os: ["ubuntu-latest"] 28 | steps: 29 | - name: Checkout repository 30 | uses: actions/checkout@v3 31 | with: 32 | fetch-depth: 0 33 | 34 | - name: Install secator 35 | uses: ./.github/actions/install 36 | with: 37 | python-version: ${{ matrix.python-version }} 38 | 39 | - name: Add GOBIN to PATH 40 | run: echo "${HOME}/.local/bin" >> $GITHUB_PATH 41 | 42 | - name: Add GOPATH to PATH 43 | run: echo "${HOME}/go/bin" >> $GITHUB_PATH 44 | 45 | - name: Detect changed or all tasks 46 | run: | 47 | if [[ ${{ github.event_name }} == "push" && ${{ github.ref_name }} =~ ^release-please.* ]]; then 48 | echo "Running all tasks" 49 | if ! secator test tasks -c; then 50 | exit 1 51 | fi 52 | else 53 | TASKS=$(git diff --name-only --diff-filter=AMR origin/main HEAD | grep '^secator/tasks/.*\.py$' | grep -v '_.*\.py$' | sed 's/\.py$//' | sed 's/^secator\/tasks\///') 54 | echo "Changed tasks: $TASKS" 55 | FAILED=0 56 | for task in $TASKS; do 57 | if ! secator test task -c $task; then 58 | FAILED=1 59 | fi 60 | echo "" 61 | done 62 | if [ $FAILED -eq 1 ]; then 63 | exit 1 64 | fi 65 | fi 66 | -------------------------------------------------------------------------------- /.github/workflows/update_tools_table.yml: -------------------------------------------------------------------------------- 1 | name: Update tools table 2 | 3 | on: 4 | schedule: 5 | - cron: '0 0 * * *' 6 | workflow_dispatch: 7 | # push: 8 | # branches: 9 | # - release-please* 10 | 11 | permissions: 12 | contents: write 13 | pull-requests: write 14 | 15 | concurrency: 16 | group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.event.pull_request.number || github.ref }} 17 | cancel-in-progress: true 18 | 19 | env: 20 | FORCE_COLOR: 1 21 | 22 | jobs: 23 | update-tools-table: 24 | runs-on: ${{ matrix.os }} 25 | permissions: 26 | contents: write 27 | strategy: 28 | fail-fast: false 29 | matrix: 30 | os: ["ubuntu-latest"] 31 | steps: 32 | - name: Checkout repository 33 | uses: actions/checkout@v3 34 | with: 35 | fetch-depth: 0 36 | 37 | - name: Install secator 38 | uses: ./.github/actions/install 39 | with: 40 | python-version: ${{ matrix.python-version }} 41 | 42 | - name: Generate tools table in README 43 | id: generate_tools_table 44 | run: source /opt/pipx/venvs/secator/bin/activate && python3 ./scripts/generate_tools_md_table.py 45 | env: 46 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 47 | 48 | - name: Check if README.md changed 49 | id: check_readme 50 | run: | 51 | if git diff --quiet README.md; then 52 | echo "readme_changed=false" >> $GITHUB_OUTPUT 53 | else 54 | echo "readme_changed=true" >> $GITHUB_OUTPUT 55 | fi 56 | 57 | - name: Create Pull Request 58 | if: ${{ github.event_name == 'push' && steps.check_readme.outputs.readme_changed == 'true' }} 59 | uses: peter-evans/create-pull-request@v6 60 | with: 61 | token: ${{ secrets.RELEASE_TOKEN }} 62 | commit-message: "chore(deps): update tools table in README" 63 | committer: GitHub 64 | author: ${{ github.actor }} <${{ github.actor }}@users.noreply.github.com> 65 | signoff: false 66 | branch: chore/update-tools-table 67 | delete-branch: true 68 | title: 'chore(deps): update tools table in README' 69 | body: | 70 | Automated updates for tools table in README. 71 | 72 | Please review the changes and merge if appropriate. 73 | labels: | 74 | dependencies 75 | automated 76 | -------------------------------------------------------------------------------- /.github/workflows/update_tools_version.yml: -------------------------------------------------------------------------------- 1 | name: Update tools versions 2 | 3 | on: 4 | schedule: 5 | - cron: '0 0 * * *' 6 | workflow_dispatch: 7 | # push: 8 | # branches: 9 | # - release-please* 10 | 11 | permissions: 12 | contents: write 13 | pull-requests: write 14 | 15 | concurrency: 16 | group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} 17 | cancel-in-progress: true 18 | 19 | env: 20 | FORCE_COLOR: 1 21 | 22 | jobs: 23 | update-tools-versions: 24 | runs-on: ${{ matrix.os }} 25 | strategy: 26 | fail-fast: false 27 | matrix: 28 | python-version: ["3.11"] 29 | os: [ubuntu-latest] 30 | steps: 31 | - name: Checkout repository 32 | uses: actions/checkout@v3 33 | 34 | - name: Install secator 35 | uses: ./.github/actions/install 36 | with: 37 | python-version: ${{ matrix.python-version }} 38 | 39 | - name: Setup docker compose 40 | uses: KengoTODA/actions-setup-docker-compose@v1 41 | with: 42 | version: '1.29.2' 43 | 44 | - name: Add GOBIN to PATH 45 | run: echo "${HOME}/.local/bin" >> $GITHUB_PATH 46 | 47 | - name: Add GOPATH to PATH 48 | run: echo "${HOME}/go/bin" >> $GITHUB_PATH 49 | 50 | - name: Install dependencies 51 | run: | 52 | secator install addons worker 53 | secator install langs go 54 | secator install langs ruby 55 | secator install tools --fail-fast 56 | env: 57 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # to avoid being rate-limited when fetching GitHub releases 58 | 59 | - name: Check for outdated tools 60 | id: update_check 61 | run: ./scripts/update_tools.sh 62 | env: 63 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 64 | 65 | #- name: Run integration tests 66 | # if: steps.update_check.outputs.changes_made == 'true' 67 | # run: | 68 | # secator test integration --test test_celery,test_worker,test_tasks 69 | 70 | - name: Create Pull Request 71 | if: steps.update_check.outputs.changes_made == 'true' # Only run if changes were made 72 | uses: peter-evans/create-pull-request@v6 73 | with: 74 | token: ${{ secrets.RELEASE_TOKEN }} 75 | commit-message: "chore(deps): update tool versions" 76 | committer: GitHub 77 | author: ${{ github.actor }} <${{ github.actor }}@users.noreply.github.com> 78 | signoff: false 79 | branch: chore/update-tools-version 80 | delete-branch: true 81 | title: 'chore(deps): update tools versions' 82 | body: | 83 | Automated updates for tool versions based on `secator health --bleeding`. 84 | 85 | Please review the changes and merge if appropriate. 86 | labels: | 87 | dependencies 88 | automated 89 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | #*.txt 6 | #*.sh 7 | 8 | # Others 9 | *.json 10 | *.yml 11 | resume.cfg 12 | 13 | # C extensions 14 | *.so 15 | 16 | # Distribution / packaging 17 | .Python 18 | build/ 19 | develop-eggs/ 20 | dist/ 21 | downloads/ 22 | eggs/ 23 | .eggs/ 24 | lib/ 25 | lib64/ 26 | parts/ 27 | sdist/ 28 | var/ 29 | wheels/ 30 | pip-wheel-metadata/ 31 | share/python-wheels/ 32 | *.egg-info/ 33 | .installed.cfg 34 | *.egg 35 | MANIFEST 36 | 37 | # PyInstaller 38 | # Usually these files are written by a python script from a template 39 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 40 | *.manifest 41 | *.spec 42 | 43 | # Installer logs 44 | pip-log.txt 45 | pip-delete-this-directory.txt 46 | 47 | # Unit test / coverage reports 48 | htmlcov/ 49 | .tox/ 50 | .nox/ 51 | .coverage 52 | .coverage.* 53 | .cache 54 | nosetests.xml 55 | coverage.xml 56 | *.cover 57 | *.py,cover 58 | .hypothesis/ 59 | .pytest_cache/ 60 | 61 | # Translations 62 | *.mo 63 | *.pot 64 | 65 | # Django stuff: 66 | *.log 67 | local_settings.py 68 | db.sqlite3 69 | db.sqlite3-journal 70 | 71 | # Flask stuff: 72 | instance/ 73 | .webassets-cache 74 | 75 | # Scrapy stuff: 76 | .scrapy 77 | 78 | # Sphinx documentation 79 | docs/_build/ 80 | 81 | # PyBuilder 82 | target/ 83 | 84 | # Jupyter Notebook 85 | .ipynb_checkpoints 86 | 87 | # IPython 88 | profile_default/ 89 | ipython_config.py 90 | 91 | # pyenv 92 | .python-version 93 | 94 | # pipenv 95 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 96 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 97 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 98 | # install all needed dependencies. 99 | #Pipfile.lock 100 | 101 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow 102 | __pypackages__/ 103 | 104 | # Celery stuff 105 | celerybeat-schedule 106 | celerybeat.pid 107 | 108 | # SageMath parsed files 109 | *.sage.py 110 | 111 | # Environments 112 | .env 113 | .venv 114 | env/ 115 | venv/ 116 | ENV/ 117 | env.bak/ 118 | venv.bak/ 119 | 120 | # Spyder project settings 121 | .spyderproject 122 | .spyproject 123 | 124 | # Rope project settings 125 | .ropeproject 126 | 127 | # mkdocs documentation 128 | /site 129 | 130 | # mypy 131 | .mypy_cache/ 132 | .dmypy.json 133 | dmypy.json 134 | 135 | # Pyre type checker 136 | .pyre/ 137 | 138 | # Project 139 | .git/ 140 | images/ -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | 2 | Please read this document before opening a new pull request. 3 | 4 | ## Create a dev environment 5 | 6 | To create a dev environment, you can either use `pipx` or `virtualenv` + `pip`: 7 | 8 |
9 | Pipx 10 | 11 | ```sh 12 | git clone https://github.com/freelabz/secator 13 | cd secator 14 | pipx install -e .[dev] 15 | ``` 16 | 17 |
18 | 19 |
20 | Pip 21 | 22 | ```sh 23 | git clone https://github.com/freelabz/secator 24 | cd secator 25 | virtualenv .venv 26 | source .venv/bin/activate 27 | pip install -e .[dev] 28 | ``` 29 | 30 |
31 | 32 | 33 | ## Contribute a new task 34 | 35 | To contribute a new task back to `secator` repository, it needs to validate some requirements: 36 | 37 | - Verify your **task class definition**: 38 | - It MUST have an `input_type` key. 39 | - It MUST have an `output_types` key. 40 | - It MUST have an `install_cmd` key. 41 | 42 | - Add your **task definition** to the `tasks/` directory. If your task class is named `MyAwesomeTask`, call it `my_awesome_task.py` 43 | 44 | - [Optional] Add your output type(s) to `secator`: 45 | - Add your type(s) definition(s) to `output_types/` directory. If your output type is named `MyAwesomeType`, call the file `my_awesome_type.py` 46 | - Import your type class in `__init__.py` 47 | 48 | - Add a **unit test** for your task: 49 | - `tests/fixtures/_output.(json|xml|rc|txt)`: add a fixture for the original command output. 50 | - Make sure it is anonymized from PII data 51 | - Run `secator x ` to make sure the output is shown correctly on the CLI. Also run with `-json` to 52 | verify the output schema 53 | - This fixture will be used by unit tests to emulate data sent by your task 54 | - Validate your unit test by running: `secator test unit --task --test test_tasks` 55 | 56 | - Add an **integration test** for your task: 57 | - `tests/integration/inputs.py` - to modify integration inputs 58 | - `tests/integration/outputs.py` - to modify expected outputs 59 | - Validate your integration test by running: `secator test integration --task --test test_tasks` 60 | 61 | - Run the lint tests: `secator test lint` 62 | 63 | - Open a new pull request with your changes. 64 | 65 | ### New workflow / scan 66 | 67 | - Add your workflow / scan YAML definition `awesome_work.yml` to `configs/workflows/` 68 | 69 | - Make sure the `name` YAML key is the same as your workflow's file name. 70 | 71 | - Make sure the `type` YAML key is set to `workflow` or `scan`. 72 | 73 | - Add some integration tests: 74 | - `inputs.py`: add inputs for your workflow 75 | - `outputs.py`: add some expected outputs of your workflow 76 | 77 | - Run the integration tests: 78 | - For workflows: `secator test integration --test test_workflows --workflows ` 79 | - For scans: `secator test integration --test test_scans --scans ` 80 | 81 | - Open a new pull request with your changes. 82 | 83 | ## Other code 84 | 85 | - Make sure you pass the `lint` and `unit` tests: 86 | - `secator test unit` 87 | - `secator test lint` 88 | - Open a new pull request with your changes. -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | .docker/Dockerfile.alpine -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | License text copyright (c) 2020 MariaDB Corporation Ab, All Rights Reserved. 2 | “Business Source License” is a trademark of MariaDB Corporation Ab. 3 | 4 | Parameters 5 | 6 | Licensor: FreeLabz. 7 | Licensed Work: Secator. The Licensed Work is (c) 2023 FreeLabz. 8 | Additional Use Grant: You may make production use of the Licensed Work, 9 | provided such use does not include offering the Licensed Work 10 | to third parties on a hosted or embedded basis which is 11 | competitive with FreeLabz's products. 12 | Change Date: Four years from the date the Licensed Work is published. 13 | 14 | For information about alternative licensing arrangements for the Licensed Work, 15 | please contact sales@freelabz.com. 16 | 17 | Notice 18 | 19 | Business Source License 1.1 20 | 21 | Terms 22 | 23 | The Licensor hereby grants you the right to copy, modify, create derivative 24 | works, redistribute, and make non-production use of the Licensed Work. The 25 | Licensor may make an Additional Use Grant, above, permitting limited production use. 26 | 27 | Effective on the Change Date, or the fourth anniversary of the first publicly 28 | available distribution of a specific version of the Licensed Work under this 29 | License, whichever comes first, the Licensor hereby grants you rights under 30 | the terms of the Change License, and the rights granted in the paragraph 31 | above terminate. 32 | 33 | If your use of the Licensed Work does not comply with the requirements 34 | currently in effect as described in this License, you must purchase a 35 | commercial license from the Licensor, its affiliated entities, or authorized 36 | resellers, or you must refrain from using the Licensed Work. 37 | 38 | All copies of the original and modified Licensed Work, and derivative works 39 | of the Licensed Work, are subject to this License. This License applies 40 | separately for each version of the Licensed Work and the Change Date may vary 41 | for each version of the Licensed Work released by Licensor. 42 | 43 | You must conspicuously display this License on each original or modified copy 44 | of the Licensed Work. If you receive the Licensed Work in original or 45 | modified form from a third party, the terms and conditions set forth in this 46 | License apply to your use of that work. 47 | 48 | Any use of the Licensed Work in violation of this License will automatically 49 | terminate your rights under this License for the current and all other 50 | versions of the Licensed Work. 51 | 52 | This License does not grant you any right in any trademark or logo of 53 | Licensor or its affiliates (provided that you may use a trademark or logo of 54 | Licensor as expressly required by this License). 55 | 56 | TO THE EXTENT PERMITTED BY APPLICABLE LAW, THE LICENSED WORK IS PROVIDED ON 57 | AN “AS IS” BASIS. LICENSOR HEREBY DISCLAIMS ALL WARRANTIES AND CONDITIONS, 58 | EXPRESS OR IMPLIED, INCLUDING (WITHOUT LIMITATION) WARRANTIES OF 59 | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NON-INFRINGEMENT, AND 60 | TITLE. -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | # Security Policy 2 | 3 | ## Supported Versions 4 | 5 | Use this section to tell people about which versions of your project are 6 | currently being supported with security updates. 7 | 8 | | Version | Supported | 9 | | ------- | ------------------ | 10 | | 0.0.x | :white_check_mark: | 11 | 12 | ## Reporting a Vulnerability 13 | 14 | To report a new vulnerability, please open an issue with the title "security: report ". 15 | -------------------------------------------------------------------------------- /cloudbuild.yaml: -------------------------------------------------------------------------------- 1 | steps: 2 | - name: 'gcr.io/cloud-builders/docker' 3 | entrypoint: 'bash' 4 | args: ['-c', 'docker pull ${_LOCATION}-docker.pkg.dev/$PROJECT_ID/${_REPOSITORY}/secator:latest || exit 0'] 5 | - name: 'gcr.io/cloud-builders/docker' 6 | args: ['build', '-t', '${_LOCATION}-docker.pkg.dev/$PROJECT_ID/${_REPOSITORY}/secator', '--cache-from', 'europe-west1-docker.pkg.dev/$PROJECT_ID/${_REPOSITORY}/secator:latest', '.'] 7 | substitutions: 8 | _REPOSITORY: secator 9 | _LOCATION: europe-west1 10 | 11 | images: 12 | - '${_LOCATION}-docker.pkg.dev/$PROJECT_ID/${_REPOSITORY}/secator' -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: "3.7" 2 | 3 | services: 4 | 5 | secator: 6 | image: freelabz/secator:latest 7 | container_name: secator-client 8 | restart: unless-stopped 9 | entrypoint: "" 10 | command: ['tail', '-F', '/dev/null'] 11 | environment: 12 | - SECATOR_CELERY_BROKER_URL=redis://redis:6379/0 13 | - SECATOR_CELERY_RESULT_BACKEND=redis://redis:6379/0 14 | depends_on: 15 | - redis 16 | 17 | worker: 18 | image: freelabz/secator:latest 19 | container_name: secator-worker 20 | restart: unless-stopped 21 | command: ['worker'] 22 | environment: 23 | - SECATOR_CELERY_BROKER_URL=redis://redis:6379/0 24 | - SECATOR_CELERY_RESULT_BACKEND=redis://redis:6379/0 25 | depends_on: 26 | - redis 27 | 28 | redis: 29 | image: redis:latest 30 | container_name: secator-redis 31 | -------------------------------------------------------------------------------- /helm/.helmignore: -------------------------------------------------------------------------------- 1 | # Patterns to ignore when building packages. 2 | # This supports shell glob matching, relative path matching, and 3 | # negation (prefixed with !). Only one pattern per line. 4 | .DS_Store 5 | # Common VCS dirs 6 | .git/ 7 | .gitignore 8 | .bzr/ 9 | .bzrignore 10 | .hg/ 11 | .hgignore 12 | .svn/ 13 | # Common backup files 14 | *.swp 15 | *.bak 16 | *.tmp 17 | *.orig 18 | *~ 19 | # Various IDEs 20 | .project 21 | .idea/ 22 | *.tmproj 23 | .vscode/ 24 | -------------------------------------------------------------------------------- /helm/Chart.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v2 2 | name: Secator 3 | description: A Secator Helm chart for Kubernetes 4 | 5 | type: application 6 | version: 0.1.0 7 | appVersion: "0.5.2" 8 | -------------------------------------------------------------------------------- /helm/templates/redis-service.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Service 3 | metadata: 4 | name: {{ .Values.redis.name }} 5 | namespace: {{ .Values.namespace }} 6 | spec: 7 | type: ClusterIP 8 | ports: 9 | - port: {{ .Values.redis.port }} 10 | name: client 11 | selector: 12 | app: redis -------------------------------------------------------------------------------- /helm/templates/redis.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: apps/v1 2 | kind: StatefulSet 3 | metadata: 4 | name: {{ .Values.redis.name }} 5 | namespace: {{ .Values.namespace }} 6 | spec: 7 | selector: 8 | matchLabels: 9 | app: redis 10 | serviceName: {{ .Values.redis.name }} 11 | replicas: {{ .Values.redis.replicas }} 12 | template: 13 | metadata: 14 | labels: 15 | app: redis 16 | spec: 17 | containers: 18 | - name: {{ .Values.redis.name }} 19 | image: {{ .Values.redis.image }} 20 | ports: 21 | - containerPort: {{ .Values.redis.port }} 22 | name: client -------------------------------------------------------------------------------- /helm/templates/secator-manager.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Pod 3 | metadata: 4 | name: {{ .Values.secatorManager.name }} 5 | namespace: {{ .Values.namespace }} 6 | labels: 7 | name: secator-manager 8 | spec: 9 | containers: 10 | - name: secator-manager 11 | image: {{ .Values.secatorManager.image }} 12 | command: ["tail"] 13 | args: ["-F", "anything"] 14 | env: 15 | - name: SECATOR_CELERY_BROKER_URL 16 | value: "redis://{{ .Values.redis.name }}:6379/0" 17 | - name: SECATOR_CELERY_RESULT_BACKEND 18 | value: "redis://{{ .Values.redis.name }}:6379/0" 19 | -------------------------------------------------------------------------------- /helm/templates/secator-worker.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: apps/v1 2 | kind: Deployment 3 | metadata: 4 | name: {{ .Values.secatorWorker.name }} 5 | namespace: {{ .Values.namespace }} 6 | spec: 7 | selector: 8 | matchLabels: 9 | app: secator-worker 10 | template: 11 | metadata: 12 | labels: 13 | app: secator-worker 14 | spec: 15 | containers: 16 | - name: {{ .Values.secatorWorker.name }} 17 | image: {{ .Values.secatorWorker.image }} 18 | command: ["secator"] 19 | args: ["worker"] 20 | env: 21 | - name: SECATOR_CELERY_BROKER_URL 22 | value: "redis://{{ .Values.redis.name }}:6379/0" 23 | - name: SECATOR_CELERY_RESULT_BACKEND 24 | value: "redis://{{ .Values.redis.name }}:6379/0" -------------------------------------------------------------------------------- /helm/values.yaml: -------------------------------------------------------------------------------- 1 | # Default values for Secator 2 | # This is a YAML-formatted file. 3 | # Declare variables to be passed into your templates. 4 | 5 | namespace: secator 6 | 7 | secatorManager: 8 | name: secator-manager 9 | image: "freelabz/secator" 10 | 11 | # Empty if using default repository 12 | repository: 13 | # Empty if using tag "latest" 14 | tag: 15 | 16 | secatorWorker: 17 | name: secator-worker 18 | image: "freelabz/secator" 19 | 20 | # Empty if using default repository 21 | repository: 22 | # Empty if using tag "latest" 23 | tag: 24 | 25 | redis: 26 | name: redis 27 | image: "redis" 28 | 29 | # Empty if using default repository 30 | repository: 31 | # Empty if using tag "latest" 32 | tag: 33 | replicas: 1 34 | port: 6379 35 | -------------------------------------------------------------------------------- /images/aliases.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/freelabz/secator/8b8cafb6472b15cc9938921ce7ac6e0fc45df11c/images/aliases.gif -------------------------------------------------------------------------------- /images/demo.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/freelabz/secator/8b8cafb6472b15cc9938921ce7ac6e0fc45df11c/images/demo.gif -------------------------------------------------------------------------------- /images/demo.tap: -------------------------------------------------------------------------------- 1 | secator --help 2 | clear 3 | 4 | # Run tasks ... 5 | secator x katana http://testphp.vulnweb.com -rl 100 -timeout 10 -header Accept:text/html 6 | clear 7 | 8 | # ... with mutualized input options and output types ! 9 | Up 3 10 | Left 72 11 | Backspace 6 12 | dirsearch # noenter 13 | 14 | Up 1 15 | Backspace 9 16 | cariddi # noenter 17 | 18 | Up 1 19 | Backspace 7 20 | feroxbuster # nowait 21 | Sleep 5s 22 | Ctrl+C 23 | Sleep 3s 24 | 25 | Up 1 26 | Left 72 27 | Backspace 11 28 | nmap # noenter 29 | Right 8 30 | Backspace 7 31 | Right 19 32 | Space 1 33 | -p 80,443 # noenter 34 | Right 45 35 | Backspace 25 36 | Sleep 2s 37 | Enter 38 | Wait 39 | Sleep 3s 40 | clear 41 | 42 | # Run workflows ... 43 | secator w host_recon --help 44 | 45 | Up 1 46 | Backspace 6 47 | localhost -rl 1000 -ss -p 6379,8080 -tid redis-info,exposed-redis,CVE-2021-44228 -------------------------------------------------------------------------------- /images/fmt.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/freelabz/secator/8b8cafb6472b15cc9938921ce7ac6e0fc45df11c/images/fmt.gif -------------------------------------------------------------------------------- /images/help.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/freelabz/secator/8b8cafb6472b15cc9938921ce7ac6e0fc45df11c/images/help.png -------------------------------------------------------------------------------- /images/input.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/freelabz/secator/8b8cafb6472b15cc9938921ce7ac6e0fc45df11c/images/input.gif -------------------------------------------------------------------------------- /images/pipe.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/freelabz/secator/8b8cafb6472b15cc9938921ce7ac6e0fc45df11c/images/pipe.gif -------------------------------------------------------------------------------- /images/short_demo.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/freelabz/secator/8b8cafb6472b15cc9938921ce7ac6e0fc45df11c/images/short_demo.gif -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "secator-worker", 3 | "version": "0.0.1", 4 | "description": "Secator worker", 5 | "main": "", 6 | "scripts": { 7 | "solo": "npm run venv && npm run install &&. .venv/bin/activate && .venv/bin/secator worker -r", 8 | "dev": "npm run venv && npm run install &&. .venv/bin/activate && SECATOR_CELERY_BROKER_URL=redis://localhost:6379 SECATOR_CELERY_RESULT_BACKEND=redis://localhost:6379 secator worker -r", 9 | "install": ". .venv/bin/activate && pip install -e .[gcs,redis,worker,dev,mongodb]", 10 | "venv": "python3 -m venv .venv", 11 | "generate": "rm -r venv && npm run venv && .venv/bin/pip freeze > requirements.txt", 12 | "docker:build": "docker build -t secator .", 13 | "docker:push": "gcloud builds submit .", 14 | "docker:logs": "docker logs -f secator", 15 | "docker:start": "docker run --name=secator-worker --network=host -e SECATOR_CELERY_BROKER_URL=redis://localhost:6379 -e SECATOR_CELERY_RESULT_BACKEND=redis://localhost:6379 -d secator worker", 16 | "docker:stop": "docker stop secator-worker; docker rm secator-worker", 17 | "docker:start-redis": "docker run --name redis -p 6379:6379 -d redis", 18 | "docker:stop-redis": "docker stop redis; docker rm redis", 19 | "docker:start-mongodb": "docker run --name mongodb -p 27017:27017 -d mongo:latest", 20 | "docker:stop-mongodb": "docker stop mongodb; docker rm mongodb", 21 | "docker:start-deps": "npm run docker:start-mongodb; npm run docker:start-redis", 22 | "docker:stop-deps": "npm run docker:stop-mongodb; npm run docker:stop-redis", 23 | "docker:start-all": "npm run docker:start-deps; npm run docker:start", 24 | "docker:stop-all": "npm run docker:stop-deps; npm run docker:stop" 25 | }, 26 | "keywords": [], 27 | "author": "ocervello@freelabz.com", 28 | "license": "", 29 | "dependencies": {} 30 | } 31 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ['hatchling'] 3 | build-backend = 'hatchling.build' 4 | 5 | [project] 6 | name = 'secator' 7 | version = "0.16.1" 8 | authors = [{ name = 'FreeLabz', email = 'sales@freelabz.com' }] 9 | readme = 'README.md' 10 | description = "The pentester's swiss knife." 11 | requires-python = '>=3.8' 12 | keywords = [ 13 | 'cybersecurity', 14 | 'recon', 15 | 'vulnerability', 16 | 'pentest', 17 | 'automation' 18 | ] 19 | classifiers = [ 20 | 'Development Status :: 3 - Alpha', 21 | 'Intended Audience :: Developers', 22 | 'Intended Audience :: Information Technology', 23 | 'License :: Free for non-commercial use', 24 | 'Operating System :: Unix', 25 | 'Programming Language :: Python', 26 | 'Programming Language :: Python :: 3', 27 | 'Programming Language :: Python :: 3.8', 28 | 'Programming Language :: Python :: 3.9', 29 | 'Programming Language :: Python :: 3.10', 30 | 'Programming Language :: Python :: 3.11', 31 | ] 32 | dependencies = [ 33 | 'beautifulsoup4 <= 5', 34 | 'celery < 6', 35 | 'distro < 2', 36 | 'cpe < 2', 37 | 'dotmap < 2', 38 | 'free-proxy < 2', 39 | 'furl < 3', 40 | 'greenlet < 4', 41 | 'humanize < 5', 42 | 'ifaddr < 1', 43 | 'jinja2 < 4', 44 | 'packaging < 25', 45 | 'python-dotenv < 2', 46 | 'pyyaml < 7', 47 | 'pydantic < 3', 48 | 'requests < 3', 49 | 'rich < 14', 50 | 'rich-click < 1.7', 51 | 'click < 8.2.0', 52 | 'psutil < 7', 53 | 'retry < 1', 54 | 'tldextract < 6', 55 | 'typing_extensions < 5', 56 | 'validators < 1', 57 | 'xmltodict < 1' 58 | ] 59 | 60 | [project.optional-dependencies] 61 | dev = [ 62 | 'coverage < 8', 63 | 'flake8 < 8', 64 | 'pytest < 9', 65 | 'watchdog < 3', 66 | 'asciinema-automation < 1', 67 | ] 68 | build = [ 69 | 'hatch < 2', 70 | ] 71 | trace = [ 72 | 'memray < 2', 73 | 'pyinstrument < 5', 74 | ] 75 | worker = [ 76 | 'eventlet < 1', 77 | 'flower < 3', 78 | ] 79 | redis = [ 80 | 'redis < 6', 81 | ] 82 | mongodb = [ 83 | 'pymongo < 5', 84 | ] 85 | gdrive = [ 86 | 'google-api-python-client < 3', 87 | 'gspread < 7' 88 | ] 89 | gcs = [ 90 | 'google-cloud-storage < 3' 91 | ] 92 | 93 | [project.scripts] 94 | secator = 'secator.cli:cli' 95 | 96 | [project.urls] 97 | Homepage = 'https://github.com/freelabz/secator' 98 | Issues = 'https://github.com/freelabz/secator/issues' 99 | -------------------------------------------------------------------------------- /scripts/download_cves.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | wget https://cve.circl.lu/static/circl-cve-search-expanded.json.gz 4 | gunzip circl-cve-search-expanded.json.gz 5 | mkdir -p /tmp/cves 6 | 7 | while IFS= read -r line 8 | do 9 | cve_id=$(jq -r '.id' <<< $line) 10 | echo $line > /tmp/cves/$cve_id.json 11 | echo "CVE saved to /tmp/cves/$cve_id.json" 12 | done < "circl-cve-search-expanded.json" 13 | 14 | rm circl-cve-search-expanded.json.gz || true 15 | rm circl-cve-search-expanded.json || true -------------------------------------------------------------------------------- /scripts/install.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | YELLOW='\033[0;93m' 3 | GREEN='\033[0;92m' 4 | NC='\033[0m' # No Color 5 | 6 | echo -e "🗄 ${YELLOW}Running apt update ...${NC}" 7 | sudo apt update 8 | echo -e "🗄 ${GREEN}Ran apt update successfully !${NC}\n" 9 | 10 | echo -e "🗄 ${YELLOW}Installing pipx and git ...${NC}" 11 | sudo apt install -y pipx git 12 | echo -e "🗄 ${GREEN}pipx and git installed successfully !${NC}\n" 13 | 14 | echo -e "🗄 ${YELLOW}Setting \$PATH ...${NC}" 15 | export PATH=$PATH:~/.local/bin:~/go/bin 16 | echo -e "🗄 ${GREEN}\$PATH modified successfully !${NC}\n" 17 | 18 | echo -e "🗄 ${YELLOW}Installing secator and dependencies ...${NC}" 19 | pipx install secator 20 | secator install langs go 21 | secator install langs ruby 22 | secator install tools 23 | secator install addons redis 24 | secator install addons worker 25 | secator install addons google 26 | secator install addons mongodb 27 | echo -e "🗄 ${GREEN}secator installed successfully !${NC}\n" 28 | 29 | echo -e "🗄 ${YELLOW}Adding ~/go/bin and ~/.local/bin to \$PATH in .bashrc ...${NC}" 30 | echo "export PATH=$PATH:~/go/bin:~/.local/bin" >> ~/.bashrc 31 | . ~/.bashrc 32 | echo -e "🗄 ${GREEN}\$PATH modified successfully !${NC}\n" 33 | -------------------------------------------------------------------------------- /scripts/install_asciinema.sh: -------------------------------------------------------------------------------- 1 | sudo apt install asciinema 2 | sudo apt update && sudo apt install ttf-mscorefonts-installer 3 | go install -v github.com/cirocosta/asciinema-edit@latest 4 | git clone https://github.com/asciinema/agg 5 | cd agg 6 | cargo build -r 7 | cp target/agg /usr/local/bin/ 8 | -------------------------------------------------------------------------------- /scripts/install_go.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | GO_VERSION=1.24.0 3 | GO_BUILD=linux-amd64.tar.gz 4 | GO_TAR=go$GO_VERSION.$GO_BUILD 5 | 6 | YELLOW='\033[0;93m' 7 | GREEN='\033[0;92m' 8 | NC='\033[0m' # No Color 9 | 10 | echo -e "🗄 ${YELLOW}Downloading Go $GO_VERSION ...${NC}" 11 | wget https://golang.org/dl/$GO_TAR 12 | 13 | echo -e "🗄 ${YELLOW}Unzip $GO_TAR ...${NC}" 14 | tar -xvf $GO_TAR 15 | rm $GO_TAR || true 16 | 17 | echo -e "🗄 ${YELLOW}Linking Go install to /usr/local ...${NC}" 18 | sudo mv go /usr/local/go$GO_VERSION 19 | sudo mv /usr/bin/go /usr/bin/go.bak || true 20 | sudo ln -s /usr/local/go$GO_VERSION/bin/go /usr/bin/go 21 | 22 | echo -e "🗄 ${GREEN}Go $GO_VERSION installed successfully !${NC}\n" 23 | -------------------------------------------------------------------------------- /scripts/install_ruby.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | YELLOW='\033[0;93m' 3 | GREEN='\033[0;92m' 4 | NC='\033[0m' # No Color 5 | 6 | echo -e "🗄 ${YELLOW}Installing Ruby ...${NC}" 7 | sudo apt update -y 8 | sudo apt install -y ruby-full 9 | sudo apt install -y rubygems 10 | 11 | echo -e "🗄 ${GREEN}Ruby installed successfully !${NC}\n" 12 | -------------------------------------------------------------------------------- /scripts/msf/exploit_cve.rc: -------------------------------------------------------------------------------- 1 | setg RHOST {RHOST} 2 | setg RPORT {RPORT} 3 | setg RHOSTS {RHOSTS} 4 | setg LHOST {LHOST} 5 | setg SRVHOST {LHOST} 6 | setg SRVPORT {LPORT} 7 | search {CVE_ID} 8 | use {MODULE_ID} 9 | options 10 | set ForceExploit true 11 | exploit -------------------------------------------------------------------------------- /scripts/msf/ftp_anonymous.rc: -------------------------------------------------------------------------------- 1 | setg RHOST {RHOST} 2 | setg RHOSTS {RHOSTS} 3 | use auxiliary/scanner/ftp/anonymous 4 | run 5 | exit -------------------------------------------------------------------------------- /scripts/msf/ftp_version.rc: -------------------------------------------------------------------------------- 1 | setg RHOST {RHOST} 2 | setg RHOSTS {RHOSTS} 3 | use auxiliary/scanner/ftp/ftp_version 4 | run 5 | exit 6 | -------------------------------------------------------------------------------- /scripts/msf/ftp_vsftpd_234_backdoor.rc: -------------------------------------------------------------------------------- 1 | setg LHOST {LHOST} 2 | setg LPORT {LPORT} 3 | use exploit/unix/ftp/vsftpd_234_backdoor 4 | exploit 5 | exit -------------------------------------------------------------------------------- /scripts/msf/redis.rc: -------------------------------------------------------------------------------- 1 | setg RHOSTS {RHOSTS} 2 | use auxiliary/scanner/redis/redis_server 3 | exploit 4 | exit -------------------------------------------------------------------------------- /scripts/stories/STORY.md: -------------------------------------------------------------------------------- 1 | ## Scenarios 2 | 3 | ### Short demo 4 | ``` 5 | ./docs/stories/short_demo.sh 6 | ``` 7 | 8 | ### Options format demo 9 | 10 | ```sh 11 | ./docs/stories/fmt.sh 12 | ``` 13 | 14 | ### Input options demo 15 | ```sh 16 | ./docs/stories/input.sh 17 | ``` 18 | 19 | ### Worker demo 20 | ```sh 21 | secator worker & # run worker in background 22 | secator w url_fuzz mydomain.com # workflow will be run in background 23 | ``` 24 | 25 | ### Proxy usage demo 26 | 27 | 28 | ### Demo aliases 29 | ```sh 30 | secator u enable-aliases 31 | source ~/.secator/.aliases 32 | host_recon mydomain.com 33 | ``` 34 | 35 | ### Feature-based demo 36 | 37 | **Ad-hoc discovery:** 38 | 39 | ```sh 40 | # pipe naabu and httpx to find all alive HTTP servers available on the host 41 | secator x naabu mydomain.com | secator x httpx -mc 200 -o table 42 | 43 | # run a basic URL crawler workflow on the host to see which URLs are up 44 | secator w url_crawl mydomain.com 45 | 46 | # fuzz one of the URLs to find more URLs 47 | secator x ffuf https://mydomain.com/FUZZ -mc 200,301,400,500 -o table | secator x httpx -mc 200 -o table 48 | 49 | **Host scan:** 50 | secator w host_recon mydomain.com 51 | 52 | **Subdomain mapping:** 53 | secator w subdomain_recon mydomain.com 54 | secator x subfinder mydomain.com | secator x httpx -json -o table | httpx -mc 200 -json -o table 55 | 56 | **Run in distributed mode:** 57 | secator z default mydomain.com --worker 58 | ``` 59 | 60 | **Callbacks (library mode):** 61 | ```py 62 | from secator.runners import Workflow 63 | from secator.template import TemplateLoader 64 | 65 | config = TemplateLoader(name='workflow/host_recon') 66 | hooks = { 67 | Task: { 68 | 'on_item': { 69 | Port: [save_port_to_db], 70 | Vulnerability: [save_vulnerability_to_db, send_vulnerability_to_discord], 71 | } 72 | } 73 | } 74 | workflow = Workflow(config, hooks=hooks) 75 | result = workflow.delay() 76 | while not result.ready(): 77 | nports = db.session.query(Vulnerability).count() 78 | nvulns = db.session.query(Ports).count() 79 | 80 | all_results = result.get() 81 | ``` -------------------------------------------------------------------------------- /scripts/stories/aliases.sh: -------------------------------------------------------------------------------- 1 | #$ delay 5 2 | 3 | #$ expect \$ 4 | secator x httpx mydomain.com:8080 5 | #$ expect \$ 6 | 7 | secator u enable-aliases && . ~/.secator/.aliases # we can wrap available tasks as aliases 8 | #$ expect \$ 9 | 10 | httpx --help # httpx is now secator, you don't need the prefix 'secator x' anymore 11 | #$ expect \$ 12 | 13 | httpx mydomain.com:8080 -orig # in case you want to get the original httpx output 14 | #$ expect \$ 15 | 16 | listw # show available workflows, same as secator w 17 | #$ expect \$ 18 | #$ wait 3000 -------------------------------------------------------------------------------- /scripts/stories/demo.sh: -------------------------------------------------------------------------------- 1 | #$ delay 5 2 | 3 | secator x # show available tasks 4 | #$ expect \$ 5 | 6 | secator x httpx mydomain.com:8080 7 | #$ expect \$ 8 | 9 | secator x httpx mydomain.com:8080 -json # JSON lines, yay ! 10 | #$ expect \$ 11 | 12 | secator u enable-aliases && . ~/.secator/.aliases # we can wrap available tasks as aliases 13 | #$ expect \$ 14 | 15 | httpx --help # httpx is now secator 16 | #$ expect \$ 17 | 18 | httpx mydomain.com:8080 -orig -json # in case you want to get the original httpx JSON output 19 | #$ expect \$ 20 | 21 | listw # show available workflows, same as secator w 22 | #$ expect \$ 23 | 24 | secator w host_recon mydomain.com # you can also use the alias hostrec mydomain.com as well 25 | #$ expect \$ 26 | 27 | echo "Thank you !" 28 | #$ expect \$ 29 | #$ wait 5000 -------------------------------------------------------------------------------- /scripts/stories/fmt.sh: -------------------------------------------------------------------------------- 1 | #$ delay 5 2 | 3 | secator x ffuf http://testphp.vulnweb.com/FUZZ -mc 200 -quiet # readable output out-of-the-box 4 | #$ expect \$ 5 | 6 | secator x ffuf http://testphp.vulnweb.com/FUZZ -mc 200 -quiet -raw # raw output, pipeable to file or other tools 7 | #$ expect \$ 8 | 9 | secator x ffuf http://testphp.vulnweb.com/FUZZ -mc 200 -quiet -json # output JSON lines 10 | #$ expect \$ 11 | 12 | secator x ffuf http://testphp.vulnweb.com/FUZZ -mc 200 -quiet -orig -json # original ffuf JSON lines 13 | #$ expect \$ 14 | 15 | secator x ffuf http://testphp.vulnweb.com/FUZZ -mc 200 -quiet -o table,csv,json # show table, save results to CSV / JSON files 16 | #$ expect \$ 17 | 18 | #$ wait 1000 -------------------------------------------------------------------------------- /scripts/stories/input.sh: -------------------------------------------------------------------------------- 1 | #$ delay 5 2 | 3 | secator x httpx http://mydomain.com:3000 # single target 4 | #$ expect \$ 5 | 6 | secator x httpx http://mydomain.com:3000,http://mydomain.com:8080 # ... or a comma-separated list of targets 7 | #$ expect \$ 8 | 9 | secator x httpx urls.txt # ... or a file containing targets 10 | #$ expect \$ 11 | 12 | cat urls.txt | secator x httpx # ... or feed targets through stdin 13 | #$ expect \$ 14 | 15 | #$ wait 1000 -------------------------------------------------------------------------------- /scripts/stories/pipe.sh: -------------------------------------------------------------------------------- 1 | #$ delay 5 2 | 3 | #$ wait 1000 4 | secator x naabu mydomain.com -raw | secator x httpx -raw | secator x katana -raw | secator x gf --pattern xss # port scan + HTTP check + XSS pattern finder 5 | #$ expect \$ 6 | 7 | #$ wait 1000 8 | secator x subfinder wikipedia.org -raw | secator x httpx # subdomain discovery with HTTP check 9 | #$ expect \$ 10 | 11 | #$ wait 1000 -------------------------------------------------------------------------------- /scripts/stories/short_demo.sh: -------------------------------------------------------------------------------- 1 | #$ delay 5 2 | 3 | #$ wait 1000 4 | secator -nb x naabu mydomain.com -raw | secator x httpx # port + HTTP discovery 5 | #$ expect \$ 6 | 7 | secator x ffuf http://mydomain.com:3000/FUZZ -fs 1987,3103 -mc 200 -quiet # fuzzing 8 | #$ expect \$ 9 | 10 | secator w host_recon mydomain.com -rl 100 # host recon workflow 11 | #$ expect \$ 12 | 13 | #$ wait 1000 -------------------------------------------------------------------------------- /scripts/update_tools.sh: -------------------------------------------------------------------------------- 1 | secator health --bleeding 1> to_install.sh 2> output.log 2 | chmod +x to_install.sh 3 | ./to_install.sh 4 | 5 | echo "Parsing health check output..." 6 | outdated=$(grep -E 'is outdated' output.log) 7 | echo "" 8 | echo "Outdated lines:" 9 | echo "$outdated" 10 | 11 | tool_version=$(echo "$outdated" | sed -r "s/\x1B\[([0-9]{1,3}(;[0-9]{1,2};?)?)?[mGK]//g" | sed -n 's/.*\[WRN\] \([^ ]*\) is .* latest:\([^)]*\)\.*)\./\1 \2/p') 12 | echo "" 13 | echo "Tool versions to update:" 14 | echo "$tool_version" 15 | 16 | changes_made=no 17 | while read -r tool version; do 18 | echo "Processing update for '$tool' to version '$version'" 19 | file_path="secator/tasks/${tool}.py" # Construct file path 20 | 21 | if [ -f "$file_path" ]; then 22 | echo "Updating $file_path to version $version..." 23 | sed -i "s|install_version = '.*'|install_version = '${version}'|" "$file_path" 24 | if ! git diff --quiet "$file_path"; then 25 | echo "File $file_path updated successfully." 26 | changes_made=true 27 | else 28 | echo "Warning: sed command did not modify $file_path as expected." 29 | fi 30 | else 31 | echo "Warning: Task file $file_path not found for tool '$tool'." 32 | fi 33 | done <<< "$tool_version" 34 | 35 | echo "changes_made=$changes_made" >> $GITHUB_OUTPUT 36 | -------------------------------------------------------------------------------- /secator/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/freelabz/secator/8b8cafb6472b15cc9938921ce7ac6e0fc45df11c/secator/__init__.py -------------------------------------------------------------------------------- /secator/click.py: -------------------------------------------------------------------------------- 1 | from collections import OrderedDict 2 | 3 | import rich_click as click 4 | from rich_click.rich_click import _get_rich_console 5 | from rich_click.rich_group import RichGroup 6 | 7 | 8 | class ListParamType(click.ParamType): 9 | """Custom click param type to convert comma-separated strings to lists.""" 10 | name = "list" 11 | 12 | def convert(self, value, param, ctx): 13 | if value is None: 14 | return [] 15 | if isinstance(value, list): 16 | return value 17 | return [v.strip() for v in value.split(',') if v.strip()] 18 | 19 | 20 | CLICK_LIST = ListParamType() 21 | 22 | 23 | class OrderedGroup(RichGroup): 24 | def __init__(self, name=None, commands=None, **attrs): 25 | super(OrderedGroup, self).__init__(name, commands, **attrs) 26 | self.commands = commands or OrderedDict() 27 | 28 | def command(self, *args, **kwargs): 29 | """Behaves the same as `click.Group.command()` but supports aliases. 30 | """ 31 | def decorator(f): 32 | aliases = kwargs.pop("aliases", None) 33 | if aliases: 34 | max_width = _get_rich_console().width 35 | aliases_str = ', '.join(f'[bold cyan]{alias}[/]' for alias in aliases) 36 | padding = max_width // 4 37 | 38 | name = kwargs.pop("name", None) 39 | if not name: 40 | raise click.UsageError("`name` command argument is required when using aliases.") 41 | 42 | f.__doc__ = f.__doc__ or '\0'.ljust(padding+1) 43 | f.__doc__ = f'{f.__doc__:<{padding}}[dim](aliases)[/] {aliases_str}' 44 | base_command = super(OrderedGroup, self).command( 45 | name, *args, **kwargs 46 | )(f) 47 | for alias in aliases: 48 | cmd = super(OrderedGroup, self).command(alias, *args, hidden=True, **kwargs)(f) 49 | cmd.help = f"Alias for '{name}'.\n\n{cmd.help}" 50 | cmd.params = base_command.params 51 | 52 | else: 53 | cmd = super(OrderedGroup, self).command(*args, **kwargs)(f) 54 | 55 | return cmd 56 | return decorator 57 | 58 | def group(self, *args, **kwargs): 59 | """Behaves the same as `click.Group.group()` but supports aliases. 60 | """ 61 | def decorator(f): 62 | aliases = kwargs.pop('aliases', []) 63 | aliased_group = [] 64 | if aliases: 65 | max_width = _get_rich_console().width 66 | aliases_str = ', '.join(f'[bold cyan]{alias}[/]' for alias in aliases) 67 | padding = max_width // 4 68 | f.__doc__ = f.__doc__ or '\0'.ljust(padding+1) 69 | f.__doc__ = f'{f.__doc__:<{padding}}[dim](aliases)[/] {aliases_str}' 70 | for alias in aliases: 71 | grp = super(OrderedGroup, self).group( 72 | alias, *args, hidden=True, **kwargs)(f) 73 | aliased_group.append(grp) 74 | 75 | # create the main group 76 | grp = super(OrderedGroup, self).group(*args, **kwargs)(f) 77 | grp.aliases = aliases 78 | 79 | # for all of the aliased groups, share the main group commands 80 | for aliased in aliased_group: 81 | aliased.commands = grp.commands 82 | 83 | return grp 84 | return decorator 85 | 86 | def list_commands(self, ctx): 87 | return self.commands 88 | -------------------------------------------------------------------------------- /secator/configs/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/freelabz/secator/8b8cafb6472b15cc9938921ce7ac6e0fc45df11c/secator/configs/__init__.py -------------------------------------------------------------------------------- /secator/configs/profiles/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/freelabz/secator/8b8cafb6472b15cc9938921ce7ac6e0fc45df11c/secator/configs/profiles/__init__.py -------------------------------------------------------------------------------- /secator/configs/profiles/aggressive.yaml: -------------------------------------------------------------------------------- 1 | type: profile 2 | name: aggressive 3 | description: "Internal networks or time-sensitive scans" 4 | opts: 5 | rate_limit: 10000 6 | delay: 0 7 | timeout: 1 8 | retries: 1 9 | -------------------------------------------------------------------------------- /secator/configs/profiles/http_headless.yaml: -------------------------------------------------------------------------------- 1 | type: profile 2 | name: http_headless 3 | description: "Headless HTTP requests" 4 | opts: 5 | headless: true 6 | system_chrome: true 7 | -------------------------------------------------------------------------------- /secator/configs/profiles/http_record.yaml: -------------------------------------------------------------------------------- 1 | type: profile 2 | name: http_record 3 | description: "Record HTTP requests / responses and take screenshots" 4 | opts: 5 | screenshot: true 6 | store_responses: true 7 | -------------------------------------------------------------------------------- /secator/configs/profiles/insane.yaml: -------------------------------------------------------------------------------- 1 | type: profile 2 | name: insane 3 | description: "Local LAN scanning or stress scanning" 4 | opts: 5 | rate_limit: 100000 6 | delay: 0 7 | timeout: 1 8 | retries: 0 9 | -------------------------------------------------------------------------------- /secator/configs/profiles/paranoid.yaml: -------------------------------------------------------------------------------- 1 | type: profile 2 | name: paranoid 3 | description: "Maximum stealth" 4 | opts: 5 | rate_limit: 5 6 | delay: 5 7 | timeout: 15 8 | retries: 5 9 | -------------------------------------------------------------------------------- /secator/configs/profiles/polite.yaml: -------------------------------------------------------------------------------- 1 | type: profile 2 | name: polite 3 | description: "Avoid overloading network" 4 | opts: 5 | rate_limit: 100 6 | delay: 0 7 | timeout: 10 8 | retries: 5 9 | -------------------------------------------------------------------------------- /secator/configs/profiles/sneaky.yaml: -------------------------------------------------------------------------------- 1 | type: profile 2 | name: sneaky 3 | description: "IDS/IPS evasion, sensitive networks" 4 | opts: 5 | rate_limit: 10 6 | delay: 2 7 | timeout: 15 8 | retries: 5 9 | -------------------------------------------------------------------------------- /secator/configs/profiles/tor.yaml: -------------------------------------------------------------------------------- 1 | type: profile 2 | name: tor 3 | description: "Anonymous scan using Tor network" 4 | opts: 5 | proxy: auto 6 | -------------------------------------------------------------------------------- /secator/configs/scans/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/freelabz/secator/8b8cafb6472b15cc9938921ce7ac6e0fc45df11c/secator/configs/scans/__init__.py -------------------------------------------------------------------------------- /secator/configs/scans/domain.yaml: -------------------------------------------------------------------------------- 1 | type: scan 2 | name: domain 3 | description: Domain scan 4 | profile: default 5 | input_types: 6 | - host 7 | workflows: 8 | subdomain_recon: 9 | host_recon: 10 | targets_: 11 | - type: target 12 | field: name 13 | condition: target.type == 'host' 14 | - subdomain.host 15 | url_crawl: 16 | targets_: 17 | - url.url 18 | url_vuln: 19 | targets_: 20 | - url.url 21 | -------------------------------------------------------------------------------- /secator/configs/scans/host.yaml: -------------------------------------------------------------------------------- 1 | type: scan 2 | name: host 3 | description: Host scan 4 | profile: default 5 | input_types: 6 | - host 7 | workflows: 8 | host_recon: 9 | url_crawl: 10 | targets_: 11 | - url.url 12 | url_vuln: 13 | targets_: 14 | - url.url 15 | -------------------------------------------------------------------------------- /secator/configs/scans/network.yaml: -------------------------------------------------------------------------------- 1 | type: scan 2 | name: network 3 | description: Internal network scan 4 | profile: default 5 | input_types: 6 | - cidr_range 7 | workflows: 8 | cidr_recon: 9 | url_crawl: 10 | targets_: 11 | - url.url 12 | url_vuln: 13 | targets_: 14 | - url.url 15 | -------------------------------------------------------------------------------- /secator/configs/scans/subdomain.yaml: -------------------------------------------------------------------------------- 1 | type: scan 2 | name: subdomain 3 | description: Subdomain scan 4 | profile: default 5 | input_types: 6 | - host 7 | workflows: 8 | subdomain_recon: 9 | host_recon: 10 | targets_: 11 | - type: target 12 | field: name 13 | condition: target.type == 'host' 14 | - subdomain.host 15 | url_crawl: 16 | targets_: 17 | - url.url 18 | url_vuln: 19 | targets_: 20 | - url.url 21 | -------------------------------------------------------------------------------- /secator/configs/scans/url.yaml: -------------------------------------------------------------------------------- 1 | type: scan 2 | name: url 3 | description: URL scan 4 | profile: default 5 | input_types: 6 | - url 7 | workflows: 8 | url_crawl: 9 | url_vuln: 10 | targets_: 11 | - url.url 12 | -------------------------------------------------------------------------------- /secator/configs/workflows/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/freelabz/secator/8b8cafb6472b15cc9938921ce7ac6e0fc45df11c/secator/configs/workflows/__init__.py -------------------------------------------------------------------------------- /secator/configs/workflows/cidr_recon.yaml: -------------------------------------------------------------------------------- 1 | type: workflow 2 | name: cidr_recon 3 | alias: cidrrec 4 | description: Local network recon 5 | tags: [recon, cidr, network] 6 | input_types: 7 | - cidr_range 8 | - ip 9 | 10 | tasks: 11 | mapcidr: 12 | description: Find CIDR range IPs 13 | 14 | fping: 15 | description: Check for alive IPs 16 | targets_: ip.ip 17 | 18 | nmap: 19 | description: Scan alive IPs' ports 20 | targets_: 21 | - type: ip 22 | field: ip 23 | condition: item.alive 24 | 25 | httpx: 26 | description: Probe HTTP services on open ports 27 | tech_detect: True 28 | targets_: 29 | - type: port 30 | field: '{ip}:{port}' 31 | -------------------------------------------------------------------------------- /secator/configs/workflows/code_scan.yaml: -------------------------------------------------------------------------------- 1 | type: workflow 2 | name: code_scan 3 | alias: codescan 4 | description: Code vulnerability scan 5 | tags: [vuln, code] 6 | input_types: 7 | - path 8 | - docker_image_name 9 | tasks: 10 | grype: 11 | description: Run code vulnerability scan 12 | -------------------------------------------------------------------------------- /secator/configs/workflows/host_recon.yaml: -------------------------------------------------------------------------------- 1 | type: workflow 2 | name: host_recon 3 | alias: hostrec 4 | description: Host recon 5 | tags: [recon, network, http] 6 | input_types: 7 | - host 8 | - cidr_range 9 | 10 | options: 11 | nuclei: 12 | is_flag: True 13 | default: False 14 | help: Run nuclei scans (slow) 15 | 16 | full: 17 | is_flag: True 18 | default: False 19 | help: "Run full port scan (default: top 100 ports)" 20 | 21 | tasks: 22 | naabu: 23 | description: Find open ports 24 | if: opts.ports or not opts.full 25 | 26 | naabu/full: 27 | description: Find open ports 28 | ports: "-" # scan all ports 29 | if: opts.full and not opts.ports 30 | 31 | nmap: 32 | description: Search for vulnerabilities on open ports 33 | version_detection: True 34 | script: vulners 35 | targets_: 36 | - port.host 37 | ports_: 38 | - type: port 39 | field: port 40 | condition: port.host in targets 41 | 42 | _group/1: 43 | httpx: 44 | description: Probe HTTP services on open ports 45 | tech_detect: True 46 | targets_: 47 | - type: port 48 | field: '{host}:{port}' 49 | 50 | searchsploit: 51 | description: Search for related exploits 52 | targets_: 53 | - type: port 54 | field: service_name 55 | condition: len(item.service_name.split('/')) > 1 56 | 57 | _group/2: 58 | nuclei/network: 59 | description: Scan network and SSL vulnerabilities 60 | tags: [network, ssl] 61 | exclude_tags: [] 62 | if: opts.nuclei 63 | 64 | nuclei/url: 65 | description: Search for vulnerabilities on alive HTTP services 66 | exclude_tags: [network, ssl, file, dns, osint, token-spray, headers] 67 | targets_: 68 | - type: url 69 | field: url 70 | condition: item.status_code != 0 71 | if: opts.nuclei 72 | -------------------------------------------------------------------------------- /secator/configs/workflows/subdomain_recon.yaml: -------------------------------------------------------------------------------- 1 | type: workflow 2 | name: subdomain_recon 3 | alias: subrec 4 | description: Subdomain discovery 5 | tags: [recon, dns, takeovers] 6 | input_types: 7 | - host 8 | 9 | options: 10 | probe_http: 11 | is_flag: True 12 | help: Probe domain and subdomains (HTTP) 13 | default: True 14 | 15 | probe_dns: 16 | is_flag: True 17 | help: Probe domain and subdomains (DNS) 18 | default: False 19 | 20 | brute_http: 21 | is_flag: True 22 | help: Bruteforce subdomains with HTTP Host header (ffuf) 23 | short: bhttp 24 | default: False 25 | 26 | brute_dns: 27 | is_flag: True 28 | help: Bruteforce subdomains with DNS queries (dnsx) 29 | short: bdns 30 | default: False 31 | 32 | tasks: 33 | _group/1: 34 | subfinder: 35 | description: List subdomains (passive) 36 | 37 | dnsx/brute: 38 | description: Bruteforce subdomains (DNS) 39 | subdomains_only: True 40 | wordlist: combined_subdomains 41 | if: opts.brute_dns 42 | 43 | httpx: 44 | description: Run HTTP probe on domain 45 | tech_detect: True 46 | targets_: 47 | - target.name 48 | if: opts.probe_http or opts.brute_http 49 | 50 | _group/2: 51 | dnsx/probe: 52 | description: Probe DNS records on subdomains 53 | subdomains_only: True 54 | wordlist: False 55 | targets_: 56 | - subdomain.host 57 | if: opts.probe_dns 58 | 59 | nuclei: 60 | description: Check for subdomain takeovers 61 | targets_: 62 | - target.name 63 | - subdomain.host 64 | tags: [takeover] 65 | 66 | ffuf: 67 | description: Bruteforce subdomains (Host header) 68 | header: "User-Agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/100.0.0.0 Safari/537.36" 69 | fuzz_host_header: True 70 | auto_calibration: True 71 | wordlist: combined_subdomains 72 | stop_on_error: True 73 | targets_: 74 | - type: url 75 | field: url 76 | if: opts.brute_http 77 | 78 | httpx: 79 | description: Run HTTP probes on subdomains 80 | tech_detect: True 81 | targets_: 82 | - target.name 83 | - subdomain.host 84 | if: opts.probe_http 85 | -------------------------------------------------------------------------------- /secator/configs/workflows/url_bypass.yaml: -------------------------------------------------------------------------------- 1 | type: workflow 2 | name: url_bypass 3 | alias: urlbypass 4 | description: Try bypass techniques for 4xx URLs 5 | tags: [http, crawl] 6 | input_types: 7 | - url 8 | tasks: 9 | bup: 10 | description: Bypass 4xx 11 | -------------------------------------------------------------------------------- /secator/configs/workflows/url_crawl.yaml: -------------------------------------------------------------------------------- 1 | type: workflow 2 | name: url_crawl 3 | alias: urlcrawl 4 | description: URL crawl (fast) 5 | tags: [http, crawl] 6 | input_types: 7 | - url 8 | 9 | options: 10 | crawlers: 11 | type: list 12 | help: Crawlers to use (katana, gospider) 13 | default: ['gau', 'katana'] 14 | internal: True 15 | 16 | hunt_patterns: 17 | is_flag: True 18 | help: Hunt patterns in HTTP responses (cariddi) 19 | default: True 20 | short: hp 21 | 22 | default_options: 23 | match_codes: 200,204,301,302,307,401,403,405,500 24 | 25 | tasks: 26 | _group/crawl: 27 | gau: 28 | description: Search for passive URLs 29 | if: "'gau' in opts.crawlers" 30 | 31 | katana: 32 | description: Crawl URLs 33 | if: "'katana' in opts.crawlers" 34 | 35 | gospider: 36 | description: Crawl URLs 37 | if: "'gospider' in opts.crawlers" 38 | 39 | cariddi: 40 | description: Hunt URLs patterns 41 | info: True 42 | secrets: True 43 | errors: True 44 | juicy_extensions: 1 45 | juicy_endpoints: True 46 | targets_: 47 | - target.name 48 | - url.url 49 | if: opts.hunt_patterns 50 | 51 | httpx: 52 | description: Run HTTP probes on crawled URLs 53 | tech_detect: True 54 | targets_: 55 | - target.name 56 | - type: url 57 | field: url 58 | condition: url.status_code != 0 59 | -------------------------------------------------------------------------------- /secator/configs/workflows/url_dirsearch.yaml: -------------------------------------------------------------------------------- 1 | type: workflow 2 | name: url_dirsearch 3 | alias: dirfind 4 | description: URL directory search 5 | tags: [http, dir] 6 | input_types: 7 | - url 8 | 9 | tasks: 10 | ffuf: 11 | description: Search for HTTP directories 12 | wordlist: directory_list_small 13 | targets_: 14 | - type: target 15 | field: '{name}/FUZZ' 16 | 17 | cariddi: 18 | description: Crawl HTTP directories for content 19 | info: True 20 | secrets: True 21 | errors: True 22 | juicy_extensions: 1 23 | juicy_endpoints: True 24 | targets_: 25 | - target.name 26 | - url.url 27 | 28 | httpx: 29 | description: Run HTTP probes on crawled URLs 30 | tech_detect: True 31 | targets_: 32 | - type: url 33 | field: url 34 | condition: item.status_code == 0 35 | -------------------------------------------------------------------------------- /secator/configs/workflows/url_fuzz.yaml: -------------------------------------------------------------------------------- 1 | type: workflow 2 | name: url_fuzz 3 | alias: urlfuzz 4 | description: URL fuzz (slow) 5 | tags: [http, fuzz] 6 | input_types: 7 | - url 8 | 9 | default_options: 10 | match_codes: 200,204,301,302,307,401,403,405,500 11 | 12 | options: 13 | fuzzers: 14 | type: list 15 | required: True 16 | help: Fuzzers to use (dirsearch, feroxbuster, ffuf) 17 | default: ['ffuf'] 18 | 19 | tasks: 20 | _group/fuzz: 21 | dirsearch: 22 | description: Fuzz URLs 23 | if: "'dirsearch' in opts.fuzzers" 24 | 25 | feroxbuster: 26 | description: Fuzz URLs 27 | if: "'feroxbuster' in opts.fuzzers" 28 | 29 | ffuf: 30 | description: Fuzz URLs 31 | if: "'ffuf' in opts.fuzzers" 32 | targets_: 33 | - type: target 34 | field: '{name}/FUZZ' 35 | 36 | httpx: 37 | description: Run HTTP probes on crawled URLs 38 | tech_detect: True 39 | targets_: 40 | type: url 41 | field: url 42 | condition: url.status_code != 0 or opts.screenshot or opts.headless 43 | # enrich: true # TODO: add enrich capabilities 44 | -------------------------------------------------------------------------------- /secator/configs/workflows/url_params_fuzz.yaml: -------------------------------------------------------------------------------- 1 | type: workflow 2 | name: url_params_fuzz 3 | alias: url_params_fuzz 4 | description: Extract parameters from an URL and fuzz them 5 | tags: [http, fuzz] 6 | input_types: 7 | - url 8 | 9 | tasks: 10 | arjun: 11 | description: Extract parameters from URLs 12 | targets_: 13 | - type: target 14 | field: name 15 | condition: "'?' not in target.name" 16 | 17 | ffuf: 18 | description: Fuzz URL params 19 | wordlist: https://raw.githubusercontent.com/danielmiessler/SecLists/refs/heads/master/Discovery/Web-Content/burp-parameter-names.txt 20 | auto_calibration: true 21 | follow_redirect: true 22 | targets_: 23 | - type: url 24 | field: url 25 | condition: item._source.startswith('arjun') 26 | 27 | httpx: 28 | description: Probe fuzzed URLs 29 | targets_: 30 | - type: url 31 | field: url 32 | condition: item._source.startswith('ffuf') 33 | -------------------------------------------------------------------------------- /secator/configs/workflows/url_vuln.yaml: -------------------------------------------------------------------------------- 1 | type: workflow 2 | name: url_vuln 3 | alias: url_vuln 4 | description: URL vulnerability scan (gf, dalfox) 5 | tags: [http, vulnerability] 6 | input_types: 7 | - url 8 | 9 | options: 10 | nuclei: 11 | is_flag: True 12 | default: False 13 | help: Run nuclei on tagged URLs (slow) 14 | 15 | tasks: 16 | _group/pattern_analysis: 17 | gf/xss: 18 | description: Hunt XSS params 19 | pattern: xss 20 | 21 | gf/lfi: 22 | description: Hunt LFI params 23 | pattern: lfi 24 | 25 | gf/ssrf: 26 | description: Hunt SSRF params 27 | pattern: ssrf 28 | 29 | gf/rce: 30 | description: Hunt RCE params 31 | pattern: rce 32 | 33 | gf/interestingparams: 34 | description: Hunt interest params 35 | pattern: interestingparams 36 | 37 | gf/idor: 38 | description: Hunt Idor params 39 | pattern: idor 40 | 41 | gf/debug_logic: 42 | description: Hunt debug params 43 | pattern: debug_logic 44 | 45 | _group/vuln_scan: 46 | dalfox: 47 | description: Attack XSS vulnerabilities 48 | targets_: 49 | - type: tag 50 | field: match 51 | condition: item._source.startswith("gf") 52 | 53 | nuclei: 54 | description: Search for HTTP vulns 55 | exclude_tags: [network, ssl, file, dns, osint, token-spray, headers] 56 | targets_: 57 | - type: target 58 | field: name 59 | - type: tag 60 | field: match 61 | condition: item._source.startswith("gf") 62 | if: opts.nuclei 63 | 64 | # TODO: Add support for SQLMap 65 | # sqlmap: 66 | # description: Attack SQLI vulnerabilities 67 | # targets_: 68 | # - type: tag 69 | # field: match 70 | # condition: item.name in ['sqli'] 71 | 72 | # TODO: Make this work, need transform functions to replace a parameter fetched dynamically by the keyword 'FUZZ' 73 | # ffuf: 74 | # description: Attack LFI vulnerabilities 75 | # targets_: 76 | # - type: tag 77 | # field: match 78 | # transform: 79 | # qsreplace: FUZZ 80 | # condition: item.name in ['lfi'] 81 | -------------------------------------------------------------------------------- /secator/configs/workflows/user_hunt.yaml: -------------------------------------------------------------------------------- 1 | type: workflow 2 | name: user_hunt 3 | alias: userhunt 4 | description: User account search 5 | tags: [user_account] 6 | input_types: 7 | - slug 8 | - string 9 | 10 | tasks: 11 | maigret: 12 | description: Hunt user accounts 13 | -------------------------------------------------------------------------------- /secator/configs/workflows/wordpress.yaml: -------------------------------------------------------------------------------- 1 | type: workflow 2 | name: wordpress 3 | alias: wordpress 4 | description: Wordpress vulnerability scan 5 | tags: [http, wordpress, vulnerability] 6 | input_types: 7 | - url 8 | 9 | tasks: 10 | _group/hunt_wordpress: 11 | nuclei: 12 | description: Nuclei Wordpress scan 13 | tags: [wordpress] 14 | 15 | wpscan: 16 | description: WPScan 17 | 18 | wpprobe: 19 | description: WPProbe 20 | -------------------------------------------------------------------------------- /secator/decorators.py: -------------------------------------------------------------------------------- 1 | 2 | 3 | def task(): 4 | def decorator(cls): 5 | cls.__task__ = True 6 | return cls 7 | return decorator 8 | -------------------------------------------------------------------------------- /secator/exporters/__init__.py: -------------------------------------------------------------------------------- 1 | __all__ = [ 2 | 'ConsoleExporter', 3 | 'CsvExporter', 4 | 'GdriveExporter', 5 | 'JsonExporter', 6 | 'TableExporter', 7 | 'TxtExporter' 8 | ] 9 | from secator.exporters.console import ConsoleExporter 10 | from secator.exporters.csv import CsvExporter 11 | from secator.exporters.gdrive import GdriveExporter 12 | from secator.exporters.json import JsonExporter 13 | from secator.exporters.table import TableExporter 14 | from secator.exporters.txt import TxtExporter 15 | -------------------------------------------------------------------------------- /secator/exporters/_base.py: -------------------------------------------------------------------------------- 1 | class Exporter: 2 | def __init__(self, report): 3 | self.report = report 4 | -------------------------------------------------------------------------------- /secator/exporters/console.py: -------------------------------------------------------------------------------- 1 | from secator.exporters._base import Exporter 2 | from secator.rich import console_stdout 3 | 4 | 5 | class ConsoleExporter(Exporter): 6 | def send(self): 7 | results = self.report.data['results'] 8 | for items in results.values(): 9 | for item in items: 10 | console_stdout.print(item) 11 | -------------------------------------------------------------------------------- /secator/exporters/csv.py: -------------------------------------------------------------------------------- 1 | import csv as _csv 2 | 3 | from dataclasses import fields 4 | 5 | from secator.exporters._base import Exporter 6 | from secator.rich import console 7 | from secator.output_types import FINDING_TYPES 8 | from secator.output_types import Info 9 | 10 | 11 | class CsvExporter(Exporter): 12 | def send(self): 13 | results = self.report.data['results'] 14 | if not results: 15 | return 16 | csv_paths = [] 17 | 18 | for output_type, items in results.items(): 19 | output_cls = [o for o in FINDING_TYPES if o._type == output_type][0] 20 | keys = [o.name for o in fields(output_cls)] 21 | items = [i.toDict() for i in items] 22 | if not items: 23 | continue 24 | csv_path = f'{self.report.output_folder}/report_{output_type}.csv' 25 | csv_paths.append(csv_path) 26 | with open(csv_path, 'w', newline='') as output_file: 27 | dict_writer = _csv.DictWriter(output_file, keys) 28 | dict_writer.writeheader() 29 | dict_writer.writerows(items) 30 | 31 | if len(csv_paths) == 1: 32 | csv_paths_str = csv_paths[0] 33 | else: 34 | csv_paths_str = '\n • ' + '\n • '.join(csv_paths) 35 | 36 | info = Info(message=f'Saved CSV reports to {csv_paths_str}') 37 | console.print(info) 38 | -------------------------------------------------------------------------------- /secator/exporters/json.py: -------------------------------------------------------------------------------- 1 | from secator.exporters._base import Exporter 2 | from secator.output_types import Info 3 | from secator.rich import console 4 | from secator.serializers.dataclass import dumps_dataclass 5 | 6 | 7 | class JsonExporter(Exporter): 8 | def send(self): 9 | json_path = f'{self.report.output_folder}/report.json' 10 | 11 | # Save JSON report to file 12 | with open(json_path, 'w') as f: 13 | f.write(dumps_dataclass(self.report.data, indent=2)) 14 | 15 | info = Info(f'Saved JSON report to {json_path}') 16 | console.print(info) 17 | -------------------------------------------------------------------------------- /secator/exporters/table.py: -------------------------------------------------------------------------------- 1 | from rich.markdown import Markdown 2 | 3 | from secator.exporters._base import Exporter 4 | from secator.output_types import OutputType 5 | from secator.rich import build_table, console 6 | from secator.utils import pluralize 7 | 8 | 9 | class TableExporter(Exporter): 10 | def send(self): 11 | results = self.report.data['results'] 12 | if not results: 13 | return 14 | title = self.report.title 15 | _print = console.print 16 | _print() 17 | if title: 18 | title = ' '.join(title.capitalize().split('_')) + ' results' 19 | h1 = Markdown(f'# {title}') 20 | _print(h1, style='bold magenta', width=50) 21 | _print() 22 | for output_type, items in results.items(): 23 | if output_type == 'progress': 24 | continue 25 | if items: 26 | is_output_type = isinstance(items[0], OutputType) 27 | output_fields = items[0]._table_fields if is_output_type else None 28 | sort_by = items[0]._sort_by if is_output_type else [] 29 | _table = build_table( 30 | items, 31 | output_fields=output_fields, 32 | sort_by=sort_by) 33 | title = pluralize(items[0]._type).upper() if is_output_type else 'Results' 34 | _print(f':wrench: {title}', style='bold gold3', justify='left') 35 | _print(_table) 36 | _print() 37 | -------------------------------------------------------------------------------- /secator/exporters/txt.py: -------------------------------------------------------------------------------- 1 | from secator.exporters._base import Exporter 2 | from secator.output_types import Info 3 | from secator.rich import console 4 | 5 | 6 | class TxtExporter(Exporter): 7 | def send(self): 8 | results = self.report.data['results'] 9 | if not results: 10 | return 11 | txt_paths = [] 12 | 13 | for output_type, items in results.items(): 14 | items = list(set(str(i) for i in items)) 15 | if not items: 16 | continue 17 | txt_path = f'{self.report.output_folder}/report_{output_type}.txt' 18 | with open(txt_path, 'w') as f: 19 | f.write('\n'.join(items)) 20 | txt_paths.append(txt_path) 21 | 22 | if len(txt_paths) == 1: 23 | txt_paths_str = txt_paths[0] 24 | else: 25 | txt_paths_str = '\n • ' + '\n • '.join(txt_paths) 26 | 27 | info = Info(f'Saved TXT reports to {txt_paths_str}') 28 | console.print(info) 29 | -------------------------------------------------------------------------------- /secator/hooks/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/freelabz/secator/8b8cafb6472b15cc9938921ce7ac6e0fc45df11c/secator/hooks/__init__.py -------------------------------------------------------------------------------- /secator/hooks/gcs.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | from time import time 3 | 4 | from google.cloud import storage 5 | 6 | from secator.config import CONFIG 7 | from secator.runners import Task 8 | from secator.thread import Thread 9 | from secator.utils import debug 10 | 11 | 12 | GCS_BUCKET_NAME = CONFIG.addons.gcs.bucket_name 13 | ITEMS_TO_SEND = { 14 | 'url': ['screenshot_path'] 15 | } 16 | 17 | 18 | def process_item(self, item): 19 | if item._type not in ITEMS_TO_SEND.keys(): 20 | return item 21 | if not GCS_BUCKET_NAME: 22 | debug('skipped since addons.gcs.bucket_name is empty.', sub='hooks.gcs') 23 | return item 24 | to_send = ITEMS_TO_SEND[item._type] 25 | for k, v in item.toDict().items(): 26 | if k in to_send and v: 27 | path = Path(v) 28 | if not path.exists(): 29 | continue 30 | ext = path.suffix 31 | blob_name = f'{item._uuid}_{k}{ext}' 32 | t = Thread(target=upload_blob, args=(GCS_BUCKET_NAME, v, blob_name)) 33 | t.start() 34 | self.threads.append(t) 35 | setattr(item, k, f'gs://{GCS_BUCKET_NAME}/{blob_name}') 36 | return item 37 | 38 | 39 | def upload_blob(bucket_name, source_file_name, destination_blob_name): 40 | """Uploads a file to the bucket.""" 41 | start_time = time() 42 | storage_client = storage.Client() 43 | bucket = storage_client.bucket(bucket_name) 44 | blob = bucket.blob(destination_blob_name) 45 | blob.upload_from_filename(source_file_name) 46 | end_time = time() 47 | elapsed = end_time - start_time 48 | debug(f'in {elapsed:.4f}s', obj={'blob': 'CREATED', 'blob_name': destination_blob_name, 'bucket': bucket_name}, obj_after=False, sub='hooks.gcs', verbose=True) # noqa: E501 49 | 50 | 51 | HOOKS = { 52 | Task: {'on_item': [process_item]} 53 | } 54 | -------------------------------------------------------------------------------- /secator/output_types/__init__.py: -------------------------------------------------------------------------------- 1 | __all__ = [ 2 | 'Error', 3 | 'OutputType', 4 | 'Info', 5 | 'Ip', 6 | 'Port', 7 | 'Progress', 8 | 'Record', 9 | 'Stat', 10 | 'State', 11 | 'Subdomain', 12 | 'Url', 13 | 'UserAccount', 14 | 'Vulnerability', 15 | 'Warning', 16 | ] 17 | from secator.output_types._base import OutputType 18 | from secator.output_types.progress import Progress 19 | from secator.output_types.ip import Ip 20 | from secator.output_types.exploit import Exploit 21 | from secator.output_types.port import Port 22 | from secator.output_types.subdomain import Subdomain 23 | from secator.output_types.tag import Tag 24 | from secator.output_types.target import Target 25 | from secator.output_types.url import Url 26 | from secator.output_types.user_account import UserAccount 27 | from secator.output_types.vulnerability import Vulnerability 28 | from secator.output_types.record import Record 29 | from secator.output_types.certificate import Certificate 30 | from secator.output_types.info import Info 31 | from secator.output_types.warning import Warning 32 | from secator.output_types.error import Error 33 | from secator.output_types.stat import Stat 34 | from secator.output_types.state import State 35 | 36 | EXECUTION_TYPES = [ 37 | Target, Progress, Info, Warning, Error, State 38 | ] 39 | STAT_TYPES = [ 40 | Stat 41 | ] 42 | FINDING_TYPES = [ 43 | Subdomain, Ip, Port, Url, Tag, Exploit, UserAccount, Vulnerability, Certificate 44 | ] 45 | OUTPUT_TYPES = FINDING_TYPES + EXECUTION_TYPES + STAT_TYPES 46 | -------------------------------------------------------------------------------- /secator/output_types/_base.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import re 3 | from dataclasses import _MISSING_TYPE, dataclass, fields 4 | from secator.definitions import DEBUG 5 | from secator.rich import console 6 | 7 | logger = logging.getLogger(__name__) 8 | 9 | 10 | @dataclass 11 | class OutputType: 12 | _table_fields = [] 13 | _sort_by = () 14 | 15 | def __str__(self): 16 | return self.__class__.__name__ 17 | 18 | def __gt__(self, other): 19 | if not self.__eq__(other): 20 | return False 21 | 22 | # Point-based system based on number of non-empty extra-data present. 23 | # In this configuration, a > b if a == b AND a has more non-empty fields than b 24 | # extra_fields = [f for f in fields(self) if not f.compare] 25 | # points1 = 0 26 | # points2 = 0 27 | # for field in extra_fields: 28 | # v1 = getattr(self, field.name) 29 | # v2 = getattr(other, field.name) 30 | # if v1 and not v2: 31 | # points1 += 1 32 | # elif v2 and not v1: 33 | # points2 += 1 34 | # if points1 > points2: 35 | # return True 36 | 37 | # Timestamp-based system: return newest object 38 | return self._timestamp > other._timestamp 39 | 40 | def __ge__(self, other): 41 | return self == other 42 | 43 | def __lt__(self, other): 44 | return other > self 45 | 46 | def __le__(self, other): 47 | return self == other 48 | 49 | def __post_init__(self): 50 | """Initialize default fields to their proper types.""" 51 | for field in fields(self): 52 | default_factory = field.default_factory 53 | default = field.default 54 | if getattr(self, field.name) is None: 55 | if not isinstance(default, _MISSING_TYPE): 56 | setattr(self, field.name, field.default) 57 | elif not isinstance(default_factory, _MISSING_TYPE): 58 | setattr(self, field.name, default_factory()) 59 | 60 | @classmethod 61 | def load(cls, item, output_map={}): 62 | new_item = {} 63 | 64 | # Check for explicit _type keys 65 | _type = item.get('_type') 66 | if _type and _type != cls.get_name(): 67 | raise TypeError(f'Item has different _type set: {_type}') 68 | 69 | for field in fields(cls): 70 | key = field.name 71 | if key in output_map: 72 | mapped_key = output_map[key] 73 | if callable(mapped_key): 74 | try: 75 | mapped_val = mapped_key(item) 76 | except Exception as e: 77 | mapped_val = None 78 | if DEBUG > 1: 79 | console.print_exception(show_locals=True) 80 | raise TypeError( 81 | f'Fail to transform value for "{key}" using output_map function. Exception: ' 82 | f'{type(e).__name__}: {str(e)}') 83 | else: 84 | mapped_val = item.get(mapped_key) 85 | new_item[key] = mapped_val 86 | elif key in item: 87 | new_item[key] = item[key] 88 | 89 | # All values None, raise an error 90 | if all(val is None for val in new_item.values()): 91 | raise TypeError(f'Item does not match {cls} schema') 92 | 93 | new_item['_type'] = cls.get_name() 94 | return cls(**new_item) 95 | 96 | @classmethod 97 | def get_name(cls): 98 | return re.sub(r'(? str: 41 | return self.subject_cn 42 | 43 | def is_expired(self) -> bool: 44 | if self.not_after: 45 | return self.not_after < datetime.now() 46 | return True 47 | 48 | def is_expired_soon(self, months: int = 1) -> bool: 49 | if self.not_after: 50 | return self.not_after < datetime.now() + timedelta(days=months * 30) 51 | return True 52 | 53 | @staticmethod 54 | def format_date(date): 55 | if date: 56 | return date.strftime("%m/%d/%Y") 57 | return '?' 58 | 59 | def __repr__(self) -> str: 60 | s = f'📜 [bold white]{self.host}[/]' 61 | s += f' [cyan]{self.status}[/]' 62 | s += rf' [white]\[fingerprint={self.fingerprint_sha256[:10]}][/]' 63 | if self.subject_cn: 64 | s += rf' [white]\[cn={self.subject_cn}][/]' 65 | if self.subject_an: 66 | s += rf' [white]\[an={", ".join(self.subject_an)}][/]' 67 | if self.issuer: 68 | s += rf' [white]\[issuer={self.issuer}][/]' 69 | elif self.issuer_cn: 70 | s += rf' [white]\[issuer_cn={self.issuer_cn}][/]' 71 | expiry_date = Certificate.format_date(self.not_after) 72 | if self.is_expired(): 73 | s += f' [red]expired since {expiry_date}[/red]' 74 | elif self.is_expired_soon(months=2): 75 | s += f' [yellow]expires <2 months[/yellow], [yellow]valid until {expiry_date}[/yellow]' 76 | else: 77 | s += f' [green]not expired[/green], [yellow]valid until {expiry_date}[/yellow]' 78 | return rich_to_ansi(s) 79 | -------------------------------------------------------------------------------- /secator/output_types/error.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass, field 2 | import time 3 | from secator.output_types import OutputType 4 | from secator.utils import rich_to_ansi, traceback_as_string, rich_escape as _s 5 | 6 | 7 | @dataclass 8 | class Error(OutputType): 9 | message: str 10 | traceback: str = field(default='', compare=False) 11 | traceback_title: str = field(default='', compare=False) 12 | _source: str = field(default='', repr=True) 13 | _type: str = field(default='error', repr=True) 14 | _timestamp: int = field(default_factory=lambda: time.time(), compare=False) 15 | _uuid: str = field(default='', repr=True, compare=False) 16 | _context: dict = field(default_factory=dict, repr=True, compare=False) 17 | _duplicate: bool = field(default=False, repr=True, compare=False) 18 | _related: list = field(default_factory=list, compare=False) 19 | 20 | _table_fields = ['message', 'traceback'] 21 | _sort_by = ('_timestamp',) 22 | 23 | def from_exception(e, **kwargs): 24 | errtype = type(e).__name__ 25 | if str(e): 26 | errtype += f': {str(e)}' 27 | message = kwargs.pop('message', errtype) 28 | traceback = traceback_as_string(e) if errtype not in ['KeyboardInterrupt', 'GreenletExit'] else '' 29 | error = Error(message=_s(message), traceback=traceback, **kwargs) 30 | return error 31 | 32 | def __str__(self): 33 | return self.message 34 | 35 | def __repr__(self): 36 | s = rf"\[[bold red]ERR[/]] {self.message}" 37 | if self.traceback: 38 | traceback_pretty = ' ' + _s(self.traceback).replace('\n', '\n ') 39 | if self.traceback_title: 40 | traceback_pretty = f' {self.traceback_title}:\n{traceback_pretty}' 41 | s += f'\n[dim]{_s(traceback_pretty)}[/]' 42 | return rich_to_ansi(s) 43 | -------------------------------------------------------------------------------- /secator/output_types/exploit.py: -------------------------------------------------------------------------------- 1 | import time 2 | from dataclasses import dataclass, field 3 | from secator.output_types import OutputType 4 | from secator.utils import rich_to_ansi, rich_escape as _s 5 | from secator.definitions import MATCHED_AT, NAME, ID, EXTRA_DATA, REFERENCE 6 | 7 | 8 | @dataclass 9 | class Exploit(OutputType): 10 | name: str 11 | provider: str 12 | id: str 13 | matched_at: str = '' 14 | ip: str = '' 15 | confidence: str = 'low' 16 | cvss_score: float = 0 17 | reference: str = '' 18 | cves: list = field(default_factory=list, compare=False) 19 | tags: list = field(default_factory=list, compare=False) 20 | extra_data: dict = field(default_factory=dict, compare=False) 21 | _source: str = field(default='', repr=True) 22 | _type: str = field(default='exploit', repr=True) 23 | _timestamp: int = field(default_factory=lambda: time.time(), compare=False) 24 | _uuid: str = field(default='', repr=True, compare=False) 25 | _context: dict = field(default_factory=dict, repr=True, compare=False) 26 | _tagged: bool = field(default=False, repr=True, compare=False) 27 | _duplicate: bool = field(default=False, repr=True, compare=False) 28 | _related: list = field(default_factory=list, compare=False) 29 | 30 | _table_fields = [ 31 | MATCHED_AT, 32 | NAME, 33 | ID, 34 | EXTRA_DATA, 35 | REFERENCE 36 | ] 37 | _sort_by = ('matched_at', 'name') 38 | 39 | def __str__(self): 40 | return self.name 41 | 42 | def __repr__(self): 43 | s = rf'[bold red]⍼[/] \[[bold red]{self.name}' 44 | if self.reference: 45 | s += f' [link={_s(self.reference)}]🡕[/link]' 46 | s += '[/]]' 47 | if self.matched_at: 48 | s += f' {_s(self.matched_at)}' 49 | if self.tags: 50 | tags_str = ', '.join(self.tags) 51 | s += rf' \[[cyan]{tags_str}[/]]' 52 | if self.extra_data: 53 | data = ', '.join([f'{k}:{v}' for k, v in self.extra_data.items()]) 54 | s += rf' \[[yellow]{_s(str(data))}[/]]' 55 | if self.confidence == 'low': 56 | s = f'[dim]{s}[/]' 57 | return rich_to_ansi(s) 58 | -------------------------------------------------------------------------------- /secator/output_types/info.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass, field 2 | import time 3 | from secator.output_types import OutputType 4 | from secator.utils import rich_to_ansi 5 | 6 | 7 | @dataclass 8 | class Info(OutputType): 9 | message: str 10 | task_id: str = field(default='', compare=False) 11 | _source: str = field(default='', repr=True) 12 | _type: str = field(default='info', repr=True) 13 | _timestamp: int = field(default_factory=lambda: time.time(), compare=False) 14 | _uuid: str = field(default='', repr=True, compare=False) 15 | _context: dict = field(default_factory=dict, repr=True, compare=False) 16 | _duplicate: bool = field(default=False, repr=True, compare=False) 17 | _related: list = field(default_factory=list, compare=False) 18 | 19 | _table_fields = ['message', 'task_id'] 20 | _sort_by = ('_timestamp',) 21 | 22 | def __repr__(self): 23 | s = rf"\[[blue]INF[/]] {self.message}" 24 | return rich_to_ansi(s) 25 | -------------------------------------------------------------------------------- /secator/output_types/ip.py: -------------------------------------------------------------------------------- 1 | import time 2 | from dataclasses import dataclass, field 3 | from enum import Enum 4 | 5 | from secator.definitions import ALIVE, IP 6 | from secator.output_types import OutputType 7 | from secator.utils import rich_to_ansi 8 | 9 | 10 | class IpProtocol(str, Enum): 11 | IPv6 = 'IPv6' 12 | IPv4 = 'IPv4' 13 | 14 | 15 | @dataclass 16 | class Ip(OutputType): 17 | ip: str 18 | host: str = field(default='', repr=True, compare=False) 19 | alive: bool = False 20 | protocol: str = field(default=IpProtocol.IPv4) 21 | _source: str = field(default='', repr=True) 22 | _type: str = field(default='ip', repr=True) 23 | _timestamp: int = field(default_factory=lambda: time.time(), compare=False) 24 | _uuid: str = field(default='', repr=True, compare=False) 25 | _context: dict = field(default_factory=dict, repr=True, compare=False) 26 | _tagged: bool = field(default=False, repr=True, compare=False) 27 | _duplicate: bool = field(default=False, repr=True, compare=False) 28 | _related: list = field(default_factory=list, compare=False) 29 | 30 | _table_fields = [IP, ALIVE] 31 | _sort_by = (IP,) 32 | 33 | def __str__(self) -> str: 34 | return self.ip 35 | 36 | def __repr__(self) -> str: 37 | s = f'💻 [bold white]{self.ip}[/]' 38 | if self.host: 39 | s += rf' \[[bold magenta]{self.host}[/]]' 40 | if self.alive: 41 | s += r' [bold green]🟢[/]' 42 | return rich_to_ansi(s) 43 | -------------------------------------------------------------------------------- /secator/output_types/port.py: -------------------------------------------------------------------------------- 1 | import time 2 | from dataclasses import dataclass, field 3 | 4 | from secator.definitions import CPES, EXTRA_DATA, HOST, IP, PORT 5 | from secator.output_types import OutputType 6 | from secator.utils import rich_to_ansi 7 | 8 | 9 | @dataclass 10 | class Port(OutputType): 11 | port: int 12 | ip: str 13 | state: str = 'UNKNOWN' 14 | service_name: str = field(default='', compare=False) 15 | cpes: list = field(default_factory=list, compare=False) 16 | host: str = field(default='', repr=True, compare=False) 17 | protocol: str = field(default='tcp', repr=True, compare=False) 18 | extra_data: dict = field(default_factory=dict, compare=False) 19 | confidence: str = field(default='low', repr=False, compare=False) 20 | _timestamp: int = field(default_factory=lambda: time.time(), compare=False) 21 | _source: str = field(default='', repr=True, compare=False) 22 | _type: str = field(default='port', repr=True) 23 | _uuid: str = field(default='', repr=True, compare=False) 24 | _context: dict = field(default_factory=dict, repr=True, compare=False) 25 | _tagged: bool = field(default=False, repr=True, compare=False) 26 | _duplicate: bool = field(default=False, repr=True, compare=False) 27 | _related: list = field(default_factory=list, compare=False) 28 | 29 | _table_fields = [IP, PORT, HOST, CPES, EXTRA_DATA] 30 | _sort_by = (PORT, IP) 31 | 32 | def __gt__(self, other): 33 | # favor nmap over other port detection tools 34 | if self._source == 'nmap' and other._source != 'nmap': 35 | return True 36 | return super().__gt__(other) 37 | 38 | def __str__(self) -> str: 39 | return f'{self.host}:{self.port}' 40 | 41 | def __repr__(self) -> str: 42 | s = f'🔓 {self.ip}:[bold red]{self.port:<4}[/] [bold yellow]{self.state.upper()}[/]' 43 | if self.protocol != 'TCP': 44 | s += rf' \[[yellow3]{self.protocol}[/]]' 45 | if self.service_name: 46 | conf = '' 47 | if self.confidence == 'low': 48 | conf = '?' 49 | s += rf' \[[bold purple]{self.service_name}{conf}[/]]' 50 | if self.host: 51 | s += rf' \[[cyan]{self.host}[/]]' 52 | return rich_to_ansi(s) 53 | -------------------------------------------------------------------------------- /secator/output_types/progress.py: -------------------------------------------------------------------------------- 1 | import time 2 | from dataclasses import dataclass, field 3 | 4 | from secator.output_types import OutputType 5 | from secator.utils import rich_to_ansi, format_object 6 | 7 | 8 | @dataclass 9 | class Progress(OutputType): 10 | percent: int = 0 11 | extra_data: dict = field(default_factory=dict) 12 | _source: str = field(default='', repr=True) 13 | _type: str = field(default='progress', repr=True) 14 | _timestamp: int = field(default_factory=lambda: time.time(), compare=False) 15 | _uuid: str = field(default='', repr=True, compare=False) 16 | _context: dict = field(default_factory=dict, repr=True, compare=False) 17 | _tagged: bool = field(default=False, repr=True, compare=False) 18 | _duplicate: bool = field(default=False, repr=True, compare=False) 19 | _related: list = field(default_factory=list, compare=False) 20 | 21 | _table_fields = ['percent'] 22 | _sort_by = ('percent',) 23 | 24 | def __post_init__(self): 25 | super().__post_init__() 26 | if not 0 <= self.percent <= 100: 27 | self.percent = 0 28 | 29 | def __str__(self) -> str: 30 | return f'{self.percent}%' 31 | 32 | def __repr__(self) -> str: 33 | s = f'[dim]⏳ [bold]{self.percent}%[/] ' + '█' * (self.percent // 10) + '[/]' 34 | ed = format_object(self.extra_data, color='yellow3', skip_keys=['startedAt']) 35 | s += f'[dim]{ed}[/]' 36 | return rich_to_ansi(s) 37 | -------------------------------------------------------------------------------- /secator/output_types/record.py: -------------------------------------------------------------------------------- 1 | import time 2 | from dataclasses import dataclass, field 3 | 4 | from secator.definitions import HOST, NAME, TYPE 5 | from secator.output_types import OutputType 6 | from secator.utils import rich_to_ansi, rich_escape as _s 7 | 8 | 9 | @dataclass 10 | class Record(OutputType): 11 | name: str 12 | type: str 13 | host: str = '' 14 | extra_data: dict = field(default_factory=dict, compare=False) 15 | _source: str = field(default='', repr=True) 16 | _type: str = field(default='record', repr=True) 17 | _timestamp: int = field(default_factory=lambda: time.time(), compare=False) 18 | _uuid: str = field(default='', repr=True, compare=False) 19 | _context: dict = field(default_factory=dict, repr=True, compare=False) 20 | _tagged: bool = field(default=False, repr=True, compare=False) 21 | _duplicate: bool = field(default=False, repr=True, compare=False) 22 | _related: list = field(default_factory=list, compare=False) 23 | 24 | _table_fields = [NAME, HOST, TYPE] 25 | _sort_by = (TYPE, NAME) 26 | 27 | def __str__(self) -> str: 28 | return self.name 29 | 30 | def __repr__(self) -> str: 31 | s = rf'🎤 [bold white]{self.name}[/] \[[green]{self.type}[/]]' 32 | if self.host: 33 | s += rf' \[[magenta]{self.host}[/]]' 34 | if self.extra_data: 35 | s += r' \[[bold yellow]' + ','.join(f'{_s(k)}={_s(v)}' for k, v in self.extra_data.items()) + '[/]]' 36 | return rich_to_ansi(s) 37 | -------------------------------------------------------------------------------- /secator/output_types/stat.py: -------------------------------------------------------------------------------- 1 | import time 2 | from dataclasses import dataclass, field 3 | 4 | from secator.output_types import OutputType 5 | from secator.utils import rich_to_ansi 6 | 7 | 8 | @dataclass 9 | class Stat(OutputType): 10 | name: str 11 | pid: int 12 | cpu: int 13 | memory: int 14 | net_conns: int = field(default=None, repr=True) 15 | extra_data: dict = field(default_factory=dict) 16 | _source: str = field(default='', repr=True) 17 | _type: str = field(default='stat', repr=True) 18 | _timestamp: int = field(default_factory=lambda: time.time(), compare=False) 19 | _uuid: str = field(default='', repr=True, compare=False) 20 | _context: dict = field(default_factory=dict, repr=True, compare=False) 21 | _tagged: bool = field(default=False, repr=True, compare=False) 22 | _duplicate: bool = field(default=False, repr=True, compare=False) 23 | _related: list = field(default_factory=list, compare=False) 24 | 25 | _table_fields = ['name', 'pid', 'cpu', 'memory'] 26 | _sort_by = ('name', 'pid') 27 | 28 | def __str__(self) -> str: 29 | return f'{self.name} [pid={self.pid}] [cpu={self.cpu:.2f}%] [memory={self.memory:.2f}%]' 30 | 31 | def __repr__(self) -> str: 32 | s = rf'[dim yellow3]📊 {self.name} \[pid={self.pid}] \[cpu={self.cpu:.2f}%] \[memory={self.memory:.2f}%]' 33 | if self.net_conns: 34 | s += rf' \[connections={self.net_conns}]' 35 | s += ' [/]' 36 | return rich_to_ansi(s) 37 | -------------------------------------------------------------------------------- /secator/output_types/state.py: -------------------------------------------------------------------------------- 1 | import time 2 | from dataclasses import dataclass, field 3 | 4 | from secator.output_types._base import OutputType 5 | from secator.utils import rich_to_ansi 6 | 7 | 8 | @dataclass 9 | class State(OutputType): 10 | """Represents the state of a Celery task.""" 11 | 12 | task_id: str 13 | state: str 14 | _type: str = field(default='state', repr=True) 15 | _source: str = field(default='', repr=True) 16 | _timestamp: int = field(default_factory=lambda: time.time(), compare=False) 17 | _uuid: str = field(default='', repr=True, compare=False) 18 | _context: dict = field(default_factory=dict, repr=True, compare=False) 19 | _tagged: bool = field(default=False, repr=True, compare=False) 20 | _duplicate: bool = field(default=False, repr=True, compare=False) 21 | _related: list = field(default_factory=list, compare=False) 22 | _icon = '📊' 23 | _color = 'bright_blue' 24 | 25 | def __str__(self) -> str: 26 | return f"Task {self.task_id} is {self.state}" 27 | 28 | def __repr__(self) -> str: 29 | return rich_to_ansi(f"{self._icon} [bold {self._color}]{self.state}[/] {self.task_id}") 30 | -------------------------------------------------------------------------------- /secator/output_types/subdomain.py: -------------------------------------------------------------------------------- 1 | import time 2 | from dataclasses import dataclass, field 3 | from typing import List 4 | 5 | from secator.definitions import DOMAIN, HOST, SOURCES 6 | from secator.output_types import OutputType 7 | from secator.utils import rich_to_ansi 8 | 9 | 10 | @dataclass 11 | class Subdomain(OutputType): 12 | host: str 13 | domain: str 14 | sources: List[str] = field(default_factory=list, compare=False) 15 | extra_data: dict = field(default_factory=dict, compare=False) 16 | _source: str = field(default='', repr=True) 17 | _type: str = field(default='subdomain', repr=True) 18 | _timestamp: int = field(default_factory=lambda: time.time(), compare=False) 19 | _uuid: str = field(default='', repr=True, compare=False) 20 | _context: dict = field(default_factory=dict, repr=True, compare=False) 21 | _tagged: bool = field(default=False, repr=True, compare=False) 22 | _duplicate: bool = field(default=False, repr=True, compare=False) 23 | _related: list = field(default_factory=list, compare=False) 24 | 25 | _table_fields = [ 26 | HOST, 27 | DOMAIN, 28 | SOURCES 29 | ] 30 | _sort_by = (HOST,) 31 | 32 | def __str__(self): 33 | return self.host 34 | 35 | def __repr__(self): 36 | sources_str = ', '.join([f'[magenta]{source}[/]' for source in self.sources]) 37 | s = f'🏰 [white]{self.host}[/]' 38 | if sources_str: 39 | s += f' [{sources_str}]' 40 | if self.extra_data: 41 | s += r' \[[bold yellow]' + ', '.join(f'{k}:{v}' for k, v in self.extra_data.items()) + '[/]]' 42 | return rich_to_ansi(s) 43 | -------------------------------------------------------------------------------- /secator/output_types/tag.py: -------------------------------------------------------------------------------- 1 | import time 2 | from dataclasses import dataclass, field 3 | 4 | from secator.output_types import OutputType 5 | from secator.utils import rich_to_ansi, trim_string, rich_escape as _s 6 | 7 | 8 | @dataclass 9 | class Tag(OutputType): 10 | name: str 11 | match: str 12 | extra_data: dict = field(default_factory=dict, repr=True, compare=False) 13 | stored_response_path: str = field(default='', compare=False) 14 | _source: str = field(default='', repr=True) 15 | _type: str = field(default='tag', repr=True) 16 | _timestamp: int = field(default_factory=lambda: time.time(), compare=False) 17 | _uuid: str = field(default='', repr=True, compare=False) 18 | _context: dict = field(default_factory=dict, repr=True, compare=False) 19 | _tagged: bool = field(default=False, repr=True, compare=False) 20 | _duplicate: bool = field(default=False, repr=True, compare=False) 21 | _related: list = field(default_factory=list, compare=False) 22 | 23 | _table_fields = ['match', 'name', 'extra_data'] 24 | _sort_by = ('match', 'name') 25 | 26 | def __post_init__(self): 27 | super().__post_init__() 28 | 29 | def __str__(self) -> str: 30 | return self.match 31 | 32 | def __repr__(self) -> str: 33 | s = f'🏷️ [bold magenta]{self.name}[/]' 34 | s += f' found @ [bold]{_s(self.match)}[/]' 35 | ed = '' 36 | if self.stored_response_path: 37 | s += rf' [link=file://{self.stored_response_path}]:incoming_envelope:[/]' 38 | if self.extra_data: 39 | for k, v in self.extra_data.items(): 40 | sep = ' ' 41 | if not v: 42 | continue 43 | if isinstance(v, str): 44 | v = trim_string(v, max_length=1000) 45 | if len(v) > 1000: 46 | v = v.replace('\n', '\n' + sep) 47 | sep = '\n ' 48 | ed += f'\n [dim red]{_s(k)}[/]:{sep}[dim yellow]{_s(v)}[/]' 49 | if ed: 50 | s += ed 51 | return rich_to_ansi(s) 52 | -------------------------------------------------------------------------------- /secator/output_types/target.py: -------------------------------------------------------------------------------- 1 | import time 2 | from dataclasses import dataclass, field 3 | 4 | from secator.output_types import OutputType 5 | from secator.utils import autodetect_type, rich_to_ansi, rich_escape as _s 6 | 7 | 8 | @dataclass 9 | class Target(OutputType): 10 | name: str 11 | type: str = '' 12 | _source: str = field(default='', repr=True) 13 | _type: str = field(default='target', repr=True) 14 | _timestamp: int = field(default_factory=lambda: time.time(), compare=False) 15 | _uuid: str = field(default='', repr=True, compare=False) 16 | _context: dict = field(default_factory=dict, repr=True, compare=False) 17 | _tagged: bool = field(default=False, repr=True, compare=False) 18 | _duplicate: bool = field(default=False, repr=True, compare=False) 19 | _related: list = field(default_factory=list, compare=False) 20 | 21 | _table_fields = [ 22 | 'name', 23 | 'type', 24 | ] 25 | _sort_by = ('type', 'name') 26 | 27 | def __post_init__(self): 28 | if not self.type: 29 | self.type = autodetect_type(self.name) 30 | 31 | def __str__(self): 32 | return self.name 33 | 34 | def __repr__(self): 35 | s = f'🎯 {_s(self.name)}' 36 | if self.type: 37 | s += f' ({self.type})' 38 | return rich_to_ansi(s) 39 | -------------------------------------------------------------------------------- /secator/output_types/user_account.py: -------------------------------------------------------------------------------- 1 | import time 2 | from dataclasses import dataclass, field 3 | 4 | from secator.definitions import SITE_NAME, URL, USERNAME 5 | from secator.output_types import OutputType 6 | from secator.utils import rich_to_ansi, rich_escape as _s 7 | 8 | 9 | @dataclass 10 | class UserAccount(OutputType): 11 | username: str 12 | url: str = '' 13 | email: str = '' 14 | site_name: str = '' 15 | extra_data: dict = field(default_factory=dict, compare=False) 16 | _source: str = field(default='', repr=True) 17 | _type: str = field(default='user_account', repr=True) 18 | _timestamp: int = field(default_factory=lambda: time.time(), compare=False) 19 | _uuid: str = field(default='', repr=True, compare=False) 20 | _context: dict = field(default_factory=dict, repr=True, compare=False) 21 | _tagged: bool = field(default=False, repr=True, compare=False) 22 | _duplicate: bool = field(default=False, repr=True, compare=False) 23 | _related: list = field(default_factory=list, compare=False) 24 | 25 | _table_fields = [SITE_NAME, USERNAME, URL] 26 | _sort_by = (URL, USERNAME) 27 | 28 | def __str__(self) -> str: 29 | return self.url 30 | 31 | def __repr__(self) -> str: 32 | s = f'👤 [green]{_s(self.username)}[/]' 33 | if self.email: 34 | s += rf' \[[bold yellow]{_s(self.email)}[/]]' 35 | if self.site_name: 36 | s += rf' \[[bold blue]{self.site_name}[/]]' 37 | if self.url: 38 | s += rf' \[[white]{_s(self.url)}[/]]' 39 | if self.extra_data: 40 | s += r' \[[bold yellow]' + _s(', '.join(f'{k}:{v}' for k, v in self.extra_data.items())) + '[/]]' 41 | return rich_to_ansi(s) 42 | -------------------------------------------------------------------------------- /secator/output_types/vulnerability.py: -------------------------------------------------------------------------------- 1 | import time 2 | from dataclasses import dataclass, field 3 | from typing import List 4 | 5 | from secator.definitions import (CONFIDENCE, CVSS_SCORE, EXTRA_DATA, ID, 6 | MATCHED_AT, NAME, REFERENCE, SEVERITY, TAGS) 7 | from secator.output_types import OutputType 8 | from secator.utils import rich_to_ansi, rich_escape as _s, format_object 9 | 10 | 11 | @dataclass 12 | class Vulnerability(OutputType): 13 | name: str 14 | provider: str = '' 15 | id: str = '' 16 | matched_at: str = '' 17 | ip: str = field(default='', compare=False) 18 | confidence: str = 'low' 19 | severity: str = 'unknown' 20 | cvss_score: float = 0 21 | tags: List[str] = field(default_factory=list) 22 | extra_data: dict = field(default_factory=dict, compare=False) 23 | description: str = field(default='', compare=False) 24 | references: List[str] = field(default_factory=list, compare=False) 25 | reference: str = field(default='', compare=False) 26 | confidence_nb: int = 0 27 | severity_nb: int = 0 28 | _source: str = field(default='', repr=True) 29 | _type: str = field(default='vulnerability', repr=True) 30 | _timestamp: int = field(default_factory=lambda: time.time(), compare=False) 31 | _uuid: str = field(default='', repr=True, compare=False) 32 | _context: dict = field(default_factory=dict, repr=True, compare=False) 33 | _tagged: bool = field(default=False, repr=True, compare=False) 34 | _duplicate: bool = field(default=False, repr=True, compare=False) 35 | _related: list = field(default_factory=list, compare=False) 36 | 37 | _table_fields = [ 38 | MATCHED_AT, 39 | SEVERITY, 40 | CONFIDENCE, 41 | NAME, 42 | ID, 43 | CVSS_SCORE, 44 | TAGS, 45 | EXTRA_DATA, 46 | REFERENCE 47 | ] 48 | _sort_by = ('confidence_nb', 'severity_nb', 'matched_at', 'cvss_score') 49 | 50 | def __post_init__(self): 51 | super().__post_init__() 52 | severity_map = { 53 | 'critical': 0, 54 | 'high': 1, 55 | 'medium': 2, 56 | 'low': 3, 57 | 'info': 4, 58 | 'unknown': 5, 59 | None: 6 60 | } 61 | self.severity = self.severity.lower() # normalize severity 62 | self.severity_nb = severity_map.get(self.severity, 6) 63 | self.confidence_nb = severity_map[self.confidence] 64 | if len(self.references) > 0: 65 | self.reference = self.references[0] 66 | 67 | def __repr__(self): 68 | data = self.extra_data 69 | 70 | # TODO: review this 71 | if 'data' in data and isinstance(data['data'], list): 72 | data = data['data'] 73 | 74 | tags = self.tags 75 | colors = { 76 | 'critical': 'bold red', 77 | 'high': 'red', 78 | 'medium': 'yellow', 79 | 'low': 'green', 80 | 'info': 'magenta', 81 | 'unknown': 'dim magenta' 82 | } 83 | c = colors.get(self.severity, 'dim magenta') 84 | name = self.name 85 | if self.reference: 86 | name += rf' [link={_s(self.reference)}]🡕[/link]' 87 | s = rf'🚨 \[[green]{name}[/]]' 88 | s += rf' \[[{c}]{self.severity}[/]] {_s(self.matched_at)}' # noqa: E501 89 | if tags: 90 | tags_str = ','.join(tags) 91 | s += rf' \[[cyan]{_s(tags_str)}[/]]' 92 | if data: 93 | s += format_object(data, 'yellow') 94 | if self.confidence == 'low': 95 | s = f'[dim]{s}[/]' 96 | return rich_to_ansi(s) 97 | 98 | def __str__(self): 99 | return self.matched_at + ' -> ' + self.name 100 | -------------------------------------------------------------------------------- /secator/output_types/warning.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass, field 2 | import time 3 | from secator.output_types import OutputType 4 | from secator.utils import rich_to_ansi 5 | 6 | 7 | @dataclass 8 | class Warning(OutputType): 9 | message: str 10 | task_id: str = field(default='', compare=False) 11 | _source: str = field(default='', repr=True) 12 | _type: str = field(default='warning', repr=True) 13 | _timestamp: int = field(default_factory=lambda: time.time(), compare=False) 14 | _uuid: str = field(default='', repr=True, compare=False) 15 | _context: dict = field(default_factory=dict, repr=True, compare=False) 16 | _duplicate: bool = field(default=False, repr=True, compare=False) 17 | _related: list = field(default_factory=list, compare=False) 18 | 19 | _table_fields = ['task_name', 'message'] 20 | _sort_by = ('_timestamp',) 21 | 22 | def __repr__(self): 23 | s = rf"\[[yellow]WRN[/]] {self.message}" 24 | return rich_to_ansi(s) 25 | -------------------------------------------------------------------------------- /secator/runners/__init__.py: -------------------------------------------------------------------------------- 1 | __all__ = [ 2 | 'Runner', 3 | 'Command', 4 | 'Task', 5 | 'Workflow', 6 | 'Scan', 7 | ] 8 | from secator.runners._base import Runner 9 | from secator.runners.command import Command 10 | from secator.runners.task import Task 11 | from secator.runners.scan import Scan 12 | from secator.runners.workflow import Workflow 13 | -------------------------------------------------------------------------------- /secator/runners/celery.py: -------------------------------------------------------------------------------- 1 | from secator.celery_utils import CeleryData 2 | from secator.runners import Runner 3 | 4 | 5 | class Celery(Runner): 6 | def yielder(self): 7 | if not self.celery_result: 8 | result = self.build_celery_workflow() 9 | if self.sync: 10 | yield from result.apply().get() 11 | yield from CeleryData.iter_results( 12 | self.celery_result, 13 | ids_map=self.celery_ids_map, 14 | print_remote_info=False 15 | ) 16 | 17 | def error_handler(self, e): 18 | self.stop_celery_tasks() 19 | -------------------------------------------------------------------------------- /secator/runners/scan.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from dotmap import DotMap 3 | 4 | from secator.config import CONFIG 5 | from secator.output_types.info import Info 6 | from secator.runners._base import Runner 7 | from secator.runners.workflow import Workflow 8 | from secator.utils import merge_opts 9 | 10 | 11 | logger = logging.getLogger(__name__) 12 | 13 | 14 | class Scan(Runner): 15 | 16 | default_exporters = CONFIG.scans.exporters 17 | 18 | @classmethod 19 | def delay(cls, *args, **kwargs): 20 | from secator.celery import run_scan 21 | return run_scan.delay(args=args, kwargs=kwargs) 22 | 23 | def build_celery_workflow(self): 24 | """Build Celery workflow for scan execution. 25 | 26 | Returns: 27 | celery.Signature: Celery task signature. 28 | """ 29 | from celery import chain 30 | from secator.celery import mark_runner_started, mark_runner_completed 31 | from secator.template import TemplateLoader 32 | 33 | scan_opts = self.config.options 34 | 35 | # Set hooks and reports 36 | self.enable_hooks = False # Celery will handle hooks 37 | self.enable_reports = True # Workflow will handle reports 38 | self.print_item = not self.sync 39 | 40 | # Build chain of workflows 41 | sigs = [] 42 | sig = None 43 | for name, workflow_opts in self.config.workflows.items(): 44 | run_opts = self.run_opts.copy() 45 | run_opts.pop('profiles', None) 46 | run_opts['no_poll'] = True 47 | run_opts['caller'] = 'Scan' 48 | run_opts['has_parent'] = True 49 | run_opts['enable_reports'] = False 50 | run_opts['print_profiles'] = False 51 | opts = merge_opts(scan_opts, workflow_opts, run_opts) 52 | name = name.split('/')[0] 53 | config = TemplateLoader(name=f'workflow/{name}') 54 | if not config: 55 | raise ValueError(f'Workflow {name} not found') 56 | 57 | # Skip workflow if condition is not met 58 | condition = workflow_opts.pop('if', None) if workflow_opts else None 59 | local_ns = {'opts': DotMap(opts)} 60 | if condition and not eval(condition, {"__builtins__": {}}, local_ns): 61 | self.add_result(Info(message=f'Skipped workflow {name} because condition is not met: {condition}')) 62 | continue 63 | 64 | # Build workflow 65 | workflow = Workflow( 66 | config, 67 | self.inputs, 68 | results=self.results, 69 | run_opts=opts, 70 | hooks=self._hooks, 71 | context=self.context.copy() 72 | ) 73 | celery_workflow = workflow.build_celery_workflow(chain_previous_results=True) 74 | for task_id, task_info in workflow.celery_ids_map.items(): 75 | self.add_subtask(task_id, task_info['name'], task_info['descr']) 76 | sigs.append(celery_workflow) 77 | 78 | for result in workflow.results: 79 | self.add_result(result, print=False, hooks=False) 80 | 81 | if sigs: 82 | sig = chain( 83 | mark_runner_started.si([], self).set(queue='results'), 84 | *sigs, 85 | mark_runner_completed.s(self).set(queue='results'), 86 | ) 87 | return sig 88 | -------------------------------------------------------------------------------- /secator/runners/task.py: -------------------------------------------------------------------------------- 1 | from secator.config import CONFIG 2 | from secator.runners import Runner 3 | from secator.loader import discover_tasks 4 | from celery import chain 5 | 6 | 7 | class Task(Runner): 8 | 9 | default_exporters = CONFIG.tasks.exporters 10 | 11 | @classmethod 12 | def delay(cls, *args, **kwargs): 13 | from secator.celery import run_task 14 | return run_task.apply_async(kwargs={'args': args, 'kwargs': kwargs}, queue='celery') 15 | 16 | def build_celery_workflow(self): 17 | """Build Celery workflow for task execution. 18 | 19 | Args: 20 | run_opts (dict): Run options. 21 | results (list): Prior results. 22 | 23 | Returns: 24 | celery.Signature: Celery task signature. 25 | """ 26 | from secator.celery import run_command 27 | 28 | # Get task class 29 | task_cls = Task.get_task_class(self.config.name) 30 | 31 | # Run opts 32 | opts = self.run_opts.copy() 33 | opts.pop('output', None) 34 | opts.pop('profiles', None) 35 | opts.pop('no_poll', False) 36 | 37 | # Set output types 38 | self.output_types = task_cls.output_types 39 | 40 | # Set hooks and reports 41 | self.enable_hooks = False # Celery will handle hooks 42 | self.enable_reports = True # Task will handle reports 43 | 44 | # Get hooks 45 | hooks = self._hooks.get(Task, {}) 46 | opts['hooks'] = hooks 47 | opts['context'] = self.context.copy() 48 | opts['reports_folder'] = str(self.reports_folder) 49 | 50 | # Task class will handle those 51 | opts['enable_reports'] = False 52 | opts['enable_profiles'] = False 53 | opts['enable_duplicate_check'] = False 54 | opts['print_start'] = False 55 | opts['print_end'] = False 56 | opts['print_target'] = False 57 | opts['has_parent'] = False 58 | opts['skip_if_no_inputs'] = False 59 | opts['caller'] = 'Task' 60 | 61 | # Create task signature 62 | profile = task_cls.profile(opts) if callable(task_cls.profile) else task_cls.profile 63 | sig = run_command.si(self.results, self.config.name, self.inputs, opts).set(queue=profile) 64 | task_id = sig.freeze().task_id 65 | self.add_subtask(task_id, self.config.name, self.description) 66 | return chain(sig) 67 | 68 | @staticmethod 69 | def get_task_class(name): 70 | """Get task class from a name. 71 | 72 | Args: 73 | name (str): Task name. 74 | """ 75 | if '/' in name: 76 | name = name.split('/')[0] 77 | tasks_classes = discover_tasks() 78 | for task_cls in tasks_classes: 79 | if task_cls.__name__ == name: 80 | return task_cls 81 | raise ValueError(f'Task {name} not found. Aborting.') 82 | -------------------------------------------------------------------------------- /secator/scans/__init__.py: -------------------------------------------------------------------------------- 1 | from secator.loader import get_configs_by_type 2 | from secator.runners import Scan 3 | 4 | 5 | class DynamicScan(Scan): 6 | def __init__(self, config): 7 | self.config = config 8 | 9 | def __call__(self, targets, **kwargs): 10 | hooks = kwargs.pop('hooks', {}) 11 | results = kwargs.pop('results', []) 12 | context = kwargs.pop('context', {}) 13 | super().__init__( 14 | config=self.config, 15 | inputs=targets, 16 | results=results, 17 | hooks=hooks, 18 | context=context, 19 | run_opts=kwargs) 20 | return self 21 | 22 | 23 | DYNAMIC_SCANS = {} 24 | for scan in get_configs_by_type('scan'): 25 | instance = DynamicScan(scan) 26 | DYNAMIC_SCANS[scan.name] = instance 27 | 28 | globals().update(DYNAMIC_SCANS) 29 | __all__ = list(DYNAMIC_SCANS) 30 | -------------------------------------------------------------------------------- /secator/serializers/__init__.py: -------------------------------------------------------------------------------- 1 | __all__ = [ 2 | 'JSONSerializer', 3 | 'RegexSerializer', 4 | 'DataclassEncoder', 5 | ] 6 | from secator.serializers.json import JSONSerializer 7 | from secator.serializers.regex import RegexSerializer 8 | from secator.serializers.dataclass import DataclassEncoder 9 | -------------------------------------------------------------------------------- /secator/serializers/dataclass.py: -------------------------------------------------------------------------------- 1 | from datetime import date, datetime 2 | import json 3 | from pathlib import PosixPath 4 | from secator.output_types import OUTPUT_TYPES 5 | 6 | 7 | class DataclassEncoder(json.JSONEncoder): 8 | def default(self, obj): 9 | if hasattr(obj, 'toDict'): 10 | return obj.toDict() 11 | elif isinstance(obj, PosixPath): 12 | return str(obj) 13 | elif isinstance(obj, (datetime, date)): 14 | return obj.isoformat() 15 | else: 16 | return json.JSONEncoder.default(self, obj) 17 | 18 | 19 | def get_output_cls(type): 20 | try: 21 | return [cls for cls in OUTPUT_TYPES if cls.get_name() == type][0] 22 | except IndexError: 23 | return None 24 | 25 | 26 | def dataclass_decoder(obj): 27 | if '_type' in obj: 28 | output_cls = get_output_cls(obj['_type']) 29 | if output_cls: 30 | return output_cls.load(obj) 31 | return obj 32 | 33 | 34 | def dumps_dataclass(obj, indent=None): 35 | return json.dumps(obj, cls=DataclassEncoder, indent=indent) 36 | 37 | 38 | def loads_dataclass(obj): 39 | return json.loads(obj, object_hook=dataclass_decoder) 40 | -------------------------------------------------------------------------------- /secator/serializers/json.py: -------------------------------------------------------------------------------- 1 | import json 2 | 3 | 4 | class JSONSerializer: 5 | 6 | def __init__(self, strict=False): 7 | self.strict = strict 8 | 9 | def run(self, line): 10 | start_index = line.find('{') 11 | end_index = line.rfind('}') 12 | if start_index == -1 or end_index == -1: 13 | return 14 | if start_index != 0 and self.strict: 15 | return 16 | try: 17 | json_obj = line[start_index:end_index+1] 18 | yield json.loads(json_obj) 19 | except json.decoder.JSONDecodeError: 20 | return 21 | -------------------------------------------------------------------------------- /secator/serializers/regex.py: -------------------------------------------------------------------------------- 1 | import re 2 | 3 | 4 | class RegexSerializer: 5 | 6 | def __init__(self, regex, fields=[], findall=False): 7 | self.regex = re.compile(regex) 8 | self.fields = fields 9 | self.findall = findall 10 | 11 | def run(self, line): 12 | if self.findall: 13 | match = self.regex.findall(line) 14 | yield from match 15 | return 16 | output = {} 17 | match = self.regex.match(line) 18 | if not match: 19 | return 20 | if not self.fields: 21 | yield match.group(0) 22 | return 23 | for field in self.fields: 24 | output[field] = match.group(field) 25 | yield output 26 | -------------------------------------------------------------------------------- /secator/tasks/__init__.py: -------------------------------------------------------------------------------- 1 | from secator.loader import discover_tasks 2 | TASKS = discover_tasks() 3 | __all__ = [ 4 | cls.__name__ 5 | for cls in TASKS 6 | ] 7 | for cls in TASKS: 8 | exec(f'from .{cls.__name__} import {cls.__name__}') 9 | -------------------------------------------------------------------------------- /secator/tasks/arjun.py: -------------------------------------------------------------------------------- 1 | import os 2 | import yaml 3 | 4 | from secator.decorators import task 5 | from secator.definitions import (OUTPUT_PATH, RATE_LIMIT, THREADS, DELAY, TIMEOUT, METHOD, WORDLIST, 6 | HEADER, URL, FOLLOW_REDIRECT) 7 | from secator.output_types import Info, Url, Warning 8 | from secator.runners import Command 9 | from secator.tasks._categories import OPTS 10 | from secator.utils import process_wordlist 11 | 12 | 13 | @task() 14 | class arjun(Command): 15 | """HTTP Parameter Discovery Suite.""" 16 | cmd = 'arjun' 17 | input_types = [URL] 18 | output_types = [Url] 19 | tags = ['url', 'fuzz', 'params'] 20 | input_flag = '-u' 21 | input_chunk_size = 1 22 | version_flag = ' ' 23 | opts = { 24 | 'chunk_size': {'type': int, 'help': 'Control query/chunk size'}, 25 | 'stable': {'is_flag': True, 'default': False, 'help': 'Use stable mode'}, 26 | 'include': {'type': str, 'help': 'Include persistent data (e.g: "api_key=xxxxx" or {"api_key": "xxxx"})'}, 27 | 'passive': {'is_flag': True, 'default': False, 'help': 'Passive mode'}, 28 | 'casing': {'type': str, 'help': 'Casing style for params e.g. like_this, likeThis, LIKE_THIS, like_this'}, # noqa: E501 29 | WORDLIST: {'type': str, 'short': 'w', 'default': None, 'process': process_wordlist, 'help': 'Wordlist to use (default: arjun wordlist)'}, # noqa: E501 30 | } 31 | meta_opts = { 32 | THREADS: OPTS[THREADS], 33 | DELAY: OPTS[DELAY], 34 | TIMEOUT: OPTS[TIMEOUT], 35 | RATE_LIMIT: OPTS[RATE_LIMIT], 36 | METHOD: OPTS[METHOD], 37 | HEADER: OPTS[HEADER], 38 | FOLLOW_REDIRECT: OPTS[FOLLOW_REDIRECT], 39 | } 40 | opt_key_map = { 41 | THREADS: 't', 42 | DELAY: 'd', 43 | TIMEOUT: 'T', 44 | RATE_LIMIT: '--rate-limit', 45 | METHOD: 'm', 46 | WORDLIST: 'w', 47 | HEADER: '--headers', 48 | 'chunk_size': 'c', 49 | 'stable': '--stable', 50 | 'passive': '--passive', 51 | 'casing': '--casing', 52 | 'follow_redirect': '--follow-redirect', 53 | } 54 | opt_value_map = { 55 | HEADER: lambda headers: "\\n".join(c.strip() for c in headers.split(";;")) 56 | } 57 | install_version = '2.2.7' 58 | install_cmd = 'pipx install arjun==[install_version] --force' 59 | install_github_handle = 's0md3v/Arjun' 60 | 61 | @staticmethod 62 | def on_line(self, line): 63 | if 'Processing chunks' in line: 64 | return '' 65 | return line 66 | 67 | @staticmethod 68 | def on_cmd(self): 69 | follow_redirect = self.get_opt_value(FOLLOW_REDIRECT) 70 | self.cmd = self.cmd.replace(' --follow-redirect', '') 71 | if not follow_redirect: 72 | self.cmd += ' --disable-redirects' 73 | 74 | self.output_path = self.get_opt_value(OUTPUT_PATH) 75 | if not self.output_path: 76 | self.output_path = f'{self.reports_folder}/.outputs/{self.unique_name}.json' 77 | self.cmd += f' -oJ {self.output_path}' 78 | 79 | @staticmethod 80 | def on_cmd_done(self): 81 | if not os.path.exists(self.output_path): 82 | # yield Error(message=f'Could not find JSON results in {self.output_path}') 83 | return 84 | yield Info(message=f'JSON results saved to {self.output_path}') 85 | with open(self.output_path, 'r') as f: 86 | results = yaml.safe_load(f.read()) 87 | if not results: 88 | yield Warning(message='No results found !') 89 | return 90 | for url, values in results.items(): 91 | for param in values['params']: 92 | yield Url( 93 | url=url + '?' + param + '=' + 'FUZZ', 94 | request_headers=values['headers'], 95 | method=values['method'], 96 | ) 97 | -------------------------------------------------------------------------------- /secator/tasks/dalfox.py: -------------------------------------------------------------------------------- 1 | from urllib.parse import urlparse 2 | 3 | from secator.decorators import task 4 | from secator.definitions import (CONFIDENCE, DELAY, EXTRA_DATA, FOLLOW_REDIRECT, 5 | HEADER, ID, MATCHED_AT, METHOD, NAME, 6 | OPT_NOT_SUPPORTED, PROVIDER, PROXY, RATE_LIMIT, 7 | RETRIES, SEVERITY, TAGS, THREADS, TIMEOUT, URL, 8 | USER_AGENT) 9 | from secator.output_types import Vulnerability, Url 10 | from secator.serializers import JSONSerializer 11 | from secator.tasks._categories import VulnHttp 12 | 13 | DALFOX_TYPE_MAP = { 14 | 'G': 'Grep XSS', 15 | 'R': 'Reflected XSS', 16 | 'V': 'Verified XSS' 17 | } 18 | 19 | 20 | @task() 21 | class dalfox(VulnHttp): 22 | """Powerful open source XSS scanning tool.""" 23 | cmd = 'dalfox' 24 | input_types = [URL] 25 | output_types = [Vulnerability, Url] 26 | tags = ['url', 'fuzz'] 27 | input_flag = 'url' 28 | file_flag = 'file' 29 | # input_chunk_size = 1 30 | json_flag = '--format json' 31 | version_flag = 'version' 32 | opt_prefix = '--' 33 | opt_key_map = { 34 | HEADER: 'header', 35 | DELAY: 'delay', 36 | FOLLOW_REDIRECT: 'follow-redirects', 37 | METHOD: 'method', 38 | PROXY: 'proxy', 39 | RATE_LIMIT: OPT_NOT_SUPPORTED, 40 | RETRIES: OPT_NOT_SUPPORTED, 41 | THREADS: 'worker', 42 | TIMEOUT: 'timeout', 43 | USER_AGENT: 'user-agent' 44 | } 45 | item_loaders = [JSONSerializer()] 46 | output_map = { 47 | Vulnerability: { 48 | ID: lambda x: None, 49 | NAME: lambda x: DALFOX_TYPE_MAP[x['type']], 50 | PROVIDER: 'dalfox', 51 | TAGS: lambda x: [x['cwe']] if x['cwe'] else [], 52 | CONFIDENCE: lambda x: 'high', 53 | MATCHED_AT: lambda x: urlparse(x['data'])._replace(query='').geturl(), 54 | EXTRA_DATA: lambda x: dalfox.extra_data_extractor(x), 55 | SEVERITY: lambda x: x['severity'].lower() 56 | } 57 | } 58 | install_version = 'v2.11.0' 59 | install_cmd = 'go install -v github.com/hahwul/dalfox/v2@latest' 60 | install_github_handle = 'hahwul/dalfox' 61 | encoding = 'ansi' 62 | proxychains = False 63 | proxychains_flavor = 'proxychains4' 64 | proxy_socks5 = True 65 | proxy_http = True 66 | profile = 'cpu' 67 | 68 | @staticmethod 69 | def on_line(self, line): 70 | line = line.rstrip(',') 71 | return line 72 | 73 | @staticmethod 74 | def on_json_loaded(self, item): 75 | if item.get('type', '') == 'V': 76 | item['request_headers'] = self.get_opt_value(HEADER, preprocess=True) 77 | yield Url( 78 | url=item['data'], 79 | method=item['method'], 80 | request_headers=item['request_headers'], 81 | extra_data={k: v for k, v in item.items() if k not in ['type', 'severity', 'cwe', 'request_headers', 'method', 'data']} # noqa: E501 82 | ) 83 | yield item 84 | 85 | @staticmethod 86 | def extra_data_extractor(item): 87 | extra_data = {} 88 | for key, value in item.items(): 89 | if key not in ['type', 'severity', 'cwe']: 90 | extra_data[key] = value 91 | return extra_data 92 | -------------------------------------------------------------------------------- /secator/tasks/dirsearch.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import yaml 4 | 5 | from secator.decorators import task 6 | from secator.definitions import (CONTENT_LENGTH, CONTENT_TYPE, DATA, DELAY, DEPTH, 7 | FILTER_CODES, FILTER_REGEX, FILTER_SIZE, 8 | FILTER_WORDS, FOLLOW_REDIRECT, HEADER, 9 | MATCH_CODES, MATCH_REGEX, MATCH_SIZE, 10 | MATCH_WORDS, METHOD, OPT_NOT_SUPPORTED, OUTPUT_PATH, PROXY, 11 | RATE_LIMIT, RETRIES, STATUS_CODE, 12 | THREADS, TIMEOUT, USER_AGENT, WORDLIST, URL) 13 | from secator.output_types import Url, Info, Error 14 | from secator.tasks._categories import HttpFuzzer 15 | 16 | 17 | @task() 18 | class dirsearch(HttpFuzzer): 19 | """Advanced web path brute-forcer.""" 20 | cmd = 'dirsearch' 21 | input_types = [URL] 22 | output_types = [Url] 23 | tags = ['url', 'fuzz'] 24 | input_flag = '-u' 25 | file_flag = '-l' 26 | json_flag = '-O json' 27 | opt_prefix = '--' 28 | encoding = 'ansi' 29 | opt_key_map = { 30 | HEADER: 'header', 31 | DATA: 'data', 32 | DELAY: 'delay', 33 | DEPTH: 'max-recursion-depth', 34 | FILTER_CODES: 'exclude-status', 35 | FILTER_REGEX: 'exclude-regex', 36 | FILTER_SIZE: 'exclude-sizes', 37 | FILTER_WORDS: OPT_NOT_SUPPORTED, 38 | FOLLOW_REDIRECT: 'follow-redirects', 39 | MATCH_CODES: 'include-status', 40 | MATCH_REGEX: OPT_NOT_SUPPORTED, 41 | MATCH_SIZE: OPT_NOT_SUPPORTED, 42 | MATCH_WORDS: OPT_NOT_SUPPORTED, 43 | METHOD: 'http-method', 44 | PROXY: 'proxy', 45 | RATE_LIMIT: 'max-rate', 46 | RETRIES: 'retries', 47 | THREADS: 'threads', 48 | TIMEOUT: 'timeout', 49 | USER_AGENT: 'user-agent', 50 | WORDLIST: 'wordlists', 51 | } 52 | output_map = { 53 | Url: { 54 | CONTENT_LENGTH: 'content-length', 55 | CONTENT_TYPE: 'content-type', 56 | STATUS_CODE: 'status', 57 | 'request_headers': 'request_headers' 58 | } 59 | } 60 | install_cmd = 'pipx install git+https://github.com/maurosoria/dirsearch.git --force' 61 | install_version = '0.4.3' 62 | proxychains = True 63 | proxy_socks5 = True 64 | proxy_http = True 65 | profile = 'io' 66 | 67 | @staticmethod 68 | def on_init(self): 69 | self.output_path = self.get_opt_value(OUTPUT_PATH) 70 | if not self.output_path: 71 | self.output_path = f'{self.reports_folder}/.outputs/{self.unique_name}.json' 72 | self.cmd += f' -o {self.output_path}' 73 | 74 | @staticmethod 75 | def on_cmd_done(self): 76 | if not os.path.exists(self.output_path): 77 | yield Error(message=f'Could not find JSON results in {self.output_path}') 78 | return 79 | 80 | yield Info(message=f'JSON results saved to {self.output_path}') 81 | with open(self.output_path, 'r') as f: 82 | results = yaml.safe_load(f.read()).get('results', []) 83 | for result in results: 84 | result['request_headers'] = self.get_opt_value(HEADER, preprocess=True) 85 | yield result 86 | -------------------------------------------------------------------------------- /secator/tasks/feroxbuster.py: -------------------------------------------------------------------------------- 1 | from secator.config import CONFIG 2 | from secator.decorators import task 3 | from secator.definitions import (CONTENT_TYPE, DATA, DELAY, DEPTH, FILTER_CODES, 4 | FILTER_REGEX, FILTER_SIZE, FILTER_WORDS, 5 | FOLLOW_REDIRECT, HEADER, LINES, MATCH_CODES, 6 | MATCH_REGEX, MATCH_SIZE, MATCH_WORDS, METHOD, 7 | OPT_NOT_SUPPORTED, OPT_PIPE_INPUT, PROXY, 8 | RATE_LIMIT, RETRIES, STATUS_CODE, 9 | THREADS, TIMEOUT, USER_AGENT, WORDLIST, WORDS, URL) 10 | from secator.output_types import Url 11 | from secator.serializers import JSONSerializer 12 | from secator.tasks._categories import HttpFuzzer 13 | 14 | 15 | @task() 16 | class feroxbuster(HttpFuzzer): 17 | """Simple, fast, recursive content discovery tool written in Rust""" 18 | cmd = 'feroxbuster --auto-bail --no-state' 19 | input_types = [URL] 20 | output_types = [Url] 21 | tags = ['url', 'fuzz'] 22 | input_flag = '--url' 23 | input_chunk_size = 1 24 | file_flag = OPT_PIPE_INPUT 25 | json_flag = '--silent --json' 26 | opt_prefix = '--' 27 | opts = { 28 | # 'auto_tune': {'is_flag': True, 'default': False, 'help': 'Automatically lower scan rate when too many errors'}, 29 | 'extract_links': {'is_flag': True, 'default': False, 'help': 'Extract links from response body'}, 30 | 'collect_backups': {'is_flag': True, 'default': False, 'help': 'Request likely backup exts for urls'}, 31 | 'collect_extensions': {'is_flag': True, 'default': False, 'help': 'Discover exts and add to --extensions'}, 32 | 'collect_words': {'is_flag': True, 'default': False, 'help': 'Discover important words and add to wordlist'}, 33 | } 34 | opt_key_map = { 35 | HEADER: 'headers', 36 | DATA: 'data', 37 | DELAY: OPT_NOT_SUPPORTED, 38 | DEPTH: 'depth', 39 | FILTER_CODES: 'filter-status', 40 | FILTER_REGEX: 'filter-regex', 41 | FILTER_SIZE: 'filter-size', 42 | FILTER_WORDS: 'filter-words', 43 | FOLLOW_REDIRECT: 'redirects', 44 | MATCH_CODES: 'status-codes', 45 | MATCH_REGEX: OPT_NOT_SUPPORTED, 46 | MATCH_SIZE: OPT_NOT_SUPPORTED, 47 | MATCH_WORDS: OPT_NOT_SUPPORTED, 48 | METHOD: 'methods', 49 | PROXY: 'proxy', 50 | RATE_LIMIT: 'rate-limit', 51 | RETRIES: OPT_NOT_SUPPORTED, 52 | THREADS: 'threads', 53 | TIMEOUT: 'timeout', 54 | USER_AGENT: 'user-agent', 55 | WORDLIST: 'wordlist', 56 | 'request_headers': 'headers' 57 | } 58 | item_loaders = [JSONSerializer()] 59 | output_map = { 60 | Url: { 61 | STATUS_CODE: 'status', 62 | CONTENT_TYPE: lambda x: x['headers'].get('content-type'), 63 | LINES: 'line_count', 64 | WORDS: 'word_count' 65 | } 66 | } 67 | install_pre = { 68 | '*': ['curl', 'bash'] 69 | } 70 | install_version = 'v2.11.0' 71 | install_cmd = ( 72 | f'cd /tmp && curl -sL https://raw.githubusercontent.com/epi052/feroxbuster/master/install-nix.sh | bash -s {CONFIG.dirs.bin}' # noqa: E501 73 | ) 74 | install_github_handle = 'epi052/feroxbuster' 75 | proxychains = False 76 | proxy_socks5 = True 77 | proxy_http = True 78 | profile = 'io' 79 | 80 | @staticmethod 81 | def on_start(self): 82 | if self.inputs_path: 83 | self.cmd += ' --stdin' 84 | 85 | @staticmethod 86 | def validate_item(self, item): 87 | if isinstance(item, dict): 88 | return item['type'] == 'response' 89 | return True 90 | 91 | @staticmethod 92 | def on_item(self, item): 93 | item.request_headers = self.get_opt_value('header', preprocess=True) 94 | return item 95 | -------------------------------------------------------------------------------- /secator/tasks/fping.py: -------------------------------------------------------------------------------- 1 | import validators 2 | 3 | from secator.decorators import task 4 | from secator.definitions import (DELAY, IP, HOST, OPT_NOT_SUPPORTED, PROXY, RATE_LIMIT, 5 | RETRIES, THREADS, TIMEOUT) 6 | from secator.output_types import Ip 7 | from secator.tasks._categories import ReconIp 8 | 9 | 10 | @task() 11 | class fping(ReconIp): 12 | """Send ICMP echo probes to network hosts, similar to ping, but much better.""" 13 | cmd = 'fping -a -A' 14 | input_types = [IP, HOST] 15 | output_types = [Ip] 16 | tags = ['ip', 'recon'] 17 | file_flag = '-f' 18 | input_flag = None 19 | opts = { 20 | 'reverse_dns': {'is_flag': True, 'default': False, 'short': 'r', 'help': 'Reverse DNS lookup (slower)'} 21 | } 22 | opt_prefix = '--' 23 | opt_key_map = { 24 | DELAY: 'period', 25 | PROXY: OPT_NOT_SUPPORTED, 26 | RATE_LIMIT: OPT_NOT_SUPPORTED, 27 | RETRIES: 'retry', 28 | TIMEOUT: 'timeout', 29 | THREADS: OPT_NOT_SUPPORTED, 30 | 'reverse_dns': 'r' 31 | } 32 | opt_value_map = { 33 | DELAY: lambda x: x * 1000, # convert s to ms 34 | TIMEOUT: lambda x: x * 1000 # convert s to ms 35 | } 36 | install_github_handle = 'schweikert/fping' 37 | install_version = 'v5.1' 38 | install_pre = {'*': ['fping']} 39 | ignore_return_code = True 40 | 41 | @staticmethod 42 | def item_loader(self, line): 43 | if '(' in line: 44 | host, ip = tuple(t.strip() for t in line.rstrip(')').split('(')) 45 | if (validators.ipv4(host) or validators.ipv6(host)): 46 | host = '' 47 | else: 48 | ip = line.strip() 49 | host = '' 50 | if not (validators.ipv4(ip) or validators.ipv6(ip)): 51 | return 52 | yield {'ip': ip, 'alive': True, 'host': host} 53 | 54 | @staticmethod 55 | def on_line(self, line): 56 | if 'Unreachable' in line: 57 | return '' # discard line as it pollutes output 58 | return line 59 | -------------------------------------------------------------------------------- /secator/tasks/gau.py: -------------------------------------------------------------------------------- 1 | from secator.decorators import task 2 | from secator.definitions import (DELAY, DEPTH, FILTER_CODES, FILTER_REGEX, 3 | FILTER_SIZE, FILTER_WORDS, FOLLOW_REDIRECT, 4 | HEADER, MATCH_CODES, MATCH_REGEX, MATCH_SIZE, 5 | MATCH_WORDS, METHOD, OPT_NOT_SUPPORTED, 6 | OPT_PIPE_INPUT, PROXY, RATE_LIMIT, RETRIES, 7 | THREADS, TIMEOUT, USER_AGENT, URL) 8 | from secator.output_types.url import Url 9 | from secator.serializers import JSONSerializer 10 | from secator.tasks._categories import HttpCrawler 11 | 12 | 13 | @task() 14 | class gau(HttpCrawler): 15 | """Fetch known URLs from AlienVault's Open Threat Exchange, the Wayback Machine, Common Crawl, and URLScan.""" 16 | cmd = 'gau' 17 | input_types = [URL] 18 | output_types = [Url] 19 | tags = ['pattern', 'scan'] 20 | file_flag = OPT_PIPE_INPUT 21 | json_flag = '--json' 22 | opt_prefix = '--' 23 | opts = { 24 | 'providers': {'type': str, 'default': None, 'help': 'List of providers to use (wayback,commoncrawl,otx,urlscan)'} 25 | } 26 | opt_key_map = { 27 | HEADER: OPT_NOT_SUPPORTED, 28 | DELAY: OPT_NOT_SUPPORTED, 29 | DEPTH: OPT_NOT_SUPPORTED, 30 | FILTER_CODES: 'fc', 31 | FILTER_REGEX: OPT_NOT_SUPPORTED, 32 | FILTER_SIZE: OPT_NOT_SUPPORTED, 33 | FILTER_WORDS: OPT_NOT_SUPPORTED, 34 | MATCH_CODES: 'mc', 35 | MATCH_REGEX: OPT_NOT_SUPPORTED, 36 | MATCH_SIZE: OPT_NOT_SUPPORTED, 37 | MATCH_WORDS: OPT_NOT_SUPPORTED, 38 | FOLLOW_REDIRECT: OPT_NOT_SUPPORTED, 39 | METHOD: OPT_NOT_SUPPORTED, 40 | PROXY: 'proxy', 41 | RATE_LIMIT: OPT_NOT_SUPPORTED, 42 | RETRIES: 'retries', 43 | THREADS: 'threads', 44 | TIMEOUT: 'timeout', 45 | USER_AGENT: OPT_NOT_SUPPORTED, 46 | } 47 | item_loaders = [JSONSerializer()] 48 | install_pre = { 49 | 'apk': ['libc6-compat'] 50 | } 51 | install_version = 'v2.2.4' 52 | install_cmd = 'go install -v github.com/lc/gau/v2/cmd/gau@[install_version]' 53 | install_github_handle = 'lc/gau' 54 | proxychains = False 55 | proxy_socks5 = True 56 | proxy_http = True 57 | profile = 'io' 58 | -------------------------------------------------------------------------------- /secator/tasks/gf.py: -------------------------------------------------------------------------------- 1 | from secator.decorators import task 2 | from secator.definitions import OPT_PIPE_INPUT, OPT_NOT_SUPPORTED 3 | from secator.output_types import Tag 4 | from secator.tasks._categories import Tagger 5 | 6 | 7 | @task() 8 | class gf(Tagger): 9 | """Wrapper around grep, to help you grep for things.""" 10 | cmd = 'gf' 11 | input_types = None # anything 12 | output_types = [Tag] 13 | tags = ['pattern', 'scan'] 14 | file_flag = OPT_PIPE_INPUT 15 | input_flag = OPT_PIPE_INPUT 16 | version_flag = OPT_NOT_SUPPORTED 17 | opts = { 18 | 'pattern': {'type': str, 'help': 'Pattern names to match against (comma-delimited)', 'required': True} 19 | } 20 | opt_key_map = { 21 | 'pattern': '' 22 | } 23 | install_cmd = ( 24 | 'go install -v github.com/tomnomnom/gf@latest && ' 25 | 'git clone https://github.com/1ndianl33t/Gf-Patterns $HOME/.gf || true' 26 | ) 27 | 28 | @staticmethod 29 | def item_loader(self, line): 30 | yield {'match': line, 'name': self.get_opt_value('pattern').rstrip() + ' pattern'} # noqa: E731,E501 31 | 32 | @staticmethod 33 | def on_item(self, item): 34 | if isinstance(item, Tag): 35 | item.extra_data = {'source': 'url'} 36 | return item 37 | -------------------------------------------------------------------------------- /secator/tasks/gitleaks.py: -------------------------------------------------------------------------------- 1 | import click 2 | import os 3 | import yaml 4 | 5 | from secator.config import CONFIG 6 | from secator.decorators import task 7 | from secator.runners import Command 8 | from secator.definitions import (OUTPUT_PATH, PATH) 9 | from secator.utils import caml_to_snake 10 | from secator.output_types import Tag, Info, Error 11 | 12 | 13 | @task() 14 | class gitleaks(Command): 15 | """Tool for detecting secrets like passwords, API keys, and tokens in git repos, files, and stdin.""" 16 | cmd = 'gitleaks' 17 | tags = ['secret', 'scan'] 18 | input_types = [PATH] 19 | input_flag = None 20 | json_flag = '-f json' 21 | opt_prefix = '--' 22 | opts = { 23 | 'ignore_path': {'type': str, 'help': 'Path to .gitleaksignore file or folder containing one'}, 24 | 'mode': {'type': click.Choice(['git', 'dir']), 'default': 'dir', 'help': 'Gitleaks mode', 'internal': True, 'display': True}, # noqa: E501 25 | 'config': {'type': str, 'short': 'config', 'help': 'Gitleaks config file path'} 26 | } 27 | opt_key_map = { 28 | "ignore_path": "gitleaks-ignore-path" 29 | } 30 | input_type = "folder" 31 | output_types = [Tag] 32 | output_map = { 33 | Tag: { 34 | 'name': 'RuleID', 35 | 'match': lambda x: f'{x["File"]}:{x["StartLine"]}:{x["StartColumn"]}', 36 | 'extra_data': lambda x: {caml_to_snake(k): v for k, v in x.items() if k not in ['RuleID', 'File']} 37 | } 38 | } 39 | install_pre = {'*': ['git', 'make']} 40 | install_version = 'v8.24.3' 41 | install_cmd = ( 42 | f'git clone https://github.com/gitleaks/gitleaks.git {CONFIG.dirs.share}/gitleaks_[install_version] || true &&' 43 | f'cd {CONFIG.dirs.share}/gitleaks_[install_version] && make build &&' 44 | f'mv {CONFIG.dirs.share}/gitleaks_[install_version]/gitleaks {CONFIG.dirs.bin}' 45 | ) 46 | install_github_handle = 'gitleaks/gitleaks' 47 | 48 | @staticmethod 49 | def on_cmd(self): 50 | # replace fake -mode opt by subcommand 51 | mode = self.get_opt_value('mode') 52 | self.cmd = self.cmd.replace(f'{gitleaks.cmd} ', f'{gitleaks.cmd} {mode} ') 53 | 54 | # add output path 55 | output_path = self.get_opt_value(OUTPUT_PATH) 56 | if not output_path: 57 | output_path = f'{self.reports_folder}/.outputs/{self.unique_name}.json' 58 | self.output_path = output_path 59 | self.cmd += f' -r {self.output_path}' 60 | self.cmd += ' --exit-code 0' 61 | 62 | @staticmethod 63 | def on_cmd_done(self): 64 | if not os.path.exists(self.output_path): 65 | yield Error(message=f'Could not find JSON results in {self.output_path}') 66 | return 67 | 68 | yield Info(message=f'JSON results saved to {self.output_path}') 69 | with open(self.output_path, 'r') as f: 70 | results = yaml.safe_load(f.read()) 71 | for result in results: 72 | yield Tag( 73 | name=result['RuleID'], 74 | match='{File}:{StartLine}:{StartColumn}'.format(**result), 75 | extra_data={ 76 | caml_to_snake(k): v for k, v in result.items() 77 | if k not in ['RuleID', 'File'] 78 | } 79 | ) 80 | -------------------------------------------------------------------------------- /secator/tasks/gospider.py: -------------------------------------------------------------------------------- 1 | from furl import furl 2 | 3 | from secator.decorators import task 4 | from secator.definitions import (CONTENT_LENGTH, DELAY, DEPTH, FILTER_CODES, 5 | FILTER_REGEX, FILTER_SIZE, FILTER_WORDS, 6 | FOLLOW_REDIRECT, HEADER, MATCH_CODES, 7 | MATCH_REGEX, MATCH_SIZE, MATCH_WORDS, METHOD, 8 | OPT_NOT_SUPPORTED, PROXY, RATE_LIMIT, RETRIES, 9 | STATUS_CODE, THREADS, TIMEOUT, URL, USER_AGENT) 10 | from secator.output_types import Url 11 | from secator.serializers import JSONSerializer 12 | from secator.tasks._categories import HttpCrawler 13 | 14 | 15 | @task() 16 | class gospider(HttpCrawler): 17 | """Fast web spider written in Go.""" 18 | cmd = 'gospider' 19 | input_types = [URL] 20 | output_types = [Url] 21 | tags = ['url', 'crawl'] 22 | file_flag = '-S' 23 | input_flag = '-s' 24 | json_flag = '--json' 25 | opt_prefix = '--' 26 | opt_key_map = { 27 | HEADER: 'header', 28 | DELAY: 'delay', 29 | DEPTH: 'depth', 30 | FILTER_CODES: OPT_NOT_SUPPORTED, 31 | FILTER_REGEX: OPT_NOT_SUPPORTED, 32 | FILTER_SIZE: OPT_NOT_SUPPORTED, 33 | FILTER_WORDS: OPT_NOT_SUPPORTED, 34 | FOLLOW_REDIRECT: 'no-redirect', 35 | MATCH_CODES: OPT_NOT_SUPPORTED, 36 | MATCH_REGEX: OPT_NOT_SUPPORTED, 37 | MATCH_SIZE: OPT_NOT_SUPPORTED, 38 | MATCH_WORDS: OPT_NOT_SUPPORTED, 39 | METHOD: OPT_NOT_SUPPORTED, 40 | PROXY: 'proxy', 41 | RATE_LIMIT: OPT_NOT_SUPPORTED, 42 | RETRIES: OPT_NOT_SUPPORTED, 43 | THREADS: 'threads', 44 | TIMEOUT: 'timeout', 45 | USER_AGENT: 'user-agent', 46 | } 47 | opt_value_map = { 48 | FOLLOW_REDIRECT: lambda x: not x, 49 | DELAY: lambda x: round(x) if isinstance(x, float) else x 50 | } 51 | item_loaders = [JSONSerializer()] 52 | output_map = { 53 | Url: { 54 | URL: 'output', 55 | STATUS_CODE: 'status', 56 | CONTENT_LENGTH: 'length', 57 | } 58 | } 59 | install_version = 'v1.1.6' 60 | install_cmd = 'go install -v github.com/jaeles-project/gospider@[install_version]' 61 | install_github_handle = 'jaeles-project/gospider' 62 | proxychains = False 63 | proxy_socks5 = True # with leaks... https://github.com/jaeles-project/gospider/issues/61 64 | proxy_http = True # with leaks... https://github.com/jaeles-project/gospider/issues/61 65 | profile = 'io' 66 | 67 | @staticmethod 68 | def validate_item(self, item): 69 | """Keep only items that match the same host.""" 70 | if not isinstance(item, dict): 71 | return False 72 | try: 73 | netloc_in = furl(item['input']).netloc 74 | netloc_out = furl(item['output']).netloc 75 | if netloc_in != netloc_out: 76 | return False 77 | except ValueError: # gospider returns invalid URLs for output sometimes 78 | return False 79 | return True 80 | 81 | @staticmethod 82 | def on_json_loaded(self, item): 83 | item['request_headers'] = self.get_opt_value('header', preprocess=True) 84 | yield item 85 | -------------------------------------------------------------------------------- /secator/tasks/grype.py: -------------------------------------------------------------------------------- 1 | from secator.config import CONFIG 2 | from secator.decorators import task 3 | from secator.definitions import (DELAY, FOLLOW_REDIRECT, HEADER, 4 | OPT_NOT_SUPPORTED, PROXY, RATE_LIMIT, RETRIES, 5 | THREADS, TIMEOUT, USER_AGENT, PATH, STRING, URL) 6 | from secator.output_types import Vulnerability 7 | from secator.tasks._categories import VulnCode 8 | 9 | 10 | @task() 11 | class grype(VulnCode): 12 | """Vulnerability scanner for container images and filesystems.""" 13 | cmd = 'grype --quiet' 14 | input_types = [PATH, URL, STRING] 15 | output_types = [Vulnerability] 16 | tags = ['vuln', 'scan'] 17 | input_flag = '' 18 | input_chunk_size = 1 19 | file_flag = None 20 | json_flag = None 21 | opt_prefix = '--' 22 | opt_key_map = { 23 | HEADER: OPT_NOT_SUPPORTED, 24 | DELAY: OPT_NOT_SUPPORTED, 25 | FOLLOW_REDIRECT: OPT_NOT_SUPPORTED, 26 | PROXY: OPT_NOT_SUPPORTED, 27 | RATE_LIMIT: OPT_NOT_SUPPORTED, 28 | RETRIES: OPT_NOT_SUPPORTED, 29 | THREADS: OPT_NOT_SUPPORTED, 30 | TIMEOUT: OPT_NOT_SUPPORTED, 31 | USER_AGENT: OPT_NOT_SUPPORTED 32 | } 33 | install_pre = { 34 | '*': ['curl'] 35 | } 36 | install_version = 'v0.91.2' 37 | install_cmd = ( 38 | f'curl -sSfL https://raw.githubusercontent.com/anchore/grype/main/install.sh | sh -s -- -b {CONFIG.dirs.bin}' 39 | ) 40 | install_github_handle = 'anchore/grype' 41 | 42 | @staticmethod 43 | def item_loader(self, line): 44 | """Load vulnerabilty dicts from grype line output.""" 45 | split = [i for i in line.split(' ') if i] 46 | if len(split) not in [5, 6] or split[0] == 'NAME': 47 | return 48 | versions_fixed = None 49 | if len(split) == 5: # no version fixed 50 | product, version, product_type, vuln_id, severity = tuple(split) 51 | elif len(split) == 6: 52 | product, version, versions_fixed, product_type, vuln_id, severity = tuple(split) 53 | extra_data = { 54 | 'lang': product_type, 55 | 'product': product, 56 | 'version': version, 57 | } 58 | if versions_fixed: 59 | extra_data['versions_fixed'] = [c.strip() for c in versions_fixed.split(', ')] 60 | data = { 61 | 'id': vuln_id, 62 | 'name': vuln_id, 63 | 'matched_at': self.inputs[0], 64 | 'confidence': 'medium', 65 | 'severity': severity.lower(), 66 | 'provider': 'grype', 67 | 'cvss_score': -1, 68 | 'tags': [], 69 | } 70 | if vuln_id.startswith('GHSA'): 71 | data['provider'] = 'github.com' 72 | data['references'] = [f'https://github.com/advisories/{vuln_id}'] 73 | vuln = VulnCode.lookup_cve_from_ghsa(vuln_id) 74 | if vuln: 75 | data.update(vuln) 76 | data['severity'] = data['severity'] or severity.lower() 77 | extra_data['ghsa_id'] = vuln_id 78 | elif vuln_id.startswith('CVE'): 79 | vuln = VulnCode.lookup_cve(vuln_id) 80 | if vuln: 81 | data.update(vuln) 82 | data['severity'] = data['severity'] or severity.lower() 83 | data['extra_data'] = extra_data 84 | yield data 85 | -------------------------------------------------------------------------------- /secator/tasks/h8mail.py: -------------------------------------------------------------------------------- 1 | import os 2 | import json 3 | 4 | from secator.decorators import task 5 | from secator.definitions import EMAIL, OUTPUT_PATH 6 | from secator.tasks._categories import OSInt 7 | from secator.output_types import UserAccount, Info, Error 8 | 9 | 10 | @task() 11 | class h8mail(OSInt): 12 | """Email information and password lookup tool.""" 13 | cmd = 'h8mail' 14 | input_types = [EMAIL] 15 | output_types = [UserAccount] 16 | tags = ['user', 'recon', 'email'] 17 | json_flag = '--json ' 18 | input_flag = '--targets' 19 | file_flag = '-domain' 20 | version_flag = '--help' 21 | opt_prefix = '--' 22 | opts = { 23 | 'config': {'type': str, 'help': 'Configuration file for API keys'}, 24 | 'local_breach': {'type': str, 'short': 'lb', 'help': 'Local breach file'} 25 | } 26 | install_version = '2.5.6' 27 | install_cmd = 'pipx install h8mail==[install_version] --force' 28 | 29 | @staticmethod 30 | def on_start(self): 31 | output_path = self.get_opt_value(OUTPUT_PATH) 32 | if not output_path: 33 | output_path = f'{self.reports_folder}/.outputs/{self.unique_name}.json' 34 | self.output_path = output_path 35 | self.cmd = self.cmd.replace('--json', f'--json {self.output_path}') 36 | 37 | @staticmethod 38 | def on_cmd_done(self): 39 | if not os.path.exists(self.output_path): 40 | yield Error(message=f'Could not find JSON results in {self.output_path}') 41 | return 42 | 43 | yield Info(message=f'JSON results saved to {self.output_path}') 44 | with open(self.output_path, 'r') as f: 45 | data = json.load(f) 46 | 47 | targets = data['targets'] 48 | for target in targets: 49 | email = target['target'] 50 | target_data = target.get('data', []) 51 | pwn_num = target['pwn_num'] 52 | if not pwn_num > 0: 53 | continue 54 | if len(target_data) > 0: 55 | entries = target_data[0] 56 | for entry in entries: 57 | source, site_name = tuple(entry.split(':')) 58 | yield UserAccount(**{ 59 | "site_name": site_name, 60 | "username": email.split('@')[0], 61 | "email": email, 62 | "extra_data": { 63 | 'source': source 64 | }, 65 | }) 66 | else: 67 | yield UserAccount(**{ 68 | "username": email.split('@')[0], 69 | "email": email, 70 | "extra_data": { 71 | 'source': self.get_opt_value('local_breach') 72 | }, 73 | }) 74 | -------------------------------------------------------------------------------- /secator/tasks/maigret.py: -------------------------------------------------------------------------------- 1 | import json 2 | import logging 3 | import os 4 | import re 5 | 6 | from secator.decorators import task 7 | from secator.definitions import (DELAY, EXTRA_DATA, OPT_NOT_SUPPORTED, OUTPUT_PATH, PROXY, 8 | RATE_LIMIT, RETRIES, SITE_NAME, THREADS, 9 | TIMEOUT, URL, STRING, SLUG) 10 | from secator.output_types import UserAccount, Info, Error 11 | from secator.tasks._categories import ReconUser 12 | 13 | logger = logging.getLogger(__name__) 14 | 15 | 16 | @task() 17 | class maigret(ReconUser): 18 | """Collect a dossier on a person by username.""" 19 | cmd = 'maigret' 20 | input_types = [SLUG, STRING] 21 | output_types = [UserAccount] 22 | tags = ['user', 'recon', 'username'] 23 | file_flag = None 24 | input_flag = None 25 | json_flag = '--json ndjson' 26 | opt_prefix = '--' 27 | opts = { 28 | 'site': {'type': str, 'help': 'Sites to check'}, 29 | } 30 | opt_key_map = { 31 | DELAY: OPT_NOT_SUPPORTED, 32 | PROXY: 'proxy', 33 | RATE_LIMIT: OPT_NOT_SUPPORTED, 34 | RETRIES: 'retries', 35 | TIMEOUT: 'timeout', 36 | THREADS: OPT_NOT_SUPPORTED 37 | } 38 | output_map = { 39 | UserAccount: { 40 | SITE_NAME: 'sitename', 41 | URL: lambda x: x['status']['url'], 42 | EXTRA_DATA: lambda x: x['status'].get('ids', {}) 43 | } 44 | } 45 | install_version = '0.5.0a' 46 | install_cmd = 'pipx install git+https://github.com/soxoj/maigret --force' 47 | socks5_proxy = True 48 | profile = 'io' 49 | 50 | @staticmethod 51 | def on_init(self): 52 | self.output_path = self.get_opt_value(OUTPUT_PATH) 53 | 54 | @staticmethod 55 | def on_cmd_done(self): 56 | # Search output path in cmd output 57 | if not self.output_path: 58 | matches = re.findall('JSON ndjson report for .* saved in (.*)', self.output) 59 | if not matches: 60 | yield Error(message='JSON output file not found in command output.') 61 | return 62 | self.output_path = matches 63 | 64 | if not isinstance(self.output_path, list): 65 | self.output_path = [self.output_path] 66 | 67 | for path in self.output_path: 68 | if not os.path.exists(path): 69 | yield Error(message=f'Could not find JSON results in {path}') 70 | return 71 | 72 | yield Info(message=f'JSON results saved to {path}') 73 | with open(path, 'r') as f: 74 | data = [json.loads(line) for line in f.read().splitlines()] 75 | for item in data: 76 | yield item 77 | 78 | @staticmethod 79 | def validate_item(self, item): 80 | if isinstance(item, dict): 81 | return item['http_status'] == 200 82 | return True 83 | -------------------------------------------------------------------------------- /secator/tasks/mapcidr.py: -------------------------------------------------------------------------------- 1 | import validators 2 | 3 | from secator.decorators import task 4 | from secator.definitions import (CIDR_RANGE, IP, OPT_NOT_SUPPORTED, PROXY, 5 | RATE_LIMIT, RETRIES, THREADS, TIMEOUT) 6 | from secator.output_types import Ip 7 | from secator.tasks._categories import ReconIp 8 | 9 | 10 | @task() 11 | class mapcidr(ReconIp): 12 | """Utility program to perform multiple operations for a given subnet/cidr ranges.""" 13 | cmd = 'mapcidr' 14 | input_types = [CIDR_RANGE, IP] 15 | output_types = [Ip] 16 | tags = ['ip', 'recon'] 17 | input_flag = '-cidr' 18 | file_flag = '-cl' 19 | install_pre = { 20 | 'apk': ['libc6-compat'] 21 | } 22 | install_version = 'v1.1.34' 23 | install_cmd = 'go install -v github.com/projectdiscovery/mapcidr/cmd/mapcidr@[install_version]' 24 | install_github_handle = 'projectdiscovery/mapcidr' 25 | opt_key_map = { 26 | THREADS: OPT_NOT_SUPPORTED, 27 | PROXY: OPT_NOT_SUPPORTED, 28 | RATE_LIMIT: OPT_NOT_SUPPORTED, 29 | RETRIES: OPT_NOT_SUPPORTED, 30 | TIMEOUT: OPT_NOT_SUPPORTED, 31 | } 32 | 33 | @staticmethod 34 | def item_loader(self, line): 35 | if validators.ipv4(line) or validators.ipv6(line): 36 | yield {'ip': line, 'alive': False} 37 | return 38 | -------------------------------------------------------------------------------- /secator/tasks/naabu.py: -------------------------------------------------------------------------------- 1 | from secator.decorators import task 2 | from secator.definitions import (DELAY, HOST, IP, OPT_NOT_SUPPORTED, PORT, PORTS, 3 | PROXY, RATE_LIMIT, RETRIES, STATE, THREADS, 4 | TIMEOUT, TOP_PORTS) 5 | from secator.output_types import Port 6 | from secator.serializers import JSONSerializer 7 | from secator.tasks._categories import ReconPort 8 | 9 | 10 | @task() 11 | class naabu(ReconPort): 12 | """Port scanning tool written in Go.""" 13 | cmd = 'naabu' 14 | input_types = [HOST, IP] 15 | output_types = [Port] 16 | tags = ['port', 'scan'] 17 | input_flag = '-host' 18 | file_flag = '-list' 19 | json_flag = '-json' 20 | opts = { 21 | PORTS: {'type': str, 'short': 'p', 'help': 'Ports'}, 22 | TOP_PORTS: {'type': str, 'short': 'tp', 'help': 'Top ports'}, 23 | 'scan_type': {'type': str, 'short': 'st', 'help': 'Scan type (SYN (s)/CONNECT(c))'}, 24 | 'skip_host_discovery': {'is_flag': True, 'short': 'Pn', 'default': False, 'help': 'Skip host discovery'}, 25 | # 'health_check': {'is_flag': True, 'short': 'hc', 'help': 'Health check'} 26 | } 27 | opt_key_map = { 28 | DELAY: OPT_NOT_SUPPORTED, 29 | PROXY: 'proxy', 30 | RATE_LIMIT: 'rate', 31 | RETRIES: 'retries', 32 | TIMEOUT: 'timeout', 33 | THREADS: 'c', 34 | 35 | # naabu opts 36 | PORTS: 'port', 37 | 'scan_type': 's', 38 | # 'health_check': 'hc' 39 | } 40 | opt_value_map = { 41 | TIMEOUT: lambda x: int(x*1000) if x and x > 0 else None, # convert to milliseconds 42 | RETRIES: lambda x: 1 if x == 0 else x, 43 | PROXY: lambda x: x.replace('socks5://', '') 44 | } 45 | item_loaders = [JSONSerializer()] 46 | output_map = { 47 | Port: { 48 | PORT: lambda x: x['port'], 49 | HOST: lambda x: x['host'] if 'host' in x else x['ip'], 50 | STATE: lambda x: 'open' 51 | } 52 | } 53 | install_version = 'v2.3.3' 54 | install_cmd = 'go install -v github.com/projectdiscovery/naabu/v2/cmd/naabu@[install_version]' 55 | install_github_handle = 'projectdiscovery/naabu' 56 | install_pre = {'apt': ['libpcap-dev'], 'apk': ['libpcap-dev', 'libc6-compat'], 'pacman|brew': ['libpcap']} 57 | install_post = {'arch|alpine': 'sudo ln -sf /usr/lib/libpcap.so /usr/lib/libpcap.so.0.8'} 58 | proxychains = False 59 | proxy_socks5 = True 60 | proxy_http = False 61 | profile = 'io' 62 | 63 | @staticmethod 64 | def before_init(self): 65 | for ix, input in enumerate(self.inputs): 66 | if input == 'localhost': 67 | self.inputs[ix] = '127.0.0.1' 68 | 69 | @staticmethod 70 | def on_cmd(self): 71 | scan_type = self.get_opt_value('scan_type') 72 | if scan_type == 's': 73 | self.requires_sudo = True 74 | 75 | @staticmethod 76 | def on_item(self, item): 77 | if isinstance(item, Port): 78 | if item.host == '127.0.0.1': 79 | item.host = 'localhost' 80 | return item 81 | -------------------------------------------------------------------------------- /secator/tasks/subfinder.py: -------------------------------------------------------------------------------- 1 | from secator.decorators import task 2 | from secator.definitions import (DELAY, DOMAIN, HOST, OPT_NOT_SUPPORTED, PROXY, 3 | RATE_LIMIT, RETRIES, THREADS, TIMEOUT) 4 | from secator.output_types import Subdomain 5 | from secator.serializers import JSONSerializer 6 | from secator.tasks._categories import ReconDns 7 | 8 | 9 | @task() 10 | class subfinder(ReconDns): 11 | """Fast passive subdomain enumeration tool.""" 12 | cmd = 'subfinder -cs' 13 | input_types = [HOST] 14 | output_types = [Subdomain] 15 | tags = ['dns', 'recon'] 16 | file_flag = '-dL' 17 | input_flag = '-d' 18 | json_flag = '-json' 19 | opt_key_map = { 20 | DELAY: OPT_NOT_SUPPORTED, 21 | PROXY: 'proxy', 22 | RATE_LIMIT: 'rate-limit', 23 | RETRIES: OPT_NOT_SUPPORTED, 24 | TIMEOUT: 'timeout', 25 | THREADS: 't' 26 | } 27 | opt_value_map = { 28 | PROXY: lambda x: x.replace('http://', '').replace('https://', '') if x else None 29 | } 30 | item_loaders = [JSONSerializer()] 31 | output_map = { 32 | Subdomain: { 33 | DOMAIN: 'input', 34 | } 35 | } 36 | install_version = 'v2.7.0' 37 | install_cmd = 'go install -v github.com/projectdiscovery/subfinder/v2/cmd/subfinder@[install_version]' 38 | install_github_handle = 'projectdiscovery/subfinder' 39 | proxychains = False 40 | proxy_http = True 41 | proxy_socks5 = False 42 | profile = 'io' 43 | 44 | @staticmethod 45 | def validate_item(self, item): 46 | if isinstance(item, dict): 47 | return item['input'] != 'localhost' 48 | return True 49 | -------------------------------------------------------------------------------- /secator/thread.py: -------------------------------------------------------------------------------- 1 | import threading 2 | 3 | from secator.output_types import Error 4 | 5 | 6 | class Thread(threading.Thread): 7 | """A thread that returns errors in their join() method as secator.output_types.Error.""" 8 | 9 | def __init__(self, *args, **kwargs): 10 | super().__init__(*args, **kwargs) 11 | self.error = None 12 | 13 | def run(self): 14 | try: 15 | if hasattr(self, '_target'): 16 | self._target(*self._args, **self._kwargs) 17 | except Exception as e: 18 | self.error = Error.from_exception(e) 19 | 20 | def join(self, *args, **kwargs): 21 | super().join(*args, **kwargs) 22 | if self.error: 23 | return self.error 24 | return None 25 | -------------------------------------------------------------------------------- /secator/workflows/__init__.py: -------------------------------------------------------------------------------- 1 | from secator.loader import get_configs_by_type 2 | from secator.runners import Workflow 3 | 4 | 5 | class DynamicWorkflow(Workflow): 6 | def __init__(self, config): 7 | self.config = config 8 | 9 | def __call__(self, targets, **kwargs): 10 | hooks = kwargs.pop('hooks', {}) 11 | results = kwargs.pop('results', []) 12 | context = kwargs.pop('context', {}) 13 | super().__init__( 14 | config=self.config, 15 | inputs=targets, 16 | results=results, 17 | hooks=hooks, 18 | context=context, 19 | run_opts=kwargs) 20 | return self 21 | 22 | 23 | DYNAMIC_WORKFLOWS = {} 24 | for workflow in get_configs_by_type('workflow'): 25 | instance = DynamicWorkflow(workflow) 26 | DYNAMIC_WORKFLOWS[workflow.name] = instance 27 | 28 | globals().update(DYNAMIC_WORKFLOWS) 29 | __all__ = list(DYNAMIC_WORKFLOWS) 30 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/freelabz/secator/8b8cafb6472b15cc9938921ce7ac6e0fc45df11c/tests/__init__.py -------------------------------------------------------------------------------- /tests/fixtures/arjun_output.json: -------------------------------------------------------------------------------- 1 | { 2 | "http://testphp.vulnweb.com/hpp": { 3 | "headers": { 4 | "User-Agent": "Mozilla/5.0 (Windows NT 5.1; rv:7.0.1) Gecko/20100101 Firefox/7.0.1" 5 | }, 6 | "method": "GET", 7 | "params": [ 8 | "pp" 9 | ] 10 | } 11 | } -------------------------------------------------------------------------------- /tests/fixtures/bup_output.json: -------------------------------------------------------------------------------- 1 | { 2 | "request_curl_cmd": "/usr/bin/curl -sS -kgi -H 'User-Agent: Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/101.0.4951.41 Safari/537.36' --path-as-is -X TRACE https://example.com", 3 | "request_curl_payload": "-X TRACE https://example.com", 4 | "response_headers": "HTTP/1.1 200 OK\nDate: Fri, 03 May 2024 14:24:50 GMT\nServer: Apache/2.4.56 (Debian) mod_fcgid/2.3.9 OpenSSL/1.1.1w\nConnection: close\nTransfer-Encoding: chunked\nContent-Type: message/http", 5 | "response_data": "TRACE / HTTP/1.1\nHost: example.com\nAccept: */*\nUser-Agent: Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/101.0.4951.41 Safari/537.36", 6 | "response_status_code": 200, 7 | "response_content_type": "message/http", 8 | "response_content_length": "", 9 | "response_lines_count": 3, 10 | "response_words_count": 14, 11 | "response_title": "", 12 | "response_server_type": "Apache/2.4.56 (Debian) mod_fcgid/2.3.9 OpenSSL/1.1.1w", 13 | "response_redirect_url": "", 14 | "response_html_filename": "bypass-81890dda67871d1d84165e46c6ab89fc.html", 15 | "request_url": "https://example.com" 16 | } -------------------------------------------------------------------------------- /tests/fixtures/dalfox_output.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "V", 3 | "inject_type": "inHTML-none(1)-URL", 4 | "poc_type": "plain", 5 | "method": "GET", 6 | "data": "https://xss-game.appspot.com/level1/frame?query=%3Cdiv+contextmenu%3Dxss%3E%3Cp%3E1%3Cmenu+type%3Dcontext+class%3Ddalfox+id%3Dxss+onshow%3Dprint%281%29%3E%3C%2Fmenu%3E%3C%2Fdiv%3E", 7 | "param": "query", 8 | "payload": "

1

", 9 | "evidence": "13 line: s were found for

1

&1 > /dev/null; then 6 | echo "Error: $BIN_NAME not found, trying docker compose" 7 | BIN_NAME="docker compose" 8 | fi 9 | if ! command -v $BIN_NAME 2>&1 > /dev/null; then 10 | echo "Error: $BIN_NAME not found" 11 | exit 1 12 | fi 13 | 14 | if [ "$TEST_NO_CLEANUP" != "1" ]; then 15 | $BIN_NAME pull 16 | $BIN_NAME up -d 17 | else 18 | $BIN_NAME up -d --wait --no-recreate 19 | fi 20 | -------------------------------------------------------------------------------- /tests/integration/teardown.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | echo "Tearing down Juice shop ..." 4 | BIN_NAME="docker compose" 5 | if ! command -v $BIN_NAME 2>&1 > /dev/null; then 6 | echo "Error: $BIN_NAME not found, trying docker-compose" 7 | BIN_NAME="docker-compose" 8 | fi 9 | if ! command -v $BIN_NAME 2>&1 > /dev/null; then 10 | echo "Error: $BIN_NAME not found" 11 | exit 1 12 | fi 13 | 14 | if [ "$TEST_NO_CLEANUP" = "1" ]; then 15 | echo "Aborting cleanup since TEST_NO_CLEANUP is set" 16 | exit 0 17 | fi 18 | $BIN_NAME down -v 19 | -------------------------------------------------------------------------------- /tests/integration/test_addons.py: -------------------------------------------------------------------------------- 1 | import os 2 | import unittest 3 | from unittest import mock 4 | from secator.utils_test import clear_modules 5 | from secator.config import CONFIG 6 | 7 | 8 | class TestAddonMongo(unittest.TestCase): 9 | 10 | @classmethod 11 | @mock.patch.dict(os.environ, {"SECATOR_ADDONS_MONGODB_URL": "mongodb://localhost"}) 12 | def setUpClass(cls): 13 | clear_modules() 14 | from secator.config import CONFIG 15 | print(CONFIG.addons.mongodb.url) 16 | raise Exception('test') 17 | 18 | @classmethod 19 | def tearDownClass(cls): 20 | pass 21 | 22 | def test_ok(self): 23 | print(CONFIG.addons.mongodb.url) -------------------------------------------------------------------------------- /tests/integration/test_scans.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import os 3 | import unittest 4 | import warnings 5 | from time import sleep 6 | 7 | from secator.definitions import DEBUG 8 | from secator.runners import Command, Scan 9 | from secator.utils import setup_logging 10 | from secator.utils_test import TEST_SCANS, CommandOutputTester, load_fixture 11 | from tests.integration.inputs import INPUTS_SCANS 12 | from tests.integration.outputs import OUTPUTS_SCANS 13 | 14 | INTEGRATION_DIR = os.path.dirname(os.path.abspath(__file__)) 15 | level = logging.DEBUG if DEBUG == ["1"] else logging.INFO 16 | setup_logging(level) 17 | 18 | 19 | class TestScans(unittest.TestCase, CommandOutputTester): 20 | 21 | def setUp(self): 22 | warnings.simplefilter('ignore', category=ResourceWarning) 23 | warnings.simplefilter('ignore', category=DeprecationWarning) 24 | Command.execute( 25 | f'sh {INTEGRATION_DIR}/setup.sh', 26 | quiet=True, 27 | cwd=INTEGRATION_DIR 28 | ) 29 | sleep(15) 30 | 31 | def tearDown(self): 32 | Command.execute( 33 | f'sh {INTEGRATION_DIR}/teardown.sh', 34 | quiet=True, 35 | cwd=INTEGRATION_DIR 36 | ) 37 | 38 | def test_scans(self): 39 | opts = { 40 | 'filter_size': 1987, 41 | 'follow_redirect': True, 42 | 'match_codes': '200', 43 | 'httpx.match_codes': False, 44 | 'httpx.filter_size': False, 45 | 'nuclei.retries': 5, 46 | 'nuclei.timeout': 15, 47 | 'rate_limit': 1000, 48 | 'wordlist': load_fixture('wordlist', INTEGRATION_DIR, only_path=True), 49 | 'timeout': 7, 50 | 'depth': 2 51 | } 52 | 53 | for conf in TEST_SCANS: 54 | with self.subTest(name=conf.name): 55 | inputs = INPUTS_SCANS.get(conf.name, []) 56 | outputs = OUTPUTS_SCANS.get(conf.name, []) 57 | scan = Scan(conf, inputs=inputs, run_opts=opts) 58 | self._test_runner_output( 59 | scan, 60 | expected_results=outputs) -------------------------------------------------------------------------------- /tests/integration/test_tasks_categories.py: -------------------------------------------------------------------------------- 1 | import json 2 | import unittest 3 | 4 | from pathlib import Path 5 | from unittest import mock 6 | 7 | from secator.utils_test import load_fixture, FIXTURES_DIR 8 | from secator.tasks._categories import Vuln 9 | from secator.config import CONFIG 10 | 11 | 12 | class TestCveHelpers(unittest.TestCase): 13 | 14 | @mock.patch('secator.config.CONFIG.runners.skip_cve_search', False) 15 | def test_lookup_cve_circle(self): 16 | fixture = json.dumps(load_fixture('cve_circle_output', FIXTURES_DIR), sort_keys=True) 17 | cve_path = f'{CONFIG.dirs.data}/cves/CVE-2023-5568.json' 18 | if Path(cve_path).exists(): 19 | Path(cve_path).unlink() # make sure we don't use cache data 20 | actual = Vuln.lookup_cve_from_cve_circle('CVE-2023-5568') 21 | actual = json.dumps(actual, sort_keys=True) 22 | self.assertEqual(actual, fixture) 23 | 24 | @mock.patch('secator.config.CONFIG.runners.skip_cve_search', False) 25 | def test_lookup_cve_from_ghsa_no_cve_id(self): 26 | actual = Vuln.lookup_cve_from_ghsa('GHSA-ggpf-24jw-3fcw') 27 | self.assertIsNone(actual) 28 | 29 | @mock.patch('secator.config.CONFIG.runners.skip_cve_search', False) 30 | def test_lookup_cve_from_ghsa(self): 31 | actual = Vuln.lookup_cve_from_ghsa('GHSA-w596-4wvx-j9j6') 32 | self.assertIsNotNone(actual) 33 | self.assertEqual(actual['id'], 'CVE-2022-42969') 34 | 35 | @mock.patch('secator.config.CONFIG.runners.skip_cve_search', False) 36 | def test_lookup_cve(self): 37 | fixture = load_fixture('cve_circle_output', FIXTURES_DIR) 38 | cve_path = f'{CONFIG.dirs.data}/cves/CVE-2023-5568.json' 39 | with open(cve_path, 'w') as f: 40 | f.write(json.dumps(fixture, indent=2)) 41 | vuln = Vuln.lookup_cve('CVE-2023-5568', 'cpe:/o:redhat:enterprise_linux:9') 42 | vuln2 = Vuln.lookup_cve('CVE-2023-5568', 'cpe:2.3:o:redhat:enterprise_linux:9:*:*:*:*:*:*:*') 43 | self.assertIn('cpe-match', vuln['tags']) 44 | self.assertIn('cpe-match', vuln2['tags']) 45 | -------------------------------------------------------------------------------- /tests/integration/test_worker.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from secator.output_types import Url, Target, Port, Vulnerability, Info, Warning, Error 3 | from secator.runners import Command 4 | from secator.serializers import JSONSerializer 5 | from time import sleep 6 | from threading import Thread 7 | import queue 8 | 9 | class TestWorker(unittest.TestCase): 10 | 11 | @classmethod 12 | def setUpClass(cls): 13 | cls.queue = queue.Queue() 14 | cls.cmd = Command.execute('secator worker --use-command-runner', name='secator_worker', quiet=True, run=False) 15 | cls.thread = Thread(target=cls.cmd.run) 16 | cls.thread.start() 17 | sleep(3) 18 | 19 | @classmethod 20 | def tearDownClass(cls) -> None: 21 | cls.cmd.stop_process() 22 | cls.thread.join() 23 | 24 | def test_httpx(self): 25 | cmd = Command.execute( 26 | 'secator x httpx testphp.vulnweb.com -json', 27 | name='secator_x_httpx', 28 | process=True, 29 | quiet=True, 30 | cls_attributes={'output_types': [Target, Url, Info], 'item_loaders': [JSONSerializer()]} 31 | ) 32 | # self.assertEqual(cmd.return_code, 0) # TODO: figure out why return code is -9 when running from unittest 33 | self.assertEqual(len(cmd.findings), 1) 34 | url = Url( 35 | 'http://testphp.vulnweb.com', 36 | status_code=200, 37 | title='Home of Acunetix Art', 38 | webserver='nginx', 39 | tech=['DreamWeaver', 'Nginx:1.19.0', 'PHP:5.6.40', 'Ubuntu'], 40 | content_type='text/html', 41 | content_length=4958, 42 | _source='httpx' 43 | ) 44 | self.assertIn(url, cmd.findings) 45 | 46 | def test_host_recon(self): 47 | cmd = Command.execute( 48 | 'secator w host_recon vulnweb.com -json -p 80 -tid nginx-version --nuclei', 49 | name='secator_w_host_recon', 50 | process=True, 51 | quiet=True, 52 | cls_attributes={'output_types': [Target, Url, Port, Vulnerability, Info, Warning, Error], 'item_loaders': [JSONSerializer()]} 53 | ) 54 | # self.assertEqual(cmd.return_code, 0) # TODO: ditto 55 | self.assertGreater(len(cmd.results), 0) 56 | port = Port( 57 | port=80, 58 | ip="44.228.249.3", 59 | state="open", 60 | service_name="nginx/1.19.0", 61 | _source="nmap" 62 | ) 63 | url = Url( 64 | 'http://vulnweb.com', 65 | status_code=200, 66 | title='Acunetix Web Vulnerability Scanner - Test Websites', 67 | webserver='nginx/1.19.0', 68 | tech=['Nginx:1.19.0'], 69 | content_type='text/html', 70 | content_length=4018, 71 | _source='httpx' 72 | ) 73 | vuln = Vulnerability( 74 | name='nginx-version', 75 | provider='', 76 | id='', 77 | matched_at='http://vulnweb.com', 78 | confidence='high', 79 | confidence_nb=4, 80 | severity_nb=4, 81 | severity='info', 82 | tags=['tech', 'nginx'], 83 | _source='nuclei_url' 84 | ) 85 | self.assertIn(port, cmd.findings) 86 | self.assertIn(url, cmd.findings) 87 | self.assertIn(vuln, cmd.findings) 88 | 89 | # def test_pd_pipe(self): 90 | # cmd = Command.execute( 91 | # 'secator x subfinder vulnweb.com | secator x nmap | secator x httpx | secator x katana | secator x httpx | secator x gf --pattern lfi -fmt "{match}" | secator x dalfox' 92 | # ) 93 | -------------------------------------------------------------------------------- /tests/integration/wordlist.txt: -------------------------------------------------------------------------------- 1 | .well-known/security.txt 2 | api-docs/ 3 | assets/public/favicon_js.ico 4 | ftp 5 | ftp/ 6 | main.js 7 | polyfills.js 8 | robots.txt 9 | runtime.js 10 | sitemap.xml 11 | snippets 12 | styles.css 13 | video 14 | video/ 15 | error -------------------------------------------------------------------------------- /tests/integration/wordlist_dns.txt: -------------------------------------------------------------------------------- 1 | be 2 | commons 3 | de 4 | testunexistent 5 | -------------------------------------------------------------------------------- /tests/integration/wordpress_toolbox/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM php:7.3-cli-alpine 2 | 3 | RUN apk add --no-cache curl 4 | RUN apk add --no-cache make 5 | 6 | # Install WP-CLI in the toolbox 7 | RUN curl -O https://raw.githubusercontent.com/wp-cli/builds/gh-pages/phar/wp-cli.phar 8 | RUN chmod +x wp-cli.phar 9 | RUN mv wp-cli.phar /usr/local/bin/wp-cli 10 | 11 | # Install MySQL extension, as WP-CLI needs to access to WordPress database 12 | RUN docker-php-ext-install mysqli 13 | 14 | # Add Makefile to scripts dir 15 | ADD Makefile /scripts/Makefile 16 | 17 | ENTRYPOINT [ "make", "-f", "/scripts/Makefile" ] -------------------------------------------------------------------------------- /tests/integration/wordpress_toolbox/Makefile: -------------------------------------------------------------------------------- 1 | WP_CLI=wp-cli --allow-root 2 | 3 | install: configure 4 | 5 | configure: 6 | sleep 15 7 | @echo "⚙️ Configuring Wordpress database..." 8 | @rm -f wp-config.php 9 | $(WP_CLI) core config \ 10 | --dbhost=${WORDPRESS_DB_HOST} \ 11 | --dbname=${WORDPRESS_DB_NAME} \ 12 | --dbuser=${WORDPRESS_DB_USER} \ 13 | --dbpass=${WORDPRESS_DB_PASSWORD} \ 14 | --path=${WORDPRESS_INSTALL_PATH} \ 15 | --locale=${WORDPRESS_LOCALE} \ 16 | --skip-check 17 | 18 | @echo "⚙️Configuring Wordpress parameters..." 19 | $(WP_CLI) core install \ 20 | --url=${WORDPRESS_WEBSITE_URL_WITHOUT_HTTP} \ 21 | --title="$(WORDPRESS_WEBSITE_TITLE)" \ 22 | --admin_user=${WORDPRESS_ADMIN_USER} \ 23 | --admin_password=${WORDPRESS_ADMIN_PASSWORD} \ 24 | --admin_email=${WORDPRESS_ADMIN_EMAIL} 25 | 26 | $(WP_CLI) option update siteurl "${WORDPRESS_WEBSITE_URL}" 27 | $(WP_CLI) rewrite structure $(WORDPRESS_WEBSITE_POST_URL_STRUCTURE) -------------------------------------------------------------------------------- /tests/performance/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/freelabz/secator/8b8cafb6472b15cc9938921ce7ac6e0fc45df11c/tests/performance/__init__.py -------------------------------------------------------------------------------- /tests/performance/loadtester.py: -------------------------------------------------------------------------------- 1 | import eventlet 2 | eventlet.monkey_patch() 3 | from secator.runners import Workflow, Task, Scan # noqa: E402 4 | from secator.template import TemplateLoader # noqa: E402 5 | from secator.celery import * # noqa: E402,F403 6 | import click # noqa: E402 7 | import json # noqa: E402 8 | from time import sleep, time # noqa: E402 9 | 10 | pool = eventlet.GreenPool(100) 11 | 12 | from kombu.serialization import register # noqa: E402 13 | 14 | def create_runner(runner_type, targets, index, total): 15 | register('json', json.dumps, json.loads, content_type='application/json', content_encoding='utf-8') 16 | run_opts = { 17 | 'print_item': True, 18 | 'print_line': True, 19 | 'print_cmd': True 20 | } 21 | runner = None 22 | if runner_type == 'workflow': 23 | runner = Workflow 24 | config = TemplateLoader(name='workflow/subdomain_recon') 25 | elif runner_type == 'task': 26 | runner = Task 27 | config = TemplateLoader(input={'name': 'httpx', 'type': 'task'}) 28 | elif runner_type == 'scan': 29 | runner = Scan 30 | result = runner.delay(config, targets, run_opts=run_opts) 31 | while not result.ready(): 32 | # print(f'Running {runner_type} {index}/{total} ..') 33 | sleep(1) 34 | print(f'Task {index} / {total} finished running.') 35 | result.get() 36 | 37 | def start_worker(): 38 | from secator.runners import Command 39 | from threading import Thread 40 | cmd = 'pyinstrument -r html -o /tmp/test.html --from-path secator worker' 41 | process = Command.execute(cmd, run=False) 42 | thread = Thread(target=process.run) 43 | return thread, process 44 | 45 | @click.command() 46 | @click.option('--count', type=int, default=1) 47 | @click.option('--runner', type=str, default='task') 48 | @click.option('--targets', type=str, default='http://testphp.vulnweb.com') 49 | def cli(count, runner, targets): 50 | """Secator CLI.""" 51 | try: 52 | targets = [c.strip() for c in targets.split(',')] 53 | print(f'Load tester initialized with {len(targets)} targets.') 54 | print(f'Targets: {targets}') 55 | thread, process = start_worker() 56 | print('Starting worker ...') 57 | thread.start() 58 | sleep(3) 59 | print(f'Starting {count} {runner}s ...') 60 | start_time = time() 61 | for i in range(count): 62 | print(f'Starting {runner} {i + 1}/{count} ..') 63 | pool.spawn(create_runner, runner, targets, i + 1, count) 64 | pool.waitall() 65 | elapsed_time = time() - start_time 66 | print(f'All {runner}s completed in {elapsed_time:.2f}s.') 67 | except BaseException: 68 | pass 69 | finally: 70 | import signal 71 | process.send_signal(signal.SIGINT) 72 | thread.join() 73 | 74 | if __name__ == '__main__': 75 | cli() 76 | -------------------------------------------------------------------------------- /tests/performance/test_worker.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | # from secator.runners import Command 3 | # from secator.config import CONFIG 4 | # from time import sleep 5 | # from threading import Thread 6 | # from secator.celery import is_celery_worker_alive 7 | 8 | class TestWorker(unittest.TestCase): 9 | 10 | # @classmethod 11 | # def setUpClass(cls): 12 | # profiles_folder = CONFIG.dirs.performance 13 | # cls.test_file = f'{profiles_folder}/test.html' 14 | # cmd = f'pyinstrument -r html --from-path secator worker' 15 | # print('Running worker with pyinstrument...') 16 | # print(cmd) 17 | # cls.cmd = Command.execute(cmd, quiet=True, print_cmd=True, run=False) 18 | # cls.thread = Thread(target=cls.cmd.run) 19 | # cls.thread.start() 20 | # sleep(3) 21 | 22 | # @classmethod 23 | # def tearDownClass(cls) -> None: 24 | # cls.cmd.stop_process() 25 | # cls.thread.join() 26 | # print(f'Profiler output saved to {cls.test_file}') 27 | 28 | def test_httpx(self): 29 | from secator.tasks import httpx 30 | result = httpx.delay(['jahmyst.synology.me'], print_line=True, print_cmd=True, print_item=True) 31 | print(result.get()) 32 | -------------------------------------------------------------------------------- /tests/template/test_templates.py: -------------------------------------------------------------------------------- 1 | import io 2 | import unittest 3 | from secator.loader import get_configs_by_type, discover_tasks 4 | from secator.runners import Workflow, Scan 5 | from secator.workflows import DYNAMIC_WORKFLOWS 6 | from secator.scans import DYNAMIC_SCANS 7 | from secator.utils_test import META_OPTS 8 | from secator.rich import console 9 | from contextlib import redirect_stdout, redirect_stderr 10 | 11 | 12 | class TestTemplates(unittest.TestCase): 13 | 14 | def test_tasks(self): 15 | console.print('') 16 | for task in discover_tasks(): 17 | with self.subTest(name=task.__name__): 18 | console.print(f'\tTesting task {task.__name__} ...', end='') 19 | with io.StringIO() as buffer, redirect_stdout(buffer), redirect_stderr(buffer): 20 | task = task('TARGET', dry_run=True, print_cmd=True, **META_OPTS) 21 | task.run() 22 | console.print(' [green]ok[/]') 23 | 24 | def test_workflows(self): 25 | console.print('') 26 | for workflow in get_configs_by_type('workflow'): 27 | with self.subTest(name=workflow.name): 28 | console.print(f'\tTesting workflow {workflow.name} ...', end='') 29 | with io.StringIO() as buffer, redirect_stdout(buffer), redirect_stderr(buffer): 30 | workflow = Workflow(workflow, run_opts={'dry_run': True, 'print_cmd': True, **META_OPTS}) 31 | workflow.run() 32 | console.print(' [green]ok[/]') 33 | 34 | def test_scans(self): 35 | console.print('') 36 | for scan in get_configs_by_type('scan'): 37 | with self.subTest(name=scan.name): 38 | console.print(f'\tTesting scan {scan.name} ...', end='') 39 | with io.StringIO() as buffer, redirect_stdout(buffer), redirect_stderr(buffer): 40 | scan = Scan(scan, run_opts={'dry_run': True, 'print_cmd': True, **META_OPTS}) 41 | scan.run() 42 | console.print(' [green]ok[/]') 43 | 44 | def test_workflows_dynamic_import(self): 45 | console.print('') 46 | for workflow_name, runner in DYNAMIC_WORKFLOWS.items(): 47 | with self.subTest(name=workflow_name): 48 | console.print(f'\tTesting workflow {workflow_name} ...', end='') 49 | with io.StringIO() as buffer, redirect_stdout(buffer), redirect_stderr(buffer): 50 | workflow = runner('TARGET', dry_run=True, print_cmd=True, **META_OPTS) 51 | workflow.run() 52 | console.print(' [green]ok[/]') 53 | 54 | def test_scans_dynamic_import(self): 55 | console.print('') 56 | for scan_name, runner in DYNAMIC_SCANS.items(): 57 | with self.subTest(name=scan_name): 58 | console.print(f'\tTesting scan {scan_name} ...', end='') 59 | with io.StringIO() as buffer, redirect_stdout(buffer), redirect_stderr(buffer): 60 | scan = runner('TARGET', dry_run=True, print_cmd=True, **META_OPTS) 61 | scan.run() 62 | console.print(' [green]ok[/]') 63 | -------------------------------------------------------------------------------- /tests/unit/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/freelabz/secator/8b8cafb6472b15cc9938921ce7ac6e0fc45df11c/tests/unit/__init__.py -------------------------------------------------------------------------------- /tests/unit/test_offline.py: -------------------------------------------------------------------------------- 1 | import os 2 | import unittest 3 | from unittest import mock 4 | 5 | devnull = open(os.devnull, 'w') 6 | mock_stderr = mock.patch('sys.stderr', devnull) 7 | with mock_stderr: 8 | from secator.config import download_files, CONFIG 9 | from secator.utils_test import clear_modules 10 | 11 | 12 | @mock_stderr 13 | class TestOffline(unittest.TestCase): 14 | 15 | @classmethod 16 | def setUpClass(cls): 17 | clear_modules() 18 | 19 | def test_cve_lookup(self): 20 | from secator.tasks._categories import Vuln 21 | result = Vuln.lookup_cve('CVE-2022-23491') 22 | self.assertEqual(result, None) 23 | 24 | def test_downloads(self): 25 | download_files( 26 | {'pyproject.toml': 'https://raw.githubusercontent.com/freelabz/secator/main/pyproject.toml'}, 27 | CONFIG.dirs.payloads, 28 | CONFIG.offline_mode, 29 | 'toml file' 30 | ) 31 | path = CONFIG.dirs.payloads / 'pyproject.toml' 32 | self.assertFalse(path.exists()) 33 | 34 | def test_cli_install(self): 35 | # TODO: https://github.com/ewels/rich-click/issues/188 36 | # from secator.config import download_files, CONFIG 37 | # from secator.cli import cli 38 | # import click 39 | # from click.testing import CliRunner 40 | # result = CliRunner.invoke(cli, None, None) 41 | pass 42 | 43 | def test_cli(self): 44 | # TODO: https://github.com/freelabz/secator/issues/319 45 | # from secator.config import download_files, CONFIG 46 | pass 47 | -------------------------------------------------------------------------------- /tests/unit/test_scans.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/freelabz/secator/8b8cafb6472b15cc9938921ce7ac6e0fc45df11c/tests/unit/test_scans.py -------------------------------------------------------------------------------- /tests/unit/test_serializers.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from secator.serializers.dataclass import dumps_dataclass, loads_dataclass 3 | from secator.output_types import Port, Vulnerability 4 | 5 | 6 | class TestSerializers(unittest.TestCase): 7 | 8 | def test_dumps_loads(self): 9 | results = [ 10 | Port(port=53, ip='127.0.0.1', host='localhost'), 11 | Vulnerability(matched_at='localhost', name='CVE-123123123', provider='nmap') 12 | ] 13 | results = dumps_dataclass(results) 14 | results = loads_dataclass(results) 15 | self.assertTrue(isinstance(results, list)) 16 | self.assertEqual(len(results), 2) 17 | self.assertTrue(all(type(result) in [Port, Vulnerability] for result in results)) 18 | self.assertTrue(isinstance(results, list)) 19 | 20 | def test_dumps_loads_nested(self): 21 | results = { 22 | 'info': {'name': 'test'}, 23 | 'results': { 24 | 'ports': [ 25 | {'port': 53, 'ip': '127.0.0.1', 'host': 'localhost', '_type': 'port'}, 26 | ], 27 | 'vulnerabilities': [ 28 | {'matched_at': 'localhost', 'name': 'CVE-123123123', 'provider': 'nmap', '_type': 'vulnerability'} 29 | ] 30 | } 31 | } 32 | results = loads_dataclass(dumps_dataclass(results)) 33 | self.assertTrue(isinstance(results['results']['ports'][0], Port)) 34 | self.assertTrue(isinstance(results['results']['vulnerabilities'][0], Vulnerability)) 35 | 36 | def test_dumps_loads_nested_obj(self): 37 | results = { 38 | 'info': {'name': 'test'}, 39 | 'results': { 40 | 'ports': [ 41 | Port(port=53, ip='127.0.0.1', host='localhost'), 42 | ], 43 | 'vulnerabilities': [ 44 | Vulnerability(matched_at='localhost', name='CVE-123123123', provider='nmap') 45 | ] 46 | } 47 | } 48 | results = loads_dataclass(dumps_dataclass(results)) 49 | self.assertTrue(isinstance(results['results']['ports'][0], Port)) 50 | self.assertTrue(isinstance(results['results']['vulnerabilities'][0], Vulnerability)) -------------------------------------------------------------------------------- /tests/unit/test_tasks.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import unittest 3 | import unittest.mock 4 | import warnings 5 | 6 | from secator.definitions import DEBUG 7 | from secator.rich import console 8 | from secator.utils import setup_logging 9 | from secator.utils_test import (FIXTURES_TASKS, INPUTS_TASKS, META_OPTS, 10 | CommandOutputTester, mock_command) 11 | 12 | level = logging.DEBUG if DEBUG == ["1"] else logging.ERROR 13 | setup_logging(level) 14 | 15 | 16 | class TestTasks(unittest.TestCase, CommandOutputTester): 17 | 18 | def setUp(self): 19 | warnings.simplefilter('ignore', category=ResourceWarning) 20 | warnings.simplefilter('ignore', category=DeprecationWarning) 21 | 22 | def _valid_fixture(self, cls, fixture): 23 | if not fixture: 24 | if len(FIXTURES_TASKS.keys()) == 1: # make test fail. 25 | raise AssertionError(f'No fixture for {cls.__name__}! Add one to the tests/fixtures directory (must not be an empty file / empty json / empty list).') 26 | return False 27 | return True 28 | 29 | def test_cmd_converted_schema(self): 30 | console.print('') 31 | 32 | from secator.config import CONFIG 33 | if 'debug_command' in CONFIG.debug: 34 | META_OPTS['print_cmd'] = True 35 | META_OPTS['print_item'] = True 36 | 37 | failures = [] 38 | for cls, fixture in FIXTURES_TASKS.items(): 39 | with self.subTest(name=cls.__name__): 40 | # Validate fixture 41 | if not self._valid_fixture(cls, fixture): 42 | console.print(f'\tTesting task {cls.__name__} ... [dim gold3] skipped (no fixture)[/]') 43 | continue 44 | 45 | # Run command 46 | input_type = cls.input_types[0] if cls.input_types else 'fake' 47 | targets = INPUTS_TASKS.get(input_type, []) 48 | with mock_command(cls, targets, META_OPTS, fixture) as runner: 49 | try: 50 | self._test_runner_output( 51 | runner, 52 | expected_output_types=cls.output_types 53 | ) 54 | except Exception as e: 55 | failures.append(f'ERROR ({cls.__name__}): {e}') 56 | 57 | if failures: 58 | raise AssertionError("\n\n" + "\n\n".join(failures)) 59 | -------------------------------------------------------------------------------- /tests/unit/test_tasks_categories.py: -------------------------------------------------------------------------------- 1 | import json 2 | import unittest 3 | 4 | from secator.utils_test import load_fixture, FIXTURES_DIR 5 | from secator.tasks._categories import Vuln 6 | from secator.config import CONFIG 7 | 8 | 9 | class TestCveHelpers(unittest.TestCase): 10 | 11 | def test_lookup_cve_circle(self): 12 | actual = Vuln.lookup_cve_from_cve_circle('CVE-2023-5568') 13 | self.assertIsNone(actual) 14 | 15 | def test_lookup_cve(self): 16 | fixture = load_fixture('cve_circle_output', FIXTURES_DIR) 17 | cve_path = f'{CONFIG.dirs.data}/cves/CVE-2023-5568.json' 18 | with open(cve_path, 'w') as f: 19 | f.write(json.dumps(fixture, indent=2)) 20 | vuln = Vuln.lookup_cve('CVE-2023-5568', 'cpe:/o:redhat:enterprise_linux:9') 21 | vuln2 = Vuln.lookup_cve('CVE-2023-5568', 'cpe:2.3:o:redhat:enterprise_linux:9:*:*:*:*:*:*:*') 22 | self.assertIn('cpe-match', vuln['tags']) 23 | self.assertIn('cpe-match', vuln2['tags']) 24 | -------------------------------------------------------------------------------- /tests/unit/test_utils.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from secator.utils import extract_domain_info 3 | 4 | class TestExtractRootDomain(unittest.TestCase): 5 | def test_root_domain_extraction(self): 6 | domains = [ 7 | ("subdomain.example.com", "example.com"), 8 | ("www.subdomain.example.co.uk", "example.co.uk"), 9 | ("example.com", "example.com"), 10 | ("ex-ample.co", "ex-ample.co"), 11 | ("test--domain.com", 'test--domain.com'), 12 | ("-example.com", None), 13 | ("example-.com", None), 14 | ("exa_mple.com", None), 15 | ("exa--mple.com", 'exa--mple.com'), 16 | ("example.longtld", None), 17 | ("", None), 18 | ("localhost", None), 19 | ("192.168.1.1", None), 20 | ("test.domain-.com", None), 21 | ("test.-domain.com", None), 22 | ("test_domain.com", None), 23 | ("sub.domain_goes.com", None), 24 | ("okay.domain.gov", "domain.gov"), 25 | # Adding Unicode domain examples 26 | ("täst.example.org", "example.org"), # Normal IDN 27 | ("münchen.de", "münchen.de"), # City domain name in German 28 | ("пример.рф", "пример.рф"), # Example in Cyrillic 29 | ("中文网.中国", "中文网.中国"), # Chinese characters 30 | ("xn--fiq228c5hs.xn--fiq64b", "xn--fiq228c5hs.xn--fiq64b"), # Punycode representation of Chinese domain 31 | ("test.みんな", "test.みんな"), # Using Japanese TLD 32 | ("http://sub.domain.пример.рф", "пример.рф"), 33 | ("https://suрф.みんな.пример.рф", "пример.рф"), 34 | ("http://mydomain.localhost", None), 35 | ] 36 | 37 | for domain, expected in domains: 38 | with self.subTest(domain=domain): 39 | result = extract_domain_info(domain, domain_only=True) 40 | self.assertEqual(result, expected, f"Failed for domain: {domain}") 41 | --------------------------------------------------------------------------------