├── clair_config └── local-dev │ ├── clair │ ├── .gitignore │ ├── quay.yaml.d │ │ └── .gitignore │ ├── config.yaml.d │ │ └── .gitignore │ ├── init.sql │ └── config.yaml │ └── traefik │ ├── config │ ├── jaeger.yaml │ ├── pgadmin.yaml │ ├── prom.yaml │ ├── grafana.yaml │ ├── postgresql.yaml │ ├── quay.yaml │ ├── dashboard.yaml │ ├── pyroscope.yaml │ ├── rabbitmq.yaml │ └── clair.yaml │ └── traefik.yaml ├── .vscode └── settings.json ├── falco_config └── http_output.yml ├── .env ├── .devcontainer └── devcontainer.json ├── motd ├── falco-compose.yml ├── compose.yml ├── .github └── workflows │ └── test-tools.yml ├── clair-compose.yml ├── Makefile ├── README.md ├── Dockerfile └── HOWTO.md /clair_config/local-dev/clair/.gitignore: -------------------------------------------------------------------------------- 1 | quay.yaml 2 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "makefile.configureOnOpen": false 3 | } -------------------------------------------------------------------------------- /clair_config/local-dev/clair/quay.yaml.d/.gitignore: -------------------------------------------------------------------------------- 1 | * 2 | !.gitignore 3 | -------------------------------------------------------------------------------- /clair_config/local-dev/clair/config.yaml.d/.gitignore: -------------------------------------------------------------------------------- 1 | * 2 | !.gitignore 3 | -------------------------------------------------------------------------------- /falco_config/http_output.yml: -------------------------------------------------------------------------------- 1 | # [Stable] `http_output` 2 | # 3 | # Send logs to an HTTP endpoint or webhook. 4 | # 5 | # When using falcosidekick, it is necessary to set `json_output` to true. 
6 | json_output: true 7 | json_include_output_property: true 8 | http_output: 9 | enabled: true 10 | url: "http://falco-sidekick:2801/" 11 | -------------------------------------------------------------------------------- /clair_config/local-dev/traefik/config/jaeger.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | http: 3 | routers: 4 | jaeger: 5 | entryPoints: [traefik] 6 | rule: 'PathPrefix(`/jaeger`)' 7 | service: jaeger 8 | services: 9 | jaeger: 10 | loadBalancer: 11 | servers: 12 | - url: "http://clair-jaeger:16686/" 13 | healthCheck: 14 | path: / 15 | -------------------------------------------------------------------------------- /clair_config/local-dev/traefik/config/pgadmin.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | http: 3 | routers: 4 | pgadmin: 5 | entryPoints: [traefik] 6 | rule: 'PathPrefix(`/pgadmin`)' 7 | service: pgadmin 8 | services: 9 | pgadmin: 10 | loadBalancer: 11 | servers: 12 | - url: "http://clair-pgadmin/" 13 | healthCheck: 14 | path: /pgadmin 15 | -------------------------------------------------------------------------------- /clair_config/local-dev/traefik/config/prom.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | http: 3 | routers: 4 | prom: 5 | entryPoints: [traefik] 6 | rule: 'PathPrefix(`/prom`)' 7 | service: prom 8 | services: 9 | prom: 10 | loadBalancer: 11 | servers: 12 | - url: "http://clair-prometheus:9090/" 13 | healthCheck: 14 | path: /prom/-/healthy 15 | -------------------------------------------------------------------------------- /clair_config/local-dev/traefik/config/grafana.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | http: 3 | routers: 4 | grafana: 5 | entryPoints: [traefik] 6 | rule: 'PathPrefix(`/grafana`)' 7 | service: grafana 8 | services: 9 | grafana: 10 | loadBalancer: 11 | servers: 12 | - url: "http://clair-grafana:3000/" 13 | healthCheck: 14 | path: /grafana/api/health 15 | -------------------------------------------------------------------------------- /clair_config/local-dev/traefik/config/postgresql.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | tcp: 3 | routers: 4 | postgresql: 5 | entryPoints: [postgresql] 6 | service: postgresql 7 | # Traefik docs say this hack is needed if not using TLS. 
8 | rule: 'HostSNI(`*`)' 9 | services: 10 | postgresql: 11 | loadBalancer: 12 | servers: 13 | - address: 'clair-database:5432' 14 | -------------------------------------------------------------------------------- /clair_config/local-dev/clair/init.sql: -------------------------------------------------------------------------------- 1 | CREATE USER clair WITH PASSWORD 'clair'; 2 | CREATE USER quay WITH PASSWORD 'quay'; 3 | CREATE DATABASE indexer WITH OWNER clair; 4 | CREATE DATABASE matcher WITH OWNER clair; 5 | CREATE DATABASE notifier WITH OWNER clair; 6 | CREATE DATABASE quay WITH OWNER quay; 7 | \connect matcher 8 | CREATE EXTENSION "uuid-ossp"; 9 | \connect notifier 10 | CREATE EXTENSION "uuid-ossp"; 11 | \connect quay 12 | CREATE EXTENSION "pg_trgm"; 13 | -------------------------------------------------------------------------------- /clair_config/local-dev/traefik/traefik.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | global: 3 | sendAnonymousUsage: false 4 | api: 5 | insecure: false 6 | dashboard: true 7 | entryPoints: 8 | traefik: 9 | address: ':8080' 10 | quay: 11 | address: ':8443' 12 | clair: 13 | address: ':6060' 14 | postgresql: 15 | address: ':5432' 16 | providers: 17 | file: 18 | directory: /etc/traefik/config 19 | metrics: 20 | prometheus: 21 | addServicesLabels: true 22 | accessLog: {} 23 | -------------------------------------------------------------------------------- /clair_config/local-dev/traefik/config/quay.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | http: 3 | routers: 4 | quay: 5 | entryPoints: [quay] 6 | rule: 'PathPrefix(`/`)' 7 | service: quay 8 | quay-api: 9 | entryPoints: [traefik] 10 | rule: 'PathPrefix(`/v2`)' 11 | service: quay 12 | services: 13 | quay: 14 | loadBalancer: 15 | passHostHeader: false 16 | servers: 17 | - url: "http://clair-quay:8080/" 18 | healthCheck: 19 | path: /health 20 | port: 8080 21 | -------------------------------------------------------------------------------- /clair_config/local-dev/traefik/config/dashboard.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | http: 3 | routers: 4 | api: 5 | entryPoints: [traefik] 6 | rule: 'PathPrefix(`/api`) || PathPrefix(`/dashboard`)' 7 | service: 'api@internal' 8 | dashboard-redirect: 9 | entryPoints: [traefik] 10 | rule: 'Path(`/`)' 11 | middlewares: [dashboard-redirect] 12 | service: 'api@internal' 13 | middlewares: 14 | dashboard-redirect: 15 | redirectRegex: 16 | regex: '.*' 17 | replacement: '${1}/dashboard/' 18 | -------------------------------------------------------------------------------- /.env: -------------------------------------------------------------------------------- 1 | T_2MS=3.10.0 2 | T_CHECKOV=3.2.269 3 | T_CLAIR=4.8.0 4 | T_CLOUDSPLAINING=0.7.0 5 | T_CLOUDSPLOIT=3.9.0 6 | T_DEPCHECK=10.0.4 7 | T_DEPSCAN=5.2.6 8 | T_DETECT_SECRETS=1.5.0 9 | T_DOCKLE=0.4.14 10 | T_FALCO=0.39.1 11 | T_GITLEAKS=8.21.0 12 | T_GITXRAY=1.0.16 13 | T_GRYPE=0.82.1 14 | T_HADOLINT=2.12.0 15 | T_KICS=2.1.3 16 | T_LEGITIFY=1.0.11 17 | T_NJSSCAN=0.3.7 18 | T_NODEJSSCAN=3.7 19 | T_OCTOSCAN=0.1.1 20 | T_PROWLER=4.4.1 21 | T_RETIRE=5.2.4 22 | T_SCOUTSUITE=5.14.0 23 | T_SEMGREP=1.91.0 24 | T_SNYK=1.1293.1 25 | T_TRIVY=0.56.2 26 | T_TRUFFLEHOG=3.82.8 27 | 28 | -------------------------------------------------------------------------------- /clair_config/local-dev/traefik/config/pyroscope.yaml: 
-------------------------------------------------------------------------------- 1 | --- 2 | http: 3 | routers: 4 | pyroscope: 5 | entryPoints: [traefik] 6 | rule: 'PathPrefix(`/pyroscope`)' 7 | service: pyroscope 8 | middlewares: 9 | - pyroscope-stripprefix 10 | middlewares: 11 | pyroscope-stripprefix: 12 | stripPrefix: 13 | prefixes: 14 | - /pyroscope 15 | services: 16 | pyroscope: 17 | loadBalancer: 18 | servers: 19 | - url: "http://clair-pyroscope:4040/" 20 | healthCheck: 21 | path: / 22 | -------------------------------------------------------------------------------- /.devcontainer/devcontainer.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "OSS Security Tools", 3 | "dockerComposeFile": "../compose.yml", 4 | "service": "toolbox", 5 | "workspaceFolder": "/workspace", 6 | "shutdownAction": "stopCompose", 7 | 8 | "features": { 9 | }, 10 | 11 | "customizations": { 12 | "vscode": { 13 | "extensions": [ 14 | "ms-vscode.makefile-tools", 15 | "ms-azuretools.vscode-docker" 16 | ] 17 | } 18 | }, 19 | 20 | "containerEnv": { 21 | "DOCKER_BUILDKIT": "1", 22 | "DOCKER_CLI_EXPERIMENTAL": "enabled" 23 | }, 24 | 25 | "remoteUser": "wanderer" 26 | } 27 | -------------------------------------------------------------------------------- /clair_config/local-dev/traefik/config/rabbitmq.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | http: 3 | routers: 4 | rabbitmq: 5 | entryPoints: [traefik] 6 | rule: 'PathPrefix(`/rabbitmq`)' 7 | middlewares: 8 | - rewrite-api 9 | - rewrite 10 | service: rabbitmq 11 | services: 12 | rabbitmq: 13 | loadBalancer: 14 | servers: 15 | - url: "http://clair-rabbitmq:15672/" 16 | healthCheck: 17 | path: / 18 | middlewares: 19 | rewrite-api: 20 | replacePathRegex: 21 | regex: '^/rabbitmq/api/(.*?)/(.*)' 22 | replacement: '/api/%2F/$2' 23 | rewrite: 24 | replacePathRegex: 25 | regex: '^/rabbitmq/(.*)$' 26 | replacement: '/$1' 27 | -------------------------------------------------------------------------------- /motd: -------------------------------------------------------------------------------- 1 | __ __ _ 2 | \ \ / /__| | ___ ___ _ __ ___ ___ 3 | \ \ /\ / / _ \ |/ __/ _ \| '_ ` _ \ / _ \ 4 | \ V V / __/ | (_| (_) | | | | | | __/ 5 | __ \_/\_/ \___|_|\___\___/|_| |_| |_|\___| 6 | \ \ / /_ _ _ __ __| | ___ _ __ ___ _ __ 7 | \ \ /\ / / _` | '_ \ / _` |/ _ \ '__/ _ \ '__| 8 | \ V V / (_| | | | | (_| | __/ | | __/ | 9 | \_/\_/ \__,_|_| |_|\__,_|\___|_| \___|_| 10 | 11 | Welcome to the devsecops toolkit 12 | Built by The Red Guild 🪷 13 | 14 | This container was created as a resource for a workshop, 15 | which intends to spread awareness, help people protect themselves 16 | and the repos they interact with. Say hi @theredguild!, don't be a stranger. 
-------------------------------------------------------------------------------- /clair_config/local-dev/traefik/config/clair.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | http: 3 | entrypoint: 4 | clair: 5 | address: ':6060' 6 | routers: 7 | indexer: 8 | entryPoints: [clair] 9 | rule: 'PathPrefix(`/indexer`)' 10 | service: indexer 11 | matcher: 12 | entryPoints: [clair] 13 | rule: 'PathPrefix(`/matcher`)' 14 | service: matcher 15 | notifier: 16 | entryPoints: [clair] 17 | rule: 'PathPrefix(`/notifier`)' 18 | service: notifier 19 | services: 20 | indexer: 21 | loadBalancer: 22 | servers: 23 | - url: "http://clair-indexer:6060/" 24 | healthCheck: 25 | path: /healthz 26 | port: 8089 27 | matcher: 28 | loadBalancer: 29 | servers: 30 | - url: "http://clair-matcher:6060/" 31 | healthCheck: 32 | path: /healthz 33 | port: 8089 34 | notifier: 35 | loadBalancer: 36 | servers: 37 | - url: "http://clair-notifier:6060/" 38 | healthCheck: 39 | path: /healthz 40 | port: 8089 41 | -------------------------------------------------------------------------------- /falco-compose.yml: -------------------------------------------------------------------------------- 1 | services: 2 | falco: 3 | container_name: falco 4 | cap_drop: 5 | - all 6 | cap_add: 7 | - sys_admin 8 | - sys_resource 9 | - sys_ptrace 10 | volumes: 11 | - /var/run/docker.sock:/host/var/run/docker.sock 12 | - /proc:/host/proc:ro 13 | - /etc:/host/etc:ro 14 | - ./falco_config/http_output.yml:/etc/falco/config.d/http_output.yml 15 | image: falcosecurity/falco-no-driver:latest 16 | networks: 17 | - toolbox-net 18 | 19 | falco-sidekick: 20 | container_name: falco-sidekick 21 | image: falcosecurity/falcosidekick 22 | environment: 23 | WEBUI_URL: http://falco-webui:2802 24 | networks: 25 | - toolbox-net 26 | 27 | falco-webui: 28 | container_name: falco-webui 29 | image: falcosecurity/falcosidekick-ui:2.2.0 30 | ports: 31 | - 2802:2802 32 | depends_on: 33 | - falco-redis 34 | command: ['-r', 'redis:6379', '-d'] 35 | networks: 36 | - toolbox-net 37 | 38 | falco-redis: 39 | image: redis/redis-stack:7.2.0-v11 40 | networks: 41 | - toolbox-net 42 | 43 | networks: 44 | toolbox-net: 45 | external: true 46 | -------------------------------------------------------------------------------- /compose.yml: -------------------------------------------------------------------------------- 1 | services: 2 | toolbox: 3 | build: 4 | context: . 
5 | dockerfile: Dockerfile 6 | volumes: 7 | - .:/workspace 8 | - pipx-packages:/home/wanderer/.local/pipx/venvs 9 | environment: 10 | USERNAME: wanderer 11 | USER_UID: 1000 12 | USER_GID: 1000 13 | HOME: /home/wanderer 14 | PATH: /home/wanderer/.local/bin:/usr/local/bin:$PATH 15 | CLAIR_CONF: /workspace/clair_config/local-dev/clair/config.yaml 16 | CLAIR_API: http://clair-traefik:6060 17 | tty: true 18 | stdin_open: true 19 | networks: 20 | - toolbox-net 21 | command: zsh 22 | 23 | falco: 24 | extends: 25 | file: falco-compose.yml 26 | service: falco 27 | falco-sidekick: 28 | extends: 29 | file: falco-compose.yml 30 | service: falco-sidekick 31 | falco-webui: 32 | extends: 33 | file: falco-compose.yml 34 | service: falco-webui 35 | ports: 36 | - "2802:2802" 37 | falco-redis: 38 | extends: 39 | file: falco-compose.yml 40 | service: falco-redis 41 | 42 | clair-indexer: 43 | extends: 44 | file: clair-compose.yml 45 | service: clair-indexer 46 | 47 | clair-matcher: 48 | extends: 49 | file: clair-compose.yml 50 | service: clair-matcher 51 | 52 | clair-database: 53 | extends: 54 | file: clair-compose.yml 55 | service: clair-database 56 | 57 | clair-traefik: 58 | extends: 59 | file: clair-compose.yml 60 | service: clair-traefik 61 | 62 | volumes: 63 | pipx-packages: 64 | 65 | networks: 66 | toolbox-net: 67 | driver: bridge 68 | -------------------------------------------------------------------------------- /clair_config/local-dev/clair/config.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | log_level: debug-color 3 | introspection_addr: ":8089" 4 | http_listen_addr: ":6060" 5 | updaters: 6 | sets: 7 | - ubuntu 8 | - debian 9 | - rhel-vex 10 | - alpine 11 | - osv 12 | auth: 13 | psk: 14 | key: 'c2VjcmV0' 15 | iss: 16 | - quay 17 | - clairctl 18 | indexer: 19 | connstring: host=clair-database user=clair dbname=indexer sslmode=disable 20 | scanlock_retry: 10 21 | layer_scan_concurrency: 5 22 | migrations: true 23 | matcher: 24 | indexer_addr: http://clair-indexer:6060/ 25 | connstring: host=clair-database user=clair dbname=matcher sslmode=disable 26 | max_conn_pool: 100 27 | migrations: true 28 | matchers: {} 29 | notifier: 30 | indexer_addr: http://clair-indexer:6060/ 31 | matcher_addr: http://clair-matcher:6060/ 32 | connstring: host=clair-database user=clair dbname=notifier sslmode=disable 33 | migrations: true 34 | delivery_interval: 30s 35 | poll_interval: 1m 36 | webhook: 37 | target: "http://webhook-target/" 38 | callback: "http://clair-notifier:6060/notifier/api/v1/notification/" 39 | # amqp: 40 | # direct: true 41 | # exchange: 42 | # name: "" 43 | # type: "direct" 44 | # durable: true 45 | # auto_delete: false 46 | # uris: ["amqp://guest:guest@clair-rabbitmq:5672/"] 47 | # routing_key: "notifications" 48 | # callback: "http://clair-notifier/notifier/api/v1/notification" 49 | # tracing and metrics config 50 | trace: 51 | name: "jaeger" 52 | # probability: 1 53 | jaeger: 54 | agent: 55 | endpoint: "clair-jaeger:6831" 56 | service_name: "clair" 57 | metrics: 58 | name: "prometheus" 59 | -------------------------------------------------------------------------------- /.github/workflows/test-tools.yml: -------------------------------------------------------------------------------- 1 | name: Test Security Tools container build 2 | on: 3 | push: 4 | branches: 5 | - main 6 | - develop 7 | pull_request: 8 | branches: 9 | - main 10 | - develop 11 | 12 | jobs: 13 | build-and-test: 14 | runs-on: ubuntu-latest 15 | steps: 16 | # This step takes ages 
(~4-5 min) but frees around 20 additional gigabytes 17 | # that are quite useful when installing and running everything. 18 | - name: Free up some disk space 19 | uses: jlumbroso/free-disk-space@main 20 | with: 21 | tool-cache: false 22 | android: true 23 | dotnet: true 24 | haskell: true 25 | large-packages: false 26 | swap-storage: false 27 | docker-images: false 28 | 29 | - name: Checkout 30 | uses: actions/checkout@v4 31 | 32 | - name: Set up Docker Buildx 33 | uses: docker/setup-buildx-action@v3 34 | 35 | - name: Load .env file 36 | id: dotenv 37 | uses: xom9ikk/dotenv@v2 38 | 39 | - name: Prepare build args 40 | id: prep 41 | run: | 42 | { 43 | echo 'BUILD_ARGS<> $GITHUB_OUTPUT 51 | 52 | - name: Build container 53 | uses: docker/build-push-action@v6 54 | with: 55 | push: false 56 | context: . 57 | cache-from: type=gha 58 | cache-to: type=gha,mode=max 59 | tags: theredguild/container-sec-tools:latest 60 | build-args: ${{ steps.prep.outputs.BUILD_ARGS }} 61 | 62 | - name: Check disk space 63 | run: df -h 64 | -------------------------------------------------------------------------------- /clair-compose.yml: -------------------------------------------------------------------------------- 1 | --- 2 | version: "3.7" 3 | # This is just to hold a bunch of yaml anchors and try to consolidate parts of 4 | # the config. 5 | x-anchors: 6 | postgres: &postgres-image docker.io/library/postgres:12 7 | traefik: &traefik-image docker.io/library/traefik:v2.2 8 | clair: &clair-image quay.io/projectquay/clair:4.8.0 9 | 10 | services: 11 | clair-indexer: 12 | image: *clair-image 13 | container_name: clair-indexer 14 | depends_on: 15 | clair-database: 16 | condition: service_healthy 17 | environment: 18 | CLAIR_MODE: "indexer" 19 | volumes: 20 | - "./clair_config/local-dev/clair:/config:ro" 21 | networks: 22 | - toolbox-net 23 | 24 | clair-matcher: 25 | image: *clair-image 26 | container_name: clair-matcher 27 | depends_on: 28 | clair-database: 29 | condition: service_healthy 30 | environment: 31 | CLAIR_MODE: "matcher" 32 | volumes: 33 | - "./clair_config/local-dev/clair:/config:ro" 34 | networks: 35 | - toolbox-net 36 | 37 | clair-database: 38 | container_name: clair-database 39 | image: *postgres-image 40 | environment: 41 | POSTGRES_HOST_AUTH_METHOD: trust 42 | volumes: 43 | - type: bind 44 | source: ./clair_config/local-dev/clair/init.sql 45 | target: /docker-entrypoint-initdb.d/init.sql 46 | healthcheck: 47 | test: 48 | - CMD-SHELL 49 | - "pg_isready -U postgres" 50 | interval: 5s 51 | timeout: 4s 52 | retries: 12 53 | start_period: 10s 54 | networks: 55 | - toolbox-net 56 | 57 | clair-traefik: 58 | container_name: clair-traefik 59 | image: *traefik-image 60 | depends_on: 61 | - clair-matcher 62 | - clair-indexer 63 | ports: 64 | - '6060:6060' 65 | - '8080:8080' 66 | - '8443' 67 | - '5432' 68 | volumes: 69 | - './clair_config/local-dev/traefik/:/etc/traefik/:ro' 70 | networks: 71 | - toolbox-net 72 | 73 | networks: 74 | toolbox-net: 75 | external: true 76 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | IMAGE_NAME := devsecops-toolkit 2 | .DEFAULT_GOAL := help 3 | 4 | # Get the latest release tag from git 5 | LATEST_RELEASE := $(shell git describe --tags --abbrev=0) 6 | 7 | include .env 8 | BUILD_ARGS := $(foreach VAR,$(shell sed 's/=.*//' .env),--build-arg $(VAR)=$($(VAR))) 9 | 10 | # Use buildx only in GitHub Actions 11 | ifdef GITHUB_ACTIONS 12 | DOCKER_BUILD_CMD := 
docker buildx build --load --cache-from type=gha --cache-to type=gha,mode=max 13 | else 14 | DOCKER_BUILD_CMD := docker build 15 | endif 16 | 17 | help: 18 | @echo "Usage:" 19 | @echo " make " 20 | @echo "" 21 | @echo "Targets:" 22 | @echo " build Build the Docker image with the software versions described in the .env file" 23 | @echo " rebuild Forces build, even if a previous image exists. Won't delete previous images" 24 | @echo " release Build the Docker image with the software versions described in the .env file, but from a specific release of this repo" 25 | @echo " exec Run an interactive shell inside the container" 26 | @echo " clean Remove Docke image $(IMAGE_NAME) and wipe cache (CAREFUL)" 27 | @echo "" 28 | @echo "Examples:" 29 | @echo " make" 30 | @echo " make build" 31 | @echo " make rebuild" 32 | @echo " make release" 33 | @echo " make exec" 34 | @echo " make clean" 35 | @echo "" 36 | 37 | # Build the Docker image using current branch 38 | build: 39 | @if ! docker images $(IMAGE_NAME) | awk '{ print $$1 }' | grep -q "^$(IMAGE_NAME)$$"; then \ 40 | echo "Docker image $(IMAGE_NAME) not found. Building now..."; \ 41 | $(DOCKER_BUILD_CMD) $(BUILD_ARGS) -t $(IMAGE_NAME) .; \ 42 | else \ 43 | echo "Image found, not building. If you want to rebuild, run make rebuild"; \ 44 | fi 45 | 46 | rebuild: 47 | @echo "Rebuilding $(IMAGE_NAME) without cache, will take a while." 48 | @$(DOCKER_BUILD_CMD) $(BUILD_ARGS) --no-cache -t $(IMAGE_NAME) .; 49 | 50 | 51 | # Build the Docker image using the latest release 52 | release: 53 | @git checkout $(LATEST_RELEASE) 54 | @$(MAKE) build 55 | @git checkout - 56 | 57 | # TODO: Check this. 58 | exec: build 59 | @echo "Running interactive shell inside the $(IMAGE_NAME) container..." 60 | @docker run --hostname trg --rm -it -v $(PWD):/workdir $(IMAGE_NAME):latest /bin/zsh 61 | 62 | clean: 63 | @echo "Removing Docker image with the name $(IMAGE_NAME)..." 64 | @docker rmi -f $(IMAGE_NAME) && docker builder prune -f 65 | 66 | .PHONY: help build rebuild release exec clean 67 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Container OSS DevOps Security Tools 2 | 3 | Unified repository with OSS security tools, just `make exec` and dive into the container! 4 | 5 | There are some tools that have not been integrated into the main container itself, given their nature, 6 | but have been or will be added to the repository with an explanation of how to run them separately. 7 | 8 | ## First step 9 | 10 | ### Firing up the container 11 | 12 | ```bash 13 | Usage: 14 | make 15 | 16 | Targets: 17 | build "Build the Docker image with the software versions described in the .env file" 18 | rebuild "Forces build, even if a previous image exists. Won't delete previous images" 19 | release "Build the Docker image with the software versions described in the .env file, but from a specific release of this repo" 20 | latest "Build the Docker image with the latest version for each tool" 21 | exec "Run an interactive shell inside the container" 22 | clean "Remove Docker image $(IMAGE_NAME) and wipe cache (CAREFUL)" 23 | 24 | Examples: 25 | make 26 | make build 27 | make rebuild 28 | make release 29 | make latest 30 | make exec 31 | make clean 32 | ``` 33 | 34 | ## Second step 35 | 36 | Now you have your container up and running, but what am I supposed to do with it? 
37 | Well, go to **[how to use the tools inside](./HOWTO.md)** to understand how each one of them actually work. 38 | 39 | ### Tools included in this repository 40 | 41 | - [x] 2ms: Detects and manages secrets in files and systems like CMS, chats, and git. () 42 | - [x] better-npm-audit: Enhances npm audit with additional features. () 43 | - [x] checkov: Scans infrastructure as code for misconfigurations and vulnerabilities. () 44 | - [x] clair: Analyzes container images for vulnerabilities. () 45 | - [x] cloudsplaining: Assesses AWS IAM policies for security risks. () 46 | - [x] cloudsploit: Scans AWS environments for security threats and misconfigurations. () 47 | - [x] DependencyCheck: Identifies vulnerabilities in application dependencies. () 48 | - [x] depscan: Scans for vulnerabilities in dependencies. () 49 | - [x] detect-secrets: Detects secrets in codebases to prevent leaks. () 50 | - [x] dockle: Lints container images for security best practices. () 51 | - [x] eslint-plugin-no-secrets: ESLint plugin to detect potential secrets in code. () 52 | - [x] eslint-plugin-no-unsanitized: Prevents unsafe DOM manipulations in JavaScript. () 53 | - [x] eslint-plugin-security: Provides security rules for ESLint. () 54 | - [x] falco: Monitors runtime security events in cloud-native environments. () 55 | - [x] generic: GitHub actions for vulnerability checks. () 56 | - [x] gh-fake-analyzer: Analyzes GitHub profiles for data insights. () 57 | - [x] git-secrets: Prevents committing secrets to git repositories. () 58 | - [x] gitxray: Uses GitHub APIs for security analysis and OSINT. () 59 | - [x] gitleaks: Scans for secrets in code repositories. () 60 | - [x] grype: Scans container images and filesystems for vulnerabilities. () 61 | - [x] harden-runner: Secures GitHub Actions runners with network filtering. () 62 | - [x] hadolint: Lints Dockerfiles for best practices. () 63 | - [x] installed-check: Ensures installed modules match package.json requirements. () 64 | - [x] kics: Detects security issues in infrastructure-as-code. () 65 | - [ ] kube-bench: Checks Kubernetes deployments against CIS benchmarks. () 66 | - [x] lavamoat: Sandboxes dependency graphs for security. () 67 | - [x] legitify: Manages security risks in GitHub and GitLab assets. () 68 | - [x] njsscan: Scans JavaScript applications for security vulnerabilities. () 69 | - [x] node-version-audit: Audits Node.js versions for known vulnerabilities. () 70 | - [x] nodejsscan: Scans Node.js applications for security issues. () 71 | - [x] npm audit: Checks installed packages for vulnerabilities. 72 | - [x] octoscan: Scans GitHub repositories for sensitive information. () 73 | - [x] prowler: Audits AWS environments for security best practices. () 74 | - [x] retirejs: Scans JavaScript libraries for known vulnerabilities. () 75 | - [x] scoutsuite: Audits multi-cloud environments for security issues. () 76 | - [x] secure-repo: Secures GitHub Actions workflows. () 77 | - [x] semgrep: Performs lightweight static analysis across languages. () 78 | - [x] snyk: Scans projects for security vulnerabilities. () 79 | - [x] trivy: Scans for vulnerabilities and misconfigurations in various environments. () 80 | - [x] trufflehog: Finds and analyzes leaked credentials. () [Easy marketplace] (https://github.com/marketplace/actions/trufflehog-oss) 81 | - [x] wait-for-secrets: Provides 2FA for GitHub Actions. () 82 | - [x] yarn-audit-fix: Adds missing fix functionality to yarn audit. 
() 83 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM debian:bookworm-slim AS final 2 | 3 | # Set environment variables for the user and group 4 | ARG USERNAME=wanderer 5 | ARG GROUPNAME=trg 6 | ARG USER_UID=1000 7 | ARG USER_GID=1000 8 | 9 | # Additional ARGs for tool versions 10 | ARG T_2MS 11 | ARG T_CHECKOV 12 | ARG T_CLAIR 13 | ARG T_CLOUDSPLAINING 14 | ARG T_CLOUDSPLOIT 15 | ARG T_DEPCHECK 16 | ARG T_DEPSCAN 17 | ARG T_DETECT_SECRETS 18 | ARG T_DOCKLE 19 | ARG T_GITLEAKS 20 | ARG T_GITXRAY 21 | ARG T_GRYPE 22 | ARG T_HADOLINT 23 | ARG T_KICS 24 | ARG T_LEGITIFY 25 | ARG T_NJSSCAN 26 | ARG T_NODEJSSCAN 27 | ARG T_OCTOSCAN 28 | ARG T_PROWLER 29 | ARG T_RETIRE 30 | ARG T_SCOUTSUITE 31 | ARG T_SEMGREP 32 | ARG T_SNYK 33 | ARG T_TRIVY 34 | ARG T_TRUFFLEHOG 35 | 36 | # Install required packages 37 | RUN apt-get update && apt-get install -y \ 38 | curl \ 39 | wget \ 40 | git \ 41 | build-essential \ 42 | python3 \ 43 | python3-venv \ 44 | python3-dev \ 45 | python3-pip \ 46 | gnupg \ 47 | dirmngr \ 48 | ca-certificates \ 49 | libssl-dev \ 50 | zlib1g-dev \ 51 | libbz2-dev \ 52 | libreadline-dev \ 53 | libsqlite3-dev \ 54 | libffi-dev \ 55 | liblzma-dev \ 56 | zsh \ 57 | pipx \ 58 | sudo \ 59 | make \ 60 | vim \ 61 | unzip \ 62 | default-jre \ 63 | yarn \ 64 | && rm -rf /var/lib/apt/lists/* 65 | 66 | # Create a user group named trg and a user named wanderer with specified UID and GID 67 | RUN groupadd --gid $USER_GID $GROUPNAME && \ 68 | useradd --uid $USER_UID --gid $USER_GID --create-home $USERNAME 69 | 70 | # Configure passwordless sudo for the user wanderer 71 | RUN echo "$USERNAME ALL=(ALL) NOPASSWD:ALL" >> /etc/sudoers 72 | 73 | # Optionally, add the user to the sudo group 74 | RUN usermod -aG sudo $USERNAME 75 | 76 | # Switch to the new user 77 | USER $USERNAME 78 | 79 | # Explicitly setting user home 80 | ENV HOME="/home/wanderer" 81 | 82 | # Set the default shell to zsh 83 | ENV SHELL=/usr/bin/zsh 84 | 85 | # Running everything under zsh 86 | SHELL ["/usr/bin/zsh", "-c"] 87 | 88 | # Set the prompt 89 | RUN echo "autoload -U colors && colors" >> $HOME/.zshrc 90 | RUN echo 'export PS1="%F{green}%n@%m %F{blue}%1~ %F{yellow}$%f "' >> $HOME/.zshrc 91 | 92 | # Building everything inside /src 93 | WORKDIR /src 94 | 95 | ENV ASDF_DIR="$HOME/.asdf" 96 | RUN git clone https://github.com/asdf-vm/asdf.git $ASDF_DIR --branch v0.14.1 97 | RUN echo '. $ASDF_DIR/asdf.sh' >> $HOME/.zshrc \ 98 | && echo 'fpath=(${ASDF_DIR}/completions $fpath)' >> $HOME/.zshrc \ 99 | && echo 'autoload -Uz compinit && compinit' >> $HOME/.zshrc \ 100 | && . $ASDF_DIR/asdf.sh 101 | 102 | ENV PATH="${ASDF_DIR}/bin:${ASDF_DIR}/shims:$PATH" 103 | 104 | # Install Node.js and Go using asdf 105 | RUN . $ASDF_DIR/asdf.sh \ 106 | && asdf plugin add nodejs https://github.com/asdf-vm/asdf-nodejs.git \ 107 | && asdf install nodejs latest \ 108 | && asdf global nodejs latest 109 | 110 | RUN . 
$ASDF_DIR/asdf.sh \ 111 | && asdf plugin add golang https://github.com/asdf-community/asdf-golang.git \ 112 | && asdf install golang latest \ 113 | && asdf global golang latest 114 | 115 | # Set GOBIN to /usr/local/bin for Go binaries 116 | ENV GOBIN=/usr/local/bin 117 | ENV PATH="${GOBIN}:${PATH}" 118 | 119 | # # Install pnpm using npm installed via asdf Node.js 120 | RUN npm install -g pnpm 121 | ENV PNPM_HOME="/home/${USERNAME}/.local/share/pnpm" 122 | ENV PATH="${PNPM_HOME}:${PNPM_HOME}/global/node_modules/.bin:${PATH}" 123 | 124 | # Pnpm-related tools 125 | RUN pnpm install -g eslint-plugin-security \ 126 | eslint-plugin-no-unsanitized \ 127 | eslint-plugin-no-secrets \ 128 | node-version-audit \ 129 | yarn-audit-fix \ 130 | better-npm-audit \ 131 | installed-check \ 132 | snyk@${T_SNYK} \ 133 | retire@${T_RETIRE} 134 | 135 | # Manually install Cloudsploit 136 | RUN git clone --branch v${T_CLOUDSPLOIT} https://github.com/aquasecurity/cloudsploit.git \ 137 | && cd cloudsploit \ 138 | && npm init --yes \ 139 | && npm install ${PACKAGENAME} \ 140 | && npm link /src/cloudsploit 141 | 142 | # Pipx-related tools 143 | RUN pipx install gitxray==${T_GITXRAY} \ 144 | && pipx install semgrep==${T_SEMGREP} \ 145 | && pipx install detect-secrets==${T_DETECT_SECRETS} \ 146 | && pipx install nodejsscan==${T_NODEJSSCAN} \ 147 | && pipx install cloudsplaining==${T_CLOUDSPLAINING} \ 148 | && pipx install checkov==${T_CHECKOV} \ 149 | && pipx install scoutsuite==${T_SCOUTSUITE} \ 150 | && pipx install git+https://github.com/shortdoom/gh-fake-analyzer.git \ 151 | && pipx install prowler==${T_PROWLER} \ 152 | && pipx install njsscan==${T_NJSSCAN} \ 153 | && pipx ensurepath 154 | 155 | # Install git-secrets 156 | RUN git clone https://github.com/awslabs/git-secrets.git git-secrets \ 157 | && cd git-secrets \ 158 | && sudo make install \ 159 | && rm -rf secrets 160 | 161 | RUN git clone https://github.com/mattaereal/gh-workflow-auditor \ 162 | && cd gh-workflow-auditor \ 163 | && python3 -m venv gwa \ 164 | && source gwa/bin/activate \ 165 | && pip install -r requirements.txt \ 166 | && exit 167 | 168 | USER root 169 | 170 | RUN echo '#!/bin/zsh\n\ 171 | source /src/gh-workflow-auditor/gwa/bin/activate\n\ 172 | python3 /src/gh-workflow-auditor/main.py "$@"\n\ 173 | deactivate' > /usr/local/bin/gh-workflow-auditor \ 174 | && chmod +x /usr/local/bin/gh-workflow-auditor \ 175 | && chown -R wanderer:trg /usr/local/bin/gh-workflow-auditor 176 | 177 | USER wanderer 178 | 179 | # Install gitleaks 180 | RUN arch=$(dpkg --print-architecture) \ 181 | && if [ "$arch" = "amd64" ]; then arch="x64"; fi \ 182 | && wget https://github.com/gitleaks/gitleaks/releases/download/v${T_GITLEAKS}/gitleaks_${T_GITLEAKS}_linux_$arch.tar.gz \ 183 | -O gitleaks.tar.gz \ 184 | && sudo tar -xzf gitleaks.tar.gz -C /usr/local/bin gitleaks \ 185 | && sudo chmod +x /usr/local/bin/gitleaks \ 186 | && rm gitleaks.tar.gz 187 | 188 | # Install legitify 189 | RUN wget https://github.com/Legit-Labs/legitify/releases/download/v${T_LEGITIFY}/legitify_${T_LEGITIFY}_linux_$(dpkg --print-architecture).tar.gz \ 190 | -O legitify.tar.gz \ 191 | && sudo tar -xzf legitify.tar.gz -C /usr/local/bin legitify \ 192 | && sudo chmod +x /usr/local/bin/legitify \ 193 | && rm legitify.tar.gz 194 | 195 | # Install kics 196 | RUN git clone https://github.com/Checkmarx/kics.git -b v${T_KICS} \ 197 | && cd kics \ 198 | && go mod vendor \ 199 | && go build -o ./bin/kics cmd/console/main.go \ 200 | && sudo ln -s /src/kics/bin/kics /usr/local/bin/kics \ 201 | && echo 
'export KICS_QUERIES_PATH=/src/kics/assets/queries' >> ~/.zshrc 202 | 203 | # Install Trivy 204 | RUN arch=$(dpkg --print-architecture) \ 205 | && if [ "$arch" = "amd64" ]; then arch="64bit"; fi \ 206 | && if [ "$arch" = "arm64" ]; then arch="ARM64"; fi \ 207 | && wget https://github.com/aquasecurity/trivy/releases/download/v${T_TRIVY}/trivy_${T_TRIVY}_Linux-$arch.deb \ 208 | && sudo dpkg -i trivy_${T_TRIVY}_Linux-$arch.deb \ 209 | && rm trivy_${T_TRIVY}_Linux-$arch.deb 210 | 211 | # Install Trufflehog 212 | RUN wget https://github.com/trufflesecurity/trufflehog/releases/download/v${T_TRUFFLEHOG}/trufflehog_${T_TRUFFLEHOG}_linux_$(dpkg --print-architecture).tar.gz \ 213 | -O trufflehog.tar.gz \ 214 | && sudo tar -xzf trufflehog.tar.gz -C /usr/local/bin trufflehog \ 215 | && sudo chmod +x /usr/local/bin/trufflehog \ 216 | && rm trufflehog.tar.gz 217 | 218 | # Install hadolint 219 | RUN arch=$(dpkg --print-architecture) \ 220 | && if [ "$arch" = "amd64" ]; then arch="x86_64"; fi \ 221 | && if [ "$arch" = "arm64" ]; then arch="arm64"; fi \ 222 | && wget https://github.com/hadolint/hadolint/releases/download/v${T_HADOLINT}/hadolint-Linux-$arch \ 223 | && chmod +x hadolint-Linux-$arch \ 224 | && sudo mv hadolint-Linux-$arch /usr/local/bin/hadolint 225 | 226 | # Install grype 227 | RUN wget https://github.com/anchore/grype/releases/download/v${T_GRYPE}/grype_${T_GRYPE}_linux_$(dpkg --print-architecture).deb \ 228 | && sudo dpkg -i grype_${T_GRYPE}_linux_$(dpkg --print-architecture).deb \ 229 | && rm grype_${T_GRYPE}_linux_$(dpkg --print-architecture).deb 230 | 231 | 232 | # Install dependency-check 233 | RUN wget -q https://github.com/jeremylong/DependencyCheck/releases/download/v${T_DEPCHECK}/dependency-check-${T_DEPCHECK}-release.zip \ 234 | -O dependency-check.zip \ 235 | && unzip dependency-check.zip && rm -f dependency-check.zip \ 236 | && chmod +x dependency-check/bin/dependency-check.sh \ 237 | && sudo ln -s /src/dependency-check/bin/dependency-check.sh /usr/local/bin/dependency-check 238 | 239 | # Install dockle 240 | # VERSION=$(curl --silent "https://api.github.com/repos/goodwithtech/dockle/releases/latest" | \ 241 | # grep '"tag_name":' | \ 242 | # sed -E 's/.*"v([^"]+)".*/\1/') 243 | RUN curl -L -o dockle.deb https://github.com/goodwithtech/dockle/releases/download/v${T_DOCKLE}/dockle_${T_DOCKLE}_Linux-64bit.deb \ 244 | && sudo dpkg -i dockle.deb && rm dockle.deb 245 | 246 | # Install 2ms 247 | RUN wget https://github.com/checkmarx/2ms/releases/download/v${T_2MS}/linux-amd64.zip \ 248 | && unzip linux-amd64.zip && rm -f linux-amd64.zip \ 249 | && sudo mv 2ms /usr/local/bin/2ms \ 250 | && sudo chmod +x /usr/local/bin/2ms 251 | 252 | # Install clair 253 | RUN wget https://github.com/quay/clair/releases/download/v${T_CLAIR}/clairctl-linux-$(dpkg --print-architecture) \ 254 | -O clairctl \ 255 | && chmod +x clairctl \ 256 | && sudo mv clairctl /usr/local/bin/clairctl 257 | 258 | # Install depscan 259 | RUN curl -LO https://github.com/owasp-dep-scan/depscan-bin/releases/download/v${T_DEPSCAN}/depscan-linux-amd64 \ 260 | && chmod +x depscan-linux-amd64 \ 261 | && sudo mv depscan-linux-amd64 /usr/local/bin/depscan 262 | 263 | # Install Octoscan 264 | RUN curl -LO https://github.com/synacktiv/octoscan/releases/download/v${T_OCTOSCAN}/octoscan \ 265 | && chmod +x octoscan \ 266 | && sudo mv octoscan /usr/local/bin 267 | 268 | # Clean up 269 | RUN sudo apt-get clean \ 270 | && sudo rm -rf /var/lib/apt/lists/* 271 | 272 | # Configure MOTD 273 | COPY --link --chown=root:root motd /etc/motd 274 | 
RUN echo '\ncat /etc/motd\n' >> ~/.zshrc 275 | 276 | # Set working directory 277 | WORKDIR /home/${USERNAME} 278 | CMD ["/bin/zsh"] 279 | -------------------------------------------------------------------------------- /HOWTO.md: -------------------------------------------------------------------------------- 1 | # How to use the tools inside 2 | 3 | We leave you with a rough representative of what do these tools cover, so you get a better grasp on what to use them for. 4 | 5 | **Secrets** 6 | 2ms, gitleaks, git-secrets, trufflehog 7 | 8 | **GitHub / GitLab** 9 | gitxray, gh-fake-analyzer, legitify 10 | 11 | **Multi-purpose** 12 | semgrep, trivy, kics 13 | 14 | **IaC / SCA / Code** 15 | checkov, scoutsuite, dependency-check 16 | 17 | **Cloud** 18 | falco, snyk, cloudsplaining 19 | 20 | **Containers / Images** 21 | clair, snyk, grype, hadolint, dockle 22 | 23 | **NodeJS** 24 | nodejsscan, retirejs, installed-check, better-npm-audit, eslint-plugin-security, eslint-plugin-no-unsanitized, eslint-plugin-no-secrets, node-version-audit, yarn-audit-fix 25 | 26 | 27 | - [How to use the tools inside](#how-to-use-the-tools-inside) 28 | - [GitXray | Harvest public information from GitHub APIs](#gitxray--harvest-public-information-from-github-apis) 29 | - [GH Fake Analyzer | Script to analyze profile GitHub data](#gh-fake-analyzer--script-to-analyze-profile-github-data) 30 | - [git-secrets | Avoid commiting secrets](#git-secrets--avoid-commiting-secrets) 31 | - [Trufflehog | Find, verify, and analyze leaked credentials](#trufflehog--find-verify-and-analyze-leaked-credentials) 32 | - [Gitleaks | Detect and prevent secrets in git repos](#gitleaks--detect-and-prevent-secrets-in-git-repos) 33 | - [2ms | Identify secrets across an entire org](#2ms--identify-secrets-across-an-entire-org) 34 | - [detect-secrets | Detect and prevent secrets in a codebase](#detect-secrets--detect-and-prevent-secrets-in-a-codebase) 35 | - [Trivy | Very complete tool, misconfigs, vulns and more](#trivy--very-complete-tool-misconfigs-vulns-and-more) 36 | - [Clair (WIP - NOT WORKING) | Scan containers!](#clair-wip---not-working--scan-containers) 37 | - [Snyk | Scan and monitor containers, cloud](#snyk--scan-and-monitor-containers-cloud) 38 | - [Grype | Vuln scanner for images and fs](#grype--vuln-scanner-for-images-and-fs) 39 | - [Falco (WIP) | Cloud native runtime tool for Linux OS](#falco-wip--cloud-native-runtime-tool-for-linux-os) 40 | - [Semgrep | Static analyzer for almost anything](#semgrep--static-analyzer-for-almost-anything) 41 | - [sast-scan (WIP) | Static analysis with many many tools](#sast-scan-wip--static-analysis-with-many-many-tools) 42 | - [Legitify | GitHub and GitLab misconfiguration checker](#legitify--github-and-gitlab-misconfiguration-checker) 43 | - [KICS | IaC general purpose scanner](#kics--iac-general-purpose-scanner) 44 | - [Checkov | Static code analyzer and SCA tool for images and OSS](#checkov--static-code-analyzer-and-sca-tool-for-images-and-oss) 45 | - [ScoutSuite | Multi-cloud security-auditing tool](#scoutsuite--multi-cloud-security-auditing-tool) 46 | - [Cloudsplaining | AWS IAM assessment tool](#cloudsplaining--aws-iam-assessment-tool) 47 | - [Hadolint | Scans Dockerfiles for good linting practices](#hadolint--scans-dockerfiles-for-good-linting-practices) 48 | - [Dockle | Container image linter for security](#dockle--container-image-linter-for-security) 49 | - [DependencyCheck | Checks for public vulns on dependencies](#dependencycheck--checks-for-public-vulns-on-dependencies) 50 | - [nodejsscan 
| NodeJS application code scanner](#nodejsscan--nodejs-application-code-scanner) 51 | - [Lavamoat | JS framework to prevent supply-chain attacks](#lavamoat--js-framework-to-prevent-supply-chain-attacks) 52 | - [NodeJS specific tools](#nodejs-specific-tools) 53 | - [retirejs | Vuln scanner for JS](#retirejs--vuln-scanner-for-js) 54 | - [installed-check | Verifies modules are in tune with reqs in package.json](#installed-check--verifies-modules-are-in-tune-with-reqs-in-packagejson) 55 | - [better-npm-audit | npm audit++](#better-npm-audit--npm-audit) 56 | - [eslint-plugin-security | JS plugin to identify potential hotspots](#eslint-plugin-security--js-plugin-to-identify-potential-hotspots) 57 | - [eslint-plugin-no-unsanitized | ESLint rule to disallow unsafe patterns](#eslint-plugin-no-unsanitized--eslint-rule-to-disallow-unsafe-patterns) 58 | - [eslint-plugin-no-secrets | ESLint plugin to find secrets](#eslint-plugin-no-secrets--eslint-plugin-to-find-secrets) 59 | - [node-version-audit | Checks node's version for CVEs](#node-version-audit--checks-nodes-version-for-cves) 60 | - [yarn-audit-fix | Fixes issues found while yarn auditing](#yarn-audit-fix--fixes-issues-found-while-yarn-auditing) 61 | - [GitHub actions](#github-actions) 62 | - [List of secure measures for your repo](#list-of-secure-measures-for-your-repo) 63 | - [harden-runner | Prevent exfiltration, tampering, backdoors](#harden-runner--prevent-exfiltration-tampering-backdoors) 64 | - [wait-for-secrets | MFA for GHA](#wait-for-secrets--mfa-for-gha) 65 | - [Snyk Actions | Snyk's set of actions to check for vulns and more](#snyk-actions--snyks-set-of-actions-to-check-for-vulns-and-more) 66 | - [KICS Action | Static code analysis for IaC](#kics-action--static-code-analysis-for-iac) 67 | - [Legitify Action | Analyze your repo for misconfigs \& compliance issues](#legitify-action--analyze-your-repo-for-misconfigs--compliance-issues) 68 | - [Trivy Action | Add a thorough vuln scan to your CI/CD](#trivy-action--add-a-thorough-vuln-scan-to-your-cicd) 69 | - [2ms Action | Apply too many secrets to your workflow](#2ms-action--apply-too-many-secrets-to-your-workflow) 70 | - [GitLeaks Action | Add gitleaks as a github action](#gitleaks-action--add-gitleaks-as-a-github-action) 71 | - [Trufflehog Action | Continuously scan for secrets](#trufflehog-action--continuously-scan-for-secrets) 72 | - [Dockle action | Cointinuously scan for security issues in Docker files](#dockle-action--cointinuously-scan-for-security-issues-in-docker-files) 73 | - [Online version of some tools](#online-version-of-some-tools) 74 | 75 | ## GitXray | Harvest public information from GitHub APIs 76 | 77 | [GitHub](https://github.com/kulkansecurity/gitxray) | [Website](https://www.gitxray.com) 78 | 79 | **OSINT | Forensics | GitHub** 80 | 81 | Gitxray (short for Git X-Ray) is a multifaceted security tool designed for use on GitHub 82 | repositories. It can serve many purposes, including OSINT and Forensics. `gitxray` leverages public 83 | GitHub REST APIs to gather information that would otherwise be very time-consuming to obtain 84 | manually. Additionally, it seeks out information in unconventional places. 85 | 86 | Gitxray can be used to, for example: 87 | 88 | ```bash 89 | # Find sensitive information in contributor profiles disclosed by accident within, for example, 90 | # Armored PGP Keys, or Key Names. 91 | gitxray -r https://github.com/some-org/some-repository -v -f user_input 92 | 93 | # Identify threat actors in a Repository. 
You may spot co-owned or shared accounts, as well as 94 | # inspect public events to spot fake Stargazers. 95 | gitxray -r https://github.com/some-org/some-repository -v -f keys,association,starred 96 | 97 | # Identify fake or infected Repositories. It can detect tampered commit dates as well as, for 98 | # example, Release assets updated post-release. 99 | gitxray -r https://github.com/some-org/some-repository -v -f warning 100 | 101 | # Forensics use-cases, such as filtering results by date in order to check what else happened on the 102 | # day of an incident. 103 | gitxray -r https://github.com/some-org/some-repository -v -f 2024-09-01 104 | 105 | # And a lot more! Run a full X-Ray in Verbose mode to collect a ton of data. 106 | gitxray -r https://github.com/some-org/some-repository -v 107 | ``` 108 | 109 | Please refer to the Documentation for additional use-cases and introductory information. 110 | 111 | - [https://kulkansecurity.github.io/gitxray/](https://kulkansecurity.github.io/gitxray/) 112 | - [https://www.gitxray.com/](https://www.gitxray.com/) 113 | 114 | ## GH Fake Analyzer | Script to analyze profile GitHub data 115 | 116 | [GitHub](https://github.com/shortdoom/gh-fake-analyzer) 117 | 118 | **OSINT | GitHub | Script** 119 | 120 | Download and analyze profile data for any GitHub user or organization. This reconnaissance tool is 121 | designed for the OSINT/security community, enabling the inspection of potential bot, scammer, 122 | blackhat, or fake employee accounts for dark patterns (see: Malicious GitHub Accounts). 123 | 124 | ```bash 125 | gh-analyze <username> # analyze a single user 126 | gh-analyze <username> --out_path /path/to/dir # save to a different dir than /out 127 | gh-analyze --targets <custom_file.txt> # custom file to read usernames from as "targets" 128 | gh-analyze <username> --commit_search # search github for commit messages (slow, experimental) 129 | gh-analyze <username> --token <token> # provide GH_TOKEN to use for this run 130 | 131 | gh-monitor --username <username> # Monitor a single user 132 | gh-monitor --targets <custom_file.txt> # Monitor multiple usernames 133 | ``` 134 | 135 | ## git-secrets | Avoid commiting secrets 136 | 137 | [GitHub](https://github.com/awslabs/git-secrets) 138 | 139 | **Secrets | git | git-hook** 140 | 141 | Prevents you from committing passwords and other sensitive information to a git repository. Set up 142 | rules, and scan. 143 | 144 | Install `git-hooks` on your repo so you can scan before committing: 145 | 146 | ```bash 147 | git secrets --install 148 | ``` 149 | 150 | Add a prohibited pattern to the current repo: 151 | 152 | ```bash 153 | git secrets --add '[A-Z0-9]{20}' 154 | ``` 155 | 156 | Add a prohibited pattern to the global git config: 157 | 158 | ```bash 159 | git secrets --add --global '[A-Z0-9]{20}' 160 | git secrets --add 'password\s*=\s*.+' 161 | ``` 162 | 163 | Add a configuration template if you want to add hooks to all repositories you initialize or clone in 164 | the future. 165 | 166 | ```bash 167 | git secrets --register-aws --global 168 | ``` 169 | 170 | Scan! 171 | 172 | ```bash 173 | # Scan for secrets inside history 174 | git secrets --scan-history 175 | 176 | # Scan files and folders 177 | git secrets --scan .env 178 | git secrets --scan tests/* 179 | git secrets --scan -r deployment/ 180 | ``` 181 | 182 | For more examples and advanced usage refer to their repository.
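If a legitimate test string keeps tripping the scanner, git-secrets also lets you review and allow-list patterns. A minimal sketch, using flags from the upstream README (the literal value below is just a placeholder):

```bash
# Show the prohibited and allowed patterns currently configured
git secrets --list

# Allow-list a known false positive so future scans stop flagging it
git secrets --add --allowed --literal 'EXAMPLEFAKEKEY1234567890'
```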
183 | 184 | ## Trufflehog | Find, verify, and analyze leaked credentials 185 | 186 | [GitHub](https://github.com/trufflesecurity/trufflehog) | [Action](https://github.com/marketplace/actions/trufflehog-oss) 187 | 188 | **Secrets | Analysis | Various** 189 | 190 | To start with the wizard you can run `sudo trufflehog` and follow the steps! But if you want a 191 | specific command, you can run each of them manually. Check `--help` to see them all. You can scan 192 | from git, to s3/gcs buckets; docker images, CIs, and even your filesystem. 193 | 194 | ```bash 195 | trufflehog --no-update git https://github.com/trufflesecurity/test_keys --only-verified 196 | ``` 197 | 198 | ## Gitleaks | Detect and prevent secrets in git repos 199 | 200 | [GitHub](https://github.com/gitleaks/gitleaks) | [Action](https://github.com/gitleaks/gitleaks-action) 201 | 202 | **Secrets | git** 203 | 204 | Gitleaks is a SAST tool for detecting and preventing hardcoded secrets like passwords, API keys, and 205 | tokens in git repos. Gitleaks is an easy-to-use, all-in-one solution for detecting secrets, past or 206 | present, in your code. 207 | 208 | ```bash 209 | ➜ ~/code(master) gitleaks git -v 210 | 211 | ○ 212 | │╲ 213 | │ ○ 214 | ○ ░ 215 | ░ gitleaks 216 | 217 | 218 | Finding: "export BUNDLE_ENTERPRISE__CONTRIBSYS__COM=cafebabe:deadbeef", 219 | Secret: cafebabe:deadbeef 220 | RuleID: sidekiq-secret 221 | Entropy: 2.609850 222 | File: cmd/generate/config/rules/sidekiq.go 223 | Line: 23 224 | Commit: cd5226711335c68be1e720b318b7bc3135a30eb2 225 | Author: John 226 | Email: john@users.noreply.github.com 227 | Date: 2022-08-03T12:31:40Z 228 | Fingerprint: cd5226711335c68be1e720b318b7bc3135a30eb2:cmd/generate/config/rules/sidekiq.go:sidekiq-secret:23 229 | ``` 230 | 231 | ## 2ms | Identify secrets across an entire org 232 | 233 | [GitHub](https://github.com/Checkmarx/2ms) | [Action](https://github.com/Checkmarx/2ms/blob/master/.github/workflows/release.yml) | [Azure](https://learn.microsoft.com/en-us/azure/devops/pipelines/create-first-pipeline) 234 | 235 | **Secrets | Various** 236 | 237 | Too many secrets (2ms) is an open source CLI tool, powered by Checkmarx, that enables you to 238 | identify sensitive data such as secrets, authentication keys and passwords that are stored in your 239 | system in unencrypted text. This tool supports scanning of internal communication platforms (Slack, 240 | Discord), content management (Confluence, Paligo) and source code storage locations (Git repo, local 241 | directory). This application is written in Go language and is based on the framework provided by 242 | gitleaks. 243 | 244 | The tool checks the content using a series of rules that are designed to identify a wide range of 245 | sensitive items such as AWS access token, Bitbucket Client ID, GitHub PAT etc. For a complete list 246 | of rules, see their docs. 247 | 248 | ```bash 249 | # Scan a local repo 250 | 2ms git . 251 | 252 | # Scan filesystem's current path 253 | 2ms filesystem --path . 254 | ``` 255 | 256 | [How to get a Discord token.](https://www.geeksforgeeks.org/how-to-get-discord-token/) 257 | 258 | ```bash 259 | # Scan a Discord server 260 | 2ms discord --token --server 1097814317077897307 --duration 9999h 261 | ``` 262 | 263 | For more examples refer to their official documentation. 
264 | 265 | ## detect-secrets | Detect and prevent secrets in a codebase 266 | 267 | [GitHub](https://github.com/Yelp/detect-secrets) 268 | 269 | **Secrets | Various** 270 | 271 | detect-secrets is an aptly named module for (surprise, surprise) detecting secrets within a code 272 | base. 273 | 274 | However, unlike other similar packages that solely focus on finding secrets, this package is 275 | designed with the enterprise client in mind: providing a backwards compatible, systematic means of: 276 | 277 | - Preventing new secrets from entering the code base, 278 | - Detecting if such preventions are explicitly bypassed, and 279 | - Providing a checklist of secrets to roll, and migrate off to a more secure storage. 280 | 281 | ```bash 282 | # Create a baseline of potential secrets currently found in your git repository. 283 | detect-secrets scan > .secrets.baseline 284 | # or, to run it from a different directory: 285 | detect-secrets -C /path/to/directory scan > /path/to/directory/.secrets.baseline 286 | 287 | # Scanning non-git tracked files: 288 | detect-secrets scan test_data/ --all-files > .secrets.baseline 289 | 290 | # Scanning Staged Files Only 291 | git diff --staged --name-only -z | xargs -0 detect-secrets-hook --baseline .secrets.baseline 292 | # Scanning All Tracked Files 293 | git ls-files -z | xargs -0 detect-secrets-hook --baseline .secrets.baseline 294 | 295 | # Viewing All Enabled Plugins: 296 | detect-secrets scan --list-all-plugins 297 | ``` 298 | 299 | For more examples and advanced usage refer to their official documentation. 300 | 301 | ## Trivy | Very complete tool, misconfigs, vulns and more 302 | 303 | [GitHub](https://github.com/aquasecurity/trivy) | [Action](https://github.com/aquasecurity/trivy-action) | [VSCode](https://github.com/aquasecurity/trivy-vscode-extension) 304 | 305 | **Scanner | Vulns | Secrets | Misconfigs | Code | IaC** 306 | 307 | Find vulnerabilities, misconfigurations, secrets, SBOM in containers, Kubernetes, code repositories, 308 | clouds, and more. 309 | 310 | General usage: 311 | 312 | ```bash 313 | trivy <command> [--scanners <scanner1,scanner2>] <target> 314 | ``` 315 | 316 | ```bash 317 | trivy image python:3.4-alpine 318 | trivy fs --scanners vuln,secret,misconfig myproject/ 319 | trivy k8s --report summary cluster 320 | ``` 321 | 322 | ## Clair (WIP - NOT WORKING) | Scan containers! 323 | 324 | [GitHub](https://github.com/quay/clair) 325 | 326 | **Static Analysis | Containers** 327 | 328 | > The documentation for clair is currently incomplete. The provided config.yaml file from their 329 | > documentation site is no longer being accepted by clair and requires manual modification. 330 | 331 | Clair is an open source project for the static analysis of vulnerabilities in application containers 332 | (currently including OCI and docker). 333 | 334 | Since clair scans containers, it is best to run it separately.
335 | 336 | ```bash 337 | docker pull quay.io/projectquay/clair:4.7.4 338 | # Alternatively you can try and pull latest but it did not work out for me 339 | docker pull quay.io/projectquay/clair 340 | ``` 341 | 342 | ```bash 343 | # Download sample config 344 | wget -q https://github.com/quay/clair/blob/main/config.yaml.sample \ 345 | --output-document /tmp/clair/config.yaml 346 | # Run clair 347 | docker run -p 6060:6060 -p 6061:6061 \ 348 | -v /tmp/clair:/clair/config \ 349 | quay.io/projectquay/clair:4.7.4 -conf /clair/config/config.yaml -mode combo 350 | ``` 351 | 352 | One liner if you don't want a container: 353 | 354 | ```bash 355 | sudo wget -qO /usr/local/bin/clair https://github.com/quay/clair/releases/download/v4.7.4/clairctl-linux-$(dpkg --print-architecture) \ 356 | && sudo chmod +x /usr/local/bin/clair 357 | ``` 358 | 359 | ## Snyk | Scan and monitor containers, cloud 360 | 361 | [GitHub](https://github.com/snyk/cli) | [Website](https://snyk.io/) | [Action](https://github.com/marketplace/actions/snyk) 362 | 363 | **Monitor | Scanner | Cloud | IaC** 364 | 365 | A developer-first, cloud-native security tool to scan and monitor your software development projects 366 | for security vulnerabilities. Snyk scans multiple content types for security issues: 367 | 368 | - [**Snyk Open Source**](https://docs.snyk.io/scan-using-snyk/snyk-open-source): Find and 369 | automatically fix open-source vulnerabilities 370 | - [**Snyk Code**](https://docs.snyk.io/scan-using-snyk/snyk-code): Find and fix vulnerabilities in 371 | your application code in real time 372 | - [**Snyk Container**](https://docs.snyk.io/scan-using-snyk/snyk-container): Find and fix 373 | vulnerabilities in container images and Kubernetes applications 374 | - [**Snyk Infrastructure as Code**](https://docs.snyk.io/scan-using-snyk/scan-infrastructure): Find 375 | and fix insecure configurations in Terraform and Kubernetes code 376 | 377 | Before you start using it, you need to authenticate. A free tier would do just fine for now: 378 | 379 | ```bash 380 | snyk auth --auth-type=token # literally this command, it will provide a link from where to auth. 381 | ``` 382 | 383 | By simply running `snyk` you'll see the available commands with their description. Some of them are 384 | `test`, `container`, `iac`, `code`, `log4shell`, and `monitor` among others. 385 | 386 | You can scan a container for vulnerabilities with this simple command for example: 387 | 388 | ```bash 389 | snyk container test vulnerables/web-dvwa 390 | ``` 391 | 392 | And you can even monitor them through their website. For more information, refer to the 393 | documentation and examples for each command. 394 | 395 | ## Grype | Vuln scanner for images and fs 396 | 397 | [GitHub](https://github.com/anchore/grype/) 398 | 399 | **Scanner | Containers | Fs** 400 | 401 | A vulnerability scanner for container images and filesystems. Easily install the binary to try it 402 | out. Works with Syft, the powerful SBOM (software bill of materials) tool for container images and 403 | filesystems. 
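To illustrate that pairing, here is a minimal sketch that assumes Syft is installed separately (it is not part of this container) and uses a placeholder image name:

```bash
# Produce an SBOM for the image with Syft (placeholder image name)
syft yourrepo/yourimage:tag -o json > sbom.json

# Scan the saved SBOM with Grype, without re-analyzing the image
grype sbom:./sbom.json
```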
Supports the following image sources:

```bash
grype yourrepo/yourimage:tag    defaults to using images from a Docker daemon
grype path/to/yourproject       a Docker tar, OCI tar, OCI directory, SIF container, or generic filesystem directory
```

You can also explicitly specify the scheme to use:

```bash
grype podman:yourrepo/yourimage:tag          explicitly use the Podman daemon
grype docker:yourrepo/yourimage:tag          explicitly use the Docker daemon
grype docker-archive:path/to/yourimage.tar   use a tarball from disk for archives created from "docker save"
grype oci-archive:path/to/yourimage.tar      use a tarball from disk for OCI archives (from Podman or otherwise)
grype oci-dir:path/to/yourimage              read directly from a path on disk for OCI layout directories (from Skopeo or otherwise)
grype singularity:path/to/yourimage.sif      read directly from a Singularity Image Format (SIF) container on disk
grype dir:path/to/yourproject                read directly from a path on disk (any directory)
grype file:path/to/yourfile                  read directly from a file on disk
grype sbom:path/to/syft.json                 read Syft JSON from path on disk
grype registry:yourrepo/yourimage:tag        pull image directly from a registry (no container runtime required)
grype purl:path/to/purl/file                 read a newline separated file of purls from a path on disk
```

## Falco (WIP) | Cloud native runtime tool for Linux OS

[GitHub](https://github.com/falcosecurity/falco) | [Website](https://falco.org/)

**Monitor | Containers**

Falco is a cloud native runtime security tool for Linux operating systems. It is designed to detect
and alert on abnormal behavior and potential security threats in real time.

At its core, Falco is a kernel monitoring and detection agent that observes events, such as
syscalls, based on custom rules. Falco can enhance these events by integrating metadata from the
container runtime and Kubernetes. The collected events can be analyzed off-host in SIEM or data lake
systems.

Falco comes with its own container. Current instructions on how to run it are incomplete. Come back
later, dear wanderer!

## Semgrep | Static analyzer for almost anything

[GitHub](https://github.com/semgrep/semgrep) | [Website](https://semgrep.dev)

**Static Analysis | General purpose**

Powerful, customizable, lightweight static analysis for many languages.

1. Run `semgrep login` to create your account and log in to Semgrep.
   Logging into Semgrep gets you access to:
   - [Semgrep Supply Chain](https://semgrep.dev/products/semgrep-supply-chain): A dependency scanner
     that detects reachable vulnerabilities in third party libraries
   - [Semgrep Code's Pro rules](https://semgrep.dev/products/semgrep-code): 600+ high confidence rules
     written by Semgrep's security research team
   - [Semgrep Code's Pro engine](https://semgrep.dev/products/pro-engine/): An advanced code analysis
     engine, designed to detect complex vulnerabilities, and reduce false positives
1. Go to your app's root directory and run `semgrep ci`. This will scan your project to check for
   vulnerabilities in your source code and its dependencies.
1. Try writing your own query interactively with `-e`. For example, a check for Python `==` where the
   left and right hand sides are the same (potentially a bug): `semgrep -e '$X == $X' --lang=py path/to/src`

**Semgrep has an entire ecosystem** which consists of the following: **Code, Supply Chain, Secrets,
AppSec Platform, OSS Engine**. They provide a range of resources for you to run analyses and scans,
from vulnerabilities to secrets.

To learn more about Semgrep, visit:

- [Semgrep Playground](https://semgrep.dev/editor) - An online interactive tool for writing and
  sharing rules.
- [Semgrep Registry](https://semgrep.dev/explore) - 2,000+ community-driven rules covering security,
  correctness, and dependency vulnerabilities.

**Rulesets** for you to run more specific actions:

```bash
# Security checks for docker-compose configuration files.
semgrep --config "p/docker-compose"
# Selected rules from Hadolint, a Dockerfile linter, rewritten in Semgrep
semgrep --config "p/dockerfile"
# Security checks for kubernetes configuration files.
semgrep --config "p/kubernetes"
# Universal linter to identify vulnerabilities in your code base with the flawfinder rule pack
semgrep --config "p/flawfinder"
```

## sast-scan (WIP) | Static analysis with many many tools

[GitHub](https://github.com/marksarka/sast-scan)

**Static Analysis | General purpose**

Scan is a free & Open Source DevSecOps tool for performing static analysis based security testing of
your applications and their dependencies. CI and Git friendly.

Right now we're planning to incorporate it into the repo, but in its own container, given the number
of tools it brings inside, some of which even overlap. If you want to do it by yourself you can
just follow their documentation running `sh <(curl https://slscan.sh)`, or simply:

```bash
docker pull shiftleft/scan-slim:latest
docker save -o scanslim.tar shiftleft/scan-slim:latest
# podman save --format oci-archive -o scanslim.tar shiftleft/scan-slim:latest
docker run --rm -e "WORKSPACE=${PWD}" -v $PWD:/app shiftleft/scan scan --src /app/scanslim.tar -o /app/reports --type docker
```

## Legitify | GitHub and GitLab misconfiguration checker

[GitHub](https://github.com/Legit-Labs/legitify) | [Action](https://github.com/marketplace/actions/legitify-analyze)

**Scanner | Misconfig | GitHub | GitLab**

Detect and remediate misconfigurations and security risks across all your GitHub and GitLab assets.

```bash
legitify -t <pat> analyze
SCM_TOKEN=<pat> legitify analyze
```

1. To get the most out of legitify, you need to be an owner of at least one GitHub organization.
   Otherwise, you can still use the tool if you're an admin of at least one repository inside an
   organization, in which case you'll be able to see only repository-related policies results.
2. legitify requires a GitHub personal access token (PAT) to analyze your resources successfully,
   which can be either provided as an argument (`-t`) or as an environment variable (`SCM_TOKEN`).
   The PAT needs the following scopes for full analysis:

   ```plain
   admin:org, read:enterprise, admin:org_hook, read:org, repo, read:repo_hook
   ```

   See [Creating a Personal Access Token](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token) for more information.
   Fine-grained personal access tokens are currently not supported.

By default, legitify will check the policies against all your resources (organizations,
repositories, members, actions). Archived repositories are skipped.

You can control which resources will be analyzed with command-line flags such as `--namespace` and `--org`:

- `--namespace (-n)`: will analyze policies that relate to the specified resources
- `--org`: will limit the analysis to the specified GitHub organizations or GitLab group, excluding
  archived repositories
- `--repo`: will limit the analysis to the specified GitHub repositories or GitLab projects
- `--scm`: specify the source code management platform. Possible values are: `github` or `gitlab`.
  Defaults to `github`. Please note: when running on GitLab, `--scm gitlab` is required.
- `--enterprise`: will specify which enterprises should be analyzed. Please note: in order to
  analyze an enterprise, an enterprise slug must be provided.

```bash
legitify -t <pat> analyze --org org1,org2 --namespace organization,member
```

The above command will test organization and member policies against org1 and org2.

## KICS | IaC general purpose scanner

[GitHub](https://github.com/Checkmarx/kics) | [Action](https://github.com/marketplace/actions/kics-github-action) | [Scan](https://kics.checkmarx.net/)

**Scanner | Vulns | Misconfig | IaC**

Find security vulnerabilities, compliance issues, and infrastructure misconfigurations early in the
development cycle of your infrastructure-as-code.

Using kics out of the box is really easy, just run:

```bash
kics scan -p path/to/project
```

To get the most out of this tool, keep reading:

- [Understand how to configure KICS](configuration-file.md) so you can have a better KICS
  experience.
- [Explore KICS commands](commands.md) to see what you can do with KICS.
- [Explore supported platforms](platforms.md) to see which files you can scan with KICS.
- [Explore the queries internals](queries.md) for better understanding how KICS works.
- [Create a new query](creating-queries.md) to learn how to create your own custom queries.
- [Explore the output results format](results.md) and quickly fix the issues detected.
- [Contribute](CONTRIBUTING.md) if you want to go the extra mile.

## Checkov | Static code analyzer and SCA tool for images and OSS

[GitHub](https://github.com/bridgecrewio/checkov)

**Static Analysis | IaC | SCA**

**Checkov** is a static code analysis tool for infrastructure as code (IaC) and also a software
composition analysis (SCA) tool for images and open source packages.

It scans cloud infrastructure provisioned using [Terraform](https://terraform.io/), [Terraform plan](https://github.com/bridgecrewio/checkov/blob/main/docs/7.Scan%20Examples/Terraform%20Plan%20Scanning.md), [Cloudformation](https://github.com/bridgecrewio/checkov/blob/main/docs/7.Scan%20Examples/Cloudformation.md),
[AWS SAM](https://github.com/bridgecrewio/checkov/blob/main/docs/7.Scan%20Examples/AWS%20SAM.md), [Kubernetes](https://github.com/bridgecrewio/checkov/blob/main/docs/7.Scan%20Examples/Kubernetes.md), [Helm charts](https://github.com/bridgecrewio/checkov/blob/main/docs/7.Scan%20Examples/Helm.md), [Kustomize](https://github.com/bridgecrewio/checkov/blob/main/docs/7.Scan%20Examples/Kustomize.md), [Dockerfile](https://github.com/bridgecrewio/checkov/blob/main/docs/7.Scan%20Examples/Dockerfile.md), [Serverless](https://github.com/bridgecrewio/checkov/blob/main/docs/7.Scan%20Examples/Serverless%20Framework.md), [Bicep](https://github.com/bridgecrewio/checkov/blob/main/docs/7.Scan%20Examples/Bicep.md), [OpenAPI](https://github.com/bridgecrewio/checkov/blob/main/docs/7.Scan%20Examples/OpenAPI.md),
[ARM Templates](https://github.com/bridgecrewio/checkov/blob/main/docs/7.Scan%20Examples/Azure%20ARM%20templates.md), or [OpenTofu](https://opentofu.org/) and detects security and compliance misconfigurations using
graph-based scanning.

It performs [Software Composition Analysis (SCA) scanning](docs/7.Scan%20Examples/Sca.md), which is a scan of open source
packages and images for Common Vulnerabilities and Exposures (CVEs).

**Configure** an input folder or file:

```sh
checkov --directory /user/path/to/iac/code
```

Or a **specific file** or files:

```sh
checkov --file /user/tf/example.tf
# or
checkov -f /user/cloudformation/example1.yml -f /user/cloudformation/example2.yml
```

For more examples, take a look at the official repository.

## ScoutSuite | Multi-cloud security-auditing tool

[GitHub](https://github.com/nccgroup/ScoutSuite)

**Scanner | Risk Analysis | Cloud | Various**

Scout Suite is an open source multi-cloud security-auditing tool, which enables security posture
assessment of cloud environments. Using the APIs exposed by cloud providers, Scout Suite gathers
configuration data for manual inspection and highlights risk areas. Rather than going through dozens
of pages on the web consoles, Scout Suite presents a clear view of the attack surface automatically.

Scout Suite was designed by security consultants/auditors. It is meant to provide a point-in-time
security-oriented view of the cloud account it was run in. Once the data has been gathered, all
usage may be performed offline.

As such, it needs access to each one of the cloud services you want it to take a look at.

Usage is really simple once you've configured credentials. Refer to the wiki section
[Configuration and usage](https://github.com/nccgroup/ScoutSuite/wiki).

The command `scout` will return the list of available providers, although **Digital Ocean** wasn't
available to me when I last tried.
Nevertheless, you can ask for help using `scout PROVIDER --help`:

```bash
scout aws --help
# returns help on aws
scout aws --profile PROFILE
scout gcp --user-account
scout azure --cli
```

## Cloudsplaining | AWS IAM assessment tool

[GitHub](https://github.com/salesforce/cloudsplaining) | [ReadTheDocs](https://cloudsplaining.readthedocs.io/en/latest/)

**Scanner | AWS IAM**

Cloudsplaining is an AWS IAM Security Assessment tool that identifies violations of least privilege
and generates a risk-prioritized HTML report.

You must have AWS credentials configured that can be used by the CLI.

You must have the privileges to run `iam:GetAccountAuthorizationDetails`. The
`arn:aws:iam::aws:policy/SecurityAudit` policy includes this, as do many others that allow Read
access to the IAM Service.

To download the account authorization details, ensure you are authenticated to AWS, then run
cloudsplaining's download command:

```bash
cloudsplaining download
```

If you prefer to use your `~/.aws/credentials` file instead of environment variables, you can
specify the profile name:

```bash
cloudsplaining download --profile myprofile
```

It will download a JSON file in your current directory that contains your account authorization
detail information.

Now that we've downloaded the account authorization file, we can scan all of the AWS IAM policies.

```bash
cloudsplaining scan --exclusions-file exclusions.yml --input-file examples/files/example.json --output examples/files/
```

## Hadolint | Scans Dockerfiles for good linting practices

[GitHub](https://github.com/hadolint/hadolint) | [Scan](https://hadolint.github.io/hadolint)

**Static Analysis | Dockerfile**

A smarter Dockerfile linter that helps you build [best practice](https://docs.docker.com/engine/userguide/eng-image/dockerfile_best-practices) Docker images. The linter parses
the Dockerfile into an AST and performs rules on top of the AST. It stands on the shoulders of
[ShellCheck](https://github.com/koalaman/shellcheck) to lint the Bash code inside `RUN` instructions.

Usage is very simple: to quickstart, you can just feed a Dockerfile to it!

```bash
wanderer@trg $ hadolint Dockerfile
Dockerfile:5 DL3009 info: Delete the apt-get lists after installing something
Dockerfile:8 DL3015 info: Avoid additional packages by specifying `--no-install-recommends`
Dockerfile:8 DL3008 warning: Pin versions in apt get install. Instead of `apt-get install <package>` use `apt-get install <package>=<version>`
Dockerfile:8 DL3059 info: Multiple consecutive `RUN` instructions. Consider consolidation.
```

## Dockle | Container image linter for security

[GitHub](https://github.com/goodwithtech/dockle) | [Website](https://containers.goodwith.tech/) | [Action](https://github.com/goodwithtech/dockle-action)

**Docker | Linter**

1. Build [Best Practice](https://docs.docker.com/develop/develop-images/dockerfile_best-practices/) Docker images
2. Build secure Docker images
Checkpoints include [CIS Benchmarks](https://www.cisecurity.org/cis-benchmarks/).

```bash
# Scan an image name (and a tag)
dockle goodwithtech/test-image:v1

# Scan an image file
docker save alpine:latest -o alpine.tar
dockle --input alpine.tar
```

For more examples, go to their [docs](https://github.com/goodwithtech/dockle?tab=readme-ov-file#common-examples).

## DependencyCheck | Checks for public vulns on dependencies

[GitHub](https://github.com/jeremylong/DependencyCheck)

**Scanner | Dependencies | Generic**

OWASP dependency-check is a software composition analysis utility that detects publicly disclosed
vulnerabilities in application dependencies. It requires access to several externally hosted
resources; refer to the official documentation for more information.

In order to analyze some technology stacks, dependency-check may require other development tools to
be installed. Some of the analyses listed below may be experimental and require the experimental
analyzers to be enabled.

1. To analyze .NET assemblies, the dotnet 8 runtime or SDK must be installed. Assemblies targeting
   other runtimes can be analyzed, but 8 is required to run the analysis.
1. If analyzing GoLang projects, `go` must be installed.
1. The analysis of `Elixir` projects requires `mix_audit`.
1. The analysis of `npm`, `pnpm`, and `yarn` projects requires `npm`, `pnpm`, or `yarn` to be
   installed. The analysis performed utilizes the respective `audit` feature of each. It also uses
   RetireJS.
1. The analysis of Ruby is a wrapper around `bundle-audit`, which must be installed.

Currently, only analyses 2 and 4 have been prioritized, given the nature of the repo.

```bash
dependency-check --out . --scan [path to files to be scanned]
```

The documentation suggests you obtain an NVD API Key from [NIST](https://nvd.nist.gov/developers/request-an-api-key) in order to download the database faster.
You can do that if you want, otherwise you'll have to wait a little.

## nodejsscan | NodeJS application code scanner

[GitHub](https://github.com/ajinabraham/NodeJsScan)

**Static Analyzer | NodeJS | Semgrep**

Static security code scanner (SAST) for Node.js applications powered by [libsast](https://github.com/ajinabraham/libsast) and [semgrep](https://github.com/returntocorp/semgrep).

```bash
nodejsscan -d path/to/nodejs/project/
```

## Lavamoat | JS framework to prevent supply-chain attacks

[GitHub](https://github.com/LavaMoat/lavamoat)

**Framework | Dependency | NodeJS**

Tools for sandboxing your dependency graph. This tool works differently: it is not something that
works right out of the box, at least not compared to the rest of the tools here, but it is very
effective at preventing supply-chain attacks.

There's really no quick start. You need to install it in your project directly.

```bash
pnpm install --save-dev lavamoat
```

Initialize it.

```bash
npx lavamoat init
```

This generates a `lavamoat/node/policy.json` file, which will contain permissions for each
dependency.
You can edit it, and set permissions for each package according to your security needs.
For example:

```json
{
  "resources": {
    "lodash": {
      "globals": {
        "process": true,
        "console": true
      },
      "packages": {
        "fs": false
      }
    }
  }
}
```

In this example, `lodash` is allowed to use `process` and `console` but not `fs` (file system).

Run your project with LavaMoat, which will enforce policies at runtime:

```bash
npx lavamoat node index.js # index.js or whatever is your entry-point.
```

When you add new dependencies or update existing ones, regenerate the policy file by running:

```bash
npx lavamoat update
```

Add a script in `package.json` to simplify running LavaMoat:

```json
"scripts": {
  "start": "lavamoat node index.js",
  "lavamoat-init": "lavamoat init",
  "lavamoat-update": "lavamoat update"
}
```

Now you can use `npm run start` to run LavaMoat instead of the full command.

Check the LavaMoat documentation for more advanced options, such as:

- Defining custom modules
- Setting up LavaMoat for browser environments
- Using sandboxed environments

## NodeJS specific tools

### retirejs | Vuln scanner for JS

[GitHub](https://github.com/RetireJS/retire.js) | [Website](https://retirejs.github.io/retire.js/)

**Scanner | Vulns | JavaScript**

Scanner detecting the use of JavaScript libraries with known vulnerabilities.

Just run `retire` inside any project.

### installed-check | Verifies modules are in tune with reqs in package.json

[npm](https://www.npmjs.com/package/installed-check) | [GitHub](https://github.com/voxpelli/node-installed-check)

**Checker | Modules | NodeJS**

Verifies that installed modules comply with the requirements specified in package.json.

By default it checks engine ranges, peer dependency ranges, and installed versions and, in mono-repos
using workspaces, it checks all workspaces as well as the workspace root.

Just run it inside your repo and that's it. Here's an example using some common flags:

```bash
# Ignore dev deps, treat warnings as errors, and try to fix issues by writing to disk.
installed-check --ignore-dev --strict --fix
# equivalent to
installed-check -d -s --fix
```

For more uses, check out the official documentation.

### better-npm-audit | npm audit++

[npm](https://www.npmjs.com/package/better-npm-audit) | [GitHub](https://github.com/jeemok/better-npm-audit)

**Checker | NodeJS**

Provides additional features on top of the existing npm audit options.

Just run the command as you'd run `npm audit` inside a project.

```bash
better-npm-audit audit
```

Better yet, add it as a script so you can run it with `npm run audit` instead:

```json
"scripts": {
  "prepush": "npm run test && npm run audit",
  "audit": "better-npm-audit audit"
}
```

### eslint-plugin-security | JS plugin to identify potential hotspots

[npm](https://www.npmjs.com/package/eslint-plugin-security) | [GitHub](https://github.com/eslint-community/eslint-plugin-security)

**Plugin | NodeJS**

ESLint rules for Node Security. This project will help identify potential security hotspots, but
finds a lot of false positives which need triage by a human. It also has TypeScript support.

```bash
npm install --save-dev eslint-plugin-security
# or
yarn add --dev eslint-plugin-security
```

**Flat config** (requires eslint >= v8.23.0): add the following to your eslint.config.js file:

```js
const pluginSecurity = require('eslint-plugin-security');

module.exports = [pluginSecurity.configs.recommended];
```

For **eslintrc** config, and more, check out their GitHub.

### eslint-plugin-no-unsanitized | ESLint rule to disallow unsafe patterns

[npm](https://www.npmjs.com/package/eslint-plugin-no-unsanitized) | [GitHub](https://github.com/mozilla/eslint-plugin-no-unsanitized)

**Plugin | NodeJS**

Custom ESLint rule to disallow unsafe innerHTML, outerHTML, insertAdjacentHTML and the like.

```bash
yarn add -D eslint-plugin-no-unsanitized
# or
npm install --save-dev eslint-plugin-no-unsanitized
```

For a **Flat config**:

```js
import nounsanitized from "eslint-plugin-no-unsanitized";

export default [nounsanitized.configs.recommended];
```

For **eslintrc** config, and rules selection, check out their GitHub.

### eslint-plugin-no-secrets | ESLint plugin to find secrets

[npm](https://www.npmjs.com/package/eslint-plugin-no-secrets) | [GitHub](https://github.com/nickdeis/eslint-plugin-no-secrets)

**Plugin | NodeJS**

An ESLint plugin to find strings that might be secrets/credentials.

**Flat config** below:

```js
import noSecrets from "eslint-plugin-no-secrets";

export default [
  {
    files: ["**/*.js"],
    plugins: {
      "no-secrets": noSecrets,
    },
    rules: {
      "no-secrets/no-secrets": "error",
    },
  },
];
```

Refer to the documentation to decrease the tolerance for entropy, and add patterns to check for
certain token formats.

### node-version-audit | Checks node's version for CVEs

[npm](https://www.npmjs.com/package/node-version-audit) | [GitHub](https://github.com/lightswitch05/node-version-audit)

**Checker | NodeJS**

Node Version Audit is a tool to check the Node.js version against a regularly updated list of CVE
exploits, new releases, and end of life dates.

```bash
node-version-audit --fail-security
```

Check their documentation for more options, or just append `--help`.
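
As with the other tools above, you could wire it into your `package.json` scripts so it runs as part
of your usual checks (a minimal sketch; the script name is arbitrary):

```json
"scripts": {
  "audit:node": "node-version-audit --fail-security"
}
```
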

### yarn-audit-fix | Fixes issues found while yarn auditing

[npm](https://www.npmjs.com/package/yarn-audit-fix) | [GitHub](https://github.com/antongolub/yarn-audit-fix)

**Audit | NodeJS**

The missing yarn audit fix.

```bash
yarn-audit-fix --opts
```

For a better understanding of the options, check out their repo.

## GitHub Actions

### List of secure measures for your repo

[GitHub](https://github.com/step-security/secure-repo) | [Live](https://app.stepsecurity.io/)

**List | Measures**

Orchestrate GitHub Actions Security. A catalog of fixes and proactive measures to harden your
repository. Creators of the harden-runner below.

### harden-runner | Prevent exfiltration, tampering, backdoors

[GitHub](https://github.com/step-security/harden-runner) | [Action](https://github.com/marketplace/actions/harden-runner)

**Runners | Multi-purpose**

Network egress filtering and runtime security for GitHub-hosted and self-hosted runners.

There are two main threats from compromised workflows, dependencies, and build tools in a CI/CD
environment:

- Exfiltration of CI/CD credentials and source code
- Tampering of source code, dependencies, or artifacts during the build to inject a backdoor

Harden-Runner monitors process, file, and network activity to:

| | Countermeasure | Prevent Security Breach |
| --- | --- | --- |
| 1. | Monitor and block outbound network traffic at the DNS, HTTPS (Layer 7), and network layers (Layers 3 and 4) to prevent exfiltration of code and CI/CD credentials | To prevent the [Codecov breach](https://github.com/step-security/github-actions-goat/blob/main/docs/Vulnerabilities/ExfiltratingCICDSecrets.md) scenario |
| 2. | Detect if source code is being tampered during the build process to inject a backdoor | To detect the [XZ Utils](https://www.stepsecurity.io/blog/analysis-of-backdoored-xz-utils-build-process-with-harden-runner) and [SolarWinds incident](https://github.com/step-security/github-actions-goat/blob/main/docs/Vulnerabilities/TamperingDuringBuild.md) scenarios |
| 3. | Detect poisoned workflows and compromised dependencies that exhibit suspicious behavior | To detect [Dependency confusion](https://github.com/step-security/github-actions-goat/blob/main/docs/Vulnerabilities/ExfiltratingCICDSecrets.md#dependency-confusion-attacks) and [Malicious dependencies](https://github.com/step-security/github-actions-goat/blob/main/docs/Vulnerabilities/ExfiltratingCICDSecrets.md#compromised-dependencies) scenarios |
| 4. | Determine minimum GITHUB_TOKEN permissions by monitoring HTTPS calls to GitHub APIs | To set [minimum GITHUB_TOKEN permissions](https://www.stepsecurity.io/blog/determine-minimum-github-token-permissions-using-ebpf-with-stepsecurity-harden-runner) to reduce the impact of exfiltration |

### wait-for-secrets | MFA for GHA

[GitHub](https://github.com/step-security/wait-for-secrets) | [Action](https://github.com/marketplace/actions/wait-for-secrets)

**Access control | MFA**

Publish from GitHub Actions using multi-factor authentication. It's like 2FA for GHA.

### Snyk Actions | Snyk's set of actions to check for vulns and more

[GitHub](https://github.com/snyk/actions) | [Action](https://github.com/marketplace/actions/snyk)

**Checker | Vulns | Code**

A set of GitHub Actions for using Snyk to check for vulnerabilities in your GitHub projects. A
different action is required depending on which language or build tool you are using.

### KICS Action | Static code analysis for IaC

[Action](https://github.com/marketplace/actions/kics-github-action)

**Static Analysis | IaC**

Keeping Infrastructure as Code Secure (in short KICS) is a must-have for any cloud native project.
With KICS, finding security vulnerabilities, compliance issues, and infrastructure misconfigurations
happens early in the development cycle, when fixing these is straightforward and cheap.

### Legitify Action | Analyze your repo for misconfigs & compliance issues

[Action](https://github.com/marketplace/actions/legitify-analyze)

**Scan | Misconfigs | Compliance**

Strengthen the security posture of your source-code management! Detect and remediate
misconfigurations, security and compliance issues across all your GitHub and GitLab assets with
ease.

### Trivy Action | Add a thorough vuln scan to your CI/CD

[Action](https://github.com/aquasecurity/trivy-action)

**Scan | Multi-purpose**

Runs Trivy as a GitHub action to scan your Docker container image for vulnerabilities.

### 2ms Action | Add Too Many Secrets (2ms) to your workflow

[Workflow](https://github.com/Checkmarx/2ms/blob/master/.github/workflows/release.yml)

**Scan | Secrets**

### GitLeaks Action | Add gitleaks as a GitHub action

[Action](https://github.com/gitleaks/gitleaks-action)

**Scan | Secrets**

### Trufflehog Action | Continuously scan for secrets

[Action](https://github.com/marketplace/actions/trufflehog-oss)

**Scan | Secrets**

### Dockle Action | Continuously scan for security issues in container images

[Action](https://github.com/marketplace/actions/dockle-container-image-linter)

## Online versions of some tools

- [Scan a repo with **KICS**](https://kics.checkmarx.net/) - up to 4 MB
- [Scan your Dockerfile with **Hadolint**](https://hadolint.github.io/hadolint)
- [Scan your GitHub Actions](https://app.stepsecurity.io/) with StepSecurity

--------------------------------------------------------------------------------