├── .coveragerc ├── .dockerignore ├── .env.dev ├── .env.override.example ├── .flake8 ├── .github └── workflows │ └── docker-publish.yml ├── .gitignore ├── .readthedocs.yaml ├── CONTRIBUTORS.md ├── Dockerfile ├── LICENSE.txt ├── Makefile ├── README.md ├── app ├── __init__.py ├── config.yml ├── exceptions │ ├── __init__.py │ ├── api_errors.py │ ├── handlers.py │ └── schemas.py ├── logging.yml ├── main.py ├── metrics │ ├── __init__.py │ ├── celery.py │ ├── common.py │ ├── constants.py │ ├── db │ │ ├── __init__.py │ │ ├── base.py │ │ └── sqlalchemy.py │ ├── http_in.py │ ├── http_out.py │ ├── job.py │ └── server.py ├── routers │ ├── __init__.py │ ├── gitlab.py │ ├── healthcheck.py │ ├── metrics.py │ └── security.py ├── secbot │ ├── __init__.py │ ├── alembic.ini │ ├── config.py │ ├── db │ │ ├── __init__.py │ │ ├── alembic │ │ │ ├── __init__.py │ │ │ ├── env.py │ │ │ ├── script.py.mako │ │ │ └── versions │ │ │ │ ├── 3611bb3d9dd2_initial.py │ │ │ │ └── __init__.py │ │ └── models.py │ ├── exceptions.py │ ├── handlers.py │ ├── inputs │ │ ├── __init__.py │ │ └── gitlab │ │ │ ├── __init__.py │ │ │ ├── dependencies.py │ │ │ ├── handlers │ │ │ ├── __init__.py │ │ │ ├── defectdojo │ │ │ │ ├── __init__.py │ │ │ │ ├── api.py │ │ │ │ ├── services.py │ │ │ │ └── validator.py │ │ │ ├── gitleaks │ │ │ │ └── __init__.py │ │ │ └── slack │ │ │ │ ├── __init__.py │ │ │ │ ├── api.py │ │ │ │ └── utils.py │ │ │ ├── models.py │ │ │ ├── schemas │ │ │ ├── __init__.py │ │ │ ├── base.py │ │ │ ├── merge_request.py │ │ │ ├── output_responses.py │ │ │ ├── push.py │ │ │ └── tag.py │ │ │ ├── services.py │ │ │ └── utils.py │ ├── logger.py │ ├── schemas.py │ ├── settings.py │ └── utils.py └── settings.py ├── docker-compose.yml ├── docker-entrypoint.sh ├── docs ├── Makefile ├── conf.py ├── configuration.rst ├── getting-started.rst ├── glossary.rst ├── images │ ├── defectdojo-user.png │ ├── general-scheme.drawio │ ├── general-scheme.drawio.png │ ├── gitlab-access-token.png │ ├── gitlab-system-hooks.png │ 
├── job-graph.drawio │ ├── job-graph.drawio.png │ └── slack-dashboard.png ├── index.rst ├── integration.rst └── make.bat ├── k8s ├── configmap.yml ├── deployment.yml ├── ingress.yml ├── sec-bot-tls-certs.yml └── service.yml ├── poetry.lock ├── pyproject.toml ├── static └── security-bot-logo.png └── tests ├── __init__.py ├── conftest.py ├── fixtures ├── inputs │ └── gitlab │ │ ├── merge_request_webhook.json │ │ ├── push_webhook.json │ │ └── tag_push_webhook.json ├── merge_request_hook__example_project.json ├── merge_request_hook__public_site.json ├── merge_request_hook__security_bot.json └── worker_outputs │ └── gitleaks.json ├── integration ├── __init__.py ├── notifications │ ├── __init__.py │ └── test_slack.py └── test_outputs.py ├── pytest.ini ├── test_logger.py ├── test_sentry.py ├── test_settings.py └── units ├── __init__.py ├── common.py ├── config ├── __init__.py ├── test_config_v1_parser.py └── test_secbot_config_version.py ├── factories.py ├── inputs ├── __init__.py ├── config │ ├── __init__.py │ ├── test_config_utils.py │ └── test_parse_jobs.py ├── conftest.py └── gitlab │ ├── __init__.py │ ├── handlers │ ├── __init__.py │ └── slack │ │ ├── __init__.py │ │ └── test_slack_message_generation.py │ ├── test_defectdojo_validation.py │ ├── test_gitlab_dependencies.py │ ├── test_gitlab_project_languages.py │ ├── test_gitlab_routes.py │ ├── test_gitlab_utils.py │ ├── test_merge_request_webhook_data.py │ ├── test_push_webhook_data.py │ └── test_tag_push_webhook_data.py ├── secbot ├── __init__.py ├── test_autodiscover.py └── test_register.py ├── test_pydantic_celery.py ├── test_worker_utils.py └── workflow └── test_workflow_runner.py /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | omit = tests/*, app/alembic/*, **/exceptions.py 3 | 4 | [report] 5 | fail_under = 60 6 | # Regexes for lines to exclude from consideration 7 | exclude_lines = 8 | # Have to re-enable the standard pragma 9 | pragma: no cover 10 | 
11 | # Don't complain about missing debug-only code: 12 | def __repr__ 13 | if self\.debug 14 | 15 | # Don't complain if tests don't hit defensive assertion code: 16 | raise AssertionError 17 | raise NotImplementedError 18 | 19 | # Don't complain if non-runnable code isn't run: 20 | if 0: 21 | if __name__ == .__main__.: 22 | 23 | # Don't complain about abstract methods, they aren't run: 24 | @(abc\.)?abstractmethod 25 | 26 | ignore_errors = True 27 | -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | .idea 2 | .cache 3 | .coverage 4 | .dockerignore 5 | .git 6 | .gitattributes 7 | .gitignore 8 | .gitkeep 9 | .flake8 10 | .isort.cfg 11 | .mypy_cache 12 | .pytest_cache 13 | .tox 14 | tox.ini 15 | mypy.ini 16 | tests 17 | distribution 18 | venv 19 | **/__pycache__ 20 | *.yml 21 | *.svg 22 | *.egg-info/ 23 | *.egg 24 | env/ 25 | pytest.ini 26 | README.rst 27 | README.md 28 | Dockerfile 29 | Zenvfile 30 | infrastructure 31 | data/ 32 | .env.override.example 33 | -------------------------------------------------------------------------------- /.env.dev: -------------------------------------------------------------------------------- 1 | UVICORN_RELOAD=1 2 | 3 | CELERY_BROKER_URL=redis://redis:6379/0 4 | CELERY_RESULT_BACKEND=redis://redis:6379/0 5 | 6 | SECBOT_POSTGRES_DSN=postgresql+asyncpg://secbot:foobar@db:5432/secbot 7 | 8 | GITLAB_CONFIGS=[{"host":"https://git.env.local/","webhook_secret_token":"SecretStr","auth_token":"SecretStr","prefix":"GIT_LOCAL"}] 9 | 10 | DEFECTDOJO__URL=https://defectdojo.env.local 11 | DEFECTDOJO__TOKEN=defectdojo_token 12 | DEFECTDOJO__USER=defectdojo_username 13 | DEFECTDOJO__USER_ID=10 14 | 15 | SLACK_TOKEN=token_here 16 | 17 | # Metrics settings 18 | SRE_METRIC_LABEL_TEAM=SECURITY 19 | SRE_METRIC_LABEL_SERVICE=security-bot 20 | TRACING_TAGS_HOST=security-bot.env.local 21 | TRACING_TAGS_CLUSTER=security-local 
22 | -------------------------------------------------------------------------------- /.env.override.example: -------------------------------------------------------------------------------- 1 | # The .env.override.example file streamlines environment-specific settings management during development. 2 | # 3 | # To use it: 4 | # 1. Rename the file to '.env.override' for recognition by the dev environment. 5 | # 2. Update environment variables with development-specific values in the file. 6 | # 3. Rebuild the Docker image and restart it, ensuring it reads from the updated .env.override. 7 | # 8 | # e.g. 9 | # DEBUG=true 10 | -------------------------------------------------------------------------------- /.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | max-line-length = 87 3 | ignore = E203,W503,E501,W293 4 | statistics = True 5 | exclude = 6 | .git, 7 | __pycache__, 8 | .cache/, 9 | .pytest_cache/, 10 | .mypy_cache/, 11 | .venv/, 12 | .run/, 13 | app/secbot/db/alembic 14 | 15 | -------------------------------------------------------------------------------- /.github/workflows/docker-publish.yml: -------------------------------------------------------------------------------- 1 | name: Publish Docker image 2 | 3 | on: 4 | release: 5 | types: [published] 6 | 7 | jobs: 8 | push_to_registries: 9 | name: Push Docker image 10 | runs-on: ubuntu-latest 11 | permissions: 12 | packages: write 13 | contents: read 14 | steps: 15 | - name: Check out the repo 16 | uses: actions/checkout@v3 17 | 18 | - name: Log in to Docker Hub 19 | uses: docker/login-action@f4ef78c080cd8ba55a85445d5b36e214a81df20a 20 | with: 21 | username: ${{ secrets.DOCKER_USERNAME }} 22 | password: ${{ secrets.DOCKER_PASSWORD }} 23 | 24 | - name: Log in to the Container registry 25 | uses: docker/login-action@65b78e6e13532edd9afa3aa52ac7964289d1a9c1 26 | with: 27 | registry: ghcr.io 28 | username: ${{ github.actor }} 29 | password: ${{ 
secrets.GITHUB_TOKEN }} 30 | 31 | - name: Extract metadata (tags, labels) for Docker 32 | id: meta 33 | uses: docker/metadata-action@9ec57ed1fcdbf14dcef7dfbe97b2010124a938b7 34 | with: 35 | images: | 36 | exness/security-bot 37 | ghcr.io/${{ github.repository }} 38 | 39 | - name: Build and push Docker images 40 | uses: docker/build-push-action@3b5e8027fcad23fda98b2e3ac259d8d67585f671 41 | with: 42 | context: . 43 | push: true 44 | tags: ${{ steps.meta.outputs.tags }} 45 | labels: ${{ steps.meta.outputs.labels }} 46 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | share/python-wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | MANIFEST 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .nox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | *.py,cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | cover/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | .pybuilder/ 76 | target/ 77 | 78 | # Jupyter Notebook 79 | .ipynb_checkpoints 80 | 81 | # IPython 82 | profile_default/ 83 | ipython_config.py 84 | 85 | # pyenv 86 | # For a library or package, you might want to ignore these files since the code is 87 | # intended to run in multiple environments; otherwise, check them in: 88 | # .python-version 89 | 90 | # pipenv 91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 94 | # install all needed dependencies. 95 | #Pipfile.lock 96 | 97 | # poetry 98 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 99 | # This is especially recommended for binary packages to ensure reproducibility, and is more 100 | # commonly ignored for libraries. 101 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 102 | #poetry.lock 103 | 104 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 105 | __pypackages__/ 106 | 107 | # Celery stuff 108 | celerybeat-schedule 109 | celerybeat.pid 110 | 111 | # SageMath parsed files 112 | *.sage.py 113 | 114 | # Environments 115 | .env 116 | .venv 117 | env/ 118 | venv/ 119 | ENV/ 120 | env.bak/ 121 | venv.bak/ 122 | 123 | # Personal override env 124 | .env.override 125 | 126 | # Spyder project settings 127 | .spyderproject 128 | .spyproject 129 | 130 | # Rope project settings 131 | .ropeproject 132 | 133 | # mkdocs documentation 134 | /site 135 | 136 | # mypy 137 | .mypy_cache/ 138 | .dmypy.json 139 | dmypy.json 140 | 141 | # Pyre type checker 142 | .pyre/ 143 | 144 | # pytype static type analyzer 145 | .pytype/ 146 | 147 | # Cython debug symbols 148 | cython_debug/ 149 | 150 | # PyCharm 151 | .idea/ 152 | .DS_Store 153 | -------------------------------------------------------------------------------- /.readthedocs.yaml: -------------------------------------------------------------------------------- 1 | version: 2 2 | 3 | build: 4 | os: "ubuntu-20.04" 5 | tools: 6 | python: "3.9" 7 | jobs: 8 | post_create_environment: 9 | - pip install --upgrade pip 10 | - pip install poetry 11 | - poetry config virtualenvs.create false 12 | post_install: 13 | - poetry install --no-root 14 | 15 | formats: 16 | - pdf 17 | - epub 18 | 19 | sphinx: 20 | configuration: docs/conf.py 21 | fail_on_warning: true 22 | -------------------------------------------------------------------------------- /CONTRIBUTORS.md: -------------------------------------------------------------------------------- 1 | # Special thanks 2 | 3 | - [Exness](https://github.com/exness) 4 | - [Vulners](https://github.com/vulnerscom) 5 | 6 | # Contributors 7 | 8 | - [Valerio Rico](https://github.com/V-Rico) - MVP and maintenance 9 | - [Ivan Zhirov](https://github.com/izhirov) - further development and maintenance 10 | - [Maxim Sokolov](https://github.com/mcson-the-writer) - documentation 11 | 
-------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.9-slim 2 | 3 | ARG USER_NAME="exness" 4 | ARG USER_HOME="/${USER_NAME}" 5 | ARG APP_HOME="/opt" 6 | 7 | COPY poetry.lock pyproject.toml / 8 | 9 | ### Add required binaries ### 10 | RUN apt-get update && \ 11 | apt-get install -y git curl && \ 12 | apt-get clean && \ 13 | rm -rf /var/cache/* 14 | 15 | RUN apt-get update && \ 16 | apt-get install -qy --no-install-recommends build-essential make && \ 17 | pip install --no-cache-dir --upgrade pip poetry>=1.0.0 && \ 18 | poetry config virtualenvs.create false && \ 19 | poetry install --no-interaction --no-dev && \ 20 | apt-get remove -qy --purge build-essential && \ 21 | apt-get autoremove --purge -qy && \ 22 | apt-get clean && \ 23 | rm -rf /var/cache/* /poetry.lock /pyproject.toml 24 | 25 | ### Add worker tools ### 26 | 27 | # Install gitleaks 28 | COPY --from=zricethezav/gitleaks:v8.17.0 /usr/bin/gitleaks /usr/local/bin/gitleaks 29 | 30 | ### Create service user ### 31 | RUN groupadd -g 10001 ${USER_NAME} && useradd -g 10001 -u 10001 -s "/usr/sbin/nologin" -md ${USER_HOME} ${USER_NAME} 32 | 33 | ### Add application source code ### 34 | COPY docker-entrypoint.sh /usr/local/bin 35 | COPY --chown=10001:10001 app/ ${APP_HOME}/app 36 | 37 | ENV PYTHONPATH="${APP_HOME}" 38 | 39 | USER ${USER_NAME} 40 | EXPOSE 5000 5001 41 | WORKDIR ${APP_HOME} 42 | ENTRYPOINT ["docker-entrypoint.sh"] 43 | CMD ["help"] 44 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2023 Exness 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, 
including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: help 2 | 3 | CURRENT_DIR = $(shell pwd) 4 | GREEN = \033[0;32m 5 | YELLOW = \033[0;33m 6 | NC = \033[0m 7 | 8 | APP_HOST := $(or ${APP_HOST},${APP_HOST},0.0.0.0) 9 | APP_PORT := $(or ${APP_PORT},${APP_PORT},5000) 10 | PYTHONPATH := $(or ${PYTHONPATH},${PYTHONPATH},.) 11 | 12 | help: 13 | @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-17s\033[0m %s\n", $$1, $$2}' 14 | 15 | # ============= General use-cases ============= 16 | 17 | start-up: dependencies-up install migrate start ## Start service from zero 18 | check: linting test ## Linting python code and run tests in one command 19 | 20 | # ============= General commands ============= 21 | 22 | install: ## Install all dependencies (need poetry!) 
23 | @echo "\n${GREEN}Installing project dependencies${NC}" 24 | pip install --force-reinstall poetry 25 | poetry install 26 | 27 | dependencies-up: ## Pull and start the Docker containers with dependencies in the background 28 | @echo "\n${GREEN}Start the Docker containers with dependencies${NC}" 29 | docker compose up -d 30 | 31 | 32 | dependencies-down: ## Down the Docker containers with dependencies 33 | @echo "\n${GREEN}Down the Docker containers with dependencies${NC}" 34 | docker-compose down 35 | 36 | clean: dependencies-down ## Clear temporary information, stop Docker containers 37 | @echo "\n${YELLOW}Clear cache directories${NC}" 38 | rm -rf .mypy_cache .pytest_cache .coverage 39 | poetry run pyclean . 40 | 41 | 42 | start: ## Run applications 43 | @echo "Starting test webserver..." 44 | python -m \ 45 | uvicorn app.main:app --host ${APP_HOST} --port ${APP_PORT} 46 | 47 | test-webserver: 48 | @echo "Starting test webserver..." 49 | python -m \ 50 | uvicorn app.main:app --host ${APP_HOST} --port ${APP_PORT} --reload --reload-dir=./app 51 | 52 | test: 53 | pytest ${TARGET} 54 | 55 | fmt: ## Auto formatting python code 56 | @echo "\n${GREEN}Auto formatting python code with isort${NC}" 57 | poetry run isort . || true 58 | @echo "\n${GREEN}Auto formatting python code with black${NC}" 59 | poetry run black . || true 60 | 61 | linting: flake8 isort black mypy ## Linting python code 62 | 63 | # ============= Other project specific commands ============= 64 | 65 | flake8: ## Linting python code with flake8 66 | @echo "\n${GREEN}Linting python code with flake8${NC}" 67 | poetry run flake8 . 
68 | 69 | isort: ## Linting python code with isort 70 | @echo "\n${GREEN}Linting python code with isort${NC}" 71 | poetry run isort app --check 72 | 73 | black: ## Linting python code with black 74 | @echo "\n${GREEN}Linting python code with black${NC}" 75 | poetry run black --check app 76 | 77 | mypy: ## Linting python code with mypy 78 | @echo "\n${GREEN}Linting python code with mypy${NC}" 79 | poetry run mypy app --check-untyped-defs 80 | 81 | # Database commands 82 | new_revision: ## Create new revision 83 | docker compose exec app alembic -c /opt/app/secbot/alembic.ini revision --autogenerate -m "${MESSAGE}" 84 | 85 | migrate: ## Migrate the database 86 | docker compose exec app alembic -c /opt/app/secbot/alembic.ini upgrade head 87 | 88 | downgrade: ## Downgrade the database 89 | docker compose exec app alembic -c /opt/app/secbot/alembic.ini downgrade -1 90 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ![security-bot](static/security-bot-logo.png) 2 | 3 | Security Bot - Security checks orchestration tool 4 | ======= 5 | 6 | [![python 3.9](https://img.shields.io/badge/python-3.9-blue.svg)](https://www.python.org/downloads/release/python-390) 7 | [![code style: black](https://img.shields.io/badge/code%20style-black-black.svg)](https://github.com/ambv/black) 8 | [![code style: flake8](https://img.shields.io/badge/code%20style-flake8-blue.svg)](https://github.com/PyCQA/flake8) 9 | 10 | The [Security Bot](docs/index.rst) (SecBot) service introduces an additional collection of checks to the SDLC to identify security issues in corporate assets 11 | 12 | Reach out to our [Discord server](https://discord.gg/WgNbaG3Z45) to communicate with the team more effectively 13 | 14 | [![Discord Server](https://discordapp.com/api/guilds/1113355944101957703/widget.png?style=banner3)](https://discord.gg/WgNbaG3Z45) 15 | 16 | **Technologies: 
[FastAPI](https://fastapi.tiangolo.com/), [Celery](https://docs.celeryq.dev/en/stable/) + Redis, [SQLAlchemy](https://www.sqlalchemy.org/) + Postgres, Pytest, and others** 17 | 18 | ## Installation and Tests 19 | 20 | #### For sample k8s manifests please refer to [/k8s](/k8s) directory 21 | 22 | Deployment: 23 | 24 | git clone path/to/project.git 25 | docker-compose up --build 26 | 27 | Service configuration: 28 | 29 | .env.dev (Default values) 30 | .env.override (Customized values) 31 | 32 | Workflow configuration: 33 | 34 | app/config.yml 35 | 36 | ## Usage and Support 37 | 38 | Documentation: 39 | 40 | * [Project documentation (rtd)](https://security-bot.readthedocs.io/en/latest/) 41 | * [Project documentation (local)](docs/index.rst) 42 | -------------------------------------------------------------------------------- /app/__init__.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from typing import Optional 3 | 4 | from celery.app.log import TaskFormatter 5 | 6 | 7 | def format_extra(message, extra: Optional[dict] = None): 8 | """Format extra keys from record.__dict__""" 9 | if not extra: 10 | return message 11 | extra_str = ", ".join(f"{k}={str(v)[:15]}" for k, v in extra.items()) 12 | return f"{message} EXTRA: {extra_str}" 13 | 14 | 15 | class ExtraLogFormatter(logging.Formatter): 16 | def format(self, record): 17 | message = super().format(record) 18 | return format_extra(message, record.__dict__) 19 | 20 | 21 | class ExtraTaskFormatter(TaskFormatter): 22 | def format(self, record): 23 | message = super().format(record) 24 | return format_extra(message, record.__dict__) 25 | -------------------------------------------------------------------------------- /app/config.yml: -------------------------------------------------------------------------------- 1 | version: 1.0 2 | 3 | components: 4 | gitleaks: 5 | handler_name: "gitleaks" 6 | config: 7 | format: "json" 8 | defectdojo: 9 | handler_name: 
"defectdojo" 10 | env: 11 | url: "DEFECTDOJO__URL" 12 | secret_key: "DEFECTDOJO__TOKEN" 13 | user: "DEFECTDOJO__USER" 14 | lead_id: "DEFECTDOJO__USER_ID" 15 | slack: 16 | handler_name: "slack" 17 | config: 18 | render_limit: 10 19 | channels: 20 | - secbot-test 21 | env: 22 | token: "SLACK_TOKEN" 23 | 24 | jobs: 25 | - name: Just example of merge request test 26 | rules: 27 | gitlab: 28 | event_type: "merge_request" 29 | scans: 30 | - gitleaks 31 | outputs: 32 | - defectdojo 33 | notifications: 34 | - slack 35 | -------------------------------------------------------------------------------- /app/exceptions/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/exness/security-bot/ba35e061bdce3bbc0a345bb6ef736765cfecb923/app/exceptions/__init__.py -------------------------------------------------------------------------------- /app/exceptions/handlers.py: -------------------------------------------------------------------------------- 1 | from typing import TYPE_CHECKING, List, Union 2 | 3 | from fastapi.exceptions import RequestValidationError 4 | from starlette.exceptions import HTTPException 5 | from starlette.responses import JSONResponse 6 | 7 | from app.exceptions.api_errors import APIError 8 | from app.exceptions.schemas import ValidationErrorDetail 9 | 10 | if TYPE_CHECKING: 11 | from pydantic.error_wrappers import ErrorDict 12 | 13 | 14 | async def api_error_exception_handler(_, starlette_exc: APIError): 15 | return JSONResponse(starlette_exc.as_dict(), status_code=starlette_exc.status_code) 16 | 17 | 18 | async def http_exception_handler(_, starlette_exc: HTTPException): 19 | exc_class = APIError.get_cls_by_code(starlette_exc.status_code) 20 | exc = exc_class(message=starlette_exc.detail) 21 | return JSONResponse(exc.as_dict(), status_code=exc.status_code) 22 | 23 | 24 | async def validation_exception_handler(_, starlette_exc: RequestValidationError): 25 | exc_class = 
APIError.get_cls_by_code(400) 26 | exc = exc_class( 27 | "VALIDATION_FAILED", 28 | details=normalize_details(starlette_exc.errors()), 29 | ) 30 | return JSONResponse(exc.as_dict(), status_code=exc.status_code) 31 | 32 | 33 | def normalize_details(details: List["ErrorDict"]) -> List[ValidationErrorDetail]: 34 | res = [] 35 | for data in details: 36 | field: Union[int, str] = ".".join( 37 | [loc for loc in data["loc"][2:] if isinstance(loc, str)] 38 | ) 39 | if not field and isinstance(data["loc"], tuple) and len(data["loc"]) >= 2: 40 | field = data["loc"][1] 41 | 42 | res.append( 43 | ValidationErrorDetail( 44 | code=data["type"].upper(), 45 | field=field, 46 | message=data["msg"], 47 | ) 48 | ) 49 | return res 50 | -------------------------------------------------------------------------------- /app/exceptions/schemas.py: -------------------------------------------------------------------------------- 1 | from typing import List, Optional 2 | 3 | from pydantic import BaseModel 4 | 5 | 6 | class ValidationErrorDetail(BaseModel): 7 | code: str 8 | field: str 9 | message: str 10 | 11 | 12 | class ValidationError(BaseModel): 13 | code: Optional[str] = None 14 | message: Optional[str] = None 15 | details: Optional[List[ValidationErrorDetail]] = None 16 | -------------------------------------------------------------------------------- /app/logging.yml: -------------------------------------------------------------------------------- 1 | version: 1 2 | disable_existing_loggers: false 3 | 4 | formatters: 5 | verbose: 6 | "()": "app.ExtraLogFormatter" 7 | format: '%(levelname)s %(asctime)s %(filename)s:%(funcName)s:%(lineno)d %(message)s' 8 | 9 | handlers: 10 | console: 11 | class: logging.StreamHandler 12 | formatter: verbose 13 | stream: ext://sys.stdout 14 | 15 | loggers: 16 | uvicorn: 17 | error: 18 | propagate: true 19 | 20 | root: 21 | level: INFO 22 | handlers: [console] 23 | propagate: no 24 | 
-------------------------------------------------------------------------------- /app/metrics/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/exness/security-bot/ba35e061bdce3bbc0a345bb6ef736765cfecb923/app/metrics/__init__.py -------------------------------------------------------------------------------- /app/metrics/celery.py: -------------------------------------------------------------------------------- 1 | import timeit 2 | 3 | from celery import signals 4 | 5 | from app.metrics.common import get_location_labels_from_env 6 | from app.metrics.job import SRE_JOB_ERRORS, SRE_JOB_EXECUTION_TIME, SRE_JOB_EXECUTIONS 7 | 8 | 9 | def prerun(task, *args, **kwargs): 10 | task._started_at = timeit.default_timer() 11 | 12 | 13 | def postrun(task, *args, **kwargs): 14 | labels = get_location_labels_from_env() 15 | labels["job_name"] = task.name 16 | execution_time = int((timeit.default_timer() - task._started_at) * 1000) 17 | SRE_JOB_EXECUTIONS.labels(**labels).inc() 18 | SRE_JOB_EXECUTION_TIME.labels(**labels).inc(execution_time) 19 | 20 | 21 | def on_error(exception, sender, *args, **kwargs): 22 | labels = get_location_labels_from_env() 23 | SRE_JOB_ERRORS.labels( 24 | **labels, job_name=sender.name, job_error=type(exception).__qualname__ 25 | ).inc() 26 | 27 | 28 | def instrument(): 29 | signals.task_prerun.connect(prerun, weak=False) 30 | signals.task_postrun.connect(postrun, weak=False) 31 | signals.task_failure.connect(on_error, weak=False) 32 | signals.task_internal_error.connect(on_error, weak=False) 33 | -------------------------------------------------------------------------------- /app/metrics/common.py: -------------------------------------------------------------------------------- 1 | import os 2 | import typing 3 | from sys import argv, executable 4 | 5 | from prometheus_client import REGISTRY, CollectorRegistry, start_http_server 6 | from prometheus_client.multiprocess import 
MultiProcessCollector 7 | 8 | from app.metrics.constants import PROMETHEUS_MULTIPROC_DIR 9 | 10 | 11 | class LocationLabels(typing.TypedDict): 12 | dc: str 13 | cluster: str 14 | host: str 15 | k8s_namespace: str 16 | k8s_kind: str 17 | k8s_pod: str 18 | k8s_container: str 19 | docker_container: str 20 | os_process: str 21 | os_process_cmdline: str 22 | team: str 23 | service: str 24 | 25 | 26 | location_labels = tuple(LocationLabels.__annotations__) 27 | 28 | 29 | def get_location_labels_from_env() -> LocationLabels: 30 | return { 31 | "dc": os.getenv("TRACING_TAGS_DC", ""), 32 | "cluster": os.getenv("TRACING_TAGS_CLUSTER", ""), 33 | "host": os.getenv("TRACING_TAGS_HOST", ""), 34 | "os_process": os.getenv("TRACING_TAGS_PROCESS_NAME", executable), 35 | "os_process_cmdline": " ".join(argv), 36 | "docker_container": os.getenv("TRACING_TAGS_DOCKER_CONTAINER", ""), 37 | "k8s_namespace": os.getenv("TRACING_TAGS_K8S_NAMESPACE", ""), 38 | "k8s_kind": os.getenv("TRACING_TAGS_K8S_KIND", ""), 39 | "k8s_pod": os.getenv("TRACING_TAGS_K8S_POD", ""), 40 | "k8s_container": os.getenv("TRACING_TAGS_K8S_CONTAINER", ""), 41 | "team": os.getenv("SRE_METRIC_LABEL_TEAM", ""), 42 | "service": os.getenv("SRE_METRIC_LABEL_SERVICE", ""), 43 | } 44 | 45 | 46 | def start_http_metrics_server(port: int): 47 | if ( 48 | PROMETHEUS_MULTIPROC_DIR in os.environ 49 | or PROMETHEUS_MULTIPROC_DIR.lower() in os.environ 50 | ): 51 | registry = CollectorRegistry() 52 | MultiProcessCollector(registry) 53 | else: 54 | registry = REGISTRY 55 | 56 | start_http_server(port, registry=registry) 57 | -------------------------------------------------------------------------------- /app/metrics/constants.py: -------------------------------------------------------------------------------- 1 | PROMETHEUS_MULTIPROC_DIR = "PROMETHEUS_MULTIPROC_DIR" 2 | -------------------------------------------------------------------------------- /app/metrics/db/__init__.py: 
# --- /app/metrics/db/__init__.py (tail) ---
# Re-export the DB client metric counters as the package's public API.
from .base import (
    SRE_DB_CONNECTIONS,
    SRE_DB_QUERY_ERRORS,
    SRE_DB_QUERY_TIME,
    SRE_DB_QUERY_TOTAL,
    SRE_DB_RESPONSE_RECORDS,
    SRE_DB_RESPONSE_SIZE,
)

__all__ = [
    "SRE_DB_CONNECTIONS",
    "SRE_DB_QUERY_ERRORS",
    "SRE_DB_QUERY_TIME",
    "SRE_DB_QUERY_TOTAL",
    "SRE_DB_RESPONSE_RECORDS",
    "SRE_DB_RESPONSE_SIZE",
]

# --- /app/metrics/db/base.py ---
import typing

from prometheus_client import Counter

from app.metrics.common import (
    LocationLabels,
    get_location_labels_from_env,
    location_labels,
)


class ConstCommonLabels(LocationLabels):
    # Database identity labels attached to every DB metric sample.
    db: str
    db_host: str
    db_port: str


# NOTE(review): ``ConstCommonLabels.__annotations__`` contains only the three
# fields declared on the subclass itself (db, db_host, db_port); the inherited
# ``LocationLabels`` fields are NOT included.  As a result ``common_labels``
# differs from the explicit ``("db", "db_host", "db_port", *location_labels)``
# tuple used by SRE_DB_CONNECTIONS below — confirm the asymmetry is intended.
common_labels = (*ConstCommonLabels.__annotations__, "db_query")


SRE_DB_CONNECTIONS = Counter(
    "db_client_connections_total",
    "Total amount of connections",
    ("db", "db_host", "db_port", *location_labels),
)

SRE_DB_QUERY_TOTAL = Counter("db_client_query_total", "Query counter", common_labels)

SRE_DB_QUERY_TIME = Counter(
    "db_client_query_time_total",
    "Query execution time",
    common_labels,
)

SRE_DB_RESPONSE_SIZE = Counter(
    "db_client_query_bytes_total",
    "Bytes in the response",
    common_labels,
)

SRE_DB_RESPONSE_RECORDS = Counter(
    "db_client_query_records_total",
    "Amount of records in the response",
    common_labels,
)

SRE_DB_QUERY_ERRORS = Counter(
    "db_client_query_errors_total",
    "Total number of errors on each query",
    common_labels,
)


def get_const_common_labels(
    db: str, db_host: str, db_port: typing.Union[str, int]
) -> ConstCommonLabels:
    """Build the constant label set for DB client metrics.

    Merges the location labels read from the environment with the identity of
    the database being instrumented.  ``db_port`` is normalised to ``str`` so
    callers may pass either an int or a string.
    """
    loc_labels = get_location_labels_from_env()
    location_keys = (
        "dc",
        "cluster",
        "host",
        "k8s_namespace",
        "k8s_kind",
        "k8s_pod",
        "k8s_container",
        "docker_container",
        "os_process",
        "os_process_cmdline",
        "team",
        "service",
    )
    labels = {key: loc_labels[key] for key in location_keys}
    labels.update(db=db, db_host=db_host, db_port=str(db_port))
    return labels

# --- /app/metrics/http_in.py (head) ---
from prometheus_client import Counter, Gauge, Histogram

from app.metrics.common import location_labels

# Histogram buckets for response latency (milliseconds, presumably — TODO
# confirm the unit against the middleware that observes this histogram).
LATENCY_BUCKETS = (
    50,
    100,
    250,
    500,
    750,
    1000,
    2500,
    5000,
    7500,
    10000,
    25000,
    float("inf"),
)

# Labels shared by every incoming-HTTP metric.
common_labels = (
    "http_in_host",
    "http_in_ip",
    "http_in_port",
    "http_in_method",
    "http_in_url",
    "http_in_source_ip",
    "http_in_source_port",
    *location_labels,
)


class HttpIn:
    """Container for incoming-HTTP Prometheus metrics.

    Used as a process-wide singleton via :meth:`get_instance` so the metric
    objects are only registered once.
    """

    _instance = None

    @classmethod
    def get_instance(cls, *args, **kwargs):
        # Lazily create the shared instance; later calls ignore their args.
        if not cls._instance:
            cls._instance = cls(*args, **kwargs)
        return cls._instance

    def __init__(self, include_latency_histogram: bool = False):
        self.SRE_SERVICE_UPTIME = Gauge(
            "http_in_uptime",
            "Seconds since the HTTP listener has started",
            ("http_in_ip", "http_in_port", "http_in_tier", *location_labels),
            multiprocess_mode="max",
        )

        self.SRE_REQUESTS_TOTAL = Counter(
            "http_in_requests_total", "Total count of requests", common_labels
        )
self.SRE_REQUEST_BYTES = Counter( 54 | "http_in_request_bytes", "Total amount of request body size", common_labels 55 | ) 56 | 57 | self.SRE_RESPONSES_TOTAL = Counter( 58 | "http_in_responses_total", 59 | "Total count of responses", 60 | ("http_in_response_code", *common_labels), 61 | ) 62 | 63 | self.SRE_RESPONSE_BYTES = Counter( 64 | "http_in_response_bytes_total", 65 | "Total amount of response body size", 66 | ("http_in_response_code", *common_labels), 67 | ) 68 | 69 | self.SRE_RESPONSE_TIME = Counter( 70 | "http_in_response_time_total", 71 | "Total amount of response time", 72 | ("http_in_response_code", *common_labels), 73 | ) 74 | 75 | if include_latency_histogram: 76 | self.SRE_RESPONSE_TIME_HISTOGRAM = Histogram( 77 | "http_in_response_time_histogram", 78 | "Total amount of response time", 79 | ("http_in_method", "http_in_url", *location_labels), 80 | buckets=LATENCY_BUCKETS, 81 | ) 82 | 83 | self.SRE_REQUEST_ERRORS = Counter( 84 | "http_in_request_errors_total", 85 | "Total amount of request errors", 86 | ("http_in_request_error", *common_labels), 87 | ) 88 | 89 | self.SRE_RESPONSE_ERRORS = Counter( 90 | "http_in_response_errors_total", 91 | "Total amount of response errors", 92 | ("http_in_response_code", "http_in_response_error", *common_labels), 93 | ) 94 | -------------------------------------------------------------------------------- /app/metrics/http_out.py: -------------------------------------------------------------------------------- 1 | from prometheus_client import Counter, Gauge 2 | 3 | from app.metrics.common import location_labels 4 | 5 | common_labels = ( 6 | "http_out_host", 7 | "http_out_port", 8 | "http_out_method", 9 | "http_out_url", 10 | *location_labels, 11 | ) 12 | 13 | SRE_DNS_TIME = Gauge("http_out_dns_time", "Time spent on DNS resolve", common_labels) 14 | 15 | SRE_CONNECT_TIME = Counter( 16 | "http_out_connect_time_total", 17 | "Time spent connecting to the remote endpoint", 18 | common_labels, 19 | ) 20 | 21 | 
SRE_HANDSHAKE_TIME = Counter( 22 | "http_out_handshake_time_total", 23 | "Time that spends on handshake to the remote endpoint in a case of TLS ", 24 | common_labels, 25 | ) 26 | 27 | SRE_REQUEST = Counter( 28 | "http_out_requests_total", 29 | "Total amount of outgoing requests on each HTTP connection", 30 | common_labels, 31 | ) 32 | 33 | SRE_REQUEST_SIZE = Counter( 34 | "http_out_request_bytes_total", 35 | "Size of outgoing request on each HTTP connection", 36 | common_labels, 37 | ) 38 | 39 | SRE_REQUEST_ERROR = Counter( 40 | "http_out_request_errors_total", 41 | "Amount of errors on outgoing request on each HTTP connection", 42 | ("http_out_request_error", *common_labels), 43 | ) 44 | 45 | SRE_RESPONSE = Counter( 46 | "http_out_responses_total", 47 | "Amount of errors on outgoing request on each HTTP connection", 48 | ("http_out_response_code", *common_labels), 49 | ) 50 | 51 | SRE_RESPONSE_SIZE = Counter( 52 | "http_out_response_bytes_total", 53 | "Content length or response size", 54 | ("http_out_response_code", *common_labels), 55 | ) 56 | 57 | SRE_RESPONSE_ERROR = Counter( 58 | "http_out_response_errors_total", 59 | "Total amount of outgoing errors on each response", 60 | ("http_out_response_code", "http_out_response_error", *common_labels), 61 | ) 62 | 63 | SRE_RESPONSE_FIRST_BYTE = Gauge( 64 | "http_out_response_first_byte_time", 65 | "Time spent on getting first byte from the remote endpoint", 66 | ("http_out_response_code", *common_labels), 67 | ) 68 | 69 | SRE_RESPONSE_TIME = Counter( 70 | "http_out_response_time_total", 71 | "Total amount of outgoing errors on each response", 72 | ("http_out_response_code", *common_labels), 73 | ) 74 | -------------------------------------------------------------------------------- /app/metrics/job.py: -------------------------------------------------------------------------------- 1 | from prometheus_client import Counter 2 | 3 | from app.metrics.common import location_labels 4 | 5 | SRE_JOB_EXECUTIONS = Counter( 6 | 
"job_executions_total", 7 | "Total amount of executions", 8 | ("job_name", *location_labels), 9 | ) 10 | 11 | SRE_JOB_EXECUTION_TIME = Counter( 12 | "job_execution_time_total", 13 | "Execution time", 14 | ("job_name", *location_labels), 15 | ) 16 | 17 | SRE_JOB_ERRORS = Counter( 18 | "job_errors_total", 19 | "Amount of errors", 20 | ("job_name", "job_error", *location_labels), 21 | ) 22 | -------------------------------------------------------------------------------- /app/routers/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/exness/security-bot/ba35e061bdce3bbc0a345bb6ef736765cfecb923/app/routers/__init__.py -------------------------------------------------------------------------------- /app/routers/gitlab.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from typing import Optional 3 | 4 | import sentry_sdk 5 | from fastapi import APIRouter, Depends 6 | from pydantic import BaseModel 7 | 8 | from app.exceptions.schemas import ValidationError 9 | from app.secbot.inputs.gitlab.dependencies import ( 10 | get_gitlab_webhook_token_header, 11 | gitlab_event, 12 | webhook_model, 13 | ) 14 | from app.secbot.inputs.gitlab.schemas import AnyGitlabModel, GitlabEvent 15 | 16 | logger = logging.getLogger(__name__) 17 | router = APIRouter( 18 | prefix="/gitlab", 19 | tags=["gitlab"], 20 | dependencies=[Depends(get_gitlab_webhook_token_header)], 21 | ) 22 | 23 | 24 | class WebhookReplyModel(BaseModel): 25 | status: str = "ok" 26 | 27 | 28 | @router.post( 29 | "/webhook", 30 | response_model=WebhookReplyModel, 31 | response_description=f"""We support only {', '.join(GitlabEvent)} events. 32 | For the rest any events we will return 200 OK and do nothing. 
33 | """, 34 | responses={ 35 | 403: { 36 | "description": "Gitlab webhook secret token missing or invalid", 37 | "model": ValidationError, 38 | "content": { 39 | "application/json": { 40 | "example": { 41 | "code": "FORBIDDEN", 42 | "message": "X-Gitlab-Token header is invalid", 43 | "details": None, 44 | } 45 | }, 46 | }, 47 | }, 48 | }, 49 | ) 50 | async def post_webhook( 51 | event: Optional[GitlabEvent] = Depends(gitlab_event), 52 | data: Optional[AnyGitlabModel] = Depends(webhook_model), 53 | ): 54 | if not event: 55 | logger.info("Unsupported event", extra={"event": event}) 56 | return WebhookReplyModel() 57 | 58 | if not data: 59 | logger.warning("Unsupported event data", extra={"data": data}) 60 | with sentry_sdk.push_scope() as scope: 61 | scope.set_extra("event", event) 62 | scope.set_extra("data", data) 63 | sentry_sdk.capture_message("Unsupported event data") 64 | return WebhookReplyModel() 65 | 66 | logger.info( 67 | "Received gitlab webhook event", 68 | extra={"event": event, "data": data.raw}, 69 | ) 70 | from app.main import security_bot 71 | 72 | await security_bot.run("gitlab", data=data, event=event) 73 | return WebhookReplyModel() 74 | -------------------------------------------------------------------------------- /app/routers/healthcheck.py: -------------------------------------------------------------------------------- 1 | from fastapi import APIRouter 2 | from pydantic import BaseModel 3 | 4 | router = APIRouter(tags=["common"]) 5 | 6 | 7 | class PingReplyModel(BaseModel): 8 | ping: str = "pong" 9 | 10 | 11 | @router.get("/ping", response_model=PingReplyModel) 12 | def get_ping(): 13 | """Healthcheck for L7 load balancers.""" 14 | return PingReplyModel() 15 | -------------------------------------------------------------------------------- /app/routers/metrics.py: -------------------------------------------------------------------------------- 1 | from fastapi import APIRouter 2 | from prometheus_client import REGISTRY, generate_latest 3 | 
from starlette.responses import PlainTextResponse 4 | 5 | router = APIRouter(tags=["common"]) 6 | 7 | 8 | @router.get( 9 | "/metrics", 10 | response_class=PlainTextResponse, 11 | description="Application metrics in Prometheus format.", 12 | responses={ 13 | 200: { 14 | "description": "Metrics", 15 | "content": {"text/plain": {"example": ""}}, 16 | } 17 | }, 18 | ) 19 | def get_metrics(): 20 | return generate_latest(REGISTRY) 21 | -------------------------------------------------------------------------------- /app/routers/security.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | from fastapi import APIRouter 4 | from pydantic import BaseModel 5 | 6 | from app.secbot.inputs.gitlab.schemas import GitlabWebhookSecurityID 7 | from app.secbot.schemas import SecurityCheckStatus 8 | 9 | logger = logging.getLogger(__name__) 10 | router = APIRouter(prefix="/security", tags=["security"]) 11 | 12 | 13 | class SecurityCheckResponse(BaseModel): 14 | status: SecurityCheckStatus 15 | 16 | 17 | # TODO(ivan.zhirov): add tests 18 | @router.get( 19 | "/gitlab/check/{security_check_id}", 20 | response_model=SecurityCheckResponse, 21 | responses={ 22 | 200: { 23 | "content": { 24 | "application/json": { 25 | "examples": { 26 | "Check happens before check has been created.": { 27 | "value": {"status": "not_started"}, 28 | }, 29 | "Check happens before scans has been created.": { 30 | "value": {"status": "not_started"}, 31 | }, 32 | "Technical problem with the scan": { 33 | "value": {"status": "error"}, 34 | }, 35 | "Security check in progress": { 36 | "value": {"status": "in_progress"}, 37 | }, 38 | "Security check has been failed": { 39 | "value": {"status": "fail"}, 40 | }, 41 | "Security check has been successful": { 42 | "value": {"status": "success"}, 43 | }, 44 | } 45 | } 46 | } 47 | } 48 | }, 49 | ) 50 | async def get_security_check( 51 | security_check_id: GitlabWebhookSecurityID, 52 | ) -> SecurityCheckResponse: 53 
| from app.main import security_bot 54 | 55 | status = await security_bot.fetch_check_result("gitlab", security_check_id) 56 | return SecurityCheckResponse(status=status) 57 | -------------------------------------------------------------------------------- /app/secbot/__init__.py: -------------------------------------------------------------------------------- 1 | import importlib 2 | import inspect 3 | import os 4 | import pkgutil 5 | from typing import Type 6 | 7 | from celery import Celery 8 | 9 | from .inputs import SecbotInput 10 | from .logger import logger 11 | from .schemas import SecurityCheckStatus 12 | 13 | 14 | class SecurityBot: 15 | """ 16 | Main class for the SecurityBot application. Manages inputs for security checks, 17 | runs these checks, and fetches check results. 18 | 19 | Attributes: 20 | celery_app: Celery application instance for managing asynchronous tasks. 21 | _registered_inputs: Dictionary of registered inputs (security checks). 22 | """ 23 | 24 | def __init__(self, celery_app: Celery): 25 | self.celery_app = celery_app 26 | self._registered_inputs = {} # Contains the registered inputs 27 | self.autodiscover_inputs() 28 | 29 | def autodiscover_inputs(self): 30 | """ 31 | Automatically discover all available inputs (security checks) 32 | from the inputs module, and register them to the bot. 33 | """ 34 | base_package = "app.secbot.inputs" 35 | base_path = os.path.dirname(__import__(base_package, fromlist=[""]).__file__) 36 | 37 | # Iterate over all modules in the base package 38 | for _, package_name, _ in pkgutil.iter_modules([base_path]): 39 | full_package_name = f"{base_package}.{package_name}" 40 | try: 41 | module = importlib.import_module(full_package_name) 42 | except ImportError as e: 43 | logger.warning( 44 | f"Could not import {full_package_name}. 
Error: {str(e)}" 45 | ) 46 | continue 47 | 48 | # Register all classes that are a subclass of SecbotInput (excluding SecbotInput itself) 49 | for name, cls in inspect.getmembers(module, inspect.isclass): 50 | if issubclass(cls, SecbotInput) and cls != SecbotInput: 51 | self.register_input(package_name, cls) 52 | 53 | def register_input(self, config_name: str, input_cls: Type[SecbotInput]): 54 | """ 55 | Register a new input (security check) to the bot. 56 | 57 | Args: 58 | config_name: The name of the configuration for the input. 59 | input_cls: The class representing the input. 60 | """ 61 | self._registered_inputs[config_name] = input_cls( 62 | config_name=config_name, 63 | celery_app=self.celery_app, 64 | ) 65 | 66 | async def run(self, input_name: str, *args, **kwargs): 67 | """ 68 | Run a registered input (security check). 69 | 70 | Args: 71 | input_name: The name of the input to run. 72 | args, kwargs (optional): Arguments to pass to the input's run method. 73 | """ 74 | registered_input = self._registered_inputs[input_name] 75 | await registered_input.run(*args, **kwargs) 76 | 77 | async def fetch_check_result( 78 | self, 79 | input_name, 80 | *args, 81 | **kwargs, 82 | ) -> SecurityCheckStatus: 83 | """ 84 | Fetch the result of a security check. 85 | 86 | Args: 87 | input_name: The name of the input whose result to fetch. 88 | args, kwargs (optional): Arguments to pass to the input's fetch_result method. 89 | Returns: 90 | The status of the security check. 91 | """ 92 | registered_input = self._registered_inputs[input_name] 93 | return await registered_input.fetch_status(*args, **kwargs) 94 | -------------------------------------------------------------------------------- /app/secbot/alembic.ini: -------------------------------------------------------------------------------- 1 | # A generic, single database configuration. 
2 | 3 | [alembic] 4 | # path to migration scripts 5 | script_location = app/secbot/db/alembic 6 | 7 | # template used to generate migration file names; The default value is %%(rev)s_%%(slug)s 8 | # Uncomment the line below if you want the files to be prepended with date and time 9 | # see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file 10 | # for all available tokens 11 | # file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s 12 | 13 | # sys.path path, will be prepended to sys.path if present. 14 | # defaults to the current working directory. 15 | prepend_sys_path = . 16 | 17 | # timezone to use when rendering the date within the migration file 18 | # as well as the filename. 19 | # If specified, requires the python-dateutil library that can be 20 | # installed by adding `alembic[tz]` to the pip requirements 21 | # string value is passed to dateutil.tz.gettz() 22 | # leave blank for localtime 23 | # timezone = 24 | 25 | # max length of characters to apply to the 26 | # "slug" field 27 | # truncate_slug_length = 40 28 | 29 | # set to 'true' to run the environment during 30 | # the 'revision' command, regardless of autogenerate 31 | # revision_environment = false 32 | 33 | # set to 'true' to allow .pyc and .pyo files without 34 | # a source .py file to be detected as revisions in the 35 | # versions/ directory 36 | # sourceless = false 37 | 38 | # version location specification; This defaults 39 | # to app/alembic/versions. When using multiple version 40 | # directories, initial revisions must be specified with --version-path. 41 | # The path separator used here should be the separator specified by "version_path_separator" below. 42 | # version_locations = %(here)s/bar:%(here)s/bat:app/alembic/versions 43 | 44 | # version path separator; As mentioned above, this is the character used to split 45 | # version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. 
46 | # If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. 47 | # Valid values for version_path_separator are: 48 | # 49 | # version_path_separator = : 50 | # version_path_separator = ; 51 | # version_path_separator = space 52 | version_path_separator = os # Use os.pathsep. Default configuration used for new projects. 53 | 54 | # the output encoding used when revision files 55 | # are written from script.py.mako 56 | # output_encoding = utf-8 57 | sqlalchemy.url = 58 | 59 | [post_write_hooks] 60 | # post_write_hooks defines scripts or Python functions that are run 61 | # on newly generated revision scripts. See the documentation for further 62 | # detail and examples 63 | 64 | # format using "black" - use the console_scripts runner, against the "black" entrypoint 65 | # hooks = black 66 | # black.type = console_scripts 67 | # black.entrypoint = black 68 | # black.options = -l 79 REVISION_SCRIPT_FILENAME 69 | 70 | # Logging configuration 71 | [loggers] 72 | keys = root,sqlalchemy,alembic 73 | 74 | [handlers] 75 | keys = console 76 | 77 | [formatters] 78 | keys = generic 79 | 80 | [logger_root] 81 | level = WARN 82 | handlers = console 83 | qualname = 84 | 85 | [logger_sqlalchemy] 86 | level = WARN 87 | handlers = 88 | qualname = sqlalchemy.engine 89 | 90 | [logger_alembic] 91 | level = INFO 92 | handlers = 93 | qualname = alembic 94 | 95 | [handler_console] 96 | class = StreamHandler 97 | args = (sys.stderr,) 98 | level = NOTSET 99 | formatter = generic 100 | 101 | [formatter_generic] 102 | format = %(levelname)-5.5s [%(name)s] %(message)s 103 | datefmt = %H:%M:%S 104 | -------------------------------------------------------------------------------- /app/secbot/db/__init__.py: -------------------------------------------------------------------------------- 1 | from sqlalchemy import Column, DateTime, event, func 2 | from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine 3 | from 
sqlalchemy.ext.declarative import declarative_base 4 | from sqlalchemy.orm import sessionmaker 5 | from sqlalchemy.pool import AsyncAdaptedQueuePool 6 | 7 | from app.metrics.db.base import get_const_common_labels 8 | from app.metrics.db.sqlalchemy import ConnectionsTotal, add_event_listeners 9 | from app.secbot.settings import settings 10 | 11 | engine = create_async_engine( 12 | settings.postgres_dsn, 13 | poolclass=AsyncAdaptedQueuePool, 14 | pool_pre_ping=True, 15 | pool_size=5, 16 | pool_recycle=5 * 60, # 5 minutes 17 | max_overflow=5, 18 | pool_timeout=10, 19 | connect_args={"server_settings": {"jit": "off"}}, 20 | echo=False, 21 | ) 22 | 23 | # Setup metrics 24 | add_event_listeners(engine) 25 | const_labels = get_const_common_labels( 26 | db=engine.url.database, 27 | db_host=engine.url.host, 28 | db_port=engine.url.port, 29 | ) 30 | event.listen(engine.sync_engine, "connect", ConnectionsTotal(const_labels).on_connect) 31 | 32 | # Create session factory 33 | db_session = sessionmaker( 34 | autocommit=False, 35 | autoflush=False, 36 | bind=engine, 37 | expire_on_commit=False, 38 | class_=AsyncSession, 39 | ) 40 | 41 | 42 | class BaseModel: 43 | """General model of all models in the project""" 44 | 45 | created_at = Column(DateTime, default=func.now()) 46 | updated_at = Column(DateTime, default=func.now(), onupdate=func.now()) 47 | 48 | 49 | Base = declarative_base(cls=BaseModel) 50 | -------------------------------------------------------------------------------- /app/secbot/db/alembic/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/exness/security-bot/ba35e061bdce3bbc0a345bb6ef736765cfecb923/app/secbot/db/alembic/__init__.py -------------------------------------------------------------------------------- /app/secbot/db/alembic/env.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import os 3 | from logging.config import 
fileConfig 4 | 5 | from alembic import context 6 | from sqlalchemy import engine_from_config, pool 7 | from sqlalchemy.ext.asyncio import AsyncEngine 8 | 9 | # this is the Alembic Config object, which provides 10 | # access to the values within the .ini file in use. 11 | config = context.config 12 | 13 | # Interpret the config file for Python logging. 14 | # This line sets up loggers basically. 15 | if config.config_file_name is not None: 16 | fileConfig(config.config_file_name) 17 | 18 | # For 'autogenerate' 19 | from app.secbot.db.models import Base 20 | 21 | target_metadata = Base.metadata 22 | 23 | config.set_main_option("sqlalchemy.url", os.environ["SECBOT_POSTGRES_DSN"]) 24 | 25 | 26 | def run_migrations_offline(): 27 | """Run migrations in 'offline' mode. 28 | 29 | This configures the context with just a URL 30 | and not an Engine, though an Engine is acceptable 31 | here as well. By skipping the Engine creation 32 | we don't even need a DBAPI to be available. 33 | 34 | Calls to context.execute() here emit the given string to the 35 | script output. 36 | 37 | """ 38 | url = config.get_main_option("sqlalchemy.url") 39 | context.configure( 40 | url=url, 41 | target_metadata=target_metadata, 42 | literal_binds=True, 43 | dialect_opts={"paramstyle": "named"}, 44 | ) 45 | 46 | with context.begin_transaction(): 47 | context.run_migrations() 48 | 49 | 50 | def do_run_migrations(connection): 51 | context.configure( 52 | connection=connection, 53 | target_metadata=target_metadata, 54 | ) 55 | 56 | with context.begin_transaction(): 57 | context.run_migrations() 58 | 59 | 60 | async def run_migrations_online(): 61 | """Run migrations in 'online' mode. 62 | 63 | In this scenario we need to create an Engine 64 | and associate a connection with the context. 
65 | 66 | """ 67 | connectable = AsyncEngine( 68 | engine_from_config( 69 | config.get_section(config.config_ini_section), 70 | prefix="sqlalchemy.", 71 | poolclass=pool.NullPool, 72 | future=True, 73 | ) 74 | ) 75 | 76 | async with connectable.connect() as connection: 77 | await connection.run_sync(do_run_migrations) 78 | 79 | 80 | if context.is_offline_mode(): 81 | run_migrations_offline() 82 | else: 83 | asyncio.run(run_migrations_online()) 84 | -------------------------------------------------------------------------------- /app/secbot/db/alembic/script.py.mako: -------------------------------------------------------------------------------- 1 | """${message} 2 | 3 | Revision ID: ${up_revision} 4 | Revises: ${down_revision | comma,n} 5 | Create Date: ${create_date} 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | ${imports if imports else ""} 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = ${repr(up_revision)} 14 | down_revision = ${repr(down_revision)} 15 | branch_labels = ${repr(branch_labels)} 16 | depends_on = ${repr(depends_on)} 17 | 18 | 19 | def upgrade() -> None: 20 | ${upgrades if upgrades else "pass"} 21 | 22 | 23 | def downgrade() -> None: 24 | ${downgrades if downgrades else "pass"} 25 | -------------------------------------------------------------------------------- /app/secbot/db/alembic/versions/3611bb3d9dd2_initial.py: -------------------------------------------------------------------------------- 1 | """initial 2 | 3 | Revision ID: 3611bb3d9dd2 4 | Revises: 5 | Create Date: 2023-05-31 12:53:57.376066 6 | 7 | """ 8 | import sqlalchemy as sa 9 | from alembic import op 10 | from sqlalchemy.dialects import postgresql 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = "3611bb3d9dd2" 14 | down_revision = None 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade() -> None: 20 | # ### commands auto generated by Alembic - please adjust! 
### 21 | op.create_table( 22 | "repository_security_check", 23 | sa.Column("created_at", sa.DateTime(), nullable=True), 24 | sa.Column("updated_at", sa.DateTime(), nullable=True), 25 | sa.Column("id", sa.Integer(), autoincrement=True, nullable=False), 26 | sa.Column("external_id", sa.String(), nullable=False), 27 | sa.Column( 28 | "event_type", 29 | sa.Enum("PUSH", "TAG_PUSH", "MERGE_REQUEST", name="gitlabevent"), 30 | nullable=False, 31 | ), 32 | sa.Column( 33 | "event_json", postgresql.JSON(astext_type=sa.Text()), nullable=False 34 | ), 35 | sa.Column("commit_hash", sa.String(), nullable=False), 36 | sa.Column("branch", sa.String(), nullable=False), 37 | sa.Column("project_name", sa.String(), nullable=False), 38 | sa.Column("path", sa.String(), nullable=False), 39 | sa.Column("prefix", sa.String(), nullable=False), 40 | sa.PrimaryKeyConstraint("id"), 41 | sa.UniqueConstraint("external_id"), 42 | ) 43 | op.create_table( 44 | "repository_security_scan", 45 | sa.Column("created_at", sa.DateTime(), nullable=True), 46 | sa.Column("updated_at", sa.DateTime(), nullable=True), 47 | sa.Column("check_id", sa.Integer(), nullable=False), 48 | sa.Column("id", sa.Integer(), autoincrement=True, nullable=False), 49 | sa.Column("started_at", sa.DateTime(), nullable=True), 50 | sa.Column("finished_at", sa.DateTime(), nullable=True), 51 | sa.Column( 52 | "status", 53 | sa.Enum("NEW", "IN_PROGRESS", "SKIP", "ERROR", "DONE", name="scanstatus"), 54 | nullable=False, 55 | ), 56 | sa.Column("response", postgresql.JSON(astext_type=sa.Text()), nullable=True), 57 | sa.Column("scan_name", sa.String(), nullable=False), 58 | sa.Column( 59 | "outputs_test_id", postgresql.JSON(astext_type=sa.Text()), nullable=True 60 | ), 61 | sa.ForeignKeyConstraint( 62 | ["check_id"], 63 | ["repository_security_check.id"], 64 | ), 65 | sa.PrimaryKeyConstraint("id"), 66 | ) 67 | op.create_table( 68 | "slack_notifications", 69 | sa.Column("created_at", sa.DateTime(), nullable=True), 70 | sa.Column("updated_at", 
sa.DateTime(), nullable=True), 71 | sa.Column("id", sa.Integer(), autoincrement=True, nullable=False), 72 | sa.Column("channel", sa.String(), nullable=False), 73 | sa.Column("is_sent", sa.Boolean(), nullable=True), 74 | sa.Column("payload", postgresql.JSON(astext_type=sa.Text()), nullable=False), 75 | sa.Column("scan_id", sa.Integer(), nullable=False), 76 | sa.ForeignKeyConstraint( 77 | ["scan_id"], 78 | ["repository_security_scan.id"], 79 | ), 80 | sa.PrimaryKeyConstraint("id"), 81 | ) 82 | # ### end Alembic commands ### 83 | 84 | 85 | def downgrade() -> None: 86 | # ### commands auto generated by Alembic - please adjust! ### 87 | op.drop_table("slack_notifications") 88 | op.drop_table("repository_security_scan") 89 | op.drop_table("repository_security_check") 90 | # ### end Alembic commands ### 91 | -------------------------------------------------------------------------------- /app/secbot/db/alembic/versions/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/exness/security-bot/ba35e061bdce3bbc0a345bb6ef736765cfecb923/app/secbot/db/alembic/versions/__init__.py -------------------------------------------------------------------------------- /app/secbot/db/models.py: -------------------------------------------------------------------------------- 1 | from app.secbot.db import Base # noqa: F401 2 | from app.secbot.inputs.gitlab.models import * # noqa: F401,F403 3 | -------------------------------------------------------------------------------- /app/secbot/exceptions.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import pprint 4 | 5 | 6 | class SecbotException(Exception): 7 | """Base gitlab exception class.""" 8 | 9 | 10 | class BaseGitlabWorkflowException(SecbotException): 11 | """Base gitlab exception with scan id info.""" 12 | 13 | 14 | class ScanCheckFailed(SecbotException): 15 | """Raises when we try 
to check scan status, and it's failed.""" 16 | 17 | 18 | class ScanCantBeScanned(SecbotException): 19 | """Raises when we try to scan a scan that already in progress.""" 20 | 21 | 22 | class ScanExecutionSkipped(SecbotException): 23 | """Raises when we want to make a scan skippable. 24 | 25 | It might happen when worker services can't proceed security check, 26 | and we don't want to set the scan status to ERROR. 27 | """ 28 | 29 | 30 | class SecbotInputError(SecbotException): 31 | """Base exception for all input exceptions.""" 32 | 33 | 34 | class SecbotConfigError(SecbotException): 35 | """This exception is raised when the configuration is invalid. 36 | 37 | It accumulates all possible errors that occur during the parsing of job 38 | configurations and presents them all at once. 39 | """ 40 | 41 | def __init__(self, errors): 42 | self.errors = errors 43 | 44 | def __str__(self): 45 | return pprint.pformat(self.errors) 46 | 47 | 48 | class SecbotConfigMissingEnv(SecbotConfigError): 49 | """This exception is raised when the configuration is missing an environment""" 50 | -------------------------------------------------------------------------------- /app/secbot/inputs/gitlab/__init__.py: -------------------------------------------------------------------------------- 1 | from sqlalchemy import select 2 | 3 | from app.secbot.config import config 4 | from app.secbot.db import db_session 5 | from app.secbot.inputs import SecbotInput 6 | from app.secbot.inputs.gitlab.models import ( 7 | RepositorySecurityCheck, 8 | RepositorySecurityScan, 9 | ) 10 | from app.secbot.inputs.gitlab.schemas import ( 11 | AnyGitlabModel, 12 | GitlabEvent, 13 | GitlabInputData, 14 | GitlabWebhookSecurityID, 15 | ) 16 | from app.secbot.inputs.gitlab.services import get_or_create_security_check 17 | from app.secbot.inputs.gitlab.utils import ( 18 | generate_gitlab_security_id, 19 | get_config_from_host, 20 | ) 21 | from app.secbot.logger import logger 22 | from app.secbot.schemas import 
# noinspection PyMethodOverriding
class GitlabInput(SecbotInput):
    """GitLab-specific Secbot input.

    Resolves the workflow job matching an incoming webhook payload, persists
    a security-check row, and delegates execution to the base input runner.
    """

    async def run(
        self,
        data: AnyGitlabModel,
        event: GitlabEvent,
    ):
        """Run the Secbot workflow for a single GitLab webhook event.

        Args:
            data: Parsed webhook payload model.
            event: GitLab event type of the payload.

        Returns:
            The base runner's result, or None when no workflow job matches.
        """
        job = config.matching_workflow_job("gitlab", data.raw)
        if not job:
            logger.info(f"No matching workflow job for {event}")
            return

        gitlab_config = get_config_from_host(data.repository.homepage.host)
        security_id = generate_gitlab_security_id(gitlab_config.prefix, data=data)

        async with db_session() as session:
            check = await get_or_create_security_check(
                db_session=session,
                external_id=security_id,
                initial_data={
                    "event_type": event,
                    "event_json": data.raw,
                    "commit_hash": data.commit.id,
                    "branch": data.target_branch,
                    "project_name": data.repository.name,
                    "path": data.repository.homepage,
                    "prefix": gitlab_config.prefix,
                },
            )
            input_data = GitlabInputData(
                event=check.event_type,
                data=data,
                db_check_id=check.id,
            )
        return await super().run(input_data, job=job)

    async def fetch_status(
        self, security_check_id: GitlabWebhookSecurityID
    ) -> SecurityCheckStatus:
        """Compute the externally visible status of a security check.

        Args:
            security_check_id: The generated external id of the check.

        Returns:
            A SecurityCheckStatus derived from the persisted scan rows and
            the downstream output handlers.
        """
        async with db_session() as session:
            check = (
                await session.execute(
                    select(RepositorySecurityCheck).where(
                        RepositorySecurityCheck.external_id == security_check_id
                    )
                )
            ).scalar()
            if not check:
                return SecurityCheckStatus.NOT_STARTED

            scans = (
                await session.execute(
                    select(
                        [
                            RepositorySecurityScan.status,
                            RepositorySecurityScan.scan_name,
                            RepositorySecurityScan.outputs_test_id,
                        ]
                    ).where(RepositorySecurityScan.check_id == check.id)
                )
            ).all()

            # Define if we have enough scans;
            # we suppose that we have only one job for a security check.
            job = config.matching_workflow_job("gitlab", check.event_json)

            # BUG FIX: the original computed `len(scans) == len(job.scans)` and
            # returned IN_PROGRESS on any mismatch, which made the following
            # "more scans than configured" ERROR branch unreachable — a check
            # with surplus scan rows was reported as IN_PROGRESS forever.
            if len(scans) < len(job.scans):
                # Not all configured scans have been recorded yet.
                return SecurityCheckStatus.IN_PROGRESS
            if len(scans) > len(job.scans):
                # More scan rows than configured scans: inconsistent state.
                return SecurityCheckStatus.ERROR

            # Skipped scans do not participate in the final verdict.
            scans = [scan for scan in scans if scan.status is not ScanStatus.SKIP]
            statuses = [scan.status for scan in scans]

            if ScanStatus.ERROR in statuses:
                return SecurityCheckStatus.ERROR
            elif ScanStatus.IN_PROGRESS in statuses:
                return SecurityCheckStatus.IN_PROGRESS

            if all(status == ScanStatus.DONE for status in statuses):
                # Restrict the downstream status check to the outputs and
                # scans that actually produced rows for this check.
                scan_outputs = set(
                    output_name
                    for scan in scans
                    for output_name in scan.outputs_test_id.keys()
                )
                outputs = [
                    output for output in job.outputs if output.name in scan_outputs
                ]
                scan_names = set(scan.scan_name for scan in scans)
                eligible_scans = [
                    scan for scan in job.scans if scan.name in scan_names
                ]
                return await super().fetch_status(
                    outputs,
                    eligible_scans=eligible_scans,
                    commit_hash=check.commit_hash,
                )
            return SecurityCheckStatus.ERROR
# noinspection PyMethodOverriding
class DefectDojoHandler(SecbotOutputHandler):
    """Output handler that pushes scan results into DefectDojo.

    Uploads each scan report as a DefectDojo test, records the resulting
    test id on the scan row, and validates the imported findings when a
    status is requested.
    """

    env_model = DefectDojoCredentials

    async def on_failure(
        self,
        scan_result: GitlabScanResult,
        exception,
        component_name: str,
        env: DefectDojoCredentials,
    ):
        """Mark the originating scan as failed when the upload errors out."""
        await handle_exception(
            check_id=scan_result.input.db_check_id,
            scan_component_name=scan_result.component_name,
            exception=exception,
        )

    async def fetch_status(
        self,
        eligible_scans: List[SecbotConfigComponent],
        commit_hash: str,
        env: DefectDojoCredentials,
    ) -> bool:
        """Validate the findings DefectDojo holds for the given commit."""
        validator = DefectDojoFindingsValidator(
            eligible_scans=eligible_scans,
            credentials=env,
            commit_hash=commit_hash,
        )
        return await validator.is_valid()

    async def run(
        self,
        scan_result: GitlabScanResult,
        component_name: str,
        env: DefectDojoCredentials,
    ):
        """Upload a scan result to DefectDojo and complete the scan.

        Returns:
            GitlabOutputResult carrying the findings DefectDojo reported back.
        """
        upload_payload = OutputResultObject(
            data=scan_result.input.data,
            worker=scan_result.handler_name,
            result=json.dumps(scan_result.file.content),
        )
        test_id, dd_findings = await send_result(
            credentials=env,
            output_result=upload_payload,
        )
        # Persist the external test id so the check can be validated later.
        await complete_scan(
            scan_id=scan_result.db_id,
            output_component_name=component_name,
            output_external_test_id=test_id,
        )
        response = OutputResponse(
            project_name=get_project_name(scan_result.input.data.project.git_ssh_url),
            project_url=scan_result.input.data.project.web_url,
            findings=dd_findings,
        )
        return GitlabOutputResult(
            handler_name=self.config_name,
            component_name=component_name,
            scan_result=scan_result,
            response=response,
        )
def is_gitleaks_valid(findings: List[DefectDojoFindings]) -> bool:
    """Check whether the Gitleaks findings are acceptable.

    All findings must originate from the Gitleaks scan service; the set is
    valid only when every finding is inactive (an empty set is valid).
    """
    assert all(finding.scan_name == "gitleaks" for finding in findings)
    return not any(finding.is_active for finding in findings)
response["results"]: 102 | duplicate_finding = None 103 | if duplicate_finding_id := finding.get("duplicate_finding"): 104 | duplicate_dict = duplicates[str(duplicate_finding_id)] 105 | duplicate_finding = DefectDojoFindingDuplicate( 106 | active=duplicate_dict["active"], 107 | severity=Severity(duplicate_dict["severity"]), 108 | ) 109 | yield DefectDojoFindings( 110 | severity=finding["severity"], 111 | duplicate=duplicate_finding, 112 | active=finding["active"], 113 | scan_name=self.Meta.scan_type_name[ 114 | finding["related_fields"]["test"]["test_type"]["name"] 115 | ], 116 | ) 117 | 118 | async def is_valid(self) -> bool: 119 | """Check if the current instance of the class is valid. 120 | 121 | This function checks if all the findings from the scan services are valid 122 | by using the validators specified in the validators attribute. 123 | """ 124 | all_findings = [finding async for finding in self._fetch_findings()] 125 | eligible_scan_handler_names = [ 126 | scan.handler_name for scan in self.eligible_scans 127 | ] 128 | validators = { 129 | check_service: validator 130 | for check_service, validator in self.Meta.validators.items() 131 | if check_service in eligible_scan_handler_names 132 | } 133 | for check_service, validator in validators.items(): 134 | findings = [ 135 | finding 136 | for finding in all_findings 137 | if finding.scan_name == check_service 138 | ] 139 | if not validator(findings): 140 | return False 141 | return True 142 | -------------------------------------------------------------------------------- /app/secbot/inputs/gitlab/handlers/gitleaks/__init__.py: -------------------------------------------------------------------------------- 1 | import json 2 | import subprocess 3 | import tempfile 4 | 5 | from sqlalchemy import update 6 | 7 | from app.secbot.db import db_session 8 | from app.secbot.exceptions import ScanCheckFailed 9 | from app.secbot.handlers import SecbotScanHandler 10 | from app.secbot.inputs.gitlab import 
class GitleaksConfig(SecbotBaseModel):
    # Report format passed to `gitleaks -f`; JSON is what the pipeline parses.
    format: str = "json"


class GitleaksHandler(SecbotScanHandler):
    """Scan handler that runs `gitleaks detect` over a cloned repository."""

    config_model = GitleaksConfig

    async def on_failure(
        self,
        input_data: GitlabInputData,
        exception,
        component_name: str,
        config: GitleaksConfig,
    ) -> None:
        """Record the exception against the security check when the scan fails."""
        await handle_exception(
            check_id=input_data.db_check_id,
            scan_component_name=component_name,
            exception=exception,
        )

    async def run(
        self,
        input_data: GitlabInputData,
        component_name: str,
        config: GitleaksConfig,
    ) -> GitlabScanResult:
        """Clone the repository, run gitleaks, persist and return the result.

        Raises:
            ScanCheckFailed: when the gitleaks process cannot be launched.
        """
        # Create and start the gitleaks scan object
        scan = await start_scan(component_name, input_data.db_check_id)

        # Clone the entire repository and save it in the temporary directory
        with clone_repository(
            repository_url=input_data.data.project.git_http_url,
            reference=input_data.data.commit.id,
        ) as repository_temp_path:

            # Create a temporary file and save the result of the check in it
            with tempfile.NamedTemporaryFile(prefix="secbot-gitleaks-") as temp_file:
                try:
                    subprocess.run(
                        [
                            "gitleaks",
                            "detect",
                            "--redact",
                            "-f",
                            config.format,
                            "-r",
                            temp_file.name,
                        ],
                        stdout=subprocess.PIPE,
                        stderr=subprocess.PIPE,
                        universal_newlines=True,
                        cwd=repository_temp_path,
                        # gitleaks exits non-zero when leaks are found, so a
                        # non-zero exit code must not raise here.
                        check=False,
                    )
                # BUG FIX: the original caught RuntimeError, which
                # subprocess.run never raises — that handler was dead code.
                # Launch failures surface as OSError (e.g. missing binary)
                # or SubprocessError; translate those into ScanCheckFailed.
                except (OSError, subprocess.SubprocessError) as err:
                    raise ScanCheckFailed() from err

                # Read the content of the temporary file with scan defects
                # and save it in the database
                with open(temp_file.name, "rb") as output_file:
                    content = output_file.read()
                    response = json.loads(content.decode())

        async with db_session() as session:
            await session.execute(
                update(RepositorySecurityScan)
                .where(RepositorySecurityScan.id == scan.id)
                .values(response=response)
            )
            await session.commit()

        scan_file = GitlabScanResultFile(
            commit_hash=input_data.data.commit.id,
            scan_name=self.config_name,
            format=config.format,
            content=response,
        )
        return GitlabScanResult(
            db_id=scan.id,
            input=input_data,
            handler_name=self.config_name,
            component_name=component_name,
            file=scan_file,
        )
check_id=output.scan_result.input.db_check_id, 39 | scan_component_name=output.scan_result.component_name, 40 | exception=exception, 41 | ) 42 | 43 | async def run( 44 | self, 45 | output: GitlabOutputResult, 46 | component_name: str, 47 | config: SlackConfig, 48 | env: SlackCredentials, 49 | ): 50 | """Send notification to slack channel.""" 51 | message_blocks = generate_message_blocks( 52 | output=output, 53 | render_limit=config.render_limit, 54 | ) 55 | if not message_blocks: 56 | return 57 | 58 | for channel in config.channels: 59 | async with db_session() as session: 60 | notification = ( 61 | await session.execute( 62 | select(SlackNotifications) 63 | .with_for_update() 64 | .where( 65 | SlackNotifications.scan_id == output.scan_result.db_id, 66 | SlackNotifications.channel == channel, 67 | ) 68 | ) 69 | ).scalar() 70 | if notification and notification.is_sent is True: 71 | return 72 | if not notification: 73 | notification = SlackNotifications( 74 | scan_id=output.scan_result.db_id, 75 | channel=channel, 76 | payload=message_blocks, 77 | ) 78 | await send_message( 79 | channel=channel, 80 | payload=notification.payload, 81 | token=env.token, 82 | ) 83 | notification.is_sent = True 84 | session.add(notification) 85 | await session.commit() 86 | -------------------------------------------------------------------------------- /app/secbot/inputs/gitlab/handlers/slack/api.py: -------------------------------------------------------------------------------- 1 | from slack_sdk.web.async_client import AsyncWebClient 2 | 3 | 4 | async def send_message( 5 | token: str, 6 | channel: str, 7 | payload: dict, 8 | ) -> None: 9 | """Send message payload to the specific channel via a secbot app.""" 10 | assert token, "The token is missing." 11 | assert channel, "The channel name is missing." 12 | assert payload, "The payload can't be empty." 
def generate_message_blocks(
    output: GitlabOutputResult,
    render_limit: int,
) -> Optional[List[Dict[str, str]]]:
    """Build Slack "section" blocks describing new findings.

    Args:
        output: Output-handler result carrying the findings for a project.
        render_limit: Maximum number of findings rendered individually.

    Returns:
        A list of Slack block dicts, or None when there are no findings.
    """
    new_findings_count = len(output.response.findings)
    if new_findings_count == 0:
        return None

    message_blocks = []

    def add_to_message_blocks(msg: str) -> None:
        block = {"type": "section", "text": {"type": "mrkdwn", "text": msg}}
        message_blocks.append(block)

    # Message header
    project = output.response.project_name
    project_url = output.response.project_url
    message = f"Worker *{output.scan_result.component_name}* found *{new_findings_count}* new findings in *<{project_url}|{project}>*:"
    add_to_message_blocks(message)

    # BUG FIX: the original sliced BEFORE sorting
    # (`sorted(findings[:render_limit], ...)`), so the rendered subset was an
    # arbitrary prefix of the findings list and the most severe findings
    # could be the ones stripped. Sort by severity first, then apply the
    # render limit.
    findings = sorted(
        output.response.findings,
        key=lambda item: item.severity.priority,
    )[:render_limit]

    # Add limited findings info text blocks
    for finding in findings:
        finding_severity = SEVERITY_TO_EMOJI.get(
            finding.severity, ":large_purple_circle:"
        )
        message = f"{finding_severity} <{finding.url}|{finding.title}>"
        add_to_message_blocks(message)

    # Add special info message block if findings count is greater than limit
    if new_findings_count > render_limit:
        message = f":no_bell: *{new_findings_count - render_limit}* were *stripped* from notification :no_bell:"
        add_to_message_blocks(message)

    return message_blocks
class SlackNotifications(Base):
    """State of scan notification to the Slack channel"""

    __tablename__ = "slack_notifications"

    id = Column(Integer(), primary_key=True, autoincrement=True)
    # Slack channel name the notification targets.
    channel = Column(String, nullable=False)
    # Set to True once the message has been posted successfully.
    is_sent = Column(Boolean, default=False)
    # Slack "blocks" payload that was (or will be) sent.
    payload = Column(JSON, nullable=False)

    # Scan this notification belongs to.
    scan_id = Column(
        Integer,
        ForeignKey("repository_security_scan.id"),
        nullable=False,
    )
def get_gitlab_model_for_event(event: GitlabEvent, data: dict) -> AnyGitlabModel:
    """Build the webhook model corresponding to the given GitLab event.

    The original payload is also stored on the model under ``raw``.
    """
    model_cls = GITLAB_EVENTS_MAP[event]
    return model_cls(**data, raw=data)
class BaseGitlabEventData(BaseModel, abc.ABC):
    """Common base for parsed GitLab webhook payloads.

    Concrete subclasses define how the target branch, the relevant commit,
    and the event path are derived from the event-specific fields.
    """

    project: Project
    repository: Repository
    # Original, unparsed webhook payload.
    raw: dict

    @property
    @abc.abstractmethod
    def target_branch(self) -> str:
        """Branch the event targets."""
        raise NotImplementedError()

    @property
    @abc.abstractmethod
    def commit(self) -> Commit:
        """The commit this event refers to."""
        raise NotImplementedError()

    @property
    @abc.abstractmethod
    def path(self) -> str:
        """URL identifying the event source."""
        raise NotImplementedError()

    @property
    def team_name(self) -> str:
        # Path component of the repository homepage with the leading "/"
        # dropped.
        return self.repository.homepage.path[1:]
"update" 16 | approved = "approved" 17 | unapproved = "unapproved" 18 | merge = "merge" 19 | 20 | 21 | class MergeRequestObjectAttributes(BaseModel): 22 | id: int 23 | url: AnyUrl 24 | state: str 25 | target_branch: str 26 | source_branch: str 27 | action: Optional[MergeRequestAction] 28 | last_commit: Commit 29 | 30 | 31 | class MergeRequestWebhookModel(BaseGitlabEventData): 32 | object_attributes: MergeRequestObjectAttributes 33 | 34 | @property 35 | def commit(self) -> Commit: 36 | return self.object_attributes.last_commit 37 | 38 | @property 39 | def path(self) -> str: 40 | return self.object_attributes.url 41 | 42 | @property 43 | def target_branch(self) -> str: 44 | return self.object_attributes.target_branch 45 | -------------------------------------------------------------------------------- /app/secbot/inputs/gitlab/schemas/output_responses.py: -------------------------------------------------------------------------------- 1 | from typing import List 2 | 3 | from pydantic import AnyUrl, BaseModel 4 | 5 | from app.secbot.schemas import Severity 6 | 7 | 8 | class OutputFinding(BaseModel): 9 | """Base output finding model.""" 10 | 11 | title: str 12 | severity: Severity 13 | url: AnyUrl 14 | 15 | 16 | class OutputResponse(BaseModel): 17 | """Base output response model.""" 18 | 19 | project_name: str 20 | project_url: AnyUrl 21 | findings: List[OutputFinding] 22 | -------------------------------------------------------------------------------- /app/secbot/inputs/gitlab/schemas/push.py: -------------------------------------------------------------------------------- 1 | from typing import List 2 | 3 | from app.secbot.inputs.gitlab.schemas.base import ( 4 | BaseGitlabEventData, 5 | Commit, 6 | CommitHash, 7 | ) 8 | 9 | 10 | class PushWebhookModel(BaseGitlabEventData): 11 | after: CommitHash 12 | ref: str 13 | commits: List[Commit] 14 | 15 | @property 16 | def path(self) -> str: 17 | return self.commit.url 18 | 19 | @property 20 | def target_branch(self) -> str: 
def get_project_name(git_ssh_url: str) -> str:
    """Return the project name extracted from a git SSH url.

    A leading ``git@`` and a trailing ``.git`` are stripped when present;
    everything in between is returned unchanged.
    """
    start = 4 if git_ssh_url.startswith("git@") else 0
    end = len(git_ssh_url) - 4 if git_ssh_url.endswith(".git") else len(git_ssh_url)
    return git_ssh_url[start:end]
def override_git_credentials():
    """Overrides Git credentials using configurations from the settings.

    Creates/overwrites a '.git-credentials' file in the user's home
    directory with one URL per configured GitLab host, each embedding the
    'oauth2' user and the corresponding authentication token.

    Returns:
        A (closed) file object representing the '.git-credentials' file.

    Raises:
        Exception: Any exception that occurs while writing to the file.
    """
    user = "oauth2"
    credentials_path = Path.home() / ".git-credentials"
    # `with` guarantees the handle is closed even when building/writing the
    # content raises, matching the original try/finally behavior.
    with open(credentials_path, "w") as credentials_file:
        credentials_file.write(
            "\n".join(
                str(
                    yarl.URL(config.host)
                    .with_user(user)
                    .with_password(config.auth_token.get_secret_value())
                )
                for config in settings.gitlab_configs
            )
        )
    return credentials_file
82 | 83 | Raises: 84 | StopIteration: If no configuration matches the provided host. 85 | """ 86 | return next( 87 | config for config in settings.gitlab_configs if config.host.host == host 88 | ) 89 | -------------------------------------------------------------------------------- /app/secbot/logger.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | logger = logging.getLogger("secbot") 4 | logger.setLevel(logging.INFO) 5 | -------------------------------------------------------------------------------- /app/secbot/schemas.py: -------------------------------------------------------------------------------- 1 | import enum 2 | 3 | from pydantic import BaseModel, root_validator 4 | 5 | PYDANTIC_CLS_PATH = "__pydantic_path_model__" 6 | 7 | 8 | class SecbotBaseModel(BaseModel): 9 | """Base model for Secbot, supporting serialization/deserialization. 10 | 11 | This class extends the Pydantic BaseModel for use in Secbot's Celery 12 | workflow where models need to be serialized and deserialized. 13 | 14 | It ensures the inclusion of an absolute path to the class model in each 15 | instance, which is used later for deserialization. 16 | """ 17 | 18 | @root_validator 19 | def populate_with_class_model(cls, values): 20 | """Populates the model with the absolute class model path. 21 | 22 | As a Pydantic root validator, it's invoked during validation for 23 | each model instance. It takes the model's attribute mapping, and 24 | adds a new entry under the `PYDANTIC_CLS_PATH` key. This path 25 | includes the module and class name, used later for deserialization. 26 | 27 | Args: 28 | values (dict): Model's attribute mapping. 29 | 30 | Returns: 31 | The updated attribute mapping. 32 | """ 33 | values[PYDANTIC_CLS_PATH] = f"{cls.__module__}.{cls.__name__}" 34 | return values 35 | 36 | 37 | class Severity(str, enum.Enum): 38 | # NOTE(ivan.zhirov): These severities are from defectdojo statuses. 
class Severity(str, enum.Enum):
    """Finding severity levels.

    NOTE(ivan.zhirov): These severities are from defectdojo statuses.
    """

    INFO = "Informational"
    LOW = "Low"
    MEDIUM = "Medium"
    HIGH = "High"
    CRITICAL = "Critical"

    @property
    def priority(self):
        """Numeric rank for sorting; lower means more important.

        NOTE(valerio.rico): less => more important
        """
        # Position in this most-important-first ordering *is* the priority.
        ranking = (
            Severity.CRITICAL,
            Severity.HIGH,
            Severity.MEDIUM,
            Severity.LOW,
            Severity.INFO,
        )
        return ranking.index(self)


class SecurityCheckStatus(str, enum.Enum):
    """External secbot check status."""

    NOT_STARTED = "not_started"
    IN_PROGRESS = "in_progress"
    ERROR = "error"  # an exception has happened.

    # All the data has been obtained.
    FAIL = "fail"  # we have vulnerabilities.
    SUCCESS = "success"  # we don't have vulnerabilities, or they are acceptable.


class ScanStatus(str, enum.Enum):
    """Internal (technical) secbot check status."""

    NEW = "new"
    IN_PROGRESS = "in_progress"
    SKIP = "skip"  # we decide to skip a scan for some reason.
    ERROR = "error"  # an exception has happened.
    DONE = "done"  # all the data has been obtained.
# --- app/secbot/settings.py ---
from pydantic import BaseSettings, PostgresDsn


class SecbotSettings(BaseSettings):
    """Secbot service settings, read from ``SECBOT_``-prefixed env vars."""

    postgres_dsn: PostgresDsn

    class Config:
        env_prefix = "secbot_"


settings = SecbotSettings()


# --- app/secbot/utils.py ---
import json
from importlib import import_module
from typing import Type

from pydantic import BaseModel

from app.secbot.schemas import PYDANTIC_CLS_PATH


def load_cls(path: str) -> Type[BaseModel]:
    """Import and return the class named by a dotted ``module.Class`` path.

    Note: returns the class object itself, not an instance (the original
    annotation ``-> BaseModel`` was incorrect).
    """
    module, class_name = path.rsplit(".", 1)
    return getattr(import_module(module), class_name)


def serializer(values):
    """Convert JSON-serializable objects back into original data types.

    Recursively walks ``values`` and rebuilds Pydantic models from dicts
    that carry the special ``PYDANTIC_CLS_PATH`` key, which is put there
    by the ``deserializer`` function before a Celery round-trip.
    """
    if isinstance(values, (list, tuple)):
        return tuple(serializer(value) for value in values)
    if isinstance(values, dict):
        if PYDANTIC_CLS_PATH in values:
            # Direct indexing is safe here: membership was just checked.
            cls = load_cls(values[PYDANTIC_CLS_PATH])
            return cls.parse_obj(values)
        return {key: serializer(value) for key, value in values.items()}
    return values


def deserializer(values):
    """Convert values into JSON-serializable objects.

    Pydantic models are dumped to dicts that keep the model's import path
    under ``PYDANTIC_CLS_PATH``, allowing ``serializer`` to rebuild them
    after a Celery task execution.
    """
    if isinstance(values, (list, tuple)):
        return tuple(deserializer(value) for value in values)
    if isinstance(values, dict):
        return {key: deserializer(value) for key, value in values.items()}
    # isinstance() is the idiomatic (and equivalent) form of the original
    # issubclass(type(values), BaseModel) check.
    if isinstance(values, BaseModel):
        return json.loads(values.json())
    return values


# --- app/settings.py (head) ---
import os.path
import pathlib
from typing import List, Optional, Set, Union

from pydantic import AnyUrl, SecretStr

BASE_PATH = pathlib.Path(os.path.dirname(__file__))


def flatten_settings_values(app_settings: "Settings") -> Set[Union[str, int]]:
    """Flatten all values from the settings recursively into a set.

    ``SecretStr`` leaves are unwrapped to their secret value; every other
    leaf is stringified.  (The original ``isinstance(data, object)``
    branch was a catch-all that made its trailing ``return data``
    unreachable, so in practice no ``int`` ever survives; the declared
    return type is kept for backward compatibility.)
    """

    def values(data):
        if hasattr(data, "dict"):
            return [values(value) for value in data.dict().values()]
        if isinstance(data, dict):
            return [values(value) for value in data.values()]
        if isinstance(data, list):
            return [values(item) for item in data]
        if isinstance(data, SecretStr):
            return data.get_secret_value()
        return str(data)

    def flatten(arg):
        if not isinstance(arg, list):
            return [arg]
        return [x for sub in arg for x in flatten(sub)]

    return set(flatten(values(app_settings)))


class GitlabConfig(BaseModel):
    """Connection and authentication parameters for one GitLab instance."""

    host: AnyUrl
    webhook_secret_token: SecretStr
    auth_token: SecretStr
    prefix: str


class Settings(BaseSettings):
    """Application settings read from the environment."""

    app_id: str = "security-bot"
    app_name: str = "Security Bot"
    app_host: str = "localhost"
    app_port: int = 5000

    debug: bool = False
    docs_enable: bool = True

    # Inputs
    gitlab_configs: List[GitlabConfig]

    # URLS
    sentry_dsn: Optional[AnyUrl] = None
    celery_broker_url: AnyUrl = "redis://redis:6379"
    celery_result_backend: AnyUrl = "redis://redis:6379"

    class Config:
        # Use this delimiter to split env variables
        # e.g. DEFECTDOJO__URL=123 -> {"defectdojo": {"url": "123"}}
        env_nested_delimiter = "__"


settings = Settings()
"Security Bot" 47 | app_host: str = "localhost" 48 | app_port: int = 5000 49 | 50 | debug: bool = False 51 | docs_enable: bool = True 52 | 53 | # Inputs 54 | gitlab_configs: List[GitlabConfig] 55 | 56 | # URLS 57 | sentry_dsn: Optional[AnyUrl] = None 58 | celery_broker_url: AnyUrl = "redis://redis:6379" 59 | celery_result_backend: AnyUrl = "redis://redis:6379" 60 | 61 | class Config: 62 | # Use this delimiter to split env variables 63 | # e.g. 64 | # DEFECTDOJO__URL=123 -> {"defectdojo": {"url": "123"}} 65 | env_nested_delimiter = "__" 66 | 67 | 68 | settings = Settings() 69 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '3.8' 2 | services: 3 | app: 4 | build: 5 | context: . 6 | dockerfile: Dockerfile 7 | ports: 8 | - "5000:5000" 9 | env_file: 10 | - .env.dev 11 | volumes: 12 | - ./app/:/opt/app/ 13 | depends_on: 14 | - redis 15 | command: start_app 16 | 17 | security_gateway: 18 | build: 19 | context: . 20 | dockerfile: Dockerfile 21 | ports: 22 | - "5001:5001" 23 | env_file: 24 | - .env.dev 25 | volumes: 26 | - ./app/:/opt/app/ 27 | depends_on: 28 | - redis 29 | command: start_security_gateway 30 | 31 | worker: 32 | build: 33 | context: . 
34 | dockerfile: Dockerfile 35 | env_file: 36 | - .env.dev 37 | volumes: 38 | - ./app/:/opt/app/ 39 | depends_on: 40 | - app 41 | - redis 42 | command: start_celery 43 | 44 | redis: 45 | image: redis:6.2-alpine 46 | ports: 47 | - "127.0.0.1:6379:6379" 48 | 49 | db: 50 | image: postgres:14 51 | restart: always 52 | command: ["postgres", "-c", "log_statement=all", "-c", "log_destination=stderr"] 53 | environment: 54 | POSTGRES_DB: secbot 55 | POSTGRES_USER: secbot 56 | POSTGRES_PASSWORD: foobar 57 | PGDATA: /data/db 58 | ports: 59 | - "127.0.0.1:5432:5432" 60 | volumes: 61 | - db:/data/db 62 | 63 | volumes: 64 | db: 65 | -------------------------------------------------------------------------------- /docker-entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | ### Uvicorn common settings ### 4 | UVICORN_PORT=${UVICORN_PORT:-"5000"} 5 | UVICORN_SECURITY_GATEWAY_PORT=${UVICORN_SECURITY_GATEWAY_PORT:-"5001"} 6 | UVICORN_RELOAD=${UVICORN_RELOAD:-"false"} 7 | UVICORN_LOG_LEVEL=${UVICORN_LOG_LEVEL:-"info"} 8 | 9 | ### Celery common settings ### 10 | CELERY_LOG_LEVEL=${CELERY_LOG_LEVEL:-"info"} 11 | CELERY_MIN_WORKERS=${CELERY_MIN_WORKERS:-"1"} 12 | CELERY_MAX_WORKERS=${CELERY_MAX_WORKERS:-"2"} 13 | 14 | if [ $UVICORN_RELOAD == "true" ]; then 15 | UVICORN_START_ARGS="${UVICORN_START_ARGS} --reload" 16 | else 17 | ### Not valid with --reload 18 | export UVICORN_WORKERS=${UVICORN_WORKERS:-"1"} 19 | fi 20 | 21 | function export_overriden_env() { 22 | # Override env variables with .env.override file 23 | # It will allow to have different env variables for local development and production 24 | FILE=".env.override" 25 | if [[ -f "$FILE" ]]; then 26 | export $(grep -v '^#' $FILE | xargs -d '\n') 27 | else 28 | echo "Override env $FILE not found" 29 | fi 30 | } 31 | 32 | function run_migrations() { 33 | echo "Running migration" 34 | alembic -c /opt/app/secbot/alembic.ini upgrade head 35 | } 36 | 37 | 
function run_app() {
    echo "Starting security bot app"
    export_overriden_env
    run_migrations
    # UVICORN_START_ARGS is intentionally unquoted: it may expand to several
    # CLI flags (e.g. "--reload") that must be word-split.
    uvicorn app.main:app --host 0.0.0.0 --port "${UVICORN_PORT}" --log-level "${UVICORN_LOG_LEVEL}" ${UVICORN_START_ARGS}
}

function run_security_gateway() {
    echo "Starting security bot security gateway"
    export_overriden_env
    uvicorn app.main:security_gateway_app --host 0.0.0.0 --port "${UVICORN_SECURITY_GATEWAY_PORT}" --log-level "${UVICORN_LOG_LEVEL}" ${UVICORN_START_ARGS}
}

function run_celery() {
    echo "Starting security bot celery worker"
    export_overriden_env
    celery -A app.main:celery_app worker --autoscale="${CELERY_MAX_WORKERS},${CELERY_MIN_WORKERS}" --loglevel "${CELERY_LOG_LEVEL}"
}

case $1 in
    "shell")
        bash
        ;;
    "start_app")
        run_app
        ;;
    "start_security_gateway")
        run_security_gateway
        ;;
    "start_celery")
        run_celery
        ;;
    "migrate")
        run_migrations
        ;;
    *)
        # Fix: the help text previously omitted the supported
        # start_security_gateway command.
        echo "Please use of next parameters to start:"
        echo "  help: help information"
        echo "  shell: run shell"
        echo "  migrate: run migrations"
        echo "  start_app: run app"
        echo "  start_security_gateway: run security gateway"
        echo "  start_celery: run celery"
        ;;
esac
12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 21 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # Configuration file for the Sphinx documentation builder. 2 | # 3 | import os 4 | import sys 5 | 6 | sys.path.insert(0, os.path.abspath("../")) 7 | 8 | # For the full list of built-in configuration values, see the documentation: 9 | # https://www.sphinx-doc.org/en/master/usage/configuration.html 10 | 11 | # -- Project information ----------------------------------------------------- 12 | # https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information 13 | 14 | project = "Security Bot" 15 | copyright = "2023, Exness" 16 | author = "Maxim Sokolov" 17 | version = "0.1.0" 18 | release = version 19 | 20 | # -- General configuration --------------------------------------------------- 21 | # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration 22 | 23 | extensions = [ 24 | "sphinx.ext.duration", 25 | "sphinx.ext.doctest", 26 | "sphinx.ext.autodoc", 27 | "sphinx.ext.autosummary", 28 | "sphinx.ext.intersphinx", 29 | ] 30 | 31 | templates_path = ["_templates"] 32 | exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] 33 | 34 | # -- Options for HTML output ------------------------------------------------- 35 | # https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output 36 | 37 | html_theme = "sphinx_rtd_theme" 38 | -------------------------------------------------------------------------------- /docs/getting-started.rst: 
-------------------------------------------------------------------------------- 1 | Getting Started 2 | =============== 3 | 4 | On this page, you will find all the necessary information to dive into the 5 | Security Bot (SecBot) project to 6 | 7 | * set up the service and the documentation generator it uses, 8 | * configure and integrate it with your service, 9 | * ensure communication via API, and 10 | * get familiar with the main concepts and limits. 11 | 12 | Yet, we provide detailed descriptions and insights on separate pages of this 13 | documentation. 14 | 15 | Prerequisites 16 | ------------- 17 | 18 | Since SecBot is a Python application running in a container on Kubernetes, 19 | make sure that the relevant components and their packages are installed and 20 | available in your local environment. 21 | 22 | **Kubernetes-related**: 23 | 24 | * `Docker `_ 25 | * `Kubernetes `_ 26 | * `Kubernetes Cluster `_ 27 | * Container registry, for example `Docker Hub `_ 28 | * and other Containerization tools, for example `Docker Compose `_ 29 | 30 | **Python-related**: 31 | 32 | * `Python `_ 33 | * `PIP `_ 34 | * `Poetry `_ 35 | 36 | Additionally, we employ 37 | 38 | * `Sphinx `_ as a documentation generator and 39 | * `draw.io `_ as a tool for creating schemes and diagrams. 40 | 41 | Deployment 42 | ---------- 43 | 44 | Follow these general steps to install and build the SecBot 45 | 46 | 1. Clone the repository: 47 | 48 | a. visit the project's repository to copy the URL under :guilabel:`Clone` 49 | b. run the ``git clone`` command to create a local copy 50 | 51 | .. code-block:: console 52 | 53 | $ git clone path/to/project.git 54 | 55 | 2. Build and run the SecBot service. 56 | 57 | .. 
code-block:: console 58 | 59 | $ docker-compose up --build 60 | 61 | Service Configuration 62 | --------------------- 63 | 64 | The ``/.env.dev`` file defines the location, keys, and other parameters of the 65 | internal and external units SecBot communicates with: queues, databases, 66 | :ref:`Inputs, Scans, Outputs, and Notifiers `. 67 | 68 | 1. Review this file and make the necessary changes to it based on your environment's peculiarities, such as the variables within ``GITLAB_CONFIGS``. 69 | 2. Save the file and rebuild the service. 70 | 71 | .. code-block:: console 72 | 73 | $ docker-compose up --build 74 | 75 | For test and other reasons, you can redefine any parameter in the 76 | ``/.env.override`` file for your separate sandbox environment. To do this, 77 | 78 | 1. Rename the original file of ``/.env.override.example`` accordingly 79 | 2. Specify the new values of existing variables there, for example modify ``DEFECTDOJO__TOKEN=defectdojo_token`` 80 | 81 | .. code-block:: text 82 | 83 | # Excerpt from .env.override 84 | 85 | ... 86 | DEFECTDOJO__TOKEN=my_personal_token 87 | ... 88 | 89 | 3. Save the file. 90 | 4. Rebuild the service. 91 | 92 | .. code-block:: console 93 | 94 | $ docker-compose up --build 95 | 96 | .. note:: 97 | 98 | For more detailed information on this topic, see 99 | :ref:`Configuration `. 100 | 101 | Workflow Configuration 102 | ---------------------- 103 | 104 | The ``/app/config.yml`` file defines the policies SecBot follows in its work: 105 | which Scans to launch to check input entities of a particular type, which 106 | Outputs to use to aggregate the Scans' results, and so on. You can take the 107 | original version and use it as is or update the file according to your needs. 108 | In the latter case, you will need stop and restart the service. 109 | 110 | .. code-block:: console 111 | 112 | $ docker-compose stop 113 | $ docker-compose up -d 114 | 115 | .. 
note:: 116 | 117 | For more detailed information on this topic, see 118 | :ref:`Configuration `. 119 | 120 | Integration 121 | ----------- 122 | 123 | Since SecBot communicates with different units via their respective APIs and 124 | triggers in response to specific input events, you are expected to 125 | 126 | * :ref:`obtain authorization with these units ` (Inputs, Outputs, and Notifiers) and 127 | * :ref:`specify triggers ` on your development and distribution platform (Input), such as system hooks (or webhooks) for GitLab. -------------------------------------------------------------------------------- /docs/glossary.rst: -------------------------------------------------------------------------------- 1 | .. _glossary_and_inventory: 2 | 3 | Glossary and Inventory 4 | ====================== 5 | 6 | On this page, you will find the 7 | 8 | * brief explanations of the units SecBot's architecture is based on, 9 | * lists of those supported and used in configuration, and 10 | * descriptions of other related concepts. 11 | 12 | .. glossary:: 13 | 14 | Input 15 | Input is a code repository, storage, or development or distribution 16 | platform, such as GitLab or Docker Registry, changes to which need 17 | extended security-related validation. 18 | 19 | +------------+----------------------------------------------+ 20 | | Input | Source | 21 | +============+==============================================+ 22 | | ``gitlab`` | `GitLab Docs `_ | 23 | +------------+----------------------------------------------+ 24 | 25 | Input entity 26 | Input entity (or input event) is a substantial amount of data 27 | (payload) to be validated. This data can be filtered out based on some 28 | configuration rules so that only part of it is actually checked. You 29 | can specify one or more of the event types we support (see the 30 | following table) and any other keys (JSON paths) of your choice. 
31 | 32 | +-------------------+-----------------------------------------------------------------------------------------------------------------------------------------+ 33 | | Event type | Source | 34 | +===================+=========================================================================================================================================+ 35 | | ``push`` | `Webhook events: push events `_ | 36 | +-------------------+-----------------------------------------------------------------------------------------------------------------------------------------+ 37 | | ``tag_push`` | `Webhook events: tag events `_ | 38 | +-------------------+-----------------------------------------------------------------------------------------------------------------------------------------+ 39 | | ``merge_request`` | `Webhook events: merge request events `_ | 40 | +-------------------+-----------------------------------------------------------------------------------------------------------------------------------------+ 41 | 42 | .. code-block:: text 43 | 44 | # Excerpt from /app/config.yml 45 | 46 | ... 47 | jobs: 48 | - name: Common merge request event 49 | rules: 50 | gitlab: # reserved name (Input) 51 | event_type: "merge_request" # one of the filtering parameters (Event type) 52 | ... 53 | 54 | Scan 55 | Scan is an external code analysis tool for applying the DevOps and 56 | security best practices to development and integration flows. It, for 57 | example, can detect hardcoded secrets (passwords, API keys, or tokens 58 | in Git repositories) or evaluate how certain changes might affect the 59 | overall quality or performance of your application. The result of its 60 | work is raw defect data to be passed to Outputs. 
61 | 62 | +--------------+--------------------------------------------------------------+ 63 | | Scan | Source | 64 | +==============+==============================================================+ 65 | | ``gitleaks`` | `Gitleaks on GitHub `_ | 66 | +--------------+--------------------------------------------------------------+ 67 | 68 | Output 69 | Output is an external defect management system specially integrated 70 | with SecBot to aggregate the check results from different Scans, merge 71 | the duplicates, and do other relevant things to prepare a normalized 72 | readable report for Notifiers. A piece of this report (problem, 73 | vulnerability, or any other security issue) is called "finding." 74 | 75 | +----------------+---------------------------------------------------------+ 76 | | Output | Source | 77 | +================+=========================================================+ 78 | | ``defectdojo`` | `DefectDojo on GitHub `_ | 79 | +----------------+---------------------------------------------------------+ 80 | 81 | Findings 82 | For findings, see "Output." 83 | 84 | Notifier 85 | Notifier (referred to as "notification" in the `/app/config.py` file) 86 | is an instant messaging program integrated with SecBot to inform 87 | interested parties of detected security issues (findings). 88 | 89 | +-----------+---------------------------------------+ 90 | | Notifier | Source | 91 | +===========+=======================================+ 92 | | ``slack`` | `Slack Website `_ | 93 | +-----------+---------------------------------------+ 94 | 95 | Job 96 | Job is three sets of tasks, at least one for a Scan, one for an 97 | Output, and one for a Notifier, to be executed sequentially to process 98 | a particular input entity type and yield the relevant results (findings). 
-------------------------------------------------------------------------------- /docs/images/defectdojo-user.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/exness/security-bot/ba35e061bdce3bbc0a345bb6ef736765cfecb923/docs/images/defectdojo-user.png -------------------------------------------------------------------------------- /docs/images/general-scheme.drawio.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/exness/security-bot/ba35e061bdce3bbc0a345bb6ef736765cfecb923/docs/images/general-scheme.drawio.png -------------------------------------------------------------------------------- /docs/images/gitlab-access-token.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/exness/security-bot/ba35e061bdce3bbc0a345bb6ef736765cfecb923/docs/images/gitlab-access-token.png -------------------------------------------------------------------------------- /docs/images/gitlab-system-hooks.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/exness/security-bot/ba35e061bdce3bbc0a345bb6ef736765cfecb923/docs/images/gitlab-system-hooks.png -------------------------------------------------------------------------------- /docs/images/job-graph.drawio: -------------------------------------------------------------------------------- 1 | 
7Vpbb5swFP41PHYCTGjymEu7qdPWSZV2eXTAEKsOzozTkP362WAIGLLQLAn0EikK/rCPzfk+H5vjGGC6TD4yuFp8oT4ihm36iQFmhm1blgXEj0S2GeICKwNChn1VaQc84D9IgaZC19hHcaUip5RwvKqCHo0i5PEKBhmjm2q1gJJqrysYohrw4EFSR39gny8ydDgwd/gnhMNF3rNlqjtLmFdWQLyAPt2UIHBjgCmjlGdXy2SKiHRe7pes3e2eu8XAGIp4mwbXX+/B95F960/dBYvux5/vNoMrZeUJkrV6YMN2ibA3CagwK/1KKEvvuL/XcqgTwwZB+ilDZsy3ymU5KA1cxSmhY1HBclZJWnXXyA3Vb9rhPAdiD0ayQX5DPNJcryywbIA5bFfGYItHFwoRhclmgTl6WEFP3tkIkQpswZdElCxxCeNVJpsAJ8iXw8aETIuHBqMREN+iiyfEOEr2MmAVvIoJgegScbYVVVQDRylBTQUrV8amJCwFLUqayjGopBwWhndsiwtF+DPIt3tIPl3z1Zr3hn4Lzr2Rdxr6df6djukHPaQ/ohwHGLHeCCAIhqYI6aeZ/3oAcDtWgNNDBRQBwO4F/+cMALbZMf+DPvMPXj3/oOsNgNtD/tXurx+z/4y7v86X/+sG8nU/Rv5YvkOJkkdgHGOv6j5G15Ev/TYzFeXqBc5yVLnky9v0I3DhL7b9qRqlhV+y8GGQF2dJ+eZsq0rZ8JBfe2HTGBBviJCFiB/a+daZKlExaKAixxgikOOn6jCa+FE9fKM4nU25cuw924DcREzXzEOqVfmNTjd0rRmyNEOZH2qGUrkUj328goZvWEHOq1CQDbSgpEebMyto9NIUhBLMf6qe5XWplSjtGsnC81WX0fYPf7VV5+B1qHOg7Zd0Q2dWZx5OO5DnETI7f1A8KE/3RSy/VlVVjn2sPPfl8y4lz6bk7UXk2VYyfYlAtWXtWIqL8qUott8pvjDFF19kmtKw74vMgb1Ni0UGdClPMNRUpQeOtvJ09B366MLybMoRv0cgo5S927c2tKZ4X1r4UhQ3pYF7G4FOGEk63YY6prZ71JM3L0Y+B7PIh7PDw1p2uDn1bKafFlnkOzqXdjPzgTAk3CHTyjjKDheEVjDfGv95yshRwqvzIOaMPqJc6xGNkJZUVhAkOIxEkaBAWpCJZexBMlbwEvs+2Ze/rs41AueITKD3GKa43vWzgu1x+e3SfCliYHnC6HHxZBlsq57CznYG8WNc42rnNevwuUB7pw7rTlUibThKTs9NznGY4LQ8TDgfFfVcsPtGqOgfF/WsKngjXOhr4Rm5EMXdf/eytXT3D0hw8xc= -------------------------------------------------------------------------------- /docs/images/job-graph.drawio.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/exness/security-bot/ba35e061bdce3bbc0a345bb6ef736765cfecb923/docs/images/job-graph.drawio.png -------------------------------------------------------------------------------- /docs/images/slack-dashboard.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/exness/security-bot/ba35e061bdce3bbc0a345bb6ef736765cfecb923/docs/images/slack-dashboard.png 
-------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | .. security-bot documentation master file, created by 2 | sphinx-quickstart on Thu May 18 11:38:18 2023. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | Welcome to Security Bot's documentation! 7 | ======================================== 8 | 9 | .. toctree:: 10 | :hidden: 11 | 12 | Welcome to SecBot 13 | getting-started 14 | configuration 15 | integration 16 | glossary 17 | 18 | The **Security Bot** (SecBot) is an orchestration service designed to 19 | communicate with various external units (see the :ref:`following scheme `) 20 | to detect security-related issues in developers' code. It can be implemented 21 | as an extra pipeline stage to be passed (along with linting, unit-tests, and 22 | build) or used in any other way. 23 | 24 | In its work, this service 25 | 26 | 1. receives from development and distribution platforms ("Inputs") information on changes that a software engineer contributes ("input entity") 27 | 2. based on its configuration and the input entity's type, draws up a processing plan ("job"; see an example of it :ref:`later `) 28 | 3. according to this plan, creates a necessary number of tasks for different units to be successively performed to 29 | 30 | a. scan the input entity with code analysis tools ("Scans") 31 | b. aggregate the found security issues from Scans, merge duplicates, and do other relevant things with defect management systems ("Outputs") 32 | c. inform the interested parties of the results by means of instant messaging ("Notifiers") 33 | 34 | 4. provides the "Input" platforms with the check results (status) on request. (Based on this status—"success" or "fail"—the changes being contributed are allowed or blocked.) 35 | 36 | .. _general-scheme-image: 37 | 38 | .. 
image:: /images/general-scheme.drawio.png 39 | :alt: General scheme 40 | 41 | .. note:: 42 | 43 | As the scheme suggests, SecBot is split into two instances running in 44 | separate containers to ensure high availability and distribute the load. 45 | One instance is responsible for receiving requests to process data, whereas 46 | the other is dedicated to providing the results of this processing. 47 | 48 | .. _job-graph-image: 49 | 50 | The following example of a processing plan, presented as a graph, implies that 51 | SecBot's job is configured to use two Scans, three Outputs, and one Notifier. 52 | The overall number of tasks is 11. 53 | 54 | .. image:: /images/job-graph.drawio.png 55 | :width: 350px 56 | :align: center 57 | :alt: Job graph -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=. 11 | set BUILDDIR=_build 12 | 13 | %SPHINXBUILD% >NUL 2>NUL 14 | if errorlevel 9009 ( 15 | echo. 16 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 17 | echo.installed, then set the SPHINXBUILD environment variable to point 18 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 19 | echo.may add the Sphinx directory to PATH. 20 | echo. 
21 | echo.If you don't have Sphinx installed, grab it from 22 | echo.https://www.sphinx-doc.org/ 23 | exit /b 1 24 | ) 25 | 26 | if "%1" == "" goto help 27 | 28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 29 | goto end 30 | 31 | :help 32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 33 | 34 | :end 35 | popd 36 | -------------------------------------------------------------------------------- /k8s/configmap.yml: -------------------------------------------------------------------------------- 1 | # jobs configuration yml 2 | apiVersion: v1 3 | kind: ConfigMap 4 | metadata: 5 | name: security-bot-configuration 6 | data: 7 | config.yml: | 8 | version: 1.0 9 | 10 | components: 11 | gitleaks: 12 | handler_name: "gitleaks" 13 | config: 14 | format: "json" 15 | defectdojo: 16 | handler_name: "defectdojo" 17 | env: 18 | url: "DEFECTDOJO__URL" 19 | secret_key: "DEFECTDOJO__TOKEN" 20 | user: "DEFECTDOJO__USER" 21 | lead_id: "DEFECTDOJO__USER_ID" 22 | slack: 23 | handler_name: "slack" 24 | config: 25 | render_limit: 10 26 | channels: 27 | - security-bot 28 | env: 29 | token: "SLACK_TOKEN" 30 | 31 | jobs: 32 | - name: Merge Request Job 33 | rules: 34 | gitlab: 35 | event_type: "merge_request" 36 | scans: 37 | - gitleaks 38 | outputs: 39 | - defectdojo 40 | notifications: 41 | - slack 42 | --- 43 | # environment variables 44 | apiVersion: v1 45 | kind: ConfigMap 46 | metadata: 47 | name: security-bot-configuration-envs 48 | data: 49 | SENTRY_DSN: "sentry_dsn" 50 | SECBOT_POSTGRES_DSN: "postgres_dsn" 51 | GITLAB_CONFIGS: '[{"host":"https://git.env.local/","webhook_secret_token":"SecretStr","auth_token":"SecretStr","prefix":"GIT_LOCAL"}]' 52 | DEFECTDOJO__URL: "https://defectdojo.env.local" 53 | DEFECTDOJO__TOKEN: "defectdojo_token" 54 | DEFECTDOJO__USER: "defectdojo_username" 55 | DEFECTDOJO__USER_ID: "10" 56 | SLACK_TOKEN: "slack_token" 57 | CELERY_BROKER_URL: "redis://security-bot-redis-svc:6379/0" 58 | CELERY_RESULT_BACKEND: 
"redis://security-bot-redis-svc:6379/0" 59 | SRE_METRIC_LABEL_TEAM: "SECURITY" 60 | SRE_METRIC_LABEL_SERVICE: "security-bot" 61 | TRACING_TAGS_HOST: "security-bot.env.local" 62 | TRACING_TAGS_CLUSTER: "security-local" 63 | -------------------------------------------------------------------------------- /k8s/deployment.yml: -------------------------------------------------------------------------------- 1 | # bot service account 2 | apiVersion: v1 3 | kind: ServiceAccount 4 | metadata: 5 | name: security-bot 6 | --- 7 | # security bot deployment 8 | apiVersion: apps/v1 9 | kind: Deployment 10 | metadata: 11 | name: security-bot 12 | labels: 13 | app: security-bot 14 | spec: 15 | replicas: 1 16 | selector: 17 | matchLabels: 18 | app: security-bot 19 | template: 20 | metadata: 21 | annotations: 22 | labels: 23 | app: security-bot 24 | spec: 25 | serviceAccountName: security-bot 26 | containers: 27 | - name: security-bot 28 | image: exness/security-bot:latest 29 | command: [ "docker-entrypoint.sh" ] 30 | args: [ "start_app" ] 31 | envFrom: 32 | - configMapRef: 33 | name: security-bot-configuration-envs 34 | volumeMounts: 35 | - name: security-bot-configuration-volume 36 | mountPath: /exness/app/config.yml 37 | subPath: config.yml 38 | ports: 39 | - containerPort: 5000 40 | protocol: TCP 41 | resources: 42 | limits: 43 | cpu: 0.5 44 | memory: 1Gi 45 | requests: 46 | cpu: 0.5 47 | memory: 1Gi 48 | volumes: 49 | - name: security-bot-configuration-volume 50 | configMap: 51 | name: security-bot-configuration 52 | --- 53 | # security gateway deployment 54 | apiVersion: apps/v1 55 | kind: Deployment 56 | metadata: 57 | name: security-bot-gw 58 | labels: 59 | app: security-bot-gw 60 | spec: 61 | replicas: 1 62 | selector: 63 | matchLabels: 64 | app: security-bot-gw 65 | template: 66 | metadata: 67 | labels: 68 | app: security-bot-gw 69 | spec: 70 | serviceAccountName: security-bot 71 | containers: 72 | - name: security-bot-gw 73 | image: exness/security-bot:latest 74 | 
command: [ "docker-entrypoint.sh" ] 75 | args: [ "start_security_gateway" ] 76 | envFrom: 77 | - configMapRef: 78 | name: security-bot-configuration-envs 79 | volumeMounts: 80 | - name: security-bot-configuration-volume 81 | mountPath: /exness/app/config.yml 82 | subPath: config.yml 83 | ports: 84 | - containerPort: 5001 85 | protocol: TCP 86 | resources: 87 | limits: 88 | cpu: 0.5 89 | memory: 0.5Gi 90 | requests: 91 | cpu: 0.5 92 | memory: 0.5Gi 93 | volumes: 94 | - name: security-bot-configuration-volume 95 | configMap: 96 | name: security-bot-configuration 97 | --- 98 | # celery deployment 99 | apiVersion: apps/v1 100 | kind: Deployment 101 | metadata: 102 | name: security-bot-celery 103 | labels: 104 | app: security-bot-celery 105 | spec: 106 | replicas: 4 107 | selector: 108 | matchLabels: 109 | app: security-bot-celery 110 | template: 111 | metadata: 112 | labels: 113 | app: security-bot-celery 114 | spec: 115 | serviceAccountName: security-bot 116 | containers: 117 | - name: security-bot-celery 118 | image: exness/security-bot:latest 119 | command: [ "docker-entrypoint.sh" ] 120 | args: [ "start_celery" ] 121 | envFrom: 122 | - configMapRef: 123 | name: security-bot-configuration-envs 124 | volumeMounts: 125 | - name: security-bot-configuration-volume 126 | mountPath: /exness/app/config.yml 127 | subPath: config.yml 128 | resources: 129 | limits: 130 | cpu: 1 131 | memory: 4Gi 132 | requests: 133 | cpu: 1 134 | memory: 4Gi 135 | volumes: 136 | - name: security-bot-configuration-volume 137 | configMap: 138 | name: security-bot-configuration 139 | --- 140 | # redis deployment 141 | apiVersion: apps/v1 142 | kind: Deployment 143 | metadata: 144 | name: security-bot-redis 145 | labels: 146 | app: security-bot-redis 147 | spec: 148 | replicas: 1 149 | selector: 150 | matchLabels: 151 | app: security-bot-redis 152 | template: 153 | metadata: 154 | labels: 155 | app: security-bot-redis 156 | spec: 157 | serviceAccountName: security-bot-redis 158 | containers: 159 
| - name: security-bot-redis 160 | image: "redis:6.2-alpine" 161 | resources: 162 | limits: 163 | cpu: 1 164 | memory: 1Gi 165 | requests: 166 | cpu: 1 167 | memory: 1Gi 168 | --- 169 | # redis service account 170 | apiVersion: v1 171 | kind: ServiceAccount 172 | metadata: 173 | name: security-bot-redis 174 | -------------------------------------------------------------------------------- /k8s/ingress.yml: -------------------------------------------------------------------------------- 1 | # ingress configuration 2 | apiVersion: extensions/v1beta1 3 | kind: Ingress 4 | metadata: 5 | name: security-bot-ingress 6 | annotations: 7 | nginx.ingress.kubernetes.io/proxy-connect-timeout: "10s" 8 | nginx.ingress.kubernetes.io/proxy-send-timeout: "300s" 9 | nginx.ingress.kubernetes.io/proxy-read-timeout: "300s" 10 | nginx.ingress.kubernetes.io/use-regex: "true" 11 | nginx.ingress.kubernetes.io/rewrite-target: /$2 12 | spec: 13 | tls: 14 | - hosts: 15 | - security-bot.env.local 16 | secretName: sec-bot-tls-certs 17 | rules: 18 | - host: security-bot.env.local 19 | http: 20 | paths: 21 | - path: /inputs(/|$)(.*) 22 | pathType: Prefix 23 | backend: 24 | serviceName: security-bot-svc 25 | servicePort: 5000 26 | - path: /gateway(/|$)(.*) 27 | pathType: Prefix 28 | backend: 29 | serviceName: security-bot-gw-svc 30 | servicePort: 5001 31 | -------------------------------------------------------------------------------- /k8s/service.yml: -------------------------------------------------------------------------------- 1 | # security bot service 2 | apiVersion: v1 3 | kind: Service 4 | metadata: 5 | name: security-bot-svc 6 | labels: 7 | app: security-bot-svc 8 | spec: 9 | type: NodePort 10 | ports: 11 | - name: security-bot 12 | protocol: TCP 13 | port: 5000 14 | selector: 15 | app: security-bot 16 | --- 17 | # security gateway service 18 | apiVersion: v1 19 | kind: Service 20 | metadata: 21 | name: security-bot-gw-svc 22 | labels: 23 | app: security-bot-gw-svc 24 | spec: 25 | type: 
NodePort 26 | ports: 27 | - name: security-bot-gw 28 | protocol: TCP 29 | port: 5001 30 | selector: 31 | app: security-bot-gw 32 | --- 33 | # redis service 34 | apiVersion: v1 35 | kind: Service 36 | metadata: 37 | name: security-bot-redis-svc 38 | labels: 39 | app: security-bot-redis-svc 40 | spec: 41 | type: NodePort 42 | ports: 43 | - name: security-bot-redis 44 | protocol: TCP 45 | port: 6379 46 | selector: 47 | app: security-bot-redis 48 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "security-bot" 3 | version = "0.1.0" 4 | description = "Security Bot" 5 | authors = [ 6 | "Ivan Zhirov ", 7 | "Valerio Rico <5669698+V-Rico@users.noreply.github.com>", 8 | ] 9 | license = "MIT" 10 | 11 | [tool.poetry.dependencies] 12 | python = "^3.9" 13 | fastapi = "^0.87.0" 14 | uvicorn = "^0.20.0" 15 | httpx = "^0.23.1" 16 | celery = "^5.2.7" 17 | redis = "^4.3.5" 18 | requests = "^2.28.1" 19 | toml = "^0.10.2" 20 | sentry-sdk = "^1.11.1" 21 | gitpython = "^3.1.29" 22 | yarl = "^1.8.1" 23 | pyyaml = "^6.0" 24 | slack-sdk = "^3.19.4" 25 | python-slugify = "^7.0.0" 26 | sqlalchemy = "^1.4.45" 27 | alembic = "^1.8.1" 28 | prometheus-client = "^0.15.0" 29 | asyncpg = "^0.27.0" 30 | aiohttp = "^3.8.3" 31 | boto3 = "^1.26.91" 32 | pydantic = "^1.10.9" 33 | 34 | [tool.poetry.group.dev.dependencies] 35 | pytest = "^6.2.5" 36 | tox = "^3.24.0" 37 | pytest-asyncio = "^0.16.0" 38 | pytest-cov = "^3.0.0" 39 | black = "^22.10.0" 40 | isort = "^5.10.1" 41 | bandit = "^1.7.0" 42 | mypy = "^0.991" 43 | flake8 = "^5.0.4" 44 | pyclean = "^2.0.0" 45 | packaging = "^21.0" 46 | respx = "^0.19.0" 47 | asynctest = "^0.13.0" 48 | types-requests = "^2.28.11.5" 49 | types-python-slugify = "^7.0.0.1" 50 | types-pyyaml = "^6.0.12.2" 51 | types-toml = "^0.10.8.1" 52 | pytest-dotenv = "^0.5.2" 53 | pytest-celery = "^0.0.0" 54 | types-sqlalchemy = 
"^1.4.53.19" 55 | greenlet = "^2.0.1" 56 | faker = "^16.7.0" 57 | moto = "^4.1.4" 58 | polyfactory = "^2.3.2" 59 | 60 | [build-system] 61 | requires = ["poetry>=1.1.0"] 62 | build-backend = "poetry.masonry.api" 63 | 64 | [tool.black] 65 | line-length = 87 66 | target-version = ['py39'] 67 | include = '\.pyi?$' 68 | exclude = 'build\/|buck-out\/|dist\/|_build\/|.git\/|.hg\/|.mypy_cache\/|.tox\/|.venv\/|proto\/|stubs\/|migrations\/' 69 | 70 | [tool.mypy] 71 | ignore_missing_imports = true 72 | warn_return_any = true 73 | warn_redundant_casts = true 74 | warn_unused_configs = true 75 | plugins = ['pydantic.mypy'] 76 | exclude = [ 77 | '.git', 78 | '.mypy_cache', 79 | '.pytest_cache', 80 | '.run', 81 | '.cache', 82 | '.venv', 83 | 'app/alembic' 84 | ] 85 | 86 | [tool.isort] 87 | profile = 'black' 88 | line_length = 87 89 | atomic = true 90 | skip = [ 91 | '.git', 92 | '.mypy_cache', 93 | '.pytest_cache', 94 | '.run', 95 | '.cache', 96 | '.venv', 97 | 'app/alembic' 98 | ] 99 | 100 | [tool.mypy-pytest] 101 | ignore_missing_imports = true 102 | 103 | [tool.pytest.ini_options] 104 | python_files = ['tests.py', 'test_*.py', '*_tests.py'] 105 | env_files = '.env.dev' 106 | addopts = ["--cov", "-vv"] 107 | -------------------------------------------------------------------------------- /static/security-bot-logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/exness/security-bot/ba35e061bdce3bbc0a345bb6ef736765cfecb923/static/security-bot-logo.png -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/exness/security-bot/ba35e061bdce3bbc0a345bb6ef736765cfecb923/tests/__init__.py -------------------------------------------------------------------------------- /tests/conftest.py: 
-------------------------------------------------------------------------------- 1 | import pytest 2 | from faker import Faker 3 | from pydantic import BaseModel, BaseSettings, SecretStr 4 | from starlette.testclient import TestClient 5 | 6 | from app.main import app # noqa 7 | from tests.units.common import get_test_root_directory 8 | 9 | 10 | @pytest.fixture 11 | def client(): 12 | with TestClient(app) as client: 13 | yield client 14 | 15 | 16 | @pytest.fixture 17 | def dir_tests(): 18 | return get_test_root_directory() 19 | 20 | 21 | @pytest.fixture(scope="session") 22 | def celery_config(): 23 | return {"task_always_eager": True} 24 | 25 | 26 | @pytest.fixture 27 | def example_settings(): 28 | class ExampleModel(BaseModel): 29 | title: str = "model" 30 | secret: SecretStr = "secret" 31 | 32 | class ExampleSettings(BaseSettings): 33 | title: str = "app" 34 | model: ExampleModel = ExampleModel() 35 | dictionary: dict = {"hello": "world"} 36 | 37 | return ExampleSettings() 38 | 39 | 40 | @pytest.fixture 41 | def faker(): 42 | return Faker() 43 | -------------------------------------------------------------------------------- /tests/fixtures/inputs/gitlab/merge_request_webhook.json: -------------------------------------------------------------------------------- 1 | { 2 | "object_kind": "merge_request", 3 | "event_type": "merge_request", 4 | "user": 5 | { 6 | "id": 1287, 7 | "name": "Ivan Zhirov", 8 | "username": "ivan.zhirov", 9 | "avatar_url": "https://git.env.local/uploads/-/system/user/avatar/1287/avatar.png", 10 | "email": "example@mail.env.local" 11 | }, 12 | "project": 13 | { 14 | "id": 6617, 15 | "name": "Example Project", 16 | "description": "", 17 | "web_url": "https://git.env.local/secbot-test-group/example-project", 18 | "avatar_url": null, 19 | "git_ssh_url": "git@git.env.local:secbot-test-group/example-project.git", 20 | "git_http_url": "https://git.env.local/secbot-test-group/example-project.git", 21 | "namespace": "secbot-test-group", 22 | 
"visibility_level": 0, 23 | "path_with_namespace": "secbot-test-group/example-project", 24 | "default_branch": "main", 25 | "ci_config_path": null, 26 | "homepage": "https://git.env.local/secbot-test-group/example-project", 27 | "url": "git@git.env.local:secbot-test-group/example-project.git", 28 | "ssh_url": "git@git.env.local:secbot-test-group/example-project.git", 29 | "http_url": "https://git.env.local/secbot-test-group/example-project.git" 30 | }, 31 | "object_attributes": 32 | { 33 | "assignee_id": null, 34 | "author_id": 1287, 35 | "created_at": "2023-02-02T09:44:56.081Z", 36 | "description": "", 37 | "head_pipeline_id": null, 38 | "id": 83588, 39 | "iid": 4, 40 | "last_edited_at": null, 41 | "last_edited_by_id": null, 42 | "merge_commit_sha": null, 43 | "merge_error": null, 44 | "merge_params": 45 | { 46 | "force_remove_source_branch": "1" 47 | }, 48 | "merge_status": "unchecked", 49 | "merge_user_id": null, 50 | "merge_when_pipeline_succeeds": false, 51 | "milestone_id": null, 52 | "source_branch": "duplicate-2", 53 | "source_project_id": 6617, 54 | "state_id": 1, 55 | "target_branch": "main", 56 | "target_project_id": 6617, 57 | "time_estimate": 0, 58 | "title": "Duplicate 2", 59 | "updated_at": "2023-02-07T13:51:22.027Z", 60 | "updated_by_id": null, 61 | "url": "https://git.env.local/secbot-test-group/example-project/-/merge_requests/4", 62 | "source": 63 | { 64 | "id": 6617, 65 | "name": "Example Project", 66 | "description": "", 67 | "web_url": "https://git.env.local/secbot-test-group/example-project", 68 | "avatar_url": null, 69 | "git_ssh_url": "git@git.env.local:secbot-test-group/example-project.git", 70 | "git_http_url": "https://git.env.local/secbot-test-group/example-project.git", 71 | "namespace": "secbot-test-group", 72 | "visibility_level": 0, 73 | "path_with_namespace": "secbot-test-group/example-project", 74 | "default_branch": "main", 75 | "ci_config_path": null, 76 | "homepage": "https://git.env.local/secbot-test-group/example-project", 77 
| "url": "git@git.env.local:secbot-test-group/example-project.git", 78 | "ssh_url": "git@git.env.local:secbot-test-group/example-project.git", 79 | "http_url": "https://git.env.local/secbot-test-group/example-project.git" 80 | }, 81 | "target": 82 | { 83 | "id": 6617, 84 | "name": "Example Project", 85 | "description": "", 86 | "web_url": "https://git.env.local/secbot-test-group/example-project", 87 | "avatar_url": null, 88 | "git_ssh_url": "git@git.env.local:secbot-test-group/example-project.git", 89 | "git_http_url": "https://git.env.local/secbot-test-group/example-project.git", 90 | "namespace": "secbot-test-group", 91 | "visibility_level": 0, 92 | "path_with_namespace": "secbot-test-group/example-project", 93 | "default_branch": "main", 94 | "ci_config_path": null, 95 | "homepage": "https://git.env.local/secbot-test-group/example-project", 96 | "url": "git@git.env.local:secbot-test-group/example-project.git", 97 | "ssh_url": "git@git.env.local:secbot-test-group/example-project.git", 98 | "http_url": "https://git.env.local/secbot-test-group/example-project.git" 99 | }, 100 | "last_commit": 101 | { 102 | "id": "ff01dde062e9d8b4636fc88d9d01017800422b05", 103 | "message": "Hey ya\n", 104 | "title": "Hey ya", 105 | "timestamp": "2023-02-07T15:51:13+02:00", 106 | "url": "https://git.env.local/secbot-test-group/example-project/-/commit/ff01dde062e9d8b4636fc88d9d01017800422b05", 107 | "author": 108 | { 109 | "name": "Ivan Zhirov", 110 | "email": "zhirovivan@gmail.com" 111 | } 112 | }, 113 | "work_in_progress": false, 114 | "total_time_spent": 0, 115 | "time_change": 0, 116 | "human_total_time_spent": null, 117 | "human_time_change": null, 118 | "human_time_estimate": null, 119 | "assignee_ids": 120 | [], 121 | "reviewer_ids": 122 | [], 123 | "labels": 124 | [], 125 | "state": "opened", 126 | "blocking_discussions_resolved": true, 127 | "first_contribution": false, 128 | "detailed_merge_status": "unchecked" 129 | }, 130 | "labels": 131 | [], 132 | "changes": 133 | {}, 
134 | "repository": 135 | { 136 | "name": "Example Project", 137 | "url": "git@git.env.local:secbot-test-group/example-project.git", 138 | "description": "", 139 | "homepage": "https://git.env.local/secbot-test-group/example-project" 140 | } 141 | } 142 | -------------------------------------------------------------------------------- /tests/fixtures/inputs/gitlab/push_webhook.json: -------------------------------------------------------------------------------- 1 | { 2 | "object_kind": "push", 3 | "event_name": "push", 4 | "before": "5823620546f7624a111148d1bf60833f9e02c475", 5 | "after": "23d5e3ab8e4dcda32a7acfb8343bfcfd12471e0a", 6 | "ref": "refs/heads/main", 7 | "checkout_sha": "23d5e3ab8e4dcda32a7acfb8343bfcfd12471e0a", 8 | "message": null, 9 | "user_id": 1287, 10 | "user_name": "Ivan Zhirov", 11 | "user_username": "ivan.zhirov", 12 | "user_email": "", 13 | "user_avatar": "https://git.env.local/uploads/-/system/user/avatar/1287/avatar.png", 14 | "project_id": 6617, 15 | "project": 16 | { 17 | "id": 6617, 18 | "name": "Example Project", 19 | "description": "", 20 | "web_url": "https://git.env.local/secbot-test-group/example-project", 21 | "avatar_url": null, 22 | "git_ssh_url": "git@git.env.local:secbot-test-group/example-project.git", 23 | "git_http_url": "https://git.env.local/secbot-test-group/example-project.git", 24 | "namespace": "secbot-test-group", 25 | "visibility_level": 0, 26 | "path_with_namespace": "secbot-test-group/example-project", 27 | "default_branch": "main", 28 | "ci_config_path": null, 29 | "homepage": "https://git.env.local/secbot-test-group/example-project", 30 | "url": "git@git.env.local:secbot-test-group/example-project.git", 31 | "ssh_url": "git@git.env.local:secbot-test-group/example-project.git", 32 | "http_url": "https://git.env.local/secbot-test-group/example-project.git" 33 | }, 34 | "commits": 35 | [ 36 | { 37 | "id": "23d5e3ab8e4dcda32a7acfb8343bfcfd12471e0a", 38 | "message": "Added python file", 39 | "title": "Added python 
file", 40 | "timestamp": "2022-12-07T10:20:12+00:00", 41 | "url": "https://git.env.local/secbot-test-group/example-project/-/commit/23d5e3ab8e4dcda32a7acfb8343bfcfd12471e0a", 42 | "author": 43 | { 44 | "name": "Valerio Rico", 45 | "email": "valerio.rico@mail.env.local" 46 | }, 47 | "added": ["app.py"], 48 | "modified": [], 49 | "removed": [] 50 | }, 51 | { 52 | "id": "5823620546f7624a111148d1bf60833f9e02c475", 53 | "message": "Update README.md", 54 | "title": "Update README.md", 55 | "timestamp": "2022-01-11T10:39:57+00:00", 56 | "url": "https://git.env.local/secbot-test-group/example-project/-/commit/5823620546f7624a111148d1bf60833f9e02c475", 57 | "author": 58 | { 59 | "name": "Valerio Rico", 60 | "email": "valerio.rico@mail.env.local" 61 | }, 62 | "added": 63 | [ 64 | "README.md" 65 | ], 66 | "modified": 67 | [], 68 | "removed": 69 | [] 70 | } 71 | ], 72 | "total_commits_count": 2, 73 | "push_options": {}, 74 | "repository": 75 | { 76 | "name": "Example Project", 77 | "url": "git@git.env.local:secbot-test-group/example-project.git", 78 | "description": "", 79 | "homepage": "https://git.env.local/secbot-test-group/example-project", 80 | "git_http_url": "https://git.env.local/secbot-test-group/example-project.git", 81 | "git_ssh_url": "git@git.env.local:secbot-test-group/example-project.git", 82 | "visibility_level": 0 83 | } 84 | } 85 | -------------------------------------------------------------------------------- /tests/fixtures/inputs/gitlab/tag_push_webhook.json: -------------------------------------------------------------------------------- 1 | { 2 | "object_kind": "tag_push", 3 | "event_name": "tag_push", 4 | "before": "0000000000000000000000000000000000000000", 5 | "after": "ff01dde062e9d8b4636fc88d9d01017800422b05", 6 | "ref": "refs/tags/yo-5", 7 | "checkout_sha": "ff01dde062e9d8b4636fc88d9d01017800422b05", 8 | "message": "", 9 | "user_id": 1287, 10 | "user_name": "Ivan Zhirov", 11 | "user_username": "ivan.zhirov", 12 | "user_email": "", 13 | 
"user_avatar": "https://git.env.local/uploads/-/system/user/avatar/1287/avatar.png", 14 | "project_id": 6617, 15 | "project": { 16 | "id": 6617, 17 | "name": "Example Project", 18 | "description": "", 19 | "web_url": "https://git.env.local/secbot-test-group/example-project", 20 | "avatar_url": null, 21 | "git_ssh_url": "git@git.env.local:secbot-test-group/example-project.git", 22 | "git_http_url": "https://git.env.local/secbot-test-group/example-project.git", 23 | "namespace": "secbot-test-group", 24 | "visibility_level": 0, 25 | "path_with_namespace": "secbot-test-group/example-project", 26 | "default_branch": "main", 27 | "ci_config_path": null, 28 | "homepage": "https://git.env.local/secbot-test-group/example-project", 29 | "url": "git@git.env.local:secbot-test-group/example-project.git", 30 | "ssh_url": "git@git.env.local:secbot-test-group/example-project.git", 31 | "http_url": "https://git.env.local/secbot-test-group/example-project.git" 32 | }, 33 | "commits": [ 34 | { 35 | "id": "ff01dde062e9d8b4636fc88d9d01017800422b05", 36 | "message": "Hey ya\n", 37 | "title": "Hey ya", 38 | "timestamp": "2023-02-07T15:51:13+02:00", 39 | "url": "https://git.env.local/secbot-test-group/example-project/-/commit/ff01dde062e9d8b4636fc88d9d01017800422b05", 40 | "author": { 41 | "name": "Ivan Zhirov", 42 | "email": "zhirovivan@gmail.com" 43 | }, 44 | "added": [], 45 | "modified": [], 46 | "removed": [] 47 | } 48 | ], 49 | "total_commits_count": 1, 50 | "push_options": {}, 51 | "repository": { 52 | "name": "Example Project", 53 | "url": "git@git.env.local:secbot-test-group/example-project.git", 54 | "description": "", 55 | "homepage": "https://git.env.local/secbot-test-group/example-project", 56 | "git_http_url": "https://git.env.local/secbot-test-group/example-project.git", 57 | "git_ssh_url": "git@git.env.local:secbot-test-group/example-project.git", 58 | "visibility_level": 0 59 | } 60 | } 61 | -------------------------------------------------------------------------------- 
/tests/fixtures/merge_request_hook__example_project.json: -------------------------------------------------------------------------------- 1 | { 2 | "object_kind": "merge_request", 3 | "event_type": "merge_request", 4 | "user": { 5 | "id": 1337, 6 | "name": "Firstname Lastname", 7 | "username": "not.exists", 8 | "avatar_url": "https://git.env.local/uploads/-/system/user/avatar/1337/avatar.png", 9 | "email": "not.exists@mail.env.local" 10 | }, 11 | "project": { 12 | "id": 1337, 13 | "name": "Example Project", 14 | "description": "", 15 | "web_url": "https://git.env.local/security/example-project", 16 | "avatar_url": null, 17 | "git_ssh_url": "git@git.env.local:security/example-project.git", 18 | "git_http_url": "https://git.env.local/security/example-project.git", 19 | "namespace": "security", 20 | "visibility_level": 0, 21 | "path_with_namespace": "security/example-project", 22 | "default_branch": "main", 23 | "ci_config_path": null, 24 | "homepage": "https://git.env.local/security/example-project", 25 | "url": "git@git.env.local:security/example-project.git", 26 | "ssh_url": "git@git.env.local:security/example-project.git", 27 | "http_url": "https://git.env.local/security/example-project.git" 28 | }, 29 | "object_attributes": { 30 | "assignee_id": null, 31 | "author_id": 1337, 32 | "created_at": "2011-11-11 11:33:37 UTC", 33 | "description": "test", 34 | "head_pipeline_id": null, 35 | "id": 48872, 36 | "iid": 1, 37 | "last_edited_at": null, 38 | "last_edited_by_id": null, 39 | "merge_commit_sha": null, 40 | "merge_error": null, 41 | "merge_params": { 42 | "force_remove_source_branch": "1" 43 | }, 44 | "merge_status": "unchecked", 45 | "merge_user_id": null, 46 | "merge_when_pipeline_succeeds": false, 47 | "milestone_id": null, 48 | "source_branch": "test", 49 | "source_project_id": 1337, 50 | "state_id": 1, 51 | "target_branch": "main", 52 | "target_project_id": 1337, 53 | "time_estimate": 0, 54 | "title": "Test", 55 | "updated_at": "2011-11-11 11:33:37 UTC", 56 | 
"updated_by_id": null, 57 | "url": "https://git.env.local/security/example-project/-/merge_requests/1", 58 | "source": { 59 | "id": 1337, 60 | "name": "Example Project", 61 | "description": "", 62 | "web_url": "https://git.env.local/security/example-project", 63 | "avatar_url": null, 64 | "git_ssh_url": "git@git.env.local:security/example-project.git", 65 | "git_http_url": "https://git.env.local/security/example-project.git", 66 | "namespace": "security", 67 | "visibility_level": 0, 68 | "path_with_namespace": "security/example-project", 69 | "default_branch": "main", 70 | "ci_config_path": null, 71 | "homepage": "https://git.env.local/security/example-project", 72 | "url": "git@git.env.local:security/example-project.git", 73 | "ssh_url": "git@git.env.local:security/example-project.git", 74 | "http_url": "https://git.env.local/security/example-project.git" 75 | }, 76 | "target": { 77 | "id": 1337, 78 | "name": "Example Project", 79 | "description": "", 80 | "web_url": "https://git.env.local/security/example-project", 81 | "avatar_url": null, 82 | "git_ssh_url": "git@git.env.local:security/example-project.git", 83 | "git_http_url": "https://git.env.local/security/example-project.git", 84 | "namespace": "security", 85 | "visibility_level": 0, 86 | "path_with_namespace": "security/example-project", 87 | "default_branch": "main", 88 | "ci_config_path": null, 89 | "homepage": "https://git.env.local/security/example-project", 90 | "url": "git@git.env.local:security/example-project.git", 91 | "ssh_url": "git@git.env.local:security/example-project.git", 92 | "http_url": "https://git.env.local/security/example-project.git" 93 | }, 94 | "last_commit": { 95 | "id": "5823620546f7624a111148d1bf60833f9e02c475", 96 | "message": "Update README.md", 97 | "title": "Update README.md", 98 | "timestamp": "2011-11-11T13:33:37+00:00", 99 | "url": "https://git.env.local/security/example-project/-/commit/5823620546f7624a111148d1bf60833f9e02c475", 100 | "author": { 101 | "name": "Firstname 
Lastname", 102 | "email": "not.exists@mail.env.local" 103 | } 104 | }, 105 | "work_in_progress": false, 106 | "total_time_spent": 0, 107 | "time_change": 0, 108 | "human_total_time_spent": null, 109 | "human_time_change": null, 110 | "human_time_estimate": null, 111 | "assignee_ids": [ 112 | 113 | ], 114 | "state": "opened" 115 | }, 116 | "labels": [ 117 | 118 | ], 119 | "changes": { 120 | }, 121 | "repository": { 122 | "name": "Example Project", 123 | "url": "git@git.env.local:security/example-project.git", 124 | "description": "", 125 | "homepage": "https://git.env.local/security/example-project" 126 | } 127 | } 128 | -------------------------------------------------------------------------------- /tests/fixtures/merge_request_hook__public_site.json: -------------------------------------------------------------------------------- 1 | { 2 | "object_kind": "merge_request", 3 | "event_type": "merge_request", 4 | "user": { 5 | "id": 662, 6 | "name": "Valerio Rico", 7 | "username": "valerio.rico", 8 | "avatar_url": "https://git.env.local/uploads/-/system/user/avatar/662/avatar.png", 9 | "email": "example@mail.env.local" 10 | }, 11 | "project": { 12 | "id": 164, 13 | "name": "Public Site", 14 | "description": "", 15 | "web_url": "https://git.env.local/public-site/backend", 16 | "avatar_url": null, 17 | "git_ssh_url": "git@git.env.local:public-site/backend.git", 18 | "git_http_url": "https://git.env.local/public-site/backend.git", 19 | "namespace": "public-site", 20 | "visibility_level": 0, 21 | "path_with_namespace": "public-site/backend", 22 | "default_branch": "master", 23 | "ci_config_path": null, 24 | "homepage": "https://git.env.local/public-site/backend", 25 | "url": "git@git.env.local:public-site/backend.git", 26 | "ssh_url": "git@git.env.local:public-site/backend.git", 27 | "http_url": "https://git.env.local/public-site/backend.git" 28 | }, 29 | "object_attributes": { 30 | "assignee_id": null, 31 | "author_id": 662, 32 | "created_at": "2022-01-11 11:49:13 UTC", 
33 | "description": "test", 34 | "head_pipeline_id": null, 35 | "id": 48872, 36 | "iid": 1, 37 | "last_edited_at": null, 38 | "last_edited_by_id": null, 39 | "merge_commit_sha": null, 40 | "merge_error": null, 41 | "merge_params": { 42 | "force_remove_source_branch": "1" 43 | }, 44 | "merge_status": "cannot_be_merged", 45 | "merge_user_id": null, 46 | "merge_when_pipeline_succeeds": false, 47 | "milestone_id": null, 48 | "source_branch": "61.58.0", 49 | "source_project_id": 164, 50 | "state_id": 1, 51 | "target_branch": "master", 52 | "target_project_id": 164, 53 | "time_estimate": 0, 54 | "title": "Test", 55 | "updated_at": "2022-01-11 11:49:13 UTC", 56 | "updated_by_id": null, 57 | "url": "https://git.env.local/public-site/backend/-/merge_requests/209", 58 | "source": { 59 | "id": 164, 60 | "name": "Public Site", 61 | "description": "", 62 | "web_url": "https://git.env.local/public-site/backend", 63 | "avatar_url": null, 64 | "git_ssh_url": "git@git.env.local:public-site/backend.git", 65 | "git_http_url": "https://git.env.local/public-site/backend.git", 66 | "namespace": "public-site", 67 | "visibility_level": 0, 68 | "path_with_namespace": "public-site/backend", 69 | "default_branch": "master", 70 | "ci_config_path": null, 71 | "homepage": "https://git.env.local/public-site/backend", 72 | "url": "git@git.env.local:public-site/backend.git", 73 | "ssh_url": "git@git.env.local:public-site/backend.git", 74 | "http_url": "https://git.env.local/public-site/backend.git" 75 | }, 76 | "target": { 77 | "id": 164, 78 | "name": "Public Site", 79 | "description": "", 80 | "web_url": "https://git.env.local/public-site/backend", 81 | "avatar_url": null, 82 | "git_ssh_url": "git@git.env.local:public-site/backend.git", 83 | "git_http_url": "https://git.env.local/public-site/backend.git", 84 | "namespace": "public-site", 85 | "visibility_level": 0, 86 | "path_with_namespace": "public-site/backend", 87 | "default_branch": "master", 88 | "ci_config_path": null, 89 | "homepage": 
"https://git.env.local/public-site/backend", 90 | "url": "git@git.env.local:public-site/backend.git", 91 | "ssh_url": "git@git.env.local:public-site/backend.git", 92 | "http_url": "https://git.env.local/public-site/backend.git" 93 | }, 94 | "last_commit": { 95 | "id": "6cfe17a9b11dfbe179ab593a4c7bf14deffb713f", 96 | "message": "Update version", 97 | "title": "Update version", 98 | "timestamp": "2022-01-11T10:39:57+00:00", 99 | "url": "https://git.env.local/public-site/backend/-/commit/6cfe17a9b11dfbe179ab593a4c7bf14deffb713f", 100 | "author": { 101 | "name": "Valerio Rico", 102 | "email": "valerio.rico@mail.env.local" 103 | } 104 | }, 105 | "work_in_progress": false, 106 | "total_time_spent": 0, 107 | "time_change": 0, 108 | "human_total_time_spent": null, 109 | "human_time_change": null, 110 | "human_time_estimate": null, 111 | "assignee_ids": [ 112 | 113 | ], 114 | "state": "opened" 115 | }, 116 | "labels": [ 117 | 118 | ], 119 | "changes": { 120 | }, 121 | "repository": { 122 | "name": "Public Site", 123 | "url": "git@git.env.local:public-site/backend.git", 124 | "description": "", 125 | "homepage": "https://git.env.local/public-site/backend" 126 | } 127 | } 128 | -------------------------------------------------------------------------------- /tests/fixtures/merge_request_hook__security_bot.json: -------------------------------------------------------------------------------- 1 | { 2 | "object_kind": "merge_request", 3 | "event_type": "merge_request", 4 | "user": { 5 | "id": 662, 6 | "name": "Valerio Rico", 7 | "username": "valerio.rico", 8 | "avatar_url": "https://git.env.local/uploads/-/system/user/avatar/662/avatar.png", 9 | "email": "example@mail.env.local" 10 | }, 11 | "project": { 12 | "id": 4809, 13 | "name": "Example Project", 14 | "description": "", 15 | "web_url": "https://git.env.local/security/example-project", 16 | "avatar_url": null, 17 | "git_ssh_url": "git@git.env.local:security/example-project.git", 18 | "git_http_url": 
"https://git.env.local/security/example-project.git", 19 | "namespace": "security", 20 | "visibility_level": 0, 21 | "path_with_namespace": "security/example-project", 22 | "default_branch": "main", 23 | "ci_config_path": null, 24 | "homepage": "https://git.env.local/security/example-project", 25 | "url": "git@git.env.local:security/example-project.git", 26 | "ssh_url": "git@git.env.local:security/example-project.git", 27 | "http_url": "https://git.env.local/security/example-project.git" 28 | }, 29 | "object_attributes": { 30 | "assignee_id": null, 31 | "author_id": 662, 32 | "created_at": "2022-01-11 11:49:13 UTC", 33 | "description": "test", 34 | "head_pipeline_id": null, 35 | "id": 48872, 36 | "iid": 1, 37 | "last_edited_at": null, 38 | "last_edited_by_id": null, 39 | "merge_commit_sha": null, 40 | "merge_error": null, 41 | "merge_params": { 42 | "force_remove_source_branch": "1" 43 | }, 44 | "merge_status": "cannot_be_merged", 45 | "merge_user_id": null, 46 | "merge_when_pipeline_succeeds": false, 47 | "milestone_id": null, 48 | "source_branch": "test", 49 | "source_project_id": 4809, 50 | "state_id": 1, 51 | "target_branch": "main", 52 | "target_project_id": 4809, 53 | "time_estimate": 0, 54 | "title": "Test", 55 | "updated_at": "2022-01-11 11:49:13 UTC", 56 | "updated_by_id": null, 57 | "url": "https://git.env.local/security/example-project/-/merge_requests/1", 58 | "source": { 59 | "id": 4809, 60 | "name": "Example Project", 61 | "description": "", 62 | "web_url": "https://git.env.local/security/example-project", 63 | "avatar_url": null, 64 | "git_ssh_url": "git@git.env.local:security/example-project.git", 65 | "git_http_url": "https://git.env.local/security/example-project.git", 66 | "namespace": "security", 67 | "visibility_level": 0, 68 | "path_with_namespace": "security/example-project", 69 | "default_branch": "main", 70 | "ci_config_path": null, 71 | "homepage": "https://git.env.local/security/example-project", 72 | "url": 
"git@git.env.local:security/example-project.git", 73 | "ssh_url": "git@git.env.local:security/example-project.git", 74 | "http_url": "https://git.env.local/security/example-project.git" 75 | }, 76 | "target": { 77 | "id": 4809, 78 | "name": "Example Project", 79 | "description": "", 80 | "web_url": "https://git.env.local/security/example-project", 81 | "avatar_url": null, 82 | "git_ssh_url": "git@git.env.local:security/example-project.git", 83 | "git_http_url": "https://git.env.local/security/example-project.git", 84 | "namespace": "security", 85 | "visibility_level": 0, 86 | "path_with_namespace": "security/example-project", 87 | "default_branch": "main", 88 | "ci_config_path": null, 89 | "homepage": "https://git.env.local/security/example-project", 90 | "url": "git@git.env.local:security/example-project.git", 91 | "ssh_url": "git@git.env.local:security/example-project.git", 92 | "http_url": "https://git.env.local/security/example-project.git" 93 | }, 94 | "last_commit": { 95 | "id": "5823620546f7624a111148d1bf60833f9e02c475", 96 | "message": "Update README.md", 97 | "title": "Update README.md", 98 | "timestamp": "2022-01-11T10:39:57+00:00", 99 | "url": "https://git.env.local/security/example-project/-/commit/5823620546f7624a111148d1bf60833f9e02c475", 100 | "author": { 101 | "name": "Valerio Rico", 102 | "email": "valerio.rico@mail.env.local" 103 | } 104 | }, 105 | "work_in_progress": false, 106 | "total_time_spent": 0, 107 | "time_change": 0, 108 | "human_total_time_spent": null, 109 | "human_time_change": null, 110 | "human_time_estimate": null, 111 | "assignee_ids": [ 112 | 113 | ], 114 | "state": "opened" 115 | }, 116 | "labels": [ 117 | 118 | ], 119 | "changes": { 120 | }, 121 | "repository": { 122 | "name": "Example Project", 123 | "url": "git@git.env.local:security/example-project.git", 124 | "description": "", 125 | "homepage": "https://git.env.local/security/example-project" 126 | } 127 | } 128 | 
-------------------------------------------------------------------------------- /tests/fixtures/worker_outputs/gitleaks.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "Description": "RSA private key", 4 | "StartLine": 1, 5 | "EndLine": 1, 6 | "StartColumn": 8, 7 | "EndColumn": 38, 8 | "Match": "-----BEGIN RSA PRIVATE KEY-----", 9 | "Secret": "-----BEGIN RSA PRIVATE KEY-----", 10 | "File": "leaks.py", 11 | "Commit": "9fd386858fea24cf4ed693a7ee6f553e3f7a4c49", 12 | "Entropy": 0, 13 | "Author": "Valerio Rico", 14 | "Email": "valerio.rico@mail.env.local", 15 | "Date": "2022-01-17T09:59:57Z", 16 | "Message": "Add new file", 17 | "Tags": [], 18 | "RuleID": "RSA-PK" 19 | } 20 | ] 21 | -------------------------------------------------------------------------------- /tests/integration/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/exness/security-bot/ba35e061bdce3bbc0a345bb6ef736765cfecb923/tests/integration/__init__.py -------------------------------------------------------------------------------- /tests/integration/notifications/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/exness/security-bot/ba35e061bdce3bbc0a345bb6ef736765cfecb923/tests/integration/notifications/__init__.py -------------------------------------------------------------------------------- /tests/integration/notifications/test_slack.py: -------------------------------------------------------------------------------- 1 | from unittest import mock 2 | 3 | import pytest 4 | 5 | from app.secbot.inputs.gitlab.handlers.slack import send_message 6 | 7 | 8 | @pytest.mark.asyncio 9 | @mock.patch( 10 | "app.secbot.inputs.gitlab.handlers.slack.api.AsyncWebClient", new=mock.AsyncMock 11 | ) 12 | async def test_sending_message_without_token(faker): 13 | channel = faker.pystr() 14 | payload = 
faker.pydict() 15 | token = None 16 | 17 | with pytest.raises(AssertionError): 18 | await send_message(channel=channel, payload=payload, token=token) 19 | 20 | 21 | @pytest.mark.asyncio 22 | @mock.patch( 23 | "app.secbot.inputs.gitlab.handlers.slack.api.AsyncWebClient", new=mock.AsyncMock 24 | ) 25 | async def test_sending_message_without_channel(faker): 26 | channel = None 27 | payload = faker.pydict() 28 | token_value = faker.pystr() 29 | token = mock.Mock(get_secret_value=mock.Mock(return_value=token_value)) 30 | 31 | with pytest.raises(AssertionError): 32 | await send_message(channel=channel, payload=payload, token=token) 33 | 34 | 35 | @pytest.mark.asyncio 36 | @mock.patch( 37 | "app.secbot.inputs.gitlab.handlers.slack.api.AsyncWebClient", new=mock.AsyncMock 38 | ) 39 | async def test_sending_message_without_payload(faker): 40 | channel = faker.pystr() 41 | payload = None 42 | token_value = faker.pystr() 43 | token = mock.Mock(get_secret_value=mock.Mock(return_value=token_value)) 44 | 45 | with pytest.raises(AssertionError): 46 | await send_message(channel=channel, payload=payload, token=token) 47 | 48 | 49 | @pytest.mark.asyncio 50 | @mock.patch( 51 | "app.secbot.inputs.gitlab.handlers.slack.api.AsyncWebClient.chat_postMessage" 52 | ) 53 | async def test_sending_message(post_message_mock, faker): 54 | channel = faker.pystr() 55 | payload = faker.pydict() 56 | token_value = faker.pystr() 57 | token = mock.Mock(get_secret_value=mock.Mock(return_value=token_value)) 58 | 59 | await send_message(channel=channel, payload=payload, token=token) 60 | 61 | post_message_mock.assert_called_once_with( 62 | channel=channel, 63 | blocks=payload, 64 | ) 65 | -------------------------------------------------------------------------------- /tests/integration/test_outputs.py: -------------------------------------------------------------------------------- 1 | from unittest import mock 2 | 3 | import pytest 4 | 5 | from app.secbot.inputs.gitlab.handlers.defectdojo.services 
import ( 6 | OutputResultObject, 7 | send_result, 8 | ) 9 | from tests.units.factories import create_merge_request_webhook__security_bot 10 | 11 | 12 | @pytest.mark.asyncio 13 | @pytest.mark.parametrize("fixture_file_path", ["gitleaks.json"]) 14 | @mock.patch("app.secbot.inputs.gitlab.handlers.defectdojo.services.dd_upload") 15 | @mock.patch("app.secbot.inputs.gitlab.handlers.defectdojo.services.dd_get_test") 16 | @mock.patch("app.secbot.inputs.gitlab.handlers.defectdojo.services.dd_prepare") 17 | @mock.patch( 18 | "app.secbot.inputs.gitlab.handlers.defectdojo.services.dd_findings_by_test" 19 | ) 20 | @mock.patch("app.secbot.inputs.gitlab.handlers.defectdojo.services.asyncio.sleep") 21 | async def test_defectdojo_output( 22 | # TODO(ivan.zhirov): mock tests with time.sleep 23 | sleep, 24 | dd_findings_by_test, 25 | dd_prepare, 26 | dd_get_test, 27 | dd_upload, 28 | dir_tests, 29 | fixture_file_path, 30 | ): 31 | data = create_merge_request_webhook__security_bot() 32 | with open( 33 | dir_tests / "fixtures/worker_outputs" / fixture_file_path 34 | ) as output_data: 35 | result = OutputResultObject( 36 | data=data, 37 | worker=fixture_file_path.split(".")[0], 38 | result=output_data.read(), 39 | ) 40 | 41 | dd_get_test.return_value = {"percent_complete": 100} 42 | 43 | # TODO(ivan.zhirov): mock the response from the server 44 | dd_findings_by_test.return_value = {"count": 1, "results": []} 45 | 46 | credentials = mock.Mock() 47 | 48 | assert await send_result(credentials=credentials, output_result=result) 49 | 50 | dd_prepare.assert_called_once() 51 | dd_upload.assert_called_once() 52 | dd_get_test.assert_called() 53 | dd_findings_by_test.assert_called_once() 54 | -------------------------------------------------------------------------------- /tests/pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | log_cli=true 3 | log_cli_level=DEBUG 
-------------------------------------------------------------------------------- /tests/test_logger.py: -------------------------------------------------------------------------------- 1 | from app import format_extra 2 | 3 | 4 | def test_format_with_extra(faker): 5 | message = faker.pystr() 6 | extra = {"hello": "app"} 7 | assert format_extra(message, extra) == f"{message} EXTRA: hello=app" 8 | 9 | 10 | def test_format_without_extra(faker): 11 | message = faker.pystr() 12 | assert format_extra(message, None) == f"{message}" 13 | -------------------------------------------------------------------------------- /tests/test_sentry.py: -------------------------------------------------------------------------------- 1 | from app.main import sanitize_event_values 2 | from app.settings import flatten_settings_values 3 | 4 | 5 | def test_sanitize_event_values(example_settings): 6 | example_event = { 7 | "exceptions": { 8 | "values": [ 9 | "Hello, world", 10 | "Hello, app", 11 | ], 12 | "val": "Today in app we rule the world", 13 | } 14 | } 15 | values = flatten_settings_values(example_settings) 16 | assert sanitize_event_values( 17 | value=example_event, 18 | # Sensitive words: world, app, model, secret 19 | sensitive_values=values, 20 | ) == { 21 | "exceptions": { 22 | "values": ["Hello, [Redacted]", "Hello, [Redacted]"], 23 | "val": "Today in [Redacted] we rule the [Redacted]", 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /tests/test_settings.py: -------------------------------------------------------------------------------- 1 | from app.settings import flatten_settings_values 2 | 3 | 4 | def test_flatten_settings_values(example_settings): 5 | values = flatten_settings_values(example_settings) 6 | assert values == {"app", "model", "secret", "world"} 7 | -------------------------------------------------------------------------------- /tests/units/__init__.py: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/exness/security-bot/ba35e061bdce3bbc0a345bb6ef736765cfecb923/tests/units/__init__.py -------------------------------------------------------------------------------- /tests/units/common.py: -------------------------------------------------------------------------------- 1 | import pathlib 2 | 3 | 4 | def get_test_root_directory(): 5 | return pathlib.Path(__file__).parent.parent.resolve() 6 | -------------------------------------------------------------------------------- /tests/units/config/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/exness/security-bot/ba35e061bdce3bbc0a345bb6ef736765cfecb923/tests/units/config/__init__.py -------------------------------------------------------------------------------- /tests/units/config/test_config_v1_parser.py: -------------------------------------------------------------------------------- 1 | from unittest import mock 2 | 3 | import pytest 4 | 5 | from app.secbot.config import SecbotConfigComponent, WorkflowJob, config_parser 6 | from app.secbot.exceptions import SecbotConfigError, SecbotConfigMissingEnv 7 | 8 | 9 | def test_config_v1_parser_empty_components(): 10 | with pytest.raises(SecbotConfigError): 11 | config_parser({"version": "1.0", "components": {}, "jobs": []}) 12 | 13 | 14 | def test_config_v1_parser_empty_jobs(): 15 | with pytest.raises(SecbotConfigError): 16 | config_parser( 17 | { 18 | "version": "1.0", 19 | "components": { 20 | "component": { 21 | "handler_name": "handler", 22 | "config": {"key": "value"}, 23 | }, 24 | }, 25 | "jobs": [], 26 | } 27 | ) 28 | 29 | 30 | @mock.patch("app.secbot.config.os.getenv") 31 | def test_config_v1_parser_missing_env(getenv_mock): 32 | getenv_mock.side_effect = AttributeError 33 | with pytest.raises(SecbotConfigMissingEnv): 34 | config_parser( 35 | { 36 | "version": "1.0", 37 | 
"components": { 38 | "component": { 39 | "handler_name": "handler", 40 | "env": { 41 | "key1": "value1", 42 | }, 43 | }, 44 | }, 45 | "jobs": [ 46 | { 47 | "name": "job1", 48 | "rules": {"input": {"some": "rule"}}, 49 | "scans": ["component"], 50 | "outputs": ["component"], 51 | "notifications": [], 52 | } 53 | ], 54 | } 55 | ) 56 | 57 | 58 | @mock.patch("app.secbot.config.os.getenv") 59 | def test_config_v1_parser(getenv, faker): 60 | example_env_value = faker.pystr() 61 | getenv.return_value = example_env_value 62 | 63 | handler_1_name = faker.pystr() 64 | handler_2_name = faker.pystr() 65 | config_2 = faker.pydict() 66 | input_name = faker.pystr() 67 | 68 | obj = { 69 | "version": "1.0", 70 | "components": { 71 | "component1": { 72 | "handler_name": handler_1_name, 73 | "env": {"key": "value"}, 74 | }, 75 | "component2": { 76 | "handler_name": handler_2_name, 77 | "config": config_2, 78 | }, 79 | }, 80 | "jobs": [ 81 | { 82 | "name": "job1", 83 | "rules": {input_name: {"some": "rule"}}, 84 | "scans": ["component1"], 85 | "outputs": ["component2"], 86 | "notifications": [], 87 | }, 88 | { 89 | "name": "job2", 90 | "rules": {input_name: {"some": "rule"}}, 91 | "scans": ["component2"], 92 | "outputs": ["component1"], 93 | "notifications": [], 94 | }, 95 | ], 96 | } 97 | result = config_parser(obj) 98 | 99 | # Check that getenv was called 100 | getenv.assert_called_once_with("value") 101 | 102 | assert input_name in result 103 | assert len(result[input_name]) == 2 104 | 105 | assert all(isinstance(item, WorkflowJob) for item in result[input_name]) 106 | 107 | assert result[input_name][0].name == "job1" 108 | assert result[input_name][1].name == "job2" 109 | 110 | assert result[input_name][0].scans == [ 111 | SecbotConfigComponent( 112 | handler_name=handler_1_name, 113 | name="component1", 114 | config=None, 115 | env={"key": example_env_value}, 116 | ) 117 | ] 118 | assert result[input_name][0].outputs == [ 119 | SecbotConfigComponent( 120 | 
handler_name=handler_2_name, 121 | name="component2", 122 | config=config_2, 123 | env=None, 124 | ) 125 | ] 126 | 127 | assert result[input_name][1].scans == [ 128 | SecbotConfigComponent( 129 | handler_name=handler_2_name, 130 | name="component2", 131 | config=config_2, 132 | env=None, 133 | ) 134 | ] 135 | assert result[input_name][1].outputs == [ 136 | SecbotConfigComponent( 137 | handler_name=handler_1_name, 138 | name="component1", 139 | config=None, 140 | env={"key": example_env_value}, 141 | ) 142 | ] 143 | -------------------------------------------------------------------------------- /tests/units/config/test_secbot_config_version.py: -------------------------------------------------------------------------------- 1 | from unittest import mock 2 | 3 | import pytest 4 | 5 | from app.secbot.config import SecbotConfig 6 | from app.secbot.exceptions import SecbotConfigError 7 | 8 | 9 | def test_missing_version_key(): 10 | config_obj = {"components": {}, "jobs": []} 11 | with pytest.raises(SecbotConfigError): 12 | SecbotConfig(config_obj) 13 | 14 | 15 | def test_unsupported_version(): 16 | config_obj = {"version": "invalid_version", "components": {}, "jobs": []} 17 | with pytest.raises(SecbotConfigError): 18 | SecbotConfig(config_obj) 19 | 20 | 21 | def test_valid_config(): 22 | version = "1.0" 23 | mock_parser = mock.MagicMock() 24 | with mock.patch.dict(SecbotConfig.VERSIONS_PARSER, {version: mock_parser}): 25 | config_obj = {"version": version, "components": {}, "jobs": []} 26 | SecbotConfig(config_obj) 27 | mock_parser.assert_called_once_with(config_obj) 28 | -------------------------------------------------------------------------------- /tests/units/factories.py: -------------------------------------------------------------------------------- 1 | import json 2 | 3 | from app.secbot.inputs.gitlab.schemas import MergeRequestWebhookModel 4 | from app.secbot.inputs.gitlab.schemas.base import Project 5 | from tests.units.common import get_test_root_directory 
6 | 7 | 8 | def create_project__example(): 9 | project = Project( 10 | id=4809, 11 | name="Example Project", 12 | web_url="https://git.env.local/security/example-project/", 13 | git_ssh_url="git@git.env.local:security/example-project.git", 14 | git_http_url="https://git.env.local/security/example-project.git", 15 | namespace="security", 16 | path_with_namespace="security/example-project", 17 | ) 18 | return project 19 | 20 | 21 | def create_project__security(): 22 | project = Project( 23 | id=4801, 24 | name="Security Bot", 25 | web_url="https://git.env.local/security/security-bot/", 26 | git_ssh_url="git@git.env.local:security/security-bot.git", 27 | git_http_url="https://git.env.local/security/security-bot.git", 28 | namespace="security", 29 | path_with_namespace="security/security-bot", 30 | ) 31 | return project 32 | 33 | 34 | def create_project__public_site(): 35 | project = Project( 36 | id=164, 37 | name="Public Site", 38 | web_url="https://git.env.local/public-site/backend/", 39 | git_ssh_url="git@git.env.local:public-site/backend.git", 40 | git_http_url="https://git.env.local/public-site/backend.git", 41 | namespace="public-site", 42 | path_with_namespace="public-site/backend", 43 | ) 44 | return project 45 | 46 | 47 | def create_merge_request_webhook__example_project(): 48 | with open( 49 | get_test_root_directory() 50 | / "fixtures" 51 | / "merge_request_hook__example_project.json" 52 | ) as json_data: 53 | return MergeRequestWebhookModel(**json.load(json_data), raw={}) 54 | 55 | 56 | def create_merge_request_webhook__security_bot(): 57 | with open( 58 | get_test_root_directory() 59 | / "fixtures" 60 | / "merge_request_hook__security_bot.json" 61 | ) as json_data: 62 | return MergeRequestWebhookModel(**json.load(json_data), raw={}) 63 | 64 | 65 | def create_merge_request_webhook__public_site(): 66 | with open( 67 | get_test_root_directory() / "fixtures" / "merge_request_hook__public_site.json" 68 | ) as json_data: 69 | return 
MergeRequestWebhookModel(**json.load(json_data), raw={}) 70 | -------------------------------------------------------------------------------- /tests/units/inputs/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/exness/security-bot/ba35e061bdce3bbc0a345bb6ef736765cfecb923/tests/units/inputs/__init__.py -------------------------------------------------------------------------------- /tests/units/inputs/config/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/exness/security-bot/ba35e061bdce3bbc0a345bb6ef736765cfecb923/tests/units/inputs/config/__init__.py -------------------------------------------------------------------------------- /tests/units/inputs/config/test_config_utils.py: -------------------------------------------------------------------------------- 1 | from app.secbot.config import get_jsonpath_value 2 | 3 | 4 | def test_config_get_jsonpath_value(): 5 | meaning_of_the_universe = 42 6 | data = {"base": {"deep": {"deeper": {"here": meaning_of_the_universe}}}} 7 | assert get_jsonpath_value(data, "base.deep.deeper.here") == meaning_of_the_universe 8 | -------------------------------------------------------------------------------- /tests/units/inputs/config/test_parse_jobs.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | import yaml 3 | 4 | from app.secbot.config import SecbotConfig 5 | from app.secbot.exceptions import SecbotConfigError 6 | from app.secbot.inputs.gitlab.schemas import GitlabEvent 7 | 8 | 9 | def test_parse_yaml_with_repo_regex_exclude(get_event_data): 10 | yaml_string = """ 11 | version: "1.0" 12 | components: 13 | gitleaks: 14 | handler_name: gitleaks 15 | defectdojo: 16 | handler_name: defectdojo 17 | slack: 18 | handler_name: slack 19 | jobs: 20 | - name: Exclude gitlab 21 | rules: 22 | gitlab: 23 | event_type: 
"merge_request" 24 | project.path_with_namespace: "!secbot-test-group/.*" 25 | scans: 26 | - gitleaks 27 | outputs: 28 | - defectdojo 29 | notifications: 30 | - slack 31 | 32 | - name: Another merge request 33 | rules: 34 | gitlab: 35 | event_type: "merge_request" 36 | scans: 37 | - gitleaks 38 | outputs: 39 | - defectdojo 40 | notifications: 41 | - slack 42 | 43 | - name: Some job merge request 44 | rules: 45 | gitlab: 46 | event_type: "push_tags" 47 | scans: 48 | - gitleaks 49 | outputs: 50 | - defectdojo 51 | notifications: 52 | - slack 53 | """ # noqa: W291,E261 54 | 55 | yaml_config = SecbotConfig(yaml.safe_load(yaml_string)) 56 | data = get_event_data( 57 | GitlabEvent.MERGE_REQUEST, 58 | {"project": {"path_with_namespace": "secbot-test-group/example-project"}}, 59 | ) 60 | assert yaml_config.matching_workflow_job("gitlab", data=data) 61 | 62 | 63 | def test_parse_yaml_with_repo_regex(get_event_data): 64 | yaml_string = """ 65 | version: "1.0" 66 | components: 67 | gitleaks: 68 | handler_name: gitleaks 69 | defectdojo: 70 | handler_name: defectdojo 71 | slack: 72 | handler_name: slack 73 | jobs: 74 | - name: Exclude gitlab 75 | rules: 76 | gitlab: 77 | event_type: "merge_request" 78 | project.path_with_namespace: "secbot-test-group/.*" 79 | scans: 80 | - gitleaks 81 | outputs: 82 | - defectdojo 83 | notifications: 84 | - slack 85 | 86 | - name: Another merge request 87 | rules: 88 | gitlab: 89 | event_type: "merge_request" 90 | scans: 91 | - gitleaks 92 | outputs: 93 | - defectdojo 94 | notifications: 95 | - slack 96 | 97 | - name: Some other job 98 | rules: 99 | gitlab: 100 | event_type: "push_tags" 101 | example: 102 | scans: 103 | - gitleaks 104 | outputs: 105 | - defectdojo 106 | notifications: 107 | - slack 108 | """ # noqa: W291,E261 109 | secbot_config = SecbotConfig(yaml.safe_load(yaml_string)) 110 | data = get_event_data( 111 | GitlabEvent.MERGE_REQUEST, 112 | {"project": {"path_with_namespace": "secbot-test-group/example-project"}}, 113 | ) 114 | 
with pytest.raises(SecbotConfigError): 115 | secbot_config.matching_workflow_job("gitlab", data=data) 116 | -------------------------------------------------------------------------------- /tests/units/inputs/conftest.py: -------------------------------------------------------------------------------- 1 | import json 2 | import pathlib 3 | from typing import Dict, Optional 4 | 5 | import pytest 6 | from pydantic.utils import deep_update 7 | 8 | from app.secbot.inputs.gitlab.schemas import GitlabEvent 9 | 10 | GITLAB_EVENT_RESPONSE_MAP: Dict[GitlabEvent, str] = { 11 | GitlabEvent.MERGE_REQUEST: "fixtures/inputs/gitlab/merge_request_webhook.json", 12 | GitlabEvent.PUSH: "fixtures/inputs/gitlab/push_webhook.json", 13 | GitlabEvent.TAG_PUSH: "fixtures/inputs/gitlab/tag_push_webhook.json", 14 | } 15 | 16 | 17 | @pytest.fixture 18 | def get_event_data(dir_tests): 19 | def handler(event: GitlabEvent, overrides: Optional[dict] = None) -> dict: 20 | file_path = GITLAB_EVENT_RESPONSE_MAP[event] 21 | with open(pathlib.Path(dir_tests, file_path), "r") as file: 22 | data = json.loads(file.read()) 23 | data = deep_update(data, overrides or {}) 24 | data["raw"] = data 25 | return data 26 | 27 | return handler 28 | 29 | 30 | @pytest.fixture 31 | def generate_commit_data(faker): 32 | def handler(overrides: Optional[dict] = None) -> dict: 33 | data = { 34 | "id": faker.md5(), 35 | "message": faker.pystr(), 36 | "title": faker.pystr(), 37 | "timestamp": faker.iso8601(), 38 | "url": faker.uri(), 39 | "author": {"name": faker.name(), "email": faker.ascii_email()}, 40 | "added": [], 41 | "modified": [], 42 | "removed": [], 43 | } 44 | return deep_update(data, overrides or {}) 45 | 46 | return handler 47 | -------------------------------------------------------------------------------- /tests/units/inputs/gitlab/__init__.py: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/exness/security-bot/ba35e061bdce3bbc0a345bb6ef736765cfecb923/tests/units/inputs/gitlab/__init__.py -------------------------------------------------------------------------------- /tests/units/inputs/gitlab/handlers/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/exness/security-bot/ba35e061bdce3bbc0a345bb6ef736765cfecb923/tests/units/inputs/gitlab/handlers/__init__.py -------------------------------------------------------------------------------- /tests/units/inputs/gitlab/handlers/slack/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/exness/security-bot/ba35e061bdce3bbc0a345bb6ef736765cfecb923/tests/units/inputs/gitlab/handlers/slack/__init__.py -------------------------------------------------------------------------------- /tests/units/inputs/gitlab/handlers/slack/test_slack_message_generation.py: -------------------------------------------------------------------------------- 1 | from typing import Optional 2 | 3 | import pytest 4 | from polyfactory.factories.pydantic_factory import ModelFactory 5 | 6 | from app.secbot.inputs.gitlab.handlers.slack import generate_message_blocks 7 | from app.secbot.inputs.gitlab.schemas import ( 8 | GitlabOutputResult, 9 | GitlabScanResult, 10 | GitlabScanResultFile, 11 | ) 12 | from app.secbot.inputs.gitlab.schemas.output_responses import ( 13 | OutputFinding, 14 | OutputResponse, 15 | ) 16 | 17 | 18 | class GitlabScanResultFileFactory(ModelFactory[GitlabScanResultFile]): 19 | __model__ = GitlabScanResultFile 20 | 21 | content = {} 22 | 23 | 24 | class GitlabScanResultFactory(ModelFactory[GitlabScanResult]): 25 | __model__ = GitlabScanResult 26 | 27 | file = GitlabScanResultFileFactory.build() 28 | 29 | 30 | class OutputResultFactory(ModelFactory[GitlabOutputResult]): 31 | __model__ = GitlabOutputResult 32 | 33 | scan_result = 
GitlabScanResultFactory.build() 34 | 35 | 36 | class FindingFactory(ModelFactory[OutputFinding]): 37 | __model__ = OutputFinding 38 | 39 | 40 | @pytest.fixture 41 | def output_result_factory(faker): 42 | def factory( 43 | findings_size: int = 10, 44 | project_name: Optional[str] = None, 45 | project_url: Optional[str] = None, 46 | ): 47 | if not project_name: 48 | project_name = faker.pystr() 49 | 50 | if not project_url: 51 | project_url = faker.uri() 52 | 53 | return OutputResultFactory.build( 54 | response=OutputResponse( 55 | project_name=project_name, 56 | project_url=project_url, 57 | findings=FindingFactory.batch(size=findings_size), 58 | ), 59 | ) 60 | 61 | return factory 62 | 63 | 64 | def generate_slack_message_block(msg: str) -> dict: 65 | return {"type": "section", "text": {"type": "mrkdwn", "text": msg}} 66 | 67 | 68 | def test_slack_message_generation_with_correct_worker_name( 69 | output_result_factory, faker 70 | ): 71 | findings_size = faker.pyint(max_value=25) 72 | project_name = faker.pystr() 73 | project_url = faker.uri() 74 | 75 | output_result = output_result_factory( 76 | project_name=project_name, 77 | project_url=project_url, 78 | findings_size=findings_size, 79 | ) 80 | message_blocks = generate_message_blocks(output_result, render_limit=10) 81 | assert message_blocks is not None 82 | 83 | worker_name = output_result.scan_result.component_name 84 | 85 | # Check that the title has correct worker name and project name 86 | assert message_blocks[0] == generate_slack_message_block( 87 | f"Worker *{worker_name}* found *{findings_size}* new findings in *<{project_url}|{project_name}>*:" 88 | ) 89 | 90 | 91 | def test_slack_message_generation_without_findings(output_result_factory): 92 | output_result = output_result_factory(findings_size=0) 93 | assert generate_message_blocks(output_result, render_limit=10) is None 94 | 95 | 96 | def test_slack_message_generation_with_extra_limit(output_result_factory): 97 | render_limit = 10 98 | findings_size 
= 15 99 | output_result = output_result_factory(findings_size=findings_size) 100 | 101 | message_blocks = generate_message_blocks(output_result, render_limit=render_limit) 102 | assert message_blocks is not None 103 | 104 | # Plus header and footer 105 | assert len(message_blocks) == render_limit + 2 106 | 107 | # Assert footer 108 | assert message_blocks[-1] == generate_slack_message_block( 109 | f":no_bell: *{findings_size - render_limit}* were *stripped* from notification :no_bell:" 110 | ) 111 | -------------------------------------------------------------------------------- /tests/units/inputs/gitlab/test_defectdojo_validation.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from app.secbot.inputs.gitlab.handlers.defectdojo.validator import ( 4 | DefectDojoFindingDuplicate, 5 | DefectDojoFindings, 6 | is_gitleaks_valid, 7 | ) 8 | from app.secbot.schemas import Severity 9 | 10 | 11 | def test_which_finding_active_without_duplicate(faker): 12 | is_valid = faker.pybool() 13 | finding = DefectDojoFindings( 14 | active=is_valid, 15 | scan_name="gitleaks", 16 | severity=Severity.HIGH, 17 | duplicate=None, 18 | ) 19 | assert finding.is_active is is_valid 20 | 21 | 22 | def test_which_finding_active_with_duplicate(): 23 | duplicate = DefectDojoFindingDuplicate( 24 | active=True, 25 | severity=Severity.HIGH, 26 | ) 27 | finding = DefectDojoFindings( 28 | active=False, 29 | scan_name="gitleaks", 30 | severity=Severity.HIGH, 31 | duplicate=duplicate, 32 | ) 33 | assert finding.is_active is True 34 | 35 | 36 | def test_is_gitleaks_with_wrong_check_service_type(): 37 | findings = [ 38 | DefectDojoFindings( 39 | active=True, 40 | scan_name="some-other-service", 41 | severity=Severity.HIGH, 42 | duplicate=None, 43 | ), 44 | DefectDojoFindings( 45 | active=True, 46 | scan_name="gitleaks", 47 | severity=Severity.HIGH, 48 | duplicate=None, 49 | ), 50 | ] 51 | with pytest.raises(AssertionError): 52 | 
is_gitleaks_valid(findings) 53 | 54 | 55 | @pytest.mark.parametrize( 56 | "findings, expected", 57 | [ 58 | ([], True), 59 | # No matter severities. If there is an active finding, the function returns False 60 | ( 61 | [ 62 | DefectDojoFindings( 63 | active=True, 64 | scan_name="gitleaks", 65 | severity=Severity.INFO, 66 | duplicate=None, 67 | ), 68 | DefectDojoFindings( 69 | active=True, 70 | scan_name="gitleaks", 71 | severity=Severity.LOW, 72 | duplicate=None, 73 | ), 74 | ], 75 | False, 76 | ), 77 | # All findings are inactive 78 | ( 79 | [ 80 | DefectDojoFindings( 81 | active=False, 82 | scan_name="gitleaks", 83 | severity=Severity.CRITICAL, 84 | duplicate=None, 85 | ), 86 | DefectDojoFindings( 87 | active=False, 88 | scan_name="gitleaks", 89 | severity=Severity.HIGH, 90 | duplicate=None, 91 | ), 92 | ], 93 | True, 94 | ), 95 | # Active in duplicate 96 | ( 97 | [ 98 | DefectDojoFindings( 99 | active=False, 100 | scan_name="gitleaks", 101 | severity=Severity.CRITICAL, 102 | duplicate=DefectDojoFindingDuplicate( 103 | active=True, 104 | severity=Severity.CRITICAL, 105 | ), 106 | ), 107 | DefectDojoFindings( 108 | active=False, 109 | scan_name="gitleaks", 110 | severity=Severity.HIGH, 111 | duplicate=None, 112 | ), 113 | ], 114 | False, 115 | ), 116 | ], 117 | ) 118 | def test_is_gitleaks_valid(findings, expected): 119 | assert is_gitleaks_valid(findings) == expected 120 | -------------------------------------------------------------------------------- /tests/units/inputs/gitlab/test_gitlab_dependencies.py: -------------------------------------------------------------------------------- 1 | from unittest import mock 2 | 3 | import pytest 4 | from fastapi import HTTPException 5 | 6 | from app.secbot.inputs.gitlab.dependencies import ( 7 | get_gitlab_webhook_token_header, 8 | gitlab_event, 9 | ) 10 | from app.secbot.inputs.gitlab.schemas import GitlabEvent 11 | 12 | 13 | @mock.patch("app.secbot.inputs.gitlab.dependencies.settings") 14 | def 
# ===========================================================================
# tests/units/inputs/gitlab/test_gitlab_dependencies.py  (tail of the file;
# the import block and the first token-header test start before this chunk)
# ===========================================================================


@mock.patch("app.secbot.inputs.gitlab.dependencies.settings")
def test_gitlab_webhook_token_header_invalid(settings_mock):
    """An X-Gitlab-Token matching no configured webhook secret is rejected."""
    get_secret_value_mock = mock.Mock(get_secret_value=mock.Mock(return_value="valid"))
    settings_mock.gitlab_configs = [
        mock.Mock(webhook_secret_token=get_secret_value_mock),
    ]

    with pytest.raises(HTTPException):
        get_gitlab_webhook_token_header(x_gitlab_token="invalid")


@pytest.mark.asyncio
@pytest.mark.parametrize(
    "event_name, payload, expected_event",
    [
        # System hooks carry the event kind inside the payload body.
        ("System Hook", {"event_type": "merge_request"}, GitlabEvent.MERGE_REQUEST),
        ("System Hook", {"event_name": "push"}, GitlabEvent.PUSH),
        ("System Hook", {"event_name": "tag_push"}, GitlabEvent.TAG_PUSH),
        # Project hooks name the event in the X-Gitlab-Event header itself.
        (
            "Merge Request Hook",
            {"event_type": "merge_request"},
            GitlabEvent.MERGE_REQUEST,
        ),
        ("Push Hook", {"event_name": "push"}, GitlabEvent.PUSH),
        ("Tag Push Hook", {"event_name": "tag_push"}, GitlabEvent.TAG_PUSH),
    ],
)
async def test_gitlab_event_event_detection(event_name, payload, expected_event):
    """Header + payload combinations resolve to the right GitlabEvent."""
    request_mock = mock.Mock(json=mock.AsyncMock(return_value=payload))
    event = await gitlab_event(request_mock, x_gitlab_event=event_name)
    assert event == expected_event


@pytest.mark.asyncio
@pytest.mark.parametrize("event_name", ["System Hook", "Random event name"])
async def test_gitlab_not_supported_event(event_name, faker):
    """Unknown or unsupported events resolve to None instead of raising."""
    random_event_payload = faker.pydict()
    request_mock = mock.Mock(json=mock.AsyncMock(return_value=random_event_payload))
    event = await gitlab_event(request_mock, x_gitlab_event=event_name)
    assert event is None


# ===========================================================================
# tests/units/inputs/gitlab/test_gitlab_project_languages.py
# ===========================================================================
from unittest import mock

import pytest

from app.secbot.inputs.gitlab.schemas.base import Project
from app.secbot.inputs.gitlab.services import get_gitlab_project_languages


@pytest.fixture
def project_factory():
    """Build a Project with overridable id / web_url and fixed test defaults."""

    def _factory(
        project_id: int = 42,
        web_url: str = "https://git.env.local/secbot-test-group/example-project",
    ):
        return Project(
            id=project_id,
            name="test",
            web_url=web_url,
            git_ssh_url="ssh://git.env.local/secbot-test-group/example-project",
            git_http_url="https://git.env.local/secbot-test-group/example-project",
            namespace="secbot-test-group",
            path_with_namespace="secbot-test-group/example-project",
        )

    return _factory


@mock.patch("app.secbot.inputs.gitlab.services.requests.get")
@mock.patch("app.secbot.inputs.gitlab.services.get_config_from_host")
def test_gitlab_project_languages_api_url(
    config_mock, requests_mock, faker, project_factory
):
    """The v4 languages endpoint is hit with the host-config auth token."""
    token = faker.pystr()
    project_id = faker.pyint()
    web_url = "https://git.env.local/secbot-test-group/example-project"
    project = project_factory(web_url=web_url, project_id=project_id)

    config_mock.return_value.auth_token.get_secret_value.return_value = token

    get_gitlab_project_languages(project)

    requests_mock.assert_called_once_with(
        url=f"https://git.env.local/api/v4/projects/{project_id}/languages",
        headers={"PRIVATE-TOKEN": token},
    )


@mock.patch(
    "app.secbot.inputs.gitlab.services.requests.get",
    return_value=mock.Mock(
        status_code=400,
        json=mock.MagicMock(return_value={"message": "error message"}),
    ),
)
@mock.patch("app.secbot.inputs.gitlab.services.get_config_from_host")
def test_gitlab_project_languages_non_success(_config_mock, _mock, project_factory):
    """A non-success HTTP status yields None rather than an exception."""
    project = project_factory()
    assert get_gitlab_project_languages(project) is None


@mock.patch(
    "app.secbot.inputs.gitlab.services.requests.get",
    return_value=mock.Mock(
        status_code=200,
        json=mock.MagicMock(return_value={"Python": "90.00", "Shell": "10.00"}),
    ),
)
@mock.patch("app.secbot.inputs.gitlab.services.get_config_from_host")
def test_gitlab_project_languages_success_response(
    _config_mock, _mock, project_factory
):
    """A 200 response is passed through as the language→share mapping."""
    project = project_factory()
    assert get_gitlab_project_languages(project) == {
        "Python": "90.00",
        "Shell": "10.00",
    }


# ===========================================================================
# tests/units/inputs/gitlab/test_gitlab_routes.py
# ===========================================================================
import pytest
from starlette.testclient import TestClient

from app.main import app
from app.secbot.inputs.gitlab.dependencies import get_gitlab_webhook_token_header

client = TestClient(app)
# Bypass webhook-token auth for every request made through this client.
app.dependency_overrides[get_gitlab_webhook_token_header] = lambda: "token"


@pytest.mark.asyncio
async def test_gitlab_route_not_failed_with_invalid_data_event(faker):
    """Malformed merge-request payloads must not break the webhook endpoint."""
    invalid_merge_request_data = {
        "event_type": "merge_request",
        **faker.pydict(allowed_types=["str", "int", "float", "bool"]),
    }
    response = client.post(
        "/v1/gitlab/webhook",
        headers={"X-Gitlab-Event": "Merge Request Hook"},
        json=invalid_merge_request_data,
    )
    assert response.status_code == 200
    assert response.json() == {"status": "ok"}


# ===========================================================================
# tests/units/inputs/gitlab/test_gitlab_utils.py
# ===========================================================================
from unittest import mock

import pytest

from app.secbot.inputs.gitlab import generate_gitlab_security_id
from app.secbot.inputs.gitlab.utils import get_project_name


@pytest.mark.parametrize(
    "project_path, expected_project_name",
    [
        # Trailing ".git" suffix is stripped.
        (
            "git.env.local:sample-group/sample-project.git",
            "git.env.local:sample-group/sample-project",
        ),
        # Leading "git@" user part is stripped.
        (
            "git@git.env.local:sample-group/sample-project",
            "git.env.local:sample-group/sample-project",
        ),
    ],
)
def test_get_project_name_with_git_at_the_end(project_path, expected_project_name):
    assert get_project_name(project_path) == expected_project_name


def test_get_project_name_with_plain_string(faker):
    """Strings without git decorations pass through unchanged."""
    simple_string = faker.pystr()
    project_name = get_project_name(simple_string)
    assert simple_string == project_name


# noinspection SpellCheckingInspection
def test_generate_gitlab_security_id():
    """The security id is a stable digest of prefix + commit + project path."""
    prefix = "prefix"
    commit_hash = "commit_hash"
    project_path = "git.env.local:sample-group/sample-project.git"

    data = mock.Mock(
        commit=mock.Mock(id=commit_hash),
        project=mock.Mock(git_ssh_url=project_path),
    )
    security_id = generate_gitlab_security_id(prefix=prefix, data=data)
    assert (
        security_id
        == "prefix_8666e7b2f4a9023e9049d3e1dcc6012c8ede0351562876d482fa89d07a66f6f0"
    )


# ===========================================================================
# tests/units/inputs/gitlab/test_merge_request_webhook_data.py
# ===========================================================================
from app.secbot.inputs.gitlab.schemas import GitlabEvent, MergeRequestWebhookModel


def test_merge_request_webhook_data_commit(get_event_data, faker):
    """commit is taken from object_attributes.last_commit."""
    commit_hash = faker.md5()
    data = get_event_data(
        GitlabEvent.MERGE_REQUEST,
        {"object_attributes": {"last_commit": {"id": commit_hash}}},
    )
    instance = MergeRequestWebhookModel(**data)
    assert instance.commit.id == commit_hash


def test_merge_request_webhook_data_target_branch(get_event_data, faker):
    target_branch = faker.pystr()
    data = get_event_data(
        GitlabEvent.MERGE_REQUEST,
        {"object_attributes": {"target_branch": target_branch}},
    )
    instance = MergeRequestWebhookModel(**data)
    assert instance.target_branch == target_branch


def test_merge_request_webhook_data_path(get_event_data, faker):
    """path mirrors object_attributes.url."""
    path = faker.uri()
    data = get_event_data(
        GitlabEvent.MERGE_REQUEST, {"object_attributes": {"url": path}}
    )
    instance = MergeRequestWebhookModel(**data)
    assert instance.path == path


# ===========================================================================
# tests/units/inputs/gitlab/test_push_webhook_data.py
# ===========================================================================
from app.secbot.inputs.gitlab.schemas import GitlabEvent, PushWebhookModel


def test_push_webhook_data_commit(get_event_data, generate_commit_data, faker):
    """commit resolves to the entry of `commits` whose id equals `after`."""
    commit_hash = faker.md5()
    data = get_event_data(
        GitlabEvent.PUSH,
        {
            "after": commit_hash,
            "commits": [
                generate_commit_data(),
                generate_commit_data({"id": commit_hash}),
            ],
        },
    )
    instance = PushWebhookModel(**data)
    assert instance.commit.id == commit_hash


def test_push_webhook_data_target_branch(get_event_data, faker):
    # NOTE(review): real GitLab refs are "refs/heads/..."; the fixture uses
    # "ref/heads/..." — presumably the model only keeps the last path
    # segment, so both pass.  Confirm against the schema.
    target_branch = faker.pystr()
    data = get_event_data(
        GitlabEvent.PUSH,
        {"ref": f"ref/heads/{target_branch}"},
    )
    instance = PushWebhookModel(**data)
    assert instance.target_branch == target_branch


def test_push_webhook_data_path(get_event_data, generate_commit_data, faker):
    """path comes from the url of the commit matching `after`."""
    commit_hash = faker.md5()
    path = faker.uri()
    data = get_event_data(
        GitlabEvent.PUSH,
        {
            "after": commit_hash,
            "commits": [
                generate_commit_data(),
                generate_commit_data({"id": commit_hash, "url": path}),
            ],
        },
    )
    instance = PushWebhookModel(**data)
    assert instance.path == path


# ===========================================================================
# tests/units/inputs/gitlab/test_tag_push_webhook_data.py
# ===========================================================================
from app.secbot.inputs.gitlab.schemas import GitlabEvent, TagWebhookModel


def test_tag_push_webhook_data_commit(get_event_data, generate_commit_data, faker):
    """commit resolves to the `commits` entry whose id equals `checkout_sha`."""
    commit_hash = faker.md5()
    data = get_event_data(
        GitlabEvent.TAG_PUSH,
        {
            "checkout_sha": commit_hash,
            "commits": [
                generate_commit_data(),
                generate_commit_data({"id": commit_hash}),
            ],
        },
    )
    instance = TagWebhookModel(**data)
    assert instance.commit.id == commit_hash


def test_tag_push_webhook_data_target_branch(get_event_data, faker):
    # NOTE(review): same "ref/" vs "refs/" observation as the push test above.
    target_branch = faker.pystr()
    data = get_event_data(
        GitlabEvent.TAG_PUSH,
        {"ref": f"ref/tags/{target_branch}"},
    )
    instance = TagWebhookModel(**data)
    assert instance.target_branch == target_branch


def test_tag_push_webhook_data_path(get_event_data, generate_commit_data, faker):
    commit_hash = faker.md5()
    path = faker.uri()
    data = get_event_data(
        GitlabEvent.TAG_PUSH,
        {
            "checkout_sha": commit_hash,
            "commits": [
                generate_commit_data(),
                generate_commit_data({"id": commit_hash, "url": path}),
            ],
        },
    )
    instance = TagWebhookModel(**data)
    assert instance.path == path


# ===========================================================================
# tests/units/secbot/__init__.py  (empty package marker)
# ===========================================================================


# ===========================================================================
# tests/units/secbot/test_autodiscover.py
# ===========================================================================
from unittest import mock

from app.secbot import SecurityBot
from app.secbot.inputs.gitlab import GitlabInput


def test_autodiscover_gitlab():
    """SecurityBot registers the gitlab input on construction."""
    celery_app = mock.Mock()
    runner = SecurityBot(celery_app=celery_app)

    assert "gitlab" in runner._registered_inputs


def test_gitlab_autodiscover_inputs():
    """GitlabInput discovers its bundled scan/output/notification handlers."""
    celery_app = mock.Mock()
    gitlab_input = GitlabInput(config_name="gitlab", celery_app=celery_app)

    assert "gitleaks" in gitlab_input.scans
    assert "defectdojo" in gitlab_input.outputs
    assert "slack" in gitlab_input.notifications


# ===========================================================================
# tests/units/secbot/test_register.py
# ===========================================================================
from unittest import mock

from app.secbot import SecurityBot
from app.secbot.handlers import SecbotScanHandler
from app.secbot.inputs import SecbotInput
from app.secbot.inputs.gitlab import GitlabInput


def test_register_new_input():
    """register_input instantiates the class with config_name + celery_app."""
    celery_app = mock.Mock()
    init_mock = mock.Mock(return_value=None)

    class ExampleInput(SecbotInput):
        __init__ = init_mock

    runner = SecurityBot(celery_app=celery_app)
    runner.register_input("new_input", ExampleInput)

    assert "new_input" in runner._registered_inputs
    init_mock.assert_called_once_with(config_name="new_input", celery_app=celery_app)


def test_register_new_handler():
    """register_handler wires a scan handler into the input's scan registry."""
    celery_app = mock.Mock()
    init_mock = mock.Mock(return_value=None)

    class ExampleScanHandler(SecbotScanHandler):
        __init__ = init_mock

        async def run(self, *args, **kwargs):
            pass

    gitlab_input = GitlabInput(config_name="gitlab", celery_app=celery_app)
    gitlab_input.register_handler("new_handler", ExampleScanHandler)

    assert "new_handler" in gitlab_input.scans
    init_mock.assert_called_once_with(config_name="new_handler", celery_app=celery_app)


# ===========================================================================
# tests/units/test_pydantic_celery.py
# ===========================================================================
from unittest import mock

from app.secbot.schemas import SecbotBaseModel
from app.secbot.utils import deserializer, load_cls, serializer


class Example(SecbotBaseModel):
    # Minimal model used as the round-trip payload in the tests below.
    foo: str


def test_load_cls():
    """load_cls resolves a dotted path back to the class object."""
    cls = load_cls("tests.units.test_pydantic_celery.Example")
    assert cls == Example


def test_serializer_with_simple_value(faker):
    """Plain values pass through the serializer untouched."""
    value = faker.pystr()
    assert value == serializer(value)


@mock.patch("app.secbot.schemas.PYDANTIC_CLS_PATH", new="pydantic_field")
def test_serializer_with_pydantic_model(faker):
    """A dict tagged with the cls-path marker is revived into the model."""
    model_path = "tests.units.test_pydantic_celery.Example"
    model = {"foo": "bar", "pydantic_field": model_path}
    assert Example(foo="bar") == serializer(model)


@mock.patch("app.secbot.schemas.PYDANTIC_CLS_PATH", new="pydantic_field")
def test_serializer_with_args(faker):
    """Tagged dicts nested in positional-arg tuples are revived in place."""
    model_path = "tests.units.test_pydantic_celery.Example"
    model = {"foo": "bar", "pydantic_field": model_path}

    example_int = faker.pyint()
    example_str = faker.pystr()
    args = (example_int, example_str, model)

    assert (example_int, example_str, Example(foo="bar")) == serializer(args)


@mock.patch("app.secbot.schemas.PYDANTIC_CLS_PATH", new="pydantic_field")
def test_serializer_with_kwargs(faker):
    """Tagged dicts nested in keyword-arg mappings are revived in place."""
    model_path = "tests.units.test_pydantic_celery.Example"
    model = {"foo": "bar", "pydantic_field": model_path}

    example_int = faker.pyint()
    example_str = faker.pystr()
    kwargs = {"int": example_int, "str": example_str, "model": model}

    assert {
        "int": example_int,
        "str": example_str,
        "model": Example(foo="bar"),
    } == serializer(kwargs)


def test_deserializer_with_simple_value(faker):
    """Plain values pass through the deserializer untouched."""
    value = faker.pystr()
    assert value == deserializer(value)


@mock.patch("app.secbot.schemas.PYDANTIC_CLS_PATH", new="pydantic_field")
def test_deserializer_with_pydantic_model():
    """A model flattens to its dict plus the cls-path marker field."""
    model = Example(foo="bar")
    assert {
        "foo": "bar",
        "pydantic_field": "tests.units.test_pydantic_celery.Example",
    } == deserializer(model)


@mock.patch("app.secbot.schemas.PYDANTIC_CLS_PATH", new="pydantic_field")
def test_deserializer_with_args(faker):
    """Models nested in positional-arg tuples are flattened in place."""
    example_int = faker.pyint()
    example_str = faker.pystr()
    args = (example_int, example_str, Example(foo="bar"))

    assert (
        example_int,
        example_str,
        {"foo": "bar", "pydantic_field": "tests.units.test_pydantic_celery.Example"},
    ) == deserializer(args)


@mock.patch("app.secbot.schemas.PYDANTIC_CLS_PATH", new="pydantic_field")
def test_deserializer_with_kwargs(faker):
    """Models nested in keyword-arg mappings are flattened in place."""
    example_int = faker.pyint()
    example_str = faker.pystr()
    kwargs = {"int": example_int, "str": example_str, "model": Example(foo="bar")}
    assert {
        "int": example_int,
        "str": example_str,
        "model": {
            "foo": "bar",
            "pydantic_field": "tests.units.test_pydantic_celery.Example",
        },
    } == deserializer(kwargs)
# ===========================================================================
# tests/units/test_worker_utils.py
# ===========================================================================
from unittest.mock import patch

from app.secbot.inputs.gitlab.services import get_gitlab_project_languages
from tests.units import factories


@patch("app.secbot.inputs.gitlab.services.requests")
@patch("app.secbot.inputs.gitlab.services.get_config_from_host")
def test_gitlab_language(_config_mock, requests_mock):
    """A 200 languages response is returned as the language→share dict."""
    project = factories.create_project__security()
    requests_mock.get.return_value.status_code = 200
    requests_mock.get.return_value.json.return_value = {
        "Python": 52.01,
        "Javascript": 47.98,
        "Go": 0.01,
    }

    languages = get_gitlab_project_languages(project)

    requests_mock.get.assert_called_once()
    assert len(languages.keys()) >= 3
    assert "Python" in languages.keys()
    assert languages["Python"] > 50.0


# ===========================================================================
# tests/units/workflow/test_workflow_runner.py
# NOTE(review): this entire module is commented out in the repository —
# presumably it targets a retired SecbotWorkflowRunner API.  Either revive
# it against the current workflow code or delete it; dead test code rots.
# Kept below unchanged in content.
# ===========================================================================
# import pytest
# import yaml
#
# from app.exceptions import SecbotConfigError
# from app.secbot.config import SecbotConfig
# from app.workflow import SecbotWorkflow, SecbotWorkflowRunner
#
#
# def test_register_workflow():
#     yaml_string = """
#     version: "1.0"
#     components:
#       gitleaks:
#         handler_name: gitleaks
#       defectdojo:
#         handler_name: defectdojo
#       slack:
#         handler_name: slack
#     jobs:
#       - name: Example jobs
#         rules:
#           gitlab:
#         scans:
#           - gitleaks
#         outputs:
#           - defectdojo
#         notifications:
#           - slack
#     """  # noqa: W291,E261
#     config = SecbotConfig(yaml.safe_load(yaml_string))
#     runner = SecbotWorkflowRunner(config=config)
#
#     workflow = SecbotWorkflow(input_name="gitlab")
#     runner.register_workflow(workflow)
#
#     assert "gitlab" in runner.workflows[0].input_name
#
#
# def test_workflow_validation_without_components():
#     yaml_string = """
#     version: "1.0"
#     components:
#       gitleaks:
#         handler_name: gitleaks
#       defectdojo:
#         handler_name: defectdojo
#       slack:
#         handler_name: slack
#     jobs:
#       - name: Example jobs
#         rules:
#           gitlab:
#         scans:
#           - gitleaks
#         outputs:
#           - defectdojo
#         notifications:
#           - slack
#     """  # noqa: W291,E261
#     config = SecbotConfig(yaml.safe_load(yaml_string))
#     runner = SecbotWorkflowRunner(config=config)
#
#     runner.validate()
#     assert runner.is_validated is True
#
#
# def test_workflow_validation():
#     yaml_string = """
#     version: "1.0"
#     components:
#       gitleaks:
#         handler_name: gitleaks
#       defectdojo:
#         handler_name: defectdojo
#       slack:
#         handler_name: slack
#     jobs:
#       - name: Example jobs
#         rules:
#           gitlab:
#         scans:
#           - gitleaks
#         outputs:
#           - defectdojo
#         notifications:
#           - slack
#     """  # noqa: W291,E261
#     config = SecbotConfig(yaml.safe_load(yaml_string))
#     runner = SecbotWorkflowRunner(config=config)
#
#     workflow = SecbotWorkflow(input_name="gitlab")
#     runner.register_workflow(workflow)
#
#     with pytest.raises(SecbotConfigError):
#         runner.validate()