├── .gitignore ├── Dockerfile ├── Dockerfile.api ├── Dockerfile.celery ├── Dockerfile.redis ├── Dockerfile.test ├── LICENSE ├── Makefile ├── README.md ├── bin ├── run.sh └── wait-for-it.sh ├── configuration-schema.yaml ├── configuration └── environment │ ├── localdev.env │ └── test.env ├── deadshot ├── __init__.py ├── api_server.py ├── blueprints │ ├── __init__.py │ ├── api_exceptions.py │ ├── api_responses.py │ └── blueprints.py ├── configurations │ ├── __init__.py │ ├── api_server_config_data.py │ ├── celery_config_data.py │ ├── github_config.py │ ├── jira_config.py │ └── slack_config.py ├── services │ ├── celery_worker │ │ └── webhook_async_processor.py │ ├── common │ │ ├── __init__.py │ │ ├── jira_service.py │ │ ├── logger.py │ │ ├── secrets_loader.py │ │ └── slack_notification.py │ ├── github │ │ ├── __init__.py │ │ ├── git_diff_parser.py │ │ ├── github_service.py │ │ ├── sender_verification.py │ │ └── webhook_processor.py │ └── scanner │ │ ├── pr_scanner.py │ │ ├── regex_scanner.py │ │ └── secrets_json │ │ └── regex.json └── worker │ ├── __init__.py │ └── celery_initialization.py ├── docker-compose.yaml ├── local_dev_secrets ├── github_secrets.json ├── jira_user.json └── slack_webhook.json ├── requirements.txt └── tests ├── __init__.py ├── blueprints ├── __init__.py └── api │ ├── __init__.py │ └── test_incoming_webhook.py ├── diff_parser ├── __init__.py └── test_github_diff_parser.py ├── fixtures ├── bad_pr.json ├── close_pr.json ├── good_pr.json ├── test.diff └── test2.diff ├── test_requirements.txt └── test_secrets └── github_secrets.json /.gitignore: -------------------------------------------------------------------------------- 1 | # ignore all json used for testing 2 | #*.json 3 | local_dev_secrets 4 | !local_secrets/.gitkeep 5 | .DS_Store 6 | # **/test_data 7 | *.pyc 8 | *.idea 9 | __pycache__ 10 | build 11 | venv -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.9.0-buster 2 | 3 | RUN apt-get update 4 | RUN pip install --upgrade pip 5 | 6 | ENV APP_USER twilio 7 | ENV FLASK_APP deadshot 8 | ENV APP_DIR /home/twilio/app/deadshot 9 | ENV APP_INSTALL_DIR /app/deadshot 10 | ENV DEADSHOT_RUN_MODE api 11 | 12 | RUN mkdir -p /home/twilio 13 | RUN groupadd -r twilio &&\ 14 | useradd -r -g twilio -d /home/twilio -s /sbin/nologin -c "Twilio Docker image user" twilio 15 | 16 | RUN chown twilio /home/twilio 17 | RUN chgrp twilio /home/twilio 18 | 19 | RUN mkdir -p $APP_INSTALL_DIR 20 | COPY deadshot $APP_INSTALL_DIR/deadshot 21 | COPY bin $APP_INSTALL_DIR/bin 22 | COPY local_dev_secrets $APP_DIR/secrets 23 | COPY requirements.txt $APP_INSTALL_DIR 24 | ENV SECRET_GITHUB_SECRETS $APP_DIR/secrets/github_secrets.json 25 | ENV SECRET_SLACK_WEBHOOKS $APP_DIR/secrets/slack_webhook.json 26 | ENV SECRET_JIRA_AUTH $APP_DIR/secrets/jira_user.json 27 | RUN pip3 install -r $APP_INSTALL_DIR/requirements.txt 28 | 29 | USER $APP_USER 30 | RUN mkdir -p $APP_DIR 31 | 32 | ADD --chown=twilio:twilio deadshot $APP_DIR/deadshot 33 | ADD --chown=twilio:twilio bin $APP_DIR/bin 34 | WORKDIR $APP_DIR 35 | 36 | EXPOSE 9001 37 | 38 | CMD (/bin/bash ./bin/run.sh) 39 | -------------------------------------------------------------------------------- /Dockerfile.api: -------------------------------------------------------------------------------- 1 | FROM python:3.9.0-buster 2 | 3 | RUN apt-get update 4 | RUN pip install --upgrade pip 5 | 6 | ENV APP_USER twilio 7 | ENV 
FLASK_APP deadshot 8 | ENV APP_DIR /home/twilio/app/deadshot 9 | ENV APP_INSTALL_DIR /app/deadshot 10 | ENV DEADSHOT_RUN_MODE api 11 | 12 | ################################################### 13 | # Please configure the following environment variables before building the image 14 | ENV DEADSHOT_LOG_LEVEL DEBUG 15 | ENV GITHUB_URL 16 | ENV GITHUB_API https:///api/v3 17 | ENV JIRA_SERVER=https://>.com 18 | ENV GITHUB_APP_NAME deadshot[bot] 19 | #################################################### 20 | 21 | ENV CELERY_BROKER_HOST redis 22 | ENV CELERY_BROKER_PORT 6379 23 | ENV CELERY_BROKER_DATABASE 1 24 | 25 | RUN mkdir -p /home/twilio 26 | RUN groupadd -r twilio &&\ 27 | useradd -r -g twilio -d /home/twilio -s /sbin/nologin -c "Twilio Docker image user" twilio 28 | 29 | RUN chown twilio /home/twilio 30 | RUN chgrp twilio /home/twilio 31 | 32 | RUN mkdir -p $APP_INSTALL_DIR 33 | COPY deadshot $APP_INSTALL_DIR/deadshot 34 | COPY bin $APP_INSTALL_DIR/bin 35 | COPY local_dev_secrets $APP_DIR/secrets 36 | COPY requirements.txt $APP_INSTALL_DIR 37 | ENV SECRET_GITHUB_SECRET $APP_DIR/secrets/github_secrets.json 38 | ENV SECRET_SLACK_WEBHOOKS $APP_DIR/secrets/slack_webhook.json 39 | ENV SECRET_JIRA_AUTH $APP_DIR/secrets/jira_user.json 40 | RUN pip3 install -r $APP_INSTALL_DIR/requirements.txt 41 | 42 | USER $APP_USER 43 | RUN mkdir -p $APP_DIR 44 | 45 | ADD --chown=twilio:twilio deadshot $APP_DIR/deadshot 46 | ADD --chown=twilio:twilio bin $APP_DIR/bin 47 | WORKDIR $APP_DIR 48 | 49 | EXPOSE 9001 50 | 51 | CMD (gunicorn -b 0.0.0.0:9001 --workers=5 "deadshot:create_app()") 52 | -------------------------------------------------------------------------------- /Dockerfile.celery: -------------------------------------------------------------------------------- 1 | FROM python:3.9.0-buster 2 | 3 | RUN apt-get update 4 | RUN pip install --upgrade pip 5 | 6 | ENV APP_USER twilio 7 | ENV FLASK_APP deadshot 8 | ENV APP_DIR /home/twilio/app/deadshot 9 | ENV APP_INSTALL_DIR /app/deadshot 10 | ENV DEADSHOT_RUN_MODE worker 11 | 12 | ################################################### 13 | # Please configure the following environment variables before building the image 14 | ENV DEADSHOT_LOG_LEVEL DEBUG 15 | ENV GITHUB_URL 16 | ENV GITHUB_API https:///api/v3 17 | ENV JIRA_SERVER=https://.com 18 | ENV GITHUB_APP_NAME deadshot[bot] 19 | #################################################### 20 | 21 | ENV CELERY_BROKER_HOST redis 22 | ENV CELERY_BROKER_PORT 6379 23 | ENV CELERY_BROKER_DATABASE 1 24 | RUN mkdir -p /home/twilio 25 | RUN groupadd -r twilio &&\ 26 | useradd -r -g twilio -d /home/twilio -s /sbin/nologin -c "Twilio Docker image user" twilio 27 | 28 | RUN chown twilio /home/twilio 29 | RUN chgrp twilio /home/twilio 30 | 31 | RUN mkdir -p $APP_INSTALL_DIR 32 | COPY deadshot $APP_INSTALL_DIR/deadshot 33 | COPY bin $APP_INSTALL_DIR/bin 34 | COPY local_dev_secrets $APP_DIR/secrets 35 | COPY requirements.txt $APP_INSTALL_DIR 36 | ENV SECRET_GITHUB_SECRET $APP_DIR/secrets/github_secrets.json 37 | ENV SECRET_SLACK_WEBHOOKS $APP_DIR/secrets/slack_webhook.json 38 | ENV SECRET_JIRA_AUTH $APP_DIR/secrets/jira_user.json 39 | RUN pip3 install -r $APP_INSTALL_DIR/requirements.txt 40 | 41 | USER $APP_USER 42 | RUN mkdir -p $APP_DIR 43 | 44 | ADD --chown=twilio:twilio deadshot $APP_DIR/deadshot 45 | ADD --chown=twilio:twilio bin $APP_DIR/bin 46 | WORKDIR $APP_DIR 47 | 48 | # EXPOSE 9001 49 | 50 | CMD (celery -A deadshot.worker.celery_initialization.celery worker --loglevel=INFO) 51 | 
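For reference, a filled-in version of the configurable ENV block in Dockerfile.api and Dockerfile.celery might look like the sketch below. The host names are illustrative placeholders only (github.mockcompany.com matches the example used in the README; jira.mockcompany.com is an assumed Jira host), not values shipped with the project.
```
# Illustrative values only -- replace with your own GitHub Enterprise and Jira hosts before building
ENV DEADSHOT_LOG_LEVEL DEBUG
ENV GITHUB_URL github.mockcompany.com
ENV GITHUB_API https://github.mockcompany.com/api/v3
ENV JIRA_SERVER https://jira.mockcompany.com
ENV GITHUB_APP_NAME deadshot[bot]
```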
-------------------------------------------------------------------------------- /Dockerfile.redis: -------------------------------------------------------------------------------- 1 | FROM redis:latest -------------------------------------------------------------------------------- /Dockerfile.test: -------------------------------------------------------------------------------- 1 | FROM python:3.9.0-buster 2 | 3 | RUN apt-get update 4 | RUN pip install --upgrade pip 5 | 6 | ENV APP_USER twilio 7 | ENV FLASK_APP deadshot 8 | ENV APP_DIR /home/twilio/app/deadshot 9 | ENV APP_INSTALL_DIR /app/deadshot 10 | 11 | RUN mkdir -p /home/twilio 12 | RUN groupadd -r twilio &&\ 13 | useradd -r -g twilio -d /home/twilio -s /sbin/nologin -c "Twilio Docker image user" twilio 14 | RUN chown twilio /home/twilio 15 | RUN chgrp twilio /home/twilio 16 | 17 | RUN mkdir -p $APP_INSTALL_DIR 18 | COPY requirements.txt $APP_INSTALL_DIR 19 | RUN pip install -r $APP_INSTALL_DIR/requirements.txt 20 | COPY tests/test_requirements.txt $APP_INSTALL_DIR 21 | RUN pip install -r $APP_INSTALL_DIR/test_requirements.txt 22 | 23 | COPY deadshot $APP_INSTALL_DIR 24 | COPY bin $APP_INSTALL_DIR 25 | 26 | USER $APP_USER 27 | RUN mkdir -p $APP_DIR 28 | 29 | ADD --chown=twilio:twilio ./deadshot $APP_DIR/deadshot 30 | ADD --chown=twilio:twilio ./tests $APP_DIR/tests 31 | 32 | WORKDIR $APP_DIR 33 | 34 | CMD (pytest -v -s && pycodestyle -v --max-line-length=180 deadshot) 35 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2021 Twilio Inc 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | ORG := security 2 | PROJECT := deadshot 3 | TAG := $(REPOSITORY)/$(ORG)/$(PROJECT):latest 4 | TEST_IMAGE_NAME := $(ORG)/$(PROJECT)-test 5 | TEST_ARGS:= --volume "$(CURDIR)/build":"/build" --env-file ./configuration/environment/test.env --name deadshot-test $(TEST_IMAGE_NAME) 6 | 7 | 8 | DOCKER_RUN:= docker run 9 | 10 | build: 11 | docker build . --tag $(PROJECT) 12 | docker build -f Dockerfile.redis -t $(PROJECT)-redis . 13 | 14 | build-test: 15 | echo $(TEST_IMAGE_NAME) 16 | docker build --file Dockerfile.test . 
--tag $(TEST_IMAGE_NAME) 17 | 18 | test: build-test clean-test 19 | $(DOCKER_RUN) $(TEST_ARGS) 20 | 21 | serve: 22 | docker-compose up --build 23 | 24 | clean: 25 | rm -rf build 26 | 27 | clean-test: 28 | -@docker rm deadshot-test 29 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Deadshot 2 | Deadshot is a Pull Request scanner that looks for the introduction of secrets via PRs by matching each diff line against a set of known secret expressions. 3 | 4 | ## Application capabilities: 5 | The service is responsible for: 6 | - Processing Pull Request diffs in real time to check for secrets being committed to Github via new code 7 | - Notifying the user on the PR conversation if it flags something 8 | - Notifying the security team's Slack channel when it identifies secrets in code for which you've enabled Slack notifications via a flag in regex.json 9 | 10 | The service does NOT: 11 | - Do any static or dynamic code analysis 12 | 13 | ## How does it work? 14 | Deadshot is a Flask-Celery-Redis multi-container application that is installed as a Github app to run on every Pull Request created against the main branch of a repo on which the Github app is installed. 15 | 16 | The Flask container is the entry point for the service, exposing the API routes defined in blueprints.py. Once a Pull Request payload is received on the API route, the service forwards the payload to a Redis queue for the Celery container to pick up and 17 | scan through the diff of the Pull Request. After the Celery container scans the diff against the specified secret regular expressions, it comments on the PR, notifies the security team's Slack channel, or creates a JIRA ticket for the team to follow up on. 18 | The Github app is configured with the Flask API URL and a shared secret used for generating the payload SHA checksum. 19 | 20 | One way to set up the API URL is to deploy this code on a host and assign an application load balancer to that host. 21 | 22 | ### Creating a Github App 23 | Note: When creating the app, please make sure you have a DNS entry ready for the host on which you'll be deploying the Deadshot containers and a secure secret string for the webhook secret. 24 | 25 | Github admins need to create and install a Github app before running or deploying the Deadshot application. 26 | To learn more about creating a Github app, please read this [guide](https://docs.github.com/en/free-pro-team@latest/developers/apps/creating-a-github-app) 27 | 28 | App Name: deadshot (all lower case; this is important as the service uses this name to fetch previous comments it has made on a PR) 29 | 30 | Webhook URL: http(s)://your-hosted-deadshot-dns/api/v1/deadshot-webhook 31 | 32 | To test this locally you can create an ngrok endpoint to feed into your Github app webhook section. 33 | 34 | ### Github App Permissions 35 | For this application to work, your Github app will have to enable the following permissions and subscriptions on the permissions page of the Github app: 36 | Repository Permissions: 37 | - Metadata: Read-only 38 | - PullRequests: Read & write 39 | - Webhooks: Read & write 40 | 41 | All other permissions are left at the default value of No access. 42 | 43 | Subscribe to events: 44 | - Pull request 45 | - Pull request review 46 | 47 | Finally, click “Create GitHub App”. 
After the app is created successfully, follow the “generate a private key” link in the top section of the app web page. 48 | 49 | 50 | Once the private key is generated, store it in a secure location. 51 | This generated private key is one of the pieces of data used to generate a session token for app interaction. 52 | 53 | After generating the private key, install the app on all the orgs you want it to monitor. 54 | 55 | ## Running Deadshot 56 | This is a multi-container application designed to bring up all three containers (Flask, Celery, Redis) via /bin/run.sh, so running the Dockerfile image should bring up the entire application. 57 | 58 | ### Environment variables: 59 | #### Note: For deployment using docker-compose.yaml, populate these environment variables in [localdev.env](https://github.com/twilio-labs/deadshot/blob/main/configuration/environment/localdev.env). If you're deploying by building and running each container image individually via Dockerfile.api and Dockerfile.celery, then these environment variables are in the respective Dockerfiles 60 | The three variables below are single string values provided by the user: 61 | - GITHUB_URL: This is the URL behind which your Github instance is accessed. Please provide the DNS without scheme or port. E.g., if your Github web URL is https://github.mockcompany.com then provide the value github.mockcompany.com 62 | - GITHUB_API: This is the API URL for Github. E.g., if your Github DNS is https://github.mockcompany.com then your API would be something like https://github.mockcompany.com/api/v3 63 | - JIRA_SERVER: Your company's JIRA server web URL 64 | 65 | The environment variables below load paths to files with credentials in them. Populate the key values in the JSON files available [here](https://github.com/twilio-labs/deadshot/tree/main/local_dev_secrets) before running the application. 66 | - SECRET_GITHUB_SECRET: This variable loads github_secrets.json, which has the Github app's shared webhook secret, integration ID, and PEM key. All three secrets are obtained from the Github app settings page: 67 | webhook secret - This is the secret configured during the app creation process 68 | integration ID - This is the app ID shown on the Github app settings page 69 | PEM key - This is the private key generated during the app installation process 70 | - SECRET_SLACK_WEBHOOKS: This loads slack_webhook.json, which has the webhook URL to which the Deadshot app will send Slack notifications when it finds secrets in a PR for which you set slack_alert=True in regex.json 71 | - SECRET_JIRA_AUTH: This loads jira_user.json, which has the username and password for the user ID used to access the org's JIRA board 72 | Note: If you do not provide valid values in SECRET_SLACK_WEBHOOKS and SECRET_JIRA_AUTH, the service will soft-fail and print error messages in the Docker container logs about failing to initialize the Slack and JIRA methods 73 | 74 | Note: If you do not move the JSON secrets files, then you do not need to update the values of the above three environment variables already present in the Dockerfiles or docker-compose.yaml 75 | 76 | ### Running/Serving the Docker Image 77 | This command will use docker-compose.yaml to bring up all the containers. 
Please update configuration/environment/localdev.env with values relevant to your organisation before running the command below: 78 | ```bash 79 | make serve 80 | ``` 81 | Once you’ve done this, if you do not intend to use the individual Dockerfiles for serving the application, jump to the “Server Healthcheck” section 82 | 83 | ### Building and running the service using Dockerfiles 84 | There are two ways to build and run the Dockerfiles. Four Dockerfiles are present in the repository: three are used to generate an individual image for each container needed for this service to work, and the fourth is set up to create an image that can bring up either the Flask application or the Celery worker depending on the DEADSHOT_RUN_MODE environment variable value (api or worker) provided 85 | To run any of the steps below, you need to be in the root folder of the repository 86 | 87 | Note: Ensure you’ve updated the environment variables in the Dockerfile.api and Dockerfile.celery files 88 | 89 | 90 | #### Building images from individual Dockerfiles 91 | There are three Dockerfiles relevant to this step: Dockerfile.api, Dockerfile.celery, and Dockerfile.redis 92 | 93 | ###### To build the Flask API image 94 | ``` 95 | docker build -f Dockerfile.api -t deadshot-api: . 96 | ``` 97 | 98 | ###### To build the celery image 99 | ``` 100 | docker build -f Dockerfile.celery -t deadshot-worker: . 101 | ``` 102 | 103 | ###### To build the redis image 104 | ``` 105 | docker build -f Dockerfile.redis -t deadshot-redis: . 106 | ``` 107 | 108 | #### Running built images 109 | The three images built in the previous steps run on separate networks by default, so they won't be able to talk to each other. To enable inter-container communication, we need to add them to a shared container network 110 | 111 | ##### Create a docker network 112 | ``` 113 | docker network create deadshot-network 114 | ``` 115 | Run the images using the created network in the following order: 116 | Start the redis container: 117 | ``` 118 | docker run --net deadshot-network --name redis deadshot-redis: 119 | ``` 120 | 121 | Start the celery container: 122 | ``` 123 | docker run --net deadshot-network deadshot-worker: 124 | ``` 125 | 126 | Start the Flask API container: 127 | ``` 128 | docker run --net deadshot-network -p 9001:9001 deadshot-api: 129 | ``` 130 | 131 | ### Building and running a single image for the Flask API container and the celery worker container 132 | #### This step is useful only if you have an orchestration setup that allows you to feed in environment variables, secrets, and other configurations at deployment time, as illustrated in the sketch below. Please use the above method of running the containers if you don't have a configurable CI/CD setup. 
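To make the note above concrete, here is an illustrative sketch (not a command shipped with the project) of how a deployment system could run the combined image built by `make build` (tagged `deadshot` by the Makefile). The network name, broker settings, mock GitHub hosts, and port mapping are assumptions carried over from the earlier docker run examples.
```bash
# Illustrative only: configuration is injected at deploy time and the mode is
# selected via DEADSHOT_RUN_MODE (api or worker)
docker run --net deadshot-network \
  -e DEADSHOT_RUN_MODE=api \
  -e CELERY_BROKER_HOST=redis -e CELERY_BROKER_PORT=6379 -e CELERY_BROKER_DATABASE=1 \
  -e GITHUB_URL=github.mockcompany.com -e GITHUB_API=https://github.mockcompany.com/api/v3 \
  -p 9001:9001 deadshot

docker run --net deadshot-network \
  -e DEADSHOT_RUN_MODE=worker \
  -e CELERY_BROKER_HOST=redis -e CELERY_BROKER_PORT=6379 -e CELERY_BROKER_DATABASE=1 \
  -e GITHUB_URL=github.mockcompany.com -e GITHUB_API=https://github.mockcompany.com/api/v3 \
  deadshot
```
Other variables (JIRA_SERVER, GITHUB_APP_NAME, and the SECRET_* file paths) would be injected the same way by your orchestration tooling.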
133 | To build a single docker image that can bring up either the API or the Celery worker based on the DEADSHOT_RUN_MODE environment variable, run: 134 | ```bash 135 | make build 136 | ``` 137 | This command will also create the redis image that is needed for the service 138 | 139 | If the built image is run with the environment variable DEADSHOT_RUN_MODE=api, it will bring up the Flask application. 140 | If the image is run with the environment variable DEADSHOT_RUN_MODE=worker, then the Celery worker will be initiated. 141 | 142 | ### Server Healthcheck 143 | Now that the API is ready to receive requests, navigating to `http://localhost:9001/api/v1/healthcheck` in a browser should return a valid response, or you could use curl 144 | ```bash 145 | curl localhost:9001/api/v1/healthcheck 146 | ``` 147 | Both should show the following message: 148 | `{"healthcheck": "ready"}` 149 | 150 | ### Running a Pull Request scan 151 | If you have a Github app webhook payload for your Pull Request, then you can run the following curl command locally to test your application: 152 | ```bash 153 | curl -X POST -H "content-type: application/json" -H "X-GitHub-Enterprise-Host: github.mockcompany.com" -H "X-Hub-Signature: sha1=85df4936c6396c149be94144befab41168149840" -H "X-GitHub-Event: pull_request" -d @tests/fixtures/good_pr.json http://localhost:9001/api/v1/deadshot-webhook 154 | ``` 155 | ## Adding new regular expressions 156 | If you want the tool to monitor other types of secrets, add your regular expressions to the [regex.json](https://github.com/twilio-labs/deadshot/blob/main/deadshot/services/scanner/secrets_json/regex.json) file 157 | 158 | Note: The entropy check flag allows you to look for high-entropy findings in addition to the regular expression match 159 | 160 | ## Limitations 161 | At this time, Deadshot has only been tested with Github Enterprise, but it should work with Github cloud as well. 162 | -------------------------------------------------------------------------------- /bin/run.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | 4 | # DEADSHOT_RUN_MODE environment variable is set to 5 | # instruct container how to start up (either as Flask API, 6 | # or as Celery worker to run PR checks) 7 | case "$DEADSHOT_RUN_MODE" in 8 | api) 9 | CMD="gunicorn -b 0.0.0.0:9001 --workers=5 \"deadshot:create_app()\"" 10 | ;; 11 | 12 | worker) 13 | CMD="celery -A deadshot.worker.celery_initialization.celery worker --loglevel=INFO" 14 | ;; 15 | 16 | *) 17 | echo $"DEADSHOT_RUN_MODE must be set to 'api' or 'worker'" 18 | exit 1 19 | 20 | esac 21 | export PYTHONPATH=/home/twilio/app/deadshot 22 | cd deadshot 23 | echo "[>] Command set to: $CMD $CELERY_BROKER_HOST $CELERY_BROKER_PORT" 24 | echo "[i] Going to wait for redis..." 25 | /bin/bash $APP_DIR/bin/wait-for-it.sh $CELERY_BROKER_HOST:$CELERY_BROKER_PORT --timeout=60 --strict 26 | WAIT_RESULT=$? # capture wait-for-it's exit code before the echo below overwrites $? 27 | echo "[i] Done waiting. Exit code: $WAIT_RESULT" 28 | if [ $WAIT_RESULT 
!= 0 ]; then 29 | echo "Error: Redis DB not available after 60s" 30 | exit 1 31 | fi 32 | 33 | echo "Running: $CMD" 34 | eval $CMD 35 | -------------------------------------------------------------------------------- /bin/wait-for-it.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # Use this script to test if a given TCP host/port are available 3 | 4 | cmdname=$(basename $0) 5 | 6 | echoerr() { if [[ $QUIET -ne 1 ]]; then echo "$@" 1>&2; fi } 7 | 8 | usage() 9 | { 10 | cat << USAGE >&2 11 | Usage: 12 | $cmdname host:port [-s] [-t timeout] [-- command args] 13 | -h HOST | --host=HOST Host or IP under test 14 | -p PORT | --port=PORT TCP port under test 15 | Alternatively, you specify the host and port as host:port 16 | -s | --strict Only execute subcommand if the test succeeds 17 | -q | --quiet Don't output any status messages 18 | -t TIMEOUT | --timeout=TIMEOUT 19 | Timeout in seconds, zero for no timeout 20 | -- COMMAND ARGS Execute command with args after the test finishes 21 | USAGE 22 | exit 1 23 | } 24 | 25 | wait_for() 26 | { 27 | if [[ $TIMEOUT -gt 0 ]]; then 28 | echoerr "$cmdname: waiting $TIMEOUT seconds for $HOST:$PORT" 29 | else 30 | echoerr "$cmdname: waiting for $HOST:$PORT without a timeout" 31 | fi 32 | start_ts=$(date +%s) 33 | while : 34 | do 35 | if [[ $ISBUSY -eq 1 ]]; then 36 | nc -z $HOST $PORT 37 | result=$? 38 | else 39 | (echo > /dev/tcp/$HOST/$PORT) >/dev/null 2>&1 40 | result=$? 41 | fi 42 | if [[ $result -eq 0 ]]; then 43 | end_ts=$(date +%s) 44 | echoerr "$cmdname: $HOST:$PORT is available after $((end_ts - start_ts)) seconds" 45 | break 46 | fi 47 | sleep 1 48 | done 49 | return $result 50 | } 51 | 52 | wait_for_wrapper() 53 | { 54 | # In order to support SIGINT during timeout: http://unix.stackexchange.com/a/57692 55 | if [[ $QUIET -eq 1 ]]; then 56 | timeout $BUSYTIMEFLAG $TIMEOUT $0 --quiet --child --host=$HOST --port=$PORT --timeout=$TIMEOUT & 57 | else 58 | timeout $BUSYTIMEFLAG $TIMEOUT $0 --child --host=$HOST --port=$PORT --timeout=$TIMEOUT & 59 | fi 60 | PID=$! 61 | trap "kill -INT -$PID" INT 62 | wait $PID 63 | RESULT=$? 64 | if [[ $RESULT -ne 0 ]]; then 65 | echoerr "$cmdname: timeout occurred after waiting $TIMEOUT seconds for $HOST:$PORT" 66 | fi 67 | return $RESULT 68 | } 69 | 70 | # process arguments 71 | while [[ $# -gt 0 ]] 72 | do 73 | case "$1" in 74 | *:* ) 75 | hostport=(${1//:/ }) 76 | HOST=${hostport[0]} 77 | PORT=${hostport[1]} 78 | shift 1 79 | ;; 80 | --child) 81 | CHILD=1 82 | shift 1 83 | ;; 84 | -q | --quiet) 85 | QUIET=1 86 | shift 1 87 | ;; 88 | -s | --strict) 89 | STRICT=1 90 | shift 1 91 | ;; 92 | -h) 93 | HOST="$2" 94 | if [[ $HOST == "" ]]; then break; fi 95 | shift 2 96 | ;; 97 | --host=*) 98 | HOST="${1#*=}" 99 | shift 1 100 | ;; 101 | -p) 102 | PORT="$2" 103 | if [[ $PORT == "" ]]; then break; fi 104 | shift 2 105 | ;; 106 | --port=*) 107 | PORT="${1#*=}" 108 | shift 1 109 | ;; 110 | -t) 111 | TIMEOUT="$2" 112 | if [[ $TIMEOUT == "" ]]; then break; fi 113 | shift 2 114 | ;; 115 | --timeout=*) 116 | TIMEOUT="${1#*=}" 117 | shift 1 118 | ;; 119 | --) 120 | shift 121 | CLI=("$@") 122 | break 123 | ;; 124 | --help) 125 | usage 126 | ;; 127 | *) 128 | echoerr "Unknown argument: $1" 129 | usage 130 | ;; 131 | esac 132 | done 133 | 134 | if [[ "$HOST" == "" || "$PORT" == "" ]]; then 135 | echoerr "Error: you need to provide a host and port to test." 
136 | usage 137 | fi 138 | 139 | TIMEOUT=${TIMEOUT:-15} 140 | STRICT=${STRICT:-0} 141 | CHILD=${CHILD:-0} 142 | QUIET=${QUIET:-0} 143 | 144 | # check to see if timeout is from busybox? 145 | # check to see if timeout is from busybox? 146 | TIMEOUT_PATH=$(realpath $(which timeout)) 147 | if [[ $TIMEOUT_PATH =~ "busybox" ]]; then 148 | ISBUSY=1 149 | BUSYTIMEFLAG="-t" 150 | else 151 | ISBUSY=0 152 | BUSYTIMEFLAG="" 153 | fi 154 | 155 | if [[ $CHILD -gt 0 ]]; then 156 | wait_for 157 | RESULT=$? 158 | exit $RESULT 159 | else 160 | if [[ $TIMEOUT -gt 0 ]]; then 161 | wait_for_wrapper 162 | RESULT=$? 163 | else 164 | wait_for 165 | RESULT=$? 166 | fi 167 | fi 168 | 169 | if [[ $CLI != "" ]]; then 170 | if [[ $RESULT -ne 0 && $STRICT -eq 1 ]]; then 171 | echoerr "$cmdname: strict mode, refusing to execute subprocess" 172 | exit $RESULT 173 | fi 174 | exec "${CLI[@]}" 175 | else 176 | exit $RESULT 177 | fi -------------------------------------------------------------------------------- /configuration-schema.yaml: -------------------------------------------------------------------------------- 1 | schema: 2 | description: Deadshot Github API Service 3 | properties: 4 | CELERY_BROKER_HOST: 5 | description: Celery broker host to use for api->worker 6 | type: string 7 | default: localhost 8 | CELERY_BROKER_PORT: 9 | description: Celery broker port to use for api->worker 10 | type: string 11 | default: 6379 12 | CELERY_BROKER_DATABASE: 13 | description: Celery broker database to use for api->worker 14 | type: string 15 | default: 1 16 | DEADSHOT_LOG_LEVEL: 17 | description: Sets the log level for the service 18 | type: string 19 | default: INFO 20 | DEADSHOT_RUN_MODE: 21 | description: Sets o-v to run in API or Worker mode 22 | enum: [api, worker] 23 | default: api 24 | GITHUB_API: 25 | description: Github API URL 26 | type: string 27 | default: https:///api/v3 28 | GITHUB_APP_NAME: 29 | description: Github App name used to get the latest comment by the app 30 | type: string 31 | default: deadshot[bot] 32 | GITHUB_URL: 33 | description: Github URL 34 | type: string 35 | default: 36 | JIRA_SERVER: 37 | description: Jira server URL for SSD ticket creation 38 | type: string 39 | default: 40 | required: 41 | - CELERY_BROKER_HOST 42 | - CELERY_BROKER_PORT 43 | - CELERY_BROKER_DATABASE 44 | - DEADSHOT_LOG_LEVEL 45 | - DEADSHOT_RUN_MODE 46 | - GITHUB_API 47 | - GITHUB_APP_NAME 48 | - GITHUB_URL 49 | - JIRA_SERVER 50 | -------------------------------------------------------------------------------- /configuration/environment/localdev.env: -------------------------------------------------------------------------------- 1 | FLASK_APP=deadshot:create_app('localdev') 2 | FLASK_DEBUG=true 3 | DEADSHOT_DEBUG="true" 4 | DEADSHOT_CONFIG=localdev 5 | DEADSHOT_LOG_LEVEL=DEBUG 6 | 7 | CELERY_BROKER_HOST=redis 8 | CELERY_BROKER_PORT=6379 9 | CELERY_BROKER_DATABASE=1 10 | 11 | GITHUB_APP_NAME=deadshot[bot] 12 | 13 | GITHUB_URL= 14 | GITHUB_API=https:///api/v3 15 | JIRA_SERVER= 16 | 17 | 18 | SECRET_GITHUB_SECRET=/home/twilio/app/deadshot/secrets/github_secrets.json 19 | SECRET_SLACK_WEBHOOKS=/home/twilio/app/deadshot/secrets/slack_webhook.json 20 | SECRET_JIRA_AUTH=/home/twilio/app/deadshot/secrets/jira_user.json 21 | -------------------------------------------------------------------------------- /configuration/environment/test.env: -------------------------------------------------------------------------------- 1 | FLASK_APP=deadshot:create_app('test') 2 | DEADSHOT_CONFIG=test 3 | GITHUB_URL=mock.github.com 4 | 5 | 
SECRET_GITHUB_SECRET=/home/twilio/app/deadshot/tests/test_secrets/github_secrets.json 6 | SECRET_AD_USER1=/home/twilio/app/deadshot/tests/test_secrets/ad_user1.json" 7 | SECRET_AD_USER2=/home/twilio/app/deadshot/tests/test_secrets/ad_user2.json 8 | -------------------------------------------------------------------------------- /deadshot/__init__.py: -------------------------------------------------------------------------------- 1 | from celery import Celery 2 | from .api_server import create_app 3 | from . import * 4 | from deadshot.configurations.celery_config_data import celery_config 5 | 6 | 7 | def make_celery(app_name=__name__): 8 | return Celery( 9 | celery_config["name"], 10 | backend=celery_config["broker"], 11 | broker=celery_config["backend"] 12 | ) 13 | 14 | 15 | celery = make_celery() 16 | -------------------------------------------------------------------------------- /deadshot/api_server.py: -------------------------------------------------------------------------------- 1 | from deadshot.configurations.api_server_config_data import config_map 2 | from flask import Flask 3 | import os 4 | ''' 5 | Method to initiate the Flask API server 6 | ''' 7 | 8 | 9 | def create_app(config_object_name=None, 10 | config_dict=None, 11 | **kwargs): 12 | app = Flask(__name__) 13 | if config_object_name is None: 14 | config_object_name = os.environ.get("DEADSHOT_CONFIG", "default") 15 | app.config.from_object(config_map.get(config_object_name)) 16 | 17 | if config_dict is not None: 18 | app.config.from_mapping(config_dict) 19 | 20 | from deadshot.blueprints.blueprints import api_blueprint 21 | app.register_blueprint(api_blueprint, url_prefix='/api/v1') 22 | 23 | return app 24 | -------------------------------------------------------------------------------- /deadshot/blueprints/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/twilio-labs/deadshot/25f2fb13bc99853ac141521cab61f2ba6835f0a0/deadshot/blueprints/__init__.py -------------------------------------------------------------------------------- /deadshot/blueprints/api_exceptions.py: -------------------------------------------------------------------------------- 1 | from flask import jsonify 2 | 3 | # This file defines standard API error responses 4 | 5 | 6 | class APIException(Exception): 7 | def __init__( 8 | self, 9 | code, 10 | message=None, 11 | errors=None, 12 | resource=None, 13 | field=None, 14 | value=None, 15 | payload=None 16 | ): 17 | self.code = code 18 | self.message = message 19 | self.errors = errors 20 | self.resource = resource 21 | self.field = field 22 | self.value = value 23 | self.payload = payload 24 | 25 | def jsonify(self): 26 | response = dict(self.payload or ()) 27 | response["code"] = self.code 28 | if self.message is not None: 29 | response["message"] = self.message 30 | if self.errors is not None: 31 | response["errors"] = self.errors 32 | if self.resource is not None: 33 | response["resource"] = self.resource 34 | if self.field is not None and self.value is not None: 35 | response["field"] = self.field 36 | response["value"] = self.value 37 | return jsonify(response) 38 | 39 | 40 | class BadRequestException(APIException): 41 | def __init__(self, message): 42 | APIException.__init__( 43 | self, 44 | code=400, 45 | message=message) 46 | 47 | 48 | class UnauthorizedException(APIException): 49 | def __init__(self, message): 50 | APIException.__init__( 51 | self, 52 | code=401, 53 | message=message) 54 | 55 | 56 | class 
ResourceNotFoundException(APIException): 57 | def __init__(self, resource, field, value): 58 | APIException.__init__( 59 | self, 60 | code=404, 61 | resource=resource, 62 | field=field, 63 | value=value 64 | ) 65 | 66 | 67 | class UnprocessableEntityException(APIException): 68 | def __init__(self, message, errors): 69 | APIException.__init__( 70 | self, 71 | code=422, 72 | message=message, 73 | errors=errors) 74 | 75 | 76 | class InternalServerErrorException(APIException): 77 | def __init__(self, message): 78 | APIException.__init__( 79 | self, 80 | code=500, 81 | message=message) 82 | -------------------------------------------------------------------------------- /deadshot/blueprints/api_responses.py: -------------------------------------------------------------------------------- 1 | from flask import make_response, jsonify, request 2 | 3 | # This file defines a standard format for Flask API responses 4 | 5 | 6 | def get_json_response(dict_object, code=200): 7 | return make_response(dict_object, code) 8 | 9 | 10 | def get_object(object_name, properties, code=200): 11 | return make_response(jsonify({ 12 | "code": code, 13 | object_name: properties 14 | }), code) 15 | 16 | 17 | def create_object(object_name, properties): 18 | return get_object(object_name, properties, code=201) 19 | 20 | 21 | def list_objects(page, page_size, items, total): 22 | count = len(items) 23 | 24 | base_url = request.base_url 25 | next_page = None 26 | if (page * page_size) < total: 27 | next_page = "{}?page={}&page_size={}".format( 28 | base_url, 29 | page + 1, 30 | page_size 31 | ) 32 | 33 | previous_page = None 34 | if page > 1: 35 | previous_page = "{}?page={}&page_size={}".format( 36 | base_url, 37 | page - 1, 38 | page_size 39 | ) 40 | 41 | return make_response(jsonify({ 42 | "code": 200, 43 | "items": items, 44 | "meta": { 45 | "key": "items", 46 | "count": count, 47 | "total": total, 48 | "next": next_page, 49 | "previous": previous_page 50 | } 51 | }), 200) 52 | -------------------------------------------------------------------------------- /deadshot/blueprints/blueprints.py: -------------------------------------------------------------------------------- 1 | from . 
import api_exceptions 2 | from deadshot.services.celery_worker.webhook_async_processor import webhook_async 3 | from deadshot.services.common.logger import get_logger 4 | from deadshot.services.github.sender_verification import SenderVerificationProcessor 5 | from flask import Blueprint 6 | from flask import request, jsonify 7 | import json 8 | 9 | # This is a blueprint file for defining new API routes on the Flask application 10 | # and defining how each route should be handled 11 | 12 | logger = get_logger() 13 | api_blueprint = Blueprint('api_blueprint', __name__) 14 | 15 | 16 | @api_blueprint.errorhandler(api_exceptions.APIException) 17 | def handleAPIException(error): 18 | response = error.jsonify() 19 | response.status_code = error.code 20 | return response 21 | 22 | 23 | def handle_webhook(webhook_json): 24 | # call webhook async processor in a celery worker file 25 | webhook_async.delay(webhook_json) 26 | 27 | 28 | @api_blueprint.route('/healthcheck', methods=['GET']) 29 | def healthcheck(): 30 | # Health check endpoint to test if the Flask application is up and running 31 | return json.dumps({"healthcheck": "ready"}) 32 | 33 | 34 | @api_blueprint.route('/deadshot-webhook', methods=['POST']) 35 | def webhook_handler(): 36 | # Endpoint called by your Github App webhook to send the Github PR payload 37 | # This method first verifies the sender of the payload and the signature of the payload. 38 | # If the request passes all checks then the payload is passed on to a celery task to initiate 39 | # comparison of the Pull Request diff lines against predefined regular expressions 40 | sender_host = request.headers.get('X-GitHub-Enterprise-Host') 41 | event_type = request.headers.get('X-GitHub-Event') 42 | sent_signature = request.headers.get('X-Hub-Signature') 43 | 44 | request_body = request.get_data() 45 | webhook_payload = json.loads(request_body) 46 | # Call function to verify the sender of the request 47 | sender_verify = SenderVerificationProcessor(sender_host, event_type, sent_signature, request_body) 48 | sender_status = sender_verify.verify_sender() 49 | 50 | if not sender_status: 51 | raise api_exceptions.BadRequestException("Invalid Sender") 52 | 53 | try: 54 | handle_webhook(webhook_payload) 55 | except Exception as e: 56 | logger.error(f"Failed queuing celery task: {e}") 57 | 58 | return jsonify({ 59 | "Status": 200 60 | }) 61 | -------------------------------------------------------------------------------- /deadshot/configurations/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/twilio-labs/deadshot/25f2fb13bc99853ac141521cab61f2ba6835f0a0/deadshot/configurations/__init__.py -------------------------------------------------------------------------------- /deadshot/configurations/api_server_config_data.py: -------------------------------------------------------------------------------- 1 | import os 2 | # Configuration settings for the Flask application container 3 | 4 | 5 | class Config: 6 | LOG_LEVEL = os.environ.get("DEADSHOT_LOG_LEVEL", "INFO") 7 | # Should be loaded from secrets 8 | 9 | # Only ever set for mocks/tests 10 | DEBUG = False 11 | TESTING = False 12 | 13 | def get_log_level(self): 14 | return os.environ.get("DEADSHOT_LOG_LEVEL", "INFO") 15 | 16 | 17 | class LocalDevelopmentConfig(Config): 18 | LOG_LEVEL = os.environ.get("DEADSHOT_LOG_LEVEL", "DEBUG") 19 | DEBUG = os.environ.get("DEADSHOT_DEBUG", "true") == "true" 20 | 21 | 22 | class TestConfig(Config): 23 | ACTIVE_DIRECTORY_ADAPTER = 
"mock" 24 | TESTING = True 25 | 26 | 27 | class ProductionConfig(Config): 28 | DEBUG = False 29 | TESTING = False 30 | 31 | 32 | config_map = { 33 | 'localdev': LocalDevelopmentConfig, 34 | 'test': TestConfig, 35 | 'production': ProductionConfig, 36 | 'default': ProductionConfig 37 | } 38 | -------------------------------------------------------------------------------- /deadshot/configurations/celery_config_data.py: -------------------------------------------------------------------------------- 1 | import os 2 | # Configuration settings for the celery container 3 | name = os.environ.get("CELERY_NAME", "deadshot") 4 | broker_host = os.environ.get("CELERY_BROKER_HOST", "deadshot-redis") 5 | broker_port = os.environ.get("CELERY_BROKER_PORT", "6379") 6 | broker_db = os.environ.get("CELERY_BROKER_DATABASE", "1") 7 | broker = "redis://" + broker_host + ":" + broker_port + "/" + broker_db 8 | backend = broker 9 | celery_config = { 10 | "name": name, 11 | "broker": broker, 12 | "backend": backend 13 | } 14 | -------------------------------------------------------------------------------- /deadshot/configurations/github_config.py: -------------------------------------------------------------------------------- 1 | from deadshot.services.common.secrets_loader import get_secrets 2 | import os 3 | # Configuration settings for Github 4 | 5 | 6 | class GithubConfig: 7 | def get_github_secrets(self): 8 | _github_secrets = get_secrets("SECRET_GITHUB_SECRET") 9 | webhook_secret = _github_secrets["webhook_secret"] 10 | integration_id = int(_github_secrets["github_app_integration_id"]) 11 | app_pem = _github_secrets["github_app_pem_key"] 12 | return integration_id, app_pem 13 | 14 | def get_github_url(self): 15 | gh_url = os.environ.get("GITHUB_URL") 16 | return gh_url 17 | 18 | def get_github_webhook_secret(self): 19 | _github_secrets = get_secrets("SECRET_GITHUB_SECRET") 20 | webhook_secret = _github_secrets["webhook_secret"] 21 | return webhook_secret 22 | 23 | def get_github_api(self): 24 | gh_api = os.environ.get("GITHUB_API") 25 | return gh_api 26 | 27 | def get_github_app_name(self): 28 | app_name = os.environ.get("GITHUB_APP_NAME") 29 | return app_name 30 | -------------------------------------------------------------------------------- /deadshot/configurations/jira_config.py: -------------------------------------------------------------------------------- 1 | from deadshot.services.common.secrets_loader import get_secrets 2 | import os 3 | # Configuration settings for JIRA service access 4 | 5 | 6 | class JiraConfig: 7 | def get_jira_url(self): 8 | jira_url = os.environ.get("JIRA_SERVER") 9 | return jira_url 10 | 11 | def get_jira_creds(self): 12 | _jira_secrets = get_secrets("SECRET_JIRA_AUTH") 13 | _jira_username = _jira_secrets["username"] 14 | _jira_password = _jira_secrets["password"] 15 | return _jira_username, _jira_password 16 | -------------------------------------------------------------------------------- /deadshot/configurations/slack_config.py: -------------------------------------------------------------------------------- 1 | from deadshot.services.common.secrets_loader import get_secrets 2 | # Configuration settings for Slack webhooks to be used to send slack notifications 3 | 4 | 5 | class SlackConfig: 6 | def get_slack_webhook(self): 7 | webhooks = get_secrets("SECRET_SLACK_WEBHOOKS") 8 | webhook = webhooks["hook"] 9 | return webhook 10 | -------------------------------------------------------------------------------- /deadshot/services/celery_worker/webhook_async_processor.py: 
-------------------------------------------------------------------------------- 1 | from celery.utils.log import get_task_logger 2 | from deadshot import celery 3 | from deadshot.configurations.github_config import GithubConfig 4 | from deadshot.services.common.jira_service import JiraService 5 | from deadshot.services.github.github_service import GithubAppService 6 | from deadshot.services.github.webhook_processor import PullRequestWebhookProcessor 7 | from deadshot.services.scanner.pr_scanner import PRScanner 8 | 9 | # This is the celery worker that processes each webhook payload after the blueprint verifies it's from a valid 10 | # sender. The worker handles Pull Requests based on action mentioned in the payload (webhook_json). If it receives 11 | # a open, reopen, or synchronize action it proceeds to search for tokens by passing it along to the pr_scanner 12 | # function. If it receives a closed action then it scans for secrets and creates a JIRA ticket in the Security 13 | # team's queue so they can reach out to the engineers. 14 | 15 | logger = get_task_logger(__name__) 16 | 17 | 18 | @celery.task 19 | def webhook_async(webhook_json): 20 | pr_webhook_processor = PullRequestWebhookProcessor(webhook_json) 21 | if pr_webhook_processor.pr_processor(): 22 | html_url = webhook_json['pull_request']['html_url'] 23 | logger.info(f"Received request from {html_url}") 24 | pr_scanner = PRScanner(webhook_json) 25 | pr_scanner.scan() 26 | logger.info(f"Finished processing {html_url}") 27 | 28 | elif pr_webhook_processor.pr_closed(): 29 | try: 30 | install_id = webhook_json["installation"]["id"] 31 | html_url = webhook_json['pull_request']['html_url'] 32 | logger.info(f"Received closed pull request from {html_url}") 33 | gh_app_service = GithubAppService() 34 | gh_api = GithubConfig().get_github_api() 35 | git_token = gh_app_service.get_github_app_token(gh_api, install_id) 36 | 37 | pr_scanner = PRScanner(webhook_json) 38 | identified_secrets = pr_scanner.identify_secrets(git_token) 39 | 40 | if len(identified_secrets) > 0: 41 | description = pr_scanner.create_jira_description(identified_secrets) 42 | jira_summary = "Deadshot identified secrets in a closed PR" 43 | jira_description = f"Please check PR: {html_url} \nThe following were identified:\n" + description 44 | jira_service = JiraService() 45 | jira_service.create_jira_ticket(summary=jira_summary, description=jira_description) 46 | logger.info(f"Finished processing {html_url}") 47 | except Exception as e: 48 | logger.error(f"Exception: {e}") 49 | -------------------------------------------------------------------------------- /deadshot/services/common/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/twilio-labs/deadshot/25f2fb13bc99853ac141521cab61f2ba6835f0a0/deadshot/services/common/__init__.py -------------------------------------------------------------------------------- /deadshot/services/common/jira_service.py: -------------------------------------------------------------------------------- 1 | from celery.utils.log import get_task_logger 2 | from deadshot.configurations.jira_config import JiraConfig 3 | from jira import JIRA 4 | 5 | # This class defines a method to create a JIRA ticket in a single project when a PR is closed without addressing 6 | # all the identified secrets in the PR. 
This currently allows for the security team to get a ticket in their queue 7 | # and follow up with the team that merged the PR 8 | 9 | logger = get_task_logger(__name__) 10 | 11 | 12 | class JiraService: 13 | def __init__(self): 14 | pass 15 | 16 | # @param project - project ID on JIRA board 17 | # @param issuetype - JIRA board issue type relevant to your org 18 | def create_jira_ticket(self, project='SECURITY', summary='', description='', labels=[]): 19 | config = JiraConfig() 20 | _user, _password = config.get_jira_creds() 21 | _server = config.get_jira_url() 22 | jira = JIRA(basic_auth=(_user, _password), options={'server': _server}) 23 | issue_dict = { 24 | 'project': project, 25 | 'summary': summary, 26 | 'description': description, 27 | 'issuetype': {'name': 'Security'} 28 | } 29 | 30 | try: 31 | new_issue = jira.create_issue(fields=issue_dict) 32 | return new_issue.key 33 | except Exception as e: 34 | logger.error(f"Error creating SSD ticket: {e}") 35 | -------------------------------------------------------------------------------- /deadshot/services/common/logger.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import sys 3 | from datetime import datetime 4 | from deadshot.configurations.api_server_config_data import Config 5 | from pythonjsonlogger import jsonlogger 6 | 7 | # This is a standardized logger function to be used in non-celery worker calls. For now, it's only used in the 8 | # blueprints file functions 9 | 10 | 11 | class CustomJsonFormatter(jsonlogger.JsonFormatter): 12 | def add_fields(self, log_record, record, message_dict): 13 | super(CustomJsonFormatter, self).add_fields(log_record, 14 | record, message_dict) 15 | if not log_record.get('timestamp'): 16 | # this doesn't use record.created, so it is slightly off 17 | now = datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S.%fZ') 18 | log_record['timestamp'] = now 19 | if log_record.get('levelname'): 20 | log_record['levelname'] = log_record['levelname'].upper() 21 | else: 22 | log_record['levelname'] = record.levelname 23 | 24 | 25 | def get_logger(logging_level="INFO", logging_config="json"): 26 | logger = logging.getLogger() 27 | 28 | while logger.handlers: 29 | logger.handlers.pop() 30 | 31 | logHandler = logging.StreamHandler(sys.stdout) 32 | logging_level = Config().get_log_level() 33 | logHandler.setLevel(logging_level) 34 | 35 | if logging_config == "json": 36 | formatter = CustomJsonFormatter('%(message)s %(levelname)s' 37 | ' %(timestamp)s %(module)s' 38 | ' %(lineno)s %(process)s' 39 | '%(filename)s %(funcName)s' 40 | '%(thread)s') 41 | logHandler.setFormatter(formatter) 42 | 43 | logger.addHandler(logHandler) 44 | logger.setLevel(logging_level) 45 | 46 | return logger 47 | -------------------------------------------------------------------------------- /deadshot/services/common/secrets_loader.py: -------------------------------------------------------------------------------- 1 | import json 2 | import os 3 | from celery.utils.log import get_task_logger 4 | logger = get_task_logger(__name__) 5 | 6 | # JSON secret files loader to use for loading service secrets. 
The function loads files as a dict object for consumption 7 | # in Python code 8 | 9 | 10 | class SecretsLoaderException(Exception): 11 | pass 12 | 13 | 14 | def get_secrets(secrets_env_name): 15 | secrets_filename = os.environ.get(secrets_env_name) 16 | 17 | if secrets_filename is None: 18 | msg = f"Secrets Filename Env ({secrets_env_name}) " + \ 19 | "not defined, will not be usable" 20 | logger.error(msg) 21 | raise SecretsLoaderException(msg) 22 | elif not os.path.exists(secrets_filename): 23 | msg = f"Secrets File {secrets_filename} not found" 24 | logger.error(msg) 25 | raise SecretsLoaderException(msg) 26 | else: 27 | with open(secrets_filename) as secrets_file: 28 | secrets_json = json.loads(secrets_file.read()) 29 | return secrets_json 30 | -------------------------------------------------------------------------------- /deadshot/services/common/slack_notification.py: -------------------------------------------------------------------------------- 1 | from celery.utils.log import get_task_logger 2 | from deadshot.configurations.slack_config import SlackConfig 3 | import requests 4 | import json 5 | 6 | # Class with method to send notification to a Slack channel whenever the tool finds secrets for which the admin set 7 | # slack_alert to True in regex.json 8 | 9 | logger = get_task_logger(__name__) 10 | 11 | 12 | class SlackService: 13 | def send_message(self, slack_message): 14 | try: 15 | message = f"*Deadshot Notification:* \n" 16 | message = message + slack_message 17 | slack_message = {'text': message} 18 | webhook = SlackConfig().get_slack_webhook() 19 | slack_alert_response = requests.post( 20 | webhook, data=json.dumps(slack_message), 21 | headers={'Content-Type': 'application/json'}) 22 | if slack_alert_response.status_code != 200: 23 | logger.error(f"Failed sending alert to slack with " 24 | f"status code:" 25 | f" {slack_alert_response.status_code}") 26 | except Exception as e: 27 | logger.error(f"Failed slack notify: {e}") 28 | -------------------------------------------------------------------------------- /deadshot/services/github/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/twilio-labs/deadshot/25f2fb13bc99853ac141521cab61f2ba6835f0a0/deadshot/services/github/__init__.py -------------------------------------------------------------------------------- /deadshot/services/github/git_diff_parser.py: -------------------------------------------------------------------------------- 1 | import requests 2 | import unidiff 3 | import os 4 | import logging 5 | logger = logging.getLogger() 6 | 7 | # This file defines classes and methods to get diff file types and lines of a Pull Request 8 | # for which the task was initiated 9 | 10 | 11 | class DiffFileTypes: 12 | # This class is used to get the file type in the Pull Request currently being scanned 13 | PYTHON = "py" 14 | PYTHON_REQUIREMENTS = "python_requirements" 15 | JAVA = "java" 16 | MAVEN = "maven" 17 | PHP = "php" 18 | SCALA = "scala" 19 | JAVASCRIPT = "js" 20 | RUBY = "ruby" 21 | C = "c" 22 | CPP = "cpp" 23 | 24 | @classmethod 25 | def get_filetype_from_filename(cls, fullpath): 26 | # if it's part of a filepath = /ab/c/ds/dd.java 27 | filename = fullpath.split("/")[-1] 28 | endings = [ 29 | (".java", cls.JAVA), 30 | (".py", cls.PYTHON), 31 | (".php", cls.PHP), 32 | (".scala", cls.SCALA), 33 | (".rb", cls.RUBY), 34 | (".js", cls.JAVASCRIPT), 35 | (".c", cls.C), 36 | (".cpp", cls.CPP), 37 | ("requirements.txt", cls.PYTHON_REQUIREMENTS) 38 | 
] 39 | for ending in endings: 40 | if filename.endswith(ending[0]): 41 | return ending[1] 42 | return None 43 | 44 | 45 | class DiffFile: 46 | def __init__(self, patch_file): 47 | self.patch_file = patch_file 48 | self.full_filename = patch_file.path 49 | self.source_file = patch_file.source_file 50 | self.target_file = patch_file.target_file 51 | self.file_type = DiffFileTypes.get_filetype_from_filename( 52 | self.full_filename 53 | ) 54 | 55 | def __str__(self): 56 | return "{} ({})".format( 57 | self.full_filename, 58 | self.file_type 59 | ) 60 | 61 | def diff_lines(self): 62 | for patch in self.patch_file: 63 | for line in patch: 64 | diff_line = DiffLine( 65 | diff_file=self, 66 | value=line.value, 67 | line_type=line.line_type, 68 | source_line_number=line.source_line_no, 69 | target_line_number=line.target_line_no 70 | ) 71 | yield diff_line 72 | 73 | 74 | class DiffLine: 75 | LINE_TYPE_ADDED = '+' 76 | LINE_TYPE_REMOVED = '-' 77 | LINE_TYPE_CONTEXT = ' ' 78 | LINE_TYPE_EMPTY = '' 79 | LINE_TYPE_NO_NEWLINE = '\\' 80 | LINE_VALUE_NO_NEWLINE = ' No newline at end of file' 81 | 82 | def __init__( 83 | self, 84 | diff_file, 85 | value, 86 | line_type, 87 | source_line_number, 88 | target_line_number): 89 | self.line_type = line_type 90 | self.line_number = target_line_number 91 | self.source_line_number = source_line_number 92 | self.diff_file = diff_file 93 | self.value = value.strip() 94 | 95 | def __str__(self): 96 | return "".format( 97 | self.line_type, 98 | self.line_number, 99 | self.value 100 | ) 101 | 102 | 103 | class GithubDiffProcessorException(Exception): 104 | pass 105 | 106 | 107 | class GithubDiffProcessor: 108 | def __init__(self, diff_url, token, diff_str=None): 109 | self.token = token 110 | self.diff_url = diff_url 111 | # Lazy load diff_str 112 | self.diff_str = diff_str 113 | 114 | def load_diff_from_url(self): 115 | headers = { 116 | "Authorization": f"token {self.token}", 117 | # "Accept" header controls what API returns to us 118 | # this will return the diff 119 | "Accept": "application/vnd.github.v3.diff" 120 | } 121 | response = requests.get(self.diff_url, headers=headers) 122 | self.diff_str = response.text 123 | return self.diff_str 124 | 125 | def diff_files(self): 126 | if self.diff_str is None: 127 | self.diff_str = self.load_diff_from_url() 128 | 129 | patch_files = unidiff.PatchSet(self.diff_str) 130 | for patch_file in patch_files: 131 | yield DiffFile(patch_file) 132 | -------------------------------------------------------------------------------- /deadshot/services/github/github_service.py: -------------------------------------------------------------------------------- 1 | from celery.utils.log import get_task_logger 2 | from deadshot.configurations.github_config import GithubConfig 3 | from github import Github, GithubIntegration 4 | import hashlib 5 | import hmac 6 | import requests 7 | 8 | logger = get_task_logger(__name__) 9 | 10 | # File defines class and methods to access and modify objects on Github via the Github App 11 | 12 | 13 | class GithubServiceException(Exception): 14 | pass 15 | 16 | 17 | class GithubService: 18 | """Wrapper around PyGithub but also adds functionality""" 19 | 20 | def __init__(self, base_url=None, token=None): 21 | self.github_base_url = base_url 22 | self.github_token = token 23 | try: 24 | if self.github_base_url is not None: 25 | self.github_connection = Github( 26 | base_url=self.github_base_url, 27 | login_or_token=self.github_token) 28 | else: 29 | self.github_connection = Github( 30 | 
login_or_token=self.github_token) 31 | except Exception as e: 32 | logger.error(f"Failed github connection: {e}") 33 | 34 | def edit_app_comment(self, comments, url): 35 | # Method to edit the comments posted by the app on a single Pull Request 36 | try: 37 | headers = { 38 | "Authorization": f"token {self.github_token}" 39 | } 40 | res = requests.patch(url, json={"body": f"{comments}"}, headers=headers) 41 | return res.status_code 42 | except Exception as e: 43 | logger.error(f"Exception: {e}") 44 | 45 | def get_pr_comments(self, repo_name, pr_number): 46 | # Method to get all conversation comments posted on a single PUll Request 47 | try: 48 | repo = self.github_connection.get_repo(repo_name) 49 | pr = repo.get_pull(int(pr_number)) 50 | comments = pr.get_issue_comments() 51 | return comments 52 | except Exception as e: 53 | logger.error(f"Exception: {e}") 54 | 55 | def get_app_comments(self, repo_name, pr_number): 56 | # Method to filter out only the Github app comments from all comments posted on a single Pull Request 57 | try: 58 | app_comments = [] 59 | comments = self.get_pr_comments(repo_name, pr_number) 60 | for comment in comments: 61 | if GithubConfig().get_github_app_name() in comment.user.login: 62 | app_comments.append({ 63 | "id": comment.id, "user": comment.user.login, 64 | "body": comment.body, "issue_url": comment.issue_url, 65 | "url": comment.url, "created_at": comment.created_at, 66 | "updated_at": comment.updated_at 67 | }) 68 | 69 | return app_comments 70 | except Exception as e: 71 | logger.error(f"Exception: {e}") 72 | 73 | def get_latest_app_comment(self, repo_name=None, pr_number=None, comments=None): 74 | # Method to further filter out comments to only get the last/latest comment posted by the 75 | # Github app on a single Pull Request 76 | if comments is None and (repo_name is not None and pr_number is not None): 77 | comments = self.get_app_comments(repo_name, pr_number) 78 | latest_comment = {} 79 | for comment in comments: 80 | if len(latest_comment) < 1: 81 | latest_comment = comment 82 | else: 83 | if comment["updated_at"] > latest_comment["updated_at"]: 84 | latest_comment = comment 85 | return latest_comment 86 | 87 | @staticmethod 88 | def get_signature(payload, secret): 89 | # Method to claculate the HMAC signature of the payload received from Github 90 | key = bytes(secret, 'utf-8') 91 | digester = hmac.new(key=key, msg=payload, digestmod=hashlib.sha1) 92 | digest_signature = digester.hexdigest() 93 | signature = "sha1=" + digest_signature 94 | return signature 95 | 96 | def post_issue_comment(self, comment, repo_name, pr_number): 97 | # Method to post a comment on the Pull Request conversation as the Github app 98 | try: 99 | gh_con = self.github_connection 100 | repo = gh_con.get_repo(repo_name) 101 | pr = repo.get_pull(int(pr_number)) 102 | pr.create_issue_comment(comment) 103 | except Exception as e: 104 | logger.error(e) 105 | 106 | @classmethod 107 | def validate_webhook(cls, webhook_body, webhook_secret, sent_signature): 108 | # Method to validate the received X-GITHUB-SIGNATURE value against that calculated in the deadshot app using the 109 | # shared secret 110 | computed_signature = GithubService.get_signature(webhook_body, 111 | webhook_secret) 112 | if not hmac.compare_digest(sent_signature, computed_signature): 113 | logger.info(f"computed sha: {computed_signature}") 114 | logger.error( 115 | "HMAC comparison of signature failed, raising exception") 116 | raise GithubServiceException( 117 | "Webhook Signature Did Not Match, Please Check 
Signature") 118 | return True 119 | 120 | 121 | class GithubAppService: 122 | # Class to create a Github token using the app installation id, key, and base URL loaded to the Deadshot application 123 | def get_github_app_token(self, base_url, installation_id): 124 | gh_config = GithubConfig() 125 | integration_id, pem_key = gh_config.get_github_secrets() 126 | try: 127 | git_app_handler = GithubIntegration(integration_id, pem_key, base_url=base_url) 128 | access_token = git_app_handler.get_access_token(int(installation_id)) 129 | token = access_token.token 130 | return token 131 | except Exception as e: 132 | logger.error(f"Failed to retrieve git token: {e}") 133 | -------------------------------------------------------------------------------- /deadshot/services/github/sender_verification.py: -------------------------------------------------------------------------------- 1 | from celery.utils.log import get_task_logger 2 | from deadshot.configurations.github_config import GithubConfig 3 | from deadshot.services.github.github_service import GithubService, GithubServiceException 4 | logger = get_task_logger(__name__) 5 | 6 | # This class and methods are used to verify the sender information of the webhook received before forwarding the payload 7 | # for further processing and regex matching. 8 | # Currently it checks for a match in the received host, event type, and signature against those that were either loaded in 9 | # an environment variable or calculated at run time. 10 | 11 | 12 | class SenderVerificationException(Exception): 13 | pass 14 | 15 | 16 | class SenderVerificationProcessor: 17 | def __init__(self, sender_host, event_type, sent_signature, webhook_json): 18 | self.sender_host = sender_host 19 | self.webhook_json = webhook_json 20 | self.event_type = event_type 21 | self.sent_signature = sent_signature 22 | self.all_check_status = False 23 | self.gh_config = GithubConfig() 24 | self.github_url = self.gh_config.get_github_url() 25 | self.git_wh_secret = self.gh_config.get_github_webhook_secret() 26 | 27 | def verify_sender(self): 28 | if self.github_url != self.sender_host: 29 | logger.error(f"Invalid Sender: {self.sender_host}") 30 | 31 | if self.event_type != "pull_request": 32 | logger.error(f"Received a unsupported action: {self.event_type}") 33 | return self.all_check_status 34 | 35 | if self.sent_signature is None: 36 | logger.error("Missing github signature") 37 | return self.all_check_status 38 | 39 | try: 40 | GithubService.validate_webhook( 41 | webhook_body=self.webhook_json, 42 | webhook_secret=self.git_wh_secret, 43 | sent_signature=self.sent_signature 44 | ) 45 | except GithubServiceException as github_service_exception: 46 | logger.error(github_service_exception) 47 | return self.all_check_status 48 | 49 | self.all_check_status = True 50 | 51 | return self.all_check_status 52 | -------------------------------------------------------------------------------- /deadshot/services/github/webhook_processor.py: -------------------------------------------------------------------------------- 1 | from celery.utils.log import get_task_logger 2 | logger = get_task_logger(__name__) 3 | 4 | # The class and methods in this file are used to verify the JSON payload has key fields that'll be necessary in the 5 | # further processing of the payload data 6 | 7 | 8 | class PullRequestWebhookProcessorException(Exception): 9 | pass 10 | 11 | 12 | class PullRequestWebhookProcessor: 13 | def __init__(self, webhook_json): 14 | self.webhook_json = webhook_json 15 | # Github Enterprise 
webhook payload processor 16 | 17 | def pr_processor(self): 18 | try: 19 | pr_type = self.webhook_json["action"] 20 | pr_number = self.webhook_json["number"] 21 | pr_repository_owner = self.webhook_json["repository"]["owner"]["login"] 22 | pr_repository_name = self.webhook_json["repository"]["name"] 23 | installation_id = self.webhook_json["installation"]["id"] 24 | html_url = self.webhook_json['pull_request']['html_url'] 25 | except KeyError as key_error: 26 | logger.error(f"Error retrieving: {key_error}; are you sure this is a pull request?\n") 27 | raise PullRequestWebhookProcessorException( 28 | f"Error retrieving PR field: {key_error}; are you sure this is a pull request?" 29 | ) 30 | 31 | if pr_type == "opened" or pr_type == "synchronize" or pr_type == "reopened": 32 | return True 33 | else: 34 | return False 35 | 36 | def pr_closed(self): 37 | try: 38 | install_id = self.webhook_json["installation"]["id"] 39 | html_url = self.webhook_json['pull_request']['html_url'] 40 | pr_type = self.webhook_json["action"] 41 | if pr_type == "closed": 42 | return True 43 | else: 44 | return False 45 | except KeyError as key_error: 46 | logger.error(f"Error retrieving: {key_error}; are you sure this is a pull request?\n") 47 | raise PullRequestWebhookProcessorException( 48 | f"Error retrieving PR field: {key_error}; are you sure this is a pull request?" 49 | ) 50 | -------------------------------------------------------------------------------- /deadshot/services/scanner/pr_scanner.py: -------------------------------------------------------------------------------- 1 | from celery.utils.log import get_task_logger 2 | from deadshot.configurations.github_config import GithubConfig 3 | from deadshot.services.common.slack_notification import SlackService 4 | from deadshot.services.github.github_service import GithubService, GithubAppService 5 | from deadshot.services.github.git_diff_parser import GithubDiffProcessor 6 | from deadshot.services.scanner.regex_scanner import run as regex_scan 7 | 8 | logger = get_task_logger(__name__) 9 | 10 | # Class to manage Pull Request diff scans per line and create/edit the end comment to be sent to the Github Pull Request 11 | # conversation 12 | 13 | 14 | class PRScanner: 15 | def __init__(self, pull_request_webhook): 16 | self.pull_request_webhook = pull_request_webhook 17 | self.html_url = self.pull_request_webhook['pull_request']['html_url'] 18 | self.pr_url = str(self.pull_request_webhook['pull_request']['url']) 19 | self.pr_number = int(self.pull_request_webhook['number']) 20 | self.repo_name = str(self.pull_request_webhook['pull_request']['head']['repo']['full_name']) 21 | self.installation_id = self.pull_request_webhook["installation"]["id"] 22 | self.identified_tokens = None 23 | self.slack_message_dict = {} 24 | 25 | def scan(self): 26 | try: 27 | base_url = GithubConfig().get_github_api() 28 | git_app = GithubAppService() 29 | git_token = git_app.get_github_app_token(base_url, self.installation_id) 30 | 31 | self.identified_tokens = self.identify_secrets(git_token) 32 | if self.identified_tokens: 33 | github_service = GithubService(base_url=base_url, token=git_token) 34 | latest_app_comment = github_service.get_latest_app_comment(repo_name=self.repo_name, pr_number=self.pr_number) 35 | if len(latest_app_comment) == 0: 36 | comments, post_to_slack = self.create_issue_comment() 37 | github_service.post_issue_comment(comments, self.repo_name, self.pr_number) 38 | 39 | else: 40 | old_comment_list = self.split_comments(latest_app_comment) 41 | new_comments, 
post_to_slack = self.create_issue_comment(old_comment_list) 42 | response = github_service.edit_app_comment(new_comments, latest_app_comment["url"]) 43 | if post_to_slack: 44 | self.post_slack_message() 45 | 46 | except Exception as e: 47 | logger.error(f"Exception: {e}") 48 | 49 | def create_issue_comment(self, old_comments=None): 50 | comments = "### Ahoy!! Your PR has some security concerns. " \ 51 | "Please review the files listed below:\n" 52 | post_to_slack = False 53 | 54 | try: 55 | if old_comments is not None: 56 | temp_comments = "" 57 | for item in old_comments: 58 | temp_comments = temp_comments + item + "\n" 59 | comments += temp_comments 60 | for title, nested_values in self.identified_tokens.items(): 61 | for issue_iterator, nested_issue_values in nested_values.items(): 62 | if old_comments is not None: 63 | comment_exists = self.evaluate_app_comment(old_comments, title, issue_iterator) 64 | else: 65 | comment_exists = False 66 | if not comment_exists: 67 | comments += f"- [ ] File: {title} Issue: {issue_iterator}. " \ 68 | f"Recommendation: {nested_issue_values['recommendation']}\n" 69 | if nested_issue_values["slack_alert"] == "True": 70 | post_to_slack = True 71 | if issue_iterator in self.slack_message_dict: 72 | self.slack_message_dict[issue_iterator] = int(self.slack_message_dict[issue_iterator]) + 1 73 | else: 74 | self.slack_message_dict[issue_iterator] = int(1) 75 | 76 | comments += "\nIf you have any questions please reach out to #help-security" 77 | return comments, post_to_slack 78 | except Exception as e: 79 | logger.error(f"Failed creating comment: {e}") 80 | 81 | @staticmethod 82 | def create_jira_description(identified_secrets): 83 | description = "" 84 | try: 85 | for title, nested_values in identified_secrets.items(): 86 | for issue_iterator, nested_issue_values in nested_values.items(): 87 | description += \ 88 | f"File: {title} " \ 89 | f"Issue: {issue_iterator}\n" 90 | 91 | return description 92 | 93 | except Exception as e: 94 | logger.error(f"Failed creating jira description: {e}") 95 | 96 | @staticmethod 97 | def evaluate_app_comment(app_comments, title, issue_iterator): 98 | try: 99 | comment_exists = False 100 | for comment in app_comments: 101 | if title in comment and issue_iterator in comment: 102 | comment_exists = True 103 | 104 | return comment_exists 105 | except Exception as e: 106 | logger.error(f"Failed to iterate through comments: {e}") 107 | 108 | def identify_secrets(self, git_token): 109 | try: 110 | identified_tokens = {} 111 | github_diff = GithubDiffProcessor(self.pr_url, git_token) 112 | for git_file in github_diff.diff_files(): 113 | if "test" not in git_file.full_filename: 114 | for git_line in git_file.diff_lines(): 115 | if git_line.line_type == "+": 116 | res = regex_scan(body=str(git_line.value), file_name=git_file.full_filename, line_number=git_line.line_number) 117 | if len(res) > 0 and git_line.line_number is not None: 118 | if str(git_file.full_filename + ":" + str(git_line.line_number)) in identified_tokens: 119 | identified_tokens[str(git_file.full_filename) + ":" + str(git_line.line_number)].update(res) 120 | else: 121 | identified_tokens[git_file.full_filename + ":" + str(git_line.line_number)] = res 122 | 123 | return identified_tokens 124 | except Exception as e: 125 | logger.error(f"Failed identifying all tokens: {e}") 126 | 127 | def post_slack_message(self): 128 | try: 129 | slack_message = "Ahoy!! 
Deadshot has identified: " 130 | slack = SlackService() 131 | 132 | for issue, issue_count in self.slack_message_dict.items(): 133 | slack_message += f"`{issue_count}` `{issue}`, " 134 | slack_message += f"in PR: {self.html_url}" 135 | slack.send_message(slack_message) 136 | 137 | except Exception as e: 138 | logger.error(f"Exception: {e}") 139 | 140 | @staticmethod 141 | def split_comments(latest_app_comment): 142 | try: 143 | temp_text = latest_app_comment["body"] 144 | temp_list = temp_text.split("\n") 145 | temp_list = list(filter(None, temp_list)) 146 | final_list = [] 147 | for i in range(0, len(temp_list)-1): 148 | if "- [ ]" in temp_list[i] or "- [x]" in temp_list[i]: 149 | final_list.append(temp_list[i]) 150 | return final_list 151 | except Exception as e: 152 | logger.error(f"Exception: {e}") 153 | -------------------------------------------------------------------------------- /deadshot/services/scanner/regex_scanner.py: -------------------------------------------------------------------------------- 1 | import re 2 | import json 3 | import os 4 | import math 5 | from celery.utils.log import get_task_logger 6 | 7 | 8 | # This file has function definitions used for matching each PR diff line against 9 | # the defined regular expressions and also check for entropy on secrets identified 10 | 11 | logger = get_task_logger(__name__) 12 | 13 | BASE64_CHARS = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=" 14 | HEX_CHARS = "1234567890abcdefABCDEF" 15 | 16 | 17 | def run(**kwargs): 18 | # This function initiates the regular expression match against each line of the PR diff lines 19 | # and generates a dictionary item for all positive matches 20 | diff_line = kwargs["body"] 21 | 22 | issues = [] 23 | temp_issues = {} 24 | try: 25 | with open(os.path.join(os.path.dirname(__file__), 'secrets_json/regex.json'), 'r') as f: 26 | data = json.load(f) 27 | for title, nested_dict in data.items(): 28 | for regexp in nested_dict["regex"]: 29 | pattern = re.compile(regexp) 30 | if bool(pattern.search(str(diff_line))): 31 | if nested_dict["entropy_check"] == "True": 32 | if find_entropy(diff_line): 33 | issues.append({title: nested_dict}) 34 | if title not in temp_issues.keys(): 35 | temp_issues[title] = { 36 | "recommendation": 37 | data[title]["recommendation"], 38 | "slack_alert": 39 | data[title]["slack_alert"] 40 | } 41 | else: 42 | issues.append({title: nested_dict}) 43 | if title not in temp_issues.keys(): 44 | temp_issues[title] = { 45 | "recommendation": 46 | data[title]["recommendation"], 47 | "slack_alert": 48 | data[title]["slack_alert"] 49 | } 50 | 51 | return temp_issues 52 | except Exception as e: 53 | logger.error(e) 54 | 55 | 56 | def find_entropy(line): 57 | # Function to calculate the entropy for regexes that have entropy_check 58 | # set to True in the regex.json file 59 | high_entropy = False 60 | stringsFound = [] 61 | for word in line.split(): 62 | base64_strings = get_strings_of_set(word, BASE64_CHARS) 63 | hex_strings = get_strings_of_set(word, HEX_CHARS) 64 | for character in base64_strings: 65 | b64_entropy = shannon_entropy(character, BASE64_CHARS) 66 | if b64_entropy > 4.5: 67 | stringsFound.append(character) 68 | 69 | for character in hex_strings: 70 | hex_entropy = shannon_entropy(character, HEX_CHARS) 71 | if hex_entropy > 3: 72 | stringsFound.append(character) 73 | 74 | entropic_diff = {} 75 | if len(stringsFound) > 0: 76 | high_entropy = True 77 | entropic_diff['stringsFound'] = stringsFound 78 | 79 | return high_entropy 80 | 81 | 82 | def 
shannon_entropy(data, iterator): 83 | """ 84 | Borrowed from http://blog.dkbza.org/2007/05/scanning-data-for-entropy-anomalies.html 85 | """ 86 | if not data: 87 | return 0 88 | entropy = 0 89 | for x in iterator: 90 | p_x = float(data.count(x))/len(data) 91 | if p_x > 0: 92 | entropy += - p_x*math.log(p_x, 2) 93 | return entropy 94 | 95 | 96 | def get_strings_of_set(word, char_set, threshold=20): 97 | count = 0 98 | letters = "" 99 | strings = [] 100 | for char in word: 101 | if char in char_set: 102 | letters += char 103 | count += 1 104 | else: 105 | if count >= threshold: 106 | strings.append(letters) 107 | letters = "" 108 | count = 0 109 | if count > threshold: 110 | strings.append(letters) 111 | return strings 112 | -------------------------------------------------------------------------------- /deadshot/services/scanner/secrets_json/regex.json: -------------------------------------------------------------------------------- 1 | { 2 | "AWS Secrets": { 3 | "regex" : ["(\\\"|\\')?(AWS|aws|Aws)?_?(SECRET|secret|Secret)?_?(ACCESS|access|Access)?_?(KEY|key|Key)(\\\"|\\')?\\s*(:|=>|=)\\s*(\\\"|\\')?(([A-Za-z0-9/\\\\+=]{40}(?![A-Z0-9]))|(?|=)\\s*(\\\"|\\')?[0-9]{4}\\-?[0-9]{4}\\-?[0-9]{4}(\\\"|\\')?", "(\\\"|\\')?(AWS|aws|Aws)_?(ACCESS|access|Access)?_?(KEY|key|Key)?_?(ID|id|Id|iD)?(\\\"|\\')?\\s*(:|=>|=)\\s*(\\\"|\\')?((? $cacheAdapter,", 12 | "", 13 | "// So far the only use of this arg is to use v4. Due to other signing methods", 14 | "// and SignatureInterfaces (such as v3http), we'll default to forcing v4 if", 15 | "// no value is given to prevent S3Client defaulting to v2 which will fail to", 16 | "// work on 6/24/2019.", 17 | "if (empty($signatureVersion)) {", 18 | "$factoryOptions[\"signature\"] = self::$S3_SIGNATURE_v4;" 19 | ] 20 | 21 | expected_diff_removed = [ 22 | "'credentials.cache' => $cacheAdapter", 23 | "", 24 | "if ($signatureVersion) {", 25 | "$factoryOptions[\"signature\"] = $signatureVersion;" 26 | ] 27 | 28 | 29 | class TestDiffParser(unittest.TestCase): 30 | 31 | def test_diff_parses(self): 32 | diff_parser = GithubDiffProcessor("abc", "token", test_diff) 33 | lines = [] 34 | for diff_file in diff_parser.diff_files(): 35 | # print(diff_file) 36 | for diff_line in diff_file.diff_lines(): 37 | lines.append(diff_line) 38 | # print(diff_line) 39 | 40 | added_lines = [line.value for line in lines if line.line_type == DiffLine.LINE_TYPE_ADDED] 41 | assert added_lines == expected_diff_added, f"[!] added lines do not match: {added_lines}" 42 | 43 | removed_lines = [line.value for line in lines if line.line_type == DiffLine.LINE_TYPE_REMOVED] 44 | assert removed_lines == expected_diff_removed, f"[!] 
removed lines do not match: {removed_lines}" -------------------------------------------------------------------------------- /tests/fixtures/bad_pr.json: -------------------------------------------------------------------------------- 1 | { 2 | "action": "opened", 3 | "number": 1, 4 | "pull_request": { 5 | "url": "", 6 | "id": 0, 7 | "node_id": "", 8 | "html_url": "", 9 | "diff_url": "", 10 | "patch_url": "", 11 | "issue_url": "", 12 | "number": 2, 13 | "state": "open", 14 | "locked": false, 15 | "title": "", 16 | "user": { 17 | "login": "", 18 | "id": 0, 19 | "node_id": "", 20 | "avatar_url": "", 21 | "gravatar_id": "", 22 | "url": "", 23 | "html_url": "", 24 | "followers_url": "", 25 | "following_url": "", 26 | "gists_url": "", 27 | "starred_url": "", 28 | "subscriptions_url": "", 29 | "organizations_url": "", 30 | "repos_url": "", 31 | "events_url": "", 32 | "received_events_url": "", 33 | "type": "", 34 | "site_admin": false, 35 | "ldap_dn": "" 36 | }, 37 | "body": "", 38 | "created_at": "", 39 | "updated_at": "", 40 | "closed_at": null, 41 | "merged_at": null, 42 | "merge_commit_sha": null, 43 | "assignee": null, 44 | "assignees": [], 45 | "requested_reviewers": [], 46 | "requested_teams": [], 47 | "labels": [], 48 | "milestone": null, 49 | "commits_url": "", 50 | "review_comments_url": "", 51 | "review_comment_url": "", 52 | "comments_url": "", 53 | "statuses_url": "", 54 | "head": { 55 | "label": "", 56 | "ref": "", 57 | "sha": "", 58 | "user": { 59 | "login": "", 60 | "id": 0, 61 | "node_id": "", 62 | "avatar_url": "", 63 | "gravatar_id": "", 64 | "url": "", 65 | "html_url": "", 66 | "followers_url": "", 67 | "following_url": "", 68 | "gists_url": "", 69 | "starred_url": "", 70 | "subscriptions_url": "", 71 | "organizations_url": "", 72 | "repos_url": "", 73 | "events_url": "", 74 | "received_events_url": "", 75 | "type": "User", 76 | "site_admin": false, 77 | "ldap_dn": "" 78 | }, 79 | "repo": { 80 | "id": 0, 81 | "node_id": "", 82 | "name": "deadshot", 83 | "full_name": "", 84 | "owner": { 85 | "login": "", 86 | "id": 0, 87 | "node_id": "", 88 | "avatar_url": "", 89 | "gravatar_id": "", 90 | "url": "", 91 | "html_url": "", 92 | "followers_url": "", 93 | "following_url": "", 94 | "gists_url": "", 95 | "starred_url": "", 96 | "subscriptions_url": "", 97 | "organizations_url": "", 98 | "repos_url": "", 99 | "events_url": "", 100 | "received_events_url": "", 101 | "type": "User", 102 | "site_admin": false, 103 | "ldap_dn": "" 104 | }, 105 | "private": false, 106 | "html_url": "", 107 | "description": "", 108 | "fork": false, 109 | "url": "", 110 | "forks_url": "", 111 | "keys_url": "", 112 | "collaborators_url": "", 113 | "teams_url": "", 114 | "hooks_url": "", 115 | "issue_events_url": "", 116 | "events_url": "", 117 | "assignees_url": "", 118 | "branches_url": "", 119 | "tags_url": "", 120 | "blobs_url": "", 121 | "git_tags_url": "", 122 | "git_refs_url": "", 123 | "trees_url": "", 124 | "statuses_url": "", 125 | "languages_url": "", 126 | "stargazers_url": "", 127 | "contributors_url": "", 128 | "subscribers_url": "", 129 | "subscription_url": "", 130 | "commits_url": "", 131 | "git_commits_url": "", 132 | "comments_url": "", 133 | "issue_comment_url": "", 134 | "contents_url": "", 135 | "compare_url": "", 136 | "merges_url": "", 137 | "archive_url": "", 138 | "downloads_url": "", 139 | "issues_url": "", 140 | "pulls_url": "", 141 | "milestones_url": "", 142 | "notifications_url": "", 143 | "labels_url": "", 144 | "releases_url": "", 145 | "deployments_url": "", 146 | 
"created_at": "", 147 | "updated_at": "", 148 | "pushed_at": "", 149 | "git_url": "", 150 | "ssh_url": "", 151 | "clone_url": "", 152 | "svn_url": "", 153 | "homepage": "", 154 | "size": 13, 155 | "stargazers_count": 0, 156 | "watchers_count": 0, 157 | "language": "", 158 | "has_issues": true, 159 | "has_projects": true, 160 | "has_downloads": true, 161 | "has_wiki": true, 162 | "has_pages": false, 163 | "forks_count": 0, 164 | "mirror_url": null, 165 | "archived": false, 166 | "open_issues_count": 0, 167 | "license": null, 168 | "forks": 0, 169 | "open_issues": 0, 170 | "watchers": 0, 171 | "default_branch": "master" 172 | } 173 | }, 174 | "base": { 175 | "label": "", 176 | "ref": "", 177 | "sha": "", 178 | "user": { 179 | "login": "", 180 | "id": 0, 181 | "node_id": "", 182 | "avatar_url": "", 183 | "gravatar_id": "", 184 | "url": "", 185 | "html_url": "", 186 | "followers_url": "", 187 | "following_url": "", 188 | "gists_url": "", 189 | "starred_url": "", 190 | "subscriptions_url": "", 191 | "organizations_url": "", 192 | "repos_url": "", 193 | "events_url": "", 194 | "received_events_url": "", 195 | "type": "User", 196 | "site_admin": false, 197 | "ldap_dn": "" 198 | }, 199 | "repo": { 200 | "id": 0, 201 | "node_id": "", 202 | "name": "", 203 | "full_name": "", 204 | "owner": { 205 | "login": "", 206 | "id": 0, 207 | "node_id": "", 208 | "avatar_url": "", 209 | "gravatar_id": "", 210 | "url": "", 211 | "html_url": "", 212 | "followers_url": "", 213 | "following_url": "", 214 | "gists_url": "", 215 | "starred_url": "", 216 | "subscriptions_url": "", 217 | "organizations_url": "", 218 | "repos_url": "", 219 | "events_url": "", 220 | "received_events_url": "", 221 | "type": "User", 222 | "site_admin": false, 223 | "ldap_dn": "" 224 | }, 225 | "private": false, 226 | "html_url": "", 227 | "description": "", 228 | "fork": false, 229 | "url": "", 230 | "forks_url": "", 231 | "keys_url": "", 232 | "collaborators_url": "", 233 | "teams_url": "", 234 | "hooks_url": "", 235 | "issue_events_url": "", 236 | "events_url": "", 237 | "assignees_url": "", 238 | "branches_url": "", 239 | "tags_url": "", 240 | "blobs_url": "", 241 | "git_tags_url": "", 242 | "git_refs_url": "", 243 | "trees_url": "", 244 | "statuses_url": "", 245 | "languages_url": "", 246 | "stargazers_url": "", 247 | "contributors_url": "", 248 | "subscribers_url": "", 249 | "subscription_url": "", 250 | "commits_url": "", 251 | "git_commits_url": "", 252 | "comments_url": "", 253 | "issue_comment_url": "", 254 | "contents_url": "", 255 | "compare_url": "", 256 | "merges_url": "", 257 | "archive_url": "", 258 | "downloads_url": "", 259 | "issues_url": "", 260 | "pulls_url": "", 261 | "milestones_url": "", 262 | "notifications_url": "", 263 | "labels_url": "", 264 | "releases_url": "", 265 | "deployments_url": "", 266 | "created_at": "", 267 | "updated_at": "", 268 | "pushed_at": "", 269 | "git_url": "", 270 | "ssh_url": "", 271 | "clone_url": "", 272 | "svn_url": "", 273 | "homepage": "", 274 | "size": 0, 275 | "stargazers_count": 0, 276 | "watchers_count": 0, 277 | "language": "Python", 278 | "has_issues": true, 279 | "has_projects": true, 280 | "has_downloads": true, 281 | "has_wiki": true, 282 | "has_pages": false, 283 | "forks_count": 0, 284 | "mirror_url": null, 285 | "archived": false, 286 | "open_issues_count": 0, 287 | "license": null, 288 | "forks": 0, 289 | "open_issues": 0, 290 | "watchers": 0, 291 | "default_branch": "master" 292 | } 293 | }, 294 | "_links": { 295 | "self": { 296 | "href": "" 297 | }, 298 | "html": { 299 | 
"href": "" 300 | }, 301 | "issue": { 302 | "href": "" 303 | }, 304 | "comments": { 305 | "href": "" 306 | }, 307 | "review_comments": { 308 | "href": "" 309 | }, 310 | "review_comment": { 311 | "href": "" 312 | }, 313 | "commits": { 314 | "href": "" 315 | }, 316 | "statuses": { 317 | "href": "" 318 | } 319 | }, 320 | "author_association": "OWNER", 321 | "merged": false, 322 | "mergeable": null, 323 | "rebaseable": null, 324 | "mergeable_state": "unknown", 325 | "merged_by": null, 326 | "comments": 0, 327 | "review_comments": 0, 328 | "maintainer_can_modify": false, 329 | "commits": 0, 330 | "additions": 0, 331 | "deletions": 0, 332 | "changed_files": 0 333 | }, 334 | "repository": { 335 | "id": 0, 336 | "node_id": "", 337 | "name": "", 338 | "full_name": "", 339 | "owner": { 340 | "login": "", 341 | "id": 0, 342 | "node_id": "", 343 | "avatar_url": "", 344 | "gravatar_id": "", 345 | "url": "", 346 | "html_url": "", 347 | "followers_url": "", 348 | "following_url": "", 349 | "gists_url": "", 350 | "starred_url": "", 351 | "subscriptions_url": "", 352 | "organizations_url": "", 353 | "repos_url": "", 354 | "events_url": "", 355 | "received_events_url": "", 356 | "type": "User", 357 | "site_admin": false, 358 | "ldap_dn": "" 359 | }, 360 | "private": false, 361 | "html_url": "", 362 | "description": "", 363 | "fork": false, 364 | "url": "", 365 | "forks_url": "", 366 | "keys_url": "", 367 | "collaborators_url": "", 368 | "teams_url": "", 369 | "hooks_url": "", 370 | "issue_events_url": "", 371 | "events_url": "", 372 | "assignees_url": "", 373 | "branches_url": "", 374 | "tags_url": "", 375 | "blobs_url": "", 376 | "git_tags_url": "", 377 | "git_refs_url": "", 378 | "trees_url": "", 379 | "statuses_url": "", 380 | "languages_url": "", 381 | "stargazers_url": "", 382 | "contributors_url": "", 383 | "subscribers_url": "", 384 | "subscription_url": "", 385 | "commits_url": "", 386 | "git_commits_url": "", 387 | "comments_url": "", 388 | "issue_comment_url": "", 389 | "contents_url": "", 390 | "compare_url": "", 391 | "merges_url": "", 392 | "archive_url": "", 393 | "downloads_url": "", 394 | "issues_url": "", 395 | "pulls_url": "", 396 | "milestones_url": "", 397 | "notifications_url": "", 398 | "labels_url": "", 399 | "releases_url": "", 400 | "deployments_url": "", 401 | "created_at": "", 402 | "updated_at": "", 403 | "pushed_at": "", 404 | "git_url": "", 405 | "ssh_url": "", 406 | "clone_url": "", 407 | "svn_url": "", 408 | "homepage": "", 409 | "size": 13, 410 | "stargazers_count": 0, 411 | "watchers_count": 0, 412 | "language": "Python", 413 | "has_issues": true, 414 | "has_projects": true, 415 | "has_downloads": true, 416 | "has_wiki": true, 417 | "has_pages": false, 418 | "forks_count": 0, 419 | "mirror_url": null, 420 | "archived": false, 421 | "open_issues_count": 2, 422 | "license": null, 423 | "forks": 0, 424 | "open_issues": 0, 425 | "watchers": 0, 426 | "default_branch": "master" 427 | }, 428 | "sender": { 429 | "login": "", 430 | "id": 0, 431 | "node_id": "", 432 | "avatar_url": "", 433 | "gravatar_id": "", 434 | "url": "", 435 | "html_url": "", 436 | "followers_url": "", 437 | "following_url": "", 438 | "gists_url": "", 439 | "starred_url": "", 440 | "subscriptions_url": "", 441 | "organizations_url": "", 442 | "repos_url": "", 443 | "events_url": "", 444 | "received_events_url": "", 445 | "type": "User", 446 | "site_admin": false, 447 | "ldap_dn": "" 448 | } 449 | } -------------------------------------------------------------------------------- /tests/fixtures/close_pr.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "action": "closed", 3 | "number": 1, 4 | "pull_request": { 5 | "url": "", 6 | "id": 1, 7 | "node_id": "", 8 | "html_url": "", 9 | "diff_url": "", 10 | "patch_url": "", 11 | "issue_url": "", 12 | "number": 2, 13 | "state": "closed", 14 | "locked": false, 15 | "title": "ll", 16 | "user": { 17 | "login": "se", 18 | "id": 1, 19 | "node_id": "", 20 | "avatar_url": "", 21 | "gravatar_id": "", 22 | "url": "", 23 | "html_url": "", 24 | "followers_url": "", 25 | "following_url": "", 26 | "gists_url": "", 27 | "starred_url": "", 28 | "subscriptions_url": "", 29 | "organizations_url": "", 30 | "repos_url": "", 31 | "events_url": "", 32 | "received_events_url": "", 33 | "type": "User", 34 | "site_admin": false, 35 | "ldap_dn": "" 36 | }, 37 | "body": "", 38 | "created_at": "", 39 | "updated_at": "", 40 | "closed_at": "", 41 | "merged_at": "", 42 | "merge_commit_sha": "", 43 | "assignee": null, 44 | "assignees": [ 45 | 46 | ], 47 | "requested_reviewers": [ 48 | 49 | ], 50 | "requested_teams": [ 51 | 52 | ], 53 | "labels": [ 54 | 55 | ], 56 | "milestone": null, 57 | "commits_url": "", 58 | "review_comments_url": "", 59 | "review_comment_url": "", 60 | "comments_url": "", 61 | "statuses_url": "", 62 | "head": { 63 | "label": "", 64 | "ref": "", 65 | "sha": "", 66 | "user": { 67 | "login": "", 68 | "id": 1, 69 | "node_id": "", 70 | "avatar_url": "", 71 | "gravatar_id": "", 72 | "url": "", 73 | "html_url": "", 74 | "followers_url": "", 75 | "following_url": "", 76 | "gists_url": "", 77 | "starred_url": "", 78 | "subscriptions_url": "", 79 | "organizations_url": "", 80 | "repos_url": "", 81 | "events_url": "", 82 | "received_events_url": "", 83 | "type": "User", 84 | "site_admin": false, 85 | "ldap_dn": "" 86 | }, 87 | "repo": { 88 | "id": 1, 89 | "node_id": "", 90 | "name": "", 91 | "full_name": "", 92 | "private": false, 93 | "owner": { 94 | "login": "", 95 | "id": 1, 96 | "node_id": "", 97 | "avatar_url": "", 98 | "gravatar_id": "", 99 | "url": "", 100 | "html_url": "", 101 | "followers_url": "", 102 | "following_url": "", 103 | "gists_url": "", 104 | "starred_url": "", 105 | "organizations_url": "", 106 | "repos_url": "", 107 | "events_url": "", 108 | "received_events_url": "", 109 | "type": "User", 110 | "site_admin": false, 111 | "ldap_dn": "" 112 | }, 113 | "html_url": "", 114 | "description": null, 115 | "fork": false, 116 | "url": "", 117 | "forks_url": "", 118 | "keys_url": "", 119 | "collaborators_url": "", 120 | "teams_url": "", 121 | "hooks_url": "", 122 | "issue_events_url": "", 123 | "events_url": "", 124 | "assignees_url": "", 125 | "branches_url": "", 126 | "tags_url": "", 127 | "blobs_url": "", 128 | "git_tags_url": "", 129 | "git_refs_url": "", 130 | "trees_url": "", 131 | "statuses_url": "", 132 | "languages_url": "", 133 | "stargazers_url": "", 134 | "contributors_url": "", 135 | "subscribers_url": "", 136 | "subscription_url": "", 137 | "commits_url": "", 138 | "git_commits_url": "", 139 | "comments_url": "", 140 | "issue_comment_url": "", 141 | "contents_url": "", 142 | "compare_url": "", 143 | "merges_url": "", 144 | "archive_url": "", 145 | "downloads_url": "", 146 | "issues_url": "", 147 | "pulls_url": "", 148 | "milestones_url": "", 149 | "notifications_url": "", 150 | "labels_url": "", 151 | "releases_url": "", 152 | "deployments_url": "", 153 | "created_at": "", 154 | "updated_at": "", 155 | "pushed_at": "", 156 | "git_url": "", 157 | "ssh_url": "", 158 | "clone_url": "", 159 | "svn_url": 
"", 160 | "homepage": null, 161 | "size": 13, 162 | "stargazers_count": 0, 163 | "watchers_count": 0, 164 | "language": "Python", 165 | "has_issues": true, 166 | "has_projects": true, 167 | "has_downloads": true, 168 | "has_wiki": true, 169 | "has_pages": false, 170 | "forks_count": 0, 171 | "mirror_url": null, 172 | "archived": false, 173 | "disabled": false, 174 | "open_issues_count": 0, 175 | "license": null, 176 | "forks": 0, 177 | "open_issues": 0, 178 | "watchers": 0, 179 | "default_branch": "" 180 | } 181 | }, 182 | "base": { 183 | "label": "", 184 | "ref": "", 185 | "sha": "", 186 | "user": { 187 | "login": "", 188 | "id": 1, 189 | "node_id": "", 190 | "avatar_url": "", 191 | "gravatar_id": "", 192 | "url": "", 193 | "html_url": "", 194 | "followers_url": "", 195 | "following_url": "", 196 | "gists_url": "", 197 | "starred_url": "", 198 | "subscriptions_url": "", 199 | "organizations_url": "", 200 | "repos_url": "", 201 | "events_url": "", 202 | "received_events_url": "", 203 | "type": "User", 204 | "site_admin": false, 205 | "ldap_dn": "" 206 | }, 207 | "repo": { 208 | "id": 1, 209 | "node_id": "", 210 | "name": "", 211 | "full_name": "", 212 | "private": false, 213 | "owner": { 214 | "login": "", 215 | "id": 1, 216 | "node_id": "", 217 | "avatar_url": "", 218 | "gravatar_id": "", 219 | "url": "", 220 | "html_url": "", 221 | "followers_url": "", 222 | "following_url": "", 223 | "gists_url": "", 224 | "starred_url": "", 225 | "subscriptions_url": "", 226 | "organizations_url": "", 227 | "repos_url": "", 228 | "events_url": "", 229 | "received_events_url": "", 230 | "type": "User", 231 | "site_admin": false, 232 | "ldap_dn": "" 233 | }, 234 | "html_url": "", 235 | "description": null, 236 | "fork": false, 237 | "url": "", 238 | "forks_url": "", 239 | "keys_url": "", 240 | "collaborators_url": "", 241 | "teams_url": "", 242 | "hooks_url": "", 243 | "issue_events_url": "", 244 | "events_url": "", 245 | "assignees_url": "", 246 | "branches_url": "", 247 | "tags_url": "", 248 | "blobs_url": "", 249 | "git_tags_url": "", 250 | "git_refs_url": "", 251 | "trees_url": "", 252 | "statuses_url": "", 253 | "languages_url": "", 254 | "stargazers_url": "", 255 | "contributors_url": "", 256 | "subscribers_url": "", 257 | "subscription_url": "", 258 | "commits_url": "", 259 | "git_commits_url": "", 260 | "comments_url": "", 261 | "issue_comment_url": "", 262 | "contents_url": "", 263 | "compare_url": "", 264 | "merges_url": "", 265 | "archive_url": "", 266 | "downloads_url": "", 267 | "issues_url": "", 268 | "pulls_url": "", 269 | "milestones_url": "", 270 | "notifications_url": "", 271 | "labels_url": "", 272 | "releases_url": "", 273 | "deployments_url": "", 274 | "created_at": "", 275 | "updated_at": "", 276 | "pushed_at": "", 277 | "git_url": "", 278 | "ssh_url": "", 279 | "clone_url": "", 280 | "svn_url": "", 281 | "homepage": null, 282 | "size": 13, 283 | "stargazers_count": 0, 284 | "watchers_count": 0, 285 | "language": "Python", 286 | "has_issues": true, 287 | "has_projects": true, 288 | "has_downloads": true, 289 | "has_wiki": true, 290 | "has_pages": false, 291 | "forks_count": 0, 292 | "mirror_url": null, 293 | "archived": false, 294 | "disabled": false, 295 | "open_issues_count": 0, 296 | "license": null, 297 | "forks": 0, 298 | "open_issues": 0, 299 | "watchers": 0, 300 | "default_branch": "" 301 | } 302 | }, 303 | "_links": { 304 | "self": { 305 | "href": "" 306 | }, 307 | "html": { 308 | "href": "" 309 | }, 310 | "issue": { 311 | "href": "" 312 | }, 313 | "comments": { 314 | 
"href": "" 315 | }, 316 | "review_comments": { 317 | "href": "" 318 | }, 319 | "review_comment": { 320 | "href": "" 321 | }, 322 | "commits": { 323 | "href": "" 324 | }, 325 | "statuses": { 326 | "href": "" 327 | } 328 | }, 329 | "author_association": "OWNER", 330 | "draft": false, 331 | "merged": true, 332 | "mergeable": null, 333 | "rebaseable": null, 334 | "mergeable_state": "unknown", 335 | "merged_by": { 336 | "login": "", 337 | "id": 1, 338 | "node_id": "", 339 | "avatar_url": "", 340 | "gravatar_id": "", 341 | "url": "", 342 | "html_url": "", 343 | "followers_url": "", 344 | "following_url": "", 345 | "gists_url": "", 346 | "starred_url": "", 347 | "subscriptions_url": "", 348 | "organizations_url": "", 349 | "repos_url": "", 350 | "events_url": "", 351 | "received_events_url": "", 352 | "type": "User", 353 | "site_admin": false, 354 | "ldap_dn": "" 355 | }, 356 | "comments": 0, 357 | "review_comments": 0, 358 | "maintainer_can_modify": false, 359 | "commits": 1, 360 | "additions": 1, 361 | "deletions": 1, 362 | "changed_files": 1 363 | }, 364 | "repository": { 365 | "id": 1, 366 | "node_id": "", 367 | "name": "", 368 | "full_name": "", 369 | "private": false, 370 | "owner": { 371 | "login": "", 372 | "id": 1, 373 | "node_id": "", 374 | "avatar_url": "", 375 | "gravatar_id": "", 376 | "url": "", 377 | "html_url": "", 378 | "followers_url": "", 379 | "following_url": "", 380 | "gists_url": "", 381 | "starred_url": "", 382 | "subscriptions_url": "", 383 | "organizations_url": "", 384 | "repos_url": "", 385 | "events_url": "", 386 | "received_events_url": "", 387 | "type": "User", 388 | "site_admin": false, 389 | "ldap_dn": "" 390 | }, 391 | "html_url": "", 392 | "description": null, 393 | "fork": false, 394 | "url": "", 395 | "forks_url": "", 396 | "keys_url": "", 397 | "collaborators_url": "", 398 | "teams_url": "", 399 | "hooks_url": "", 400 | "issue_events_url": "", 401 | "events_url": "", 402 | "assignees_url": "", 403 | "branches_url": "", 404 | "tags_url": "", 405 | "blobs_url": "", 406 | "git_tags_url": "", 407 | "git_refs_url": "", 408 | "trees_url": "", 409 | "statuses_url": "", 410 | "languages_url": "", 411 | "stargazers_url": "", 412 | "contributors_url": "", 413 | "subscribers_url": "", 414 | "subscription_url": "", 415 | "commits_url": "", 416 | "git_commits_url": "", 417 | "comments_url": "", 418 | "issue_comment_url": "", 419 | "contents_url": "", 420 | "compare_url": "", 421 | "merges_url": "", 422 | "archive_url": "", 423 | "downloads_url": "", 424 | "issues_url": "", 425 | "pulls_url": "", 426 | "milestones_url": "", 427 | "notifications_url": "", 428 | "labels_url": "", 429 | "releases_url": "", 430 | "deployments_url": "", 431 | "created_at": "", 432 | "updated_at": "", 433 | "pushed_at": "", 434 | "git_url": "", 435 | "ssh_url": "", 436 | "clone_url": "", 437 | "svn_url": "", 438 | "homepage": null, 439 | "size": 1, 440 | "stargazers_count": 0, 441 | "watchers_count": 0, 442 | "language": "Python", 443 | "has_issues": true, 444 | "has_projects": true, 445 | "has_downloads": true, 446 | "has_wiki": true, 447 | "has_pages": false, 448 | "forks_count": 0, 449 | "mirror_url": null, 450 | "archived": false, 451 | "disabled": false, 452 | "open_issues_count": 0, 453 | "license": null, 454 | "forks": 0, 455 | "open_issues": 0, 456 | "watchers": 0, 457 | "default_branch": "" 458 | }, 459 | "enterprise": { 460 | "id": 1, 461 | "slug": "", 462 | "name": "", 463 | "node_id": "", 464 | "avatar_url": "", 465 | "description": null, 466 | "website_url": null, 467 | "html_url": 
"", 468 | "created_at": "", 469 | "updated_at": "" 470 | }, 471 | "sender": { 472 | "login": "", 473 | "id": 1, 474 | "node_id": "", 475 | "avatar_url": "", 476 | "gravatar_id": "", 477 | "url": "", 478 | "html_url": "", 479 | "followers_url": "", 480 | "following_url": "", 481 | "gists_url": "", 482 | "starred_url": "", 483 | "subscriptions_url": "", 484 | "organizations_url": "", 485 | "repos_url": "", 486 | "events_url": "", 487 | "received_events_url": "", 488 | "type": "User", 489 | "site_admin": false, 490 | "ldap_dn": "" 491 | }, 492 | "installation": { 493 | "id": 1, 494 | "node_id": "" 495 | } 496 | } -------------------------------------------------------------------------------- /tests/fixtures/good_pr.json: -------------------------------------------------------------------------------- 1 | { 2 | "action": "synchronize", 3 | "number": 1, 4 | "pull_request": { 5 | "url": "", 6 | "id": 1, 7 | "node_id": "", 8 | "html_url": "", 9 | "diff_url": "", 10 | "patch_url": "", 11 | "issue_url": "", 12 | "number": 1, 13 | "state": "open", 14 | "locked": false, 15 | "title": "test", 16 | "user": { 17 | "login": "", 18 | "id": 1, 19 | "node_id": "", 20 | "avatar_url": "", 21 | "gravatar_id": "", 22 | "url": "", 23 | "html_url": "", 24 | "followers_url": "", 25 | "following_url": "", 26 | "gists_url": "", 27 | "starred_url": "", 28 | "subscriptions_url": "", 29 | "organizations_url": "", 30 | "repos_url": "", 31 | "events_url": "", 32 | "received_events_url": "", 33 | "type": "User", 34 | "site_admin": false, 35 | "ldap_dn": "" 36 | }, 37 | "body": "", 38 | "created_at": "", 39 | "updated_at": "", 40 | "closed_at": null, 41 | "merged_at": null, 42 | "merge_commit_sha": "", 43 | "assignee": null, 44 | "assignees": [ 45 | 46 | ], 47 | "requested_reviewers": [ 48 | 49 | ], 50 | "requested_teams": [ 51 | 52 | ], 53 | "labels": [ 54 | 55 | ], 56 | "milestone": null, 57 | "commits_url": "", 58 | "review_comments_url": "", 59 | "review_comment_url": "", 60 | "comments_url": "", 61 | "statuses_url": "", 62 | "head": { 63 | "label": "", 64 | "ref": "", 65 | "sha": "", 66 | "user": { 67 | "login": "", 68 | "id": 1, 69 | "node_id": "", 70 | "avatar_url": "", 71 | "gravatar_id": "", 72 | "url": "", 73 | "html_url": "", 74 | "followers_url": "", 75 | "following_url": "", 76 | "gists_url": "", 77 | "starred_url": "", 78 | "subscriptions_url": "", 79 | "organizations_url": "", 80 | "repos_url": "", 81 | "events_url": "", 82 | "received_events_url": "", 83 | "type": "User", 84 | "site_admin": false, 85 | "ldap_dn": "" 86 | }, 87 | "repo": { 88 | "id": 1, 89 | "node_id": "", 90 | "name": "", 91 | "full_name": "", 92 | "private": false, 93 | "owner": { 94 | "login": "", 95 | "id": 1, 96 | "node_id": "", 97 | "avatar_url": "", 98 | "gravatar_id": "", 99 | "url": "", 100 | "html_url": "", 101 | "followers_url": "", 102 | "following_url": "", 103 | "gists_url": "", 104 | "starred_url": "", 105 | "subscriptions_url": "", 106 | "organizations_url": "", 107 | "repos_url": "", 108 | "events_url": "", 109 | "received_events_url": "", 110 | "type": "User", 111 | "site_admin": false, 112 | "ldap_dn": "" 113 | }, 114 | "html_url": "", 115 | "description": null, 116 | "fork": false, 117 | "url": "", 118 | "forks_url": "", 119 | "keys_url": "", 120 | "collaborators_url": "", 121 | "teams_url": "", 122 | "hooks_url": "", 123 | "issue_events_url": "", 124 | "events_url": "", 125 | "assignees_url": "", 126 | "branches_url": "", 127 | "tags_url": "", 128 | "blobs_url": "", 129 | "git_tags_url": "", 130 | "git_refs_url": "", 
131 | "trees_url": "", 132 | "statuses_url": "", 133 | "languages_url": "", 134 | "stargazers_url": "", 135 | "contributors_url": "", 136 | "subscribers_url": "", 137 | "subscription_url": "", 138 | "commits_url": "", 139 | "git_commits_url": "", 140 | "comments_url": "", 141 | "issue_comment_url": "", 142 | "contents_url": "", 143 | "compare_url": "", 144 | "merges_url": "", 145 | "archive_url": "", 146 | "downloads_url": "", 147 | "issues_url": "", 148 | "pulls_url": "", 149 | "milestones_url": "", 150 | "notifications_url": "", 151 | "labels_url": "", 152 | "releases_url": "", 153 | "deployments_url": "", 154 | "created_at": "", 155 | "updated_at": "", 156 | "pushed_at": "", 157 | "git_url": "", 158 | "ssh_url": "", 159 | "clone_url": "", 160 | "svn_url": "", 161 | "homepage": null, 162 | "size": 20, 163 | "stargazers_count": 0, 164 | "watchers_count": 0, 165 | "language": "Python", 166 | "has_issues": true, 167 | "has_projects": true, 168 | "has_downloads": true, 169 | "has_wiki": true, 170 | "has_pages": false, 171 | "forks_count": 0, 172 | "mirror_url": null, 173 | "archived": false, 174 | "disabled": false, 175 | "open_issues_count": 1, 176 | "license": null, 177 | "forks": 0, 178 | "open_issues": 1, 179 | "watchers": 0, 180 | "default_branch": "master" 181 | } 182 | }, 183 | "base": { 184 | "label": "", 185 | "ref": "master", 186 | "sha": "", 187 | "user": { 188 | "login": "", 189 | "id": 1, 190 | "node_id": "", 191 | "avatar_url": "", 192 | "gravatar_id": "", 193 | "url": "", 194 | "html_url": "", 195 | "followers_url": "", 196 | "following_url": "", 197 | "gists_url": "", 198 | "starred_url": "", 199 | "subscriptions_url": "", 200 | "organizations_url": "", 201 | "repos_url": "", 202 | "events_url": "", 203 | "received_events_url": "", 204 | "type": "User", 205 | "site_admin": false, 206 | "ldap_dn": "" 207 | }, 208 | "repo": { 209 | "id": 1, 210 | "node_id": "", 211 | "name": "", 212 | "full_name": "", 213 | "private": false, 214 | "owner": { 215 | "login": "", 216 | "id": 1, 217 | "node_id": "", 218 | "avatar_url": "", 219 | "gravatar_id": "", 220 | "url": "", 221 | "html_url": "", 222 | "followers_url": "", 223 | "following_url": "", 224 | "gists_url": "", 225 | "starred_url": "", 226 | "subscriptions_url": "", 227 | "organizations_url": "", 228 | "repos_url": "", 229 | "events_url": "", 230 | "received_events_url": "", 231 | "type": "User", 232 | "site_admin": false, 233 | "ldap_dn": "" 234 | }, 235 | "html_url": "", 236 | "description": null, 237 | "fork": false, 238 | "url": "", 239 | "forks_url": "", 240 | "keys_url": "", 241 | "collaborators_url": "", 242 | "teams_url": "", 243 | "hooks_url": "", 244 | "issue_events_url": "", 245 | "events_url": "", 246 | "assignees_url": "", 247 | "branches_url": "", 248 | "tags_url": "", 249 | "blobs_url": "", 250 | "git_tags_url": "", 251 | "git_refs_url": "", 252 | "trees_url": "", 253 | "statuses_url": "", 254 | "languages_url": "", 255 | "stargazers_url": "", 256 | "contributors_url": "", 257 | "subscribers_url": "", 258 | "subscription_url": "", 259 | "commits_url": "", 260 | "git_commits_url": "", 261 | "comments_url": "", 262 | "issue_comment_url": "", 263 | "contents_url": "", 264 | "compare_url": "", 265 | "merges_url": "", 266 | "archive_url": "", 267 | "downloads_url": "", 268 | "issues_url": "", 269 | "pulls_url": "", 270 | "milestones_url": "", 271 | "notifications_url": "", 272 | "labels_url": "", 273 | "releases_url": "", 274 | "deployments_url": "", 275 | "created_at": "", 276 | "updated_at": "", 277 | "pushed_at": "", 
278 | "git_url": "", 279 | "ssh_url": "", 280 | "clone_url": "", 281 | "svn_url": "", 282 | "homepage": null, 283 | "size": 20, 284 | "stargazers_count": 0, 285 | "watchers_count": 0, 286 | "language": "Python", 287 | "has_issues": true, 288 | "has_projects": true, 289 | "has_downloads": true, 290 | "has_wiki": true, 291 | "has_pages": false, 292 | "forks_count": 0, 293 | "mirror_url": null, 294 | "archived": false, 295 | "disabled": false, 296 | "open_issues_count": 1, 297 | "license": null, 298 | "forks": 0, 299 | "open_issues": 1, 300 | "watchers": 0, 301 | "default_branch": "master" 302 | } 303 | }, 304 | "_links": { 305 | "self": { 306 | "href": "" 307 | }, 308 | "html": { 309 | "href": "" 310 | }, 311 | "issue": { 312 | "href": "" 313 | }, 314 | "comments": { 315 | "href": "" 316 | }, 317 | "review_comments": { 318 | "href": "" 319 | }, 320 | "review_comment": { 321 | "href": "" 322 | }, 323 | "commits": { 324 | "href": "" 325 | }, 326 | "statuses": { 327 | "href": "" 328 | } 329 | }, 330 | "author_association": "OWNER", 331 | "draft": false, 332 | "merged": false, 333 | "mergeable": null, 334 | "rebaseable": null, 335 | "mergeable_state": "unknown", 336 | "merged_by": null, 337 | "comments": 1, 338 | "review_comments": 0, 339 | "maintainer_can_modify": false, 340 | "commits": 1, 341 | "additions": 1, 342 | "deletions": 1, 343 | "changed_files": 1 344 | }, 345 | "before": "", 346 | "after": "", 347 | "repository": { 348 | "id": 1, 349 | "node_id": "", 350 | "name": "", 351 | "full_name": "", 352 | "private": false, 353 | "owner": { 354 | "login": "", 355 | "id": 1, 356 | "node_id": "", 357 | "avatar_url": "", 358 | "gravatar_id": "", 359 | "url": "", 360 | "html_url": "", 361 | "followers_url": "", 362 | "following_url": "", 363 | "gists_url": "", 364 | "starred_url": "", 365 | "subscriptions_url": "", 366 | "organizations_url": "", 367 | "repos_url": "", 368 | "events_url": "", 369 | "received_events_url": "", 370 | "type": "User", 371 | "site_admin": false, 372 | "ldap_dn": "" 373 | }, 374 | "html_url": "", 375 | "description": null, 376 | "fork": false, 377 | "url": "", 378 | "forks_url": "", 379 | "keys_url": "", 380 | "collaborators_url": "", 381 | "teams_url": "", 382 | "hooks_url": "", 383 | "issue_events_url": "", 384 | "events_url": "", 385 | "assignees_url": "", 386 | "branches_url": "", 387 | "tags_url": "", 388 | "blobs_url": "", 389 | "git_tags_url": "", 390 | "git_refs_url": "", 391 | "trees_url": "", 392 | "statuses_url": "", 393 | "languages_url": "", 394 | "stargazers_url": "", 395 | "contributors_url": "", 396 | "subscribers_url": "", 397 | "subscription_url": "", 398 | "commits_url": "", 399 | "git_commits_url": "", 400 | "comments_url": "", 401 | "issue_comment_url": "", 402 | "contents_url": "", 403 | "compare_url": "", 404 | "merges_url": "", 405 | "archive_url": "", 406 | "downloads_url": "", 407 | "issues_url": "", 408 | "pulls_url": "", 409 | "milestones_url": "", 410 | "notifications_url": "", 411 | "labels_url": "", 412 | "releases_url": "", 413 | "deployments_url": "", 414 | "created_at": "", 415 | "updated_at": "", 416 | "pushed_at": "", 417 | "git_url": "", 418 | "ssh_url": "", 419 | "clone_url": "", 420 | "svn_url": "", 421 | "homepage": null, 422 | "size": 20, 423 | "stargazers_count": 0, 424 | "watchers_count": 0, 425 | "language": "Python", 426 | "has_issues": true, 427 | "has_projects": true, 428 | "has_downloads": true, 429 | "has_wiki": true, 430 | "has_pages": false, 431 | "forks_count": 0, 432 | "mirror_url": null, 433 | "archived": false, 434 | 
"disabled": false, 435 | "open_issues_count": 1, 436 | "license": null, 437 | "forks": 0, 438 | "open_issues": 1, 439 | "watchers": 0, 440 | "default_branch": "master" 441 | }, 442 | "enterprise": { 443 | "id": 1, 444 | "slug": "", 445 | "name": "", 446 | "node_id": " ", 447 | "avatar_url": "", 448 | "description": null, 449 | "website_url": null, 450 | "html_url": "", 451 | "created_at": "", 452 | "updated_at": "" 453 | }, 454 | "sender": { 455 | "login": "", 456 | "id": 1, 457 | "node_id": "", 458 | "avatar_url": "", 459 | "gravatar_id": "", 460 | "url": "", 461 | "html_url": "", 462 | "followers_url": "", 463 | "following_url": "", 464 | "gists_url": "", 465 | "starred_url": "", 466 | "subscriptions_url": "", 467 | "organizations_url": "", 468 | "repos_url": "", 469 | "events_url": "", 470 | "received_events_url": "", 471 | "type": "User", 472 | "site_admin": false, 473 | "ldap_dn": "" 474 | }, 475 | "installation": { 476 | "id": 1, 477 | "node_id": "" 478 | } 479 | } -------------------------------------------------------------------------------- /tests/fixtures/test.diff: -------------------------------------------------------------------------------- 1 | diff --git a/classes/3rdparty/storage/S3.php b/classes/3rdparty/storage/S3.php 2 | index 862b60bf948..52fb0dde114 100644 3 | --- a/classes/3rdparty/storage/S3.php 4 | +++ b/classes/3rdparty/storage/S3.php 5 | @@ -34,17 +34,24 @@ class S3 { 6 | 7 | public function __construct($region = NULL, $signatureVersion = NULL) { 8 | $cacheAdapter = new AbstractCacheAdapter(AbstractCache::fetchCache('apc')); 9 | + 10 | + // NOTE: For SignatureV4 to work, this must not be removed. SignatureV4 requires 11 | + // this parameter. 12 | if (empty($region)) { 13 | $region = self::$S3_REGION_us_east_1; 14 | } 15 | 16 | $factoryOptions = array( 17 | 'region' => $region, 18 | - 'credentials.cache' => $cacheAdapter 19 | + 'credentials.cache' => $cacheAdapter, 20 | ); 21 | - 22 | - if ($signatureVersion) { 23 | - $factoryOptions["signature"] = $signatureVersion; 24 | + 25 | + // So far the only use of this arg is to use v4. Due to other signing methods 26 | + // and SignatureInterfaces (such as v3http), we'll default to forcing v4 if 27 | + // no value is given to prevent S3Client defaulting to v2 which will fail to 28 | + // work on 6/24/2019. 29 | + if (empty($signatureVersion)) { 30 | + $factoryOptions["signature"] = self::$S3_SIGNATURE_v4; 31 | } 32 | 33 | $this->client = S3Client::factory($factoryOptions); 34 | -------------------------------------------------------------------------------- /tests/fixtures/test2.diff: -------------------------------------------------------------------------------- 1 | diff --git a/classes/3rdparty/storage/S3.php b/classes/3rdparty/storage/S3.php 2 | index 862b60bf948..52fb0dde114 100644 3 | --- a/classes/3rdparty/storage/S3.php 4 | +++ b/classes/3rdparty/storage/S3.php 5 | @@ -34,17 +34,24 @@ class S3 { 6 | 7 | public function __construct($region = NULL, $signatureVersion = NULL) { 8 | $cacheAdapter = new AbstractCacheAdapter(AbstractCache::fetchCache('apc')); 9 | + 10 | + // NOTE: For SignatureV4 to work, this must not be removed. SignatureV4 requires 11 | + // this parameter. 
12 | if (empty($region)) { 13 | $region = self::$S3_REGION_us_east_1; 14 | } 15 | 16 | $factoryOptions = array( 17 | 'region' => $region, 18 | - 'credentials.cache' => $cacheAdapter 19 | + 'credentials.cache' => $cacheAdapter, 20 | ); 21 | - 22 | - if ($signatureVersion) { 23 | - $factoryOptions["signature"] = $signatureVersion; 24 | + 25 | + // So far the only use of this arg is to use v4. Due to other signing methods 26 | + // and SignatureInterfaces (such as v3http), we'll default to forcing v4 if 27 | + // no value is given to prevent S3Client defaulting to v2 which will fail to 28 | + // work on 6/24/2019. 29 | + if (empty($signatureVersion)) { 30 | + $factoryOptions["signature"] = self::$S3_SIGNATURE_v4; 31 | } 32 | 33 | $this->client = S3Client::factory($factoryOptions); -------------------------------------------------------------------------------- /tests/test_requirements.txt: -------------------------------------------------------------------------------- 1 | mock==3.0.5 2 | pycodestyle==2.5.0 3 | pytest==5.3.5 4 | -------------------------------------------------------------------------------- /tests/test_secrets/github_secrets.json: -------------------------------------------------------------------------------- 1 | { 2 | "webhook_secret": "SECRET", 3 | "github_app_integration_id": 1, 4 | "github_app_pem_key": "" 5 | } 6 | --------------------------------------------------------------------------------
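
Illustrative sketch (not part of the repository): the webhook check in GithubService.get_signature / validate_webhook above amounts to recomputing an HMAC-SHA1 over the raw request body with the shared webhook secret and comparing it, in constant time, against the signature GitHub sent. A minimal standalone version of that flow is sketched below; the payload bytes and the "SECRET" value are made-up placeholders, not values from the repository.

import hashlib
import hmac


def compute_signature(payload: bytes, secret: str) -> str:
    # Same scheme as GithubService.get_signature: HMAC-SHA1 of the raw body,
    # hex-encoded and prefixed with "sha1=".
    digester = hmac.new(secret.encode("utf-8"), msg=payload, digestmod=hashlib.sha1)
    return "sha1=" + digester.hexdigest()


def signature_matches(payload: bytes, secret: str, sent_signature: str) -> bool:
    # compare_digest gives a constant-time comparison, as in validate_webhook.
    return hmac.compare_digest(sent_signature, compute_signature(payload, secret))


if __name__ == "__main__":
    body = b'{"action": "opened", "number": 1}'  # placeholder webhook body
    secret = "SECRET"                            # placeholder shared secret
    sig = compute_signature(body, secret)
    print(sig, signature_matches(body, secret, sig))  # sha1=<hex digest>, True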
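
Illustrative sketch (not part of the repository): the entropy gate in regex_scanner only flags a regex match when the matched line also contains a long run (threshold 20) of base64 or hex characters whose Shannon entropy exceeds 4.5 or 3 bits per character, respectively. The sketch below reproduces that calculation on two made-up strings; runs_over_threshold is a simplified stand-in for get_strings_of_set and the sample values are illustrative only.

import math

BASE64_CHARS = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/="


def shannon_entropy(data: str, charset: str) -> float:
    # Same formula as regex_scanner.shannon_entropy: bits per character,
    # summed over the characters of the given charset.
    if not data:
        return 0.0
    entropy = 0.0
    for ch in charset:
        p = data.count(ch) / len(data)
        if p > 0:
            entropy -= p * math.log(p, 2)
    return entropy


def runs_over_threshold(word: str, charset: str, threshold: int = 20) -> list:
    # Simplified version of get_strings_of_set: keep only runs of charset
    # characters that are at least `threshold` long.
    runs, current = [], ""
    for ch in word:
        if ch in charset:
            current += ch
        else:
            if len(current) >= threshold:
                runs.append(current)
            current = ""
    if len(current) >= threshold:
        runs.append(current)
    return runs


if __name__ == "__main__":
    filler = "a" * 40                                   # repeated char: entropy ~ 0.0
    token = "Qz8rW3kVtN5mXc2LpJ7yHdAfU4sRbE9TgK6uYwP0"  # 40 distinct chars: ~5.3 bits
    for candidate in (filler, token):
        for run in runs_over_threshold(candidate, BASE64_CHARS):
            entropy = shannon_entropy(run, BASE64_CHARS)
            print(run[:8] + "...", round(entropy, 2), "flag" if entropy > 4.5 else "ignore")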