├── .dockerignore
├── .eslintrc.json
├── .gitattributes
├── .github
│   └── workflows
│       └── ash-build-and-scan.yml
├── .gitignore
├── .pre-commit-hooks.yaml
├── CHANGELOG.md
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── Dockerfile
├── LICENSE
├── NOTICE
├── README.md
├── appsec_cfn_rules
│   ├── IamUserExistsRule.rb
│   ├── KeyPairAsCFnParameterRule.rb
│   ├── ResourcePolicyStarAccessVerbPolicyRule.rb
│   ├── StarResourceAccessPolicyRule.rb
│   └── beta
│       ├── FlowLogsEnabledForVPCsRule.rb
│       ├── PasswordAsCFnParameterRule.rb
│       └── RotationEnabledForSecretsManagerRule.rb
├── ash
├── ash-multi
├── docs
│   ├── content
│   │   ├── assets
│   │   │   └── images
│   │   │       └── aws-logo-light.svg
│   │   ├── contributing.md
│   │   ├── docs
│   │   │   ├── prerequisites.md
│   │   │   └── support.md
│   │   ├── faq.md
│   │   ├── index.md
│   │   └── tutorials
│   │       ├── CI
│   │       │   ├── ASH Execution Environment Viability.drawio
│   │       │   ├── ASH Execution Environment Viability.png
│   │       │   ├── AzurePipelines
│   │       │   │   └── azure-pipelines.yml
│   │       │   ├── GitHubActions
│   │       │   │   └── .github
│   │       │   │       └── workflows
│   │       │   │           └── run-ash-scan.yml
│   │       │   ├── GitLabCI
│   │       │   │   └── .gitlab-ci.yml
│   │       │   └── Jenkins
│   │       │       └── Jenkinsfile
│   │       ├── cloud9-quickstart.md
│   │       ├── running-ash-in-ci.md
│   │       └── running-ash-locally.md
│   └── overrides
│       └── .icons
│           └── aws-logo-light.svg
├── helper_dockerfiles
│   ├── Dockerfile-cdk
│   ├── Dockerfile-git
│   ├── Dockerfile-grype
│   ├── Dockerfile-js
│   ├── Dockerfile-py
│   └── Dockerfile-yaml
├── mkdocs.yml
├── poetry.lock
├── pyproject.toml
├── quickstart
│   ├── README.md
│   └── c9template.yaml
├── requirements.txt
├── src
│   ├── README.md
│   └── automated_security_helper
│       ├── __init__.py
│       ├── adapters
│       │   └── __init__.py
│       ├── asharp.py
│       └── models
│           ├── __init__.py
│           └── asharp_model.py
└── utils
    ├── ash_helpers.ps1
    ├── ash_helpers.sh
    ├── cdk-addon-py.py
    ├── cdk-docker-execute.sh
    ├── cdk-nag-scan
    │   ├── .gitignore
    │   ├── .npmignore
    │   ├── README.md
    │   ├── bin
    │   │   └── cdk-nag-scan.ts
    │   ├── cdk.json
    │   ├── jest.config.js
    │   ├── lib
    │   │   └── cdk-nag-scan-stack.ts
    │   ├── package.json
    │   ├── test
    │   │   └── cdk-nag-scan.test.ts
    │   └── tsconfig.json
    ├── cfn-to-cdk
    │   ├── README.md
    │   ├── app.py
    │   ├── cdk.json
    │   ├── cfn_to_cdk
    │   │   ├── __init__.py
    │   │   ├── cfn.json
    │   │   ├── cfn_to_cdk_stack.py
    │   │   ├── cfn_to_cdk_stack.py.j2
    │   │   └── template_generator.py
    │   ├── requirements-dev.txt
    │   └── requirements.txt
    ├── common.sh
    ├── get-scan-set.py
    ├── git-docker-execute.sh
    ├── grype-docker-execute.sh
    ├── identifyipynb.sh
    ├── js-docker-execute.sh
    ├── py-docker-execute.sh
    └── yaml-docker-execute.sh
/.dockerignore:
--------------------------------------------------------------------------------
1 | .pre-commit-*.yaml
2 | ash_output
3 | examples
4 | helper_dockerfiles
5 | quickstart
6 | sample_report.txt
7 | utils/cfn-to-cdk/cdk.out
8 | tests
9 |
--------------------------------------------------------------------------------
/.eslintrc.json:
--------------------------------------------------------------------------------
1 | {
2 | "env": {
3 | "browser": true,
4 | "node": true,
5 | "es2021": true
6 | },
7 | "extends": "eslint:recommended",
8 | "overrides": [
9 | ],
10 | "parserOptions": {
11 | "ecmaVersion": "latest",
12 | "sourceType": "module"
13 | },
14 | "rules": {
15 | }
16 | }
17 |
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
1 | * text=auto eol=lf
2 |
--------------------------------------------------------------------------------
/.github/workflows/ash-build-and-scan.yml:
--------------------------------------------------------------------------------
1 | name: ASH - Core Pipeline
2 | on:
3 | push:
4 | branches:
5 | - '*'
6 | tags:
7 | - '*'
8 | pull_request:
9 | branches:
10 | - '*'
11 | permissions:
12 | actions: read
13 | checks: write
14 | contents: write
15 | id-token: write
16 | security-events: write
17 | pull-requests: write
18 | env:
19 | PYTHON_VERSION: "3.12"
20 | jobs:
21 | build:
22 | strategy:
23 | matrix:
24 | runner:
25 | # - macos-14 # Docker support on this runner is not working yet, still no options for ARM in hosted :-(
26 | - ubuntu-latest
27 | name: ASH Build & Scan - ${{ matrix.runner }}
28 | runs-on: ${{ matrix.runner }}
29 | env:
30 | IMG_NAME: ${{ github.repository }}
31 | ARCH: ${{ matrix.runner == 'ubuntu-latest' && 'amd64' || 'arm64' }}
32 | SUMMARY_FILE: 'ASH Scan Result Summary - ${{ matrix.runner }}.md'
33 | steps:
34 | - name: Checkout
35 | uses: actions/checkout@v4
36 |
37 | - name: Setup docker
38 | if: runner.os == 'macos'
39 | run: |
40 | brew install docker
41 | docker info
42 |
43 | - name: Set up Docker Buildx
44 | if: runner.os == 'macos'
45 | uses: docker/setup-buildx-action@v3
46 | with:
47 | platforms: "linux/${{ env.ARCH }}"
48 |
49 | # - name: Set container metadata
50 | # id: metadata
51 | # uses: docker/metadata-action@v5
52 | # with:
53 | # images: ${{ env.IMG_NAME }}
54 | # tags: |
55 | # type=raw,value=latest
56 | # type=raw,value=${{ env.ARCH }}
57 | # type=raw,value={{sha}},enable=${{ github.ref_type != 'tag' }}
58 |
59 | # - name: Build image
60 | # uses: docker/build-push-action@v5
61 | # with:
62 | # context: '.'
63 | # push: false
64 | # tags: ${{ steps.metadata.outputs.tags }}
65 | # labels: ${{ steps.metadata.outputs.labels }}
66 |
67 | - name: Run ASH against itself
68 | id: ash
69 | run: |-
70 | # Disable exit on error
71 | set +e
72 |
73 | # Run ASH against itself
74 | ./ash --source-dir $(pwd) --output-dir ash_output --container-uid 1001 --container-gid 123 --debug | \
75 | tee ash_stdout.txt
76 |
77 | # cat the output contents to build the summary markdown
78 | # strip out the color codes from the output
79 | ASH_STDOUT=$(cat ash_stdout.txt | sed 's/\x1b\[[0-9;]*[mGKHF]//g')
80 | ASH_AGG_RESULTS=$(cat ash_output/aggregated_results.txt | sed 's/\x1b\[[0-9;]*[mGKHF]//g')
81 |
82 | # Write the summary markdown to a file
83 | cat << EOF | tee "${{ env.SUMMARY_FILE }}" | tee -a "${GITHUB_STEP_SUMMARY}"
84 | ## ASH Scan Output - ${{ env.ARCH }} - ${{ matrix.runner }}
85 |
86 | \`\`\`bash
87 | $ cat ash_stdout.txt
88 |
89 | ${ASH_STDOUT}
90 | \`\`\`
91 |
 92 |           <details>
 93 |           <summary>Show aggregated_results.txt</summary>
 94 |
 95 |           \`\`\`bash
 96 |           ${ASH_AGG_RESULTS}
 97 |           \`\`\`
 98 |           </details>
99 | EOF
100 |
101 | # Write the summary markdown to the GITHUB_OUTPUT
102 | {
103 |             echo 'ASH_OUTPUT<<EOF'
104 |             cat "${{ env.SUMMARY_FILE }}"
105 |             echo 'EOF'
106 |           } >> "$GITHUB_OUTPUT"
107 |
108 | # Exit with the highest return code from ASH
109 | set -e
110 | typeset -i ASH_EXIT_CODE
111 | # ASH_EXIT_CODE=`sed -nE "s/.*Highest return code is ([0-9]+)/\1/p" ash_stdout.txt`
112 | ASH_EXIT_CODE=`perl -ne 'print "$1\n" if /Highest return code is ([[:digit:]]+)/' ash_stdout.txt`
113 | echo "Highest return code found is '$ASH_EXIT_CODE'"
114 | if [ $ASH_EXIT_CODE -eq 0 ]; then
115 | echo "ASH scan succeeded"
116 | exit 0
117 | else
118 | echo "ASH scan failed"
119 | exit $ASH_EXIT_CODE
120 | fi
121 |
122 | - name: Post ASH output as PR comment
123 | uses: mshick/add-pr-comment@v2
124 | # This does not work for fork runs without setting up a proxy
125 |         # Info: https://github.com/mshick/add-pr-comment#proxy-for-fork-based-prs
126 | if: github.repository_owner == 'awslabs'
127 | continue-on-error: true
128 | with:
129 | message: |
130 | ${{ steps.ash.outputs.ASH_OUTPUT }}
131 |
132 | - name: Collect summary
133 | uses: actions/upload-artifact@v4
134 | if: always()
135 | continue-on-error: true
136 | with:
137 | name: Summary
138 | path: "${{ env.SUMMARY_FILE }}"
139 |
140 | - name: Collect ash_stdout
141 | uses: actions/upload-artifact@v4
142 | if: always()
143 | continue-on-error: true
144 | with:
145 | name: ash_stdout
146 | path: ash_stdout.txt
147 | if-no-files-found: error
148 |
149 | - name: Collect ash_output artifact
150 | uses: actions/upload-artifact@v4
151 | if: always()
152 | continue-on-error: false
153 | with:
154 | name: ash_output
155 | path: ash_output
156 | if-no-files-found: error
157 |
158 | build-docs:
159 | name: Build documentation
160 | needs: []
161 | runs-on: ubuntu-latest
162 | if: github.event_name == 'pull_request' || (github.event_name == 'push' && github.ref != 'refs/heads/main')
163 |
164 | steps:
165 | - uses: actions/checkout@v4
166 |
167 | - name: Set up Python
168 | uses: actions/setup-python@v5
169 | with:
170 | python-version: ${{ env.PYTHON_VERSION }}
171 | cache: 'pip'
172 |
173 | - name: Install dependencies
174 | run: pip install -r requirements.txt
175 |
176 | - name: Build documentation
177 | run: mkdocs build --clean
178 |
179 | deploy-docs:
180 | name: Deploy documentation
181 | needs: []
182 | runs-on: ubuntu-latest
183 | if: github.event_name == 'push' && github.ref == 'refs/heads/main'
184 |
185 | steps:
186 | - uses: actions/checkout@v4
187 |
188 | - name: Set up Python
189 | uses: actions/setup-python@v5
190 | with:
191 | python-version: ${{ env.PYTHON_VERSION }}
192 | cache: 'pip'
193 |
194 | - name: Install dependencies
195 | run: pip install -r requirements.txt
196 |
197 | - name: Deploy documentation
198 | run: mkdocs gh-deploy --clean --force
199 |
--------------------------------------------------------------------------------
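For anyone reproducing the workflow's post-processing outside of GitHub Actions, a minimal local sketch follows (assuming `ash` is on the path and prints the same "Highest return code is N" line; the `sed` and `perl` expressions are taken directly from the step above):

```sh
# Run ASH and capture stdout, as the workflow step does.
./ash --source-dir "$(pwd)" --output-dir ash_output | tee ash_stdout.txt

# Strip ANSI color/cursor escape codes before reusing the text in a summary.
sed 's/\x1b\[[0-9;]*[mGKHF]//g' ash_stdout.txt > ash_stdout_clean.txt

# Recover the aggregate exit code the same way the workflow does, then propagate it.
ASH_EXIT_CODE=$(perl -ne 'print "$1\n" if /Highest return code is ([[:digit:]]+)/' ash_stdout.txt)
echo "Highest return code found is '${ASH_EXIT_CODE}'"
exit "${ASH_EXIT_CODE}"
```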
/.gitignore:
--------------------------------------------------------------------------------
1 | utils/cfn-to-cdk/cfn_to_cdk/
2 | utils/cfn-to-cdk/cdk.out/
3 | /**/aggregated_results.txt
4 | /**/__pycache__
5 | utils/cfn-to-cdk/cfn_to_cdk/cfn_to_cdk_stack.py
6 | ash_output
7 | *.bak
8 | src/automated_security_helper/models/*generated*
9 | !tests/test_data/aggregated_results.txt
10 |
11 | ### macOS ###
12 | # General
13 | .DS_Store
14 | .AppleDouble
15 | .LSOverride
16 |
17 | # Icon must end with two \r
18 | Icon
19 |
20 |
21 | # Thumbnails
22 | ._*
23 |
24 | # Files that might appear in the root of a volume
25 | .DocumentRevisions-V100
26 | .fseventsd
27 | .Spotlight-V100
28 | .TemporaryItems
29 | .Trashes
30 | .VolumeIcon.icns
31 | .com.apple.timemachine.donotpresent
32 |
33 | # Directories potentially created on remote AFP share
34 | .AppleDB
35 | .AppleDesktop
36 | Network Trash Folder
37 | Temporary Items
38 | .apdisk
39 |
40 | ### macOS Patch ###
41 | # iCloud generated files
42 | *.icloud
43 |
44 | ### Python ###
45 | # Byte-compiled / optimized / DLL files
46 | __pycache__/
47 | *.py[cod]
48 | *$py.class
49 |
50 | # C extensions
51 | *.so
52 |
53 | # Distribution / packaging
54 | .Python
55 | build/
56 | develop-eggs/
57 | dist/
58 | downloads/
59 | eggs/
60 | .eggs/
61 | lib/
62 | lib64/
63 | parts/
64 | sdist/
65 | var/
66 | wheels/
67 | share/python-wheels/
68 | *.egg-info/
69 | .installed.cfg
70 | *.egg
71 | MANIFEST
72 |
73 | # PyInstaller
74 | # Usually these files are written by a python script from a template
75 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
76 | *.manifest
77 | *.spec
78 |
79 | # Installer logs
80 | pip-log.txt
81 | pip-delete-this-directory.txt
82 |
83 | # Unit test / coverage reports
84 | htmlcov/
85 | .tox/
86 | .nox/
87 | .coverage
88 | .coverage.*
89 | .cache
90 | nosetests.xml
91 | coverage.xml
92 | *.cover
93 | *.py,cover
94 | .hypothesis/
95 | .pytest_cache/
96 | cover/
97 |
98 | # Translations
99 | *.mo
100 | *.pot
101 |
102 | # Django stuff:
103 | *.log
104 | local_settings.py
105 | db.sqlite3
106 | db.sqlite3-journal
107 |
108 | # Flask stuff:
109 | instance/
110 | .webassets-cache
111 |
112 | # Scrapy stuff:
113 | .scrapy
114 |
115 | # Sphinx documentation
116 | docs/_build/
117 |
118 | # PyBuilder
119 | .pybuilder/
120 | target/
121 |
122 | # Jupyter Notebook
123 | .ipynb_checkpoints
124 |
125 | # IPython
126 | profile_default/
127 | ipython_config.py
128 |
129 | # pyenv
130 | # For a library or package, you might want to ignore these files since the code is
131 | # intended to run in multiple environments; otherwise, check them in:
132 | # .python-version
133 |
134 | # pipenv
135 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
136 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
137 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
138 | # install all needed dependencies.
139 | #Pipfile.lock
140 |
141 | # poetry
142 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
143 | # This is especially recommended for binary packages to ensure reproducibility, and is more
144 | # commonly ignored for libraries.
145 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
146 | #poetry.lock
147 |
148 | # pdm
149 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
150 | #pdm.lock
151 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
152 | # in version control.
153 | # https://pdm.fming.dev/#use-with-ide
154 | .pdm.toml
155 |
156 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
157 | __pypackages__/
158 |
159 | # Celery stuff
160 | celerybeat-schedule
161 | celerybeat.pid
162 |
163 | # SageMath parsed files
164 | *.sage.py
165 |
166 | # Environments
167 | .env
168 | .venv
169 | env/
170 | venv/
171 | ENV/
172 | env.bak/
173 | venv.bak/
174 |
175 | # Spyder project settings
176 | .spyderproject
177 | .spyproject
178 |
179 | # Rope project settings
180 | .ropeproject
181 |
182 | # mkdocs documentation
183 | /site
184 | /public
185 |
186 | # mypy
187 | .mypy_cache/
188 | .dmypy.json
189 | dmypy.json
190 |
191 | # Pyre type checker
192 | .pyre/
193 |
194 | # pytype static type analyzer
195 | .pytype/
196 |
197 | # Cython debug symbols
198 | cython_debug/
199 |
200 | # PyCharm
201 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
202 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
203 | # and can be added to the global gitignore or merged into this file. For a more nuclear
204 | # option (not recommended) you can uncomment the following to ignore the entire idea folder.
205 | #.idea/
206 |
207 | ### Python Patch ###
208 | # Poetry local configuration file - https://python-poetry.org/docs/configuration/#local-configuration
209 | poetry.toml
210 |
211 | # ruff
212 | .ruff_cache/
213 |
214 | # LSP config files
215 | pyrightconfig.json
216 |
217 | ### VisualStudioCode ###
218 | .vscode/*
219 | !.vscode/settings.json
220 | !.vscode/tasks.json
221 | !.vscode/launch.json
222 | !.vscode/extensions.json
223 | !.vscode/*.code-snippets
224 |
225 | # Local History for Visual Studio Code
226 | .history/
227 |
228 | # Built Visual Studio Code Extensions
229 | *.vsix
230 |
231 | ### VisualStudioCode Patch ###
232 | # Ignore all local history of files
233 | .history
234 | .ionide
235 |
236 | ### Windows ###
237 | # Windows thumbnail cache files
238 | Thumbs.db
239 | Thumbs.db:encryptable
240 | ehthumbs.db
241 | ehthumbs_vista.db
242 |
243 | # Dump file
244 | *.stackdump
245 |
246 | # Folder config file
247 | [Dd]esktop.ini
248 |
249 | # Recycle Bin used on file shares
250 | $RECYCLE.BIN/
251 |
252 | # Windows Installer files
253 | *.cab
254 | *.msi
255 | *.msix
256 | *.msm
257 | *.msp
258 |
259 | # Windows shortcuts
260 | *.lnk
261 |
--------------------------------------------------------------------------------
/.pre-commit-hooks.yaml:
--------------------------------------------------------------------------------
1 | # (c) 2023 Amazon Web Services, Inc. or its affiliates. All Rights Reserved.
2 | # This AWS Content is provided subject to the terms of the AWS Customer Agreement
3 | # available at http://aws.amazon.com/agreement or other written agreement between
4 | # Customer and either Amazon Web Services, Inc. or Amazon Web Services EMEA SARL
5 | # or both.
6 |
7 | #
8 | # This pre-commit hook configuration allows the ASH tool
9 | # to be run as a pre-commit hook on the local workstation
10 | # where pre-commit is installed.
11 | #
12 | # The configuration parameters below turn on "verbose"
13 | # output, so that even when the hook returns success (0)
14 | # the output from the run of the ASH tool will be displayed
15 | # to the user.
16 | #
17 | # The supported stages are listed below, including the
18 | # "manual" stage. Users of this hook may want to limit
19 | # the stages where this hook runs to only "manual" since
20 | # the ASH tool can take several minutes to complete.
21 | #
22 | # Output from the run of the ASH tool can be found in
23 | # a file named "aggregated_results.txt" in the root
24 | # folder of the git repository where the pre-commit
25 | # command was invoked from.
26 | #
27 | # A useful example pre-commit-config.yaml clause for
28 | # running the ASH tool on MacOS, where "finch" is
29 | # used as the containerd user interface is:
30 | # - repo: git@github.com:aws-samples/automated-security-helper.git
31 | # rev: '1.0.8-e-03May2023'
32 | # hooks:
33 | # - id: ash
34 | # name: scan files using ash
35 | # stages: [ manual ]
36 | # args: [ "-f" ]
37 | #
38 | # With that pre-commit-config, this hook can be run manually
39 | # against the code repository using this command:
40 | # pre-commit run --hook-stage manual ash
41 | #
42 | - id: ash
43 | name: use ASH for checking
44 | description: Runs various code scanners in separate Docker containers
45 | entry: ash
46 | verbose: true
47 | stages: [commit, merge-commit, push, manual]
48 | language: script
49 | pass_filenames: false
50 | types_or: [yaml, json, shell, bash, python]
51 |
--------------------------------------------------------------------------------
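Taken together with the example `.pre-commit-config.yaml` in the comments above, a typical manual invocation looks like this (a sketch; it assumes `pre-commit` is installed and the hook is pinned as shown in those comments):

```sh
# Install pre-commit, then run only the ASH hook in the "manual" stage so the
# multi-minute scan stays opt-in rather than running on every commit.
pip install pre-commit
pre-commit run --hook-stage manual ash

# Per the notes above, results land in aggregated_results.txt at the repo root.
cat aggregated_results.txt
```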
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # Automated Security Helper - CHANGELOG
2 | - [v2.0.1](#v201)
3 | - [What's Changed](#whats-changed)
4 | - [v2.0.0](#v200)
5 | - [Breaking Changes](#breaking-changes)
6 | - [Features](#features)
7 | - [Fixes](#fixes)
8 | - [v1.5.1](#v151)
9 | - [What's Changed](#whats-changed-1)
10 | - [v1.5.0](#v150)
11 | - [What's Changed](#whats-changed-2)
12 | - [New Contributors](#new-contributors)
13 | - [v1.4.1](#v141)
14 | - [What's Changed](#whats-changed-3)
15 | - [v1.4.0](#v140)
16 | - [What's Changed](#whats-changed-4)
17 | - [v1.3.3](#v133)
18 | - [What's Changed](#whats-changed-5)
19 | - [v1.3.2](#v132)
20 | - [What's Changed](#whats-changed-6)
21 | - [New Contributors](#new-contributors-1)
22 | - [1.3.0 - 2024-04-17](#130---2024-04-17)
23 | - [Features](#features-1)
24 | - [Fixes](#fixes-1)
25 | - [Maintenance / Internal](#maintenance--internal)
26 | - [1.2.0-e-06Mar2024](#120-e-06mar2024)
27 | - [1.1.0-e-01Dec2023](#110-e-01dec2023)
28 | - [1.0.9-e-16May2023](#109-e-16may2023)
29 | - [1.0.8-e-03May2023](#108-e-03may2023)
30 | - [1.0.5-e-06Mar2023](#105-e-06mar2023)
31 | - [1.0.1-e-10Jan2023](#101-e-10jan2023)
32 |
33 | ## v2.0.1
34 |
35 | ### What's Changed
36 |
37 | - Fix handling of Bandit config files in util script
38 |
39 | ## v2.0.0
40 |
41 | ### Breaking Changes
42 |
43 | - Building ASH images for use in CI platforms (or other orchestration platforms that may require elevated access within the container) now requires targeting the `ci` stage of the `Dockerfile`:
44 |
45 | _via `ash` CLI_
46 |
47 | ```sh
48 | ash --no-run --build-target ci
49 | ```
50 |
51 | _via `docker` or other OCI CLI_
52 |
53 | ```sh
54 | docker build --tag automated-security-helper:ci --target ci .
55 | ```
56 |
57 | ### Features
58 |
59 | - Run ASH as non-root user to align with security best practices.
60 | - Added a CI version of the Dockerfile that still runs as root, to support build platforms where the UID/GID cannot be modified and where agents installed at runtime require elevated privileges.
61 |
62 | ### Fixes
63 |
64 | - Offline mode now skips NPM/PNPM/Yarn Audit checks (requires connection to registry to pull package information)
65 | - NPM install during image build now restricts available memory to prevent segmentation fault
66 |
67 | **Full Changelog**: https://github.com/awslabs/automated-security-helper/compare/v1.5.1...v2.0.0
68 |
69 | ## v1.5.1
70 |
71 | ### What's Changed
72 |
73 | - Fix SHELL directive in Dockerfile
74 | - Fix small items in Mkdocs config
75 |
76 | **Full Changelog**: https://github.com/awslabs/automated-security-helper/compare/v1.5.0...v1.5.1
77 |
78 | ## v1.5.0
79 |
80 | ### What's Changed
81 |
82 | - Introduced support for offline execution via `--offline`
83 |
84 | ### New Contributors
85 | * @awsmadi made their first contribution in https://github.com/awslabs/automated-security-helper/pull/104
86 |
87 | **Full Changelog**: https://github.com/awslabs/automated-security-helper/compare/v1.4.1...v1.5.0
88 |
89 | ## v1.4.1
90 |
91 | ### What's Changed
92 |
93 | - Fixed line endings on relevant files from CRLF to LF to resolve Windows build issues
94 |
95 | ## v1.4.0
96 |
97 | ### What's Changed
98 |
99 | - Adds `--format` parameter to `ash`/`ash-multi` scripts to enable additional output integrations, beginning with ASHARP (Automated Security Helper Aggregated Report Parser) as the intermediary data model to enable subsequent conversion from there.
100 | - Adds `automated_security_helper` Python code as a module of the same name within the new `src` directory, including the poetry.lock and pyproject.toml files to support it. This module includes the `asharp` CLI script, which enables programmatic parsing of the aggregated_results content in conjunction with the JSON output changes.
101 | - Adds pre-stage build of `automated_security_helper` module to Dockerfile
102 | - Adds support to handle when `--format` is a value other than the current default of `text` so scanners switch output to programmatically parseable output formats and `asharp` is called to parse the `aggregated_results.txt` file into `aggregated_results.txt.json`.
103 | - Moved source of version string truth into `pyproject.toml` for all projects, removed `__version__` file to coincide with this.
104 |
105 | **Full Changelog**: https://github.com/awslabs/automated-security-helper/compare/v1.3.3...v1.4.0
106 |
107 | ## v1.3.3
108 |
109 | ### What's Changed
110 | * fix(ash): adjust where/when output-dir is created, if necessary by @climbertjh2 in https://github.com/awslabs/automated-security-helper/pull/74
111 | * fix(ash): set execute permission on ash script in the container by @climbertjh2 in https://github.com/awslabs/automated-security-helper/pull/81
112 | * fix: update __version__ file to match release tag format in github.com by @climbertjh2 in https://github.com/awslabs/automated-security-helper/pull/84
113 |
114 |
115 | **Full Changelog**: https://github.com/awslabs/automated-security-helper/compare/v1.3.2...v1.3.3
116 |
117 | ## v1.3.2
118 |
119 | ### What's Changed
120 | * added get-scan-set.py to utils scripts to return a list of non-ignored files for processing by @scrthq in https://github.com/awslabs/automated-security-helper/pull/47
121 | * fix/codebuild shared bindmount issue by @scrthq in https://github.com/awslabs/automated-security-helper/pull/49
122 | * fix error in reflecting return code in ash script by @climbertjh2 in https://github.com/awslabs/automated-security-helper/pull/51
123 | * Issue 58: missing double quotes by @awsntheule in https://github.com/awslabs/automated-security-helper/pull/64
124 | * fixed cdk nag scanner, added unique stack names based on input filenames. corrected guards on git clone calls within the scanner scripts to ensure those happen in the container image by @scrthq in https://github.com/awslabs/automated-security-helper/pull/54
125 | * Add support for pnpm audit by @awsntheule in https://github.com/awslabs/automated-security-helper/pull/66
126 | * fix(cdk-nag-scan): copy output files to separate folders by @climbertjh2 in https://github.com/awslabs/automated-security-helper/pull/69
127 | * fix(ash): use /tmp rather than tmpfs for scratch area by @climbertjh2 in https://github.com/awslabs/automated-security-helper/pull/73
128 | * Fix CTRL-C cancelling by @awsntheule in https://github.com/awslabs/automated-security-helper/pull/71
129 |
130 | ### New Contributors
131 | * @awsntheule made their first contribution in https://github.com/awslabs/automated-security-helper/pull/64
132 |
133 | **Full Changelog**: https://github.com/awslabs/automated-security-helper/compare/1.2.0-e-06Mar2024...v1.3.2
134 |
135 | ## 1.3.0 - 2024-04-17
136 |
137 | ### Features
138 |
139 | * New version scheme introduced, moving ASH to SemVer alignment for versioning releases
140 | * Moved version number to standalone `__version__` file for easier version maintainability
141 | * Added [ripgrep](https://github.com/BurntSushi/ripgrep) to replace `grep` in the `cdk-docker-execute.sh` script, both for speed and to respect `.gitignore`/`.ignore` file specifications automatically.
142 | * Updated `cdk-docker-execute.sh` script to create a unique internal stack name per imported-and-scanned CloudFormation template.
143 |
144 | ### Fixes
145 |
146 | * Removed extraneous `git clone` calls into the temporary `${_ASH_RUN_DIR}` now that the single container is the primary use case, preventing collisions and time spent on repeated tasks during scans.
147 |
148 | ### Maintenance / Internal
149 |
150 | * Added better support for debug logging via `--debug` flag.
151 | * Added new `debug_show_tree` function to `utils/common.sh` for easy debugging insertion of a tree call at any point in the scan to see repository contents
152 | * Improved functionality of `utils/get-scan-set.py` script to generate the ignore spec and initial scan set to file in the output directory
153 |
154 | ## 1.2.0-e-06Mar2024
155 |
156 | * Changes default base image in the root Dockerfile from `public.ecr.aws/bitnami/python:3.10` to `public.ecr.aws/docker/library/python:3.10-bullseye` to allow builds for linux/arm64 platforms to work
157 | * The `ash` script has been renamed to `ash-multi` for cases where the multi-container architecture is needed locally. When running the single container, it is copied in as `ash` itself and becomes the entrypoint of the in-container run to prevent API changes for CI invocations.
158 | * The new `ash` script used as the local invocation entrypoint now defaults to building the single-container image and running the scan within it as normal
159 | * Printed output path of the `aggregated_results.txt` now shows the correct, local output path when using the single container instead of `/out/aggregated_results.txt`
160 | * Updated GitHub Actions workflow for the repo to invoke ASH using the `ash` script as well to validate the entire experience end-to-end
161 | * Deprecated `--finch|-f` option with warning indicating to use `--oci-runner finch|-o finch` if needing to use Finch explicitly
162 |
163 | ## 1.1.0-e-01Dec2023
164 |
165 | * Introduced single-container architecture via single Dockerfile in the repo root
166 | * Updated `utils/*.sh` and `ash` shell scripts to support running within a single container
167 | * Added new `ash_helpers.{sh,ps1}` scripts to support building and running the new container image
168 | * Changed CDK Nag scanning to use TypeScript instead of Python in order to reduce the number of dependencies
169 | * Changed identification of files to scan from `find` to `git ls-files` for Git repositories in order to reduce the number of files scanned and to avoid scanning files that are not tracked by Git
170 | * Updated the multi-container Dockerfiles to be compatible with the script updates and retain backwards compatibility
171 | * Updated ASH documentation and README content to reflect the changes and improve the user experience
172 | * Added simple image build workflow configured as a required status check for PRs
173 |
174 | ## 1.0.9-e-16May2023
175 |
176 | * Changed YAML scanning (presumed CloudFormation templates) to look for CloudFormation template files explicitly, excluding some well-known folders
177 | * Added additional files that Checkov knows how to scan (Dockerfiles, .gitlab-ci.yml) to the list of CloudFormation templates
178 | * Re-factored CDK scanning in several ways:
179 | * Moved Python package install to the Dockerfile (container image build) so it's done once
180 | * Removed code that doesn't do anything
181 |   * Added diagnostic information to the report regarding the CDK version, Node version, and NPM packages installed.
182 | * Fixed Semgrep exit code
183 |
184 | ## 1.0.8-e-03May2023
185 |
186 | * Cloud9 Quickstart
187 | * Remove cdk virtual env
188 | * README reformat
189 | * Pre-commit hook guidance
190 | * Fix Grype error code
191 | * Minor bug fixes
192 |
193 |
194 |
195 | ## 1.0.5-e-06Mar2023
196 |
197 | * hardcoded Checkov config
198 | * Fix return code for the different Docker containers
199 | * Fix image for ARM based machines
200 | * Added Finch support
201 |
202 |
203 |
204 | ## 1.0.1-e-10Jan2023
205 |
206 | ASH version 1.0.1-e-10Jan2023 is out!
207 |
208 | * Speed - running time is shorter by 40-50%
209 | * Frameworks support - we support Bash, Java, Go and C# code
210 | * New tool - ASH is running [Semgrep](https://github.com/returntocorp/semgrep) for supported frameworks
211 | * Force scans for specific frameworks - you can use the `--ext` flag to force a scan for a specific framework
212 | For example: `ash --source-dir . --ext py` (Python)
213 | * Versioning - use `ash --version` to check your current version
214 | * Bug fixes and improvements
215 |
216 |
217 |
--------------------------------------------------------------------------------
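The CLI flags introduced across these releases compose on the same `ash` entrypoint; the sketch below gathers them in one place (flag names come from the entries above; the `json` value passed to `--format` is an illustrative assumption, not a confirmed enum):

```sh
# v2.0.0: build the CI-targeted image without running a scan
ash --no-run --build-target ci

# v1.5.0: offline execution
ash --source-dir . --output-dir ash_output --offline

# v1.4.0: alternate output format via ASHARP ("json" is illustrative here)
ash --source-dir . --format json

# 1.2.0: explicit OCI runner selection, replacing the deprecated --finch/-f flag
ash --source-dir . --oci-runner finch

# 1.0.1: force a scan for a specific framework, e.g. Python
ash --source-dir . --ext py
```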
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | ## Code of Conduct
2 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct).
3 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact
4 | opensource-codeofconduct@amazon.com with any additional questions or comments.
5 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing Guidelines
2 |
3 | Thank you for your interest in contributing to our project. Whether it's a bug report, new feature, correction, or additional
4 | documentation, we greatly value feedback and contributions from our community.
5 |
6 | Please read through this document before submitting any issues or pull requests to ensure we have all the necessary
7 | information to effectively respond to your bug report or contribution.
8 |
9 | ## Reporting Bugs/Feature Requests
10 |
11 | We welcome you to use the GitHub issue tracker to report bugs or suggest features.
12 |
13 | When filing an issue, please check existing open, or recently closed, issues to make sure somebody else hasn't already
14 | reported the issue. Please try to include as much information as you can. Details like these are incredibly useful:
15 |
16 | * A reproducible test case or series of steps
17 | * The version of our code being used
18 | * Any modifications you've made relevant to the bug
19 | * Anything unusual about your environment or deployment
20 |
21 | ## Contributing via Pull Requests
22 |
23 | Contributions via pull requests are much appreciated. Before sending us a pull request, please ensure that:
24 |
25 | 1. You are working against the latest source on the `main` branch.
26 | 2. You check existing open, and recently merged, pull requests to make sure someone else hasn't addressed the problem already.
27 | 3. You open an issue to discuss any significant work - we would hate for your time to be wasted.
28 |
29 | To send us a pull request, please:
30 |
31 | 1. Fork the repository.
32 | 2. Create a branch in your fork where the branch name is something meaningful. We encourage
33 | the use of `feature/`, `bugfix/`, `hotfix/`, and so on for branch naming.
34 | 3. Modify the source; please focus on the specific change you are contributing. If you also reformat all the code,
35 | it will be hard for us to focus on your change.
36 | 4. Ensure local tests pass.
37 | 5. Commit to your fork using clear commit messages.
38 | 6. Send us a pull request, answering any default questions in the pull request interface.
39 | 7. Pay attention to any automated continuous integration (CI) failures reported in the pull request, and stay involved in the conversation.
40 |
41 | GitHub provides additional documentation on [forking a repository](https://help.github.com/articles/fork-a-repo/) and
42 | [creating a pull request](https://help.github.com/articles/creating-a-pull-request/).
43 |
44 | ## Finding contributions to work on
45 |
46 | Looking at the existing issues is a great way to find something to contribute to. As our projects, by default, use the default GitHub issue labels (enhancement/bug/duplicate/help wanted/invalid/question/wontfix), looking at any 'help wanted' issues is a great place to start.
47 |
48 | ## Code of Conduct
49 |
50 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct).
51 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact
52 | opensource-codeofconduct@amazon.com with any additional questions or comments.
53 |
54 | ## Security issue notifications
55 |
56 | If you discover a potential security issue in this project, we ask that you notify AWS/Amazon Security via our [vulnerability reporting page](http://aws.amazon.com/security/vulnerability-reporting/). Please do **not** create a public GitHub issue.
57 |
58 | ## Licensing
59 |
60 | See the [LICENSE](LICENSE) file for our project's licensing. We will ask you to confirm the licensing of your contribution.
61 |
62 | We may ask you to sign a [Contributor License Agreement (CLA)](http://en.wikipedia.org/wiki/Contributor_License_Agreement) for larger changes.
63 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | #checkov:skip=CKV_DOCKER_7: Base image is using a non-latest version tag by default, Checkov is unable to parse due to the use of ARG
2 | ARG BASE_IMAGE=public.ecr.aws/docker/library/python:3.10-bullseye
3 |
4 | # First stage: Build poetry requirements
5 | FROM ${BASE_IMAGE} AS poetry-reqs
6 | ENV PYTHONDONTWRITEBYTECODE 1
7 | RUN apt-get update && \
8 | apt-get upgrade -y && \
9 | apt-get install -y python3-venv && \
10 | rm -rf /var/lib/apt/lists/*
11 | RUN python3 -m pip install -U pip poetry
12 | WORKDIR /src
13 | COPY pyproject.toml poetry.lock README.md ./
14 | COPY src/ src/
15 | RUN poetry build
16 |
17 | # Second stage: Core ASH image
18 | FROM ${BASE_IMAGE} AS core
19 | SHELL ["/bin/bash", "-c"]
20 | ARG BUILD_DATE_EPOCH="-1"
21 | ARG OFFLINE="NO"
22 | ARG OFFLINE_SEMGREP_RULESETS="p/ci"
23 |
24 | ENV BUILD_DATE_EPOCH="${BUILD_DATE_EPOCH}"
25 | ENV OFFLINE="${OFFLINE}"
26 | ENV OFFLINE_AT_BUILD_TIME="${OFFLINE}"
27 | ENV OFFLINE_SEMGREP_RULESETS="${OFFLINE_SEMGREP_RULESETS}"
28 | ENV TZ=UTC
29 | RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
30 |
31 | #
32 | # General / shared component installation
33 | #
34 | WORKDIR /deps
35 |
36 | #
37 | # Add GitHub's public fingerprints to known_hosts inside the image to prevent fingerprint
38 | # confirmation requests unexpectedly
39 | #
40 | RUN mkdir -p ${HOME}/.ssh && \
41 | echo "github.com ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIOMqqnkVzrm0SdG6UOoqKLsabgH5C9okWi0dh2l9GKJl" >> ${HOME}/.ssh/known_hosts && \
42 | echo "github.com ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBEmKSENjQEezOmxkZMy7opKgwFB9nkt5YRrYMjNuG5N87uRgg6CLrbo5wAdT/y6v0mKV0U2w0WZ2YB/++Tpockg=" >> ${HOME}/.ssh/known_hosts && \
43 | echo "github.com ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQCj7ndNxQowgcQnjshcLrqPEiiphnt+VTTvDP6mHBL9j1aNUkY4Ue1gvwnGLVlOhGeYrnZaMgRK6+PKCUXaDbC7qtbW8gIkhL7aGCsOr/C56SJMy/BCZfxd1nWzAOxSDPgVsmerOBYfNqltV9/hWCqBywINIR+5dIg6JTJ72pcEpEjcYgXkE2YEFXV1JHnsKgbLWNlhScqb2UmyRkQyytRLtL+38TGxkxCflmO+5Z8CSSNY7GidjMIZ7Q4zMjA2n1nGrlTDkzwDCsw+wqFPGQA179cnfGWOWRVruj16z6XyvxvjJwbz0wQZ75XK5tKSb7FNyeIEs4TT4jk+S4dhPeAUC5y+bDYirYgM4GC7uEnztnZyaVWQ7B381AK4Qdrwt51ZqExKbQpTUNn+EjqoTwvqNj4kqx5QUCI0ThS/YkOxJCXmPUWZbhjpCg56i+2aB6CmK2JGhn57K5mj0MNdBXA4/WnwH6XoPWJzK5Nyu2zB3nAZp+S5hpQs+p1vN1/wsjk=" >> ${HOME}/.ssh/known_hosts
44 |
45 | #
46 | # Base dependency installation
47 | #
48 | RUN apt-get update && \
49 | apt-get upgrade -y && \
50 | apt-get install -y \
51 | curl \
52 | python3-venv \
53 | git \
54 | ripgrep \
55 | ruby-dev \
56 | tree && \
57 | rm -rf /var/lib/apt/lists/*
58 |
59 | #
60 | # Install nodejs@18 using latest recommended method
61 | #
62 | RUN set -uex; \
63 | apt-get update; \
64 | apt-get install -y ca-certificates curl gnupg; \
65 | mkdir -p /etc/apt/keyrings; \
66 | curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key \
67 | | gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg; \
68 | NODE_MAJOR=18; \
69 | echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_$NODE_MAJOR.x nodistro main" \
70 | > /etc/apt/sources.list.d/nodesource.list; \
71 | apt-get -qy update; \
72 | apt-get -qy install nodejs;
73 | #
74 | # Install and upgrade pip
75 | #
76 | RUN wget https://bootstrap.pypa.io/get-pip.py && python3 get-pip.py
77 | RUN python3 -m pip install --no-cache-dir --upgrade pip
78 |
79 | #
80 | # Git (git-secrets)
81 | #
82 | RUN git clone https://github.com/awslabs/git-secrets.git && \
83 | cd git-secrets && \
84 | make install
85 |
86 | #
87 | # Python
88 | #
89 | RUN python3 -m pip install --no-cache-dir \
90 | bandit \
91 | nbconvert \
92 | jupyterlab
93 |
94 | #
95 | # YAML (Checkov, cfn-nag)
96 | #
97 | RUN echo "gem: --no-document" >> /etc/gemrc && \
98 | python3 -m pip install checkov pathspec && \
99 | gem install cfn-nag
100 |
101 | #
102 | # JavaScript: (no-op --- node is already installed in the image, nothing else needed)
103 | #
104 |
105 | #
106 | # Grype/Syft/Semgrep - Also sets default location env vars for root user for CI compat
107 | #
108 | ENV GRYPE_DB_CACHE_DIR="/deps/.grype"
109 | ENV SEMGREP_RULES_CACHE_DIR="/deps/.semgrep"
110 | RUN mkdir -p ${GRYPE_DB_CACHE_DIR} ${SEMGREP_RULES_CACHE_DIR}
111 |
112 | RUN curl -sSfL https://raw.githubusercontent.com/anchore/syft/main/install.sh | \
113 | sh -s -- -b /usr/local/bin
114 |
115 | RUN curl -sSfL https://raw.githubusercontent.com/anchore/grype/main/install.sh | \
116 | sh -s -- -b /usr/local/bin
117 |
118 | RUN python3 -m pip install semgrep
119 |
120 | RUN set -uex; if [[ "${OFFLINE}" == "YES" ]]; then \
121 | grype db update && \
122 | mkdir -p ${SEMGREP_RULES_CACHE_DIR} && \
123 |     for i in $OFFLINE_SEMGREP_RULESETS; do curl "https://semgrep.dev/c/${i}" -o "${SEMGREP_RULES_CACHE_DIR}/$(basename "${i}").yml"; done; \
124 | fi
125 |
126 | # Setting PYTHONPATH so Jinja2 can resolve correctly
127 | # IMPORTANT: This is predicated on the Python version that is installed!
128 | # Changing the BASE_IMAGE may result in this breaking.
129 | ENV PYTHONPATH='/opt/bitnami/python/lib/python3.10/site-packages'
130 |
131 | #
132 | # Prerequisite installation complete, finishing up
133 | #
134 | #
135 | # Setting default WORKDIR to /src
136 | WORKDIR /src
137 |
138 | #
139 | # Make sure the default dirs are initialized
140 | #
141 | RUN mkdir -p /src && \
142 | mkdir -p /out && \
143 | mkdir -p /ash/utils
144 |
145 | #
146 | # Install CDK Nag stub dependencies
147 | #
148 | # Update NPM to latest
149 | COPY ./utils/cdk-nag-scan /ash/utils/cdk-nag-scan/
150 | # Limit memory size available for Node to prevent segmentation faults during npm install
151 | ENV NODE_OPTIONS=--max_old_space_size=512
152 | RUN npm install -g npm pnpm yarn && \
153 | cd /ash/utils/cdk-nag-scan && \
154 | npm install --quiet
155 |
156 | #
157 | # COPY ASH source to /ash instead of / to isolate
158 | #
159 | COPY ./utils/cfn-to-cdk /ash/utils/cfn-to-cdk/
160 | COPY ./utils/*.* /ash/utils/
161 | COPY ./appsec_cfn_rules /ash/appsec_cfn_rules/
162 | COPY ./ash-multi /ash/ash
163 | COPY ./pyproject.toml /ash/pyproject.toml
164 |
165 | COPY --from=poetry-reqs /src/dist/*.whl .
166 | RUN python3 -m pip install *.whl && rm *.whl
167 |
168 | #
169 | # Make sure the ash script is executable
170 | #
171 | RUN chmod -R 755 /ash && chmod -R 777 /src /out /deps
172 |
173 | #
174 | # Flag ASH as local execution mode since we are running in a container already
175 | #
176 | ENV _ASH_EXEC_MODE="local"
177 |
178 | #
179 | # Append /ash to PATH to allow calling `ash` directly
180 | #
181 | ENV PATH="$PATH:/ash"
182 |
183 |
184 | # CI stage -- any customizations specific to CI platform compatibility should be added
185 | # in this stage if it is not applicable to ASH outside of CI usage
186 | FROM core AS ci
187 |
188 | ENV ASH_TARGET=ci
189 |
190 |
191 | # Final stage: Non-root user final version. This image contains all dependencies
192 | # for ASH from the `core` stage, but ensures it is launched as a non-root user.
193 | # Running as a non-root user impacts the ability to run ASH reliably across CI
194 | # platforms and other orchestrators where the initialization and launch of the image
195 | # is not configurable for customizing the running UID/GID.
196 | FROM core AS non-root
197 |
198 | ENV ASH_TARGET=non-root
199 |
200 | ARG UID=500
201 | ARG GID=100
202 | ARG ASH_USER=ash-user
203 | ARG ASH_GROUP=ash-group
204 | ARG ASHUSER_HOME=/home/${ASH_USER}
205 |
206 | #
207 | # Create a non-root user in the container and run as this user
208 | #
209 | # And add GitHub's public fingerprints to known_hosts inside the image to prevent fingerprint
210 | # confirmation requests unexpectedly
211 | #
212 | # ignore a failure to add the group
213 | RUN addgroup --gid ${GID} ${ASH_GROUP} || :
214 | RUN adduser --disabled-password --disabled-login \
215 | --uid ${UID} --gid ${GID} \
216 | ${ASH_USER} && \
217 | mkdir -p ${ASHUSER_HOME}/.ssh && \
218 | echo "github.com ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIOMqqnkVzrm0SdG6UOoqKLsabgH5C9okWi0dh2l9GKJl" >> ${ASHUSER_HOME}/.ssh/known_hosts && \
219 | echo "github.com ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBEmKSENjQEezOmxkZMy7opKgwFB9nkt5YRrYMjNuG5N87uRgg6CLrbo5wAdT/y6v0mKV0U2w0WZ2YB/++Tpockg=" >> ${ASHUSER_HOME}/.ssh/known_hosts && \
220 | echo "github.com ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQCj7ndNxQowgcQnjshcLrqPEiiphnt+VTTvDP6mHBL9j1aNUkY4Ue1gvwnGLVlOhGeYrnZaMgRK6+PKCUXaDbC7qtbW8gIkhL7aGCsOr/C56SJMy/BCZfxd1nWzAOxSDPgVsmerOBYfNqltV9/hWCqBywINIR+5dIg6JTJ72pcEpEjcYgXkE2YEFXV1JHnsKgbLWNlhScqb2UmyRkQyytRLtL+38TGxkxCflmO+5Z8CSSNY7GidjMIZ7Q4zMjA2n1nGrlTDkzwDCsw+wqFPGQA179cnfGWOWRVruj16z6XyvxvjJwbz0wQZ75XK5tKSb7FNyeIEs4TT4jk+S4dhPeAUC5y+bDYirYgM4GC7uEnztnZyaVWQ7B381AK4Qdrwt51ZqExKbQpTUNn+EjqoTwvqNj4kqx5QUCI0ThS/YkOxJCXmPUWZbhjpCg56i+2aB6CmK2JGhn57K5mj0MNdBXA4/WnwH6XoPWJzK5Nyu2zB3nAZp+S5hpQs+p1vN1/wsjk=" >> ${ASHUSER_HOME}/.ssh/known_hosts
221 |
222 | # Change ownership and permissions now that we are running with a non-root
223 | # user by default.
224 | RUN chown -R ${UID}:${GID} ${ASHUSER_HOME} /src /out /deps && \
225 | chmod 750 -R ${ASHUSER_HOME} /src /out /deps
226 |
227 | # Setting default WORKDIR to ${ASHUSER_HOME}
228 | WORKDIR ${ASHUSER_HOME}
229 |
230 | USER ${UID}:${GID}
231 |
232 | #
233 | # Set the HOME environment variable to be the HOME folder for the non-root user,
234 | # along with any additional details that were set to root user values by default
235 | #
236 | ENV HOME=${ASHUSER_HOME}
237 | ENV ASH_USER=${ASH_USER}
238 | ENV ASH_GROUP=${ASH_GROUP}
239 |
240 | HEALTHCHECK --interval=12s --timeout=12s --start-period=30s \
241 | CMD type ash || exit 1
242 |
243 | ENTRYPOINT [ ]
244 | CMD [ "ash" ]
245 |
--------------------------------------------------------------------------------
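As a quick reference for the stages defined above, the following build commands are a sketch (the `ci` invocation mirrors the CHANGELOG's v2.0.0 example; the custom UID/GID values mirror the ones the CI workflow passes to `--container-uid`/`--container-gid`):

```sh
# Build the CI stage (runs as root for platforms that cannot remap UID/GID).
docker build --tag automated-security-helper:ci --target ci .

# Build the default non-root stage, optionally overriding the UID/GID build args
# (defaults are 500/100 per the ARG declarations above).
docker build --tag automated-security-helper:non-root --target non-root \
  --build-arg UID=1001 --build-arg GID=123 .
```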
/LICENSE:
--------------------------------------------------------------------------------
1 |
2 | Apache License
3 | Version 2.0, January 2004
4 | http://www.apache.org/licenses/
5 |
6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
7 |
8 | 1. Definitions.
9 |
10 | "License" shall mean the terms and conditions for use, reproduction,
11 | and distribution as defined by Sections 1 through 9 of this document.
12 |
13 | "Licensor" shall mean the copyright owner or entity authorized by
14 | the copyright owner that is granting the License.
15 |
16 | "Legal Entity" shall mean the union of the acting entity and all
17 | other entities that control, are controlled by, or are under common
18 | control with that entity. For the purposes of this definition,
19 | "control" means (i) the power, direct or indirect, to cause the
20 | direction or management of such entity, whether by contract or
21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
22 | outstanding shares, or (iii) beneficial ownership of such entity.
23 |
24 | "You" (or "Your") shall mean an individual or Legal Entity
25 | exercising permissions granted by this License.
26 |
27 | "Source" form shall mean the preferred form for making modifications,
28 | including but not limited to software source code, documentation
29 | source, and configuration files.
30 |
31 | "Object" form shall mean any form resulting from mechanical
32 | transformation or translation of a Source form, including but
33 | not limited to compiled object code, generated documentation,
34 | and conversions to other media types.
35 |
36 | "Work" shall mean the work of authorship, whether in Source or
37 | Object form, made available under the License, as indicated by a
38 | copyright notice that is included in or attached to the work
39 | (an example is provided in the Appendix below).
40 |
41 | "Derivative Works" shall mean any work, whether in Source or Object
42 | form, that is based on (or derived from) the Work and for which the
43 | editorial revisions, annotations, elaborations, or other modifications
44 | represent, as a whole, an original work of authorship. For the purposes
45 | of this License, Derivative Works shall not include works that remain
46 | separable from, or merely link (or bind by name) to the interfaces of,
47 | the Work and Derivative Works thereof.
48 |
49 | "Contribution" shall mean any work of authorship, including
50 | the original version of the Work and any modifications or additions
51 | to that Work or Derivative Works thereof, that is intentionally
52 | submitted to Licensor for inclusion in the Work by the copyright owner
53 | or by an individual or Legal Entity authorized to submit on behalf of
54 | the copyright owner. For the purposes of this definition, "submitted"
55 | means any form of electronic, verbal, or written communication sent
56 | to the Licensor or its representatives, including but not limited to
57 | communication on electronic mailing lists, source code control systems,
58 | and issue tracking systems that are managed by, or on behalf of, the
59 | Licensor for the purpose of discussing and improving the Work, but
60 | excluding communication that is conspicuously marked or otherwise
61 | designated in writing by the copyright owner as "Not a Contribution."
62 |
63 | "Contributor" shall mean Licensor and any individual or Legal Entity
64 | on behalf of whom a Contribution has been received by Licensor and
65 | subsequently incorporated within the Work.
66 |
67 | 2. Grant of Copyright License. Subject to the terms and conditions of
68 | this License, each Contributor hereby grants to You a perpetual,
69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
70 | copyright license to reproduce, prepare Derivative Works of,
71 | publicly display, publicly perform, sublicense, and distribute the
72 | Work and such Derivative Works in Source or Object form.
73 |
74 | 3. Grant of Patent License. Subject to the terms and conditions of
75 | this License, each Contributor hereby grants to You a perpetual,
76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
77 | (except as stated in this section) patent license to make, have made,
78 | use, offer to sell, sell, import, and otherwise transfer the Work,
79 | where such license applies only to those patent claims licensable
80 | by such Contributor that are necessarily infringed by their
81 | Contribution(s) alone or by combination of their Contribution(s)
82 | with the Work to which such Contribution(s) was submitted. If You
83 | institute patent litigation against any entity (including a
84 | cross-claim or counterclaim in a lawsuit) alleging that the Work
85 | or a Contribution incorporated within the Work constitutes direct
86 | or contributory patent infringement, then any patent licenses
87 | granted to You under this License for that Work shall terminate
88 | as of the date such litigation is filed.
89 |
90 | 4. Redistribution. You may reproduce and distribute copies of the
91 | Work or Derivative Works thereof in any medium, with or without
92 | modifications, and in Source or Object form, provided that You
93 | meet the following conditions:
94 |
95 | (a) You must give any other recipients of the Work or
96 | Derivative Works a copy of this License; and
97 |
98 | (b) You must cause any modified files to carry prominent notices
99 | stating that You changed the files; and
100 |
101 | (c) You must retain, in the Source form of any Derivative Works
102 | that You distribute, all copyright, patent, trademark, and
103 | attribution notices from the Source form of the Work,
104 | excluding those notices that do not pertain to any part of
105 | the Derivative Works; and
106 |
107 | (d) If the Work includes a "NOTICE" text file as part of its
108 | distribution, then any Derivative Works that You distribute must
109 | include a readable copy of the attribution notices contained
110 | within such NOTICE file, excluding those notices that do not
111 | pertain to any part of the Derivative Works, in at least one
112 | of the following places: within a NOTICE text file distributed
113 | as part of the Derivative Works; within the Source form or
114 | documentation, if provided along with the Derivative Works; or,
115 | within a display generated by the Derivative Works, if and
116 | wherever such third-party notices normally appear. The contents
117 | of the NOTICE file are for informational purposes only and
118 | do not modify the License. You may add Your own attribution
119 | notices within Derivative Works that You distribute, alongside
120 | or as an addendum to the NOTICE text from the Work, provided
121 | that such additional attribution notices cannot be construed
122 | as modifying the License.
123 |
124 | You may add Your own copyright statement to Your modifications and
125 | may provide additional or different license terms and conditions
126 | for use, reproduction, or distribution of Your modifications, or
127 | for any such Derivative Works as a whole, provided Your use,
128 | reproduction, and distribution of the Work otherwise complies with
129 | the conditions stated in this License.
130 |
131 | 5. Submission of Contributions. Unless You explicitly state otherwise,
132 | any Contribution intentionally submitted for inclusion in the Work
133 | by You to the Licensor shall be under the terms and conditions of
134 | this License, without any additional terms or conditions.
135 | Notwithstanding the above, nothing herein shall supersede or modify
136 | the terms of any separate license agreement you may have executed
137 | with Licensor regarding such Contributions.
138 |
139 | 6. Trademarks. This License does not grant permission to use the trade
140 | names, trademarks, service marks, or product names of the Licensor,
141 | except as required for reasonable and customary use in describing the
142 | origin of the Work and reproducing the content of the NOTICE file.
143 |
144 | 7. Disclaimer of Warranty. Unless required by applicable law or
145 | agreed to in writing, Licensor provides the Work (and each
146 | Contributor provides its Contributions) on an "AS IS" BASIS,
147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
148 | implied, including, without limitation, any warranties or conditions
149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
150 | PARTICULAR PURPOSE. You are solely responsible for determining the
151 | appropriateness of using or redistributing the Work and assume any
152 | risks associated with Your exercise of permissions under this License.
153 |
154 | 8. Limitation of Liability. In no event and under no legal theory,
155 | whether in tort (including negligence), contract, or otherwise,
156 | unless required by applicable law (such as deliberate and grossly
157 | negligent acts) or agreed to in writing, shall any Contributor be
158 | liable to You for damages, including any direct, indirect, special,
159 | incidental, or consequential damages of any character arising as a
160 | result of this License or out of the use or inability to use the
161 | Work (including but not limited to damages for loss of goodwill,
162 | work stoppage, computer failure or malfunction, or any and all
163 | other commercial damages or losses), even if such Contributor
164 | has been advised of the possibility of such damages.
165 |
166 | 9. Accepting Warranty or Additional Liability. While redistributing
167 | the Work or Derivative Works thereof, You may choose to offer,
168 | and charge a fee for, acceptance of support, warranty, indemnity,
169 | or other liability obligations and/or rights consistent with this
170 | License. However, in accepting such obligations, You may act only
171 | on Your own behalf and on Your sole responsibility, not on behalf
172 | of any other Contributor, and only if You agree to indemnify,
173 | defend, and hold each Contributor harmless for any liability
174 | incurred by, or claims asserted against, such Contributor by reason
175 | of your accepting any such warranty or additional liability.
176 |
177 | END OF TERMS AND CONDITIONS
178 |
179 | APPENDIX: How to apply the Apache License to your work.
180 |
181 | To apply the Apache License to your work, attach the following
182 | boilerplate notice, with the fields enclosed by brackets "[]"
183 | replaced with your own identifying information. (Don't include
184 | the brackets!) The text should be enclosed in the appropriate
185 | comment syntax for the file format. We also recommend that a
186 | file or class name and description of purpose be included on the
187 | same "printed page" as the copyright notice for easier
188 | identification within third-party archives.
189 |
190 | Copyright [yyyy] [name of copyright owner]
191 |
192 | Licensed under the Apache License, Version 2.0 (the "License");
193 | you may not use this file except in compliance with the License.
194 | You may obtain a copy of the License at
195 |
196 | http://www.apache.org/licenses/LICENSE-2.0
197 |
198 | Unless required by applicable law or agreed to in writing, software
199 | distributed under the License is distributed on an "AS IS" BASIS,
200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
201 | See the License for the specific language governing permissions and
202 | limitations under the License.
--------------------------------------------------------------------------------
/NOTICE:
--------------------------------------------------------------------------------
1 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
--------------------------------------------------------------------------------
/appsec_cfn_rules/IamUserExistsRule.rb:
--------------------------------------------------------------------------------
1 | require 'cfn-nag/custom_rules/base'
2 | require 'cfn-nag/violation'
3 |
4 |
5 | class IamUserExistsRule < BaseRule
6 | def rule_text
7 | 'IAM Users represent long-term credentials'
8 | end
9 |
10 | def rule_type
11 | Violation::FAILING_VIOLATION
12 | end
13 |
14 | def rule_id
15 | 'APPSEC-IAM-UseEphemeralCredentials-IAMUser'
16 | end
17 |
18 | def audit_impl(cfn_model)
19 | cfn_model.resources_by_type('AWS::IAM::User').map(&:logical_resource_id)
20 | end
21 | end
22 |
--------------------------------------------------------------------------------
/appsec_cfn_rules/KeyPairAsCFnParameterRule.rb:
--------------------------------------------------------------------------------
1 | require 'cfn-nag/custom_rules/base'
2 | require 'cfn-nag/violation'
3 |
4 |
5 | class KeyPairAsCFnParameterRule < BaseRule
6 | def rule_text
7 |     'Passing a KeyPair to a CloudFormation Template represents a long-term credential that is not rotated'
8 | end
9 |
10 | def rule_type
11 | Violation::FAILING_VIOLATION
12 | end
13 |
14 | def rule_id
15 | 'APPSEC-IAM-UseEphemeralCreds-KeyPairAsCFnParam'
16 | end
17 |
18 | def audit_impl(cfn_model)
19 |
20 | parameters = cfn_model.parameters.select do |name, properties|
21 |
22 | # TODO: find way to preserve the line number from properties.type["line"]
23 | properties.type["value"] == "AWS::EC2::KeyPair::KeyName"
24 | end
25 |
26 | parameters.values.map(&:id)
27 | end
28 | end
--------------------------------------------------------------------------------
/appsec_cfn_rules/ResourcePolicyStarAccessVerbPolicyRule.rb:
--------------------------------------------------------------------------------
1 | require 'cfn-nag/custom_rules/base'
2 | require 'cfn-nag/violation'
3 | require 'cfn-model/parser/policy_document_parser'
4 |
5 |
6 | class ResourcePolicyStarAccessVerbPolicyRule < BaseRule
7 | def rule_text
8 | 'Overly permissive access in resource policy allowing caller to mutate or delete the resource itself'
9 | end
10 |
11 | def rule_type
12 | Violation::FAILING_VIOLATION
13 | end
14 |
15 | def rule_id
16 | 'APPSEC-IAM-LeastPrivilege-ResourcePolicyStarVerb'
17 | end
18 |
19 | def audit_impl(cfn_model)
20 | logical_resource_ids = []
21 | cfn_model.resources.values.each do |resource|
22 |
23 | # If the resource has an IAM resource access policy
24 | unless (resource.accessPolicies.nil?) then
25 | parsed_resource_policy = PolicyDocumentParser.new().parse(resource.accessPolicies)
26 | parsed_resource_policy.statements.each do |statement|
27 |
28 | # If any Allow statement grants an action ending in ":*", the caller can mutate or delete the resource itself
29 | if statement.effect == "Allow" then
30 | statement.actions.each do |action|
31 | if action.downcase.end_with?(":*") then
32 | logical_resource_ids << resource.logical_resource_id
33 | end
34 | end
35 | end
36 | end
37 | end
38 | end
39 |
40 | logical_resource_ids
41 | end
42 | end
--------------------------------------------------------------------------------
/appsec_cfn_rules/StarResourceAccessPolicyRule.rb:
--------------------------------------------------------------------------------
1 | require 'cfn-nag/custom_rules/base'
2 | require 'cfn-nag/violation'
3 | require 'cfn-model/parser/policy_document_parser'
4 |
5 |
6 | class StarResourceAccessPolicyRule < BaseRule
7 | def rule_text
8 | 'A resource with an associated IAM resource policy is allowing world access'
9 | end
10 |
11 | def rule_type
12 | Violation::FAILING_VIOLATION
13 | end
14 |
15 | def rule_id
16 | 'APPSEC-IAM-RestrictPublicAccess-StarAccessPolicy'
17 | end
18 |
19 | def audit_impl(cfn_model)
20 | logical_resource_ids = []
21 |
22 | cfn_model.resources.values.each do |resource|
23 |
24 | # If the resource has an IAM resource access policy
25 | unless (resource.accessPolicies.nil?) then
26 | parsed_resource_policy = PolicyDocumentParser.new().parse(resource.accessPolicies)
27 | parsed_resource_policy.statements.each do |statement|
28 |
29 | # If any statement allows access from "*" then the resource is effectively public
30 | if statement.effect == "Allow" then
31 | if statement.principal.has_key?("AWS") and statement.principal.has_value?("*") then
32 | logical_resource_ids << resource.logical_resource_id
33 | end
34 | end
35 | end
36 | end
37 | end
38 |
39 | logical_resource_ids
40 | end
41 | end
--------------------------------------------------------------------------------
/appsec_cfn_rules/beta/FlowLogsEnabledForVPCsRule.rb:
--------------------------------------------------------------------------------
1 | require 'cfn-nag/custom_rules/base'
2 | require 'cfn-nag/violation'
3 |
4 |
5 | class FlowLogsEnabledForVPCsRule < BaseRule
6 | def rule_text
7 | 'FlowLogs should be enabled for created VPCs'
8 | end
9 |
10 | def rule_type
11 | Violation::FAILING_VIOLATION
12 | end
13 |
14 | def rule_id
15 | 'APPSEC-DC-LogEverywhere-VPCFlowLogs'
16 | end
17 |
18 | def audit_impl(cfn_model)
19 | vpc_ids = cfn_model.resources_by_type('AWS::EC2::VPC').map(&:logical_resource_id)
20 | flowlogs = cfn_model.resources_by_type('AWS::EC2::FlowLog')
21 |
22 | # Report violation on all VPC Ids that aren't referred to by FlowLogs
23 | vpc_ids - flowlogs.map(&:ResourceId)
24 | end
25 | end
26 |
--------------------------------------------------------------------------------
/appsec_cfn_rules/beta/PasswordAsCFnParameterRule.rb:
--------------------------------------------------------------------------------
1 | require 'cfn-nag/custom_rules/base'
2 | require 'cfn-nag/violation'
3 |
4 |
5 | class PasswordAsCFnParameterRule < BaseRule
6 | def rule_text
7 |     'Passing a password to a CloudFormation Template represents a long-term credential that is not rotated'
8 | end
9 |
10 | def rule_type
11 | Violation::FAILING_VIOLATION
12 | end
13 |
14 | def rule_id
15 | 'APPSEC-IAM-UseEphemeralCreds-PasswordAsCFnParam'
16 | end
17 |
18 | def audit_impl(cfn_model)
19 |
20 | parameters = cfn_model.parameters.select do |name, properties|
21 |
22 | # TODO: find way to preserve the line number from properties.type["line"]
23 | name.downcase.include?("password") and
24 | properties.type["value"] == "String" and
25 | properties.allowedValues != [true, false] and
26 | properties.allowedValues != ['Yes', 'No']
27 | end
28 |
29 | parameters.values.map(&:id)
30 | end
31 | end
--------------------------------------------------------------------------------
/appsec_cfn_rules/beta/RotationEnabledForSecretsManagerRule.rb:
--------------------------------------------------------------------------------
1 | require 'cfn-nag/custom_rules/base'
2 | require 'cfn-nag/violation'
3 |
4 |
5 | class RotationEnabledForSecretsManagerRule < BaseRule
6 | def rule_text
7 | 'Rotation should be enabled for created Secrets'
8 | end
9 |
10 | def rule_type
11 | Violation::FAILING_VIOLATION
12 | end
13 |
14 | def rule_id
15 | 'APPSEC-IAM-UseEphemeralCreds-RotateSecretsManager'
16 | end
17 |
18 | def audit_impl(cfn_model)
19 | secret_ids = cfn_model.resources_by_type('AWS::SecretsManager::Secret').map(&:logical_resource_id)
20 | rotation_schedules = cfn_model.resources_by_type('AWS::SecretsManager::RotationSchedule')
21 |
22 | # Report violation on all Secret Ids that aren't referred to by RotationSchedule
23 | secret_ids - rotation_schedules.map(&:SecretId)
24 | end
25 | end
26 |
--------------------------------------------------------------------------------
/ash:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
3 | # SPDX-License-Identifier: Apache-2.0
4 |
5 | # Resolve the absolute path of the parent of the script directory (ASH repo root)
6 | export ASH_ROOT_DIR="$(cd "$(dirname "$0")"; pwd)"
7 | export ASH_UTILS_DIR="${ASH_ROOT_DIR}/utils"
8 |
9 | # Set local variables
10 | SOURCE_DIR=""
11 | OUTPUT_DIR=""
12 | OUTPUT_DIR_SPECIFIED="NO"
13 | CONTAINER_UID_SPECIFIED="NO"
14 | CONTAINER_GID_SPECIFIED="NO"
15 | OUTPUT_FORMAT="text"
16 | DOCKER_EXTRA_ARGS="${DOCKER_EXTRA_ARGS:-}"
17 | DOCKER_RUN_EXTRA_ARGS=""
18 | ASH_ARGS=""
19 | NO_BUILD="NO"
20 | NO_RUN="NO"
21 | DEBUG="NO"
22 | OFFLINE="NO"
23 | OFFLINE_SEMGREP_RULESETS="p/ci"
24 | TARGET_STAGE="non-root"
25 | # Parse arguments
26 | while (("$#")); do
27 | case $1 in
28 | --source-dir)
29 | shift
30 | SOURCE_DIR="$1"
31 | ;;
32 | --output-dir)
33 | shift
34 | OUTPUT_DIR="$1"
35 | OUTPUT_DIR_SPECIFIED="YES"
36 | ;;
37 | --offline)
38 | OFFLINE="YES"
39 | ;;
40 | --offline-semgrep-rulesets)
41 | shift
42 | OFFLINE_SEMGREP_RULESETS="$1"
43 | OFFLINE="YES"
44 | ;;
45 | --force)
46 | DOCKER_EXTRA_ARGS="${DOCKER_EXTRA_ARGS} --no-cache"
47 | ;;
48 | --quiet | -q)
49 | DOCKER_EXTRA_ARGS="${DOCKER_EXTRA_ARGS} -q"
50 | ASH_ARGS="${ASH_ARGS} --quiet"
51 | ;;
52 | --oci-runner | -o)
53 | shift
54 | OCI_RUNNER="$1"
55 | ;;
56 | --container-uid | -u)
57 | shift
58 | CONTAINER_UID_SPECIFIED="YES"
59 | CONTAINER_UID="$1"
60 | ;;
61 |     --container-gid | -g)
62 | shift
63 | CONTAINER_GID_SPECIFIED="YES"
64 | CONTAINER_GID="$1"
65 | ;;
66 | --no-build)
67 | NO_BUILD="YES"
68 | ;;
69 | --no-run)
70 | NO_RUN="YES"
71 | ;;
72 | --debug|-d)
73 | DEBUG="YES"
74 | ;;
75 | --format)
76 | shift
77 | OUTPUT_FORMAT="$1"
78 | ;;
79 | --build-target)
80 | shift
81 | TARGET_STAGE="$1"
82 | ;;
83 | --help | -h)
84 | source "${ASH_ROOT_DIR}/ash-multi" --help
85 | exit 0
86 | ;;
87 | --version | -v)
88 | source "${ASH_ROOT_DIR}/ash-multi" --version
89 | exit 0
90 | ;;
91 | --finch|-f)
92 | # Show colored deprecation warning from entrypoint script and exit 1
93 | source "${ASH_ROOT_DIR}/ash-multi" --finch
94 | exit 1
95 | ;;
96 | *)
97 | ASH_ARGS="${ASH_ARGS} $1"
98 | esac
99 | shift
100 | done
101 |
102 | export ASH_IMAGE_NAME=${ASH_IMAGE_NAME:-"automated-security-helper:${TARGET_STAGE}"}
103 |
104 | # Default to the pwd
105 | if [ "${SOURCE_DIR}" = "" ]; then
106 | SOURCE_DIR="$(pwd)"
107 | fi
108 |
109 | # Resolve the absolute paths
110 | SOURCE_DIR="$(cd "$SOURCE_DIR"; pwd)"
111 | if [[ "${OUTPUT_DIR_SPECIFIED}" == "YES" ]]; then
112 | mkdir -p "${OUTPUT_DIR}"
113 | OUTPUT_DIR="$(cd "$OUTPUT_DIR"; pwd)"
114 | fi
115 |
116 | #
117 | # Gather the UID and GID of the caller
118 | #
119 | HOST_UID=$(id -u)
120 | HOST_GID=$(id -g)
121 |
122 | # Resolve any offline mode flags
123 | if [[ "${OFFLINE}" == "YES" ]]; then
124 | DOCKER_RUN_EXTRA_ARGS="${DOCKER_RUN_EXTRA_ARGS} --network=none"
125 | fi
126 |
127 | # Resolve the OCI_RUNNER
128 | RESOLVED_OCI_RUNNER=${OCI_RUNNER:-$(command -v finch || command -v docker || command -v nerdctl || command -v podman)}
129 |
130 | # If we couldn't resolve an OCI_RUNNER, exit
131 | if [[ "${RESOLVED_OCI_RUNNER}" == "" ]]; then
132 | echo "Unable to resolve an OCI_RUNNER -- exiting"
133 | exit 1
134 | # else, build and run the image
135 | else
136 | if [[ "${DEBUG}" = "YES" ]]; then
137 | set -x
138 | fi
139 | echo "Resolved OCI_RUNNER to: ${RESOLVED_OCI_RUNNER}"
140 |
141 | # Build the image if the --no-build flag is not set
142 | if [ "${NO_BUILD}" = "NO" ]; then
143 | CONTAINER_UID_OPTION=""
144 | CONTAINER_GID_OPTION=""
145 | if [[ ${CONTAINER_UID_SPECIFIED} = "YES" ]]; then
146 | CONTAINER_UID_OPTION="--build-arg UID=${CONTAINER_UID}" # set the UID build-arg if --container-uid is specified
147 | elif [[ "${HOST_UID}" != "" ]]; then
148 | CONTAINER_UID_OPTION="--build-arg UID=${HOST_UID}" # set the UID build-arg to the caller's UID if --container-uid is not specified
149 | fi
150 | if [[ ${CONTAINER_GID_SPECIFIED} = "YES" ]]; then
151 | CONTAINER_GID_OPTION="--build-arg GID=${CONTAINER_GID}" # set the GID build-arg if --container-gid is specified
152 | elif [[ "${HOST_GID}" != "" ]]; then
153 |       CONTAINER_GID_OPTION="--build-arg GID=${HOST_GID}" # set the GID build-arg to the caller's GID if --container-gid is not specified
154 | fi
155 | echo "Building image ${ASH_IMAGE_NAME} -- this may take a few minutes during the first build..."
156 | ${RESOLVED_OCI_RUNNER} build \
157 | ${CONTAINER_UID_OPTION} \
158 | ${CONTAINER_GID_OPTION} \
159 | --tag ${ASH_IMAGE_NAME} \
160 | --target ${TARGET_STAGE} \
161 | --file "${ASH_ROOT_DIR}/Dockerfile" \
162 | --build-arg OFFLINE="${OFFLINE}" \
163 | --build-arg OFFLINE_SEMGREP_RULESETS="${OFFLINE_SEMGREP_RULESETS}" \
164 | --build-arg BUILD_DATE="$(date +%s)" \
165 | ${DOCKER_EXTRA_ARGS} \
166 | "${ASH_ROOT_DIR}"
167 | fi
168 |
169 | # Run the image if the --no-run flag is not set
170 | RC=0
171 | if [ "${NO_RUN}" = "NO" ]; then
172 | MOUNT_SOURCE_DIR="--mount type=bind,source=${SOURCE_DIR},destination=/src"
173 | MOUNT_OUTPUT_DIR=""
174 | OUTPUT_DIR_OPTION=""
175 | if [[ ${OUTPUT_DIR_SPECIFIED} = "YES" ]]; then
176 | MOUNT_SOURCE_DIR="${MOUNT_SOURCE_DIR},readonly" # add readonly source mount when --output-dir is specified
177 | MOUNT_OUTPUT_DIR="--mount type=bind,source=${OUTPUT_DIR},destination=/out"
178 | OUTPUT_DIR_OPTION="--output-dir /out"
179 | fi
180 | echo "Running ASH scan using built image..."
181 | eval ${RESOLVED_OCI_RUNNER} run \
182 | --rm \
183 | -e ACTUAL_SOURCE_DIR="${SOURCE_DIR}" \
184 | -e ASH_DEBUG=${DEBUG} \
185 | -e ASH_OUTPUT_FORMAT=${OUTPUT_FORMAT} \
186 | ${MOUNT_SOURCE_DIR} \
187 | ${MOUNT_OUTPUT_DIR} \
188 | ${DOCKER_RUN_EXTRA_ARGS} \
189 | ${ASH_IMAGE_NAME} \
190 | ash \
191 | --source-dir /src \
192 | ${OUTPUT_DIR_OPTION} \
193 | $ASH_ARGS
194 | RC=$?
195 | fi
196 | if [[ "${DEBUG}" = "YES" ]]; then
197 | set +x
198 | fi
199 | exit ${RC}
200 | fi
201 |
--------------------------------------------------------------------------------
/docs/content/assets/images/aws-logo-light.svg:
--------------------------------------------------------------------------------
1 |
2 |
3 |
39 |
--------------------------------------------------------------------------------
/docs/content/contributing.md:
--------------------------------------------------------------------------------
1 | # Contributing Guidelines
2 |
3 | Thank you for your interest in contributing to our project. Whether it's a bug report, new feature, correction, or additional
4 | documentation, we greatly value feedback and contributions from our community.
5 |
6 | Please read through this document before submitting any issues or pull requests to ensure we have all the necessary
7 | information to effectively respond to your bug report or contribution.
8 |
9 | ## Reporting Bugs/Feature Requests
10 |
11 | We welcome you to use the GitHub issue tracker to report bugs or suggest features.
12 |
13 | When filing an issue, please check existing open, or recently closed, issues to make sure somebody else hasn't already
14 | reported the issue. Please try to include as much information as you can. Details like these are incredibly useful:
15 |
16 | * A reproducible test case or series of steps
17 | * The version of our code being used
18 | * Any modifications you've made relevant to the bug
19 | * Anything unusual about your environment or deployment
20 |
21 | ## Contributing via Pull Requests
22 |
23 | Contributions via pull requests are much appreciated. Before sending us a pull request, please ensure that:
24 |
25 | 1. You are working against the latest source on the `main` branch.
26 | 2. You check existing open, and recently merged, pull requests to make sure someone else hasn't addressed the problem already.
27 | 3. You open an issue to discuss any significant work - we would hate for your time to be wasted.
28 |
29 | To send us a pull request, please:
30 |
31 | 1. Fork the repository.
32 | 2. Create a branch in your fork where the branch name is something meaningful. We encourage
33 | the use of `feature/`, `bugfix/`, `hotfix/`, and so on for branch naming.
34 | 3. Modify the source; please focus on the specific change you are contributing. If you also reformat all the code,
35 | it will be hard for us to focus on your change.
36 | 4. Ensure local tests pass.
37 | 5. Commit to your fork using clear commit messages.
38 | 6. Send us a pull request, answering any default questions in the pull request interface.
39 | 7. Pay attention to any automated continuous integration (CI) failures reported in the pull request, and stay involved in the conversation.
40 |
41 | GitHub provides additional documentation on [forking a repository](https://help.github.com/articles/fork-a-repo/) and
42 | [creating a pull request](https://help.github.com/articles/creating-a-pull-request/).
43 |
44 | ## Finding contributions to work on
45 |
46 | Looking at the existing issues is a great way to find something to contribute to. Our projects use the default GitHub issue labels (enhancement/bug/duplicate/help wanted/invalid/question/wontfix), so looking at any 'help wanted' issues is a great place to start.
47 |
48 | ## Code of Conduct
49 |
50 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct).
51 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact
52 | opensource-codeofconduct@amazon.com with any additional questions or comments.
53 |
54 | ## Security issue notifications
55 |
56 | If you discover a potential security issue in this project we ask that you notify AWS/Amazon Security via our [vulnerability reporting page](http://aws.amazon.com/security/vulnerability-reporting/). Please do **not** create a public GitHub issue.
57 |
58 | ## Licensing
59 |
60 | See the [LICENSE](https://github.com/awslabs/automated-security-helper/blob/main/LICENSE) file for our project's licensing. We will ask you to confirm the licensing of your contribution.
61 |
62 | We may ask you to sign a [Contributor License Agreement (CLA)](http://en.wikipedia.org/wiki/Contributor_License_Agreement) for larger changes.
63 |
--------------------------------------------------------------------------------
/docs/content/docs/prerequisites.md:
--------------------------------------------------------------------------------
1 | # Prerequisites
2 |
3 | To start using `ash` please make sure to install and configure the following:
4 |
5 | * Tools installed to run Linux containers, such as [Finch](https://github.com/runfinch/finch), [Rancher Desktop](https://rancherdesktop.io/), [Podman Desktop](https://podman-desktop.io/), or [Docker Desktop](https://docs.docker.com/get-docker/).
6 | * This can be any CLI + container engine combination; there is nothing in ASH that requires a specific container runtime.
7 | * If on Windows, you will also likely need Windows Subsystem for Linux (WSL) installed as a prerequisite for the listed container engine tools. Please see the specific instructions for the tool of choice regarding Windows-specific prerequisites.
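8 |
9 | To confirm that your container engine can run the images ASH needs, you can try a trivial `linux/amd64` image first (a minimal sketch; substitute `finch`, `nerdctl`, or `podman` for `docker` to match your tooling):
10 |
11 | ```sh
12 | # Run a minimal linux/amd64 image; this should print "x86_64"
13 | docker run --rm --platform linux/amd64 public.ecr.aws/docker/library/alpine:latest uname -m
14 | ```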
8 |
--------------------------------------------------------------------------------
/docs/content/docs/support.md:
--------------------------------------------------------------------------------
1 | # Support Matrix
2 |
3 | ASH itself should support running in any environment that can support running `linux/amd64` container images.
4 |
5 | ## Local Execution
6 |
7 | The table below provides a matrix of **tested** runtime environments for ASH.
8 |
9 | | OCI Container Tool | Host | Full Support | Partial Support | No Support | Untested |
10 | |:-------------------------------------:|:----------------------:|:------------------:|:---------------:|:----------:|:--------:|
11 | | Finch | macOS w/ Intel | :white_check_mark: | | | |
12 | | Finch | macOS w/ Apple Silicon | :white_check_mark: | | | |
13 | | Docker Desktop | macOS w/ Intel | :white_check_mark: | | | |
14 | | Docker Desktop | macOS w/ Apple Silicon | :white_check_mark: | | | |
15 | | Rancher Desktop w/ docker+moby | macOS w/ Intel | :white_check_mark: | | | |
16 | | Rancher Desktop w/ docker+moby | macOS w/ Apple Silicon | :white_check_mark: | | | |
17 | | Rancher Desktop w/ nerdctl+containerd | macOS w/ Intel | :white_check_mark: | | | |
18 | | Rancher Desktop w/ nerdctl+containerd | macOS w/ Apple Silicon | :white_check_mark: | | | |
19 |
20 | ## Continuous Integration
21 |
22 | The table below provides a matrix of **tested** CI execution environments for ASH.
23 |
24 | For more information, please see [Running ASH in CI](../tutorials/running-ash-in-ci.md)
25 |
26 | | CI Platform | Execution Method | Full Support | Partial Support | No Support | Untested |
27 | |:--------------------------------------:|:----------------:|:------------------:|:---------------:|:----------:|:--------:|
28 | | GitLab CI | Container Job | :white_check_mark: | | | |
29 | | GitLab CI | `docker run` | :white_check_mark: | | | |
30 | | GitHub Actions (hosted Ubuntu agents) | Container Job | :white_check_mark: | | | |
31 | | GitHub Actions (hosted Ubuntu agents) | `docker run` | :white_check_mark: | | | |
32 | | Azure Pipelines (hosted Ubuntu agents) | Container Job | :white_check_mark: | | | |
33 | | Azure Pipelines (hosted Ubuntu agents) | `docker run` | :white_check_mark: | | | |
34 | | Jenkins | `docker run` | :white_check_mark: | | | |
35 |
--------------------------------------------------------------------------------
/docs/content/faq.md:
--------------------------------------------------------------------------------
1 | # Frequently Asked Questions
2 |
3 |
4 | - [How can I run `ash` on a Windows machine?](#how-can-i-run-ash-on-a-windows-machine)
5 | - [How can I run `ash` in a CI/CD pipeline?](#how-can-i-run-ash-in-a-cicd-pipeline)
6 | - [How can I run `ash` with finch or another OCI compatible tool?](#how-can-i-run-ash-with-finch-or-another-oci-compatible-tool)
7 |
8 |
9 | ## How can I run `ash` on a Windows machine?
10 |
11 | 1. Install Windows Subsystem for Linux (WSL) with an [Ubuntu distribution](https://docs.microsoft.com/en-us/windows/wsl/install). Be sure to use WSL 2.
12 | 2. Install Docker Desktop for Windows and activate [the WSL integration](https://docs.docker.com/desktop/windows/wsl/).
13 | 3. Clone this git repository from a Windows terminal if you are connected via VPN (while on VPN, the clone may not work directly from the Ubuntu WSL environment).
14 | 4. From the Ubuntu WSL environment, execute the helper tool from the folder cloned in the previous step.
15 |
16 | ## How can I run `ash` in a CI/CD pipeline?
17 |
18 | For CDK Pipeline, please refer to the [ASH Pipeline solution](https://github.com/aws-samples/automated-security-helper-pipeline) available on GitHub.
19 |
20 | For additional CI pipeline support, please refer to the [Running ASH in CI](./tutorials/running-ash-in-ci.md) page on this site.
21 |
22 | ## How can I run `ash` with [finch](https://aws.amazon.com/blogs/opensource/introducing-finch-an-open-source-client-for-container-development/) or another OCI compatible tool?
23 |
24 | You can configure which OCI-compatible tool `ash` uses by setting the environment variable `OCI_RUNNER` (see the example at the bottom of this page).
25 |
26 | ## Can I use a Bandit configuration file when `ash` runs?
27 |
28 | Yes, `ash` will use a bandit configuration file if it is placed at the root of your project directory. It must be named `.bandit`, `bandit.yaml`, or `bandit.toml`. Configuration files must be formatted properly according to the [Bandit documentation](https://bandit.readthedocs.io/en/latest/config.html).
29 |
30 | > Note: paths excluded in a Bandit configuration file must begin with a `/` because `ash` uses an absolute path when calling `bandit`.
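31 |
32 | As an example of the `OCI_RUNNER` variable mentioned above, the following runs an ASH scan using finch instead of docker (a minimal sketch; substitute any OCI-compatible CLI installed on your machine):
33 |
34 | ```bash
35 | # Tell ash to use finch for both the container image build and the scan run
36 | OCI_RUNNER=finch ash --source-dir "$(pwd)" --output-dir "$(pwd)/ash_output"
37 | ```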
--------------------------------------------------------------------------------
/docs/content/index.md:
--------------------------------------------------------------------------------
1 | # ASH
2 |
3 | - [ASH; The *A*utomated *S*ecurity *H*elper](#ash-the-automated-security-helper)
4 | - [Description](#description)
5 | - [Supported frameworks](#supported-frameworks)
6 | - [Prerequisites](#prerequisites)
7 | - [Getting Started](#getting-started)
8 | - [Getting Started - Linux or MacOS](#getting-started---linux-or-macos)
9 | - [Getting Started - Windows](#getting-started---windows)
10 | - [Cloud9 Quickstart Guide](#cloud9-quickstart-guide)
11 | - [Using `ash` with `pre-commit`](#using-ash-with-pre-commit)
12 | - [Examples](#examples)
13 | - [Synopsis](#synopsis)
14 | - [FAQ](#faq)
15 | - [Feedback](#feedback)
16 | - [Contributing](#contributing)
17 | - [Security](#security)
18 | - [License](#license)
19 |
20 | ## ASH; The *A*utomated *S*ecurity *H*elper
21 |
22 | ## Description
23 |
24 | The security helper tool was created to help you reduce the probability of a security violation in new code, infrastructure, or IAM configurations
25 | by providing a fast and easy tool to conduct a preliminary security check as early as possible within your development process.
26 |
27 | - It is not a replacement for human review nor for standards enforced by your team/customer.
28 | - It uses light, open-source tools to maintain its flexibility and ability to run from anywhere.
29 | - ASH clones and runs different open-source tools, such as: git-secrets, bandit, Semgrep, Grype, Syft, nbconvert, npm-audit, checkov, cdk-nag and cfn-nag. Please review each tool's [LICENSE](license) before usage.
30 |
31 | ## Supported frameworks
32 |
33 | The security helper supports the following vectors:
34 |
35 | * Code
36 | * Git
37 | * **[git-secrets](https://github.com/awslabs/git-secrets)** - Find api keys, passwords, AWS keys in the code
38 | * Python
39 | * **[bandit](https://github.com/PyCQA/bandit)** - finds common security issues in Python code.
40 | * **[Semgrep](https://github.com/returntocorp/semgrep)** - finds common security issues in Python code.
41 | * **[Grype](https://github.com/anchore/grype)** - a vulnerability scanner for Python code.
42 | * **[Syft](https://github.com/anchore/syft)** - generates a Software Bill of Materials (SBOM) for Python code.
43 | * Jupyter Notebook
44 | * **[nbconvert](https://nbconvert.readthedocs.io/en/latest/)** - converts Jupyter Notebook (ipynb) files into Python source files, which are then scanned with Bandit.
45 | * JavaScript; NodeJS
46 | * **[npm-audit](https://docs.npmjs.com/cli/v8/commands/npm-audit)** - checks for vulnerabilities in Javascript and NodeJS.
47 | * **[Semgrep](https://github.com/returntocorp/semgrep)** - finds common security issues in JavaScript code.
48 | * **[Grype](https://github.com/anchore/grype)** - a vulnerability scanner for Javascript and NodeJS.
49 | * **[Syft](https://github.com/anchore/syft)** - generates a Software Bill of Materials (SBOM) for Javascript and NodeJS.
50 | * Go
51 | * **[Semgrep](https://github.com/returntocorp/semgrep)** - finds common security issues in Golang code.
52 | * **[Grype](https://github.com/anchore/grype)** - a vulnerability scanner for Golang.
53 | * **[Syft](https://github.com/anchore/syft)** - generates a Software Bill of Materials (SBOM) for Golang.
54 | * C#
55 | * **[Semgrep](https://github.com/returntocorp/semgrep)** - finds common security issues in C# code.
56 | * Bash
57 | * **[Semgrep](https://github.com/returntocorp/semgrep)** - finds common security issues in Bash code.
58 | * Java
59 | * **[Semgrep](https://github.com/returntocorp/semgrep)** - finds common security issues in Java code.
60 | * **[Grype](https://github.com/anchore/grype)** - a vulnerability scanner for Java.
61 | * **[Syft](https://github.com/anchore/syft)** - generates a Software Bill of Materials (SBOM) for Java.
62 | * Infrastructure
63 | * Terraform; Cloudformation
64 | * **[checkov](https://github.com/bridgecrewio/checkov)**
65 | * **[cfn_nag](https://github.com/stelligent/cfn_nag)**
66 | * **[cdk-nag](https://github.com/cdklabs/cdk-nag)** (via import of rendered CloudFormation templates into a custom CDK project with the [AWS Solutions NagPack](https://github.com/cdklabs/cdk-nag/blob/main/RULES.md#aws-solutions) enabled)
67 | * Dockerfile
68 | * **[checkov](https://github.com/bridgecrewio/checkov)**
69 |
70 | ## Prerequisites
71 |
72 | To start using `ash` please make sure to install and configure the following:
73 |
74 | - Tools installed to run Linux containers, such as [Finch](https://github.com/runfinch/finch), [Rancher Desktop](https://rancherdesktop.io/), [Podman Desktop](https://podman-desktop.io/), or [Docker Desktop](https://docs.docker.com/get-docker/).
75 | - This can be any command-line interface (CLI) + container engine combination; there is nothing in ASH that requires a specific container runtime.
76 | - If on Windows, you will also likely need Windows Subsystem for Linux (WSL) installed as a prerequisite for the listed container engine tools. Please see the specific instructions for the tool of choice regarding Windows-specific prerequisites.
77 |
78 | ## Getting Started
79 |
80 | ### Getting Started - Linux or MacOS
81 |
82 | Clone the git repository into a folder. For example:
83 |
84 | ```bash
85 | # Set up some variables
86 | REPO_DIR="${HOME}"/Documents/repos/reference
87 | REPO_NAME=automated-security-helper
88 |
89 | # Create a folder to hold reference git repositories
90 | mkdir -p ${REPO_DIR}
91 |
92 | # Clone the repository into the reference area
93 | git clone https://github.com/awslabs/automated-security-helper "${REPO_DIR}/${REPO_NAME}"
94 |
95 | # Set the repo path in your shell for easier access
96 | #
97 | # Add this (and the variable settings above) to
98 | # your ~/.bashrc, ~/.bash_profile, ~/.zshrc, or similar
99 | # start-up scripts so that the ash tool is in your PATH
100 | # after re-starting or starting a new shell.
101 | #
102 | export PATH="${PATH}:${REPO_DIR}/${REPO_NAME}"
103 |
104 | # Execute the ash tool
105 | ash --version
106 | ```
107 |
108 | ### Getting Started - Windows
109 |
110 | **ASH** uses containers, `bash` shell scripts, and multiple background processes running in parallel to run the multiple
111 | source code security scanning tools that it uses. Because of this, running `ash` from either a `PowerShell` or `cmd`
112 | shell on Windows is not possible. Furthermore, due to reliance on running containers, usually with Docker Desktop
113 | when running on Windows, there is an implicit dependency on having a Windows Subsystem for Linux (WSL) 2
114 | environment installed, configured, and operational on the Windows machine where `ash` will be run.
115 |
116 | To use `ash` on Windows:
117 |
118 | - Install, configure, and test the [WSL 2 environment on Windows](https://learn.microsoft.com/en-us/windows/wsl/install)
119 | - Install, configure, and test [Docker Desktop for Windows](https://docs.docker.com/desktop/install/windows-install/), using the WSL 2 environment
120 | - Use the [Windows Terminal](https://learn.microsoft.com/en-us/windows/terminal/install) program and open a command-line window to interact with the WSL 2 environment
121 | - Install and/or update the `git` client in the WSL 2 environment. This should be pre-installed, but you may need to update the version
122 |   using the `apt-get update` and `apt-get upgrade` commands.
123 |
124 | Once the WSL 2 command-line window is open, follow the steps above in [Getting Started - Linux or MacOS](#getting-started---linux-or-macos)
125 | to install and run `ash` in WSL 2 on the Windows machine.
126 |
127 | To run `ash`, open a Windows Terminal shell into the WSL 2 environment and use that command-line shell to run the `ash` command.
128 |
129 | **Note**: when working this way, be sure to `git clone` any git repositories to be scanned into the WSL 2 filesystem.
130 | Results are unpredictable if repositories or file sub-trees in the Windows filesystem are scanned using `ash`
131 | that is running in the WSL 2 environment.
132 |
133 | **Tip**: If you are using Microsoft VSCode for development, it is possible to configure a "remote" connection
134 | [using VSCode into the WSL2 environment](https://learn.microsoft.com/en-us/windows/wsl/tutorials/wsl-vscode).
135 | By doing this, you can host your git repositories in WSL 2 and still
136 | work with them as you have in the past when they were in the Windows filesystem of your Windows machine.
137 |
138 | ### Cloud9 Quickstart Guide
139 |
140 | Follow the instructions in the [quickstart page](/quickstart/README.md) to deploy an AWS Cloud9 Environment with ASH pre-installed.
141 |
142 | ## Using `ash` with `pre-commit`
143 |
144 | The `ash` tool can be used interactively on a workstation or run using the [`pre-commit`](https://pre-commit.com/) command.
145 | If `pre-commit` is used to run `ash`, then the `pre-commit` processing takes care of installing
146 | a copy of the `ash` git repository and setting up to run the `ash` program from that installed
147 | repository. Using `pre-commit` still requires usage of WSL 2 when running on Windows.
148 |
149 | Using `ash` as a [`pre-commit`](https://pre-commit.com/) hook enables development teams to use the `ash` tool
150 | in two ways. First, developers can use `ash` as a part of their local development process on whatever
151 | development workstation or environment they are using. Second, `ash` can be run in a build automation stage
152 | by running `pre-commit run --hook-stage manual ash` in that stage.
153 | When using `pre-commit`, run the `pre-commit` commands while in a folder/directory within the git repository that is
154 | configured with `pre-commit` hooks.
155 |
156 | Refer to the [pre-commit-hooks](./.pre-commit-hooks.yaml) file for information about the `pre-commit`
157 | hook itself.
158 |
159 | To configure a git repository to use the `ash` hook, start with the following `pre-commit-config` configuration:
160 |
161 | ```yaml
162 | - repo: git@github.com:awslabs/automated-security-helper.git
163 | rev: '1.1.0-e-01Dec2023' # update with the latest tagged version in the repository
164 | hooks:
165 | - id: ash
166 | name: scan files using ash
167 | stages: [ manual ]
168 | # uncomment the line below if using "finch" on MacOS
169 | # args: [ "-f" ]
170 | ```
171 |
172 | Once the `.pre-commit-config.yaml` file is updated, the `ash` tool can be run using the following command:
173 |
174 | ```bash
175 | pre-commit run --hook-stage manual ash
176 | ```
177 |
178 | Results from the run of the `ash` tool can be found in the `aggregated_results.txt` file
179 | in the `--output-dir` folder/directory.
180 |
181 | When ASH converts CloudFormation files into CDK and runs cdk-nag on them,
182 | the output of the cdk-nag checks is preserved in an 'ash_cf2cdk_output'
183 | folder/directory under `--output-dir` after the ASH scan is run. This folder/directory is
184 | in addition to the `aggregated_results.txt` file found in `--output-dir`.
185 |
186 | ## Examples
187 |
188 | ```bash
189 | # Getting help
190 | ash -h
191 |
192 | # Scan a directory
193 | ash --source-dir /my/remote/files
194 |
195 | # Save the final report to a different directory
196 | ash --output-dir /my/remote/files
197 |
198 | # Force rebuild the entire framework to obtain the latest changes and an up-to-date database
199 | ash --force
200 |
201 | # Force run scan for Python code
202 | ash --source-dir . --ext py
203 |
204 | # All of the options above can be used together.
205 | ```
206 |
207 | ## Synopsis
208 |
209 | ```text
210 | NAME:
211 | ash
212 | SYNOPSIS:
213 | ash [OPTIONS] --source-dir /path/to/dir --output-dir /path/to/dir
214 | OPTIONS:
215 | -v | --version Prints version number.
216 |
217 | -p | --preserve-report Add timestamp to the final report file to avoid overwriting it after multiple executions.
218 | --source-dir Path to the directory containing the code/files you wish to scan. Defaults to $(pwd)
219 | --output-dir Path to the directory that will contain the report of the scans. Defaults to $(pwd)
220 | --ext | -extension Force a file extension to scan. Defaults to identify files automatically.
221 | --offline Build ASH for offline execution. Defaults to false.
222 | --offline-semgrep-rulesets Specify Semgrep rulesets for use in ASH offline mode. Defaults to 'p/ci'.
223 | --force Rebuild the Docker images of the scanning tools, to make sure software is up-to-date.
224 | --no-cleanup Don't cleanup the work directory where temp reports are stored during scans.
225 | --debug Print ASH debug log information where applicable.
226 | -q | --quiet Don't print verbose text about the build process.
227 | -c | --no-color Don't print colorized output.
228 | -s | --single-process Run ash scanners serially rather than as separate, parallel sub-processes.
229 | -o | --oci-runner Use the specified OCI runner instead of docker to run the containerized tools.
230 | ```
231 |
232 | ## FAQ
233 |
234 | - Q: How can I run `ash` on a Windows machine?
235 |
236 | A: To run ASH on a Windows machine:
237 |
238 | - Install a Windows Subsystem for Linux (WSL) 2 environment with an [Ubuntu distribution](https://docs.microsoft.com/en-us/windows/wsl/install). Be sure to use WSL 2.
239 | - Install Docker Desktop for Windows and activate [the WSL 2 integration](https://docs.docker.com/desktop/windows/wsl/).
240 | - Clone this git repository from a Windows terminal if you are connected via VPN (while on VPN, the clone may not work directly from the Ubuntu WSL 2 environment).
241 | - Execute the helper tool from the folder cloned in the previous step, from within the Ubuntu WSL environment.
242 |
243 | - Q: How can I run `ash` in a Continuous Integration/Continuous Deployment (CI/CD) pipeline?
244 |
245 | A: Check the [ASH Pipeline solution](https://github.com/aws-samples/automated-security-helper-pipeline)
246 |
247 | - Q: How can I run `ash` with [finch](https://aws.amazon.com/blogs/opensource/introducing-finch-an-open-source-client-for-container-development/)
248 | or another Open Container Initiative (OCI) compatible tool?
249 |
250 | A: You can configure which OCI-compatible tool `ash` uses by setting the environment variable `OCI_RUNNER`.
251 |
252 | - Q: How can I exclude files from scanning?
253 |
254 | A: `ash` will scan all the files in the folder specified in `--source-dir`, or the current directory if invoked without parameters. If the folder is a git repository,
255 | then `ash` will use the exclusions in your `.gitignore` configuration file. If you want to exclude any specific folder, it **must** be added to your git ignore list before invoking `ash`.
256 |
257 | - Q: `ash` reports there are no files to scan, or you see a message stating `warning: You appear to have cloned an empty repository.`
258 |
259 | A: Ensure you're running ASH inside the folder you intend to scan or using the `--source-dir` parameter. If the folder where the files reside is part of a git repository, ensure the files are added (committed) before running ASH.
260 |
261 | - Q: How can I run `ash` in an environment without internet connectivity/with an airgap?
262 |
263 | A: From an environment that does have internet connectivity, build the ASH image using `--offline` and `--offline-semgrep-rulesets` to specify which resources to package into the image. The environment variable `$ASH_IMAGE_NAME` controls the name of the image. After building, push the image to a container registry that is reachable from within the airgapped environment. When you execute ASH in the offline environment, pass `--no-build` to `ash` alongside `--offline` and `--offline-semgrep-rulesets` to use your offline image and skip the build. Set `$ASH_IMAGE_NAME` to point ASH at the previously-built image available within your airgapped environment.
264 |
265 | ## Feedback
266 |
267 | Create an issue [here](https://github.com/awslabs/automated-security-helper/issues).
268 |
269 | ## Contributing
270 |
271 | See [CONTRIBUTING](contributing.md#contributing-guidelines) for information on how to contribute to this project.
272 |
273 | ## Security
274 |
275 | See [CONTRIBUTING](contributing.md#security-issue-notifications) for more information.
276 |
277 | ## License
278 |
279 | This library is licensed under the Apache 2.0 License. See the LICENSE file.
280 |
--------------------------------------------------------------------------------
/docs/content/tutorials/CI/ASH Execution Environment Viability.drawio:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/docs/content/tutorials/CI/ASH Execution Environment Viability.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/awslabs/automated-security-helper/5cd74321abbeda35204423fd0b57c9239a63768e/docs/content/tutorials/CI/ASH Execution Environment Viability.png
--------------------------------------------------------------------------------
/docs/content/tutorials/CI/AzurePipelines/azure-pipelines.yml:
--------------------------------------------------------------------------------
1 | #
2 | # This example uses the Container Jobs mechanism in Azure Pipelines to specify a
3 | # container image to run the steps of the job within, compared to needing to explicitly
4 | # run the container image and mount the source and output directories.
5 | #
6 | # This method allows your script step to call `ash` directly, as it is provided in the
7 | # PATH of the ASH container image. `ash` will also handle creating the output-dir for
8 | # you to make things easy, but you can always explicitly call `mkdir -p ash_output`
9 | # from the script before calling `ash` as well, if preferred.
10 | #
11 | # This example, like the others included in this repository, uses an example image
12 | # tag for the ASH container. This is intended to be replaced with a valid repository and
13 | # image tag for the ASH container, whether publicly available or one from a private
14 | # registry hosted within your internal environment.
15 | #
16 | # More info on Container Jobs in Azure Pipelines:
17 | # https://learn.microsoft.com/en-us/azure/devops/pipelines/process/container-phases
18 | #
19 | trigger:
20 | branches:
21 | include: [ '*' ]
22 |
23 | variables:
24 | ASH_IMAGE_REPO: myregistry.example.com/automated-security-helper
25 | ASH_IMAGE_TAG: latest
26 | ASH_OUTPUT_PATH: ash_output
27 |
28 | pool:
29 | vmImage: ubuntu-latest
30 |
31 | jobs:
32 | - job: scanInContainerJob
33 | container: ${{ variables.ASH_IMAGE_REPO }}:${{ variables.ASH_IMAGE_TAG }}
34 | displayName: Run ASH in Container Job
35 | steps:
36 | - checkout: self
37 | - script: |
38 | ash \
39 | --source-dir "$(pwd)" \
40 | --output-dir "${{ variables.ASH_OUTPUT_PATH }}"
41 | name: runash
42 | displayName: Run ASH scan
43 | - publish: ${{ variables.ASH_OUTPUT_PATH }}
44 | artifact: ${{ variables.ASH_OUTPUT_PATH }}
45 | condition: succeededOrFailed()
46 |
--------------------------------------------------------------------------------
/docs/content/tutorials/CI/GitHubActions/.github/workflows/run-ash-scan.yml:
--------------------------------------------------------------------------------
1 | #
2 | # This example clones the ASH repo and the repo to be scanned, then runs `ash`
3 | # against the repo.
4 | #
5 | # This will build the ASH container image, run the scan using the built image,
6 | # and publish the scan results as a build artifact.
7 | #
8 | name: ASH SAST Scan
9 |
10 | on:
11 | push:
12 | branches: [ '**' ]
13 |
14 | env:
15 | ASH_OUTPUT_PATH: ash_output
16 |
17 | jobs:
18 | containerjob:
19 | name: Run ASH Scan
20 | runs-on: ubuntu-latest
21 | steps:
22 | - name: Checkout ASH
23 | uses: actions/checkout@v4
24 | with:
25 | path: ./automated-security-helper
26 | repository: awslabs/automated-security-helper
27 | ref: v1.3.3
28 | - name: Checkout app repo
29 | uses: actions/checkout@v4
30 | with:
31 | path: ./repo
32 | - name: Run ASH scan against repo
33 | run: |
34 | export PATH="$(pwd)/automated-security-helper:$PATH"
35 |
36 | ash \
37 | --source-dir "$(pwd)/repo" \
38 | --output-dir "${{ env.ASH_OUTPUT_PATH }}"
39 |
40 | - name: Publish ${{ env.ASH_OUTPUT_PATH }}
41 | uses: actions/upload-artifact@v3
42 | if: success() || failure()
43 | with:
44 | name: ${{ env.ASH_OUTPUT_PATH }}
45 | path: ${{ env.ASH_OUTPUT_PATH }}
46 |
--------------------------------------------------------------------------------
/docs/content/tutorials/CI/GitLabCI/.gitlab-ci.yml:
--------------------------------------------------------------------------------
1 | #
2 | # This example clones the ASH repo and the repo to be scanned, then runs `ash`
3 | # against the repo.
4 | #
5 | # This will build the ASH container image, run the scan using the built image,
6 | # and publish the scan results as a build artifact.
7 | #
8 | variables:
9 | ASH_VERSION:
10 | description: The version tag of the awslabs/automated-security-helper repo to clone
11 | value: v1.3.3
12 |
13 | Run ASH Scan:
14 | stage: .pre
15 | when: always
16 | script:
17 | # Clone the ASH repository from GitHub at the desired version
18 | - |-
19 | git clone https://github.com/awslabs/automated-security-helper.git \
20 | ../automated-security-helper \
21 | --branch "$ASH_VERSION"
22 | # Add the ASH repo path to $PATH so `ash` is available in PATH
23 | - export PATH="$(pwd)/../automated-security-helper:$PATH"
24 | # Run `ash`.
25 | # This will build the ASH container first, then run the scan once built.
26 | - |-
27 | ash \
28 | --source-dir "$(pwd)" \
29 | --output-dir "$(pwd)/ash_output"
30 | artifacts:
31 | paths:
32 | - ash_output
33 | when: always
34 |
--------------------------------------------------------------------------------
/docs/content/tutorials/CI/Jenkins/Jenkinsfile:
--------------------------------------------------------------------------------
1 | /*
2 | #
3 | # This example clones the ASH repo and the repo to be scanned, then runs `ash`
4 | # against the repo.
5 | #
6 | # This will build the ASH container image, run the scan using the built image,
7 | # and publish the scan results as a build artifact.
8 | #
9 | */
10 | node {
11 | def ASH_VERSION = 'v1.3.3'
12 |
13 | git(
14 | branch: ASH_VERSION,
15 | url: 'https://github.com/awslabs/automated-security-helper.git',
16 | )
17 |
18 | sh(
19 | script: """
20 | # Clone the ASH repository from GitHub at the desired version
21 | git clone https://github.com/awslabs/automated-security-helper.git \
22 | ../automated-security-helper \
23 | --branch "${ASH_VERSION}"
24 |
25 | # Run the ASH container image
26 | export PATH="$(pwd)/../automated-security-helper:$PATH"
27 | ash \
28 | --source-dir $(pwd) \
29 | --output-dir $(pwd)/${ASH_OUTPUT_PATH}
30 | """
31 | )
32 |
33 | archiveArtifacts(
34 | artifacts: "${ASH_OUTPUT_PATH}/**"
35 | )
36 | }
37 |
--------------------------------------------------------------------------------
/docs/content/tutorials/cloud9-quickstart.md:
--------------------------------------------------------------------------------
1 | # Cloud9 Quickstart Guide
2 |
3 | Follow the instructions in the [quickstart page](/quickstart/README.md) to deploy an AWS Cloud9 Environment with ASH pre-installed.
4 |
5 | ## Using `ash` with `pre-commit`
6 |
7 | The `ash` tool can be used interactively on a workstation or run using the [`pre-commit`](https://pre-commit.com/) command.
8 | If `pre-commit` is used to run `ash`, then the `pre-commit` processing takes care of installing
9 | a copy of the `ash` git repository and setting up to run the `ash` program from that installed
10 | repository. Using `pre-commit` still requires usage of WSL 2 when running on Windows.
11 |
12 | Using `ash` as a [`pre-commit`](https://pre-commit.com/) hook enables development teams to use the `ash` tool
13 | in two ways. First, developers can use `ash` as a part of their local development process on whatever
14 | development workstation or environment they are using. Second, `ash` can be run in a build automation stage
15 | by running `pre-commit run --hook-stage manual ash` in that stage.
16 | When using `pre-commit`, run the `pre-commit` commands while in a folder/directory within the git repository that is
17 | configured with `pre-commit` hooks.
18 |
19 | Refer to the [pre-commit-hooks](https://github.com/awslabs/automated-security-helper/blob/main/.pre-commit-hooks.yaml) file for information about the `pre-commit`
20 | hook itself.
21 |
22 | To configure a git repository to use the `ash` hook, start with the following `pre-commit-config` configuration:
23 |
24 | ```yaml
25 | - repo: https://github.com/awslabs/automated-security-helper.git
26 | rev: 'v1.3.3' # update with the latest tagged version in the repository
27 | hooks:
28 | - id: ash
29 | name: scan files using ash
30 | stages: [ manual ]
31 | # uncomment the line below if using "finch" on MacOS
32 | # args: [ "-f" ]
33 | ```
34 |
35 | Once the `.pre-commit-config.yaml` file is updated, the `ash` tool can be run using the following command:
36 |
37 | ```bash
38 | pre-commit run --hook-stage manual ash
39 | ```
40 |
41 | Results from the run of the `ash` tool can be found in the `aggregated_results.txt` file
42 | in the `--output-dir` folder/directory.
43 |
44 | When ASH converts CloudFormation files into CDK and runs cdk-nag on them,
45 | the output of the cdk-nag checks is preserved in an 'ash_cf2cdk_output'
46 | folder/directory under `--output-dir` after the ASH scan is run.
47 |
48 | This folder/directory is in addition to the `aggregated_results.txt` file found in `--output-dir`.
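49 |
50 | As a quick way to review findings after a scan, read the aggregated report directly from the output folder (a minimal sketch, assuming `ash_output` was passed as `--output-dir`):
51 |
52 | ```bash
53 | # View the aggregated findings, then list any cdk-nag output produced from CloudFormation templates
54 | less ash_output/aggregated_results.txt
55 | ls ash_output/ash_cf2cdk_output/
56 | ```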
49 |
--------------------------------------------------------------------------------
/docs/content/tutorials/running-ash-in-ci.md:
--------------------------------------------------------------------------------
1 | ## Continuous Integration (CI) Execution
2 |
3 | ASH supports running in CI environments as an executable container (e.g. via `docker run`; see the example invocation at the end of this page) as well as via Container Job mechanisms, depending on CI platform support.
4 |
5 | ### Building ASH Container Images for CI Usage
6 |
7 | Building ASH images for use in CI platforms (or other orchestration platforms that may require elevated access within the container) requires targeting the `ci` stage of the `Dockerfile`. This can be done via one of the following methods from the root of the ASH repository:
8 |
9 | _via `ash` CLI_
10 |
11 | ```sh
12 | ash --no-run --build-target ci
13 | ```
14 |
15 | _via `docker` or other OCI CLI_
16 |
17 | ```sh
18 | docker build --tag automated-security-helper:ci --target ci .
19 | ```
20 |
21 | ### Examples
22 |
23 | Within the CI folder, there are multiple examples of running ASH scans in various CI platforms. All examples include the following:
24 |
25 | * ASH repository is cloned from GitHub alongside the repository to be scanned.
26 | * ASH repository directory is added to `$PATH` so that `ash` is available to call directly.
27 | * `ash` is called to invoke the scan, which performs the following steps:
28 | 1. Creates the `ash_output` directory if it does not already exist
29 | 2. Builds the ASH container image
30 | 3. Runs the ASH scan using the built container image
31 | 4. Generates the results in the `ash_output` directory
32 | * Once `ash` is complete, uploads `ash_output` directory as a build artifact.
33 |
34 | These examples are meant to show simple implementations that will enable quick integration of ASH
35 | into an application or infrastructure CI pipeline.
36 |
37 | ---
38 |
39 | Current examples provided by subfolder name:
40 |
41 |
43 | * GitHub Actions (`.github/workflows/run-ash-scan.yml`)
44 | * Job `containerjob`: Example shows how to run ASH with the ASH image itself used for the job execution. This aligns with the `ContainerJob` approach from Azure Pipelines and presents the `ash` script as a callable in PATH.
45 | * Job `dockerrun`: Example shows how to run an ASH scan using generic `docker run` invocation (seen below)
46 | * GitLab CI (`.gitlab-ci.yml`)
47 | * Example file shows how to use the ASH image as the runner image in a GitLab CI job
48 |
50 |
51 | ### ASH Execution Environment Viability
52 |
53 | If you are unsure whether ASH will run in your CI environment or not, the primary requirement is the ability to run Linux containers. This is typically true for most CI platforms, but self-hosted CI agents and enterprise security rules may restrict that ability. If you are unsure whether the CI platform you are using will support it, you can walk through the following flowchart for guidance:
54 |
55 | 
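56 |
57 | For CI platforms where the `ash` wrapper script is not practical, the scan can also be invoked through a generic `docker run` against a pre-built image. The sketch below mirrors the mounts the `ash` wrapper itself sets up; the image tag is a placeholder to replace with your registry's ASH `ci` image:
58 |
59 | ```sh
60 | # Mount the checkout read-only at /src and an output folder at /out, then scan
61 | mkdir -p ash_output
62 | docker run --rm \
63 |   --mount type=bind,source="$(pwd)",destination=/src,readonly \
64 |   --mount type=bind,source="$(pwd)/ash_output",destination=/out \
65 |   automated-security-helper:ci \
66 |   ash --source-dir /src --output-dir /out
67 | ```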
56 |
--------------------------------------------------------------------------------
/docs/content/tutorials/running-ash-locally.md:
--------------------------------------------------------------------------------
1 | # Running ASH Locally
2 |
3 | Please see the [Prerequisites](../docs/prerequisites.md) page to ensure your local workspace is configured as needed before continuing.
4 |
5 | At a high-level, you need the ability to run `linux/amd64` containers in order to use ASH.
6 |
7 | ## Linux or MacOS
8 |
9 | Clone the git repository into a folder. For example:
10 |
11 | ``` sh
12 | # Set up some variables
13 | REPO_DIR="${HOME}"/Documents/repos/reference
14 | REPO_NAME=automated-security-helper
15 |
16 | # Create a folder to hold reference git repositories
17 | mkdir -p ${REPO_DIR}
18 |
19 | # Clone the repository into the reference area
20 | git clone https://github.com/awslabs/automated-security-helper.git "${REPO_DIR}/${REPO_NAME}"
21 |
22 | # Set the repo path in your shell for easier access
23 | #
24 | # Add this (and the variable settings above) to
25 | # your ~/.bashrc, ~/.bash_profile, ~/.zshrc, or similar
26 | # start-up scripts so that the ash tool is in your PATH
27 | # after re-starting or starting a new shell.
28 | #
29 | export PATH="${PATH}:${REPO_DIR}/${REPO_NAME}"
30 |
31 | # Execute the ash tool
32 | ash --version
33 | ```
34 |
35 | ## Windows
36 |
37 | **ASH** uses containers, `bash` shell scripts, and multiple background processes running in parallel to run the multiple
38 | source code security scanning tools that it uses. Because of this, running `ash` from either a `PowerShell` or `cmd`
39 | shell on Windows is not possible. Furthermore, due to reliance on running containers, usually with Docker Desktop
40 | when running on Windows, there is an implicit dependency on having a WSL 2 (Windows Subsystem for Linux)
41 | environment installed, configured, and operational on the Windows machine where `ash` will be run.
42 |
43 | To use `ash` on Windows:
44 |
45 | * Install, configure, and test the [WSL 2 environment on Windows](https://learn.microsoft.com/en-us/windows/wsl/install)
46 | * Install, configure, and test [Docker Desktop for Windows](https://docs.docker.com/desktop/install/windows-install/), using the WSL 2 environment
47 | * Use the [Windows Terminal](https://learn.microsoft.com/en-us/windows/terminal/install) program and open a command-line window to interact with the WSL 2 environment
48 | * Install and/or update the `git` client in the WSL 2 environment. This should be pre-installed, but you may need to update the version
49 |   using the `apt-get update` and `apt-get upgrade` commands.
50 |
51 | Once the WSL 2 command-line window is open, follow the steps above in [Linux or MacOS](#linux-or-macos)
52 | to install and run `ash` in WSL 2 on the Windows machine.
53 |
54 | To run `ash`, open a Windows Terminal shell into the WSL 2 environment and use that command-line shell to run the `ash` command.
55 |
56 | **Note**: when working this way, be sure to `git clone` any git repositories to be scanned into the WSL 2 filesystem.
57 | Results are unpredictable if repositories or file sub-trees in the Windows filesystem are scanned using `ash`
58 | that is running in the WSL 2 environment.
59 |
60 | **Tip**: If you are using Microsoft VSCode for development, it is possible to configure a "remote" connection
61 | [using VSCode into the WSL2 environment](https://learn.microsoft.com/en-us/windows/wsl/tutorials/wsl-vscode).
62 | By doing this, you can host your git repositories in WSL2 and still
63 | work with them as you have in the past when they were in the Windows filesystem of your Windows machine.
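64 |
65 | ## Running a First Scan
66 |
67 | Whether on Linux, MacOS, or within WSL 2 on Windows, once `ash` is on your PATH you can run a first scan against any local folder (a minimal sketch; `ash_output` is an arbitrary folder name for the results):
68 |
69 | ``` sh
70 | # Scan the current directory and write the report to ./ash_output
71 | ash --source-dir "$(pwd)" --output-dir "$(pwd)/ash_output"
72 | ```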
64 |
--------------------------------------------------------------------------------
/docs/overrides/.icons/aws-logo-light.svg:
--------------------------------------------------------------------------------
1 |
2 |
3 |
39 |
--------------------------------------------------------------------------------
/helper_dockerfiles/Dockerfile-cdk:
--------------------------------------------------------------------------------
1 | # Get Node.js Image
2 | FROM public.ecr.aws/docker/library/node:18.0.0
3 | ENV TZ=Europe/London
4 | RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
5 |
6 | # Install prerequisites
7 | RUN apt-get update && \
8 | apt-get upgrade -y
9 |
10 | #
11 | # Make sure the default dirs are initialized
12 | #
13 | RUN mkdir -p /src && \
14 | mkdir -p /out && \
15 |     mkdir -p /run/scan/src && \
16 | mkdir -p /ash
17 |
18 | WORKDIR /src
19 |
20 | CMD bash -C /utils/cdk-docker-execute.sh
21 |
--------------------------------------------------------------------------------
/helper_dockerfiles/Dockerfile-git:
--------------------------------------------------------------------------------
1 | # Get Python Image
2 | FROM --platform=linux/amd64 public.ecr.aws/bitnami/python:3.11
3 |
4 | # Install prerequisites
5 | RUN apt-get update && \
6 | apt-get install -y git tree && \
7 | apt-get upgrade -y
8 |
9 | # Clone git-secrets directory
10 | RUN git clone https://github.com/awslabs/git-secrets.git && \
11 | cd git-secrets && \
12 | make install
13 |
14 | #
15 | # Make sure the default dirs are initialized
16 | #
17 | RUN mkdir -p /src && \
18 | mkdir -p /out && \
19 |     mkdir -p /run/scan/src && \
20 | mkdir -p /ash
21 |
22 | WORKDIR /src
23 | VOLUME /src
24 |
25 | CMD bash -C /utils/git-docker-execute.sh
26 |
--------------------------------------------------------------------------------
/helper_dockerfiles/Dockerfile-grype:
--------------------------------------------------------------------------------
1 | # Get Python Image
2 | FROM --platform=linux/amd64 public.ecr.aws/bitnami/python:3.11
3 | SHELL ["bash", "-c"]
4 | ARG OFFLINE="NO"
5 | ARG OFFLINE_SEMGREP_RULESETS="p/ci"
6 |
7 | ENV HOME="/root"
8 | ENV OFFLINE="${OFFLINE}"
9 | ENV GRYPE_DB_CACHE_DIR="${HOME}/.grype"
10 | ENV SEMGREP_RULES_CACHE_DIR="${HOME}/.semgrep"
11 |
12 | # Install prerequisites
13 | RUN curl -sSfL https://raw.githubusercontent.com/anchore/syft/main/install.sh | sh -s -- -b /usr/local/bin && \
14 | curl -sSfL https://raw.githubusercontent.com/anchore/grype/main/install.sh | sh -s -- -b /usr/local/bin && \
15 | python3 -m pip install semgrep
16 |
17 | RUN if [[ "$OFFLINE" == "YES" ]]; then \
18 | grype db update && \
19 | mkdir -p ${SEMGREP_RULES_CACHE_DIR} && \
20 | for i in $OFFLINE_SEMGREP_RULESETS; do curl "https://semgrep.dev/c/${i}" -o "${SEMGREP_RULES_CACHE_DIR}/$(basename "${i}").yml"; done; \
21 | fi
22 |
23 |
24 | #
25 | # Make sure the default dirs are initialized
26 | #
27 | RUN mkdir -p /src && \
28 | mkdir -p /out && \
29 | mkdir -p /run/scan/src && \
30 | mkdir -p /ash
31 |
32 | WORKDIR /src
33 | VOLUME /src
34 |
35 | CMD bash -C /utils/grype-docker-execute.sh
36 |
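37 | # Example build enabling the offline cache layers above (image tag is
38 | # illustrative; any OCI-compatible builder works the same way):
39 | #
40 | #   docker build -f helper_dockerfiles/Dockerfile-grype \
41 | #     --build-arg OFFLINE=YES \
42 | #     --build-arg OFFLINE_SEMGREP_RULESETS="p/ci" \
43 | #     -t ash-grype:offline .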
--------------------------------------------------------------------------------
/helper_dockerfiles/Dockerfile-js:
--------------------------------------------------------------------------------
1 | # Get NPM Image
2 | FROM public.ecr.aws/docker/library/node:18.0.0
3 | ARG OFFLINE="NO"
4 | ARG BUILD_DATE_EPOCH
5 |
6 | ENV BUILD_DATE_EPOCH="${BUILD_DATE_EPOCH}"
7 | ENV OFFLINE="${OFFLINE}"
8 | #
9 | # Make sure the default dirs are initialized
10 | #
11 | RUN mkdir -p /src && \
12 | mkdir -p /out && \
13 | mkdir -p /run/scan/src && \
14 | mkdir -p /ash
15 |
16 | WORKDIR /src
17 | VOLUME /src
18 |
19 | CMD bash -C /utils/js-docker-execute.sh
20 |
--------------------------------------------------------------------------------
/helper_dockerfiles/Dockerfile-py:
--------------------------------------------------------------------------------
1 | # Get Python Image
2 | FROM --platform=linux/amd64 public.ecr.aws/bitnami/python:3.11
3 |
4 | # Install prerequisites
5 | RUN pip install --no-cache-dir --upgrade pip && \
6 | pip install --no-cache-dir bandit nbconvert jupyterlab
7 |
8 | #
9 | # Make sure the default dirs are initialized
10 | #
11 | RUN mkdir -p /src && \
12 | mkdir -p /out && \
13 | mkdir -p /run/scan/src && \
14 | mkdir -p /ash
15 |
16 | WORKDIR /src
17 | VOLUME /src
18 |
19 | CMD bash -C /utils/py-docker-execute.sh
20 |
--------------------------------------------------------------------------------
/helper_dockerfiles/Dockerfile-yaml:
--------------------------------------------------------------------------------
1 | # Get Python Image
2 | FROM --platform=linux/amd64 public.ecr.aws/bitnami/python:3.11
3 | ARG OFFLINE="NO"
4 | ARG BUILD_DATE_EPOCH
5 |
6 | ENV BUILD_DATE_EPOCH="${BUILD_DATE_EPOCH}"
7 | ENV OFFLINE="${OFFLINE}"
8 |
9 | ENV TZ=Europe/London
10 | RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
11 |
12 | # Install prerequisites
13 | RUN apt-get update && \
14 | apt-get upgrade -y && \
15 | apt-get install -y ruby-full && \
16 | rm -rf /var/lib/apt/lists/*
17 |
18 | RUN pip3 install -U checkov && gem install cfn-nag
19 |
20 | #
21 | # Make sure the default dirs are initialized
22 | #
23 | RUN mkdir -p /src && \
24 | mkdir -p /out && \
25 | mkdir -p /run/scan/src \
26 | mkdir -p /ash
27 |
28 | WORKDIR /src
29 |
30 | CMD bash -C /utils/yaml-docker-execute.sh
31 |
--------------------------------------------------------------------------------
/mkdocs.yml:
--------------------------------------------------------------------------------
1 | site_name: ASH - Automated Security Helper
2 | site_description: 'Your one-stop shop for code security scanning'
3 | site_url: 'https://awslabs.github.io/automated-security-helper'
4 | repo_name: 'awslabs/automated-security-helper'
5 | repo_url: 'https://github.com/awslabs/automated-security-helper'
6 | edit_uri: 'edit/main/site/content'
7 | copyright: '© 2024, Amazon Web Services, Inc. or its affiliates. All rights reserved.'
8 | docs_dir: 'docs/content'
9 | site_dir: 'public'
10 | # use_directory_urls: true
11 |
12 | nav:
13 | - Overview: index.md
14 | - Documentation:
15 | - Getting started:
16 | - Prerequisites: docs/prerequisites.md
17 | - Support Matrix: docs/support.md
18 | - Tutorials:
19 | - Running ASH locally: tutorials/running-ash-locally.md
20 | - Running ASH in a CI environment: tutorials/running-ash-in-ci.md
21 | - Cloud9 Quick Start: tutorials/cloud9-quickstart.md
22 | # - Troubleshooting:
23 | # - Finch Issues: troubleshooting/finch.md
24 | - Contributing to ASH: contributing.md
25 | - FAQs: faq.md
26 |
27 | theme:
28 | name: material
29 | custom_dir: docs/overrides
30 | palette:
31 | # Palette toggle for light mode
32 | - scheme: dracula
33 | toggle:
34 | icon: material/brightness-7
35 | name: Switch to dark mode
36 | # Palette toggle for dark mode
37 | - scheme: slate
38 | toggle:
39 | icon: material/brightness-4
40 | name: Switch to light mode
41 | font: false
42 | language: en
43 | features:
44 | - navigation.instant
45 | - navigation.indexes
46 | - toc.follow
47 | - content.tabs.link
48 | - search.share
49 | - search.highlight
50 | - search.suggest
51 | - content.code.copy
52 | - content.code.annotate
53 | - content.code.select
54 | icon:
55 | logo: aws-logo-light
56 | repo: fontawesome/brands/github
57 | favicon: assets/images/aws-logo-light.svg
58 |
59 | markdown_extensions:
60 | - admonition
61 | - pymdownx.emoji:
62 | emoji_index: !!python/name:materialx.emoji.twemoji
63 | - pymdownx.highlight:
64 | anchor_linenums: true
65 | line_spans: __span
66 | pygments_lang_class: true
67 | - pymdownx.inlinehilite
68 | - pymdownx.snippets
69 | - pymdownx.superfences
70 |
71 | plugins:
72 | - search
73 | - awesome-pages
74 |
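75 | # To preview this documentation site locally (a sketch, assuming the Python
76 | # packages from requirements.txt are installed):
77 | #
78 | #   pip install -r requirements.txt
79 | #   mkdocs serve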
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 | [tool.poetry]
4 | name = "automated-security-helper"
5 | version = "2.0.1"
6 | description = ""
7 | authors = ["Nate Ferrell ", "Nathan Bates "]
8 | license = "Apache-2.0"
9 | readme = "README.md"
10 | repository = "https://github.com/awslabs/automated-security-helper"
11 |
12 | [tool.poetry.scripts]
13 | asharp = 'automated_security_helper.asharp:main'
14 |
15 | [tool.poetry.dependencies]
16 | python = "^3.10"
17 | regex = "^2024.5.15"
18 | pydantic = "^2.7.3"
19 |
20 | [tool.poetry.group.dev.dependencies]
21 | black = "^24.4.2"
22 |
23 | [build-system]
24 | requires = ["poetry-core"]
25 | build-backend = "poetry.core.masonry.api"
26 |
--------------------------------------------------------------------------------
/quickstart/README.md:
--------------------------------------------------------------------------------
1 | # ASH Quickstart
2 |
3 | The purpose of this template is to deploy an AWS Cloud9 Environment with ASH and all the dependencies pre-installed.
4 |
5 | This quickstart is designed for **learning purposes only**. The user will be responsible for any patching strategy, network protection and access controls to the instance.
6 |
7 | By default, the owner of the AWS Cloud9 Environment will be the user that launched the CloudFormation stack.
8 |
9 | ## Pre-requisites
10 |
11 | 1. An AWS Account and enough permissions to deploy a CloudFormation Stack.
12 |
13 | ## Installation
14 |
15 | 1. Download the [template](./c9template.yaml) to your local machine, or clone this repository.
16 | 1. Log into your AWS Console
17 | 1. Navigate to the AWS CloudFormation console in your region of choice. You can use [this](https://console.aws.amazon.com/cloudformation/home) link.
18 | 1. Select `Create stack`
19 | 1. In the `Specify template` section, select the `Upload a template file` option.
20 | 1. Use the `Choose file` option to select the template file (`c9template.yaml`) from your local machine and select `Next`.
21 | 1. Specify a descriptive `Stack name` (for example `ASH-TestStack`)
22 | 1. Select `Next` and accept the default settings on the following screen. Select `Next` again until reaching the last step (`Review ASH-TestStack`).
23 | 1. Accept the IAM resource acknowledgement `I acknowledge that AWS CloudFormation might create IAM resources with custom names.` and select Submit to create the Stack.
24 | 1. Wait until the Stack is created and status is `CREATE_COMPLETE`.
25 | 1. Navigate to the AWS Cloud9 Console. You can use [this](https://console.aws.amazon.com/cloud9control/home) link.
26 | 1. Use the `Open` link to access your AWS Cloud9 Environment.
27 | 1. You can confirm that ASH is installed properly by running `ash -v` in the terminal. It will take a few minutes for the bootstrap process to complete; wait until you see an empty file named `ASH-READY` under `/home/ec2-user/environment`. If you had already launched a terminal, refresh the `PATH` environment variable by running `source ~/.bashrc` in your terminal and try again, or close the terminal and launch a new one.
28 |
29 |
30 | ## Troubleshooting
31 |
32 | If the stack fails to deploy, check the error message in CloudFormation under the `Events` tab. In general, errors are very descriptive about the reason for the failure. For example:
33 |
34 | ```
35 | ash-admin already exists in stack arn:aws:cloudformation:us-east-1:123456789012:stack/ASHC9/c0426010-c99c-11ed-85fd-0e5951eaa6e5
36 | ```
37 |
38 | In this case, another environment with the same name already exists. You will need to delete the old stack or change the Environment name.
39 |
40 | ## Additional information
41 |
42 | - [AWS Cloud9 User Guide](https://docs.aws.amazon.com/cloud9/latest/user-guide/welcome.html)
43 | - [AWS CloudFormation User Guide](https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/Welcome.html)
44 | - [AWS CloudFormation Troubleshooting](https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/troubleshooting.html)
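
As an alternative to the console-based steps in the Installation section above, the same stack can be created from the AWS CLI (a sketch, assuming the AWS CLI is installed and configured with sufficient permissions):

```
aws cloudformation create-stack \
  --stack-name ASH-TestStack \
  --template-body file://c9template.yaml \
  --capabilities CAPABILITY_NAMED_IAM
```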
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | # Documentation static site generator & deployment tool
2 | mkdocs>=1.1.2
3 | # Material theme for mkdocs
4 | mkdocs-material>=5.4.0
5 | # awesome-pages plugin for mkdocs
6 | mkdocs-awesome-pages-plugin==2.8.0
7 | # Additional mkdocs extensions for prettifying the output
8 | pymdown-extensions
9 |
--------------------------------------------------------------------------------
/src/README.md:
--------------------------------------------------------------------------------
1 | # src/automated_security_helper
2 |
3 | This directory contains the Python package code for the automated_security_helper package.
4 |
5 | This package provides some Python-based functions for ASH and is intended to be the project
6 | location for any new Python development for ASH.
7 |
8 | This package uses Poetry to manage dependencies and packaging. To read more about Poetry,
9 | please see the [Poetry documentation](https://python-poetry.org/docs/).
10 |
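11 | For example, to set up a local development environment and exercise the `asharp` entrypoint (a minimal sketch, assuming Poetry is installed):
12 |
13 | ```
14 | poetry install
15 | poetry run asharp --help
16 | ```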
--------------------------------------------------------------------------------
/src/automated_security_helper/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | import importlib.metadata
5 |
6 | __version__ = importlib.metadata.version('automated_security_helper')
7 |
--------------------------------------------------------------------------------
/src/automated_security_helper/adapters/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
--------------------------------------------------------------------------------
/src/automated_security_helper/asharp.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python3
2 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
3 | # SPDX-License-Identifier: Apache-2.0
4 |
5 | # asharp.py / Automated Security Helper - Aggregated Report Parser
6 | # A tool to parse, ingest, and output ASH aggregated reports.
7 |
8 | import datetime
9 | import regex as re
10 | import argparse
11 | import json
12 | from json import JSONEncoder
13 | from automated_security_helper import __version__
14 |
15 | # default filenames for input and output
16 | DEF_INFILE='aggregated_results.txt'
17 | DEF_OUTFILE='asharp_results.json'
18 |
19 | # handle command line
20 | cliparser = argparse.ArgumentParser( description="Automated Security Helper Aggregated Report Parser")
21 | cliparser.add_argument('-i', '--input', help=f"File containing ASH aggregated results ({DEF_INFILE})")
22 | cliparser.add_argument('-o', '--output', help="File to write the resulting ASHARP model to")
23 | #cliparser.add_argument('-j', '--jq', help="Parse raw file and filter with jq")
24 | cliparser.add_argument('-R', '--retain', action='store_true', help="TEMPORARY - Do not modify raw data output")
25 | cliparser.add_argument('-s', '--stdout', action='store_true', help="Output the resulting ASHARP model to the console")
26 | cliparser.add_argument('-v', '--verbose', action='store_true', help="Output instrumentation log")
27 | cliparser.add_argument('--version', action='store_true', help="Output ASHARP version")
28 | args = cliparser.parse_args()
29 |
30 | # simply output version and exit
31 | if args.version:
32 | print(__version__)
33 | exit(0)
34 |
35 | # data parsing/collection from ASH aggregated report
36 | aggregated = {}
37 |
38 | # resulting data model
39 | asharpmodel = {}
40 |
41 | ###
42 | ## Support functions
43 | #
44 |
45 | # basic debug log (this could be added to the model as asharp metadata)
46 | debug_log = []
47 |
48 | # basic instrumentation
49 | def debug(x, y = 0):
50 | def debugout(s):
51 | print(s)
52 | debug_log.append(s)
53 | if args.verbose or y == 255:
54 | debugout(x)
55 | if y == 0: return
56 | debugout("!! access the help menu with -h or --help")
57 | exit(255)
58 |
59 | # json encoder for datetime object
60 | class DateTimeEncoder(JSONEncoder):
61 | def default(self, obj):
62 | if isinstance(obj, (datetime.date, datetime.datetime)):
63 | return obj.isoformat()
64 |
65 | ###
66 | ## Parsing and extraction functions
67 | #
68 |
69 | # extract embedded JSON from output
70 | def ExtractJsonFromData(data):
71 |
72 | # does minimal necessary to validate JSON
73 | # uses backref (should reduce regex greediness)
74 | result = re.findall(r"""(?xmsi)
75 | ^[\n\s\t]*
76 | (?P<json>
77 | (?>
78 | \{(?:[^{}"]|"(?:\\.|[^"\\])*"|(?&json))*\} # objects
79 | | # and/or
80 | \[(?:[^[\]]|"(?:\\.|[^"\\])*"|(?&json))*\] # arrays
81 | )
82 | )
83 | """, data)
84 |
85 | #debug(json.dumps(result, cls=DateTimeEncoder, indent=4))
86 | return result
87 |
88 | # extract ASH invoked provider sections
89 | def ExtractSectionsFromData(data):
90 |
91 | # find all section boundaries and retrieve innards
92 | # uses a backref (should reduce regex greediness)
93 | result = re.findall(r'''(?xmsi)
94 | #############################################\n
95 | Start\s+of\s+([^\n]+)\n # start marker
96 | #############################################\n
97 | (.*?) # provider output blob
98 | #############################################\n
99 | End\s+of\s+(\1)\n # end marker
100 | #############################################
101 | ''', data)
102 |
103 | #debug(json.dumps(result, cls=DateTimeEncoder, indent=4))
104 | return result
105 |
106 | # separate Grype, Syft, and Semgrep findings
107 | def ExtractSecondarySections(data):
108 |
109 | # find all section boundaries and retrieve innards
110 | # uses a backref (should reduce regex greediness)
111 | #
112 | # ^^^ this is to ensure we don't encapsulate some undesirable
113 | # section and hence, miss reporting something. This means
114 | # that all section start/end markers are IDENTICAL
115 |
116 | result = re.findall(r'''(?xmsi)
117 | >>>>>>\s+Begin\s+(\S+)\s+.+?\s+for\s+(.+?)\s+>>>>>>\n # start marker
118 | (.*?) # provider output
119 | <<<<<<\s+End\s+(\1)\s+.+?\s+for\s+(\2)\s+<<<<<< # end marker
120 | ''', data)
121 |
122 | #debug(json.dumps(result, cls=DateTimeEncoder, indent=4))
123 | return result
124 |
125 | # need to parse cdk provider here and addModel accordingly
126 | def parseCdkProvider(data):
127 |
128 | # rx for Warnings and Errors
129 | results = re.findall(r'''(?xmsi)
130 |
131 | # total pita - this will not extract the 2nd pattern, even if swapped
132 | # works well enough for now. will revisit later.
133 |
134 | ^[\n]* # just in case..
135 | (
136 | \[(Warning|Error)\s+at\s+([^\]]+)\]\s+
137 | (
138 | ([^:]+):\s+.+? # error reference id
139 | |
140 | [^:]+:\s+'(Aws.+?)'\s+.+? # warning reference id
141 | )
142 | [\n]+
143 | )
144 | ''', data)
145 |
146 | cdks = []
147 | for result in results:
148 | o = {
149 | 'raw': result[0],
150 | 'severity': result[1],
151 | 'ref': result[2],
152 | 'id': result[4],
153 | 'result': result[3]
154 | }
155 |
156 | #debug(json.dumps(o, cls=DateTimeEncoder, indent=4))
157 | cdks.append(json.dumps(o, cls=DateTimeEncoder, indent=0))
158 |
159 | if not len(cdks):
160 | return
161 |
162 | return cdks
163 |
164 | # parse out ASH report sections
165 | def ParseAshSections(aggfile):
166 |
167 | # find, separate, and extract each providers output section
168 | sections = ExtractSectionsFromData(aggfile)
169 |
170 | # does data contain valid sections?
171 | if not sections:
172 | debug('!! Unable to find any section identifiers.')
173 | debug('!! Is this an ASH aggregated_results.txt file?', 255)
174 |
175 | # iterate through each section
176 | for section in sections:
177 |
178 | # sanity check - make sure the regex wasn't overly greedy
179 | if section[0] != section[2]:
180 | debug('!! Start and end do not match!!', 255)
181 |
182 | # identify the provider from the report filename
183 | prx = re.search(r'(grype|yaml|py|cdk|git|js)_report', section[0])
184 | if not prx:
185 | debug(f'!! No provider identified for {section[0]}', 255)
186 | provider = prx.group(1)
187 |
188 | # remove ansi, unprintables, and unicode from provider output (just data section)
189 | # might be better to escape these.. thoughts?
190 | dsanitize = re.sub(r'\x1B(?:[@-Z\\]|\[[0-?]*[ -/]*[@-~])', '', section[1])
191 | dsanitize = re.sub(r'[^\x00-\x7F]+', '', dsanitize)
192 | ##dsanitize = dsanitize.replace(r'\u[a-fA-F0-9]{4}', '')
193 | ##dsanitize = dsanitize.replace(r'[\u0000-\uffff]', '')
194 | ##dsanitize = dsanitize.replace(r'\p{Mn}+', '')
195 | dsanitize = dsanitize.encode('ascii', 'ignore').decode()
196 |
197 | # collect the parsed information
198 | aggregated[provider] = {
199 | 'file': section[0],
200 | 'provider': provider,
201 | ##'output': section[1], # unnecessary - removal occurs below
202 | 'data': dsanitize if dsanitize else section[1]
203 | }
204 |
205 | # creates a model object representing the subsection output
206 | models = []
207 | def addModels(tool, file, arr):
208 | for a in arr:
209 | models.append({ 'tool': tool, 'file': file, 'data': a })
210 | return
211 |
212 | # need to separate findings found in different subsections
213 | # (e.g. grype/syft/semgrep) as well as cdk, git, js, py, and yaml
214 | subsections = ExtractSecondarySections(section[1])
215 | for subsection in subsections:
216 | gmodel = None
217 |
218 | # if this is cdk provider, then we need to manually extract & generate json
219 | if provider in ['cdk']:
220 | gmodel = parseCdkProvider(subsection[2])
221 | else:
222 | gmodel = ExtractJsonFromData(subsection[2])
223 |
224 | # continue to next if no model data found
225 | if not gmodel:
226 | debug(f'-- {subsection[0]} model is {len(subsection[2])} and did not detect json')
227 | continue
228 |
229 | # add the extracted data to the models array for further processing
230 | addModels(subsection[0], subsection[1], gmodel)
231 |
232 | # process the extracted json object data (needs to be parsed/loaded)
233 | arr = []
234 | for m in models:
235 | data = m['data']
236 | tool = m['tool']
237 | file = m['file']
238 |
239 | # attempt to validate the extraction
240 | try:
241 | o = json.loads(data)
242 | arr.append({'tool': tool, 'file': file, 'data': o})
243 |
244 | # COMMENT THE FOLLOWING OUT TO RETAIN UNALTERED RAW OUTPUT
245 | #
246 | # - remove json artifacts from the provider data as we parse them
247 | # - what remains is what we did not parse.
248 | # - this doesn't remove manual extractions (eg. cdk)
249 | # - this was initially intended for debugging
250 | # ..can be disabled if undesirable
251 | # - the "output" object (1 page up) was to capture raw output
252 | # ..but maybe we prefer one over the other
253 | #
254 | aggregated[provider]['data'] = aggregated[provider]['data'].replace(data, '')
255 |
256 | except Exception as e:
257 | debug(f'!! error - {e} {m}')
258 | pass # hmmm, continue?
259 |
260 | debug(f'-- {tool} model starts with "{data[0]}", contains {len(data)} bytes, and ends with "{data[-5:]}"')
261 |
262 | # place the extracted json objects into the provider data model
263 | if arr: aggregated[provider]['model'] = arr
264 |
265 | #debug(json.dumps(aggregated[provider]['model'], cls=DateTimeEncoder, indent=4))
266 |
267 | return aggregated
268 |
269 | ## Begin execution
270 | #
271 |
272 | def main():
273 | # read the ASH aggregated report as text blob
274 | if not args.input:
275 | debug("!! provide the path to the ASH aggregate report", 255)
276 | with open(args.input, 'r') as file:
277 | aggfile = file.read()
278 |
279 | # parse ASH report sections
280 | ParseAshSections(aggfile)
281 |
282 | # if output file specified then write to file
283 | if args.output:
284 | with open(args.output, 'w') as file:
285 | file.write(json.dumps(aggregated, cls=DateTimeEncoder, indent=4))
286 |
287 | # output it to screen
288 | if not args.output or args.stdout:
289 | print(json.dumps(aggregated, cls=DateTimeEncoder, indent=4))
290 |
291 | if __name__ == "__main__":
292 | main()
293 |
294 | # EOF
295 |
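296 | # Example usage (file names are illustrative; the same entrypoint is exposed
297 | # as the `asharp` console script via Poetry):
298 | #
299 | #   python3 asharp.py -i aggregated_results.txt -o asharp_results.json --stdout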
--------------------------------------------------------------------------------
/src/automated_security_helper/models/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
--------------------------------------------------------------------------------
/src/automated_security_helper/models/asharp_model.py:
--------------------------------------------------------------------------------
1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | from pydantic import BaseModel, Field
5 | from typing import Literal, Optional, List, Dict, Any, Union
6 |
7 |
8 | class ASHARPModel(BaseModel):
9 |
10 | def to_json_schema(
11 | self,
12 | format: Literal["dict", "str"] = "dict",
13 | *args,
14 | **kwargs,
15 | ) -> Dict[str, Any] | str:
16 | if format == "dict":
17 | return self.model_dump(*args, **kwargs)
18 | return self.model_dump_json(*args, **kwargs)
19 |
--------------------------------------------------------------------------------
/utils/ash_helpers.ps1:
--------------------------------------------------------------------------------
1 | function Invoke-ASH {
2 | <#
3 | .SYNOPSIS
4 | Provides a PowerShell entrypoint to build and invoke ASH as a container executable.
5 |
6 | .DESCRIPTION
7 | Provides a PowerShell entrypoint to build and invoke ASH as a container executable.
8 |
9 | .PARAMETER SourceDir
10 | The source directory to scan with ASH.
11 |
12 | Defaults to the current working directory.
13 |
14 | .PARAMETER OutputDir
15 | The output directory for ASH results to be stored in.
16 |
17 | Defaults to `ash_output` within the current working directory.
18 |
19 | .PARAMETER AshArgs
20 | Additional arguments to pass to ASH.
21 |
22 | .PARAMETER OCIRunner
23 | Preferred OCI runner CLI tool to use, e.g. `docker`, `finch`, `nerdctl`, or `podman`.
24 |
25 | Supports tab-completion of common OCI runner CLI tools or overriding to provide
26 | something else entirely.
27 |
28 | Defaults to `$env:ASH_OCI_RUNNER` if set, otherwise attempts to resolve based on the
29 | first found executable in PATH.
30 |
31 | .PARAMETER NoBuild
32 | If $true, skips the `OCI_RUNNER build ...` call.
33 |
34 | Requires target image tag to be present on the host already, either through a previous
35 | build or by pulling from a registry.
36 |
37 | .PARAMETER NoRun
38 | If $true, skips the `OCI_RUNNER run ...` call.
39 |
40 | Used primarily when a rebuild is needed during development of ASH, but a re-run of
41 | the ASH scan is not needed after build.
42 |
43 | .EXAMPLE
44 | Invoke-ASH -SourceDir ./dummy_files -OCIRunner finch -AshArgs '--quiet --force' -Verbose
45 |
46 | .EXAMPLE
47 | Get-Help Invoke-ASH
48 | #>
49 | [CmdletBinding()]
50 | Param(
51 | [parameter(Position=0)]
52 | [ValidateScript({Test-Path $_})]
53 | [string]
54 | $SourceDir = $PWD.Path,
55 | [parameter()]
56 | [string]
57 | $OutputDir = $(Join-Path $PWD.Path 'ash_output'),
58 | [parameter(Position = 1, ValueFromRemainingArguments)]
59 | [string]
60 | $AshArgs = $null,
61 | [parameter()]
62 | [string]
63 | $OCIRunner = $env:ASH_OCI_RUNNER,
64 | [parameter()]
65 | [string]
66 | $AshImageName = $(if ($null -ne $env:ASH_IMAGE_NAME) {
67 | $env:ASH_IMAGE_NAME
68 | } else {
69 | "automated-security-helper:local"
70 | }),
71 | [parameter()]
72 | [switch]
73 | $NoBuild,
74 | [parameter()]
75 | [switch]
76 | $NoRun
77 | )
78 | Begin {
79 | $ashRoot = (Get-Item $PSScriptRoot).Parent.FullName
80 | $buildArgs = [System.Collections.Generic.List[string]]::new()
81 | if ("$AshArgs" -match '\-\-force') {
82 | $buildArgs.Add('--no-cache')
83 | }
84 | if ("$AshArgs" -match '(\-\-quiet|\-q)') {
85 | $buildArgs.Add('-q')
86 | }
87 | $runners = if (-not [string]::IsNullOrEmpty($OCIRunner)) {
88 | @(
89 | $OCIRunner
90 | )
91 | } else {
92 | @(
93 | 'docker'
94 | 'finch'
95 | 'nerdctl'
96 | 'podman'
97 | )
98 | }
99 | $sourceDirFull = Get-Item $SourceDir | Select-Object -ExpandProperty FullName
100 | Write-Verbose "Resolved SourceDir to: $sourceDirFull"
101 |
102 | # Create the output directory if it doesn't exist, otherwise the bind mount of the
103 | # OUTPUT_DIR will fail.
104 | if (-not (Test-Path $OutputDir)) {
105 | Write-Verbose "Creating OutputDir: $OutputDir"
106 | New-Item $OutputDir -ItemType Directory -Force | Out-Null
107 | }
108 | $outputDirFull = Get-Item $OutputDir | Select-Object -ExpandProperty FullName
109 | }
110 | Process {
111 | try {
112 | $RESOLVED_OCI_RUNNER = $null
113 | foreach ($runner in $runners) {
114 | if ($FOUND = Get-Command $runner -ErrorAction SilentlyContinue) {
115 | $RESOLVED_OCI_RUNNER = $FOUND
116 | break
117 | }
118 | }
119 | if ($null -eq $RESOLVED_OCI_RUNNER) {
120 | Write-Error "Unable to resolve an OCI runner -- exiting"
121 | exit 1
122 | } else {
123 | Write-Verbose "Resolved OCI_RUNNER to: $RESOLVED_OCI_RUNNER"
124 | $buildCmd = @(
125 | $RESOLVED_OCI_RUNNER
126 | 'build'
127 | '-t'
128 | $AshImageName
129 | "'$ashRoot'"
130 | $($buildArgs -join ' ')
131 | ) -join ' '
132 | $runCmd = @(
133 | $RESOLVED_OCI_RUNNER
134 | 'run'
135 | '--rm'
136 | '-it'
137 | "--mount type=bind,source=$sourceDirFull,destination=/src,readonly"
138 | "--mount type=bind,source=$outputDirFull,destination=/out"
139 | $AshImageName
140 | 'ash'
141 | '--source-dir /src'
142 | '--output-dir /out'
143 | "$AshArgs"
144 | ) -join ' '
145 |
146 | if (-not $NoBuild) {
147 | Write-Verbose "Executing: $buildCmd"
148 | Invoke-Expression $buildCmd
149 | }
150 | if (-not $NoRun) {
151 | Write-Verbose "Executing: $runCmd"
152 | Invoke-Expression $runCmd
153 | }
154 | }
155 | } catch {
156 | throw $_
157 | }
158 | }
159 | }
160 |
161 | Register-ArgumentCompleter -CommandName Invoke-ASH -ParameterName 'OCIRunner' -ScriptBlock {
162 | param($commandName, $parameterName, $wordToComplete, $commandAst, $fakeBoundParameter)
163 | $exampleOCIRunners = @(
164 | 'docker'
165 | 'finch'
166 | 'nerdctl'
167 | 'podman'
168 | )
169 | $exampleOCIRunners | Where-Object {$_ -match $wordToComplete} | ForEach-Object {
170 | [System.Management.Automation.CompletionResult]::new(
171 | $_, $_, 'ParameterValue', $_)
172 | }
173 | }
174 |
--------------------------------------------------------------------------------
/utils/ash_helpers.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | #################################################################################
4 | ### ~~ ASH HELPERS ~~
5 | ### This script should be sourced in your shell profile:
6 | ###
7 | ### $ echo ". '${ASH_HELPERS_SCRIPT}'" >> ~/.bashrc
8 | #################################################################################
9 |
10 | # Resolve the absolute path of the parent of the script directory (ASH repo root)
11 | export ASH_ROOT_DIR="$(cd $(dirname "$(dirname "$0")"); pwd)"
12 | export ASH_UTILS_DIR="${ASH_ROOT_DIR}/utils"
13 | export ASH_HELPERS_SCRIPT="${ASH_UTILS_DIR}/ash_helpers.sh"
14 | export ASH_IMAGE_NAME=${ASH_IMAGE_NAME:-"automated-security-helper:local"}
15 |
16 | # Function to invoke ash CLI in single container executable form
17 | invoke-ash() {
18 | # Set local variables
19 | local SOURCE_DIR=""
20 | local OUTPUT_DIR=""
21 | local OCI_RUNNER=""
22 | local DOCKER_EXTRA_ARGS=""
23 | local ASH_ARGS=""
24 | local NO_BUILD="NO"
25 | local NO_RUN="NO"
26 | # Parse arguments
27 | while (("$#")); do
28 | case $1 in
29 | --source-dir)
30 | shift
31 | local SOURCE_DIR="$1"
32 | ;;
33 | --output-dir)
34 | shift
35 | local OUTPUT_DIR="$1"
36 | ;;
37 | --force)
38 | local DOCKER_EXTRA_ARGS="${DOCKER_EXTRA_ARGS} --no-cache"
39 | ;;
40 | --quiet | -q)
41 | local DOCKER_EXTRA_ARGS="${DOCKER_EXTRA_ARGS} -q"
42 | ASH_ARGS="${ASH_ARGS} --quiet"
43 | ;;
44 | --oci-runner | -o)
45 | shift
46 | local OCI_RUNNER="$1"
47 | ;;
48 | --no-build)
49 | local NO_BUILD="YES"
50 | ;;
51 | --no-run)
52 | local NO_RUN="YES"
53 | ;;
54 | *)
55 | ASH_ARGS="${ASH_ARGS} $1"
56 | esac
57 | shift
58 | done
59 |
60 | # Default to the pwd
61 | if [ "${SOURCE_DIR}" = "" ]; then
62 | SOURCE_DIR="$(pwd)"
63 | fi
64 |
65 | # Default to the pwd/ash_output
66 | if [ "${OUTPUT_DIR}" = "" ]; then
67 | OUTPUT_DIR="$(pwd)/ash_output"
68 | fi
69 |
70 | # Create the output directory if it doesn't exist, otherwise the bind mount of the
71 | # OUTPUT_DIR will fail.
72 | if [ ! -d "${OUTPUT_DIR}" ]; then
73 | mkdir -p "${OUTPUT_DIR}"
74 | fi
75 |
76 | # Resolve the absolute paths
77 | SOURCE_DIR="$(cd "$SOURCE_DIR"; pwd)"
78 | OUTPUT_DIR="$(cd "$OUTPUT_DIR"; pwd)"
79 |
80 | # Resolve the OCI_RUNNER
81 | local RESOLVED_OCI_RUNNER=${OCI_RUNNER:-$(command -v docker || command -v finch || command -v nerdctl || command -v podman)}
82 |
83 | # If we couldn't resolve an OCI_RUNNER, exit
84 | if [[ "${RESOLVED_OCI_RUNNER}" == "" ]]; then
85 | echo "Unable to resolve an OCI_RUNNER -- exiting"
86 | exit 1
87 | # else, build and run the image
88 | else
89 | echo "Resolved OCI_RUNNER to: ${RESOLVED_OCI_RUNNER}"
90 |
91 | # Build the image if the --no-build flag is not set
92 | if [ "${NO_BUILD}" = "NO" ]; then
93 | build_cmd="${RESOLVED_OCI_RUNNER} build --tag ${ASH_IMAGE_NAME} --file \"${ASH_ROOT_DIR}/Dockerfile\"${DOCKER_EXTRA_ARGS} \"${ASH_ROOT_DIR}\""
94 | echo $build_cmd
95 | eval $build_cmd
96 | fi
97 |
98 | # Run the image if the --no-run flag is not set
99 | if [ "${NO_RUN}" = "NO" ]; then
100 | run_cmd="${RESOLVED_OCI_RUNNER} run --rm --interactive --tty --mount type=bind,source=\"${SOURCE_DIR}\",destination=/src,readonly --mount type=bind,source=\"${OUTPUT_DIR}\",destination=/out ${ASH_IMAGE_NAME} ash --source-dir /src --output-dir /out $ASH_ARGS"
101 | echo $run_cmd
102 | eval $run_cmd
103 | fi
104 | fi
105 | }
106 |
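107 | # Example (after sourcing this script, e.g. from ~/.bashrc):
108 | #
109 | #   invoke-ash --source-dir . --output-dir ./ash_output --oci-runner finch --quiet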
--------------------------------------------------------------------------------
/utils/cdk-addon-py.py:
--------------------------------------------------------------------------------
1 |
2 | def add_cdk_nag_imports(filename):
3 | cdk_nag_imports = ["\nfrom cdk_nag import AwsSolutionsChecks","\nfrom aws_cdk import App, Aspects"]
4 | for cdk_import in cdk_nag_imports:
5 | with open(filename, 'r') as cdk_app_file:
6 | cdk_app_file_data = cdk_app_file.read()
7 | if cdk_import in cdk_app_file_data:
8 | # print(cdk_import + ' already exists')
9 | pass
10 | else:
11 | missing_import = '#!/usr/bin/env python3' + cdk_import
12 | cdk_app_file_data = cdk_app_file_data.replace('#!/usr/bin/env python3', missing_import )
13 | with open(filename, 'w') as cdk_app_file:
14 | # print('Adding '+ missing_import)
15 | cdk_app_file.write(cdk_app_file_data)
16 |
17 | def add_cdk_nag_checks(filename):
18 | cdk_nag_check = "\nAspects.of(app).add(AwsSolutionsChecks())\n"
19 | with open(filename, 'r') as cdk_app_file:
20 | cdk_app_file_data = cdk_app_file.read()
21 | if cdk_nag_check in cdk_app_file_data:
22 | # print(cdk_nag_check + ' already exists')
23 | pass
24 | else:
25 | missing_check = cdk_nag_check + 'app.synth()'
26 | cdk_app_file_data = cdk_app_file_data.replace('app.synth()', missing_check )
27 | with open(filename, 'w') as cdk_app_file:
28 | # print('Adding '+ missing_check)
29 | cdk_app_file.write(cdk_app_file_data)
30 |
31 |
32 |
33 | filename = "app.py"
34 | add_cdk_nag_imports(filename)
35 | add_cdk_nag_checks(filename)
36 |
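37 | # Example (run from the root of a CDK Python app whose entrypoint is app.py;
38 | # the /utils path is the in-container location used by ASH):
39 | #
40 | #   python3 /utils/cdk-addon-py.py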
--------------------------------------------------------------------------------
/utils/cdk-docker-execute.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | abs() { # compute the absolute value of the input parameter
4 | input=$1
5 | if [[ $input -lt 0 ]]; then
6 | input=$((-input))
7 | fi
8 | echo $input
9 | }
10 |
11 | bumprc() { # return the higher absolute value of the inputs
12 | output=$1
13 | if [[ $2 -ne 0 ]]; then
14 | lrc=$(abs $2)
15 |
16 | if [[ $lrc -gt $1 ]]; then
17 | output=$lrc
18 | fi
19 | fi
20 | echo $output
21 | }
22 |
23 | RC=0
24 |
25 | #
26 | # Resolve ASH paths from env vars if they exist, otherwise use defaults
27 | #
28 | _ASH_SOURCE_DIR=${_ASH_SOURCE_DIR:-/src}
29 | _ASH_OUTPUT_DIR=${_ASH_OUTPUT_DIR:-/out}
30 | _ASH_UTILS_LOCATION=${_ASH_UTILS_LOCATION:-/utils}
31 | _ASH_CFNRULES_LOCATION=${_ASH_CFNRULES_LOCATION:-/cfnrules}
32 | _ASH_RUN_DIR=${_ASH_RUN_DIR:-/run/scan/src}
33 |
34 | source ${_ASH_UTILS_LOCATION}/common.sh
35 |
36 | #
37 | # Allow the container to run Git commands against a repo in ${_ASH_SOURCE_DIR}
38 | #
39 | git config --global --add safe.directory "${_ASH_SOURCE_DIR}" >/dev/null 2>&1
40 | git config --global --add safe.directory "${_ASH_RUN_DIR}" >/dev/null 2>&1
41 |
42 | # cd to the source directory as a starting point
43 | cd ${_ASH_SOURCE_DIR}
44 | debug_echo "[cdk] pwd: '$(pwd)' :: _ASH_SOURCE_DIR: "${_ASH_SOURCE_DIR}" :: _ASH_RUN_DIR: ${_ASH_RUN_DIR}"
45 |
46 | # Set REPORT_PATH to the report location, then touch it to ensure it exists
47 | REPORT_PATH="${_ASH_OUTPUT_DIR}/work/cdk_report_result.txt"
48 | rm ${REPORT_PATH} 2> /dev/null
49 | touch ${REPORT_PATH}
50 |
51 | #
52 | # Set CDK_WORK_DIR to a folder in the output directory
53 | #
54 | # Note that the work folder is created with mktemp outside of the source
55 | # tree, which keeps it from being scanned by other scanners running in parallel.
56 | #
57 | CDK_WORK_DIR=$(mktemp -d -t cdk-nag-scan.XXXXX)
58 |
59 | #
60 | # This is used to allow/accept files which have spaces in their names
61 | #
62 | # nosemgrep
63 | IFS=$'\n'
64 |
65 | #
66 | # Save the current directory to return to it when done
67 | #
68 | # cd to the source directory as a starting point
69 | #
70 | _CURRENT_DIR=${PWD}
71 | cd ${_ASH_OUTPUT_DIR}
72 |
73 | #
74 | # Create a directory to hold all the cdk_nag results from ASH
75 | #
76 | DIRECTORY="ash_cf2cdk_output"
77 | # Check if this directory already exist from previous ASH run
78 | if [[ -n "${_ASH_OUTPUT_DIR}" && -d "${_ASH_OUTPUT_DIR}/$DIRECTORY" ]]; then
79 | rm -rf "${_ASH_OUTPUT_DIR}/$DIRECTORY"
80 | fi
81 | mkdir -p "${_ASH_OUTPUT_DIR}/$DIRECTORY" 2> /dev/null
82 |
83 | RC=0
84 |
85 | #
86 | # Uncomment the diagnostic output below to get details about
87 | # the environment and node versions
88 | #
89 |
90 | # echo "Environment:" >> ${REPORT_PATH}
91 | # echo "Node information:" >> ${REPORT_PATH}
92 | # node --version >> ${REPORT_PATH}
93 | # echo "----------------------" >> ${REPORT_PATH}
94 | # echo "Installed NPM packages:" >> ${REPORT_PATH}
95 | # npm list -g >> ${REPORT_PATH}
96 | # echo "----------------------" >> ${REPORT_PATH}
97 | # echo "CDK information:" >> ${REPORT_PATH}
98 | # cdk --version >> ${REPORT_PATH}
99 | # echo "----------------------" >> ${REPORT_PATH}
100 |
101 | debug_echo "Starting all scanners within the CDK scanner tool set"
102 | echo -e "\nstarting to investigate ..." >> ${REPORT_PATH}
103 |
104 | # cfn_files=($(readlink -f $(grep -lri 'AWSTemplateFormatVersion' "${_ASH_SOURCE_DIR}" --exclude-dir={cdk.out,utils,.aws-sam,ash_cf2cdk_output} --exclude=ash) 2>/dev/null))
105 | cfn_files=($(rg AWSTemplateFormatVersion --files-with-matches --type yaml --type json "${_ASH_SOURCE_DIR}" 2>/dev/null))
106 | debug_echo "Found ${#cfn_files[@]} CloudFormation files to scan: ${cfn_files}"
107 |
108 | #
109 | # Copy the CDK application to the work area and change
110 | # to that folder so that npm install
111 | # installs the required packages in a writable area.
112 | #
113 | cp -R ${_ASH_UTILS_LOCATION}/cdk-nag-scan/* ${CDK_WORK_DIR}
114 | cd ${CDK_WORK_DIR}
115 |
116 | # # Install the CDK application's required packages
117 |
118 | npm install --silent
119 |
120 | #
121 | # Now, for each file, run a cdk synth to subject the file to CDK-NAG scanning
122 | #
123 | if [ "${#cfn_files[@]}" -gt 0 ]; then
124 | debug_echo "Found CloudFormation files to scan, starting scan"
125 | echo "found ${#cfn_files[@]} files to scan. Starting scans ..." >> ${REPORT_PATH}
126 |
127 | for file in "${cfn_files[@]}"; do
128 |
129 | cfn_filename=`basename $file`
130 | echo ">>>>>> begin cdk-nag result for ${cfn_filename} >>>>>>" >> ${REPORT_PATH}
131 | #
132 | # Generate the CDK application inserting the CloudFormation template
133 | #
134 | # /usr/bin/python3 cfn_to_cdk/template_generator.py $file
135 | #
136 | # Use CDK to synthesize the CDK application,
137 | # running CDK-NAG on the inserted CloudFormation template
138 | #
139 | debug_echo "Importing CloudFormation template file ${file} to apply CDK Nag rules against it"
140 | npx cdk synth --context fileName="${file}" --quiet 2>> ${REPORT_PATH}
141 | CRC=$?
142 |
143 | RC=$(bumprc $RC $CRC)
144 |
145 | #
146 | # Check to see if there is output to copy, if so, create a folder and copy the files
147 | #
148 | fileName="*.template.json"
149 | # echo "checking for ${fileName}" >> ${REPORT_PATH}
150 | # find -type f -name ${fileName} >> ${REPORT_PATH} 2>&1
151 | # ls ${fileName} >> ${REPORT_PATH} 2>&1
152 | fileExists=$(find ${CDK_WORK_DIR}/cdk.out -type f -name ${fileName} | wc -l)
153 | # echo "fileExists = ${fileExists}" >> ${REPORT_PATH}
154 | reportsName="AwsSolutions-*-NagReport.csv"
155 | # echo "checking for ${reportsName}" >> ${REPORT_PATH}
156 | # find -type f -name ${reportsName} >> ${REPORT_PATH} 2>&1
157 | # ls ${reportsName} >> ${REPORT_PATH} 2>&1
158 | reportsExist=$(find ${CDK_WORK_DIR}/cdk.out -type f -name ${reportsName} | wc -l)
159 | # echo "reportsExist = ${reportsExist}" >> ${REPORT_PATH}
160 | if [ "${fileExists}" -gt 0 -o "${reportsExist}" -gt 0 ]; then
161 | mkdir -p ${_ASH_OUTPUT_DIR}/${DIRECTORY}/${cfn_filename}_cdk_nag_results
162 |
163 | echo "Writing CDK-NAG reports for ${cfn_filename}" >> ${REPORT_PATH}
164 | #
165 | # Copy and then remove these files to avoid permission setting errors when running in a single container
166 | #
167 | cp ${CDK_WORK_DIR}/cdk.out/*.template.json ${_ASH_OUTPUT_DIR}/${DIRECTORY}/${cfn_filename}_cdk_nag_results/ >/dev/null 2>&1
168 | rm ${CDK_WORK_DIR}/cdk.out/*.template.json >/dev/null 2>&1
169 | cp ${CDK_WORK_DIR}/cdk.out/AwsSolutions-*-NagReport.csv ${_ASH_OUTPUT_DIR}/${DIRECTORY}/${cfn_filename}_cdk_nag_results/ >/dev/null 2>&1
170 | rm ${CDK_WORK_DIR}/cdk.out/AwsSolutions-*-NagReport.csv >/dev/null 2>&1
171 | else
172 | echo "No CDK-NAG reports generated for ${cfn_filename}" >> ${REPORT_PATH}
173 | fi
174 |
175 | echo "<<<<<< end cdk-nag result for ${cfn_filename} <<<<<<" >> ${REPORT_PATH}
176 | done
177 | else
178 | echo "found ${#cfn_files[@]} files to scan. Skipping scans." >> ${REPORT_PATH}
179 | fi
180 |
181 | unset IFS
182 |
183 | #
184 | # Clean up the CDK application temporary working folder
185 | #
186 | if [[ -n "${CDK_WORK_DIR}" && -d "${CDK_WORK_DIR}" ]]; then
187 | rm -rf ${CDK_WORK_DIR}
188 | fi
189 |
190 | # cd back to the original folder in case path changed during scan
191 | cd ${_CURRENT_DIR}
192 |
193 | debug_echo "Finished all scanners within the CDK scanner tool set"
194 | exit $RC
195 |
--------------------------------------------------------------------------------
/utils/cdk-nag-scan/.gitignore:
--------------------------------------------------------------------------------
1 | *.js
2 | !jest.config.js
3 | *.d.ts
4 | node_modules
5 | package-lock.json
6 | !lib/
7 |
8 | # CDK asset staging directory
9 | .cdk.staging
10 | cdk.out
11 |
--------------------------------------------------------------------------------
/utils/cdk-nag-scan/.npmignore:
--------------------------------------------------------------------------------
1 | *.ts
2 | !*.d.ts
3 |
4 | # CDK asset staging directory
5 | .cdk.staging
6 | cdk.out
7 |
--------------------------------------------------------------------------------
/utils/cdk-nag-scan/README.md:
--------------------------------------------------------------------------------
1 | # Welcome to your CDK TypeScript project
2 |
3 | This is a blank project for CDK development with TypeScript.
4 |
5 | The `cdk.json` file tells the CDK Toolkit how to execute your app.
6 |
7 | ## Useful commands
8 |
9 | * `npm run build` compile typescript to js
10 | * `npm run watch` watch for changes and compile
11 | * `npm run test` perform the jest unit tests
12 | * `cdk deploy` deploy this stack to your default AWS account/region
13 | * `cdk diff` compare deployed stack with current state
14 | * `cdk synth` emits the synthesized CloudFormation template
15 |
--------------------------------------------------------------------------------
/utils/cdk-nag-scan/bin/cdk-nag-scan.ts:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env node
2 | import 'source-map-support/register';
3 | import * as cdk from 'aws-cdk-lib';
4 | import { CdkNagScanStack } from '../lib/cdk-nag-scan-stack';
5 | import { AwsSolutionsChecks } from 'cdk-nag';
6 |
7 | const app = new cdk.App();
8 |
9 | const templateFileName = app.node.tryGetContext('fileName');
10 | if (!templateFileName) {
11 | throw new Error('fileName is required');
12 | }
13 |
14 | /*
15 | * This uses the input file name to generate the temp stack name.
16 | * The temp stack name uses up to 128 characters of the path-name
17 | * (CloudFormation stack name length limit) to the CloudFormation
18 |  * template that is found, replacing runs of non-alphanumeric
19 |  * characters with single hyphens (-).
20 | *
21 | * This is done to ensure that the output files are unique and do
22 | * not overwrite each other when a scanned repository has multiple
23 |  * CloudFormation templates to scan.
24 | */
25 | var stackName = templateFileName
26 | .replace(/\/(src|run|out|work)\//, '')
27 | .replace(/[\W_]+/gi, '-')
28 | .replace(/^-+/, '');
29 |
30 | if (stackName.length > 128) {
31 | stackName = stackName.substr(stackName.length - 128, stackName.length);
32 | }
33 |
34 | new CdkNagScanStack(app, stackName);
35 |
36 | cdk.Aspects.of(app).add(new AwsSolutionsChecks({ verbose: true }));
37 |
38 | app.synth();
39 |
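40 | // Example invocation (this mirrors how utils/cdk-docker-execute.sh drives
41 | // this app; the template path is illustrative):
42 | //
43 | //   npx cdk synth --context fileName="/src/my-template.yaml" --quiet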
--------------------------------------------------------------------------------
/utils/cdk-nag-scan/cdk.json:
--------------------------------------------------------------------------------
1 | {
2 | "app": "npx ts-node --prefer-ts-exts bin/cdk-nag-scan.ts",
3 | "watch": {
4 | "include": [
5 | "**"
6 | ],
7 | "exclude": [
8 | "README.md",
9 | "cdk*.json",
10 | "**/*.d.ts",
11 | "**/*.js",
12 | "tsconfig.json",
13 | "package*.json",
14 | "yarn.lock",
15 | "node_modules",
16 | "test"
17 | ]
18 | },
19 | "context": {
20 | "@aws-cdk/aws-lambda:recognizeLayerVersion": true,
21 | "@aws-cdk/core:checkSecretUsage": true,
22 | "@aws-cdk/core:target-partitions": [
23 | "aws",
24 | "aws-cn"
25 | ],
26 | "@aws-cdk-containers/ecs-service-extensions:enableDefaultLogDriver": true,
27 | "@aws-cdk/aws-ec2:uniqueImdsv2TemplateName": true,
28 | "@aws-cdk/aws-ecs:arnFormatIncludesClusterName": true,
29 | "@aws-cdk/aws-iam:minimizePolicies": true,
30 | "@aws-cdk/core:validateSnapshotRemovalPolicy": true,
31 | "@aws-cdk/aws-codepipeline:crossAccountKeyAliasStackSafeResourceName": true,
32 | "@aws-cdk/aws-s3:createDefaultLoggingPolicy": true,
33 | "@aws-cdk/aws-sns-subscriptions:restrictSqsDescryption": true,
34 | "@aws-cdk/aws-apigateway:disableCloudWatchRole": true,
35 | "@aws-cdk/core:enablePartitionLiterals": true,
36 | "@aws-cdk/aws-events:eventsTargetQueueSameAccount": true,
37 | "@aws-cdk/aws-iam:standardizedServicePrincipals": true,
38 | "@aws-cdk/aws-ecs:disableExplicitDeploymentControllerForCircuitBreaker": true,
39 | "@aws-cdk/aws-iam:importedRoleStackSafeDefaultPolicyName": true,
40 | "@aws-cdk/aws-s3:serverAccessLogsUseBucketPolicy": true,
41 | "@aws-cdk/aws-route53-patters:useCertificate": true,
42 | "@aws-cdk/customresources:installLatestAwsSdkDefault": false,
43 | "@aws-cdk/aws-rds:databaseProxyUniqueResourceName": true,
44 | "@aws-cdk/aws-codedeploy:removeAlarmsFromDeploymentGroup": true,
45 | "@aws-cdk/aws-apigateway:authorizerChangeDeploymentLogicalId": true,
46 | "@aws-cdk/aws-ec2:launchTemplateDefaultUserData": true,
47 | "@aws-cdk/aws-secretsmanager:useAttachedSecretResourcePolicyForSecretTargetAttachments": true,
48 | "@aws-cdk/aws-redshift:columnId": true,
49 | "@aws-cdk/aws-stepfunctions-tasks:enableEmrServicePolicyV2": true,
50 | "@aws-cdk/aws-ec2:restrictDefaultSecurityGroup": true,
51 | "@aws-cdk/aws-apigateway:requestValidatorUniqueId": true,
52 | "@aws-cdk/aws-kms:aliasNameRef": true,
53 | "@aws-cdk/core:includePrefixInUniqueNameGeneration": true
54 | }
55 | }
56 |
--------------------------------------------------------------------------------
/utils/cdk-nag-scan/jest.config.js:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | testEnvironment: 'node',
3 | roots: ['/test'],
4 | testMatch: ['**/*.test.ts'],
5 | transform: {
6 | '^.+\\.tsx?$': 'ts-jest'
7 | }
8 | };
9 |
--------------------------------------------------------------------------------
/utils/cdk-nag-scan/lib/cdk-nag-scan-stack.ts:
--------------------------------------------------------------------------------
1 | import * as cdk from 'aws-cdk-lib';
2 | import { Construct } from 'constructs';
3 | // import * as sqs from 'aws-cdk-lib/aws-sqs';
4 |
5 | export class CdkNagScanStack extends cdk.Stack {
6 | constructor(scope: Construct, id: string, props?: cdk.StackProps) {
7 | super(scope, id, props);
8 |
9 | const templateFileName = this.node.tryGetContext('fileName');
10 |
11 | if ( templateFileName != undefined ) {
12 | try {
13 | const cfnTemplate = new cdk.cloudformation_include.CfnInclude(this, templateFileName, {
14 | templateFile: templateFileName
15 | });
16 | } catch(error) {
17 | let message = ''
18 | if ( error instanceof Error ) {
19 | message = error.message
20 | } else {
21 | message = 'unknown caught type'
22 | }
23 | console.log(`Error calling CfnInclude -- File: '${templateFileName}', Error: ${message}`)
24 | }
25 |
26 | } else {
27 | console.log(`Context parameter "fileName" must be set!`)
28 | }
29 | }
30 | }
31 |
--------------------------------------------------------------------------------
/utils/cdk-nag-scan/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "cdk-nag-scan",
3 | "version": "0.1.0",
4 | "bin": {
5 | "cdk-nag-scan": "bin/cdk-nag-scan.js"
6 | },
7 | "scripts": {
8 | "build": "tsc",
9 | "watch": "tsc -w",
10 | "test": "jest",
11 | "cdk": "cdk"
12 | },
13 | "devDependencies": {
14 | "@types/jest": "^29.5.1",
15 | "@types/node": "20.1.7",
16 | "aws-cdk": "^2.87.0",
17 | "jest": "^29.5.0",
18 | "ts-jest": "^29.1.0",
19 | "ts-node": "^10.9.1",
20 | "typescript": "~5.1.3"
21 | },
22 | "dependencies": {
23 | "aws-cdk-lib": "^2.87.0",
24 | "cdk-nag": "^2.27.61",
25 | "constructs": "^10.0.0",
26 | "source-map-support": "^0.5.21"
27 | }
28 | }
29 |
--------------------------------------------------------------------------------
/utils/cdk-nag-scan/test/cdk-nag-scan.test.ts:
--------------------------------------------------------------------------------
1 | // import * as cdk from 'aws-cdk-lib';
2 | // import { Template } from 'aws-cdk-lib/assertions';
3 | // import * as CdkNagScan from '../lib/cdk-nag-scan-stack';
4 |
5 | // example test. To run these tests, uncomment this file along with the
6 | // example resource in lib/cdk-nag-scan-stack.ts
7 | test('SQS Queue Created', () => {
8 | // const app = new cdk.App();
9 | // // WHEN
10 | // const stack = new CdkNagScan.CdkNagScanStack(app, 'MyTestStack');
11 | // // THEN
12 | // const template = Template.fromStack(stack);
13 |
14 | // template.hasResourceProperties('AWS::SQS::Queue', {
15 | // VisibilityTimeout: 300
16 | // });
17 | });
18 |
--------------------------------------------------------------------------------
/utils/cdk-nag-scan/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "target": "ES2020",
4 | "module": "commonjs",
5 | "lib": [
6 | "es2020",
7 | "dom"
8 | ],
9 | "declaration": true,
10 | "strict": true,
11 | "noImplicitAny": true,
12 | "strictNullChecks": true,
13 | "noImplicitThis": true,
14 | "alwaysStrict": true,
15 | "noUnusedLocals": false,
16 | "noUnusedParameters": false,
17 | "noImplicitReturns": true,
18 | "noFallthroughCasesInSwitch": false,
19 | "inlineSourceMap": true,
20 | "inlineSources": true,
21 | "experimentalDecorators": true,
22 | "strictPropertyInitialization": false,
23 | "typeRoots": [
24 | "./node_modules/@types"
25 | ]
26 | },
27 | "exclude": [
28 | "node_modules",
29 | "cdk.out"
30 | ]
31 | }
32 |
--------------------------------------------------------------------------------
/utils/cfn-to-cdk/README.md:
--------------------------------------------------------------------------------
1 |
2 | # Welcome to your CDK Python project!
3 |
4 | You should explore the contents of this project. It demonstrates a CDK app with an instance of a stack (`cfn_to_cdk_stack`)
5 | which imports an existing CloudFormation template using `CfnInclude` so that cdk-nag rules can be applied to it.
6 |
7 | The `cdk.json` file tells the CDK Toolkit how to execute your app.
8 |
9 | This project is set up like a standard Python project. The initialization process also creates
10 | a virtualenv within this project, stored under the .venv directory. To create the virtualenv
11 | it assumes that there is a `python3` executable in your path with access to the `venv` package.
12 | If for any reason the automatic creation of the virtualenv fails, you can create the virtualenv
13 | manually once the init process completes.
14 |
15 | To manually create a virtualenv on MacOS and Linux:
16 |
17 | ```
18 | $ python3 -m venv .venv
19 | ```
20 |
21 | After the init process completes and the virtualenv is created, you can use the following
22 | step to activate your virtualenv.
23 |
24 | ```
25 | $ source .venv/bin/activate
26 | ```
27 |
28 | If you are on a Windows platform, you would activate the virtualenv like this:
29 |
30 | ```
31 | % .venv\Scripts\activate.bat
32 | ```
33 |
34 | Once the virtualenv is activated, you can install the required dependencies.
35 |
36 | ```
37 | $ pip install -r requirements.txt
38 | ```
39 |
40 | At this point you can now synthesize the CloudFormation template for this code.
41 |
42 | ```
43 | $ cdk synth
44 | ```
45 |
46 | You can now begin exploring the source code, contained in the cfn_to_cdk directory.
47 | There is also a very trivial test included that can be run like this:
48 |
49 | ```
50 | $ pytest
51 | ```
52 |
53 | To add additional dependencies, for example other CDK libraries, just add to
54 | your requirements.txt file and rerun the `pip install -r requirements.txt`
55 | command.
56 |
57 | ## Useful commands
58 |
59 | * `cdk ls` list all stacks in the app
60 | * `cdk synth` emits the synthesized CloudFormation template
61 | * `cdk deploy` deploy this stack to your default AWS account/region
62 | * `cdk diff` compare deployed stack with current state
63 | * `cdk docs` open CDK documentation
64 |
65 | Enjoy!
66 |
--------------------------------------------------------------------------------
/utils/cfn-to-cdk/app.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | from aws_cdk import App, Aspects
3 | from cdk_nag import AwsSolutionsChecks
4 |
5 | import aws_cdk as cdk
6 |
7 | from cfn_to_cdk.cfn_to_cdk_stack import CfnToCdkStack
8 |
9 |
10 | app = cdk.App()
11 | CfnToCdkStack(app, "cfn-to-cdk")
12 |
13 |
14 | Aspects.of(app).add(AwsSolutionsChecks())
15 | app.synth()
16 |
--------------------------------------------------------------------------------
/utils/cfn-to-cdk/cdk.json:
--------------------------------------------------------------------------------
1 | {
2 | "app": "python3 app.py",
3 | "watch": {
4 | "include": [
5 | "**"
6 | ],
7 | "exclude": [
8 | "README.md",
9 | "cdk*.json",
10 | "requirements*.txt",
11 | "source.bat",
12 | "**/__init__.py",
13 | "python/__pycache__",
14 | "tests"
15 | ]
16 | },
17 | "context": {
18 | "@aws-cdk/aws-apigateway:usagePlanKeyOrderInsensitiveId": true,
19 | "@aws-cdk/core:stackRelativeExports": true,
20 | "@aws-cdk/aws-rds:lowercaseDbIdentifier": true,
21 | "@aws-cdk/aws-lambda:recognizeVersionProps": true,
22 | "@aws-cdk/aws-cloudfront:defaultSecurityPolicyTLSv1.2_2021": true,
23 | "@aws-cdk-containers/ecs-service-extensions:enableDefaultLogDriver": true,
24 | "@aws-cdk/aws-ec2:uniqueImdsv2TemplateName": true,
25 | "@aws-cdk/core:target-partitions": [
26 | "aws",
27 | "aws-cn"
28 | ]
29 | }
30 | }
31 |
--------------------------------------------------------------------------------
/utils/cfn-to-cdk/cfn_to_cdk/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/awslabs/automated-security-helper/5cd74321abbeda35204423fd0b57c9239a63768e/utils/cfn-to-cdk/cfn_to_cdk/__init__.py
--------------------------------------------------------------------------------
/utils/cfn-to-cdk/cfn_to_cdk/cfn_to_cdk_stack.py:
--------------------------------------------------------------------------------
1 | import aws_cdk as cdk
2 | from aws_cdk import cloudformation_include as cfn_inc
3 | from constructs import Construct
4 |
5 |
6 | class CfnToCdkStack(cdk.Stack):
7 |
8 | def __init__(self, scope: Construct, construct_id: str, **kwargs) -> None:
9 | super().__init__(scope, construct_id, **kwargs)
10 |
11 |
12 | template0 = cfn_inc.CfnInclude(self, "/app/test.yaml",
13 | template_file="/app/test.yaml")
14 |
--------------------------------------------------------------------------------
/utils/cfn-to-cdk/cfn_to_cdk/cfn_to_cdk_stack.py.j2:
--------------------------------------------------------------------------------
1 | import aws_cdk as cdk
2 | from aws_cdk import cloudformation_include as cfn_inc
3 | from constructs import Construct
4 |
5 |
6 | class CfnToCdkStack(cdk.Stack):
7 |
8 | def __init__(self, scope: Construct, construct_id: str, **kwargs) -> None:
9 | super().__init__(scope, construct_id, **kwargs)
10 |
11 | {% for id, file in enumerate(files) %}
12 | template{{ id }} = cfn_inc.CfnInclude(self, "{{ file }}",
13 | template_file="{{ file }}")
14 | {% endfor -%}
15 |
--------------------------------------------------------------------------------
/utils/cfn-to-cdk/cfn_to_cdk/template_generator.py:
--------------------------------------------------------------------------------
1 | from jinja2 import Template
2 | import sys
3 |
4 | # Template file paths are passed as CLI arguments by the ASH CDK scan step
5 | files = sys.argv[1:]
6 | with open('/utils/cfn-to-cdk/cfn_to_cdk_stack.py.j2') as f:
7 |     template = Template(f.read())
8 | rendered = template.render(enumerate=enumerate, files=files)
9 | with open("/utils/cfn-to-cdk/cfn_to_cdk_stack.py", "w") as fh:
10 |     fh.write(rendered)
11 |
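
A usage sketch; the paths follow the container layout where this repo's utils directory is mounted at /utils, and the two template names are illustrative:

    python3 /utils/cfn-to-cdk/cfn_to_cdk/template_generator.py /app/vpc.yaml /app/iam.yaml
    # rewrites cfn_to_cdk_stack.py with one CfnInclude per argument
    # (template0, template1, ...), ready for 'cdk synth' in the app above
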
--------------------------------------------------------------------------------
/utils/cfn-to-cdk/requirements-dev.txt:
--------------------------------------------------------------------------------
1 | pytest==6.2.5
2 |
--------------------------------------------------------------------------------
/utils/cfn-to-cdk/requirements.txt:
--------------------------------------------------------------------------------
1 | constructs>=10.0.0,<11.0.0
2 | jsii>=1.60.1
--------------------------------------------------------------------------------
/utils/common.sh:
--------------------------------------------------------------------------------
1 | export ASH_ROOT_DIR="$(cd "$(dirname "$(dirname "$0")")"; pwd)"
2 | export ASH_UTILS_DIR="${ASH_ROOT_DIR}/utils"
3 |
4 | # LPURPLE='\033[1;35m'
5 | # LGRAY='\033[0;37m'
6 | # GREEN='\033[0;32m'
7 | # RED='\033[0;31m'
8 | # YELLOW='\033[0;33m'
9 | # CYAN='\033[0;36m'
10 | # NC='\033[0m' # No Color
11 |
12 | debug_echo() {
13 | [[ "${ASH_DEBUG:-"NO"}" != "NO" ]] && >&2 echo -e "\033[0;33m[$(date '+%Y-%m-%d %H:%M:%S')] DEBUG:\033[0m ${1}"
14 | }
15 |
16 | debug_show_tree() {
17 | _TREE_FLAGS="-x -h -a --du -I .git"
18 | [[ "${ASH_DEBUG:-"NO"}" != "NO" ]] && tree ${_TREE_FLAGS} ${1:-$(pwd)}
19 | }
20 |
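
Both helpers no-op unless ASH_DEBUG is set to something other than NO, so callers can leave the calls in place unconditionally. A minimal sketch, assuming the tree binary is installed:

    source /utils/common.sh
    export ASH_DEBUG=YES
    debug_echo "root dir resolved to ${ASH_ROOT_DIR}"   # timestamped line on stderr
    debug_show_tree "${ASH_ROOT_DIR}"                   # du-annotated listing, .git excluded
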
--------------------------------------------------------------------------------
/utils/get-scan-set.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
3 | # SPDX-License-Identifier: Apache-2.0
4 |
5 | import re
6 | import sys
7 | from datetime import datetime
8 | from typing import List
9 | from pathspec import PathSpec
10 | from pathlib import Path
11 | import argparse
12 | import os
13 | from glob import glob
14 |
15 | ASH_INCLUSIONS=[
16 | '.git',
17 | "**/cdk.out/asset.*",
18 | "!**/*.template.json", # CDK output template default path pattern
19 | ]
20 |
21 |
22 | def red(msg) -> str:
23 | return "\033[91m{}\033[00m".format(msg)
24 |
25 | def green(msg) -> str:
26 | return "\033[92m{}\033[00m".format(msg)
27 |
28 | def yellow(msg) -> str:
29 | return "\033[33m{}\033[00m".format(msg)
30 |
31 | def lightPurple(msg) -> str:
32 | return "\033[94m{}\033[00m".format(msg)
33 |
34 | def purple(msg) -> str:
35 | return "\033[95m{}\033[00m".format(msg)
36 |
37 | def cyan(msg) -> str:
38 | return "\033[96m{}\033[00m".format(msg)
39 |
40 | def gray(msg) -> str:
41 |     return "\033[90m{}\033[00m".format(msg)
42 |
43 | def black(msg) -> str:
44 |     return "\033[30m{}\033[00m".format(msg)
45 |
46 | def debug_echo(*msg, debug: bool = False) -> None:
47 | if debug:
48 | print(yellow(f"[{datetime.now().strftime('%Y-%m-%d %H:%M:%S')}] [get-scan-set.py] DEBUG:"), *msg, file=sys.stderr)
49 |
50 | def get_ash_ignorespec_lines(
51 | path,
52 | ignorefiles: List[str] = [],
53 | debug: bool = False,
54 | ) -> List[str]:
55 | dotignores = [
56 | f"{path}/.ignore",
57 | *[
58 | item
59 | for item in glob(f"{path}/**/.ignore")
60 | ]
61 | ]
62 | # ashignores = [
63 | # f"{path}/.ashignore",
64 | # *[
65 | # item
66 | # for item in glob(f"{path}/**/.ashignore")
67 | # ]
68 | # ]
69 | gitignores = [
70 | f"{path}/.gitignore",
71 | *[
72 | item
73 | for item in glob(f"{path}/**/.gitignore")
74 | ]
75 | ]
76 | all_ignores = list(set([
77 | *dotignores,
78 | *gitignores,
79 | # *ashignores,
80 | *[
81 | f"{path}/{file}"
82 | for file in ignorefiles
83 | ]
84 | ]))
85 | lines = []
86 | for ignorefile in all_ignores:
87 | if os.path.isfile(ignorefile):
88 | clean = re.sub(rf"^{re.escape(path)}", '${SOURCE_DIR}', ignorefile)
89 | debug_echo(f"Found .ignore file: {clean}", debug=debug)
90 | lines.append(f"######### START CONTENTS: {clean} #########")
91 | with open(ignorefile) as f:
92 | lines.extend(f.readlines())
93 | lines.append(f"######### END CONTENTS: {clean} #########")
94 | lines.append("")
95 | lines = [ line.strip() for line in lines ]
96 | lines.append(f"######### START CONTENTS: ASH_INCLUSIONS #########")
97 | lines.extend(ASH_INCLUSIONS)
98 | lines.append(f"######### END CONTENTS: ASH_INCLUSIONS #########")
99 | return lines
100 |
101 | def get_ash_ignorespec(
102 | lines: List[str],
103 | debug: bool = False,
104 | ) -> PathSpec:
105 | debug_echo("Generating spec from collected ignorespec lines", debug=debug)
106 | spec = PathSpec.from_lines('gitwildmatch', lines)
107 | return spec
108 |
109 | def get_files_not_matching_spec(
110 | path,
111 | spec,
112 | debug: bool = False,
113 | ):
114 | full = []
115 | included = []
116 | for item in os.walk(path):
117 | for file in item[2]:
118 | full.append(os.path.join(item[0], file))
119 | inc_full = os.path.join(item[0], file)
120 | clean = re.sub(rf"^{re.escape(path)}", '${SOURCE_DIR}', inc_full)
121 | if not spec.match_file(inc_full):
122 | if '/node_modules/aws-cdk' not in inc_full:
123 | debug_echo(f"Matched file for scan set: {clean}", debug=debug)
124 | included.append(inc_full)
125 | # elif '/.git/' not in inc_full:
126 | # debug_echo(f"Ignoring file matching spec: {clean}", debug=debug)
127 | included = sorted(set(included))
128 | return included
129 |
130 | if __name__ == "__main__":
131 | # set up argparse
132 |     parser = argparse.ArgumentParser(description="List the files under the --source path that are not excluded by the collected ignore rules")
133 | parser.add_argument("--source", help="path to scan", default=os.getcwd(), type=str)
134 | parser.add_argument("--output", help="output path to save the ash-ignore-report.txt and ash-scan-set-files-list.txt files to", default=None, type=str)
135 | parser.add_argument("--ignorefile", help="ignore file to use in addition to the standard gitignore", default=[], type=str, nargs='*')
136 | parser.add_argument("--debug", help="Enables debug logging", action=argparse.BooleanOptionalAction)
137 | args = parser.parse_args()
138 |
139 | ashignore_content = None
140 | ashscanset_list = None
141 | ashignore_imported = False
142 | ashscanset_imported = False
143 |
144 | if args.output:
145 | ashignore_path = Path(args.output).joinpath('ash-ignore-report.txt')
146 | ashscanset_path = Path(args.output).joinpath('ash-scan-set-files-list.txt')
147 | if ashignore_path.exists():
148 | with open(ashignore_path) as f:
149 | ashignore_content = f.readlines()
150 | ashignore_imported = True
151 | print(cyan(f"Imported ash-ignore-report.txt from {args.output}"), file=sys.stderr)
152 | if ashscanset_path.exists():
153 | with open(ashscanset_path) as f:
154 | ashscanset_list = f.readlines()
155 | ashscanset_imported = True
156 | print(cyan(f"Imported ash-scan-set-files-list.txt from {args.output}"), file=sys.stderr)
157 |
158 | if not ashignore_content:
159 | ashignore_content = get_ash_ignorespec_lines(args.source, args.ignorefile, debug=args.debug)
160 |
161 | if not ashscanset_list:
162 | spec = get_ash_ignorespec(ashignore_content, debug=args.debug)
163 | ashscanset_list = get_files_not_matching_spec(args.source, spec, debug=args.debug)
164 |
165 | for file in ashscanset_list:
166 | print(file, file=sys.stdout)
167 |
168 | if args.output:
169 |         if not ashignore_imported:
170 | debug_echo(f"Writing ash-ignore-report.txt to {args.output}", debug=args.debug)
171 | if not ashignore_path.parent.exists():
172 | ashignore_path.parent.mkdir(parents=True)
173 | with open(ashignore_path, "w") as f:
174 | f.write("\n".join(ashignore_content))
175 |
176 |         if not ashscanset_imported:
177 | debug_echo(f"Writing ash-scan-set-files-list.txt to {args.output}", debug=args.debug)
178 | if not ashscanset_path.parent.exists():
179 | ashscanset_path.parent.mkdir(parents=True)
180 | with open(ashscanset_path, "w") as f:
181 | f.write("\n".join(ashscanset_list))
182 |
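
A command-line sketch; the extra ignore file name is illustrative:

    python3 utils/get-scan-set.py \
        --source "$(pwd)" \
        --output ash_output/work \
        --ignorefile .ashignore \
        --debug > scan-set.txt
    # stdout carries one scannable file per line; ash-ignore-report.txt and
    # ash-scan-set-files-list.txt land under ash_output/work, and a re-run with
    # the same --output reuses them instead of recomputing the set
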
--------------------------------------------------------------------------------
/utils/git-docker-execute.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | abs() { # compute the absolute value of the input parameter
4 | input=$1
5 | if [[ $input -lt 0 ]]; then
6 | input=$((-input))
7 | fi
8 | echo $input
9 | }
10 |
11 | bumprc() { # return the higher absolute value of the inputs
12 | output=$1
13 | if [[ $2 -ne 0 ]]; then
14 | lrc=$(abs $2)
15 |
16 | if [[ $lrc -gt $1 ]]; then
17 | output=$lrc
18 | fi
19 | fi
20 | echo $output
21 | }
22 |
23 | RC=0
24 |
25 | #
26 | # Resolve ASH paths from env vars if they exist, otherwise use defaults
27 | #
28 | _ASH_SOURCE_DIR=${_ASH_SOURCE_DIR:-/src}
29 | _ASH_OUTPUT_DIR=${_ASH_OUTPUT_DIR:-/out}
30 | _ASH_UTILS_LOCATION=${_ASH_UTILS_LOCATION:-/utils}
31 | _ASH_CFNRULES_LOCATION=${_ASH_CFNRULES_LOCATION:-/cfnrules}
32 | _ASH_RUN_DIR=${_ASH_RUN_DIR:-/run/scan/src}
33 | _ASH_IS_GIT_REPOSITORY=0
34 |
35 | source ${_ASH_UTILS_LOCATION}/common.sh
36 |
37 | #
38 | # Allow the container to run Git commands against a repo in ${_ASH_SOURCE_DIR}
39 | #
40 | git config --global --add safe.directory "${_ASH_SOURCE_DIR}" >/dev/null 2>&1
41 | git config --global --add safe.directory "${_ASH_RUN_DIR}" >/dev/null 2>&1
42 |
43 |
44 | # cd to the source directory as a starting point
45 | cd "${_ASH_SOURCE_DIR}"
46 | debug_echo "[git] pwd: '$(pwd)' :: _ASH_SOURCE_DIR: ${_ASH_SOURCE_DIR} :: _ASH_RUN_DIR: ${_ASH_RUN_DIR}"
47 | if [[ "$(git rev-parse --is-inside-work-tree 2>/dev/null)" == "true" ]]; then
48 | _ASH_IS_GIT_REPOSITORY=1
49 | fi
50 |
51 | # Set REPORT_PATH to the report location, then touch it to ensure it exists
52 | REPORT_PATH="${_ASH_OUTPUT_DIR}/work/git_report_result.txt"
53 | rm ${REPORT_PATH} 2> /dev/null
54 | touch ${REPORT_PATH}
55 |
56 | # Use Tree to obtain a list of files in the source directory
57 | _TREE_FLAGS="-x -h -a --du -I .git"
58 | echo ">>>>>> begin tree result >>>>>>" >> "${REPORT_PATH}"
59 | # if the value of _ASH_IS_GIT_REPOSITORY is 1 then echo a message to the report file
60 | if [ "$_ASH_IS_GIT_REPOSITORY" -eq 1 ]; then
61 | echo "Git repository detected. Ensure your .gitignore configuration excludes all the files that you intend to ignore." >> "${REPORT_PATH}"
62 | fi;
63 | tree ${_TREE_FLAGS} "${_ASH_SOURCE_DIR}" >> "${REPORT_PATH}" 2>&1
64 | echo "<<<<<< end tree ${_TREE_FLAGS} result <<<<<<" >> "${REPORT_PATH}"
65 |
66 | #
67 | # There is no need to run the --install. Furthermore if --install is
68 | # run then any existing commit-msg, pre-commit, and prepare-commit-msg
69 | # hooks which are already set up for the repo will be overwritten by
70 | # the git secrets hooks.
71 | #
72 | # git secrets --install -f >/dev/null 2>&1 && \
73 |
74 |
75 |
76 | if [ "$(git rev-parse --is-inside-work-tree 2>/dev/null)" != "true" ]; then
77 | echo "Not in a git repository - skipping git checks" >>"${REPORT_PATH}"
78 | else
79 |
80 | #
81 | # Configure the repo to check for AWS secrets
82 | #
83 | git secrets --register-aws >>"${REPORT_PATH}" 2>&1
84 |
85 | #
86 | # List the Git secrets configuration
87 | #
88 | echo "git config --local --get-regexp \"^secrets\\..*\$\" output:" >>"${REPORT_PATH}" 2>&1
89 | git config --local --get-regexp "^secrets\..*$" >>"${REPORT_PATH}" 2>&1
90 |
91 | echo ">>>>>> begin git secrets --scan result >>>>>>" >> "${REPORT_PATH}"
92 | git secrets --scan >> "${REPORT_PATH}" 2>&1
93 | GRC=$?
94 | RC=$(bumprc $RC $GRC)
95 | echo "<<<<<< end git secrets --scan result <<<<<<" >> "${REPORT_PATH}"
96 |
97 | #
98 | # TODO: Consider adding in a longer scan of the history as well. Comment out for now.
99 | #
100 | # echo ">>>>>> begin git secrets --scan-history result >>>>>>" >> "${REPORT_PATH}"
101 | # git secrets --scan-history >> "${REPORT_PATH}" 2>&1
102 | # GRC=$?
103 | # RC=$(bumprc $RC $GRC)
104 | # echo "<<<<<< end git secrets --scan-history result <<<<<<" >> "${REPORT_PATH}
105 |
106 | fi
107 |
108 | # cd back to the original SOURCE_DIR in case path changed during scan
109 | cd ${_ASH_SOURCE_DIR}
110 |
111 | exit $RC
112 |
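
The script targets the container layout, but every path comes from an overridable _ASH_* variable, so it can be exercised against a local checkout. A sketch, assuming git, git-secrets, and tree are on PATH:

    mkdir -p /tmp/ash-out/work
    _ASH_SOURCE_DIR="$PWD" \
    _ASH_OUTPUT_DIR=/tmp/ash-out \
    _ASH_UTILS_LOCATION="$PWD/utils" \
    bash utils/git-docker-execute.sh
    cat /tmp/ash-out/work/git_report_result.txt   # tree listing plus git-secrets results
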
--------------------------------------------------------------------------------
/utils/grype-docker-execute.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | abs() { # compute the absolute value of the input parameter
4 | input=$1
5 | if [[ $input -lt 0 ]]; then
6 | input=$((-input))
7 | fi
8 | echo $input
9 | }
10 |
11 | bumprc() { # return the higher absolute value of the inputs
12 | output=$1
13 | if [[ $2 -ne 0 ]]; then
14 | lrc=$(abs $2)
15 |
16 | if [[ $lrc -gt $1 ]]; then
17 | output=$lrc
18 | fi
19 | fi
20 | echo $output
21 | }
22 |
23 | RC=0
24 |
25 | #
26 | # Resolve ASH paths from env vars if they exist, otherwise use defaults
27 | #
28 | _ASH_SOURCE_DIR=${_ASH_SOURCE_DIR:-/src}
29 | _ASH_OUTPUT_DIR=${_ASH_OUTPUT_DIR:-/out}
30 | _ASH_UTILS_LOCATION=${_ASH_UTILS_LOCATION:-/utils}
31 | _ASH_CFNRULES_LOCATION=${_ASH_CFNRULES_LOCATION:-/cfnrules}
32 | _ASH_RUN_DIR=${_ASH_RUN_DIR:-/run/scan/src}
33 |
34 | if [[ "${OFFLINE}" == "YES" && ( -z "${SEMGREP_RULES_CACHE_DIR}" || -z "${GRYPE_DB_CACHE_DIR}" ) ]]; then
35 | echo "Invalid cache state for Semgrep or Grype, please rebuild with --offline."
36 | exit 100
37 | fi
38 |
39 | source ${_ASH_UTILS_LOCATION}/common.sh
40 |
41 |
42 | # Empty Semgrep data dir case
43 | if [[ $OFFLINE == "YES" && -z "$(ls -A "$SEMGREP_RULES_CACHE_DIR")" ]]; then
44 | debug_echo "[offline] Semgrep rulesets not found but offline mode enabled, erroring"
45 | exit 1
46 | # Empty Grype data dir case
47 | elif [[ $OFFLINE == "YES" && -z "$(ls -A "$GRYPE_DB_CACHE_DIR")" ]]; then
48 | debug_echo "[offline] Grype rulesets not found but offline mode enabled, erroring"
49 | exit 1
50 | # Valid offline config case
51 | elif [[ $OFFLINE == "YES" ]]; then
52 | export SEMGREP_RULES="$(echo "$SEMGREP_RULES_CACHE_DIR"/*)"
53 | SEMGREP_ARGS="--metrics=off"
54 | debug_echo "[offline] Semgrep rulesets are ${SEMGREP_RULES} with metrics off"
55 |
56 | export GRYPE_DB_VALIDATE_AGE=false
57 | export GRYPE_DB_AUTO_UPDATE=false
58 | export GRYPE_CHECK_FOR_APP_UPDATE=false
59 | debug_echo "[offline] Grype DB cache dir is ${GRYPE_DB_CACHE_DIR} and validation/auto update is off"
60 | # Online (default) mode
61 | else
62 | SEMGREP_ARGS="--config=auto"
63 | fi
64 |
65 |
66 | #
67 | # Allow the container to run Git commands against a repo in ${_ASH_SOURCE_DIR}
68 | #
69 | git config --global --add safe.directory "${_ASH_SOURCE_DIR}" >/dev/null 2>&1
70 | git config --global --add safe.directory "${_ASH_RUN_DIR}" >/dev/null 2>&1
71 |
72 | # cd to the source directory as a starting point
73 | cd "${_ASH_SOURCE_DIR}"
74 | debug_echo "[grype] pwd: '$(pwd)' :: _ASH_SOURCE_DIR: ${_ASH_SOURCE_DIR} :: _ASH_RUN_DIR: ${_ASH_RUN_DIR}"
75 |
76 | # Set REPORT_PATH to the report location, then touch it to ensure it exists
77 | REPORT_PATH="${_ASH_OUTPUT_DIR}/work/grype_report_result.txt"
78 | rm ${REPORT_PATH} 2> /dev/null
79 | touch ${REPORT_PATH}
80 |
81 | scan_paths=("${_ASH_SOURCE_DIR}" "${_ASH_OUTPUT_DIR}/work")
82 |
83 | GRYPE_ARGS="-f medium --exclude=**/*-converted.py --exclude=**/*_report_result.txt"
84 | SYFT_ARGS="--exclude=**/*-converted.py --exclude=**/*_report_result.txt"
85 | SEMGREP_ARGS="${SEMGREP_ARGS} --legacy --error --exclude=*-converted.py --exclude=*_report_result.txt"
86 | debug_echo "[grype] ASH_OUTPUT_FORMAT: '${ASH_OUTPUT_FORMAT:-text}'"
87 | if [[ "${ASH_OUTPUT_FORMAT:-text}" != "text" ]]; then
88 | debug_echo "[grype] Output format is not 'text', setting output format options to JSON to enable easy translation into desired output format"
89 | GRYPE_ARGS="-o json ${GRYPE_ARGS}"
90 | SYFT_ARGS="-o json ${SYFT_ARGS}"
91 | SEMGREP_ARGS="--json ${SEMGREP_ARGS}"
92 | fi
93 |
94 | #
95 | # Run Grype
96 | #
97 | debug_echo "[grype] Starting all scanners within the Grype scanner tool set"
98 | for i in "${!scan_paths[@]}";
99 | do
100 | scan_path=${scan_paths[$i]}
101 | cd ${scan_path}
102 | debug_echo "[grype] Starting Grype scan of ${scan_path}"
103 | # debug_show_tree ${scan_path} ${REPORT_PATH}
104 | echo -e "\n>>>>>> Begin Grype output for ${scan_path} >>>>>>\n" >> ${REPORT_PATH}
105 |
106 | debug_echo "[grype] grype ${GRYPE_ARGS} dir:${scan_path}"
107 | grype ${GRYPE_ARGS} dir:${scan_path} >> ${REPORT_PATH} 2>&1
108 | SRC=$?
109 | RC=$(bumprc $RC $SRC)
110 |
111 | echo -e "\n<<<<<< End Grype output for ${scan_path} <<<<<<\n" >> ${REPORT_PATH}
112 | debug_echo "Finished Grype scan of ${scan_path}"
113 | done
114 |
115 | #
116 | # Run Syft
117 | #
118 | for i in "${!scan_paths[@]}";
119 | do
120 | scan_path=${scan_paths[$i]}
121 | cd ${scan_path}
122 | debug_echo "[grype] Starting Syft scan of ${scan_path}"
123 | # debug_show_tree ${scan_path} ${REPORT_PATH}
124 | echo -e "\n>>>>>> Begin Syft output for ${scan_path} >>>>>>\n" >> ${REPORT_PATH}
125 |
126 | debug_echo "[grype] syft ${SYFT_ARGS} ${scan_path}"
127 | syft ${SYFT_ARGS} ${scan_path} >> ${REPORT_PATH} 2>&1
128 | SRC=$?
129 | RC=$(bumprc $RC $SRC)
130 |
131 | echo -e "\n<<<<<< End Syft output for ${scan_path} <<<<<<\n" >> ${REPORT_PATH}
132 | debug_echo "[grype] Finished Syft scan of ${scan_path}"
133 | done
134 |
135 | #
136 | # Run Semgrep
137 | #
138 | for i in "${!scan_paths[@]}";
139 | do
140 | scan_path=${scan_paths[$i]}
141 | cd ${scan_path}
142 | debug_echo "[grype] Starting Semgrep scan of ${scan_path}"
143 | # debug_show_tree ${scan_path} ${REPORT_PATH}
144 | echo -e "\n>>>>>> Begin Semgrep output for ${scan_path} >>>>>>\n" >> ${REPORT_PATH}
145 |
146 | debug_echo "[grype] semgrep ${SEMGREP_ARGS} $scan_path"
147 | semgrep ${SEMGREP_ARGS} $scan_path >> ${REPORT_PATH} 2>&1
148 | SRC=$?
149 | RC=$(bumprc $RC $SRC)
150 |
151 | echo -e "\n<<<<<< End Semgrep output for ${scan_path} <<<<<<\n" >> ${REPORT_PATH}
152 | debug_echo "[grype] Finished Semgrep scan of ${scan_path}"
153 | done
154 |
155 | # cd back to the original SOURCE_DIR in case path changed during scan
156 | cd ${_ASH_SOURCE_DIR}
157 |
158 | debug_echo "[grype] Finished all scanners within the Grype scanner tool set"
159 | exit $RC
160 |
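
A sketch of the offline contract enforced above; the cache paths are illustrative, and both directories must exist and be non-empty when OFFLINE=YES:

    export OFFLINE=YES
    export SEMGREP_RULES_CACHE_DIR=/cache/semgrep-rules
    export GRYPE_DB_CACHE_DIR=/cache/grype-db
    bash /utils/grype-docker-execute.sh
    # exits 100 if either variable is unset, 1 if either directory is empty;
    # otherwise Grype, Syft, and Semgrep all run with network access disabled
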
--------------------------------------------------------------------------------
/utils/identifyipynb.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | abs() { # compute the absolute value of the input parameter
4 | input=$1
5 | if [[ $input -lt 0 ]]; then
6 | input=$((-input))
7 | fi
8 | echo $input
9 | }
10 |
11 | bumprc() { # return the higher absolute value of the inputs
12 | output=$1
13 | if [[ $2 -ne 0 ]]; then
14 | lrc=$(abs $2)
15 |
16 | if [[ $lrc -gt $1 ]]; then
17 | output=$lrc
18 | fi
19 | fi
20 | echo $output
21 | }
22 |
23 | RC=0
24 |
25 | #
26 | # Resolve ASH paths from env vars if they exist, otherwise use defaults
27 | #
28 | _ASH_SOURCE_DIR=${_ASH_SOURCE_DIR:-/src}
29 | _ASH_OUTPUT_DIR=${_ASH_OUTPUT_DIR:-/out}
30 | _ASH_UTILS_LOCATION=${_ASH_UTILS_LOCATION:-/utils}
31 | _ASH_CFNRULES_LOCATION=${_ASH_CFNRULES_LOCATION:-/cfnrules}
32 | _ASH_RUN_DIR=${_ASH_RUN_DIR:-/run/scan/src}
33 |
34 | source ${_ASH_UTILS_LOCATION}/common.sh
35 |
36 | #
37 | # Allow the container to run Git commands against a repo in ${_ASH_SOURCE_DIR}
38 | #
39 | git config --global --add safe.directory "${_ASH_SOURCE_DIR}" >/dev/null 2>&1
40 | git config --global --add safe.directory "${_ASH_RUN_DIR}" >/dev/null 2>&1
41 |
42 | # cd to the source directory as a starting point
43 | cd "${_ASH_SOURCE_DIR}"
44 | debug_echo "[ipynb] pwd: '$(pwd)' :: _ASH_SOURCE_DIR: ${_ASH_SOURCE_DIR} :: _ASH_RUN_DIR: ${_ASH_RUN_DIR}"
45 |
46 | # nosemgrep
47 | IFS=$'\n' # Support directories with spaces, make the loop iterate over newline instead of space
48 | # Find Jupyter files and convert them to python file for safety and bandit scans.
49 | echo "Looking for Jupyter notebook files"
50 |
51 | for file in $(find . -iname "*.ipynb" -not -path "*/cdk.out/*" -not -path "*/node_modules*");
52 | do
53 | echo "Found $file"
54 | filename="$(basename -- $file)"
55 | jupyter nbconvert --log-level WARN --to script "${_ASH_SOURCE_DIR}/$file" --output "${_ASH_OUTPUT_DIR}/work/$filename-converted"
56 | JRC=$?
57 | RC=$(bumprc $RC $JRC)
58 |
59 | done
60 | echo "$extenstions_found"
61 | unset IFS
62 |
63 | # cd back to the original SOURCE_DIR in case path changed during scan
64 | cd ${_ASH_SOURCE_DIR}
65 |
66 | exit $RC
67 |
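
The conversion the loop performs, shown for a single hypothetical notebook. nbconvert appends .py to the --output stem, which is what produces the *-converted.py files that bandit later scans in the work directory (and that the Grype tool set deliberately excludes):

    jupyter nbconvert --log-level WARN --to script \
        ./analysis.ipynb --output /out/work/analysis.ipynb-converted
    # result: /out/work/analysis.ipynb-converted.py
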
--------------------------------------------------------------------------------
/utils/js-docker-execute.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | abs() { # compute the absolute value of the input parameter
4 | input=$1
5 | if [[ $input -lt 0 ]]; then
6 | input=$((-input))
7 | fi
8 | echo $input
9 | }
10 |
11 | bumprc() { # return the higher absolute value of the inputs
12 | output=$1
13 | if [[ $2 -ne 0 ]]; then
14 | lrc=$(abs $2)
15 |
16 | if [[ $lrc -gt $1 ]]; then
17 | output=$lrc
18 | fi
19 | fi
20 | echo $output
21 | }
22 |
23 | RC=0
24 |
25 | #
26 | # Resolve ASH paths from env vars if they exist, otherwise use defaults
27 | #
28 | _ASH_SOURCE_DIR=${_ASH_SOURCE_DIR:-/src}
29 | _ASH_OUTPUT_DIR=${_ASH_OUTPUT_DIR:-/out}
30 | _ASH_UTILS_LOCATION=${_ASH_UTILS_LOCATION:-/utils}
31 | _ASH_CFNRULES_LOCATION=${_ASH_CFNRULES_LOCATION:-/cfnrules}
32 | _ASH_RUN_DIR=${_ASH_RUN_DIR:-/run/scan/src}
33 |
34 | source ${_ASH_UTILS_LOCATION}/common.sh
35 |
36 | #
37 | # Allow the container to run Git commands against a repo in ${_ASH_SOURCE_DIR}
38 | #
39 | git config --global --add safe.directory "${_ASH_SOURCE_DIR}" >/dev/null 2>&1
40 | git config --global --add safe.directory "${_ASH_RUN_DIR}" >/dev/null 2>&1
41 |
42 | # cd to the source directory as a starting point
43 | cd "${_ASH_SOURCE_DIR}"
44 | debug_echo "[js] pwd: '$(pwd)' :: _ASH_SOURCE_DIR: ${_ASH_SOURCE_DIR} :: _ASH_RUN_DIR: ${_ASH_RUN_DIR}"
45 |
46 | # Set REPORT_PATH to the report location, then touch it to ensure it exists
47 | REPORT_PATH="${_ASH_OUTPUT_DIR}/work/js_report_result.txt"
48 | rm ${REPORT_PATH} 2> /dev/null
49 | touch ${REPORT_PATH}
50 |
51 | # Run NPM, PNPM, or Yarn audit
52 | scan_paths=("${_ASH_SOURCE_DIR}" "${_ASH_OUTPUT_DIR}/work")
53 |
54 | AUDIT_ARGS=""
55 | debug_echo "[js] ASH_OUTPUT_FORMAT: '${ASH_OUTPUT_FORMAT:-text}'"
56 | if [[ "${ASH_OUTPUT_FORMAT:-text}" != "text" ]]; then
57 | debug_echo "[js] Output format is not 'text', setting output format options to JSON to enable easy translation into desired output format"
58 | AUDIT_ARGS="--json ${AUDIT_ARGS}"
59 | fi
60 |
61 | if [[ $OFFLINE == "YES" ]]; then
62 | debug_echo "[js] JavaScript package auditing is not available in offline mode"
63 | else
64 | for i in "${!scan_paths[@]}";
65 | do
66 | scan_path=${scan_paths[$i]}
67 | cd ${scan_path}
68 | for file in $(find . \
69 | -iname "package-lock.json" -o \
70 | -iname "pnpm-lock.yaml" -o \
71 | -iname "yarn.lock");
72 | do
73 | path="$(dirname -- $file)"
74 | cd $path
75 |
76 | audit_command="npm"
77 |
78 |       case $file in
79 |         *package-lock.json)
80 |           audit_command="npm"
81 |           ;;
82 |         *pnpm-lock.yaml)
83 |           audit_command="pnpm"
84 |           ;;
85 |         *yarn.lock)
86 |           audit_command="yarn"
87 |           ;;
88 |       esac
89 |
90 | echo -e "\n>>>>>> Begin ${audit_command} audit output for ${scan_path} >>>>>>\n" >> ${REPORT_PATH}
91 |
92 | eval "${audit_command} audit ${AUDIT_ARGS} >> ${REPORT_PATH} 2>&1"
93 |
94 | NRC=$?
95 | RC=$(bumprc $RC $NRC)
96 |
97 | cd ${scan_path}
98 |
99 | echo -e "\n<<<<<< End ${audit_command} audit output for ${scan_path} <<<<<<\n" >> ${REPORT_PATH}
100 | done
101 | done
102 | fi
103 | # cd back to the original SOURCE_DIR in case path changed during scan
104 | cd ${_ASH_SOURCE_DIR}
105 |
106 | exit $RC
107 |
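
What one iteration of the inner loop amounts to for a hypothetical directory holding a yarn.lock, with JSON output enabled (ASH_OUTPUT_FORMAT set to anything other than "text"):

    cd some/project                         # hypothetical lockfile directory
    yarn audit --json >> /out/work/js_report_result.txt 2>&1
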
--------------------------------------------------------------------------------
/utils/py-docker-execute.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | abs() { # compute the absolute value of the input parameter
4 | input=$1
5 | if [[ $input -lt 0 ]]; then
6 | input=$((-input))
7 | fi
8 | echo $input
9 | }
10 |
11 | bumprc() { # return the higher absolute value of the inputs
12 | output=$1
13 | if [[ $2 -ne 0 ]]; then
14 | lrc=$(abs $2)
15 |
16 | if [[ $lrc -gt $1 ]]; then
17 | output=$lrc
18 | fi
19 | fi
20 | echo $output
21 | }
22 |
23 | RC=0
24 |
25 | #
26 | # Resolve ASH paths from env vars if they exist, otherwise use defaults
27 | #
28 | _ASH_SOURCE_DIR=${_ASH_SOURCE_DIR:-/src}
29 | _ASH_OUTPUT_DIR=${_ASH_OUTPUT_DIR:-/out}
30 | _ASH_UTILS_LOCATION=${_ASH_UTILS_LOCATION:-/utils}
31 | _ASH_CFNRULES_LOCATION=${_ASH_CFNRULES_LOCATION:-/cfnrules}
32 | _ASH_RUN_DIR=${_ASH_RUN_DIR:-/run/scan/src}
33 |
34 | source ${_ASH_UTILS_LOCATION}/common.sh
35 |
36 | #
37 | # Allow the container to run Git commands against a repo in ${_ASH_SOURCE_DIR}
38 | #
39 | git config --global --add safe.directory "${_ASH_SOURCE_DIR}" >/dev/null 2>&1
40 | git config --global --add safe.directory "${_ASH_RUN_DIR}" >/dev/null 2>&1
41 |
42 | # cd to the source directory as a starting point
43 | cd "${_ASH_SOURCE_DIR}"
44 | debug_echo "[py] pwd: '$(pwd)' :: _ASH_SOURCE_DIR: ${_ASH_SOURCE_DIR} :: _ASH_RUN_DIR: ${_ASH_RUN_DIR}"
45 |
46 | # Set REPORT_PATH to the report location, then touch it to ensure it exists
47 | REPORT_PATH="${_ASH_OUTPUT_DIR}/work/py_report_result.txt"
48 | rm ${REPORT_PATH} 2> /dev/null
49 | touch ${REPORT_PATH}
50 |
51 | # Convert any Jupyter notebook files to python
52 | echo ">>>>>> begin identifyipynb output for Jupyter notebook conversion >>>>>>" >> ${REPORT_PATH}
53 | bash ${_ASH_UTILS_LOCATION}/identifyipynb.sh >>${REPORT_PATH} 2>&1
54 | echo >> ${REPORT_PATH} # ensure that we have a newline separating end-of-section
55 | echo "<<<<<< end identifyipynb output for Jupyter notebook conversion <<<<<<" >> ${REPORT_PATH}
56 |
57 | # Run bandit on both the source and output directories
58 | scan_paths=("${_ASH_SOURCE_DIR}" "${_ASH_OUTPUT_DIR}/work")
59 |
60 | if [ -f "${_ASH_SOURCE_DIR}/.bandit" ]; then
61 | BANDIT_ARGS="--ini ${_ASH_SOURCE_DIR}/.bandit"
62 | elif [ -f "${_ASH_SOURCE_DIR}/bandit.yaml" ]; then
63 | BANDIT_ARGS="-c ${_ASH_SOURCE_DIR}/bandit.yaml"
64 | elif [ -f "${_ASH_SOURCE_DIR}/bandit.toml" ]; then
65 | BANDIT_ARGS="-c ${_ASH_SOURCE_DIR}/bandit.toml"
66 | else
67 | BANDIT_ARGS="--exclude=\"*venv/*\" --severity-level=all"
68 | fi
69 |
70 | debug_echo "[py] BANDIT_ARGS: '${BANDIT_ARGS}'"
71 | debug_echo "[py] ASH_OUTPUT_FORMAT: '${ASH_OUTPUT_FORMAT:-text}'"
72 | if [[ "${ASH_OUTPUT_FORMAT:-text}" != "text" ]]; then
73 | debug_echo "[py] Output format is not 'text', setting output format options to JSON to enable easy translation into desired output format"
74 | BANDIT_ARGS="-f json ${BANDIT_ARGS}"
75 | fi
76 |
77 | for i in "${!scan_paths[@]}";
78 | do
79 | scan_path=${scan_paths[$i]}
80 | cd ${scan_path}
81 |
82 | echo ">>>>>> begin bandit result for ${scan_path} >>>>>>" >> ${REPORT_PATH}
83 | python3 -m bandit ${BANDIT_ARGS} -r $(pwd) >> ${REPORT_PATH} 2>&1
84 | BRC=$?
85 | RC=$(bumprc $RC $BRC)
86 | echo >> ${REPORT_PATH} # ensure that we have a newline separating end-of-section
87 | echo "<<<<<< end bandit result for ${scan_path} <<<<<<" >> ${REPORT_PATH}
88 | done
89 |
90 | # cd back to the original SOURCE_DIR in case path changed during scan
91 | cd ${_ASH_SOURCE_DIR}
92 |
93 | exit $RC
94 |
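
Bandit configuration resolves in precedence order: a .bandit INI file, then bandit.yaml, then bandit.toml, else the built-in exclude/severity flags. A sketch of the fallback invocation the loop issues for each scan path:

    python3 -m bandit --exclude="*venv/*" --severity-level=all -r "$(pwd)"
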
--------------------------------------------------------------------------------
/utils/yaml-docker-execute.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | abs() { # compute the absolute value of the input parameter
4 | input=$1
5 | if [[ $input -lt 0 ]]; then
6 | input=$((-input))
7 | fi
8 | echo $input
9 | }
10 |
11 | bumprc() { # return the higher absolute value of the inputs
12 | output=$1
13 | if [[ $2 -ne 0 ]]; then
14 | lrc=$(abs $2)
15 |
16 | if [[ $lrc -gt $1 ]]; then
17 | output=$lrc
18 | fi
19 | fi
20 | echo $output
21 | }
22 |
23 | RC=0
24 |
25 | #
26 | # Resolve ASH paths from env vars if they exist, otherwise use defaults
27 | #
28 | _ASH_SOURCE_DIR=${_ASH_SOURCE_DIR:-/src}
29 | _ASH_OUTPUT_DIR=${_ASH_OUTPUT_DIR:-/out}
30 | _ASH_UTILS_LOCATION=${_ASH_UTILS_LOCATION:-/utils}
31 | _ASH_CFNRULES_LOCATION=${_ASH_CFNRULES_LOCATION:-/cfnrules}
32 | _ASH_RUN_DIR=${_ASH_RUN_DIR:-/run/scan/src}
33 |
34 | source ${_ASH_UTILS_LOCATION}/common.sh
35 |
36 | #
37 | # Allow the container to run Git commands against a repo in ${_ASH_SOURCE_DIR}
38 | #
39 | git config --global --add safe.directory "${_ASH_SOURCE_DIR}" >/dev/null 2>&1
40 | git config --global --add safe.directory "${_ASH_RUN_DIR}" >/dev/null 2>&1
41 |
42 | # cd to the source directory as a starting point
43 | cd "${_ASH_SOURCE_DIR}"
44 | debug_echo "[yaml] pwd: '$(pwd)' :: _ASH_SOURCE_DIR: ${_ASH_SOURCE_DIR} :: _ASH_RUN_DIR: ${_ASH_RUN_DIR}"
45 |
46 | # Set REPORT_PATH to the report location, then touch it to ensure it exists
47 | REPORT_PATH="${_ASH_OUTPUT_DIR}/work/yaml_report_result.txt"
48 | rm ${REPORT_PATH} 2> /dev/null
49 | touch ${REPORT_PATH}
50 |
51 | #
52 | # This is used to allow/accept files which have spaces in their names
53 | #
54 | # nosemgrep
55 | IFS=$'\n'
56 |
57 | #
58 | # Save the current directory to return to it when done
59 | #
60 | # cd to the source directory as a starting point
61 | #
62 | _CURRENT_DIR=${PWD}
63 | cd ${_ASH_OUTPUT_DIR}
64 |
65 | scan_paths=("${_ASH_SOURCE_DIR}" "${_ASH_OUTPUT_DIR}/work")
66 |
67 | CHECKOV_ARGS=""
68 | CFNNAG_ARGS="--print-suppression --rule-directory ${_ASH_CFNRULES_LOCATION}"
69 | debug_echo "[yaml] ASH_OUTPUT_FORMAT: '${ASH_OUTPUT_FORMAT:-text}'"
70 | if [[ "${ASH_OUTPUT_FORMAT:-text}" != "text" ]]; then
71 | debug_echo "[yaml] Output format is not 'text', setting output format options to JSON to enable easy translation into desired output format"
72 | CHECKOV_ARGS="${CHECKOV_ARGS} --output=json"
73 | CFNNAG_ARGS="--output-format json ${CFNNAG_ARGS}"
74 | else
75 | CFNNAG_ARGS="--output-format txt ${CFNNAG_ARGS}"
76 | fi
77 |
78 | if [[ $OFFLINE == "YES" ]]; then
79 | debug_echo "[yaml] Adding --skip-download to prevent connection to Prisma Cloud during offline mode for Checkov scans"
80 | CHECKOV_ARGS="${CHECKOV_ARGS} --skip-download"
81 | else
82 | CHECKOV_ARGS="${CHECKOV_ARGS} --download-external-modules True"
83 | fi
84 |
85 | for i in "${!scan_paths[@]}";
86 | do
87 | scan_path=${scan_paths[$i]}
88 | echo -e "\n>>>>>> Begin yaml scan output for ${scan_path} >>>>>>\n" >> ${REPORT_PATH}
89 | cd ${scan_path}
90 | echo "starting to investigate ..." >> ${REPORT_PATH}
91 |
92 | #
93 | # find only files that appear to contain CloudFormation templates
94 | #
95 | cfn_files=($(readlink -f $(grep -lri 'AWSTemplateFormatVersion' . --exclude-dir={cdk.out,utils,.aws-sam,ash_cf2cdk_output} --exclude=ash) 2>/dev/null))
96 |
97 | #
98 | # For checkov scanning, add in files that are GitLab CI files or container build files
99 | #
100 | checkov_files=($(readlink -f $(find . \( -iname ".gitlab-ci.yml" \
101 | -or -iname "*Dockerfile*" \
102 | -or -iname "*.tf" \
103 | -or -iname "*.tf.json" \) \
104 | -not -path "./.git/*" \
105 | -not -path "./.github/*" \
106 | -not -path "./.venv/*" \
107 | -not -path "./.terraform/*" \
108 | -not -path "./.external_modules/*") 2>/dev/null))
109 | checkov_files=( ${checkov_files[@]} ${cfn_files[@]} )
110 |
111 | if [ "${#checkov_files[@]}" -gt 0 ]; then
112 | echo "found ${#checkov_files[@]} files to scan. Starting checkov scans ..." >> ${REPORT_PATH}
113 | ##HACK Overcomes the String length limitation default of 10000 characters so false negatives cannot occur from large resource policies.
114 | ##Vendor Issue: https://github.com/bridgecrewio/checkov/issues/5627
115 | export CHECKOV_RENDER_MAX_LEN=0
116 |
117 | for file in "${checkov_files[@]}"; do
118 | #echo $cfn_files
119 |         file1=$(basename "$file")
120 | echo ">>>>>> begin checkov result for ${file1} >>>>>>" >> ${REPORT_PATH}
121 | #
122 | # Run the checkov scan on the file
123 | #
124 | checkov_call="checkov ${CHECKOV_ARGS} -f '${file}'"
125 | debug_echo "[yaml] Running checkov ${checkov_call}"
126 | eval $checkov_call >> ${REPORT_PATH} 2>&1
127 | CHRC=$?
128 | echo "<<<<<< end checkov result for ${file1} <<<<<<" >> ${REPORT_PATH}
129 | RC=$(bumprc $RC $CHRC)
130 | done
131 | else
132 | echo "found ${#checkov_files[@]} files to scan. Skipping checkov scans." >> ${REPORT_PATH}
133 | fi
134 |
135 | if [ "${#cfn_files[@]}" -gt 0 ]; then
136 | echo "found ${#cfn_files[@]} files to scan. Starting cfn_nag scans ..." >> ${REPORT_PATH}
137 |
138 | for file in "${cfn_files[@]}"; do
139 |         file1=$(basename "$file")
140 | echo ">>>>>> begin cfn_nag_scan result for ${file1} >>>>>>" >> ${REPORT_PATH}
141 | #
142 | # Run the cfn_nag scan on the file
143 | #
144 | cfn_nag_scan ${CFNNAG_ARGS} --input-path "${file}" >> ${REPORT_PATH} 2>&1
145 | CNRC=$?
146 | echo "<<<<<< end cfn_nag_scan result for ${file1} <<<<<<" >> ${REPORT_PATH}
147 | RC=$(bumprc $RC $CNRC)
148 | done
149 | else
150 | echo "found ${#cfn_files[@]} files to scan. Skipping cfn_nag scans." >> ${REPORT_PATH}
151 | fi
152 | echo -e "\n<<<<<< End yaml scan output for ${scan_path} <<<<<<\n" >> ${REPORT_PATH}
153 | done
154 |
155 | unset IFS
156 |
157 | # cd back to the original folder in case path changed during scan
158 | cd ${_CURRENT_DIR}
159 |
160 | exit $RC
161 |
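
The two per-file scans, shown standalone against a hypothetical template with text output; CHECKOV_RENDER_MAX_LEN=0 lifts checkov's 10k-character render cap per the vendor issue linked above:

    export CHECKOV_RENDER_MAX_LEN=0
    checkov --download-external-modules True -f template.yaml
    cfn_nag_scan --output-format txt --print-suppression \
        --rule-directory /cfnrules --input-path template.yaml
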
--------------------------------------------------------------------------------