├── .ansible-lint
├── .bandit.yml
├── .flake8
├── .github
│   ├── CODEOWNERS
│   ├── dependabot.yml
│   ├── labels.yml
│   ├── lineage.yml
│   └── workflows
│       ├── build.yml
│       ├── codeql-analysis.yml
│       ├── dependency-review.yml
│       └── sync-labels.yml
├── .gitignore
├── .isort.cfg
├── .mdl_config.yaml
├── .pre-commit-config.yaml
├── .prettierignore
├── .yamllint
├── CONTRIBUTING.md
├── LICENSE
├── README.md
├── bump-version
├── config
│   ├── coordinating-skeleton-updates.md
│   ├── gitleaks-guide.md
│   └── gitleaks.toml
├── dev_envs
│   ├── README.md
│   └── mac-env-setup.md
├── ethos
│   └── README.md
├── languages
│   └── python
│       └── README.md
├── onboarding-checklist.md
├── open-source-policy
│   ├── README.md
│   ├── policy.md
│   └── practice.md
├── project_setup
│   ├── README.md
│   ├── branch-protection.md
│   ├── scripts
│   │   ├── ansible-roles
│   │   ├── iam-to-travis
│   │   ├── skeleton
│   │   ├── ssm-param
│   │   └── terraform-to-secrets
│   ├── skeleton-list.md
│   └── skeletonize-existing-repository.md
├── requirements-dev.txt
├── requirements-test.txt
├── requirements.txt
├── setup-env
├── setup.py
├── style-guide.md
└── version.txt

/.ansible-lint:
--------------------------------------------------------------------------------
1 | ---
2 | # See https://ansible-lint.readthedocs.io/configuring/ for a list of
3 | # the configuration elements that can exist in this file.
4 | enable_list:
5 |   # Useful checks that one must opt-into. See here for more details:
6 |   # https://ansible-lint.readthedocs.io/rules/
7 |   - fqcn-builtins
8 |   - no-log-password
9 |   - no-same-owner
10 | exclude_paths:
11 |   # This exclusion is implicit, unless exclude_paths is defined
12 |   - .cache
13 |   # Seems wise to ignore this too
14 |   - .github
15 | kinds:
16 |   # This will force our systemd specific molecule configurations to be treated
17 |   # as plain yaml files by ansible-lint. This mirrors the default kind
18 |   # configuration in ansible-lint for molecule configurations:
19 |   # yaml: "**/molecule/*/{base,molecule}.{yaml,yml}"
20 |   - yaml: "**/molecule/*/molecule-{no,with}-systemd.yml"
21 | use_default_rules: true
22 |
--------------------------------------------------------------------------------
/.bandit.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # Configuration file for the Bandit python security scanner
3 | # https://bandit.readthedocs.io/en/latest/config.html
4 |
5 | # Tests are first included by `tests`, and then excluded by `skips`.
6 | # If `tests` is empty, all tests are considered included.
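# As a brief illustration (hypothetical values, not part of this skeleton):
# because `skips` is applied after `tests`, a configuration such as
#
#   tests:
#     - B101
#     - B102
#
#   skips:
#     - B101
#
# would cause Bandit to run only B102 (exec_used), since B101 (assert_used)
# is first included and then skipped.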
7 | 8 | tests: 9 | # - B101 10 | # - B102 11 | 12 | skips: 13 | # - B101 # skip "assert used" check since assertions are required in pytests 14 | -------------------------------------------------------------------------------- /.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | max-line-length = 80 3 | # Select (turn on) 4 | # * Complexity violations reported by mccabe (C) - 5 | # http://flake8.pycqa.org/en/latest/user/error-codes.html#error-violation-codes 6 | # * Documentation conventions compliance reported by pydocstyle (D) - 7 | # http://www.pydocstyle.org/en/stable/error_codes.html 8 | # * Default errors and warnings reported by pycodestyle (E and W) - 9 | # https://pycodestyle.readthedocs.io/en/latest/intro.html#error-codes 10 | # * Default errors reported by pyflakes (F) - 11 | # http://flake8.pycqa.org/en/latest/glossary.html#term-pyflakes 12 | # * Default warnings reported by flake8-bugbear (B) - 13 | # https://github.com/PyCQA/flake8-bugbear#list-of-warnings 14 | # * The B950 flake8-bugbear opinionated warning - 15 | # https://github.com/PyCQA/flake8-bugbear#opinionated-warnings 16 | select = C,D,E,F,W,B,B950 17 | # Ignore flake8's default warning about maximum line length, which has 18 | # a hard stop at the configured value. Instead we use 19 | # flake8-bugbear's B950, which allows up to 10% overage. 20 | # 21 | # Also ignore flake8's warning about line breaks before binary 22 | # operators. It no longer agrees with PEP8. See, for example, here: 23 | # https://github.com/ambv/black/issues/21. Guido agrees here: 24 | # https://github.com/python/peps/commit/c59c4376ad233a62ca4b3a6060c81368bd21e85b. 25 | ignore = E501,W503 26 | -------------------------------------------------------------------------------- /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | # Each line is a file pattern followed by one or more owners. 2 | 3 | # These owners will be the default owners for everything in the 4 | # repo. Unless a later match takes precedence, these owners will be 5 | # requested for review when someone opens a pull request. 6 | * @dav3r @felddy @jsf9k @mcdonnnj 7 | 8 | # These folks own any files in the .github directory at the root of 9 | # the repository and any of its subdirectories. 10 | /.github/ @dav3r @felddy @jsf9k @mcdonnnj 11 | 12 | # These folks own all linting configuration files. 13 | /.ansible-lint @dav3r @felddy @jsf9k @mcdonnnj 14 | /.bandit.yml @dav3r @felddy @jsf9k @mcdonnnj 15 | /.flake8 @dav3r @felddy @jsf9k @mcdonnnj 16 | /.isort.cfg @dav3r @felddy @jsf9k @mcdonnnj 17 | /.mdl_config.yaml @dav3r @felddy @jsf9k @mcdonnnj 18 | /.pre-commit-config.yaml @dav3r @felddy @jsf9k @mcdonnnj 19 | /.prettierignore @dav3r @felddy @jsf9k @mcdonnnj 20 | /.yamllint @dav3r @felddy @jsf9k @mcdonnnj 21 | /requirements.txt @dav3r @felddy @jsf9k @mcdonnnj 22 | /requirements-dev.txt @dav3r @felddy @jsf9k @mcdonnnj 23 | /requirements-test.txt @dav3r @felddy @jsf9k @mcdonnnj 24 | /setup-env @dav3r @felddy @jsf9k @mcdonnnj 25 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | # Any ignore directives should be uncommented in downstream projects to disable 4 | # Dependabot updates for the given dependency. 
Downstream projects will get 5 | # these updates when the pull request(s) in the appropriate skeleton are merged 6 | # and Lineage processes these changes. 7 | 8 | updates: 9 | - directory: / 10 | ignore: 11 | # Managed by cisagov/skeleton-generic 12 | - dependency-name: actions/cache 13 | - dependency-name: actions/checkout 14 | - dependency-name: actions/dependency-review-action 15 | - dependency-name: actions/setup-go 16 | - dependency-name: actions/setup-python 17 | - dependency-name: cisagov/action-job-preamble 18 | - dependency-name: cisagov/setup-env-github-action 19 | - dependency-name: crazy-max/ghaction-github-labeler 20 | - dependency-name: github/codeql-action 21 | - dependency-name: hashicorp/setup-packer 22 | - dependency-name: hashicorp/setup-terraform 23 | - dependency-name: mxschmitt/action-tmate 24 | package-ecosystem: github-actions 25 | schedule: 26 | interval: weekly 27 | 28 | - directory: / 29 | package-ecosystem: pip 30 | schedule: 31 | interval: weekly 32 | 33 | - directory: / 34 | package-ecosystem: terraform 35 | schedule: 36 | interval: weekly 37 | version: 2 38 | -------------------------------------------------------------------------------- /.github/labels.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # Rather than breaking up descriptions into multiline strings we disable that 3 | # specific rule in yamllint for this file. 4 | # yamllint disable rule:line-length 5 | - color: eb6420 6 | description: This issue or pull request is awaiting the outcome of another issue or pull request 7 | name: blocked 8 | - color: "000000" 9 | description: This issue or pull request involves changes to existing functionality 10 | name: breaking change 11 | - color: d73a4a 12 | description: This issue or pull request addresses broken functionality 13 | name: bug 14 | - color: 07648d 15 | description: This issue will be advertised on code.gov's Open Tasks page (https://code.gov/open-tasks) 16 | name: code.gov 17 | - color: 0366d6 18 | description: Pull requests that update a dependency file 19 | name: dependencies 20 | - color: 5319e7 21 | description: This issue or pull request improves or adds to documentation 22 | name: documentation 23 | - color: cfd3d7 24 | description: This issue or pull request already exists or is covered in another issue or pull request 25 | name: duplicate 26 | - color: b005bc 27 | description: A high-level objective issue encompassing multiple issues instead of a specific unit of work 28 | name: epic 29 | - color: "000000" 30 | description: Pull requests that update GitHub Actions code 31 | name: github-actions 32 | - color: 0e8a16 33 | description: This issue or pull request is well-defined and good for newcomers 34 | name: good first issue 35 | - color: ff7518 36 | description: Pull request that should count toward Hacktoberfest participation 37 | name: hacktoberfest-accepted 38 | - color: a2eeef 39 | description: This issue or pull request will add or improve functionality, maintainability, or ease of use 40 | name: improvement 41 | - color: fef2c0 42 | description: This issue or pull request is not applicable, incorrect, or obsolete 43 | name: invalid 44 | - color: ce099a 45 | description: This pull request is ready to merge during the next Lineage Kraken release 46 | name: kraken 🐙 47 | - color: a4fc5d 48 | description: This issue or pull request requires further information 49 | name: need info 50 | - color: fcdb45 51 | description: This pull request is awaiting an action or decision to move 
forward 52 | name: on hold 53 | - color: 3772a4 54 | description: Pull requests that update Python code 55 | name: python 56 | - color: ef476c 57 | description: This issue is a request for information or needs discussion 58 | name: question 59 | - color: d73a4a 60 | description: This issue or pull request addresses a security issue 61 | name: security 62 | - color: 00008b 63 | description: This issue or pull request adds or otherwise modifies test code 64 | name: test 65 | - color: 1d76db 66 | description: This issue or pull request pulls in upstream updates 67 | name: upstream update 68 | - color: d4c5f9 69 | description: This issue or pull request increments the version number 70 | name: version bump 71 | - color: ffffff 72 | description: This issue will not be incorporated 73 | name: wontfix 74 | -------------------------------------------------------------------------------- /.github/lineage.yml: -------------------------------------------------------------------------------- 1 | --- 2 | version: "1" 3 | 4 | lineage: 5 | skeleton: 6 | remote-url: https://github.com/cisagov/skeleton-generic.git 7 | -------------------------------------------------------------------------------- /.github/workflows/build.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: build 3 | 4 | on: # yamllint disable-line rule:truthy 5 | merge_group: 6 | types: 7 | - checks_requested 8 | pull_request: 9 | push: 10 | repository_dispatch: 11 | types: 12 | - apb 13 | 14 | # Set a default shell for any run steps. The `-Eueo pipefail` sets errtrace, 15 | # nounset, errexit, and pipefail. The `-x` will print all commands as they are 16 | # run. Please see the GitHub Actions documentation for more information: 17 | # https://docs.github.com/en/actions/using-jobs/setting-default-values-for-jobs 18 | defaults: 19 | run: 20 | shell: bash -Eueo pipefail -x {0} 21 | 22 | env: 23 | PIP_CACHE_DIR: ~/.cache/pip 24 | PRE_COMMIT_CACHE_DIR: ~/.cache/pre-commit 25 | RUN_TMATE: ${{ secrets.RUN_TMATE }} 26 | TERRAFORM_DOCS_REPO_BRANCH_NAME: improvement/support_atx_closed_markdown_headers 27 | TERRAFORM_DOCS_REPO_DEPTH: 1 28 | TERRAFORM_DOCS_REPO_URL: https://github.com/mcdonnnj/terraform-docs.git 29 | 30 | jobs: 31 | diagnostics: 32 | name: Run diagnostics 33 | # This job does not need any permissions 34 | permissions: {} 35 | runs-on: ubuntu-latest 36 | steps: 37 | # Note that a duplicate of this step must be added at the top of 38 | # each job. 39 | - name: Apply standard cisagov job preamble 40 | uses: cisagov/action-job-preamble@v1 41 | with: 42 | check_github_status: "true" 43 | # This functionality is poorly implemented and has been 44 | # causing problems due to the MITM implementation hogging or 45 | # leaking memory. As a result we disable it by default. If 46 | # you want to temporarily enable it, simply set 47 | # monitor_permissions equal to "true". 48 | # 49 | # TODO: Re-enable this functionality when practical. See 50 | # cisagov/skeleton-generic#207 for more details. 51 | monitor_permissions: "false" 52 | output_workflow_context: "true" 53 | # Use a variable to specify the permissions monitoring 54 | # configuration. By default this will yield the 55 | # configuration stored in the cisagov organization-level 56 | # variable, but if you want to use a different configuration 57 | # then simply: 58 | # 1. Create a repository-level variable with the name 59 | # ACTIONS_PERMISSIONS_CONFIG. 60 | # 2. 
Set this new variable's value to the configuration you 61 | # want to use for this repository. 62 | # 63 | # Note in particular that changing the permissions 64 | # monitoring configuration *does not* require you to modify 65 | # this workflow. 66 | permissions_monitoring_config: ${{ vars.ACTIONS_PERMISSIONS_CONFIG }} 67 | lint: 68 | needs: 69 | - diagnostics 70 | permissions: 71 | # actions/checkout needs this to fetch code 72 | contents: read 73 | runs-on: ubuntu-latest 74 | steps: 75 | - name: Apply standard cisagov job preamble 76 | uses: cisagov/action-job-preamble@v1 77 | with: 78 | # This functionality is poorly implemented and has been 79 | # causing problems due to the MITM implementation hogging or 80 | # leaking memory. As a result we disable it by default. If 81 | # you want to temporarily enable it, simply set 82 | # monitor_permissions equal to "true". 83 | # 84 | # TODO: Re-enable this functionality when practical. See 85 | # cisagov/skeleton-generic#207 for more details. 86 | monitor_permissions: "false" 87 | # Use a variable to specify the permissions monitoring 88 | # configuration. By default this will yield the 89 | # configuration stored in the cisagov organization-level 90 | # variable, but if you want to use a different configuration 91 | # then simply: 92 | # 1. Create a repository-level variable with the name 93 | # ACTIONS_PERMISSIONS_CONFIG. 94 | # 2. Set this new variable's value to the configuration you 95 | # want to use for this repository. 96 | # 97 | # Note in particular that changing the permissions 98 | # monitoring configuration *does not* require you to modify 99 | # this workflow. 100 | permissions_monitoring_config: ${{ vars.ACTIONS_PERMISSIONS_CONFIG }} 101 | - id: setup-env 102 | uses: cisagov/setup-env-github-action@develop 103 | - uses: actions/checkout@v4 104 | - id: setup-python 105 | uses: actions/setup-python@v5 106 | with: 107 | python-version: ${{ steps.setup-env.outputs.python-version }} 108 | # We need the Go version and Go cache location for the actions/cache step, 109 | # so the Go installation must happen before that. 110 | - id: setup-go 111 | uses: actions/setup-go@v5 112 | with: 113 | # There is no expectation for actual Go code so we disable caching as 114 | # it relies on the existence of a go.sum file. 115 | cache: false 116 | go-version: ${{ steps.setup-env.outputs.go-version }} 117 | - id: go-cache 118 | name: Lookup Go cache directory 119 | run: | 120 | echo "dir=$(go env GOCACHE)" >> $GITHUB_OUTPUT 121 | - uses: actions/cache@v4 122 | env: 123 | BASE_CACHE_KEY: ${{ github.job }}-${{ runner.os }}-\ 124 | py${{ steps.setup-python.outputs.python-version }}-\ 125 | go${{ steps.setup-go.outputs.go-version }}-\ 126 | packer${{ steps.setup-env.outputs.packer-version }}-\ 127 | tf${{ steps.setup-env.outputs.terraform-version }}- 128 | with: 129 | key: ${{ env.BASE_CACHE_KEY }}\ 130 | ${{ hashFiles('**/requirements-test.txt') }}-\ 131 | ${{ hashFiles('**/requirements.txt') }}-\ 132 | ${{ hashFiles('**/.pre-commit-config.yaml') }} 133 | # Note that the .terraform directory IS NOT included in the 134 | # cache because if we were caching, then we would need to use 135 | # the `-upgrade=true` option. This option blindly pulls down the 136 | # latest modules and providers instead of checking to see if an 137 | # update is required. That behavior defeats the benefits of caching. 138 | # so there is no point in doing it for the .terraform directory. 
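# As a rough illustration (hypothetical version numbers), the composed key
# above resolves to something like
#   lint-Linux-py3.12.3-go1.22.1-packer1.10.2-tf1.7.5-<hash>-<hash>-<hash>
# while the restore-keys entry below matches on the BASE_CACHE_KEY prefix
# alone, allowing a partially stale cache to be restored when only the
# hashed requirements or pre-commit configuration files change.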
139 | path: | 140 | ${{ env.PIP_CACHE_DIR }} 141 | ${{ env.PRE_COMMIT_CACHE_DIR }} 142 | ${{ steps.go-cache.outputs.dir }} 143 | restore-keys: | 144 | ${{ env.BASE_CACHE_KEY }} 145 | - uses: hashicorp/setup-packer@v3 146 | with: 147 | version: ${{ steps.setup-env.outputs.packer-version }} 148 | - uses: hashicorp/setup-terraform@v3 149 | with: 150 | terraform_version: ${{ steps.setup-env.outputs.terraform-version }} 151 | - name: Install go-critic 152 | env: 153 | PACKAGE_URL: github.com/go-critic/go-critic/cmd/gocritic 154 | PACKAGE_VERSION: ${{ steps.setup-env.outputs.go-critic-version }} 155 | run: go install ${PACKAGE_URL}@${PACKAGE_VERSION} 156 | - name: Install goimports 157 | env: 158 | PACKAGE_URL: golang.org/x/tools/cmd/goimports 159 | PACKAGE_VERSION: ${{ steps.setup-env.outputs.goimports-version }} 160 | run: go install ${PACKAGE_URL}@${PACKAGE_VERSION} 161 | - name: Install gosec 162 | env: 163 | PACKAGE_URL: github.com/securego/gosec/v2/cmd/gosec 164 | PACKAGE_VERSION: ${{ steps.setup-env.outputs.gosec-version }} 165 | run: go install ${PACKAGE_URL}@${PACKAGE_VERSION} 166 | - name: Install staticcheck 167 | env: 168 | PACKAGE_URL: honnef.co/go/tools/cmd/staticcheck 169 | PACKAGE_VERSION: ${{ steps.setup-env.outputs.staticcheck-version }} 170 | run: go install ${PACKAGE_URL}@${PACKAGE_VERSION} 171 | # TODO: https://github.com/cisagov/skeleton-generic/issues/165 172 | # We are temporarily using @mcdonnnj's forked branch of terraform-docs 173 | # until his PR: https://github.com/terraform-docs/terraform-docs/pull/745 174 | # is approved. This temporary fix will allow for ATX header support when 175 | # terraform-docs is run during linting. 176 | - name: Clone ATX headers branch from terraform-docs fork 177 | run: | 178 | git clone \ 179 | --branch $TERRAFORM_DOCS_REPO_BRANCH_NAME \ 180 | --depth $TERRAFORM_DOCS_REPO_DEPTH \ 181 | --single-branch \ 182 | $TERRAFORM_DOCS_REPO_URL /tmp/terraform-docs 183 | - name: Build and install terraform-docs binary 184 | run: | 185 | go build \ 186 | -C /tmp/terraform-docs \ 187 | -o $(go env GOPATH)/bin/terraform-docs 188 | - name: Install dependencies 189 | run: | 190 | python -m pip install --upgrade pip setuptools wheel 191 | pip install --upgrade --requirement requirements-test.txt 192 | - name: Set up pre-commit hook environments 193 | run: pre-commit install-hooks 194 | - name: Run pre-commit on all files 195 | run: pre-commit run --all-files 196 | - name: Setup tmate debug session 197 | uses: mxschmitt/action-tmate@v3 198 | if: env.RUN_TMATE 199 | -------------------------------------------------------------------------------- /.github/workflows/codeql-analysis.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # For most projects, this workflow file will not need changing; you simply need 3 | # to commit it to your repository. 4 | # 5 | # You may wish to alter this file to override the set of languages analyzed, 6 | # or to provide custom queries or build logic. 7 | name: CodeQL 8 | 9 | # The use of on here as a key is part of the GitHub actions syntax. 10 | # yamllint disable-line rule:truthy 11 | on: 12 | merge_group: 13 | types: 14 | - checks_requested 15 | pull_request: 16 | # The branches here must be a subset of the ones in the push key 17 | branches: 18 | - develop 19 | push: 20 | # Dependabot-triggered push events have read-only access, but uploading code 21 | # scanning requires write access. 
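# (Dependabot pull requests are still analyzed via the pull_request trigger
# above, assuming they target the develop branch as usual, so ignoring these
# push events does not reduce scanning coverage.)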
22 | branches-ignore: 23 | - dependabot/** 24 | schedule: 25 | - cron: 0 2 * * 6 26 | 27 | jobs: 28 | diagnostics: 29 | name: Run diagnostics 30 | # This job does not need any permissions 31 | permissions: {} 32 | runs-on: ubuntu-latest 33 | steps: 34 | # Note that a duplicate of this step must be added at the top of 35 | # each job. 36 | - name: Apply standard cisagov job preamble 37 | uses: cisagov/action-job-preamble@v1 38 | with: 39 | check_github_status: "true" 40 | # This functionality is poorly implemented and has been 41 | # causing problems due to the MITM implementation hogging or 42 | # leaking memory. As a result we disable it by default. If 43 | # you want to temporarily enable it, simply set 44 | # monitor_permissions equal to "true". 45 | # 46 | # TODO: Re-enable this functionality when practical. See 47 | # cisagov/skeleton-generic#207 for more details. 48 | monitor_permissions: "false" 49 | output_workflow_context: "true" 50 | # Use a variable to specify the permissions monitoring 51 | # configuration. By default this will yield the 52 | # configuration stored in the cisagov organization-level 53 | # variable, but if you want to use a different configuration 54 | # then simply: 55 | # 1. Create a repository-level variable with the name 56 | # ACTIONS_PERMISSIONS_CONFIG. 57 | # 2. Set this new variable's value to the configuration you 58 | # want to use for this repository. 59 | # 60 | # Note in particular that changing the permissions 61 | # monitoring configuration *does not* require you to modify 62 | # this workflow. 63 | permissions_monitoring_config: ${{ vars.ACTIONS_PERMISSIONS_CONFIG }} 64 | analyze: 65 | name: Analyze 66 | needs: 67 | - diagnostics 68 | runs-on: ubuntu-latest 69 | permissions: 70 | # actions/checkout needs this to fetch code 71 | contents: read 72 | # required for all workflows 73 | security-events: write 74 | strategy: 75 | fail-fast: false 76 | matrix: 77 | # Override automatic language detection by changing the below 78 | # list 79 | # 80 | # Supported options are actions, c-cpp, csharp, go, 81 | # java-kotlin, javascript-typescript, python, ruby, and swift. 82 | language: 83 | - actions 84 | # Learn more... 85 | # https://docs.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#overriding-automatic-language-detection 86 | 87 | steps: 88 | - name: Apply standard cisagov job preamble 89 | uses: cisagov/action-job-preamble@v1 90 | with: 91 | # This functionality is poorly implemented and has been 92 | # causing problems due to the MITM implementation hogging or 93 | # leaking memory. As a result we disable it by default. If 94 | # you want to temporarily enable it, simply set 95 | # monitor_permissions equal to "true". 96 | # 97 | # TODO: Re-enable this functionality when practical. See 98 | # cisagov/skeleton-generic#207 for more details. 99 | monitor_permissions: "false" 100 | # Use a variable to specify the permissions monitoring 101 | # configuration. By default this will yield the 102 | # configuration stored in the cisagov organization-level 103 | # variable, but if you want to use a different configuration 104 | # then simply: 105 | # 1. Create a repository-level variable with the name 106 | # ACTIONS_PERMISSIONS_CONFIG. 107 | # 2. Set this new variable's value to the configuration you 108 | # want to use for this repository. 109 | # 110 | # Note in particular that changing the permissions 111 | # monitoring configuration *does not* require you to modify 112 | # this workflow. 
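# As an example, one way to create such a repository-level variable is with
# the GitHub CLI (assuming a recent gh release; the value shown is only a
# placeholder):
#
#   gh variable set ACTIONS_PERMISSIONS_CONFIG --body '<your configuration>'
#
# The same variable can also be created under the repository's
# Settings > Secrets and variables > Actions page.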
113 | permissions_monitoring_config: ${{ vars.ACTIONS_PERMISSIONS_CONFIG }} 114 | 115 | - name: Checkout repository 116 | uses: actions/checkout@v4 117 | 118 | # Initializes the CodeQL tools for scanning. 119 | - name: Initialize CodeQL 120 | uses: github/codeql-action/init@v3 121 | with: 122 | languages: ${{ matrix.language }} 123 | 124 | # Autobuild attempts to build any compiled languages (C/C++, C#, or 125 | # Java). If this step fails, then you should remove it and run the build 126 | # manually (see below). 127 | - name: Autobuild 128 | uses: github/codeql-action/autobuild@v3 129 | 130 | # ℹ️ Command-line programs to run using the OS shell. 131 | # 📚 https://git.io/JvXDl 132 | 133 | # ✏️ If the Autobuild fails above, remove it and uncomment the following 134 | # three lines and modify them (or add more) to build your code if your 135 | # project uses a compiled language 136 | 137 | # - run: | 138 | # make bootstrap 139 | # make release 140 | 141 | - name: Perform CodeQL Analysis 142 | uses: github/codeql-action/analyze@v3 143 | -------------------------------------------------------------------------------- /.github/workflows/dependency-review.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Dependency review 3 | 4 | on: # yamllint disable-line rule:truthy 5 | merge_group: 6 | types: 7 | - checks_requested 8 | pull_request: 9 | 10 | # Set a default shell for any run steps. The `-Eueo pipefail` sets errtrace, 11 | # nounset, errexit, and pipefail. The `-x` will print all commands as they are 12 | # run. Please see the GitHub Actions documentation for more information: 13 | # https://docs.github.com/en/actions/using-jobs/setting-default-values-for-jobs 14 | defaults: 15 | run: 16 | shell: bash -Eueo pipefail -x {0} 17 | 18 | jobs: 19 | diagnostics: 20 | name: Run diagnostics 21 | # This job does not need any permissions 22 | permissions: {} 23 | runs-on: ubuntu-latest 24 | steps: 25 | # Note that a duplicate of this step must be added at the top of 26 | # each job. 27 | - name: Apply standard cisagov job preamble 28 | uses: cisagov/action-job-preamble@v1 29 | with: 30 | check_github_status: "true" 31 | # This functionality is poorly implemented and has been 32 | # causing problems due to the MITM implementation hogging or 33 | # leaking memory. As a result we disable it by default. If 34 | # you want to temporarily enable it, simply set 35 | # monitor_permissions equal to "true". 36 | # 37 | # TODO: Re-enable this functionality when practical. See 38 | # cisagov/skeleton-generic#207 for more details. 39 | monitor_permissions: "false" 40 | output_workflow_context: "true" 41 | # Use a variable to specify the permissions monitoring 42 | # configuration. By default this will yield the 43 | # configuration stored in the cisagov organization-level 44 | # variable, but if you want to use a different configuration 45 | # then simply: 46 | # 1. Create a repository-level variable with the name 47 | # ACTIONS_PERMISSIONS_CONFIG. 48 | # 2. Set this new variable's value to the configuration you 49 | # want to use for this repository. 50 | # 51 | # Note in particular that changing the permissions 52 | # monitoring configuration *does not* require you to modify 53 | # this workflow. 
54 | permissions_monitoring_config: ${{ vars.ACTIONS_PERMISSIONS_CONFIG }} 55 | dependency-review: 56 | name: Dependency review 57 | needs: 58 | - diagnostics 59 | permissions: 60 | # actions/checkout needs this to fetch code 61 | contents: read 62 | runs-on: ubuntu-latest 63 | steps: 64 | - name: Apply standard cisagov job preamble 65 | uses: cisagov/action-job-preamble@v1 66 | with: 67 | # This functionality is poorly implemented and has been 68 | # causing problems due to the MITM implementation hogging or 69 | # leaking memory. As a result we disable it by default. If 70 | # you want to temporarily enable it, simply set 71 | # monitor_permissions equal to "true". 72 | # 73 | # TODO: Re-enable this functionality when practical. See 74 | # cisagov/skeleton-generic#207 for more details. 75 | monitor_permissions: "false" 76 | # Use a variable to specify the permissions monitoring 77 | # configuration. By default this will yield the 78 | # configuration stored in the cisagov organization-level 79 | # variable, but if you want to use a different configuration 80 | # then simply: 81 | # 1. Create a repository-level variable with the name 82 | # ACTIONS_PERMISSIONS_CONFIG. 83 | # 2. Set this new variable's value to the configuration you 84 | # want to use for this repository. 85 | # 86 | # Note in particular that changing the permissions 87 | # monitoring configuration *does not* require you to modify 88 | # this workflow. 89 | permissions_monitoring_config: ${{ vars.ACTIONS_PERMISSIONS_CONFIG }} 90 | - id: checkout-repo 91 | name: Checkout the repository 92 | uses: actions/checkout@v4 93 | - id: dependency-review 94 | name: Review dependency changes for vulnerabilities and license changes 95 | uses: actions/dependency-review-action@v4 96 | -------------------------------------------------------------------------------- /.github/workflows/sync-labels.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: sync-labels 3 | 4 | on: # yamllint disable-line rule:truthy 5 | push: 6 | paths: 7 | - .github/labels.yml 8 | - .github/workflows/sync-labels.yml 9 | workflow_dispatch: 10 | 11 | permissions: 12 | contents: read 13 | 14 | jobs: 15 | diagnostics: 16 | name: Run diagnostics 17 | # This job does not need any permissions 18 | permissions: {} 19 | runs-on: ubuntu-latest 20 | steps: 21 | # Note that a duplicate of this step must be added at the top of 22 | # each job. 23 | - name: Apply standard cisagov job preamble 24 | uses: cisagov/action-job-preamble@v1 25 | with: 26 | check_github_status: "true" 27 | # This functionality is poorly implemented and has been 28 | # causing problems due to the MITM implementation hogging or 29 | # leaking memory. As a result we disable it by default. If 30 | # you want to temporarily enable it, simply set 31 | # monitor_permissions equal to "true". 32 | # 33 | # TODO: Re-enable this functionality when practical. See 34 | # cisagov/skeleton-generic#207 for more details. 35 | monitor_permissions: "false" 36 | output_workflow_context: "true" 37 | # Use a variable to specify the permissions monitoring 38 | # configuration. By default this will yield the 39 | # configuration stored in the cisagov organization-level 40 | # variable, but if you want to use a different configuration 41 | # then simply: 42 | # 1. Create a repository-level variable with the name 43 | # ACTIONS_PERMISSIONS_CONFIG. 44 | # 2. Set this new variable's value to the configuration you 45 | # want to use for this repository. 
46 | # 47 | # Note in particular that changing the permissions 48 | # monitoring configuration *does not* require you to modify 49 | # this workflow. 50 | permissions_monitoring_config: ${{ vars.ACTIONS_PERMISSIONS_CONFIG }} 51 | labeler: 52 | needs: 53 | - diagnostics 54 | permissions: 55 | # actions/checkout needs this to fetch code 56 | contents: read 57 | # crazy-max/ghaction-github-labeler needs this to manage repository labels 58 | issues: write 59 | runs-on: ubuntu-latest 60 | steps: 61 | - name: Apply standard cisagov job preamble 62 | uses: cisagov/action-job-preamble@v1 63 | with: 64 | # This functionality is poorly implemented and has been 65 | # causing problems due to the MITM implementation hogging or 66 | # leaking memory. As a result we disable it by default. If 67 | # you want to temporarily enable it, simply set 68 | # monitor_permissions equal to "true". 69 | # 70 | # TODO: Re-enable this functionality when practical. See 71 | # cisagov/skeleton-generic#207 for more details. 72 | monitor_permissions: "false" 73 | # Use a variable to specify the permissions monitoring 74 | # configuration. By default this will yield the 75 | # configuration stored in the cisagov organization-level 76 | # variable, but if you want to use a different configuration 77 | # then simply: 78 | # 1. Create a repository-level variable with the name 79 | # ACTIONS_PERMISSIONS_CONFIG. 80 | # 2. Set this new variable's value to the configuration you 81 | # want to use for this repository. 82 | # 83 | # Note in particular that changing the permissions 84 | # monitoring configuration *does not* require you to modify 85 | # this workflow. 86 | permissions_monitoring_config: ${{ vars.ACTIONS_PERMISSIONS_CONFIG }} 87 | - uses: actions/checkout@v4 88 | - name: Sync repository labels 89 | if: success() 90 | uses: crazy-max/ghaction-github-labeler@v5 91 | with: 92 | # This is a hideous ternary equivalent so we only do a dry run unless 93 | # this workflow is triggered by the develop branch. 94 | dry-run: ${{ github.ref_name == 'develop' && 'false' || 'true' }} 95 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # This file specifies intentionally untracked files that Git should ignore. 2 | # Files already tracked by Git are not affected. 
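# (To stop tracking a file that has already been committed, remove it from
# the index first, e.g. with `git rm --cached <file>`; the matching ignore
# rule then applies to it like any other untracked file.)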
3 | # See: https://git-scm.com/docs/gitignore 4 | 5 | ## Python ## 6 | __pycache__ 7 | .coverage 8 | .mypy_cache 9 | .pytest_cache 10 | .python-version 11 | *.egg-info 12 | -------------------------------------------------------------------------------- /.isort.cfg: -------------------------------------------------------------------------------- 1 | [settings] 2 | combine_star=true 3 | force_sort_within_sections=true 4 | 5 | import_heading_stdlib=Standard Python Libraries 6 | import_heading_thirdparty=Third-Party Libraries 7 | import_heading_firstparty=cisagov Libraries 8 | 9 | # Run isort under the black profile to align with our other Python linting 10 | profile=black 11 | -------------------------------------------------------------------------------- /.mdl_config.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | # Default state for all rules 4 | default: true 5 | 6 | # MD003/heading-style/header-style - Heading style 7 | MD003: 8 | # Enforce the ATX-closed style of header 9 | style: atx_closed 10 | 11 | # MD004/ul-style - Unordered list style 12 | MD004: 13 | # Enforce dashes for unordered lists 14 | style: dash 15 | 16 | # MD013/line-length - Line length 17 | MD013: 18 | # Do not enforce for code blocks 19 | code_blocks: false 20 | # Do not enforce for tables 21 | tables: false 22 | 23 | # MD024/no-duplicate-heading/no-duplicate-header - Multiple headings with the 24 | # same content 25 | MD024: 26 | # Allow headers with the same content as long as they are not in the same 27 | # parent heading 28 | allow_different_nesting: true 29 | 30 | # MD029/ol-prefix - Ordered list item prefix 31 | MD029: 32 | # Enforce the `1.` style for ordered lists 33 | style: one 34 | 35 | # MD033/no-inline-html - Inline HTML 36 | MD033: 37 | # The h1 and img elements are allowed to permit header images 38 | allowed_elements: 39 | - h1 40 | - img 41 | 42 | # MD035/hr-style - Horizontal rule style 43 | MD035: 44 | # Enforce dashes for horizontal rules 45 | style: --- 46 | 47 | # MD046/code-block-style - Code block style 48 | MD046: 49 | # Enforce the fenced style for code blocks 50 | style: fenced 51 | 52 | # MD049/emphasis-style - Emphasis style should be consistent 53 | MD049: 54 | # Enforce asterisks as the style to use for emphasis 55 | style: asterisk 56 | 57 | # MD050/strong-style - Strong style should be consistent 58 | MD050: 59 | # Enforce asterisks as the style to use for strong 60 | style: asterisk 61 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | default_language_version: 3 | # force all unspecified python hooks to run python3 4 | python: python3 5 | 6 | repos: 7 | # Check the pre-commit configuration 8 | - repo: meta 9 | hooks: 10 | - id: check-useless-excludes 11 | 12 | - repo: https://github.com/pre-commit/pre-commit-hooks 13 | rev: v5.0.0 14 | hooks: 15 | - id: check-case-conflict 16 | - id: check-executables-have-shebangs 17 | - id: check-json 18 | - id: check-merge-conflict 19 | - id: check-shebang-scripts-are-executable 20 | - id: check-symlinks 21 | - id: check-toml 22 | - id: check-vcs-permalinks 23 | - id: check-xml 24 | - id: debug-statements 25 | - id: destroyed-symlinks 26 | - id: detect-aws-credentials 27 | args: 28 | - --allow-missing-credentials 29 | exclude: config/gitleaks.toml 30 | - id: detect-private-key 31 | exclude: config/gitleaks.toml 32 | - id: end-of-file-fixer 33 | 
- id: mixed-line-ending 34 | args: 35 | - --fix=lf 36 | - id: pretty-format-json 37 | args: 38 | - --autofix 39 | - id: requirements-txt-fixer 40 | - id: trailing-whitespace 41 | 42 | # Text file hooks 43 | - repo: https://github.com/igorshubovych/markdownlint-cli 44 | rev: v0.44.0 45 | hooks: 46 | - id: markdownlint 47 | args: 48 | - --config=.mdl_config.yaml 49 | - repo: https://github.com/rbubley/mirrors-prettier 50 | rev: v3.5.3 51 | hooks: 52 | - id: prettier 53 | - repo: https://github.com/adrienverge/yamllint 54 | rev: v1.37.0 55 | hooks: 56 | - id: yamllint 57 | args: 58 | - --strict 59 | 60 | # GitHub Actions hooks 61 | - repo: https://github.com/python-jsonschema/check-jsonschema 62 | rev: 0.32.1 63 | hooks: 64 | - id: check-github-actions 65 | - id: check-github-workflows 66 | 67 | # pre-commit hooks 68 | - repo: https://github.com/pre-commit/pre-commit 69 | rev: v4.2.0 70 | hooks: 71 | - id: validate_manifest 72 | 73 | # Go hooks 74 | - repo: https://github.com/TekWizely/pre-commit-golang 75 | rev: v1.0.0-rc.1 76 | hooks: 77 | # Go Build 78 | - id: go-build-repo-mod 79 | # Style Checkers 80 | - id: go-critic 81 | # goimports 82 | - id: go-imports-repo 83 | args: 84 | # Write changes to files 85 | - -w 86 | # Go Mod Tidy 87 | - id: go-mod-tidy-repo 88 | # GoSec 89 | - id: go-sec-repo-mod 90 | # StaticCheck 91 | - id: go-staticcheck-repo-mod 92 | # Go Test 93 | - id: go-test-repo-mod 94 | # Go Vet 95 | - id: go-vet-repo-mod 96 | # Nix hooks 97 | - repo: https://github.com/nix-community/nixpkgs-fmt 98 | rev: v1.3.0 99 | hooks: 100 | - id: nixpkgs-fmt 101 | 102 | # Shell script hooks 103 | - repo: https://github.com/scop/pre-commit-shfmt 104 | rev: v3.11.0-1 105 | hooks: 106 | - id: shfmt 107 | args: 108 | # List files that will be formatted 109 | - --list 110 | # Write result to file instead of stdout 111 | - --write 112 | # Indent by two spaces 113 | - --indent 114 | - "2" 115 | # Binary operators may start a line 116 | - --binary-next-line 117 | # Switch cases are indented 118 | - --case-indent 119 | # Redirect operators are followed by a space 120 | - --space-redirects 121 | - repo: https://github.com/shellcheck-py/shellcheck-py 122 | rev: v0.10.0.1 123 | hooks: 124 | - id: shellcheck 125 | 126 | # Python hooks 127 | - repo: https://github.com/PyCQA/bandit 128 | rev: 1.8.3 129 | hooks: 130 | - id: bandit 131 | args: 132 | - --config=.bandit.yml 133 | - repo: https://github.com/psf/black-pre-commit-mirror 134 | rev: 25.1.0 135 | hooks: 136 | - id: black 137 | - repo: https://github.com/PyCQA/flake8 138 | rev: 7.1.2 139 | hooks: 140 | - id: flake8 141 | additional_dependencies: 142 | - flake8-docstrings==1.7.0 143 | - repo: https://github.com/PyCQA/isort 144 | rev: 6.0.1 145 | hooks: 146 | - id: isort 147 | - repo: https://github.com/pre-commit/mirrors-mypy 148 | rev: v1.15.0 149 | hooks: 150 | - id: mypy 151 | additional_dependencies: 152 | - types-PyYAML 153 | - types-requests 154 | - repo: https://github.com/pypa/pip-audit 155 | rev: v2.8.0 156 | hooks: 157 | - id: pip-audit 158 | args: 159 | # Add any pip requirements files to scan 160 | - --requirement 161 | - requirements-dev.txt 162 | - --requirement 163 | - requirements-test.txt 164 | - --requirement 165 | - requirements.txt 166 | - repo: https://github.com/asottile/pyupgrade 167 | rev: v3.19.1 168 | hooks: 169 | - id: pyupgrade 170 | 171 | # Ansible hooks 172 | - repo: https://github.com/ansible/ansible-lint 173 | rev: v25.4.0 174 | hooks: 175 | - id: ansible-lint 176 | additional_dependencies: 177 | # On its own 
ansible-lint does not pull in ansible, only 178 | # ansible-core. Therefore, if an Ansible module lives in 179 | # ansible instead of ansible-core, the linter will complain 180 | # that the module is unknown. In these cases it is 181 | # necessary to add the ansible package itself as an 182 | # additional dependency, with the same pinning as is done in 183 | # requirements-test.txt of cisagov/skeleton-ansible-role. 184 | # 185 | # Version 10 is required because the pip-audit pre-commit 186 | # hook identifies a vulnerability in ansible-core 2.16.13, 187 | # but all versions of ansible 9 have a dependency on 188 | # ~=2.16.X. 189 | # 190 | # It is also a good idea to go ahead and upgrade to version 191 | # 10 since version 9 is going EOL at the end of November: 192 | # https://endoflife.date/ansible 193 | # - ansible>=10,<11 194 | # ansible-core 2.16.3 through 2.16.6 suffer from the bug 195 | # discussed in ansible/ansible#82702, which breaks any 196 | # symlinked files in vars, tasks, etc. for any Ansible role 197 | # installed via ansible-galaxy. Hence we never want to 198 | # install those versions. 199 | # 200 | # Note that the pip-audit pre-commit hook identifies a 201 | # vulnerability in ansible-core 2.16.13. The pin of 202 | # ansible-core to >=2.17 effectively also pins ansible to 203 | # >=10. 204 | # 205 | # It is also a good idea to go ahead and upgrade to 206 | # ansible-core 2.17 since security support for ansible-core 207 | # 2.16 ends this month: 208 | # https://docs.ansible.com/ansible/devel/reference_appendices/release_and_maintenance.html#ansible-core-support-matrix 209 | # 210 | # Note that any changes made to this dependency must also be 211 | # made in requirements.txt in cisagov/skeleton-packer and 212 | # requirements-test.txt in cisagov/skeleton-ansible-role. 213 | - ansible-core>=2.17 214 | 215 | # Terraform hooks 216 | - repo: https://github.com/antonbabenko/pre-commit-terraform 217 | rev: v1.98.0 218 | hooks: 219 | - id: terraform_fmt 220 | - id: terraform_validate 221 | 222 | # Docker hooks 223 | - repo: https://github.com/IamTheFij/docker-pre-commit 224 | rev: v3.0.1 225 | hooks: 226 | - id: docker-compose-check 227 | 228 | # Packer hooks 229 | - repo: https://github.com/cisagov/pre-commit-packer 230 | rev: v0.3.0 231 | hooks: 232 | - id: packer_fmt 233 | - id: packer_validate 234 | -------------------------------------------------------------------------------- /.prettierignore: -------------------------------------------------------------------------------- 1 | # Already being linted by pretty-format-json 2 | *.json 3 | # Already being linted by mdl 4 | *.md 5 | # Already being linted by yamllint 6 | *.yaml 7 | *.yml 8 | -------------------------------------------------------------------------------- /.yamllint: -------------------------------------------------------------------------------- 1 | --- 2 | extends: default 3 | 4 | rules: 5 | braces: 6 | # Do not allow non-empty flow mappings 7 | forbid: non-empty 8 | # Allow up to one space inside braces. This is required for Ansible compatibility. 9 | max-spaces-inside: 1 10 | 11 | brackets: 12 | # Do not allow non-empty flow sequences 13 | forbid: non-empty 14 | 15 | comments: 16 | # Ensure that inline comments have at least one space before the preceding content. 17 | # This is required for Ansible compatibility. 18 | min-spaces-from-content: 1 19 | 20 | # yamllint does not like it when you comment out different parts of 21 | # dictionaries in a list. 
You can see 22 | # https://github.com/adrienverge/yamllint/issues/384 for some examples of 23 | # this behavior. 24 | comments-indentation: disable 25 | 26 | indentation: 27 | # Ensure that block sequences inside of a mapping are indented 28 | indent-sequences: true 29 | # Enforce a specific number of spaces 30 | spaces: 2 31 | 32 | # yamllint does not allow inline mappings that exceed the line length by 33 | # default. There are many scenarios where the inline mapping may be a key, 34 | # hash, or other long value that would exceed the line length but cannot 35 | # reasonably be broken across lines. 36 | line-length: 37 | # This rule implies the allow-non-breakable-words rule 38 | allow-non-breakable-inline-mappings: true 39 | # Allows a 10% overage from the default limit of 80 40 | max: 88 41 | 42 | # Using anything other than strings to express octal values can lead to unexpected 43 | # and potentially unsafe behavior. Ansible strongly recommends against such practices 44 | # and these rules are needed for Ansible compatibility. Please see the following for 45 | # more information: 46 | # https://ansible.readthedocs.io/projects/lint/rules/risky-octal/ 47 | octal-values: 48 | # Do not allow explicit octal values (those beginning with a leading 0o). 49 | forbid-explicit-octal: true 50 | # Do not allow implicit octal values (those beginning with a leading 0). 51 | forbid-implicit-octal: true 52 | 53 | quoted-strings: 54 | # Allow disallowed quotes (single quotes) for strings that contain allowed quotes 55 | # (double quotes). 56 | allow-quoted-quotes: true 57 | # Apply these rules to keys in mappings as well 58 | check-keys: true 59 | # We prefer double quotes for strings when they are needed 60 | quote-type: double 61 | # Only require quotes when they are necessary for proper processing 62 | required: only-when-needed 63 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Welcome # 2 | 3 | We're so glad you're thinking about contributing to this open source 4 | project! If you're unsure or afraid of anything, just ask or submit 5 | the issue or pull request anyway. The worst that can happen is that 6 | you'll be politely asked to change something. We appreciate any sort 7 | of contribution, and don't want a wall of rules to get in the way of 8 | that. 9 | 10 | Before contributing, we encourage you to read our CONTRIBUTING policy 11 | (you are here), our [LICENSE](LICENSE), and our [README](README.md), 12 | all of which should be in this repository. 13 | 14 | ## Issues ## 15 | 16 | If you want to report a bug or request a new feature, the most direct 17 | method is to [create an 18 | issue](https://github.com/cisagov/development-guide/issues) in this 19 | repository. We recommend that you first search through existing 20 | issues (both open and closed) to check if your particular issue has 21 | already been reported. If it has then you might want to add a comment 22 | to the existing issue. If it hasn't then feel free to create a new 23 | one. 24 | 25 | ## Pull requests ## 26 | 27 | If you choose to [submit a pull 28 | request](https://github.com/cisagov/development-guide/pulls), you will 29 | notice that our continuous integration (CI) system runs a fairly 30 | extensive set of linters and syntax checkers. Your pull request may 31 | fail these checks, and that's OK. 
If you want you can stop there and 32 | wait for us to make the necessary corrections to ensure your code 33 | passes the CI checks. 34 | 35 | If you want to make the changes yourself, or if you want to become a 36 | regular contributor, then you will want to set up 37 | [pre-commit](https://pre-commit.com/) on your local machine. Once you 38 | do that, the CI checks will run locally before you even write your 39 | commit message. This speeds up your development cycle considerably. 40 | 41 | ### Setting up pre-commit ### 42 | 43 | There are a few ways to do this, but we prefer to use 44 | [`pyenv`](https://github.com/pyenv/pyenv) and 45 | [`pyenv-virtualenv`](https://github.com/pyenv/pyenv-virtualenv) to 46 | create and manage a Python virtual environment specific to this 47 | project. 48 | 49 | We recommend using the `setup-env` script located in this repository, 50 | as it automates the entire environment configuration process. The 51 | dependencies required to run this script are 52 | [GNU `getopt`](https://github.com/util-linux/util-linux/blob/master/misc-utils/getopt.1.adoc), 53 | [`pyenv`](https://github.com/pyenv/pyenv), and [`pyenv-virtualenv`](https://github.com/pyenv/pyenv-virtualenv). 54 | If these tools are already configured on your system, you can simply run the 55 | following command: 56 | 57 | ```console 58 | ./setup-env 59 | ``` 60 | 61 | Otherwise, follow the steps below to manually configure your 62 | environment. 63 | 64 | #### Installing and using GNU `getopt`, `pyenv`, and `pyenv-virtualenv` #### 65 | 66 | On macOS, we recommend installing [brew](https://brew.sh/). Then 67 | installation is as simple as `brew install gnu-getopt pyenv pyenv-virtualenv` and 68 | adding this to your profile: 69 | 70 | ```bash 71 | # GNU getopt must be explicitly added to the path since it is 72 | # keg-only (https://docs.brew.sh/FAQ#what-does-keg-only-mean) 73 | export PATH="$(brew --prefix)/opt/gnu-getopt/bin:$PATH" 74 | 75 | # Setup pyenv 76 | export PYENV_ROOT="$HOME/.pyenv" 77 | export PATH="$PYENV_ROOT/bin:$PATH" 78 | eval "$(pyenv init --path)" 79 | eval "$(pyenv init -)" 80 | eval "$(pyenv virtualenv-init -)" 81 | ``` 82 | 83 | For Linux, Windows Subsystem for Linux (WSL), or macOS (if you 84 | don't want to use `brew`) you can use 85 | [pyenv/pyenv-installer](https://github.com/pyenv/pyenv-installer) to 86 | install the necessary tools. Before running this ensure that you have 87 | installed the prerequisites for your platform according to the 88 | [`pyenv` wiki 89 | page](https://github.com/pyenv/pyenv/wiki/common-build-problems). 90 | GNU `getopt` is included in most Linux distributions as part of the 91 | [`util-linux`](https://github.com/util-linux/util-linux) package. 92 | 93 | On WSL you should treat your platform as whatever Linux distribution 94 | you've chosen to install. 95 | 96 | Once you have installed `pyenv` you will need to add the following 97 | lines to your `.bash_profile` (or `.profile`): 98 | 99 | ```bash 100 | export PYENV_ROOT="$HOME/.pyenv" 101 | export PATH="$PYENV_ROOT/bin:$PATH" 102 | eval "$(pyenv init --path)" 103 | ``` 104 | 105 | and then add the following lines to your `.bashrc`: 106 | 107 | ```bash 108 | eval "$(pyenv init -)" 109 | eval "$(pyenv virtualenv-init -)" 110 | ``` 111 | 112 | If you want more information about setting up `pyenv` once installed, please run 113 | 114 | ```console 115 | pyenv init 116 | ``` 117 | 118 | and 119 | 120 | ```console 121 | pyenv virtualenv-init 122 | ``` 123 | 124 | for the current configuration instructions. 
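As a quick sanity check (assuming a standard installation), you can confirm
that both `pyenv` and the `pyenv-virtualenv` plugin respond once your shell
has picked up these changes:

```console
pyenv --version
pyenv virtualenvs
```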
125 | 126 | If you are using a shell other than `bash` you should follow the 127 | instructions that the `pyenv-installer` script outputs. 128 | 129 | You will need to reload your shell for these changes to take effect so 130 | you can begin to use `pyenv`. 131 | 132 | For a list of Python versions that are already installed and ready to 133 | use with `pyenv`, use the command `pyenv versions`. To see a list of 134 | the Python versions available to be installed and used with `pyenv` 135 | use the command `pyenv install --list`. You can read more 136 | [here](https://github.com/pyenv/pyenv/blob/master/COMMANDS.md) about 137 | the many things that `pyenv` can do. See 138 | [here](https://github.com/pyenv/pyenv-virtualenv#usage) for the 139 | additional capabilities that pyenv-virtualenv adds to the `pyenv` 140 | command. 141 | 142 | #### Creating the Python virtual environment #### 143 | 144 | Once `pyenv` and `pyenv-virtualenv` are installed on your system, you 145 | can create and configure the Python virtual environment with these 146 | commands: 147 | 148 | ```console 149 | cd development-guide 150 | pyenv virtualenv development-guide 151 | pyenv local development-guide 152 | pip install --requirement requirements-dev.txt 153 | ``` 154 | 155 | #### Installing the pre-commit hook #### 156 | 157 | Now setting up pre-commit is as simple as: 158 | 159 | ```console 160 | pre-commit install 161 | ``` 162 | 163 | At this point the pre-commit checks will run against any files that 164 | you attempt to commit. If you want to run the checks against the 165 | entire repo, just execute `pre-commit run --all-files`. 166 | 167 | ## Quality assurance and code reviews ## 168 | 169 | In order to maintain standardization of practices, ensure security standards 170 | are being met, and to incorporate third party code as seamlessly as possible, 171 | all submitted code will go through our quality assurance (QA) team. 172 | 173 | Code contributors are able to coordinate with the QA team at any point during 174 | the contribution process. We recommend initiating the discussions as early as 175 | possible, to decrease the likelihood of issues around merging or using the 176 | contributed code occurring late in the process. However, the QA team is not 177 | responsible for the success of the project or for ensuring that all team 178 | members follow the development standards which have been established. Any 179 | discussions or initial inputs are a courtesy evaluation, and contributing teams 180 | remain responsible for the quality of their code, internal coordination, and 181 | alignment with the standards set forth in this guide. 182 | 183 | The type of contribution being made (e.g. typo corrections vs. a new repository), 184 | complexity of code change (e.g. adding a new test vs. adding a new function), and 185 | the testability of the code (e.g. well-documented and replicable) will factor 186 | into the level of interaction needed with the QA team. 187 | 188 | ## Public domain ## 189 | 190 | This project is in the public domain within the United States, and 191 | copyright and related rights in the work worldwide are waived through 192 | the [CC0 1.0 Universal public domain 193 | dedication](https://creativecommons.org/publicdomain/zero/1.0/). 194 | 195 | All contributions to this project will be released under the CC0 196 | dedication. By submitting a pull request, you are agreeing to comply 197 | with this waiver of copyright interest. 
198 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | CC0 1.0 Universal 2 | 3 | Statement of Purpose 4 | 5 | The laws of most jurisdictions throughout the world automatically confer 6 | exclusive Copyright and Related Rights (defined below) upon the creator and 7 | subsequent owner(s) (each and all, an "owner") of an original work of 8 | authorship and/or a database (each, a "Work"). 9 | 10 | Certain owners wish to permanently relinquish those rights to a Work for the 11 | purpose of contributing to a commons of creative, cultural and scientific 12 | works ("Commons") that the public can reliably and without fear of later 13 | claims of infringement build upon, modify, incorporate in other works, reuse 14 | and redistribute as freely as possible in any form whatsoever and for any 15 | purposes, including without limitation commercial purposes. These owners may 16 | contribute to the Commons to promote the ideal of a free culture and the 17 | further production of creative, cultural and scientific works, or to gain 18 | reputation or greater distribution for their Work in part through the use and 19 | efforts of others. 20 | 21 | For these and/or other purposes and motivations, and without any expectation 22 | of additional consideration or compensation, the person associating CC0 with a 23 | Work (the "Affirmer"), to the extent that he or she is an owner of Copyright 24 | and Related Rights in the Work, voluntarily elects to apply CC0 to the Work 25 | and publicly distribute the Work under its terms, with knowledge of his or her 26 | Copyright and Related Rights in the Work and the meaning and intended legal 27 | effect of CC0 on those rights. 28 | 29 | 1. Copyright and Related Rights. A Work made available under CC0 may be 30 | protected by copyright and related or neighboring rights ("Copyright and 31 | Related Rights"). Copyright and Related Rights include, but are not limited 32 | to, the following: 33 | 34 | i. the right to reproduce, adapt, distribute, perform, display, communicate, 35 | and translate a Work; 36 | 37 | ii. moral rights retained by the original author(s) and/or performer(s); 38 | 39 | iii. publicity and privacy rights pertaining to a person's image or likeness 40 | depicted in a Work; 41 | 42 | iv. rights protecting against unfair competition in regards to a Work, 43 | subject to the limitations in paragraph 4(a), below; 44 | 45 | v. rights protecting the extraction, dissemination, use and reuse of data in 46 | a Work; 47 | 48 | vi. database rights (such as those arising under Directive 96/9/EC of the 49 | European Parliament and of the Council of 11 March 1996 on the legal 50 | protection of databases, and under any national implementation thereof, 51 | including any amended or successor version of such directive); and 52 | 53 | vii. other similar, equivalent or corresponding rights throughout the world 54 | based on applicable law or treaty, and any national implementations thereof. 55 | 56 | 2. Waiver. 
To the greatest extent permitted by, but not in contravention of, 57 | applicable law, Affirmer hereby overtly, fully, permanently, irrevocably and 58 | unconditionally waives, abandons, and surrenders all of Affirmer's Copyright 59 | and Related Rights and associated claims and causes of action, whether now 60 | known or unknown (including existing as well as future claims and causes of 61 | action), in the Work (i) in all territories worldwide, (ii) for the maximum 62 | duration provided by applicable law or treaty (including future time 63 | extensions), (iii) in any current or future medium and for any number of 64 | copies, and (iv) for any purpose whatsoever, including without limitation 65 | commercial, advertising or promotional purposes (the "Waiver"). Affirmer makes 66 | the Waiver for the benefit of each member of the public at large and to the 67 | detriment of Affirmer's heirs and successors, fully intending that such Waiver 68 | shall not be subject to revocation, rescission, cancellation, termination, or 69 | any other legal or equitable action to disrupt the quiet enjoyment of the Work 70 | by the public as contemplated by Affirmer's express Statement of Purpose. 71 | 72 | 3. Public License Fallback. Should any part of the Waiver for any reason be 73 | judged legally invalid or ineffective under applicable law, then the Waiver 74 | shall be preserved to the maximum extent permitted taking into account 75 | Affirmer's express Statement of Purpose. In addition, to the extent the Waiver 76 | is so judged Affirmer hereby grants to each affected person a royalty-free, 77 | non transferable, non sublicensable, non exclusive, irrevocable and 78 | unconditional license to exercise Affirmer's Copyright and Related Rights in 79 | the Work (i) in all territories worldwide, (ii) for the maximum duration 80 | provided by applicable law or treaty (including future time extensions), (iii) 81 | in any current or future medium and for any number of copies, and (iv) for any 82 | purpose whatsoever, including without limitation commercial, advertising or 83 | promotional purposes (the "License"). The License shall be deemed effective as 84 | of the date CC0 was applied by Affirmer to the Work. Should any part of the 85 | License for any reason be judged legally invalid or ineffective under 86 | applicable law, such partial invalidity or ineffectiveness shall not 87 | invalidate the remainder of the License, and in such case Affirmer hereby 88 | affirms that he or she will not (i) exercise any of his or her remaining 89 | Copyright and Related Rights in the Work or (ii) assert any associated claims 90 | and causes of action with respect to the Work, in either case contrary to 91 | Affirmer's express Statement of Purpose. 92 | 93 | 4. Limitations and Disclaimers. 94 | 95 | a. No trademark or patent rights held by Affirmer are waived, abandoned, 96 | surrendered, licensed or otherwise affected by this document. 97 | 98 | b. Affirmer offers the Work as-is and makes no representations or warranties 99 | of any kind concerning the Work, express, implied, statutory or otherwise, 100 | including without limitation warranties of title, merchantability, fitness 101 | for a particular purpose, non infringement, or the absence of latent or 102 | other defects, accuracy, or the present or absence of errors, whether or not 103 | discoverable, all to the greatest extent permissible under applicable law. 104 | 105 | c. 
Affirmer disclaims responsibility for clearing rights of other persons 106 | that may apply to the Work or any use thereof, including without limitation 107 | any person's Copyright and Related Rights in the Work. Further, Affirmer 108 | disclaims responsibility for obtaining any necessary consents, permissions 109 | or other rights required for any use of the Work. 110 | 111 | d. Affirmer understands and acknowledges that Creative Commons is not a 112 | party to this document and has no duty or obligation with respect to this 113 | CC0 or use of the Work. 114 | 115 | For more information, please see 116 | 117 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # development-guide # 2 | 3 | [![GitHub Build Status](https://github.com/cisagov/development-guide/workflows/build/badge.svg)](https://github.com/cisagov/development-guide/actions) 4 | 5 | A set of guidelines and best practices for an awesome engineering team. 6 | Heavily "inspired" by the 7 | [18F Development Guide](https://github.com/18f/development-guide). 8 | 9 | - [Open Source Policy](/open-source-policy) 10 | 11 | - [Project Setup](/project_setup) 12 | - [Development Environments](/dev_envs) 13 | - [CISA-provided Mac Setup](/dev_envs/mac-env-setup.md) 14 | - Language Guides 15 | - HCL 16 | - JavaScript 17 | - [Python](/languages/python) 18 | 19 | ## Git, GitHub, and you ## 20 | 21 | - [FISMA-Ready GitHub account setup](https://github.com/fisma-ready/github) 22 | - [How to Write a Git Commit Message](https://chris.beams.io/posts/git-commit/#seven-rules) 23 | - [How to Review Code Effectively](https://github.blog/developer-skills/github/how-to-review-code-effectively-a-github-staff-engineers-philosophy/) 24 | 25 | ## Tools and services we use ## 26 | 27 | - [AWS](https://aws.amazon.com) - Our main cloud provider 28 | - [CodeQL](https://codeql.github.com) - Code analysis engine developed by GitHub 29 | to automate security checks and help prevent critical vulnerabilities. 30 | Replaces [LGTM](https://lgtm.com). 31 | - [Coveralls](https://coveralls.io/github/cisagov) - Test coverage tracking 32 | - [Dependabot](https://docs.github.com/en/code-security/dependabot) - 33 | Monitor vulnerabilities in dependencies and keep dependencies 34 | up-to-date 35 | - [GitHub Actions](https://github.com/features/actions) - 36 | Continuous integration and delivery 37 | - [PyPi](https://pypi.org/search/?q=cisagov) - Python package publication 38 | 39 | ## Installation ## 40 | 41 | This guide has several supporting Python scripts. The simplest way to install these 42 | scripts and their dependencies is to use `pip`. In the root of this project execute: 43 | 44 | `pip install -r requirements.txt` 45 | 46 | Please see the 47 | [Creating the Python virtual environment](CONTRIBUTING.md#creating-the-python-virtual-environment) 48 | section of the [CONTRIBUTING](CONTRIBUTING.md) document for 49 | information about setting up these scripts in a Python virtual environment. 50 | 51 | ## Contributing ## 52 | 53 | We welcome contributions! Please see [`CONTRIBUTING.md`](CONTRIBUTING.md) for 54 | details. 55 | 56 | ## License ## 57 | 58 | This project is in the worldwide [public domain](LICENSE). 
59 | 60 | This project is in the public domain within the United States, and 61 | copyright and related rights in the work worldwide are waived through 62 | the [CC0 1.0 Universal public domain 63 | dedication](https://creativecommons.org/publicdomain/zero/1.0/). 64 | 65 | All contributions to this project will be released under the CC0 66 | dedication. By submitting a pull request, you are agreeing to comply 67 | with this waiver of copyright interest. 68 | -------------------------------------------------------------------------------- /bump-version: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # bump-version [--push] [--label LABEL] (major | minor | patch | prerelease | build | finalize | show) 4 | # bump-version --list-files 5 | 6 | set -o nounset 7 | set -o errexit 8 | set -o pipefail 9 | 10 | # Stores the canonical version for the project. 11 | VERSION_FILE=version.txt 12 | # Files that should be updated with the new version. 13 | VERSION_FILES=("$VERSION_FILE") 14 | 15 | USAGE=$( 16 | cat << END_OF_LINE 17 | Update the version of the project. 18 | 19 | Usage: 20 | ${0##*/} [--push] [--label LABEL] (major | minor | patch | prerelease | build | finalize | show) 21 | ${0##*/} --list-files 22 | ${0##*/} (-h | --help) 23 | 24 | Options: 25 | -h | --help Show this message. 26 | --push Perform a \`git push\` after updating the version. 27 | --label LABEL Specify the label to use when updating the build or prerelease version. 28 | --list-files List the files that will be updated when the version is bumped. 29 | END_OF_LINE 30 | ) 31 | 32 | old_version=$(< "$VERSION_FILE") 33 | # Comment out periods so they are interpreted as periods and don't 34 | # just match any character 35 | old_version_regex=${old_version//\./\\\.} 36 | new_version="$old_version" 37 | 38 | bump_part="" 39 | label="" 40 | commit_prefix="Bump" 41 | with_push=false 42 | commands_with_label=("build" "prerelease") 43 | commands_with_prerelease=("major" "minor" "patch") 44 | with_prerelease=false 45 | 46 | ####################################### 47 | # Display an error message, the help information, and exit with a non-zero status. 48 | # Arguments: 49 | # Error message. 50 | ####################################### 51 | function invalid_option() { 52 | echo "$1" 53 | echo "$USAGE" 54 | exit 1 55 | } 56 | 57 | ####################################### 58 | # Bump the version using the provided command. 59 | # Arguments: 60 | # The version to bump. 61 | # The command to bump the version. 62 | # Returns: 63 | # The new version. 64 | ####################################### 65 | function bump_version() { 66 | local temp_version 67 | temp_version=$(python -c "import semver; print(semver.parse_version_info('$1').${2})") 68 | echo "$temp_version" 69 | } 70 | 71 | if [ $# -eq 0 ]; then 72 | echo "$USAGE" 73 | exit 1 74 | else 75 | while [ $# -gt 0 ]; do 76 | case $1 in 77 | --push) 78 | if [ "$with_push" = true ]; then 79 | invalid_option "Push has already been set." 80 | fi 81 | 82 | with_push=true 83 | shift 84 | ;; 85 | --label) 86 | if [ -n "$label" ]; then 87 | invalid_option "Label has already been set." 88 | fi 89 | 90 | label="$2" 91 | shift 2 92 | ;; 93 | build | finalize | major | minor | patch) 94 | if [ -n "$bump_part" ]; then 95 | invalid_option "Only one version part should be bumped at a time." 
96 | fi 97 | 98 | bump_part="$1" 99 | shift 100 | ;; 101 | prerelease) 102 | with_prerelease=true 103 | shift 104 | ;; 105 | show) 106 | echo "$old_version" 107 | exit 0 108 | ;; 109 | -h | --help) 110 | echo "$USAGE" 111 | exit 0 112 | ;; 113 | --list-files) 114 | printf '%s\n' "${VERSION_FILES[@]}" 115 | exit 0 116 | ;; 117 | *) 118 | invalid_option "Invalid option: $1" 119 | ;; 120 | esac 121 | done 122 | fi 123 | 124 | if [ -n "$label" ] && [ "$with_prerelease" = false ] && [[ ! " ${commands_with_label[*]} " =~ [[:space:]]${bump_part}[[:space:]] ]]; then 125 | invalid_option "Setting the label is only allowed for the following commands: ${commands_with_label[*]}" 126 | fi 127 | 128 | if [ "$with_prerelease" = true ] && [ -n "$bump_part" ] && [[ ! " ${commands_with_prerelease[*]} " =~ [[:space:]]${bump_part}[[:space:]] ]]; then 129 | invalid_option "Changing the prerelease is only allowed in conjunction with the following commands: ${commands_with_prerelease[*]}" 130 | fi 131 | 132 | label_option="" 133 | if [ -n "$label" ]; then 134 | label_option="token='$label'" 135 | fi 136 | 137 | if [ -n "$bump_part" ]; then 138 | if [ "$bump_part" = "finalize" ]; then 139 | commit_prefix="Finalize" 140 | bump_command="finalize_version()" 141 | elif [ "$bump_part" = "build" ]; then 142 | bump_command="bump_${bump_part}($label_option)" 143 | else 144 | bump_command="bump_${bump_part}()" 145 | fi 146 | new_version=$(bump_version "$old_version" "$bump_command") 147 | echo Changing version from "$old_version" to "$new_version" 148 | fi 149 | 150 | if [ "$with_prerelease" = true ]; then 151 | bump_command="bump_prerelease($label_option)" 152 | temp_version=$(bump_version "$new_version" "$bump_command") 153 | echo Changing version from "$new_version" to "$temp_version" 154 | new_version="$temp_version" 155 | fi 156 | 157 | tmp_file=/tmp/version.$$ 158 | for version_file in "${VERSION_FILES[@]}"; do 159 | if [ ! -f "$version_file" ]; then 160 | echo Missing expected file: "$version_file" 161 | exit 1 162 | fi 163 | sed "s/$old_version_regex/$new_version/" "$version_file" > $tmp_file 164 | mv $tmp_file "$version_file" 165 | done 166 | 167 | git add "${VERSION_FILES[@]}" 168 | git commit --message "$commit_prefix version from $old_version to $new_version" 169 | 170 | if [ "$with_push" = true ]; then 171 | git push 172 | fi 173 | -------------------------------------------------------------------------------- /config/coordinating-skeleton-updates.md: -------------------------------------------------------------------------------- 1 | # Coordinating Skeleton Updates # 2 | 3 | When Lineage runs to push out skeleton updates to its descendent 4 | repositories, this often results in generating tens of PRs to review. To 5 | minimize this Kraken-like behavior and reduce the burden on team of so many 6 | PR reviews, we try to coordinate updates to the skeletons in batches. 7 | 8 | 9 | [Lineage GitHub Action]: https://github.com/cisagov/action-lineage/actions?query=workflow%3Alineage_scan 10 | 11 | ## Checking the Lineage GitHub Action ## 12 | 13 | First, check the status of the [Lineage GitHub Action]. 14 | If it's about to run, you'll see the latest entry has a run time of roughly 15 | `1 hour ago`. 16 | 17 | If you want multiple PRs to be released in one Lineage run, so as to minimize 18 | the Kraken of PRs to approve, wait until the next run completes and then start 19 | the merge process. 
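If you prefer the command line to the Actions web page, the same check can be
made with the GitHub CLI. This is only a convenience sketch; it assumes you
have `gh` installed and authenticated, and the workflow name simply mirrors
the `lineage_scan` query used in the link above:

```sh
# Show the most recent runs of the Lineage workflow so you can see whether a
# run is currently in progress and how long ago the last one started.
gh run list --repo cisagov/action-lineage --workflow lineage_scan --limit 3
```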
20 | 21 | You'll also want to start from the top of the skeleton hierarchy and 22 | merge from `skeleton-generic` first, then once Lineage has run, do the same 23 | into the next level of skeletons, and so on until you just have a boatload 24 | of PRs to review. 25 | 26 | ## Merging ## 27 | 28 | The part that takes the longest is waiting for the checks to complete as you 29 | update each PR from its newly-updated `develop` branch before merging. 30 | 31 | Wait until the [Lineage GitHub Action] has completed, then: 32 | 33 | 1. Merge one PR into the skeleton repository 34 | 1. Go to the next PR and click `Merge branch 'develop' into ` - 35 | you can also rebase from the command-line if you prefer 36 | 1. Wait for tests to complete and merge this PR 37 | 1. Repeat the preceding steps for the next batched PR until all PRs are merged 38 | 1. Wait for Lineage to run and then review and merge the resulting PRs 39 | 40 | ## Schedule ## 41 | 42 | Generally, we schedule the batching and coordinate to review and merge 43 | efficiently. To do so, create a team calendar invite a la `Unleash the Kraken` 44 | and start adding links to PRs that belong in that batch. 45 | -------------------------------------------------------------------------------- /config/gitleaks-guide.md: -------------------------------------------------------------------------------- 1 | # Check for PII and credentials with `gitleaks` # 2 | 3 | This guide describes how to check a codebase for PII (personally identifiable 4 | information) or credentials that may have inadvertently been committed. 5 | 6 | This process is based on [`caulking`](https://github.com/cloud-gov/caulking) 7 | by 18F's cloud.gov. 8 | 9 | Please note that our [gitleaks configuration file](gitleaks.toml) results in a 10 | number of false positives for things like code referencing "user_name" etc. 11 | 12 | Results should be reviewed by humans. 13 | 14 | ## Installation ## 15 | 16 | This assumes you are on MacOS with [HomeBrew](https://brew.sh) installed. 17 | 18 | - Install `gitleaks` via `brew install gitleaks` 19 | - Add the pattern configuration file `gitleaks.toml` somewhere like 20 | `$HOME/.git-support/gitleaks.toml` 21 | 22 | ## Usage ## 23 | 24 | To check the commit history of a locally checked-out repository once you've 25 | finished installation, you can use the `--repo-path=` option. 26 | 27 | Useful flags: 28 | 29 | - `--config-path=` specifies the configuration file to use 30 | - `--path=` specifies a local repository folder to check 31 | - `--verbose` gives you more details about each item found 32 | - `--pretty` formats the output so it's more human-readable 33 | 34 | ```sh 35 | gitleaks --config-path=$HOME/.git-support/gitleaks.toml --path= --verbose --pretty 36 | ``` 37 | 38 | Without the `--verbose` flag, you'll get output with totals but not details, 39 | such as: 40 | 41 | ```console 42 | WARN[2020-12-08T10:47:03-05:00] 7 leaks detected. 24 commits scanned in 325 milliseconds 936 microseconds 43 | ``` 44 | 45 | If you want to save the console output to file so you can review it at your 46 | leisure, use the `>` syntax and specify a location and filename. 47 | 48 | ```sh 49 | gitleaks --config=$HOME/.git-support/gitleaks.toml --repo-path= --verbose --pretty > ~/gitleaks.txt 50 | ``` 51 | 52 | You can then open the file and review the results for credential inclusions 53 | and false positives. 
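If you need to audit several locally checked-out repositories at once, a small
wrapper loop can produce one report per repository for later review. The loop
below is only an illustrative sketch rather than part of this guide's tooling:
it assumes the flag names shown in the examples above, that the pattern file
lives at `$HOME/.git-support/gitleaks.toml`, and that your checkouts live
under `$HOME/projects`; adjust the paths to match your own setup.

```sh
# Scan every repository cloned under ~/projects and save one report apiece.
mkdir -p "$HOME/gitleaks-reports"
for repo in "$HOME"/projects/*/; do
  name=$(basename "$repo")
  # gitleaks exits non-zero when it finds leaks, so don't stop the loop.
  gitleaks --config-path="$HOME/.git-support/gitleaks.toml" \
    --path="$repo" --verbose --pretty \
    > "$HOME/gitleaks-reports/$name.txt" || true
done
```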
54 | -------------------------------------------------------------------------------- /config/gitleaks.toml: -------------------------------------------------------------------------------- 1 | title = "gitleaks config" 2 | 3 | # Rules borrowed from GSA 4 | # https://github.com/GSA/odp-code-repository-commit-rules/blob/master/gitleaks/rules.toml 5 | 6 | # If IPv4 is overbroad, cloud.gov external IPs may all be nonsensitive 7 | # and we can change match to (10|172|192). in the first octet. 8 | #[[rules]] 9 | # description = "IPv4 addresses" 10 | # regex = '''\b(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\b''' 11 | # tags = ["IPv4", "IP", "addresses"] 12 | # [rules.allowlist] 13 | # regexes = [ 14 | # '''(169.254.169.254|127.0.0.\d+|23.22.13.113)''', # 23.22.13.113 is gsa.gov 15 | # '''0\.(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){2}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)''' # OK to start w/ 0 16 | # ] 17 | 18 | [[rules]] 19 | description = "yaml secrets" 20 | regex = '''(?i)(password|enc.key|auth.pass):\s+(.*)''' 21 | file = '''(?i)(\.yml|\.yaml)''' 22 | tags = ["yaml"] 23 | [rules.allowlist] 24 | description = "ignore substituted values and examples" 25 | regexes = ['''(\(\(.*\)\)|\{\{.*\}\})'''] 26 | 27 | [[rules]] 28 | description = "Email except non-pii business email or test emails" 29 | regex = '''(.{0,48}?)[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,4}''' 30 | tags = ["email"] 31 | [rules.allowlist] 32 | regexes = ['''(?i)@(github.com|test.org)''', 33 | '''(?i)@(beta|cisa|gwe.cisa|hq|trio).dhs.gov''', 34 | '''(?i)(Author|Copyright|Contact|MAINTAINER)'''] 35 | pathes = ['''Godeps._workspace'''] 36 | 37 | # Rules from original `leaky-repo.toml` v4.1.1 38 | # https://raw.githubusercontent.com/zricethezav/gitleaks/master/examples/leaky-repo.toml 39 | # with the following rule sets removed: 40 | # - Email 41 | 42 | [[rules]] 43 | description = "AWS Manager ID" 44 | regex = '''(A3T[A-Z0-9]|AKIA|AGPA|AIDA|AROA|AIPA|ANPA|ANVA|ASIA)[A-Z0-9]{16}''' 45 | tags = ["key", "AWS"] 46 | 47 | [[rules]] 48 | description = "AWS cred file info" 49 | regex = '''(?i)(aws_access_key_id|aws_secret_access_key)(.{0,20})?=.[0-9a-zA-Z\/+]{20,40}''' 50 | tags = ["AWS"] 51 | 52 | [[rules]] 53 | description = "AWS Secret Key" 54 | regex = '''(?i)aws(.{0,20})?(?-i)['\"][0-9a-zA-Z\/+]{40}['\"]''' 55 | tags = ["key", "AWS"] 56 | 57 | [[rules]] 58 | description = "AWS MWS key" 59 | regex = '''amzn\.mws\.[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}''' 60 | tags = ["key", "AWS", "MWS"] 61 | 62 | [[rules]] 63 | description = "Facebook Secret Key" 64 | regex = '''(?i)(facebook|fb)(.{0,20})?(?-i)['\"][0-9a-f]{32}['\"]''' 65 | tags = ["key", "Facebook"] 66 | 67 | [[rules]] 68 | description = "Facebook Client ID" 69 | regex = '''(?i)(facebook|fb)(.{0,20})?['\"][0-9]{13,17}['\"]''' 70 | tags = ["key", "Facebook"] 71 | 72 | [[rules]] 73 | description = "Twitter Secret Key" 74 | regex = '''(?i)twitter(.{0,20})?['\"][0-9a-z]{35,44}['\"]''' 75 | tags = ["key", "Twitter"] 76 | 77 | [[rules]] 78 | description = "Twitter Client ID" 79 | regex = '''(?i)twitter(.{0,20})?['\"][0-9a-z]{18,25}['\"]''' 80 | tags = ["client", "Twitter"] 81 | 82 | [[rules]] 83 | description = "Github" 84 | regex = '''(?i)github(.{0,20})?(?-i)['\"][0-9a-zA-Z]{35,40}['\"]''' 85 | tags = ["key", "Github"] 86 | 87 | [[rules]] 88 | description = "LinkedIn Client ID" 89 | regex = '''(?i)linkedin(.{0,20})?(?-i)['\"][0-9a-z]{12}['\"]''' 90 | tags = ["client", "LinkedIn"] 91 | 92 | [[rules]] 93 | 
description = "LinkedIn Secret Key" 94 | regex = '''(?i)linkedin(.{0,20})?['\"][0-9a-z]{16}['\"]''' 95 | tags = ["secret", "LinkedIn"] 96 | 97 | [[rules]] 98 | description = "Slack" 99 | regex = '''xox[baprs]-([0-9a-zA-Z]{10,48})?''' 100 | tags = ["key", "Slack"] 101 | 102 | [[rules]] 103 | description = "EC" 104 | regex = '''-----BEGIN EC PRIVATE KEY-----''' 105 | tags = ["key", "EC"] 106 | 107 | 108 | [[rules]] 109 | description = "Google API key" 110 | regex = '''AIza[0-9A-Za-z\\-_]{35}''' 111 | tags = ["key", "Google"] 112 | 113 | 114 | [[rules]] 115 | description = "Heroku API key" 116 | regex = '''(?i)heroku(.{0,20})?['"][0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}['"]''' 117 | tags = ["key", "Heroku"] 118 | 119 | [[rules]] 120 | description = "MailChimp API key" 121 | regex = '''(?i)(mailchimp|mc)(.{0,20})?['"][0-9a-f]{32}-us[0-9]{1,2}['"]''' 122 | tags = ["key", "Mailchimp"] 123 | 124 | [[rules]] 125 | description = "Mailgun API key" 126 | regex = '''(?i)(mailgun|mg)(.{0,20})?['"][0-9a-z]{32}['"]''' 127 | tags = ["key", "Mailgun"] 128 | 129 | [[rules]] 130 | description = "PayPal Braintree access token" 131 | regex = '''access_token\$production\$[0-9a-z]{16}\$[0-9a-f]{32}''' 132 | tags = ["key", "Paypal"] 133 | 134 | [[rules]] 135 | description = "Picatic API key" 136 | regex = '''sk_live_[0-9a-z]{32}''' 137 | tags = ["key", "Picatic"] 138 | 139 | [[rules]] 140 | description = "Slack Webhook" 141 | regex = '''https://hooks.slack.com/services/T[a-zA-Z0-9_]{8}/B[a-zA-Z0-9_]{8}/[a-zA-Z0-9_]{24}''' 142 | tags = ["key", "slack"] 143 | 144 | [[rules]] 145 | description = "Stripe API key" 146 | regex = '''(?i)stripe(.{0,20})?['\"][sk|rk]_live_[0-9a-zA-Z]{24}''' 147 | tags = ["key", "Stripe"] 148 | 149 | [[rules]] 150 | description = "Square access token" 151 | regex = '''sq0atp-[0-9A-Za-z\-_]{22}''' 152 | tags = ["key", "square"] 153 | 154 | [[rules]] 155 | description = "Square OAuth secret" 156 | regex = '''sq0csp-[0-9A-Za-z\\-_]{43}''' 157 | tags = ["key", "square"] 158 | 159 | [[rules]] 160 | description = "Twilio API key" 161 | regex = '''(?i)twilio(.{0,20})?['\"][0-9a-f]{32}['\"]''' 162 | tags = ["key", "twilio"] 163 | 164 | [[rules]] 165 | description = "Env Var" 166 | regex = '''(?i)(apikey|secret|key|api|password|pass|pw|host)=[0-9a-zA-Z-_.{}]{4,120}''' 167 | 168 | [[rules]] 169 | description = "Port" 170 | regex = '''(?i)port(.{0,4})?[0-9]{1,10}''' 171 | [rules.allowlist] 172 | regexes = ['''(?i)port '''] 173 | description = "ignore export " 174 | 175 | [[rules]] 176 | description = "Generic Credential" 177 | regex = '''(?i)(dbpasswd|dbuser|dbname|dbhost|api_key|apikey|secret|key|api|password|user|guid|hostname|pw|auth)(.{0,20})?['|"]([0-9a-zA-Z-_\/+!{}/=]{4,120})['|"]''' 178 | tags = ["key", "API", "generic"] 179 | # ignore leaks with specific identifiers like slack and aws 180 | [rules.allowlist] 181 | description = "ignore slack, mailchimp, aws" 182 | regexes = [ 183 | '''xox[baprs]-([0-9a-zA-Z]{10,48})''', 184 | '''(?i)(.{0,20})?['"][0-9a-f]{32}-us[0-9]{1,2}['"]''', 185 | '''(A3T[A-Z0-9]|AKIA|AGPA|AIDA|AROA|AIPA|ANPA|ANVA|ASIA)[A-Z0-9]{16}''', 186 | '''(user\[|user\'|user_id)''', 187 | ] 188 | 189 | [[rules]] 190 | description = "High Entropy" 191 | regex = '''[0-9a-zA-Z-_!{}/=]{4,120}''' 192 | file = '''(?i)(dump.sql|high-entropy-misc.txt)$''' 193 | tags = ["entropy"] 194 | [[rules.Entropies]] 195 | Min = "4.3" 196 | Max = "7.0" 197 | [rules.allowlist] 198 | description = "ignore ssh key and pems" 199 | files = ['''(pem|ppk|env)$'''] 200 | paths = ['''(.*)?ssh'''] 201 | 
202 | [[rules]] 203 | description = "Potential bash var" 204 | regex='''(?i)(=)([0-9a-zA-Z-_!{}=]{4,120})''' 205 | tags = ["key", "bash", "API", "generic"] 206 | [[rules.Entropies]] 207 | Min = "3.5" 208 | Max = "4.5" 209 | Group = "1" 210 | 211 | [[rules]] 212 | description = "WP-Config" 213 | regex='''define(.{0,20})?(DB_CHARSET|NONCE_SALT|LOGGED_IN_SALT|AUTH_SALT|NONCE_KEY|DB_HOST|DB_PASSWORD|AUTH_KEY|SECURE_AUTH_KEY|LOGGED_IN_KEY|DB_NAME|DB_USER)(.{0,20})?['|"].{10,120}['|"]''' 214 | tags = ["key", "API", "generic"] 215 | 216 | [[rules]] 217 | description = "Files with keys and credentials" 218 | file = '''(?i)(id_rsa|passwd|id_rsa.pub|pgpass|pem|key|shadow)''' 219 | 220 | # Global allowlist - only one entry with allowlist is allowed 221 | [allowlist] 222 | description = "images, rules and testing allowlists" 223 | files = ['''(.*?)(jpg|gif|doc|pdf|bin)$''', '''development.bats''', '''caulked.bats''', '''local.toml'''] 224 | regexes = ['''CHANGEME|changeme|feedabee|EXAMPLE|23.22.13.113|1234567890'''] 225 | -------------------------------------------------------------------------------- /dev_envs/README.md: -------------------------------------------------------------------------------- 1 | # Personal Development Environments # 2 | 3 | ## Mac ## 4 | 5 | - [Mac Dev Environment Setup](mac-env-setup.md) 6 | 7 | ## Linux ## 8 | 9 | ## Other ## 10 | -------------------------------------------------------------------------------- /dev_envs/mac-env-setup.md: -------------------------------------------------------------------------------- 1 | # Setting up a Mac-based development environment # 2 | 3 | ## Initial account setup (as administrator) ## 4 | 5 | For this section, log in as your administrator account. 6 | 7 | ### Account permissions ### 8 | 9 | You’ll need to allow your standard user access to run `sudo` commands. 10 | First, you’ll want to take note of your account name by viewing the folders 11 | listed inside of the `/Users/` folder - in this example, the account 12 | name is **username**. 13 | 14 | ```console 15 | $ ls /Users/ 16 | username Shared fedadmin 17 | ``` 18 | 19 | Then, you’ll add your **username** to the `/etc/sudoers` file using `visudo`: 20 | 21 | ```console 22 | $ sudo visudo 23 | ... 24 | ## 25 | # User specification 26 | ## 27 | 28 | # root and users in group wheel can run anything on any machine as any user 29 | root ALL = (ALL) ALL 30 | %admin ALL = (ALL) ALL 31 | username ALL = (ALL) ALL 32 | ``` 33 | 34 | Press `Esc` followed by `:wq` to save and quit `visudo`. 35 | 36 | ### Install XCode command line tools ### 37 | 38 | Download XCode and accept the license agreement, then install the XCode 39 | command line tools. 40 | 41 | ```console 42 | sudo xcodebuild -license 43 | xcode-select --install 44 | ``` 45 | 46 | ### Docker ### 47 | 48 | Download the latest stable [Docker for Mac](https://www.docker.com/docker-mac). 49 | Install it by double-clicking the downloaded `dmg` file and dragging the 50 | Docker application file to the `Applications` folder. 51 | 52 | ### iTerm2 (recommended) ### 53 | 54 | To set up iTerm2 - a macOS terminal replacement with some nifty features - 55 | visit the [iTerm2 download page](https://www.iterm2.com/downloads.html) to 56 | get the latest stable release. Install it by double-clicking the downloaded 57 | `zip` file and dragging the extracted `iTerm` application file to the 58 | `Applications` folder. 59 | 60 | Once installed, run the app and select the menu option 61 | `iTerm2 > Make iTerm2 Default Term`. 
62 | 63 | > **Note** 64 | > You may also `Make iTerm2 Default Term` on your standard user account. 65 | 66 | ### VPN configuration ### 67 | 68 | Once you're set up with the lab (CAL) VPN connection, we recommend you set up 69 | split-tunneling. Instructions and a generator script are located in the 70 | [vpn-config-gen repository](https://github.com/cisagov/vpn-config-gen). 71 | 72 | ## User environment setup ## 73 | 74 | For this section and following sections, log back in as your standard user. 75 | 76 | > **Note** 77 | > If you installed iTerm2 above, run the iTerm2 app and select the menu option 78 | > `iTerm2 > Make iTerm2 Default Term` to make it your default too. 79 | 80 | ### Automatic package installation (recommended) ### 81 | 82 | NOTE (DEC 2022): The [CISA `laptop` script] is out of date but still 83 | functional. 84 | 85 | To set up a dev environment via the [CISA `laptop` script], execute 86 | the following in your terminal: 87 | 88 | ```bash 89 | bash <(curl --silent https://raw.githubusercontent.com/cisagov/laptop/master/laptop) 90 | ``` 91 | 92 | This will `curl` the current install files and script. You can specify 93 | additional packages to install by creating a `Brewfile.local`, as described in 94 | the Readme for the `laptop` repo where an example is provided. 95 | 96 | ### Manual package installation (alternative) ### 97 | 98 | Open Terminal and install `brew` per the instructions from 99 | [Homebrew](https://brew.sh). You will probably have to do this with `sudo`. 100 | 101 | ```console 102 | /bin/bash -c "$(curl --fail --silent --show-error --location https://raw.githubusercontent.com/Homebrew/install/master/install.sh)" 103 | ``` 104 | 105 | After installing Brew, you'll want to install other useful 106 | packages. We recommend installing all the packages specified in the 107 | [CISA `laptop` script repository](https://github.com/cisagov/laptop/blob/master/Brewfile). 108 | 109 | ### Environment configuration ### 110 | 111 | For portability between computers and environments, you may want to switch 112 | from the usual dotfiles setup to using `stow` to be able to use a personal 113 | repository and then map your dotfiles to that directory. This makes it much 114 | easier to sync and set up. 115 | 116 | Using a `stow`-based setup also allows for some nifty features like 117 | subdirectories to split out useful files into a more modular approach. 118 | 119 | #### Prerequisites #### 120 | 121 | These are automatically installed if you used the 122 | [automatic package installation](#automatic-package-installation-recommended) 123 | script above. 124 | 125 | ```console 126 | brew install figlet pinentry-mac s3cmd stow 127 | ``` 128 | 129 | #### Install #### 130 | 131 | Fork [cisagov's `.dotfiles` repo](https://github.com/cisagov/.dotfiles) and 132 | clone it as shown below. Then, stow all the packages from `.dotfiles` into 133 | your home directory per the [`.dotfiles` README](https://github.com/cisagov/.dotfiles). 134 | 135 | ```console 136 | cd ~ 137 | git clone git@github.com:/.dotfiles.git 138 | cd ~/.dotfiles 139 | ./deploy.sh 140 | ``` 141 | 142 | ### Customize your user ### 143 | 144 | You'll want to make some changes, e.g. username/email should be set to your 145 | own - you can do this by editing the files in your new `~/.dotfiles` folder. 146 | 147 | > **Note** 148 | > You don't need to rerun `stow` or `deploy.sh` after this because your 149 | > files are already symlinked. 
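As a purely illustrative example, once you have edited your fork, the `[user]`
section of the `.gitconfig` tracked in your `.dotfiles` might look something
like the following (the name and email are placeholders; substitute your own
CISA or GWE details):

```ini
[user]
    name = Jane Developer
    email = jane.developer@cisa.dhs.gov
```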
150 | 151 | #### Set up your favorite IDE #### 152 | 153 | - Install your favorite IDE and set up its preferences to your liking 154 | - For Python development, please set up `black` for opinionated linting so 155 | your code formatting will match the rest of the codebase 156 | - Note: the pre-commit hooks will automatically run `black` on commit 157 | 158 | #### Configure `git` #### 159 | 160 | To allow access to the saved macOS Keychain credential for command line 161 | usage, run: 162 | 163 | ```console 164 | git config --global credential.helper osxkeychain 165 | ``` 166 | 167 | #### Set up commit signing with GPG #### 168 | 169 | Generate a key to sign your git commits and add it to your `~/.gitconfig`: 170 | 171 | ```console 172 | gpg --gen-key 173 | ``` 174 | 175 | Follow the prompts for name and email address, using either your CISA 176 | or GWE email address. The output should look like: 177 | 178 | ```console 179 | We need to generate a lot of random bytes. It is a good idea to perform 180 | some other action (type on the keyboard, move the mouse, utilize the 181 | disks) during the prime generation; this gives the random number 182 | generator a better chance to gain enough entropy. 183 | gpg: key EXAMPLE1234ABCDE marked as ultimately trusted 184 | gpg: revocation certificate stored as '/Users/username/path/to/.gnupg' 185 | public and secret key created and signed. 186 | 187 | pub rsa3072 YYYY-MM-DD [SC] [expires: YYYY-MM-DD] 188 | 189 | uid User Name 190 | sub rsa3072 YYYY-MM-DD [E] [expires: YYYY-MM-DD] 191 | ``` 192 | 193 | The alphanumeric string from the key generation output line that says 194 | `gpg: key EXAMPLE1234ABCDE marked as ultimately trusted` is used as 195 | your **signing key**. 196 | 197 | Now that your new key has been generated, add it to your `~/.gitconfig`: 198 | 199 | 1. Fill your `name` and `email`, using the same values as above 200 | 1. Copy the alphanumeric string from the key generation output line that says 201 | `gpg: key EXAMPLE1234ABCDE marked as ultimately trusted` 202 | 1. Fill the alphanumeric signing key string in the `[user]` section: 203 | `signingkey = EXAMPLE1234ABCDE` 204 | 205 | Next, export the public key via command line and add it to your GitHub account: 206 | 207 | 1. `gpg --armor --export user@example.com` 208 | 1. Copy the output, starting from `-----BEGIN PGP PUBLIC KEY BLOCK-----` 209 | 1. Add a new PGP key to your [GitHub keys page](https://github.com/settings/keys) 210 | 1. Paste the public key into the dialog and `Add GPG key` 211 | 1. Confirm your password to continue (if requested) 212 | 213 | Lastly, enable the global configuration option for commit signing: 214 | 215 | ```console 216 | git config --global commit.gpgsign true 217 | ``` 218 | 219 | #### Generate `ssh` key to use git on the command line #### 220 | 221 | Reference: [GitHub: Generating a new SSH key](https://docs.github.com/en/github/authenticating-to-github/generating-a-new-ssh-key-and-adding-it-to-the-ssh-agent#adding-your-ssh-key-to-the-ssh-agent) 222 | 223 | > **Warning** 224 | > Use `ed25519` instead of `rsa`! 225 | 226 | ```console 227 | ssh-keygen -t ed25519 228 | ``` 229 | 230 | 1. Save the generated key in the default location (e.g. `~/.ssh/id_ed25519`) 231 | 1. Use a passphrase you'll remember and/or save in your keychain 232 | 1. 
Start the `ssh-agent` in the background and add your new credential: 233 | 234 | ```console 235 | eval "$(ssh-agent -s)" 236 | ssh-add -K ~/.ssh/id_ed25519 237 | ``` 238 | 239 | #### Add ssh key to GitHub #### 240 | 241 | Reference: [GitHub: Adding a new SSH key to your GitHub account](https://docs.github.com/en/github/authenticating-to-github/adding-a-new-ssh-key-to-your-github-account) 242 | 243 | > **Warning** 244 | > Use `ed25519` instead of `rsa`! 245 | 246 | ```console 247 | # Copy the contents of the id_ed25519.pub file to your clipboard 248 | $ pbcopy < ~/.ssh/id_ed25519.pub 249 | ``` 250 | 251 | 1. Load your [GitHub Settings page for adding an SSH key](https://github.com/settings/ssh/new) 252 | and paste the contents of your clipboard 253 | 1. Give this device a descriptive name and `Add SSH key` 254 | 1. Confirm your password to continue (if requested) 255 | 256 | ## Setup for the COOL ## 257 | 258 | There are some additional setup steps to access the 259 | [COOL (Cloud-Optimized Operations Lab)](https://github.com/cisagov/cool-system/): 260 | 261 | > **Note** 262 | > These steps are only necessary if you want to manually configure these vs. 263 | > using the Jamf-pushed configuration. 264 | 265 | 1. [Install MIT Kerberos](https://github.com/cisagov/cool-system-internal/blob/master/Installing-MIT-Kerberos.md) 266 | - NOTE: If you chose Automatic Installation above, this step was already 267 | performed by the `laptop` script 268 | 1. [Configuring Kerberos for the COOL](https://github.com/cisagov/cool-system-internal/blob/master/Configuring-Kerberos-for-the-COOL.md) 269 | 1. [Configuring your browser for the COOL](https://github.com/cisagov/cool-system-internal/blob/master/Configuring-your-browser-for-the-COOL.md) 270 | 271 | After you have successfully completed the tasks above, follow 272 | [these instructions to access Guacamole](https://github.com/cisagov/cool-system-internal/blob/master/Accessing-an-assessment-environment-with-Guacamole.md). 273 | 274 | > **Note** 275 | > You will need to have been given access to one or more COOL environments 276 | > to be able to access anything with Guacamole. 277 | 278 | [CISA `laptop` script]: https://github.com/cisagov/laptop/ 279 | -------------------------------------------------------------------------------- /ethos/README.md: -------------------------------------------------------------------------------- 1 | # Development Ethos # 2 | 3 | *On its way!* 4 | -------------------------------------------------------------------------------- /languages/python/README.md: -------------------------------------------------------------------------------- 1 | # Python Development Guide # 2 | 3 | This document is structured by topic; under each, we include “Standards”, 4 | “Defaults”, and “Suggestions”. 5 | 6 | **Standards** are practices that have a strong consensus across CISA; they 7 | should generally be followed. 8 | 9 | **Defaults** are safe selections that tend to be used by a large number of our 10 | projects; you may find yourself with a better or more tailored solution, 11 | however. 12 | 13 | **Suggestions** contain examples that have worked well on a project or two; 14 | they're not widely used enough to be defaults, but are worth considering. 15 | 16 | ## Versions ## 17 | 18 | We've **standardized** on Python 3.x over 2.x. All new projects should begin 19 | their life in 3.x. When starting a Python project, select the latest Python 20 | release available in `pyenv` and incrementally update as new releases are issued. 
21 | 22 | `pyenv install --list` 23 | 24 | ## Style ## 25 | 26 | Our **standard** tool for ensuring consistency across Python code bases is 27 | [black](https://github.com/psf/black). Our 28 | [Python skeleton project](https://github.com/cisagov/skeleton-python-library) 29 | contains a set of **default** linters and syntax checkers. See the 30 | [`CONTRIBUTING`](https://github.com/cisagov/skeleton-python-library/blob/develop/CONTRIBUTING.md) 31 | file of this project for the most up-to-date information. 32 | 33 | ## Correctness and security ## 34 | 35 | Our **standard** code vulnerability scanners in cisagov are 36 | [CodeQL](https://codeql.github.com/) and 37 | [Dependabot](https://docs.github.com/en/code-security/dependabot). By 38 | **default** all repositories in the organization are scanned by these 39 | tools. 40 | 41 | ## Libraries ## 42 | 43 | The Python ecosystem is large and full of alternative solutions to similar 44 | problems. Here we document a few common use cases and the libraries we 45 | recommend when trying to solve them. 46 | 47 | | Purpose | Library | Conviction | 48 | | --- | --- | --- | 49 | | API (GraphQL) | [Flask-GraphQL] | Suggestion | 50 | | API (REST) | [Django Rest Framework] | Suggestion | 51 | | Argument Parser | [docopt](http://docopt.org) | Standard | 52 | | Data Analysis | [Pandas](https://pandas.pydata.org) | Standard | 53 | | HTTP Client | [Requests](https://requests.readthedocs.io/) | Standard | 54 | | Input Validation | [schema](https://github.com/keleshev/schema) | Standard | 55 | | Mongo ORM | [mongoengine](http://mongoengine.org) | Suggestion | 56 | | Task Queue | [Celery](http://www.celeryproject.org/) | Suggestion | 57 | | Test Coverage | [Coveralls] | Standard | 58 | | Test Runner | [py.test](https://docs.pytest.org/en/latest/) | Standard | 59 | | Web framework | [Flask](https://palletsprojects.com/p/flask/) | Default | 60 | 61 | ## Type support ## 62 | 63 | Python 3.5 and beyond have had partial support for static type hints. Static 64 | typing can both make code authors' intent clearer and reduce the number of 65 | bugs through static analysis. It's also notorious for slowing down the pace of 66 | prototyping and requiring a great deal of boiler-plate. 67 | 68 | Given this state, we believe it's reasonable to **default** to using type 69 | annotations when they make your intent clearer (i.e. as a form of 70 | documentation). The static analysis tool 71 | [mypy](http://mypy.readthedocs.io/en/latest/) is one of our **default** linters. 72 | 73 | [Coveralls]: http://github.com/coveralls-clients/coveralls-python 74 | [Django Rest Framework]: http://www.django-rest-framework.org/ 75 | [Flask-GraphQL]: https://github.com/graphql-python/flask-graphql 76 | -------------------------------------------------------------------------------- /onboarding-checklist.md: -------------------------------------------------------------------------------- 1 | # Onboarding Checklist # 2 | 3 | ## Instructions ## 4 | 5 | When someone new joins the team: 6 | 7 | 1. Create a new issue called "Onboard *Name*" from this template. 8 | 1. Remove any block of tasks that doesn't apply to the new team member's role. 9 | 1. Assign the new person's onboarding buddy to this issue. 10 | 1. Remove this `Instructions` block. 11 | 1. Submit this issue. 12 | 13 | --- 14 | 15 | ## Directions ## 16 | 17 | **Onboardee and buddy:** Try to go through your checklists in order. 
18 | 19 | **Buddy:** If you can’t complete any of the items on your checklist 20 | personally, *you are responsible for ensuring that someone with the 21 | correct access completes that item.* 22 | 23 | ## Onboardee checklist ## 24 | 25 | ### Getting to know VM Fusion ### 26 | 27 | - [ ] Take notes on anything that is confusing or frustrating during your 28 | onboarding process. 29 | - Please share this information with your buddy and the team so we can 30 | make things better. 31 | - If you notice a problem (especially with things like documentation), you 32 | are more than welcome to fix it! 33 | - You can also file issues and pull requests against the [template 34 | onboarding checklist]. 35 | - [ ] Meet with your onboarding buddy (they should reach out to you). 36 | - [ ] Once you've finished the checklists below, make suggestions for steps 37 | that would have improved your onboarding experience as pull requests on the 38 | [template onboarding checklist] used to make this issue. 39 | 40 | ### Required items for all team members ### 41 | 42 | Completing these items helps us fulfill security and compliance requirements. 43 | If you get stuck, or if these requirements are confusing, ask for help from 44 | your onboarding buddy. 45 | 46 | #### GitHub #### 47 | 48 | - [ ] If you don't already have a GitHub account, 49 | [create a GitHub account with your government email address](https://github.com/join). 50 | - [ ] If you already have a GitHub account, [add your government email address](https://docs.github.com/en/free-pro-team@latest/github/setting-up-and-managing-your-github-user-account/adding-an-email-address-to-your-github-account). 51 | - [ ] Go through the [FISMA-Ready GitHub guide](https://github.com/fisma-ready/github) 52 | to set up your GitHub account to be added to CISA. 53 | - [ ] Once your GitHub account has been added to the `cisagov` organization, 54 | make sure you're assigned to this issue. 55 | 56 | #### Mandatory training #### 57 | 58 | - [ ] Complete Cybersecurity Training at the CISA Course Management 59 | website, including Mandatory Cyber Security and Privacy 60 | Training, and acceptance of the Rules of Behavior, which are 61 | both required before we can give you access to any systems, 62 | including CISA GWE (Google Workspace Enterprise). 63 | 64 | ### CISA Google Workspace Enterprise ### 65 | 66 | In addition to your CISA email address `first.last@cisa.dhs.gov`, 67 | you'll also get an account on CISA GWE, which is a Google Workspace 68 | account used for cross-team and cross-agency collaboration and is 69 | generally `first.last@gwe.cisa.dhs.gov`. Your CISA GWE account may 70 | take a few days to get set up. 71 | 72 | Once your CISA GWE account is set up, ask a dev team member to help 73 | you perform the following: 74 | 75 | - [ ] Join the appropriate rooms in Google Chat. 76 | - [ ] Join the appropriate Google Groups so you can participate in 77 | team-wide internal communication. 78 | - [ ] Get access to the Google Drive dev team share. If you create or 79 | move a doc there, it'll get the right access permissions for 80 | team members to be able to view and edit it. 81 | - [ ] Get access to the dev team's shared Google Calendar. 
82 | 83 | ## Useful links ## 84 | 85 | - [ ] [NCATS Data Dictionary](https://github.com/cisagov/ncats-data-dictionary) 86 | repo for information about what's stored in the CyHy environment MongoDB databases 87 | - [ ] [COOL Wiki](https://github.com/cisagov/cool-system-internal) - 88 | Although they do not currently display as a wiki because this is 89 | a private repository (for historical reasons; ask another dev 90 | for the backstory), this collection of Markdown pages was 91 | exported from what was once an actual GitHub wiki in a public 92 | repository. We maintain these pages in the hope that they can 93 | again be made public one day. 94 | - [ ] [CyHy Wiki](https://github.com/cisagov/cyhy-system/wiki/) 95 | - [ ] Set up your [environment on your CISA-provided Mac laptop](/dev_envs/mac-env-setup.md). 96 | 97 | ## Buddy checklist ## 98 | 99 | - [ ] Introduce yourself to the new team member and talk about your 100 | background so they know who you are. 101 | - [ ] Identify a straightforward and well-defined first issue, ideally 102 | something the new team member could conceivably complete in 103 | their first two/three weeks using their existing skills. Discuss 104 | the context with them, then make them the assignee. 105 | - [ ] Discuss suggestions for how the onboarding experience could have 106 | been improved and open as PRs on the [template onboarding 107 | checklist]. 108 | - [ ] Invite the new team member to all relevant recurring meetings. 109 | - [ ] Invite the new team member to all relevant [GitHub cisagov 110 | teams](https://github.com/orgs/cisagov/teams/). 111 | 112 | 113 | [template onboarding checklist]: https://github.com/cisagov/development-guide/blob/master/onboarding-checklist.md 114 | -------------------------------------------------------------------------------- /open-source-policy/README.md: -------------------------------------------------------------------------------- 1 | # CISA Open Source Policy # 2 | 3 | In general, we work in the open by default. 4 | 5 | **[Read CISA's open source policy](policy.md)** for the official 6 | policy of [CISA](https://cisa.gov/) and more details about what "open by 7 | default" means. 8 | 9 | ## CISA team guidance ## 10 | 11 | **[Read CISA's open source team practices](practice.md)** for guidance on how 12 | CISA puts this open-by-default policy into practice, and how we handle the 13 | narrow situations where we may delay or withhold the release of source code. 14 | 15 | ## Credits ## 16 | 17 | This policy was originally copied from [18F](https://18f.gsa.gov) which was 18 | forked from the [Consumer Financial Protection Bureau's policy](https://github.com/cfpb/source-code-policy). 19 | Thanks also to [@benbalter](https://github.com/benbalter) for his 20 | [insights regarding CFPB's initial policy](http://ben.balter.com/2012/04/10/whats-missing-from-cfpbs-awesome-new-source-code-policy/). 21 | -------------------------------------------------------------------------------- /open-source-policy/policy.md: -------------------------------------------------------------------------------- 1 | # CISA: An open source agency # 2 | 3 | The Cybersecurity and Infrastructure Security Agency ([CISA](https://cisa.gov)) 4 | is the Nation’s risk advisor, working with partners to defend against today’s 5 | threats and collaborating to build more secure and resilient infrastructure for 6 | the future. 7 | 8 | The default position of CISA when developing new projects is to: 9 | 10 | 1. 
Use Free and Open Source Software (FOSS), which is software that does not 11 | charge users a purchase or licensing fee for modifying or redistributing the 12 | source code, in our projects and contribute back to the open source community. 13 | 1. Develop our work in the open. 14 | 1. Publish publicly all source code created or modified by CISA, whether 15 | developed in-house by government staff or through contracts negotiated by CISA. 16 | 17 | ## Benefits ## 18 | 19 | Using FOSS allows for product customization, advances interoperability between 20 | tools, and improves the overall quality of the final product. Other benefits 21 | include: 22 | 23 | 1. **Flexible usage.** The benefits of using FOSS compel CISA to meet user needs 24 | by modifying existing or creating new FOSS. FOSS is particularly suitable for 25 | rapid prototyping and experimentation. The testing process generates minimal 26 | costs, and the process encourages the identification and elimination of defects 27 | not recognized by the original development team. 28 | 29 | 1. **Community involvement.** Publicly available source code enables continuous 30 | and broad peer review. Whether simply publishing the completed code or opening 31 | the development process, the practice of expanding the review and testing 32 | process to a wider audience—beyond the development team—ensures 33 | increased software reliability and security. Developing in the open also allows 34 | for other opinions to help adjust the direction of a product to maximize its 35 | usefulness to the community it serves. 36 | 37 | 1. **Cost-savings.** The ability to modify FOSS enables CISA to respond rapidly 38 | to changing missions and markets. Support and maintenance of open source 39 | code—as opposed to more burdensome usages of proprietary 40 | software—provides a real cost advantage where multiple copies of software 41 | are required, or when the user base grows. The total cost of ownership is shared 42 | with a community, rather than solely CISA. 43 | 44 | 1. **Reusability.** The code we create belongs to the public as a part of the 45 | public domain. The code we work on was paid for by the American people, but the 46 | end-product is not the only way they should be able to interact with their 47 | government. By coding in FOSS, we help populate a larger commons that cities, 48 | states, businesses, and individuals can participate in. This creates real 49 | economic value by lowering the burden of replicating similar work or by allowing 50 | the private sector to build off of and create new businesses around code 51 | developed at CISA. 52 | 53 | ## Maximizing community involvement and reuse ## 54 | 55 | Active involvement from the open source community is integral to the success of 56 | open source code. CISA will be an active contributor to FOSS projects that it or 57 | its clients utilize. 58 | 59 | Code written entirely by CISA staff will be dedicated to the public domain. In 60 | addition, any contracts CISA enters into, where others will develop software on 61 | CISA's behalf, will ensure that all results are dedicated to the public domain. 62 | In general, all discussion in this document about the licensing of work of 63 | CISA's contractors means that CISA will ensure that their contracts guarantee 64 | those terms. 65 | 66 | CISA encourages contributions to its open source projects, whether it be code, 67 | commentary, bug reports, feature requests, or overall strategic direction. 
68 | 69 | Forks or clones of our code repositories are free to be re-distributed. This 70 | means code created by CISA can be integrated into work that is under a more 71 | restrictive license, even those that are not considered open source licenses. 72 | 73 | This changes when our code repositories include code that was not created by 74 | CISA and carries an open license. Code previously released under an open source 75 | license and then modified by CISA or its contractors is considered a ["joint 76 | work"](http://www.copyright.gov/title17/92chap1.html#101) and must be released 77 | under terms permitted by the original open source license. 78 | 79 | The public can use our code as the basis of wholly proprietary and commercial 80 | systems. CISA would appreciate that users of our code disclose its lineage, but 81 | CISA maintains no legal right to require disclosure. Notifications that our work 82 | is used in a new system are always greatly appreciated. 83 | 84 | ## Open source licenses ## 85 | 86 | As previously mentioned, most work generated at CISA falls within the U.S. 87 | public domain. 88 | 89 | For our international colleagues, CISA also permanently waives all copyright and 90 | related rights worldwide to code created by CISA or its contractors. 91 | 92 | Our [default LICENSE file](/LICENSE) for projects acknowledges that our work is 93 | in the US public domain, and uses 94 | [CC0](https://creativecommons.org/publicdomain/zero/1.0/) to waive copyright 95 | internationally. 96 | 97 | Our [default CONTRIBUTING file](/CONTRIBUTING.md) informs contributors that 98 | their contributions will be licensed under the same terms. 99 | 100 | However, certain projects will require the usage of licensed open source 101 | software not created by CISA. Some open source licenses make source code 102 | available under different terms and conditions. These terms and conditions 103 | specify how the code may be used, modified, or shared. When users modify CISA 104 | code, they should review and understand the terms of the open source license in 105 | question. 106 | 107 | Each project may need to modify or extend the above LICENSE and CONTRIBUTING 108 | files as needed for its own circumstances. 109 | 110 | ## Distribution of code ## 111 | 112 | There is a misconception that FOSS that is distributed to the public should not 113 | be integrated or modified for use in sensitive systems. On the contrary, FOSS is 114 | often preferred for use in sensitive systems, due in part to its increased 115 | auditability. In other words, security in FOSS must be designed never to rely on 116 | obscurity in how the code works. 117 | 118 | In addition, while open source licenses permit the user to modify FOSS for 119 | internal use without obligating them to distribute source code to the public, 120 | when the user chooses to distribute the modified FOSS outside the user's 121 | organization, then the code is subject to whatever license it carries. 122 | 123 | ## Exceptions ## 124 | 125 | The only conditions where code shall not be developed and released in the open 126 | are: 127 | 128 | - The U.S. Government does not have the rights to reproduce and release the 129 | item. 130 | 131 | - The public release of the item is restricted by other law or regulation, such 132 | as the Export Administration Regulations or the International Traffic in Arms 133 | Regulation. 
134 | 135 | These decisions will be made as needed by the CISA Infrastructure team, which 136 | will lead an interdisciplinary team to review the conditions under which code 137 | will not be made available publicly. Any further exemptions will be rare, 138 | documented publicly, and the result of compelling interest. 139 | 140 | If an existing solution cannot be found in the open source community, CISA may 141 | consider other options, including creating an open source solution itself. 142 | Ultimately, the software that best meets the needs and mission of CISA should be 143 | used. 144 | 145 | ## Further reading ## 146 | 147 | [OMB M-16-21](https://sourcecode.cio.gov/) - A White House policy pertaining to 148 | federal source code, published by the White House Office of Management and 149 | Budget. M-16-21 directs all agencies to publish some custom developed code as 150 | open source code, to update acquisition requirements to capture code developed 151 | by vendors for GSA, and to inventory [all open- and closed-source 152 | code](https://open.gsa.gov/code.json). 153 | 154 | [GSA Open Source Policy](https://open.gsa.gov/oss-policy/) - GSA's agency-wide 155 | open source policy, created based on OMB's M-16-21 policy. 156 | 157 | ## Thanks ## 158 | 159 | CISA would like to thank 18F, Consumer Financial Protection Bureau, Department 160 | of Defense, and Office of Management and Budget for their work in blazing the 161 | path for the use of FOSS in the Federal Government. 162 | 163 | ## Future changes ## 164 | 165 | This policy is a living document. CISA expects to make changes to this policy in 166 | the future, and we welcome 167 | [issues](https://github.com/cisagov/development-guide/issues) and [pull 168 | requests](https://github.com/cisagov/development-guide/pulls). To contact us 169 | privately, email [github@cisa.dhs.gov](mailto:github@cisa.dhs.gov). 170 | -------------------------------------------------------------------------------- /open-source-policy/practice.md: -------------------------------------------------------------------------------- 1 | # Practicing our open source policy # 2 | 3 | We have an "open-by-default" development policy. This document is meant to 4 | give specific team guidance on putting our [open source policy](policy.md) 5 | into practice. 6 | 7 | - CISA releases software into the [international public domain](#public-domain). 8 | - Contribute back to outside open source projects whenever possible. 9 | - We [develop our software in the open](#working-in-public), while also 10 | [protecting sensitive information](#protecting-sensitive-information). 11 | - There are [narrow, documented exceptions](policy.md#exceptions) where we may 12 | delay or withhold source code. 13 | 14 | CISA team members should work with the strong presumption that all of their 15 | code will be public, both during and after development. 16 | 17 | Before deciding to delay or withhold the release of source code, consult with 18 | the team and be prepared to publicly document this exception. 19 | 20 | ## Public domain ## 21 | 22 | [By law](http://www.law.cornell.edu/uscode/text/17/105), works of the United 23 | States government are not copyrightable in the US, and so are public domain. 24 | But by default, US government works **are** copyrightable internationally, and 25 | so CISA intentionally waives this copyright abroad using 26 | [Creative Commons Zero (CC0) 1.0](https://creativecommons.org/publicdomain/zero/1.0/). 
27 | 28 | There are potentially other cases where copyright is involved: where 29 | contractors produce the work, or where work was otherwise originally performed 30 | outside the capacity of a US government employee. 31 | 32 | To the extent CISA has the rights to do so, CISA will normalize the copyright 33 | status of its work product under CC0. 34 | 35 | ## Contributing back to outside projects ## 36 | 37 | CISA staff are encouraged to seek existing, open source solutions -- whether 38 | government or non-government -- before writing custom tools. When existing 39 | libraries need to be modified or improved, CISA staff should make the 40 | modifications with eventual upstream contribution in mind. 41 | 42 | In practice, this generally involves forking the relevant repository to the 43 | CISA organization within GitHub (`cisagov`), creating a new branch with the 44 | modifications, and sending a pull request to upstream from the CISA fork. 45 | Unlike our own projects, there is no need for internal code review in this 46 | scenario (though it doesn't hurt). 47 | 48 | In terms of licensing: as works of the government, employee contributions are 49 | public domain in the United States, regardless of the outside project's 50 | contribution agreement. This does not change the overall license status of the 51 | outside project. 52 | 53 | As [the Free Software Foundation says](https://www.gnu.org/licenses/gpl-faq.html#GPLUSGovAdd) 54 | about government-contributed improvements to GPL software: 55 | 56 | > Yes. If the improvements are written by US government employees in the 57 | course of their employment, then the improvements are in the public domain. 58 | However, the improved version, as a whole, is still covered by the GNU GPL. 59 | There is no problem in this situation. 60 | 61 | See also: [The Department of Defense's FAQ question about this](http://dodcio.defense.gov/Open-Source-Software-FAQ/#Q:_Can_government_employees_contribute_code_to_open_source_software_projects.3F). 62 | 63 | ### Contributor License Agreements (CLAs) ### 64 | 65 | Some external projects have CLAs. You cannot sign these yourself, in your 66 | official capacity. 67 | 68 | 1. See if there is an organizational CLA available 69 | 1. Send the agreement to DHS's Office of General Counsel (OGC) for review 70 | - Email 71 | [github@cisa.dhs.gov](mailto:github@cisa.dhs.gov) 72 | to determine the best contact 73 | 1. Collect names/emails/GitHub usernames (whatever is needed) for folks you 74 | think will be contributing 75 | - Err on the side of adding too many than too few - this is much easier 76 | 1. Get it signed 77 | 1. Add to list below 78 | 1. Contribute 79 | 80 | CISA currently has the following CLAs signed: 81 | 82 | - None 😒 83 | 84 | ## How to license CISA repos ## 85 | 86 | When creating a repo, we highly recommend that you start from one of our 87 | maintained skeleton projects. This will quickly get you setup with the 88 | correct LICENSE and CONTRIBUTING documents as well as some spiffy tooling to 89 | keep your project healthy. See our [project setup](/project_setup) document 90 | for the best way to do this. 91 | 92 | ## Accepting contributions from the public ## 93 | 94 | Any CISA project can (and should!) accept open source contributions from the 95 | public. 
96 |
97 | Projects can **encourage public contributions** by:
98 |
99 | - Creating open issues where public help would be especially welcome
100 | - Labeling those issues with `help wanted` so people can scan issues quickly
101 | and [services](http://www.codeforamerica.org/geeks/civicissues) can aggregate
102 | volunteer opportunities
103 | - Asking for contributions in the README and in other public writing about the
104 | project
105 | - Providing solid documentation for any project setup process
106 | - Being super nice when communicating with volunteers
107 |
108 | As [described above](#public-domain), CISA projects are dedicated to the
109 | international public domain wherever possible. In this situation, contributors
110 | must agree to release their contributions into the international public
111 | domain. Projects can inform contributors of this agreement by copying the
112 | [`CONTRIBUTING.md`](CONTRIBUTING.md) file from this repo into new project
113 | repos, and copying the
114 | ["Public domain" section of this repo's README](README.md#public-domain)
115 | into the new project's README.
116 |
117 | When a CISA project has a non-standard license status (e.g. it's a fork of a
118 | previously licensed project, or is a module/plugin for a GPL project), then
119 | that project needs to figure out an appropriate contributing agreement.
120 |
121 | ## Working in public ##
122 |
123 | CISA believes in
124 | [working in public](https://18f.gsa.gov/2014/07/31/working-in-public-from-day-1/).
125 | It creates a healthier working environment, a more collaborative process, and
126 | just better software.
127 |
128 | All CISA team members are expected to make new source code
129 | repositories public from the time of creation. This means we often
130 | publish drafts in our repos that may change substantially. If you're
131 | interested in learning more about the contents of a repo, email
132 | [github@cisa.dhs.gov](mailto:github@cisa.dhs.gov)
133 | and we'll direct you to the right person or team.
134 |
135 | ## Protecting sensitive information ##
136 |
137 | As part of responsibly working in the open, CISA team members are expected to
138 | protect information that needs to be protected. We already receive training
139 | and guidance about information we can’t publish for [ethical](https://www.oge.gov/web/oge.nsf/Topics),
140 | [legal](https://handbook.18f.gov/intro-to-18f-infrastructure/), and [security](https://insite.gsa.gov/portal/content/627226)
141 | reasons — this section is a reminder about sensitive information (formally
142 | called “[controlled unclassified information](http://nvlpubs.nist.gov/nistpubs/SpecialPublications/NIST.SP.800-171.pdf)”)
143 | to carefully protect when working with our open source projects. Sensitive
144 | information can include code, configuration, content, or documentation. (We
145 | have [approved options for sharing sensitive information](https://handbook.18f.gov/sensitive-information/).)
146 |
147 | If CISA team members aren't sure whether they should make something public,
148 | they should ask a person on our CISA Infrastructure team for advice *first*.
149 |
150 | If CISA team members inadvertently come into the possession of classified
151 | information (Secret, Top Secret, etc.), they should immediately report the
152 | incident and follow the established information spill procedures.
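Tooling can also help catch mistakes before they ever reach GitHub. As one illustrative safeguard (not an official requirement), a secret-scanning hook such as gitleaks can be added to a repository's `.pre-commit-config.yaml`; the repository URL, `rev`, and hook id shown here follow gitleaks' published pre-commit hook, but treat this as a sketch and pin `rev` to a current release before relying on it:

```yaml
repos:
  - repo: https://github.com/gitleaks/gitleaks
    rev: v8.18.0  # example tag; pin to a current release
    hooks:
      - id: gitleaks  # scan staged changes for secrets before each commit
```

A hook like this is a backstop, not a substitute for the judgment described in this section.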
153 | 154 | Sensitive information we need to protect includes, but is not limited to: 155 | 156 | - Information an attacker could plausibly use to help them compromise any 157 | system (including a prototype/development system). Examples: 158 | - **Secret keys:** Passwords, passcodes, access codes, access tokens, API 159 | keys, TLS keys, SSH keys, OAuth secrets, or any other “secret keys” that 160 | protect access to something. 161 | - **Undisclosed vulnerabilities:** If we know of a security problem or 162 | potential security problem with our code that isn’t already publicly-known 163 | (such as a vulnerability that can’t be found with a publicly-available open 164 | source scanning tool run on the public-facing system), we shouldn’t write 165 | publicly about it until we fix it. 166 | - Nonpublic information in general about vulnerabilities, including 167 | attribution/source information (such as how and when we learned about a 168 | vulnerability, if the disclosure to us was not public). 169 | - We may wish to withhold some non-CISA IP addresses. If something looks like 170 | an IP address, ask CISA Infrastructure before publishing that info. 171 | - Personally Identifiable Information (PII). Here’s [OMB's definition and GSA's policy](http://www.gsa.gov/portal/content/104256). 172 | 18F also has [guidance for systems involving PII](https://pages.18f.gov/before-you-ship/security/pii/). 173 | - Some kinds of procurement and acquisition information, which may include 174 | non-public cost or pricing data, contract information, trade secrets, indirect 175 | costs, and direct labor rates. If you’re an CISA team member working with this 176 | kind of data, ask our acquisition specialists for help determining whether it 177 | can be public. 178 | - Emergency procedures, such as evacuation plans. 179 | 180 | There are more categories of controlled unclassified information to protect; 181 | those are just the kinds that we work with most often. Here’s the 182 | [complete list of CUI categories](https://www.archives.gov/cui/registry/category-list). 183 | 184 | ## Private repositories ## 185 | 186 | If the CISA Infrastructure team determines that a repository should not be 187 | public, as described in the [open source policy](policy.md#exceptions), the 188 | reasoning should be documented and a link to that reasoning provided in the 189 | repository's `README` to preserve that knowledge and so the decision can be 190 | revisited in the future if circumstances change. If the underlying reasons 191 | for making the repository private are not themselves sensitive, this 192 | explanation can be placed directly in the `README`. 193 | 194 | ## Managing CISA resources ## 195 | 196 | CISA intends to produce great software for the American people. That means not 197 | just rushing through projects to get them working as fast as possible, but 198 | managing [technical debt](https://en.wikipedia.org/wiki/Technical_debt) with 199 | an eye towards usability and reusability. 200 | 201 | If a refactoring or feature makes the tool easier for CISA to use in its work, 202 | and the teammate doing it is otherwise meeting their duties, then that's time 203 | well spent for CISA and the taxpayer. 204 | 205 | Open source projects can - and hopefully do! - get use and uptake from outside 206 | CISA. It's also okay for individual teammates to create projects they intend 207 | to use both at CISA and in their personal capacity. 
208 |
209 | Teammates do not need permission to start new open source projects in the CISA
210 | GitHub organization. However, generally speaking, these projects should have
211 | some work applicability.
212 |
213 | When creating new open source projects:
214 |
215 | - If you're creating a repo because it's primarily for your CISA work, and the
216 | work you perform in it is primarily to benefit CISA, start the repo's life in
217 | the CISA organization. It's okay if you also think it'll be helpful in
218 | personal work.
219 | - If you're creating a repo that isn't primarily for CISA work, but that you
220 | think will likely see use at CISA, start it in your personal account. If you
221 | don't have strong feelings or concerns about ownership, consider releasing the
222 | project under CC0 to save yourself from ever having to think about it.
223 |
224 | As people open issues and request features (no matter whether the repo is in
225 | your account or CISA's), continue to exercise professional judgment about how
226 | to spend CISA time.
227 |
228 | If you think something will benefit CISA and is worth the time, then that's
229 | valuable CISA work. If it won't benefit CISA but makes the library better for
230 | other uses, that may best be done with personal time.
231 |
232 | ## Archiving a repository ##
233 |
234 | When a repository is no longer useful, it should be [archived](https://help.github.com/articles/archiving-repositories/).
235 | This may be because the work has been incorporated into another repository,
236 | the project is unmaintained and out-of-date, or some other reason. In order to
237 | preserve repository metadata like pull request discussions and issues, the
238 | repository should not be deleted or made private.
239 |
--------------------------------------------------------------------------------
/project_setup/README.md:
--------------------------------------------------------------------------------
1 | # Project Setup #
2 |
3 | We recommend you follow the directions below and use a skeleton for
4 | all new repositories.
5 |
6 | The [`skeleton`](scripts/skeleton)
7 | helper tool included in the [`scripts`](scripts) directory can quickly set up
8 | a new local repository. Once you've cloned and configured the repository
9 | to your local machine, it can be published to a repository created on GitHub.
10 |
11 | For repositories created from skeletons, run `setup-env` and
12 | `pre-commit install` to [set up your environment](#set-up-your-environment-and-pre-commit)
13 | and enable linting and other tools, so that new commits don't immediately
14 | run into linting failures.
15 |
16 | Once you've set up a repository, make sure to enable
17 | branch protection - [see our branch protection guide for details](branch-protection.md).
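For example, creating a hypothetical new Ansible role end-to-end looks roughly like the sketch below; the repository name `ansible-role-example` and the `~/projects` directory are placeholders, and each step is covered in detail in the sections that follow:

```bash
# From the scripts directory of your development-guide clone
./skeleton clone --change-dir ~/projects skeleton-ansible-role ansible-role-example

# Set up your environment and pre-commit hooks inside the new repository
cd ~/projects/ansible-role-example
./setup-env

# Publish the new repository to the GitHub remote created for it
git push --set-upstream origin develop
```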
18 | 19 | ## Contents ## 20 | 21 | - [Adding a skeleton configuration to an existing repository](#adding-a-skeleton-configuration-to-an-existing-repository) 22 | - [Using the skeleton tool to start a new repository 💀🛠](#using-the-skeleton-tool-to-start-a-new-repository-) 23 | - [Selecting a skeleton](#selecting-a-skeleton) 24 | - [Cloning a selected skeleton](#cloning-a-selected-skeleton) 25 | - [Create and publish the GitHub repository](#create-and-publish-the-github-repository) 26 | - [Disabling squash merging](#disabling-squash-merging) 27 | - [Set up your environment and pre-commit](#set-up-your-environment-and-pre-commit) 28 | - [Create an initial pull request](#create-an-initial-pull-request) 29 | - [Setting up branch protection](#setting-up-branch-protection) 30 | - [Setting up type-specific configuration settings](#setting-up-type-specific-configuration-settings) 31 | - [Setting up Coveralls for Python projects](#setting-up-coveralls-for-python-projects) 32 | - [Ansible requirement file generation tool 🧻🛠](#ansible-requirement-file-generation-tool-) 33 | - [Terraform IAM credentials to GitHub secrets 🔑‍👉🤫](#terraform-iam-credentials-to-github-secrets-) 34 | - [Managing SSM parameters from files 🗂👉☁️](#managing-ssm-parameters-from-files-%EF%B8%8F) 35 | 36 | ## Adding a skeleton configuration to an existing repository ## 37 | 38 | To skeletonize an existing repository, please see the guide to 39 | [skeletonize an existing repository](skeletonize-existing-repository.md). 40 | 41 | ## Using the skeleton tool to start a new repository 💀🛠 ## 42 | 43 | We recommend that you start your project from one of the 44 | [skeleton projects](https://github.com/search?q=org%3Acisagov+topic%3Askeleton) 45 | in this organization. The [`skeleton`](scripts/skeleton) 46 | helper tool included in the [`scripts`](scripts) directory can quickly setup 47 | a new local repository. Once you've cloned and configured the repository 48 | to your local machine, you then publish it to a repository created on GitHub. 49 | 50 | ### Selecting a skeleton ### 51 | 52 | First, identify a suitable skeleton project to use as the starting point 53 | for your new repository. For a list of available skeletons, see the 54 | [Skeleton List](skeleton-list.md) or use the following command: 55 | 56 | ```bash 57 | ./skeleton list 58 | ``` 59 | 60 | ```console 61 | Available skeletons in cisagov: 62 | 63 | skeleton-python-library 64 | A skeleton project for quickly getting a new cisagov Python library started. 65 | 66 | skeleton-docker 67 | A skeleton project for quickly getting a new cisagov Docker container started. 68 | 69 | skeleton-generic 70 | A generic skeleton project for quickly getting a new cisagov project started. 71 | 72 | skeleton-tf-module 73 | A skeleton project for quickly getting a new cisagov Terraform module started. 74 | 75 | skeleton-ansible-role 76 | A skeleton project for quickly getting a new cisagov Ansible role started. 77 | 78 | skeleton-ansible-role-with-test-user 79 | A skeleton project for quickly getting a new cisagov Ansible role started when that role requires an AWS test user. 80 | 81 | skeleton-packer 82 | A skeleton project for quickly getting a new cisagov packer project started. 83 | 84 | skeleton-aws-lambda 85 | A skeleton project for quickly getting a new cisagov Python AWS Lambda started. 86 | ``` 87 | 88 | ### Cloning a selected skeleton ### 89 | 90 | Next, use the `skeleton` tool to clone, rename, and prepare the contents of 91 | your new repository for publication. 
The tool will print out each command it 92 | is issuing and its result. 93 | 94 | ```console 95 | ./skeleton clone [options] 96 | ``` 97 | 98 | For example, to create a project based on `skeleton-ansible-role` named 99 | `ansible-role-quantum-rng` in your local `~/projects` directory: 100 | 101 | ```bash 102 | ./skeleton clone --change-dir ~/projects skeleton-ansible-role ansible-role-quantum-rng 103 | ``` 104 | 105 | This command results in: 106 | 107 | ```console 108 | ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― 109 | 💬 Clone an existing remote repository to the new name locally. 110 | ➤ git clone git@github.com:cisagov/skeleton-ansible-role.git ansible-role-quantum-rng 111 | Cloning into 'ansible-role-quantum-rng'... 112 | ✅ success 113 | ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― 114 | 💬 Disable pushing to the upstream (parent) repository. 115 | ➤ git remote set-url --push skeleton-ansible-role no_push 116 | ✅ success 117 | ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― 118 | 💬 Add a new remote origin for the this repository. 119 | ➤ git remote add origin git@github.com:cisagov/ansible-role-quantum-rng.git 120 | ✅ success 121 | ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― 122 | 💬 Search and replace repository name in source files. 123 | ➤ find . \( ! -regex '.*/\.git/.*' \) -type f -print0 | xargs -0 sed -i "" "s/skeleton-ansible-role/ansible-role-quantum-rng/g" 124 | ✅ success 125 | ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― 126 | 💬 Stage modified files. 127 | ➤ git add --verbose . 128 | add 'CONTRIBUTING.md' 129 | add 'README.md' 130 | add 'molecule/default/playbook.yml' 131 | ✅ success 132 | ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― 133 | 💬 Commit staged files to the new repository. 134 | ➤ git commit --message "Rename repository references after clone." 135 | [develop 565e041] Rename repository references after clone. 136 | 3 files changed, 10 insertions(+), 10 deletions(-) 137 | ✅ success 138 | ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― 139 | 140 | The repository "skeleton-ansible-role" has been cloned and renamed to "ansible-role-quantum-rng". 141 | Use the following commands to push the new repository to github: 142 | cd ~/projects/ansible-role-quantum-rng 143 | git push --set-upstream origin develop 144 | ``` 145 | 146 | ## Create and publish the GitHub repository ## 147 | 148 | Once the `skeleton` tool has run, the next step is to publish to a GitHub 149 | remote repository. 150 | 151 | To publish your new repository on GitHub, the remote must already exist. 152 | [Create a new repository](https://github.com/organizations/cisagov/repositories/new) 153 | on GitHub with the same name as your new local repository. If you do not 154 | have permission, ask an administrator to create it for you. 155 | 156 | Add the repository name and description, set the repository to public, and 157 | skip the rest of the options. 158 | 159 | Next, publish your new repository to GitHub: 160 | 161 | ```bash 162 | git push --set-upstream origin develop 163 | ``` 164 | 165 | ## Disabling squash merging ## 166 | 167 | Click on the settings tab for your new repository and, in the 168 | "Options" section, make sure that "Allow squash merging" is 169 | *unchecked*. 
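If you prefer the command line, the same setting can be changed through the GitHub REST API; this is a sketch that assumes you have the `gh` CLI installed and authenticated, with `cisagov/your-new-repo` as a placeholder for your repository:

```bash
# Turn off "Allow squash merging" on the new repository
gh api --method PATCH repos/cisagov/your-new-repo -F allow_squash_merge=false
```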
170 | 171 | ## Set up your environment and pre-commit ## 172 | 173 | Follow the instructions in [CONTRIBUTING.md on setting up pre-commit](../CONTRIBUTING.md#setting-up-pre-commit) 174 | to run `setup-env` and enable the `pre-commit` hooks. If you have already set 175 | up the prerequisites, this involves: 176 | 177 | ```sh 178 | # In the root directory of the repository 179 | ./setup-env 180 | ``` 181 | 182 | ## Create an initial pull request ## 183 | 184 | You probably want to add code, documentation, and other items to your 185 | repository to customize it from the skeleton and make changes. 186 | 187 | Create a new branch called `first-commits` and publish it: 188 | 189 | ```bash 190 | git checkout -b first-commits 191 | git push origin first-commits --set-upstream 192 | ``` 193 | 194 | Edit the existing files and add your own as needed, then commit your changes, 195 | push them, and create a pull request (PR) via GitHub or the command-line for 196 | your teammates to review. 197 | 198 | GitHub only populates its list of status checks once a PR 199 | has been created so checks can run against changes. Status checks are 200 | extremely useful for quality control and automated testing, 201 | so we require these checks to pass before merging. This first PR will ensure 202 | your new repository is ready to go and give your teammates a chance to review 203 | your code before merging it. 204 | 205 | If a status check doesn't apply to your new repository, leave it enabled 206 | anyway - it won't hurt anything. 207 | 208 | ## Setting up branch protection ## 209 | 210 | Once you've made your initial pull request, enable [branch protection](branch-protection.md) 211 | to enforce the `codeowners` approval requirements for pull requests. 212 | 213 | ## Setting up type-specific configuration settings ## 214 | 215 | ### Setting up Coveralls for Python projects ### 216 | 217 | The README for your new Python project will be prepared with a Coveralls badge. 218 | To make the badge work properly, you'll need to add a repository secret. 219 | 220 | 1. Visit [Coveralls](https://coveralls.io/) and go to `Add Repos`. 221 | 1. Select your new repository and enable it. This will take you to a 222 | page with `Python set up for Coveralls`. The code block will have an entry for 223 | `repo_token: `. 224 | 1. Copy the `repo_token` value. 225 | 1. On GitHub, visit your new repository's `Settings -> Secrets` page. 226 | - Note: If you don't have access to `Settings`, please contact an 227 | administrator to do this step for you. 228 | 1. Add a `New repository secret` and name it `COVERALLS_REPO_TOKEN` with the 229 | value from Coveralls. 230 | 231 | ### Ansible requirement file generation tool 🧻🛠 ### 232 | 233 | We have a [plethora](https://www.youtube.com/watch?v=zWld721Wk-Q) of 234 | [ansible-roles in our organization](https://github.com/search?q=org%3Acisagov+topic%3Aansible-role+NOT+skeleton+archived%3Afalse). 235 | To facilitate the creation of a `requirements.yml` file used in an Ansible 236 | project, we have created the [`ansible-roles`](scripts/ansible-roles) tool 237 | located in the [`scripts`](scripts) directory. The tool will output `yml` 238 | for all the current (non-archived) Ansible role repositories. A common 239 | usage of the tool is: 240 | 241 | ```bash 242 | ./ansible-roles > myproject/src/requirements.yml 243 | ``` 244 | 245 | This file will now contain definitions for all the Ansible roles. Edit 246 | the file, and remove any role that will not be required for your project. 
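The generated entries use the `src`/`name` form that `ansible-galaxy install -r` understands, with the `ansible-role-` prefix stripped and dashes converted to underscores for the role name. For illustration (using repository names that appear elsewhere in this guide), the output looks something like:

```yaml
- src: https://github.com/cisagov/ansible-role-dev-ssh-access
  name: dev_ssh_access
- src: https://github.com/cisagov/ansible-role-quantum-rng
  name: quantum_rng
```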
247 | 248 | ### Terraform IAM credentials to GitHub secrets 🔑‍👉🤫 ### 249 | 250 | When GitHub Actions workflows require credentials to run we provide them via 251 | [secrets](https://help.github.com/en/actions/configuring-and-managing-workflows/creating-and-storing-encrypted-secrets). 252 | This usually involves extracting the secrets from the Terraform state 253 | json output. Then some pointing, clicking, cutting and pasting on the 254 | repository's settings. 255 | 256 | To simplify this task use the [`terraform-to-secrets`](scripts/terraform-to-secrets) 257 | tool located in the [`scripts`](scripts) directory. The tool will create secrets 258 | using your 259 | [personal access token (PAT)](https://help.github.com/en/github/authenticating-to-github/creating-a-personal-access-token-for-the-command-line). 260 | Note: Your PAT needs to have the "repo" scope set. 261 | 262 | Execute the tool from your GitHub project's terraform directory: 263 | 264 | ```bash 265 | terraform-to-secrets 9f4ae878de917c7cf191b9861d3c1cf9224939f7 266 | ``` 267 | 268 | ```console 269 | 2020-02-22 15:50:36,059 INFO Using GitHub repository name: cisagov/ansible-role-dev-ssh-access 270 | 2020-02-22 15:50:36,060 INFO Searching Terraform state for IAM credentials. 271 | 2020-02-22 15:50:40,643 INFO Found credentials for user: test-ansible-role-dev-ssh-access 272 | 2020-02-22 15:50:40,643 INFO Creating GitHub API session using personal access token. 273 | 2020-02-22 15:50:40,644 INFO Requesting public key for repository cisagov/ansible-role-dev-ssh-access 274 | 2020-02-22 15:50:40,832 INFO Setting secrets for user: test-ansible-role-dev-ssh-access 275 | 2020-02-22 15:50:40,832 INFO Creating secret AWS_ACCESS_KEY_ID 276 | 2020-02-22 15:50:41,027 INFO Creating secret AWS_SECRET_ACCESS_KEY 277 | 2020-02-22 15:50:41,036 INFO Success! 
278 | ``` 279 | 280 | ### Managing SSM parameters from files 🗂👉☁️ ### 281 | 282 | Use the [`ssm-param`](scripts/ssm-param) tool to copy files into 283 | [SSM parameters](https://docs.aws.amazon.com/systems-manager/latest/userguide/parameter-store-about-examples.html) 284 | in multiple regions simultaneously: 285 | 286 | ```console 287 | ssm-param cp *.pem /demo/ 288 | ``` 289 | 290 | ```console 291 | 2019-08-27 16:45:58,651 INFO Found credentials in shared credentials file: ~/.aws/credentials 292 | 2019-08-27 16:45:58,744 INFO Putting parameter '/demo/dh4096.pem' in region 'us-east-1' 293 | 2019-08-27 16:45:58,981 INFO Putting parameter '/demo/dh4096.pem' in region 'us-east-2' 294 | 2019-08-27 16:45:59,327 INFO Putting parameter '/demo/dh4096.pem' in region 'us-west-1' 295 | 2019-08-27 16:45:59,887 INFO Putting parameter '/demo/dh4096.pem' in region 'us-west-2' 296 | 2019-08-27 16:46:00,363 INFO Putting parameter '/demo/private.pem' in region 'us-east-1' 297 | 2019-08-27 16:46:00,480 INFO Putting parameter '/demo/private.pem' in region 'us-east-2' 298 | 2019-08-27 16:46:00,640 INFO Putting parameter '/demo/private.pem' in region 'us-west-1' 299 | 2019-08-27 16:46:01,008 INFO Putting parameter '/demo/private.pem' in region 'us-west-2' 300 | 2019-08-27 16:46:01,414 INFO Putting parameter '/demo/public.pem' in region 'us-east-1' 301 | 2019-08-27 16:46:01,553 INFO Putting parameter '/demo/public.pem' in region 'us-east-2' 302 | 2019-08-27 16:46:01,718 INFO Putting parameter '/demo/public.pem' in region 'us-west-1' 303 | 2019-08-27 16:46:02,070 INFO Putting parameter '/demo/public.pem' in region 'us-west-2' 304 | ``` 305 | 306 | It can also delete parameters from multiple regions: 307 | 308 | ```console 309 | ssm-param rm /demo/dh4096.pem /demo/private.pem /demo/public.pem 310 | ``` 311 | 312 | ```console 313 | 2019-08-27 16:47:59,384 INFO Found credentials in shared credentials file: ~/.aws/credentials 314 | 2019-08-27 16:47:59,478 INFO Deleting parameter '/demo/dh4096.pem' in region 'us-east-1' 315 | 2019-08-27 16:47:59,715 INFO Deleting parameter '/demo/dh4096.pem' in region 'us-east-2' 316 | 2019-08-27 16:48:00,003 INFO Deleting parameter '/demo/dh4096.pem' in region 'us-west-1' 317 | 2019-08-27 16:48:00,523 INFO Deleting parameter '/demo/dh4096.pem' in region 'us-west-2' 318 | 2019-08-27 16:48:01,065 INFO Deleting parameter '/demo/private.pem' in region 'us-east-1' 319 | 2019-08-27 16:48:01,202 INFO Deleting parameter '/demo/private.pem' in region 'us-east-2' 320 | 2019-08-27 16:48:01,355 INFO Deleting parameter '/demo/private.pem' in region 'us-west-1' 321 | 2019-08-27 16:48:01,728 INFO Deleting parameter '/demo/private.pem' in region 'us-west-2' 322 | 2019-08-27 16:48:02,138 INFO Deleting parameter '/demo/public.pem' in region 'us-east-1' 323 | 2019-08-27 16:48:02,269 INFO Deleting parameter '/demo/public.pem' in region 'us-east-2' 324 | 2019-08-27 16:48:02,417 INFO Deleting parameter '/demo/public.pem' in region 'us-west-1' 325 | 2019-08-27 16:48:02,795 INFO Deleting parameter '/demo/public.pem' in region 'us-west-2' 326 | ``` 327 | -------------------------------------------------------------------------------- /project_setup/branch-protection.md: -------------------------------------------------------------------------------- 1 | # Branch Protection # 2 | 3 | We require branch protection to be enabled on all cisagov repositories after 4 | creation and initial population. 
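The checklist below walks through the GitHub web UI. If you manage repository settings from the command line instead, roughly the same protections can be applied with GitHub's "update branch protection" REST endpoint; treat the following as a hedged sketch rather than an exact mirror of the checklist (`cisagov/your-new-repo` is a placeholder), and verify the payload against the current API documentation before relying on it:

```bash
# Approximate the develop branch protections described below via the REST API
gh api --method PUT repos/cisagov/your-new-repo/branches/develop/protection \
  --input - <<'EOF'
{
  "required_pull_request_reviews": {
    "required_approving_review_count": 2,
    "require_code_owner_reviews": true
  },
  "required_status_checks": {"strict": true, "contexts": []},
  "required_conversation_resolution": true,
  "enforce_admins": true,
  "restrictions": null,
  "allow_force_pushes": false,
  "allow_deletions": false
}
EOF
```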
5 | 6 | ## Setting up a rule to protect the `develop` branch ## 7 | 8 | To enable branch protection, you must have permission to access the Settings 9 | menu. 10 | 11 | In `Settings`, go to the `Branches` entry and create a rule with the following: 12 | 13 | - Branch name pattern: `develop` 14 | - Note: Our primary branches are named `develop`, as a rule. 15 | - Verify that adding this name pattern then shows that it 16 | `Applies to 1 branch: develop`. 17 | 18 | - Protect matching branches 19 | - [X] Require pull request reviews before merging 20 | - Required approving reviews: `2` 21 | - [ ] Dismiss stale pull request approvals when new commits are pushed 22 | - [X] Require review from Code Owners 23 | - [X] Restrict who can dismiss pull request reviews 24 | - [X] Require status checks to pass before merging 25 | - [X] Require branches to be up to date before merging 26 | - There may be a list of status checks under this option. We require 27 | passing status checks to merge, so all status checks should generally be 28 | checked as required. 29 | - Please note that the list of status checks will not fully populate in a 30 | new repository until the first pull request (PR) has been created. 31 | - [X] Require conversation resolution before merging 32 | - [ ] Require signed commits 33 | - [ ] Require linear history 34 | - [X] Include administrators 35 | - [X] Restrict who can push to matching branches 36 | - Note: this allows by default "People, teams or apps with push access", so 37 | you likely don't have to make any changes *under* this entry 38 | 39 | - Rules applied to everyone including administrators 40 | - [ ] Allow force pushes 41 | - [ ] Allow deletions 42 | -------------------------------------------------------------------------------- /project_setup/scripts/ansible-roles: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | """Helper tool to create a requirements.yml file with all ansible-roles. 4 | 5 | Usage: 6 | ansible-roles (-h | --help) 7 | ansible-roles [--org=] 8 | 9 | Options: 10 | -h --help Show this message. 11 | -o --org= Organization to search [default: cisagov]. 12 | """ 13 | 14 | # Standard Python Libraries 15 | import sys 16 | 17 | # Third-Party Libraries 18 | import docopt 19 | from github import Github 20 | 21 | VERSION = "0.0.1" 22 | 23 | 24 | def eprint(*args, **kwargs): 25 | """Print to stderr.""" 26 | print(*args, file=sys.stderr, **kwargs) 27 | 28 | 29 | def print_reqs(org): 30 | """Output a list of all ansible-roles in requirements.yml format.""" 31 | # There is a bug in the pager (API or library) that spuriously returns 32 | # duplicate results. So set per_page high enough to get one page. 33 | g = Github(per_page=512) 34 | repos = g.search_repositories( 35 | query=f"org:{org} topic:ansible-role NOT skeleton archived:false" 36 | ) 37 | sorted_repos = sorted(repos, key=lambda r: r.html_url) 38 | eprint(f"Adding {len(sorted_repos)} ansible-roles.") 39 | prev_repo = None 40 | for repo in sorted_repos: 41 | if prev_repo and repo.id == prev_repo.id: 42 | eprint(">>> DUP >>>") 43 | # github api bug? Same repo can be returned more than once. 
44 | continue 45 | short_name = repo.name.replace("ansible-role-", "").replace("-", "_") 46 | print(f"- src: {repo.html_url}") 47 | print(f" name: {short_name}") 48 | prev_repo = repo 49 | 50 | 51 | def main(): 52 | """Parse arguments and perform requested actions.""" 53 | args = docopt.docopt(__doc__, version=VERSION) 54 | 55 | org = args["--org"] 56 | 57 | print_reqs(org) 58 | return 0 59 | 60 | 61 | if __name__ == "__main__": 62 | sys.exit(main()) 63 | -------------------------------------------------------------------------------- /project_setup/scripts/iam-to-travis: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | """Extract AWS credentials from terraform state, encrypt, and format for Travis. 4 | 5 | This command must be executed in the directory containing the .terraform state 6 | within the a GitHub project. 7 | 8 | Usage: 9 | iam-to-travis [--log-level=LEVEL] [--indent=SPACES] [--width=WIDTH] 10 | iam-to-travis (-h | --help) 11 | 12 | Options: 13 | -h --help Show this message. 14 | -i --indent=SPACES Number of spaces to indent yaml block. Minimum 2. 15 | [default: 6] 16 | --log-level=LEVEL If specified, then the log level will be set to 17 | the specified value. Valid values are "debug", "info", 18 | "warning", "error", and "critical". [default: warning] 19 | -w --width=WIDTH Maximum width of yaml block. Minimum 16. [default: 80] 20 | """ 21 | 22 | # Standard Python Libraries 23 | import json 24 | import logging 25 | import subprocess # nosec 26 | import sys 27 | 28 | # Third-Party Libraries 29 | import docopt 30 | 31 | 32 | def creds_from_child(child_module): 33 | """Search for IAM access keys in child resources. 34 | 35 | Returns (key_id, secret) if found, (None, None) otherwise. 36 | """ 37 | for resource in child_module["resources"]: 38 | if resource["address"] == "aws_iam_access_key.key": 39 | key_id = resource["values"]["id"] 40 | secret = resource["values"]["secret"] 41 | return key_id, secret 42 | return None, None 43 | 44 | 45 | def creds_from_terraform(): 46 | """Retrieve IAM credentials from terraform state. 47 | 48 | Returns (key_id, secret) if found, (None, None) otherwise. 
49 | """ 50 | c = subprocess.run( # nosec 51 | "terraform show --json", shell=True, stdout=subprocess.PIPE # nosec 52 | ) 53 | j = json.loads(c.stdout) 54 | 55 | if not j.get("values"): 56 | return None, None 57 | 58 | for child_module in j["values"]["root_module"]["child_modules"]: 59 | key_id, secret = creds_from_child(child_module) 60 | if key_id: 61 | return key_id, secret 62 | else: 63 | return None, None 64 | 65 | 66 | def wrap_for_yml(s, indent=6, width=75): 67 | """Wrap a string in yamly way.""" 68 | result = [] 69 | width = width - 1 70 | while True: 71 | result.append(s[:width]) 72 | s = s[width:] 73 | if not s: 74 | break 75 | s = " " * indent + s 76 | return "\\\n".join(result) 77 | 78 | 79 | def encrypt_for_travis(variable_name, value, indent, width): 80 | """Encrypt a value for a variable and print it as yaml.""" 81 | logging.debug(f"Encrypting {variable_name}.") 82 | command = f'travis encrypt --com --no-interactive "{variable_name}={value}"' 83 | c = subprocess.run(command, shell=True, stdout=subprocess.PIPE) # nosec 84 | s = f"{' ' * (indent - 2)}- secure: {c.stdout.decode('utf-8')}" 85 | print(f"{' ' * (indent - 2)}# {variable_name}") 86 | print(wrap_for_yml(s, indent, width)) 87 | 88 | 89 | def main(): 90 | """Set up logging and call the requested commands.""" 91 | args = docopt.docopt(__doc__, version="0.0.1") 92 | 93 | # Set up logging 94 | log_level = args["--log-level"] 95 | try: 96 | logging.basicConfig( 97 | format="%(asctime)-15s %(levelname)s %(message)s", level=log_level.upper() 98 | ) 99 | except ValueError: 100 | logging.critical( 101 | f'"{log_level}" is not a valid logging level. Possible values ' 102 | "are debug, info, warning, and error." 103 | ) 104 | return 1 105 | 106 | indent = int(args["--indent"]) 107 | width = int(args["--width"]) 108 | 109 | if width < 16: 110 | logging.error("Width must be 16 or greater.") 111 | sys.exit(-1) 112 | 113 | if indent < 2 or indent > width - 10: 114 | logging.error("Indent must be greater than 2, and less than (width - 10).") 115 | sys.exit(-1) 116 | 117 | logging.info("Searching Terraform state for IAM credentials.") 118 | key_id, secret = creds_from_terraform() 119 | if key_id is None: 120 | logging.error("Credentials not found in terraform state.") 121 | logging.error("Is there a .terraform state directory here?") 122 | sys.exit(-1) 123 | 124 | encrypt_for_travis("AWS_ACCESS_KEY_ID", key_id, indent, width) 125 | encrypt_for_travis("AWS_SECRET_ACCESS_KEY", secret, indent, width) 126 | 127 | return 0 128 | 129 | 130 | if __name__ == "__main__": 131 | sys.exit(main()) 132 | -------------------------------------------------------------------------------- /project_setup/scripts/skeleton: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | """Helper tool to start a new github project from a skeleton github repository. 4 | 5 | Usage: 6 | skeleton (-h | --help) 7 | skeleton list [--org=] 8 | skeleton clone [options] 9 | 10 | Options: 11 | -c --change-dir= Create clone in this directory. 12 | -h --help Show this message. 13 | -o --org= Organization to search [default: cisagov]. 
14 | """ 15 | 16 | # Standard Python Libraries 17 | import os 18 | from pathlib import Path 19 | import subprocess # nosec 20 | import sys 21 | 22 | # Third-Party Libraries 23 | import docopt 24 | from github import Github 25 | import yaml 26 | 27 | LINEAGE_CONFIG = Path(".github/lineage.yml") 28 | LINEAGE_CONFIG_VERSION = "1" 29 | VERSION = "0.0.1" 30 | 31 | 32 | def run(cmd, comment): 33 | """Run a command and display its output and return code.""" 34 | print("―" * 80) 35 | if comment: 36 | print(f"💬 {comment}") 37 | print(f"➤ {cmd}") 38 | proc = subprocess.run(cmd, shell=True) # nosec 39 | if proc.returncode == 0: 40 | print("✅ success") 41 | else: 42 | print(f"❌ ERROR! return code: {proc.returncode}") 43 | sys.exit(proc.returncode) 44 | 45 | 46 | def print_available_skeletons(org): 47 | """Print a list of skeleton repos available for cloning.""" 48 | g = Github() 49 | skel_repos = g.search_repositories(query=f"org:{org} topic:skeleton archived:false") 50 | print(f"Available skeletons in {org}:\n") 51 | for repo in skel_repos: 52 | print(f"{repo.name}\n\t{repo.description}\n") 53 | 54 | 55 | def clone_repo(parent_repo, new_repo, org, dir=None): 56 | """Clone a repository to a new name and prepare it for publication.""" 57 | if dir: 58 | os.chdir(dir) 59 | run( 60 | f"git clone --origin {parent_repo} git@github.com:{org}/{parent_repo}.git {new_repo}", 61 | "Clone an existing remote repository to the new name locally.", 62 | ) 63 | os.chdir(new_repo) 64 | run( 65 | f"git remote set-url --push {parent_repo} no_push", 66 | "Disable pushing to the upstream (parent) repository.", 67 | ) 68 | run( 69 | f"git remote add origin git@github.com:{org}/{new_repo}.git", 70 | "Add a new remote origin for the this repository.", 71 | ) 72 | run("git tag -d $(git tag -l)", f"Delete all local git tags from {parent_repo}") 73 | run( 74 | rf"find . \( ! -regex '.*/\.git/.*' \) -type f -exec " 75 | rf"perl -pi -e s/{parent_repo}/{new_repo}/g {{}} \;", 76 | "Search and replace repository name in source files.", 77 | ) 78 | lineage = { 79 | "version": LINEAGE_CONFIG_VERSION, 80 | "lineage": { 81 | "skeleton": {"remote-url": f"https://github.com/{org}/{parent_repo}.git"} 82 | }, 83 | } 84 | with LINEAGE_CONFIG.open("w") as f: 85 | yaml.dump(lineage, stream=f, explicit_start=True) 86 | run("git add --verbose .", "Stage modified files.") 87 | run( 88 | 'git commit --message "Rename repository references after clone."', 89 | "Commit staged files to the new repository.", 90 | ) 91 | print("―" * 80) 92 | print( 93 | f""" 94 | The repository "{parent_repo}" has been cloned and renamed to "{new_repo}". 
95 | Use the following commands to push the new repository to github: 96 | cd {os.path.join(dir, new_repo) if dir else new_repo} 97 | git push --set-upstream origin develop 98 | """ 99 | ) 100 | 101 | 102 | def main(): 103 | """Parse arguments and perform requested actions.""" 104 | args = docopt.docopt(__doc__, version=VERSION) 105 | 106 | org = args["--org"] 107 | 108 | if args["list"]: 109 | print_available_skeletons(org) 110 | elif args["clone"]: 111 | parent_repo = args[""] 112 | new_repo = args[""] 113 | dir = args["--change-dir"] 114 | clone_repo(parent_repo, new_repo, org, dir) 115 | return 0 116 | 117 | 118 | if __name__ == "__main__": 119 | sys.exit(main()) 120 | -------------------------------------------------------------------------------- /project_setup/scripts/ssm-param: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | """Manage SSM parameters across multiple regions. 4 | 5 | The cp command can copy multiple files to a parameter path. Specify each file 6 | followed by parameter that ends in a /. 7 | 8 | Usage: 9 | ssm-param cp [--log-level=LEVEL] [--overwrite] ... 10 | ssm-param rm ... 11 | ssm-param (-h | --help) 12 | 13 | Options: 14 | -h --help Show this message. 15 | --log-level=LEVEL If specified, then the log level will be set to 16 | the specified value. Valid values are "debug", "info", 17 | "warning", "error", and "critical". [default: info] 18 | -o --overwrite Overwrite existing parameters. 19 | """ 20 | 21 | # Standard Python Libraries 22 | import logging 23 | from os import path 24 | import sys 25 | 26 | # Third-Party Libraries 27 | import boto3 28 | import docopt 29 | 30 | REGIONS = ["us-east-1", "us-east-2", "us-west-1", "us-west-2"] 31 | 32 | 33 | def calc_parameter_name(filename, parameter_name): 34 | """Calculate a the parameter_name to allow "folders".""" 35 | if parameter_name.endswith("/"): 36 | return path.join(parameter_name, path.basename(filename)) 37 | else: 38 | return parameter_name 39 | 40 | 41 | def put_file(session, filename, parameter_name, overwrite, regions): 42 | """Create a parameter from the contents of a file in multiple regions.""" 43 | new_parameter_name = calc_parameter_name(filename, parameter_name) 44 | 45 | with open(filename) as f: 46 | file_contents = f.read() 47 | 48 | for region in regions: 49 | logging.debug(f"Creating client for {region}") 50 | client = session.client("ssm", region_name=region) 51 | logging.info(f"Putting parameter '{new_parameter_name}' in region '{region}'") 52 | logging.debug(f"Overwrite = {overwrite}") 53 | try: 54 | client.put_parameter( 55 | Name=new_parameter_name, 56 | Value=file_contents, 57 | Type="SecureString", 58 | Overwrite=overwrite, 59 | ) 60 | except client.exceptions.ParameterAlreadyExists: 61 | logging.warning( 62 | f"Parameter {new_parameter_name} already exists in {region}. " 63 | "Not overwriting." 
64 | ) 65 | 66 | 67 | def delete_parameter(session, parameter_name, regions): 68 | """Delete a parameter from multiple regions.""" 69 | for region in regions: 70 | logging.debug(f"Creating client for {region}") 71 | client = session.client("ssm", region_name=region) 72 | logging.info(f"Deleting parameter '{parameter_name}' in region '{region}'") 73 | try: 74 | client.delete_parameter(Name=parameter_name) 75 | except client.exceptions.ParameterNotFound: 76 | logging.warning(f"Parameter {parameter_name} does not exists in {region}.") 77 | 78 | 79 | def main(): 80 | """Set up logging and call the requested commands.""" 81 | args = docopt.docopt(__doc__, version="0.0.1") 82 | 83 | # Set up logging 84 | log_level = args["--log-level"] 85 | try: 86 | logging.basicConfig( 87 | format="%(asctime)-15s %(levelname)s %(message)s", level=log_level.upper() 88 | ) 89 | except ValueError: 90 | logging.critical( 91 | f'"{log_level}" is not a valid logging level. Possible values ' 92 | "are debug, info, warning, and error." 93 | ) 94 | return 1 95 | 96 | session = boto3.Session() 97 | 98 | if args["cp"]: 99 | # Working around docopt limitations. 100 | # No support for: ... 101 | # The last is the parameter 102 | parameter_name = args[""][-1] 103 | filenames = args[""][:-1] 104 | overwrite = args["--overwrite"] 105 | if len(filenames) > 1 and not parameter_name.endswith("/"): 106 | print("Cannot cp multiple files unless parameter_name ends in a '/'") 107 | sys.exit(-1) 108 | for filename in filenames: 109 | put_file(session, filename, parameter_name, overwrite, REGIONS) 110 | elif args["rm"]: 111 | parameter_names = args[""] 112 | for parameter_name in parameter_names: 113 | delete_parameter(session, parameter_name, REGIONS) 114 | 115 | # Stop logging and clean up 116 | logging.shutdown() 117 | return 0 118 | 119 | 120 | if __name__ == "__main__": 121 | sys.exit(main()) 122 | -------------------------------------------------------------------------------- /project_setup/scripts/terraform-to-secrets: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | """Parse Terraform state and create GitHub secrets. 4 | 5 | Secrets are created for IAM access keys, and specially tagged resources. 6 | 7 | For each IAM access key two secrets will be created: 8 | - AWS_ACCESS_KEY_ID 9 | - AWS_SECRET_ACCESS_KEY 10 | 11 | If there is only one access key, the secrets will be named as above. If more than one 12 | key exists in the Terraform state, the key's associated user will be appended to the 13 | secret's name. 14 | - AWS_ACCESS_KEY_ID_BUILDER 15 | - AWS_SECRET_ACCESS_KEY_BUILDER 16 | 17 | Resources tagged with "GitHub_Secret_Name" and "GitHub_Secret_Terraform_Lookup" will 18 | have a single secret created based on the tag contents. For example if a resource had 19 | these tags: 20 | - GitHub_Secret_Name: BUILD_ROLE_TO_ASSUME 21 | - GitHub_Secret_Terraform_Lookup: arn 22 | 23 | A secret would be generated with the name "BUILD_ROLE_TO_ASSUME" and the value of 24 | the resource's ARN. 25 | 26 | This tool is most effective when executed in the directory containing the .terraform 27 | state directory, within a GitHub project. It will attempt to detect the repository 28 | name from the project's git origin. Options exist to provide the repository name or 29 | Terraform state manually. 30 | 31 | It requires a Personal Access Token from GitHub that has "repo" access scope. Tokens 32 | can be saved to the keychain service for future use by using the "save" command. 
33 | 34 | Usage: 35 | terraform-to-secrets [options] 36 | terraform-to-secrets save 37 | 38 | terraform-to-secrets (-h | --help) 39 | 40 | Options: 41 | -d --dry-run Don't create secrets. Just log what would be created. 42 | -e --env=ENVNAME Create secrets in the specified GitHub environment. 43 | -h --help Show this message. 44 | -l --log-level=LEVEL If specified, then the log level will be set to 45 | the specified value. Valid values are "debug", "info", 46 | "warning", "error", and "critical". [default: info] 47 | -m --remote-state Look for secrets in remote state resources also. 48 | -r --repo=REPONAME Use provided repository name instead of detecting it. 49 | -s --state=JSONFILE Read state from a file instead of asking Terraform. 50 | -t --token=PAT Specify a GitHub personal access token (PAT). 51 | """ 52 | 53 | # Standard Python Libraries 54 | from base64 import b64encode 55 | import json 56 | import logging 57 | import re 58 | import subprocess # nosec : security implications have been considered 59 | import sys 60 | from typing import Any, Dict, Generator, Optional, Tuple, Union 61 | 62 | # Third-Party Libraries 63 | import docopt 64 | import keyring 65 | from nacl import encoding, public 66 | import requests 67 | from schema import And, Or, Schema, SchemaError, Use 68 | 69 | # Constants 70 | GIT_URL_RE: re.Pattern = re.compile("(?:git@|https://)github.com[:/](.*).git") 71 | GITHUB_SECRET_NAME_TAG: str = "GitHub_Secret_Name" 72 | GITHUB_SECRET_TERRAFORM_LOOKUP_TAG: str = "GitHub_Secret_Terraform_Lookup" 73 | KEYRING_SERVICE = "terraform-to-secrets" 74 | KEYRING_USERNAME = "GitHub PAT" 75 | 76 | 77 | def get_terraform_state(filename: str = "") -> Dict: 78 | """Retrieve IAM credentials from Terraform state. 79 | 80 | Returns the Terraform state as a dict. 81 | """ 82 | data: Union[str, bytes, bytearray] 83 | if filename: 84 | logging.info(f"Reading state from json file {filename}") 85 | with open(filename) as f: 86 | data = f.read() 87 | else: 88 | logging.info("Reading state from Terraform command.") 89 | process = subprocess.run( # nosec 90 | ["terraform", "show", "--json"], stdout=subprocess.PIPE 91 | ) 92 | data = process.stdout 93 | # Normally we'd check the process return code here. But Terraform is perfectly 94 | # happy to return zero even if there were no state files. 95 | json_state: Dict = json.loads(data) 96 | 97 | if not json_state.get("values"): 98 | logging.critical("Is there a .terraform state directory here?") 99 | raise Exception("No Terraform state found.") 100 | return json_state 101 | 102 | 103 | def find_tagged_secret( 104 | resource_name: str, resource_data: Dict 105 | ) -> Generator[Tuple[str, str], None, None]: 106 | """Extract a tagged secret from a resource.""" 107 | # Ensure "tags" key exists in resource_data and if it does, make sure 108 | # its value is not None. Both of these cases can occur. 
109 | tags: Dict[str, str] 110 | if "tags" not in resource_data or resource_data.get("tags") is None: 111 | tags = dict() 112 | else: 113 | tags = resource_data["tags"] 114 | 115 | secret_name: Optional[str] = tags.get(GITHUB_SECRET_NAME_TAG) 116 | lookup_tag: Optional[str] = tags.get(GITHUB_SECRET_TERRAFORM_LOOKUP_TAG) 117 | secret_value: Optional[str] 118 | if secret_name: 119 | logging.debug( 120 | f"Found {GITHUB_SECRET_NAME_TAG} on {resource_name} " 121 | f"with value {secret_name}" 122 | ) 123 | if lookup_tag: 124 | logging.debug( 125 | f"Found {GITHUB_SECRET_TERRAFORM_LOOKUP_TAG} on " 126 | f"{resource_name} with value {lookup_tag}" 127 | ) 128 | secret_value = resource_data.get(lookup_tag) 129 | if secret_value is None: 130 | logging.warning(f"Could not lookup value with key {lookup_tag}") 131 | else: 132 | logging.debug(f"Looked up value: {secret_value}") 133 | yield secret_name, secret_value 134 | else: 135 | logging.warning( 136 | f"Missing {GITHUB_SECRET_TERRAFORM_LOOKUP_TAG} on " f"{resource_name}." 137 | ) 138 | return 139 | 140 | 141 | def find_outputs( 142 | terraform_state: Dict, include_remote_state: bool 143 | ) -> Generator[Dict, None, None]: 144 | """Search for resources with outputs in the Terraform state.""" 145 | for resource in terraform_state["values"]["root_module"].get("resources", []): 146 | # Exclude remote state resources unless requested 147 | if ( 148 | not include_remote_state 149 | and resource.get("type") == "terraform_remote_state" 150 | ): 151 | continue 152 | if resource.get("values", dict()).get("outputs", dict()): 153 | yield resource["values"]["outputs"] 154 | 155 | 156 | def parse_tagged_outputs( 157 | terraform_state: Dict, 158 | include_remote_state: bool, 159 | ) -> Generator[Tuple[str, str], None, None]: 160 | """Search all outputs for tags requesting the creation of a secret.""" 161 | for outputs in find_outputs(terraform_state, include_remote_state): 162 | for output_name, output_data in outputs.items(): 163 | yield from find_tagged_secret(output_name, output_data) 164 | return 165 | 166 | 167 | def find_resources_in_child_modules( 168 | child_modules: list, resource_type: Optional[str] 169 | ) -> Generator[Dict, None, None]: 170 | """ 171 | Search for resources of a certain type in a Terraform child_modules list. 172 | 173 | resource_type None yields all resources. 174 | """ 175 | for child_module in child_modules: 176 | for resource in child_module.get("resources", []): 177 | if resource_type is None or resource["type"] == resource_type: 178 | yield resource 179 | 180 | if "child_modules" in child_module: 181 | for resource in find_resources_in_child_modules( 182 | child_module["child_modules"], resource_type 183 | ): 184 | yield resource 185 | 186 | 187 | def find_resources( 188 | terraform_state: Dict, resource_type: Optional[str] 189 | ) -> Generator[Dict, None, None]: 190 | """Search for resources of a certain type in the Terraform state. 191 | 192 | resource_type None yields all resources. 
193 | """ 194 | for resource in terraform_state["values"]["root_module"].get("resources", []): 195 | if resource_type is None or resource["type"] == resource_type: 196 | yield resource 197 | 198 | if "child_modules" in terraform_state["values"]["root_module"]: 199 | for resource in find_resources_in_child_modules( 200 | terraform_state["values"]["root_module"]["child_modules"], resource_type 201 | ): 202 | yield resource 203 | 204 | 205 | def parse_creds(terraform_state: Dict) -> Generator[Tuple[str, str, str], None, None]: 206 | """Search for IAM access keys in resources. 207 | 208 | Yields (user, key_id, secret) when found. 209 | """ 210 | for resource in find_resources(terraform_state, "aws_iam_access_key"): 211 | key_id: str = resource["values"]["id"] 212 | secret: str = resource["values"]["secret"] 213 | user: str = resource["values"]["user"] 214 | yield user, key_id, secret 215 | return 216 | 217 | 218 | def parse_tagged_resources( 219 | terraform_state: Dict, 220 | include_remote_state: bool, 221 | ) -> Generator[Tuple[str, str], None, None]: 222 | """Search all resources for tags requesting the creation of a secret.""" 223 | for resource in find_resources(terraform_state, None): 224 | # Exclude remote state resources unless requested 225 | if ( 226 | not include_remote_state 227 | and resource.get("type") == "terraform_remote_state" 228 | ): 229 | continue 230 | yield from find_tagged_secret(resource["address"], resource["values"]) 231 | return 232 | 233 | 234 | def encrypt(public_key: str, secret_value: str) -> str: 235 | """Encrypt a Unicode string using the public key.""" 236 | public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) 237 | sealed_box = public.SealedBox(public_key) 238 | encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) 239 | return b64encode(encrypted).decode("utf-8") 240 | 241 | 242 | def get_public_key(session: requests.Session, repo_name, github_env) -> Dict[str, str]: 243 | """Fetch the public key for a repository or environment.""" 244 | if github_env: 245 | logging.info( 246 | f"Requesting public key for environment {github_env} in {repo_name}" 247 | ) 248 | response = session.get( 249 | f"https://api.github.com/repos/{repo_name}/environments/{github_env}/secrets/public-key" 250 | ) 251 | else: 252 | logging.info(f"Requesting public key for repository {repo_name}") 253 | response = session.get( 254 | f"https://api.github.com/repos/{repo_name}/actions/secrets/public-key" 255 | ) 256 | response.raise_for_status() 257 | return response.json() 258 | 259 | 260 | def set_secret( 261 | session: requests.Session, 262 | repo_name: str, 263 | github_env: str, 264 | secret_name: str, 265 | secret_value: str, 266 | public_key: Dict[str, str], 267 | ) -> None: 268 | """Create a secret in a repository or environment.""" 269 | if github_env: 270 | logging.info(f"Creating secret {secret_name} in environment {github_env}") 271 | api_url = f"https://api.github.com/repos/{repo_name}/environments/{github_env}/secrets/{secret_name}" 272 | else: 273 | logging.info(f"Creating repository secret {secret_name}") 274 | api_url = ( 275 | f"https://api.github.com/repos/{repo_name}/actions/secrets/{secret_name}" 276 | ) 277 | encrypted_secret_value = encrypt(public_key["key"], secret_value) 278 | response = session.put( 279 | api_url, 280 | json={ 281 | "encrypted_value": encrypted_secret_value, 282 | "key_id": public_key["key_id"], 283 | }, 284 | ) 285 | response.raise_for_status() 286 | 287 | 288 | def get_repo_name() -> str: 289 | """Get the 
repository name using git.""" 290 | logging.debug("Trying to determine GitHub repository name using git.") 291 | c = subprocess.run( # nosec 292 | ["git", "remote", "get-url", "origin"], 293 | stdout=subprocess.PIPE, 294 | stderr=subprocess.PIPE, 295 | ) 296 | if c.returncode != 0: 297 | logging.critical("Could not determine GitHub repository name.") 298 | raise Exception(c.stderr) 299 | match = GIT_URL_RE.match(c.stdout.decode()) 300 | if match: 301 | repo_name = match.groups()[0] # type: ignore 302 | else: 303 | logging.critical("Could not determine GitHub repository name.") 304 | logging.critical("Use the --repo option to specify it manually.") 305 | raise Exception("Could not determine GitHub repository name.") 306 | return repo_name 307 | 308 | 309 | def get_users(terraform_state: Dict) -> Dict[str, Tuple[str, str]]: 310 | """Return a dictionary of users. 311 | 312 | Returns: a dictionary mapping usernames to (key_id, key_secret) 313 | """ 314 | aws_user: Optional[str] = None 315 | aws_key_id: Optional[str] = None 316 | aws_secret: Optional[str] = None 317 | user_creds: Dict[str, Tuple[str, str]] = dict() 318 | 319 | logging.info("Searching Terraform state for IAM credentials.") 320 | for aws_user, aws_key_id, aws_secret in parse_creds(terraform_state): 321 | logging.info(f"Found credentials for user: {aws_user}") 322 | user_creds[aws_user] = (aws_key_id, aws_secret) 323 | 324 | if len(user_creds) == 0: 325 | logging.warning("No users found.") 326 | return user_creds 327 | 328 | 329 | def get_resource_secrets( 330 | terraform_state: Dict, include_remote_state: bool 331 | ) -> Dict[str, str]: 332 | """Collect secrets from tagged Terraform resources.""" 333 | secrets: Dict[str, str] = dict() 334 | logging.info("Searching Terraform state for tagged resources.") 335 | for secret_name, secret_value in parse_tagged_resources( 336 | terraform_state, include_remote_state 337 | ): 338 | logging.info(f"Found secret: {secret_name}") 339 | secrets[secret_name] = secret_value 340 | for secret_name, secret_value in parse_tagged_outputs( 341 | terraform_state, include_remote_state 342 | ): 343 | logging.info(f"Found secret: {secret_name}") 344 | secrets[secret_name] = secret_value 345 | return secrets 346 | 347 | 348 | def create_user_secrets(user_creds: Dict[str, Tuple[str, str]]) -> Dict[str, str]: 349 | """Create secrets for user key IDs and key values.""" 350 | secrets: Dict[str, str] = dict() 351 | for user_name, creds in user_creds.items(): 352 | # If there is more than one user add the name as a suffix 353 | if len(user_creds) > 1: 354 | # Convert the username into an environment variable-safe form 355 | suffix = ("_" + re.sub(r"\W", "_", user_name)).upper() 356 | else: 357 | suffix = "" 358 | secrets["AWS_ACCESS_KEY_ID" + suffix] = creds[0] 359 | secrets["AWS_SECRET_ACCESS_KEY" + suffix] = creds[1] 360 | return secrets 361 | 362 | 363 | def create_all_secrets( 364 | secrets: Dict[str, str], 365 | github_env: str, 366 | github_token: str, 367 | repo_name: str, 368 | dry_run: bool = False, 369 | ) -> None: 370 | """Log into GitHub and create all encrypted secrets.""" 371 | logging.info("Creating GitHub API session using personal access token.") 372 | session: requests.Session = requests.Session() 373 | session.auth = ("", github_token) 374 | 375 | # If an environment is specified, verify that it exists 376 | if github_env: 377 | logging.info(f"Checking if environment {github_env} exists") 378 | response = session.get( 379 | f"https://api.github.com/repos/{repo_name}/environments/{github_env}" 
380 | ) 381 | if response.status_code != 200: 382 | logging.critical(f"Environment {github_env} not found in {repo_name}.") 383 | raise Exception(f"Environment {github_env} not found in {repo_name}.") 384 | 385 | # Get the repo or environment public key to be used to encrypt secrets 386 | public_key: Dict[str, str] = get_public_key(session, repo_name, github_env) 387 | 388 | for secret_name, secret_value in secrets.items(): 389 | if dry_run: 390 | logging.info(f"Would create secret {secret_name}") 391 | else: 392 | set_secret( 393 | session, repo_name, github_env, secret_name, secret_value, public_key 394 | ) 395 | 396 | 397 | def main() -> int: 398 | """Set up logging and call the requested commands.""" 399 | args: Dict[str, Any] = docopt.docopt(__doc__, version="1.1.0") 400 | 401 | # Validate and convert arguments as needed 402 | schema: Schema = Schema( 403 | { 404 | "": Or( 405 | None, 406 | And( 407 | str, 408 | lambda n: len(n) == 40, 409 | error="--token must be a 40 character personal access token.", 410 | ), 411 | ), 412 | "--log-level": And( 413 | str, 414 | Use(str.lower), 415 | lambda n: n in ("debug", "info", "warning", "error", "critical"), 416 | error="Possible values for --log-level are " 417 | "debug, info, warning, error, and critical.", 418 | ), 419 | "--repo": Or( 420 | None, 421 | And( 422 | str, 423 | lambda n: "/" in n, 424 | error='Repository names must contain a "/"', 425 | ), 426 | ), 427 | "--token": Or( 428 | None, 429 | And( 430 | str, 431 | lambda n: len(n) == 40, 432 | error="--token must be a 40 character personal access token.", 433 | ), 434 | ), 435 | str: object, # Don't care about other keys, if any 436 | } 437 | ) 438 | 439 | try: 440 | validated_args: Dict[str, Any] = schema.validate(args) 441 | except SchemaError as err: 442 | # Exit because one or more of the arguments were invalid 443 | print(err, file=sys.stderr) 444 | return 1 445 | 446 | # Assign validated arguments to variables 447 | dry_run: bool = validated_args["--dry-run"] 448 | github_env: str = validated_args["--env"] 449 | github_token_to_save: str = validated_args[""] 450 | log_level: str = validated_args["--log-level"] 451 | include_remote_state: bool = validated_args["--remote-state"] 452 | repo_name: str = validated_args["--repo"] 453 | state_filename: str = validated_args["--state"] 454 | github_token: str = validated_args["--token"] 455 | 456 | # Set up logging 457 | logging.basicConfig( 458 | format="%(asctime)-15s %(levelname)s %(message)s", level=log_level.upper() 459 | ) 460 | 461 | # Just save the GitHub token to the keyring and exit. 462 | if validated_args["save"]: 463 | logging.info("Saving the GitHub personal access token to the keyring.") 464 | keyring.set_password(KEYRING_SERVICE, KEYRING_USERNAME, github_token_to_save) 465 | logging.info("Success!") 466 | return 0 467 | 468 | # If the user does not provide a repo name we'll try to determine it from git 469 | if not repo_name: 470 | repo_name = get_repo_name() 471 | logging.info(f"Using GitHub repository name: {repo_name}") 472 | 473 | if github_token is None: 474 | logging.debug("GitHub token not provided in arguments. Checking keyring.") 475 | github_token = keyring.get_password(KEYRING_SERVICE, KEYRING_USERNAME) 476 | if github_token is None: 477 | logging.critical( 478 | "GitHub token not provided on command line or found in keychain." 
479 | ) 480 | return -1 481 | else: 482 | logging.info("GitHub token retrieved from keyring.") 483 | 484 | # Get the state from Terraform or a json file 485 | terraform_state: Dict = get_terraform_state(state_filename) 486 | 487 | # Users mapped to their (key, secret) 488 | user_creds: Dict[str, Tuple[str, str]] = get_users(terraform_state) 489 | 490 | # User secrets created from credentials. Names mapped to value. 491 | user_secrets: Dict[str, str] = create_user_secrets(user_creds) 492 | 493 | # Secrets created from tagged resources. Names mapped to value. 494 | resource_secrets: Dict[str, str] = get_resource_secrets( 495 | terraform_state, include_remote_state 496 | ) 497 | 498 | # Check if there are overlaps in the keys. 499 | if not user_secrets.keys().isdisjoint(resource_secrets.keys()): 500 | logging.warning("User secret names overlap with resource secret names.") 501 | 502 | # Merge the two dictionaries together 503 | all_secrets: Dict[str, str] = resource_secrets.copy() 504 | all_secrets.update(user_secrets) 505 | 506 | # All the ducks are in a row, let's do this thang! 507 | create_all_secrets(all_secrets, github_env, github_token, repo_name, dry_run) 508 | 509 | logging.info("Success!") 510 | 511 | return 0 512 | 513 | 514 | if __name__ == "__main__": 515 | sys.exit(main()) 516 | -------------------------------------------------------------------------------- /project_setup/skeleton-list.md: -------------------------------------------------------------------------------- 1 | # Skeleton Repositories # 2 | 3 | We maintain a number of [`cisagov` skeleton projects](https://github.com/search?q=org%3Acisagov+topic%3Askeleton) 4 | to make starting new repositories quicker and to simplify maintaining 5 | configurations and dependencies across the organization. 6 | 7 | The [`skeleton`](scripts/skeleton) helper tool is included in the [`scripts`](scripts) 8 | directory of the [development-guide](https://github.com/cisagov/development-guide) 9 | repository. 10 | 11 | - To skeletonize an existing repository, please see the guide to 12 | [skeletonize an existing repository](skeletonize-existing-repository.md). 13 | - To skeletonize a new repository, please see the guide to 14 | [start a new repository from a skeleton](new-repository-from-skeleton.md). 15 | 16 | ## List available skeletons with the command-line tool ## 17 | 18 | After you've run `setup-env` in this repository, the `skeleton` tool will be 19 | available to you via the command line. To see a list of available skeletons, 20 | use the following command: 21 | 22 | ```bash 23 | skeleton list 24 | ``` 25 | 26 | ## Available skeletons in cisagov ## 27 | 28 | [`skeleton-generic`](https://github.com/cisagov/skeleton-generic): 29 | A generic skeleton project for quickly getting a new cisagov project started. 30 | 31 | [`skeleton-python-library`](https://github.com/cisagov/skeleton-python-library): 32 | A skeleton project for quickly getting a new cisagov Python library started. 33 | 34 | [`skeleton-docker`](https://github.com/cisagov/skeleton-docker): 35 | A skeleton project for quickly getting a new cisagov Docker container started. 36 | 37 | [`skeleton-tf-module`](https://github.com/cisagov/skeleton-tf-module): 38 | A skeleton project for quickly getting a new cisagov Terraform module started. 39 | 40 | [`skeleton-ansible-role`](https://github.com/cisagov/skeleton-ansible-role): 41 | A skeleton project for quickly getting a new cisagov Ansible role started. 
42 | 43 | [`skeleton-ansible-role-with-test-user`](https://github.com/cisagov/skeleton-ansible-role-with-test-user): 44 | A skeleton project for quickly getting a new cisagov Ansible role started when 45 | that role requires an AWS test user. 46 | 47 | [`skeleton-packer`](https://github.com/cisagov/skeleton-packer): 48 | A skeleton project for quickly getting a new cisagov packer project started. 49 | 50 | [`skeleton-aws-lambda`](https://github.com/cisagov/skeleton-aws-lambda): 51 | A skeleton project for quickly getting a new cisagov Python AWS Lambda started. 52 | 53 | Note: If you discover a new flavor of skeleton that doesn't yet exist, use 54 | `skeleton-generic` to create it, or add an issue to this repository to get 55 | assistance. 56 | -------------------------------------------------------------------------------- /project_setup/skeletonize-existing-repository.md: -------------------------------------------------------------------------------- 1 | # Skeletonize an Existing Repository # 2 | 3 | Skeletonizing a repository standardizes our development setup and environment, 4 | and it enables our [Lineage GitHub Action](https://github.com/cisagov/action-lineage/) 5 | to keep the repository updated and standardized. 6 | 7 | ## About ## 8 | 9 | Skeleton projects contain [licensing information](LICENSE), as 10 | well as [pre-commit hooks](https://pre-commit.com) and 11 | [GitHub Actions](https://github.com/features/actions) configurations 12 | appropriate for the major languages that we use. This lets us standardize 13 | [cisagov](https://github.com/cisagov) GitHub projects to a 14 | [list of cisagov skeleton projects](https://github.com/search?q=org%3Acisagov+skeleton&type=Repositories). 15 | 16 | ## General overview ## 17 | 18 | The general outline of how to add a skeleton to a repository is: 19 | 20 | 1. [Add the skeleton as a remote](#add-a-skeleton-as-remote) to the 21 | non-skeletonized repository 22 | 1. Pull with `--allow-unrelated-histories` 23 | 1. [Fix all the inevitable conflicts](#fix-merge-conflicts) 24 | 1. [Review non-conflicting changes](#review-non-conflicting-changes) to 25 | prevent merging destructive upstream changes 26 | 1. [Update skeleton's `example` references](#update-skeletons-example-references) 27 | 1. [Set up pre-commit](#set-up-your-environment-and-pre-commit-hooks) 28 | 1. Fix additional problems that may arise 29 | 1. [Make a pull request](#make-a-pull-request) 30 | 31 | ## Add a skeleton as remote ## 32 | 33 | First, decide which of the available skeletons fits your existing repository. 34 | To see a list of available skeletons, use the `skeleton list` command or see 35 | the [list of skeletons](skeleton-list.md). 36 | 37 | As an example, we'll be using [`skeleton-python-library`](https://github.com/cisagov/skeleton-python-library) 38 | in this document. 39 | 40 | ```sh 41 | cd 42 | git remote add skeleton-parent git@github.com:cisagov/skeleton-python-library.git 43 | 44 | # You can verify the remote has been added by 45 | git remote --verbose 46 | 47 | # Create a new branch for this work 48 | git checkout -b skeletonize 49 | 50 | # Pull skeleton's history 51 | git pull skeleton-parent develop --allow-unrelated-histories 52 | ``` 53 | 54 | ## Set up your environment and pre-commit hooks ## 55 | 56 | Follow the instructions in [CONTRIBUTING.md on setting up pre-commit](../CONTRIBUTING.md#setting-up-pre-commit) 57 | to run `setup-env` and enable the `pre-commit` hooks. 
If you have already set 58 | up the prerequisites, this involves: 59 | 60 | ```sh 61 | # In the root directory of the repository 62 | ./setup-env 63 | pre-commit install 64 | ``` 65 | 66 | ## Fix merge conflicts ## 67 | 68 | This merge process will almost certainly fail, resulting in merge conflicts. 69 | The next step is to fix those conflicts and add the files once the fixes are 70 | in place. 71 | 72 | ```sh 73 | # Determine which files need fixes 74 | git status 75 | 76 | # After fixing the merge conflicts in a file, add it 77 | git add <file> 78 | 79 | ... 80 | # When all conflicts have been fixed and added, commit to complete the merge 81 | # Remember to add a descriptive and useful commit message 82 | git commit 83 | ``` 84 | 85 | ## Review non-conflicting changes ## 86 | 87 | Fixing merge conflicts is not the only task. It is also important to look at 88 | the unconflicted changes listed in the outputs of `git status` and 89 | `git diff origin/develop` and verify that you want to include all those 90 | changes. 91 | 92 | ```sh 93 | git diff origin/develop 94 | ``` 95 | 96 | This step is often overlooked because it is rarely needed, but it can save you 97 | from merging in destructive upstream changes. 98 | 99 | ## Update skeleton's `example` references ## 100 | 101 | This step includes such activities as: 102 | 103 | - Update `setup.py` with non-example information 104 | - Arrange into appropriate folders, such as `src` and `test` 105 | - Update the `codeowners` to reflect subject matter expertise and 106 | codebase familiarity 107 | - Aim to have at least two codeowners for every repository 108 | 109 | Some skeletons need additional configuration, such as with 110 | `skeleton-python-library` and its module structure inside `src/example`. 111 | 112 | ## Run pre-commit against existing files ## 113 | 114 | The skeleton brings with it our standard pre-commit hook 115 | configurations, including linting and other checks; `setup-env` installs the corresponding git hooks. 116 | 117 | ```sh 118 | # Check all existing files 119 | pre-commit run --all-files 120 | ``` 121 | 122 | The linters will automatically fix files where they can; however, you are 123 | probably looking at a long list of updates to make before automated checks 124 | will pass. You may want to send this output to a file to make it easier to 125 | review, e.g. `pre-commit run --all-files > fixme.txt`. 126 | 127 | ### isort ### 128 | 129 | For our `skeleton-python-library` example, you'll need to do some 130 | configuration with `isort` and `.isort.cfg` to deconflict packages. 131 | 132 | - Remove known-first-party and known-third-party packages so the tool will 133 | auto-populate them during the `pre-commit` step. 134 | - Manually add your package name (i.e. in `src`) as known-first-party. 135 | 136 | ### Coveralls ### 137 | 138 | If the repository needs coverage checks and integration with 139 | [Coveralls](https://coveralls.io/github/cisagov): 140 | 141 | - Modify the `.coveragerc` to point to the src package 142 | - Add appropriate secrets so they're available to the Actions workflow, 143 | e.g. add a token from [Coveralls](https://coveralls.io/github/cisagov) to the 144 | repository's secrets as `secrets.COVERALLS_REPO_TOKEN` for the repo badge. 145 | 146 | ### pytest ### 147 | 148 | For Python projects, run `pytest` manually to verify that your newly-updated 149 | repository still passes its test suite.
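For example, a minimal check might look like the following (assuming the test requirements, including `pytest`, were installed into your virtual environment by `setup-env`):

```sh
# Run the existing test suite from the root of the repository
pytest

# If pytest-cov is installed, also generate a coverage report
pytest --cov
```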
150 | 151 | ## Make a pull request ## 152 | 153 | Once you've run through the configuration and testing stages, you've probably 154 | accumulated a number of commits on your `skeletonize` branch. 155 | 156 | The next step is to [make a pull request](https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/creating-a-pull-request) 157 | and have the team perform code reviews. 158 | -------------------------------------------------------------------------------- /requirements-dev.txt: -------------------------------------------------------------------------------- 1 | --requirement requirements-test.txt 2 | ipython 3 | # The bump-version script requires at least version 3 of semver. 4 | semver>=3 5 | -------------------------------------------------------------------------------- /requirements-test.txt: -------------------------------------------------------------------------------- 1 | -e .[test] 2 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | -e . 2 | -------------------------------------------------------------------------------- /setup-env: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -o nounset 4 | set -o errexit 5 | set -o pipefail 6 | 7 | USAGE=$( 8 | cat << 'END_OF_LINE' 9 | Configure a development environment for this repository. 10 | 11 | It does the following: 12 | - Allows the user to specify the Python version to use for the virtual environment. 13 | - Allows the user to specify a name for the virtual environment. 14 | - Verifies pyenv and pyenv-virtualenv are installed. 15 | - Creates the Python virtual environment. 16 | - Configures the activation of the virtual environment for the repo directory. 17 | - Installs the requirements needed for development. 18 | - Installs git pre-commit hooks. 19 | - Configures git remotes for upstream "lineage" repositories. 20 | 21 | Usage: 22 | setup-env [--venv-name venv_name] [--python-version python_version] 23 | setup-env (-h | --help) 24 | 25 | Options: 26 | -f | --force Delete virtual environment if it already exists. 27 | -h | --help Show this message. 28 | -i | --install-hooks Install hook environments for all environments in the 29 | pre-commit config file. 30 | -l | --list-versions List available Python versions and select one interactively. 31 | -v | --venv-name Specify the name of the virtual environment. 32 | -p | --python-version Specify the Python version for the virtual environment. 33 | 34 | END_OF_LINE 35 | ) 36 | 37 | # Display pyenv's installed Python versions 38 | python_versions() { 39 | pyenv versions --bare --skip-aliases --skip-envs 40 | } 41 | 42 | check_python_version() { 43 | local version=$1 44 | 45 | # This is a valid regex for semantically correct Python version strings.
46 | # For more information see here: 47 | # https://semver.org/#is-there-a-suggested-regular-expression-regex-to-check-a-semver-string 48 | # Break down the regex into readable parts major.minor.patch 49 | local major="0|[1-9]\d*" 50 | local minor="0|[1-9]\d*" 51 | local patch="0|[1-9]\d*" 52 | 53 | # Splitting the prerelease part for readability 54 | # Start of the prerelease 55 | local prerelease="(?:-" 56 | # Numeric or alphanumeric identifiers 57 | local prerelease+="(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)" 58 | # Additional dot-separated identifiers 59 | local prerelease+="(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*" 60 | # End of the prerelease, making it optional 61 | local prerelease+=")?" 62 | # Optional build metadata 63 | local build="(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?" 64 | 65 | # Final regex composed of parts 66 | local regex="^($major)\.($minor)\.($patch)$prerelease$build$" 67 | 68 | # This checks if the Python version does not match the regex pattern specified in $regex, 69 | # using Perl for regex matching. If the pattern is not found, then prompt the user with 70 | # the invalid version message. 71 | if ! echo "$version" | perl -ne "exit(!/$regex/)"; then 72 | echo "Invalid version of Python: Python follows semantic versioning," \ 73 | "so any version string that is not a valid semantic version is an" \ 74 | "invalid version of Python." 75 | exit 1 76 | # Else if the Python version isn't installed then notify the user. 77 | # grep -E is used for searching through text lines that match the 78 | # specific version. 79 | elif ! python_versions | grep -E "^${version}$" > /dev/null; then 80 | echo "Error: Python version $version is not installed." 81 | echo "Installed Python versions are:" 82 | python_versions 83 | exit 1 84 | else 85 | echo "Using Python version $version" 86 | fi 87 | } 88 | 89 | # Flag to force deletion and creation of virtual environment 90 | FORCE=0 91 | 92 | # Initialize the other flags 93 | INSTALL_HOOKS=0 94 | LIST_VERSIONS=0 95 | PYTHON_VERSION="" 96 | VENV_NAME="" 97 | 98 | # Define long options 99 | LONGOPTS="force,help,install-hooks,list-versions,python-version:,venv-name:" 100 | 101 | # Define short options for getopt 102 | SHORTOPTS="fhilp:v:" 103 | 104 | # Check for GNU getopt by matching a specific pattern ("getopt from util-linux") 105 | # in its version output. This approach presumes the output format remains stable. 106 | # Be aware that format changes could invalidate this check. 107 | if [[ $(getopt --version 2> /dev/null) != *"getopt from util-linux"* ]]; then 108 | cat << 'END_OF_LINE' 109 | 110 | Please note, this script requires GNU getopt due to its enhanced 111 | functionality and compatibility with certain script features that 112 | are not supported by the POSIX getopt found in some systems, particularly 113 | those with a non-GNU version of getopt. This distinction is crucial 114 | as a system might have a non-GNU version of getopt installed by default, 115 | which could lead to unexpected behavior. 116 | 117 | On macOS, we recommend installing brew (https://brew.sh/). Then installation 118 | is as simple as `brew install gnu-getopt` and adding this to your 119 | profile: 120 | 121 | export PATH="$(brew --prefix)/opt/gnu-getopt/bin:$PATH" 122 | 123 | GNU getopt must be explicitly added to the PATH since it 124 | is keg-only (https://docs.brew.sh/FAQ#what-does-keg-only-mean). 
125 | 126 | END_OF_LINE 127 | exit 1 128 | fi 129 | 130 | # Check to see if pyenv is installed 131 | if [ -z "$(command -v pyenv)" ] || { [ -z "$(command -v pyenv-virtualenv)" ] && [ ! -f "$(pyenv root)/plugins/pyenv-virtualenv/bin/pyenv-virtualenv" ]; }; then 132 | echo "pyenv and pyenv-virtualenv are required." 133 | if [[ "$OSTYPE" == "darwin"* ]]; then 134 | cat << 'END_OF_LINE' 135 | 136 | On macOS, we recommend installing brew, https://brew.sh/. Then installation 137 | is as simple as `brew install pyenv pyenv-virtualenv` and adding this to your 138 | profile: 139 | 140 | eval "$(pyenv init -)" 141 | eval "$(pyenv virtualenv-init -)" 142 | 143 | END_OF_LINE 144 | 145 | fi 146 | cat << 'END_OF_LINE' 147 | For Linux, Windows Subsystem for Linux (WSL), or macOS (if you don't want 148 | to use "brew") you can use https://github.com/pyenv/pyenv-installer to install 149 | the necessary tools. Before running this ensure that you have installed the 150 | prerequisites for your platform according to the pyenv wiki page, 151 | https://github.com/pyenv/pyenv/wiki/common-build-problems. 152 | 153 | On WSL you should treat your platform as whatever Linux distribution you've 154 | chosen to install. 155 | 156 | Once you have installed "pyenv" you will need to add the following lines to 157 | your ".bashrc": 158 | 159 | export PATH="$PATH:$HOME/.pyenv/bin" 160 | eval "$(pyenv init -)" 161 | eval "$(pyenv virtualenv-init -)" 162 | END_OF_LINE 163 | exit 1 164 | fi 165 | 166 | # Use GNU getopt to parse options 167 | if ! PARSED=$(getopt --options $SHORTOPTS --longoptions $LONGOPTS --name "$0" -- "$@"); then 168 | echo "Error parsing options" 169 | exit 1 170 | fi 171 | eval set -- "$PARSED" 172 | 173 | while true; do 174 | case "$1" in 175 | -f | --force) 176 | FORCE=1 177 | shift 178 | ;; 179 | -h | --help) 180 | echo "$USAGE" 181 | exit 0 182 | ;; 183 | -i | --install-hooks) 184 | INSTALL_HOOKS=1 185 | shift 186 | ;; 187 | -l | --list-versions) 188 | LIST_VERSIONS=1 189 | shift 190 | ;; 191 | -p | --python-version) 192 | PYTHON_VERSION="$2" 193 | shift 2 194 | # Check the Python version being passed in. 195 | check_python_version "$PYTHON_VERSION" 196 | ;; 197 | -v | --venv-name) 198 | VENV_NAME="$2" 199 | shift 2 200 | ;; 201 | --) 202 | shift 203 | break 204 | ;; 205 | *) 206 | # Unreachable due to GNU getopt handling all options 207 | echo "Programming error" 208 | exit 64 209 | ;; 210 | esac 211 | done 212 | 213 | # Determine the virtual environment name 214 | if [ -n "$VENV_NAME" ]; then 215 | # Use the user-provided environment name 216 | env_name="$VENV_NAME" 217 | else 218 | # Set the environment name to the last part of the working directory. 219 | env_name=${PWD##*/} 220 | fi 221 | 222 | # List Python versions and select one interactively. 223 | if [ $LIST_VERSIONS -ne 0 ]; then 224 | echo Available Python versions: 225 | python_versions 226 | # Read the user's desired Python version. 227 | # -r: treat backslashes as literal, -p: display prompt before input. 228 | read -r -p "Enter the desired Python version: " PYTHON_VERSION 229 | # Check the Python version being passed in. 230 | check_python_version "$PYTHON_VERSION" 231 | fi 232 | 233 | # Remove any lingering local configuration. 234 | if [ $FORCE -ne 0 ]; then 235 | rm -f .python-version 236 | pyenv virtualenv-delete --force "${env_name}" || true 237 | elif [[ -f .python-version ]]; then 238 | cat << 'END_OF_LINE' 239 | An existing .python-version file was found. 
Either remove this file yourself 240 | or re-run with the --force option to have it deleted along with the associated 241 | virtual environment. 242 | 243 | rm .python-version 244 | 245 | END_OF_LINE 246 | exit 1 247 | fi 248 | 249 | # Create a new virtual environment for this project 250 | # 251 | # If $PYTHON_VERSION is undefined then the current pyenv Python version will be used. 252 | # 253 | # We can't quote ${PYTHON_VERSION:=} below since if the variable is 254 | # undefined then we want nothing to appear; this is the reason for the 255 | # "shellcheck disable" line below. 256 | # 257 | # shellcheck disable=SC2086 258 | if ! pyenv virtualenv ${PYTHON_VERSION:=} "${env_name}"; then 259 | cat << END_OF_LINE 260 | An existing virtual environment named $env_name was found. Either delete this 261 | environment yourself or re-run with the --force option to have it deleted. 262 | 263 | pyenv virtualenv-delete ${env_name} 264 | 265 | END_OF_LINE 266 | exit 1 267 | fi 268 | 269 | # Set the local application-specific Python version(s) by writing the 270 | # version name to a file named `.python-version'. 271 | pyenv local "${env_name}" 272 | 273 | # Upgrade pip and friends 274 | python3 -m pip install --upgrade pip setuptools wheel 275 | 276 | # Find a requirements file (if possible) and install 277 | for req_file in "requirements-dev.txt" "requirements-test.txt" "requirements.txt"; do 278 | if [[ -f $req_file ]]; then 279 | pip install --requirement $req_file 280 | break 281 | fi 282 | done 283 | 284 | # Install git pre-commit hooks now or later. 285 | pre-commit install ${INSTALL_HOOKS:+"--install-hooks"} 286 | 287 | # Setup git remotes from lineage configuration 288 | # This could fail if the remotes are already setup, but that is ok. 289 | set +o errexit 290 | 291 | eval "$( 292 | python3 << 'END_OF_LINE' 293 | from pathlib import Path 294 | import yaml 295 | import sys 296 | 297 | LINEAGE_CONFIG = Path(".github/lineage.yml") 298 | 299 | if not LINEAGE_CONFIG.exists(): 300 | print("No lineage configuration found.", file=sys.stderr) 301 | sys.exit(0) 302 | 303 | with LINEAGE_CONFIG.open("r") as f: 304 | lineage = yaml.safe_load(stream=f) 305 | 306 | if lineage["version"] == "1": 307 | for parent_name, v in lineage["lineage"].items(): 308 | remote_url = v["remote-url"] 309 | print(f"git remote add {parent_name} {remote_url};") 310 | print(f"git remote set-url --push {parent_name} no_push;") 311 | else: 312 | print(f'Unsupported lineage version: {lineage["version"]}', file=sys.stderr) 313 | END_OF_LINE 314 | )" 315 | 316 | # Qapla' 317 | echo "Success!" 318 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | """ 2 | This is the setup module for the example project. 
3 | 4 | Based on: 5 | 6 | - https://packaging.python.org/distributing/ 7 | - https://github.com/pypa/sampleproject/blob/master/setup.py 8 | - https://blog.ionelmc.ro/2014/05/25/python-packaging/#the-structure 9 | """ 10 | 11 | # Standard Python Libraries 12 | import codecs 13 | from os.path import abspath, dirname, join 14 | 15 | # Third-Party Libraries 16 | from setuptools import setup 17 | 18 | 19 | def readme(): 20 | """Read in and return the contents of the project's README.md file.""" 21 | with open("README.md", encoding="utf-8") as f: 22 | return f.read() 23 | 24 | 25 | # Below two methods were pulled from: 26 | # https://packaging.python.org/guides/single-sourcing-package-version/ 27 | def read(rel_path): 28 | """Open a file for reading from a given relative path.""" 29 | here = abspath(dirname(__file__)) 30 | with codecs.open(join(here, rel_path), "r") as fp: 31 | return fp.read() 32 | 33 | 34 | def get_version(version_file): 35 | """Extract a version number from the given file path.""" 36 | for line in read(version_file).splitlines(): 37 | if line.startswith("__version__"): 38 | delim = '"' if '"' in line else "'" 39 | return line.split(delim)[1] 40 | raise RuntimeError("Unable to find version string.") 41 | 42 | 43 | setup( 44 | name="project_setup", 45 | # Versions should comply with PEP440 46 | version="1.0.0", 47 | description="Documentation for Github projects in the cisagov organization.", 48 | long_description=readme(), 49 | long_description_content_type="text/markdown", 50 | # Landing page for CISA's cybersecurity mission 51 | url="https://www.cisa.gov/cybersecurity", 52 | # Additional URLs for this project per 53 | # https://packaging.python.org/guides/distributing-packages-using-setuptools/#project-urls 54 | project_urls={ 55 | "Source": "https://github.com/cisagov/development-guide", 56 | "Tracker": "https://github.com/cisagov/development-guide/issues", 57 | }, 58 | # Author details 59 | author="Cybersecurity and Infrastructure Security Agency", 60 | author_email="github@cisa.dhs.gov", 61 | license="License :: CC0 1.0 Universal (CC0 1.0) Public Domain Dedication", 62 | # See https://pypi.python.org/pypi?%3Aaction=list_classifiers 63 | classifiers=[ 64 | # How mature is this project? Common values are 65 | # 3 - Alpha 66 | # 4 - Beta 67 | # 5 - Production/Stable 68 | "Development Status :: 3 - Alpha", 69 | # Indicate who your project is intended for 70 | "Intended Audience :: Developers", 71 | # Pick your license as you wish (should match "license" above) 72 | "License :: CC0 1.0 Universal (CC0 1.0) Public Domain Dedication", 73 | # Specify the Python versions you support here. In particular, ensure 74 | # that you indicate whether you support Python 2, Python 3 or both. 75 | "Programming Language :: Python :: 3", 76 | "Programming Language :: Python :: 3.7", 77 | "Programming Language :: Python :: 3.8", 78 | "Programming Language :: Python :: 3.9", 79 | "Programming Language :: Python :: 3.10", 80 | "Programming Language :: Python :: 3.11", 81 | "Programming Language :: Python :: 3.12", 82 | "Programming Language :: Python :: Implementation :: CPython", 83 | ], 84 | python_requires=">=3.7", 85 | # What does your project relate to? 
86 | keywords="documentation", 87 | package_dir={"": "project_setup/scripts"}, 88 | install_requires=[ 89 | "boto3", 90 | "docopt", 91 | "keyring", 92 | "PyGithub", 93 | "PyNaCl", 94 | "pyyaml", 95 | "schema", 96 | "setuptools >= 24.2.0", 97 | "wheel", 98 | ], 99 | extras_require={ 100 | "test": [ 101 | "pre-commit", 102 | "coveralls", 103 | "coverage < 7.0", 104 | "pytest-cov", 105 | "pytest", 106 | ] 107 | }, 108 | scripts=[ 109 | "project_setup/scripts/ansible-roles", 110 | "project_setup/scripts/iam-to-travis", 111 | "project_setup/scripts/terraform-to-secrets", 112 | "project_setup/scripts/skeleton", 113 | "project_setup/scripts/ssm-param", 114 | ], 115 | entry_points={}, 116 | ) 117 | -------------------------------------------------------------------------------- /style-guide.md: -------------------------------------------------------------------------------- 1 | # Style Guide # 2 | 3 | Here lie the heretofore-unwritten rules and styles we use when writing 4 | documentation, issues, pull requests, and other forms of written expression. 5 | 6 | ## References ## 7 | 8 | - [Plain Language guide](https://www.plainlanguage.gov/guidelines/) 9 | - [GitHub list of supported languages](https://github.com/github/linguist/blob/master/lib/linguist/languages.yml) 10 | 11 | ## Capitalization ## 12 | 13 | Follow standard capitalization for written work such as comments, 14 | documentation, reports, and emails, namely that 15 | 16 | > words are lowercase unless there is specific guidance to capitalize them. 17 | > For example, capitalize the first word of a sentence, unless the sentence 18 | > begins with the name of a person whose name starts with a lowercase letter. 19 | 20 | Reference: [APA's capitalization guidelines](https://apastyle.apa.org/style-grammar-guidelines/capitalization) 21 | 22 | When an object being referenced has a nonstandard capitalization, defer to its 23 | capitalization. For example, GitHub has a capital letter mid-word. 24 | 25 | Use uppercase for abbreviations, including technical protocols such as HTTPS 26 | and SSL. 27 | 28 | Please note that this guidance is specifically for *written work such as 29 | documentation* and that code capitalization has its own standards that vary by 30 | language. 31 | 32 | ## Repositories ## 33 | 34 | **All repositories should be public.** 35 | 36 | Whether creating new repositories or transferring existing repositories, 37 | remember that we work in the open by default. For a discussion of the 38 | rare cases in which we might use private repositories, read our 39 | [open source policy](https://github.com/cisagov/development-guide/tree/develop/open-source-policy). 40 | 41 | ### Repository naming ### 42 | 43 | Use a repository's name to describe its purpose. Names should be: 44 | 45 | 1. Descriptive 46 | 1. Readable 47 | 1. Consistent 48 | 1. Contextual 49 | 1. Future-friendly 50 | 1. Extensible 51 | 1. Reusable 52 | 53 | Avoid references to pop culture or other implicit references. GitLab has a 54 | good guide on using [inclusive language in documentation](https://docs.gitlab.com/ee/development/documentation/styleguide/#inclusive-language). 
55 | 56 | Specifically, when naming a repository: 57 | 58 | - Use lower case 59 | - Use dashes 60 | - For readability, where spaces would otherwise separate words 61 | - Avoid underscores, which require extra keystrokes 62 | 63 | ### Repository settings ### 64 | 65 | Specify code owners in each repository's `.github/codeowners` file to make 66 | sure that changes and pull requests are reviewed by people familiar with 67 | the codebase. This way, we'll catch more problems before they get into the 68 | protected branch. 69 | 70 | - Set a minimum of two code owners for all repositories. 71 | - Add or edit each repository's `.github/codeowners` file. 72 | - Set up [branch protection](project_setup/branch-protection.md). 73 | 74 | ## Titles and subjects ## 75 | 76 | Use [title case](https://apastyle.apa.org/style-grammar-guidelines/capitalization/title-case) 77 | for the main title of a document. 78 | 79 | ## Headers and headings ## 80 | 81 | Use [sentence case](https://apastyle.apa.org/style-grammar-guidelines/capitalization/sentence-case) 82 | for headings within a document that are not the initial document title. 83 | 84 | When referencing an object or another document, use the capitalization of the 85 | referenced item, such as "GitHub" or "the Style Guide". 86 | 87 | Use both opening and closing octothorpes (`#`) to indicate the Markdown header 88 | level. Our Markdown linting enforces this as well. 89 | 90 | For example: 91 | 92 | - `### Use variables to configure pipelines ###` 93 | - `## Using the Style Guide ##` 94 | 95 | ## Lists ## 96 | 97 | Use bulleted lists unless order matters, in which case use numbered lists. 98 | 99 | Always capitalize the first word of list items, unless they’re parameters or 100 | commands that are in backticks, or a similar situation. 101 | 102 | ### List punctuation ### 103 | 104 | Periods or no periods? 105 | 106 | - Use periods at the end of bullets that are full sentences. 107 | - Omit periods at the end of partial sentences. 108 | - Match the rest of the bulleted or numbered list as much as possible. 109 | 110 | ## Code blocks ## 111 | 112 | When formatting code blocks, use the formatting that best highlights the 113 | syntax in the code block. 114 | 115 | We generally use code blocks for scripts, command-line usage, and when showing 116 | file contents like configuration files. 117 | 118 | ### Scripts ### 119 | 120 | - Use `shell` formatting for shell scripts 121 | - `shell` is also aliased as `sh`, `bash`, `zsh`, and others 122 | - Use language-specific formatting, such as `python`, for blocks with scripts 123 | written in specific languages 124 | - Reference the [GitHub list of supported languages](https://github.com/github/linguist/blob/master/lib/linguist/languages.yml) 125 | 126 | ### Command-line ### 127 | 128 | - `console` is most effective at highlighting a command *and* its resulting 129 | output, like this: 130 | 131 | ```console 132 | $ the_first_command 133 | the output 134 | more output 135 | $ the_second_command 136 | yet more output 137 | ``` 138 | 139 | - `shell` should be used for result-less commands, *especially* if 140 | there is any shell-fu like pipes, file redirects, shell variables, etc.
to 141 | apply syntax highlighting so the reader is more aware of the magic happening 142 | 143 | ### File contents ### 144 | 145 | - Extension-specific formatting, such as `hcl` for Terraform, should be used 146 | -------------------------------------------------------------------------------- /version.txt: -------------------------------------------------------------------------------- 1 | 1.0.0 2 | --------------------------------------------------------------------------------