├── .github
│   └── workflows
│       ├── continuous_run.yml
│       └── docker-build-and-push.yml
├── CONTRIBUTING.md
├── Dockerfile
├── LICENSE
├── README.md
├── SECURITY.md
├── bin
│   └── docker-entrypoint.sh
├── data
│   ├── empty-input.json
│   └── sample_input.json
├── doc
│   ├── rules.md
│   └── threat-matrix.md
├── github
│   ├── admins.rego
│   ├── api.rego
│   ├── branches.rego
│   ├── collaborators.rego
│   ├── commits.rego
│   ├── data.json
│   ├── deploy_keys.rego
│   ├── files.rego
│   ├── gh_utils.rego
│   ├── github.rego
│   ├── hooks.rego
│   ├── orgs.rego
│   ├── report.rego
│   ├── repos.rego
│   ├── reviews.rego
│   ├── secrets.rego
│   ├── ssh_keys.rego
│   ├── state
│   │   ├── admins.rego
│   │   ├── branches.rego
│   │   ├── collaborators.rego
│   │   ├── commits.rego
│   │   ├── deploy_keys.rego
│   │   ├── files.rego
│   │   ├── hooks.rego
│   │   ├── ssh_keys.rego
│   │   ├── teams.rego
│   │   └── tfa.rego
│   ├── teams.rego
│   ├── test
│   │   ├── branches.rego
│   │   ├── commits.rego
│   │   ├── deploy_keys.rego
│   │   ├── files.rego
│   │   ├── hooks.rego
│   │   ├── repos.rego
│   │   ├── ssh_keys.rego
│   │   ├── teams.rego
│   │   ├── tfa.rego
│   │   └── utils.rego
│   ├── tfa.rego
│   ├── token.rego
│   └── utils.rego
├── input.json
└── license-artifacts
    └── opa-LICENSE.txt
/.github/workflows/continuous_run.yml:
--------------------------------------------------------------------------------
1 | name: continuously run gitgat
2 |
3 | on:
4 |   #remove the following commented lines to enable continuous running
5 |   #schedule:
6 |   #- cron: "0 23 * * *"
7 |
8 |   workflow_dispatch:
9 |
10 | jobs:
11 |   run:
12 |     runs-on: ubuntu-latest
13 |     env:
14 |       GITHUB_SECRET: ${{ secrets.GH_SECRET }}
15 |     steps:
16 |
17 |       # checkout in order to have state
18 |       - uses: actions/checkout@v3
19 |
20 |       # run gitgat
21 |       - name: run GitGat
22 |         run: |
23 |           docker pull scribesecurity/gitgat:latest
24 |           docker run -e GH_TOKEN="$GITHUB_SECRET" -v $(pwd):/var/opt/opa scribesecurity/gitgat:latest data.github.report.print_report 2> report.md
25 |
26 |       # push report to artifacts of pipeline run
27 |       - name: upload artifact
28 |         uses: actions/upload-artifact@v3
29 |         with:
30 |           name: gitgat report
31 |           path: report.md
32 |
33 |       # Push updates to repo. To enable, uncomment the following lines.
34 |       # - name: Push updated report.md to repo
35 |       #   run: |
36 |       #     cp report.md tmp.md
37 |       #     git config --global user.name "My name (via get_latest workflow)"
38 |       #     git config --global user.email "nobody@nowhere.com"
39 |       #     git rm report.md
40 |       #     git commit -m "remove report temporarily"
41 |       #     mv tmp.md report.md
42 |       #     git add report.md
43 |       #     git commit -m "report.md"
44 |       #     git push
45 |
46 |
47 |
48 |
49 |
50 |
--------------------------------------------------------------------------------
/.github/workflows/docker-build-and-push.yml:
--------------------------------------------------------------------------------
1 | name: docker-build-and-push
2 |
3 | on:
4 |   workflow_dispatch:
5 |
6 | jobs:
7 |   docker:
8 |     runs-on: ubuntu-latest
9 |     steps:
10 |       -
11 |         name: Checkout
12 |         uses: actions/checkout@v3
13 |       -
14 |         name: Set up QEMU
15 |         uses: docker/setup-qemu-action@v2
16 |       -
17 |         name: Set up Docker Buildx
18 |         uses: docker/setup-buildx-action@v2
19 |       -
20 |         name: Login to DockerHub
21 |         uses: docker/login-action@v2
22 |         with:
23 |           username: ${{ secrets.DOCKERHUB_USERNAME }}
24 |           password: ${{ secrets.DOCKERHUB_TOKEN }}
25 |       -
26 |         name: Build and push
27 |         uses: docker/build-push-action@v3
28 |         with:
29 |           context: .
30 |           platforms: linux/amd64,linux/arm64
31 |           push: true
32 |           tags: scribesecurity/gitgat:latest
33 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing
2 |
3 | Thanks for your interest in contributing to the Gitgat project.
4 |
5 | The ways you can get involved:
6 | * Get involved in issue discussions
7 | * Suggest improvements and report bugs
8 | * Suggest policies through opening an issue:
9 |   * Suggest a new policy: describe the policy goals and rules.
10 |   * Suggest policy implementation ideas: describe implementation ideas, APIs to get the required data from, rule logic, state management, etc.
11 | * Suggest implementations through pull requests:
12 |   * Implement new policies.
13 |   * Implement current policies for other source-control platforms.
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM alpine
2 |
3 | ARG OPA_VERSION="v0.41.0"
4 |
5 | WORKDIR /opt/opa
6 |
7 | COPY bin/docker-entrypoint.sh /opt/opa/docker-entrypoint.sh
8 | COPY data/empty-input.json /var/opt/opa/input.json
9 | COPY license-artifacts /opt/opa
10 |
11 | RUN apk --no-cache add curl &&\
12 | adduser -D opa &&\
13 | curl -L -o opa https://openpolicyagent.org/downloads/${OPA_VERSION}/opa_linux_amd64_static &&\
14 | chmod u+x /opt/opa/opa &&\
15 | chmod u+x /opt/opa/docker-entrypoint.sh &&\
16 | chown -R opa:opa /opt/opa &&\
17 | chown -R opa:opa /var/opt/opa
18 |
19 | COPY github /opt/opa/github
20 |
21 | VOLUME /var/opt/opa/
22 |
23 | USER opa
24 |
25 | ENTRYPOINT ["/opt/opa/docker-entrypoint.sh"]
26 | CMD ["data.gh.eval"]
27 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright [2022] [Scribe Security LTD. www.scribesecurity.com]
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | # Source control system security posture
6 | SCM (Source Control Management) security is of high importance as it serves as an entry point to the whole CI/CD pipeline. This repository contains policies that verify the security of SCM organizations, repositories, and user accounts (currently GitHub's). The policies are evaluated using [Open Policy Agent (OPA)](https://openpolicyagent.org).
7 |
8 |
9 | There are different sets of policies depending on which account is being evaluated. **Most policies are only relevant for organization owners**. See the rule sets section below.
10 |
11 | The policies are evaluated against a certain state. When executed for the first time, the state is empty. The returned data should be reviewed, and the security posture should be manually evaluated (with recommendations from each module). If the state is approved, it should be added to the input data, so that the next evaluation of the policies tracks changes to the state. More information about the configurable state for each module is available in that module's section below.
12 |
13 |
14 | # Usage
15 | ## Get a GitHub Personal Access Token
16 | 1. Generate a Personal Access Token with necessary permissions on GitHub in Settings > Developer Settings.
17 | You will need the following permissions:
18 | * read:org
19 | * read:user
20 | * read:public_key
21 | * repo:status
22 | * repo_deployment
23 | * read:repo_hook
24 | * public_repo
25 | * gist
26 |
27 | If needed, refer to each module's section to figure out what permissions are needed to evaluate the module's policies.
28 |
29 | 2. Set an environment variable with the token, for example:
30 |
31 | ```sh
32 | export GH_TOKEN=''
33 | ```
34 |
35 | ## Run Using Docker
36 | Run the following to get the report as a gist in your GitHub Account:
37 |
38 | ```sh
39 | docker run -e GH_TOKEN scribesecurity/gitgat:latest data.gh.post_gist
40 | ```
41 |
42 | You can access your report from your gists.
43 |
44 | Run the following to get the report as a Markdown file:
45 | ```sh
46 | docker run -e GH_TOKEN scribesecurity/gitgat:latest data.github.report.print_report 2> report.md
47 | ```
48 |
49 | Run the following to get the report as a JSON object:
50 | ```sh
51 | docker run -e GH_TOKEN scribesecurity/gitgat:latest data.gh.eval
52 | ```
53 | In order to run the report using the variables and state you have saved in the input.json file, use this command (replace `<local path>` with the directory containing your `input.json`):
54 | ```sh
55 | docker run -e GH_TOKEN -v <local path>:/var/opt/opa scribesecurity/gitgat:latest
56 | ```
57 | If you have already included the token in the input.json file, you can shorten it to:
58 | ```sh
59 | docker run -v <local path>:/var/opt/opa scribesecurity/gitgat:latest
60 | ```
61 | Note that the default report is the JSON version, so if you want to get the Markdown file you need to specify it as seen at the top of this section.
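
For example, assuming `input.json` is in your current working directory, you can mount it with `$(pwd)` (the same mount the `continuous_run.yml` workflow uses) and still request the Markdown report:

```sh
# mount the current directory so the container sees ./input.json as /var/opt/opa/input.json
docker run -e GH_TOKEN -v $(pwd):/var/opt/opa scribesecurity/gitgat:latest data.github.report.print_report 2> report.md
```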
62 |
63 | ## Run Using the OPA CLI
64 |
65 | ### Install OPA and additional tools
66 | In order to execute the evaluation of the policies, download and install OPA (version 0.40.0 and up) from the OPA website, https://www.openpolicyagent.org.
67 | Binaries are available for macOS, Linux and Windows.
68 |
69 | The examples below, which demonstrate safe handling of GitHub's Personal Access Token via an environment variable, rely on `cat` and `sed`, which are typically available on macOS and Linux. They can be obtained for Windows as well, for example, by using [Git for Windows](https://gitforwindows.org/).
70 | It is also possible to put the token directly into the configuration file, but do it at your own risk and make sure that it cannot be read by other users.
71 |
72 | ### Clone this repository
73 | Clone the repository using:
74 | ```sh
75 | git clone git@github.com:scribe-public/gitgat.git
76 | ```
77 | And then enter into the directory created:
78 | ```sh
79 | cd gitgat
80 | ```
81 |
82 |
83 | ### Configure the input.json configuration file
84 | The configuration file for the examples below is expected to be `input.json`. Make sure you create this file in the main gitgat folder, using the following command:
85 |
86 | ```sh
87 | cp data/empty-input.json input.json
88 | ```
89 | Sample configuration files can be found in the `data` directory.
90 |
91 | If you wish to add information or state to your `input.json` file, you can refer to `data/sample_input.json` for policy configuration and state management. Each module has its own JSON section, and the state information for that module goes inside that section. Make sure that the state information does not get pushed to the repository, as it might contain sensitive data.
92 |
93 | `sample_input.json` is **not** included in .gitignore, but `input.json` is.
94 | So it is recommended to use `input.json` as the input configuration file for OPA.
95 |
96 |
97 | ### Run the policies using OPA
98 |
99 | When running eval and report commands, pipe the token into the configuration via stdin using `sed`.
100 | Following are a few usage examples.
101 |
102 | Create a report as a gist in your GitHub account:
103 |
104 |
105 | ```sh
106 | cat input.json | sed "s/GH_TOKEN/$GH_TOKEN/" | opa eval -I -b github data.gh.post_gist
107 | ```
108 |
109 | Get a report as a Markdown file:
110 | ```sh
111 | cat input.json | sed "s/GH_TOKEN/$GH_TOKEN/" | opa eval -I -b github data.github.report.print_report 2> report.md
112 | ```
113 |
114 | Get a report as a JSON object:
115 |
116 | ```sh
117 | cat input.json | sed "s/GH_TOKEN/$GH_TOKEN/" | opa eval -I -b github data.gh.eval
118 | ```
119 |
120 | Run a specific module/rule:
121 |
122 | ```sh
123 | cat input.json | sed "s/GH_TOKEN/$GH_TOKEN/" | opa eval -I -b github data.github.<module>.eval
124 | ```
125 | For example:
126 | ```sh
127 | cat input.json | sed "s/GH_TOKEN/$GH_TOKEN/" | opa eval -I -b github data.github.ssh_keys.eval
128 | ```
129 | You can find the different rule files under the `github` directory. Each file is a single OPA rule. The file name is the rule name, and that's the name you can use instead of `<module>`.
130 |
131 | (Under development) Print the Markdown report to stdout:
132 |
133 | ```sh
134 | cat input.json | sed "s/GH_TOKEN/$GH_TOKEN/" | opa eval -I -b github data.github.report.print_report
135 |
136 | ```
137 |
138 | (Under development) Upload the report to GitHub as a Gist:
139 |
140 | ```sh
141 | cat input.json | sed "s/GH_TOKEN/$GH_TOKEN/" | opa eval -I -b github data.gh.post_gist
142 | ```
143 |
144 |
145 | ## Rule sets
146 | The evaluation can be run for three different rule sets.
147 | The rule set is configured via `input.rule_set`:
148 |
149 | * "user" - evaluates rules from a single user perspective.
150 | * "org" - evaluates rules from an organization perspective.
151 |   The organizations that are evaluated are configured in the `input.json` file under the `organizations` header.
152 | * "enterprise" - evaluates rules for an enterprise (coming soon).
153 |
154 | The default selection is "user" as can be seen in the example `input.json` file above.
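
As a sketch, switching to the organization rule set might look like the following in `input.json` (the organization path follows the `orgs/org_id` form used in `data/sample_input.json`; `my-org` is a placeholder):

```json
{
  "token": "token GH_TOKEN",
  "rule_set": "org",
  "organizations": [ "orgs/my-org" ]
}
```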
155 | # State configuration
156 | Policies are configured via relevant state objects in `input.json`.
157 | Each configurable module has a corresponding input configuration object.
158 | Configuration parameters are described in each module's section below.
159 | The state can be updated and approved by the policy administrator.
160 | Eval rules print out violations of policies.
161 | Violated rules can be turned into exceptions allowed by the SCM administrator by updating the state of the corresponding modules.
162 | Additional information about modules is available in corresponding eval rules descriptions.
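
For example, a minimal sketch of this flow for the *tfa* module (described below): if an evaluation reports that `some-user` in `my-org` has 2 factor authentication disabled and the administrator accepts this exception, the finding is copied into the module's state in `input.json` (the names are placeholders; the structure follows `data/sample_input.json`):

```json
{
  "token": "token GH_TOKEN",
  "tfa": {
    "disabled_members": {
      "my-org": [ "some-user" ]
    }
  }
}
```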
163 |
164 | # Authentication modules
165 |
166 | ## 2 factor authentication
167 | 2 factor authentication protects against developer account password leakage. It is **highly recommended** to request users to enable 2 factor authentication.
168 | Module *tfa* checks for organization members with 2 factor authentication disabled.
169 |
170 | Required permissions:
171 | * read:org - note, that only organization owners can get the list of users with 2 factor authentication disabled
172 | * read:user - to get the list of organizations the user belongs to (when evaluating the *user* rule set)
173 |
174 | Configuration parameters:
175 |
176 | * `input.tfa.disabled_members` - specifies the list of users that are allowed to have 2 factor authentication disabled
177 | * `input.tfa.unenforced_orgs` - specifies the list of organizations that are allowed to have 2 factor authentication enforcements disabled
178 |
179 | Rule modules:
180 |
181 | * `data.github.tfa.eval.state.disabled_members` returns the list of users in each organization that have the 2 factor authentication disabled.
182 | If the new state is approved, they should be added to the configuration state.
183 |
184 | * `data.github.tfa.eval.state.unenforced_orgs` returns the list of organizations that do not enforce 2 factor authentication.
185 |
186 |
187 | ## SSH keys
188 | Developers can use SSH keys to access the repositories. A leaked SSH key gives an attacker access to the repository without the need to acquire a password. To mitigate the risk, it is advised to rotate SSH keys periodically and review configured SSH keys. The module is only supported in the user rule set, as organization owners do not have access to SSH key metadata.
189 | Module *ssh_keys* checks for expired and newly added SSH keys.
190 |
191 | Required permissions:
192 |
193 | * read:public_key - to get the list of user's SSH public keys
194 |
195 | Configuration parameters:
196 | * `input.ssh_keys.expiration` - [years, months, days] for the SSH keys expiration
197 | * `input.ssh_keys.keys` - list of SSH keys that are registered for the user
198 |
199 | Rule modules:
200 |
201 | * `data.github.ssh_keys.eval.state.expired` returns the list of SSH keys that are older than configured by the expiration parameter.
202 | * `data.github.ssh_keys.eval.state.keys` returns the list of SSH keys that were not previously added to the input configuration file.
203 | All the approved keys should be added to the configuration state.
204 |
205 | ## Deploy keys
206 | Deploy keys are SSH keys that give access to a specific repository (as opposed to the user's SSH keys that give access to all user's repositories). The same recommendations apply to deploy keys.
207 | Module *deploy_keys* checks for expired and newly added deploy keys.
208 |
209 | Required permissions:
210 |
211 | * repo - to get the list of deploy keys
212 |
213 | Configuration parameters:
214 | * `input.deploy_keys.expiration` - [years, months, days] for the deploy keys expiration
215 | * `input.deploy_keys.keys` - list of deploy keys that are registered for the repository
216 |
217 | Rule modules:
218 |
219 | * `data.github.deploy_keys.eval.state.expired` returns the list of deploy keys that are older than configured by the expiration parameter.
220 | * `data.github.deploy_keys.eval.state.keys` returns the list of deploy keys that were not previously registered for organization repositories. All the approved new keys should be added to the configuration state.
221 |
222 | ## Commits
223 | Commit signatures can serve as an additional protection mechanism against compromised developer accounts. Even when the password or an SSH key is leaked, the commit signing key will not necessarily be leaked, and requiring signatures would prevent an attacker from authoring commits on behalf of a compromised developer's account. See the branches section for documentation on enabling signature enforcement per branch.
224 | Module *commits* checks for commit signatures in specified repositories and for the history of commits to detect anomalies.
225 |
226 | Required permissions:
227 |
228 | * repo - to get the list of commits
229 |
230 | Configuration parameters:
231 |
232 | * `input.commits.<owner/repo>.allow_unverified` - list of user accounts per repository that are allowed to commit without signing
233 | * `input.commits.<owner/repo>.history` - list of the last 30 commits in the repository
234 |
235 | Rule modules:
236 |
237 | * `data.github.commits.eval.state.unverified` returns the list of commits that are either not signed or for which the signature verification failed.
238 | It does not include the commits by authors listed in `allow_unverified`. To approve the new state, the authors of unverified commits should be added to the configuration state.
239 | * `data.github.commits.eval.state.history` returns the list of commits in the repository that are not included in the input configuration state.
240 |
241 | # Permission modules
242 |
243 | ## Admins
244 | Organization administrators have full control over the organization configuration and its repositories. The list of administrator users should be kept up-to-date.
245 | Module *admins* monitors the list of admin users.
246 |
247 | Required permissions:
248 |
249 | * read:org - to get the list of admins in the organization
250 |
251 | Configuration parameters:
252 |
253 | * `input.admins.members` - current set of admin users
254 |
255 | Rule module:
256 |
257 | * `data.github.admins.eval.state.members` returns the list of admin users in each organization that were not included in the input list of admin users.
258 | If the new state is approved, they should be added to the configuration state.
259 |
260 | ## Branches
261 | Branch protection is a set of configuration options to authorize commits that can be pushed to a branch. For more information, refer to SCM documentation.
262 | Module *branches* monitors the branch protection configuration for a repository.
263 |
264 | Required permissions:
265 |
266 | * repo - to get the branch protection configuration in repositories
267 |
268 | Configuration parameters:
269 |
270 | * `input.branches.unprotected` - branches for which the branch protection is turned off
271 | * `input.branches.protection_data` - current configuration of branch protection
272 |
273 | Rule modules:
274 |
275 | * `data.github.branches.eval.state.unprotected` returns the list of unprotected branches not included in the input configuration.
276 | * `data.github.branches.eval.state.protection_data` returns the protection configuration that is different from the input protection data.
277 | If the new branch protection configuration is approved, the unprotected branches and the protection configuration should be added to the input.
278 |
279 | ## Teams
280 | Teams configuration is a convenient mechanism to organize users into groups and set permissions on a per team basis.
281 | Module *teams* monitors team members and the permissions of teams in repositories.
282 |
283 | Required permissions:
284 |
285 | * read:org - to get the list of teams in an organization
286 | * repo - to get the information about repositories
287 |
288 | Configuration parameters:
289 |
290 | * `input.teams.permissions` - current permissions of teams in repositories
291 | * `input.teams.members` - current teams members
292 |
293 | Rule modules:
294 |
295 | * `data.github.teams.eval.state.changed_permissions` returns the *newly added* permissions of teams in repositories.
296 | * `data.github.teams.eval.state.permissions` returns the permissions of teams in repositories for which no previous state was configured.
297 | * `data.github.teams.eval.state.members` returns the lists of team members that are not included in the input data.
298 | If the new state is approved, the teams permissions state should be updated.
299 |
300 | ## Files
301 | Sometimes it is necessary to configure more fine-grained permissions for the files in the repository. For example, access to CI/CD configuration files should be limited to DevOps developers. While the teams module checks the current settings, the repository history can be monitored for suspicious activity.
302 | Module *files* monitors modifications of individual files in the repository.
303 |
304 | Required permissions:
305 |
306 | * read:repo - to get the repository commits
307 |
308 | Configuration parameters:
309 |
310 | * `input.files.permissions` - permissions to modify individual files.
311 | Committers from the list per file are allowed to push commits that modify the file.
312 |
313 | Rule modules:
314 |
315 | * `data.github.files.eval.state.violating_commits` returns the list of commits that violate the
316 | restrictions from the state. Updating the state requires adding the committers of the
317 | violating commits to the allowed list.
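
A minimal sketch of such a restriction, following the shape of `data/sample_input.json` (the repository, file pattern, and user name are placeholders; the pattern is assumed to be a regular expression matching CI/CD configuration files, as suggested in `doc/threat-matrix.md`):

```json
{
  "files": {
    "permissions": {
      "owner/repo": {
        ".github/workflows/.*": [ "trusted-devops-user" ]
      }
    }
  }
}
```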
318 |
319 | # Isolation modules
320 |
321 | ## Hooks
322 | Web hooks notify external parties about events in the repository. This can potentially leak sensitive information.
323 | Module *hooks* monitors the list of configured Web hooks.
324 |
325 | Required permissions:
326 |
327 | * read:repo_hook - to get the list of Web hooks in repositories
328 |
329 | Configuration parameters:
330 |
331 | * `input.hooks` - the list of configured Web hooks
332 |
333 | Rule module:
334 |
335 | * `data.github.hooks.eval.state.hooks` returns the list of new/changed Web hooks.
336 | If the new state is approved, they should be added to the configuration state.
337 |
338 | # Contribute
339 |
340 | Information describing how to contribute can be found **[here](https://github.com/scribe-public/gitgat/blob/master/CONTRIBUTING.md)**.
341 |
--------------------------------------------------------------------------------
/SECURITY.md:
--------------------------------------------------------------------------------
1 | # Github-Posture Security Policy
2 |
3 | This document outlines our security policy.
4 |
5 | ## Vulnerability Reporting
6 |
7 | * Vulnerabilities should be reported as issues in the repo.
8 |
--------------------------------------------------------------------------------
/bin/docker-entrypoint.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | tmpfile=$(mktemp /tmp/opa-input.XXXXXX)
3 | exec 3>"$tmpfile"
4 | exec 4<"$tmpfile"
5 | rm "$tmpfile"
6 |
7 | cat /var/opt/opa/input.json | sed "s/GH_TOKEN/$GH_TOKEN/" >&3
8 |
9 | exec /opt/opa/opa eval -I -b github $@ <&4
10 |
--------------------------------------------------------------------------------
/data/empty-input.json:
--------------------------------------------------------------------------------
1 | {
2 | "token": "token GH_TOKEN"
3 | }
4 |
--------------------------------------------------------------------------------
/data/sample_input.json:
--------------------------------------------------------------------------------
1 | {
2 |   "token": "token GH_TOKEN",
3 |   "organizations": [ "orgs/org_id" ],
4 |   "tfa": {
5 |     "disabled_members": {
6 |       "test_org": ["test_user"]
7 |     },
8 |     "unenforced_orgs": [ "org" ]
9 |   },
10 |   "admins": {
11 |     "members": {
12 |       "": [""]
13 |     }
14 |   },
15 |   "commits": {
16 |     "owner/repo": {
17 |       "allow_unverified": ["user"],
18 |       "history": [""]
19 |     }
20 |   },
21 |   "deploy_keys": {
22 |     "expiration": [1, 0, 0],
23 |     "keys": [""]
24 |   },
25 |   "ssh_keys": {
26 |     "expiration": [1, 0, 0],
27 |     "keys": [""]
28 |   },
29 |   "files": {
30 |     "permissions": {
31 |       "owner/repo": {
32 |         "file pattern": [ "committer" ]
33 |       }
34 |     }
35 |   },
36 |   "hooks": {
37 |     "org/repo": [ {
38 |       "active": true,
39 |       "created_at": "2022-01-01T00:00:00Z",
40 |       "events": [
41 |         "label"
42 |       ],
43 |       "id": 1,
44 |       "name": "test",
45 |       "config": {
46 |         "content_type": "json",
47 |         "insecure_ssl": "0",
48 |         "url": "https://example.com"
49 |       },
50 |       "updated_at": "2022-01-01T00:01:00Z",
51 |       "type": "Repository"
52 |     } ]
53 |   },
54 |   "teams": {
55 |     "permissions": {
56 |       "org": {
57 |         "team": {
58 |           "owner/repo": {
59 |             "admin": true,
60 |             "maintain": true,
61 |             "pull": true,
62 |             "push": true,
63 |             "triage": true
64 |           }
65 |         }
66 |       }
67 |     },
68 |     "members": {
69 |       "org/team": [ "member" ]
70 |     }
71 |   },
72 |   "branches": {
73 |     "protection_data": {
74 |       "owner/repo/branches/main": {
75 |         "allow_deletions": {
76 |           "enabled": false
77 |         },
78 |         "allow_force_pushes": {
79 |           "enabled": false
80 |         },
81 |         "block_creations": {
82 |           "enabled": false
83 |         },
84 |         "enforce_admins": {
85 |           "enabled": false,
86 |           "url": "https://api.github.com/repos/owner/repo/branches/main/protection/enforce_admins"
87 |         },
88 |         "required_conversation_resolution": {
89 |           "enabled": false
90 |         },
91 |         "required_linear_history": {
92 |           "enabled": true
93 |         },
94 |         "required_pull_request_reviews": {
95 |           "dismiss_stale_reviews": false,
96 |           "require_code_owner_reviews": false,
97 |           "required_approving_review_count": 1,
98 |           "url": "https://api.github.com/repos/owner/repo/branches/main/protection/required_pull_request_reviews"
99 |         },
100 |         "required_signatures": {
101 |           "enabled": true,
102 |           "url": "https://api.github.com/repos/owner/repo/branches/main/protection/required_signatures"
103 |         },
104 |         "required_status_checks": {
105 |           "checks": [],
106 |           "contexts": [],
107 |           "contexts_url": "https://api.github.com/repos/owner/repo/branches/main/protection/required_status_checks/contexts",
108 |           "strict": true,
109 |           "url": "https://api.github.com/repos/owner/repo/branches/main/protection/required_status_checks"
110 |         },
111 |         "url": "https://api.github.com/repos/owner/repo/branches/main/protection"
112 |       }
113 |     }
114 |   }
115 | }
116 |
--------------------------------------------------------------------------------
/doc/rules.md:
--------------------------------------------------------------------------------
1 |
2 | | Rule \ Account | Personal | Organizational | Enterprise |
3 | |-------------------|-------------------------------|------------------------------|---------------------|
4 | | 2fa | For orgs where user is admin | For org admin | For org admins |
5 | | Admins | For orgs user belongs to | Supported | Supported |
6 | | SSH keys | For user own keys | Metadata not available | Possible to support |
7 | | Deploy keys | For repos where user is admin | For repos where org is owner | Supported |
8 | | Hooks | Supported | Supported | Supported |
9 | | Teams | For orgs where user is admin | Supported | Supported |
10 | | Files | Supported | Supported | Supported |
11 | | Commits | Supported | Supported | Supported |
12 | | Branch protection | For repo admin | For repo admin | For repo admin |
13 | |-------------------|-------------------------------|------------------------------|---------------------|
14 | | Audit log | Not supported | Not supported | Supported |
15 | | Secret scanning | Not part of GitHub API | | |
16 | | Dependabot | | | |
17 |
--------------------------------------------------------------------------------
/doc/threat-matrix.md:
--------------------------------------------------------------------------------
1 | # Initial Access
2 |
3 |
4 |
5 | Techniques
6 | |
7 | Description
8 | |
9 | Mitigation
10 | |
11 | GitHub Posture comments
12 | |
13 | Remaining Threat
14 | |
15 |
16 |
17 | Supply Chain Compromise on CI/CD
18 | |
19 | Supply Chain Attacks to Application Library, Tools, Container Images in CI/CD Pipelines.
20 | |
21 |
22 |
23 |
24 | - (CI, CD) Limit egress connection via Proxy or IP Restriction
25 |
26 |
- (CI, CD) Audit Logging of the activities
27 |
28 |
- (CI, CD) Security Monitoring using IDS/IPS, and EDR
29 |
30 |
- (CI, CD) Check each tool’s Integrity
31 |
32 |
- (CI, CD) Doesn’t allow untrusted libraries, tools
33 |
34 |
35 | |
36 |
37 | NA to GitHub repository
38 | |
39 |
40 | Suppy Chain Compromise on CI/CD is out of scope of repository security
41 | |
42 |
43 |
44 | Valid Account of Git Repository
45 |
46 | (Personal Token, SSH key, Login password, Browser Cookie)
47 | |
48 | Use developer’s credentials to access to Git Repository Service
49 | (Personal token, SSH key, browser cookie, or login password is stolen)
50 | |
51 |
52 |
53 |
54 | - (Device) Device security is out of scope
55 |
56 |
- (Git Repository) Network Restriction
57 |
58 |
- (Git Repository) Limit access permission of each developer
(e.g. no write permission, limited read permission)
59 |
60 | - (CI, CD) Use GitHub App and enable IP restriction
61 |
62 |
63 | |
64 |
65 | 2 factor authentication should be on (on user/org/enterprise accounts, read:org and should be org admin to use the 2fa filter).
66 | Permissions of developers should be minimal (teams rule needs repo and read:org authorizations, user account with org admin can get the list of admins in the org).
67 | SSH keys should be rotated (user account only, read:public_key authorization).
68 | Audit log analysis (Coming soon) (only enterprise account over API).
69 | |
70 |
71 | Access tokens usage cannot be tracked.
72 | In regular organization accounts, only limited info about users is available to the admin.
73 | |
74 |
75 |
76 | Valid Account of CI/CD Service
77 |
78 | (Personal Token, Login password, Browser Cookie)
79 | |
80 | Use SSH key or Tokens to access to CI/CD Service Servers directly
81 | |
82 |
83 |
84 |
85 | - (CI, CD) Strict access control to CI/CD pipeline servers
86 |
87 |
- (CI, CD) Hardening CI/CD pipeline servers
88 |
89 | - (New) (Git Repository) Prevent CI\CD credential leakage from source control
90 |
91 |
92 | |
93 |
94 | Secret scanning (secrets towards CI/CD Service) Coming soon
95 | |
96 |
97 | Mostly not related to repository security. If GitHub Actions are used as CI/CD then above access controls can be applied.
98 | |
99 |
100 |
101 | Valid Admin account of Server hosting Git Repository
102 | |
103 | Use SSH key, Tokens to access to Server hosting Git Repository
104 | |
105 |
106 |
107 |
108 | - (Git Repository) Strict access control to server hosting Git Repository
109 |
110 |
- (Git Repository) Hardening git repository servers
111 |
112 |
113 | |
114 |
115 | Not applicable to GitHub
116 | |
117 |
118 | If GitHub is compromised we are out of luck
119 | |
120 |
121 |
122 |
123 |
124 | # Execution
125 |
126 |
127 |
128 | Techniques
129 | |
130 | Description
131 | |
132 | Mitigation
133 | |
134 | GitHub Posture comments
135 | |
136 | Remaining Threat
137 | |
138 |
139 |
140 | Modify CI/CD Configuration
141 | |
142 | Modify CI/CD Configuration on Git Repository
143 |
144 | (CircleCI: .circleci/config.yml, CodeBuild: buildspec.yml, CloudBuild: cloudbuild.yaml, GitHub Actions: .github/workflows/*.yaml)
145 | |
146 |
147 |
148 |
149 | - (Git Repository) Only allow pushing of signed commits
150 |
151 |
- (CI, CD) Disallow CI/CD config modification without review (CI/CD must not follow changes of a branch without review)
152 |
153 |
- (CI, CD) Add signature to CI/CD config and verify it
154 |
155 |
- (New) (Git Repository) Limit editing permissions to CI/CD configurations
156 |
157 |
- (CI, CD) Limit egress connections via Proxy and IP restrictions
158 |
159 |
- (CI, CD) Audit Logging of activities
160 |
161 |
- (CI, CD) Security Monitoring using IDS/IPS, and EDR
162 |
163 |
164 | |
165 |
166 | Signed commits (any account with repo authorization).
167 | Requiring review (Coming soon - not implemented yet branch protection rule).
168 | Files rule limits who can edit CI/CD config files (any account with repo authorization, requires addition of standard regular expressions for CI/CD files).
169 | |
170 |
171 | Files and commits are reactive rules, they check
172 | the history of the repository. Note: all our rules are reactive
173 | |
174 |
175 |
176 | Inject code to IaC configuration
177 | |
178 | For example, Terraform allows code execution and file inclusion. The code is executed during CI(plan stage)
179 |
180 | Code Execution: Provider installation(put provider binary with .tf), Use External provider
181 | File inclusion: file Function
182 | |
183 |
184 |
185 |
186 | - (Git Repository) Only allow pushing of signed commits
187 |
188 |
- (New) (Git Repository) Limit editing permissions to CI/CD configurations
189 |
190 |
- (New) (Git Repository) Disallow CI/CD config modification without review (CI/CD must not follow changes of a branch without review)
191 |
192 |
- (CI, CD) Restrict dangerous code through Policy as Code
193 |
194 |
- (CI, CD) Restrict untrusted providers
195 |
196 |
- (CI, CD) Limit egress connections via Proxy and IP restrictions
197 |
198 |
- (CI, CD) Audit Logging of activities
199 |
200 |
- (CI, CD) Security Monitoring using IDS/IPS, and EDR
201 |
202 |
203 | |
204 |
205 | Signed commits.
206 | Files rule to limit who can edit config.
207 | Requiring review.
208 | |
209 |
210 | Our rules are reactive - code can be executed already by the time commits and files check are done.
211 | |
212 |
213 |
214 | Inject code to source code
215 | |
216 | Application executes test code during CI
217 | |
218 |
219 |
220 |
221 | - (New) (Git Repository) Limit editing permissions to source files
222 |
223 |
- (New) (Git Repository) Disallow CI/CD config modification without review
224 |
225 |
- (CI, CD) Restrict dangerous code through Policy as Code
226 |
227 |
- (CI, CD) Limit egress connections via Proxy and IP restrictions
228 |
229 |
- (CI, CD) Audit Logging of the activities
230 |
231 |
- (CI, CD) Security Monitoring using IDS/IPS, and EDR
232 |
233 |
234 | |
235 |
236 | Requiring reviews. (Coming soon)
237 | |
238 |
239 | Can anything else be done here from repository perspective?
240 | Permissions do let developers to modify source code.
241 | Files will not cover all of the source code files.
242 | |
243 |
244 |
245 | Supply Chain Compromise on CI/CD
246 | |
247 | (Repeated)
248 | |
249 |
250 | |
251 |
252 |
253 | Inject bad dependency
254 | |
255 | Inject bad dependency
256 | |
257 |
258 |
259 | - (New) (Git Repository) Limit editing permissions to source files
260 |
- (CI, CD) Code checks by SCA(Software composition analysis)
261 |
262 |
- (CI, CD) Restrict untrusted libraries, and tools
263 |
264 |
- (CI, CD) Limit egress connections via Proxy and IP restrictions
265 |
266 |
- (CI, CD) Audit Logging of activities
267 |
268 |
- (CI, CD) Security Monitoring using IDS/IPS, and EDR
269 |
270 |
271 | |
272 |
273 | Files rule to make sure only limited number of developers can modify
274 | project configuration that specifies dependencies. For example, package.json
275 | specifies dependencies for NodeJS app, so make sure only trusted developers
276 | can modify it.
277 | Dependency rule (TBD Coming soon: based on dependabot data)
278 | |
279 |
280 | The dependency can be injected with the version that is configured in the repo already.
281 | For example, if an attacker can see package.json file, he does not need to modify it directly but provide a version of the dependency in the upstream that will be pulled by CI/CD (dependency confusion attack). True, but I think it is more complicated to generate an such a dependency (would it be a new version? how would it be pushed to npm\pypi?)
282 | |
283 |
284 |
285 | SSH to CI/CD pipelines
286 | |
287 | Connect to CI/CD pipeline servers via SSH or Valid Token
288 | |
289 |
290 |
291 |
292 | - (CI, CD) Implement strict access control to CI/CD pipeline servers
293 |
294 |
- (CI, CD) Disallow SSH access
295 |
296 |
297 | |
298 |
299 | Not applicable to GitHub repository security
300 | |
301 |
302 | CI/CD pipeline access is ouf of scope
303 | |
304 |
305 |
306 |
307 | # Execution (Production)
308 |
309 |
310 |
311 |
312 | Techniques
313 | |
314 | Description
315 | |
316 | Mitigation
317 | |
318 | GitHub Posture comments
319 | |
320 | Remaining Threat
321 | |
322 |
323 |
324 | Modify the configuration of Production environment
325 | |
326 | Modify the configuration of Production environment via stolen credentials
327 | |
328 |
329 |
330 | - (New) (Git Repository) Limit editing permissions to source files (Reconsider if this is the right place)
331 |
- (Secret Manager) Rotate credentials regularly or issue temporary tokens only
332 |
333 |
- (Production environment) Network Restriction to Cloud API
334 |
335 |
- (Production environment) Enable Audit Logging
336 |
337 |
- (Production environment) Security Monitoring of data access
338 |
339 |
- (Production environment) Enforce principle of least privilege to issued credentials
340 |
341 |
- (Production environment) Rate limiting
342 |
343 |
344 | |
345 |
346 | See above: modify CI/CD configuration:
347 | If configuration of Production environment is stored in the repository,
348 | files and commits rule can help preventing its modification.
349 | Limit effect of stolen credentials by enforcing 2fa
350 | |
351 |
352 | Mostly out of scope of repository security though.
353 | |
354 |
355 |
356 | Deploy modified applications or server images to production environment
357 | |
358 | Deploy modified applications or server images (e.g. container image, function, VM image) to production environment via stolen credentials
359 | |
360 |
361 |
362 |
363 | - (Secret Manager) Rotate credentials regularly or issue temporary tokens only
364 |
365 |
- (Git Repository) Require multi-party approval(peer review)
366 |
367 |
- (Production environment) Verify signature of artifacts
368 |
369 |
- (Production environment) Network Restriction to Cloud API
370 |
371 |
- (Production environment) Enable Audit Logging
372 |
373 |
- (Production environment) Security Monitoring of deployment
374 |
375 |
- (Production environment) Enforce principle of least privilege to issued credentials
376 |
377 |
- (Production environment) Rate limiting
378 |
379 |
380 | |
381 |
382 | Requiring review. (Coming soon) File Rule: Who uploads and when.
383 | |
384 |
385 | Not implemented. Check what GitHub is doing about artifacts.
386 | |
387 |
388 |
389 |
390 |
391 | # Persistence
392 |
393 |
394 |
395 | Techniques
396 | |
397 | Description
398 | |
399 | Mitigation
400 | |
401 | GitHub Posture comments
402 | |
403 | Remaining Threat
404 | |
405 |
406 |
407 | Compromise CI/CD Server
408 | |
409 | Compromise CI/CD Server from pipeline
410 | |
411 |
412 |
413 |
414 | - (CI, CD) Clean environment created on every pipeline run
415 |
416 |
417 | |
418 |
419 | Out of scope of repository security.
420 | |
421 |
422 | Out of scope.
423 | CI/CD Server configuration is not stored in the repo.
424 | |
425 |
426 |
427 | Implant CI/CD runner images
428 | |
429 | Implant container images for CI/CD with malicious code to establish persistence
430 | |
431 |
432 |
433 |
434 | - Use signed/trusted CI runners only
435 |
436 |
- Implement strict access controls to container registry
437 |
438 |
- (CI, CD) Audit Logging of activities
439 |
440 |
441 | |
442 |
443 | Out of scope
444 | |
445 |
446 | Out of scope
447 | Images are out of scope of repository security.
448 | |
449 |
450 |
451 | (Modify CI/CD Configuration)
452 | |
453 | (Repeated)
454 | |
455 |
456 | |
457 |
458 | |
459 |
460 | |
461 |
462 |
463 | (Inject code to IaC configuration)
464 | |
465 | (Repeated)
466 | |
467 |
468 | |
469 |
470 | |
471 |
472 | |
473 |
474 |
475 | (Inject code to source code)
476 | |
477 | (Repeated)
478 | |
479 |
480 | |
481 |
482 | |
483 |
484 | |
485 |
486 |
487 | (Inject bad dependency)
488 | |
489 | (Repeated)
490 | |
491 |
492 | |
493 |
494 | |
495 |
496 | |
497 |
498 |
499 |
500 |
501 | # Privilege Escalation
502 |
503 |
504 |
505 | Techniques
506 | |
507 | Description
508 | |
509 | Mitigation
510 | |
511 | GitHub Posture comments
512 | |
513 | Remaining Threat
514 | |
515 |
516 |
517 | Get credential for Deployment(CD) on CI stage
518 | |
519 | Get high privilege credential in CI stage (not CD)
520 | |
521 |
522 |
523 | - (New) (Git Repository) Prevent CI\CD credential leakage from source control
524 |
525 |
- (CI, CD) Limit the scope of credentials in each step.
526 |
527 |
- (CI) Always enforce Least Privilege. CI(not CD) must not have credentials for deployment
528 |
529 |
- (CI, CD) Use different Identities between CI and CD
530 |
531 |
- (CI, CD) Maintain strong isolation between CI and CD
532 |
533 |
534 | |
535 |
536 | Repository secret scanning.(Coming soon)
537 | |
538 |
539 | Mostly Out of scope. CD must be separated from CI, but except secret scanning impossible to check from repository security perspective if it is.
540 | |
541 |
542 |
543 | Privileged Escalation and compromise other CI/CD pipeline
544 | |
545 | Privilege Escalation from CI/CD Environment to other components
546 | |
547 |
548 |
549 |
550 | - (CI, CD) Hardening of CI/CD pipeline servers
551 |
552 |
- (CI, CD) Isolate CI/CD pipeline from other systems.
553 |
554 |
555 | |
556 |
557 | Out of scope.
558 | |
559 |
560 | |
561 |
562 |
563 |
564 |
565 | # Defense Evasion
566 |
567 |
568 |
569 | Techniques
570 | |
571 | Description
572 | |
573 | Mitigation
574 | |
575 | GitHub Posture comments
576 | |
577 | Remaining Threat
578 | |
579 |
580 |
581 | Add Approver using Admin permission
582 | |
583 | Change Approver using Git Repository Service Admin permission
584 | |
585 |
586 |
587 |
588 | - (Git Repository) Limit admin users
589 |
590 |
- (Git Repository) Require multi-party approval(peer review)
591 |
592 |
593 | |
594 |
595 | Admins and teams rules to limit admin users and permissions.
596 | Review rule (branch protection, coming soon)
597 | |
598 |
599 | Our rules are reactive: An attacker with admin priviledges can add reviewers, make changes and return the original settings. But we will be able to catch this through the GitHub app given there are events triggered.
600 | |
601 |
602 |
603 | Bypass Review
604 | |
605 | Bypass Peer Review of Git Repository
606 | |
607 |
608 |
609 |
610 | - (Git Repository) Restrict repository admin from pushing to main branch without a review
611 |
612 |
- (CD) Require additional approval from reviewer to kick CD
613 |
614 |
615 | |
616 |
617 | Branch protection.
618 | Requiring review. (Both coming soon)
619 | |
620 |
621 | |
622 |
623 |
624 | Access to Secret Manager from CI/CD kicked by different repository
625 | |
626 | Use a CI/CD system in a different repository to leverage stolen credentials to access secret manager
627 | |
628 |
629 |
630 |
631 | - (Secret Manager) Restrict and separate access from different workloads
632 |
633 |
634 | |
635 |
636 | Out of scope of repository security
637 | |
638 |
639 | |
640 |
641 |
642 | Modify Caches of CI/CD
643 | |
644 | Implant bad code to caches of CI/CD pipeline
645 | |
646 |
647 |
648 |
649 | - (CI, CD) Clean environment on every pipeline run
650 |
651 |
652 | |
653 |
654 | Out of scope of repository security
655 | |
656 |
657 | |
658 |
659 |
660 | Implant CI/CD runner images
661 | |
662 | (Repeated)
663 | |
664 |
665 | |
666 |
667 | |
668 |
669 | |
670 |
671 |
672 |
673 |
674 | # Credential Access
675 |
676 |
677 |
678 | Techniques
679 | |
680 | Description
681 | |
682 | Mitigation
683 | |
684 | GitHub Posture comments
685 | |
686 | Remaining Threat
687 | |
688 |
689 |
690 | Dumping Env Variables in CI/CD
691 | |
692 | Dump Environment Variables in CI/CD
693 | |
694 |
695 |
696 |
697 | - (CI, CD) Don’t use environment variables for storing credentials
698 |
699 |
- (Secret Manager) Use secret manager which has network restriction
700 |
701 |
- (Secret Manager) Enable Audit Logging
702 |
703 |
- (Secret Manager) Security Monitoring to detect malicious activity
704 |
705 |
- (Secret Manager) Rotate credentials regularly or issue temporary tokens only
706 |
707 |
- (CI, CD) Enable Audit Logging
708 |
709 |
- (CI, CD) Security Monitoring using IDS/IPS, and EDR
710 |
711 |
712 | |
713 |
714 | I think all of this is out of scope.
715 | Secret scanning for credentials.
716 | |
717 |
718 | Mostly out of scope.
719 | |
720 |
721 |
722 | Access to Cloud Metadata
723 | |
724 | Access to Cloud Metadata to get access token of Cloud resources
725 | |
726 |
727 |
728 |
729 | - (CI, CD) Restrict metadata access from suspicious processes
730 |
731 |
- (Secret Manager) Use secret manager which has network restriction
732 |
733 |
- (Secret Manager) Enable Audit Logging
734 |
735 |
- (Secret Manager) Security Monitoring to detect malicious activity
736 |
737 |
- (Secret Manager) Rotate credentials regularly or issue temporary tokens only
738 |
739 |
- (CI, CD) Enable Audit Logging
740 |
741 |
- (CI, CD) Security Monitoring using IDS/IPS, and EDR
742 |
743 |
744 | |
745 |
746 | I think all of this is out of scope.
747 | Secret scanning.
748 | |
749 |
750 | |
751 |
752 |
753 | Read credentials file
754 | |
755 | Read credentials file mounted in CI/CD pipeline
756 | |
757 |
758 |
759 |
760 | - (CI, CD) Disable or mask contents of files in results of CI/CD
761 |
762 | - (Secret Manager) Use secret manager which has network restriction
763 | 
764 | - (Secret Manager) Enable Audit Logging
765 | 
766 | - (Secret Manager) Security Monitoring to detect malicious activity
767 | 
768 | - (Secret Manager) Rotate credentials regularly or issue temporary tokens only
769 | 
770 | - (CI, CD) Enable Audit Logging
771 | 
772 | - (CI, CD) Security Monitoring using IDS/IPS, and EDR
773 |
774 |
775 | |
776 |
777 | Out of scope of repository security
778 | |
779 |
780 | Credentials mounted on CI/CD pipeline are not represented in the repository
781 | |
782 |
783 |
784 | Get credential from CI/CD Admin Console
785 | |
786 | See credential from CI/CD admin console
787 | |
788 |
789 |
790 |
791 | - (CI, CD) Do not use CI/CD services that expose credentials in the system console
792 |
793 |
794 | |
795 |
796 | Out of scope of repository security
797 | |
798 |
799 | |
800 |
801 |
802 |
803 |
804 | # Lateral Movement
805 |
806 |
807 |
808 | Techniques
809 | |
810 | Description
811 | |
812 | Mitigation
813 | |
814 | GitHub Posture comments
815 | |
816 | Remaining Threat
817 | |
818 |
819 |
820 | Exploitation of Remote Services
821 | |
822 | Exploit services from CI/CD Pipeline
823 | |
824 |
825 |
826 |
827 | - (CI, CD) Isolate CI/CD pipeline systems from other services
828 |
829 |
830 | |
831 |
832 | Potentially: secret scanning for credentials towards other services
833 | |
834 |
835 | Isolation is out of scope of repository security
836 | |
837 |
838 |
839 | (Monorepo) Get credential of different folder's context
840 | |
841 | In a monorepo architecture, a single Git repository has many approvers.
842 | 
843 | Access controls need to be set carefully.
844 | |
845 |
846 |
847 |
848 | - (Git Repository) Set approver for each folder
849 |
850 | - (CI, CD, Secret Manager) Avoid sharing CI/CD environment and credentials between different folders.
851 | 
852 | - (CI, CD) should be isolated by environment folder or context
853 |
854 |
855 | |
856 |
857 | TODO Need to investigate monorepo on GitHub
858 | |
859 |
860 | |
861 |
862 |
863 | Privileged Escalation and compromise other CI/CD pipeline
864 |
865 | (Repeated)
866 | |
867 |
868 | |
869 |
870 | |
871 |
872 | |
873 |
874 | |
875 |
876 |
877 |
878 |
879 | # Exfiltration
880 |
881 |
882 |
883 | Techniques
884 | |
885 | Description
886 | |
887 | Mitigation
888 | |
889 | GitHub Posture comments
890 | |
891 | Remaining Threat
892 | |
893 |
894 |
895 | Exfiltrate data in Production environment
896 | |
897 | Exfiltrate data in Production environment via stolen credentials
898 | |
899 |
900 |
901 |
902 | - (CI/CD) Do not put data access credentials in CI/CD
903 |
904 | - (Production environment) Network Restriction to Cloud API
905 | 
906 | - (Production environment) Enable Audit Logging
907 | 
908 | - (Production environment) Security Monitoring of data access
909 | 
910 | - (Production environment) Enforce principle of least privilege to issued credentials
911 | 
912 | - (Production environment) Rate limiting
913 |
914 |
915 | |
916 |
917 | Secret scanning (to discuss): scanning can prevent sensitive data from existing in the repository, but not its exfiltration.
918 | |
919 |
920 | Mostly out of scope
921 | |
922 |
923 |
924 | Clone Git Repositories
925 | |
926 | Exfiltrate data from Git Repositories
927 | |
928 |
929 |
930 |
931 | - (Git Repository) Network Restriction
932 |
933 | - (Git Repository) Use temporary tokens instead of long life static tokens
934 | 
935 | - (Git Repository) Limit access permission of each developer (e.g. no write permission, limited read permission)
936 | 
937 | - (New) (Git Repository) Limit permission to make public/private
938 | 
939 | - (Git Repository) Enable Audit Logging
940 | 
941 | - (Git Repository) Security Monitoring of data access
942 | 
943 | - (Git Repository) Rate limiting
944 |
945 |
946 | |
947 |
948 | Permissions, hooks (can leak information about events in the repository), deploy keys.
949 | Audit logging (coming soon)
950 | |
951 |
952 | Token usage cannot be tracked.
953 | |
954 |
955 |
956 |
957 |
958 | # Impact
959 |
960 |
961 |
962 | Techniques
963 | |
964 | Description
965 | |
966 | Mitigation
967 | |
968 | GitHub Posture comments
969 | |
970 | Remaining Threat
971 | |
972 |
973 |
974 | Denial of Services
975 | |
976 | Denial of Services of CI/CD pipeline
977 | |
978 |
979 |
980 |
981 | - (CI, CD) Scalable Infrastructure
982 |
983 |
984 | |
985 |
986 | Out of scope
987 | |
988 |
989 | DoS cannot be mitigated via GitHub repository security monitoring
990 | |
991 |
992 |
993 |
--------------------------------------------------------------------------------
/github/admins.rego:
--------------------------------------------------------------------------------
1 | package github.admins
2 |
3 | import future.keywords.in
4 | import data.github.utils as utils
5 | import data.github.gh_utils as gh_utils
6 |
7 | admin_filter := "role=admin"
8 | orgs := data.github.orgs.orgs
9 | responses[x] = gh_utils.get_nested_data(orgs[x], "members_url", "{/member}", admin_filter, {})
10 | current_admins[x] = utils.flatten_array(responses[x], "login")
11 | admin_members[x] = utils.array_subtraction(current_admins[x], data.github.state.admins.members[x])
12 | admin_members[x] = current_admins[x] {
13 | not utils.exists(data.github.state.admins.members, x)
14 | }
15 |
16 | eval = v {
17 | merged_responses := utils.merge(responses, data.github.orgs.responses)
18 | v := { "state": {"members": admin_members},
19 | "processing_errors": { k: v | some k; v := merged_responses[k]; utils.is_error(v)},
20 | }
21 | }
22 |
23 | non_empty_admin_members[x] = admin_members[x] {
24 | count(admin_members[x]) > 0
25 | }
26 |
27 | more_or_less_than_two_admin_members[x] = admin_members[x] {
28 | count(admin_members[x]) > 2
29 | }
30 |
31 | more_or_less_than_two_admin_members[x] = admin_members[x] {
32 | count(admin_members[x]) == 1
33 | }
34 |
35 | members_findings = v {
36 | v := { x: count(v) | some x, v in more_or_less_than_two_admin_members }
37 | }
38 |
39 | overview_section := concat("\n", [
40 | "Admin permissions allow full control over your organization.",
41 | "Excessive admin permissions may be exploited, intentionally or unintentionally.",
42 | "Limiting permissions will limit the potential damage of credential theft, account-takeover or developer-workstation-takeover.",
43 | ])
44 |
45 | recommendation_section := concat("\n", [
46 | "Review the permissions and limit the number of users with admin permissions, to the minimum required.",
47 | ])
48 |
49 | module_title = "## Admin Permissions"
50 | overview_report := concat("\n", [
51 | module_title,
52 | "### Motivation",
53 | overview_section,
54 | "",
55 |
56 | "### Key Findings",
57 | "The following organizations do not have 2 admin members:",
58 | findings,
59 | "",
60 | "See [below](#admin-permissions-1) for a detailed report.",
61 | "",
62 |
63 | "### Our Recommendation",
64 | recommendation_section,
65 | "You can limit the administrative permissions of members at the following links:",
66 | "",
67 | "Click to expand
",
68 | "",
69 | utils.json_to_md_list(settings_urls, " "),
70 | " ",
71 | "",
72 | ])
73 |
74 | findings = v {
75 | count(members_findings) > 0
76 | v := concat("\n", [utils.json_to_md_dict(members_findings, ":", " ")])
77 | }
78 |
79 | findings = "None" {
80 | count(members_findings) == 0
81 | }
82 |
83 | settings_urls := { v |
84 | some k, _ in members_findings
85 | v := sprintf("<%s>", [concat("/", ["https://github.com/organizations", k, "settings", "member_privileges"])])
86 | }
87 |
88 | detailed_report := concat("\n", [
89 | module_title,
90 | overview_section,
91 | recommendation_section,
92 | "",
93 | "Go [back](#admin-permissions) to the overview report.",
94 | "",
95 |
96 | "",
97 | " Admin Members
",
98 | "",
99 | admin_details,
100 | " ",
101 | "",
102 | ])
103 |
104 | admin_details = v {
105 | count(non_empty_admin_members) == 0
106 | v := "None"
107 | }
108 |
109 | admin_details = v {
110 | count(non_empty_admin_members) > 0
111 | v := utils.json_to_md_dict_of_lists(non_empty_admin_members, " ")
112 | }
113 |
114 | update := v {
115 | v := { "known": current_admins, }
116 | }
117 |
118 | # state: empty, admin_members: admin_data -> update: admin_data
119 | # state: admin_data, admin_members: empty -> update:
120 | # if current_admins == admin_data -> update: current_admins
121 | # if current_admins < admin_data -> update: current_admins
122 | # state: admin1, admin_members: admin2 -> update:
123 | # if current_admins == admin1+admin2 -> update: current_admins
124 | # if current_admins == admin2 -> update: current_admins
125 |
--------------------------------------------------------------------------------
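For illustration only (not part of the repository): a minimal sketch of the state shape that admins.rego reads through data.github.state.admins.members. The organization name and logins are placeholders, and the real github/state/admins.rego may define or default this value differently.

package github.state.admins

# Hypothetical example: admin logins that were already acknowledged, keyed by organization.
# admins.rego subtracts these from the currently fetched admins to surface only new ones.
members := {
    "my-org": ["alice", "bob"]
}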
/github/api.rego:
--------------------------------------------------------------------------------
1 | package github.api
2 |
3 | import future.keywords.in
4 |
5 | call_github(url) = response {
6 | request_url := concat("/", ["https://api.github.com", url])
7 | response = call_github_abs(request_url)
8 | }
9 |
10 | call_github_abs(url) = response {
11 | request := {"method": "GET",
12 | "url": url,
13 | "headers": {
14 | "Authorization": input.token,
15 | "Accept": "application/vnd.github.v3+json; application/vnd.github.v3.repository+json"},
16 | "raise_error": false}
17 | response := http.send(request)
18 | }
19 |
20 | post_github(url, upload_data) = response {
21 | request_url := concat("/", ["https://api.github.com", url])
22 | response = post_github_abs(request_url, upload_data)
23 | }
24 |
25 | post_github_abs(url, upload_data) = response {
26 | request := {"method": "POST",
27 | "url": url,
28 | "headers": {
29 | "Authorization": input.token,
30 | "Accept": "application/vnd.github.v3+json",
31 | "Content-Type": "application/json"
32 | },
33 | "raise_error": false,
34 | "body": upload_data
35 | }
36 | response := http.send(request)
37 | }
38 |
39 | post_test(upload_data) = response {
40 | request := {"method": "POST",
41 | "url": "http://localhost:8282",
42 | "headers": {
43 | "Authorization": input.token,
44 | "Accept": "application/vnd.github.v3+json",
45 | "Content-Type": "application/json"
46 | },
47 | "raise_error": false,
48 | "raw_body": "{\"files\": {}}"
49 | }
50 | print(request)
51 | response := http.send(request)
52 | }
53 |
--------------------------------------------------------------------------------
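For illustration only: a hedged sketch of how other modules use the API helpers above, following the call pattern seen throughout the repository (for example in orgs.rego). The package name and rule name are hypothetical, and it assumes input.token already carries the full Authorization header value that call_github_abs sends unchanged.

package example

import data.github.api
import data.github.utils

# Hypothetical usage: fetch and parse the authenticated user's organizations.
my_orgs := utils.parse(api.call_github("user/orgs"))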
/github/branches.rego:
--------------------------------------------------------------------------------
1 | package github.branches
2 |
3 | import future.keywords.in
4 | import data.github.utils as utils
5 |
6 | # Get URLs
7 | branches_urls[name] = url {
8 | some r in data.github.repos.responses[x]
9 | name := r.full_name
10 | url := concat("/", ["repos", name, "branches"])
11 | }
12 | responses[x] = utils.parse(data.github.api.call_github(branches_urls[x]))
13 |
14 | successes[x] = responses[x] {
15 | not utils.is_error(responses[x])
16 | }
17 |
18 | branches[name] = branch {
19 | some x, response in successes
20 |
21 | some branch in response
22 | name := concat("/", [x, "branches", branch.name])
23 | }
24 |
25 | current_unprotected_branches[x] = branches[x] {
26 | not branches[x].protected
27 | }
28 |
29 | unprotected_branches := utils.array_subtraction(
30 | utils.keys(current_unprotected_branches), data.github.state.branches.unprotected_branches
31 | )
32 |
33 | protected_branches[x] = branches[x] {
34 | branches[x].protected
35 | }
36 |
37 | protection_responses[x] = utils.parse(data.github.api.call_github_abs(protected_branches[x].protection_url))
38 | protection_data[x] = protection_responses[x] {
39 | not utils.is_error(protection_responses[x])
40 | }
41 |
42 | filtered_protection_data[x] = v {
43 | d := protection_data[x]
44 | v := {
45 | "allow_deletions": d["allow_deletions"]["enabled"],
46 | "allow_force_pushes": d["allow_force_pushes"]["enabled"],
47 | "block_creations": d["block_creations"]["enabled"],
48 | "enforce_admins": d["enforce_admins"]["enabled"],
49 | "required_conversation_resolution": d["required_conversation_resolution"]["enabled"],
50 | "required_linear_history": d["required_linear_history"]["enabled"],
51 | "dismiss_stale_reviews": d["required_pull_request_reviews"]["dismiss_stale_reviews"],
52 | "require_code_owner_reviews": d["required_pull_request_reviews"]["require_code_owner_reviews"],
53 | "required_signatures": d["required_signatures"]["enabled"],
54 | }
55 | }
56 |
57 | unchanged_protection[x] = protection_data[x] {
58 | protection_data[x] == data.github.state.branches.protection_data[x]
59 | }
60 |
61 | protection_diff[x] = protection_data[x] {
62 | not protection_data[x] == data.github.state.branches.protection_data[x]
63 | }
64 |
65 | recommendation_diff[x] = v {
66 | not filtered_protection_data[x] == data.github.state.branches.recommended_protection
67 | v := filtered_protection_data[x]
68 | }
69 |
70 | protected_findings = v {
71 | count(unprotected_branches) > 1
72 | c_findings := "(i) %d branches lacking any protection rules."
73 | v := sprintf(c_findings, [count(unprotected_branches)])
74 | }
75 |
76 | protected_findings = v {
77 | count(unprotected_branches) == 1
78 | v := "(i) 1 branch lacking any protection rules."
79 | }
80 |
81 | protected_findings = v {
82 | count(unprotected_branches) == 0
83 | v := "(v) all branches are protected."
84 | }
85 |
86 | diff_findings = v {
87 | count(protected_branches) == 0
88 | v := "(i) no branches are protected."
89 | }
90 |
91 | diff_findings = v {
92 | count(protection_diff) == 1
93 | v := "(v) 1 branch is protected."
94 | }
95 |
96 | diff_findings = v {
97 | count(protection_diff) > 1
98 | c_findings := "(v) %d branches are protected."
99 | v := sprintf(c_findings, [count(protection_diff)])
100 | }
101 |
102 | eval = v {
103 | pre_merged_responses := utils.merge(responses, data.github.repos.responses)
104 | merged_responses := utils.merge(protection_responses, pre_merged_responses)
105 |
106 | v := { "state": { "unprotected_branches": unprotected_branches,
107 | "protection_diff": protection_diff },
108 | "processing_errors": { k: v | some k; v := merged_responses[k]; utils.is_error(v) },
109 | }
110 | }
111 |
112 | findings := concat("\n\n", [protected_findings, diff_findings])
113 |
114 | overview_section := concat("\n", [
115 | "Branch protection are specific protection mechanisms that limit users from making dangerous modifications of your repositories.",
116 | "Branch protection rules include requiring pull-request reviews, signed commits and limiting deleting history.",
117 | "GitHub Branch protection rules are detailed at the following link:",
118 | ".",
119 | "Branch protection is managed at the repository-branch level.",
120 | ])
121 |
122 | recommendation_section := concat("\n", [
123 | "You should configure branch protection for the main branches of your repositories.",
124 | "Branch protection rules for these branches should include requiring pull-request-reviews, signed commits, and not allowing deletions.",
125 | ])
126 |
127 | module_title := "## Branch Protection"
128 | overview_report := concat("\n", [
129 | module_title,
130 | "### Motivation",
131 | overview_section,
132 | "",
133 |
134 | "### Key Findings",
135 | findings,
136 | "",
137 | "See [below](#branch-protection-1) for a detailed report.",
138 | "",
139 |
140 | "### Our Recommendation",
141 | recommendation_section,
142 | "This can be done from the following links:",
143 | "",
144 | "Click to expand
",
145 | "",
146 | utils.json_to_md_list(settings_urls, " "),
147 | " ",
148 | "",
149 | ])
150 |
151 | settings_urls := { v |
152 | some k, r in data.github.repos.repos
153 | v := sprintf("<%s>", [concat("/", [r.html_url, "settings", "branches"])])
154 | }
155 |
156 | detailed_report := concat("\n", [
157 | module_title,
158 | overview_section,
159 | recommendation_section,
160 | "",
161 | "Go [back](#branch-protection) to the overview report.",
162 | "",
163 |
164 | "",
165 | " Branch Protection
",
166 | "",
167 | protection_details,
168 | " ",
169 | "",
170 |
171 | "",
172 | " Unprotected Branches
",
173 | "",
174 | unprotected_details,
175 | " ",
176 | "",
177 | ])
178 |
179 | create_table_row(k, v, r, e) = res {
180 | res := { "Setting": k, "Value": v, "Recommended": r, "Explanation": e }
181 | }
182 |
183 | explanations := {
184 | "allow_deletions": "",
185 | "allow_force_pushes": "",
186 | "block_creations": "",
187 | "enforce_admins": "",
188 | "required_conversation_resolution": "",
189 | "required_linear_history": "",
190 | "dismiss_stale_reviews": "",
191 | "require_code_owner_reviews": "",
192 | "required_signatures": "",
193 | }
194 |
195 | protection_table_data[x] := v {
196 | d := recommendation_diff[x]
197 | r := data.github.state.branches.recommended_protection
198 | v := [ row | some k, diff in d; row := create_table_row(k, d[k], r[k], explanations[k]) ]
199 | }
200 |
201 | format_table_row(row) = res {
202 | res := sprintf("| %v | %v | %v | %v |", [row["Setting"], row["Value"], row["Recommended"], row["Explanation"]])
203 | }
204 |
205 | table_header := "| Setting | Value | Recommended | Explanation |"
206 | delim := "| --- | --- | --- | --- |"
207 |
208 | format_table(table_data) = res {
209 | rows := [ format_table_row(x) | some x in table_data ]
210 | concated_rows := concat("\n", rows)
211 | res := concat("\n", [table_header, delim, concated_rows, ""])
212 | }
213 |
214 | unprotected_details = v {
215 | count(unprotected_branches) == 0
216 | v := "None"
217 | }
218 |
219 | unprotected_details = v {
220 | count(unprotected_branches) > 0
221 |
222 | table := { branch: link |
223 | branch := unprotected_branches[x]
224 | parts := split(branch, "/")
225 | repo_full := concat("/", [parts[0], parts[1]])
226 | link := sprintf("[Settings]()", [repo_full])
227 | }
228 |
229 | header := "| Branch | Link |"
230 | delim := "| --- | --- |"
231 | body := utils.json_to_md_dict_to_table(table, " ")
232 | v := concat("\n", [header, delim, body])
233 | }
234 |
235 | protection_details = v {
236 | count(recommendation_diff) == 0
237 | v := "None"
238 | }
239 |
240 | tables := { k: v |
241 | some k, t in protection_table_data
242 | v := sprintf("%s", [sprintf(format_table(t), [])])
243 | }
244 |
245 | protection_details = v {
246 | count(recommendation_diff) > 0
247 | v := utils.json_to_md_dict(tables, ":\n\n", " ")
248 | }
249 |
250 | verified_history_rule := v {
251 | v := {
252 | "id": "GGS002",
253 | "name": "SourceHistoryVerified",
254 | "shortDescription": {
255 | "text": "All commits are signed."
256 | },
257 | "fullDescription": {
258 | "text": concat("\n", [
259 | "Every change in the revision’s history has at least one strongly authenticated actor identity (author, uploader, reviewer, etc.) and timestamp. It must be clear which identities were verified, and those identities must use two-step verification or similar. (Exceptions noted below.)",
260 | "",
261 | "[First-parent history] In the case of a non-linear version control system, where a revision can have more than one parent, only the “first parent history” is in scope. In other words, when a feature branch is merged back into the main branch, only the merge itself is in scope.",
262 | "",
263 | "[Historical cutoff] There is some TBD exception to allow existing projects to meet SLSA 3/4 even if historical revisions were present in the history. Current thinking is that this could be either last N months or a platform attestation guaranteeing that future changes in the next N months will meet the requirements.",
264 | ])
265 | },
266 | "messageStrings": {
267 | "pass": {
268 | "text": "Signed commits are requried by the branch protection rules."
269 | },
270 | "fail": {
271 | "text": "Signed commits are NOT requried by the branch protection rules."
272 | }
273 | }
274 | }
275 | }
276 |
277 | verified_history_result = v {
278 | filtered_protection_data[input.slsa.protected_branch].required_signatures == true
279 | v := {
280 | "ruleId": verified_history_rule.id,
281 | "level": "note",
282 | "message": {
283 | "id": "pass",
284 | }
285 | }
286 | }
287 |
288 | verified_history_result = v {
289 | filtered_protection_data[input.slsa.protected_branch].required_signatures == false
290 | v := {
291 | "ruleId": verified_history_rule.id,
292 | "level": "error",
293 | "message": {
294 | "id": "fail",
295 | }
296 | }
297 | }
298 |
299 | verified_history_result = v {
300 | not utils.exists(filtered_protection_data, input.slsa.protected_branch)
301 | v := {
302 | "ruleId": verified_history_rule.id,
303 | "level": "error",
304 | "message": {
305 | "id": "fail",
306 | }
307 | }
308 | }
309 |
310 |
311 |
--------------------------------------------------------------------------------
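For illustration only: a hedged sketch of the input.slsa section consumed by the SLSA rules in branches.rego and repos.rego. The URL and branch are placeholders; the protected_branch key follows the "owner/repo/branches/branch" form that the branches rule builds from the repository full name and branch name.

package example

# Hypothetical input fragment for the SLSA checks; the same fields would sit under "slsa" in input.json.
slsa_example := {
    "repository_url": "https://github.com/my-org/my-repo",
    "protected_branch": "my-org/my-repo/branches/main"
}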
/github/collaborators.rego:
--------------------------------------------------------------------------------
1 | package github.collaborators
2 |
3 | import future.keywords.in
4 | import data.github.utils as utils
5 |
6 | orgs := data.github.orgs.orgs
7 |
8 | collaborators_urls[r.full_name] = url {
9 | some r in data.github.repos.repos
10 | url := trim_suffix(r.collaborators_url, "{/collaborator}")
11 | }
12 | collaborators_responses[x] = utils.parse(data.github.api.call_github_abs(collaborators_urls[x]))
13 |
14 | members_urls[orgs[x].login] = trim_suffix(orgs[x].members_url, "{/member}")
15 | members_responses[x] = utils.parse(data.github.api.call_github_abs(members_urls[x]))
16 |
17 | collaborators_successes[x] = collaborators_responses[x] {
18 | not utils.is_error(collaborators_responses[x])
19 | }
20 | members_successes[x] = members_responses[x] {
21 | not utils.is_error(members_responses[x])
22 | }
23 |
24 | collaborators[x] = utils.flatten_array(collaborators_successes[x], "login")
25 | members[x] = utils.flatten_array(members_successes[x], "login")
26 |
27 | non_members_collaborators[k] = vv {
28 | some k, v in collaborators
29 |
30 | owner := split(k, "/")[0]
31 |
32 | vv := utils.array_subtraction(v, members[owner])
33 | owner in utils.keys(members)
34 | not owner in v
35 | }
36 |
37 | non_members_collaborators[k] = vv {
38 | some k, v in collaborators
39 |
40 | owner := split(k, "/")[0]
41 |
42 | vv := v
43 | not owner in utils.keys(members)
44 | not owner in v
45 | }
46 |
47 |
48 | unknown_collaborators[x] = utils.array_subtraction(non_members_collaborators[x], data.github.state.collaborators.known[x])
49 | unknown_collaborators[x] = non_members_collaborators[x] {
50 | not utils.exists(data.github.state.collaborators.known, x)
51 | }
52 |
53 | non_empty_collaborators[x] = unknown_collaborators[x] {
54 | count(unknown_collaborators[x]) > 0
55 | }
56 |
57 | eval = v {
58 | pre_merged_responses := utils.merge(collaborators_responses, data.github.repos.responses)
59 | merged_responses := utils.merge(members_responses, pre_merged_responses)
60 | v := { "state": {"unknown": unknown_collaborators},
61 | "processing_errors": { k: v | some k; v := merged_responses[k]; utils.is_error(v)},
62 | }
63 | }
64 |
65 | members_findings = v {
66 | count(non_empty_collaborators) > 1
67 | c_findings := "(i) %d of your repositories have collaborators."
68 | v := sprintf(c_findings, [count(non_empty_collaborators)])
69 | }
70 |
71 | members_findings = v {
72 | count(non_empty_collaborators) == 1
73 | v := "(i) 1 repository has collaborators."
74 | }
75 |
76 | members_findings = v {
77 | count(non_empty_collaborators) == 0
78 | v := "(v) your repositories do not have out of organization collaborators."
79 | }
80 |
81 | findings := concat("\n", [members_findings])
82 |
83 | overview_section := concat("\n", [
84 | "Collaborators are people outside of the organization who are active in your repositories.",
85 | ])
86 |
87 | recommendation_section := concat("\n", [
88 | "Regularly review the collaborators of your repositories, and block users that are not collaborators anymore.",
89 | ])
90 |
91 | module_title := "## Collaborators"
92 | overview_report := concat("\n", [
93 | module_title,
94 | "### Motivation",
95 | overview_section,
96 | "",
97 |
98 | "### Key Findings",
99 | findings,
100 | "",
101 | "See [below](#collaborators-1) for a detailed report.",
102 | "",
103 |
104 | "### Our Recommendation",
105 | recommendation_section,
106 | "Blocking members is done through the following links:",
107 | "",
108 | "Click to expand
",
109 | "",
110 | utils.json_to_md_list(settings_urls, " "),
111 | " ",
112 | "",
113 | ])
114 |
115 | settings_urls := { v |
116 | some repo, _ in non_empty_collaborators
117 | v := sprintf("<%s>", [concat("/", ["https://github.com", repo, "settings", "access"])])
118 | }
119 |
120 | detailed_report := concat("\n", [
121 | module_title,
122 | overview_section,
123 | recommendation_section,
124 | "",
125 | "Go [back](#collaborators) to the overview report.",
126 | "",
127 |
128 | "",
129 | " Outside Collaborators
",
130 | "",
131 | collaborators_details,
132 | " ",
133 | "",
134 | ])
135 |
136 | collaborators_details = v {
137 | count(non_empty_collaborators) == 0
138 | v := "None"
139 | }
140 |
141 | collaborators_details = v {
142 | count(non_empty_collaborators) > 0
143 | v := utils.json_to_md_dict_of_lists(non_empty_collaborators, " ")
144 | }
145 |
146 | # See comment about update in admins.rego
147 | update := v {
148 | v := { "known": non_empty_collaborators, }
149 | }
150 |
--------------------------------------------------------------------------------
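For illustration only: a minimal sketch of the state that collaborators.rego reads through data.github.state.collaborators.known. The repository name and login are placeholders.

package github.state.collaborators

# Hypothetical example: outside collaborators that were already reviewed, keyed by repository full name.
# collaborators.rego subtracts these from the currently detected outside collaborators.
known := {
    "my-org/my-repo": ["external-contractor"]
}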
/github/commits.rego:
--------------------------------------------------------------------------------
1 | package github.commits
2 |
3 | import future.keywords.in
4 | import data.github.utils as utils
5 |
6 | commits_urls[x] = trim_suffix(data.github.repos.repos[x].commits_url, "{/sha}")
7 |
8 | responses[x] = utils.parse(data.github.api.call_github_abs(commits_urls[x])) {
9 | some x, _ in data.github.state.commits.config
10 | }
11 |
12 | successes[x] = responses[x] {
13 | not utils.is_error(responses[x])
14 | }
15 |
16 | # Not verified and not allowed
17 | commits_unverified = { repo: result |
18 | some repo, repo_commits in successes
19 | result := [ x |
20 | c := repo_commits[_]
21 | not c.commit.verification.verified
22 | not c.author.login in data.github.state.commits.config[repo].allow_unverified
23 | x := { "sha": c.sha, "message": c.commit.message, "author": c.author.login }
24 | ]
25 | }
26 |
27 | commits[x] := utils.flatten_array(successes[x], "sha")
28 | commits_history[x] := utils.array_subtraction(commits[x], data.github.state.commits.config[x].history)
29 | authors[x] := utils.flatten_array(utils.flatten_array(successes[x], "author"), "login")
30 | authors_set := { x: v |
31 | some x, arr in authors
32 | v := { y | y := arr[_] }
33 | }
34 |
35 | eval = v {
36 | merged_responses := utils.merge(responses, data.github.repos.responses)
37 | v := {
38 | "state": {"unverified": commits_unverified,
39 | "history": commits_history},
40 | "processing_errors": { k: v | some k; v := merged_responses[k]; utils.is_error(v) },
41 | }
42 | }
43 |
44 | findings = v {
45 | count(commits_unverified) == 0
46 | count(commits) > 0
47 | v := "(v) all commits are verified."
48 | }
49 |
50 | findings = v {
51 | count(commits_unverified) == 0
52 | count(commits) == 0
53 | v := "(i) no data was fetched. The module needs configuration. Add the configuration section to the input file: 'commits': { '': 'allow_unverified': [], ''history: [] }"
54 | }
55 |
56 | findings = v {
57 | count(commits_unverified) == 1
58 | v := "(i) 1 commit is not verified."
59 | }
60 |
61 | findings = v {
62 | count(commits_unverified) > 1
63 | c_findings := "(i) %d commits are not verified."
64 | v := sprintf(c_findings, [count(commits_unverified)])
65 | }
66 |
67 | overview_section := concat("\n", [
68 | "Signing commits prevents unauthorized people from committing code into your repositories.",
69 | "In case you have not deployed appropriate branch protection rules,",
70 | "the following findings display the signing status of individual commits.",
71 | ])
72 |
73 | recommendation_section := concat("\n", [
74 | "You should either configure branch protection rules to enforce signed commits, or require developers to sign their commits.",
75 | "Instructions for configuring your local git installation to sign commits to work with GitHub can be found here:",
76 | "",
77 | ])
78 |
79 | module_title := "## Signed Commits"
80 | overview_report := concat("\n", [
81 | module_title,
82 | "### Motivation",
83 | overview_section,
84 | "",
85 |
86 | "### Key Findings",
87 | findings,
88 | "",
89 | "See [below](#signed-commits-1) for a detailed report.",
90 | "",
91 |
92 | "### Our Recommendation",
93 | recommendation_section,
94 | "",
95 | ])
96 |
97 | detailed_report := concat("\n", [
98 | module_title,
99 | overview_section,
100 | recommendation_section,
101 | "",
102 | "Go [back](#signed-commits) to the overview report.",
103 | "",
104 |
105 | "",
106 | " Unverified Commits
",
107 | "",
108 | unverified_commits_details,
109 | " ",
110 | "",
111 | ])
112 |
113 | unverified_commits_data = { repo: result |
114 | some repo, repo_commits in commits_unverified
115 | result := [ x |
116 | c := repo_commits[_]
117 |
118 | url := sprintf("https://github.com/%s/commit/%s", [repo, c.sha])
119 | f_url := sprintf("[%s](<%s>)", [c.sha, url])
120 |
121 | x := { "Author": c.author, "Message": c.message, "Commit": f_url }
122 | ]
123 | }
124 |
125 | unverified_commits_details := v {
126 | count(commits_unverified) > 0
127 |
128 | table_keys := ["Author", "Message", "Commit"]
129 | tables := { repo:
130 | utils.json_to_md_array_of_dict_to_table(unverified_commits_data[repo], table_keys, "") }
131 |
132 | v := utils.json_to_md_dict(tables, ":\n\n", " ")
133 | }
134 |
--------------------------------------------------------------------------------
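For illustration only: a hedged sketch of the commits configuration that the findings message above asks for, matching the fields commits.rego reads (allow_unverified and history per repository). The repository name and bot login are placeholders, and the real github/state/commits.rego may populate this from the input file instead.

package github.state.commits

# Hypothetical example: per-repository commit-verification settings.
# Commits authored by logins listed in allow_unverified are not reported as unverified;
# shas listed in history are treated as already known.
config := {
    "my-org/my-repo": {
        "allow_unverified": ["trusted-bot"],
        "history": []
    }
}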
/github/data.json:
--------------------------------------------------------------------------------
1 | {
2 | "github": {
3 | "rule_set": "user",
4 | "debug_modules": [ "files" ]
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/github/deploy_keys.rego:
--------------------------------------------------------------------------------
1 | package github.deploy_keys
2 |
3 | import future.keywords.in
4 | import data.github.utils as utils
5 |
6 | # Get URLs
7 | deploy_keys_urls[r.full_name] = url {
8 | some r in data.github.repos.responses[x]
9 | url := trim_suffix(r.keys_url, "{/key_id}")
10 | }
11 | responses[x] = utils.error_substitute(
12 | utils.parse(data.github.api.call_github_abs(deploy_keys_urls[x])),
13 | { "404 Not Found: Not Found": "This accout is not allowed to get deploy keys for this repository" } )
14 |
15 | successes[x] = responses[x] {
16 | not utils.is_error(responses[x])
17 | }
18 |
19 | deploy_keys[x] = utils.flatten_array(successes[x], "key")
20 | keys[x] = utils.array_subtraction(deploy_keys[x], data.github.state.deploy_keys.keys[x])
21 | keys[x] = deploy_keys[x] {
22 | not utils.exists(data.github.state.deploy_keys.keys, x)
23 | }
24 |
25 | non_empty_keys[x] = keys[x] {
26 | count(keys[x]) > 0
27 | }
28 |
29 | expired[k.id] = v {
30 | k := successes[_][_]
31 | utils.is_expired(k, data.github.state.deploy_keys.expiration)
32 | v := json.filter(k, ["id", "created_at"])
33 | }
34 |
35 | all_keys[k.id] = v {
36 | k := successes[_][_]
37 | k.key == keys[_][_]
38 | v := json.filter(k, ["id", "created_at", "title", "url"])
39 | }
40 |
41 | non_empty_findings = v {
42 | count(non_empty_keys) > 1
43 | c_findings := "(i) %d keys are configured for the repositories."
44 | v := sprintf(c_findings, [count(non_empty_keys)])
45 | }
46 |
47 | non_empty_findings = v {
48 | count(non_empty_keys) == 1
49 | v := "(i) 1 key is configured for the repository."
50 | }
51 |
52 | non_empty_findings = v {
53 | count(non_empty_keys) == 0
54 | v := "(v) no new keys."
55 | }
56 |
57 | expired_findings = v {
58 | count(expired) == 0
59 | v := "(v) no keys are expired."
60 | }
61 |
62 | expired_findings = v {
63 | count(expired) == 1
64 | v := "(i) 1 key is expired."
65 | }
66 |
67 | expired_findings = v {
68 | count(expired) > 1
69 | c_findings := "(i) %d keys are expired."
70 | v := sprintf(c_findings, [count(expired)])
71 | }
72 |
73 |
74 | eval = v {
75 | merged_responses := utils.merge(responses, data.github.repos.responses)
76 | v := { "state": {"expired": expired, "keys": non_empty_keys},
77 | "processing_errors": { k: v | some k; v := merged_responses[k]; utils.is_error(v) },
78 | }
79 | }
80 |
81 | findings := concat("\n\n", [non_empty_findings, expired_findings])
82 |
83 | overview_section := concat("\n", [
84 | "Deploy keys are an authentication tool to enable access to repositories.",
85 | "Manage your deploy keys to ensure you have not left keys that can be wrongfully used.",
86 | "GitHub’s explanation about deploy keys can be found here:",
87 | "",
88 | "",
89 | ])
90 |
91 | recommendation_section := concat("\n", [
92 | "Deploy keys are SSH keys assigned to each repository that allow reading and (optional) writing to private repositories.",
93 | "We recommend you review your SSH keys regularly; ensure you are familiar with the keys and their use.",
94 | "In case of an upcoming expiration date - ensure you replace the keys on time.",
95 | ])
96 |
97 | module_title := "## Deploy Keys"
98 | overview_report := concat("\n", [
99 | module_title,
100 | "### Motivation",
101 | overview_section,
102 | "",
103 |
104 | "### Key Findings",
105 | findings,
106 | "",
107 | "See [below](#deploy-keys-1) for a detailed report.",
108 | "",
109 |
110 | "### Our Recommendation",
111 | recommendation_section,
112 | "Deploy keys can be managed at the following links:",
113 | "",
114 | "Click to expand
",
115 | "",
116 | utils.json_to_md_list(settings_urls, " "),
117 | " ",
118 | "",
119 | ])
120 |
121 | settings_urls := { v |
122 | some x, _ in non_empty_keys
123 | r := data.github.repos.repos[x]
124 | v := sprintf("<%s>", [concat("/", [r.html_url, "settings", "keys"])])
125 | }
126 |
127 | detailed_report := concat("\n", [
128 | module_title,
129 | overview_section,
130 | recommendation_section,
131 | "",
132 | "Go [back](#deploy-keys) to the overview report.",
133 | "",
134 |
135 | "Expired",
136 | "",
137 | expired_details,
138 | "",
139 |
140 | "All",
141 | "",
142 | non_empty_details,
143 | ""
144 | ])
145 |
146 | expired_details = v {
147 | count(expired) == 0
148 | v := "None"
149 | }
150 |
151 | expired_details = v {
152 | count(expired) > 0
153 | v_data := [ q |
154 | k := expired[_]
155 | q := { "Key": k.title, "Creation time": k.created_at,
156 | "Link": k.url }
157 | ]
158 |
159 | expired_details_keys := ["Key", "Creation time", "Link"]
160 | v := sprintf("%s", [utils.json_to_md_array_of_dict_to_table(v_data,
161 | expired_details_keys, "")])
162 | }
163 |
164 | non_empty_details = v {
165 | count(all_keys) == 0
166 | v := "None"
167 | }
168 |
169 | non_empty_details = v {
170 | count(all_keys) > 0
171 | v_data := [ q |
172 | some k in all_keys
173 | q := { "Key": k.title, "Creation time": k.created_at,
174 | "Link": k.url }
175 | ]
176 |
177 | non_empty_details_keys := ["Key", "Creation time", "Link"]
178 | v := sprintf("%s", [utils.json_to_md_array_of_dict_to_table(v_data,
179 | non_empty_details_keys, "")])
180 | }
181 |
182 | # See comment about update in admins.rego
183 | update := v {
184 | v := { "keys": non_empty_keys, }
185 | }
186 |
--------------------------------------------------------------------------------
/github/files.rego:
--------------------------------------------------------------------------------
1 | package github.files
2 |
3 | import future.keywords.in
4 | import data.github.utils as utils
5 |
6 | all_commits_urls[x] = trim_suffix(data.github.repos.repos[x].commits_url, "{/sha}")
7 |
8 | commits_responses[x] = utils.parse(data.github.api.call_github_abs(all_commits_urls[x])) {
9 | some x, _ in data.github.state.files.permissions
10 | }
11 |
12 | commits_successes[x] = commits_responses[x] {
13 | not utils.is_error(commits_responses[x])
14 | }
15 |
16 | # Get URLs
17 | # "/": [ { "sha": , "url": } ]
18 | # "/": {"sha": }
19 |
20 | commits_urls = { repo: commits |
21 | some repo, files in data.github.state.files.permissions
22 | commits = { sha: url |
23 | some commit in commits_successes[repo]
24 | sha := commit.sha
25 | url := commit.url
26 | }
27 | }
28 |
29 | responses = { repo: commits |
30 | some repo, urls in commits_urls
31 | commits = { sha: response |
32 | some sha, url in urls
33 | response := data.github.utils.parse(data.github.api.call_github_abs(url))
34 | }
35 | }
36 |
37 | # "org/repo": {
38 | # "sha": {
39 | # "committer": "login",
40 | # "files": [ "filename" ]
41 | # }
42 | # }
43 | filtered = { repo: filtered_commits |
44 | some repo, response in responses
45 | filtered_commits = { sha: commit |
46 | some sha, commits in response
47 | filtered_commit := json.filter(commits, ["author/login", "committer/login", "files", "html_url"])
48 | commit := {
49 | "author": filtered_commit.author.login,
50 | "committer": filtered_commit.committer.login,
51 | "html_url": filtered_commit.html_url,
52 | "files": [ filename |
53 | filename := filtered_commit.files[_].filename
54 | ]
55 | }
56 | }
57 | }
58 |
59 | commit_contains_file(permissions, commit) {
60 | p := permissions[file]
61 | regex.match(file, commit.files[x])
62 | }
63 |
64 | commit_okay(permissions, commit) {
65 | commit_contains_file(permissions, commit)
66 |
67 | p := permissions[file]
68 | regex.match(file, commit.files[x])
69 | commit.committer == p.committers[y]
70 | commit.author == p.authors[z]
71 | }
72 |
73 | commit_okay(permissions, commit) {
74 | not commit_contains_file(permissions, commit)
75 | }
76 |
77 | violating_commits = { repo: checked |
78 | some repo, commits in filtered
79 | checked = { sha: commit |
80 | some sha, commit in commits
81 | not commit_okay(data.github.state.files.permissions[repo], commit)
82 | }
83 | }
84 |
85 | eval = v {
86 | merged_responses := responses
87 | v := {
88 | "state": {"violating_commits": violating_commits},
89 | "processing_errors": { k: v | some k; v := merged_responses[k]; data.github.utils.is_error(v) },
90 | "description": "The files module checks for modifications of specific files in a repostitory. Only committers that are listed in the configurable state are allowed to modify those files. This module does nothing without pre-configuring."
91 | }
92 | }
93 |
94 | overview_section :=
95 | `
96 | ### Motivation
97 |
98 | In many cases your repository includes sensitive files,
99 | such as CI pipeline and IaC definitions. You should manage
100 | who’s allowed to modify these files. To use this rule, configure
101 | the file-name patterns of the files you want to track.
102 | `
103 |
104 | recommendation_section :=
105 | `Configure the rule and regularly track access to sensitive files.`
106 |
107 | module_title := "## Fine Grained File Access Tracking"
108 | overview_report := concat("\n", [
109 | module_title,
110 | overview_section,
111 |
112 | "### Key Findings",
113 | findings,
114 | "",
115 | "See [below](#fine-grained-file-access-tracking-1) for a detailed report.",
116 | "",
117 |
118 | "### Our Recommendation",
119 | "",
120 | ])
121 |
122 | findings = "There are no violating commits." {
123 | violating_counts := [ count(x) | some x in violating_commits ]
124 | all([violating_counts[_] == 0])
125 | }
126 |
127 | findings = sprintf("There are %d violating commits.", [v]) {
128 | violating_counts := [ count(x) | some x in violating_commits ]
129 | any([violating_counts[_] > 0])
130 | v := sum(violating_counts)
131 | }
132 |
133 | violating_details = v {
134 | count(violating_commits) > 0
135 | violating_commits_lists := { k: v |
136 | some k, vv in violating_commits
137 | v := [ c.html_url | some c in vv ]
138 | }
139 | v := utils.json_to_md_dict_of_lists(violating_commits_lists, " ")
140 | #v := "Some"
141 | }
142 |
143 | detailed_report := concat("\n", [
144 | module_title,
145 | overview_section,
146 | recommendation_section,
147 | "",
148 | "Go [back](#fine-grained-file-access-tracking) to the overview report.",
149 | "",
150 |
151 | "",
152 | " Violating Commits
",
153 | "",
154 | violating_details,
155 | " ",
156 | "",
157 | ])
158 |
--------------------------------------------------------------------------------
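For illustration only: a hedged sketch of the permissions state that files.rego evaluates via data.github.state.files.permissions. The repository, the file-name regular expression, and the login are placeholders; each pattern maps to the committers and authors allowed to touch matching files, as checked by commit_okay above.

package github.state.files

# Hypothetical example: only release-manager may author or commit changes to workflow files in my-org/my-repo.
permissions := {
    "my-org/my-repo": {
        ".github/workflows/.*": {
            "committers": ["release-manager"],
            "authors": ["release-manager"]
        }
    }
}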
/github/gh_utils.rego:
--------------------------------------------------------------------------------
1 | package github.gh_utils
2 |
3 | import future.keywords.in
4 | import data.github.utils as utils
5 |
6 | get_nested_data(url_collection, url, suffix, filter, err_substitute) = v {
7 | u = concat("?", [trim_suffix(url_collection[url], suffix), filter])
8 | r := utils.parse(data.github.api.call_github_abs(u))
9 | v := utils.error_substitute(r, err_substitute)
10 | }
11 |
--------------------------------------------------------------------------------
/github/github.rego:
--------------------------------------------------------------------------------
1 | # TODO Unclear what happens if the package name is github
2 | # then eval rule becomes recursive
3 | package gh
4 |
5 | import future.keywords.in
6 | import data.github.utils as utils
7 |
8 | rule_set := input.rule_set { utils.exists(input, "rule_set") } else := data.github.rule_set
9 |
10 | gh_modules["user"] := [
11 | "tfa",
12 | "admins",
13 | "hooks",
14 | "teams",
15 | "ssh_keys",
16 | "deploy_keys",
17 | "branches",
18 | "commits"
19 | ]
20 |
21 | gh_requesting_modules["user"] := [
22 | "init",
23 | "repos",
24 | "tfa",
25 | "admins",
26 | "hooks",
27 | "teams",
28 | "deploy_keys",
29 | "branches",
30 | "commits"
31 | ]
32 |
33 | gh_modules["org"] := [
34 | "tfa",
35 | "admins",
36 | "hooks",
37 | "teams",
38 | "deploy_keys",
39 | "files",
40 | "branches",
41 | "commits"
42 | ]
43 |
44 | gh_requesting_modules["org"] := [
45 | "init",
46 | "repos",
47 | "tfa",
48 | "admins",
49 | "hooks",
50 | "teams",
51 | "deploy_keys",
52 | "files",
53 | "branches",
54 | "commits"
55 | ]
56 |
57 | m_states = { v |
58 | some m in gh_modules[rule_set]
59 | v := {
60 | m: data.github[m].eval.state,
61 | }
62 | }
63 |
64 | m_errors = { v |
65 | some rm in gh_requesting_modules[rule_set]
66 | v := {
67 | concat("_", [rm, "processing_errors"]): { k: v | some k; v := data.github[rm].responses[k]; data.github.utils.is_error(v) }
68 | }
69 | }
70 |
71 | eval := { "state": m_states, "errors": m_errors }
72 |
73 | post_gist = response.status {
74 | response := data.github.api.post_github("gists",
75 | { "files":
76 | {
77 | "report.md":
78 | { "content": data.github.report.f_report },
79 |
80 | "report.json":
81 | { "content": sprintf("%s\n", [eval]) }
82 | },
83 | "description": "GitHub security posture report"
84 | }
85 | )
86 | }
87 |
--------------------------------------------------------------------------------
/github/hooks.rego:
--------------------------------------------------------------------------------
1 | package github.hooks
2 |
3 | import future.keywords.in
4 | import data.github.utils as utils
5 |
6 | # Get URLs
7 | hooks_urls[name] = url {
8 | some r in data.github.repos.responses[x]
9 | name := r.full_name
10 | url := r.hooks_url
11 | }
12 | responses[x] = utils.error_substitute(
13 | utils.parse(data.github.api.call_github_abs(hooks_urls[x])),
14 | { "404 Not Found: Not Found": "This account is not allowed to get hooks configuration for this repository" } )
15 |
16 | successes[x] := responses[x] {
17 | not utils.is_error(responses[x])
18 | }
19 |
20 | hooks[repo] = result {
21 | some repo, repo_hooks in successes
22 |
23 | utils.exists(data.github.state.hooks.config, repo)
24 |
25 | result := [ x |
26 | h := repo_hooks[_]
27 | h == data.github.state.hooks.config[repo][_]
28 |
29 | x := { "id": h.id, "created_at": h.created_at, "config": { "url": h.config.url } }
30 | ]
31 | }
32 |
33 | hooks[repo] = result {
34 | some repo, repo_hooks in successes
35 |
36 | not utils.exists(data.github.state.hooks.config, repo)
37 |
38 | result := [ x |
39 | h := repo_hooks[_]
40 | x := { "id": h.id, "created_at": h.created_at, "config": { "url": h.config.url } }
41 | ]
42 | }
43 |
44 | new_hooks[repo] = result {
45 | some repo, repo_hooks in successes
46 |
47 | result := [ x |
48 | x := repo_hooks[_]
49 | not x in hooks[repo]
50 | ]
51 |
52 | count(result) > 0
53 | }
54 |
55 | eval = v {
56 | pre_merged_responses := utils.merge(data.github.init.responses, data.github.repos.responses)
57 | merged_responses := utils.merge(responses, pre_merged_responses)
58 | v := {
59 | "state": {"hooks": new_hooks},
60 | "processing_errors": { k: v | some k; v := merged_responses[k]; utils.is_error(v) },
61 | "description": "Web hooks issue HTTP POST request to specified URLs when configured events occur. Make sure that all the configured hooks are approved. It is recommended to configure a secret associated with a Web hook to verify that the POST request is coming from GitHub."
62 | }
63 | }
64 |
--------------------------------------------------------------------------------
/github/orgs.rego:
--------------------------------------------------------------------------------
1 | package github.orgs
2 |
3 | import future.keywords.in
4 | import data.github.utils as utils
5 |
6 | default rule_set := "user"
7 | rule_set := input.rule_set { utils.exists(input, "rule_set") } else := data.github.rule_set
8 |
9 | user_response := v {
10 | rule_set == "user"
11 | v := utils.parse(data.github.api.call_github("user/orgs"))
12 | }
13 |
14 | responses["user/orgs"] := user_response
15 |
16 | responses[org.login] = v {
17 | rule_set == "user"
18 | some org in user_response
19 | v := utils.parse(data.github.api.call_github_abs(org.url))
20 | }
21 |
22 | responses[split(input.organizations[x], "/")[1]] = v {
23 | rule_set == "org"
24 | v := utils.parse(data.github.api.call_github(input.organizations[x]))
25 | }
26 |
27 | orgs[x] = responses[x] {
28 | not x == "user/orgs"
29 | not utils.is_error(responses[x])
30 | }
31 |
--------------------------------------------------------------------------------
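For illustration only: a hedged sketch of an organizational-mode input as orgs.rego expects it; the organization name is a placeholder. Entries in organizations are GitHub API paths, since orgs.rego passes them straight to call_github and keys the response by the path's second segment.

package example

# Hypothetical input fragment for running the "org" rule set.
org_mode_input := {
    "rule_set": "org",
    "organizations": ["orgs/my-org"]
}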
/github/report.rego:
--------------------------------------------------------------------------------
1 | package github.report
2 |
3 | import future.keywords.in
4 | import data.github.utils as utils
5 |
6 | report := [
7 | "# Your Gitgat Account Security Audit",
8 | "This report is the output of Gitgat, an experimental open source audit tool that will assist you in improving the security of your GitHub account.",
9 | "%s",
10 | "",
11 |
12 | "# Overview",
13 | "",
14 | "Gitgat automatically analyzes GitHub account and points to potential gaps as compared to security configuration best practices.",
15 | "As the project matures additional automated analyses will be added.",
16 | "",
17 | "%s",
18 |
19 | "# Detailed Results",
20 | "%s",
21 | "",
22 | ]
23 |
24 | rule_set := input.rule_set { utils.exists(input, "rule_set") } else := data.github.rule_set
25 | debug_modules := input.debug_modules { utils.exists(input, "debug_modules") } else := data.github.debug_modules
26 |
27 | gh_overview_modules["debug"] := debug_modules
28 |
29 | gh_detailed_modules["debug"] := debug_modules
30 |
31 | gh_intro["debug"] := "This report is a debug-view report, used by developers."
32 |
33 | gh_overview_modules["org"] := ["repos", "tfa", "admins", "teams", "collaborators", "branches", "commits", "deploy_keys", "files",]
34 |
35 | gh_overview_modules["user"] := ["repos", "tfa", "admins", "teams", "collaborators", "branches", "commits", "deploy_keys", "ssh_keys", "files",]
36 |
37 | gh_detailed_modules["org"] := ["repos", "tfa", "admins", "teams", "collaborators", "branches", "commits", "deploy_keys"]
38 |
39 | gh_detailed_modules["user"] := ["repos", "tfa", "admins", "teams", "collaborators", "branches", "commits", "deploy_keys", "ssh_keys"]
40 |
41 | gh_intro["org"] := v {
42 | orgs := utils.json_to_md_list(input.organizations, " ")
43 | v := sprintf("This report is an organizational report referring to the following organizations: %s", [orgs])
44 | }
45 |
46 | gh_intro["user"] := "This report is a user-view report, and includes all organizations that the user belongs to."
47 |
48 | f_report := v {
49 | overview_reports := [data.github[m].overview_report | some m in gh_overview_modules[rule_set]]
50 | overview_report := concat("\n", overview_reports)
51 |
52 | detailed_reports := [data.github[m].detailed_report | some m in gh_detailed_modules[rule_set]]
53 | detailed_report := concat("\n", detailed_reports)
54 |
55 | c_report := concat("\n", report)
56 | v := sprintf(c_report, [gh_intro[rule_set], overview_report, detailed_report])
57 | }
58 |
59 | print_report = v {
60 | print(f_report)
61 | v := 1
62 | }
63 |
64 | gh_update_modules["user"] := ["token", "tfa", "admins", "collaborators", "deploy_keys", "ssh_keys",]
65 |
66 | gh_update_modules["org"] := ["token", "tfa", "admins", "collaborators", "deploy_keys",]
67 |
68 | f_update := v {
69 | v := { m: data.github[m].update | some m in gh_update_modules[rule_set] }
70 | }
71 |
72 | print_update = v {
73 | print(f_update)
74 | v := 1
75 | }
76 |
--------------------------------------------------------------------------------
/github/repos.rego:
--------------------------------------------------------------------------------
1 | package github.repos
2 |
3 | import future.keywords.in
4 | import data.github.utils as utils
5 |
6 | rule_set := input.rule_set { utils.exists(input, "rule_set") } else := data.github.rule_set
7 |
8 | orgs = data.github.orgs.orgs
9 |
10 | repos_urls[x] = orgs[x].repos_url {
11 | rule_set == "org"
12 | }
13 | responses[x] = utils.parse(data.github.api.call_github_abs(repos_urls[x]))
14 |
15 | user_response := v {
16 | rule_set == "user"
17 | v := utils.parse(data.github.api.call_github("user/repos"))
18 | }
19 |
20 | # { "user/repos": [ { "name": "repo" } ] }
21 | responses["user/repos"] := user_response
22 |
23 | repos[k] = v {
24 | not utils.is_error(responses[x])
25 |
26 | r := responses[x][y]
27 | k := r.full_name
28 |
29 | v := json.filter(responses[x][y],
30 | ["private",
31 | "hooks_url",
32 | "keys_url",
33 | "collaborators_url",
34 | "owner/login",
35 | "owner/type",
36 | "full_name",
37 | "name",
38 | "commits_url",
39 | "html_url"])
40 | }
41 |
42 | owners := { r.owner.login |
43 | some r in repos
44 | }
45 |
46 | repos_per_owner[x] = v {
47 | some x in owners
48 | v := [ r.full_name | some r in repos; r.owner.login == x ]
49 | }
50 |
51 | private_repos := [ k |
52 | repos[k].private
53 | ]
54 |
55 | public_repos := [ k.full_name |
56 | some k in repos
57 | not k.full_name in private_repos
58 | ]
59 |
60 | private_repos_per_owner[x] = v {
61 | some x, x_repos in repos_per_owner
62 | v := [ r | some r in private_repos; some y in x_repos; r == y ]
63 | }
64 |
65 | public_repos_per_owner[x] = v {
66 | some x, x_repos in repos_per_owner
67 | v := [ r | some r in public_repos; some y in x_repos; r == y ]
68 | }
69 |
70 | overview_section := concat("\n", [
71 | "Public GitHub repositories enable open source collaboration.",
72 | "But mistakenly exposing a private repository as public",
73 | "may leak information and allow unwanted people access to your repositories.",
74 | ])
75 |
76 | recommendation_section := concat("\n", [
77 | "Regularly review your repositories to ensure private repositories have not been made public.",
78 | ])
79 |
80 | report := [
81 | "## Repository Public Visibility and Access",
82 | "### Motivation",
83 | "%s",
84 | "",
85 |
86 | "### Key Findings",
87 | "%s",
88 | "",
89 | "See [below](#repository-public-visibility-and-access-1) for a detailed report.",
90 | "",
91 |
92 | "### Our Recommendation",
93 | "%s",
94 | "",
95 | "Managing repositories visibility can be done through the following links:",
96 | "",
97 | "Click to expand
",
98 | "",
99 | "%s",
100 | " ",
101 | "",
102 | ]
103 |
104 | findings_per_owner[x] = v {
105 | count(public_repos_per_owner[x]) == 0
106 | v := "(v) no public repositories"
107 | }
108 |
109 | # format_strings := {
110 | # { 1:
111 | # { 1: "(i) %d out of %d repository is public" },
112 | # {
113 | # format_string[num_public_repos][total_repos]
114 |
115 | findings_per_owner[x] = v {
116 | num_public_repos := count(public_repos_per_owner[x])
117 | num_public_repos == 1
118 | total_repos := num_public_repos + count(private_repos_per_owner[x])
119 | total_repos > 1
120 | v := sprintf("(i) %d out of %d repositories is public", [num_public_repos, total_repos])
121 | }
122 |
123 | findings_per_owner[x] = v {
124 | num_public_repos := count(public_repos_per_owner[x])
125 | num_public_repos == 1
126 | total_repos := num_public_repos + count(private_repos_per_owner[x])
127 | total_repos == 1
128 | v := sprintf("(i) %d out of %d repository is public", [num_public_repos, total_repos])
129 | }
130 |
131 | findings_per_owner[x] = v {
132 | num_public_repos := count(public_repos_per_owner[x])
133 | num_public_repos > 1
134 | total_repos := num_public_repos + count(private_repos_per_owner[x])
135 | total_repos > 1
136 | v := sprintf("(i) %d out of %d repositories are public", [num_public_repos, total_repos])
137 | }
138 |
139 | findings = v {
140 | header := "| Owner | Findings |"
141 | delim := "| --- | --- |"
142 | body := utils.json_to_md_dict_to_table(findings_per_owner, " ")
143 | v := concat("\n", [header, delim, body])
144 | }
145 |
146 | settings_urls := { v |
147 | some k in public_repos
148 | v := sprintf("<%s>", [concat("/", [repos[k].html_url, "settings"])])
149 | }
150 |
151 | overview_report := v {
152 | c_report := concat("\n", report)
153 | urls := utils.json_to_md_list(settings_urls, " ")
154 | v := sprintf(c_report, [overview_section, findings, recommendation_section, urls])
155 | }
156 |
157 | d_report := [
158 | "## Repository Public Visibility and Access",
159 | "%s",
160 | "%s",
161 | "",
162 | "Go [back](#repository-public-visibility-and-access) to the overview report.",
163 | "",
164 |
165 | "",
166 | " Repositories Visibility Settings (for Public Repositories)
",
167 | "",
168 | "%s",
169 | " ",
170 | "",
171 | ]
172 |
173 | settings_details = v {
174 | count(settings_urls) == 0
175 | v := "No public repositories."
176 | }
177 |
178 | settings_details = v {
179 | count(settings_urls) > 0
180 | v_data := [ q |
181 | r := repos[x]
182 | not r.private
183 | url := concat("/", [r.html_url, "settings"])
184 | f_url := sprintf("[Settings](<%s>)", [url])
185 | q := { "Owner": r.owner.login, "Repository": r.name,
186 | "Link": f_url }
187 | ]
188 |
189 | settings_details_keys := ["Owner", "Repository", "Link"]
190 | v := sprintf("%s", [utils.json_to_md_array_of_dict_to_table(v_data,
191 | settings_details_keys, "")])
192 | }
193 |
194 | detailed_report := v {
195 | v := sprintf(concat("\n", d_report),
196 | [overview_section, recommendation_section, settings_details])
197 | }
198 |
199 | version_controlled_rule := v {
200 | v := {
201 | "id": "GGS001",
202 | "name": "SourceVersionControlled",
203 | "shortDescription": {
204 | "text": "The code must be version-controlled."
205 | },
206 | "fullDescription": {
207 | "text": concat("\n", [
208 | "Every change to the source is tracked in a version control system that meets the following requirements:",
209 | "",
210 | "[Change history] There exists a record of the history of changes that went into the revision. Each change must contain: the identities of the uploader and reviewers (if any), timestamps of the reviews (if any) and submission, the change description/justification, the content of the change, and the parent revisions.",
211 | "",
212 | "[Immutable reference] There exists a way to indefinitely reference this particular, immutable revision. In git, this is the {repo URL + branch/tag/ref + commit ID}.",
213 | "",
214 | "Most popular version control system meet this requirement, such as git, Mercurial, Subversion, or Perforce.",
215 | "",
216 | "NOTE: This does NOT require that the code, uploader/reviewer identities, or change history be made public. Rather, some organization must attest to the fact that these requirements are met, and it is up to the consumer whether this attestation is sufficient.",
217 | ])
218 | },
219 | "messageStrings": {
220 | "pass": {
221 | "text": "The code is version-controlled in {0}."
222 | }
223 | }
224 | }
225 | }
226 |
227 | version_controlled_result := v {
228 | v := {
229 | "ruleId": version_controlled_rule.id,
230 | "level": "note",
231 | "message": {
232 | "id": "pass",
233 | "arguments": [
234 | input.slsa.repository_url,
235 | ]
236 | }
237 | }
238 | }
239 |
--------------------------------------------------------------------------------
/github/reviews.rego:
--------------------------------------------------------------------------------
1 | package github.reviews
2 |
3 | import future.keywords.in
4 | import future.keywords.every
5 |
6 | import data.github.utils as utils
7 |
8 | pull_request := sprintf("%d", [input.reviews.pull_request])
9 | url := concat("/", ["repos", input.reviews.repository, "pulls", pull_request, "reviews"])
10 |
11 | response = utils.parse(data.github.api.call_github(url))
12 |
13 | success = response {
14 | not utils.is_error(response)
15 | }
16 |
17 | filtered[x] = json.filter(success[x], ["state", "user/login"])
18 |
19 | approved_reviewers := input.reviews.approved_reviewers
20 |
21 | review_okay(review, approved_reviewers) {
22 | review.state == "APPROVED"
23 | review.user.login == approved_reviewers[_]
24 | }
25 |
26 | all_reviews_okay {
27 | every _, r in filtered {
28 | review_okay(r, input.reviews.approved_reviewers)
29 | }
30 | }
31 |
32 | violating_reviews = { r |
33 | some _, r in filtered
34 | not review_okay(r, input.reviews.approved_reviewers)
35 | }
36 |
37 | overview_findings = v {
38 | all_reviews_okay
39 | v := "(v) all reviews were provided by approved reviewers"
40 | }
41 |
42 | overview_findings = v {
43 | not all_reviews_okay
44 | v := "(i) some reviews are not by approved reviewers"
45 | }
46 |
47 | detailed_findings = v {
48 | not all_reviews_okay
49 | v := utils.json_to_md_list(violating_reviews, " ")
50 | }
51 |
52 | overview_section := concat("\n", [
53 | "Reviews should be provided by approved reviewers.",
54 | ])
55 |
56 | recommendation_section := concat("\n", [
57 | "You should configure the list of approved reviewers",
58 | ])
59 |
60 | module_title := "## Reviews"
61 | overview_report := concat("\n", [
62 | module_title,
63 | "### Motivation",
64 | overview_section,
65 | "",
66 |
67 | "### Key Findings",
68 | overview_findings,
69 | "",
70 | "See [below](#reviews-1) for a detailed report.",
71 | "",
72 |
73 | "### Our Recommendation",
74 | recommendation_section,
75 | "",
76 | ])
77 |
78 | detailed_report := concat("\n", [
79 | module_title,
80 | overview_section,
81 | recommendation_section,
82 | "",
83 | "Go [back](#reviews) to the overview report.",
84 | "",
85 |
86 | "",
87 | " Unapproved reviews
",
88 | "",
89 | detailed_findings,
90 | " ",
91 | "",
92 | ])
93 |
--------------------------------------------------------------------------------
/github/secrets.rego:
--------------------------------------------------------------------------------
1 | package github.secrets
2 |
3 | import future.keywords.in
4 | import data.github.utils as utils
5 |
6 | default rule_set := "user"
7 | rule_set := input.rule_set { utils.exists(input, "rule_set") } else := data.github.rule_set
8 |
9 | responses[org.login] = v {
10 | some org in data.github.orgs.orgs
11 | secrets_url := concat("/", ["orgs", org.login, "actions", "secrets"])
12 | v := utils.parse(data.github.api.call_github(secrets_url))
13 | }
14 |
15 | secrets[x] = responses[x] {
16 | not utils.is_error(responses[x])
17 | }
18 |
--------------------------------------------------------------------------------
/github/ssh_keys.rego:
--------------------------------------------------------------------------------
1 | package github.ssh_keys
2 |
3 | import future.keywords.in
4 | import data.github.utils as utils
5 |
6 | rule_set = input.rule_set { utils.exists(input, "rule_set") } else := data.github.rule_set
7 |
8 | # Keys
9 | responses := utils.parse(data.github.api.call_github("user/keys")) {
10 | rule_set == "user"
11 | }
12 |
13 | # [ { "key": "ssh-rsa ..." } ]
14 | user_keys := utils.flatten_array(responses, "key")
15 | keys := utils.array_subtraction(user_keys, data.github.state.ssh_keys.keys)
16 |
17 | expired[k.id] = v {
18 | k := responses[_]
19 | utils.is_expired(k, data.github.state.ssh_keys.expiration)
20 | v := json.filter(k, ["id", "created_at", "title", "url"])
21 | }
22 |
23 | all_keys[k.id] = v {
24 | k := responses[_]
25 | k.key == keys[_]
26 | v := json.filter(k, ["id", "created_at", "title", "url"])
27 | }
28 |
29 | keys_findings = v {
30 | valid := count(keys) - count(expired)
31 | valid > 1
32 | c_findings := "(i) You have %d valid SSH keys."
33 | v := sprintf(c_findings, [valid])
34 | }
35 |
36 | keys_findings = v {
37 | valid := count(keys) - count(expired)
38 | valid == 1
39 | v := "(i) You have 1 valid SSH key."
40 | }
41 |
42 | keys_findings = v {
43 | count(keys) > 0
44 | valid := count(keys) - count(expired)
45 | valid == 0
46 | v := "(i) You have no valid SSH keys."
47 | }
48 |
49 | keys_findings = v {
50 | count(keys) == 0
51 | v := "(v) no new keys."
52 | }
53 |
54 | expired_findings = v {
55 | count(expired) == 0
56 | v := "(v) no keys have expired."
57 | }
58 |
59 | expired_findings = v {
60 | count(expired) == 1
61 | v := "(i) You have 1 expired key."
62 | }
63 |
64 | expired_findings = v {
65 | count(expired) > 1
66 | c_findings := "(i) You have %d expired keys."
67 | v := sprintf(c_findings, [count(expired)])
68 | }
69 |
70 | eval = v {
71 | v := { "state": {"expired": expired, "keys": keys},
72 | "processing_errors": { k: v | some k; v := responses[k]; utils.is_error(v) },
73 | }
74 | }
75 |
76 | findings := concat("\n\n", [keys_findings, expired_findings])
77 |
78 | overview_section := concat("\n", [
79 | "SSH keys are an authentication tool that enables tools like git to access repositories you have access to.",
80 | "In GitHub personal and organizational accounts, SSH keys are managed by the user.",
81 | "Thus the following are findings regarding *your* SSH keys.",
82 | ])
83 |
84 | recommendation_section := concat("\n", [
85 | "Your SSH keys allow full access to all the repositories over SSH.",
86 | "We recommend you review your SSH keys regularly; ensure you are familiar with the keys and their use.",
87 | "In case of an upcoming expiration date - ensure you replace the keys on time.",
88 | "SSH keys generation is done via the following link: .",
89 | ])
90 |
91 | module_title := "## SSH Keys"
92 | overview_report := concat("\n", [
93 | module_title,
94 | "### Motivation",
95 | overview_section,
96 | "",
97 |
98 | "### Key Findings",
99 | findings,
100 | "",
101 | "See [below](#ssh-keys-1) for a detailed report.",
102 | "",
103 |
104 | "### Our Recommendation",
105 | recommendation_section,
106 | "",
107 | ])
108 |
109 | detailed_report := concat("\n", [
110 | module_title,
111 | overview_section,
112 | recommendation_section,
113 | "",
114 | "Go [back](#ssh-keys) to the overview report.",
115 | "",
116 |
117 | "Expired",
118 | expired_details,
119 | "",
120 |
121 | "All",
122 | non_empty_details,
123 | ""
124 | ])
125 |
126 | expired_details = v {
127 | count(expired) == 0
128 | v := "None"
129 | }
130 |
131 | expired_details = v {
132 | count(expired) > 0
133 | v_data := [ q |
134 | k := expired[_]
135 | q := { "Key": k.title, "Creation time": k.created_at,
136 | "Link": k.url }
137 | ]
138 |
139 | expired_details_keys := ["Key", "Creation time", "Link"]
140 | v := sprintf("%s", [utils.json_to_md_array_of_dict_to_table(v_data,
141 | expired_details_keys, "")])
142 | }
143 |
144 | non_empty_details = v {
145 | count(keys) == 0
146 | v := "None"
147 | }
148 |
149 | non_empty_details = v {
150 | count(keys) > 0
151 | v_data := [ q |
152 | some k in all_keys
153 | q := { "Key": k.title, "Creation time": k.created_at,
154 | "Link": k.url }
155 | ]
156 |
157 | non_empty_details_keys := ["Key", "Creation time", "Link"]
158 | v := sprintf("%s", [utils.json_to_md_array_of_dict_to_table(v_data,
159 | non_empty_details_keys, "")])
160 | }
161 |
162 | # See comment about update in admins.rego
163 | update := v {
164 | v := { "keys": user_keys, }
165 | }
166 |
--------------------------------------------------------------------------------
/github/state/admins.rego:
--------------------------------------------------------------------------------
1 | package github.state.admins
2 |
3 | default members := {}
4 | members := input.admins.members
5 |
--------------------------------------------------------------------------------
/github/state/branches.rego:
--------------------------------------------------------------------------------
1 | package github.state.branches
2 |
3 | default unprotected_branches := []
4 | default protection_data := {}
5 |
6 | unprotected_branches := input.branches.unprotected
7 | protection_data := input.branches.protection_data
8 |
9 | recommended_protection := {
10 | "allow_deletions": false,
11 | "allow_force_pushes": false,
12 | "block_creations": false,
13 | "enforce_admins": true,
14 | "required_conversation_resolution": true,
15 | "required_linear_history": true,
16 | "dismiss_stale_reviews": true,
17 | "require_code_owner_reviews": true,
18 | "required_signatures": true
19 | }
20 |
--------------------------------------------------------------------------------
/github/state/collaborators.rego:
--------------------------------------------------------------------------------
1 | package github.state.collaborators
2 |
3 | default known := {}
4 | known := input.collaborators.known
5 |
--------------------------------------------------------------------------------
/github/state/commits.rego:
--------------------------------------------------------------------------------
1 | package github.state.commits
2 |
3 | default config := {}
4 | config := input.commits
5 |
--------------------------------------------------------------------------------
/github/state/deploy_keys.rego:
--------------------------------------------------------------------------------
1 | package github.state.deploy_keys
2 |
3 | # Default expiration is 1 year; the format is [years, months, days]
4 | default expiration := [1, 0, 0]
5 | expiration := input.deploy_keys.expiration
6 |
7 | default keys := {}
8 | keys := input.deploy_keys.keys
9 |
--------------------------------------------------------------------------------
/github/state/files.rego:
--------------------------------------------------------------------------------
1 | package github.state.files
2 |
3 | default permissions := {
4 | ".circle-ci/*": []
5 | }
6 |
7 | # Only committers from the list are allowed to change files
8 | permissions := input.files.permissions
9 |
10 |
--------------------------------------------------------------------------------
/github/state/hooks.rego:
--------------------------------------------------------------------------------
1 | package github.state.hooks
2 |
3 | default config := {}
4 | config := input.hooks
5 |
--------------------------------------------------------------------------------
/github/state/ssh_keys.rego:
--------------------------------------------------------------------------------
1 | package github.state.ssh_keys
2 |
3 | # Default expiration is one year; the format is [years, months, days]
4 | default expiration := [1, 0, 0]
5 | expiration := input.ssh_keys.expiration
6 |
7 | default keys := []
8 | keys := input.ssh_keys.keys
9 |
--------------------------------------------------------------------------------
/github/state/teams.rego:
--------------------------------------------------------------------------------
1 | package github.state.teams
2 |
3 | permissions := input.teams.permissions
4 |
5 | members := input.teams.members
6 |
--------------------------------------------------------------------------------
/github/state/tfa.rego:
--------------------------------------------------------------------------------
1 | package github.state.tfa
2 |
3 | default exceptions := {}
4 | exceptions := input.tfa.disabled_members
5 |
6 | default unenforced_orgs := []
7 | unenforced_orgs := input.tfa.unenforced_orgs
--------------------------------------------------------------------------------
/github/teams.rego:
--------------------------------------------------------------------------------
1 | package github.teams
2 |
3 | import future.keywords.in
4 | import data.github.utils as utils
5 |
6 | # Organizations
7 | orgs = data.github.orgs.orgs
8 |
9 | # Repos
10 | repos := data.github.repos.repos
11 |
12 | teams_urls[orgs[x].login] = concat("/", ["orgs", orgs[x].login, "teams"])
13 | teams_responses[x] = utils.parse(data.github.api.call_github(teams_urls[x]))
14 | teams[x] = teams_responses[x] {
15 | not utils.is_error(teams_responses[x])
16 | }
17 | # teams_responses := {"org": [{"slug", "repositories_url"}]}
18 |
19 | repos_urls = { slug: url |
20 | some org, org_teams in teams
21 | some team in org_teams
22 | slug := sprintf("%s/%s", [org, team.slug])
23 | url := team.repositories_url
24 | }
25 | teams_repos_responses[x] = utils.parse(data.github.api.call_github_abs(repos_urls[x]))
26 | teams_repos[x] = teams_repos_responses[x] {
27 | not utils.is_error(teams_repos_responses[x])
28 | }
29 | # teams_repos := {"org/team": [{"full_name"(owner/repo), "permissions"}]}
30 |
31 | team_members_urls = { slug: url |
32 | some org, org_teams in teams
33 | some team in org_teams
34 | slug := sprintf("%s/%s", [org, team.slug])
35 | url := trim_suffix(team.members_url, "{/member}")
36 | }
37 | teams_members_responses[x] = utils.parse(data.github.api.call_github_abs(team_members_urls[x]))
38 | teams_members[x] = teams_members_responses[x] {
39 | not utils.is_error(teams_members_responses[x])
40 | }
41 |
42 | merged_responses := object.union(teams_responses, teams_repos_responses)
43 | responses := object.union(merged_responses, teams_members_responses)
44 |
45 | teams_members_logins[k] = utils.flatten_array(teams_members[k], "login")
46 |
47 | #configured_members[k] = v {
48 | # some org, teams
49 | #}
50 |
51 | members[k] = utils.array_intersection(teams_members_logins[k], data.github.state.teams.members[k])
52 | new_members[k] = utils.array_subtraction(teams_members_logins[k], members[k])
53 | new_members[k] = teams_members_logins[k] {
54 | not k in members
55 | }
56 |
57 | non_empty_new_members[k] = new_members[k] {
58 | count(new_members[k]) > 0
59 | }
60 |
61 | members_findings = v {
62 | count(non_empty_new_members) > 1
63 | c_findings := "(i) %d teams have members to be reviewed."
64 | v := sprintf(c_findings, [count(non_empty_new_members)])
65 | }
66 |
67 | members_findings = v {
68 | count(non_empty_new_members) == 1
69 | v := "(i) 1 team has members to be reviewed."
70 | }
71 |
72 | members_findings = v {
73 | count(non_empty_new_members) == 0
74 | count(teams) > 0
75 | v := "(v) all teams members are approved."
76 | }
77 |
78 | members_findings = v {
79 | count(teams) == 0
80 | v := "(v) no teams are configured in the organizations."
81 | }
82 |
83 | current_permissions[k] = v {
84 | some org_team, repos in teams_repos
85 | some repo in repos
86 | k := concat("/", [org_team, repo.full_name])
87 | v := repo.permissions
88 | }
89 |
90 | configured_permissions[k] = v {
91 | some org, teams in data.github.state.teams.permissions
92 | some team, repos in teams
93 | some repo, v in repos
94 | k := concat("/", [org, team, repo])
95 | }
96 |
97 | current_keys := { k | some k, _ in current_permissions }
98 | configured_keys := { k | some k, _ in configured_permissions }
99 | state_available := current_keys & configured_keys
100 | state_unavailable := current_keys - configured_keys
101 |
102 | permissions = { k: v |
103 | some t;
104 | k := state_unavailable[t];
105 | v := current_permissions[k]
106 | }
107 |
108 | non_empty_permissions[x] = permissions[x] {
109 | count(permissions[x]) > 0
110 | }
111 |
112 | changed_permissions = { k: v |
113 | some t;
114 | k := state_available[t];
115 | current_permissions[k] != configured_permissions[k];
116 | v := current_permissions[k]
117 | }
118 |
119 | admin_permissions = { k: v |
120 | permissions[k]["admin"]
121 | v = permissions[k]
122 | }
123 |
124 | permissions_findings = v {
125 | count(admin_permissions) == 0
126 | v := "(v) no teams with admin permissions are found."
127 | }
128 |
129 | permissions_findings = v {
130 | count(admin_permissions) > 1
131 | c_findings := "(i) %d teams have admin permissions in some repositories."
132 | v := sprintf(c_findings, [count(admin_permissions)])
133 | }
134 |
135 | permissions_findings = v {
136 | count(admin_permissions) == 1
137 | v := "(i) 1 team has admin permissions in 1 of the repositories."
138 | }
139 |
140 | eval = v {
141 | merged_responses := utils.merge(responses, data.github.orgs.responses)
142 | v := {
143 | "state": {"changed_permissions": changed_permissions,
144 | "permissions": permissions,
145 | "members": new_members},
146 | "processing_errors": { k: v | some k; v := merged_responses[k]; utils.is_error(v) },
147 | }
148 | }
149 |
150 | findings := concat("\n\n", [members_findings, permissions_findings])
151 |
152 | overview_section := concat("\n", [
153 | "Excess permissions may be exploited, intentionally or unintentionally.",
154 | "Limiting permissions will limit the potential damage of credential theft, account-takeover or developer-workstation-takeover.",
155 | ])
156 |
157 | recommendation_section := concat("\n", [
158 | "Review the permissions for team members according to our recommendations below.",
159 | "Remove team members who are not active or are no longer on the team.",
160 | ])
161 |
162 | module_title := "## Teams"
163 | overview_report := concat("\n", [
164 | module_title,
165 | "### Motivation",
166 | overview_section,
167 | "",
168 |
169 | "### Key Findings",
170 | findings,
171 | "",
172 | "See [below](#teams-1) for a detailed report.",
173 | "",
174 |
175 | "### Our Recommendation",
176 | recommendation_section,
177 | "You can manage team permissions at the following links:",
178 | "",
179 | "Click to expand
",
180 | "",
181 | utils.json_to_md_list(access_settings_urls, " "),
182 | " ",
183 | "",
184 | ])
185 |
186 | access_settings_urls := { v |
187 | # t is org/team/owner/repo
188 | some t, _ in permissions
189 | some k, r in repos
190 | splitted_slug := split(t, "/")
191 | k == concat("/", [splitted_slug[2], splitted_slug[3]])
192 | v := sprintf("<%s>", [concat("/", [r.html_url, "settings", "access"])])
193 | }
194 |
195 | detailed_report := concat("\n", [
196 | module_title,
197 | overview_section,
198 | recommendation_section,
199 | "",
200 | "Go [back](#teams) to the overview report.",
201 | "",
202 |
203 | "",
204 | " Members
",
205 | "",
206 | members_details,
207 | " ",
208 | "",
209 |
210 | "",
211 | " Teams Permissions
",
212 | "",
213 | permissions_details,
214 | " ",
215 | "",
216 | ])
217 |
218 | members_details = v {
219 | count(non_empty_new_members) == 0
220 | v := "None"
221 | }
222 |
223 | members_details = v {
224 | count(non_empty_new_members) > 0
225 |
226 | members_with_links := { u: ms |
227 | ms = non_empty_new_members[k]
228 | o := split(k, "/")[0]
229 | t := split(k, "/")[1]
230 | u := sprintf("[%s](<%s>)", [k, concat("/", ["https://github.com/orgs",
231 | o, "teams", t, "members"])])
232 | }
233 |
234 | v := utils.json_to_md_dict_of_lists(members_with_links, " ")
235 | }
236 |
237 | permissions_details = v {
238 | count(non_empty_permissions) == 0
239 | v := "None"
240 | }
241 |
242 | permissions_details = v {
243 | count(non_empty_permissions) > 0
244 | v := utils.json_to_md_dict_of_dicts(non_empty_permissions, ":", " ")
245 | }
246 |
--------------------------------------------------------------------------------
/github/test/branches.rego:
--------------------------------------------------------------------------------
1 | package github.test.branches
2 |
3 | import future.keywords.in
4 |
5 | protection_a := {
6 | "owner/repo/branches/main": {
7 | "allow_deletions": {
8 | "enabled": false
9 | },
10 | "allow_force_pushes": {
11 | "enabled": false
12 | },
13 | "block_creations": {
14 | "enabled": false
15 | },
16 | "enforce_admins": {
17 | "enabled": false,
18 | "url": "https://api.github.com/repos/owner/repo/branches/main/protection/enforce_admins"
19 | },
20 | "required_conversation_resolution": {
21 | "enabled": false
22 | },
23 | "required_linear_history": {
24 | "enabled": true
25 | },
26 | "required_pull_request_reviews": {
27 | "dismiss_stale_reviews": false,
28 | "require_code_owner_reviews": false,
29 | "required_approving_review_count": 1,
30 | "url": "https://api.github.com/repos/owner/repo/branches/main/protection/required_pull_request_reviews"
31 | },
32 | "required_signatures": {
33 | "enabled": true,
34 | "url": "https://api.github.com/repos/owner/repo/branches/main/protection/required_signatures"
35 | },
36 | "required_status_checks": {
37 | "checks": [],
38 | "contexts": [],
39 | "contexts_url": "https://api.github.com/repos/owner/repo/branches/main/protection/required_status_checks/contexts",
40 | "strict": true,
41 | "url": "https://api.github.com/repos/owner/repo/branches/main/protection/required_status_checks"
42 | },
43 | "url": "https://api.github.com/repos/owner/repo/branches/main/protection"
44 | }
45 | }
46 |
47 | protection_b := {
48 | "owner/repo/branches/main": {
49 | "allow_deletions": {
50 | "enabled": true
51 | },
52 | "allow_force_pushes": {
53 | "enabled": false
54 | },
55 | "block_creations": {
56 | "enabled": false
57 | },
58 | "enforce_admins": {
59 | "enabled": false,
60 | "url": "https://api.github.com/repos/owner/repo/branches/main/protection/enforce_admins"
61 | },
62 | "required_conversation_resolution": {
63 | "enabled": false
64 | },
65 | "required_linear_history": {
66 | "enabled": true
67 | },
68 | "required_pull_request_reviews": {
69 | "dismiss_stale_reviews": false,
70 | "require_code_owner_reviews": false,
71 | "required_approving_review_count": 1,
72 | "url": "https://api.github.com/repos/owner/repo/branches/main/protection/required_pull_request_reviews"
73 | },
74 | "required_signatures": {
75 | "enabled": true,
76 | "url": "https://api.github.com/repos/owner/repo/branches/main/protection/required_signatures"
77 | },
78 | "required_status_checks": {
79 | "checks": [],
80 | "contexts": [],
81 | "contexts_url": "https://api.github.com/repos/owner/repo/branches/main/protection/required_status_checks/contexts",
82 | "strict": true,
83 | "url": "https://api.github.com/repos/owner/repo/branches/main/protection/required_status_checks"
84 | },
85 | "url": "https://api.github.com/repos/owner/repo/branches/main/protection"
86 | }
87 | }
88 |
89 | protection_c := {
90 | "owner/repo/branches/main": {
91 | "allow_deletions": {
92 | "enabled": false
93 | },
94 | "allow_force_pushes": {
95 | "enabled": false
96 | },
97 | "block_creations": {
98 | "enabled": false
99 | },
100 | "enforce_admins": {
101 | "enabled": false,
102 | "url": "https://api.github.com/repos/owner/repo/branches/main/protection/enforce_admins"
103 | },
104 | "required_conversation_resolution": {
105 | "enabled": false
106 | },
107 | "required_linear_history": {
108 | "enabled": true
109 | },
110 | "required_pull_request_reviews": {
111 | "dismiss_stale_reviews": false,
112 | "require_code_owner_reviews": false,
113 | "required_approving_review_count": 1,
114 | "url": "https://api.github.com/repos/owner/repo/branches/main/protection/required_pull_request_reviews"
115 | },
116 | "required_signatures": {
117 | "enabled": true,
118 | "url": "https://api.github.com/repos/owner/repo/branches/main/protection/required_signatures"
119 | },
120 | "required_status_checks": {
121 | "checks": [1],
122 | "contexts": [],
123 | "contexts_url": "https://api.github.com/repos/owner/repo/branches/main/protection/required_status_checks/contexts",
124 | "strict": true,
125 | "url": "https://api.github.com/repos/owner/repo/branches/main/protection/required_status_checks"
126 | },
127 | "url": "https://api.github.com/repos/owner/repo/branches/main/protection"
128 | }
129 | }
130 |
131 | test_allow_deletions {
132 | not protection_a == protection_b
133 | }
134 |
135 | test_detect_checks {
136 | not protection_a == protection_c
137 | }
138 |
139 | unprotected_branches := {
140 | "some-branch": {
141 | "protected": false
142 | },
143 | "some-other-branch": {
144 | "protected": false
145 | }
146 | }
147 |
148 | known_unprotected_branches := ["some-branch"]
149 |
150 | test_known {
151 | count(data.github.branches.unprotected_branches) == 1
152 | with data.github.branches.current_unprotected_branches as unprotected_branches
153 | with data.github.state.branches.unprotected_branches as known_unprotected_branches
154 | }
155 |
156 | known_protection_data := {
157 | "some-branch": {
158 | "allow_deletions": {
159 | "enabled": false
160 | }
161 | }
162 | }
163 |
164 | test_known_protection_data {
165 | count(data.github.branches.unchanged_protection) == 1
166 | with data.github.branches.protection_data as known_protection_data
167 | with data.github.state.branches.protection_data as known_protection_data
168 | }
169 |
--------------------------------------------------------------------------------
/github/test/commits.rego:
--------------------------------------------------------------------------------
1 | package github.test.commits
2 |
3 | import future.keywords.in
4 |
5 | verified_commit_by_test := { "author": { "login": "test" }, "commit": { "verification": { "verified": true } } }
6 | unverified_commit_by_test := { "author": { "login": "test" }, "commit": { "verification": { "verified": false } } }
7 | unverified_commit_by_allowed := { "author": { "login": "allowed" }, "commit": { "verification": { "verified": false } } }
8 |
9 | input_state_allowed := { "org/repo": ["allowed"] }
10 |
11 | one_verified_commits_state := { "org/repo": [ unverified_commit_by_test, verified_commit_by_test ] }
12 | one_unverified_commits_state := { "org/repo": [ unverified_commit_by_test ] }
13 | one_unverified_commits_state_with_allowed := { "org/repo": [ verified_commit_by_test, unverified_commit_by_test, unverified_commit_by_allowed ] }
14 |
15 | test_one_verified {
16 | count(data.github.commits.commits_unverified) == 1 with data.github.commits.successes as one_verified_commits_state
17 | }
18 |
19 | test_one_unverified {
20 | count(data.github.commits.commits_unverified) == 1 with data.github.commits.successes as one_unverified_commits_state
21 | }
22 |
23 | test_one_allowed {
24 | count(data.github.commits.commits_unverified) == 1
25 | with data.github.commits.successes as one_unverified_commits_state_with_allowed
26 | with data.github.state.commits.allowed as input_state_allowed
27 | }
28 |
--------------------------------------------------------------------------------
/github/test/deploy_keys.rego:
--------------------------------------------------------------------------------
1 | package github.test.deploy_keys
2 |
3 | import future.keywords.in
4 |
5 | expired_key := { "org/repo": [{ "id": 1, "created_at": "2010-01-01T00:00:00Z" }] }
6 | not_expired_key := { "org/repo": [{ "id": 1, "created_at": "2022-01-01T00:00:00Z" }] }
7 |
8 | expiration := [1, 0, 0]
9 |
10 | test_expired {
11 | count(data.github.deploy_keys.expired) == 1 with data.github.deploy_keys.responses as expired_key with input.deploy_keys.expiration as expiration
12 | }
13 |
14 | test_not_expired {
15 | count(data.github.deploy_keys.expired) == 0 with data.github.deploy_keys.responses as not_expired_key with input.deploy_keys.expiration as expiration
16 | }
17 |
18 | #known_key := { "org/repo": [ "ssh-rsa" ] }
19 | known_key := { "org/repo": [
20 | { "id": 1, "key": "ssh-rsa" },
21 | { "id": 2, "key": "ssh-ed25591" },
22 | ] }
23 |
24 | state_known_key := { "org/repo": ["ssh-rsa"] }
25 |
26 | test_known {
27 | count(data.github.deploy_keys.non_empty_keys) == 1
28 | with data.github.deploy_keys.responses as known_key
29 | with data.github.state.deploy_keys.keys as state_known_key
30 | }
31 |
--------------------------------------------------------------------------------
/github/test/files.rego:
--------------------------------------------------------------------------------
1 | package github.test.files
2 |
3 | files_responses := {
4 | "owner/repo": {
5 | "sha-1": okay_commit
6 | }
7 | }
8 |
9 | permissions := {
10 | "owner/repo": {
11 | ".*md": [ "commiter-1" ]
12 | }
13 | }
14 |
15 | permissions_noregex := {
16 | "owner/repo": {
17 | "1": [ "commiter-1" ]
18 | }
19 | }
20 |
21 | okay_commit := {
22 | "committer": "commiter-1",
23 | "files": [
24 | "README.md"
25 | ]
26 | }
27 |
28 | not_okay_commit := {
29 | "committer": "commiter-2",
30 | "files": [
31 | "README.md"
32 | ]
33 | }
34 |
35 | okay_commit_noregex := {
36 | "committer": "commiter-1",
37 | "files": [
38 | "1"
39 | ]
40 | }
41 |
42 | not_okay_commit_noregex := {
43 | "committer": "commiter-2",
44 | "files": [
45 | "1"
46 | ]
47 | }
48 |
49 | test_regex {
50 | regex.match(".*md", "README.md")
51 | }
52 |
53 | test_regex_noregex {
54 | regex.match("1", "1")
55 | }
56 |
57 | test_commit_okay {
58 | data.github.files.commit_okay(permissions["owner/repo"], okay_commit)
59 | }
60 |
61 | test_commit_not_okay {
62 | not data.github.files.commit_okay(permissions["owner/repo"], not_okay_commit)
63 | }
64 |
65 | test_commit_okay_noregex {
66 | data.github.files.commit_okay(permissions_noregex["owner/repo"], okay_commit_noregex)
67 | }
68 |
69 | test_commit_not_okay_noregex {
70 | not data.github.files.commit_okay(permissions_noregex["owner/repo"], not_okay_commit_noregex)
71 | }
72 |
--------------------------------------------------------------------------------
/github/test/hooks.rego:
--------------------------------------------------------------------------------
1 | package github.test.hooks
2 |
3 | import future.keywords.in
4 |
5 | known_hook := { "org/repo": [ {"active": true, "created_at": "2022-01-01T00:00:00Z", "events": ["label"], "id": 1, "name": "test", "config": { "content_type": "json", "insecure_url": "0", "url": "https://example.com" }, "updated_at": "2022-01-01T00:00:01Z", "type": "Repository" } ] }
6 |
7 | changed_active_hook := { "org/repo": [ {"active": false, "created_at": "2022-01-01T00:00:00Z", "events": ["label"], "id": 1, "name": "test", "config": { "content_type": "json", "insecure_url": "0", "url": "https://example.com" }, "updated_at": "2022-01-01T00:00:01Z", "type": "Repository" } ] }
8 | changed_created_at_hook := { "org/repo": [ {"active": true, "created_at": "2022-01-01T00:00:01Z", "events": ["label"], "id": 1, "name": "test", "config": { "content_type": "json", "insecure_url": "0", "url": "https://example.com" }, "updated_at": "2022-01-01T00:00:01Z", "type": "Repository" } ] }
9 | changed_events_hook := { "org/repo": [ {"active": true, "created_at": "2022-01-01T00:00:00Z", "events": ["label", "pull"], "id": 1, "name": "test", "config": { "content_type": "json", "insecure_url": "0", "url": "https://example.com" }, "updated_at": "2022-01-01T00:00:01Z", "type": "Repository" } ] }
10 | changed_id_hook := { "org/repo": [ {"active": true, "created_at": "2022-01-01T00:00:00Z", "events": ["label"], "id": 2, "name": "test", "config": { "content_type": "json", "insecure_url": "0", "url": "https://example.com" }, "updated_at": "2022-01-01T00:00:01Z", "type": "Repository" } ] }
11 | changed_name_hook := { "org/repo": [ {"active": true, "created_at": "2022-01-01T00:00:00Z", "events": ["label"], "id": 1, "name": "test_test", "config": { "content_type": "json", "insecure_url": "0", "url": "https://example.com" }, "updated_at": "2022-01-01T00:00:01Z", "type": "Repository" } ] }
12 | changed_config_content_type_hook := { "org/repo": [ {"active": true, "created_at": "2022-01-01T00:00:00Z", "events": ["label"], "id": 1, "name": "test", "config": { "content_type": "non-json", "insecure_url": "0", "url": "https://example.com" }, "updated_at": "2022-01-01T00:00:01Z", "type": "Repository" } ] }
13 | changed_config_insecure_ssl_hook := { "org/repo": [ {"active": true, "created_at": "2022-01-01T00:00:00Z", "events": ["label"], "id": 1, "name": "test", "config": { "content_type": "json", "insecure_url": "1", "url": "https://example.com" }, "updated_at": "2022-01-01T00:00:01Z", "type": "Repository" } ] }
14 | changed_config_url_hook := { "org/repo": [ {"active": true, "created_at": "2022-01-01T00:00:00Z", "events": ["label"], "id": 1, "name": "test", "config": { "content_type": "json", "insecure_url": "0", "url": "https://non-example.com" }, "updated_at": "2022-01-01T00:00:01Z", "type": "Repository" } ] }
15 | changed_updated_at_hook := { "org/repo": [ {"active": true, "created_at": "2022-01-01T00:00:00Z", "events": ["label"], "id": 1, "name": "test", "config": { "content_type": "json", "insecure_url": "0", "url": "https://example.com" }, "updated_at": "2022-01-01T00:00:02Z", "type": "Repository" } ] }
16 | changed_type_hook := { "org/repo": [ {"active": true, "created_at": "2022-01-01T00:00:00Z", "events": ["label"], "id": 1, "name": "test", "config": { "content_type": "json", "insecure_url": "0", "url": "https://example.com" }, "updated_at": "2022-01-01T00:00:01Z", "type": "Organization" } ] }
17 |
18 | test_known_hook {
19 | count(data.github.hooks.new_hooks) == 1
20 | with data.github.hooks.responses as known_hook
21 | with data.github.state.hooks.config as known_hook
22 | }
23 |
24 | test_changed_active_hook {
25 | count(data.github.hooks.new_hooks) == 1
26 | with data.github.hooks.responses as known_hook
27 | with data.github.state.hooks.config as changed_active_hook
28 | }
29 |
30 | test_changed_created_at_hook {
31 | count(data.github.hooks.new_hooks) == 1
32 | with data.github.hooks.responses as known_hook
33 | with data.github.state.hooks.config as changed_created_at_hook
34 | }
35 |
36 | test_changed_events_hook {
37 | count(data.github.hooks.new_hooks) == 1
38 | with data.github.hooks.responses as known_hook
39 | with data.github.state.hooks.config as changed_events_hook
40 | }
41 |
42 | test_changed_id_hook {
43 | count(data.github.hooks.new_hooks) == 1
44 | with data.github.hooks.responses as known_hook
45 | with data.github.state.hooks.config as changed_id_hook
46 | }
47 |
48 | test_changed_name_hook {
49 | count(data.github.hooks.new_hooks) == 1
50 | with data.github.hooks.responses as known_hook
51 | with data.github.state.hooks.config as changed_name_hook
52 | }
53 |
54 | test_changed_config_content_type_hook {
55 | count(data.github.hooks.new_hooks) == 1
56 | with data.github.hooks.responses as known_hook
57 | with data.github.state.hooks.config as changed_config_content_type_hook
58 | }
59 |
60 | test_changed_config_insecure_ssl_hook {
61 | count(data.github.hooks.new_hooks) == 1
62 | with data.github.hooks.responses as known_hook
63 | with data.github.state.hooks.config as changed_config_insecure_ssl_hook
64 | }
65 |
66 | test_changed_config_url_hook {
67 | count(data.github.hooks.new_hooks) == 1
68 | with data.github.hooks.responses as known_hook
69 | with data.github.state.hooks.config as changed_config_url_hook
70 | }
71 |
72 | test_changed_updated_at_hook {
73 | count(data.github.hooks.new_hooks) == 1
74 | with data.github.hooks.responses as known_hook
75 | with data.github.state.hooks.config as changed_updated_at_hook
76 | }
77 |
78 | test_changed_type_hook {
79 | count(data.github.hooks.new_hooks) == 1
80 | with data.github.hooks.responses as known_hook
81 | with data.github.state.hooks.config as changed_type_hook
82 | }
83 |
84 |
85 | #list_compr := { 1: [ { "id": 1, "created_at": 2, "config": { "url": 3 } } ],
86 | # 2: "foo" }
87 |
88 | #list_h := { "id": 1, "created_at": 2, "config": { "url": 3 } }
89 |
90 | #test_list_comprehension { res |
91 | # some k, vv in list_compr
92 |
93 | # v := [ x |
94 | # h := list_h[_]
95 |
96 | # h == vv[_]
97 |
98 | # x := { "id": h.id, "created_at": h.created_at, "config": { "url": h.config.url } }
99 | # ]
100 |
101 | # res := { k: v }
102 | #}
103 |
--------------------------------------------------------------------------------
/github/test/repos.rego:
--------------------------------------------------------------------------------
1 | package github.test.repos
2 |
3 | import future.keywords.in
4 |
5 | orgs_response := { "org-1": { "login": "org-1", "repos_url": "org-1-repos" },
6 | "org-2": { "login": "org-2", "repos_url": "org-2-repos" } }
7 |
--------------------------------------------------------------------------------
/github/test/ssh_keys.rego:
--------------------------------------------------------------------------------
1 | package github.test.ssh_keys
2 |
3 | import future.keywords.in
4 |
5 | expired_key := { "keys": [{ "id": 1, "created_at": "2010-01-01T00:00:00Z" }] }
6 | not_expired_key := { "keys": [{ "id": 1, "created_at": "2022-01-01T00:00:00Z" }] }
7 |
8 | expiration := [1, 0, 0]
9 |
10 | test_expired {
11 | count(data.github.ssh_keys.expired) == 1 with data.github.ssh_keys.responses as expired_key["keys"] with input.ssh_keys.expiration as expiration
12 | }
13 |
14 | test_not_expired {
15 | count(data.github.ssh_keys.expired) == 0 with data.github.ssh_keys.responses as not_expired_key["keys"]
16 | }
17 |
18 | known_key := [{ "id": 1, "key": "ssh-rsa" }]
19 | state_known_key := ["ssh-rsa"]
20 |
21 | test_known {
22 | count(data.github.ssh_keys.keys) == 0 with data.github.ssh_keys.responses as known_key with data.github.state.ssh_keys.keys as state_known_key
23 | }
24 |
25 | state_unknown_key := ["not-ssh-rsa"]
26 |
27 | test_unknown {
28 | count(data.github.ssh_keys.keys) == 1 with data.github.ssh_keys.responses as known_key with data.github.state.ssh_keys.keys as state_unknown_key
29 | }
30 |
31 |
--------------------------------------------------------------------------------
/github/test/teams.rego:
--------------------------------------------------------------------------------
1 | package github.test.teams
2 |
3 | import future.keywords.in
4 |
5 | state_permissions := { "org": { "team": { "owner/repo": { "admin": true, "maintain": true, "pull": true, "push": true, "triage": true } } } }
6 |
7 | pass_response_permissions := { "org/team/owner/repo": { "admin": true, "maintain": true, "pull": true, "push": true, "triage": true } }
8 |
9 | admin_response_permissions := { "org/team/owner/repo": { "admin": false, "maintain": true, "pull": true, "push": true, "triage": true } }
10 | maintain_response_permissions := { "org/team/owner/repo": { "admin": true, "maintain": false, "pull": true, "push": true, "triage": true } }
11 | pull_response_permissions := { "org/team/owner/repo": { "admin": true, "maintain": true, "pull": false, "push": true, "triage": true } }
12 | push_response_permissions := { "org/team/owner/repo": { "admin": true, "maintain": true, "pull": true, "push": false, "triage": true } }
13 | triage_response_permissions := { "org/team/owner/repo": { "admin": true, "maintain": true, "pull": true, "push": true, "triage": false } }
14 |
15 | test_pass_permissions {
16 | count(data.github.teams.changed_permissions) == 0
17 | with data.github.init.responses as pass_response_permissions
18 | with data.github.state.teams.permissions as state_permissions
19 | }
20 |
21 | test_admin_permissions {
22 | count(data.github.teams.changed_permissions) == 0
23 | with data.github.teams.teams_repos_responses as admin_response_permissions
24 | with data.github.state.teams.permissions as state_permissions
25 | }
26 | test_maintain_permissions {
27 | count(data.github.teams.teams_responses) == 0
28 | with data.github.init.responses as maintain_response_permissions
29 | with data.github.state.teams.permissions as state_permissions
30 | }
31 | test_pull_permissions {
32 | count(data.github.teams.teams_responses) == 0
33 | with data.github.init.responses as pull_response_permissions
34 | with data.github.state.teams.permissions as state_permissions
35 | }
36 | test_push_permissions {
37 | count(data.github.teams.teams_responses) == 0
38 | with data.github.init.responses as push_response_permissions
39 | with data.github.state.teams.permissions as state_permissions
40 | }
41 | test_triage_permissions {
42 | count(data.github.teams.teams_responses) == 0
43 | with data.github.init.responses as triage_response_permissions
44 | with data.github.state.teams.permissions as state_permissions
45 | }
46 |
--------------------------------------------------------------------------------
/github/test/tfa.rego:
--------------------------------------------------------------------------------
1 | package github.test.tfa
2 |
3 | import future.keywords.in
4 |
5 | existing_user := { "test_org": [{ "login": "test_user" }] }
6 |
7 | test_tfa_existing {
8 | "test_user" in data.github.tfa.tfa_disabled_members["test_org"] with data.github.tfa.responses as existing_user with data.github.state.tfa.exceptions as {}
9 | }
10 |
11 | test_tfa_non_existing {
12 | "test_user" in data.github.tfa.tfa_disabled_members["test_org"] with data.github.tfa.responses as existing_user with data.github.state.tfa.exceptions as {}
13 | not "other_user" in data.github.tfa.tfa_disabled_members["test_org"] with data.github.tfa.responses as existing_user with data.github.state.tfa.exceptions as {}
14 | }
15 |
16 | exceptions := { "test_org": [ "test_user" ] }
17 |
18 | test_tfa_exceptions {
19 | not "test_user" in data.github.tfa.tfa_disabled_members["test_org"] with data.github.tfa.responses as existing_user with data.github.state.tfa.exceptions as exceptions
20 | }
21 |
--------------------------------------------------------------------------------
/github/test/utils.rego:
--------------------------------------------------------------------------------
1 | package github.test.utils
2 |
3 | import future.keywords.in
4 |
5 | test_error {
6 | data.github.utils.is_error({"processing_error": true})
7 | }
8 |
9 | test_not_error {
10 | not data.github.utils.is_error({"not_processing_error": true})
11 | }
12 |
13 | arr1 := [ 1, 2, 3 ]
14 | arr2 := [ 1, 4, 5 ]
15 |
16 | test_array_intersection {
17 | data.github.utils.array_intersection(arr1, arr2) == [1]
18 | }
19 |
20 | obj1 := { 1: ["a", "b", "c"] }
21 | obj2 := { 1: ["a", "d", "e"] }
22 |
23 | test_array_intersection_in_object {
24 | data.github.utils.array_intersection(obj1[x], obj2[x]) == ["a"]
25 | }
26 |
27 | resp1 := { 1: [{ "field1": "value1", "field2": "value2" }] }
28 | nested_resp1 := [{ "field1": "value1", "field2": "value2" },
29 | {"field1": "value3"}]
30 |
31 | #test_object_filter {
32 | # q := object.filter(nested_resp1[_], {"field1"})
33 | # print(q)
34 | # object.filter(nested_resp1[_], {"field1"}) == ["value"]
35 | #}
36 |
37 | test_flatten_array {
38 | data.github.utils.flatten_array(nested_resp1, "field1") == ["value1", "value3"]
39 | }
40 |
41 | # test_nested_flatten_array {
42 | # some x in resp1
43 | # a[x] = data.github.utils.flatten_array(resp1[x], "field1")
44 | # a[1] == ["value1", "value3"]
45 | # }
46 |
47 | add_one(x) = v {
48 | v := x + 1
49 | }
50 |
51 | # test_apply_func {
52 | # [2, 3, 4] == data.github.utils.apply_func([1, 2, 3], add_one)
53 | # }
54 |
55 | test_array_subtraction {
56 | data.github.utils.array_subtraction([1,2,3], [1]) == [2,3]
57 | }
58 |
59 | test_array_subtraction_no_intersection {
60 | data.github.utils.array_subtraction([1,2,3], [0]) == [1,2,3]
61 | }
62 |
63 | test_object_subtraction {
64 | data.github.utils.object_subtraction({1: 2}, {2: 3}) == {1: 2}
65 | data.github.utils.object_subtraction({1: 2}, {1: 1}) == {1: 1}
66 | }
67 |
68 | test_state_diff {
69 | # data.github.utils.state_diff({1: {"a": [1, 2]}, 2: {"a": [3, 4]} },
70 | # "a", {1: {"a": [2]}}) == {1: {"a": [1]}, 2: {"a": [3, 4]}}
71 | data.github.utils.state_diff({"a": [1, 2]}, "a",
72 | {"a": [2]}) == {"a": [1]}
73 | }
--------------------------------------------------------------------------------
/github/tfa.rego:
--------------------------------------------------------------------------------
1 | package github.tfa
2 |
3 | import future.keywords.in
4 | import data.github.utils as utils
5 |
6 | orgs = data.github.orgs.orgs
7 |
8 | enforced_orgs := [ x | orgs[x].two_factor_requirement_enabled ]
9 | current_unenforced_orgs := [ x.login |
10 | some x in orgs
11 | not x.login in enforced_orgs
12 | ]
13 |
14 | tfa_disabled := "filter=2fa_disabled"
15 |
16 | member_orgs_urls[orgs[x].login] = concat("?", [trim_suffix(orgs[x].members_url, "{/member}"), tfa_disabled])
17 | responses[x] = utils.error_substitute(
18 | utils.parse(data.github.api.call_github_abs(member_orgs_urls[x])),
19 | { "422 Unprocessable Entity: Only owners can use this filter.": "This account is not the owner of this organization hence it cannot get information about 2fa disabled members." } )
20 |
21 | current_tfa_disabled_members[x] = utils.flatten_array(responses[x], "login")
22 | tfa_disabled_members[x] = utils.array_subtraction(current_tfa_disabled_members[x], data.github.state.tfa.exceptions[x])
23 | tfa_disabled_members[x] = current_tfa_disabled_members[x] {
24 | not utils.exists(data.github.state.tfa.exceptions, x)
25 | }
26 |
27 | unenforced_orgs = utils.array_subtraction(current_unenforced_orgs, data.github.state.tfa.unenforced_orgs)
28 | unenforced_orgs = current_unenforced_orgs {
29 | not utils.exists(data.github.state.tfa, "unenforced_orgs")
30 | }
31 |
32 | non_empty_tfa_disabled_members[x] = tfa_disabled_members[x] {
33 | count(tfa_disabled_members[x]) > 0
34 | }
35 |
36 | members_findings = v {
37 | count(non_empty_tfa_disabled_members) > 1
38 | c_findings := "(i) %d organizations have members with two factor authentication disabled."
39 | v := sprintf(c_findings, [count(non_empty_tfa_disabled_members)])
40 | }
41 |
42 | members_findings = v {
43 | count(non_empty_tfa_disabled_members) == 1
44 | v := "(i) 1 organization has members with two factor authentication disabled."
45 | }
46 |
47 | members_findings = v {
48 | count(non_empty_tfa_disabled_members) == 0
49 | v := "(v) no organization has members with two factor authentication disabled."
50 | }
51 |
52 | unenforced_findings = v {
53 | count(unenforced_orgs) > 1
54 | c_findings := "(i) %d organizations lack overall enforcement."
55 | v := sprintf(c_findings, [count(unenforced_orgs)])
56 | }
57 |
58 | unenforced_findings = v {
59 | count(unenforced_orgs) == 1
60 | v := "(i) 1 organization lacks overall enforcement."
61 | }
62 |
63 | unenforced_findings = v {
64 | count(unenforced_orgs) == 0
65 | v := "(v) all organizations have overall enforcement."
66 | }
67 |
68 | eval = v {
69 | merged_responses := utils.merge(responses, data.github.orgs.responses)
70 | v := { "state": { "disabled_members": non_empty_tfa_disabled_members,
71 | "unenforced_orgs": unenforced_orgs },
72 | "processing_errors": { k: v | some k; v := merged_responses[k]; utils.is_error(v) },
73 | }
74 | }
75 |
76 | overview_section := concat("\n", [
77 | "2 factor authentication protects your account from credential theft.",
78 | ])
79 |
80 | recommendation_section := concat("\n", [
81 | "Require all users in your GitHub organization to turn on 2 factor authentication.",
82 | "They can do it from the following link: .",
83 | "",
84 | "Configure your GitHub organizations to enforce 2 factor authentication on all organizations’ users.",
85 | ])
86 |
87 | module_title := "## Two Factor Authentication"
88 | overview_report := concat("\n", [
89 | module_title,
90 | "### Motivation",
91 | overview_section,
92 | "",
93 |
94 | "### Key Findings",
95 | findings,
96 | "",
97 | "See [below](#two-factor-authentication-1) for a detailed report.",
98 | "",
99 |
100 | "### Our Recommendation",
101 | recommendation_section,
102 | "That can be done from the following link(s):",
103 | "",
104 | "Click to expand
",
105 | "",
106 | utils.json_to_md_list(settings_urls, " "),
107 | " ",
108 | "",
109 | ])
110 |
111 | settings_urls := { v |
112 | some k in unenforced_orgs
113 | v := sprintf("<%s>", [concat("/", ["https://github.com/organizations", orgs[k].login, "settings", "security"])])
114 | }
115 |
116 | findings := concat("\n\n", [members_findings, unenforced_findings])
117 |
118 | detailed_report := concat("\n", [
119 | module_title,
120 | overview_section,
121 | recommendation_section,
122 | "",
123 | "Go [back](#two-factor-authentication) to the overview report.",
124 | "",
125 |
126 | "",
127 | " Two Factor Disabled Members
",
128 | "",
129 | disabled_details,
130 | " ",
131 | "",
132 |
133 | "",
134 | " Two Factor Unenforced Organizations
",
135 | "",
136 | unenforced_details,
137 | " ",
138 | "",
139 | ])
140 |
141 | disabled_details = v {
142 | count(non_empty_tfa_disabled_members) == 0
143 | v := "All members in all organizations have two factor authentication enabled."
144 | }
145 |
146 | disabled_details = v {
147 | count(non_empty_tfa_disabled_members) > 0
148 | v := utils.json_to_md_dict_of_lists(non_empty_tfa_disabled_members, " ")
149 | }
150 |
151 | unenforced_details = v {
152 | count(unenforced_orgs) == 0
153 | v := "All organizations enforce two factor authentication on their members."
154 | }
155 |
156 | unenforced_details = v {
157 | count(unenforced_orgs) > 0
158 | table := { orgs[k].login: u |
159 | some k in unenforced_orgs
160 | u := sprintf("[Settings](<%s>)", [concat("/", ["https://github.com/organizations", orgs[k].login, "settings", "security"])])
161 | }
162 |
163 | header := "| Organization | Link |"
164 | delim := "| --- | --- |"
165 | body := utils.json_to_md_dict_to_table(table, " ")
166 | v := concat("\n", [header, delim, body])
167 | }
168 |
169 | update := v {
170 | v := { "disabled_members": current_tfa_disabled_members,
171 | "unenforced_orgs": current_unenforced_orgs }
172 | }
173 |
--------------------------------------------------------------------------------
/github/token.rego:
--------------------------------------------------------------------------------
1 | package github.token
2 |
3 | update := "token GH_TOKEN"
4 |
--------------------------------------------------------------------------------
/github/utils.rego:
--------------------------------------------------------------------------------
1 | package github.utils
2 |
3 | import future.keywords.in
4 |
5 | exists(obj, k) {
6 | _ = obj[k]
7 | }
8 |
9 | pick(k, obj1, obj2) = v {
10 | v := obj1[k]
11 | }
12 |
13 | pick(k, obj1, obj2) = v {
14 | not exists(obj1, k)
15 | v := obj2[k]
16 | }
17 |
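# Note: when a key exists in both objects, merge(a, b) keeps the value from b,
# because pick(k, b, a) tries b first and only falls back to a.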
18 | merge(a, b) = c {
19 | keys := {k | _ = a[k]} | {k | _ = b[k]}
20 | c := {k: v | k := keys[_]; v := pick(k, b, a)}
21 | }
22 |
23 | okay(response) {
24 | response.status_code == 200
25 | }
26 |
27 | error(response) {
28 | response.status_code == 0
29 | }
30 |
31 | not_okay(response) {
32 | response.status_code != 0
33 | response.status_code != 200
34 | }
35 |
36 | parse(response) = v {
37 | okay(response)
38 | v := response.body
39 | }
40 |
41 | parse(response) = v {
42 | error(response)
43 | v := {"processing_error": response.error}
44 | }
45 |
46 | parse(response) = v {
47 | not_okay(response)
48 | v := {"processing_error": sprintf("%s: %s", [response.status, response.body.message])}
49 | }
50 |
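# error_substitute replaces a known processing_error message with a friendlier
# one from the substitutes map; non-errors and errors with no matching
# substitute are passed through unchanged.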
51 | error_substitute(response, substitutes) = v {
52 | is_error(response)
53 | v := { "processing_error": substitutes[response.processing_error] }
54 | }
55 |
56 | error_substitute(response, substitutes) = v {
57 | is_error(response)
58 | not exists(substitutes, response.processing_error)
59 | v := response
60 | }
61 |
62 | error_substitute(response, substitutes) = v {
63 | not is_error(response)
64 | v := response
65 | }
66 |
67 | is_okay(v) {
68 | not is_error(v)
69 | }
70 |
71 | is_error(v) {
72 | exists(v, "processing_error")
73 | }
74 |
75 | keys(o) = v {
76 | v := [ x | o[x] ]
77 | }
78 |
79 | array_intersection(arr_a, arr_b) = v {
80 | v_set := { a | a := arr_a[_] } & { b | b := arr_b[_] }
81 | v := [ x | v_set[x] ]
82 | }
83 |
84 | array_subtraction(arr_a, arr_b) = v {
85 | v_set := { a | a := arr_a[_] } - { b | b := arr_b[_] }
86 | v := [ x | v_set[x] ]
87 | }
88 |
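# expiration is [years, months, days] added to the key's created_at timestamp.
# Illustrative example (hypothetical key): is_expired({"created_at":
# "2020-01-01T00:00:00Z"}, [1, 0, 0]) holds once 2021-01-01 has passed.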
89 | is_expired(key, expiration) {
90 | year := expiration[0]
91 | month := expiration[1]
92 | day := expiration[2]
93 |
94 | expired := time.add_date(time.parse_rfc3339_ns(key.created_at), year, month, day)
95 | expired < time.now_ns()
96 | }
97 |
98 | # { key1: [ val1, val2 ], key2: [ val3 ] } -
99 | # { key1: [ val1 ] } =
100 | # { key1: [ val2 ], key2: [ val3 ] }
101 | object_subtraction( obj1, obj2 ) = v {
102 | keys_only_first := {k | _ = obj1[k]} - {k | _ = obj2[k]}
103 | v_only_first := { k: v | some k in keys_only_first; v := obj1[k] }
104 |
105 | keys_both := {k | _ = obj1[k]} & {k | _ = obj2[k]}
106 | v_both := { k: v | some k in keys_both; v := obj1[k] - obj2[k] }
107 | v := merge(v_only_first, v_both)
108 | }
109 |
110 | # array_of_objects = [{field1: value1, field: value}] -> [value]
111 | flatten_array(array_of_objects, field) = [ v |
112 | obj := array_of_objects[_]
113 | v := obj[field]
114 | ]
115 |
116 | # object = { field1: { field2: value1 } -> { field1: value1 }
117 | # extract_field(object, field) = [
118 |
119 | # flatten_array = apply json.filter
120 | # flatten_array(array_of_objects, field) = mapped {
121 | # mapped := [ json.filter(x) | x = array_of_objects[_] ]
122 | # }
123 |
124 | # apply_func(array_of_objects, func) = mapped {
125 | # mapped := [ func(x) | x = array_of_objects[_] ]
126 | # }
127 |
128 |
129 | # objects_to_array(objects, field)
130 | # objects_to_array([{"login": login}], "login") -> [login]
131 |
132 | # apply_func
133 | # mapped := [func(x) | x = list[_]]
134 |
135 | # Json = [1, 2, 3]
136 | # MD: * 1
137 | # * 2
138 | # * 3
139 | json_to_md_list(json_input, indent) = res {
140 | s := concat("\n", [sprintf("%s* %v", [indent, x]) | x = json_input[_]])
141 | res := sprintf("%s", [s])
142 | }
143 |
144 | json_to_md_dict(json_input, separator, indent) = res {
145 | s := concat("\n", [sprintf("%s* %v%s %v", [indent, k, separator, v]) | v = json_input[k]])
146 | res := sprintf("%s", [s])
147 | }
148 |
149 | json_to_md_headed_list(k, json_list, indent) = res {
150 | extra_indent := sprintf(" %s", [indent])
151 | list_str := json_to_md_list(json_list, extra_indent)
152 | header := sprintf("%s* **%s**:", [indent, k])
153 | s := concat("\n", [header, list_str])
154 | res := sprintf("%s", [s])
155 | }
156 |
157 | json_to_md_headed_dict(k, json_dict, separator, indent) = res {
158 | extra_indent := sprintf(" %s", [indent])
159 | dict_str := json_to_md_dict(json_dict, separator, extra_indent)
160 | header := sprintf("%s* **%s**:", [indent, k])
161 | s := concat("\n", [header, dict_str])
162 | res := sprintf("%s", [s])
163 | }
164 |
165 | # Json = {1: [1, 2, 3], k: [5, 6, 7]}
166 | # MD: * **1**
167 | # * 1
168 | # * 2
169 | # * 3
170 |
171 | json_to_md_dict_of_lists(json_input, indent) = res {
172 | s := concat("\n", [json_to_md_headed_list(k, json_list, indent) | some k, json_list in json_input])
173 | res := sprintf("%s", [s])
174 | }
175 |
176 | json_to_md_dict_of_dicts(json_input, separator, indent) = res {
177 | s := concat("\n", [json_to_md_headed_dict(k, json_list, separator, indent) | some k, json_list in json_input])
178 | res := sprintf("%s", [s])
179 | }
180 |
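# Illustrative example (hypothetical values):
# Json = {"org-1": "[Settings](<url>)"}
# MD: | org-1 | [Settings](<url>) |
# Callers prepend their own header and delimiter rows.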
181 | json_to_md_dict_to_table(json_input, indent) = res {
182 | key_lens := [count(k) | some k, _ in json_input]
183 | body_in := [ sprintf("| %v | %v |", [k, v]) | some k, v in json_input ]
184 | body := concat("\n", body_in)
185 | res := sprintf("%s", [body])
186 | }
187 |
188 | json_to_md_dict_to_row(json_input, keys, indent) = res {
189 | # Works for json_input[v] as strings, does not work for int
190 | row := concat(" | ", [ json_input[v] | v = keys[k]])
191 | res := sprintf("| %s |", [row])
192 | }
193 |
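# Illustrative example (hypothetical values), with keys = ["Owner", "Repo"]:
# Json = [{"Owner": "a", "Repo": "b"}]
# MD: | Owner | Repo |
#     | --- | --- |
#     | a | b |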
194 | json_to_md_array_of_dict_to_table(json_input, keys, indent) = res {
195 | header := sprintf("| %s |", [concat(" | ", keys)])
196 | delims := { i: " --- " | some i in numbers.range(1, count(keys)) }
197 | delim := sprintf("|%s|", [concat("|", [d | some d in delims])])
198 |
199 | records := [ s |
200 | some r in json_input
201 | s := json_to_md_dict_to_row(r, keys, indent)
202 | ]
203 |
204 | body := concat("\n", records)
205 | res := concat("\n", [header, delim, body])
206 | }
207 |
208 | array_group_by(input_array, fields) = res {
209 | path := concat(".", fields)
210 | res := [ r |
211 | r := input_array[_][path]
212 | ]
213 | }
214 |
215 | state_diff(current, field, configured) = diff {
216 | some x in current
217 | exists(configured, x)
218 | flattened = { x: flatten_array(current[x], field) }
219 |
220 | diff = { y: array_subtraction(flattened[y], configured[y]) }
221 | }
222 |
223 | state_diff(current, field, configured) = diff {
224 | some x in current
225 | not exists(configured, x)
226 |
227 | flattened = { x: flatten_array(current[x], field) }
228 |
229 | diff = flattened
230 | }
231 |
--------------------------------------------------------------------------------
/input.json:
--------------------------------------------------------------------------------
1 | {
2 | "token": "token GH_TOKEN"
3 | }
4 |
--------------------------------------------------------------------------------
/license-artifacts/opa-LICENSE.txt:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "{}"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright {yyyy} {name of copyright owner}
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
203 |
--------------------------------------------------------------------------------