├── .eslintignore ├── .eslintrc.json ├── .gitattributes ├── .github ├── dependabot.yml └── workflows │ ├── ci.yml │ ├── release.yml │ └── update-dist.yml ├── .gitignore ├── .prettierignore ├── .prettierrc.json ├── .scala-steward.conf ├── LICENSE ├── README.md ├── action.yml ├── dist ├── index.js ├── index.js.map ├── licenses.txt └── sourcemap-register.js ├── package-lock.json ├── package.json ├── project └── build.properties ├── sbt-plugin ├── .gitignore ├── .scalafix.conf ├── .scalafmt.conf ├── README.md ├── build.sbt ├── project │ ├── ContrabandConfig.scala │ ├── Developers.scala │ ├── build.properties │ └── plugins.sbt └── src │ ├── main │ ├── contraband │ │ ├── github-dependency-submission-api-0.contra │ │ └── input.contra │ └── scala │ │ └── ch │ │ └── epfl │ │ └── scala │ │ ├── AnalyzeDependencyGraph.scala │ │ ├── GithubDependencyGraphPlugin.scala │ │ └── SubmitDependencyGraph.scala │ ├── sbt-test │ ├── dependency-manifest │ │ ├── coursier-manifest │ │ │ ├── build.sbt │ │ │ ├── project │ │ │ │ └── plugins.sbt │ │ │ └── test │ │ ├── default-scala-manifest │ │ │ ├── build.sbt │ │ │ ├── project │ │ │ │ └── plugins.sbt │ │ │ └── test │ │ ├── ignore-scaladoc │ │ │ ├── build.sbt │ │ │ ├── project │ │ │ │ └── plugins.sbt │ │ │ └── test │ │ ├── ignore-test │ │ │ ├── build.sbt │ │ │ ├── project │ │ │ │ └── plugins.sbt │ │ │ └── test │ │ ├── ivy-manifest │ │ │ ├── build.sbt │ │ │ ├── project │ │ │ │ └── plugins.sbt │ │ │ └── test │ │ ├── package-urls │ │ │ ├── build.sbt │ │ │ ├── project │ │ │ │ └── plugins.sbt │ │ │ └── test │ │ └── scala3-manifest │ │ │ ├── build.sbt │ │ │ ├── project │ │ │ └── plugins.sbt │ │ │ └── test │ ├── generate-snapshot │ │ ├── generate-snapshot │ │ │ ├── build.sbt │ │ │ ├── project │ │ │ │ └── plugins.sbt │ │ │ └── test │ │ └── resolve-failure │ │ │ ├── build.sbt │ │ │ ├── project │ │ │ └── plugins.sbt │ │ │ └── test │ └── submit-snapshot │ │ └── submit-snapshot │ │ ├── build.sbt │ │ ├── project │ │ └── plugins.sbt │ │ └── test │ └── test │ └── scala │ └── ch │ └── epfl │ └── scala │ └── JsonProtocolTests.scala ├── src └── main.ts └── tsconfig.json /.eslintignore: -------------------------------------------------------------------------------- 1 | dist/ 2 | lib/ 3 | node_modules/ -------------------------------------------------------------------------------- /.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "plugins": [ 3 | "@typescript-eslint" 4 | ], 5 | "extends": [ 6 | "plugin:github/recommended" 7 | ], 8 | "parser": "@typescript-eslint/parser", 9 | "parserOptions": { 10 | "ecmaVersion": 9, 11 | "sourceType": "module", 12 | "project": "./tsconfig.json" 13 | }, 14 | "rules": { 15 | "eslint-comments/no-use": "off", 16 | "import/no-namespace": "off", 17 | "no-unused-vars": "off", 18 | "@typescript-eslint/no-unused-vars": "error", 19 | "@typescript-eslint/explicit-member-accessibility": [ 20 | "error", 21 | { 22 | "accessibility": "no-public" 23 | } 24 | ], 25 | "@typescript-eslint/no-require-imports": "error", 26 | "@typescript-eslint/array-type": "error", 27 | "@typescript-eslint/await-thenable": "error", 28 | "@typescript-eslint/ban-ts-comment": "error", 29 | "camelcase": "off", 30 | "@typescript-eslint/consistent-type-assertions": "error", 31 | "@typescript-eslint/explicit-function-return-type": [ 32 | "error", 33 | { 34 | "allowExpressions": true 35 | } 36 | ], 37 | "@typescript-eslint/func-call-spacing": [ 38 | "error", 39 | "never" 40 | ], 41 | "@typescript-eslint/no-array-constructor": "error", 42 | 
"@typescript-eslint/no-empty-interface": "error", 43 | "@typescript-eslint/no-explicit-any": "warn", 44 | "@typescript-eslint/no-extraneous-class": "error", 45 | "@typescript-eslint/no-for-in-array": "error", 46 | "@typescript-eslint/no-inferrable-types": "error", 47 | "@typescript-eslint/no-misused-new": "error", 48 | "@typescript-eslint/no-namespace": "error", 49 | "@typescript-eslint/no-non-null-assertion": "warn", 50 | "@typescript-eslint/no-unnecessary-qualifier": "error", 51 | "@typescript-eslint/no-unnecessary-type-assertion": "error", 52 | "@typescript-eslint/no-useless-constructor": "error", 53 | "@typescript-eslint/no-var-requires": "error", 54 | "@typescript-eslint/prefer-for-of": "warn", 55 | "@typescript-eslint/prefer-function-type": "warn", 56 | "@typescript-eslint/prefer-includes": "error", 57 | "@typescript-eslint/prefer-string-starts-ends-with": "error", 58 | "@typescript-eslint/promise-function-async": "error", 59 | "@typescript-eslint/require-array-sort-compare": "error", 60 | "@typescript-eslint/restrict-plus-operands": "error", 61 | "semi": "off", 62 | "@typescript-eslint/semi": [ 63 | "error", 64 | "never" 65 | ], 66 | "@typescript-eslint/type-annotation-spacing": "error", 67 | "@typescript-eslint/unbound-method": "error", 68 | "i18n-text/no-en": "off" 69 | }, 70 | "env": { 71 | "node": true, 72 | "es6": true 73 | } 74 | } 75 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | dist/** -diff linguist-generated=true -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | # Enable version updates for npm 4 | - package-ecosystem: 'npm' 5 | # Look for `package.json` and `lock` files in the `root` directory 6 | directory: '/' 7 | # Check the npm registry for updates every day (weekdays) 8 | schedule: 9 | interval: 'daily' 10 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: Continuous Integration 2 | on: 3 | push: 4 | branches: [main] 5 | tags: ["v*"] 6 | pull_request: 7 | branches: [main] 8 | 9 | jobs: 10 | scalafmt: 11 | name: Scalafmt and Scalafix 12 | runs-on: ubuntu-latest 13 | defaults: 14 | run: 15 | working-directory: sbt-plugin 16 | steps: 17 | - uses: actions/checkout@v3 18 | - uses: coursier/setup-action@v1.3.5 19 | with: 20 | apps: scalafmt sbt 21 | - run: scalafmt --test 22 | - run: sbt 'scalafixAll -test' 23 | 24 | test-sbt: 25 | strategy: 26 | matrix: 27 | include: 28 | - os: ubuntu-latest 29 | jvm: 'adoptium:1.8.0-412' 30 | - os: windows-latest 31 | jvm: 'adoptium:1.11.0.23' 32 | - os: macOS-latest 33 | jvm: 'adoptium:1.17' 34 | fail-fast: false 35 | name: Test sbt plugin on ${{ matrix.os }} - ${{ matrix.jvm }} 36 | runs-on: ${{ matrix.os }} 37 | permissions: 38 | contents: write 39 | defaults: 40 | run: 41 | working-directory: sbt-plugin 42 | env: 43 | GITHUB_TOKEN: ${{ github.token }} 44 | steps: 45 | - uses: actions/checkout@v3 46 | - uses: coursier/setup-action@v1.3.5 47 | with: 48 | jvm: ${{ matrix.jvm }} 49 | apps: sbt 50 | - run: sbt test 51 | - run: sbt "scripted dependency-manifest/* generate-snapshot/*" 52 | - run: sbt "scripted submit-snapshot/*" 53 | if: github.event_name == 'push' || 
github.event.pull_request.head.repo.owner.login == 'scalacenter' 54 | 55 | test-action: 56 | if: github.event_name == 'push' || github.event.pull_request.head.repo.owner.login == 'scalacenter' 57 | strategy: 58 | matrix: 59 | include: 60 | - os: ubuntu-latest 61 | jvm: 'adoptium:1.11.0.23' 62 | - os: macOS-latest 63 | jvm: 'adoptium:1.17' 64 | - os: windows-latest 65 | jvm: 'adoptium:1.8.0-412' 66 | os: [ubuntu-latest, macOS-latest, windows-latest] 67 | fail-fast: false 68 | name: Test Github action on ${{ matrix.os }} 69 | runs-on: ${{ matrix.os }} 70 | permissions: 71 | contents: write 72 | steps: 73 | - uses: actions/checkout@v3 74 | - uses: coursier/setup-action@v1.3.5 75 | with: 76 | jvm: ${{ matrix.jvm }} 77 | apps: sbt 78 | - run: sbt publishLocal 79 | working-directory: sbt-plugin 80 | - run: npm version 81 | - run: npm install 82 | - run: npm run all 83 | - name: Run sbt-dependency-submission 84 | uses: ./ 85 | id: dependency-submission 86 | with: 87 | working-directory: sbt-plugin 88 | sbt-plugin-version: 3.2.0-SNAPSHOT 89 | - name: Check outputs 90 | run: | 91 | echo ${{ steps.dependency-submission.outputs.submission-id }} 92 | echo ${{ steps.dependency-submission.outputs.submission-api-url }} 93 | echo ${{ steps.dependency-submission.outputs.snapshot-json-path }} 94 | - name: Log snapshot JSON 95 | run: | 96 | cat ${{ steps.dependency-submission.outputs.snapshot-json-path }} | jq 97 | 98 | dependency-review: 99 | name: Dependency Review 100 | runs-on: ubuntu-latest 101 | permissions: 102 | pull-requests: write # for comment-summary-in-pr 103 | needs: test-action 104 | if: github.event_name == 'pull_request' 105 | steps: 106 | - name: Dependency Review 107 | uses: actions/dependency-review-action@v3 108 | with: 109 | comment-summary-in-pr: always 110 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Release 2 | on: 3 | workflow_dispatch: 4 | push: 5 | tags: ["*"] 6 | jobs: 7 | publish: 8 | name: Release sbt plugin 9 | runs-on: ubuntu-latest 10 | steps: 11 | - uses: actions/checkout@v3 12 | - uses: coursier/setup-action@v1.3.5 13 | with: 14 | apps: sbt 15 | jvm: 'adopt:1.8' 16 | - run: sbt ci-release 17 | working-directory: sbt-plugin 18 | env: 19 | PGP_PASSPHRASE: ${{ secrets.PGP_PASSPHRASE }} 20 | PGP_SECRET: ${{ secrets.PGP_SECRET }} 21 | SONATYPE_PASSWORD: ${{ secrets.SONATYPE_PASSWORD }} 22 | SONATYPE_USERNAME: ${{ secrets.SONATYPE_USERNAME }} 23 | -------------------------------------------------------------------------------- /.github/workflows/update-dist.yml: -------------------------------------------------------------------------------- 1 | name: Update dist 2 | on: 3 | push: 4 | branches: 5 | - main 6 | 7 | jobs: 8 | update-dist: 9 | runs-on: ubuntu-latest 10 | steps: 11 | - uses: actions/checkout@v2 12 | - run: npm install 13 | - run: npm run all 14 | - name: Create Pull Request 15 | id: cpr 16 | uses: peter-evans/create-pull-request@v3 17 | with: 18 | commit-message: Update dist 19 | author: GitHub 20 | delete-branch: true 21 | title: Update dist 22 | - name: Check Pull Request 23 | run: | 24 | echo "Pull Request Number - ${{ steps.cpr.outputs.pull-request-number }}" 25 | echo "Pull Request URL - ${{ steps.cpr.outputs.pull-request-url }}" 26 | -------------------------------------------------------------------------------- /.gitignore: 
-------------------------------------------------------------------------------- 1 | # Dependency directory 2 | node_modules 3 | 4 | # Rest pulled from https://github.com/github/gitignore/blob/master/Node.gitignore 5 | # Logs 6 | logs 7 | *.log 8 | npm-debug.log* 9 | yarn-debug.log* 10 | yarn-error.log* 11 | lerna-debug.log* 12 | 13 | # Diagnostic reports (https://nodejs.org/api/report.html) 14 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 15 | 16 | # Runtime data 17 | pids 18 | *.pid 19 | *.seed 20 | *.pid.lock 21 | 22 | # Directory for instrumented libs generated by jscoverage/JSCover 23 | lib-cov 24 | 25 | # Coverage directory used by tools like istanbul 26 | coverage 27 | *.lcov 28 | 29 | # nyc test coverage 30 | .nyc_output 31 | 32 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 33 | .grunt 34 | 35 | # Bower dependency directory (https://bower.io/) 36 | bower_components 37 | 38 | # node-waf configuration 39 | .lock-wscript 40 | 41 | # Compiled binary addons (https://nodejs.org/api/addons.html) 42 | build/Release 43 | 44 | # Dependency directories 45 | jspm_packages/ 46 | 47 | # TypeScript v1 declaration files 48 | typings/ 49 | 50 | # TypeScript cache 51 | *.tsbuildinfo 52 | 53 | # Optional npm cache directory 54 | .npm 55 | 56 | # Optional eslint cache 57 | .eslintcache 58 | 59 | # Optional REPL history 60 | .node_repl_history 61 | 62 | # Output of 'npm pack' 63 | *.tgz 64 | 65 | # Yarn Integrity file 66 | .yarn-integrity 67 | 68 | # dotenv environment variables file 69 | .env 70 | .env.test 71 | 72 | # parcel-bundler cache (https://parceljs.org/) 73 | .cache 74 | 75 | # next.js build output 76 | .next 77 | 78 | # nuxt.js build output 79 | .nuxt 80 | 81 | # vuepress build output 82 | .vuepress/dist 83 | 84 | # Serverless directories 85 | .serverless/ 86 | 87 | # FuseBox cache 88 | .fusebox/ 89 | 90 | # DynamoDB Local files 91 | .dynamodb/ 92 | 93 | # OS metadata 94 | .DS_Store 95 | Thumbs.db 96 | 97 | # Ignore built ts files 98 | __tests__/runner/* 99 | lib/**/* 100 | 101 | # Scala 102 | .bloop/ 103 | .metals/ 104 | metals.sbt 105 | target/ -------------------------------------------------------------------------------- /.prettierignore: -------------------------------------------------------------------------------- 1 | dist/ 2 | lib/ 3 | node_modules/ -------------------------------------------------------------------------------- /.prettierrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "printWidth": 100, 3 | "semi": false, 4 | "singleQuote": true, 5 | "trailingComma": "all", 6 | "arrowParens": "avoid" 7 | } 8 | -------------------------------------------------------------------------------- /.scala-steward.conf: -------------------------------------------------------------------------------- 1 | buildRoots = [ "sbt-plugin" ] 2 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 
14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 
134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 
193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Sbt Dependency Submission 2 | 3 | A GitHub Action to submit the dependency graph of an [sbt](https://www.scala-sbt.org/) build to the GitHub [Dependency submission API](https://docs.github.com/en/code-security/supply-chain-security/understanding-your-software-supply-chain/using-the-dependency-submission-api). 4 | 5 | Before running the workflow, make sure that the `Dependency Graph` feature is enabled in the settings of your repository (`Settings` > `Code Security and Analysis`). 6 | The graph of your sbt build will be visible in the [Dependency Graph](https://docs.github.com/en/code-security/supply-chain-security/understanding-your-software-supply-chain/exploring-the-dependencies-of-a-repository) page of the `Insights` tab. 7 | 8 | Enable [Dependabot](https://docs.github.com/en/code-security/supply-chain-security/understanding-your-software-supply-chain/about-supply-chain-security#what-is-dependabot) in your project settings to receive alerts for vulnerabilities that affect your sbt project. 9 | 10 | ## Support 11 | 12 | Any sbt project built with sbt 1.5 or later is supported. 13 | 14 | ## Usage 15 | 16 | Create a GitHub Actions workflow file under `.github/workflows` containing the following definition. 17 | 18 | ```yml 19 | # .github/workflows/dependency-graph.yml 20 | name: Update Dependency Graph 21 | on: 22 | push: 23 | branches: 24 | - main # default branch of the project 25 | jobs: 26 | dependency-graph: 27 | name: Update Dependency Graph 28 | runs-on: ubuntu-latest 29 | steps: 30 | - uses: actions/checkout@v3 31 | - uses: sbt/setup-sbt@v1 32 | - uses: scalacenter/sbt-dependency-submission@v2 33 | ``` 34 | 35 | ### Inputs 36 | 37 | #### - `working-directory` (optional) 38 | 39 | The relative path of the working directory of your sbt build. 40 | Default value is `.` 41 | 42 | #### - `modules-ignore` (optional) 43 | 44 | A list of space-separated names of modules to ignore. The action will neither resolve nor submit the dependencies of these modules. 45 | The name of a module contains the name of the project and its binary version. 46 | 47 | Example: `foo_2.13 bar_2.13` 48 | 49 | #### - `configs-ignore` (optional) 50 | 51 | A list of space-separated names of configurations to ignore. The action will not submit the dependencies of these configurations. 52 | 53 | Examples of configurations are `compile`, `test`, `scala-tool`, `scala-doc-tool`. 54 | 55 | #### - `correlator` (optional) 56 | 57 | An optional identifier to distinguish between multiple dependency snapshots of the same type. 58 | Defaults to the concatenation of the workflow name, the job id and the action id. 59 | 60 | Typically you would specify the correlator in a matrix-based job like this: 61 | 62 | ```yaml 63 | correlator: ${{ github.job }}-${{ matrix.directory }} 64 | ``` 65 | 66 | #### - `token` (optional) 67 | 68 | GitHub Personal Access Token (PAT). Defaults to the PAT provided by the Action runner.
69 | 70 | Example: `${{ secrets.USER_TOKEN }}` 71 | 72 | ### Outputs 73 | 74 | #### `submission-id` 75 | 76 | Once the snapshot of the dependencies has been submitted, GitHub responds with an ID of this snapshot. 77 | 78 | #### `submission-api-url` 79 | 80 | The API URL of the submission created by the action. It can be queried to get the submitted snapshot. 81 | 82 | #### `snapshot-json-path` 83 | 84 | Path to the temporary JSON file with the dependency snapshot that has been submitted. 85 | 86 | #### Examples 87 | 88 | ##### Excluding some projects or some Scala versions from the dependency submission 89 | 90 | In this example, the snapshot will not contain the graphs of `foo_2.13` and `bar_3`. 91 | 92 | ```yaml 93 | 94 | ## in .github/workflows/dependency-graph.yml 95 | ... 96 | steps: 97 | - uses: actions/checkout@v3 98 | - uses: scalacenter/sbt-dependency-submission@v2 99 | with: 100 | working-directory: ./my-scala-project 101 | modules-ignore: foo_2.13 bar_3 102 | ``` 103 | 104 | ##### Excluding the Scaladoc dependencies 105 | 106 | In this example, the snapshot will not contain the dependencies of the `scala-doc-tool` configuration. 107 | 108 | ```yaml 109 | 110 | ## in .github/workflows/dependency-graph.yml 111 | ... 112 | steps: 113 | - uses: actions/checkout@v3 114 | - uses: scalacenter/sbt-dependency-submission@v2 115 | with: 116 | working-directory: ./my-scala-project 117 | configs-ignore: scala-doc-tool 118 | ``` 119 | 120 | ## Troubleshooting 121 | 122 | ### How to generate a snapshot locally? 123 | 124 | For troubleshooting, it can be convenient to generate a snapshot locally. 125 | 126 | To do so, you need to install the `sbt-github-dependency-submission` plugin in your sbt project. 127 | 128 | ```scala 129 | // In project/plugins.sbt 130 | addSbtPlugin("ch.epfl.scala" % "sbt-github-dependency-submission" % "3.1.0") 131 | ``` 132 | 133 | After reloading your build, you can run: 134 | ``` 135 | sbt:example> githubGenerateSnapshot 136 | ... 137 | [info] Dependency snapshot written to /tmp/dependency-snapshot-3080240838874963577.json 138 | ``` 139 | 140 | Or if you want to exclude some modules or configs: 141 | 142 | ``` 143 | sbt:example> githubGenerateSnapshot {"ignoredModules":["server_2.13"], "ignoredConfigs":["test"]} 144 | ... 145 | [info] Dependency snapshot written to /tmp/dependency-snapshot-14803616116503623758.json 146 | ``` 147 | 148 | ### Unexpected Status: 404 149 | 150 | This error happens when the `Dependency Graph` feature is disabled. 151 | You can enable it in `Settings` > `Code Security and Analysis`. 152 | 153 | ![image](https://user-images.githubusercontent.com/13123162/177736071-5bd63d3c-d338-4e51-a3c9-ad8d11e35508.png) 154 | 155 | ### Unexpected Status: 403 156 | 157 | This error happens when the workflow does not have the required permissions on the repository. 158 | 159 | First, check that the workflow is not triggered on pull requests from forked repositories. 160 | It should be triggered by a push to the default branch. 161 | 162 | ```yaml 163 | ## in .github/workflows/dependency-graph.yml 164 | on: 165 | push: 166 | branches: 167 | - main # default branch of the project 168 | ... 169 | ``` 170 | 171 | Then check that you have enabled the read and write permissions for all workflows, at the bottom of the `Settings > Actions > General` page.
172 | 173 | ![image](https://user-images.githubusercontent.com/13123162/179472237-bffea114-9e99-4736-83ef-00dc7f41149b.png) 174 | 175 | If you do not want to enable this globally, you can add the write permission to the `dependency-graph` workflow only: 176 | 177 | ```yaml 178 | ## in .github/workflows/dependency-graph.yml 179 | ... 180 | permissions: 181 | contents: write # this permission is needed to submit the dependency graph 182 | ... 183 | ``` 184 | 185 | ### sbt.librarymanagement.ResolveException: Error downloading 186 | 187 | This error may happen when you try to access artifacts from private GitHub packages with the default GitHub token. In the `token` input, you need to pass a personal access token that is allowed to access the private packages. 188 | -------------------------------------------------------------------------------- /action.yml: -------------------------------------------------------------------------------- 1 | name: 'Sbt Dependency Submission' 2 | description: 'Submits the dependency graph of an sbt build to the Github Submission API' 3 | author: 'The Scala Center' 4 | branding: 5 | icon: 'package' 6 | color: '#1a84ac' 7 | inputs: 8 | working-directory: 9 | description: "The relative path of the working directory of the sbt build." 10 | required: false 11 | default: '' 12 | modules-ignore: 13 | description: | 14 | A list of space-separated names of modules to ignore. The action will not resolve nor submit the dependencies of these modules. 15 | The name of a module contains the name of the project and its binary version. 16 | Example: `foo_2.13 bar_2.13` 17 | required: false 18 | default: '' 19 | configs-ignore: 20 | description: | 21 | A list of space-separated names of configurations to ignore. The action will not submit the dependencies of these configurations. 22 | Example: `test scala-doc-tool` 23 | required: false 24 | default: '' 25 | correlator: 26 | description: | 27 | An optional identifier to distinguish between multiple dependency snapshots of the same type. 28 | Defaults to the concatenation of the workflow name, the job id and the action id. 29 | required: false 30 | default: '' 31 | on-resolve-failure: 32 | description: | 33 | Either 'error' or 'warning'. 34 | When a dependency resolution failure happens, if 'error' the job will fail and will not submit the snapshot. 35 | If 'warning', the job will ignore the failing modules and submit the snapshot. 36 | required: false 37 | default: error 38 | token: 39 | description: GitHub Personal Access Token (PAT). Defaults to PAT provided by Action runner. 40 | required: false 41 | default: ${{ github.token }} 42 | sbt-plugin-version: 43 | description: Version of the sbt plugin to use.
44 | required: false 45 | default: '3.1.0' 46 | outputs: 47 | submission-id: 48 | description: The ID of the submission created by the action 49 | submission-api-url: 50 | description: The URL of the submission created by the action 51 | snapshot-json-path: 52 | description: The path of the snapshot JSON file created by the action 53 | runs: 54 | using: 'node20' 55 | main: 'dist/index.js' 56 | -------------------------------------------------------------------------------- /dist/licenses.txt: -------------------------------------------------------------------------------- 1 | @actions/core 2 | MIT 3 | The MIT License (MIT) 4 | 5 | Copyright 2019 GitHub 6 | 7 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 8 | 9 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 10 | 11 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 12 | 13 | @actions/exec 14 | MIT 15 | The MIT License (MIT) 16 | 17 | Copyright 2019 GitHub 18 | 19 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 20 | 21 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 22 | 23 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 24 | 25 | @actions/github 26 | MIT 27 | The MIT License (MIT) 28 | 29 | Copyright 2019 GitHub 30 | 31 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 32 | 33 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 
34 | 35 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 36 | 37 | @actions/http-client 38 | MIT 39 | Actions Http Client for Node.js 40 | 41 | Copyright (c) GitHub, Inc. 42 | 43 | All rights reserved. 44 | 45 | MIT License 46 | 47 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and 48 | associated documentation files (the "Software"), to deal in the Software without restriction, 49 | including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, 50 | and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, 51 | subject to the following conditions: 52 | 53 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 54 | 55 | THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT 56 | LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN 57 | NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, 58 | WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 59 | SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 60 | 61 | 62 | @actions/io 63 | MIT 64 | The MIT License (MIT) 65 | 66 | Copyright 2019 GitHub 67 | 68 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 69 | 70 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 71 | 72 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 73 | 74 | @fastify/busboy 75 | MIT 76 | Copyright Brian White. All rights reserved. 
77 | 78 | Permission is hereby granted, free of charge, to any person obtaining a copy 79 | of this software and associated documentation files (the "Software"), to 80 | deal in the Software without restriction, including without limitation the 81 | rights to use, copy, modify, merge, publish, distribute, sublicense, and/or 82 | sell copies of the Software, and to permit persons to whom the Software is 83 | furnished to do so, subject to the following conditions: 84 | 85 | The above copyright notice and this permission notice shall be included in 86 | all copies or substantial portions of the Software. 87 | 88 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 89 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 90 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 91 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 92 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 93 | FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS 94 | IN THE SOFTWARE. 95 | 96 | @octokit/auth-token 97 | MIT 98 | The MIT License 99 | 100 | Copyright (c) 2019 Octokit contributors 101 | 102 | Permission is hereby granted, free of charge, to any person obtaining a copy 103 | of this software and associated documentation files (the "Software"), to deal 104 | in the Software without restriction, including without limitation the rights 105 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 106 | copies of the Software, and to permit persons to whom the Software is 107 | furnished to do so, subject to the following conditions: 108 | 109 | The above copyright notice and this permission notice shall be included in 110 | all copies or substantial portions of the Software. 111 | 112 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 113 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 114 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 115 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 116 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 117 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 118 | THE SOFTWARE. 119 | 120 | 121 | @octokit/core 122 | MIT 123 | The MIT License 124 | 125 | Copyright (c) 2019 Octokit contributors 126 | 127 | Permission is hereby granted, free of charge, to any person obtaining a copy 128 | of this software and associated documentation files (the "Software"), to deal 129 | in the Software without restriction, including without limitation the rights 130 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 131 | copies of the Software, and to permit persons to whom the Software is 132 | furnished to do so, subject to the following conditions: 133 | 134 | The above copyright notice and this permission notice shall be included in 135 | all copies or substantial portions of the Software. 136 | 137 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 138 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 139 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 140 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 141 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 142 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 143 | THE SOFTWARE. 144 | 145 | 146 | @octokit/endpoint 147 | MIT 148 | The MIT License 149 | 150 | Copyright (c) 2018 Octokit contributors 151 | 152 | Permission is hereby granted, free of charge, to any person obtaining a copy 153 | of this software and associated documentation files (the "Software"), to deal 154 | in the Software without restriction, including without limitation the rights 155 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 156 | copies of the Software, and to permit persons to whom the Software is 157 | furnished to do so, subject to the following conditions: 158 | 159 | The above copyright notice and this permission notice shall be included in 160 | all copies or substantial portions of the Software. 161 | 162 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 163 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 164 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 165 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 166 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 167 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 168 | THE SOFTWARE. 169 | 170 | 171 | @octokit/graphql 172 | MIT 173 | The MIT License 174 | 175 | Copyright (c) 2018 Octokit contributors 176 | 177 | Permission is hereby granted, free of charge, to any person obtaining a copy 178 | of this software and associated documentation files (the "Software"), to deal 179 | in the Software without restriction, including without limitation the rights 180 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 181 | copies of the Software, and to permit persons to whom the Software is 182 | furnished to do so, subject to the following conditions: 183 | 184 | The above copyright notice and this permission notice shall be included in 185 | all copies or substantial portions of the Software. 186 | 187 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 188 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 189 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 190 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 191 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 192 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 193 | THE SOFTWARE. 194 | 195 | 196 | @octokit/plugin-paginate-rest 197 | MIT 198 | MIT License Copyright (c) 2019 Octokit contributors 199 | 200 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 201 | 202 | The above copyright notice and this permission notice (including the next paragraph) shall be included in all copies or substantial portions of the Software. 
203 | 204 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 205 | 206 | 207 | @octokit/plugin-rest-endpoint-methods 208 | MIT 209 | MIT License Copyright (c) 2019 Octokit contributors 210 | 211 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 212 | 213 | The above copyright notice and this permission notice (including the next paragraph) shall be included in all copies or substantial portions of the Software. 214 | 215 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 216 | 217 | 218 | @octokit/request 219 | MIT 220 | The MIT License 221 | 222 | Copyright (c) 2018 Octokit contributors 223 | 224 | Permission is hereby granted, free of charge, to any person obtaining a copy 225 | of this software and associated documentation files (the "Software"), to deal 226 | in the Software without restriction, including without limitation the rights 227 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 228 | copies of the Software, and to permit persons to whom the Software is 229 | furnished to do so, subject to the following conditions: 230 | 231 | The above copyright notice and this permission notice shall be included in 232 | all copies or substantial portions of the Software. 233 | 234 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 235 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 236 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 237 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 238 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 239 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 240 | THE SOFTWARE. 
241 | 242 | 243 | @octokit/request-error 244 | MIT 245 | The MIT License 246 | 247 | Copyright (c) 2019 Octokit contributors 248 | 249 | Permission is hereby granted, free of charge, to any person obtaining a copy 250 | of this software and associated documentation files (the "Software"), to deal 251 | in the Software without restriction, including without limitation the rights 252 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 253 | copies of the Software, and to permit persons to whom the Software is 254 | furnished to do so, subject to the following conditions: 255 | 256 | The above copyright notice and this permission notice shall be included in 257 | all copies or substantial portions of the Software. 258 | 259 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 260 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 261 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 262 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 263 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 264 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 265 | THE SOFTWARE. 266 | 267 | 268 | before-after-hook 269 | Apache-2.0 270 | Apache License 271 | Version 2.0, January 2004 272 | http://www.apache.org/licenses/ 273 | 274 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 275 | 276 | 1. Definitions. 277 | 278 | "License" shall mean the terms and conditions for use, reproduction, 279 | and distribution as defined by Sections 1 through 9 of this document. 280 | 281 | "Licensor" shall mean the copyright owner or entity authorized by 282 | the copyright owner that is granting the License. 283 | 284 | "Legal Entity" shall mean the union of the acting entity and all 285 | other entities that control, are controlled by, or are under common 286 | control with that entity. For the purposes of this definition, 287 | "control" means (i) the power, direct or indirect, to cause the 288 | direction or management of such entity, whether by contract or 289 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 290 | outstanding shares, or (iii) beneficial ownership of such entity. 291 | 292 | "You" (or "Your") shall mean an individual or Legal Entity 293 | exercising permissions granted by this License. 294 | 295 | "Source" form shall mean the preferred form for making modifications, 296 | including but not limited to software source code, documentation 297 | source, and configuration files. 298 | 299 | "Object" form shall mean any form resulting from mechanical 300 | transformation or translation of a Source form, including but 301 | not limited to compiled object code, generated documentation, 302 | and conversions to other media types. 303 | 304 | "Work" shall mean the work of authorship, whether in Source or 305 | Object form, made available under the License, as indicated by a 306 | copyright notice that is included in or attached to the work 307 | (an example is provided in the Appendix below). 308 | 309 | "Derivative Works" shall mean any work, whether in Source or Object 310 | form, that is based on (or derived from) the Work and for which the 311 | editorial revisions, annotations, elaborations, or other modifications 312 | represent, as a whole, an original work of authorship. 
For the purposes 313 | of this License, Derivative Works shall not include works that remain 314 | separable from, or merely link (or bind by name) to the interfaces of, 315 | the Work and Derivative Works thereof. 316 | 317 | "Contribution" shall mean any work of authorship, including 318 | the original version of the Work and any modifications or additions 319 | to that Work or Derivative Works thereof, that is intentionally 320 | submitted to Licensor for inclusion in the Work by the copyright owner 321 | or by an individual or Legal Entity authorized to submit on behalf of 322 | the copyright owner. For the purposes of this definition, "submitted" 323 | means any form of electronic, verbal, or written communication sent 324 | to the Licensor or its representatives, including but not limited to 325 | communication on electronic mailing lists, source code control systems, 326 | and issue tracking systems that are managed by, or on behalf of, the 327 | Licensor for the purpose of discussing and improving the Work, but 328 | excluding communication that is conspicuously marked or otherwise 329 | designated in writing by the copyright owner as "Not a Contribution." 330 | 331 | "Contributor" shall mean Licensor and any individual or Legal Entity 332 | on behalf of whom a Contribution has been received by Licensor and 333 | subsequently incorporated within the Work. 334 | 335 | 2. Grant of Copyright License. Subject to the terms and conditions of 336 | this License, each Contributor hereby grants to You a perpetual, 337 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 338 | copyright license to reproduce, prepare Derivative Works of, 339 | publicly display, publicly perform, sublicense, and distribute the 340 | Work and such Derivative Works in Source or Object form. 341 | 342 | 3. Grant of Patent License. Subject to the terms and conditions of 343 | this License, each Contributor hereby grants to You a perpetual, 344 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 345 | (except as stated in this section) patent license to make, have made, 346 | use, offer to sell, sell, import, and otherwise transfer the Work, 347 | where such license applies only to those patent claims licensable 348 | by such Contributor that are necessarily infringed by their 349 | Contribution(s) alone or by combination of their Contribution(s) 350 | with the Work to which such Contribution(s) was submitted. If You 351 | institute patent litigation against any entity (including a 352 | cross-claim or counterclaim in a lawsuit) alleging that the Work 353 | or a Contribution incorporated within the Work constitutes direct 354 | or contributory patent infringement, then any patent licenses 355 | granted to You under this License for that Work shall terminate 356 | as of the date such litigation is filed. 357 | 358 | 4. Redistribution. 
You may reproduce and distribute copies of the 359 | Work or Derivative Works thereof in any medium, with or without 360 | modifications, and in Source or Object form, provided that You 361 | meet the following conditions: 362 | 363 | (a) You must give any other recipients of the Work or 364 | Derivative Works a copy of this License; and 365 | 366 | (b) You must cause any modified files to carry prominent notices 367 | stating that You changed the files; and 368 | 369 | (c) You must retain, in the Source form of any Derivative Works 370 | that You distribute, all copyright, patent, trademark, and 371 | attribution notices from the Source form of the Work, 372 | excluding those notices that do not pertain to any part of 373 | the Derivative Works; and 374 | 375 | (d) If the Work includes a "NOTICE" text file as part of its 376 | distribution, then any Derivative Works that You distribute must 377 | include a readable copy of the attribution notices contained 378 | within such NOTICE file, excluding those notices that do not 379 | pertain to any part of the Derivative Works, in at least one 380 | of the following places: within a NOTICE text file distributed 381 | as part of the Derivative Works; within the Source form or 382 | documentation, if provided along with the Derivative Works; or, 383 | within a display generated by the Derivative Works, if and 384 | wherever such third-party notices normally appear. The contents 385 | of the NOTICE file are for informational purposes only and 386 | do not modify the License. You may add Your own attribution 387 | notices within Derivative Works that You distribute, alongside 388 | or as an addendum to the NOTICE text from the Work, provided 389 | that such additional attribution notices cannot be construed 390 | as modifying the License. 391 | 392 | You may add Your own copyright statement to Your modifications and 393 | may provide additional or different license terms and conditions 394 | for use, reproduction, or distribution of Your modifications, or 395 | for any such Derivative Works as a whole, provided Your use, 396 | reproduction, and distribution of the Work otherwise complies with 397 | the conditions stated in this License. 398 | 399 | 5. Submission of Contributions. Unless You explicitly state otherwise, 400 | any Contribution intentionally submitted for inclusion in the Work 401 | by You to the Licensor shall be under the terms and conditions of 402 | this License, without any additional terms or conditions. 403 | Notwithstanding the above, nothing herein shall supersede or modify 404 | the terms of any separate license agreement you may have executed 405 | with Licensor regarding such Contributions. 406 | 407 | 6. Trademarks. This License does not grant permission to use the trade 408 | names, trademarks, service marks, or product names of the Licensor, 409 | except as required for reasonable and customary use in describing the 410 | origin of the Work and reproducing the content of the NOTICE file. 411 | 412 | 7. Disclaimer of Warranty. Unless required by applicable law or 413 | agreed to in writing, Licensor provides the Work (and each 414 | Contributor provides its Contributions) on an "AS IS" BASIS, 415 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 416 | implied, including, without limitation, any warranties or conditions 417 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 418 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 419 | appropriateness of using or redistributing the Work and assume any 420 | risks associated with Your exercise of permissions under this License. 421 | 422 | 8. Limitation of Liability. In no event and under no legal theory, 423 | whether in tort (including negligence), contract, or otherwise, 424 | unless required by applicable law (such as deliberate and grossly 425 | negligent acts) or agreed to in writing, shall any Contributor be 426 | liable to You for damages, including any direct, indirect, special, 427 | incidental, or consequential damages of any character arising as a 428 | result of this License or out of the use or inability to use the 429 | Work (including but not limited to damages for loss of goodwill, 430 | work stoppage, computer failure or malfunction, or any and all 431 | other commercial damages or losses), even if such Contributor 432 | has been advised of the possibility of such damages. 433 | 434 | 9. Accepting Warranty or Additional Liability. While redistributing 435 | the Work or Derivative Works thereof, You may choose to offer, 436 | and charge a fee for, acceptance of support, warranty, indemnity, 437 | or other liability obligations and/or rights consistent with this 438 | License. However, in accepting such obligations, You may act only 439 | on Your own behalf and on Your sole responsibility, not on behalf 440 | of any other Contributor, and only if You agree to indemnify, 441 | defend, and hold each Contributor harmless for any liability 442 | incurred by, or claims asserted against, such Contributor by reason 443 | of your accepting any such warranty or additional liability. 444 | 445 | END OF TERMS AND CONDITIONS 446 | 447 | APPENDIX: How to apply the Apache License to your work. 448 | 449 | To apply the Apache License to your work, attach the following 450 | boilerplate notice, with the fields enclosed by brackets "{}" 451 | replaced with your own identifying information. (Don't include 452 | the brackets!) The text should be enclosed in the appropriate 453 | comment syntax for the file format. We also recommend that a 454 | file or class name and description of purpose be included on the 455 | same "printed page" as the copyright notice for easier 456 | identification within third-party archives. 457 | 458 | Copyright 2018 Gregor Martynus and other contributors. 459 | 460 | Licensed under the Apache License, Version 2.0 (the "License"); 461 | you may not use this file except in compliance with the License. 462 | You may obtain a copy of the License at 463 | 464 | http://www.apache.org/licenses/LICENSE-2.0 465 | 466 | Unless required by applicable law or agreed to in writing, software 467 | distributed under the License is distributed on an "AS IS" BASIS, 468 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 469 | See the License for the specific language governing permissions and 470 | limitations under the License. 471 | 472 | 473 | deprecation 474 | ISC 475 | The ISC License 476 | 477 | Copyright (c) Gregor Martynus and contributors 478 | 479 | Permission to use, copy, modify, and/or distribute this software for any 480 | purpose with or without fee is hereby granted, provided that the above 481 | copyright notice and this permission notice appear in all copies. 482 | 483 | THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES 484 | WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF 485 | MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR 486 | ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES 487 | WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN 488 | ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR 489 | IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 490 | 491 | 492 | is-plain-object 493 | MIT 494 | The MIT License (MIT) 495 | 496 | Copyright (c) 2014-2017, Jon Schlinkert. 497 | 498 | Permission is hereby granted, free of charge, to any person obtaining a copy 499 | of this software and associated documentation files (the "Software"), to deal 500 | in the Software without restriction, including without limitation the rights 501 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 502 | copies of the Software, and to permit persons to whom the Software is 503 | furnished to do so, subject to the following conditions: 504 | 505 | The above copyright notice and this permission notice shall be included in 506 | all copies or substantial portions of the Software. 507 | 508 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 509 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 510 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 511 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 512 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 513 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 514 | THE SOFTWARE. 515 | 516 | 517 | once 518 | ISC 519 | The ISC License 520 | 521 | Copyright (c) Isaac Z. Schlueter and Contributors 522 | 523 | Permission to use, copy, modify, and/or distribute this software for any 524 | purpose with or without fee is hereby granted, provided that the above 525 | copyright notice and this permission notice appear in all copies. 526 | 527 | THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES 528 | WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF 529 | MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR 530 | ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES 531 | WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN 532 | ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR 533 | IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 534 | 535 | 536 | tunnel 537 | MIT 538 | The MIT License (MIT) 539 | 540 | Copyright (c) 2012 Koichi Kobayashi 541 | 542 | Permission is hereby granted, free of charge, to any person obtaining a copy 543 | of this software and associated documentation files (the "Software"), to deal 544 | in the Software without restriction, including without limitation the rights 545 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 546 | copies of the Software, and to permit persons to whom the Software is 547 | furnished to do so, subject to the following conditions: 548 | 549 | The above copyright notice and this permission notice shall be included in 550 | all copies or substantial portions of the Software. 551 | 552 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 553 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 554 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 555 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 556 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 557 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 558 | THE SOFTWARE. 559 | 560 | 561 | undici 562 | MIT 563 | MIT License 564 | 565 | Copyright (c) Matteo Collina and Undici contributors 566 | 567 | Permission is hereby granted, free of charge, to any person obtaining a copy 568 | of this software and associated documentation files (the "Software"), to deal 569 | in the Software without restriction, including without limitation the rights 570 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 571 | copies of the Software, and to permit persons to whom the Software is 572 | furnished to do so, subject to the following conditions: 573 | 574 | The above copyright notice and this permission notice shall be included in all 575 | copies or substantial portions of the Software. 576 | 577 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 578 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 579 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 580 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 581 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 582 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 583 | SOFTWARE. 584 | 585 | 586 | universal-user-agent 587 | ISC 588 | # [ISC License](https://spdx.org/licenses/ISC) 589 | 590 | Copyright (c) 2018, Gregor Martynus (https://github.com/gr2m) 591 | 592 | Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. 593 | 594 | THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 595 | 596 | 597 | uuid 598 | MIT 599 | The MIT License (MIT) 600 | 601 | Copyright (c) 2010-2020 Robert Kieffer and other contributors 602 | 603 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 604 | 605 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 606 | 607 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 608 | 609 | 610 | wrappy 611 | ISC 612 | The ISC License 613 | 614 | Copyright (c) Isaac Z. Schlueter and Contributors 615 | 616 | Permission to use, copy, modify, and/or distribute this software for any 617 | purpose with or without fee is hereby granted, provided that the above 618 | copyright notice and this permission notice appear in all copies. 619 | 620 | THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES 621 | WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF 622 | MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR 623 | ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES 624 | WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN 625 | ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR 626 | IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 627 | -------------------------------------------------------------------------------- /dist/sourcemap-register.js: -------------------------------------------------------------------------------- 1 | (()=>{var e={650:e=>{var r=Object.prototype.toString;var n=typeof Buffer.alloc==="function"&&typeof Buffer.allocUnsafe==="function"&&typeof Buffer.from==="function";function isArrayBuffer(e){return r.call(e).slice(8,-1)==="ArrayBuffer"}function fromArrayBuffer(e,r,t){r>>>=0;var o=e.byteLength-r;if(o<0){throw new RangeError("'offset' is out of bounds")}if(t===undefined){t=o}else{t>>>=0;if(t>o){throw new RangeError("'length' is out of bounds")}}return n?Buffer.from(e.slice(r,r+t)):new Buffer(new Uint8Array(e.slice(r,r+t)))}function fromString(e,r){if(typeof r!=="string"||r===""){r="utf8"}if(!Buffer.isEncoding(r)){throw new TypeError('"encoding" must be a valid string encoding')}return n?Buffer.from(e,r):new Buffer(e,r)}function bufferFrom(e,r,t){if(typeof e==="number"){throw new TypeError('"value" argument must not be a number')}if(isArrayBuffer(e)){return fromArrayBuffer(e,r,t)}if(typeof e==="string"){return fromString(e,r)}return n?Buffer.from(e):new Buffer(e)}e.exports=bufferFrom},274:(e,r,n)=>{var t=n(339);var o=Object.prototype.hasOwnProperty;var i=typeof Map!=="undefined";function ArraySet(){this._array=[];this._set=i?new Map:Object.create(null)}ArraySet.fromArray=function ArraySet_fromArray(e,r){var n=new ArraySet;for(var t=0,o=e.length;t=0){return r}}else{var n=t.toSetString(e);if(o.call(this._set,n)){return this._set[n]}}throw new Error('"'+e+'" is not in the set.')};ArraySet.prototype.at=function ArraySet_at(e){if(e>=0&&e{var t=n(190);var o=5;var i=1<>1;return r?-n:n}r.encode=function base64VLQ_encode(e){var r="";var n;var i=toVLQSigned(e);do{n=i&a;i>>>=o;if(i>0){n|=u}r+=t.encode(n)}while(i>0);return r};r.decode=function base64VLQ_decode(e,r,n){var i=e.length;var s=0;var l=0;var c,p;do{if(r>=i){throw new Error("Expected more digits in base 64 VLQ value.")}p=t.decode(e.charCodeAt(r++));if(p===-1){throw new Error("Invalid base64 digit: "+e.charAt(r-1))}c=!!(p&u);p&=a;s=s+(p<{var n="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".split("");r.encode=function(e){if(0<=e&&e{r.GREATEST_LOWER_BOUND=1;r.LEAST_UPPER_BOUND=2;function recursiveSearch(e,n,t,o,i,a){var u=Math.floor((n-e)/2)+e;var s=i(t,o[u],true);if(s===0){return u}else 
if(s>0){if(n-u>1){return recursiveSearch(u,n,t,o,i,a)}if(a==r.LEAST_UPPER_BOUND){return n1){return recursiveSearch(e,u,t,o,i,a)}if(a==r.LEAST_UPPER_BOUND){return u}else{return e<0?-1:e}}}r.search=function search(e,n,t,o){if(n.length===0){return-1}var i=recursiveSearch(-1,n.length,e,n,t,o||r.GREATEST_LOWER_BOUND);if(i<0){return-1}while(i-1>=0){if(t(n[i],n[i-1],true)!==0){break}--i}return i}},680:(e,r,n)=>{var t=n(339);function generatedPositionAfter(e,r){var n=e.generatedLine;var o=r.generatedLine;var i=e.generatedColumn;var a=r.generatedColumn;return o>n||o==n&&a>=i||t.compareByGeneratedPositionsInflated(e,r)<=0}function MappingList(){this._array=[];this._sorted=true;this._last={generatedLine:-1,generatedColumn:0}}MappingList.prototype.unsortedForEach=function MappingList_forEach(e,r){this._array.forEach(e,r)};MappingList.prototype.add=function MappingList_add(e){if(generatedPositionAfter(this._last,e)){this._last=e;this._array.push(e)}else{this._sorted=false;this._array.push(e)}};MappingList.prototype.toArray=function MappingList_toArray(){if(!this._sorted){this._array.sort(t.compareByGeneratedPositionsInflated);this._sorted=true}return this._array};r.H=MappingList},758:(e,r)=>{function swap(e,r,n){var t=e[r];e[r]=e[n];e[n]=t}function randomIntInRange(e,r){return Math.round(e+Math.random()*(r-e))}function doQuickSort(e,r,n,t){if(n{var t;var o=n(339);var i=n(345);var a=n(274).I;var u=n(449);var s=n(758).U;function SourceMapConsumer(e,r){var n=e;if(typeof e==="string"){n=o.parseSourceMapInput(e)}return n.sections!=null?new IndexedSourceMapConsumer(n,r):new BasicSourceMapConsumer(n,r)}SourceMapConsumer.fromSourceMap=function(e,r){return BasicSourceMapConsumer.fromSourceMap(e,r)};SourceMapConsumer.prototype._version=3;SourceMapConsumer.prototype.__generatedMappings=null;Object.defineProperty(SourceMapConsumer.prototype,"_generatedMappings",{configurable:true,enumerable:true,get:function(){if(!this.__generatedMappings){this._parseMappings(this._mappings,this.sourceRoot)}return this.__generatedMappings}});SourceMapConsumer.prototype.__originalMappings=null;Object.defineProperty(SourceMapConsumer.prototype,"_originalMappings",{configurable:true,enumerable:true,get:function(){if(!this.__originalMappings){this._parseMappings(this._mappings,this.sourceRoot)}return this.__originalMappings}});SourceMapConsumer.prototype._charIsMappingSeparator=function SourceMapConsumer_charIsMappingSeparator(e,r){var n=e.charAt(r);return n===";"||n===","};SourceMapConsumer.prototype._parseMappings=function SourceMapConsumer_parseMappings(e,r){throw new Error("Subclasses must implement _parseMappings")};SourceMapConsumer.GENERATED_ORDER=1;SourceMapConsumer.ORIGINAL_ORDER=2;SourceMapConsumer.GREATEST_LOWER_BOUND=1;SourceMapConsumer.LEAST_UPPER_BOUND=2;SourceMapConsumer.prototype.eachMapping=function SourceMapConsumer_eachMapping(e,r,n){var t=r||null;var i=n||SourceMapConsumer.GENERATED_ORDER;var a;switch(i){case SourceMapConsumer.GENERATED_ORDER:a=this._generatedMappings;break;case SourceMapConsumer.ORIGINAL_ORDER:a=this._originalMappings;break;default:throw new Error("Unknown order of iteration.")}var u=this.sourceRoot;a.map((function(e){var r=e.source===null?null:this._sources.at(e.source);r=o.computeSourceURL(u,r,this._sourceMapURL);return{source:r,generatedLine:e.generatedLine,generatedColumn:e.generatedColumn,originalLine:e.originalLine,originalColumn:e.originalColumn,name:e.name===null?null:this._names.at(e.name)}}),this).forEach(e,t)};SourceMapConsumer.prototype.allGeneratedPositionsFor=function 
SourceMapConsumer_allGeneratedPositionsFor(e){var r=o.getArg(e,"line");var n={source:o.getArg(e,"source"),originalLine:r,originalColumn:o.getArg(e,"column",0)};n.source=this._findSourceIndex(n.source);if(n.source<0){return[]}var t=[];var a=this._findMapping(n,this._originalMappings,"originalLine","originalColumn",o.compareByOriginalPositions,i.LEAST_UPPER_BOUND);if(a>=0){var u=this._originalMappings[a];if(e.column===undefined){var s=u.originalLine;while(u&&u.originalLine===s){t.push({line:o.getArg(u,"generatedLine",null),column:o.getArg(u,"generatedColumn",null),lastColumn:o.getArg(u,"lastGeneratedColumn",null)});u=this._originalMappings[++a]}}else{var l=u.originalColumn;while(u&&u.originalLine===r&&u.originalColumn==l){t.push({line:o.getArg(u,"generatedLine",null),column:o.getArg(u,"generatedColumn",null),lastColumn:o.getArg(u,"lastGeneratedColumn",null)});u=this._originalMappings[++a]}}}return t};r.SourceMapConsumer=SourceMapConsumer;function BasicSourceMapConsumer(e,r){var n=e;if(typeof e==="string"){n=o.parseSourceMapInput(e)}var t=o.getArg(n,"version");var i=o.getArg(n,"sources");var u=o.getArg(n,"names",[]);var s=o.getArg(n,"sourceRoot",null);var l=o.getArg(n,"sourcesContent",null);var c=o.getArg(n,"mappings");var p=o.getArg(n,"file",null);if(t!=this._version){throw new Error("Unsupported version: "+t)}if(s){s=o.normalize(s)}i=i.map(String).map(o.normalize).map((function(e){return s&&o.isAbsolute(s)&&o.isAbsolute(e)?o.relative(s,e):e}));this._names=a.fromArray(u.map(String),true);this._sources=a.fromArray(i,true);this._absoluteSources=this._sources.toArray().map((function(e){return o.computeSourceURL(s,e,r)}));this.sourceRoot=s;this.sourcesContent=l;this._mappings=c;this._sourceMapURL=r;this.file=p}BasicSourceMapConsumer.prototype=Object.create(SourceMapConsumer.prototype);BasicSourceMapConsumer.prototype.consumer=SourceMapConsumer;BasicSourceMapConsumer.prototype._findSourceIndex=function(e){var r=e;if(this.sourceRoot!=null){r=o.relative(this.sourceRoot,r)}if(this._sources.has(r)){return this._sources.indexOf(r)}var n;for(n=0;n1){v.source=l+_[1];l+=_[1];v.originalLine=i+_[2];i=v.originalLine;v.originalLine+=1;v.originalColumn=a+_[3];a=v.originalColumn;if(_.length>4){v.name=c+_[4];c+=_[4]}}m.push(v);if(typeof v.originalLine==="number"){d.push(v)}}}s(m,o.compareByGeneratedPositionsDeflated);this.__generatedMappings=m;s(d,o.compareByOriginalPositions);this.__originalMappings=d};BasicSourceMapConsumer.prototype._findMapping=function SourceMapConsumer_findMapping(e,r,n,t,o,a){if(e[n]<=0){throw new TypeError("Line must be greater than or equal to 1, got "+e[n])}if(e[t]<0){throw new TypeError("Column must be greater than or equal to 0, got "+e[t])}return i.search(e,r,o,a)};BasicSourceMapConsumer.prototype.computeColumnSpans=function SourceMapConsumer_computeColumnSpans(){for(var e=0;e=0){var t=this._generatedMappings[n];if(t.generatedLine===r.generatedLine){var i=o.getArg(t,"source",null);if(i!==null){i=this._sources.at(i);i=o.computeSourceURL(this.sourceRoot,i,this._sourceMapURL)}var a=o.getArg(t,"name",null);if(a!==null){a=this._names.at(a)}return{source:i,line:o.getArg(t,"originalLine",null),column:o.getArg(t,"originalColumn",null),name:a}}}return{source:null,line:null,column:null,name:null}};BasicSourceMapConsumer.prototype.hasContentsOfAllSources=function BasicSourceMapConsumer_hasContentsOfAllSources(){if(!this.sourcesContent){return false}return this.sourcesContent.length>=this._sources.size()&&!this.sourcesContent.some((function(e){return 
e==null}))};BasicSourceMapConsumer.prototype.sourceContentFor=function SourceMapConsumer_sourceContentFor(e,r){if(!this.sourcesContent){return null}var n=this._findSourceIndex(e);if(n>=0){return this.sourcesContent[n]}var t=e;if(this.sourceRoot!=null){t=o.relative(this.sourceRoot,t)}var i;if(this.sourceRoot!=null&&(i=o.urlParse(this.sourceRoot))){var a=t.replace(/^file:\/\//,"");if(i.scheme=="file"&&this._sources.has(a)){return this.sourcesContent[this._sources.indexOf(a)]}if((!i.path||i.path=="/")&&this._sources.has("/"+t)){return this.sourcesContent[this._sources.indexOf("/"+t)]}}if(r){return null}else{throw new Error('"'+t+'" is not in the SourceMap.')}};BasicSourceMapConsumer.prototype.generatedPositionFor=function SourceMapConsumer_generatedPositionFor(e){var r=o.getArg(e,"source");r=this._findSourceIndex(r);if(r<0){return{line:null,column:null,lastColumn:null}}var n={source:r,originalLine:o.getArg(e,"line"),originalColumn:o.getArg(e,"column")};var t=this._findMapping(n,this._originalMappings,"originalLine","originalColumn",o.compareByOriginalPositions,o.getArg(e,"bias",SourceMapConsumer.GREATEST_LOWER_BOUND));if(t>=0){var i=this._originalMappings[t];if(i.source===n.source){return{line:o.getArg(i,"generatedLine",null),column:o.getArg(i,"generatedColumn",null),lastColumn:o.getArg(i,"lastGeneratedColumn",null)}}}return{line:null,column:null,lastColumn:null}};t=BasicSourceMapConsumer;function IndexedSourceMapConsumer(e,r){var n=e;if(typeof e==="string"){n=o.parseSourceMapInput(e)}var t=o.getArg(n,"version");var i=o.getArg(n,"sections");if(t!=this._version){throw new Error("Unsupported version: "+t)}this._sources=new a;this._names=new a;var u={line:-1,column:0};this._sections=i.map((function(e){if(e.url){throw new Error("Support for url field in sections not implemented.")}var n=o.getArg(e,"offset");var t=o.getArg(n,"line");var i=o.getArg(n,"column");if(t{var t=n(449);var o=n(339);var i=n(274).I;var a=n(680).H;function SourceMapGenerator(e){if(!e){e={}}this._file=o.getArg(e,"file",null);this._sourceRoot=o.getArg(e,"sourceRoot",null);this._skipValidation=o.getArg(e,"skipValidation",false);this._sources=new i;this._names=new i;this._mappings=new a;this._sourcesContents=null}SourceMapGenerator.prototype._version=3;SourceMapGenerator.fromSourceMap=function SourceMapGenerator_fromSourceMap(e){var r=e.sourceRoot;var n=new SourceMapGenerator({file:e.file,sourceRoot:r});e.eachMapping((function(e){var t={generated:{line:e.generatedLine,column:e.generatedColumn}};if(e.source!=null){t.source=e.source;if(r!=null){t.source=o.relative(r,t.source)}t.original={line:e.originalLine,column:e.originalColumn};if(e.name!=null){t.name=e.name}}n.addMapping(t)}));e.sources.forEach((function(t){var i=t;if(r!==null){i=o.relative(r,t)}if(!n._sources.has(i)){n._sources.add(i)}var a=e.sourceContentFor(t);if(a!=null){n.setSourceContent(t,a)}}));return n};SourceMapGenerator.prototype.addMapping=function SourceMapGenerator_addMapping(e){var r=o.getArg(e,"generated");var n=o.getArg(e,"original",null);var t=o.getArg(e,"source",null);var i=o.getArg(e,"name",null);if(!this._skipValidation){this._validateMapping(r,n,t,i)}if(t!=null){t=String(t);if(!this._sources.has(t)){this._sources.add(t)}}if(i!=null){i=String(i);if(!this._names.has(i)){this._names.add(i)}}this._mappings.add({generatedLine:r.line,generatedColumn:r.column,originalLine:n!=null&&n.line,originalColumn:n!=null&&n.column,source:t,name:i})};SourceMapGenerator.prototype.setSourceContent=function SourceMapGenerator_setSourceContent(e,r){var 
n=e;if(this._sourceRoot!=null){n=o.relative(this._sourceRoot,n)}if(r!=null){if(!this._sourcesContents){this._sourcesContents=Object.create(null)}this._sourcesContents[o.toSetString(n)]=r}else if(this._sourcesContents){delete this._sourcesContents[o.toSetString(n)];if(Object.keys(this._sourcesContents).length===0){this._sourcesContents=null}}};SourceMapGenerator.prototype.applySourceMap=function SourceMapGenerator_applySourceMap(e,r,n){var t=r;if(r==null){if(e.file==null){throw new Error("SourceMapGenerator.prototype.applySourceMap requires either an explicit source file, "+'or the source map\'s "file" property. Both were omitted.')}t=e.file}var a=this._sourceRoot;if(a!=null){t=o.relative(a,t)}var u=new i;var s=new i;this._mappings.unsortedForEach((function(r){if(r.source===t&&r.originalLine!=null){var i=e.originalPositionFor({line:r.originalLine,column:r.originalColumn});if(i.source!=null){r.source=i.source;if(n!=null){r.source=o.join(n,r.source)}if(a!=null){r.source=o.relative(a,r.source)}r.originalLine=i.line;r.originalColumn=i.column;if(i.name!=null){r.name=i.name}}}var l=r.source;if(l!=null&&!u.has(l)){u.add(l)}var c=r.name;if(c!=null&&!s.has(c)){s.add(c)}}),this);this._sources=u;this._names=s;e.sources.forEach((function(r){var t=e.sourceContentFor(r);if(t!=null){if(n!=null){r=o.join(n,r)}if(a!=null){r=o.relative(a,r)}this.setSourceContent(r,t)}}),this)};SourceMapGenerator.prototype._validateMapping=function SourceMapGenerator_validateMapping(e,r,n,t){if(r&&typeof r.line!=="number"&&typeof r.column!=="number"){throw new Error("original.line and original.column are not numbers -- you probably meant to omit "+"the original mapping entirely and only map the generated position. If so, pass "+"null for the original mapping instead of an object with empty or null values.")}if(e&&"line"in e&&"column"in e&&e.line>0&&e.column>=0&&!r&&!n&&!t){return}else if(e&&"line"in e&&"column"in e&&r&&"line"in r&&"column"in r&&e.line>0&&e.column>=0&&r.line>0&&r.column>=0&&n){return}else{throw new Error("Invalid mapping: "+JSON.stringify({generated:e,source:n,original:r,name:t}))}};SourceMapGenerator.prototype._serializeMappings=function SourceMapGenerator_serializeMappings(){var e=0;var r=1;var n=0;var i=0;var a=0;var u=0;var s="";var l;var c;var p;var f;var g=this._mappings.toArray();for(var h=0,d=g.length;h0){if(!o.compareByGeneratedPositionsInflated(c,g[h-1])){continue}l+=","}}l+=t.encode(c.generatedColumn-e);e=c.generatedColumn;if(c.source!=null){f=this._sources.indexOf(c.source);l+=t.encode(f-u);u=f;l+=t.encode(c.originalLine-1-i);i=c.originalLine-1;l+=t.encode(c.originalColumn-n);n=c.originalColumn;if(c.name!=null){p=this._names.indexOf(c.name);l+=t.encode(p-a);a=p}}s+=l}return s};SourceMapGenerator.prototype._generateSourcesContent=function SourceMapGenerator_generateSourcesContent(e,r){return e.map((function(e){if(!this._sourcesContents){return null}if(r!=null){e=o.relative(r,e)}var n=o.toSetString(e);return Object.prototype.hasOwnProperty.call(this._sourcesContents,n)?this._sourcesContents[n]:null}),this)};SourceMapGenerator.prototype.toJSON=function SourceMapGenerator_toJSON(){var e={version:this._version,sources:this._sources.toArray(),names:this._names.toArray(),mappings:this._serializeMappings()};if(this._file!=null){e.file=this._file}if(this._sourceRoot!=null){e.sourceRoot=this._sourceRoot}if(this._sourcesContents){e.sourcesContent=this._generateSourcesContent(e.sources,e.sourceRoot)}return e};SourceMapGenerator.prototype.toString=function SourceMapGenerator_toString(){return 
JSON.stringify(this.toJSON())};r.h=SourceMapGenerator},351:(e,r,n)=>{var t;var o=n(591).h;var i=n(339);var a=/(\r?\n)/;var u=10;var s="$$$isSourceNode$$$";function SourceNode(e,r,n,t,o){this.children=[];this.sourceContents={};this.line=e==null?null:e;this.column=r==null?null:r;this.source=n==null?null:n;this.name=o==null?null:o;this[s]=true;if(t!=null)this.add(t)}SourceNode.fromStringWithSourceMap=function SourceNode_fromStringWithSourceMap(e,r,n){var t=new SourceNode;var o=e.split(a);var u=0;var shiftNextLine=function(){var e=getNextLine();var r=getNextLine()||"";return e+r;function getNextLine(){return u=0;r--){this.prepend(e[r])}}else if(e[s]||typeof e==="string"){this.children.unshift(e)}else{throw new TypeError("Expected a SourceNode, string, or an array of SourceNodes and strings. Got "+e)}return this};SourceNode.prototype.walk=function SourceNode_walk(e){var r;for(var n=0,t=this.children.length;n0){r=[];for(n=0;n{function getArg(e,r,n){if(r in e){return e[r]}else if(arguments.length===3){return n}else{throw new Error('"'+r+'" is a required argument.')}}r.getArg=getArg;var n=/^(?:([\w+\-.]+):)?\/\/(?:(\w+:\w+)@)?([\w.-]*)(?::(\d+))?(.*)$/;var t=/^data:.+\,.+$/;function urlParse(e){var r=e.match(n);if(!r){return null}return{scheme:r[1],auth:r[2],host:r[3],port:r[4],path:r[5]}}r.urlParse=urlParse;function urlGenerate(e){var r="";if(e.scheme){r+=e.scheme+":"}r+="//";if(e.auth){r+=e.auth+"@"}if(e.host){r+=e.host}if(e.port){r+=":"+e.port}if(e.path){r+=e.path}return r}r.urlGenerate=urlGenerate;function normalize(e){var n=e;var t=urlParse(e);if(t){if(!t.path){return e}n=t.path}var o=r.isAbsolute(n);var i=n.split(/\/+/);for(var a,u=0,s=i.length-1;s>=0;s--){a=i[s];if(a==="."){i.splice(s,1)}else if(a===".."){u++}else if(u>0){if(a===""){i.splice(s+1,u);u=0}else{i.splice(s,2);u--}}}n=i.join("/");if(n===""){n=o?"/":"."}if(t){t.path=n;return urlGenerate(t)}return n}r.normalize=normalize;function join(e,r){if(e===""){e="."}if(r===""){r="."}var n=urlParse(r);var o=urlParse(e);if(o){e=o.path||"/"}if(n&&!n.scheme){if(o){n.scheme=o.scheme}return urlGenerate(n)}if(n||r.match(t)){return r}if(o&&!o.host&&!o.path){o.host=r;return urlGenerate(o)}var i=r.charAt(0)==="/"?r:normalize(e.replace(/\/+$/,"")+"/"+r);if(o){o.path=i;return urlGenerate(o)}return i}r.join=join;r.isAbsolute=function(e){return e.charAt(0)==="/"||n.test(e)};function relative(e,r){if(e===""){e="."}e=e.replace(/\/$/,"");var n=0;while(r.indexOf(e+"/")!==0){var t=e.lastIndexOf("/");if(t<0){return r}e=e.slice(0,t);if(e.match(/^([^\/]+:\/)?\/*$/)){return r}++n}return Array(n+1).join("../")+r.substr(e.length+1)}r.relative=relative;var o=function(){var e=Object.create(null);return!("__proto__"in e)}();function identity(e){return e}function toSetString(e){if(isProtoString(e)){return"$"+e}return e}r.toSetString=o?identity:toSetString;function fromSetString(e){if(isProtoString(e)){return e.slice(1)}return e}r.fromSetString=o?identity:fromSetString;function isProtoString(e){if(!e){return false}var r=e.length;if(r<9){return false}if(e.charCodeAt(r-1)!==95||e.charCodeAt(r-2)!==95||e.charCodeAt(r-3)!==111||e.charCodeAt(r-4)!==116||e.charCodeAt(r-5)!==111||e.charCodeAt(r-6)!==114||e.charCodeAt(r-7)!==112||e.charCodeAt(r-8)!==95||e.charCodeAt(r-9)!==95){return false}for(var n=r-10;n>=0;n--){if(e.charCodeAt(n)!==36){return false}}return true}function compareByOriginalPositions(e,r,n){var t=strcmp(e.source,r.source);if(t!==0){return t}t=e.originalLine-r.originalLine;if(t!==0){return t}t=e.originalColumn-r.originalColumn;if(t!==0||n){return 
t}t=e.generatedColumn-r.generatedColumn;if(t!==0){return t}t=e.generatedLine-r.generatedLine;if(t!==0){return t}return strcmp(e.name,r.name)}r.compareByOriginalPositions=compareByOriginalPositions;function compareByGeneratedPositionsDeflated(e,r,n){var t=e.generatedLine-r.generatedLine;if(t!==0){return t}t=e.generatedColumn-r.generatedColumn;if(t!==0||n){return t}t=strcmp(e.source,r.source);if(t!==0){return t}t=e.originalLine-r.originalLine;if(t!==0){return t}t=e.originalColumn-r.originalColumn;if(t!==0){return t}return strcmp(e.name,r.name)}r.compareByGeneratedPositionsDeflated=compareByGeneratedPositionsDeflated;function strcmp(e,r){if(e===r){return 0}if(e===null){return 1}if(r===null){return-1}if(e>r){return 1}return-1}function compareByGeneratedPositionsInflated(e,r){var n=e.generatedLine-r.generatedLine;if(n!==0){return n}n=e.generatedColumn-r.generatedColumn;if(n!==0){return n}n=strcmp(e.source,r.source);if(n!==0){return n}n=e.originalLine-r.originalLine;if(n!==0){return n}n=e.originalColumn-r.originalColumn;if(n!==0){return n}return strcmp(e.name,r.name)}r.compareByGeneratedPositionsInflated=compareByGeneratedPositionsInflated;function parseSourceMapInput(e){return JSON.parse(e.replace(/^\)]}'[^\n]*\n/,""))}r.parseSourceMapInput=parseSourceMapInput;function computeSourceURL(e,r,n){r=r||"";if(e){if(e[e.length-1]!=="/"&&r[0]!=="/"){e+="/"}r=e+r}if(n){var t=urlParse(n);if(!t){throw new Error("sourceMapURL could not be parsed")}if(t.path){var o=t.path.lastIndexOf("/");if(o>=0){t.path=t.path.substring(0,o+1)}}r=join(urlGenerate(t),r)}return normalize(r)}r.computeSourceURL=computeSourceURL},997:(e,r,n)=>{n(591).h;r.SourceMapConsumer=n(952).SourceMapConsumer;n(351)},284:(e,r,n)=>{e=n.nmd(e);var t=n(997).SourceMapConsumer;var o=n(17);var i;try{i=n(147);if(!i.existsSync||!i.readFileSync){i=null}}catch(e){}var a=n(650);function dynamicRequire(e,r){return e.require(r)}var u=false;var s=false;var l=false;var c="auto";var p={};var f={};var g=/^data:application\/json[^,]+base64,/;var h=[];var d=[];function isInBrowser(){if(c==="browser")return true;if(c==="node")return false;return typeof window!=="undefined"&&typeof XMLHttpRequest==="function"&&!(window.require&&window.module&&window.process&&window.process.type==="renderer")}function hasGlobalProcessEventEmitter(){return typeof process==="object"&&process!==null&&typeof process.on==="function"}function globalProcessVersion(){if(typeof process==="object"&&process!==null){return process.version}else{return""}}function globalProcessStderr(){if(typeof process==="object"&&process!==null){return process.stderr}}function globalProcessExit(e){if(typeof process==="object"&&process!==null&&typeof process.exit==="function"){return process.exit(e)}}function handlerExec(e){return function(r){for(var n=0;n"}var n=this.getLineNumber();if(n!=null){r+=":"+n;var t=this.getColumnNumber();if(t){r+=":"+t}}}var o="";var i=this.getFunctionName();var a=true;var u=this.isConstructor();var s=!(this.isToplevel()||u);if(s){var l=this.getTypeName();if(l==="[object Object]"){l="null"}var c=this.getMethodName();if(i){if(l&&i.indexOf(l)!=0){o+=l+"."}o+=i;if(c&&i.indexOf("."+c)!=i.length-c.length-1){o+=" [as "+c+"]"}}else{o+=l+"."+(c||"")}}else if(u){o+="new "+(i||"")}else if(i){o+=i}else{o+=r;a=false}if(a){o+=" ("+r+")"}return o}function cloneCallSite(e){var r={};Object.getOwnPropertyNames(Object.getPrototypeOf(e)).forEach((function(n){r[n]=/^(?:is|get)/.test(n)?function(){return e[n].call(e)}:e[n]}));r.toString=CallSiteToString;return r}function 
wrapCallSite(e,r){if(r===undefined){r={nextPosition:null,curPosition:null}}if(e.isNative()){r.curPosition=null;return e}var n=e.getFileName()||e.getScriptNameOrSourceURL();if(n){var t=e.getLineNumber();var o=e.getColumnNumber()-1;var i=/^v(10\.1[6-9]|10\.[2-9][0-9]|10\.[0-9]{3,}|1[2-9]\d*|[2-9]\d|\d{3,}|11\.11)/;var a=i.test(globalProcessVersion())?0:62;if(t===1&&o>a&&!isInBrowser()&&!e.isEval()){o-=a}var u=mapSourcePosition({source:n,line:t,column:o});r.curPosition=u;e=cloneCallSite(e);var s=e.getFunctionName;e.getFunctionName=function(){if(r.nextPosition==null){return s()}return r.nextPosition.name||s()};e.getFileName=function(){return u.source};e.getLineNumber=function(){return u.line};e.getColumnNumber=function(){return u.column+1};e.getScriptNameOrSourceURL=function(){return u.source};return e}var l=e.isEval()&&e.getEvalOrigin();if(l){l=mapEvalOrigin(l);e=cloneCallSite(e);e.getEvalOrigin=function(){return l};return e}return e}function prepareStackTrace(e,r){if(l){p={};f={}}var n=e.name||"Error";var t=e.message||"";var o=n+": "+t;var i={nextPosition:null,curPosition:null};var a=[];for(var u=r.length-1;u>=0;u--){a.push("\n at "+wrapCallSite(r[u],i));i.nextPosition=i.curPosition}i.curPosition=i.nextPosition=null;return o+a.reverse().join("")}function getErrorSource(e){var r=/\n at [^(]+ \((.*):(\d+):(\d+)\)/.exec(e.stack);if(r){var n=r[1];var t=+r[2];var o=+r[3];var a=p[n];if(!a&&i&&i.existsSync(n)){try{a=i.readFileSync(n,"utf8")}catch(e){a=""}}if(a){var u=a.split(/(?:\r\n|\r|\n)/)[t-1];if(u){return n+":"+t+"\n"+u+"\n"+new Array(o).join(" ")+"^"}}}return null}function printErrorAndExit(e){var r=getErrorSource(e);var n=globalProcessStderr();if(n&&n._handle&&n._handle.setBlocking){n._handle.setBlocking(true)}if(r){console.error();console.error(r)}console.error(e.stack);globalProcessExit(1)}function shimEmitUncaughtException(){var e=process.emit;process.emit=function(r){if(r==="uncaughtException"){var n=arguments[1]&&arguments[1].stack;var t=this.listeners(r).length>0;if(n&&!t){return printErrorAndExit(arguments[1])}}return e.apply(this,arguments)}}var S=h.slice(0);var _=d.slice(0);r.wrapCallSite=wrapCallSite;r.getErrorSource=getErrorSource;r.mapSourcePosition=mapSourcePosition;r.retrieveSourceMap=v;r.install=function(r){r=r||{};if(r.environment){c=r.environment;if(["node","browser","auto"].indexOf(c)===-1){throw new Error("environment "+c+" was unknown. 
Available options are {auto, browser, node}")}}if(r.retrieveFile){if(r.overrideRetrieveFile){h.length=0}h.unshift(r.retrieveFile)}if(r.retrieveSourceMap){if(r.overrideRetrieveSourceMap){d.length=0}d.unshift(r.retrieveSourceMap)}if(r.hookRequire&&!isInBrowser()){var n=dynamicRequire(e,"module");var t=n.prototype._compile;if(!t.__sourceMapSupport){n.prototype._compile=function(e,r){p[r]=e;f[r]=undefined;return t.call(this,e,r)};n.prototype._compile.__sourceMapSupport=true}}if(!l){l="emptyCacheBetweenOperations"in r?r.emptyCacheBetweenOperations:false}if(!u){u=true;Error.prepareStackTrace=prepareStackTrace}if(!s){var o="handleUncaughtExceptions"in r?r.handleUncaughtExceptions:true;try{var i=dynamicRequire(e,"worker_threads");if(i.isMainThread===false){o=false}}catch(e){}if(o&&hasGlobalProcessEventEmitter()){s=true;shimEmitUncaughtException()}}};r.resetRetrieveHandlers=function(){h.length=0;d.length=0;h=S.slice(0);d=_.slice(0);v=handlerExec(d);m=handlerExec(h)}},147:e=>{"use strict";e.exports=require("fs")},17:e=>{"use strict";e.exports=require("path")}};var r={};function __webpack_require__(n){var t=r[n];if(t!==undefined){return t.exports}var o=r[n]={id:n,loaded:false,exports:{}};var i=true;try{e[n](o,o.exports,__webpack_require__);i=false}finally{if(i)delete r[n]}o.loaded=true;return o.exports}(()=>{__webpack_require__.nmd=e=>{e.paths=[];if(!e.children)e.children=[];return e}})();if(typeof __webpack_require__!=="undefined")__webpack_require__.ab=__dirname+"/";var n={};(()=>{__webpack_require__(284).install()})();module.exports=n})(); -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "sbt-dependency-submission", 3 | "version": "3.1.0", 4 | "private": true, 5 | "description": "Submit the dependency graph of an sbt build to Github", 6 | "main": "lib/main.js", 7 | "scripts": { 8 | "build": "tsc", 9 | "format": "prettier --write **/*.ts", 10 | "format-check": "prettier --check **/*.ts", 11 | "lint": "eslint src/**/*.ts", 12 | "package": "ncc build --source-map --license licenses.txt", 13 | "all": "npm run build && npm run format && npm run lint && npm run package" 14 | }, 15 | "repository": { 16 | "type": "git", 17 | "url": "git+https://github.com/scalacenter/sbt-dependency-submission.git" 18 | }, 19 | "keywords": [ 20 | "actions", 21 | "scala", 22 | "sbt", 23 | "dependency", 24 | "graph" 25 | ], 26 | "author": "The Scala Center", 27 | "license": "Apache-2.0", 28 | "dependencies": { 29 | "@actions/core": "^1.10.1", 30 | "@actions/exec": "^1.1.0", 31 | "@actions/github": "^6.0.0", 32 | "@actions/io": "^1.1.3" 33 | }, 34 | "devDependencies": { 35 | "@octokit/webhooks-types": "^7.5.1", 36 | "@types/node": "^17.0.30", 37 | "@vercel/ncc": "^0.38.1", 38 | "eslint-plugin-github": "^4.9.0", 39 | "js-yaml": "^4.1.0", 40 | "prettier": "2.8.8", 41 | "typescript": "^4.8.4" 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /project/build.properties: -------------------------------------------------------------------------------- 1 | sbt.version=1.10.11 2 | -------------------------------------------------------------------------------- /sbt-plugin/.gitignore: -------------------------------------------------------------------------------- 1 | target/ 2 | .bloop/ 3 | .idea/ 4 | .bsp/ 5 | .vscode/ 6 | .metals/ 7 | **/metals.sbt -------------------------------------------------------------------------------- 
/sbt-plugin/.scalafix.conf: -------------------------------------------------------------------------------- 1 | rules = [ 2 | ExplicitResultTypes, 3 | OrganizeImports 4 | ] 5 | 6 | ExplicitResultTypes { 7 | unsafeShortenNames = true 8 | } 9 | OrganizeImports { 10 | groupedImports = Explode 11 | expandRelative = true 12 | removeUnused = true # done already by RemoveUnused rule 13 | groups = [ 14 | "re:javax?\\." 15 | "scala." 16 | "scala.meta." 17 | "*" 18 | ] 19 | } -------------------------------------------------------------------------------- /sbt-plugin/.scalafmt.conf: -------------------------------------------------------------------------------- 1 | version = "3.8.3" 2 | runner.dialect = scala212source3 3 | maxColumn = 120 4 | align.preset = some 5 | rewrite.rules = [RedundantBraces, AvoidInfix, RedundantParens] 6 | align.stripMargin = true 7 | assumeStandardLibraryStripMargin = true 8 | project.git = true 9 | docstrings.style = keep 10 | newlines.beforeCurlyLambdaParams = multilineWithCaseOnly 11 | newlines.afterCurlyLambdaParams = squash 12 | newlines.implicitParamListModifierPrefer = before 13 | -------------------------------------------------------------------------------- /sbt-plugin/README.md: -------------------------------------------------------------------------------- 1 | # sbt-github-dependency-submission 2 | 3 | An sbt plugin that can extract the dependencies of your project and submit them to the Github Dependency submission API. 4 | 5 | It is not recommended and generally not useful to install this plugin manually, as it can only be used in a Github workflow. 6 | 7 | The easiest way to use this plugin is to set [scalacenter/sbt-dependency-submission](https://github.com/scalacenter/sbt-dependency-submission) up in your Github workflow.
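If the plugin does need to be added to a build directly (for instance to run its commands locally), a minimal sketch of the manual setup is shown below. The artifact coordinates come from this repository's build.sbt (organization ch.epfl.scala, module sbt-github-dependency-submission); the version number is only illustrative, not a confirmed release.

// project/plugins.sbt -- hypothetical manual installation; the Github workflow setup above remains the recommended path.
// "3.1.0" is an illustrative version: check the project's releases before pinning one.
addSbtPlugin("ch.epfl.scala" % "sbt-github-dependency-submission" % "3.1.0")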
8 | -------------------------------------------------------------------------------- /sbt-plugin/build.sbt: -------------------------------------------------------------------------------- 1 | def isRelease() = 2 | System.getenv("GITHUB_REPOSITORY") == "scalacenter/sbt-dependency-submission" && 3 | System.getenv("GITHUB_WORKFLOW") == "Release" 4 | 5 | def isCI = System.getenv("CI") != null 6 | 7 | inThisBuild( 8 | Seq( 9 | organization := "ch.epfl.scala", 10 | homepage := Some(url("https://github.com/scalacenter/sbt-dependency-submission")), 11 | onLoadMessage := s"Welcome to sbt-github-dependency-submission ${version.value}", 12 | licenses := List("Apache-2.0" -> url("http://www.apache.org/licenses/LICENSE-2.0")), 13 | developers := Developers.all, 14 | version ~= { dynVer => 15 | if (isRelease) dynVer 16 | else "3.2.0-SNAPSHOT" // only for local publishing 17 | }, 18 | // Scalafix settings 19 | semanticdbEnabled := true, 20 | semanticdbVersion := scalafixSemanticdb.revision 21 | ) 22 | ) 23 | 24 | val `sbt-github-dependency-submission` = project 25 | .in(file(".")) 26 | .enablePlugins(SbtPlugin, ContrabandPlugin, JsonCodecPlugin, BuildInfoPlugin) 27 | .settings( 28 | name := "sbt-github-dependency-submission", 29 | sbtVersion := "1.5.8", 30 | scalaVersion := "2.12.20", 31 | scalacOptions ++= Seq( 32 | "-deprecation", 33 | "-encoding", 34 | "UTF-8", 35 | "-feature", 36 | "-unchecked", 37 | "-Xfatal-warnings", 38 | "-Ywarn-unused-import" 39 | ), 40 | libraryDependencies ++= Seq( 41 | "com.eed3si9n" %% "gigahorse-asynchttpclient" % "0.7.0", 42 | "org.scalameta" %% "munit" % "1.1.0" % Test 43 | ), 44 | buildInfoKeys := Seq[BuildInfoKey](name, version, homepage), 45 | buildInfoPackage := "ch.epfl.scala", 46 | buildInfoObject := "SbtGithubDependencySubmission", 47 | scriptedLaunchOpts += s"-Dplugin.version=${version.value}", 48 | scriptedBufferLog := false, 49 | Compile / generateContrabands / contrabandFormatsForType := ContrabandConfig.getFormats, 50 | scriptedDependencies := { 51 | publishLocal.value 52 | } 53 | ) 54 | -------------------------------------------------------------------------------- /sbt-plugin/project/ContrabandConfig.scala: -------------------------------------------------------------------------------- 1 | import sbt.contraband.ast._ 2 | import sbt.contraband.CodecCodeGen 3 | 4 | object ContrabandConfig { 5 | 6 | /** Extract the only type parameter from a TpeRef */ 7 | def oneArg(tpe: Type): Type = { 8 | val pat = s"""${tpe.removeTypeParameters.name}[<\\[](.+?)[>\\]]""".r 9 | val pat(arg0) = tpe.name 10 | NamedType(arg0.split('.') toList) 11 | } 12 | 13 | /** Extract the two type parameters from a TpeRef */ 14 | def twoArgs(tpe: Type): List[Type] = { 15 | val pat = s"""${tpe.removeTypeParameters.name}[<\\[](.+?), (.+?)[>\\]]""".r 16 | val pat(arg0, arg1) = tpe.name 17 | NamedType(arg0.split('.') toList) :: NamedType(arg1.split('.') toList) :: Nil 18 | } 19 | 20 | /** sbt codecs */ 21 | val sbtCodecs: PartialFunction[String, Type => List[String]] = { 22 | // sbt-contraband should handle them by default 23 | case "Option" | "Set" | "scala.Vector" => tpe => getFormats(oneArg(tpe)) 24 | case "Map" | "Tuple2" | "scala.Tuple2" => tpe => twoArgs(tpe).flatMap(getFormats) 25 | case "Int" | "Long" => _ => Nil 26 | case "scalajson.ast.unsafe.JValue" | "sjsonnew.shaded.scalajson.ast.unsafe.JValue" => 27 | _ => List("sbt.internal.util.codec.JValueFormats") 28 | 29 | case "sbt.internal.bsp.BuildTargetIdentifier" => _ => List("sbt.internal.bsp.codec.BuildTargetIdentifierFormats") 30 | } 
31 | 32 | /** Returns the list of formats required to encode the given `TpeRef`. */ 33 | val getFormats: Type => List[String] = 34 | CodecCodeGen.extensibleFormatsForType { 35 | case tpe: Type if sbtCodecs.isDefinedAt(tpe.removeTypeParameters.name) => 36 | sbtCodecs(tpe.removeTypeParameters.name)(tpe) 37 | case other => CodecCodeGen.formatsForType(other) 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /sbt-plugin/project/Developers.scala: -------------------------------------------------------------------------------- 1 | import sbt._ 2 | 3 | object Developers { 4 | val adpi2: Developer = Developer( 5 | "adpi2", 6 | "Adrien Piquerez", 7 | "adrien.piquerez@gmail.com", 8 | url("https://github.com/adpi2/") 9 | ) 10 | 11 | val all = List(adpi2) 12 | } 13 | -------------------------------------------------------------------------------- /sbt-plugin/project/build.properties: -------------------------------------------------------------------------------- 1 | sbt.version=1.10.11 2 | -------------------------------------------------------------------------------- /sbt-plugin/project/plugins.sbt: -------------------------------------------------------------------------------- 1 | addSbtPlugin("com.github.sbt" % "sbt-ci-release" % "1.9.3") 2 | addSbtPlugin("org.scala-sbt" % "sbt-contraband" % "0.7.0") 3 | addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.14.2") 4 | addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.13.1") 5 | -------------------------------------------------------------------------------- /sbt-plugin/src/main/contraband/github-dependency-submission-api-0.contra: -------------------------------------------------------------------------------- 1 | package ch.epfl.scala.githubapi 2 | @target(Scala) 3 | @codecPackage("ch.epfl.scala.githubapi") 4 | @fullCodec("JsonProtocol") 5 | 6 | ## Github Dependency Submission API version 0 7 | ## Taken from https://gist.github.com/reiddraper/fdab2883db0f372c146d1a750fc1c43f 8 | 9 | type DependencySnapshot { 10 | version: Int! 11 | job: ch.epfl.scala.githubapi.Job! 12 | sha: String! ## sha of the Git commit 13 | ref: String! ## ref of the Git commit; example: "refs/heads/main" 14 | detector: ch.epfl.scala.githubapi.DetectorMetadata! 15 | metadata: raw"Map[String, sjsonnew.shaded.scalajson.ast.unsafe.JValue]"! 16 | manifests: raw"Map[String, ch.epfl.scala.githubapi.Manifest]"! 17 | scanned: String ## ISO8601Date 18 | } 19 | 20 | type Job { 21 | correlator: String! 22 | id: String! 23 | html_url: String 24 | } 25 | 26 | type DetectorMetadata { 27 | name: String! 28 | url: String! 29 | version: String! 30 | } 31 | 32 | type Manifest { 33 | name: String! 34 | file: ch.epfl.scala.githubapi.FileInfo 35 | metadata: raw"Map[String, sjsonnew.shaded.scalajson.ast.unsafe.JValue]"! 36 | resolved: raw"Map[String, ch.epfl.scala.githubapi.DependencyNode]"! 37 | } 38 | 39 | type FileInfo { 40 | source_location: String 41 | } 42 | 43 | ## A notation of whether a dependency is requested directly 44 | ## by this manifest, or is a dependency of another dependency. 45 | enum DependencyRelationship { 46 | direct 47 | indirect 48 | } 49 | 50 | ## A notation of whether the dependency is required for the primary 51 | ## build artifact (runtime), or is only used for development. 52 | ## Future versions of this specification may allow for more granular 53 | ## scopes, like `runtime:server`, `runtime:shipped`, 54 | ## `development:test`, `development:benchmark`. 
55 | enum DependencyScope { 56 | runtime 57 | development 58 | } 59 | 60 | type DependencyNode { 61 | package_url: String! 62 | metadata: raw"Map[String, sjsonnew.shaded.scalajson.ast.unsafe.JValue]"! 63 | relationship: ch.epfl.scala.githubapi.DependencyRelationship 64 | scope: ch.epfl.scala.githubapi.DependencyScope 65 | dependencies: [String] 66 | } 67 | 68 | type SnapshotResponse { 69 | id: Int!, 70 | created_at: String ## ISO8601Date 71 | } 72 | -------------------------------------------------------------------------------- /sbt-plugin/src/main/contraband/input.contra: -------------------------------------------------------------------------------- 1 | package ch.epfl.scala 2 | @target(Scala) 3 | @codecPackage("ch.epfl.scala") 4 | @fullCodec("JsonProtocol") 5 | 6 | enum OnFailure { 7 | error 8 | warning 9 | } 10 | 11 | ## Input of the githubGenerateSnapshot command 12 | type DependencySnapshotInput { 13 | onResolveFailure: ch.epfl.scala.OnFailure 14 | 15 | ## A set of modules to ignore. 16 | ## The name of module is composed of the name of the project and its binary version. 17 | ## Example: foo_2.13 18 | ignoredModules: [String] 19 | 20 | ## A set of sbt configurations to ignore. 21 | ## Examples: 22 | ## - "test" to ignore the test dependencies 23 | ## - "scala-doc-tool" to ignore the scaladoc dependencies 24 | ## - "scala-tool" to ignore the compiler dependencies 25 | ignoredConfigs: [String] 26 | 27 | ## The job correlator of the snapshot 28 | correlator: String 29 | } 30 | -------------------------------------------------------------------------------- /sbt-plugin/src/main/scala/ch/epfl/scala/AnalyzeDependencyGraph.scala: -------------------------------------------------------------------------------- 1 | package ch.epfl.scala 2 | 3 | import java.nio.file.Paths 4 | 5 | import scala.Console 6 | import scala.concurrent.Await 7 | import scala.concurrent.duration.Duration 8 | import scala.sys.process._ 9 | import scala.util.Failure 10 | import scala.util.Properties 11 | import scala.util.Success 12 | import scala.util.Try 13 | 14 | import ch.epfl.scala.GithubDependencyGraphPlugin.autoImport._ 15 | import ch.epfl.scala.githubapi._ 16 | import gigahorse.FullResponse 17 | import gigahorse.HttpClient 18 | import gigahorse.support.asynchttpclient.Gigahorse 19 | import sbt._ 20 | import sbt.internal.util.complete._ 21 | import sjsonnew.shaded.scalajson.ast.unsafe.JArray 22 | import sjsonnew.shaded.scalajson.ast.unsafe.JField 23 | import sjsonnew.shaded.scalajson.ast.unsafe.JObject 24 | import sjsonnew.shaded.scalajson.ast.unsafe.JString 25 | import sjsonnew.support.scalajson.unsafe.{Parser => JsonParser} 26 | 27 | object AnalyzeDependencyGraph { 28 | 29 | val help = 30 | "download and display CVEs alerts from Github, and analyze them against dependencies (use hub or gh local config or GIT_TOKEN env var to authenticate, requires githubGenerateSnapshot)" 31 | 32 | case class AnalysisParams(repository: Option[String]) 33 | 34 | val AnalyzeDependencies = "githubAnalyzeDependencies" 35 | private val AnalyzeDependenciesUsage = 36 | s"""$AnalyzeDependencies [pattern]""" 37 | private val AnalyzeDependenciesDetail = s"""Analyze the dependencies based on a search pattern: 38 | $help 39 | """ 40 | 41 | val commands: Seq[Command] = Seq( 42 | Command(AnalyzeDependencies, (AnalyzeDependenciesUsage, AnalyzeDependenciesDetail), AnalyzeDependenciesDetail)( 43 | parser 44 | )(analyzeDependencies) 45 | ) 46 | 47 | private def parser(state: State): Parser[AnalysisParams] = 48 | Parsers.any.*.map { raw => 49 | 
raw.mkString.trim.split(" ").toSeq match { 50 | case Seq("") | Nil => AnalysisParams(None) 51 | case Seq(arg) => AnalysisParams(Some(arg)) 52 | } 53 | }.failOnException 54 | 55 | private def analyzeDependencies(state: State, params: AnalysisParams): State = { 56 | for { 57 | repo <- params.repository.orElse(getGitHubRepo) 58 | vulnerabilities <- downloadAlerts(state, repo) match { 59 | case Success(v) => Some(v) 60 | case Failure(e) => 61 | state.log.error(s"Failed to download alerts: ${e.getMessage}") 62 | None 63 | } 64 | } yield analyzeCves(state, vulnerabilities) 65 | state 66 | } 67 | 68 | private def analyzeCves(state: State, vulnerabilities: Seq[Vulnerability]): Unit = { 69 | val artifacts = getAllArtifacts(state) 70 | vulnerabilities.foreach { v => 71 | val (badMatches, goodMatches) = vulnerabilityMatchesArtifacts(v, artifacts) 72 | println(v.toString) 73 | if (goodMatches.nonEmpty || badMatches.nonEmpty) { 74 | goodMatches.foreach(m => println(s" 🟢 ${m.replaceAll(".*@", "")}")) 75 | badMatches.foreach(m => println(s" 🔴 ${m.replaceAll(".*@", "")}")) 76 | } else { 77 | println(" 🎉 no match (dependency was probably removed)") 78 | } 79 | } 80 | } 81 | 82 | private def getStateOrWarn[T](state: State, key: AttributeKey[T], what: String, command: String): Option[T] = 83 | state.get(key).orElse { 84 | println(s"🟠 No $what found, please run '$command' first") 85 | None 86 | } 87 | 88 | private def downloadAlerts(state: State, repo: String): Try[Seq[Vulnerability]] = { 89 | val snapshotUrl = s"https://api.github.com/repos/$repo/dependabot/alerts" 90 | val request = 91 | Gigahorse.url(snapshotUrl).get.addHeaders("Authorization" -> s"token ${getGithubToken()}") 92 | state.log.info(s"Downloading alerts from $snapshotUrl") 93 | for { 94 | httpResp <- Try(Await.result(http.processFull(request), Duration.Inf)) 95 | vulnerabilities <- getVulnerabilities(httpResp) 96 | } yield { 97 | state.log.info(s"Downloaded ${vulnerabilities.size} alerts") 98 | vulnerabilities 99 | } 100 | } 101 | 102 | case class Vulnerability( 103 | packageId: String, 104 | vulnerableVersionRange: String, 105 | firstPatchedVersion: String, 106 | severity: String 107 | ) { 108 | def severityColor: String = severity match { 109 | case "critical" => Console.RED 110 | case "high" => Console.RED 111 | case "medium" => Console.YELLOW 112 | case "low" => Console.GREEN 113 | case _ => Console.RESET 114 | } 115 | 116 | def coloredSeverity: String = s"${severityColor}${severity}${Console.RESET}" 117 | 118 | def coloredPackageId: String = s"${Console.BLUE}$packageId${Console.RESET}" 119 | 120 | override def toString: String = 121 | s"${coloredPackageId} [ $vulnerableVersionRange ] fixed: $firstPatchedVersion $coloredSeverity" 122 | } 123 | 124 | private lazy val http: HttpClient = Gigahorse.http(Gigahorse.config) 125 | 126 | def getGithubManifest(state: State): Seq[Map[String, Manifest]] = 127 | getStateOrWarn(state, githubManifestsKey, "dependencies", SubmitDependencyGraph.Generate).toSeq 128 | 129 | private def getGithubTokenFromFile(ghConfigFile: File): Option[String] = { 130 | println(s"Extract token from ${ghConfigFile.getPath}") 131 | if (ghConfigFile.exists()) { 132 | IO.readLines(ghConfigFile).find(_.contains("oauth_token")).map(_.split(":").last.trim) 133 | } else None 134 | } 135 | 136 | private def getGithubToken(): String = { 137 | val ghConfigDir = 138 | Properties.envOrElse("GH_CONFIG_DIR", Paths.get(System.getProperty("user.home"), ".config", "gh").toString) 139 | val ghConfigFile = 
Paths.get(ghConfigDir).resolve("hosts.yml").toFile 140 | getGithubTokenFromFile(ghConfigFile).getOrElse { 141 | val ghConfigPath = 142 | Properties.envOrElse("HUB_CONFIG", Paths.get(System.getProperty("user.home"), ".config", "hub").toString) 143 | val hubConfigFile = Paths.get(ghConfigPath).toFile 144 | getGithubTokenFromFile(hubConfigFile).getOrElse(githubToken()) 145 | } 146 | } 147 | 148 | private def getAllArtifacts(state: State): Seq[String] = 149 | getGithubManifest(state).flatMap { manifests => 150 | manifests.flatMap { 151 | case (_, manifest) => 152 | manifest.resolved.values.toSeq.map(_.package_url) 153 | } 154 | }.distinct 155 | 156 | private def translateToSemVer(string: String): String = 157 | string.replaceAll("([a-zA-Z]+)", "0").replaceAll("([0-9]+)\\.([0-9]+)\\.([0-9]+)\\.([0-9]+)", "$1.$2.$3-$4") 158 | 159 | private def versionMatchesRange(versionStr: String, rangeStr: String): Boolean = { 160 | val range = rangeStr.replaceAll(" ", "").replace(",", " ") 161 | VersionNumber(translateToSemVer(versionStr)).matchesSemVer(SemanticSelector(translateToSemVer(range))) 162 | } 163 | 164 | private def vulnerabilityMatchesArtifacts( 165 | alert: Vulnerability, 166 | artifacts: Seq[String] 167 | ): (Seq[String], Seq[String]) = { 168 | val alertMavenPath = s"pkg:maven/${alert.packageId.replace(":", "/")}@" 169 | artifacts 170 | .filter(_.startsWith(alertMavenPath)) 171 | .partition { artifact => 172 | val version = artifact.replaceAll(".*@", "") 173 | versionMatchesRange(version, alert.vulnerableVersionRange) 174 | } 175 | } 176 | 177 | def getGitHubRepo: Option[String] = { 178 | val remoteUrl = "git config --get remote.origin.url".!!.trim 179 | val repoPattern = """(?:https://|git@)github\.com[:/](.+/.+)\.git""".r 180 | remoteUrl match { 181 | case repoPattern(repo) => Some(repo) 182 | case _ => None 183 | } 184 | } 185 | 186 | private def getVulnerabilities(httpResp: FullResponse): Try[Seq[Vulnerability]] = Try { 187 | httpResp.status match { 188 | case status if status / 100 == 2 => 189 | val json: JArray = JsonParser.parseFromByteBuffer(httpResp.bodyAsByteBuffer).get.asInstanceOf[JArray] 190 | json.value.collect { 191 | case obj: JObject if obj.value.collectFirst { case JField("state", JString("open")) => true }.isDefined => 192 | val securityVulnerability = 193 | obj.value.collectFirst { case JField("security_vulnerability", secVuln: JObject) => secVuln }.get.value 194 | val packageObj = 195 | securityVulnerability.collectFirst { case JField("package", pkg: JObject) => pkg }.get.value 196 | val firstPatchedVersion = securityVulnerability 197 | .collectFirst { case JField("first_patched_version", firstPatched: JObject) => firstPatched } 198 | .map(_.value.collectFirst { case JField("identifier", JString(ident)) => ident }.getOrElse("")) 199 | .getOrElse("") 200 | Vulnerability( 201 | packageObj.collectFirst { case JField("name", JString(name)) => name }.get, 202 | securityVulnerability.collectFirst { 203 | case JField("vulnerable_version_range", JString(range)) => range 204 | }.get, 205 | firstPatchedVersion, 206 | securityVulnerability.collectFirst { case JField("severity", JString(sev)) => sev }.get 207 | ) 208 | } 209 | case _ => 210 | val message = 211 | s"Unexpected status ${httpResp.status} ${httpResp.statusText} with body:\n${httpResp.bodyAsString}" 212 | throw new MessageOnlyException(message) 213 | } 214 | } 215 | 216 | private def githubToken(): String = Properties.envOrElse("GITHUB_TOKEN", "") 217 | } 218 | 
-------------------------------------------------------------------------------- /sbt-plugin/src/main/scala/ch/epfl/scala/GithubDependencyGraphPlugin.scala: -------------------------------------------------------------------------------- 1 | package ch.epfl.scala 2 | 3 | import java.nio.file.Paths 4 | 5 | import scala.collection.mutable 6 | import scala.util.Properties 7 | 8 | import ch.epfl.scala.githubapi._ 9 | import sbt.Scoped.richTaskSeq 10 | import sbt._ 11 | import sbt.internal.util.complete.Parser 12 | import sbt.internal.util.complete.Parsers 13 | import sbt.plugins.JvmPlugin 14 | import sjsonnew.shaded.scalajson.ast.unsafe.JString 15 | 16 | object GithubDependencyGraphPlugin extends AutoPlugin { 17 | private val runtimeConfigs = 18 | Set( 19 | Compile, 20 | Configurations.CompileInternal, 21 | Runtime, 22 | Configurations.RuntimeInternal, 23 | Provided, 24 | Optional, 25 | Configurations.System 26 | ) 27 | .map(_.toConfigRef) 28 | 29 | object autoImport { 30 | val githubSnapshotInputKey: AttributeKey[DependencySnapshotInput] = AttributeKey("githubSnapshotInput") 31 | val githubBuildFile: AttributeKey[githubapi.FileInfo] = AttributeKey("githubBuildFile") 32 | val githubManifestsKey: AttributeKey[Map[String, githubapi.Manifest]] = AttributeKey("githubDependencyManifests") 33 | val githubProjectsKey: AttributeKey[Seq[ProjectRef]] = AttributeKey("githubProjectRefs") 34 | val githubSnapshotFileKey: AttributeKey[File] = AttributeKey("githubSnapshotFile") 35 | 36 | val githubDependencyManifest: TaskKey[Option[githubapi.Manifest]] = taskKey( 37 | "The dependency manifest of the project" 38 | ) 39 | val githubStoreDependencyManifests: InputKey[StateTransform] = 40 | inputKey("Store the dependency manifests of all projects of a Scala version in the attribute map.") 41 | .withRank(KeyRanks.DTask) 42 | } 43 | 44 | import autoImport._ 45 | 46 | override def trigger = allRequirements 47 | override def requires: Plugins = JvmPlugin 48 | 49 | override def globalSettings: Seq[Setting[_]] = Def.settings( 50 | githubStoreDependencyManifests := storeManifestsTask.evaluated, 51 | Keys.commands ++= SubmitDependencyGraph.commands ++ AnalyzeDependencyGraph.commands 52 | ) 53 | 54 | override def projectSettings: Seq[Setting[_]] = Def.settings( 55 | githubDependencyManifest := manifestTask.value, 56 | githubDependencyManifest / Keys.aggregate := false 57 | ) 58 | 59 | private val scalaVersionParser = { 60 | import Parsers._ 61 | import Parser._ 62 | val validOpChars = Set('.', '-', '+') 63 | identifier( 64 | charClass(alphanum, "alphanum"), 65 | charClass(c => alphanum(c) || validOpChars.contains(c), "version character") 66 | ) 67 | } 68 | 69 | private def storeManifestsTask: Def.Initialize[InputTask[StateTransform]] = Def.inputTaskDyn { 70 | val scalaVersionInput = (Parsers.Space ~> scalaVersionParser).parsed 71 | val state = Keys.state.value 72 | val logger = Keys.streams.value.log 73 | 74 | val projectRefs = state 75 | .attributes(githubProjectsKey) 76 | .filter(ref => state.setting(ref / Keys.scalaVersion) == scalaVersionInput) 77 | .filter(ref => includeProject(ref, state, logger)) 78 | 79 | Def.task { 80 | val manifests: Map[String, Manifest] = projectRefs 81 | .map(ref => (ref / githubDependencyManifest).?) 
82 | .join 83 | .value 84 | .flatten 85 | .collect { case Some(manifest) => (manifest.name, manifest) } 86 | .toMap 87 | StateTransform { state => 88 | val oldManifests = state.attributes(githubManifestsKey) 89 | state.put(githubManifestsKey, oldManifests ++ manifests) 90 | } 91 | } 92 | } 93 | 94 | private def includeProject(projectRef: ProjectRef, state: State, logger: Logger): Boolean = { 95 | val ignoredModules = state.attributes(githubSnapshotInputKey).ignoredModules 96 | val moduleName = getModuleName(projectRef, state) 97 | val ignored = ignoredModules.contains(moduleName) 98 | if (!ignored) logger.info(s"Including dependency graph of $moduleName") 99 | else logger.info(s"Excluding dependency graph of $moduleName") 100 | !ignored 101 | } 102 | 103 | private def getModuleName(projectRef: ProjectRef, state: State): String = { 104 | val scalaVersion = state.setting(projectRef / Keys.artifactName / Keys.scalaVersion) 105 | val scalaBinaryVersion = state.setting(projectRef / Keys.artifactName / Keys.scalaBinaryVersion) 106 | val projectID = state.setting(projectRef / Keys.projectID) 107 | CrossVersion(scalaVersion, scalaBinaryVersion).apply(projectID).name 108 | } 109 | 110 | private def manifestTask: Def.Initialize[Task[Option[Manifest]]] = Def.task { 111 | // updateFull is needed to have information about callers and reconstruct dependency tree 112 | val reportResult = Keys.updateFull.result.value 113 | val projectID = Keys.projectID.value 114 | val root = Paths.get(Keys.loadedBuild.value.root).toAbsolutePath 115 | val scalaVersion = (Keys.artifactName / Keys.scalaVersion).value 116 | val scalaBinaryVersion = (Keys.artifactName / Keys.scalaBinaryVersion).value 117 | val crossVersion = CrossVersion.apply(scalaVersion, scalaBinaryVersion) 118 | val allDirectDependencies = Keys.allDependencies.value 119 | val baseDirectory = Keys.baseDirectory.value 120 | val logger = Keys.streams.value.log 121 | val state = Keys.state.value 122 | val thisProject = Keys.thisProject.value 123 | val internalConfigurationMap = Keys.internalConfigurationMap.value 124 | 125 | val inputOpt = state.get(githubSnapshotInputKey) 126 | val buildFileOpt = state.get(githubBuildFile) 127 | 128 | val onResolveFailure = inputOpt.flatMap(_.onResolveFailure) 129 | val ignoredConfigs = inputOpt.toSeq.flatMap(_.ignoredConfigs).toSet 130 | val moduleName = crossVersion(projectID).name 131 | 132 | // a reverse view of internalConfigurationMap (internal-test -> test) 133 | val reverseConfigurationMap = 134 | thisProject.configurations 135 | .map(c => internalConfigurationMap(c).name -> c.name) 136 | .filter { case (internal, c) => internal != c } 137 | .toMap 138 | 139 | def getReference(module: ModuleID): String = 140 | crossVersion(module) 141 | .withConfigurations(None) 142 | .withExtraAttributes(Map.empty) 143 | .toString 144 | 145 | def includeConfig(config: ConfigRef): Boolean = 146 | // if ignoredConfigs contain 'test' we should also ignore 'test-internal' 147 | if ( 148 | ignoredConfigs.contains(config.name) || reverseConfigurationMap.get(config.name).exists(ignoredConfigs.contains) 149 | ) { 150 | logger.info(s"Excluding config ${config.name} of ${moduleName} from its dependency graph") 151 | false 152 | } else true 153 | 154 | reportResult match { 155 | case Inc(cause) => 156 | val message = s"Failed to resolve the dependencies of $moduleName" 157 | onResolveFailure match { 158 | case Some(OnFailure.warning) => 159 | logger.warn(message) 160 | None 161 | case _ => 162 | logger.error(message) 163 | throw cause 164 | } 
165 | case Value(report) => 166 | val alreadySeen = mutable.Set[String]() 167 | val moduleReports = mutable.Buffer[(ModuleReport, ConfigRef)]() 168 | val allDependencies = mutable.Buffer[(String, String)]() 169 | for { 170 | configReport <- report.configurations 171 | if includeConfig(configReport.configuration) 172 | moduleReport <- configReport.modules 173 | moduleRef = getReference(moduleReport.module) 174 | if !moduleReport.evicted && !alreadySeen.contains(moduleRef) 175 | } { 176 | alreadySeen += moduleRef 177 | moduleReports += (moduleReport -> configReport.configuration) 178 | for (caller <- moduleReport.callers) 179 | allDependencies += (getReference(caller.caller) -> moduleRef) 180 | } 181 | 182 | val allDependenciesMap: Map[String, Vector[String]] = allDependencies.view 183 | .groupBy(_._1) 184 | .mapValues { 185 | _.map { case (_, dep) => dep }.toVector 186 | } 187 | val allDirectDependenciesRefs: Set[String] = allDirectDependencies.map(getReference).toSet 188 | 189 | val resolved = 190 | for ((moduleReport, configRef) <- moduleReports) 191 | yield { 192 | val moduleRef = getReference(moduleReport.module) 193 | val packageUrl = formatPackageUrl(moduleReport) 194 | val dependencies = allDependenciesMap.getOrElse(moduleRef, Vector.empty) 195 | val relationship = 196 | if (allDirectDependenciesRefs.contains(moduleRef)) DependencyRelationship.direct 197 | else DependencyRelationship.indirect 198 | val scope = 199 | if (isRuntime(configRef)) DependencyScope.runtime 200 | else DependencyScope.development 201 | val metadata = Map("config" -> JString(configRef.name)) 202 | val node = DependencyNode(packageUrl, metadata, Some(relationship), Some(scope), dependencies) 203 | moduleRef -> node 204 | } 205 | 206 | val projectModuleRef = getReference(projectID) 207 | val metadata = Map("baseDirectory" -> JString(baseDirectory.toString)) 208 | val manifest = githubapi.Manifest(projectModuleRef, buildFileOpt, metadata, resolved.toMap) 209 | Some(manifest) 210 | } 211 | } 212 | 213 | private def formatPackageUrl(moduleReport: ModuleReport): String = { 214 | val module = moduleReport.module 215 | val artifacts = moduleReport.artifacts.map { case (a, _) => a } 216 | val classifiers = artifacts.flatMap(_.classifier).filter(_ != "default") 217 | val packaging = if (classifiers.nonEmpty) "?" + classifiers.map(c => s"packaging=$c").mkString("&") else "" 218 | s"pkg:maven/${module.organization}/${module.name}@${module.revision}$packaging" 219 | } 220 | 221 | private def isRuntime(config: ConfigRef): Boolean = runtimeConfigs.contains(config) 222 | 223 | private def githubCIEnv(name: String): String = 224 | Properties.envOrNone(name).getOrElse { 225 | throw new MessageOnlyException(s"Missing environment variable $name. 
This task must run in a Github Action.") 226 | } 227 | } 228 | -------------------------------------------------------------------------------- /sbt-plugin/src/main/scala/ch/epfl/scala/SubmitDependencyGraph.scala: -------------------------------------------------------------------------------- 1 | package ch.epfl.scala 2 | 3 | import java.nio.file.Paths 4 | import java.time.Instant 5 | 6 | import scala.concurrent.Await 7 | import scala.concurrent.duration.Duration 8 | import scala.util.Properties 9 | import scala.util.Try 10 | 11 | import ch.epfl.scala.GithubDependencyGraphPlugin.autoImport._ 12 | import ch.epfl.scala.JsonProtocol._ 13 | import ch.epfl.scala.githubapi.JsonProtocol._ 14 | import ch.epfl.scala.githubapi._ 15 | import gigahorse.FullResponse 16 | import gigahorse.HttpClient 17 | import gigahorse.support.asynchttpclient.Gigahorse 18 | import sbt._ 19 | import sbt.internal.util.complete._ 20 | import sjsonnew.shaded.scalajson.ast.unsafe.JValue 21 | import sjsonnew.support.scalajson.unsafe.{Parser => JsonParser, _} 22 | 23 | object SubmitDependencyGraph { 24 | val Generate = "githubGenerateSnapshot" 25 | private val GenerateUsage = s"""$Generate {"ignoredModules":[], "ignoredConfig":[]}""" 26 | private val GenerateDetail = "Generate the dependency graph of a set of projects and scala versions" 27 | 28 | private val GenerateInternal = s"${Generate}Internal" 29 | private val InternalOnly = "internal usage only" 30 | 31 | val Submit = "githubSubmitSnapshot" 32 | private val SubmitDetail = "Submit the dependency graph to Github Dependency API." 33 | 34 | val commands: Seq[Command] = Seq( 35 | Command(Generate, (GenerateUsage, GenerateDetail), GenerateDetail)(inputParser)(generate), 36 | Command.command(GenerateInternal, InternalOnly, InternalOnly)(generateInternal), 37 | Command.command(Submit, SubmitDetail, SubmitDetail)(submit) 38 | ) 39 | 40 | private lazy val http: HttpClient = Gigahorse.http(Gigahorse.config) 41 | 42 | private def inputParser(state: State): Parser[DependencySnapshotInput] = 43 | Parsers.any.*.map { raw => 44 | val rawString = raw.mkString 45 | if (rawString.isEmpty) DependencySnapshotInput(None, Vector.empty, Vector.empty, Some("")) 46 | else 47 | JsonParser 48 | .parseFromString(rawString) 49 | .flatMap(Converter.fromJson[DependencySnapshotInput]) 50 | .get 51 | }.failOnException 52 | 53 | private def generate(state: State, input: DependencySnapshotInput): State = { 54 | val loadedBuild = state.setting(Keys.loadedBuild) 55 | // all project refs that have a Scala version 56 | val projectRefs = loadedBuild.allProjectRefs 57 | .map(_._1) 58 | .filter(ref => state.getSetting(ref / Keys.scalaVersion).isDefined) 59 | // all cross scala versions of those projects 60 | val scalaVersions = projectRefs 61 | .flatMap(projectRef => state.setting(projectRef / Keys.crossScalaVersions)) 62 | .distinct 63 | 64 | val root = Paths.get(loadedBuild.root).toAbsolutePath 65 | val workspace = Paths.get(githubWorkspace()).toAbsolutePath 66 | val buildFile = 67 | if (root.startsWith(workspace)) workspace.relativize(root).resolve("build.sbt") 68 | else root.resolve("build.sbt") 69 | state.log.info(s"Resolving snapshot of $buildFile") 70 | 71 | val initState = state 72 | .put(githubSnapshotInputKey, input) 73 | .put(githubBuildFile, githubapi.FileInfo(buildFile.toString)) 74 | .put(githubManifestsKey, Map.empty[String, Manifest]) 75 | .put(githubProjectsKey, projectRefs) 76 | 77 | val storeAllManifests = scalaVersions.flatMap { scalaVersion => 78 | Seq(s"++$scalaVersion", 
s"Global/${githubStoreDependencyManifests.key} $scalaVersion") 79 | } 80 | val commands = storeAllManifests :+ GenerateInternal 81 | commands.toList ::: initState 82 | } 83 | 84 | private def generateInternal(state: State): State = { 85 | val input = state.attributes(githubSnapshotInputKey) 86 | val snapshot = githubDependencySnapshot(state, input.correlator.getOrElse("")) 87 | val snapshotJson = CompactPrinter(Converter.toJsonUnsafe(snapshot)) 88 | val snapshotJsonFile = IO.withTemporaryFile("dependency-snapshot-", ".json", keepFile = true) { file => 89 | IO.write(file, snapshotJson) 90 | state.log.info(s"Dependency snapshot written to ${file.getAbsolutePath}") 91 | file 92 | } 93 | setGithubOutputs("snapshot-json-path" -> snapshotJsonFile.getAbsolutePath) 94 | state.put(githubSnapshotFileKey, snapshotJsonFile) 95 | } 96 | 97 | def submit(state: State): State = { 98 | checkGithubEnv() // fail if the Github CI environment 99 | val snapshotJsonFile = state 100 | .get(githubSnapshotFileKey) 101 | .getOrElse( 102 | throw new MessageOnlyException( 103 | "Missing snapshot file. This command must execute after the githubGenerateSnapshot command" 104 | ) 105 | ) 106 | val snapshotUrl = s"${githubApiUrl()}/repos/${githubRepository()}/dependency-graph/snapshots" 107 | val input = state.attributes(githubSnapshotInputKey) 108 | val job = githubJob(input.correlator.getOrElse("")) 109 | val request = Gigahorse 110 | .url(snapshotUrl) 111 | .post(snapshotJsonFile) 112 | .addHeaders( 113 | "Content-Type" -> "application/json", 114 | "Authorization" -> s"token ${githubToken()}" 115 | ) 116 | 117 | state.log.info(s"Submitting dependency snapshot of job $job to $snapshotUrl") 118 | val result = for { 119 | httpResp <- Try(Await.result(http.processFull(request), Duration.Inf)) 120 | snapshot <- getSnapshot(httpResp) 121 | } yield { 122 | state.log.info(s"Submitted successfully as $snapshotUrl/${snapshot.id}") 123 | setGithubOutputs( 124 | "submission-id" -> s"${snapshot.id}", 125 | "submission-api-url" -> s"${snapshotUrl}/${snapshot.id}" 126 | ) 127 | state 128 | } 129 | 130 | result.get 131 | } 132 | 133 | // https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#setting-an-output-parameter 134 | private def setGithubOutputs(outputs: (String, String)*): Unit = 135 | for (output <- githubOutput()) 136 | IO.writeLines(output, outputs.map { case (name, value) => s"${name}=${value}" }, append = true) 137 | 138 | private def getSnapshot(httpResp: FullResponse): Try[SnapshotResponse] = 139 | httpResp.status match { 140 | case status if status / 100 == 2 => 141 | JsonParser 142 | .parseFromByteBuffer(httpResp.bodyAsByteBuffer) 143 | .flatMap(Converter.fromJson[SnapshotResponse]) 144 | case status => 145 | val message = 146 | s"Unexpected status $status ${httpResp.statusText} with body:\n${httpResp.bodyAsString}" 147 | throw new MessageOnlyException(message) 148 | } 149 | 150 | private def githubDependencySnapshot(state: State, correlator: String): DependencySnapshot = { 151 | val detector = DetectorMetadata( 152 | SbtGithubDependencySubmission.name, 153 | SbtGithubDependencySubmission.homepage.map(_.toString).getOrElse(""), 154 | SbtGithubDependencySubmission.version 155 | ) 156 | val scanned = Instant.now 157 | val manifests = state.get(githubManifestsKey).get 158 | DependencySnapshot( 159 | 0, 160 | githubJob(correlator), 161 | githubSha(), 162 | githubRef(), 163 | detector, 164 | Map.empty[String, JValue], 165 | manifests, 166 | scanned.toString 167 | ) 168 | } 169 | 170 | 
private def githubJob(correlator: String): Job = { 171 | val id = githubRunId 172 | val html_url = 173 | for { 174 | serverUrl <- Properties.envOrNone("GITHUB_SERVER_URL") 175 | repository <- Properties.envOrNone("GITHUB_REPOSITORY") 176 | } yield s"$serverUrl/$repository/actions/runs/$id" 177 | Job(correlator, id, html_url) 178 | } 179 | 180 | private def checkGithubEnv(): Unit = { 181 | def check(name: String): Unit = Properties.envOrNone(name).orElse { 182 | throw new MessageOnlyException(s"Missing environment variable $name. This task must run in a Github Action.") 183 | } 184 | check("GITHUB_WORKSPACE") 185 | check("GITHUB_RUN_ID") 186 | check("GITHUB_SHA") 187 | check("GITHUB_REF") 188 | check("GITHUB_API_URL") 189 | check("GITHUB_REPOSITORY") 190 | check("GITHUB_TOKEN") 191 | check("GITHUB_OUTPUT") 192 | } 193 | 194 | private def githubWorkspace(): String = Properties.envOrElse("GITHUB_WORKSPACE", "") 195 | private def githubRunId(): String = Properties.envOrElse("GITHUB_RUN_ID", "") 196 | private def githubSha(): String = Properties.envOrElse("GITHUB_SHA", "") 197 | private def githubRef(): String = Properties.envOrElse("GITHUB_REF", "") 198 | 199 | private def githubApiUrl(): String = Properties.envOrElse("GITHUB_API_URL", "") 200 | private def githubRepository(): String = Properties.envOrElse("GITHUB_REPOSITORY", "") 201 | private def githubToken(): String = Properties.envOrElse("GITHUB_TOKEN", "") 202 | private def githubOutput(): Option[File] = Properties.envOrNone("GITHUB_OUTPUT").map(file) 203 | } 204 | -------------------------------------------------------------------------------- /sbt-plugin/src/sbt-test/dependency-manifest/coursier-manifest/build.sbt: -------------------------------------------------------------------------------- 1 | import ch.epfl.scala.githubapi.DependencyRelationship 2 | import ch.epfl.scala.githubapi.DependencyScope 3 | import ch.epfl.scala.githubapi.Manifest 4 | import sjsonnew.shaded.scalajson.ast.unsafe.JString 5 | 6 | val checkManifest = taskKey[Unit]("Check the Github manifest of a project") 7 | 8 | inThisBuild( 9 | Seq( 10 | organization := "ch.epfl.scala", 11 | version := "1.2.0-SNAPSHOT", 12 | useCoursier := true, 13 | scalaVersion := "2.12.20" 14 | ) 15 | ) 16 | 17 | lazy val p1 = project 18 | .in(file("p1")) 19 | .settings( 20 | libraryDependencies ++= Seq( 21 | "io.circe" %% "circe-generic" % "0.14.1", 22 | "org.tpolecat" %% "doobie-core" % "0.13.4", 23 | "org.scalatest" %% "scalatest" % "3.2.2" % Test 24 | ), 25 | checkManifest := { 26 | val manifest = githubDependencyManifest.value.get 27 | assert(manifest.name == "ch.epfl.scala:p1_2.12:1.2.0-SNAPSHOT") 28 | 29 | // all dependencies are defined 30 | assert(manifest.resolved.values.forall(n => n.dependencies.forall(manifest.resolved.contains))) 31 | 32 | checkDependency(manifest, "io.circe:circe-generic_2.12:0.14.1")( 33 | expectedDeps = Seq("com.chuusai:shapeless_2.12:2.3.7") 34 | ) 35 | checkDependency(manifest, "org.tpolecat:doobie-core_2.12:0.13.4")( 36 | expectedDeps = Seq("com.chuusai:shapeless_2.12:2.3.7") 37 | ) 38 | checkDependency(manifest, "com.chuusai:shapeless_2.12:2.3.7")( 39 | expectedRelationship = DependencyRelationship.indirect 40 | ) 41 | checkDependency(manifest, "org.scalatest:scalatest_2.12:3.2.2")( 42 | expectedScope = DependencyScope.development, 43 | expectedConfig = "test", 44 | expectedDeps = Seq("org.scalatest:scalatest-core_2.12:3.2.2") 45 | ) 46 | checkDependency(manifest, "org.scalatest:scalatest-core_2.12:3.2.2")( 47 | expectedRelationship = 
DependencyRelationship.indirect, 48 | expectedScope = DependencyScope.development, 49 | expectedConfig = "test" 50 | ) 51 | } 52 | ) 53 | 54 | lazy val p2 = project 55 | .in(file("p2")) 56 | .settings( 57 | libraryDependencies ++= Seq( 58 | "com.typesafe.akka" %% "akka-http" % "10.2.8" 59 | ), 60 | checkManifest := { 61 | val manifest = githubDependencyManifest.value.get 62 | assert(manifest.name == "ch.epfl.scala:p2_2.12:1.2.0-SNAPSHOT") 63 | 64 | // all dependencies are defined 65 | assert(manifest.resolved.values.forall(n => n.dependencies.forall(manifest.resolved.contains))) 66 | 67 | checkDependency(manifest, "com.typesafe.akka:akka-http_2.12:10.2.8")() 68 | checkDependency(manifest, "ch.epfl.scala:p1_2.12:1.2.0-SNAPSHOT")() 69 | 70 | // transitively depends on circe through p1 71 | checkDependency(manifest, "io.circe:circe-generic_2.12:0.14.1")( 72 | expectedRelationship = DependencyRelationship.indirect, 73 | expectedDeps = Seq("com.chuusai:shapeless_2.12:2.3.7") 74 | ) 75 | 76 | // p2 does not depend on scalatest 77 | assert(manifest.resolved.get("org.scalatest:scalatest_2.12:3.2.2").isEmpty) 78 | } 79 | ) 80 | .dependsOn(p1) 81 | 82 | def checkDependency(manifest: Manifest, name: String)( 83 | expectedRelationship: DependencyRelationship = DependencyRelationship.direct, 84 | expectedScope: DependencyScope = DependencyScope.runtime, 85 | expectedConfig: String = "compile", 86 | expectedDeps: Seq[String] = Seq.empty 87 | ): Unit = { 88 | val node = manifest.resolved(name) 89 | assert(node.package_url.startsWith("pkg:maven/"), s"Wrong package_url for node $name: ${node.package_url}") 90 | assert(node.relationship.contains(expectedRelationship), s"Wrong relationship for node $name: ${node.relationship}") 91 | assert(node.scope.contains(expectedScope), s"Wrong scope for node $name: ${node.scope}") 92 | val configurations = node.metadata.get("config").collect { case JString(c) => c } 93 | assert(configurations.contains(expectedConfig), s"Wrong config in metadata for node $name: $configurations") 94 | expectedDeps.foreach(d => assert(node.dependencies.contains(d), s"missing dependency $d in node $name")) 95 | } 96 | -------------------------------------------------------------------------------- /sbt-plugin/src/sbt-test/dependency-manifest/coursier-manifest/project/plugins.sbt: -------------------------------------------------------------------------------- 1 | val pluginVersion = sys.props("plugin.version") 2 | 3 | addSbtPlugin("ch.epfl.scala" % "sbt-github-dependency-submission" % pluginVersion) 4 | -------------------------------------------------------------------------------- /sbt-plugin/src/sbt-test/dependency-manifest/coursier-manifest/test: -------------------------------------------------------------------------------- 1 | > p1 / checkManifest 2 | > p2 / checkManifest 3 | -------------------------------------------------------------------------------- /sbt-plugin/src/sbt-test/dependency-manifest/default-scala-manifest/build.sbt: -------------------------------------------------------------------------------- 1 | import ch.epfl.scala.githubapi.DependencyRelationship 2 | import ch.epfl.scala.githubapi.DependencyScope 3 | import ch.epfl.scala.githubapi.Manifest 4 | import sjsonnew.shaded.scalajson.ast.unsafe.JString 5 | 6 | val checkManifest = taskKey[Unit]("Check the Github manifest of a project") 7 | 8 | // using the default scalaVersion 9 | inThisBuild( 10 | Seq( 11 | organization := "ch.epfl.scala", 12 | version := "1.2.0-SNAPSHOT" 13 | ) 14 | ) 15 | 16 | lazy val p1 = project 
17 | .in(file("p1")) 18 | .settings( 19 | checkManifest := { 20 | val manifest = githubDependencyManifest.value.get 21 | assert(manifest.name == "ch.epfl.scala:p1_2.12:1.2.0-SNAPSHOT") 22 | 23 | // all dependencies are defined 24 | assert(manifest.resolved.values.forall(n => n.dependencies.forall(manifest.resolved.contains))) 25 | 26 | checkDependency(manifest, "org.scala-lang:scala-library:2.12.14")() 27 | checkDependency(manifest, "org.scala-lang:scala-compiler:2.12.14")( 28 | expectedScope = DependencyScope.development, 29 | expectedConfig = "scala-tool" 30 | ) 31 | } 32 | ) 33 | 34 | def checkDependency(manifest: Manifest, name: String)( 35 | expectedRelationship: DependencyRelationship = DependencyRelationship.direct, 36 | expectedScope: DependencyScope = DependencyScope.runtime, 37 | expectedConfig: String = "compile", 38 | expectedDeps: Seq[String] = Seq.empty 39 | ): Unit = { 40 | val node = manifest.resolved(name) 41 | assert(node.package_url.startsWith("pkg:maven/"), s"Wrong package_url for node $name: ${node.package_url}") 42 | assert(node.relationship.contains(expectedRelationship), s"Wrong relationship for node $name: ${node.relationship}") 43 | assert(node.scope.contains(expectedScope), s"Wrong scope for node $name: ${node.scope}") 44 | val configurations = node.metadata.get("config").collect { case JString(c) => c } 45 | assert(configurations.contains(expectedConfig), s"Wrong config in metadata for node $name: $configurations") 46 | expectedDeps.foreach(d => assert(node.dependencies.contains(d), s"missing dependency $d in node $name")) 47 | } 48 | -------------------------------------------------------------------------------- /sbt-plugin/src/sbt-test/dependency-manifest/default-scala-manifest/project/plugins.sbt: -------------------------------------------------------------------------------- 1 | val pluginVersion = sys.props("plugin.version") 2 | 3 | addSbtPlugin("ch.epfl.scala" % "sbt-github-dependency-submission" % pluginVersion) 4 | -------------------------------------------------------------------------------- /sbt-plugin/src/sbt-test/dependency-manifest/default-scala-manifest/test: -------------------------------------------------------------------------------- 1 | > p1 / checkManifest 2 | -------------------------------------------------------------------------------- /sbt-plugin/src/sbt-test/dependency-manifest/ignore-scaladoc/build.sbt: -------------------------------------------------------------------------------- 1 | import ch.epfl.scala.githubapi.DependencyRelationship 2 | import ch.epfl.scala.githubapi.DependencyScope 3 | import ch.epfl.scala.githubapi.Manifest 4 | import ch.epfl.scala.DependencySnapshotInput 5 | import sjsonnew.shaded.scalajson.ast.unsafe.JString 6 | 7 | val checkScaladoc = taskKey[Unit]("Check scaladoc_3 is in the manifest ") 8 | val ignoreScaladoc = taskKey[StateTransform]("Ignore the scala-doc-tool in the submit input") 9 | val checkIgnoreScaladoc = taskKey[Unit]("Check scaladoc_3 is absent in the manifest") 10 | 11 | inThisBuild( 12 | Seq( 13 | organization := "ch.epfl.scala", 14 | version := "1.2.0-SNAPSHOT", 15 | scalaVersion := "3.2.1" 16 | ) 17 | ) 18 | 19 | Global / ignoreScaladoc := { 20 | val input = DependencySnapshotInput(None, Vector.empty, ignoredConfigs = Vector("scala-doc-tool"), correlator = None) 21 | StateTransform(state => state.put(githubSnapshotInputKey, input)) 22 | } 23 | 24 | lazy val p1 = project 25 | .in(file("p1")) 26 | .settings( 27 | checkScaladoc := { 28 | val manifest = githubDependencyManifest.value.get 29 | 
checkDependency(manifest, "org.scala-lang:scaladoc_3:3.2.1")( 30 | expectedRelationship = DependencyRelationship.direct, 31 | expectedScope = DependencyScope.development, 32 | expectedConfig = "scala-doc-tool" 33 | ) 34 | }, 35 | checkIgnoreScaladoc := { 36 | val manifest = githubDependencyManifest.value.get 37 | val suspicious = manifest.resolved.keys.filter(dep => dep.contains("scaladoc_3")) 38 | assert(suspicious.isEmpty, s"The manifest should not contain scaladoc_3, found ${suspicious.mkString(", ")}") 39 | } 40 | ) 41 | 42 | def checkDependency(manifest: Manifest, name: String)( 43 | expectedRelationship: DependencyRelationship = DependencyRelationship.direct, 44 | expectedScope: DependencyScope = DependencyScope.runtime, 45 | expectedConfig: String = "compile", 46 | expectedDeps: Seq[String] = Seq.empty 47 | ): Unit = { 48 | val node = manifest.resolved(name) 49 | assert(node.package_url.startsWith("pkg:maven/"), s"Wrong package_url for node $name: ${node.package_url}") 50 | assert(node.relationship.contains(expectedRelationship), s"Wrong relationship for node $name: ${node.relationship}") 51 | assert(node.scope.contains(expectedScope), s"Wrong scope for node $name: ${node.scope}") 52 | val configurations = node.metadata.get("config").collect { case JString(c) => c } 53 | assert(configurations.contains(expectedConfig), s"Wrong config in metadata for node $name: $configurations") 54 | expectedDeps.foreach(d => assert(node.dependencies.contains(d), s"missing dependency $d in node $name")) 55 | } 56 | -------------------------------------------------------------------------------- /sbt-plugin/src/sbt-test/dependency-manifest/ignore-scaladoc/project/plugins.sbt: -------------------------------------------------------------------------------- 1 | val pluginVersion = sys.props("plugin.version") 2 | 3 | addSbtPlugin("ch.epfl.scala" % "sbt-github-dependency-submission" % pluginVersion) 4 | -------------------------------------------------------------------------------- /sbt-plugin/src/sbt-test/dependency-manifest/ignore-scaladoc/test: -------------------------------------------------------------------------------- 1 | > p1 / checkScaladoc 2 | > Global / ignoreScaladoc 3 | > p1 / checkIgnoreScaladoc 4 | -------------------------------------------------------------------------------- /sbt-plugin/src/sbt-test/dependency-manifest/ignore-test/build.sbt: -------------------------------------------------------------------------------- 1 | import ch.epfl.scala.githubapi.DependencyRelationship 2 | import ch.epfl.scala.githubapi.DependencyScope 3 | import ch.epfl.scala.githubapi.Manifest 4 | import ch.epfl.scala.DependencySnapshotInput 5 | import sjsonnew.shaded.scalajson.ast.unsafe.JString 6 | 7 | val checkTest = taskKey[Unit]("Check munit_3 is in the manifest ") 8 | val ignoreTestConfig = taskKey[StateTransform]("Ignore the test config in the submit input") 9 | val checkIgnoreTest = taskKey[Unit]("Check scaladoc_3 is absent in the manifest") 10 | 11 | inThisBuild( 12 | Seq( 13 | organization := "ch.epfl.scala", 14 | version := "1.2.0-SNAPSHOT", 15 | scalaVersion := "3.2.1" 16 | ) 17 | ) 18 | 19 | Global / ignoreTestConfig := { 20 | val input = DependencySnapshotInput(None, Vector.empty, ignoredConfigs = Vector("test"), correlator = None) 21 | StateTransform(state => state.put(githubSnapshotInputKey, input)) 22 | } 23 | 24 | lazy val p1 = project 25 | .in(file("p1")) 26 | .settings( 27 | libraryDependencies += "org.scalameta" %% "munit" % "1.1.0" % Test, 28 | checkTest := { 29 | val manifest = 
githubDependencyManifest.value.get 30 | checkDependency(manifest, "org.scalameta:munit_3:1.1.0")( 31 | expectedRelationship = DependencyRelationship.direct, 32 | expectedScope = DependencyScope.development, 33 | expectedConfig = "test" 34 | ) 35 | }, 36 | checkIgnoreTest := { 37 | val manifest = githubDependencyManifest.value.get 38 | val suspicious = manifest.resolved.keys.filter(dep => dep.contains("munit_3")) 39 | assert(suspicious.isEmpty, s"The manifest should not contain munit_3, found ${suspicious.mkString(", ")}") 40 | } 41 | ) 42 | 43 | def checkDependency(manifest: Manifest, name: String)( 44 | expectedRelationship: DependencyRelationship = DependencyRelationship.direct, 45 | expectedScope: DependencyScope = DependencyScope.runtime, 46 | expectedConfig: String = "compile", 47 | expectedDeps: Seq[String] = Seq.empty 48 | ): Unit = { 49 | val node = manifest.resolved(name) 50 | assert(node.package_url.startsWith("pkg:maven/"), s"Wrong package_url for node $name: ${node.package_url}") 51 | assert(node.relationship.contains(expectedRelationship), s"Wrong relationship for node $name: ${node.relationship}") 52 | assert(node.scope.contains(expectedScope), s"Wrong scope for node $name: ${node.scope}") 53 | val configurations = node.metadata.get("config").collect { case JString(c) => c } 54 | assert(configurations.contains(expectedConfig), s"Wrong config in metadata for node $name: $configurations") 55 | expectedDeps.foreach(d => assert(node.dependencies.contains(d), s"missing dependency $d in node $name")) 56 | } 57 | -------------------------------------------------------------------------------- /sbt-plugin/src/sbt-test/dependency-manifest/ignore-test/project/plugins.sbt: -------------------------------------------------------------------------------- 1 | val pluginVersion = sys.props("plugin.version") 2 | 3 | addSbtPlugin("ch.epfl.scala" % "sbt-github-dependency-submission" % pluginVersion) 4 | -------------------------------------------------------------------------------- /sbt-plugin/src/sbt-test/dependency-manifest/ignore-test/test: -------------------------------------------------------------------------------- 1 | > p1 / checkTest 2 | > Global / ignoreTestConfig 3 | > p1 / checkIgnoreTest 4 | -------------------------------------------------------------------------------- /sbt-plugin/src/sbt-test/dependency-manifest/ivy-manifest/build.sbt: -------------------------------------------------------------------------------- 1 | import ch.epfl.scala.githubapi.DependencyRelationship 2 | import ch.epfl.scala.githubapi.DependencyScope 3 | import ch.epfl.scala.githubapi.Manifest 4 | import sjsonnew.shaded.scalajson.ast.unsafe.JString 5 | 6 | val checkManifest = taskKey[Unit]("Check the Github manifest of a project") 7 | 8 | inThisBuild( 9 | Seq( 10 | organization := "ch.epfl.scala", 11 | version := "1.2.0-SNAPSHOT", 12 | useCoursier := false, // use Ivy 13 | scalaVersion := "2.12.20" 14 | ) 15 | ) 16 | 17 | lazy val p1 = project 18 | .in(file("p1")) 19 | .settings( 20 | libraryDependencies ++= Seq( 21 | "io.circe" %% "circe-generic" % "0.14.1", 22 | "org.tpolecat" %% "doobie-core" % "0.13.4", 23 | "org.scalatest" %% "scalatest" % "3.2.2" % Test 24 | ), 25 | checkManifest := { 26 | val manifest = githubDependencyManifest.value.get 27 | assert(manifest.name == "ch.epfl.scala:p1_2.12:1.2.0-SNAPSHOT") 28 | 29 | // all dependencies are defined 30 | assert(manifest.resolved.values.forall(n => n.dependencies.forall(manifest.resolved.contains))) 31 | 32 | checkDependency(manifest, 
"io.circe:circe-generic_2.12:0.14.1")( 33 | expectedDeps = Seq("com.chuusai:shapeless_2.12:2.3.7") 34 | ) 35 | checkDependency(manifest, "org.tpolecat:doobie-core_2.12:0.13.4")( 36 | expectedDeps = Seq("com.chuusai:shapeless_2.12:2.3.7") 37 | ) 38 | checkDependency(manifest, "com.chuusai:shapeless_2.12:2.3.7")( 39 | expectedRelationship = DependencyRelationship.indirect 40 | ) 41 | checkDependency(manifest, "org.scalatest:scalatest_2.12:3.2.2")( 42 | expectedScope = DependencyScope.development, 43 | expectedConfig = "test", 44 | expectedDeps = Seq("org.scalatest:scalatest-core_2.12:3.2.2") 45 | ) 46 | checkDependency(manifest, "org.scalatest:scalatest-core_2.12:3.2.2")( 47 | expectedRelationship = DependencyRelationship.indirect, 48 | expectedScope = DependencyScope.development, 49 | expectedConfig = "test" 50 | ) 51 | } 52 | ) 53 | 54 | lazy val p2 = project 55 | .in(file("p2")) 56 | .settings( 57 | libraryDependencies ++= Seq( 58 | "com.typesafe.akka" %% "akka-http" % "10.2.8" 59 | ), 60 | checkManifest := { 61 | val manifest = githubDependencyManifest.value.get 62 | assert(manifest.name == "ch.epfl.scala:p2_2.12:1.2.0-SNAPSHOT") 63 | 64 | // all dependencies are defined 65 | assert(manifest.resolved.values.forall(n => n.dependencies.forall(manifest.resolved.contains))) 66 | 67 | checkDependency(manifest, "com.typesafe.akka:akka-http_2.12:10.2.8")() 68 | checkDependency(manifest, "ch.epfl.scala:p1_2.12:1.2.0-SNAPSHOT")() 69 | 70 | // transitively depends on circe through p1 71 | checkDependency(manifest, "io.circe:circe-generic_2.12:0.14.1")( 72 | expectedRelationship = DependencyRelationship.indirect, 73 | expectedDeps = Seq("com.chuusai:shapeless_2.12:2.3.7") 74 | ) 75 | 76 | // p2 does not depend on scalatest 77 | assert(manifest.resolved.get("org.scalatest:scalatest_2.12:3.2.2").isEmpty) 78 | } 79 | ) 80 | .dependsOn(p1) 81 | 82 | def checkDependency(manifest: Manifest, name: String)( 83 | expectedRelationship: DependencyRelationship = DependencyRelationship.direct, 84 | expectedScope: DependencyScope = DependencyScope.runtime, 85 | expectedConfig: String = "compile", 86 | expectedDeps: Seq[String] = Seq.empty 87 | ): Unit = { 88 | val node = manifest.resolved(name) 89 | assert(node.package_url.startsWith("pkg:maven/"), s"Wrong package_url for node $name: ${node.package_url}") 90 | assert(node.relationship.contains(expectedRelationship), s"Wrong relationship for node $name: ${node.relationship}") 91 | assert(node.scope.contains(expectedScope), s"Wrong scope for node $name: ${node.scope}") 92 | val configurations = node.metadata.get("config").collect { case JString(c) => c } 93 | assert(configurations.contains(expectedConfig), s"Wrong config in metadata for node $name: $configurations") 94 | expectedDeps.foreach(d => assert(node.dependencies.contains(d), s"missing dependency $d in node $name")) 95 | } 96 | -------------------------------------------------------------------------------- /sbt-plugin/src/sbt-test/dependency-manifest/ivy-manifest/project/plugins.sbt: -------------------------------------------------------------------------------- 1 | val pluginVersion = sys.props("plugin.version") 2 | 3 | addSbtPlugin("ch.epfl.scala" % "sbt-github-dependency-submission" % pluginVersion) 4 | -------------------------------------------------------------------------------- /sbt-plugin/src/sbt-test/dependency-manifest/ivy-manifest/test: -------------------------------------------------------------------------------- 1 | > p1 / checkManifest 2 | > p2 / checkManifest 3 | 
-------------------------------------------------------------------------------- /sbt-plugin/src/sbt-test/dependency-manifest/package-urls/build.sbt: -------------------------------------------------------------------------------- 1 | import ch.epfl.scala.githubapi.DependencyRelationship 2 | import ch.epfl.scala.githubapi.DependencyScope 3 | import ch.epfl.scala.githubapi.Manifest 4 | import sjsonnew.shaded.scalajson.ast.unsafe.JString 5 | 6 | val checkManifest = taskKey[Unit]("Check the Github manifest of a project") 7 | 8 | inThisBuild( 9 | Seq( 10 | organization := "ch.epfl.scala", 11 | version := "1.2.0-SNAPSHOT", 12 | // use Ivy because Coursier does not allow several classifier on the same dep 13 | useCoursier := false, 14 | scalaVersion := "2.12.20" 15 | ) 16 | ) 17 | 18 | lazy val p1 = project 19 | .in(file("p1")) 20 | .settings( 21 | libraryDependencies ++= Seq( 22 | ("com.google.inject" % "guice" % "4.0").classifier("no_aop"), 23 | ("org.lwjgl" % "lwjgl" % "3.3.1") 24 | .classifier("natives-windows") 25 | .classifier("natives-linux") 26 | .classifier("natives-macos") 27 | ), 28 | checkManifest := { 29 | val manifest = githubDependencyManifest.value.get 30 | 31 | checkDependency(manifest, "com.google.inject:guice:4.0")( 32 | expectedPackageUrl = "pkg:maven/com.google.inject/guice@4.0?packaging=no_aop" 33 | ) 34 | checkDependency(manifest, "org.lwjgl:lwjgl:3.3.1")( 35 | expectedPackageUrl = 36 | "pkg:maven/org.lwjgl/lwjgl@3.3.1?packaging=natives-linux&packaging=natives-macos&packaging=natives-windows" 37 | ) 38 | } 39 | ) 40 | 41 | def checkDependency(manifest: Manifest, name: String)(expectedPackageUrl: String): Unit = { 42 | val node = manifest.resolved(name) 43 | assert( 44 | node.package_url == expectedPackageUrl, 45 | s"Wrong package_url for node $name:\nfound: ${node.package_url}\nexpected:$expectedPackageUrl" 46 | ) 47 | } 48 | -------------------------------------------------------------------------------- /sbt-plugin/src/sbt-test/dependency-manifest/package-urls/project/plugins.sbt: -------------------------------------------------------------------------------- 1 | val pluginVersion = sys.props("plugin.version") 2 | 3 | addSbtPlugin("ch.epfl.scala" % "sbt-github-dependency-submission" % pluginVersion) 4 | -------------------------------------------------------------------------------- /sbt-plugin/src/sbt-test/dependency-manifest/package-urls/test: -------------------------------------------------------------------------------- 1 | > p1 / checkManifest 2 | -------------------------------------------------------------------------------- /sbt-plugin/src/sbt-test/dependency-manifest/scala3-manifest/build.sbt: -------------------------------------------------------------------------------- 1 | import ch.epfl.scala.githubapi.DependencyRelationship 2 | import ch.epfl.scala.githubapi.DependencyScope 3 | import ch.epfl.scala.githubapi.Manifest 4 | import sjsonnew.shaded.scalajson.ast.unsafe.JString 5 | 6 | val checkManifest = taskKey[Unit]("Check the Github manifest of a project") 7 | 8 | inThisBuild( 9 | Seq( 10 | organization := "ch.epfl.scala", 11 | version := "1.2.0-SNAPSHOT", 12 | scalaVersion := "3.1.0" 13 | ) 14 | ) 15 | 16 | lazy val p1 = project 17 | .in(file("p1")) 18 | .settings( 19 | libraryDependencies ++= Seq( 20 | "io.circe" %% "circe-core" % "0.14.1" 21 | ), 22 | checkManifest := { 23 | val manifest = githubDependencyManifest.value.get 24 | assert(manifest.name == "ch.epfl.scala:p1_3:1.2.0-SNAPSHOT") 25 | 26 | // all dependencies are defined 27 | 
assert(manifest.resolved.values.forall(n => n.dependencies.forall(manifest.resolved.contains))) 28 | 29 | checkDependency(manifest, "io.circe:circe-core_3:0.14.1")( 30 | expectedDeps = Seq("org.scala-lang:scala3-library_3:3.1.0") 31 | ) 32 | checkDependency(manifest, "org.scala-lang:scala3-library_3:3.1.0")() 33 | checkDependency(manifest, "org.scala-lang:scala3-compiler_3:3.1.0")( 34 | expectedConfig = "scala-doc-tool", 35 | expectedScope = DependencyScope.development 36 | ) 37 | } 38 | ) 39 | 40 | def checkDependency(manifest: Manifest, name: String)( 41 | expectedRelationship: DependencyRelationship = DependencyRelationship.direct, 42 | expectedScope: DependencyScope = DependencyScope.runtime, 43 | expectedConfig: String = "compile", 44 | expectedDeps: Seq[String] = Seq.empty 45 | ): Unit = { 46 | val node = manifest.resolved(name) 47 | assert(node.package_url.startsWith("pkg:maven/"), s"Wrong package_url for node $name: ${node.package_url}") 48 | assert(node.relationship.contains(expectedRelationship), s"Wrong relationship for node $name: ${node.relationship}") 49 | assert(node.scope.contains(expectedScope), s"Wrong scope for node $name: ${node.scope}") 50 | val configurations = node.metadata.get("config").collect { case JString(c) => c } 51 | assert(configurations.contains(expectedConfig), s"Wrong config in metadata for node $name: $configurations") 52 | expectedDeps.foreach(d => assert(node.dependencies.contains(d), s"missing dependency $d in node $name")) 53 | } 54 | -------------------------------------------------------------------------------- /sbt-plugin/src/sbt-test/dependency-manifest/scala3-manifest/project/plugins.sbt: -------------------------------------------------------------------------------- 1 | val pluginVersion = sys.props("plugin.version") 2 | 3 | addSbtPlugin("ch.epfl.scala" % "sbt-github-dependency-submission" % pluginVersion) 4 | -------------------------------------------------------------------------------- /sbt-plugin/src/sbt-test/dependency-manifest/scala3-manifest/test: -------------------------------------------------------------------------------- 1 | > p1 / checkManifest 2 | -------------------------------------------------------------------------------- /sbt-plugin/src/sbt-test/generate-snapshot/generate-snapshot/build.sbt: -------------------------------------------------------------------------------- 1 | import scala.util.Properties 2 | import sbt.internal.util.complete.Parsers._ 3 | 4 | val checkManifests = inputKey[Unit]("Check the number of manifests") 5 | 6 | inThisBuild( 7 | Seq( 8 | organization := "ch.epfl.scala", 9 | version := "1.2.0-SNAPSHOT", 10 | // use Ivy because Coursier does not allow several classifier on the same dep 11 | useCoursier := false, 12 | scalaVersion := "2.13.8" 13 | ) 14 | ) 15 | 16 | val a = project 17 | .in(file(".")) 18 | .settings( 19 | scalaVersion := "2.13.8", 20 | crossScalaVersions := Seq( 21 | "2.12.16", 22 | "2.13.8", 23 | "3.1.3" 24 | ), 25 | libraryDependencies ++= Seq( 26 | // a dependency with many classifiers 27 | ("org.lwjgl" % "lwjgl" % "3.3.1") 28 | .classifier("natives-windows") 29 | .classifier("natives-linux") 30 | .classifier("natives-macos") 31 | ) 32 | ) 33 | 34 | // b is not cross-compiled 35 | // but we should still be able to resolve the manifests of the build on 2.12.16 and 3.1.3 36 | // this pattern is taken from scalameta/metals where metals, not cross-compiled, depends on mtags 37 | // which is cross-compiled 38 | val b = project 39 | .in(file("b")) 40 | .settings( 41 | scalaVersion := 
"2.13.8" 42 | ) 43 | .dependsOn(a) 44 | 45 | Global / checkManifests := { 46 | val logger = streams.value.log 47 | val expectedSize: Int = (Space ~> NatBasic).parsed 48 | val manifests = state.value.get(githubManifestsKey).getOrElse { 49 | throw new MessageOnlyException(s"Not found ${githubManifestsKey.label} attribute") 50 | } 51 | logger.info(s"found ${manifests.size} manifests") 52 | assert( 53 | manifests.size == expectedSize, 54 | s"expected $expectedSize manifests, found ${manifests.size}" 55 | ) 56 | } 57 | -------------------------------------------------------------------------------- /sbt-plugin/src/sbt-test/generate-snapshot/generate-snapshot/project/plugins.sbt: -------------------------------------------------------------------------------- 1 | val pluginVersion = sys.props("plugin.version") 2 | 3 | addSbtPlugin("ch.epfl.scala" % "sbt-github-dependency-submission" % pluginVersion) 4 | -------------------------------------------------------------------------------- /sbt-plugin/src/sbt-test/generate-snapshot/generate-snapshot/test: -------------------------------------------------------------------------------- 1 | > 'githubGenerateSnapshot {}' 2 | > Global / checkManifests 4 3 | 4 | > 'githubGenerateSnapshot {"ignoredModules":["a_2.13", "b_2.13"]}' 5 | > Global / checkManifests 2 6 | 7 | > 'githubGenerateSnapshot {"ignoredModules":["a_2.12", "a_2.13", "b_2.13"]}' 8 | > Global / checkManifests 1 9 | 10 | > 'githubGenerateSnapshot {"ignoredModules":[]}' 11 | > Global / checkManifests 4 12 | -------------------------------------------------------------------------------- /sbt-plugin/src/sbt-test/generate-snapshot/resolve-failure/build.sbt: -------------------------------------------------------------------------------- 1 | import scala.util.Properties 2 | import sbt.internal.util.complete.Parsers._ 3 | 4 | val checkManifests = inputKey[Unit]("Check the number of manifests") 5 | 6 | inThisBuild( 7 | Seq( 8 | organization := "ch.epfl.scala", 9 | version := "1.2.0-SNAPSHOT", 10 | // use Ivy because Coursier does not allow several classifier on the same dep 11 | useCoursier := false, 12 | scalaVersion := "2.13.8" 13 | ) 14 | ) 15 | 16 | val a = project 17 | .in(file(".")) 18 | .settings( 19 | scalaVersion := "2.13.8" 20 | ) 21 | 22 | // Update on b fails, because b on 2.12.16 depends on a on 2.13.8 23 | val b = project 24 | .in(file("b")) 25 | .settings( 26 | scalaVersion := "2.12.16" 27 | ) 28 | .dependsOn(a) 29 | 30 | Global / checkManifests := { 31 | val logger = streams.value.log 32 | val expectedSize: Int = (Space ~> NatBasic).parsed 33 | val manifests = state.value.get(githubManifestsKey).getOrElse { 34 | throw new MessageOnlyException(s"Not found ${githubManifestsKey.label} attribute") 35 | } 36 | logger.info(s"found ${manifests.size} manifests") 37 | assert( 38 | manifests.size == expectedSize, 39 | s"expected $expectedSize manifests, found ${manifests.size}" 40 | ) 41 | } 42 | -------------------------------------------------------------------------------- /sbt-plugin/src/sbt-test/generate-snapshot/resolve-failure/project/plugins.sbt: -------------------------------------------------------------------------------- 1 | val pluginVersion = sys.props("plugin.version") 2 | 3 | addSbtPlugin("ch.epfl.scala" % "sbt-github-dependency-submission" % pluginVersion) 4 | -------------------------------------------------------------------------------- /sbt-plugin/src/sbt-test/generate-snapshot/resolve-failure/test: 
-------------------------------------------------------------------------------- 1 | -> 'githubGenerateSnapshot {}' 2 | -> 'Global / githubGenerateSnapshot {"onResolveFailure": "error"}' 3 | 4 | > 'githubGenerateSnapshot {"onResolveFailure": "warning"}' 5 | > Global / checkManifests 1 6 | -------------------------------------------------------------------------------- /sbt-plugin/src/sbt-test/submit-snapshot/submit-snapshot/build.sbt: -------------------------------------------------------------------------------- 1 | import scala.util.Properties 2 | import sbt.internal.util.complete.Parsers._ 3 | 4 | val checkManifests = inputKey[Unit]("Check the number of manifests") 5 | 6 | inThisBuild( 7 | Seq( 8 | organization := "ch.epfl.scala", 9 | version := "1.2.0-SNAPSHOT", 10 | // use Ivy because Coursier does not allow several classifier on the same dep 11 | useCoursier := false, 12 | scalaVersion := "2.13.8" 13 | ) 14 | ) 15 | 16 | val a = project 17 | .in(file(".")) 18 | .settings( 19 | scalaVersion := "2.13.8", 20 | crossScalaVersions := Seq( 21 | "2.12.16", 22 | "2.13.8", 23 | "3.1.3" 24 | ), 25 | libraryDependencies ++= Seq( 26 | // a dependency with many classifiers 27 | ("org.lwjgl" % "lwjgl" % "3.3.1") 28 | .classifier("natives-windows") 29 | .classifier("natives-linux") 30 | .classifier("natives-macos") 31 | ) 32 | ) 33 | 34 | // b is not cross-compiled 35 | // but we should still be able to resolve the manifests of the build on 2.12.16 and 3.1.3 36 | // this pattern is taken from scalameta/metals where metals, not cross-compiled, depends on mtags 37 | // which is cross-compiled 38 | val b = project 39 | .in(file("b")) 40 | .settings( 41 | scalaVersion := "2.13.8" 42 | ) 43 | .dependsOn(a) 44 | -------------------------------------------------------------------------------- /sbt-plugin/src/sbt-test/submit-snapshot/submit-snapshot/project/plugins.sbt: -------------------------------------------------------------------------------- 1 | val pluginVersion = sys.props("plugin.version") 2 | 3 | addSbtPlugin("ch.epfl.scala" % "sbt-github-dependency-submission" % pluginVersion) 4 | -------------------------------------------------------------------------------- /sbt-plugin/src/sbt-test/submit-snapshot/submit-snapshot/test: -------------------------------------------------------------------------------- 1 | -> 'githubSubmitSnapshot' 2 | > 'githubGenerateSnapshot {}' 3 | > 'githubSubmitSnapshot' 4 | -------------------------------------------------------------------------------- /sbt-plugin/src/test/scala/ch/epfl/scala/JsonProtocolTests.scala: -------------------------------------------------------------------------------- 1 | package ch.epfl.scala 2 | 3 | import munit.FunSuite 4 | import sjsonnew.shaded.scalajson.ast.unsafe.JField 5 | import sjsonnew.shaded.scalajson.ast.unsafe.JNumber 6 | import sjsonnew.shaded.scalajson.ast.unsafe.JObject 7 | import sjsonnew.shaded.scalajson.ast.unsafe.JString 8 | import sjsonnew.shaded.scalajson.ast.unsafe.JValue 9 | import sjsonnew.support.scalajson.unsafe.Converter 10 | import sjsonnew.support.scalajson.unsafe.Parser 11 | 12 | class JsonProtocolTests extends FunSuite { 13 | test("encode metadata") { 14 | import ch.epfl.scala.githubapi.JsonProtocol._ 15 | val metadata = Map("key1" -> JString("value1"), "key2" -> JNumber(1)) 16 | val obtained = Converter.toJson(metadata).get 17 | val expected = JObject(JField("key1", JString("value1")), JField("key2", JNumber(1))) 18 | assertEquals(obtained, expected) 19 | } 20 | 21 | test("decode metadata") { 22 | import 
ch.epfl.scala.githubapi.JsonProtocol._ 23 | val metadata = JObject(JField("key1", JString("value1")), JField("key2", JNumber(1))) 24 | val obtained = Converter.fromJson[Map[String, JValue]](metadata).get 25 | val expected = Map("key1" -> JString("value1"), "key2" -> JNumber(1)) 26 | assertEquals(obtained, expected) 27 | } 28 | 29 | test("decode empty input") { 30 | import ch.epfl.scala.JsonProtocol._ 31 | val raw = Parser.parseUnsafe("{}") 32 | val obtained = Converter.fromJson[DependencySnapshotInput](raw).get 33 | val expected = DependencySnapshotInput(None, Vector.empty, Vector.empty, None) 34 | assertEquals(obtained, expected) 35 | } 36 | 37 | test("decode input with onResolveFailure: warning") { 38 | import ch.epfl.scala.JsonProtocol._ 39 | val raw = Parser.parseUnsafe("""{"onResolveFailure": "warning"}""") 40 | val obtained = Converter.fromJson[DependencySnapshotInput](raw).get 41 | val expected = DependencySnapshotInput(Some(OnFailure.warning), Vector.empty, Vector.empty, None) 42 | assertEquals(obtained, expected) 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /src/main.ts: -------------------------------------------------------------------------------- 1 | import * as cli from '@actions/exec' 2 | import * as core from '@actions/core' 3 | import * as io from '@actions/io' 4 | import * as github from '@actions/github' 5 | import * as crypto from 'crypto' 6 | import * as fs from 'fs' 7 | import * as fsPromises from 'fs/promises' 8 | import * as path from 'path' 9 | import type { PullRequestEvent } from '@octokit/webhooks-types' 10 | 11 | async function run(): Promise { 12 | try { 13 | const token = core.getInput('token') 14 | core.setSecret(token) 15 | 16 | const workingDirInput = core.getInput('working-directory') 17 | const workingDir = workingDirInput.length === 0 ? '.' : workingDirInput 18 | const projectDir = path.join(workingDir, 'project') 19 | if (!fs.existsSync(projectDir)) { 20 | core.setFailed(`${workingDir} is not a valid sbt project: missing folder '${projectDir}'.`) 21 | return 22 | } 23 | 24 | const uuid = crypto.randomUUID() 25 | const pluginFile = path.join(projectDir, `github-dependency-submission-${uuid}.sbt`) 26 | 27 | const pluginVersion = core.getInput('sbt-plugin-version') 28 | const pluginDep = `addSbtPlugin("ch.epfl.scala" % "sbt-github-dependency-submission" % "${pluginVersion}")` 29 | await fsPromises.writeFile(pluginFile, pluginDep) 30 | // check that sbt is installed 31 | await io.which('sbt', true) 32 | 33 | const ignoredModules = core 34 | .getInput('modules-ignore') 35 | .split(' ') 36 | .filter(value => value.length > 0) 37 | 38 | const ignoredConfigs = core 39 | .getInput('configs-ignore') 40 | .split(' ') 41 | .filter(value => value.length > 0) 42 | 43 | const onResolveFailure = core.getInput('on-resolve-failure') 44 | if (!['error', 'warning'].includes(onResolveFailure)) { 45 | core.setFailed( 46 | `Invalid on-resolve-failure input. Should be 'error' or 'warning', found ${onResolveFailure}.`, 47 | ) 48 | return 49 | } 50 | 51 | const correlatorInput = core.getInput('correlator') 52 | const correlator = correlatorInput 53 | ? 
correlatorInput 54 | : `${github.context.workflow}_${github.context.job}_${github.context.action}` 55 | 56 | const input = { ignoredModules, ignoredConfigs, onResolveFailure, correlator } 57 | 58 | if (github.context.eventName === 'pull_request') { 59 | core.info('pull request, resetting sha') 60 | const payload = github.context.payload as PullRequestEvent 61 | core.info(`setting sha to: ${payload.pull_request.head.sha}`) 62 | process.env['GITHUB_SHA'] = payload.pull_request.head.sha 63 | } 64 | 65 | process.env['GITHUB_TOKEN'] = token 66 | await cli.exec( 67 | 'sbt', 68 | ['--batch', `githubGenerateSnapshot ${JSON.stringify(input)}; githubSubmitSnapshot`], 69 | { 70 | cwd: workingDir, 71 | }, 72 | ) 73 | } catch (error) { 74 | if (error instanceof Error) { 75 | core.setFailed(error) 76 | } else { 77 | core.setFailed(`unknown error: ${error}`) 78 | } 79 | } 80 | } 81 | 82 | run() 83 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "es6", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019' or 'ESNEXT'. */ 4 | "module": "commonjs", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', or 'ESNext'. */ 5 | "outDir": "./lib", /* Redirect output structure to the directory. */ 6 | "rootDir": "./src", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */ 7 | "strict": true, /* Enable all strict type-checking options. */ 8 | "noImplicitAny": true, /* Raise error on expressions and declarations with an implied 'any' type. */ 9 | "esModuleInterop": true /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */ 10 | }, 11 | "exclude": ["node_modules", "**/*.test.ts"] 12 | } 13 | --------------------------------------------------------------------------------