├── .eslintrc.json
├── .github
├── policyCheck.png
└── policyReport.png
├── .gitignore
├── .prettierignore
├── .prettierrc.json
├── LICENSE
├── README.md
├── action.yml
├── dist
├── index.js
├── index.js.map
└── sourcemap-register.js
├── jenkinsfile_dev
├── jenkinsfile_pop_detect
├── jenkinsfile_release
├── jest.config.js
├── package-lock.json
├── package.json
├── src
├── _namespaces
│ └── Github.ts
├── application-constants.ts
├── blackduck-api.ts
├── comment.ts
├── detect
│ ├── detect-manager.ts
│ ├── exit-codes.ts
│ ├── report.ts
│ └── reporting.ts
├── github
│ ├── check.ts
│ ├── github-context.ts
│ └── upload-artifacts.ts
├── inputs.ts
└── main.ts
├── tests
└── unit
│ └── github-context.test.ts
└── tsconfig.json
/.eslintrc.json:
--------------------------------------------------------------------------------
1 | {
2 | "env": {
3 | "browser": true,
4 | "es2021": true
5 | },
6 | "extends": [
7 | "eslint:recommended",
8 | "plugin:@typescript-eslint/recommended"
9 | ],
10 | "parser": "@typescript-eslint/parser",
11 | "parserOptions": {
12 | "ecmaVersion": 13,
13 | "sourceType": "module"
14 | },
15 | "plugins": [
16 | "@typescript-eslint"
17 | ],
18 | "rules": {
19 | }
20 | }
21 |
--------------------------------------------------------------------------------
/.github/policyCheck.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/synopsys-sig/detect-action/8f181c3af220674c9c50bd1d44c24c69dbacbb42/.github/policyCheck.png
--------------------------------------------------------------------------------
/.github/policyReport.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/synopsys-sig/detect-action/8f181c3af220674c9c50bd1d44c24c69dbacbb42/.github/policyReport.png
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | node_modules
2 | lib
3 |
4 | **.DS_Store
5 |
6 | .idea
--------------------------------------------------------------------------------
/.prettierignore:
--------------------------------------------------------------------------------
1 | dist/
2 | lib/
3 | node_modules/
--------------------------------------------------------------------------------
/.prettierrc.json:
--------------------------------------------------------------------------------
1 | {
2 | "printWidth": 999,
3 | "tabWidth": 2,
4 | "useTabs": false,
5 | "semi": false,
6 | "singleQuote": true,
7 | "trailingComma": "none",
8 | "bracketSpacing": true,
9 | "arrowParens": "avoid"
10 | }
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 |
2 | Apache License
3 | Version 2.0, January 2004
4 | http://www.apache.org/licenses/
5 |
6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
7 |
8 | 1. Definitions.
9 |
10 | "License" shall mean the terms and conditions for use, reproduction,
11 | and distribution as defined by Sections 1 through 9 of this document.
12 |
13 | "Licensor" shall mean the copyright owner or entity authorized by
14 | the copyright owner that is granting the License.
15 |
16 | "Legal Entity" shall mean the union of the acting entity and all
17 | other entities that control, are controlled by, or are under common
18 | control with that entity. For the purposes of this definition,
19 | "control" means (i) the power, direct or indirect, to cause the
20 | direction or management of such entity, whether by contract or
21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
22 | outstanding shares, or (iii) beneficial ownership of such entity.
23 |
24 | "You" (or "Your") shall mean an individual or Legal Entity
25 | exercising permissions granted by this License.
26 |
27 | "Source" form shall mean the preferred form for making modifications,
28 | including but not limited to software source code, documentation
29 | source, and configuration files.
30 |
31 | "Object" form shall mean any form resulting from mechanical
32 | transformation or translation of a Source form, including but
33 | not limited to compiled object code, generated documentation,
34 | and conversions to other media types.
35 |
36 | "Work" shall mean the work of authorship, whether in Source or
37 | Object form, made available under the License, as indicated by a
38 | copyright notice that is included in or attached to the work
39 | (an example is provided in the Appendix below).
40 |
41 | "Derivative Works" shall mean any work, whether in Source or Object
42 | form, that is based on (or derived from) the Work and for which the
43 | editorial revisions, annotations, elaborations, or other modifications
44 | represent, as a whole, an original work of authorship. For the purposes
45 | of this License, Derivative Works shall not include works that remain
46 | separable from, or merely link (or bind by name) to the interfaces of,
47 | the Work and Derivative Works thereof.
48 |
49 | "Contribution" shall mean any work of authorship, including
50 | the original version of the Work and any modifications or additions
51 | to that Work or Derivative Works thereof, that is intentionally
52 | submitted to Licensor for inclusion in the Work by the copyright owner
53 | or by an individual or Legal Entity authorized to submit on behalf of
54 | the copyright owner. For the purposes of this definition, "submitted"
55 | means any form of electronic, verbal, or written communication sent
56 | to the Licensor or its representatives, including but not limited to
57 | communication on electronic mailing lists, source code control systems,
58 | and issue tracking systems that are managed by, or on behalf of, the
59 | Licensor for the purpose of discussing and improving the Work, but
60 | excluding communication that is conspicuously marked or otherwise
61 | designated in writing by the copyright owner as "Not a Contribution."
62 |
63 | "Contributor" shall mean Licensor and any individual or Legal Entity
64 | on behalf of whom a Contribution has been received by Licensor and
65 | subsequently incorporated within the Work.
66 |
67 | 2. Grant of Copyright License. Subject to the terms and conditions of
68 | this License, each Contributor hereby grants to You a perpetual,
69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
70 | copyright license to reproduce, prepare Derivative Works of,
71 | publicly display, publicly perform, sublicense, and distribute the
72 | Work and such Derivative Works in Source or Object form.
73 |
74 | 3. Grant of Patent License. Subject to the terms and conditions of
75 | this License, each Contributor hereby grants to You a perpetual,
76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
77 | (except as stated in this section) patent license to make, have made,
78 | use, offer to sell, sell, import, and otherwise transfer the Work,
79 | where such license applies only to those patent claims licensable
80 | by such Contributor that are necessarily infringed by their
81 | Contribution(s) alone or by combination of their Contribution(s)
82 | with the Work to which such Contribution(s) was submitted. If You
83 | institute patent litigation against any entity (including a
84 | cross-claim or counterclaim in a lawsuit) alleging that the Work
85 | or a Contribution incorporated within the Work constitutes direct
86 | or contributory patent infringement, then any patent licenses
87 | granted to You under this License for that Work shall terminate
88 | as of the date such litigation is filed.
89 |
90 | 4. Redistribution. You may reproduce and distribute copies of the
91 | Work or Derivative Works thereof in any medium, with or without
92 | modifications, and in Source or Object form, provided that You
93 | meet the following conditions:
94 |
95 | (a) You must give any other recipients of the Work or
96 | Derivative Works a copy of this License; and
97 |
98 | (b) You must cause any modified files to carry prominent notices
99 | stating that You changed the files; and
100 |
101 | (c) You must retain, in the Source form of any Derivative Works
102 | that You distribute, all copyright, patent, trademark, and
103 | attribution notices from the Source form of the Work,
104 | excluding those notices that do not pertain to any part of
105 | the Derivative Works; and
106 |
107 | (d) If the Work includes a "NOTICE" text file as part of its
108 | distribution, then any Derivative Works that You distribute must
109 | include a readable copy of the attribution notices contained
110 | within such NOTICE file, excluding those notices that do not
111 | pertain to any part of the Derivative Works, in at least one
112 | of the following places: within a NOTICE text file distributed
113 | as part of the Derivative Works; within the Source form or
114 | documentation, if provided along with the Derivative Works; or,
115 | within a display generated by the Derivative Works, if and
116 | wherever such third-party notices normally appear. The contents
117 | of the NOTICE file are for informational purposes only and
118 | do not modify the License. You may add Your own attribution
119 | notices within Derivative Works that You distribute, alongside
120 | or as an addendum to the NOTICE text from the Work, provided
121 | that such additional attribution notices cannot be construed
122 | as modifying the License.
123 |
124 | You may add Your own copyright statement to Your modifications and
125 | may provide additional or different license terms and conditions
126 | for use, reproduction, or distribution of Your modifications, or
127 | for any such Derivative Works as a whole, provided Your use,
128 | reproduction, and distribution of the Work otherwise complies with
129 | the conditions stated in this License.
130 |
131 | 5. Submission of Contributions. Unless You explicitly state otherwise,
132 | any Contribution intentionally submitted for inclusion in the Work
133 | by You to the Licensor shall be under the terms and conditions of
134 | this License, without any additional terms or conditions.
135 | Notwithstanding the above, nothing herein shall supersede or modify
136 | the terms of any separate license agreement you may have executed
137 | with Licensor regarding such Contributions.
138 |
139 | 6. Trademarks. This License does not grant permission to use the trade
140 | names, trademarks, service marks, or product names of the Licensor,
141 | except as required for reasonable and customary use in describing the
142 | origin of the Work and reproducing the content of the NOTICE file.
143 |
144 | 7. Disclaimer of Warranty. Unless required by applicable law or
145 | agreed to in writing, Licensor provides the Work (and each
146 | Contributor provides its Contributions) on an "AS IS" BASIS,
147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
148 | implied, including, without limitation, any warranties or conditions
149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
150 | PARTICULAR PURPOSE. You are solely responsible for determining the
151 | appropriateness of using or redistributing the Work and assume any
152 | risks associated with Your exercise of permissions under this License.
153 |
154 | 8. Limitation of Liability. In no event and under no legal theory,
155 | whether in tort (including negligence), contract, or otherwise,
156 | unless required by applicable law (such as deliberate and grossly
157 | negligent acts) or agreed to in writing, shall any Contributor be
158 | liable to You for damages, including any direct, indirect, special,
159 | incidental, or consequential damages of any character arising as a
160 | result of this License or out of the use or inability to use the
161 | Work (including but not limited to damages for loss of goodwill,
162 | work stoppage, computer failure or malfunction, or any and all
163 | other commercial damages or losses), even if such Contributor
164 | has been advised of the possibility of such damages.
165 |
166 | 9. Accepting Warranty or Additional Liability. While redistributing
167 | the Work or Derivative Works thereof, You may choose to offer,
168 | and charge a fee for, acceptance of support, warranty, indemnity,
169 | or other liability obligations and/or rights consistent with this
170 | License. However, in accepting such obligations, You may act only
171 | on Your own behalf and on Your sole responsibility, not on behalf
172 | of any other Contributor, and only if You agree to indemnify,
173 | defend, and hold each Contributor harmless for any liability
174 | incurred by, or claims asserted against, such Contributor by reason
175 | of your accepting any such warranty or additional liability.
176 |
177 | END OF TERMS AND CONDITIONS
178 |
179 | APPENDIX: How to apply the Apache License to your work.
180 |
181 | To apply the Apache License to your work, attach the following
182 | boilerplate notice, with the fields enclosed by brackets "[]"
183 | replaced with your own identifying information. (Don't include
184 | the brackets!) The text should be enclosed in the appropriate
185 | comment syntax for the file format. We also recommend that a
186 | file or class name and description of purpose be included on the
187 | same "printed page" as the copyright notice for easier
188 | identification within third-party archives.
189 |
190 | Copyright [yyyy] [name of copyright owner]
191 |
192 | Licensed under the Apache License, Version 2.0 (the "License");
193 | you may not use this file except in compliance with the License.
194 | You may obtain a copy of the License at
195 |
196 | http://www.apache.org/licenses/LICENSE-2.0
197 |
198 | Unless required by applicable law or agreed to in writing, software
199 | distributed under the License is distributed on an "AS IS" BASIS,
200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
201 | See the License for the specific language governing permissions and
202 | limitations under the License.
203 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # DEPRECATED: Detect Action
2 |
3 | **NOTE:** This plugin has been deprecated and is no longer supported. It is recommended that you move to our
4 | new and supported Black Duck Security Scan.
5 |
6 |
7 | 
8 |
9 | Richly integrate Synopsys Detect into GitHub action workflows.
10 |
11 | Configure the action to run Detect in Rapid scan mode to get detailed Black Duck policy reports (default behavior), or in Intelligent scan mode to upload your data into Black Duck for more detailed analysis.
12 |
13 | 
14 |
15 | Once your dependencies are clean, configure the action to run Detect in Rapid scan mode to protect your branches with the Black Duck Policy Check and [_Branch Protection Rules_](https://docs.github.com/en/repositories/configuring-branches-and-merges-in-your-repository/defining-the-mergeability-of-pull-requests/about-protected-branches#require-status-checks-before-merging).
16 |
17 | 
18 |
19 | # Recommended Usage
20 |
21 | To get the most out of this action, we recommend using _RAPID_ scan-mode for all Pull Requests.
22 |
23 | _INTELLIGENT_ scan-mode is best run on a schedule that can vary by repository. A very active repository would benefit from at least one daily scan, while a less active repository might only need to be scanned once or twice a week. It is still important that low-activity repositories be scanned regularly because new vulnerabilities can be discovered for existing dependencies and source-code.
24 |
25 | # Set Up Workflow
26 |
27 | To start using this action, you'll need to create a _job_ within a GitHub Workflow. You can either [create a new GitHub Workflow](https://docs.github.com/en/actions/learn-github-actions/workflow-syntax-for-github-actions) or use an existing one if appropriate for your use-case.
28 |
29 | Once you have a GitHub Workflow selected, configure which [events will trigger the workflow](https://docs.github.com/en/actions/learn-github-actions/events-that-trigger-workflows) such as _pull requests_ or _schedules_.
30 | **Example**:
31 |
32 | ```yaml
33 | name: Example Workflow
34 | on:
35 | pull_request:
36 | branches:
37 | - main
38 | schedule:
39 | - cron: '0 0 * * *'
40 | ```
41 |
42 | # Set Up Job
43 |
44 | Once you have setup a GitHub Workflow with event triggers, you will need to create a _job_ in which the _Detect Action_ will run.
45 | Your job will look something like this if all configuration options are used:
46 |
47 | ```yaml
48 | jobs:
49 | security:
50 | runs-on: my-github-runner
51 | steps:
52 | - uses: actions/checkout@v2
53 | - name: Set up Java 11
54 | uses: actions/setup-java@v2
55 | with:
56 | java-version: '11'
57 | distribution: 'adopt'
58 | # Because this example is building a Gradle project, it needs to happen after setting up Java
59 | - name: Grant execute permission for gradlew to build my project
60 | run: chmod +x gradlew
61 | - name: Build my project with Gradle
62 | run: ./gradlew build
63 | - name: Create Black Duck Policy
64 | env:
65 | NODE_EXTRA_CA_CERTS: ${{ secrets.LOCAL_CA_CERT_PATH }}
66 | uses: blackducksoftware/create-policy-action@v0.0.1
67 | with:
68 | blackduck-url: ${{ secrets.BLACKDUCK_URL }}
69 | blackduck-api-token: ${{ secrets.BLACKDUCK_API_TOKEN }}
70 | policy-name: 'My Black Duck Policy For GitHub Actions'
71 | no-fail-if-policy-exists: true
72 | - name: Run Synopsys Detect
73 | uses: synopsys-sig/detect-action@v0.3.5
74 | env:
75 | NODE_EXTRA_CA_CERTS: ${{ secrets.LOCAL_CA_CERT_PATH }}
76 | with:
77 | github-token: ${{ secrets.GITHUB_TOKEN }}
78 | detect-version: 7.9.0
79 | blackduck-url: ${{ secrets.BLACKDUCK_URL }}
80 | blackduck-api-token: ${{ secrets.BLACKDUCK_API_TOKEN }}
81 | ```
82 |
83 | ## Runners: Self-Hosted
84 | Using a self-hosted runner provides more flexibility in managing your build environment.
85 |
86 | ### Java
87 | It is possible to skip the [Setup Java](#setup-java) step below if you already have Java 11 on your self-hosted runner. Ensure that the _Detect Action_ has access to the correct version of Java on its `$PATH` or within the [_GitHub Tool Cache_](https://docs.github.com/en/enterprise-server@3.0/admin/github-actions/managing-access-to-actions-from-githubcom/setting-up-the-tool-cache-on-self-hosted-runners-without-internet-access)
88 |
89 | ### Certificates
90 | If your Black Duck server is on a private network, the self-hosted runner has access to that network, and the Black Duck server uses custom certificates, then you will likely need to provide a custom certificate to the _Detect Action_.
91 | To do this:
92 | 1. Store the root certificate on the self-hosted runner. Example location: `/certificates/my_custom_cert.pem`
93 | 2. Set `NODE_EXTRA_CA_CERTS` in the _Detect Action's_ environment:
94 |
95 | ```yaml
96 | - name: Run Synopsys Detect
97 | uses: synopsys-sig/detect-action@v0.3.5
98 | env:
99 | NODE_EXTRA_CA_CERTS: /certificates/my_custom_cert.pem
100 | with:
101 | . . .
102 | ```
103 | Note: The path to the certificate can be stored in a [_GitHub Secret_](https://docs.github.com/en/actions/security-guides/encrypted-secrets).
104 |
105 | Please reference the section [_Include Custom Certificates (Optional)_](#include-custom-certificates-optional) for more information.
106 |
107 | ### More Info
108 | For more information on self-hosted runners, please visit [GitHub's documentation](https://docs.github.com/en/actions/hosting-your-own-runners/about-self-hosted-runners).
109 |
110 | ## Runners: GitHub-Hosted
111 | GitHub hosted runners are convenient, but can require extra setup when managing sensitive information.
112 |
113 | ### Certificates
114 | Because a GitHub-hosted runner starts with a clean file-system each run, if custom certificate files are needed, they must be created in your workflow. There are many ways to do this, two possible ways are:
115 |
116 | **Option 1**: Download the certificate file.
117 |
118 | **Option 2**: Store the base-64 encoded certificate in a GitHub secret, then use a workflow-step to create a _.pem_ file with that certificate's content:
119 |
120 | ```yaml
121 | - name: Create certificate
122 | run: cat <<< "${{secrets.BASE_64_CERTIFICATE_CONTENT}}" > my-cert.pem
123 | ```
124 |
125 | The file created through one of those options can then be provided as a value for `NODE_EXTRA_CA_CERTS` in the Detect Action step:
126 |
127 | ```yaml
128 | - name: Run Synopsys Detect
129 | uses: synopsys-sig/detect-action@v0.3.5
130 | env:
131 | NODE_EXTRA_CA_CERTS: ./my-cert.pem
132 | with:
133 | . . .
134 | ```
135 |
136 | ## Checkout
137 | Checkout the source-code onto your GitHub Runner with the following _step_:
138 | ```yaml
139 | - uses: actions/checkout@v2
140 | ```
141 |
142 | ## Build Your Project
143 | Detect is meant to be run post-build. You should add steps necessary to build your project before invoking the _Detect Action_. For example, here is how this might be done in a Gradle project:
144 | ```yaml
145 | - name: Grant execute permission for gradlew
146 | run: chmod +x gradlew
147 | - name: Build with Gradle
148 | run: ./gradlew build
149 | ```
150 | In the example job above, this needed to be done _after_ setting up Java because Gradle requires Java. If your project does not use Java, this step can be done before setting up Java.
151 |
152 | ## Set Up Java
153 |
154 | Detect runs using Java 11 and the preferred distribution is from [AdoptOpenJDK](https://github.com/AdoptOpenJDK). Configure the _step_ as follows:
155 | ```yaml
156 | - name: Set up JDK 11
157 | uses: actions/setup-java@v2
158 | with:
159 | java-version: '11'
160 | distribution: 'adopt'
161 | ```
162 |
163 | ## Create Black Duck Policy (Optional)
164 | In order to run Detect using RAPID mode (which is the default mode for the _Detect Action_), the Black Duck server Detect connects to must have at least one _policy_ and that policy must be enabled. You can create a policy within your Black Duck instance, or you can create a policy directly from your workflow using Black Duck's [_Create Policy Action_](https://github.com/blackducksoftware/create-policy-action). Note: The _Create Policy Action_ is provided for convenience and not the preferred way to manage Black Duck policies.
165 |
166 | The most basic usage of the action looks something like this:
167 | ```yaml
168 | - name: Create Black Duck Policy
169 | env:
170 | NODE_EXTRA_CA_CERTS: ${{ secrets.LOCAL_CA_CERT_PATH }}
171 | uses: blackducksoftware/create-policy-action@v0.0.1
172 | with:
173 | blackduck-url: ${{ secrets.BLACKDUCK_URL }}
174 | blackduck-api-token: ${{ secrets.BLACKDUCK_API_TOKEN }}
175 | policy-name: 'My Black Duck Policy For GitHub Actions'
176 | no-fail-if-policy-exists: true
177 | ```
178 | Please refer to [that action's documentation](https://github.com/blackducksoftware/create-policy-action) for more information on available parameters, certificate management, and troubleshooting.
179 |
180 | ## Set Up Detect Action
181 |
182 | Once your project is checked-out, built, and Java is configured, the _Detect Action_ can be run. At minimum for Detect to run, provide:
183 |
184 | * Black Duck URL (`blackduck-url`)
185 | * Black Duck API Token (`blackduck-api-token`)
186 | * Your desired Detect Version (`detect-version`) to execute
187 | * Your _GITHUB\_TOKEN_ (`github-token`) to comment on Pull Requests or hook into GitHub Checks (in most cases, this is `${{ secrets.GITHUB_TOKEN }}`)
188 |
189 | ### Choose your Scanning Mode
190 |
191 | The _Detect Action_ can be configured either to monitor your commits for policy violations or upload the status of your repository to Black Duck as a project through use of the `scan-mode` option.
192 |
193 | Set the scan mode to:
194 |
195 | * **RAPID** (default) if you want to enable the Black Duck policy check and comments on your pull requests, for example:
196 |
197 | ```yaml
198 | name: Example: Policy check all commits and all Pull Requests to main
199 | on:
200 | pull_request:
201 | branches:
202 | - main
203 | push:
204 | ...
205 | - name: Run Synopsys Detect
206 | uses: synopsys-sig/detect-action@v0.3.5
207 | env:
208 | NODE_EXTRA_CA_CERTS: ${{ secrets.LOCAL_CA_CERT_PATH }}
209 | with:
210 | scan-mode: RAPID # Can be omitted, since this is the default value
211 | github-token: ${{ secrets.GITHUB_TOKEN }}
212 | detect-version: 7.9.0
213 | blackduck-url: ${{ secrets.BLACKDUCK_URL }}
214 | blackduck-api-token: ${{ secrets.BLACKDUCK_API_TOKEN }}
215 | ```
216 |
217 | **Note**: By default, Detect will only fail on BLOCKER and CRITICAL policy violations. This can be overridden to fail on all severities by setting `fail-on-all-policy-severities=true` in the _detect-action_ workflow parameters.
218 |
219 | * **INTELLIGENT** if you want to execute a full analysis of Detect and upload your results into a project in Black Duck, for example:
220 |
221 | ```yaml
222 | name: Example: Every day at midnight, update Black Duck project
223 | on:
224 | schedule:
225 | - cron: '0 0 * * *'
226 | ...
227 | - name: Run Synopsys Detect
228 | uses: synopsys-sig/detect-action@v0.3.5
229 | env:
230 | NODE_EXTRA_CA_CERTS: ${{ secrets.LOCAL_CA_CERT_PATH }}
231 | with:
232 | scan-mode: INTELLIGENT
233 | github-token: ${{ secrets.GITHUB_TOKEN }}
234 | detect-version: 7.9.0
235 | blackduck-url: ${{ secrets.BLACKDUCK_URL }}
236 | blackduck-api-token: ${{ secrets.BLACKDUCK_API_TOKEN }}
237 | ```
238 |
239 |
240 |
241 | These modes also have implications for how Detect is run. RAPID will not persist the results and disables select Detect functionality for faster results. INTELLIGENT persists the results and permits all features of Detect.
242 |
243 | See also: [Detect Documentation of Rapid Scan](https://community.synopsys.com/s/document-item?bundleId=integrations-detect&topicId=downloadingandrunning%2Frapidscan.html&_LANG=enus)
244 |
245 | ### Additional Action Parameters
246 |
247 | - `output-path-override`: Override for where to output Detect files
248 | - Default: $RUNNER_TEMP/blackduck/
249 |
250 | ### Additional Detect Properties
251 |
252 | Passing additional [Detect properties](https://community.synopsys.com/s/document-item?bundleId=integrations-detect&topicId=properties%2Fall-properties.html&_LANG=enus) can be done in several ways:
253 | 1. Use individual environment variables
254 |
255 | **Example**:
256 | ```yaml
257 | - name: Synopsys Detect
258 | uses: synopsys-sig/detect-action@v0.3.5
259 | env:
260 | DETECT_TOOLS: DOCKER
261 | DETECT_DOCKER_IMAGE_ID: abc123
262 | DETECT_DOCKER_PATH_REQUIRED: TRUE
263 | with:
264 | . . .
265 | ```
266 | 2. Use the `SPRING_APPLICATION_JSON` environment variable
267 |
268 | **Example**:
269 | ```yaml
270 | - name: Synopsys Detect
271 | uses: synopsys-sig/detect-action@v0.3.5
272 | env:
273 | SPRING_APPLICATION_JSON: '{"detect.tools":"DOCKER","detect.docker.image.id":"abc123","detect.docker.path.required":"TRUE"}'
274 | with:
275 | . . .
276 | ```
277 | 3. Expose an _application.properties_ or _application.yml_ file in your repository's root directory, or in a _config_ subdirectory
278 |
279 | Please refer to the [Detect documentation on this topic](https://community.synopsys.com/s/document-item?bundleId=integrations-detect&topicId=configuring%2Fothermethods.html&_LANG=enus) for more information.
280 |
281 | ### Detect Diagnostic Zip
282 |
283 | When passing the properties `DETECT_DIAGNOSTIC` or `DETECT_DIAGNOSTIC_EXTENDED` as environment variables, the action will helpfully upload the zip as a build artifact for convenient troubleshooting. Note: These properties must be set to `true` or `false` (rather than `1`) when using the action.
284 |
285 | ## Include Custom Certificates (Optional)
286 |
287 | To include one or more certificates, set `NODE_EXTRA_CA_CERTS` to the certificate file-path(s) in the environment.
288 | Notes:
289 |
290 | - The certificate(s) must be in _pem_ format.
291 | - This environment variable can also be used with the _Create Policy Action_.
292 |
293 | **Example**:
294 | ```yaml
295 | - name: Synopsys Detect
296 | uses: synopsys-sig/detect-action@main
297 | env:
298 | NODE_EXTRA_CA_CERTS: ${{ secrets.LOCAL_CA_CERT_PATH }}
299 | with:
300 | . . .
301 | ```
302 | ### Troubleshooting Certificates
303 | - Problem: An error saying the file-path to the certificate cannot be read.
304 |   - Solution: Ensure whitespace and other special characters are properly escaped based on your runner's OS.
305 | - Problem: An error about missing certificates in the certificate-chain or missing root certificates.
306 | - Solution: You may only be including the server's certificate and not the _root CA certificate_. Ensure you are using the _root CA certificate_.
307 |
308 | # Policy Checks
309 |
310 | When the _Detect Action_ runs in RAPID mode, it creates a 'Black Duck Policy Check'. This check can be used within [_Branch Protection Rules_](https://docs.github.com/en/repositories/configuring-branches-and-merges-in-your-repository/defining-the-mergeability-of-pull-requests/about-protected-branches#require-status-checks-before-merging) to prevent merging Pull Requests that would introduce Black Duck Policy Violations.
311 |
--------------------------------------------------------------------------------
/action.yml:
--------------------------------------------------------------------------------
1 | name: 'Detect Rapid Scan Action'
2 | branding:
3 | icon: 'shield'
4 | color: 'purple'
5 | description: 'DEPRECATED: Please use Black Duck Security Scan "https://github.com/marketplace/actions/black-duck-security-scan" '
6 | inputs:
7 | github-token:
8 | description: 'Your GitHub token'
9 | required: true
10 | detect-version:
11 | description: 'The version of Detect to download'
12 | required: true
13 | blackduck-url:
14 | description: 'Url of Black Duck instance'
15 | required: true
16 | blackduck-api-token:
17 | description: 'API Token for Black Duck instance'
18 | required: true
19 | scan-mode:
20 | description: 'Either RAPID or INTELLIGENT, configures how Detect is invoked. RAPID will not persist the results and disables select Detect functionality for faster results. INTELLIGENT persists the results and permits all features of Detect.'
21 | required: false
22 | default: 'RAPID'
23 | fail-on-all-policy-severities:
24 | description: 'By default, Detect will only fail on policy violations with BLOCKER or CRITICAL severities. This flag will cause the action to fail on all policy severities.'
25 | required: false
26 | default: false
27 | output-path-override:
28 | description: 'Override for where to output Detect files, default is $RUNNER_TEMP/blackduck/'
29 | required: false
30 | detect-trust-cert:
31 | description: 'Acceptable Values: TRUE, FALSE. If TRUE, Detect will trust the Black Duck certificate even if the certificate is not in the keystore.'
32 | default: 'TRUE'
33 | runs:
34 | using: 'node12'
35 | main: 'dist/index.js'
36 |
--------------------------------------------------------------------------------
/dist/sourcemap-register.js:
--------------------------------------------------------------------------------
1 | (()=>{var e={650:e=>{var r=Object.prototype.toString;var n=typeof Buffer.alloc==="function"&&typeof Buffer.allocUnsafe==="function"&&typeof Buffer.from==="function";function isArrayBuffer(e){return r.call(e).slice(8,-1)==="ArrayBuffer"}function fromArrayBuffer(e,r,t){r>>>=0;var o=e.byteLength-r;if(o<0){throw new RangeError("'offset' is out of bounds")}if(t===undefined){t=o}else{t>>>=0;if(t>o){throw new RangeError("'length' is out of bounds")}}return n?Buffer.from(e.slice(r,r+t)):new Buffer(new Uint8Array(e.slice(r,r+t)))}function fromString(e,r){if(typeof r!=="string"||r===""){r="utf8"}if(!Buffer.isEncoding(r)){throw new TypeError('"encoding" must be a valid string encoding')}return n?Buffer.from(e,r):new Buffer(e,r)}function bufferFrom(e,r,t){if(typeof e==="number"){throw new TypeError('"value" argument must not be a number')}if(isArrayBuffer(e)){return fromArrayBuffer(e,r,t)}if(typeof e==="string"){return fromString(e,r)}return n?Buffer.from(e):new Buffer(e)}e.exports=bufferFrom},284:(e,r,n)=>{e=n.nmd(e);var t=n(596).SourceMapConsumer;var o=n(622);var i;try{i=n(747);if(!i.existsSync||!i.readFileSync){i=null}}catch(e){}var a=n(650);function dynamicRequire(e,r){return e.require(r)}var u=false;var s=false;var l=false;var c="auto";var p={};var f={};var g=/^data:application\/json[^,]+base64,/;var h=[];var d=[];function isInBrowser(){if(c==="browser")return true;if(c==="node")return false;return typeof window!=="undefined"&&typeof XMLHttpRequest==="function"&&!(window.require&&window.module&&window.process&&window.process.type==="renderer")}function hasGlobalProcessEventEmitter(){return typeof process==="object"&&process!==null&&typeof process.on==="function"}function handlerExec(e){return function(r){for(var n=0;n"}var n=this.getLineNumber();if(n!=null){r+=":"+n;var t=this.getColumnNumber();if(t){r+=":"+t}}}var o="";var i=this.getFunctionName();var a=true;var u=this.isConstructor();var s=!(this.isToplevel()||u);if(s){var l=this.getTypeName();if(l==="[object 
Object]"){l="null"}var c=this.getMethodName();if(i){if(l&&i.indexOf(l)!=0){o+=l+"."}o+=i;if(c&&i.indexOf("."+c)!=i.length-c.length-1){o+=" [as "+c+"]"}}else{o+=l+"."+(c||"")}}else if(u){o+="new "+(i||"")}else if(i){o+=i}else{o+=r;a=false}if(a){o+=" ("+r+")"}return o}function cloneCallSite(e){var r={};Object.getOwnPropertyNames(Object.getPrototypeOf(e)).forEach((function(n){r[n]=/^(?:is|get)/.test(n)?function(){return e[n].call(e)}:e[n]}));r.toString=CallSiteToString;return r}function wrapCallSite(e,r){if(r===undefined){r={nextPosition:null,curPosition:null}}if(e.isNative()){r.curPosition=null;return e}var n=e.getFileName()||e.getScriptNameOrSourceURL();if(n){var t=e.getLineNumber();var o=e.getColumnNumber()-1;var i=/^v(10\.1[6-9]|10\.[2-9][0-9]|10\.[0-9]{3,}|1[2-9]\d*|[2-9]\d|\d{3,}|11\.11)/;var a=i.test(process.version)?0:62;if(t===1&&o>a&&!isInBrowser()&&!e.isEval()){o-=a}var u=mapSourcePosition({source:n,line:t,column:o});r.curPosition=u;e=cloneCallSite(e);var s=e.getFunctionName;e.getFunctionName=function(){if(r.nextPosition==null){return s()}return r.nextPosition.name||s()};e.getFileName=function(){return u.source};e.getLineNumber=function(){return u.line};e.getColumnNumber=function(){return u.column+1};e.getScriptNameOrSourceURL=function(){return u.source};return e}var l=e.isEval()&&e.getEvalOrigin();if(l){l=mapEvalOrigin(l);e=cloneCallSite(e);e.getEvalOrigin=function(){return l};return e}return e}function prepareStackTrace(e,r){if(l){p={};f={}}var n=e.name||"Error";var t=e.message||"";var o=n+": "+t;var i={nextPosition:null,curPosition:null};var a=[];for(var u=r.length-1;u>=0;u--){a.push("\n at "+wrapCallSite(r[u],i));i.nextPosition=i.curPosition}i.curPosition=i.nextPosition=null;return o+a.reverse().join("")}function getErrorSource(e){var r=/\n at [^(]+ \((.*):(\d+):(\d+)\)/.exec(e.stack);if(r){var n=r[1];var t=+r[2];var o=+r[3];var a=p[n];if(!a&&i&&i.existsSync(n)){try{a=i.readFileSync(n,"utf8")}catch(e){a=""}}if(a){var 
u=a.split(/(?:\r\n|\r|\n)/)[t-1];if(u){return n+":"+t+"\n"+u+"\n"+new Array(o).join(" ")+"^"}}}return null}function printErrorAndExit(e){var r=getErrorSource(e);if(process.stderr._handle&&process.stderr._handle.setBlocking){process.stderr._handle.setBlocking(true)}if(r){console.error();console.error(r)}console.error(e.stack);process.exit(1)}function shimEmitUncaughtException(){var e=process.emit;process.emit=function(r){if(r==="uncaughtException"){var n=arguments[1]&&arguments[1].stack;var t=this.listeners(r).length>0;if(n&&!t){return printErrorAndExit(arguments[1])}}return e.apply(this,arguments)}}var S=h.slice(0);var _=d.slice(0);r.wrapCallSite=wrapCallSite;r.getErrorSource=getErrorSource;r.mapSourcePosition=mapSourcePosition;r.retrieveSourceMap=v;r.install=function(r){r=r||{};if(r.environment){c=r.environment;if(["node","browser","auto"].indexOf(c)===-1){throw new Error("environment "+c+" was unknown. Available options are {auto, browser, node}")}}if(r.retrieveFile){if(r.overrideRetrieveFile){h.length=0}h.unshift(r.retrieveFile)}if(r.retrieveSourceMap){if(r.overrideRetrieveSourceMap){d.length=0}d.unshift(r.retrieveSourceMap)}if(r.hookRequire&&!isInBrowser()){var n=dynamicRequire(e,"module");var t=n.prototype._compile;if(!t.__sourceMapSupport){n.prototype._compile=function(e,r){p[r]=e;f[r]=undefined;return t.call(this,e,r)};n.prototype._compile.__sourceMapSupport=true}}if(!l){l="emptyCacheBetweenOperations"in r?r.emptyCacheBetweenOperations:false}if(!u){u=true;Error.prepareStackTrace=prepareStackTrace}if(!s){var o="handleUncaughtExceptions"in r?r.handleUncaughtExceptions:true;try{var i=dynamicRequire(e,"worker_threads");if(i.isMainThread===false){o=false}}catch(e){}if(o&&hasGlobalProcessEventEmitter()){s=true;shimEmitUncaughtException()}}};r.resetRetrieveHandlers=function(){h.length=0;d.length=0;h=S.slice(0);d=_.slice(0);v=handlerExec(d);m=handlerExec(h)}},837:(e,r,n)=>{var t=n(983);var o=Object.prototype.hasOwnProperty;var i=typeof Map!=="undefined";function 
ArraySet(){this._array=[];this._set=i?new Map:Object.create(null)}ArraySet.fromArray=function ArraySet_fromArray(e,r){var n=new ArraySet;for(var t=0,o=e.length;t=0){return r}}else{var n=t.toSetString(e);if(o.call(this._set,n)){return this._set[n]}}throw new Error('"'+e+'" is not in the set.')};ArraySet.prototype.at=function ArraySet_at(e){if(e>=0&&e{var t=n(537);var o=5;var i=1<>1;return r?-n:n}r.encode=function base64VLQ_encode(e){var r="";var n;var i=toVLQSigned(e);do{n=i&a;i>>>=o;if(i>0){n|=u}r+=t.encode(n)}while(i>0);return r};r.decode=function base64VLQ_decode(e,r,n){var i=e.length;var s=0;var l=0;var c,p;do{if(r>=i){throw new Error("Expected more digits in base 64 VLQ value.")}p=t.decode(e.charCodeAt(r++));if(p===-1){throw new Error("Invalid base64 digit: "+e.charAt(r-1))}c=!!(p&u);p&=a;s=s+(p<{var n="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".split("");r.encode=function(e){if(0<=e&&e{r.GREATEST_LOWER_BOUND=1;r.LEAST_UPPER_BOUND=2;function recursiveSearch(e,n,t,o,i,a){var u=Math.floor((n-e)/2)+e;var s=i(t,o[u],true);if(s===0){return u}else if(s>0){if(n-u>1){return recursiveSearch(u,n,t,o,i,a)}if(a==r.LEAST_UPPER_BOUND){return n1){return recursiveSearch(e,u,t,o,i,a)}if(a==r.LEAST_UPPER_BOUND){return u}else{return e<0?-1:e}}}r.search=function search(e,n,t,o){if(n.length===0){return-1}var i=recursiveSearch(-1,n.length,e,n,t,o||r.GREATEST_LOWER_BOUND);if(i<0){return-1}while(i-1>=0){if(t(n[i],n[i-1],true)!==0){break}--i}return i}},740:(e,r,n)=>{var t=n(983);function generatedPositionAfter(e,r){var n=e.generatedLine;var o=r.generatedLine;var i=e.generatedColumn;var a=r.generatedColumn;return o>n||o==n&&a>=i||t.compareByGeneratedPositionsInflated(e,r)<=0}function MappingList(){this._array=[];this._sorted=true;this._last={generatedLine:-1,generatedColumn:0}}MappingList.prototype.unsortedForEach=function MappingList_forEach(e,r){this._array.forEach(e,r)};MappingList.prototype.add=function 
MappingList_add(e){if(generatedPositionAfter(this._last,e)){this._last=e;this._array.push(e)}else{this._sorted=false;this._array.push(e)}};MappingList.prototype.toArray=function MappingList_toArray(){if(!this._sorted){this._array.sort(t.compareByGeneratedPositionsInflated);this._sorted=true}return this._array};r.H=MappingList},226:(e,r)=>{function swap(e,r,n){var t=e[r];e[r]=e[n];e[n]=t}function randomIntInRange(e,r){return Math.round(e+Math.random()*(r-e))}function doQuickSort(e,r,n,t){if(n{var t;var o=n(983);var i=n(164);var a=n(837).I;var u=n(215);var s=n(226).U;function SourceMapConsumer(e,r){var n=e;if(typeof e==="string"){n=o.parseSourceMapInput(e)}return n.sections!=null?new IndexedSourceMapConsumer(n,r):new BasicSourceMapConsumer(n,r)}SourceMapConsumer.fromSourceMap=function(e,r){return BasicSourceMapConsumer.fromSourceMap(e,r)};SourceMapConsumer.prototype._version=3;SourceMapConsumer.prototype.__generatedMappings=null;Object.defineProperty(SourceMapConsumer.prototype,"_generatedMappings",{configurable:true,enumerable:true,get:function(){if(!this.__generatedMappings){this._parseMappings(this._mappings,this.sourceRoot)}return this.__generatedMappings}});SourceMapConsumer.prototype.__originalMappings=null;Object.defineProperty(SourceMapConsumer.prototype,"_originalMappings",{configurable:true,enumerable:true,get:function(){if(!this.__originalMappings){this._parseMappings(this._mappings,this.sourceRoot)}return this.__originalMappings}});SourceMapConsumer.prototype._charIsMappingSeparator=function SourceMapConsumer_charIsMappingSeparator(e,r){var n=e.charAt(r);return n===";"||n===","};SourceMapConsumer.prototype._parseMappings=function SourceMapConsumer_parseMappings(e,r){throw new Error("Subclasses must implement _parseMappings")};SourceMapConsumer.GENERATED_ORDER=1;SourceMapConsumer.ORIGINAL_ORDER=2;SourceMapConsumer.GREATEST_LOWER_BOUND=1;SourceMapConsumer.LEAST_UPPER_BOUND=2;SourceMapConsumer.prototype.eachMapping=function 
SourceMapConsumer_eachMapping(e,r,n){var t=r||null;var i=n||SourceMapConsumer.GENERATED_ORDER;var a;switch(i){case SourceMapConsumer.GENERATED_ORDER:a=this._generatedMappings;break;case SourceMapConsumer.ORIGINAL_ORDER:a=this._originalMappings;break;default:throw new Error("Unknown order of iteration.")}var u=this.sourceRoot;a.map((function(e){var r=e.source===null?null:this._sources.at(e.source);r=o.computeSourceURL(u,r,this._sourceMapURL);return{source:r,generatedLine:e.generatedLine,generatedColumn:e.generatedColumn,originalLine:e.originalLine,originalColumn:e.originalColumn,name:e.name===null?null:this._names.at(e.name)}}),this).forEach(e,t)};SourceMapConsumer.prototype.allGeneratedPositionsFor=function SourceMapConsumer_allGeneratedPositionsFor(e){var r=o.getArg(e,"line");var n={source:o.getArg(e,"source"),originalLine:r,originalColumn:o.getArg(e,"column",0)};n.source=this._findSourceIndex(n.source);if(n.source<0){return[]}var t=[];var a=this._findMapping(n,this._originalMappings,"originalLine","originalColumn",o.compareByOriginalPositions,i.LEAST_UPPER_BOUND);if(a>=0){var u=this._originalMappings[a];if(e.column===undefined){var s=u.originalLine;while(u&&u.originalLine===s){t.push({line:o.getArg(u,"generatedLine",null),column:o.getArg(u,"generatedColumn",null),lastColumn:o.getArg(u,"lastGeneratedColumn",null)});u=this._originalMappings[++a]}}else{var l=u.originalColumn;while(u&&u.originalLine===r&&u.originalColumn==l){t.push({line:o.getArg(u,"generatedLine",null),column:o.getArg(u,"generatedColumn",null),lastColumn:o.getArg(u,"lastGeneratedColumn",null)});u=this._originalMappings[++a]}}}return t};r.SourceMapConsumer=SourceMapConsumer;function BasicSourceMapConsumer(e,r){var n=e;if(typeof e==="string"){n=o.parseSourceMapInput(e)}var t=o.getArg(n,"version");var i=o.getArg(n,"sources");var u=o.getArg(n,"names",[]);var s=o.getArg(n,"sourceRoot",null);var l=o.getArg(n,"sourcesContent",null);var c=o.getArg(n,"mappings");var 
p=o.getArg(n,"file",null);if(t!=this._version){throw new Error("Unsupported version: "+t)}if(s){s=o.normalize(s)}i=i.map(String).map(o.normalize).map((function(e){return s&&o.isAbsolute(s)&&o.isAbsolute(e)?o.relative(s,e):e}));this._names=a.fromArray(u.map(String),true);this._sources=a.fromArray(i,true);this._absoluteSources=this._sources.toArray().map((function(e){return o.computeSourceURL(s,e,r)}));this.sourceRoot=s;this.sourcesContent=l;this._mappings=c;this._sourceMapURL=r;this.file=p}BasicSourceMapConsumer.prototype=Object.create(SourceMapConsumer.prototype);BasicSourceMapConsumer.prototype.consumer=SourceMapConsumer;BasicSourceMapConsumer.prototype._findSourceIndex=function(e){var r=e;if(this.sourceRoot!=null){r=o.relative(this.sourceRoot,r)}if(this._sources.has(r)){return this._sources.indexOf(r)}var n;for(n=0;n1){v.source=l+_[1];l+=_[1];v.originalLine=i+_[2];i=v.originalLine;v.originalLine+=1;v.originalColumn=a+_[3];a=v.originalColumn;if(_.length>4){v.name=c+_[4];c+=_[4]}}m.push(v);if(typeof v.originalLine==="number"){d.push(v)}}}s(m,o.compareByGeneratedPositionsDeflated);this.__generatedMappings=m;s(d,o.compareByOriginalPositions);this.__originalMappings=d};BasicSourceMapConsumer.prototype._findMapping=function SourceMapConsumer_findMapping(e,r,n,t,o,a){if(e[n]<=0){throw new TypeError("Line must be greater than or equal to 1, got "+e[n])}if(e[t]<0){throw new TypeError("Column must be greater than or equal to 0, got "+e[t])}return i.search(e,r,o,a)};BasicSourceMapConsumer.prototype.computeColumnSpans=function SourceMapConsumer_computeColumnSpans(){for(var e=0;e=0){var t=this._generatedMappings[n];if(t.generatedLine===r.generatedLine){var i=o.getArg(t,"source",null);if(i!==null){i=this._sources.at(i);i=o.computeSourceURL(this.sourceRoot,i,this._sourceMapURL)}var 
a=o.getArg(t,"name",null);if(a!==null){a=this._names.at(a)}return{source:i,line:o.getArg(t,"originalLine",null),column:o.getArg(t,"originalColumn",null),name:a}}}return{source:null,line:null,column:null,name:null}};BasicSourceMapConsumer.prototype.hasContentsOfAllSources=function BasicSourceMapConsumer_hasContentsOfAllSources(){if(!this.sourcesContent){return false}return this.sourcesContent.length>=this._sources.size()&&!this.sourcesContent.some((function(e){return e==null}))};BasicSourceMapConsumer.prototype.sourceContentFor=function SourceMapConsumer_sourceContentFor(e,r){if(!this.sourcesContent){return null}var n=this._findSourceIndex(e);if(n>=0){return this.sourcesContent[n]}var t=e;if(this.sourceRoot!=null){t=o.relative(this.sourceRoot,t)}var i;if(this.sourceRoot!=null&&(i=o.urlParse(this.sourceRoot))){var a=t.replace(/^file:\/\//,"");if(i.scheme=="file"&&this._sources.has(a)){return this.sourcesContent[this._sources.indexOf(a)]}if((!i.path||i.path=="/")&&this._sources.has("/"+t)){return this.sourcesContent[this._sources.indexOf("/"+t)]}}if(r){return null}else{throw new Error('"'+t+'" is not in the SourceMap.')}};BasicSourceMapConsumer.prototype.generatedPositionFor=function SourceMapConsumer_generatedPositionFor(e){var r=o.getArg(e,"source");r=this._findSourceIndex(r);if(r<0){return{line:null,column:null,lastColumn:null}}var n={source:r,originalLine:o.getArg(e,"line"),originalColumn:o.getArg(e,"column")};var t=this._findMapping(n,this._originalMappings,"originalLine","originalColumn",o.compareByOriginalPositions,o.getArg(e,"bias",SourceMapConsumer.GREATEST_LOWER_BOUND));if(t>=0){var i=this._originalMappings[t];if(i.source===n.source){return{line:o.getArg(i,"generatedLine",null),column:o.getArg(i,"generatedColumn",null),lastColumn:o.getArg(i,"lastGeneratedColumn",null)}}}return{line:null,column:null,lastColumn:null}};t=BasicSourceMapConsumer;function IndexedSourceMapConsumer(e,r){var n=e;if(typeof e==="string"){n=o.parseSourceMapInput(e)}var 
t=o.getArg(n,"version");var i=o.getArg(n,"sections");if(t!=this._version){throw new Error("Unsupported version: "+t)}this._sources=new a;this._names=new a;var u={line:-1,column:0};this._sections=i.map((function(e){if(e.url){throw new Error("Support for url field in sections not implemented.")}var n=o.getArg(e,"offset");var t=o.getArg(n,"line");var i=o.getArg(n,"column");if(t{var t=n(215);var o=n(983);var i=n(837).I;var a=n(740).H;function SourceMapGenerator(e){if(!e){e={}}this._file=o.getArg(e,"file",null);this._sourceRoot=o.getArg(e,"sourceRoot",null);this._skipValidation=o.getArg(e,"skipValidation",false);this._sources=new i;this._names=new i;this._mappings=new a;this._sourcesContents=null}SourceMapGenerator.prototype._version=3;SourceMapGenerator.fromSourceMap=function SourceMapGenerator_fromSourceMap(e){var r=e.sourceRoot;var n=new SourceMapGenerator({file:e.file,sourceRoot:r});e.eachMapping((function(e){var t={generated:{line:e.generatedLine,column:e.generatedColumn}};if(e.source!=null){t.source=e.source;if(r!=null){t.source=o.relative(r,t.source)}t.original={line:e.originalLine,column:e.originalColumn};if(e.name!=null){t.name=e.name}}n.addMapping(t)}));e.sources.forEach((function(t){var i=t;if(r!==null){i=o.relative(r,t)}if(!n._sources.has(i)){n._sources.add(i)}var a=e.sourceContentFor(t);if(a!=null){n.setSourceContent(t,a)}}));return n};SourceMapGenerator.prototype.addMapping=function SourceMapGenerator_addMapping(e){var r=o.getArg(e,"generated");var n=o.getArg(e,"original",null);var t=o.getArg(e,"source",null);var i=o.getArg(e,"name",null);if(!this._skipValidation){this._validateMapping(r,n,t,i)}if(t!=null){t=String(t);if(!this._sources.has(t)){this._sources.add(t)}}if(i!=null){i=String(i);if(!this._names.has(i)){this._names.add(i)}}this._mappings.add({generatedLine:r.line,generatedColumn:r.column,originalLine:n!=null&&n.line,originalColumn:n!=null&&n.column,source:t,name:i})};SourceMapGenerator.prototype.setSourceContent=function 
SourceMapGenerator_setSourceContent(e,r){var n=e;if(this._sourceRoot!=null){n=o.relative(this._sourceRoot,n)}if(r!=null){if(!this._sourcesContents){this._sourcesContents=Object.create(null)}this._sourcesContents[o.toSetString(n)]=r}else if(this._sourcesContents){delete this._sourcesContents[o.toSetString(n)];if(Object.keys(this._sourcesContents).length===0){this._sourcesContents=null}}};SourceMapGenerator.prototype.applySourceMap=function SourceMapGenerator_applySourceMap(e,r,n){var t=r;if(r==null){if(e.file==null){throw new Error("SourceMapGenerator.prototype.applySourceMap requires either an explicit source file, "+'or the source map\'s "file" property. Both were omitted.')}t=e.file}var a=this._sourceRoot;if(a!=null){t=o.relative(a,t)}var u=new i;var s=new i;this._mappings.unsortedForEach((function(r){if(r.source===t&&r.originalLine!=null){var i=e.originalPositionFor({line:r.originalLine,column:r.originalColumn});if(i.source!=null){r.source=i.source;if(n!=null){r.source=o.join(n,r.source)}if(a!=null){r.source=o.relative(a,r.source)}r.originalLine=i.line;r.originalColumn=i.column;if(i.name!=null){r.name=i.name}}}var l=r.source;if(l!=null&&!u.has(l)){u.add(l)}var c=r.name;if(c!=null&&!s.has(c)){s.add(c)}}),this);this._sources=u;this._names=s;e.sources.forEach((function(r){var t=e.sourceContentFor(r);if(t!=null){if(n!=null){r=o.join(n,r)}if(a!=null){r=o.relative(a,r)}this.setSourceContent(r,t)}}),this)};SourceMapGenerator.prototype._validateMapping=function SourceMapGenerator_validateMapping(e,r,n,t){if(r&&typeof r.line!=="number"&&typeof r.column!=="number"){throw new Error("original.line and original.column are not numbers -- you probably meant to omit "+"the original mapping entirely and only map the generated position. 
If so, pass "+"null for the original mapping instead of an object with empty or null values.")}if(e&&"line"in e&&"column"in e&&e.line>0&&e.column>=0&&!r&&!n&&!t){return}else if(e&&"line"in e&&"column"in e&&r&&"line"in r&&"column"in r&&e.line>0&&e.column>=0&&r.line>0&&r.column>=0&&n){return}else{throw new Error("Invalid mapping: "+JSON.stringify({generated:e,source:n,original:r,name:t}))}};SourceMapGenerator.prototype._serializeMappings=function SourceMapGenerator_serializeMappings(){var e=0;var r=1;var n=0;var i=0;var a=0;var u=0;var s="";var l;var c;var p;var f;var g=this._mappings.toArray();for(var h=0,d=g.length;h0){if(!o.compareByGeneratedPositionsInflated(c,g[h-1])){continue}l+=","}}l+=t.encode(c.generatedColumn-e);e=c.generatedColumn;if(c.source!=null){f=this._sources.indexOf(c.source);l+=t.encode(f-u);u=f;l+=t.encode(c.originalLine-1-i);i=c.originalLine-1;l+=t.encode(c.originalColumn-n);n=c.originalColumn;if(c.name!=null){p=this._names.indexOf(c.name);l+=t.encode(p-a);a=p}}s+=l}return s};SourceMapGenerator.prototype._generateSourcesContent=function SourceMapGenerator_generateSourcesContent(e,r){return e.map((function(e){if(!this._sourcesContents){return null}if(r!=null){e=o.relative(r,e)}var n=o.toSetString(e);return Object.prototype.hasOwnProperty.call(this._sourcesContents,n)?this._sourcesContents[n]:null}),this)};SourceMapGenerator.prototype.toJSON=function SourceMapGenerator_toJSON(){var e={version:this._version,sources:this._sources.toArray(),names:this._names.toArray(),mappings:this._serializeMappings()};if(this._file!=null){e.file=this._file}if(this._sourceRoot!=null){e.sourceRoot=this._sourceRoot}if(this._sourcesContents){e.sourcesContent=this._generateSourcesContent(e.sources,e.sourceRoot)}return e};SourceMapGenerator.prototype.toString=function SourceMapGenerator_toString(){return JSON.stringify(this.toJSON())};r.h=SourceMapGenerator},990:(e,r,n)=>{var t;var o=n(341).h;var i=n(983);var a=/(\r?\n)/;var u=10;var s="$$$isSourceNode$$$";function 
SourceNode(e,r,n,t,o){this.children=[];this.sourceContents={};this.line=e==null?null:e;this.column=r==null?null:r;this.source=n==null?null:n;this.name=o==null?null:o;this[s]=true;if(t!=null)this.add(t)}SourceNode.fromStringWithSourceMap=function SourceNode_fromStringWithSourceMap(e,r,n){var t=new SourceNode;var o=e.split(a);var u=0;var shiftNextLine=function(){var e=getNextLine();var r=getNextLine()||"";return e+r;function getNextLine(){return u=0;r--){this.prepend(e[r])}}else if(e[s]||typeof e==="string"){this.children.unshift(e)}else{throw new TypeError("Expected a SourceNode, string, or an array of SourceNodes and strings. Got "+e)}return this};SourceNode.prototype.walk=function SourceNode_walk(e){var r;for(var n=0,t=this.children.length;n0){r=[];for(n=0;n{function getArg(e,r,n){if(r in e){return e[r]}else if(arguments.length===3){return n}else{throw new Error('"'+r+'" is a required argument.')}}r.getArg=getArg;var n=/^(?:([\w+\-.]+):)?\/\/(?:(\w+:\w+)@)?([\w.-]*)(?::(\d+))?(.*)$/;var t=/^data:.+\,.+$/;function urlParse(e){var r=e.match(n);if(!r){return null}return{scheme:r[1],auth:r[2],host:r[3],port:r[4],path:r[5]}}r.urlParse=urlParse;function urlGenerate(e){var r="";if(e.scheme){r+=e.scheme+":"}r+="//";if(e.auth){r+=e.auth+"@"}if(e.host){r+=e.host}if(e.port){r+=":"+e.port}if(e.path){r+=e.path}return r}r.urlGenerate=urlGenerate;function normalize(e){var n=e;var t=urlParse(e);if(t){if(!t.path){return e}n=t.path}var o=r.isAbsolute(n);var i=n.split(/\/+/);for(var a,u=0,s=i.length-1;s>=0;s--){a=i[s];if(a==="."){i.splice(s,1)}else if(a===".."){u++}else if(u>0){if(a===""){i.splice(s+1,u);u=0}else{i.splice(s,2);u--}}}n=i.join("/");if(n===""){n=o?"/":"."}if(t){t.path=n;return urlGenerate(t)}return n}r.normalize=normalize;function join(e,r){if(e===""){e="."}if(r===""){r="."}var n=urlParse(r);var o=urlParse(e);if(o){e=o.path||"/"}if(n&&!n.scheme){if(o){n.scheme=o.scheme}return urlGenerate(n)}if(n||r.match(t)){return r}if(o&&!o.host&&!o.path){o.host=r;return 
urlGenerate(o)}var i=r.charAt(0)==="/"?r:normalize(e.replace(/\/+$/,"")+"/"+r);if(o){o.path=i;return urlGenerate(o)}return i}r.join=join;r.isAbsolute=function(e){return e.charAt(0)==="/"||n.test(e)};function relative(e,r){if(e===""){e="."}e=e.replace(/\/$/,"");var n=0;while(r.indexOf(e+"/")!==0){var t=e.lastIndexOf("/");if(t<0){return r}e=e.slice(0,t);if(e.match(/^([^\/]+:\/)?\/*$/)){return r}++n}return Array(n+1).join("../")+r.substr(e.length+1)}r.relative=relative;var o=function(){var e=Object.create(null);return!("__proto__"in e)}();function identity(e){return e}function toSetString(e){if(isProtoString(e)){return"$"+e}return e}r.toSetString=o?identity:toSetString;function fromSetString(e){if(isProtoString(e)){return e.slice(1)}return e}r.fromSetString=o?identity:fromSetString;function isProtoString(e){if(!e){return false}var r=e.length;if(r<9){return false}if(e.charCodeAt(r-1)!==95||e.charCodeAt(r-2)!==95||e.charCodeAt(r-3)!==111||e.charCodeAt(r-4)!==116||e.charCodeAt(r-5)!==111||e.charCodeAt(r-6)!==114||e.charCodeAt(r-7)!==112||e.charCodeAt(r-8)!==95||e.charCodeAt(r-9)!==95){return false}for(var n=r-10;n>=0;n--){if(e.charCodeAt(n)!==36){return false}}return true}function compareByOriginalPositions(e,r,n){var t=strcmp(e.source,r.source);if(t!==0){return t}t=e.originalLine-r.originalLine;if(t!==0){return t}t=e.originalColumn-r.originalColumn;if(t!==0||n){return t}t=e.generatedColumn-r.generatedColumn;if(t!==0){return t}t=e.generatedLine-r.generatedLine;if(t!==0){return t}return strcmp(e.name,r.name)}r.compareByOriginalPositions=compareByOriginalPositions;function compareByGeneratedPositionsDeflated(e,r,n){var t=e.generatedLine-r.generatedLine;if(t!==0){return t}t=e.generatedColumn-r.generatedColumn;if(t!==0||n){return t}t=strcmp(e.source,r.source);if(t!==0){return t}t=e.originalLine-r.originalLine;if(t!==0){return t}t=e.originalColumn-r.originalColumn;if(t!==0){return t}return 
strcmp(e.name,r.name)}r.compareByGeneratedPositionsDeflated=compareByGeneratedPositionsDeflated;function strcmp(e,r){if(e===r){return 0}if(e===null){return 1}if(r===null){return-1}if(e>r){return 1}return-1}function compareByGeneratedPositionsInflated(e,r){var n=e.generatedLine-r.generatedLine;if(n!==0){return n}n=e.generatedColumn-r.generatedColumn;if(n!==0){return n}n=strcmp(e.source,r.source);if(n!==0){return n}n=e.originalLine-r.originalLine;if(n!==0){return n}n=e.originalColumn-r.originalColumn;if(n!==0){return n}return strcmp(e.name,r.name)}r.compareByGeneratedPositionsInflated=compareByGeneratedPositionsInflated;function parseSourceMapInput(e){return JSON.parse(e.replace(/^\)]}'[^\n]*\n/,""))}r.parseSourceMapInput=parseSourceMapInput;function computeSourceURL(e,r,n){r=r||"";if(e){if(e[e.length-1]!=="/"&&r[0]!=="/"){e+="/"}r=e+r}if(n){var t=urlParse(n);if(!t){throw new Error("sourceMapURL could not be parsed")}if(t.path){var o=t.path.lastIndexOf("/");if(o>=0){t.path=t.path.substring(0,o+1)}}r=join(urlGenerate(t),r)}return normalize(r)}r.computeSourceURL=computeSourceURL},596:(e,r,n)=>{n(341).h;r.SourceMapConsumer=n(327).SourceMapConsumer;n(990)},747:e=>{"use strict";e.exports=require("fs")},622:e=>{"use strict";e.exports=require("path")}};var r={};function __webpack_require__(n){var t=r[n];if(t!==undefined){return t.exports}var o=r[n]={id:n,loaded:false,exports:{}};var i=true;try{e[n](o,o.exports,__webpack_require__);i=false}finally{if(i)delete r[n]}o.loaded=true;return o.exports}(()=>{__webpack_require__.nmd=e=>{e.paths=[];if(!e.children)e.children=[];return e}})();if(typeof __webpack_require__!=="undefined")__webpack_require__.ab=__dirname+"/";var n={};(()=>{__webpack_require__(284).install()})();module.exports=n})();
--------------------------------------------------------------------------------
/jenkinsfile_dev:
--------------------------------------------------------------------------------
/*
 * This pipeline script was created manually based on scripts created by jenkins-build-creator.
 *
 */
// Dev pipeline for the detect-action GitHub Action: checks out the chosen
// branch, builds the TypeScript sources (npm ci && npm run all) and archives
// the compiled dist/ JavaScript bundle.
@Library('integration-pipeline-library@master')
import com.synopsys.integration.pipeline.SimplePipeline
import com.synopsys.integration.Constants
properties(
    [
        // Limit retained build history and forbid overlapping runs of this job.
        limitBuildsToKeep(),
        disableConcurrentBuilds(),
        parameters(
            [
                // Build parameter letting the user pick the branch (defaults to origin/main).
                selectBranch('origin/main')
            ]
        ),
        defaultPipelineTriggers()
    ]
)

// Failure/recovery notifications go to the central integrations team.
String emailRecipients = Constants.CENTRAL_INTEGRATIONS_TEAM_EMAIL
String gitUrl = 'https://github.com/synopsys-sig/detect-action.git'
// Archives the compiled bundle and its source maps (dist/*.js, dist/*.js.map).
String archivePattern = 'dist/*.js*'

node('integrations') {
    SimplePipeline pipeline = new SimplePipeline(this)
    // Clean the workspace root before doing anything else.
    pipeline.addCleanupStep('.')

    String gitBranch = pipeline.determineGitBranch(params.BRANCH)
    pipeline.setDirectoryFromBranch(gitBranch)
    def gitStage = pipeline.addGitStage(gitUrl, gitBranch, false)
    gitStage.setChangelog(true)

    pipeline.addApiTokenStage()

    pipeline.setUrl(gitUrl)
    pipeline.setGithubCredentialsId(gitStage.getCredentialsId())
    pipeline.addEmailPipelineWrapper(emailRecipients)

    // NOTE(review): the returned stage handle is never used afterwards; the
    // 'def buildStage =' assignment could be dropped. Kept as-is here.
    def buildStage = pipeline.addStage('Build'){
        sh 'npm ci && npm run all'
    }

    pipeline.addArchiveStage(archivePattern)

    // Stages are only registered above; run() executes them in order.
    pipeline.run()
}
48 |
--------------------------------------------------------------------------------
/jenkinsfile_pop_detect:
--------------------------------------------------------------------------------
/*
 * This pipeline script was created manually based on scripts created by jenkins-build-creator.
 *
 */
// Pipeline that runs the shared "Detect POP source" stage from the
// integration pipeline library against this repository.
// NOTE(review): the behavior of addDetectPopSourceStage() is defined in the
// shared library, not visible here — confirm its semantics there.
@Library('integration-pipeline-library@master')
import com.synopsys.integration.pipeline.SimplePipeline
import com.synopsys.integration.Constants
properties(
    [
        // Limit retained build history and forbid overlapping runs of this job.
        limitBuildsToKeep(),
        disableConcurrentBuilds(),
        parameters(
            [
                // Build parameter letting the user pick the branch (defaults to origin/main).
                selectBranch('origin/main')
            ]
        ),
        defaultPipelineTriggers()
    ]
)

// Failure/recovery notifications go to the central integrations team.
String emailRecipients = Constants.CENTRAL_INTEGRATIONS_TEAM_EMAIL
String gitUrl = 'https://github.com/synopsys-sig/detect-action.git'

node('integrations') {
    SimplePipeline pipeline = new SimplePipeline(this)
    // Clean the workspace root before doing anything else.
    pipeline.addCleanupStep('.')

    String gitBranch = pipeline.determineGitBranch(params.BRANCH)
    pipeline.setDirectoryFromBranch(gitBranch)
    def gitStage = pipeline.addGitStage(gitUrl, gitBranch, false)
    gitStage.setChangelog(true)

    pipeline.setUrl(gitUrl)
    pipeline.setGithubCredentialsId(gitStage.getCredentialsId())
    pipeline.addEmailPipelineWrapper(emailRecipients)

    // Select JDK 11 on the agent, then register the Detect scan stage.
    pipeline.addSetJdkStage('jdk11')
    pipeline.addDetectPopSourceStage()

    // Stages are only registered above; run() executes them in order.
    pipeline.run()
}
42 |
--------------------------------------------------------------------------------
/jenkinsfile_release:
--------------------------------------------------------------------------------
/*
 * This pipeline script was created manually based on scripts created by jenkins-build-creator.
 *
 */
// Release pipeline: publishes a new version of the action via `npm version`
// and archives the compiled dist/ bundle.
@Library('integration-pipeline-library@master')
import com.synopsys.integration.Constants

properties(
    [
        limitBuildsToKeep(),
        disableConcurrentBuilds(),
        parameters(
            [
                // A release must be explicitly requested via checkbox; commit
                // message and target branch are also supplied as parameters.
                releaseCheckbox(),
                releaseCommitMessage(),
                selectBranch('origin/main')
            ]
        )
    ]
)

pipeline {
    agent {
        label 'integrations'
    }
    stages {
        stage('Git') {
            steps {
                // Exports GIT_* environment variables (e.g. GIT_LOCAL_BRANCH) for later stages.
                gitCheckoutExportVariables('https://github.com/synopsys-sig/detect-action.git')
            }
        }
        stage('Release') {
            when {
                allOf {
                    // Only run when the release checkbox was ticked...
                    expression { return params.RUN_RELEASE }
                    // ...and NOT on main. NOTE(review): this implies releases
                    // are cut from non-main (version) branches only — confirm
                    // that this inversion is intentional.
                    not { branch 'main' }
                }
            }
            steps {
                // NOTE(review): GitStage is referenced without an import here —
                // presumably resolved via the shared library; verify this compiles.
                withCredentials([usernamePassword(credentialsId: GitStage.DEFAULT_CREDENTIALS_ID, passwordVariable: 'GIT_PASSWORD', usernameVariable: 'GIT_USERNAME')]) {
                    sh "git branch -u origin/${env.GIT_LOCAL_BRANCH}"
                    // Embed credentials in the origin URL so the git push done by
                    // `npm version`'s postversion script can authenticate.
                    sh "git remote set-url origin https://${GIT_USERNAME}:${GIT_PASSWORD}@github.com/synopsys-sig/detect-action.git"
                }
                sh 'npm ci'
                // The branch name doubles as the version to publish; package.json's
                // preversion/version/postversion scripts test, build, commit, tag and push.
                sh "npm version ${env.GIT_LOCAL_BRANCH}"
                archiveArtifacts 'dist/*.js*'
            }
        }
    }
    post {
        // Email the integrations team on failure and on recovery.
        failure {
            emailFailure(Constants.CENTRAL_INTEGRATIONS_TEAM_EMAIL)
        }
        fixed {
            emailFixed(Constants.CENTRAL_INTEGRATIONS_TEAM_EMAIL)
        }
    }
}
59 |
--------------------------------------------------------------------------------
/jest.config.js:
--------------------------------------------------------------------------------
/** @type {import('ts-jest/dist/types').InitialOptionsTsJest} */
// Jest configuration: compile TypeScript tests through ts-jest and run them
// in a Node environment (no browser globals).
module.exports = {
  preset: 'ts-jest',
  testEnvironment: 'node',
};
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "detect-action",
3 | "version": "0.3.5",
4 | "description": "Richly integrate Synopsys Detect and Black Duck policy into your GitHub Action pipelines",
5 | "main": "lib/main.js",
6 | "scripts": {
7 | "build": "tsc",
8 | "format": "prettier --write '**/*.ts'",
9 | "format-check": "prettier --check '**/*.ts'",
10 | "lint": "eslint src/**/*.ts",
11 | "package": "ncc build --source-map",
12 | "test": "jest",
13 | "all": "npm run build && npm run format && npm run lint && npm run package && npm test",
14 | "preversion": "npm test",
15 | "version": "npm run build && git add -A dist",
16 | "postversion": "git push && git push --tags"
17 | },
18 | "repository": {
19 | "type": "git",
20 | "url": "git+https://github.com/synopsys-sig/detect-action.git"
21 | },
22 | "keywords": [],
23 | "author": "Synopsys Inc.",
  "license": "Apache-2.0",
25 | "bugs": {
26 | "url": "https://github.com/synopsys-sig/detect-action/issues"
27 | },
28 | "homepage": "https://github.com/synopsys-sig/detect-action#readme",
29 | "dependencies": {
30 | "@actions/artifact": "^0.5.2",
31 | "@actions/core": "^1.6.0",
32 | "@actions/exec": "^1.1.0",
33 | "@actions/github": "^5.0.0",
34 | "@actions/glob": "^0.2.0",
35 | "@actions/tool-cache": "^1.7.1",
36 | "@octokit/rest": "^18.12.0",
37 | "typed-rest-client": "^1.8.6"
38 | },
39 | "devDependencies": {
40 | "@types/jest": "^27.0.3",
41 | "@types/node": "^16.11.6",
42 | "@typescript-eslint/eslint-plugin": "^5.3.0",
43 | "@typescript-eslint/parser": "^5.3.0",
44 | "@vercel/ncc": "^0.31.1",
45 | "eslint": "^8.1.0",
46 | "jest": "^27.4.5",
47 | "prettier": "^2.4.1",
48 | "ts-jest": "^27.1.2",
49 | "typescript": "^4.5.4"
50 | }
51 | }
--------------------------------------------------------------------------------
/src/_namespaces/Github.ts:
--------------------------------------------------------------------------------
import { RestEndpointMethodTypes } from '@octokit/rest'

// Response payload of GET /repos/{owner}/{repo}/pulls/{pull_number}
// (@octokit/rest > Endpoints.d.ts > PullsGetResponseData). Used to type
// context.payload.pull_request, which @actions/github leaves loosely typed.
export type PullRequest = RestEndpointMethodTypes['pulls']['get']['response']['data']
5 |
--------------------------------------------------------------------------------
/src/application-constants.ts:
--------------------------------------------------------------------------------
// Client identifier sent on Black Duck API calls (HttpClient/RestClient user agent).
export const APPLICATION_NAME = 'synopsys-sig/detect-action'
// Name of the GitHub check run this action creates and concludes.
export const CHECK_NAME = 'Black Duck Policy Check'
3 |
--------------------------------------------------------------------------------
/src/blackduck-api.ts:
--------------------------------------------------------------------------------
1 | import { debug, info, warning } from '@actions/core'
2 | import { IHeaders } from 'typed-rest-client/Interfaces'
3 | import { BearerCredentialHandler } from 'typed-rest-client/Handlers'
4 | import { HttpClient } from 'typed-rest-client/HttpClient'
5 | import { IRestResponse, RestClient } from 'typed-rest-client/RestClient'
6 | import { APPLICATION_NAME } from './application-constants'
7 | export interface IBlackduckView {
8 | _meta: {
9 | href: string
10 | }
11 | }
12 |
13 | export interface IBlackduckItemArray extends IBlackduckView {
14 | totalCount: number
15 | items: Array
16 | }
17 |
18 | export interface IUpgradeGuidance {
19 | version: string
20 | shortTerm: IRecommendedVersion
21 | longTerm: IRecommendedVersion
22 | }
23 |
24 | export interface IRecommendedVersion {
25 | version: string
26 | versionName: string
27 | vulnerabilityRisk: Object
28 | }
29 |
30 | export interface IComponentSearchResult {
31 | version: string
32 | }
33 |
34 | export interface IComponentVersion {
35 | license: {
36 | licenses: {
37 | license: string
38 | name: string
39 | }[]
40 | }
41 | _meta: {
42 | href: string
43 | }
44 | }
45 |
46 | export interface IComponentVulnerability {
47 | name: string
48 | severity: string
49 | useCvss3: boolean
50 | cvss2: ICvssView
51 | cvss3: ICvssView
52 | _meta: {
53 | href: string
54 | }
55 | }
56 |
57 | export interface ICvssView {
58 | baseScore: number
59 | severity: string
60 | }
61 |
62 | export interface IRapidScanResults {
63 | componentName: string
64 | versionName: string
65 | componentIdentifier: string
66 | violatingPolicyNames: string[]
67 | policyViolationVulnerabilities: IRapidScanVulnerability[]
68 | policyViolationLicenses: IRapidScanLicense[]
69 | _meta: {
70 | href: string
71 | }
72 | }
73 |
74 | export interface IRapidScanVulnerability {
75 | name: string
76 | }
77 |
78 | export interface IRapidScanLicense {
79 | licenseName: string
80 | _meta: {
81 | href: string
82 | }
83 | }
84 |
85 | export class BlackduckApiService {
86 | blackduckUrl: string
87 | blackduckApiToken: string
88 |
89 | constructor(blackduckUrl: string, blackduckApiToken: string) {
90 | this.blackduckUrl = cleanUrl(blackduckUrl)
91 | this.blackduckApiToken = blackduckApiToken
92 | }
93 |
94 | async getBearerToken(): Promise {
95 | info('Initiating authentication request to Black Duck...')
96 | const authenticationClient = new HttpClient(APPLICATION_NAME)
97 | const authorizationHeader: IHeaders = { Authorization: `token ${this.blackduckApiToken}` }
98 |
99 | return authenticationClient
100 | .post(`${this.blackduckUrl}/api/tokens/authenticate`, '', authorizationHeader)
101 | .then(authenticationResponse => authenticationResponse.readBody())
102 | .then(responseBody => JSON.parse(responseBody))
103 | .then(responseBodyJson => {
104 | info('Successfully authenticated with Black Duck')
105 | return responseBodyJson.bearerToken
106 | })
107 | }
108 |
109 | async checkIfEnabledBlackduckPoliciesExist(bearerToken: string): Promise {
110 | debug('Requesting policies from Black Duck...')
111 | return this.getPolicies(bearerToken, 1, true).then(blackduckPolicyPage => {
112 | const policyCount = blackduckPolicyPage?.result?.totalCount
113 | if (policyCount === undefined || policyCount === null) {
114 | warning('Failed to check Black Duck for policies')
115 | return false
116 | } else if (policyCount > 0) {
117 | debug(`${policyCount} Black Duck policies existed`)
118 | return true
119 | } else {
120 | info('No Black Duck policies exist')
121 | return false
122 | }
123 | })
124 | }
125 |
126 | async getUpgradeGuidanceFor(bearerToken: string, componentVersion: IComponentVersion): Promise> {
127 | return this.get(bearerToken, `${componentVersion._meta.href}/upgrade-guidance`)
128 | }
129 |
130 | async getComponentsMatching(bearerToken: string, componentIdentifier: string, limit: number = 10): Promise>> {
131 | const requestPath = `/api/components?q=${componentIdentifier}`
132 |
133 | return this.requestPage(bearerToken, requestPath, 0, limit)
134 | }
135 |
136 | async getComponentVersion(bearerToken: string, searchResult: IComponentSearchResult) {
137 | return this.get(bearerToken, searchResult.version)
138 | }
139 |
140 | async getComponentVersionMatching(bearerToken: string, componentIdentifier: string, limit: number = 10): Promise {
141 | const componentSearchResponse = await this.getComponentsMatching(bearerToken, componentIdentifier, limit)
142 | const firstMatchingComponentVersionUrl = componentSearchResponse?.result?.items[0].version
143 |
144 | let componentVersion = null
145 | if (firstMatchingComponentVersionUrl !== undefined) {
146 | const componentVersionResponse: IRestResponse = await this.get(bearerToken, firstMatchingComponentVersionUrl)
147 | componentVersion = componentVersionResponse?.result
148 | }
149 |
150 | return componentVersion
151 | }
152 |
153 | async getComponentVulnerabilties(bearerToken: string, componentVersion: IComponentVersion): Promise>> {
154 | return this.get(bearerToken, `${componentVersion._meta.href}/vulnerabilities`, 'application/vnd.blackducksoftware.vulnerability-4+json')
155 | }
156 |
157 | async getPolicies(bearerToken: string, limit: number = 10, enabled?: boolean) {
158 | const enabledFilter = enabled === undefined || enabled === null ? '' : `filter=policyRuleEnabled%3A${enabled}`
159 | const requestPath = `/api/policy-rules?${enabledFilter}`
160 |
161 | return this.requestPage(bearerToken, requestPath, 0, limit)
162 | }
163 |
164 | async requestPage(bearerToken: string, requestPath: string, offset: number, limit: number): Promise>> {
165 | return this.get(bearerToken, `${this.blackduckUrl}${requestPath}&offset=${offset}&limit=${limit}`)
166 | }
167 |
168 | async get(bearerToken: string, requestUrl: string, acceptHeader?: string): Promise> {
169 | const bearerTokenHandler = new BearerCredentialHandler(bearerToken, true)
170 | const blackduckRestClient = new RestClient(APPLICATION_NAME, this.blackduckUrl, [bearerTokenHandler])
171 |
172 | return blackduckRestClient.get(requestUrl, { acceptHeader })
173 | }
174 | }
175 |
176 | export function cleanUrl(blackduckUrl: string) {
177 | if (blackduckUrl && blackduckUrl.endsWith('/')) {
178 | return blackduckUrl.substr(0, blackduckUrl.length - 1)
179 | }
180 | return blackduckUrl
181 | }
182 |
--------------------------------------------------------------------------------
/src/comment.ts:
--------------------------------------------------------------------------------
import { debug } from '@actions/core'
import { context, getOctokit } from '@actions/github'
import { APPLICATION_NAME } from './application-constants'
import { GITHUB_TOKEN } from './inputs'

// Marker placed on the first line of comments created by this action so a
// later run can find and delete its own previous comment.
// NOTE(review): the marker is empty here — presumably it was an HTML comment
// (e.g. '<!-- ... -->') stripped during extraction. An empty marker would
// match (and delete) unrelated PR comments; confirm against upstream source.
const COMMENT_PREFACE = ''
7 |
8 | export async function commentOnPR(report: string): Promise {
9 | const octokit = getOctokit(GITHUB_TOKEN)
10 |
11 | const message = COMMENT_PREFACE.concat('\r\n', report)
12 |
13 | const contextIssue = context.issue.number
14 | const contextOwner = context.repo.owner
15 | const contextRepo = context.repo.repo
16 |
17 | debug('Gathering existing comments...')
18 | const { data: existingComments } = await octokit.rest.issues.listComments({
19 | issue_number: contextIssue,
20 | owner: contextOwner,
21 | repo: contextRepo
22 | })
23 |
24 | for (const comment of existingComments) {
25 | const firstLine = comment.body?.split('\r\n')[0]
26 | if (firstLine === COMMENT_PREFACE) {
27 | debug(`Existing comment from ${APPLICATION_NAME} found. Attempting to delete it...`)
28 | octokit.rest.issues.deleteComment({
29 | comment_id: comment.id,
30 | owner: contextOwner,
31 | repo: contextRepo
32 | })
33 | }
34 | }
35 |
36 | debug('Creating a new comment...')
37 | octokit.rest.issues.createComment({
38 | issue_number: contextIssue,
39 | owner: contextOwner,
40 | repo: contextRepo,
41 | body: message
42 | })
43 | debug('Successfully created a new comment!')
44 | }
45 |
--------------------------------------------------------------------------------
/src/detect/detect-manager.ts:
--------------------------------------------------------------------------------
1 | import { find, downloadTool, cacheFile } from '@actions/tool-cache'
2 | import { exec } from '@actions/exec'
3 | import path from 'path'
4 | import { DETECT_VERSION } from '../inputs'
5 |
// Repository hosting released synopsys-detect jars (see createDetectDownloadUrl).
const DETECT_BINARY_REPO_URL = 'https://sig-repo.synopsys.com'
// Key under which the Detect jar is stored in the runner's tool cache.
export const TOOL_NAME = 'detect'
8 |
9 | export async function findOrDownloadDetect(): Promise {
10 | const jarName = `synopsys-detect-${DETECT_VERSION}.jar`
11 |
12 | const cachedDetect = find(TOOL_NAME, DETECT_VERSION)
13 | if (cachedDetect) {
14 | return path.resolve(cachedDetect, jarName)
15 | }
16 |
17 | const detectDownloadUrl = createDetectDownloadUrl()
18 |
19 | return (
20 | downloadTool(detectDownloadUrl)
21 | .then(detectDownloadPath => cacheFile(detectDownloadPath, jarName, TOOL_NAME, DETECT_VERSION))
22 | //TODO: Jarsigner?
23 | .then(cachedFolder => path.resolve(cachedFolder, jarName))
24 | )
25 | }
26 |
27 | export async function runDetect(detectPath: string, detectArguments: string[]): Promise {
28 | return exec(`java`, ['-jar', detectPath].concat(detectArguments), { ignoreReturnCode: true })
29 | }
30 |
31 | function createDetectDownloadUrl(repoUrl = DETECT_BINARY_REPO_URL): string {
32 | return `${repoUrl}/bds-integrations-release/com/synopsys/integration/synopsys-detect/${DETECT_VERSION}/synopsys-detect-${DETECT_VERSION}.jar`
33 | }
34 |
--------------------------------------------------------------------------------
/src/detect/exit-codes.ts:
--------------------------------------------------------------------------------
// Detect exit codes this action distinguishes.
// 0: Detect completed without failures.
export const SUCCESS = 0
// 3: Detect completed but found policy-severity violations; main.ts treats
// this as a reportable policy violation rather than a hard failure.
export const POLICY_SEVERITY = 3
3 |
--------------------------------------------------------------------------------
/src/detect/report.ts:
--------------------------------------------------------------------------------
1 | import { warning } from '@actions/core'
2 | import { BlackduckApiService, IComponentVersion, IComponentVulnerability, IRapidScanLicense, IRapidScanResults, IRapidScanVulnerability, IRecommendedVersion, IUpgradeGuidance } from '../blackduck-api'
3 | import { BLACKDUCK_API_TOKEN, BLACKDUCK_URL } from '../inputs'
4 |
5 | export async function createRapidScanReport(policyViolations: IRapidScanResults[], blackduckApiService?: BlackduckApiService): Promise {
6 | const rapidScanReport: IComponentReport[] = []
7 |
8 | if (blackduckApiService === undefined) {
9 | blackduckApiService = new BlackduckApiService(BLACKDUCK_URL, BLACKDUCK_API_TOKEN)
10 | }
11 |
12 | const bearerToken = await blackduckApiService.getBearerToken()
13 |
14 | for (const policyViolation of policyViolations) {
15 | const componentIdentifier = policyViolation.componentIdentifier
16 | const componentVersion = await blackduckApiService.getComponentVersionMatching(bearerToken, componentIdentifier)
17 |
18 | let upgradeGuidance = undefined
19 | let vulnerabilities = undefined
20 | if (componentVersion !== null) {
21 | upgradeGuidance = await blackduckApiService
22 | .getUpgradeGuidanceFor(bearerToken, componentVersion)
23 | .then(response => {
24 | if (response.result === null) {
25 | warning(`Could not get upgrade guidance for ${componentIdentifier}: The upgrade guidance result was empty`)
26 | return undefined
27 | }
28 |
29 | return response.result
30 | })
31 | .catch(reason => {
32 | warning(`Could not get upgrade guidance for ${componentIdentifier}: ${reason}`)
33 | return undefined
34 | })
35 |
36 | const vulnerabilityResponse = await blackduckApiService.getComponentVulnerabilties(bearerToken, componentVersion)
37 | vulnerabilities = vulnerabilityResponse?.result?.items
38 | }
39 |
40 | const componentVersionOrUndefined = componentVersion === null ? undefined : componentVersion
41 | const componentReport = createComponentReport(policyViolation, componentVersionOrUndefined, upgradeGuidance, vulnerabilities)
42 | rapidScanReport.push(componentReport)
43 | }
44 |
45 | return rapidScanReport
46 | }
47 | export interface IComponentReport {
48 | violatedPolicies: string[]
49 | name: string
50 | href?: string
51 | licenses: ILicenseReport[]
52 | vulnerabilities: IVulnerabilityReport[]
53 | shortTermUpgrade?: IUpgradeReport
54 | longTermUpgrade?: IUpgradeReport
55 | }
56 |
57 | export function createComponentReport(violation: IRapidScanResults, componentVersion?: IComponentVersion, upgradeGuidance?: IUpgradeGuidance, vulnerabilities?: IComponentVulnerability[]): IComponentReport {
58 | return {
59 | violatedPolicies: violation.violatingPolicyNames,
60 | name: `${violation.componentName} ${violation.versionName}`,
61 | href: componentVersion?._meta.href,
62 | licenses: createComponentLicenseReports(violation.policyViolationLicenses, componentVersion),
63 | vulnerabilities: createComponentVulnerabilityReports(violation.policyViolationVulnerabilities, vulnerabilities),
64 | shortTermUpgrade: createUpgradeReport(upgradeGuidance?.shortTerm),
65 | longTermUpgrade: createUpgradeReport(upgradeGuidance?.longTerm)
66 | }
67 | }
68 |
69 | export function createComponentLicenseReports(policyViolatingLicenses: IRapidScanLicense[], componentVersion?: IComponentVersion): ILicenseReport[] {
70 | let licenseReport = []
71 | if (componentVersion === undefined) {
72 | licenseReport = policyViolatingLicenses.map(license => createLicenseReport(license.licenseName, license._meta.href, true))
73 | } else {
74 | const violatingPolicyLicenseNames = policyViolatingLicenses.map(license => license.licenseName)
75 | licenseReport = componentVersion.license.licenses.map(license => createLicenseReport(license.name, license.license, violatingPolicyLicenseNames.includes(license.name)))
76 | }
77 |
78 | return licenseReport
79 | }
80 |
81 | export function createComponentVulnerabilityReports(policyViolatingVulnerabilities: IRapidScanVulnerability[], componentVulnerabilities?: IComponentVulnerability[]): IVulnerabilityReport[] {
82 | let vulnerabilityReport = []
83 | if (componentVulnerabilities === undefined) {
84 | vulnerabilityReport = policyViolatingVulnerabilities.map(vulnerability => createVulnerabilityReport(vulnerability.name, true))
85 | } else {
86 | const violatingPolicyVulnerabilityNames = policyViolatingVulnerabilities.map(vulnerability => vulnerability.name)
87 | vulnerabilityReport = componentVulnerabilities.map(vulnerability => {
88 | const compVulnBaseScore = vulnerability.useCvss3 ? vulnerability.cvss3.baseScore : vulnerability.cvss2.baseScore
89 | return createVulnerabilityReport(vulnerability.name, violatingPolicyVulnerabilityNames.includes(vulnerability.name), vulnerability._meta.href, compVulnBaseScore, vulnerability.severity)
90 | })
91 | }
92 |
93 | return vulnerabilityReport
94 | }
95 |
96 | export interface ILicenseReport {
97 | name: string
98 | href: string
99 | violatesPolicy: boolean
100 | }
101 |
102 | export function createLicenseReport(name: string, href: string, violatesPolicy: boolean): ILicenseReport {
103 | return {
104 | name: name,
105 | href: href,
106 | violatesPolicy: violatesPolicy
107 | }
108 | }
109 |
110 | export interface IVulnerabilityReport {
111 | name: string
112 | violatesPolicy: boolean
113 | href?: string
114 | cvssScore?: number
115 | severity?: string
116 | }
117 |
118 | export function createVulnerabilityReport(name: string, violatesPolicy: boolean, href?: string, cvssScore?: number, severity?: string): IVulnerabilityReport {
119 | return {
120 | name: name,
121 | violatesPolicy: violatesPolicy,
122 | href: href,
123 | cvssScore: cvssScore,
124 | severity: severity
125 | }
126 | }
127 |
128 | export interface IUpgradeReport {
129 | name: string
130 | href: string
131 | vulnerabilityCount: number
132 | }
133 |
134 | export function createUpgradeReport(recommendedVersion?: IRecommendedVersion): IUpgradeReport | undefined {
135 | if (recommendedVersion === undefined) {
136 | return undefined
137 | }
138 |
139 | return {
140 | name: recommendedVersion.versionName,
141 | href: recommendedVersion.version,
142 | vulnerabilityCount: Object.values(recommendedVersion.vulnerabilityRisk).reduce((accumulatedValues, value) => accumulatedValues + value, 0)
143 | }
144 | }
145 |
--------------------------------------------------------------------------------
/src/detect/reporting.ts:
--------------------------------------------------------------------------------
1 | import { IRapidScanResults } from '../blackduck-api'
2 | import { createRapidScanReport, IComponentReport } from './report'
3 |
// Markdown header (plus separator row) for the violations table; data rows are appended by createComponentRow.
export const TABLE_HEADER = '| Policies Violated | Dependency | License(s) | Vulnerabilities | Short Term Recommended Upgrade | Long Term Recommended Upgrade |\r\n' + '|-|-|-|-|-|-|\r\n'
5 |
6 | export async function createRapidScanReportString(policyViolations: IRapidScanResults[], policyCheckWillFail: boolean): Promise {
7 | let message = ''
8 | if (policyViolations.length == 0) {
9 | message = message.concat('# :white_check_mark: None of your dependencies violate policy!')
10 | } else {
11 | const violationSymbol = policyCheckWillFail ? ':x:' : ':warning:'
12 | message = message.concat(`# ${violationSymbol} Found dependencies violating policy!\r\n\r\n`)
13 |
14 | const componentReports = await createRapidScanReport(policyViolations)
15 | const tableBody = componentReports.map(componentReport => createComponentRow(componentReport)).join('\r\n')
16 | const reportTable = TABLE_HEADER.concat(tableBody)
17 | message = message.concat(reportTable)
18 | }
19 |
20 | return message
21 | }
22 |
23 | function createComponentRow(component: IComponentReport): string {
24 | const violatedPolicies = component.violatedPolicies === undefined ? '' : component.violatedPolicies.join('
')
25 | const componentInViolation = component?.href ? `[${component.name}](${component.href})` : component.name
26 | const componentLicenses = component.licenses.map(license => `${license.violatesPolicy ? ':x: ' : ''}[${license.name}](${license.href})`).join('
')
27 | const vulnerabilities = component.vulnerabilities.map(vulnerability => `${vulnerability.violatesPolicy ? ':x: ' : ''}[${vulnerability.name}](${vulnerability.href})${vulnerability.cvssScore && vulnerability.severity ? ` ${vulnerability.severity}: CVSS ${vulnerability.cvssScore}` : ''}`).join('
')
28 | const shortTermString = component.shortTermUpgrade ? `[${component.shortTermUpgrade.name}](${component.shortTermUpgrade.href}) (${component.shortTermUpgrade.vulnerabilityCount} known vulnerabilities)` : ''
29 | const longTermString = component.longTermUpgrade ? `[${component.longTermUpgrade.name}](${component.longTermUpgrade.href}) (${component.longTermUpgrade.vulnerabilityCount} known vulnerabilities)` : ''
30 |
31 | return `| ${violatedPolicies} | ${componentInViolation} | ${componentLicenses} | ${vulnerabilities} | ${shortTermString} | ${longTermString} |`
32 | }
33 |
--------------------------------------------------------------------------------
/src/github/check.ts:
--------------------------------------------------------------------------------
1 | import { debug, info, warning } from '@actions/core'
2 | import { context, getOctokit } from '@actions/github'
3 | import { getSha } from './github-context'
4 | import { GITHUB_TOKEN } from '../inputs'
5 |
6 | export async function createCheck(checkName: string): Promise {
7 | const octokit = getOctokit(GITHUB_TOKEN)
8 |
9 | const head_sha = getSha()
10 |
11 | info(`Creating ${checkName}...`)
12 | const response = await octokit.rest.checks.create({
13 | owner: context.repo.owner,
14 | repo: context.repo.repo,
15 | name: checkName,
16 | head_sha
17 | })
18 |
19 | if (response.status !== 201) {
20 | warning(`Unexpected status code recieved when creating ${checkName}: ${response.status}`)
21 | debug(JSON.stringify(response, null, 2))
22 | } else {
23 | info(`${checkName} created`)
24 | }
25 |
26 | return new GitHubCheck(checkName, response.data.id)
27 | }
28 |
29 | export class GitHubCheck {
30 | checkName: string
31 | checkRunId: number
32 |
33 | constructor(checkName: string, checkRunId: number) {
34 | this.checkName = checkName
35 | this.checkRunId = checkRunId
36 | }
37 |
38 | async passCheck(summary: string, text: string) {
39 | return this.finishCheck('success', summary, text)
40 | }
41 |
42 | async failCheck(summary: string, text: string) {
43 | return this.finishCheck('failure', summary, text)
44 | }
45 |
46 | async skipCheck() {
47 | return this.finishCheck('skipped', `${this.checkName} was skipped`, '')
48 | }
49 |
50 | async cancelCheck() {
51 | return this.finishCheck('cancelled', `${this.checkName} Check could not be completed`, `Something went wrong and the ${this.checkName} could not be completed. Check your action logs for more details.`)
52 | }
53 |
54 | private async finishCheck(conclusion: string, summary: string, text: string) {
55 | const octokit = getOctokit(GITHUB_TOKEN)
56 |
57 | const response = await octokit.rest.checks.update({
58 | owner: context.repo.owner,
59 | repo: context.repo.repo,
60 | check_run_id: this.checkRunId,
61 | status: 'completed',
62 | conclusion,
63 | output: {
64 | title: this.checkName,
65 | summary,
66 | text
67 | }
68 | })
69 |
70 | if (response.status !== 200) {
71 | warning(`Unexpected status code recieved when creating check: ${response.status}`)
72 | debug(JSON.stringify(response, null, 2))
73 | } else {
74 | info(`${this.checkName} updated`)
75 | }
76 | }
77 | }
78 |
--------------------------------------------------------------------------------
/src/github/github-context.ts:
--------------------------------------------------------------------------------
import { context } from '@actions/github'
import { PullRequest } from '../_namespaces/Github'

// Webhook event names whose payloads carry a pull_request object.
const prEvents = ['pull_request', 'pull_request_review', 'pull_request_review_comment']
5 |
6 | export function isPullRequest(): boolean {
7 | return prEvents.includes(context.eventName)
8 | }
9 |
10 | export function getSha(): string {
11 | let sha = context.sha
12 | if (isPullRequest()) {
13 | const pull = context.payload.pull_request as PullRequest
14 | if (pull?.head.sha) {
15 | sha = pull?.head.sha
16 | }
17 | }
18 |
19 | return sha
20 | }
21 |
--------------------------------------------------------------------------------
/src/github/upload-artifacts.ts:
--------------------------------------------------------------------------------
1 | import { warning, info } from '@actions/core'
2 | import { create, UploadOptions } from '@actions/artifact'
3 |
4 | export async function uploadArtifact(name: string, outputPath: string, files: string[]): Promise {
5 | const artifactClient = create()
6 | const options: UploadOptions = {
7 | continueOnError: false,
8 | retentionDays: 0
9 | }
10 |
11 | info(`Attempting to upload ${name}...`)
12 | const uploadResponse = await artifactClient.uploadArtifact(name, files, outputPath, options)
13 |
14 | if (files.length === 0) {
15 | warning(`Expected to upload ${name}, but the action couldn't find any. Was output-path set correctly?`)
16 | } else if (uploadResponse.failedItems.length > 0) {
17 | warning(`An error was encountered when uploading ${uploadResponse.artifactName}. There were ${uploadResponse.failedItems.length} items that failed to upload.`)
18 | } else {
19 | info(`Artifact ${uploadResponse.artifactName} has been successfully uploaded!`)
20 | }
21 | }
22 |
--------------------------------------------------------------------------------
/src/inputs.ts:
--------------------------------------------------------------------------------
import { getBooleanInput, getInput } from '@actions/core'

// Action inputs, read once at module load (declared in action.yml).

// Token used for the check-run and PR-comment API calls.
export const GITHUB_TOKEN = getInput('github-token')
// Black Duck server connection details.
export const BLACKDUCK_URL = getInput('blackduck-url')
export const BLACKDUCK_API_TOKEN = getInput('blackduck-api-token')
// Version of Synopsys Detect to download and run.
export const DETECT_VERSION = getInput('detect-version')
// Upper-cased so comparisons like SCAN_MODE === 'RAPID' are case-insensitive.
export const SCAN_MODE = getInput('scan-mode').toUpperCase()
// NOTE(review): getBooleanInput throws when the input is unset or non-boolean —
// presumably action.yml supplies a default; confirm.
export const FAIL_ON_ALL_POLICY_SEVERITIES = getBooleanInput('fail-on-all-policy-severities')
// Optional override for where Detect writes its output files.
export const OUTPUT_PATH_OVERRIDE = getInput('output-path-override')
// Forwarded to Detect as --blackduck.trust.cert.
export const DETECT_TRUST_CERT = getInput('detect-trust-cert')
11 |
--------------------------------------------------------------------------------
/src/main.ts:
--------------------------------------------------------------------------------
1 | import { info, warning, setFailed, debug } from '@actions/core'
2 | import { create } from '@actions/glob'
3 | import path from 'path'
4 | import fs from 'fs'
5 | import { BlackduckApiService, IBlackduckView, IRapidScanResults } from './blackduck-api'
6 | import { createCheck, GitHubCheck } from './github/check'
7 | import { commentOnPR } from './comment'
8 | import { POLICY_SEVERITY, SUCCESS } from './detect/exit-codes'
9 | import { TOOL_NAME, findOrDownloadDetect, runDetect } from './detect/detect-manager'
10 | import { isPullRequest } from './github/github-context'
11 | import { BLACKDUCK_API_TOKEN, BLACKDUCK_URL, DETECT_TRUST_CERT, DETECT_VERSION, FAIL_ON_ALL_POLICY_SEVERITIES, OUTPUT_PATH_OVERRIDE, SCAN_MODE } from './inputs'
12 | import { createRapidScanReportString } from './detect/reporting'
13 | import { uploadArtifact } from './github/upload-artifacts'
14 | import { CHECK_NAME } from './application-constants'
15 |
16 | export async function run() {
17 | let blackduckPolicyCheck: any
18 | try {
19 | blackduckPolicyCheck = await createCheck(CHECK_NAME)
20 | } catch (error) {
21 | throw error
22 | }
23 | runWithPolicyCheck(blackduckPolicyCheck).catch(unhandledError => {
24 | debug('Canceling policy check because of an unhandled error.')
25 | blackduckPolicyCheck.cancelCheck()
26 | setFailed(`Failed due to an unhandled error: '${unhandledError}'`)
27 | })
28 | }
29 |
30 | export async function runWithPolicyCheck(blackduckPolicyCheck: GitHubCheck): Promise {
31 | info(`detect-version: ${DETECT_VERSION}`)
32 | info(`output-path-override: ${OUTPUT_PATH_OVERRIDE}`)
33 | info(`scan-mode: ${SCAN_MODE}`)
34 |
35 | //Setting process environment for certificate issue fix
36 | if (!process.env['NODE_TLS_REJECT_UNAUTHORIZED']) {
37 | info('NODE_TLS_REJECT_UNAUTHORIZED is not set, disabling strict certificate check')
38 | process.env['NODE_TLS_REJECT_UNAUTHORIZED'] = '0'
39 | }
40 |
41 | const runnerTemp = process.env.RUNNER_TEMP
42 | let outputPath = ''
43 | if (OUTPUT_PATH_OVERRIDE !== '') {
44 | outputPath = OUTPUT_PATH_OVERRIDE
45 | } else if (runnerTemp === undefined) {
46 | setFailed('$RUNNER_TEMP is not defined and output-path-override was not set. Cannot determine where to store output files.')
47 | blackduckPolicyCheck.cancelCheck()
48 | return
49 | } else {
50 | outputPath = path.resolve(runnerTemp, 'blackduck')
51 | }
52 |
53 | if (SCAN_MODE === 'RAPID') {
54 | info('Checking that you have at least one enabled policy...')
55 |
56 | const blackduckApiService = new BlackduckApiService(BLACKDUCK_URL, BLACKDUCK_API_TOKEN)
57 | const blackDuckBearerToken = await blackduckApiService.getBearerToken()
58 | let policiesExist: boolean | void = await blackduckApiService.checkIfEnabledBlackduckPoliciesExist(blackDuckBearerToken).catch(reason => {
59 | setFailed(`Could not verify whether policies existed: ${reason}`)
60 | })
61 |
62 | if (policiesExist === undefined) {
63 | debug('Could not determine if policies existed. Canceling policy check.')
64 | blackduckPolicyCheck.cancelCheck()
65 | return
66 | } else if (!policiesExist) {
67 | setFailed(`Could not run ${TOOL_NAME} using ${SCAN_MODE} scan mode. No enabled policies found on the specified Black Duck server.`)
68 | return
69 | } else {
70 | info(`You have at least one enabled policy, executing ${TOOL_NAME} in ${SCAN_MODE} scan mode...`)
71 | }
72 | }
73 |
74 | const detectArgs = [`--blackduck.trust.cert=${DETECT_TRUST_CERT}`, `--blackduck.url=${BLACKDUCK_URL}`, `--blackduck.api.token=${BLACKDUCK_API_TOKEN}`, `--detect.blackduck.scan.mode=${SCAN_MODE}`, `--detect.output.path=${outputPath}`, `--detect.scan.output.path=${outputPath}`]
75 |
76 | const detectPath = await findOrDownloadDetect().catch(reason => {
77 | setFailed(`Could not download ${TOOL_NAME} ${DETECT_VERSION}: ${reason}`)
78 | })
79 |
80 | if (detectPath === undefined) {
81 | debug(`Could not determine ${TOOL_NAME} path. Canceling policy check.`)
82 | blackduckPolicyCheck.cancelCheck()
83 | return
84 | }
85 |
86 | const detectExitCode = await runDetect(detectPath, detectArgs).catch(reason => {
87 | setFailed(`Could not execute ${TOOL_NAME} ${DETECT_VERSION}: ${reason}`)
88 | })
89 |
90 | if (detectExitCode === undefined) {
91 | debug(`Could not determine ${TOOL_NAME} exit code. Canceling policy check.`)
92 | blackduckPolicyCheck.cancelCheck()
93 | return
94 | } else if (detectExitCode > 0 && detectExitCode != POLICY_SEVERITY) {
95 | setFailed(`Detect failed with exit code: ${detectExitCode}. Check the logs for more information.`)
96 | return
97 | }
98 |
99 | info(`${TOOL_NAME} executed successfully.`)
100 |
101 | let hasPolicyViolations = false
102 |
103 | if (SCAN_MODE === 'RAPID') {
104 | info(`${TOOL_NAME} executed in RAPID mode. Beginning reporting...`)
105 |
106 | const jsonGlobber = await create(`${outputPath}/*.json`)
107 | const scanJsonPaths = await jsonGlobber.glob()
108 | uploadArtifact('Rapid Scan JSON', outputPath, scanJsonPaths)
109 |
110 | const scanJsonPath = scanJsonPaths[0]
111 | const rawdata = fs.readFileSync(scanJsonPath)
112 | const policyViolations = JSON.parse(rawdata.toString()) as IRapidScanResults[]
113 |
114 | hasPolicyViolations = policyViolations.length > 0
115 | debug(`Policy Violations Present: ${hasPolicyViolations}`)
116 |
117 | const failureConditionsMet = detectExitCode === POLICY_SEVERITY || FAIL_ON_ALL_POLICY_SEVERITIES
118 | const rapidScanReport = await createRapidScanReportString(policyViolations, hasPolicyViolations && failureConditionsMet)
119 |
120 | if (isPullRequest()) {
121 | info('This is a pull request, commenting...')
122 | commentOnPR(rapidScanReport)
123 | info('Successfully commented on PR.')
124 | }
125 |
126 | if (hasPolicyViolations) {
127 | if (failureConditionsMet) {
128 | blackduckPolicyCheck.failCheck('Components found that violate your Black Duck Policies!', rapidScanReport)
129 | } else {
130 | blackduckPolicyCheck.passCheck('No components violated your BLOCKER or CRITICAL Black Duck Policies!', rapidScanReport)
131 | }
132 | } else {
133 | blackduckPolicyCheck.passCheck('No components found that violate your Black Duck policies!', rapidScanReport)
134 | }
135 | info('Reporting complete.')
136 | } else {
137 | info(`${TOOL_NAME} executed in ${SCAN_MODE} mode. Skipping policy check.`)
138 | blackduckPolicyCheck.skipCheck()
139 | }
140 |
141 | const diagnosticMode = process.env.DETECT_DIAGNOSTIC?.toLowerCase() === 'true'
142 | const extendedDiagnosticMode = process.env.DETECT_DIAGNOSTIC_EXTENDED?.toLowerCase() === 'true'
143 | if (diagnosticMode || extendedDiagnosticMode) {
144 | const diagnosticGlobber = await create(`${outputPath}/runs/*.zip`)
145 | const diagnosticZip = await diagnosticGlobber.glob()
146 | uploadArtifact('Detect Diagnostic Zip', outputPath, diagnosticZip)
147 | }
148 |
149 | if (hasPolicyViolations) {
150 | warning('Found dependencies violating policy!')
151 | } else if (detectExitCode > 0) {
152 | warning('Dependency check failed! See Detect output for more information.')
153 | } else if (detectExitCode === SUCCESS) {
154 | info('None of your dependencies violate your Black Duck policies!')
155 | }
156 | }
157 |
158 | run().catch(error => {
159 | if (error.message != undefined) {
160 | setFailed(error.message)
161 | } else {
162 | setFailed(error)
163 | }
164 | })
165 |
--------------------------------------------------------------------------------
/tests/unit/github-context.test.ts:
--------------------------------------------------------------------------------
1 | describe('PR events', () => {
2 | let ghContext: any
3 |
4 | beforeAll(() => {
5 | jest.resetModules()
6 | jest.mock('@actions/github', () => {
7 | return {
8 | context: {
9 | eventName: 'pull_request'
10 | }
11 | }
12 | })
13 | })
14 |
15 | test('isPullRequest() returns true', () => {
16 | ghContext = require('../../src/github/github-context')
17 | expect(ghContext.isPullRequest()).toBeTruthy()
18 | })
19 | })
20 |
21 | describe('Non-PR events', () => {
22 | let ghContext: any
23 |
24 | beforeAll(() => {
25 | jest.resetModules()
26 | jest.mock('@actions/github', () => {
27 | return {
28 | context: {
29 | eventName: 'fake_event_type'
30 | }
31 | }
32 | })
33 | })
34 |
35 | test('isPullRequest() returns false', () => {
36 | ghContext = require('../../src/github/github-context')
37 | expect(ghContext.isPullRequest()).toBeFalsy()
38 | })
39 | })
40 |
--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | /* Visit https://aka.ms/tsconfig.json to read more about this file */
4 |
5 | /* Projects */
6 | // "incremental": true, /* Enable incremental compilation */
7 | // "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */
8 | // "tsBuildInfoFile": "./", /* Specify the folder for .tsbuildinfo incremental compilation files. */
9 | // "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects */
10 | // "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */
11 | // "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */
12 |
13 | /* Language and Environment */
14 | "target": "es6", /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */
15 | // "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. */
16 | // "jsx": "preserve", /* Specify what JSX code is generated. */
17 | // "experimentalDecorators": true, /* Enable experimental support for TC39 stage 2 draft decorators. */
18 | // "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */
19 | // "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h' */
20 | // "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */
21 |     // "jsxImportSource": "",                          /* Specify module specifier used to import the JSX factory functions when using `jsx: react-jsx*`. */
22 | // "reactNamespace": "", /* Specify the object invoked for `createElement`. This only applies when targeting `react` JSX emit. */
23 | // "noLib": true, /* Disable including any library files, including the default lib.d.ts. */
24 | // "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */
25 |
26 | /* Modules */
27 | "module": "commonjs", /* Specify what module code is generated. */
28 | "rootDir": "./src", /* Specify the root folder within your source files. */
29 | // "moduleResolution": "node", /* Specify how TypeScript looks up a file from a given module specifier. */
30 | // "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */
31 | // "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */
32 | // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */
33 | // "typeRoots": [], /* Specify multiple folders that act like `./node_modules/@types`. */
34 | // "types": [], /* Specify type package names to be included without being referenced in a source file. */
35 | // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */
36 | // "resolveJsonModule": true, /* Enable importing .json files */
37 |     // "noResolve": true,                              /* Disallow `import`s, `require`s or `<reference>`s from expanding the number of files TypeScript should add to a project. */
38 |
39 | /* JavaScript Support */
40 | // "allowJs": true, /* Allow JavaScript files to be a part of your program. Use the `checkJS` option to get errors from these files. */
41 | // "checkJs": true, /* Enable error reporting in type-checked JavaScript files. */
42 | // "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from `node_modules`. Only applicable with `allowJs`. */
43 |
44 | /* Emit */
45 | // "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */
46 | // "declarationMap": true, /* Create sourcemaps for d.ts files. */
47 | // "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */
48 | // "sourceMap": true, /* Create source map files for emitted JavaScript files. */
49 | // "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If `declaration` is true, also designates a file that bundles all .d.ts output. */
50 | "outDir": "./lib", /* Specify an output folder for all emitted files. */
51 | // "removeComments": true, /* Disable emitting comments. */
52 | // "noEmit": true, /* Disable emitting files from a compilation. */
53 | // "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */
54 | // "importsNotUsedAsValues": "remove", /* Specify emit/checking behavior for imports that are only used for types */
55 | // "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */
56 | // "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */
57 | // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */
58 | // "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */
59 | // "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */
60 | // "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */
61 | // "newLine": "crlf", /* Set the newline character for emitting files. */
62 | // "stripInternal": true, /* Disable emitting declarations that have `@internal` in their JSDoc comments. */
63 | // "noEmitHelpers": true, /* Disable generating custom helper functions like `__extends` in compiled output. */
64 | // "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */
65 | // "preserveConstEnums": true, /* Disable erasing `const enum` declarations in generated code. */
66 | // "declarationDir": "./", /* Specify the output directory for generated declaration files. */
67 |
68 | /* Interop Constraints */
69 | // "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. */
70 | // "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. */
71 | "esModuleInterop": true, /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables `allowSyntheticDefaultImports` for type compatibility. */
72 | // "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */
73 | "forceConsistentCasingInFileNames": true, /* Ensure that casing is correct in imports. */
74 |
75 | /* Type Checking */
76 | "strict": true, /* Enable all strict type-checking options. */
77 | // "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied `any` type.. */
78 | // "strictNullChecks": true, /* When type checking, take into account `null` and `undefined`. */
79 | // "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */
80 | // "strictBindCallApply": true, /* Check that the arguments for `bind`, `call`, and `apply` methods match the original function. */
81 | // "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */
82 | // "noImplicitThis": true, /* Enable error reporting when `this` is given the type `any`. */
83 | // "useUnknownInCatchVariables": true, /* Type catch clause variables as 'unknown' instead of 'any'. */
84 | // "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */
85 | // "noUnusedLocals": true, /* Enable error reporting when a local variables aren't read. */
86 | // "noUnusedParameters": true, /* Raise an error when a function parameter isn't read */
87 | // "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */
88 | // "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */
89 | // "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */
90 | // "noUncheckedIndexedAccess": true, /* Include 'undefined' in index signature results */
91 | // "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */
92 | // "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type */
93 | // "allowUnusedLabels": true, /* Disable error reporting for unused labels. */
94 | // "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */
95 |
96 | /* Completeness */
97 | // "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */
98 | "skipLibCheck": true /* Skip type checking all .d.ts files. */
99 | },
100 | "include": ["src"]
101 | }
102 |
--------------------------------------------------------------------------------