├── .devcontainer └── devcontainer.json ├── .editorconfig ├── .eslintignore ├── .eslintrc.json ├── .gitattributes ├── .github ├── codeql │ └── codeql-config.yml ├── dependabot.yml └── workflows │ ├── codeql-analysis.yml │ ├── integration-tests.yml │ ├── pr-checks.yml │ └── script │ ├── check-js.sh │ └── check-json-schemas.sh ├── .gitignore ├── .vscode ├── extensions.json ├── settings.json └── tasks.json ├── CODEOWNERS ├── CONTRIBUTING.md ├── LICENSE ├── README.md ├── build.mjs ├── dist ├── query.js ├── update-repo-task-status.js └── update-repo-task-statuses.js ├── jest.config.ts ├── package-lock.json ├── package.json ├── query └── action.yml ├── script ├── codeql ├── find-deadcode.ts ├── generate-json-schemas └── test ├── src ├── api-client.ts ├── azure-client.ts ├── codeql-cli.test.ts ├── codeql-cli.ts ├── codeql-setup.ts ├── codeql-version.test.ts ├── codeql-version.ts ├── codeql.test.ts ├── codeql.ts ├── deserialize.ts ├── download.ts ├── errors.ts ├── gh-api-client.ts ├── http-error.ts ├── inputs.ts ├── json-schemas │ ├── BQRSInfo.json │ ├── Instructions.json │ ├── Policy.json │ ├── QueryMetadata.json │ ├── RepoArray.json │ ├── RepoTask.json │ ├── ResolvedDatabase.json │ ├── ResolvedQueries.json │ └── Sarif.json ├── json-validation.test.ts ├── json-validation.ts ├── query-run-memory.ts ├── query.ts ├── retry-helper.ts ├── set-repo-task-statuses.ts ├── update-repo-task-status.ts ├── update-repo-task-statuses.ts ├── util.ts ├── yaml.test.ts └── yaml.ts ├── testdata ├── test_pack │ ├── .gitignore │ ├── qlpack.yml │ └── x │ │ ├── query.ql │ │ └── y │ │ └── MyLib.qll └── test_pack_multiple_queries │ ├── .gitignore │ ├── qlpack.yml │ ├── x │ ├── query.ql │ └── y │ │ └── MyLib.qll │ └── z │ ├── query.ql │ └── y │ └── MyLib.qll ├── tsconfig.json ├── tsconfig.lint.json ├── update-repo-task-status └── action.yml └── update-repo-task-statuses └── action.yml /.devcontainer/devcontainer.json: -------------------------------------------------------------------------------- 
1 | { 2 | "extensions": [ 3 | "github.copilot" 4 | ] 5 | } 6 | -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | root = true 2 | 3 | [*] 4 | end_of_line = lf 5 | insert_final_newline = true 6 | trim_trailing_whitespace = true 7 | 8 | [*.ts] 9 | indent_style = space 10 | indent_size = 2 11 | 12 | [*.json] 13 | indent_style = space 14 | indent_size = 2 15 | -------------------------------------------------------------------------------- /.eslintignore: -------------------------------------------------------------------------------- 1 | dist/** 2 | lib/** 3 | jest.config.ts 4 | -------------------------------------------------------------------------------- /.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "parser": "@typescript-eslint/parser", 3 | "parserOptions": { 4 | "project": "./tsconfig.lint.json" 5 | }, 6 | "plugins": [ 7 | "@typescript-eslint", 8 | "filenames", 9 | "github", 10 | "import", 11 | "no-async-foreach" 12 | ], 13 | "extends": [ 14 | "eslint:recommended", 15 | "plugin:@typescript-eslint/recommended", 16 | "plugin:@typescript-eslint/recommended-requiring-type-checking", 17 | "plugin:github/recommended", 18 | "plugin:github/typescript", 19 | "plugin:prettier/recommended" 20 | ], 21 | "rules": { 22 | "sort-imports": "off", 23 | "i18n-text/no-en": "off", 24 | "filenames/match-regex": [ 25 | "error", 26 | "^[a-z0-9-]+(\\.test)?$" 27 | ], 28 | "import/extensions": ["error", { 29 | "json": "always" 30 | }], 31 | "import/named": "off", 32 | "import/no-amd": "error", 33 | "import/no-commonjs": "error", 34 | "import/no-dynamic-require": "error", 35 | "import/no-extraneous-dependencies": [ 36 | "error", 37 | { 38 | "devDependencies": true 39 | } 40 | ], 41 | "import/no-namespace": "off", 42 | "import/no-unresolved": "error", 43 | "import/no-webpack-loader-syntax": "error", 
44 | "import/order": [ 45 | "error", 46 | { 47 | "alphabetize": { 48 | "order": "asc" 49 | }, 50 | "newlines-between": "always" 51 | } 52 | ], 53 | "no-async-foreach/no-async-foreach": "error", 54 | "no-console": "off", 55 | "no-sequences": "error", 56 | "no-shadow": "off", 57 | "@typescript-eslint/naming-convention": "error", 58 | "eslint-comments/no-use": [ 59 | "error", 60 | { 61 | "allow": [ 62 | "eslint-disable", 63 | "eslint-enable", 64 | "eslint-disable-next-line" 65 | ] 66 | } 67 | ], 68 | "@typescript-eslint/no-shadow": "error", 69 | "one-var": [ 70 | "error", 71 | "never" 72 | ], 73 | "@typescript-eslint/restrict-template-expressions": "off" 74 | }, 75 | "settings": { 76 | "import/parsers": { 77 | "@typescript-eslint/parser": [".ts", ".tsx"] 78 | }, 79 | "import/resolver": { 80 | "typescript": {} 81 | } 82 | }, 83 | "overrides": [ 84 | { 85 | "files": ["**/*.test.ts"], 86 | "env": { 87 | "jest": true 88 | } 89 | } 90 | ] 91 | } 92 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | dist/*.js linguist-generated=true 2 | 3 | # Reduce incidence of needless merge conflicts on CHANGELOG.md 4 | # The man page at 5 | # https://mirrors.edge.kernel.org/pub/software/scm/git/docs/gitattributes.html 6 | # suggests that this might interleave lines arbitrarily, but empirically 7 | # it keeps added chunks contiguous 8 | CHANGELOG.md merge=union 9 | -------------------------------------------------------------------------------- /.github/codeql/codeql-config.yml: -------------------------------------------------------------------------------- 1 | paths-ignore: 2 | - dist 3 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | # Please see the documentation for all configuration options: 2 | # 
https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates 3 | 4 | version: 2 5 | updates: 6 | - package-ecosystem: "github-actions" 7 | directory: "/" 8 | schedule: 9 | interval: "weekly" 10 | - package-ecosystem: "npm" 11 | directory: "/" 12 | schedule: 13 | interval: "weekly" 14 | groups: 15 | actions: 16 | patterns: 17 | - "@actions/*" 18 | octokit: 19 | patterns: 20 | - "@octokit/*" 21 | update-types: 22 | - "minor" 23 | - "patch" 24 | typescript-eslint: 25 | patterns: 26 | - "@typescript-eslint/*" 27 | -------------------------------------------------------------------------------- /.github/workflows/codeql-analysis.yml: -------------------------------------------------------------------------------- 1 | name: "CodeQL" 2 | 3 | on: 4 | push: 5 | branches: [main] 6 | pull_request: 7 | # The branches below must be a subset of the branches above 8 | branches: [main] 9 | schedule: 10 | - cron: "16 4 * * 6" 11 | 12 | jobs: 13 | analyze: 14 | name: Analyze 15 | runs-on: ubuntu-latest 16 | permissions: 17 | actions: read 18 | contents: read 19 | security-events: write 20 | 21 | steps: 22 | - name: Checkout repository 23 | uses: actions/checkout@v4 24 | 25 | # Initializes the CodeQL tools for scanning. 
26 | - name: Initialize CodeQL 27 | uses: github/codeql-action/init@v3 28 | with: 29 | config-file: ./.github/codeql/codeql-config.yml 30 | languages: "javascript" 31 | queries: security-and-quality 32 | 33 | - name: Perform CodeQL Analysis 34 | uses: github/codeql-action/analyze@v3 35 | -------------------------------------------------------------------------------- /.github/workflows/integration-tests.yml: -------------------------------------------------------------------------------- 1 | name: "Integration tests" 2 | 3 | on: 4 | push: 5 | branches: [main] 6 | pull_request: 7 | 8 | jobs: 9 | integration-tests: 10 | runs-on: ubuntu-latest 11 | timeout-minutes: 30 12 | steps: 13 | - name: Trigger variant analysis 14 | id: trigger 15 | run: | 16 | if [ "$GITHUB_EVENT_NAME" == "pull_request" ]; then 17 | REF="$GITHUB_HEAD_REF" 18 | else 19 | REF="$GITHUB_REF_NAME" 20 | fi 21 | 22 | QUERY_PACK=$(curl --no-progress-meter https://github.com/$GITHUB_REPOSITORY/releases/download/test/test_pack2.tar.gz -L | base64) 23 | 24 | cat <> input.json 25 | { 26 | "action_repo_ref": "$REF", 27 | "language": "go", 28 | "query_pack": "$QUERY_PACK", 29 | "repositories": [ 30 | "docker/compose", 31 | "hashicorp/terraform", 32 | "github/does-not-exist" 33 | ] 34 | } 35 | EOF 36 | echo "input.json: $(cat input.json)" 37 | 38 | RESPONSE=$(curl --no-progress-meter -H "Authorization: Bearer ${{ secrets.BOT_TOKEN }}" "https://api.github.com/repos/$GITHUB_REPOSITORY/code-scanning/codeql/variant-analyses" -X POST -d @input.json) 39 | echo "Response: $RESPONSE" 40 | 41 | ID="$(echo "$RESPONSE" | jq '.id')" 42 | echo "Triggered variant analysis $ID" 43 | 44 | if [ "$ID" == "null" ]; then 45 | echo "Error triggering variant analysis" 46 | exit 1 47 | fi 48 | 49 | echo "variant_analysis_id=$ID" >> $GITHUB_OUTPUT 50 | 51 | - name: Wait for variant analysis to complete 52 | run: | 53 | while true; do 54 | RESPONSE=$(curl --no-progress-meter -H "Authorization: Bearer ${{ secrets.BOT_TOKEN }}" 
"https://api.github.com/repos/$GITHUB_REPOSITORY/code-scanning/codeql/variant-analyses/${{ steps.trigger.outputs.variant_analysis_id }}") 55 | STATUS="$(echo "$RESPONSE" | jq '.status' -r)" 56 | ACTIONS_WORKFLOW_RUN_ID="$(echo "$RESPONSE" | jq '.actions_workflow_run_id' -r)" 57 | echo "Variant analysis ${{ steps.trigger.outputs.variant_analysis_id }} status: $STATUS" 58 | if [ "$ACTION_WORKFLOW_RUN_ID" != "null" ]; then 59 | echo "Actions workflow URL: $GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$ACTIONS_WORKFLOW_RUN_ID" 60 | fi 61 | if [ "$STATUS" != "in_progress" ]; then 62 | echo "Exiting..." 63 | exit 0 64 | fi 65 | sleep 10s 66 | done 67 | 68 | - name: Validate variant analysis status 69 | id: validate 70 | run: | 71 | RESPONSE=$(curl --no-progress-meter -H "Authorization: Bearer ${{ secrets.BOT_TOKEN }}" "https://api.github.com/repos/$GITHUB_REPOSITORY/code-scanning/codeql/variant-analyses/${{ steps.trigger.outputs.variant_analysis_id }}") 72 | echo "Response: $RESPONSE" 73 | echo "Actions workflow URL: $GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$(echo "$RESPONSE" | jq '.actions_workflow_run_id')" 74 | 75 | if [ "$(echo "$RESPONSE" | jq '.failure_reason')" != "null" ]; then 76 | echo "Failure reason is not null" 77 | exit 1 78 | fi 79 | 80 | if [ "$(echo "$RESPONSE" | jq -r '.status')" != "succeeded" ]; then 81 | echo "Status is not succeeded" 82 | exit 1 83 | fi 84 | 85 | if [ "$(echo "$RESPONSE" | jq '.scanned_repositories | length')" != "2" ]; then 86 | echo "Number of scanned repos is not 2" 87 | exit 1 88 | fi 89 | 90 | if [ "$(echo "$RESPONSE" | jq '.scanned_repositories[].repository.full_name' -r | sort)" != "$(echo -e "docker/compose\nhashicorp/terraform")" ]; then 91 | echo "Full names of scanned repos is incorrect" 92 | exit 1 93 | fi 94 | 95 | if [ "$(echo "$RESPONSE" | jq '.scanned_repositories[] | select(.analysis_status = "succeeded") | .repository.full_name' | wc -l)" != "2" ]; then 96 | echo "Number of repositories with 
successful status is not 2" 97 | exit 1 98 | fi 99 | 100 | if [ "$(echo "$RESPONSE" | jq '.scanned_repositories[] | select(.result_count = 3) | .repository.full_name' | wc -l)" != "2" ]; then 101 | echo "Number of repositories with precisely 3 results is not 2" 102 | exit 1 103 | fi 104 | 105 | if [ "$(echo "$RESPONSE" | jq '.scanned_repositories[] | select(.artifact_size_in_bytes > 0) | .repository.full_name' | wc -l)" != "2" ]; then 106 | echo "Number of repositories with a non-zero artifact size is not 2" 107 | exit 1 108 | fi 109 | 110 | if [ "$(echo "$RESPONSE" | jq '.skipped_repositories.not_found_repos.repository_count')" != "1" ]; then 111 | echo "Number of not found skipped repos is not 1" 112 | exit 1 113 | fi 114 | 115 | if [ "$(echo "$RESPONSE" | jq '.skipped_repositories.not_found_repos.repository_full_names[]' -r)" != "github/does-not-exist" ]; then 116 | echo "Not found skipped repos is incorrect" 117 | exit 1 118 | fi 119 | 120 | if [ "$(echo "$RESPONSE" | jq '.skipped_repositories.access_mismatch_repos.repository_count')" != "0" ]; then 121 | echo "Number of access mismatch skipped repos is not 0" 122 | exit 1 123 | fi 124 | 125 | if [ "$(echo "$RESPONSE" | jq '.skipped_repositories.no_codeql_db_repos.repository_count')" != "0" ]; then 126 | echo "Number of no CodeQL DB skipped repos is not 0" 127 | exit 1 128 | fi 129 | 130 | if [ "$(echo "$RESPONSE" | jq '.skipped_repositories.over_limit_repos.repository_count')" != "0" ]; then 131 | echo "Number of over limit skipped repos is not 0" 132 | exit 1 133 | fi 134 | 135 | ACTIONS_RESPONSE=$(curl --no-progress-meter -H "Authorization: Bearer ${{ secrets.BOT_TOKEN }}" "https://api.github.com/repos/$GITHUB_REPOSITORY/actions/runs/$(echo "$RESPONSE" | jq '.actions_workflow_run_id')") 136 | 137 | if [ "$(echo "$ACTIONS_RESPONSE" | jq '.status' -r)" != "completed" ]; then 138 | echo "Actions workflow status is not completed" 139 | exit 1 140 | fi 141 | 142 | if [ "$(echo "$ACTIONS_RESPONSE" | jq 
'.conclusion' -r)" != "success" ]; then 143 | echo "Actions workflow conclusion is not success" 144 | exit 1 145 | fi 146 | -------------------------------------------------------------------------------- /.github/workflows/pr-checks.yml: -------------------------------------------------------------------------------- 1 | name: "PR checks" 2 | 3 | on: 4 | push: 5 | branches: [main] 6 | pull_request: 7 | workflow_dispatch: 8 | 9 | jobs: 10 | lint-js: 11 | runs-on: ubuntu-latest 12 | 13 | steps: 14 | - uses: actions/checkout@v4 15 | - uses: actions/setup-node@v4 16 | with: 17 | node-version: '20.x' 18 | cache: 'npm' 19 | - name: Install dependencies 20 | run: npm ci 21 | - name: Run Lint 22 | run: npm run-script lint 23 | 24 | check-js: 25 | runs-on: ubuntu-latest 26 | 27 | steps: 28 | - uses: actions/checkout@v4 29 | - uses: actions/setup-node@v4 30 | with: 31 | node-version: '20.x' 32 | cache: 'npm' 33 | - name: Install dependencies 34 | run: npm ci 35 | - name: Check generated JavaScript 36 | run: .github/workflows/script/check-js.sh 37 | 38 | check-json-schemas: 39 | runs-on: ubuntu-latest 40 | 41 | steps: 42 | - uses: actions/checkout@v4 43 | - uses: actions/setup-node@v4 44 | with: 45 | node-version: '20.x' 46 | cache: 'npm' 47 | - name: Install dependencies 48 | run: npm ci 49 | - name: Check generated JSON schemas 50 | run: .github/workflows/script/check-json-schemas.sh 51 | 52 | find-deadcode: 53 | runs-on: ubuntu-latest 54 | 55 | steps: 56 | - uses: actions/checkout@v4 57 | - uses: actions/setup-node@v4 58 | with: 59 | node-version: '20.x' 60 | cache: 'npm' 61 | - name: Install dependencies 62 | run: npm ci 63 | - name: Find deadcode 64 | run: npm run find-deadcode 65 | 66 | npm-test: 67 | needs: [check-js] 68 | strategy: 69 | matrix: 70 | os: [ubuntu-latest, macos-latest] 71 | runs-on: ${{ matrix.os }} 72 | 73 | steps: 74 | - uses: actions/checkout@v4 75 | 76 | - uses: actions/setup-node@v4 77 | with: 78 | node-version: '20.x' 79 | cache: 'npm' 80 | - 
name: Install dependencies 81 | run: npm ci 82 | 83 | - name: Initialize CodeQL 84 | id: init 85 | uses: github/codeql-action/init@v3 86 | with: 87 | languages: javascript 88 | 89 | - name: Put CODEQL on PATH 90 | run: | 91 | echo "$(dirname "${{ steps.init.outputs.codeql-path }}")" >> $GITHUB_PATH 92 | 93 | - name: npm run-script test 94 | run: npm run-script test 95 | -------------------------------------------------------------------------------- /.github/workflows/script/check-js.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -eu 3 | 4 | # Sanity check that repo is clean to start with 5 | if [ ! -z "$(git status --porcelain)" ]; then 6 | # If we get a fail here then this workflow needs attention... 7 | >&2 echo "Failed: Repo should be clean before testing!" 8 | exit 1 9 | fi 10 | # Wipe the dist directory incase there are extra unnecessary files in there 11 | rm -rf dist/*.js 12 | # Generate the JavaScript files 13 | npm run-script build 14 | # Check that repo is still clean 15 | if [ ! -z "$(git status --porcelain)" ]; then 16 | # If we get a fail here then the PR needs attention 17 | >&2 echo "Failed: JavaScript files are not up to date. Run 'npm run-script build' to update" 18 | git status 19 | exit 1 20 | fi 21 | echo "Success: JavaScript files are up to date" 22 | -------------------------------------------------------------------------------- /.github/workflows/script/check-json-schemas.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -eu 3 | 4 | # Sanity check that repo is clean to start with 5 | if [ ! -z "$(git status --porcelain)" ]; then 6 | # If we get a fail here then this workflow needs attention... 7 | >&2 echo "Failed: Repo should be clean before testing!" 8 | exit 1 9 | fi 10 | # Generate the JSON schema files 11 | ./script/generate-json-schemas 12 | # Check that repo is still clean 13 | if [ ! 
-z "$(git status --porcelain)" ]; then 14 | # If we get a fail here then the PR needs attention 15 | >&2 echo "Failed: JSON schema files are not up to date. Run 'script/generate-json-schemas' to update" 16 | git status 17 | exit 1 18 | fi 19 | echo "Success: JSON schema files are up to date" 20 | 21 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /runner/dist/ 2 | /runner/node_modules/ 3 | /lib/ 4 | /node_modules/ 5 | /dist/*.js.map 6 | tsconfig.tsbuildinfo 7 | -------------------------------------------------------------------------------- /.vscode/extensions.json: -------------------------------------------------------------------------------- 1 | { 2 | "recommendations": [ 3 | "esbenp.prettier-vscode", 4 | "dbaeumer.vscode-eslint" 5 | ] 6 | } 7 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "files.exclude": { 3 | // include the defaults from VS Code 4 | "**/.git": true, 5 | "**/.DS_Store": true, 6 | 7 | // transpiled JavaScript 8 | "lib": true, 9 | "dist": true, 10 | }, 11 | "editor.formatOnSave": true, 12 | "editor.formatOnPaste": false, 13 | "[typescript]": { 14 | "editor.defaultFormatter": "esbenp.prettier-vscode" 15 | }, 16 | } 17 | -------------------------------------------------------------------------------- /.vscode/tasks.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "2.0.0", 3 | "tasks": [ 4 | { 5 | "type": "typescript", 6 | "tsconfig": "tsconfig.json", 7 | "option": "watch", 8 | "problemMatcher": [ 9 | "$tsc-watch" 10 | ], 11 | "group": "build", 12 | "label": "tsc: watch - tsconfig.json" 13 | } 14 | ] 15 | } -------------------------------------------------------------------------------- /CODEOWNERS: 
-------------------------------------------------------------------------------- 1 | @github/code-scanning-secexp-reviewers 2 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Development guide 2 | 3 | - Install the recommended extensions for automatic formatting on save. 4 | - All compiled artifacts should be checked in. 5 | - We recommend running `npm run watch` in the background to keep compiled artifacts up to date during development. 6 | 7 | ## Running tests 8 | 9 | We use the [jest](https://jestjs.io/) test framework. To run all the tests use `npm run test`. To run a specific test the best experience is to use VS Code. 10 | 11 | The `codeql` executable must be on the path before running any tests. If you run `script/test` instead of `npm run test` it will set that up automatically. 12 | 13 | # Generating sourcemaps 14 | 15 | In case you want to generate sourcemaps for tracing back a specific line in a stacktrace to the source code, you can 16 | use the following command: 17 | 18 | ```shell 19 | CODEQL_VARIANT_ANALYSIS_ACTION_GENERATE_SOURCEMAPS=true npm run build 20 | ``` 21 | 22 | The sourcemaps will be placed in the `dist` directory. 
23 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 GitHub 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # CodeQL variant analysis action 2 | 3 | This action is used internally by GitHub's multi-repository variant analysis. It is not intended to be used directly. 4 | 5 | If you want to use CodeQL to analyze your source code, please see the [CodeQL Action](https://github.com/github/codeql-action) and the [Code scanning documentation](https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/about-code-scanning). 
6 | -------------------------------------------------------------------------------- /build.mjs: -------------------------------------------------------------------------------- 1 | import { rm } from "node:fs/promises"; 2 | import { dirname, join } from "node:path"; 3 | import { fileURLToPath } from "node:url"; 4 | import * as esbuild from "esbuild"; 5 | 6 | const __filename = fileURLToPath(import.meta.url); 7 | const __dirname = dirname(__filename); 8 | 9 | const actions = [ 10 | "query", 11 | "update-repo-task-status", 12 | "update-repo-task-statuses", 13 | ]; 14 | 15 | await rm(join(__dirname, "dist"), { recursive: true, force: true }); 16 | 17 | // This will just log when a build ends 18 | /** @type {esbuild.Plugin} */ 19 | const onEndPlugin = { 20 | name: "on-end", 21 | setup(build) { 22 | build.onEnd((result) => { 23 | console.log(`Build ended with ${result.errors.length} errors`); 24 | }); 25 | }, 26 | }; 27 | 28 | const context = await esbuild.context({ 29 | entryPoints: actions.map((actionName) => `src/${actionName}.ts`), 30 | bundle: true, 31 | outdir: "dist", 32 | platform: "node", 33 | format: "cjs", 34 | sourcemap: !!process.env.CODEQL_VARIANT_ANALYSIS_ACTION_GENERATE_SOURCEMAPS 35 | ? 
"external" 36 | : false, 37 | chunkNames: "chunks/[name]-[hash]", 38 | plugins: [onEndPlugin], 39 | }); 40 | 41 | if (process.argv.includes("--watch")) { 42 | await context.watch(); 43 | } else { 44 | await context.rebuild(); 45 | 46 | await context.dispose(); 47 | } 48 | -------------------------------------------------------------------------------- /jest.config.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * For a detailed explanation regarding each configuration property, visit: 3 | * https://jestjs.io/docs/configuration 4 | */ 5 | 6 | import type { Config } from "jest"; 7 | 8 | const config: Config = { 9 | // All imported modules in your tests should be mocked automatically 10 | // automock: false, 11 | 12 | // Stop running tests after `n` failures 13 | // bail: 0, 14 | 15 | // The directory where Jest should store its cached dependency information 16 | // cacheDirectory: "/private/var/folders/r4/6npmmdcd1lx_cs50n8ftb0gc0000gn/T/jest_dx", 17 | 18 | // Automatically clear mock calls, instances, contexts and results before every test 19 | // clearMocks: true, 20 | 21 | // Indicates whether the coverage information should be collected while executing the test 22 | // collectCoverage: false, 23 | 24 | // An array of glob patterns indicating a set of files for which coverage information should be collected 25 | // collectCoverageFrom: undefined, 26 | 27 | // The directory where Jest should output its coverage files 28 | // coverageDirectory: undefined, 29 | 30 | // An array of regexp pattern strings used to skip coverage collection 31 | // coveragePathIgnorePatterns: [ 32 | // "/node_modules/" 33 | // ], 34 | 35 | // Indicates which provider should be used to instrument code for coverage 36 | coverageProvider: "v8", 37 | 38 | // A list of reporter names that Jest uses when writing coverage reports 39 | // coverageReporters: [ 40 | // "json", 41 | // "text", 42 | // "lcov", 43 | // "clover" 44 | // ], 45 | 46 | // 
An object that configures minimum threshold enforcement for coverage results 47 | // coverageThreshold: undefined, 48 | 49 | // A path to a custom dependency extractor 50 | // dependencyExtractor: undefined, 51 | 52 | // Make calling deprecated APIs throw helpful error messages 53 | // errorOnDeprecated: false, 54 | 55 | // The default configuration for fake timers 56 | // fakeTimers: { 57 | // "enableGlobally": false 58 | // }, 59 | 60 | // Force coverage collection from ignored files using an array of glob patterns 61 | // forceCoverageMatch: [], 62 | 63 | // A path to a module which exports an async function that is triggered once before all test suites 64 | // globalSetup: undefined, 65 | 66 | // A path to a module which exports an async function that is triggered once after all test suites 67 | // globalTeardown: undefined, 68 | 69 | // A set of global variables that need to be available in all test environments 70 | // globals: {}, 71 | 72 | // The maximum amount of workers used to run your tests. Can be specified as % or a number. E.g. maxWorkers: 10% will use 10% of your CPU amount + 1 as the maximum worker number. maxWorkers: 2 will use a maximum of 2 workers. 
73 | // maxWorkers: "50%", 74 | 75 | // An array of directory names to be searched recursively up from the requiring module's location 76 | // moduleDirectories: [ 77 | // "node_modules" 78 | // ], 79 | 80 | // An array of file extensions your modules use 81 | // moduleFileExtensions: [ 82 | // "js", 83 | // "mjs", 84 | // "cjs", 85 | // "jsx", 86 | // "ts", 87 | // "tsx", 88 | // "json", 89 | // "node" 90 | // ], 91 | 92 | // A map from regular expressions to module names or to arrays of module names that allow to stub out resources with a single module 93 | // moduleNameMapper: {}, 94 | 95 | // An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader 96 | // modulePathIgnorePatterns: [], 97 | 98 | // Activates notifications for test results 99 | // notify: false, 100 | 101 | // An enum that specifies notification mode. Requires { notify: true } 102 | // notifyMode: "failure-change", 103 | 104 | // A preset that is used as a base for Jest's configuration 105 | preset: "ts-jest", 106 | 107 | // Run tests from one or more projects 108 | // projects: undefined, 109 | 110 | // Use this configuration option to add custom reporters to Jest 111 | // reporters: undefined, 112 | 113 | // Automatically reset mock state before every test 114 | // resetMocks: false, 115 | 116 | // Reset the module registry before running each individual test 117 | // resetModules: false, 118 | 119 | // A path to a custom resolver 120 | // resolver: undefined, 121 | 122 | // Automatically restore mock state and implementation before every test 123 | // restoreMocks: false, 124 | 125 | // The root directory that Jest should scan for tests and modules within 126 | // rootDir: undefined, 127 | 128 | // A list of paths to directories that Jest should use to search for files in 129 | // roots: [ 130 | // "" 131 | // ], 132 | 133 | // Allows you to use a custom runner instead of Jest's default test runner 134 | // runner: "jest-runner", 
135 | 136 | // The paths to modules that run some code to configure or set up the testing environment before each test 137 | // setupFiles: [], 138 | 139 | // A list of paths to modules that run some code to configure or set up the testing framework before each test 140 | // setupFilesAfterEnv: [], 141 | 142 | // The number of seconds after which a test is considered as slow and reported as such in the results. 143 | // slowTestThreshold: 5, 144 | 145 | // A list of paths to snapshot serializer modules Jest should use for snapshot testing 146 | // snapshotSerializers: [], 147 | 148 | // The test environment that will be used for testing 149 | testEnvironment: "node", 150 | 151 | // Options that will be passed to the testEnvironment 152 | // testEnvironmentOptions: {}, 153 | 154 | // Adds a location field to test results 155 | // testLocationInResults: false, 156 | 157 | // The glob patterns Jest uses to detect test files 158 | // testMatch: [ 159 | // "**/__tests__/**/*.[jt]s?(x)", 160 | // "**/?(*.)+(spec|test).[tj]s?(x)" 161 | // ], 162 | 163 | // An array of regexp pattern strings that are matched against all test paths, matched tests are skipped 164 | // testPathIgnorePatterns: [ 165 | // "/node_modules/" 166 | // ], 167 | 168 | // The regexp pattern or array of patterns that Jest uses to detect test files 169 | // testRegex: [], 170 | 171 | // This option allows the use of a custom results processor 172 | // testResultsProcessor: undefined, 173 | 174 | // This option allows use of a custom test runner 175 | // testRunner: "jest-circus/runner", 176 | 177 | // A map from regular expressions to paths to transformers 178 | transform: { 179 | "^.+\\.tsx?$": [ 180 | "ts-jest", 181 | { 182 | tsconfig: "/tsconfig.json", 183 | }, 184 | ], 185 | node_modules: [ 186 | "babel-jest", 187 | { 188 | presets: ["@babel/preset-env"], 189 | plugins: ["@babel/plugin-transform-modules-commonjs"], 190 | }, 191 | ], 192 | }, 193 | 194 | // An array of regexp pattern strings that are 
matched against all source file paths, matched files will skip transformation 195 | transformIgnorePatterns: [ 196 | // These use ES modules, so need to be transformed 197 | "node_modules/(?!(?:@octokit/.+|before-after-hook|universal-user-agent)/.*)", 198 | ], 199 | 200 | // An array of regexp pattern strings that are matched against all modules before the module loader will automatically return a mock for them 201 | // unmockedModulePathPatterns: undefined, 202 | 203 | // Indicates whether each individual test should be reported during the run 204 | // verbose: undefined, 205 | 206 | // An array of regexp patterns that are matched against all source file paths before re-running tests in watch mode 207 | // watchPathIgnorePatterns: [], 208 | 209 | // Whether to use watchman for file crawling 210 | // watchman: true, 211 | }; 212 | 213 | export default config; 214 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "codeql-query", 3 | "version": "1.0.7", 4 | "private": true, 5 | "description": "CodeQL query action", 6 | "scripts": { 7 | "build": "node build.mjs", 8 | "watch": "node build.mjs --watch", 9 | "test": "jest", 10 | "lint": "eslint --report-unused-disable-directives --max-warnings=0 . --ext .js,.ts", 11 | "lint-fix": "eslint --report-unused-disable-directives --max-warnings=0 . 
--ext .js,.ts --fix", 12 | "find-deadcode": "ts-node script/find-deadcode.ts" 13 | }, 14 | "license": "MIT", 15 | "engines": { 16 | "node": "^20.0.0" 17 | }, 18 | "dependencies": { 19 | "@actions/core": "^1.11.1", 20 | "@actions/exec": "^1.1.1", 21 | "@actions/github": "^6.0.1", 22 | "@actions/http-client": "^2.2.3", 23 | "@actions/io": "^1.1.3", 24 | "@actions/tool-cache": "^2.0.2", 25 | "@octokit/action": "^8.0.2", 26 | "@octokit/plugin-retry": "^8.0.1", 27 | "@octokit/plugin-throttling": "^11.0.1", 28 | "ajv": "8.17.1", 29 | "form-data": "^4.0.2", 30 | "jszip": "3.10.1", 31 | "semver": "^7.7.1", 32 | "uuid": "^11.1.0", 33 | "yaml": "^2.7.1" 34 | }, 35 | "devDependencies": { 36 | "@babel/core": "^7.26.10", 37 | "@babel/plugin-transform-modules-commonjs": "^7.26.3", 38 | "@babel/preset-env": "^7.27.2", 39 | "@babel/preset-react": "^7.26.3", 40 | "@babel/preset-typescript": "^7.26.0", 41 | "@octokit/types": "^14.1.0", 42 | "@types/archiver": "^6.0.3", 43 | "@types/jest": "^29.5.14", 44 | "@types/node": "^20.17.48", 45 | "@types/semver": "^7.5.8", 46 | "@types/uuid": "^10.0.0", 47 | "@typescript-eslint/eslint-plugin": "^8.33.0", 48 | "@typescript-eslint/parser": "^8.32.1", 49 | "archiver": "^7.0.1", 50 | "esbuild": "^0.25.4", 51 | "eslint": "^8.57.0", 52 | "eslint-config-prettier": "^10.1.5", 53 | "eslint-import-resolver-typescript": "^4.3.3", 54 | "eslint-plugin-filenames": "^1.3.2", 55 | "eslint-plugin-github": "^5.1.8", 56 | "eslint-plugin-import": "^2.31.0", 57 | "eslint-plugin-no-async-foreach": "^0.1.1", 58 | "eslint-plugin-prettier": "^5.4.0", 59 | "jest": "^29.7.0", 60 | "prettier": "^3.5.3", 61 | "ts-jest": "^29.3.2", 62 | "ts-json-schema-generator": "2.3.0", 63 | "ts-node": "^10.9.2", 64 | "ts-unused-exports": "^11.0.1", 65 | "typescript": "^5.7.3" 66 | }, 67 | "resolutions": { 68 | "glob-parent": ">=5.1.2", 69 | "normalize-url": ">=4.5.1" 70 | } 71 | } 72 | -------------------------------------------------------------------------------- /query/action.yml: 
-------------------------------------------------------------------------------- 1 | name: "CodeQL: Query" 2 | description: "Run a CodeQL query against a repository" 3 | author: "GitHub" 4 | inputs: 5 | controller_repo_id: 6 | description: "ID of the controller repository where the variant analysis is run." 7 | required: true 8 | 9 | query_pack_url: 10 | description: "URL for a tarball containing CodeQL query plus dependencies." 11 | required: true 12 | 13 | language: 14 | description: "Language of CodeQL query" 15 | required: true 16 | 17 | repositories: 18 | description: "Repositories to run the query against. A JSON encoded array of the form {id: number, nwo: string}[]" 19 | required: true 20 | 21 | variant_analysis_id: 22 | description: "The ID of the variant analysis" 23 | required: true 24 | 25 | instructions_path: 26 | description: "The path to the instructions file." 27 | required: true 28 | 29 | signed_auth_token: 30 | description: "The signed auth token to authenticate against the GitHub API" 31 | required: true 32 | 33 | runs: 34 | using: "node20" 35 | main: "../dist/query.js" 36 | -------------------------------------------------------------------------------- /script/codeql: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | gh codeql "$@" 3 | -------------------------------------------------------------------------------- /script/find-deadcode.ts: -------------------------------------------------------------------------------- 1 | import { join, relative, resolve } from "path"; 2 | import { exit } from "process"; 3 | 4 | import { analyzeTsConfig } from "ts-unused-exports"; 5 | import { Analysis } from "ts-unused-exports/lib/types"; 6 | 7 | function main() { 8 | const repositoryRoot = resolve(join(__dirname, "..")); 9 | 10 | let result: Analysis; 11 | 12 | try { 13 | result = analyzeTsConfig("tsconfig.json"); 14 | } catch (error) { 15 | if (error instanceof Error) { 16 | console.error("Failed to analyze 
tsconfig.json:", error.message); 17 | } else { 18 | console.error("Failed to analyze tsconfig.json:", error); 19 | } 20 | exit(1); 21 | } 22 | 23 | if (!result) { 24 | console.error("No result from analyzeTsConfig"); 25 | exit(1); 26 | } 27 | 28 | let foundUnusedExports = false; 29 | 30 | for (const [filepath, exportNameAndLocations] of Object.entries( 31 | result.unusedExports, 32 | )) { 33 | const relativeFilepath = relative(repositoryRoot, filepath); 34 | 35 | foundUnusedExports = true; 36 | 37 | console.log(relativeFilepath); 38 | for (const exportNameAndLocation of exportNameAndLocations) { 39 | console.log(` ${exportNameAndLocation.exportName}`); 40 | } 41 | console.log(); 42 | } 43 | 44 | if (foundUnusedExports) { 45 | exit(1); 46 | } 47 | } 48 | 49 | main(); 50 | -------------------------------------------------------------------------------- /script/generate-json-schemas: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -eu 4 | 5 | cd "$(dirname "$0")/.." 6 | 7 | # Generate the schemas to a temporary directory and then move them into their 8 | # canonical positions at the end. This means we can wipe the src/json-schemas 9 | # directory to ensure that it only contains correct generated files. But also 10 | # works around the requirement that the schemas must exist during generation, 11 | # because they are imported by the typescript, so we cannot compile it to 12 | # generate the schemas unless all imported schemas files exist. 
13 | rm -rf json-schemas-tmp 14 | mkdir json-schemas-tmp 15 | 16 | generate_schema () { 17 | npx ts-json-schema-generator \ 18 | --path $1 \ 19 | --type $2 \ 20 | --out json-schemas-tmp/$2.json \ 21 | --tsconfig tsconfig.json \ 22 | --additional-properties true \ 23 | --no-type-check 24 | } 25 | 26 | generate_schema ./src/inputs.ts RepoArray 27 | generate_schema ./src/inputs.ts Instructions 28 | generate_schema ./src/codeql.ts Sarif 29 | generate_schema ./src/codeql.ts BQRSInfo 30 | generate_schema ./src/codeql.ts ResolvedQueries 31 | generate_schema ./src/codeql.ts ResolvedDatabase 32 | generate_schema ./src/codeql.ts QueryMetadata 33 | generate_schema ./src/gh-api-client.ts RepoTask 34 | generate_schema ./src/gh-api-client.ts Policy 35 | 36 | rm -rf src/json-schemas/* 37 | mv json-schemas-tmp/* src/json-schemas 38 | rmdir json-schemas-tmp 39 | -------------------------------------------------------------------------------- /script/test: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -eu 4 | 5 | cd "$(dirname "$0")/.." 
6 | 7 | if gh extension list | grep "^gh codeql" -q 8 | then 9 | echo "gh codeql extension is already installed" 10 | else 11 | gh extension install github/gh-codeql 12 | fi 13 | 14 | gh codeql download 15 | 16 | export PATH="$PWD/script:$PATH" 17 | 18 | npm run test "$@" 19 | -------------------------------------------------------------------------------- /src/api-client.ts: -------------------------------------------------------------------------------- 1 | import * as httpm from "@actions/http-client"; 2 | 3 | import { userAgent } from "./gh-api-client"; 4 | 5 | export function getApiClient(): httpm.HttpClient { 6 | return new httpm.HttpClient(userAgent, [], { 7 | allowRetries: true, 8 | }); 9 | } 10 | -------------------------------------------------------------------------------- /src/azure-client.ts: -------------------------------------------------------------------------------- 1 | import * as core from "@actions/core"; 2 | import FormData from "form-data"; 3 | 4 | import { getApiClient } from "./api-client"; 5 | import { Policy } from "./gh-api-client"; 6 | import { HTTPError } from "./http-error"; 7 | import { RetryHelper } from "./retry-helper"; 8 | 9 | export async function uploadArtifact( 10 | policy: Policy, 11 | artifactContents: Buffer, 12 | ): Promise { 13 | const maxAttempts = 3; 14 | const minSeconds = 10; 15 | const maxSeconds = 20; 16 | const retryHelper = new RetryHelper(maxAttempts, minSeconds, maxSeconds); 17 | return await retryHelper.execute( 18 | async () => { 19 | return await uploadArtifactImpl(policy, artifactContents); 20 | }, 21 | (err: Error) => { 22 | if (err instanceof HTTPError && err.httpStatusCode) { 23 | // Only retry 504 24 | return err.httpStatusCode === 504; 25 | } 26 | 27 | if (err.message.includes("Request timeout")) { 28 | // Retry on request timeout 29 | // Error is created here: 30 | // https://github.com/actions/toolkit/blob/415c42d27ca2a24f3801dd9406344aaea00b7866/packages/http-client/src/index.ts#L535 31 | return 
true; 32 | } 33 | 34 | // Otherwise abort 35 | return false; 36 | }, 37 | ); 38 | } 39 | 40 | async function uploadArtifactImpl(policy: Policy, artifactContents: Buffer) { 41 | const data = new FormData(); 42 | for (const [key, value] of Object.entries(policy.form)) { 43 | data.append(key, value); 44 | } 45 | 46 | data.append("file", artifactContents, { 47 | contentType: "application/zip", 48 | filename: "results.zip", 49 | }); 50 | 51 | const httpClient = getApiClient(); 52 | 53 | const additionalHeaders = { 54 | ...policy.header, 55 | ...data.getHeaders(), 56 | }; 57 | 58 | const response = await httpClient.sendStream( 59 | "POST", 60 | policy.upload_url, 61 | data, 62 | additionalHeaders, 63 | ); 64 | 65 | if (!response.message.statusCode || response.message.statusCode > 299) { 66 | const responseBody = await response.readBody(); 67 | core.warning( 68 | `Request to ${policy.upload_url} returned status code ${response.message.statusCode}: ${responseBody}`, 69 | ); 70 | throw new HTTPError(response.message.statusCode, responseBody); 71 | } 72 | 73 | // We need to read the response body to make sure the connection is closed 74 | await response.readBody(); 75 | } 76 | -------------------------------------------------------------------------------- /src/codeql-cli.test.ts: -------------------------------------------------------------------------------- 1 | import fs from "fs"; 2 | import { tmpdir } from "os"; 3 | import path from "path"; 4 | 5 | import { rmRF } from "@actions/io"; 6 | 7 | import { getQueryPackInfo } from "./codeql"; 8 | import { CodeqlCli, CodeqlCliServer } from "./codeql-cli"; 9 | 10 | describe("codeql-cli", () => { 11 | let cli: CodeqlCli; 12 | let tmpDir: string; 13 | 14 | beforeAll(() => { 15 | cli = new CodeqlCliServer(process.env.CODEQL_BIN_PATH || "codeql"); 16 | }); 17 | 18 | afterAll(() => { 19 | if (cli && cli instanceof CodeqlCliServer) { 20 | cli.shutdown(); 21 | } 22 | }); 23 | 24 | beforeEach(() => { 25 | tmpDir = 
path.resolve(fs.mkdtempSync(path.join(tmpdir(), "tmp-"))); 26 | }); 27 | 28 | afterEach(async () => { 29 | if (tmpDir !== undefined) { 30 | await rmRF(tmpDir); 31 | } 32 | }); 33 | 34 | it( 35 | "creates and bundles a database", 36 | async () => { 37 | const projectDir = path.join(tmpDir, "project"); 38 | const dbDir = path.join(tmpDir, "db"); 39 | fs.mkdirSync(projectDir); 40 | const testFile = path.join(projectDir, "test.js"); 41 | fs.writeFileSync(testFile, "const x = 1;"); 42 | 43 | await cli.run([ 44 | "database", 45 | "create", 46 | "--language=javascript", 47 | `--source-root=${projectDir}`, 48 | dbDir, 49 | ]); 50 | 51 | const dbZip = path.join(tmpDir, "database.zip"); 52 | await cli.run(["database", "bundle", `--output=${dbZip}`, dbDir]); 53 | 54 | expect(fs.statSync(dbZip).isFile()).toBe(true); 55 | }, 56 | // 5 minute timeout to create and bundle a database 57 | 5 * 60 * 1000, 58 | ); 59 | 60 | it("gets query pack info", async () => { 61 | const queryPackInfo = await getQueryPackInfo(cli, "testdata/test_pack"); 62 | 63 | const queries = {}; 64 | queries[path.resolve("testdata/test_pack/x/query.ql")] = { 65 | name: "Test query", 66 | description: "Test query description", 67 | kind: "table", 68 | id: "test/query/id", 69 | }; 70 | expect(queryPackInfo).toEqual({ 71 | path: path.resolve("testdata/test_pack"), 72 | name: "codeql/queries", 73 | queries, 74 | }); 75 | }); 76 | }); 77 | -------------------------------------------------------------------------------- /src/codeql-cli.ts: -------------------------------------------------------------------------------- 1 | import { ChildProcessWithoutNullStreams, spawn } from "node:child_process"; 2 | import { EOL } from "node:os"; 3 | import { Writable } from "node:stream"; 4 | 5 | import { debug, error } from "@actions/core"; 6 | import { getExecOutput } from "@actions/exec"; 7 | 8 | import { asError } from "./errors"; 9 | 10 | interface CodeqlCliOutput { 11 | exitCode: number; 12 | stdout: string; 13 | stderr: 
string; 14 | } 15 | 16 | export interface CodeqlCli { 17 | run(args: string[]): Promise; 18 | } 19 | 20 | export class BaseCodeqlCli implements CodeqlCli { 21 | constructor(private readonly codeqlPath: string) {} 22 | 23 | async run(args: string[]): Promise { 24 | const { stdout, stderr, exitCode } = await getExecOutput( 25 | this.codeqlPath, 26 | args, 27 | ); 28 | 29 | return { stdout, stderr, exitCode }; 30 | } 31 | } 32 | 33 | export class CodeqlCliServer implements CodeqlCli { 34 | /** 35 | * The process for the cli server, or undefined if one doesn't exist yet 36 | */ 37 | private process?: ChildProcessWithoutNullStreams; 38 | /** 39 | * Queue of future commands 40 | */ 41 | private readonly commandQueue: Array<() => void> = []; 42 | /** 43 | * Whether a command is running 44 | */ 45 | private commandInProcess: boolean = false; 46 | /** 47 | * A buffer with a single null byte. 48 | */ 49 | private readonly nullBuffer: Buffer = Buffer.alloc(1); 50 | 51 | constructor(private readonly codeqlPath: string) {} 52 | 53 | run(args: string[]): Promise { 54 | return new Promise((resolve, reject) => { 55 | const callback = (): void => { 56 | try { 57 | // eslint-disable-next-line github/no-then -- we might not run immediately 58 | this.runCommandImmediately(args).then(resolve, reject); 59 | } catch (err) { 60 | reject(asError(err)); 61 | } 62 | }; 63 | // If the server is not running a command, then run the given command immediately, 64 | // otherwise add to the queue 65 | if (this.commandInProcess) { 66 | this.commandQueue.push(callback); 67 | } else { 68 | callback(); 69 | } 70 | }); 71 | } 72 | 73 | shutdown() { 74 | this.killProcessIfRunning(); 75 | } 76 | 77 | /** 78 | * Launch the cli server 79 | */ 80 | private launchProcess(): ChildProcessWithoutNullStreams { 81 | const args = ["execute", "cli-server"]; 82 | 83 | // Start the server process. 
84 | const argsString = args.join(" "); 85 | void debug(`Starting using CodeQL CLI: ${this.codeqlPath} ${argsString}`); 86 | const child = spawn(this.codeqlPath, args); 87 | if (!child || !child.pid) { 88 | throw new Error( 89 | `Failed to start using command ${this.codeqlPath} ${argsString}.`, 90 | ); 91 | } 92 | 93 | let lastStdout: string | Buffer | undefined = undefined; 94 | child.stdout.on("data", (data: string | Buffer) => { 95 | lastStdout = data; 96 | }); 97 | // Set up event listeners. 98 | child.on("close", (code, signal) => { 99 | if (code !== null) { 100 | debug(`Child process exited with code ${code}`); 101 | } 102 | if (signal) { 103 | debug(`Child process exited due to receipt of signal ${signal}`); 104 | } 105 | // If the process exited abnormally, log the last stdout message, 106 | // It may be from the jvm. 107 | if (code !== 0 && lastStdout !== undefined) { 108 | debug(`Last stdout was "${lastStdout.toString()}"`); 109 | } 110 | }); 111 | 112 | return child; 113 | } 114 | 115 | private async runCommandImmediately( 116 | args: string[], 117 | ): Promise { 118 | const stderrBuffers: Buffer[] = []; 119 | const parentProcess = process; 120 | if (this.commandInProcess) { 121 | throw new Error( 122 | "runCommandImmediately called while command was in process", 123 | ); 124 | } 125 | this.commandInProcess = true; 126 | try { 127 | // Launch the process if it doesn't exist 128 | if (!this.process) { 129 | this.process = this.launchProcess(); 130 | } 131 | const process = this.process; 132 | // The array of fragments of stdout 133 | const stdoutBuffers: Buffer[] = []; 134 | 135 | void debug(`Running using CodeQL CLI: ${args.join(" ")}`); 136 | try { 137 | await new Promise((resolve, reject) => { 138 | // Follow standard Actions behavior and print any lines to stdout/stderr immediately 139 | let parentStdout: Writable; 140 | if (parentProcess.stdout instanceof Writable) { 141 | parentStdout = parentProcess.stdout; 142 | } 143 | let parentStderr: Writable 
| undefined = undefined; 144 | if (parentProcess.stderr instanceof Writable) { 145 | parentStderr = parentProcess.stderr; 146 | } 147 | 148 | // Start listening to stdout 149 | process.stdout.addListener("data", (newData: Buffer) => { 150 | stdoutBuffers.push(newData); 151 | 152 | if ( 153 | newData.length > 0 && 154 | newData.readUInt8(newData.length - 1) === 0 155 | ) { 156 | if (newData.length > 1) { 157 | parentStdout?.write(newData.subarray(0, newData.length - 1)); 158 | } 159 | } else { 160 | parentStdout?.write(newData); 161 | } 162 | 163 | // If the buffer ends in '0' then exit. 164 | // We don't have to check the middle as no output will be written after the null until 165 | // the next command starts 166 | if ( 167 | newData.length > 0 && 168 | newData.readUInt8(newData.length - 1) === 0 169 | ) { 170 | resolve(); 171 | } 172 | }); 173 | // Listen to stderr 174 | process.stderr.addListener("data", (newData: Buffer) => { 175 | stderrBuffers.push(newData); 176 | 177 | parentStderr?.write(newData); 178 | }); 179 | // Listen for process exit. 180 | process.addListener("close", (code) => 181 | reject( 182 | new Error( 183 | `The process ${this.codeqlPath} ${args.join(" ")} exited with code ${code}`, 184 | ), 185 | ), 186 | ); 187 | // Write the command followed by a null terminator. 188 | process.stdin.write(JSON.stringify(args), "utf8"); 189 | process.stdin.write(this.nullBuffer); 190 | }); 191 | 192 | void debug("CLI command succeeded."); 193 | 194 | const stdoutBuffer = Buffer.concat(stdoutBuffers); 195 | 196 | return { 197 | exitCode: 0, 198 | stdout: stdoutBuffer.toString("utf8", 0, stdoutBuffer.length - 1), 199 | stderr: Buffer.concat(stderrBuffers).toString("utf8"), 200 | }; 201 | } catch (err) { 202 | // Kill the process if it isn't already dead. 
203 | this.killProcessIfRunning(); 204 | 205 | if (stderrBuffers.length > 0) { 206 | error( 207 | `Failed to run ${args.join(" ")}:${EOL} ${Buffer.concat(stderrBuffers).toString("utf8")}`, 208 | ); 209 | } 210 | 211 | throw err; 212 | } finally { 213 | debug(Buffer.concat(stderrBuffers).toString("utf8")); 214 | // Remove the listeners we set up. 215 | process.stdout.removeAllListeners("data"); 216 | process.stderr.removeAllListeners("data"); 217 | process.removeAllListeners("close"); 218 | } 219 | } finally { 220 | this.commandInProcess = false; 221 | // start running the next command immediately 222 | this.runNext(); 223 | } 224 | } 225 | 226 | /** 227 | * Run the next command in the queue 228 | */ 229 | private runNext(): void { 230 | const callback = this.commandQueue.shift(); 231 | if (callback) { 232 | callback(); 233 | } 234 | } 235 | 236 | private killProcessIfRunning(): void { 237 | if (this.process) { 238 | // Tell the Java CLI server process to shut down. 239 | debug("Sending shutdown request"); 240 | try { 241 | this.process.stdin.write(JSON.stringify(["shutdown"]), "utf8"); 242 | this.process.stdin.write(this.nullBuffer); 243 | debug("Sent shutdown request"); 244 | } catch (e: unknown) { 245 | // We are probably fine here, the process has already closed stdin. 246 | debug( 247 | `Shutdown request failed: process stdin may have already closed. The error was ${e}`, 248 | ); 249 | debug("Stopping the process anyway."); 250 | } 251 | // Close the stdin and stdout streams. 252 | // This is important on Windows where the child process may not die cleanly. 
253 | this.process.stdin.end(); 254 | this.process.kill(); 255 | this.process.stdout.destroy(); 256 | this.process.stderr.destroy(); 257 | this.process = undefined; 258 | } 259 | } 260 | } 261 | -------------------------------------------------------------------------------- /src/codeql-setup.ts: -------------------------------------------------------------------------------- 1 | import { OutgoingHttpHeaders } from "http"; 2 | import { join } from "path"; 3 | import { performance } from "perf_hooks"; 4 | 5 | import { debug, info } from "@actions/core"; 6 | import { rmRF } from "@actions/io"; 7 | import { 8 | cacheDir, 9 | downloadTool, 10 | extractTar, 11 | find as findInToolCache, 12 | } from "@actions/tool-cache"; 13 | import { v4 as uuidV4 } from "uuid"; 14 | 15 | import { assertNever } from "./util"; 16 | 17 | type CodeQLToolsSource = 18 | | { 19 | codeqlFolder: string; 20 | sourceType: "toolcache"; 21 | } 22 | | { 23 | codeqlURL: string; 24 | sourceType: "download"; 25 | }; 26 | 27 | function getCodeQLBundleName(): string { 28 | let platform: string; 29 | if (process.platform === "win32") { 30 | platform = "win64"; 31 | } else if (process.platform === "linux") { 32 | platform = "linux64"; 33 | } else if (process.platform === "darwin") { 34 | platform = "osx64"; 35 | } else { 36 | return "codeql-bundle.tar.gz"; 37 | } 38 | return `codeql-bundle-${platform}.tar.gz`; 39 | } 40 | 41 | /** 42 | * Returns the path to the CodeQL bundle after finding or downloading it. 43 | * 44 | * @param tempDir A temporary directory to download the bundle to. 45 | * @param cliVersion The version of the CLI to use. 
46 | */ 47 | export async function setupCodeQLBundle( 48 | tempDir: string, 49 | cliVersion: string, 50 | ): Promise { 51 | const source = getCodeQLSource(cliVersion); 52 | 53 | let codeqlFolder: string; 54 | switch (source.sourceType) { 55 | case "toolcache": 56 | codeqlFolder = source.codeqlFolder; 57 | debug(`CodeQL found in cache ${codeqlFolder}`); 58 | break; 59 | case "download": { 60 | codeqlFolder = await downloadCodeQL( 61 | cliVersion, 62 | source.codeqlURL, 63 | tempDir, 64 | ); 65 | break; 66 | } 67 | default: 68 | assertNever(source); 69 | } 70 | 71 | return codeqlFolder; 72 | } 73 | 74 | /** 75 | * Determine where to find the CodeQL tools. This will check the tool cache 76 | * first, and if the tools are not found there, it will provide a download 77 | * URL for the tools. 78 | * 79 | * @param cliVersion The CLI version of the CodeQL bundle to find 80 | */ 81 | function getCodeQLSource(cliVersion: string): CodeQLToolsSource { 82 | // If we find the specified CLI version, we always use that. 83 | const codeqlFolder = findInToolCache("CodeQL", cliVersion); 84 | 85 | if (codeqlFolder) { 86 | info(`Found CodeQL tools version ${cliVersion} in the toolcache.`); 87 | 88 | return { 89 | codeqlFolder, 90 | sourceType: "toolcache", 91 | }; 92 | } 93 | 94 | info(`Did not find CodeQL tools version ${cliVersion} in the toolcache.`); 95 | 96 | /** Tag name of the CodeQL bundle, for example `codeql-bundle-v2.17.1`. 
*/ 97 | const tagName = `codeql-bundle-v${cliVersion}`; 98 | 99 | const url = `https://github.com/github/codeql-action/releases/download/${tagName}/${getCodeQLBundleName()}`; 100 | 101 | return { 102 | codeqlURL: url, 103 | sourceType: "download", 104 | }; 105 | } 106 | 107 | /** 108 | * @param cliVersion The CLI version of the CodeQL bundle to download 109 | * @param codeqlURL The URL to download the CodeQL bundle from 110 | * @param tempDir The temporary directory to download the CodeQL bundle to 111 | * @return the path to the downloaded CodeQL tools folder 112 | */ 113 | async function downloadCodeQL( 114 | cliVersion: string, 115 | codeqlURL: string, 116 | tempDir: string, 117 | ): Promise { 118 | const headers: OutgoingHttpHeaders = { 119 | accept: "application/octet-stream", 120 | }; 121 | info(`Downloading CodeQL tools from ${codeqlURL} . This may take a while.`); 122 | 123 | const dest = join(tempDir, uuidV4()); 124 | const finalHeaders = Object.assign( 125 | // eslint-disable-next-line @typescript-eslint/naming-convention 126 | { "User-Agent": "CodeQL Variant Analysis Action" }, 127 | headers, 128 | ); 129 | 130 | const toolsDownloadStart = performance.now(); 131 | const archivedBundlePath = await downloadTool( 132 | codeqlURL, 133 | dest, 134 | undefined, 135 | finalHeaders, 136 | ); 137 | const toolsDownloadDurationMs = Math.round( 138 | performance.now() - toolsDownloadStart, 139 | ); 140 | 141 | debug( 142 | `Finished downloading CodeQL bundle to ${archivedBundlePath} (${toolsDownloadDurationMs} ms).`, 143 | ); 144 | 145 | debug("Extracting CodeQL bundle."); 146 | const extractionStart = performance.now(); 147 | const extractedBundlePath = await extractTar(archivedBundlePath); 148 | const extractionMs = Math.round(performance.now() - extractionStart); 149 | debug( 150 | `Finished extracting CodeQL bundle to ${extractedBundlePath} (${extractionMs} ms).`, 151 | ); 152 | await rmRF(archivedBundlePath); 153 | 154 | debug("Caching CodeQL bundle."); 155 | 
const toolcachedBundlePath = await cacheDir( 156 | extractedBundlePath, 157 | "CodeQL", 158 | cliVersion, 159 | ); 160 | 161 | // Defensive check: we expect `cacheDir` to copy the bundle to a new location. 162 | if (toolcachedBundlePath !== extractedBundlePath) { 163 | await rmRF(extractedBundlePath); 164 | } 165 | 166 | return toolcachedBundlePath; 167 | } 168 | -------------------------------------------------------------------------------- /src/codeql-version.test.ts: -------------------------------------------------------------------------------- 1 | import { getDefaultCliVersion } from "./codeql-version"; 2 | 3 | describe("getDefaultCliVersion", () => { 4 | it("returns most recent CodeQL CLI version when passed valid features", () => { 5 | const features = { 6 | /* eslint-disable @typescript-eslint/naming-convention -- names are from an API response */ 7 | analysis_summary_v2_enabled: true, 8 | cli_config_file_enabled: true, 9 | cli_sarif_merge_enabled: true, 10 | codeql_java_lombok_enabled: true, 11 | combine_sarif_files_deprecation_warning_enabled: false, 12 | cpp_dependency_installation_enabled: true, 13 | cpp_trap_caching_enabled: false, 14 | database_uploads_enabled: true, 15 | default_codeql_version_2_16_4_enabled: true, 16 | default_codeql_version_2_16_5_enabled: true, 17 | default_codeql_version_2_16_6_enabled: false, 18 | default_codeql_version_2_17_0_enabled: true, 19 | default_codeql_version_2_17_1_enabled: true, 20 | default_codeql_version_2_17_2_enabled: false, 21 | default_codeql_version_2_17_3_enabled: false, 22 | default_codeql_version_2_17_4_enabled: false, 23 | default_codeql_version_2_17_5_enabled: false, 24 | default_codeql_version_2_17_6_enabled: false, 25 | disable_java_buildless_enabled: false, 26 | disable_kotlin_analysis_enabled: false, 27 | disable_python_dependency_installation_enabled: true, 28 | python_default_is_to_skip_dependency_installation_enabled: true, 29 | evaluator_fine_grained_parallelism_enabled: true, 30 | 
export_code_scanning_config_enabled: true, 31 | export_diagnostics_enabled: true, 32 | file_baseline_information_enabled: true, 33 | golang_extraction_reconciliation_enabled: true, 34 | language_baseline_config_enabled: true, 35 | lua_tracer_config_enabled: true, 36 | ml_powered_queries_enabled: false, 37 | qa_telemetry_enabled: false, 38 | scaling_reserved_ram_enabled: true, 39 | sublanguage_file_coverage_enabled: true, 40 | trap_caching_enabled: true, 41 | upload_failed_sarif_enabled: true, 42 | /* eslint-enable @typescript-eslint/naming-convention */ 43 | }; 44 | 45 | expect(getDefaultCliVersion(features)).toBe("2.17.1"); 46 | }); 47 | 48 | it("returns undefined when version features are missing", () => { 49 | const features = { 50 | /* eslint-disable @typescript-eslint/naming-convention -- names are from an API response */ 51 | analysis_summary_v2_enabled: true, 52 | upload_failed_sarif_enabled: true, 53 | /* eslint-enable @typescript-eslint/naming-convention */ 54 | }; 55 | 56 | expect(getDefaultCliVersion(features)).toBe(undefined); 57 | }); 58 | }); 59 | -------------------------------------------------------------------------------- /src/codeql-version.ts: -------------------------------------------------------------------------------- 1 | import { debug, warning } from "@actions/core"; 2 | import { valid } from "semver"; 3 | 4 | const DEFAULT_VERSION_FEATURE_FLAG_PREFIX = "default_codeql_version_"; 5 | const DEFAULT_VERSION_FEATURE_FLAG_SUFFIX = "_enabled"; 6 | 7 | function getCliVersionFromFeatureFlag(f: string): string | undefined { 8 | if ( 9 | !f.startsWith(DEFAULT_VERSION_FEATURE_FLAG_PREFIX) || 10 | !f.endsWith(DEFAULT_VERSION_FEATURE_FLAG_SUFFIX) 11 | ) { 12 | return undefined; 13 | } 14 | const version = f 15 | .substring( 16 | DEFAULT_VERSION_FEATURE_FLAG_PREFIX.length, 17 | f.length - DEFAULT_VERSION_FEATURE_FLAG_SUFFIX.length, 18 | ) 19 | .replace(/_/g, "."); 20 | 21 | if (!valid(version)) { 22 | warning( 23 | `Ignoring feature flag ${f} as it 
does not specify a valid CodeQL version.`, 24 | ); 25 | return undefined; 26 | } 27 | return version; 28 | } 29 | 30 | export function getDefaultCliVersion( 31 | features: Record, 32 | ): string | undefined { 33 | const enabledFeatureFlagCliVersions = Object.entries(features) 34 | .map(([f, isEnabled]) => 35 | isEnabled ? getCliVersionFromFeatureFlag(f) : undefined, 36 | ) 37 | .filter((f): f is string => f !== undefined); 38 | 39 | if (enabledFeatureFlagCliVersions.length === 0) { 40 | return undefined; 41 | } 42 | 43 | const maxCliVersion = enabledFeatureFlagCliVersions.reduce( 44 | (maxVersion, currentVersion) => 45 | currentVersion > maxVersion ? currentVersion : maxVersion, 46 | enabledFeatureFlagCliVersions[0], 47 | ); 48 | debug(`Derived default CLI version of ${maxCliVersion} from feature flags.`); 49 | 50 | return maxCliVersion; 51 | } 52 | -------------------------------------------------------------------------------- /src/codeql.test.ts: -------------------------------------------------------------------------------- 1 | import fs from "fs"; 2 | import { tmpdir } from "os"; 3 | import path, { join } from "path"; 4 | 5 | import { rmRF } from "@actions/io"; 6 | 7 | import { 8 | runQuery, 9 | getBqrsInfo, 10 | getDatabaseMetadata, 11 | BQRSInfo, 12 | getQueryPackQueries, 13 | injectVersionControlInfo, 14 | getSarifResultCount, 15 | Sarif, 16 | getSarifOutputType, 17 | QueryMetadata, 18 | getBqrsResultCount, 19 | getQueryPackInfo, 20 | } from "./codeql"; 21 | import { BaseCodeqlCli, CodeqlCli } from "./codeql-cli"; 22 | 23 | describe("codeql", () => { 24 | const cwd = process.cwd(); 25 | 26 | let codeql: CodeqlCli; 27 | let db: string; 28 | let tmpDir: string; 29 | let dbTmpDir: string; 30 | 31 | beforeAll( 32 | async () => { 33 | codeql = new BaseCodeqlCli(process.env.CODEQL_BIN_PATH || "codeql"); 34 | 35 | dbTmpDir = path.resolve(fs.mkdtempSync(path.join(tmpdir(), "db-"))); 36 | 37 | const projectDir = path.join(dbTmpDir, "project"); 38 | const dbDir = 
path.join(dbTmpDir, "db"); 39 | fs.mkdirSync(projectDir); 40 | const testFile = path.join(projectDir, "test.js"); 41 | fs.writeFileSync(testFile, "const x = 1;"); 42 | 43 | await codeql.run([ 44 | "database", 45 | "create", 46 | "--language=javascript", 47 | `--source-root=${projectDir}`, 48 | dbDir, 49 | ]); 50 | 51 | db = path.join(dbTmpDir, "database.zip"); 52 | await codeql.run(["database", "bundle", `--output=${db}`, dbDir]); 53 | }, 54 | // 5 minute timeout to build a CodeQL database 55 | 5 * 60 * 1000, 56 | ); 57 | 58 | afterAll( 59 | async () => { 60 | if (dbTmpDir) { 61 | await rmRF(dbTmpDir); 62 | } 63 | }, 64 | // 30 second timeout to delete an unzipped CodeQL database 65 | 30 * 1000, 66 | ); 67 | 68 | beforeEach(() => { 69 | // Use a different temporary directory that tests can use 70 | tmpDir = path.resolve(fs.mkdtempSync(path.join(tmpdir(), "tmp-"))); 71 | }); 72 | 73 | afterEach(async () => { 74 | if (tmpDir !== undefined) { 75 | await rmRF(tmpDir); 76 | } 77 | }); 78 | 79 | describe("getQueryPackInfo", () => { 80 | it("gets query pack info", async () => { 81 | const queryPackInfo = await getQueryPackInfo( 82 | codeql, 83 | join(cwd, "testdata/test_pack"), 84 | ); 85 | 86 | const queries = {}; 87 | queries[path.resolve("testdata/test_pack/x/query.ql")] = { 88 | name: "Test query", 89 | description: "Test query description", 90 | kind: "table", 91 | id: "test/query/id", 92 | }; 93 | expect(queryPackInfo).toEqual({ 94 | path: path.resolve("testdata/test_pack"), 95 | name: "codeql/queries", 96 | queries, 97 | }); 98 | }); 99 | 100 | it("gets query pack info when there are multiple queries", async () => { 101 | const queryPackInfo = await getQueryPackInfo( 102 | codeql, 103 | join(cwd, "testdata/test_pack_multiple_queries"), 104 | ); 105 | 106 | const queries = {}; 107 | queries[path.resolve("testdata/test_pack_multiple_queries/x/query.ql")] = 108 | { 109 | name: "Test query 1", 110 | kind: "table", 111 | id: "test/query/one", 112 | }; 113 | 
queries[path.resolve("testdata/test_pack_multiple_queries/z/query.ql")] = 114 | { 115 | name: "Test query 2", 116 | kind: "table", 117 | id: "test/query/two", 118 | }; 119 | expect(queryPackInfo).toEqual({ 120 | path: path.resolve("testdata/test_pack_multiple_queries"), 121 | name: "codeql/queries", 122 | queries, 123 | }); 124 | }); 125 | }); 126 | 127 | describe("runQuery", () => { 128 | beforeEach(() => { 129 | // Change to the temporary directory because some tests write files to the current working directory 130 | process.chdir(tmpDir); 131 | }); 132 | 133 | afterEach(() => { 134 | process.chdir(cwd); 135 | }); 136 | 137 | it( 138 | "runs a query in a pack", 139 | async () => { 140 | const queryPack = await getQueryPackInfo( 141 | codeql, 142 | join(cwd, "testdata/test_pack"), 143 | ); 144 | await runQuery(codeql, db, "a/b", queryPack); 145 | 146 | expect(fs.existsSync(path.join("results", "results.bqrs"))).toBe(true); 147 | expect( 148 | fs.existsSync(path.join("results", "codeql/queries/x/query.bqrs")), 149 | ).toBe(false); 150 | 151 | const bqrsInfo: BQRSInfo = await getBqrsInfo( 152 | codeql, 153 | path.join("results", "results.bqrs"), 154 | ); 155 | expect(bqrsInfo.resultSets.length).toBe(1); 156 | expect(bqrsInfo.resultSets[0].name).toBe("#select"); 157 | expect(bqrsInfo.compatibleQueryKinds.includes("Table")).toBe(true); 158 | }, 159 | // 1 minute timeout to run a CodeQL query 160 | 60 * 1000, 161 | ); 162 | 163 | it( 164 | "runs multiple queries in a pack", 165 | async () => { 166 | const queryPack = await getQueryPackInfo( 167 | codeql, 168 | join(cwd, "testdata/test_pack_multiple_queries"), 169 | ); 170 | await runQuery(codeql, db, "a/b", queryPack); 171 | 172 | const bqrsFilePath1 = "db/results/codeql/queries/x/query.bqrs"; 173 | expect(fs.existsSync(bqrsFilePath1)).toBe(true); 174 | 175 | const bqrsInfo1 = await getBqrsInfo(codeql, bqrsFilePath1); 176 | expect(bqrsInfo1.resultSets.length).toBe(1); 177 | 
expect(bqrsInfo1.resultSets[0].name).toBe("#select"); 178 | expect(bqrsInfo1.compatibleQueryKinds.includes("Table")).toBe(true); 179 | 180 | const bqrsFilePath2 = "db/results/codeql/queries/z/query.bqrs"; 181 | expect(fs.existsSync(bqrsFilePath2)).toBe(true); 182 | 183 | const bqrsInfo2 = await getBqrsInfo(codeql, bqrsFilePath2); 184 | expect(bqrsInfo2.resultSets.length).toBe(1); 185 | expect(bqrsInfo2.resultSets[0].name).toBe("#select"); 186 | expect(bqrsInfo2.compatibleQueryKinds.includes("Table")).toBe(true); 187 | 188 | expect(fs.existsSync(path.join("results", "results.bqrs"))).toBe(false); 189 | }, 190 | // 1 minute timeout to run a CodeQL query 191 | 60 * 1000, 192 | ); 193 | }); 194 | 195 | describe("getDatabaseMetadata", () => { 196 | it("gets the commit SHA and CLI version from a database", () => { 197 | fs.writeFileSync( 198 | path.join(tmpDir, "codeql-database.yml"), 199 | `--- 200 | sourceLocationPrefix: "hello-world" 201 | baselineLinesOfCode: 1 202 | unicodeNewlines: true 203 | columnKind: "utf16" 204 | primaryLanguage: "javascript" 205 | creationMetadata: 206 | sha: "ccf1e13626d97b009b4da78f719f028d9f7cdf80" 207 | cliVersion: "2.7.2" 208 | creationTime: "2021-11-08T12:58:40.345998Z" 209 | `, 210 | ); 211 | expect(getDatabaseMetadata(tmpDir).creationMetadata?.sha).toBe( 212 | "ccf1e13626d97b009b4da78f719f028d9f7cdf80", 213 | ); 214 | expect(getDatabaseMetadata(tmpDir).creationMetadata?.cliVersion).toBe( 215 | "2.7.2", 216 | ); 217 | }); 218 | 219 | it("gets the commit SHA when codeql-database.yml exists, but does not contain SHA", () => { 220 | fs.writeFileSync( 221 | path.join(tmpDir, "codeql-database.yml"), 222 | `--- 223 | sourceLocationPrefix: "hello-world" 224 | baselineLinesOfCode: 17442 225 | unicodeNewlines: true 226 | columnKind: "utf16" 227 | primaryLanguage: "javascript" 228 | `, 229 | ); 230 | expect(getDatabaseMetadata(tmpDir).creationMetadata?.sha).toBe(undefined); 231 | }); 232 | 233 | it("gets the commit SHA when codeql-database.yml 
exists, but is invalid", () => { 234 | fs.writeFileSync( 235 | path.join(tmpDir, "codeql-database.yml"), 236 | ` foo:" 237 | bar 238 | `, 239 | ); 240 | expect(getDatabaseMetadata(tmpDir).creationMetadata?.sha).toBe(undefined); 241 | }); 242 | 243 | it("gets the commit SHA when the codeql-database.yml does not exist", () => { 244 | expect(getDatabaseMetadata(tmpDir).creationMetadata?.sha).toBe(undefined); 245 | }); 246 | }); 247 | 248 | describe("getQueryPackQueries", () => { 249 | it("gets the queries from a pack", async () => { 250 | expect( 251 | await getQueryPackQueries( 252 | codeql, 253 | "testdata/test_pack", 254 | "codeql/queries", 255 | ), 256 | ).toEqual([path.resolve("testdata/test_pack/x/query.ql")]); 257 | }); 258 | }); 259 | 260 | describe("injectVersionControlInfo", () => { 261 | it("populates the SARIF versionControlProvenance property", () => { 262 | const sarif: Sarif = { 263 | runs: [ 264 | { 265 | results: [], 266 | }, 267 | ], 268 | }; 269 | const nwo = "a/b"; 270 | const sha = "testsha123"; 271 | 272 | injectVersionControlInfo(sarif, nwo, sha); 273 | const expected = { 274 | repositoryUri: `https://github.com/${nwo}`, 275 | revisionId: sha, 276 | }; 277 | 278 | expect(sarif.runs[0].versionControlProvenance?.[0]).toEqual(expected); 279 | }); 280 | }); 281 | 282 | describe("getSarifResultCount", () => { 283 | it("counts the number of results in a SARIF file)", () => { 284 | const sarif: Sarif = { 285 | runs: [ 286 | { 287 | results: [ 288 | { 289 | ruleId: "test-rule1", 290 | }, 291 | { 292 | ruleId: "test-rule2", 293 | }, 294 | { 295 | ruleId: "test-rule3", 296 | }, 297 | ], 298 | }, 299 | ], 300 | }; 301 | 302 | expect(getSarifResultCount(sarif)).toBe(3); 303 | }); 304 | }); 305 | 306 | describe("getSarifOutputType", () => { 307 | it("gets the SARIF output type when there is no `@kind` metadata", () => { 308 | const queryMetadata: QueryMetadata = {}; 309 | 310 | const compatibleQueryKinds = [ 311 | "Problem", 312 | "PathProblem", 313 | 
"Table", 314 | "Diagnostic", 315 | ]; 316 | 317 | expect(getSarifOutputType(queryMetadata, compatibleQueryKinds)).toBe( 318 | undefined, 319 | ); 320 | }); 321 | 322 | it("gets the SARIF output type when the `@kind` metadata is not compatible with output", () => { 323 | const queryMetadata: QueryMetadata = { 324 | kind: "path-problem", 325 | }; 326 | 327 | const compatibleQueryKinds = ["Problem", "Table", "Diagnostic"]; 328 | 329 | expect(getSarifOutputType(queryMetadata, compatibleQueryKinds)).toBe( 330 | undefined, 331 | ); 332 | }); 333 | 334 | it("gets the SARIF output type when the `@kind` metadata is compatible with output", () => { 335 | const queryMetadata: QueryMetadata = { 336 | kind: "problem", 337 | }; 338 | 339 | const compatibleQueryKinds = [ 340 | "Problem", 341 | "PathProblem", 342 | "Table", 343 | "Diagnostic", 344 | ]; 345 | 346 | expect(getSarifOutputType(queryMetadata, compatibleQueryKinds)).toBe( 347 | "problem", 348 | ); 349 | }); 350 | 351 | it("gets the SARIF output type when the `@kind` metadata is an alert alias", () => { 352 | const queryMetadata: QueryMetadata = { 353 | kind: "alert", 354 | }; 355 | 356 | const compatibleQueryKinds = [ 357 | "Problem", 358 | "PathProblem", 359 | "Table", 360 | "Diagnostic", 361 | ]; 362 | 363 | expect(getSarifOutputType(queryMetadata, compatibleQueryKinds)).toBe( 364 | "problem", 365 | ); 366 | }); 367 | 368 | it("gets the SARIF output type when the `@kind` metadata is a path-alert alias", () => { 369 | const queryMetadata: QueryMetadata = { 370 | kind: "path-alert", 371 | }; 372 | 373 | const compatibleQueryKinds = [ 374 | "Problem", 375 | "PathProblem", 376 | "Table", 377 | "Diagnostic", 378 | ]; 379 | 380 | expect(getSarifOutputType(queryMetadata, compatibleQueryKinds)).toBe( 381 | "path-problem", 382 | ); 383 | }); 384 | }); 385 | 386 | describe("getBqrsResultCount", () => { 387 | it("uses result count from #select result set if it exists", () => { 388 | const bqrsInfo: BQRSInfo = { 389 | resultSets: 
[{ name: "#select", rows: 3 }], 390 | compatibleQueryKinds: [], 391 | }; 392 | 393 | expect(getBqrsResultCount(bqrsInfo)).toBe(3); 394 | }); 395 | 396 | it("uses result count from problems result set if it exists", () => { 397 | const bqrsInfo: BQRSInfo = { 398 | resultSets: [{ name: "problems", rows: 4 }], 399 | compatibleQueryKinds: [], 400 | }; 401 | 402 | expect(getBqrsResultCount(bqrsInfo)).toBe(4); 403 | }); 404 | 405 | it("uses result count from #select result set if both #select and problems result sets exist", () => { 406 | const bqrsInfo: BQRSInfo = { 407 | resultSets: [ 408 | { name: "#select", rows: 3 }, 409 | { name: "problems", rows: 4 }, 410 | ], 411 | compatibleQueryKinds: [], 412 | }; 413 | 414 | expect(getBqrsResultCount(bqrsInfo)).toBe(3); 415 | }); 416 | 417 | it("throws error if neither #select or problems result sets exist", () => { 418 | const bqrsInfo: BQRSInfo = { 419 | resultSets: [ 420 | { name: "something", rows: 13 }, 421 | { name: "unknown", rows: 42 }, 422 | ], 423 | compatibleQueryKinds: [], 424 | }; 425 | 426 | expect(() => getBqrsResultCount(bqrsInfo)).toThrow( 427 | new Error( 428 | "BQRS does not contain any result sets matching known names. 
Expected one of #select or problems but found something, unknown", 429 | ), 430 | ); 431 | }); 432 | }); 433 | }); 434 | -------------------------------------------------------------------------------- /src/codeql.ts: -------------------------------------------------------------------------------- 1 | import fs from "fs"; 2 | import path from "path"; 3 | 4 | import { CodeqlCli } from "./codeql-cli"; 5 | import { camelize } from "./deserialize"; 6 | import { download } from "./download"; 7 | import { HTTPError } from "./http-error"; 8 | import { validateObject } from "./json-validation"; 9 | import { getMemoryFlagValue } from "./query-run-memory"; 10 | import { parseYamlFromFile } from "./yaml"; 11 | 12 | export interface RunQueryResult { 13 | resultCount: number; 14 | databaseSHA: string | undefined; 15 | sourceLocationPrefix: string; 16 | bqrsFilePaths: BqrsFilePaths; 17 | sarifFilePath?: string; 18 | } 19 | 20 | interface BqrsFilePaths { 21 | basePath: string; 22 | relativeFilePaths: string[]; 23 | } 24 | 25 | // Must be a valid value for "-t=kind" when doing "codeql bqrs interpret" 26 | type SarifOutputType = "problem" | "path-problem"; 27 | 28 | // Models just the pieces of the SARIF spec that we need 29 | export interface Sarif { 30 | runs: Array<{ 31 | versionControlProvenance?: unknown[]; 32 | results: unknown[]; 33 | }>; 34 | } 35 | 36 | /** 37 | * Run a query. Will operate on the current working directory and create the following directories: 38 | * - query/ (query.ql and any other supporting files) 39 | * - results/ (results.{bqrs,sarif}) 40 | * 41 | * @param codeql A runner of the CodeQL CLI to execute commands 42 | * @param database The path to the bundled database zip file 43 | * @param nwo The name of the repository 44 | * @param queryPackPath The path to the query pack 45 | * @returns Promise Resolves when the query has finished running. Returns information 46 | * about the query result and paths to the result files. 
 */
export async function runQuery(
  codeql: CodeqlCli,
  database: string,
  nwo: string,
  queryPack: QueryPackInfo,
): Promise<RunQueryResult> {
  // Non-recursive mkdir: fails if "results" already exists, which guards
  // against mixing output from two runs in the same working directory.
  fs.mkdirSync("results");

  // Unpack the bundled database zip into ./db.
  const databasePath = path.resolve("db");
  await codeql.run([
    "database",
    "unbundle",
    database,
    `--name=${path.basename(databasePath)}`,
    `--target=${path.dirname(databasePath)}`,
  ]);

  const dbMetadata = getDatabaseMetadata(databasePath);
  console.log(
    `This database was created using CodeQL CLI version ${dbMetadata.creationMetadata?.cliVersion}`,
  );
  // sha may be deserialized from YAML as a bigint, hence toString().
  const databaseSHA = dbMetadata.creationMetadata?.sha?.toString();

  // Run every query in the pack against the unbundled database.
  await codeql.run([
    "database",
    "run-queries",
    `--ram=${getMemoryFlagValue().toString()}`,
    "--additional-packs",
    queryPack.path,
    "--",
    databasePath,
    queryPack.name,
  ]);

  // Calculate query run information like BQRS file paths, etc.
  const queryPackRunResults = await getQueryPackRunResults(
    codeql,
    databasePath,
    queryPack,
  );

  const sourceLocationPrefix = await getSourceLocationPrefix(
    codeql,
    databasePath,
  );

  // SARIF is only produced when every query in the pack supports it;
  // otherwise callers get the raw BQRS files and the summed result count.
  const shouldGenerateSarif = queryPackSupportsSarif(queryPackRunResults);

  let resultCount: number;
  let sarifFilePath: string | undefined;
  if (shouldGenerateSarif) {
    const sarif = await generateSarif(
      codeql,
      nwo,
      databasePath,
      queryPack.path,
      databaseSHA,
    );
    resultCount = getSarifResultCount(sarif);
    sarifFilePath = path.resolve("results", "results.sarif");
    // Re-write the SARIF so the injected versionControlProvenance is persisted.
    fs.writeFileSync(sarifFilePath, JSON.stringify(sarif));
  } else {
    resultCount = queryPackRunResults.totalResultsCount;
  }

  const bqrsFilePaths = await adjustBqrsFiles(queryPackRunResults);

  return {
    resultCount,
    databaseSHA,
    sourceLocationPrefix,
    bqrsFilePaths,
    sarifFilePath,
  };
}

/**
 * Normalizes the location of the BQRS output files after a run.
 * Single-query packs are special-cased for VS Code extension compatibility.
 */
async function adjustBqrsFiles(
  queryPackRunResults: QueryPackRunResults,
): Promise<BqrsFilePaths> {
  if (queryPackRunResults.queries.length === 1) {
    // If we have a single query, move the BQRS file to "results.bqrs" in order to
    // maintain backwards compatibility with the VS Code extension, since it expects
    // the BQRS file to be at the top level and be called "results.bqrs".
    const currentBqrsFilePath = path.join(
      queryPackRunResults.resultsBasePath,
      queryPackRunResults.queries[0].relativeBqrsFilePath,
    );
    const newBqrsFilePath = path.resolve("results", "results.bqrs");
    await fs.promises.rename(currentBqrsFilePath, newBqrsFilePath);
    return { basePath: "results", relativeFilePaths: ["results.bqrs"] };
  }

  // Multiple queries: leave the files where "database run-queries" put them.
  return {
    basePath: queryPackRunResults.resultsBasePath,
    relativeFilePaths: queryPackRunResults.queries.map(
      (q) => q.relativeBqrsFilePath,
    ),
  };
}

/**
 * Downloads the CodeQL database for the given repository and language via
 * the code scanning API.
 *
 * @param repoId Numeric repository id; used to name the downloaded zip.
 * @param repoName Repository "name with owner" used in the API path.
 * @param language The language of the requested database.
 * @param pat Optional personal access token used as a "token" auth header.
 * @returns Path to the downloaded database zip.
 */
export async function downloadDatabase(
  repoId: number,
  repoName: string,
  language: string,
  pat?: string,
): Promise<string> {
  let authHeader: string | undefined = undefined;
  if (pat) {
    authHeader = `token ${pat}`;
  }

  try {
    return await download(
      `${process.env.GITHUB_API_URL || "https://api.github.com"}/repos/${repoName}/code-scanning/codeql/databases/${language}`,
      `${repoId}.zip`,
      authHeader,
      "application/zip",
    );
  } catch (error: unknown) {
    console.log("Error while downloading database");
    // Translate the specific "no database for this language" 404 into a
    // user-actionable message; rethrow everything else unchanged.
    if (
      error instanceof HTTPError &&
      error.httpStatusCode === 404 &&
      error.httpMessage.includes("No database available for")
    ) {
      throw new Error(
        `Language mismatch: The query targets ${language}, but the repository "${repoName}" has no CodeQL database available for that language.`,
      );
    } else {
      throw error;
    }
  }
}

export type QueryMetadata = {
  id?: string;
  kind?: string;
};

// Calls `resolve metadata` for the given query file and returns JSON output
async function getQueryMetadata(
  codeql: CodeqlCli,
  query: string,
): Promise<QueryMetadata> {
  const queryMetadataOutput = await codeql.run([
    "resolve",
    "metadata",
    "--format=json",
    query,
  ]);
  if (queryMetadataOutput.exitCode !== 0) {
    throw new Error(
      `Unable to run codeql resolve metadata. Exit code: ${queryMetadataOutput.exitCode}`,
    );
  }
  // camelize converts kebab-case JSON keys to camelCase during parsing.
  return validateObject(
    JSON.parse(queryMetadataOutput.stdout, camelize),
    "queryMetadata",
  );
}

export interface BQRSInfo {
  resultSets: Array<{
    name: string;
    rows: number;
  }>;
  compatibleQueryKinds: string[];
}

// Calls `bqrs info` for the given bqrs file and returns JSON output
export async function getBqrsInfo(
  codeql: CodeqlCli,
  bqrs: string,
): Promise<BQRSInfo> {
  const bqrsInfoOutput = await codeql.run([
    "bqrs",
    "info",
    "--format=json",
    bqrs,
  ]);
  if (bqrsInfoOutput.exitCode !== 0) {
    throw new Error(
      `Unable to run codeql bqrs info. Exit code: ${bqrsInfoOutput.exitCode}`,
    );
  }
  return validateObject(
    JSON.parse(bqrsInfoOutput.stdout, camelize),
    "bqrsInfo",
  );
}

// The expected output from "codeql resolve database" in getSourceLocationPrefix
export interface ResolvedDatabase {
  sourceLocationPrefix: string;
}

async function getSourceLocationPrefix(
  codeql: CodeqlCli,
  databasePath: string,
) {
  const resolveDbOutput = await codeql.run([
    "resolve",
    "database",
    databasePath,
  ]);
  const resolvedDatabase = validateObject(
    JSON.parse(resolveDbOutput.stdout),
    "resolvedDatabase",
  );
  return resolvedDatabase.sourceLocationPrefix;
}

interface QueryPackRunResults {
  queries: Array<{
    queryPath: string;
    queryMetadata: QueryMetadata;
    relativeBqrsFilePath: string;
    bqrsInfo: BQRSInfo;
  }>;
  totalResultsCount: number;
  resultsBasePath: string;
}

/**
 * Locates and inspects the BQRS file produced for each query in the pack,
 * summing the per-query result counts.
 *
 * @throws Error if an expected BQRS file is missing.
 */
async function getQueryPackRunResults(
  codeql: CodeqlCli,
  databasePath: string,
  queryPack: QueryPackInfo,
): Promise<QueryPackRunResults> {
  // This is where results are saved, according to
  // https://codeql.github.com/docs/codeql-cli/manual/database-run-queries/
  const resultsBasePath = path.resolve(databasePath, "results");

  const queries: Array<{
    queryPath: string;
    queryMetadata: QueryMetadata;
    relativeBqrsFilePath: string;
    bqrsInfo: BQRSInfo;
  }> = [];

  let totalResultsCount = 0;

  for (const [queryPath, queryMetadata] of Object.entries(queryPack.queries)) {
    // Calculate the BQRS file path
    const queryPackRelativePath = path.relative(queryPack.path, queryPath);
    const parsedQueryPath = path.parse(queryPackRelativePath);
    const relativeBqrsFilePath = path.join(
      queryPack.name,
      parsedQueryPath.dir,
      `${parsedQueryPath.name}.bqrs`,
    );
    const bqrsFilePath = path.join(resultsBasePath, relativeBqrsFilePath);

    if (!fs.existsSync(bqrsFilePath)) {
      throw new Error(
        `Could not find BQRS file for query ${queryPath} at ${bqrsFilePath}`,
      );
    }

    const bqrsInfo = await getBqrsInfo(codeql, bqrsFilePath);

    queries.push({
      queryPath,
      queryMetadata,
      relativeBqrsFilePath,
      bqrsInfo,
    });

    totalResultsCount += getBqrsResultCount(bqrsInfo);
  }

  return {
    totalResultsCount,
    resultsBasePath,
    queries,
  };
}

// True when the query's @kind maps to a SARIF-interpretable output type.
function querySupportsSarif(
  queryMetadata: QueryMetadata,
  bqrsInfo: BQRSInfo,
): boolean {
  const sarifOutputType = getSarifOutputType(
    queryMetadata,
    bqrsInfo.compatibleQueryKinds,
  );
  return sarifOutputType !== undefined;
}

/**
 * All queries in the pack must support SARIF in order
 * for the query pack to support SARIF.
 */
function queryPackSupportsSarif(
  queriesResultInfo: QueryPackRunResults,
): boolean {
  return queriesResultInfo.queries.every((q) =>
    querySupportsSarif(q.queryMetadata, q.bqrsInfo),
  );
}

/**
 * Checks if the query kind is compatible with SARIF output.
350 | */ 351 | export function getSarifOutputType( 352 | queryMetadata: QueryMetadata, 353 | compatibleQueryKinds: string[], 354 | ): SarifOutputType | undefined { 355 | const queryKind = queryMetadata.kind; 356 | if ( 357 | // path-alert is an alias of path-problem 358 | (queryKind === "path-problem" || queryKind === "path-alert") && 359 | compatibleQueryKinds.includes("PathProblem") 360 | ) { 361 | return "path-problem"; 362 | } else if ( 363 | // alert is an alias of problem 364 | (queryKind === "problem" || queryKind === "alert") && 365 | compatibleQueryKinds.includes("Problem") 366 | ) { 367 | return "problem"; 368 | } else { 369 | return undefined; 370 | } 371 | } 372 | 373 | // Generates sarif from the given bqrs file, if query kind supports it 374 | async function generateSarif( 375 | codeql: CodeqlCli, 376 | nwo: string, 377 | databasePath: string, 378 | queryPackPath: string, 379 | databaseSHA?: string, 380 | ): Promise { 381 | const sarifFile = path.resolve("results", "results.sarif"); 382 | await codeql.run([ 383 | "database", 384 | "interpret-results", 385 | "--format=sarif-latest", 386 | `--output=${sarifFile}`, 387 | "--sarif-add-snippets", 388 | "--no-group-results", 389 | databasePath, 390 | queryPackPath, 391 | ]); 392 | const sarif = validateObject( 393 | JSON.parse(fs.readFileSync(sarifFile, "utf8")), 394 | "sarif", 395 | ); 396 | 397 | injectVersionControlInfo(sarif, nwo, databaseSHA); 398 | return sarif; 399 | } 400 | 401 | /** 402 | * Injects the GitHub repository URL and, if available, the commit SHA into the 403 | * SARIF `versionControlProvenance` property. 
404 | */ 405 | export function injectVersionControlInfo( 406 | sarif: Sarif, 407 | nwo: string, 408 | databaseSHA?: string, 409 | ): void { 410 | for (const run of sarif.runs) { 411 | run.versionControlProvenance = run.versionControlProvenance || []; 412 | const repositoryUri = `${process.env.GITHUB_SERVER_URL || "https://github.com"}/${nwo}`; 413 | if (databaseSHA) { 414 | run.versionControlProvenance.push({ 415 | repositoryUri, 416 | revisionId: databaseSHA, 417 | }); 418 | } else { 419 | run.versionControlProvenance.push({ 420 | repositoryUri, 421 | }); 422 | } 423 | } 424 | } 425 | 426 | /** 427 | * Gets the number of results in the given SARIF data. 428 | */ 429 | export function getSarifResultCount(sarif: Sarif): number { 430 | let count = 0; 431 | for (const run of sarif.runs) { 432 | count = count + run.results.length; 433 | } 434 | return count; 435 | } 436 | 437 | /** 438 | * Names of result sets that can be considered the "default" result set 439 | * and should be used when calculating number of results and when showing 440 | * results to users. 441 | * Will check result sets in this order and use the first one that exists. 442 | */ 443 | const KNOWN_RESULT_SET_NAMES: string[] = ["#select", "problems"]; 444 | 445 | /** 446 | * Gets the number of results in the given BQRS data. 447 | */ 448 | export function getBqrsResultCount(bqrsInfo: BQRSInfo): number { 449 | for (const name of KNOWN_RESULT_SET_NAMES) { 450 | const resultSet = bqrsInfo.resultSets.find((r) => r.name === name); 451 | if (resultSet !== undefined) { 452 | return resultSet.rows; 453 | } 454 | } 455 | 456 | const resultSetNames = bqrsInfo.resultSets.map((r) => r.name); 457 | throw new Error( 458 | `BQRS does not contain any result sets matching known names. 
Expected one of ${KNOWN_RESULT_SET_NAMES.join(" or ")} but found ${resultSetNames.join(", ")}`, 459 | ); 460 | } 461 | 462 | interface DatabaseMetadata { 463 | creationMetadata?: { 464 | sha?: string | bigint; 465 | cliVersion?: string; 466 | }; 467 | } 468 | 469 | /** 470 | * Gets (a subset of) the database metadata from a CodeQL database. In the 471 | * future this information may be available using `codeql resolve database` 472 | * instead. Because this information is only used for enhancing the output we 473 | * catch errors for now. The caller must decide what to do in the case of 474 | * missing information. 475 | * 476 | * @param databasePath The path to the database. 477 | * @returns The database metadata. 478 | */ 479 | export function getDatabaseMetadata(databasePath: string): DatabaseMetadata { 480 | try { 481 | return parseYamlFromFile( 482 | path.join(databasePath, "codeql-database.yml"), 483 | ); 484 | } catch (error) { 485 | console.log(`Unable to read codeql-database.yml: ${error}`); 486 | return {}; 487 | } 488 | } 489 | 490 | interface QueryPackInfo { 491 | path: string; 492 | name: string; 493 | queries: { [path: string]: QueryMetadata }; 494 | } 495 | 496 | export async function getQueryPackInfo( 497 | codeql: CodeqlCli, 498 | queryPackPath: string, 499 | ): Promise { 500 | queryPackPath = path.resolve(queryPackPath); 501 | 502 | const name = getQueryPackName(queryPackPath); 503 | 504 | const queryPaths = await getQueryPackQueries(codeql, queryPackPath, name); 505 | const queries: { [path: string]: QueryMetadata } = {}; 506 | for (const queryPath of queryPaths) { 507 | const queryMetadata = await getQueryMetadata(codeql, queryPath); 508 | queries[queryPath] = queryMetadata; 509 | } 510 | 511 | return { 512 | path: queryPackPath, 513 | name, 514 | queries, 515 | }; 516 | } 517 | 518 | // The expected output from "codeql resolve queries" in getQueryPackQueries 519 | export type ResolvedQueries = string[]; 520 | 521 | /** 522 | * Gets the queries 
for a pack. 523 | * 524 | * @param codeql The path to the codeql CLI 525 | * @param queryPackPath The path to the query pack on disk. 526 | * @returns The path to a query file. 527 | */ 528 | export async function getQueryPackQueries( 529 | codeql: CodeqlCli, 530 | queryPackPath: string, 531 | queryPackName: string, 532 | ): Promise { 533 | const output = await codeql.run([ 534 | "resolve", 535 | "queries", 536 | "--format=json", 537 | "--additional-packs", 538 | queryPackPath, 539 | queryPackName, 540 | ]); 541 | 542 | return validateObject(JSON.parse(output.stdout), "resolvedQueries"); 543 | } 544 | 545 | function getQueryPackName(queryPackPath: string) { 546 | const qlpackFile = path.join(queryPackPath, "qlpack.yml"); 547 | const codeqlpackFile = path.join(queryPackPath, "codeql-pack.yml"); 548 | let packFile: string; 549 | if ( 550 | fs 551 | .statSync(qlpackFile, { 552 | throwIfNoEntry: false, 553 | }) 554 | ?.isFile() 555 | ) { 556 | packFile = qlpackFile; 557 | } else if ( 558 | fs 559 | .statSync(codeqlpackFile, { 560 | throwIfNoEntry: false, 561 | }) 562 | ?.isFile() 563 | ) { 564 | packFile = codeqlpackFile; 565 | } else { 566 | throw new Error(`Path '${queryPackPath}' is missing a qlpack file.`); 567 | } 568 | const packContents = parseYamlFromFile<{ name: string }>(packFile); 569 | return packContents.name; 570 | } 571 | -------------------------------------------------------------------------------- /src/deserialize.ts: -------------------------------------------------------------------------------- 1 | export function camelize(_: string, value: unknown): unknown { 2 | if (value && typeof value === "object") { 3 | for (const k in value) { 4 | if (/-./.exec(k)) { 5 | const l = k.replace(/-./g, (x) => x[1].toUpperCase()); 6 | // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment 7 | value[l] = value[k]; 8 | delete value[k]; 9 | } 10 | } 11 | } 12 | return value; 13 | } 14 | 
-------------------------------------------------------------------------------- /src/download.ts:
// This file borrows heavily from the actions "downloadTool" function:
// https://github.com/actions/toolkit/blob/27f76dfe1afb2b7e5e679cd8e97192d34d8320e6/packages/tool-cache/src/tool-cache.ts

import * as fs from "fs";
import { OutgoingHttpHeaders } from "http";
import * as path from "path";
import * as stream from "stream";
import * as util from "util";

import * as core from "@actions/core";
import * as httpm from "@actions/http-client";
import * as io from "@actions/io";

import { getApiClient } from "./api-client";
import { HTTPError } from "./http-error";
import { RetryHelper } from "./retry-helper";

/**
 * Download a file from an url and stream it into a local file
 *
 * @param url url of file to download
 * @param dest path to download file
 * @param auth authorization header
 * @param accept optional Accept header value
 * @returns path to downloaded file
 */
export async function download(
  url: string,
  dest: string,
  auth?: string,
  accept?: string,
): Promise<string> {
  await io.mkdirP(path.dirname(dest));
  core.debug(`Downloading ${url}`);
  core.debug(`Destination ${dest}`);

  // Up to 3 attempts with a 10-20 second pause between them.
  const maxAttempts = 3;
  const minSeconds = 10;
  const maxSeconds = 20;
  const retryHelper = new RetryHelper(maxAttempts, minSeconds, maxSeconds);
  return await retryHelper.execute(
    async () => {
      return await downloadAttempt(url, dest, auth, accept);
    },
    // Decides whether a failed attempt is retryable.
    (err: Error) => {
      if (err instanceof HTTPError && err.httpStatusCode) {
        // Don't retry anything less than 500, except 408 Request Timeout and 429 Too Many Requests
        if (
          err.httpStatusCode < 500 &&
          err.httpStatusCode !== 408 &&
          err.httpStatusCode !== 429
        ) {
          return false;
        }
      }

      // Otherwise retry
      return true;
    },
  );
}

/**
 * Performs a single download attempt. Throws HTTPError on a non-200
 * response; on a streaming failure the partially-written file is deleted
 * so the next attempt starts clean.
 */
async function downloadAttempt(
  url: string,
  dest: string,
  auth?: string,
  accept?: string,
): Promise<string> {
  if (fs.existsSync(dest)) {
    throw new Error(`Destination file path ${dest} already exists`);
  }

  // Get the response headers
  const http = getApiClient();

  const headers: OutgoingHttpHeaders = {};
  if (auth) {
    core.debug("set auth");
    headers.authorization = auth;
  }
  if (accept) {
    headers.accept = accept;
  }

  const response: httpm.HttpClientResponse = await http.get(url, headers);

  if (response.message.statusCode !== 200) {
    const err = new HTTPError(
      response.message.statusCode,
      await response.readBody(),
    );
    core.debug(
      `Failed to download from "${url}". Code(${err.httpStatusCode}) Message(${err.httpMessage})`,
    );
    throw err;
  }

  // Download the response body
  const pipeline = util.promisify(stream.pipeline);
  let succeeded = false;
  try {
    await pipeline(response.message, fs.createWriteStream(dest));
    core.debug("download complete");
    succeeded = true;
    return dest;
  } finally {
    // Error, delete dest before retry
    if (!succeeded) {
      core.debug("download failed");
      try {
        await io.rmRF(dest);
      } catch (err: unknown) {
        // Best-effort cleanup: log and continue so the original error
        // from the pipeline is what propagates.
        core.debug(
          `Failed to delete '${dest}'. ${
            err instanceof Error ? err.message : err
          }`,
        );
      }
    }
  }
}
-------------------------------------------------------------------------------- /src/errors.ts:
// Normalizes an unknown thrown value into an Error instance.
export function asError(e: unknown): Error {
  return e instanceof Error ?
e : new Error(String(e));
}
-------------------------------------------------------------------------------- /src/gh-api-client.ts:
/* eslint-disable @typescript-eslint/naming-convention */
import { Octokit } from "@octokit/action";
import { retry } from "@octokit/plugin-retry";
import { throttling } from "@octokit/plugin-throttling";
import {
  EndpointOptions,
  RequestError,
  RequestInterface,
} from "@octokit/types";

import { getSignedAuthToken } from "./inputs";
import { validateObject } from "./json-validation";

export const userAgent = "GitHub multi-repository variant analysis action";

// Builds an Octokit client that authenticates every request with the signed
// auth token from the action inputs (the "RemoteAuth" scheme) and can
// optionally wait out API rate limits.
function getOctokit(): Octokit {
  const throttlingOctokit = Octokit.plugin(throttling);
  const octokit = new throttlingOctokit({
    userAgent,
    retry,
    authStrategy: () => {
      return {
        hook: (request: RequestInterface, options: EndpointOptions) => {
          if (options.headers) {
            options.headers.authorization = `RemoteAuth ${getSignedAuthToken()}`;
          }
          return request(options);
        },
      };
    },
    throttle: {
      // Waiting on rate limits is opt-in via environment variable.
      enabled: !!process.env.CODEQL_VARIANT_ANALYSIS_ACTION_WAIT_ON_RATE_LIMIT,
      // Returning true tells the throttling plugin to retry after the delay.
      onRateLimit: (retryAfter, options) => {
        console.log(
          `Rate limit exhausted for request ${options.method} ${options.url}, retrying after ${retryAfter} seconds`,
        );
        return true;
      },
      onSecondaryRateLimit: (retryAfter, options) => {
        console.log(
          `Secondary rate limit triggered for request ${options.method} ${options.url}, retrying after ${retryAfter} seconds`,
        );
        return true;
      },
    },
  });

  return octokit;
}

export interface Policy {
  upload_url: string;
  // NOTE(review): generic parameters on Record were lost in this dump;
  // restored as Record<string, string> — confirm against
  // src/json-schemas/Policy.json.
  header: Record<string, string>;
  form: Record<string, string>;
}

export interface RepoTask {
  analysis_status: AnalysisStatus;
}

type AnalysisStatus =
  | "pending"
  | "in_progress"
  | "succeeded"
  | "failed"
  | "canceled"
  | "timed_out";

interface InProgressAnalysis {
  status: "in_progress";
}

interface SuccessfulAnalysis {
  status: "succeeded";
  source_location_prefix: string;
  result_count: number;
  database_commit_sha: string;
}

interface FailedAnalysis {
  status: "failed";
  failure_message: string;
}

interface CanceledAnalysis {
  status: "canceled";
}

// Payload for updating a single repository's analysis status.
type UpdateVariantAnalysis =
  | InProgressAnalysis
  | SuccessfulAnalysis
  | FailedAnalysis
  | CanceledAnalysis;

// Bulk payload: only failure/cancellation can be applied to many repos at once.
type UpdateVariantAnalyses = {
  repository_ids: number[];
} & (FailedAnalysis | CanceledAnalysis);

// Marks a single repository's analysis as in progress.
export async function setVariantAnalysisRepoInProgress(
  controllerRepoId: number,
  variantAnalysisId: number,
  repoId: number,
): Promise<void> {
  await updateVariantAnalysisStatus(
    controllerRepoId,
    variantAnalysisId,
    repoId,
    {
      status: "in_progress",
    },
  );
}

// Marks a single repository's analysis as succeeded, recording result details.
export async function setVariantAnalysisRepoSucceeded(
  controllerRepoId: number,
  variantAnalysisId: number,
  repoId: number,
  sourceLocationPrefix: string,
  resultCount: number,
  databaseCommitSha: string,
): Promise<void> {
  await updateVariantAnalysisStatus(
    controllerRepoId,
    variantAnalysisId,
    repoId,
    {
      status: "succeeded",
      source_location_prefix: sourceLocationPrefix,
      result_count: resultCount,
      database_commit_sha: databaseCommitSha,
    },
  );
}

// Marks a single repository's analysis as failed with a message.
export async function setVariantAnalysisFailed(
  controllerRepoId: number,
  variantAnalysisId: number,
  repoId: number,
  failureMessage: string,
): Promise<void> {
  await updateVariantAnalysisStatus(
    controllerRepoId,
    variantAnalysisId,
    repoId,
    {
      status: "failed",
      failure_message: failureMessage,
    },
  );
}

// Marks several repositories' analyses as failed in a single request.
export async function setVariantAnalysesFailed(
  controllerRepoId: number,
  variantAnalysisId: number,
  repoIds: number[],
  failureMessage: string,
): Promise<void> {
  await updateVariantAnalysisStatuses(controllerRepoId, variantAnalysisId, {
    repository_ids: repoIds,
    status: "failed",
    failure_message: failureMessage,
  });
}

// Marks several repositories' analyses as canceled in a single request.
export async function setVariantAnalysesCanceled(
  controllerRepoId: number,
  variantAnalysisId: number,
  repoIds: number[],
): Promise<void> {
  await updateVariantAnalysisStatuses(controllerRepoId, variantAnalysisId, {
    repository_ids: repoIds,
    status: "canceled",
  });
}

// Duck-type check for Octokit's RequestError (has a numeric "status").
function isRequestError(obj: unknown): obj is RequestError {
  return typeof obj?.["status"] === "number";
}

// PATCHes the per-repository status endpoint; logs the HTTP status of a
// failed request before rethrowing.
async function updateVariantAnalysisStatus(
  controllerRepoId: number,
  variantAnalysisId: number,
  repoId: number,
  data: UpdateVariantAnalysis,
): Promise<void> {
  const octokit = getOctokit();

  const url = `PATCH /repositories/${controllerRepoId}/code-scanning/codeql/variant-analyses/${variantAnalysisId}/repositories/${repoId}/status`;
  try {
    await octokit.request(url, { data });
  } catch (e: unknown) {
    if (isRequestError(e)) {
      console.error(`Request to ${url} failed with status code ${e.status}`);
    }
    throw e;
  }
}

// PATCHes the bulk repositories endpoint for multi-repo status updates.
async function updateVariantAnalysisStatuses(
  controllerRepoId: number,
  variantAnalysisId: number,
  data: UpdateVariantAnalyses,
): Promise<void> {
  const octokit = getOctokit();

  const url = `PATCH /repositories/${controllerRepoId}/code-scanning/codeql/variant-analyses/${variantAnalysisId}/repositories`;
  try {
    await octokit.request(url, { data });
  } catch (e: unknown) {
210 | if (isRequestError(e)) { 211 | console.error(`Request to ${url} failed with status code ${e.status}`); 212 | } 213 | throw e; 214 | } 215 | } 216 | 217 | export async function getPolicyForRepoArtifact( 218 | controllerRepoId: number, 219 | variantAnalysisId: number, 220 | repoId: number, 221 | artifactSize: number, 222 | ): Promise { 223 | const data = { 224 | name: "results.zip", 225 | content_type: "application/zip", 226 | size: artifactSize, 227 | }; 228 | const octokit = getOctokit(); 229 | 230 | const url = `PUT /repositories/${controllerRepoId}/code-scanning/codeql/variant-analyses/${variantAnalysisId}/repositories/${repoId}/artifact`; 231 | try { 232 | const response = await octokit.request(url, { data }); 233 | return validateObject(response.data, "policy"); 234 | } catch (e: unknown) { 235 | if (isRequestError(e)) { 236 | console.error(`Request to ${url} failed with status code ${e.status}`); 237 | } 238 | throw e; 239 | } 240 | } 241 | -------------------------------------------------------------------------------- /src/http-error.ts: -------------------------------------------------------------------------------- 1 | export class HTTPError extends Error { 2 | httpStatusCode: number | undefined; 3 | httpMessage: string; 4 | constructor(httpStatusCode: number | undefined, httpMessage: string) { 5 | super(`Unexpected HTTP response: ${httpStatusCode}. 
${httpMessage}`); 6 | this.httpStatusCode = httpStatusCode; 7 | this.httpMessage = httpMessage; 8 | } 9 | } 10 | -------------------------------------------------------------------------------- /src/inputs.ts: -------------------------------------------------------------------------------- 1 | import * as fs from "fs"; 2 | 3 | import { getInput } from "@actions/core"; 4 | 5 | import { validateObject } from "./json-validation"; 6 | 7 | export interface Repo { 8 | id: number; 9 | nwo: string; 10 | downloadUrl?: string; 11 | 12 | // pat is deprecated and only used during integration tests 13 | pat?: string; 14 | } 15 | 16 | export type RepoArray = Repo[]; 17 | 18 | export interface Instructions { 19 | repositories: Repo[]; 20 | features: Record; 21 | } 22 | 23 | export function getControllerRepoId(): number { 24 | return parseInt(getInput("controller_repo_id", { required: true })); 25 | } 26 | 27 | export function getVariantAnalysisId(): number { 28 | return parseInt(getInput("variant_analysis_id", { required: true })); 29 | } 30 | 31 | export function getSignedAuthToken(): string { 32 | return getInput("signed_auth_token", { required: true }); 33 | } 34 | 35 | export function getRepos(): Repo[] { 36 | return validateObject( 37 | JSON.parse(getInput("repositories", { required: true })), 38 | "repoArray", 39 | ); 40 | } 41 | 42 | export function getWorkflowStatus(): string { 43 | return getInput("workflow_status", { required: true }); 44 | } 45 | 46 | export async function getInstructions(): Promise { 47 | const filePath = getInput("instructions_path", { required: true }); 48 | return validateObject( 49 | JSON.parse(await fs.promises.readFile(filePath, "utf-8")), 50 | "instructions", 51 | ); 52 | } 53 | -------------------------------------------------------------------------------- /src/json-schemas/BQRSInfo.json: -------------------------------------------------------------------------------- 1 | { 2 | "$ref": "#/definitions/BQRSInfo", 3 | "$schema": 
"http://json-schema.org/draft-07/schema#", 4 | "definitions": { 5 | "BQRSInfo": { 6 | "properties": { 7 | "compatibleQueryKinds": { 8 | "items": { 9 | "type": "string" 10 | }, 11 | "type": "array" 12 | }, 13 | "resultSets": { 14 | "items": { 15 | "properties": { 16 | "name": { 17 | "type": "string" 18 | }, 19 | "rows": { 20 | "type": "number" 21 | } 22 | }, 23 | "required": [ 24 | "name", 25 | "rows" 26 | ], 27 | "type": "object" 28 | }, 29 | "type": "array" 30 | } 31 | }, 32 | "required": [ 33 | "resultSets", 34 | "compatibleQueryKinds" 35 | ], 36 | "type": "object" 37 | } 38 | } 39 | } -------------------------------------------------------------------------------- /src/json-schemas/Instructions.json: -------------------------------------------------------------------------------- 1 | { 2 | "$ref": "#/definitions/Instructions", 3 | "$schema": "http://json-schema.org/draft-07/schema#", 4 | "definitions": { 5 | "Instructions": { 6 | "properties": { 7 | "features": { 8 | "additionalProperties": { 9 | "type": "boolean" 10 | }, 11 | "type": "object" 12 | }, 13 | "repositories": { 14 | "items": { 15 | "$ref": "#/definitions/Repo" 16 | }, 17 | "type": "array" 18 | } 19 | }, 20 | "required": [ 21 | "repositories", 22 | "features" 23 | ], 24 | "type": "object" 25 | }, 26 | "Repo": { 27 | "properties": { 28 | "downloadUrl": { 29 | "type": "string" 30 | }, 31 | "id": { 32 | "type": "number" 33 | }, 34 | "nwo": { 35 | "type": "string" 36 | }, 37 | "pat": { 38 | "type": "string" 39 | } 40 | }, 41 | "required": [ 42 | "id", 43 | "nwo" 44 | ], 45 | "type": "object" 46 | } 47 | } 48 | } -------------------------------------------------------------------------------- /src/json-schemas/Policy.json: -------------------------------------------------------------------------------- 1 | { 2 | "$ref": "#/definitions/Policy", 3 | "$schema": "http://json-schema.org/draft-07/schema#", 4 | "definitions": { 5 | "Policy": { 6 | "properties": { 7 | "form": { 8 | "additionalProperties": { 9 | 
"type": "string" 10 | }, 11 | "type": "object" 12 | }, 13 | "header": { 14 | "additionalProperties": { 15 | "type": "string" 16 | }, 17 | "type": "object" 18 | }, 19 | "upload_url": { 20 | "type": "string" 21 | } 22 | }, 23 | "required": [ 24 | "upload_url", 25 | "header", 26 | "form" 27 | ], 28 | "type": "object" 29 | } 30 | } 31 | } -------------------------------------------------------------------------------- /src/json-schemas/QueryMetadata.json: -------------------------------------------------------------------------------- 1 | { 2 | "$ref": "#/definitions/QueryMetadata", 3 | "$schema": "http://json-schema.org/draft-07/schema#", 4 | "definitions": { 5 | "QueryMetadata": { 6 | "properties": { 7 | "id": { 8 | "type": "string" 9 | }, 10 | "kind": { 11 | "type": "string" 12 | } 13 | }, 14 | "type": "object" 15 | } 16 | } 17 | } -------------------------------------------------------------------------------- /src/json-schemas/RepoArray.json: -------------------------------------------------------------------------------- 1 | { 2 | "$ref": "#/definitions/RepoArray", 3 | "$schema": "http://json-schema.org/draft-07/schema#", 4 | "definitions": { 5 | "Repo": { 6 | "properties": { 7 | "downloadUrl": { 8 | "type": "string" 9 | }, 10 | "id": { 11 | "type": "number" 12 | }, 13 | "nwo": { 14 | "type": "string" 15 | }, 16 | "pat": { 17 | "type": "string" 18 | } 19 | }, 20 | "required": [ 21 | "id", 22 | "nwo" 23 | ], 24 | "type": "object" 25 | }, 26 | "RepoArray": { 27 | "items": { 28 | "$ref": "#/definitions/Repo" 29 | }, 30 | "type": "array" 31 | } 32 | } 33 | } -------------------------------------------------------------------------------- /src/json-schemas/RepoTask.json: -------------------------------------------------------------------------------- 1 | { 2 | "$ref": "#/definitions/RepoTask", 3 | "$schema": "http://json-schema.org/draft-07/schema#", 4 | "definitions": { 5 | "RepoTask": { 6 | "properties": { 7 | "analysis_status": { 8 | "enum": [ 9 | "pending", 10 | 
"in_progress", 11 | "succeeded", 12 | "failed", 13 | "canceled", 14 | "timed_out" 15 | ], 16 | "type": "string" 17 | } 18 | }, 19 | "required": [ 20 | "analysis_status" 21 | ], 22 | "type": "object" 23 | } 24 | } 25 | } -------------------------------------------------------------------------------- /src/json-schemas/ResolvedDatabase.json: -------------------------------------------------------------------------------- 1 | { 2 | "$ref": "#/definitions/ResolvedDatabase", 3 | "$schema": "http://json-schema.org/draft-07/schema#", 4 | "definitions": { 5 | "ResolvedDatabase": { 6 | "properties": { 7 | "sourceLocationPrefix": { 8 | "type": "string" 9 | } 10 | }, 11 | "required": [ 12 | "sourceLocationPrefix" 13 | ], 14 | "type": "object" 15 | } 16 | } 17 | } -------------------------------------------------------------------------------- /src/json-schemas/ResolvedQueries.json: -------------------------------------------------------------------------------- 1 | { 2 | "$ref": "#/definitions/ResolvedQueries", 3 | "$schema": "http://json-schema.org/draft-07/schema#", 4 | "definitions": { 5 | "ResolvedQueries": { 6 | "items": { 7 | "type": "string" 8 | }, 9 | "type": "array" 10 | } 11 | } 12 | } -------------------------------------------------------------------------------- /src/json-schemas/Sarif.json: -------------------------------------------------------------------------------- 1 | { 2 | "$ref": "#/definitions/Sarif", 3 | "$schema": "http://json-schema.org/draft-07/schema#", 4 | "definitions": { 5 | "Sarif": { 6 | "properties": { 7 | "runs": { 8 | "items": { 9 | "properties": { 10 | "results": { 11 | "items": {}, 12 | "type": "array" 13 | }, 14 | "versionControlProvenance": { 15 | "items": {}, 16 | "type": "array" 17 | } 18 | }, 19 | "required": [ 20 | "results" 21 | ], 22 | "type": "object" 23 | }, 24 | "type": "array" 25 | } 26 | }, 27 | "required": [ 28 | "runs" 29 | ], 30 | "type": "object" 31 | } 32 | } 33 | } 
-------------------------------------------------------------------------------- /src/json-validation.test.ts: -------------------------------------------------------------------------------- 1 | import { 2 | BQRSInfo, 3 | QueryMetadata, 4 | ResolvedDatabase, 5 | ResolvedQueries, 6 | Sarif, 7 | } from "./codeql"; 8 | import { Policy, RepoTask } from "./gh-api-client"; 9 | import { Instructions, RepoArray } from "./inputs"; 10 | import { 11 | SchemaValidationError, 12 | schemaNames, 13 | validateObject, 14 | } from "./json-validation"; 15 | 16 | /* Unit tests: every registered schema rejects a junk object, and each schema accepts a representative valid instance. */ describe("validateObject", () => { 17 | for (const schema of schemaNames) { 18 | it(`throws error for invalid ${schema}`, () => { 19 | const testObj = { 20 | trash: true, 21 | kind: 123, 22 | }; 23 | expect(() => validateObject(testObj, schema)).toThrow( 24 | SchemaValidationError, 25 | ); 26 | }); 27 | } 28 | 29 | it("can successfully validate RepoArray", () => { 30 | const obj: RepoArray = [ 31 | { 32 | id: 123, 33 | nwo: "a/b", 34 | }, 35 | { 36 | id: 456, 37 | nwo: "c/d", 38 | downloadUrl: "https://example.com", 39 | }, 40 | { 41 | id: 789, 42 | nwo: "e/f", 43 | pat: "abcdef", 44 | }, 45 | ]; 46 | expect(() => validateObject(obj, "repoArray")).not.toThrow(); 47 | }); 48 | 49 | it("can successfully validate Instructions", () => { 50 | const obj: Instructions = { 51 | repositories: [ 52 | { 53 | id: 123, 54 | nwo: "a/b", 55 | }, 56 | ], 57 | features: {}, 58 | }; 59 | expect(() => validateObject(obj, "instructions")).not.toThrow(); 60 | }); 61 | 62 | it("can successfully validate Sarif", () => { 63 | const obj: Sarif = { 64 | runs: [ 65 | { 66 | results: [], 67 | }, 68 | ], 69 | }; 70 | expect(() => validateObject(obj, "sarif")).not.toThrow(); 71 | }); 72 | 73 | it("can successfully validate BQRSInfo", () => { 74 | const obj: BQRSInfo = { 75 | resultSets: [ 76 | { 77 | name: "aaa", 78 | rows: 13, 79 | }, 80 | ], 81 | compatibleQueryKinds: ["problem"], 82 | }; 83 | expect(() => validateObject(obj,
"bqrsInfo")).not.toThrow(); 84 | }); 85 | 86 | it("can successfully validate ResolvedQueries", () => { 87 | const obj: ResolvedQueries = ["foo"]; 88 | expect(() => validateObject(obj, "resolvedQueries")).not.toThrow(); 89 | }); 90 | 91 | it("can successfully validate ResolvedDatabase", () => { 92 | const obj: ResolvedDatabase = { 93 | sourceLocationPrefix: "foo", 94 | }; 95 | expect(() => validateObject(obj, "resolvedDatabase")).not.toThrow(); 96 | }); 97 | 98 | it("can successfully validate QueryMetadata", () => { 99 | const obj: QueryMetadata = { 100 | kind: "problem", 101 | }; 102 | expect(() => validateObject(obj, "queryMetadata")).not.toThrow(); 103 | }); 104 | 105 | it("can successfully validate RepoTask", () => { 106 | /* eslint-disable @typescript-eslint/naming-convention */ 107 | const obj: RepoTask = { 108 | analysis_status: "pending", 109 | }; 110 | /* eslint-enable @typescript-eslint/naming-convention */ 111 | expect(() => validateObject(obj, "repoTask")).not.toThrow(); 112 | }); 113 | 114 | it("can successfully validate Policy", () => { 115 | /* eslint-disable @typescript-eslint/naming-convention */ 116 | const obj: Policy = { 117 | upload_url: "https://example.com", 118 | header: { 119 | foo: "bar", 120 | }, 121 | form: { 122 | baz: "qux", 123 | }, 124 | }; 125 | /* eslint-enable @typescript-eslint/naming-convention */ 126 | expect(() => validateObject(obj, "policy")).not.toThrow(); 127 | }); 128 | }); 129 | -------------------------------------------------------------------------------- /src/json-validation.ts: -------------------------------------------------------------------------------- 1 | import { Ajv, ValidateFunction } from "ajv"; 2 | 3 | import { 4 | BQRSInfo, 5 | QueryMetadata, 6 | ResolvedDatabase, 7 | ResolvedQueries, 8 | Sarif, 9 | } from "./codeql"; 10 | import { Policy, RepoTask } from "./gh-api-client"; 11 | import { Instructions, RepoArray } from "./inputs"; 12 | import BQRSInfoSchema from "./json-schemas/BQRSInfo.json"; 13 | import
instructionsSchema from "./json-schemas/Instructions.json"; 14 | import policySchema from "./json-schemas/Policy.json"; 15 | import queryMetadataSchema from "./json-schemas/QueryMetadata.json"; 16 | import repoArraySchema from "./json-schemas/RepoArray.json"; 17 | import repoTaskSchema from "./json-schemas/RepoTask.json"; 18 | import ResolvedDatabaseSchema from "./json-schemas/ResolvedDatabase.json"; 19 | import ResolvedQueriesSchema from "./json-schemas/ResolvedQueries.json"; 20 | import sarifSchema from "./json-schemas/Sarif.json"; 21 | 22 | /* Maps each schema name to the TypeScript type its validator certifies. */ type SchemaTypes = { 23 | repoArray: RepoArray; 24 | instructions: Instructions; 25 | sarif: Sarif; 26 | bqrsInfo: BQRSInfo; 27 | resolvedQueries: ResolvedQueries; 28 | resolvedDatabase: ResolvedDatabase; 29 | queryMetadata: QueryMetadata; 30 | repoTask: RepoTask; 31 | policy: Policy; 32 | }; 33 | type Schema = keyof SchemaTypes; 34 | 35 | /* Validators are compiled once at module load and reused. */ const ajv = new Ajv(); 36 | const validators: Record = { 37 | repoArray: ajv.compile(repoArraySchema), 38 | instructions: ajv.compile(instructionsSchema), 39 | sarif: ajv.compile(sarifSchema), 40 | bqrsInfo: ajv.compile(BQRSInfoSchema), 41 | resolvedQueries: ajv.compile(ResolvedQueriesSchema), 42 | resolvedDatabase: ajv.compile(ResolvedDatabaseSchema), 43 | queryMetadata: ajv.compile(queryMetadataSchema), 44 | repoTask: ajv.compile(repoTaskSchema), 45 | policy: ajv.compile(policySchema), 46 | }; 47 | export const schemaNames = Object.keys(validators) as Schema[]; 48 | 49 | export class SchemaValidationError extends Error {} 50 | 51 | /* Validates obj against the named schema and returns it typed accordingly; throws SchemaValidationError with Ajv's error text on mismatch. */ export function validateObject( 52 | obj: unknown, 53 | schema: T, 54 | ): SchemaTypes[T] { 55 | const validator = validators[schema]; 56 | if (!validator(obj)) { 57 | throw new SchemaValidationError( 58 | `Object does not match the "${schema}" schema: ${ajv.errorsText( 59 | validator.errors, 60 | )}`, 61 | ); 62 | } 63 | return obj as SchemaTypes[T]; 64 | } 65 | --------------------------------------------------------------------------------
/src/query-run-memory.ts: -------------------------------------------------------------------------------- 1 | import * as os from "os"; 2 | 3 | /** 4 | * Gets an OS-specific amount of memory (in MB) to reserve for OS processes 5 | * when the user doesn't explicitly specify a memory setting. 6 | * This is a heuristic to avoid OOM errors (exit code 137 / SIGKILL) 7 | * from committing too much of the available memory to CodeQL. 8 | * @returns {number} 9 | */ 10 | function getSystemReservedMemoryMegaBytes(): number { 11 | // Windows needs more memory for OS processes. 12 | return 1024 * (process.platform === "win32" ? 1.5 : 1); 13 | } 14 | 15 | /** 16 | * Get the value for the codeql `--ram` flag. 17 | * We use the total available memory minus a threshold reserved for the OS. 18 | * 19 | * @returns {number} the amount of RAM to use, in megabytes 20 | */ 21 | export function getMemoryFlagValue(): number { 22 | const totalMemoryBytes = os.totalmem(); 23 | const totalMemoryMegaBytes = totalMemoryBytes / (1024 * 1024); 24 | const reservedMemoryMegaBytes = getSystemReservedMemoryMegaBytes(); 25 | const memoryToUseMegaBytes = totalMemoryMegaBytes - reservedMemoryMegaBytes; 26 | 27 | return Math.floor(memoryToUseMegaBytes); 28 | } 29 | -------------------------------------------------------------------------------- /src/query.ts: -------------------------------------------------------------------------------- 1 | import fs from "fs"; 2 | import { tmpdir } from "os"; 3 | import path from "path"; 4 | import { chdir, cwd } from "process"; 5 | 6 | import { 7 | endGroup, 8 | getInput, 9 | setFailed, 10 | setSecret, 11 | startGroup, 12 | info, 13 | warning, 14 | } from "@actions/core"; 15 | import { 16 | find as findInToolCache, 17 | extractTar, 18 | HTTPError, 19 | } from "@actions/tool-cache"; 20 | import JSZip from "jszip"; 21 | 22 | import { uploadArtifact } from "./azure-client"; 23 | import { 24 | downloadDatabase, 25 | getQueryPackInfo, 26 | runQuery, 27 | RunQueryResult,
28 | } from "./codeql"; 29 | import { CodeqlCliServer } from "./codeql-cli"; 30 | import { setupCodeQLBundle } from "./codeql-setup"; 31 | import { getDefaultCliVersion } from "./codeql-version"; 32 | import { download } from "./download"; 33 | import { 34 | getPolicyForRepoArtifact, 35 | setVariantAnalysisFailed, 36 | setVariantAnalysisRepoInProgress, 37 | setVariantAnalysisRepoSucceeded, 38 | } from "./gh-api-client"; 39 | import { 40 | getControllerRepoId, 41 | getInstructions, 42 | getRepos, 43 | getVariantAnalysisId, 44 | Repo, 45 | } from "./inputs"; 46 | 47 | /* Cleanup callbacks run once after run() settles (see the bottom of this file). */ const shutdownHandlers: Array<() => void> = []; 48 | 49 | /* Entry point: sets up the CodeQL CLI, downloads and extracts the query pack, then analyzes each repository in turn, reporting per-repo status to the variant-analysis API. Repo download URLs and PATs are masked from logs via setSecret. */ async function run(): Promise { 50 | const controllerRepoId = getControllerRepoId(); 51 | const queryPackUrl = getInput("query_pack_url", { required: true }); 52 | const language = getInput("language", { required: true }); 53 | const repos: Repo[] = getRepos(); 54 | const variantAnalysisId = getVariantAnalysisId(); 55 | const instructions = await getInstructions(); 56 | 57 | for (const repo of repos) { 58 | if (repo.downloadUrl) { 59 | setSecret(repo.downloadUrl); 60 | } 61 | if (repo.pat) { 62 | setSecret(repo.pat); 63 | } 64 | } 65 | 66 | startGroup("Setup CodeQL CLI"); 67 | let codeqlBundlePath: string | undefined; 68 | 69 | const cliVersion = getDefaultCliVersion(instructions.features); 70 | if (cliVersion) { 71 | codeqlBundlePath = await setupCodeQLBundle( 72 | process.env.RUNNER_TEMP ??
tmpdir(), 73 | cliVersion, 74 | ); 75 | } else { 76 | /* Fall back to whatever CodeQL bundle is already in the runner's tool cache. */ warning( 77 | `Unable to determine CodeQL version from feature flags, using latest version in tool cache`, 78 | ); 79 | 80 | codeqlBundlePath = findInToolCache("CodeQL", "*"); 81 | 82 | info(`Using CodeQL CLI from tool cache: ${codeqlBundlePath}`); 83 | } 84 | 85 | let codeqlCmd = path.join(codeqlBundlePath, "codeql", "codeql"); 86 | if (process.platform === "win32") { 87 | codeqlCmd += ".exe"; 88 | } 89 | 90 | endGroup(); 91 | 92 | const curDir = cwd(); 93 | 94 | let queryPackPath: string; 95 | try { 96 | // Download and extract the query pack. 97 | console.log("Getting query pack"); 98 | const queryPackArchive = await download(queryPackUrl, "query_pack.tar.gz"); 99 | queryPackPath = await extractTar(queryPackArchive); 100 | } catch (e: unknown) { 101 | console.error(e); 102 | const errorMessage = e instanceof Error ? e.message : `${e}`; 103 | if (e instanceof HTTPError && e.httpStatusCode === 403) { 104 | setFailed( 105 | `${errorMessage}. The query pack is only available for 24 hours. To retry, create a new variant analysis.`, 106 | ); 107 | } else { 108 | setFailed(errorMessage); 109 | } 110 | // Consider all repos to have failed 111 | for (const repo of repos) { 112 | await setVariantAnalysisFailed( 113 | controllerRepoId, 114 | variantAnalysisId, 115 | repo.id, 116 | errorMessage, 117 | ); 118 | } 119 | return; 120 | } 121 | 122 | const codeqlCli = new CodeqlCliServer(codeqlCmd); 123 | 124 | shutdownHandlers.push(() => { 125 | codeqlCli.shutdown(); 126 | }); 127 | 128 | const codeqlVersionInfo = await codeqlCli.run(["version", "--format=json"]); 129 | console.log(codeqlVersionInfo.stdout); 130 | 131 | const queryPackInfo = await getQueryPackInfo(codeqlCli, queryPackPath); 132 | 133 | /* Per-repo loop: a failure for one repo is reported and does not stop the remaining repos. */ for (const repo of repos) { 134 | // Create a new directory to contain all files created during analysis of this repo.
135 | const workDir = createTempRepoDir(curDir, repo); 136 | // Change into the new directory to further ensure that all created files go in there. 137 | chdir(workDir); 138 | 139 | try { 140 | await setVariantAnalysisRepoInProgress( 141 | controllerRepoId, 142 | variantAnalysisId, 143 | repo.id, 144 | ); 145 | 146 | const dbZip = await getDatabase(repo, language); 147 | const dbZipPath = path.resolve(dbZip); 148 | 149 | console.log("Running query"); 150 | const runQueryResult = await runQuery( 151 | codeqlCli, 152 | dbZipPath, 153 | repo.nwo, 154 | queryPackInfo, 155 | ); 156 | 157 | /* Only upload an artifact when there is at least one result. */ if (runQueryResult.resultCount > 0) { 158 | await uploadRepoResult( 159 | controllerRepoId, 160 | variantAnalysisId, 161 | repo, 162 | runQueryResult, 163 | ); 164 | } 165 | 166 | await setVariantAnalysisRepoSucceeded( 167 | controllerRepoId, 168 | variantAnalysisId, 169 | repo.id, 170 | runQueryResult.sourceLocationPrefix, 171 | runQueryResult.resultCount, 172 | runQueryResult.databaseSHA || "HEAD", 173 | ); 174 | } catch (e: unknown) { 175 | console.error(e); 176 | const errorMessage = e instanceof Error ? e.message : `${e}`; 177 | if (e instanceof HTTPError && e.httpStatusCode === 403) { 178 | setFailed( 179 | `${errorMessage}. Database downloads are only available for 24 hours. To retry, create a new variant analysis.`, 180 | ); 181 | } else { 182 | setFailed(errorMessage); 183 | } 184 | 185 | await setVariantAnalysisFailed( 186 | controllerRepoId, 187 | variantAnalysisId, 188 | repo.id, 189 | errorMessage, 190 | ); 191 | } 192 | // We can now delete the work dir. All required files have already been uploaded.
193 | chdir(curDir); 194 | fs.rmSync(workDir, { recursive: true }); 195 | } 196 | } 197 | 198 | /* Zips this repo's results and uploads them to Azure Blob Storage using an API-issued upload policy. */ async function uploadRepoResult( 199 | controllerRepoId: number, 200 | variantAnalysisId: number, 201 | repo: Repo, 202 | runQueryResult: RunQueryResult, 203 | ) { 204 | const artifactContents = await getArtifactContentsForUpload(runQueryResult); 205 | 206 | // Get policy for artifact upload 207 | const policy = await getPolicyForRepoArtifact( 208 | controllerRepoId, 209 | variantAnalysisId, 210 | repo.id, 211 | artifactContents.length, 212 | ); 213 | 214 | // Use Azure client for uploading to Azure Blob Storage 215 | await uploadArtifact(policy, artifactContents); 216 | } 217 | 218 | /* Builds the results.zip buffer: "results.sarif" (when a SARIF file was produced) plus every BQRS file at its relative path. */ async function getArtifactContentsForUpload( 219 | runQueryResult: RunQueryResult, 220 | ): Promise { 221 | const zip = new JSZip(); 222 | 223 | if (runQueryResult.sarifFilePath) { 224 | const sarifFileContents = fs.createReadStream(runQueryResult.sarifFilePath); 225 | zip.file("results.sarif", sarifFileContents); 226 | } 227 | 228 | for (const relativePath of runQueryResult.bqrsFilePaths.relativeFilePaths) { 229 | const fullPath = path.join( 230 | runQueryResult.bqrsFilePaths.basePath, 231 | relativePath, 232 | ); 233 | const bqrsFileContents = fs.createReadStream(fullPath); 234 | zip.file(relativePath, bqrsFileContents); 235 | } 236 | 237 | return await zip.generateAsync({ 238 | compression: "DEFLATE", 239 | type: "nodebuffer", 240 | }); 241 | } 242 | 243 | /* Fetches the repo's CodeQL database zip, preferring the pre-signed URL when present over the GitHub API download. */ async function getDatabase(repo: Repo, language: string) { 244 | console.log(`Getting database for ${repo.nwo}`); 245 | if (repo.downloadUrl) { 246 | // Use the provided signed URL to download the database 247 | return await download(repo.downloadUrl, `${repo.id}.zip`); 248 | } else { 249 | // Use the GitHub API to download the database using token 250 | return await downloadDatabase(repo.id, repo.nwo, language, repo.pat); 251 | } 252 | } 253 | 254 | /** 255 | * Creates a temporary directory for a given repository.
256 | * @param curDir The current directory. 257 | * @param repo The repository to create a temporary directory for. 258 | * @returns The path to the temporary directory. 259 | */ 260 | function createTempRepoDir(curDir: string, repo: Repo): string { 261 | const workDir = fs.mkdtempSync(path.join(curDir, repo.id.toString())); 262 | return workDir; 263 | } 264 | 265 | /* Run the action, then always invoke registered shutdown handlers (e.g. stopping the CodeQL CLI server). */ void run().finally(() => { 266 | for (const handler of shutdownHandlers) { 267 | handler(); 268 | } 269 | }); 270 | -------------------------------------------------------------------------------- /src/retry-helper.ts: -------------------------------------------------------------------------------- 1 | import * as core from "@actions/core"; 2 | 3 | /** 4 | * Internal class for retries. 5 | * Borrowed from https://github.com/actions/toolkit/blob/main/packages/tool-cache/src/retry-helper.ts. 6 | */ 7 | export class RetryHelper { 8 | private maxAttempts: number; 9 | private minSeconds: number; 10 | private maxSeconds: number; 11 | 12 | constructor(maxAttempts: number, minSeconds: number, maxSeconds: number) { 13 | if (maxAttempts < 1) { 14 | throw new Error("max attempts should be greater than or equal to 1"); 15 | } 16 | 17 | this.maxAttempts = maxAttempts; 18 | this.minSeconds = Math.floor(minSeconds); 19 | this.maxSeconds = Math.floor(maxSeconds); 20 | if (this.minSeconds > this.maxSeconds) { 21 | throw new Error( 22 | "min seconds should be less than or equal to max seconds", 23 | ); 24 | } 25 | } 26 | 27 | /* Runs action up to maxAttempts times. Only errors accepted by isRetryable are retried, with a random sleep of minSeconds..maxSeconds between attempts; the final attempt's error propagates to the caller. */ async execute( 28 | action: () => Promise, 29 | isRetryable: (e: Error) => boolean, 30 | ): Promise { 31 | let attempt = 1; 32 | while (attempt < this.maxAttempts) { 33 | // Try 34 | try { 35 | return await action(); 36 | } catch (err: unknown) { 37 | if (!(err instanceof Error) || !isRetryable(err)) { 38 | throw err; 39 | } 40 | 41 | core.info(err.message); 42 | } 43 | 44 | // Sleep 45 | const seconds = this.getSleepAmount(); 46 | core.info(`Waiting ${seconds} seconds before trying again`); 47 |
await this.sleep(seconds); 48 | attempt++; 49 | } 50 | 51 | // Last attempt 52 | return await action(); 53 | } 54 | 55 | /* Uniform random integer in [minSeconds, maxSeconds]. */ private getSleepAmount(): number { 56 | return ( 57 | Math.floor(Math.random() * (this.maxSeconds - this.minSeconds + 1)) + 58 | this.minSeconds 59 | ); 60 | } 61 | 62 | private async sleep(seconds: number): Promise { 63 | return new Promise((resolve) => setTimeout(resolve, seconds * 1000)); 64 | } 65 | } 66 | -------------------------------------------------------------------------------- /src/set-repo-task-statuses.ts: -------------------------------------------------------------------------------- 1 | import { 2 | setVariantAnalysesCanceled, 3 | setVariantAnalysesFailed, 4 | } from "./gh-api-client"; 5 | import { 6 | getControllerRepoId, 7 | getVariantAnalysisId, 8 | getWorkflowStatus, 9 | Repo, 10 | } from "./inputs"; 11 | 12 | /** 13 | * If the overall variant analysis workflow failed or was canceled, 14 | * propagate the failure/cancellation status to the individual repo tasks.
15 | */ 16 | export async function setRepoTaskStatuses(repos: Repo[]): Promise { 17 | const controllerRepoId = getControllerRepoId(); 18 | const variantAnalysisId = getVariantAnalysisId(); 19 | const workflowStatus = getWorkflowStatus(); 20 | 21 | const repoIds = repos.map((repo) => repo.id); 22 | 23 | /* Any workflow_status other than "failed" or "canceled" is a no-op. */ if (workflowStatus === "failed") { 24 | await setVariantAnalysesFailed( 25 | controllerRepoId, 26 | variantAnalysisId, 27 | repoIds, 28 | "The GitHub Actions workflow failed.", 29 | ); 30 | } 31 | 32 | if (workflowStatus === "canceled") { 33 | await setVariantAnalysesCanceled( 34 | controllerRepoId, 35 | variantAnalysisId, 36 | repoIds, 37 | ); 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /src/update-repo-task-status.ts: -------------------------------------------------------------------------------- 1 | import { getRepos } from "./inputs"; 2 | import { setRepoTaskStatuses } from "./set-repo-task-statuses"; 3 | 4 | /** 5 | * If the overall variant analysis workflow failed or was canceled, 6 | * propagate the failure/cancellation status to the individual repo tasks. 7 | */ 8 | async function updateRepoTaskStatus(): Promise { 9 | const repos = getRepos(); 10 | 11 | await setRepoTaskStatuses(repos); 12 | } 13 | 14 | /* NOTE(review): the rejection of this promise is not caught — a failure presumably surfaces as an unhandled rejection and a non-zero exit; confirm that is intended. */ void updateRepoTaskStatus(); 15 | -------------------------------------------------------------------------------- /src/update-repo-task-statuses.ts: -------------------------------------------------------------------------------- 1 | import { getInstructions } from "./inputs"; 2 | import { setRepoTaskStatuses } from "./set-repo-task-statuses"; 3 | 4 | /** 5 | * If the overall variant analysis workflow failed or was canceled, 6 | * propagate the failure/cancellation status to the individual repo tasks.
7 | */ 8 | async function updateRepoTaskStatuses(): Promise { 9 | const instructions = await getInstructions(); 10 | await setRepoTaskStatuses(instructions.repositories); 11 | } 12 | 13 | void updateRepoTaskStatuses(); 14 | -------------------------------------------------------------------------------- /src/util.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * This error is used to indicate a runtime failure of an exhaustivity check enforced at compile time. 3 | */ 4 | class ExhaustivityCheckingError extends Error { 5 | constructor(public expectedExhaustiveValue: never) { 6 | super("Internal error: exhaustivity checking failure"); 7 | } 8 | } 9 | 10 | /** 11 | * Used to perform compile-time exhaustivity checking on a value. This function will not be executed at runtime unless 12 | * the type system has been subverted. 13 | */ 14 | export function assertNever(value: never): never { 15 | throw new ExhaustivityCheckingError(value); 16 | } 17 | -------------------------------------------------------------------------------- /src/yaml.test.ts: -------------------------------------------------------------------------------- 1 | import { parseYaml } from "./yaml"; 2 | 3 | describe("parseYaml", () => { 4 | it("can successfully parse YAML with potentially exponential commit SHA", () => { 5 | const expectedResult = { 6 | sourceLocationPrefix: "/home/runner/work/bulk-builder/bulk-builder", 7 | baselineLinesOfCode: BigInt(13088), 8 | unicodeNewlines: false, 9 | columnKind: "utf16", 10 | primaryLanguage: "java", 11 | creationMetadata: { 12 | sha: "4225332178759948e04347560002921719079454", 13 | cliVersion: "2.14.1", 14 | creationTime: new Date("2023-08-03T18:19:44.622274245Z"), 15 | }, 16 | finalised: true, 17 | }; 18 | const actualResult = parseYaml(`--- 19 | sourceLocationPrefix: /home/runner/work/bulk-builder/bulk-builder 20 | baselineLinesOfCode: 13088 21 | unicodeNewlines: false 22 | columnKind: utf16 23 | primaryLanguage:
java 24 | creationMetadata: 25 | sha: 4225332178759948e04347560002921719079454 26 | cliVersion: 2.14.1 27 | creationTime: 2023-08-03T18:19:44.622274245Z 28 | finalised: true 29 | `); 30 | expect(actualResult).toEqual(expectedResult); 31 | }); 32 | 33 | it("can successfully parse YAML with numeric commit SHA", () => { 34 | const expectedResult = { 35 | sourceLocationPrefix: "/home/runner/work/bulk-builder/bulk-builder", 36 | baselineLinesOfCode: BigInt(13088), 37 | unicodeNewlines: false, 38 | columnKind: "utf16", 39 | primaryLanguage: "java", 40 | creationMetadata: { 41 | sha: BigInt("4225332178759948504347560002921719079454"), 42 | cliVersion: "2.14.1", 43 | creationTime: new Date("2023-08-03T18:19:44.622274245Z"), 44 | }, 45 | finalised: true, 46 | }; 47 | const actualResult = parseYaml(`--- 48 | sourceLocationPrefix: /home/runner/work/bulk-builder/bulk-builder 49 | baselineLinesOfCode: 13088 50 | unicodeNewlines: false 51 | columnKind: utf16 52 | primaryLanguage: java 53 | creationMetadata: 54 | sha: 4225332178759948504347560002921719079454 55 | cliVersion: 2.14.1 56 | creationTime: 2023-08-03T18:19:44.622274245Z 57 | finalised: true 58 | `); 59 | expect(actualResult).toEqual(expectedResult); 60 | }); 61 | }); 62 | -------------------------------------------------------------------------------- /src/yaml.ts: -------------------------------------------------------------------------------- 1 | import fs from "fs"; 2 | 3 | import { parse, ScalarTag } from "yaml"; 4 | 5 | // Use a custom tag for floats in exponential notation, to make the +/- mandatory 6 | // This fixes commit SHAs consisting only of numbers with a single "e" in them 7 | const floatExp: ScalarTag = { 8 | identify: (value) => typeof value === "number", 9 | default: true, 10 | tag: "tag:yaml.org,2002:float", 11 | format: "EXP", 12 | test: /^[-+]?(?:[0-9][0-9_]*)?(?:\.[0-9_]*)?[eE][-+][0-9]+$/, // Change is here, making the [-+] mandatory 13 | resolve: (str: string) => parseFloat(str.replace(/_/g, 
"")), 14 | stringify(node) { 15 | const num = Number(node.value); 16 | if (isFinite(num)) { 17 | return num.toExponential(); 18 | } 19 | 20 | return isNaN(num) ? ".nan" : num < 0 ? "-.inf" : ".inf"; 21 | }, 22 | }; 23 | 24 | export function parseYaml(src: string): T { 25 | return parse(src, { 26 | version: "1.1", // CodeQL CLI uses YAML 1.1 27 | schema: "yaml-1.1", 28 | intAsBigInt: true, // We want to retrieve the complete original commit SHA, which we can't do if it's parsed as a float 29 | customTags: (tags) => { 30 | // Remove the original float EXP tag, and add our custom one 31 | const tagsWithoutFloatExp = tags.filter((tag) => { 32 | if (typeof tag !== "object" || !tag.tag) { 33 | return true; 34 | } 35 | 36 | return tag.tag !== "tag:yaml.org,2002:float" && tag.format !== "EXP"; 37 | }); 38 | 39 | return [floatExp, ...tagsWithoutFloatExp]; 40 | }, 41 | }) as T; 42 | } 43 | 44 | export function parseYamlFromFile(filePath: string): T { 45 | return parseYaml(fs.readFileSync(filePath, "utf8")); 46 | } 47 | -------------------------------------------------------------------------------- /testdata/test_pack/.gitignore: -------------------------------------------------------------------------------- 1 | .cache/ -------------------------------------------------------------------------------- /testdata/test_pack/qlpack.yml: -------------------------------------------------------------------------------- 1 | library: false 2 | name: codeql/queries 3 | version: 1.0.0 4 | buildMetadata: 5 | creationTime: 2021-11-23T16:09:11.452312400Z 6 | cliVersion: 2.7.2 7 | defaultSuite: 8 | - description: Query suite for remote query 9 | - query: x/query.ql 10 | -------------------------------------------------------------------------------- /testdata/test_pack/x/query.ql: -------------------------------------------------------------------------------- 1 | /** 2 | * @name Test query 3 | * @description Test query description 4 | * @kind table 5 | * @id test/query/id 6 | */ 7 | 8 | 
import y.MyLib 9 | 10 | from int i 11 | where i in [0 .. 2] 12 | select i, smallPlusOne(i) 13 | -------------------------------------------------------------------------------- /testdata/test_pack/x/y/MyLib.qll: -------------------------------------------------------------------------------- 1 | int smallPlusOne(int i) { result = i + 1 and i in [0 .. 9] } 2 | -------------------------------------------------------------------------------- /testdata/test_pack_multiple_queries/.gitignore: -------------------------------------------------------------------------------- 1 | .cache/ -------------------------------------------------------------------------------- /testdata/test_pack_multiple_queries/qlpack.yml: -------------------------------------------------------------------------------- 1 | library: false 2 | name: codeql/queries 3 | version: 1.0.0 4 | buildMetadata: 5 | creationTime: 2021-11-23T16:09:11.452312400Z 6 | cliVersion: 2.7.2 7 | defaultSuite: 8 | - description: Query suite for remote query 9 | - query: x/query.ql 10 | - query: z/query.ql 11 | -------------------------------------------------------------------------------- /testdata/test_pack_multiple_queries/x/query.ql: -------------------------------------------------------------------------------- 1 | /** 2 | * @name Test query 1 3 | * @kind table 4 | * @id test/query/one 5 | */ 6 | 7 | import y.MyLib 8 | 9 | from int i 10 | where i in [0 .. 2] 11 | select i, smallPlusOne(i) 12 | -------------------------------------------------------------------------------- /testdata/test_pack_multiple_queries/x/y/MyLib.qll: -------------------------------------------------------------------------------- 1 | int smallPlusOne(int i) { result = i + 1 and i in [0 .. 
9] } 2 | -------------------------------------------------------------------------------- /testdata/test_pack_multiple_queries/z/query.ql: -------------------------------------------------------------------------------- 1 | /** 2 | * @name Test query 2 3 | * @kind table 4 | * @id test/query/two 5 | */ 6 | 7 | import y.MyLib 8 | 9 | from int i 10 | where i in [0 .. 2] 11 | select i, smallPlusOne(i) 12 | -------------------------------------------------------------------------------- /testdata/test_pack_multiple_queries/z/y/MyLib.qll: -------------------------------------------------------------------------------- 1 | int smallPlusOne(int i) { result = i + 1 and i in [0 .. 9] } 2 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | /* Basic Options */ 4 | // "incremental": true, /* Enable incremental compilation */ 5 | "target": "ES2022", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019' or 'ESNEXT'. */ 6 | "module": "commonjs", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', or 'ESNext'. */ 7 | // "allowJs": true, /* Allow javascript files to be compiled. */ 8 | // "checkJs": true, /* Report errors in .js files. */ 9 | // "jsx": "preserve", /* Specify JSX code generation: 'preserve', 'react-native', or 'react'. */ 10 | // "declaration": true, /* Generates corresponding '.d.ts' file. */ 11 | // "declarationMap": true, /* Generates a sourcemap for each corresponding '.d.ts' file. */ 12 | "sourceMap": true, /* Generates corresponding '.map' file. */ 13 | // "outFile": "./", /* Concatenate and emit output to single file. */ 14 | "outDir": "./lib", /* Redirect output structure to the directory. */ 15 | "rootDir": "./src", /* Specify the root directory of input files. 
Use to control the output directory structure with --outDir. */ 16 | // "composite": true, /* Enable project compilation */ 17 | // "tsBuildInfoFile": "./", /* Specify file to store incremental compilation information */ 18 | // "removeComments": true, /* Do not emit comments to output. */ 19 | // "noEmit": true, /* Do not emit outputs. */ 20 | // "importHelpers": true, /* Import emit helpers from 'tslib'. */ 21 | // "downlevelIteration": true, /* Provide full support for iterables in 'for-of', spread, and destructuring when targeting 'ES5' or 'ES3'. */ 22 | // "isolatedModules": true, /* Transpile each file as a separate module (similar to 'ts.transpileModule'). */ 23 | 24 | /* Strict Type-Checking Options */ 25 | "strict": true, /* Enable all strict type-checking options. */ 26 | "noImplicitAny": false, /* Raise error on expressions and declarations with an implied 'any' type. */ 27 | "strictNullChecks": true, /* Enable strict null checks. */ 28 | "strictFunctionTypes": true, /* Enable strict checking of function types. */ 29 | "strictBindCallApply": true, /* Enable strict 'bind', 'call', and 'apply' methods on functions. */ 30 | "strictPropertyInitialization": true, /* Enable strict checking of property initialization in classes. */ 31 | "noImplicitThis": true, /* Raise error on 'this' expressions with an implied 'any' type. */ 32 | "alwaysStrict": true, /* Parse in strict mode and emit "use strict" for each source file. */ 33 | 34 | /* Additional Checks */ 35 | "noUnusedLocals": true, /* Report errors on unused locals. */ 36 | "noUnusedParameters": true, /* Report errors on unused parameters. */ 37 | "noImplicitReturns": true, /* Report error when not all code paths in function return a value. */ 38 | "noFallthroughCasesInSwitch": true, /* Report errors for fallthrough cases in switch statement. 
*/ 39 | 40 | /* Module Resolution Options */ 41 | "moduleResolution": "bundler", /* Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). */ 42 | // "baseUrl": "./", /* Base directory to resolve non-absolute module names. */ 43 | // "paths": {}, /* A series of entries which re-map imports to lookup locations relative to the 'baseUrl'. */ 44 | // "rootDirs": [], /* List of root folders whose combined content represents the structure of the project at runtime. */ 45 | // "typeRoots": [], /* List of folders to include type definitions from. */ 46 | // "types": [], /* Type declaration files to be included in compilation. */ 47 | // "allowSyntheticDefaultImports": true, /* Allow default imports from modules with no default export. This does not affect code emit, just typechecking. */ 48 | "esModuleInterop": true, /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */ 49 | // "preserveSymlinks": true, /* Do not resolve the real path of symlinks. */ 50 | // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */ 51 | "resolveJsonModule": true, 52 | 53 | /* Source Map Options */ 54 | // "sourceRoot": "", /* Specify the location where debugger should locate TypeScript files instead of source locations. */ 55 | // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */ 56 | // "inlineSourceMap": true, /* Emit a single file with source maps instead of having a separate file. */ 57 | // "inlineSources": true, /* Emit the source alongside the sourcemaps within a single file; requires '--inlineSourceMap' or '--sourceMap' to be set. */ 58 | 59 | /* Experimental Options */ 60 | // "experimentalDecorators": true, /* Enables experimental support for ES7 decorators. 
*/ 61 | // "emitDecoratorMetadata": true, /* Enables experimental support for emitting type metadata for decorators. */ 62 | }, 63 | "include": ["src"] 64 | } 65 | -------------------------------------------------------------------------------- /tsconfig.lint.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "./tsconfig.json", 3 | "include": ["src", "script"] 4 | } 5 | -------------------------------------------------------------------------------- /update-repo-task-status/action.yml: -------------------------------------------------------------------------------- 1 | name: "CodeQL: Update repository task status" 2 | description: "Set repository task as failed or canceled" 3 | author: "GitHub" 4 | inputs: 5 | workflow_status: 6 | description: "The status of the workflow: 'canceled' or 'failed'." 7 | required: true 8 | 9 | controller_repo_id: 10 | description: "ID of the controller repository where the variant analysis is run." 11 | required: true 12 | 13 | repositories: 14 | description: "Repositories to run the query against. A JSON encoded array of the form {id: number, nwo: string}[]" 15 | required: true 16 | 17 | variant_analysis_id: 18 | description: "The ID of the variant analysis" 19 | required: true 20 | 21 | signed_auth_token: 22 | description: "The signed auth token to authenticate against the GitHub API" 23 | required: true 24 | 25 | runs: 26 | using: "node20" 27 | main: "../dist/update-repo-task-status.js" 28 | -------------------------------------------------------------------------------- /update-repo-task-statuses/action.yml: -------------------------------------------------------------------------------- 1 | name: "CodeQL: Update repository task statuses" 2 | description: "Set repository tasks as failed or canceled" 3 | author: "GitHub" 4 | inputs: 5 | workflow_status: 6 | description: "The status of the workflow: 'canceled' or 'failed'." 
7 | required: true 8 | 9 | controller_repo_id: 10 | description: "ID of the controller repository where the variant analysis is run." 11 | required: true 12 | 13 | instructions_path: 14 | description: "The path to the instructions file." 15 | required: true 16 | 17 | variant_analysis_id: 18 | description: "The ID of the variant analysis" 19 | required: true 20 | 21 | signed_auth_token: 22 | description: "The signed auth token to authenticate against the GitHub API" 23 | required: true 24 | 25 | runs: 26 | using: "node20" 27 | main: "../dist/update-repo-task-statuses.js" 28 | --------------------------------------------------------------------------------