├── branch-suggestion ├── .gitignore ├── vitest.config.js ├── package.json ├── scripts │ ├── github │ │ ├── __tests__ │ │ │ └── add-pr-comment.test.js │ │ ├── github-api.js │ │ └── add-pr-comment.js │ ├── jira │ │ ├── __tests__ │ │ │ └── get-fixedversion.test.js │ │ ├── get-fixedversion.js │ │ └── jira-api.js │ └── common │ │ ├── __tests__ │ │ └── match-branches.test.js │ │ └── match-branches.js ├── branch_suggestion.yml └── README.md ├── docs ├── workflows │ ├── jira-lint.md │ ├── sbom.md │ ├── owasp.md │ ├── golangci.md │ ├── govulncheck.md │ ├── sonarcloud.md │ ├── godoc.md │ ├── nancy.md │ ├── ci-lint.md │ ├── semgrep.md │ └── ci-docker-tools.md ├── actions │ └── checkout-pr.md └── Taskfile.yml ├── .yamllint ├── .gitignore ├── .github ├── .dependabot.yml ├── workflows │ ├── govulncheck.yaml │ ├── owasp.yaml │ ├── example-usage.yml.template │ ├── jira-lint.yaml │ ├── visor.yaml │ ├── semgrep.yaml │ ├── ci-lint.yml │ ├── ci-test.yml │ ├── create-update-comment.yaml │ ├── nancy.yaml │ ├── sonarcloud.yaml │ ├── branch-suggestion.yml │ ├── ci-docker-tools.yml │ ├── gotest.yaml │ ├── godoc.yml │ ├── s1-cns-scan.yml │ ├── force-merge.yaml │ ├── golangci.yaml │ ├── sbom.yaml │ ├── sbom-dev.yaml │ └── release-bot.yaml └── actions │ ├── tests │ ├── checkout-tyk-pro │ │ └── action.yaml │ ├── ui-tests │ │ └── action.yaml │ ├── ui-tests-report │ │ └── action.yaml │ ├── test-controller │ │ └── action.yaml │ ├── api-tests │ │ └── action.yaml │ ├── reporting │ │ └── action.yaml │ ├── choose-test-branch │ │ └── action.yaml │ └── env-up │ │ └── action.yaml │ ├── gh-logs-analyser │ ├── action.yaml │ └── log-collector.sh │ ├── checkout-pr │ └── action.yml │ └── latest-versions │ ├── test.sh │ ├── action.yaml │ ├── get_tags.sh │ └── versions.txt ├── docker └── tools │ ├── Taskfile.yml │ ├── latest │ └── Dockerfile │ └── README.md ├── Taskfile.yml ├── LICENSE ├── sbom ├── merge.py └── gen_dep.py └── README.md /branch-suggestion/.gitignore: -------------------------------------------------------------------------------- 1 | coverage/ 2 | output/ 3 | debug-artifacts/ 4 | -------------------------------------------------------------------------------- /docs/workflows/jira-lint.md: -------------------------------------------------------------------------------- 1 | ## JIRA linter 2 | 3 | Adoption: Gateway, Dashboard. 4 | -------------------------------------------------------------------------------- /docs/workflows/sbom.md: -------------------------------------------------------------------------------- 1 | ## SBOM - source bill of materials 2 | 3 | Adoption: Gateway, Dashboard. 
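Example usage (a minimal sketch: the reusable `sbom.yaml` workflow exists under `.github/workflows/`, but its inputs and secrets are not shown in this section, so the `secrets: inherit` line is an assumption):

```yaml
jobs:
  sbom:
    uses: TykTechnologies/github-actions/.github/workflows/sbom.yaml@main
    secrets: inherit
```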
4 | -------------------------------------------------------------------------------- /.yamllint: -------------------------------------------------------------------------------- 1 | extends: default 2 | 3 | rules: 4 | line-length: disable 5 | document-start: disable 6 | trailing-spaces: disable 7 | truthy: disable 8 | 9 | ignore: | 10 | .github/workflows/gotest.yaml 11 | .github/workflows/golangci.yaml 12 | -------------------------------------------------------------------------------- /docs/workflows/owasp.md: -------------------------------------------------------------------------------- 1 | ## OWASP scanner 2 | 3 | Example usage: 4 | 5 | ```yaml 6 | jobs: 7 | owasp: 8 | uses: TykTechnologies/github-actions/.github/workflows/owasp.yaml@main 9 | with: 10 | target: http://staging-url.com 11 | ``` 12 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Node.js 2 | node_modules/ 3 | npm-debug.log* 4 | yarn-debug.log* 5 | yarn-error.log* 6 | 7 | # Environment files 8 | .env 9 | .env.local 10 | .env.*.local 11 | 12 | # OS files 13 | .DS_Store 14 | Thumbs.db 15 | 16 | # IDE 17 | .vscode/ 18 | .idea/ 19 | *.swp 20 | *.swo 21 | *~ 22 | -------------------------------------------------------------------------------- /docs/workflows/golangci.md: -------------------------------------------------------------------------------- 1 | ## Golang CI 2 | 3 | Popular linter for Go lang with good defaults. 4 | 5 | Example usage: 6 | 7 | ```yaml 8 | jobs: 9 | golangci: 10 | uses: TykTechnologies/github-actions/.github/workflows/golangci.yaml@main 11 | with: 12 | main_branch: master 13 | ``` 14 | -------------------------------------------------------------------------------- /docs/workflows/govulncheck.md: -------------------------------------------------------------------------------- 1 | ## Go govulncheck 2 | 3 | Official Go Vulnerability Management. 4 | 5 | See: https://go.dev/blog/vuln 6 | 7 | Example usage: 8 | 9 | ```yaml 10 | jobs: 11 | govulncheck: 12 | uses: TykTechnologies/github-actions/.github/workflows/govulncheck.yaml@main 13 | ``` 14 | -------------------------------------------------------------------------------- /.github/.dependabot.yml: -------------------------------------------------------------------------------- 1 | --- 2 | version: 2 3 | 4 | updates: 5 | - package-ecosystem: github-actions 6 | directory: / 7 | schedule: 8 | interval: weekly 9 | commit-message: 10 | include: scope 11 | prefix: "Actions" 12 | reviewers: 13 | - "TykTechnologies/engineering" 14 | -------------------------------------------------------------------------------- /docs/workflows/sonarcloud.md: -------------------------------------------------------------------------------- 1 | ## SonarCloud 2 | 3 | Put it after Golang CI to automatically upload its reports to SonarCloud. 
4 | 5 | Example usage: 6 | 7 | ```yaml 8 | jobs: 9 | golangci: 10 | uses: TykTechnologies/github-actions/.github/workflows/sonarcloud.yaml@main 11 | with: 12 | main_branch: master 13 | exclusions: "" 14 | secrets: inherit 15 | ``` 16 | -------------------------------------------------------------------------------- /.github/workflows/govulncheck.yaml: -------------------------------------------------------------------------------- 1 | name: Go Vulnerability detection 2 | 3 | on: 4 | workflow_call: 5 | 6 | jobs: 7 | govulncheck: 8 | name: Go Vulnerability detection 9 | runs-on: ubuntu-latest 10 | 11 | steps: 12 | - uses: actions/checkout@v3 13 | - uses: actions/setup-go@v3 14 | 15 | - name: Govulncheck scan 16 | run: | 17 | go install golang.org/x/vuln/cmd/govulncheck@latest 18 | govulncheck ./... 19 | -------------------------------------------------------------------------------- /docs/workflows/godoc.md: -------------------------------------------------------------------------------- 1 | ## Print Go API Changes 2 | 3 | For a PR, the action will print the changes in `go doc` output. This 4 | surfaces API changes (function removals, renames, additions), as well as 5 | comment changes. 6 | 7 | Example usage: 8 | 9 | ```yaml 10 | jobs: 11 | godoc: 12 | uses: TykTechnologies/github-actions/.github/workflows/godoc.yml@main 13 | secrets: 14 | ORG_GH_TOKEN: ${{ secrets.ORG_GH_TOKEN }} 15 | ``` 16 | 17 | Adoption: Gateway, Dashboard. 18 | -------------------------------------------------------------------------------- /.github/workflows/owasp.yaml: -------------------------------------------------------------------------------- 1 | name: OWASP scanner 2 | 3 | on: 4 | workflow_call: 5 | inputs: 6 | target: 7 | required: true 8 | type: string 9 | 10 | jobs: 11 | zap_scan: 12 | name: OWASP Zap 13 | runs-on: ubuntu-latest 14 | steps: 15 | - name: OWASP Zap 16 | uses: zaproxy/action-full-scan@v0.4.0 17 | with: 18 | target: ${{ inputs.target }} 19 | cmd_options: '-a' 20 | allow_issue_writing: false 21 | -------------------------------------------------------------------------------- /branch-suggestion/vitest.config.js: -------------------------------------------------------------------------------- 1 | import { defineConfig } from 'vitest/config'; 2 | 3 | export default defineConfig({ 4 | test: { 5 | globals: true, 6 | environment: 'node', 7 | coverage: { 8 | provider: 'v8', 9 | reporter: ['text', 'json', 'html'], 10 | exclude: [ 11 | 'node_modules/', 12 | '**/*.test.js', 13 | '**/__tests__/**', 14 | 'vitest.config.js' 15 | ], 16 | include: ['scripts/**/*.js'] 17 | } 18 | } 19 | }); 20 | -------------------------------------------------------------------------------- /.github/workflows/example-usage.yml.template: -------------------------------------------------------------------------------- 1 | # Branch Suggestion Workflow 2 | # Documentation: https://github.com/TykTechnologies/github-actions/tree/main/branch-suggestion 3 | 4 | name: PR Branch Suggestions 5 | 6 | on: 7 | pull_request: 8 | types: [opened, synchronize, reopened] 9 | 10 | permissions: 11 | pull-requests: write 12 | contents: read 13 | 14 | jobs: 15 | branch-suggestions: 16 | uses: TykTechnologies/REFINE/.github/workflows/branch-suggestion.yml@main 17 | secrets: 18 | JIRA_TOKEN: ${{ secrets.JIRA_API_TOKEN }} 19 | -------------------------------------------------------------------------------- /docs/workflows/nancy.md: -------------------------------------------------------------------------------- 1 | ## Nancy Scan 2 | 3 | OSS scanner 
which helps find CVEs in Go dependencies 4 | 5 | Example usage: 6 | 7 | ```yaml 8 | jobs: 9 | nancy: 10 | strategy: 11 | fail-fast: false 12 | matrix: 13 | package: 14 | - controller 15 | - dashboard 16 | - billing 17 | - monitor 18 | - pkg 19 | 20 | uses: TykTechnologies/github-actions/.github/workflows/nancy.yaml@main 21 | with: 22 | dir: ${{ matrix.package }} 23 | secrets: inherit 24 | ``` 25 | -------------------------------------------------------------------------------- /docs/workflows/ci-lint.md: -------------------------------------------------------------------------------- 1 | ## CI lint 2 | 3 | In order to ensure some standard of quality, a lint action is being run 4 | that checks for syntax issues, yaml issues and validates github actions 5 | in the repository. It's not complete or fully accurate by any measure, 6 | but it enforces conventions for the work being added in PRs. 7 | 8 | It's generally incomplete, but extensions are welcome. 9 | 10 | The action regenerates `README.md` from the docs/ folder contents. 11 | 12 | To invoke the linter locally, use `task lint`. 13 | 14 | Adoption: Internal use for PR workflows on the repository. 15 | -------------------------------------------------------------------------------- /docker/tools/Taskfile.yml: -------------------------------------------------------------------------------- 1 | # yamllint disable rule:line-length 2 | --- 3 | version: "3" 4 | 5 | vars: 6 | image: internal/ci-tools 7 | platform: '{{.BUILD_PLATFORM | default "linux/amd64"}}' 8 | 9 | env: 10 | DOCKER_BUILDKIT: 1 11 | BUILDX_EXPERIMENTAL: 1 12 | 13 | tasks: 14 | default: 15 | desc: "Build docker images" 16 | vars: 17 | tags: latest 18 | args: --rm --platform {{.platform}} --progress=plain --no-cache --pull 19 | cmds: 20 | - for: 21 | var: tags 22 | as: tag 23 | cmd: docker build {{.args}} -t {{.image}}:{{.tag}} -f {{.tag}}/Dockerfile {{.tag}}/ 24 | -------------------------------------------------------------------------------- /Taskfile.yml: -------------------------------------------------------------------------------- 1 | # yamllint disable rule:line-length 2 | --- 3 | version: "3" 4 | 5 | tasks: 6 | default: 7 | desc: Lint actions 8 | aliases: ["lint"] 9 | cmds: 10 | - yamllint . 11 | - task: gen 12 | 13 | gen: 14 | desc: "Generate jobs (all)" 15 | deps: [gen:readme] 16 | 17 | gen:readme: 18 | desc: "Generate README.md" 19 | cmds: 20 | - echo "# Re-usable github actions" > README.md 21 | - echo >> README.md 22 | - echo "Collection of shared github actions and workflows which are used in our org." 
>> README.md 23 | - echo >> README.md 24 | - cd docs && task markdown >> ../README.md 25 | -------------------------------------------------------------------------------- /.github/actions/tests/checkout-tyk-pro/action.yaml: -------------------------------------------------------------------------------- 1 | name: 'Checkout test code' 2 | description: 'On workflow job failure, fetch and send relevant logs with branch information to an external API' 3 | author: 'konrad' 4 | inputs: 5 | org_gh_token: 6 | description: 'GitHub token for API access' 7 | required: true 8 | runs: 9 | using: "composite" 10 | steps: 11 | - name: fetch env from tyk-pro 12 | shell: bash 13 | env: 14 | GH_TOKEN: ${{ inputs.org_gh_token }} 15 | run: | 16 | gh release download --repo github.com/TykTechnologies/tyk-pro --archive tar.gz -O env.tgz 17 | mkdir auto && tar --strip-components=1 -C auto -xzvf env.tgz 18 | -------------------------------------------------------------------------------- /.github/workflows/jira-lint.yaml: -------------------------------------------------------------------------------- 1 | name: JIRA linter 2 | 3 | on: 4 | workflow_call: 5 | secrets: 6 | JIRA_TOKEN: 7 | required: true 8 | ORG_GH_TOKEN: 9 | required: true 10 | 11 | jobs: 12 | jira-lint: 13 | runs-on: ubuntu-latest 14 | steps: 15 | - uses: cyrus-za/jira-lint@master 16 | name: jira-lint 17 | with: 18 | github-token: ${{ secrets.ORG_GH_TOKEN }} 19 | jira-token: ${{ secrets.JIRA_TOKEN }} 20 | jira-base-url: https://tyktech.atlassian.net/ 21 | skip-branches: '^(release-[0-9.-]+(lts)?|master|main)$' 22 | validate_issue_status: true 23 | allowed_issue_statuses: "In Dev,In Code Review,Ready for Testing,In Test,In Progress,In Review" 24 | -------------------------------------------------------------------------------- /.github/workflows/visor.yaml: -------------------------------------------------------------------------------- 1 | name: Visor 2 | 3 | on: 4 | pull_request: 5 | types: [opened, synchronize] 6 | issues: 7 | types: [opened] 8 | issue_comment: 9 | types: [created] 10 | 11 | permissions: 12 | contents: read 13 | pull-requests: write 14 | issues: write 15 | checks: write 16 | 17 | jobs: 18 | visor: 19 | runs-on: ubuntu-latest 20 | steps: 21 | - name: Checkout code 22 | uses: actions/checkout@v4 23 | - uses: probelabs/visor@main 24 | with: 25 | app-id: ${{ secrets.PROBE_APP_ID }} 26 | private-key: ${{ secrets.PROBE_APP_PRIVATE_KEY }} 27 | installation-id: ${{ secrets.PROBE_APP_INSTALLATION_ID }} 28 | env: 29 | GOOGLE_API_KEY: ${{ secrets.GOOGLE_API_KEY }} 30 | -------------------------------------------------------------------------------- /docs/actions/checkout-pr.md: -------------------------------------------------------------------------------- 1 | ## PR Checkout 2 | 3 | The checkout PR action will fetch only the commits that belong to the PR. 4 | This is required for various code analysis tooling, including sonarcloud. 5 | 6 | Example usage: 7 | 8 | ```yaml 9 | jobs: 10 | golangci-lint: 11 | runs-on: ubuntu-latest 12 | steps: 13 | - name: Checkout PR 14 | uses: TykTechnologies/github-actions/.github/actions/checkout-pr@main 15 | ``` 16 | 17 | The main use case behind this is to make sure the HEAD and the current PR 18 | state can be compared, and that we don't fetch the full git history for 19 | the checkout. This supports some of our custom actions like `godoc`. 20 | 21 | Supports: godoc, sonarcloud, dashboard (bindata size). 22 | 23 | Adoption: gateway, dashboard, reuse in shared CI workflows. 
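The action also accepts a `token` input (a GitHub PAT) for authenticated checkouts; the shared `godoc` workflow passes one, for example:

```yaml
steps:
  - name: Checkout PR
    uses: TykTechnologies/github-actions/.github/actions/checkout-pr@main
    with:
      token: ${{ secrets.ORG_GH_TOKEN }}
```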
24 | -------------------------------------------------------------------------------- /.github/workflows/semgrep.yaml: -------------------------------------------------------------------------------- 1 | name: "Semgrep scan" 2 | 3 | on: 4 | workflow_call: 5 | inputs: 6 | config: 7 | default: "auto" 8 | type: string 9 | 10 | jobs: 11 | semgrep: 12 | name: Semgrep 13 | runs-on: ubuntu-20.04 14 | 15 | outputs: 16 | scan: ${{ steps.scan.outputs.test }} 17 | 18 | container: 19 | image: returntocorp/semgrep 20 | 21 | steps: 22 | - name: Check out code into the Go module directory 23 | uses: actions/checkout@v3 24 | 25 | - name: Semgrep scan 26 | id: scan 27 | run: | 28 | semgrep --config ${{ inputs.config }} --output semgrep-report.txt --error --exclude=docs 29 | 30 | - name: Archive Semgrep report 31 | if: ${{ always() }} 32 | uses: actions/upload-artifact@v3 33 | with: 34 | name: semgrep-report.txt 35 | path: semgrep-report.txt 36 | -------------------------------------------------------------------------------- /.github/workflows/ci-lint.yml: -------------------------------------------------------------------------------- 1 | # yamllint disable rule:line-length 2 | --- 3 | name: CI lint 4 | 5 | # This workflow runs the taskfile in the root to ensure 6 | # that the changes added to the repository do not trigger 7 | # linter errors. This covers yaml files and github actions. 8 | 9 | on: # yamllint disable-line rule:truthy 10 | pull_request: 11 | 12 | # Make sure to cancel previous job runs in case a PR 13 | # gets new commits. Changes being merged to the main 14 | # branch will continue to run. 15 | 16 | concurrency: 17 | group: ${{ github.head_ref || github.run_id }}-task-lint 18 | cancel-in-progress: true 19 | 20 | # Set the default install path for `go install`. 
21 | 22 | env: 23 | GOBIN: /usr/local/bin 24 | 25 | jobs: 26 | tools-latest: 27 | name: 'Lint tyk-github-actions repo' 28 | runs-on: ubuntu-latest 29 | 30 | steps: 31 | - uses: actions/checkout@v3 32 | - uses: arduino/setup-task@v1 33 | with: 34 | version: 3 35 | 36 | - name: 'Run task lint' 37 | run: task lint 38 | -------------------------------------------------------------------------------- /branch-suggestion/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "release-manager", 3 | "version": "1.0.0", 4 | "type": "module", 5 | "main": "index.js", 6 | "scripts": { 7 | "start": "node index.js", 8 | "test": "vitest run", 9 | "test:watch": "vitest", 10 | "test:coverage": "vitest run --coverage", 11 | "fetch:jira": "node scripts/jira/batch-fetch.js", 12 | "fetch:zendesk": "node scripts/zendesk/zendesk-api.js", 13 | "fetch:all": "node scripts/common/unified-fetch.js", 14 | "analyze": "node scripts/common/analyze-ticket.js" 15 | }, 16 | "keywords": [], 17 | "author": "", 18 | "license": "ISC", 19 | "description": "", 20 | "dependencies": { 21 | "@anthropic-ai/claude-code": "^1.0.77", 22 | "@anthropic-ai/sdk": "^0.60.0", 23 | "@modelcontextprotocol/sdk": "^1.17.4", 24 | "commander": "^14.0.0", 25 | "dotenv": "^17.2.1", 26 | "mcp-remote": "^0.1.18", 27 | "turndown": "^7.2.1" 28 | }, 29 | "devDependencies": { 30 | "@vitest/coverage-v8": "^3.2.4", 31 | "vitest": "^3.2.4" 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /.github/actions/gh-logs-analyser/action.yaml: -------------------------------------------------------------------------------- 1 | name: 'Upload Failed Job Logs' 2 | description: 'On workflow job failure, fetch and send relevant logs with branch information to an external API' 3 | author: 'Your Name' 4 | inputs: 5 | run_id: 6 | description: 'Workflow run ID to fetch logs for (defaults to current run if not set)' 7 | required: false 8 | github_token: 9 | description: 'GitHub token for API access' 10 | required: true 11 | gh_logs_analyser_token: 12 | description: 'Authentication token for the gh-logs-analyser API' 13 | required: true 14 | runs: 15 | using: "composite" 16 | steps: 17 | - name: Fetch and send failed logs 18 | id: send_logs 19 | shell: bash 20 | run: bash ${{ github.action_path }}/log-collector.sh 21 | env: 22 | GITHUB_TOKEN: ${{ inputs.github_token }} 23 | GH_LOGS_ANALYSER: ${{ inputs.gh_logs_analyser_token }} 24 | TARGET_RUN_ID: ${{ inputs.run_id || github.run_id }} 25 | TARGET_REPO: ${{ github.repository }} 26 | GITHUB_REF: ${{ github.ref }} 27 | GITHUB_BASE_REF: ${{ github.base_ref }} 28 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2022 Tyk Technologies 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 
14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /.github/workflows/ci-test.yml: -------------------------------------------------------------------------------- 1 | name: CI Tests 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | - 'release-*' 8 | pull_request: 9 | paths: 10 | - 'branch-suggestion/**' 11 | - '.github/workflows/ci-test.yml' 12 | 13 | defaults: 14 | run: 15 | working-directory: branch-suggestion 16 | 17 | jobs: 18 | test: 19 | runs-on: ubuntu-latest 20 | 21 | steps: 22 | - name: Checkout repository 23 | uses: actions/checkout@v4 24 | 25 | - name: Setup Node.js 26 | uses: actions/setup-node@v4 27 | with: 28 | node-version: '20' 29 | cache: 'npm' 30 | cache-dependency-path: branch-suggestion/package-lock.json 31 | 32 | - name: Install dependencies 33 | run: npm ci 34 | 35 | - name: Run tests 36 | run: npm test 37 | 38 | - name: Generate coverage report 39 | run: npm run test:coverage 40 | 41 | - name: Upload coverage reports 42 | uses: codecov/codecov-action@v4 43 | if: always() 44 | with: 45 | files: ./branch-suggestion/coverage/coverage-final.json 46 | flags: branch-suggestion 47 | -------------------------------------------------------------------------------- /.github/actions/checkout-pr/action.yml: -------------------------------------------------------------------------------- 1 | # yamllint disable rule:line-length 2 | --- 3 | name: 'Checkout PR' 4 | 5 | description: >- 6 | Checkout the exact number of PR commits + 1 (base_ref). 7 | 8 | inputs: 9 | jobname: 10 | default: ${{ github.job }} 11 | token: 12 | description: 'A Github PAT' 13 | 14 | runs: 15 | using: "composite" 16 | steps: 17 | - name: 'PR commits + 1' 18 | shell: bash 19 | run: echo "PR_FETCH_DEPTH=$(( ${{ github.event.pull_request.commits || 1 }} + 1 ))" >> "${GITHUB_ENV}" 20 | 21 | - name: 'Checkout PR branch and all PR commits' 22 | uses: actions/checkout@v4 23 | with: 24 | ref: ${{ github.event.pull_request.head.ref }} 25 | token: ${{ inputs.token }} 26 | fetch-depth: ${{ env.PR_FETCH_DEPTH }} 27 | submodules: true 28 | lfs: true 29 | 30 | - name: 'Fetch the other branch with enough history for a common merge-base commit' 31 | shell: bash 32 | run: | 33 | git fetch origin ${{ github.event.pull_request.base.sha || github.event.before }} 34 | 35 | - name: 'Print git position' 36 | shell: bash 37 | run: | 38 | git log -n ${{ env.PR_FETCH_DEPTH }} --oneline 39 | -------------------------------------------------------------------------------- /.github/workflows/create-update-comment.yaml: -------------------------------------------------------------------------------- 1 | name: Create or update a GitHub comment 2 | 3 | on: 4 | workflow_call: 5 | inputs: 6 | comment-author: 7 | type: string 8 | default: "" 9 | description: "The author of the comment to find." 10 | body-includes: 11 | type: string 12 | default: "" 13 | description: "The text to search for in the comment body." 14 | body: 15 | required: true 16 | type: string 17 | description: "The text to add to the comment body." 
18 | 19 | jobs: 20 | create-update-comment: 21 | runs-on: ubuntu-latest 22 | steps: 23 | - name: Find Comment 24 | uses: peter-evans/find-comment@v2 25 | id: fc 26 | with: 27 | issue-number: ${{ github.event.pull_request.number }} 28 | comment-author: ${{ inputs.comment-author }} 29 | body-includes: ${{ inputs.body-includes }} 30 | 31 | - name: Create or update comment 32 | uses: peter-evans/create-or-update-comment@v3 33 | with: 34 | comment-id: ${{ steps.fc.outputs.comment-id }} 35 | issue-number: ${{ github.event.pull_request.number }} 36 | body: ${{ inputs.body }} 37 | edit-mode: replace 38 | -------------------------------------------------------------------------------- /docs/workflows/semgrep.md: -------------------------------------------------------------------------------- 1 | ## Semgrep 2 | 3 | A CodeQL-like OSS linter. 4 | 5 | Example usage: 6 | 7 | ```yaml 8 | jobs: 9 | semgrep: 10 | uses: TykTechnologies/github-actions/.github/workflows/semgrep.yaml@main 11 | ``` 12 | 13 | Usage: unknown; Status: a bit out of date. 14 | 15 | Recent images use `semgrep/semgrep`, while this workflow still uses 16 | `returntocorp/semgrep`. Looks to be compatible at the time of writing. 17 | 18 | If you'd like to use semgrep: 19 | 20 | - reach out to @titpetric if you need help getting it working, 21 | - https://github.com/TykTechnologies/exp/tree/main/lsc 22 | - https://github.com/TykTechnologies/exp/actions/workflows/semgrep.yml 23 | 24 | The current state allows automating refactorings with semgrep, by using 25 | GitHub Actions automation to open up PRs against target repositories. 26 | 27 | Example outputs: 28 | 29 | - https://github.com/TykTechnologies/tyk/pull/6380 30 | - https://github.com/TykTechnologies/tyk-analytics/pull/4051 31 | 32 | There are several problem areas where semgrep could be used more extensively: 33 | 34 | - code cleanups to enforce consistent style 35 | - large-scale refactorings 36 | - ensuring code style compliance with new contributions 37 | - detecting bugs based on our own rules/bugs occurring 38 | -------------------------------------------------------------------------------- /.github/workflows/nancy.yaml: -------------------------------------------------------------------------------- 1 | name: Nancy Scan 2 | 3 | on: 4 | workflow_call: 5 | inputs: 6 | dir: 7 | required: false 8 | type: string 9 | secrets: 10 | ORG_GH_TOKEN: 11 | required: false 12 | 13 | jobs: 14 | nancy_scan: 15 | name: Sonatype Nexus 16 | runs-on: ubuntu-latest 17 | 18 | steps: 19 | - name: Check out code into the Go module directory 20 | uses: actions/checkout@v3 21 | 22 | - name: Set up Go 1.x in order to write go.list file 23 | uses: actions/setup-go@v3 24 | with: 25 | go-version: 1.17 26 | 27 | - name: Configure git access for Tyk's private Go modules 28 | env: 29 | GITHUB_PAT: ${{ secrets.ORG_GH_TOKEN }} 30 | run: | 31 | echo "https://$GITHUB_PAT:x-oauth-basic@github.com" >> ~/.git-credentials 32 | git config --global credential.helper store 33 | git config --global url."https://$GITHUB_PAT:x-oauth-basic@github.com".insteadOf "https://github.com" 34 | - name: Write Go List 35 | run: go list -json -m all > go.list 36 | working-directory: ./${{ inputs.dir }} 37 | 38 | - name: Nancy scan 39 | uses: sonatype-nexus-community/nancy-github-action@main 40 | with: 41 | goListFile: ${{ inputs.dir }}/go.list 42 | -------------------------------------------------------------------------------- /.github/actions/tests/ui-tests/action.yaml:
-------------------------------------------------------------------------------- 1 | name: 'Run UI Tests' 2 | description: 'Sets up Node.js environment and executes UI tests with Playwright using specified markers' 3 | author: 'konrad' 4 | inputs: 5 | ui_markers: 6 | description: 'Markers to filter tests' 7 | required: false 8 | default: ${{ matrix.envfiles.uimarkers }} 9 | db_type: 10 | description: 'Database used in the environment' 11 | required: false 12 | default: ${{ matrix.envfiles.db }} 13 | runs: 14 | using: "composite" 15 | steps: 16 | - name: Install Node.js 18.16 17 | uses: actions/setup-node@v4 18 | with: 19 | node-version: "18.16" 20 | cache-dependency-path: tyk-analytics/tests/ui 21 | cache: 'npm' 22 | - name: Execute UI tests 23 | working-directory: tyk-analytics/tests/ui 24 | id: test_execution 25 | shell: bash 26 | env: 27 | GW_URL: 'https://localhost:8080/' 28 | NODE_TLS_REJECT_UNAUTHORIZED: 0 29 | UI_MARKERS: ${{ inputs.ui_markers && format('--grep {0}', inputs.ui_markers ) || '' }} 30 | TYK_TEST_DB_TYPE: ${{ inputs.db_type }} 31 | run: | 32 | npm ci 33 | npx playwright install --with-deps chromium 34 | PLAYWRIGHT_JUNIT_OUTPUT_NAME=${XUNIT_REPORT_PATH} npx playwright test --project=chromium --reporter=junit,html $UI_MARKERS 35 | -------------------------------------------------------------------------------- /.github/actions/tests/ui-tests-report/action.yaml: -------------------------------------------------------------------------------- 1 | name: 'Upload UI Test Report to S3' 2 | description: 'Uploads Playwright UI test reports to S3 and shares the report link in GitHub step summary' 3 | author: 'konrad' 4 | inputs: 5 | aws_acces_key_id: 6 | description: 'AWS access key ID' 7 | required: true 8 | aws_secret_access_key: 9 | description: 'AWS secret access key' 10 | required: true 11 | matrix: 12 | description: 'Matrix for the test' 13 | required: false 14 | runs: 15 | using: "composite" 16 | steps: 17 | - name: Upload Playwright Test Report to S3 18 | shell: bash 19 | run: 20 | npm run upload_report_to_s3 21 | env: 22 | AWS_ACCESS_KEY_ID: ${{ inputs.aws_acces_key_id }} 23 | AWS_SECRET_ACCESS_KEY: ${{ inputs.aws_secret_access_key }} 24 | RUN_ID: '${{ github.event.repository.name }}/${{ github.run_id }}' 25 | working-directory: tyk-analytics/tests/ui 26 | 27 | - name: Share S3 report link into summary 28 | shell: bash 29 | run: | 30 | echo "# :clipboard: S3 UI Test REPORT" >> $GITHUB_STEP_SUMMARY 31 | echo "- Status: :no_entry_sign:" >> $GITHUB_STEP_SUMMARY 32 | echo "- [Link to report](https://tyk-qa-reports.s3.eu-central-1.amazonaws.com/${{ github.event.repository.name }}/${{ github.run_id }}/index.html)" >> $GITHUB_STEP_SUMMARY 33 | -------------------------------------------------------------------------------- /.github/workflows/sonarcloud.yaml: -------------------------------------------------------------------------------- 1 | name: Sonarcloud 2 | 3 | on: 4 | workflow_call: 5 | inputs: 6 | exclusions: 7 | required: false 8 | type: string 9 | secrets: 10 | SONAR_TOKEN: 11 | required: true 12 | GH_TOKEN: 13 | required: true 14 | 15 | jobs: 16 | sonarcloud: 17 | name: Sonarcloud 18 | runs-on: ubuntu-latest 19 | steps: 20 | - uses: actions/checkout@v4 21 | with: 22 | fetch-depth: 0 23 | - uses: actions/download-artifact@v4 24 | with: 25 | name: coverage 26 | - uses: actions/download-artifact@v4 27 | with: 28 | name: golangci-report 29 | - name: SonarCloud Scan 30 | uses: sonarsource/sonarcloud-github-action@master 31 | with: 32 | args: > 33 | 
-Dsonar.organization=tyktechnologies 34 | -Dsonar.projectKey=TykTechnologies_${{ github.event.repository.name }} 35 | -Dsonar.sources=. 36 | -Dsonar.exclusions=${{ github.event.inputs.exclusions }} 37 | -Dsonar.coverage.exclusions=**/*_test.go 38 | -Dsonar.test.inclusions=**/*_test.go 39 | -Dsonar.tests=. 40 | -Dsonar.go.coverage.reportPaths=*.cov 41 | -Dsonar.go.golangci-lint.reportPaths=golangci_lint.xml 42 | env: 43 | GITHUB_TOKEN: ${{ secrets.GH_TOKEN }} 44 | SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} 45 | -------------------------------------------------------------------------------- /sbom/merge.py: -------------------------------------------------------------------------------- 1 | import json 2 | import sys 3 | 4 | # Get the filenames from command line arguments 5 | filenames = sys.argv[1:] 6 | 7 | base_sbom = {} 8 | 9 | # Merge everything into the first provided sbom 10 | for idx, filename in enumerate(filenames): 11 | with open(filename, 'r') as file: 12 | sbom = json.load(file) 13 | 14 | if idx == 0: 15 | base_sbom = sbom 16 | else: 17 | appRef = sbom["metadata"]["component"]["bom-ref"] 18 | baseAppRef = base_sbom["metadata"]["component"]["bom-ref"] 19 | 20 | for didx, dep in enumerate(base_sbom["dependencies"]): 21 | if dep["ref"] == baseAppRef: 22 | dep["dependsOn"].append(appRef) 23 | 24 | base_sbom["components"].append(sbom["metadata"]["component"]) 25 | base_sbom["components"] += sbom["components"] 26 | base_sbom["dependencies"] += sbom["dependencies"] 27 | 28 | # Set groups 29 | for idx, cp in enumerate(base_sbom["components"]): 30 | if "purl" in cp: 31 | if cp["purl"].startswith("pkg:golang"): 32 | cp["group"] = "gomod" 33 | 34 | if cp["purl"].startswith("pkg:npm"): 35 | cp["group"] = "npm" 36 | 37 | if cp["purl"].startswith("pkg:deb"): 38 | cp["group"] = "deb" 39 | 40 | if "type" in cp and cp["type"] == "application": 41 | cp["group"] = "application" 42 | 43 | base_sbom["components"][idx] = cp 44 | 45 | print(json.dumps(base_sbom, indent=4)) 46 | -------------------------------------------------------------------------------- /sbom/gen_dep.py: -------------------------------------------------------------------------------- 1 | import json 2 | import sys 3 | import uuid 4 | import datetime 5 | import yaml 6 | 7 | # Get the filenames from command line arguments 8 | filenames = sys.argv[1:] 9 | 10 | baseRef = str(uuid.uuid4()) 11 | sbom = { 12 | "bomFormat": "CycloneDX", 13 | "specVersion": "1.4", 14 | "version": 1, 15 | "metadata": { 16 | "component": { 17 | "bom-ref": baseRef, 18 | "type": "file", 19 | "name": filenames[0], 20 | } 21 | }, 22 | "components":[], 23 | "dependencies":[] 24 | } 25 | 26 | sbom["serialNumber"] = "urn:uuid:" + str(uuid.uuid4()) 27 | sbom["metadata"]["timestamp"] = datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S+00:00") 28 | 29 | depRefs = [] 30 | 31 | try: 32 | with open(filenames[0], 'r') as file: 33 | spec = yaml.safe_load(file) 34 | for dep in spec["spec"]["dependsOnExt"]: 35 | for v in dep["versions"]: 36 | depUUID = str(uuid.uuid4()) 37 | depRefs.append(depUUID) 38 | sbom["components"].append({ 39 | "bom-ref": depUUID, 40 | "type": "application", 41 | "name": (dep["name"] + " " + str(v)), 42 | "version": str(v), 43 | "cpe": (dep["cpe"] + ":" + str(v)) 44 | }) 45 | except: 46 | pass 47 | 48 | sbom["dependencies"] = [ 49 | { 50 | "ref": baseRef, 51 | "dependsOn": depRefs 52 | } 53 | ] 54 | 55 | print(json.dumps(sbom, indent=4)) 56 | -------------------------------------------------------------------------------- 
/.github/workflows/branch-suggestion.yml: -------------------------------------------------------------------------------- 1 | name: Branch Suggestion for PRs 2 | 3 | on: 4 | pull_request: 5 | types: [opened, synchronize, reopened, ready_for_review] 6 | workflow_call: 7 | secrets: 8 | JIRA_TOKEN: 9 | required: true 10 | description: 'Pre-encoded JIRA token (base64(email:api_token))' 11 | 12 | permissions: 13 | pull-requests: write 14 | contents: read 15 | 16 | jobs: 17 | suggest-branches: 18 | runs-on: ubuntu-latest 19 | 20 | steps: 21 | - name: Checkout github-actions repository 22 | uses: actions/checkout@v4 23 | with: 24 | repository: TykTechnologies/github-actions 25 | ref: main 26 | 27 | - name: Setup Node.js 28 | uses: actions/setup-node@v4 29 | with: 30 | node-version: '20' 31 | 32 | - name: Install dependencies 33 | working-directory: branch-suggestion 34 | run: npm install 35 | 36 | - name: Install Visor 37 | run: npm install -g @probelabs/visor 38 | 39 | - name: Analyze PR and suggest branches 40 | working-directory: branch-suggestion 41 | env: 42 | JIRA_TOKEN: ${{ secrets.JIRA_TOKEN }} 43 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 44 | PR_TITLE: ${{ github.event.pull_request.title }} 45 | PR_NUMBER: ${{ github.event.pull_request.number }} 46 | REPOSITORY: ${{ github.repository }} 47 | BRANCH_NAME: ${{ github.head_ref }} 48 | run: | 49 | visor --config branch_suggestion.yml --debug --tags remote 50 | -------------------------------------------------------------------------------- /.github/actions/latest-versions/test.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -euo pipefail 3 | 4 | repos=("tyk" "tyk-analytics" "tyk-pump" "tyk-sink") 5 | actions=("pull_request" "push") 6 | 7 | # Set branches for each action 8 | branches_pull_request=("master" "release-5-lts" "release-4-lts" "release-2.3") 9 | branches_push=("ref/head/master" "ref/head/release-5-lts" "ref/head/release-4-lts" "ref/head/release-4.2") 10 | 11 | function process_combination { 12 | local repo=$1 13 | local action=$2 14 | local ref=$3 15 | local base_ref=$4 16 | 17 | if [[ -z "$ref" ]]; then 18 | branch="$base_ref" 19 | else 20 | branch="$ref" 21 | fi 22 | 23 | # Your logic here 24 | echo -e "### Processing combination: Repo=$repo, Action=$action, Branch=$branch ### \n" 25 | # Modify this line according to your needs 26 | bash get_tags.sh "$repo" "$action" "$ref" "$base_ref" 'my_build_sha' 27 | echo -e "---------------------------------------------------------------------\n" 28 | } 29 | 30 | for repo in "${repos[@]}"; do 31 | for action in "${actions[@]}"; do 32 | if [[ "$action" == "pull_request" ]]; then 33 | base_refs=("${branches_pull_request[@]}") 34 | elif [[ "$action" == "push" ]]; then 35 | refs=("${branches_push[@]}") 36 | else 37 | echo "Unknown action: $action" 38 | exit 1 39 | fi 40 | 41 | for base_ref in "${base_refs[@]}"; do 42 | for ref in "${refs[@]}"; do 43 | process_combination "$repo" "$action" "$ref" "$base_ref" 44 | done 45 | done 46 | done 47 | done 48 | -------------------------------------------------------------------------------- /.github/actions/latest-versions/action.yaml: -------------------------------------------------------------------------------- 1 | name: 'Calculate tests tags' 2 | 3 | description: >- 4 | Calculates corresponding CI image tags based on github events for a group of tyk repositories 5 | 6 | inputs: 7 | repo: 8 | description: Name of the repository where the workflow was triggered from 9 | 
github_event: 10 | description: Github event passed from main workflow 11 | github_ref: 12 | description: Github reference passed from main workflow 13 | github_base_ref: 14 | description: Github base reference passed from main workflow 15 | build_sha: 16 | description: Github sha for referencing the build tag for ci image 17 | 18 | outputs: 19 | tyk: 20 | description: "Tyk gateway ci image tag" 21 | value: ${{ steps.get-version.outputs.tyk }} 22 | tyk-analytics: 23 | description: "Tyk analytics ci image version" 24 | value: ${{ steps.get-version.outputs.tyk_analytics }} 25 | tyk-pump: 26 | description: "Tyk pump ci image version" 27 | value: ${{ steps.get-version.outputs.tyk_pump }} 28 | tyk-sink: 29 | description: "Tyk sink ci image version" 30 | value: ${{ steps.get-version.outputs.tyk_sink }} 31 | tyk-automated-tests: 32 | description: "Tyk automated tests ci image version" 33 | value: ${{ steps.get-version.outputs.tyk_automated_tests }} 34 | 35 | runs: 36 | using: "composite" 37 | steps: 38 | - run: echo "${{ github.action_path }}" >> $GITHUB_PATH 39 | shell: bash 40 | 41 | - name: Calculate versions 42 | id: get-version 43 | shell: bash 44 | run: get_tags.sh ${{ inputs.repo }} ${{ inputs.github_event }} ${{ inputs.github_ref }} ${{ inputs.github_base_ref }} ${{ inputs.build_sha }} 45 | -------------------------------------------------------------------------------- /docs/workflows/ci-docker-tools.md: -------------------------------------------------------------------------------- 1 | ## CI tooling 2 | 3 | We build a docker image from the CI pipeline in this repository that 4 | builds and installs all the CI tooling needed for the test pipelines. 5 | 6 | Providing the docker image avoids continuous compilation of the tools 7 | via `go install` or `go get`, decreasing resource usage on GitHub 8 | Actions. 9 | 10 | All the tools are built using a recent go version and `CGO_ENABLED=0`, 11 | enabling reuse for old releases. It's still possible to version the 12 | tooling against releases either inside the image, or by creating new 13 | versions of the docker image in the future. 14 | 15 | The images built are: 16 | 17 | - `tykio/ci-tools:latest`. 18 | 19 | The image is rebuilt weekly and on triggers from `exp/cmd`. 20 | 21 | To use the CI tools from any GitHub pipeline: 22 | 23 | ```yaml 24 | - name: 'Extract tykio/ci-tools:${{ matrix.tag }}' 25 | uses: shrink/actions-docker-extract@v3 26 | with: 27 | image: tykio/ci-tools:${{ matrix.tag }} 28 | path: /usr/local/bin/. 29 | destination: /usr/local/bin 30 | 31 | - run: gotestsum --version 32 | ``` 33 | 34 | The action 35 | [shrink/actions-docker-extract](https://github.com/shrink/actions-docker-extract) 36 | is used to download and extract the CI tools binaries into your CI 37 | workflow. The set of tools being provided can be adjusted in 38 | [docker/tools/latest/Dockerfile](https://github.com/TykTechnologies/tyk-github-actions/blob/main/docker/tools/latest/Dockerfile). 39 | 40 | A local Taskfile is available in `docker/tools/` that allows you to build 41 | the tools image locally. Changes are tested in PRs. 42 | 43 | Adoption: Internal use for PR workflows on the repository. 
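To extract a single binary instead of the full toolset, replace the `.` in `path` with the tool you need, as also documented in `docker/tools/README.md`:

```yaml
- name: 'Extract gocovmerge'
  uses: shrink/actions-docker-extract@v3
  with:
    image: tykio/ci-tools:latest
    path: /usr/local/bin/gocovmerge
    destination: /usr/local/bin
```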
44 | -------------------------------------------------------------------------------- /.github/actions/tests/test-controller/action.yaml: -------------------------------------------------------------------------------- 1 | name: 'Test Configuration Controller' 2 | description: 'Fetches test configuration parameters from an internal API based on variation, repository, and test type' 3 | author: 'konrad' 4 | inputs: 5 | variation: 6 | description: 'Variation of data configs, e.g. prod-variations' 7 | required: true 8 | base_ref: 9 | description: 'Base ref for the test' 10 | required: true 11 | test_type: 12 | description: 'Type of test, e.g. api or ui' 13 | required: true 14 | outputs: 15 | envfiles: 16 | description: 'Environment files for the test' 17 | value: ${{ steps.params.outputs.envfiles }} 18 | pump: 19 | description: 'Pump image for the test' 20 | value: ${{ steps.params.outputs.pump }} 21 | sink: 22 | description: 'Sink image for the test' 23 | value: ${{ steps.params.outputs.sink }} 24 | runs: 25 | using: "composite" 26 | steps: 27 | - name: fetch env from tyk-pro 28 | shell: bash 29 | id: params 30 | env: 31 | BASE_REF: ${{ inputs.base_ref }} 32 | VARIATION: ${{ inputs.variation }} 33 | REPO_NAME: ${{ github.event.repository.name }} 34 | TEST_TYPE: ${{ inputs.test_type }} 35 | TRIGGER: ${{ github.event_name }} 36 | run: | 37 | set -eo pipefail 38 | curl -s --retry 5 --retry-delay 10 --fail-with-body "http://tui.internal.dev.tyk.technology/v2/$VARIATION/$REPO_NAME/$BASE_REF/$TRIGGER/$TEST_TYPE.gho" | tee -a "$GITHUB_OUTPUT" 39 | if ! [[ $VARIATION =~ prod ]] ;then 40 | echo "::warning file=.github/workflows/release.yml,line=24,col=1,endColumn=8::Using non-prod variation" 41 | echo "### :warning: You are using VARIATION=${VARIATION} in test-controller-{{ .test }}" >> $GITHUB_STEP_SUMMARY 42 | fi 43 | -------------------------------------------------------------------------------- /.github/actions/tests/api-tests/action.yaml: -------------------------------------------------------------------------------- 1 | name: 'Run API Tests' 2 | description: 'Sets up Python environment and executes API tests with pytest using specified markers' 3 | author: 'konrad' 4 | inputs: 5 | user_api_secret: 6 | description: 'API secret of created user' 7 | required: true 8 | api_markers: 9 | description: 'Markers to filter tests' 10 | required: false 11 | default: ${{ matrix.envfiles.apimarkers }} 12 | runs: 13 | using: "composite" 14 | steps: 15 | - name: Set up Python 16 | uses: actions/setup-python@v5 17 | with: 18 | cache: 'pip' 19 | python-version: '3.10' 20 | - name: Execute API tests 21 | id: test_execution 22 | shell: bash 23 | working-directory: tyk-analytics/tests/api 24 | env: 25 | USER_API_SECRET: ${{ inputs.user_api_secret }} 26 | API_MARKERS: ${{ inputs.api_markers }} 27 | run: | 28 | pytest="pytest --ci --random-order --force-flaky --no-success-flaky-report --maxfail=3 --junitxml=${XUNIT_REPORT_PATH} --cache-clear --ignore=./tests/mdcb -v --log-cli-level=ERROR" 29 | pip install -r requirements.txt 30 | cat >pytest.env <<-EOF 31 | TYK_TEST_BASE_URL=http://localhost:3000/ 32 | TYK_TEST_GW_URL=https://localhost:8080/ 33 | TYK_TEST_GW_1_ALFA_URL=https://localhost:8181/ 34 | TYK_TEST_GW_1_BETA_URL=https://localhost:8182/ 35 | TYK_TEST_GW_2_ALFA_URL=https://localhost:8281/ 36 | TYK_TEST_GW_2_BETA_URL=https://localhost:8282/ 37 | TYK_TEST_MONGODB=localhost:27017 38 | TYK_TEST_REDIS=localhost 39 | TYK_TEST_DB_ADMIN=12345 40 | TYK_TEST_GW_SECRET=352d20ee67be67f6340b4c0605b044b7 41 | 
TYK_TEST_DB_NAME=tyk_analytics 42 | TYK_TEST_FEDERATION_HOST=federation 43 | TYK_TEST_GRAPHQL_FAKER_HOST=graphql-faker 44 | GATEWAY_CONTAINER_NAME=tyk 45 | EOF 46 | env $(cat pytest.env | xargs) $pytest -m "${{ inputs.api_markers }}" 47 | -------------------------------------------------------------------------------- /.github/actions/latest-versions/get_tags.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -euo pipefail 3 | 4 | # GITHUB_OUTPUT='/dev/tty' 5 | REPO_LIST=("tyk" "tyk-analytics" "tyk-pump" "tyk-sink" "tyk-automated-tests") 6 | 7 | current_repo=${1##*/} 8 | event=${2} 9 | ref=${3} 10 | base_ref=${4} 11 | commit_sha=${5} 12 | 13 | function get_latest_tag() { 14 | tyk_repo=${1} 15 | git -c 'versionsort.suffix=-' ls-remote --exit-code --refs --sort='version:refname' --tags https://github.com/TykTechnologies/${tyk_repo}.git '*.*.*' | egrep 'v[0-9]+.[0-9]+.[0-9]+$' | tail -1 | cut -d '/' -f3 16 | } 17 | 18 | function bulk_set_tag() { 19 | default_tag=${1} 20 | for repository in "${REPO_LIST[@]}";do 21 | repository=${repository//-/_} 22 | echo "$repository=$default_tag" >> $GITHUB_OUTPUT 23 | echo "#DEBUG# $repository=$default_tag" 24 | done 25 | } 26 | 27 | if [ $event == 'pull_request' ];then 28 | branch=${base_ref} 29 | else 30 | branch=${ref##*/} 31 | fi 32 | 33 | # release-x-case = branch 34 | # optional to only accept release-x-lts cases use ^release-[0-9]-lts$ 35 | if [ "$current_repo" == 'tyk' -o "$current_repo" == 'tyk-analytics' ] && [[ "$branch" =~ ^release- ]]; then 36 | echo "tyk=$branch" >> $GITHUB_OUTPUT 37 | echo "#DEBUG# tyk=$branch" 38 | echo "tyk_analytics=$branch" >> $GITHUB_OUTPUT 39 | echo "#DEBUG# tyk_analytics=$branch" 40 | echo "tyk_pump=$(get_latest_tag 'tyk-pump')" >> $GITHUB_OUTPUT 41 | echo "#DEBUG# tyk_pump=$(get_latest_tag 'tyk-pump')" 42 | echo "tyk_sink=$(get_latest_tag 'tyk-sink')" >> $GITHUB_OUTPUT 43 | echo "#DEBUG# tyk_sink=$(get_latest_tag 'tyk-sink')" 44 | if [[ "$branch" =~ ^release-[0-9]-lts$ ]];then 45 | echo "tyk_automated_tests=$branch" >> $GITHUB_OUTPUT 46 | echo "#DEBUG# tyk_automated_tests=$branch" 47 | else 48 | echo "tyk_automated_tests=master" >> $GITHUB_OUTPUT 49 | echo "#DEBUG# tyk_automated_tests=master" 50 | fi 51 | 52 | else #default to master case 53 | bulk_set_tag master 54 | fi 55 | 56 | # Override always with build_tag, does not contain ecr URL 57 | current_repo=${current_repo//-/_} 58 | echo "$current_repo=sha-$commit_sha" >> $GITHUB_OUTPUT 59 | echo "#DEBUG# $current_repo=sha-$commit_sha" 60 | 61 | -------------------------------------------------------------------------------- /branch-suggestion/scripts/github/__tests__/add-pr-comment.test.js: -------------------------------------------------------------------------------- 1 | import { describe, it, expect } from 'vitest'; 2 | import { parseRepo, COMMENT_MARKER } from '../add-pr-comment.js'; 3 | 4 | describe('parseRepo', () => { 5 | it('should parse valid repo string', () => { 6 | const result = parseRepo('TykTechnologies/tyk'); 7 | expect(result).toEqual({ 8 | owner: 'TykTechnologies', 9 | repo: 'tyk' 10 | }); 11 | }); 12 | 13 | it('should parse repo with different owner', () => { 14 | const result = parseRepo('octocat/hello-world'); 15 | expect(result).toEqual({ 16 | owner: 'octocat', 17 | repo: 'hello-world' 18 | }); 19 | }); 20 | 21 | it('should handle repo names with hyphens', () => { 22 | const result = parseRepo('TykTechnologies/tyk-identity-broker'); 23 | expect(result).toEqual({ 24 | owner: 
'TykTechnologies', 25 | repo: 'tyk-identity-broker' 26 | }); 27 | }); 28 | 29 | it('should handle repo names with underscores', () => { 30 | const result = parseRepo('owner/repo_name'); 31 | expect(result).toEqual({ 32 | owner: 'owner', 33 | repo: 'repo_name' 34 | }); 35 | }); 36 | 37 | it('should throw error for invalid format - no slash', () => { 38 | expect(() => parseRepo('invalid-repo')).toThrow('Repository must be in format "owner/repo"'); 39 | }); 40 | 41 | it('should throw error for invalid format - multiple slashes', () => { 42 | expect(() => parseRepo('owner/repo/extra')).toThrow('Repository must be in format "owner/repo"'); 43 | }); 44 | 45 | it('should throw error for empty string', () => { 46 | expect(() => parseRepo('')).toThrow('Repository must be in format "owner/repo"'); 47 | }); 48 | 49 | it('should handle slash at beginning - empty owner', () => { 50 | const result = parseRepo('/repo'); 51 | expect(result).toEqual({ 52 | owner: '', 53 | repo: 'repo' 54 | }); 55 | }); 56 | 57 | it('should handle slash at end - empty repo', () => { 58 | const result = parseRepo('owner/'); 59 | expect(result).toEqual({ 60 | owner: 'owner', 61 | repo: '' 62 | }); 63 | }); 64 | }); 65 | 66 | describe('COMMENT_MARKER', () => { 67 | it('should have correct marker value', () => { 68 | expect(COMMENT_MARKER).toBe(''); 69 | }); 70 | }); 71 | -------------------------------------------------------------------------------- /.github/workflows/ci-docker-tools.yml: -------------------------------------------------------------------------------- 1 | # yamllint disable rule:line-length 2 | --- 3 | name: CI tooling 4 | 5 | # This workflow builds the CI tooling from docker/tools/ 6 | # within this repository. It runs whenever a file inside 7 | # is modified, and on a weekly schedule to get updates. 8 | 9 | on: # yamllint disable-line rule:truthy 10 | workflow_dispatch: 11 | repository_dispatch: 12 | types: ['exp-cmd'] 13 | pull_request: 14 | paths: 15 | - 'docker/tools/**' 16 | - '.github/workflows/ci-docker-tools.yml' 17 | push: 18 | paths: 19 | - 'docker/tools/**' 20 | - '.github/workflows/ci-docker-tools.yml' 21 | branches: 22 | - 'main' 23 | schedule: 24 | - cron: '0 0 * * 1' # Run every Monday at 12:00 AM UTC 25 | 26 | # Make sure to cancel previous job runs in case a PR 27 | # gets new commits. Changes being merged to the main 28 | # branch will continue to run. 
29 | 30 | concurrency: 31 | group: ${{ github.head_ref || github.run_id }}-docker-tools 32 | cancel-in-progress: true 33 | 34 | jobs: 35 | tools-latest: 36 | name: 'Build tykio/ci-tools:${{ matrix.tag }}' 37 | permissions: 38 | id-token: write 39 | strategy: 40 | fail-fast: false 41 | matrix: 42 | tag: 43 | - 'latest' 44 | 45 | runs-on: ubuntu-latest 46 | 47 | steps: 48 | - uses: actions/checkout@v3 49 | - uses: docker/setup-buildx-action@v2 50 | - uses: docker/login-action@v2 51 | with: 52 | username: ${{ secrets.DOCKER_USERNAME }} 53 | password: ${{ secrets.DOCKER_PASSWORD }} 54 | 55 | - name: 'Build tykio/ci-tools:${{ matrix.tag }}' 56 | uses: docker/build-push-action@v4 57 | with: 58 | push: ${{ github.ref_name == 'main' || github.event_name == 'workflow_dispatch' }} 59 | pull: true 60 | load: ${{ github.ref_name != 'main' && github.event_name != 'workflow_dispatch' }} 61 | no-cache: true 62 | context: docker/tools/${{ matrix.tag }} 63 | tags: tykio/ci-tools:${{ matrix.tag }} 64 | 65 | - run: docker image ls 66 | 67 | - name: 'Extract tykio/ci-tools:${{ matrix.tag }}' 68 | uses: shrink/actions-docker-extract@v3 69 | with: 70 | image: tykio/ci-tools:${{ matrix.tag }} 71 | path: /usr/local/bin/. 72 | destination: /usr/local/bin 73 | 74 | - run: gotestsum --version 75 | -------------------------------------------------------------------------------- /.github/workflows/gotest.yaml: -------------------------------------------------------------------------------- 1 | name: Go test 2 | 3 | on: 4 | workflow_call: 5 | inputs: 6 | go: 7 | type: string 8 | default: "1.17.x" 9 | mongo: 10 | type: string 11 | redis: 12 | type: string 13 | test-options: 14 | type: string 15 | default: "-race -count=1 -failfast -v" 16 | 17 | jobs: 18 | gotest: 19 | name: Go Test 20 | runs-on: ubuntu-latest 21 | steps: 22 | - uses: actions/setup-go@v3 23 | with: 24 | go-version: ${{ inputs.go }} 25 | - uses: actions/checkout@v4 26 | 27 | - name: Start Redis 28 | if: ${{ inputs.redis != '' }} 29 | uses: supercharge/redis-github-action@1.2.0 30 | with: 31 | redis-version: '${{ inputs.redis }}' 32 | 33 | - name: Start MongoDB 34 | if: ${{ inputs.mongo != '' }} 35 | uses: supercharge/mongodb-github-action@1.2.0 36 | with: 37 | mongodb-version: '${{ inputs.mongo }}' 38 | 39 | - name: Cache 40 | uses: actions/cache@v3 41 | with: 42 | path: ~/go/pkg/mod 43 | key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} 44 | restore-keys: | 45 | ${{ runner.os }}-go- 46 | 47 | - name: Set up test tooling 48 | run: | 49 | go install gotest.tools/gotestsum@latest 50 | 51 | - name: Go Test 52 | id: test 53 | run: | 54 | PKGS="$(go list ./...)" 55 | OPTS="${{ inputs.test-options }}" 56 | 57 | for pkg in ${PKGS}; do 58 | tags="" 59 | if [[ ${pkg} == *"goplugin" ]]; then 60 | tags="--tags 'goplugin'" 61 | fi 62 | 63 | coveragefile=`echo "$pkg" | awk -F/ '{print $NF}'` 64 | 65 | echo go test ${OPTS} -json -timeout 15m -coverprofile=${coveragefile}.cov ${pkg} ${tags} 66 | 67 | gotestsum --junitfile ${coveragefile}.xml --raw-command go test ${OPTS} --json -timeout 15m -coverprofile=${coveragefile}.cov ${pkg} ${tags} 68 | done 69 | 70 | - uses: actions/upload-artifact@v4 71 | with: 72 | name: coverage 73 | path: "*cov" 74 | - uses: actions/upload-artifact@v4 75 | if: ${{ always() }} 76 | with: 77 | name: junit 78 | path: "*xml" 79 | - name: Github report view 80 | if: ${{ always() }} 81 | uses: phoenix-actions/test-reporting@v8 82 | with: 83 | name: Unit Test Results 84 | path: "*.xml" 85 | reporter: java-junit 86 | 
-------------------------------------------------------------------------------- /docker/tools/latest/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM golang:1.25 AS tools-build 2 | 3 | ENV CGO_ENABLED=0 4 | ENV GOBIN=/usr/local/bin 5 | ENV GOPROXY=https://proxy.golang.org,direct 6 | WORKDIR /usr/local/bin 7 | 8 | RUN --mount=type=cache,mode=0755,target=/go/pkg/mod go install github.com/TykTechnologies/exp/cmd/httpbin-logserver@main 9 | RUN --mount=type=cache,mode=0755,target=/go/pkg/mod go install github.com/TykTechnologies/exp/cmd/workflow-lint@main 10 | RUN --mount=type=cache,mode=0755,target=/go/pkg/mod go install github.com/TykTechnologies/exp/cmd/schema-gen@main 11 | RUN --mount=type=cache,mode=0755,target=/go/pkg/mod go install github.com/TykTechnologies/exp/cmd/summary@main 12 | RUN --mount=type=cache,mode=0755,target=/go/pkg/mod go install github.com/TykTechnologies/exp/cmd/modcheck@main 13 | RUN --mount=type=cache,mode=0755,target=/go/pkg/mod go install github.com/TykTechnologies/exp/cmd/go-fsck@main 14 | RUN --mount=type=cache,mode=0755,target=/go/pkg/mod go install go.uber.org/mock/mockgen@v0.6.0 15 | RUN --mount=type=cache,mode=0755,target=/go/pkg/mod go install golang.org/x/tools/cmd/goimports@latest 16 | RUN --mount=type=cache,mode=0755,target=/go/pkg/mod go install github.com/golangci/golangci-lint/cmd/golangci-lint@latest 17 | RUN --mount=type=cache,mode=0755,target=/go/pkg/mod go install github.com/fatih/faillint@latest 18 | RUN --mount=type=cache,mode=0755,target=/go/pkg/mod go install gotest.tools/gotestsum@latest 19 | RUN --mount=type=cache,mode=0755,target=/go/pkg/mod go install github.com/wadey/gocovmerge@latest 20 | RUN --mount=type=cache,mode=0755,target=/go/pkg/mod go install github.com/evilmartians/lefthook@latest 21 | RUN --mount=type=cache,mode=0755,target=/go/pkg/mod go install github.com/bwplotka/mdox@latest 22 | 23 | ARG VENOM_VERSION=v1.2.0 24 | RUN curl -L https://github.com/ovh/venom/releases/download/${VENOM_VERSION}/venom.linux-amd64 -o /usr/local/bin/venom && chmod +x /usr/local/bin/venom 25 | 26 | ARG TASK_VERSION=v3.27.1 27 | RUN curl -sSL https://github.com/go-task/task/releases/download/${TASK_VERSION}/task_linux_amd64.tar.gz | tar -zxv 28 | 29 | # Final tools image 30 | # 31 | # This uses the `scratch` as the image base. The image 32 | # doesn't need a base OS, as it's intended to be used 33 | # in CI pipelines that do. Example action: 34 | # 35 | # ``` 36 | # - uses: shrink/actions-docker-extract@v3 37 | # with: 38 | # image: 39 | # path: /usr/local/bin/. 40 | # destination: /usr/local/bin 41 | # ``` 42 | # 43 | # https://github.com/marketplace/actions/docker-extract 44 | 45 | FROM scratch 46 | COPY --from=tools-build /usr/local/bin/ /usr/local/bin/ 47 | COPY --from=busybox:latest /bin/ls /bin/ls 48 | 49 | ENTRYPOINT ["/bin/ls"] 50 | -------------------------------------------------------------------------------- /docker/tools/README.md: -------------------------------------------------------------------------------- 1 | # CI tools 2 | 3 | Providing the docker image with CI tooling avoids continous compilation 4 | of the tools from using `go install`, decreasing resource usage on GitHub 5 | actions. Adopters can extract single binaries for use. 6 | 7 | This uses Go 1.23 (or latest version) to build the CI tooling. 8 | 9 | ## Development 10 | 11 | We build a docker image from the CI pipeline in this repository that 12 | builds and installs all the CI tooling needed for the test pipelines. 
13 | 14 | - Image rebuilds based on a schedule, 1x / week 15 | - Image rebuilds on changes from the exp/cmd repository 16 | 17 | The experimental repository holds several tools that support static code 18 | analysis or aid in automation tasks. 19 | 20 | All the tools are built using a recent Go version and `CGO_ENABLED=0`, 21 | enabling reuse for old releases. It's still possible to version the 22 | tooling against releases either inside the image, or by creating new 23 | versions of the docker image in the future. 24 | 25 | ## Local testing 26 | 27 | Run `task` to build all local images. It will build: 28 | 29 | - `internal/ci-tools:latest` 30 | 31 | Inspect other taskfile targets with `task -l`. 32 | 33 | ## CI tools 34 | 35 | The images built are: 36 | 37 | - `tykio/ci-tools:latest`. 38 | 39 | The image is rebuilt weekly. 40 | 41 | To use the CI tools from any GitHub pipeline: 42 | 43 | ```yaml 44 | - name: 'Extract CI tools' 45 | uses: shrink/actions-docker-extract@v3 46 | with: 47 | image: tykio/ci-tools:latest 48 | path: /usr/local/bin/. 49 | destination: /usr/local/bin 50 | 51 | - run: gotestsum --version 52 | ``` 53 | 54 | To use a single tool, replace the `.` in the `path` value with the binary 55 | you want. This allows you to extract only what's used in the pipeline, 56 | for example, if you only need `gocovmerge`: 57 | 58 | ```yaml 59 | - name: 'Extract gocovmerge' 60 | uses: shrink/actions-docker-extract@v3 61 | with: 62 | image: tykio/ci-tools:latest 63 | path: /usr/local/bin/gocovmerge 64 | destination: /usr/local/bin 65 | 66 | - run: command -v gocovmerge 67 | ``` 68 | 69 | ## References 70 | 71 | - Uses [shrink/actions-docker-extract](https://github.com/shrink/actions-docker-extract) 72 | - Tools installed are configured via [docker/tools/latest/Dockerfile](https://github.com/TykTechnologies/github-actions/blob/main/docker/tools/latest/Dockerfile#L8-L20) 73 | - [Used in Tyk Gateway](https://github.com/TykTechnologies/tyk/blob/master/.github/workflows/ci-tests.yml#L62) 74 | - [Used in Tyk Dashboard - golangci-lint](https://github.com/TykTechnologies/tyk-analytics/blob/master/.github/workflows/ci-tests.yml#L39) 75 | - [Used in Tyk Dashboard - goimports](https://github.com/TykTechnologies/tyk-analytics/blob/master/.github/workflows/ci-tests.yml#L142) 76 | -------------------------------------------------------------------------------- /.github/actions/tests/reporting/action.yaml: -------------------------------------------------------------------------------- 1 | name: 'Test Reporting and Logs' 2 | description: 'Generates metadata reports, uploads test reports to S3, and collects Docker logs when tests fail' 3 | author: 'konrad' 4 | inputs: 5 | report_xml: 6 | description: 'Should xunit report be uploaded' 7 | required: false 8 | default: 'true' 9 | execution_status: 10 | description: 'Should docker logs be uploaded' 11 | required: true 12 | matrix: 13 | description: 'Matrix for the test' 14 | required: false 15 | runs: 16 | using: "composite" 17 | steps: 18 | - name: Generate metadata and upload test reports 19 | shell: bash 20 | id: metadata_report 21 | if: always() 22 | env: 23 | REPORT_NAME: ${{ github.repository }}_${{ github.run_id }}_${{ github.run_attempt }}-${{steps.env_up.outputs.ts}} 24 | METADATA_REPORT_PATH: metadata.toml 25 | MATRIX: ${{ toJson(env.matrix) }} 26 | run: | 27 | # Generate metadata report 28 | set -eo pipefail 29 | echo "[metadata] 30 | repo = ${{ github.repository }} 31 | branch = ${{ github.ref }} 32 | commit = ${{ github.sha }} 33 | test_suite_version = 
$BASE_REF 34 | test_suite_name = ${{ github.job }} 35 | test_suite_run = ${{ github.run_id }}-${{ github.run_attempt }} 36 | db = ${{ matrix.envfiles.db }} 37 | conf = ${{ matrix.envfiles.config }} 38 | cache = ${{ matrix.envfiles.cache }} 39 | pump_compatibility = ${{ matrix.pump }} 40 | sink_compatibility = ${{ matrix.sink }} 41 | " | tee ${METADATA_REPORT_PATH} 42 | aws s3 cp ${XUNIT_REPORT_PATH} s3://assets.dev.tyk.technology/testreports/${REPORT_NAME#*/}.xml 43 | aws s3 cp ${METADATA_REPORT_PATH} s3://assets.dev.tyk.technology/testreports/${REPORT_NAME#*/}.metadata.toml 44 | - name: Docker logs for all components 45 | if: always() && inputs.execution_status != 'success' 46 | working-directory: auto 47 | shell: bash 48 | env: 49 | pull_policy: 'if_not_present' 50 | ECR: ${{ steps.ecr.outputs.registry }} 51 | run: | 52 | docker compose -p auto -f pro-ha.yml -f deps_pro-ha.yml -f ${{ matrix.envfiles.db }}.yml -f ${{ matrix.envfiles.cache }}.yml --env-file versions.env --profile all logs | sort > ${{ github.workspace }}/docker-compose.log 53 | echo "::group::DockerLogs" 54 | cat ${{ github.workspace }}/docker-compose.log 55 | echo "::endgroup::" 56 | - name: Upload compose logs 57 | uses: actions/upload-artifact@v4 58 | if: inputs.execution_status != 'success' 59 | with: 60 | name: docker-compose-logs-${{ github.job }}-${{ matrix.envfiles.db }}-${{ matrix.envfiles.config }}-${{ github.run_id }} 61 | path: ${{ github.workspace }}/docker-compose.log 62 | retention-days: 10 63 | overwrite: true 64 | -------------------------------------------------------------------------------- /.github/workflows/godoc.yml: -------------------------------------------------------------------------------- 1 | name: Print Go API changes 2 | 3 | on: 4 | workflow_call: 5 | secrets: 6 | ORG_GH_TOKEN: 7 | required: true 8 | inputs: 9 | go-version: 10 | type: string 11 | default: "1.19" 12 | 13 | env: 14 | GOPRIVATE: github.com/TykTechnologies 15 | 16 | jobs: 17 | test: 18 | name: Detect Go API changes 19 | runs-on: ubuntu-latest 20 | 21 | steps: 22 | - name: Use GitHub Token 23 | env: 24 | TOKEN: ${{ secrets.ORG_GH_TOKEN }} 25 | run: > 26 | git config --global url."https://${TOKEN}@github.com".insteadOf "https://github.com" 27 | 28 | - name: Checkout repo 29 | uses: TykTechnologies/github-actions/.github/actions/checkout-pr@main 30 | with: 31 | token: ${{ secrets.ORG_GH_TOKEN }} 32 | 33 | - name: Checkout exp 34 | uses: actions/checkout@v3 35 | with: 36 | fetch-depth: 1 37 | repository: TykTechnologies/exp 38 | ref: main 39 | path: exp 40 | 41 | - name: Setup Go 42 | uses: actions/setup-go@v4 43 | with: 44 | go-version: ${{ inputs.go-version }} 45 | 46 | - name: Install Task 47 | uses: arduino/setup-task@v1 48 | with: 49 | version: 3 50 | 51 | - name: Set up comment author 52 | run: | 53 | git config --local user.email "github-actions[bot]@users.noreply.github.com" 54 | git config --local user.name "github-actions[bot]" 55 | 56 | - name: Collect API docs 57 | run: | 58 | cp ./exp/.taskfiles/godoc/Taskfile.yml ./Taskfile.godoc.yml 59 | 60 | go mod tidy 61 | task -t Taskfile.godoc.yml > current.txt 62 | 63 | git checkout -- . 
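# Reset the working tree, then fetch and check out the PR base branch and
# regenerate the API docs, so the diff step below can compare prev.txt
# (base branch) against current.txt (this PR).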
64 | git fetch --depth=1 origin ${{ github.base_ref }} 65 | git checkout ${{ github.base_ref }} 66 | 67 | go mod tidy 68 | task -t Taskfile.godoc.yml > prev.txt 69 | 70 | - name: Diff API docs 71 | id: api-check 72 | run: | 73 | set +e 74 | diff -u prev.txt current.txt > changes.txt 75 | echo "diff-output<<EOF" >> $GITHUB_OUTPUT 76 | cat changes.txt >> $GITHUB_OUTPUT 77 | echo "EOF" >> $GITHUB_OUTPUT 78 | 79 | - name: Find Comment 80 | uses: peter-evans/find-comment@v2 81 | id: fc 82 | with: 83 | issue-number: ${{ github.event.pull_request.number }} 84 | comment-author: 'github-actions[bot]' 85 | body-includes: API Changes 86 | 87 | - name: Create or update comment 88 | uses: peter-evans/create-or-update-comment@v3 89 | with: 90 | comment-id: ${{ steps.fc.outputs.comment-id }} 91 | issue-number: ${{ github.event.pull_request.number }} 92 | body: | 93 | API Changes 94 | ```diff 95 | ${{ steps.api-check.outputs.diff-output || 'no api changes detected' }} 96 | ``` 97 | edit-mode: replace 98 | -------------------------------------------------------------------------------- /.github/actions/tests/choose-test-branch/action.yaml: -------------------------------------------------------------------------------- 1 | name: 'Checkout test code' 2 | description: 'Checks out the api or ui test suite from tyk-analytics, selecting the branch or tag that matches the triggering event' 3 | author: 'konrad' 4 | inputs: 5 | test_folder: 6 | description: 'Folder with tests: api or ui' 7 | required: true 8 | branch: 9 | description: 'Branch with code. If not provided, it will be taken from the event' 10 | required: false 11 | org_gh_token: 12 | description: 'GitHub token for API access' 13 | required: true 14 | runs: 15 | using: "composite" 16 | steps: 17 | - uses: actions/checkout@v4 18 | with: 19 | repository: TykTechnologies/tyk-analytics 20 | path: tyk-analytics 21 | token: ${{ inputs.org_gh_token }} 22 | fetch-depth: 0 23 | sparse-checkout: tests/${{ inputs.test_folder }} 24 | - name: Checkout proper branch 25 | if: ${{ github.event.repository.name != 'tyk-analytics' }} 26 | working-directory: tyk-analytics/tests/${{ inputs.test_folder }} 27 | shell: bash 28 | run: | 29 | echo "Checking out proper branch..." 30 | if [[ ${{ github.event_name }} == "pull_request" ]]; then 31 | PR_BRANCH=${{ github.event.pull_request.head.ref }} 32 | TARGET_BRANCH=${{ github.event.pull_request.base.ref }} 33 | echo "Looking for PR_BRANCH:$PR_BRANCH or TARGET_BRANCH:$TARGET_BRANCH..." 34 | if git rev-parse --verify "origin/$PR_BRANCH" >/dev/null 2>&1; then 35 | echo "PR branch $PR_BRANCH exists. Checking out..." 36 | git checkout "$PR_BRANCH" 37 | elif git rev-parse --verify "origin/$TARGET_BRANCH" >/dev/null 2>&1; then 38 | echo "Target branch $TARGET_BRANCH exists. Checking out..." 39 | git checkout "$TARGET_BRANCH" 40 | fi 41 | fi 42 | if [[ ${{ github.event_name }} == "push" ]]; then 43 | # Check if this is a tag push 44 | if [[ "${{ github.ref }}" == refs/tags/* ]] || [[ "${{ github.ref }}" =~ ^refs/tags/ ]]; then 45 | TAG_NAME=${{ github.ref_name }} 46 | echo "This is a tag push. Tag name: $TAG_NAME" 47 | # Try to check out the tag directly 48 | if git rev-parse --verify "$TAG_NAME" >/dev/null 2>&1; then 49 | echo "Tag $TAG_NAME exists. Checking out..." 50 | git checkout "$TAG_NAME" 51 | else 52 | echo "Tag $TAG_NAME not found in this repository." 53 | fi 54 | else 55 | # Regular branch push 56 | PUSH_BRANCH=${{ github.ref_name }} 57 | echo "Looking for PUSH_BRANCH:$PUSH_BRANCH..." 
58 | if git rev-parse --verify "origin/$PUSH_BRANCH" >/dev/null 2>&1; then 59 | echo "Push branch $PUSH_BRANCH exists. Checking out..." 60 | git checkout "$PUSH_BRANCH" 61 | fi 62 | fi 63 | fi 64 | if [[ -n "${{ inputs.branch }}" ]]; then 65 | echo "Checking out branch ${{ inputs.branch }}..." 66 | git checkout "${{ inputs.branch }}" 67 | fi 68 | echo "Current commit: $(git rev-parse HEAD)" 69 | -------------------------------------------------------------------------------- /.github/actions/latest-versions/versions.txt: -------------------------------------------------------------------------------- 1 | ### Processing combination: Repo=tyk, Action=pull_request, Branch=master ### 2 | 3 | BRANCH =master 4 | --------------------------------------------------------------------- 5 | 6 | ### Processing combination: Repo=tyk, Action=pull_request, Branch=release-5-lts ### 7 | 8 | BRANCH =release-5-lts 9 | --------------------------------------------------------------------- 10 | 11 | ### Processing combination: Repo=tyk, Action=pull_request, Branch=release-4-lts ### 12 | 13 | BRANCH =release-4-lts 14 | --------------------------------------------------------------------- 15 | 16 | ### Processing combination: Repo=tyk-analytics, Action=pull_request, Branch=master ### 17 | 18 | BRANCH =master 19 | --------------------------------------------------------------------- 20 | 21 | ### Processing combination: Repo=tyk-analytics, Action=pull_request, Branch=release-5-lts ### 22 | 23 | BRANCH =release-5-lts 24 | --------------------------------------------------------------------- 25 | 26 | ### Processing combination: Repo=tyk-analytics, Action=pull_request, Branch=release-4-lts ### 27 | 28 | BRANCH =release-4-lts 29 | --------------------------------------------------------------------- 30 | 31 | ### Processing combination: Repo=tyk-pump, Action=pull_request, Branch=master ### 32 | 33 | BRANCH =master 34 | --------------------------------------------------------------------- 35 | 36 | ### Processing combination: Repo=tyk-pump, Action=pull_request, Branch=release-5-lts ### 37 | 38 | BRANCH =release-5-lts 39 | --------------------------------------------------------------------- 40 | 41 | ### Processing combination: Repo=tyk-pump, Action=pull_request, Branch=release-4-lts ### 42 | 43 | BRANCH =release-4-lts 44 | --------------------------------------------------------------------- 45 | 46 | ### Processing combination: Repo=tyk-sink, Action=pull_request, Branch=master ### 47 | 48 | BRANCH =master 49 | --------------------------------------------------------------------- 50 | 51 | ### Processing combination: Repo=tyk-sink, Action=pull_request, Branch=release-5-lts ### 52 | 53 | BRANCH =release-5-lts 54 | --------------------------------------------------------------------- 55 | 56 | ### Processing combination: Repo=tyk-sink, Action=pull_request, Branch=release-4-lts ### 57 | 58 | BRANCH =release-4-lts 59 | --------------------------------------------------------------------- 60 | 61 | tyk=master 62 | tyk-analytics=master 63 | tyk-pump=master 64 | tyk-sink=master 65 | tyk=sha-my_build_sha 66 | tyk=release-5-lts 67 | tyk-analytics=release-5-lts 68 | tyk-pump=v1.8.3 69 | tyk-sink=v2.4.1 70 | tyk=sha-my_build_sha 71 | tyk=release-4-lts 72 | tyk-analytics=release-4-lts 73 | tyk-pump=v1.8.3 74 | tyk-sink=v2.4.1 75 | tyk=sha-my_build_sha 76 | tyk=master 77 | tyk-analytics=master 78 | tyk-pump=master 79 | tyk-sink=master 80 | tyk-analytics=sha-my_build_sha 81 | tyk=release-5-lts 82 | tyk-analytics=release-5-lts 83 | 
tyk-pump=v1.8.3 84 | tyk-sink=v2.4.1 85 | tyk-analytics=sha-my_build_sha 86 | tyk=release-4-lts 87 | tyk-analytics=release-4-lts 88 | tyk-pump=v1.8.3 89 | tyk-sink=v2.4.1 90 | tyk-analytics=sha-my_build_sha 91 | tyk=master 92 | tyk-analytics=master 93 | tyk-pump=master 94 | tyk-sink=master 95 | tyk-pump=sha-my_build_sha 96 | tyk-pump=sha-my_build_sha 97 | tyk-pump=sha-my_build_sha 98 | tyk=master 99 | tyk-analytics=master 100 | tyk-pump=master 101 | tyk-sink=master 102 | tyk-sink=sha-my_build_sha 103 | tyk-sink=sha-my_build_sha 104 | tyk-sink=sha-my_build_sha 105 | -------------------------------------------------------------------------------- /.github/actions/gh-logs-analyser/log-collector.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -e 3 | 4 | # Inputs and environment variables 5 | RUN_ID="${TARGET_RUN_ID:-$GITHUB_RUN_ID}" 6 | REPO="${TARGET_REPO:-$GITHUB_REPOSITORY}" 7 | 8 | # Extract branch name - prioritize base_ref for PRs 9 | if [[ -n "$GITHUB_BASE_REF" ]]; then 10 | # For Pull Requests, use the destination branch directly 11 | BRANCH="$GITHUB_BASE_REF" 12 | elif [[ -n "$GITHUB_REF" ]]; then 13 | # For direct pushes, extract from refs/heads/branch-name 14 | BRANCH=$(echo "$GITHUB_REF" | sed -e 's,.*/\(.*\),\1,') 15 | else 16 | # Default to empty string if not available 17 | BRANCH="" 18 | fi 19 | 20 | echo "Branch: $BRANCH" 21 | 22 | # 1. List all jobs for the workflow run and filter for failures 23 | echo "Fetching jobs for run $RUN_ID in $REPO..." 24 | JOBS_JSON=$(curl -s -H "Authorization: Bearer $GITHUB_TOKEN" \ 25 | -H "Accept: application/vnd.github+json" \ 26 | "https://api.github.com/repos/$REPO/actions/runs/$RUN_ID/jobs?per_page=100") 27 | 28 | # Use jq to find failed jobs and their first failed step (if any) 29 | FAILED_JOBS=$(echo "$JOBS_JSON" | jq -c '.jobs[] | select(.conclusion=="failure") | {id: .id, name: .name, completed_at: .completed_at, failed_step: ((.steps[]? | select(.conclusion=="failure") | .name) // null)}') 30 | 31 | # 2. Loop through each failed job and handle its log 32 | echo "$FAILED_JOBS" | while IFS= read -r job; do 33 | [ -z "$job" ] && continue # skip if empty 34 | job_id=$(echo "$job" | jq -r '.id') 35 | job_name=$(echo "$job" | jq -r '.name') 36 | step_name=$(echo "$job" | jq -r '.failed_step') 37 | timestamp=$(echo "$job" | jq -r '.completed_at') 38 | 39 | echo "Downloading log for failed job '$job_name' (ID $job_id)..." 40 | # Download the job log (the API returns a redirect to a text log file) 41 | curl -s -L -H "Authorization: Bearer $GITHUB_TOKEN" \ 42 | "https://api.github.com/repos/$REPO/actions/jobs/$job_id/logs" \ 43 | -o "job_${job_id}.log" 44 | 45 | # Preprocess the log to reduce size 46 | echo "Preprocessing log for job '$job_name'..." 47 | 48 | # Remove ANSI color codes 49 | sed -i 's/\x1b\[[0-9;]*m//g' "job_${job_id}.log" 50 | 51 | # Remove timestamp patterns (optional, but helps reduce size) 52 | sed -i -E 's/^\[?[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}(\.[0-9]+)?(Z|[+-][0-9]{2}:[0-9]{2})\]? //g' "job_${job_id}.log" 53 | 54 | # Remove debug/trace lines (optional) 55 | grep -v "^DEBUG:" "job_${job_id}.log" | grep -v "^TRACE:" > "preprocessed_${job_id}.log" || cp "job_${job_id}.log" "preprocessed_${job_id}.log" 56 | 57 | # 3. 
Format the JSON payload with required fields 58 | payload=$(jq -n \ 59 | --arg repo "$REPO" \ 60 | --arg run_id "$RUN_ID" \ 61 | --arg job_name "$job_name" \ 62 | --arg step_name "${step_name:-}" \ 63 | --arg branch "$BRANCH" \ 64 | --arg timestamp "$timestamp" \ 65 | --rawfile raw_log "preprocessed_${job_id}.log" \ 66 | '{ repo: $repo, run_id: $run_id, job_name: $job_name, step_name: $step_name, branch: $branch, timestamp: $timestamp, raw_log: $raw_log }') 67 | 68 | # 4. Send the JSON to the external API with Authorization header using GH_LOGS_ANALYSER secret 69 | echo "Sending log for '$job_name' (step '$step_name', branch '$BRANCH')..." 70 | curl -s -X POST \ 71 | -H "Content-Type: application/json" \ 72 | -H "Authorization: Bearer $GH_LOGS_ANALYSER" \ 73 | -d "$payload" "https://gh-logs-analyser.dokku.tyk.technology/api/v1/logs" || \ 74 | echo "Warning: Failed to send log to API." 75 | 76 | done 77 | 78 | echo "Log extraction completed." 79 | -------------------------------------------------------------------------------- /.github/workflows/s1-cns-scan.yml: -------------------------------------------------------------------------------- 1 | name: SentinelOne CNS Scan 2 | 3 | on: 4 | workflow_call: 5 | inputs: 6 | tag: 7 | description: 'The tag configured for the scan policy in S1 Console' 8 | default: "scan:tykchecks" 9 | type: string 10 | scope_type: 11 | description: 'The scope in which the scan policy is configured' 12 | default: "ACCOUNT" 13 | type: string 14 | iac_enabled: 15 | description: 'Whether IaC scanning should be enabled' 16 | default: true 17 | type: boolean 18 | secrets_enabled: 19 | description: 'Whether secrets scanning should be enabled (it will run only on a pull_request event)' 20 | default: true 21 | type: boolean 22 | vuln_enabled: 23 | description: 'Whether vulnerability scanning should be enabled' 24 | default: true 25 | type: boolean 26 | skip_paths: 27 | description: 'Provide a space-separated list of paths that need to be skipped during vulnerability scanning' 28 | type: string 29 | secrets: 30 | S1_API_TOKEN: 31 | description: 'S1 API Token configured for scanning' 32 | required: true 33 | CONSOLE_URL: 34 | description: 'S1 Management console URL' 35 | required: true 36 | SCOPE_ID: 37 | description: 'Scope ID from S1 console' 38 | required: true 39 | 40 | jobs: 41 | s1-shift-left-cli: 42 | runs-on: ubuntu-latest 43 | container: 44 | # latest version v0.5.4 - update the digest after checking the image when 45 | # a new version comes out. 
46 | image: pingsafe/s1-shift-left-cli@sha256:acd49cfd5ad72d488daf4e2418bd56891fbdef4d5da4729ccdafd6b9bbc5c8ad 47 | options: --entrypoint "" 48 | permissions: 49 | contents: read 50 | env: 51 | REPO_FULL_NAME: ${{ github.repository }} 52 | REPO_URL: ${{ github.server_url }} 53 | 54 | 55 | steps: 56 | - uses: actions/checkout@v5 57 | with: 58 | ref: ${{ github.ref }} 59 | filter: tree:0 60 | fetch-depth: 0 61 | 62 | - name: Configure SentinelOne Shift Left CLI 63 | run: s1-cns-cli config --service-user-api-token "$S1_TOKEN" --management-console-url "$CONSOLE_URL" --scope-type "$SCOPE_TYPE" --scope-id "$SCOPE_ID" --tag "$TAG" 64 | env: 65 | S1_TOKEN: ${{ secrets.S1_API_TOKEN }} 66 | CONSOLE_URL: ${{ secrets.CONSOLE_URL }} 67 | SCOPE_TYPE: ${{ inputs.scope_type }} 68 | SCOPE_ID: ${{ secrets.SCOPE_ID }} 69 | TAG: ${{ inputs.tag }} 70 | 71 | 72 | - name: Configure git config 73 | run: git config --global --add safe.directory "$PWD" 74 | 75 | - name: Run Secret Detector 76 | # Run only on pull requests as we've scans configured to run on pull requests and publish is 77 | # only available on pull requests. 78 | if: github.event_name == 'pull_request' && inputs.secrets_enabled 79 | id: secret-detector 80 | run: s1-cns-cli scan secret -d "$PWD" --pull-request "$SRC" "$DEST" --repo-full-name "$REPO_FULL_NAME" --repo-url "$REPO_URL/$REPO_FULL_NAME" --provider GITHUB --publish-result 81 | env: 82 | DEST: ${{ github.event.pull_request.base.sha }} 83 | SRC: ${{ github.event.pull_request.head.sha }} 84 | 85 | - name: Run IaC Scanner 86 | if: inputs.iac_enabled 87 | run: s1-cns-cli scan iac -d "$PWD" --repo-full-name "$REPO_FULL_NAME" --repo-url "$REPO_URL/$REPO_FULL_NAME" --branch "$BRANCH" --provider GITHUB --publish-result 88 | id: iac-scanner 89 | env: 90 | BRANCH: ${{ github.head_ref || github.ref_name }} 91 | 92 | - name: Run Vulnerability Scanner 93 | if: inputs.vuln_enabled 94 | id: vuln-scanner 95 | run: s1-cns-cli scan vuln --repo-full-name "$REPO_FULL_NAME" ${{ inputs.skip_paths != '' && '--skip-paths "$SKIP_PATHS"' || '' }} -d "$PWD" 96 | env: 97 | SKIP_PATHS: ${{ inputs.skip_paths }} 98 | -------------------------------------------------------------------------------- /branch-suggestion/branch_suggestion.yml: -------------------------------------------------------------------------------- 1 | version: "1.0" 2 | 3 | # Visor configuration for Branch Suggestion automation 4 | # Analyzes JIRA fix versions and suggests appropriate merge target branches 5 | # based on the repository's branching strategy 6 | 7 | checks: 8 | # ============================================================================ 9 | # COMBINED STEP: ANALYZE AND SUGGEST BRANCHES 10 | # ============================================================================ 11 | # Combines all steps into one to avoid Visor's output passing limitations: 12 | # 1. Extract JIRA ticket from PR title/branch 13 | # 2. Fetch fix versions from JIRA 14 | # 3. Fetch repository branches 15 | # 4. Match and suggest branches 16 | # ============================================================================ 17 | analyze-and-suggest: 18 | type: command 19 | timeout: 90 20 | exec: | 21 | set -e 22 | 23 | # Read from environment variables with defaults for local testing 24 | PR_TITLE="${PR_TITLE:-TT-12345: Test PR}" 25 | BRANCH_NAME="${BRANCH_NAME:-feature/TT-12345-test}" 26 | REPO="${REPOSITORY:-TykTechnologies/tyk}" 27 | 28 | set +e 29 | JIRA_RESULT=$(node scripts/jira/get-fixedversion.js "$PR_TITLE" "$BRANCH_NAME") 30 | JIRA_EXIT_CODE=$? 
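# Exit codes from get-fixedversion.js, as handled below: 2 = no JIRA ticket
# referenced, 1 = ticket found but no fix versions set, anything else =
# fix versions returned in JIRA_RESULT.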
31 | set -e 32 | 33 | # Handle exit codes 34 | if [ $JIRA_EXIT_CODE -eq 2 ]; then 35 | echo "ℹ️ No JIRA ticket found. Skipping branch suggestions." >&2 36 | exit 0 37 | fi 38 | 39 | if [ $JIRA_EXIT_CODE -eq 1 ]; then 40 | echo "❌ JIRA ticket found but no fix versions set." >&2 41 | exit 1 42 | fi 43 | 44 | # Fetch repository branches 45 | # Check if gh CLI is available 46 | if command -v gh &> /dev/null; then 47 | BRANCHES=$(gh api "repos/$REPO/branches" --paginate | jq -c '[.[] | {name: .name}]') 48 | else 49 | # Use curl with GitHub API 50 | if [ -n "$GITHUB_TOKEN" ]; then 51 | BRANCHES=$(curl -s -H "Authorization: Bearer $GITHUB_TOKEN" \ 52 | -H "Accept: application/vnd.github.v3+json" \ 53 | "https://api.github.com/repos/$REPO/branches?per_page=100" | \ 54 | jq -c '[.[] | {name: .name}]') 55 | else 56 | BRANCHES=$(curl -s -H "Accept: application/vnd.github.v3+json" \ 57 | "https://api.github.com/repos/$REPO/branches?per_page=100" | \ 58 | jq -c '[.[] | {name: .name}]') 59 | fi 60 | fi 61 | 62 | # Match branches and generate suggestions 63 | MATCH_RESULT=$(node scripts/common/match-branches.js "$JIRA_RESULT" "$BRANCHES" "$REPO") 64 | 65 | # Extract markdown and save for PR comment (use printf to avoid escape sequence issues) 66 | printf '%s\n' "$MATCH_RESULT" | jq -r '.markdown' > /tmp/branch_suggestion_markdown.txt 67 | 68 | # Output full result to stdout 69 | printf '%s\n' "$MATCH_RESULT" 70 | 71 | # ============================================================================ 72 | # STEP 2: POST COMMENT TO PR 73 | # ============================================================================ 74 | # Creates or updates a comment on the PR with branch suggestions 75 | # ============================================================================ 76 | post-pr-comment: 77 | type: command 78 | depends_on: [analyze-and-suggest] 79 | tags: ["remote"] 80 | timeout: 30 81 | exec: | 82 | set -e 83 | 84 | # Read from environment variables with defaults for local testing 85 | REPO="${REPOSITORY:-TykTechnologies/tyk}" 86 | PR_NUMBER="${PR_NUMBER:-123}" 87 | 88 | # Check if markdown file exists from previous step 89 | if [ ! -f /tmp/branch_suggestion_markdown.txt ]; then 90 | echo "❌ Error: Markdown file not found. analyze-and-suggest step may have failed." 
>&2 91 | exit 1 92 | fi 93 | 94 | # Post or update the comment 95 | node scripts/github/add-pr-comment.js "$REPO" "$PR_NUMBER" --file /tmp/branch_suggestion_markdown.txt 96 | 97 | # Clean up 98 | rm -f /tmp/branch_suggestion_markdown.txt 99 | -------------------------------------------------------------------------------- /.github/workflows/force-merge.yaml: -------------------------------------------------------------------------------- 1 | name: Force Merge PR (Reusable) 2 | 3 | on: 4 | workflow_call: 5 | secrets: 6 | ADMIN_PAT: 7 | description: 'Personal Access Token with repo scope from an admin' 8 | required: true 9 | SLACK_WEBHOOK_URL: 10 | description: 'Slack Webhook URL for notifications' 11 | required: true 12 | 13 | jobs: 14 | force_merge: 15 | runs-on: ubuntu-latest 16 | steps: 17 | - name: Check and Merge Pull Request 18 | uses: actions/github-script@v6 19 | env: 20 | SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }} 21 | with: 22 | github-token: ${{ secrets.ADMIN_PAT }} 23 | script: | 24 | const comment = context.payload.comment; 25 | const issue = context.payload.issue; 26 | const repo = context.repo; 27 | const octokit = github; 28 | 29 | // Verify the comment is on a pull request 30 | if (!issue.pull_request) { 31 | console.log('Comment is not on a pull request'); 32 | return; 33 | } 34 | 35 | // Check if the comment starts with "/force-merge" and extract the reason 36 | const commentBody = comment.body.trim(); 37 | const forceMergePrefix = '/force-merge'; 38 | if (!commentBody.startsWith(forceMergePrefix)) { 39 | console.log('Comment does not start with "/force-merge"'); 40 | return; 41 | } 42 | 43 | const reason = commentBody.substring(forceMergePrefix.length).trim(); 44 | if (!reason) { 45 | await octokit.rest.issues.createComment({ 46 | owner: repo.owner, 47 | repo: repo.repo, 48 | issue_number: issue.number, 49 | body: 'Error: A reason must be provided after "/force-merge" (e.g., "/force-merge CI failing tests not related").' 
50 | }); 51 | console.log('No reason provided after "/force-merge"'); 52 | return; 53 | } 54 | 55 | // Check if the commenter is an admin 56 | const username = comment.user.login; 57 | const permission = await octokit.rest.repos.getCollaboratorPermissionLevel({ 58 | owner: repo.owner, 59 | repo: repo.repo, 60 | username: username 61 | }); 62 | 63 | if (permission.data.permission !== 'admin') { 64 | console.log(`${username} is not an admin`); 65 | return; 66 | } 67 | 68 | // Fetch the pull request details to get the description (body) 69 | const pr = await octokit.rest.pulls.get({ 70 | owner: repo.owner, 71 | repo: repo.repo, 72 | pull_number: issue.number 73 | }); 74 | const prDescription = pr.data.body || 'No description provided'; 75 | 76 | // Attempt to squash merge the pull request 77 | try { 78 | await octokit.rest.pulls.merge({ 79 | owner: repo.owner, 80 | repo: repo.repo, 81 | pull_number: issue.number, 82 | merge_method: 'squash', 83 | commit_title: `${pr.data.title} (#${issue.number})`, 84 | commit_message: `${prDescription}\n\nForce merged by ${username} with reason: ${reason}` 85 | }); 86 | console.log(`PR #${issue.number} squash merged successfully`); 87 | 88 | // Send Slack notification 89 | const slackWebhookUrl = process.env.SLACK_WEBHOOK_URL; 90 | const message = { 91 | text: `User ${username} force-merged PR #${issue.number} in ${repo.owner}/${repo.repo} with reason: "${reason}" (squash merge)` 92 | }; 93 | await fetch(slackWebhookUrl, { 94 | method: 'POST', 95 | headers: { 'Content-Type': 'application/json' }, 96 | body: JSON.stringify(message) 97 | }); 98 | console.log('Slack notification sent'); 99 | } catch (error) { 100 | console.error('Squash merge failed:', error.message); 101 | await octokit.rest.issues.createComment({ 102 | owner: repo.owner, 103 | repo: repo.repo, 104 | issue_number: issue.number, 105 | body: `Failed to force-merge the pull request (squash): ${error.message}` 106 | }); 107 | } 108 | -------------------------------------------------------------------------------- /.github/workflows/golangci.yaml: -------------------------------------------------------------------------------- 1 | name: GolangCI 2 | 3 | on: 4 | workflow_call: 5 | inputs: 6 | main_branch: 7 | default: "main" 8 | type: string 9 | go: 10 | type: string 11 | default: "1.17" 12 | 13 | jobs: 14 | golangci: 15 | runs-on: ubuntu-latest 16 | steps: 17 | - name: Git - checkout master 18 | uses: actions/checkout@v3 19 | with: 20 | ref: ${{ inputs.main_branch }} 21 | fetch-depth: 1 22 | - name: Git - checkout current ref 23 | uses: actions/checkout@v3 24 | - uses: actions/cache@v3 25 | with: 26 | # In order: 27 | # * Module download cache 28 | # * Build cache (Linux) 29 | # * Build cache (Mac) 30 | # * Build cache (Windows) 31 | path: | 32 | ~/go/pkg/mod 33 | ~/.cache/go-build 34 | ~/Library/Caches/go-build 35 | ~\AppData\Local\go-build 36 | ~/.cache/golangci-lint 37 | ~/Library/Caches/golangci-lint 38 | key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} 39 | restore-keys: | 40 | ${{ runner.os }}-go- 41 | - name: Download template 42 | run: | 43 | curl 'https://raw.githubusercontent.com/TykTechnologies/github-actions/main/.github/workflows/.golangci.tmpl.yaml' -o .golangci.yaml 44 | - name: Render template 45 | id: render_template 46 | uses: chuhlomin/render-template@v1.7 47 | with: 48 | template: .golangci.yaml 49 | result_path: .golangci.yaml 50 | vars: | 51 | main_branch: ${{ inputs.main_branch }} 52 | goimports: '' 53 | build_tags: [] 54 | skip_dirs: [] 55 | go_version: ${{ inputs.go 
}} 56 | - name: Apply template 57 | run: | 58 | cp .golangci.yaml /tmp/.golangci.yaml 59 | if [[ `git status --porcelain` ]]; then 60 | export ref=$(git rev-parse HEAD) 61 | git checkout ${{ inputs.main_branch }} 62 | git pull --rebase 63 | git config --global user.name 'Bender' 64 | git config --global user.email 'bender@users.noreply.github.com' 65 | git add -A 66 | git commit -a -m 'Update Golangci config' 67 | git push origin ${{ inputs.main_branch }} 68 | git checkout $ref 69 | fi 70 | cp /tmp/.golangci.yaml .golangci.yaml 71 | - uses: actions/setup-go@v3 72 | with: 73 | go-version: ${{ inputs.go }} 74 | - name: Fetch modules 75 | run: | 76 | go mod download 77 | - name: Download golangci-lint 78 | run: curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b $(go env GOPATH)/bin 79 | 80 | - name: golangci-lint 81 | if: ${{ github.event_name == 'pull_request' }} 82 | run: | 83 | $(go env GOPATH)/bin/golangci-lint run --verbose --out-format 'checkstyle:golangci_lint.xml' --timeout=300s --max-same-issues 50 --fix --new=true --new-from-rev=origin/${{ github.base_ref }} ./... 84 | - name: golangci-lint 85 | if: ${{ github.event_name == 'push' }} 86 | run: | 87 | $(go env GOPATH)/bin/golangci-lint run --verbose --out-format 'checkstyle:golangci_lint.xml' --timeout=300s --new=false --new-from-rev= ./... 88 | 89 | - uses: actions/upload-artifact@v4 90 | if: ${{ always() }} 91 | with: 92 | name: golangci-report 93 | path: "*xml" 94 | 95 | - uses: reviewdog/action-setup@v1 96 | if: ${{ always() }} 97 | with: 98 | reviewdog_version: latest # Optional. [latest,nightly,v.X.Y.Z] 99 | - name: Run reviewdog 100 | if: ${{ github.event_name == 'pull_request' }} 101 | env: 102 | REVIEWDOG_GITHUB_API_TOKEN: ${{ secrets.GITHUB_TOKEN }} 103 | run: | 104 | go install golang.org/x/tools/cmd/goimports@latest 105 | goimports -w . 106 | 107 | cat golangci_lint.xml | reviewdog -f=checkstyle -name="Linter" -reporter=github-pr-review 108 | 109 | TMPFILE=$(mktemp) 110 | git diff >"${TMPFILE}" 111 | git diff 112 | git status 113 | 114 | git stash -u && git stash drop 115 | reviewdog -f=diff -f.diff.strip=1 -name="Linter" -reporter=github-pr-review < "${TMPFILE}" 116 | 117 | # cat golangci_lint.xml | reviewdog -f=checkstyle -name="Linter" -reporter=github-pr-check 118 | -------------------------------------------------------------------------------- /branch-suggestion/scripts/github/github-api.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | import dotenv from 'dotenv'; 3 | 4 | dotenv.config(); 5 | 6 | // Configuration 7 | const GITHUB_TOKEN = process.env.GITHUB_TOKEN; 8 | 9 | /** 10 | * Generic GitHub API request wrapper 11 | * @param {string} endpoint - API endpoint (e.g., '/repos/owner/repo/branches') 12 | * @param {object} options - Fetch options (method, body, etc.) 13 | */ 14 | 15 | async function githubAPI(endpoint, options = {}) { 16 | if (!GITHUB_TOKEN) { 17 | throw new Error('GITHUB_TOKEN must be set in .env file'); 18 | } 19 | 20 | const url = endpoint.startsWith('http') 21 | ? 
endpoint 22 | : `https://api.github.com${endpoint}`; 23 | const response = await fetch(url, { 24 | ...options, 25 | headers: { 26 | 'Authorization': `token ${GITHUB_TOKEN}`, 27 | 'Accept': 'application/vnd.github.v3+json', 28 | ...options.headers 29 | } 30 | }); 31 | 32 | if (!response.ok) { 33 | const error = await response.text(); 34 | throw new Error(`GitHub API Error (${response.status}): ${error}`); 35 | } 36 | 37 | // Handle 204 No Content responses 38 | if(response.status === 204 || response.headers.get('content-length') === '0'){ 39 | return {} 40 | } 41 | 42 | return response.json(); 43 | } 44 | 45 | /** 46 | * Get all branches for a repository 47 | * @param {string} owner - Repository owner 48 | * @param {string} repo - Repository name 49 | * @returns {Promise} Array of branch objects 50 | */ 51 | async function getRepoBranches(owner, repo) { 52 | const branches = await githubAPI(`/repos/${owner}/${repo}/branches`); 53 | return branches; 54 | } 55 | 56 | /** 57 | * Get a specific pull request 58 | * @param {string} owner - Repository owner 59 | * @param {string} repo - Repository name 60 | * @param {number} prNumber - PR number 61 | * @returns {Promise} PR object 62 | */ 63 | async function getPullRequest(owner, repo, prNumber) { 64 | const pr = await githubAPI(`/repos/${owner}/${repo}/pulls/${prNumber}`); 65 | return pr; 66 | } 67 | 68 | /** 69 | * Create a comment on a pull request 70 | * @param {string} owner - Repository owner 71 | * @param {string} repo - Repository name 72 | * @param {number} prNumber - PR number 73 | * @param {string} body - Comment body 74 | * @returns {Promise} Created comment object 75 | */ 76 | async function createPRComment(owner, repo, prNumber, body) { 77 | const comment = await githubAPI(`/repos/${owner}/${repo}/issues/${prNumber}/comments`, { 78 | method: 'POST', 79 | body: JSON.stringify({ body }) 80 | }); 81 | return comment; 82 | } 83 | 84 | /** 85 | * Update an existing comment 86 | * @param {string} owner - Repository owner 87 | * @param {string} repo - Repository name 88 | * @param {number} commentId - Comment ID 89 | * @param {string} body - New comment body 90 | * @returns {Promise} Updated comment object 91 | */ 92 | async function updatePRComment(owner, repo, commentId, body) { 93 | const comment = await githubAPI(`/repos/${owner}/${repo}/issues/comments/${commentId}`, { 94 | method: 'PATCH', 95 | body: JSON.stringify({ body }) 96 | }); 97 | return comment; 98 | } 99 | 100 | /** 101 | * List all comments on a pull request 102 | * @param {string} owner - Repository owner 103 | * @param {string} repo - Repository name 104 | * @param {number} prNumber - PR number 105 | * @returns {Promise} Array of comment objects 106 | */ 107 | async function listPRComments(owner, repo, prNumber) { 108 | const comments = await githubAPI(`/repos/${owner}/${repo}/issues/${prNumber}/comments`); 109 | return comments; 110 | } 111 | 112 | /** 113 | * Find a comment by a marker/identifier in its body 114 | * Useful for updating existing bot comments instead of creating duplicates 115 | * @param {string} owner - Repository owner 116 | * @param {string} repo - Repository name 117 | * @param {number} prNumber - PR number 118 | * @param {string} marker - Unique marker to search for (e.g., '') 119 | * @returns {Promise} Comment object if found, null otherwise 120 | */ 121 | async function findCommentByMarker(owner, repo, prNumber, marker) { 122 | const comments = await listPRComments(owner, repo, prNumber); 123 | return comments.find(comment => 
comment.body.includes(marker)) || null; 124 | } 125 | 126 | export { 127 | githubAPI, 128 | getRepoBranches, 129 | getPullRequest, 130 | createPRComment, 131 | updatePRComment, 132 | listPRComments, 133 | findCommentByMarker 134 | }; -------------------------------------------------------------------------------- /docs/Taskfile.yml: -------------------------------------------------------------------------------- 1 | --- 2 | version: "3" 3 | 4 | tasks: 5 | default: 6 | desc: "List actions and workflows" 7 | vars: 8 | actions: 9 | sh: ls ../.github/actions/*/*.{yml,yaml} | grep -v "\-dev" 10 | workflows: 11 | sh: ls ../.github/workflows/*.{yml,yaml} | grep -v "\-dev" 12 | cmds: 13 | - for: {var: workflows} 14 | task: have:docs 15 | vars: 16 | filename: '{{.ITEM | trimPrefix "../.github/" | trimSuffix ".yaml" | trimSuffix ".yml"}}.md' 17 | 18 | - for: {var: actions} 19 | task: have:docs 20 | vars: 21 | filename: '{{.ITEM | trimPrefix "../.github/" | trimSuffix ".yaml" | trimSuffix ".yml" | trimSuffix "/action" }}.md' 22 | 23 | - for: {var: workflows} 24 | task: have:lint 25 | vars: 26 | want: name 27 | filename: '{{.ITEM}}' 28 | 29 | - for: {var: actions} 30 | task: have:lint 31 | vars: 32 | want: name description 33 | filename: '{{.ITEM}}' 34 | 35 | markdown: 36 | desc: "Display markdown" 37 | silent: true 38 | vars: 39 | actions: 40 | sh: ls ../.github/actions/*/*.{yml,yaml} | sort 41 | workflows: 42 | sh: ls ../.github/workflows/*.{yml,yaml} | sort 43 | cmds: 44 | - echo "# Composite actions" && echo 45 | - for: {var: actions} 46 | task: cat:docs 47 | vars: 48 | source: '{{.ITEM}}' 49 | filename: '{{.ITEM | trimPrefix "../.github/" | trimSuffix ".yaml" | trimSuffix ".yml" | trimSuffix "/action" }}.md' 50 | 51 | - echo "# Reusable workflows" && echo 52 | - for: {var: workflows} 53 | task: cat:docs 54 | vars: 55 | source: '{{.ITEM}}' 56 | filename: '{{.ITEM | trimPrefix "../.github/" | trimSuffix ".yaml" | trimSuffix ".yml"}}.md' 57 | 58 | toc: 59 | desc: "Display markdown TOC" 60 | silent: true 61 | vars: 62 | actions: 63 | sh: ls ../.github/actions/*/*.{yml,yaml} | sort 64 | workflows: 65 | sh: ls ../.github/workflows/*.{yml,yaml} | sort 66 | cmds: 67 | - echo "# Composite actions" 68 | - echo 69 | - for: {var: actions} 70 | task: cat:docs-toc 71 | vars: 72 | source: '{{.ITEM}}' 73 | filename: '{{.ITEM | trimPrefix "../.github/" | trimSuffix ".yaml" | trimSuffix ".yml" | trimSuffix "/action" }}.md' 74 | - echo 75 | 76 | - echo "# Reusable workflows" 77 | - echo 78 | - for: {var: workflows} 79 | task: cat:docs-toc 80 | vars: 81 | source: '{{.ITEM}}' 82 | filename: 'docs/{{.ITEM | trimPrefix "../.github/" | trimSuffix ".yaml" | trimSuffix ".yml"}}.md' 83 | 84 | 85 | # This target ensures that every action and workflow 86 | # should have documentation, and reports if it doesn't. 87 | have:docs: 88 | desc: "Check docs exists" 89 | silent: true 90 | requires: 91 | vars: [filename] 92 | status: 93 | - test -e ./{{.filename}} 94 | cmds: 95 | - echo "❌ Action not documented in {{.filename}}" 96 | 97 | # This target reads a file for markdown output. 98 | cat:docs: 99 | internal: true 100 | silent: true 101 | requires: 102 | vars: [filename, source] 103 | vars: 104 | link: '{{.source | replace "../" "/"}}' 105 | cmds: 106 | - |- 107 | if [ -e ./{{.filename}} ]; then 108 | cat ./{{.filename}} 109 | else 110 | name=$(yq '.name' {{.source}}) 111 | description=$(yq '.description' {{.source}}) 112 | if [ "$description" = "null" ] || [ -z "$description" ]; then 113 | description="Undocumented action." 
114 | fi 115 | echo -e "## $name\n\n$description" 116 | fi 117 | echo 118 | echo "Source: [{{.link}}]({{.link}})" 119 | echo 120 | 121 | # This target reads a file for markdown output. 122 | cat:docs-toc: 123 | internal: true 124 | silent: true 125 | requires: 126 | vars: [filename, source] 127 | vars: 128 | link: '{{.source | replace "../" "/"}}' 129 | cmds: 130 | - |- 131 | name=$(yq '.name' {{.source}}) 132 | echo "- $name [documentation]({{.filename}}) [source]({{.link}})" 133 | 134 | # This target ensures that every action and workflow 135 | # should have a `name` field present (poor mans schema). 136 | have:lint: 137 | desc: "Check lint pass" 138 | silent: true 139 | requires: 140 | vars: [filename, want] 141 | cmds: 142 | - for: {var: want} 143 | cmd: yq -o props {{.filename}} | grep -q '^{{.ITEM}} ' || echo "❌ Missing '{{.ITEM}}' in {{.filename}}" 144 | -------------------------------------------------------------------------------- /.github/actions/tests/env-up/action.yaml: -------------------------------------------------------------------------------- 1 | name: 'Set Up Test Environment' 2 | description: 'Configures and starts the test environment with Docker Compose, setting up master and slave datacenters' 3 | author: 'konrad' 4 | inputs: 5 | base_ref: 6 | description: 'Base ref for the test' 7 | required: true 8 | tags: 9 | description: 'Tags for the test' 10 | required: true 11 | github_token: 12 | description: 'GitHub token for API access' 13 | required: true 14 | TYK_DB_LICENSEKEY: 15 | description: 'Tyk DB license key' 16 | required: true 17 | TYK_MDCB_LICENSE: 18 | description: 'Tyk MDCB license key' 19 | required: true 20 | dashboard_image: 21 | description: 'Override dashboard image (optional). If not provided, uses gromit policy matching.' 22 | required: false 23 | default: '' 24 | outputs: 25 | USER_API_SECRET: 26 | description: 'User API secret for the test' 27 | value: ${{ steps.env_up.outputs.USER_API_SECRET }} 28 | 29 | runs: 30 | using: "composite" 31 | steps: 32 | - name: Starting environment 33 | working-directory: auto 34 | shell: bash 35 | id: env_up 36 | env: 37 | ECR: "754489498669.dkr.ecr.eu-central-1.amazonaws.com" 38 | REPO_NAME: ${{ github.event.repository.name }} 39 | TAGS: ${{ inputs.tags }} 40 | BASE_REF: ${{ inputs.base_ref }} 41 | pull_policy: 'if_not_present' 42 | GITHUB_TOKEN: ${{ inputs.github_token }} 43 | TYK_DB_LICENSEKEY: ${{ inputs.TYK_DB_LICENSEKEY }} 44 | TYK_MDCB_LICENSE: ${{ inputs.TYK_MDCB_LICENSE }} 45 | DASHBOARD_IMAGE: ${{ inputs.dashboard_image }} 46 | run: | 47 | match_tag=$ECR/$REPO_NAME:$BASE_REF 48 | 49 | # Extract the non-SHA tag from TAGS 50 | # Debug: Show the raw TAGS input 51 | echo "Raw TAGS input:" 52 | echo "$TAGS" 53 | 54 | # Convert TAGS to an array by splitting on newlines 55 | readarray -t tag_array <<< "$TAGS" 56 | 57 | echo "Found ${#tag_array[@]} tags:" 58 | for i in "${!tag_array[@]}"; do 59 | # Trim leading/trailing whitespace 60 | tag_array[$i]=$(echo "${tag_array[$i]}" | xargs) 61 | echo " $i: ${tag_array[$i]}" 62 | done 63 | 64 | # Find the tag that doesn't contain "sha-" 65 | for tag in "${tag_array[@]}"; do 66 | if [[ ! 
$tag == *":sha-"* ]]; then 67 | non_sha_tag=$tag 68 | echo "Found non-SHA tag: $non_sha_tag" 69 | break 70 | fi 71 | done 72 | 73 | # If no non-SHA tag found, use the first tag as fallback 74 | if [ -z "$non_sha_tag" ]; then 75 | non_sha_tag=${tag_array[0]} 76 | echo "No non-SHA tag found, using first tag as fallback: $non_sha_tag" 77 | fi 78 | 79 | echo "Using tag: $non_sha_tag" 80 | 81 | set -eaxo pipefail 82 | docker run -q --rm -v ~/.docker/config.json:/root/.docker/config.json tykio/gromit policy match ${non_sha_tag} ${match_tag} 2>versions.env 83 | 84 | # Override dashboard image if provided 85 | if [ -n "$DASHBOARD_IMAGE" ]; then 86 | echo "🔧 Overriding dashboard image with: $DASHBOARD_IMAGE" 87 | echo "tyk_analytics_image=$DASHBOARD_IMAGE" >> versions.env 88 | fi 89 | 90 | echo '# alfa and beta have to come after the override 91 | tyk_image="$ECR/tyk-ee" 92 | tyk_alfa_image=$tyk_image 93 | tyk_beta_image=$tyk_image 94 | confs_dir=./pro-ha 95 | env_file=local.env' >> versions.env 96 | if [[ $REPO_NAME != "tyk-pump" ]]; then 97 | tyk_pump_image=${{ matrix.pump }} 98 | fi 99 | if [[ $REPO_NAME != "tyk-sink" ]]; then 100 | tyk_sink_image=${{ matrix.sink }} 101 | fi 102 | cat ./confs/${{ matrix.envfiles.config }}.env local-${{ matrix.envfiles.db }}.env > local.env 103 | echo "::group::versions" 104 | cat versions.env local.env 105 | echo "::endgroup::" 106 | # bring up env, the project name is important 107 | docker compose -p auto -f pro-ha.yml -f deps_pro-ha.yml -f ${{ matrix.envfiles.db }}.yml -f ${{ matrix.envfiles.cache }}.yml --env-file versions.env --profile master-datacenter up --quiet-pull -d 108 | ./dash-bootstrap.sh http://localhost:3000 109 | docker compose -p auto -f pro-ha.yml -f deps_pro-ha.yml -f ${{ matrix.envfiles.db }}.yml -f ${{ matrix.envfiles.cache }}.yml --env-file versions.env --profile slave-datacenter up --quiet-pull -d 110 | echo "$(cat pytest.env | grep USER_API_SECRET)" >> $GITHUB_OUTPUT 111 | echo "ts=$(date +%s%N)" >> $GITHUB_OUTPUT 112 | echo "::group::Docker images" 113 | docker ps -a --format '{{.Names}}\t{{.Image}}' 114 | echo "::endgroup::" 115 | -------------------------------------------------------------------------------- /branch-suggestion/scripts/jira/__tests__/get-fixedversion.test.js: -------------------------------------------------------------------------------- 1 | import { describe, it, expect } from 'vitest'; 2 | import { extractJiraTicket, parseVersion, detectComponent } from '../get-fixedversion.js'; 3 | 4 | describe('extractJiraTicket', () => { 5 | it('should extract ticket from PR title', () => { 6 | expect(extractJiraTicket('TT-12345: Fix bug')).toBe('TT-12345'); 7 | }); 8 | 9 | it('should extract ticket from branch name', () => { 10 | expect(extractJiraTicket('feature/TT-12345-fix-bug')).toBe('TT-12345'); 11 | }); 12 | 13 | it('should extract ticket from middle of text', () => { 14 | expect(extractJiraTicket('Fix bug (TT-12345)')).toBe('TT-12345'); 15 | }); 16 | 17 | it('should handle different project keys', () => { 18 | expect(extractJiraTicket('ABC-999: Test')).toBe('ABC-999'); 19 | expect(extractJiraTicket('PROJ-1: Test')).toBe('PROJ-1'); 20 | }); 21 | 22 | it('should return null for no ticket', () => { 23 | expect(extractJiraTicket('Fix bug without ticket')).toBe(null); 24 | }); 25 | 26 | it('should return null for empty input', () => { 27 | expect(extractJiraTicket('')).toBe(null); 28 | expect(extractJiraTicket(null)).toBe(null); 29 | }); 30 | 31 | it('should not match single letter project keys', () => { 32 | 
expect(extractJiraTicket('T-12345')).toBe(null); 33 | }); 34 | }); 35 | 36 | describe('detectComponent', () => { 37 | it('should detect TIB component', () => { 38 | expect(detectComponent('TIB 1.7.0')).toEqual(['tyk-identity-broker']); 39 | }); 40 | 41 | it('should detect Tyk component', () => { 42 | expect(detectComponent('Tyk 5.8.1')).toEqual(['tyk', 'tyk-analytics', 'tyk-analytics-ui']); 43 | expect(detectComponent('Tyk Gateway 5.8.1')).toEqual(['tyk', 'tyk-analytics', 'tyk-analytics-ui']); 44 | }); 45 | 46 | it('should detect Pump component', () => { 47 | expect(detectComponent('Pump 1.9.0')).toEqual(['tyk-pump']); 48 | expect(detectComponent('Tyk Pump 1.9.0')).toEqual(['tyk-pump']); 49 | }); 50 | 51 | it('should detect MDCB component', () => { 52 | expect(detectComponent('MDCB 2.0.0')).toEqual(['tyk-sink']); 53 | }); 54 | 55 | it('should return empty array for unknown prefix', () => { 56 | expect(detectComponent('Unknown 1.0.0')).toEqual([]); 57 | expect(detectComponent('1.0.0')).toEqual([]); 58 | }); 59 | 60 | it('should handle case insensitivity', () => { 61 | expect(detectComponent('tib 1.7.0')).toEqual(['tyk-identity-broker']); 62 | expect(detectComponent('TYK 5.8.1')).toEqual(['tyk', 'tyk-analytics', 'tyk-analytics-ui']); 63 | }); 64 | 65 | it('should handle empty or null input', () => { 66 | expect(detectComponent('')).toEqual([]); 67 | expect(detectComponent(null)).toEqual([]); 68 | }); 69 | }); 70 | 71 | describe('parseVersion', () => { 72 | it('should parse semantic version', () => { 73 | const result = parseVersion('5.8.1'); 74 | expect(result).toEqual({ 75 | major: 5, 76 | minor: 8, 77 | patch: 1, 78 | original: '5.8.1', 79 | component: [] 80 | }); 81 | }); 82 | 83 | it('should parse version with v prefix', () => { 84 | const result = parseVersion('v5.8.1'); 85 | expect(result.major).toBe(5); 86 | expect(result.minor).toBe(8); 87 | expect(result.patch).toBe(1); 88 | }); 89 | 90 | it('should parse version with Tyk prefix', () => { 91 | const result = parseVersion('Tyk 5.8.1'); 92 | expect(result.major).toBe(5); 93 | expect(result.component).toEqual(['tyk', 'tyk-analytics', 'tyk-analytics-ui']); 94 | }); 95 | 96 | it('should parse version with Tyk Gateway prefix', () => { 97 | const result = parseVersion('Tyk Gateway 5.8.1'); 98 | expect(result.major).toBe(5); 99 | expect(result.component).toEqual(['tyk', 'tyk-analytics', 'tyk-analytics-ui']); 100 | }); 101 | 102 | it('should handle TIB version', () => { 103 | const result = parseVersion('TIB 1.7.0'); 104 | expect(result.major).toBe(1); 105 | expect(result.minor).toBe(7); 106 | expect(result.patch).toBe(0); 107 | expect(result.component).toEqual(['tyk-identity-broker']); 108 | }); 109 | 110 | it('should handle Pump version', () => { 111 | const result = parseVersion('Pump 1.9.0'); 112 | expect(result.major).toBe(1); 113 | expect(result.component).toEqual(['tyk-pump']); 114 | }); 115 | 116 | it('should handle MDCB version', () => { 117 | const result = parseVersion('MDCB 2.0.0'); 118 | expect(result.major).toBe(2); 119 | expect(result.component).toEqual(['tyk-sink']); 120 | }); 121 | 122 | it('should handle minor version only', () => { 123 | const result = parseVersion('5.8'); 124 | expect(result.major).toBe(5); 125 | expect(result.minor).toBe(8); 126 | expect(result.patch).toBe(null); 127 | }); 128 | 129 | it('should handle major version only', () => { 130 | const result = parseVersion('5'); 131 | expect(result.major).toBe(5); 132 | expect(result.minor).toBe(null); 133 | expect(result.patch).toBe(null); 134 | }); 135 | 136 | 
it('should return null for invalid version', () => { 137 | expect(parseVersion('invalid')).toBe(null); 138 | expect(parseVersion('not-a-version')).toBe(null); 139 | expect(parseVersion('')).toBe(null); 140 | expect(parseVersion(null)).toBe(null); 141 | }); 142 | 143 | it('should preserve original version string', () => { 144 | const result = parseVersion('TIB 1.7.0'); 145 | expect(result.original).toBe('TIB 1.7.0'); 146 | }); 147 | }); 148 | -------------------------------------------------------------------------------- /branch-suggestion/scripts/github/add-pr-comment.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | import dotenv from 'dotenv'; 3 | import fs from 'fs'; 4 | import { 5 | getPullRequest, 6 | createPRComment, 7 | updatePRComment, 8 | findCommentByMarker 9 | } from './github-api.js'; 10 | 11 | // Silence dotenv v17+ logging 12 | process.env.DOTENV_LOG_LEVEL = 'error'; 13 | dotenv.config(); 14 | 15 | const COMMENT_MARKER = ''; 16 | 17 | /** 18 | * Add or update a comment on a GitHub PR 19 | * @param {string} owner - Repository owner 20 | * @param {string} repo - Repository name 21 | * @param {number} prNumber - Pull request number 22 | * @param {string} body - Comment body (markdown) 23 | * @param {string} marker - Unique marker to identify the comment 24 | * @returns {Promise} Created or updated comment 25 | */ 26 | async function addOrUpdateComment(owner, repo, prNumber, body, marker = COMMENT_MARKER) { 27 | // Ensure the marker is included in the comment body 28 | const commentBody = body.includes(marker) ? body : `${body}\n\n${marker}`; 29 | 30 | try { 31 | // Check if a comment with this marker already exists 32 | const existingComment = await findCommentByMarker(owner, repo, prNumber, marker); 33 | 34 | if (existingComment) { 35 | console.log(`Found existing comment (ID: ${existingComment.id}). Updating...`); 36 | const updated = await updatePRComment(owner, repo, existingComment.id, commentBody); 37 | console.log(`✅ Updated comment on PR #${prNumber}`); 38 | return updated; 39 | } else { 40 | console.log(`No existing comment found. 
Creating new comment...`); 41 | const created = await createPRComment(owner, repo, prNumber, commentBody); 42 | console.log(`✅ Created comment on PR #${prNumber}`); 43 | return created; 44 | } 45 | } catch (error) { 46 | throw new Error(`Failed to add/update comment on PR #${prNumber}: ${error.message}`); 47 | } 48 | } 49 | 50 | /** 51 | * Parse repository string in format "owner/repo" 52 | * @param {string} repoString - Repository in "owner/repo" format 53 | * @returns {object} {owner, repo} 54 | */ 55 | function parseRepo(repoString) { 56 | const parts = repoString.split('/'); 57 | if (parts.length !== 2) { 58 | throw new Error('Repository must be in format "owner/repo"'); 59 | } 60 | return { owner: parts[0], repo: parts[1] }; 61 | } 62 | 63 | // Main execution when run directly 64 | async function main() { 65 | const args = process.argv.slice(2); 66 | 67 | if (args.length < 2) { 68 | console.log('Usage: node add-pr-comment.js [] [options]'); 69 | console.log('\nOptions:'); 70 | console.log(' --file Read comment body from file'); 71 | console.log(' --marker Custom marker (default: )'); 72 | console.log('\nExamples:'); 73 | console.log(' # Direct comment text'); 74 | console.log(' node add-pr-comment.js TykTechnologies/tyk 123 "## Suggested branches\\n- release-5.8"'); 75 | console.log(''); 76 | console.log(' # From file'); 77 | console.log(' node add-pr-comment.js TykTechnologies/tyk 123 --file /tmp/comment.md'); 78 | console.log(''); 79 | console.log(' # Custom marker'); 80 | console.log(' node add-pr-comment.js TykTechnologies/tyk 123 "Comment" --marker ""'); 81 | process.exit(1); 82 | } 83 | 84 | try { 85 | const { owner, repo } = parseRepo(args[0]); 86 | const prNumber = parseInt(args[1], 10); 87 | 88 | if (isNaN(prNumber) || prNumber < 1) { 89 | throw new Error('PR number must be a positive integer'); 90 | } 91 | 92 | let commentBody = args[2] || ''; 93 | let marker = COMMENT_MARKER; 94 | 95 | // Parse options starting from position 2 (to handle --file in that position) 96 | for (let i = 2; i < args.length; i++) { 97 | if (args[i] === '--file' && args[i + 1]) { 98 | const filePath = args[i + 1]; 99 | if (!fs.existsSync(filePath)) { 100 | throw new Error(`File not found: ${filePath}`); 101 | } 102 | commentBody = fs.readFileSync(filePath, 'utf-8'); 103 | i++; // Skip next arg 104 | } else if (args[i] === '--marker' && args[i + 1]) { 105 | marker = args[i + 1]; 106 | i++; // Skip next arg 107 | } 108 | } 109 | 110 | if (!commentBody || commentBody.trim() === '') { 111 | throw new Error('Comment body cannot be empty. 
Use --file to read from file.'); 112 | } 113 | 114 | console.log(`Repository: ${owner}/${repo}`); 115 | console.log(`PR Number: ${prNumber}`); 116 | console.log(`Marker: ${marker}`); 117 | console.log(''); 118 | 119 | const result = await addOrUpdateComment(owner, repo, prNumber, commentBody, marker); 120 | 121 | console.log(''); 122 | console.log('Comment URL:', result.html_url); 123 | 124 | } catch (error) { 125 | console.error(`❌ Error: ${error.message}`); 126 | process.exit(1); 127 | } 128 | } 129 | 130 | // Export functions for use in other scripts 131 | export { 132 | addOrUpdateComment, 133 | parseRepo, 134 | COMMENT_MARKER 135 | }; 136 | 137 | // Run main if executed directly 138 | if (import.meta.url === `file://${process.argv[1]}`) { 139 | main(); 140 | } -------------------------------------------------------------------------------- /.github/workflows/sbom.yaml: -------------------------------------------------------------------------------- 1 | name: SBOM - source bill of materials 2 | 3 | on: 4 | workflow_call: 5 | secrets: 6 | # Remove this as soon as this is removed from release wf. 7 | TF_API_TOKEN: 8 | required: false 9 | DOCKER_IMAGE: 10 | required: false 11 | DEPDASH_URL: 12 | required: true 13 | DEPDASH_KEY: 14 | required: true 15 | ORG_GH_TOKEN: 16 | required: true 17 | 18 | jobs: 19 | changedfiles: 20 | runs-on: ubuntu-latest 21 | # Map a step output to a job output 22 | outputs: 23 | go: ${{ steps.changes.outputs.go }} 24 | npm: ${{ steps.changes.outputs.npm }} 25 | ci: ${{ steps.changes.outputs.ci }} 26 | docker: ${{ steps.changes.outputs.docker }} 27 | github: ${{ steps.changes.outputs.github }} 28 | steps: 29 | # Make sure we have some code to diff. 30 | - name: Checkout repository 31 | uses: actions/checkout@v2 32 | with: 33 | fetch-depth: 50 34 | 35 | - name: Get changed files 36 | id: changes 37 | # Set outputs using the command. 
38 | run: | 39 | if [ -n "${{ github.event.pull_request.head.sha }}" ]; then 40 | # If pull request event is available, use the PR commit SHA 41 | COMMIT_SHA="${{ github.event.pull_request.head.sha }}" 42 | else 43 | # If pull request event is not available, use the push commit SHA 44 | COMMIT_SHA="${{ github.sha }}" 45 | fi 46 | 47 | echo $COMMIT_SHA 48 | FILES=$(git diff --name-only --diff-filter=ACMRT ${{ github.event.pull_request.base.sha }} $COMMIT_SHA) 49 | 50 | echo "go=$(echo $FILES | grep go.mod | xargs )" >> $GITHUB_OUTPUT 51 | echo "npm=$(echo $FILES | grep npm | xargs )" >> $GITHUB_OUTPUT 52 | echo "ci=$(echo $FILES | grep ci | xargs )" >> $GITHUB_OUTPUT 53 | echo "docker=$(echo $FILES | grep Dockerfile | xargs )" >> $GITHUB_OUTPUT 54 | echo "github=$(echo $FILES | grep github | xargs )" >> $GITHUB_OUTPUT 55 | 56 | echo $FILES | grep github | xargs 57 | sbom: 58 | name: SBOM 59 | runs-on: ubuntu-latest 60 | permissions: 61 | id-token: write # This is required for requesting the JWT 62 | contents: read # This is required for actions/checkout 63 | needs: changedfiles 64 | if: contains(github.ref, 'release-') || contains(github.ref, 'master') || contains(github.base_ref, 'release-') || contains(github.base_ref, 'master') || needs.changedfiles.outputs.go || needs.changedfiles.outputs.npm || needs.changedfiles.outputs.ci || needs.changedfiles.outputs.docker || needs.changedfiles.outputs.github 65 | steps: 66 | - name: Check out code into the Go module directory 67 | uses: actions/checkout@v3 68 | with: 69 | fetch-depth: 1 70 | token: ${{ secrets.ORG_GH_TOKEN }} 71 | submodules: true 72 | 73 | - name: Configure AWS credentials for use 74 | uses: aws-actions/configure-aws-credentials@v1 75 | env: 76 | DOCKER_IMAGE: ${{ secrets.DOCKER_IMAGE }} 77 | if: env.DOCKER_IMAGE == null 78 | with: 79 | role-to-assume: arn:aws:iam::754489498669:role/ecr_rw_tyk 80 | role-session-name: cipush 81 | aws-region: eu-central-1 82 | 83 | - name: Login to Amazon ECR 84 | env: 85 | DOCKER_IMAGE: ${{ secrets.DOCKER_IMAGE }} 86 | if: env.DOCKER_IMAGE == null 87 | id: login-ecr 88 | uses: aws-actions/amazon-ecr-login@v1 89 | 90 | - name: Generate Source code SBOM 91 | uses: aquasecurity/trivy-action@0.16.1 92 | with: 93 | scan-type: 'fs' 94 | format: 'cyclonedx' 95 | output: 'source.sbom.json' 96 | image-ref: '.' 
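      # The two "Generate Docker SBOM" steps below are mutually exclusive: the first
      # runs when the DOCKER_IMAGE secret is not set and scans the image pushed to ECR
      # for this commit, while the second runs when DOCKER_IMAGE is provided and scans
      # that image instead. Only one of them produces docker.sbom.json on a given run.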
97 | 98 | - name: Generate Docker SBOM 99 | uses: aquasecurity/trivy-action@0.16.1 100 | env: 101 | DOCKER_IMAGE: ${{ secrets.DOCKER_IMAGE }} 102 | if: env.DOCKER_IMAGE == null 103 | with: 104 | format: 'cyclonedx' 105 | output: 'docker.sbom.json' 106 | image-ref: '${{ steps.login-ecr.outputs.registry }}/${{ github.event.repository.name}}:sha-${{ github.sha }}' 107 | 108 | - name: Generate Docker SBOM 109 | uses: aquasecurity/trivy-action@0.16.1 110 | env: 111 | DOCKER_IMAGE: ${{ secrets.DOCKER_IMAGE }} 112 | if: env.DOCKER_IMAGE 113 | with: 114 | format: 'cyclonedx' 115 | output: 'docker.sbom.json' 116 | image-ref: '${{ secrets.DOCKER_IMAGE }}' 117 | 118 | 119 | - name: Generate Dependencies SBOM 120 | run: | 121 | curl https://raw.githubusercontent.com/TykTechnologies/github-actions/main/sbom/gen_dep.py > gen_dep.py 122 | python3 gen_dep.py service.yaml > deps.sbom.json 123 | 124 | - name: Merge SBOM 125 | run: | 126 | curl https://raw.githubusercontent.com/TykTechnologies/github-actions/main/sbom/merge.py > merge.py 127 | python3 merge.py source.sbom.json docker.sbom.json deps.sbom.json > sbom.json 128 | 129 | - name: Upload SBOM 130 | run: | 131 | if [ -n "$GITHUB_HEAD_REF" ]; then 132 | BRANCH_NAME="${GITHUB_HEAD_REF##*/}" 133 | else 134 | BRANCH_NAME="${GITHUB_REF##*/}" 135 | fi 136 | 137 | curl '${{secrets.DEPDASH_URL}}/api/v1/bom' -X 'POST' -H 'X-API-Key: ${{secrets.DEPDASH_KEY}}' -H "Content-Type: multipart/form-data" -F "autoCreate=true" -F "projectName=${GITHUB_REPOSITORY##*/}" -F "projectVersion=${BRANCH_NAME}" -F "bom=@./sbom.json" -vvv 138 | 139 | # curl -vvv '${{secrets.DEPDASH_URL}}/api/v1/project/lookup?name=${GITHUB_REPOSITORY##*/}&version=${BRANCH_NAME}' -H 'X-API-Key: ${{secrets.DEPDASH_KEY}} 140 | # PROJECT_UUID=$(curl --silent '${{secrets.DEPDASH_URL}}/api/v1/project/lookup?name=${GITHUB_REPOSITORY##*/}&version=${BRANCH_NAME}' -H 'X-API-Key: ${{secrets.DEPDASH_KEY}}' | jq -r .uuid) 141 | -------------------------------------------------------------------------------- /.github/workflows/sbom-dev.yaml: -------------------------------------------------------------------------------- 1 | name: SBOM - source bill of materials (dev) 2 | 3 | on: 4 | workflow_call: 5 | secrets: 6 | # Remove this as soon as this is removed from release wf. 7 | TF_API_TOKEN: 8 | required: false 9 | DOCKER_IMAGE: 10 | required: false 11 | DEPDASH_URL: 12 | required: true 13 | DEPDASH_KEY: 14 | required: true 15 | ORG_GH_TOKEN: 16 | required: true 17 | 18 | jobs: 19 | changedfiles: 20 | runs-on: ubuntu-latest 21 | # Map a step output to a job output 22 | outputs: 23 | go: ${{ steps.changes.outputs.go }} 24 | npm: ${{ steps.changes.outputs.npm }} 25 | ci: ${{ steps.changes.outputs.ci }} 26 | docker: ${{ steps.changes.outputs.docker }} 27 | github: ${{ steps.changes.outputs.github }} 28 | steps: 29 | # Make sure we have some code to diff. 30 | - name: Checkout repository 31 | uses: actions/checkout@v2 32 | with: 33 | fetch-depth: 50 34 | 35 | - name: Get changed files 36 | id: changes 37 | # Set outputs using the command. 
38 | run: | 39 | if [ -n "${{ github.event.pull_request.head.sha }}" ]; then 40 | # If pull request event is available, use the PR commit SHA 41 | COMMIT_SHA="${{ github.event.pull_request.head.sha }}" 42 | else 43 | # If pull request event is not available, use the push commit SHA 44 | COMMIT_SHA="${{ github.sha }}" 45 | fi 46 | 47 | echo $COMMIT_SHA 48 | FILES=$(git diff --name-only --diff-filter=ACMRT ${{ github.event.pull_request.base.sha }} $COMMIT_SHA) 49 | 50 | echo "go=$(echo $FILES | grep go.mod | xargs )" >> $GITHUB_OUTPUT 51 | echo "npm=$(echo $FILES | grep npm | xargs )" >> $GITHUB_OUTPUT 52 | echo "ci=$(echo $FILES | grep ci | xargs )" >> $GITHUB_OUTPUT 53 | echo "docker=$(echo $FILES | grep Dockerfile | xargs )" >> $GITHUB_OUTPUT 54 | echo "github=$(echo $FILES | grep github | xargs )" >> $GITHUB_OUTPUT 55 | 56 | echo $FILES | grep github | xargs 57 | sbom: 58 | name: SBOM 59 | runs-on: ubuntu-latest 60 | permissions: 61 | id-token: write # This is required for requesting the JWT 62 | contents: read # This is required for actions/checkout 63 | needs: changedfiles 64 | if: contains(github.ref, 'release-') || contains(github.ref, 'master') || contains(github.base_ref, 'release-') || contains(github.base_ref, 'master') || needs.changedfiles.outputs.go || needs.changedfiles.outputs.npm || needs.changedfiles.outputs.ci || needs.changedfiles.outputs.docker || needs.changedfiles.outputs.github 65 | steps: 66 | - name: Check out code into the Go module directory 67 | uses: actions/checkout@v3 68 | with: 69 | fetch-depth: 1 70 | token: ${{ secrets.ORG_GH_TOKEN }} 71 | submodules: true 72 | 73 | - name: Configure AWS credentials for use 74 | uses: aws-actions/configure-aws-credentials@v1 75 | env: 76 | DOCKER_IMAGE: ${{ secrets.DOCKER_IMAGE }} 77 | if: env.DOCKER_IMAGE == null 78 | with: 79 | role-to-assume: arn:aws:iam::754489498669:role/ecr_rw_tyk 80 | role-session-name: cipush 81 | aws-region: eu-central-1 82 | 83 | - name: Login to Amazon ECR 84 | env: 85 | DOCKER_IMAGE: ${{ secrets.DOCKER_IMAGE }} 86 | if: env.DOCKER_IMAGE == null 87 | id: login-ecr 88 | uses: aws-actions/amazon-ecr-login@v1 89 | 90 | - name: Generate Source code SBOM 91 | uses: aquasecurity/trivy-action@0.20.0 92 | with: 93 | scan-type: 'fs' 94 | format: 'cyclonedx' 95 | output: 'source.sbom.json' 96 | image-ref: '.' 
97 | 98 | - name: Generate Docker SBOM 99 | uses: aquasecurity/trivy-action@0.20.0 100 | env: 101 | DOCKER_IMAGE: ${{ secrets.DOCKER_IMAGE }} 102 | if: env.DOCKER_IMAGE == null 103 | with: 104 | format: 'cyclonedx' 105 | output: 'docker.sbom.json' 106 | image-ref: '${{ steps.login-ecr.outputs.registry }}/${{ github.event.repository.name}}:sha-${{ github.sha }}' 107 | 108 | - name: Generate Docker SBOM 109 | uses: aquasecurity/trivy-action@0.20.0 110 | env: 111 | DOCKER_IMAGE: ${{ secrets.DOCKER_IMAGE }} 112 | if: env.DOCKER_IMAGE 113 | with: 114 | format: 'cyclonedx' 115 | output: 'docker.sbom.json' 116 | image-ref: '${{ secrets.DOCKER_IMAGE }}' 117 | 118 | 119 | - name: Generate Dependencies SBOM 120 | run: | 121 | curl https://raw.githubusercontent.com/TykTechnologies/github-actions/main/sbom/gen_dep.py > gen_dep.py 122 | python3 gen_dep.py service.yaml > deps.sbom.json 123 | 124 | - name: Merge SBOM 125 | run: | 126 | curl https://raw.githubusercontent.com/TykTechnologies/github-actions/main/sbom/merge.py > merge.py 127 | python3 merge.py source.sbom.json docker.sbom.json deps.sbom.json > sbom.json 128 | 129 | - name: Upload SBOM 130 | run: | 131 | if [ -n "$GITHUB_HEAD_REF" ]; then 132 | BRANCH_NAME="${GITHUB_HEAD_REF##*/}" 133 | else 134 | BRANCH_NAME="${GITHUB_REF##*/}" 135 | fi 136 | 137 | curl '${{secrets.DEPDASH_URL}}/api/v1/bom' -X 'POST' -H 'X-API-Key: ${{secrets.DEPDASH_KEY}}' -H "Content-Type: multipart/form-data" -F "autoCreate=true" -F "projectName=${GITHUB_REPOSITORY##*/}" -F "projectVersion=${BRANCH_NAME}" -F "bom=@./sbom.json" -vvv 138 | 139 | # curl -vvv '${{secrets.DEPDASH_URL}}/api/v1/project/lookup?name=${GITHUB_REPOSITORY##*/}&version=${BRANCH_NAME}' -H 'X-API-Key: ${{secrets.DEPDASH_KEY}} 140 | # PROJECT_UUID=$(curl --silent '${{secrets.DEPDASH_URL}}/api/v1/project/lookup?name=${GITHUB_REPOSITORY##*/}&version=${BRANCH_NAME}' -H 'X-API-Key: ${{secrets.DEPDASH_KEY}}' | jq -r .uuid) 141 | -------------------------------------------------------------------------------- /branch-suggestion/scripts/jira/get-fixedversion.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | import dotenv from 'dotenv'; 3 | import { jiraAPI, getIssue } from './jira-api.js'; 4 | 5 | // Only load .env if JIRA_TOKEN is not already set (to avoid log output in CI) 6 | // Silence dotenv v17+ logging 7 | if (!process.env.JIRA_TOKEN) { 8 | process.env.DOTENV_LOG_LEVEL = 'error'; 9 | dotenv.config(); 10 | } 11 | 12 | /** 13 | * Extract JIRA ticket key from text (e.g., PR title, branch name) 14 | * @param {string} text - Text to search 15 | * @returns {string|null} First JIRA ticket key found, or null 16 | */ 17 | function extractJiraTicket(text) { 18 | if (!text) return null; 19 | 20 | // Match pattern: 2+ uppercase letters, dash, 1+ digits 21 | // Works in titles: "TT-12345: Fix bug" 22 | // Works in branches: "feature/TT-12345-fix-bug" 23 | const match = text.match(/\b([A-Z]{2,})-(\d+)\b/); 24 | return match ? 
match[0] : null; 25 | } 26 | 27 | 28 | /** 29 | * Detect which component repositories a version applies to based on prefix 30 | * @param {string} versionString - Version string to detect component from 31 | * @returns {Array} Array of repository names this version applies to 32 | */ 33 | function detectComponent(versionString) { 34 | if (!versionString) return []; 35 | 36 | const normalized = versionString.trim(); 37 | 38 | // Check for TIB (Tyk Identity Broker) 39 | if (/^TIB\s+/i.test(normalized)) { 40 | return ['tyk-identity-broker']; 41 | } 42 | 43 | // Check for Pump (Tyk Pump) 44 | if (/^(Tyk\s+)?Pump\s+/i.test(normalized)) { 45 | return ['tyk-pump']; 46 | } 47 | 48 | // Check for MDCB 49 | if (/^MDCB\s+/i.test(normalized)) { 50 | return ['tyk-sink']; 51 | } 52 | 53 | // Check for Tyk or Tyk Gateway (shared release cadence) 54 | if (/^Tyk(\s+Gateway)?\s+/i.test(normalized)) { 55 | return ['tyk', 'tyk-analytics', 'tyk-analytics-ui']; 56 | } 57 | 58 | // Unknown prefix - return empty array (no filtering) 59 | return []; 60 | } 61 | 62 | /** 63 | * Parse a version string into semantic version components 64 | * @param {string} versionString - Version string to parse 65 | * @returns {object|null} Object with {major, minor, patch, original, component} or null if invalid 66 | */ 67 | function parseVersion(versionString) { 68 | if (!versionString) return null; 69 | 70 | // Detect component before removing prefixes 71 | const component = detectComponent(versionString); 72 | 73 | // Remove common prefixes: "v5.8.1" → "5.8.1", "Tyk 5.8.1" → "5.8.1", "TIB 1.7.0" → "1.7.0" 74 | const cleaned = versionString 75 | .replace(/^v/i, '') 76 | .replace(/^Tyk(\s+Gateway)?\s+/i, '') 77 | .replace(/^TIB\s+/i, '') 78 | .replace(/^(Tyk\s+)?Pump\s+/i, '') 79 | .replace(/^MDCB\s+/i, '') 80 | .trim(); 81 | 82 | // Match semantic version: X.Y.Z or X.Y or X 83 | const match = cleaned.match(/^(\d+)(?:\.(\d+))?(?:\.(\d+))?/); 84 | 85 | if (!match) return null; 86 | 87 | return { 88 | major: parseInt(match[1], 10), 89 | minor: match[2] ? parseInt(match[2], 10) : null, 90 | patch: match[3] ? 
parseInt(match[3], 10) : null, 91 | original: versionString, 92 | component: component 93 | }; 94 | } 95 | 96 | /** 97 | * Get fix versions from a JIRA ticket 98 | * @param {string} ticketKey - JIRA ticket key 99 | * @returns {Promise} Object with ticket info and fix versions 100 | */ 101 | async function getFixVersions(ticketKey) { 102 | try { 103 | // Fetch ticket with all fields 104 | const ticket = await getIssue(ticketKey); 105 | 106 | const fixVersions = ticket.fields.fixVersions || []; 107 | 108 | return { 109 | ticket: ticketKey, 110 | summary: ticket.fields.summary, 111 | priority: ticket.fields.priority?.name || 'Unknown', 112 | issueType: ticket.fields.issuetype?.name || 'Unknown', 113 | fixVersions: fixVersions.map(v => ({ 114 | name: v.name, 115 | id: v.id, 116 | released: v.released || false, 117 | parsed: parseVersion(v.name) 118 | })) 119 | }; 120 | } catch (error) { 121 | throw new Error(`Failed to fetch JIRA ticket ${ticketKey}: ${error.message}`); 122 | } 123 | } 124 | 125 | async function main() { 126 | const args = process.argv.slice(2); 127 | 128 | 129 | if (args.length < 1) { 130 | console.log('Usage: node get-fixversion.js []'); 131 | console.log('\nExamples:'); 132 | console.log(' # From PR title only'); 133 | console.log(' node get-fixversion.js "TT-12345: Fix authentication bug"'); 134 | console.log(''); 135 | console.log(' # From PR title and branch name (branch name takes precedence)'); 136 | console.log(' node get-fixversion.js "TT-12345: Fix bug" "feature/TT-67890-fix-auth"'); 137 | console.log(''); 138 | console.log(' # Direct ticket key'); 139 | console.log(' node get-fixversion.js TT-12345'); 140 | console.log('\nOutput: JSON object with ticket info and fix versions'); 141 | console.log('\nExit codes:'); 142 | console.log(' 0 - Success (fix versions found)'); 143 | console.log(' 1 - Error (ticket found but no fix versions set)'); 144 | console.log(' 2 - No JIRA ticket found'); 145 | process.exit(1); 146 | } 147 | 148 | const prTitle = args[0]; 149 | const branchName = args[1]; // Optional 150 | 151 | let ticketKey = null; 152 | 153 | // Priority 1: Try to extract from branch name (if provided) 154 | if (branchName) { 155 | ticketKey = extractJiraTicket(branchName); 156 | } 157 | 158 | // Priority 2: Try to extract from PR title (if not found in branch name) 159 | if (!ticketKey && prTitle) { 160 | // Check if prTitle is already a valid ticket key format 161 | if (prTitle.match(/^[A-Z]{2,}-\d+$/)) { 162 | ticketKey = prTitle; 163 | } else { 164 | ticketKey = extractJiraTicket(prTitle); 165 | } 166 | } 167 | 168 | if (!ticketKey) { 169 | console.error(JSON.stringify({ 170 | error: 'No JIRA ticket found in PR title or branch name', 171 | prTitle: prTitle, 172 | branchName: branchName || 'not provided' 173 | })); 174 | process.exit(2); 175 | } 176 | 177 | try { 178 | const result = await getFixVersions(ticketKey); 179 | 180 | // Check if no fix versions found (acceptance criteria: fail if missing) 181 | if (result.fixVersions.length === 0) { 182 | console.error(JSON.stringify({ 183 | error: 'No fix versions found in JIRA ticket', 184 | ticket: ticketKey, 185 | summary: result.summary, 186 | priority: result.priority, 187 | issueType: result.issueType 188 | })); 189 | process.exit(1); 190 | } 191 | 192 | console.log(JSON.stringify(result, null, 2)); 193 | 194 | } catch (error) { 195 | console.error(JSON.stringify({ 196 | error: error.message, 197 | ticket: ticketKey 198 | })); 199 | process.exit(1); 200 | } 201 | 202 | } 203 | 204 | // Export functions for use 
in other scripts 205 | export { 206 | extractJiraTicket, 207 | getFixVersions, 208 | parseVersion, 209 | detectComponent 210 | }; 211 | 212 | 213 | // Run main if executed directly 214 | if (import.meta.url === `file://${process.argv[1]}`) { 215 | main(); 216 | } 217 | 218 | -------------------------------------------------------------------------------- /branch-suggestion/scripts/jira/jira-api.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | import dotenv from 'dotenv'; 3 | import { URL } from 'url'; 4 | import readline from 'readline'; 5 | 6 | // Only load .env if JIRA_TOKEN is not already set (to avoid log output in CI) 7 | if (!process.env.JIRA_TOKEN) { 8 | dotenv.config(); 9 | } 10 | 11 | // JIRA configuration 12 | const JIRA_BASE_URL = 'https://tyktech.atlassian.net'; 13 | const JIRA_TOKEN = process.env.JIRA_TOKEN; // Pre-encoded base64(email:api_token) 14 | 15 | // Debug logging (without exposing sensitive data) 16 | console.error('DEBUG: Environment check:'); 17 | console.error(` JIRA_TOKEN: ${JIRA_TOKEN ? 'SET' : 'EMPTY'}`); 18 | console.error(` All JIRA env vars: ${Object.keys(process.env).filter(k => k.includes('JIRA')).join(', ')}`); 19 | 20 | // Extract JQL from URL or use directly 21 | function extractJQL(input) { 22 | // Check if input is a URL 23 | if (input.includes('atlassian.net') || input.includes('jql=')) { 24 | try { 25 | const url = new URL(input); 26 | const jql = url.searchParams.get('jql'); 27 | if (jql) { 28 | console.log('Extracted JQL from URL:', jql); 29 | return jql; 30 | } 31 | } catch (e) { 32 | // Not a valid URL, might be direct JQL 33 | } 34 | } 35 | return input; 36 | } 37 | 38 | // Make JIRA API request 39 | async function jiraAPI(endpoint, options = {}) { 40 | if (!JIRA_TOKEN) { 41 | throw new Error('JIRA_TOKEN must be set in .env file (pre-encoded base64(email:api_token))'); 42 | } 43 | 44 | // Token is already base64 encoded, use directly 45 | const auth = JIRA_TOKEN; 46 | 47 | const response = await fetch(`${JIRA_BASE_URL}/rest/api/3${endpoint}`, { 48 | ...options, 49 | headers: { 50 | 'Authorization': `Basic ${auth}`, 51 | 'Accept': 'application/json', 52 | 'Content-Type': 'application/json', 53 | ...options.headers 54 | } 55 | }); 56 | 57 | if (!response.ok) { 58 | const error = await response.text(); 59 | throw new Error(`JIRA API Error (${response.status}): ${error}`); 60 | } 61 | 62 | return response.json(); 63 | } 64 | 65 | // Search for issues using JQL 66 | async function searchIssues(jql, startAt = 0, maxResults = 50) { 67 | // Use GET request with query parameters for v3 API 68 | const params = new URLSearchParams({ 69 | jql: jql, 70 | startAt: startAt.toString(), 71 | maxResults: maxResults.toString(), 72 | fields: 'key,summary,status,issuetype,priority,created,assignee,reporter,customfield_10116,customfield_10117,customfield_10118,labels,components' 73 | }); 74 | 75 | return jiraAPI(`/search/jql?${params}`, { 76 | method: 'GET' 77 | }); 78 | } 79 | 80 | // Get issue details 81 | async function getIssue(issueKey) { 82 | return jiraAPI(`/issue/${issueKey}`); 83 | } 84 | 85 | // Format issue for display 86 | function formatIssue(issue, index) { 87 | const lines = []; 88 | lines.push(`${index + 1}. 
[${issue.key}] ${issue.fields?.summary || 'No summary'}`); 89 | lines.push(` Status: ${issue.fields?.status?.name || 'Unknown'}`); 90 | lines.push(` Type: ${issue.fields?.issuetype?.name || 'Unknown'}`); 91 | lines.push(` Priority: ${issue.fields?.priority?.name || 'None'}`); 92 | lines.push(` Created: ${issue.fields?.created ? new Date(issue.fields.created).toLocaleDateString() : 'Unknown'}`); 93 | 94 | if (issue.fields?.assignee) { 95 | lines.push(` Assignee: ${issue.fields.assignee.displayName}`); 96 | } 97 | if (issue.fields?.reporter) { 98 | lines.push(` Reporter: ${issue.fields.reporter.displayName}`); 99 | // Check if created via Zendesk integration 100 | if (issue.fields.reporter.displayName === 'Zendesk Support for Jira') { 101 | lines.push(` Source: Zendesk (check Zendesk Support tab in JIRA)`); 102 | } 103 | } 104 | 105 | // Common custom fields for "Customers Impacted" 106 | const customFields = [ 107 | issue.fields?.customfield_10116, 108 | issue.fields?.customfield_10117, 109 | issue.fields?.customfield_10118 110 | ].filter(Boolean); 111 | 112 | if (customFields.length > 0) { 113 | lines.push(` Customers Impacted: Yes`); 114 | } 115 | 116 | if (issue.fields?.labels && issue.fields.labels.length > 0) { 117 | lines.push(` Labels: ${issue.fields.labels.join(', ')}`); 118 | } 119 | 120 | if (issue.fields?.components && issue.fields.components.length > 0) { 121 | lines.push(` Components: ${issue.fields.components.map(c => c.name).join(', ')}`); 122 | } 123 | 124 | lines.push(` Link: ${JIRA_BASE_URL}/browse/${issue.key}`); 125 | 126 | return lines.join('\n'); 127 | } 128 | 129 | async function main() { 130 | const args = process.argv.slice(2); 131 | 132 | if (args.length === 0) { 133 | console.log('Usage: node jira-api.js ""'); 134 | console.log('\nExamples:'); 135 | console.log(' node jira-api.js "project = TT AND status != closed"'); 136 | console.log(' node jira-api.js "https://tyktech.atlassian.net/jira/software/c/projects/TT/issues/?jql=..."'); 137 | console.log('\nMake sure to set in .env:'); 138 | console.log(' JIRA_TOKEN='); 139 | console.log('\nNote: JIRA_TOKEN should be pre-encoded as base64(email:api_token)'); 140 | process.exit(1); 141 | } 142 | 143 | const input = args.join(' '); 144 | const jql = extractJQL(input); 145 | 146 | console.log('\n🔍 JIRA Issue Search (Direct API)'); 147 | console.log('=' .repeat(80)); 148 | console.log('JQL Query:', jql); 149 | console.log('=' .repeat(80)); 150 | 151 | try { 152 | let allIssues = []; 153 | let startAt = 0; 154 | const pageSize = 50; 155 | let total = 0; 156 | 157 | // Fetch first page 158 | console.log(`\nFetching issues...`); 159 | const firstPage = await searchIssues(jql, startAt, pageSize); 160 | 161 | total = firstPage.total || 0; 162 | allIssues = firstPage.issues || []; 163 | 164 | console.log(`\n📊 Total issues found: ${total}`); 165 | 166 | // Handle pagination 167 | if (total > pageSize) { 168 | const rl = readline.createInterface({ 169 | input: process.stdin, 170 | output: process.stdout 171 | }); 172 | 173 | const answer = await new Promise(resolve => { 174 | rl.question(`\nShowing first ${pageSize} issues. Fetch all ${total} issues? 
(y/n) `, (answer) => { 175 | rl.close(); 176 | resolve(answer.toLowerCase()); 177 | }); 178 | }); 179 | 180 | if (answer === 'y' || answer === 'yes') { 181 | // Fetch remaining pages 182 | startAt = pageSize; 183 | while (startAt < total) { 184 | process.stdout.write(`\rFetching issues ${startAt + 1} to ${Math.min(startAt + pageSize, total)}...`); 185 | const page = await searchIssues(jql, startAt, pageSize); 186 | if (page.issues) { 187 | allIssues = allIssues.concat(page.issues); 188 | } 189 | startAt += pageSize; 190 | } 191 | console.log(' Done!'); 192 | } 193 | } 194 | 195 | // Display results 196 | console.log('\n📋 Issues:\n'); 197 | allIssues.forEach((issue, index) => { 198 | console.log(formatIssue(issue, index)); 199 | console.log(); 200 | }); 201 | 202 | console.log(`✅ Displayed ${allIssues.length} of ${total} total issues`); 203 | 204 | // Export option 205 | if (allIssues.length > 0) { 206 | const rl = readline.createInterface({ 207 | input: process.stdin, 208 | output: process.stdout 209 | }); 210 | 211 | const answer = await new Promise(resolve => { 212 | rl.question('\nExport to CSV? (y/n) ', (answer) => { 213 | rl.close(); 214 | resolve(answer.toLowerCase()); 215 | }); 216 | }); 217 | 218 | if (answer === 'y' || answer === 'yes') { 219 | const csv = exportToCSV(allIssues); 220 | const filename = `jira-export-${Date.now()}.csv`; 221 | await import('fs').then(fs => fs.promises.writeFile(filename, csv)); 222 | console.log(`\n📁 Exported to ${filename}`); 223 | } 224 | } 225 | 226 | } catch (error) { 227 | console.error('\n❌ Error:', error.message); 228 | console.error('\nMake sure you have set JIRA_TOKEN in your .env file (pre-encoded as base64(email:api_token))'); 229 | process.exit(1); 230 | } 231 | } 232 | 233 | // Export issues to CSV 234 | function exportToCSV(issues) { 235 | const headers = ['Key', 'Summary', 'Status', 'Type', 'Priority', 'Created', 'Assignee', 'Reporter', 'Link']; 236 | const rows = [headers.join(',')]; 237 | 238 | for (const issue of issues) { 239 | const row = [ 240 | issue.key, 241 | `"${issue.fields.summary.replace(/"/g, '""')}"`, 242 | issue.fields.status.name, 243 | issue.fields.issuetype.name, 244 | issue.fields.priority?.name || 'None', 245 | new Date(issue.fields.created).toLocaleDateString(), 246 | issue.fields.assignee?.displayName || '', 247 | issue.fields.reporter?.displayName || '', 248 | `${JIRA_BASE_URL}/browse/${issue.key}` 249 | ]; 250 | rows.push(row.join(',')); 251 | } 252 | 253 | return rows.join('\n'); 254 | } 255 | 256 | // Export functions for use in other scripts 257 | export { 258 | jiraAPI, 259 | extractJQL, 260 | searchIssues, 261 | getIssue, 262 | formatIssue, 263 | exportToCSV 264 | }; 265 | 266 | // Run main if executed directly 267 | if (import.meta.url === `file://${process.argv[1]}`) { 268 | main().catch(console.error); 269 | } -------------------------------------------------------------------------------- /branch-suggestion/scripts/common/__tests__/match-branches.test.js: -------------------------------------------------------------------------------- 1 | import { describe, it, expect } from 'vitest'; 2 | import { 3 | generateBranchCandidates, 4 | filterFixVersionsByRepository, 5 | matchBranches, 6 | getBranchReason, 7 | getBranchPriority 8 | } from '../match-branches.js'; 9 | 10 | describe('generateBranchCandidates', () => { 11 | it('should generate candidates for patch release', () => { 12 | const parsed = { major: 5, minor: 8, patch: 1 }; 13 | const candidates = generateBranchCandidates(parsed); 14 | 
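    // A 5.8.1 patch release should suggest the exact version branch first, then the
    // broader minor and major release lines, with master last as the catch-all.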
expect(candidates).toEqual(['release-5.8.1', 'release-5.8', 'release-5', 'master']); 15 | }); 16 | 17 | it('should generate candidates for minor release', () => { 18 | const parsed = { major: 5, minor: 8, patch: 0 }; 19 | const candidates = generateBranchCandidates(parsed); 20 | expect(candidates).toEqual(['release-5.8.0', 'release-5.8', 'release-5', 'master']); 21 | }); 22 | 23 | it('should generate candidates for major release', () => { 24 | const parsed = { major: 5, minor: null, patch: null }; 25 | const candidates = generateBranchCandidates(parsed); 26 | expect(candidates).toEqual(['release-5', 'master']); 27 | }); 28 | 29 | it('should return only master for null input', () => { 30 | const candidates = generateBranchCandidates(null); 31 | expect(candidates).toEqual(['master']); 32 | }); 33 | 34 | it('should remove duplicates', () => { 35 | const parsed = { major: 5, minor: 0, patch: 0 }; 36 | const candidates = generateBranchCandidates(parsed); 37 | // Should not have duplicate 'release-5' 38 | expect(candidates.filter(c => c === 'release-5').length).toBe(1); 39 | }); 40 | }); 41 | 42 | describe('filterFixVersionsByRepository', () => { 43 | it('should filter versions by component - TIB', () => { 44 | const fixVersions = [ 45 | { name: 'TIB 1.7.0', parsed: { component: ['tyk-identity-broker'] } }, 46 | { name: 'Tyk 5.8.1', parsed: { component: ['tyk', 'tyk-analytics'] } }, 47 | { name: '1.0.0', parsed: { component: [] } } 48 | ]; 49 | 50 | const filtered = filterFixVersionsByRepository(fixVersions, 'TykTechnologies/tyk-identity-broker'); 51 | expect(filtered).toHaveLength(2); // TIB version + no-component version 52 | expect(filtered[0].name).toBe('TIB 1.7.0'); 53 | expect(filtered[1].name).toBe('1.0.0'); 54 | }); 55 | 56 | it('should filter versions by component - Tyk', () => { 57 | const fixVersions = [ 58 | { name: 'TIB 1.7.0', parsed: { component: ['tyk-identity-broker'] } }, 59 | { name: 'Tyk 5.8.1', parsed: { component: ['tyk', 'tyk-analytics'] } }, 60 | ]; 61 | 62 | const filtered = filterFixVersionsByRepository(fixVersions, 'TykTechnologies/tyk'); 63 | expect(filtered).toHaveLength(1); 64 | expect(filtered[0].name).toBe('Tyk 5.8.1'); 65 | }); 66 | 67 | it('should include all versions with empty component', () => { 68 | const fixVersions = [ 69 | { name: '1.0.0', parsed: { component: [] } }, 70 | { name: '2.0.0', parsed: { component: [] } } 71 | ]; 72 | 73 | const filtered = filterFixVersionsByRepository(fixVersions, 'TykTechnologies/any-repo'); 74 | expect(filtered).toHaveLength(2); 75 | }); 76 | 77 | it('should return all versions when no repository specified', () => { 78 | const fixVersions = [ 79 | { name: 'TIB 1.7.0', parsed: { component: ['tyk-identity-broker'] } }, 80 | { name: 'Tyk 5.8.1', parsed: { component: ['tyk'] } } 81 | ]; 82 | 83 | const filtered = filterFixVersionsByRepository(fixVersions, null); 84 | expect(filtered).toHaveLength(2); 85 | }); 86 | }); 87 | 88 | describe('getBranchPriority', () => { 89 | it('should return required for master', () => { 90 | const priority = getBranchPriority('master', { parsed: { major: 5, minor: 8, patch: 1 } }); 91 | expect(priority).toBe('required'); 92 | }); 93 | 94 | it('should return required for exact patch version branch', () => { 95 | const priority = getBranchPriority('release-5.8.1', { parsed: { major: 5, minor: 8, patch: 1 } }); 96 | expect(priority).toBe('required'); 97 | }); 98 | 99 | it('should return required for patch release branch', () => { 100 | const priority = getBranchPriority('release-5.8', { parsed: { 
major: 5, minor: 8, patch: 1 } }); 101 | expect(priority).toBe('required'); 102 | }); 103 | 104 | it('should return recommended for minor release branch', () => { 105 | const priority = getBranchPriority('release-5.8', { parsed: { major: 5, minor: 8, patch: 0 } }); 106 | expect(priority).toBe('recommended'); 107 | }); 108 | 109 | it('should return recommended for major release branch', () => { 110 | const priority = getBranchPriority('release-5', { parsed: { major: 5, minor: 8, patch: 1 } }); 111 | expect(priority).toBe('recommended'); 112 | }); 113 | }); 114 | 115 | describe('getBranchReason', () => { 116 | it('should return reason for master branch', () => { 117 | const reason = getBranchReason('master', { parsed: { major: 5, minor: 8, patch: 1 } }); 118 | expect(reason).toContain('Main development branch'); 119 | }); 120 | 121 | it('should return reason for exact patch version branch', () => { 122 | const reason = getBranchReason('release-5.8.1', { 123 | name: '5.8.1', 124 | parsed: { major: 5, minor: 8, patch: 1 } 125 | }); 126 | expect(reason).toContain('Exact version branch'); 127 | expect(reason).toContain('5.8.1'); 128 | }); 129 | 130 | it('should return reason for patch release', () => { 131 | const reason = getBranchReason('release-5.8', { 132 | name: '5.8.1', 133 | parsed: { major: 5, minor: 8, patch: 1 } 134 | }); 135 | expect(reason).toContain('required for creating 5.8.1'); 136 | }); 137 | 138 | it('should return reason for minor release', () => { 139 | const reason = getBranchReason('release-5.8', { 140 | name: '5.8.0', 141 | parsed: { major: 5, minor: 8, patch: 0 } 142 | }); 143 | expect(reason).toContain('5.8.x releases'); 144 | }); 145 | }); 146 | 147 | describe('matchBranches', () => { 148 | it('should match fix versions to branches', () => { 149 | const fixVersions = [ 150 | { 151 | name: '5.8.1', 152 | parsed: { major: 5, minor: 8, patch: 1, component: [] } 153 | } 154 | ]; 155 | const repoBranches = [ 156 | { name: 'master' }, 157 | { name: 'release-5.8' }, 158 | { name: 'release-5' } 159 | ]; 160 | 161 | const results = matchBranches(fixVersions, repoBranches); 162 | expect(results).toHaveLength(1); 163 | expect(results[0].branches).toHaveLength(3); 164 | expect(results[0].branches[0].branch).toBe('release-5.8'); 165 | expect(results[0].branches[0].priority).toBe('required'); 166 | }); 167 | 168 | it('should return warning when no release branches found', () => { 169 | const fixVersions = [ 170 | { name: '5.8.1', parsed: { major: 5, minor: 8, patch: 1, component: [] } } 171 | ]; 172 | const repoBranches = [ 173 | { name: 'master' } 174 | ]; 175 | 176 | const results = matchBranches(fixVersions, repoBranches); 177 | expect(results[0].warning).toBeDefined(); 178 | expect(results[0].warning).toContain('Expected release branches not found'); 179 | }); 180 | 181 | it('should handle multiple fix versions', () => { 182 | const fixVersions = [ 183 | { name: '5.8.1', parsed: { major: 5, minor: 8, patch: 1, component: [] } }, 184 | { name: '5.9.0', parsed: { major: 5, minor: 9, patch: 0, component: [] } } 185 | ]; 186 | const repoBranches = [ 187 | { name: 'master' }, 188 | { name: 'release-5.8' }, 189 | { name: 'release-5.9' } 190 | ]; 191 | 192 | const results = matchBranches(fixVersions, repoBranches); 193 | expect(results).toHaveLength(2); 194 | }); 195 | 196 | it('should only match existing branches', () => { 197 | const fixVersions = [ 198 | { name: '5.8.1', parsed: { major: 5, minor: 8, patch: 1, component: [] } } 199 | ]; 200 | const repoBranches = [ 201 | { name: 
'master' }, 202 | { name: 'release-5.7' } // Different version 203 | ]; 204 | 205 | const results = matchBranches(fixVersions, repoBranches); 206 | expect(results[0].branches).toHaveLength(1); 207 | expect(results[0].branches[0].branch).toBe('master'); 208 | }); 209 | 210 | it('should prefer exact version match when available', () => { 211 | const fixVersions = [ 212 | { name: '5.10.1', parsed: { major: 5, minor: 10, patch: 1, component: [] } } 213 | ]; 214 | const repoBranches = [ 215 | { name: 'master' }, 216 | { name: 'release-5.10' }, 217 | { name: 'release-5.10.1' } 218 | ]; 219 | 220 | const results = matchBranches(fixVersions, repoBranches); 221 | expect(results[0].branches).toHaveLength(3); 222 | expect(results[0].branches[0].branch).toBe('release-5.10.1'); 223 | expect(results[0].branches[0].priority).toBe('required'); 224 | expect(results[0].branches[1].branch).toBe('release-5.10'); 225 | expect(results[0].branches[1].priority).toBe('required'); 226 | expect(results[0].branches[2].branch).toBe('master'); 227 | }); 228 | 229 | it('should fallback to minor version when exact match not available', () => { 230 | const fixVersions = [ 231 | { name: '5.10.1', parsed: { major: 5, minor: 10, patch: 1, component: [] } } 232 | ]; 233 | const repoBranches = [ 234 | { name: 'master' }, 235 | { name: 'release-5.10' } 236 | ]; 237 | 238 | const results = matchBranches(fixVersions, repoBranches); 239 | expect(results[0].branches).toHaveLength(2); 240 | expect(results[0].branches[0].branch).toBe('release-5.10'); 241 | expect(results[0].branches[0].priority).toBe('required'); 242 | }); 243 | }); 244 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Re-usable github actions 2 | 3 | A collection of shared GitHub actions and workflows used in our org. 4 | 5 | # Composite actions 6 | 7 | ## PR Checkout 8 | 9 | The checkout PR action will fetch only the commits that belong to the PR. 10 | This is required for various code analysis tooling, including sonarcloud. 11 | 12 | Example usage: 13 | 14 | ```yaml 15 | jobs: 16 | golangci-lint: 17 | runs-on: ubuntu-latest 18 | steps: 19 | - name: Checkout PR 20 | uses: TykTechnologies/github-actions/.github/actions/checkout-pr@main 21 | ``` 22 | 23 | The main use case behind this is to make sure the HEAD and the current PR 24 | state can be compared, and that we don't fetch the full git history for 25 | the checkout. This supports some of our custom actions like `godoc`. 26 | 27 | Supports: godoc, sonarcloud, dashboard (bindata size). 28 | 29 | Adoption: gateway, dashboard, reuse in shared CI workflows. 30 | 31 | Source: [/.github/actions/checkout-pr/action.yml](/.github/actions/checkout-pr/action.yml) 32 | 33 | ## Github to slack 34 | 35 | Maps a GitHub email to a Slack user, based on a key-value map. The map needs to be maintained manually. 36 | 37 | Source: [/.github/actions/github-to-slack/action.yaml](/.github/actions/github-to-slack/action.yaml) 38 | 39 | ## Calculate tests tags 40 | 41 | Calculates the corresponding CI image tags based on GitHub events for a group of Tyk repositories. 42 | 43 | Source: [/.github/actions/latest-versions/action.yaml](/.github/actions/latest-versions/action.yaml) 44 | 45 | # Reusable workflows 46 | 47 | ## CI tooling 48 | 49 | We build a docker image from the CI pipeline in this repository that 50 | builds and installs all the CI tooling needed for the test pipelines.
51 | 52 | Providing the docker image avoids continuous compilation of the tools via 53 | `go install` or `go get`, decreasing resource usage on GitHub 54 | Actions. 55 | 56 | All the tools are built using a recent go version and `CGO_ENABLED=0`, 57 | enabling reuse for old releases. It's still possible to version the 58 | tooling against releases either inside the image, or by creating new 59 | versions of the docker image in the future. 60 | 61 | The images built are: 62 | 63 | - `tykio/ci-tools:latest`. 64 | 65 | The image is rebuilt weekly and on triggers from `exp/cmd`. 66 | 67 | To use the CI tools from any GitHub pipeline: 68 | 69 | ```yaml 70 | - name: 'Extract tykio/ci-tools:${{ matrix.tag }}' 71 | uses: shrink/actions-docker-extract@v3 72 | with: 73 | image: tykio/ci-tools:${{ matrix.tag }} 74 | path: /usr/local/bin/. 75 | destination: /usr/local/bin 76 | 77 | - run: gotestsum --version 78 | ``` 79 | 80 | The action 81 | [shrink/actions-docker-extract](https://github.com/shrink/actions-docker-extract) 82 | is used to download and extract the CI tools binaries into your CI 83 | workflow. The set of tools being provided can be adjusted in 84 | [docker/tools/latest/Dockerfile](https://github.com/TykTechnologies/tyk-github-actions/blob/main/docker/tools/latest/Dockerfile). 85 | 86 | A local Taskfile is available in `docker/tools/` that allows you to build 87 | the tools image locally. Changes are tested in PRs. 88 | 89 | Adoption: Internal use for PR workflows on the repository. 90 | 91 | Source: [/.github/workflows/ci-docker-tools.yml](/.github/workflows/ci-docker-tools.yml) 92 | 93 | ## CI lint 94 | 95 | To ensure a baseline of quality, a lint action is run 96 | that checks for syntax issues and YAML issues, and validates GitHub actions 97 | in the repository. It's not complete or fully accurate by any measure, 98 | but it enforces conventions for the work being added in PRs. 99 | 100 | It's generally incomplete, but extensions are welcome. 101 | 102 | The action regenerates `README.md` from the docs/ folder contents. 103 | 104 | To invoke the linter locally, use `task lint`. 105 | 106 | Adoption: Internal use for PR workflows on the repository. 107 | 108 | Source: [/.github/workflows/ci-lint.yml](/.github/workflows/ci-lint.yml) 109 | 110 | ## Create or update a GitHub comment 111 | 112 | Undocumented action. 113 | 114 | Source: [/.github/workflows/create-update-comment.yaml](/.github/workflows/create-update-comment.yaml) 115 | 116 | ## Print Go API Changes 117 | 118 | For a PR, the action will print the changes in `go doc` output. This 119 | surfaces API changes (function removals, renames, additions), as well as 120 | comment changes. 121 | 122 | Example usage: 123 | 124 | ```yaml 125 | jobs: 126 | godoc: 127 | uses: TykTechnologies/github-actions/.github/workflows/godoc.yml@main 128 | secrets: 129 | ORG_GH_TOKEN: ${{ secrets.ORG_GH_TOKEN }} 130 | ``` 131 | 132 | Adoption: Gateway, Dashboard. 133 | 134 | Source: [/.github/workflows/godoc.yml](/.github/workflows/godoc.yml) 135 | 136 | ## Golang CI 137 | 138 | Popular linter for Go lang with good defaults. 139 | 140 | Example usage: 141 | 142 | ```yaml 143 | jobs: 144 | golangci: 145 | uses: TykTechnologies/github-actions/.github/workflows/golangci.yaml@main 146 | with: 147 | main_branch: master 148 | ``` 149 | 150 | Source: [/.github/workflows/golangci.yaml](/.github/workflows/golangci.yaml) 151 | 152 | ## Go test 153 | 154 | Undocumented action.
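A minimal invocation sketch, assuming the workflow can simply be called with inherited secrets (the workflow is undocumented, so the absence of required inputs here is an assumption; check the workflow source for the actual interface):

```yaml
jobs:
  gotest:
    uses: TykTechnologies/github-actions/.github/workflows/gotest.yaml@main
    secrets: inherit
```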
155 | 156 | Source: [/.github/workflows/gotest.yaml](/.github/workflows/gotest.yaml) 157 | 158 | ## Go govulncheck 159 | 160 | Official Go Vulnerability Management. 161 | 162 | See: https://go.dev/blog/vuln 163 | 164 | Example usage: 165 | 166 | ```yaml 167 | jobs: 168 | govulncheck: 169 | uses: TykTechnologies/github-actions/.github/workflows/govulncheck.yaml@main 170 | ``` 171 | 172 | Source: [/.github/workflows/govulncheck.yaml](/.github/workflows/govulncheck.yaml) 173 | 174 | ## JIRA linter 175 | 176 | Adoption: Gateway, Dashboard. 177 | 178 | Source: [/.github/workflows/jira-lint.yaml](/.github/workflows/jira-lint.yaml) 179 | 180 | ## Nancy Scan 181 | 182 | OSS scanner that helps find CVEs in Go dependencies. 183 | 184 | Example usage: 185 | 186 | ```yaml 187 | jobs: 188 | nancy: 189 | strategy: 190 | fail-fast: false 191 | matrix: 192 | package: 193 | - controller 194 | - dashboard 195 | - billing 196 | - monitor 197 | - pkg 198 | 199 | uses: TykTechnologies/github-actions/.github/workflows/nancy.yaml@main 200 | with: 201 | dir: ${{ matrix.package }} 202 | secrets: inherit 203 | ``` 204 | 205 | Source: [/.github/workflows/nancy.yaml](/.github/workflows/nancy.yaml) 206 | 207 | ## OWASP scanner 208 | 209 | Example usage: 210 | 211 | ```yaml 212 | jobs: 213 | owasp: 214 | uses: TykTechnologies/github-actions/.github/workflows/owasp.yaml@main 215 | with: 216 | target: http://staging-url.com 217 | ``` 218 | 219 | Source: [/.github/workflows/owasp.yaml](/.github/workflows/owasp.yaml) 220 | 221 | ## Release bot 222 | 223 | ``` 224 | name: Release bot 225 | 226 | on: 227 | issue_comment: 228 | types: [created] 229 | 230 | jobs: 231 | release_bot: 232 | uses: TykTechnologies/github-actions/.github/workflows/release-bot.yaml@main 233 | ``` 234 | 235 | ## PR Agent 236 | 237 | Undocumented action. 238 | 239 | Source: [/.github/workflows/pr-agent.yaml](/.github/workflows/pr-agent.yaml) 240 | 241 | ## SBOM - source bill of materials (dev) 242 | 243 | Undocumented action. 244 | 245 | Source: [/.github/workflows/sbom-dev.yaml](/.github/workflows/sbom-dev.yaml) 246 | 247 | ## SBOM - source bill of materials 248 | 249 | Adoption: Gateway, Dashboard. 250 | 251 | Source: [/.github/workflows/sbom.yaml](/.github/workflows/sbom.yaml) 252 | 253 | ## Semgrep 254 | 255 | CodeQL-like OSS linter. 256 | 257 | Example usage: 258 | 259 | ```yaml 260 | jobs: 261 | semgrep: 262 | uses: TykTechnologies/github-actions/.github/workflows/semgrep.yaml@main 263 | ``` 264 | 265 | Usage: unknown; Status: a bit out of date. 266 | 267 | Recent images use `semgrep/semgrep`, while this workflow still uses 268 | `returntocorp/semgrep`. The two look to be compatible at the time of writing. 269 | 270 | If you'd like to use semgrep: 271 | 272 | - reach out to @titpetric if you need working-user assistance, 273 | - https://github.com/TykTechnologies/exp/tree/main/lsc 274 | - https://github.com/TykTechnologies/exp/actions/workflows/semgrep.yml 275 | 276 | The current state makes it possible to automate refactorings with semgrep, using 277 | GitHub Actions automation to open PRs against target repositories.
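As a sketch of what drives such a refactor, a semgrep rule pairs a match pattern with a `fix` rewrite; the rule id, package and function names below are hypothetical:

```yaml
rules:
  - id: replace-deprecated-helper   # hypothetical rule id
    languages: [go]
    severity: INFO
    message: Use the new helper instead of the deprecated one.
    pattern: oldpkg.OldHelper($X)   # hypothetical call being replaced
    fix: newpkg.NewHelper($X)       # hypothetical replacement
```

Applying a rule like this with semgrep's autofix support rewrites the code in place, and the GitHub Actions automation then packages the resulting diff into a PR.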
278 | 279 | Example outputs: 280 | 281 | - https://github.com/TykTechnologies/tyk/pull/6380 282 | - https://github.com/TykTechnologies/tyk-analytics/pull/4051 283 | 284 | We see several problem areas where semgrep could be used more extensively: 285 | 286 | - code cleanups to enforce consistent style 287 | - large scale refactorings 288 | - ensuring code style compliance with new contributions 289 | - detecting bugs based on our own rules/bugs occurring 290 | 291 | Source: [/.github/workflows/semgrep.yaml](/.github/workflows/semgrep.yaml) 292 | 293 | ## SonarCloud 294 | 295 | Put it after Golang CI to automatically upload its reports to SonarCloud. 296 | 297 | Example usage: 298 | 299 | ```yaml 300 | jobs: 301 | golangci: 302 | uses: TykTechnologies/github-actions/.github/workflows/sonarcloud.yaml@main 303 | with: 304 | main_branch: master 305 | exclusions: "" 306 | secrets: inherit 307 | ``` 308 | 309 | Source: [/.github/workflows/sonarcloud.yaml](/.github/workflows/sonarcloud.yaml) 310 | 311 | ## SentinelOne CNS Scans 312 | 313 | This runs the S1 scans and publishes the results to the S1 console. 314 | It has three available scanners: 315 | - Secret scanner 316 | - IaC scanner 317 | - Vulnerability scanner 318 | 319 | By default, all three are enabled, but this can be controlled by setting the flags appropriately 320 | when calling the workflow. 321 | Also, keep in mind that the secret scanner runs only on pull request events, as the scanner only supports 322 | publishing results on pull requests. 323 | 324 | Example usage: 325 | 326 | ```yaml 327 | name: SentinelOne CNS Scan 328 | 329 | on: 330 | pull_request: 331 | types: [ opened, reopened, synchronize ] 332 | branches: [ master ] 333 | 334 | jobs: 335 | s1_scanner: 336 | uses: TykTechnologies/github-actions/.github/workflows/s1-cns-scan.yml@main 337 | with: 338 | iac_enabled: false 339 | tag: service:vulnscan 340 | scope_type: ACCOUNT 341 | secrets: 342 | S1_API_TOKEN: ${{ secrets.S1_API_TOKEN }} 343 | CONSOLE_URL: ${{ secrets.S1_CONSOLE_URL }} 344 | SCOPE_ID: ${{ secrets.S1_SCOPE_ID }} 345 | ``` 346 | 347 | Source: [/.github/workflows/s1-cns-scan.yml](/.github/workflows/s1-cns-scan.yml) 348 | -------------------------------------------------------------------------------- /branch-suggestion/README.md: -------------------------------------------------------------------------------- 1 | # Branch Suggestion Automation 2 | 3 | Automated branch suggestion tool that analyzes JIRA fix versions and suggests appropriate merge target branches for pull requests based on the repository's branching strategy. 4 | 5 | ## Features 6 | 7 | - Extracts JIRA ticket from PR title or branch name 8 | - Fetches fix versions from JIRA 9 | - Matches fix versions to repository branches using deterministic rules 10 | - Posts/updates a comment on PRs with suggested merge targets 11 | - Supports both local testing (Visor) and GitHub Actions integration 12 | 13 | ## Quick Start 14 | 15 | ### For Repository Maintainers 16 | 17 | Add this workflow to your repository to enable automatic branch suggestions: 18 | 19 | 1. Copy `.github/workflows/example-usage.yml.template` to your repository as `.github/workflows/branch-suggestion.yml` 20 | 21 | 2. Add required secrets to your repository (Settings → Secrets and variables → Actions): 22 | - `JIRA_API_TOKEN`: JIRA API token (generate at https://id.atlassian.com/manage-profile/security/api-tokens) 23 | 24 | 3. That's it! The workflow will automatically analyze PRs and post branch suggestions.
25 | 26 | ### Example Workflow Configuration 27 | 28 | ```yaml 29 | # .github/workflows/branch-suggestion.yml 30 | name: PR Branch Suggestions 31 | 32 | on: 33 | pull_request: 34 | types: [opened, synchronize, reopened] 35 | 36 | permissions: 37 | pull-requests: write 38 | contents: read 39 | 40 | jobs: 41 | branch-suggestions: 42 | uses: TykTechnologies/REFINE/.github/workflows/branch-suggestion.yml@main 43 | secrets: 44 | JIRA_EMAIL: ${{ secrets.JIRA_EMAIL }} 45 | JIRA_API_TOKEN: ${{ secrets.JIRA_API_TOKEN }} 46 | ``` 47 | 48 | ## How It Works 49 | 50 | ### 1. JIRA Ticket Extraction 51 | 52 | The tool looks for JIRA ticket keys in the PR title or branch name: 53 | - Pattern: `TT-12345`, `TYK-456`, etc. 54 | - Examples: 55 | - "TT-12345: Fix authentication bug" → `TT-12345` 56 | - "feature/TT-12345-fix-auth" → `TT-12345` 57 | - "Fix auth (TT-12345)" → `TT-12345` 58 | 59 | ### 2. Fix Version Fetching 60 | 61 | Retrieves the "Fix Version/s" field from the JIRA ticket, which determines which product versions the fix should be merged into. 62 | 63 | ### 3. Branch Matching 64 | 65 | Uses deterministic rules to match fix versions to branches: 66 | 67 | #### Priority Levels 68 | - **Required** (✅): Exact version match or release branch 69 | - **Recommended** (📌): Minor version match or LTS branch 70 | - **Optional** (💡): Major version match or main branch 71 | 72 | #### Matching Rules 73 | 1. **Exact Match**: `5.8.1` → `release-5.8.1` (Required) 74 | 2. **Minor Version**: `5.8.1` → `release-5.8` (Required) 75 | 3. **Major Version**: `5.8.1` → `release-5` (Recommended) 76 | 4. **LTS Branch**: `5.3.x LTS` → `release-5.3-lts` (Required) 77 | 5. **Main Branch**: Always suggested as Optional 78 | 79 | ### 4. PR Comment 80 | 81 | Posts a comment on the PR with: 82 | - JIRA ticket information 83 | - List of suggested branches organized by fix version 84 | - Priority indicators (required/recommended/optional) 85 | - Merging instructions 86 | 87 | ## Local Testing 88 | 89 | ### Prerequisites 90 | 91 | 1. Install dependencies: 92 | ```bash 93 | npm install 94 | ``` 95 | 96 | 2. Install Visor (if not already installed): 97 | ```bash 98 | npm install -g @probelabs/visor 99 | ``` 100 | 101 | 3. 
Set up environment variables: 102 | ```bash 103 | export JIRA_EMAIL="your-email@example.com" 104 | export JIRA_API_TOKEN="your-jira-api-token" 105 | ``` 106 | 107 | ### Test Individual Scripts 108 | 109 | #### Test JIRA Fix Version Fetcher 110 | 111 | ```bash 112 | # Direct ticket key 113 | node scripts/jira/get-fixedversion.js TT-12345 114 | 115 | # From PR title 116 | node scripts/jira/get-fixedversion.js "TT-12345: Fix authentication bug" 117 | 118 | # From branch name 119 | node scripts/jira/get-fixedversion.js "feature/TT-12345-fix-auth" 120 | ``` 121 | 122 | **Expected output:** 123 | ```json 124 | { 125 | "ticket": "TT-12345", 126 | "summary": "Fix authentication bug", 127 | "priority": "High", 128 | "issueType": "Bug", 129 | "fixVersions": [ 130 | { 131 | "name": "5.8.1", 132 | "id": "12345", 133 | "released": false, 134 | "parsed": { 135 | "major": 5, 136 | "minor": 8, 137 | "patch": 1, 138 | "original": "5.8.1" 139 | } 140 | } 141 | ] 142 | } 143 | ``` 144 | 145 | **Exit codes:** 146 | - `0`: Success (fix versions found) 147 | - `1`: Error (ticket found but no fix versions set) 148 | - `2`: No JIRA ticket found 149 | 150 | #### Test Branch Matcher 151 | 152 | ```bash 153 | # Create test data 154 | echo '{ 155 | "ticket": "TT-12345", 156 | "fixVersions": [ 157 | {"name": "5.8.1", "parsed": {"major": 5, "minor": 8, "patch": 1}} 158 | ] 159 | }' > /tmp/jira.json 160 | 161 | echo '[ 162 | {"name": "main"}, 163 | {"name": "release-5.8"}, 164 | {"name": "release-5.8.1"} 165 | ]' > /tmp/branches.json 166 | 167 | # Run matcher 168 | node scripts/common/match-branches.js \ 169 | "$(cat /tmp/jira.json)" \ 170 | "$(cat /tmp/branches.json)" 171 | ``` 172 | 173 | #### Test PR Comment Posting 174 | 175 | ```bash 176 | # Create test comment 177 | echo "## Test Comment\nThis is a test." > /tmp/comment.md 178 | 179 | # Post to PR (requires GITHUB_TOKEN) 180 | export GITHUB_TOKEN="your-github-token" 181 | 182 | node scripts/github/add-pr-comment.js \ 183 | TykTechnologies/tyk \ 184 | 123 \ 185 | --file /tmp/comment.md 186 | ``` 187 | 188 | ### Test Complete Pipeline with Visor 189 | 190 | ```bash 191 | # Test with a real ticket 192 | env JIRA_EMAIL="your-email@example.com" \ 193 | JIRA_API_TOKEN="your-token" \ 194 | PR_TITLE="TT-12345" \ 195 | REPOSITORY="TykTechnologies/tyk" \ 196 | visor --config branch_suggestion.yml 197 | 198 | # Test with TIB ticket (different version format) 199 | env JIRA_EMAIL="your-email@example.com" \ 200 | JIRA_API_TOKEN="your-token" \ 201 | PR_TITLE="TT-5433" \ 202 | REPOSITORY="TykTechnologies/tyk-identity-broker" \ 203 | visor --config branch_suggestion.yml 204 | ``` 205 | 206 | **Expected output:** 207 | ``` 208 | ======================================== 209 | BRANCH SUGGESTION ANALYSIS 210 | ======================================== 211 | 📝 PR Title: TT-12345 212 | 🌿 Branch: feature/TT-12345-test 213 | 📦 Repository: TykTechnologies/tyk 214 | ======================================== 215 | 216 | 🔍 STEP 1: Fetching JIRA ticket and fix versions... 217 | ✅ JIRA Ticket: TT-12345 218 | Summary: Fix authentication bug 219 | Fix Versions: 1 220 | - 5.8.1 (released: false) 221 | 222 | 🔍 STEP 2: Fetching repository branches... 223 | ✅ Branches fetched: 45 224 | - main 225 | - release-5.8 226 | - release-5.8.1 227 | ... 228 | 229 | 🔍 STEP 3: Matching branches using deterministic rules... 
230 | ✅ Branch matching complete 231 | 232 | ======================================== 233 | ✅ ANALYSIS COMPLETE 234 | ======================================== 235 | 236 | 📋 JIRA Ticket: TT-12345 237 | Summary: Fix authentication bug 238 | 239 | 🎯 Fix Version: 5.8.1 240 | ✅ release-5.8.1 - Exact version match 241 | ✅ release-5.8 - Minor version match 242 | 📌 release-5 - Major version match 243 | 💡 main - Main development branch 244 | ``` 245 | 246 | ## Project Structure 247 | 248 | ``` 249 | . 250 | ├── .github/workflows/ 251 | │ ├── branch-suggestion.yml # Reusable GitHub Actions workflow 252 | │ └── example-usage.yml.template # Template for other repositories 253 | ├── branch_suggestion.yml # Visor pipeline configuration 254 | ├── scripts/ 255 | │ ├── jira/ 256 | │ │ ├── get-fixedversion.js # Extract ticket and fetch fix versions 257 | │ │ └── jira-api.js # JIRA API wrapper 258 | │ ├── github/ 259 | │ │ ├── add-pr-comment.js # Create/update PR comments 260 | │ │ └── github-api.js # GitHub API wrapper 261 | │ └── common/ 262 | │ └── match-branches.js # Branch matching logic 263 | └── schemas/ 264 | └── branch-suggestion.json # JSON schema for output 265 | ``` 266 | 267 | ## Configuration 268 | 269 | ### Environment Variables 270 | 271 | #### Required 272 | - `JIRA_EMAIL`: JIRA account email 273 | - `JIRA_API_TOKEN`: JIRA API token 274 | 275 | #### Optional (for PR comment posting) 276 | - `GITHUB_TOKEN`: GitHub token (automatically provided in GitHub Actions) 277 | - `PR_NUMBER`: Pull request number 278 | - `REPOSITORY`: Repository in format `owner/repo` 279 | 280 | ### Visor Configuration 281 | 282 | The `branch_suggestion.yml` file contains the Visor pipeline configuration with two main steps: 283 | 284 | 1. **analyze-and-suggest**: Combines JIRA fetching, branch matching, and output generation 285 | 2. **post-pr-comment**: Posts or updates a PR comment (only runs with `--tags remote`) 286 | 287 | ### Branching Strategy Support 288 | 289 | The tool automatically adapts to different branching strategies: 290 | 291 | #### Release Branches 292 | - `release-5.8.1` (patch releases) 293 | - `release-5.8` (minor releases) 294 | - `release-5` (major releases) 295 | 296 | #### LTS Branches 297 | - `release-5.3-lts` 298 | - `lts-5.3` 299 | - `5.3-lts` 300 | 301 | #### Special Branches 302 | - `main` / `master` (main development) 303 | - Feature branches (not suggested) 304 | - Hotfix branches (not suggested) 305 | 306 | ## Troubleshooting 307 | 308 | ### No JIRA ticket found 309 | 310 | **Symptom:** Exit code 2, message "No JIRA ticket found in input" 311 | 312 | **Solution:** Ensure PR title or branch name contains a JIRA ticket key in format `TT-12345` 313 | 314 | ### No fix versions set 315 | 316 | **Symptom:** Exit code 1, message "No fix versions found in JIRA ticket" 317 | 318 | **Solution:** Set the "Fix Version/s" field in JIRA before creating the PR 319 | 320 | ### JIRA API authentication failed 321 | 322 | **Symptom:** Error message about authentication 323 | 324 | **Solution:** 325 | 1. Verify `JIRA_EMAIL` matches your JIRA account email 326 | 2. Generate a new API token at https://id.atlassian.com/manage-profile/security/api-tokens 327 | 3. Ensure the token has not expired 328 | 329 | ### GitHub API rate limiting 330 | 331 | **Symptom:** Error 403 from GitHub API 332 | 333 | **Solution:** 334 | 1. Use a GitHub token with sufficient rate limits 335 | 2. In GitHub Actions, the automatic `GITHUB_TOKEN` has higher rate limits 336 | 3. 
For local testing, create a personal access token with `repo` scope 337 | 338 | ### Control characters in output 339 | 340 | **Symptom:** JSON parse error about control characters 341 | 342 | **Solution:** This has been fixed in the latest version by using regex-based extraction instead of JSON parsing. Update to the latest version of the tool. 343 | 344 | ### dotenv logging pollution 345 | 346 | **Symptom:** `[dotenv@17.2.1]` messages in output 347 | 348 | **Solution:** Already fixed by setting `DOTENV_LOG_LEVEL=error` before loading dotenv. Update to the latest version. 349 | 350 | ## Output Schema 351 | 352 | The tool outputs JSON conforming to the schema in `schemas/branch-suggestion.json`: 353 | 354 | ```json 355 | { 356 | "ticket": "TT-12345", 357 | "summary": "Fix authentication bug", 358 | "priority": "High", 359 | "issueType": "Bug", 360 | "fixVersions": [...], 361 | "matchResults": [ 362 | { 363 | "fixVersion": "5.8.1", 364 | "branches": [ 365 | { 366 | "branch": "release-5.8.1", 367 | "reason": "Exact version match", 368 | "priority": "required" 369 | } 370 | ] 371 | } 372 | ], 373 | "markdown": "# Branch Suggestions..." 374 | } 375 | ``` 376 | 377 | ## Contributing 378 | 379 | ### Adding New Branching Patterns 380 | 381 | Edit `scripts/common/match-branches.js` and add new patterns to the `matchBranches` function. 382 | 383 | ### Modifying Priority Rules 384 | 385 | Update the priority assignment logic in `scripts/common/match-branches.js`. 386 | 387 | ### Customizing PR Comment Format 388 | 389 | Modify the markdown template generation in `scripts/common/match-branches.js`. 390 | 391 | ## API Documentation 392 | 393 | ### `get-fixedversion.js` 394 | 395 | ```javascript 396 | import { extractJiraTicket, getFixVersions } from './scripts/jira/get-fixedversion.js'; 397 | 398 | // Extract ticket from text 399 | const ticket = extractJiraTicket('TT-12345: Fix bug'); 400 | // Returns: 'TT-12345' 401 | 402 | // Get fix versions 403 | const result = await getFixVersions('TT-12345'); 404 | // Returns: { ticket, summary, priority, issueType, fixVersions } 405 | ``` 406 | 407 | ### `match-branches.js` 408 | 409 | ```javascript 410 | import { matchBranches } from './scripts/common/match-branches.js'; 411 | 412 | const jiraData = { ticket: 'TT-12345', fixVersions: [...] 
}; 413 | const branches = [{ name: 'main' }, { name: 'release-5.8' }]; 414 | 415 | const result = matchBranches(jiraData, branches); 416 | // Returns: { ticket, summary, matchResults, markdown } 417 | ``` 418 | 419 | ### `add-pr-comment.js` 420 | 421 | ```javascript 422 | import { addOrUpdateComment } from './scripts/github/add-pr-comment.js'; 423 | 424 | const result = await addOrUpdateComment( 425 | 'TykTechnologies', // owner 426 | 'tyk', // repo 427 | 123, // PR number 428 | '## Comment body', // markdown content 429 | '' // unique identifier 430 | ); 431 | ``` 432 | 433 | ## License 434 | 435 | [Your License Here] 436 | 437 | ## Support 438 | 439 | For issues or questions: 440 | - Open an issue in this repository 441 | - Contact the DevOps team 442 | -------------------------------------------------------------------------------- /branch-suggestion/scripts/common/match-branches.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | import dotenv from 'dotenv'; 3 | 4 | // Only load .env if JIRA_TOKEN is not already set (to avoid log output in CI) 5 | // Silence dotenv v17+ logging 6 | if (!process.env.JIRA_TOKEN) { 7 | process.env.DOTENV_LOG_LEVEL = 'error'; 8 | dotenv.config(); 9 | } 10 | 11 | /** 12 | * Generate candidate branch names based on a parsed version 13 | * @param {object} parsedVersion - {major, minor, patch, original} 14 | * @returns {Array} Array of branch name candidates in priority order 15 | */ 16 | 17 | function generateBranchCandidates(parsedVersion) { 18 | if (!parsedVersion) return ['master']; 19 | 20 | const { major, minor, patch } = parsedVersion; 21 | const candidates = []; 22 | 23 | // First priority: exact match for the full version (X.Y.Z) 24 | if (patch !== null && minor !== null) { 25 | candidates.push(`release-${major}.${minor}.${patch}`); 26 | } 27 | 28 | // Second priority: minor version branch (X.Y) for patch releases 29 | if (patch !== null && patch > 0 && minor !== null) { 30 | candidates.push(`release-${major}.${minor}`); 31 | } 32 | 33 | // Third priority: minor version branch for minor releases (X.Y.0 or X.Y) 34 | if (minor !== null) { 35 | candidates.push(`release-${major}.${minor}`); 36 | } 37 | 38 | // Fourth priority: major version branch (X) 39 | if (major !== null) { 40 | candidates.push(`release-${major}`); 41 | } 42 | 43 | // Always include master as fallback 44 | candidates.push('master'); 45 | 46 | // Remove duplicates while preserving order 47 | return [...new Set(candidates)]; 48 | } 49 | 50 | /** 51 | * Filter fix versions by component to only include those relevant to the current repository 52 | * @param {Array} fixVersions - Array of fix version objects with parsed.component field 53 | * @param {string} repository - Repository in "owner/repo" format 54 | * @returns {Array} Filtered array of fix versions relevant to this repository 55 | */ 56 | function filterFixVersionsByRepository(fixVersions, repository) { 57 | if (!repository) { 58 | // No repository specified - return all fix versions (backward compatibility) 59 | return fixVersions; 60 | } 61 | 62 | // Extract repo name from "owner/repo" format 63 | const repoName = repository.split('/').pop(); 64 | 65 | return fixVersions.filter(fixVersion => { 66 | const component = fixVersion.parsed?.component; 67 | 68 | // If no component array (empty), include it (applies to all repos) 69 | if (!component || component.length === 0) { 70 | return true; 71 | } 72 | 73 | // Check if current repo is in the component's applicable repos 
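    // Note: this is an exact, case-sensitive match on the short repo name (e.g. "tyk" from "TykTechnologies/tyk");
    // fix versions whose component list only names other repositories are filtered out.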
74 | return component.includes(repoName); 75 | }); 76 | } 77 | 78 | /** 79 | * Match fix versions to actual branches in the repository 80 | * @param {Array} fixVersions - Array of fix version objects with parsed field 81 | * @param {Array} repoBranches - Array of branch objects from GitHub API 82 | * @returns {Array} Array of match results with branch suggestions 83 | */ 84 | 85 | function matchBranches(fixVersions, repoBranches) { 86 | const branchNames = repoBranches.map(b => b.name); 87 | const results = []; 88 | 89 | for (const fixVersion of fixVersions) { 90 | const candidates = generateBranchCandidates(fixVersion.parsed); 91 | const matches = []; 92 | 93 | // Check which candidate branches actually exist 94 | for (const candidate of candidates) { 95 | if (branchNames.includes(candidate)) { 96 | matches.push({ 97 | branch: candidate, 98 | reason: getBranchReason(candidate, fixVersion), 99 | priority: getBranchPriority(candidate, fixVersion) 100 | }); 101 | } 102 | } 103 | 104 | // If no release branches found, only suggest master 105 | if (matches.length === 0 || (matches.length === 1 && matches[0].branch === 'master')) { 106 | results.push({ 107 | fixVersion: fixVersion.name, 108 | parsed: fixVersion.parsed, 109 | branches: [{ 110 | branch: 'master', 111 | reason: 'No matching release branches found. Fix will be included in future releases.', 112 | priority: 'required' 113 | }], 114 | warning: 'Expected release branches not found in repository' 115 | }); 116 | } else { 117 | results.push({ 118 | fixVersion: fixVersion.name, 119 | parsed: fixVersion.parsed, 120 | branches: matches 121 | }); 122 | } 123 | } 124 | 125 | return results; 126 | } 127 | 128 | /** 129 | * Get human-readable reason for why a branch is suggested 130 | * @param {string} branch - Branch name 131 | * @param {object} fixVersion - Fix version object 132 | * @returns {string} Explanation 133 | */ 134 | function getBranchReason(branch, fixVersion) { 135 | const { major, minor, patch } = fixVersion.parsed || {}; 136 | 137 | if (branch === 'master') { 138 | return 'Main development branch - ensures fix is in all future releases'; 139 | } 140 | 141 | // Exact patch version match (e.g., release-5.10.1) 142 | if (patch !== null && branch === `release-${major}.${minor}.${patch}`) { 143 | return `Exact version branch for ${fixVersion.name} - specific patch release`; 144 | } 145 | 146 | // Minor version branch (e.g., release-5.10) 147 | if (branch === `release-${major}.${minor}`) { 148 | if (patch > 0) { 149 | return `Minor version branch for ${major}.${minor}.x patches - required for creating ${fixVersion.name}`; 150 | } else { 151 | return `Minor version branch for ${major}.${minor}.x releases`; 152 | } 153 | } 154 | 155 | // Major version branch (e.g., release-5) 156 | if (branch === `release-${major}`) { 157 | return `Major version branch for all ${major}.x releases`; 158 | } 159 | 160 | return `Release branch for version ${fixVersion.name}`; 161 | } 162 | 163 | /** 164 | * Determine priority level for a branch suggestion 165 | * @param {string} branch - Branch name 166 | * @param {object} fixVersion - Fix version object 167 | * @returns {string} 'required' | 'recommended' | 'optional' 168 | */ 169 | function getBranchPriority(branch, fixVersion) { 170 | const { major, minor, patch } = fixVersion.parsed || {}; 171 | 172 | // Master is always required 173 | if (branch === 'master') { 174 | return 'required'; 175 | } 176 | 177 | // Exact patch version match is required (e.g., release-5.10.1 for version 5.10.1) 178 | if 
(patch !== null && branch === `release-${major}.${minor}.${patch}`) { 179 | return 'required'; 180 | } 181 | 182 | // For patch releases (5.8.1), release-5.8 is required 183 | if (patch > 0 && branch === `release-${major}.${minor}`) { 184 | return 'required'; 185 | } 186 | 187 | // Minor and major release branches are recommended 188 | if (branch === `release-${major}.${minor}` || branch === `release-${major}`) { 189 | return 'recommended'; 190 | } 191 | 192 | return 'optional'; 193 | } 194 | 195 | /** 196 | * Format branch suggestions as markdown for PR comment 197 | * @param {Array} matchResults - Results from matchBranches() 198 | * @param {object} jiraTicket - JIRA ticket info 199 | * @returns {string} Markdown formatted comment 200 | */ 201 | function formatBranchSuggestions(matchResults, jiraTicket = {}) { 202 | const lines = []; 203 | 204 | lines.push('## 🎯 Recommended Merge Targets'); 205 | lines.push(''); 206 | 207 | if (jiraTicket.ticket) { 208 | lines.push(`Based on JIRA ticket **${jiraTicket.ticket}**${jiraTicket.summary ? `: ${jiraTicket.summary}` : ''}`); 209 | lines.push(''); 210 | } 211 | 212 | for (const result of matchResults) { 213 | lines.push(`### Fix Version: ${result.fixVersion}`); 214 | lines.push(''); 215 | 216 | if (result.warning) { 217 | lines.push(`> ⚠️ **Warning:** ${result.warning}`); 218 | lines.push(''); 219 | } 220 | 221 | const required = result.branches.filter(b => b.priority === 'required'); 222 | const recommended = result.branches.filter(b => b.priority === 'recommended'); 223 | const optional = result.branches.filter(b => b.priority === 'optional'); 224 | 225 | if (required.length > 0) { 226 | lines.push('**Required:**'); 227 | for (const branch of required) { 228 | lines.push(`- \`${branch.branch}\` - ${branch.reason}`); 229 | } 230 | lines.push(''); 231 | } 232 | 233 | if (recommended.length > 0) { 234 | lines.push('**Recommended:**'); 235 | for (const branch of recommended) { 236 | lines.push(`- \`${branch.branch}\` - ${branch.reason}`); 237 | } 238 | lines.push(''); 239 | } 240 | 241 | if (optional.length > 0) { 242 | lines.push('**Optional:**'); 243 | for (const branch of optional) { 244 | lines.push(`- \`${branch.branch}\` - ${branch.reason}`); 245 | } 246 | lines.push(''); 247 | } 248 | } 249 | 250 | lines.push('---'); 251 | lines.push(''); 252 | lines.push('### 📋 Workflow'); 253 | lines.push(''); 254 | lines.push('1. **Merge this PR to `master` first**'); 255 | lines.push(''); 256 | 257 | // Collect all non-master branches that need cherry-picking using Set for O(1) operations 258 | const releaseBranchesSet = new Set(); 259 | for (const result of matchResults) { 260 | for (const branch of result.branches) { 261 | if (branch.branch !== 'master') { 262 | releaseBranchesSet.add(branch.branch); 263 | } 264 | } 265 | } 266 | const releaseBranches = Array.from(releaseBranchesSet); 267 | 268 | if (releaseBranches.length > 0) { 269 | lines.push('2. **Cherry-pick to release branches** by commenting on the **merged PR**:'); 270 | lines.push(''); 271 | for (const branch of releaseBranches) { 272 | lines.push(` - \`/release to ${branch}\``); 273 | } 274 | lines.push(''); 275 | lines.push('3. 
**Automated backport** - The bot will automatically create backport PRs to the specified release branches'); 276 | } 277 | 278 | lines.push(''); 279 | lines.push(''); 280 | 281 | return lines.join('\n'); 282 | } 283 | 284 | // Main execution when run directly 285 | async function main() { 286 | const args = process.argv.slice(2); 287 | 288 | if (args.length < 2) { 289 | console.log('Usage: node match-branches.js <fixVersions> <branches> [<repository>]'); 290 | console.log('\nExample:'); 291 | console.log(' node match-branches.js \'[{"name":"5.8.1","parsed":{"major":5,"minor":8,"patch":1}}]\' \'[{"name":"master"},{"name":"release-5.8"}]\' \'TykTechnologies/tyk\''); 292 | console.log('\nOr pipe from other commands:'); 293 | console.log(' VERSIONS=$(node scripts/jira/get-fixedversion.js TT-12345)'); 294 | console.log(' BRANCHES=$(gh api repos/TykTechnologies/tyk/branches | jq -c \'[.[] | {name: .name}]\')'); 295 | console.log(' node match-branches.js "$VERSIONS" "$BRANCHES" "TykTechnologies/tyk"'); 296 | console.log('\nThe repository parameter is optional. If provided, it filters fix versions to only those relevant to the specified repository.'); 297 | process.exit(1); 298 | } 299 | 300 | try { 301 | const fixVersionsInput = JSON.parse(args[0]); 302 | const repoBranches = JSON.parse(args[1]); 303 | const repository = args[2]; // Optional: filter by repository 304 | 305 | // Handle if fixVersionsInput is the full JIRA response 306 | let fixVersions = fixVersionsInput.fixVersions || fixVersionsInput; 307 | const jiraTicket = fixVersionsInput.ticket ? { 308 | ticket: fixVersionsInput.ticket, 309 | summary: fixVersionsInput.summary 310 | } : {}; 311 | 312 | // Filter fix versions by repository if specified 313 | if (repository) { 314 | fixVersions = filterFixVersionsByRepository(fixVersions, repository); 315 | } 316 | 317 | const matchResults = matchBranches(fixVersions, repoBranches); 318 | 319 | // Output JSON for pipeline processing 320 | console.log(JSON.stringify({ 321 | jiraTicket, 322 | matchResults, 323 | markdown: formatBranchSuggestions(matchResults, jiraTicket) 324 | }, null, 2)); 325 | 326 | } catch (error) { 327 | console.error(JSON.stringify({ 328 | error: error.message, 329 | details: 'Failed to parse input JSON or match branches' 330 | })); 331 | process.exit(1); 332 | } 333 | } 334 | 335 | // Export functions for use in other scripts 336 | export { 337 | generateBranchCandidates, 338 | matchBranches, 339 | getBranchReason, 340 | getBranchPriority, 341 | formatBranchSuggestions, 342 | filterFixVersionsByRepository 343 | }; 344 | 345 | // Run main if executed directly 346 | if (import.meta.url === `file://${process.argv[1]}`) { 347 | main(); 348 | } 349 | 350 | -------------------------------------------------------------------------------- /.github/workflows/release-bot.yaml: -------------------------------------------------------------------------------- 1 | name: Cherry-pick to Release Branch (light checkout + GitHub App, resilient) 2 | 3 | on: 4 | # Direct usage in this repo: comment "/release to <branch>" on a PR 5 | issue_comment: 6 | types: [created] 7 | 8 | # Reusable usage from another workflow / repo 9 | workflow_call: 10 | secrets: 11 | APP_ID: 12 | description: "GitHub App ID" 13 | required: false 14 | APP_PRIVATE_KEY: 15 | description: "GitHub App private key (PEM)" 16 | required: false 17 | # Kept for compatibility, but we do not pass it to the action (avoids warnings) 18 | APP_INSTALLATION_ID: 19 | description: "GitHub App installation ID (optional, unused)" 20 | required: false 21 | 22 | permissions:
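  # Least-privilege permissions: "contents" to push the cherry-pick branch, "pull-requests" to open and label the backport PR, "issues" to comment the result back on the original PR.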
contents: write 24 | pull-requests: write 25 | issues: write 26 | 27 | jobs: 28 | cherry_pick: 29 | runs-on: ubuntu-latest 30 | 31 | env: 32 | # Depths to try (in order) when we need to scan the base branch to find a squash "merge" 33 | BASE_FETCH_DEPTHS: "100 500 2000" 34 | # Depth for the target release branch 35 | TARGET_FETCH_DEPTH: "50" 36 | 37 | steps: 38 | # 1) Only run when someone comments `/release to <branch>` on a PR 39 | - name: Check for release command 40 | id: check_command 41 | uses: actions/github-script@v6 42 | with: 43 | github-token: ${{ secrets.GITHUB_TOKEN }} 44 | script: | 45 | const { issue, comment } = context.payload || {}; 46 | if (!issue || !issue.pull_request || !comment) { 47 | core.setOutput('release_valid','false'); 48 | return; 49 | } 50 | const m = (comment.body || '').trim().match(/^\/release to\s+([^\s]+)\s*$/i); 51 | if (!m) { 52 | core.setOutput('release_valid','false'); 53 | return; 54 | } 55 | core.setOutput('release_valid','true'); 56 | core.setOutput('release_branch', m[1]); 57 | core.setOutput('pr_number', issue.number); 58 | 59 | - name: Skip if not a valid command 60 | if: steps.check_command.outputs.release_valid == 'false' 61 | run: | 62 | echo "Skipping: no '/release to <branch>' command found." 63 | 64 | # 2) Minimal checkout (shallow + partial clone) 65 | - name: Checkout repository (shallow, partial) 66 | if: steps.check_command.outputs.release_valid == 'true' 67 | uses: actions/checkout@v4 68 | with: 69 | fetch-depth: 1 70 | filter: blob:none 71 | persist-credentials: false 72 | 73 | # 3) Resolve GitHub App credentials (supports workflow_call APP_* and repo PROBE_APP_*) 74 | - name: Resolve GitHub App credentials 75 | id: resolve_app 76 | if: steps.check_command.outputs.release_valid == 'true' 77 | env: 78 | CALL_APP_ID: ${{ secrets.APP_ID }} 79 | CALL_APP_PRIVATE_KEY: ${{ secrets.APP_PRIVATE_KEY }} 80 | CALL_APP_INSTALLATION_ID: ${{ secrets.APP_INSTALLATION_ID }} 81 | PROBE_APP_ID: ${{ secrets.PROBE_APP_ID }} 82 | PROBE_APP_PRIVATE_KEY: ${{ secrets.PROBE_APP_PRIVATE_KEY }} 83 | PROBE_APP_INSTALLATION_ID: ${{ secrets.PROBE_APP_INSTALLATION_ID }} 84 | run: | 85 | set -euo pipefail 86 | APP_ID="${CALL_APP_ID:-}" 87 | APP_PRIVATE_KEY="${CALL_APP_PRIVATE_KEY:-}" 88 | 89 | # Fallbacks for direct runs in this repo 90 | if [ -z "${APP_ID}" ] && [ -n "${PROBE_APP_ID:-}" ]; then APP_ID="${PROBE_APP_ID}"; fi 91 | if [ -z "${APP_PRIVATE_KEY}" ] && [ -n "${PROBE_APP_PRIVATE_KEY:-}" ]; then APP_PRIVATE_KEY="${PROBE_APP_PRIVATE_KEY}"; fi 92 | 93 | HAS_APP=false 94 | if [ -n "${APP_ID}" ] && [ -n "${APP_PRIVATE_KEY}" ]; then HAS_APP=true; fi 95 | 96 | echo "HAS_APP=${HAS_APP}" >> "$GITHUB_ENV" 97 | if [ "${HAS_APP}" = "true" ]; then 98 | { 99 | echo "APP_ID=${APP_ID}" 100 | echo "APP_PRIVATE_KEY<<EOF" 101 | echo "${APP_PRIVATE_KEY}" 102 | echo "EOF" 103 | } >> "$GITHUB_ENV" 104 | fi 105 | 106 | # Create GitHub App token (owner-based; avoids 'installation-id' warning) 107 | - name: Create GitHub App token 108 | id: app_token 109 | if: env.HAS_APP == 'true' 110 | uses: actions/create-github-app-token@v1 111 | with: 112 | app-id: ${{ env.APP_ID }} 113 | private-key: ${{ env.APP_PRIVATE_KEY }} 114 | owner: ${{ github.repository_owner }} 115 | 116 | - name: Select auth token & prepare git remote 117 | if: steps.check_command.outputs.release_valid == 'true' 118 | run: | 119 | set -euo pipefail 120 | TOKEN="${{ steps.app_token.outputs.token }}" 121 | if [ -z "$TOKEN" ]; then TOKEN="${{ secrets.GITHUB_TOKEN }}"; fi 122 | echo "GITHUB_TOKEN=$TOKEN" >> "$GITHUB_ENV" 123 | echo "GH_TOKEN=$TOKEN" >> "$GITHUB_ENV" 124 | git config --local
--unset-all http.https://github.com/.extraheader || true 125 | git remote set-url origin "https://x-access-token:${TOKEN}@github.com/${{ github.repository }}.git" 126 | git remote -v 127 | 128 | - name: Configure git user 129 | if: steps.check_command.outputs.release_valid == 'true' 130 | run: | 131 | git config --global user.email "bot@tyk.io" 132 | git config --global user.name "Tyk Bot" 133 | git config --global advice.submoduleMergeConflict false 134 | 135 | # 4) Find the merge (or squash) commit that landed on the base branch, using REST only 136 | - name: Find merge commit on base branch (shallow fetch) 137 | id: pr_details 138 | if: steps.check_command.outputs.release_valid == 'true' 139 | env: 140 | GH_TOKEN: ${{ env.GH_TOKEN }} 141 | BASE_FETCH_DEPTHS: ${{ env.BASE_FETCH_DEPTHS }} 142 | run: | 143 | set -euo pipefail 144 | 145 | PR_NUMBER='${{ steps.check_command.outputs.pr_number }}' 146 | OWNER_REPO='${{ github.repository }}' 147 | 148 | PR_JSON="$(gh api "repos/${OWNER_REPO}/pulls/${PR_NUMBER}")" 149 | PR_TITLE="$(jq -r '.title' <<<"$PR_JSON")" 150 | BASE_REF="$(jq -r '.base.ref' <<<"$PR_JSON")" 151 | MERGED="$(jq -r '.merged' <<<"$PR_JSON")" 152 | MERGE_SHA="$(jq -r '.merge_commit_sha // empty' <<<"$PR_JSON")" 153 | 154 | if [ "${MERGED}" != "true" ]; then 155 | echo "PR #${PR_NUMBER} is not merged; cannot use merge commit." >&2 156 | exit 1 157 | fi 158 | 159 | if [ -n "${MERGE_SHA}" ] && [ "${MERGE_SHA}" != "null" ]; then 160 | for D in 50 200 1000; do 161 | git fetch --no-tags --filter=blob:none --depth="$D" origin "+refs/heads/${BASE_REF}:refs/remotes/origin/${BASE_REF}" 162 | if git cat-file -e "${MERGE_SHA}^{commit}" 2>/dev/null; then 163 | break 164 | fi 165 | done 166 | else 167 | MERGE_SHA="" 168 | for D in ${BASE_FETCH_DEPTHS}; do 169 | git fetch --no-tags --filter=blob:none --depth="$D" origin "+refs/heads/${BASE_REF}:refs/remotes/origin/${BASE_REF}" 170 | MERGE_SHA="$(git log "origin/${BASE_REF}" --grep="(#${PR_NUMBER})" -n 1 --pretty=format:%H || true)" 171 | if [ -n "${MERGE_SHA}" ]; then break; fi 172 | done 173 | if [ -z "${MERGE_SHA}" ]; then 174 | echo "Could not locate the squash commit for PR #${PR_NUMBER} on ${BASE_REF} within depth(s): ${BASE_FETCH_DEPTHS}" >&2 175 | exit 1 176 | fi 177 | fi 178 | 179 | { 180 | echo "COMMIT_SHA=${MERGE_SHA}" 181 | echo "PR_TITLE=${PR_TITLE}" 182 | echo "BASE_REF=${BASE_REF}" 183 | echo "PR_NUMBER=${PR_NUMBER}" 184 | } >> "$GITHUB_OUTPUT" 185 | 186 | # 5) Cherry-pick to the target branch, with robust conflict handling + guaranteed PR 187 | - name: Cherry-pick merge commit to target release (shallow + resilient) 188 | id: cherry_pick 189 | if: steps.check_command.outputs.release_valid == 'true' 190 | env: 191 | GH_TOKEN: ${{ env.GH_TOKEN }} 192 | GITHUB_TOKEN: ${{ env.GITHUB_TOKEN }} 193 | GITHUB_REPO: ${{ github.repository }} 194 | GITHUB_BRANCH: ${{ steps.check_command.outputs.release_branch }} 195 | GITHUB_CHERRY_PICK_COMMIT: ${{ steps.pr_details.outputs.COMMIT_SHA }} 196 | PR_TITLE: ${{ steps.pr_details.outputs.PR_TITLE }} 197 | BASE_REF: ${{ steps.pr_details.outputs.BASE_REF }} 198 | TARGET_FETCH_DEPTH: ${{ env.TARGET_FETCH_DEPTH }} 199 | run: | 200 | set -euo pipefail 201 | 202 | # Shallow fetch only target & ensure base exists 203 | git fetch --no-tags --filter=blob:none --depth="${TARGET_FETCH_DEPTH}" origin "+refs/heads/${GITHUB_BRANCH}:refs/remotes/origin/${GITHUB_BRANCH}" || { 204 | echo "Target branch '${GITHUB_BRANCH}' not found on remote." 
>&2 205 | exit 1 206 | } 207 | git fetch --no-tags --filter=blob:none --depth=10 origin "+refs/heads/${BASE_REF}:refs/remotes/origin/${BASE_REF}" || true 208 | 209 | # Ensure we have the commit object 210 | git fetch --no-tags --filter=blob:none origin "${GITHUB_CHERRY_PICK_COMMIT}" || true 211 | 212 | # Work on the target branch (shallow) 213 | git checkout -B "${GITHUB_BRANCH}" "origin/${GITHUB_BRANCH}" 214 | 215 | # Create a consistent branch name (optionally include JIRA key) 216 | JIRA_ID="$(echo "${PR_TITLE}" | grep -oE '[A-Z]{1,10}-[0-9]{1,10}' | head -n 1 || true)" 217 | BRANCH_NAME="merge/${GITHUB_BRANCH}/${GITHUB_CHERRY_PICK_COMMIT}" 218 | [ -n "${JIRA_ID}" ] && BRANCH_NAME="${BRANCH_NAME}/${JIRA_ID}" 219 | 220 | # Clean any stale branches locally/remotely 221 | git branch -D "${BRANCH_NAME}" 2>/dev/null || true 222 | git push origin --delete "${BRANCH_NAME}" 2>/dev/null || true 223 | 224 | # Create working branch from target 225 | git switch -c "${BRANCH_NAME}" 226 | 227 | # If the commit touches submodules and .gitmodules exists, shallow-init submodules 228 | if [ -f .gitmodules ]; then 229 | SUB_PATHS="$(git config -f .gitmodules --name-only --get-regexp '^submodule\..*\.path' | sed -E 's/^submodule\.[^.]+\.path=//')" 230 | NEED_SUBS=0 231 | for P in $SUB_PATHS; do 232 | if git diff-tree --no-commit-id --name-only -r "${GITHUB_CHERRY_PICK_COMMIT}" | grep -Fx "$P" >/dev/null 2>&1; then 233 | NEED_SUBS=1; break 234 | fi 235 | done 236 | if [ "$NEED_SUBS" -eq 1 ]; then 237 | git submodule sync --recursive 238 | git submodule update --init --depth=1 --recursive --recommend-shallow 239 | fi 240 | fi 241 | 242 | # Is it a merge commit (has >1 parent)? 243 | PARENTS_LINE="$(git rev-list --parents -n 1 "${GITHUB_CHERRY_PICK_COMMIT}")" 244 | WORDS_COUNT="$(wc -w <<<"${PARENTS_LINE}")" 245 | 246 | MERGE_FAILED=0 247 | # Try to apply; if conflicts arise, we won't force-continue here 248 | if [ "${WORDS_COUNT}" -gt 2 ]; then 249 | git cherry-pick -x -m 1 "${GITHUB_CHERRY_PICK_COMMIT}" || MERGE_FAILED=$? 250 | else 251 | git cherry-pick -x "${GITHUB_CHERRY_PICK_COMMIT}" || MERGE_FAILED=$? 252 | fi 253 | 254 | CONFLICT_COUNT=0 255 | CONFLICT_LIST="" 256 | if [ "${MERGE_FAILED}" -ne 0 ]; then 257 | # Gather a short conflicts summary BEFORE aborting 258 | CONFLICT_COUNT="$(git ls-files -u | wc -l || true)" 259 | CONFLICT_LIST="$(git diff --name-only --diff-filter=U | head -n 40 | sed 's/^/ - /' || true)" 260 | # Abort to get the branch back to a clean state 261 | git cherry-pick --abort || true 262 | fi 263 | 264 | # If branch has no commits compared to target (e.g., due to abort), add an empty commit 265 | AHEAD_COUNT="$(git rev-list --count "origin/${GITHUB_BRANCH}..HEAD" || echo 0)" 266 | if [ "${AHEAD_COUNT}" -eq 0 ]; then 267 | git commit --allow-empty -m "chore: prepare PR for cherry-pick ${GITHUB_CHERRY_PICK_COMMIT} → ${GITHUB_BRANCH} 268 | 269 | Cherry-pick failed with conflicts and requires manual resolution. 270 | This empty commit exists to allow opening a draft PR. 
271 | " 272 | fi 273 | 274 | # Push branch 275 | git push origin "${BRANCH_NAME}" --force 276 | 277 | # Compose title/body 278 | MESSAGE="$(git show -s --format=%B "${GITHUB_CHERRY_PICK_COMMIT}" 2>/dev/null || echo "")" 279 | [ -z "${MESSAGE}" ] && MESSAGE="Cherry-pick ${GITHUB_CHERRY_PICK_COMMIT} to ${GITHUB_BRANCH}" 280 | TITLE="$(echo "${MESSAGE}" | head -n 1)" 281 | 282 | if [ "${MERGE_FAILED}" -ne 0 ]; then 283 | BODY="Cherry-pick of \`${GITHUB_CHERRY_PICK_COMMIT}\` from \`${BASE_REF}\` to \`${GITHUB_BRANCH}\` requires manual resolution. 284 | 285 | **Conflicts detected:** ${CONFLICT_COUNT} 286 | ${CONFLICT_LIST} 287 | 288 | Tips: 289 | - Check out this branch locally and run: \`git cherry-pick -x ${GITHUB_CHERRY_PICK_COMMIT}\` 290 | - Resolve conflicts (including submodules if any), then push back to this branch. 291 | 292 | Original commit: https://github.com/${GITHUB_REPO}/commit/${GITHUB_CHERRY_PICK_COMMIT} 293 | " 294 | else 295 | BODY="${MESSAGE}" 296 | fi 297 | 298 | # Create PR (draft if conflicts) 299 | PR_URL="$(gh pr view --repo "${GITHUB_REPO}" --head "${BRANCH_NAME}" --json url -q .url 2>/dev/null || true)" 300 | if [ -z "${PR_URL}" ]; then 301 | if [ "${MERGE_FAILED}" -ne 0 ]; then 302 | PR_URL="$(gh pr create --draft \ 303 | --repo "${GITHUB_REPO}" \ 304 | --base "${GITHUB_BRANCH}" \ 305 | --head "${BRANCH_NAME}" \ 306 | --title "Merging to ${GITHUB_BRANCH}: ${TITLE}" \ 307 | --body "${BODY}")" 308 | else 309 | PR_URL="$(gh pr create \ 310 | --repo "${GITHUB_REPO}" \ 311 | --base "${GITHUB_BRANCH}" \ 312 | --head "${BRANCH_NAME}" \ 313 | --title "Merging to ${GITHUB_BRANCH}: ${TITLE}" \ 314 | --body "${BODY}")" 315 | fi 316 | fi 317 | 318 | # Optionally label the PR if conflicts 319 | if [ "${MERGE_FAILED}" -ne 0 ]; then 320 | gh label create "needs-manual-cherry-pick" --color FF8700 --description "Cherry-pick has conflicts" --repo "${GITHUB_REPO}" 2>/dev/null || true 321 | gh pr edit "${PR_URL}" --add-label "needs-manual-cherry-pick" --repo "${GITHUB_REPO}" || true 322 | fi 323 | 324 | { 325 | echo "PR_URL=${PR_URL}" 326 | echo "MERGE_FAILED=${MERGE_FAILED}" 327 | echo "BRANCH_NAME=${BRANCH_NAME}" 328 | } >> "$GITHUB_OUTPUT" 329 | 330 | # 6) Comment back on the original PR with the result (uses selected token) 331 | - name: Comment back on original PR 332 | if: always() && steps.check_command.outputs.release_valid == 'true' 333 | uses: actions/github-script@v6 334 | with: 335 | github-token: ${{ env.GITHUB_TOKEN }} 336 | script: | 337 | const prUrl = '${{ steps.cherry_pick.outputs.PR_URL }}'; 338 | const mergeFailed = '${{ steps.cherry_pick.outputs.MERGE_FAILED }}' === '1'; 339 | let body; 340 | if ('${{ job.status }}' === 'success') { 341 | body = mergeFailed 342 | ? `⚠️ Cherry-pick encountered conflicts. A draft PR was created: ${prUrl}` 343 | : `✅ Cherry-pick successful. A PR was created: ${prUrl}`; 344 | } else { 345 | body = '❌ Cherry-pick failed. Please check the workflow logs.'; 346 | } 347 | github.rest.issues.createComment({ 348 | issue_number: ${{ steps.check_command.outputs.pr_number }}, 349 | owner: context.repo.owner, 350 | repo: context.repo.repo, 351 | body 352 | }); 353 | --------------------------------------------------------------------------------