├── .dockerignore ├── .env.example ├── .github ├── CODEOWNERS ├── ISSUE_TEMPLATE │ └── new-config.yaml ├── dependabot.yml ├── labeler.yml ├── release.yml └── workflows │ ├── build-publish.yaml │ ├── ci.yaml │ ├── labeler..yaml │ └── labels.yaml ├── .gitignore ├── .pre-commit-config.yaml ├── .secrets.baseline ├── .yamllint ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── Dockerfile ├── LICENSE ├── Makefile ├── README.md ├── SECURITY.md ├── SUPPORT.md ├── asgi.py ├── bin ├── healthcheck.py ├── lint.sh └── test.sh ├── config ├── config.local.yaml ├── config.nonprod.yaml └── config.prod.yaml ├── docker-compose.yaml ├── docs ├── actions.md ├── adrs │ ├── 001-actions-reusability.md │ ├── 002-action-customizaiton.md │ ├── 003-bucket.drawio.jpg │ ├── 003-bugzilla-response-codes.md │ ├── 003.drawio.jpg │ └── template.md ├── deployment.md └── troubleshooting.md ├── jbi ├── __init__.py ├── __main__.py ├── app.py ├── bugzilla │ ├── __init__.py │ ├── client.py │ ├── models.py │ └── service.py ├── common │ ├── __init__.py │ └── instrument.py ├── configuration.py ├── environment.py ├── errors.py ├── jira │ ├── __init__.py │ ├── client.py │ ├── service.py │ └── utils.py ├── log.py ├── models.py ├── queue.py ├── retry.py ├── router.py ├── runner.py ├── static │ └── styles.css ├── steps.py └── templates │ └── powered_by_template.html ├── poetry.lock ├── pyproject.toml ├── tests ├── __init__.py ├── conftest.py ├── fixtures │ ├── __init__.py │ ├── bad-config.yaml │ └── factories.py └── unit │ ├── __init__.py │ ├── bugzilla │ ├── __init__.py │ └── test_client.py │ ├── jira │ ├── __init__.py │ ├── test_client.py │ ├── test_service.py │ └── test_utils.py │ ├── test_app.py │ ├── test_configuration.py │ ├── test_environment.py │ ├── test_models.py │ ├── test_queue.py │ ├── test_retry.py │ ├── test_router.py │ ├── test_runner.py │ └── test_steps.py └── version.json /.dockerignore: -------------------------------------------------------------------------------- 1 | # ignore everything 2 | * 3 
| # use exceptions to create an "allow list" 4 | !/asgi.py 5 | !/bin 6 | !/config 7 | !/jbi 8 | !/poetry.lock 9 | !/pyproject.toml 10 | !/version.json 11 | 12 | # Ignore generated credentials from google-github-actions/auth 13 | gha-creds-*.json 14 | -------------------------------------------------------------------------------- /.env.example: -------------------------------------------------------------------------------- 1 | # Environment variables for the local development environment 2 | ENV=local 3 | LOG_FORMAT=text 4 | LOG_LEVEL=debug 5 | APP_RELOAD=True 6 | APP_DEBUG=True 7 | JBI_API_KEY="fake_api_key" 8 | DL_QUEUE_DSN=file:///tmp/dlqueue 9 | DL_QUEUE_CONSTANT_RETRY=true 10 | DL_QUEUE_RETRY_TIMEOUT_DAYS=7 11 | 12 | # Jira API Secrets 13 | JIRA_USERNAME="fake_jira_username" 14 | JIRA_API_KEY="fake_jira_api_key" 15 | 16 | # Bugzilla API Secrets 17 | BUGZILLA_API_KEY="fake_bugzilla_api_key" 18 | -------------------------------------------------------------------------------- /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | * @mozilla/fx-delivery-tools-wg 2 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/new-config.yaml: -------------------------------------------------------------------------------- 1 | name: New sync configuration 2 | description: Request a new Bugzilla to Jira sync connection 3 | title: "Sync with " 4 | labels: ["configuration"] 5 | body: 6 | - type: input 7 | id: bugzilla_product 8 | attributes: 9 | label: Bugzilla Product 10 | description: What Bugzilla [product](https://wiki.mozilla.org/BMO/UserGuide/BugFields#product) would you like to sync? 11 | placeholder: ex. 
Firefox 12 | validations: 13 | required: true 14 | - type: input 15 | id: bugzilla_component 16 | attributes: 17 | label: Bugzilla Component 18 | description: Optionally, what Bugzilla [component](https://wiki.mozilla.org/BMO/UserGuide/BugFields#component) would you like to scope the sync to? 19 | placeholder: ex. General 20 | validations: 21 | required: false 22 | - type: input 23 | id: jira_project_key 24 | attributes: 25 | label: Jira Project Key 26 | description: What is the key of the [Jira Project](https://mozilla-hub.atlassian.net/jira/projects) you'd like to sync your bugs to? 27 | placeholder: ex. JB 28 | validations: 29 | required: true 30 | - type: input 31 | id: whiteboard_tag 32 | attributes: 33 | label: Whiteboard Tag 34 | description: Adding a whiteboard tag to a bug marks it to be synced to Jira. What whiteboard tag do you want to use for this sync pipeline? 35 | placeholder: ex. myTag 36 | validations: 37 | required: true 38 | - type: input 39 | id: project_contact 40 | attributes: 41 | label: Project Contact (Bugzilla user id) 42 | description: What is the ID of the Bugzilla user that should be contacted if something goes wrong with the sync pipeline? 43 | placeholder: "123456" 44 | validations: 45 | required: false 46 | - type: checkboxes 47 | id: sync_data_options 48 | attributes: 49 | label: Data Sync Options 50 | description: Select the data you'd like to sync to Jira issues 51 | options: 52 | - label: Component (from the Bug and as specified in config) 53 | - label: Issue Type (please provide mapping, eg. `enhancement` -> `Story`) 54 | - label: Status (please provide mapping, eg. `RESOLVED` -> `Done`) 55 | - label: Resolution (please provide mapping, eg. 
`WONTFIX` -> `Won't do`) 56 | - label: Priority 57 | - label: Severity 58 | - label: Issue points 59 | - label: Comments 60 | - label: Keywords (as labels) 61 | - label: Whiteboard tags (as labels) 62 | - label: Assignee (Bugzilla and Jira user must use the same email address) 63 | - label: Field changes (recorded as comments on the issue) 64 | - type: checkboxes 65 | id: internal_tasks 66 | attributes: 67 | label: (Internal) For operators 68 | options: 69 | - label: '[Request](https://mozilla-hub.atlassian.net/servicedesk/customer/portal/4/group/36/create/172) Jira Automation permissions for project' 70 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: "pip" 4 | directory: "/" 5 | schedule: 6 | interval: "weekly" 7 | groups: 8 | minor-patch: 9 | update-types: ["minor", "patch"] 10 | - package-ecosystem: "docker" 11 | directory: "/" 12 | schedule: 13 | interval: "weekly" 14 | - package-ecosystem: "github-actions" 15 | directory: "/" 16 | schedule: 17 | interval: "weekly" 18 | -------------------------------------------------------------------------------- /.github/labeler.yml: -------------------------------------------------------------------------------- 1 | configuration: 2 | - changed-files: 3 | - any-glob-to-any-file: 'config/*' 4 | 5 | documentation: 6 | - changed-files: 7 | - any-glob-to-any-file: '**/*.md' -------------------------------------------------------------------------------- /.github/release.yml: -------------------------------------------------------------------------------- 1 | changelog: 2 | exclude: 3 | authors: 4 | - dependabot 5 | categories: 6 | - title: Breaking Changes 7 | labels: 8 | - "breaking-change" 9 | - title: Bug Fixes 10 | labels: 11 | - "bug" 12 | - title: New Features 13 | labels: 14 | - "enhancement" 15 | - title: Configuration 16 | 
labels: 17 | - "configuration" 18 | - title: Documentation 19 | labels: 20 | - "documentation" 21 | - title: Dependency Updates 22 | labels: 23 | - "dependencies" 24 | - title: Other Changes 25 | labels: 26 | - "*" 27 | -------------------------------------------------------------------------------- /.github/workflows/build-publish.yaml: -------------------------------------------------------------------------------- 1 | name: Build and Publish Docker image 2 | 3 | on: 4 | pull_request: 5 | branches: 6 | - main 7 | push: 8 | branches: 9 | - main 10 | tags: 11 | - v[0-9]+.[0-9]+.[0-9]+ 12 | 13 | jobs: 14 | build-and-publish: 15 | runs-on: ubuntu-latest 16 | permissions: 17 | contents: 'read' 18 | id-token: 'write' 19 | env: 20 | TEST_TAG: ${{ github.repository }}:test 21 | TEST_CONTAINER_NAME: jbi-healthcheck 22 | GAR_LOCATION: us 23 | GAR_REPOSITORY: jbi-prod 24 | GCP_PROJECT_ID: moz-fx-jbi-prod 25 | IMAGE: jbi 26 | steps: 27 | - name: Check out the repo 28 | uses: actions/checkout@v4 29 | with: 30 | fetch-depth: 0 # Fetch everything (tags) 31 | fetch-tags: true 32 | 33 | - name: Set tag version 34 | run: echo "JBI_TAG=$(git describe --tags --abbrev=4)" >> $GITHUB_ENV 35 | 36 | - name: Build `version.json` file 37 | run: | 38 | printf '{\n "commit": "%s",\n "version": "%s",\n "source": "%s",\n "build": "%s"\n}\n' \ 39 | "$GITHUB_SHA" \ 40 | "$JBI_TAG" \ 41 | "$GITHUB_SERVER_URL/$GITHUB_REPOSITORY" \ 42 | "$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" > ./version.json 43 | 44 | - name: Extract metadata (tags, labels) for Docker 45 | id: meta 46 | uses: docker/metadata-action@v5 47 | with: 48 | images: | 49 | ${{ github.repository }} 50 | ${{ env.GAR_LOCATION }}-docker.pkg.dev/${{ env.GCP_PROJECT_ID }}/${{ env.GAR_REPOSITORY }}/${{ env.IMAGE }} 51 | # https://github.com/marketplace/actions/docker-metadata-action#tags-input 52 | tags: | 53 | type=semver,pattern={{raw}} 54 | type=raw,value=latest,enable={{is_default_branch}} 55 | 
type=sha,prefix=,enable={{is_default_branch}} 56 | 57 | - id: gcp_auth 58 | name: Log into GCP 59 | uses: google-github-actions/auth@v2 60 | if: github.event_name != 'pull_request' 61 | with: 62 | token_format: access_token 63 | service_account: artifact-writer@${{ env.GCP_PROJECT_ID }}.iam.gserviceaccount.com 64 | workload_identity_provider: ${{ vars.GCPV2_GITHUB_WORKLOAD_IDENTITY_PROVIDER }} 65 | 66 | - name: Login to GAR 67 | uses: docker/login-action@v3 68 | if: github.event_name != 'pull_request' 69 | with: 70 | registry: ${{ env.GAR_LOCATION }}-docker.pkg.dev 71 | username: oauth2accesstoken 72 | password: ${{ steps.gcp_auth.outputs.access_token }} 73 | 74 | - name: Login to Docker Hub 75 | if: github.event_name != 'pull_request' 76 | uses: docker/login-action@v3 77 | with: 78 | username: ${{ secrets.DOCKERHUB_USERNAME }} 79 | password: ${{ secrets.DOCKERHUB_TOKEN }} 80 | 81 | - name: Set up Docker Buildx 82 | uses: docker/setup-buildx-action@v3 83 | 84 | - name: Build and export to Docker 85 | uses: docker/build-push-action@v6 86 | with: 87 | context: . 88 | load: true 89 | push: false 90 | tags: ${{ env.TEST_TAG }} 91 | cache-from: type=gha 92 | cache-to: type=gha,mode=max 93 | 94 | - name: Spin up container 95 | run: | 96 | docker run \ 97 | --name ${{ env.TEST_CONTAINER_NAME }} \ 98 | --detach \ 99 | --env-file .env.example \ 100 | --publish 8000:8000 \ 101 | ${{ env.TEST_TAG }} 102 | 103 | - name: Check that container is running 104 | run: | 105 | docker exec ${{ env.TEST_CONTAINER_NAME }} python bin/healthcheck.py 106 | 107 | - name: Spin down container 108 | run: | 109 | docker rm -f ${{ env.TEST_CONTAINER_NAME }} 110 | 111 | - name: Build and push 112 | uses: docker/build-push-action@v6 113 | with: 114 | context: . 
115 | push: ${{ github.event_name != 'pull_request' }} 116 | tags: ${{ steps.meta.outputs.tags }} 117 | cache-from: type=gha 118 | cache-to: type=gha,mode=max 119 | -------------------------------------------------------------------------------- /.github/workflows/ci.yaml: -------------------------------------------------------------------------------- 1 | name: Run CI checks 2 | 3 | on: pull_request 4 | 5 | jobs: 6 | run_lint: 7 | runs-on: ubuntu-latest 8 | steps: 9 | - uses: actions/checkout@v4 10 | - uses: actions/setup-python@v5 11 | id: setup-python 12 | with: 13 | python-version: "3.13" 14 | - name: Install poetry 15 | run: pipx install poetry 16 | - uses: actions/cache@v4 17 | with: 18 | path: .venv 19 | key: poetry-${{ steps.setup-python.outputs.python-version}}-${{ hashFiles('poetry.lock') }} 20 | - name: Install dependencies 21 | run: make install 22 | - run: bin/lint.sh 23 | run_test: 24 | runs-on: ubuntu-latest 25 | steps: 26 | - uses: actions/checkout@v4 27 | - uses: actions/setup-python@v5 28 | id: setup-python 29 | with: 30 | python-version: "3.13" 31 | - name: Install pandoc 32 | run: sudo apt-get install -y pandoc 33 | - name: Install poetry 34 | run: pipx install poetry 35 | - uses: actions/cache@v4 36 | with: 37 | path: .venv 38 | key: poetry-${{ steps.setup-python.outputs.python-version}}-${{ hashFiles('poetry.lock') }} 39 | - name: Install dependencies 40 | run: make install 41 | - name: Run tests 42 | run: bin/test.sh 43 | - name: Run retry 44 | env: 45 | JBI_API_KEY: key # pragma: allowlist secret 46 | JIRA_API_KEY: key # pragma: allowlist secret 47 | JIRA_USERNAME: foo@bar 48 | BUGZILLA_API_KEY: key # pragma: allowlist secret 49 | DL_QUEUE_CONSTANT_RETRY: false 50 | DL_QUEUE_DSN: "file:///tmp/dlqueue" 51 | run: .venv/bin/python -m jbi.retry 52 | review-dependabot-pr: 53 | permissions: 54 | contents: write 55 | pull-requests: write 56 | runs-on: ubuntu-latest 57 | if: ${{ github.event.pull_request.user.login == 'dependabot[bot]' }} 58 | 
steps: 59 | - name: Enable Dependabot automation 60 | uses: mozilla/syseng-pod/actions/dependabot-automerge@main 61 | -------------------------------------------------------------------------------- /.github/workflows/labeler..yaml: -------------------------------------------------------------------------------- 1 | name: "Pull Request Labeler" 2 | on: 3 | - pull_request_target 4 | 5 | jobs: 6 | triage: 7 | permissions: 8 | contents: read 9 | pull-requests: write 10 | runs-on: ubuntu-latest 11 | steps: 12 | - uses: actions/labeler@v5 13 | -------------------------------------------------------------------------------- /.github/workflows/labels.yaml: -------------------------------------------------------------------------------- 1 | name: Force pull-requests label(s) 2 | 3 | on: 4 | pull_request: 5 | types: [opened, labeled, unlabeled] 6 | jobs: 7 | wait-triage: 8 | runs-on: ubuntu-latest 9 | if: github.event.action == 'opened' 10 | steps: 11 | - name: Wait for auto-labeler 12 | run: sleep 10s 13 | shell: bash 14 | 15 | pr-has-label: 16 | needs: wait-triage 17 | name: Will be skipped if labelled 18 | runs-on: ubuntu-latest 19 | if: ${{ join(github.event.pull_request.labels.*.name, ', ') == '' }} 20 | steps: 21 | - run: | 22 | echo 'Pull-request must have at least one label' 23 | exit 1 24 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | pip-wheel-metadata/ 24 | share/python-wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python 
script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .nox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | *.py,cover 51 | .hypothesis/ 52 | .pytest_cache/ 53 | .ruff_cache/ 54 | 55 | # Translations 56 | *.mo 57 | *.pot 58 | 59 | # Django stuff: 60 | *.log 61 | local_settings.py 62 | db.sqlite3 63 | db.sqlite3-journal 64 | 65 | # Flask stuff: 66 | instance/ 67 | .webassets-cache 68 | 69 | # Scrapy stuff: 70 | .scrapy 71 | 72 | # Sphinx documentation 73 | docs/_build/ 74 | 75 | # PyBuilder 76 | target/ 77 | 78 | # Jupyter Notebook 79 | .ipynb_checkpoints 80 | 81 | # IPython 82 | profile_default/ 83 | ipython_config.py 84 | 85 | # pyenv 86 | .python-version 87 | 88 | # pipenv 89 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 90 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 91 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 92 | # install all needed dependencies. 93 | #Pipfile.lock 94 | 95 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 96 | __pypackages__/ 97 | 98 | # Celery stuff 99 | celerybeat-schedule 100 | celerybeat.pid 101 | 102 | # SageMath parsed files 103 | *.sage.py 104 | 105 | # Environments 106 | .env 107 | .venv 108 | env/ 109 | venv/ 110 | ENV/ 111 | env.bak/ 112 | venv.bak/ 113 | 114 | # Spyder project settings 115 | .spyderproject 116 | .spyproject 117 | 118 | # Rope project settings 119 | .ropeproject 120 | 121 | # mkdocs documentation 122 | /site 123 | 124 | # mypy 125 | .mypy_cache/ 126 | .dmypy.json 127 | dmypy.json 128 | 129 | # Pyre type checker 130 | .pyre/ 131 | 132 | # IDEs 133 | .idea/ 134 | .vscode/ 135 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/pre-commit/pre-commit-hooks 3 | rev: v2.1.0 4 | hooks: 5 | - id: check-added-large-files 6 | - id: check-json 7 | - id: detect-private-key 8 | - id: end-of-file-fixer 9 | - id: pretty-format-json 10 | args: [ 11 | '--autofix', 12 | '--indent', '4', 13 | '--no-sort-keys', 14 | ] 15 | exclude: "bin/k8s/secret.json" 16 | - id: trailing-whitespace 17 | - repo: local 18 | hooks: 19 | - id: lint 20 | name: lint 21 | entry: bin/lint.sh lint 22 | language: script 23 | types: [python] 24 | - repo: local 25 | hooks: 26 | - id: mypy 27 | name: mypy 28 | entry: bin/lint.sh mypy 29 | language: script 30 | types: [python] 31 | - repo: local 32 | hooks: 33 | - id: bandit 34 | name: bandit 35 | entry: bin/lint.sh bandit 36 | language: script 37 | types: [python] 38 | - repo: local 39 | hooks: 40 | - id: detect-secrets 41 | name: detect-secrets 42 | entry: bin/lint.sh detect-secrets 43 | language: script 44 | - repo: local 45 | hooks: 46 | - id: format 47 | name: format 48 | entry: bin/lint.sh format 49 | language: script 50 | types: [python] 51 | - repo: local 52 | hooks: 53 | - id: yamllint 54 | name: yamllint 55 | entry: 
bin/lint.sh yamllint 56 | language: script 57 | types: [yaml] 58 | -------------------------------------------------------------------------------- /.secrets.baseline: -------------------------------------------------------------------------------- 1 | { 2 | "version": "1.5.0", 3 | "plugins_used": [ 4 | { 5 | "name": "ArtifactoryDetector" 6 | }, 7 | { 8 | "name": "AWSKeyDetector" 9 | }, 10 | { 11 | "name": "Base64HighEntropyString", 12 | "limit": 4.5 13 | }, 14 | { 15 | "name": "BasicAuthDetector" 16 | }, 17 | { 18 | "name": "CloudantDetector" 19 | }, 20 | { 21 | "name": "HexHighEntropyString", 22 | "limit": 3 23 | }, 24 | { 25 | "name": "IbmCloudIamDetector" 26 | }, 27 | { 28 | "name": "IbmCosHmacDetector" 29 | }, 30 | { 31 | "name": "JwtTokenDetector" 32 | }, 33 | { 34 | "name": "KeywordDetector", 35 | "keyword_exclude": "" 36 | }, 37 | { 38 | "name": "MailchimpDetector" 39 | }, 40 | { 41 | "name": "PrivateKeyDetector" 42 | }, 43 | { 44 | "name": "SlackDetector" 45 | }, 46 | { 47 | "name": "SoftlayerDetector" 48 | }, 49 | { 50 | "name": "StripeDetector" 51 | }, 52 | { 53 | "name": "TwilioKeyDetector" 54 | } 55 | ], 56 | "filters_used": [ 57 | { 58 | "path": "detect_secrets.filters.allowlist.is_line_allowlisted" 59 | }, 60 | { 61 | "path": "detect_secrets.filters.common.is_baseline_file", 62 | "filename": ".secrets.baseline" 63 | }, 64 | { 65 | "path": "detect_secrets.filters.common.is_ignored_due_to_verification_policies", 66 | "min_level": 2 67 | }, 68 | { 69 | "path": "detect_secrets.filters.heuristic.is_indirect_reference" 70 | }, 71 | { 72 | "path": "detect_secrets.filters.heuristic.is_likely_id_string" 73 | }, 74 | { 75 | "path": "detect_secrets.filters.heuristic.is_lock_file" 76 | }, 77 | { 78 | "path": "detect_secrets.filters.heuristic.is_not_alphanumeric_string" 79 | }, 80 | { 81 | "path": "detect_secrets.filters.heuristic.is_potential_uuid" 82 | }, 83 | { 84 | "path": "detect_secrets.filters.heuristic.is_prefixed_with_dollar_sign" 85 | }, 86 | { 87 | 
"path": "detect_secrets.filters.heuristic.is_sequential_string" 88 | }, 89 | { 90 | "path": "detect_secrets.filters.heuristic.is_swagger_file" 91 | }, 92 | { 93 | "path": "detect_secrets.filters.heuristic.is_templated_secret" 94 | }, 95 | { 96 | "path": "detect_secrets.filters.regex.should_exclude_file", 97 | "pattern": [ 98 | "poetry.lock" 99 | ] 100 | } 101 | ], 102 | "results": { 103 | ".env.example": [ 104 | { 105 | "type": "Secret Keyword", 106 | "filename": ".env.example", 107 | "hashed_secret": "7dfe63b6762fc69b8e486a2bafa43b8f7d23b788", 108 | "is_verified": false, 109 | "line_number": 7 110 | }, 111 | { 112 | "type": "Secret Keyword", 113 | "filename": ".env.example", 114 | "hashed_secret": "4b9a4ce92b6a01a4cd6ee1672d31c043f2ae79ab", 115 | "is_verified": false, 116 | "line_number": 14 117 | }, 118 | { 119 | "type": "Secret Keyword", 120 | "filename": ".env.example", 121 | "hashed_secret": "77ea6398f252999314d609a708842a49fc43e055", 122 | "is_verified": false, 123 | "line_number": 17 124 | } 125 | ] 126 | }, 127 | "generated_at": "2024-04-10T20:05:01Z" 128 | } 129 | -------------------------------------------------------------------------------- /.yamllint: -------------------------------------------------------------------------------- 1 | --- 2 | extends: default 3 | 4 | rules: 5 | key-duplicates: enable 6 | 7 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Code of conduct 2 | 3 | This repository is governed by Mozilla's code of conduct and etiquette guidelines. For more details please see the [Mozilla Community Participation Guidelines](https://www.mozilla.org/about/governance/policies/participation/) and [Developer Etiquette Guidelines](https://bugzilla.mozilla.org/page.cgi?id=etiquette.html). 
4 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | First off, thanks for taking the time to contribute! ❤️ 2 | 3 | All types of contributions are encouraged and valued. 4 | 5 | Before doing so, here are a few guidelines: 6 | 7 | * You agree to license your contributions under the project [license](LICENSE). 8 | * Use pull-requests early so it's open for discussion, even if your 9 | contribution isn't ready yet. 10 | * All pull requests should include tests, as they help us avoid regressions in 11 | our code. 12 | * A pull-request adding functionality should also update the documentation 13 | accordingly. 14 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | # Creating a python base with shared environment variables 2 | FROM python:3.13.3 AS base 3 | ENV PIP_NO_CACHE_DIR=off \ 4 | PIP_DEFAULT_TIMEOUT=100 \ 5 | PIP_DISABLE_PIP_VERSION_CHECK=on \ 6 | POETRY_HOME="/opt/poetry" \ 7 | POETRY_NO_INTERACTION=1 \ 8 | POETRY_VIRTUALENVS_IN_PROJECT=true \ 9 | PYSETUP_PATH="/opt/pysetup" 10 | 11 | ENV PATH="$POETRY_HOME/bin:$PATH" 12 | 13 | # Install Poetry - respects $POETRY_VERSION & $POETRY_HOME 14 | RUN python3 -m venv $POETRY_HOME && \ 15 | $POETRY_HOME/bin/pip install poetry==1.7.1 && \ 16 | $POETRY_HOME/bin/poetry --version 17 | 18 | # We copy our Python requirements here to cache them 19 | # and install only runtime deps using poetry 20 | WORKDIR $PYSETUP_PATH 21 | COPY ./poetry.lock ./pyproject.toml ./ 22 | RUN $POETRY_HOME/bin/poetry install --without dev --no-root 23 | 24 | # `production` stage uses the dependencies downloaded in the `base` stage 25 | FROM python:3.13.3-slim AS production 26 | 27 | # Install pandoc for markdown to Jira conversions. 
28 | RUN apt-get -y update && \ 29 | apt-get -y install --no-install-recommends pandoc 30 | 31 | ENV PORT=8000 \ 32 | PYTHONUNBUFFERED=1 \ 33 | PYTHONDONTWRITEBYTECODE=1 \ 34 | VIRTUAL_ENV=/opt/pysetup/.venv 35 | 36 | ENV PATH="$VIRTUAL_ENV/bin:$PATH" 37 | 38 | COPY --from=base $VIRTUAL_ENV $VIRTUAL_ENV 39 | 40 | ARG userid=10001 41 | ARG groupid=10001 42 | RUN groupadd --gid $groupid app && \ 43 | useradd -g app --uid $userid --shell /usr/sbin/nologin --create-home app 44 | USER app 45 | 46 | WORKDIR /app 47 | COPY . . 48 | 49 | EXPOSE $PORT 50 | CMD ["python", "-m", "asgi"] 51 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | # Set these in the environment to override them. This is helpful for 2 | # development if you have file ownership problems because the user 3 | # in the container doesn't match the user on your host. 4 | _UID ?= 10001 5 | _GID ?= 10001 6 | 7 | VENV := $(shell echo $${VIRTUAL_ENV-.venv}) 8 | INSTALL_STAMP = $(VENV)/.install.stamp 9 | DOTENV_FILE = .env 10 | 11 | .PHONY: help 12 | help: 13 | @echo "Usage: make RULE" 14 | @echo "" 15 | @echo "JBI make rules:" 16 | @echo "" 17 | @echo "Local" 18 | @echo " clean - clean local cache folders" 19 | @echo " format - run linting checks, fix in place" 20 | @echo " lint - run linting checks" 21 | @echo " start - run the API service locally" 22 | @echo " test - run test suite" 23 | @echo "" 24 | @echo "Docker" 25 | @echo " build - build docker container" 26 | @echo " docker-start - run the API service through docker" 27 | @echo " docker-shell - open a shell in the web container" 28 | @echo "" 29 | @echo " help - see this text" 30 | 31 | .PHONY: clean 32 | clean: 33 | find . 
-name "__pycache__" | xargs rm -rf 34 | rm -rf .mypy_cache .ruff_cache .coverage .venv 35 | 36 | $(VENV)/bin/python: 37 | python3 -m venv $(VENV) 38 | 39 | install: $(VENV)/bin/python $(INSTALL_STAMP) 40 | $(INSTALL_STAMP): poetry.lock 41 | @if [ -z $(shell command -v poetry 2> /dev/null) ]; then echo "Poetry could not be found. See https://python-poetry.org/docs/"; exit 2; fi 42 | POETRY_VIRTUALENVS_IN_PROJECT=1 poetry install --with dev --no-root 43 | touch $(INSTALL_STAMP) 44 | 45 | .PHONY: build 46 | build: 47 | docker-compose build \ 48 | --build-arg userid=${_UID} --build-arg groupid=${_GID} 49 | 50 | .PHONY: format 51 | format: $(INSTALL_STAMP) 52 | bin/lint.sh lint --fix 53 | bin/lint.sh format --fix 54 | 55 | .PHONY: lint 56 | lint: $(INSTALL_STAMP) 57 | bin/lint.sh 58 | 59 | .PHONY: start 60 | start: $(INSTALL_STAMP) $(DOTENV_FILE) 61 | poetry run python -m asgi 62 | 63 | $(DOTENV_FILE): 64 | cp .env.example $(DOTENV_FILE) 65 | 66 | .PHONY: docker-shell 67 | docker-shell: $(DOTENV_FILE) 68 | docker compose run --rm web /bin/sh 69 | 70 | .PHONY: docker-start 71 | docker-start: $(DOTENV_FILE) 72 | docker compose up 73 | 74 | .PHONY: test 75 | test: $(INSTALL_STAMP) 76 | bin/test.sh 77 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ![Status Sustain](https://img.shields.io/badge/Status-Sustain-green) 2 | [![Build Docker image](https://github.com/mozilla/jira-bugzilla-integration/actions/workflows/build-publish.yaml/badge.svg)](https://github.com/mozilla/jira-bugzilla-integration/actions/workflows/build-publish.yaml) 3 | [![Run tests](https://github.com/mozilla/jira-bugzilla-integration/actions/workflows/test.yaml/badge.svg)](https://github.com/mozilla/jira-bugzilla-integration/actions/workflows/test.yaml) 4 | 
[![pre-commit](https://img.shields.io/badge/pre--commit-enabled-brightgreen?logo=pre-commit&logoColor=white)](https://github.com/pre-commit/pre-commit) 5 | 6 | # Jira Bugzilla Integration (JBI) 7 | System to sync Bugzilla bugs to Jira issues. 8 | 9 | ## Caveats 10 | - The system accepts webhook events from Bugzilla 11 | - Bugs' `whiteboard` tags are used to determine if they should be synchronized or ignored 12 | - **Only public bugs** are eligible for synchronization. 13 | - The events are transformed into Jira issues 14 | - The system sets the `see_also` field of the Bugzilla bug with the URL to the Jira issue 15 | - No other information is synchronized from Jira to Bugzilla. 16 | 17 | > **Note:** whiteboard tags are string between brackets, and can have prefixes/suffixes using 18 | > dashes (eg. ``[project]``, ``[project-fx-h2]``, ``[backlog-project]``). 19 | 20 | ## Diagram Overview 21 | 22 | ``` mermaid 23 | graph TD 24 | subgraph bugzilla services 25 | A[Bugzilla] -.-|bugzilla event| B[(Webhook Queue)] 26 | B --- C[Webhook Push Service] 27 | end 28 | D --> |create/update/delete issue| E[Jira] 29 | D<-->|read bug| A 30 | D -->|update see_also| A 31 | subgraph jira-bugzilla-integration 32 | C -.->|post /bugzilla_webhook| D{JBI} 33 | F["config.{ENV}.yaml"] ---| read actions config| D 34 | end 35 | ``` 36 | 37 | ## Documentation 38 | 39 | * [Actions](docs/actions.md) 40 | * [Deployment](docs/deployment.md) 41 | * [Troubleshooting](docs/troubleshooting.md) 42 | * [RRA for JBI](https://docs.google.com/document/d/1p0wWVNK5V1jXKAOE-3EquBVcGOIksHD6Rgz9afZQ1A4/edit?usp=sharing) 43 | 44 | ## Usage 45 | 46 | ### How to onboard a new project? 47 | 48 | 1. Submit configuration for your project 49 | - If you're comfortable opening your own pull request, add an entry for your whiteboard tag (eg. `famous-product`) in the [actions configuration files](config/). 
50 | See [actions documentation](docs/actions.md) 51 | - If not, submit an issue [here](https://github.com/mozilla/jira-bugzilla-integration/issues/new?assignees=&labels=configuration&projects=&template=new-config.yaml&title=Sync+%3CBugzilla+Product%3E+with+%3CJira+Project%3E), and we'll set up your configuration 52 | 1. Grant permissions to the Jira Automation Bot 53 | 54 | - If you are an admin of the Jira project 55 | 56 | - go to your Jira project and open `Project Settings`, then `People`. 57 | - Select `Add People` and search for `Jira Automation`. If two are listed select the one with the green logo 58 | - From the `Roles` drop down select `Bots`. Click `Add 1 person`. 59 | - Add these permissions for the bot 60 | 61 | ``` 62 | "ADD_COMMENTS", 63 | "CREATE_ISSUES", 64 | "DELETE_ISSUES", 65 | "EDIT_ISSUES" 66 | ``` 67 | 68 | - If you are not an admin of the Jira project, contact the admin or reach out to `#jira-support` in Slack to determine how best to request the changes described above 69 | 70 | 1. Some actions require specific fields on the create and update screens in Jira. Double check the [actions documentation](docs/actions.md) 71 | 1. Once your configuration is merged and a JBI release is deployed, create a bug in Bugzilla and add your whiteboard tag to the bug. Note that the tag must be enclosed in square brackets (eg. `[famous-project]`). You should see an issue appear in Jira 72 | - If you want to start syncing a bug to a Jira issue that already exists, add the issue's link to the `See Also` section of the Bugzilla bug before you add the whiteboard tag 73 | 74 | 1. Verify that the action you took on the bug was property reflected on the Jira issue (e.g. the description was updated or a comment was added) 75 | 76 | # Development 77 | 78 | We use [pandoc](https://pandoc.org) to convert markdown to the Jira syntax. 
Make sure the binary is found in path or [specify your custom location](https://github.com/JessicaTegner/pypandoc#specifying-the-location-of-pandoc-binaries) and its version is sufficiently recent to support Jira syntax (e.g. 3.6.3). 79 | 80 | - `make start`: run the application locally (http://localhost:8000) 81 | - `make test`: run the unit tests suites 82 | - `make lint`: static analysis of the code base 83 | - `make format`: automatically format code to align to linting standards 84 | 85 | In order to pass arguments to `pytest`: 86 | 87 | ``` 88 | poetry run pytest -vv -k test_bugzilla_list_webhooks 89 | ``` 90 | 91 | You may consider: 92 | 93 | * Tweaking the application settings in the `.env` file (See [jbi/environment.py](../jbi/environment.py) for details) 94 | * Installing a pre-commit hook to lint your changes with `pre-commit install` 95 | -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | # Security Policy 2 | 3 | Mozilla has a [well-defined process for handling security vulnerabilities](https://www.mozilla.org/en-US/about/governance/policies/security-group/bugs/) based around responsible disclosure. 4 | 5 | ## Supported Versions 6 | 7 | | Version | Supported | 8 | | ------- | ------------------ | 9 | | v3.x.x | :white_check_mark: | 10 | | < v3.0 | :x: | 11 | 12 | ## Reporting a Vulnerability 13 | 14 | If you believe you have found a security vulnerability, you should visit the [Mozilla bug bounty program](https://www.mozilla.org/en-US/security/bug-bounty/) for information on how to submit them. 
15 | 16 | [This Bugzilla template](https://bugzilla.mozilla.org/enter_bug.cgi?assigned_to=nobody%40mozilla.org&bug_ignored=0&bug_severity=--&bug_status=NEW&bug_type=defect&cf_fx_iteration=---&cf_fx_points=---&component=SRE&contenttypemethod=list&contenttypeselection=text%2Fplain&defined_groups=1&filed_via=standard_form&flag_type-4=X&flag_type-607=X&flag_type-674=X&flag_type-800=X&flag_type-803=X&flag_type-936=X&form_name=enter_bug&groups=releng-security&groups=mozilla-employee-confidential&groups=partner-confidential&maketemplate=Remember%20values%20as%20bookmarkable%20template&op_sys=Unspecified&priority=--&product=Infrastructure%20%26%20Operations&rep_platform=Unspecified&target_milestone=---&version=unspecified) will help you report a security vulnerability directly to our SRE team. 17 | -------------------------------------------------------------------------------- /SUPPORT.md: -------------------------------------------------------------------------------- 1 | # Support 2 | 3 | If you're looking for support for JBI, check out: 4 | 5 | * `#jbi` on mozilla.slack.com 6 | -------------------------------------------------------------------------------- /asgi.py: -------------------------------------------------------------------------------- 1 | import uvicorn 2 | 3 | from jbi.environment import get_settings 4 | 5 | settings = get_settings() 6 | 7 | 8 | if __name__ == "__main__": 9 | server = uvicorn.Server( 10 | uvicorn.Config( 11 | "jbi.app:app", 12 | host=settings.host, 13 | port=settings.port, 14 | reload=settings.app_reload, 15 | log_config=None, 16 | ) 17 | ) 18 | server.run() 19 | -------------------------------------------------------------------------------- /bin/healthcheck.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import os 4 | 5 | import backoff 6 | import requests 7 | 8 | PORT = os.getenv("PORT", "8000") 9 | 10 | 11 | @backoff.on_exception( 12 | backoff.expo, 13 | 
requests.exceptions.RequestException, 14 | max_tries=5, 15 | ) 16 | def check_server(): 17 | url = f"http://0.0.0.0:{PORT}" 18 | response = requests.get(f"{url}/") 19 | response.raise_for_status() 20 | 21 | hb_response = requests.get(f"{url}/__heartbeat__") 22 | hb_details = hb_response.json() 23 | # Check that pandoc is installed, but ignore other checks 24 | # like connection to Jira or Bugzilla. 25 | assert hb_details["checks"]["jira.pandoc_install"] == "ok" 26 | print("Ok") 27 | 28 | 29 | if __name__ == "__main__": 30 | check_server() 31 | -------------------------------------------------------------------------------- /bin/lint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | set -e 4 | 5 | POETRY_RUN="poetry run" 6 | 7 | BANDIT_CMD="$POETRY_RUN bandit -lll --recursive jbi" 8 | 9 | FORMAT_CMD="$POETRY_RUN ruff format ." 10 | 11 | # Scan only files checked into the repo, omit poetry.lock 12 | DETECT_SECRETS_FILES="$(git ls-tree --full-tree -r --name-only HEAD | grep -v poetry.lock)" 13 | DETECT_SECRETS_CMD="$POETRY_RUN detect-secrets-hook $DETECT_SECRETS_FILES --baseline .secrets.baseline" 14 | 15 | LINT_CMD="$POETRY_RUN ruff check ."
16 | 17 | MYPY_CMD="$POETRY_RUN mypy jbi" 18 | 19 | YAMLLINT_CMD="$POETRY_RUN yamllint -c .yamllint config/*.yaml" 20 | 21 | ACTIONS_LINT_CMD="$POETRY_RUN jbi lint" 22 | 23 | all () { 24 | echo "running bandit" 25 | $BANDIT_CMD 26 | echo "running format" 27 | $FORMAT_CMD 28 | echo "running detect-secrets" 29 | $DETECT_SECRETS_CMD 30 | echo "running lint" 31 | $LINT_CMD 32 | echo "running mypy" 33 | $MYPY_CMD 34 | echo "running yamllint" 35 | $YAMLLINT_CMD 36 | echo "running actions lint" 37 | $ACTIONS_LINT_CMD 38 | } 39 | 40 | usage () { 41 | echo "Usage: bin/lint.sh [subcommand] [--fix]" 42 | echo " run linting checks, and optionally fix in place (if available)" 43 | echo "Subcommand": 44 | echo " bandit" 45 | echo " detect-secrets" 46 | echo " format" 47 | echo " lint" 48 | echo " mypy" 49 | echo " yamllint" 50 | echo " actions" 51 | } 52 | 53 | if [ -z "$1" ]; then 54 | all 55 | else 56 | subcommand=$1; shift 57 | case $subcommand in 58 | "format") 59 | if [ -n "$1" ] && [ "$1" != "--fix" ]; then 60 | usage 61 | else 62 | check_flag="--check" 63 | [ "$1" = "--fix" ] && check_flag="" 64 | $FORMAT_CMD ${check_flag:-} 65 | fi 66 | ;; 67 | "lint") 68 | if [ -n "$1" ] && [ "$1" != "--fix" ]; then 69 | usage 70 | else 71 | $LINT_CMD ${1:-} 72 | fi 73 | ;; 74 | "yamllint") 75 | $YAMLLINT_CMD 76 | ;; 77 | "mypy") 78 | $MYPY_CMD 79 | ;; 80 | "bandit") 81 | $BANDIT_CMD 82 | ;; 83 | "detect-secrets") 84 | $DETECT_SECRETS_CMD 85 | ;; 86 | "actions") 87 | $ACTIONS_LINT_CMD 88 | ;; 89 | *) 90 | usage 91 | ;; 92 | esac 93 | fi 94 | 95 | -------------------------------------------------------------------------------- /bin/test.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | set -e 4 | 5 | CURRENT_DIR=$(CDPATH= cd -- "$(dirname -- "$0")" && pwd) 6 | BASE_DIR="$(dirname "$CURRENT_DIR")" 7 | 8 | POETRY_RUN="poetry run" 9 | 10 | $POETRY_RUN coverage run --rcfile "${BASE_DIR}/pyproject.toml" -m pytest 11 | $POETRY_RUN 
coverage report --rcfile "${BASE_DIR}/pyproject.toml" -m --fail-under 75 12 | $POETRY_RUN coverage html --rcfile "${BASE_DIR}/pyproject.toml" 13 | -------------------------------------------------------------------------------- /config/config.local.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | # Action Config 3 | - whiteboard_tag: devtest 4 | bugzilla_user_id: tbd 5 | description: DevTest whiteboard tag 6 | parameters: 7 | jira_project_key: DevTest 8 | jira_components: 9 | set_custom_components: 10 | - "Main" 11 | -------------------------------------------------------------------------------- /config/config.nonprod.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | # Action Config 3 | - whiteboard_tag: nonprodtest 4 | bugzilla_user_id: 644672 5 | description: Nonprod testing whiteboard tag (JBI Bin Project) 6 | parameters: 7 | jira_project_key: JB 8 | labels_brackets: both 9 | steps: 10 | new: 11 | - create_issue 12 | - maybe_delete_duplicate 13 | - maybe_update_components 14 | - add_link_to_bugzilla 15 | - add_link_to_jira 16 | - maybe_assign_jira_user 17 | - maybe_update_issue_severity 18 | - sync_whiteboard_labels 19 | - sync_keywords_labels 20 | existing: 21 | - update_issue_summary 22 | - maybe_assign_jira_user 23 | - maybe_update_issue_severity 24 | - sync_whiteboard_labels 25 | - sync_keywords_labels 26 | comment: 27 | - create_comment 28 | 29 | - whiteboard_tag: fxcm 30 | bugzilla_user_id: tbd 31 | description: Firefox Credential Management Team whiteboard tag 32 | parameters: 33 | jira_project_key: JB 34 | steps: 35 | new: 36 | - create_issue 37 | - maybe_delete_duplicate 38 | - add_link_to_bugzilla 39 | - add_link_to_jira 40 | - maybe_assign_jira_user 41 | - maybe_update_issue_priority 42 | - maybe_update_issue_status 43 | - sync_whiteboard_labels 44 | existing: 45 | - update_issue_summary 46 | - maybe_assign_jira_user 47 | - maybe_update_issue_priority 
48 | - maybe_update_issue_status 49 | - sync_whiteboard_labels 50 | labels_brackets: both 51 | status_map: 52 | ASSIGNED: To Do 53 | REOPENED: To Do 54 | RESOLVED: Done 55 | VERIFIED: Done 56 | FIXED: Done 57 | INVALID: Done 58 | WONTFIX: Done 59 | INACTIVE: Done 60 | DUPLICATE: Done 61 | WORKSFORME: Done 62 | INCOMPLETE: Done 63 | MOVED: Done 64 | 65 | - whiteboard_tag: fxdroid 66 | bugzilla_user_id: 430528 67 | description: Firefox Android Team Tag 68 | parameters: 69 | jira_project_key: JB 70 | steps: 71 | new: 72 | - create_issue 73 | - maybe_delete_duplicate 74 | - add_link_to_bugzilla 75 | - add_link_to_jira 76 | - maybe_assign_jira_user 77 | - maybe_update_issue_status 78 | - sync_whiteboard_labels 79 | existing: 80 | - update_issue_summary 81 | - maybe_assign_jira_user 82 | - maybe_update_issue_status 83 | - sync_whiteboard_labels 84 | labels_brackets: both 85 | status_map: 86 | UNCONFIRMED: Backlog 87 | NEW: Backlog 88 | ASSIGNED: In Progress 89 | REOPENED: In Progress 90 | RESOLVED: Closed 91 | VERIFIED: Closed 92 | FIXED: Closed 93 | INVALID: Closed 94 | WONTFIX: Closed 95 | INACTIVE: Closed 96 | DUPLICATE: Closed 97 | WORKSFORME: Closed 98 | INCOMPLETE: Closed 99 | MOVED: Closed 100 | 101 | - whiteboard_tag: fxp 102 | bugzilla_user_id: 396948 103 | description: Performance Team 104 | parameters: 105 | jira_project_key: JB 106 | steps: 107 | new: 108 | - create_issue 109 | - maybe_delete_duplicate 110 | - add_link_to_bugzilla 111 | - add_link_to_jira 112 | - maybe_assign_jira_user 113 | - maybe_update_components 114 | - maybe_update_issue_status 115 | - sync_whiteboard_labels 116 | - sync_keywords_labels 117 | existing: 118 | - update_issue_summary 119 | - maybe_assign_jira_user 120 | - maybe_update_components 121 | - maybe_update_issue_status 122 | - sync_whiteboard_labels 123 | - sync_keywords_labels 124 | comment: 125 | - create_comment 126 | labels_brackets: both 127 | status_map: 128 | UNCONFIRMED: Backlog 129 | NEW: Backlog 130 | ASSIGNED: In 
Progress 131 | REOPENED: In Progress 132 | RESOLVED: Done 133 | VERIFIED: Done 134 | FIXED: Done 135 | INVALID: Done 136 | WONTFIX: Done 137 | INACTIVE: Done 138 | DUPLICATE: Done 139 | WORKSFORME: Done 140 | INCOMPLETE: Done 141 | MOVED: Done 142 | 143 | - whiteboard_tag: sp3 144 | bugzilla_user_id: 396948 145 | description: Speedometer 3 146 | parameters: 147 | jira_project_key: JB 148 | steps: 149 | new: 150 | - create_issue 151 | - maybe_delete_duplicate 152 | - add_link_to_bugzilla 153 | - add_link_to_jira 154 | - maybe_assign_jira_user 155 | - maybe_update_components 156 | - maybe_update_issue_status 157 | - sync_whiteboard_labels 158 | - sync_keywords_labels 159 | existing: 160 | - update_issue_summary 161 | - maybe_assign_jira_user 162 | - maybe_update_components 163 | - maybe_update_issue_status 164 | - sync_whiteboard_labels 165 | - sync_keywords_labels 166 | comment: 167 | - create_comment 168 | labels_brackets: both 169 | status_map: 170 | UNCONFIRMED: To Do 171 | NEW: To Do 172 | ASSIGNED: In Progress 173 | REOPENED: In Progress 174 | RESOLVED: Done 175 | VERIFIED: Done 176 | FIXED: Done 177 | INVALID: Done 178 | WONTFIX: Done 179 | INACTIVE: Done 180 | DUPLICATE: Done 181 | WORKSFORME: Done 182 | INCOMPLETE: Done 183 | MOVED: Done 184 | 185 | - whiteboard_tag: dataplatform 186 | bugzilla_user_id: tbd 187 | description: Data Platform Infrastructure 188 | parameters: 189 | jira_project_key: JB 190 | jira_components: 191 | set_custom_components: 192 | - "Data Platform Infrastructure" 193 | steps: 194 | new: 195 | - create_issue 196 | - maybe_delete_duplicate 197 | - maybe_update_components 198 | - add_link_to_bugzilla 199 | - add_link_to_jira 200 | - maybe_assign_jira_user 201 | - maybe_update_issue_status 202 | existing: 203 | - update_issue_summary 204 | - maybe_assign_jira_user 205 | - maybe_update_issue_status 206 | comment: 207 | - create_comment 208 | labels_brackets: both 209 | status_map: 210 | UNCONFIRMED: Backlog 211 | NEW: Backlog 212 | 
ASSIGNED: In Progress 213 | REOPENED: In Progress 214 | RESOLVED: Done 215 | VERIFIED: Done 216 | FIXED: Done 217 | INVALID: Done 218 | WONTFIX: Done 219 | INACTIVE: Done 220 | DUPLICATE: Done 221 | WORKSFORME: Done 222 | INCOMPLETE: Done 223 | MOVED: Done 224 | 225 | - whiteboard_tag: dataquality 226 | bugzilla_user_id: tbd 227 | description: Data Quality 228 | parameters: 229 | jira_project_key: JB 230 | jira_components: 231 | set_custom_components: 232 | - "Data Quality" 233 | steps: 234 | new: 235 | - create_issue 236 | - maybe_delete_duplicate 237 | - maybe_update_components 238 | - add_link_to_bugzilla 239 | - add_link_to_jira 240 | - maybe_assign_jira_user 241 | - maybe_update_issue_status 242 | existing: 243 | - update_issue_summary 244 | - maybe_assign_jira_user 245 | - maybe_update_issue_status 246 | comment: 247 | - create_comment 248 | labels_brackets: both 249 | status_map: 250 | UNCONFIRMED: To Do 251 | NEW: To Do 252 | ASSIGNED: In Progress 253 | REOPENED: In Progress 254 | RESOLVED: Done 255 | VERIFIED: Done 256 | FIXED: Done 257 | INVALID: Done 258 | WONTFIX: Done 259 | INACTIVE: Done 260 | DUPLICATE: Done 261 | WORKSFORME: Done 262 | INCOMPLETE: Done 263 | MOVED: Done 264 | -------------------------------------------------------------------------------- /docker-compose.yaml: -------------------------------------------------------------------------------- 1 | version: "3.8" 2 | services: 3 | web: 4 | build: . 5 | command: python -m asgi 6 | env_file: 7 | - .env 8 | # Let the init system handle signals for us. 9 | # among other things this helps shutdown be fast 10 | init: true 11 | ports: 12 | - ${PORT:-8000}:${PORT:-8000} 13 | volumes: 14 | - .:/app 15 | retry: 16 | build: . 
17 | command: python -m jbi.retry 18 | env_file: 19 | - .env 20 | volumes: 21 | - .:/app 22 | -------------------------------------------------------------------------------- /docs/actions.md: -------------------------------------------------------------------------------- 1 | # Action 2 | The system reads the action configurations from a YAML file, one per environment. Each entry controls the synchronization between Bugzilla tickets and Jira issues. 3 | 4 | ## Configuration 5 | 6 | Below is an example of an action configuration: 7 | ```yaml 8 | - whiteboard_tag: example 9 | bugzilla_user_id: 514230 10 | description: example configuration 11 | parameters: 12 | jira_project_key: EXMPL 13 | ``` 14 | 15 | A bit more about the different fields... 16 | - `whiteboard_tag` 17 | - string 18 | - The tag to be matched in the Bugzilla `whiteboard` field 19 | - `bugzilla_user_id` 20 | - a bugzilla user id, a list of user ids, or a literal "tbd" to signify that no bugzilla user id is available 21 | - If an issue arises with the workflow, communication will be established with these users 22 | - Please enter the user information for one or more stakeholders 23 | - `description` 24 | - string 25 | - Please enter a description; for example, team name or project use-case. 26 | - `enabled` (optional) 27 | - bool [true, false] 28 | - default: true 29 | - If false, matching events will not be synchronized 30 | - `parameters` (optional) 31 | - `ActionParams` 32 | - The parameters passed to step functions when the action is run (see below) 33 | 34 | 35 | [View 'nonprod' configurations here.](../config/config.nonprod.yaml) 36 | 37 | [View 'prod' configurations here.](../config/config.prod.yaml) 38 | 39 | 40 | ### Parameters 41 | 42 | Parameters are used by `step` functions to control what Bugzilla data is synced with Jira issues. 
Possible parameters are: 43 | 44 | - `jira_project_key` (**mandatory**) 45 | - string 46 | - The Jira project identifier 47 | - `steps` (optional) 48 | - mapping [str, list[str]] 49 | - If defined, the specified steps are executed. The group of steps listed under `new` are executed when a Bugzilla event occurs on a ticket that is unknown to Jira. The steps under `existing`, when the Bugzilla ticket is already linked to a Jira issue. The steps under `comment` when a comment is posted on a linked Bugzilla ticket. 50 | If one of these groups is not specified, the default steps will be used. 51 | - `jira_components` (optional) 52 | - object 53 | - Controls how Jira components are set on issues in the `maybe_update_components` step. 54 | - `use_bug_component` (optional) 55 | - bool 56 | - Set bug's component as issue component, eg. ``General`` (default `true`) 57 | - `use_bug_product` (optional) 58 | - bool 59 | - Set bug's product as issue component, eg. ``Core`` (default `false`) 60 | - `use_bug_component_with_product_prefix` (optional) 61 | - bool 62 | - Set bug's full component as issue component, eg. 
``Core::General`` (default `false`) 63 | - `set_custom_components` (optional) 64 | - list[str] 65 | - If defined, the issues will be assigned the specified components (default `[]`) 66 | - `labels_brackets` (optional) 67 | - enum ["yes", "no", "both"] 68 | - Controls whether issue labels should have brackets or not in the `sync_whiteboard_labels` step (default: "no") 69 | - `status_map` (optional) 70 | - mapping [str, str] 71 | - If defined, map the Bugzilla bug status (or resolution) to Jira issue status 72 | - `resolution_map` (optional) 73 | - mapping [str, str] 74 | - If defined, map the Bugzilla bug resolution to Jira issue resolution 75 | - `issue_type_map` (optional) 76 | - mapping [str, str] 77 | - If defined, map the Bugzilla type to Jira issue type (default: ``Bug`` if ``defect`` else ``Task``) 78 | 79 | Minimal configuration: 80 | ```yaml 81 | whiteboard_tag: example 82 | bugzilla_user_id: 514230 83 | description: minimal configuration 84 | parameters: 85 | jira_project_key: EXMPL 86 | ``` 87 | 88 | A configuration that will set an assignee and change the Jira issue status and resolution. 
89 | 90 | ```yaml 91 | - whiteboard_tag: fidefe 92 | bugzilla_user_id: 514230 93 | description: full configuration 94 | parameters: 95 | jira_project_key: FIDEFE 96 | steps: 97 | new: 98 | - create_issue 99 | - maybe_delete_duplicate 100 | - add_link_to_bugzilla 101 | - add_link_to_jira 102 | - maybe_assign_jira_user 103 | - maybe_update_issue_resolution 104 | - maybe_update_issue_status 105 | existing: 106 | - update_issue 107 | - add_jira_comments_for_changes 108 | - maybe_assign_jira_user 109 | - maybe_update_issue_resolution 110 | - maybe_update_issue_status 111 | comment: 112 | - create_comment 113 | status_map: 114 | ASSIGNED: In Progress 115 | FIXED: Closed 116 | WONTFIX: Closed 117 | DUPLICATE: Closed 118 | INVALID: Closed 119 | INCOMPLETE: Closed 120 | WORKSFORME: Closed 121 | REOPENED: In Progress 122 | resolution_map: 123 | FIXED: Done 124 | DUPLICATE: Duplicate 125 | WONTFIX: "Won't Do" 126 | ``` 127 | 128 | In this case if the bug changes to the NEW status the action will attempt to set the linked Jira 129 | issue status to "In Progress". If the bug changes to RESOLVED FIXED it will attempt to set the 130 | linked Jira issue status to "Closed". If the bug changes to a status not listed in `status_map` then no change will be made to the Jira issue. 131 | 132 | ### Available Steps 133 | 134 | - `create_issue` 135 | - `maybe_delete_duplicate` 136 | - `add_link_to_bugzilla` 137 | - `add_link_to_jira` 138 | - `maybe_assign_jira_user`: 139 | It will attempt to assign the Jira issue the same person as the bug is assigned to. This relies on 140 | the user using the same email address in both Bugzilla and Jira. If the user does not exist in Jira 141 | then the assignee is cleared from the Jira issue. 142 | **Requirements**: The Jira account that JBI uses requires the "Browse users and groups" global permission in order to set the assignee. 
143 | - `maybe_update_issue_resolution`: 144 | If the Bugzilla ticket resolution field is specified in the `resolution_map` parameter, it will set the 145 | Jira issue resolution. 146 | **Requirements**: ``resolution`` field must be present on issue forms (or configure `jira_resolution_field`). 147 | - `maybe_update_issue_status`: 148 | If the Bugzilla ticket status field is specified in the `status_map` parameter, it will set the 149 | Jira issue status. 150 | - `add_jira_comments_for_changes` 151 | - `maybe_update_issue_priority` 152 | **Requirements**: ``priority`` field must be present on issue forms (or configure `jira_priority_field`). 153 | - `maybe_update_issue_resolution` 154 | - `maybe_update_issue_severity` 155 | **Requirements**: ``customfield_10319`` field must be present on issue forms (or configure `jira_severity_field`). 156 | - `maybe_update_issue_status` 157 | - `maybe_update_issue_points` 158 | **Requirements**: ``customfield_10037`` field must be present on issue forms (or configure `jira_cf_fx_points_field`). 159 | - `create_comment` 160 | - `sync_keywords_labels` 161 | - `sync_whiteboard_labels` 162 | - `maybe_update_components`: looks at the component that's set on the bug (if any) and any components added to the project configuration with the `jira_components` parameter (see above). If those components are available on the Jira side as well, they're added to the Jira issue 163 | -------------------------------------------------------------------------------- /docs/adrs/001-actions-reusability.md: -------------------------------------------------------------------------------- 1 | # Composability of Actions 2 | 3 | - Status: accepted 4 | - Date: 2022-10-05 5 | 6 | Tracking issue: https://github.com/mozilla/jira-bugzilla-integration/pull/232 7 | 8 | ## Context and Problem Statement 9 | 10 | The gap between the default action behavior and custom workflow is too hard. 11 | How to improve composability of workflows? 
How can we make it easier for customers 12 | to create their own workflows without us? 13 | 14 | ## Decision Drivers 15 | 16 | - Amount of efforts 17 | - Code readability 18 | - Reusability 19 | - Configuration 20 | - Testability 21 | 22 | ## Considered Options 23 | 24 | 1. Add parameters to default action 25 | 2. Split default action into reusable steps 26 | 27 | ## Decision Outcome 28 | 29 | Chosen option: "[option 2]", because the amount of efforts to refactor 30 | the actions code is justified by the benefits in terms of readability, 31 | testability, reusability. The resulting configuration can be verbose, but 32 | will be explicit. 33 | 34 | ## Pros and Cons of the Options 35 | 36 | ### Option 1 - Add parameters to default action 37 | 38 | With this approach, we introduce parameters to the default action class, in 39 | order to enable or disable certain parts of its workflow. 40 | 41 | ```yaml 42 | whiteboard_tag: example 43 | module: jbi.actions.default 44 | parameters: 45 | jira_project_key: EXMPL 46 | sync_whiteboard_labels: false 47 | update_issue_resolution: true 48 | ``` 49 | 50 | - **Amount of efforts**: Low. Almost no refactoring necessary. 51 | - **Code readability**: Bad. Having several combinations of parameters will result in a lot of code branches. Plus, in order to implement custom workflows, contributors will inherit the default action, which will result in a lot of indirections. 52 | - **Reusability**: Bad. Reusing some bits of the default action won't be trivial without inheriting classes. 53 | - **Configuration**: Easy. Document all available parameters. 54 | - **Testability**: Bad. The number of combinations for all parameters can be huge and hard to test. 55 | 56 | ### Option 2 - Split default action into reusable steps 57 | 58 | With this approach, we split the default action class into tiny functions called "steps". 
59 | The configuration lists the steps to be executed by context, whether a comment is 60 | posted, a bug is created, or updated. 61 | 62 | ```yaml 63 | whiteboard_tag: example 64 | module: jbi.actions.default 65 | parameters: 66 | jira_project_key: EXMPL 67 | steps: 68 | new: 69 | - create_issue 70 | - add_link_to_bugzilla 71 | - add_link_to_jira 72 | existing: 73 | - update_issue 74 | - add_jira_comments_for_changes 75 | comment: 76 | - create_comment 77 | ``` 78 | 79 | - **Amount of efforts**: High. The whole action code and its tests have to be refactored. 80 | - **Code readability**: Great. Each step has its own limited scope. 81 | - **Reusability**: Great. Reusing steps is trivial. 82 | - **Configuration**: Verbose. Each workflow will repeat all necessary steps. It could also be hard to differentiate 83 | workflows if the list of steps is too long. 84 | - **Testability**: Great. Each step has a limited scope, and follows the functional programming paradigm. 85 | -------------------------------------------------------------------------------- /docs/adrs/002-action-customizaiton.md: -------------------------------------------------------------------------------- 1 | # Restrict Workflow Customization and Validate Default Action Parameters 2 | 3 | - Status: Accepted 4 | - Date: 2023-06-27 5 | 6 | Tracking issue: [#544](https://github.com/mozilla/jira-bugzilla-integration/issues/544) 7 | 8 | ## Context and Problem Statement 9 | 10 | When JBI was first designed, we envisioned a scenario where a user might want to contribute a Python module to create an entirely custom sync workflow. As we've continued to make workflow customization easier, we've questioned whether this freedom of customization is worth the added complexity of supporting custom modules and action parameters.
11 | 12 | ## Decision Drivers 13 | 14 | - Reduce complexity in handling custom modules 15 | - Prevent bugs due to misconfigured workflows 16 | - Align with the evolved designs that emphasize customization through combining steps 17 | 18 | ## Considered Options 19 | 20 | - Option 1: Maintain the ability to customize workflows through custom modules and parameters 21 | - Option 2: Restrict customization to the default action and validate action parameters with a schema 22 | 23 | ## Decision Outcome 24 | 25 | Considering the positive consequences of Option 2 and the fact that workflow customization is still possible through action steps, it is reasonable to choose Option 2 to simplify workflow customization and focus on improving the reliability and robustness of a single action. 26 | 27 | ## Pros and Cons of the Options 28 | 29 | ### Option 1: Maintain the ability to customize workflows through custom modules and parameters 30 | 31 | - Good, because it provides flexibility for users to create entirely custom action workflows 32 | - Bad, because it increases complexity in handling different parameter structures and custom module configurations 33 | 34 | ### Option 2: Restrict customization to the default action and enforce parameters with a schema 35 | 36 | - Good, because it validates that configured parameters are useable by step functions 37 | - Good, because we can safely assume that action parameters are of a certain type 38 | - Good, because it aligns with the evolved designs that emphasize customization through action steps 39 | 40 | Option 2 also still provides plenty of freedom to customize an action workflow. One can 41 | simply supply one large function that contains all of the logic of that action, e.g. 
42 | 43 | ```yaml 44 | parameters: 45 | jira_project_key: FIDEFE 46 | steps: 47 | new: 48 | - my_giant_big_step 49 | existing: 50 | - my_giant_big_step 51 | ``` 52 | 53 | While this is perhaps less "elegant" than the module-based approach, it still provides an equivalent amount of customization. 54 | -------------------------------------------------------------------------------- /docs/adrs/003-bucket.drawio.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/jira-bugzilla-integration/7c85b662f3e78319ccdafd102d1e6511ce057c9d/docs/adrs/003-bucket.drawio.jpg -------------------------------------------------------------------------------- /docs/adrs/003-bugzilla-response-codes.md: -------------------------------------------------------------------------------- 1 | # Bugzilla disables webhooks after too many errors 2 | 3 | - Status: implemented 4 | - Date: 2023-11-27 5 | 6 | Tracking issues: 7 | - [21 - Setting up Webhooks with 'ANY' Product](https://github.com/mozilla/jira-bugzilla-integration/issues/21) 8 | - [82 - Exception in JBI can delay webhook delivery](https://github.com/mozilla/jira-bugzilla-integration/issues/82) 9 | - [181 - Can we ping Bugzilla to see if the webhook is enabled?](https://github.com/mozilla/jira-bugzilla-integration/issues/181) 10 | - [710 - System is able to "drop" erring messages without user interference](https://github.com/mozilla/jira-bugzilla-integration/issues/710) 11 | - [730 - Establish convention for capturing system incidents](https://github.com/mozilla/jira-bugzilla-integration/issues/730) 12 | - [743 - Create alerts for when bugs fail to sync](https://github.com/mozilla/jira-bugzilla-integration/issues/743) 13 | 14 | ## Context and Problem Statement 15 | When Bugzilla receives too many error responses from JBI, it stops triggering webhook calls for the entire project causing data to stop syncing.
Frequently, these errors are due to a configuration error in Jira or Bugzilla. JBI being unable to process a payload due to errors in configuration (or incomplete configuration) in Jira or a mismatch of data for a single bug. These outages can last multiple days in some cases. 16 | 17 | We don't want the entire sync process to stop because of this. We have identified four options to solve this problem. 18 | 19 | ## Decision Drivers 20 | - Amount of initial engineering effort 21 | - Amount of maintenance effort 22 | - Overall performance of JBI (how quickly is data able to move) 23 | - How intuitive the solution is to the users that depend on the data (will picking the easiest option solve their needs?) 24 | 25 | ## Proposed Solution 26 | We propose to use a file share (or a data bucket) as a dead-letter-queue. Events will attempt to be reprocessed every 12 hours for up to 7 days. After which they will be dropped. Errors will be logged for each event that cannot be processed. Alerts can be triggered based on this to let Jira and Bugzilla admins know there is a problem. 27 | 28 | See the diagram below for a detailed flow of data. Note: This is designed to show the flow of data, not be representative of coding patterns or infrastructure. 29 | 30 | ![Flow chart detailing the data flow, see expandable below for full details](./003.drawio.jpg "Proposed Solution Flow Chart") 31 | 32 | 33 | ![Image detailing the bucket data structure. DLQ bucket > folders with names like project-bug_id > json files with names like action-[id-]timestamp.json](./003-bucket.drawio.jpg "Proposed Solution Flow Chart") 34 | 35 | 36 |
37 | 38 | Breakdown of flowchart 39 | 40 | 1. JBI receives a payload from Bugzilla or the Retry Scheduler. 41 | 1. JBI will always return 200/OK for a response. 42 | 1. If the bug is private, discard the event and log why. 43 | 1. If the bug cannot be found in the Bugzilla API, discard the event and log why. 44 | 1. If an associated action cannot be found for the event, discard the event and log why. 45 | 1. If a matching Jira issue cannot be found, and the event is not creating one, discard the event and log why. 46 | 1. If there is a mismatch between project keys in the event and Jira, discard the event and log why. 47 | 1. If there is already an event for this bug in the DLQ, do not try to process this event and skip to the Error Event Handler. 48 | 1. Write updated data to Jira's API. 49 | 1. If successful, delete any associated items in DLQ. 50 | 1. If error is returned, continue to Error Event Handler 51 | 1. Handle errors in Error Event Handler 52 | 1. Write an error to the logs, which may be forwarded to an alerting mechanism. 53 | 1. Write an updated event file to the DLQ if the original event is less than 7 days old. 54 | 1. If we have exceeded 7 days from the original event, delete associated DLQ items. 55 | 1. The retry scheduler runs every 12 hours and will re-send events to JBI. Oldest events will be processed first. An additional parameter will be provided that notes these are events to reprocess. 56 |
57 | 58 | ### Pros: 59 | - Avoids the problem of accidentally overwriting newer data with older data 60 | - Avoids making users correct data manually if something is misconfigured 61 | - Gives users a whole work week to update potentially misconfigured settings 62 | - Low maintenance effort 63 | - Mid-low engineering effort 64 | - High performance of JBI 65 | - Intuitive solution with alerting via error logs 66 | 67 | ### Cons: 68 | - Additional infrastructure for the DLQ file share or data bucket 69 | - Events will wait up to 12 hours to be reprocessed 70 | 71 | ### Notes: 72 | - This relies on using the ``last_change_time`` property from Bugzilla webhook payloads. 73 | - Also relies on checking the ``issue.comment.updated`` and ``updated`` properties in the Jira API. 74 | - This will cause a bit more latency in event processing, but nothing noticeable to users. 75 | - This will cause more API calls to Jira. We should consider rate limits. 76 | 77 | 78 | ## Considered Options 79 | For all of these options, we will be returning a successful 200 response to Bugzilla's webhook calls. Note: we have to return a 200 because of Bugzilla's webhook functionality (they check for 200 specifically, not just any OK response). 80 | 81 | ### Option 1: Log the failure and move on 82 | JBI will log that we couldn't process a specific payload, along with relevant ID's (bug id, Jira issue id, comment id, etc) so further investigation can be done if needed. 
83 | 84 | **Decision Drivers** 85 | - Amount of initial engineering effort: very low 86 | - Amount of maintenance effort: very low 87 | - Overall performance of JBI: high 88 | - How intuitive the solution: low - users will notice data is missing but see status pages that look green 89 | 90 | **Pros:** 91 | - The simplest solution 92 | 93 | **Cons:** 94 | - Will not alert people to data loss (without additional alerting functionality) 95 | - Still requires engineers to investigate further if needed 96 | 97 | ### Option 2: Ask a human to do something 98 | JBI will alert users that data could not be synced. This could happen through an IM alert or an email immediately, or a scheduled (daily?) report, or by creating a well-formed log that an alerting workflow picks up. We should know which users to identify based on project configuration in Bugzilla or a distribution list if doing an IM or email directly. 99 | 100 | **Decision Drivers** 101 | - Amount of initial engineering effort: low 102 | - Amount of maintenance effort: low 103 | - Overall performance of JBI: high 104 | - How intuitive the solution: high 105 | 106 | **Pros:** 107 | - Removes need for engineering to investigate 108 | - Alerts users directly that there is a problem 109 | 110 | **Cons:** 111 | - Alerts can be noisy and cause notification fatigue 112 | 113 | ### Option 3: Queue retries internally 114 | Create a persistence layer within the JBI containers that will queue and retry jobs for a specific length (2 hours? 2 days?) of time. This could be done with an internal cache (redis) or database (postgres) within the container. After retries exceed the max time length, an error would be logged and the data would be dropped. 
115 | 116 | **Decision Drivers** 117 | - Amount of initial engineering effort: high, creating more complex containers 118 | - Amount of maintenance effort: moderate, increased complexity of testing and debugging 119 | - Overall performance of JBI: high 120 | - How intuitive the solution: low - users will notice data is missing but see status pages that look green 121 | 122 | **Pros:** 123 | - Allows for retries up to a designated amount of time 124 | - Keeping all services within the container makes end-to-end testing and debugging easier (compared to option 4) 125 | 126 | **Cons:** 127 | - Increases complexity of the containers 128 | - Data will not persist across container restarts (i.e. redeploy) 129 | - High effort for engineers to build and maintain 130 | - Less intuitive to users and engineers, we would need to report on cache/queue metrics to logs 131 | - Data could be processed out of order, causing newer updates to get lost 132 | 133 | ### Option 4: Use a simple DLQ (dead letter queue) 134 | We would always return 200, but any events that fail to process internally would get sent to a DLQ and be replayed later if needed. This could be a storage bucket, kubernetes volume, or table in a database. A scheduled kubernetes job would then run to try to pick these up and reprocess them later (every 4 hours, for example). 135 | 136 | After too many failed attempts the payload would be marked as unprocessable (setting a flag in the table, or updating the file name). 
137 | 138 | **Decision Drivers** 139 | - Amount of initial engineering effort: mid, minimal added infrastructure (database/table or k8s volume or storage bucket) 140 | - Amount of maintenance effort: mid-low (mid if we spin up a new database) 141 | - Overall performance of JBI: high 142 | - How intuitive the solution: high to engineers, low to users 143 | 144 | **Pros:** 145 | - Durable and expandable solution 146 | - Does not reduce JBI throughput 147 | - Intuitive to engineers 148 | 149 | **Cons:** 150 | - Added infrastructure 151 | - Not intuitive to end users unless we build additional reporting so they know why an update didn't come over 152 | - Data could be processed out of order, causing newer updates to get lost 153 | 154 | ### Option 5: Use a dedicated queue solution 155 | We would have a dedicated service that accepts all API calls from Bugzilla and puts them into a queue (apache kafka, rabbitMQ, etc). JBI would shift to being a downstream service and process these events asynchronously. Any events that fail to process would get sent to a DLQ (dead letter queue) that could be replayed later if needed. 156 | 157 | There are plenty of existing solutions we could use to solve this problem from a technical perspective. A separate ADR would be done to identify the best answer if we choose to go this route. 
158 | 159 | **Decision Drivers** 160 | - Amount of initial engineering effort: high, building out more infrastructure 161 | - Amount of maintenance effort: high, maintaining more infrastructure 162 | - Overall performance of JBI: highest, event driven 163 | - How intuitive the solution: high - we'll have reporting on queue metrics 164 | 165 | **Pros:** 166 | - Most durable solution 167 | - Does not reduce JBI throughput 168 | - Intuitive to users and engineers, we can see and report on data in queue 169 | 170 | **Cons:** 171 | - Most complex solution 172 | - Highest effort for engineers to build and maintain 173 | 174 | ### Option 6: A combination of the above 175 | Example: We could create a simple external DLQ (e.g. a table in postgres) for re-processing and then alert users if the DLQ grows too quickly. 176 | 177 | ### Miscellaneous options that we thought about 178 | - Using a postgres or redis server to store data. This would mean another server to maintain and coordinate maintenance downtime. 179 | - Using a sqlite (or similar) file to store data. This doesn't work well in a scalable solution that will have multiple pods and threads running. 180 | - Using a queue (kafka, pub/sub, etc) but only as the DLQ and not as a work queue. There is a chance for data to be processed out of order with this approach if events come in too quickly. 
181 | 182 | 183 | ## Links 184 | - [What is event streaming?](https://kafka.apache.org/documentation/#intro_streaming) - Documentation from Apache Kafka 185 | -------------------------------------------------------------------------------- /docs/adrs/003.drawio.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/jira-bugzilla-integration/7c85b662f3e78319ccdafd102d1e6511ce057c9d/docs/adrs/003.drawio.jpg -------------------------------------------------------------------------------- /docs/adrs/template.md: -------------------------------------------------------------------------------- 1 | # [short title of solved problem and solution] 2 | 3 | - Status: [proposed | rejected | accepted | deprecated | … | superseded by 4 | [ADR-0005](0005-example.md)] 5 | - Date: [YYYY-MM-DD when the decision was last updated] 6 | 7 | Tracking issue: [ticket/issue URL] 8 | 9 | ## Context and Problem Statement 10 | 11 | [Describe the context and problem statement, e.g., in free form using two to 12 | three sentences. You may want to articulate the problem in form of a question.] 13 | 14 | ## Decision Drivers 15 | 16 | - [driver 1, e.g., a force, facing concern, …] 17 | - [driver 2, e.g., a force, facing concern, …] 18 | - … 19 | 20 | ## Considered Options 21 | 22 | 1. [option 1] 23 | 2. [option 2] 24 | 3. [option 3] 25 | 4. … 26 | 27 | ## Decision Outcome 28 | 29 | Chosen option: "[option 1]", because [justification. e.g., only option, which 30 | meets k.o. criterion decision driver | which resolves force force | … | comes 31 | out best (see below)]. 
32 | 33 | ### Positive Consequences 34 | 35 | - [e.g., improvement of quality attribute satisfaction, follow-up decisions 36 | required, …] 37 | - … 38 | 39 | ### Negative Consequences 40 | 41 | - [e.g., compromising quality attribute, follow-up decisions required, …] 42 | - … 43 | 44 | ## Pros and Cons of the Options 45 | 46 | ### Option 1 - … 47 | 48 | [example | description | pointer to more information | …] 49 | 50 | - Good, because [argument a] 51 | - Bad, because [argument b] 52 | - … 53 | 54 | ### Option 2 - … 55 | 56 | [example | description | pointer to more information | …] 57 | 58 | - Good, because [argument a] 59 | - Bad, because [argument b] 60 | - … 61 | 62 | ### Option 3 - … 63 | 64 | [example | description | pointer to more information | …] 65 | 66 | - Good, because [argument a] 67 | - Bad, because [argument b] 68 | - … 69 | 70 | ## Links 71 | 72 | - [Link type] [Link to ADR] 73 | 74 | - … 75 | -------------------------------------------------------------------------------- /docs/deployment.md: -------------------------------------------------------------------------------- 1 | # Deployment 2 | 3 | Software and configuration are deployed automatically: 4 | 5 | - on NONPROD when a pull-request is merged 6 | - on PROD when a tag is pushed 7 | 8 | | Env | Base URL | 9 | |---------|------------------------------------------------| 10 | | Nonprod | https://stage.jbi.nonprod.cloudops.mozgcp.net/ | 11 | | Prod | https://jbi.services.mozilla.com/ | 12 | 13 | In order to view the configured Jira and Bugzilla, check the root URL: 14 | 15 | ``` 16 | GET / 17 | 18 | { 19 | "configuration": { 20 | "bugzilla_base_url": "https://bugzilla-dev.allizom.org", 21 | "jira_base_url": "https://mozit-test.atlassian.net/" 22 | }, 23 | "description": "JBI v2 Platform", 24 | "documentation": "/docs", 25 | "title": "Jira Bugzilla Integration (JBI)", 26 | "version": "X.Y.Z" 27 | } 28 | ``` 29 | 30 | In order to verify that a certain commit was deployed, check that the Github 
Actions executed successfully on the commit, and use the *Version* endpoint: 31 | 32 | ``` 33 | GET /__version__ 34 | 35 | { 36 | "commit": "1ea79...", 37 | "version": "refs/tags/vX.Y.Z", 38 | "source": "https://github.com/mozilla/jira-bugzilla-integration", 39 | "build": "https://github.com/mozilla/jira-bugzilla-integration/actions/runs/2315380477" 40 | } 41 | ``` 42 | 43 | In order to verify that a certain action is configured correctly and enabled, use the *Powered By JBI* endpoint: [https://${SERVER}/powered_by_jbi](https://jbi.services.mozilla.com/powered_by_jbi) 44 | 45 | For the list of configured whiteboard tags: 46 | 47 | ``` 48 | GET /whiteboard_tags/ 49 | { 50 | "addons": { 51 | "whiteboard_tag": "addons", 52 | "bugzilla_user_id": 514230, 53 | "description": "Addons whiteboard tag for AMO Team", 54 | "enabled": true, 55 | "parameters": { 56 | "jira_project_key": "WEBEXT" 57 | } 58 | } 59 | ... 60 | } 61 | ``` 62 | 63 | ## Metrics 64 | 65 | The following metrics are sent via StatsD: 66 | 67 | - `jbi.bugzilla.ignored.count` 68 | - `jbi.bugzilla.processed.count` 69 | - `jbi.action.execution.timer` 70 | - `jbi.jira.methods.*.count` 71 | - `jbi.jira.methods.*.timer` 72 | - `jbi.bugzilla.methods.*.count` 73 | - `jbi.bugzilla.methods.*.timer` 74 | -------------------------------------------------------------------------------- /docs/troubleshooting.md: -------------------------------------------------------------------------------- 1 | # Troubleshooting 2 | 3 | ## Bugzilla tickets are not showing up as issues on Jira 4 | 5 | As a consumer, you can: 6 | 7 | - Open https://jbi.services.mozilla.com/powered_by_jbi/ and check that your project is listed and enabled there 8 | - Open https://jbi.services.mozilla.com/__heartbeat__ and make sure everything is `true` 9 | 10 | If you have access to the configured Bugzilla account: 11 | 12 | - Open https://bugzilla.mozilla.org/userprefs.cgi?tab=webhooks 13 | - Check that Webhook is still **enabled** 14 | - Check that 
WebHook is setup to be executed for your product 15 | 16 | ## Log Explorer Queries Examples 17 | 18 | * All incoming WebHooks: 19 | 20 | ``` 21 | jsonPayload.Type="request.summary" 22 | jsonPayload.Fields.path="/bugzilla_webhook" 23 | ``` 24 | 25 | * All action log entries: 26 | 27 | ``` 28 | jsonPayload.Type!="request.summary" AND 29 | ( 30 | NOT jsonPayload.Fields.operation:* --Entries without `operation` field 31 | OR (jsonPayload.Fields.operation!="handle" AND jsonPayload.Fields.operation!="ignore") 32 | ) 33 | ``` 34 | 35 | * For bugs whose whiteboard contains a certain string: 36 | 37 | ``` 38 | jsonPayload.Fields.bug.whiteboard=~"flowstate" 39 | ``` 40 | 41 | * For a certain Bug number: 42 | 43 | ``` 44 | jsonPayload.Fields.bug.id=1780798 45 | ``` 46 | 47 | * For a certain Jira project: 48 | 49 | ``` 50 | jsonPayload.Fields.action.parameters.jira_project_key="MR" 51 | ``` 52 | 53 | -------------------------------------------------------------------------------- /jbi/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Module domain specific code related to JBI. 3 | 4 | This part of the code is not aware of the HTTP context it runs in. 
5 | """ 6 | 7 | from enum import Enum 8 | 9 | 10 | class Operation(str, Enum): 11 | """Enumeration of possible operations logged during WebHook execution.""" 12 | 13 | HANDLE = "handle" 14 | EXECUTE = "execute" 15 | IGNORE = "ignore" 16 | SUCCESS = "success" 17 | 18 | CREATE = "create" 19 | UPDATE = "update" 20 | DELETE = "delete" 21 | COMMENT = "comment" 22 | ATTACHMENT = "attachment" 23 | LINK = "link" 24 | 25 | 26 | ActionResult = tuple[bool, dict] 27 | -------------------------------------------------------------------------------- /jbi/__main__.py: -------------------------------------------------------------------------------- 1 | import click 2 | 3 | from jbi.configuration import get_actions 4 | 5 | 6 | @click.group() 7 | def cli(): 8 | pass 9 | 10 | 11 | @cli.command() 12 | @click.argument("env", default="all") 13 | def lint(env): 14 | click.echo(f"Linting: {env} action configuration") 15 | 16 | if env == "all": 17 | envs = ["local", "nonprod", "prod"] 18 | else: 19 | envs = [env] 20 | 21 | for env in envs: 22 | get_actions(env) 23 | click.secho(f"No issues found for {env}.", fg="green") 24 | 25 | 26 | if __name__ == "__main__": 27 | cli() 28 | -------------------------------------------------------------------------------- /jbi/app.py: -------------------------------------------------------------------------------- 1 | """ 2 | Core FastAPI app (setup, middleware) 3 | """ 4 | 5 | import logging 6 | from contextlib import asynccontextmanager 7 | from pathlib import Path 8 | from typing import Any, AsyncGenerator 9 | 10 | import sentry_sdk 11 | from dockerflow import checks 12 | from dockerflow.fastapi import router as dockerflow_router 13 | from dockerflow.fastapi.middleware import ( 14 | MozlogRequestSummaryLogger, 15 | RequestIdMiddleware, 16 | ) 17 | from dockerflow.version import get_version 18 | from fastapi import FastAPI, Request, Response, status 19 | from fastapi.encoders import jsonable_encoder 20 | from fastapi.exceptions import 
RequestValidationError 21 | from fastapi.responses import JSONResponse 22 | from fastapi.staticfiles import StaticFiles 23 | 24 | import jbi 25 | import jbi.queue 26 | from jbi.configuration import get_actions 27 | from jbi.environment import get_settings 28 | from jbi.log import CONFIG 29 | from jbi.router import router 30 | 31 | SRC_DIR = Path(__file__).parent 32 | APP_DIR = Path(__file__).parents[1] 33 | 34 | ACTIONS = get_actions() 35 | settings = get_settings() 36 | version_info: dict[str, str] = get_version(APP_DIR) 37 | VERSION: str = version_info["version"] 38 | 39 | logging.config.dictConfig(CONFIG) 40 | 41 | logger = logging.getLogger(__name__) 42 | 43 | 44 | def traces_sampler(sampling_context: dict[str, Any]) -> float: 45 | """Function to dynamically set Sentry sampling rates""" 46 | 47 | request_path = sampling_context.get("asgi_scope", {}).get("path") 48 | if request_path == "/__lbheartbeat__": 49 | # Drop all __lbheartbeat__ requests 50 | return 0 51 | return settings.sentry_traces_sample_rate 52 | 53 | 54 | sentry_sdk.init( 55 | dsn=str(settings.sentry_dsn) if settings.sentry_dsn else None, 56 | traces_sampler=traces_sampler, 57 | release=VERSION, 58 | ) 59 | 60 | 61 | # https://github.com/tiangolo/fastapi/discussions/9241 62 | @asynccontextmanager 63 | async def lifespan(app: FastAPI) -> AsyncGenerator[None, None]: 64 | jira_service = jbi.jira.service.get_service() 65 | bugzilla_service = jbi.bugzilla.service.get_service() 66 | queue = jbi.queue.get_dl_queue() 67 | 68 | checks.register(bugzilla_service.check_bugzilla_connection, name="bugzilla.up") 69 | checks.register( 70 | bugzilla_service.check_bugzilla_webhooks, 71 | name="bugzilla.all_webhooks_enabled", 72 | ) 73 | 74 | checks.register(jira_service.check_jira_connection, name="jira.up") 75 | checks.register_partial( 76 | jira_service.check_jira_all_projects_are_visible, 77 | ACTIONS, 78 | name="jira.all_projects_are_visible", 79 | ) 80 | checks.register_partial( 81 | 
jira_service.check_jira_all_projects_have_permissions, 82 | ACTIONS, 83 | name="jira.all_projects_have_permissions", 84 | ) 85 | checks.register_partial( 86 | jira_service.check_jira_all_project_custom_components_exist, 87 | ACTIONS, 88 | name="jira.all_project_custom_components_exist", 89 | ) 90 | checks.register_partial( 91 | jira_service.check_jira_all_project_issue_types_exist, 92 | ACTIONS, 93 | name="jira.all_project_issue_types_exist", 94 | ) 95 | checks.register(jira_service.check_jira_pandoc_install, name="jira.pandoc_install") 96 | checks.register(queue.check_writable, name="queue.writable") 97 | checks.register(queue.check_readable, name="queue.readable") 98 | 99 | yield 100 | 101 | 102 | app = FastAPI( 103 | title="Jira Bugzilla Integration (JBI)", 104 | description="Platform providing synchronization of Bugzilla bugs to Jira issues.", 105 | version=VERSION, 106 | debug=settings.app_debug, 107 | lifespan=lifespan, 108 | ) 109 | 110 | app.state.APP_DIR = APP_DIR 111 | app.state.DOCKERFLOW_HEARTBEAT_FAILED_STATUS_CODE = 503 112 | app.state.DOCKERFLOW_SUMMARY_LOG_QUERYSTRING = True 113 | 114 | app.include_router(router) 115 | app.include_router(dockerflow_router) 116 | app.add_middleware(RequestIdMiddleware) 117 | app.add_middleware(MozlogRequestSummaryLogger) 118 | 119 | app.mount("/static", StaticFiles(directory=SRC_DIR / "static"), name="static") 120 | 121 | 122 | @app.exception_handler(RequestValidationError) 123 | async def validation_exception_handler( 124 | request: Request, exc: RequestValidationError 125 | ) -> Response: 126 | """ 127 | Override the default exception handler for validation 128 | errors in order to log some information about malformed 129 | requests. 
130 | """ 131 | logger.error( 132 | "invalid incoming request: %s", 133 | exc, 134 | extra={ 135 | "errors": exc.errors(), 136 | }, 137 | ) 138 | return JSONResponse( 139 | status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, 140 | content={"detail": jsonable_encoder(exc.errors())}, 141 | ) 142 | -------------------------------------------------------------------------------- /jbi/bugzilla/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/jira-bugzilla-integration/7c85b662f3e78319ccdafd102d1e6511ce057c9d/jbi/bugzilla/__init__.py -------------------------------------------------------------------------------- /jbi/bugzilla/client.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | import requests 4 | 5 | from jbi import environment 6 | from jbi.common.instrument import instrument 7 | 8 | from .models import ( 9 | ApiResponse, 10 | Bug, 11 | BugzillaComments, 12 | Comment, 13 | WebhooksResponse, 14 | ) 15 | 16 | settings = environment.get_settings() 17 | 18 | logger = logging.getLogger(__name__) 19 | 20 | 21 | class BugzillaClientError(Exception): 22 | """Errors raised by `BugzillaClient`.""" 23 | 24 | 25 | class BugNotAccessibleError(BugzillaClientError): 26 | """Bug is private or not accessible.""" 27 | 28 | 29 | instrumented_method = instrument( 30 | prefix="bugzilla", 31 | exceptions=( 32 | BugzillaClientError, 33 | requests.RequestException, 34 | ), 35 | ) 36 | 37 | 38 | class BugzillaClient: 39 | """A wrapper around `requests` to interact with a Bugzilla REST API.""" 40 | 41 | def __init__(self, base_url, api_key): 42 | """Initialize the client, without network activity.""" 43 | self.base_url = base_url 44 | self.api_key = api_key 45 | self._client = requests.Session() 46 | 47 | def _call(self, verb, url, *args, **kwargs): 48 | """Send HTTP requests with API key in querystring parameters.""" 49 | # Send API key in 
headers. 50 | # https://bmo.readthedocs.io/en/latest/api/core/v1/general.html?highlight=x-bugzilla-api-key#authentication 51 | headers = kwargs.setdefault("headers", {}) 52 | headers.setdefault("x-bugzilla-api-key", self.api_key) 53 | try: 54 | resp = self._client.request(verb, url, *args, **kwargs) 55 | resp.raise_for_status() 56 | except requests.HTTPError: 57 | logger.exception("%s %s", verb, url) 58 | raise 59 | parsed = resp.json() 60 | if parsed.get("error"): 61 | raise BugzillaClientError(parsed["message"]) 62 | return parsed 63 | 64 | @instrumented_method 65 | def logged_in(self) -> bool: 66 | """Verify the API key validity.""" 67 | # https://bugzilla.readthedocs.io/en/latest/api/core/v1/user.html#who-am-i 68 | try: 69 | resp = self._call("GET", f"{self.base_url}/rest/whoami") 70 | except (requests.HTTPError, BugzillaClientError): 71 | return False 72 | return "id" in resp 73 | 74 | @instrumented_method 75 | def get_bug(self, bugid) -> Bug: 76 | """Retrieve details about the specified bug id.""" 77 | # https://bugzilla.readthedocs.io/en/latest/api/core/v1/bug.html#rest-single-bug 78 | url = f"{self.base_url}/rest/bug/{bugid}" 79 | 80 | try: 81 | bug_info = self._call("GET", url) 82 | except requests.HTTPError as err: 83 | if err.response is not None and err.response.status_code in (401, 403, 404): 84 | if self.logged_in(): 85 | # If bug returns 401 and credentials are valid. 
86 | msg = err.response.json().get("message", "bug not accessible") 87 | raise BugNotAccessibleError(msg) 88 | raise 89 | 90 | parsed = ApiResponse.model_validate(bug_info) 91 | if not parsed.bugs: 92 | raise BugzillaClientError( 93 | f"Unexpected response content from 'GET {url}' (no 'bugs' field)" 94 | ) 95 | bug = parsed.bugs[0] 96 | # If comment is private, then fetch it from server 97 | if bug.comment and bug.comment.is_private: 98 | comment_list = self.get_comments(bugid) 99 | matching_comments = [c for c in comment_list if c.id == bug.comment.id] 100 | # If no matching entry is found, set `bug.comment` to `None`. 101 | found = matching_comments[0] if matching_comments else None 102 | bug = bug.model_copy(update={"comment": found}, deep=True) 103 | return bug 104 | 105 | @instrumented_method 106 | def get_comments(self, bugid) -> list[Comment]: 107 | """Retrieve the list of comments of the specified bug id.""" 108 | # https://bugzilla.readthedocs.io/en/latest/api/core/v1/comment.html#rest-comments 109 | url = f"{self.base_url}/rest/bug/{bugid}/comment" 110 | comments_info = self._call("GET", url) 111 | comments = comments_info.get("bugs", {}).get(str(bugid), {}).get("comments") 112 | if comments is None: 113 | raise BugzillaClientError( 114 | f"Unexpected response content from 'GET {url}' (no 'bugs' field)" 115 | ) 116 | return BugzillaComments.validate_python(comments) 117 | 118 | @instrumented_method 119 | def update_bug(self, bugid, **fields) -> Bug: 120 | """Update the specified fields of the specified bug.""" 121 | # https://bugzilla.readthedocs.io/en/latest/api/core/v1/bug.html#rest-update-bug 122 | url = f"{self.base_url}/rest/bug/{bugid}" 123 | updated_info = self._call("PUT", url, json=fields) 124 | parsed = ApiResponse.model_validate(updated_info) 125 | if not parsed.bugs: 126 | raise BugzillaClientError( 127 | f"Unexpected response content from 'PUT {url}' (no 'bugs' field)" 128 | ) 129 | return parsed.bugs[0] 130 | 131 | @instrumented_method 132 | 
def list_webhooks(self): 133 | """List the currently configured webhooks, including their status.""" 134 | url = f"{self.base_url}/rest/webhooks/list" 135 | webhooks_info = self._call("GET", url) 136 | parsed = WebhooksResponse.model_validate(webhooks_info) 137 | if parsed.webhooks is None: 138 | raise BugzillaClientError( 139 | f"Unexpected response content from 'GET {url}' (no 'webhooks' field)" 140 | ) 141 | return [wh for wh in parsed.webhooks if "/bugzilla_webhook" in wh.url] 142 | -------------------------------------------------------------------------------- /jbi/bugzilla/models.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import logging 3 | from typing import Any, Optional, TypedDict 4 | from urllib.parse import ParseResult, urlparse 5 | 6 | from pydantic import ( 7 | AwareDatetime, 8 | BaseModel, 9 | TypeAdapter, 10 | ValidationError, 11 | ValidationInfo, 12 | ValidatorFunctionWrapHandler, 13 | ) 14 | from pydantic.functional_validators import WrapValidator 15 | from typing_extensions import Annotated 16 | 17 | logger = logging.getLogger(__name__) 18 | JIRA_HOSTNAMES = ("jira", "atlassian") 19 | 20 | BugId = TypedDict("BugId", {"id": Optional[int]}) 21 | 22 | 23 | def maybe_add_timezone( 24 | v: Any, handler: ValidatorFunctionWrapHandler, info: ValidationInfo 25 | ): 26 | if isinstance(v, str): 27 | try: 28 | return handler(v) 29 | except ValidationError: 30 | return handler(v + "+00:00") 31 | assert isinstance(v, datetime.datetime), "must be a datetime here" 32 | v = v.replace(tzinfo=datetime.timezone.utc) 33 | return v 34 | 35 | 36 | SmartAwareDatetime = Annotated[AwareDatetime, WrapValidator(maybe_add_timezone)] 37 | 38 | 39 | class WebhookUser(BaseModel, frozen=True): 40 | """Bugzilla User Object""" 41 | 42 | id: int 43 | login: str 44 | real_name: str 45 | 46 | 47 | class WebhookEventChange(BaseModel, frozen=True, coerce_numbers_to_str=True): 48 | """Bugzilla Change Object""" 49 | 50 | 
field: str 51 | removed: str 52 | added: str 53 | 54 | 55 | class WebhookEvent(BaseModel, frozen=True): 56 | """Bugzilla Event Object""" 57 | 58 | action: str 59 | time: SmartAwareDatetime 60 | user: Optional[WebhookUser] = None 61 | changes: Optional[list[WebhookEventChange]] = None 62 | target: Optional[str] = None 63 | routing_key: Optional[str] = None 64 | 65 | def changed_fields(self) -> list[str]: 66 | """Returns the names of changed fields in a bug""" 67 | 68 | return [c.field for c in self.changes] if self.changes else [] 69 | 70 | 71 | class WebhookComment(BaseModel, frozen=True): 72 | """Bugzilla Comment Object""" 73 | 74 | body: Optional[str] = None 75 | id: Optional[int] = None 76 | number: Optional[int] = None 77 | is_private: Optional[bool] = None 78 | creation_time: Optional[SmartAwareDatetime] = None 79 | 80 | 81 | class Bug(BaseModel, frozen=True): 82 | """Bugzilla Bug Object""" 83 | 84 | id: int 85 | is_private: Optional[bool] = None 86 | type: Optional[str] = None 87 | product: Optional[str] = None 88 | component: Optional[str] = None 89 | whiteboard: Optional[str] = None 90 | keywords: Optional[list] = None 91 | flags: Optional[list] = None 92 | groups: Optional[list] = None 93 | status: Optional[str] = None 94 | resolution: Optional[str] = None 95 | see_also: Optional[list] = None 96 | summary: Optional[str] = None 97 | severity: Optional[str] = None 98 | priority: Optional[str] = None 99 | creator: Optional[str] = None 100 | assigned_to: Optional[str] = None 101 | comment: Optional[WebhookComment] = None 102 | # Custom field Firefox for story points 103 | cf_fx_points: Optional[str] = None 104 | 105 | @property 106 | def product_component(self) -> str: 107 | """Return the component prefixed with the product 108 | as show in the Bugzilla UI (eg. ``Core::General``). 
109 | """ 110 | result = self.product + "::" if self.product else "" 111 | return result + self.component if self.component else result 112 | 113 | def is_assigned(self) -> bool: 114 | """Return `true` if the bug is assigned to a user.""" 115 | return self.assigned_to != "nobody@mozilla.org" 116 | 117 | def extract_from_see_also(self, project_key): 118 | """Extract Jira Issue Key from see_also if jira url present""" 119 | if not self.see_also or len(self.see_also) == 0: 120 | return None 121 | 122 | candidates = [] 123 | for url in self.see_also: 124 | try: 125 | parsed_url: ParseResult = urlparse(url=url) 126 | host_parts = parsed_url.hostname.split(".") 127 | except (ValueError, AttributeError): 128 | logger.info( 129 | "Bug %s `see_also` is not a URL: %s", 130 | self.id, 131 | url, 132 | extra={ 133 | "bug": { 134 | "id": self.id, 135 | } 136 | }, 137 | ) 138 | continue 139 | 140 | if any(part in JIRA_HOSTNAMES for part in host_parts): 141 | parsed_jira_key = parsed_url.path.rstrip("/").split("/")[-1] 142 | if parsed_jira_key: # URL ending with / 143 | # Issue keys are like `{project_key}-{number}` 144 | if parsed_jira_key.startswith(f"{project_key}-"): 145 | return parsed_jira_key 146 | # If not obvious, then keep this link as candidate. 
147 | candidates.append(parsed_jira_key) 148 | 149 | return candidates[0] if candidates else None 150 | 151 | 152 | class WebhookRequest(BaseModel, frozen=True): 153 | """Bugzilla Webhook Request Object""" 154 | 155 | webhook_id: int 156 | webhook_name: str 157 | event: WebhookEvent 158 | bug: Bug 159 | 160 | 161 | class Comment(BaseModel, frozen=True): 162 | """Bugzilla Comment""" 163 | 164 | id: int 165 | text: str 166 | is_private: bool 167 | creator: str 168 | 169 | 170 | BugzillaComments = TypeAdapter(list[Comment]) 171 | 172 | 173 | class ApiResponse(BaseModel, frozen=True): 174 | """Bugzilla Response Object""" 175 | 176 | faults: Optional[list] = None 177 | bugs: Optional[list[Bug]] = None 178 | 179 | 180 | class Webhook(BaseModel, frozen=True): 181 | """Bugzilla Webhook""" 182 | 183 | id: int 184 | name: str 185 | url: str 186 | event: str 187 | product: str 188 | component: str 189 | enabled: bool 190 | errors: int 191 | # Ignored fields: 192 | # creator: str 193 | 194 | @property 195 | def slug(self): 196 | """Return readable identifier""" 197 | name = self.name.replace(" ", "-").lower() 198 | product = self.product.replace(" ", "-").lower() 199 | return f"{self.id}-{name}-{product}" 200 | 201 | 202 | class WebhooksResponse(BaseModel, frozen=True): 203 | """Bugzilla Webhooks List Response Object""" 204 | 205 | webhooks: Optional[list[Webhook]] = None 206 | -------------------------------------------------------------------------------- /jbi/bugzilla/service.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from functools import lru_cache 3 | 4 | import requests 5 | from dockerflow import checks 6 | from statsd.defaults.env import statsd 7 | 8 | from jbi import environment 9 | 10 | from .client import BugzillaClient, BugzillaClientError 11 | from .models import Bug 12 | 13 | settings = environment.get_settings() 14 | 15 | logger = logging.getLogger(__name__) 16 | 17 | 18 | class BugzillaService: 19 
| """Used by action workflows to perform action-specific Bugzilla tasks""" 20 | 21 | def __init__(self, client: BugzillaClient) -> None: 22 | self.client = client 23 | 24 | def add_link_to_see_also(self, bug: Bug, link: str): 25 | """Add link to Bugzilla ticket""" 26 | 27 | return self.client.update_bug(bug.id, see_also={"add": [link]}) 28 | 29 | def get_description(self, bug_id: int): 30 | """Fetch a bug's description 31 | 32 | A Bug's description does not appear in the payload of a bug. Instead, it is "comment 0" 33 | """ 34 | 35 | comment_list = self.client.get_comments(bug_id) 36 | comment_body = comment_list[0].text if comment_list else "" 37 | return str(comment_body) 38 | 39 | def refresh_bug_data(self, bug: Bug): 40 | """Re-fetch a bug to ensure we have the most up-to-date data""" 41 | 42 | refreshed_bug_data = self.client.get_bug(bug.id) 43 | # When bugs come in as webhook payloads, they have a "comment" 44 | # attribute, but this field isn't available when we get a bug by ID. 45 | # So, we make sure to add the comment back if it was present on the bug. 46 | updated_bug = refreshed_bug_data.model_copy(update={"comment": bug.comment}) 47 | return updated_bug 48 | 49 | def list_webhooks(self): 50 | """List the currently configured webhooks, including their status.""" 51 | 52 | return self.client.list_webhooks() 53 | 54 | def check_bugzilla_connection(self): 55 | if not self.client.logged_in(): 56 | return [checks.Error("Login fails or service down", id="bugzilla.login")] 57 | return [] 58 | 59 | def check_bugzilla_webhooks(self): 60 | # Do not bother executing the rest of checks if connection fails. 61 | if messages := self.check_bugzilla_connection(): 62 | return messages 63 | 64 | # Check that all JBI webhooks are enabled in Bugzilla, 65 | # and report disabled ones. 
66 | try: 67 | jbi_webhooks = self.list_webhooks() 68 | except (BugzillaClientError, requests.HTTPError) as e: 69 | return [ 70 | checks.Error( 71 | f"Could not list webhooks ({e})", id="bugzilla.webhooks.fetch" 72 | ) 73 | ] 74 | 75 | results = [] 76 | 77 | if len(jbi_webhooks) == 0: 78 | results.append( 79 | checks.Warning("No webhooks enabled", id="bugzilla.webhooks.empty") 80 | ) 81 | 82 | for webhook in jbi_webhooks: 83 | # Report errors in each webhook 84 | statsd.gauge(f"jbi.bugzilla.webhooks.{webhook.slug}.errors", webhook.errors) 85 | # Warn developers when there are errors 86 | if webhook.errors > 0: 87 | results.append( 88 | checks.Warning( 89 | f"Webhook {webhook.name} has {webhook.errors} error(s)", 90 | id="bugzilla.webhooks.errors", 91 | ) 92 | ) 93 | 94 | if not webhook.enabled: 95 | results.append( 96 | checks.Error( 97 | f"Webhook {webhook.name} is disabled ({webhook.errors} errors)", 98 | id="bugzilla.webhooks.disabled", 99 | ) 100 | ) 101 | 102 | return results 103 | 104 | 105 | @lru_cache(maxsize=1) 106 | def get_service(): 107 | """Get bugzilla service""" 108 | client = BugzillaClient( 109 | settings.bugzilla_base_url, api_key=str(settings.bugzilla_api_key) 110 | ) 111 | return BugzillaService(client=client) 112 | -------------------------------------------------------------------------------- /jbi/common/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/jira-bugzilla-integration/7c85b662f3e78319ccdafd102d1e6511ce057c9d/jbi/common/__init__.py -------------------------------------------------------------------------------- /jbi/common/instrument.py: -------------------------------------------------------------------------------- 1 | """Contains code common to all services 2 | 3 | ServiceHealth: Return type that service health checks should use 4 | InstrumentedClient: wraps service clients so that we can track their usage 5 | """ 6 | 7 | import logging 8 | from 
from typing import Sequence, Type

import backoff
from statsd.defaults.env import statsd

from jbi import environment

settings = environment.get_settings()

logger = logging.getLogger(__name__)


ServiceHealth = dict[str, bool]


def instrument(prefix: str, exceptions: Sequence[Type[Exception]], **backoff_params):
    """This decorator wraps a function such that it increments a counter every
    time it is called and times its execution. It retries the function if the
    specified exceptions are raised.
    """

    def _apply(func):
        # Metric names are fixed per wrapped function; compute them once.
        call_metric = f"jbi.{prefix}.methods.{func.__name__}.count"
        timer_metric = f"jbi.{prefix}.methods.{func.__name__}.timer"

        @backoff.on_exception(
            backoff.expo,
            exceptions,
            max_tries=settings.max_retries + 1,
            **backoff_params,
        )
        @wraps(func)
        def _instrumented(*args, **kwargs):
            # Increment the call counter, then time the execution.
            statsd.incr(call_metric)
            with statsd.timer(timer_metric):
                return func(*args, **kwargs)

        return _instrumented

    return _apply


"""
Parsing and validating the YAML configuration occurs within this module
"""

import logging

from pydantic import ValidationError
from pydantic_yaml import parse_yaml_raw_as

from jbi import environment
from jbi.models import Actions

logger = logging.getLogger(__name__)


class ConfigError(Exception):
    """Error when an exception occurs during processing config"""


def get_actions_from_file(jbi_config_file: str) -> Actions:
    """Convert and validate YAML configuration to `Action` objects"""
    try:
        with open(jbi_config_file, encoding="utf8") as file:
            raw_yaml = file.read()
        parsed: Actions = parse_yaml_raw_as(Actions, raw_yaml)
        return parsed
    except ValidationError as exception:
        logger.exception(exception)
        raise ConfigError("Errors exist.") from exception


def get_actions(env=None) -> Actions:
    """Load actions from file determined by ENV name"""
    selected_env = env if env is not None else environment.get_settings().env
    return get_actions_from_file(f"config/config.{selected_env}.yaml")


"""
Module dedicated to interacting with the environment (variables, version.json)
"""

# https://github.com/python/mypy/issues/12841
from enum import StrEnum, auto  # type: ignore
from functools import lru_cache
from typing import Optional

from pydantic import AnyUrl, FileUrl
from pydantic_settings import BaseSettings, SettingsConfigDict


class Environment(StrEnum):
    """Production environment choices"""

    LOCAL = auto()
    NONPROD = auto()
    PROD = auto()


class Settings(BaseSettings):
    """The Settings object extracts environment variables for convenience."""

    host: str = "0.0.0.0"
    port: int = 8000
    app_reload: bool = False
    app_debug: bool = False
    max_retries: int = 3
    # https://github.com/python/mypy/issues/12841
    env: Environment = Environment.NONPROD  # type: ignore
    jbi_api_key: str

    # Jira
    jira_base_url: str = "https://mozit-test.atlassian.net/"
    jira_username: str
    jira_api_key: str

    # Bugzilla
    bugzilla_base_url: str = "https://bugzilla-dev.allizom.org"
    bugzilla_api_key: str

    # Logging
    log_level: str = "info"
    log_format: str = "json"  # set to "text" for human-readable logs

    # Sentry
    sentry_dsn: Optional[AnyUrl] = None
    sentry_traces_sample_rate: float = 1.0

    # Retry queue
    # file:// DSN locating the dead-letter queue storage (see jbi.queue).
    dl_queue_dsn: FileUrl

    model_config = SettingsConfigDict(
        env_file=".env", env_file_encoding="utf-8", extra="ignore"
    )


@lru_cache(maxsize=1)
def get_settings() -> Settings:
    """Return the Settings object; use cache"""
    return Settings()


"""Custom exceptions for JBI"""


class ActionNotFoundError(Exception):
    """No Action could be found for this bug"""


class IgnoreInvalidRequestError(Exception):
    """Error thrown when requests are invalid and ignored"""


class ActionError(Exception):
    """Error occurred during Action handling"""


from .service import JiraService as JiraService
from .service import get_service as get_service


import logging
from typing import Any, Collection, Iterable, Optional, Union

import requests
from atlassian import Jira
from atlassian import errors as atlassian_errors
from atlassian.rest_client import log as atlassian_logger
from requests import exceptions as requests_exceptions

from jbi import environment
from jbi.common.instrument import instrument

settings = environment.get_settings()

logger = logging.getLogger(__name__)


def fatal_code(exc):
    """Do not retry 4XX errors, mark them as fatal."""
    try:
        return 400 <= exc.response.status_code < 500
    except AttributeError:
        # `ApiError` or `ConnectionError` won't have response attribute.
        return False


# Decorator applied to Jira client methods: counts/times calls and retries
# transient API failures, giving up immediately on 4XX responses.
instrumented_method = instrument(
    prefix="jira",
    exceptions=(
        atlassian_errors.ApiError,
        requests_exceptions.RequestException,
    ),
    giveup=fatal_code,
)


class JiraCreateError(Exception):
    """Error raised on Jira issue creation."""


class JiraClient(Jira):
    """Adapted Atlassian Jira client that logs errors and wraps methods
    in our instrumentation decorator.
    """

    def raise_for_status(self, *args, **kwargs):
        """Catch and log HTTP errors responses of the Jira self.client.

        Without this the actual requests and responses are not exposed when an error
        occurs, which makes troubleshooting tedious.
        """
        try:
            return super().raise_for_status(*args, **kwargs)
        except requests.HTTPError as exc:
            # NOTE(review): assumes `exc.request` is always set here — confirm.
            request = exc.request
            response = exc.response
            assert response is not None, f"HTTPError {exc} has no attached response"
            atlassian_logger.error(
                "HTTP: %s %s -> %s %s",
                request.method,
                request.path_url,
                response.status_code,
                response.reason,
                extra={"body": response.text},
            )
            # Set the exception message so that its str version contains details.
            msg = f"{request.method} {request.path_url} -> HTTP {response.status_code}: {exc}"
            exc.args = (msg,) + exc.args[1:]
            raise

    # Re-bind inherited methods through the instrumentation decorator.
    get_server_info = instrumented_method(Jira.get_server_info)
    get_project_components = instrumented_method(Jira.get_project_components)
    update_issue = instrumented_method(Jira.update_issue)
    update_issue_field = instrumented_method(Jira.update_issue_field)
    set_issue_status = instrumented_method(Jira.set_issue_status)
    issue_add_comment = instrumented_method(Jira.issue_add_comment)
    create_issue = instrumented_method(Jira.create_issue)
    get_project = instrumented_method(Jira.get_project)

    @instrumented_method
    def paginated_projects(
        self,
        included_archived=None,
        expand=None,
        url=None,
        keys: Optional[Collection[str]] = None,
    ) -> dict:
        """Returns a paginated list of projects visible to the user.

        https://developer.atlassian.com/cloud/jira/platform/rest/v2/api-group-projects/#api-rest-api-2-project-search-get

        We've patched this method of the Jira client to accept the `keys` param.
        """

        if not self.cloud:
            raise ValueError(
                "``projects_from_cloud`` method is only available for Jira Cloud platform"
            )

        params_dict: dict[str, Any] = {}

        if keys is not None:
            # The REST endpoint caps the `keys` filter at 50 entries.
            if len(keys) > 50:
                raise ValueError("Up to 50 project keys can be provided.")
            params_dict["keys"] = list(keys)

        if included_archived:
            params_dict["includeArchived"] = included_archived
        if expand:
            params_dict["expand"] = expand
        page_url = url or self.resource_url("project/search")
        is_url_absolute = bool(page_url.lower().startswith("http"))
        projects: Union[dict, None] = self.get(
            page_url, params=params_dict, absolute=is_url_absolute
        )
        return projects if projects else {"values": []}

    @instrumented_method
    def permitted_projects(self, permissions: Optional[Iterable] = None) -> list[dict]:
        """Fetches projects that the user has the required permissions for

        https://developer.atlassian.com/cloud/jira/platform/rest/v2/api-group-permissions/#api-rest-api-2-permissions-project-post
        """
        if permissions is None:
            permissions = []

        response = self.post(
            "/rest/api/2/permissions/project",
            json={"permissions": list(permissions)},
        )
        projects: list[dict] = response["projects"] if response else []
        return projects


import logging

import pypandoc  # type: ignore

# Silence pypandoc's logger unless the application configures it.
logging.getLogger("pypandoc").addHandler(logging.NullHandler())


def markdown_to_jira(markdown: str, max_length: int = 0) -> str:
    """
    Convert markdown content into Jira specific markup language.
    """
    jira_output = pypandoc.convert_text(markdown, "jira", format="gfm").strip()
    if max_length > 0 and len(jira_output) > max_length:
        # Truncate on last word.
        jira_output = jira_output[:max_length].rsplit(maxsplit=1)[0]
    return jira_output  # type: ignore


"""
Dedicated module for logging configuration and setup
"""

import logging
import logging.config
import sys

from jbi.environment import get_settings

settings = get_settings()


# dictConfig-style logging configuration; format and level come from settings.
CONFIG = {
    "version": 1,
    "disable_existing_loggers": False,
    "filters": {
        "request_id": {
            "()": "dockerflow.logging.RequestIdLogFilter",
        },
    },
    "formatters": {
        "mozlog_json": {
            "()": "dockerflow.logging.JsonLogFormatter",
            "logger_name": "jbi",
        },
        "text": {
            "format": "%(asctime)s %(levelname)-8s [%(rid)s] %(name)-15s %(message)s",
            "datefmt": "%Y-%m-%d %H:%M:%S",
        },
    },
    "handlers": {
        "console": {
            "level": settings.log_level.upper(),
            "class": "logging.StreamHandler",
            "filters": ["request_id"],
            "formatter": "text"
            if settings.log_format.lower() == "text"
            else "mozlog_json",
            "stream": sys.stdout,
        },
        "null": {
            "class": "logging.NullHandler",
        },
    },
    "loggers": {
        "": {"handlers": ["console"]},
        "request.summary": {"level": logging.INFO},
        "jbi": {"level": logging.DEBUG},
        "uvicorn": {"level": logging.INFO},
        # uvicorn access logs are silenced; "request.summary" covers requests.
        "uvicorn.access": {"handlers": ["null"], "propagate": False},
    },
}


"""
Python Module for Pydantic Models and validation
"""
import functools
import logging
import warnings
from collections import defaultdict
from copy import copy
from typing import DefaultDict, Literal, Mapping, Optional

from pydantic import (
    BaseModel,
    ConfigDict,
    Field,
    RootModel,
    field_validator,
)

from jbi import Operation, steps
from jbi.bugzilla.models import Bug, BugId, WebhookEvent

logger = logging.getLogger(__name__)

JIRA_HOSTNAMES = ("jira", "atlassian")


class ActionSteps(BaseModel, frozen=True):
    """Step functions to run for each type of Bugzilla webhook payload"""

    new: list[str] = [
        "create_issue",
        "maybe_delete_duplicate",
        "add_link_to_bugzilla",
        "add_link_to_jira",
        "sync_whiteboard_labels",
    ]
    existing: list[str] = [
        "update_issue_summary",
        "sync_whiteboard_labels",
        "add_jira_comments_for_changes",
    ]
    comment: list[str] = [
        "create_comment",
    ]
    attachment: list[str] = [
        "create_comment",
    ]

    @field_validator("*")
    @classmethod
    def validate_steps(cls, function_names: list[str]):
        """Validate that all configure step functions exist in the steps module"""
        invalid_functions = [
            func_name for func_name in function_names if not hasattr(steps, func_name)
        ]
        if invalid_functions:
            raise ValueError(
                f"The following functions are not available in the `steps` module: {', '.join(invalid_functions)}"
            )

        # Make sure `maybe_update_resolution` comes after `maybe_update_status`.
        try:
            idx_resolution = function_names.index("maybe_update_issue_resolution")
            idx_status = function_names.index("maybe_update_issue_status")
            assert idx_resolution > idx_status, (
                "Step `maybe_update_resolution` should be put after `maybe_update_issue_status`"
            )
        except ValueError:
            # One of these 2 steps not listed.
            pass

        return function_names


class JiraComponents(BaseModel, frozen=True):
    """Controls how Jira components are set on issues in the `maybe_update_components` step."""

    use_bug_component: bool = True
    use_bug_product: bool = False
    use_bug_component_with_product_prefix: bool = False
    set_custom_components: list[str] = []


class ActionParams(BaseModel, frozen=True):
    """Params passed to Action step functions"""

    jira_project_key: str
    steps: ActionSteps = ActionSteps()
    jira_char_limit: int = 32667
    # Custom field ids below are Jira-instance specific.
    jira_components: JiraComponents = JiraComponents()
    jira_cf_fx_points_field: str = "customfield_10037"
    jira_severity_field: str = "customfield_10319"
    jira_priority_field: str = "priority"
    jira_resolution_field: str = "resolution"
    labels_brackets: Literal["yes", "no", "both"] = "no"
    status_map: dict[str, str] = {}
    priority_map: dict[str, str] = {
        "": "(none)",
        "P1": "P1",
        "P2": "P2",
        "P3": "P3",
        "P4": "Low",
        "P5": "Lowest",
    }
    resolution_map: dict[str, str] = {}
    severity_map: dict[str, str] = {
        "": "N/A",
        "S1": "S1",
        "S2": "S2",
        "S3": "S3",
        "S4": "S4",
    }
    cf_fx_points_map: dict[str, int] = {
        "---": 0,
        "": 0,
        "?": 0,
        "1": 1,
        "2": 2,
        "3": 3,
        "5": 5,
        "7": 7,
        "8": 8,
        "12": 12,
        "13": 13,
        "15": 15,
    }
    issue_type_map: dict[str, str] = {"task": "Task", "defect": "Bug"}


class Action(BaseModel, frozen=True):
    """
    Action is the inner model for each action in the configuration file"""

    whiteboard_tag: str
    bugzilla_user_id: int | list[int] | Literal["tbd"]
    description: str
    enabled: bool = True
    parameters: ActionParams

    @property
    def jira_project_key(self):
        """Return the configured project key."""
        return self.parameters.jira_project_key


class Actions(RootModel):
    """
    Actions is the container model for the list of actions in the configuration file
    """

    root: list[Action] = Field(..., min_length=1)

    @functools.cached_property
    def by_tag(self) -> Mapping[str, Action]:
        """Build mapping of actions by lookup tag."""

        return {action.whiteboard_tag: action for action in self.root}

    def __iter__(self):
        return iter(self.root)

    def __len__(self):
        return len(self.root)

    def __getitem__(self, item):
        return self.by_tag[item]

    def get(self, tag: Optional[str]) -> Optional[Action]:
        """Lookup actions by whiteboard tag"""
        return self.by_tag.get(tag.lower()) if tag else None

    @functools.cached_property
    def configured_jira_projects_keys(self) -> set[str]:
        """Return the list of Jira project keys from all configured actions"""

        return {action.jira_project_key for action in self.root}

    @field_validator("root")
    @classmethod
    def validate_actions(cls, actions: list[Action]):
        """
        Inspect the list of actions:
        - Validate that lookup tags are uniques
        - Ensure we haven't exceeded our maximum configured project limit (see error below)
        - If the action's bugzilla_user_id is "tbd", emit a warning.
        """
        tags = [action.whiteboard_tag.lower() for action in actions]
        duplicated_tags = [t for i, t in enumerate(tags) if t in tags[:i]]
        if duplicated_tags:
            raise ValueError(f"actions have duplicated lookup tags: {duplicated_tags}")

        if len(tags) > 50:
            raise ValueError(
                "The Jira client's `paginated_projects` method assumes we have "
                "up to 50 projects configured. Adjust that implementation before "
                "removing this validation check."
            )

        for action in actions:
            if action.bugzilla_user_id == "tbd":
                warnings.warn(
                    f"Provide bugzilla_user_id data for `{action.whiteboard_tag}` action."
                )

            assert action.parameters.status_map or (
                "maybe_update_issue_status" not in action.parameters.steps.new
                and "maybe_update_issue_status" not in action.parameters.steps.existing
            ), "`maybe_update_issue_status` was used without `status_map`"
            assert action.parameters.resolution_map or (
                "maybe_update_issue_resolution" not in action.parameters.steps.new
                and "maybe_update_issue_resolution"
                not in action.parameters.steps.existing
            ), "`maybe_update_issue_resolution` was used without `resolution_map`"

        return actions

    model_config = ConfigDict(ignored_types=(functools.cached_property,))


class Context(BaseModel, frozen=True):
    """Generic log context throughout JBI"""

    def update(self, **kwargs):
        """Return a copy with updated fields."""
        return self.model_copy(update=kwargs, deep=True)


class JiraContext(Context):
    """Logging context about Jira"""

    project: str
    issue: Optional[str] = None
    labels: Optional[list[str]] = None


class RunnerContext(Context, extra="forbid"):
    """Logging context from runner"""

    operation: Operation
    event: WebhookEvent
    actions: Optional[list[Action]] = None
    bug: BugId | Bug


class ActionContext(Context, extra="forbid"):
    """Logging context from actions"""

    action: Action
    operation: Operation
    current_step: Optional[str] = None
    event: WebhookEvent
    jira: JiraContext
    bug: Bug
    extra: dict[str, str] = {}
    responses_by_step: DefaultDict[str, list] = defaultdict(list)

    def append_responses(self, *responses):
        """Shortcut function to add responses to the existing list."""
        if not self.current_step:
            raise ValueError("`current_step` unset in context.")
        copied = copy(self.responses_by_step)
        copied[self.current_step].extend(responses)
        return self.update(responses_by_step=copied)
"""This `queue` module stores Bugzilla webhook messages that we failed to sync
to Jira.

As Bugzilla sends us webhook messages, we want to eagerly accept them and
return a `200` response so that we don't prevent it from sending new messages.
But if we fail to sync a bug, we want to keep the message so we can retry it
later. We also want to store any messages that might be successfully synced, but
were preceded by a message that wasn't synced.

Classes:
    - QueueItem: An entry in the dead letter queue, containing information
      about the payload, timestamp, and any associated errors when attempting
      to sync the bug.
    - PythonException: Information about any exception that occurred when
      syncing a bug, stored along with the item.
    - DeadLetterQueue: Class representing the dead letter queue system, providing methods
      for adding, retrieving, and managing queue items. Supports pluggable backends.
    - QueueBackend: Abstract base class defining the interface for a DeadLetterQueue backend.
    - FileBackend: Implementation of a QueueBackend that stores messages in files.
    - InvalidQueueDSNError: Exception raised when an invalid queue DSN is provided.
    - QueueItemRetrievalError: Exception raised when the queue is unable to retrieve a failed
      item and parse it as an item
"""
import logging
import re
import tempfile
import traceback
from abc import ABC, abstractmethod
from datetime import datetime
from functools import cached_property, lru_cache
from json import JSONDecodeError
from pathlib import Path
from typing import AsyncIterator, Optional
from urllib.parse import ParseResult, urlparse

import dockerflow.checks
from pydantic import BaseModel, FileUrl, ValidationError, computed_field

from jbi.bugzilla import models as bugzilla_models
from jbi.environment import get_settings

logger = logging.getLogger(__name__)

# Matches QueueItem.identifier: "<event time>-<bug id>-<action>-<status>".
# BUG FIX: the named-group names had been stripped (e.g. `(?P\d+)`), which is
# not valid regex syntax and breaks `match.group("bug_id")` below.
ITEM_ID_PATTERN = re.compile(
    r"(?P<timestamp>\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}\+\d{2}:\d{2})-(?P<bug_id>\d+)-(?P<action>\w*)-(?P<status>error|postponed)"
)


def extract_bug_id_from_item_id(item_id: str) -> str:
    """Return the bug id embedded in a queue item identifier.

    Raises:
        ValueError: if `item_id` does not match `ITEM_ID_PATTERN`.
    """
    if match := re.search(ITEM_ID_PATTERN, item_id):
        return match.group("bug_id")
    # BUG FIX: printf-style arguments were previously passed directly to
    # ValueError, which does not format them (the message became a tuple);
    # build the message explicitly instead.
    raise ValueError(
        f"item_id {item_id} did not match expected format: {ITEM_ID_PATTERN.pattern}"
    )


class QueueItemRetrievalError(Exception):
    """Raised when a stored queue item cannot be read or parsed."""

    def __init__(self, message=None, path=None):
        self.message = message or "Error reading or parsing queue item"
        self.path = path

    def __str__(self):
        return f"QueueItemRetrievalError: {self.message} - path: {self.path}."
class InvalidQueueDSNError(Exception):
    # Raised by DeadLetterQueue when the DSN scheme is not supported.
    pass


class PythonException(BaseModel, frozen=True):
    """Serializable snapshot of an exception stored with a queue item."""

    type: str
    description: str
    details: str

    @classmethod
    def from_exc(cls, exc: Exception):
        """Build a PythonException from a live exception instance."""
        return PythonException(
            type=exc.__class__.__name__,
            description=str(exc),
            details="".join(traceback.format_exception(exc)),
        )


class QueueItem(BaseModel, frozen=True):
    """Dead Letter Queue entry."""

    payload: bugzilla_models.WebhookRequest
    error: Optional[PythonException] = None
    rid: Optional[str] = None

    @computed_field  # type: ignore
    @cached_property
    def version(self) -> str:
        """Application version at the time the item was stored."""
        # Prevents circular imports.
        from jbi import app

        return app.VERSION

    @property
    def timestamp(self) -> datetime:
        """Event time of the wrapped webhook payload."""
        return self.payload.event.time

    @computed_field  # type: ignore
    @property
    def identifier(self) -> str:
        """Unique id; format must stay in sync with ITEM_ID_PATTERN."""
        return f"{self.payload.event.time}-{self.payload.bug.id}-{self.payload.event.action}-{'error' if self.error else 'postponed'}"


@lru_cache(maxsize=1)
def get_dl_queue():
    """Return the process-wide DeadLetterQueue singleton."""
    settings = get_settings()
    return DeadLetterQueue(settings.dl_queue_dsn)


class QueueBackend(ABC):
    """An interface for dead letter queues."""

    @abstractmethod
    def ping(self) -> bool:
        """Report if the queue backend is available and ready to be written to"""
        pass

    @abstractmethod
    async def clear(self) -> None:
        """Remove all bugs and their items from the queue"""
        pass

    @abstractmethod
    async def put(self, item: QueueItem) -> None:
        """Insert item into queued items for a bug, maintaining sorted order by
        payload event time ascending
        """
        pass

    @abstractmethod
    async def remove(self, bug_id: int, identifier: str) -> None:
        """Remove an item from the target bug's queue. If the item is the last
        one for the bug, remove the bug from the queue entirely.
        """
        pass
bug's queue. If the item is the last 139 | one for the bug, remove the bug from the queue entirely. 140 | """ 141 | pass 142 | 143 | @abstractmethod 144 | def get(self, bug_id: int) -> AsyncIterator[QueueItem]: 145 | """Retrieve all of the queue items for a specific bug, sorted in 146 | ascending order by the timestamp of the payload event. 147 | """ 148 | pass 149 | 150 | @abstractmethod 151 | async def exists(self, item_id: str) -> bool: 152 | """ 153 | Report whether an item with id `item_id` exists in the queue 154 | """ 155 | pass 156 | 157 | @abstractmethod 158 | async def get_all(self) -> dict[int, AsyncIterator[QueueItem]]: 159 | """Retrieve all items in the queue, grouped by bug 160 | 161 | Returns: 162 | dict[int, List[QueueItem]]: Returns a dict of 163 | {bug_id: list of events}. Each list of events sorted in ascending 164 | order by the timestamp of the payload event. 165 | """ 166 | pass 167 | 168 | @abstractmethod 169 | async def size(self, bug_id: Optional[int] = None) -> int: 170 | """Report the number of items in the queue, optionally filtered by bug id""" 171 | pass 172 | 173 | 174 | class FileBackend(QueueBackend): 175 | def __init__(self, location): 176 | self.location = Path(location) 177 | self.location.mkdir(parents=True, exist_ok=True) 178 | 179 | def __repr__(self) -> str: 180 | return f"FileBackend({self.location})" 181 | 182 | def ping(self): 183 | try: 184 | with tempfile.TemporaryDirectory(dir=self.location) as temp_dir: 185 | with tempfile.TemporaryFile(dir=temp_dir) as f: 186 | f.write(b"") 187 | return True 188 | except Exception: 189 | logger.exception("Could not write to file backed queue") 190 | return False 191 | 192 | async def clear(self): 193 | for root, dirs, files in self.location.walk(top_down=False): 194 | for name in files: 195 | (root / name).unlink() 196 | for name in dirs: 197 | (root / name).rmdir() 198 | 199 | async def put(self, item: QueueItem): 200 | folder = self.location / f"{item.payload.bug.id}" 201 | 
folder.mkdir(exist_ok=True) 202 | path = folder / (item.identifier + ".json") 203 | path.write_text(item.model_dump_json()) 204 | logger.debug( 205 | "Wrote item %s for bug %s to path %s", 206 | item.identifier, 207 | item.payload.bug.id, 208 | path, 209 | ) 210 | logger.debug("%d items in dead letter queue", await self.size()) 211 | 212 | async def remove(self, bug_id: int, identifier: str): 213 | bug_dir = self.location / f"{bug_id}" 214 | item_path = bug_dir / (identifier + ".json") 215 | try: 216 | logger.debug("Removing %s from queue for bug %s", identifier, bug_id) 217 | item_path.unlink() 218 | except FileNotFoundError as exc: 219 | logger.warning( 220 | "Could not delete missing item at path %s", str(item_path), exc 221 | ) 222 | 223 | if not any(bug_dir.iterdir()): 224 | bug_dir.rmdir() 225 | logger.debug("Removed directory for bug %s", bug_id) 226 | 227 | async def exists(self, item_id: str) -> bool: 228 | try: 229 | bug_id = extract_bug_id_from_item_id(item_id) 230 | except ValueError as e: 231 | logger.warning( 232 | "provided item_id %s did not match expected format", item_id, exc_info=e 233 | ) 234 | return False 235 | 236 | item_path = (self.location / bug_id / item_id).with_suffix(".json") 237 | # even though pathlib.Path.exists() returns a bool, mypy doesn't seem to get it 238 | return bool(item_path.exists()) 239 | 240 | async def get(self, bug_id: int) -> AsyncIterator[QueueItem]: 241 | folder = self.location / str(bug_id) 242 | if not folder.is_dir(): 243 | return 244 | yield 245 | for path in sorted(folder.iterdir()): 246 | try: 247 | yield QueueItem.parse_file(path) 248 | except (JSONDecodeError, ValidationError) as e: 249 | raise QueueItemRetrievalError( 250 | "Unable to load item from queue", path=path 251 | ) from e 252 | 253 | async def get_all(self) -> dict[int, AsyncIterator[QueueItem]]: 254 | all_items: dict[int, AsyncIterator[QueueItem]] = {} 255 | for filesystem_object in self.location.iterdir(): 256 | if filesystem_object.is_dir() 
and re.match( 257 | r"\d", filesystem_object.name 258 | ): # filtering out temp files from checks 259 | all_items[int(filesystem_object.name)] = self.get(filesystem_object) 260 | return all_items 261 | 262 | async def size(self, bug_id=None) -> int: 263 | location = self.location / str(bug_id) if bug_id else self.location 264 | return sum(1 for _ in location.rglob("*.json")) 265 | 266 | 267 | class DeadLetterQueue: 268 | backend: QueueBackend 269 | 270 | def __init__(self, dsn: FileUrl | str | ParseResult): 271 | dsn = urlparse(url=dsn) if isinstance(dsn, str) else dsn 272 | 273 | if dsn.scheme != "file": 274 | raise InvalidQueueDSNError(f"{dsn.scheme} is not supported") 275 | self.backend = FileBackend(dsn.path) 276 | 277 | def check_writable(self) -> list[dockerflow.checks.CheckMessage]: 278 | """Heartbeat check to assert we can write items to queue""" 279 | results = [] 280 | ping_result = self.backend.ping() 281 | if ping_result is False: 282 | results.append( 283 | dockerflow.checks.Error( 284 | f"queue with {str(self.backend)} unavailable", 285 | hint="with FileBackend, check that folder is writable", 286 | id="queue.backend.ping", 287 | ) 288 | ) 289 | return results 290 | 291 | async def check_readable(self) -> list[dockerflow.checks.CheckMessage]: 292 | results = [] 293 | try: 294 | bugs = await self.retrieve() 295 | 296 | for bug_id, items in bugs.items(): 297 | try: 298 | bug_items = (await self.retrieve()).values() 299 | [[i async for i in items] for items in bug_items] 300 | except QueueItemRetrievalError as exc: 301 | results.append( 302 | dockerflow.checks.Error( 303 | f"failed to parse file {str(exc.path)}", 304 | hint="check that parked event files are not corrupt", 305 | id="queue.backend.read", 306 | ) 307 | ) 308 | except Exception as exc: 309 | logger.exception(exc) 310 | results.append( 311 | dockerflow.checks.Error( 312 | f"queue with {str(self.backend)} cannot be retrieved", 313 | hint=f"invalid data: {exc}", 314 | 
id="queue.backend.retrieve", 315 | ) 316 | ) 317 | return results 318 | 319 | async def postpone(self, payload: bugzilla_models.WebhookRequest, rid: str) -> None: 320 | """ 321 | Postpone the specified request for later. 322 | """ 323 | item = QueueItem(payload=payload, rid=rid) 324 | await self.backend.put(item) 325 | 326 | async def track_failed( 327 | self, payload: bugzilla_models.WebhookRequest, exc: Exception, rid: str 328 | ) -> QueueItem: 329 | """ 330 | Store the specified payload and exception information into the queue. 331 | """ 332 | item = QueueItem( 333 | payload=payload, 334 | error=PythonException.from_exc(exc), 335 | rid=rid, 336 | ) 337 | await self.backend.put(item) 338 | return item 339 | 340 | async def is_blocked(self, payload: bugzilla_models.WebhookRequest) -> bool: 341 | """ 342 | Return `True` if the specified `payload` is blocked and should be 343 | queued instead of being processed. 344 | """ 345 | existing = await self.backend.size(payload.bug.id) 346 | return existing > 0 347 | 348 | async def retrieve(self) -> dict[int, AsyncIterator[QueueItem]]: 349 | """ 350 | Returns the whole queue -- a dict of bug_id and a generator for the 351 | items for that bug 352 | """ 353 | return await self.backend.get_all() 354 | 355 | async def size(self, bug_id=None): 356 | return await self.backend.size(bug_id=bug_id) 357 | 358 | async def done(self, item: QueueItem) -> None: 359 | """ 360 | Mark item as done, remove from queue. 
361 | """ 362 | return await self.backend.remove(item.payload.bug.id, item.identifier) 363 | 364 | async def exists(self, item_id) -> bool: 365 | """ 366 | Report whether an item with id `item_id` exists in the queue 367 | """ 368 | return await self.backend.exists(item_id) 369 | 370 | async def delete(self, item_id) -> None: 371 | """ 372 | Remove an item from the queue by item_id 373 | """ 374 | bug_id = extract_bug_id_from_item_id(item_id) 375 | await self.backend.remove(bug_id=int(bug_id), identifier=item_id) 376 | -------------------------------------------------------------------------------- /jbi/retry.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import logging 3 | import sys 4 | from datetime import UTC, datetime, timedelta 5 | from os import getenv 6 | from time import sleep 7 | 8 | from dockerflow.logging import JsonLogFormatter, request_id_context 9 | 10 | import jbi.runner as runner 11 | from jbi.configuration import get_actions 12 | from jbi.errors import IgnoreInvalidRequestError 13 | from jbi.queue import get_dl_queue 14 | 15 | CONSTANT_RETRY = getenv("DL_QUEUE_CONSTANT_RETRY", "false") == "true" 16 | RETRY_TIMEOUT_DAYS = getenv("DL_QUEUE_RETRY_TIMEOUT_DAYS", 7) 17 | CONSTANT_RETRY_SLEEP = getenv("DL_QUEUE_CONSTANT_RETRY_SLEEP", 5) 18 | 19 | logger = logging.getLogger(__name__) 20 | logger.setLevel(logging.INFO) 21 | lsh = logging.StreamHandler(sys.stdout) 22 | lsh.setFormatter(JsonLogFormatter(logger_name=__name__)) 23 | logger.addHandler(lsh) 24 | 25 | ACTIONS = get_actions() 26 | 27 | 28 | async def retry_failed(item_executor=runner.execute_action, queue=get_dl_queue()): 29 | min_event_timestamp = datetime.now(UTC) - timedelta(days=int(RETRY_TIMEOUT_DAYS)) 30 | 31 | # load all bugs from DLQ 32 | bugs = await queue.retrieve() 33 | 34 | # metrics to track 35 | metrics = { 36 | "bug_count": len(bugs), 37 | "events_processed": 0, 38 | "events_skipped": 0, 39 | "events_failed": 0, 40 | 
async def main():
    """Process the dead-letter queue once, or loop forever when
    DL_QUEUE_CONSTANT_RETRY is enabled, pausing between passes."""
    while True:
        metrics = await retry_failed()
        logger.info("event queue processing complete", extra=metrics)
        if not CONSTANT_RETRY:
            return
        # time.sleep() would block the event loop; yield to it instead.
        await asyncio.sleep(int(CONSTANT_RETRY_SLEEP))
@router.get("/", include_in_schema=False)
def root(request: Request, settings: SettingsDep):
    """Expose the app's metadata and key configuration values."""
    app = request.app
    return {
        "title": app.title,
        "description": app.description,
        "version": app.version,
        "documentation": app.docs_url,
        "configuration": {
            "jira_base_url": settings.jira_base_url,
            "bugzilla_base_url": settings.bugzilla_base_url,
        },
    }
def api_key_auth(
    settings: SettingsDep,
    api_key: Annotated[str, Depends(header_scheme)],
    basic_auth: Annotated[HTTPBasicCredentials, Depends(basicauth_scheme)],
):
    """Authenticate via the X-Api-Key header, falling back to the
    HTTP Basic password when no header key is present.

    Raises HTTPException 401 when no key matches.
    """
    # No header key supplied: accept the Basic-auth password instead.
    if not api_key and basic_auth:
        api_key = basic_auth.password
    # compare_digest keeps the comparison constant-time; only call it when a
    # key was actually provided (same short-circuit as `not api_key or ...`).
    authorized = bool(api_key) and secrets.compare_digest(
        api_key, settings.jbi_api_key
    )
    if not authorized:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Incorrect API Key",
            headers={"WWW-Authenticate": "Basic"},
        )
@router.get(
    "/whiteboard_tags/",
    dependencies=[Depends(api_key_auth)],
)
def get_whiteboard_tags(
    actions: ActionsDep,
    whiteboard_tag: Optional[str] = None,
):
    """API for viewing whiteboard_tags and associated data.

    With `whiteboard_tag`, return only that tag's action; otherwise return
    every configured action keyed by tag.
    """
    matched = actions.get(whiteboard_tag)
    if matched:
        return {whiteboard_tag: matched}
    # Unknown (or no) tag requested: fall back to the full mapping.
    return actions.by_tag
dd {
  grid-column-start: 2;
  /* was `margin-left: 5` — a unitless non-zero length is invalid CSS and
     the declaration is silently dropped by browsers; add the px unit */
  margin-left: 5px;
}

{{title}}

8 |

{{actions|length}} actions.

9 |
10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | {% for action in actions %} 23 | {% if enable_query == none or action.enabled == enable_query %} 24 | 25 | 26 | 27 | 28 | 29 | 30 | 38 | 39 | {% endif %} 40 | {% endfor %} 41 | 42 |
TagContactDescription EnabledModuleParameters
{{action.whiteboard_tag}}{{action.contact}}{{action.description}}{{action.enabled}}{{action.module}} 31 |
32 | {% for param, value in action.parameters.items() %} 33 |
{{param}}
34 |
{{value}}
35 | {% endfor %} 36 |
37 |
43 |
44 | 45 | 46 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "jbi" 3 | version = "0" 4 | description = "jira-bugzilla-integration" 5 | authors = ["@mozilla/jbi-core"] 6 | license = "MPL" 7 | 8 | [tool.poetry.dependencies] 9 | python = ">=3.12, <3.14" 10 | fastapi = "^0.115.12" 11 | pydantic = {version = "^2.11.5", extras = ["email"]} 12 | uvicorn = {extras = ["standard"], version = "^0.34.3"} 13 | atlassian-python-api = "^4.0.4" 14 | dockerflow = {extras = ["fastapi"], version = "2024.4.2"} 15 | Jinja2 = "^3.1.6" 16 | sentry-sdk = {extras = ["fastapi"], version = "^2.29.1"} 17 | pydantic-yaml = "^1.5.1" 18 | backoff = "^2.2.1" 19 | statsd = "^4.0.1" 20 | requests = "^2.32.3" 21 | pydantic-settings = "^2.9.1" 22 | pypandoc = "^1.15" 23 | 24 | [tool.poetry.group.dev.dependencies] 25 | click = "^8.2.1" 26 | pre-commit = "^4.2.0" 27 | coverage = {extras = ["toml"], version = "^7.8"} 28 | mypy = "^1.16" 29 | detect-secrets = "^1.5.0" 30 | bandit = "^1.8.3" 31 | pytest = "^8.3.5" 32 | yamllint = "^1.37.1" 33 | pytest-dotenv = "^0.5.2" 34 | types-requests = "^2.32.0" 35 | responses = "^0.25.7" 36 | httpx = "^0.28.1" 37 | factory-boy = "^3.3.3" 38 | pytest-factoryboy = "^2.7.0" 39 | ruff = "^0.11.12" 40 | pytest-mock = "^3.14.1" 41 | pytest-asyncio = "^0.26.0" 42 | 43 | [tool.poetry.scripts] 44 | jbi = "jbi.__main__:cli" 45 | 46 | [build-system] 47 | requires = ["poetry-core>=1.0.0"] 48 | build-backend = "poetry.core.masonry.api" 49 | 50 | [tool.ruff] 51 | target-version = "py312" 52 | lint.extend-select = ["I"] 53 | 54 | [tool.ruff.lint.per-file-ignores] 55 | "__init__.py" = ["F401"] 56 | 57 | [tool.pytest.ini_options] 58 | testpaths = [ 59 | "tests/unit", 60 | ] 61 | env_override_existing_values = true 62 | env_files = [ 63 | ".env.example" 64 | ] 65 | markers = [ 66 | "no_mocked_bugzilla", 67 | 
[tool.coverage.report]

# Lines matching these regexes are excluded from coverage reporting.
# ("raise NotImplementedError" appeared twice; duplicate removed.)
exclude_lines = [
    "# noqa",
    "raise NotImplementedError",
    "pragma: no cover",
    "def __repr__",
    "if .debug:",
    "if __name__ == .__main__.:",
    "logger.",
    "from",
    "import"
]
class FilteredLogCaptureFixture(pytest.LogCaptureFixture):
    """A custom implementation to simplify capture
    of logs for a particular logger.

    Behaves exactly like pytest's caplog, except that after calling
    `for_logger(name)`, `records` only returns records emitted by that
    logger (exact name match, not hierarchical).
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Empty string means "no filter": records from all loggers pass.
        self.logger_name = ""  # root (all)

    @property
    def records(self):
        """Return the captured records, filtered by `self.logger_name`
        when one has been set via `for_logger()`."""
        return [
            r
            for r in super().records
            if not self.logger_name or r.name == self.logger_name
        ]

    def for_logger(self, logger_name):
        """Specify logger to filter captured messages.

        Returns self so it can be chained, e.g.
        `capturelogs.for_logger("jbi.runner").records`.
        """
        self.logger_name = logger_name
        return self
@pytest.fixture(autouse=True)
def mocked_bugzilla(request):
    # Autouse fixture: every test gets a mocked BugzillaClient unless the
    # test is marked with @pytest.mark.no_mocked_bugzilla, in which case the
    # real client class is left in place (yielding None).
    if "no_mocked_bugzilla" in request.keywords:
        yield None
        # Clear the cached service so the next test builds a fresh one.
        bugzilla.service.get_service.cache_clear()
    else:
        with mock.patch("jbi.bugzilla.service.BugzillaClient") as mocked_bz:
            # Yield an *instance* of the mocked class, as the service would use.
            yield mocked_bz()
            bugzilla.service.get_service.cache_clear()
@pytest.fixture(autouse=True)
def sleepless(monkeypatch):
    """Make time.sleep a no-op in every test so retries/backoff run instantly.

    See https://stackoverflow.com/a/54829577
    """

    def _no_sleep(_seconds):
        return None

    monkeypatch.setattr(time, "sleep", _no_sleep)
class PydanticFactory(factory.Factory):
    """Base factory for pydantic models.

    - factory_instance(**kwargs) -> Model(**kwargs)
    - factory_instance.create(**kwargs) -> Model(**kwargs)
    - factory_instance.build(**kwargs) -> Model.model_construct(**kwargs)

    https://docs.pydantic.dev/latest/api/base_model/#pydantic.main.BaseModel.model_construct
    """

    class Meta:
        abstract = True

    @classmethod
    def _build(cls, model_class, *args, **kwargs):
        # build() deliberately bypasses pydantic validation via
        # model_construct(), so tests can create otherwise-invalid models.
        return model_class.model_construct(**kwargs)
description = "test config" 51 | parameters = factory.SubFactory(ActionParamsFactory) 52 | 53 | 54 | class ActionsFactory(PydanticFactory): 55 | class Meta: 56 | model = models.Actions 57 | 58 | root = factory.List([factory.SubFactory(ActionFactory)]) 59 | 60 | 61 | class WebhookCommentFactory(PydanticFactory): 62 | class Meta: 63 | model = bugzilla_models.WebhookComment 64 | 65 | body = None 66 | id = None 67 | number = None 68 | is_private = None 69 | creation_time = None 70 | 71 | 72 | class BugFactory(PydanticFactory): 73 | class Meta: 74 | model = bugzilla_models.Bug 75 | 76 | class Params: 77 | with_comment = factory.Trait(comment=factory.SubFactory(WebhookCommentFactory)) 78 | 79 | assigned_to = "nobody@mozilla.org" 80 | comment = None 81 | component = "General" 82 | creator = "nobody@mozilla.org" 83 | flags = [] 84 | id = 654321 85 | is_private = False 86 | keywords = [] 87 | priority = "" 88 | product = "JBI" 89 | resolution = "" 90 | see_also = [] 91 | severity = "--" 92 | status = "NEW" 93 | summary = "JBI Test" 94 | type = "defect" 95 | whiteboard = "[devtest]" 96 | 97 | 98 | class WebhookUserFactory(PydanticFactory): 99 | class Meta: 100 | model = bugzilla_models.WebhookUser 101 | 102 | id = 123456 103 | login = "nobody@mozilla.org" 104 | real_name = "Nobody [ :nobody ]" 105 | 106 | 107 | class WebhookEventChangeFactory(PydanticFactory): 108 | class Meta: 109 | model = bugzilla_models.WebhookEventChange 110 | 111 | field = "field" 112 | removed = "old value" 113 | added = "new value" 114 | 115 | 116 | class WebhookEventFactory(PydanticFactory): 117 | class Meta: 118 | model = bugzilla_models.WebhookEvent 119 | 120 | action = "create" 121 | changes = None 122 | routing_key = "bug.create" 123 | target = "bug" 124 | time = factory.LazyFunction(lambda: datetime.now(UTC).isoformat(timespec="seconds")) 125 | user = factory.SubFactory(WebhookUserFactory) 126 | 127 | 128 | class WebhookRequestFactory(PydanticFactory): 129 | class Meta: 130 | model = 
bugzilla_models.WebhookRequest 131 | 132 | bug = factory.SubFactory(BugFactory) 133 | event = factory.SubFactory(WebhookEventFactory) 134 | webhook_id = 34 135 | webhook_name = "local-test" 136 | 137 | 138 | class CommentFactory(PydanticFactory): 139 | class Meta: 140 | model = bugzilla_models.Comment 141 | 142 | id = 343 143 | text = "comment text" 144 | bug_id = 654321 145 | count = 1 146 | is_private = True 147 | creator = "mathieu@mozilla.org" 148 | 149 | 150 | class JiraContextFactory(PydanticFactory): 151 | class Meta: 152 | model = models.JiraContext 153 | 154 | project = "JBI" 155 | issue = None 156 | labels = [] 157 | 158 | 159 | class ActionContextFactory(PydanticFactory): 160 | class Meta: 161 | model = models.ActionContext 162 | 163 | action = factory.SubFactory(ActionFactory) 164 | operation = Operation.IGNORE 165 | bug = factory.SubFactory(BugFactory) 166 | event = factory.SubFactory(WebhookEventFactory) 167 | jira = factory.SubFactory(JiraContextFactory) 168 | 169 | 170 | class WebhookFactory(PydanticFactory): 171 | class Meta: 172 | model = bugzilla_models.Webhook 173 | 174 | component = "General" 175 | creator = "admin@mozilla.bugs" 176 | enabled = True 177 | errors = 0 178 | event = "create,change,attachment,comment" 179 | id = 1 180 | name = "Test Webhooks" 181 | product = "Firefox" 182 | url = "http://server.example.com/bugzilla_webhook" 183 | 184 | 185 | class PythonExceptionFactory(PydanticFactory): 186 | class Meta: 187 | model = queue.PythonException 188 | 189 | type = "ValueError" 190 | description = "boom!" 
@pytest.fixture
def webhook_private_comment_example(
    webhook_user_factory, webhook_event_factory, bug_factory, webhook_request_factory
):
    """A webhook payload for a *private* comment event on a bug that is
    already linked (via see_also) to Jira issue JBI-234."""
    commenter = webhook_user_factory(login="mathieu@mozilla.org")
    comment_event = webhook_event_factory(target="comment", user=commenter)
    linked_bug = bug_factory(
        comment={"id": 344, "number": 2, "is_private": True},
        see_also=["https://mozilla.atlassian.net/browse/JBI-234"],
    )
    return webhook_request_factory(bug=linked_bug, event=comment_event)
BugzillaClient( 30 | base_url=settings.bugzilla_base_url, api_key=settings.bugzilla_api_key 31 | ) 32 | 33 | 34 | @pytest.mark.no_mocked_bugzilla 35 | def test_timer_is_used_on_bugzilla_get_comments( 36 | bugzilla_client, settings, mocked_responses, mocked_statsd 37 | ): 38 | mocked_responses.add( 39 | "GET", 40 | f"{settings.bugzilla_base_url}/rest/bug/42/comment", 41 | json={ 42 | "bugs": {"42": {"comments": []}}, 43 | }, 44 | ) 45 | bugzilla_client.get_comments(42) 46 | mocked_statsd.timer.assert_called_with("jbi.bugzilla.methods.get_comments.timer") 47 | 48 | 49 | @pytest.mark.no_mocked_bugzilla 50 | def test_bugzilla_methods_are_retried_if_raising( 51 | bugzilla_client, settings, mocked_responses 52 | ): 53 | url = f"{settings.bugzilla_base_url}/rest/bug/42/comment" 54 | mocked_responses.add(responses.GET, url, status=503, json={}) 55 | mocked_responses.add( 56 | responses.GET, 57 | url, 58 | json={ 59 | "bugs": {"42": {"comments": []}}, 60 | }, 61 | ) 62 | 63 | # Not raising 64 | bugzilla_client.get_comments(42) 65 | 66 | assert len(mocked_responses.calls) == 2 67 | 68 | 69 | @pytest.mark.no_mocked_bugzilla 70 | def test_bugzilla_key_is_passed_in_header(bugzilla_client, settings, mocked_responses): 71 | url = f"{settings.bugzilla_base_url}/rest/whoami" 72 | mocked_responses.add( 73 | responses.GET, 74 | url, 75 | json={"id": "you"}, 76 | match=[ 77 | matchers.header_matcher({"x-bugzilla-api-key": "fake_bugzilla_api_key"}) 78 | ], 79 | ) 80 | 81 | assert bugzilla_client.logged_in() 82 | 83 | assert len(mocked_responses.calls) == 1 84 | # The following assertion is redundant with matchers but also more explicit. 
85 | assert "x-bugzilla-api-key" in mocked_responses.calls[0].request.headers 86 | 87 | 88 | @pytest.mark.no_mocked_bugzilla 89 | def test_bugzilla_raises_if_response_has_error( 90 | bugzilla_client, settings, mocked_responses 91 | ): 92 | url = f"{settings.bugzilla_base_url}/rest/bug/42" 93 | mocked_responses.add( 94 | responses.GET, url, json={"error": True, "message": "not happy"} 95 | ) 96 | 97 | with pytest.raises(BugzillaClientError) as exc: 98 | bugzilla_client.get_bug(42) 99 | 100 | assert "not happy" in str(exc) 101 | 102 | 103 | @pytest.mark.no_mocked_bugzilla 104 | def test_bugzilla_get_bug_raises_if_response_is_401_and_credentials_invalid( 105 | bugzilla_client, settings, mocked_responses 106 | ): 107 | url = f"{settings.bugzilla_base_url}/rest/bug/42" 108 | mocked_responses.add( 109 | responses.GET, 110 | url, 111 | status=401, 112 | json={ 113 | "code": 102, 114 | "documentation": "https://bmo.readthedocs.io/en/latest/api/", 115 | "error": True, 116 | "message": "You are not authorized to access bug 42.", 117 | }, 118 | ) 119 | mocked_responses.add( 120 | responses.GET, 121 | f"{settings.bugzilla_base_url}/rest/whoami", 122 | status=401, 123 | ) 124 | 125 | with pytest.raises(requests.HTTPError) as exc: 126 | bugzilla_client.get_bug(42) 127 | 128 | assert ( 129 | f"401 Client Error: Unauthorized for url: {settings.bugzilla_base_url}/rest/bug/42" 130 | in str(exc) 131 | ) 132 | 133 | 134 | @pytest.mark.no_mocked_bugzilla 135 | def test_bugzilla_get_bug_raises_if_response_is_401_and_credentials_valid( 136 | bugzilla_client, settings, mocked_responses 137 | ): 138 | url = f"{settings.bugzilla_base_url}/rest/bug/42" 139 | mocked_responses.add( 140 | responses.GET, 141 | url, 142 | status=401, 143 | json={ 144 | "code": 102, 145 | "documentation": "https://bmo.readthedocs.io/en/latest/api/", 146 | "error": True, 147 | "message": "You are not authorized to access bug 42.", 148 | }, 149 | ) 150 | mocked_responses.add( 151 | responses.GET, 152 | 
f"{settings.bugzilla_base_url}/rest/whoami", 153 | json={"id": "you"}, 154 | ) 155 | 156 | with pytest.raises(BugNotAccessibleError) as exc: 157 | bugzilla_client.get_bug(42) 158 | 159 | assert "You are not authorized to access bug 42" in str(exc) 160 | 161 | 162 | @pytest.mark.no_mocked_bugzilla 163 | def test_bugzilla_get_bug_raises_if_response_has_no_bugs( 164 | bugzilla_client, settings, mocked_responses 165 | ): 166 | url = f"{settings.bugzilla_base_url}/rest/bug/42" 167 | mocked_responses.add(responses.GET, url, json={"bugs": []}) 168 | 169 | with pytest.raises(BugzillaClientError) as exc: 170 | bugzilla_client.get_bug(42) 171 | 172 | assert "Unexpected response" in str(exc) 173 | 174 | 175 | @pytest.mark.no_mocked_bugzilla 176 | def test_bugzilla_get_comments_raises_if_response_has_no_bugs( 177 | bugzilla_client, settings, mocked_responses 178 | ): 179 | url = f"{settings.bugzilla_base_url}/rest/bug/42/comment" 180 | mocked_responses.add(responses.GET, url, json={"bugs": {"42": {}}}) 181 | 182 | with pytest.raises(BugzillaClientError) as exc: 183 | bugzilla_client.get_comments(42) 184 | 185 | assert "Unexpected response" in str(exc) 186 | 187 | 188 | @pytest.mark.no_mocked_bugzilla 189 | def test_bugzilla_update_bug_uses_a_put(bugzilla_client, settings, mocked_responses): 190 | url = f"{settings.bugzilla_base_url}/rest/bug/42" 191 | mocked_responses.add(responses.PUT, url, json={"bugs": [{"id": 42}]}) 192 | 193 | bugzilla_client.update_bug(42, see_also={"add": ["http://url.com"]}) 194 | 195 | assert ( 196 | mocked_responses.calls[0].request.body 197 | == b'{"see_also": {"add": ["http://url.com"]}}' 198 | ) 199 | 200 | 201 | @pytest.mark.no_mocked_bugzilla 202 | def test_bugzilla_get_bug_comment( 203 | bugzilla_client, settings, mocked_responses, webhook_private_comment_example 204 | ): 205 | # given 206 | bug_url = ( 207 | f"{settings.bugzilla_base_url}/rest/bug/%s" 208 | % webhook_private_comment_example.bug.id 209 | ) 210 | mocked_responses.add( 211 | 
responses.GET, 212 | bug_url, 213 | json={"bugs": [webhook_private_comment_example.bug.model_dump()]}, 214 | ) 215 | mocked_responses.add( 216 | responses.GET, 217 | bug_url + "/comment", 218 | json={ 219 | "bugs": { 220 | str(webhook_private_comment_example.bug.id): { 221 | "comments": [ 222 | { 223 | "id": 343, 224 | "text": "not this one", 225 | "is_private": False, 226 | "creator": "mathieu@mozilla.org", 227 | }, 228 | { 229 | "id": 344, 230 | "text": "hello", 231 | "is_private": False, 232 | "creator": "mathieu@mozilla.org", 233 | }, 234 | { 235 | "id": 345, 236 | "text": "not this one", 237 | "is_private": False, 238 | "creator": "mathieu@mozilla.org", 239 | }, 240 | ] 241 | } 242 | }, 243 | "comments": {}, 244 | }, 245 | ) 246 | 247 | expanded = bugzilla_client.get_bug(webhook_private_comment_example.bug.id) 248 | 249 | # then 250 | assert expanded.comment.creator == "mathieu@mozilla.org" 251 | assert expanded.comment.text == "hello" 252 | 253 | 254 | @pytest.mark.no_mocked_bugzilla 255 | def test_bugzilla_missing_private_comment( 256 | bugzilla_client, 257 | settings, 258 | mocked_responses, 259 | webhook_private_comment_example, 260 | ): 261 | bug_url = ( 262 | f"{settings.bugzilla_base_url}/rest/bug/%s" 263 | % webhook_private_comment_example.bug.id 264 | ) 265 | mocked_responses.add( 266 | responses.GET, 267 | bug_url, 268 | json={"bugs": [webhook_private_comment_example.bug.model_dump()]}, 269 | ) 270 | mocked_responses.add( 271 | responses.GET, 272 | bug_url + "/comment", 273 | json={ 274 | "bugs": {str(webhook_private_comment_example.bug.id): {"comments": []}}, 275 | "comments": {}, 276 | }, 277 | ) 278 | 279 | expanded = bugzilla_client.get_bug(webhook_private_comment_example.bug.id) 280 | 281 | assert not expanded.comment 282 | 283 | 284 | @pytest.mark.no_mocked_bugzilla 285 | def test_bugzilla_list_webhooks(bugzilla_client, settings, mocked_responses): 286 | url = f"{settings.bugzilla_base_url}/rest/webhooks/list" 287 | mocked_responses.add( 288 | 
responses.GET, 289 | url, 290 | json={ 291 | "webhooks": [ 292 | { 293 | "id": 0, 294 | "creator": "Bob", 295 | "name": "", 296 | "url": "http://server/bugzilla_webhook", 297 | "event": "create,change,comment", 298 | "product": "Any", 299 | "component": "Any", 300 | "enabled": True, 301 | "errors": 0, 302 | } 303 | ] 304 | }, 305 | ) 306 | 307 | webhooks = bugzilla_client.list_webhooks() 308 | 309 | assert len(webhooks) == 1 310 | assert webhooks[0].event == "create,change,comment" 311 | assert "/bugzilla_webhook" in webhooks[0].url 312 | 313 | 314 | @pytest.mark.no_mocked_bugzilla 315 | def test_bugzilla_list_webhooks_raises_if_response_has_no_webhooks( 316 | bugzilla_client, settings, mocked_responses 317 | ): 318 | url = f"{settings.bugzilla_base_url}/rest/webhooks/list" 319 | mocked_responses.add(responses.GET, url, json={}) 320 | 321 | with pytest.raises(BugzillaClientError) as exc: 322 | bugzilla_client.list_webhooks() 323 | 324 | assert "Unexpected response" in str(exc) 325 | -------------------------------------------------------------------------------- /tests/unit/jira/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/jira-bugzilla-integration/7c85b662f3e78319ccdafd102d1e6511ce057c9d/tests/unit/jira/__init__.py -------------------------------------------------------------------------------- /tests/unit/jira/test_client.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | import pytest 4 | import requests 5 | import responses 6 | 7 | from jbi.jira.client import JiraClient 8 | 9 | 10 | @pytest.fixture 11 | def jira_client(settings): 12 | return JiraClient( 13 | url=settings.jira_base_url, 14 | username=settings.jira_username, 15 | password=settings.jira_api_key, 16 | cloud=True, 17 | ) 18 | 19 | 20 | def test_jira_create_issue_is_instrumented( 21 | settings, jira_client, mocked_responses, context_create_example, 
mocked_statsd 22 | ): 23 | url = f"{settings.jira_base_url}rest/api/2/issue" 24 | mocked_responses.add( 25 | responses.POST, 26 | url, 27 | json={ 28 | "id": "10000", 29 | "key": "ED-24", 30 | }, 31 | ) 32 | 33 | jira_client.create_issue({}) 34 | 35 | mocked_statsd.incr.assert_called_with("jbi.jira.methods.create_issue.count") 36 | mocked_statsd.timer.assert_called_with("jbi.jira.methods.create_issue.timer") 37 | 38 | 39 | def test_jira_calls_log_http_errors( 40 | settings, jira_client, mocked_responses, context_create_example, caplog 41 | ): 42 | url = f"{settings.jira_base_url}rest/api/2/project/{context_create_example.jira.project}/components" 43 | mocked_responses.add( 44 | responses.GET, 45 | url, 46 | status=404, 47 | json={ 48 | "errorMessages": ["No project could be found with key 'X'."], 49 | "errors": {}, 50 | }, 51 | ) 52 | 53 | with caplog.at_level(logging.ERROR): 54 | with pytest.raises(requests.HTTPError): 55 | jira_client.get_project_components(context_create_example.jira.project) 56 | 57 | log_messages = [log.msg % log.args for log in caplog.records] 58 | idx = log_messages.index( 59 | "HTTP: GET /rest/api/2/project/JBI/components -> 404 Not Found" 60 | ) 61 | log_record = caplog.records[idx] 62 | assert ( 63 | log_record.body 64 | == '{"errorMessages": ["No project could be found with key \'X\'."], "errors": {}}' 65 | ) 66 | 67 | 68 | def test_paginated_projects_no_keys(settings, jira_client, mocked_responses): 69 | url = f"{settings.jira_base_url}rest/api/2/project/search" 70 | mocked_response_data = {"some": "data"} 71 | mocked_responses.add( 72 | responses.GET, 73 | url, 74 | status=200, 75 | match=[responses.matchers.query_string_matcher(None)], 76 | json=mocked_response_data, 77 | ) 78 | resp = jira_client.paginated_projects() 79 | assert resp == mocked_response_data 80 | 81 | 82 | def test_paginated_projects_with_keys(settings, jira_client, mocked_responses): 83 | url = f"{settings.jira_base_url}rest/api/2/project/search" 84 | 
mocked_response_data = {"some": "data"} 85 | mocked_responses.add( 86 | responses.GET, 87 | url, 88 | status=200, 89 | match=[responses.matchers.query_string_matcher("keys=['ABC', 'DEF']")], 90 | json=mocked_response_data, 91 | ) 92 | resp = jira_client.paginated_projects(keys=["ABC", "DEF"]) 93 | assert resp == mocked_response_data 94 | 95 | 96 | def test_paginated_projects_greater_than_50_keys( 97 | settings, jira_client, mocked_responses 98 | ): 99 | keys = [str(i) for i in range(51)] 100 | with pytest.raises(ValueError): 101 | jira_client.paginated_projects(keys=keys) 102 | -------------------------------------------------------------------------------- /tests/unit/jira/test_utils.py: -------------------------------------------------------------------------------- 1 | from textwrap import dedent 2 | 3 | import pytest 4 | 5 | from jbi.jira.utils import markdown_to_jira 6 | 7 | 8 | def test_markdown_to_jira(): 9 | markdown = dedent( 10 | """ 11 | Mixed nested lists 12 | 13 | * a 14 | * bulleted 15 | - with 16 | - nested 17 | 1. nested-nested 18 | - numbered 19 | * list 20 | 21 | List without newline: 22 | * one 23 | * two 24 | 25 | this was `inline` value ``that`` is turned into ```monospace``` tag. 26 | 27 | this sentence __has__ **bold** and _has_ *italic*. 28 | 29 | this was ~~wrong~~. 30 | """ 31 | ).lstrip() 32 | 33 | jira = dedent( 34 | """ 35 | Mixed nested lists 36 | 37 | * a 38 | * bulleted 39 | ** with 40 | ** nested 41 | **# nested-nested 42 | ** numbered 43 | * list 44 | 45 | List without newline: 46 | 47 | * one 48 | * two 49 | 50 | this was {{inline}} value {{that}} is turned into {{monospace}} tag. 51 | 52 | this sentence *has* *bold* and _has_ _italic_. 53 | 54 | this was -wrong-. 
55 | """ 56 | ).strip() 57 | 58 | assert markdown_to_jira(markdown) == jira 59 | 60 | 61 | def test_markdown_to_jira_with_malformed_input(): 62 | assert markdown_to_jira("[link|http://noend") == "\\[link|http://noend" 63 | 64 | 65 | @pytest.mark.parametrize( 66 | "markdown, expected, max_length", 67 | [ 68 | ("a" * 10, "aaaaa", 5), 69 | ("aa aaa", "aa", 5), 70 | ("aa\naaa", "aa", 5), 71 | ("aa\taaa", "aa", 5), 72 | ("aaaaaa", "aaaaa", 5), 73 | ("aaaaa ", "aaaaa", 5), 74 | ("`fo` `fo`", "{{fo}}", 9), 75 | ], 76 | ) 77 | def test_markdown_to_jira_with_max_chars(markdown, expected, max_length): 78 | assert markdown_to_jira(markdown, max_length=max_length) == expected 79 | -------------------------------------------------------------------------------- /tests/unit/test_app.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from unittest.mock import patch 3 | 4 | import pytest 5 | 6 | from jbi.app import traces_sampler 7 | from jbi.environment import get_settings 8 | 9 | 10 | def test_request_summary_is_logged(caplog, anon_client): 11 | with caplog.at_level(logging.INFO): 12 | # https://fastapi.tiangolo.com/advanced/testing-events/ 13 | anon_client.get( 14 | "/", 15 | headers={ 16 | "X-Request-Id": "foo-bar", 17 | }, 18 | ) 19 | 20 | summary = [r for r in caplog.records if r.name == "request.summary"][0] 21 | 22 | assert summary.rid == "foo-bar" 23 | assert summary.method == "GET" 24 | assert summary.path == "/" 25 | assert summary.querystring == "" 26 | 27 | 28 | def test_request_summary_defaults_user_agent_to_empty_string(caplog, anon_client): 29 | with caplog.at_level(logging.INFO): 30 | del anon_client.headers["User-Agent"] 31 | anon_client.get("/") 32 | 33 | summary = [r for r in caplog.records if r.name == "request.summary"][0] 34 | 35 | assert summary.agent == "" 36 | 37 | 38 | def test_422_errors_are_logged(authenticated_client, webhook_request_factory, caplog): 39 | webhook = 
webhook_request_factory.build(bug=None) 40 | 41 | with caplog.at_level(logging.INFO): 42 | authenticated_client.post( 43 | "/bugzilla_webhook", 44 | headers={"X-Api-Key": "fake_api_key"}, 45 | data=webhook.model_dump_json(), 46 | ) 47 | 48 | logged = [r for r in caplog.records if r.name == "jbi.app"][0] 49 | assert logged.errors[0]["loc"] == ("body", "bug") 50 | assert ( 51 | logged.errors[0]["msg"] 52 | == "Input should be a valid dictionary or object to extract fields from" 53 | ) 54 | 55 | 56 | @pytest.mark.parametrize( 57 | "sampling_context,expected", 58 | [ 59 | # /__lbheartbeat__ 60 | ({"asgi_scope": {"path": "/__lbheartbeat__"}}, 0), 61 | # path that isn't /__lbheartbeat__ 62 | ( 63 | {"asgi_scope": {"path": "/"}}, 64 | get_settings().sentry_traces_sample_rate, 65 | ), 66 | # context w/o an asgi_scope 67 | ( 68 | {"parent_sampled": None}, 69 | get_settings().sentry_traces_sample_rate, 70 | ), 71 | # context w/o an asgi_scope.path 72 | ( 73 | {"asgi_scope": {"type": "lifespan"}}, 74 | get_settings().sentry_traces_sample_rate, 75 | ), 76 | ], 77 | ) 78 | def test_traces_sampler(sampling_context, expected): 79 | assert traces_sampler(sampling_context) == expected 80 | 81 | 82 | @pytest.mark.asyncio 83 | async def test_errors_are_reported_to_sentry(anon_client, bugzilla_webhook_request): 84 | with patch("sentry_sdk.capture_event") as mocked: 85 | with patch("jbi.router.execute_or_queue", side_effect=ValueError): 86 | with pytest.raises(ValueError): 87 | anon_client.post( 88 | "/bugzilla_webhook", 89 | headers={"X-Api-Key": "fake_api_key"}, 90 | data=bugzilla_webhook_request.model_dump_json(), 91 | ) 92 | 93 | assert mocked.called, "Sentry captured the exception" 94 | 95 | 96 | @pytest.mark.asyncio 97 | async def test_request_id_is_passed_down_to_logger_contexts( 98 | caplog, 99 | bugzilla_webhook_request, 100 | authenticated_client, 101 | mocked_jira, 102 | mocked_bugzilla, 103 | ): 104 | mocked_bugzilla.get_bug.return_value = bugzilla_webhook_request.bug 105 | 
mocked_jira.create_issue.return_value = { 106 | "key": "JBI-1922", 107 | } 108 | with caplog.at_level(logging.DEBUG): 109 | authenticated_client.post( 110 | "/bugzilla_webhook", 111 | data=bugzilla_webhook_request.model_dump_json(), 112 | headers={ 113 | "X-Request-Id": "foo-bar", 114 | }, 115 | ) 116 | 117 | runner_logs = [r for r in caplog.records if r.name == "jbi.runner"] 118 | assert runner_logs[0].rid == "foo-bar" 119 | -------------------------------------------------------------------------------- /tests/unit/test_configuration.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from jbi import configuration 4 | 5 | 6 | def test_mock_jbi_files(): 7 | with pytest.raises(configuration.ConfigError) as exc_info: 8 | configuration.get_actions_from_file( 9 | jbi_config_file="tests/fixtures/bad-config.yaml" 10 | ) 11 | assert "Errors exist" in str(exc_info.value) 12 | 13 | 14 | def test_actual_jbi_files(): 15 | assert configuration.get_actions_from_file( 16 | jbi_config_file="config/config.nonprod.yaml" 17 | ) 18 | assert configuration.get_actions_from_file( 19 | jbi_config_file="config/config.prod.yaml" 20 | ) 21 | 22 | 23 | def test_filename_uses_env(mocker, settings): 24 | get_actions_from_file_spy = mocker.spy(configuration, "get_actions_from_file") 25 | assert settings.env == "local" 26 | 27 | configuration.get_actions() 28 | 29 | get_actions_from_file_spy.assert_called_with("config/config.local.yaml") 30 | -------------------------------------------------------------------------------- /tests/unit/test_environment.py: -------------------------------------------------------------------------------- 1 | """Tests for our application environment config parsing 2 | 3 | Pytest overwrites your local environment with the values set by options in 4 | `tool.pytest.ini_options` 5 | """ 6 | 7 | import pydantic 8 | import pytest 9 | 10 | from jbi.environment import Environment, Settings 11 | 12 | 13 | def 
test_settings_env_is_enum_string(): 14 | settings = Settings(env=Environment.PROD) 15 | 16 | assert settings.env == "prod" 17 | assert str(settings.env) == "prod" 18 | 19 | 20 | def test_sentry_dsn(): 21 | Settings(sentry_dsn="http://www.example.com/") 22 | 23 | 24 | def test_sentry_dsn_no_url_string_raises(): 25 | with pytest.raises(pydantic.ValidationError): 26 | Settings(sentry_dsn="foobar") 27 | 28 | 29 | def dl_queue_dsn_allowed_schema(dsn): 30 | Settings(dl_queue_dsn="file://tmp/queue") 31 | 32 | 33 | @pytest.mark.parametrize("dsn", ["http://www.example.com", "foobar"]) 34 | def invalid_dl_queue_dsn_raises(dsn): 35 | with pytest.raises(pydantic.ValidationError): 36 | Settings(dl_queue_dsn=dsn) 37 | -------------------------------------------------------------------------------- /tests/unit/test_models.py: -------------------------------------------------------------------------------- 1 | import pydantic 2 | import pytest 3 | 4 | from jbi.models import ActionParams, Actions, ActionSteps 5 | 6 | 7 | @pytest.mark.parametrize("value", [123456, [123456], [12345, 67890], "tbd"]) 8 | def test_valid_bugzilla_user_ids(action_factory, value): 9 | action = action_factory(bugzilla_user_id=value) 10 | assert action.bugzilla_user_id == value 11 | 12 | 13 | @pytest.mark.parametrize("value", [None, "foobar@example.com"]) 14 | def test_invalid_bugzilla_user_ids(action_factory, value): 15 | with pytest.raises(pydantic.ValidationError): 16 | action_factory(bugzilla_user_id=value) 17 | 18 | 19 | def test_no_actions_fails(): 20 | with pytest.raises(ValueError) as exc_info: 21 | Actions(root=[]) 22 | assert "List should have at least 1 item after validation, not 0" in str( 23 | exc_info.value 24 | ) 25 | 26 | 27 | def test_default_invalid_step(): 28 | with pytest.raises(pydantic.ValidationError) as exc: 29 | ActionSteps(new=["BOOM", "POW"], comment=["BAM"]) 30 | error_message = str(exc.value) 31 | 32 | assert "BOOM" in error_message 33 | assert "POW" in error_message 34 | assert 
"BAM" in error_message 35 | 36 | 37 | def test_duplicated_whiteboard_tag_fails(action_factory): 38 | with pytest.raises(ValueError) as exc_info: 39 | Actions( 40 | root=[ 41 | action_factory(whiteboard_tag="x"), 42 | action_factory(whiteboard_tag="y"), 43 | action_factory(whiteboard_tag="x"), 44 | ] 45 | ) 46 | assert "actions have duplicated lookup tags: ['x']" in str(exc_info.value) 47 | 48 | 49 | def test_override_step_configuration_for_single_action_type(): 50 | default_steps = ActionSteps() 51 | params = ActionParams( 52 | jira_project_key="JBI", steps=ActionSteps(new=["create_issue"]) 53 | ) 54 | assert params.steps.new == ["create_issue"] 55 | assert params.steps.new != default_steps.new 56 | assert params.steps.existing == default_steps.existing 57 | assert params.steps.comment == default_steps.comment 58 | 59 | 60 | @pytest.mark.parametrize( 61 | "see_also,expected", 62 | [ 63 | (None, None), 64 | ([], None), 65 | (["foo"], None), 66 | (["fail:/format"], None), 67 | (["foo", "http://jira.net/123"], "123"), 68 | (["http://org/123"], None), 69 | (["http://jira.com"], None), 70 | (["http://mozilla.jira.com/"], None), 71 | (["http://mozilla.jira.com/123"], "123"), 72 | (["http://mozilla.jira.com/123/"], "123"), 73 | (["http://mozilla.jira.com/ticket/123"], "123"), 74 | (["http://atlassian.com/ticket/123"], "123"), 75 | (["http://mozilla.jira.com/123", "http://mozilla.jira.com/456"], "123"), 76 | ( 77 | ["http://mozilla.jira.com/FOO-123", "http://mozilla.jira.com/BAR-456"], 78 | "FOO-123", 79 | ), 80 | ( 81 | ["http://mozilla.jira.com/FOO-123", "http://mozilla.jira.com/JBI456"], 82 | "FOO-123", 83 | ), 84 | ( 85 | ["http://mozilla.jira.com/FOO-123", "http://mozilla.jira.com/JBI-456"], 86 | "JBI-456", 87 | ), 88 | ], 89 | ) 90 | def test_extract_see_also(see_also, expected, bug_factory): 91 | bug = bug_factory(see_also=see_also) 92 | assert bug.extract_from_see_also("JBI") == expected 93 | 94 | 95 | @pytest.mark.parametrize( 96 | "product,component,expected", 97 
| [ 98 | (None, None, ""), 99 | (None, "General", "General"), 100 | ("Product", None, "Product::"), 101 | ("Product", "General", "Product::General"), 102 | ], 103 | ) 104 | def test_product_component(product, component, expected, bug_factory): 105 | bug = bug_factory(product=product, component=component) 106 | assert bug.product_component == expected 107 | 108 | 109 | def test_payload_empty_changes_list(webhook_event_factory): 110 | event = webhook_event_factory(routing_key="bug.modify", changes=None) 111 | assert event.changed_fields() == [] 112 | 113 | 114 | def test_payload_changes_list(webhook_event_change_factory, webhook_event_factory): 115 | changes = [ 116 | webhook_event_change_factory(field="status", removed="OPEN", added="FIXED"), 117 | webhook_event_change_factory( 118 | field="assignee", removed="nobody@mozilla.org", added="mathieu@mozilla.com" 119 | ), 120 | ] 121 | event = webhook_event_factory(routing_key="bug.modify", changes=changes) 122 | assert event.changed_fields() == [ 123 | "status", 124 | "assignee", 125 | ] 126 | 127 | 128 | def test_payload_changes_coerces_numbers_to_strings( 129 | webhook_event_change_factory, webhook_event_factory 130 | ): 131 | changes = [ 132 | webhook_event_change_factory(field="is_confirmed", removed="1", added=0), 133 | ] 134 | event = webhook_event_factory(routing_key="bug.modify", changes=changes) 135 | assert event.changed_fields() == ["is_confirmed"] 136 | assert event.changes[0].added == "0" 137 | 138 | 139 | def test_max_configured_projects_raises_error(action_factory): 140 | actions = [action_factory(whiteboard_tag=str(i)) for i in range(51)] 141 | with pytest.raises(pydantic.ValidationError): 142 | Actions(root=actions) 143 | -------------------------------------------------------------------------------- /tests/unit/test_queue.py: -------------------------------------------------------------------------------- 1 | import json 2 | from datetime import datetime, timedelta 3 | 4 | import pytest 5 | from 
pydantic import HttpUrl 6 | 7 | from jbi.queue import ( 8 | DeadLetterQueue, 9 | FileBackend, 10 | InvalidQueueDSNError, 11 | QueueBackend, 12 | QueueItemRetrievalError, 13 | ) 14 | 15 | 16 | @pytest.fixture 17 | def backend(tmp_path): 18 | return FileBackend(tmp_path) 19 | 20 | 21 | @pytest.fixture 22 | def queue(tmp_path): 23 | return DeadLetterQueue("file://" + str(tmp_path)) 24 | 25 | 26 | @pytest.mark.parametrize( 27 | "dsn", ["memory://", "http://www.example.com", HttpUrl("http://www.example.com")] 28 | ) 29 | def test_invalid_queue_url(dsn): 30 | with pytest.raises(InvalidQueueDSNError): 31 | DeadLetterQueue(dsn) 32 | 33 | 34 | def test_ping(backend: QueueBackend): 35 | assert backend.ping() is True 36 | 37 | 38 | def test_filebackend_ping_fails(caplog, tmp_path): 39 | tmp_path.chmod(0o400) # set to readonly 40 | backend = FileBackend(tmp_path) 41 | assert backend.ping() is False 42 | 43 | 44 | @pytest.mark.asyncio 45 | async def test_backend_remove_last_item(backend: QueueBackend, queue_item_factory): 46 | """When we remove the last item for a bug, we also remove it's key from the 47 | backend""" 48 | 49 | item = queue_item_factory() 50 | 51 | await backend.put(item) 52 | assert await backend.size() == 1 53 | 54 | await backend.remove(item.payload.bug.id, item.identifier) 55 | assert await backend.size() == 0 56 | 57 | 58 | @pytest.mark.asyncio 59 | async def test_backend_clear(backend: QueueBackend, queue_item_factory): 60 | item_1 = queue_item_factory(payload__bug__id=123) 61 | item_2 = queue_item_factory(payload__bug__id=456) 62 | 63 | await backend.put(item_1) 64 | await backend.put(item_2) 65 | assert await backend.size() == 2 66 | 67 | await backend.clear() 68 | assert await backend.size() == 0 69 | 70 | 71 | @pytest.mark.asyncio 72 | async def test_backend_put_maintains_sorted_order( 73 | backend: QueueBackend, queue_item_factory 74 | ): 75 | now = datetime.now() 76 | item_1 = queue_item_factory(payload__event__time=now + timedelta(minutes=1)) 77 | 
item_2 = queue_item_factory(payload__event__time=now + timedelta(minutes=2)) 78 | item_3 = queue_item_factory(payload__event__time=now + timedelta(minutes=3)) 79 | item_4 = queue_item_factory(payload__event__time=now + timedelta(minutes=4)) 80 | 81 | await backend.put(item_2) 82 | await backend.put(item_1) 83 | await backend.put(item_3) 84 | await backend.put(item_4) 85 | 86 | items = [item async for item in backend.get(item_1.payload.bug.id)] 87 | assert list(items) == [item_1, item_2, item_3, item_4] 88 | 89 | 90 | @pytest.mark.asyncio 91 | async def test_backend_get_ordering(backend: QueueBackend, queue_item_factory): 92 | now = datetime.now() 93 | item_1 = queue_item_factory(payload__event__time=now + timedelta(minutes=1)) 94 | item_2 = queue_item_factory(payload__event__time=now + timedelta(minutes=2)) 95 | item_3 = queue_item_factory(payload__event__time=now + timedelta(minutes=3)) 96 | item_4 = queue_item_factory(payload__event__time=now + timedelta(minutes=4)) 97 | 98 | await backend.put(item_2) 99 | await backend.put(item_1) 100 | await backend.put(item_3) 101 | await backend.put(item_4) 102 | 103 | item_metadata = [ 104 | item.identifier async for item in backend.get(bug_id=item_1.payload.bug.id) 105 | ] 106 | exptected_id_order = [item.identifier for item in [item_1, item_2, item_3, item_4]] 107 | assert exptected_id_order == item_metadata 108 | 109 | 110 | @pytest.mark.asyncio 111 | async def test_backend_get_all(backend: QueueBackend, queue_item_factory): 112 | now = datetime.now() 113 | item_1 = queue_item_factory( 114 | payload__bug__id=123, payload__event__time=now + timedelta(minutes=1) 115 | ) 116 | item_2 = queue_item_factory( 117 | payload__bug__id=456, payload__event__time=now + timedelta(minutes=2) 118 | ) 119 | item_3 = queue_item_factory( 120 | payload__bug__id=123, payload__event__time=now + timedelta(minutes=3) 121 | ) 122 | item_4 = queue_item_factory( 123 | payload__bug__id=456, payload__event__time=now + timedelta(minutes=4) 124 | ) 125 
| 126 | await backend.put(item_3) 127 | await backend.put(item_4) 128 | await backend.put(item_1) 129 | await backend.put(item_2) 130 | 131 | items = await backend.get_all() 132 | assert len(items) == 2 133 | assert [item async for item in items[123]] == [item_1, item_3] 134 | assert [item async for item in items[456]] == [item_2, item_4] 135 | 136 | 137 | @pytest.mark.asyncio 138 | async def test_backend_get_all_invalid_json(backend: QueueBackend, queue_item_factory): 139 | item_1 = queue_item_factory() 140 | await backend.put(item_1) 141 | 142 | corrupt_file_dir = backend.location / "999" 143 | corrupt_file_dir.mkdir() 144 | 145 | corrupt_file_path = corrupt_file_dir / "xxx.json" 146 | corrupt_file_path.write_text("BOOM") 147 | 148 | items = await backend.get_all() 149 | assert len(items) == 2 150 | 151 | 152 | @pytest.mark.asyncio 153 | async def test_backend_get_all_ignores_bad_folders( 154 | backend: QueueBackend, queue_item_factory 155 | ): 156 | item_1 = queue_item_factory() 157 | await backend.put(item_1) 158 | 159 | corrupt_file_dir = backend.location / "abc" 160 | corrupt_file_dir.mkdir() 161 | 162 | items = await backend.get_all() 163 | assert len(items) == 1 164 | 165 | 166 | @pytest.mark.asyncio 167 | async def test_backend_get_all_payload_doesnt_match_schema( 168 | backend: QueueBackend, queue_item_factory 169 | ): 170 | item_1 = queue_item_factory() 171 | await backend.put(item_1) 172 | 173 | # this is invalid, as whiteboard should be a string 174 | item_2 = queue_item_factory.build( 175 | payload__bug__id=999, payload__bug__whiteboard=False 176 | ) 177 | await backend.put(item_2) 178 | 179 | items = await backend.get_all() 180 | assert len(items) == 2 181 | 182 | 183 | @pytest.mark.asyncio 184 | async def test_backend_get_invalid_json(backend: QueueBackend, queue_item_factory): 185 | corrupt_file_dir = backend.location / "999" 186 | corrupt_file_dir.mkdir() 187 | corrupt_file_path = corrupt_file_dir / "xxx.json" 188 | 
corrupt_file_path.write_text("BOOM") 189 | 190 | items = backend.get(999) 191 | 192 | with pytest.raises(QueueItemRetrievalError): 193 | await anext(items) 194 | 195 | 196 | @pytest.mark.asyncio 197 | async def test_get_missing_timezone(backend: QueueBackend, queue_item_factory): 198 | item = queue_item_factory.build(payload__bug__id=666) 199 | dump = item.model_dump() 200 | dump["payload"]["event"]["time"] = "2024-04-18T12:46:54" 201 | 202 | queue_dir = backend.location / "666" 203 | queue_dir.mkdir() 204 | corrupt_file_path = queue_dir / f"{item.identifier}.json" 205 | corrupt_file_path.write_text(json.dumps(dump)) 206 | 207 | items = backend.get(666) 208 | item = await anext(items) 209 | 210 | assert item.timestamp.tzname() == "UTC", "default timezone added" 211 | assert "2024-04-18T12:46:54Z" in item.model_dump_json(), "timezone put in dump" 212 | 213 | 214 | @pytest.mark.asyncio 215 | async def test_backend_get_payload_doesnt_match_schema( 216 | backend: QueueBackend, queue_item_factory 217 | ): 218 | # this is invalid, as whiteboard should be a string 219 | item = queue_item_factory.build( 220 | payload__bug__id=999, payload__bug__whiteboard=False 221 | ) 222 | await backend.put(item) 223 | 224 | items = backend.get(999) 225 | 226 | with pytest.raises(QueueItemRetrievalError): 227 | await anext(items) 228 | 229 | 230 | def test_check_writable_ok(queue: DeadLetterQueue): 231 | assert queue.check_writable() == [] 232 | 233 | 234 | def test_check_writable_not_writable(queue: DeadLetterQueue, tmp_path): 235 | queue.backend = FileBackend(tmp_path) 236 | tmp_path.chmod(0o400) # set to readonly 237 | [failure] = queue.check_writable() 238 | assert failure.id == "queue.backend.ping" 239 | 240 | 241 | @pytest.mark.asyncio 242 | async def test_check_readable_ok(queue: DeadLetterQueue): 243 | assert await queue.check_readable() == [] 244 | 245 | 246 | @pytest.mark.asyncio 247 | async def test_check_readable_not_parseable(queue: DeadLetterQueue): 248 | corrupt_file_dir = 
queue.backend.location / "999" 249 | corrupt_file_dir.mkdir() 250 | corrupt_file_path = corrupt_file_dir / "xxx.json" 251 | corrupt_file_path.write_text("BOOM") 252 | 253 | [failure] = await queue.check_readable() 254 | assert failure.id == "queue.backend.read" 255 | assert failure.hint.startswith("check that parked event files are not corrupt") 256 | 257 | 258 | @pytest.mark.asyncio 259 | async def test_postpone(queue: DeadLetterQueue, webhook_request_factory): 260 | webhook_payload = webhook_request_factory() 261 | await queue.postpone(webhook_payload, rid="rid") 262 | 263 | [item] = [_ async for _ in queue.backend.get(webhook_payload.bug.id)] 264 | assert item.payload == webhook_payload 265 | assert item.rid == "rid" 266 | 267 | 268 | @pytest.mark.asyncio 269 | async def test_track_failed(queue: DeadLetterQueue, webhook_request_factory): 270 | webhook_payload = webhook_request_factory() 271 | exc = Exception("boom") 272 | 273 | await queue.track_failed(webhook_payload, exc, rid="rid") 274 | [item] = [_ async for _ in queue.backend.get(webhook_payload.bug.id)] 275 | assert item.payload == webhook_payload 276 | 277 | assert item.payload == webhook_payload 278 | assert item.error.description == str(exc) 279 | assert item.rid == "rid" 280 | 281 | 282 | @pytest.mark.asyncio 283 | async def test_is_blocked( 284 | queue: DeadLetterQueue, queue_item_factory, webhook_request_factory 285 | ): 286 | blocked_payload = webhook_request_factory(bug__id=123) 287 | item = queue_item_factory(payload=blocked_payload) 288 | await queue.backend.put(item) 289 | 290 | assert await queue.is_blocked(blocked_payload) is True 291 | 292 | another_payload = webhook_request_factory(bug__id=456) 293 | assert await queue.is_blocked(another_payload) is False 294 | 295 | 296 | @pytest.mark.asyncio 297 | async def test_size(backend, queue_item_factory): 298 | item = queue_item_factory(payload__bug__id=1) 299 | another_item = queue_item_factory(payload__bug__id=2) 300 | 301 | await 
backend.put(item) 302 | await backend.put(another_item) 303 | 304 | assert await backend.size() == 2 305 | assert await backend.size(bug_id=1) == 1 306 | 307 | 308 | @pytest.mark.asyncio 309 | async def test_size_empty(backend, queue_item_factory): 310 | assert await backend.size() == 0 311 | assert await backend.size(bug_id=999) == 0 312 | 313 | 314 | @pytest.mark.asyncio 315 | async def test_retrieve(queue: DeadLetterQueue, queue_item_factory): 316 | bug_ids = (1, 2, 1, 3) 317 | now = datetime.now() 318 | 319 | for idx, bug_id in enumerate(bug_ids): 320 | timestamp = now + timedelta(minutes=idx) 321 | await queue.backend.put( 322 | queue_item_factory( 323 | timestamp=timestamp, 324 | payload__event__time=timestamp, 325 | payload__bug__id=bug_id, 326 | ) 327 | ) 328 | 329 | items = await queue.retrieve() 330 | assert len(items) == 3 331 | bug_1_items = [item async for item in items[1]] 332 | assert bug_1_items[0].payload.event.time < bug_1_items[1].payload.event.time 333 | 334 | 335 | @pytest.mark.asyncio 336 | async def test_done(queue: DeadLetterQueue, queue_item_factory): 337 | item = queue_item_factory() 338 | 339 | await queue.backend.put(item) 340 | assert await queue.backend.size() == 1 341 | 342 | await queue.done(item) 343 | assert await queue.backend.size() == 0 344 | 345 | 346 | @pytest.mark.asyncio 347 | async def test_delete(queue: DeadLetterQueue, queue_item_factory): 348 | item = queue_item_factory() 349 | 350 | await queue.backend.put(item) 351 | assert await queue.backend.size() == 1 352 | 353 | await queue.delete(item.identifier) 354 | assert await queue.backend.size() == 0 355 | 356 | 357 | @pytest.mark.asyncio 358 | async def test_exists(queue: DeadLetterQueue, queue_item_factory): 359 | item = queue_item_factory() 360 | 361 | await queue.backend.put(item) 362 | assert await queue.exists(item.identifier) is True 363 | -------------------------------------------------------------------------------- /tests/unit/test_retry.py: 
--------------------------------------------------------------------------------
from datetime import UTC, datetime, timedelta
from unittest import mock

import pytest

from jbi.errors import IgnoreInvalidRequestError
from jbi.retry import RETRY_TIMEOUT_DAYS, retry_failed
from jbi.runner import execute_action


def mock_aiter_error():
    """Return a mock async-iterable whose iteration raises immediately."""
    _mock = mock.MagicMock()
    _mock.__aiter__.return_value = None
    _mock.__aiter__.side_effect = Exception("Throwing an exception")
    return _mock


async def aiter_sync(iterable):
    """Wrap a sync iterable as an async iterator, for queue.retrieve() stubs."""
    for i in iterable:
        yield i


@pytest.fixture
def mock_executor(mocker):
    # Stand-in for jbi.runner.execute_action with a matching signature.
    return mocker.MagicMock(spec=execute_action)


@pytest.mark.asyncio
async def test_retry_empty_list(caplog, mock_queue):
    """An empty queue produces no logs and all-zero metrics."""
    mock_queue.retrieve.return_value = {}

    metrics = await retry_failed(queue=mock_queue)
    mock_queue.retrieve.assert_called_once()
    assert len(caplog.messages) == 0
    assert metrics == {
        "bug_count": 0,
        "events_processed": 0,
        "events_skipped": 0,
        "events_failed": 0,
        "bugs_failed": 0,
    }


@pytest.mark.asyncio
async def test_retry_success(caplog, mock_queue, mock_executor, queue_item_factory):
    """A successfully retried event is executed, logged once, and marked done."""
    mock_queue.retrieve.return_value = {
        1: aiter_sync([queue_item_factory(payload__bug__id=1)])
    }

    metrics = await retry_failed(item_executor=mock_executor, queue=mock_queue)
    assert len(caplog.messages) == 1  # only one log been generated
    assert caplog.text.count("retry event") == 1
    mock_queue.retrieve.assert_called_once()
    mock_queue.done.assert_called_once()  # item should be marked as complete
    mock_executor.assert_called_once()  # item should have been processed
    assert metrics == {
        "bug_count": 1,
        "events_processed": 1,
        "events_skipped": 0,
        "events_failed": 0,
        "bugs_failed": 0,
    }


@pytest.mark.asyncio
async def test_retry_fail_and_skip(
    caplog, mock_queue, mock_executor, queue_item_factory
):
    """When an event fails, remaining events for that bug are skipped, not done."""
    mock_queue.retrieve.return_value = {
        1: aiter_sync(
            [
                queue_item_factory(payload__bug__id=1),
                queue_item_factory(payload__bug__id=1),
            ]
        )
    }

    mock_executor.side_effect = Exception("Throwing an exception")
    # size() is consulted for the "skipping N event(s)" log message.
    mock_queue.size.return_value = 3

    metrics = await retry_failed(item_executor=mock_executor, queue=mock_queue)
    mock_queue.retrieve.assert_called_once()
    mock_queue.done.assert_not_called()  # no items should have been marked as done
    assert caplog.text.count("failed to reprocess event") == 1
    assert caplog.text.count("skipping 2 event(s)") == 1
    assert caplog.text.count("removing expired event") == 0
    mock_executor.assert_called_once()  # only one item should have been attempted to be processed
    assert metrics == {
        "bug_count": 1,
        "events_processed": 0,
        "events_skipped": 2,
        "events_failed": 1,
        "bugs_failed": 0,
    }


@pytest.mark.asyncio
async def test_retry_remove_expired(
    caplog, mock_queue, mock_executor, queue_item_factory
):
    """Events older than RETRY_TIMEOUT_DAYS are dropped (done) without executing."""
    mock_queue.retrieve.return_value = {
        1: aiter_sync(
            [
                # First item is 1 second past the retry timeout, so it expires.
                queue_item_factory(
                    payload__bug__id=1,
                    payload__event__time=datetime.now(UTC)
                    - timedelta(days=int(RETRY_TIMEOUT_DAYS), seconds=1),
                ),
                queue_item_factory(payload__bug__id=1),
            ]
        )
    }

    metrics = await retry_failed(item_executor=mock_executor, queue=mock_queue)
    mock_queue.retrieve.assert_called_once()
    assert len(mock_queue.done.call_args_list) == 2, (
        "both items should have been marked as done"
    )
    assert caplog.text.count("failed to reprocess event") == 0
    assert caplog.text.count("removing expired event") == 1
    mock_executor.assert_called_once()  # only one item should have been attempted to be processed
    assert metrics == {
        "bug_count": 1,
        "events_processed": 1,
        "events_skipped": 1,
        "events_failed": 0,
        "bugs_failed": 0,
    }


@pytest.mark.asyncio
async def test_retry_remove_invalid(
    caplog, mock_queue, mock_executor, queue_item_factory
):
    """Events raising IgnoreInvalidRequestError are removed and counted as processed."""
    mock_queue.retrieve.return_value = {
        1: aiter_sync(queue_item_factory.create_batch(2))
    }
    # First call raises; second call succeeds (mock.DEFAULT = normal return).
    mock_executor.side_effect = [
        IgnoreInvalidRequestError("How did this get in here"),
        mock.DEFAULT,
    ]
    metrics = await retry_failed(item_executor=mock_executor, queue=mock_queue)
    assert len(mock_queue.done.call_args_list) == 2, (
        "both items should have been marked as done"
    )
    assert caplog.text.count("removing invalid event") == 1
    assert metrics == {
        "bug_count": 1,
        "events_processed": 2,
        "events_skipped": 0,
        "events_failed": 0,
        "bugs_failed": 0,
    }


@pytest.mark.asyncio
async def test_retry_bug_failed(caplog, mock_queue, mock_executor, queue_item_factory):
    """A bug whose event stream fails to iterate is counted in bugs_failed."""
    mock_queue.retrieve.return_value = {
        1: aiter_sync([queue_item_factory(payload__bug__id=1)]),
        2: mock_aiter_error(),  # iterating bug 2's events raises
    }

    metrics = await retry_failed(item_executor=mock_executor, queue=mock_queue)
    mock_queue.retrieve.assert_called_once()
    mock_queue.done.assert_called_once()  # one item should have been marked as done
    assert caplog.text.count("failed to reprocess event") == 0
    assert caplog.text.count("removing expired event") == 0
    assert caplog.text.count("failed to parse events for bug") == 1
    mock_executor.assert_called_once()  # only one item should have been attempted to be processed
    assert metrics == {
        "bug_count": 2,
        "events_processed": 1,
        "events_skipped": 0,
        "events_failed": 0,
        "bugs_failed": 1,
    }


@pytest.mark.asyncio
async def test_original_rid_is_put_in_retry_logs(
    caplog, authenticated_client, bugzilla_webhook_request, dl_queue, mocked_bugzilla
):
    """Retry log records carry the request id of the original failed webhook."""
    mocked_bugzilla.get_bug.side_effect = ValueError("Boom!")

    # Post an event that will fail.
    assert (await dl_queue.size()) == 0
    authenticated_client.post(
        "/bugzilla_webhook",
        data=bugzilla_webhook_request.model_dump_json(),
    )
    logged = [r for r in caplog.records if r.name == "jbi.runner"]
    original_rid = logged[0].rid
    assert original_rid, "rid was set in logs when webhook is received"
    assert (await dl_queue.size()) == 1, "an event was put in queue"

    # Reset log capture and retry the queue.
    caplog.clear()
    assert len(caplog.records) == 0
    metrics = await retry_failed(queue=dl_queue)

    # Inspect retry logs.
    assert metrics["events_failed"] == 1, "event failed again"
    assert (await dl_queue.size()) == 1, "an event still in queue"
    logged = [r for r in caplog.records if r.name == "jbi.runner"]
    assert logged[0].rid == original_rid, "logs of retry have original request id"
--------------------------------------------------------------------------------
/version.json:
--------------------------------------------------------------------------------
{
  "commit": "HEAD",
  "version": "v0.0.0",
  "image_tag": null,
  "source": "https://github.com/mozilla/jira-bugzilla-integration",
  "build": null
}
--------------------------------------------------------------------------------