├── .flake8
├── .github
│   ├── CODEOWNERS
│   ├── PULL_REQUEST_TEMPLATE.md
│   └── workflows
│       ├── docker-images-security-scan.yml
│       ├── docker-publish.yml
│       ├── python-app.yml
│       └── release.yaml
├── .gitignore
├── .pre-commit-config.yaml
├── CONTRIBUTING.md
├── Dockerfile
├── LICENSE
├── README.md
├── app
│   ├── __init__.py
│   ├── consumers
│   │   ├── __init__.py
│   │   ├── base_consumer.py
│   │   └── kafka_consumer.py
│   ├── control_the_payload_config.json
│   ├── core
│   │   ├── __init__.py
│   │   ├── config.py
│   │   └── consts.py
│   ├── invokers
│   │   ├── __init__.py
│   │   ├── base_invoker.py
│   │   └── webhook_invoker.py
│   ├── main.py
│   ├── port_client.py
│   ├── processors
│   │   ├── __init__.py
│   │   └── kafka
│   │       ├── __init__.py
│   │       └── kafka_to_webhook_processor.py
│   ├── streamers
│   │   ├── __init__.py
│   │   ├── base_streamer.py
│   │   ├── kafka
│   │   │   ├── __init__.py
│   │   │   └── kafka_streamer.py
│   │   └── streamer_factory.py
│   └── utils.py
├── mypy.ini
├── poetry.lock
├── pyproject.toml
├── scripts
│   ├── format.sh
│   ├── lint.sh
│   └── test.sh
└── tests
    ├── __init__.py
    └── unit
        ├── __init__.py
        ├── invokers
        │   └── test_webhook_invoker.py
        ├── processors
        │   ├── __init__.py
        │   └── kafka
        │       ├── __init__.py
        │       ├── conftest.py
        │       └── test_kafka_to_webhook_processor.py
        └── streamers
            ├── __init__.py
            └── kafka
                ├── __init__.py
                ├── conftest.py
                └── test_kafka_streamer.py
/.flake8:
--------------------------------------------------------------------------------
1 | [flake8]
2 | max-line-length = 88
3 | exclude = .git,__pycache__,__init__.py,.mypy_cache,.pytest_cache,venv
4 |
--------------------------------------------------------------------------------
/.github/CODEOWNERS:
--------------------------------------------------------------------------------
1 | # Port agent
2 | * @port-labs/ecosystem-team
3 |
--------------------------------------------------------------------------------
/.github/PULL_REQUEST_TEMPLATE.md:
--------------------------------------------------------------------------------
1 | # Description
2 |
3 | What -
4 | Why -
5 | How -
6 |
7 | ## Type of change
8 |
 9 | Please keep one option from the following and delete the rest:
10 |
11 | - [ ] Bug fix (non-breaking change which fixes an issue)
12 | - [ ] New feature (non-breaking change which adds functionality)
13 | - [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected)
14 | - [ ] Non-breaking change (fix of existing functionality that will not change current behavior)
15 | - [ ] Documentation (added/updated documentation)
16 |
17 |
--------------------------------------------------------------------------------
/.github/workflows/docker-images-security-scan.yml:
--------------------------------------------------------------------------------
1 | name: Scan docker images
2 |
3 | on:
4 | schedule:
5 | - cron: '0 0 * * *' # Every day at midnight
6 | workflow_dispatch:
7 |
8 | env:
9 | REGISTRY: ghcr.io
10 | IMAGE_NAME: ${{ github.repository }}
11 |
12 | jobs:
13 | build:
14 | runs-on: ubuntu-latest
15 | steps:
16 | - name: Checkout repository
17 | uses: actions/checkout@v3
18 |
19 | - name: Set up QEMU
20 | uses: docker/setup-qemu-action@v2
21 |
22 | - name: Setup Docker buildx
23 | uses: docker/setup-buildx-action@v2
24 | with:
25 | platforms: linux/amd64,linux/arm64
26 |
27 | - name: Log into registry ${{ env.REGISTRY }}
28 | if: github.event_name != 'pull_request'
29 | uses: docker/login-action@v2
30 | with:
31 | registry: ${{ env.REGISTRY }}
32 | username: ${{ github.actor }}
33 | password: ${{ secrets.GITHUB_TOKEN }}
34 |
35 | - name: Extract Docker metadata
36 | id: meta
37 | uses: docker/metadata-action@v4
38 | with:
39 | images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
40 |
41 | - name: Build and push Docker image
42 | id: build-and-push
43 | uses: docker/build-push-action@v3
44 | with:
45 | context: .
46 | platforms: linux/amd64
47 | push: false
48 | load: true
49 | tags: ${{ steps.meta.outputs.tags }}
50 | labels: ${{ steps.meta.outputs.labels }}
51 | cache-from: type=gha
52 | cache-to: type=gha,mode=max
53 |
54 | - name: Run Trivy vulnerability scanner
55 | uses: aquasecurity/trivy-action@0.29.0
56 | with:
57 | image-ref: ${{ steps.meta.outputs.tags }}
58 | ignore-unfixed: true
59 | vuln-type: 'os,library'
60 | severity: 'CRITICAL,HIGH'
61 | output: trivy-vulnerability.txt
62 |
63 | - name: Publish Trivy Output to Summary
64 | run: |
65 | if [[ -s trivy-vulnerability.txt ]]; then
66 | {
67 | echo "### Security Output"
68 |             echo "Click to expand"
69 | echo ""
70 | echo '```terraform'
71 | cat trivy-vulnerability.txt
72 | echo '```'
73 | echo " "
74 | } >> $GITHUB_STEP_SUMMARY
75 | fi
76 |
77 | - name: Set output for trivy results
78 | run: |
79 | cat trivy-vulnerability.txt
80 |           vulnerabilities_count=$(grep -i "total:" trivy-vulnerability.txt | awk '{print $2}')
81 |           echo "VULNERABILITIES_COUNT=$vulnerabilities_count" >> $GITHUB_ENV
82 |           echo "$vulnerabilities_count"
83 |
84 | - name: Send slack alert if vulnerabilities found
85 | if: ${{ env.VULNERABILITIES_COUNT != '0' }}
86 | uses: slackapi/slack-github-action@v2.0.0
87 | with:
88 | webhook-type: incoming-webhook
89 | payload: |
90 | {
91 | "text": "Vulnerabilities found in `${{ steps.meta.outputs.tags }}` image",
92 | "attachments": [
93 | {
94 | "text": "${{ steps.meta.outputs.tags }} image has vulnerabilities",
95 | "fields": [
96 | {
97 | "title": "Image",
98 | "value": "${{ steps.meta.outputs.tags }}",
99 | "short": true
100 | },
101 | {
102 | "title": "Vulnerabilities",
103 | "value": "Count: ${{ env.VULNERABILITIES_COUNT }}",
104 | "short": true
105 | },
106 | {
107 | "title": "link",
108 | "value": "https://github.com/port-labs/port-agent/actions/runs/${{ github.run_id }}",
109 | "short": true
110 | }
111 | ],
112 |
113 | "color": "#FF0000"
114 | }
115 | ]
116 | }
117 | env:
118 | SLACK_WEBHOOK_URL: ${{ secrets.SLACK_RND_ECOSYSTEM_DEPENDABOT_ALERTS_WEBHOOK_URL }}
--------------------------------------------------------------------------------
/.github/workflows/docker-publish.yml:
--------------------------------------------------------------------------------
1 | name: Docker
2 |
3 | on:
4 | push:
5 | tags: [ 'v*.*.*' ]
6 | pull_request:
7 | branches: [ "main" ]
8 |
9 | env:
10 | REGISTRY: ghcr.io
11 | IMAGE_NAME: ${{ github.repository }}
12 |
13 | jobs:
14 | build:
15 |
16 | runs-on: ubuntu-latest
17 | permissions:
18 | contents: read
19 | packages: write
20 |
21 | steps:
22 | - name: Checkout repository
23 | uses: actions/checkout@v3
24 |
25 | - name: Set up QEMU
26 | uses: docker/setup-qemu-action@v2
27 |
28 | - name: Setup Docker buildx
29 | uses: docker/setup-buildx-action@v2
30 | with:
31 | platforms: linux/amd64,linux/arm64
32 |
33 | - name: Log into registry ${{ env.REGISTRY }}
34 | if: github.event_name != 'pull_request'
35 | uses: docker/login-action@v2
36 | with:
37 | registry: ${{ env.REGISTRY }}
38 | username: ${{ github.actor }}
39 | password: ${{ secrets.GITHUB_TOKEN }}
40 |
41 | - name: Extract Docker metadata
42 | id: meta
43 | uses: docker/metadata-action@v4
44 | with:
45 | images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
46 |
47 | - name: Build and push Docker image
48 | id: build-and-push
49 | uses: docker/build-push-action@v3
50 | with:
51 | context: .
52 | platforms: linux/amd64,linux/arm64
53 | push: ${{ github.event_name != 'pull_request' }}
54 | tags: ${{ steps.meta.outputs.tags }}
55 | labels: ${{ steps.meta.outputs.labels }}
56 | cache-from: type=gha
57 | cache-to: type=gha,mode=max
58 |
--------------------------------------------------------------------------------
/.github/workflows/python-app.yml:
--------------------------------------------------------------------------------
1 | name: Python application
2 |
3 | on:
4 | push:
5 | branches: [ "main" ]
6 | pull_request:
7 | branches: [ "main" ]
8 |
9 | jobs:
10 | build:
11 | runs-on: ubuntu-latest
12 |
13 | steps:
14 | - uses: actions/checkout@v3
15 |
16 | - name: Set up Python 3.11
17 | uses: actions/setup-python@v3
18 | with:
19 | python-version: "3.11"
20 |
21 | # Install Poetry
22 | - name: Install Poetry
23 | run: |
24 | curl -sSL https://install.python-poetry.org | python3 -
25 | export PATH="$HOME/.local/bin:$PATH"
26 |
27 | # Install dependencies via Poetry
28 | - name: Install dependencies
29 | run: |
30 | export PATH="$HOME/.local/bin:$PATH"
31 | poetry install --no-interaction --no-ansi
32 |
33 | # Run Lint
34 | - name: Lint
35 | run: |
36 | export PATH="$HOME/.local/bin:$PATH"
37 | poetry run ./scripts/lint.sh
38 |
39 | # Run Tests with Pytest
40 | - name: Test with pytest
41 | run: |
42 | export PATH="$HOME/.local/bin:$PATH"
43 | poetry run ./scripts/test.sh
44 |
45 | - name: Create coverage report
46 | run: |
47 | cd app
48 | poetry run coverage html
49 | poetry run coverage json
50 |
51 | - name: Upload coverage report
52 | id: upload-coverage
53 | uses: actions/upload-artifact@v4
54 | with:
55 | name: coverage-report
56 | path: app/htmlcov
57 | - name: Set repo code coverage percentage by the percentage of statements covered in the tests
58 | id: set-stmts-coverage
59 | run: |
60 | stmts=$(jq '.totals.percent_covered | . * 100 | round | . / 100' app/coverage.json)
61 | echo "STMTS_COVERAGE=$stmts" >> $GITHUB_OUTPUT
62 | - name: Get PR_NUMBER
63 | id: pr-number
64 | run: |
65 |           if [ ! -z "${{ inputs.PR_NUMBER }}" ]; then
66 | echo "PR_NUMBER=${{ inputs.PR_NUMBER }}" >> $GITHUB_OUTPUT
67 |           elif [ ! -z "${{ github.event.pull_request.number }}" ]; then
68 | echo "PR_NUMBER=${{ github.event.pull_request.number }}" >> $GITHUB_OUTPUT
69 | else
70 | echo "PR_NUMBER=0" >> $GITHUB_OUTPUT
71 | fi
72 | - name: Comment PR with code coverage summary
73 | if: ${{ steps.pr-number.outputs.PR_NUMBER != 0 }}
74 | uses: actions/github-script@v7
75 | env:
76 | CODE_COVERAGE_ARTIFACT_URL: ${{ steps.upload-coverage.outputs.artifact-url }}
77 | PR_NUMBER: ${{ steps.pr-number.outputs.PR_NUMBER }}
78 | with:
79 | github-token: ${{ secrets.GITHUB_TOKEN }}
80 | script: |
81 | const output = `#### Code Coverage Artifact 📈: ${{ env.CODE_COVERAGE_ARTIFACT_URL }}
82 | #### Code Coverage Total Percentage: \`${{ steps.set-stmts-coverage.outputs.STMTS_COVERAGE }}%\``;
83 | github.rest.issues.createComment({
84 | issue_number: ${{ env.PR_NUMBER }},
85 | owner: context.repo.owner,
86 | repo: context.repo.repo,
87 | body: output
88 | })
89 | - name: Get current repo coverage percentage from Port
90 | uses: port-labs/port-github-action@v1
91 | id: get-current-coverage
92 | with:
93 | clientId: ${{ secrets.PORT_CLIENT_ID }}
94 | clientSecret: ${{ secrets.PORT_CLIENT_SECRET }}
95 | baseUrl: https://api.getport.io
96 | operation: GET
97 | identifier: port-agent
98 | blueprint: repository
99 | - name: Set current code coverage
100 | id: set-current-coverage
101 | run: echo "CURRENT_COVERAGE=${{ fromJson(steps.get-current-coverage.outputs.entity).properties.coverage_percent }}" >> $GITHUB_OUTPUT
102 | - name: Comment if Coverage Regression
103 | if: ${{ (fromJson(steps.set-stmts-coverage.outputs.STMTS_COVERAGE) < fromJson(steps.set-current-coverage.outputs.CURRENT_COVERAGE)) && (steps.pr-number.outputs.PR_NUMBER != 0) }}
104 | uses: actions/github-script@v7
105 | env:
106 | PR_NUMBER: ${{ steps.pr-number.outputs.PR_NUMBER }}
107 | CURRENT_COVERAGE: ${{ steps.set-current-coverage.outputs.CURRENT_COVERAGE }}
108 | NEW_COVERAGE: ${{ steps.set-stmts-coverage.outputs.STMTS_COVERAGE }}
109 | with:
110 | github-token: ${{ secrets.GITHUB_TOKEN }}
111 | script: |
112 | const output = `🚨 The new code coverage percentage is lower than the current one. Current coverage: \`${{ env.CURRENT_COVERAGE }}\`\n While the new one is: \`${{ env.NEW_COVERAGE }}\``;
113 | github.rest.issues.createComment({
114 | issue_number: ${{ env.PR_NUMBER }},
115 | owner: context.repo.owner,
116 | repo: context.repo.repo,
117 | body: output
118 | })
119 | - name: Calculate minimum required coverage with tolerance
120 | run: |
121 | STMT_COVERAGE=${{ steps.set-stmts-coverage.outputs.STMTS_COVERAGE }}
122 | THRESHOLD_DELTA=${{ vars.COVERAGE_THRESHOLD_DELTA }}
123 | MIN_REQUIRED=$(echo "$STMT_COVERAGE + $THRESHOLD_DELTA" | bc)
124 | echo "MIN_REQUIRED_COVERAGE=$MIN_REQUIRED" >> $GITHUB_ENV
125 | - name: Fail PR if current code coverage percentage is higher than the new one
126 | if: ${{ (fromJson(env.MIN_REQUIRED_COVERAGE) < fromJson(steps.set-current-coverage.outputs.CURRENT_COVERAGE)) && (vars.CODE_COVERAGE_ENFORCEMENT == 'true') }}
127 | run: exit 1
128 | - name: Update service code coverage percentage in Port
129 | if: ${{ (github.event_name == 'push') }}
130 | uses: port-labs/port-github-action@v1
131 | with:
132 | clientId: ${{ secrets.PORT_CLIENT_ID }}
133 | clientSecret: ${{ secrets.PORT_CLIENT_SECRET }}
134 | baseUrl: https://api.getport.io
135 | operation: UPSERT
136 | identifier: port-agent
137 | blueprint: repository
138 | properties: |-
139 | {
140 | "coverage_percent": "${{ steps.set-stmts-coverage.outputs.STMTS_COVERAGE }}"
141 | }
--------------------------------------------------------------------------------
/.github/workflows/release.yaml:
--------------------------------------------------------------------------------
1 | name: Update Chart AppVersion
2 |
3 | on:
4 | release:
5 | types: [published]
6 |
7 | jobs:
8 | update-chart:
9 | runs-on: ubuntu-latest
10 |
11 | steps:
12 | - name: Checkout code
13 | uses: actions/checkout@v3
14 | with:
15 | repository: port-labs/helm-charts
16 |
17 | - name: Set up Git for PR
18 | run: |
19 | git config --global user.name "${{ env.GIT_COMMITTER_NAME }}"
20 | git config --global user.email "${{ env.GIT_COMMITTER_EMAIL }}"
21 | env:
22 | GIT_COMMITTER_NAME: github-actions[bot]
23 | GIT_COMMITTER_EMAIL: github-actions[bot]@users.noreply.github.com
24 |
25 | - name: Update appVersion and chart version
26 | run: |
27 | # Update appVersion and version in Chart.yaml
28 | sed -i "s/^appVersion:.*/appVersion: '${{ github.event.release.tag_name }}'/" "${{ env.CHART_PATH }}"
29 | # Bump chart version
30 | new_chart_version=$(yq eval '.version | split(".") | .[0] + "." + .[1] + "." + (.[2] | tonumber + 1 | tostring)' "${{ env.CHART_PATH }}")
31 | sed -i "s/^version:.*/version: $new_chart_version/" "${{ env.CHART_PATH }}"
32 | git add ${{ env.CHART_PATH }}
33 | git commit -m "Update appVersion to ${{ github.event.release.tag_name }}"
34 | env:
35 | CHART_PATH: charts/port-agent/Chart.yaml
36 |
37 | - name: Create Pull Request
38 | uses: peter-evans/create-pull-request@v6
39 | with:
40 | token: ${{ secrets.HELM_CHART_WRITER }}
41 | commit-message: "Update port-agent helm chart appVersion to ${{ github.event.release.tag_name }}"
42 | title: "Update port-agent helm chart appVersion to ${{ github.event.release.tag_name }}"
43 | body: "This is an automated PR to update the port-agent helm chart appVersion in the Helm chart to ${{ github.event.release.tag_name }}."
44 | branch: update-app-version-${{ github.event.release.tag_name }}
45 | base: main
46 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | pip-wheel-metadata/
24 | share/python-wheels/
25 | *.egg-info/
26 | .installed.cfg
27 | *.egg
28 | MANIFEST
29 |
30 | # PyInstaller
31 | # Usually these files are written by a python script from a template
32 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
33 | *.manifest
34 | *.spec
35 |
36 | # Installer logs
37 | pip-log.txt
38 | pip-delete-this-directory.txt
39 |
40 | # Unit test / coverage reports
41 | htmlcov/
42 | .tox/
43 | .nox/
44 | .coverage
45 | .coverage.*
46 | .cache
47 | nosetests.xml
48 | coverage.xml
49 | *.cover
50 | *.py,cover
51 | .hypothesis/
52 | .pytest_cache/
53 |
54 | # Translations
55 | *.mo
56 | *.pot
57 |
58 | # Django stuff:
59 | *.log
60 | local_settings.py
61 | db.sqlite3
62 | db.sqlite3-journal
63 |
64 | # Flask stuff:
65 | instance/
66 | .webassets-cache
67 |
68 | # Scrapy stuff:
69 | .scrapy
70 |
71 | # Sphinx documentation
72 | docs/_build/
73 |
74 | # PyBuilder
75 | target/
76 |
77 | # Jupyter Notebook
78 | .ipynb_checkpoints
79 |
80 | # IPython
81 | profile_default/
82 | ipython_config.py
83 |
84 | # pyenv
85 | .python-version
86 |
87 | # pipenv
88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
91 | # install all needed dependencies.
92 | #Pipfile.lock
93 |
94 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
95 | __pypackages__/
96 |
97 | # Celery stuff
98 | celerybeat-schedule
99 | celerybeat.pid
100 |
101 | # SageMath parsed files
102 | *.sage.py
103 |
104 | # Environments
105 | .env
106 | .venv
107 | env/
108 | venv/
109 | ENV/
110 | env.bak/
111 | venv.bak/
112 |
113 | # Spyder project settings
114 | .spyderproject
115 | .spyproject
116 |
117 | # Rope project settings
118 | .ropeproject
119 |
120 | # mkdocs documentation
121 | /site
122 |
123 | # mypy
124 | .mypy_cache/
125 | .dmypy.json
126 | dmypy.json
127 |
128 | # Pyre type checker
129 | .pyre/
130 |
131 | .idea
132 |
133 | .vscode/
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | files: app|tests
2 | repos:
3 | - repo: https://github.com/PyCQA/autoflake
4 | rev: v1.7.7
5 | hooks:
6 | - id: autoflake
7 | args: ["--remove-all-unused-imports",
8 | "--remove-unused-variables",
9 | "--in-place",
10 | "--exclude=__init__.py"]
11 | - repo: https://github.com/psf/black
12 | rev: 22.10.0
13 | hooks:
14 | - id: black
15 | - repo: https://github.com/pycqa/isort
16 | rev: 5.10.1
17 | hooks:
18 | - id: isort
19 | args: ["--profile", "black"]
20 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing to Port Execution Agent
2 |
3 | Thank you for your interest in contributing to the Port Execution Agent! We welcome contributions and feedback from the community. To help you get started, please follow these guidelines.
4 |
5 | ## How to Contribute
6 |
7 | ### Reporting Issues
8 |
9 | If you encounter any bugs or issues with the execution agent, please report an issue via our [Slack Community](https://port-community.slack.com/archives/C07CB3MV63G). When reporting an issue, be sure to include:
10 | - A description of the problem, including any error messages.
11 | - Steps to reproduce the issue.
12 | - Any relevant logs or screenshots.
13 |
14 | ### Suggesting Enhancements
15 |
16 | If you have suggestions for new features or improvements, please open a feature request [here](https://roadmap.getport.io/ideas), or reach to us via our [Slack Community](https://port-community.slack.com/archives/C07CB3MV63G).
17 |
18 | Provide details on:
19 | - The enhancement or feature you are proposing.
20 | - How it would benefit users.
21 | - Any additional context or use cases.
22 |
23 | ### Submitting Code
24 |
25 | To submit code contributions:
26 | 1. Fork the repository on GitHub.
27 | 2. Create a new branch for your changes.
28 | 3. Make your changes and test them thoroughly.
29 | 4. Ensure your code adheres to our coding style and guidelines.
30 | 5. Open a pull request against the `main` branch of the original repository. Include a clear description of your changes and any relevant context.
31 |
32 | ## Coding Guidelines
33 |
34 | - Write clear and concise commit messages.
35 | - Ensure your changes do not break existing functionality.
36 | - Add or update tests as necessary.
37 |
38 | ## Debugging
39 |
40 | ### Running Against Local Port Instance
41 |
42 | When debugging the Port Execution Agent locally against a local instance of Port, follow these steps:
43 |
44 | 1. **Set Up Local Environment:**
45 | - Ensure you have a local instance of Port running. This will act as your development and testing environment.
46 |
47 | 2. **Configure Environment Variables:**
48 | - Create or update your `.env` file to include the following environment variable:
49 | ```env
50 | USING_LOCAL_PORT_INSTANCE=True
51 | ```
52 |
53 | 3. **Kafka Authentication:**
54 |    - When `USING_LOCAL_PORT_INSTANCE` is set to `True`, the execution agent will not attempt to pull your local organization's Kafka credentials.
55 |
56 | 4. **Running the Agent Locally:**
57 | - Start the execution agent as you normally would.
58 |
59 | ### General Troubleshooting (Optional)
60 |
61 | For debugging the Port Execution Agent in other environments, consider the following tips:
62 |
63 | 1. **Check Authentication and Configuration:**
64 | - Ensure that all necessary authentication details are correctly configured for Kafka and any other external services.
65 |
66 | 2. **Review Logs:**
67 | - Examine the logs for any error messages or issues that might provide insights into problems.
68 |
69 | 3. **Verify Endpoints and Connectivity:**
70 | - Ensure that all endpoints are correctly specified and accessible.
71 |
72 | 4. **Update Dependencies:**
73 | - Check that all dependencies are up-to-date and compatible with your environment.
74 |
75 | 5. **Consult Documentation:**
76 | - Refer to our [Documentation](https://docs.getport.io/actions-and-automations/setup-backend/webhook/port-execution-agent).
77 |
78 | ## Contact
79 |
80 | For any questions or additional support, please contact us via Intercom or check our [Slack Community](https://port-community.slack.com/archives/C07CB3MV63G).
81 |
82 | Thank you for contributing to the Port Execution Agent!
83 |
84 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.11-alpine AS base
2 |
3 | ENV LIBRDKAFKA_VERSION=1.9.2
4 |
5 | # Install system dependencies and libraries
6 | RUN apk add --no-cache \
7 | gcc \
8 | musl-dev \
9 | librdkafka-dev \
10 | build-base \
11 | bash \
12 | oniguruma-dev \
13 | make \
14 | autoconf \
15 | automake \
16 | libtool \
17 | curl \
18 | libffi-dev # Added libffi-dev for compatibility with some packages
19 |
20 | # Install Poetry
21 | RUN curl -sSL https://install.python-poetry.org | POETRY_VERSION=1.8.3 python3 -
22 |
23 | # Ensure Poetry's bin directory is in PATH
24 | ENV PATH="/root/.local/bin:$PATH"
25 |
26 | WORKDIR /app
27 |
28 | # Copy pyproject.toml and poetry.lock to the container
29 | COPY pyproject.toml poetry.lock ./
30 |
31 | RUN poetry config virtualenvs.in-project true
32 |
33 | # Install Python dependencies using Poetry
34 | RUN poetry install --without dev --no-ansi
35 |
36 | FROM python:3.11-alpine AS prod
37 |
38 | ENV LIBRDKAFKA_VERSION=1.9.2
39 |
40 | # Install only runtime dependencies
41 | RUN apk add --no-cache \
42 | librdkafka-dev \
43 | bash \
44 | oniguruma-dev
45 |
46 | WORKDIR /app
47 |
48 | # Copy dependencies from the build stage
49 | COPY --from=base /app /app
50 |
51 | # Copy the application code
52 | COPY ./app/. .
53 |
54 | # Clean up old setuptools
55 | RUN pip uninstall -y setuptools || true
56 |
57 | # Run the application
58 | CMD ["sh", "-c", "update-ca-certificates && /app/.venv/bin/python main.py"]
59 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright [yyyy] [name of copyright owner]
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Port Agent
2 |
3 | ## [Documentation](https://docs.getport.io/create-self-service-experiences/setup-backend/port-execution-agent/)
4 |
5 | ## Control the payload of your self-service actions
6 |
7 | Some third-party applications that you want to integrate with may not accept the raw payload sent by Port's
8 | self-service actions. The Port agent allows you to control the payload that is sent to each third-party application.
9 |
10 | You can alter the requests sent to your third-party application by providing a payload mapping config file when deploying the
11 | Port-agent container.
12 |
13 | ### Control the payload mapping
14 |
15 | The payload mapping file is a JSON file that specifies how to transform the request sent to the Port agent to the
16 | request that is sent to the third-party application.
17 |
18 | The payload mapping file is mounted to the Port agent as a volume. The path to the payload mapping file is set in the
19 | `CONTROL_THE_PAYLOAD_CONFIG_PATH` environment variable. By default, the Port agent will look for the payload mapping
20 | file at `~/control_the_payload_config.json`.
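
For illustration only, here is a minimal sketch of running the agent container with a custom mapping file mounted. The local file name, the image tag, and the omission of the other required environment variables are assumptions; the Helm installation examples below show the recommended setup:

```sh
# Mount a local mapping file into the container and point the agent at it
docker run \
  -v "$(pwd)/invocations.json:/app/control_the_payload_config.json:ro" \
  -e CONTROL_THE_PAYLOAD_CONFIG_PATH=/app/control_the_payload_config.json \
  ghcr.io/port-labs/port-agent:latest
```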
21 |
22 | The payload mapping file is a JSON file that contains a list of mappings. Each mapping contains the request fields that
23 | will be overridden and sent to the 3rd party application.
24 |
25 | You can see examples showing how to deploy the Port agent with different mapping configurations for various common use cases below.
26 |
27 | Each of the mapping fields can be constructed by JQ expressions. The JQ expression will be evaluated against the
28 | original payload that is sent to the port agent from Port and the result will be sent to the 3rd party application.
29 |
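To sanity-check an expression before deploying, you can evaluate it locally with the `jq` CLI against a sample event saved to a file (the file name and the chosen expression here are illustrative):

```sh
# Evaluate a candidate mapping expression against a saved sample Port event
jq '.payload.entity.properties.workspace_id' sample_event.json
```
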
30 | Here is the mapping file schema:
31 |
32 | ```
33 | [ # Can contain multiple mappings. The agent uses the first mapping it finds with enabled = true (allowing you to apply one mapping file to multiple actions at once)
34 | {
35 | "enabled": bool || JQ,
36 | "url": JQ, # Optional. default is the incoming url from port
37 | "method": JQ, # Optional. default is 'POST'. Should return one of the following string values 'POST' / 'PUT' / 'DELETE' / 'GET'
38 | "headers": dict[str, JQ], # Optional. default is {}
39 | "body": ".body", # Optional. default is the whole payload incoming from Port.
40 |     "query": dict[str, JQ], # Optional. default is {}
41 |     "report": { # Optional. Used to report the run status back to Port right after the request is sent to the 3rd party application
42 |       "status": JQ, # Optional. Should return the desired run status
43 |       "link": JQ, # Optional. Should return the desired link or a list of links
44 |       "summary": JQ, # Optional. Should return the desired summary
45 |       "externalRunId": JQ # Optional. Should return the desired external run id
46 | },
47 | "fieldsToDecryptPaths": ["dot.separated.path"] # Optional. List of dot-separated string paths to fields to decrypt by PORT_CLIENT_SECRET
48 | # For top-level fields, use just the field name (e.g., "api_key"). For nested fields, use the full dot-separated path (e.g., "payload.entity.properties.api_key").
49 | }
50 | ]
51 | ```
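
For example, a minimal mapping entry (all values here are illustrative) that matches a single action, sets a header, and decrypts one nested field could look like:

```json
[
  {
    "enabled": ".action == \"my_action\"",
    "method": "\"POST\"",
    "headers": {
      "Content-Type": "\"application/json\""
    },
    "fieldsToDecryptPaths": ["payload.properties.api_key"]
  }
]
```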
52 |
53 | **The body can be partially constructed from literal JSON as follows:**
54 |
55 | ```json
56 | {
57 | "body": {
58 | "key": 2,
59 | "key2": {
60 | "key3": ".im.a.jq.expression",
61 | "key4": "\"im a string\""
62 | }
63 | }
64 | }
65 | ```
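
Assuming, for illustration, that the incoming payload contains `{"im": {"a": {"jq": {"expression": 42}}}}`, the body sent to the third-party application would be:

```json
{
  "key": 2,
  "key2": {
    "key3": 42,
    "key4": "im a string"
  }
}
```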
66 |
67 | ### The incoming message to base your mapping on
68 |
69 |
70 | Example for incoming event
71 |
72 | ```json
73 | {
74 | "action": "action_identifier",
75 | "resourceType": "run",
76 | "status": "TRIGGERED",
77 | "trigger": {
78 | "by": {
79 | "orgId": "org_XXX",
80 | "userId": "auth0|XXXXX",
81 | "user": {
82 | "email": "executor@mail.com",
83 | "firstName": "user",
84 | "lastName": "userLastName",
85 | "phoneNumber": "0909090909090909",
86 | "picture": "https://s.gravatar.com/avatar/dd1cf547c8b950ce6966c050234ac997?s=480&r=pg&d=https%3A%2F%2Fcdn.auth0.com%2Favatars%2Fga.png",
87 | "providers": [
88 | "port"
89 | ],
90 | "status": "ACTIVE",
91 | "id": "auth0|XXXXX",
92 | "createdAt": "2022-12-08T16:34:20.735Z",
93 | "updatedAt": "2023-11-13T15:11:38.243Z"
94 | }
95 | },
96 | "origin": "UI",
97 | "at": "2023-11-13T15:20:16.641Z"
98 | },
99 | "context": {
100 | "entity": "e_iQfaF14FJln6GVBn",
101 | "blueprint": "kubecostCloudAllocation",
102 | "runId": "r_HardNzG6kzc9vWOQ"
103 | },
104 | "payload": {
105 | "entity": {
106 | "identifier": "e_iQfaF14FJln6GVBn",
107 | "title": "myEntity",
108 | "icon": "Port",
109 | "blueprint": "myBlueprint",
110 | "team": [],
111 | "properties": {
112 | },
113 | "relations": {},
114 | "createdAt": "2023-11-13T15:24:46.880Z",
115 | "createdBy": "auth0|XXXXX",
116 | "updatedAt": "2023-11-13T15:24:46.880Z",
117 | "updatedBy": "auth0|XXXXX"
118 | },
119 | "action": {
120 | "invocationMethod": {
121 | "type": "WEBHOOK",
122 | "agent": true,
123 | "synchronized": false,
124 | "method": "POST",
125 | "url": "https://myGitlabHost.com"
126 | },
127 | "trigger": "DAY-2"
128 | },
129 | "properties": {
130 | },
131 | "censoredProperties": []
132 | }
133 | }
134 | ```
135 |
136 |
137 |
138 |
139 | ### The report mapping
140 |
141 | After the request is sent to the 3rd party application, the Port agent can report the run status back to Port.
142 | The report mapping is used to construct the report that will be sent to Port.
143 |
144 | The report mapping can use the following fields:
145 |
146 | - `.body` - The incoming message, as described [above](#the-incoming-message-to-base-your-mapping-on)
147 | - `.request` - The request that was calculated using the payload mapping and sent to the 3rd party application
148 | - `.response` - The response that was received from the 3rd party application
149 |
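For instance, a minimal report block (the expressions are illustrative) that derives the run status and summary from the third-party response could look like:

```json
{
  "report": {
    "status": "if .response.statusCode == 200 then \"SUCCESS\" else \"FAILURE\" end",
    "summary": "\"Webhook returned status code: \" + (.response.statusCode | tostring)"
  }
}
```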
150 |
151 | ### Examples
152 |
153 | #### Terraform Cloud
154 |
155 | Create the following blueprint, action and mapping to trigger a Terraform Cloud run.
156 |
157 |
158 | Blueprint
159 |
160 | ```json
161 | {
162 | "identifier": "terraform_cloud_workspace",
163 | "title": "Terraform Cloud Workspace",
164 | "icon": "Terraform",
165 | "schema": {
166 | "properties": {
167 | "workspace_id": {
168 | "title": "Workspace Id",
169 | "type": "string"
170 | },
171 | "organization_name": {
172 | "title": "Organization Name",
173 | "type": "string"
174 | },
175 | "workspace_name": {
176 | "title": "Workspace Name",
177 | "type": "string"
178 | }
179 | },
180 | "required": [
181 | "workspace_id",
182 | "organization_name",
183 | "workspace_name"
184 | ]
185 | },
186 | "mirrorProperties": {},
187 | "calculationProperties": {},
188 | "relations": {}
189 | }
190 | ```
191 |
192 |
193 |
194 | Action
195 |
196 | ```json
197 | [
198 | {
199 | "identifier": "trigger_tf_run",
200 | "title": "Trigger TF Cloud run",
201 | "icon": "Terraform",
202 | "userInputs": {
203 | "properties": {},
204 | "required": [],
205 | "order": []
206 | },
207 | "invocationMethod": {
208 | "type": "WEBHOOK",
209 | "agent": true,
210 | "synchronized": false,
211 | "method": "POST",
212 | "url": "https://app.terraform.io/api/v2/runs/"
213 | },
214 | "trigger": "DAY-2",
215 | "requiredApproval": false
216 | }
217 | ]
218 | ```
219 |
220 |
221 |
222 | Mapping - (Should be saved as `invocations.json`)
223 |
224 | ```json
225 | [
226 | {
227 | "enabled": ".action == \"trigger_tf_run\"",
228 | "headers": {
229 | "Authorization": "\"Bearer \" + env.TF_TOKEN",
230 | "Content-Type": "\"application/vnd.api+json\""
231 | },
232 | "body": {
233 | "data": {
234 | "attributes": {
235 | "is-destroy": false,
236 | "message": "\"Triggered via Port\"",
237 | "variables": ".payload.properties | to_entries | map({key: .key, value: .value})"
238 | },
239 | "type": "\"runs\"",
240 | "relationships": {
241 | "workspace": {
242 | "data": {
243 | "type": "\"workspaces\"",
244 | "id": ".payload.entity.properties.workspace_id"
245 | }
246 | }
247 | }
248 | }
249 | },
250 | "report": {
251 | "status": "if .response.statusCode == 201 then \"SUCCESS\" else \"FAILURE\" end",
252 | "link": "\"https://app.terraform.io/app/\" + .body.payload.entity.properties.organization_name + \"/workspaces/\" + .body.payload.entity.properties.workspace_name + \"/runs/\" + .response.json.data.id",
253 | "externalRunId": ".response.json.data.id"
254 | }
255 | }
256 | ]
257 | ```
258 |
259 |
260 | **Port agent installation for Terraform cloud example**:
261 |
262 | ```sh
263 | helm repo add port-labs https://port-labs.github.io/helm-charts
264 |
265 | helm repo update
266 |
267 | helm install my-port-agent port-labs/port-agent \
268 | --create-namespace --namespace port-agent \
269 | --set env.secret.PORT_CLIENT_ID=YOUR_PORT_CLIENT_ID \
270 | --set env.secret.PORT_CLIENT_SECRET=YOUR_PORT_CLIENT_SECRET \
271 | --set env.normal.PORT_ORG_ID=YOUR_ORG_ID \
272 | --set env.normal.KAFKA_CONSUMER_GROUP_ID=YOUR_KAFKA_CONSUMER_GROUP \
273 | --set env.normal.KAFKA_CONSUMER_BROKERS=PORT_KAFKA_BROKERS \
274 | --set env.normal.STREAMER_NAME=KAFKA \
275 | --set env.normal.KAFKA_CONSUMER_AUTHENTICATION_MECHANISM=SCRAM-SHA-512 \
276 | --set env.normal.KAFKA_CONSUMER_AUTO_OFFSET_RESET=earliest \
277 | --set env.normal.KAFKA_CONSUMER_SECURITY_PROTOCOL=SASL_SSL \
278 | --set env.secret.TF_TOKEN=YOU_TERRAFORM_CLOUD_TOKEN \
279 | --set-file controlThePayloadConfig=./invocations.json
280 | ```
281 |
282 |
283 |
284 | #### CircleCI
285 |
286 | Create the following blueprint, action and mapping to trigger a CircleCI pipeline.
287 |
288 |
289 | Blueprint
290 |
291 | ```json
292 | {
293 | "identifier": "circle_ci_project",
294 | "title": "CircleCI Project",
295 | "icon": "CircleCI",
296 | "schema": {
297 | "properties": {
298 | "project_slug": {
299 | "title": "Slug",
300 | "type": "string"
301 | }
302 | },
303 | "required": [
304 | "project_slug"
305 | ]
306 | },
307 | "mirrorProperties": {},
308 | "calculationProperties": {},
309 | "relations": {}
310 | }
311 | ```
312 |
313 |
314 |
315 | Action
316 |
317 | ```json
318 | [
319 | {
320 | "identifier": "trigger_circle_ci_pipeline",
321 | "title": "Trigger CircleCI pipeline",
322 | "icon": "CircleCI",
323 | "userInputs": {
324 | "properties": {},
325 | "required": [],
326 | "order": []
327 | },
328 | "invocationMethod": {
329 | "type": "WEBHOOK",
330 | "agent": true,
331 | "synchronized": false,
332 | "method": "POST",
333 | "url": "https://circleci.com"
334 | },
335 | "trigger": "DAY-2",
336 | "requiredApproval": false
337 | }
338 | ]
339 | ```
340 |
341 |
342 |
343 | Mapping - (Should be saved as `invocations.json`)
344 |
345 | ```json
346 | [{
347 | "enabled": ".action == \"trigger_circle_ci_pipeline\"",
348 | "url": "(env.CIRCLE_CI_URL // \"https://circleci.com\") as $baseUrl | .payload.entity.properties.project_slug | @uri as $path | $baseUrl + \"/api/v2/project/\" + $path + \"/pipeline\"",
349 | "headers": {
350 | "Circle-Token": "env.CIRCLE_CI_TOKEN"
351 | },
352 | "body": {
353 | "branch": ".payload.properties.branch // \"main\"",
354 | "parameters": ".payload.action.invocationMethod as $invocationMethod | .payload.properties | to_entries | map({(.key): (.value | tostring)}) | add | if $invocationMethod.omitUserInputs then {} else . end"
355 | }
356 | }]
357 | ```
358 |
359 |
360 | **Port agent installation for CircleCI example**:
361 |
362 | ```sh
363 | helm repo add port-labs https://port-labs.github.io/helm-charts
364 |
365 | helm repo update
366 |
367 | helm install my-port-agent port-labs/port-agent \
368 | --create-namespace --namespace port-agent \
369 | --set env.secret.PORT_CLIENT_ID=YOUR_PORT_CLIENT_ID \
370 | --set env.secret.PORT_CLIENT_SECRET=YOUR_PORT_CLIENT_SECRET \
371 | --set env.normal.PORT_ORG_ID=YOUR_ORG_ID \
372 | --set env.normal.KAFKA_CONSUMER_GROUP_ID=YOUR_KAFKA_CONSUMER_GROUP \
373 | --set env.normal.KAFKA_CONSUMER_BROKERS=PORT_KAFKA_BROKERS \
374 | --set env.normal.STREAMER_NAME=KAFKA \
375 | --set env.normal.KAFKA_CONSUMER_AUTHENTICATION_MECHANISM=SCRAM-SHA-512 \
376 | --set env.normal.KAFKA_CONSUMER_AUTO_OFFSET_RESET=earliest \
377 | --set env.normal.KAFKA_CONSUMER_SECURITY_PROTOCOL=SASL_SSL \
378 | --set env.secret.CIRCLE_CI_TOKEN=YOUR_CIRCLE_CI_PERSONAL_TOKEN \
379 | --set-file controlThePayloadConfig=./invocations.json
380 | ```
381 |
382 | #### Windmill - Async execution
383 |
384 | This example helps internal developer teams to trigger a [Windmill](https://www.windmill.dev) job using Port's self-service actions. In particular, you will create a blueprint for `windmillJob` that will be connected to a backend action. You will then add some configuration files (`invocations.json`) to control the payload and trigger your Windmill job directly from Port using the async execution method.
385 |
386 |
387 | Create the following blueprint, action and mapping to trigger a Windmill job.
388 |
389 |
390 | Blueprint
391 |
392 | ```json
393 | {
394 | "identifier": "windmillJob",
395 | "description": "This blueprint represents a windmill job in our software catalog",
396 | "title": "Windmill",
397 | "icon": "DefaultProperty",
398 | "schema": {
399 | "properties": {
400 | "workspace": {
401 | "type": "string",
402 | "title": "Workspace"
403 | },
404 | "path": {
405 | "type": "string",
406 | "title": "File Path"
407 | },
408 |       "triggeredBy": {
409 | "type": "string",
410 | "title": "Triggered By",
411 | "format": "user"
412 | },
413 | "createdAt": {
414 | "type": "string",
415 | "format": "date-time",
416 | "title": "Created At"
417 | }
418 | },
419 | "required": []
420 | },
421 | "mirrorProperties": {},
422 | "calculationProperties": {},
423 | "relations": {}
424 | }
425 | ```
426 |
427 |
428 |
429 | Action
430 |
431 | ```json
432 | [
433 | {
434 | "identifier":"trigger_windmill_pipeline",
435 | "title":"Trigger Windmill Pipeline",
436 | "icon":"DefaultProperty",
437 | "userInputs":{
438 | "properties":{
439 | "workspace":{
440 | "title":"Workspace",
441 | "description":"The Workspace identifier",
442 | "type":"string"
443 | },
444 | "file_path":{
445 | "title":"File Path",
446 | "description":"The path of the job script in the workspace, including the /u and /f prefix",
447 | "type":"string"
448 | },
449 | "job_data":{
450 | "title":"Job Data",
451 | "description":"The data to be passed to the job in order to execute successfully",
452 | "type":"object"
453 | }
454 | },
455 | "required":[
456 | "workspace",
457 | "file_path",
458 | "job_data"
459 | ],
460 | "order":[
461 | "workspace",
462 | "file_path",
463 | "job_data"
464 | ]
465 | },
466 | "invocationMethod":{
467 | "type":"WEBHOOK",
468 | "agent":true,
469 | "synchronized":false,
470 | "method":"POST",
471 | "url":"https://app.windmill.dev/api"
472 | },
473 | "trigger":"CREATE",
474 | "requiredApproval":false
475 | }
476 | ]
477 | ```
478 |
479 |
480 |
481 | Mapping - (Should be saved as `invocations.json`)
482 |
483 | ```json
484 | [
485 | {
486 | "enabled": ".action == \"trigger_windmill_pipeline\"",
487 | "url": "\"https://app.windmill.dev\" as $baseUrl | .payload.properties.workspace as $workspace | .payload.properties.file_path as $path | $baseUrl + \"/api/w/\" + $workspace + \"/jobs/run/f/\" + $path",
488 | "headers": {
489 | "Authorization": "\"Bearer \" + env.WINDMILL_TOKEN",
490 | "Content-Type": "\"application/json\""
491 | },
492 | "body": ".payload.properties.job_data",
493 | "report": {
494 | "status": "if .response.statusCode == 201 and (.response.text != null) then \"SUCCESS\" else \"FAILURE\" end",
495 | "link": "\"https://app.windmill.dev/api/w/\" + .body.payload.properties.workspace + \"/jobs/run/f/\" + .body.payload.properties.file_path",
496 | "externalRunId": ".response.text"
497 | }
498 | }
499 | ]
500 | ```
501 |
502 |
503 | **Port agent installation for Windmill example**:
504 |
505 | ```sh
506 | helm repo add port-labs https://port-labs.github.io/helm-charts
507 |
508 | helm repo update
509 |
510 | helm install my-port-agent port-labs/port-agent \
511 | --create-namespace --namespace port-agent \
512 | --set env.normal.PORT_ORG_ID=YOUR_ORG_ID \
513 | --set env.normal.KAFKA_CONSUMER_GROUP_ID=YOUR_KAFKA_CONSUMER_GROUP \
514 | --set env.secret.KAFKA_CONSUMER_USERNAME=YOUR_KAFKA_USERNAME \
515 |     --set env.secret.KAFKA_CONSUMER_PASSWORD=YOUR_KAFKA_PASSWORD \
516 | --set env.normal.KAFKA_CONSUMER_BROKERS=PORT_KAFKA_BROKERS \
517 | --set env.normal.STREAMER_NAME=KAFKA \
518 | --set env.normal.KAFKA_CONSUMER_AUTHENTICATION_MECHANISM=SCRAM-SHA-512 \
519 | --set env.normal.KAFKA_CONSUMER_AUTO_OFFSET_RESET=earliest \
520 | --set env.normal.KAFKA_CONSUMER_SECURITY_PROTOCOL=SASL_SSL \
521 | --set env.secret.WINDMILL_TOKEN=YOUR_WINDMILL_TOKEN \
522 | --set-file controlThePayloadConfig=./invocations.json
523 | ```
524 | #### Run action
526 | Run this action with the following example input:
526 |
527 | ```json showLineNumbers
528 | {
529 | "workspace": "demo",
530 | "file_path": "f/examples/ban_user_example",
531 | "job_data": {
532 | "value": "batman",
533 | "reason": "Gotham city in need of superhero",
534 | "database": "$res:f/examples/demo_windmillshowcases",
535 | "username": "Jack",
536 | "slack_channel": "bans"
537 | }
538 | }
539 | ```
540 |
541 | #### Windmill - Sync execution
542 |
543 | This example helps internal developer teams to trigger a [Windmill](https://www.windmill.dev) job using Port's self-service actions. In particular, you will create a blueprint for `windmillJob` that will be connected to a backend action. You will then add some configuration files (`invocations.json`) to control the payload and trigger your Windmill job directly from Port using the sync execution method.
544 |
545 |
546 | Create the following blueprint, action and mapping to trigger a Windmill job.
547 |
548 |
549 | Blueprint
550 |
551 | ```json
552 | {
553 | "identifier": "windmillJob",
554 | "description": "This blueprint represents a windmill job in our software catalog",
555 | "title": "Windmill",
556 | "icon": "DefaultProperty",
557 | "schema": {
558 | "properties": {
559 | "workspace": {
560 | "type": "string",
561 | "title": "Workspace"
562 | },
563 | "path": {
564 | "type": "string",
565 | "title": "File Path"
566 | },
567 |       "triggeredBy": {
568 | "type": "string",
569 | "title": "Triggered By",
570 | "format": "user"
571 | },
572 | "createdAt": {
573 | "type": "string",
574 | "format": "date-time",
575 | "title": "Created At"
576 | }
577 | },
578 | "required": []
579 | },
580 | "mirrorProperties": {},
581 | "calculationProperties": {},
582 | "relations": {}
583 | }
584 | ```
585 |
586 |
587 |
588 | Action
589 |
590 | ```json
591 | [
592 | {
593 | "identifier":"trigger_windmill_pipeline",
594 | "title":"Trigger Windmill Pipeline",
595 | "icon":"DefaultProperty",
596 | "userInputs":{
597 | "properties":{
598 | "workspace":{
599 | "title":"Workspace",
600 | "description":"The Workspace identifier",
601 | "type":"string"
602 | },
603 | "file_path":{
604 | "title":"File Path",
605 | "description":"The path of the job script in the workspace, including the /u and /f prefix",
606 | "type":"string"
607 | },
608 | "job_data":{
609 | "title":"Job Data",
610 | "description":"The data to be passed to the job in order to execute successfully",
611 | "type":"object"
612 | }
613 | },
614 | "required":[
615 | "workspace",
616 | "file_path",
617 | "job_data"
618 | ],
619 | "order":[
620 | "workspace",
621 | "file_path",
622 | "job_data"
623 | ]
624 | },
625 | "invocationMethod":{
626 | "type":"WEBHOOK",
627 | "agent":true,
628 | "synchronized":false,
629 | "method":"POST",
630 | "url":"https://app.windmill.dev/api"
631 | },
632 | "trigger":"CREATE",
633 | "requiredApproval":false
634 | }
635 | ]
636 | ```
637 |
638 |
639 |
640 | Mapping - (Should be saved as `invocations.json`)
641 |
642 | ```json
643 | [
644 | {
645 | "enabled": ".action == \"trigger_windmill_pipeline\"",
646 | "url": "\"https://app.windmill.dev\" as $baseUrl | .payload.properties.workspace as $workspace | .payload.properties.file_path as $path | $baseUrl + \"/api/w/\" + $workspace + \"/jobs/run_wait_result/f/\" + $path",
647 | "headers": {
648 | "Authorization": "\"Bearer \" + env.WINDMILL_TOKEN",
649 | "Content-Type": "\"application/json\""
650 | },
651 | "body": ".payload.properties.job_data",
652 | "report": {
653 | "status": "if .response.statusCode == 201 and (.response.json.error | not) then \"SUCCESS\" else \"FAILURE\" end",
654 | "link": "\"https://app.windmill.dev/api/w/\" + .body.payload.properties.workspace + \"/jobs/run_wait_result/f/\" + .body.payload.properties.file_path"
655 | }
656 | }
657 | ]
658 | ```
659 |
660 |
661 | **Port agent installation for Windmill example**:
662 |
663 | ```sh
664 | helm repo add port-labs https://port-labs.github.io/helm-charts
665 |
666 | helm repo update
667 |
668 | helm install my-port-agent port-labs/port-agent \
669 | --create-namespace --namespace port-agent \
670 | --set env.normal.PORT_ORG_ID=YOUR_ORG_ID \
671 | --set env.normal.KAFKA_CONSUMER_GROUP_ID=YOUR_KAFKA_CONSUMER_GROUP \
672 | --set env.secret.KAFKA_CONSUMER_USERNAME=YOUR_KAFKA_USERNAME \
673 |     --set env.secret.KAFKA_CONSUMER_PASSWORD=YOUR_KAFKA_PASSWORD \
674 | --set env.normal.KAFKA_CONSUMER_BROKERS=PORT_KAFKA_BROKERS \
675 | --set env.normal.STREAMER_NAME=KAFKA \
676 | --set env.normal.KAFKA_CONSUMER_AUTHENTICATION_MECHANISM=SCRAM-SHA-512 \
677 | --set env.normal.KAFKA_CONSUMER_AUTO_OFFSET_RESET=earliest \
678 | --set env.normal.KAFKA_CONSUMER_SECURITY_PROTOCOL=SASL_SSL \
679 | --set env.secret.WINDMILL_TOKEN=YOUR_WINDMILL_TOKEN \
680 | --set-file controlThePayloadConfig=./invocations.json
681 | ```
682 | #### Run action
684 | Run this action with the following example input:
684 |
685 | ```json showLineNumbers
686 | {
687 | "workspace": "demo",
688 | "file_path": "f/examples/ban_user_example",
689 | "job_data": {
690 | "value": "batman",
691 | "reason": "Gotham city in need of superhero",
692 | "database": "$res:f/examples/demo_windmillshowcases",
693 | "username": "Jack",
694 | "slack_channel": "bans"
695 | }
696 | }
697 | ```
698 | #### Opsgenie Example
699 |
700 | This example helps internal developer teams trigger [Opsgenie](https://www.atlassian.com/software/opsgenie) incidents using Port's self-service actions. In particular, you will create a blueprint for `opsgenieIncident` that will be connected to a backend action. You will then add a configuration file (`invocations.json`) to control the payload and trigger your Opsgenie incident directly from Port using the sync execution method.
701 |
702 |
703 | Create the following blueprint, action, and mapping to trigger an Opsgenie incident.
704 |
705 |
706 | Blueprint
707 |
708 | ```json
709 | {
710 | "identifier": "opsgenieIncident",
711 | "description": "This blueprint represent an incident in opsgenie",
712 | "title": "OpsGenie Incident",
713 | "icon": "OpsGenie",
714 | "schema": {
715 | "properties": {
716 | "message": {
717 | "title": "Message",
718 | "type": "string"
719 | },
720 | "description": {
721 | "title": "Description",
722 | "type": "string"
723 | },
724 | "details":{
725 | "title": "Details",
726 | "type": "object"
727 | },
728 | "priority":{
729 | "title": "Priority",
730 | "type" : "string"
731 | }
732 | },
733 | "required": []
734 | },
735 | "mirrorProperties": {},
736 | "calculationProperties": {},
737 | "aggregationProperties": {},
738 | "relations": {}
739 | }
740 | ```
741 |
742 |
743 |
744 | Action
745 |
746 | ```json
747 | {
748 | "identifier": "create_opsgenie_incident",
749 | "title": "Create Opsgenie Incident",
750 | "icon": "OpsGenie",
751 | "userInputs": {
752 | "properties": {
753 | "message": {
754 | "title": "message",
755 | "description": "Message of the incident",
756 | "icon": "OpsGenie",
757 | "type": "string",
758 | "maxLength": 130
759 | },
760 | "description": {
761 | "icon": "OpsGenie",
762 | "title": "description",
763 | "type": "string",
764 | "maxLength": 15000,
765 | "description": "Description field of the incident that is generally used to provide a detailed information about the incident"
766 | },
767 | "details": {
768 | "title": "details",
769 | "description": "Map of key-value pairs to use as custom properties of the incident",
770 | "icon": "OpsGenie",
771 | "type": "object"
772 | },
773 | "priority": {
774 | "title": "Priority",
775 | "description": "Priority level of the incident. Possible values are P1, P2, P3, P4 and P5. Default value is P3.",
776 | "icon": "OpsGenie",
777 | "type": "string",
778 | "default": "P3",
779 | "enum": [
780 | "P1",
781 | "P2",
782 | "P3",
783 | "P4",
784 | "P5"
785 | ],
786 | "enumColors": {
787 | "P1": "red",
788 | "P2": "orange",
789 | "P3": "yellow",
790 | "P4": "green",
791 | "P5": "green"
792 | }
793 | }
794 | },
795 | "required": [
796 | "message",
797 | "description"
798 | ],
799 | "order": [
800 | "message",
801 | "description",
802 | "details",
803 | "priority"
804 | ]
805 | },
806 | "invocationMethod": {
807 | "type": "WEBHOOK",
808 | "url": "https://api.opsgenie.com/v1/incidents/create",
809 | "agent": true,
810 | "synchronized": true,
811 | "method": "POST"
812 | },
813 | "trigger": "CREATE",
814 | "description": "Create Opsgenie incident",
815 | "requiredApproval": false
816 | }
817 | ```
818 |
819 |
820 |
821 |
822 | Mapping - (Should be saved as `invocations.json`)
823 |
824 | ```json
825 | [
826 | {
827 | "enabled": ".action == \"create_opsgenie_incident\"",
828 | "url": ".payload.action.url",
829 | "headers": {
830 | "Authorization": "\"GenieKey \" + env.OPSGENIE_API_KEY",
831 | "Content-Type": "\"application/json\""
832 | },
833 | "body": {
834 | "message": ".payload.properties.message",
835 | "description": ".payload.properties.description",
836 | "details": ".payload.properties.details",
837 | "priority": ".payload.properties.priority"
838 | },
839 | "report": {
840 | "status": "if .response.statusCode == 202 then \"SUCCESS\" else \"FAILURE\" end"
841 | }
842 | }
843 | ]
844 | ```
845 |
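The `report.status` expression is evaluated against a context that includes the webhook response, which the agent first converts into a dict of the shape `{"statusCode", "headers", "text", "json"}` (see `response_to_dict` in `app/utils.py`). A minimal sketch with a hypothetical accepted Opsgenie response:

```python
import pyjq

# Hypothetical context; "response" mirrors the shape produced by
# app/utils.response_to_dict for an accepted Opsgenie request.
context = {
    "response": {
        "statusCode": 202,
        "headers": {"Content-Type": "application/json"},
        "text": '{"result": "Request will be processed"}',
        "json": {"result": "Request will be processed"},
    }
}

status_expression = 'if .response.statusCode == 202 then "SUCCESS" else "FAILURE" end'
print(pyjq.first(status_expression, context))  # SUCCESS
```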
846 |
847 | **Port agent installation for Opsgenie example**:
848 |
849 | ```sh
850 | helm repo add port-labs https://port-labs.github.io/helm-charts
851 |
852 | helm repo update
853 |
854 | helm install my-port-agent port-labs/port-agent \
855 | --create-namespace --namespace port-agent \
856 | --set env.normal.PORT_ORG_ID=YOUR_ORG_ID \
857 | --set env.normal.PORT_CLIENT_ID=YOUR_CLIENT \
858 | --set env.secret.PORT_CLIENT_SECRET=YOUR_PORT_CLIENT_SECRET \
859 | --set env.normal.KAFKA_CONSUMER_GROUP_ID=YOUR_KAFKA_CONSUMER_GROUP \
860 | --set env.secret.KAFKA_CONSUMER_USERNAME=YOUR_KAFKA_USERNAME \
861 |    --set env.secret.KAFKA_CONSUMER_PASSWORD=YOUR_KAFKA_PASSWORD \
862 | --set env.normal.KAFKA_CONSUMER_BROKERS=PORT_KAFKA_BROKERS \
863 | --set env.normal.STREAMER_NAME=KAFKA \
864 | --set env.normal.KAFKA_CONSUMER_AUTHENTICATION_MECHANISM=SCRAM-SHA-512 \
865 | --set env.normal.KAFKA_CONSUMER_AUTO_OFFSET_RESET=earliest \
866 | --set env.normal.KAFKA_CONSUMER_SECURITY_PROTOCOL=SASL_SSL \
867 |    --set env.secret.OPSGENIE_API_KEY=YOUR_OPSGENIE_API_KEY \
868 | --set-file controlThePayloadConfig=./invocations.json
869 | ```
870 |
871 | #### ArgoWorkflow Example
872 |
873 | This example helps internal developer teams trigger an [Argo Workflow](https://argoproj.github.io/workflows/) using Port's self-service actions. In particular, you will create a blueprint for `argoWorkflow` that will be connected to a backend action. You will then add a configuration file (`invocations.json`) to control the payload and trigger your Argo Workflow directly from Port using the sync execution method.
874 |
875 |
876 | Create the following blueprint, action, and mapping to trigger a workflow.
877 |
878 |
879 | Blueprint
880 |
881 | ```json
882 | {
883 | "identifier": "argoWorkflow",
884 | "description": "This blueprint represents an Argo Workflow.",
885 | "title": "Argo Workflow",
886 | "icon": "Argo",
887 | "schema": {
888 | "properties": {
889 | "metadata": {
890 | "icon": "Argo",
891 | "title": "Metadata",
892 | "description": "Metadata information for the Argo Workflow.",
893 | "type": "object"
894 | },
895 | "spec": {
896 | "icon": "Argo",
897 | "title": "Specification",
898 | "description": "Specification details of the Argo Workflow.",
899 | "type": "object"
900 | },
901 | "status": {
902 | "type": "object",
903 | "title": "Status",
904 | "description": "Status information for the Argo Workflow.",
905 | "icon": "Argo"
906 | }
907 | },
908 | "required": []
909 | },
910 | "mirrorProperties": {},
911 | "calculationProperties": {},
912 | "aggregationProperties": {},
913 | "relations": {}
914 | }
915 | ```
916 |
917 |
918 | >**Note:** Register an existing Argo Workflow in the catalog (this is a one-time operation). The workflow must already exist in your Argo Workflows deployment instance.
919 |
920 |
921 | Blueprint Entity Example
922 |
923 | ```json
924 | {
925 | "identifier": "f7d561c3-2791-4092-b960-8f2428ef9d79",
926 | "title": "hello-world-x9w5h",
927 | "icon": "Argo",
928 | "team": [],
929 | "properties": {
930 | "metadata": {
931 | "name": "hello-world-x9w5h",
932 | "generateName": "hello-world-",
933 | "namespace": "argo",
934 | "uid": "f7d561c3-2791-4092-b960-8f2428ef9d79",
935 | "resourceVersion": "484158",
936 | "generation": 7,
937 | "creationTimestamp": "2024-01-22T20:53:35Z",
938 | "labels": {
939 | "workflows.argoproj.io/completed": "false",
940 | "workflows.argoproj.io/creator": "system-serviceaccount-argo-argo-server",
941 | "workflows.argoproj.io/phase": "Failed"
942 | },
943 | "annotations": {
944 | "workflows.argoproj.io/pod-name-format": "v2"
945 | },
946 | "managedFields": [
947 | {
948 | "manager": "argo",
949 | "operation": "Update",
950 | "apiVersion": "argoproj.io/v1alpha1",
951 | "time": "2024-02-28T08:52:25Z",
952 | "fieldsType": "FieldsV1",
953 | "fieldsV1": {
954 | "f:metadata": {
955 | "f:generateName": {},
956 | "f:labels": {
957 | ".": {},
958 | "f:workflows.argoproj.io/completed": {},
959 | "f:workflows.argoproj.io/creator": {}
960 | }
961 | },
962 | "f:spec": {}
963 | }
964 | },
965 | {
966 | "manager": "workflow-controller",
967 | "operation": "Update",
968 | "apiVersion": "argoproj.io/v1alpha1",
969 | "time": "2024-02-28T08:52:35Z",
970 | "fieldsType": "FieldsV1",
971 | "fieldsV1": {
972 | "f:metadata": {
973 | "f:annotations": {
974 | ".": {},
975 | "f:workflows.argoproj.io/pod-name-format": {}
976 | },
977 | "f:labels": {
978 | "f:workflows.argoproj.io/phase": {}
979 | }
980 | },
981 | "f:status": {}
982 | }
983 | }
984 | ]
985 | },
986 | "spec": {
987 | "templates": [
988 | {
989 | "name": "whalesay",
990 | "inputs": {},
991 | "outputs": {},
992 | "metadata": {},
993 | "container": {
994 | "name": "",
995 | "image": "docker/whalesay:latest",
996 | "command": [
997 | "cowsay"
998 | ],
999 | "args": [
1000 | "hello world"
1001 | ],
1002 | "resources": {}
1003 | }
1004 | }
1005 | ],
1006 | "entrypoint": "whalesay",
1007 | "arguments": {},
1008 | "shutdown": "Stop"
1009 | },
1010 | "status": {},
1011 | "relations": {}
1012 | }
1013 | }
1014 | ```
1015 |
1016 |
1017 |
1018 | Action
1019 |
1020 | ```json
1021 | {
1022 | "identifier": "trigger_a_workflow",
1023 | "title": "Trigger A Workflow",
1024 | "icon": "Argo",
1025 | "userInputs": {
1026 | "properties": {
1027 | "namespace": {
1028 | "title": "Namespace",
1029 | "description": "Name of the namespace",
1030 | "icon": "Argo",
1031 | "type": "string",
1032 | "default": {
1033 | "jqQuery": ".entity.properties.metadata.namespace"
1034 | }
1035 | },
1036 | "memoized": {
1037 | "title": "Memoized",
1038 |         "description": "Turning on memoized lets the resubmitted workflow reuse the outputs of previously successful steps instead of executing them all again",
1039 | "icon": "Argo",
1040 | "type": "boolean",
1041 | "default": false
1042 | }
1043 | },
1044 | "required": [],
1045 | "order": [
1046 | "memoized"
1047 | ]
1048 | },
1049 | "invocationMethod": {
1050 | "type": "WEBHOOK",
1051 | "url": "https://{your-argo-workflow-domain}.com",
1052 | "agent": true,
1053 | "synchronized": true,
1054 | "method": "PUT"
1055 | },
1056 | "trigger": "DAY-2",
1057 | "description": "Trigger the execution of an argo workflow",
1058 | "requiredApproval": false
1059 | }
1060 | ```
1061 |
1062 |
1063 |
1064 | Mapping - (Should be saved as `invocations.json`)
1065 |
1066 | ```json
1067 | [
1068 | {
1069 | "enabled": ".action == \"trigger_a_workflow\"",
1070 | "url": ".payload.action.invocationMethod.url as $baseUrl | .payload.properties.namespace as $namespace | .payload.entity.title as $workflow_name | $baseUrl + \"/api/v1/workflows/\" + $namespace + \"/\" + $workflow_name + \"/resubmit\"",
1071 | "headers": {
1072 | "Authorization": "\"Bearer \" + env.ARGO_WORKFLOW_TOKEN",
1073 | "Content-Type": "\"application/json\""
1074 | },
1075 | "body": {
1076 | "memoized": ".payload.properties.memoized"
1077 | },
1078 | "report": {
1079 | "status": "if .response.statusCode == 200 then \"SUCCESS\" else \"FAILURE\" end",
1080 | "link": ".body.payload.action.invocationMethod.url as $baseUrl | $baseUrl + \"/workflows/\"+ .response.json.metadata.namespace + \"/\" +.response.json.metadata.name"
1081 | }
1082 | }
1083 | ]
1084 | ```
1085 |
1086 |
1087 |
1088 | Decrypting Encrypted Fields in Payloads
1089 |
1090 | ```json
1091 | {
1092 | "action": "deploy_service",
1093 | "resourceType": "service",
1094 | "status": "TRIGGERED",
1095 | "trigger": {
1096 | "by": {
1097 | "orgId": "org_123",
1098 | "userId": "auth0|abc123",
1099 | "user": {
1100 | "email": "user@example.com",
1101 | "firstName": "Alice",
1102 | "lastName": "Smith"
1103 | }
1104 | },
1105 | "origin": "UI",
1106 | "at": "2024-06-01T12:00:00.000Z"
1107 | },
1108 | "context": {
1109 | "entity": "e_456",
1110 | "blueprint": "microservice",
1111 | "runId": "r_789"
1112 | },
1113 | "payload": {
1114 | "entity": {
1115 | "identifier": "e_456",
1116 | "title": "My Service",
1117 | "blueprint": "microservice",
1118 | "team": ["devops"],
1119 | "properties": {
1120 | "api_key": "",
1121 | "db_password": "",
1122 | "region": "us-east-1"
1123 | }
1124 | },
1125 | "action": {
1126 | "invocationMethod": {
1127 | "type": "WEBHOOK",
1128 | "agent": true,
1129 | "synchronized": false,
1130 | "method": "POST",
1131 | "url": "https://myservice.com/deploy"
1132 | },
1133 | "trigger": "DEPLOY"
1134 | },
1135 | "properties": {},
1136 | "censoredProperties": []
1137 | }
1138 | }
1139 | ```
1140 |
1141 | To have the agent automatically decrypt the `api_key` and `db_password` fields, add their dot-separated paths to `fieldsToDecryptPaths` in your mapping configuration:
1142 |
1143 | ```json
1144 | [
1145 | {
1146 | "fieldsToDecryptPaths": [
1147 | "payload.entity.properties.api_key",
1148 | "payload.entity.properties.db_password"
1149 | ],
1150 | // ... other mapping fields ...
1151 | }
1152 | ]
1153 | ```
1154 |
1155 | **How it works:**
1156 | - The agent will look for the fields at `payload.entity.properties.api_key` and `payload.entity.properties.db_password` in the event.
1157 | - If it finds encrypted values there, it will decrypt them using your configured secret.
1158 | - You can add more fields to the list as needed (see the sketch below).
1159 |
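For reference, here is a minimal round-trip sketch using `decrypt_payload_fields` from `app/utils.py`, assuming you run it from the repository root so `app` is importable as a package (as the unit tests do). The `encrypt_field` helper is hypothetical and only mirrors the format `decrypt_field` expects (base64 of IV + ciphertext + GCM tag); the key stands in for your `PORT_CLIENT_SECRET`, of which the first 32 bytes are used:

```python
import base64
import os

from Crypto.Cipher import AES  # pycryptodome, already a dependency of the agent

from app.utils import decrypt_payload_fields

key = "a" * 32  # stand-in for PORT_CLIENT_SECRET


def encrypt_field(plain_text: str, key: str) -> str:
    # Hypothetical producer side: base64(iv + ciphertext + tag) is the
    # layout app/utils.decrypt_field expects.
    iv = os.urandom(16)
    cipher = AES.new(key.encode("utf-8")[:32], AES.MODE_GCM, nonce=iv)
    ciphertext, tag = cipher.encrypt_and_digest(plain_text.encode("utf-8"))
    return base64.b64encode(iv + ciphertext + tag).decode("utf-8")


payload = {
    "payload": {"entity": {"properties": {"api_key": encrypt_field("s3cr3t", key)}}}
}
decrypt_payload_fields(payload, ["payload.entity.properties.api_key"], key)
print(payload["payload"]["entity"]["properties"]["api_key"])  # s3cr3t
```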
1160 |
1161 | **Port agent installation for ArgoWorkflow example**:
1162 |
1163 | ```sh
1164 | helm repo add port-labs https://port-labs.github.io/helm-charts
1165 |
1166 | helm repo update
1167 |
1168 | helm install my-port-agent port-labs/port-agent \
1169 | --create-namespace --namespace port-agent \
1170 | --set env.normal.PORT_ORG_ID=YOUR_ORG_ID \
1171 | --set env.normal.PORT_CLIENT_ID=YOUR_CLIENT \
1172 | --set env.secret.PORT_CLIENT_SECRET=YOUR_PORT_CLIENT_SECRET \
1173 | --set env.normal.KAFKA_CONSUMER_GROUP_ID=YOUR_KAFKA_CONSUMER_GROUP \
1174 | --set env.secret.KAFKA_CONSUMER_USERNAME=YOUR_KAFKA_USERNAME \
1175 |    --set env.secret.KAFKA_CONSUMER_PASSWORD=YOUR_KAFKA_PASSWORD \
1176 | --set env.normal.KAFKA_CONSUMER_BROKERS=PORT_KAFKA_BROKERS \
1177 | --set env.normal.STREAMER_NAME=KAFKA \
1178 | --set env.normal.KAFKA_CONSUMER_AUTHENTICATION_MECHANISM=SCRAM-SHA-512 \
1179 | --set env.normal.KAFKA_CONSUMER_AUTO_OFFSET_RESET=earliest \
1180 | --set env.normal.KAFKA_CONSUMER_SECURITY_PROTOCOL=SASL_SSL \
1181 |    --set env.secret.ARGO_WORKFLOW_TOKEN=YOUR_ARGO_WORKFLOW_TOKEN \
1182 | --set-file controlThePayloadConfig=./invocations.json
1183 | ```
--------------------------------------------------------------------------------
/app/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/port-labs/port-agent/b7488dec608aec512d523d0618f61f04cfc3502a/app/__init__.py
--------------------------------------------------------------------------------
/app/consumers/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/port-labs/port-agent/b7488dec608aec512d523d0618f61f04cfc3502a/app/consumers/__init__.py
--------------------------------------------------------------------------------
/app/consumers/base_consumer.py:
--------------------------------------------------------------------------------
1 | from abc import ABC, abstractmethod
2 |
3 |
4 | class BaseConsumer(ABC):
5 | @abstractmethod
6 | def start(self) -> None:
7 | pass
8 |
--------------------------------------------------------------------------------
/app/consumers/kafka_consumer.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import signal
3 | from typing import Any, Callable
4 |
5 | from confluent_kafka import Consumer, KafkaException, Message
6 | from consumers.base_consumer import BaseConsumer
7 | from core.config import settings
8 | from core.consts import consts
9 | from port_client import get_kafka_credentials
10 |
11 | logging.basicConfig(level=settings.LOG_LEVEL)
12 | logger = logging.getLogger(__name__)
13 |
14 |
15 | class KafkaConsumer(BaseConsumer):
16 | def __init__(
17 | self, msg_process: Callable[[Message], None], consumer: Consumer = None
18 | ) -> None:
19 | self.running = False
20 | signal.signal(signal.SIGINT, self.exit_gracefully)
21 | signal.signal(signal.SIGTERM, self.exit_gracefully)
22 |
23 | self.msg_process = msg_process
24 |
25 | if consumer:
26 | self.consumer = consumer
27 | else:
28 | conf = {
29 | "client.id": consts.KAFKA_CONSUMER_CLIENT_ID,
30 | "security.protocol": settings.KAFKA_CONSUMER_SECURITY_PROTOCOL,
31 | "sasl.mechanism": settings.KAFKA_CONSUMER_AUTHENTICATION_MECHANISM,
32 | "group.id": settings.KAFKA_CONSUMER_GROUP_ID,
33 | "session.timeout.ms": settings.KAFKA_CONSUMER_SESSION_TIMEOUT_MS,
34 | "auto.offset.reset": settings.KAFKA_CONSUMER_AUTO_OFFSET_RESET,
35 | "enable.auto.commit": "false",
36 | }
37 | if not settings.USING_LOCAL_PORT_INSTANCE:
38 | logger.info("Getting Kafka credentials")
39 | brokers, username, password = get_kafka_credentials()
40 | conf["sasl.username"] = username
41 | conf["sasl.password"] = password
42 | conf["bootstrap.servers"] = ",".join(brokers)
43 |
44 | self.consumer = Consumer(conf)
45 |
46 | def _on_assign(self, consumer: Consumer, partitions: Any) -> None:
47 | logger.info("Assignment: %s", partitions)
48 | if not partitions:
49 | logger.error(
50 | "No partitions assigned. This usually means that there is"
51 | " already a consumer with the same group id running. To run"
52 | " another consumer please change the group id in the"
53 | " `KAFKA_CONSUMER_GROUP_ID` environment variable to a unique"
54 | " value prefixed with your organization id."
55 | )
56 | self.exit_gracefully()
57 |
58 | def start(self) -> None:
59 | try:
60 | self.consumer.subscribe(
61 | [settings.KAFKA_RUNS_TOPIC, settings.KAFKA_CHANGE_LOG_TOPIC],
62 | on_assign=self._on_assign,
63 | )
64 | self.running = True
65 | while self.running:
66 | try:
67 | msg = self.consumer.poll(timeout=1.0)
68 | if msg is None:
69 | continue
70 | if msg.error():
71 | raise KafkaException(msg.error())
72 | else:
73 | try:
74 | logger.info(
75 | "Process message"
76 | " from topic %s, partition %d, offset %d",
77 | msg.topic(),
78 | msg.partition(),
79 | msg.offset(),
80 | )
81 | self.msg_process(msg)
82 | except Exception as process_error:
83 | logger.error(
84 | "Failed process message"
85 | " from topic %s, partition %d, offset %d: %s",
86 | msg.topic(),
87 | msg.partition(),
88 | msg.offset(),
89 | str(process_error),
90 | )
91 | finally:
92 | self.consumer.commit(asynchronous=False)
93 | except Exception as message_error:
94 | logger.error(str(message_error))
95 | finally:
96 | self.consumer.close()
97 |
98 | def exit_gracefully(self, *_: Any) -> None:
99 | logger.info("Exiting gracefully...")
100 | self.running = False
101 |
--------------------------------------------------------------------------------
/app/control_the_payload_config.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "enabled": ".payload.action.invocationMethod.type == \"GITLAB\"",
4 | "url": "(env.GITLAB_URL // \"https://gitlab.com/\") as $baseUrl | (.payload.action.invocationMethod.groupName + \"/\" +.payload.action.invocationMethod.projectName) | @uri as $path | $baseUrl + \"api/v4/projects/\" + $path + \"/trigger/pipeline\"",
5 | "body": {
6 | "ref": ".payload.properties.ref // .payload.action.invocationMethod.defaultRef // \"main\"",
7 | "token": ".payload.action.invocationMethod.groupName as $gitlab_group | .payload.action.invocationMethod.projectName as $gitlab_project | env[($gitlab_group | gsub(\"/\"; \"_\")) + \"_\" + $gitlab_project]",
8 | "variables": ".payload.action.invocationMethod as $invocationMethod | .payload.properties | to_entries | map({(.key): (.value | tostring)}) | add | if $invocationMethod.omitUserInputs then {} else . end",
9 | "port_payload": "if .payload.action.invocationMethod.omitPayload then {} else . end"
10 | },
11 | "report": {
12 | "link": ".response.json.web_url",
13 | "externalRunId": ".response.json.id | tostring"
14 | }
15 | },
16 | {
17 | "enabled": true,
18 | "url": ".payload.action.invocationMethod.url // .changelogDestination.url",
19 | "method": ".payload.action.invocationMethod.method // \"POST\"",
20 | "fieldsToDecryptPaths": []
21 | }
22 | ]
23 |
--------------------------------------------------------------------------------
/app/core/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/port-labs/port-agent/b7488dec608aec512d523d0618f61f04cfc3502a/app/core/__init__.py
--------------------------------------------------------------------------------
/app/core/config.py:
--------------------------------------------------------------------------------
1 | from pathlib import Path
2 | from typing import Any, Optional
3 |
4 | from dotenv import find_dotenv
5 | from pydantic import (
6 | AnyHttpUrl,
7 | BaseModel,
8 | BaseSettings,
9 | Field,
10 | parse_file_as,
11 | parse_obj_as,
12 | validator,
13 | )
14 |
15 |
16 | class ActionReport(BaseModel):
17 | status: str | None = None
18 | link: str | None = None
19 | summary: str | None = None
20 | external_run_id: str | None = Field(None, alias="externalRunId")
21 |
22 |
23 | class Mapping(BaseModel):
24 | enabled: bool | str = True
25 | method: str | None = None
26 | url: str | None = None
27 | body: dict[str, Any] | str | None = None
28 | headers: dict[str, str] | str | None = None
29 | query: dict[str, str] | str | None = None
30 | report: ActionReport | None = None
31 | fieldsToDecryptPaths: list[str] = []
32 |
33 |
34 | class Settings(BaseSettings):
35 | USING_LOCAL_PORT_INSTANCE: bool = False
36 | LOG_LEVEL: str = "INFO"
37 |
38 | STREAMER_NAME: str
39 |
40 | PORT_ORG_ID: str
41 | PORT_API_BASE_URL: AnyHttpUrl = parse_obj_as(AnyHttpUrl, "https://api.getport.io")
42 | PORT_CLIENT_ID: str
43 | PORT_CLIENT_SECRET: str
44 | KAFKA_CONSUMER_SECURITY_PROTOCOL: str = "plaintext"
45 | KAFKA_CONSUMER_AUTHENTICATION_MECHANISM: str = "none"
46 | KAFKA_CONSUMER_SESSION_TIMEOUT_MS: int = 45000
47 | KAFKA_CONSUMER_AUTO_OFFSET_RESET: str = "earliest"
48 | KAFKA_CONSUMER_GROUP_ID: str = ""
49 |
50 | KAFKA_RUNS_TOPIC: str = ""
51 |
52 | CONTROL_THE_PAYLOAD_CONFIG_PATH: Path = Path("./control_the_payload_config.json")
53 |
54 | @validator("KAFKA_RUNS_TOPIC", always=True)
55 | def set_kafka_runs_topic(cls, v: Optional[str], values: dict) -> str:
56 | if isinstance(v, str) and v:
57 | return v
58 | return f"{values.get('PORT_ORG_ID')}.runs"
59 |
60 | KAFKA_CHANGE_LOG_TOPIC: str = ""
61 |
62 | @validator("KAFKA_CHANGE_LOG_TOPIC", always=True)
63 | def set_kafka_change_log_topic(cls, v: Optional[str], values: dict) -> str:
64 | if isinstance(v, str) and v:
65 | return v
66 | return f"{values.get('PORT_ORG_ID')}.change.log"
67 |
68 | class Config:
69 | case_sensitive = True
70 | env_file = find_dotenv()
71 | env_file_encoding = "utf-8"
72 |
73 | WEBHOOK_INVOKER_TIMEOUT: float = 30
74 |
75 |
76 | settings = Settings()
77 |
78 | control_the_payload_config = parse_file_as(
79 | list[Mapping], settings.CONTROL_THE_PAYLOAD_CONFIG_PATH
80 | )
81 |
--------------------------------------------------------------------------------
/app/core/consts.py:
--------------------------------------------------------------------------------
1 | class Consts:
2 | KAFKA_CONSUMER_CLIENT_ID = "port-agent"
3 | DEFAULT_HTTP_METHOD = "POST"
4 |
5 |
6 | consts = Consts()
7 |
--------------------------------------------------------------------------------
/app/invokers/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/port-labs/port-agent/b7488dec608aec512d523d0618f61f04cfc3502a/app/invokers/__init__.py
--------------------------------------------------------------------------------
/app/invokers/base_invoker.py:
--------------------------------------------------------------------------------
1 | from abc import ABC, abstractmethod
2 |
3 |
4 | class BaseInvoker(ABC):
5 | @abstractmethod
6 | def invoke(self, message: dict, destination: dict) -> None:
7 | pass
8 |
--------------------------------------------------------------------------------
/app/invokers/webhook_invoker.py:
--------------------------------------------------------------------------------
1 | import json
2 | import logging
3 | import time
4 | from typing import Any, Callable
5 |
6 | import pyjq as jq
7 | import requests
8 | from core.config import Mapping, control_the_payload_config, settings
9 | from core.consts import consts
10 | from flatten_dict import flatten, unflatten
11 | from invokers.base_invoker import BaseInvoker
12 | from port_client import report_run_response, report_run_status, run_logger_factory
13 | from pydantic import BaseModel, Field
14 | from requests import Response
15 | from utils import (
16 | decrypt_payload_fields,
17 | get_invocation_method_object,
18 | get_response_body,
19 | response_to_dict,
20 | sign_sha_256,
21 | )
22 |
23 | logging.basicConfig(level=settings.LOG_LEVEL)
24 | logger = logging.getLogger(__name__)
25 |
26 |
27 | class RequestPayload(BaseModel):
28 | method: str
29 | url: str
30 | body: dict
31 | headers: dict
32 | query: dict
33 |
34 |
35 | class ReportPayload(BaseModel):
36 | status: Any | None = None
37 | link: Any | None = None
38 | summary: Any | None = None
39 | external_run_id: Any | None = Field(None, alias="externalRunId")
40 |
41 |
42 | class WebhookInvoker(BaseInvoker):
43 | def _jq_exec(self, expression: str, context: dict) -> dict | None:
44 | try:
45 | return jq.first(expression, context)
46 | except Exception as e:
47 | logger.warning(
48 | "WebhookInvoker - jq error - expression: %s, error: %s", expression, e
49 | )
50 | return None
51 |
52 | def _apply_jq_on_field(self, mapping: Any, body: dict) -> Any:
53 | if isinstance(mapping, dict):
54 | flatten_dict = flatten(mapping)
55 | parsed_jq = {
56 | key: self._apply_jq_on_field(value, body)
57 | for key, value in flatten_dict.items()
58 | }
59 | return unflatten(parsed_jq)
60 | elif isinstance(mapping, list):
61 | return [self._apply_jq_on_field(item, body) for item in mapping]
62 | elif isinstance(mapping, str):
63 | return self._jq_exec(mapping, body)
64 | return mapping
65 |
66 | def _prepare_payload(
67 | self, mapping: Mapping, body: dict, invocation_method: dict
68 | ) -> RequestPayload:
69 | request_payload: RequestPayload = RequestPayload(
70 | method=invocation_method.get("method", consts.DEFAULT_HTTP_METHOD),
71 | url=invocation_method.get("url", ""),
72 | body=body,
73 | headers={},
74 | query={},
75 | )
76 |
77 | raw_mapping: dict = mapping.dict(exclude_none=True)
78 | raw_mapping.pop("enabled")
79 | raw_mapping.pop("report", None)
80 | raw_mapping.pop("fieldsToDecryptPaths", None)
81 | for key, value in raw_mapping.items():
82 | result = self._apply_jq_on_field(value, body)
83 | setattr(request_payload, key, result)
84 |
85 | return request_payload
86 |
87 | def _prepare_report(
88 | self,
89 | mapping: Mapping | None,
90 | response_context: Response,
91 | request_context: dict,
92 | body_context: dict,
93 | ) -> ReportPayload:
94 | # We don't want to update the run status if the request succeeded and the
95 | # invocation method is synchronized
96 | is_sync = get_invocation_method_object(body_context).get("synchronized")
97 | success_status = "SUCCESS" if is_sync else None
98 | default_status = success_status if response_context.ok else "FAILURE"
99 |
100 | failure_summary = (
101 | f"Failed to invoke the webhook with status code: "
102 | f"{response_context.status_code}. Response: {response_context.text}."
103 | )
104 | default_summary = None if response_context.ok else failure_summary
105 | report_payload: ReportPayload = ReportPayload(
106 | status=default_status, summary=default_summary
107 | )
108 | if not mapping or not mapping.report:
109 | return report_payload
110 |
111 | context = {
112 | "body": body_context,
113 | "request": request_context,
114 | "response": response_to_dict(response_context),
115 | }
116 |
117 | raw_mapping: dict = mapping.report.dict(exclude_none=True)
118 | for key, value in raw_mapping.items():
119 | result = self._apply_jq_on_field(value, context)
120 | setattr(report_payload, key, result)
121 |
122 | return report_payload
123 |
124 | def _find_mapping(self, body: dict) -> Mapping | None:
125 | return next(
126 | (
127 | action_mapping
128 | for action_mapping in control_the_payload_config
129 | if (
130 | type(action_mapping.enabled) != bool
131 | and self._jq_exec(action_mapping.enabled, body) is True
132 | )
133 | or action_mapping.enabled is True
134 | ),
135 | None,
136 | )
137 |
138 | @staticmethod
139 | def _request(
140 | request_payload: RequestPayload, run_logger: Callable[[str], None]
141 | ) -> Response:
142 | logger.info(
143 | "WebhookInvoker - request - " "method: %s, url: %s, body: %s",
144 | request_payload.method,
145 | request_payload.url,
146 | request_payload.body,
147 | )
148 | run_logger("Sending the request")
149 | request_payload.headers["X-Port-Timestamp"] = str(int(time.time()))
150 | request_payload.headers["X-Port-Signature"] = sign_sha_256(
151 | json.dumps(request_payload.body, separators=(",", ":")),
152 | settings.PORT_CLIENT_SECRET,
153 | request_payload.headers["X-Port-Timestamp"],
154 | )
155 |
156 | res = requests.request(
157 | request_payload.method,
158 | request_payload.url,
159 | json=request_payload.body,
160 | headers=request_payload.headers,
161 | params=request_payload.query,
162 | timeout=settings.WEBHOOK_INVOKER_TIMEOUT,
163 | )
164 |
165 | if res.ok:
166 | logger.info(
167 | "WebhookInvoker - request - status_code: %s, body: %s",
168 | res.status_code,
169 | res.text,
170 | )
171 | run_logger(
172 | f"Action invocation has completed successfully with "
173 | f"status code: {res.status_code}"
174 | )
175 | else:
176 | logger.warning(
177 | "WebhookInvoker - request - status_code: %s, response: %s",
178 | res.status_code,
179 | res.text,
180 | )
181 | run_logger(
182 | f"Action invocation failed with status code: {res.status_code} "
183 | f"and response: {res.text}"
184 | )
185 |
186 | return res
187 |
188 | @staticmethod
189 | def _report_run_status(
190 | run_id: str, data_to_patch: dict, run_logger: Callable[[str], None]
191 | ) -> Response:
192 | res = report_run_status(run_id, data_to_patch)
193 |
194 | if res.ok:
195 | logger.info(
196 | "WebhookInvoker - report run - run_id: %s, status_code: %s",
197 | run_id,
198 | res.status_code,
199 | )
200 | else:
201 | logger.warning(
202 | "WebhookInvoker - report run - "
203 | "run_id: %s, status_code: %s, response: %s",
204 | run_id,
205 | res.status_code,
206 | res.text,
207 | )
208 | run_logger(
209 | f"The run state failed to be reported "
210 | f"with status code: {res.status_code} and response: {res.text}"
211 | )
212 |
213 | return res
214 |
215 | @staticmethod
216 | def _report_run_response(
217 | run_id: str, response_body: dict | str | None, run_logger: Callable[[str], None]
218 | ) -> Response:
219 | logger.info(
220 | "WebhookInvoker - report run response - run_id: %s, response: %s",
221 | run_id,
222 | response_body,
223 | )
224 | run_logger("Reporting the run response")
225 |
226 | res = report_run_response(run_id, response_body)
227 |
228 | if res.ok:
229 | logger.info(
230 | "WebhookInvoker - report run response - " "run_id: %s, status_code: %s",
231 | run_id,
232 | res.status_code,
233 | )
234 | run_logger("The run response was reported successfully ")
235 | else:
236 | logger.warning(
237 | "WebhookInvoker - report run response - "
238 | "run_id: %s, status_code: %s, response: %s",
239 | run_id,
240 | res.status_code,
241 | res.text,
242 | )
243 | run_logger(
244 | f"The run response failed to be reported "
245 | f"with status code: {res.status_code} and response: {res.text}"
246 | )
247 |
248 | return res
249 |
250 | def _invoke_run(
251 | self, run_id: str, mapping: Mapping, body: dict, invocation_method: dict
252 | ) -> None:
253 | run_logger = run_logger_factory(run_id)
254 | run_logger("An action message has been received")
255 |
256 | logger.info(
257 | "WebhookInvoker - mapping - mapping: %s",
258 | mapping.dict() if mapping else None,
259 | )
260 | run_logger("Preparing the payload for the request")
261 | request_payload = self._prepare_payload(mapping, body, invocation_method)
262 | res = self._request(request_payload, run_logger)
263 |
264 | response_body = get_response_body(res)
265 | if invocation_method.get("synchronized") and response_body:
266 | self._report_run_response(run_id, response_body, run_logger)
267 |
268 | report_payload = self._prepare_report(
269 | mapping, res, request_payload.dict(), body
270 | )
271 | if report_dict := report_payload.dict(exclude_none=True, by_alias=True):
272 | logger.info(
273 | "WebhookInvoker - report mapping - report_payload: %s",
274 | report_payload.dict(exclude_none=True, by_alias=True),
275 | )
276 | self._report_run_status(run_id, report_dict, run_logger)
277 | else:
278 | logger.info(
279 | "WebhookInvoker - report mapping "
280 | "- no report mapping found - run_id: %s",
281 | run_id,
282 | )
283 | res.raise_for_status()
284 | run_logger("Port agent finished processing the action run")
285 |
286 | def validate_incoming_signature(
287 | self, msg: dict, invocation_method_name: str
288 | ) -> bool:
289 | if "changelogDestination" in msg:
290 | return True
291 |
292 | port_signature = msg.get("headers", {}).get("X-Port-Signature")
293 | port_timestamp = msg.get("headers", {}).get("X-Port-Timestamp")
294 |
295 | if not port_signature or not port_timestamp:
296 | logger.warning(
297 | "WebhookInvoker - Could not find the required headers, skipping the"
298 | " event invocation method for the event"
299 | )
300 | return False
301 |
302 | # Remove Port's generated headers to avoid them being
303 | # used in the signature verification
304 | if invocation_method_name == "GITLAB":
305 | del msg["headers"]
306 | else:
307 | del msg["headers"]["X-Port-Signature"]
308 | del msg["headers"]["X-Port-Timestamp"]
309 |
310 | expected_sig = sign_sha_256(
311 | json.dumps(msg, separators=(",", ":"), ensure_ascii=False),
312 | settings.PORT_CLIENT_SECRET,
313 | port_timestamp,
314 | )
315 | if expected_sig != port_signature:
316 | logger.warning(
317 | "WebhookInvoker - Could not verify signature, skipping the event"
318 | )
319 | return False
320 | return True
321 |
322 | def invoke(self, msg: dict, invocation_method: dict) -> None:
323 | logger.info("WebhookInvoker - start - destination: %s", invocation_method)
324 | run_id = msg["context"].get("runId")
325 |
326 | invocation_method_name = invocation_method.get("type", "WEBHOOK")
327 | if not self.validate_incoming_signature(msg, invocation_method_name):
328 | return
329 |
330 | logger.info("WebhookInvoker - validating signature")
331 |
332 | mapping = self._find_mapping(msg)
333 | if mapping is None:
334 | logger.info(
335 | "WebhookInvoker - Could not find suitable mapping for the event"
336 | f" - msg: {msg} {', run_id: ' + run_id if run_id else ''}",
337 | )
338 | return
339 |
340 | self._replace_encrypted_fields(msg, mapping)
341 |
342 | if run_id:
343 | self._invoke_run(run_id, mapping, msg, invocation_method)
344 | # Used for changelog destination event trigger
345 | elif invocation_method.get("url"):
346 | request_payload = self._prepare_payload(mapping, msg, invocation_method)
347 | res = self._request(request_payload, lambda _: None)
348 | res.raise_for_status()
349 | else:
350 | logger.warning(
351 | "WebhookInvoker - Could not find suitable "
352 | "invocation method for the event"
353 | )
354 | logger.info("Finished processing the event")
355 |
356 | def _replace_encrypted_fields(self, msg: dict, mapping: Mapping) -> None:
357 | fields_to_decrypt = getattr(mapping, "fieldsToDecryptPaths", None)
358 | if not settings.PORT_CLIENT_SECRET or not fields_to_decrypt:
359 | return
360 | logger.info(
361 | "WebhookInvoker - decrypting fields - fields: %s", fields_to_decrypt
362 | )
363 | decryption_key = settings.PORT_CLIENT_SECRET
364 | decrypted_payload = decrypt_payload_fields(
365 | msg, fields_to_decrypt, decryption_key
366 | )
367 | msg.update(decrypted_payload)
368 |
369 |
370 | webhook_invoker = WebhookInvoker()
371 |
--------------------------------------------------------------------------------
/app/main.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
3 | from core.config import settings
4 | from streamers.streamer_factory import StreamerFactory
5 |
6 | logging.basicConfig(level=settings.LOG_LEVEL)
7 | logger = logging.getLogger(__name__)
8 |
9 |
10 | def main() -> None:
11 | streamer_factory = StreamerFactory()
12 | streamer = streamer_factory.get_streamer(settings.STREAMER_NAME)
13 | logger.info("Starting streaming with streamer: %s", settings.STREAMER_NAME)
14 | streamer.stream()
15 |
16 |
17 | if __name__ == "__main__":
18 | main()
19 |
--------------------------------------------------------------------------------
/app/port_client.py:
--------------------------------------------------------------------------------
1 | from logging import getLogger
2 | from typing import Callable
3 |
4 | import requests
5 | from core.config import settings
6 | from requests import Response
7 |
8 | logger = getLogger(__name__)
9 |
10 |
11 | def get_port_api_headers() -> dict[str, str]:
12 | credentials = {
13 | "clientId": settings.PORT_CLIENT_ID,
14 | "clientSecret": settings.PORT_CLIENT_SECRET,
15 | }
16 |
17 | token_response = requests.post(
18 | f"{settings.PORT_API_BASE_URL}/v1/auth/access_token", json=credentials
19 | )
20 |
21 | if not token_response.ok:
22 | logger.error(
23 | f"Failed to get Port API access token - "
24 | f"status: {token_response.status_code}, "
25 | f"response: {token_response.text}"
26 | )
27 |
28 | token_response.raise_for_status()
29 |
30 | return {
31 | "Authorization": f"Bearer {token_response.json()['accessToken']}",
32 | "User-Agent": "port-agent",
33 | }
34 |
35 |
36 | def run_logger_factory(run_id: str) -> Callable[[str], None]:
37 | def send_run_log(message: str) -> None:
38 | headers = get_port_api_headers()
39 |
40 | requests.post(
41 | f"{settings.PORT_API_BASE_URL}/v1/actions/runs/{run_id}/logs",
42 | json={"message": message},
43 | headers=headers,
44 | )
45 |
46 | return send_run_log
47 |
48 |
49 | def report_run_status(run_id: str, data_to_patch: dict) -> Response:
50 | headers = get_port_api_headers()
51 | res = requests.patch(
52 | f"{settings.PORT_API_BASE_URL}/v1/actions/runs/{run_id}",
53 | json=data_to_patch,
54 | headers=headers,
55 | )
56 | return res
57 |
58 |
59 | def report_run_response(run_id: str, response: dict | str | None) -> Response:
60 | headers = get_port_api_headers()
61 | res = requests.patch(
62 | f"{settings.PORT_API_BASE_URL}/v1/actions/runs/{run_id}/response",
63 | json={"response": response},
64 | headers=headers,
65 | )
66 | return res
67 |
68 |
69 | def get_kafka_credentials() -> tuple[list[str], str, str]:
70 | headers = get_port_api_headers()
71 | res = requests.get(
72 | f"{settings.PORT_API_BASE_URL}/v1/kafka-credentials", headers=headers
73 | )
74 | res.raise_for_status()
75 | data = res.json()["credentials"]
76 | return data["brokers"], data["username"], data["password"]
77 |
--------------------------------------------------------------------------------
/app/processors/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/port-labs/port-agent/b7488dec608aec512d523d0618f61f04cfc3502a/app/processors/__init__.py
--------------------------------------------------------------------------------
/app/processors/kafka/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/port-labs/port-agent/b7488dec608aec512d523d0618f61f04cfc3502a/app/processors/kafka/__init__.py
--------------------------------------------------------------------------------
/app/processors/kafka/kafka_to_webhook_processor.py:
--------------------------------------------------------------------------------
1 | import json
2 | import logging
3 |
4 | from confluent_kafka import Message
5 | from core.config import settings
6 | from invokers.webhook_invoker import webhook_invoker
7 |
8 | logging.basicConfig(level=settings.LOG_LEVEL)
9 | logger = logging.getLogger(__name__)
10 |
11 |
12 | class KafkaToWebhookProcessor:
13 | @staticmethod
14 | def msg_process(msg: Message, invocation_method: dict, topic: str) -> None:
15 | logger.info("Raw message value: %s", msg.value())
16 | msg_value = json.loads(msg.value().decode())
17 |
18 | webhook_invoker.invoke(msg_value, invocation_method)
19 | logger.info(
20 | "Successfully processed message from topic %s, partition %d, offset %d",
21 | topic,
22 | msg.partition(),
23 | msg.offset(),
24 | )
25 |
--------------------------------------------------------------------------------
/app/streamers/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/port-labs/port-agent/b7488dec608aec512d523d0618f61f04cfc3502a/app/streamers/__init__.py
--------------------------------------------------------------------------------
/app/streamers/base_streamer.py:
--------------------------------------------------------------------------------
1 | from abc import ABC, abstractmethod
2 |
3 |
4 | class BaseStreamer(ABC):
5 | @abstractmethod
6 | def stream(self) -> None:
7 | pass
8 |
--------------------------------------------------------------------------------
/app/streamers/kafka/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/port-labs/port-agent/b7488dec608aec512d523d0618f61f04cfc3502a/app/streamers/kafka/__init__.py
--------------------------------------------------------------------------------
/app/streamers/kafka/kafka_streamer.py:
--------------------------------------------------------------------------------
1 | import json
2 | import logging
3 |
4 | from confluent_kafka import Consumer, Message
5 | from consumers.kafka_consumer import KafkaConsumer
6 | from core.config import settings
7 | from processors.kafka.kafka_to_webhook_processor import KafkaToWebhookProcessor
8 | from streamers.base_streamer import BaseStreamer
9 |
10 | logging.basicConfig(level=settings.LOG_LEVEL)
11 | logger = logging.getLogger(__name__)
12 |
13 |
14 | class KafkaStreamer(BaseStreamer):
15 | def __init__(self, consumer: Consumer = None) -> None:
16 | self.kafka_consumer = KafkaConsumer(self.msg_process, consumer)
17 |
18 | def msg_process(self, msg: Message) -> None:
19 | logger.info("Raw message value: %s", msg.value())
20 | msg_value = json.loads(msg.value().decode())
21 | topic = msg.topic()
22 | invocation_method = self.get_invocation_method(msg_value, topic)
23 |
24 | if not invocation_method.pop("agent", False):
25 | logger.info(
26 | "Skip process message"
27 | " from topic %s, partition %d, offset %d: not for agent",
28 | topic,
29 | msg.partition(),
30 | msg.offset(),
31 | )
32 | return
33 |
34 | KafkaToWebhookProcessor.msg_process(msg, invocation_method, topic)
35 |
36 | @staticmethod
37 | def get_invocation_method(msg_value: dict, topic: str) -> dict:
38 | if topic == settings.KAFKA_RUNS_TOPIC:
39 | return (
40 | msg_value.get("payload", {})
41 | .get("action", {})
42 | .get("invocationMethod", {})
43 | )
44 |
45 | if topic == settings.KAFKA_CHANGE_LOG_TOPIC:
46 | return msg_value.get("changelogDestination", {})
47 |
48 | return {}
49 |
50 | def stream(self) -> None:
51 | self.kafka_consumer.start()
52 |
--------------------------------------------------------------------------------
/app/streamers/streamer_factory.py:
--------------------------------------------------------------------------------
1 | from core.config import settings
2 | from streamers.base_streamer import BaseStreamer
3 | from streamers.kafka.kafka_streamer import KafkaStreamer
4 |
5 |
6 | class StreamerFactory:
7 | @staticmethod
8 | def get_streamer(name: str) -> BaseStreamer:
9 | if settings.STREAMER_NAME == "KAFKA":
10 | return KafkaStreamer()
11 |
12 | raise Exception("Not found streamer for name: %s" % name)
13 |
--------------------------------------------------------------------------------
/app/utils.py:
--------------------------------------------------------------------------------
1 | import base64
2 | import hashlib
3 | import hmac
4 | import logging
5 | from typing import Any, Dict, List
6 |
7 | from Crypto.Cipher import AES
8 | from glom import assign, glom
9 | from requests import Response
10 |
11 | logger = logging.getLogger(__name__)
12 |
13 |
14 | def response_to_dict(response: Response) -> dict:
15 | response_dict = {
16 | "statusCode": response.status_code,
17 | "headers": dict(response.headers),
18 | "text": response.text,
19 | "json": None,
20 | }
21 |
22 | try:
23 | response_dict["json"] = response.json()
24 | except ValueError:
25 | logger.debug(
26 | "Failed to parse response body as JSON: Response is not JSON serializable"
27 | )
28 |
29 | return response_dict
30 |
31 |
32 | def get_invocation_method_object(body: dict) -> dict:
33 | return body.get("payload", {}).get("action", {}).get("invocationMethod", {})
34 |
35 |
36 | def get_response_body(response: Response) -> dict | str | None:
37 | try:
38 | return response.json()
39 | except ValueError:
40 | return response.text
41 |
42 |
43 | def sign_sha_256(input: str, secret: str, timestamp: str) -> str:
44 | to_sign = f"{timestamp}.{input}"
45 | new_hmac = hmac.new(bytes(secret, "utf-8"), digestmod=hashlib.sha256)
46 | new_hmac.update(bytes(to_sign, "utf-8"))
47 | signed = base64.b64encode(new_hmac.digest()).decode("utf-8")
48 | return f"v1,{signed}"
49 |
50 |
51 | def decrypt_field(encrypted_value: str, key: str) -> str:
52 | encrypted_data = base64.b64decode(encrypted_value)
53 | if len(encrypted_data) < 32:
54 | raise ValueError("Encrypted data is too short")
55 |
56 | iv = encrypted_data[:16]
57 | ciphertext = encrypted_data[16:-16]
58 | tag = encrypted_data[-16:]
59 |
60 | key_bytes = key.encode("utf-8")
61 | if len(key_bytes) < 32:
62 | raise ValueError("Encryption key must be at least 32 bytes")
63 | key_bytes = key_bytes[:32]
64 |
65 | cipher = AES.new(key_bytes, AES.MODE_GCM, nonce=iv)
66 | decrypted = cipher.decrypt_and_verify(ciphertext, tag)
67 | return decrypted.decode("utf-8")
68 |
69 |
70 | def decrypt_payload_fields(
71 | payload: Dict[str, Any], fields: List[str], key: str
72 | ) -> Dict[str, Any]:
73 | for path in fields:
74 | encrypted = glom(payload, path, default=None)
75 | if encrypted is not None:
76 | try:
77 | decrypted = decrypt_field(encrypted, key)
78 | assign(payload, path, decrypted)
79 | except Exception as e:
80 | logger.warning(f"Decryption failed for '{path}': {e}")
81 | return payload
82 |
--------------------------------------------------------------------------------
/mypy.ini:
--------------------------------------------------------------------------------
1 | [mypy]
2 | plugins = pydantic.mypy
3 | ignore_missing_imports = True
4 | disallow_untyped_defs = True
5 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.poetry]
2 | name = "port-agent"
3 | version = "0.1.0"
4 | description = ""
5 | authors = ["tankilevitch "]
6 | license = "Apache"
7 | readme = "README.md"
8 | packages = [{include = "app"}]
9 |
10 | [tool.poetry.dependencies]
11 | python = "^3.11"
12 | confluent-kafka = ">=2.1.1,<2.2.0"
13 | pydantic = ">=1.10.2,<1.11.0"
14 | pyjq = ">=2.6.0,<2.7.0"
15 | flatten-dict = ">=0.4.2,<0.5.0"
16 | requests = "^2.32.3"
17 | setuptools = ">=78.1.1"
18 | python-dotenv = "^1.0.1"
19 | pycryptodome = "^3.23.0"
20 | glom = "^24.11.0"
21 |
22 |
23 | [tool.poetry.group.dev.dependencies]
24 | pytest = "^7.2.0"
25 | pytest-mock = "^3.12.0"
26 | pytest-cov = "^6.0.0"
27 | mypy = "^0.991"
28 | types-requests = "^2.28.11.5"
29 | isort = "^5.10.1"
30 | autoflake = "^1.7.7"
31 | black = "^24.3.0"
32 | flake8 = "^5.0.4"
33 | pre-commit = "^2.20.0"
34 |
35 | [build-system]
36 | requires = ["poetry-core>=1.0.0,<2.0.0"]
37 | build-backend = "poetry.core.masonry.api"
38 |
--------------------------------------------------------------------------------
/scripts/format.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh -e
2 | set -x
3 |
4 | autoflake --remove-all-unused-imports --recursive --remove-unused-variables --in-place app tests --exclude=__init__.py
5 | black app tests
6 | isort --profile black app tests
7 |
--------------------------------------------------------------------------------
/scripts/lint.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | set -x
4 |
5 | mypy app tests
6 | black app tests --check
7 | isort --profile black --check-only app tests
8 | flake8
9 |
--------------------------------------------------------------------------------
/scripts/test.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | set -e
4 | set -x
5 |
6 | cd ./app && PYTHONPATH=./ STREAMER_NAME=test PORT_ORG_ID=test_org PORT_CLIENT_ID=test PORT_CLIENT_SECRET=test pytest --cov --cov-report= --cov-append ../tests "${@}"
7 |
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/port-labs/port-agent/b7488dec608aec512d523d0618f61f04cfc3502a/tests/__init__.py
--------------------------------------------------------------------------------
/tests/unit/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/port-labs/port-agent/b7488dec608aec512d523d0618f61f04cfc3502a/tests/unit/__init__.py
--------------------------------------------------------------------------------
/tests/unit/invokers/test_webhook_invoker.py:
--------------------------------------------------------------------------------
1 | from typing import Any, Dict, List
2 | from unittest import mock
3 |
4 | import pytest
5 | from glom import assign, glom
6 | from glom.core import PathAssignError
7 | from invokers.webhook_invoker import WebhookInvoker
8 |
9 | from app.core.config import Mapping
10 | from app.utils import decrypt_field, decrypt_payload_fields
11 |
12 |
13 | def inplace_decrypt_mock(
14 | payload: Dict[str, Any], fields: List[str], key: str
15 | ) -> Dict[str, Any]:
16 | for field_path in fields:
17 | if not field_path:
18 | continue
19 | try:
20 | assign(payload, field_path, f"decrypted_{glom(payload, field_path)}")
21 | except Exception:
22 | # If the path does not exist, skip
23 | continue
24 | return payload
25 |
26 |
27 | @mock.patch(
28 | "invokers.webhook_invoker.decrypt_payload_fields", side_effect=inplace_decrypt_mock
29 | )
30 | def test_decrypt_simple_fields(_mock_decrypt: object) -> None:
31 | invoker = WebhookInvoker()
32 | message: Dict[str, Any] = {
33 | "field1": "encrypted_value1",
34 | "field2": "encrypted_value2",
35 | }
36 | mapping = Mapping.construct()
37 | object.__setattr__(mapping, "fieldsToDecryptPaths", ["field1", "field2"])
38 | invoker._replace_encrypted_fields(message, mapping)
39 | assert message["field1"] == "decrypted_encrypted_value1"
40 | assert message["field2"] == "decrypted_encrypted_value2"
41 |
42 |
43 | @mock.patch(
44 | "invokers.webhook_invoker.decrypt_payload_fields", side_effect=inplace_decrypt_mock
45 | )
46 | def test_decrypt_complex_fields(_mock_decrypt: object) -> None:
47 | invoker = WebhookInvoker()
48 | msg: Dict[str, Any] = {
49 | "nested": {"field1": "encrypted_value1", "field2": "encrypted_value2"},
50 | "field3": "encrypted_value3",
51 | }
52 | mapping = Mapping.construct()
53 | object.__setattr__(
54 | mapping, "fieldsToDecryptPaths", ["nested.field1", "nested.field2", "field3"]
55 | )
56 | invoker._replace_encrypted_fields(msg, mapping)
57 | assert msg["nested"]["field1"] == "decrypted_encrypted_value1"
58 | assert msg["nested"]["field2"] == "decrypted_encrypted_value2"
59 | assert msg["field3"] == "decrypted_encrypted_value3"
60 |
61 |
62 | @mock.patch(
63 | "invokers.webhook_invoker.decrypt_payload_fields", side_effect=inplace_decrypt_mock
64 | )
65 | def test_partial_decryption(_mock_decrypt: object) -> None:
66 | invoker = WebhookInvoker()
67 | msg: Dict[str, Any] = {
68 | "field1": "encrypted_value1",
69 | "field2": "encrypted_value2",
70 | "field3": "plain_value3",
71 | }
72 | mapping = Mapping.construct()
73 | object.__setattr__(mapping, "fieldsToDecryptPaths", ["field1", "field2"])
74 | invoker._replace_encrypted_fields(msg, mapping)
75 | assert msg["field1"] == "decrypted_encrypted_value1"
76 | assert msg["field2"] == "decrypted_encrypted_value2"
77 | assert msg["field3"] == "plain_value3"
78 |
79 |
80 | @mock.patch(
81 | "invokers.webhook_invoker.decrypt_payload_fields", side_effect=inplace_decrypt_mock
82 | )
83 | def test_decrypt_with_complex_jq(_mock_decrypt: object) -> None:
84 | invoker = WebhookInvoker()
85 | msg: Dict[str, Any] = {
86 | "field1": "encrypted_value1",
87 | "nested": {"field2": "encrypted_value2"},
88 | "field3": "plain_value3",
89 | }
90 | mapping = Mapping.construct()
91 | object.__setattr__(mapping, "fieldsToDecryptPaths", ["field1", "nested.field2"])
92 | invoker._replace_encrypted_fields(msg, mapping)
93 | assert msg["field1"] == "decrypted_encrypted_value1"
94 | assert msg["nested"]["field2"] == "decrypted_encrypted_value2"
95 | assert msg["field3"] == "plain_value3"
96 | assert msg == {
97 | "field1": "decrypted_encrypted_value1",
98 | "nested": {"field2": "decrypted_encrypted_value2"},
99 | "field3": "plain_value3",
100 | }
101 |
102 |
103 | def encrypt_field(plain_text: str, key: str) -> str:
104 | import base64
105 | import os
106 |
107 | from Crypto.Cipher import AES
108 |
109 | key_bytes = key.encode("utf-8")
110 | if len(key_bytes) < 32:
111 | raise ValueError("Encryption key must be at least 32 bytes")
112 | key_bytes = key_bytes[:32]
113 | iv = os.urandom(16)
114 | cipher = AES.new(key_bytes, AES.MODE_GCM, nonce=iv)
115 | ciphertext, tag = cipher.encrypt_and_digest(plain_text.encode("utf-8"))
116 | encrypted_data = iv + ciphertext + tag
117 | return base64.b64encode(encrypted_data).decode("utf-8")
118 |
119 |
120 | def test_decrypt_payload_fields_complex() -> None:
121 | # Simulate a nested payload with encrypted fields
122 | key = "a" * 32
123 | encrypted_value = encrypt_field("secret_value", key)
124 | payload = {
125 | "level1": {
126 | "level2": {"secret": encrypted_value, "other": "not encrypted"},
127 | "list": [
128 | {"deep": {"secret": encrypted_value}},
129 | {"deep": {"not_secret": "foo"}},
130 | ],
131 | },
132 | "top_secret": encrypted_value,
133 | }
134 | fields_to_decrypt = [
135 | "level1.level2.secret",
136 | "top_secret",
137 | "level1.list.0.deep.secret",
138 | ]
139 | result = decrypt_payload_fields(payload, fields_to_decrypt, key)
140 | assert result["level1"]["level2"]["secret"] == "secret_value"
141 | assert result["top_secret"] == "secret_value"
142 | assert result["level1"]["list"][0]["deep"]["secret"] == "secret_value"
143 | assert result["level1"]["level2"]["other"] == "not encrypted"
144 | assert result["level1"]["list"][1]["deep"]["not_secret"] == "foo"
145 |
146 |
147 | def test_decrypt_field_too_short() -> None:
148 | with pytest.raises(ValueError, match="Encrypted data is too short"):
149 | decrypt_field("aGVsbG8=", "a" * 32)
150 |
151 |
152 | def test_decrypt_field_key_too_short() -> None:
153 | import base64
154 |
155 | # 32 bytes of data
156 | data = base64.b64encode(b"a" * 32).decode()
157 | with pytest.raises(ValueError, match="Encryption key must be at least 32 bytes"):
158 | decrypt_field(data, "short")
159 |
160 |
161 | def test_decrypt_field_decrypt_failure() -> None:
162 | import base64
163 |
164 | # 48 bytes: 16 IV + 16 ciphertext + 16 tag
165 | data = base64.b64encode(b"a" * 48).decode()
166 | with pytest.raises(Exception):
167 | decrypt_field(data, "a" * 32)
168 |
169 |
170 | def test_decrypt_payload_fields_decrypt_exception() -> None:
171 | payload = {"a": "encrypted"}
172 |
173 | def bad_decrypt_field(val: str, key: str) -> None:
174 | raise Exception("fail")
175 |
176 | with mock.patch("app.utils.decrypt_field", bad_decrypt_field):
177 | result = decrypt_payload_fields(payload, ["a"], "key")
178 | assert result["a"] == "encrypted"
179 |
180 |
181 | def test_get_nested_and_set_nested() -> None:
182 | data = {
183 | "a": {"b": [1, {"c": "value"}]},
184 | "x": [0, {"y": "z"}],
185 | }
186 | # Test glom (get_nested)
187 | assert glom(data, "a.b.1.c") == "value"
188 | assert glom(data, "x.1.y") == "z"
189 | assert glom(data, "a.b.2", default=None) is None
190 | assert glom(data, "a.b.1.d", default=None) is None
191 | # Test assign (set_nested)
192 |     assign(data, "a.b.1.c", 42)
193 |     assert data["a"]["b"][1]["c"] == 42
194 |     assign(data, "x.1.y", "changed")
195 |     assert data["x"][1]["y"] == "changed"
196 |     # assign creates missing keys in dicts, but cannot extend lists
197 |     with pytest.raises(PathAssignError):
198 |         assign(data, "a.b.2", "fail")
199 |     assign(data, "a.b.1.d", "created")
200 |     assert data["a"]["b"][1]["d"] == "created"
201 |
--------------------------------------------------------------------------------
/tests/unit/processors/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/port-labs/port-agent/b7488dec608aec512d523d0618f61f04cfc3502a/tests/unit/processors/__init__.py
--------------------------------------------------------------------------------
/tests/unit/processors/kafka/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/port-labs/port-agent/b7488dec608aec512d523d0618f61f04cfc3502a/tests/unit/processors/kafka/__init__.py
--------------------------------------------------------------------------------
/tests/unit/processors/kafka/conftest.py:
--------------------------------------------------------------------------------
1 | import json
2 | import os
3 | from signal import SIGINT
4 | from typing import Any, Callable, Generator, Optional
5 |
6 | import port_client
7 | import pytest
8 | import requests
9 | from _pytest.monkeypatch import MonkeyPatch
10 | from confluent_kafka import Consumer as _Consumer
11 | from core.config import Mapping
12 | from pydantic import parse_obj_as
13 |
14 | from app.utils import sign_sha_256
15 |
16 |
17 | @pytest.fixture
18 | def mock_requests(monkeypatch: MonkeyPatch, request: Any) -> None:
19 | class MockResponse:
20 | status_code = request.param.get("status_code")
21 | text = "Invoker failed with status code: %d" % status_code
22 |
23 | def json(self) -> dict:
24 | return request.param.get("json")
25 |
26 | @property
27 | def ok(self) -> bool:
28 | return 200 <= self.status_code <= 299
29 |
30 | def raise_for_status(self) -> None:
31 | if 400 <= self.status_code <= 599:
32 | raise Exception(self.text)
33 |
34 | def mock_request(*args: Any, **kwargs: Any) -> MockResponse:
35 | return MockResponse()
36 |
37 | monkeypatch.setattr(port_client, "get_port_api_headers", lambda *args: {})
38 | monkeypatch.setattr(requests, "request", mock_request)
39 | monkeypatch.setattr(requests, "get", mock_request)
40 | monkeypatch.setattr(requests, "post", mock_request)
41 | monkeypatch.setattr(requests, "delete", mock_request)
42 | monkeypatch.setattr(requests, "put", mock_request)
43 |
44 |
45 | def terminate_consumer() -> None:
46 | os.kill(os.getpid(), SIGINT)
47 |
48 |
49 | class Consumer(_Consumer):
50 | def __init__(self) -> None:
51 | pass
52 |
53 | def subscribe(
54 | self, topics: Any, on_assign: Any = None, *args: Any, **kwargs: Any
55 | ) -> None:
56 | pass
57 |
58 | def poll(self, timeout: Any = None) -> None:
59 | pass
60 |
61 | def commit(self, message: Any = None, *args: Any, **kwargs: Any) -> None:
62 | pass
63 |
64 | def close(self, *args: Any, **kwargs: Any) -> None:
65 | pass
66 |
67 |
68 | @pytest.fixture
69 | def mock_kafka(monkeypatch: MonkeyPatch, request: Any) -> dict:
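   |     # request.param is a 3-tuple: (name of the message fixture to load,
   |     # invocation-method override or None, topic to report for the message).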
70 | class MockKafkaMessage:
71 | def error(self) -> None:
72 | return None
73 |
74 | def topic(self, *args: Any, **kwargs: Any) -> str:
75 | return request.param[2]
76 |
77 | def partition(self, *args: Any, **kwargs: Any) -> int:
78 | return 0
79 |
80 | def offset(self, *args: Any, **kwargs: Any) -> int:
81 | return 0
82 |
83 | def value(self) -> bytes:
84 | return request.getfixturevalue(request.param[0])(request.param[1])
85 |
86 | def mock_subscribe(
87 | self: Any, topics: Any, on_assign: Any = None, *args: Any, **kwargs: Any
88 | ) -> None:
89 | pass
90 |
91 | def generate_kafka_messages() -> Generator[Optional[MockKafkaMessage], None, None]:
92 | yield MockKafkaMessage()
93 | while True:
94 | yield None
95 |
96 | kafka_messages_generator = generate_kafka_messages()
97 |
98 | def mock_poll(self: Any, timeout: Any = None) -> Optional[MockKafkaMessage]:
99 | return next(kafka_messages_generator)
100 |
101 | def mock_commit(self: Any, message: Any = None, *args: Any, **kwargs: Any) -> None:
102 | return None
103 |
104 | def mock_close(self: Any, *args: Any, **kwargs: Any) -> None:
105 | pass
106 |
107 | monkeypatch.setattr(Consumer, "subscribe", mock_subscribe)
108 | monkeypatch.setattr(Consumer, "poll", mock_poll)
109 | monkeypatch.setattr(Consumer, "commit", mock_commit)
110 | monkeypatch.setattr(Consumer, "close", mock_close)
111 | result_fixture = request.getfixturevalue(request.param[0])
112 | return json.loads(result_fixture(request.param[1]).decode("utf-8"))
113 |
114 |
115 | @pytest.fixture(scope="module")
116 | def mock_webhook_change_log_message() -> Callable[[dict], bytes]:
117 | change_log_message = {
118 | "action": "Create",
119 | "resourceType": "run",
120 | "status": "TRIGGERED",
121 | "trigger": {
122 | "by": {"orgId": "test_org", "userId": "test_user"},
123 | "origin": "UI",
124 | "at": "2022-11-16T16:31:32.447Z",
125 | },
126 | "context": {
127 | "entity": None,
128 | "blueprint": "Service",
129 | "runId": "r_jE5FhDURh4Uen2Qr",
130 | },
131 | "diff": {
132 | "before": None,
133 | "after": {
134 | "id": "r_jE5FhDURh4Uen2Qr",
135 | "status": "IN_PROGRESS",
136 | "blueprint": {"identifier": "Service", "title": "Service"},
137 | "action": "Create",
138 | "endedAt": None,
139 | "source": "UI",
140 | "relatedEntityExists": False,
141 | "relatedBlueprintExists": True,
142 | "properties": {},
143 | "createdAt": "2022-11-16T16:31:32.447Z",
144 | "updatedAt": "2022-11-16T16:31:32.447Z",
145 | "createdBy": "test_user",
146 | "updatedBy": "test_user",
147 | },
148 | },
149 | "changelogDestination": {
150 | "type": "WEBHOOK",
151 | "agent": True,
152 | "url": "http://localhost:80/api/test",
153 | },
154 | }
155 |
156 | def get_change_log_message(invocation_method: dict) -> bytes:
157 | if invocation_method is not None:
158 | change_log_message["changelogDestination"] = invocation_method
159 | return json.dumps(change_log_message).encode()
160 |
161 | return get_change_log_message
162 |
163 |
164 | @pytest.fixture(scope="module")
165 | def webhook_run_payload() -> dict:
166 | return {
167 | "action": "Create",
168 | "resourceType": "run",
169 | "status": "TRIGGERED",
170 | "trigger": {
171 | "by": {"orgId": "test_org", "userId": "test_user"},
172 | "origin": "UI",
173 | "at": "2022-11-16T16:31:32.447Z",
174 | },
175 | "context": {
176 | "entity": None,
177 | "blueprint": "Service",
178 | "runId": "r_jE5FhDURh4Uen2Qr",
179 | },
180 | "payload": {
181 | "entity": None,
182 | "action": {
183 | "id": "action_34aweFQtayw7SCVb",
184 | "identifier": "Create",
185 | "title": "Create",
186 | "icon": "DefaultBlueprint",
187 | "userInputs": {
188 | "properties": {
189 | "foo": {"type": "string", "description": "Description"},
190 | "bar": {"type": "number", "description": "Description"},
191 | },
192 | "required": [],
193 | },
194 | "invocationMethod": {
195 | "type": "WEBHOOK",
196 | "agent": True,
197 | "url": "http://localhost:80/api/test",
198 | },
199 | "trigger": "CREATE",
200 | "description": "",
201 | "blueprint": "Service",
202 | "createdAt": "2022-11-15T09:58:52.863Z",
203 | "createdBy": "test_user",
204 | "updatedAt": "2022-11-15T09:58:52.863Z",
205 | "updatedBy": "test_user",
206 | },
207 | "properties": {},
208 | },
209 | "headers": {
210 | "X-Port-Signature": "v1,uuBMfcio3oscejO5bOtL97K1AmiZjxDvou7sChjMNeE=",
211 |             # the real signature of this payload, computed with the
212 |             # secret key "test" and the hardcoded timestamp mock
213 | "X-Port-Timestamp": 1713277889,
214 | },
215 | }
216 |
217 |
218 | @pytest.fixture(scope="module")
219 | def mock_webhook_run_message(webhook_run_payload: dict) -> Callable[[dict], bytes]:
220 | def get_run_message(invocation_method: dict) -> bytes:
221 | if invocation_method is not None:
222 | webhook_run_payload["payload"]["action"][
223 | "invocationMethod"
224 | ] = invocation_method
225 | # When mutating the payload, we need to ensure that the
226 | # headers are also updated
227 | timestamp = webhook_run_payload["headers"]["X-Port-Timestamp"]
228 | webhook_run_payload["headers"] = {}
229 | webhook_run_payload["headers"]["X-Port-Signature"] = sign_sha_256(
230 | json.dumps(webhook_run_payload, separators=(",", ":")),
231 | "test",
232 | str(timestamp),
233 | )
234 | webhook_run_payload["headers"]["X-Port-Timestamp"] = timestamp
235 | return json.dumps(webhook_run_payload).encode()
236 |
237 | return get_run_message
238 |
239 |
240 | @pytest.fixture()
241 | def mock_control_the_payload_config(monkeypatch: MonkeyPatch) -> list[dict[str, Any]]:
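   |     # Two mappings: the first points `enabled` at a non-existing payload
   |     # field (its jq expression yields null), so the always-enabled second
   |     # mapping is the one expected to shape the request.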
242 | mapping = [
243 | {
244 | "enabled": ".payload.non-existing-field",
245 | "body": ".",
246 | "headers": {
247 | "MY-HEADER": ".resourceType",
248 | },
249 | "query": {},
250 | },
251 | {
252 | "enabled": True,
253 | "body": ".",
254 | "headers": {
255 | "MY-HEADER": ".resourceType",
256 | },
257 | "query": {},
258 | "report": {"link": '"http://test.com"'},
259 | },
260 | ]
261 | control_the_payload_config = parse_obj_as(list[Mapping], mapping)
262 |
263 | monkeypatch.setattr(
264 | "invokers.webhook_invoker.control_the_payload_config",
265 | control_the_payload_config,
266 | )
267 |
268 | return control_the_payload_config
269 |
270 |
271 | @pytest.fixture
272 | def mock_timestamp(monkeypatch: MonkeyPatch, request: Any) -> None:
273 |     def fixed_time() -> int:
274 |         return 1713277889
275 |
276 |     monkeypatch.setattr("time.time", fixed_time)
277 |
--------------------------------------------------------------------------------
/tests/unit/processors/kafka/test_kafka_to_webhook_processor.py:
--------------------------------------------------------------------------------
1 | import json
2 | import time
3 | from copy import deepcopy
4 | from threading import Timer
5 | from typing import Any
6 | from unittest import mock
7 | from unittest.mock import ANY, call
8 | import pytest
9 | from _pytest.monkeypatch import MonkeyPatch
10 | from consumers.kafka_consumer import logger as consumer_logger
11 | from core.config import Mapping, settings
12 | from pytest_mock import MockFixture
13 | from streamers.kafka.kafka_streamer import KafkaStreamer
14 |
15 | from app.utils import sign_sha_256
16 | from tests.unit.processors.kafka.conftest import Consumer, terminate_consumer
17 |
18 |
19 | @pytest.mark.parametrize("mock_requests", [{"status_code": 200}], indirect=True)
20 | @pytest.mark.parametrize(
21 | "mock_kafka",
22 | [
23 | ("mock_webhook_change_log_message", None, settings.KAFKA_CHANGE_LOG_TOPIC),
24 | ("mock_webhook_run_message", None, settings.KAFKA_RUNS_TOPIC),
25 | ],
26 | indirect=True,
27 | )
28 | @pytest.mark.parametrize("mock_timestamp", [{}], indirect=True)
29 | def test_single_stream_success(
30 | mock_requests: None, mock_kafka: dict, mock_timestamp: None
31 | ) -> None:
32 | Timer(0.01, terminate_consumer).start()
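   |     # terminate_consumer sends SIGINT shortly after stream() starts, so
   |     # the consumer exits once the single mocked message is processed.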
33 |
34 | with mock.patch.object(consumer_logger, "error") as mock_error:
35 | streamer = KafkaStreamer(Consumer())
36 | streamer.stream()
37 |
38 | mock_error.assert_not_called()
39 |
40 |
41 | @pytest.mark.parametrize("mock_requests", [{"status_code": 500}], indirect=True)
42 | @pytest.mark.parametrize(
43 | "mock_kafka",
44 | [
45 | ("mock_webhook_change_log_message", None, settings.KAFKA_CHANGE_LOG_TOPIC),
46 | ("mock_webhook_run_message", None, settings.KAFKA_RUNS_TOPIC),
47 | ],
48 | indirect=True,
49 | )
50 | @pytest.mark.parametrize("mock_timestamp", [{}], indirect=True)
51 | def test_single_stream_failed(
52 | mock_requests: None, mock_kafka: dict, mock_timestamp: None
53 | ) -> None:
54 | Timer(0.01, terminate_consumer).start()
55 |
56 | with mock.patch.object(consumer_logger, "error") as mock_error:
57 | streamer = KafkaStreamer(Consumer())
58 | streamer.stream()
59 |
60 | mock_error.assert_called_once_with(
61 | "Failed process message from topic %s, partition %d, offset %d: %s",
62 | ANY,
63 | 0,
64 | 0,
65 | "Invoker failed with status code: 500",
66 | )
67 |
68 |
69 | @pytest.mark.parametrize(
70 | "mock_requests",
71 | [{"status_code": 200}],
72 | indirect=True,
73 | )
74 | @pytest.mark.parametrize(
75 | "mock_kafka",
76 | [
77 | ("mock_webhook_change_log_message", None, settings.KAFKA_CHANGE_LOG_TOPIC),
78 | ("mock_webhook_run_message", None, settings.KAFKA_RUNS_TOPIC),
79 | ],
80 | indirect=True,
81 | )
82 | @pytest.mark.parametrize("mock_timestamp", [{}], indirect=True)
83 | def test_single_stream_success_control_the_payload(
84 | monkeypatch: MonkeyPatch,
85 | mocker: MockFixture,
86 | mock_requests: None,
87 | mock_kafka: dict,
88 | mock_timestamp: None,
89 | mock_control_the_payload_config: list[Mapping],
90 | ) -> None:
91 | expected_body = deepcopy(mock_kafka)
92 | expected_headers = {"MY-HEADER": mock_kafka["resourceType"]}
93 |     expected_query: dict[str, Any] = {}
94 | if "changelogDestination" not in mock_kafka:
95 | del expected_body["headers"]["X-Port-Signature"]
96 | del expected_body["headers"]["X-Port-Timestamp"]
97 |
98 | expected_headers["X-Port-Timestamp"] = ANY
99 | expected_headers["X-Port-Signature"] = ANY
100 | Timer(0.01, terminate_consumer).start()
101 | request_mock = mocker.patch("requests.request")
102 | request_mock.return_value.headers = {}
103 | request_mock.return_value.text = "test"
104 | request_mock.return_value.status_code = 200
105 | request_mock.return_value.json.return_value = {}
106 | mocker.patch("pathlib.Path.is_file", side_effect=(True,))
107 |
108 | with mock.patch.object(consumer_logger, "error") as mock_error:
109 | streamer = KafkaStreamer(Consumer())
110 | streamer.stream()
111 | request_mock.assert_called_once_with(
112 | "POST",
113 | ANY,
114 | json=expected_body,
115 | headers=expected_headers,
116 | params=expected_query,
117 | timeout=settings.WEBHOOK_INVOKER_TIMEOUT,
118 | )
119 |
120 | mock_error.assert_not_called()
121 |
122 |
123 | @pytest.mark.parametrize(
124 | "mock_requests",
125 | [{"status_code": 200}],
126 | indirect=True,
127 | )
128 | @pytest.mark.parametrize(
129 | "mock_kafka",
130 | [
131 | (
132 | "mock_webhook_run_message",
133 | {
134 | "type": "WEBHOOK",
135 | "agent": True,
136 | "url": "http://localhost:80/api/test",
137 | "synchronized": True,
138 | },
139 | settings.KAFKA_RUNS_TOPIC,
140 | ),
141 | ],
142 | indirect=True,
143 | )
144 | @pytest.mark.parametrize("mock_timestamp", [{}], indirect=True)
145 | def test_invocation_method_synchronized(
146 | monkeypatch: MonkeyPatch,
147 | mocker: MockFixture,
148 | mock_requests: None,
149 | mock_kafka: dict,
150 | mock_timestamp: None,
151 | mock_control_the_payload_config: list[Mapping],
152 | webhook_run_payload: dict,
153 | ) -> None:
154 | expected_body = deepcopy(webhook_run_payload)
155 | expected_headers = {"MY-HEADER": mock_kafka["resourceType"]}
156 |
157 |     expected_query: dict[str, Any] = {}
158 | Timer(0.01, terminate_consumer).start()
159 | request_mock = mocker.patch("requests.request")
160 | request_patch_mock = mocker.patch("requests.patch")
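   |     # A synchronized invocation reports back via requests.patch twice:
   |     # once with the webhook response, once with the final run status
   |     # (asserted below).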
161 | mocker.patch("pathlib.Path.is_file", side_effect=(True,))
162 |
163 | del expected_body["headers"]["X-Port-Signature"]
164 | del expected_body["headers"]["X-Port-Timestamp"]
165 |
166 | expected_headers["X-Port-Timestamp"] = ANY
167 | expected_headers["X-Port-Signature"] = ANY
168 | with mock.patch.object(consumer_logger, "error") as mock_error:
169 | streamer = KafkaStreamer(Consumer())
170 | streamer.stream()
171 | request_mock.assert_called_once_with(
172 | "POST",
173 | ANY,
174 | json=expected_body,
175 | headers=expected_headers,
176 | params=expected_query,
177 | timeout=settings.WEBHOOK_INVOKER_TIMEOUT,
178 | )
179 |
180 | request_patch_mock.assert_has_calls(
181 | calls=[
182 | call(
183 | f"{settings.PORT_API_BASE_URL}/v1/actions/runs/"
184 | f"{webhook_run_payload['context']['runId']}/response",
185 | json=ANY,
186 | headers={},
187 | ),
188 | call().ok.__bool__(),
189 | call(
190 | f"{settings.PORT_API_BASE_URL}/v1/actions/runs/"
191 | f"{webhook_run_payload['context']['runId']}",
192 | json={"status": "SUCCESS"},
193 | headers={},
194 | ),
195 | call().ok.__bool__(),
196 | ]
197 | )
198 |
199 | mock_error.assert_not_called()
200 |
201 |
202 | @pytest.mark.parametrize("mock_requests", [{"status_code": 200}], indirect=True)
203 | @pytest.mark.parametrize(
204 | "mock_kafka",
205 | [
206 | (
207 | "mock_webhook_change_log_message",
208 | {
209 | "type": "WEBHOOK",
210 | "agent": True,
211 | "url": "http://localhost:80/api/test",
212 | "method": "GET",
213 | },
214 | settings.KAFKA_CHANGE_LOG_TOPIC,
215 | ),
216 | (
217 | "mock_webhook_run_message",
218 | {
219 | "type": "WEBHOOK",
220 | "agent": True,
221 | "url": "http://localhost:80/api/test",
222 | "method": "GET",
223 | },
224 | settings.KAFKA_RUNS_TOPIC,
225 | ),
226 | ],
227 | indirect=True,
228 | )
229 | @pytest.mark.parametrize("mock_timestamp", [{}], indirect=True)
230 | def test_invocation_method_method_override(
231 | monkeypatch: MonkeyPatch,
232 | mocker: MockFixture,
233 | mock_requests: None,
234 | mock_kafka: dict,
235 | mock_timestamp: None,
236 | mock_control_the_payload_config: list[Mapping],
237 | ) -> None:
238 | expected_body = mock_kafka
239 | expected_headers = {
240 | "MY-HEADER": mock_kafka["resourceType"],
241 | }
242 |
243 | if "changelogDestination" not in mock_kafka:
244 | del expected_body["headers"]["X-Port-Signature"]
245 | del expected_body["headers"]["X-Port-Timestamp"]
246 |
247 | expected_headers["X-Port-Timestamp"] = str(time.time())
248 | expected_headers["X-Port-Signature"] = sign_sha_256(
249 | json.dumps(expected_body, separators=(",", ":")), "test", str(time.time())
250 | )
251 |
252 |     expected_query: dict[str, Any] = {}
253 | Timer(0.01, terminate_consumer).start()
254 | request_mock = mocker.patch("requests.request")
255 | mocker.patch("pathlib.Path.is_file", side_effect=(True,))
256 | with mock.patch.object(consumer_logger, "error") as mock_error:
257 | streamer = KafkaStreamer(Consumer())
258 | streamer.stream()
259 | request_mock.assert_called_once_with(
260 | "GET",
261 | ANY,
262 | json=expected_body,
263 |             # the signature headers are removed from the body, as
264 |             # they shouldn't concern the invoked webhook
265 | headers=expected_headers,
266 | params=expected_query,
267 | timeout=settings.WEBHOOK_INVOKER_TIMEOUT,
268 | )
269 |
270 | mock_error.assert_not_called()
271 |
--------------------------------------------------------------------------------
/tests/unit/streamers/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/port-labs/port-agent/b7488dec608aec512d523d0618f61f04cfc3502a/tests/unit/streamers/__init__.py
--------------------------------------------------------------------------------
/tests/unit/streamers/kafka/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/port-labs/port-agent/b7488dec608aec512d523d0618f61f04cfc3502a/tests/unit/streamers/kafka/__init__.py
--------------------------------------------------------------------------------
/tests/unit/streamers/kafka/conftest.py:
--------------------------------------------------------------------------------
1 | import json
2 | import os
3 | from signal import SIGINT
4 | from typing import Any, Callable, Generator, Optional
5 |
6 | import port_client
7 | import pytest
8 | import requests
9 | from _pytest.monkeypatch import MonkeyPatch
10 | from confluent_kafka import Consumer as _Consumer
11 |
12 | from app.utils import sign_sha_256
13 |
14 |
15 | @pytest.fixture
16 | def mock_timestamp(monkeypatch: MonkeyPatch, request: Any) -> None:
17 |     def fixed_time() -> int:
18 |         return 1713277889
19 |
20 |     monkeypatch.setattr("time.time", fixed_time)
21 |
22 |
23 | @pytest.fixture
24 | def mock_requests(monkeypatch: MonkeyPatch, request: Any) -> None:
25 | class MockResponse:
26 | status_code = request.param.get("status_code")
27 | text = "Invoker failed with status code: %d" % status_code
28 |
29 | def json(self) -> dict:
30 | return request.param.get("json")
31 |
32 | @property
33 | def ok(self) -> bool:
34 | return 200 <= self.status_code <= 299
35 |
36 | def raise_for_status(self) -> None:
37 | if 400 <= self.status_code <= 599:
38 | raise Exception(self.text)
39 |
40 | def mock_request(*args: Any, **kwargs: Any) -> MockResponse:
41 | return MockResponse()
42 |
43 | monkeypatch.setattr(port_client, "get_port_api_headers", lambda *args: {})
44 | monkeypatch.setattr(requests, "request", mock_request)
45 | monkeypatch.setattr(requests, "get", mock_request)
46 | monkeypatch.setattr(requests, "post", mock_request)
47 | monkeypatch.setattr(requests, "delete", mock_request)
48 | monkeypatch.setattr(requests, "put", mock_request)
49 |
50 |
51 | def terminate_consumer() -> None:
52 | os.kill(os.getpid(), SIGINT)
53 |
54 |
55 | class Consumer(_Consumer):
56 | def __init__(self) -> None:
57 | pass
58 |
59 | def subscribe(
60 | self, topics: Any, on_assign: Any = None, *args: Any, **kwargs: Any
61 | ) -> None:
62 | pass
63 |
64 | def poll(self, timeout: Any = None) -> None:
65 | pass
66 |
67 | def commit(self, message: Any = None, *args: Any, **kwargs: Any) -> None:
68 | pass
69 |
70 | def close(self, *args: Any, **kwargs: Any) -> None:
71 | pass
72 |
73 |
74 | @pytest.fixture
75 | def mock_kafka(monkeypatch: MonkeyPatch, request: Any) -> None:
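   |     # request.param is a 3-tuple: (name of the message fixture to load,
   |     # invocation-method override or None, topic to report for the message).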
76 | class MockKafkaMessage:
77 | def error(self) -> None:
78 | return None
79 |
80 | def topic(self, *args: Any, **kwargs: Any) -> str:
81 | return request.param[2]
82 |
83 | def partition(self, *args: Any, **kwargs: Any) -> int:
84 | return 0
85 |
86 | def offset(self, *args: Any, **kwargs: Any) -> int:
87 | return 0
88 |
89 | def value(self) -> bytes:
90 | return request.getfixturevalue(request.param[0])(request.param[1])
91 |
92 | def mock_subscribe(
93 | self: Any, topics: Any, on_assign: Any = None, *args: Any, **kwargs: Any
94 | ) -> None:
95 | pass
96 |
97 | def generate_kafka_messages() -> Generator[Optional[MockKafkaMessage], None, None]:
98 | yield MockKafkaMessage()
99 | while True:
100 | yield None
101 |
102 | kafka_messages_generator = generate_kafka_messages()
103 |
104 | def mock_poll(self: Any, timeout: Any = None) -> Optional[MockKafkaMessage]:
105 | return next(kafka_messages_generator)
106 |
107 | def mock_commit(self: Any, message: Any = None, *args: Any, **kwargs: Any) -> None:
108 | return None
109 |
110 | def mock_close(self: Any, *args: Any, **kwargs: Any) -> None:
111 | pass
112 |
113 | monkeypatch.setattr(Consumer, "subscribe", mock_subscribe)
114 | monkeypatch.setattr(Consumer, "poll", mock_poll)
115 | monkeypatch.setattr(Consumer, "commit", mock_commit)
116 | monkeypatch.setattr(Consumer, "close", mock_close)
117 |
118 |
119 | @pytest.fixture(scope="module")
120 | def mock_webhook_run_message() -> Callable[[dict], bytes]:
121 | run_message: dict = {
122 | "action": "Create",
123 | "resourceType": "run",
124 | "status": "TRIGGERED",
125 | "trigger": {
126 | "by": {"orgId": "test_org", "userId": "test_user"},
127 | "origin": "UI",
128 | "at": "2022-11-16T16:31:32.447Z",
129 | },
130 | "context": {
131 | "entity": None,
132 | "blueprint": "Service",
133 | "runId": "r_jE5FhDURh4Uen2Qr",
134 | },
135 | "payload": {
136 | "entity": None,
137 | "action": {
138 | "id": "action_34aweFQtayw7SCVb",
139 | "identifier": "Create",
140 | "title": "Create",
141 | "icon": "DefaultBlueprint",
142 | "userInputs": {
143 | "properties": {
144 | "foo": {"type": "string", "description": "Description"},
145 | "bar": {"type": "number", "description": "Description"},
146 | },
147 | "required": [],
148 | },
149 | "invocationMethod": {
150 | "type": "WEBHOOK",
151 | "agent": True,
152 | "url": "http://localhost:80/api/test",
153 | },
154 | "trigger": "CREATE",
155 | "description": "",
156 | "blueprint": "Service",
157 | "createdAt": "2022-11-15T09:58:52.863Z",
158 | "createdBy": "test_user",
159 | "updatedAt": "2022-11-15T09:58:52.863Z",
160 | "updatedBy": "test_user",
161 | },
162 | "properties": {},
163 | },
164 | "headers": {
165 | "X-Port-Signature": "v1,uuBMfcio3oscejO5bOtL97K1AmiZjxDvou7sChjMNeE=",
166 |         # the real signature of this payload, computed with the
167 |         # secret key "test" and the hardcoded timestamp mock
168 | "X-Port-Timestamp": 1713277889,
169 | },
170 | }
171 |
172 | def get_run_message(invocation_method: dict) -> bytes:
173 | if invocation_method is not None:
174 | run_message["payload"]["action"]["invocationMethod"] = invocation_method
175 | # When mutating the payload, we need to ensure that the
176 | # headers are also updated
177 | timestamp = run_message["headers"]["X-Port-Timestamp"]
178 | run_message["headers"] = {}
179 | run_message["headers"]["X-Port-Signature"] = sign_sha_256(
180 | json.dumps(run_message, separators=(",", ":")),
181 | "test",
182 | str(timestamp),
183 | )
184 | run_message["headers"]["X-Port-Timestamp"] = timestamp
185 | return json.dumps(run_message).encode()
186 |
187 | return get_run_message
188 |
--------------------------------------------------------------------------------
/tests/unit/streamers/kafka/test_kafka_streamer.py:
--------------------------------------------------------------------------------
1 | from threading import Timer
2 | from unittest import mock
3 | from unittest.mock import ANY, call
4 |
5 | import pytest
6 | from consumers.kafka_consumer import logger as consumer_logger
7 | from core.config import settings
8 | from streamers.kafka.kafka_streamer import KafkaStreamer
9 | from streamers.kafka.kafka_streamer import logger as streamer_logger
10 |
11 | from tests.unit.streamers.kafka.conftest import Consumer, terminate_consumer
12 |
13 |
14 | @pytest.mark.parametrize("mock_requests", [{"status_code": 200}], indirect=True)
15 | @pytest.mark.parametrize(
16 | "mock_kafka",
17 | [
18 | ("mock_webhook_run_message", None, settings.KAFKA_RUNS_TOPIC),
19 | ],
20 | indirect=True,
21 | )
22 | @pytest.mark.parametrize("mock_timestamp", [{}], indirect=True)
23 | def test_single_stream_success(
24 | mock_requests: None, mock_kafka: None, mock_timestamp: None
25 | ) -> None:
26 | Timer(0.01, terminate_consumer).start()
27 |
28 | with mock.patch.object(consumer_logger, "error") as mock_error:
29 | streamer = KafkaStreamer(Consumer())
30 | streamer.stream()
31 |
32 | mock_error.assert_not_called()
33 |
34 |
35 | @pytest.mark.parametrize("mock_requests", [{"status_code": 500}], indirect=True)
36 | @pytest.mark.parametrize(
37 | "mock_kafka",
38 | [
39 | ("mock_webhook_run_message", None, settings.KAFKA_RUNS_TOPIC),
40 | ],
41 | indirect=True,
42 | )
43 | @pytest.mark.parametrize("mock_timestamp", [{}], indirect=True)
44 | def test_single_stream_failed(
45 | mock_requests: None, mock_kafka: None, mock_timestamp: None
46 | ) -> None:
47 | Timer(0.01, terminate_consumer).start()
48 |
49 | with mock.patch.object(consumer_logger, "error") as mock_error:
50 | streamer = KafkaStreamer(Consumer())
51 | streamer.stream()
52 |
53 | mock_error.assert_called_once_with(
54 | "Failed process message from topic %s, partition %d, offset %d: %s",
55 | ANY,
56 | 0,
57 | 0,
58 | "Invoker failed with status code: 500",
59 | )
60 |
61 |
62 | @pytest.mark.parametrize(
63 | "mock_kafka",
64 | [
65 | ("mock_webhook_run_message", {"agent": False}, settings.KAFKA_RUNS_TOPIC),
66 | ],
67 | indirect=True,
68 | )
69 | @pytest.mark.parametrize("mock_timestamp", [{}], indirect=True)
70 | def test_single_stream_skipped_due_to_agentless(
71 | mock_kafka: None, mock_timestamp: None
72 | ) -> None:
73 | Timer(0.01, terminate_consumer).start()
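   |     # With "agent": False the run is not meant for the agent, so the
   |     # streamer should log a skip instead of invoking the webhook.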
74 | with mock.patch.object(consumer_logger, "error") as mock_error, mock.patch.object(
75 | streamer_logger, "info"
76 | ) as mock_info:
77 | streamer = KafkaStreamer(Consumer())
78 | streamer.stream()
79 |
80 | mock_error.assert_not_called()
81 |
82 | mock_info.assert_has_calls(
83 | [
84 | call(ANY, ANY),
85 | call(
86 | "Skip process message"
87 | " from topic %s, partition %d, offset %d: not for agent",
88 | ANY,
89 | 0,
90 | 0,
91 | ),
92 | ]
93 | )
94 |
--------------------------------------------------------------------------------