├── .github ├── codeql-config.yml └── workflows │ ├── build.yml │ ├── lint-pr.yml │ └── release.yml ├── .gitignore ├── .gitmodules ├── .pre-commit-config.yaml ├── .release-please-manifest.json ├── CODEOWNERS ├── CONTRIBUTING.md ├── README.md ├── hooks ├── README.md └── openfeature-hooks-opentelemetry │ ├── CHANGELOG.md │ ├── LICENSE │ ├── README.md │ ├── pyproject.toml │ ├── src │ └── openfeature │ │ └── contrib │ │ └── hook │ │ └── opentelemetry │ │ └── __init__.py │ └── tests │ ├── __init__.py │ └── test_otel.py ├── mypy.ini ├── providers ├── README.md ├── openfeature-provider-flagd │ ├── CHANGELOG.md │ ├── LICENSE │ ├── README.md │ ├── pyproject.toml │ ├── pytest.ini │ ├── src │ │ └── openfeature │ │ │ ├── .gitignore │ │ │ └── contrib │ │ │ └── provider │ │ │ └── flagd │ │ │ ├── __init__.py │ │ │ ├── config.py │ │ │ ├── flag_type.py │ │ │ ├── provider.py │ │ │ ├── resolvers │ │ │ ├── __init__.py │ │ │ ├── grpc.py │ │ │ ├── in_process.py │ │ │ ├── process │ │ │ │ ├── connector │ │ │ │ │ ├── __init__.py │ │ │ │ │ ├── file_watcher.py │ │ │ │ │ └── grpc_watcher.py │ │ │ │ ├── custom_ops.py │ │ │ │ ├── flags.py │ │ │ │ └── targeting.py │ │ │ └── protocol.py │ │ │ └── sync_metadata_hook.py │ └── tests │ │ ├── __init__.py │ │ ├── conftest.py │ │ ├── e2e │ │ ├── __init__.py │ │ ├── conftest.py │ │ ├── file │ │ │ ├── __init__.py │ │ │ ├── conftest.py │ │ │ └── test_flaqd.py │ │ ├── flagd_container.py │ │ ├── inprocess │ │ │ ├── __init__.py │ │ │ ├── conftest.py │ │ │ └── test_flaqd.py │ │ ├── parsers.py │ │ ├── paths.py │ │ ├── rpc │ │ │ ├── __init__.py │ │ │ ├── conftest.py │ │ │ └── test_flaqd.py │ │ ├── step │ │ │ ├── _utils.py │ │ │ ├── config_steps.py │ │ │ ├── context_steps.py │ │ │ ├── event_steps.py │ │ │ ├── flag_step.py │ │ │ └── provider_steps.py │ │ └── testfilter.py │ │ ├── flags │ │ ├── .gitignore │ │ ├── basic-flag-broken-default.json │ │ ├── basic-flag-broken-state.json │ │ ├── basic-flag-broken-targeting.json │ │ ├── basic-flag-broken-variants.json │ │ 
├── basic-flag-combined-metadata.json │ │ ├── basic-flag-disabled.json │ │ ├── basic-flag-invalid.not-json │ │ ├── basic-flag-metadata.json │ │ ├── basic-flag-no-state.json │ │ ├── basic-flag-set-metadata.json │ │ ├── basic-flag-wrong-structure.json │ │ ├── basic-flag-wrong-variant.json │ │ ├── basic-flag.json │ │ ├── basic-flag.yaml │ │ ├── invalid-flag-metadata-list.json │ │ ├── invalid-flag-metadata.json │ │ ├── invalid-flag-set-metadata-list.json │ │ ├── invalid-flag-set-metadata.json │ │ ├── invalid-fractional-args-wrong-content.json │ │ ├── invalid-fractional-args.json │ │ ├── invalid-fractional-weights-strings.json │ │ ├── invalid-fractional-weights.json │ │ ├── invalid-semver-args.json │ │ ├── invalid-semver-op.json │ │ └── invalid-stringcomp-args.json │ │ ├── test_config.py │ │ ├── test_errors.py │ │ ├── test_file_store.py │ │ ├── test_flagd.py │ │ ├── test_in_process.py │ │ ├── test_metadata.py │ │ └── test_targeting.py ├── openfeature-provider-flipt │ ├── CHANGELOG.md │ ├── LICENSE │ ├── README.md │ ├── pyproject.toml │ ├── src │ │ └── openfeature │ │ │ └── contrib │ │ │ └── provider │ │ │ └── flipt │ │ │ └── __init__.py │ └── tests │ │ ├── __init__.py │ │ ├── conftest.py │ │ └── test_provider.py └── openfeature-provider-ofrep │ ├── CHANGELOG.md │ ├── LICENSE │ ├── README.md │ ├── pyproject.toml │ ├── src │ └── openfeature │ │ └── contrib │ │ └── provider │ │ └── ofrep │ │ └── __init__.py │ └── tests │ ├── __init__.py │ ├── conftest.py │ └── test_provider.py ├── release-please-config.json ├── renovate.json └── ruff.toml /.github/codeql-config.yml: -------------------------------------------------------------------------------- 1 | name: "CodeQL config" 2 | 3 | paths-ignore: 4 | - tests 5 | -------------------------------------------------------------------------------- /.github/workflows/build.yml: -------------------------------------------------------------------------------- 1 | # This workflow uses actions that are not certified by GitHub. 
2 | # They are provided by a third-party and are governed by 3 | # separate terms of service, privacy policy, and support 4 | # documentation. 5 | 6 | name: "Build, lint, and test" 7 | 8 | on: 9 | push: 10 | branches: 11 | - main 12 | pull_request: 13 | types: 14 | - opened 15 | - synchronize 16 | - reopened 17 | branches: 18 | - main 19 | 20 | permissions: 21 | contents: read 22 | 23 | jobs: 24 | build: 25 | runs-on: ubuntu-latest 26 | strategy: 27 | matrix: 28 | python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] 29 | package: 30 | - "hooks/openfeature-hooks-opentelemetry" 31 | - "providers/openfeature-provider-flagd" 32 | - "providers/openfeature-provider-ofrep" 33 | 34 | steps: 35 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 36 | with: 37 | submodules: recursive 38 | 39 | - name: Set up Python ${{ matrix.python-version }} 40 | uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5 41 | with: 42 | python-version: ${{ matrix.python-version }} 43 | cache: "pip" 44 | 45 | - name: Install hatch 46 | run: pip install hatch 47 | 48 | - name: Building first to generate files 49 | run: hatch build 50 | working-directory: ${{ matrix.package }} 51 | 52 | - name: Type checking 53 | if: matrix.python-version == '3.13' 54 | working-directory: ${{ matrix.package }} 55 | run: hatch run mypy:run 56 | 57 | - name: Test with pytest 58 | run: hatch test -c 59 | working-directory: ${{ matrix.package }} 60 | 61 | - if: matrix.python-version == '3.13' 62 | name: Upload coverage to Codecov 63 | uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 # v5.4.3 64 | with: 65 | name: Code Coverage for ${{ matrix.package }} on Python ${{ matrix.python-version }} 66 | directory: ${{ matrix.package }} 67 | fail_ci_if_error: true # optional (default = false) 68 | verbose: true # optional (default = false) 69 | token: ${{ secrets.CODECOV_UPLOAD_TOKEN }} 70 | 71 | lint: 72 | runs-on: ubuntu-latest 73 | 74 | steps: 75 | - uses: 
actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 76 | - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5 77 | with: 78 | python-version: "3.13" 79 | cache: "pip" 80 | 81 | - name: Run pre-commit 82 | uses: pre-commit/action@2c7b3805fd2a0fd8c1884dcaebf91fc102a13ecd # v3.0.1 83 | 84 | sast: 85 | runs-on: ubuntu-latest 86 | permissions: 87 | actions: read 88 | contents: read 89 | security-events: write 90 | steps: 91 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 92 | 93 | - name: Initialize CodeQL 94 | uses: github/codeql-action/init@fca7ace96b7d713c7035871441bd52efbe39e27e # v3 95 | with: 96 | languages: python 97 | config-file: ./.github/codeql-config.yml 98 | 99 | - name: Perform CodeQL Analysis 100 | uses: github/codeql-action/analyze@fca7ace96b7d713c7035871441bd52efbe39e27e # v3 101 | -------------------------------------------------------------------------------- /.github/workflows/lint-pr.yml: -------------------------------------------------------------------------------- 1 | # This workflow uses actions that are not certified by GitHub. 2 | # They are provided by a third-party and are governed by 3 | # separate terms of service, privacy policy, and support 4 | # documentation. 5 | 6 | name: "Lint PR" 7 | 8 | on: 9 | pull_request_target: 10 | types: 11 | - opened 12 | - edited 13 | - synchronize 14 | 15 | permissions: 16 | pull-requests: write 17 | 18 | jobs: 19 | main: 20 | name: Validate PR title 21 | runs-on: ubuntu-latest 22 | steps: 23 | - uses: amannn/action-semantic-pull-request@0723387faaf9b38adef4775cd42cfd5155ed6017 # v5 24 | id: lint_pr_title 25 | env: 26 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 27 | 28 | - uses: marocchino/sticky-pull-request-comment@52423e01640425a022ef5fd42c6fb5f633a02728 # v2 29 | # When the previous steps fails, the workflow would stop. By adding this 30 | # condition you can continue the execution with the populated error message. 
31 | if: always() && (steps.lint_pr_title.outputs.error_message != null) 32 | with: 33 | header: pr-title-lint-error 34 | message: | 35 | Hey there and thank you for opening this pull request! 👋🏼 36 | 37 | We require pull request titles to follow the [Conventional Commits specification](https://www.conventionalcommits.org/en/v1.0.0/) and it looks like your proposed title needs to be adjusted. 38 | 39 | Details: 40 | 41 | ``` 42 | ${{ steps.lint_pr_title.outputs.error_message }} 43 | ``` 44 | 45 | # Delete a previous comment when the issue has been resolved 46 | - if: ${{ steps.lint_pr_title.outputs.error_message == null }} 47 | uses: marocchino/sticky-pull-request-comment@52423e01640425a022ef5fd42c6fb5f633a02728 # v2 48 | with: 49 | header: pr-title-lint-error 50 | delete: true 51 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Run Release Please 2 | 3 | on: 4 | workflow_dispatch: 5 | inputs: 6 | 7 | push: 8 | branches: 9 | - main 10 | 11 | permissions: 12 | contents: read 13 | 14 | jobs: 15 | release-please: 16 | runs-on: ubuntu-latest 17 | permissions: 18 | contents: write # for googleapis/release-please-action to create release commit 19 | pull-requests: write # for googleapis/release-please-action to create release PR 20 | # Release-please creates a PR that tracks all changes 21 | steps: 22 | - uses: googleapis/release-please-action@a02a34c4d625f9be7cb89156071d8567266a2445 # v4 23 | id: release 24 | with: 25 | token: ${{secrets.GITHUB_TOKEN}} 26 | target-branch: main 27 | - name: Dump Release Please Output 28 | env: 29 | RELEASE_PLEASE_OUTPUT: ${{ toJson(steps.release.outputs) }} 30 | run: | 31 | echo "$RELEASE_PLEASE_OUTPUT" 32 | outputs: 33 | release_created: ${{ steps.release.outputs.releases_created }} 34 | all: ${{ toJSON(steps.release.outputs) }} 35 | paths_released: ${{ 
steps.release.outputs.paths_released }} 36 | 37 | pypi-release: 38 | needs: release-please 39 | runs-on: ubuntu-latest 40 | if: ${{ fromJSON(needs.release-please.outputs.release_created || false) }} 41 | strategy: 42 | matrix: 43 | path: ${{ fromJSON(needs.release-please.outputs.paths_released || '[]') }} 44 | environment: publish 45 | permissions: 46 | # IMPORTANT: this permission is mandatory for trusted publishing to pypi 47 | id-token: write 48 | 49 | steps: 50 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 51 | with: 52 | submodules: recursive 53 | 54 | - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5 55 | with: 56 | python-version: '3.13' 57 | 58 | - name: Upgrade pip 59 | run: pip install --upgrade pip 60 | 61 | - name: Install hatch 62 | run: pip install hatch 63 | 64 | - name: Build a binary wheel and a source tarball 65 | run: hatch build 66 | working-directory: ${{ matrix.path }} 67 | 68 | - name: Publish a Python distribution to PyPI 69 | uses: pypa/gh-action-pypi-publish@release/v1 70 | with: 71 | packages-dir: ${{ matrix.path }}/dist 72 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | 5 | # C extensions 6 | *.so 7 | 8 | # Distribution / packaging 9 | bin/ 10 | build/ 11 | develop-eggs/ 12 | dist/ 13 | eggs/ 14 | lib64/ 15 | parts/ 16 | sdist/ 17 | var/ 18 | *.egg-info/ 19 | .installed.cfg 20 | *.egg 21 | 22 | # Installer logs 23 | pip-log.txt 24 | pip-delete-this-directory.txt 25 | 26 | # Unit test / coverage reports 27 | .tox/ 28 | .coverage 29 | .cache 30 | nosetests.xml 31 | coverage.xml 32 | 33 | # Translations 34 | *.mo 35 | 36 | # Mr Developer 37 | .mr.developer.cfg 38 | .project 39 | .pydevproject 40 | .idea 41 | 42 | # Rope 43 | .ropeproject 44 | 45 | # Django stuff: 46 | *.log 47 | *.pot 
48 | 49 | # Sphinx documentation 50 | docs/_build/ 51 | 52 | # Virtual env directories 53 | .venv 54 | 55 | # vscode 56 | .vscode/ 57 | -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "schemas"] 2 | path = providers/openfeature-provider-flagd/openfeature/schemas 3 | url = https://github.com/open-feature/schemas 4 | branch = protobuf-v0.6.1 5 | [submodule "providers/openfeature-provider-flagd/spec"] 6 | path = providers/openfeature-provider-flagd/openfeature/spec 7 | url = https://github.com/open-feature/spec 8 | [submodule "providers/openfeature-provider-flagd/openfeature/test-harness"] 9 | path = providers/openfeature-provider-flagd/openfeature/test-harness 10 | url = https://github.com/open-feature/flagd-testbed.git 11 | branch = v2.8.0 12 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | default_stages: [pre-commit] 2 | repos: 3 | - repo: https://github.com/astral-sh/ruff-pre-commit 4 | rev: v0.11.13 5 | hooks: 6 | - id: ruff 7 | args: [--fix] 8 | - id: ruff-format 9 | 10 | - repo: https://github.com/pre-commit/pre-commit-hooks 11 | rev: v5.0.0 12 | hooks: 13 | - id: check-toml 14 | - id: check-yaml 15 | - id: trailing-whitespace 16 | - id: check-merge-conflict 17 | -------------------------------------------------------------------------------- /.release-please-manifest.json: -------------------------------------------------------------------------------- 1 | { 2 | "hooks/openfeature-hooks-opentelemetry": "0.2.0", 3 | "providers/openfeature-provider-flagd": "0.2.3", 4 | "providers/openfeature-provider-ofrep": "0.1.1", 5 | "providers/openfeature-provider-flipt": "0.1.3" 6 | } 7 | -------------------------------------------------------------------------------- /CODEOWNERS: 
-------------------------------------------------------------------------------- 1 | # These owners will be the default owners for everything in 2 | # the repo, unless a later match takes precedence. 3 | # 4 | # Managed by Peribolos: https://github.com/open-feature/community/blob/main/config/open-feature/sdk-python/workgroup.yaml 5 | # 6 | * @open-feature/sdk-python-maintainers @open-feature/maintainers 7 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing 2 | 3 | ## Development 4 | 5 | ### System Requirements 6 | 7 | Python 3.9 and above are required. 8 | 9 | ### Target version(s) 10 | 11 | Python 3.9 and above are supported by the SDK. 12 | 13 | ### Installation and Dependencies 14 | 15 | We use [Hatch](https://hatch.pypa.io/) to manage the project. Hatch does not have built-in support for monorepos yet, so you must run it inside each package directory. 16 | 17 | To install Hatch, just run `pip install hatch`. 18 | 19 | You will also need to set up the `pre-commit` hooks. Run `pre-commit install` in the root directory of the repository. If you don't have `pre-commit` installed, you can install it with `pip install pre-commit`. 20 | 21 | > **Note** 22 | > Currently, our protobuf files are generated during `hatch build`. 23 | > Please run this command once to generate all necessary files. 24 | 25 | ### Testing 26 | 27 | Run tests by entering the package directory and running `hatch test`. 28 | 29 | We use `pytest` for our unit testing, making use of `pytest.mark.parametrize` to inject cases at scale. 30 | 31 | ### Integration tests 32 | 33 | The Flagd provider utilizes the [gherkin integration tests](https://github.com/open-feature/test-harness/blob/main/features/evaluation.feature) to validate against a live, seeded Flagd instance. 
34 | 35 | To run the integration tests you need to have a container runtime, such as Docker or Rancher Desktop, installed. 36 | 37 | ```bash 38 | hatch run test 39 | ``` 40 | 41 | ### Type checking 42 | 43 | Run `mypy` by entering the package directory and running `hatch run mypy:run`. 44 | 45 | ## Pull Request 46 | 47 | All contributions to the OpenFeature project are welcome via GitHub pull requests. 48 | 49 | To create a new PR, you will need to first fork the GitHub repository and clone upstream. 50 | 51 | ```bash 52 | git clone https://github.com/open-feature/python-sdk-contrib.git openfeature-python-sdk-contrib 53 | ``` 54 | 55 | Navigate to the repository folder 56 | 57 | ```bash 58 | cd python-sdk-contrib 59 | ``` 60 | 61 | Checkout submodules 62 | 63 | ```bash 64 | git submodule update --init --recursive 65 | ``` 66 | 67 | 68 | Add your fork as an origin 69 | 70 | ```bash 71 | git remote add fork https://github.com/YOUR_GITHUB_USERNAME/python-sdk-contrib.git 72 | ``` 73 | 74 | Ensure your development environment is all set up by building and testing 75 | 76 | ```bash 77 | cd <package-directory> 78 | hatch build 79 | hatch test 80 | ``` 81 | 82 | To start working on a new feature or bugfix, create a new branch and start working on it. 83 | 84 | ```bash 85 | git checkout -b feat/NAME_OF_FEATURE 86 | # Make your changes 87 | git commit -s -m "feat: my feature" 88 | git push fork feat/NAME_OF_FEATURE 89 | ``` 90 | 91 | Open a pull request against the main python-sdk-contrib repository. 92 | 93 | ### How to Receive Comments 94 | 95 | - If the PR is not ready for review, please mark it as 96 | [`draft`](https://github.blog/2019-02-14-introducing-draft-pull-requests/). 97 | - Make sure all required CI checks are clear. 98 | - Submit small, focused PRs addressing a single concern/issue. 99 | - Make sure the PR title reflects the contribution. 100 | - Write a summary that explains the change. 101 | - Include usage examples in the summary, where applicable. 
102 | 103 | ### How to Get PRs Merged 104 | 105 | A PR is considered to be **ready to merge** when: 106 | 107 | - Major feedback is resolved. 108 | - An urgent fix can be an exception as long as it has been actively communicated. 109 | 110 | Any Maintainer can merge the PR once it is **ready to merge**. Note that some 111 | PRs may not be merged immediately if the repo is in the process of a release and 112 | the maintainers decided to defer the PR to the next release train. 113 | 114 | If a PR has been stuck (e.g. there are lots of debates and people could not reach 115 | an agreement), the owner should try to get people aligned by: 116 | 117 | - Consolidating the perspectives and putting a summary in the PR. It is 118 | recommended to add a link into the PR description, which points to a comment 119 | with a summary in the PR conversation. 120 | - Tagging domain experts (by looking at the change history) in the PR asking 121 | for suggestions. 122 | - Reaching out to more people on the [CNCF OpenFeature Slack channel](https://cloud-native.slack.com/archives/C0344AANLA1). 123 | - Stepping back to see if it makes sense to narrow down the scope of the PR or 124 | split it up. 125 | - If none of the above works and the PR has been stuck for more than 2 weeks, 126 | the owner should bring it to the OpenFeature [meeting](README.md#contributing). 127 | 128 | ## Design Choices 129 | 130 | As with other OpenFeature SDKs, python-sdk follows the 131 | [openfeature-specification](https://github.com/open-feature/spec). 132 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # OpenFeature Python Contributions 2 | 3 | This repository is intended for OpenFeature contributions which are not included in the [OpenFeature SDK](https://github.com/open-feature/python-sdk). 
4 | 5 | The project includes: 6 | 7 | - [Providers](./providers) 8 | - [Hooks](./hooks) 9 | 10 | ## License 11 | 12 | Apache 2.0 - See [LICENSE](./LICENSE) for more information. 13 | -------------------------------------------------------------------------------- /hooks/README.md: -------------------------------------------------------------------------------- 1 | # OpenFeature Python Hooks 2 | 3 | Hooks are a mechanism whereby application developers can add arbitrary behavior to flag evaluation. 4 | They operate similarly to middleware in many web frameworks. 5 | Please see the [spec](https://openfeature.dev/specification/sections/hooks) for more details. 6 | -------------------------------------------------------------------------------- /hooks/openfeature-hooks-opentelemetry/CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | ## [0.2.0](https://github.com/open-feature/python-sdk-contrib/compare/openfeature-hooks-opentelemetry/v0.1.3...openfeature-hooks-opentelemetry/v0.2.0) (2025-02-21) 4 | 5 | 6 | ### ⚠ BREAKING CHANGES 7 | 8 | * drop Python 3.8 support ([#187](https://github.com/open-feature/python-sdk-contrib/issues/187)) 9 | 10 | ### 🐛 Bug Fixes 11 | 12 | * fix types for hints in OTel TracingHook ([#73](https://github.com/open-feature/python-sdk-contrib/issues/73)) ([24260fb](https://github.com/open-feature/python-sdk-contrib/commit/24260fb519d18590d82e9aae1cf824b9e6ca7f17)) 13 | * remove modifications to license files ([#81](https://github.com/open-feature/python-sdk-contrib/issues/81)) ([a23f61e](https://github.com/open-feature/python-sdk-contrib/commit/a23f61e1c14c70e45a4bce4a014d5599813f1d28)) 14 | 15 | 16 | ### 🧹 Chore 17 | 18 | * drop Python 3.8 support ([#187](https://github.com/open-feature/python-sdk-contrib/issues/187)) ([b55cc1e](https://github.com/open-feature/python-sdk-contrib/commit/b55cc1e0f823d05a330c12af6861dbd3bec69c3a)) 19 | * update ruff 
([#70](https://github.com/open-feature/python-sdk-contrib/issues/70)) ([9411d0f](https://github.com/open-feature/python-sdk-contrib/commit/9411d0f578238037b3c508faf8f23fe35a374b6c)) 20 | * update test config ([c16883e](https://github.com/open-feature/python-sdk-contrib/commit/c16883eb06b870b3a8fa4821492f48291208336b)) 21 | 22 | 23 | ### 🔄 Refactoring 24 | 25 | * add mypy and fix typing issues ([#72](https://github.com/open-feature/python-sdk-contrib/issues/72)) ([b405925](https://github.com/open-feature/python-sdk-contrib/commit/b4059255045cdb7054a35bc338207e23c42ce068)) 26 | 27 | ## [0.1.3](https://github.com/open-feature/python-sdk-contrib/compare/openfeature-hooks-opentelemetry/v0.1.2...openfeature-hooks-opentelemetry/v0.1.3) (2024-03-17) 28 | 29 | 30 | ### 📚 Documentation 31 | 32 | * fix add_hooks call in otel hooks README ([#68](https://github.com/open-feature/python-sdk-contrib/issues/68)) ([8f26275](https://github.com/open-feature/python-sdk-contrib/commit/8f26275a1504086effeb1ca92817671bf9ac27ff)) 33 | 34 | ## [0.1.2](https://github.com/open-feature/python-sdk-contrib/compare/openfeature-hooks-opentelemetry/v0.1.1...openfeature-hooks-opentelemetry/v0.1.2) (2024-03-13) 35 | 36 | 37 | ### 📚 Documentation 38 | 39 | * update OTel hook readme with correct import paths ([#65](https://github.com/open-feature/python-sdk-contrib/issues/65)) ([689662b](https://github.com/open-feature/python-sdk-contrib/commit/689662b59ac6117314d707322f26ab4f7b216cc0)) 40 | 41 | ## [0.1.1](https://github.com/open-feature/python-sdk-contrib/compare/openfeature-hooks-opentelemetry-v0.1.0...openfeature-hooks-opentelemetry/v0.1.1) (2024-03-11) 42 | 43 | 44 | ### 📚 Documentation 45 | 46 | * update OpenTelemetry hook README ([#49](https://github.com/open-feature/python-sdk-contrib/issues/49)) ([02157f4](https://github.com/open-feature/python-sdk-contrib/commit/02157f447ac7b4dfd7ceced3c6808cf512c680f0)) 47 | 48 | 49 | ### 🔄 Refactoring 50 | 51 | * move OTel hooks to openfeature.contrib.hook 
([#64](https://github.com/open-feature/python-sdk-contrib/issues/64)) ([7075656](https://github.com/open-feature/python-sdk-contrib/commit/707565669cfaf134fe445d54c26e0643756f7f8c)) 52 | -------------------------------------------------------------------------------- /hooks/openfeature-hooks-opentelemetry/README.md: -------------------------------------------------------------------------------- 1 | 2 | # OpenTelemetry Hook 3 | 4 | The OpenTelemetry tracing hook for OpenFeature provides a [spec compliant][otel-spec] way to automatically add a feature flag evaluation to a span as a span event. Since feature flags are dynamic and affect runtime behavior, it’s important to collect relevant feature flag telemetry signals. This can be used to determine the impact a feature has on a request, enabling enhanced observability use cases, such as A/B testing or progressive feature releases. 5 | 6 | ## Installation 7 | 8 | ``` 9 | pip install openfeature-hooks-opentelemetry 10 | ``` 11 | 12 | 13 | ## Usage 14 | 15 | OpenFeature provides various ways to register hooks. The location that a hook is registered affects when the hook is run. It's recommended to register the `TracingHook` globally in most situations but it's possible to only enable the hook on specific clients. You should **never** register the `TracingHook` globally and on a client. 16 | 17 | More information on hooks can be found in the [OpenFeature documentation][hook-concept]. 18 | 19 | ### Register Globally 20 | 21 | The `TracingHook` can be set globally. This will ensure that every flag evaluation will always create a span event, if an active span is available. 22 | 23 | ```python 24 | from openfeature import api 25 | from openfeature.contrib.hook.opentelemetry import TracingHook 26 | 27 | api.add_hooks([TracingHook()]) 28 | ``` 29 | 30 | ### Register Per Client 31 | 32 | The `TracingHook` can also be set on an individual client. 
This should only be done if it wasn't set globally and other clients shouldn't use this hook. Setting the hook on the client will ensure that every flag evaluation performed by this client will always create a span event, if an active span is available. 33 | 34 | ```python 35 | from openfeature import api 36 | from openfeature.contrib.hook.opentelemetry import TracingHook 37 | 38 | client = api.get_client("my-app") 39 | client.add_hooks([TracingHook()]) 40 | ``` 41 | 42 | ## License 43 | 44 | Apache 2.0 - See [LICENSE](./LICENSE) for more information. 45 | 46 | 47 | [otel-spec]: https://opentelemetry.io/docs/reference/specification/trace/semantic_conventions/feature-flags/ 48 | [hook-concept]: https://openfeature.dev/docs/reference/concepts/hooks 49 | -------------------------------------------------------------------------------- /hooks/openfeature-hooks-opentelemetry/pyproject.toml: -------------------------------------------------------------------------------- 1 | # pyproject.toml 2 | [build-system] 3 | requires = ["hatchling"] 4 | build-backend = "hatchling.build" 5 | 6 | [project] 7 | name = "openfeature-hooks-opentelemetry" 8 | version = "0.2.0" 9 | description = "OpenTelemetry hooks for the OpenFeature Python SDK" 10 | readme = "README.md" 11 | authors = [{ name = "OpenFeature", email = "openfeature-core@groups.io" }] 12 | license = { file = "LICENSE" } 13 | classifiers = [ 14 | "License :: OSI Approved :: Apache Software License", 15 | "Programming Language :: Python", 16 | "Programming Language :: Python :: 3", 17 | ] 18 | keywords = [] 19 | dependencies = [ 20 | "openfeature-sdk>=0.6.0", 21 | "opentelemetry-api", 22 | ] 23 | requires-python = ">=3.9" 24 | 25 | [project.urls] 26 | Homepage = "https://github.com/open-feature/python-sdk-contrib" 27 | 28 | [tool.hatch] 29 | 30 | [tool.hatch.envs.hatch-test] 31 | dependencies = [ 32 | "coverage[toml]>=6.5", 33 | "pytest", 34 | ] 35 | pre-install-commands = [ 36 | "hatch build", 37 | ] 38 | 39 | 
[tool.hatch.envs.hatch-test.scripts] 40 | run = "pytest {args:tests}" 41 | run-cov = "coverage run -m pytest {args:tests}" 42 | cov-combine = "coverage combine" 43 | cov-report = [ 44 | "coverage xml", 45 | "coverage html", 46 | "coverage report", 47 | ] 48 | cov = [ 49 | "test-cov", 50 | "cov-report", 51 | ] 52 | 53 | [tool.hatch.envs.mypy] 54 | dependencies = [ 55 | "mypy[faster-cache]>=1.13.0", 56 | ] 57 | 58 | [tool.hatch.envs.mypy.scripts] 59 | run = "mypy" 60 | 61 | [tool.hatch.build.targets.sdist] 62 | exclude = [ 63 | ".gitignore", 64 | ] 65 | 66 | [tool.hatch.build.targets.wheel] 67 | packages = ["src/openfeature"] 68 | 69 | [tool.mypy] 70 | mypy_path = "src" 71 | files = "src" 72 | 73 | python_version = "3.9" # should be identical to the minimum supported version 74 | namespace_packages = true 75 | explicit_package_bases = true 76 | local_partial_types = true 77 | pretty = true 78 | 79 | strict = true 80 | disallow_any_generics = false 81 | -------------------------------------------------------------------------------- /hooks/openfeature-hooks-opentelemetry/src/openfeature/contrib/hook/opentelemetry/__init__.py: -------------------------------------------------------------------------------- 1 | import json 2 | 3 | from openfeature.flag_evaluation import FlagEvaluationDetails 4 | from openfeature.hook import Hook, HookContext, HookHints 5 | from opentelemetry import trace 6 | 7 | OTEL_EVENT_NAME = "feature_flag" 8 | 9 | 10 | class EventAttributes: 11 | FLAG_KEY = f"{OTEL_EVENT_NAME}.key" 12 | FLAG_VARIANT = f"{OTEL_EVENT_NAME}.variant" 13 | PROVIDER_NAME = f"{OTEL_EVENT_NAME}.provider_name" 14 | 15 | 16 | class TracingHook(Hook): 17 | def after( 18 | self, 19 | hook_context: HookContext, 20 | details: FlagEvaluationDetails, 21 | hints: HookHints, 22 | ) -> None: 23 | current_span = trace.get_current_span() 24 | 25 | variant = details.variant 26 | if variant is None: 27 | if isinstance(details.value, str): 28 | variant = str(details.value) 29 | else: 30 | 
variant = json.dumps(details.value) 31 | 32 | event_attributes = { 33 | EventAttributes.FLAG_KEY: details.flag_key, 34 | EventAttributes.FLAG_VARIANT: variant, 35 | } 36 | 37 | if hook_context.provider_metadata: 38 | event_attributes[EventAttributes.PROVIDER_NAME] = ( 39 | hook_context.provider_metadata.name 40 | ) 41 | 42 | current_span.add_event(OTEL_EVENT_NAME, event_attributes) 43 | 44 | def error( 45 | self, hook_context: HookContext, exception: Exception, hints: HookHints 46 | ) -> None: 47 | current_span = trace.get_current_span() 48 | current_span.record_exception(exception) 49 | -------------------------------------------------------------------------------- /hooks/openfeature-hooks-opentelemetry/tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-feature/python-sdk-contrib/77daa45ff4e2962143f73049bf0836e7d33649dc/hooks/openfeature-hooks-opentelemetry/tests/__init__.py -------------------------------------------------------------------------------- /hooks/openfeature-hooks-opentelemetry/tests/test_otel.py: -------------------------------------------------------------------------------- 1 | from unittest.mock import Mock 2 | 3 | import pytest 4 | from opentelemetry import trace 5 | from opentelemetry.trace import Span 6 | 7 | from openfeature.contrib.hook.opentelemetry import TracingHook 8 | from openfeature.evaluation_context import EvaluationContext 9 | from openfeature.flag_evaluation import FlagEvaluationDetails, FlagType 10 | from openfeature.hook import HookContext 11 | 12 | 13 | @pytest.fixture 14 | def mock_get_current_span(monkeypatch): 15 | monkeypatch.setattr(trace, "get_current_span", Mock()) 16 | 17 | 18 | def test_before(mock_get_current_span): 19 | # Given 20 | hook = TracingHook() 21 | hook_context = HookContext( 22 | flag_key="flag_key", 23 | flag_type=FlagType.BOOLEAN, 24 | default_value=False, 25 | evaluation_context=EvaluationContext(), 26 | ) 27 | 
details = FlagEvaluationDetails( 28 | flag_key="flag_key", 29 | value=True, 30 | variant="enabled", 31 | reason=None, 32 | error_code=None, 33 | error_message=None, 34 | ) 35 | 36 | mock_span = Mock(spec=Span) 37 | trace.get_current_span.return_value = mock_span 38 | 39 | # When 40 | hook.after(hook_context, details, hints={}) 41 | 42 | # Then 43 | mock_span.add_event.assert_called_once_with( 44 | "feature_flag", 45 | { 46 | "feature_flag.key": "flag_key", 47 | "feature_flag.variant": "enabled", 48 | }, 49 | ) 50 | 51 | 52 | def test_error(mock_get_current_span): 53 | # Given 54 | hook = TracingHook() 55 | hook_context = HookContext( 56 | flag_key="flag_key", 57 | flag_type=FlagType.BOOLEAN, 58 | default_value=False, 59 | evaluation_context=EvaluationContext(), 60 | ) 61 | exception = Exception() 62 | 63 | mock_span = Mock(spec=Span) 64 | trace.get_current_span.return_value = mock_span 65 | 66 | # When 67 | hook.error(hook_context, exception, hints={}) 68 | 69 | # Then 70 | mock_span.record_exception.assert_called_once_with(exception) 71 | -------------------------------------------------------------------------------- /mypy.ini: -------------------------------------------------------------------------------- 1 | [mypy] 2 | files = hooks,providers/openfeature-provider-ofrep 3 | exclude = proto|tests 4 | untyped_calls_exclude = flagd.proto 5 | 6 | namespace_packages = True 7 | explicit_package_bases = True 8 | local_partial_types = True 9 | pretty = True 10 | strict = True 11 | disallow_any_generics = False 12 | 13 | [mypy-flagd.proto.*] 14 | follow_imports = silent 15 | 16 | [mypy-grpc] 17 | ignore_missing_imports = True 18 | -------------------------------------------------------------------------------- /providers/README.md: -------------------------------------------------------------------------------- 1 | # OpenFeature Python Providers 2 | 3 | Providers are responsible for performing flag evaluation. 
4 | They provide an abstraction between the underlying flag management system and OpenFeature itself. 5 | This allows providers to be changed without requiring a major code refactor. 6 | Please see the [spec](https://openfeature.dev/specification/sections/providers) for more details. 7 | -------------------------------------------------------------------------------- /providers/openfeature-provider-flagd/README.md: -------------------------------------------------------------------------------- 1 | # flagd Provider for OpenFeature 2 | 3 | This provider is designed to use flagd's [evaluation protocol](https://github.com/open-feature/schemas/blob/main/protobuf/schema/v1/schema.proto), or locally evaluate flags defined in a flagd [flag definition](https://github.com/open-feature/schemas/blob/main/json/flagd-definitions.json) via the OpenFeature Python SDK. 4 | 5 | ## Installation 6 | 7 | ``` 8 | pip install openfeature-provider-flagd 9 | ``` 10 | 11 | ## Configuration and Usage 12 | 13 | The flagd provider can operate in two modes: [RPC](#remote-resolver-rpc) (evaluation takes place in flagd, via gRPC calls) or [in-process](#in-process-resolver) (evaluation takes place in-process, with the provider getting a ruleset from a compliant sync-source). 14 | 15 | ### Remote resolver (RPC) 16 | 17 | This is the default mode of operation of the provider. 18 | In this mode, `FlagdProvider` communicates with [flagd](https://github.com/open-feature/flagd) via the gRPC protocol. 19 | Flag evaluations take place remotely at the connected flagd instance. 20 | 21 | Instantiate a new FlagdProvider instance and configure the OpenFeature SDK to use it: 22 | 23 | ```python 24 | from openfeature import api 25 | from openfeature.contrib.provider.flagd import FlagdProvider 26 | 27 | api.set_provider(FlagdProvider()) 28 | ``` 29 | 30 | ### In-process resolver 31 | 32 | This mode performs flag evaluations locally (in-process). 
Flag configurations for evaluation are obtained via gRPC protocol using [sync protobuf schema](https://buf.build/open-feature/flagd/file/main:sync/v1/sync_service.proto) service definition. 33 | 34 | Consider the following example to create a `FlagdProvider` with in-process evaluations, 35 | 36 | ```python 37 | from openfeature import api 38 | from openfeature.contrib.provider.flagd import FlagdProvider 39 | from openfeature.contrib.provider.flagd.config import ResolverType 40 | 41 | api.set_provider(FlagdProvider( 42 | resolver_type=ResolverType.IN_PROCESS, 43 | )) 44 | ``` 45 | 46 | In the above example, in-process handlers attempt to connect to a sync service on address `localhost:8013` to obtain [flag definitions](https://github.com/open-feature/schemas/blob/main/json/flags.json). 47 | 48 | 56 | ### File mode 57 | 58 | In-process resolvers can also work in an offline mode. 59 | To enable this mode, you should provide a valid flag configuration file with the option `offlineFlagSourcePath`. 60 | 61 | ```python 62 | from openfeature import api 63 | from openfeature.contrib.provider.flagd import FlagdProvider 64 | from openfeature.contrib.provider.flagd.config import ResolverType 65 | 66 | api.set_provider(FlagdProvider( 67 | resolver_type=ResolverType.FILE, 68 | offline_flag_source_path="my-flag.json", 69 | )) 70 | ``` 71 | 72 | Provider will attempt to detect file changes using polling. 73 | Polling happens at 5 second intervals and this is currently unconfigurable. 74 | This mode is useful for local development, tests and offline applications. 75 | 76 | ### Configuration options 77 | 78 | The default options can be defined in the FlagdProvider constructor. 
| Option name              | Environment variable name      | Type & Values                       | Default                       | Compatible resolver |
|--------------------------|--------------------------------|-------------------------------------|-------------------------------|---------------------|
| resolver_type            | FLAGD_RESOLVER                 | enum - `rpc`, `in-process`, `file`  | rpc                           |                     |
| host                     | FLAGD_HOST                     | str                                 | localhost                     | rpc & in-process    |
| port                     | FLAGD_PORT                     | int                                 | 8013 (rpc), 8015 (in-process) | rpc & in-process    |
| tls                      | FLAGD_TLS                      | bool                                | false                         | rpc & in-process    |
| cert_path                | FLAGD_SERVER_CERT_PATH         | str                                 | null                          | rpc & in-process    |
| deadline_ms              | FLAGD_DEADLINE_MS              | int                                 | 500                           | rpc & in-process    |
| stream_deadline_ms       | FLAGD_STREAM_DEADLINE_MS       | int                                 | 600000                        | rpc & in-process    |
| keep_alive_time          | FLAGD_KEEP_ALIVE_TIME_MS       | int                                 | 0                             | rpc & in-process    |
| selector                 | FLAGD_SOURCE_SELECTOR          | str                                 | null                          | in-process          |
| cache                    | FLAGD_CACHE                    | enum - `lru`, `disabled`            | lru                           | rpc                 |
| max_cache_size           | FLAGD_MAX_CACHE_SIZE           | int                                 | 1000                          | rpc                 |
| retry_backoff_ms         | FLAGD_RETRY_BACKOFF_MS         | int                                 | 1000                          | rpc                 |
| offline_flag_source_path | FLAGD_OFFLINE_FLAG_SOURCE_PATH | str                                 | null                          | in-process          |
123 | 124 | #### Deadlines with Remote resolver (RPC) 125 | 126 | If the remote evaluation call is not completed within this deadline, the gRPC call is terminated with the error `DEADLINE_EXCEEDED` 127 | and the evaluation will default. 128 | 129 | ### TLS 130 | 131 | TLS is available in situations where flagd is running on another host. 132 | 133 | 134 | You may optionally supply an X.509 certificate in PEM format. Otherwise, the default certificate store will be used. 135 | 136 | ```python 137 | from openfeature import api 138 | from openfeature.contrib.provider.flagd import FlagdProvider 139 | 140 | api.set_provider(FlagdProvider( 141 | tls=True, # use TLS 142 | cert_path="etc/cert/ca.crt" # PEM cert 143 | )) 144 | ``` 145 | 146 | ## License 147 | 148 | Apache 2.0 - See [LICENSE](./LICENSE) for more information. 149 | -------------------------------------------------------------------------------- /providers/openfeature-provider-flagd/pyproject.toml: -------------------------------------------------------------------------------- 1 | # pyproject.toml 2 | [build-system] 3 | requires = ["hatchling"] 4 | build-backend = "hatchling.build" 5 | 6 | [project] 7 | name = "openfeature-provider-flagd" 8 | version = "0.2.3" 9 | description = "OpenFeature provider for the flagd flag evaluation engine" 10 | readme = "README.md" 11 | authors = [{ name = "OpenFeature", email = "openfeature-core@groups.io" }] 12 | license = { file = "LICENSE" } 13 | classifiers = [ 14 | "License :: OSI Approved :: Apache Software License", 15 | "Programming Language :: Python", 16 | "Programming Language :: Python :: 3", 17 | ] 18 | keywords = [] 19 | dependencies = [ 20 | "openfeature-sdk>=0.6.0", 21 | "grpcio>=1.68.1", 22 | "protobuf>=4.29.2", 23 | "mmh3>=4.1.0", 24 | "panzi-json-logic>=1.0.1", 25 | "semver>=3,<4", 26 | "pyyaml>=6.0.1", 27 | "cachebox" 28 | ] 29 | requires-python = ">=3.9" 30 | 31 | [project.urls] 32 | Homepage = "https://github.com/open-feature/python-sdk-contrib" 33 | 34 | 
[tool.hatch] 35 | 36 | [tool.hatch.envs.hatch-test] 37 | dependencies = [ 38 | "coverage[toml]>=6.5", 39 | "pytest", 40 | "pytest-bdd", 41 | "testcontainers", 42 | "asserts", 43 | "grpcio-health-checking==1.72.1", 44 | ] 45 | pre-install-commands = [ 46 | "hatch build", 47 | ] 48 | 49 | [tool.hatch.envs.hatch-test.scripts] 50 | run = "pytest {args:tests}" 51 | run-cov = "coverage run -m pytest {args:tests}" 52 | cov-combine = "coverage combine" 53 | cov-report = [ 54 | "coverage xml", 55 | "coverage html", 56 | "coverage report", 57 | ] 58 | cov = [ 59 | "test-cov", 60 | "cov-report", 61 | ] 62 | 63 | 64 | [tool.hatch.envs.mypy] 65 | dependencies = [ 66 | "mypy[faster-cache]>=1.13.0", 67 | "types-protobuf", 68 | "types-pyyaml", 69 | ] 70 | pre-install-commands = [ 71 | "hatch build", 72 | ] 73 | 74 | [tool.hatch.envs.mypy.scripts] 75 | run = "mypy" 76 | 77 | [tool.hatch.build.hooks.protobuf] 78 | generate_pyi = false 79 | dependencies = [ 80 | "hatch-protobuf", 81 | "mypy-protobuf~=3.0", 82 | ] 83 | proto_paths = [ 84 | ".", 85 | ] 86 | output_path = "src/" 87 | 88 | [[tool.hatch.build.hooks.protobuf.generators]] 89 | name = "mypy" 90 | outputs = ["{proto_path}/{proto_name}_pb2.pyi"] 91 | 92 | [[tool.hatch.build.hooks.protobuf.generators]] 93 | name = "mypy_grpc" 94 | outputs = ["{proto_path}/{proto_name}_pb2_grpc.pyi"] 95 | 96 | [tool.hatch.build.targets.sdist] 97 | exclude = [ 98 | ".gitignore", 99 | "/openfeature", 100 | ] 101 | 102 | [tool.hatch.build.targets.wheel] 103 | packages = ["src/openfeature"] 104 | 105 | [tool.coverage.run] 106 | omit = [ 107 | # exclude generated files 108 | "src/openfeature/schemas/*", 109 | "tests/**", 110 | ] 111 | 112 | [tool.mypy] 113 | mypy_path = "src" 114 | files = "src" 115 | 116 | python_version = "3.9" # should be identical to the minimum supported version 117 | namespace_packages = true 118 | explicit_package_bases = true 119 | local_partial_types = true 120 | pretty = true 121 | 122 | strict = true 123 | 
disallow_any_generics = false 124 | 125 | [[tool.mypy.overrides]] 126 | module = [ 127 | "grpc.*", 128 | "json_logic.*", 129 | ] 130 | ignore_missing_imports = true 131 | 132 | [[tool.mypy.overrides]] 133 | module = [ 134 | "openfeature.schemas.*" 135 | ] 136 | warn_unused_ignores = false 137 | -------------------------------------------------------------------------------- /providers/openfeature-provider-flagd/pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | markers = 3 | rpc: tests for rpc mode. 4 | in-process: tests for in-process mode. 5 | file: tests for file mode. 6 | unavailable: tests for unavailable providers. 7 | customCert: Supports custom certs. 8 | unixsocket: Supports unixsockets. 9 | targetURI: Supports targetURI. 10 | grace: Supports grace attempts. 11 | targeting: Supports targeting. 12 | fractional: Supports fractional. 13 | string: Supports string. 14 | semver: Supports semver. 15 | reconnect: Supports reconnect. 16 | events: Supports events. 17 | sync: Supports sync. 18 | caching: Supports caching. 19 | offline: Supports offline. 20 | os.linux: linux mark. 21 | stream: Supports streams. 
22 | bdd_features_base_dir = tests/features 23 | -------------------------------------------------------------------------------- /providers/openfeature-provider-flagd/src/openfeature/.gitignore: -------------------------------------------------------------------------------- 1 | 2 | schemas -------------------------------------------------------------------------------- /providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/__init__.py: -------------------------------------------------------------------------------- 1 | from .provider import FlagdProvider 2 | 3 | __all__ = ["FlagdProvider"] 4 | -------------------------------------------------------------------------------- /providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/config.py: -------------------------------------------------------------------------------- 1 | import dataclasses 2 | import os 3 | import typing 4 | from enum import Enum 5 | 6 | import grpc 7 | 8 | 9 | class ResolverType(Enum): 10 | RPC = "rpc" 11 | IN_PROCESS = "in-process" 12 | FILE = "file" 13 | 14 | 15 | class CacheType(Enum): 16 | LRU = "lru" 17 | DISABLED = "disabled" 18 | 19 | 20 | DEFAULT_CACHE = CacheType.LRU 21 | DEFAULT_CACHE_SIZE = 1000 22 | DEFAULT_DEADLINE = 500 23 | DEFAULT_HOST = "localhost" 24 | DEFAULT_KEEP_ALIVE = 0 25 | DEFAULT_OFFLINE_SOURCE_PATH: typing.Optional[str] = None 26 | DEFAULT_OFFLINE_POLL_MS = 5000 27 | DEFAULT_PORT_IN_PROCESS = 8015 28 | DEFAULT_PORT_RPC = 8013 29 | DEFAULT_RESOLVER_TYPE = ResolverType.RPC 30 | DEFAULT_RETRY_BACKOFF = 1000 31 | DEFAULT_RETRY_BACKOFF_MAX = 120000 32 | DEFAULT_RETRY_GRACE_PERIOD_SECONDS = 5 33 | DEFAULT_STREAM_DEADLINE = 600000 34 | DEFAULT_TLS = False 35 | DEFAULT_TLS_CERT: typing.Optional[str] = None 36 | 37 | ENV_VAR_CACHE_SIZE = "FLAGD_MAX_CACHE_SIZE" 38 | ENV_VAR_CACHE_TYPE = "FLAGD_CACHE" 39 | ENV_VAR_DEADLINE_MS = "FLAGD_DEADLINE_MS" 40 | ENV_VAR_HOST = "FLAGD_HOST" 41 | ENV_VAR_KEEP_ALIVE_TIME_MS = 
"FLAGD_KEEP_ALIVE_TIME_MS" 42 | ENV_VAR_OFFLINE_FLAG_SOURCE_PATH = "FLAGD_OFFLINE_FLAG_SOURCE_PATH" 43 | ENV_VAR_OFFLINE_POLL_MS = "FLAGD_OFFLINE_POLL_MS" 44 | ENV_VAR_PORT = "FLAGD_PORT" 45 | ENV_VAR_RESOLVER_TYPE = "FLAGD_RESOLVER" 46 | ENV_VAR_RETRY_BACKOFF_MS = "FLAGD_RETRY_BACKOFF_MS" 47 | ENV_VAR_RETRY_BACKOFF_MAX_MS = "FLAGD_RETRY_BACKOFF_MAX_MS" 48 | ENV_VAR_RETRY_GRACE_PERIOD_SECONDS = "FLAGD_RETRY_GRACE_PERIOD" 49 | ENV_VAR_SELECTOR = "FLAGD_SOURCE_SELECTOR" 50 | ENV_VAR_PROVIDER_ID = "FLAGD_PROVIDER_ID" 51 | ENV_VAR_STREAM_DEADLINE_MS = "FLAGD_STREAM_DEADLINE_MS" 52 | ENV_VAR_TLS = "FLAGD_TLS" 53 | ENV_VAR_TLS_CERT = "FLAGD_SERVER_CERT_PATH" 54 | ENV_VAR_DEFAULT_AUTHORITY = "FLAGD_DEFAULT_AUTHORITY" 55 | 56 | T = typing.TypeVar("T") 57 | 58 | 59 | def str_to_bool(val: str) -> bool: 60 | return val.lower() == "true" 61 | 62 | 63 | def convert_resolver_type(val: typing.Union[str, ResolverType]) -> ResolverType: 64 | if isinstance(val, str): 65 | v = val.lower() 66 | return ResolverType(v) 67 | else: 68 | return ResolverType(val) 69 | 70 | 71 | def env_or_default( 72 | env_var: str, default: T, cast: typing.Optional[typing.Callable[[str], T]] = None 73 | ) -> typing.Union[str, T]: 74 | val = os.environ.get(env_var) 75 | if val is None: 76 | return default 77 | return val if cast is None else cast(val) 78 | 79 | 80 | @dataclasses.dataclass 81 | class Config: 82 | def __init__( # noqa: PLR0913 83 | self, 84 | host: typing.Optional[str] = None, 85 | port: typing.Optional[int] = None, 86 | tls: typing.Optional[bool] = None, 87 | selector: typing.Optional[str] = None, 88 | provider_id: typing.Optional[str] = None, 89 | resolver: typing.Optional[ResolverType] = None, 90 | offline_flag_source_path: typing.Optional[str] = None, 91 | offline_poll_interval_ms: typing.Optional[int] = None, 92 | retry_backoff_ms: typing.Optional[int] = None, 93 | retry_backoff_max_ms: typing.Optional[int] = None, 94 | retry_grace_period: typing.Optional[int] = None, 95 | deadline_ms: 
typing.Optional[int] = None, 96 | stream_deadline_ms: typing.Optional[int] = None, 97 | keep_alive_time: typing.Optional[int] = None, 98 | cache: typing.Optional[CacheType] = None, 99 | max_cache_size: typing.Optional[int] = None, 100 | cert_path: typing.Optional[str] = None, 101 | default_authority: typing.Optional[str] = None, 102 | channel_credentials: typing.Optional[grpc.ChannelCredentials] = None, 103 | sync_metadata_disabled: typing.Optional[bool] = None, 104 | ): 105 | self.host = env_or_default(ENV_VAR_HOST, DEFAULT_HOST) if host is None else host 106 | 107 | self.tls = ( 108 | env_or_default(ENV_VAR_TLS, DEFAULT_TLS, cast=str_to_bool) 109 | if tls is None 110 | else tls 111 | ) 112 | 113 | self.retry_backoff_ms: int = ( 114 | int( 115 | env_or_default( 116 | ENV_VAR_RETRY_BACKOFF_MS, DEFAULT_RETRY_BACKOFF, cast=int 117 | ) 118 | ) 119 | if retry_backoff_ms is None 120 | else retry_backoff_ms 121 | ) 122 | self.retry_backoff_max_ms: int = ( 123 | int( 124 | env_or_default( 125 | ENV_VAR_RETRY_BACKOFF_MAX_MS, DEFAULT_RETRY_BACKOFF_MAX, cast=int 126 | ) 127 | ) 128 | if retry_backoff_max_ms is None 129 | else retry_backoff_max_ms 130 | ) 131 | 132 | self.retry_grace_period: int = ( 133 | int( 134 | env_or_default( 135 | ENV_VAR_RETRY_GRACE_PERIOD_SECONDS, 136 | DEFAULT_RETRY_GRACE_PERIOD_SECONDS, 137 | cast=int, 138 | ) 139 | ) 140 | if retry_grace_period is None 141 | else retry_grace_period 142 | ) 143 | 144 | self.resolver = ( 145 | env_or_default( 146 | ENV_VAR_RESOLVER_TYPE, DEFAULT_RESOLVER_TYPE, cast=convert_resolver_type 147 | ) 148 | if resolver is None 149 | else resolver 150 | ) 151 | 152 | default_port = ( 153 | DEFAULT_PORT_RPC 154 | if self.resolver is ResolverType.RPC 155 | else DEFAULT_PORT_IN_PROCESS 156 | ) 157 | 158 | self.port: int = ( 159 | int(env_or_default(ENV_VAR_PORT, default_port, cast=int)) 160 | if port is None 161 | else port 162 | ) 163 | 164 | self.offline_flag_source_path = ( 165 | env_or_default( 166 | 
ENV_VAR_OFFLINE_FLAG_SOURCE_PATH, DEFAULT_OFFLINE_SOURCE_PATH 167 | ) 168 | if offline_flag_source_path is None 169 | else offline_flag_source_path 170 | ) 171 | 172 | if ( 173 | self.offline_flag_source_path is not None 174 | and self.resolver is ResolverType.IN_PROCESS 175 | ): 176 | self.resolver = ResolverType.FILE 177 | 178 | if self.resolver is ResolverType.FILE and self.offline_flag_source_path is None: 179 | raise AttributeError( 180 | "Resolver Type 'FILE' requires a offlineFlagSourcePath" 181 | ) 182 | 183 | self.offline_poll_interval_ms: int = ( 184 | int( 185 | env_or_default( 186 | ENV_VAR_OFFLINE_POLL_MS, DEFAULT_OFFLINE_POLL_MS, cast=int 187 | ) 188 | ) 189 | if offline_poll_interval_ms is None 190 | else offline_poll_interval_ms 191 | ) 192 | 193 | self.deadline_ms: int = ( 194 | int(env_or_default(ENV_VAR_DEADLINE_MS, DEFAULT_DEADLINE, cast=int)) 195 | if deadline_ms is None 196 | else deadline_ms 197 | ) 198 | 199 | self.stream_deadline_ms: int = ( 200 | int( 201 | env_or_default( 202 | ENV_VAR_STREAM_DEADLINE_MS, DEFAULT_STREAM_DEADLINE, cast=int 203 | ) 204 | ) 205 | if stream_deadline_ms is None 206 | else stream_deadline_ms 207 | ) 208 | 209 | self.keep_alive_time: int = ( 210 | int( 211 | env_or_default(ENV_VAR_KEEP_ALIVE_TIME_MS, DEFAULT_KEEP_ALIVE, cast=int) 212 | ) 213 | if keep_alive_time is None 214 | else keep_alive_time 215 | ) 216 | 217 | self.cache = ( 218 | CacheType(env_or_default(ENV_VAR_CACHE_TYPE, DEFAULT_CACHE)) 219 | if cache is None 220 | else cache 221 | ) 222 | 223 | self.max_cache_size: int = ( 224 | int(env_or_default(ENV_VAR_CACHE_SIZE, DEFAULT_CACHE_SIZE, cast=int)) 225 | if max_cache_size is None 226 | else max_cache_size 227 | ) 228 | 229 | self.cert_path = ( 230 | env_or_default(ENV_VAR_TLS_CERT, DEFAULT_TLS_CERT) 231 | if cert_path is None 232 | else cert_path 233 | ) 234 | 235 | self.selector = ( 236 | env_or_default(ENV_VAR_SELECTOR, None) if selector is None else selector 237 | ) 238 | 239 | self.provider_id = ( 
240 | env_or_default(ENV_VAR_PROVIDER_ID, None) 241 | if provider_id is None 242 | else provider_id 243 | ) 244 | 245 | self.default_authority = ( 246 | env_or_default(ENV_VAR_DEFAULT_AUTHORITY, None) 247 | if default_authority is None 248 | else default_authority 249 | ) 250 | 251 | self.channel_credentials = channel_credentials 252 | 253 | # TODO: remove the metadata call entirely after https://github.com/open-feature/flagd/issues/1584 254 | # This is a temporary stop-gap solutions to support servers that don't implement sync.GetMetadata 255 | # (see: https://buf.build/open-feature/flagd/docs/main:flagd.sync.v1#flagd.sync.v1.FlagSyncService.GetMetadata). 256 | # Using this option disables call to sync.GetMetadata 257 | # Disabling will prevent static context from flagd being used in evaluations. 258 | # GetMetadata and this option will be removed. 259 | self.sync_metadata_disabled = sync_metadata_disabled 260 | -------------------------------------------------------------------------------- /providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/flag_type.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | 3 | 4 | class FlagType(Enum): 5 | BOOLEAN = "BOOLEAN" 6 | STRING = "STRING" 7 | FLOAT = "FLOAT" 8 | INTEGER = "INTEGER" 9 | OBJECT = "OBJECT" 10 | -------------------------------------------------------------------------------- /providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/provider.py: -------------------------------------------------------------------------------- 1 | """ 2 | # This is a Python Provider to interact with flagd 3 | # 4 | # -- Usage -- 5 | # open_feature_api.set_provider(flagd_provider.FlagdProvider()) 6 | # flag_value = open_feature_client.get_string_value( 7 | # key="foo", 8 | # default_value="missingflag" 9 | # ) 10 | # print(f"Flag Value is: {flag_value}") 11 | # OR the more verbose option 12 | # flag = 
open_feature_client.get_string_details(key="foo", default_value="missingflag") 13 | # print(f"Flag is: {flag.value}") 14 | # OR 15 | # print(f"Flag Details: {vars(flag)}"") 16 | # 17 | # -- Customisation -- 18 | # Follows flagd defaults: 'http' protocol on 'localhost' on port '8013' 19 | # But can be overridden: 20 | # provider = open_feature_api.get_provider() 21 | # provider.initialise(schema="https",endpoint="example.com",port=1234,timeout=10) 22 | """ 23 | 24 | import typing 25 | import warnings 26 | 27 | import grpc 28 | 29 | from openfeature.evaluation_context import EvaluationContext 30 | from openfeature.event import ProviderEventDetails 31 | from openfeature.flag_evaluation import FlagResolutionDetails 32 | from openfeature.hook import Hook 33 | from openfeature.provider import AbstractProvider 34 | from openfeature.provider.metadata import Metadata 35 | 36 | from .config import CacheType, Config, ResolverType 37 | from .resolvers import AbstractResolver, GrpcResolver, InProcessResolver 38 | from .sync_metadata_hook import SyncMetadataHook 39 | 40 | T = typing.TypeVar("T") 41 | 42 | 43 | class FlagdProvider(AbstractProvider): 44 | """Flagd OpenFeature Provider""" 45 | 46 | def __init__( # noqa: PLR0913 47 | self, 48 | host: typing.Optional[str] = None, 49 | port: typing.Optional[int] = None, 50 | tls: typing.Optional[bool] = None, 51 | deadline_ms: typing.Optional[int] = None, 52 | timeout: typing.Optional[int] = None, 53 | retry_backoff_ms: typing.Optional[int] = None, 54 | selector: typing.Optional[str] = None, 55 | provider_id: typing.Optional[str] = None, 56 | resolver_type: typing.Optional[ResolverType] = None, 57 | offline_flag_source_path: typing.Optional[str] = None, 58 | stream_deadline_ms: typing.Optional[int] = None, 59 | keep_alive_time: typing.Optional[int] = None, 60 | cache: typing.Optional[CacheType] = None, 61 | max_cache_size: typing.Optional[int] = None, 62 | retry_backoff_max_ms: typing.Optional[int] = None, 63 | retry_grace_period: 
typing.Optional[int] = None, 64 | cert_path: typing.Optional[str] = None, 65 | default_authority: typing.Optional[str] = None, 66 | channel_credentials: typing.Optional[grpc.ChannelCredentials] = None, 67 | sync_metadata_disabled: typing.Optional[bool] = None, 68 | ): 69 | """ 70 | Create an instance of the FlagdProvider 71 | 72 | :param host: the host to make requests to 73 | :param port: the port the flagd service is available on 74 | :param tls: enable/disable secure TLS connectivity 75 | :param deadline_ms: the maximum to wait before a request times out 76 | :param timeout: the maximum time to wait before a request times out 77 | :param retry_backoff_ms: the number of milliseconds to backoff 78 | :param offline_flag_source_path: the path to the flag source file 79 | :param stream_deadline_ms: the maximum time to wait before a request times out 80 | :param keep_alive_time: the number of milliseconds to keep alive 81 | :param resolver_type: the type of resolver to use 82 | """ 83 | if deadline_ms is None and timeout is not None: 84 | deadline_ms = timeout * 1000 85 | warnings.warn( 86 | "'timeout' property is deprecated, please use 'deadline' instead, be aware that 'deadline' is in milliseconds", 87 | DeprecationWarning, 88 | stacklevel=2, 89 | ) 90 | 91 | self.config = Config( 92 | host=host, 93 | port=port, 94 | tls=tls, 95 | deadline_ms=deadline_ms, 96 | retry_backoff_ms=retry_backoff_ms, 97 | retry_backoff_max_ms=retry_backoff_max_ms, 98 | retry_grace_period=retry_grace_period, 99 | selector=selector, 100 | provider_id=provider_id, 101 | resolver=resolver_type, 102 | offline_flag_source_path=offline_flag_source_path, 103 | stream_deadline_ms=stream_deadline_ms, 104 | keep_alive_time=keep_alive_time, 105 | cache=cache, 106 | max_cache_size=max_cache_size, 107 | cert_path=cert_path, 108 | default_authority=default_authority, 109 | channel_credentials=channel_credentials, 110 | sync_metadata_disabled=sync_metadata_disabled, 111 | ) 112 | self.enriched_context: 
dict = {} 113 | 114 | self.resolver = self.setup_resolver() 115 | self.hooks: list[Hook] = [SyncMetadataHook(self.get_enriched_context)] 116 | 117 | def get_enriched_context(self) -> EvaluationContext: 118 | return EvaluationContext(attributes=self.enriched_context) 119 | 120 | def get_provider_hooks(self) -> list[Hook]: 121 | return self.hooks 122 | 123 | def setup_resolver(self) -> AbstractResolver: 124 | if self.config.resolver == ResolverType.RPC: 125 | return GrpcResolver( 126 | self.config, 127 | self.emit_provider_ready, 128 | self.emit_provider_error, 129 | self.emit_provider_stale, 130 | self.emit_provider_configuration_changed, 131 | ) 132 | elif ( 133 | self.config.resolver == ResolverType.IN_PROCESS 134 | or self.config.resolver == ResolverType.FILE 135 | ): 136 | return InProcessResolver( 137 | self.config, 138 | self.emit_provider_ready_with_context, 139 | self.emit_provider_error, 140 | self.emit_provider_stale, 141 | self.emit_provider_configuration_changed, 142 | ) 143 | else: 144 | raise ValueError( 145 | f"`resolver_type` parameter invalid: {self.config.resolver}" 146 | ) 147 | 148 | def initialize(self, evaluation_context: EvaluationContext) -> None: 149 | self.resolver.initialize(evaluation_context) 150 | 151 | def shutdown(self) -> None: 152 | if self.resolver: 153 | self.resolver.shutdown() 154 | 155 | def get_metadata(self) -> Metadata: 156 | """Returns provider metadata""" 157 | return Metadata(name="FlagdProvider") 158 | 159 | def resolve_boolean_details( 160 | self, 161 | flag_key: str, 162 | default_value: bool, 163 | evaluation_context: typing.Optional[EvaluationContext] = None, 164 | ) -> FlagResolutionDetails[bool]: 165 | return self.resolver.resolve_boolean_details( 166 | flag_key, default_value, evaluation_context 167 | ) 168 | 169 | def resolve_string_details( 170 | self, 171 | flag_key: str, 172 | default_value: str, 173 | evaluation_context: typing.Optional[EvaluationContext] = None, 174 | ) -> FlagResolutionDetails[str]: 175 | 
return self.resolver.resolve_string_details( 176 | flag_key, default_value, evaluation_context 177 | ) 178 | 179 | def resolve_float_details( 180 | self, 181 | flag_key: str, 182 | default_value: float, 183 | evaluation_context: typing.Optional[EvaluationContext] = None, 184 | ) -> FlagResolutionDetails[float]: 185 | return self.resolver.resolve_float_details( 186 | flag_key, default_value, evaluation_context 187 | ) 188 | 189 | def resolve_integer_details( 190 | self, 191 | flag_key: str, 192 | default_value: int, 193 | evaluation_context: typing.Optional[EvaluationContext] = None, 194 | ) -> FlagResolutionDetails[int]: 195 | return self.resolver.resolve_integer_details( 196 | flag_key, default_value, evaluation_context 197 | ) 198 | 199 | def resolve_object_details( 200 | self, 201 | flag_key: str, 202 | default_value: typing.Union[dict, list], 203 | evaluation_context: typing.Optional[EvaluationContext] = None, 204 | ) -> FlagResolutionDetails[typing.Union[dict, list]]: 205 | return self.resolver.resolve_object_details( 206 | flag_key, default_value, evaluation_context 207 | ) 208 | 209 | def emit_provider_ready_with_context( 210 | self, details: ProviderEventDetails, context: dict 211 | ) -> None: 212 | self.enriched_context = context 213 | self.emit_provider_ready(details) 214 | pass 215 | -------------------------------------------------------------------------------- /providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/__init__.py: -------------------------------------------------------------------------------- 1 | from .grpc import GrpcResolver 2 | from .in_process import InProcessResolver 3 | from .protocol import AbstractResolver 4 | 5 | __all__ = ["AbstractResolver", "GrpcResolver", "InProcessResolver"] 6 | -------------------------------------------------------------------------------- /providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/in_process.py: 
-------------------------------------------------------------------------------- 1 | import typing 2 | 3 | from openfeature.contrib.provider.flagd.resolvers.process.connector.file_watcher import ( 4 | FileWatcher, 5 | ) 6 | from openfeature.evaluation_context import EvaluationContext 7 | from openfeature.event import ProviderEventDetails 8 | from openfeature.exception import FlagNotFoundError, ParseError 9 | from openfeature.flag_evaluation import FlagResolutionDetails, Reason 10 | 11 | from ..config import Config 12 | from .process.connector import FlagStateConnector 13 | from .process.connector.grpc_watcher import GrpcWatcher 14 | from .process.flags import FlagStore 15 | from .process.targeting import targeting 16 | 17 | T = typing.TypeVar("T") 18 | 19 | 20 | def _merge_metadata( 21 | flag_metadata: typing.Optional[ 22 | typing.Mapping[str, typing.Union[float, int, str, bool]] 23 | ], 24 | flag_set_metadata: typing.Optional[ 25 | typing.Mapping[str, typing.Union[float, int, str, bool]] 26 | ], 27 | ) -> typing.Mapping[str, typing.Union[float, int, str, bool]]: 28 | metadata = {} if flag_set_metadata is None else dict(flag_set_metadata) 29 | 30 | if flag_metadata is not None: 31 | for key, value in flag_metadata.items(): 32 | metadata[key] = value 33 | 34 | return metadata 35 | 36 | 37 | class InProcessResolver: 38 | def __init__( 39 | self, 40 | config: Config, 41 | emit_provider_ready: typing.Callable[[ProviderEventDetails, dict], None], 42 | emit_provider_error: typing.Callable[[ProviderEventDetails], None], 43 | emit_provider_stale: typing.Callable[[ProviderEventDetails], None], 44 | emit_provider_configuration_changed: typing.Callable[ 45 | [ProviderEventDetails], None 46 | ], 47 | ): 48 | self.config = config 49 | self.flag_store = FlagStore(emit_provider_configuration_changed) 50 | self.connector: FlagStateConnector = ( 51 | FileWatcher( 52 | self.config, self.flag_store, emit_provider_ready, emit_provider_error 53 | ) 54 | if 
class InProcessResolver:
    """Resolver that evaluates flags locally from an in-process flag store.

    Flag configuration is fed into the store by a connector: a file watcher
    when an offline source path is configured, otherwise a gRPC sync
    watcher.
    """

    def __init__(
        self,
        config: Config,
        emit_provider_ready: typing.Callable[[ProviderEventDetails, dict], None],
        emit_provider_error: typing.Callable[[ProviderEventDetails], None],
        emit_provider_stale: typing.Callable[[ProviderEventDetails], None],
        emit_provider_configuration_changed: typing.Callable[
            [ProviderEventDetails], None
        ],
    ):
        self.config = config
        self.flag_store = FlagStore(emit_provider_configuration_changed)
        # File-backed connector when an offline source path is configured,
        # otherwise sync flags from flagd over gRPC.
        if self.config.offline_flag_source_path:
            self.connector: FlagStateConnector = FileWatcher(
                self.config, self.flag_store, emit_provider_ready, emit_provider_error
            )
        else:
            self.connector = GrpcWatcher(
                self.config,
                self.flag_store,
                emit_provider_ready,
                emit_provider_error,
                emit_provider_stale,
            )

    def initialize(self, evaluation_context: EvaluationContext) -> None:
        """Start the underlying connector."""
        self.connector.initialize(evaluation_context)

    def shutdown(self) -> None:
        """Stop the underlying connector."""
        self.connector.shutdown()

    def resolve_boolean_details(
        self,
        key: str,
        default_value: bool,
        evaluation_context: typing.Optional[EvaluationContext] = None,
    ) -> FlagResolutionDetails[bool]:
        return self._resolve(key, default_value, evaluation_context)

    def resolve_string_details(
        self,
        key: str,
        default_value: str,
        evaluation_context: typing.Optional[EvaluationContext] = None,
    ) -> FlagResolutionDetails[str]:
        return self._resolve(key, default_value, evaluation_context)

    def resolve_float_details(
        self,
        key: str,
        default_value: float,
        evaluation_context: typing.Optional[EvaluationContext] = None,
    ) -> FlagResolutionDetails[float]:
        details = self._resolve(key, default_value, evaluation_context)
        # JSON has a single number type, so an integral value may come back
        # as int; coerce to float for a consistent return type.
        if isinstance(details.value, int):
            details.value = float(details.value)
        return details

    def resolve_integer_details(
        self,
        key: str,
        default_value: int,
        evaluation_context: typing.Optional[EvaluationContext] = None,
    ) -> FlagResolutionDetails[int]:
        return self._resolve(key, default_value, evaluation_context)

    def resolve_object_details(
        self,
        key: str,
        default_value: typing.Union[dict, list],
        evaluation_context: typing.Optional[EvaluationContext] = None,
    ) -> FlagResolutionDetails[typing.Union[dict, list]]:
        return self._resolve(key, default_value, evaluation_context)

    def _resolve(
        self,
        key: str,
        default_value: T,
        evaluation_context: typing.Optional[EvaluationContext] = None,
    ) -> FlagResolutionDetails[T]:
        """Shared evaluation path for every typed ``resolve_*_details``.

        Raises:
            FlagNotFoundError: when ``key`` is not in the flag store.
            ParseError: when targeting yields a non-str/bool result or a
                variant that is not in the flag's variants map.
        """
        flag = self.flag_store.get_flag(key)
        if flag is None:
            raise FlagNotFoundError(f"Flag with key {key} not present in flag store.")

        combined_metadata = _merge_metadata(
            flag.metadata, self.flag_store.flag_set_metadata
        )

        # Disabled flags fall back to the caller's default value.
        if flag.state == "DISABLED":
            return FlagResolutionDetails(
                default_value,
                flag_metadata=combined_metadata,
                reason=Reason.DISABLED,
            )

        # No targeting rule: the flag's configured default variant wins.
        if not flag.targeting:
            default_variant, default_flag_value = flag.default
            return FlagResolutionDetails(
                default_flag_value,
                variant=default_variant,
                flag_metadata=combined_metadata,
                reason=Reason.STATIC,
            )

        targeting_result = targeting(flag.key, flag.targeting, evaluation_context)

        # A null targeting result means "no match": use the default variant.
        if targeting_result is None:
            default_variant, default_flag_value = flag.default
            return FlagResolutionDetails(
                default_flag_value,
                variant=default_variant,
                flag_metadata=combined_metadata,
                reason=Reason.DEFAULT,
            )

        if not isinstance(targeting_result, (str, bool)):
            raise ParseError(
                "Parsed JSONLogic targeting did not return a string or bool"
            )

        resolved_variant, resolved_value = flag.get_variant(targeting_result)
        if resolved_value is None:
            raise ParseError(
                f"Resolved variant {resolved_variant} not in variants config."
            )

        return FlagResolutionDetails(
            resolved_value,
            variant=resolved_variant,
            reason=Reason.TARGETING_MATCH,
            flag_metadata=combined_metadata,
        )
class FileWatcher(FlagStateConnector):
    """Flag state connector that reads flag configuration from a local file.

    The file (JSON by default, YAML when the path ends in ``.yaml``) is
    re-parsed into the shared FlagStore whenever its modification time
    advances, as observed by a background polling thread.
    """

    def __init__(
        self,
        config: Config,
        flag_store: FlagStore,
        emit_provider_ready: typing.Callable[[ProviderEventDetails, dict], None],
        emit_provider_error: typing.Callable[[ProviderEventDetails], None],
    ):
        if config.offline_flag_source_path is None:
            raise ValueError(
                f"`config.offline_flag_source_path` parameter invalid: {config.offline_flag_source_path}"
            )
        else:
            self.file_path = config.offline_flag_source_path

        self.emit_provider_ready = emit_provider_ready
        self.emit_provider_error = emit_provider_error
        # Poll interval of the watcher thread (config value is in ms).
        self.deadline_seconds = config.deadline_ms * 0.001

        # mtime of the last successfully loaded file contents.
        self.last_modified = 0.0
        self.flag_store = flag_store
        # When True, the next successful load emits the provider-ready event;
        # set by initialize() and again after any load error (recovery).
        self.should_emit_ready_on_success = False

    def initialize(self, evaluation_context: EvaluationContext) -> None:
        """Start the polling thread and perform a blocking initial load."""
        self.active = True
        self.thread = threading.Thread(
            target=self.refresh_file, daemon=True, name="FlagdFileWatcherWorkerThread"
        )
        self.thread.start()

        # Let this throw exceptions so that provider status is set correctly
        try:
            self.should_emit_ready_on_success = True
            self._load_data()
        except Exception as err:
            raise ProviderNotReadyError from err

    def shutdown(self) -> None:
        # Stops the polling loop at its next wake-up; the daemon thread is
        # not joined.
        self.active = False

    def refresh_file(self) -> None:
        """Polling loop: reload the file whenever its mtime advances."""
        while self.active:
            time.sleep(self.deadline_seconds)
            logger.debug("checking for new flag store contents from file")
            self.safe_load_data()

    def safe_load_data(self) -> None:
        """Reload the file if changed, converting failures into error events."""
        try:
            last_modified = os.path.getmtime(self.file_path)
            if last_modified > self.last_modified:
                self._load_data(last_modified)
        except FileNotFoundError:
            self.handle_error("Provided file path not valid")
        except json.JSONDecodeError:
            self.handle_error("Could not parse JSON flag data from file")
        except yaml.error.YAMLError:
            self.handle_error("Could not parse YAML flag data from file")
        except ParseError as e:
            self.handle_error(
                "Could not parse flag data using flagd syntax: "
                + (
                    "no error message provided"
                    if e is None or e.error_message is None
                    else e.error_message
                )
            )
        except Exception:
            # Catch-all: any other failure is surfaced as a provider error
            # rather than killing the polling thread.
            self.handle_error("Could not read flags from file")

    def _load_data(self, modified_time: typing.Optional[float] = None) -> None:
        """Parse the file and push its contents into the flag store.

        Emits the provider-ready event when a load succeeds while
        ``should_emit_ready_on_success`` is set (the initial load, or the
        first success after an error).
        """
        with open(self.file_path) as file:
            if self.file_path.endswith(".yaml"):
                data = yaml.safe_load(file)
            else:
                data = json.load(file)

            self.flag_store.update(data)

            if self.should_emit_ready_on_success:
                self.emit_provider_ready(
                    ProviderEventDetails(
                        message="Reloading file contents recovered from error state"
                    ),
                    {},
                )
                self.should_emit_ready_on_success = False

        # Only record the mtime after a fully successful load, so a failed
        # parse is retried on the next poll.
        self.last_modified = modified_time or os.path.getmtime(self.file_path)

    def handle_error(self, error_message: str) -> None:
        """Log the failure and emit a PARSE_ERROR provider event."""
        # Called from the except blocks in safe_load_data, so exc_info is
        # available to logger.exception.
        logger.exception(error_message)
        self.should_emit_ready_on_success = True
        self.emit_provider_error(
            ProviderEventDetails(
                message=error_message, error_code=ErrorCode.PARSE_ERROR
            )
        )
import json
import logging
import threading
import time
import typing

import grpc
from google.protobuf.json_format import MessageToDict
from google.protobuf.struct_pb2 import Struct

from openfeature.evaluation_context import EvaluationContext
from openfeature.event import ProviderEventDetails
from openfeature.exception import ErrorCode, ParseError, ProviderNotReadyError
from openfeature.schemas.protobuf.flagd.sync.v1 import (
    sync_pb2,
    sync_pb2_grpc,
)

from ....config import Config
from ..connector import FlagStateConnector
from ..flags import FlagStore

logger = logging.getLogger("openfeature.contrib")


class GrpcWatcher(FlagStateConnector):
    """Flag state connector that streams flag configuration from flagd via gRPC.

    A channel-connectivity monitor (re)starts a worker thread that consumes
    the SyncFlags stream and pushes each payload into the shared FlagStore.
    Provider events are emitted on ready / stale / error transitions.
    """

    def __init__(
        self,
        config: Config,
        flag_store: FlagStore,
        emit_provider_ready: typing.Callable[[ProviderEventDetails, dict], None],
        emit_provider_error: typing.Callable[[ProviderEventDetails], None],
        emit_provider_stale: typing.Callable[[ProviderEventDetails], None],
    ):
        self.flag_store = flag_store
        self.config = config

        self.channel = self._generate_channel(config)
        self.stub = sync_pb2_grpc.FlagSyncServiceStub(self.channel)
        self.retry_backoff_seconds = config.retry_backoff_ms * 0.001
        # BUGFIX: previously initialized from config.retry_backoff_ms, which
        # made the maximum backoff equal to the initial backoff.
        self.retry_backoff_max_seconds = config.retry_backoff_max_ms * 0.001
        self.retry_grace_period = config.retry_grace_period
        self.streamline_deadline_seconds = config.stream_deadline_ms * 0.001
        self.deadline = config.deadline_ms * 0.001
        self.selector = config.selector
        self.provider_id = config.provider_id
        self.emit_provider_ready = emit_provider_ready
        self.emit_provider_error = emit_provider_error
        self.emit_provider_stale = emit_provider_stale

        # Set once the first flag payload has been stored; connect() blocks
        # on this flag.
        self.connected = False
        self.thread: typing.Optional[threading.Thread] = None
        # Grace-period timer between the "stale" event and the error event.
        self.timer: typing.Optional[threading.Timer] = None

        self.start_time = time.time()

    def _generate_channel(self, config: Config) -> grpc.Channel:
        """Build a secure or insecure channel according to the TLS config."""
        target = f"{config.host}:{config.port}"
        # Create the channel with the service config
        options: list[tuple[str, typing.Any]] = [
            ("grpc.keepalive_time_ms", config.keep_alive_time),
            ("grpc.initial_reconnect_backoff_ms", config.retry_backoff_ms),
            ("grpc.max_reconnect_backoff_ms", config.retry_backoff_max_ms),
            # NOTE(review): the minimum reconnect backoff is taken from
            # stream_deadline_ms, which looks unrelated — confirm intent.
            ("grpc.min_reconnect_backoff_ms", config.stream_deadline_ms),
        ]
        if config.default_authority is not None:
            options.append(("grpc.default_authority", config.default_authority))

        if config.channel_credentials is not None:
            # Explicit credentials supplied by the caller take precedence.
            channel_args = {
                "options": options,
                "credentials": config.channel_credentials,
            }
            channel = grpc.secure_channel(target, **channel_args)

        elif config.tls:
            channel_args = {
                "options": options,
                "credentials": grpc.ssl_channel_credentials(),
            }
            if config.cert_path:
                with open(config.cert_path, "rb") as f:
                    channel_args["credentials"] = grpc.ssl_channel_credentials(f.read())

            channel = grpc.secure_channel(target, **channel_args)

        else:
            channel = grpc.insecure_channel(
                target,
                options=options,
            )

        return channel

    def initialize(self, context: EvaluationContext) -> None:
        self.connect()

    def connect(self) -> None:
        """Start connectivity monitoring and block until the first sync."""
        self.active = True

        # Run monitoring in a separate thread
        self.monitor_thread = threading.Thread(
            target=self.monitor, daemon=True, name="FlagdGrpcSyncServiceMonitorThread"
        )
        self.monitor_thread.start()
        ## block until ready or deadline reached
        timeout = self.deadline + time.time()
        while not self.connected and time.time() < timeout:
            time.sleep(0.05)
        logger.debug("Finished blocking gRPC state initialization")

        if not self.connected:
            raise ProviderNotReadyError(
                "Blocking init finished before data synced. Consider increasing startup deadline to avoid inconsistent evaluations."
            )

    def monitor(self) -> None:
        # try_to_connect forces the channel out of IDLE so state changes flow.
        self.channel.subscribe(self._state_change_callback, try_to_connect=True)

    def _state_change_callback(self, new_state: grpc.ChannelConnectivity) -> None:
        """React to channel state: (re)start the worker or schedule the error."""
        logger.debug(f"gRPC state change: {new_state}")
        if (
            new_state == grpc.ChannelConnectivity.READY
            or new_state == grpc.ChannelConnectivity.IDLE
        ):
            if not self.thread or not self.thread.is_alive():
                self.thread = threading.Thread(
                    target=self.listen,
                    daemon=True,
                    name="FlagdGrpcSyncWorkerThread",
                )
                self.thread.start()

            # Reconnected within the grace period: cancel the pending error.
            if self.timer and self.timer.is_alive():
                logger.debug("gRPC error timer expired")
                self.timer.cancel()

        elif new_state == grpc.ChannelConnectivity.TRANSIENT_FAILURE:
            # this is the failed reconnect attempt so we are going into stale
            self.emit_provider_stale(
                ProviderEventDetails(
                    message="gRPC sync disconnected, reconnecting",
                )
            )
            self.start_time = time.time()
            # adding a timer, so we can emit the error event after time
            self.timer = threading.Timer(self.retry_grace_period, self.emit_error)

            logger.debug("gRPC error timer started")
            self.timer.start()
            self.connected = False

    def emit_error(self) -> None:
        """Emit the provider-error event once the grace period has elapsed."""
        logger.debug("gRPC error emitted")
        self.emit_provider_error(
            ProviderEventDetails(
                message="gRPC sync disconnected, reconnecting",
                error_code=ErrorCode.GENERAL,
            )
        )

    def shutdown(self) -> None:
        self.active = False
        self.channel.close()

    def _create_request_args(self) -> dict:
        # Optional scoping fields for the SyncFlags request.
        request_args = {}
        if self.selector is not None:
            request_args["selector"] = self.selector
        if self.provider_id is not None:
            request_args["provider_id"] = self.provider_id

        return request_args

    def listen(self) -> None:
        """Worker loop: fetch sync metadata, then consume the SyncFlags stream.

        Loops while active; gRPC/parse errors are logged and the connection
        is retried.
        """
        call_args = (
            {"timeout": self.streamline_deadline_seconds}
            if self.streamline_deadline_seconds > 0
            else {}
        )
        request_args = self._create_request_args()

        while self.active:
            try:
                context_values_response: sync_pb2.GetMetadataResponse
                if self.config.sync_metadata_disabled:
                    context_values_response = sync_pb2.GetMetadataResponse(
                        metadata=Struct()
                    )
                else:
                    context_values_request = sync_pb2.GetMetadataRequest()
                    context_values_response = self.stub.GetMetadata(
                        context_values_request, wait_for_ready=True
                    )

                context_values = MessageToDict(context_values_response)

                request = sync_pb2.SyncFlagsRequest(**request_args)

                logger.debug("Setting up gRPC sync flags connection")
                for flag_rsp in self.stub.SyncFlags(
                    request, wait_for_ready=True, **call_args
                ):
                    flag_str = flag_rsp.flag_configuration
                    logger.debug(
                        f"Received flag configuration - {abs(hash(flag_str)) % (10**8)}"
                    )
                    self.flag_store.update(json.loads(flag_str))

                    # First successful payload: announce readiness with the
                    # sync metadata as the enrichment context.
                    if not self.connected:
                        self.emit_provider_ready(
                            ProviderEventDetails(
                                message="gRPC sync connection established"
                            ),
                            context_values["metadata"],
                        )
                        self.connected = True

                    if not self.active:
                        logger.debug("Terminating gRPC sync thread")
                        return
            except grpc.RpcError as e:  # noqa: PERF203
                logger.error(f"SyncFlags stream error, {e.code()=} {e.details()=}")
            except json.JSONDecodeError:
                logger.exception(
                    f"Could not parse JSON flag data from SyncFlags endpoint: {flag_str=}"
                )
            except ParseError:
                logger.exception(
                    f"Could not parse flag data using flagd syntax: {flag_str=}"
                )
thread") 221 | return 222 | except grpc.RpcError as e: # noqa: PERF203 223 | logger.error(f"SyncFlags stream error, {e.code()=} {e.details()=}") 224 | except json.JSONDecodeError: 225 | logger.exception( 226 | f"Could not parse JSON flag data from SyncFlags endpoint: {flag_str=}" 227 | ) 228 | except ParseError: 229 | logger.exception( 230 | f"Could not parse flag data using flagd syntax: {flag_str=}" 231 | ) 232 | -------------------------------------------------------------------------------- /providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/process/custom_ops.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import typing 3 | from dataclasses import dataclass 4 | 5 | import mmh3 6 | import semver 7 | 8 | JsonPrimitive = typing.Union[str, bool, float, int] 9 | JsonLogicArg = typing.Union[JsonPrimitive, typing.Sequence[JsonPrimitive]] 10 | 11 | logger = logging.getLogger("openfeature.contrib") 12 | 13 | 14 | @dataclass 15 | class Fraction: 16 | variant: str 17 | weight: int = 1 18 | 19 | 20 | def fractional(data: dict, *args: JsonLogicArg) -> typing.Optional[str]: 21 | if not args: 22 | logger.error("No arguments provided to fractional operator.") 23 | return None 24 | 25 | bucket_by = None 26 | if isinstance(args[0], str): 27 | bucket_by = args[0] 28 | args = args[1:] 29 | else: 30 | seed = data.get("$flagd", {}).get("flagKey", "") 31 | targeting_key = data.get("targetingKey") 32 | if not targeting_key: 33 | logger.error("No targetingKey provided for fractional shorthand syntax.") 34 | return None 35 | bucket_by = seed + targeting_key 36 | 37 | if not bucket_by: 38 | logger.error("No hashKey value resolved") 39 | return None 40 | 41 | hash_ratio = abs(mmh3.hash(bucket_by)) / (2**31 - 1) 42 | bucket = hash_ratio * 100 43 | 44 | total_weight = 0 45 | fractions = [] 46 | try: 47 | for arg in args: 48 | fraction = _parse_fraction(arg) 49 | if fraction: 50 | 
fractions.append(fraction) 51 | total_weight += fraction.weight 52 | 53 | except ValueError: 54 | logger.debug(f"Invalid {args} configuration") 55 | return None 56 | 57 | range_end: float = 0 58 | for fraction in fractions: 59 | range_end += fraction.weight * 100 / total_weight 60 | if bucket < range_end: 61 | return fraction.variant 62 | return None 63 | 64 | 65 | def _parse_fraction(arg: JsonLogicArg) -> Fraction: 66 | if not isinstance(arg, (tuple, list)) or not arg or len(arg) > 2: 67 | raise ValueError( 68 | "Fractional variant weights must be (str, int) tuple or [str] list" 69 | ) 70 | 71 | if not isinstance(arg[0], str): 72 | raise ValueError( 73 | "Fractional variant identifier (first element) isn't of type 'str'" 74 | ) 75 | 76 | if len(arg) >= 2 and not isinstance(arg[1], int): 77 | raise ValueError( 78 | "Fractional variant weight value (second element) isn't of type 'int'" 79 | ) 80 | 81 | fraction = Fraction(variant=arg[0]) 82 | if len(arg) >= 2: 83 | fraction.weight = arg[1] 84 | 85 | return fraction 86 | 87 | 88 | def starts_with(data: dict, *args: JsonLogicArg) -> typing.Optional[bool]: 89 | def f(s1: str, s2: str) -> bool: 90 | return s1.startswith(s2) 91 | 92 | return string_comp(f, data, *args) 93 | 94 | 95 | def ends_with(data: dict, *args: JsonLogicArg) -> typing.Optional[bool]: 96 | def f(s1: str, s2: str) -> bool: 97 | return s1.endswith(s2) 98 | 99 | return string_comp(f, data, *args) 100 | 101 | 102 | def string_comp( 103 | comparator: typing.Callable[[str, str], bool], data: dict, *args: JsonLogicArg 104 | ) -> typing.Optional[bool]: 105 | if not args: 106 | logger.error("No arguments provided to string_comp operator.") 107 | return None 108 | if len(args) != 2: 109 | logger.error("Exactly 2 args expected for string_comp operator.") 110 | return None 111 | arg1, arg2 = args 112 | if not isinstance(arg1, str): 113 | logger.debug(f"incorrect argument for first argument, expected string: {arg1}") 114 | return False 115 | if not 
import json
import re
import typing
from dataclasses import dataclass

from openfeature.event import ProviderEventDetails
from openfeature.exception import ParseError


def _validate_metadata(key: str, value: typing.Union[float, int, str, bool]) -> None:
    """Raise ParseError unless key is a non-empty str and value is a scalar."""
    if key is None:
        raise ParseError("Metadata key must be set")
    elif not isinstance(key, str):
        raise ParseError(f"Metadata key {key} must be of type str, but is {type(key)}")
    elif not key:
        raise ParseError("key must not be empty")
    if value is None:
        raise ParseError(f"Metadata value for key {key} must be set")
    elif not isinstance(value, (float, int, str, bool)):
        raise ParseError(
            f"Metadata value {value} for key {key} must be of type float, int, str or bool, but is {type(value)}"
        )


class FlagStore:
    """Holds the current flag set and emits change events on updates."""

    def __init__(
        self,
        emit_provider_configuration_changed: typing.Callable[
            [ProviderEventDetails], None
        ],
    ):
        self.emit_provider_configuration_changed = emit_provider_configuration_changed
        self.flags: typing.Mapping[str, Flag] = {}
        # Flag-set level metadata, validated on every update().
        self.flag_set_metadata: typing.Mapping[
            str, typing.Union[float, int, str, bool]
        ] = {}

    def get_flag(self, key: str) -> typing.Optional["Flag"]:
        return self.flags.get(key)

    def update(self, flags_data: dict) -> None:
        """Replace the stored flags from a parsed configuration document.

        Resolves `$evaluators` references by textual substitution, validates
        structure and metadata, and emits a configuration-changed event
        listing every flag key.

        Raises:
            ParseError: on malformed flags, metadata, or flag definitions.
        """
        flags = flags_data.get("flags", {})
        metadata = flags_data.get("metadata", {})
        evaluators: typing.Optional[dict] = flags_data.get("$evaluators")
        if evaluators:
            # Inline shared targeting rules: replace each {"$ref": "<name>"}
            # with the referenced rule's JSON.
            transposed = json.dumps(flags)
            for name, rule in evaluators.items():
                transposed = re.sub(
                    rf"{{\s*\"\$ref\":\s*\"{name}\"\s*}}", json.dumps(rule), transposed
                )
            flags = json.loads(transposed)

        if not isinstance(flags, dict):
            raise ParseError("`flags` key of configuration must be a dictionary")
        if not isinstance(metadata, dict):
            raise ParseError("`metadata` key of configuration must be a dictionary")
        for key, value in metadata.items():
            _validate_metadata(key, value)

        self.flags = {key: Flag.from_dict(key, data) for key, data in flags.items()}
        self.flag_set_metadata = metadata

        self.emit_provider_configuration_changed(
            ProviderEventDetails(
                flags_changed=list(self.flags.keys()), metadata=metadata
            )
        )


@dataclass
class Flag:
    """A single flag definition with validation of its configuration."""

    key: str
    state: str
    variants: typing.Mapping[str, typing.Any]
    default_variant: typing.Union[bool, str]
    targeting: typing.Optional[dict] = None
    metadata: typing.Optional[
        typing.Mapping[str, typing.Union[float, int, str, bool]]
    ] = None

    def __post_init__(self) -> None:
        # Validate the parsed configuration eagerly so bad flags fail at
        # update time rather than at evaluation time.
        if not self.state or not isinstance(self.state, str):
            raise ParseError("Incorrect 'state' value provided in flag config")

        if not self.variants or not isinstance(self.variants, dict):
            raise ParseError("Incorrect 'variants' value provided in flag config")

        if not self.default_variant or not isinstance(
            self.default_variant, (str, bool)
        ):
            raise ParseError("Incorrect 'defaultVariant' value provided in flag config")

        if self.targeting and not isinstance(self.targeting, dict):
            raise ParseError("Incorrect 'targeting' value provided in flag config")

        if self.default_variant not in self.variants:
            raise ParseError("Default variant does not match set of variants")

        if self.metadata:
            if not isinstance(self.metadata, dict):
                raise ParseError("Flag metadata is not a valid json object")
            for key, value in self.metadata.items():
                _validate_metadata(key, value)

    @classmethod
    def from_dict(cls, key: str, data: dict) -> "Flag":
        """Build a Flag from one parsed flag configuration entry.

        Operates on a shallow copy so the caller's dictionary is not
        mutated (the previous implementation deleted/popped keys in place).

        Raises:
            ParseError: when the entry cannot be turned into a valid Flag.
        """
        data = dict(data)
        if "defaultVariant" in data:
            data["default_variant"] = data.pop("defaultVariant")

        # Fields added by flagd sync sources that Flag does not model.
        data.pop("source", None)
        data.pop("selector", None)
        try:
            return cls(key=key, **data)
        except ParseError:
            raise
        except Exception as err:
            # e.g. unexpected keyword arguments from unknown config fields.
            raise ParseError from err

    @property
    def default(self) -> tuple[str, typing.Any]:
        """(variant name, value) pair for the configured default variant."""
        return self.get_variant(self.default_variant)

    def get_variant(
        self, variant_key: typing.Union[str, bool]
    ) -> tuple[str, typing.Any]:
        """Look up a variant's value; bool keys map to 'true'/'false'."""
        if isinstance(variant_key, bool):
            variant_key = str(variant_key).lower()

        return variant_key, self.variants.get(variant_key)
import time
import typing

from json_logic import builtins, jsonLogic
from json_logic.types import JsonValue

from openfeature.evaluation_context import EvaluationContext

from .custom_ops import (
    ends_with,
    fractional,
    sem_ver,
    starts_with,
)

# JSONLogic built-ins extended with the flagd custom operators.
OPERATORS = {
    **builtins.BUILTINS,
    "fractional": fractional,
    "starts_with": starts_with,
    "ends_with": ends_with,
    "sem_ver": sem_ver,
}


def targeting(
    key: str,
    targeting: dict,
    evaluation_context: typing.Optional[EvaluationContext] = None,
) -> JsonValue:
    """Evaluate a flag's JSONLogic targeting rule against the context.

    The rule sees the context attributes plus the synthetic `$flagd`
    (flagKey, timestamp) and `targetingKey` entries.
    """
    # BUGFIX: copy the attributes so the synthetic entries below do not leak
    # into the caller's EvaluationContext (the previous code mutated the
    # shared attributes dict in place).
    json_logic_context: dict = (
        dict(evaluation_context.attributes) if evaluation_context else {}
    )
    json_logic_context["$flagd"] = {"flagKey": key, "timestamp": int(time.time())}
    json_logic_context["targetingKey"] = (
        evaluation_context.targeting_key if evaluation_context else None
    )
    return jsonLogic(targeting, json_logic_context, OPERATORS)
class AbstractResolver(Protocol):
    """Structural interface shared by the flagd resolvers.

    Implementations provide lifecycle hooks (initialize/shutdown) and one
    typed ``resolve_*_details`` method per supported flag type; each takes
    a flag key, a default value, and an optional evaluation context.
    """

    def initialize(self, evaluation_context: EvaluationContext) -> None: ...

    def shutdown(self) -> None: ...

    def resolve_boolean_details(
        self,
        key: str,
        default_value: bool,
        evaluation_context: typing.Optional[EvaluationContext] = None,
    ) -> FlagResolutionDetails[bool]: ...

    def resolve_string_details(
        self,
        key: str,
        default_value: str,
        evaluation_context: typing.Optional[EvaluationContext] = None,
    ) -> FlagResolutionDetails[str]: ...

    def resolve_float_details(
        self,
        key: str,
        default_value: float,
        evaluation_context: typing.Optional[EvaluationContext] = None,
    ) -> FlagResolutionDetails[float]: ...

    def resolve_integer_details(
        self,
        key: str,
        default_value: int,
        evaluation_context: typing.Optional[EvaluationContext] = None,
    ) -> FlagResolutionDetails[int]: ...

    def resolve_object_details(
        self,
        key: str,
        default_value: typing.Union[dict, list],
        evaluation_context: typing.Optional[EvaluationContext] = None,
    ) -> FlagResolutionDetails[typing.Union[dict, list]]: ...
class SyncMetadataHook(Hook):
    """Hook that supplies an evaluation context from a callable.

    ``before`` returns the supplier's context; per the OpenFeature hook
    contract, a context returned from the before stage is merged into the
    evaluation's context.
    """

    def __init__(self, context_supplier: typing.Callable[[], EvaluationContext]):
        # Invoked on every before() call; expected to be cheap.
        self.context_supplier = context_supplier

    def before(
        self, hook_context: HookContext, hints: HookHints
    ) -> typing.Optional[EvaluationContext]:
        return self.context_supplier()
dst_path 24 | -------------------------------------------------------------------------------- /providers/openfeature-provider-flagd/tests/e2e/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-feature/python-sdk-contrib/77daa45ff4e2962143f73049bf0836e7d33649dc/providers/openfeature-provider-flagd/tests/e2e/__init__.py -------------------------------------------------------------------------------- /providers/openfeature-provider-flagd/tests/e2e/conftest.py: -------------------------------------------------------------------------------- 1 | import typing 2 | 3 | from tests.e2e.step.config_steps import * # noqa: F403 4 | from tests.e2e.step.context_steps import * # noqa: F403 5 | from tests.e2e.step.event_steps import * # noqa: F403 6 | from tests.e2e.step.flag_step import * # noqa: F403 7 | from tests.e2e.step.provider_steps import * # noqa: F403 8 | 9 | JsonPrimitive = typing.Union[str, bool, float, int] 10 | -------------------------------------------------------------------------------- /providers/openfeature-provider-flagd/tests/e2e/file/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-feature/python-sdk-contrib/77daa45ff4e2962143f73049bf0836e7d33649dc/providers/openfeature-provider-flagd/tests/e2e/file/__init__.py -------------------------------------------------------------------------------- /providers/openfeature-provider-flagd/tests/e2e/file/conftest.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from openfeature.contrib.provider.flagd.config import ResolverType 4 | from tests.e2e.testfilter import TestFilter 5 | 6 | resolver = ResolverType.FILE 7 | feature_list = { 8 | "~targetURI", 9 | "~customCert", 10 | "~unixsocket", 11 | "~reconnect", 12 | "~sync", 13 | "~caching", 14 | "~grace", 15 | "~contextEnrichment", 16 | } 17 | 18 | 
class FlagdContainer(DockerContainer):
    """Testcontainers wrapper around the flagd testbed image.

    Exposes the RPC, in-process sync, health-check and launchpad ports,
    mounts a local ./flags directory into the container, and blocks start()
    until the gRPC health endpoint reports SERVING.
    """

    def __init__(
        self,
        feature: typing.Optional[str] = None,
        **kwargs,
    ) -> None:
        # Feature-specific testbed variants are published as
        # "flagd-testbed-<feature>"; the plain image is used otherwise.
        image: str = "ghcr.io/open-feature/flagd-testbed"
        if feature is not None:
            image = f"{image}-{feature}"
        # The image tag is pinned by the test-harness submodule's version file.
        path = Path(__file__).parents[2] / "openfeature/test-harness/version.txt"
        data = path.read_text().rstrip()
        super().__init__(f"{image}:v{data}", **kwargs)
        self.rpc = 8013  # RPC evaluation port
        self.ipr = 8015  # presumably the in-process sync port — TODO confirm
        self.flagDir = Path("./flags")
        self.flagDir.mkdir(parents=True, exist_ok=True)
        self.with_exposed_ports(self.rpc, self.ipr, HEALTH_CHECK, LAUNCHPAD)
        # Flags directory is mounted read-write so tests can swap flag files.
        self.with_volume_mapping(os.path.abspath(self.flagDir.name), "/flags", "rw")

    def get_port(self, resolver_type: ResolverType):
        """Return the host-mapped port for the given resolver type."""
        if resolver_type == ResolverType.RPC:
            return self.get_exposed_port(self.rpc)
        else:
            return self.get_exposed_port(self.ipr)

    def get_launchpad_url(self):
        # Launchpad is the testbed's HTTP control endpoint.
        return f"http://localhost:{self.get_exposed_port(LAUNCHPAD)}"

    def start(self) -> "FlagdContainer":
        """Start the container and block until flagd reports healthy."""
        super().start()
        self._checker(self.get_container_host_ip(), self.get_exposed_port(HEALTH_CHECK))
        return self

    @wait_container_is_ready(ConnectionError)
    def _checker(self, host: str, port: str) -> None:
        """Wait until flagd is listening and its health check reports SERVING.

        Raises ConnectionError (retried by wait_container_is_ready) when the
        service does not become healthy within roughly 30 seconds.
        """
        # First we wait for Flagd to say it's listening
        wait_for_logs(
            self,
            "listening",
            5,
        )

        time.sleep(1)
        # Second we use the GRPC health check endpoint
        with grpc.insecure_channel(host + ":" + port) as channel:
            health_stub = health_pb2_grpc.HealthStub(channel)

            def health_check_call(stub: health_pb2_grpc.HealthStub):
                # Returns True for SERVING, False for NOT_SERVING, and None
                # (treated as falsy by the caller) for any other status.
                request = health_pb2.HealthCheckRequest()
                resp = stub.Check(request)
                if resp.status == health_pb2.HealthCheckResponse.SERVING:
                    return True
                elif resp.status == health_pb2.HealthCheckResponse.NOT_SERVING:
                    return False

            # Should succeed
            # Check health status every 1 second for 30 seconds
            ok = False
            for _ in range(30):
                ok = health_check_call(health_stub)
                if ok:
                    break
                time.sleep(1)

            if not ok:
                raise ConnectionError("flagD not ready in time")
def to_bool(s: str) -> bool:
    """Parse a gherkin string into a boolean.

    Only the case-insensitive literal "true" maps to True; any other
    value yields False. Surrounding whitespace is ignored, so values
    copied from feature-file tables with padding still parse correctly.
    """
    return s.strip().lower() == "true"


def to_list(s: str) -> list:
    """Parse a comma-separated gherkin list such as '"a", "b", "c"'.

    Double quotes are dropped and each element is stripped of
    surrounding whitespace.
    """
    values = s.replace('"', "").split(",")
    # Use a distinct comprehension variable: the original shadowed the
    # parameter `s`, which works but is an easy source of confusion.
    return [value.strip() for value in values]
import pytest

from openfeature.contrib.provider.flagd.config import ResolverType
from tests.e2e.testfilter import TestFilter

# Resolver mode exercised by this test package, plus the harness feature
# tags that do not apply to it (a leading "~" means exclude).
resolver = ResolverType.RPC
feature_list = ["~targetURI", "~unixsocket", "~sync", "~metadata"]


def pytest_collection_modifyitems(config, items):
    # Deselect gherkin scenarios whose tags don't apply to the RPC resolver.
    test_filter = TestFilter(
        config, feature_list=feature_list, resolver=resolver.value, base_path=__file__
    )
    test_filter.filter_items(items)


@pytest.fixture()
def resolver_type() -> ResolverType:
    # Consumed by the shared provider steps to configure the FlagdProvider.
    return resolver
def wait_for(pred, poll_sec=2, timeout_sec=10):
    """Poll `pred` until it returns a truthy value or the timeout expires.

    Args:
        pred: zero-argument callable returning a truthy value on success.
        poll_sec: seconds to sleep between polls.
        timeout_sec: overall deadline in seconds.

    Returns:
        The truthy value returned by `pred`.

    Raises:
        AssertionError: if `pred` is still falsy when the deadline passes.
    """
    deadline = time.time() + timeout_sec
    ok = pred()
    # Assert on the value observed in the loop rather than re-invoking
    # pred() one extra time after the deadline (the original asserted on a
    # *fresh* pred() call, which could flake if the condition is transient
    # and also made one redundant call on the success path).
    while not ok and time.time() < deadline:
        time.sleep(poll_sec)
        ok = pred()
    assert ok, "condition not met within timeout"
    return ok
@when(
    parsers.cfparse(
        "a config was initialized",
    ),
    target_fixture="config_or_error",
)
def initialize_config(option_values):
    """Build a Config from the collected options.

    Returns a (config, error) tuple so error scenarios can share the
    same target fixture as the happy-path ones.
    """
    try:
        return Config(**option_values), False
    except AttributeError:
        # AttributeError is the failure the error scenarios expect from
        # Config for invalid option combinations.
        return None, True


@when(
    parsers.cfparse(
        'a config was initialized for "{resolver_type}"',
    ),
    target_fixture="config_or_error",
)
def initialize_config_for(resolver_type: str, option_values: dict):
    """Build a Config for an explicit resolver; same (config, error) contract."""
    try:
        return Config(resolver=ResolverType(resolver_type), **option_values), False
    except AttributeError:
        return None, True


@then(
    parsers.cfparse(
        'the option "{option}" of type "{type_info}" should have the value "{value}"',
    )
)
def check_option_value(option, value, type_info, config_or_error):
    """Assert a config attribute equals the expected (type-cast) value.

    The literal "null" means "expect None" and is checked *before*
    casting: the previous cast-first order raised for non-string types
    (e.g. int("null")) instead of comparing against None.
    """
    expected = None if value == "null" else type_cast[type_info](value)
    config, _ = config_or_error
    assert_equal(getattr(config, camel_to_snake(option)), expected)


@then(
    parsers.cfparse(
        "we should have an error",
    )
)
def check_option_error(config_or_error):
    """Assert that config construction failed."""
    _, error = config_or_error
    assert_true(error)
@given(
    parsers.cfparse(
        'a context containing a targeting key with value "{targeting_key}"'
    ),
)
def assign_targeting_context(evaluation_context: EvaluationContext, targeting_key: str):
    """Set the targeting key on the shared evaluation context fixture."""
    evaluation_context.targeting_key = targeting_key


@given(
    parsers.cfparse(
        'a context containing a key "{key}", with type "{type_info}" and with value "{value}"'
    ),
)
def update_context(
    evaluation_context: EvaluationContext, key: str, type_info: str, value: str
):
    """Store a single typed attribute on the evaluation context."""
    # Cast the raw gherkin string to the declared type before storing it.
    evaluation_context.attributes[key] = type_cast[type_info](value)


@when(
    parsers.cfparse(
        'context contains keys {fields:s} with values "{svalue}", "{svalue2}", {ivalue:d}, "{bvalue:bool}"',
        extra_types={"bool": to_bool, "s": to_list},
    ),
)
def assign_targeting_context_2(
    evaluation_context: EvaluationContext,
    fields: list,
    svalue: str,
    svalue2: str,
    ivalue: int,
    bvalue: bool,
):
    # This step's pattern carries exactly four fields — two strings, an
    # int and a bool — matched positionally to the parsed field names.
    evaluation_context.attributes[fields[0]] = svalue
    evaluation_context.attributes[fields[1]] = svalue2
    evaluation_context.attributes[fields[2]] = ivalue
    evaluation_context.attributes[fields[3]] = bvalue


@given(
    parsers.cfparse(
        'a context containing a nested property with outer key "{outer}" and inner key "{inner}", with value "{value}"'
    ),
)
def update_context_nested(
    evaluation_context: EvaluationContext,
    outer: str,
    inner: str,
    value: typing.Union[str, int],
):
    """Store value under attributes[outer][inner] on the evaluation context."""
    # Create the outer dict on first use so repeated steps can extend it.
    if outer not in evaluation_context.attributes:
        evaluation_context.attributes[outer] = {}
    evaluation_context.attributes[outer][inner] = value
def assert_handlers(handles, event_type: str, max_wait: int = 2):
    """Wait until at least one handle of `event_type` has been recorded.

    Polls the shared `handles` list (appended to by the registered event
    handlers) every 0.2s until a matching entry appears or `max_wait`
    seconds have elapsed.

    Args:
        handles: list of {"type": ..., "event": ...} dicts.
        event_type: handler type to wait for ("ready", "error", ...).
        max_wait: maximum seconds to keep polling.

    Returns:
        The handles matching `event_type` (empty if none arrived in time).
        The original returned the *unfiltered* list, which let callers
        that only check len() > 0 pass on the wrong event type.
    """
    poll_interval = 0.2
    deadline = time.time() + max_wait

    def matching():
        return [h for h in handles if h["type"] == event_type]

    matched = matching()
    while not matched and time.time() < deadline:
        time.sleep(poll_interval)
        matched = matching()
    return matched
@then(
    parsers.cfparse(
        "the {event_type} event handler should have been executed",
    )
)
def assert_handler_run(event_type, event_handles):
    """Untimed variant: allow a 30000ms (30s) grace period."""
    assert_handler_run_within(event_type, event_handles, 30000)


@then(
    parsers.cfparse(
        "the {event_type} event handler should have been executed within {time:d}ms",
    )
)
def assert_handler_run_within(event_type, event_handles, time: int):
    """Assert a handler of `event_type` fired within `time` milliseconds."""
    events = assert_handlers(event_handles, event_type, max_wait=int(time / 1000))
    # Count only handles of the requested type; assert_handlers may hand
    # back unfiltered handles, so filter defensively before asserting.
    matching = [e for e in events if e["type"] == event_type]
    assert_greater(len(matching), 0)

    # The original `for event in event_handles: event_handles.remove(event)`
    # mutates the list while iterating and skips every other element;
    # clear() reliably empties the shared handle list between steps.
    event_handles.clear()
@when("the flag was modified")
def assert_flag_change_event(container):
    # Ask the launchpad to modify the flag config, which should trigger a
    # configuration-changed event downstream.
    requests.post(f"{container.get_launchpad_url()}/change", timeout=1)


@then("the flag should be part of the event payload")
def assert_flag_change(key_and_default_and_type: tuple, event_details: EventDetails):
    """The evaluated flag's key must be listed in the change event payload."""
    key, _, _ = key_and_default_and_type
    assert key in event_details.flags_changed


@then(
    parsers.cfparse('the resolved details value should be ""'),
)
def resolve_details_value_string(
    details: FlagEvaluationDetails[JsonPrimitive],
    key_and_default_and_type: tuple,
):
    # Dedicated step for the empty-string expectation — presumably because
    # the quoted {value} pattern below won't match an empty string; it
    # simply delegates with value="".
    resolve_details_value(details, key_and_default_and_type, "")


@then(
    parsers.cfparse('the resolved details value should be "{value}"'),
)
def resolve_details_value(
    details: FlagEvaluationDetails[JsonPrimitive],
    key_and_default_and_type: tuple,
    value: str,
):
    """Compare the resolved value in the flag's declared type, not as a string."""
    _, _, type_info = key_and_default_and_type
    assert_equal(details.value, type_cast[type_info](value))


@then(
    parsers.cfparse('the variant should be "{variant}"'),
)
def resolve_details_variant(
    details: FlagEvaluationDetails[JsonPrimitive],
    variant: str,
):
    """The resolved variant name must match exactly."""
    assert_equal(details.variant, variant)
@then(
    parsers.cfparse('the reason should be "{reason}"'),
)
def resolve_details_reason(
    details: FlagEvaluationDetails[JsonPrimitive],
    reason: str,
):
    """The resolution reason must match the expected Reason enum member."""
    assert_equal(details.reason, Reason(reason))


@then(parsers.cfparse("the resolved metadata should contain"))
def metadata_contains(details: FlagEvaluationDetails[JsonPrimitive], datatable):
    """Check flag metadata against a gherkin datatable of (key, type, value) rows."""
    # Row 0 of the datatable is the header, hence the -1 / range(1, ...).
    assert_equal(len(details.flag_metadata), len(datatable) - 1)  # skip table header
    for i in range(1, len(datatable)):
        key, metadata_type, expected = datatable[i]
        assert_equal(details.flag_metadata[key], type_cast[metadata_type](expected))


@then("the resolved metadata is empty")
def empty_metadata(details: FlagEvaluationDetails[JsonPrimitive]):
    """No flag metadata at all is expected for this evaluation."""
    assert_equal(len(details.flag_metadata), 0)
@given("a provider is registered", target_fixture="client")
def setup_provider_old(
    container: FlagdContainer,
    resolver_type: ResolverType,
    option_values: dict,
) -> OpenFeatureClient:
    """Legacy step: register a "stable" provider and return its client.

    Fixes two defects of the previous version: it passed the `dict`
    *type object* instead of the collected `option_values` fixture
    (raising TypeError when the options were merged), and it returned
    None even though the step's target fixture is "client".
    """
    provider_type = setup_provider(container, resolver_type, "stable", option_values)
    return api.get_client(provider_type)


def get_default_options_for_provider(
    provider_type: str, resolver_type: ResolverType, container, option_values: dict
) -> tuple[dict, bool]:
    """Compute FlagdProvider options for a testbed configuration.

    Args:
        provider_type: one of the TestProviderType values ("stable", ...).
        resolver_type: resolver under test (RPC / in-process / file).
        container: the running FlagdContainer.
        option_values: options collected from earlier gherkin steps.

    Returns:
        (options, wait): keyword options for FlagdProvider, and whether
        the caller should wait for the provider to reach READY.
    """
    launchpad = "default"
    t = TestProviderType(provider_type)
    options: dict = {
        "resolver_type": resolver_type,
        "deadline_ms": 1000,
        "stream_deadline_ms": 0,
        "retry_backoff_ms": 1000,
        "retry_grace_period": 3,
        "port": container.get_port(resolver_type),
    }

    if t == TestProviderType.UNAVAILABLE:
        # No options and no waiting: the provider is expected to fail.
        return {}, False
    elif t == TestProviderType.SSL:
        path = (
            Path(__file__).parents[3]
            / "openfeature/test-harness/ssl/custom-root-cert.crt"
        )
        options["cert_path"] = str(path.absolute())
        options["tls"] = True
        launchpad = "ssl"
    elif t == TestProviderType.SOCKET:
        # Socket tests skip the launchpad restart below.
        return options, True
    elif t == TestProviderType.METADATA:
        launchpad = "metadata"

    if resolver_type == ResolverType.FILE:
        if "selector" in option_values:
            # A selector points at a harness flag file; strip the
            # "rawflags/" prefix used by the gherkin scenarios.
            path = option_values["selector"]
            path = path.replace("rawflags/", "")
            options["offline_flag_source_path"] = os.path.join(
                Path(__file__).parents[3], "openfeature", "test-harness", "flags", path
            )
        else:
            options["offline_flag_source_path"] = os.path.join(
                container.flagDir.name, "allFlags.json"
            )

    # Ask the launchpad to (re)start flagd with the selected configuration.
    requests.post(
        f"{container.get_launchpad_url()}/start?config={launchpad}", timeout=1
    )
    time.sleep(0.1)
    return options, True
@pytest.fixture()
def client(provider_type: str) -> OpenFeatureClient:
    """Client bound to the domain the provider was registered under."""
    return api.get_client(provider_type)


@when(parsers.cfparse("the connection is lost for {seconds}s"))
def flagd_restart(
    seconds,
    container: FlagdContainer,
    provider_type: str,
    resolver_type: ResolverType,
):
    """Ask the launchpad to take flagd down for the given number of seconds."""
    # The request blocks for the whole outage, so its timeout must
    # outlive it; removed a stray `pass` left after the call.
    requests.post(
        f"{container.get_launchpad_url()}/restart?seconds={seconds}",
        timeout=float(seconds) + 2,
    )


@pytest.fixture(autouse=True, scope="package")
def container(request):
    """Package-scoped flagd testbed container, stopped at teardown."""
    container = FlagdContainer()

    container.start()

    def fin():
        try:
            container.stop()
        except:  # noqa: E722 - we want to ensure all containers are stopped, even if we do have an exception here
            logger.debug("container was not running anymore")

    # Teardown code
    request.addfinalizer(fin)

    return container
    def filter_items(self, items):
        """
        Filter collected items based on include/exclude tags and resolver.

        Include tags win first: an item under this filter's base path that
        matches none of them is deselected. Exclude tags are applied next.
        Items outside the base path are never deselected.

        Args:
            items: List of pytest test items.

        Returns:
            None: Updates the `items` in place by deselecting unwanted tests.
        """
        deselected_items = []
        selected_items = []

        for item in items:
            all_tags = self._get_item_tags(item)

            # Debug: Print collected tags for each item
            logger.debug(f"Item: {item.nodeid}, Tags: {all_tags}")

            # Include-only logic: Skip items that do not match include_tags
            if (
                self.include_tags
                and not all_tags.intersection(self.include_tags)
                and self._is_in_base_path(item)
            ):
                deselected_items.append(item)
                continue

            # Exclude logic: Skip items that match any exclude_tags
            if (
                self.exclude_tags
                and all_tags.intersection(self.exclude_tags)
                and self._is_in_base_path(item)
            ):
                deselected_items.append(item)
                continue

            selected_items.append(item)

        # Apply deselection: report to pytest and shrink the collection
        # in place (pytest requires mutating the original list object).
        if deselected_items:
            self.config.hook.pytest_deselected(items=deselected_items)
            items[:] = (
                selected_items  # Update the collection to only include selected items
            )
100 | """ 101 | tags = set() 102 | if hasattr(item, "iter_markers"): 103 | for marker in item.iter_markers(): # Newer pytest versions 104 | tags.add(marker.name) 105 | elif hasattr(item, "keywords"): 106 | for marker in item.keywords: # Older pytest versions 107 | tags.add(marker) 108 | 109 | scenario = getattr(item, "_obj", None) 110 | if ( 111 | scenario 112 | and hasattr(scenario, "__scenario__") 113 | and hasattr(scenario.__scenario__, "tags") 114 | ): 115 | tags.update(scenario.__scenario__.tags) 116 | 117 | return tags 118 | 119 | @staticmethod 120 | def _get_feature_file(item): 121 | """ 122 | Get the path to the feature file for a given test item. 123 | """ 124 | scenario = getattr(item, "_obj", None) 125 | if scenario and hasattr(scenario, "__scenario__"): 126 | return scenario.__scenario__.feature.filename 127 | return None 128 | -------------------------------------------------------------------------------- /providers/openfeature-provider-flagd/tests/flags/.gitignore: -------------------------------------------------------------------------------- 1 | allFlags.json 2 | -------------------------------------------------------------------------------- /providers/openfeature-provider-flagd/tests/flags/basic-flag-broken-default.json: -------------------------------------------------------------------------------- 1 | { 2 | "flags": { 3 | "basic-flag": { 4 | "state": "ENABLED", 5 | "variants": { 6 | "true": true, 7 | "false": false 8 | }, 9 | "defaultVariant": 3, 10 | "targeting": {} 11 | } 12 | } 13 | } -------------------------------------------------------------------------------- /providers/openfeature-provider-flagd/tests/flags/basic-flag-broken-state.json: -------------------------------------------------------------------------------- 1 | { 2 | "flags": { 3 | "basic-flag": { 4 | "state": 3, 5 | "variants": { 6 | "true": true, 7 | "false": false 8 | }, 9 | "defaultVariant": "false", 10 | "targeting": {} 11 | } 12 | } 13 | } 
-------------------------------------------------------------------------------- /providers/openfeature-provider-flagd/tests/flags/basic-flag-broken-targeting.json: -------------------------------------------------------------------------------- 1 | { 2 | "flags": { 3 | "basic-flag": { 4 | "state": "ENABLED", 5 | "variants": { 6 | "true": true, 7 | "false": false 8 | }, 9 | "defaultVariant": "false", 10 | "targeting": [ 11 | {"<": [1,3]} 12 | ] 13 | } 14 | } 15 | } -------------------------------------------------------------------------------- /providers/openfeature-provider-flagd/tests/flags/basic-flag-broken-variants.json: -------------------------------------------------------------------------------- 1 | { 2 | "flags": { 3 | "basic-flag": { 4 | "state": "ENABLED", 5 | "variants": [ 6 | { 7 | "true": true, 8 | "false": false 9 | } 10 | ], 11 | "defaultVariant": "false", 12 | "targeting": {} 13 | } 14 | } 15 | } -------------------------------------------------------------------------------- /providers/openfeature-provider-flagd/tests/flags/basic-flag-combined-metadata.json: -------------------------------------------------------------------------------- 1 | { 2 | "flags": { 3 | "basic-flag": { 4 | "state": "ENABLED", 5 | "variants": { 6 | "true": true, 7 | "false": false 8 | }, 9 | "defaultVariant": "false", 10 | "targeting": {}, 11 | "metadata": { 12 | "string": "a", 13 | "integer": 1, 14 | "float": 1.2, 15 | "bool": true 16 | } 17 | } 18 | }, 19 | "metadata": { 20 | "string": "b", 21 | "integer": 2, 22 | "float": 2.2, 23 | "bool": false, 24 | "flag-set-string": "c", 25 | "flag-set-integer": 3, 26 | "flag-set-float": 3.2, 27 | "flag-set-bool": false 28 | } 29 | } -------------------------------------------------------------------------------- /providers/openfeature-provider-flagd/tests/flags/basic-flag-disabled.json: -------------------------------------------------------------------------------- 1 | { 2 | "flags": { 3 | "basic-flag": { 4 | "state": 
"DISABLED", 5 | "variants": { 6 | "true": true, 7 | "false": false 8 | }, 9 | "defaultVariant": "false", 10 | "targeting": {} 11 | } 12 | } 13 | } -------------------------------------------------------------------------------- /providers/openfeature-provider-flagd/tests/flags/basic-flag-invalid.not-json: -------------------------------------------------------------------------------- 1 | { 2 | "flags": { 3 | "basic-flag": { 4 | "state": "ENABLED", 5 | "variants": { 6 | "true": true, 7 | "false": false 8 | }, 9 | "defaultVariant": "false", 10 | "targeting": {} 11 | } 12 | }, 13 | } -------------------------------------------------------------------------------- /providers/openfeature-provider-flagd/tests/flags/basic-flag-metadata.json: -------------------------------------------------------------------------------- 1 | { 2 | "flags": { 3 | "basic-flag": { 4 | "state": "ENABLED", 5 | "variants": { 6 | "true": true, 7 | "false": false 8 | }, 9 | "defaultVariant": "false", 10 | "targeting": {}, 11 | "metadata": { 12 | "string": "a", 13 | "integer": 1, 14 | "float": 1.2, 15 | "bool": true 16 | } 17 | } 18 | } 19 | } -------------------------------------------------------------------------------- /providers/openfeature-provider-flagd/tests/flags/basic-flag-no-state.json: -------------------------------------------------------------------------------- 1 | { 2 | "flags": { 3 | "basic-flag": { 4 | "variants": { 5 | "true": true, 6 | "false": false 7 | }, 8 | "defaultVariant": "false", 9 | "targeting": {} 10 | } 11 | } 12 | } -------------------------------------------------------------------------------- /providers/openfeature-provider-flagd/tests/flags/basic-flag-set-metadata.json: -------------------------------------------------------------------------------- 1 | { 2 | "flags": { 3 | "basic-flag": { 4 | "state": "ENABLED", 5 | "variants": { 6 | "true": true, 7 | "false": false 8 | }, 9 | "defaultVariant": "false", 10 | "targeting": {} 11 | } 12 | }, 13 | "metadata": { 
14 | "string": "a", 15 | "integer": 1, 16 | "float": 1.2, 17 | "bool": true 18 | } 19 | } -------------------------------------------------------------------------------- /providers/openfeature-provider-flagd/tests/flags/basic-flag-wrong-structure.json: -------------------------------------------------------------------------------- 1 | { 2 | "basic-flag": { 3 | "state": "ENABLED", 4 | "variants": { 5 | "true": true, 6 | "false": false 7 | }, 8 | "defaultVariant": "false", 9 | "targeting": {} 10 | } 11 | } -------------------------------------------------------------------------------- /providers/openfeature-provider-flagd/tests/flags/basic-flag-wrong-variant.json: -------------------------------------------------------------------------------- 1 | { 2 | "flags": { 3 | "basic-flag": { 4 | "state": "ENABLED", 5 | "variants": { 6 | "true": true, 7 | "false": false 8 | }, 9 | "defaultVariant": "a-variant" 10 | } 11 | } 12 | } -------------------------------------------------------------------------------- /providers/openfeature-provider-flagd/tests/flags/basic-flag.json: -------------------------------------------------------------------------------- 1 | { 2 | "flags": { 3 | "basic-flag": { 4 | "state": "ENABLED", 5 | "variants": { 6 | "true": true, 7 | "false": false 8 | }, 9 | "defaultVariant": "false", 10 | "targeting": {} 11 | } 12 | } 13 | } -------------------------------------------------------------------------------- /providers/openfeature-provider-flagd/tests/flags/basic-flag.yaml: -------------------------------------------------------------------------------- 1 | flags: 2 | basic-flag: 3 | state: ENABLED 4 | variants: 5 | "true": true 6 | "false": false 7 | defaultVariant: "false" 8 | targeting: {} -------------------------------------------------------------------------------- /providers/openfeature-provider-flagd/tests/flags/invalid-flag-metadata-list.json: -------------------------------------------------------------------------------- 1 | { 2 | 
"flags": { 3 | "basic-flag": { 4 | "state": "ENABLED", 5 | "variants": { 6 | "true": true, 7 | "false": false 8 | }, 9 | "defaultVariant": "false", 10 | "targeting": {}, 11 | "metadata": ["a"] 12 | } 13 | } 14 | } -------------------------------------------------------------------------------- /providers/openfeature-provider-flagd/tests/flags/invalid-flag-metadata.json: -------------------------------------------------------------------------------- 1 | { 2 | "flags": { 3 | "basic-flag": { 4 | "state": "ENABLED", 5 | "variants": { 6 | "true": true, 7 | "false": false 8 | }, 9 | "defaultVariant": "false", 10 | "targeting": {}, 11 | "metadata": { 12 | "string": { 13 | "a": "a" 14 | }, 15 | "integer": 1, 16 | "float": 1.2, 17 | "bool": true 18 | } 19 | } 20 | }, 21 | "metadata": { 22 | "bool": true 23 | } 24 | } -------------------------------------------------------------------------------- /providers/openfeature-provider-flagd/tests/flags/invalid-flag-set-metadata-list.json: -------------------------------------------------------------------------------- 1 | { 2 | "flags": { 3 | "basic-flag": { 4 | "state": "ENABLED", 5 | "variants": { 6 | "true": true, 7 | "false": false 8 | }, 9 | "defaultVariant": "false", 10 | "targeting": {} 11 | } 12 | }, 13 | "metadata": ["a"] 14 | } -------------------------------------------------------------------------------- /providers/openfeature-provider-flagd/tests/flags/invalid-flag-set-metadata.json: -------------------------------------------------------------------------------- 1 | { 2 | "flags": { 3 | "basic-flag": { 4 | "state": "ENABLED", 5 | "variants": { 6 | "true": true, 7 | "false": false 8 | }, 9 | "defaultVariant": "false", 10 | "targeting": {} 11 | } 12 | }, 13 | "metadata": { 14 | "string": { 15 | "a": "a" 16 | }, 17 | "integer": 1, 18 | "float": 1.2, 19 | "bool": true 20 | } 21 | } -------------------------------------------------------------------------------- 
/providers/openfeature-provider-flagd/tests/flags/invalid-fractional-args-wrong-content.json: -------------------------------------------------------------------------------- 1 | { 2 | "flags": { 3 | "basic-flag": { 4 | "state": "ENABLED", 5 | "variants": { 6 | "default": "default", 7 | "true": "true", 8 | "false": "false" 9 | }, 10 | "defaultVariant": "default", 11 | "targeting": { 12 | "fractional": [[]] 13 | } 14 | } 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /providers/openfeature-provider-flagd/tests/flags/invalid-fractional-args.json: -------------------------------------------------------------------------------- 1 | { 2 | "flags": { 3 | "basic-flag": { 4 | "state": "ENABLED", 5 | "variants": { 6 | "default": "default", 7 | "true": "true", 8 | "false": "false" 9 | }, 10 | "defaultVariant": "default", 11 | "targeting": { 12 | "fractional": [] 13 | } 14 | } 15 | } 16 | } -------------------------------------------------------------------------------- /providers/openfeature-provider-flagd/tests/flags/invalid-fractional-weights-strings.json: -------------------------------------------------------------------------------- 1 | { 2 | "flags": { 3 | "basic-flag": { 4 | "state": "ENABLED", 5 | "variants": { 6 | "default": "default", 7 | "true": "true", 8 | "false": "false" 9 | }, 10 | "defaultVariant": "default", 11 | "targeting": { 12 | "fractional": [ 13 | ["a", "one"], 14 | ["b", "one"] 15 | ] 16 | } 17 | } 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /providers/openfeature-provider-flagd/tests/flags/invalid-fractional-weights.json: -------------------------------------------------------------------------------- 1 | { 2 | "flags": { 3 | "basic-flag": { 4 | "state": "ENABLED", 5 | "variants": { 6 | "default": "default", 7 | "true": "true", 8 | "false": "false" 9 | }, 10 | "defaultVariant": "default", 11 | "targeting": { 12 | "fractional": [ 13 | [3, 50], 
14 | [4, 50] 15 | ] 16 | } 17 | } 18 | } 19 | } -------------------------------------------------------------------------------- /providers/openfeature-provider-flagd/tests/flags/invalid-semver-args.json: -------------------------------------------------------------------------------- 1 | { 2 | "flags": { 3 | "basic-flag": { 4 | "state": "ENABLED", 5 | "variants": { 6 | "default": "default", 7 | "true": "true", 8 | "false": "false" 9 | }, 10 | "defaultVariant": "default", 11 | "targeting": { 12 | "sem_ver": ["1.0.0", "similar to", "1.0.0", "2.0.0"] 13 | } 14 | } 15 | } 16 | } -------------------------------------------------------------------------------- /providers/openfeature-provider-flagd/tests/flags/invalid-semver-op.json: -------------------------------------------------------------------------------- 1 | { 2 | "flags": { 3 | "basic-flag": { 4 | "state": "ENABLED", 5 | "variants": { 6 | "default": "default", 7 | "true": "true", 8 | "false": "false" 9 | }, 10 | "defaultVariant": "default", 11 | "targeting": { 12 | "sem_ver": ["1.0.0", "similar to", "1.0.0"] 13 | } 14 | } 15 | } 16 | } -------------------------------------------------------------------------------- /providers/openfeature-provider-flagd/tests/flags/invalid-stringcomp-args.json: -------------------------------------------------------------------------------- 1 | { 2 | "flags": { 3 | "basic-flag": { 4 | "state": "ENABLED", 5 | "variants": { 6 | "default": "default", 7 | "true": "true", 8 | "false": "false" 9 | }, 10 | "defaultVariant": "default", 11 | "targeting": { 12 | "starts_with": ["abcdefg", "abc", "def"] 13 | } 14 | } 15 | } 16 | } -------------------------------------------------------------------------------- /providers/openfeature-provider-flagd/tests/test_config.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | # not sure if we still need this test, as this is also covered with gherkin tests. 
4 | from openfeature.contrib.provider.flagd.config import ( 5 | DEFAULT_CACHE, 6 | DEFAULT_CACHE_SIZE, 7 | DEFAULT_DEADLINE, 8 | DEFAULT_HOST, 9 | DEFAULT_KEEP_ALIVE, 10 | DEFAULT_OFFLINE_SOURCE_PATH, 11 | DEFAULT_PORT_IN_PROCESS, 12 | DEFAULT_PORT_RPC, 13 | DEFAULT_RESOLVER_TYPE, 14 | DEFAULT_RETRY_BACKOFF, 15 | DEFAULT_STREAM_DEADLINE, 16 | DEFAULT_TLS, 17 | ENV_VAR_CACHE_SIZE, 18 | ENV_VAR_CACHE_TYPE, 19 | ENV_VAR_DEADLINE_MS, 20 | ENV_VAR_HOST, 21 | ENV_VAR_KEEP_ALIVE_TIME_MS, 22 | ENV_VAR_OFFLINE_FLAG_SOURCE_PATH, 23 | ENV_VAR_PORT, 24 | ENV_VAR_RETRY_BACKOFF_MS, 25 | ENV_VAR_STREAM_DEADLINE_MS, 26 | ENV_VAR_TLS, 27 | CacheType, 28 | Config, 29 | ResolverType, 30 | ) 31 | 32 | 33 | def test_return_default_values_rpc(): 34 | config = Config() 35 | assert config.cache == DEFAULT_CACHE 36 | assert config.max_cache_size == DEFAULT_CACHE_SIZE 37 | assert config.deadline_ms == DEFAULT_DEADLINE 38 | assert config.host == DEFAULT_HOST 39 | assert config.keep_alive_time == DEFAULT_KEEP_ALIVE 40 | assert config.offline_flag_source_path == DEFAULT_OFFLINE_SOURCE_PATH 41 | assert config.port == DEFAULT_PORT_RPC 42 | assert config.resolver == DEFAULT_RESOLVER_TYPE 43 | assert config.retry_backoff_ms == DEFAULT_RETRY_BACKOFF 44 | assert config.stream_deadline_ms == DEFAULT_STREAM_DEADLINE 45 | assert config.tls is DEFAULT_TLS 46 | 47 | 48 | def test_return_default_values_in_process(): 49 | config = Config(resolver=ResolverType.IN_PROCESS) 50 | assert config.cache == DEFAULT_CACHE 51 | assert config.max_cache_size == DEFAULT_CACHE_SIZE 52 | assert config.deadline_ms == DEFAULT_DEADLINE 53 | assert config.host == DEFAULT_HOST 54 | assert config.keep_alive_time == DEFAULT_KEEP_ALIVE 55 | assert config.offline_flag_source_path == DEFAULT_OFFLINE_SOURCE_PATH 56 | assert config.port == DEFAULT_PORT_IN_PROCESS 57 | assert config.resolver == ResolverType.IN_PROCESS 58 | assert config.retry_backoff_ms == DEFAULT_RETRY_BACKOFF 59 | assert config.stream_deadline_ms == 
DEFAULT_STREAM_DEADLINE 60 | assert config.tls is DEFAULT_TLS 61 | 62 | 63 | @pytest.fixture(params=ResolverType, scope="module") 64 | def resolver_type(request): 65 | return request.param 66 | 67 | 68 | def test_overrides_defaults_with_environment(monkeypatch, resolver_type): # noqa: PLR0915 69 | cache = CacheType.DISABLED 70 | cache_size = 456 71 | deadline = 1 72 | host = "flagd" 73 | keep_alive = 2 74 | offline_path = "path" 75 | port = 1234 76 | retry_backoff = 3 77 | stream_deadline = 4 78 | tls = True 79 | 80 | monkeypatch.setenv(ENV_VAR_CACHE_TYPE, str(cache.value)) 81 | monkeypatch.setenv(ENV_VAR_CACHE_SIZE, str(cache_size)) 82 | monkeypatch.setenv(ENV_VAR_DEADLINE_MS, str(deadline)) 83 | monkeypatch.setenv(ENV_VAR_HOST, host) 84 | monkeypatch.setenv(ENV_VAR_KEEP_ALIVE_TIME_MS, str(keep_alive)) 85 | monkeypatch.setenv(ENV_VAR_OFFLINE_FLAG_SOURCE_PATH, offline_path) 86 | monkeypatch.setenv(ENV_VAR_PORT, str(port)) 87 | monkeypatch.setenv(ENV_VAR_RETRY_BACKOFF_MS, str(retry_backoff)) 88 | monkeypatch.setenv(ENV_VAR_STREAM_DEADLINE_MS, str(stream_deadline)) 89 | monkeypatch.setenv(ENV_VAR_TLS, str(tls)) 90 | 91 | config = Config() 92 | assert config.cache == cache 93 | assert config.max_cache_size == cache_size 94 | assert config.deadline_ms == deadline 95 | assert config.host == host 96 | assert config.keep_alive_time == keep_alive 97 | assert config.offline_flag_source_path == offline_path 98 | assert config.port == port 99 | assert config.retry_backoff_ms == retry_backoff 100 | assert config.stream_deadline_ms == stream_deadline 101 | assert config.tls is tls 102 | 103 | 104 | def test_uses_arguments_over_environments_and_defaults(monkeypatch, resolver_type): # noqa: PLR0915 105 | cache = CacheType.LRU 106 | cache_size = 456 107 | deadline = 1 108 | host = "flagd" 109 | keep_alive = 2 110 | offline_path = "path" 111 | port = 1234 112 | retry_backoff = 3 113 | stream_deadline = 4 114 | tls = True 115 | 116 | monkeypatch.setenv(ENV_VAR_CACHE_TYPE, 
str(cache.value) + "value") 117 | monkeypatch.setenv(ENV_VAR_CACHE_SIZE, str(cache_size) + "value") 118 | monkeypatch.setenv(ENV_VAR_DEADLINE_MS, str(deadline) + "value") 119 | monkeypatch.setenv(ENV_VAR_HOST, host + "value") 120 | monkeypatch.setenv(ENV_VAR_KEEP_ALIVE_TIME_MS, str(keep_alive) + "value") 121 | monkeypatch.setenv(ENV_VAR_OFFLINE_FLAG_SOURCE_PATH, offline_path + "value") 122 | monkeypatch.setenv(ENV_VAR_PORT, str(port) + "value") 123 | monkeypatch.setenv(ENV_VAR_RETRY_BACKOFF_MS, str(retry_backoff) + "value") 124 | monkeypatch.setenv(ENV_VAR_STREAM_DEADLINE_MS, str(stream_deadline) + "value") 125 | monkeypatch.setenv(ENV_VAR_TLS, str(tls) + "value") 126 | 127 | config = Config( 128 | cache=cache, 129 | max_cache_size=cache_size, 130 | deadline_ms=deadline, 131 | host=host, 132 | port=port, 133 | resolver=resolver_type, 134 | retry_backoff_ms=retry_backoff, 135 | stream_deadline_ms=stream_deadline, 136 | tls=tls, 137 | keep_alive_time=keep_alive, 138 | offline_flag_source_path=offline_path, 139 | ) 140 | assert config.cache == cache 141 | assert config.max_cache_size == cache_size 142 | assert config.deadline_ms == deadline 143 | assert config.host == host 144 | assert config.keep_alive_time == keep_alive 145 | assert config.offline_flag_source_path == offline_path 146 | assert config.port == port 147 | assert config.retry_backoff_ms == retry_backoff 148 | assert config.stream_deadline_ms == stream_deadline 149 | assert config.tls is tls 150 | -------------------------------------------------------------------------------- /providers/openfeature-provider-flagd/tests/test_errors.py: -------------------------------------------------------------------------------- 1 | import os 2 | import time 3 | 4 | import pytest 5 | 6 | from openfeature import api 7 | from openfeature.contrib.provider.flagd import FlagdProvider 8 | from openfeature.contrib.provider.flagd.config import ResolverType 9 | from openfeature.evaluation_context import EvaluationContext 10 | 
from openfeature.event import ProviderEvent 11 | from openfeature.exception import ErrorCode 12 | from openfeature.flag_evaluation import Reason 13 | 14 | 15 | def create_client(provider: FlagdProvider): 16 | api.set_provider(provider) 17 | return api.get_client() 18 | 19 | 20 | @pytest.mark.parametrize( 21 | "file_name", 22 | [ 23 | "not-a-flag.json", 24 | "basic-flag-wrong-structure.json", 25 | "basic-flag-invalid.not-json", 26 | "basic-flag-wrong-variant.json", 27 | "basic-flag-broken-state.json", 28 | "basic-flag-broken-variants.json", 29 | "basic-flag-broken-default.json", 30 | "basic-flag-broken-targeting.json", 31 | ], 32 | ) 33 | def test_file_load_errors(file_name: str): 34 | path = os.path.abspath(os.path.join(os.path.dirname(__file__), "./flags/")) 35 | client = create_client( 36 | FlagdProvider( 37 | resolver_type=ResolverType.IN_PROCESS, 38 | offline_flag_source_path=f"{path}/{file_name}", 39 | ) 40 | ) 41 | 42 | res = client.get_boolean_details("basic-flag", False) 43 | 44 | assert res.value is False 45 | assert res.reason == Reason.ERROR 46 | assert res.error_code == ErrorCode.FLAG_NOT_FOUND 47 | 48 | 49 | @pytest.mark.parametrize( 50 | "file_name", 51 | [ 52 | "invalid-semver-op.json", 53 | "invalid-semver-args.json", 54 | "invalid-stringcomp-args.json", 55 | "invalid-fractional-args.json", 56 | "invalid-fractional-args-wrong-content.json", 57 | "invalid-fractional-weights.json", 58 | "invalid-fractional-weights-strings.json", 59 | ], 60 | ) 61 | def test_json_logic_parse_errors(file_name: str): 62 | path = os.path.abspath(os.path.join(os.path.dirname(__file__), "./flags/")) 63 | client = create_client( 64 | FlagdProvider( 65 | resolver_type=ResolverType.IN_PROCESS, 66 | offline_flag_source_path=f"{path}/{file_name}", 67 | ) 68 | ) 69 | 70 | res = client.get_string_details("basic-flag", "fallback", EvaluationContext("123")) 71 | 72 | assert res.value == "default" 73 | assert res.reason == Reason.DEFAULT 74 | 75 | 76 | def test_flag_disabled(): 77 | 
path = os.path.abspath(os.path.join(os.path.dirname(__file__), "./flags/")) 78 | client = create_client( 79 | FlagdProvider( 80 | resolver_type=ResolverType.IN_PROCESS, 81 | offline_flag_source_path=f"{path}/basic-flag-disabled.json", 82 | ) 83 | ) 84 | 85 | res = client.get_string_details("basic-flag", "fallback", EvaluationContext("123")) 86 | 87 | assert res.value == "fallback" 88 | assert res.reason == Reason.DISABLED 89 | 90 | 91 | @pytest.mark.parametrize("wait", (500, 250)) 92 | def test_grpc_sync_fail_deadline(wait: int): 93 | init_failed = False 94 | 95 | def fail(*args, **kwargs): 96 | nonlocal init_failed 97 | init_failed = True 98 | 99 | api.get_client().add_handler(ProviderEvent.PROVIDER_ERROR, fail) 100 | 101 | t = time.time() 102 | api.set_provider( 103 | FlagdProvider( 104 | resolver_type=ResolverType.IN_PROCESS, 105 | port=99999, # dead port to test failure 106 | deadline_ms=wait, 107 | ) 108 | ) 109 | 110 | elapsed = time.time() - t 111 | assert abs(elapsed - wait * 0.001) < 0.15 112 | assert init_failed 113 | -------------------------------------------------------------------------------- /providers/openfeature-provider-flagd/tests/test_file_store.py: -------------------------------------------------------------------------------- 1 | import os 2 | from unittest.mock import Mock 3 | 4 | import pytest 5 | 6 | from openfeature import api 7 | from openfeature.contrib.provider.flagd import FlagdProvider 8 | from openfeature.contrib.provider.flagd.config import Config 9 | from openfeature.contrib.provider.flagd.resolvers.process.connector.file_watcher import ( 10 | FileWatcher, 11 | ) 12 | from openfeature.contrib.provider.flagd.resolvers.process.flags import Flag, FlagStore 13 | 14 | 15 | def create_client(provider: FlagdProvider): 16 | api.set_provider(provider) 17 | return api.get_client() 18 | 19 | 20 | @pytest.mark.parametrize( 21 | "file_name", 22 | [ 23 | "basic-flag.json", 24 | "basic-flag.yaml", 25 | ], 26 | ) 27 | def 
test_file_load(file_name: str): 28 | emit_provider_configuration_changed = Mock() 29 | emit_provider_ready = Mock() 30 | emit_provider_error = Mock() 31 | flag_store = FlagStore(emit_provider_configuration_changed) 32 | path = os.path.abspath(os.path.join(os.path.dirname(__file__), "./flags/")) 33 | file_watcher = FileWatcher( 34 | Config( 35 | offline_flag_source_path=f"{path}/{file_name}", 36 | ), 37 | flag_store, 38 | emit_provider_ready, 39 | emit_provider_error, 40 | ) 41 | file_watcher.initialize(None) 42 | 43 | flag = flag_store.get_flag("basic-flag") 44 | 45 | assert flag is not None 46 | assert isinstance(flag, Flag) 47 | 48 | flag_set_metadata = flag_store.flag_set_metadata 49 | 50 | assert flag_set_metadata is not None 51 | assert isinstance(flag_set_metadata, dict) 52 | assert len(flag_set_metadata) == 0 53 | 54 | 55 | def test_file_load_metadata(): 56 | emit_provider_configuration_changed = Mock() 57 | emit_provider_ready = Mock() 58 | emit_provider_error = Mock() 59 | flag_store = FlagStore(emit_provider_configuration_changed) 60 | path = os.path.abspath(os.path.join(os.path.dirname(__file__), "./flags/")) 61 | file_watcher = FileWatcher( 62 | Config( 63 | offline_flag_source_path=f"{path}/basic-flag-set-metadata.json", 64 | ), 65 | flag_store, 66 | emit_provider_ready, 67 | emit_provider_error, 68 | ) 69 | file_watcher.initialize(None) 70 | 71 | flag_set_metadata = flag_store.flag_set_metadata 72 | 73 | assert flag_set_metadata is not None 74 | assert isinstance(flag_set_metadata, dict) 75 | assert len(flag_set_metadata) == 4 76 | assert flag_set_metadata["string"] == "a" 77 | assert flag_set_metadata["integer"] == 1 78 | assert flag_set_metadata["float"] == 1.2 79 | assert flag_set_metadata["bool"] 80 | -------------------------------------------------------------------------------- /providers/openfeature-provider-flagd/tests/test_flagd.py: -------------------------------------------------------------------------------- 1 | from numbers import 
Number 2 | 3 | from openfeature.contrib.provider.flagd import FlagdProvider 4 | 5 | 6 | def test_should_get_boolean_flag_from_flagd(flagd_provider_client): 7 | # Given 8 | client = flagd_provider_client 9 | 10 | # When 11 | flag = client.get_boolean_details(flag_key="Key", default_value=True) 12 | 13 | # Then 14 | assert flag is not None 15 | assert flag.value 16 | assert isinstance(flag.value, bool) 17 | 18 | 19 | def test_should_get_integer_flag_from_flagd(flagd_provider_client): 20 | # Given 21 | client = flagd_provider_client 22 | 23 | # When 24 | flag = client.get_integer_details(flag_key="Key", default_value=100) 25 | 26 | # Then 27 | assert flag is not None 28 | assert flag.value == 100 29 | assert isinstance(flag.value, Number) 30 | 31 | 32 | def test_should_get_float_flag_from_flagd(flagd_provider_client): 33 | # Given 34 | client = flagd_provider_client 35 | 36 | # When 37 | flag = client.get_float_details(flag_key="Key", default_value=100) 38 | 39 | # Then 40 | assert flag is not None 41 | assert flag.value == 100 42 | assert isinstance(flag.value, Number) 43 | 44 | 45 | def test_should_get_string_flag_from_flagd(flagd_provider_client): 46 | # Given 47 | client = flagd_provider_client 48 | 49 | # When 50 | flag = client.get_string_details(flag_key="Key", default_value="String") 51 | 52 | # Then 53 | assert flag is not None 54 | assert flag.value == "String" 55 | assert isinstance(flag.value, str) 56 | 57 | 58 | def test_should_get_object_flag_from_flagd(flagd_provider_client): 59 | # Given 60 | client = flagd_provider_client 61 | return_value = { 62 | "String": "string", 63 | "Number": 2, 64 | "Boolean": True, 65 | } 66 | 67 | # When 68 | flag = client.get_object_details(flag_key="Key", default_value=return_value) 69 | 70 | # Then 71 | assert flag is not None 72 | assert flag.value == return_value 73 | assert isinstance(flag.value, dict) 74 | 75 | 76 | def test_get_metadata_returns_metadata_object_with_name(): 77 | provider = FlagdProvider() 78 | 
metadata = provider.get_metadata() 79 | assert metadata.name == "FlagdProvider" 80 | -------------------------------------------------------------------------------- /providers/openfeature-provider-flagd/tests/test_in_process.py: -------------------------------------------------------------------------------- 1 | from unittest.mock import Mock, create_autospec 2 | 3 | import pytest 4 | 5 | from openfeature.contrib.provider.flagd.config import Config 6 | from openfeature.contrib.provider.flagd.resolvers.in_process import InProcessResolver 7 | from openfeature.contrib.provider.flagd.resolvers.process.flags import Flag, FlagStore 8 | from openfeature.evaluation_context import EvaluationContext 9 | from openfeature.exception import FlagNotFoundError, ParseError 10 | 11 | 12 | def targeting(): 13 | return { 14 | "if": [ 15 | {"==": [{"var": "targetingKey"}, "target_variant"]}, 16 | "target_variant", 17 | None, 18 | ] 19 | } 20 | 21 | 22 | def context(targeting_key): 23 | return EvaluationContext(targeting_key=targeting_key) 24 | 25 | 26 | @pytest.fixture 27 | def config(): 28 | return create_autospec(Config) 29 | 30 | 31 | @pytest.fixture 32 | def flag_store(): 33 | return create_autospec(FlagStore) 34 | 35 | 36 | @pytest.fixture 37 | def flag(): 38 | return Flag( 39 | key="flag", 40 | state="ENABLED", 41 | variants={"default_variant": False, "target_variant": True}, 42 | default_variant="default_variant", 43 | targeting=targeting(), 44 | ) 45 | 46 | 47 | @pytest.fixture 48 | def resolver(config): 49 | config.offline_flag_source_path = "flag.json" 50 | config.deadline_ms = 100 51 | return InProcessResolver( 52 | config=config, 53 | emit_provider_ready=Mock(), 54 | emit_provider_error=Mock(), 55 | emit_provider_stale=Mock(), 56 | emit_provider_configuration_changed=Mock(), 57 | ) 58 | 59 | 60 | def test_resolve_boolean_details_flag_not_found(resolver): 61 | resolver.flag_store.get_flag = Mock(return_value=None) 62 | with pytest.raises(FlagNotFoundError): 63 | 
resolver.resolve_boolean_details("nonexistent_flag", False) 64 | 65 | 66 | def test_resolve_boolean_details_disabled_flag(flag, resolver): 67 | flag.state = "DISABLED" 68 | resolver.flag_store.get_flag = Mock(return_value=flag) 69 | 70 | result = resolver.resolve_boolean_details("disabled_flag", False) 71 | 72 | assert result.reason == "DISABLED" 73 | assert result.variant is None 74 | assert not result.value 75 | 76 | 77 | def test_resolve_boolean_details_invalid_variant(resolver, flag): 78 | flag.targeting = {"var": ["targetingKey", "invalid_variant"]} 79 | 80 | resolver.flag_store.get_flag = Mock(return_value=flag) 81 | 82 | with pytest.raises(ParseError): 83 | resolver.resolve_boolean_details("flag", False) 84 | 85 | 86 | @pytest.mark.parametrize( 87 | "input_config, resolve_config, expected", 88 | [ 89 | ( 90 | { 91 | "variants": {"default_variant": False, "target_variant": True}, 92 | "targeting": None, 93 | }, 94 | { 95 | "context": None, 96 | "method": "resolve_boolean_details", 97 | "default_value": False, 98 | }, 99 | {"reason": "STATIC", "variant": "default_variant", "value": False}, 100 | ), 101 | ( 102 | { 103 | "variants": {"default_variant": False, "target_variant": True}, 104 | "targeting": targeting(), 105 | }, 106 | { 107 | "context": context("no_target_variant"), 108 | "method": "resolve_boolean_details", 109 | "default_value": False, 110 | }, 111 | {"reason": "DEFAULT", "variant": "default_variant", "value": False}, 112 | ), 113 | ( 114 | { 115 | "variants": {"default_variant": False, "target_variant": True}, 116 | "targeting": targeting(), 117 | }, 118 | { 119 | "context": context("target_variant"), 120 | "method": "resolve_boolean_details", 121 | "default_value": False, 122 | }, 123 | {"reason": "TARGETING_MATCH", "variant": "target_variant", "value": True}, 124 | ), 125 | ( 126 | { 127 | "variants": {"default_variant": "default", "target_variant": "target"}, 128 | "targeting": targeting(), 129 | }, 130 | { 131 | "context": 
context("target_variant"), 132 | "method": "resolve_string_details", 133 | "default_value": "placeholder", 134 | }, 135 | { 136 | "reason": "TARGETING_MATCH", 137 | "variant": "target_variant", 138 | "value": "target", 139 | }, 140 | ), 141 | ( 142 | { 143 | "variants": {"default_variant": 1.0, "target_variant": 2.0}, 144 | "targeting": targeting(), 145 | }, 146 | { 147 | "context": context("target_variant"), 148 | "method": "resolve_float_details", 149 | "default_value": 0.0, 150 | }, 151 | {"reason": "TARGETING_MATCH", "variant": "target_variant", "value": 2.0}, 152 | ), 153 | ( 154 | { 155 | "variants": {"default_variant": True, "target_variant": False}, 156 | "targeting": targeting(), 157 | }, 158 | { 159 | "context": context("target_variant"), 160 | "method": "resolve_boolean_details", 161 | "default_value": True, 162 | }, 163 | {"reason": "TARGETING_MATCH", "variant": "target_variant", "value": False}, 164 | ), 165 | ( 166 | { 167 | "variants": {"default_variant": 10, "target_variant": 0}, 168 | "targeting": targeting(), 169 | }, 170 | { 171 | "context": context("target_variant"), 172 | "method": "resolve_integer_details", 173 | "default_value": 1, 174 | }, 175 | {"reason": "TARGETING_MATCH", "variant": "target_variant", "value": 0}, 176 | ), 177 | ( 178 | { 179 | "variants": {"default_variant": {}, "target_variant": {}}, 180 | "targeting": targeting(), 181 | }, 182 | { 183 | "context": context("target_variant"), 184 | "method": "resolve_object_details", 185 | "default_value": {}, 186 | }, 187 | {"reason": "TARGETING_MATCH", "variant": "target_variant", "value": {}}, 188 | ), 189 | ], 190 | ids=[ 191 | "static_flag", 192 | "boolean_default_fallback", 193 | "boolean_targeting_match", 194 | "string_targeting_match", 195 | "float_targeting_match", 196 | "boolean_falsy_target", 197 | "integer_falsy_target", 198 | "object_falsy_target", 199 | ], 200 | ) 201 | def test_resolver_details( 202 | resolver, 203 | flag, 204 | input_config, 205 | resolve_config, 206 | 
expected, 207 | ): 208 | flag.variants = input_config["variants"] 209 | flag.targeting = input_config["targeting"] 210 | resolver.flag_store.get_flag = Mock(return_value=flag) 211 | 212 | result = getattr(resolver, resolve_config["method"])( 213 | "flag", resolve_config["default_value"], resolve_config["context"] 214 | ) 215 | 216 | assert result.reason == expected["reason"] 217 | assert result.variant == expected["variant"] 218 | assert result.value == expected["value"] 219 | -------------------------------------------------------------------------------- /providers/openfeature-provider-flagd/tests/test_metadata.py: -------------------------------------------------------------------------------- 1 | import os 2 | import time 3 | from time import sleep 4 | 5 | import pytest 6 | 7 | from openfeature import api 8 | from openfeature.contrib.provider.flagd import FlagdProvider 9 | from openfeature.contrib.provider.flagd.config import ResolverType 10 | from openfeature.contrib.provider.flagd.resolvers.process.flags import ( 11 | _validate_metadata, 12 | ) 13 | from openfeature.event import EventDetails, ProviderEvent 14 | from openfeature.exception import ErrorCode, ParseError 15 | 16 | 17 | def create_client(file_name): 18 | path = os.path.abspath(os.path.join(os.path.dirname(__file__), "./flags/")) 19 | provider = FlagdProvider( 20 | resolver_type=ResolverType.FILE, 21 | offline_flag_source_path=f"{path}/{file_name}", 22 | ) 23 | 24 | api.set_provider(provider) 25 | return api.get_client() 26 | 27 | 28 | def test_should_load_flag_set_metadata(): 29 | client = create_client("basic-flag-set-metadata.json") 30 | res = client.get_boolean_details("basic-flag", False) 31 | 32 | assert res.flag_metadata is not None 33 | assert isinstance(res.flag_metadata, dict) 34 | assert len(res.flag_metadata) == 4 35 | assert res.flag_metadata["string"] == "a" 36 | assert res.flag_metadata["integer"] == 1 37 | assert res.flag_metadata["float"] == 1.2 38 | assert res.flag_metadata["bool"] 
39 | 40 | 41 | def test_should_load_flag_metadata(): 42 | client = create_client("basic-flag-metadata.json") 43 | res = client.get_boolean_details("basic-flag", False) 44 | 45 | assert res.flag_metadata is not None 46 | assert isinstance(res.flag_metadata, dict) 47 | assert len(res.flag_metadata) == 4 48 | assert res.flag_metadata["string"] == "a" 49 | assert res.flag_metadata["integer"] == 1 50 | assert res.flag_metadata["float"] == 1.2 51 | assert res.flag_metadata["bool"] 52 | 53 | 54 | def test_should_load_flag_combined_metadata(): 55 | client = create_client("basic-flag-combined-metadata.json") 56 | res = client.get_boolean_details("basic-flag", False) 57 | 58 | assert res.flag_metadata is not None 59 | assert isinstance(res.flag_metadata, dict) 60 | assert len(res.flag_metadata) == 8 61 | assert res.flag_metadata["string"] == "a" 62 | assert res.flag_metadata["integer"] == 1 63 | assert res.flag_metadata["float"] == 1.2 64 | assert res.flag_metadata["bool"] 65 | assert res.flag_metadata["flag-set-string"] == "c" 66 | assert res.flag_metadata["flag-set-integer"] == 3 67 | assert res.flag_metadata["flag-set-float"] == 3.2 68 | assert not res.flag_metadata["flag-set-bool"] 69 | 70 | 71 | class Channel: 72 | parse_error_received = False 73 | 74 | 75 | def create_error_handler(): 76 | channel = Channel() 77 | 78 | def error_handler(details: EventDetails): 79 | nonlocal channel 80 | if details.error_code == ErrorCode.PARSE_ERROR: 81 | channel.parse_error_received = True 82 | 83 | return error_handler, channel 84 | 85 | 86 | @pytest.mark.parametrize( 87 | "file_name", 88 | [ 89 | "invalid-flag-set-metadata.json", 90 | "invalid-flag-set-metadata-list.json", 91 | "invalid-flag-metadata.json", 92 | "invalid-flag-metadata-list.json", 93 | ], 94 | ) 95 | def test_invalid_flag_set_metadata(file_name): 96 | error_handler, channel = create_error_handler() 97 | 98 | client = create_client(file_name) 99 | client.add_handler(ProviderEvent.PROVIDER_ERROR, error_handler) 100 | 
101 | # keep the test thread alive 102 | max_timeout = 2 103 | start = time.time() 104 | while not channel.parse_error_received: 105 | now = time.time() 106 | if now - start > max_timeout: 107 | raise AssertionError() 108 | sleep(0.01) 109 | 110 | 111 | def test_validate_metadata_with_none_key(): 112 | try: 113 | _validate_metadata(None, "a") 114 | except ParseError: 115 | return 116 | raise AssertionError() 117 | 118 | 119 | def test_validate_metadata_with_empty_key(): 120 | try: 121 | _validate_metadata("", "a") 122 | except ParseError: 123 | return 124 | raise AssertionError() 125 | 126 | 127 | def test_validate_metadata_with_non_string_key(): 128 | try: 129 | _validate_metadata(1, "a") 130 | except ParseError: 131 | return 132 | raise AssertionError() 133 | 134 | 135 | def test_validate_metadata_with_non_string_value(): 136 | try: 137 | _validate_metadata("a", []) 138 | except ParseError: 139 | return 140 | raise AssertionError() 141 | 142 | 143 | def test_validate_metadata_with_none_value(): 144 | try: 145 | _validate_metadata("a", None) 146 | except ParseError: 147 | return 148 | raise AssertionError() 149 | -------------------------------------------------------------------------------- /providers/openfeature-provider-flagd/tests/test_targeting.py: -------------------------------------------------------------------------------- 1 | import itertools 2 | import time 3 | import typing 4 | import unittest 5 | from dataclasses import dataclass 6 | from enum import Enum 7 | from math import floor 8 | 9 | import pytest 10 | from json_logic import builtins, jsonLogic # type: ignore[import-untyped] 11 | 12 | from openfeature.contrib.provider.flagd.resolvers.process.custom_ops import ( 13 | ends_with, 14 | fractional, 15 | sem_ver, 16 | starts_with, 17 | ) 18 | from openfeature.contrib.provider.flagd.resolvers.process.targeting import targeting 19 | from openfeature.evaluation_context import EvaluationContext 20 | 21 | OPERATORS = { 22 | **builtins.BUILTINS, 23 | 
"fractional": fractional, 24 | "starts_with": starts_with, 25 | "ends_with": ends_with, 26 | "sem_ver": sem_ver, 27 | } 28 | 29 | flag_key = "flagKey" 30 | 31 | 32 | class BasicTests(unittest.TestCase): 33 | def test_should_inject_flag_key_as_a_property(self): 34 | rule = {"===": [{"var": "$flagd.flagKey"}, flag_key]} 35 | 36 | result = targeting(flag_key, rule) 37 | 38 | assert result 39 | 40 | def test_should_inject_current_timestamp_as_a_property(self): 41 | ts = floor(time.time() / 1000) 42 | 43 | rule = {">=": [{"var": "$flagd.timestamp"}, ts]} 44 | 45 | assert targeting(flag_key, rule) 46 | 47 | def test_should_override_injected_properties_if_already_present_in_context(self): 48 | rule = {"===": [{"var": "$flagd.flagKey"}, flag_key]} 49 | 50 | ctx = { 51 | "$flagd": { 52 | "flagKey": "someOtherFlag", 53 | }, 54 | } 55 | 56 | assert targeting(flag_key, rule, EvaluationContext(attributes=ctx)) 57 | 58 | 59 | class StringComparisonOperator(unittest.TestCase): 60 | def test_should_evaluate_starts_with_calls(self): 61 | rule = {"starts_with": [{"var": "email"}, "admin"]} 62 | context = {"email": "admin@abc.com"} 63 | 64 | assert targeting(flag_key, rule, EvaluationContext(attributes=context)) 65 | 66 | def test_should_evaluate_ends_with_calls(self): 67 | rule = {"ends_with": [{"var": "email"}, "abc.com"]} 68 | context = {"email": "admin@abc.com"} 69 | 70 | assert targeting(flag_key, rule, EvaluationContext(attributes=context)) 71 | 72 | def test_missing_targeting(self): 73 | rule = {"starts_with": [{"var": "email"}]} 74 | context = {"email": "admin@abc.com"} 75 | 76 | assert not targeting(flag_key, rule, EvaluationContext(attributes=context)) 77 | 78 | def test_non_string_variable(self): 79 | rule = {"ends_with": [{"var": "number"}, "abc.com"]} 80 | context = {"number": 11111} 81 | 82 | assert not targeting(flag_key, rule, EvaluationContext(attributes=context)) 83 | 84 | def test_non_string_comparator(self): 85 | rule = {"ends_with": [{"var": "email"}, 111111]} 86 
| context = {"email": "admin@abc.com"} 87 | 88 | assert not targeting(flag_key, rule, EvaluationContext(attributes=context)) 89 | 90 | 91 | class VersionPrefixed(Enum): 92 | NONE = "None" 93 | FIRST = "First" 94 | SECOND = "Second" 95 | BOTH = "Both" 96 | 97 | 98 | @dataclass 99 | class SemVerTest: 100 | title: str 101 | rule: list[str] 102 | result: typing.Optional[bool] 103 | 104 | 105 | semver_operations: list[SemVerTest] = [ 106 | # Successful and working rules 107 | SemVerTest("equals", ["1.2.3", "=", "1.2.3"], True), 108 | SemVerTest("not equals", ["1.2.3", "!=", "1.2.4"], True), 109 | SemVerTest("lesser", ["1.2.3", "<", "1.2.4"], True), 110 | SemVerTest("lesser equals", ["1.2.3", "<=", "1.2.3"], True), 111 | SemVerTest("greater", ["1.2.4", ">", "1.2.3"], True), 112 | SemVerTest("greater equals", ["1.2.3", ">=", "1.2.3"], True), 113 | SemVerTest("match major", ["1.2.3", "^", "1.0.0"], True), 114 | SemVerTest("match minor", ["5.0.3", "~", "5.0.8"], True), 115 | # Wrong rules 116 | SemVerTest("wrong operator", ["1.0.0", "-", "1.0.0"], None), 117 | SemVerTest("wrong versions", ["myVersion_1", "=", "myVersion_1"], None), 118 | SemVerTest( 119 | "too many arguments", ["myVersion_2", "+", "myVersion_1", "myVersion_1"], None 120 | ), 121 | SemVerTest("too many arguments", ["1.2.3", "=", "1.2.3", "myVersion_1"], None), 122 | ] 123 | 124 | 125 | def semver_test_naming(vals): 126 | if isinstance(vals, SemVerTest): 127 | return vals.title 128 | elif isinstance(vals, VersionPrefixed): 129 | return f"prefixing '{vals.value}'" 130 | elif isinstance(vals, str): 131 | return f"with '{vals}'" 132 | 133 | 134 | @pytest.mark.parametrize( 135 | ("semver_test", "prefix_state", "prefix"), 136 | itertools.product(semver_operations, VersionPrefixed, ["V", "v"]), 137 | ids=semver_test_naming, 138 | ) 139 | def test_sem_ver_operator(semver_test: SemVerTest, prefix_state, prefix): 140 | """Testing SemVer operator `semver_test.title` for `semver_test.rule` prefixing `prefix_state.value` 
version(s) with `prefix`""" 141 | version1 = semver_test.rule[0] 142 | operator = semver_test.rule[1] 143 | version2 = semver_test.rule[2] 144 | 145 | if prefix_state is VersionPrefixed.FIRST or prefix_state is VersionPrefixed.BOTH: 146 | version1 = prefix + version1 147 | 148 | if prefix_state is VersionPrefixed.SECOND or prefix_state is VersionPrefixed.BOTH: 149 | version2 = prefix + version2 150 | 151 | semver_rule = [version1, operator, version2] 152 | semver_rule.extend(semver_test.rule[3:]) 153 | 154 | gen_rule = {"sem_ver": semver_rule} 155 | 156 | assert targeting(flag_key, gen_rule) is semver_test.result 157 | 158 | 159 | class FractionalOperator(unittest.TestCase): 160 | def test_should_evaluate_valid_rule(self): 161 | rule = { 162 | "fractional": [ 163 | {"cat": [{"var": "$flagd.flagKey"}, {"var": "key"}]}, 164 | ["red", 50], 165 | ["blue", 50], 166 | ], 167 | } 168 | 169 | logic = targeting( 170 | "flagA", rule, EvaluationContext(attributes={"key": "bucketKeyA"}) 171 | ) 172 | assert logic == "red" 173 | 174 | def test_should_evaluate_valid_rule2(self): 175 | rule = { 176 | "fractional": [ 177 | {"cat": [{"var": "$flagd.flagKey"}, {"var": "key"}]}, 178 | ["red", 50], 179 | ["blue", 50], 180 | ], 181 | } 182 | 183 | logic = targeting( 184 | "flagA", rule, EvaluationContext(attributes={"key": "bucketKeyB"}) 185 | ) 186 | assert logic == "blue" 187 | 188 | def test_should_evaluate_valid_rule_with_targeting_key(self): 189 | rule = { 190 | "fractional": [ 191 | ["red", 50], 192 | ["blue", 50], 193 | ], 194 | } 195 | 196 | logic = targeting("flagA", rule, EvaluationContext(targeting_key="bucketKeyB")) 197 | assert logic == "blue" 198 | 199 | def test_should_evaluate_valid_rule_with_targeting_key_although_one_does_not_have_a_fraction( 200 | self, 201 | ): 202 | rule = { 203 | "fractional": [["red", 1], ["blue"]], 204 | } 205 | 206 | logic = targeting("flagA", rule, EvaluationContext(targeting_key="bucketKeyB")) 207 | assert logic == "blue" 208 | 209 | def 
test_should_return_null_if_targeting_key_is_missing(self): 210 | rule = { 211 | "fractional": [ 212 | ["red", 1], 213 | ["blue", 1], 214 | ], 215 | } 216 | 217 | logic = jsonLogic(rule, {}, OPERATORS) 218 | assert logic is None 219 | 220 | def test_bucket_sum_with_sum_bigger_than_100(self): 221 | rule = { 222 | "fractional": [ 223 | ["red", 55], 224 | ["blue", 55], 225 | ], 226 | } 227 | 228 | logic = targeting("flagA", rule, EvaluationContext(targeting_key="key")) 229 | assert logic == "blue" 230 | 231 | def test_bucket_sum_with_sum_lower_than_100(self): 232 | rule = { 233 | "fractional": [ 234 | ["red", 45], 235 | ["blue", 45], 236 | ], 237 | } 238 | 239 | logic = targeting("flagA", rule, EvaluationContext(targeting_key="key")) 240 | assert logic == "blue" 241 | 242 | def test_buckets_properties_to_have_variant_and_fraction(self): 243 | rule = { 244 | "fractional": [ 245 | ["red", 50], 246 | [100, 50], 247 | ], 248 | } 249 | 250 | logic = targeting("flagA", rule, EvaluationContext(targeting_key="key")) 251 | assert logic is None 252 | 253 | def test_buckets_properties_to_have_variant_and_fraction2(self): 254 | rule = { 255 | "fractional": [ 256 | ["red", 45, 1256], 257 | ["blue", 4, 455], 258 | ], 259 | } 260 | 261 | logic = targeting("flagA", rule, EvaluationContext(targeting_key="key")) 262 | assert logic is None 263 | -------------------------------------------------------------------------------- /providers/openfeature-provider-flipt/CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | ## [0.1.3](https://github.com/open-feature/python-sdk-contrib/compare/openfeature-provider-flipt/v0.1.2...openfeature-provider-flipt/v0.1.3) (2025-01-21) 4 | 5 | 6 | ### 🧹 Chore 7 | 8 | * update readme ([01fd932](https://github.com/open-feature/python-sdk-contrib/commit/01fd932aca73a24c681bc6cde7ab272c75082b20)) 9 | 10 | ## 
[0.1.2](https://github.com/open-feature/python-sdk-contrib/compare/openfeature-provider-flipt/v0.1.1...openfeature-provider-flipt/v0.1.2) (2025-01-21) 11 | 12 | 13 | ### 🧹 Chore 14 | 15 | * update readme ([#157](https://github.com/open-feature/python-sdk-contrib/issues/157)) ([91b49b5](https://github.com/open-feature/python-sdk-contrib/commit/91b49b581d7dcf7b2cf419beb5b7bf9874b18334)) 16 | 17 | ## [0.1.1](https://github.com/open-feature/python-sdk-contrib/compare/openfeature-provider-flipt-v0.1.0...openfeature-provider-flipt/v0.1.1) (2025-01-21) 18 | 19 | 20 | ### ✨ New Features 21 | 22 | * Add Flipt provider ([#143](https://github.com/open-feature/python-sdk-contrib/issues/143)) ([599a6dd](https://github.com/open-feature/python-sdk-contrib/commit/599a6dd0a5abffedaa8a176933547edad19e53f4)) 23 | -------------------------------------------------------------------------------- /providers/openfeature-provider-flipt/README.md: -------------------------------------------------------------------------------- 1 | # OpenFeature Flipt Provider 2 | 3 | This provider is designed to evaluate feature flags against the Flipt api using Flipt's [OpenFeature remote evaluation protocol (OFREP)](https://docs.flipt.io/reference/openfeature/overview) API. 4 | This provider performs flag evaluations via HTTP. 5 | 6 | ----- 7 | 8 | ## Table of Contents 9 | 10 | - [Installation](#installation) 11 | - [License](#license) 12 | 13 | ## Installation 14 | 15 | ```console 16 | pip install openfeature-provider-flipt 17 | ``` 18 | 19 | ## Configuration 20 | 21 | ```python 22 | from openfeature import api 23 | from openfeature.contrib.provider.flipt import FliptProvider 24 | 25 | api.set_provider(FliptProvider(base_url="", namespace="")) 26 | client = api.get_client() 27 | client.get_boolean_value("", True) 28 | ``` 29 | 30 | ## License 31 | 32 | Apache 2.0 - See [LICENSE](./LICENSE) for more information. 
33 | -------------------------------------------------------------------------------- /providers/openfeature-provider-flipt/pyproject.toml: -------------------------------------------------------------------------------- 1 | # pyproject.toml 2 | [build-system] 3 | requires = ["hatchling"] 4 | build-backend = "hatchling.build" 5 | 6 | [project] 7 | name = "openfeature-provider-flipt" 8 | version = "0.1.3" 9 | description = "OpenFeature provider for the Flipt feature flagging service" 10 | readme = "README.md" 11 | authors = [{ name = "OpenFeature", email = "openfeature-core@groups.io" }] 12 | license = { file = "LICENSE" } 13 | classifiers = [ 14 | "License :: OSI Approved :: Apache Software License", 15 | "Programming Language :: Python", 16 | "Programming Language :: Python :: 3", 17 | ] 18 | keywords = [] 19 | dependencies = [ 20 | "openfeature-sdk>=0.7.0", 21 | "openfeature-provider-ofrep>=0.1.0", 22 | ] 23 | requires-python = ">=3.9" 24 | 25 | [project.urls] 26 | Homepage = "https://github.com/open-feature/python-sdk-contrib" 27 | 28 | [tool.hatch] 29 | 30 | [tool.hatch.envs.hatch-test] 31 | dependencies = [ 32 | "coverage[toml]>=6.5", 33 | "pytest", 34 | "requests-mock", 35 | ] 36 | 37 | [tool.hatch.envs.hatch-test.scripts] 38 | run = "pytest {args:tests}" 39 | run-cov = "coverage run -m pytest {args:tests}" 40 | cov-combine = "coverage combine" 41 | cov-report = [ 42 | "coverage xml", 43 | "coverage html", 44 | "coverage report", 45 | ] 46 | cov = [ 47 | "test-cov", 48 | "cov-report", 49 | ] 50 | 51 | [tool.hatch.envs.mypy] 52 | dependencies = [ 53 | "mypy[faster-cache]>=1.13.0", 54 | "types-requests", 55 | ] 56 | 57 | [tool.hatch.envs.mypy.scripts] 58 | run = "mypy" 59 | 60 | [tool.hatch.build.targets.sdist] 61 | exclude = [ 62 | ".gitignore", 63 | "schemas", 64 | ] 65 | 66 | [tool.hatch.build.targets.wheel] 67 | packages = ["src/openfeature"] 68 | 69 | [tool.coverage.run] 70 | omit = [ 71 | "tests/**", 72 | ] 73 | 74 | [tool.mypy] 75 | mypy_path = "src" 76 
| files = "src" 77 | 78 | python_version = "3.9" # should be identical to the minimum supported version 79 | namespace_packages = true 80 | explicit_package_bases = true 81 | local_partial_types = true 82 | pretty = true 83 | 84 | strict = true 85 | disallow_any_generics = false 86 | -------------------------------------------------------------------------------- /providers/openfeature-provider-flipt/src/openfeature/contrib/provider/flipt/__init__.py: -------------------------------------------------------------------------------- 1 | from typing import Callable, Optional 2 | 3 | from openfeature.contrib.provider.ofrep import OFREPProvider 4 | from openfeature.provider import Metadata 5 | 6 | # Only export the FliptProvider class symbol form the package 7 | __all__ = ["FliptProvider"] 8 | 9 | 10 | class FliptProvider(OFREPProvider): 11 | """A provider for Flipt feature flags service that extends the OFREP Provider for interacting with Flipt's OFREP API""" 12 | 13 | def __init__( 14 | self, 15 | base_url: str, 16 | namespace: str, 17 | *, 18 | headers_factory: Optional[Callable[[], dict[str, str]]] = None, 19 | timeout: float = 5.0, 20 | ): 21 | """Override the OFREPProvider constructor to add a namespace parameter""" 22 | 23 | # Build a headers factory function that includes the Flipt namespace header 24 | headers_factory = self._resolve_header_factory(namespace, headers_factory) 25 | super().__init__(base_url, headers_factory=headers_factory, timeout=timeout) 26 | 27 | def _resolve_header_factory( 28 | self, namespace: str, headers_factory: Optional[Callable[[], dict[str, str]]] 29 | ) -> Callable[[], dict[str, str]]: 30 | """ 31 | Resolves and returns a headers factory callable that includes the "X-Flipt-Namespace" header. 32 | 33 | If a headers factory is provided, it will be called and its headers will be merged with the 34 | "X-Flipt-Namespace" header. 
If no headers factory is provided, a new factory will be created 35 | that only includes the "X-Flipt-Namespace" header. 36 | 37 | Args: 38 | namespace (str): The namespace value to be included in the "X-Flipt-Namespace" header. 39 | headers_factory (Optional[Callable[[], Dict[str, str]]]): An optional callable that returns 40 | a dictionary of headers. 41 | 42 | Returns: 43 | Callable[[], Dict[str, str]]: A callable that returns a dictionary of headers including 44 | the "X-Flipt-Namespace" header. 45 | """ 46 | if headers_factory is None: 47 | headers = {"X-Flipt-Namespace": namespace} 48 | else: 49 | headers = { 50 | **headers_factory(), 51 | "X-Flipt-Namespace": namespace, 52 | } 53 | 54 | return lambda: headers 55 | 56 | def get_metadata(self) -> Metadata: 57 | return Metadata(name="Flipt Provider") 58 | -------------------------------------------------------------------------------- /providers/openfeature-provider-flipt/tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/open-feature/python-sdk-contrib/77daa45ff4e2962143f73049bf0836e7d33649dc/providers/openfeature-provider-flipt/tests/__init__.py -------------------------------------------------------------------------------- /providers/openfeature-provider-flipt/tests/conftest.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from openfeature.contrib.provider.flipt import FliptProvider 4 | 5 | 6 | @pytest.fixture 7 | def flipt_provider(): 8 | return FliptProvider("http://localhost:8080") 9 | -------------------------------------------------------------------------------- /providers/openfeature-provider-flipt/tests/test_provider.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from openfeature.contrib.provider.flipt import FliptProvider 4 | from openfeature.evaluation_context import EvaluationContext 5 | 
from openfeature.exception import ( 6 | FlagNotFoundError, 7 | GeneralError, 8 | InvalidContextError, 9 | ParseError, 10 | TypeMismatchError, 11 | ) 12 | from openfeature.flag_evaluation import FlagResolutionDetails, Reason 13 | 14 | 15 | @pytest.mark.parametrize( 16 | "headers_factory_fixture", [None, lambda: {"Authorization": "Bearer token"}] 17 | ) 18 | def test_flipt_provider_init(headers_factory_fixture): 19 | """Test that the FliptProvider correctly initializes and merges the headers factory with the namespace header""" 20 | provider = FliptProvider( 21 | "http://localhost:8080", 22 | "test-namespace", 23 | headers_factory=headers_factory_fixture, 24 | ) 25 | assert provider.base_url == "http://localhost:8080" 26 | if headers_factory_fixture: 27 | assert provider.headers_factory() == { 28 | **headers_factory_fixture(), 29 | "X-Flipt-Namespace": "test-namespace", 30 | } 31 | else: 32 | assert provider.headers_factory() == { 33 | "X-Flipt-Namespace": "test-namespace", 34 | } 35 | 36 | 37 | @pytest.mark.parametrize( 38 | "get_method, resolved_value, default_value", 39 | ( 40 | ("resolve_boolean_details", True, False), 41 | ("resolve_string_details", "resolved_flag_str", "default_flag_str"), 42 | ("resolve_integer_details", 100, 0), 43 | ("resolve_float_details", 10.23, 0.0), 44 | ( 45 | "resolve_object_details", 46 | { 47 | "String": "string", 48 | "Number": 2, 49 | "Boolean": True, 50 | }, 51 | {}, 52 | ), 53 | ("resolve_object_details", ["string1", "string2"], []), 54 | ), 55 | ) 56 | def test_flipt_provider_successful_resolution( 57 | get_method, resolved_value, default_value, requests_mock 58 | ): 59 | """Mock any call to Flipt OFREP api and validat that the resolution function for each type returns the expected FlagResolutionDetails""" 60 | 61 | provider = FliptProvider("http://localhost:8080", "test-namespace") 62 | requests_mock.post( 63 | "http://localhost:8080/ofrep/v1/evaluate/flags/flag_key", 64 | json={ 65 | "key": "flag_key", 66 | "reason": 
"TARGETING_MATCH", 67 | "variant": str(resolved_value), 68 | "metadata": {"foo": "bar"}, 69 | "value": resolved_value, 70 | }, 71 | ) 72 | 73 | resolution = getattr(provider, get_method)("flag_key", default_value) 74 | 75 | assert resolution == FlagResolutionDetails( 76 | value=resolved_value, 77 | reason=Reason.TARGETING_MATCH, 78 | variant=str(resolved_value), 79 | flag_metadata={"foo": "bar"}, 80 | ) 81 | 82 | 83 | def test_flipt_provider_flag_not_found(requests_mock): 84 | provider = FliptProvider("http://localhost:8080", "test-namespace") 85 | requests_mock.post( 86 | "http://localhost:8080/ofrep/v1/evaluate/flags/flag_key", 87 | status_code=404, 88 | json={ 89 | "key": "flag_key", 90 | "errorCode": "FLAG_NOT_FOUND", 91 | "errorDetails": "Flag 'flag_key' not found", 92 | }, 93 | ) 94 | 95 | with pytest.raises(FlagNotFoundError): 96 | provider.resolve_boolean_details("flag_key", False) 97 | 98 | 99 | def test_flipt_provider_invalid_context(requests_mock): 100 | provider = FliptProvider("http://localhost:8080", "test-namespace") 101 | requests_mock.post( 102 | "http://localhost:8080/ofrep/v1/evaluate/flags/flag_key", 103 | status_code=400, 104 | json={ 105 | "key": "flag_key", 106 | "errorCode": "INVALID_CONTEXT", 107 | "errorDetails": "Invalid context provided", 108 | }, 109 | ) 110 | 111 | with pytest.raises(InvalidContextError): 112 | provider.resolve_boolean_details("flag_key", False) 113 | 114 | 115 | def test_flipt_provider_invalid_response(requests_mock): 116 | provider = FliptProvider("http://localhost:8080", "test-namespace") 117 | requests_mock.post( 118 | "http://localhost:8080/ofrep/v1/evaluate/flags/flag_key", text="invalid" 119 | ) 120 | 121 | with pytest.raises(ParseError): 122 | provider.resolve_boolean_details("flag_key", False) 123 | 124 | 125 | def test_flipt_provider_evaluation_context(requests_mock): 126 | provider = FliptProvider("http://localhost:8080", "test-namespace") 127 | 128 | def match_request_json(request): 129 | return 
request.json() == {"context": {"targetingKey": "1", "foo": "bar"}} 130 | 131 | requests_mock.post( 132 | "http://localhost:8080/ofrep/v1/evaluate/flags/flag_key", 133 | json={ 134 | "key": "flag_key", 135 | "reason": "TARGETING_MATCH", 136 | "variant": "true", 137 | "metadata": {}, 138 | "value": True, 139 | }, 140 | additional_matcher=match_request_json, 141 | ) 142 | 143 | context = EvaluationContext("1", {"foo": "bar"}) 144 | resolution = provider.resolve_boolean_details( 145 | "flag_key", False, evaluation_context=context 146 | ) 147 | 148 | assert resolution == FlagResolutionDetails( 149 | value=True, 150 | reason=Reason.TARGETING_MATCH, 151 | variant="true", 152 | ) 153 | 154 | 155 | def test_flipt_provider_retry_after_shortcircuit_resolution(requests_mock): 156 | provider = FliptProvider("http://localhost:8080", "test-namespace") 157 | requests_mock.post( 158 | "http://localhost:8080/ofrep/v1/evaluate/flags/flag_key", 159 | status_code=429, 160 | headers={"Retry-After": "1"}, 161 | ) 162 | 163 | with pytest.raises(GeneralError, match="Rate limited, retry after: 1"): 164 | provider.resolve_boolean_details("flag_key", False) 165 | with pytest.raises( 166 | GeneralError, match="OFREP evaluation paused due to TooManyRequests" 167 | ): 168 | provider.resolve_boolean_details("flag_key", False) 169 | 170 | 171 | def test_flipt_provider_typecheck_flag_value(requests_mock): 172 | provider = FliptProvider("http://localhost:8080", "test-namespace") 173 | requests_mock.post( 174 | "http://localhost:8080/ofrep/v1/evaluate/flags/flag_key", 175 | json={ 176 | "key": "flag_key", 177 | "reason": "TARGETING_MATCH", 178 | "variant": "true", 179 | "metadata": {}, 180 | "value": "true", 181 | }, 182 | ) 183 | 184 | with pytest.raises(TypeMismatchError): 185 | provider.resolve_boolean_details("flag_key", False) 186 | -------------------------------------------------------------------------------- /providers/openfeature-provider-ofrep/CHANGELOG.md: 
-------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | ## [0.1.1](https://github.com/open-feature/python-sdk-contrib/compare/openfeature-provider-ofrep/v0.1.0...openfeature-provider-ofrep/v0.1.1) (2025-01-21) 4 | 5 | 6 | ### 🐛 Bug Fixes 7 | 8 | * Allow configuring ofrep provider requests to api at base … ([#142](https://github.com/open-feature/python-sdk-contrib/issues/142)) ([09402df](https://github.com/open-feature/python-sdk-contrib/commit/09402df3129c1b9d219465b82807fa53aafec039)) 9 | * Pin the ofrep provider's requests dep >=2.27.0 because of… ([#161](https://github.com/open-feature/python-sdk-contrib/issues/161)) ([9ffc7ec](https://github.com/open-feature/python-sdk-contrib/commit/9ffc7ec91d1fff0d436099e878fac9c080a42b51)) 10 | 11 | ## 0.1.0 (2024-10-02) 12 | 13 | 14 | ### ✨ New Features 15 | 16 | * add initial skeleton for OFREP provider ([#86](https://github.com/open-feature/python-sdk-contrib/issues/86)) ([00a5a18](https://github.com/open-feature/python-sdk-contrib/commit/00a5a18a76ef1435de67f312e384a97823bd185b)) 17 | * implement basic functionality of OFREP provider ([#88](https://github.com/open-feature/python-sdk-contrib/issues/88)) ([cb42cc0](https://github.com/open-feature/python-sdk-contrib/commit/cb42cc0001e19793f391351a1ce5bafe1831025f)) 18 | 19 | ## Changelog 20 | -------------------------------------------------------------------------------- /providers/openfeature-provider-ofrep/README.md: -------------------------------------------------------------------------------- 1 | # OFREP Provider for OpenFeature 2 | 3 | This provider is designed to use the [OpenFeature Remote Evaluation Protocol (OFREP)](https://openfeature.dev/specification/appendix-c). 
4 | 5 | ## Installation 6 | 7 | ``` 8 | pip install openfeature-provider-ofrep 9 | ``` 10 | 11 | ## Configuration and Usage 12 | 13 | Instantiate a new OFREPProvider instance and configure the OpenFeature SDK to use it: 14 | 15 | ```python 16 | from openfeature import api 17 | from openfeature.contrib.provider.ofrep import OFREPProvider 18 | 19 | api.set_provider(OFREPProvider()) 20 | ``` 21 | 22 | ### Configuration options 23 | 24 | 25 | 26 | ## License 27 | 28 | Apache 2.0 - See [LICENSE](./LICENSE) for more information. 29 | -------------------------------------------------------------------------------- /providers/openfeature-provider-ofrep/pyproject.toml: -------------------------------------------------------------------------------- 1 | # pyproject.toml 2 | [build-system] 3 | requires = ["hatchling"] 4 | build-backend = "hatchling.build" 5 | 6 | [project] 7 | name = "openfeature-provider-ofrep" 8 | version = "0.1.1" 9 | description = "OpenFeature provider for the OpenFeature Remote Evaluation Protocol (OFREP)" 10 | readme = "README.md" 11 | authors = [{ name = "OpenFeature", email = "openfeature-core@groups.io" }] 12 | license = { file = "LICENSE" } 13 | classifiers = [ 14 | "License :: OSI Approved :: Apache Software License", 15 | "Programming Language :: Python", 16 | "Programming Language :: Python :: 3", 17 | ] 18 | keywords = [] 19 | dependencies = [ 20 | "openfeature-sdk>=0.7.0", 21 | "requests>=2.27.0" 22 | ] 23 | requires-python = ">=3.9" 24 | 25 | [project.urls] 26 | Homepage = "https://github.com/open-feature/python-sdk-contrib" 27 | 28 | [tool.hatch] 29 | 30 | [tool.hatch.envs.hatch-test] 31 | dependencies = [ 32 | "coverage[toml]>=6.5", 33 | "pytest", 34 | "requests-mock", 35 | ] 36 | 37 | [tool.hatch.envs.hatch-test.scripts] 38 | run = "pytest {args:tests}" 39 | run-cov = "coverage run -m pytest {args:tests}" 40 | cov-combine = "coverage combine" 41 | cov-report = [ 42 | "coverage xml", 43 | "coverage html", 44 | "coverage report", 45 | ] 46 | 
cov = [ 47 | "test-cov", 48 | "cov-report", 49 | ] 50 | 51 | [tool.hatch.envs.mypy] 52 | dependencies = [ 53 | "mypy[faster-cache]>=1.13.0", 54 | "types-requests", 55 | ] 56 | 57 | [tool.hatch.envs.mypy.scripts] 58 | run = "mypy" 59 | 60 | [tool.hatch.build.targets.sdist] 61 | exclude = [ 62 | ".gitignore", 63 | "schemas", 64 | ] 65 | 66 | [tool.hatch.build.targets.wheel] 67 | packages = ["src/openfeature"] 68 | 69 | [tool.coverage.run] 70 | omit = [ 71 | "tests/**", 72 | ] 73 | 74 | [tool.mypy] 75 | mypy_path = "src" 76 | files = "src" 77 | 78 | python_version = "3.9" # should be identical to the minimum supported version 79 | namespace_packages = true 80 | explicit_package_bases = true 81 | local_partial_types = true 82 | pretty = true 83 | 84 | strict = true 85 | disallow_any_generics = false 86 | -------------------------------------------------------------------------------- /providers/openfeature-provider-ofrep/src/openfeature/contrib/provider/ofrep/__init__.py: -------------------------------------------------------------------------------- 1 | import re 2 | from datetime import datetime, timedelta, timezone 3 | from email.utils import parsedate_to_datetime 4 | from typing import Any, Callable, NoReturn, Optional, Union 5 | from urllib.parse import urljoin 6 | 7 | import requests 8 | from requests.exceptions import JSONDecodeError 9 | 10 | from openfeature.evaluation_context import EvaluationContext 11 | from openfeature.exception import ( 12 | ErrorCode, 13 | FlagNotFoundError, 14 | GeneralError, 15 | InvalidContextError, 16 | OpenFeatureError, 17 | ParseError, 18 | TargetingKeyMissingError, 19 | TypeMismatchError, 20 | ) 21 | from openfeature.flag_evaluation import FlagResolutionDetails, FlagType, Reason 22 | from openfeature.hook import Hook 23 | from openfeature.provider import AbstractProvider, Metadata 24 | 25 | __all__ = ["OFREPProvider"] 26 | 27 | 28 | TypeMap = dict[ 29 | FlagType, 30 | Union[ 31 | type[bool], 32 | type[int], 33 | type[float], 34 | 
type[str], 35 | tuple[type[dict], type[list]], 36 | ], 37 | ] 38 | 39 | 40 | class OFREPProvider(AbstractProvider): 41 | def __init__( 42 | self, 43 | base_url: str, 44 | *, 45 | headers_factory: Optional[Callable[[], dict[str, str]]] = None, 46 | timeout: float = 5.0, 47 | ): 48 | self.base_url = base_url 49 | self.headers_factory = headers_factory 50 | self.timeout = timeout 51 | self.retry_after: Optional[datetime] = None 52 | self.session = requests.Session() 53 | 54 | def get_metadata(self) -> Metadata: 55 | return Metadata(name="OpenFeature Remote Evaluation Protocol Provider") 56 | 57 | def get_provider_hooks(self) -> list[Hook]: 58 | return [] 59 | 60 | def resolve_boolean_details( 61 | self, 62 | flag_key: str, 63 | default_value: bool, 64 | evaluation_context: Optional[EvaluationContext] = None, 65 | ) -> FlagResolutionDetails[bool]: 66 | return self._resolve( 67 | FlagType.BOOLEAN, flag_key, default_value, evaluation_context 68 | ) 69 | 70 | def resolve_string_details( 71 | self, 72 | flag_key: str, 73 | default_value: str, 74 | evaluation_context: Optional[EvaluationContext] = None, 75 | ) -> FlagResolutionDetails[str]: 76 | return self._resolve( 77 | FlagType.STRING, flag_key, default_value, evaluation_context 78 | ) 79 | 80 | def resolve_integer_details( 81 | self, 82 | flag_key: str, 83 | default_value: int, 84 | evaluation_context: Optional[EvaluationContext] = None, 85 | ) -> FlagResolutionDetails[int]: 86 | return self._resolve( 87 | FlagType.INTEGER, flag_key, default_value, evaluation_context 88 | ) 89 | 90 | def resolve_float_details( 91 | self, 92 | flag_key: str, 93 | default_value: float, 94 | evaluation_context: Optional[EvaluationContext] = None, 95 | ) -> FlagResolutionDetails[float]: 96 | return self._resolve( 97 | FlagType.FLOAT, flag_key, default_value, evaluation_context 98 | ) 99 | 100 | def resolve_object_details( 101 | self, 102 | flag_key: str, 103 | default_value: Union[dict, list], 104 | evaluation_context: 
Optional[EvaluationContext] = None, 105 | ) -> FlagResolutionDetails[Union[dict, list]]: 106 | return self._resolve( 107 | FlagType.OBJECT, flag_key, default_value, evaluation_context 108 | ) 109 | 110 | def _get_ofrep_api_url(self, api_version: str = "v1") -> str: 111 | ofrep_base_url = ( 112 | self.base_url if self.base_url.endswith("/") else f"{self.base_url}/" 113 | ) 114 | return urljoin(ofrep_base_url, f"ofrep/{api_version}/") 115 | 116 | def _resolve( 117 | self, 118 | flag_type: FlagType, 119 | flag_key: str, 120 | default_value: Union[bool, str, int, float, dict, list], 121 | evaluation_context: Optional[EvaluationContext] = None, 122 | ) -> FlagResolutionDetails[Any]: 123 | now = datetime.now(timezone.utc) 124 | if self.retry_after and now <= self.retry_after: 125 | raise GeneralError( 126 | f"OFREP evaluation paused due to TooManyRequests until {self.retry_after}" 127 | ) 128 | elif self.retry_after: 129 | self.retry_after = None 130 | 131 | try: 132 | response = self.session.post( 133 | urljoin(self._get_ofrep_api_url(), f"evaluate/flags/{flag_key}"), 134 | json=_build_request_data(evaluation_context), 135 | timeout=self.timeout, 136 | headers=self.headers_factory() if self.headers_factory else None, 137 | ) 138 | response.raise_for_status() 139 | 140 | except requests.RequestException as e: 141 | self._handle_error(e) 142 | 143 | try: 144 | data = response.json() 145 | except JSONDecodeError as e: 146 | raise ParseError(str(e)) from e 147 | 148 | _typecheck_flag_value(data["value"], flag_type) 149 | 150 | return FlagResolutionDetails( 151 | value=data["value"], 152 | reason=Reason[data["reason"]], 153 | variant=data["variant"], 154 | flag_metadata=data["metadata"], 155 | ) 156 | 157 | def _handle_error(self, exception: requests.RequestException) -> NoReturn: 158 | response = exception.response 159 | if response is None: 160 | raise GeneralError(str(exception)) from exception 161 | 162 | if response.status_code == 429: 163 | retry_after = 
response.headers.get("Retry-After") 164 | self.retry_after = _parse_retry_after(retry_after) 165 | raise GeneralError( 166 | f"Rate limited, retry after: {retry_after}" 167 | ) from exception 168 | 169 | try: 170 | data = response.json() 171 | except JSONDecodeError: 172 | raise ParseError(str(exception)) from exception 173 | 174 | error_code = ErrorCode(data["errorCode"]) 175 | error_details = data["errorDetails"] 176 | 177 | if response.status_code == 404: 178 | raise FlagNotFoundError(error_details) from exception 179 | 180 | if error_code == ErrorCode.PARSE_ERROR: 181 | raise ParseError(error_details) from exception 182 | if error_code == ErrorCode.TARGETING_KEY_MISSING: 183 | raise TargetingKeyMissingError(error_details) from exception 184 | if error_code == ErrorCode.INVALID_CONTEXT: 185 | raise InvalidContextError(error_details) from exception 186 | if error_code == ErrorCode.GENERAL: 187 | raise GeneralError(error_details) from exception 188 | 189 | raise OpenFeatureError(error_code, error_details) from exception 190 | 191 | 192 | def _build_request_data( 193 | evaluation_context: Optional[EvaluationContext], 194 | ) -> dict[str, Any]: 195 | data: dict[str, Any] = {} 196 | if evaluation_context: 197 | data["context"] = {} 198 | if evaluation_context.targeting_key: 199 | data["context"]["targetingKey"] = evaluation_context.targeting_key 200 | data["context"].update(evaluation_context.attributes) 201 | return data 202 | 203 | 204 | def _parse_retry_after(retry_after: Optional[str]) -> Optional[datetime]: 205 | if retry_after is None: 206 | return None 207 | if re.match(r"^\s*[0-9]+\s*$", retry_after): 208 | seconds = int(retry_after) 209 | return datetime.now(timezone.utc) + timedelta(seconds=seconds) 210 | return parsedate_to_datetime(retry_after) 211 | 212 | 213 | def _typecheck_flag_value(value: Any, flag_type: FlagType) -> None: 214 | type_map: TypeMap = { 215 | FlagType.BOOLEAN: bool, 216 | FlagType.STRING: str, 217 | FlagType.OBJECT: (dict, list), 218 | 
@pytest.fixture
def ofrep_provider():
    """Provider instance wired to a local OFREP endpoint for the tests."""
    return OFREPProvider("http://localhost:8080")


def test_provider_init():
    # Constructing a provider with a custom headers factory must not raise.
    OFREPProvider(
        "http://localhost:8080",
        headers_factory=lambda: {"Authorization": "Bearer token"},
    )
"resolve_boolean_details"), 26 | (str, "String", "default", "resolve_string_details"), 27 | (int, 100, 0, "resolve_integer_details"), 28 | (float, 10.23, 0.0, "resolve_float_details"), 29 | ( 30 | dict, 31 | { 32 | "String": "string", 33 | "Number": 2, 34 | "Boolean": True, 35 | }, 36 | {}, 37 | "resolve_object_details", 38 | ), 39 | ( 40 | list, 41 | ["string1", "string2"], 42 | [], 43 | "resolve_object_details", 44 | ), 45 | ), 46 | ) 47 | def test_provider_successful_resolution( 48 | flag_type, resolved_value, default_value, get_method, ofrep_provider, requests_mock 49 | ): 50 | requests_mock.post( 51 | "http://localhost:8080/ofrep/v1/evaluate/flags/flag_key", 52 | json={ 53 | "key": "flag_key", 54 | "reason": "TARGETING_MATCH", 55 | "variant": str(resolved_value), 56 | "metadata": {"foo": "bar"}, 57 | "value": resolved_value, 58 | }, 59 | ) 60 | 61 | resolution = getattr(ofrep_provider, get_method)("flag_key", default_value) 62 | 63 | assert resolution == FlagResolutionDetails( 64 | value=resolved_value, 65 | reason=Reason.TARGETING_MATCH, 66 | variant=str(resolved_value), 67 | flag_metadata={"foo": "bar"}, 68 | ) 69 | 70 | 71 | def test_provider_flag_not_found(ofrep_provider, requests_mock): 72 | requests_mock.post( 73 | "http://localhost:8080/ofrep/v1/evaluate/flags/flag_key", 74 | status_code=404, 75 | json={ 76 | "key": "flag_key", 77 | "errorCode": "FLAG_NOT_FOUND", 78 | "errorDetails": "Flag 'flag_key' not found", 79 | }, 80 | ) 81 | 82 | with pytest.raises(FlagNotFoundError): 83 | ofrep_provider.resolve_boolean_details("flag_key", False) 84 | 85 | 86 | def test_provider_invalid_context(ofrep_provider, requests_mock): 87 | requests_mock.post( 88 | "http://localhost:8080/ofrep/v1/evaluate/flags/flag_key", 89 | status_code=400, 90 | json={ 91 | "key": "flag_key", 92 | "errorCode": "INVALID_CONTEXT", 93 | "errorDetails": "Invalid context provided", 94 | }, 95 | ) 96 | 97 | with pytest.raises(InvalidContextError): 98 | 
def test_provider_invalid_response(ofrep_provider, requests_mock):
    """A non-JSON body from the OFREP endpoint maps to ParseError."""
    requests_mock.post(
        "http://localhost:8080/ofrep/v1/evaluate/flags/flag_key", text="invalid"
    )

    with pytest.raises(ParseError):
        ofrep_provider.resolve_boolean_details("flag_key", False)


def test_provider_evaluation_context(ofrep_provider, requests_mock):
    """The evaluation context is serialized into the request body, with the
    targeting key sent as "targetingKey"."""

    def match_request_json(request):
        return request.json() == {"context": {"targetingKey": "1", "foo": "bar"}}

    requests_mock.post(
        "http://localhost:8080/ofrep/v1/evaluate/flags/flag_key",
        json={
            "key": "flag_key",
            "reason": "TARGETING_MATCH",
            "variant": "true",
            "metadata": {},
            "value": True,
        },
        additional_matcher=match_request_json,
    )

    result = ofrep_provider.resolve_boolean_details(
        "flag_key",
        False,
        evaluation_context=EvaluationContext("1", {"foo": "bar"}),
    )

    assert result == FlagResolutionDetails(
        value=True,
        reason=Reason.TARGETING_MATCH,
        variant="true",
    )


def test_provider_retry_after_shortcircuit_resolution(ofrep_provider, requests_mock):
    """After a 429 the provider short-circuits further evaluations until the
    Retry-After deadline has passed."""
    requests_mock.post(
        "http://localhost:8080/ofrep/v1/evaluate/flags/flag_key",
        status_code=429,
        headers={"Retry-After": "1"},
    )

    # First call surfaces the rate limit itself...
    with pytest.raises(GeneralError, match="Rate limited, retry after: 1"):
        ofrep_provider.resolve_boolean_details("flag_key", False)
    # ...subsequent calls are refused locally without hitting the endpoint.
    with pytest.raises(
        GeneralError, match="OFREP evaluation paused due to TooManyRequests"
    ):
        ofrep_provider.resolve_boolean_details("flag_key", False)


def test_provider_typecheck_flag_value(ofrep_provider, requests_mock):
    """A value whose JSON type disagrees with the requested flag type maps to
    TypeMismatchError."""
    requests_mock.post(
        "http://localhost:8080/ofrep/v1/evaluate/flags/flag_key",
        json={
            "key": "flag_key",
            "reason": "TARGETING_MATCH",
            "variant": "true",
            "metadata": {},
            "value": "true",
        },
    )

    with pytest.raises(TypeMismatchError):
        ofrep_provider.resolve_boolean_details("flag_key", False)
"reason": "TARGETING_MATCH", 159 | "variant": "true", 160 | "metadata": {}, 161 | "value": "true", 162 | }, 163 | ) 164 | 165 | with pytest.raises(TypeMismatchError): 166 | ofrep_provider.resolve_boolean_details("flag_key", False) 167 | 168 | 169 | @pytest.mark.parametrize( 170 | "base_url", 171 | [ 172 | "https://localhost:8080", 173 | "https://localhost:8080/", 174 | "https://localhost:8080/tools/feature_flags", 175 | "https://localhost:8080/tools/feature_flags/", 176 | ], 177 | ) 178 | def test_provider_api_path_resolution(base_url): 179 | provider = OFREPProvider(base_url=base_url) 180 | assert provider._get_ofrep_api_url() == f"{base_url.rstrip('/')}/ofrep/v1/" 181 | -------------------------------------------------------------------------------- /release-please-config.json: -------------------------------------------------------------------------------- 1 | { 2 | "bootstrap-sha": "1cef37c5d8c7d97ee52a8e6b71f12150063b8503", 3 | "separate-pull-requests": true, 4 | "release-type": "python", 5 | "bump-minor-pre-major": true, 6 | "bump-patch-for-minor-pre-major": true, 7 | "include-component-in-tag": true, 8 | "tag-separator": "/", 9 | "signoff": "OpenFeature Bot <109696520+openfeaturebot@users.noreply.github.com>", 10 | "packages": { 11 | "providers/openfeature-provider-flagd": { 12 | "package-name": "openfeature-provider-flagd", 13 | "bump-minor-pre-major": true, 14 | "bump-patch-for-minor-pre-major": true, 15 | "versioning": "default", 16 | "extra-files": [ 17 | "README.md" 18 | ] 19 | }, 20 | "providers/openfeature-provider-ofrep": { 21 | "package-name": "openfeature-provider-ofrep", 22 | "bump-minor-pre-major": true, 23 | "bump-patch-for-minor-pre-major": true, 24 | "versioning": "default", 25 | "extra-files": [ 26 | "README.md" 27 | ] 28 | }, 29 | "providers/openfeature-provider-flipt": { 30 | "package-name": "openfeature-provider-flipt", 31 | "bump-minor-pre-major": true, 32 | "bump-patch-for-minor-pre-major": true, 33 | "versioning": "default", 34 | 
"extra-files": [ 35 | "README.md" 36 | ] 37 | }, 38 | "hooks/openfeature-hooks-opentelemetry": { 39 | "package-name": "openfeature-hooks-opentelemetry", 40 | "bump-minor-pre-major": true, 41 | "bump-patch-for-minor-pre-major": true, 42 | "versioning": "default", 43 | "extra-files": [ 44 | "README.md" 45 | ] 46 | } 47 | }, 48 | "changelog-sections": [ 49 | { 50 | "type": "fix", 51 | "section": "🐛 Bug Fixes" 52 | }, 53 | { 54 | "type": "feat", 55 | "section": "✨ New Features" 56 | }, 57 | { 58 | "type": "chore", 59 | "section": "🧹 Chore" 60 | }, 61 | { 62 | "type": "docs", 63 | "section": "📚 Documentation" 64 | }, 65 | { 66 | "type": "perf", 67 | "section": "🚀 Performance" 68 | }, 69 | { 70 | "type": "build", 71 | "hidden": true, 72 | "section": "🛠️ Build" 73 | }, 74 | { 75 | "type": "deps", 76 | "section": "📦 Dependencies" 77 | }, 78 | { 79 | "type": "ci", 80 | "hidden": true, 81 | "section": "🚦 CI" 82 | }, 83 | { 84 | "type": "refactor", 85 | "section": "🔄 Refactoring" 86 | }, 87 | { 88 | "type": "revert", 89 | "section": "🔙 Reverts" 90 | }, 91 | { 92 | "type": "style", 93 | "hidden": true, 94 | "section": "🎨 Styling" 95 | }, 96 | { 97 | "type": "test", 98 | "hidden": true, 99 | "section": "🧪 Tests" 100 | } 101 | ], 102 | "$schema": "https://raw.githubusercontent.com/googleapis/release-please/main/schemas/config.json" 103 | } 104 | -------------------------------------------------------------------------------- /renovate.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://docs.renovatebot.com/renovate-schema.json", 3 | "extends": ["github>open-feature/community-tooling"], 4 | "pep621": { 5 | "enabled": true 6 | }, 7 | "pre-commit": { 8 | "enabled": true 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /ruff.toml: -------------------------------------------------------------------------------- 1 | exclude = [ 2 | ".git", 3 | ".venv", 4 | "__pycache__", 
5 | "venv", 6 | "providers/openfeature-provider-flagd/src/openfeature/schemas/**" 7 | ] 8 | target-version = "py39" 9 | 10 | [lint] 11 | select = [ 12 | "A", 13 | "B", 14 | "C4", 15 | "C90", 16 | "E", 17 | "F", 18 | "FLY", 19 | "FURB", 20 | "I", 21 | "LOG", 22 | "N", 23 | "PERF", 24 | "PGH", 25 | "PLC", 26 | "PLR0913", 27 | "PLR0915", 28 | "RUF", 29 | "S", 30 | "SIM", 31 | "T10", 32 | "T20", 33 | "UP", 34 | "W", 35 | "YTT", 36 | ] 37 | ignore = [ 38 | "E501", # the formatter will handle any too long line 39 | ] 40 | 41 | [lint.isort] 42 | known-first-party = ["openfeature"] 43 | 44 | [lint.per-file-ignores] 45 | "**/tests/**/*" = ["S101"] 46 | 47 | [lint.pylint] 48 | max-args = 6 49 | max-statements = 30 50 | 51 | [lint.pyupgrade] 52 | # Preserve types, even if a file imports `from __future__ import annotations`. 53 | keep-runtime-typing = true 54 | --------------------------------------------------------------------------------