├── .adr-dir
├── seeds
└── .gitkeep
├── featureflags
├── rpc
│ ├── __init__.py
│ ├── container.py
│ ├── utils.py
│ ├── metrics.py
│ ├── app.py
│ ├── servicer.py
│ └── db.py
├── web
│ ├── __init__.py
│ ├── api
│ │ ├── __init__.py
│ │ ├── health.py
│ │ ├── index.py
│ │ └── graph.py
│ ├── types.py
│ ├── constants.py
│ ├── middlewares.py
│ ├── container.py
│ ├── lifecycle.py
│ └── app.py
├── graph
│ ├── __init__.py
│ ├── constants.py
│ ├── context.py
│ ├── metrics.py
│ ├── proto_adapter.py
│ ├── utils.py
│ └── types.py
├── http
│ ├── __init__.py
│ ├── api
│ │ ├── __init__.py
│ │ ├── health.py
│ │ └── flags.py
│ ├── repositories
│ │ ├── __init__.py
│ │ └── flags.py
│ ├── container.py
│ ├── lifecycle.py
│ ├── app.py
│ └── types.py
├── protobuf
│ ├── __init__.py
│ ├── service.proto
│ ├── backend.proto
│ ├── graph.proto
│ └── service_grpc.py
├── services
│ ├── __init__.py
│ ├── db.py
│ └── ldap.py
├── tests
│ ├── __init__.py
│ ├── test_config.py
│ └── conftest.py
├── migrations
│ ├── __init__.py
│ ├── versions
│ │ ├── __init__.py
│ │ ├── a327a3ea7a5f_added_flags_and_values_name_idx.py
│ │ ├── 94e4203113b0_removed_auth_session_user_column.py
│ │ ├── 8df4e7dd1897_delete_changelog_on_flag_delete.py
│ │ ├── 2fa54f8b55c1_cleaup_condition_checks.py
│ │ ├── 4d42cf3d11de_added_created_and_reported_timestamps.py
│ │ ├── cce98484f923_auth_user_table_added.py
│ │ ├── 33ace31b89cc_changelog_added.py
│ │ ├── 69f91d9fab0f_add_condition_position_column.py
│ │ ├── 1876f90b58e8_added_feature_values_tables.py
│ │ └── b3fbbe647373_init.py
│ ├── script.py.mako
│ └── env.py
├── __main__.py
├── __init__.py
├── errors.py
├── alembic.py
├── cli.py
├── logging.py
├── metrics.py
├── config.py
├── sentry.py
└── utils.py
├── .semgrepignore
├── docs
├── deployment.rst
├── _static
│ ├── images
│ │ ├── ui.png
│ │ └── condition-set-included-in.png
│ ├── fixes.css
│ └── style.css
├── changelog
│ ├── server
│ │ ├── index.rst
│ │ └── changes_1.rst
│ ├── index.rst
│ └── python-client
│     ├── index.rst
│     └── changes_0.rst
├── clients.rst
├── index.rst
├── conf.py
├── clients
│ └── javascript.rst
├── client.rst
├── development.rst
├── server.rst
└── conditions.rst
├── scripts
├── disable-hooks.sh
├── enable-hooks.sh
└── release.sh
├── ui
├── src
│ ├── Dashboard
│ │ ├── index.jsx
│ │ ├── Check.less
│ │ ├── Conditions.less
│ │ ├── Flag.less
│ │ ├── Value.less
│ │ ├── ValueConditions.less
│ │ ├── Dashboard.less
│ │ ├── Flags.less
│ │ ├── Values.less
│ │ ├── utils.js
│ │ ├── Tabs.jsx
│ │ ├── context.jsx
│ │ ├── Conditions.jsx
│ │ ├── constants.js
│ │ ├── Settings.jsx
│ │ ├── ValueConditions.jsx
│ │ ├── ValueCheck.jsx
│ │ ├── queries.js
│ │ ├── Flags.jsx
│ │ └── Values.jsx
│ ├── graphiql.jsx
│ ├── hooks.jsx
│ ├── components
│ │ ├── Spinner.jsx
│ │ ├── Version.jsx
│ │ └── Logo.jsx
│ ├── Base.less
│ ├── main.jsx
│ ├── favicon.svg
│ ├── context
│ │ └── auth.jsx
│ ├── Auth.jsx
│ └── Base.jsx
├── .gitignore
├── index.html
├── package.json
└── vite.config.js
├── .readthedocs.yaml
├── .hooks
└── pre-commit
├── adr
├── 0001-record-architecture-decisions.md
└── 0002-split-project-into-client-server-protobuf-clients.md
├── .dockerignore
├── .gitignore
├── configs
├── test.yaml
└── local.yaml
├── .github
└── workflows
│ ├── test.yaml
│ └── release.yaml
├── CHANGELOG.md
├── README.rst
├── Dockerfile
├── docker-compose.yaml
├── lets.yaml
└── pyproject.toml
/.adr-dir:
--------------------------------------------------------------------------------
1 | adr
2 |
--------------------------------------------------------------------------------
/seeds/.gitkeep:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/featureflags/rpc/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/featureflags/web/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/featureflags/graph/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/featureflags/http/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/featureflags/http/api/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/featureflags/protobuf/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/featureflags/services/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/featureflags/tests/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/featureflags/web/api/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/.semgrepignore:
--------------------------------------------------------------------------------
1 | /docker-compose.yaml
--------------------------------------------------------------------------------
/featureflags/migrations/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/featureflags/http/repositories/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/featureflags/migrations/versions/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/docs/deployment.rst:
--------------------------------------------------------------------------------
1 | Deployment
2 | ==========
3 |
4 | TODO
--------------------------------------------------------------------------------
/scripts/disable-hooks.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | git config --local --unset core.hooksPath
--------------------------------------------------------------------------------
/scripts/enable-hooks.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | git config --local core.hooksPath .hooks
3 |
--------------------------------------------------------------------------------
/docs/_static/images/ui.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/evo-company/featureflags/HEAD/docs/_static/images/ui.png
--------------------------------------------------------------------------------
/featureflags/__main__.py:
--------------------------------------------------------------------------------
from featureflags.cli import cli

# Entry point for `python -m featureflags`; delegates to the click CLI.
if __name__ == "__main__":
    cli()
5 |
--------------------------------------------------------------------------------
/featureflags/graph/constants.py:
--------------------------------------------------------------------------------
from datetime import timedelta

# How long an authenticated session stays valid before the user must
# sign in again.
AUTH_SESSION_TTL = timedelta(days=14)
4 |
--------------------------------------------------------------------------------
/ui/src/Dashboard/index.jsx:
--------------------------------------------------------------------------------
// Package entry point: re-export the Dashboard component directly.
export { Dashboard } from './Dashboard';
6 |
--------------------------------------------------------------------------------
/featureflags/__init__.py:
--------------------------------------------------------------------------------
import os

# Package release version (bumped on each release).
__version__ = "1.24.4"
# CI build number, injected through the BUILD_VERSION env var; "0" locally.
__build_version__ = os.getenv("BUILD_VERSION", "0")
5 |
--------------------------------------------------------------------------------
/docs/changelog/server/index.rst:
--------------------------------------------------------------------------------
1 | Server Changelog
2 | ================
3 |
4 | .. toctree::
5 | :maxdepth: 2
6 |
7 | changes_1
--------------------------------------------------------------------------------
/docs/changelog/index.rst:
--------------------------------------------------------------------------------
1 | Changelog
2 | =========
3 |
4 | .. toctree::
5 | :maxdepth: 3
6 |
7 | server/index
8 | python-client/index
9 |
--------------------------------------------------------------------------------
/docs/_static/images/condition-set-included-in.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/evo-company/featureflags/HEAD/docs/_static/images/condition-set-included-in.png
--------------------------------------------------------------------------------
/docs/changelog/python-client/index.rst:
--------------------------------------------------------------------------------
1 | Python Client Changelog
2 | =======================
3 |
4 | .. toctree::
5 | :maxdepth: 2
6 |
7 | changes_0
--------------------------------------------------------------------------------
/ui/src/Dashboard/Check.less:
--------------------------------------------------------------------------------
1 | .empty {
2 | border: 1px solid rgba(255, 255, 0, 0.85);
3 | }
4 |
5 | .check-variable-select {
6 | width: 280px;
7 | }
--------------------------------------------------------------------------------
/ui/src/Dashboard/Conditions.less:
--------------------------------------------------------------------------------
1 | .condition-block {
2 | background: #eaeaea;
3 | border-radius: 5px;
4 | padding: 10px;
5 | margin-bottom: 10px;
6 | }
7 |
--------------------------------------------------------------------------------
/ui/src/Dashboard/Flag.less:
--------------------------------------------------------------------------------
1 | .flag-name {
2 |
3 | }
4 |
5 | .flag-name:hover {
6 | cursor: pointer;
7 | }
8 |
9 | .ant-timeline-item-content p {
10 | margin: 0;
11 | }
12 |
--------------------------------------------------------------------------------
/ui/src/Dashboard/Value.less:
--------------------------------------------------------------------------------
1 | .value-name {
2 |
3 | }
4 |
5 | .value-name:hover {
6 | cursor: pointer;
7 | }
8 |
9 | .ant-timeline-item-content p {
10 | margin: 0;
11 | }
12 |
--------------------------------------------------------------------------------
/ui/src/Dashboard/ValueConditions.less:
--------------------------------------------------------------------------------
1 | .condition-block {
2 | background: #eaeaea;
3 | border-radius: 5px;
4 | padding: 10px;
5 | margin-bottom: 10px;
6 | }
7 |
8 | .value-condition-override {
9 | margin-bottom: 10px;
10 | }
--------------------------------------------------------------------------------
/docs/clients.rst:
--------------------------------------------------------------------------------
1 | Clients
2 | =======
3 |
4 | This section covers how to use the FeatureFlags clients in different programming languages.
5 |
6 | .. toctree::
7 | :maxdepth: 2
8 |
9 | clients/python
10 | clients/javascript
11 |
--------------------------------------------------------------------------------
/featureflags/web/types.py:
--------------------------------------------------------------------------------
from typing import Any

from pydantic import BaseModel


class GraphQueryRequest(BaseModel):
    """Body of a GraphQL HTTP request: query plus optional operation/variables."""

    # The GraphQL document to execute.
    query: str
    # camelCase kept to match the GraphQL-over-HTTP convention (hence noqa).
    operationName: str | None = None  # noqa: N815
    # Values for variables declared in the query, if any.
    variables: dict[str, Any] | None = None
10 |
--------------------------------------------------------------------------------
/.readthedocs.yaml:
--------------------------------------------------------------------------------
1 | version: 2
2 |
3 | build:
4 | os: ubuntu-24.04
5 | tools:
6 | python: "3.11"
7 | commands:
8 | - pip install pdm==2.26.1
9 | - pdm sync -G dev -G docs
10 | - pdm run docs
11 | - mkdir -p _readthedocs
12 | - mv docs/build _readthedocs/html
13 |
--------------------------------------------------------------------------------
/featureflags/web/constants.py:
--------------------------------------------------------------------------------
from datetime import timedelta
from pathlib import Path

# Name of the cookie carrying the user's access token.
COOKIE_ACCESS_TOKEN = "access_token"
# Cookie lifetime in seconds (one year).
COOKIE_ACCESS_TOKEN_MAX_AGE = 365 * 24 * 3600

# Validity window of an issued access token.
ACCESS_TOKEN_TTL = timedelta(minutes=10)

# Directory with the built UI assets served by the web app.
STATIC_DIR = Path(__file__).parent / "static"
10 |
--------------------------------------------------------------------------------
/featureflags/http/api/health.py:
--------------------------------------------------------------------------------
from fastapi import APIRouter
from fastapi.responses import ORJSONResponse

router = APIRouter(prefix="/~health", tags=["health"])


@router.get("")
async def health() -> ORJSONResponse:
    """Liveness probe: always reports the service as healthy."""
    payload = {"status": "ok"}
    return ORJSONResponse(content=payload)
10 |
--------------------------------------------------------------------------------
/featureflags/web/api/health.py:
--------------------------------------------------------------------------------
from fastapi import APIRouter
from fastapi.responses import ORJSONResponse

router = APIRouter(prefix="/~health", tags=["health"])


@router.get("")
async def health() -> ORJSONResponse:
    """Health-check endpoint used by orchestration liveness probes."""
    return ORJSONResponse({"status": "ok"})
10 |
--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
1 | .. include:: ../README.rst
2 |
3 | .. image:: _static/images/ui.png
4 | :width: 700
5 | :alt: UI
6 |
7 | User's Guide
8 | ------------
9 |
10 | .. toctree::
11 | :maxdepth: 3
12 |
13 | getting-started
14 | server
15 | client
16 | clients
17 | conditions
18 | deployment
19 | development
20 | changelog/index
21 |
--------------------------------------------------------------------------------
/ui/src/graphiql.jsx:
--------------------------------------------------------------------------------
1 | import { createGraphiQLFetcher } from '@graphiql/toolkit';
2 | import { GraphiQL } from 'graphiql';
3 | import React from 'react';
4 | import 'graphiql/style.css'
5 |
6 | const fetcher = createGraphiQLFetcher({ url: '/graphql' });
7 |
8 | export const GraphiQLRoot = () => {
9 | return ;
10 | }
11 |
12 |
--------------------------------------------------------------------------------
/ui/src/Dashboard/Dashboard.less:
--------------------------------------------------------------------------------
1 | .sidebar {
2 | overflow: auto;
3 | height: calc(100vh - 64px);
4 | overflow-y: scroll;
5 | }
6 |
7 | .search {
8 | color: #141414;
9 | width: 275px;
10 | padding-left: 25px;
11 | position: fixed;
12 | z-index: 90;
13 | }
14 |
15 | .site-layout .site-layout-background {
16 | height: calc(100vh - 64px);
17 | }
18 |
--------------------------------------------------------------------------------
/ui/.gitignore:
--------------------------------------------------------------------------------
1 | # Logs
2 | logs
3 | *.log
4 | npm-debug.log*
5 | yarn-debug.log*
6 | yarn-error.log*
7 | pnpm-debug.log*
8 | lerna-debug.log*
9 |
10 | node_modules
11 | dist
12 | dist-ssr
13 | *.local
14 |
15 | # Editor directories and files
16 | .vscode/*
17 | !.vscode/extensions.json
18 | .idea
19 | .DS_Store
20 | *.suo
21 | *.ntvs*
22 | *.njsproj
23 | *.sln
24 | *.sw?
25 |
--------------------------------------------------------------------------------
/ui/src/Dashboard/Flags.less:
--------------------------------------------------------------------------------
1 | :root {
2 | --root-width: 930px;
3 | }
4 |
5 | .flag-card {
6 | width: var(--root-width);
7 | border-radius: 5px;
8 | }
9 |
10 | .flag-card-content {
11 | width: 100%;
12 | }
13 |
14 | .search-flags {
15 | color: #141414;
16 | width: var(--root-width);
17 | z-index: 90;
18 | border: 1px solid rgba(255, 255, 0, 0.85);
19 | }
20 |
--------------------------------------------------------------------------------
/ui/src/Dashboard/Values.less:
--------------------------------------------------------------------------------
1 | :root {
2 | --root-width: 930px;
3 | }
4 |
5 | .value-card {
6 | width: var(--root-width);
7 | border-radius: 5px;
8 | }
9 |
10 | .value-card-content {
11 | width: 100%;
12 | }
13 |
14 | .search-values {
15 | color: #141414;
16 | width: var(--root-width);
17 | z-index: 90;
18 | border: 1px solid rgba(255, 255, 0, 0.85);
19 | }
20 |
--------------------------------------------------------------------------------
/docs/_static/fixes.css:
--------------------------------------------------------------------------------
1 | /* fixes https://github.com/rtfd/sphinx_rtd_theme/issues/381 */
2 |
3 | .rst-content .highlight > pre, .rst-content .linenodiv > pre {
4 | line-height: normal;
5 | }
6 |
7 | /* customizations */
8 |
9 | .ui.tabular.menu {
10 | font-size: 13px;
11 | }
12 |
13 | .ui.tabular.menu .item {
14 | padding: .3em 1em;
15 | }
16 |
17 | .ui.attached.segment {
18 | padding: 0.5em;
19 | }
20 |
--------------------------------------------------------------------------------
/ui/src/hooks.jsx:
--------------------------------------------------------------------------------
import { useContext } from 'react';
import { AuthContext } from './context/auth';

// Access the full auth context (state + actions).
export const useAuth = () => useContext(AuthContext);

// Convenience hook exposing only the sign-in action.
export const useSignIn = () => useContext(AuthContext).actions.signIn;

// Convenience hook exposing only the sign-out action.
export const useSignOut = () => useContext(AuthContext).actions.signOut;
17 |
--------------------------------------------------------------------------------
/featureflags/migrations/script.py.mako:
--------------------------------------------------------------------------------
1 | import sqlalchemy as sa
2 |
3 | from alembic import op
4 | ${imports if imports else ""}
5 |
6 |
7 | revision = ${repr(up_revision)}
8 | down_revision = ${repr(down_revision)}
9 | branch_labels = ${repr(branch_labels)}
10 | depends_on = ${repr(depends_on)}
11 |
12 |
13 | def upgrade():
14 | ${upgrades if upgrades else "pass"}
15 |
16 | def downgrade():
17 | ${downgrades if downgrades else "pass"}
18 |
--------------------------------------------------------------------------------
/featureflags/tests/test_config.py:
--------------------------------------------------------------------------------
import os
from pathlib import PosixPath

import pytest

from featureflags.config import CONFIG_PATH_ENV_VAR, CONFIGS_DIR, _load_config


# One test case per file shipped in the configs/ directory
# (iterdir() is evaluated once at collection time).
@pytest.mark.parametrize(
    "path",
    CONFIGS_DIR.iterdir(),
)
def test_configs_smoke(path: PosixPath) -> None:
    """Test that the config loads."""
    # Point the loader at this file via the environment, then load it;
    # any parse/validation error fails the test.
    os.environ[CONFIG_PATH_ENV_VAR] = path.as_posix()
    _load_config()
17 |
--------------------------------------------------------------------------------
/ui/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 | FeatureFlags
8 |
9 |
10 |
11 |
12 |
13 |
14 |
--------------------------------------------------------------------------------
/ui/src/components/Spinner.jsx:
--------------------------------------------------------------------------------
import { LoadingOutlined } from '@ant-design/icons';
import { Spin } from 'antd';

// NOTE(review): the JSX expressions in this file were stripped during text
// extraction (elements after `=` and inside `return`/arrow bodies are
// missing), so the code below is not valid JSX as shown — recover the
// original markup from version control before editing.
const antIcon = ;

const Centered = ({ children }) => {
  return (

      {children}

  )
}

export const CenteredSpinner = () => ;
15 |
--------------------------------------------------------------------------------
/featureflags/errors.py:
--------------------------------------------------------------------------------
from starlette import status


class BaseInternalServerError(Exception):
    """Base application error; rendered as HTTP 500 unless overridden."""

    # HTTP status code and message used when this error is turned
    # into a response.
    status_code: int = status.HTTP_500_INTERNAL_SERVER_ERROR
    detail: str = "Internal server error."


class UserNotAuthorizedError(BaseInternalServerError):
    """Raised when a request lacks valid authorization (HTTP 401)."""

    status_code: int = status.HTTP_401_UNAUTHORIZED
    detail: str = "User not authorized."
16 |
--------------------------------------------------------------------------------
/featureflags/migrations/env.py:
--------------------------------------------------------------------------------
from alembic import context
from sqlalchemy import create_engine, pool

from featureflags.models import metadata

# The DSN is injected by featureflags.alembic via set_main_option("url", ...).
dsn = context.config.get_main_option("url")
assert dsn

# NullPool: migrations run once, so connections are not reused.
engine = create_engine(dsn, poolclass=pool.NullPool)
with engine.connect() as connection:
    context.configure(connection=connection, target_metadata=metadata)
    with context.begin_transaction():
        context.run_migrations()
14 |
--------------------------------------------------------------------------------
/docs/changelog/python-client/changes_0.rst:
--------------------------------------------------------------------------------
1 | Changes in 0.X
2 | ==============
3 |
4 | 0.7.0
5 | ------
6 |
7 | Added
8 | ~~~~~
9 |
10 | - Drop python3.7/3.8 support, add python3.13 support
11 |
12 |
13 | 0.6.1
14 | ------
15 |
16 | Fixed
17 | ~~~~~
18 |
19 | - Fix value default
20 |
21 | 0.6.0
22 | ------
23 |
24 | Added
25 | ~~~~~
26 |
27 | - added feature values (`#10 `_)
28 | - fix types for python 3.9
29 | - split flags and value states
--------------------------------------------------------------------------------
/.hooks/pre-commit:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | HAS_STAGED_PY=$(git diff --staged --diff-filter=d --name-only '*.py')
4 |
5 | if [ -n "$HAS_STAGED_PY" ]; then
6 |
7 | echo "Running mypy ..."
8 | lets mypy
9 | if [[ $? -ne 0 ]]; then
10 | exit 1
11 | fi
12 |
13 | echo "Running black ..."
14 | lets black --diff --check
15 | if [[ $? -ne 0 ]]; then
16 | exit 1
17 | fi
18 |
19 | echo "Running ruff ..."
20 | lets ruff-diff
21 | if [[ $? -ne 0 ]]; then
22 | exit 1
23 | fi
24 |
25 | fi
26 |
27 | exit 0
28 |
--------------------------------------------------------------------------------
/featureflags/web/api/index.py:
--------------------------------------------------------------------------------
from fastapi import APIRouter, Request
from fastapi.templating import Jinja2Templates

from featureflags.web.constants import STATIC_DIR

router = APIRouter(tags=["index"])

# The built UI's index.html lives in the static dir and is served as a template.
templates = Jinja2Templates(directory=STATIC_DIR)


@router.get("/", response_model=None)
async def index(request: Request) -> templates.TemplateResponse: # type: ignore
    """Serve the single-page UI entry point (static/index.html)."""
    return templates.TemplateResponse(
        name="index.html",
        context={"request": request},
    )
17 |
--------------------------------------------------------------------------------
/docs/_static/style.css:
--------------------------------------------------------------------------------
1 | .wy-nav-side {
2 | background: #2d3a48;
3 | }
4 | .wy-side-nav-search {
5 | background: #181f27;
6 | }
7 | .wy-menu-vertical a:hover {
8 | background-color: #475260;
9 | }
10 | .rst-content code {
11 | border-radius: 3px;
12 | }
13 | .rst-content code.literal {
14 | color: black;
15 | }
16 | .rst-content dl:not(.docutils) code {
17 | font-weight: normal;
18 | }
19 | .rst-content a code.literal {
20 | font-weight: normal;
21 | color: #2980B9;
22 | }
23 | .rst-content a:hover code.literal {
24 | color: #3091d1;
25 | }
26 |
--------------------------------------------------------------------------------
/docs/conf.py:
--------------------------------------------------------------------------------
# Sphinx configuration for the Feature Flags documentation.

extensions = [
    "sphinx.ext.autodoc",
    "sphinx.ext.intersphinx",
    "sphinx_inline_tabs",
]

# Merge class and __init__ docstrings; keep members in source order.
autoclass_content = "both"
autodoc_member_order = "bysource"

intersphinx_mapping = {
    # Point at the current Python 3 docs (the project targets 3.11,
    # see .readthedocs.yaml) rather than the long-EOL 3.6 docs.
    "python": ("https://docs.python.org/3", None),
}

source_suffix = ".rst"
master_doc = "index"

project = "Feature Flags"
copyright = "2025, Evo Company"
author = "Evo Company"

templates_path = []

html_theme = "furo"
html_static_path = ["_static"]
html_theme_options = {}
26 |
--------------------------------------------------------------------------------
/featureflags/services/db.py:
--------------------------------------------------------------------------------
from collections.abc import AsyncGenerator

import aiopg.sa

from featureflags.config import config


async def init_db_engine() -> AsyncGenerator[aiopg.sa.Engine, None]:
    """Create the application's aiopg engine and yield it for DI.

    The ``async with`` block disposes of the engine (``close`` +
    ``wait_closed``) when the generator is finalized, so no explicit
    cleanup is needed; the previous ``try``/``finally`` closed the
    engine a second time redundantly.
    """
    async with aiopg.sa.create_engine(
        dsn=config.postgres.dsn,
        echo=config.debug,
        enable_hstore=False,
        timeout=config.postgres.timeout,
    ) as engine:
        yield engine
20 |
--------------------------------------------------------------------------------
/adr/0001-record-architecture-decisions.md:
--------------------------------------------------------------------------------
1 | # 1. Record architecture decisions
2 |
3 | Date: 2023-10-09
4 |
5 | ## Status
6 |
7 | Accepted
8 |
9 | ## Context
10 |
11 | We need to record the architectural decisions made on this project.
12 |
13 | ## Decision
14 |
15 | We will use Architecture Decision Records, as [described by Michael Nygard](http://thinkrelevance.com/blog/2011/11/15/documenting-architecture-decisions).
16 |
17 | ## Consequences
18 |
19 | See Michael Nygard's article, linked above. For a lightweight ADR toolset, see Nat Pryce's [adr-tools](https://github.com/npryce/adr-tools).
20 |
--------------------------------------------------------------------------------
/featureflags/alembic.py:
--------------------------------------------------------------------------------
import alembic.config

from featureflags.config import config


def main(args: list[str]) -> None:
    """Run the alembic CLI with options sourced from the app config.

    Builds an alembic ``Config`` programmatically (no alembic.ini needed),
    pointing it at the bundled migrations package and the configured
    Postgres DSN, then delegates command parsing/execution to alembic.

    :param args: raw alembic CLI arguments, e.g. ``["upgrade", "head"]``.
    """
    alembic_cfg = alembic.config.Config()

    alembic_main_options = {
        # "package:directory" form, resolved by alembic at runtime.
        "script_location": "featureflags:migrations",
        "url": config.postgres.dsn,
    }
    for name, value in alembic_main_options.items():
        alembic_cfg.set_main_option(name=name, value=value)

    alembic_cli = alembic.config.CommandLine()
    alembic_options = alembic_cli.parser.parse_args(args)

    alembic_cli.run_cmd(alembic_cfg, alembic_options)
20 |
--------------------------------------------------------------------------------
/ui/src/Base.less:
--------------------------------------------------------------------------------
1 | .header {
2 | padding-left: 25px;
3 | padding-right: 25px;
4 | }
5 |
6 | .title {
7 | font-size: 1.5rem;
8 | }
9 |
10 | @keyframes shake {
11 | 8%, 41% {
12 | transform: translateX(-10px);
13 | }
14 | 25%, 58% {
15 | transform: translateX(10px);
16 | }
17 | 75% {
18 | transform: translateX(-5px);
19 | }
20 | 92% {
21 | transform: translateX(5px);
22 | }
23 | 0%, 100% {
24 | transform: translateX(0);
25 | }
26 | }
27 |
28 | .invalid {
29 | animation: shake .5s linear;
30 | border: 1px solid #ff4d00;
31 | }
32 |
33 | .root {
34 | height: 100vh;
35 | }
36 |
--------------------------------------------------------------------------------
/ui/src/Dashboard/utils.js:
--------------------------------------------------------------------------------
1 | import { message } from "antd";
2 |
3 | export function copyToClipboard(text, msg) {
4 | navigator.clipboard.writeText(text).then(() => {
5 | message.success(msg);
6 | });
7 | }
8 |
9 | export function replaceValueInArray(array, value, newValue) {
10 | let idx = array.indexOf(value);
11 | if (idx >= 0) {
12 | array.splice(idx, 1, newValue);
13 | } else {
14 | throw `Value ${value} not found in array ${array}`
15 | }
16 | }
17 |
18 | export function formatTimestamp(timestamp) {
19 | if (timestamp && timestamp !== "N/A") {
20 | timestamp = timestamp.replace('T', ' ');
21 | return timestamp.split('.')[0]
22 | }
23 | else {
24 | return "N/A"
25 | }
26 | }
27 |
--------------------------------------------------------------------------------
/.dockerignore:
--------------------------------------------------------------------------------
1 | # Ignore Python bytecode files / cache
2 | *.pyc
3 | *.pyo
4 | *.pyd
5 | .mypy_cache
6 | .pytest_cache
7 | .ruff_cache
8 | __pycache__
9 | __pypackages__
10 |
11 | # Ignore Python virtual environment files
12 | venv/
13 | .venv/
14 | .ve/
15 | .pdm.toml
16 | .pdm-build
17 | .pdm-python
18 |
19 | # Ignore local development configuration files
20 | .git
21 | .env
22 | .flake8
23 | mypy.ini
24 | .dockerignore
25 | .gitignore
26 | .gitlab-ci.yml
27 | .python-version
28 | .hooks
29 | .lets
30 | helm
31 | .ipython
32 | .ptpython
33 | .secrets
34 |
35 | # Ignore IDE settings
36 | .vscode
37 | .idea
38 |
39 | # Ignore any compiled Python extension modules
40 | *.so
41 |
42 | # Ignore any build artifacts
43 | build/
44 | dist/
45 |
--------------------------------------------------------------------------------
/featureflags/rpc/container.py:
--------------------------------------------------------------------------------
from dependency_injector import containers, providers
from hiku.engine import Engine
from hiku.executors.asyncio import AsyncIOExecutor

from featureflags.services.db import init_db_engine


class Container(containers.DeclarativeContainer):
    """
    Container with rpc dependencies.
    """

    # Packages whose injection-decorated callables are wired to this container.
    wiring_config = containers.WiringConfiguration(
        packages=[
            "featureflags.services",
            "featureflags.rpc",
        ]
    )

    # New hiku Engine per resolution, each backed by an asyncio executor.
    graph_engine: Engine = providers.Factory(
        Engine,
        providers.Callable(AsyncIOExecutor),
    )
    # Shared DB engine resource, created/disposed via the async generator.
    db_engine = providers.Resource(init_db_engine)
25 |
--------------------------------------------------------------------------------
/docs/clients/javascript.rst:
--------------------------------------------------------------------------------
1 | JavaScript Client
2 | =================
3 |
4 | The JavaScript client for FeatureFlags provides a way to integrate feature flags into your JavaScript and Node.js applications.
5 |
6 | Installation
7 | ------------
8 |
9 | Install the JavaScript client using npm:
10 |
11 | .. code-block:: shell
12 |
13 | $ npm install @evo-company/featureflags
14 |
15 | Usage
16 | -----
17 |
18 | .. note::
19 |
20 | **TODO**: This section will be updated once the JavaScript client is available.
21 |
22 |
23 | Repository
24 | ----------
25 |
26 | The JavaScript client source code is available at:
27 | `https://github.com/evo-company/featureflags-js`
28 |
29 | For updates and contributions, please visit the repository.
30 |
--------------------------------------------------------------------------------
/adr/0002-split-project-into-client-server-protobuf-clients.md:
--------------------------------------------------------------------------------
1 | # 2. Split project into client/server, protobuf, clients
2 |
3 | Date: 2023-10-16
4 |
5 | ## Status
6 |
7 | Accepted
8 |
9 | ## Context
10 |
11 | Developing the project further with the old architecture is difficult. Namespace packages are poorly supported by IDEs and would
12 | add complexity to the project's infrastructure setup.
13 |
14 | ## Decision
15 |
16 | We need to split project into client/server, protobuf, clients and develop them separately as it is more straightforward
17 | and will allow to use more mature tools.
18 |
19 | ## Consequences
20 |
21 | - We will have to develop and maintain more projects
22 | - We will have more flexibility in choosing tools
23 | - Project will be easier to understand and develop, test
24 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Ignore Python bytecode files / cache
2 | *.pyc
3 | *.pyo
4 | *.pyd
5 | .mypy_cache
6 | .pytest_cache
7 | .ruff_cache
8 | __pycache__
9 | __pypackages__
10 | featureflags_protobuf
11 | seeds/seeds.sql
12 |
13 | # Ignore Python virtual environment files
14 | venv/
15 | .venv/
16 | .ve/
17 | .pdm.toml
18 | .pdm-python
19 | .pdm-build
20 | featureflags/web/static
21 |
22 | # Ignore local development configuration files
23 | .env
24 | .lets
25 | .ipython
26 | .ptpython
27 | .python-version
28 | .secrets
29 |
30 | # Ignore IDE settings
31 | .vscode
32 | .idea
33 |
34 | # Ignore any compiled Python extension modules
35 | *.so
36 |
37 | # Ignore any build artifacts
38 | build/
39 | dist/
40 |
41 | # Production configs
42 | configs/*
43 | !configs/local.yaml
44 | !configs/test.yaml
45 |
46 | .DS_Store
--------------------------------------------------------------------------------
/featureflags/migrations/versions/a327a3ea7a5f_added_flags_and_values_name_idx.py:
--------------------------------------------------------------------------------
1 | import sqlalchemy as sa
2 |
3 | from alembic import op
4 |
5 |
6 | revision = "a327a3ea7a5f"
7 | down_revision = "4d42cf3d11de"
8 | branch_labels = None
9 | depends_on = None
10 |
11 |
def upgrade():
    """Add non-unique name indexes to speed up flag/value lookups by name."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_index("flag_name_idx", "flag", ["name"], unique=False)
    op.create_index("value_name_idx", "value", ["name"], unique=False)
    # ### end Alembic commands ###
17 |
18 |
def downgrade():
    """Drop the name indexes in reverse creation order."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index("value_name_idx", table_name="value")
    op.drop_index("flag_name_idx", table_name="flag")
    # ### end Alembic commands ###
24 |
--------------------------------------------------------------------------------
/featureflags/graph/context.py:
--------------------------------------------------------------------------------
1 | import aiopg.sa
2 |
3 | from featureflags.graph.types import (
4 | Changes,
5 | DirtyProjects,
6 | GraphContext,
7 | ValuesChanges,
8 | )
9 | from featureflags.services.auth import BaseUserSession
10 | from featureflags.services.ldap import BaseLDAP
11 |
12 |
def init_graph_context(
    session: BaseUserSession,
    ldap: BaseLDAP,
    engine: aiopg.sa.Engine,
) -> dict:
    """
    Assemble the per-request context mapping consumed by the graph layer.

    Shared services (db engine, user session, LDAP) are passed in; the
    mutation-tracking containers (dirty projects, changes, check ids) are
    created fresh for every request.
    """
    context: dict = {}
    context[GraphContext.DB_ENGINE] = engine
    context[GraphContext.USER_SESSION] = session
    context[GraphContext.LDAP_SERVICE] = ldap
    # Fresh bookkeeping objects so requests never share mutation state.
    context[GraphContext.DIRTY_PROJECTS] = DirtyProjects()
    context[GraphContext.CHANGES] = Changes()
    context[GraphContext.VALUES_CHANGES] = ValuesChanges()
    context[GraphContext.CHECK_IDS] = {}
    return context
27 |
--------------------------------------------------------------------------------
/ui/src/components/Version.jsx:
--------------------------------------------------------------------------------
1 | import { useQuery } from '@apollo/client';
2 | import { Typography } from 'antd';
3 | const { Text } = Typography;
4 |
5 | import { VERSION_QUERY } from '../Dashboard/queries';
6 |
7 | function Version() {
8 | const { data: versionData } = useQuery(VERSION_QUERY);
9 |
10 | if (!versionData?.version?.serverVersion) {
11 | return null;
12 | }
13 |
14 | const { serverVersion, buildVersion } = versionData.version;
15 |
16 | return (
17 |
27 | v{serverVersion} (build: {buildVersion})
28 |
29 | );
30 | }
31 |
32 | export { Version };
--------------------------------------------------------------------------------
/configs/test.yaml:
--------------------------------------------------------------------------------
1 | debug: true
2 | test_environ: true
3 |
4 | app:
5 | port: 8080
6 | host: 0.0.0.0
7 | reload: True
8 |
9 | http:
10 | port: 8081
11 | host: 0.0.0.0
12 | reload: True
13 |
14 | rpc:
15 | port: 50051
16 | host: 0.0.0.0
17 |
18 | logging:
19 | level_app: debug
20 | level_libs: info
21 | handlers: [console]
22 | syslog_app: null
23 | syslog_facility: null
24 | syslog_mapping: null
25 | syslog_defaults: null
26 |
27 | postgres:
28 | host: postgres-test
29 | port: 5432
30 | database: featureflags-test
31 | timeout: 10
32 |
33 | ldap:
34 | host: null
35 | base_dn: null
36 |
37 | instrumentation:
38 | prometheus_port: null
39 |
40 | sentry:
41 | enabled: false
42 | dsn: null
43 | env: local
44 | enable_tracing: true
45 | traces_sample_rate: 1
46 | shutdown_timeout: 1
47 |
--------------------------------------------------------------------------------
/configs/local.yaml:
--------------------------------------------------------------------------------
1 | debug: true
2 | test_environ: true
3 |
4 | app:
5 | port: 8080
6 | host: 0.0.0.0
7 | reload: True
8 |
9 | http:
10 | port: 8081
11 | host: 0.0.0.0
12 | reload: True
13 |
14 | rpc:
15 | port: 50051
16 | host: 0.0.0.0
17 |
18 | logging:
19 | level_app: debug
20 | level_libs: info
21 | handlers: [console]
22 | syslog_app: null
23 | syslog_facility: null
24 | syslog_mapping: null
25 | syslog_defaults: null
26 |
27 | postgres:
28 | host: postgres
29 | port: 5432
30 | database: featureflags
31 | timeout: 10
32 |
33 | ldap:
34 | host: ldap://ldap:3893
35 | base_dn: dc=evo,dc=dev
36 |
37 | instrumentation:
38 | prometheus_port: 9100
39 |
40 | sentry:
41 | enabled: false
42 | dsn: null
43 | env: local
44 | enable_tracing: true
45 | traces_sample_rate: 1
46 | shutdown_timeout: 1
47 |
--------------------------------------------------------------------------------
/featureflags/web/middlewares.py:
--------------------------------------------------------------------------------
1 | from collections.abc import Awaitable, Callable
2 |
3 | from fastapi import FastAPI, Request, Response
4 |
5 | from featureflags.services.auth import (
6 | set_user_session_from_cookie,
7 | set_user_session_to_response,
8 | )
9 | from featureflags.web.container import Container
10 |
11 |
def configure_middlewares(app: FastAPI, container: Container) -> None:
    """
    Register HTTP middlewares on *app*.

    Installs a single middleware that restores the user session from the
    auth cookie before the request is handled and writes the (possibly
    refreshed) session back onto the response afterwards.
    """

    @app.middleware("http")
    async def set_user_session_and_auth_cookie(
        request: Request,
        call_next: Callable[[Request], Awaitable[Response]],
    ) -> Response:
        # db_engine is an async provider (Resource) — awaiting resolves it.
        engine = await container.db_engine()
        await set_user_session_from_cookie(request, engine)

        response = await call_next(request)
        await set_user_session_to_response(response)

        return response
25 |
--------------------------------------------------------------------------------
/featureflags/migrations/versions/94e4203113b0_removed_auth_session_user_column.py:
--------------------------------------------------------------------------------
1 | import sqlalchemy as sa
2 |
3 | from alembic import op
4 | from sqlalchemy.sql import table, column
5 | from sqlalchemy.dialects import postgresql
6 |
7 |
8 | revision = "94e4203113b0"
9 | down_revision = "cce98484f923"
10 | branch_labels = None
11 | depends_on = None
12 |
13 |
def upgrade():
    """Make auth_session.auth_user mandatory and drop the legacy user column.

    Sessions without a linked auth_user would violate the new NOT NULL
    constraint, so they are deleted first.
    """
    # Ad-hoc table construct: just enough schema to issue the DELETE
    # without importing application models into the migration.
    auth_session = table("auth_session", column("auth_user", postgresql.UUID()))

    op.execute(auth_session.delete().where(auth_session.c.auth_user.is_(None)))

    # ### commands auto generated by Alembic - please adjust! ###
    op.alter_column(
        "auth_session",
        "auth_user",
        existing_type=postgresql.UUID(),
        nullable=False,
    )
    op.drop_column("auth_session", "user")
    # ### end Alembic commands ###


def downgrade():
    """Fail loudly instead of silently lacking a downgrade path.

    Added for parity with the other migrations in this package: the
    original revision omitted downgrade(), which makes `alembic downgrade`
    across this revision fail with an unhelpful error.
    """
    raise NotImplementedError(
        "Irreversible migration: deleted sessions and the dropped "
        "'user' column cannot be restored."
    )
--------------------------------------------------------------------------------
/docs/changelog/server/changes_1.rst:
--------------------------------------------------------------------------------
1 | Changes in 1.X
2 | ==============
3 |
4 | 1.24.0
5 | ------
6 |
7 | Added
8 | ~~~~~
9 |
10 | - **Version Display Feature**: Added version display in the UI footer
11 | - Server version from ``featureflags.__version__``
12 | - Build version from ``featureflags.__build_version__`` (which is taken from ``BUILD_VERSION`` environment variable)
13 | - **GraphQL Version API**: New GraphQL field ``version`` for version information
14 | - ``version`` query returns ``Version`` node with ``serverVersion`` and ``buildVersion`` fields
15 |
16 | Internal
17 | ~~~~~~~~
18 |
19 | - **Cross-platform Release Script**: Fixed release script compatibility
20 | - Added macOS (BSD sed) and Linux (GNU sed) compatibility
21 | - Fixed ``sed -i`` command for cross-platform deployment
22 |
23 | 1.22.0
24 | ------
25 |
26 | Initial release with core feature flags functionality.
27 |
28 |
29 |
30 |
--------------------------------------------------------------------------------
/scripts/release.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Release helper: bump __version__, commit, tag and push.
# Usage: VERSION=<x.y.z> MESSAGE=<text> pdm run release
set -e
USAGE="Usage: VERSION=<> MESSAGE=<> pdm run release"

if [ -z "${VERSION}" ]; then
    echo "$USAGE"
    echo "VERSION is not set"
    exit 1
fi
if [ -z "${MESSAGE}" ]; then
    echo "$USAGE"
    echo "MESSAGE is not set"
    exit 1
fi

echo "Releasing ${VERSION} with message: ${MESSAGE}"

# Cross-platform sed: BSD sed (macOS) requires an explicit backup suffix
# argument for -i, GNU sed (Linux) does not.
if [[ "$OSTYPE" == "darwin"* ]]; then
    sed -i '' "s/^__version__ = \".*\"/__version__ = \"${VERSION}\"/" featureflags/__init__.py
else
    sed -i "s/^__version__ = \".*\"/__version__ = \"${VERSION}\"/" featureflags/__init__.py
fi

git add featureflags/__init__.py
git commit -m "Release ${VERSION}"

# Quote expansions so tags/messages survive word splitting and globbing.
git tag -a "v${VERSION}" -m "${MESSAGE}"
git push origin main --tags
32 |
--------------------------------------------------------------------------------
/featureflags/http/container.py:
--------------------------------------------------------------------------------
1 | from dependency_injector import containers, providers
2 | from hiku.engine import Engine
3 | from hiku.executors.asyncio import AsyncIOExecutor
4 |
5 | from featureflags.http.repositories.flags import FlagsRepository
6 | from featureflags.services.db import init_db_engine
7 |
8 |
class Container(containers.DeclarativeContainer):
    """
    Dependency-injection container for the http service.

    Wires providers into the ``featureflags.services`` and
    ``featureflags.http`` packages so that ``@inject``-decorated callables
    there can resolve dependencies from this container.
    """

    wiring_config = containers.WiringConfiguration(
        packages=[
            "featureflags.services",
            "featureflags.http",
        ]
    )

    # Fresh hiku Engine per resolution, driven by an asyncio executor.
    graph_engine: Engine = providers.Factory(
        Engine,
        providers.Callable(AsyncIOExecutor),
    )
    # Database engine; Resource manages its init/shutdown lifecycle.
    db_engine = providers.Resource(init_db_engine)

    # Repos
    flags_repo = providers.Factory(
        FlagsRepository,
        db_engine=db_engine,
        graph_engine=graph_engine,
    )
33 |
--------------------------------------------------------------------------------
/featureflags/migrations/versions/8df4e7dd1897_delete_changelog_on_flag_delete.py:
--------------------------------------------------------------------------------
1 | from alembic import op
2 |
3 |
4 | revision = "8df4e7dd1897"
5 | down_revision = "33ace31b89cc"
6 | branch_labels = None
7 | depends_on = None
8 |
9 |
def upgrade():
    """Recreate changelog->flag FK with ON DELETE CASCADE so changelog
    rows are removed together with their flag."""
    # Needed when TimescaleDB is installed but not preloaded — presumably
    # the DDL below would otherwise be rejected; TODO confirm why this
    # database requires the TimescaleDB extension at all.
    op.execute("SET timescaledb.allow_install_without_preload = 'on';")
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_constraint("changelog_flag_fkey", "changelog", type_="foreignkey")
    op.create_foreign_key(
        "changelog_flag_fkey",
        "changelog",
        "flag",
        ["flag"],
        ["id"],
        ondelete="CASCADE",
    )
    # ### end Alembic commands ###
23 |
24 |
def downgrade():
    """Restore the original FK without ON DELETE CASCADE."""
    # Bug fix: the auto-generated code passed None as the constraint name,
    # which is invalid for drop_constraint — name it explicitly.
    op.drop_constraint("changelog_flag_fkey", "changelog", type_="foreignkey")
    op.create_foreign_key(
        "changelog_flag_fkey", "changelog", "flag", ["flag"], ["id"]
    )
32 |
--------------------------------------------------------------------------------
/ui/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "ui",
3 | "private": true,
4 | "version": "0.0.0",
5 | "scripts": {
6 | "dev": "vite",
7 | "build": "vite build --base /static",
8 | "build-dev-watch": "vite build --mode development --watch --base /static --outDir ../featureflags/web/static",
9 | "preview": "vite preview"
10 | },
11 | "dependencies": {
12 | "@ant-design/icons": "^4.7.0",
13 | "@apollo/client": "^3.6.2",
14 | "antd": "^5.10.0",
15 | "fuzzysearch": "^1.0.3",
16 | "graphiql": "5.2.0",
17 | "graphql": "^16.4.0",
18 | "lodash": "^4.17.21",
19 | "moment": "^2.30.1",
20 | "react": "^18.0.0",
21 | "react-dom": "^18.0.0",
22 | "react-router-dom": "^6.3.0",
23 | "uuid": "^8.3.2"
24 | },
25 | "devDependencies": {
26 | "@types/react": "^18.0.0",
27 | "@types/react-dom": "^18.0.0",
28 | "@vitejs/plugin-react": "^1.3.0",
29 | "less": "^4.1.2",
30 | "less-loader": "^10.2.0",
31 | "vite": "^5.4.9",
32 | "vite-plugin-monaco-editor": "^1.1.0"
33 | }
34 | }
35 |
--------------------------------------------------------------------------------
/.github/workflows/test.yaml:
--------------------------------------------------------------------------------
1 | name: Test
2 |
3 | on:
4 | pull_request:
5 | branches:
6 | - main
7 | - migrate-to-http-client
8 | types:
9 | - assigned
10 | - opened
11 | - synchronize
12 | - reopened
13 |
14 | jobs:
15 | test:
16 | runs-on: ubuntu-latest
17 |
18 | steps:
19 | - name: Checkout
20 | uses: actions/checkout@v4
21 | with:
22 | fetch-depth: 0 # Important for git describe to work correctly
23 |
24 | - name: Install lets
25 | uses: lets-cli/lets-action@v1.1
26 |
27 | - name: Set up Node
28 | uses: actions/setup-node@v4.0.1
29 | with:
30 | node-version: 20
31 | cache-dependency-path: ./ui/package-lock.json
32 | cache: 'npm'
33 |
34 | - name: Install dependencies
35 | working-directory: ./ui
36 | run: npm install
37 |
38 | - name: Build UI
39 | working-directory: ./ui
40 | run: npm run build
41 |
42 | - name: Run server tests
43 | run: lets test
44 |
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # Changelog
2 |
3 | All notable changes to this project will be documented in this file.
4 |
5 | The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
6 | and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
7 |
8 | ## [1.24.0] - 2025-07-29
9 |
10 | ### Added
11 | - **Version Display Feature**: Added version display in the UI footer
12 | - Server version from `featureflags.__version__`
13 | - Build version from `featureflags.__build_version__` (which is taken from `BUILD_VERSION` environment variable)
14 | - **GraphQL Version API**: New GraphQL field `version` for version information
15 | - `version` query returns `Version` node with `serverVersion` and `buildVersion` fields
16 |
17 | ### Internal
18 | - **Cross-platform Release Script**: Fixed release script compatibility
19 | - Added macOS (BSD sed) and Linux (GNU sed) compatibility
20 | - Fixed `sed -i` command for cross-platform deployment
21 |
22 | ## [1.22.0] - Previous Release
23 |
24 | Initial release with core feature flags functionality.
25 |
--------------------------------------------------------------------------------
/ui/src/Dashboard/Tabs.jsx:
--------------------------------------------------------------------------------
1 | import React from 'react';
2 | import { Tabs } from 'antd';
3 | import { SettingOutlined } from '@ant-design/icons';
4 | import { useSearchParams } from 'react-router-dom';
5 |
6 | const HeaderTabs = () => {
7 | const [searchParams, setSearchParams] = useSearchParams();
8 |
9 | const tab = searchParams.get('tab') || 'flags';
10 | const project = searchParams.get('project');
11 | const searchTerm = searchParams.get('term');
12 |
13 | const onTabChange = (key) => {
14 | const newParams = new URLSearchParams(searchParams);
15 | newParams.set('tab', key);
16 | setSearchParams(newParams);
17 | };
18 |
19 | let tabs = [
20 | { key: 'flags', label: 'Flags' },
21 | { key: 'values', label: 'Values' },
22 | ];
23 |
24 | if (project && !searchTerm) {
25 | tabs.push({ key: 'settings', label: 'Settings', icon: });
26 | }
27 |
28 | return (
29 |
35 | );
36 | }
37 |
38 | export { HeaderTabs };
39 |
--------------------------------------------------------------------------------
/ui/src/main.jsx:
--------------------------------------------------------------------------------
1 | import React from 'react'
2 | import ReactDOM from 'react-dom/client'
3 | import {
4 | ApolloClient,
5 | InMemoryCache,
6 | ApolloProvider,
7 | } from "@apollo/client";
8 | import {
9 | HashRouter,
10 | Routes,
11 | Route,
12 | } from "react-router-dom";
13 |
14 | import { Dashboard } from './Dashboard'
15 | import { Auth } from './Auth'
16 | import { GraphiQLRoot } from './graphiql'
17 | import { AuthProvider } from './context/auth';
18 |
19 | const client = new ApolloClient({
20 | uri: '/graphql',
21 | cache: new InMemoryCache()
22 | });
23 |
24 | ReactDOM.createRoot(document.getElementById('root')).render(
25 |
26 |
27 |
28 |
29 |
30 | } />
31 | } />
32 | } />
33 |
34 |
35 |
36 |
37 |
38 | )
39 |
--------------------------------------------------------------------------------
/featureflags/graph/metrics.py:
--------------------------------------------------------------------------------
1 | from collections.abc import Callable
2 |
3 | from prometheus_client import Counter, Histogram
4 |
5 | from featureflags.metrics import wrap_metric
6 |
# Latency histogram per named graph action; buckets target sub-second work.
ACTION_TIME_HISTOGRAM = Histogram(
    "action_time",
    "Action latency (seconds)",
    ["action"],
    buckets=(0.010, 0.050, 0.100, 1.000, float("inf")),
)
# Exception counter per named graph action.
ACTION_ERRORS_COUNTER = Counter(
    "action_errors",
    "Action errors count",
    ["action"],
)

# Duration of a full graph pull (unlabelled — one series).
GRAPH_PULL_TIME_HISTOGRAM = Histogram(
    "graph_pull_time",
    "Graph pull time (seconds)",
    [],
    buckets=(0.050, 0.100, 0.250, 1, float("inf")),
)

# Failures of the graph pull (unlabelled — one series).
GRAPH_PULL_ERRORS_COUNTER = Counter(
    "graph_pull_errors",
    "Graph pull errors count",
    [],
)
31 |
32 |
def track(func: Callable) -> Callable:
    """Instrument *func* with per-action latency and error-count metrics."""
    name = func.__name__
    # Innermost wrapper times the call; the outer one counts exceptions.
    timed = wrap_metric(ACTION_TIME_HISTOGRAM.labels(name).time())(func)
    counted = wrap_metric(
        ACTION_ERRORS_COUNTER.labels(name).count_exceptions()
    )(timed)
    return counted
40 |
--------------------------------------------------------------------------------
/featureflags/http/api/flags.py:
--------------------------------------------------------------------------------
1 | from dependency_injector.wiring import Provide, inject
2 | from fastapi import APIRouter, Depends
3 |
4 | from featureflags.http.container import Container
5 | from featureflags.http.repositories.flags import FlagsRepository
6 | from featureflags.http.types import (
7 | PreloadFlagsRequest,
8 | PreloadFlagsResponse,
9 | SyncFlagsRequest,
10 | SyncFlagsResponse,
11 | )
12 |
13 | router = APIRouter(prefix="/flags", tags=["flags"])
14 |
15 |
@router.post("/load")
@inject
async def load_flags(
    request: PreloadFlagsRequest,
    flags_repo: FlagsRepository = Depends(Provide[Container.flags_repo]),
) -> PreloadFlagsResponse:
    """
    Init flags for project and load flags.

    The repository is injected from the DI container via Provide/Depends.
    """

    return await flags_repo.load(request)
27 |
28 |
@router.post("/sync")
@inject
async def sync_flags(
    request: SyncFlagsRequest,
    flags_repo: FlagsRepository = Depends(Provide[Container.flags_repo]),
) -> SyncFlagsResponse:
    """
    Sync flags for project.

    The repository is injected from the DI container via Provide/Depends.
    """

    return await flags_repo.sync(request)
40 |
--------------------------------------------------------------------------------
/featureflags/rpc/utils.py:
--------------------------------------------------------------------------------
import asyncio
import functools
import logging
import time
from collections.abc import Callable
from typing import Any
6 |
7 | log = logging.getLogger(__name__)
8 |
9 |
def debug_cancellation(func: Callable) -> Callable:
    """
    Decorator for grpc stream handlers that logs diagnostics on cancellation.

    On ``asyncio.CancelledError`` it logs the elapsed time plus either the
    remaining deadline (when the stream has one) or the caller's user-agent
    and metadata, then re-raises so cancellation still propagates.
    """

    # Fix: preserve the handler's __name__/__doc__ so decorators stacked on
    # top (e.g. the metrics `track`, which reads func.__name__) keep working.
    @functools.wraps(func)
    async def wrapper(self: Any, stream: Any, *args: Any, **kwargs: Any) -> Any:
        start_time = time.monotonic()
        try:
            return await func(self, stream, *args, **kwargs)
        except asyncio.CancelledError:
            if stream.deadline:
                log.exception(
                    "Request cancelled, elapsed: %.4fs; remaining: %.4fs",
                    time.monotonic() - start_time,
                    stream.deadline.time_remaining(),
                )
            else:
                log.exception(
                    "Request cancelled, elapsed: %.4fs;"
                    " user-agent: %s;"
                    " metadata: %s;",
                    time.monotonic() - start_time,
                    stream.user_agent,
                    stream.metadata,
                )
            raise

    return wrapper
36 |
--------------------------------------------------------------------------------
/.github/workflows/release.yaml:
--------------------------------------------------------------------------------
1 | name: Build & publish
2 |
3 | on:
4 | push:
5 | tags:
6 | - "v*"
7 |
8 | jobs:
9 | deploy:
10 | runs-on: ubuntu-latest
11 | if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v')
12 | strategy:
13 | matrix:
14 | python-version: [3.11]
15 | steps:
16 | - name: Checkout
17 | uses: actions/checkout@v4
18 |
19 | - name: Install lets
20 | uses: lets-cli/lets-action@v1.1
21 |
22 | - name: Set up Node
23 | uses: actions/setup-node@v4.0.1
24 | with:
25 | node-version: 20
26 | cache-dependency-path: ./ui/package-lock.json
27 | cache: 'npm'
28 |
29 | - name: Install dependencies
30 | working-directory: ./ui
31 | run: npm install
32 |
33 | - name: Build UI and copy bundle
34 | working-directory: ./ui
35 | run: npm run build -- --outDir ../featureflags/web/static
36 |
37 | - name: Set up Python with PDM ${{ matrix.python-version }}
38 | uses: pdm-project/setup-pdm@v3
39 | with:
40 | python-version: ${{ matrix.python-version }}
41 | version: 2.18.0
42 |
43 | - name: Upload package to pypi.org
44 | run: pdm publish -u "__token__" -P ${{ secrets.PYPI_TOKEN }}
45 |
--------------------------------------------------------------------------------
/featureflags/migrations/versions/2fa54f8b55c1_cleaup_condition_checks.py:
--------------------------------------------------------------------------------
1 | from alembic import op
2 | from sqlalchemy.sql import text
3 |
4 | revision = "2fa54f8b55c1"
5 | down_revision = "a327a3ea7a5f"
6 | branch_labels = None
7 | depends_on = None
8 |
9 |
def upgrade():
    """Remove references to deleted checks from condition.checks arrays."""
    conn = op.get_bind()

    # Consistency fix: wrap raw SQL in text() like the UPDATE below already
    # does — SQLAlchemy 2.0 rejects plain strings passed to execute().
    conditions = list(conn.execute(text("SELECT id, checks FROM condition")))
    checks = list(conn.execute(text('SELECT id FROM "check"')))

    print("Found conditions: {}".format(len(conditions)))
    print("Found checks: {}".format(len(checks)))

    check_ids = {check.id for check in checks}

    for condition in conditions:
        # Keep only check ids that still exist in the "check" table.
        new_checks = {check for check in condition.checks if check in check_ids}
        if set(condition.checks) != new_checks:
            print(
                "Updating condition {} with new checks: before={}, after={}".format(
                    condition.id, condition.checks, new_checks
                )
            )

            conn.execute(
                text("UPDATE condition SET checks = :checks WHERE id = :id"),
                {"checks": list(new_checks), "id": condition.id},
            )
34 |
35 |
def downgrade():
    # Intentionally a no-op: the removed dangling check ids cannot be
    # reconstructed, so this cleanup is irreversible.
    ...
38 |
--------------------------------------------------------------------------------
/featureflags/web/api/graph.py:
--------------------------------------------------------------------------------
1 | import aiopg.sa
2 | from dependency_injector.wiring import Provide, inject
3 | from fastapi import APIRouter, Depends
4 | from fastapi.responses import ORJSONResponse
5 | from hiku.endpoint.graphql import AsyncBatchGraphQLEndpoint
6 |
7 | from featureflags.graph.context import init_graph_context
8 | from featureflags.services.auth import user_session
9 | from featureflags.services.ldap import BaseLDAP
10 | from featureflags.web.container import Container
11 | from featureflags.web.types import GraphQueryRequest
12 |
13 | router = APIRouter(
14 | prefix="/graphql",
15 | tags=["graphql"],
16 | )
17 |
18 |
@router.post("")
@inject
async def graphql(
    query: GraphQueryRequest,
    ldap_service: BaseLDAP = Depends(Provide[Container.ldap_service]),
    db_engine: aiopg.sa.Engine = Depends(Provide[Container.db_engine]),
    graphql_endpoint: AsyncBatchGraphQLEndpoint = Depends(
        Provide[Container.graphql_endpoint],
    ),
) -> ORJSONResponse:
    """
    Execute a GraphQL query via the hiku batch endpoint.

    Builds a fresh per-request graph context (db engine, user session,
    LDAP service plus mutation bookkeeping) and dispatches the parsed
    query payload to it.
    """
    # user_session is read from a context variable — presumably populated
    # by the auth middleware before this handler runs; verify in services.auth.
    ctx = init_graph_context(
        session=user_session.get(),
        ldap=ldap_service,
        engine=db_engine,
    )
    result = await graphql_endpoint.dispatch(query.model_dump(), ctx)
    return ORJSONResponse(result)
36 |
--------------------------------------------------------------------------------
/ui/vite.config.js:
--------------------------------------------------------------------------------
import { defineConfig } from 'vite'
import react from '@vitejs/plugin-react'
import $monacoEditorPlugin from 'vite-plugin-monaco-editor'
// The plugin ships as CJS; interop may expose it under .default.
const monacoEditorPlugin = $monacoEditorPlugin.default ?? $monacoEditorPlugin

// https://vitejs.dev/config/
export default defineConfig({
  server: {
    port: 3001,
    proxy: {
      // Forward GraphQL calls from the dev server to the local backend.
      '/graphql': 'http://localhost:8080'
    },
  },
  plugins: [
    react(),
    monacoEditorPlugin({
      languageWorkers: ['editorWorkerService', 'json'],
      customWorkers: [
        {
          label: 'graphql',
          entry: 'monaco-graphql/esm/graphql.worker.js'
        }
      ]
    }),
  ],
  build: {
    rollupOptions: {
      onwarn(warning, warn) {
        // Suppress "Module level directives cause errors when bundled" warnings
        if (warning.code === "MODULE_LEVEL_DIRECTIVE") {
          return;
        }
        warn(warning);
      },
    },
  },
  css: {
    preprocessorOptions: {
      less: {
        // antd's less themes require JS expressions in less files.
        javascriptEnabled: true,
      }
    }
  },
  resolve: {
    alias: [
      // Strip the webpack-style "~" prefix from package imports.
      { find: /^~/, replacement: '' }
    ],
  }
})
50 |
--------------------------------------------------------------------------------
/featureflags/cli.py:
--------------------------------------------------------------------------------
import logging
from typing import Annotated

import typer

from featureflags.logging import configure_logging

# Configure logging at import time so every subcommand inherits it.
configure_logging(__package__)

log = logging.getLogger(__name__)
cli = typer.Typer()
12 |
13 |
@cli.command(
    name="alembic",
    help="Run alembic",
)
def alembic(args: Annotated[list[str], typer.Argument()]) -> None:
    """Forward the remaining CLI arguments to alembic."""
    # Local import keeps startup light for the other subcommands.
    from featureflags.alembic import main as alembic_main

    log.info("Executing command: `alembic`")
    alembic_main(args)
23 |
24 |
@cli.command(name="web", help="Run web server")
def web() -> None:
    """Start the web (UI/GraphQL) server."""
    # Local import defers the app wiring until the command actually runs.
    from featureflags.web.app import main as web_main

    log.info("Executing command: `web`")
    web_main()
31 |
32 |
@cli.command(name="rpc", help="Run rpc server")
def rpc() -> None:
    """Start the grpc server on a uvloop-backed event loop."""
    import asyncio

    import uvloop

    from featureflags.rpc.app import main as rpc_main

    log.info("Executing command: `rpc`")
    # uvloop replaces the default asyncio loop implementation; the policy
    # must be installed before asyncio.run creates the loop.
    asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
    asyncio.run(rpc_main())
44 |
45 |
@cli.command(name="http", help="Run http server")
def http() -> None:
    """Start the http (flags REST API) server."""
    # Local import defers the app wiring until the command actually runs.
    from featureflags.http.app import main as http_main

    log.info("Executing command: `http`")
    http_main()
52 |
--------------------------------------------------------------------------------
/featureflags/migrations/versions/4d42cf3d11de_added_created_and_reported_timestamps.py:
--------------------------------------------------------------------------------
1 | import sqlalchemy as sa
2 |
3 | from alembic import op
4 | from sqlalchemy.dialects import postgresql
5 |
6 |
7 | revision = "4d42cf3d11de"
8 | down_revision = "1876f90b58e8"
9 | branch_labels = None
10 | depends_on = None
11 |
12 |
def upgrade():
    """Add nullable created/reported timestamp columns to flag and value."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Same pair of columns on both tables; order matches the original
    # auto-generated script (flag first, created before reported).
    for table_name in ("flag", "value"):
        for column_name in ("created_timestamp", "reported_timestamp"):
            op.add_column(
                table_name,
                sa.Column(column_name, postgresql.TIMESTAMP(), nullable=True),
            )
    # ### end Alembic commands ###
32 |
33 |
def downgrade():
    """Drop the created/reported timestamp columns again."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Reverse order of upgrade(): value before flag, reported before created.
    for table_name in ("value", "flag"):
        for column_name in ("reported_timestamp", "created_timestamp"):
            op.drop_column(table_name, column_name)
    # ### end Alembic commands ###
41 |
--------------------------------------------------------------------------------
/featureflags/rpc/metrics.py:
--------------------------------------------------------------------------------
1 | import time
2 | from collections.abc import Callable
3 | from functools import wraps
4 | from typing import Any
5 |
6 | from prometheus_client import (
7 | Counter,
8 | Gauge,
9 | Histogram,
10 | )
11 |
# Wall-clock duration of each grpc method call, labelled by method name.
GRPC_METHOD_TIME = Histogram(
    "grpc_method_time_seconds",
    "time spent in requests to grpc method",
    ["method"],
)

# Total number of calls per grpc method.
GRPC_METHOD_COUNT = Counter(
    "grpc_method_call_count", "how many times grpc method called", ["method"]
)

# Concurrently running calls per grpc method (inc on entry, dec on exit).
GRPC_METHOD_IN_PROGRESS = Gauge(
    "grpc_method_call_in_progress",
    "how many grpc method calls in progress",
    ["method"],
)
27 |
28 |
def track(func: Callable) -> Callable:
    """
    Decorator for async grpc methods: counts calls, tracks the in-progress
    gauge, and observes wall-clock duration per method.

    The timing/gauge updates run in ``finally`` so they fire even when the
    wrapped coroutine raises.
    """
    # Hoisted out of the wrapper: the name is fixed at decoration time,
    # so there is no reason to re-read it on every call.
    func_name = func.__name__

    @wraps(func)
    async def wrapper(*args: Any, **kwargs: Any) -> Any:
        GRPC_METHOD_COUNT.labels(func_name).inc()
        GRPC_METHOD_IN_PROGRESS.labels(func_name).inc()

        start_time = time.perf_counter()
        try:
            result = await func(*args, **kwargs)
        finally:
            GRPC_METHOD_TIME.labels(func_name).observe(
                time.perf_counter() - start_time
            )
            GRPC_METHOD_IN_PROGRESS.labels(func_name).dec()

        return result

    return wrapper
49 |
--------------------------------------------------------------------------------
/README.rst:
--------------------------------------------------------------------------------
1 | FeatureFlags service
2 | ====================
3 |
4 | |project|_ |documentation|_ |version|_ |tag|_ |license|_
5 |
6 | FeatureFlags is a client/server solution for feature flags integration
7 |
8 | Installation
9 | ------------
10 |
11 | Server
12 | ~~~~~~
13 |
14 | .. code-block:: shell
15 |
16 | $ pip3 install evo-featureflags-server
17 |
18 |
19 | Client
20 | ~~~~~~
21 |
22 | * Python - https://github.com/evo-company/featureflags-py
23 | * JavaScript - https://github.com/evo-company/featureflags-js
24 |
25 | Documentation
26 | -------------
27 |
28 | Read documentation_
29 |
30 |
31 | .. |project| image:: https://img.shields.io/badge/evo-company%2Ffeatureflags-blueviolet.svg?logo=github
32 | .. _project: https://github.com/evo-company/featureflags
33 | .. |documentation| image:: https://img.shields.io/badge/docs-featureflags.rtfd.io-blue.svg
34 | .. _documentation: https://featureflags.readthedocs.io/en/latest/
35 | .. |version| image:: https://img.shields.io/pypi/v/evo-featureflags-server.svg?label=stable&color=green
.. _version: https://pypi.org/project/evo-featureflags-server/
37 | .. |tag| image:: https://img.shields.io/github/tag/evo-company/featureflags.svg?label=latest
38 | .. _tag: https://pypi.org/project/evo-featureflags-server/#history
39 | .. |license| image:: https://img.shields.io/pypi/l/featureflags.svg
40 | .. _license: https://github.com/evo-company/featureflags/blob/master/LICENSE.txt
41 |
42 |
--------------------------------------------------------------------------------
/docs/client.rst:
--------------------------------------------------------------------------------
1 | Client
2 | ======
3 |
4 | This guide will help you get started with the FeatureFlags python client library.
5 |
6 | Installation
7 | ------------
8 |
9 | .. code-block:: shell
10 |
11 | $ pip install evo-featureflags-client
12 |
13 |
14 | Here's a simple example of using the sync FeatureFlags client in a Flask application:
15 |
16 | .. code-block:: python
17 |
18 | from flask import Flask, request, jsonify
19 | from featureflags_client.http.client import FeatureFlagsClient
20 | from featureflags_client.http.managers.requests import RequestsManager
21 | from featureflags_client.http.types import Variable, VariableType
22 |
23 | app = Flask(__name__)
24 |
25 | REQUEST_QUERY = Variable("user.name", VariableType.STRING)
26 |
27 | class Flags:
28 | TEST = False
29 |
30 | manager = RequestsManager(
31 | url="http://localhost:8080",
32 | project="my-project",
        variables=[REQUEST_QUERY],
34 | defaults=Flags,
35 | request_timeout=5,
36 | refresh_interval=10,
37 | )
38 | ff_client = FeatureFlagsClient(manager)
39 |
    @app.route('/hello/<username>')
41 | def hello(username):
        flags = ff_client.flags({"user.name": username})
43 | if flags.TEST:
44 | return f"Hello, {username}! TEST is enabled"
45 | else:
46 | return f"Hello, {username}! TEST is disabled"
47 |
48 | if __name__ == '__main__':
49 | app.run(debug=True)
50 |
--------------------------------------------------------------------------------
/featureflags/protobuf/service.proto:
--------------------------------------------------------------------------------
1 | syntax = "proto3";
2 |
3 | package featureflags.service;
4 |
5 | import 'hiku/protobuf/query.proto';
6 | import "google/protobuf/empty.proto";
7 | import 'google/protobuf/timestamp.proto';
8 | import 'featureflags/protobuf/graph.proto';
9 |
// A variable the client can supply for condition checks (name + type).
message Variable {
    string name = 1;
    featureflags.graph.Variable.Type type = 2;
}

// Usage counters for a single flag over one reporting interval.
message FlagUsage {
    string name = 1;
    // Interval the counts were aggregated for.
    google.protobuf.Timestamp interval = 2;
    // Times the flag evaluated to false / true.
    uint32 negative_count = 3;
    uint32 positive_count = 4;
}

// Client -> server sync: report variables and usage, request flag data.
message ExchangeRequest {
    string project = 1;
    // Version of the flag data the client currently holds.
    uint32 version = 2;
    repeated Variable variables = 3;
    repeated FlagUsage flags_usage = 4;
    // hiku query describing which graph fields the client wants back.
    hiku.protobuf.query.Node query = 5;
}

// Server reply with the current version and the queried graph result.
message ExchangeReply {
    uint32 version = 1;
    featureflags.graph.Result result = 2;
}

// Payload for persisting one flag's usage statistics.
message StoreStatsTask {
    string flag_id = 1;
    google.protobuf.Timestamp interval = 2;
    uint32 negative_count = 3;
    uint32 positive_count = 4;
}

service FeatureFlags {
    // Deprecated snake_case alias of Exchange; kept for old clients.
    rpc exchange (ExchangeRequest) returns (ExchangeReply) {
        option deprecated = true;
    };
    rpc Exchange (ExchangeRequest) returns (ExchangeReply) {};
    // Deprecated snake_case alias of StoreStats; kept for old clients.
    rpc store_stats (StoreStatsTask) returns (google.protobuf.Empty) {
        option deprecated = true;
    };
    rpc StoreStats (StoreStatsTask) returns (google.protobuf.Empty) {};
}
52 |
--------------------------------------------------------------------------------
/ui/src/favicon.svg:
--------------------------------------------------------------------------------
1 |
44 |
--------------------------------------------------------------------------------
/featureflags/migrations/versions/cce98484f923_auth_user_table_added.py:
--------------------------------------------------------------------------------
1 | import sqlalchemy as sa
2 |
3 | from alembic import op
4 | from sqlalchemy.dialects import postgresql
5 |
6 |
7 | revision = "cce98484f923"
8 | down_revision = "b3fbbe647373"
9 | branch_labels = None
10 | depends_on = None
11 |
12 |
def upgrade():
    """Create the auth_user table and link auth_session to it."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "auth_user",
        sa.Column("id", postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column("username", sa.String(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("username"),
    )
    op.create_index(
        "auth_user_username_idx", "auth_user", ["username"], unique=False
    )
    # New FK column; nullable so pre-existing sessions stay valid.
    op.add_column(
        "auth_session",
        sa.Column("auth_user", postgresql.UUID(as_uuid=True), nullable=True),
    )
    # Legacy textual "user" column becomes optional.
    op.alter_column(
        "auth_session", "user", existing_type=sa.VARCHAR(), nullable=True
    )
    op.create_index(
        "auth_session_expiration_time_idx",
        "auth_session",
        ["expiration_time"],
        unique=False,
    )
    op.drop_index("auth_session_expiration_time", table_name="auth_session")
    op.drop_index("auth_session_user_creation_time", table_name="auth_session")
    # Unnamed FK: the constraint name comes from the DB naming convention.
    op.create_foreign_key(
        None, "auth_session", "auth_user", ["auth_user"], ["id"]
    )
    # ### end Alembic commands ###
44 |
--------------------------------------------------------------------------------
/featureflags/logging.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from logging import StreamHandler
3 | from logging.handlers import SysLogHandler
4 |
5 | from metricslog.ext.formatter import CEELogstashFormatter, ColorFormatter
6 |
7 | from featureflags.config import config
8 |
9 |
def create_console_handler() -> StreamHandler:
    """Build a stream handler that emits color-formatted log records."""
    console = StreamHandler()
    console.setFormatter(ColorFormatter())
    return console
14 |
15 |
def create_syslog_handler(package: str) -> SysLogHandler:
    """Build a /dev/log syslog handler with CEE/logstash formatting.

    ``package`` is the only logger namespace whose extra fields are
    passed through by the formatter.
    """
    assert config.logging.syslog_app, config.logging.syslog_app
    assert config.logging.syslog_facility, config.logging.syslog_facility

    handler = SysLogHandler(
        "/dev/log",
        facility=SysLogHandler.facility_names[config.logging.syslog_facility],
    )
    handler.setFormatter(
        CEELogstashFormatter(
            config.logging.syslog_app,
            mapping=config.logging.syslog_mapping,
            defaults=config.logging.syslog_defaults,
            extra_only={package},
        )
    )
    return handler
29 |
30 |
def configure_logging(package: str) -> None:
    """Set log levels and attach the handlers named in the config.

    Third-party loggers get ``level_libs``; the application package
    logger gets ``level_app``.
    """
    logging.captureWarnings(True)
    logging.root.setLevel(config.logging.level_libs.upper())
    logging.getLogger(package).setLevel(config.logging.level_app.upper())

    enabled = config.logging.handlers
    if "console" in enabled:
        logging.root.addHandler(create_console_handler())
    if "syslog" in enabled:
        logging.root.addHandler(create_syslog_handler(package))
41 |
--------------------------------------------------------------------------------
/featureflags/migrations/versions/33ace31b89cc_changelog_added.py:
--------------------------------------------------------------------------------
1 | import sqlalchemy as sa
2 |
3 | from alembic import op
4 | from sqlalchemy.dialects import postgresql
5 |
6 |
7 | revision = "33ace31b89cc"
8 | down_revision = "94e4203113b0"
9 | branch_labels = None
10 | depends_on = None
11 |
12 |
def upgrade():
    """Rename legacy enum types and create the changelog table."""
    # Rename enums first so the new names are available to later DDL.
    op.execute('ALTER TYPE "type" RENAME TO "variable_type";')
    op.execute('ALTER TYPE "operator" RENAME TO "check_operator";')

    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "changelog",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("timestamp", postgresql.TIMESTAMP(), nullable=False),
        sa.Column("auth_user", postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column("flag", postgresql.UUID(as_uuid=True), nullable=False),
        # One changelog row can record several actions at once.
        sa.Column(
            "actions",
            postgresql.ARRAY(
                sa.Enum(
                    "ENABLE_FLAG",
                    "DISABLE_FLAG",
                    "ADD_CONDITION",
                    "DISABLE_CONDITION",
                    "RESET_FLAG",
                    name="changelog_actions",
                )
            ),
            nullable=True,
        ),
        sa.ForeignKeyConstraint(
            ["auth_user"],
            ["auth_user.id"],
        ),
        sa.ForeignKeyConstraint(
            ["flag"],
            ["flag.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    # ### end Alembic commands ###
49 |
--------------------------------------------------------------------------------
/ui/src/Dashboard/context.jsx:
--------------------------------------------------------------------------------
1 | import { createContext, useContext } from 'react';
2 |
// Map of projects, provided as a single-element tuple.
export const ProjectsMapContext = createContext([[]]);

// Read the projects map from context.
export const useProjectsMap = () => {
  const [project] = useContext(ProjectsMapContext);
  return project;
}

// Flag editor state: { state, conditions, checks, set* setters }.
export const FlagContext = createContext({});
export const useFlagState = () => {
  const { state } = useContext(FlagContext);
  return state;
}
// Full flag context object (state + actions).
export const useFlagCtx = () => useContext(FlagContext);
export const useConditions = () => {
  const { conditions } = useContext(FlagContext);
  return conditions;
}
// Checks plus the per-field setters for editing a check.
export const useChecks = () => {
  const {
    checks, setVariable, setOperator,
    setValueString, setValueNumber, setValueTimestamp, setValueSet
  } = useContext(FlagContext);
  return {
    checks, setVariable, setOperator,
    setValueString, setValueNumber, setValueTimestamp, setValueSet
  };
}

// Value editor state; mirrors FlagContext for "values" entities.
export const ValueContext = createContext({});
export const useValueState = () => {
  const { state } = useContext(ValueContext);
  return state;
}
export const useValueCtx = () => useContext(ValueContext);
export const useValueConditions = () => {
  const { conditions } = useContext(ValueContext);
  return conditions;
}
// Checks plus setters for editing a value's check.
export const useValueChecks = () => {
  const {
    checks, setVariable, setOperator, setValueString, setValueNumber,
    setValueTimestamp, setValueSet
  } = useContext(ValueContext);
  return {
    checks, setVariable, setOperator, setValueString, setValueNumber,
    setValueTimestamp, setValueSet
  };
}
--------------------------------------------------------------------------------
/featureflags/metrics.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from collections.abc import Callable
3 | from contextlib import AbstractContextManager
4 | from typing import Any
5 |
6 | from fastapi import FastAPI
7 | from prometheus_client import start_http_server
8 | from prometheus_client.decorator import decorator as prometheus_decorator
9 | from prometheus_fastapi_instrumentator import Instrumentator
10 |
11 | log = logging.getLogger(__name__)
12 |
13 |
def wrap_metric(metric: AbstractContextManager) -> Callable:
    """Build a decorator that runs each call of an async callable
    inside *metric* (a reusable context manager, e.g. a Prometheus
    timer or in-progress gauge) and returns the call's result.
    """

    async def wrapper(fn: Callable, *args: Any, **kwargs: Any) -> Any:
        with metric:
            return await fn(*args, **kwargs)

    return prometheus_decorator(wrapper)
20 |
21 |
def configure_metrics(
    port: int | None = None,
    app: FastAPI | None = None,
) -> None:
    """Configure Prometheus instrumentation.

    :param port: if set, start a standalone Prometheus exporter HTTP
        server on this port.
    :param app: if set, instrument this FastAPI app with request
        metrics (latency buckets, in-progress gauge), excluding the
        ``/metrics`` and ``/~health`` handlers.
    """
    if port:
        start_http_server(port=port)
        log.info("Prometheus metrics initialized")

    if app:
        instrumentator = Instrumentator(
            should_instrument_requests_inprogress=True,
            inprogress_labels=True,
            excluded_handlers=["/metrics", "/~health"],
        )
        instrumentator.instrument(
            app=app,
            latency_lowr_buckets=[
                0.001,
                0.005,
                0.01,
                0.025,
                0.05,
                0.1,
                0.25,
                0.5,
            ],
        )
        log.info("Http instrumentation initialized")

    if not port and not app:
        # Bug fix: this used to be the `else` of `if app:`, so a run
        # configured with only `port` logged both "initialized" and
        # "disabled". "Disabled" now means neither exporter nor app
        # instrumentation was configured.
        log.info("Prometheus metrics disabled")
53 |
--------------------------------------------------------------------------------
/featureflags/web/container.py:
--------------------------------------------------------------------------------
1 | from typing import Literal
2 |
3 | from dependency_injector import containers, providers
4 | from hiku.endpoint.graphql import AsyncBatchGraphQLEndpoint
5 | from hiku.engine import Engine
6 | from hiku.executors.asyncio import AsyncIOExecutor
7 |
8 | from featureflags.config import config
9 | from featureflags.graph import graph
10 | from featureflags.services.db import init_db_engine
11 | from featureflags.services.ldap import LDAP, DummyLDAP
12 |
13 |
def select_main_or_testing_dependency() -> Literal["main", "testing"]:
    """Pick the DI Selector key based on the test-environment flag."""
    if config.test_environ:
        return "testing"
    return "main"
16 |
17 |
class Container(containers.DeclarativeContainer):
    """
    Container with app dependencies.
    """

    # Auto-wire @inject decorators in these packages.
    wiring_config = containers.WiringConfiguration(
        packages=[
            "featureflags.services",
            "featureflags.web",
        ]
    )

    # Fresh hiku Engine per request; the executor is built via the
    # Callable provider each time.
    graph_engine: Engine = providers.Factory(
        Engine,
        providers.Callable(AsyncIOExecutor),
    )
    # Single shared GraphQL endpoint serving both query and mutation graphs.
    graphql_endpoint: AsyncBatchGraphQLEndpoint = providers.Singleton(
        AsyncBatchGraphQLEndpoint,
        engine=graph_engine,
        query_graph=graph.GRAPH,
        mutation_graph=graph.MUTATION_GRAPH,
    )

    # DB engine resource; initialized/shut down with the app lifecycle.
    db_engine = providers.Resource(init_db_engine)

    # Real LDAP normally; a dummy (always-bound) LDAP in test environs.
    ldap_service = providers.Selector(
        selector=select_main_or_testing_dependency,
        main=providers.Factory(
            LDAP,
            host=config.ldap.host,
            base_dn=config.ldap.base_dn,
        ),
        testing=providers.Factory(
            DummyLDAP,
            user_is_bound=True,
        ),
    )
55 |
--------------------------------------------------------------------------------
/docs/development.rst:
--------------------------------------------------------------------------------
1 | Development
2 | ===========
3 |
Run all of these commands:
5 |
6 | - ``lets postgres``
7 | - ``lets apply-migrations-dev``
8 | - ``lets apply-seeds-dev`` if you have data in ``seeds/`` directory
9 | - ``lets web`` in separate terminal
10 | - ``lets ui`` in separate terminal, this will start vite dev server
11 |
12 | .. note:: You might need to install npm dependencies: ``cd ui && npm install``
13 |
14 | - `http://localhost:8080` - web application with `lets ui`
15 | - `http://localhost:3001` - web application with `lets ui-build-dev`
16 | - `http://localhost:8081` - http api
17 | - `localhost:50051` - grpc api
18 |
19 | To create a project for development purposes you can run:
20 |
21 | ``lets http``
22 |
23 | and then execute this command:
24 |
25 | .. code-block:: shell
26 |
27 | curl -X POST http://localhost:8080/flags/load -H "Content-Type: application/json" \
28 | -d '{"project": "test", "version": 1, "variables": [{"name": "user.id", "type": 2}], "flags": ["TEST_FLAG"], "values": [["TEST_VALUE", 1]]}'
29 |
30 | Default username is ``admin`` and password is ``admin`` if you run with `configs/local.yaml` configuration file (default)
31 |
32 | To start API handlers (not required for web application):
33 |
34 | - ``lets http`` in separate terminal (this will start http server on ``http://localhost:8080``)
35 | - ``lets rpc`` in separate terminal (this will start grpc server on ``localhost:50051``)
36 |
37 | To build UI and copy it to ``web/static`` directory:
38 |
39 | - ``lets ui-build-dev``
40 |
41 | To release package:
42 |
43 | - ``lets release 1.0.0 --message="Added feature"``
44 |
45 | Pre-commit
46 |
47 | ``./scripts/enable-hooks.sh``
48 |
49 | ``./scripts/disable-hooks.sh``
50 |
51 | Architecture
52 | ------------
53 |
54 | Check important architecture decisions in ``adr/`` directory.
--------------------------------------------------------------------------------
/ui/src/components/Logo.jsx:
--------------------------------------------------------------------------------
1 | import Icon from '@ant-design/icons';
2 |
3 |
4 | const LogoIcon = () => (
5 |
54 | );
55 |
56 | export const Logo = () => {
57 | return (
58 |
59 | );
60 | }
61 |
--------------------------------------------------------------------------------
/docs/server.rst:
--------------------------------------------------------------------------------
1 | Server
2 | ======
3 |
4 | This guide will help you get started with the FeatureFlags server
5 |
6 | Installation
7 | ------------
8 |
9 | .. code-block:: shell
10 |
11 | $ pip install evo-featureflags-server
12 |
13 | Running the Server Locally
14 | --------------------------
15 |
Using Lets
~~~~~~~~~~

We recommend using `Lets <https://lets-cli.org/>`_, a CLI task runner for developers, as an alternative to Docker. Lets provides a simple YAML-based configuration for running development tasks.
20 |
21 | First, install Lets https://lets-cli.org/
22 |
23 | Then run the server using Lets:
24 |
25 | .. code-block:: shell
26 |
27 | # Clone the repository
28 | $ git clone https://github.com/evo-company/featureflags
29 | $ cd featureflags
30 |
31 | # Start PostgreSQL
32 | $ lets postgres
33 |
34 | # Apply database migrations
35 | $ lets apply-migrations-dev
36 |
37 | # Start the web server for UI
38 | $ lets web
39 |
40 | # Start the http api to which the client will connect
41 | $ lets http
42 |
43 | # Start the UI dev server
44 | $ lets ui
45 |
46 | The server will be available at `http://localhost:8080`.
47 | The api will be available at `http://localhost:8081`.
48 |
49 | To create a project for development purposes you can run:
50 |
51 | ``lets http``
52 |
53 | and then execute this command:
54 |
55 | .. code-block:: shell
56 |
57 | curl -X POST http://localhost:8080/flags/load -H "Content-Type: application/json" \
58 | -d '{"project": "test", "version": 1, "variables": [{"name": "user.id", "type": 2}], "flags": ["TEST_FLAG"], "values": [["TEST_VALUE", 1]]}'
59 |
60 | Configuration
61 | ~~~~~~~~~~~~~
62 |
63 | The server uses YAML configuration files. You can specify the configuration file path using the `CONFIG_PATH` environment variable.
64 |
65 | Basic config is in `configs/local.yaml` file.
66 |
--------------------------------------------------------------------------------
/featureflags/web/lifecycle.py:
--------------------------------------------------------------------------------
1 | import anyio.to_thread
2 | from dependency_injector.containers import DeclarativeContainer
3 | from fastapi import FastAPI, HTTPException, Request
4 | from fastapi.responses import ORJSONResponse
5 |
6 | from featureflags.config import config
7 | from featureflags.errors import BaseInternalServerError
8 |
9 |
def configure_lifecycle(app: FastAPI, container: DeclarativeContainer) -> None:
    """Wire startup/shutdown hooks and JSON error handlers onto *app*."""

    @app.on_event("startup")
    async def startup() -> None:
        """Application startup functions."""
        # Cap the anyio worker-thread pool used for sync work.
        # https://github.com/tiangolo/fastapi/discussions/8587
        limiter = anyio.to_thread.current_default_thread_limiter()
        limiter.total_tokens = config.app.max_concurrent_threads

        await container.init_resources()

    @app.on_event("shutdown")
    async def shutdown() -> None:
        """Application shutdown functions."""
        await container.shutdown_resources()

    async def _error_response(_: Request, exc: Exception) -> ORJSONResponse:
        # Both handled exception types expose .detail and .status_code,
        # and optionally .headers.
        return ORJSONResponse(
            content={"detail": exc.detail},
            status_code=exc.status_code,
            headers=getattr(exc, "headers", None),
        )

    app.add_exception_handler(HTTPException, _error_response)
    app.add_exception_handler(BaseInternalServerError, _error_response)
53 |
--------------------------------------------------------------------------------
/featureflags/http/lifecycle.py:
--------------------------------------------------------------------------------
1 | import anyio.to_thread
2 | from dependency_injector.containers import DeclarativeContainer
3 | from fastapi import FastAPI, HTTPException, Request
4 | from fastapi.responses import ORJSONResponse
5 |
6 | from featureflags.config import config
7 | from featureflags.errors import BaseInternalServerError
8 |
9 |
def configure_lifecycle(app: FastAPI, container: DeclarativeContainer) -> None:
    """Wire startup/shutdown hooks and JSON error handlers onto *app*."""

    @app.on_event("startup")
    async def startup() -> None:
        """Application startup functions."""
        # Cap the anyio worker-thread pool used for sync work.
        # https://github.com/tiangolo/fastapi/discussions/8587
        limiter = anyio.to_thread.current_default_thread_limiter()
        limiter.total_tokens = config.http.max_concurrent_threads

        await container.init_resources()

    @app.on_event("shutdown")
    async def shutdown() -> None:
        """Application shutdown functions."""
        await container.shutdown_resources()

    async def _error_response(_: Request, exc: Exception) -> ORJSONResponse:
        # Both handled exception types expose .detail and .status_code,
        # and optionally .headers.
        return ORJSONResponse(
            content={"detail": exc.detail},
            status_code=exc.status_code,
            headers=getattr(exc, "headers", None),
        )

    app.add_exception_handler(HTTPException, _error_response)
    app.add_exception_handler(BaseInternalServerError, _error_response)
53 |
--------------------------------------------------------------------------------
/featureflags/http/app.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
3 | from fastapi import FastAPI
4 | from fastapi.responses import ORJSONResponse
5 |
6 | from featureflags.config import config
7 | from featureflags.http.api.flags import router as flags_router
8 | from featureflags.http.api.health import router as health_router
9 | from featureflags.http.container import Container
10 | from featureflags.http.lifecycle import configure_lifecycle
11 | from featureflags.metrics import configure_metrics
12 | from featureflags.services.auth import set_internal_user_session
13 |
14 | log = logging.getLogger(__name__)
15 |
16 |
def create_app() -> FastAPI:
    """Build and fully configure the HTTP API FastAPI application."""
    app = FastAPI(
        debug=config.debug,
        default_response_class=ORJSONResponse,
    )

    di_container = Container()
    app.container = di_container  # type: ignore

    for router in (health_router, flags_router):
        app.include_router(router)

    set_internal_user_session()

    configure_metrics(port=config.instrumentation.prometheus_port, app=app)
    configure_lifecycle(app, di_container)

    if config.sentry.enabled:
        # Imported lazily so Sentry is only pulled in when enabled.
        from featureflags.sentry import SentryMode, configure_sentry

        configure_sentry(config.sentry, env_prefix="http", mode=SentryMode.HTTP)

    return app
40 |
41 |
def main() -> None:
    """Entry point: run the HTTP API under uvicorn."""
    import uvicorn

    from featureflags import __build_version__, __version__

    log.info(
        "Starting http server. Version: %s, Build version: %s",
        __version__,
        __build_version__,
    )

    log_level = "debug" if config.debug else "info"
    uvicorn.run(
        app="featureflags.http.app:create_app",
        factory=True,
        host=config.http.host,
        port=config.http.port,
        loop="uvloop",
        http="httptools",
        reload=config.http.reload,
        log_level=log_level,
    )
63 |
--------------------------------------------------------------------------------
/ui/src/context/auth.jsx:
--------------------------------------------------------------------------------
1 | import { createContext, useState } from 'react';
2 | import { gql, useMutation, useQuery } from '@apollo/client';
3 | import { useNavigate } from 'react-router-dom';
4 |
5 | // {auth, loading, actions}
6 | export const AuthContext = createContext({});
7 |
8 | const AUTH_QUERY = gql`
9 | query Auth {
10 | authenticated
11 | }
12 | `;
13 |
14 | const SIGN_IN_MUTATION = gql`
15 | mutation SignIn($username: String!, $password: String!) {
16 | signIn(username: $username, password: $password) {
17 | error
18 | }
19 | }
20 | `;
21 |
22 | const SIGN_OUT_MUTATION = gql`
23 | mutation SignOut {
24 | signOut {
25 | error
26 | }
27 | }
28 | `;
29 |
30 |
31 | export function AuthProvider({ children }) {
32 | const navigate = useNavigate();
33 | const [signInError, setSignInError] = useState(null);
34 | const { data, loading, refetch } = useQuery(AUTH_QUERY);
35 | const [signInMutation, signInInfo] = useMutation(SIGN_IN_MUTATION, {
36 | onCompleted: (data) => {
37 | if (!!data.signIn.error) {
38 | setSignInError(data.signIn.error);
39 | return;
40 | }
41 | refetch().then(() => {
42 | navigate('/');
43 | });
44 | }
45 | });
46 | const [signOutMutation, signOutInfo] = useMutation(SIGN_OUT_MUTATION, {
47 | onCompleted: () => {
48 | navigate('/sign-in');
49 | }
50 | });
51 | const authenticated = loading ? false : data.authenticated;
52 |
53 | const auth = {
54 | authenticated,
55 | };
56 |
57 | const actions = {
58 | signIn: [
59 | (username, password) => {
60 | setSignInError(null);
61 | signInMutation({ variables: { username, password } });
62 | },
63 | signInError || signInInfo.error,
64 | ],
65 | signOut: [
66 | () => {
67 | signOutMutation();
68 | },
69 | signOutInfo.error
70 | ]
71 | }
72 | return (
73 |
74 | {children}
75 |
76 | );
77 | }
78 |
--------------------------------------------------------------------------------
/featureflags/protobuf/backend.proto:
--------------------------------------------------------------------------------
1 | syntax = "proto3";
2 |
3 | package featureflags.backend;
4 |
5 | import 'hiku/protobuf/query.proto';
6 | import 'google/protobuf/timestamp.proto';
7 | import 'featureflags/protobuf/graph.proto';
8 |
// Persisted, globally unique entity id.
message Id {
    string value = 1;
}

// Request-local id, unique only within its scope; lets later
// operations in the same batch reference entities created earlier.
message LocalId {
    string value = 1;
    string scope = 2;
}

// Either a persisted Id or a request-local LocalId.
message EitherId {
    oneof kind {
        Id id = 1;
        LocalId local_id = 2;
    }
}

// Sign-in operation payload.
message SignIn {
    // string session = 1;
    string username = 2;
    string password = 3;
}

// Sign-out operation payload.
message SignOut {
    // string session = 1;
}

// Enable a flag.
message EnableFlag {
    Id flag_id = 1;
}

// Disable a flag.
message DisableFlag {
    Id flag_id = 1;
}

// Reset-flag operation payload.
message ResetFlag {
    Id flag_id = 1;
}

// Create a check: a (variable, operator, value) predicate.
message AddCheck {
    // Local id so AddCondition in the same batch can reference it.
    LocalId local_id = 1;
    Id variable = 2;
    featureflags.graph.Check.Operator operator = 3;
    // Exactly one value variant is set.
    oneof kind {
        string value_string = 4;
        double value_number = 5;
        google.protobuf.Timestamp value_timestamp = 6;
        featureflags.graph.Set value_set = 7;
    }
}

// Attach a condition (a group of checks) to a flag.
message AddCondition {
    Id flag_id = 1;
    LocalId local_id = 2;
    // Checks may be persisted ids or local ids from AddCheck ops.
    repeated EitherId checks = 3;
}

// Disable an existing condition.
message DisableCondition {
    Id condition_id = 1;
}

// One mutation; exactly one variant is set.
message Operation {
    oneof op {
        EnableFlag enable_flag = 1;
        DisableFlag disable_flag = 2;
        AddCondition add_condition = 3;
        DisableCondition disable_condition = 4;
        AddCheck add_check = 5;
        ResetFlag reset_flag = 6;
        SignIn sign_in = 7;
        SignOut sign_out = 8;
    }
}

// A batch of operations plus a hiku query for the data to return.
message Request {
    repeated Operation operations = 1;
    hiku.protobuf.query.Node query = 2;
}

message Reply {
    featureflags.graph.Result result = 1;
}

service Backend {
    // Deprecated lowercase alias of Call; kept for old clients.
    rpc call (Request) returns (Reply) {
        option deprecated = true;
    };
    rpc Call (Request) returns (Reply) {};
}
97 |
--------------------------------------------------------------------------------
/featureflags/web/app.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
3 | from fastapi import FastAPI
4 | from fastapi.responses import ORJSONResponse
5 | from fastapi.staticfiles import StaticFiles
6 |
7 | from featureflags.config import config
8 | from featureflags.metrics import configure_metrics
9 | from featureflags.web.api.graph import router as graphql_router
10 | from featureflags.web.api.health import router as health_router
11 | from featureflags.web.api.index import STATIC_DIR
12 | from featureflags.web.api.index import router as index_router
13 | from featureflags.web.container import Container
14 | from featureflags.web.lifecycle import configure_lifecycle
15 | from featureflags.web.middlewares import configure_middlewares
16 |
17 | log = logging.getLogger(__name__)
18 |
19 |
def create_app() -> FastAPI:
    """Build and fully configure the web UI FastAPI application."""
    app = FastAPI(
        debug=config.debug,
        default_response_class=ORJSONResponse,
    )

    di_container = Container()
    app.container = di_container  # type: ignore

    for router in (health_router, index_router, graphql_router):
        app.include_router(router)

    # Serve built UI assets; skip the directory existence check in debug
    # so a missing build does not prevent startup.
    app.mount(
        "/static",
        StaticFiles(
            directory=STATIC_DIR,
            check_dir=not config.debug,
            html=True,
        ),
        name="static",
    )

    configure_metrics(port=config.instrumentation.prometheus_port, app=app)
    configure_middlewares(app, di_container)
    configure_lifecycle(app, di_container)

    if config.sentry.enabled:
        # Imported lazily so Sentry is only pulled in when enabled.
        from featureflags.sentry import SentryMode, configure_sentry

        configure_sentry(config.sentry, env_prefix="web", mode=SentryMode.HTTP)

    return app
50 |
51 |
def main() -> None:
    """Entry point: run the web server under uvicorn."""
    import uvicorn

    from featureflags import __build_version__, __version__

    log.info(
        "Starting web server. Version: %s, Build version: %s",
        __version__,
        __build_version__,
    )

    log_level = "debug" if config.debug else "info"
    uvicorn.run(
        app="featureflags.web.app:create_app",
        factory=True,
        host=config.app.host,
        port=config.app.port,
        loop="uvloop",
        http="httptools",
        reload=config.app.reload,
        log_level=log_level,
    )
73 |
--------------------------------------------------------------------------------
/featureflags/protobuf/graph.proto:
--------------------------------------------------------------------------------
1 | syntax = "proto3";
2 |
3 | package featureflags.graph;
4 |
5 | import 'google/protobuf/timestamp.proto';
6 | import 'google/protobuf/wrappers.proto';
7 |
// Reference to an entity in one of the Result maps; the set oneof
// field names the entity type, the value is its key.
message Ref {
    oneof to {
        string Project = 1;
        string Flag = 2;
        string Condition = 3;
        string Check = 4;
        string Variable = 5;
    }
}

// A set-of-strings value, used by SET-typed checks.
message Set {
    repeated string items = 1;
}

message Variable {
    enum Type {
        __DEFAULT__ = 0;  // unset/unknown
        STRING = 1;
        NUMBER = 2;
        TIMESTAMP = 3;
        SET = 4;
    }
    string id = 1;
    string name = 2;
    Type type = 3;
}

// A single predicate: variable <operator> value.
message Check {
    enum Operator {
        __DEFAULT__ = 0;  // unset/unknown
        EQUAL = 1;
        LESS_THAN = 2;
        LESS_OR_EQUAL = 3;
        GREATER_THAN = 4;
        GREATER_OR_EQUAL = 5;
        CONTAINS = 6;
        PERCENT = 7;
        REGEXP = 8;
        WILDCARD = 9;
        SUBSET = 10;
        SUPERSET = 11;
    }
    string id = 1;
    Ref variable = 2;
    Operator operator = 3;
    // Exactly one value variant is set.
    oneof kind {
        string value_string = 4;
        double value_number = 5;
        google.protobuf.Timestamp value_timestamp = 6;
        Set value_set = 7;
    }
}

// A group of checks attached to a flag.
message Condition {
    string id = 1;
    repeated Ref checks = 2;
}

message Flag {
    string id = 1;
    string name = 2;
    Ref project = 3;
    // BoolValue wrappers distinguish "unset" from an explicit false.
    google.protobuf.BoolValue enabled = 4;
    repeated Ref conditions = 5;
    google.protobuf.BoolValue overridden = 6;
}

message Project {
    string id = 1;
    string name = 2;
    // NOTE: field 4 is declared before field 3; wire numbers, not
    // declaration order, are what matters.
    uint32 version = 4;
    repeated Ref variables = 3;
}

// Top-level query entry points.
message Root {
    Ref flag = 1;
    repeated Ref flags = 2;
    repeated Ref projects = 3;
    // string access_token = 4;
    bool authenticated = 5;
    repeated Ref flags_by_ids = 6;
}
90 |
91 | message Result {
92 | Root Root = 1;
93 | map Project = 2;
94 | map Flag = 3;
95 | map Condition = 4;
96 | map Check = 5;
97 | map Variable = 6;
98 | }
99 |
--------------------------------------------------------------------------------
/featureflags/migrations/versions/69f91d9fab0f_add_condition_position_column.py:
--------------------------------------------------------------------------------
1 | import sqlalchemy as sa
2 |
3 | from alembic import op
4 |
# Alembic revision identifiers.
revision = '69f91d9fab0f'  # this migration
down_revision = '2fa54f8b55c1'  # parent: cleaup_condition_checks
branch_labels = None
depends_on = None
9 |
10 |
def upgrade() -> None:
    """Add an ordering ``position`` column to ``condition`` and
    ``value_condition``, backfill it for existing rows, and enforce
    uniqueness of (parent, position).

    NOTE: uses PostgreSQL-specific ``UPDATE ... FROM`` and window
    functions — this migration is not portable to other databases.
    """
    # Add position column to condition table
    # (NOT NULL needs a server_default so existing rows can be filled).
    op.add_column(
        "condition",
        sa.Column("position", sa.Integer(), nullable=False, server_default="0"),
    )
    # Add position column to value_condition table
    op.add_column(
        "value_condition",
        sa.Column("position", sa.Integer(), nullable=False, server_default="0"),
    )
    # Initialize positions for existing records to avoid conflicts
    # For conditions: set position based on current order
    # (0-based ROW_NUMBER per flag, ordered by id).
    op.execute(
        """
        WITH numbered AS (
            SELECT id, ROW_NUMBER() OVER (PARTITION BY flag ORDER BY id) - 1 as pos
            FROM condition
        )
        UPDATE condition SET position = numbered.pos
        FROM numbered WHERE condition.id = numbered.id
        """
    )

    # Add unique constraint for flag + position
    # (created only after the backfill above, otherwise the default 0s
    # would collide).
    op.create_unique_constraint(
        "condition_flag_position_unique", "condition", ["flag", "position"]
    )

    # For value_conditions: set position based on current order
    op.execute(
        """
        WITH numbered AS (
            SELECT id, ROW_NUMBER() OVER (PARTITION BY value ORDER BY id) - 1 as pos
            FROM value_condition
        )
        UPDATE value_condition SET position = numbered.pos
        FROM numbered WHERE value_condition.id = numbered.id
        """
    )
    # Add unique constraint for value + position
    op.create_unique_constraint(
        "value_condition_value_position_unique",
        "value_condition",
        ["value", "position"],
    )
57 |
58 |
def downgrade() -> None:
    """Revert :func:`upgrade`: drop the unique constraints first, then the
    ``position`` columns (strict reverse order of creation).
    """
    # Drop unique constraints
    op.drop_constraint(
        "value_condition_value_position_unique", "value_condition", type_="unique"
    )
    op.drop_constraint(
        "condition_flag_position_unique", "condition", type_="unique"
    )

    # Drop position columns
    op.drop_column("value_condition", "position")
    op.drop_column("condition", "position")
71 |
--------------------------------------------------------------------------------
/featureflags/rpc/app.py:
--------------------------------------------------------------------------------
1 | import contextlib
2 | import logging
3 |
4 | import aiopg.sa
5 | from dependency_injector.wiring import Provide, inject
6 | from grpclib.health.service import Health
7 | from grpclib.reflection.service import ServerReflection
8 | from grpclib.server import Server
9 | from grpclib.utils import graceful_exit
10 | from hiku.engine import Engine
11 |
12 | from featureflags.config import config
13 | from featureflags.metrics import configure_metrics
14 | from featureflags.rpc.container import Container
15 | from featureflags.rpc.servicer import FeatureFlagsServicer
16 | from featureflags.services.auth import set_internal_user_session
17 |
18 | log = logging.getLogger(__name__)
19 |
20 |
@inject
async def create_server(
    db_engine: aiopg.sa.Engine = Provide[Container.db_engine],
    graph_engine: Engine = Provide[Container.graph_engine],
) -> Server:
    """Build the gRPC server: metrics endpoint, the FeatureFlags servicer,
    health checking and server reflection.

    Dependencies are injected from the DI :class:`Container`.
    """
    configure_metrics(port=config.instrumentation.prometheus_port)

    servicers = [
        FeatureFlagsServicer(db_engine=db_engine, graph_engine=graph_engine),
        Health(),
    ]
    # Reflection wraps the servicers so clients can discover the API.
    return Server(ServerReflection.extend(servicers))
36 |
37 |
async def main() -> None:
    """Run the gRPC server until stopped by a termination signal.

    Initializes DI container resources, installs the internal user
    session, optionally configures Sentry, then serves until closed.
    """
    from featureflags import __build_version__, __version__

    log.info(
        "Starting rpc server. Version: %s, Build version: %s",
        __version__,
        __build_version__,
    )

    async with contextlib.AsyncExitStack() as stack:
        container = Container()
        await container.init_resources()
        # FIX: register shutdown on the exit stack so container resources
        # are released even if startup below raises (previously shutdown
        # only ran on the happy path).
        stack.push_async_callback(container.shutdown_resources)

        log.info("Using internal user session")
        set_internal_user_session()

        if config.sentry.enabled:
            # Imported lazily: sentry_sdk is an optional dependency.
            from featureflags.sentry import SentryMode, configure_sentry

            configure_sentry(
                config.sentry, env_prefix="rpc", mode=SentryMode.GRPC
            )

        server = await create_server()
        # graceful_exit closes the server on SIGINT/SIGTERM.
        stack.enter_context(graceful_exit([server]))  # type: ignore

        await server.start(
            host=config.rpc.host,
            port=config.rpc.port,
        )
        log.info(
            "gRPC server listening on %s:%s",
            config.rpc.host,
            config.rpc.port,
        )

        await server.wait_closed()
    log.info("Exiting...")
77 |
--------------------------------------------------------------------------------
/ui/src/Dashboard/Conditions.jsx:
--------------------------------------------------------------------------------
1 | import {
2 | Button,
3 | Col,
4 | Row,
5 | Space,
6 | } from 'antd';
7 | import {
8 | CloseOutlined,
9 | PlusOutlined,
10 | } from '@ant-design/icons';
11 |
12 | import './Conditions.less';
13 | import {
14 | useChecks,
15 | useConditions,
16 | useFlagCtx,
17 | useFlagState,
18 | } from './context';
19 | import { Check } from './Check';
20 |
21 |
22 | const Condition = ({ onRemove, condition, projectName }) => {
23 | const { addCheck, deleteCheck } = useFlagCtx();
24 | const { checks } = useChecks();
25 |
26 | return (
27 |
28 | {condition.checks.map((checkId, idx) => {
29 | return (
30 |
deleteCheck(condition.id, checkId)}
36 | projectName={projectName}
37 | />
38 | )
39 | })}
40 |
41 |
42 |
43 |
47 |
51 |
52 |
53 |
54 |
55 | );
56 | }
57 |
58 | export const Conditions = () => {
59 | const flagState = useFlagState();
60 | const conditions = useConditions();
61 | const { addCondition, deleteCondition } = useFlagCtx();
62 |
63 | const noConditions = flagState.conditions.length === 0;
64 |
65 | return (
66 |
67 |
68 | {flagState.conditions.map((conditionId, idx) => {
69 | return (
70 | deleteCondition(conditions[conditionId])}
75 | projectName={flagState.projectName}
76 | />
77 | )
78 | })}
79 |
84 |
85 |
86 | );
87 | }
88 |
--------------------------------------------------------------------------------
/featureflags/http/types.py:
--------------------------------------------------------------------------------
1 | from typing import Any
2 |
3 | from pydantic import BaseModel, Field, model_validator
4 |
5 | from featureflags.models import Operator, VariableType
6 |
7 |
class CheckVariable(BaseModel):
    """Variable referenced by a check: its name and declared value type."""

    name: str
    type: VariableType


# Possible value fields for different types of variables.
# Scanned in this order; the first field present and not None wins.
CHECK_VALUE_FIELDS = (
    "value_string",
    "value_number",
    "value_timestamp",
    "value_set",
)
20 |
21 |
class Check(BaseModel):
    """A single condition check: variable, operator and a comparison value.

    Incoming payloads carry the value in one of the typed fields listed in
    ``CHECK_VALUE_FIELDS``; the validator below collapses it into ``value``.
    """

    variable: CheckVariable
    operator: Operator
    value: str | float | list | None = None

    @model_validator(mode="before")
    def check_and_assign_value(
        cls,  # noqa: N805
        values: dict[str, Any],
    ) -> dict[str, Any]:
        """
        Value can be any type from `CHECK_VALUE_FIELDS`, but we want
        to find one that is not None and assign to `Check.value`.
        """

        for field in CHECK_VALUE_FIELDS:
            if field in values and values[field] is not None:
                values["value"] = values[field]
                break
        return values
42 |
43 |
class Condition(BaseModel):
    """A group of checks attached to a flag."""

    checks: list[Check]


class Flag(BaseModel):
    """Feature flag state together with its conditions."""

    name: str
    enabled: bool
    overridden: bool
    conditions: list[Condition]


class ValueCondition(BaseModel):
    """A group of checks plus the override value associated with it."""

    checks: list[Check]
    value_override: str


class Value(BaseModel):
    """Feature value: like a flag, but carrying string payloads
    (default and override) instead of a plain boolean."""

    name: str
    enabled: bool
    overridden: bool
    conditions: list[ValueCondition]
    value_default: str
    value_override: str


class Variable(BaseModel):
    """Variable declaration sent by clients: name and value type."""

    name: str
    type: VariableType
72 |
73 |
class PreloadFlagsRequest(BaseModel):
    """Preload request: the project state known to the client."""

    project: str
    version: int
    variables: list[Variable] = Field(default_factory=list)
    flags: list[str] = Field(default_factory=list)
    # (name, value) pairs; the value may be a string or an int.
    values: list[tuple[str, str | int]] = Field(default_factory=list)


class PreloadFlagsResponse(BaseModel):
    """Flag/value state for the project plus its current version."""

    flags: list[Flag] = Field(default_factory=list)
    values: list[Value] = Field(default_factory=list)
    version: int


class SyncFlagsRequest(BaseModel):
    """Sync request: names of the flags/values the client tracks."""

    project: str
    version: int
    flags: list[str] = Field(default_factory=list)
    values: list[str] = Field(default_factory=list)


class SyncFlagsResponse(BaseModel):
    """Same shape as `PreloadFlagsResponse`, returned for sync requests."""

    flags: list[Flag] = Field(default_factory=list)
    values: list[Value] = Field(default_factory=list)
    version: int
99 |
--------------------------------------------------------------------------------
/featureflags/services/ldap.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from abc import ABC, abstractmethod
3 | from string import Template
4 |
5 | import ldap3
6 | from ldap3.core.exceptions import (
7 | LDAPBindError,
8 | LDAPException,
9 | )
10 |
11 | from featureflags.utils import escape_dn_chars
12 |
13 | log = logging.getLogger(__name__)
14 |
15 |
class BaseLDAP(ABC):
    """Interface for LDAP credential-checking backends."""

    @abstractmethod
    async def check_credentials(
        self,
        user: str | None,
        password: str | None,
        *,
        connect_timeout: int = 5,
        receive_timeout: int = 5,
    ) -> tuple[bool, str | None]:
        """Return ``(user_is_bound, error_message)``.

        ``error_message`` is ``None`` when the bind succeeded.
        Timeouts are in seconds (passed through to the LDAP client).
        """
        raise NotImplementedError()
27 |
28 |
class DummyLDAP(BaseLDAP):
    """In-memory stand-in for an LDAP backend.

    Always answers with the fixed ``user_is_bound`` flag chosen at
    construction time; credentials and timeouts are ignored.
    """

    def __init__(self, *, user_is_bound: bool = False) -> None:
        self.user_is_bound = user_is_bound

    async def check_credentials(
        self,
        user: str | None,
        password: str | None,
        *,
        connect_timeout: int = 5,
        receive_timeout: int = 5,
    ) -> tuple[bool, str | None]:
        # No real bind is attempted: report the preconfigured outcome,
        # never an error message.
        result = (self.user_is_bound, None)
        return result
42 |
43 |
class LDAP(BaseLDAP):
    """Checks credentials by performing a bind against a real LDAP server.

    ``base_dn`` is a :class:`string.Template` pattern with a ``$user``
    placeholder that is filled with the DN-escaped username.
    """

    def __init__(self, host: str, base_dn: str) -> None:
        self._host = host
        self._base_dn = base_dn

    async def check_credentials(
        self,
        user: str | None,
        password: str | None,
        *,
        connect_timeout: int = 5,
        receive_timeout: int = 5,
    ) -> tuple[bool, str | None]:
        """Try to bind as ``user``; return ``(user_is_bound, error_msg)``."""
        server = ldap3.Server(self._host, connect_timeout=connect_timeout)

        # Substitute the (DN-escaped) username into the DN template.
        dn_tpl = Template(self._base_dn)
        dn = dn_tpl.safe_substitute(user=escape_dn_chars(user))

        error_msg = None
        try:
            with ldap3.Connection(
                server=server,
                user=dn,
                password=password,
                receive_timeout=receive_timeout,
            ) as connection:
                user_is_bound = connection.bind()
                log.debug(
                    "LDAP -> Who am I: %s",
                    connection.extend.standard.who_am_i(),
                )
        except LDAPException as e:
            user_is_bound = False
            # Exact-type check kept on purpose to preserve existing
            # behavior for LDAPBindError subclasses.
            if type(e) is LDAPBindError:
                error_msg = "Invalid username or password"
            else:
                # `message` is not present on every LDAPException subclass.
                error_msg = getattr(e, "message", str(e))
                error_msg = f"Error: {error_msg}"
            # FIX: lazy %-formatting instead of an f-string in the log call.
            log.error("LDAP -> Bind error: %s", error_msg)

        return user_is_bound, error_msg
88 |
--------------------------------------------------------------------------------
/featureflags/config.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import os
3 | from pathlib import Path
4 |
5 | import yaml
6 | from pydantic import Field
7 | from pydantic_settings import BaseSettings
8 |
log = logging.getLogger(__name__)

# Environment variable naming the YAML config file to load.
CONFIG_PATH_ENV_VAR = "CONFIG_PATH"

# NOTE(review): `Path().parent` is just `"."`, so this resolves relative to
# the *current working directory*, not to this file — presumably intentional
# (matches docker-compose's `CONFIG_PATH: configs/local.yaml`), but confirm.
CONFIGS_DIR = Path().parent / "configs"
DEFAULT_CONFIG_PATH = CONFIGS_DIR / "local.yaml"
15 |
16 |
class LoggingSettings(BaseSettings):
    """Logging configuration: levels, handlers and optional syslog routing."""

    level_app: str  # level for application loggers
    level_libs: str  # level for third-party library loggers
    handlers: list[str]
    syslog_app: str | None
    syslog_facility: str | None
    syslog_mapping: dict | None
    syslog_defaults: dict | None


class PostgresSettings(BaseSettings):
    """PostgreSQL connection settings.

    ``user``/``password`` may also come from the ``PGUSER``/``PGPASS``
    environment variables (see the field aliases).
    """

    host: str
    port: int
    user: str = Field(..., alias="PGUSER")
    password: str = Field(..., alias="PGPASS")
    database: str
    timeout: int = 10  # passed to the driver; presumably seconds — confirm

    @property
    def dsn(self) -> str:
        """Plain ``postgresql://`` DSN assembled from the fields above."""
        return (
            f"postgresql://{self.user}"
            f":{self.password}"
            f"@{self.host}"
            f":{self.port}"
            f"/{self.database}"
        )
44 |
45 |
class LdapSettings(BaseSettings):
    """LDAP server host and bind-DN template (both optional)."""

    host: str | None
    base_dn: str | None


class InstrumentationSettings(BaseSettings):
    """Metrics exposure; Prometheus is disabled when the port is None."""

    prometheus_port: int | None = None


class AppSettings(BaseSettings):
    """Web (UI/GraphQL) application server settings."""

    port: int = 8080
    host: str = "0.0.0.0"
    reload: bool = False
    max_concurrent_threads: int = 40


class HttpSettings(BaseSettings):
    """HTTP API server settings."""

    port: int = 8081
    host: str = "0.0.0.0"
    reload: bool = False
    max_concurrent_threads: int = 40


class RpcSettings(BaseSettings):
    """gRPC server settings."""

    port: int = 50051
    host: str = "0.0.0.0"


class SentrySettings(BaseSettings):
    """Sentry SDK settings; everything is ignored unless ``enabled``."""

    enabled: bool = False
    dsn: str | None = None
    env: str | None = None
    enable_tracing: bool = True
    traces_sample_rate: float = 1.0
    shutdown_timeout: int = 1  # seconds given to flush events on exit
81 |
82 |
class Config(BaseSettings):
    """Top-level application config, mirrors the YAML config file layout.

    ``secret`` may also come from the ``SECRET`` environment variable
    (see the field alias).
    """

    debug: bool
    secret: str = Field(..., alias="SECRET")
    test_environ: bool = False

    postgres: PostgresSettings
    ldap: LdapSettings
    logging: LoggingSettings
    instrumentation: InstrumentationSettings
    sentry: SentrySettings

    app: AppSettings
    rpc: RpcSettings
    http: HttpSettings
97 |
98 |
def _load_config() -> Config:
    """Load and validate :class:`Config` from the YAML file named by the
    ``CONFIG_PATH`` environment variable (default: ``configs/local.yaml``).
    """
    config_path = os.environ.get(CONFIG_PATH_ENV_VAR, DEFAULT_CONFIG_PATH)
    log.info("Reading config from %s", config_path)

    # FIX: explicit encoding — don't depend on the platform default.
    with open(config_path, encoding="utf-8") as f:
        # safe_load: never execute arbitrary YAML tags from the config file.
        config_data = yaml.safe_load(f)

    return Config(**config_data)


# Singleton settings object, loaded eagerly at import time.
config = _load_config()
110 |
--------------------------------------------------------------------------------
/ui/src/Dashboard/constants.js:
--------------------------------------------------------------------------------
// Variable value types.
// NOTE: numeric values mirror `Variable.Type` in protobuf/graph.proto —
// keep both in sync.
export const Type = {
    __DEFAULT__: 0,
    STRING: 1,
    NUMBER: 2,
    TIMESTAMP: 3,
    SET: 4
}

// Check comparison operators.
// NOTE: numeric values mirror `Check.Operator` in protobuf/graph.proto —
// keep both in sync.
export const Operator = {
    __DEFAULT__: 0,
    EQUAL: 1,
    LESS_THAN: 2,
    LESS_OR_EQUAL: 3,
    GREATER_THAN: 4,
    GREATER_OR_EQUAL: 5,
    CONTAINS: 6,
    PERCENT: 7,
    REGEXP: 8,
    WILDCARD: 9,
    SUBSET: 10,
    SUPERSET: 11
};
23 |
// Operators applicable to each variable type (e.g. only SET supports
// SUBSET/SUPERSET).
export const TYPES = {
    [Type.STRING]: {
        operators: [
            Operator.EQUAL,
            Operator.LESS_THAN,
            Operator.LESS_OR_EQUAL,
            Operator.GREATER_THAN,
            Operator.GREATER_OR_EQUAL,
            Operator.PERCENT,
            Operator.CONTAINS,
            Operator.REGEXP,
            Operator.WILDCARD
        ]
    },
    [Type.NUMBER]: {
        operators: [
            Operator.EQUAL,
            Operator.LESS_THAN,
            Operator.LESS_OR_EQUAL,
            Operator.GREATER_THAN,
            Operator.GREATER_OR_EQUAL,
            Operator.PERCENT
        ]
    },
    [Type.TIMESTAMP]: {
        operators: [
            Operator.EQUAL,
            Operator.LESS_THAN,
            Operator.LESS_OR_EQUAL,
            Operator.GREATER_THAN,
            Operator.GREATER_OR_EQUAL
        ]
    },
    [Type.SET]: {
        operators: [
            Operator.EQUAL,
            Operator.SUBSET,
            Operator.SUPERSET
        ]
    }
};

// Human-readable titles for operators.  Note: SUPERSET is shown as
// "includes" and SUBSET as "included in".
export const Operators = {
    [Operator.EQUAL]: {title: 'equal'},
    [Operator.LESS_THAN]: {title: 'less than'},
    [Operator.LESS_OR_EQUAL]: {title: 'less or equal'},
    [Operator.GREATER_THAN]: {title: 'greater than'},
    [Operator.GREATER_OR_EQUAL]: {title: 'greater or equal'},
    [Operator.PERCENT]: {title: 'percent'},
    [Operator.CONTAINS]: {title: 'contains'},
    [Operator.REGEXP]: {title: 'regexp'},
    [Operator.WILDCARD]: {title: 'wildcard'},
    [Operator.SUPERSET]: {title: 'includes'},
    [Operator.SUBSET]: {title: 'included in'}
};
79 |
// Value-field names; mirror the `Check.kind` oneof field names in
// protobuf/graph.proto.
export const KIND = {
    VALUE_STRING: 'value_string',
    VALUE_NUMBER: 'value_number',
    VALUE_TIMESTAMP: 'value_timestamp',
    VALUE_SET: 'value_set',
}

// Value-field kind -> variable type.
export const KIND_TO_TYPE = {
    [KIND.VALUE_STRING]: Type.STRING,
    [KIND.VALUE_NUMBER]: Type.NUMBER,
    [KIND.VALUE_TIMESTAMP]: Type.TIMESTAMP,
    [KIND.VALUE_SET]: Type.SET,
};

// Variable type -> value-field kind (inverse of KIND_TO_TYPE).
export const TYPE_TO_KIND = {
    [Type.STRING]: KIND.VALUE_STRING,
    [Type.NUMBER]: KIND.VALUE_NUMBER,
    [Type.TIMESTAMP]: KIND.VALUE_TIMESTAMP,
    [Type.SET]: KIND.VALUE_SET,
};
100 |
--------------------------------------------------------------------------------
/ui/src/Dashboard/Settings.jsx:
--------------------------------------------------------------------------------
1 | import { useNavigate } from "react-router-dom";
2 | import { useState } from "react";
3 | import { Button, message, Space, Typography, Modal, Card } from 'antd';
4 | import { useMutation } from '@apollo/client';
5 | import { DELETE_PROJECT_MUTATION } from "./queries";
6 | import { HeaderTabs } from "./Tabs";
7 |
8 |
9 | const View = ({ children }) => {
10 | return (
11 |
18 |
19 | {children}
20 |
21 | );
22 | };
23 |
24 |
25 | const DeleteButton = ({ projectName, onDelete }) => {
26 | const [modalOpen, setModalOpen] = useState(false);
27 |
28 | const handleOk = () => {
29 | onDelete();
30 | setModalOpen(false);
31 | };
32 |
33 | const handleClose = () => {
34 | setModalOpen(false);
35 | };
36 |
37 | return (
38 | <>
39 |
46 |
56 | Are you sure you want to delete project
57 | {projectName}?
58 |
59 | >
60 | );
61 | };
62 |
63 | export const SettingsContainer = ({ projectName, projectsMap }) => {
64 | const project = projectsMap[projectName];
65 | const navigate = useNavigate();
66 |
67 | const [deleteProject] = useMutation(DELETE_PROJECT_MUTATION, {
68 | variables: { id: project.id },
69 | onCompleted: (data) => {
70 | if (data.deleteProject && data.deleteProject.error) {
71 | message.error(data.deleteProject.error);
72 | } else {
73 | message.success(`Project "${project.name}" removed successfully`);
74 | setTimeout(() => {
75 | navigate(`/`);
76 | window.location.reload();
77 | }, 2000);
78 | }
79 | },
80 | onError: (error) => {
81 | message.error(`Error removing project "${project.name}": ${error.message}`);
82 | }
83 | });
84 |
85 | const handleRemove = () => {
86 | deleteProject();
87 | };
88 |
89 | return (
90 |
91 | Project settings}
95 | >
96 |
97 |
101 |
102 |
103 |
104 | );
105 | }
106 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
# Base layer: Python + compiled production dependencies.
FROM python:3.11-slim-bullseye AS base

ENV PIP_VERSION=23.2.1
ENV PDM_VERSION=2.26.0
ENV UV_VERSION=0.4.20
ENV PDM_USE_VENV=no

WORKDIR /app

COPY ./pyproject.toml .
COPY ./pdm.lock .

# for pyproject.toml to extract version
COPY ./featureflags/__init__.py ./featureflags/__init__.py
# for pyproject.toml to read readme
COPY ./README.rst .

RUN apt-get update && \
    apt-get install -y --no-install-recommends \
        libpq-dev \
        gcc \
        make \
        g++ \
        git && \
    # install tools
    pip install --upgrade pip==${PIP_VERSION} && \
    pip install pdm==${PDM_VERSION} && \
    pip install uv==${UV_VERSION} && \
    # configure
    pdm config cache_dir /pdm_cache && \
    pdm config check_update false && \
    # install base deps \
    pdm export --prod -o requirements-base.txt -f requirements && \
    uv pip install --system -r requirements-base.txt --no-deps --no-cache-dir --index-strategy unsafe-best-match && \
    # cleanup base layer to keep image size small
    apt purge --auto-remove -y \
        gcc \
        make \
        g++ \
        git && \
    rm -rf /var/cache/apt && \
    # FIX: apt stores package lists under /var/lib/apt/lists;
    # the previous "/var/lib/apt/list" path deleted nothing.
    rm -rf /var/lib/apt/lists && \
    rm -rf $HOME/.cache

# UI assets: install node deps once; build only in the prod variant.
FROM node:18-bullseye-slim AS assets-base
ENV NODE_PATH=/node_modules
ENV PATH=$PATH:/node_modules/.bin

COPY ui ui
RUN cd ui && npm ci

FROM assets-base AS assets-dev

FROM assets-base AS assets-prod
RUN cd ui && npm run build

FROM base AS dev
RUN pdm export -G dev -G sentry -G lint -o requirements-dev.txt -f requirements && \
    uv pip install --system -r requirements-dev.txt --no-deps --no-cache-dir --index-strategy unsafe-best-match

FROM base AS test

RUN pdm export -G test -o requirements-test.txt -f requirements && \
    uv pip install --system -r requirements-test.txt --no-deps --no-cache-dir --index-strategy unsafe-best-match

FROM base AS docs
RUN pdm export -G docs -o requirements-docs.txt -f requirements && \
    uv pip install --system -r requirements-docs.txt --no-deps --no-cache-dir --index-strategy unsafe-best-match

# Production image.
FROM base AS prd

ARG APP_VERSION=0.0.0-dev
RUN echo "${APP_VERSION}" > /app_version

# tini: minimal init that reaps zombies and forwards signals.
ENV TINI_VERSION=v0.19.0
ADD "https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini" "/tini"
RUN chmod +x /tini

ENV GRPC_HEALTH_PROBE_VERSION=v0.4.19
ADD "https://github.com/grpc-ecosystem/grpc-health-probe/releases/download/${GRPC_HEALTH_PROBE_VERSION}/grpc_health_probe-linux-amd64" "/usr/local/bin/grpc_health_probe"
RUN chmod +x /usr/local/bin/grpc_health_probe

COPY ./featureflags /app/featureflags
# clear static folder in case it exists on host machine
RUN rm -rf /app/featureflags/web/static

COPY --from=assets-prod "ui/dist" "featureflags/web/static"

# Precompile bytecode (optimization level 2) for faster startup.
RUN python3 -m compileall -j=0 -o=2 -f featureflags

ENTRYPOINT ["/tini", "--", "python3", "-m"]
--------------------------------------------------------------------------------
/featureflags/sentry.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from enum import Enum
3 |
4 | try:
5 | import sentry_sdk
6 | from sentry_sdk.integrations.asyncio import AsyncioIntegration
7 | from sentry_sdk.integrations.atexit import AtexitIntegration
8 | from sentry_sdk.integrations.dedupe import DedupeIntegration
9 | from sentry_sdk.integrations.excepthook import ExcepthookIntegration
10 | from sentry_sdk.integrations.fastapi import FastApiIntegration
11 | from sentry_sdk.integrations.grpc import GRPCIntegration
12 | from sentry_sdk.integrations.logging import LoggingIntegration
13 | from sentry_sdk.integrations.sqlalchemy import SqlalchemyIntegration
14 | from sentry_sdk.integrations.starlette import StarletteIntegration
15 | from sentry_sdk.integrations.stdlib import StdlibIntegration
16 | from sentry_sdk.integrations.threading import ThreadingIntegration
17 | except ImportError:
18 | raise ImportError(
19 | "`sentry_sdk` is not installed, please install it to use `sentry` "
20 | "like this `pip install 'evo-featureflags-server[sentry]'`"
21 | ) from None
22 |
23 | from featureflags import __version__
24 | from featureflags.config import SentrySettings
25 |
26 | log = logging.getLogger(__name__)
27 |
28 |
class SentryMode(Enum):
    """Service flavour being instrumented; selects which framework-specific
    Sentry integrations are registered on top of the common set."""

    HTTP = "http"
    GRPC = "grpc"
33 |
34 | def configure_sentry(
35 | config: SentrySettings,
36 | env_prefix: str | None = None,
37 | mode: SentryMode = SentryMode.HTTP,
38 | ) -> None:
39 | """
40 | Configure error logging to Sentry.
41 | """
42 |
43 | env = f"{env_prefix}-{config.env}" if env_prefix else config.env
44 |
45 | integrations = [
46 | AsyncioIntegration(),
47 | AtexitIntegration(),
48 | ExcepthookIntegration(),
49 | DedupeIntegration(),
50 | StdlibIntegration(),
51 | ThreadingIntegration(),
52 | LoggingIntegration(),
53 | SqlalchemyIntegration(),
54 | ]
55 |
56 | match mode:
57 | case mode.HTTP:
58 | # Add FastApi specific integrations.
59 | integrations.extend(
60 | [
61 | StarletteIntegration(transaction_style="endpoint"),
62 | FastApiIntegration(transaction_style="endpoint"),
63 | ]
64 | )
65 | case mode.GRPC:
66 | # Add gRPC specific integrations.
67 | integrations.append(GRPCIntegration())
68 | case _:
69 | raise ValueError(f"{mode} option is not supported")
70 |
71 | sentry_sdk.init(
72 | dsn=config.dsn,
73 | environment=env,
74 | release=__version__,
75 | shutdown_timeout=config.shutdown_timeout,
76 | send_default_pii=True,
77 | default_integrations=False,
78 | auto_enabling_integrations=False,
79 | max_breadcrumbs=1000,
80 | enable_tracing=config.enable_tracing,
81 | traces_sample_rate=config.traces_sample_rate,
82 | integrations=integrations,
83 | )
84 | log.info(f"Sentry initialized with env: `{env}` in mode: `{mode}`")
85 |
--------------------------------------------------------------------------------
/ui/src/Dashboard/ValueConditions.jsx:
--------------------------------------------------------------------------------
1 | import {
2 | Button,
3 | Col,
4 | Row,
5 | Space,
6 | Input,
7 | } from 'antd';
8 | import {
9 | CloseOutlined,
10 | PlusOutlined,
11 | } from '@ant-design/icons';
12 |
13 | import './ValueConditions.less';
14 | import {
15 | useValueChecks,
16 | useValueConditions,
17 | useValueCtx,
18 | useValueState,
19 | } from './context';
20 | import { ValueCheck } from './ValueCheck';
21 |
22 |
23 | const ValueCondition = ({ onRemove, condition, onValueConditionOverrideChange, projectName }) => {
24 | const { addCheck, deleteCheck } = useValueCtx();
25 | const { checks } = useValueChecks();
26 |
27 | return (
28 |
64 | );
65 | }
66 |
67 | export const ValueConditions = () => {
68 | const value = useValueState();
69 | const conditions = useValueConditions();
70 | const { addCondition, deleteCondition, updateValueConditionOverride } = useValueCtx();
71 |
72 | const noConditions = value.conditions.length === 0;
73 |
74 | return (
75 |
76 |
77 | {value.conditions.map((conditionId, idx) => {
78 | return (
79 | deleteCondition(conditions[conditionId])}
84 | onValueConditionOverrideChange={(e) => updateValueConditionOverride(conditionId, e.target.value)}
85 | projectName={value.projectName}
86 | />
87 | )
88 | })}
89 |
94 |
95 |
96 | );
97 | }
98 |
--------------------------------------------------------------------------------
/featureflags/tests/conftest.py:
--------------------------------------------------------------------------------
1 | import uuid
2 | from collections.abc import AsyncGenerator
3 | from typing import TYPE_CHECKING
4 |
5 | import aiopg.sa
6 | import pytest
7 | import pytest_asyncio
8 | from fastapi import FastAPI
9 | from hiku.engine import Engine
10 | from sqlalchemy import text
11 |
12 | from featureflags.alembic import main as alembic_main
13 | from featureflags.graph.types import Changes, DirtyProjects, ValuesChanges
14 | from featureflags.models import metadata
15 | from featureflags.services.auth import TestSession, user_session
16 | from featureflags.services.ldap import BaseLDAP
17 | from featureflags.web.app import create_app
18 |
19 | if TYPE_CHECKING:
20 | from featureflags.web.container import Container
21 |
22 |
@pytest.fixture
def app() -> FastAPI:
    """Fresh FastAPI application (with its DI container) per test."""
    return create_app()
26 |
27 |
@pytest_asyncio.fixture
async def container(app: FastAPI) -> AsyncGenerator["Container", None]:
    """Yield the app's DI container, shutting its resources down afterwards.

    FIX: decorated with `pytest_asyncio.fixture` like the other async
    fixtures in this module — a plain `pytest.fixture` async generator is
    not executed under pytest-asyncio's strict mode.
    """
    try:
        yield app.container  # type: ignore
    finally:
        await app.container.shutdown_resources()  # type: ignore
34 |
35 |
def migrate_up() -> None:
    """Apply all Alembic migrations up to ``head``."""
    alembic_main(["upgrade", "head"])
38 |
39 |
async def migrate_down(
    db_engine: aiopg.sa.Engine,
    skip_tables: list | None = None,
) -> None:
    """Truncate every model table (except ``skip_tables``), resetting
    identities and cascading to dependents.  The schema itself is kept.
    """
    skipped = set(skip_tables or [])
    quoted = [
        f'"{table.name}"'
        for table in metadata.sorted_tables
        if table.name not in skipped
    ]
    if not quoted:
        # Nothing left to truncate — avoid emitting invalid SQL.
        return

    statement = f"TRUNCATE TABLE {', '.join(quoted)} RESTART IDENTITY CASCADE"
    async with db_engine.acquire() as connection:
        await connection.execute(text(statement))
58 |
59 |
@pytest_asyncio.fixture(autouse=True)
async def db_engine(
    container: "Container",
) -> AsyncGenerator[aiopg.sa.Engine, None]:
    """Database engine with a fully migrated schema.

    Autouse: every test runs against a migrated database; all tables are
    truncated afterwards via `migrate_down` (the schema is kept).
    """
    engine = await container.db_engine()
    try:
        migrate_up()
        yield engine
    finally:
        await migrate_down(engine)
70 |
71 |
@pytest_asyncio.fixture
async def conn(
    db_engine: aiopg.sa.Engine,
) -> AsyncGenerator[aiopg.sa.SAConnection, None]:
    """Single acquired DB connection, released when the test finishes."""
    async with db_engine.acquire() as connection:
        yield connection
78 |
79 |
@pytest.fixture(autouse=True)
def test_session() -> TestSession:
    """Install a `TestSession` with a random user id into the context-local
    `user_session` and return it (autouse: applied to every test)."""
    user_session.set(TestSession(user=uuid.uuid4()))  # type: ignore
    return user_session.get()  # type: ignore
84 |
85 |
@pytest.fixture
def dirty_projects() -> DirtyProjects:
    """Empty `DirtyProjects` accumulator."""
    return DirtyProjects()


@pytest.fixture
def changes() -> Changes:
    """Empty `Changes` accumulator."""
    return Changes()


@pytest.fixture
def values_changes() -> ValuesChanges:
    """Empty `ValuesChanges` accumulator."""
    return ValuesChanges()


@pytest.fixture
def graph_engine(container: "Container") -> Engine:
    """Hiku graph engine resolved from the app container."""
    return container.graph_engine()


@pytest.fixture
def ldap(container: "Container") -> BaseLDAP:
    """LDAP service resolved from the app container."""
    return container.ldap_service()
109 |
--------------------------------------------------------------------------------
/docker-compose.yaml:
--------------------------------------------------------------------------------
services:
  postgres:
    image: postgres:13-alpine
    ports:
      - "127.0.0.1:5432:5432"
    environment:
      POSTGRES_USER: postgres
      POSTGRES_PASSWORD: postgres
      POSTGRES_DB: featureflags
    volumes:
      - "pg-data:/var/lib/postgresql/data"
      - "./seeds:/var/lib/postgresql/seeds"
    networks:
      - main
    healthcheck:
      # FIX: CMD-SHELL takes the whole command as ONE string.  With the
      # previous ["CMD-SHELL", "pg_isready", "-d", "featureflags"] form the
      # extra items became $0/$1 of `sh -c` and "-d featureflags" was
      # silently ignored.
      test: [ "CMD-SHELL", "pg_isready -d featureflags" ]
      interval: 30s
      timeout: 60s
      retries: 5
      start_period: 80s

  postgres-test:
    image: postgres:13-alpine
    environment:
      POSTGRES_USER: postgres
      POSTGRES_PASSWORD: postgres
      POSTGRES_DB: featureflags-test
    networks:
      - main
    healthcheck:
      # Same CMD-SHELL single-string fix as above.
      test: [ "CMD-SHELL", "pg_isready -d featureflags-test" ]
      interval: 30s
      timeout: 60s
      retries: 5
      start_period: 80s

  ldap:
    image: glauth/glauth
    volumes:
      - ./configs/ldap.cfg:/app/config/config.cfg
    networks:
      - main

  # Shared definition for all application containers, extended below via
  # YAML anchors (&backend / &env).
  backend: &backend
    image: featureflags-server-dev
    init: true
    tty: true
    stdin_open: true
    environment: &env
      LANG: C.UTF-8
      LC_ALL: C.UTF-8
      PYTHONIOENCODING: UTF-8
      PYTHONUNBUFFERED: 1
      CONFIG_PATH: configs/local.yaml
      PGUSER: postgres
      PGPASS: postgres
      SECRET: 023517f06f444118986877e636b4a226
      BUILD_VERSION: ${BUILD_VERSION}
    networks:
      - main
    volumes:
      - ./featureflags:/app/featureflags
      - ./pyproject.toml:/app/pyproject.toml
      - ./scripts:/app/scripts
      - ./configs:/app/configs
      - ./.ipython:/app/.ipython
      - ./README.rst:/app/README.rst
      - ./docs:/app/docs

  web:
    <<: *backend
    depends_on:
      postgres:
        condition: service_healthy
      ldap:
        condition: service_started
    ports:
      - "127.0.0.1:8080:8080"
    command: watchfiles --filter python "python3 -m featureflags web" featureflags

  rpc:
    <<: *backend
    depends_on:
      postgres:
        condition: service_healthy
    ports:
      - "127.0.0.1:50051:50051"
    command: watchfiles --filter python "python3 -m featureflags rpc" featureflags

  http:
    <<: *backend
    depends_on:
      postgres:
        condition: service_healthy
    ports:
      - "127.0.0.1:8081:8081"
    command: watchfiles --filter python "python3 -m featureflags http" featureflags

  ishell:
    <<: *backend
    command: ipython --ipython-dir=/app/.ipython

  test:
    <<: *backend
    image: featureflags-server-test

    environment:
      <<: *env
      CONFIG_PATH: configs/test.yaml
    depends_on:
      postgres-test:
        condition: service_healthy
    command: python -m pytest ${ARGS}

  docs:
    <<: *backend
    image: featureflags-server-docs
    command: sphinx-build -a -b html docs public

volumes:
  pg-data:
    driver: local

networks:
  main:
    driver: bridge
127 |
--------------------------------------------------------------------------------
/ui/src/Auth.jsx:
--------------------------------------------------------------------------------
1 | import { Layout, Typography, Input, Button, Form } from 'antd';
2 |
3 | const { Content } = Layout;
4 | const { Title } = Typography;
5 |
6 | import { Base } from './Base';
7 | import { useSignIn } from './hooks';
8 | import { LockOutlined, UserOutlined } from '@ant-design/icons';
9 | import { useEffect, useState } from 'react';
10 |
11 |
12 | const AuthForm = () => {
13 | const [signIn, error] = useSignIn();
14 | const onFinish = ({ username, password }) => {
15 | signIn(username, password);
16 | };
17 |
18 | return (
19 |
38 | }
42 | />
43 |
44 |
45 |
54 | }
58 | />
59 |
60 |
61 |
62 |
70 |
71 | {error && {error}
}
72 |
73 | )
74 | }
75 |
76 | function Auth() {
77 | const [_, error] = useSignIn();
78 | const [invalid, setInvalid] = useState(false);
79 |
80 | useEffect(() => {
81 | if (error) {
82 | setInvalid(true);
83 | setTimeout(() => {
84 | setInvalid(false);
85 | }, 500);
86 | }
87 | }, [error]);
88 |
89 | return (
90 |
91 |
92 |
97 |
104 |
105 |
106 | Sign in to FeatureFlags
107 |
108 |
109 |
121 |
122 |
123 |
124 |
125 | )
126 | }
127 |
128 | export { Auth };
129 |
--------------------------------------------------------------------------------
/ui/src/Base.jsx:
--------------------------------------------------------------------------------
1 | import { useEffect, useRef, useState } from 'react'
2 | import { Navigate, useLocation, useNavigate } from 'react-router-dom';
3 | import { Layout, Typography, Space, Button, Flex, Input } from 'antd';
4 | const { Header } = Layout;
5 | const { Link } = Typography;
6 |
7 | import { Logo } from './components/Logo';
8 | import { Version } from './components/Version';
9 | import './Base.less';
10 | import { useAuth, useSignOut } from './hooks';
11 | import { CenteredSpinner } from './components/Spinner';
12 | import { SearchOutlined } from "@ant-design/icons";
13 |
14 |
15 | function Base({ children }) {
16 | const location = useLocation();
17 | const { auth, loading } = useAuth();
18 | const [signOut] = useSignOut();
19 | const [inputValue, setInputValue] = useState('');
20 |
21 | if (!loading && !auth.authenticated && location.pathname !== '/sign-in') {
22 | return
23 | }
24 |
25 | const navigate = useNavigate();
26 | const queryParams = new URLSearchParams(location.search);
27 | const tab = queryParams.get('tab') === 'values' ? 'values' : 'flags';
28 |
29 | const handleSearchTermChange = (e) => {
30 | const value = e.target.value;
31 | setInputValue(value);
32 |
33 | if (value === '') {
34 | queryParams.delete('term');
35 | navigate(`/?${queryParams.toString()}`);
36 | }
37 | };
38 |
39 | const setSearchTermToUrl = (e) => {
40 | const value = e.target.value;
41 | queryParams.set('term', value);
42 | queryParams.set('tab', tab);
43 | navigate(`/?${queryParams.toString()}`);
44 | };
45 |
46 | const inputRef = useRef(null);
47 |
48 | useEffect(() => {
49 | const handleKeyDown = (event) => {
50 | if (event.key === '/') {
51 | event.preventDefault();
52 | inputRef.current.focus();
53 | }
54 | };
55 | window.addEventListener('keydown', handleKeyDown);
56 | return () => {
57 | window.removeEventListener('keydown', handleKeyDown);
58 | };
59 | }, []);
60 |
61 | return (
62 |
68 |
108 | {loading ? : (
109 | <>
110 | {children}
111 |
112 | >
113 | )}
114 |
115 | )
116 | }
117 |
118 | export { Base };
119 |
--------------------------------------------------------------------------------
/featureflags/graph/proto_adapter.py:
--------------------------------------------------------------------------------
1 | from hiku.result import ROOT
2 |
3 |
class IdMixin:
    """Mixin shared by bindings: writes an entity's UUID as hex into ``id``."""

    def id(self, obj, value):
        # Protobuf messages carry ids as hex strings, not UUID objects.
        obj.id = value.hex
7 |
8 |
class RootBinding:
    """Copies resolved Root-level references into the result proto."""

    def flag(self, obj, value):
        # Optional single reference: leave the field unset when absent.
        if value is not None:
            obj.flag.Flag = value.ident.hex

    def flags(self, obj, value):
        for ref in value:
            entry = obj.flags.add()
            entry.Flag = ref.ident.hex

    def flags_by_ids(self, obj, value):
        for ref in value:
            entry = obj.flags_by_ids.add()
            entry.Flag = ref.ident.hex

    def projects(self, obj, value):
        for ref in value:
            entry = obj.projects.add()
            entry.Project = ref.ident.hex

    def authenticated(self, obj, value):
        obj.authenticated = value
28 |
29 |
class ProjectBinding(IdMixin):
    """Fills Project proto fields from resolved graph values."""

    def name(self, obj, value):
        obj.name = value

    def version(self, obj, value):
        obj.version = value

    def variables(self, obj, value):
        for ref in value:
            entry = obj.variables.add()
            entry.Variable = ref.ident.hex
40 |
41 |
class VariableBinding(IdMixin):
    """Fills Variable proto fields from resolved graph values."""

    def name(self, obj, value):
        obj.name = value

    def type(self, obj, value):
        # Enum-like value is converted to its protobuf representation.
        obj.type = value.to_pb()
48 |
49 |
class FlagBinding(IdMixin):
    """Fills Flag proto fields from resolved graph values."""

    def _project(self, obj, value):
        # Internal-only field, intentionally not exported to the proto.
        pass

    def name(self, obj, value):
        obj.name = value

    def project(self, obj, value):
        obj.project.Project = value.ident.hex

    def enabled(self, obj, value):
        # BoolValue wrapper: assign through .value.
        obj.enabled.value = value

    def overridden(self, obj, value):
        obj.overridden.value = value

    def conditions(self, obj, value):
        for ref in value:
            entry = obj.conditions.add()
            entry.Condition = ref.ident.hex
69 |
70 |
class ConditionBinding(IdMixin):
    """Fills Condition proto fields from resolved graph values."""

    def _checks(self, obj, value):
        # Internal-only field, intentionally ignored.
        pass

    def checks(self, obj, value):
        for ref in value:
            entry = obj.checks.add()
            entry.Check = ref.ident.hex
78 |
79 |
class CheckBinding(IdMixin):
    """Fills Check proto fields from resolved graph values.

    None values are skipped so the corresponding proto fields stay unset.
    """

    def _variable(self, obj, value):
        # Internal-only field, intentionally ignored.
        pass

    def variable(self, obj, value):
        obj.variable.Variable = value.ident.hex

    def operator(self, obj, value):
        obj.operator = value.to_pb()

    def value_string(self, obj, value):
        if value is None:
            return
        obj.value_string = value

    def value_number(self, obj, value):
        if value is None:
            return
        obj.value_number = value

    def value_timestamp(self, obj, value):
        if value is None:
            return
        # Timestamp proto fields are set via FromDatetime, not assignment.
        obj.value_timestamp.FromDatetime(value)

    def value_set(self, obj, value):
        if value is None:
            return
        # Replace the repeated field contents wholesale.
        obj.value_set.items[:] = value
105 |
106 |
# Node name -> binding instance used by populate_result_proto() to copy
# graph results into the protobuf reply.
BINDINGS = {
    "Root": RootBinding(),
    "Project": ProjectBinding(),
    "Variable": VariableBinding(),
    "Flag": FlagBinding(),
    "Condition": ConditionBinding(),
    "Check": CheckBinding(),
}
115 |
116 |
def populate_result_proto(result, result_proto):
    """Copy a hiku query result into *result_proto* and return it.

    Field names may carry an alias suffix like ``name[...]``; only the part
    before ``[`` selects the binding method.
    """
    index = result.__idx__

    root_binding = BINDINGS["Root"]
    for name, value in index.root.items():
        field = name.partition("[")[0]
        getattr(root_binding, field)(result_proto.Root, value)

    for node_name, node_index in index.items():
        if node_name == ROOT.node:
            continue
        binding = BINDINGS[node_name]
        idx_proto = getattr(result_proto, node_name)
        for id_, obj in node_index.items():
            obj_proto = idx_proto[id_.hex]
            for name, value in obj.items():
                field = name.partition("[")[0]
                getattr(binding, field)(obj_proto, value)

    return result_proto
130 |
--------------------------------------------------------------------------------
/featureflags/graph/utils.py:
--------------------------------------------------------------------------------
1 | import uuid
2 | from collections.abc import Iterable
3 | from datetime import datetime
4 | from uuid import UUID, uuid4
5 |
6 | import sqlalchemy
7 | from aiopg.sa import SAConnection
8 | from hiku.sources.aiopg import (
9 | LinkQuery as _LinkQuery,
10 | )
11 | from sqlalchemy import and_, select
12 | from sqlalchemy.dialects.postgresql import insert
13 | from sqlalchemy.sql.selectable import Select
14 |
15 | from featureflags.graph.types import (
16 | LocalId,
17 | )
18 | from featureflags.models import (
19 | AuthUser,
20 | LocalIdMap,
21 | )
22 | from featureflags.utils import select_scalar
23 |
24 |
def is_valid_uuid(value: str) -> bool:
    """Return True when *value* parses as a UUID string, False otherwise."""
    try:
        uuid.UUID(value)
        return True
    except ValueError:
        return False
32 |
33 |
def update_map(map_: dict, update: dict) -> dict:
    """Return a shallow copy of *map_* with *update*'s entries applied.

    Neither input is mutated.
    """
    merged = map_.copy()
    merged.update(update)
    return merged
38 |
39 |
async def gen_id(local_id: LocalId, *, conn: SAConnection) -> UUID:
    """Return a stable UUID mapped to *local_id*, creating the mapping once.

    Concurrent callers racing on the same (scope, value) pair all receive
    the same UUID: the loser of the insert race falls through to the select.
    """
    assert local_id.scope and local_id.value, local_id

    # try to insert new local id. if it already exists, return None,
    # otherwise return the id
    id_ = await select_scalar(
        conn,
        (
            insert(LocalIdMap.__table__)
            .values(
                {
                    LocalIdMap.scope: local_id.scope,
                    LocalIdMap.value: local_id.value,
                    LocalIdMap.id: uuid4(),
                    LocalIdMap.timestamp: datetime.utcnow(),
                }
            )
            .on_conflict_do_nothing()
            .returning(LocalIdMap.id)
        ),
    )
    # if previous insert returned None, it means that the id already exists,
    # try to select the id
    if id_ is None:
        id_ = await select_scalar(
            conn,
            (
                select([LocalIdMap.id]).where(
                    and_(
                        LocalIdMap.scope == local_id.scope,
                        LocalIdMap.value == local_id.value,
                    )
                )
            ),
        )
    return id_
76 |
77 |
async def get_auth_user(username: str, *, conn: SAConnection) -> UUID:
    """Return the id of the AuthUser named *username*, creating it if absent.

    Insert uses on_conflict_do_nothing so concurrent requests cannot create
    duplicates; the final re-select covers the race where another
    transaction inserted the row first.
    """
    user_id_select = select([AuthUser.id]).where(AuthUser.username == username)
    user_id = await select_scalar(conn, user_id_select)
    if user_id is None:
        # Not found: try to create; returns None if a concurrent
        # transaction won the insert race.
        user_id = await select_scalar(
            conn,
            (
                insert(AuthUser.__table__)
                .values(
                    {
                        AuthUser.id: uuid4(),
                        AuthUser.username: username,
                    }
                )
                .on_conflict_do_nothing()
                .returning(AuthUser.id)
            ),
        )
        if user_id is None:
            user_id = await select_scalar(conn, user_id_select)
    assert user_id is not None
    return user_id
100 |
101 |
class LinkQuery(_LinkQuery):
    """LinkQuery with support for ordering results."""

    def __init__(
        self,
        engine_key: str,
        *,
        from_column: sqlalchemy.Column,
        to_column: sqlalchemy.Column,
        order_by: list[sqlalchemy.Column] | None = None,
    ) -> None:
        super().__init__(
            engine_key, from_column=from_column, to_column=to_column
        )
        # Normalize None to an empty list so select_expr can splat it safely.
        self.order_by = order_by or []

    def select_expr(self, ids: Iterable) -> Select | None:
        expr = super().select_expr(ids)
        if expr is None or not self.order_by:
            return expr
        return expr.order_by(*self.order_by)
123 |
--------------------------------------------------------------------------------
/featureflags/migrations/versions/1876f90b58e8_added_feature_values_tables.py:
--------------------------------------------------------------------------------
1 | import sqlalchemy as sa
2 |
3 | from alembic import op
4 | from sqlalchemy.dialects import postgresql
5 |
6 |
# Alembic revision identifiers used by the migration graph.
revision = "1876f90b58e8"
down_revision = "8df4e7dd1897"
branch_labels = None
depends_on = None
11 |
12 |
def upgrade():
    """Add feature-values tables (value, value_changelog, value_condition)
    and drop the unused stats table."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "value",
        sa.Column("id", postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column("name", sa.String(), nullable=False),
        sa.Column("enabled", sa.Boolean(), nullable=True),
        sa.Column("value_default", sa.String(), nullable=False),
        sa.Column("value_override", sa.String(), nullable=False),
        sa.Column("project", postgresql.UUID(as_uuid=True), nullable=False),
        sa.ForeignKeyConstraint(
            ["project"],
            ["project.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("project", "name"),
    )
    op.create_index(
        "value_project_name_idx", "value", ["project", "name"], unique=False
    )
    op.create_table(
        "value_changelog",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("timestamp", postgresql.TIMESTAMP(), nullable=False),
        sa.Column("auth_user", postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column("value", postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column(
            "actions",
            postgresql.ARRAY(
                sa.Enum(
                    "ENABLE_VALUE",
                    "DISABLE_VALUE",
                    "ADD_CONDITION",
                    "DISABLE_CONDITION",
                    "RESET_VALUE",
                    "DELETE_VALUE",
                    "UPDATE_VALUE_VALUE_OVERRIDE",
                    name="value_changelog_actions",
                )
            ),
            nullable=True,
        ),
        sa.ForeignKeyConstraint(
            ["auth_user"],
            ["auth_user.id"],
        ),
        # Changelog rows are removed together with their value row.
        sa.ForeignKeyConstraint(["value"], ["value.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_table(
        "value_condition",
        sa.Column("id", postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column("value", postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column("value_override", sa.String(), nullable=False),
        sa.Column(
            "checks",
            postgresql.ARRAY(postgresql.UUID(as_uuid=True), as_tuple=True),
            nullable=True,
        ),
        sa.ForeignKeyConstraint(
            ["value"],
            ["value.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    op.drop_table("stats")
    # ### end Alembic commands ###
80 |
81 |
def downgrade():
    """Recreate the stats table and drop the feature-values tables."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "stats",
        sa.Column(
            "flag", postgresql.UUID(), autoincrement=False, nullable=False
        ),
        sa.Column(
            "interval",
            postgresql.TIMESTAMP(),
            autoincrement=False,
            nullable=False,
        ),
        sa.Column(
            "positive_count", sa.INTEGER(), autoincrement=False, nullable=True
        ),
        sa.Column(
            "negative_count", sa.INTEGER(), autoincrement=False, nullable=True
        ),
        sa.PrimaryKeyConstraint("flag", "interval", name="stats_pkey"),
    )
    # Drop in dependency order: children first, then the value table.
    op.drop_table("value_condition")
    op.drop_table("value_changelog")
    op.drop_index("value_project_name_idx", table_name="value")
    op.drop_table("value")
    # ### end Alembic commands ###
108 |
--------------------------------------------------------------------------------
/featureflags/utils.py:
--------------------------------------------------------------------------------
1 | from collections import defaultdict
2 | from typing import Any
3 | from uuid import UUID
4 |
5 | from aiopg.sa import Engine, SAConnection
6 | from sqlalchemy import cast
7 | from sqlalchemy.dialects.postgresql import ARRAY
8 |
9 |
class ArrayOfEnum(ARRAY):
    """
    See: http://docs.sqlalchemy.org/en/latest/dialects/postgresql.html#using
    -enum-with-array # noqa: E501
    """

    def bind_expression(self, bind_value: Any) -> Any:
        # Explicit cast so PostgreSQL receives the proper enum-array type.
        return cast(bind_value, self)

    def result_processor(self, dialect: Any, col_type: Any) -> Any:
        proc = super().result_processor(dialect, col_type)

        def wrapper(value: Any) -> Any:
            if value is None:
                return None
            # Strip the surrounding '{' and '}' of the raw array literal and
            # hand the comma-separated items to the base processor.
            # NOTE(review): naive split — items containing commas would
            # break here; presumably enum labels never contain commas.
            inner = value[1:-1]
            if inner:
                return proc(inner.split(","))
            if self.as_tuple:
                return ()
            return []

        return wrapper
35 |
36 |
class EntityCache:
    """Per-request cache of entity ids, keyed by name.

    ``project`` maps name -> id; the others map project id -> {name: id}.
    """

    def __init__(self) -> None:
        self.project: dict[str, UUID] = {}
        self.flag: defaultdict[UUID, dict[str, UUID]] = defaultdict(dict)
        self.variable: defaultdict[UUID, dict[str, UUID]] = defaultdict(dict)
        self.value: defaultdict[UUID, dict[str, UUID]] = defaultdict(dict)
47 |
48 |
class FlagAggStats(defaultdict):
    """Aggregated flag statistics accumulator.

    acc[interval][flag] -> [positive_count, negative_count]
    """

    def __init__(self) -> None:
        def _counters() -> defaultdict:
            # Each flag starts with zeroed positive/negative counters.
            return defaultdict(lambda: [0, 0])

        super().__init__(_counters)
58 |
59 |
class ValueAggStats(defaultdict):
    """Aggregated value statistics accumulator.

    acc[interval][value] -> [positive_count, negative_count]
    """

    def __init__(self) -> None:
        def _counters() -> defaultdict:
            # Each value starts with zeroed positive/negative counters.
            return defaultdict(lambda: [0, 0])

        super().__init__(_counters)
69 |
70 |
async def select_scalar(conn: SAConnection, stmt: Any) -> Any:
    """Execute *stmt* on *conn* and return the first column of the first row."""
    cursor = await conn.execute(stmt)
    return await cursor.scalar()
74 |
75 |
async def exec_scalar(engine: Engine, stmt: Any) -> Any:
    """Acquire a connection from *engine* and return *stmt*'s scalar result."""
    async with engine.acquire() as conn:
        return await select_scalar(conn, stmt)
79 |
80 |
async def select_first(conn: SAConnection, stmt: Any) -> Any:
    """Execute *stmt* and return the first result row, or None when empty."""
    cursor = await conn.execute(stmt)
    row = await cursor.first()
    if row is None:
        return None
    return row  # FIXME: KeyedTuple?
88 |
89 |
async def exec_expression(engine: Engine, stmt: Any) -> Any:
    """Execute *stmt* and return the first column of every result row."""
    async with engine.acquire() as conn:
        cursor = await conn.execute(stmt)
        rows = await cursor.fetchall()
        return [row[0] for row in rows]
94 |
95 |
async def exec_many(engine: Engine, stmt: Any) -> Any:
    """Execute *stmt* and return all result rows as a list."""
    async with engine.acquire() as conn:
        cursor = await conn.execute(stmt)
        rows = await cursor.fetchall()
    return list(rows)
100 |
101 |
def escape_dn_chars(s: str) -> str:
    """
    Escape all DN special characters found in s
    with a back-slash (see RFC 4514, section 2.4)

    From python-ldap, which is distributed under Python-style license.
    """
    if s:
        # Backslash must be escaped first so later escapes aren't doubled.
        s = (
            s.replace("\\", "\\\\")
            .replace(",", "\\,")
            .replace("+", "\\+")
            .replace('"', '\\"')
            .replace("<", "\\<")
            .replace(">", "\\>")
            .replace(";", "\\;")
            .replace("=", "\\=")
            # BUG FIX: RFC 4514 requires NUL to be escaped as the hex pair
            # "\00" (backslash + two hex digits), not as a backslash
            # followed by a raw NUL byte ("\\\000"), which is invalid in a
            # DN string. python-ldap does the same replacement.
            .replace("\000", "\\00")
        )

        # A leading '#' or space must be escaped (RFC 4514, section 2.4).
        if s.startswith(("#", " ")):
            s = "".join(("\\", s))

        # A trailing space must be escaped as well.
        if s.endswith(" "):
            s = "".join((s[:-1], "\\ "))

    return s
129 |
--------------------------------------------------------------------------------
/featureflags/protobuf/service_grpc.py:
--------------------------------------------------------------------------------
1 | # Generated by the Protocol Buffers compiler. DO NOT EDIT!
2 | # source: featureflags/protobuf/service.proto
3 | # plugin: grpclib.plugin.main
4 | import abc
5 | import typing
6 |
7 | import grpclib.const
8 | import grpclib.client
9 | if typing.TYPE_CHECKING:
10 | import grpclib.server
11 |
12 | import hiku.protobuf.query_pb2
13 | import google.protobuf.empty_pb2
14 | import google.protobuf.timestamp_pb2
15 | import featureflags.protobuf.graph_pb2
16 | import featureflags.protobuf.service_pb2
17 |
18 |
class FeatureFlagsBase(abc.ABC):
    """Generated abstract base for the FeatureFlags gRPC service.

    Subclasses implement the handler coroutines; ``__mapping__`` registers
    both the legacy snake_case and the CamelCase routes for each method.
    """

    @abc.abstractmethod
    async def exchange(self, stream: 'grpclib.server.Stream[featureflags.protobuf.service_pb2.ExchangeRequest, featureflags.protobuf.service_pb2.ExchangeReply]') -> None:
        pass

    @abc.abstractmethod
    async def Exchange(self, stream: 'grpclib.server.Stream[featureflags.protobuf.service_pb2.ExchangeRequest, featureflags.protobuf.service_pb2.ExchangeReply]') -> None:
        pass

    @abc.abstractmethod
    async def store_stats(self, stream: 'grpclib.server.Stream[featureflags.protobuf.service_pb2.StoreStatsTask, google.protobuf.empty_pb2.Empty]') -> None:
        pass

    @abc.abstractmethod
    async def StoreStats(self, stream: 'grpclib.server.Stream[featureflags.protobuf.service_pb2.StoreStatsTask, google.protobuf.empty_pb2.Empty]') -> None:
        pass

    def __mapping__(self) -> typing.Dict[str, grpclib.const.Handler]:
        # Route table consumed by the grpclib server.
        return {
            '/featureflags.service.FeatureFlags/exchange': grpclib.const.Handler(
                self.exchange,
                grpclib.const.Cardinality.UNARY_UNARY,
                featureflags.protobuf.service_pb2.ExchangeRequest,
                featureflags.protobuf.service_pb2.ExchangeReply,
            ),
            '/featureflags.service.FeatureFlags/Exchange': grpclib.const.Handler(
                self.Exchange,
                grpclib.const.Cardinality.UNARY_UNARY,
                featureflags.protobuf.service_pb2.ExchangeRequest,
                featureflags.protobuf.service_pb2.ExchangeReply,
            ),
            '/featureflags.service.FeatureFlags/store_stats': grpclib.const.Handler(
                self.store_stats,
                grpclib.const.Cardinality.UNARY_UNARY,
                featureflags.protobuf.service_pb2.StoreStatsTask,
                google.protobuf.empty_pb2.Empty,
            ),
            '/featureflags.service.FeatureFlags/StoreStats': grpclib.const.Handler(
                self.StoreStats,
                grpclib.const.Cardinality.UNARY_UNARY,
                featureflags.protobuf.service_pb2.StoreStatsTask,
                google.protobuf.empty_pb2.Empty,
            ),
        }
64 |
65 |
class FeatureFlagsStub:
    """Generated client stub for the FeatureFlags gRPC service.

    Exposes both the legacy snake_case and the CamelCase method names.
    """

    def __init__(self, channel: grpclib.client.Channel) -> None:
        self.exchange = grpclib.client.UnaryUnaryMethod(
            channel,
            '/featureflags.service.FeatureFlags/exchange',
            featureflags.protobuf.service_pb2.ExchangeRequest,
            featureflags.protobuf.service_pb2.ExchangeReply,
        )
        self.Exchange = grpclib.client.UnaryUnaryMethod(
            channel,
            '/featureflags.service.FeatureFlags/Exchange',
            featureflags.protobuf.service_pb2.ExchangeRequest,
            featureflags.protobuf.service_pb2.ExchangeReply,
        )
        self.store_stats = grpclib.client.UnaryUnaryMethod(
            channel,
            '/featureflags.service.FeatureFlags/store_stats',
            featureflags.protobuf.service_pb2.StoreStatsTask,
            google.protobuf.empty_pb2.Empty,
        )
        self.StoreStats = grpclib.client.UnaryUnaryMethod(
            channel,
            '/featureflags.service.FeatureFlags/StoreStats',
            featureflags.protobuf.service_pb2.StoreStatsTask,
            google.protobuf.empty_pb2.Empty,
        )
93 |
--------------------------------------------------------------------------------
/featureflags/rpc/servicer.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import logging
3 | import weakref
4 |
5 | import aiopg.sa
6 | from google.protobuf.empty_pb2 import Empty
7 | from grpclib.server import Stream
8 | from hiku.engine import Engine
9 | from hiku.readers import protobuf
10 | from sqlalchemy import select
11 |
12 | from featureflags.graph.graph import exec_graph
13 | from featureflags.graph.proto_adapter import populate_result_proto
14 | from featureflags.models import Project
15 | from featureflags.protobuf import service_grpc, service_pb2
16 | from featureflags.rpc.db import add_statistics
17 | from featureflags.rpc.metrics import track
18 | from featureflags.rpc.utils import debug_cancellation
19 | from featureflags.services.auth import (
20 | user_session,
21 | )
22 | from featureflags.utils import EntityCache, FlagAggStats, select_scalar
23 |
24 | log = logging.getLogger(__name__)
25 |
26 |
class FeatureFlagsServicer(service_grpc.FeatureFlagsBase):
    """gRPC servicer for the FeatureFlags service.

    Handles client Exchange round-trips: records reported flag usage and
    returns fresh graph data when the client's cached version is stale.
    """

    def __init__(
        self,
        db_engine: aiopg.sa.Engine,
        graph_engine: Engine,
    ) -> None:
        # Aggregated flag statistics, filled from incoming requests
        # via add_statistics().
        self._flag_agg_stats = FlagAggStats()
        self._graph_engine = graph_engine
        self._db_engine = db_engine

        # for debugging
        self._tasks = weakref.WeakSet()  # type: ignore

    async def exchange(self, stream: Stream) -> None:
        # backward compatibility
        await self.Exchange(stream)

    @debug_cancellation
    @track
    async def Exchange(self, stream: Stream) -> None:  # noqa: N802
        """Store request statistics and reply with graph data when stale.

        The reply's result is populated only when the client's version
        differs from the project's current version and a query was sent.
        """
        entity_cache = EntityCache()
        self._tasks.add(asyncio.current_task())
        try:
            request: service_pb2.ExchangeRequest = await stream.recv_message()
        except asyncio.CancelledError:
            # Diagnostic logging for requests cancelled while receiving:
            # dump the HTTP/2 flow-control window state before re-raising.
            # NOTE(review): relies on grpclib/h2 private attributes.
            h2_conn = stream._stream._h2_connection
            window = h2_conn._inbound_flow_control_window_manager
            log.info(
                "Stuck;"
                " streams: %d;"
                " tasks: %d;"
                " max_window_size: %d;"
                " current_window_size: %d;"
                " bytes_processed: %d;"
                " user_agent: %s;"
                " metadata: %s;",
                len(h2_conn.streams),
                len(self._tasks),
                window.max_window_size,
                window.current_window_size,
                window._bytes_processed,
                stream.user_agent,
                stream.metadata,
            )
            raise

        async with self._db_engine.acquire() as conn:
            await add_statistics(
                request,
                conn=conn,
                entity_cache=entity_cache,
                flag_agg_stats=self._flag_agg_stats,
            )
            version = await select_scalar(
                conn,
                select([Project.version]).where(
                    Project.name == request.project
                ),
            )

        exchange_reply = service_pb2.ExchangeReply()

        # Only re-resolve the graph when the client is out of date and
        # actually asked for data.
        if request.version != version and request.HasField("query"):
            result = await exec_graph(
                graph_engine=self._graph_engine,
                query=protobuf.transform(request.query),
                db_engine=self._db_engine,
                session=user_session.get(),
            )
            populate_result_proto(result, exchange_reply.result)

        exchange_reply.version = version

        await stream.send_message(exchange_reply)
        await stream.send_trailing_metadata()

    async def store_stats(self, stream: Stream) -> None:
        # backward compatibility
        await self.StoreStats(stream)

    @track
    async def StoreStats(self, stream: Stream) -> None:  # noqa: N802
        # Stats are collected via Exchange; this endpoint just acknowledges.
        await stream.send_message(Empty())
110 |
--------------------------------------------------------------------------------
/docs/conditions.rst:
--------------------------------------------------------------------------------
1 | Conditions
2 | ==========
3 |
4 | Conditions determine whether a Flag is enabled or a Value is returned for a specific context. A Condition consists of one or more Checks. All Checks within a Condition must pass (logical AND) for the Condition to be true. If a Flag has multiple Conditions, only one of them needs to be true (logical OR) for the Flag to be enabled.
5 |
6 | Variable Types
7 | --------------
8 |
9 | The FeatureFlags system supports the following variable types for context evaluation:
10 |
11 | * **String**: Textual data (e.g., user IDs, emails, country codes).
12 | * **Number**: Numeric values (e.g., age, count, balance).
13 | * **Timestamp**: Date and time values.
14 | * **Set**: A collection of unique strings (e.g., user roles, permissions).
15 |
16 | Operators
17 | ---------
18 |
19 | Operators define how the variable in the context is compared against the value defined in the Check.
20 |
21 | String Operators
22 | ~~~~~~~~~~~~~~~~
23 |
24 | Applicable to **String** variables.
25 |
26 | * **Equal**: Checks if the context variable matches the value exactly (case-sensitive).
27 | * **Contains**: Checks if the context variable contains the specified value as a substring.
28 | * **Regexp**: Checks if the context variable matches the specified Regular Expression.
29 | * **Wildcard**: Checks if the context variable matches a wildcard pattern (e.g., ``user-*``).
30 | * **Percent**: Used for gradual rollouts. Hashes the context variable (usually a user ID) and checks if the result modulo 100 is less than the specified value (0-100).
31 |
32 | Number Operators
33 | ~~~~~~~~~~~~~~~~
34 |
35 | Applicable to **Number** variables.
36 |
37 | * **Equal**: Checks if the context variable equals the specified value.
38 | * **Less Than**: Checks if the context variable is strictly less than the specified value.
39 | * **Less Or Equal**: Checks if the context variable is less than or equal to the specified value.
40 | * **Greater Than**: Checks if the context variable is strictly greater than the specified value.
41 | * **Greater Or Equal**: Checks if the context variable is greater than or equal to the specified value.
42 |
43 | Timestamp Operators
44 | ~~~~~~~~~~~~~~~~~~~
45 |
46 | Applicable to **Timestamp** variables.
47 |
48 | * **Equal**: Checks if the context timestamp is exactly equal to the specified timestamp.
49 | * **Less Than**: Checks if the context timestamp is before the specified timestamp.
50 | * **Less Or Equal**: Checks if the context timestamp is before or at the same time as the specified timestamp.
51 | * **Greater Than**: Checks if the context timestamp is after the specified timestamp.
52 | * **Greater Or Equal**: Checks if the context timestamp is after or at the same time as the specified timestamp.
53 |
54 | Set Operators
55 | ~~~~~~~~~~~~~
56 |
57 | ``Set`` variables are sets of ``strings``. In order to use ``set`` conditions, you need to pass a set of strings as the value to the context.
58 |
59 | Applicable to **Set** variables.
60 |
61 | * **Subset (Included In)**: Checks if the set in the context is a subset of the defined set (i.e., all elements in the context set must exist in the defined set).
62 |
63 | On the web UI side you create a new condition with the ``user_roles`` variable, the ``included in`` operator, and provide values separated by commas, e.g. ``admin,superadmin``.
64 |
65 | .. image:: _static/images/condition-set-included-in.png
66 | :width: 700
67 | :alt: UI
68 |
69 | .. code-block:: python
70 |
71 | class FlagsDefaults:
72 | ADMIN_ACCESS = False
73 |
74 | manager = HttpxManager(
75 | url="http://localhost:8080",
76 | project="my-project",
77 | variables=[Variable("user_roles", VariableType.SET)],
78 | defaults=FlagsDefaults,
79 | )
80 | client = FeatureFlagsClient(manager)
81 |
82 | @app.get("/admin")
83 | def admin():
84 | auth_user = User(role="admin")
85 |
86 | with client.flags({"user_roles": {auth_user.role}}) as f:
87 | if f.ADMIN_ACCESS:
88 | return "Admin access granted"
89 |
90 | return "Admin access denied"
91 |
92 | * **Superset (Includes)**: Checks if the set in the context is a superset of the defined set (i.e., the context set must contain all elements of the defined set).
--------------------------------------------------------------------------------
/ui/src/Dashboard/ValueCheck.jsx:
--------------------------------------------------------------------------------
1 | import {
2 | Button,
3 | Col,
4 | Input,
5 | InputNumber,
6 | DatePicker,
7 | Row,
8 | Flex,
9 | Select,
10 | } from 'antd';
11 | import {
12 | MinusOutlined,
13 | } from '@ant-design/icons';
14 |
15 | import './Check.less';
16 | import { Operators, TYPES, Type } from './constants';
17 | import {
18 | useValueChecks,
19 | useProjectsMap,
20 | } from './context';
21 |
22 | const { Option } = Select;
23 |
24 | const defaultInputProps = {
25 | style: { width: '100%' },
26 | size: "middle"
27 | }
28 |
29 | const CheckInput = ({ conditionId, check, projectName }) => {
30 | const projectsMap = useProjectsMap();
31 | const project = projectsMap[projectName];
32 | const {
33 | setValueString,
34 | setValueNumber,
35 | setValueTimestamp,
36 | setValueSet
37 | } = useValueChecks();
38 | const variable = project.variablesMap[check.variable];
39 | const variableType = variable ? variable.type : undefined;
40 |
41 | switch (variableType) {
42 | case Type.STRING:
43 | return setValueString(conditionId, check.id, e.target.value)}
49 | />;
50 | case Type.NUMBER:
51 | return {
56 | setValueNumber(conditionId, check.id, value)
57 | }}
58 | />;
59 | case Type.TIMESTAMP:
60 | return {
64 | setValueTimestamp(conditionId, check.id, date)
65 | }}
66 | />;
67 | case Type.SET:
68 | return setValueSet(conditionId, check.id, e.target.value)}
75 | />;
76 | }
77 | return ;
83 | }
84 |
85 | export const ValueCheck = ({ conditionId, check, onDeleteCheck, projectName }) => {
86 | const projectsMap = useProjectsMap();
87 | const project = projectsMap[projectName];
88 | const { setVariable, setOperator } = useValueChecks();
89 |
90 | const onVariableOptionChange = (value) => {
91 | setVariable(conditionId, check.id, value);
92 | }
93 | const onOperatorOptionChange = (value) => {
94 | setOperator(conditionId, check.id, value);
95 | }
96 |
97 | const variable = project.variablesMap[check.variable];
98 | const variableType = variable ? variable.type : undefined;
99 | const operators = variableType ? TYPES[variableType].operators.map((op) => {
100 | return { id: op, title: Operators[op].title };
101 | }) : [];
102 |
103 | return (
104 |
105 |
106 |
109 |
110 |
111 |
112 |
123 |
134 |
135 |
136 |
137 |
138 |
139 |
140 | );
141 | }
142 |
--------------------------------------------------------------------------------
/ui/src/Dashboard/queries.js:
--------------------------------------------------------------------------------
import { gql } from '@apollo/client';

// Server + frontend build versions, displayed in the dashboard.
export const VERSION_QUERY = gql`
  query Version {
    version {
      serverVersion
      buildVersion
    }
  }
`;

// All projects with their variables; used to build the projects map.
export const PROJECTS_QUERY = gql`
  query Projects {
    projects {
      id
      name
      variables {
        id
        name
        type
      }
    }
  }
`;

// Shared selection set for a flag: state, timestamps, owning project,
// and the full condition/check tree.
const FLAG_FRAGMENT = gql`
  fragment FlagFragment on Flag {
    id
    name
    enabled
    overridden
    created_timestamp
    reported_timestamp
    project {
      name
    }
    conditions {
      id
      position
      checks {
        id
        variable {
          id
          name
          type
        }
        operator
        value_string
        value_number
        value_timestamp
        value_set
      }
    }
  }
`;


// Applies a batch of flag edit operations; returns a list of errors.
export const SAVE_FLAG_MUTATION = gql`
  mutation SaveFlag($operations: [SaveFlagOperation!]!) {
    saveFlag(operations: $operations) {
      errors
    }
  }
`;

// Drops a flag's override, returning it to its default state.
export const RESET_FLAG_MUTATION = gql`
  mutation ResetFlag($id: String!) {
    resetFlag(id: $id) {
      error

    }
  }
`;

// Deletes a flag by id.
export const DELETE_FLAG_MUTATION = gql`
  mutation DeleteFlag($id: String!) {
    deleteFlag(id: $id) {
      error
    }
  }
`;

// Single flag by id, with the full FlagFragment selection.
export const FLAG_QUERY = gql`
  ${FLAG_FRAGMENT}
  query Flag($id: String!) {
    flag(id: $id) {
      ...FlagFragment
    }
  }
`;

// Flags filtered by project and/or fuzzy flag name.
export const FLAGS_QUERY = gql`
  ${FLAG_FRAGMENT}
  query Flags($project: String, $flag_name: String) {
    flags(project_name: $project, flag_name: $flag_name) {
      ...FlagFragment
    }
  }
`;

// Changelog entries for one flag (who changed what, when).
export const FLAG_HISTORY_QUERY = gql`
  query FlagHistory($id: String!) {
    flag(id: $id) {
      changes {
        timestamp
        actions
        user {
          id
          username
        }
      }
    }
  }
`;

// Shared selection set for a value: like FLAG_FRAGMENT plus the
// default/override payloads on the value and on each condition.
const VALUE_FRAGMENT = gql`
  fragment ValueFragment on Value {
    id
    name
    enabled
    overridden
    value_default
    value_override
    created_timestamp
    reported_timestamp
    project {
      name
    }
    conditions {
      id
      position
      value_override
      checks {
        id
        variable {
          id
          name
          type
        }
        operator
        value_string
        value_number
        value_timestamp
        value_set
      }
    }
  }
`;


// Applies a batch of value edit operations; returns a list of errors.
export const SAVE_VALUE_MUTATION = gql`
  mutation SaveValue($operations: [SaveValueOperation!]!) {
    saveValue(operations: $operations) {
      errors
    }
  }
`;

// Drops a value's override, returning it to its default state.
export const RESET_VALUE_MUTATION = gql`
  mutation ResetValue($id: String!) {
    resetValue(id: $id) {
      error
    }
  }
`;

// Deletes a value by id.
export const DELETE_VALUE_MUTATION = gql`
  mutation DeleteValue($id: String!) {
    deleteValue(id: $id) {
      error
    }
  }
`;

// Deletes a variable by id.
export const DELETE_VARIABLE_MUTATION = gql`
  mutation DeleteVariable($id: String!) {
    deleteVariable(id: $id) {
      error
    }
  }
`;

// Single value by id, with the full ValueFragment selection.
export const VALUE_QUERY = gql`
  ${VALUE_FRAGMENT}
  query Value($id: String!) {
    value(id: $id) {
      ...ValueFragment
    }
  }
`;

// Values filtered by project and/or fuzzy value name.
export const VALUES_QUERY = gql`
  ${VALUE_FRAGMENT}
  query Values($project: String, $value_name: String) {
    values(project_name: $project, value_name: $value_name) {
      ...ValueFragment
    }
  }
`;

// Changelog entries for one value (who changed what, when).
export const VALUE_HISTORY_QUERY = gql`
  query ValueHistory($id: String!) {
    value(id: $id) {
      changes {
        timestamp
        actions
        user {
          id
          username
        }
      }
    }
  }
`;

// Deletes a project by id.
export const DELETE_PROJECT_MUTATION = gql`
  mutation DeleteProject($id: String!) {
    deleteProject(id: $id) {
      error
    }
  }
`;
223 |
--------------------------------------------------------------------------------
/ui/src/Dashboard/Flags.jsx:
--------------------------------------------------------------------------------
1 | import fuzzysearch from 'fuzzysearch';
2 | import { useLocation, useNavigate } from 'react-router-dom';
3 | import { useMemo, useState, useEffect } from 'react';
4 |
5 | import {
6 | AutoComplete,
7 | List,
8 | Input,
9 | Typography,
10 | } from 'antd';
11 | import { SearchOutlined } from '@ant-design/icons';
12 | import { useQuery } from '@apollo/client';
13 |
14 | import './Flags.less';
15 |
16 | import { CenteredSpinner } from '../components/Spinner';
17 | import { ProjectsMapContext } from './context';
18 | import { FLAGS_QUERY } from './queries';
19 | import { Flag } from './Flag';
20 | import { HeaderTabs } from "./Tabs";
21 |
// Synthetic autocomplete entry: selecting it keeps every fuzzy match visible.
const getShowAllMatches = (count, searchText) => {
  const label = `Show all matches(${count})`;
  return { label, value: searchText };
};
26 |
// List key for a flag: "<flag name>_<project name>" (names may repeat
// across projects, so the project disambiguates).
function getFlagKey(flag) {
  const { name, project } = flag;
  return [name, project.name].join('_');
}
30 |
31 | const View = ({ children }) => {
32 | return (
33 |
40 |
41 | {children}
42 |
43 | );
44 | };
45 |
46 |
47 | const Flags = ({ flags, isSearch }) => {
48 | const location = useLocation();
49 | const navigate = useNavigate();
50 | const queryParams = new URLSearchParams(location.search);
51 | const flagFromQuery = queryParams.get('flag');
52 |
53 | const [searchOptions, setSearchOptions] = useState([]);
54 | const [searchSet, setSearchSet] = useState(null);
55 | const [selected, setSelected] = useState('');
56 |
57 | const setFlagToUrl = (flag) => {
58 | if (!flag) {
59 | queryParams.delete('flag');
60 | } else {
61 | queryParams.set('flag', flag);
62 | }
63 | navigate(`/?${queryParams.toString()}`);
64 | }
65 |
66 | useEffect(() => {
67 | if (flagFromQuery) {
68 | setSearchSet(new Set([flagFromQuery]));
69 | setSelected(flagFromQuery);
70 | }
71 | }, [flagFromQuery]);
72 |
73 | const flagsMap = useMemo(() => flags.reduce((acc, flag) => {
74 | acc[getFlagKey(flag)] = flag;
75 | return acc;
76 | }, {}), [flags]);
77 |
78 | if (!flags.length) {
79 | return
80 |
81 | No flags
82 |
83 | ;
84 | }
85 |
86 | const listData = flags
87 | .filter((flag) => {
88 | return selected ? searchSet.has(flag.name) : true;
89 | })
90 | .map((flag) => {
91 | return {
92 | title: flag.name,
93 | key: getFlagKey(flag),
94 | };
95 | });
96 |
97 | /**
98 | * filter autocomplete options
99 | */
100 | const onSearch = (searchText) => {
101 | if (!searchText) {
102 | setSearchOptions([]);
103 | return
104 | };
105 |
106 | const res = flags
107 | .filter(({ name }) => fuzzysearch(
108 | searchText.toLowerCase(), name.toLowerCase()
109 | ))
110 | .map(({ name }) => ({ label: name, value: name }));
111 |
112 | setSearchOptions([getShowAllMatches(res.length, searchText)].concat(res));
113 | setSearchSet(new Set(res.map(({ value }) => value)));
114 | };
115 |
116 | const onSelect = (data) => {
117 | setSelected(data);
118 | setFlagToUrl(data);
119 | };
120 |
121 | const onChange = (data) => {
122 | if (!data) {
123 | setSelected('');
124 | setSearchSet(null);
125 | setFlagToUrl(null);
126 | }
127 | }
128 |
129 | return (
130 |
131 | {!isSearch && (
132 |
140 | }
142 | size="middle"
143 | allowClear
144 | placeholder="Filter flags"
145 | />
146 |
147 | )}
148 | (
155 |
156 |
157 |
158 | )}
159 | />
160 |
161 | );
162 | };
163 |
164 |
165 | export const FlagsContainer = ({ projectName, searchTerm, projectsMap }) => {
166 | const { data, loading, error, networkStatus } = useQuery(FLAGS_QUERY, {
167 | variables: {
168 | project: searchTerm ? null : projectName,
169 | flag_name: searchTerm,
170 | },
171 | });
172 | if (loading) {
173 | return ;
174 | }
175 |
176 | const _projectsMap = Object.keys(projectsMap).reduce((acc, key) => {
177 | const _project = projectsMap[key];
178 | acc[key] = {
179 | ..._project,
180 | variablesMap: _project.variables.reduce((variableAcc, variable) => {
181 | variableAcc[variable.id] = variable;
182 | return variableAcc;
183 | }, {}),
184 | };
185 | return acc;
186 | }, {});
187 |
188 | return (
189 |
190 |
191 |
192 | );
193 | }
194 |
--------------------------------------------------------------------------------
/ui/src/Dashboard/Values.jsx:
--------------------------------------------------------------------------------
1 | import fuzzysearch from 'fuzzysearch';
2 | import { useLocation, useNavigate } from 'react-router-dom';
3 | import { useMemo, useState, useEffect } from 'react';
4 |
5 | import {
6 | AutoComplete,
7 | List,
8 | Input,
9 | Typography,
10 | } from 'antd';
11 | import { SearchOutlined } from '@ant-design/icons';
12 | import { useQuery } from '@apollo/client';
13 |
14 | import './Values.less';
15 |
16 | import { CenteredSpinner } from '../components/Spinner';
17 | import { ProjectsMapContext } from './context';
18 | import { VALUES_QUERY } from './queries';
19 | import { Value } from './Value';
20 | import { HeaderTabs } from "./Tabs";
21 |
// Synthetic autocomplete entry: selecting it keeps every fuzzy match visible.
const getShowAllMatches = (count, searchText) => {
  const label = `Show all matches(${count})`;
  return { label, value: searchText };
};
26 |
// List key for a value: "<value name>_<project name>" (names may repeat
// across projects, so the project disambiguates).
function getValueKey(value) {
  const { name, project } = value;
  return [name, project.name].join('_');
}
30 |
31 | const View = ({ children }) => {
32 | return (
33 |
40 |
41 | {children}
42 |
43 | );
44 | };
45 |
46 | const Values = ({ values, isSearch }) => {
47 | const location = useLocation();
48 | const navigate = useNavigate();
49 | const queryParams = new URLSearchParams(location.search);
50 | const valueFromQuery = queryParams.get('value');
51 |
52 | const [searchOptions, setSearchOptions] = useState([]);
53 | const [searchSet, setSearchSet] = useState(null);
54 | const [selected, setSelected] = useState('');
55 |
56 | const setValueToUrl = (value) => {
57 | if (!value) {
58 | queryParams.delete('value');
59 | } else {
60 | queryParams.set('value', value);
61 | }
62 | navigate(`/?${queryParams.toString()}`);
63 | }
64 |
65 | useEffect(() => {
66 | if (valueFromQuery) {
67 | setSearchSet(new Set([valueFromQuery]));
68 | setSelected(valueFromQuery);
69 | }
70 | }, [valueFromQuery]);
71 |
72 | const valuesMap = useMemo(() => values.reduce((acc, value) => {
73 | acc[getValueKey(value)] = value;
74 | return acc;
75 | }, {}), [values]);
76 |
77 | if (!values.length) {
78 | return
79 |
80 | No values
81 |
82 | ;
83 | }
84 |
85 | const listData = values
86 | .filter((value) => {
87 | return selected ? searchSet.has(value.name) : true;
88 | })
89 | .map((value) => {
90 | return {
91 | title: value.name,
92 | key: getValueKey(value),
93 | };
94 | });
95 |
96 | /**
97 | * filter autocomplete options
98 | */
99 | const onSearch = (searchText) => {
100 | if (!searchText) {
101 | setSearchOptions([]);
102 | return
103 | };
104 |
105 | const res = values
106 | .filter(({ name }) => fuzzysearch(
107 | searchText.toLowerCase(), name.toLowerCase()
108 | ))
109 | .map(({ name }) => ({ label: name, value: name }));
110 |
111 | setSearchOptions([getShowAllMatches(res.length, searchText)].concat(res));
112 | setSearchSet(new Set(res.map(({ value }) => value)));
113 | };
114 |
115 | const onSelect = (data) => {
116 | setSelected(data);
117 | setValueToUrl(data);
118 | };
119 |
120 | const onChange = (data) => {
121 | if (!data) {
122 | setSelected('');
123 | setSearchSet(null);
124 | setValueToUrl(null);
125 | }
126 | }
127 |
128 | return (
129 |
130 | {!isSearch && (
131 |
139 | }
141 | size="middle"
142 | allowClear
143 | placeholder="Filter values"
144 | />
145 |
146 | )}
147 | (
154 |
155 |
156 |
157 | )}
158 | />
159 |
160 | );
161 | };
162 |
163 |
164 | export const ValuesContainer = ({ projectName, searchTerm, projectsMap }) => {
165 | const { data, loading, error, networkStatus } = useQuery(VALUES_QUERY, {
166 | variables: {
167 | project: searchTerm ? null : projectName,
168 | value_name: searchTerm,
169 | },
170 | });
171 | if (loading) {
172 | return ;
173 | }
174 |
175 | const _projectsMap = Object.keys(projectsMap).reduce((acc, key) => {
176 | const _project = projectsMap[key];
177 | acc[key] = {
178 | ..._project,
179 | variablesMap: _project.variables.reduce((variableAcc, variable) => {
180 | variableAcc[variable.id] = variable;
181 | return variableAcc;
182 | }, {}),
183 | };
184 | return acc;
185 | }, {});
186 |
187 | return (
188 |
189 |
190 |
191 | );
192 | }
193 |
--------------------------------------------------------------------------------
/featureflags/http/repositories/flags.py:
--------------------------------------------------------------------------------
1 | import aiopg.sa
2 | from hiku.builder import Q, build
3 | from hiku.engine import Engine
4 | from hiku.query import Node as QueryNode
5 | from sqlalchemy import select
6 |
7 | from featureflags.graph.graph import exec_denormalize_graph
8 | from featureflags.http.db import prepare_flags_project
9 | from featureflags.http.types import (
10 | Flag,
11 | PreloadFlagsRequest,
12 | PreloadFlagsResponse,
13 | SyncFlagsRequest,
14 | SyncFlagsResponse,
15 | Value,
16 | )
17 | from featureflags.models import Project
18 | from featureflags.services.auth import user_session
19 | from featureflags.utils import EntityCache, select_scalar
20 |
21 |
def load_data_query(project: str) -> QueryNode:
    """Build the hiku query loading all flags and values of *project*,
    including each entity's conditions and their checks.
    """

    def checks_node():
        # The checks selection is identical for flags and values; built
        # per call site so each usage gets fresh builder handles.
        return Q.checks[
            Q.id,
            Q.variable[
                Q.id,
                Q.name,
                Q.type,
            ],
            Q.operator,
            Q.value_string,
            Q.value_number,
            Q.value_timestamp,
            Q.value_set,
        ]

    flags_node = Q.flags(project_name=project)[
        Q.id,
        Q.name,
        Q.enabled,
        Q.overridden,
        Q.conditions[
            Q.id,
            checks_node(),
        ],
    ]
    values_node = Q.values(project_name=project)[
        Q.id,
        Q.name,
        Q.enabled,
        Q.overridden,
        Q.value_default,
        Q.value_override,
        Q.conditions[
            Q.id,
            Q.value_override,
            checks_node(),
        ],
    ]
    return build([flags_node, values_node])
74 |
75 |
class FlagsRepository:
    """Read-side repository for flags/values served over HTTP."""

    def __init__(
        self,
        db_engine: aiopg.sa.Engine,
        graph_engine: Engine,
    ) -> None:
        self._db_engine = db_engine
        self._graph_engine = graph_engine

    async def get_project_version(self, project: str) -> int | None:
        """Return the current version of *project*, or None if absent."""
        query = select([Project.version]).where(Project.name == project)
        async with self._db_engine.acquire() as conn:
            return await select_scalar(conn, query)

    async def prepare_project(
        self,
        request: PreloadFlagsRequest,
    ) -> None:
        """
        Initialize project from request, create/update entities in the database.
        """
        async with self._db_engine.acquire() as conn:
            await prepare_flags_project(
                request,
                conn=conn,
                entity_cache=EntityCache(),
            )

    async def _fetch_state(
        self, project: str
    ) -> tuple[list[Flag], list[Value]]:
        # Run the denormalizing graph query and convert the raw dicts
        # into response models.
        result = await exec_denormalize_graph(
            graph_engine=self._graph_engine,
            query=load_data_query(project),
            db_engine=self._db_engine,
            session=user_session.get(),
        )
        return (
            [Flag(**raw) for raw in result["flags"]],
            [Value(**raw) for raw in result["values"]],
        )

    async def load(self, request: PreloadFlagsRequest) -> PreloadFlagsResponse:
        """
        Initialize project from request, create/update entities in the database
        and return available flags.
        """
        await self.prepare_project(request)
        version = await self.get_project_version(request.project)
        flags, values = await self._fetch_state(request.project)
        return PreloadFlagsResponse(
            flags=flags,
            values=values,
            version=version,
        )

    async def sync(self, request: SyncFlagsRequest) -> SyncFlagsResponse:
        """
        Return updated flags if project version
        is different from the requested one.
        """
        version = await self.get_project_version(request.project)
        if request.version == version:
            # Client is up to date -- skip the expensive graph query.
            flags: list[Flag] = []
            values: list[Value] = []
        else:
            flags, values = await self._fetch_state(request.project)
        return SyncFlagsResponse(
            flags=flags,
            values=values,
            version=version,
        )
161 |
--------------------------------------------------------------------------------
/featureflags/migrations/versions/b3fbbe647373_init.py:
--------------------------------------------------------------------------------
1 | import sqlalchemy as sa
2 |
3 | from alembic import op
4 | from sqlalchemy.dialects import postgresql
5 |
6 |
7 | revision = "b3fbbe647373"
8 | down_revision = None
9 | branch_labels = None
10 | depends_on = None
11 |
12 |
def upgrade():
    """Create the initial featureflags schema.

    Tables: auth_session, local_id_map, project, stats, flag, variable,
    check, condition. Creation order matters -- flag/variable reference
    project, check references variable, condition references flag.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # Session store keyed by session token, with creation/expiration
    # timestamps.
    op.create_table(
        "auth_session",
        sa.Column("session", sa.String(), nullable=False),
        sa.Column("user", sa.String(), nullable=False),
        sa.Column("creation_time", postgresql.TIMESTAMP(), nullable=False),
        sa.Column("expiration_time", postgresql.TIMESTAMP(), nullable=False),
        sa.PrimaryKeyConstraint("session"),
    )
    # Supports purging expired sessions.
    op.create_index(
        "auth_session_expiration_time",
        "auth_session",
        ["expiration_time"],
        unique=False,
    )
    # Supports listing a user's sessions by age.
    op.create_index(
        "auth_session_user_creation_time",
        "auth_session",
        ["user", "creation_time"],
        unique=False,
    )
    # Maps (scope, value) string pairs to UUIDs with a timestamp.
    op.create_table(
        "local_id_map",
        sa.Column("scope", sa.String(), nullable=False),
        sa.Column("value", sa.String(), nullable=False),
        sa.Column("id", postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column("timestamp", postgresql.TIMESTAMP(), nullable=False),
        sa.PrimaryKeyConstraint("scope", "value"),
    )
    # Projects: unique by name; `version` is bumped on changes so clients
    # can cheaply detect staleness.
    op.create_table(
        "project",
        sa.Column("id", postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column("name", sa.String(), nullable=False),
        sa.Column("version", sa.Integer(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("name"),
    )
    op.create_index("project_name_idx", "project", ["name"], unique=False)
    # Per-flag positive/negative hit counters bucketed by time interval.
    # Note: no FK to flag here; the composite PK is (flag, interval).
    op.create_table(
        "stats",
        sa.Column("flag", postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column("interval", postgresql.TIMESTAMP(), nullable=False),
        sa.Column("positive_count", sa.Integer(), nullable=True),
        sa.Column("negative_count", sa.Integer(), nullable=True),
        sa.PrimaryKeyConstraint("flag", "interval"),
    )
    # Flags: unique per (project, name); `enabled` is nullable, meaning
    # a flag may exist without an explicit on/off state.
    op.create_table(
        "flag",
        sa.Column("id", postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column("name", sa.String(), nullable=False),
        sa.Column("enabled", sa.Boolean(), nullable=True),
        sa.Column("project", postgresql.UUID(as_uuid=True), nullable=False),
        sa.ForeignKeyConstraint(
            ["project"],
            ["project.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("project", "name"),
    )
    op.create_index(
        "flag_project_name_idx", "flag", ["project", "name"], unique=False
    )
    # Typed variables that checks test against; unique per (project, name).
    op.create_table(
        "variable",
        sa.Column("id", postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column("name", sa.String(), nullable=False),
        sa.Column(
            "type",
            sa.Enum("STRING", "NUMBER", "TIMESTAMP", "SET", name="type"),
            nullable=False,
        ),
        sa.Column("project", postgresql.UUID(as_uuid=True), nullable=False),
        sa.ForeignKeyConstraint(
            ["project"],
            ["project.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("project", "name"),
    )
    op.create_index(
        "variable_project_name_idx",
        "variable",
        ["project", "name"],
        unique=False,
    )
    # A single predicate (variable <operator> value); exactly one of the
    # value_* columns is expected to be populated, matching the variable
    # type -- enforced by application code, not the schema.
    op.create_table(
        "check",
        sa.Column("id", postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column(
            "operator",
            sa.Enum(
                "EQUAL",
                "LESS_THAN",
                "LESS_OR_EQUAL",
                "GREATER_THAN",
                "GREATER_OR_EQUAL",
                "CONTAINS",
                "PERCENT",
                "REGEXP",
                "WILDCARD",
                "SUBSET",
                "SUPERSET",
                name="operator",
            ),
            nullable=False,
        ),
        sa.Column("value_string", sa.String(), nullable=True),
        sa.Column("value_number", postgresql.DOUBLE_PRECISION(), nullable=True),
        sa.Column("value_timestamp", postgresql.TIMESTAMP(), nullable=True),
        sa.Column("value_set", postgresql.ARRAY(sa.String()), nullable=True),
        sa.Column("variable", postgresql.UUID(as_uuid=True), nullable=False),
        sa.ForeignKeyConstraint(
            ["variable"],
            ["variable.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    # A flag condition: references its checks by id via a UUID array
    # rather than a join table.
    op.create_table(
        "condition",
        sa.Column("id", postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column("flag", postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column(
            "checks",
            postgresql.ARRAY(postgresql.UUID(as_uuid=True), as_tuple=True),
            nullable=True,
        ),
        sa.ForeignKeyConstraint(
            ["flag"],
            ["flag.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    # ### end Alembic commands ###
147 |
--------------------------------------------------------------------------------
/lets.yaml:
--------------------------------------------------------------------------------
1 | shell: bash
2 |
3 | env:
4 | DOCKER_BUILDKIT: "1"
5 | COMPOSE_DOCKER_CLI_BUILD: "1"
6 | CURRENT_UID:
7 | sh: echo "`id -u`:`id -g`"
8 | BUILD_VERSION:
9 | sh: git describe --dirty
10 |
11 | commands:
12 | build-target:
13 | options: |
14 | Usage: lets build-target
15 | cmd: |
16 | docker build \
17 | -t featureflags-server-${LETSOPT_TARGET} \
18 | -f Dockerfile \
19 | --target ${LETSOPT_TARGET} \
20 | .
21 |
22 | build-prod:
    description: Build server image with embedded frontend
24 | options: |
25 | Usage: lets build-prod --repo= --tag=
26 |
27 | Options:
28 | --repo= Set container repo
29 | --tag= Set container tag
30 | cmd: |
31 | docker build \
32 | -f Dockerfile \
33 | -t ${LETSOPT_REPO}:${LETSOPT_TAG} \
34 | --target prd \
35 | .
36 |
37 | build-dev:
38 | description: Build dev server image
39 | ref: build-target
40 | args: dev
41 |
42 | build-test:
43 | description: Build test server image
44 | ref: build-target
45 | args: test
46 |
47 | build-docs:
48 | description: Build docs image
49 | ref: build-target
50 | args: docs
51 |
52 | build-ui:
53 | description: Build client image
54 | ref: build-target
55 | args: assets-dev
56 |
57 | run:
58 | description: Run whole application
59 | depends:
60 | - build-dev
61 | cmd: docker-compose up web rpc client
62 |
63 | web:
64 | description: Run web server
65 | depends:
66 | - build-dev
67 | cmd: docker-compose up web
68 |
69 | rpc:
70 | description: Run grpc server
71 | depends:
72 | - build-dev
73 | cmd: docker-compose up rpc
74 |
75 | http:
76 | description: Run http server
77 | depends:
78 | - build-dev
79 | cmd: docker-compose up http
80 |
81 | test:
82 | description: Run tests
83 | depends:
84 | - build-test
85 | - postgres-test
86 | cmd: |
87 | docker compose run --rm test
88 |
89 | ldap:
90 | description: Run ldap server
91 | cmd: docker-compose up ldap
92 |
93 | postgres:
94 | description: Run postgres
95 | cmd: docker-compose up postgres
96 |
97 | postgres-test:
98 | description: Run postgres test db
99 | cmd: docker compose up -d postgres-test
100 |
101 | apply-migrations-dev:
102 | description: Apply migrations to local postgres
103 | depends: [build-dev]
104 | cmd: |
105 | docker-compose run --rm backend python3 -m featureflags alembic -- upgrade head
106 |
107 | rollback-migration-dev:
108 | description: Rollback migrations to local postgres
109 | depends: [build-dev]
110 | cmd: |
111 | docker-compose run --rm backend python3 -m featureflags alembic -- downgrade -1
112 |
113 | create-migration:
114 | description: |
115 | Create new migration file
116 | Example: lets create-migration add_date_created_column_to_user_table
117 | depends: [build-dev]
118 | cmd: |
119 | docker-compose run --rm backend python3 -m featureflags alembic -- revision --autogenerate -m ${LETS_COMMAND_ARGS}
120 |
121 | apply-seeds-dev:
122 | description: Apply seeds to local postgres
123 | cmd: |
124 | docker-compose exec postgres \
125 | psql -U postgres -d featureflags -f /var/lib/postgresql/seeds/seeds.sql
126 |
127 | ishell:
128 | description: Run app with ipython
129 | depends: [build-dev]
130 | cmd: docker-compose run --rm ishell
131 |
132 | psql:
133 | cmd: docker-compose exec postgres psql -U postgres -d featureflags
134 |
135 | gen-docs:
136 | description: Generate docs
137 | depends: [build-docs]
138 | cmd: |
139 | docker-compose run --rm docs \
140 | sphinx-build -a -b html docs docs/build
141 | echo "Docs generated at docs/build/index.html"
142 |
143 | ui:
144 | description: Build and run UI with dev server
145 | work_dir: ./ui
146 | cmd: npm run dev
147 |
148 | ui-build-dev:
149 | description: Build UI in development mode, copy static, watch changes
150 | work_dir: ./ui
151 | cmd: npm run build-dev-watch
152 |
153 | ruff:
154 | description: Run ruff and fix errors
155 | depends: [build-dev]
156 | cmd: |
157 | docker-compose run -T --rm backend ruff check featureflags --fix ${LETS_COMMAND_ARGS}
158 |
159 | ruff-diff:
160 | description: Run ruff to check diff
161 | depends: [build-dev]
162 | cmd: |
163 | docker-compose run -T --rm backend ruff check featureflags ${LETS_COMMAND_ARGS}
164 |
165 | mypy:
166 | description: Run mypy
167 | depends: [build-dev]
168 | cmd: |
169 | docker-compose run -T --rm backend mypy featureflags ${LETS_COMMAND_ARGS}
170 |
171 | black:
172 | description: Run black
173 | depends: [build-dev]
174 | cmd: |
175 | docker-compose run -T --rm backend black featureflags ${LETS_COMMAND_ARGS}
176 |
177 | fmt:
178 | description: Run black and ruff
179 | depends: [build-dev]
180 | cmd: |
181 | docker-compose run -T --rm backend black featureflags
182 | docker-compose run -T --rm backend ruff check featureflags --fix
183 |
184 | release:
185 | description: |
186 | Update version in featureflags/__init__.py
187 | Create new annotated tag
188 | Push changes to remote
189 | options: |
190 | Usage: lets release --message=
191 | Options:
      <version>             Set version
193 | --message=, -m Release message
194 | Example:
195 | lets release 1.0.0 --message="Added feature"
196 | cmd: |
197 | VERSION=${LETSOPT_VERSION} MESSAGE=${LETSOPT_MESSAGE} ./scripts/release.sh
198 |
--------------------------------------------------------------------------------
/featureflags/rpc/db.py:
--------------------------------------------------------------------------------
1 | """
2 | This module defines client -> server feedback, which is used to
3 | notify server about new projects/variables/flags
4 | """
5 | from uuid import UUID, uuid4
6 |
7 | from aiopg.sa import SAConnection
8 | from sqlalchemy import and_, select
9 | from sqlalchemy.dialects.postgresql import insert
10 |
11 | from featureflags.models import Flag, Project, Variable, VariableType
12 | from featureflags.protobuf import service_pb2
13 | from featureflags.utils import EntityCache, FlagAggStats
14 |
15 |
async def _select_project(name: str, *, conn: SAConnection) -> UUID:
    """Look up a project id by name; resolves to None when the row is absent."""
    query = select([Project.id]).where(Project.name == name)
    result = await conn.execute(query)
    return await result.scalar()
21 |
22 |
async def _insert_project(name: str, *, conn: SAConnection) -> UUID:
    """Insert a new project row (version 0); resolves to None when the
    name already exists (ON CONFLICT DO NOTHING returns no row)."""
    stmt = (
        insert(Project.__table__)
        .values({Project.id: uuid4(), Project.name: name, Project.version: 0})
        .on_conflict_do_nothing()
        .returning(Project.id)
    )
    result = await conn.execute(stmt)
    return await result.scalar()
31 |
32 |
async def _get_or_create_project(
    name: str,
    *,
    conn: SAConnection,
    entity_cache: EntityCache,
) -> UUID:
    """Resolve a project id by name, inserting the row on first sight.

    Results are memoized in *entity_cache* to avoid repeated lookups.
    """
    assert name
    cached = entity_cache.project.get(name)
    if cached is not None:
        return cached

    id_ = await _select_project(name, conn=conn)
    if id_ is None:  # not in db yet
        id_ = await _insert_project(name, conn=conn)
        if id_ is None:  # lost an insert race -- fetch the winner's row
            id_ = await _select_project(name, conn=conn)
    assert id_ is not None  # must be in db by now
    entity_cache.project[name] = id_
    return id_
50 |
51 |
async def _select_variable(
    project: UUID, name: str, *, conn: SAConnection
) -> UUID:
    """Look up a variable id by (project, name); resolves to None if absent."""
    query = select([Variable.id]).where(
        and_(Variable.project == project, Variable.name == name)
    )
    result = await conn.execute(query)
    return await result.scalar()
61 |
62 |
async def _insert_variable(
    project: UUID,
    name: str,
    type_: VariableType,
    *,
    conn: SAConnection,
) -> UUID:
    """Insert a variable row; resolves to None on a (project, name)
    conflict (ON CONFLICT DO NOTHING returns no row)."""
    row = {
        Variable.id: uuid4(),
        Variable.project: project,
        Variable.name: name,
        Variable.type: type_,
    }
    stmt = (
        insert(Variable.__table__)
        .values(row)
        .on_conflict_do_nothing()
        .returning(Variable.id)
    )
    result = await conn.execute(stmt)
    return await result.scalar()
84 |
85 |
async def _get_or_create_variable(
    project: UUID,
    name: str,
    type_: VariableType,
    *,
    conn: SAConnection,
    entity_cache: EntityCache,
) -> UUID:
    """Resolve a variable id by (project, name), inserting the row if needed.

    Results are memoized per project in *entity_cache*.
    """
    assert project and name and type_, (project, name, type_)
    cached = entity_cache.variable[project].get(name)
    if cached is not None:
        return cached

    id_ = await _select_variable(project, name, conn=conn)
    if id_ is None:  # not in db yet
        id_ = await _insert_variable(project, name, type_, conn=conn)
        if id_ is None:  # lost an insert race -- fetch the winner's row
            id_ = await _select_variable(project, name, conn=conn)
    assert id_ is not None  # must be in db by now
    entity_cache.variable[project][name] = id_
    return id_
105 |
106 |
async def _select_flag(project: UUID, name: str, *, conn: SAConnection) -> UUID:
    """Look up a flag id by (project, name); resolves to None if absent."""
    query = select([Flag.id]).where(
        and_(Flag.project == project, Flag.name == name)
    )
    result = await conn.execute(query)
    return await result.scalar()
114 |
115 |
async def _insert_flag(project: UUID, name: str, *, conn: SAConnection) -> UUID:
    """Insert a flag row; resolves to None on a (project, name) conflict
    (ON CONFLICT DO NOTHING returns no row)."""
    stmt = (
        insert(Flag.__table__)
        .values({Flag.id: uuid4(), Flag.project: project, Flag.name: name})
        .on_conflict_do_nothing()
        .returning(Flag.id)
    )
    result = await conn.execute(stmt)
    return await result.scalar()
124 |
125 |
async def _get_or_create_flag(
    project: UUID,
    name: str,
    *,
    conn: SAConnection,
    entity_cache: EntityCache,
) -> UUID:
    """Resolve a flag id by (project, name), inserting the row if needed.

    Results are memoized per project in *entity_cache*.
    """
    assert project and name, (project, name)
    cached = entity_cache.flag[project].get(name)
    if cached is not None:
        return cached

    id_ = await _select_flag(project, name, conn=conn)
    if id_ is None:  # not in db yet
        id_ = await _insert_flag(project, name, conn=conn)
        if id_ is None:  # lost an insert race -- fetch the winner's row
            id_ = await _select_flag(project, name, conn=conn)
    assert id_ is not None  # must be in db by now
    entity_cache.flag[project][name] = id_
    return id_
144 |
145 |
async def add_statistics(
    op: service_pb2.ExchangeRequest,
    *,
    conn: SAConnection,
    entity_cache: EntityCache,
    flag_agg_stats: FlagAggStats,
) -> None:
    """Record client-reported flag usage from an exchange request.

    Ensures the project, its variables and its flags exist in the
    database, then accumulates positive/negative hit counts into
    *flag_agg_stats*, keyed by flag id and interval start.
    """
    project_id = await _get_or_create_project(
        op.project,
        conn=conn,
        entity_cache=entity_cache,
    )

    for variable in op.variables:
        await _get_or_create_variable(
            project_id,
            variable.name,
            VariableType.from_pb(variable.type),
            conn=conn,
            entity_cache=entity_cache,
        )

    for usage in op.flags_usage:
        flag_id = await _get_or_create_flag(
            project_id,
            usage.name,
            conn=conn,
            entity_cache=entity_cache,
        )
        # counters is a mutable [positive, negative] pair.
        counters = flag_agg_stats[flag_id][usage.interval.ToDatetime()]
        counters[0] += usage.positive_count
        counters[1] += usage.negative_count
177 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [project]
2 | name = "evo-featureflags-server"
3 | dynamic = ["version"]
4 | description = "Feature flags server"
5 | readme = "README.rst"
6 | authors = [
7 | { name = "d.zakharchuk", email = "d.zakharchuk@smartweb.com.ua" },
8 | { name = "m.kindritskiy", email = "m.kindritskiy@smartweb.com.ua" },
9 | { name = "Vladimir Magamedov", email = "vladimir@magamedov.com" },
10 | ]
11 | dependencies = [
12 | "grpclib==0.4.6",
13 | "hiku==0.7.5",
14 | "protobuf<4.0.0",
15 | "sqlalchemy[mypy]==1.4.42",
16 | "aiopg[sa]==1.4.0",
17 | "psycopg2==2.9.7",
18 | "graphql-core==3.2.3",
19 | "prometheus-client==0.17.1",
20 | "alembic==1.12.0",
21 | "metricslog==0.1.3",
22 | "pyyaml==6.0.1",
23 | "pyjwt==2.8.0",
24 | "ldap3==2.9.1",
25 | "pydantic>=2.3.0",
26 | "pydantic-settings>=2.0.3",
27 | "setuptools==68.2.2",
28 | "fastapi>=0.103.2",
29 | "orjson>=3.9.8",
30 | "uvloop>=0.17.0",
31 | "httptools>=0.6.0",
    "uvicorn[standard]>=0.23.2",
33 | "typer>=0.9.0",
34 | "jinja2>=3.1.2",
35 | "dependency-injector>=4.41.0",
36 | "grpcio>=1.59.0",
37 | "prometheus-fastapi-instrumentator>=6.1.0",
38 | ]
39 |
40 | requires-python = ">=3.11"
41 | license = { text = "MIT" }
42 |
43 | [project.urls]
44 | Documentation = "https://featureflags.readthedocs.io/en/latest/"
45 | Repository = "https://github.com/evo-company/featureflags"
46 | Homepage = "https://github.com/evo-company/featureflags"
47 |
48 | [project.optional-dependencies]
49 | sentry = ["sentry-sdk[fastapi,grpcio]>=1.40.5"]
50 |
51 | [build-system]
52 | requires = ["pdm-backend"]
53 | build-backend = "pdm.backend"
54 |
55 | [dependency-groups]
56 | lint = [
57 | "black~=23.9.1",
58 | "ruff>=0.12.5",
59 | "mypy>=1.6.0",
60 | "types-protobuf>=4.24.0.20240106",
61 | "types-PyYAML>=6.0.12.12",
62 | "sqlalchemy-stubs>=0.4",
63 | ]
64 | test = [
65 | "pytest~=7.4",
66 | "pytest-asyncio~=0.21",
67 | "faker~=19.6.1",
68 | "tox-pdm~=0.7.0",
69 | ]
70 | dev = ["watchfiles~=0.20.0", "ipython~=8.15.0"]
71 | docs = [
72 | "sphinx>=7.1.2",
73 | "furo>=2024.8.6",
74 | "sphinx-inline-tabs>=2023.4.21",
75 | "sphinx-autobuild>=2021.3.14",
76 | ]
77 |
78 | [tool]
79 |
80 | [tool.pdm.version]
81 | source = "file"
82 | path = "featureflags/__init__.py"
83 |
84 | [tool.pdm.scripts]
85 | release = "./scripts/release.sh"
86 | test = "python -m pytest {args}"
87 | docs = "sphinx-build -b html docs docs/build"
88 | docs-dev = "sphinx-autobuild -b html docs docs/build"
89 | ruff = "ruff check featureflags {args} --fix"
90 | ruff-diff = "ruff check featureflags {args}"
91 | mypy = "mypy featureflags {args}"
92 | black = "black featureflags {args}"
93 | fmt = { composite = ["black", "ruff"] }
94 |
95 | [tool.pytest.ini_options]
96 | addopts = "-p no:warnings -p no:cacheprovider -q --tb=native"
97 | filterwarnings = ["ignore::DeprecationWarning:google.*"]
98 | log_cli = true
99 | testpaths = ["featureflags/tests"]
100 | asyncio_mode = "auto"
101 |
102 | [tool.black]
103 | line-length = 80
104 | target-version = ['py311']
105 | extend-exclude = '''
106 | /(
107 | | .git
108 | | __pycache__
109 | | __pypackages__
110 | | .venv
111 | | venv
112 | | .ve
113 | | featureflags/protobuf
114 | )/
115 | '''
116 |
117 | [tool.ruff]
118 | line-length = 80
119 | target-version = "py311"
120 |
121 | exclude = [
122 | ".bzr",
123 | ".direnv",
124 | ".eggs",
125 | ".git",
126 | ".git-rewrite",
127 | ".hg",
128 | ".mypy_cache",
129 | ".nox",
130 | ".pants.d",
131 | ".pytype",
132 | ".ruff_cache",
133 | ".svn",
134 | ".tox",
135 | ".venv",
136 | "__pypackages__",
137 | "_build",
138 | "buck-out",
139 | "build",
140 | "dist",
141 | "node_modules",
142 | ".venv",
143 | "venv",
144 | ".ve",
145 | "__pycache__",
146 | "featureflags/migrations",
147 | "featureflags/protobuf",
148 | ]
149 |
150 | [tool.ruff.lint]
151 | select = [
152 | "E", # pycodestyle errors
153 | "W", # pycodestyle warnings
154 | "F", # Pyflakes
155 | "C90", # McCabe
156 | "I", # isort
157 | "N", # pep8-naming
158 | "UP", # pyupgrade
159 | "ASYNC", # flake8-async
160 | "S", # bandit
161 | "DTZ", # flake8-datetimez
162 | "A", # flake8-builtins
163 | "B", # flake8-bugbear
164 | "C4", # flake8-comprehensions
165 | "T20", # flake8-print
166 | "SIM", # flake8-simplify
167 | "ERA", # eradicate
168 | "PL", # pylint
169 | "RUF", # ruff-specific
170 | ]
171 | ignore = ["S101", "S104", "S105", "DTZ003", "DTZ006", "A003", "B008", "PLR0913", "PLC0415"]
172 | # Allow unused variables when underscore-prefixed.
173 | dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"
174 |
175 | [tool.ruff.lint.per-file-ignores]
176 | "featureflags/tests/*" = [
177 | "A002",
178 | "E501",
179 | "S101",
180 | "S105",
181 | "S106",
182 | "PLR2004",
183 | "PLR0913",
184 | "DTZ005",
185 | "DTZ003",
186 | ]
187 |
188 | [tool.ruff.lint.mccabe]
189 | max-complexity = 10
190 |
191 | [tool.ruff.lint.isort]
192 | known-first-party = ["featureflags"]
193 |
194 | [tool.mypy]
195 | python_version = "3.11"
196 | follow_imports = "skip"
197 | pretty = true
198 | strict_optional = false
199 | warn_no_return = true
200 | disallow_incomplete_defs = true
201 | disallow_untyped_defs = true
202 | ignore_missing_imports = true
203 | plugins = ["sqlmypy", "sqlalchemy.ext.mypy.plugin"]
204 | exclude = [
205 | ".git",
206 | "__pycache__",
207 | "__pypackages__",
208 | ".venv",
209 | "venv",
210 | ".ve",
211 | "featureflags/migrations",
212 | "featureflags/protobuf",
213 | "featureflags/tests",
214 | ]
215 |
216 | [[tool.mypy.overrides]]
217 | module = "hiku.*"
218 | follow_imports = "skip"
219 |
220 | [[tool.mypy.overrides]]
221 | module = "featureflags.protobuf.*"
222 | follow_imports = "skip"
223 | disallow_untyped_decorators = false
224 | disable_error_code = ["no-untyped-def", "attr-defined"]
225 |
226 | [[tool.mypy.overrides]]
227 | module = "featureflags.graph.proto_adapter"
228 | disallow_untyped_decorators = false
229 | disable_error_code = ["no-untyped-def", "attr-defined"]
230 |
--------------------------------------------------------------------------------
/featureflags/graph/types.py:
--------------------------------------------------------------------------------
1 | from collections import defaultdict
2 | from dataclasses import dataclass
3 | from enum import Enum
4 | from typing import NamedTuple
5 | from uuid import UUID
6 |
7 |
class GraphContext(Enum):
    """Well-known keys for objects shared through the graph execution
    context (engine, session, mutation bookkeeping collections).

    NOTE(review): key semantics inferred from names — confirm against
    ``featureflags/graph/context.py`` where these are presumably bound.
    """

    DB_ENGINE = "DB_ENGINE"
    USER_SESSION = "USER_SESSION"
    LDAP_SERVICE = "LDAP_SERVICE"
    DIRTY_PROJECTS = "DIRTY_PROJECTS"
    CHANGES = "CHANGES"
    VALUES_CHANGES = "VALUES_CHANGES"
    CHECK_IDS = "CHECK_IDS"
16 |
17 |
class Operation(Enum):
    """Names of mutation operations accepted by the graph API.

    Values are the wire-level operation identifiers (snake_case strings).
    """

    DISABLE_FLAG = "disable_flag"
    ENABLE_FLAG = "enable_flag"
    ADD_CHECK = "add_check"
    ADD_CONDITION = "add_condition"
    DISABLE_CONDITION = "disable_condition"
    DISABLE_VALUE = "disable_value"
    ENABLE_VALUE = "enable_value"
    ADD_VALUE_CONDITION = "add_value_condition"
    DISABLE_VALUE_CONDITION = "disable_value_condition"
    UPDATE_VALUE_VALUE_OVERRIDE = "update_value_value_override"
29 |
30 |
class Action(Enum):
    """Kinds of change applied to a flag, recorded via `Changes`.

    Integer values look like stable persisted codes — do not renumber.
    """

    ENABLE_FLAG = 1
    DISABLE_FLAG = 2
    ADD_CONDITION = 3
    DISABLE_CONDITION = 4
    RESET_FLAG = 5
    DELETE_FLAG = 6
38 |
39 |
class ValueAction(Enum):
    """Kinds of change applied to a value, recorded via `ValuesChanges`.

    Mirrors `Action` for flags; integer values look like stable persisted
    codes — do not renumber.
    """

    ENABLE_VALUE = 1
    DISABLE_VALUE = 2
    ADD_CONDITION = 3
    DISABLE_CONDITION = 4
    RESET_VALUE = 5
    DELETE_VALUE = 6
    UPDATE_VALUE_VALUE_OVERRIDE = 7
48 |
49 |
class DirtyProjects:
    """Collects ids of projects whose state was touched during a mutation.

    Separate buckets record what kind of entity made the project dirty:
    a flag, a value, or a variable.  A project id may appear in several.
    """

    def __init__(self) -> None:
        self.by_variable: set[UUID] = set()
        self.by_value: set[UUID] = set()
        self.by_flag: set[UUID] = set()
55 |
56 |
class Changes:
    """Accumulates flag-level `Action`s, grouped per flag id.

    Insertion order is preserved both across flags (first-seen order)
    and within each flag's action list.
    """

    _data: dict[UUID, list[Action]]

    def __init__(self) -> None:
        self._data = defaultdict(list)

    def add(self, flag_id: UUID, action: Action) -> None:
        """Append one action to the given flag's history."""
        self._data[flag_id] += [action]

    def get_actions(self) -> list[tuple[UUID, list[Action]]]:
        """Return ``(flag_id, actions)`` pairs in first-seen order."""
        return [(flag_id, actions) for flag_id, actions in self._data.items()]
68 |
69 |
class ValuesChanges:
    """Accumulates value-level `ValueAction`s, grouped per value id.

    Mirrors `Changes`, but for values; insertion order is preserved.
    """

    _data: dict[UUID, list[ValueAction]]

    def __init__(self) -> None:
        self._data = defaultdict(list)

    def add(self, value_id: UUID, action: ValueAction) -> None:
        """Append one action to the given value's history."""
        self._data[value_id] += [action]

    def get_actions(self) -> list[tuple[UUID, list[ValueAction]]]:
        """Return ``(value_id, actions)`` pairs in first-seen order."""
        return [(value_id, actions) for value_id, actions in self._data.items()]
81 |
82 |
@dataclass
class LocalId:
    """Client-assigned (scope, value) identifier for an entity that may
    not have a server-side id yet (see the `checks` payloads below).
    """

    scope: str
    value: str

    # NOTE: because __hash__ is defined explicitly here, @dataclass keeps
    # it even though eq=True would otherwise set __hash__ to None — so
    # LocalId instances are usable as dict keys / set members while still
    # getting the generated field-wise __eq__.
    def __hash__(self) -> int:
        return hash((self.scope, self.value))
90 |
91 |
@dataclass
class AddCheckOp:
    """An "add check" operation parsed from its raw dict payload.

    The four ``value_*`` fields are mutually optional; which one is
    populated depends on the check's ``kind``/``operator``.
    """

    local_id: LocalId
    variable: str
    operator: int
    kind: str
    value_string: str | None = None
    value_number: int | float | None = None
    value_timestamp: str | None = None
    value_set: list | None = None

    def __init__(self, op: dict):
        raw_local = op["local_id"]
        self.local_id = LocalId(
            scope=raw_local["scope"],
            value=raw_local["value"],
        )
        self.variable = op["variable"]
        # operator arrives as a raw (possibly string) payload value
        self.operator = int(op["operator"])
        self.kind = op["kind"]
        # optional value fields default to None when absent from payload
        for field_name in (
            "value_string",
            "value_number",
            "value_timestamp",
            "value_set",
        ):
            setattr(self, field_name, op.get(field_name))
115 |
116 |
@dataclass
class AddConditionOp:
    """An "add condition" operation parsed from its raw dict payload.

    Each check either references an existing check by ``id`` or a
    not-yet-persisted one by ``local_id``.
    """

    @dataclass
    class Check:
        local_id: LocalId | None = None
        id: str | None = None

    flag_id: str
    local_id: LocalId
    checks: list[Check]
    position: int

    def __init__(self, op: dict):
        self.flag_id = op["flag_id"]
        self.position = op.get("position", 0)
        raw_local = op["local_id"]
        self.local_id = LocalId(
            scope=raw_local["scope"],
            value=raw_local["value"],
        )
        self.checks = [self._parse_check(raw) for raw in op["checks"]]

    @classmethod
    def _parse_check(cls, raw: dict) -> "AddConditionOp.Check":
        # A check carries a local_id only when the key is present;
        # otherwise it references an already-persisted check by id.
        has_local = "local_id" in raw
        return cls.Check(
            local_id=LocalId(
                scope=raw["local_id"]["scope"],
                value=raw["local_id"]["value"],
            )
            if has_local
            else None,
            id=raw.get("id"),
        )
148 |
149 |
@dataclass
class AddValueConditionOp:
    """An "add value condition" operation parsed from its raw dict payload.

    Mirrors `AddConditionOp`, but targets a value (``value_id``) instead
    of a flag.
    """

    @dataclass
    class Check:
        local_id: LocalId | None = None
        id: str | None = None

    value_id: str
    local_id: LocalId
    checks: list[Check]
    position: int

    def __init__(self, op: dict):
        self.value_id = op["value_id"]
        self.position = op.get("position", 0)
        raw_local = op["local_id"]
        self.local_id = LocalId(
            scope=raw_local["scope"],
            value=raw_local["value"],
        )
        self.checks = [self._parse_check(raw) for raw in op["checks"]]

    @classmethod
    def _parse_check(cls, raw: dict) -> "AddValueConditionOp.Check":
        # A check carries a local_id only when the key is present;
        # otherwise it references an already-persisted check by id.
        has_local = "local_id" in raw
        return cls.Check(
            local_id=LocalId(
                scope=raw["local_id"]["scope"],
                value=raw["local_id"]["value"],
            )
            if has_local
            else None,
            id=raw.get("id"),
        )
181 |
182 |
class AuthResult(NamedTuple):
    """Outcome of an auth operation; carries an optional error message."""

    error: str | None = None
185 |
186 |
class SaveFlagResult(NamedTuple):
    """Outcome of saving a flag; carries an optional list of error messages."""

    errors: list[str] | None = None
189 |
190 |
class ResetFlagResult(NamedTuple):
    """Outcome of resetting a flag; carries an optional error message."""

    error: str | None = None
193 |
194 |
class DeleteFlagResult(NamedTuple):
    """Outcome of deleting a flag; carries an optional error message."""

    error: str | None = None
197 |
198 |
class SaveValueResult(NamedTuple):
    """Outcome of saving a value; carries an optional list of error messages."""

    errors: list[str] | None = None
201 |
202 |
class ResetValueResult(NamedTuple):
    """Outcome of resetting a value; carries an optional error message."""

    error: str | None = None
205 |
206 |
class DeleteValueResult(NamedTuple):
    """Outcome of deleting a value; carries an optional error message."""

    error: str | None = None
209 |
210 |
class DeleteVariableResult(NamedTuple):
    """Outcome of deleting a variable; carries an optional error message."""

    error: str | None = None
213 |
214 |
class DeleteProjectResult(NamedTuple):
    """Outcome of deleting a project; carries an optional error message."""

    error: str | None = None
217 |
--------------------------------------------------------------------------------