├── .env.example
├── .github
├── FUNDING.yml
├── dependabot.yml
├── labeler.yml
├── release.yml
└── workflows
│ ├── builds.yml
│ ├── docs.yml
│ ├── publish.yml
│ ├── pull_requests.yml
│ ├── push.yml
│ ├── scripts.yml
│ ├── style.yml
│ └── tests.yml
├── .gitignore
├── .pre-commit-config.yaml
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── LICENSE
├── Makefile
├── README.md
├── client
├── .conda
│ └── meta.yaml
├── CONTRIBUTING.md
├── LICENSE
├── Makefile
├── README.md
├── docs
│ ├── Makefile
│ ├── make.bat
│ └── source
│ │ ├── _static
│ │ ├── css
│ │ │ └── custom.css
│ │ ├── images
│ │ │ ├── favicon.ico
│ │ │ └── logo.png
│ │ └── js
│ │ │ └── custom.js
│ │ ├── changelog.rst
│ │ ├── client.rst
│ │ ├── conf.py
│ │ ├── index.rst
│ │ └── installing.rst
├── pyproject.toml
├── pyroclient
│ ├── __init__.py
│ ├── client.py
│ └── exceptions.py
├── setup.py
└── tests
│ ├── conftest.py
│ ├── test_client.py
│ └── test_exceptions.py
├── docker-compose.dev.yml
├── docker-compose.yml
├── poetry.lock
├── pyproject.toml
├── scripts
├── dbdiagram.txt
├── localstack
│ └── setup-s3.sh
├── pg_extract.sh
├── requirements.txt
└── test_e2e.py
└── src
├── Dockerfile
├── alembic.ini
├── app
├── api
│ ├── api_v1
│ │ ├── endpoints
│ │ │ ├── cameras.py
│ │ │ ├── detections.py
│ │ │ ├── login.py
│ │ │ ├── organizations.py
│ │ │ ├── sequences.py
│ │ │ ├── users.py
│ │ │ └── webhooks.py
│ │ └── router.py
│ └── dependencies.py
├── core
│ ├── config.py
│ └── security.py
├── crud
│ ├── __init__.py
│ ├── base.py
│ ├── crud_camera.py
│ ├── crud_detection.py
│ ├── crud_organization.py
│ ├── crud_sequence.py
│ ├── crud_user.py
│ └── crud_webhook.py
├── db.py
├── main.py
├── models.py
├── schemas
│ ├── __init__.py
│ ├── base.py
│ ├── cameras.py
│ ├── detections.py
│ ├── login.py
│ ├── organizations.py
│ ├── sequences.py
│ ├── users.py
│ └── webhooks.py
└── services
│ ├── __init__.py
│ ├── storage.py
│ ├── telegram.py
│ └── telemetry.py
├── migrations
├── README.md
├── env.py
├── script.py.mako
└── versions
│ ├── 2024_05_30_1200-f84a0ed81bdc_init.py
│ └── 2024_06_17_1521-4265426f8438_create_stes_table.py
└── tests
├── conftest.py
├── endpoints
├── test_cameras.py
├── test_detections.py
├── test_login.py
├── test_organizations.py
├── test_sequences.py
├── test_users.py
└── test_webhooks.py
├── services
├── test_storage.py
└── test_telegram.py
├── test_dependencies.py
└── test_security.py
/.env.example:
--------------------------------------------------------------------------------
1 | # Database
2 | POSTGRES_DB=postgres
3 | POSTGRES_USER=postgres
4 | POSTGRES_PASSWORD='An0th3rDumm1PassW0rdz!'
5 |
6 | # Storage
7 | S3_ACCESS_KEY='na'
8 | S3_SECRET_KEY='na'
9 | S3_REGION='us-east-1'
10 | S3_ENDPOINT_URL='http://localstack:4566'
11 |
12 | # Initialization
13 | SUPERADMIN_LOGIN='pyroadmin'
14 | SUPERADMIN_PWD='LetsProtectForests!'
15 | SUPERADMIN_ORG='pyronear'
16 |
17 | # Optional variables
18 | JWT_SECRET=
19 | SENTRY_DSN=
20 | SERVER_NAME=
21 | POSTHOG_HOST='https://eu.posthog.com'
22 | POSTHOG_KEY=
23 | SUPPORT_EMAIL=
24 | TELEGRAM_TOKEN=
25 |
26 | # Production-only
27 | ACME_EMAIL=
28 | BACKEND_HOST=
29 |
--------------------------------------------------------------------------------
/.github/FUNDING.yml:
--------------------------------------------------------------------------------
1 | # These are supported funding model platforms
2 |
3 | github: pyronear
4 | patreon: # Replace with a single Patreon username
5 | open_collective: # Replace with an OpenCollective account
6 | ko_fi: # Replace with a single Ko-fi username
7 | tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
8 | community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
9 | liberapay: # Replace with a single Liberapay username
10 | issuehunt: # Replace with a single IssueHunt username
11 | otechie: # Replace with a single Otechie username
12 | custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']
13 |
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | # To get started with Dependabot version updates, you'll need to specify which
2 | # package ecosystems to update and where the package manifests are located.
3 | # Please see the documentation for all configuration options:
4 | # https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file
5 |
6 | version: 2
7 | updates:
8 | - package-ecosystem: "github-actions"
9 | directory: "/"
10 | schedule:
11 | interval: "weekly"
12 | - package-ecosystem: "pip"
13 | directory: "/"
14 | schedule:
15 | interval: "daily"
16 | allow:
17 | - dependency-name: "ruff"
18 | - dependency-name: "mypy"
19 | - dependency-name: "pre-commit"
20 | - dependency-name: "fastapi"
21 | - dependency-name: "sqlmodel"
22 | - dependency-name: "uvicorn"
23 | - dependency-name: "pytest"
24 | - package-ecosystem: "docker"
25 | directory: "/"
26 | schedule:
27 | interval: "daily"
28 | allow:
29 | - dependency-name: "ghcr.io/astral-sh/uv"
30 | - dependency-name: "localstack/localstack"
31 |
--------------------------------------------------------------------------------
/.github/labeler.yml:
--------------------------------------------------------------------------------
1 | 'ext: client/tests':
2 | - changed-files:
3 | - any-glob-to-any-file: client/tests/*
4 |
5 | 'ext: client/docs':
6 | - changed-files:
7 | - any-glob-to-any-file: client/docs/*
8 |
9 | 'ext: client':
10 | - changed-files:
11 | - any-glob-to-any-file: client/pyroclient/*
12 |
13 | 'ext: tests':
14 | - changed-files:
15 | - any-glob-to-any-file: src/tests/*
16 |
17 | 'ext: scripts':
18 | - changed-files:
19 | - any-glob-to-any-file: scripts/*
20 |
21 | 'module: core':
22 | - changed-files:
23 | - any-glob-to-any-file:
24 | - src/app/main.py
25 | - src/app/core/*
26 |
27 | 'module: database':
28 | - changed-files:
29 | - any-glob-to-any-file: src/app/db.py
30 |
31 | 'module: schemas':
32 | - changed-files:
33 | - any-glob-to-any-file: src/app/schemas/*
34 |
35 | 'module: models':
36 | - changed-files:
37 | - any-glob-to-any-file: src/app/models.py
38 |
39 | 'module: services':
40 | - changed-files:
41 | - any-glob-to-any-file: src/app/services/*
42 |
43 | 'module: crud':
44 | - changed-files:
45 | - any-glob-to-any-file: src/app/crud/base.py
46 |
47 | 'endpoint: cameras':
48 | - changed-files:
49 | - any-glob-to-any-file:
50 | - src/app/api/*/endpoints/cameras.py
51 |           - src/app/crud/crud_camera.py
52 |
53 | 'endpoint: detections':
54 | - changed-files:
55 | - any-glob-to-any-file:
56 | - src/app/api/*/endpoints/detections.py
57 |           - src/app/crud/crud_detection.py
58 |
59 | 'endpoint: sequences':
60 | - changed-files:
61 | - any-glob-to-any-file:
62 | - src/app/api/*/endpoints/sequences.py
63 |           - src/app/crud/crud_sequence.py
64 |
65 | 'endpoint: login':
66 | - changed-files:
67 | - any-glob-to-any-file: src/app/api/*/endpoints/login.py
68 |
69 | 'endpoint: users':
70 | - changed-files:
71 | - any-glob-to-any-file:
72 | - src/app/api/*/endpoints/users.py
73 |           - src/app/crud/crud_user.py
74 |
75 | 'endpoint: organizations':
76 | - changed-files:
77 | - any-glob-to-any-file:
78 | - src/app/api/*/endpoints/organizations.py
79 |           - src/app/crud/crud_organization.py
80 |
81 | 'endpoint: webhooks':
82 | - changed-files:
83 | - any-glob-to-any-file:
84 | - src/app/api/*/endpoints/webhooks.py
85 |           - src/app/crud/crud_webhook.py
86 |
87 |
88 | 'topic: build':
89 | - changed-files:
90 | - any-glob-to-any-file:
91 | - pyproject.toml
92 | - poetry.lock
93 | - client/pyproject.toml
94 | - client/setup.py
95 |
96 | 'topic: migration':
97 | - changed-files:
98 | - any-glob-to-any-file:
99 | - src/alembic.ini
100 | - src/migrations/*
101 |
102 | 'topic: ci':
103 | - changed-files:
104 | - any-glob-to-any-file: .github/*
105 |
106 | 'topic: docker':
107 | - changed-files:
108 | - any-glob-to-any-file:
109 | - docker-compose.*
110 | - src/Dockerfile
111 | - .env.example
112 |
113 | 'topic: docs':
114 | - changed-files:
115 | - any-glob-to-any-file:
116 | - README.md
117 | - CONTRIBUTING.md
118 | - CODE_OF_CONDUCT.md
119 | - client/README.md
120 | - client/CONTRIBUTING.md
121 | - scripts/dbdiagram.txt
122 |
123 | 'topic: style':
124 | - changed-files:
125 | - any-glob-to-any-file: .pre-commit-config.yaml
126 |
--------------------------------------------------------------------------------
/.github/release.yml:
--------------------------------------------------------------------------------
1 | changelog:
2 | exclude:
3 | labels:
4 | - ignore-for-release
5 | categories:
6 | - title: Breaking Changes 🛠
7 | labels: ["type: breaking change"]
8 | - title: New Features ✨
9 | labels: ["type: feat"]
10 | - title: Bug Fixes 🐛
11 | labels: ["type: fix"]
12 | - title: Dependencies
13 | labels: ["dependencies"]
14 | - title: Documentation 📖
15 | labels: ["topic: docs", "ext: docs"]
16 | - title: Improvements
17 | labels: ["type: improvement"]
18 | - title: Other changes
19 | labels: ["*"]
20 |
--------------------------------------------------------------------------------
/.github/workflows/builds.yml:
--------------------------------------------------------------------------------
1 | name: builds
2 |
3 | on:
4 | push:
5 | branches: main
6 | pull_request:
7 | branches: main
8 |
9 | env:
10 | PYTHON_VERSION: "3.11"
11 | UV_VERSION: "0.5.13"
12 | POETRY_VERSION: "1.8.3"
13 |
14 |
15 | jobs:
16 | docker:
17 | runs-on: ubuntu-latest
18 | steps:
19 | - uses: actions/checkout@v4
20 | - uses: actions/setup-python@v5
21 | with:
22 | python-version: ${{ env.PYTHON_VERSION }}
23 | architecture: x64
24 | - uses: abatilo/actions-poetry@v4
25 | with:
26 | poetry-version: ${{ env.POETRY_VERSION }}
27 | - name: Resolve dependencies
28 | run: poetry export -f requirements.txt --without-hashes --output requirements.txt
29 | - name: Build, run & check docker
30 | env:
31 | SUPERADMIN_LOGIN: dummy_login
32 | SUPERADMIN_PWD: dummy&P@ssw0rd!
33 | SUPERADMIN_ORG: dummyorga
34 | POSTGRES_USER: dummy_pg_user
35 | POSTGRES_PASSWORD: dummy_pg_pwd
36 | POSTGRES_DB: dummy_pg_db
37 | run: |
38 | docker compose up -d --build --wait
39 | docker compose logs
40 | curl http://localhost:5050/status
41 |
42 | client:
43 | runs-on: ${{ matrix.os }}
44 | strategy:
45 | fail-fast: false
46 | matrix:
47 | os: [ubuntu-latest, macos-latest, windows-latest]
48 | python: [3.8, 3.9, '3.10', 3.11, 3.12]
49 | exclude:
50 | - os: macos-latest
51 | python: 3.8
52 | - os: macos-latest
53 | python: 3.9
54 | - os: macos-latest
55 | python: '3.10'
56 | steps:
57 | - uses: actions/checkout@v4
58 | - uses: actions/setup-python@v5
59 | with:
60 | python-version: ${{ matrix.python }}
61 | architecture: x64
62 | - uses: astral-sh/setup-uv@v6
63 | with:
64 | version: ${{ env.UV_VERSION }}
65 | - name: Install project
66 | run: |
67 | uv pip install --system -e client/.
68 | python -c "import pyroclient; print(pyroclient.__version__)"
69 |
--------------------------------------------------------------------------------
/.github/workflows/docs.yml:
--------------------------------------------------------------------------------
1 | name: docs
2 | on:
3 | push:
4 | branches: main
5 |
6 | env:
7 | UV_VERSION: "0.5.13"
8 |
9 | jobs:
10 | gh-pages:
11 | runs-on: ubuntu-latest
12 | steps:
13 | - uses: actions/checkout@v4
14 | with:
15 | persist-credentials: false
16 | - uses: actions/setup-python@v5
17 | with:
18 | python-version: 3.9
19 | architecture: x64
20 | - uses: astral-sh/setup-uv@v6
21 | with:
22 | version: ${{ env.UV_VERSION }}
23 | - name: Install dependencies
24 | run: uv pip install --system -e "client/.[docs]"
25 |
26 | - name: Build documentation
27 | run: sphinx-build client/docs/source client/docs/build -a -v
28 |
29 | - name: Documentation sanity check
30 |       run: test -e client/docs/build/index.html || exit 1
31 |
32 | - name: Install SSH Client 🔑
33 | uses: webfactory/ssh-agent@v0.9.1
34 | with:
35 | ssh-private-key: ${{ secrets.SSH_DEPLOY_KEY }}
36 |
37 | - name: Deploy to Github Pages
38 | uses: JamesIves/github-pages-deploy-action@v4.7.3
39 | with:
40 | branch: gh-pages
41 | folder: 'client/docs/build'
42 | commit-message: '[skip ci] Documentation updates'
43 | clean: true
44 |
--------------------------------------------------------------------------------
/.github/workflows/publish.yml:
--------------------------------------------------------------------------------
1 | name: publish
2 |
3 | on:
4 | release:
5 | types: [ published ]
6 |
7 | env:
8 | PYTHON_VERSION: "3.11"
9 | UV_VERSION: "0.5.13"
10 |
11 | jobs:
12 | pypi:
13 | if: "!github.event.release.prerelease"
14 | runs-on: ubuntu-latest
15 | steps:
16 | - uses: actions/checkout@v4
17 | - uses: actions/setup-python@v5
18 | with:
19 | python-version: ${{ env.PYTHON_VERSION }}
20 | architecture: x64
21 | - uses: astral-sh/setup-uv@v6
22 | with:
23 | version: ${{ env.UV_VERSION }}
24 | - name: Install dependencies
25 | run: uv pip install --system setuptools wheel twine --upgrade
26 | - name: Build and publish
27 | env:
28 | TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }}
29 | TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
30 | run: |
31 |         echo "BUILD_VERSION=$(echo ${GITHUB_REF#refs/*/} | cut -c 2-)" >> $GITHUB_ENV
32 | cd client && python setup.py sdist bdist_wheel && cd ..
33 | twine check client/dist/*
34 | twine upload client/dist/*
35 |
36 | pypi-check:
37 | if: "!github.event.release.prerelease"
38 |     needs: pypi
39 | runs-on: ${{ matrix.os }}
40 | strategy:
41 | matrix:
42 | os: [ubuntu-latest, macos-latest, windows-latest]
43 | python: [3.8, 3.9, '3.10', 3.11, 3.12]
44 | steps:
45 | - uses: actions/checkout@v4
46 | - uses: actions/setup-python@v5
47 | with:
48 | python-version: ${{ matrix.python }}
49 | architecture: x64
50 | - uses: astral-sh/setup-uv@v6
51 | with:
52 | version: ${{ env.UV_VERSION }}
53 | - name: Install package
54 | run: |
55 | uv pip install --system pyroclient
56 | python -c "import pyroclient; print(pyroclient.__version__)"
57 |
58 | conda:
59 | if: "!github.event.release.prerelease"
60 | runs-on: ubuntu-latest
61 | steps:
62 | - uses: actions/checkout@v4
63 | - name: Miniconda setup
64 | uses: conda-incubator/setup-miniconda@v3
65 | with:
66 | auto-update-conda: true
67 | python-version: ${{ env.PYTHON_VERSION }}
68 | - name: Install dependencies
69 | shell: bash -el {0}
70 | run: conda install -y conda-build conda-verify anaconda-client
71 | - name: Build and publish
72 | shell: bash -el {0}
73 | env:
74 | ANACONDA_API_TOKEN: ${{ secrets.ANACONDA_TOKEN }}
75 | run: |
76 |         echo "BUILD_VERSION=$(echo ${GITHUB_REF#refs/*/} | cut -c 2-)" >> $GITHUB_ENV
77 | cd client && python setup.py sdist && cd ..
78 | mkdir client/conda-dist
79 | cd client && conda-build .conda/ --output-folder conda-dist && cd ..
80 | ls -l client/conda-dist/noarch/*tar.bz2
81 | anaconda upload client/conda-dist/noarch/*tar.bz2 -u pyronear
82 |
83 | conda-check:
84 | if: "!github.event.release.prerelease"
85 |     needs: conda
86 | runs-on: ${{ matrix.os }}
87 | strategy:
88 | matrix:
89 | os: [ubuntu-latest, macos-latest, windows-latest]
90 | python: [3.8, 3.9, '3.10', 3.11, 3.12]
91 | steps:
92 | - name: Miniconda setup
93 | uses: conda-incubator/setup-miniconda@v3
94 | with:
95 | auto-update-conda: true
96 | python-version: ${{ matrix.python }}
97 | - name: Install package
98 | shell: bash -el {0}
99 | run: |
100 | conda install -c pyronear pyroclient
101 | python -c "import pyroclient; print(pyroclient.__version__)"
102 |
--------------------------------------------------------------------------------
/.github/workflows/pull_requests.yml:
--------------------------------------------------------------------------------
1 | name: pull_requests
2 |
3 | on:
4 | pull_request:
5 | branches: main
6 |
7 | env:
8 | UV_VERSION: "0.5.13"
9 |
10 | jobs:
11 | docs-client:
12 | runs-on: ubuntu-latest
13 | steps:
14 | - uses: actions/checkout@v4
15 | with:
16 | persist-credentials: false
17 | - uses: actions/setup-python@v5
18 | with:
19 | python-version: 3.9
20 | architecture: x64
21 | - uses: astral-sh/setup-uv@v6
22 | with:
23 | version: ${{ env.UV_VERSION }}
24 | - name: Install dependencies
25 | run: uv pip install --system -e "client/.[docs]"
26 |
27 | - name: Build documentation
28 | run: sphinx-build client/docs/source client/docs/build -a -v
29 |
30 | - name: Documentation sanity check
31 |       run: test -e client/docs/build/index.html || exit 1
32 |
33 | triage:
34 | permissions:
35 | contents: read
36 | pull-requests: write
37 | runs-on: ubuntu-latest
38 | steps:
39 | - uses: actions/labeler@v5
40 | with:
41 | repo-token: "${{ secrets.GITHUB_TOKEN }}"
42 |
--------------------------------------------------------------------------------
/.github/workflows/push.yml:
--------------------------------------------------------------------------------
1 | name: push
2 | on:
3 | push:
4 | branches: main
5 |
6 | env:
7 | BACKEND_IMAGE_NAME: alert-api
8 | DOCKERHUB_USER: ${{ secrets.DOCKERHUB_LOGIN }}
9 | PYTHON_VERSION: "3.11"
10 | POETRY_VERSION: "1.8.3"
11 |
12 | jobs:
13 | docker:
14 | runs-on: ubuntu-latest
15 | steps:
16 | - uses: actions/checkout@v4
17 | - uses: actions/setup-python@v5
18 | with:
19 | python-version: ${{ env.PYTHON_VERSION }}
20 | architecture: x64
21 | - uses: abatilo/actions-poetry@v4
22 | with:
23 | poetry-version: ${{ env.POETRY_VERSION }}
24 | - name: Resolve dependencies
25 | run: poetry export -f requirements.txt --without-hashes --output requirements.txt
26 | - name: Build docker
27 | run: docker build -f src/Dockerfile . -t $DOCKERHUB_USER/$BACKEND_IMAGE_NAME:latest
28 | - name: Login to DockerHub
29 | uses: docker/login-action@v3
30 | with:
31 | username: ${{ secrets.DOCKERHUB_LOGIN }}
32 | password: ${{ secrets.DOCKERHUB_PW }}
33 | - name: Push to hub
34 | run: docker push $DOCKERHUB_USER/$BACKEND_IMAGE_NAME:latest
35 | - name: Login to GHCR
36 | uses: docker/login-action@v3
37 | with:
38 | registry: ghcr.io
39 | username: ${{ github.repository_owner }}
40 | password: ${{ secrets.GITHUB_TOKEN }}
41 | - name: Push to container registry
42 | run: |
43 | IMAGE_ID=ghcr.io/${{ github.repository_owner }}/$BACKEND_IMAGE_NAME
44 | IMAGE_ID=$(echo $IMAGE_ID | tr '[A-Z]' '[a-z]')
45 | docker tag $DOCKERHUB_USER/$BACKEND_IMAGE_NAME:latest $IMAGE_ID:latest
46 | docker push $IMAGE_ID:latest
47 |
48 | deploy-dev:
49 | needs: docker
50 | runs-on: ubuntu-latest
51 | steps:
52 | - uses: appleboy/ssh-action@v1.2.2
53 | with:
54 | host: ${{ secrets.SSH_DEV_HOST }}
55 | username: ${{ secrets.SSH_DEV_USERNAME }}
56 | key: ${{ secrets.SSH_DEPLOY_DEV }}
57 | script: |
58 | # Ensure we have max disk space
59 | docker rmi -f $(docker images -f "dangling=true" -q)
60 | docker volume rm -f $(docker volume ls -f "dangling=true" -q)
61 | # Update the service
62 | docker compose pull backend
63 | docker compose stop backend && docker compose up -d --wait
64 | # Check update
65 | docker inspect -f '{{ .Created }}' $(docker compose images -q backend)
66 | # Clean up
67 | docker rmi -f $(docker images -f "dangling=true" -q)
68 | docker volume rm -f $(docker volume ls -f "dangling=true" -q)
69 | - name: Ping server
70 | env:
71 | DEV_ENDPOINT: ${{ secrets.DEV_ENDPOINT }}
72 | run: sleep 10 && curl $DEV_ENDPOINT
73 |
--------------------------------------------------------------------------------
/.github/workflows/scripts.yml:
--------------------------------------------------------------------------------
1 | name: scripts
2 |
3 | on:
4 | push:
5 | branches: main
6 | pull_request:
7 | branches: main
8 |
9 | env:
10 | PYTHON_VERSION: "3.11"
11 | UV_VERSION: "0.5.13"
12 | POETRY_VERSION: "1.8.3"
13 |
14 | jobs:
15 | end-to-end:
16 | runs-on: ubuntu-latest
17 | steps:
18 | - uses: actions/checkout@v4
19 | - uses: actions/setup-python@v5
20 | with:
21 | python-version: ${{ env.PYTHON_VERSION }}
22 | architecture: x64
23 | - uses: abatilo/actions-poetry@v4
24 | with:
25 | poetry-version: ${{ env.POETRY_VERSION }}
26 | - uses: astral-sh/setup-uv@v6
27 | with:
28 | version: ${{ env.UV_VERSION }}
29 | - name: Resolve dependencies
30 | run: |
31 | poetry export -f requirements.txt --without-hashes --output requirements.txt
32 | uv pip install --system -r scripts/requirements.txt
33 | - name: Run the backend & the test
34 | env:
35 |         SUPERADMIN_LOGIN: dummy_login
36 |         SUPERADMIN_PWD: dummy&P@ssw0rd!
37 | POSTGRES_USER: dummy_pg_user
38 | POSTGRES_PASSWORD: dummy_pg_pwd
39 | POSTGRES_DB: dummy_pg_db
40 | run: |
41 | docker compose -f docker-compose.dev.yml up -d --build --wait
42 | python scripts/test_e2e.py
43 |
--------------------------------------------------------------------------------
/.github/workflows/style.yml:
--------------------------------------------------------------------------------
1 | name: style
2 |
3 | on:
4 | push:
5 | branches: main
6 | pull_request:
7 | branches: main
8 |
9 | env:
10 | PYTHON_VERSION: "3.11"
11 | UV_VERSION: "0.5.13"
12 | POETRY_VERSION: "1.8.3"
13 |
14 | jobs:
15 | ruff:
16 | runs-on: ubuntu-latest
17 | steps:
18 | - uses: actions/checkout@v4
19 | - uses: actions/setup-python@v5
20 | with:
21 | python-version: ${{ env.PYTHON_VERSION }}
22 | architecture: x64
23 | - uses: abatilo/actions-poetry@v4
24 | with:
25 | poetry-version: ${{ env.POETRY_VERSION }}
26 | - uses: astral-sh/setup-uv@v6
27 | with:
28 | version: ${{ env.UV_VERSION }}
29 | - name: Install dependencies
30 | run: |
31 | poetry export -f requirements.txt --without-hashes --only quality --output requirements.txt
32 | uv pip install --system -r requirements.txt
33 | - name: Run ruff
34 | run: |
35 | ruff --version
36 | ruff format --check --diff .
37 | ruff check --diff .
38 |
39 | mypy:
40 | runs-on: ubuntu-latest
41 | steps:
42 | - uses: actions/checkout@v4
43 | - uses: actions/setup-python@v5
44 | with:
45 | python-version: ${{ env.PYTHON_VERSION }}
46 | architecture: x64
47 | - uses: abatilo/actions-poetry@v4
48 | with:
49 | poetry-version: ${{ env.POETRY_VERSION }}
50 | - uses: astral-sh/setup-uv@v6
51 | with:
52 | version: ${{ env.UV_VERSION }}
53 | - name: Install dependencies
54 | run: |
55 | poetry export -f requirements.txt --without-hashes --with quality --output requirements.txt
56 | uv pip install --system -r requirements.txt
57 | - name: Run mypy
58 | run: |
59 | mypy --version
60 | mypy
61 |
62 | mypy-client:
63 | runs-on: ubuntu-latest
64 | steps:
65 | - uses: actions/checkout@v4
66 | - uses: actions/setup-python@v5
67 | with:
68 | python-version: ${{ env.PYTHON_VERSION }}
69 | architecture: x64
70 | - uses: astral-sh/setup-uv@v6
71 | with:
72 | version: ${{ env.UV_VERSION }}
73 | - name: Install dependencies
74 | run: uv pip install --system -e "client/.[quality]"
75 | - name: Run mypy
76 | run: |
77 | mypy --version
78 | cd client && mypy
79 |
80 | precommit-hooks:
81 | runs-on: ubuntu-latest
82 | steps:
83 | - uses: actions/checkout@v4
84 | - uses: actions/setup-python@v5
85 | with:
86 | python-version: ${{ env.PYTHON_VERSION }}
87 | architecture: x64
88 | - uses: abatilo/actions-poetry@v4
89 | with:
90 | poetry-version: ${{ env.POETRY_VERSION }}
91 | - uses: astral-sh/setup-uv@v6
92 | with:
93 | version: ${{ env.UV_VERSION }}
94 | - name: Install dependencies
95 | run: |
96 | poetry export -f requirements.txt --without-hashes --only quality --output requirements.txt
97 | uv pip install --system -r requirements.txt
98 | - name: Run pre-commit hooks
99 | run: |
100 | git checkout -b temp
101 | pre-commit install
102 | pre-commit --version
103 | pre-commit run --all-files
104 |
--------------------------------------------------------------------------------
/.github/workflows/tests.yml:
--------------------------------------------------------------------------------
1 | name: tests
2 |
3 | on:
4 | push:
5 | branches: main
6 | pull_request:
7 | branches: main
8 |
9 | env:
10 | PYTHON_VERSION: "3.11"
11 | UV_VERSION: "0.5.13"
12 | POETRY_VERSION: "1.8.3"
13 |
14 | jobs:
15 | pytest:
16 | runs-on: ubuntu-latest
17 | steps:
18 | - uses: actions/checkout@v4
19 | - uses: actions/setup-python@v5
20 | with:
21 | python-version: ${{ env.PYTHON_VERSION }}
22 | architecture: x64
23 | - uses: abatilo/actions-poetry@v4
24 | with:
25 | poetry-version: ${{ env.POETRY_VERSION }}
26 | - name: Resolve dependencies
27 | run: poetry export -f requirements.txt --without-hashes --with test --output requirements.txt
28 | - name: Run the backend & the test
29 | env:
30 |         SUPERADMIN_LOGIN: dummy_login
31 |         SUPERADMIN_PWD: dummy&P@ssw0rd!
32 | POSTGRES_USER: dummy_pg_user
33 | POSTGRES_PASSWORD: dummy_pg_pwd
34 | POSTGRES_DB: dummy_pg_db
35 | run: |
36 | docker compose -f docker-compose.dev.yml up -d --build --wait
37 | docker compose -f docker-compose.dev.yml exec -T backend pytest --cov=app --cov-report xml tests/
38 | docker compose -f docker-compose.dev.yml cp backend:/app/coverage.xml ./coverage-src.xml
39 | - name: Upload coverage to Codecov
40 | uses: codecov/codecov-action@v5
41 | with:
42 | token: ${{ secrets.CODECOV_TOKEN }}
43 | file: ./coverage-src.xml
44 | flags: backend
45 | fail_ci_if_error: true
46 |
47 | pytest-client:
48 | runs-on: ubuntu-latest
49 | steps:
50 | - uses: actions/checkout@v4
51 | - uses: actions/setup-python@v5
52 | with:
53 | python-version: ${{ env.PYTHON_VERSION }}
54 | architecture: x64
55 | - uses: abatilo/actions-poetry@v4
56 | with:
57 | poetry-version: ${{ env.POETRY_VERSION }}
58 | - uses: astral-sh/setup-uv@v6
59 | with:
60 | version: ${{ env.UV_VERSION }}
61 | - name: Resolve dependencies
62 | run: |
63 | poetry export -f requirements.txt --without-hashes --output requirements.txt
64 | uv pip install --system -e "client/.[test]"
65 | - name: Run the backend & the test
66 | env:
67 |         SUPERADMIN_LOGIN: dummy_login
68 |         SUPERADMIN_PWD: dummy&P@ssw0rd!
69 | POSTGRES_USER: dummy_pg_user
70 | POSTGRES_PASSWORD: dummy_pg_pwd
71 | POSTGRES_DB: dummy_pg_db
72 | run: |
73 | docker compose -f docker-compose.dev.yml up -d --build --wait
74 | cd client && pytest --cov=pyroclient --cov-report xml tests/
75 | - name: Upload coverage to Codecov
76 | uses: codecov/codecov-action@v5
77 | with:
78 | token: ${{ secrets.CODECOV_TOKEN }}
79 | file: ./client/coverage.xml
80 | flags: client
81 | fail_ci_if_error: true
82 |
83 | headers:
84 | runs-on: ${{ matrix.os }}
85 | strategy:
86 | matrix:
87 | os: [ubuntu-latest]
88 | steps:
89 | - uses: actions/checkout@v4
90 | with:
91 | persist-credentials: false
92 | - name: Check the headers
93 | uses: frgfm/validate-python-headers@main
94 | with:
95 | license: 'Apache-2.0'
96 | owner: 'Pyronear'
97 | starting-year: 2020
98 | folders: 'src,client,.github'
99 | ignore-files: 'version.py,__init__.py'
100 | ignore-folders: 'client/tests/,src/tests/,src/migrations/versions/'
101 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # IDE
2 | .idea/
3 | *.iml
4 | *.iws
5 | .vscode
6 |
7 | # Mac
8 | .DS_Store
9 |
10 | # Byte-compiled / optimized / DLL files
11 | __pycache__/
12 | *.py[cod]
13 | *$py.class
14 |
15 | # C extensions
16 | *.so
17 |
18 | # Distribution / packaging
19 | .Python
20 | build/
21 | develop-eggs/
22 | dist/
23 | downloads/
24 | eggs/
25 | .eggs/
26 | lib/
27 | lib64/
28 | parts/
29 | sdist/
30 | var/
31 | wheels/
32 | pip-wheel-metadata/
33 | share/python-wheels/
34 | *.egg-info/
35 | .installed.cfg
36 | *.egg
37 | MANIFEST
38 |
39 | # PyInstaller
40 | # Usually these files are written by a python script from a template
41 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
42 | *.manifest
43 | *.spec
44 |
45 | # Installer logs
46 | pip-log.txt
47 | pip-delete-this-directory.txt
48 |
49 | # Unit test / coverage reports
50 | htmlcov/
51 | .tox/
52 | .nox/
53 | .coverage
54 | .coverage.*
55 | .cache
56 | nosetests.xml
57 | coverage.xml
58 | *.cover
59 | *.py,cover
60 | .hypothesis/
61 | .pytest_cache/
62 |
63 | # Translations
64 | *.mo
65 | *.pot
66 |
67 | # Django stuff:
68 | *.log
69 | local_settings.py
70 | db.sqlite3
71 | db.sqlite3-journal
72 |
73 | # Flask stuff:
74 | instance/
75 | .webassets-cache
76 |
77 | # Scrapy stuff:
78 | .scrapy
79 |
80 | # Sphinx documentation
81 | docs/_build/
82 |
83 | # PyBuilder
84 | target/
85 |
86 | # Jupyter Notebook
87 | .ipynb_checkpoints
88 |
89 | # IPython
90 | profile_default/
91 | ipython_config.py
92 |
93 | # pyenv
94 | .python-version
95 |
96 | # pipenv
97 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
98 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
99 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
100 | # install all needed dependencies.
101 | #Pipfile.lock
102 |
103 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
104 | __pypackages__/
105 |
106 | # Celery stuff
107 | celerybeat-schedule
108 | celerybeat.pid
109 |
110 | # SageMath parsed files
111 | *.sage.py
112 |
113 | # Environments
114 | .env
115 | .venv
116 | env/
117 | venv/
118 | ENV/
119 | env.bak/
120 | venv.bak/
121 |
122 | # Spyder project settings
123 | .spyderproject
124 | .spyproject
125 |
126 | # Rope project settings
127 | .ropeproject
128 |
129 | # mkdocs documentation
130 | /site
131 |
132 | # mypy
133 | .mypy_cache/
134 | .dmypy.json
135 | dmypy.json
136 |
137 | # Pyre type checker
138 | .pyre/
139 |
140 | # Client
141 | client/docs/build
142 | client/build
143 | client/conda-dist
144 | client/pyroclient/version.py
145 |
146 | # Test DB
147 | *.db
148 | _build/
149 | *.sql
150 |
151 | # Poetry
152 | requirements.txt
153 | src/app/requirements.txt
154 | src/requirements.txt
155 | src/requirements-dev.txt
156 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | default_language_version:
2 | python: python3.11
3 | repos:
4 | - repo: https://github.com/pre-commit/pre-commit-hooks
5 | rev: v5.0.0
6 | hooks:
7 | - id: check-yaml
8 | exclude: .conda
9 | - id: check-toml
10 | - id: check-added-large-files
11 | - id: end-of-file-fixer
12 | - id: trailing-whitespace
13 | - id: check-ast
14 | - id: check-json
15 | - id: check-merge-conflict
16 | - id: no-commit-to-branch
17 | args: ['--branch', 'main']
18 | - id: debug-statements
19 | language_version: python3
20 | - repo: https://github.com/compilerla/conventional-pre-commit
21 | rev: 'v4.0.0'
22 | hooks:
23 | - id: conventional-pre-commit
24 | stages: [commit-msg]
25 | - repo: https://github.com/charliermarsh/ruff-pre-commit
26 | rev: 'v0.11.9'
27 | hooks:
28 | - id: ruff
29 | args:
30 | - --fix
31 | - id: ruff-format
32 |
--------------------------------------------------------------------------------
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | # Contributor Covenant Code of Conduct
2 |
3 | ## Our Pledge
4 |
5 | We as members, contributors, and leaders pledge to make participation in our
6 | community a harassment-free experience for everyone, regardless of age, body
7 | size, visible or invisible disability, ethnicity, sex characteristics, gender
8 | identity and expression, level of experience, education, socio-economic status,
9 | nationality, personal appearance, race, religion, or sexual identity
10 | and orientation.
11 |
12 | We pledge to act and interact in ways that contribute to an open, welcoming,
13 | diverse, inclusive, and healthy community.
14 |
15 | ## Our Standards
16 |
17 | Examples of behavior that contributes to a positive environment for our
18 | community include:
19 |
20 | * Demonstrating empathy and kindness toward other people
21 | * Being respectful of differing opinions, viewpoints, and experiences
22 | * Giving and gracefully accepting constructive feedback
23 | * Accepting responsibility and apologizing to those affected by our mistakes,
24 | and learning from the experience
25 | * Focusing on what is best not just for us as individuals, but for the
26 | overall community
27 |
28 | Examples of unacceptable behavior include:
29 |
30 | * The use of sexualized language or imagery, and sexual attention or
31 | advances of any kind
32 | * Trolling, insulting or derogatory comments, and personal or political attacks
33 | * Public or private harassment
34 | * Publishing others' private information, such as a physical or email
35 | address, without their explicit permission
36 | * Other conduct which could reasonably be considered inappropriate in a
37 | professional setting
38 |
39 | ## Enforcement Responsibilities
40 |
41 | Community leaders are responsible for clarifying and enforcing our standards of
42 | acceptable behavior and will take appropriate and fair corrective action in
43 | response to any behavior that they deem inappropriate, threatening, offensive,
44 | or harmful.
45 |
46 | Community leaders have the right and responsibility to remove, edit, or reject
47 | comments, commits, code, wiki edits, issues, and other contributions that are
48 | not aligned to this Code of Conduct, and will communicate reasons for moderation
49 | decisions when appropriate.
50 |
51 | ## Scope
52 |
53 | This Code of Conduct applies within all community spaces, and also applies when
54 | an individual is officially representing the community in public spaces.
55 | Examples of representing our community include using an official e-mail address,
56 | posting via an official social media account, or acting as an appointed
57 | representative at an online or offline event.
58 |
59 | ## Enforcement
60 |
61 | Instances of abusive, harassing, or otherwise unacceptable behavior may be
62 | reported to the community leaders responsible for enforcement at
63 | contact@pyronear.org.
64 | All complaints will be reviewed and investigated promptly and fairly.
65 |
66 | All community leaders are obligated to respect the privacy and security of the
67 | reporter of any incident.
68 |
69 | ## Enforcement Guidelines
70 |
71 | Community leaders will follow these Community Impact Guidelines in determining
72 | the consequences for any action they deem in violation of this Code of Conduct:
73 |
74 | ### 1. Correction
75 |
76 | **Community Impact**: Use of inappropriate language or other behavior deemed
77 | unprofessional or unwelcome in the community.
78 |
79 | **Consequence**: A private, written warning from community leaders, providing
80 | clarity around the nature of the violation and an explanation of why the
81 | behavior was inappropriate. A public apology may be requested.
82 |
83 | ### 2. Warning
84 |
85 | **Community Impact**: A violation through a single incident or series
86 | of actions.
87 |
88 | **Consequence**: A warning with consequences for continued behavior. No
89 | interaction with the people involved, including unsolicited interaction with
90 | those enforcing the Code of Conduct, for a specified period of time. This
91 | includes avoiding interactions in community spaces as well as external channels
92 | like social media. Violating these terms may lead to a temporary or
93 | permanent ban.
94 |
95 | ### 3. Temporary Ban
96 |
97 | **Community Impact**: A serious violation of community standards, including
98 | sustained inappropriate behavior.
99 |
100 | **Consequence**: A temporary ban from any sort of interaction or public
101 | communication with the community for a specified period of time. No public or
102 | private interaction with the people involved, including unsolicited interaction
103 | with those enforcing the Code of Conduct, is allowed during this period.
104 | Violating these terms may lead to a permanent ban.
105 |
106 | ### 4. Permanent Ban
107 |
108 | **Community Impact**: Demonstrating a pattern of violation of community
109 | standards, including sustained inappropriate behavior, harassment of an
110 | individual, or aggression toward or disparagement of classes of individuals.
111 |
112 | **Consequence**: A permanent ban from any sort of public interaction within
113 | the community.
114 |
115 | ## Attribution
116 |
117 | This Code of Conduct is adapted from the [Contributor Covenant][homepage],
118 | version 2.0, available at
119 | https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
120 |
121 | Community Impact Guidelines were inspired by [Mozilla's code of conduct
122 | enforcement ladder](https://github.com/mozilla/diversity).
123 |
124 | [homepage]: https://www.contributor-covenant.org
125 |
126 | For answers to common questions about this code of conduct, see the FAQ at
127 | https://www.contributor-covenant.org/faq. Translations are available at
128 | https://www.contributor-covenant.org/translations.
129 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | # this target runs checks on all files
2 | quality:
3 | ruff format --check .
4 | ruff check .
5 | mypy
6 |
7 | # this target runs checks on all files and potentially modifies some of them
8 | style:
9 | ruff format .
10 | ruff check --fix .
11 |
12 | # Pin the dependencies
13 | lock:
14 | poetry lock
15 |
16 | # Build the docker
17 | build:
18 | poetry export -f requirements.txt --without-hashes --output requirements.txt
19 | docker build -f src/Dockerfile . -t pyronear/alert-api:latest
20 |
21 | # Run the docker
22 | run:
23 | poetry export -f requirements.txt --without-hashes --output requirements.txt
24 | docker compose up -d --build --wait
25 |
26 | # Run the docker
27 | stop:
28 | docker compose down
29 |
30 | # Run tests for the library
31 | # the "-" are used to launch the next command even if a command fails
32 | test:
33 | poetry export -f requirements.txt --without-hashes --with test --output requirements.txt
34 | docker compose -f docker-compose.dev.yml up -d --build --wait
35 | - docker compose -f docker-compose.dev.yml exec -T backend pytest --cov=app
36 | docker compose -f docker-compose.dev.yml down
37 |
38 | build-client:
39 | pip install -e client/.
40 |
41 | # Run tests for the Python client
42 | # the "-" are used to launch the next command even if a command fails
43 | test-client: build-client
44 | poetry export -f requirements.txt --without-hashes --output requirements.txt
45 | docker compose -f docker-compose.dev.yml up -d --build --wait
46 | - cd client && pytest --cov=pyroclient tests/ && cd ..
47 | docker compose -f docker-compose.dev.yml down
48 |
49 | # Check that docs can build for client
50 | docs-client:
51 | sphinx-build client/docs/source client/docs/_build -a
52 |
53 |
54 | e2e:
55 | poetry export -f requirements.txt --without-hashes --output requirements.txt
56 | docker compose -f docker-compose.dev.yml up -d --build --wait
57 | - python scripts/test_e2e.py
58 | docker compose -f docker-compose.dev.yml down
59 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Pyronear API
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 | The building blocks of our wildfire detection & monitoring API.
34 |
35 | ## Quick Tour
36 |
37 | ### Running/stopping the service
38 |
39 | You can run the API containers using this command:
40 |
41 | ```shell
42 | make run
43 | ```
44 |
45 | You can now navigate to `http://localhost:8080/docs` to interact with the API (or do it through HTTP requests) and explore the documentation.
46 |
47 | 
48 |
49 | In order to stop the service, run:
50 | ```shell
51 | make stop
52 | ```
53 |
54 | ## Installation
55 |
56 | ### Prerequisites
57 |
58 | - [Git](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git)
59 | - [Docker](https://docs.docker.com/engine/install/)
60 | - [Docker compose](https://docs.docker.com/compose/)
61 |
62 | ### Starting your service
63 |
64 | #### 1 - Clone the repository
65 | ```shell
66 | git clone https://github.com/pyronear/pyro-api.git && cd pyro-api
67 | ```
68 | #### 2 - Set your environment variables
69 | First copy the example environment setup
70 | ```shell
71 | cp .env.example .env
72 | ```
73 |
74 | #### 3 - Start the services
75 |
76 | ```shell
77 | docker compose pull
78 | docker compose up
79 | ```
80 |
81 | #### 4 - Check out what you've deployed
82 |
83 | You can now access your backend API at [http://localhost:5050/docs](http://localhost:5050/docs)
84 |
85 |
86 | ## More goodies
87 |
88 | ### Python client
89 |
90 | This project is a REST-API, and you can interact with the service through HTTP requests. However, if you want to ease the integration into a Python project, take a look at our [Python client](client).
91 |
92 |
93 | ## Contributing
94 |
95 | Any sort of contribution is greatly appreciated!
96 |
97 | You can find a short guide in [`CONTRIBUTING`](CONTRIBUTING.md) to help grow this project!
98 |
99 |
100 |
101 | ## License
102 |
103 | Distributed under the Apache 2.0 License. See [`LICENSE`](LICENSE) for more information.
104 |
--------------------------------------------------------------------------------
/client/.conda/meta.yaml:
--------------------------------------------------------------------------------
1 | # https://docs.conda.io/projects/conda-build/en/latest/resources/define-metadata.html#loading-data-from-other-files
2 | # https://github.com/conda/conda-build/pull/4480
3 | # for conda-build > 3.21.9
4 | # {% set pyproject = load_file_data('../pyproject.toml', from_recipe_dir=True) %}
5 | # {% set project = pyproject.get('project') %}
6 | # {% set urls = pyproject.get('project', {}).get('urls') %}
7 | package:
8 | name: pyroclient
9 | version: "{{ environ.get('BUILD_VERSION', '0.2.0.dev0') }}"
10 |
11 | source:
12 | fn: pyroclient-{{ environ.get('BUILD_VERSION', '0.2.0.dev0') }}.tar.gz
13 | url: ../dist/pyroclient-{{ environ.get('BUILD_VERSION', '0.2.0.dev0') }}.tar.gz
14 |
15 | build:
16 | noarch: python
17 | script: python setup.py install --single-version-externally-managed --record=record.txt
18 |
19 | requirements:
20 | host:
21 | - python>=3.8, <4.0
22 | - setuptools
23 |
24 | run:
25 | - python>=3.8, <4.0
26 | - requests >=2.31.0, <3.0.0
27 |
28 | test:
29 | # Python imports
30 | imports:
31 | - pyroclient
32 | - pyroclient.client
33 | requires:
34 | - python
35 |
36 | about:
37 | home: https://github.com/pyronear/pyro-api/client
38 | license: Apache 2.0
39 | license_file: LICENSE
40 | summary: 'Python Client for Pyronear wildfire alert API'
41 | # description: |
42 | # {{ data['long_description'] | replace("\n", "\n ") | replace("#", '\#')}}
43 | doc_url: https://pyronear.org/pyro-api
44 | dev_url: https://github.com/pyronear/pyro-api/client
45 |
--------------------------------------------------------------------------------
/client/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing to pyro-client
2 |
3 | Everything you need to know to contribute efficiently to the project!
4 |
5 | Whatever the way you wish to contribute to the project, please respect the [code of conduct](../CODE_OF_CONDUCT.md).
6 |
7 |
8 |
9 | ## Codebase structure
10 |
11 | - [pyroclient](pyroclient) - The actual codebase of the API client
12 | - [tests](tests) - Unittests of the client
13 | - [docs](docs) - Documentation of the Python client
14 |
15 |
16 | ## Continuous Integration
17 |
18 | This project uses the following integrations to ensure proper codebase maintenance:
19 |
20 | - [Github Worklow](https://help.github.com/en/actions/configuring-and-managing-workflows/configuring-a-workflow) - run jobs for package build and coverage
21 | - [Codacy](https://www.codacy.com/) - analyzes commits for code quality
22 | - [Codecov](https://codecov.io/) - reports back coverage results
23 | - [Github Pages](https://pages.github.com/) - where the package documentation is hosted
24 |
25 | As a contributor, you will only have to ensure coverage of your code by adding appropriate unit testing of your code.
26 |
27 |
28 | ## Feedback
29 |
30 | ### Feature requests & bug report
31 |
32 | Whether you encountered a problem, or you have a feature suggestion, your input has value and can be used by contributors to reference it in their developments. For this purpose, we advise you to use Github [issues](https://github.com/pyronear/pyro-api/issues).
33 |
34 | First, check whether the topic wasn't already covered in an open / closed issue. If not, feel free to open a new one! When doing so, use issue templates whenever possible and provide enough information for other contributors to jump in.
35 |
36 | ### Questions
37 |
38 | If you are wondering how to do something with Pyro-API, or a more general question, you should consider checking out Github [discussions](https://github.com/pyronear/pyro-api/discussions). See it as a Q&A forum, or the Pyro-API-specific StackOverflow!
39 |
40 |
41 |
42 | ## Submitting a Pull Request
43 |
44 | ### Preparing your local branch
45 |
46 | 1 - Fork this [repository](https://github.com/pyronear/pyro-api) by clicking on the "Fork" button at the top right of the page. This will create a copy of the project under your GitHub account (cf. [Fork a repo](https://docs.github.com/en/get-started/quickstart/fork-a-repo)).
47 |
48 | 2 - [Clone your fork](https://docs.github.com/en/repositories/creating-and-managing-repositories/cloning-a-repository) to your local disk and set the upstream to this repo
49 | ```shell
50 | git clone git@github.com:<YOUR_GITHUB_USERNAME>/pyro-api.git
51 | cd pyro-api
52 | git remote add upstream https://github.com/pyronear/pyro-api.git
53 | ```
54 |
55 | 3 - You should not work on the `main` branch, so let's create a new one
56 | ```shell
57 | git checkout -b a-short-description
58 | ```
59 |
60 | 4 - You only have to set your development environment now. First uninstall any existing installation of the library with `pip uninstall pyroclient`, then:
61 | ```shell
62 | pip install -e "client/.[dev]"
63 | ```
64 |
65 | ### Developing your feature
66 |
67 | #### Commits
68 |
69 | - **Code**: ensure to provide docstrings to your Python code. In doing so, please follow [Google-style](https://sphinxcontrib-napoleon.readthedocs.io/en/latest/example_google.html) so it can ease the process of documentation later.
70 | - **Commit message**: please follow [Udacity guide](http://udacity.github.io/git-styleguide/)
71 |
72 | #### Unit tests
73 |
74 | In order to run the same unit tests as the CI workflows, you can run unittests locally:
75 |
76 | ```shell
77 | make test
78 | ```
79 |
80 | #### Code quality
81 |
82 | To run all quality checks together
83 |
84 | ```shell
85 | make quality
86 | ```
87 |
88 | ### Submit your modifications
89 |
90 | Push your last modifications to your remote branch
91 | ```shell
92 | git push -u origin a-short-description
93 | ```
94 |
95 | Then [open a Pull Request](https://docs.github.com/en/github/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/creating-a-pull-request) from your fork's branch. Follow the instructions of the Pull Request template and then click on "Create a pull request".
96 |
--------------------------------------------------------------------------------
/client/Makefile:
--------------------------------------------------------------------------------
# Run tests for the library
# NOTE: the coverage target must match the installed package name ("pyroclient",
# cf. pyproject.toml) — a typo here makes pytest-cov silently report 0% coverage.
test:
	pytest --cov=pyroclient tests/
4 |
5 | # Check that docs can build for client
6 | docs-client:
7 | sphinx-build docs/source docs/_build -a
8 |
--------------------------------------------------------------------------------
/client/README.md:
--------------------------------------------------------------------------------
1 | # Alert API Client
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 | Client for the [Alert management API](https://github.com/pyronear/pyro-api)
26 |
27 |
28 | ## Quick Tour
29 |
30 | ### Interacting with the wildfire alert API
31 |
32 | General users can use the API client to request available data within their respective scope (i.e. as a private individual, you won't have access to the data from devices of firefighters; however you will have access to all the data related to your devices). You can find several examples below:
33 |
34 | ```python
35 | API_URL = os.getenv("API_URL", "http://localhost:5050/api/v1/")
36 | login = os.getenv("USER_LOGIN", "superadmin_login")
37 | pwd = os.getenv("USER_PWD", "superadmin_pwd")
38 | token = requests.post(
39 | urljoin(API_URL, "login/creds"),
40 | data={"username": login, "password": pwd},
41 | timeout=5,
42 | ).json()["access_token"]
43 | api_client = Client(token, "http://localhost:5050", timeout=10)
44 |
45 | # List organizations accessible in your scope
46 | organizations = api_client.fetch_organizations()
47 | # Get the url of the image of a detection
48 | url = api_client.get_detection_url(detection_id)
49 | ```
50 |
51 |
52 | ```python
53 | cam_token = requests.post(urljoin(API_URL, f"cameras/{cam_id}/token"), headers=admin_headers, timeout=5).json()[
54 | "access_token"
55 | ]
56 |
57 | camera_client = Client(cam_token, "http://localhost:5050", timeout=10)
58 | response = camera_client.create_detection(image, 123.2)
59 | ```
60 |
61 | ## Installation
62 |
63 | ### Prerequisites
64 |
65 | Python 3.8 (or higher) and [pip](https://pip.pypa.io/en/stable/)/[conda](https://docs.conda.io/en/latest/miniconda.html) are required to install the client of the alert API.
66 |
67 | ### Latest stable release
68 |
69 | You can install the last stable release of the package using [pypi](https://pypi.org/project/pyroclient/) as follows:
70 |
71 | ```shell
72 | pip install pyroclient
73 | ```
74 |
75 | or using [conda](https://anaconda.org/pyronear/pyroclient):
76 |
77 | ```shell
78 | conda install -c pyronear pyroclient
79 | ```
80 |
81 | ### Developer mode
82 |
83 | Alternatively, if you wish to use the latest features of the project that haven't made their way to a release yet, you can install the package from source *(install [Git](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git) first)*:
84 |
85 | ```shell
86 | git clone https://github.com/pyronear/pyro-api.git
87 | pip install -e pyro-api/client/.
88 | ```
89 |
90 | ### Getting an access to the API
91 |
92 | What you need to use the client:
93 | - URL to the target Alert API
94 | - credentials (login & password)
95 |
96 | If you're running your local/own instance of the API, you can generate your own credentials. Otherwise, you will need to request credentials from the instance administrator. For our public API instance, at the moment, we don't provide public access to the API, it's only reserved to these type of users:
97 | - Pyronear members
98 | - firefighters and stakeholders of wildfire prevention
99 | - People looking at registering their own device
100 |
101 |
102 | ## More goodies
103 |
104 | ### Documentation
105 |
106 | The full package documentation is available [here](http://pyronear.org/pyro-api) for detailed specifications.
107 |
108 |
109 | ## Contributing
110 |
111 | Any sort of contribution is greatly appreciated!
112 |
113 | You can find a short guide in [`CONTRIBUTING`](CONTRIBUTING.md) to help grow this project!
114 |
115 |
116 |
117 | ## License
118 |
119 | Distributed under the Apache 2.0 License. See [`LICENSE`](LICENSE) for more information.
120 |
--------------------------------------------------------------------------------
/client/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line, and also
5 | # from the environment for the first two.
6 | SPHINXOPTS ?=
7 | SPHINXBUILD ?= sphinx-build
8 | SOURCEDIR = source
9 | BUILDDIR = build
10 |
11 | # Put it first so that "make" without argument is like "make help".
12 | help:
13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
14 |
15 | .PHONY: help Makefile
16 |
17 | # Catch-all target: route all unknown targets to Sphinx using the new
18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
19 | %: Makefile
20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
21 |
--------------------------------------------------------------------------------
/client/docs/make.bat:
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 |
3 | pushd %~dp0
4 |
5 | REM Command file for Sphinx documentation
6 |
7 | if "%SPHINXBUILD%" == "" (
8 | set SPHINXBUILD=sphinx-build
9 | )
10 | set SOURCEDIR=source
11 | set BUILDDIR=build
12 |
13 | if "%1" == "" goto help
14 |
15 | %SPHINXBUILD% >NUL 2>NUL
16 | if errorlevel 9009 (
17 | echo.
18 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
19 | echo.installed, then set the SPHINXBUILD environment variable to point
20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you
21 | echo.may add the Sphinx directory to PATH.
22 | echo.
23 | echo.If you don't have Sphinx installed, grab it from
24 | echo.http://sphinx-doc.org/
25 | exit /b 1
26 | )
27 |
28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
29 | goto end
30 |
31 | :help
32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
33 |
34 | :end
35 | popd
36 |
--------------------------------------------------------------------------------
/client/docs/source/_static/css/custom.css:
--------------------------------------------------------------------------------
1 | h1 {
2 | font-size: 200%;
3 | }
4 |
5 | /* Github button */
6 |
7 | .github-repo {
8 | display: flex;
9 | justify-content: center;
10 | }
11 |
--------------------------------------------------------------------------------
/client/docs/source/_static/images/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pyronear/pyro-api/14d58dcb104717e70bc1a9fd24be5e55a390b0a9/client/docs/source/_static/images/favicon.ico
--------------------------------------------------------------------------------
/client/docs/source/_static/images/logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pyronear/pyro-api/14d58dcb104717e70bc1a9fd24be5e55a390b0a9/client/docs/source/_static/images/logo.png
--------------------------------------------------------------------------------
/client/docs/source/changelog.rst:
--------------------------------------------------------------------------------
1 | Changelog
2 | =========
3 |
4 | v0.1.2 (2022-07-01)
5 | -------------------
6 | Release note: `v0.1.2 <https://github.com/pyronear/pyro-api/releases/tag/v0.1.2>`_
7 |
8 | v0.1.1 (2020-12-24)
9 | -------------------
10 | Release note: `v0.1.1 <https://github.com/pyronear/pyro-api/releases/tag/v0.1.1>`_
11 |
12 |
13 | v0.1.0 (2020-11-26)
14 | -------------------
15 | Release note: `v0.1.0 <https://github.com/pyronear/pyro-api/releases/tag/v0.1.0>`_
16 |
--------------------------------------------------------------------------------
/client/docs/source/client.rst:
--------------------------------------------------------------------------------
1 | pyroclient.client
2 | #################
3 |
4 | The client subpackage contains the core definition of the API client.
5 |
6 |
7 | .. currentmodule:: pyroclient.client
8 |
9 |
10 | API Client
11 | ----------
12 |
13 | .. autoclass:: Client
14 | :members:
15 |
--------------------------------------------------------------------------------
/client/docs/source/conf.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2024-2025, Pyronear.
2 |
3 | # This program is licensed under the Apache License 2.0.
4 | # See LICENSE or go to <https://www.apache.org/licenses/LICENSE-2.0> for full license details.
5 |
6 | # Configuration file for the Sphinx documentation builder.
7 | #
8 | # This file only contains a selection of the most common options. For a full
9 | # list see the documentation:
10 | # https://www.sphinx-doc.org/en/master/usage/configuration.html
11 |
12 | # -- Path setup --------------------------------------------------------------
13 |
14 | # If extensions (or modules to document with autodoc) are in another directory,
15 | # add these directories to sys.path here. If the directory is relative to the
16 | # documentation root, use os.path.abspath to make it absolute, like shown here.
17 | #
18 | import sys
19 | from datetime import datetime
20 | from pathlib import Path
21 |
22 | sys.path.insert(0, Path().cwd().parent.parent)
23 | import pyroclient
24 |
25 | # -- Project information -----------------------------------------------------
26 |
27 | master_doc = "index"
28 | project = "pyroclient"
29 | copyright = f"2020-{datetime.now().year}, Pyronear"
30 | author = "Pyronear"
31 |
32 | # The full version, including alpha/beta/rc tags
33 | version = pyroclient.__version__
34 | release = pyroclient.__version__ + "-git"
35 |
36 |
37 | # -- General configuration ---------------------------------------------------
38 |
39 | # Add any Sphinx extension module names here, as strings. They can be
40 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
41 | # ones.
42 | extensions = [
43 | "sphinx.ext.autodoc",
44 | "sphinx.ext.napoleon",
45 | "sphinx.ext.viewcode",
46 | "sphinx.ext.mathjax",
47 | "sphinxemoji.sphinxemoji", # cf. https://sphinxemojicodes.readthedocs.io/en/stable/
48 | "sphinx_copybutton",
49 | ]
50 |
51 | napoleon_use_ivar = True
52 |
53 | # Add any paths that contain templates here, relative to this directory.
54 | templates_path = ["_templates"]
55 |
56 | # List of patterns, relative to source directory, that match files and
57 | # directories to ignore when looking for source files.
58 | # This pattern also affects html_static_path and html_extra_path.
59 | exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
60 |
61 |
62 | # The name of the Pygments (syntax highlighting) style to use.
63 | pygments_style = "friendly"
64 | pygments_dark_style = "monokai"
65 | highlight_language = "python3"
66 |
67 | # -- Options for HTML output -------------------------------------------------
68 |
69 | # The theme to use for HTML and HTML Help pages. See the documentation for
70 | # a list of builtin themes.
71 | #
72 | html_theme = "furo"
73 |
74 | html_title = "Pyro-client"
75 | language = "en"
76 | html_logo = "_static/images/logo.png"
77 | html_favicon = "_static/images/favicon.ico"
78 |
79 | # Theme options are theme-specific and customize the look and feel of a theme
80 | # further. For a list of options available for each theme, see the
81 | # documentation.
82 | #
83 | html_theme_options = {
84 | "footer_icons": [
85 | {
86 | "name": "GitHub",
87 | "url": "https://github.com/pyronear/pyro-api",
88 | "html": """
89 |
90 |
91 |
92 | """,
93 | "class": "",
94 | },
95 | ],
96 | "source_repository": "https://github.com/pyronear/pyro-api/",
97 | "source_branch": "master",
98 | "source_directory": "client/docs/source/",
99 | "sidebar_hide_name": True,
100 | }
101 |
102 |
103 | # Add any paths that contain custom static files (such as style sheets) here,
104 | # relative to this directory. They are copied after the builtin static files,
105 | # so a file named "default.css" will overwrite the builtin "default.css".
106 | html_static_path = ["_static"]
107 |
108 |
109 | # Add googleanalytics id
110 | # ref: https://github.com/orenhecht/googleanalytics/blob/master/sphinxcontrib/googleanalytics.py
111 | def add_ga_javascript(app, pagename, templatename, context, doctree) -> None:
112 | metatags = context.get("metatags", "")
113 | metatags += """
114 |
115 |
116 |
122 | """.format(app.config.googleanalytics_id)
123 | context["metatags"] = metatags
124 |
125 |
def setup(app) -> None:
    """Sphinx extension entry point: register the project's custom static assets.

    Called automatically by Sphinx because ``conf.py`` defines ``setup``.
    The Google Analytics hooks are intentionally left commented out; to
    re-enable tracking, uncomment both the config value below and the
    ``html-page-context`` connection.
    """
    # app.add_config_value("googleanalytics_id", "", "html")
    app.add_css_file("css/custom.css")
    app.add_js_file("js/custom.js")
    # app.connect("html-page-context", add_ga_javascript)
131 |
--------------------------------------------------------------------------------
/client/docs/source/index.rst:
--------------------------------------------------------------------------------
1 | ****************************************
2 | Pyro-client: alerts for forest wildfires
3 | ****************************************
4 |
5 | The :mod:`pyroclient` package lets you interact with the Pyronear API in a simplified pythonic way.
6 |
7 |
8 | .. toctree::
9 | :maxdepth: 2
10 | :caption: Getting Started
11 | :hidden:
12 |
13 | installing
14 |
15 |
16 | .. toctree::
17 | :maxdepth: 1
18 | :caption: Package Reference
19 | :hidden:
20 |
21 | client
22 |
23 | .. toctree::
24 | :maxdepth: 2
25 | :caption: Notes
26 | :hidden:
27 |
28 | changelog
29 |
--------------------------------------------------------------------------------
/client/docs/source/installing.rst:
--------------------------------------------------------------------------------
1 |
2 | ************
3 | Installation
4 | ************
5 |
6 | Python 3.8 (or higher) and `pip <https://pip.pypa.io/en/stable/>`_ / `conda <https://docs.conda.io/en/latest/miniconda.html>`_ are required to install the client of the alert API.
7 |
8 | Via Python Package
9 | ==================
10 |
11 | Install the last stable release of the package using pip:
12 |
13 | .. code:: bash
14 |
15 | pip install pyroclient
16 |
17 |
18 | Via Conda
19 | =========
20 |
21 | Install the last stable release of the package using conda:
22 |
23 | .. code:: bash
24 |
25 | conda install -c pyronear pyroclient
26 |
27 |
28 | Via Git
29 | =======
30 |
31 | Install the library in developer mode:
32 |
33 | .. code:: bash
34 |
35 | git clone https://github.com/pyronear/pyro-api.git
36 | pip install -e pyro-api/client/.
37 |
--------------------------------------------------------------------------------
/client/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | requires = ["setuptools", "wheel"]
3 | build-backend = "setuptools.build_meta"
4 |
5 | [project]
6 | name = "pyroclient"
7 | description = "Python Client for Pyronear wildfire alert API"
8 | authors = [
9 | {name = "Pyronear", email = "contact@pyronear.org"}
10 | ]
11 | readme = "README.md"
12 | requires-python = ">=3.8,<4"
13 | license = {file = "LICENSE"}
14 | keywords = ["backend", "wildfire", "alert", "client", "api"]
15 | classifiers = [
16 | "Development Status :: 4 - Beta",
17 | "Intended Audience :: Developers",
18 | "Intended Audience :: System Administrators",
19 | "License :: OSI Approved :: Apache Software License",
20 | "Natural Language :: English",
21 | "Operating System :: OS Independent",
22 | "Programming Language :: Python :: 3",
23 | "Programming Language :: Python :: 3.8",
24 | "Programming Language :: Python :: 3.9",
25 | "Programming Language :: Python :: 3.10",
26 | "Programming Language :: Python :: 3.11",
27 | "Programming Language :: Python :: 3.12",
28 | "Topic :: Software Development",
29 | "Topic :: Software Development :: Libraries :: Python Modules",
30 | ]
31 | dynamic = ["version"]
32 | dependencies = [
33 | "requests>=2.31.0,<3.0.0",
34 | ]
35 |
36 | [project.optional-dependencies]
37 | test = [
38 | "pytest==8.3.4",
39 | "pytest-cov>=4.0.0,<5.0.0",
40 | "pytest-pretty>=1.0.0,<2.0.0",
41 | "types-requests>=2.0.0",
42 | ]
43 | quality = [
44 | "mypy==1.10.0",
45 | ]
46 | docs = [
47 | "sphinx>=3.0.0,!=3.5.0",
48 | "sphinxemoji>=0.1.8",
49 | "sphinx-copybutton>=0.3.1",
50 | "docutils<0.18",
51 | # cf. https://github.com/readthedocs/readthedocs.org/issues/9038
52 | "Jinja2<3.1",
53 | "furo>=2022.3.4",
54 | ]
55 |
56 | [project.urls]
57 | documentation = "https://pyronear.org/pyro-api"
58 | repository = "https://github.com/pyronear/pyro-api/client"
59 | tracker = "https://github.com/pyronear/pyro-api/issues"
60 |
61 | [tool.setuptools]
62 | zip-safe = true
63 |
64 | [tool.setuptools.packages.find]
65 | exclude = ["docs*", "tests*"]
66 |
67 | [tool.mypy]
68 | python_version = "3.8"
69 | files = "pyroclient/"
70 | show_error_codes = true
71 | pretty = true
72 | warn_unused_ignores = true
73 | warn_redundant_casts = true
74 | no_implicit_optional = true
75 | check_untyped_defs = true
76 | implicit_reexport = false
77 |
78 | [[tool.mypy.overrides]]
79 | module = [
80 | "requests.*",
81 | ]
82 | ignore_missing_imports = true
83 |
84 | [tool.pytest.ini_options]
85 | testpaths = ["pyroclient/"]
86 |
--------------------------------------------------------------------------------
/client/pyroclient/__init__.py:
--------------------------------------------------------------------------------
1 | from .client import *
2 | from .exceptions import *
3 | from .version import __version__
4 |
--------------------------------------------------------------------------------
/client/pyroclient/exceptions.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2020-2025, Pyronear.
2 |
3 | # This program is licensed under the Apache License 2.0.
# See LICENSE or go to <https://opensource.org/licenses/Apache-2.0> for full license details.
5 |
6 | from typing import Union
7 |
8 | __all__ = ["HTTPRequestError"]
9 |
10 |
class HTTPRequestError(Exception):
    """Exception raised when an HTTP request to the Pyronear API fails.

    Args:
        status_code: HTTP status code returned by the server
        response_message: optional detail extracted from the response body
    """

    def __init__(self, status_code: int, response_message: Union[str, None] = None) -> None:
        # Initialize the Exception base class so that str(exc), exc.args and
        # pickling work (without this call, args is empty and unpickling a
        # transported exception would fail on the required status_code arg).
        super().__init__(status_code, response_message)
        self.status_code = status_code
        self.response_message = response_message

    def __repr__(self) -> str:
        class_name = self.__class__.__name__
        return f"{class_name}(status_code={self.status_code!r}, response_message={self.response_message!r})"
19 |
--------------------------------------------------------------------------------
/client/setup.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2020-2025, Pyronear.
2 |
3 | # This program is licensed under the Apache License 2.0.
# See LICENSE or go to <https://opensource.org/licenses/Apache-2.0> for full license details.
5 |
6 |
7 | import os
8 | from pathlib import Path
9 |
10 | from setuptools import setup
11 |
12 | PKG_NAME = "pyroclient"
13 | VERSION = os.getenv("BUILD_VERSION", "0.2.0.dev0")
14 |
15 |
16 | if __name__ == "__main__":
17 | print(f"Building wheel {PKG_NAME}-{VERSION}")
18 |
19 | # Dynamically set the __version__ attribute
20 | cwd = Path(__file__).parent.absolute()
21 | with cwd.joinpath("pyroclient", "version.py").open("w", encoding="utf-8") as f:
22 | f.write(f"__version__ = '{VERSION}'\n")
23 |
24 | setup(name=PKG_NAME, version=VERSION)
25 |
--------------------------------------------------------------------------------
/client/tests/conftest.py:
--------------------------------------------------------------------------------
1 | import os
2 | from urllib.parse import urljoin
3 |
4 | import pytest
5 | import requests
6 |
# Target API and superadmin credentials, overridable through the environment.
API_URL = os.getenv("API_URL", "http://localhost:5050/api/v1/")
SUPERADMIN_LOGIN = os.getenv("SUPERADMIN_LOGIN", "superadmin_login")
SUPERADMIN_PWD = os.getenv("SUPERADMIN_PWD", "superadmin_pwd")
# Fetched once at import time: a superadmin bearer token shared by all fixtures
# below (requires a live API instance at API_URL, or collection fails).
SUPERADMIN_TOKEN = requests.post(
    urljoin(API_URL, "login/creds"),
    data={"username": SUPERADMIN_LOGIN, "password": SUPERADMIN_PWD},
    timeout=5,
).json()["access_token"]
15 |
16 |
def pytest_configure():
    # Expose the superadmin token on the pytest namespace so test modules can
    # read it at collection time (e.g. inside parametrize decorators).
    pytest.admin_token = SUPERADMIN_TOKEN
20 |
21 |
@pytest.fixture(scope="session")
def mock_img():
    """Session-wide fixture returning the raw bytes of the Pyronear GitHub avatar."""
    response = requests.get("https://avatars.githubusercontent.com/u/61667887?s=200&v=4", timeout=5)
    return response.content
26 |
27 |
@pytest.fixture(scope="session")
def cam_token():
    """Register a camera via the superadmin account and return a camera-scoped token."""
    headers = {"Authorization": f"Bearer {SUPERADMIN_TOKEN}"}
    camera = {
        "name": "pyro-camera-01",
        "organization_id": 1,
        "angle_of_view": 120,
        "elevation": 1582,
        "lat": 44.765181,
        "lon": 4.51488,
        "is_trustable": True,
    }
    creation = requests.post(urljoin(API_URL, "cameras"), json=camera, headers=headers, timeout=5)
    assert creation.status_code == 201
    cam_id = creation.json()["id"]
    # Exchange the camera id for a camera access token
    token_response = requests.post(urljoin(API_URL, f"cameras/{cam_id}/token"), headers=headers, timeout=5)
    return token_response.json()["access_token"]
47 |
48 |
@pytest.fixture(scope="session")
def agent_token():
    # Create an agent account in organization 1 using the superadmin token
    admin_headers = {"Authorization": f"Bearer {SUPERADMIN_TOKEN}"}
    agent_login, agent_pwd = "agent-1", "PickARobustOne"
    payload = {
        "role": "agent",
        "login": agent_login,
        "password": agent_pwd,
        "organization_id": 1,
    }
    response = requests.post(urljoin(API_URL, "users"), json=payload, headers=admin_headers, timeout=5)
    assert response.status_code == 201
    # Log in with the agent credentials to obtain its access token
    return requests.post(
        urljoin(API_URL, "login/creds"), data={"username": agent_login, "password": agent_pwd}, timeout=5
    ).json()["access_token"]
65 |
66 |
@pytest.fixture(scope="session")
def user_token():
    """Create a regular user in organization 1 and return its access token."""
    headers = {"Authorization": f"Bearer {SUPERADMIN_TOKEN}"}
    login, pwd = "user-1", "PickARobustOne"
    creation = requests.post(
        urljoin(API_URL, "users"),
        json={"role": "user", "login": login, "password": pwd, "organization_id": 1},
        headers=headers,
        timeout=5,
    )
    assert creation.status_code == 201
    # Authenticate with the freshly created credentials
    login_response = requests.post(
        urljoin(API_URL, "login/creds"), data={"username": login, "password": pwd}, timeout=5
    )
    return login_response.json()["access_token"]
83 |
--------------------------------------------------------------------------------
/client/tests/test_client.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime
2 |
3 | import pytest
4 | from requests.exceptions import ConnectionError as ConnError
5 | from requests.exceptions import ReadTimeout
6 |
7 | from pyroclient.client import Client
8 | from pyroclient.exceptions import HTTPRequestError
9 |
10 |
@pytest.mark.parametrize(
    ("token", "host", "timeout", "expected_error"),
    [
        ("invalid_token", "http://localhost:5050", 10, HTTPRequestError),
        (pytest.admin_token, "http://localhost:8003", 10, ConnError),
        (pytest.admin_token, "http://localhost:5050", 0.00001, ReadTimeout),
        (pytest.admin_token, "http://localhost:5050", 10, None),
    ],
)
def test_client_constructor(token, host, timeout, expected_error):
    """Constructor should validate the token against the API and surface failures."""
    if expected_error is not None:
        with pytest.raises(expected_error):
            Client(token, host, timeout=timeout)
        return
    Client(token, host, timeout=timeout)
26 |
27 |
@pytest.fixture(scope="session")
def test_cam_workflow(cam_token, mock_img):
    # NOTE: despite the "test_" prefix this is a session-scoped fixture: it runs
    # the camera workflow once and hands the last detection id to dependent tests.
    cam_client = Client(cam_token, "http://localhost:5050", timeout=10)
    response = cam_client.heartbeat()
    assert response.status_code == 200
    # Check that last_image gets changed
    assert response.json()["last_image"] is None
    response = cam_client.update_last_image(mock_img)
    assert response.status_code == 200, response.__dict__
    assert isinstance(response.json()["last_image"], str)
    # Check that adding bboxes works
    with pytest.raises(ValueError, match="bboxes must be a non-empty list of tuples"):
        cam_client.create_detection(mock_img, 123.2, None)
    with pytest.raises(ValueError, match="bboxes must be a non-empty list of tuples"):
        cam_client.create_detection(mock_img, 123.2, [])
    # Three valid detections with the same azimuth; downstream tests observe a
    # single sequence containing these three detections
    response = cam_client.create_detection(mock_img, 123.2, [(0, 0, 1.0, 0.9, 0.5)])
    assert response.status_code == 201, response.__dict__
    response = cam_client.create_detection(mock_img, 123.2, [(0, 0, 1.0, 0.9, 0.5), (0.2, 0.2, 0.7, 0.7, 0.8)])
    assert response.status_code == 201, response.__dict__
    response = cam_client.create_detection(mock_img, 123.2, [(0, 0, 1.0, 0.9, 0.5)])
    assert response.status_code == 201, response.__dict__
    # Expose the id of the last created detection to dependent tests
    return response.json()["id"]
50 |
51 |
def test_agent_workflow(test_cam_workflow, agent_token):
    """Agent can fetch the latest unlabeled sequence and label it."""
    agent_client = Client(agent_token, "http://localhost:5050", timeout=10)
    sequences = agent_client.fetch_latest_sequences().json()
    assert len(sequences) == 1
    label_response = agent_client.label_sequence(sequences[0]["id"], True)
    assert label_response.status_code == 200, label_response.__dict__
59 |
60 |
def test_user_workflow(test_cam_workflow, user_token):
    """User can fetch detections, sequences and their media URLs."""
    # Local import keeps this fix self-contained; datetime.utcnow() is
    # deprecated since Python 3.12 in favor of timezone-aware datetimes.
    from datetime import timezone

    # User workflow
    user_client = Client(user_token, "http://localhost:5050", timeout=10)
    response = user_client.get_detection_url(test_cam_workflow)
    assert response.status_code == 200, response.__dict__
    response = user_client.fetch_detections()
    assert response.status_code == 200, response.__dict__
    # No sequence should exist that far back in the past
    response = user_client.fetch_sequences_from_date("2018-06-06")
    assert len(response.json()) == 0
    assert response.status_code == 200, response.__dict__
    response = user_client.fetch_latest_sequences()
    assert response.status_code == 200, response.__dict__
    assert len(response.json()) == 0  # Sequence was labeled by agent
    # Today's date (UTC, matching the server clock) should yield one sequence
    response = user_client.fetch_sequences_from_date(datetime.now(timezone.utc).date().isoformat())
    assert len(response.json()) == 1
    response = user_client.fetch_sequences_detections(response.json()[0]["id"])
    assert response.status_code == 200, response.__dict__
    assert len(response.json()) == 3
78 | assert len(response.json()) == 3
79 |
--------------------------------------------------------------------------------
/client/tests/test_exceptions.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | from pyroclient.exceptions import HTTPRequestError
4 |
5 |
@pytest.mark.parametrize(
    ("status_code", "response_msg", "expected_repr"),
    [
        (404, "not found", "HTTPRequestError(status_code=404, response_message='not found')"),
        (502, "internal error", "HTTPRequestError(status_code=502, response_message='internal error')"),
    ],
)
def test_httprequesterror(status_code, response_msg, expected_repr):
    """repr() of HTTPRequestError should expose both the status code and the message."""
    assert repr(HTTPRequestError(status_code, response_msg)) == expected_repr
16 |
--------------------------------------------------------------------------------
/docker-compose.dev.yml:
--------------------------------------------------------------------------------
1 | name: pyronear
2 | services:
3 | db:
4 | image: postgres:15-alpine
5 | expose:
6 | - 5432
7 | environment:
8 | - POSTGRES_USER=dummy_pg_user
9 | - POSTGRES_PASSWORD=dummy_pg_pwd
10 | - POSTGRES_DB=dummy_pg_db
11 | healthcheck:
12 | test: ["CMD-SHELL", "sh -c 'pg_isready -U dummy_pg_user -d dummy_pg_db'"]
13 | interval: 10s
14 | timeout: 3s
15 | retries: 3
16 |
17 | # ref link: https://github.com/localstack/localstack/blob/master/docker-compose.yml
18 | localstack:
19 | image: localstack/localstack:1.4.0
20 | ports:
21 | - 4566:4566
22 | # environment variables details: https://docs.localstack.cloud/references/configuration/
23 | environment:
24 | - EDGE_PORT=4566
25 | - SERVICES=s3
26 | volumes:
27 | - ./scripts/localstack:/etc/localstack/init/ready.d
28 | healthcheck:
29 | test: ["CMD-SHELL", "awslocal --endpoint-url=http://localhost:4566 s3 ls s3://admin"]
30 | interval: 10s
31 | timeout: 5s
32 | retries: 10
33 |
34 | backend:
35 | build:
36 | context: .
37 | dockerfile: ./src/Dockerfile
38 | depends_on:
39 | db:
40 | condition: service_healthy
41 | localstack:
42 | condition: service_healthy
43 | ports:
44 | - "5050:5050"
45 | environment:
46 | - POSTGRES_URL=postgresql+asyncpg://dummy_pg_user:dummy_pg_pwd@db/dummy_pg_db
47 | - SUPERADMIN_LOGIN=superadmin_login
48 | - SUPERADMIN_PWD=superadmin_pwd
49 | - SUPERADMIN_ORG=admin
50 | - JWT_SECRET=${JWT_SECRET}
51 | - SUPPORT_EMAIL=${SUPPORT_EMAIL}
52 | - DEBUG=true
53 | - SQLALCHEMY_SILENCE_UBER_WARNING=1
54 | - S3_ENDPOINT_URL=http://localstack:4566
55 | - S3_ACCESS_KEY=fake
56 | - S3_SECRET_KEY=fake
57 | - S3_REGION=us-east-1
58 | volumes:
59 | - ./src/:/app/
60 | command: "sh -c 'python app/db.py && uvicorn app.main:app --reload --host 0.0.0.0 --port 5050 --proxy-headers'"
61 | restart: always
62 | healthcheck:
63 | test: ["CMD-SHELL", "curl http://localhost:5050/status"]
64 | interval: 10s
65 | timeout: 3s
66 | retries: 3
67 |
--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
1 | name: pyronear
2 | services:
3 | db:
4 | image: postgres:15-alpine
5 | expose:
6 | - 5432
7 | environment:
8 | - POSTGRES_USER=${POSTGRES_USER}
9 | - POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
10 | - POSTGRES_DB=${POSTGRES_DB}
11 | volumes:
12 | - postgres_data:/var/lib/postgresql/data/
13 | healthcheck:
14 | test: ["CMD-SHELL", "sh -c 'pg_isready -U ${POSTGRES_USER} -d ${POSTGRES_DB}'"]
15 | interval: 10s
16 | timeout: 3s
17 | retries: 3
18 |
19 | # ref link: https://github.com/localstack/localstack/blob/master/docker-compose.yml
20 | localstack:
21 | image: localstack/localstack:1.4.0
22 | ports:
23 | - 4566:4566
24 | # environment variables details: https://docs.localstack.cloud/references/configuration/
25 | environment:
26 | - EDGE_PORT=4566
27 | - SERVICES=s3
28 | - DATA_DIR=/tmp/localstack/data
29 | - AWS_DEFAULT_REGION=${S3_REGION:-us-east-1}
30 | volumes:
31 | - ./scripts/localstack:/etc/localstack/init/ready.d
32 | - localstack_data:/tmp/localstack
33 | healthcheck:
34 | test: ["CMD-SHELL", "awslocal --endpoint-url=http://localhost:4566 s3 ls s3://admin"]
35 | interval: 10s
36 | timeout: 5s
37 | retries: 10
38 |
39 | backend:
40 | image: ghcr.io/pyronear/alert-api:latest
41 | build:
42 | context: .
43 | dockerfile: ./src/Dockerfile
44 | depends_on:
45 | db:
46 | condition: service_healthy
47 | localstack:
48 | condition: service_healthy
49 | ports:
50 | - "5050:5050"
51 | environment:
52 | - POSTGRES_URL=postgresql+asyncpg://${POSTGRES_USER}:${POSTGRES_PASSWORD}@db/${POSTGRES_DB}
53 | - SUPERADMIN_LOGIN=${SUPERADMIN_LOGIN}
54 | - SUPERADMIN_PWD=${SUPERADMIN_PWD}
55 | - SUPERADMIN_ORG=${SUPERADMIN_ORG}
56 | - JWT_SECRET=${JWT_SECRET}
57 | - SUPPORT_EMAIL=${SUPPORT_EMAIL}
58 | - DEBUG=true
59 | - PROMETHEUS_ENABLED=true
60 | - SQLALCHEMY_SILENCE_UBER_WARNING=1
61 | - S3_ENDPOINT_URL=${S3_ENDPOINT_URL:-http://localstack:4566}
62 | - S3_ACCESS_KEY=${S3_ACCESS_KEY:-na}
63 | - S3_SECRET_KEY=${S3_SECRET_KEY:-na}
64 | - S3_REGION=${S3_REGION:-us-east-1}
65 | - S3_PROXY_URL=${S3_PROXY_URL}
66 | - SERVER_NAME=${SERVER_NAME}
67 | volumes:
68 | - ./src/:/app/
69 | command: "sh -c 'alembic upgrade head && python app/db.py && uvicorn app.main:app --reload --host 0.0.0.0 --port 5050 --proxy-headers'"
70 | restart: always
71 | healthcheck:
72 | test: ["CMD-SHELL", "curl http://localhost:5050/status"]
73 | interval: 10s
74 | timeout: 3s
75 | retries: 3
76 |
77 | volumes:
78 | postgres_data:
79 | localstack_data:
80 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | requires = ["poetry>=1.0"]
3 | build-backend = "poetry.masonry.api"
4 |
5 | [tool.poetry]
6 | name = "pyro-api"
7 | version = "0.1.2.dev0"
8 | description = "Backend for wildfire prevention, detection and monitoring"
authors = ["Pyronear <contact@pyronear.org>"]
10 | license = "Apache-2.0"
11 |
12 | [tool.poetry.dependencies]
13 | python = "^3.11"
14 | fastapi = ">=0.109.1,<1.0.0"
15 | sqlmodel = "^0.0.24"
16 | pydantic = ">=2.0.0,<3.0.0"
17 | pydantic-settings = ">=2.0.0,<3.0.0"
18 | requests = "^2.32.0"
19 | PyJWT = "^2.8.0"
20 | passlib = { version = "^1.7.4", extras = ["bcrypt"] }
21 | uvicorn = ">=0.11.1,<1.0.0"
22 | asyncpg = ">=0.25.0,<1.0.0"
23 | alembic = "^1.8.1"
24 | sentry-sdk = { version = "^2.8.0", extras = ["fastapi"] }
25 | posthog = "^3.0.0"
26 | prometheus-fastapi-instrumentator = "^6.1.0"
27 | python-multipart = "==0.0.7"
28 | python-magic = "^0.4.17"
29 | boto3 = "^1.26.0"
30 | httpx = "^0.24.0"
31 |
32 | [tool.poetry.group.quality]
33 | optional = true
34 |
35 | [tool.poetry.group.quality.dependencies]
36 | ruff = "==0.11.9"
37 | mypy = "==1.10.0"
38 | types-requests = ">=2.0.0"
39 | types-python-dateutil = "^2.8.0"
40 | sqlalchemy-stubs = "^0.4"
41 | types-passlib = ">=1.7.0"
42 | pre-commit = "^4.2.0"
43 |
44 | [tool.poetry.group.test]
45 | optional = true
46 |
47 | [tool.poetry.group.test.dependencies]
48 | pytest = "==8.3.5"
49 | pytest-asyncio = "==0.21.2"
50 | pytest-cov = "^4.0.0"
51 | pytest-pretty = "^1.0.0"
52 | httpx = ">=0.23.0"
53 | aiosqlite = ">=0.16.0,<1.0.0"
54 |
55 | [tool.coverage.run]
56 | source = ["src/app", "client/pyroclient"]
57 |
58 | [tool.ruff]
59 | line-length = 120
60 | target-version = "py39"
61 | preview = true
62 |
63 | [tool.ruff.lint]
64 | select = [
65 | "F", # pyflakes
66 | "E", # pycodestyle errors
67 | "W", # pycodestyle warnings
68 | "I", # isort
69 | "N", # pep8-naming
70 | # "D101", "D103", # pydocstyle missing docstring in public function/class
71 | "D201","D202","D207","D208","D214","D215","D300","D301","D417", "D419", # pydocstyle
72 | "YTT", # flake8-2020
73 | "ANN", # flake8-annotations
74 | "ASYNC", # flake8-async
75 | "S", # flake8-bandit
76 | "BLE", # flake8-blind-except
77 | "B", # flake8-bugbear
78 | "A", # flake8-builtins
79 | "COM", # flake8-commas
80 | "CPY", # flake8-copyright
81 | "C4", # flake8-comprehensions
82 | "T10", # flake8-debugger
83 | "ISC", # flake8-implicit-str-concat
84 | "ICN", # flake8-import-conventions
85 | "LOG", # flake8-logging
86 | "PIE", # flake8-pie
87 | "T20", # flake8-print
88 | "PYI", # flake8-pyi
89 | "PT", # flake8-pytest-style
90 | "Q", # flake8-quotes
91 | "RSE", # flake8-raise
92 | "RET", # flake8-return
93 | "SIM", # flake8-simplify
94 | "ARG", # flake8-unused-arguments
95 | "PTH", # flake8-use-pathlib
96 | "PERF", # perflint
97 | "NPY", # numpy
98 | "FAST", # fastapi
99 | "FURB", # refurb
100 | "RUF", # ruff specific
101 | ]
102 | ignore = [
103 | "E501", # line too long, handled by black
104 | "B008", # do not perform function calls in argument defaults
105 | "B904", # raise from
106 | "C901", # too complex
107 | "F403", # star imports
108 | "E731", # lambda assignment
109 | "C416", # list comprehension to list()
110 | "ANN002", # missing type annotations on *args
111 | "ANN003", # missing type annotations on **kwargs
112 | "COM812", # trailing comma missing
113 | "ISC001", # implicit string concatenation (handled by format)
114 | "FAST002", # fastapi annotated
115 | ]
116 | exclude = [".git"]
117 |
118 | [tool.ruff.lint.flake8-quotes]
119 | docstring-quotes = "double"
120 |
121 | [tool.ruff.lint.isort]
122 | known-first-party = ["app", "tests", "pyroclient"]
123 | known-third-party = ["fastapi"]
124 |
125 | [tool.ruff.lint.per-file-ignores]
126 | "**/__init__.py" = ["I001", "F401", "CPY001"]
127 | "scripts/**.py" = ["D", "T201", "S101", "ANN", "RUF030"]
128 | ".github/**.py" = ["D", "T201", "ANN"]
129 | "src/tests/**.py" = ["D103", "CPY001", "S101", "T201", "ANN001", "ANN201", "ANN202", "ARG001", "RUF030"]
130 | "src/migrations/versions/**.py" = ["CPY001"]
131 | "src/migrations/**.py" = ["ANN"]
132 | "src/app/main.py" = ["ANN"]
133 | "src/app/schemas/**.py" = ["A"]
134 | "src/app/models.py" = ["A"]
135 | "client/docs/**.py" = ["E402", "D103", "ANN", "A001", "ARG001"]
136 | "client/setup.py" = ["T201"]
137 | "client/tests/**.py" = ["D103", "CPY001", "S101", "T201", "ANN", "ARG001", "RUF030"]
138 |
139 | [tool.ruff.format]
140 | quote-style = "double"
141 | indent-style = "space"
142 |
143 | [tool.mypy]
144 | python_version = "3.11"
145 | mypy_path = "src/"
146 | files = "src/app"
147 | show_error_codes = true
148 | pretty = true
149 | warn_unused_ignores = true
150 | warn_redundant_casts = true
151 | no_implicit_optional = true
152 | check_untyped_defs = true
153 | implicit_reexport = false
154 | explicit_package_bases = true
155 | plugins = ["pydantic.mypy"]
156 |
157 | [[tool.mypy.overrides]]
158 | module = [
159 | "magic",
160 | "boto3",
161 | "botocore.*",
162 | "databases",
163 | "posthog",
164 | "prometheus_fastapi_instrumentator",
165 | "pydantic_settings",
166 | ]
167 | ignore_missing_imports = true
168 |
--------------------------------------------------------------------------------
/scripts/dbdiagram.txt:
--------------------------------------------------------------------------------
1 | Enum "userrole" {
2 | "admin"
3 | "agent"
4 | "user"
5 | }
6 |
7 | Table "User" as U {
8 | "id" int [not null]
9 | "organization_id" int [ref: > O.id, not null]
10 | "role" userrole [not null]
11 | "login" varchar [not null]
12 | "hashed_password" varchar [not null]
13 | "created_at" timestamp [not null]
14 | Indexes {
15 | (id, login) [pk]
16 | }
17 | }
18 |
19 | Table "Camera" as C {
20 | "id" int [not null]
21 | "organization_id" int [ref: > O.id, not null]
22 | "name" varchar [not null]
23 | "angle_of_view" float [not null]
24 | "elevation" float [not null]
25 | "lat" float [not null]
26 | "lon" float [not null]
27 | "is_trustable" bool [not null]
28 | "created_at" timestamp [not null]
29 | "last_active_at" timestamp
30 | "last_image" varchar
31 | Indexes {
32 | (id) [pk]
33 | }
34 | }
35 |
36 | Table "Sequence" as S {
37 | "id" int [not null]
38 | "camera_id" int [ref: > C.id, not null]
39 | "azimuth" float [not null]
40 | "is_wildfire" bool
41 | "started_at" timestamp [not null]
42 | "last_seen_at" timestamp [not null]
43 | Indexes {
44 | (id) [pk]
45 | }
46 | }
47 |
48 | Table "Detection" as D {
49 | "id" int [not null]
50 | "camera_id" int [ref: > C.id, not null]
51 | "sequence_id" int [ref: > S.id]
52 | "azimuth" float [not null]
53 | "bucket_key" varchar [not null]
54 | "bboxes" varchar [not null]
55 | "created_at" timestamp [not null]
56 | Indexes {
57 | (id) [pk]
58 | }
59 | }
60 |
61 | Table "Organization" as O {
62 | "id" int [not null]
63 | "name" varchar [not null]
64 | "telegram_id" varchar
65 | Indexes {
66 | (id) [pk]
67 | }
68 | }
69 |
70 |
71 | Table "Webhook" as W {
72 | "id" int [not null]
73 | "url" varchar [not null]
74 | Indexes {
75 | (id) [pk]
76 | }
77 | }
78 |
--------------------------------------------------------------------------------
/scripts/localstack/setup-s3.sh:
--------------------------------------------------------------------------------
#!/usr/bin/env bash
# Localstack init hook: provision the S3 bucket used by the backend.
awslocal s3 mb s3://admin
# Upload an empty placeholder object so the "s3 ls s3://admin" healthcheck succeeds.
echo -n "" > my_file
awslocal s3 cp my_file s3://admin/my_file
5 |
--------------------------------------------------------------------------------
/scripts/pg_extract.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Extract a single database's section from a pg_dumpall-style SQL dump.
# Usage: pg_extract.sh <dump_file> <db_name>
[ $# -lt 2 ] && { echo "Usage: $0 <dump_file> <db_name>"; exit 1; }
# Keep everything from the "\connect <db_name>" line onward, then drop the
# trailing "dump complete" marker and everything after it.
# $1 is quoted so dump paths containing spaces work.
sed "/connect.*$2/,\$!d" "$1" | sed "/PostgreSQL database dump complete/,\$d"
4 |
--------------------------------------------------------------------------------
/scripts/requirements.txt:
--------------------------------------------------------------------------------
1 | requests>=2.31.0,<3.0.0
2 | python-dotenv>=1.0.0,<2.0.0
3 |
--------------------------------------------------------------------------------
/scripts/test_e2e.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2020-2025, Pyronear.
2 |
3 | # This program is licensed under the Apache License 2.0.
# See LICENSE or go to <https://opensource.org/licenses/Apache-2.0> for full license details.
5 |
6 | import argparse
7 | import time
8 | from datetime import datetime
9 | from typing import Any, Dict, Optional
10 |
11 | import requests
12 |
13 |
def get_token(api_url: str, login: str, pwd: str) -> str:
    """Authenticate against the API and return a bearer access token.

    Raises:
        ValueError: when the credentials are rejected by the API
    """
    creds = {"username": login, "password": pwd}
    response = requests.post(f"{api_url}/login/creds", data=creds, timeout=5)
    if response.status_code != 200:
        raise ValueError(response.json()["detail"])
    return response.json()["access_token"]
23 |
24 |
def api_request(
    method_type: str,
    route: str,
    headers: Optional[Dict[str, str]] = None,
    payload: Optional[Dict[str, Any]] = None,
    timeout: float = 10,
) -> Any:
    """Perform an HTTP request against the API and return the decoded JSON body.

    Fixes the original signature bug where ``headers=Dict[str, str]`` assigned
    the typing construct as a *default value* instead of annotating the
    parameter; also adds a request timeout so a hung server cannot block forever.

    Args:
        method_type: name of the `requests` method ("get", "post", "patch", "delete", ...)
        route: full URL of the endpoint
        headers: HTTP headers (e.g. authorization) to send with the request
        payload: optional JSON body for write operations
        timeout: request timeout in seconds

    Returns:
        the JSON-decoded response body

    Raises:
        AssertionError: when the response status code is not 2xx (the decoded
            body, or raw text, is attached as the assertion message)
    """
    kwargs = {"json": payload} if isinstance(payload, dict) else {}

    response = getattr(requests, method_type)(route, headers=headers, timeout=timeout, **kwargs)
    try:
        detail = response.json()
    except (requests.exceptions.JSONDecodeError, KeyError):
        detail = response.text
    # Pass the body itself as the assertion message (the original used
    # `print(detail)`, which always evaluates to None as a message)
    assert response.status_code // 100 == 2, detail
    return response.json()
35 |
36 |
def main(args):
    """Run an end-to-end scenario against a live API instance.

    Creates an organization, a user and a camera, pushes detections to build a
    sequence, checks sequence labeling and retrieval, then deletes everything
    in reverse dependency order.

    Args:
        args: parsed CLI arguments; only ``args.endpoint`` (API base URL) is used

    Raises:
        AssertionError: when any API response deviates from the expected outcome
    """
    superuser_login = "superadmin_login"
    superuser_pwd = "superadmin_pwd"  # noqa S105

    start_ts = time.time()
    # Retrieve superuser token
    superuser_auth = {
        "Authorization": f"Bearer {get_token(args.endpoint, superuser_login, superuser_pwd)}",
        "Content-Type": "application/json",
    }

    # Create an organization
    org_name = "my_org"
    org_id = api_request("post", f"{args.endpoint}/organizations/", superuser_auth, {"name": org_name})["id"]

    agent_login = "my_user"
    agent_pwd = "my_pwd"  # noqa S105

    # create a user
    payload = {"organization_id": org_id, "login": agent_login, "password": agent_pwd, "role": "agent"}
    user_id = api_request("post", f"{args.endpoint}/users/", superuser_auth, payload)["id"]
    agent_auth = {
        "Authorization": f"Bearer {get_token(args.endpoint, agent_login, agent_pwd)}",
        "Content-Type": "application/json",
    }
    # Get & Fetch access
    api_request("get", f"{args.endpoint}/users/{user_id}/", superuser_auth)
    api_request("get", f"{args.endpoint}/users/", superuser_auth)
    # Check that redirect is working (no trailing slash)
    api_request("get", f"{args.endpoint}/users", superuser_auth)
    # Modify access
    new_pwd = "my_new_pwd"  # noqa S105
    api_request("patch", f"{args.endpoint}/users/{user_id}/", superuser_auth, {"password": new_pwd})

    # Create a camera
    camera_name = "my_device"
    payload = {
        "name": camera_name,
        "organization_id": org_id,
        "angle_of_view": 70.0,
        "elevation": 100,
        "lat": 44.7,
        "lon": 4.5,
        "azimuth": 110,
    }
    cam_id = api_request("post", f"{args.endpoint}/cameras/", agent_auth, payload)["id"]

    # Camera-scoped token, issued by the superadmin for the new camera
    cam_token = requests.post(
        f"{args.endpoint}/cameras/{cam_id}/token",
        timeout=5,
        headers=superuser_auth,
    ).json()["access_token"]

    cam_auth = {"Authorization": f"Bearer {cam_token}"}

    # Take a picture
    file_bytes = requests.get("https://pyronear.org/img/logo.png", timeout=5).content
    # Update cam last image
    response = requests.patch(
        f"{args.endpoint}/cameras/image",
        headers=cam_auth,
        files={"file": ("logo.png", file_bytes, "image/png")},
        timeout=5,
    )
    assert response.status_code == 200, response.text
    assert response.json()["last_image"] is not None
    # Check that URL is displayed when we fetch all cameras
    response = requests.get(f"{args.endpoint}/cameras", headers=agent_auth, timeout=5)
    assert response.status_code == 200, response.text
    assert response.json()[0]["last_image_url"] is not None

    file_bytes = requests.get("https://pyronear.org/img/logo.png", timeout=5).content
    # Create a detection
    response = requests.post(
        f"{args.endpoint}/detections",
        headers=cam_auth,
        data={"azimuth": 45.6, "bboxes": "[(0.1,0.1,0.8,0.8,0.5)]"},
        files={"file": ("logo.png", file_bytes, "image/png")},
        timeout=5,
    )
    assert response.status_code == 201, response.text
    detection_id = response.json()["id"]
    today = datetime.fromisoformat(response.json()["created_at"]).date()

    # Fetch detections & their URLs
    api_request("get", f"{args.endpoint}/detections", agent_auth)
    api_request("get", f"{args.endpoint}/detections/{detection_id}/url", agent_auth)

    # Create a sequence by adding two additional detections
    det_id_2 = requests.post(
        f"{args.endpoint}/detections",
        headers=cam_auth,
        data={"azimuth": 45.6, "bboxes": "[(0.1,0.1,0.8,0.8,0.5)]"},
        files={"file": ("logo.png", file_bytes, "image/png")},
        timeout=5,
    ).json()["id"]
    det_id_3 = requests.post(
        f"{args.endpoint}/detections",
        headers=cam_auth,
        data={"azimuth": 45.6, "bboxes": "[(0.1,0.1,0.8,0.8,0.5)]"},
        files={"file": ("logo.png", file_bytes, "image/png")},
        timeout=5,
    ).json()["id"]
    # Check that a sequence has been created
    # NOTE(review): hard-coded id 1 assumes a freshly initialized DB; rerunning
    # against a used database will fail here — confirm before reuse.
    sequence = api_request("get", f"{args.endpoint}/sequences/1", agent_auth)
    assert sequence["camera_id"] == cam_id
    assert sequence["started_at"] == response.json()["created_at"]
    assert sequence["last_seen_at"] > sequence["started_at"]
    assert sequence["azimuth"] == response.json()["azimuth"]
    # Fetch the latest sequence
    assert len(api_request("get", f"{args.endpoint}/sequences/unlabeled/latest", agent_auth)) == 1
    # Fetch from date
    assert len(api_request("get", f"{args.endpoint}/sequences/all/fromdate?from_date=2019-09-10", agent_auth)) == 0
    assert (
        len(api_request("get", f"{args.endpoint}/sequences/all/fromdate?from_date={today.isoformat()}", agent_auth))
        == 1
    )
    # Label the sequence
    api_request("patch", f"{args.endpoint}/sequences/{sequence['id']}/label", agent_auth, {"is_wildfire": True})
    # Check the sequence's detections (returned most-recent first)
    dets = api_request("get", f"{args.endpoint}/sequences/{sequence['id']}/detections", agent_auth)
    assert len(dets) == 3
    assert dets[0]["id"] == det_id_3
    assert dets[1]["id"] == det_id_2
    assert dets[2]["id"] == detection_id

    # Cleaning (order is important because of foreign key protection in existing tables)
    api_request("delete", f"{args.endpoint}/detections/{detection_id}/", superuser_auth)
    api_request("delete", f"{args.endpoint}/detections/{det_id_2}/", superuser_auth)
    api_request("delete", f"{args.endpoint}/detections/{det_id_3}/", superuser_auth)
    api_request("delete", f"{args.endpoint}/sequences/{sequence['id']}/", superuser_auth)
    api_request("delete", f"{args.endpoint}/cameras/{cam_id}/", superuser_auth)
    api_request("delete", f"{args.endpoint}/users/{user_id}/", superuser_auth)
    api_request("delete", f"{args.endpoint}/organizations/{org_id}/", superuser_auth)
    print(f"SUCCESS in {time.time() - start_ts:.3}s")

    return
174 |
175 |
def parse_args():
    """Build and parse the CLI arguments for the end-to-end test script."""
    parser = argparse.ArgumentParser(
        description="Pyronear API End-to-End test",
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    )
    parser.add_argument("--endpoint", type=str, default="http://localhost:5050/api/v1", help="the API endpoint")
    return parser.parse_args()
184 |
185 |
if __name__ == "__main__":
    # Entry point: parse CLI args, then run the full end-to-end scenario
    args = parse_args()
    main(args)
189 |
--------------------------------------------------------------------------------
/src/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.11-slim
2 |
3 | WORKDIR /app
4 |
5 | # set environment variables
6 | ENV PYTHONDONTWRITEBYTECODE=1
7 | ENV PYTHONUNBUFFERED=1
8 | ENV PYTHONPATH="/app"
9 |
10 | # Install curl
11 | RUN apt-get -y update \
12 | && apt-get -y install curl libmagic1 \
13 | && apt-get clean \
14 | && rm -rf /var/lib/apt/lists/*
15 |
16 | # Install uv
17 | # Ref: https://docs.astral.sh/uv/guides/integration/docker/#installing-uv
18 | COPY --from=ghcr.io/astral-sh/uv:0.5.13 /uv /bin/uv
19 |
20 | # copy requirements file
21 | COPY requirements.txt /tmp/requirements.txt
22 | # install dependencies
23 | RUN uv pip install --no-cache --system -r /tmp/requirements.txt
24 |
25 | # copy project
26 | COPY src/alembic.ini /app/alembic.ini
27 | COPY src/migrations /app/migrations
28 | COPY src/app /app/app
29 |
--------------------------------------------------------------------------------
/src/alembic.ini:
--------------------------------------------------------------------------------
# A generic, single database configuration.
# Alembic configuration for the API's schema migrations (scripts under ./migrations).

[alembic]
# path to migration scripts
script_location = migrations

# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .

# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python-dateutil library that can be
# installed by adding `alembic[tz]` to the pip requirements
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =

# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version location specification; This defaults
# to migrations/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:migrations/versions

# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
version_path_separator = os  # Use os.pathsep. Default configuration used for new projects.

# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8

# NOTE(review): the DB URL is intentionally not set here — presumably it is injected
# at runtime by migrations/env.py (e.g. from POSTGRES_URL); confirm before relying on it.
# sqlalchemy.url = driver://user:pass@localhost/dbname


[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples

# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME

# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
# hooks = ruff
# ruff.type = exec
# ruff.executable = %(here)s/.venv/bin/ruff
# ruff.options = --fix REVISION_SCRIPT_FILENAME

# Logging configuration: root/sqlalchemy at WARN, alembic at INFO, all to stderr
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
115 |
--------------------------------------------------------------------------------
/src/app/api/api_v1/endpoints/cameras.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2020-2025, Pyronear.
2 |
3 | # This program is licensed under the Apache License 2.0.
4 | # See LICENSE or go to for full license details.
5 |
6 | import asyncio
7 | from datetime import datetime
8 | from typing import List, cast
9 |
10 | from fastapi import APIRouter, Depends, File, HTTPException, Path, Security, UploadFile, status
11 | from pydantic import Field
12 |
13 | from app.api.dependencies import get_camera_crud, get_jwt
14 | from app.core.config import settings
15 | from app.core.security import create_access_token
16 | from app.crud import CameraCRUD
17 | from app.models import Camera, Role, UserRole
18 | from app.schemas.cameras import CameraCreate, CameraEdit, CameraName, LastActive, LastImage
19 | from app.schemas.login import Token, TokenPayload
20 | from app.services.storage import s3_service, upload_file
21 | from app.services.telemetry import telemetry_client
22 |
23 | router = APIRouter()
24 |
25 |
@router.post("/", status_code=status.HTTP_201_CREATED, summary="Register a new camera")
async def register_camera(
    payload: CameraCreate,
    cameras: CameraCRUD = Depends(get_camera_crud),
    token_payload: TokenPayload = Security(get_jwt, scopes=[UserRole.ADMIN, UserRole.AGENT]),
) -> Camera:
    """Create a camera entry; agents may only register cameras in their own organization."""
    telemetry_client.capture(token_payload.sub, event="cameras-create", properties={"device_login": payload.name})
    is_admin = UserRole.ADMIN in token_payload.scopes
    if not is_admin and token_payload.organization_id != payload.organization_id:
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Access forbidden.")
    return await cameras.create(payload)
36 |
37 |
class CameraWithLastImgUrl(Camera):
    # Camera model enriched with the public URL of its most recent image (None when no image exists)
    last_image_url: str | None = Field(None, description="URL of the last image of the camera")
40 |
41 |
@router.get("/{camera_id}", status_code=status.HTTP_200_OK, summary="Fetch the information of a specific camera")
async def get_camera(
    camera_id: int = Path(..., gt=0),
    cameras: CameraCRUD = Depends(get_camera_crud),
    token_payload: TokenPayload = Security(get_jwt, scopes=[UserRole.ADMIN, UserRole.AGENT, UserRole.USER]),
) -> CameraWithLastImgUrl:
    """Return one camera plus its latest image URL; non-admins are limited to their own organization."""
    telemetry_client.capture(token_payload.sub, event="cameras-get", properties={"camera_id": camera_id})
    camera = cast(Camera, await cameras.get(camera_id, strict=True))
    if UserRole.ADMIN not in token_payload.scopes and token_payload.organization_id != camera.organization_id:
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Access forbidden.")
    url: str | None = None
    if camera.last_image is not None:
        bucket = s3_service.get_bucket(s3_service.resolve_bucket_name(camera.organization_id))
        url = bucket.get_public_url(camera.last_image)
    return CameraWithLastImgUrl(**camera.model_dump(), last_image_url=url)
59 |
60 |
@router.get("/", status_code=status.HTTP_200_OK, summary="Fetch all the cameras")
async def fetch_cameras(
    cameras: CameraCRUD = Depends(get_camera_crud),
    token_payload: TokenPayload = Security(get_jwt, scopes=[UserRole.ADMIN, UserRole.AGENT, UserRole.USER]),
) -> List[CameraWithLastImgUrl]:
    """List cameras together with the URL of their latest image.

    Admins see every camera (the bucket is resolved per camera organization); other
    roles only see their own organization's cameras through a single shared bucket.
    """
    telemetry_client.capture(token_payload.sub, event="cameras-fetch")
    if UserRole.ADMIN in token_payload.scopes:
        # list(...) instead of a pass-through comprehension (ruff PERF402/C416)
        cams = list(await cameras.fetch_all(order_by="id"))

        async def get_url_for_cam(cam: Camera) -> str | None:  # noqa: RUF029
            # Resolve the bucket per camera since admins span multiple organizations
            if cam.last_image:
                bucket = s3_service.get_bucket(s3_service.resolve_bucket_name(cam.organization_id))
                return bucket.get_public_url(cam.last_image)
            return None

        urls = await asyncio.gather(*[get_url_for_cam(cam) for cam in cams])
    else:
        # All cameras share the requester's organization, so resolve the bucket once
        bucket = s3_service.get_bucket(s3_service.resolve_bucket_name(token_payload.organization_id))
        cams = list(
            await cameras.fetch_all(order_by="id", filters=("organization_id", token_payload.organization_id))
        )

        async def get_url_for_cam_single_bucket(cam: Camera) -> str | None:  # noqa: RUF029
            if cam.last_image:
                return bucket.get_public_url(cam.last_image)
            return None

        urls = await asyncio.gather(*[get_url_for_cam_single_bucket(cam) for cam in cams])
    return [CameraWithLastImgUrl(**cam.model_dump(), last_image_url=url) for cam, url in zip(cams, urls)]
93 |
94 |
@router.patch("/heartbeat", status_code=status.HTTP_200_OK, summary="Update last ping of a camera")
async def heartbeat(
    cameras: CameraCRUD = Depends(get_camera_crud),
    token_payload: TokenPayload = Security(get_jwt, scopes=[Role.CAMERA]),
) -> Camera:
    """Refresh the authenticated camera's `last_active_at` (camera tokens carry the camera id in `sub`)."""
    # telemetry_client.capture(f"camera|{token_payload.sub}", event="cameras-heartbeat")
    # NOTE(review): datetime.utcnow() is naive UTC and deprecated in Python 3.12 —
    # consider datetime.now(timezone.utc) if the DB column is tz-aware; confirm schema first.
    return await cameras.update(token_payload.sub, LastActive(last_active_at=datetime.utcnow()))
102 |
103 |
@router.patch("/image", status_code=status.HTTP_200_OK, summary="Update last image of a camera")
async def update_image(
    file: UploadFile = File(..., alias="file"),
    cameras: CameraCRUD = Depends(get_camera_crud),
    token_payload: TokenPayload = Security(get_jwt, scopes=[Role.CAMERA]),
) -> Camera:
    """Store a new "last image" for the authenticated camera and refresh its activity timestamp."""
    # telemetry_client.capture(f"camera|{token_payload.sub}", event="cameras-image")
    cam = cast(Camera, await cameras.get(token_payload.sub, strict=True))
    # Upload the new image first, so a failed upload leaves the previous one untouched
    bucket_key = await upload_file(file, token_payload.organization_id, token_payload.sub)
    # If the upload succeeds, delete the previous image
    if isinstance(cam.last_image, str):
        s3_service.get_bucket(s3_service.resolve_bucket_name(token_payload.organization_id)).delete_file(cam.last_image)
    # Update the DB entry
    return await cameras.update(token_payload.sub, LastImage(last_image=bucket_key, last_active_at=datetime.utcnow()))
118 |
119 |
@router.post("/{camera_id}/token", status_code=status.HTTP_200_OK, summary="Request an access token for the camera")
async def create_camera_token(
    camera_id: int = Path(..., gt=0),
    cameras: CameraCRUD = Depends(get_camera_crud),
    token_payload: TokenPayload = Security(get_jwt, scopes=[UserRole.ADMIN]),
) -> Token:
    """Issue a non-expiring access token scoped to a single camera (admin only)."""
    telemetry_client.capture(token_payload.sub, event="cameras-token", properties={"camera_id": camera_id})
    camera = cast(Camera, await cameras.get(camera_id, strict=True))
    # Camera-scoped claims: `sub` is the camera id, scope is "camera", org inherited from the camera
    token_data = {"sub": str(camera_id), "scopes": ["camera"], "organization_id": camera.organization_id}
    token = create_access_token(token_data, settings.JWT_UNLIMITED)
    return Token(access_token=token, token_type="bearer")  # noqa S106
132 |
133 |
@router.patch("/{camera_id}/location", status_code=status.HTTP_200_OK, summary="Update the location of a camera")
async def update_camera_location(
    payload: CameraEdit,
    camera_id: int = Path(..., gt=0),
    cameras: CameraCRUD = Depends(get_camera_crud),
    token_payload: TokenPayload = Security(get_jwt, scopes=[UserRole.ADMIN]),
) -> Camera:
    """Update a camera's location fields (admin only) and return the updated row."""
    telemetry_client.capture(token_payload.sub, event="cameras-update-location", properties={"camera_id": camera_id})
    return await cameras.update(camera_id, payload)
143 |
144 |
@router.patch("/{camera_id}/name", status_code=status.HTTP_200_OK, summary="Update the name of a camera")
async def update_camera_name(
    payload: CameraName,
    camera_id: int = Path(..., gt=0),
    cameras: CameraCRUD = Depends(get_camera_crud),
    token_payload: TokenPayload = Security(get_jwt, scopes=[UserRole.ADMIN]),
) -> Camera:
    """Rename a camera (admin only) and return the updated row."""
    telemetry_client.capture(token_payload.sub, event="cameras-update-name", properties={"camera_id": camera_id})
    return await cameras.update(camera_id, payload)
154 |
155 |
@router.delete("/{camera_id}", status_code=status.HTTP_200_OK, summary="Delete a camera")
async def delete_camera(
    camera_id: int = Path(..., gt=0),
    cameras: CameraCRUD = Depends(get_camera_crud),
    token_payload: TokenPayload = Security(get_jwt, scopes=[UserRole.ADMIN]),
) -> None:
    """Delete a camera row (admin only). Does not touch its images in object storage."""
    telemetry_client.capture(token_payload.sub, event="cameras-deletion", properties={"camera_id": camera_id})
    await cameras.delete(camera_id)
164 |
--------------------------------------------------------------------------------
/src/app/api/api_v1/endpoints/login.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2020-2025, Pyronear.
2 |
3 | # This program is licensed under the Apache License 2.0.
4 | # See LICENSE or go to for full license details.
5 |
6 | from fastapi import APIRouter, Depends, HTTPException, Security, status
7 | from fastapi.security import OAuth2PasswordRequestForm
8 |
9 | from app.api.dependencies import get_jwt, get_user_crud
10 | from app.core.config import settings
11 | from app.core.security import create_access_token, verify_password
12 | from app.crud import UserCRUD
13 | from app.models import Role
14 | from app.schemas.login import Token, TokenPayload
15 | from app.services.telemetry import telemetry_client
16 |
17 | router = APIRouter(redirect_slashes=True)
18 |
19 |
@router.post("/creds", status_code=status.HTTP_200_OK, summary="Request an access token using credentials")
async def login_with_creds(
    form_data: OAuth2PasswordRequestForm = Depends(),
    users: UserCRUD = Depends(get_user_crud),
) -> Token:
    """This API follows the OAuth 2.0 specification.

    If the credentials are valid, creates a new access token.

    By default, the token expires after 1 year.
    """
    user = await users.get_by_login(form_data.username)
    # Unknown login, password-less account and wrong password all yield the same 401
    creds_ok = (
        user is not None
        and user.hashed_password is not None
        and verify_password(form_data.password, user.hashed_password)
    )
    if not creds_ok:
        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid credentials.")
    telemetry_client.capture(user.id, event="user-login", properties={"method": "credentials"})
    # Embed the user id, role-derived scopes and organization in the JWT claims
    claims = {"sub": str(user.id), "scopes": user.role.split(), "organization_id": user.organization_id}
    return Token(access_token=create_access_token(claims, settings.JWT_UNLIMITED), token_type="bearer")  # noqa S106
41 |
42 |
@router.get("/validate", status_code=status.HTTP_200_OK, summary="Check token validity")
def check_token_validity(
    payload: TokenPayload = Security(get_jwt, scopes=[Role.USER, Role.CAMERA, Role.AGENT, Role.ADMIN]),
) -> TokenPayload:
    """Echo the decoded token payload; get_jwt has already rejected invalid or expired tokens."""
    return payload
48 |
--------------------------------------------------------------------------------
/src/app/api/api_v1/endpoints/organizations.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2024-2025, Pyronear.
2 |
3 | # This program is licensed under the Apache License 2.0.
4 | # See LICENSE or go to for full license details.
5 |
6 |
7 | from typing import List, cast
8 |
9 | from fastapi import APIRouter, Depends, HTTPException, Path, Security, status
10 |
11 | from app.api.dependencies import get_jwt, get_organization_crud
12 | from app.crud import OrganizationCRUD
13 | from app.models import Organization, UserRole
14 | from app.schemas.login import TokenPayload
15 | from app.schemas.organizations import OrganizationCreate, TelegramChannelId
16 | from app.services.storage import s3_service
17 | from app.services.telegram import telegram_client
18 | from app.services.telemetry import telemetry_client
19 |
20 | router = APIRouter()
21 |
22 |
@router.post("/", status_code=status.HTTP_201_CREATED, summary="Register a new organization")
async def register_organization(
    payload: OrganizationCreate,
    organizations: OrganizationCRUD = Depends(get_organization_crud),
    token_payload: TokenPayload = Security(get_jwt, scopes=[UserRole.ADMIN]),
) -> Organization:
    """Create an organization and its dedicated S3 bucket (admin only).

    The DB row is rolled back if the bucket cannot be created, keeping both stores consistent.
    """
    telemetry_client.capture(
        token_payload.sub, event="organization-create", properties={"organization_name": payload.name}
    )
    organization = await organizations.create(payload)
    bucket_name = s3_service.resolve_bucket_name(organization.id)
    if not s3_service.create_bucket(bucket_name):
        # Delete the organization if the bucket creation failed
        await organizations.delete(organization.id)
        raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail="Failed to create bucket")
    return organization
39 |
40 |
@router.get(
    "/{organization_id}", status_code=status.HTTP_200_OK, summary="Fetch the information of a specific organization"
)
async def get_organization(
    organization_id: int = Path(..., gt=0),
    organizations: OrganizationCRUD = Depends(get_organization_crud),
    token_payload: TokenPayload = Security(get_jwt, scopes=[UserRole.ADMIN]),
) -> Organization:
    """Return a single organization by id (admin only); strict lookup raises if it doesn't exist."""
    telemetry_client.capture(
        token_payload.sub, event="organizations-get", properties={"organization_id": organization_id}
    )
    return cast(Organization, await organizations.get(organization_id, strict=True))
53 |
54 |
@router.get("/", status_code=status.HTTP_200_OK, summary="Fetch all the organizations")
async def fetch_organizations(
    organizations: OrganizationCRUD = Depends(get_organization_crud),
    token_payload: TokenPayload = Security(get_jwt, scopes=[UserRole.ADMIN]),
) -> List[Organization]:
    """Return every organization (admin only)."""
    telemetry_client.capture(token_payload.sub, event="organizations-fetch")
    # list(...) instead of a pass-through comprehension (ruff C416/PERF402)
    return list(await organizations.fetch_all())
62 |
63 |
@router.delete("/{organization_id}", status_code=status.HTTP_200_OK, summary="Delete a organization")
async def delete_organization(
    organization_id: int = Path(..., gt=0),
    organizations: OrganizationCRUD = Depends(get_organization_crud),
    token_payload: TokenPayload = Security(get_jwt, scopes=[UserRole.ADMIN]),
) -> None:
    """Delete an organization and its S3 bucket (admin only).

    The bucket is removed first; if that fails the organization row is kept so the
    operation can be retried safely.
    """
    telemetry_client.capture(
        token_payload.sub, event="organizations-deletion", properties={"organization_id": organization_id}
    )
    bucket_name = s3_service.resolve_bucket_name(organization_id)
    if not (await s3_service.delete_bucket(bucket_name)):
        # Bug fix: the error message was copy-pasted from the creation path ("Failed to create bucket")
        raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail="Failed to delete bucket")
    await organizations.delete(organization_id)
77 |
78 |
@router.patch(
    "/{organization_id}", status_code=status.HTTP_200_OK, summary="Update telegram channel ID of an organization"
)
async def update_telegram_id(
    payload: TelegramChannelId,
    organization_id: int = Path(..., gt=0),
    organizations: OrganizationCRUD = Depends(get_organization_crud),
    token_payload: TokenPayload = Security(get_jwt, scopes=[UserRole.ADMIN]),
) -> Organization:
    """Set or clear an organization's Telegram channel id (admin only), verifying bot access first."""
    telemetry_client.capture(
        token_payload.sub, event="organizations-update-telegram-id", properties={"organization_id": organization_id}
    )
    # Check if the telegram channel ID is valid (skipped when clearing the id with a falsy payload)
    if payload.telegram_id and not telegram_client.has_channel_access(payload.telegram_id):
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Unable to access Telegram channel")
    return await organizations.update(organization_id, payload)
95 |
--------------------------------------------------------------------------------
/src/app/api/api_v1/endpoints/users.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2020-2025, Pyronear.
2 |
3 | # This program is licensed under the Apache License 2.0.
4 | # See LICENSE or go to for full license details.
5 |
6 | from typing import List, Union, cast
7 |
8 | from fastapi import APIRouter, Depends, HTTPException, Path, Security, status
9 |
10 | from app.api.dependencies import get_jwt, get_user_crud
11 | from app.core.security import hash_password
12 | from app.crud import UserCRUD
13 | from app.models import User, UserRole
14 | from app.schemas.login import TokenPayload
15 | from app.schemas.users import Cred, CredHash, UserCreate
16 | from app.services.telemetry import telemetry_client
17 |
18 | router = APIRouter()
19 |
20 |
async def _create_user(payload: UserCreate, users: UserCRUD, requester_id: Union[int, None] = None) -> User:
    """Create a user after checking login unicity, and record the creation in telemetry.

    `requester_id` identifies the admin performing the creation; when omitted, the
    newly created user is assumed to be the requester.
    """
    # Reject duplicate logins up front
    if (await users.get_by_login(payload.login, strict=False)) is not None:
        raise HTTPException(status.HTTP_409_CONFLICT, "Login already taken")

    new_user = await users.create(
        User(
            login=payload.login,
            organization_id=payload.organization_id,
            hashed_password=hash_password(payload.password),
            role=payload.role,
        )
    )

    # Link the telemetry identity to the login, then record who created the account
    telemetry_client.alias(new_user.id, payload.login)
    actor_id = requester_id if isinstance(requester_id, int) else new_user.id
    telemetry_client.capture(actor_id, event="user-creation", properties={"created_user_id": new_user.id})
    return new_user
46 |
47 |
@router.post("/", status_code=status.HTTP_201_CREATED, summary="Register a new user")
async def create_user(
    payload: UserCreate,
    users: UserCRUD = Depends(get_user_crud),
    token_payload: TokenPayload = Security(get_jwt, scopes=[UserRole.ADMIN]),
) -> User:
    """Create a user (admin only); unicity check and telemetry are handled by _create_user."""
    return await _create_user(payload, users, token_payload.sub)
55 |
56 |
@router.get("/{user_id}", status_code=status.HTTP_200_OK, summary="Fetch the information of a specific user")
async def get_user(
    user_id: int = Path(..., gt=0),
    users: UserCRUD = Depends(get_user_crud),
    token_payload: TokenPayload = Security(get_jwt, scopes=[UserRole.ADMIN]),
) -> User:
    """Return a single user by id (admin only); strict lookup raises if it doesn't exist."""
    telemetry_client.capture(token_payload.sub, event="user-get", properties={"user_id": user_id})
    return cast(User, await users.get(user_id, strict=True))
65 |
66 |
@router.get("/", status_code=status.HTTP_200_OK, summary="Fetch all the users")
async def fetch_users(
    users: UserCRUD = Depends(get_user_crud),
    token_payload: TokenPayload = Security(get_jwt, scopes=[UserRole.ADMIN]),
) -> List[User]:
    """Return every user (admin only)."""
    telemetry_client.capture(token_payload.sub, event="user-fetch")
    # list(...) instead of a pass-through comprehension (ruff C416/PERF402)
    return list(await users.fetch_all())
74 |
75 |
@router.patch("/{user_id}", status_code=status.HTTP_200_OK, summary="Updates a user's password")
async def update_user_password(
    payload: Cred,
    user_id: int = Path(..., gt=0),
    users: UserCRUD = Depends(get_user_crud),
    token_payload: TokenPayload = Security(get_jwt, scopes=[UserRole.ADMIN]),
) -> User:
    """Replace a user's password with the hash of the provided one (admin only)."""
    telemetry_client.capture(token_payload.sub, event="user-pwd", properties={"user_id": user_id})
    # Only the hash is ever persisted
    hashed = hash_password(payload.password)
    return await users.update(user_id, CredHash(hashed_password=hashed))
86 |
87 |
@router.delete("/{user_id}", status_code=status.HTTP_200_OK, summary="Delete a user")
async def delete_user(
    user_id: int = Path(..., gt=0),
    users: UserCRUD = Depends(get_user_crud),
    token_payload: TokenPayload = Security(get_jwt, scopes=[UserRole.ADMIN]),
) -> None:
    """Delete a user row by id (admin only)."""
    telemetry_client.capture(token_payload.sub, event="user-deletion", properties={"user_id": user_id})
    await users.delete(user_id)
96 |
--------------------------------------------------------------------------------
/src/app/api/api_v1/endpoints/webhooks.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2024-2025, Pyronear.
2 |
3 | # This program is licensed under the Apache License 2.0.
4 | # See LICENSE or go to for full license details.
5 |
6 | from typing import List, cast
7 |
8 | from fastapi import APIRouter, Depends, Path, Security, status
9 |
10 | from app.api.dependencies import get_jwt, get_webhook_crud
11 | from app.crud import WebhookCRUD
12 | from app.models import UserRole, Webhook
13 | from app.schemas.login import TokenPayload
14 | from app.schemas.webhooks import WebhookCreate, WebhookCreation
15 | from app.services.telemetry import telemetry_client
16 |
17 | router = APIRouter()
18 |
19 |
@router.post("/", status_code=status.HTTP_201_CREATED, summary="Register a new webhook")
async def register_webhook(
    payload: WebhookCreate,
    webhooks: WebhookCRUD = Depends(get_webhook_crud),
    token_payload: TokenPayload = Security(get_jwt, scopes=[UserRole.ADMIN]),
) -> Webhook:
    """Register a webhook URL (admin only); the validated URL is stored as a plain string."""
    telemetry_client.capture(token_payload.sub, event="webhooks-create")
    return await webhooks.create(WebhookCreation(url=str(payload.url)))
28 |
29 |
@router.get("/{webhook_id}", status_code=status.HTTP_200_OK, summary="Fetch the information of a specific webhook")
async def get_webhook(
    webhook_id: int = Path(..., gt=0),
    webhooks: WebhookCRUD = Depends(get_webhook_crud),
    token_payload: TokenPayload = Security(get_jwt, scopes=[UserRole.ADMIN]),
) -> Webhook:
    """Return a single webhook by id (admin only); strict lookup raises if it doesn't exist."""
    telemetry_client.capture(token_payload.sub, event="webhooks-get", properties={"webhook_id": webhook_id})
    return cast(Webhook, await webhooks.get(webhook_id, strict=True))
38 |
39 |
@router.get("/", status_code=status.HTTP_200_OK, summary="Fetch all the webhooks")
async def fetch_webhooks(
    webhooks: WebhookCRUD = Depends(get_webhook_crud),
    token_payload: TokenPayload = Security(get_jwt, scopes=[UserRole.ADMIN]),
) -> List[Webhook]:
    """Return every registered webhook (admin only)."""
    telemetry_client.capture(token_payload.sub, event="webhooks-fetch")
    # list(...) instead of a pass-through comprehension (ruff C416/PERF402)
    return list(await webhooks.fetch_all())
47 |
48 |
@router.delete("/{webhook_id}", status_code=status.HTTP_200_OK, summary="Delete a webhook")
async def delete_webhook(
    webhook_id: int = Path(..., gt=0),
    webhooks: WebhookCRUD = Depends(get_webhook_crud),
    token_payload: TokenPayload = Security(get_jwt, scopes=[UserRole.ADMIN]),
) -> None:
    """Delete a webhook row by id (admin only)."""
    telemetry_client.capture(token_payload.sub, event="webhooks-deletion", properties={"webhook_id": webhook_id})
    await webhooks.delete(webhook_id)
57 |
--------------------------------------------------------------------------------
/src/app/api/api_v1/router.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2024-2025, Pyronear.
2 |
3 | # This program is licensed under the Apache License 2.0.
4 | # See LICENSE or go to for full license details.
5 |
6 | from fastapi import APIRouter
7 |
8 | from app.api.api_v1.endpoints import cameras, detections, login, organizations, sequences, users, webhooks
9 |
# Aggregate every v1 endpoint router under a single APIRouter; the prefixes drive
# the URL layout and the tags drive OpenAPI grouping.
api_router = APIRouter(redirect_slashes=True)
api_router.include_router(login.router, prefix="/login", tags=["login"])
api_router.include_router(users.router, prefix="/users", tags=["users"])
api_router.include_router(cameras.router, prefix="/cameras", tags=["cameras"])
api_router.include_router(detections.router, prefix="/detections", tags=["detections"])
api_router.include_router(sequences.router, prefix="/sequences", tags=["sequences"])
api_router.include_router(organizations.router, prefix="/organizations", tags=["organizations"])
api_router.include_router(webhooks.router, prefix="/webhooks", tags=["webhooks"])
18 |
--------------------------------------------------------------------------------
/src/app/api/dependencies.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2024-2025, Pyronear.
2 |
3 | # This program is licensed under the Apache License 2.0.
4 | # See LICENSE or go to for full license details.
5 |
6 | import logging
7 | from typing import Dict, Type, TypeVar, Union, cast
8 |
9 | from fastapi import Depends, HTTPException, status
10 | from fastapi.security import OAuth2PasswordBearer, SecurityScopes
11 | from httpx import AsyncClient, HTTPStatusError
12 | from jwt import DecodeError, ExpiredSignatureError, InvalidSignatureError
13 | from jwt import decode as jwt_decode
14 | from pydantic import BaseModel, ValidationError
15 | from sqlmodel.ext.asyncio.session import AsyncSession
16 |
17 | from app.core.config import settings
18 | from app.crud import CameraCRUD, DetectionCRUD, OrganizationCRUD, SequenceCRUD, UserCRUD, WebhookCRUD
19 | from app.db import get_session
20 | from app.models import User, UserRole
21 | from app.schemas.login import TokenPayload
22 |
# Generic placeholder for the pydantic model a JWT payload is validated against
JWTTemplate = TypeVar("JWTTemplate")
logger = logging.getLogger("uvicorn.error")

# Fix: __all__ previously listed only get_user_crud, hiding the rest of the module's
# public API from star imports. Adding names is backward-compatible.
__all__ = [
    "get_camera_crud",
    "get_current_user",
    "get_detection_crud",
    "get_jwt",
    "get_organization_crud",
    "get_sequence_crud",
    "get_user_crud",
    "get_webhook_crud",
]

# Scope definition
oauth2_scheme = OAuth2PasswordBearer(
    tokenUrl=f"{settings.API_V1_STR}/login/creds",
    scopes={
        UserRole.ADMIN: "Admin rights on all routes.",
        UserRole.AGENT: "Read access on available information and write access on owned resources.",
        UserRole.USER: "Read access on available information.",
    },
)
37 |
38 |
def get_user_crud(session: AsyncSession = Depends(get_session)) -> UserCRUD:
    # Per-request factory: UserCRUD bound to the request-scoped DB session.
    return UserCRUD(session=session)


def get_camera_crud(session: AsyncSession = Depends(get_session)) -> CameraCRUD:
    # Per-request factory: CameraCRUD bound to the request-scoped DB session.
    return CameraCRUD(session=session)


def get_detection_crud(session: AsyncSession = Depends(get_session)) -> DetectionCRUD:
    # Per-request factory: DetectionCRUD bound to the request-scoped DB session.
    return DetectionCRUD(session=session)


def get_organization_crud(session: AsyncSession = Depends(get_session)) -> OrganizationCRUD:
    # Per-request factory: OrganizationCRUD bound to the request-scoped DB session.
    return OrganizationCRUD(session=session)


def get_webhook_crud(session: AsyncSession = Depends(get_session)) -> WebhookCRUD:
    # Per-request factory: WebhookCRUD bound to the request-scoped DB session.
    return WebhookCRUD(session=session)


def get_sequence_crud(session: AsyncSession = Depends(get_session)) -> SequenceCRUD:
    # Per-request factory: SequenceCRUD bound to the request-scoped DB session.
    return SequenceCRUD(session=session)
61 |
62 |
def decode_token(token: str, authenticate_value: Union[str, None] = None) -> Dict[str, str]:
    """Decode and verify a JWT, returning its raw claim dict.

    Args:
        token: the encoded JWT.
        authenticate_value: optional value for the WWW-Authenticate response header on failure.

    Raises:
        HTTPException: 401 on expired/invalid signature, 406 when the token cannot be decoded.
    """
    try:
        payload = jwt_decode(token, settings.JWT_SECRET, algorithms=[settings.JWT_ALGORITHM])
    except (ExpiredSignatureError, InvalidSignatureError):
        # NOTE(review): an invalid *signature* is also reported as "expired" here —
        # confirm this wording is intentional before changing client-facing behavior.
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Token has expired.",
            headers={"WWW-Authenticate": authenticate_value} if authenticate_value else None,
        )
    except DecodeError:
        raise HTTPException(
            status_code=status.HTTP_406_NOT_ACCEPTABLE,
            detail="Invalid token.",
            headers={"WWW-Authenticate": authenticate_value} if authenticate_value else None,
        )
    return payload
79 |
80 |
def process_token(
    token: str, jwt_template: Type[JWTTemplate], authenticate_value: Union[str, None] = None
) -> JWTTemplate:
    """Decode a JWT and validate its claims against the given payload template.

    Raises:
        HTTPException: propagated from decode_token, or 422 when the claims don't fit the template.
    """
    # Bug fix: authenticate_value was not forwarded, so errors raised inside decode_token
    # were missing the WWW-Authenticate header even when one was provided.
    payload = decode_token(token, authenticate_value)
    # Verify the JWT template
    try:
        return jwt_template(**payload)
    except ValidationError:
        raise HTTPException(
            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
            detail="Invalid token payload.",
            headers={"WWW-Authenticate": authenticate_value} if authenticate_value else None,
        )
94 |
95 |
def get_jwt(
    security_scopes: SecurityScopes,
    token: str = Depends(oauth2_scheme),
) -> TokenPayload:
    """FastAPI security dependency: decode the bearer token and enforce scope overlap.

    Raises:
        HTTPException: from token decoding/validation, or 403 when the token carries
            none of the scopes required by the route.
    """
    authenticate_value = f'Bearer scope="{security_scopes.scope_str}"' if security_scopes.scopes else "Bearer"
    # Bug fix: pass authenticate_value down so decode/validation errors also carry
    # the WWW-Authenticate header, not just the scope error below.
    jwt_payload = process_token(token, TokenPayload, authenticate_value)
    if set(jwt_payload.scopes).isdisjoint(security_scopes.scopes):
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Incompatible token scope.",
            headers={"WWW-Authenticate": authenticate_value},
        )
    return jwt_payload
109 |
110 |
async def get_current_user(
    security_scopes: SecurityScopes,
    token: str = Depends(oauth2_scheme),
    users: UserCRUD = Depends(get_user_crud),
) -> User:
    """Dependency to use as fastapi.security.Security with scopes"""
    # Validate the token and scopes first, then resolve the user row (strict lookup raises if missing)
    token_payload = get_jwt(security_scopes, token)
    return cast(User, await users.get(token_payload.sub, strict=True))
119 |
120 |
async def dispatch_webhook(url: str, payload: BaseModel) -> None:
    """POST a JSON-serialized pydantic payload to a webhook URL, logging the outcome.

    Args:
        url: destination webhook URL.
        payload: pydantic model to serialize as the request body.
    """
    async with AsyncClient(timeout=5) as client:
        try:
            # model_dump_json() already returns a JSON string; passing it via `json=`
            # would double-encode it (the receiver would get a quoted JSON string).
            # Send the serialized body directly with the proper content type instead.
            response = await client.post(
                url,
                content=payload.model_dump_json(),
                headers={"Content-Type": "application/json"},
            )
            response.raise_for_status()
            logger.info(f"Successfully dispatched to {url}")
        except HTTPStatusError as e:
            logger.error(f"Error dispatching webhook to {url}: {e.response.status_code} - {e.response.text}")
129 |
--------------------------------------------------------------------------------
/src/app/core/config.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2024-2025, Pyronear.
2 |
3 | # This program is licensed under the Apache License 2.0.
4 | # See LICENSE or go to for full license details.
5 |
6 | import os
7 | import secrets
8 | import socket
9 | from typing import Union
10 |
11 | from pydantic import field_validator
12 | from pydantic_settings import BaseSettings, SettingsConfigDict
13 |
14 | __all__ = ["settings"]
15 |
16 |
class Settings(BaseSettings):
    """Application configuration, read from environment variables at import time.

    Required variables (no fallback, KeyError at import if missing):
    SUPERADMIN_LOGIN, SUPERADMIN_PWD, SUPERADMIN_ORG, POSTGRES_URL,
    S3_ACCESS_KEY, S3_SECRET_KEY, S3_REGION, S3_ENDPOINT_URL.
    """

    # State
    PROJECT_NAME: str = "Pyronear - Wildfire Alert API"
    PROJECT_DESCRIPTION: str = "API for wildfire prevention, detection and monitoring"
    VERSION: str = "0.2.0.dev0"
    API_V1_STR: str = "/api/v1"
    CORS_ORIGIN: str = "*"
    SUPPORT_EMAIL: str = os.environ.get("SUPPORT_EMAIL", "support@pyronear.org")
    # Authentication
    SUPERADMIN_LOGIN: str = os.environ["SUPERADMIN_LOGIN"]
    SUPERADMIN_PWD: str = os.environ["SUPERADMIN_PWD"]
    SUPERADMIN_ORG: str = os.environ["SUPERADMIN_ORG"]
    # DB
    POSTGRES_URL: str = os.environ["POSTGRES_URL"]

    # NOTE(review): method name has a typo ("sqlachmey"); harmless since pydantic
    # identifies validators by decorator, not name.
    @field_validator("POSTGRES_URL")
    @classmethod
    def sqlachmey_uri(cls, v: str) -> str:
        # Fix for SqlAlchemy 1.4+
        if v.startswith("postgres://"):
            return v.replace("postgres://", "postgresql+asyncpg://", 1)
        return v

    # Security
    # Falls back to a random secret when unset: existing tokens are then
    # invalidated on every process restart.
    JWT_SECRET: str = os.environ.get("JWT_SECRET") or secrets.token_urlsafe(32)
    JWT_EXPIRE_MINUTES: int = 60
    JWT_UNLIMITED: int = 60 * 24 * 365
    JWT_ALGORITHM: str = "HS256"

    # DB conversion
    # Max length of the string-encoded bbox list: brackets + per-box parens,
    # 5 floats of (2 + DECIMALS) chars and 4 separators, plus inter-box commas.
    MAX_BOXES_PER_DETECTION: int = 5
    DECIMALS_PER_COORD: int = 3
    MAX_BBOX_STR_LENGTH: int = (
        2 + MAX_BOXES_PER_DETECTION * (2 + 5 * (2 + DECIMALS_PER_COORD) + 4 * 2) + (MAX_BOXES_PER_DETECTION - 1) * 2
    )

    # Storage
    S3_ACCESS_KEY: str = os.environ["S3_ACCESS_KEY"]
    S3_SECRET_KEY: str = os.environ["S3_SECRET_KEY"]
    S3_REGION: str = os.environ["S3_REGION"]
    S3_ENDPOINT_URL: str = os.environ["S3_ENDPOINT_URL"]
    S3_PROXY_URL: str = os.environ.get("S3_PROXY_URL", "")
    S3_URL_EXPIRATION: int = int(os.environ.get("S3_URL_EXPIRATION") or 24 * 3600)

    # Sequence handling
    SEQUENCE_RELAXATION_SECONDS: int = int(os.environ.get("SEQUENCE_RELAXATION_SECONDS") or 30 * 60)
    SEQUENCE_MIN_INTERVAL_DETS: int = int(os.environ.get("SEQUENCE_MIN_INTERVAL_DETS") or 3)
    SEQUENCE_MIN_INTERVAL_SECONDS: int = int(os.environ.get("SEQUENCE_MIN_INTERVAL_SECONDS") or 5 * 60)

    # Notifications
    TELEGRAM_TOKEN: Union[str, None] = os.environ.get("TELEGRAM_TOKEN")

    # Error monitoring
    SENTRY_DSN: Union[str, None] = os.environ.get("SENTRY_DSN")
    SERVER_NAME: str = os.environ.get("SERVER_NAME", socket.gethostname())

    @field_validator("SENTRY_DSN")
    @classmethod
    def sentry_dsn_can_be_blank(cls, v: str) -> Union[str, None]:
        # Treat empty/blank DSN as "Sentry disabled"
        if not isinstance(v, str) or len(v) == 0:
            return None
        return v

    # Product analytics
    POSTHOG_HOST: str = os.getenv("POSTHOG_HOST", "https://eu.posthog.com")
    POSTHOG_KEY: Union[str, None] = os.environ.get("POSTHOG_KEY")

    @field_validator("POSTHOG_KEY")
    @classmethod
    def posthog_key_can_be_blank(cls, v: str) -> Union[str, None]:
        # Treat empty/blank key as "PostHog disabled"
        if not isinstance(v, str) or len(v) == 0:
            return None
        return v

    # Debug is ON unless DEBUG is explicitly set to "false" (case-insensitive)
    DEBUG: bool = os.environ.get("DEBUG", "").lower() != "false"
    LOGO_URL: str = ""
    PROMETHEUS_ENABLED: bool = os.getenv("PROMETHEUS_ENABLED", "").lower() == "true"

    model_config = SettingsConfigDict(case_sensitive=True)


# Singleton settings instance shared across the app
settings = Settings()
99 |
--------------------------------------------------------------------------------
/src/app/core/security.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2024-2025, Pyronear.
2 |
3 | # This program is licensed under the Apache License 2.0.
4 | # See LICENSE or go to for full license details.
5 |
from datetime import datetime, timedelta, timezone
7 | from typing import Any, Dict, Optional
8 |
9 | import jwt
10 | from passlib.context import CryptContext
11 |
12 | from app.core.config import settings
13 |
__all__ = ["create_access_token", "hash_password", "verify_password"]

# Shared bcrypt hashing context used by hash_password / verify_password
pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
17 |
18 |
def create_access_token(content: Dict[str, Any], expires_minutes: Optional[int] = None) -> str:
    """Encode content dict using security algorithm, setting expiration.

    Args:
        content: claims to embed in the token.
        expires_minutes: validity in minutes; defaults to settings.JWT_EXPIRE_MINUTES.

    Returns:
        The encoded JWT string.
    """
    expire_delta = timedelta(minutes=expires_minutes or settings.JWT_EXPIRE_MINUTES)
    # Use an aware UTC datetime: datetime.utcnow() is naive and deprecated since 3.12;
    # PyJWT converts aware datetimes to the same epoch-based "exp" claim.
    expire = datetime.now(timezone.utc) + expire_delta
    return jwt.encode({**content, "exp": expire}, settings.JWT_SECRET, algorithm=settings.JWT_ALGORITHM)
24 |
25 |
def verify_password(plain_password: str, hashed_password: str) -> bool:
    """Return True if the plaintext password matches the stored bcrypt hash."""
    return pwd_context.verify(plain_password, hashed_password)
28 |
29 |
def hash_password(password: str) -> str:
    """Return the bcrypt hash of the given plaintext password."""
    return pwd_context.hash(password)
32 |
--------------------------------------------------------------------------------
/src/app/crud/__init__.py:
--------------------------------------------------------------------------------
1 | from .crud_user import *
2 | from .crud_camera import *
3 | from .crud_detection import *
4 | from .crud_organization import *
5 | from .crud_sequence import *
6 | from .crud_webhook import *
7 |
--------------------------------------------------------------------------------
/src/app/crud/base.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2024-2025, Pyronear.
2 |
3 | # This program is licensed under the Apache License 2.0.
4 | # See LICENSE or go to for full license details.
5 |
6 | from typing import Any, Generic, List, Optional, Tuple, Type, TypeVar, Union, cast
7 |
8 | from fastapi import HTTPException, status
9 | from pydantic import BaseModel
10 | from sqlalchemy import desc, exc
11 | from sqlmodel import SQLModel, delete, select
12 | from sqlmodel.ext.asyncio.session import AsyncSession
13 |
14 | ModelType = TypeVar("ModelType", bound=SQLModel)
15 | CreateSchemaType = TypeVar("CreateSchemaType", bound=BaseModel)
16 | UpdateSchemaType = TypeVar("UpdateSchemaType", bound=BaseModel)
17 |
18 | __all__ = ["BaseCRUD"]
19 |
20 |
class BaseCRUD(Generic[ModelType, CreateSchemaType, UpdateSchemaType]):
    """Generic async CRUD helper bound to one SQLModel table.

    Subclasses fix the model and schema types; all operations share the
    injected session and raise HTTPException for client-facing errors.
    """

    def __init__(self, session: AsyncSession, model: Type[ModelType]) -> None:
        self.session = session
        self.model = model

    async def create(self, payload: CreateSchemaType) -> ModelType:
        """Insert a new row built from the payload.

        Raises:
            HTTPException: 409 when a uniqueness/integrity constraint is violated.
        """
        entry = self.model(**payload.model_dump())
        try:
            self.session.add(entry)
            await self.session.commit()
        except exc.IntegrityError as error:
            await self.session.rollback()
            raise HTTPException(
                status_code=status.HTTP_409_CONFLICT,
                detail=f"An entry with the same index already exists : {error!s}",
            )
        # Refresh to populate server-generated fields (e.g. the primary key)
        await self.session.refresh(entry)

        return entry

    async def get(self, entry_id: int, strict: bool = False) -> Union[ModelType, None]:
        """Fetch one row by primary key; 404 when strict and missing."""
        entry: Union[ModelType, None] = await self.session.get(self.model, entry_id)
        if strict and entry is None:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail=f"Table {self.model.__name__} has no corresponding entry.",
            )
        return entry

    async def get_by(self, field_name: str, val: Union[str, int], strict: bool = False) -> Union[ModelType, None]:
        """Fetch at most one row where the given column equals val; 404 when strict and missing."""
        statement = select(self.model).where(getattr(self.model, field_name) == val)  # type: ignore[var-annotated]
        results = await self.session.exec(statement=statement)
        entry = results.one_or_none()
        if strict and entry is None:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail=f"Table {self.model.__name__} has no corresponding entry.",
            )
        return entry

    async def fetch_all(
        self,
        filters: Union[Tuple[str, Any], List[Tuple[str, Any]], None] = None,
        in_pair: Union[Tuple[str, List], None] = None,
        inequality_pair: Optional[Tuple[str, str, Any]] = None,
        order_by: Optional[str] = None,
        order_desc: bool = False,
        limit: Optional[int] = None,
        offset: Optional[int] = None,
    ) -> List[ModelType]:
        """Fetch rows matching the given constraints.

        Args:
            filters: one (field, value) equality pair, or a list of them (ANDed).
            in_pair: (field, values) membership constraint.
            inequality_pair: (field, op, value) with op in {">=", ">", "<=", "<"}.
            order_by: column name to sort on.
            order_desc: sort descending when True.
            limit: maximum number of rows returned.
            offset: number of rows skipped.

        Raises:
            ValueError: when inequality_pair uses an unsupported operator.
        """
        statement = select(self.model)  # type: ignore[var-annotated]
        if isinstance(filters, tuple):
            statement = statement.where(getattr(self.model, filters[0]) == filters[1])
        elif isinstance(filters, list):
            for filter_ in filters:
                statement = statement.where(getattr(self.model, filter_[0]) == filter_[1])

        if isinstance(in_pair, tuple):
            statement = statement.where(getattr(self.model, in_pair[0]).in_(in_pair[1]))

        if isinstance(inequality_pair, tuple):
            field, op, value = inequality_pair
            if op == ">=":
                statement = statement.where(getattr(self.model, field) >= value)
            elif op == ">":
                statement = statement.where(getattr(self.model, field) > value)
            elif op == "<=":
                statement = statement.where(getattr(self.model, field) <= value)
            elif op == "<":
                statement = statement.where(getattr(self.model, field) < value)
            else:
                raise ValueError(f"Unsupported inequality operator: {op}")

        if order_by is not None:
            statement = statement.order_by(
                desc(getattr(self.model, order_by)) if order_desc else getattr(self.model, order_by)
            )

        # Apply offset before limit (order is irrelevant to SQL, kept for readability)
        if offset is not None:
            statement = statement.offset(offset)

        if limit is not None:
            statement = statement.limit(limit)

        result = await self.session.exec(statement=statement)
        return list(result)

    async def update(self, entry_id: int, payload: UpdateSchemaType) -> ModelType:
        """Apply the payload's set fields to the row; 404 when missing."""
        access = cast(ModelType, await self.get(entry_id, strict=True))
        # Only overwrite fields explicitly provided by the caller
        values = payload.model_dump(exclude_unset=True)

        for k, v in values.items():
            setattr(access, k, v)

        self.session.add(access)
        await self.session.commit()
        await self.session.refresh(access)

        return access

    async def delete(self, entry_id: int) -> None:
        """Delete the row by primary key; 404 when missing."""
        await self.get(entry_id, strict=True)
        statement = delete(self.model).where(self.model.id == entry_id)

        await self.session.exec(statement=statement)  # type: ignore[call-overload]
        await self.session.commit()

    async def get_in(self, list_: List[Any], field_name: str) -> List[ModelType]:
        """Fetch all rows whose field value is contained in list_."""
        statement = select(self.model).where(getattr(self.model, field_name).in_(list_))  # type: ignore[var-annotated]
        results = await self.session.exec(statement)
        # Materialize as a list to match the declared return type
        return list(results.all())
132 |
--------------------------------------------------------------------------------
/src/app/crud/crud_camera.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2024-2025, Pyronear.
2 |
3 | # This program is licensed under the Apache License 2.0.
4 | # See LICENSE or go to for full license details.
5 |
6 | from sqlmodel.ext.asyncio.session import AsyncSession
7 |
8 | from app.crud.base import BaseCRUD
9 | from app.models import Camera
10 | from app.schemas.cameras import CameraCreate, CameraEdit, CameraName, LastActive
11 |
12 | __all__ = ["CameraCRUD"]
13 |
14 |
class CameraCRUD(BaseCRUD[Camera, CameraCreate, LastActive | CameraEdit | CameraName]):
    """CRUD operations for the cameras table."""

    def __init__(self, session: AsyncSession) -> None:
        super().__init__(session, Camera)
18 |
--------------------------------------------------------------------------------
/src/app/crud/crud_detection.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2024-2025, Pyronear.
2 |
3 | # This program is licensed under the Apache License 2.0.
4 | # See LICENSE or go to for full license details.
5 |
6 | from sqlmodel.ext.asyncio.session import AsyncSession
7 |
8 | from app.crud.base import BaseCRUD
9 | from app.models import Detection
10 | from app.schemas.detections import DetectionCreate, DetectionSequence
11 |
12 | __all__ = ["DetectionCRUD"]
13 |
14 |
class DetectionCRUD(BaseCRUD[Detection, DetectionCreate, DetectionSequence]):
    """CRUD operations for the detections table."""

    def __init__(self, session: AsyncSession) -> None:
        super().__init__(session, Detection)
18 |
--------------------------------------------------------------------------------
/src/app/crud/crud_organization.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2024-2025, Pyronear.
2 |
3 | # This program is licensed under the Apache License 2.0.
4 | # See LICENSE or go to for full license details.
5 |
6 | from sqlmodel.ext.asyncio.session import AsyncSession
7 |
8 | from app.crud.base import BaseCRUD
9 | from app.models import Organization
10 | from app.schemas.organizations import OrganizationCreate, TelegramChannelId
11 |
12 | __all__ = ["OrganizationCRUD"]
13 |
14 |
class OrganizationCRUD(BaseCRUD[Organization, OrganizationCreate, TelegramChannelId]):
    """CRUD operations for the organizations table."""

    def __init__(self, session: AsyncSession) -> None:
        super().__init__(session, Organization)
18 |
--------------------------------------------------------------------------------
/src/app/crud/crud_sequence.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2025, Pyronear.
2 |
3 | # This program is licensed under the Apache License 2.0.
4 | # See LICENSE or go to for full license details.
5 |
6 | from typing import Union
7 |
8 | from sqlmodel.ext.asyncio.session import AsyncSession
9 |
10 | from app.crud.base import BaseCRUD
11 | from app.models import Sequence
12 | from app.schemas.sequences import SequenceLabel, SequenceUpdate
13 |
14 | __all__ = ["SequenceCRUD"]
15 |
16 |
class SequenceCRUD(BaseCRUD[Sequence, Sequence, Union[SequenceUpdate, SequenceLabel]]):
    """CRUD operations for the sequences table."""

    def __init__(self, session: AsyncSession) -> None:
        super().__init__(session, Sequence)
20 |
--------------------------------------------------------------------------------
/src/app/crud/crud_user.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2024-2025, Pyronear.
2 |
3 | # This program is licensed under the Apache License 2.0.
4 | # See LICENSE or go to for full license details.
5 |
6 | from typing import Union
7 |
8 | from sqlmodel.ext.asyncio.session import AsyncSession
9 |
10 | from app.crud.base import BaseCRUD
11 | from app.models import User
12 | from app.schemas.users import CredHash
13 |
14 | __all__ = ["UserCRUD"]
15 |
16 |
class UserCRUD(BaseCRUD[User, User, CredHash]):
    """CRUD operations for the users table."""

    def __init__(self, session: AsyncSession) -> None:
        super().__init__(session, User)

    async def get_by_login(self, login: str, **kwargs) -> Union[User, None]:
        """Fetch a user by unique login (kwargs forwarded to get_by, e.g. strict=True)."""
        return await self.get_by("login", login, **kwargs)
23 |
--------------------------------------------------------------------------------
/src/app/crud/crud_webhook.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2024-2025, Pyronear.
2 |
3 | # This program is licensed under the Apache License 2.0.
4 | # See LICENSE or go to for full license details.
5 |
6 |
7 | from sqlmodel.ext.asyncio.session import AsyncSession
8 |
9 | from app.crud.base import BaseCRUD
10 | from app.models import Webhook
11 | from app.schemas.webhooks import WebhookCreation
12 |
13 | __all__ = ["WebhookCRUD"]
14 |
15 |
class WebhookCRUD(BaseCRUD[Webhook, WebhookCreation, WebhookCreation]):
    """CRUD operations for the webhooks table."""

    def __init__(self, session: AsyncSession) -> None:
        super().__init__(session, Webhook)
19 |
--------------------------------------------------------------------------------
/src/app/db.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2024-2025, Pyronear.
2 |
3 | # This program is licensed under the Apache License 2.0.
4 | # See LICENSE or go to for full license details.
5 |
6 | import asyncio
7 | import logging
8 |
9 | from sqlalchemy.ext.asyncio.engine import AsyncEngine
10 | from sqlalchemy.orm import sessionmaker
11 | from sqlmodel import SQLModel, create_engine, select
12 | from sqlmodel.ext.asyncio.session import AsyncSession
13 |
14 | from app.core.config import settings
15 | from app.core.security import hash_password
16 | from app.models import Organization, User, UserRole
17 | from app.services.storage import s3_service
18 |
__all__ = ["get_session", "init_db"]

logger = logging.getLogger("uvicorn.error")
# Single module-level async engine shared by all sessions (SQL echo disabled)
engine = AsyncEngine(create_engine(settings.POSTGRES_URL, echo=False))
23 |
24 |
async def get_session() -> AsyncSession:  # type: ignore[misc]
    """FastAPI dependency yielding a fresh async DB session per request.

    expire_on_commit=False keeps loaded attributes usable after commit,
    so response serialization does not trigger lazy refreshes.
    """
    async_session = sessionmaker(bind=engine, class_=AsyncSession, expire_on_commit=False)
    async with async_session() as session:
        yield session
29 |
30 |
async def init_db() -> None:
    """Create the schema and seed the superadmin organization, bucket and user.

    Idempotent: existing organization/user rows are reused, and schema creation
    only adds missing tables.
    """
    # Create all tables declared on SQLModel metadata (no-op for existing ones)
    async with engine.begin() as conn:
        await conn.run_sync(SQLModel.metadata.create_all)

    async with AsyncSession(engine) as session:
        logger.info("Initializing PostgreSQL database...")

        # Create the superadmin organization
        statement = select(Organization).where(Organization.name == settings.SUPERADMIN_ORG)  # type: ignore[var-annotated]
        results = await session.execute(statement=statement)
        organization = results.scalar_one_or_none()
        if not organization:
            new_orga = Organization(name=settings.SUPERADMIN_ORG)
            session.add(new_orga)
            await session.commit()
            await session.refresh(new_orga)  # Refresh to get the new organization ID
            organization_id = new_orga.id
        else:
            organization_id = organization.id
        # Create the bucket
        s3_service.create_bucket(s3_service.resolve_bucket_name(organization_id))

        # Check if admin exists
        statement = select(User).where(User.login == settings.SUPERADMIN_LOGIN)
        results = await session.exec(statement=statement)
        user = results.one_or_none()
        if not user:
            # Seed the superadmin account with a hashed password
            pwd = hash_password(settings.SUPERADMIN_PWD)
            session.add(
                User(
                    login=settings.SUPERADMIN_LOGIN,
                    hashed_password=pwd,
                    role=UserRole.ADMIN,
                    organization_id=organization_id,
                )
            )
        await session.commit()
68 |
69 |
async def main() -> None:
    """Script entrypoint: initialize the database schema and seed data."""
    await init_db()


if __name__ == "__main__":
    asyncio.run(main())
76 |
--------------------------------------------------------------------------------
/src/app/main.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2020-2025, Pyronear.
2 |
3 | # This program is licensed under the Apache License 2.0.
4 | # See LICENSE or go to for full license details.
5 |
6 | import logging
7 | import time
8 |
9 | import sentry_sdk
10 | from fastapi import FastAPI, Request, status
11 | from fastapi.middleware.cors import CORSMiddleware
12 | from fastapi.openapi.docs import get_swagger_ui_html
13 | from fastapi.openapi.utils import get_openapi
14 | from prometheus_fastapi_instrumentator import Instrumentator
15 | from sentry_sdk.integrations.asgi import SentryAsgiMiddleware
16 | from sentry_sdk.integrations.fastapi import FastApiIntegration
17 | from sentry_sdk.integrations.starlette import StarletteIntegration
18 |
19 | from app.api.api_v1.router import api_router
20 | from app.core.config import settings
21 | from app.schemas.base import Status
22 |
logger = logging.getLogger("uvicorn.error")

# Sentry
# Enabled only when SENTRY_DSN is set (the config validator maps blank to None).
# Tracing is fully disabled; only errors are reported.
if isinstance(settings.SENTRY_DSN, str):
    sentry_sdk.init(
        settings.SENTRY_DSN,
        enable_tracing=False,
        traces_sample_rate=0.0,
        integrations=[
            StarletteIntegration(transaction_style="url"),
            FastApiIntegration(transaction_style="url"),
        ],
        release=settings.VERSION,
        server_name=settings.SERVER_NAME,
        debug=settings.DEBUG,
        environment=None if settings.DEBUG else "production",
    )
    logger.info(f"Sentry middleware enabled on server {settings.SERVER_NAME}")


# docs_url=None disables the default /docs route; a custom one is defined below
app = FastAPI(
    title=settings.PROJECT_NAME,
    description=settings.PROJECT_DESCRIPTION,
    debug=settings.DEBUG,
    version=settings.VERSION,
    openapi_url=f"{settings.API_V1_STR}/openapi.json",
    docs_url=None,
)
51 |
52 |
53 | # Healthcheck
@app.get("/status", status_code=status.HTTP_200_OK, summary="Healthcheck for the API", include_in_schema=False)
def get_status() -> Status:
    """Return a static OK status, used as the liveness probe."""
    return Status(status="ok")
57 |
58 |
59 | # Routing
60 | app.include_router(api_router, prefix=settings.API_V1_STR)
61 |
62 |
63 | # Middleware
@app.middleware("http")
async def add_process_time_header(request: Request, call_next):
    """Attach the wall-clock handling duration (seconds) as X-Process-Time."""
    started = time.time()
    response = await call_next(request)
    response.headers["X-Process-Time"] = str(time.time() - started)
    return response
71 |
72 |
# CORS
app.add_middleware(
    CORSMiddleware,
    # CORS_ORIGIN is a plain string (e.g. "*" or a comma-separated list), but
    # Starlette expects a sequence of origins — passing the raw string would be
    # iterated character by character (it only works by accident for "*").
    allow_origins=[origin.strip() for origin in settings.CORS_ORIGIN.split(",")],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

if isinstance(settings.SENTRY_DSN, str):
    app.add_middleware(SentryAsgiMiddleware)
84 |
85 |
86 | # Overrides swagger to include favicon
@app.get("/docs", include_in_schema=False)
def swagger_ui_html():
    """Serve Swagger UI with the Pyronear favicon and the schemas section hidden."""
    ui_kwargs = {
        "openapi_url": f"{settings.API_V1_STR}/openapi.json",
        "title": settings.PROJECT_NAME,
        "swagger_favicon_url": "https://pyronear.org/img/favicon.ico",
        # Remove schemas from swagger
        "swagger_ui_parameters": {"defaultModelsExpandDepth": -1},
    }
    return get_swagger_ui_html(**ui_kwargs)
96 |
97 |
# OpenAPI config
def custom_openapi():
    """Build and memoize the OpenAPI schema with Pyronear branding."""
    # https://fastapi.tiangolo.com/tutorial/metadata/
    if not app.openapi_schema:
        schema = get_openapi(
            title=settings.PROJECT_NAME,
            version=settings.VERSION,
            description=settings.PROJECT_DESCRIPTION,
            routes=app.routes,
            license_info={"name": "Apache 2.0", "url": "http://www.apache.org/licenses/LICENSE-2.0.html"},
            contact={
                "name": "API support",
                "email": settings.SUPPORT_EMAIL,
                "url": "https://github.com/pyronear/pyro-api/issues",
            },
        )
        schema["info"]["x-logo"] = {"url": "https://pyronear.org/img/logo_letters.png"}
        app.openapi_schema = schema
    return app.openapi_schema
118 |
119 |
app.openapi = custom_openapi  # type: ignore[method-assign]
# Optional Prometheus /metrics endpoint, excluded from docs and self-scraping
if settings.PROMETHEUS_ENABLED:
    Instrumentator(
        excluded_handlers=["/metrics", "/docs", ".*openapi.json"],
    ).instrument(app).expose(app, include_in_schema=False)
    logger.info("Collecting performance data with Prometheus")
126 |
--------------------------------------------------------------------------------
/src/app/models.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2024-2025, Pyronear.
2 |
3 | # This program is licensed under the Apache License 2.0.
4 | # See LICENSE or go to for full license details.
5 |
6 | from datetime import datetime
7 | from enum import Enum
8 | from typing import Union
9 |
10 | from sqlmodel import Field, SQLModel
11 |
12 | from app.core.config import settings
13 |
14 | __all__ = ["Camera", "Detection", "Organization", "User"]
15 |
16 |
class UserRole(str, Enum):
    """Roles assignable to a user account."""

    ADMIN = "admin"
    AGENT = "agent"
    USER = "user"
21 |
22 |
class Role(str, Enum):
    """Token scopes; superset of UserRole that also covers camera tokens."""

    ADMIN = "admin"
    AGENT = "agent"
    CAMERA = "camera"
    USER = "user"
28 |
29 |
class User(SQLModel, table=True):
    """Application user account, scoped to an organization."""

    __tablename__ = "users"
    id: int = Field(None, primary_key=True)
    organization_id: int = Field(..., foreign_key="organizations.id", nullable=False)
    role: UserRole = Field(UserRole.USER, nullable=False)
    # Allow sign-up/in via login + password
    login: str = Field(..., index=True, unique=True, min_length=2, max_length=50, nullable=False)
    hashed_password: str = Field(..., min_length=5, max_length=70, nullable=False)
    created_at: datetime = Field(default_factory=datetime.utcnow, nullable=False)
39 |
40 |
class Camera(SQLModel, table=True):
    """Physical camera installation owned by an organization."""

    __tablename__ = "cameras"
    id: int = Field(None, primary_key=True)
    organization_id: int = Field(..., foreign_key="organizations.id", nullable=False)
    name: str = Field(..., min_length=5, max_length=100, nullable=False, unique=True)
    # Horizontal field of view, in degrees
    angle_of_view: float = Field(..., gt=0, le=360, nullable=False)
    # Meters above sea level
    elevation: float = Field(..., gt=0, lt=10000, nullable=False)
    lat: float = Field(..., gt=-90, lt=90)
    lon: float = Field(..., gt=-180, lt=180)
    is_trustable: bool = True
    last_active_at: Union[datetime, None] = None
    last_image: Union[str, None] = None
    created_at: datetime = Field(default_factory=datetime.utcnow, nullable=False)
54 |
55 |
class Detection(SQLModel, table=True):
    """Single wildfire detection event reported by a camera."""

    __tablename__ = "detections"
    id: int = Field(None, primary_key=True)
    camera_id: int = Field(..., foreign_key="cameras.id", nullable=False)
    # Nullable: a detection may not (yet) be grouped into a sequence
    sequence_id: Union[int, None] = Field(None, foreign_key="sequences.id", nullable=True)
    azimuth: float = Field(..., ge=0, lt=360)
    # S3 object key of the associated image
    bucket_key: str
    # String-encoded list of (xmin, ymin, xmax, ymax, conf) tuples
    bboxes: str = Field(..., min_length=2, max_length=settings.MAX_BBOX_STR_LENGTH, nullable=False)
    created_at: datetime = Field(default_factory=datetime.utcnow, nullable=False)
65 |
66 |
class Sequence(SQLModel, table=True):
    """Group of detections from the same camera/azimuth over a time window."""

    __tablename__ = "sequences"
    id: int = Field(None, primary_key=True)
    camera_id: int = Field(..., foreign_key="cameras.id", nullable=False)
    azimuth: float = Field(..., ge=0, lt=360)
    # None until the sequence has been labeled
    is_wildfire: Union[bool, None] = None
    started_at: datetime = Field(..., nullable=False)
    last_seen_at: datetime = Field(..., nullable=False)
75 |
76 |
class Organization(SQLModel, table=True):
    """Organization owning cameras and users."""

    __tablename__ = "organizations"
    id: int = Field(None, primary_key=True)
    name: str = Field(..., min_length=5, max_length=100, nullable=False, unique=True)
    # Optional Telegram channel identifier for notifications
    telegram_id: Union[str, None] = Field(None, nullable=True)
82 |
83 |
class Webhook(SQLModel, table=True):
    """Registered callback URL notified of events."""

    __tablename__ = "webhooks"
    id: int = Field(None, primary_key=True)
    url: str = Field(..., nullable=False, unique=True)
88 |
--------------------------------------------------------------------------------
/src/app/schemas/__init__.py:
--------------------------------------------------------------------------------
1 | from .base import *
2 | from .detections import *
3 | from .cameras import *
4 | from .login import *
5 | from .users import *
6 | from .organizations import *
7 | from .sequences import *
8 | from .webhooks import *
9 |
--------------------------------------------------------------------------------
/src/app/schemas/base.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2020-2025, Pyronear.
2 |
3 | # This program is licensed under the Apache License 2.0.
4 | # See LICENSE or go to for full license details.
5 |
6 | from pydantic import BaseModel
7 |
8 |
class Status(BaseModel):
    """Minimal status response, used by the healthcheck endpoint."""

    status: str
11 |
--------------------------------------------------------------------------------
/src/app/schemas/cameras.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2020-2025, Pyronear.
2 |
3 | # This program is licensed under the Apache License 2.0.
4 | # See LICENSE or go to for full license details.
5 |
6 | from datetime import datetime
7 |
8 | from pydantic import BaseModel, Field
9 |
# Export every public schema defined in this module (CameraEdit, CameraName
# and LastImage were previously missing from the list).
__all__ = [
    "CameraCreate",
    "CameraEdit",
    "CameraName",
    "LastActive",
    "LastImage",
]
14 |
15 |
class LastActive(BaseModel):
    """Update payload stamping a camera's last activity time (defaults to now, naive UTC)."""

    last_active_at: datetime = Field(default_factory=datetime.utcnow)
18 |
19 |
class LastImage(LastActive):
    """Update payload recording the latest image key along with the activity time."""

    last_image: str
22 |
23 |
class CameraEdit(BaseModel):
    """Editable camera location fields."""

    elevation: float = Field(
        ...,
        gt=0,
        lt=10000,
        description="number of meters from sea level",
        json_schema_extra={"examples": [1582]},
    )
    lat: float = Field(..., gt=-90, lt=90, description="latitude", json_schema_extra={"examples": [44.765181]})
    lon: float = Field(..., gt=-180, lt=180, description="longitude", json_schema_extra={"examples": [4.514880]})
34 |
35 |
class CameraCreate(CameraEdit):
    """Creation payload for a camera, extending the editable location fields."""

    organization_id: int = Field(..., gt=0)
    # NOTE(review): min/max length (3/50) differs from the Camera model (5/100) — confirm
    name: str = Field(
        ...,
        min_length=3,
        max_length=50,
        description="name of the camera",
        json_schema_extra={"examples": ["pyro-camera-01"]},
    )
    angle_of_view: float = Field(
        ...,
        gt=0,
        le=360,
        description="angle between left and right camera view",
        json_schema_extra={"examples": [120.0]},
    )
    is_trustable: bool = Field(True, description="whether the detection from this camera can be trusted")
53 |
54 |
class CameraName(BaseModel):
    """Update payload renaming a camera."""

    name: str = Field(..., min_length=5, max_length=100, description="name of the camera")
57 |
--------------------------------------------------------------------------------
/src/app/schemas/detections.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2020-2025, Pyronear.
2 |
3 | # This program is licensed under the Apache License 2.0.
4 | # See LICENSE or go to for full license details.
5 |
6 | import re
7 | from typing import Union
8 |
9 | from pydantic import BaseModel, Field
10 |
11 | from app.core.config import settings
12 | from app.models import Detection
13 |
14 | __all__ = ["Azimuth", "DetectionCreate", "DetectionLabel", "DetectionUrl"]
15 |
16 |
class DetectionLabel(BaseModel):
    """Label payload flagging whether a detection is a true wildfire."""

    is_wildfire: bool
19 |
20 |
class Azimuth(BaseModel):
    """Base schema carrying a viewing direction in [0, 360) degrees."""

    azimuth: float = Field(
        ...,
        ge=0,
        lt=360,
        description="angle between north and direction in degrees",
        json_schema_extra={"examples": [110]},
    )
29 |
30 |
# Regex for a relative coordinate in [0, 1], with a maximum of 3 decimals.
# Accepts "0", "1", "0.123" and "1.0"/"1.00"/"1.000" — the latter forms are
# valid coordinates that the previous pattern ("0?\.[0-9]{1,3}|0|1") rejected.
FLOAT_PATTERN = r"(0?\.[0-9]{1,3}|0|1(\.0{1,3})?)"
# One box: five comma-separated coordinates (xmin, ymin, xmax, ymax, conf) in parentheses
BOX_PATTERN = rf"\({FLOAT_PATTERN},{FLOAT_PATTERN},{FLOAT_PATTERN},{FLOAT_PATTERN},{FLOAT_PATTERN}\)"
# Full string: a non-empty, bracketed, comma-separated list of boxes
BOXES_PATTERN = rf"^\[{BOX_PATTERN}(,{BOX_PATTERN})*\]$"
COMPILED_BOXES_PATTERN = re.compile(BOXES_PATTERN)
36 |
37 |
class DetectionCreate(Azimuth):
    """Creation payload for a detection event."""

    camera_id: int = Field(..., gt=0)
    # S3 object key of the detection image
    bucket_key: str
    bboxes: str = Field(
        ...,
        min_length=2,
        max_length=settings.MAX_BBOX_STR_LENGTH,
        description="string representation of list of tuples where each tuple is a relative coordinate in order xmin, ymin, xmax, ymax, conf",
        json_schema_extra={"examples": ["[(0.1, 0.1, 0.9, 0.9, 0.5)]"]},
    )
48 |
49 |
class DetectionUrl(BaseModel):
    """Response carrying a presigned URL for a detection's media."""

    url: str = Field(..., description="temporary URL to access the media content")
52 |
53 |
class DetectionWithUrl(Detection):
    """Detection row enriched with a presigned media URL."""

    url: str = Field(..., description="temporary URL to access the media content")
56 |
57 |
class DetectionSequence(BaseModel):
    """Update payload linking a detection to a sequence (None detaches it)."""

    # Required field, but nullable; gt=0 constrains only non-None values
    sequence_id: Union[int, None] = Field(..., gt=0)
60 |
--------------------------------------------------------------------------------
/src/app/schemas/login.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2020-2025, Pyronear.
2 |
3 | # This program is licensed under the Apache License 2.0.
4 | # See LICENSE or go to for full license details.
5 |
6 | from typing import List
7 |
8 | from pydantic import BaseModel, Field
9 |
10 | from app.models import Role
11 |
12 | __all__ = ["Token", "TokenPayload"]
13 |
14 |
15 | # Token
class Token(BaseModel):
    """Access-token response payload."""

    access_token: str = Field(
        ...,
        description="access token",
        json_schema_extra={"examples": ["eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.423fgFGTfttrvU6D1k7vF92hH5vaJHCGFYd8E"]},
    )
    token_type: str = Field(..., description="type of token", json_schema_extra={"examples": ["bearer"]})
23 |
24 |
class TokenPayload(BaseModel):
    """Decoded content of an access token."""

    # Positive token subject (presumably the user id — confirm against token issuance)
    sub: int = Field(..., gt=0)
    scopes: List[Role] = Field([], description="scopes of the token")
    organization_id: int = Field(..., gt=0)
29 |
--------------------------------------------------------------------------------
/src/app/schemas/organizations.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2024-2025, Pyronear.
2 |
3 | # This program is licensed under the Apache License 2.0.
4 | # See LICENSE or go to for full license details.
5 |
6 | from typing import Union
7 |
8 | from pydantic import BaseModel, Field
9 |
# Export every public schema defined below (TelegramChannelId was missing)
__all__ = ["OrganizationCreate", "OrganizationUpdate", "TelegramChannelId"]
11 |
12 |
class OrganizationCreate(BaseModel):
    """Payload to create an organization."""

    name: str = Field(
        ...,
        description="name of the organization",
        json_schema_extra={"examples": ["pyro-org-01"]},
        min_length=3,
        max_length=50,
    )
21 |
22 |
class OrganizationUpdate(BaseModel):
    """Payload to rename an organization (same constraints as creation)."""

    name: str = Field(
        ...,
        description="name of the organization",
        json_schema_extra={"examples": ["pyro-org-01"]},
        min_length=3,
        max_length=50,
    )
31 |
32 |
class TelegramChannelId(BaseModel):
    """Optional Telegram channel handle; must look like an @-prefixed username when set."""

    telegram_id: Union[str, None] = Field(None, pattern=r"^@[a-zA-Z0-9_-]+$")
35 |
--------------------------------------------------------------------------------
/src/app/schemas/sequences.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2020-2025, Pyronear.
2 |
3 | # This program is licensed under the Apache License 2.0.
4 | # See LICENSE or go to for full license details.
5 |
6 | from datetime import datetime
7 |
8 | from pydantic import BaseModel
9 |
10 | from app.models import Sequence
11 |
# Export every public schema defined below (SequenceLabel was missing)
__all__ = ["SequenceLabel", "SequenceUpdate", "SequenceWithCone"]
13 |
14 |
15 | # Accesses
class SequenceUpdate(BaseModel):
    """Payload to refresh a sequence's last-seen timestamp."""

    last_seen_at: datetime
18 |
19 |
class SequenceLabel(BaseModel):
    """Label payload flagging whether a sequence corresponds to a wildfire."""

    is_wildfire: bool
22 |
23 |
class SequenceWithCone(Sequence):
    """Sequence model augmented with cone azimuth and cone angle attributes."""

    cone_azimuth: float
    cone_angle: float
27 |
--------------------------------------------------------------------------------
/src/app/schemas/users.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2020-2025, Pyronear.
2 |
3 | # This program is licensed under the Apache License 2.0.
4 | # See LICENSE or go to for full license details.
5 |
6 | from pydantic import BaseModel, Field
7 |
8 | from app.models import UserRole
9 |
10 | __all__ = ["Cred", "CredHash", "UserCreate", "UserCreation"]
11 |
12 |
13 | # Accesses
class Login(BaseModel):
    """Schema holding a user login handle."""

    login: str = Field(..., min_length=3, max_length=50, examples=["JohnDoe"])
16 |
17 |
class Cred(BaseModel):
    """Plaintext password payload."""

    password: str = Field(..., min_length=3, examples=["PickARobustOne"])
20 |
21 |
class CredHash(BaseModel):
    """Hashed counterpart of Cred."""

    hashed_password: str
24 |
25 |
class Role(BaseModel):
    """User role wrapper, defaulting to the regular USER role."""

    role: UserRole = Field(UserRole.USER)
28 |
29 |
class UserCreate(Role):
    """Inbound payload to create a user account (role defaults to USER via Role)."""

    login: str = Field(..., min_length=3, max_length=50, examples=["JohnDoe"])
    password: str = Field(..., min_length=3, examples=["PickARobustOne"])
    organization_id: int = Field(..., gt=0)
34 |
35 |
class UserCreation(Role):
    """Creation schema like UserCreate, but without the plaintext password field."""

    login: str = Field(..., min_length=3, max_length=50, examples=["JohnDoe"])
    organization_id: int = Field(..., gt=0)
39 |
--------------------------------------------------------------------------------
/src/app/schemas/webhooks.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2024-2025, Pyronear.
2 |
3 | # This program is licensed under the Apache License 2.0.
4 | # See LICENSE or go to for full license details.
5 |
6 | from pydantic import AnyHttpUrl, BaseModel, Field
7 |
8 | __all__ = ["WebhookCreate", "WebhookCreation"]
9 |
10 |
class WebhookCreate(BaseModel):
    """Webhook payload whose URL is validated as an HTTP(S) URL."""

    url: AnyHttpUrl
13 |
14 |
class WebhookCreation(BaseModel):
    """Webhook schema with the URL kept as a plain (non-empty) string."""

    url: str = Field(..., min_length=1, examples=["https://example.com"])
17 |
--------------------------------------------------------------------------------
/src/app/services/__init__.py:
--------------------------------------------------------------------------------
1 | from .storage import *
2 |
--------------------------------------------------------------------------------
/src/app/services/storage.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2022-2025, Pyronear.
2 |
3 | # This program is licensed under the Apache License 2.0.
4 | # See LICENSE or go to for full license details.
5 |
6 | import hashlib
7 | import logging
8 | from datetime import datetime
9 | from mimetypes import guess_extension
10 | from typing import Any, Dict, Union
11 |
12 | import boto3
13 | import magic
14 | from botocore.exceptions import ClientError, EndpointConnectionError, NoCredentialsError, PartialCredentialsError
15 | from fastapi import HTTPException, UploadFile, status
16 |
17 | from app.core.config import settings
18 |
19 | __all__ = ["s3_service", "upload_file"]
20 |
21 |
22 | logger = logging.getLogger("uvicorn.warning")
23 |
24 |
class S3Bucket:
    """S3 bucket manager

    Args:
        s3_client: the client of the S3 service
        bucket_name: the name of the bucket
        proxy_url: the proxy url

    Raises:
        ValueError: when the endpoint or the bucket cannot be reached
    """

    def __init__(self, s3_client, bucket_name: str, proxy_url: Union[str, None] = None) -> None:  # noqa: ANN001
        self._s3 = s3_client
        # Fail fast on an unreachable endpoint/bucket, chaining the original error for debuggability
        try:
            self._s3.head_bucket(Bucket=bucket_name)
        except EndpointConnectionError as exc:
            raise ValueError(f"unable to access endpoint {self._s3.meta.endpoint_url}") from exc
        except ClientError as exc:
            raise ValueError(f"unable to access bucket {bucket_name}") from exc
        self.name = bucket_name
        self.proxy_url = proxy_url

    def get_file_metadata(self, bucket_key: str) -> Dict[str, Any]:
        """Return the S3 metadata of a bucket file (head_object response)."""
        # https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/s3.html#S3.Client.head_object
        return self._s3.head_object(Bucket=self.name, Key=bucket_key)

    def check_file_existence(self, bucket_key: str) -> bool:
        """Check whether a file exists on the bucket"""
        try:
            # head_object raises a ClientError (e.g. 404) when the key is missing/unreadable
            head_object = self.get_file_metadata(bucket_key)
            return head_object["ResponseMetadata"]["HTTPStatusCode"] == 200
        except ClientError as e:
            logger.warning(e)
            return False

    def upload_file(self, bucket_key: str, file_binary: bytes) -> bool:
        """Upload a file to the bucket; always returns True (upload_fileobj raises on failure)."""
        # https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/s3.html#S3.Bucket.upload_fileobj
        self._s3.upload_fileobj(file_binary, self.name, bucket_key)
        return True

    def delete_file(self, bucket_key: str) -> None:
        """Remove a file from the bucket."""
        # https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/s3.html#S3.Client.delete_object
        self._s3.delete_object(Bucket=self.name, Key=bucket_key)

    def get_public_url(self, bucket_key: str, url_expiration: int = settings.S3_URL_EXPIRATION) -> str:
        """Generate a temporary public URL for a bucket file"""
        if not self.check_file_existence(bucket_key):
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND, detail="File cannot be found on the bucket storage"
            )

        # Generate a public URL for it using boto3 presign URL generation
        presigned_url = self._s3.generate_presigned_url(
            "get_object", Params={"Bucket": self.name, "Key": bucket_key}, ExpiresIn=url_expiration
        )
        # Swap the raw endpoint for the public-facing proxy when one is configured
        if self.proxy_url:
            return presigned_url.replace(self._s3.meta.endpoint_url, self.proxy_url)
        return presigned_url

    async def delete_items(self) -> None:
        """Delete all items in the bucket"""
        # Paginate so that listings beyond a single page are also deleted
        paginator = self._s3.get_paginator("list_objects_v2")
        for page in paginator.paginate(Bucket=self.name):
            if "Contents" in page:
                delete_items = [{"Key": obj["Key"]} for obj in page["Contents"]]
                self._s3.delete_objects(Bucket=self.name, Delete={"Objects": delete_items})
92 |
93 |
class S3Service:
    """S3 storage service manager

    Args:
        region: S3 region
        endpoint_url: the S3 storage endpoint
        access_key: the S3 access key
        secret_key: the S3 secret key
        proxy_url: the proxy url

    Raises:
        ValueError: when the credentials are invalid or the endpoint is unreachable
    """

    def __init__(
        self, region: str, endpoint_url: str, access_key: str, secret_key: str, proxy_url: Union[str, None] = None
    ) -> None:
        session_ = boto3.Session(access_key, secret_key, region_name=region)
        self._s3 = session_.client("s3", endpoint_url=endpoint_url)
        # Ensure S3 is connected, chaining the underlying error for easier debugging
        try:
            self._s3.list_buckets()
        except (NoCredentialsError, PartialCredentialsError) as exc:
            raise ValueError("invalid S3 credentials") from exc
        except EndpointConnectionError as exc:
            raise ValueError(f"unable to access endpoint {endpoint_url}") from exc
        except ClientError as exc:
            raise ValueError("unable to access S3") from exc
        logger.info(f"S3 connected on {endpoint_url}")
        self.proxy_url = proxy_url

    def create_bucket(self, bucket_name: str) -> bool:
        """Create a new bucket in S3 storage and return whether it succeeded"""
        try:
            # us-east-1 rejects an explicit LocationConstraint, cf.
            # https://stackoverflow.com/questions/51912072/invalidlocationconstraint-error-while-creating-s3-bucket-when-the-used-command-i
            # https://github.com/localstack/localstack/issues/8000
            config_ = (
                {}
                if self._s3.meta.region_name == "us-east-1"
                else {"CreateBucketConfiguration": {"LocationConstraint": self._s3.meta.region_name}}
            )
            self._s3.create_bucket(Bucket=bucket_name, **config_)
            return True
        except ClientError as e:
            logger.warning(e)
            return False

    def get_bucket(self, bucket_name: str) -> S3Bucket:
        """Get an existing bucket in S3 storage"""
        return S3Bucket(self._s3, bucket_name, self.proxy_url)

    async def delete_bucket(self, bucket_name: str) -> bool:
        """Delete an existing bucket in S3 storage and return whether it succeeded"""
        bucket = S3Bucket(self._s3, bucket_name, self.proxy_url)
        try:
            # A bucket must be emptied before it can be deleted
            await bucket.delete_items()
            self._s3.delete_bucket(Bucket=bucket_name)
            return True
        except ClientError as e:
            logger.warning(e)
            return False

    @staticmethod
    def resolve_bucket_name(organization_id: int) -> str:
        """Build the canonical bucket name for an organization."""
        return f"{settings.SERVER_NAME}-alert-api-{organization_id!s}"
156 |
157 |
async def upload_file(file: UploadFile, organization_id: int, camera_id: int) -> str:
    """Upload a file to S3 storage and return its bucket key.

    Args:
        file: the uploaded file
        organization_id: owner organization, used to resolve the target bucket
        camera_id: camera that produced the media, used to prefix the bucket key

    Returns:
        the bucket key under which the file was stored (NOT a public URL)

    Raises:
        HTTPException: when the upload fails or the uploaded data is corrupted
    """
    # Read the content once and derive everything from it (previously the stream was
    # fully read three times with seek(0) in between)
    content = file.file.read()
    # First 8 chars (to avoid system interactions issues) of the SHA256 hash go into the key
    sha_hash = hashlib.sha256(content).hexdigest()
    # MD5 is used to verify the upload against the S3 ETag below
    md5_hash = hashlib.md5(content).hexdigest()  # noqa S324
    # guess_extension will return None if the MIME type cannot be resolved
    extension = guess_extension(magic.from_buffer(content, mime=True)) or ""
    # Concatenate camera id, timestamp & hash
    # NOTE(review): datetime.utcnow() is deprecated in Python 3.12 — consider
    # datetime.now(timezone.utc) (identical strftime output here)
    bucket_key = f"{camera_id}-{datetime.utcnow().strftime('%Y%m%d%H%M%S')}-{sha_hash[:8]}{extension}"
    # Reset byte position of the file (cf. https://fastapi.tiangolo.com/tutorial/request-files/#uploadfile)
    await file.seek(0)
    bucket_name = s3_service.resolve_bucket_name(organization_id)
    bucket = s3_service.get_bucket(bucket_name)
    # Upload the file
    if not bucket.upload_file(bucket_key, file.file):  # type: ignore[arg-type]
        raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail="Failed upload")
    logger.info(f"File uploaded to bucket {bucket_name} with key {bucket_key}.")

    # Data integrity check: for a single-part upload the ETag is the MD5 of the content
    file_meta = bucket.get_file_metadata(bucket_key)
    if md5_hash != file_meta["ETag"].replace('"', ""):
        # Delete the corrupted upload before signalling the error
        bucket.delete_file(bucket_key)
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Data was corrupted during upload",
        )
    return bucket_key
191 |
192 |
# Module-level singleton, configured from app settings; performs a connectivity
# check (list_buckets) at import time
s3_service = S3Service(
    settings.S3_REGION, settings.S3_ENDPOINT_URL, settings.S3_ACCESS_KEY, settings.S3_SECRET_KEY, settings.S3_PROXY_URL
)
196 |
--------------------------------------------------------------------------------
/src/app/services/telegram.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2024-2025, Pyronear.
2 |
3 | # This program is licensed under the Apache License 2.0.
4 | # See LICENSE or go to for full license details.
5 |
6 | import logging
7 | from typing import Union
8 |
9 | import requests
10 |
11 | from app.core.config import settings
12 |
13 | logger = logging.getLogger("uvicorn.error")
14 |
15 | __all__ = ["telegram_client"]
16 |
17 |
class TelegramClient:
    """Thin wrapper around the Telegram Bot HTTP API; a no-op shell when no token is set."""

    BASE_URL = "https://api.telegram.org/bot{token}"

    def __init__(self, token: Union[str, None] = None) -> None:
        self.is_enabled = isinstance(token, str)
        if isinstance(token, str):
            self.token = token
            # Validate token eagerly by probing the getMe endpoint
            check_url = f"{self.BASE_URL.format(token=self.token)}/getMe"
            response = requests.get(check_url, timeout=1)
            if response.status_code != 200:
                raise ValueError(f"Invalid Telegram Bot token: {response.text}")
            logger.info("Telegram notifications enabled")

    def has_channel_access(self, channel_id: str) -> bool:
        """Return whether the bot can access the given channel."""
        if not self.is_enabled:
            raise AssertionError("Telegram notifications are not enabled")
        chat_url = f"{self.BASE_URL.format(token=self.token)}/getChat"
        response = requests.get(chat_url, json={"chat_id": channel_id}, timeout=1)
        return response.status_code == 200

    def notify(self, channel_id: str, message: str) -> requests.Response:
        """Send a text message to the given channel and return the raw HTTP response."""
        if not self.is_enabled:
            raise AssertionError("Telegram notifications are not enabled")
        send_url = f"{self.BASE_URL.format(token=self.token)}/sendMessage"
        response = requests.post(send_url, json={"chat_id": channel_id, "text": message}, timeout=2)
        # Failures are logged but still returned so callers can inspect the response
        if response.status_code != 200:
            logger.error(f"Failed to send message to Telegram: {response.text}")

        return response
56 |
57 |
# Module-level client; when TELEGRAM_TOKEN is set this performs a network call at import time
telegram_client = TelegramClient(token=settings.TELEGRAM_TOKEN)
59 |
--------------------------------------------------------------------------------
/src/app/services/telemetry.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2024-2025, Pyronear.
2 |
3 | # This program is licensed under the Apache License 2.0.
4 | # See LICENSE or go to for full license details.
5 |
6 | import logging
7 | from typing import Union
8 |
9 | from posthog import Posthog
10 |
11 | from app.core.config import settings
12 |
13 | logger = logging.getLogger("uvicorn.error")
14 |
15 | __all__ = ["telemetry_client"]
16 |
17 |
class TelemetryClient:
    """PostHog wrapper that silently no-ops when no API key was provided."""

    def __init__(self, api_key: Union[str, None] = None) -> None:
        self.is_enabled = isinstance(api_key, str)
        if isinstance(api_key, str):
            self.ph_client = Posthog(project_api_key=api_key, host="https://eu.posthog.com")
            logger.info("PostHog enabled")

    def capture(self, *args, **kwargs) -> None:
        """Forward an event capture to PostHog when enabled."""
        if not self.is_enabled:
            return
        self.ph_client.capture(*args, **kwargs)

    def identify(self, *args, **kwargs) -> None:
        """Forward a user identification to PostHog when enabled."""
        if not self.is_enabled:
            return
        self.ph_client.identify(*args, **kwargs)

    def alias(self, *args, **kwargs) -> None:
        """Forward a user alias to PostHog when enabled."""
        if not self.is_enabled:
            return
        self.ph_client.alias(*args, **kwargs)
36 |
37 |
# Module-level telemetry client; disabled (no-op) when POSTHOG_KEY is unset
telemetry_client = TelemetryClient(api_key=settings.POSTHOG_KEY)
39 |
--------------------------------------------------------------------------------
/src/migrations/README.md:
--------------------------------------------------------------------------------
1 | # Alembic main commands
2 |
All commands should be run after spinning up the containers using
4 |
5 | ```shell
6 | docker compose up -d db backend
7 | ```
8 |
9 | ## Create a revision
10 |
Alembic lets you record schema migrations as revision scripts of DB operations. Let's create a revision file:
12 |
13 | ```shell
14 | docker compose exec -T backend alembic revision --autogenerate
15 | ```
16 |
17 | Once generated, you should edit the revision file in src/migrations/versions that was created. See example [here](https://github.com/jonra1993/fastapi-alembic-sqlmodel-async/blob/main/fastapi-alembic-sqlmodel-async/alembic/versions/2022-09-25-19-46_60d49bf413b8.py).
18 |
19 | ## Apply revisions
20 |
21 | Now apply all the revisions
22 |
23 | ```shell
24 | docker compose exec backend alembic upgrade head
25 | ```
26 |
--------------------------------------------------------------------------------
/src/migrations/env.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2023-2025, Pyronear.
2 |
3 | # This program is licensed under the Apache License 2.0.
4 | # See LICENSE or go to for full license details.
5 |
6 | import asyncio
7 | from logging.config import fileConfig
8 |
9 | from alembic import context
10 | from sqlalchemy.engine import Connection
11 | from sqlalchemy.ext.asyncio import create_async_engine
12 | from sqlmodel import SQLModel
13 |
14 | from app.core.config import settings
15 | from app.models import *
16 |
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
# SQLModel aggregates metadata from every model imported via `from app.models import *`
target_metadata = SQLModel.metadata

# NOTE(review): db_url is not referenced below — both run modes read
# settings.POSTGRES_URL directly; confirm before removing.
db_url = settings.POSTGRES_URL
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
37 |
38 |
def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    The context is configured with just a URL rather than an Engine
    (no DBAPI required): calls to context.execute() emit the given
    string to the script output instead of hitting a database.
    """
    context.configure(
        url=settings.POSTGRES_URL,
        target_metadata=target_metadata,
        dialect_opts={"paramstyle": "named"},
        literal_binds=True,
        compare_type=True,
    )

    with context.begin_transaction():
        context.run_migrations()
61 |
62 |
def do_run_migrations(connection: Connection) -> None:
    """Bind the Alembic context to an open connection and run the migrations."""
    context.configure(target_metadata=target_metadata, connection=connection)

    with context.begin_transaction():
        context.run_migrations()
68 |
69 |
async def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    Creates an async Engine and associates a live connection with the context.
    """
    engine = create_async_engine(settings.POSTGRES_URL, echo=True, future=True)

    async with engine.connect() as conn:
        await conn.run_sync(do_run_migrations)
79 |
80 |
# Entry point: Alembic executes this module directly, so dispatch on the run mode here
if context.is_offline_mode():
    run_migrations_offline()
else:
    # The online path is async (asyncpg engine), hence the asyncio.run wrapper
    asyncio.run(run_migrations_online())
85 |
--------------------------------------------------------------------------------
/src/migrations/script.py.mako:
--------------------------------------------------------------------------------
1 | """${message}
2 |
3 | Revision ID: ${up_revision}
4 | Revises: ${down_revision | comma,n}
5 | Create Date: ${create_date}
6 |
7 | """
8 | from typing import Sequence, Union
9 |
10 | from alembic import op
11 | import sqlalchemy as sa
12 | import sqlmodel
13 | ${imports if imports else ""}
14 |
15 | # revision identifiers, used by Alembic.
16 | revision: str = ${repr(up_revision)}
17 | down_revision: Union[str, None] = ${repr(down_revision)}
18 | branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
19 | depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
20 |
21 |
22 | def upgrade() -> None:
23 | ${upgrades if upgrades else "pass"}
24 |
25 |
26 | def downgrade() -> None:
27 | ${downgrades if downgrades else "pass"}
28 |
--------------------------------------------------------------------------------
/src/migrations/versions/2024_05_30_1200-f84a0ed81bdc_init.py:
--------------------------------------------------------------------------------
1 | """init
2 |
3 | Revision ID: f84a0ed81bdc
4 | Revises:
5 | Create Date: 2024-05-30 12:00:11.710286
6 |
7 | """
8 |
9 | from typing import Sequence, Union
10 |
11 | import sqlalchemy as sa
12 | import sqlmodel
13 | from alembic import op
14 |
15 | # revision identifiers, used by Alembic.
16 | revision: str = "f84a0ed81bdc"
17 | down_revision: Union[str, None] = None
18 | branch_labels: Union[str, Sequence[str], None] = None
19 | depends_on: Union[str, Sequence[str], None] = None
20 |
21 |
def upgrade() -> None:
    """Create the initial schema: camera, user (unique indexed login) and detection tables."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "camera",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column("angle_of_view", sa.Float(), nullable=False),
        sa.Column("elevation", sa.Float(), nullable=False),
        sa.Column("lat", sa.Float(), nullable=False),
        sa.Column("lon", sa.Float(), nullable=False),
        sa.Column("is_trustable", sa.Boolean(), nullable=False),
        sa.Column("last_active_at", sa.DateTime(), nullable=True),
        sa.Column("created_at", sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("name"),
    )
    op.create_table(
        "user",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("role", sa.Enum("ADMIN", "AGENT", "USER", name="userrole"), nullable=False),
        sa.Column("login", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column("hashed_password", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column("created_at", sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(op.f("ix_user_login"), "user", ["login"], unique=True)
    # detection references camera via a foreign key, so it is created last
    op.create_table(
        "detection",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("camera_id", sa.Integer(), nullable=False),
        sa.Column("azimuth", sa.Float(), nullable=False),
        sa.Column("bucket_key", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column("is_wildfire", sa.Boolean(), nullable=True),
        sa.Column("created_at", sa.DateTime(), nullable=False),
        sa.Column("updated_at", sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(
            ["camera_id"],
            ["camera.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    # ### end Alembic commands ###
64 |
65 |
def downgrade() -> None:
    """Drop the initial tables in reverse dependency order (detection first)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table("detection")
    op.drop_index(op.f("ix_user_login"), table_name="user")
    op.drop_table("user")
    op.drop_table("camera")
    # ### end Alembic commands ###
73 |
--------------------------------------------------------------------------------
/src/migrations/versions/2024_06_17_1521-4265426f8438_create_stes_table.py:
--------------------------------------------------------------------------------
1 | """create stes table
2 |
3 | Revision ID: 4265426f8438
4 | Revises: f84a0ed81bdc
5 | Create Date: 2024-06-17 15:21:58.003045
6 |
7 | """
8 |
9 | from typing import Sequence, Union
10 |
11 | import sqlalchemy as sa
12 | import sqlmodel
13 | from alembic import op
14 |
15 | # revision identifiers, used by Alembic.
16 | revision: str = "4265426f8438"
17 | down_revision: Union[str, None] = "f84a0ed81bdc"
18 | branch_labels: Union[str, Sequence[str], None] = None
19 | depends_on: Union[str, Sequence[str], None] = None
20 |
21 |
def upgrade() -> None:
    """Create the organization table and link cameras and users to it."""
    op.create_table(
        "organization",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("name"),
    )

    # Add the 'organization_id' column to the 'camera' and 'user' tables and create foreign key constraints
    op.add_column("camera", sa.Column("organization_id", sa.Integer(), nullable=True))
    op.create_foreign_key("fk_camera_orga", "camera", "organization", ["organization_id"], ["id"])
    op.add_column("user", sa.Column("organization_id", sa.Integer(), nullable=True))
    # BUG FIX: fk_user_orga was previously created on "camera" instead of "user", leaving
    # user.organization_id without a FK and breaking downgrade()'s
    # drop_constraint("fk_user_orga", "user").
    # NOTE(review): if this revision was already applied somewhere, ship a corrective
    # follow-up migration instead of editing history.
    op.create_foreign_key("fk_user_orga", "user", "organization", ["organization_id"], ["id"])
36 |
37 |
def downgrade() -> None:
    """Drop the organization links from camera/user, then the organization table."""
    # Remove the foreign key constraints and the 'organization_id' columns
    op.drop_constraint("fk_camera_orga", "camera", type_="foreignkey")
    op.drop_constraint("fk_user_orga", "user", type_="foreignkey")
    op.drop_column("camera", "organization_id")
    # BUG FIX: the column name was misspelled as "organization_idation_id"
    op.drop_column("user", "organization_id")
    op.drop_table("organization")
45 |
--------------------------------------------------------------------------------
/src/tests/endpoints/test_login.py:
--------------------------------------------------------------------------------
1 | from typing import Any, Dict, Union
2 |
3 | import pytest
4 | from httpx import AsyncClient
5 | from sqlmodel.ext.asyncio.session import AsyncSession
6 |
7 |
@pytest.mark.parametrize(
    ("payload", "status_code", "status_detail"),
    [
        ({"username": "foo"}, 422, None),
        ({"username": "foo", "password": "bar"}, 401, None),
        ({"username": "first_login", "password": "pwd"}, 401, None),
        ({"username": "first_login", "password": "first_pwd"}, 200, None),
    ],
)
@pytest.mark.asyncio
async def test_login_with_creds(
    async_client: AsyncClient,
    user_session: AsyncSession,
    payload: Dict[str, Any],
    status_code: int,
    status_detail: Union[str, None],
):
    """Check credential login: status code, error detail and token shape."""
    response = await async_client.post("/login/creds", data=payload)
    assert response.status_code == status_code
    if isinstance(status_detail, str):
        assert response.json()["detail"] == status_detail
    if response.status_code // 100 == 2:
        token_data = response.json()
        assert token_data["token_type"] == "bearer"  # noqa: S105
        assert isinstance(token_data["access_token"], str)
        assert len(token_data["access_token"]) == 171
34 |
--------------------------------------------------------------------------------
/src/tests/endpoints/test_organizations.py:
--------------------------------------------------------------------------------
1 | from typing import Any, Dict, List, Union
2 |
3 | import pytest
4 | from httpx import AsyncClient
5 | from sqlmodel.ext.asyncio.session import AsyncSession
6 |
7 |
@pytest.mark.parametrize(
    ("user_idx", "payload", "status_code", "status_detail"),
    [
        (
            None,
            {"name": "pyro-organization"},
            401,
            "Not authenticated",
        ),
        (
            0,
            {"name": "pyro-organization"},
            201,
            None,
        ),
        (
            1,
            {"name": "pyro-organization2"},
            403,
            "Incompatible token scope.",
        ),
        (
            2,
            {"name": "pyro-organization"},
            403,
            "Incompatible token scope.",
        ),
    ],
)
@pytest.mark.asyncio
async def test_create_organization(
    async_client: AsyncClient,
    user_idx: Union[int, None],
    payload: Dict[str, Any],
    status_code: int,
    status_detail: Union[str, None],
):
    """Exercise POST /organizations across unauthenticated and per-role scenarios."""
    auth = None
    if isinstance(user_idx, int):
        auth = pytest.get_token(
            pytest.user_table[user_idx]["id"],
            pytest.user_table[user_idx]["role"].split(),
            pytest.user_table[user_idx]["organization_id"],
        )

    response = await async_client.post("/organizations", json=payload, headers=auth)
    # FIX: pass the response dict as the assertion message; the previous
    # `assert ..., print(...)` form evaluates to None, yielding an empty failure message.
    assert response.status_code == status_code, response.__dict__
    if isinstance(status_detail, str):
        assert response.json()["detail"] == status_detail
    if response.status_code // 100 == 2:
        assert {k: v for k, v in response.json().items() if k not in {"id", "telegram_id"}} == payload
59 |
60 |
@pytest.mark.parametrize(
    ("user_idx", "organization_id", "status_code", "status_detail", "expected_idx"),
    [
        (None, 1, 401, "Not authenticated", None),
        (0, 1, 200, None, 0),
        (1, 1, 403, "Incompatible token scope.", 0),
        (2, 1, 403, "Incompatible token scope.", 0),
    ],
)
@pytest.mark.asyncio
async def test_get_organization(
    async_client: AsyncClient,
    organization_session: AsyncSession,
    user_idx: Union[int, None],
    organization_id: int,
    status_code: int,
    status_detail: Union[str, None],
    expected_idx: Union[int, None],
):
    """Exercise GET /organizations/{id} across unauthenticated and per-role scenarios."""
    auth = None
    organization_id_from_table = pytest.user_table[user_idx]["organization_id"] if user_idx is not None else None
    if isinstance(user_idx, int):
        auth = pytest.get_token(
            pytest.user_table[user_idx]["id"],
            pytest.user_table[user_idx]["role"].split(),
            organization_id_from_table,
        )

    response = await async_client.get(f"/organizations/{organization_id}", headers=auth)
    # FIX: pass the response dict as the assertion message; the previous
    # `assert ..., print(...)` form evaluates to None, yielding an empty failure message.
    assert response.status_code == status_code, response.__dict__
    if isinstance(status_detail, str):
        assert response.json()["detail"] == status_detail
    if response.status_code // 100 == 2:
        assert response.json() == pytest.organization_table[expected_idx]
95 |
96 |
@pytest.mark.parametrize(
    ("user_idx", "status_code", "status_detail", "expected_response"),
    [
        (None, 401, "Not authenticated", None),
        (0, 200, None, pytest.organization_table),
        (1, 403, "Incompatible token scope.", None),
        (2, 403, "Incompatible token scope.", None),
    ],
)
@pytest.mark.asyncio
async def test_fetch_organizations(
    async_client: AsyncClient,
    organization_session: AsyncSession,
    user_idx: Union[int, None],
    status_code: int,
    status_detail: Union[str, None],
    expected_response: Union[List[Dict[str, Any]], None],
):
    """Exercise GET /organizations across unauthenticated and per-role scenarios."""
    auth = None
    if isinstance(user_idx, int):
        auth = pytest.get_token(
            pytest.user_table[user_idx]["id"],
            pytest.user_table[user_idx]["role"].split(),
            pytest.user_table[user_idx]["organization_id"],
        )

    response = await async_client.get("/organizations", headers=auth)
    # FIX: pass the response dict as the assertion message; the previous
    # `assert ..., print(...)` form evaluates to None, yielding an empty failure message.
    assert response.status_code == status_code, response.__dict__
    if isinstance(status_detail, str):
        assert response.json()["detail"] == status_detail
    if response.status_code // 100 == 2:
        assert response.json() == expected_response
129 |
130 |
@pytest.mark.parametrize(
    ("user_idx", "organization_id", "status_code", "status_detail"),
    [
        (None, 1, 401, "Not authenticated"),
        (0, 1, 200, None),
        (1, 1, 403, "Incompatible token scope."),
        (1, 2, 403, "Incompatible token scope."),
        (2, 2, 403, "Incompatible token scope."),
    ],
)
@pytest.mark.asyncio
async def test_delete_organization(
    async_client: AsyncClient,
    organization_session: AsyncSession,
    user_idx: Union[int, None],
    organization_id: int,
    status_code: int,
    status_detail: Union[str, None],
):
    """Exercise DELETE /organizations/{id} across unauthenticated and per-role scenarios."""
    auth = None
    organization_id_from_table = pytest.user_table[user_idx]["organization_id"] if user_idx is not None else None
    if isinstance(user_idx, int):
        auth = pytest.get_token(
            pytest.user_table[user_idx]["id"],
            pytest.user_table[user_idx]["role"].split(),
            organization_id_from_table,
        )

    response = await async_client.delete(f"/organizations/{organization_id}", headers=auth)
    # FIX: pass the response dict as the assertion message; the previous
    # `assert ..., print(...)` form evaluates to None, yielding an empty failure message.
    assert response.status_code == status_code, response.__dict__
    if isinstance(status_detail, str):
        assert response.json()["detail"] == status_detail
    if response.status_code // 100 == 2:
        assert response.json() is None
165 |
--------------------------------------------------------------------------------
/src/tests/endpoints/test_users.py:
--------------------------------------------------------------------------------
1 | from typing import Any, Dict, Union
2 |
3 | import pytest
4 | from httpx import AsyncClient
5 | from sqlmodel.ext.asyncio.session import AsyncSession
6 |
7 |
@pytest.mark.parametrize(
    ("user_idx", "payload", "status_code", "status_detail"),
    [
        (
            None,
            {"login": "pyro_user", "password": "bar", "role": "user", "organization_id": 1},
            401,
            "Not authenticated",
        ),
        (
            1,
            {"login": "pyro_user", "password": "bar", "role": "user", "organization_id": 1},
            403,
            "Incompatible token scope.",
        ),
        (
            2,
            {"login": "pyro_user", "password": "bar", "role": "user", "organization_id": 2},
            403,
            "Incompatible token scope.",
        ),
        (
            0,
            {"login": "first_login", "password": "bar", "role": "user", "organization_id": 1},
            409,
            "Login already taken",
        ),
        (
            0,
            {"login": "pyro_user", "organization_id": 1},
            422,
            None,
        ),
    ],
)
@pytest.mark.asyncio
async def test_create_user(
    async_client: AsyncClient,
    user_session: AsyncSession,
    user_idx: Union[int, None],
    payload: Dict[str, Any],
    status_code: int,
    status_detail: Union[str, None],
):
    """POST /users: auth scope, duplicate-login (409) and schema (422) failures."""
    auth = None
    if isinstance(user_idx, int):
        # Forge a JWT for the selected fixture user.
        auth = pytest.get_token(
            pytest.user_table[user_idx]["id"],
            pytest.user_table[user_idx]["role"].split(),
            pytest.user_table[user_idx]["organization_id"],
        )

    response = await async_client.post("/users", json=payload, headers=auth)
    # Use the response dict as the assertion message. The original passed
    # `print(...)`, which evaluates to None and only worked via a print side effect.
    assert response.status_code == status_code, response.__dict__
    if isinstance(status_detail, str):
        assert response.json()["detail"] == status_detail
    if response.status_code // 100 == 2:
        # NOTE(review): no 2xx case is parametrized above, so this branch is
        # currently unexercised; the expected dict also omits "organization_id" —
        # confirm against the response schema if a success case is added.
        assert {k: v for k, v in response.json().items() if k != "created_at"} == {
            "login": payload["login"],
            "hashed_password": f"hashed_{payload['password']}",
            "role": payload["role"],
            "id": max(entry["id"] for entry in pytest.user_table) + 1,
        }
71 |
72 |
@pytest.mark.parametrize(
    ("user_idx", "user_id", "status_code", "status_detail", "expected_idx"),
    [
        (None, 1, 401, "Not authenticated", None),
        (0, 0, 422, None, None),
        (0, 400, 404, "Table User has no corresponding entry.", None),
        (1, 1, 403, "Incompatible token scope.", None),
        (0, 1, 200, None, 0),
        (0, 2, 200, None, 1),
    ],
)
@pytest.mark.asyncio
async def test_get_user(
    async_client: AsyncClient,
    user_session: AsyncSession,
    user_idx: Union[int, None],
    user_id: int,
    status_code: int,
    status_detail: Union[str, None],
    expected_idx: Union[int, None],
):
    """GET /users/{id}: admin can read any user; bad ids yield 422/404."""
    auth = None
    if isinstance(user_idx, int):
        # Forge a JWT for the selected fixture user.
        auth = pytest.get_token(
            pytest.user_table[user_idx]["id"],
            pytest.user_table[user_idx]["role"].split(),
            pytest.user_table[user_idx]["organization_id"],
        )

    response = await async_client.get(f"/users/{user_id}", headers=auth)
    # Use the response dict as the assertion message (the original used
    # `print(...)`, which returns None and relied on a side effect).
    assert response.status_code == status_code, response.__dict__
    if isinstance(status_detail, str):
        assert response.json()["detail"] == status_detail
    if response.status_code // 100 == 2:
        assert response.json() == pytest.user_table[expected_idx]
108 |
109 |
@pytest.mark.parametrize(
    ("user_idx", "status_code", "status_detail"),
    [
        (None, 401, "Not authenticated"),
        (0, 200, None),
        (1, 403, "Incompatible token scope."),
    ],
)
@pytest.mark.asyncio
async def test_fetch_users(
    async_client: AsyncClient,
    user_session: AsyncSession,
    user_idx: Union[int, None],
    status_code: int,
    status_detail: Union[str, None],
):
    """GET /users/: only the admin fixture user may list the full table."""
    auth = None
    if isinstance(user_idx, int):
        # Forge a JWT for the selected fixture user.
        auth = pytest.get_token(
            pytest.user_table[user_idx]["id"],
            pytest.user_table[user_idx]["role"].split(),
            pytest.user_table[user_idx]["organization_id"],
        )

    response = await async_client.get("/users/", headers=auth)
    # Use the response dict as the assertion message (the original used
    # `print(...)`, which returns None and relied on a side effect).
    assert response.status_code == status_code, response.__dict__
    if isinstance(status_detail, str):
        assert response.json()["detail"] == status_detail
    if response.status_code // 100 == 2:
        assert response.json() == pytest.user_table
140 |
141 |
@pytest.mark.parametrize(
    ("user_idx", "user_id", "status_code", "status_detail"),
    [
        (None, 1, 401, "Not authenticated"),
        (0, 1, 200, None),
        (0, 2, 200, None),
        (1, 1, 403, "Incompatible token scope."),
        (1, 2, 403, "Incompatible token scope."),
    ],
)
@pytest.mark.asyncio
async def test_delete_user(
    async_client: AsyncClient,
    user_session: AsyncSession,
    user_idx: Union[int, None],
    user_id: int,
    status_code: int,
    status_detail: Union[str, None],
):
    """DELETE /users/{id}: admin only; non-admin roles get 403."""
    auth = None
    if isinstance(user_idx, int):
        # Forge a JWT for the selected fixture user.
        auth = pytest.get_token(
            pytest.user_table[user_idx]["id"],
            pytest.user_table[user_idx]["role"].split(),
            pytest.user_table[user_idx]["organization_id"],
        )

    response = await async_client.delete(f"/users/{user_id}", headers=auth)
    # Use the response dict as the assertion message (the original used
    # `print(...)`, which returns None and relied on a side effect).
    assert response.status_code == status_code, response.__dict__
    if isinstance(status_detail, str):
        assert response.json()["detail"] == status_detail
    if response.status_code // 100 == 2:
        # Successful deletion returns an empty (null) body.
        assert response.json() is None
175 |
176 |
@pytest.mark.parametrize(
    ("user_idx", "user_id", "payload", "status_code", "status_detail", "expected_idx"),
    [
        (None, 1, {"password": "HeyPyro!"}, 401, "Not authenticated", None),
        (0, 1, {"login": "HeyPyro!"}, 422, None, None),
        (0, 1, {"password": "HeyPyro!"}, 200, None, 0),
        (0, 2, {"password": "HeyPyro!"}, 200, None, 1),
        (1, 1, {"password": "HeyPyro!"}, 403, "Incompatible token scope.", None),
        (1, 2, {"password": "HeyPyro!"}, 403, "Incompatible token scope.", None),
    ],
)
@pytest.mark.asyncio
async def test_update_user_password(
    async_client: AsyncClient,
    user_session: AsyncSession,
    user_idx: Union[int, None],
    user_id: int,
    payload: Dict[str, Any],
    status_code: int,
    status_detail: Union[str, None],
    expected_idx: Union[int, None],
):
    """PATCH /users/{id}: admin updates a password; only the hash field changes."""
    auth = None
    if isinstance(user_idx, int):
        # Forge a JWT for the selected fixture user.
        auth = pytest.get_token(
            pytest.user_table[user_idx]["id"],
            pytest.user_table[user_idx]["role"].split(),
            pytest.user_table[user_idx]["organization_id"],
        )

    response = await async_client.patch(f"/users/{user_id}", json=payload, headers=auth)
    # Use the response dict as the assertion message (the original used
    # `print(...)`, which returns None and relied on a side effect).
    assert response.status_code == status_code, response.__dict__
    if isinstance(status_detail, str):
        assert response.json()["detail"] == status_detail
    if response.status_code // 100 == 2:
        # All fields but hashed_password must be unchanged from the fixture row.
        assert response.json() == {
            "id": pytest.user_table[expected_idx]["id"],
            "created_at": pytest.user_table[expected_idx]["created_at"],
            "login": pytest.user_table[expected_idx]["login"],
            "hashed_password": f"hashed_{payload['password']}",
            "role": pytest.user_table[expected_idx]["role"],
            "organization_id": pytest.user_table[expected_idx]["organization_id"],
        }
220 |
--------------------------------------------------------------------------------
/src/tests/endpoints/test_webhooks.py:
--------------------------------------------------------------------------------
1 | from typing import Any, Dict, List, Union
2 |
3 | import pytest
4 | from httpx import AsyncClient
5 | from sqlmodel.ext.asyncio.session import AsyncSession
6 |
7 |
@pytest.mark.parametrize(
    ("user_idx", "payload", "status_code", "status_detail"),
    [
        (
            None,
            {"url": "http://www.google.com/"},
            401,
            "Not authenticated",
        ),
        (
            0,
            {"url": pytest.webhook_table[0]["url"]},
            409,
            None,
        ),
        (
            0,
            {"url": "http://www.google.com/"},
            201,
            None,
        ),
        (
            1,
            {"url": "http://www.google.com/"},
            403,
            "Incompatible token scope.",
        ),
        (
            2,
            {"url": "http://www.google.com/"},
            403,
            "Incompatible token scope.",
        ),
    ],
)
@pytest.mark.asyncio
async def test_create_webhook(
    async_client: AsyncClient,
    webhook_session: AsyncSession,
    user_idx: Union[int, None],
    payload: Dict[str, Any],
    status_code: int,
    status_detail: Union[str, None],
):
    """POST /webhooks: admin creates a webhook; duplicate URLs are rejected (409)."""
    auth = None
    if isinstance(user_idx, int):
        # Forge a JWT for the selected fixture user.
        auth = pytest.get_token(
            pytest.user_table[user_idx]["id"],
            pytest.user_table[user_idx]["role"].split(),
            pytest.user_table[user_idx]["organization_id"],
        )

    response = await async_client.post("/webhooks", json=payload, headers=auth)
    # Use the response dict as the assertion message (the original used
    # `print(...)`, which returns None and relied on a side effect).
    assert response.status_code == status_code, response.__dict__
    if isinstance(status_detail, str):
        assert response.json()["detail"] == status_detail
    if response.status_code // 100 == 2:
        # Response echoes the payload plus a server-assigned id.
        assert {k: v for k, v in response.json().items() if k != "id"} == payload
66 |
67 |
@pytest.mark.parametrize(
    ("user_idx", "webhook_id", "status_code", "status_detail", "expected_idx"),
    [
        (None, 1, 401, "Not authenticated", None),
        (0, 1, 200, None, 0),
        (1, 1, 403, "Incompatible token scope.", 0),
        (2, 1, 403, "Incompatible token scope.", 0),
    ],
)
@pytest.mark.asyncio
async def test_get_webhook(
    async_client: AsyncClient,
    webhook_session: AsyncSession,
    user_idx: Union[int, None],
    webhook_id: int,
    status_code: int,
    status_detail: Union[str, None],
    expected_idx: Union[int, None],
):
    """GET /webhooks/{id}: admin only; other roles get 403."""
    auth = None
    if isinstance(user_idx, int):
        # Forge a JWT for the selected fixture user. The organization id is read
        # directly here; the old pre-computed variable duplicated this guard.
        auth = pytest.get_token(
            pytest.user_table[user_idx]["id"],
            pytest.user_table[user_idx]["role"].split(),
            pytest.user_table[user_idx]["organization_id"],
        )

    response = await async_client.get(f"/webhooks/{webhook_id}", headers=auth)
    # Use the response dict as the assertion message (the original used
    # `print(...)`, which returns None and relied on a side effect).
    assert response.status_code == status_code, response.__dict__
    if isinstance(status_detail, str):
        assert response.json()["detail"] == status_detail
    if response.status_code // 100 == 2:
        assert response.json() == pytest.webhook_table[expected_idx]
102 |
103 |
@pytest.mark.parametrize(
    ("user_idx", "status_code", "status_detail", "expected_response"),
    [
        (None, 401, "Not authenticated", None),
        (0, 200, None, pytest.webhook_table),
        (1, 403, "Incompatible token scope.", None),
        (2, 403, "Incompatible token scope.", None),
    ],
)
@pytest.mark.asyncio
async def test_fetch_webhooks(
    async_client: AsyncClient,
    webhook_session: AsyncSession,
    user_idx: Union[int, None],
    status_code: int,
    status_detail: Union[str, None],
    expected_response: Union[List[Dict[str, Any]], None],
):
    """GET /webhooks: only the admin fixture user may list the full table."""
    auth = None
    if isinstance(user_idx, int):
        # Forge a JWT for the selected fixture user.
        auth = pytest.get_token(
            pytest.user_table[user_idx]["id"],
            pytest.user_table[user_idx]["role"].split(),
            pytest.user_table[user_idx]["organization_id"],
        )

    response = await async_client.get("/webhooks", headers=auth)
    # Use the response dict as the assertion message (the original used
    # `print(...)`, which returns None and relied on a side effect).
    assert response.status_code == status_code, response.__dict__
    if isinstance(status_detail, str):
        assert response.json()["detail"] == status_detail
    if response.status_code // 100 == 2:
        assert response.json() == expected_response
136 |
137 |
@pytest.mark.parametrize(
    ("user_idx", "webhook_id", "status_code", "status_detail"),
    [
        (None, 1, 401, "Not authenticated"),
        (0, 1, 200, None),
        (1, 1, 403, "Incompatible token scope."),
        (1, 2, 403, "Incompatible token scope."),
        (2, 2, 403, "Incompatible token scope."),
    ],
)
@pytest.mark.asyncio
async def test_delete_webhook(
    async_client: AsyncClient,
    webhook_session: AsyncSession,
    user_idx: Union[int, None],
    webhook_id: int,
    status_code: int,
    status_detail: Union[str, None],
):
    """DELETE /webhooks/{id}: admin only; other roles get 403."""
    auth = None
    if isinstance(user_idx, int):
        # Forge a JWT for the selected fixture user. The organization id is read
        # directly here; the old pre-computed variable duplicated this guard.
        auth = pytest.get_token(
            pytest.user_table[user_idx]["id"],
            pytest.user_table[user_idx]["role"].split(),
            pytest.user_table[user_idx]["organization_id"],
        )

    response = await async_client.delete(f"/webhooks/{webhook_id}", headers=auth)
    # Use the response dict as the assertion message (the original used
    # `print(...)`, which returns None and relied on a side effect).
    assert response.status_code == status_code, response.__dict__
    if isinstance(status_detail, str):
        assert response.json()["detail"] == status_detail
    if response.status_code // 100 == 2:
        # Successful deletion returns an empty (null) body.
        assert response.json() is None
172 |
--------------------------------------------------------------------------------
/src/tests/services/test_storage.py:
--------------------------------------------------------------------------------
1 | import io
2 |
3 | import boto3
4 | import pytest
5 |
6 | from app.core.config import settings
7 | from app.services.storage import S3Bucket, S3Service
8 |
9 |
@pytest.mark.parametrize(
    (
        "region",
        "endpoint_url",
        "access_key",
        "secret_key",
        "proxy_url",
        "expected_error",
    ),
    [
        (None, None, None, None, None, ValueError),
        (
            "us-east-1",
            "http://localhost:9000",
            settings.S3_ACCESS_KEY,
            settings.S3_SECRET_KEY,
            settings.S3_PROXY_URL,
            ValueError,
        ),
        (
            settings.S3_REGION,
            settings.S3_ENDPOINT_URL,
            None,
            None,
            settings.S3_PROXY_URL,
            ValueError,
        ),
        (
            settings.S3_REGION,
            settings.S3_ENDPOINT_URL,
            settings.S3_ACCESS_KEY,
            settings.S3_SECRET_KEY,
            settings.S3_PROXY_URL,
            None,
        ),
    ],
)
@pytest.mark.asyncio
async def test_s3_service(region, endpoint_url, access_key, secret_key, proxy_url, expected_error):
    """S3Service: invalid config combos raise at construction; a valid one can manage buckets."""
    if expected_error is not None:
        # Misconfigured credentials/endpoint must be rejected eagerly.
        with pytest.raises(expected_error):
            S3Service(region, endpoint_url, access_key, secret_key, proxy_url)
        return

    service = S3Service(region, endpoint_url, access_key, secret_key, proxy_url)
    assert isinstance(service.resolve_bucket_name(1), str)
    # Round-trip a throwaway bucket to exercise create + delete.
    bucket_name = "dummy-bucket"
    service.create_bucket(bucket_name)
    await service.delete_bucket(bucket_name)
60 |
61 |
@pytest.mark.parametrize(
    ("bucket_name", "proxy_url", "expected_error"),
    [
        (None, None, TypeError),
        ("dummy-bucket1", None, ValueError),
        ("dummy-bucket2", settings.S3_PROXY_URL, None),
    ],
)
@pytest.mark.asyncio
async def test_s3_bucket(bucket_name, proxy_url, expected_error, mock_img):
    """S3Bucket wrapper: bad arguments raise; a valid bucket supports the full file lifecycle."""
    boto_session = boto3.Session(settings.S3_ACCESS_KEY, settings.S3_SECRET_KEY, region_name=settings.S3_REGION)
    client = boto_session.client("s3", endpoint_url=settings.S3_ENDPOINT_URL)
    if expected_error is not None:
        # Invalid name / missing proxy must be rejected at construction time.
        with pytest.raises(expected_error):
            S3Bucket(client, bucket_name, proxy_url)
        return

    client.create_bucket(Bucket=bucket_name, CreateBucketConfiguration={"LocationConstraint": settings.S3_REGION})
    bucket = S3Bucket(client, bucket_name, proxy_url)
    key = "logo.png"
    # Upload, then verify existence/metadata/URL accessors.
    assert not bucket.check_file_existence(key)
    bucket.upload_file(key, io.BytesIO(mock_img))
    assert bucket.check_file_existence(key)
    assert isinstance(bucket.get_file_metadata(key), dict)
    assert bucket.get_public_url(key).startswith("http://")
    # Single-file deletion.
    bucket.delete_file(key)
    assert not bucket.check_file_existence(key)
    # Bulk deletion removes re-uploaded content.
    bucket.upload_file(key, io.BytesIO(mock_img))
    assert bucket.check_file_existence(key)
    await bucket.delete_items()
    assert not bucket.check_file_existence(key)
    # Clean up the test bucket itself.
    client.delete_bucket(Bucket=bucket_name)
97 |
--------------------------------------------------------------------------------
/src/tests/services/test_telegram.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | from app.core.config import settings
4 | from app.services.telegram import TelegramClient
5 |
6 |
def test_telegram_client():
    """TelegramClient: token validation, the enablement flag, and disabled-client guards."""
    # A malformed token string is rejected eagerly.
    with pytest.raises(ValueError, match="Invalid Telegram Bot token"):
        TelegramClient("invalid-token")

    # No token: the client is constructed but disabled.
    assert not TelegramClient(None).is_enabled

    client = TelegramClient(settings.TELEGRAM_TOKEN)
    token_is_set = isinstance(settings.TELEGRAM_TOKEN, str)
    assert client.is_enabled == token_is_set

    if token_is_set:
        # With a real token, a bogus channel id is unreachable (HTTP 404).
        assert not client.has_channel_access("invalid-channel-id")
        assert client.notify("invalid-channel-id", "test").status_code == 404
    else:
        # A disabled client refuses both channel checks and notifications.
        with pytest.raises(AssertionError, match="Telegram notifications are not enabled"):
            client.has_channel_access("invalid-channel-id")
        with pytest.raises(AssertionError, match="Telegram notifications are not enabled"):
            client.notify("invalid-channel-id", "test")
25 |
--------------------------------------------------------------------------------
/src/tests/test_dependencies.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | from fastapi import HTTPException
3 | from fastapi.security import SecurityScopes
4 |
5 | from app.api.dependencies import get_jwt
6 | from app.core.security import create_access_token
7 |
8 |
@pytest.mark.parametrize(
    ("scopes", "token", "expires_minutes", "error_code", "expected_payload"),
    [
        (["admin"], "", None, 406, None),
        (["admin"], {"user_id": "123", "scopes": ["admin"]}, None, 422, None),
        (["admin"], {"sub": "123", "scopes": ["admin"]}, -1, 401, None),
        (
            ["admin"],
            {"sub": "123", "scopes": ["admin"], "organization_id": 1},
            None,
            None,
            {"sub": 123, "scopes": ["admin"], "organization_id": 1},
        ),
        (
            ["agent"],
            {"sub": "123", "scopes": ["agent"], "organization_id": 1},
            None,
            None,
            {"sub": 123, "scopes": ["agent"], "organization_id": 1},
        ),
        (
            ["camera"],
            {"sub": "123", "scopes": ["camera"], "organization_id": 1},
            None,
            None,
            {"sub": 123, "scopes": ["camera"], "organization_id": 1},
        ),
        (
            ["user"],
            {"sub": "123", "scopes": ["user"], "organization_id": 1},
            None,
            None,
            {"sub": 123, "scopes": ["user"], "organization_id": 1},
        ),
        (["admin"], {"sub": "123", "scopes": ["user"]}, None, 403, None),
        (["admin"], {"sub": "123", "scopes": ["agent"]}, None, 403, None),
        (["admin"], {"sub": "123", "scopes": ["camera"]}, None, 403, None),
    ],
)
def test_get_jwt(scopes, token, expires_minutes, error_code, expected_payload):
    """get_jwt: decode and validate a bearer token against the required scopes."""
    # Dict payloads are signed into a real token; raw strings pass through untouched.
    encoded = create_access_token(token, expires_minutes) if isinstance(token, dict) else token
    required = SecurityScopes(scopes)
    if error_code is None:
        payload = get_jwt(required, encoded)
        if expected_payload is not None:
            assert payload.model_dump() == expected_payload
    else:
        # Malformed, expired, or out-of-scope tokens raise an HTTP error.
        with pytest.raises(HTTPException):
            get_jwt(required, encoded)
57 |
--------------------------------------------------------------------------------
/src/tests/test_security.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime, timedelta
2 |
3 | import jwt
4 | import pytest
5 |
6 | from app.core.config import settings
7 | from app.core.security import create_access_token, hash_password, verify_password
8 |
9 |
def test_hash_password():
    """Password hashing is one-way, input-sensitive, and salted (non-deterministic)."""
    password = "my_password"
    digest = hash_password(password)

    # The hash must not equal the plaintext.
    assert digest != password
    # Different inputs produce different hashes.
    assert digest != hash_password(password + "bis")
    # A fresh salt makes re-hashing the same input yield a different digest.
    assert digest != hash_password(password)
18 |
19 |
def test_verify_password():
    """verify_password accepts only the original plaintext for a given hash."""
    password = "my_password"
    digest = hash_password(password)

    # Correct plaintext matches; any other string does not.
    assert verify_password(password, digest)
    assert not verify_password("another_try", digest)
26 |
27 |
@pytest.mark.parametrize(
    ("content", "expires_minutes", "expected_delta"),
    [
        ({"data": "my_data"}, 60, 60),
        ({"data": "my_data"}, None, settings.JWT_EXPIRE_MINUTES),
    ],
)
def test_create_access_token(content, expires_minutes, expected_delta):
    """Tokens embed the payload and expire after the requested (or default) delta."""
    token = create_access_token(content, expires_minutes)
    now = datetime.utcnow()
    assert isinstance(token, str)
    claims = jwt.decode(token, settings.JWT_SECRET, algorithms=[settings.JWT_ALGORITHM])
    # The original payload must survive the encode/decode round trip.
    for key, value in content.items():
        assert claims[key] == value
    # Expiry should be roughly `expected_delta` minutes from issuance.
    assert datetime.utcfromtimestamp(claims["exp"]) - timedelta(minutes=expected_delta) < now
44 |
--------------------------------------------------------------------------------