├── .github
│   └── workflows
│       ├── codeql-analysis.yml
│       ├── lint_test.yml
│       ├── python-publish-pypi.yml
│       └── python-publish-testpypi.yml
├── .gitignore
├── .readthedocs.yaml
├── LICENSE
├── README.md
├── docs
│   ├── Makefile
│   ├── _static
│   ├── api.rst
│   ├── conf.py
│   ├── getting_started.rst
│   ├── index.rst
│   ├── make.bat
│   └── requirements.txt
├── examples
│   ├── data
│   │   ├── outline.geojson
│   │   ├── outline2.geojson
│   │   ├── outline3.geojson
│   │   ├── plots_analysis
│   │   │   └── example_plots.geojson
│   │   ├── raster1.tif
│   │   ├── training_area.geojson
│   │   ├── validation_area.geojson
│   │   └── xy-test-image.jpeg
│   ├── detectors_management.py
│   ├── nongeo_imagery.ipynb
│   ├── nongeo_results.geojson
│   ├── plots_analysis.py
│   ├── raster_management.py
│   ├── training.py
│   ├── training_multiclass_upload.py
│   └── upload_and_detect.py
├── scripts
│   └── lint.sh
├── setup.cfg
├── setup.py
├── src
│   └── picterra
│       ├── __init__.py
│       ├── base_client.py
│       ├── detector_platform_client.py
│       ├── nongeo.py
│       └── plots_analysis_platform_client.py
└── tests
    ├── __init__.py
    ├── test_base_client.py
    ├── test_nongeo.py
    ├── test_platform_client.py
    ├── test_plots_analysis_client.py
    └── utils.py
/.github/workflows/codeql-analysis.yml:
--------------------------------------------------------------------------------
1 | # For most projects, this workflow file will not need changing; you simply need
2 | # to commit it to your repository.
3 | #
4 | # You may wish to alter this file to override the set of languages analyzed,
5 | # or to provide custom queries or build logic.
6 | #
7 | name: "CodeQL"
8 |
9 | on:
10 | push:
11 | branches: [ master ]
12 | pull_request:
13 | # The branches below must be a subset of the branches above
14 | branches: [ master ]
15 | schedule:
16 | - cron: '26 13 * * 5'
17 |
18 | jobs:
19 | analyze:
20 | name: Analyze
21 | runs-on: ubuntu-latest
22 | permissions:
23 | actions: read
24 | contents: read
25 | security-events: write
26 |
27 | strategy:
28 | fail-fast: false
29 | matrix:
30 | language: [ 'python' ]
31 | # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ]
32 | # Learn more:
33 | # https://docs.github.com/en/free-pro-team@latest/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#changing-the-languages-that-are-analyzed
34 |
35 | steps:
36 | - name: Checkout repository
37 | uses: actions/checkout@v2
38 |
39 | # Initializes the CodeQL tools for scanning.
40 | - name: Initialize CodeQL
41 | uses: github/codeql-action/init@v3
42 | with:
43 | languages: ${{ matrix.language }}
44 | # If you wish to specify custom queries, you can do so here or in a config file.
45 | # By default, queries listed here will override any specified in a config file.
46 | # Prefix the list here with "+" to use these queries and those in the config file.
47 | # queries: ./path/to/local/query, your-org/your-repo/queries@main
48 |
49 | - name: Perform CodeQL Analysis
50 | uses: github/codeql-action/analyze@v3
51 |
--------------------------------------------------------------------------------
/.github/workflows/lint_test.yml:
--------------------------------------------------------------------------------
1 | name: lint and tests
2 |
3 | on:
4 | push:
5 | branches: [ master ]
6 | pull_request:
7 |
8 | jobs:
9 | build:
10 | strategy:
11 | matrix:
12 | os: [ubuntu-24.04]
13 | python-version: ['3.10', '3.12']
14 | include:
15 | # We still support Python 3.7 for enterprise customers running on legacy
16 | # Python versions. We have to run it on ubuntu-22.04 because Python 3.7 is
17 | # EOL and not available on 24.04
18 | - os: ubuntu-22.04
19 | python-version: '3.7'
20 | runs-on: ${{ matrix.os }}
21 | steps:
22 | - uses: actions/checkout@v2
23 | - name: setup python ${{ matrix.python-version }}
24 | uses: actions/setup-python@v2
25 | with:
26 | python-version: ${{ matrix.python-version }}
27 | - name: install deps
28 | # We pin twine to 6.0.1 because 6.1.0 is breaking our build, see #148
29 | run: |
30 | python -m pip install --upgrade pip setuptools wheel flake8 "twine<=6.0.1"
31 | python setup.py develop
32 | pip install -e .[test,lint]
33 | - name: lint
34 | run: |
35 | scripts/lint.sh
36 | - name: tests
37 | run: |
38 | pytest -v tests
39 | # Test the build and run twine to check we are pypi compatible
40 | - name: check build
41 | run: |
42 | python setup.py bdist && twine check dist/*
43 | # Even though we have readthedocs build docs on every MR, this is useful
44 | # because we build with -W --keep-going (see Makefile) and this will
45 | # therefore fail on warnings that could be ignored by readthedocs and
46 | # lead to half-broken docs
47 | - name: build docs
48 | # This should match the version in .readthedocs.yaml in the repository root
49 | if: matrix.python-version == '3.12'
50 | run: |
51 | cd docs
52 | python -m pip install -r requirements.txt
53 | make html
54 |
--------------------------------------------------------------------------------
/.github/workflows/python-publish-pypi.yml:
--------------------------------------------------------------------------------
1 | # https://packaging.python.org/guides/publishing-package-distribution-releases-using-github-actions-ci-cd-workflows/
2 | # https://github.com/marketplace/actions/pypi-publish
3 | name: upload to pypi
4 |
5 | on:
6 | release:
7 | types: [created]
8 |
9 | jobs:
10 | publish:
11 | runs-on: ubuntu-latest
12 | steps:
13 | - uses: actions/checkout@v4
14 | - name: setup python
15 | uses: actions/setup-python@v4
16 | with:
17 | python-version: '3.7'
18 | - name: install deps
19 | run: |
20 | python -m pip install --upgrade pip setuptools wheel
21 | - name: build
22 | run: |
23 | python setup.py sdist bdist_wheel
24 | - name: publish to pypi
25 | uses: pypa/gh-action-pypi-publish@release/v1
26 | with:
27 | password: ${{ secrets.pypi_password }}
28 |
--------------------------------------------------------------------------------
/.github/workflows/python-publish-testpypi.yml:
--------------------------------------------------------------------------------
1 | # This is intended to be run manually before creating a release, to test
2 | # publishing to TestPyPI
3 |
4 | # https://packaging.python.org/guides/publishing-package-distribution-releases-using-github-actions-ci-cd-workflows/
5 | # https://github.com/marketplace/actions/pypi-publish
6 | name: upload to testpypi
7 |
8 | on: workflow_dispatch
9 |
10 | jobs:
11 | publish:
12 | runs-on: ubuntu-latest
13 | steps:
14 | - uses: actions/checkout@v4
15 | - name: setup python
16 | uses: actions/setup-python@v4
17 | with:
18 | python-version: '3.7'
19 | - name: install deps
20 | run: |
21 | python -m pip install --upgrade pip setuptools wheel
22 | - name: build
23 | run: |
24 | python setup.py sdist bdist_wheel
25 | - name: publish to test pypi
26 | uses: pypa/gh-action-pypi-publish@release/v1
27 | with:
28 | password: ${{ secrets.test_pypi_password }}
29 | repository_url: https://test.pypi.org/legacy/
30 |
31 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .DS_Store
2 | .cache
3 | .vscode
4 | .venv
5 | *.egg-info
6 | docs/_build
7 | .eggs
8 | .ipynb_checkpoints
9 | .pytest_cache
10 | __pycache__
11 | venv
12 | dist
13 | examples/result.geojson
14 | build
--------------------------------------------------------------------------------
/.readthedocs.yaml:
--------------------------------------------------------------------------------
1 | # Read the Docs configuration file for Sphinx projects
2 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
3 |
4 | # Required
5 | version: 2
6 |
7 | # Set the OS, Python version and other tools you might need
8 | build:
9 | os: ubuntu-22.04
10 | tools:
11 | python: "3.12"
12 | # You can also specify other tool versions:
13 | # nodejs: "20"
14 | # rust: "1.70"
15 | # golang: "1.20"
16 |
17 | # Build documentation in the "docs/" directory with Sphinx
18 | sphinx:
19 | configuration: docs/conf.py
20 | # You can configure Sphinx to use a different builder, for instance use the dirhtml builder for simpler URLs
21 | # builder: "dirhtml"
22 | # Fail on all warnings to avoid broken references
23 | # fail_on_warning: true
24 |
25 | # Optionally build your docs in additional formats such as PDF and ePub
26 | # formats:
27 | # - pdf
28 | # - epub
29 |
30 | # Optional but recommended, declare the Python requirements required
31 | # to build your documentation
32 | # See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html
33 | python:
34 | install:
35 | - requirements: docs/requirements.txt
36 | - method: pip
37 | path: .
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2020 Picterra
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 |
7 | # Picterra Python API Client
8 |
9 | 
10 | [Documentation Status](https://picterra-python.readthedocs.io/en/latest/?badge=latest)
11 | [PyPI](https://pypi.org/project/picterra/)
12 |
13 | Easily integrate state-of-the-art machine learning models in your app
14 |
15 | ```python
16 | from picterra import APIClient
17 |
18 | # Replace this with the id of one of your detectors
19 | detector_id = 'd552605b-6972-4a68-8d51-91e6cb531c24'
20 |
21 | # Set the PICTERRA_API_KEY environment variable to define your API key
22 | client = APIClient()
23 | print('Uploading raster...')
24 | raster_id = client.upload_raster('data/raster1.tif', name='a nice raster')
25 | print('Upload finished, starting detector...')
26 | result_id = client.run_detector(detector_id, raster_id)
27 | client.download_result_to_feature_collection(result_id, 'result.geojson')
28 | print('Detection finished, results are in result.geojson')
29 | ```
30 |
31 |
32 |
33 | ## Installation
34 |
35 | ```
36 | pip install picterra
37 | ```
38 |
39 | See the `examples` folder for usage examples.
40 |
41 | ## API Reference and User Guide available on [Read the Docs](https://picterra-python.readthedocs.io/)
42 |
43 |
44 |
45 |
46 | ## Development
47 |
48 | ### Setup
49 | Make sure you have `Python` and `pip` installed, then create a virtual environment in the root folder, e.g.
50 |
51 | ```bash
52 | python3 -m venv .venv
53 | source .venv/bin/activate
54 | ```
55 |
56 | Running
57 | ```bash
58 | pip install --editable '.[lint,test]'
59 | ```
60 | installs the library in editable mode with the `lint` and `test` extras, so you can run tests and linting locally without re-installing the library every time you change the code.
61 |
62 | ### Running tests
63 | In order to test locally, run:
64 | ```bash
65 | pytest
66 | ```
67 |
68 | ### Documentation
69 | Run
70 |
71 | ```bash
72 | cd docs
73 | make html
74 | ```
75 |
76 | to update the HTML documentation under `_build/html`.
77 | Spawn an HTTP server in that folder to see how the docs will look once deployed on Read the Docs.
78 |
79 | ### Checking linting
80 | Run
81 | ```bash
82 | scripts/lint.sh
83 | ```
84 |
85 |
86 | #### Sphinx docs
87 | Run
88 | ```bash
89 | cd docs
90 | python -m pip install -r requirements.txt
91 | make html
92 | ```
93 | and verify that no errors are output
94 |
95 | ## Release process
96 |
97 | 1. Bump the version number in `setup.py`
98 | 2. Manually run the [publish to testpypi workflow](https://github.com/Picterra/picterra-python/actions/workflows/python-publish-testpypi.yml)
99 | 3. Check the publication result on [testpypi](https://test.pypi.org/project/picterra/)
100 | 4. Create a release through GitHub
101 | 4.1. Make sure you create a new tag vX.Y.Z through the release UI
102 | 4.2. Click the "generate release notes" button in the UI to get release notes
103 | 5. The 'publish to pypi' workflow should run automatically
104 | 5.1. Note this will *not* work if you create the release first as a draft - you
105 | have to create it immediately
106 | 6. The updated package should be available on [pypi](https://pypi.org/project/picterra/)
107 |
108 |
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line, and also
5 | # from the environment for the first two.
6 | #
7 | # -W turns warnings into errors
8 | # --keep-going avoids stopping the build on the first warning (but it'll
9 | # still fail if any warning was triggered)
10 | SPHINXOPTS ?= -W --keep-going
11 | SPHINXBUILD ?= sphinx-build
12 | SOURCEDIR = .
13 | BUILDDIR = _build
14 |
15 | # Put it first so that "make" without argument is like "make help".
16 | help:
17 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
18 |
19 | .PHONY: help Makefile
20 |
21 | # Catch-all target: route all unknown targets to Sphinx using the new
22 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
23 | %: Makefile
24 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
25 |
26 | autobuild:
27 | sphinx-autobuild . _build/html --port 9999
28 |
--------------------------------------------------------------------------------
/docs/_static:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Picterra/picterra-python/8ee780014ed2867f7ce03ba3960f73ad6efe737a/docs/_static
--------------------------------------------------------------------------------
/docs/api.rst:
--------------------------------------------------------------------------------
1 | ===
2 | API
3 | ===
4 |
5 |
6 | Detector Platform
7 | -----------------
8 |
9 | .. autoclass:: picterra.DetectorPlatformClient
10 | :members:
11 |
12 | .. autoclass:: picterra.APIClient
13 | :members:
14 |
15 | .. automodule:: picterra.nongeo
16 | :members:
17 |
18 |
19 | Plots Analysis Platform
20 | -----------------------
21 |
22 | .. autoclass:: picterra.PlotsAnalysisPlatformClient
23 | :members:
24 |
25 |
26 | Utility classes
27 | ---------------
28 |
29 | .. autoclass:: picterra.ResultsPage
30 | :members:
31 |
32 | .. autoclass:: picterra.APIError
33 | :members:
--------------------------------------------------------------------------------
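
The utility classes above are exported from the top-level `picterra` package, so they can be combined with the client classes directly. A minimal sketch, assuming a valid `PICTERRA_API_KEY` is set and that `APIError` is the exception raised when an API call fails:

```python
from picterra import APIClient, APIError

# Replace this with the id of one of your detectors (same id as in README.md)
detector_id = "d552605b-6972-4a68-8d51-91e6cb531c24"

client = APIClient()  # reads the PICTERRA_API_KEY environment variable
try:
    raster_id = client.upload_raster("data/raster1.tif", name="a nice raster")
    result_id = client.run_detector(detector_id, raster_id)
    client.download_result_to_feature_collection(result_id, "result.geojson")
except APIError as e:
    # Assumption: APIError (documented above) is raised when the Picterra
    # server rejects a request, e.g. due to an invalid API key or detector id
    print("API call failed:", e)
```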
/docs/conf.py:
--------------------------------------------------------------------------------
1 | # Configuration file for the Sphinx documentation builder.
2 | #
3 | # This file only contains a selection of the most common options. For a full
4 | # list see the documentation:
5 | # https://www.sphinx-doc.org/en/master/usage/configuration.html
6 |
7 | # -- Path setup --------------------------------------------------------------
8 |
9 | # If extensions (or modules to document with autodoc) are in another directory,
10 | # add these directories to sys.path here. If the directory is relative to the
11 | # documentation root, use os.path.abspath to make it absolute, like shown here.
12 | #
13 | # import os
14 | # import sys
15 | # sys.path.insert(0, os.path.abspath('.'))
16 |
17 | # -- Project information -----------------------------------------------------
18 |
19 | project = "Picterra Python API"
20 | copyright = "2020, Picterra Team"
21 | author = "Picterra Team"
22 |
23 |
24 | # -- General configuration ---------------------------------------------------
25 |
26 | # Add any Sphinx extension module names here, as strings. They can be
27 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
28 | # ones.
29 | extensions = [
30 | "sphinx.ext.autodoc",
31 | "sphinx.ext.viewcode",
32 | "sphinx.ext.napoleon",
33 | "sphinx_rtd_theme",
34 | ]
35 |
36 | # Add any paths that contain templates here, relative to this directory.
37 | templates_path = ["_templates"]
38 |
39 | # List of patterns, relative to source directory, that match files and
40 | # directories to ignore when looking for source files.
41 | # This pattern also affects html_static_path and html_extra_path.
42 | exclude_patterns = ["_build", "Thumbs.db", ".DS_Store", "venv"]
43 |
44 |
45 | # -- Options for HTML output -------------------------------------------------
46 |
47 | # The theme to use for HTML and HTML Help pages. See the documentation for
48 | # a list of builtin themes.
49 | #
50 | html_theme = "sphinx_rtd_theme"
51 |
52 | html_theme_options = {}
53 |
54 |
55 | # Add any paths that contain custom static files (such as style sheets) here,
56 | # relative to this directory. They are copied after the builtin static files,
57 | # so a file named "default.css" will overwrite the builtin "default.css".
58 | html_static_path = ["_static"]
59 |
60 | master_doc = "index"
61 |
--------------------------------------------------------------------------------
/docs/getting_started.rst:
--------------------------------------------------------------------------------
1 | ===============
2 | Getting started
3 | ===============
4 |
5 | Installation
6 | ============
7 |
8 | Install using pip
9 |
10 | ::
11 |
12 | pip install picterra
13 |
14 | Set your Picterra API key through an environment variable
15 |
16 | ::
17 |
18 | export PICTERRA_API_KEY=
19 |
20 | Listing entities
21 | ================
22 |
23 | When listing entities (eg rasters, detectors) from your account, the Picterra server uses a *paginated*
24 | approach; this means that every `list_`-prefixed function returns a special `ResultsPage` class instance
25 | which behaves similarly to a Python list:
26 | * it is iterable over the elements in the page, eg with a `for` loop
27 | * the builtin `len` returns the number of elements in the page
28 | * indexing with `[n]` returns the `n`-th element of the page (0-indexed)
29 | * its `next()` method returns the following `ResultsPage`, if any, otherwise `None`
30 |
31 | .. literalinclude:: ../examples/detectors_management.py
32 | .. literalinclude:: ../examples/raster_management.py
33 |
34 |
35 | Upload & Detect
36 | ===============
37 |
38 | .. literalinclude:: ../examples/upload_and_detect.py
39 |
40 | Training
41 | ========
42 |
43 | .. note::
44 |
45 | **Please note the endpoints below are still in beta and thus may be subject to change**
46 |
47 | .. literalinclude:: ../examples/training.py
48 |
49 | Detections in image coordinates
50 | ===============================
51 |
52 | If you want to use Picterra with images that are not georeferenced and get the detector
53 | outputs in (x, y) coordinates, have a look at our `nongeo_imagery notebook `_ .
54 |
55 | More examples
56 | =============
57 |
58 | Check the `examples directory `_ of our GitHub repo.
--------------------------------------------------------------------------------
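
The `ResultsPage` semantics described in the "Listing entities" section above can be exercised end to end; a minimal sketch using `list_detectors()` (as in `examples/detectors_management.py`), assuming `PICTERRA_API_KEY` is set:

```python
from picterra import APIClient

client = APIClient()

page = client.list_detectors()
while page is not None:
    print("Page with %d detectors" % len(page))  # builtin len works on a page
    if len(page) > 0:
        print("First detector on this page:", page[0]["name"])  # 0-indexed access
    for detector in page:  # a page is iterable
        print(detector["id"])
    page = page.next()  # the following ResultsPage, or None after the last page
```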
/docs/index.rst:
--------------------------------------------------------------------------------
1 | Picterra Python API
2 | ===================
3 |
4 | Easily integrate state-of-the-art machine learning models in your app
5 |
6 | .. raw:: html
7 |
8 |
9 |
10 | Learn more about `Picterra `_
11 |
12 | If you are looking for our REST API documentation, `look here `_
13 |
14 | ----
15 |
16 | .. toctree::
17 | :maxdepth: 2
18 | :caption: Contents:
19 |
20 | getting_started
21 | api
22 |
23 |
--------------------------------------------------------------------------------
/docs/make.bat:
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 |
3 | pushd %~dp0
4 |
5 | REM Command file for Sphinx documentation
6 |
7 | if "%SPHINXBUILD%" == "" (
8 | set SPHINXBUILD=sphinx-build
9 | )
10 | set SOURCEDIR=.
11 | set BUILDDIR=_build
12 |
13 | if "%1" == "" goto help
14 |
15 | %SPHINXBUILD% >NUL 2>NUL
16 | if errorlevel 9009 (
17 | echo.
18 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
19 | echo.installed, then set the SPHINXBUILD environment variable to point
20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you
21 | echo.may add the Sphinx directory to PATH.
22 | echo.
23 | echo.If you don't have Sphinx installed, grab it from
24 | echo.http://sphinx-doc.org/
25 | exit /b 1
26 | )
27 |
28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
29 | goto end
30 |
31 | :help
32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
33 |
34 | :end
35 | popd
36 |
--------------------------------------------------------------------------------
/docs/requirements.txt:
--------------------------------------------------------------------------------
1 | sphinx==7.2.6
2 | sphinx-rtd-theme
3 | sphinx-autobuild
--------------------------------------------------------------------------------
/examples/data/outline.geojson:
--------------------------------------------------------------------------------
1 | {
2 | "type": "FeatureCollection",
3 | "features": [
4 | {
5 | "type": "Feature",
6 | "properties": {},
7 | "geometry": {
8 | "type": "Polygon",
9 | "coordinates": [
10 | [
11 | [
12 | 8.714802861213684,
13 | 47.74745244535212
14 | ],
15 | [
16 | 8.714770674705505,
17 | 47.74739833982634
18 | ],
19 | [
20 | 8.714942336082457,
21 | 47.747373090561716
22 | ],
23 | [
24 | 8.714987933635712,
25 | 47.74745424886868
26 | ],
27 | [
28 | 8.714802861213684,
29 | 47.74745244535212
30 | ]
31 | ]
32 | ]
33 | }
34 | }
35 | ]
36 | }
--------------------------------------------------------------------------------
/examples/data/outline2.geojson:
--------------------------------------------------------------------------------
1 | {
2 | "type": "FeatureCollection",
3 | "name": "outline2",
4 | "features": [
5 | { "type": "Feature", "properties": { }, "geometry": { "type": "Polygon", "coordinates": [ [ [ 8.714555092482669, 47.74767278684682 ], [ 8.714546040465327, 47.747638841781779 ], [ 8.714588471796624, 47.747629789764439 ], [ 8.714595260809631, 47.74767278684682 ], [ 8.714555092482669, 47.74767278684682 ] ] ] } },
6 | { "type": "Feature", "properties": { }, "geometry": { "type": "Polygon", "coordinates": [ [ [ 8.714630903127921, 47.74767052384248 ], [ 8.714627508621417, 47.747636013026359 ], [ 8.714671071454882, 47.747635447275279 ], [ 8.714673334459219, 47.74766147182514 ], [ 8.714630903127921, 47.74767052384248 ] ] ] } },
7 | { "type": "Feature", "properties": { }, "geometry": { "type": "Polygon", "coordinates": [ [ [ 8.714807983217202, 47.747662037576227 ], [ 8.714807983217202, 47.747634315773112 ], [ 8.714844191286575, 47.747634315773112 ], [ 8.714842494033324, 47.74766147182514 ], [ 8.714807983217202, 47.747662037576227 ] ] ] } }
8 | ]
9 | }
10 |
--------------------------------------------------------------------------------
/examples/data/outline3.geojson:
--------------------------------------------------------------------------------
1 | {
2 | "type": "FeatureCollection",
3 | "name": "outline3",
4 | "features": [
5 | { "type": "Feature", "properties": { }, "geometry": { "type": "Polygon", "coordinates": [ [ [ 8.714583380036869, 47.747500798517294 ], [ 8.714557921238089, 47.747439697400225 ], [ 8.714663150939707, 47.747434039889384 ], [ 8.714664282441875, 47.74749627250862 ], [ 8.714583380036869, 47.747500798517294 ] ], [ [ 8.714588471796624, 47.747486513302427 ], [ 8.714585643041204, 47.747477461285079 ], [ 8.714594977934089, 47.74747562259406 ], [ 8.714598513878364, 47.747484250298086 ], [ 8.714588471796624, 47.747486513302427 ] ], [ [ 8.714621426797265, 47.74747406677858 ], [ 8.714615910724195, 47.747454265490639 ], [ 8.714645188342791, 47.747450022357505 ], [ 8.714650562978088, 47.747469965083219 ], [ 8.714621426797265, 47.74747406677858 ] ] ] } }
6 | ]
7 | }
8 |
--------------------------------------------------------------------------------
/examples/data/plots_analysis/example_plots.geojson:
--------------------------------------------------------------------------------
1 | {
2 | "type": "FeatureCollection",
3 | "features": [
4 | {
5 | "type": "Feature",
6 | "properties": { "plot_id": "PLOT-1"},
7 | "geometry": {
8 | "coordinates": [
9 | [
10 | [
11 | -49.27362953726589,
12 | -2.616445544400051
13 | ],
14 | [
15 | -49.28121040069351,
16 | -2.6284004278355297
17 | ],
18 | [
19 | -49.271919906343896,
20 | -2.627524039887618
21 | ],
22 | [
23 | -49.26726341159804,
24 | -2.6196814639718013
25 | ],
26 | [
27 | -49.27362953726589,
28 | -2.616445544400051
29 | ]
30 | ]
31 | ],
32 | "type": "Polygon"
33 | }
34 | },
35 | {
36 | "type": "Feature",
37 | "properties": { "plot_id": "PLOT-2"},
38 | "geometry": {
39 | "coordinates": [
40 | [
41 | [
42 | -49.26680302670468,
43 | -2.620018411983054
44 | ],
45 | [
46 | -49.26898505564435,
47 | -2.6240857701543234
48 | ],
49 | [
50 | -49.26509339578354,
51 | -2.625613834998731
52 | ],
53 | [
54 | -49.26291136684472,
55 | -2.6214565953891764
56 | ],
57 | [
58 | -49.26680302670468,
59 | -2.620018411983054
60 | ]
61 | ]
62 | ],
63 | "type": "Polygon"
64 | }
65 | }
66 | ]
67 | }
--------------------------------------------------------------------------------
/examples/data/raster1.tif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Picterra/picterra-python/8ee780014ed2867f7ce03ba3960f73ad6efe737a/examples/data/raster1.tif
--------------------------------------------------------------------------------
/examples/data/training_area.geojson:
--------------------------------------------------------------------------------
1 | {
2 | "type": "FeatureCollection",
3 | "features": [
4 | { "type": "Feature", "properties": { "type": null }, "geometry": { "type": "Polygon", "coordinates": [ [ [ 8.71477033469016, 47.747693064169439 ], [ 8.714753220903143, 47.747369138417419 ], [ 8.715131267184505, 47.747360808160593 ], [ 8.715123611819278, 47.747686051231959 ], [ 8.71477033469016, 47.747693064169439 ] ] ] } }
5 | ]
6 | }
7 |
--------------------------------------------------------------------------------
/examples/data/validation_area.geojson:
--------------------------------------------------------------------------------
1 | {
2 | "type": "FeatureCollection",
3 | "features": [
4 | { "type": "Feature", "properties": { "type": null }, "geometry": { "type": "Polygon", "coordinates": [ [ [ 8.71452123764978, 47.747706421540165 ], [ 8.714504123862763, 47.747382495788145 ], [ 8.714751102777912, 47.747382686491655 ], [ 8.714751278242293, 47.7477060728949 ], [ 8.71452123764978, 47.747706421540165 ] ] ] } }
5 | ]
6 | }
7 |
--------------------------------------------------------------------------------
/examples/data/xy-test-image.jpeg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Picterra/picterra-python/8ee780014ed2867f7ce03ba3960f73ad6efe737a/examples/data/xy-test-image.jpeg
--------------------------------------------------------------------------------
/examples/detectors_management.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | from picterra import APIClient
4 |
5 | # Set the PICTERRA_API_KEY environment variable to define your API key
6 | client = APIClient()
7 |
8 | # Create a new detector (its type is 'count' by default)
9 | detector_id = client.create_detector("My first detector")
10 |
11 | # Edit the above detector
12 | client.edit_detector(detector_id, "Renamed detector", "segmentation", "bbox", 1000)
13 |
14 | # List existing detectors
15 | detectors_page_1 = client.list_detectors()
16 | print("Page has " + str(len(detectors_page_1)) + " elements")
17 | d = detectors_page_1[0]
18 | print(
19 | "detector id=%s, name=%s, detection_type=%s, output_type=%s, training_steps=%d"
20 | % (
21 | d["id"],
22 | d["name"],
23 | d["configuration"]["detection_type"],
24 | d["configuration"]["output_type"],
25 | d["configuration"]["training_steps"],
26 | )
27 | )
28 |
--------------------------------------------------------------------------------
/examples/nongeo_results.geojson:
--------------------------------------------------------------------------------
1 | { "type": "MultiPolygon", "coordinates": [ [ [ [ 0.000067822803951, -0.00006782280396 ], [ 0.00002829693145, -0.000066026173389 ], [ 0.000010330625767, -0.00006243291226 ], [ 0.000008533995199, -0.000057941335827 ], [ 0.000000449157642, -0.000061534596968 ], [ 0.000000449157642, -0.000050754813557 ], [ 0.000000449157642, -0.000053449759419 ], [ 0.000001347472926, -0.000053449759419 ], [ 0.00000224578821, -0.000052551444127 ], [ 0.000006737364631, -0.000056144705256 ], [ 0.000007635679915, -0.000045364921857 ], [ 0.000014822202188, -0.000040873345424 ], [ 0.000014822202188, -0.000034585138445 ], [ 0.000031890192586, -0.000027398616162 ], [ 0.000076805956792, -0.000025601985604 ], [ 0.000112738568157, -0.000030093562025 ], [ 0.00013519645026, -0.000029195246746 ], [ 0.000141484657249, -0.000039076714866 ], [ 0.000156756017079, -0.000031890192596 ], [ 0.000155857701795, -0.000025601985604 ], [ 0.000160349278215, -0.000021110409184 ], [ 0.000194485259012, -0.000016618832763 ], [ 0.000230417870377, -0.000015720517484 ], [ 0.00023760439265, -0.000018415463334 ], [ 0.000234909446797, -0.000022907039742 ], [ 0.000236706077365, -0.000025601985604 ], [ 0.000251079121911, -0.000030991877304 ], [ 0.000266350481741, -0.000029195246746 ], [ 0.000276231949867, -0.000033686823154 ], [ 0.000294198255549, -0.000036381769003 ], [ 0.00028791004856, -0.000050754813557 ], [ 0.000278028580435, -0.000053449759419 ], [ 0.000240299338502, -0.000055246389977 ], [ 0.000225027978672, -0.000048958182986 ], [ 0.00022233303282, -0.000045364921857 ], [ 0.000216943141115, -0.000045364921857 ], [ 0.00019179031316, -0.000053449759419 ], [ 0.000172027376909, -0.000054348074685 ], [ 0.00016573916992, -0.000049856498265 ], [ 0.000160349278215, -0.000050754813557 ], [ 0.000149569494806, -0.000055246389977 ], [ 0.000145077918385, -0.000061534596968 ], [ 0.000137891396112, -0.000063331227539 ], [ 0.000067822803951, -0.00006782280396 ], [ 0.000067822803951, -0.00006782280396 ] ] ], [ [ [ 0.000842170578862, -0.000046263237136 ], [ 0.000790966607667, -0.000044466606565 ], [ 0.000769407040848, -0.000039076714866 ], [ 0.000733474429484, -0.000039975030145 ], [ 0.000714609808517, -0.000038178399574 ], [ 0.000711016547381, -0.000034585138445 ], [ 0.000714609808517, -0.000025601985604 ], [ 0.000727186222495, -0.000022008724463 ], [ 0.000749644104598, -0.000021110409184 ], [ 0.000768508725564, -0.000026500300896 ], [ 0.000803543021645, -0.000027398616162 ], [ 0.000826000903748, -0.000019313778613 ], [ 0.000862831830397, -0.000021110409184 ], [ 0.000867323406817, -0.000030093562025 ], [ 0.000884391397216, -0.000033686823154 ], [ 0.000890679604205, -0.000033686823154 ], [ 0.000898764441762, -0.000028296931454 ], [ 0.000929307161422, -0.000030093562025 ], [ 0.000929307161422, -0.000036381769003 ], [ 0.000904154333466, -0.000036381769003 ], [ 0.000898764441762, -0.000041771660716 ], [ 0.000879001505511, -0.000040873345424 ], [ 0.000842170578862, -0.000046263237136 ], [ 0.000842170578862, -0.000046263237136 ] ] ], [ [ [ 0.001035308364948, -0.000049856498265 ], [ 0.001020037005118, -0.000045364921857 ], [ 0.001004765645288, -0.000049856498265 ], [ 0.000963443142218, -0.000046263237136 ], [ 0.000949070097672, -0.000039076714866 ], [ 0.000947273467104, -0.000030991877304 ], [ 0.001018240374549, -0.000027398616162 ], [ 0.001037104995516, -0.000029195246746 ], [ 0.001075732552733, -0.000022008724463 ], [ 0.001106275272393, -0.000021110409184 ], [ 0.001143106199042, -0.000023805355034 ], [ 0.001177242179839, -0.000033686823154 ], [ 
0.001174547233986, -0.000042669975995 ], [ 0.001161970820009, -0.000046263237136 ], [ 0.001144004514326, -0.000044466606565 ], [ 0.001035308364948, -0.000049856498265 ], [ 0.001035308364948, -0.000049856498265 ] ] ], [ [ [ 0.00120598826893, -0.000063331227539 ], [ 0.001188021963248, -0.00006243291226 ], [ 0.001178140495123, -0.000056144705256 ], [ 0.001179038810407, -0.000030991877304 ], [ 0.001184428702112, -0.000034585138445 ], [ 0.00120598826893, -0.000030991877304 ], [ 0.001212276475919, -0.000032788507875 ], [ 0.001215869737056, -0.000037280084295 ], [ 0.001223954574613, -0.000038178399574 ], [ 0.001227547835749, -0.000034585138445 ], [ 0.001254497294273, -0.000030093562025 ], [ 0.001311989472457, -0.000029195246746 ], [ 0.001331752408707, -0.000033686823154 ], [ 0.001341633876833, -0.000040873345424 ], [ 0.001339837246264, -0.000045364921857 ], [ 0.001325464201718, -0.000051653128836 ], [ 0.001276056861092, -0.000050754813557 ], [ 0.001253598978989, -0.000054348074685 ], [ 0.001223056259329, -0.000054348074685 ], [ 0.00120598826893, -0.000063331227539 ], [ 0.00120598826893, -0.000063331227539 ] ] ], [ [ [ 0.000534048436409, -0.0000821958485 ], [ 0.000501709086181, -0.00007950090265 ], [ 0.000464878159532, -0.0000821958485 ], [ 0.000437030385724, -0.000076805956801 ], [ 0.000410979242485, -0.000075907641509 ], [ 0.000382233153393, -0.000071416065088 ], [ 0.000356182010153, -0.000059737966398 ], [ 0.000334622443335, -0.000061534596968 ], [ 0.00033372412805, -0.000055246389977 ], [ 0.000336419073903, -0.000049856498265 ], [ 0.000368758424131, -0.000048958182986 ], [ 0.000393012936802, -0.000044466606565 ], [ 0.000393012936802, -0.000048059867707 ], [ 0.000413674188337, -0.000056144705256 ], [ 0.000441521962145, -0.000058839651106 ], [ 0.000473861312373, -0.000057043020535 ], [ 0.000483742780498, -0.00006243291226 ], [ 0.00051248886959, -0.000064229542818 ], [ 0.000518777076579, -0.000066924488668 ], [ 0.000522370337716, -0.000072314380367 ], [ 0.000536743382261, -0.000076805956801 ], [ 0.000534048436409, -0.0000821958485 ], [ 0.000534048436409, -0.0000821958485 ] ] ], [ [ [ 0.000942781890683, -0.000111840252873 ], [ 0.000879001505511, -0.000105552045882 ], [ 0.000867323406817, -0.000097467208332 ], [ 0.000863730145681, -0.00008938237077 ], [ 0.000858340253976, -0.0000875857402 ], [ 0.000817017750907, -0.000088484055491 ], [ 0.000809831228634, -0.000086687424921 ], [ 0.000805339652213, -0.000081297533221 ], [ 0.000790966607667, -0.000083094163779 ], [ 0.000799051445224, -0.000078602587359 ], [ 0.000807136282781, -0.000068721119239 ], [ 0.000807136282781, -0.000064229542818 ], [ 0.000822407642611, -0.000061534596968 ], [ 0.000827797534316, -0.00006512785811 ], [ 0.000853848677556, -0.000066924488668 ], [ 0.000860136884544, -0.000064229542818 ], [ 0.000876306559659, -0.00006961943453 ], [ 0.000887984658352, -0.00006961943453 ], [ 0.000896069495909, -0.000073212695659 ], [ 0.000907747594603, -0.000088484055491 ], [ 0.000928408846138, -0.000093873947204 ], [ 0.000950866728241, -0.00009477226247 ], [ 0.00099218923131, -0.00008938237077 ], [ 0.000998477438299, -0.000091179001341 ], [ 0.000997579123015, -0.000101958784753 ], [ 0.000994884177162, -0.000103755415324 ], [ 0.000942781890683, -0.000111840252873 ], [ 0.000942781890683, -0.000111840252873 ] ] ], [ [ [ 0.001055071301198, -0.000128009927984 ], [ 0.001050579724778, -0.000128009927984 ], [ 0.001046986463641, -0.000123518351564 ], [ 0.001045189833073, -0.000110043622315 ], [ 0.001073037606881, -0.000086687424921 ], [ 0.001113461794666, 
-0.00007770427208 ], [ 0.001143106199042, -0.00007950090265 ], [ 0.001148496090747, -0.000081297533221 ], [ 0.001151191036599, -0.000085789109642 ], [ 0.001150292721315, -0.000092077316633 ], [ 0.001139512937906, -0.000103755415324 ], [ 0.00112513989336, -0.000111840252873 ], [ 0.001091003912563, -0.000123518351564 ], [ 0.001078427498585, -0.000122620036285 ], [ 0.001055071301198, -0.000128009927984 ], [ 0.001055071301198, -0.000128009927984 ] ] ], [ [ [ 0.001325464201718, -0.000152264440657 ], [ 0.001320972625298, -0.000151366125378 ], [ 0.001313786103025, -0.000139688026688 ], [ 0.001315582733593, -0.000126213297413 ], [ 0.001311989472457, -0.000110043622315 ], [ 0.001314684418309, -0.000100162154182 ], [ 0.001325464201718, -0.000091179001341 ], [ 0.00133444735456, -0.000091179001341 ], [ 0.001355108606094, -0.000097467208332 ], [ 0.001363193443651, -0.000110941937594 ], [ 0.001363193443651, -0.000129806558555 ], [ 0.001358701867231, -0.000137891396117 ], [ 0.001351515344958, -0.000143281287816 ], [ 0.001325464201718, -0.000152264440657 ], [ 0.001325464201718, -0.000152264440657 ] ] ], [ [ [ 0.000056144705257, -0.000151366125378 ], [ 0.000034585138439, -0.000151366125378 ], [ 0.000023805355029, -0.000146874548958 ], [ 0.000027398616166, -0.000136094765546 ], [ 0.000037280084291, -0.000130704873847 ], [ 0.000066026173383, -0.000123518351564 ], [ 0.000139688026681, -0.000114535198735 ], [ 0.000167535800488, -0.000104653730603 ], [ 0.000199875150717, -0.000098365523611 ], [ 0.000229519555093, -0.000098365523611 ], [ 0.000253774067764, -0.000101958784753 ], [ 0.000336419073903, -0.000101060469474 ], [ 0.000387623045098, -0.000103755415324 ], [ 0.000517878761295, -0.000101060469474 ], [ 0.000543929904534, -0.00009477226247 ], [ 0.000574472624194, -0.00009477226247 ], [ 0.000595133875729, -0.000101958784753 ], [ 0.000644541216356, -0.000106450361161 ], [ 0.000694846872266, -0.000115433514014 ], [ 0.000700236763971, -0.000119925090435 ], [ 0.000695745187551, -0.000129806558555 ], [ 0.000681372143005, -0.000135196450267 ], [ 0.000618490073116, -0.000127111612705 ], [ 0.000570879363058, -0.000125314982134 ], [ 0.000517878761295, -0.000128908243276 ], [ 0.000450505114986, -0.000125314982134 ], [ 0.000394809567371, -0.000131603189126 ], [ 0.000363368532426, -0.000127111612705 ], [ 0.000335520758619, -0.000127111612705 ], [ 0.00027263868873, -0.000136993080825 ], [ 0.000225027978672, -0.000148671179529 ], [ 0.000117230144578, -0.000146874548958 ], [ 0.000064229542815, -0.000148671179529 ], [ 0.000056144705257, -0.000151366125378 ], [ 0.000056144705257, -0.000151366125378 ] ] ], [ [ [ 0.001206886584215, -0.000181010529764 ], [ 0.001199700061942, -0.000179213899193 ], [ 0.001195208485521, -0.000173824007481 ], [ 0.001199700061942, -0.000163942539348 ], [ 0.001197903431373, -0.000160349278207 ], [ 0.001187123647964, -0.000154061071228 ], [ 0.001178140495123, -0.000154061071228 ], [ 0.001178140495123, -0.00014777286425 ], [ 0.001172750603418, -0.000151366125378 ], [ 0.001161072504724, -0.000150467810087 ], [ 0.001158377558872, -0.000134298134988 ], [ 0.001165564081145, -0.000129806558555 ], [ 0.001178140495123, -0.000130704873847 ], [ 0.001179038810407, -0.000127111612705 ], [ 0.001188021963248, -0.000127111612705 ], [ 0.001217666367624, -0.000115433514014 ], [ 0.001244615826148, -0.000116331829293 ], [ 0.001248209087284, -0.000126213297413 ], [ 0.001247310772, -0.000136993080825 ], [ 0.001257192240125, -0.000145077918387 ], [ 0.001251802348421, -0.000152264440657 ], [ 0.001254497294273, 
-0.000157654332357 ], [ 0.001206886584215, -0.000181010529764 ], [ 0.001206886584215, -0.000181010529764 ] ] ], [ [ [ 0.000787373346531, -0.000337317389189 ], [ 0.000741559267041, -0.000331029182198 ], [ 0.000731677798915, -0.000327435921069 ], [ 0.000698440133403, -0.000324740975207 ], [ 0.000628371541242, -0.000325639290499 ], [ 0.000583455777036, -0.000329232551628 ], [ 0.000556506318512, -0.000317554452949 ], [ 0.000549319796239, -0.000309469615388 ], [ 0.000555608003228, -0.000304978038967 ], [ 0.000574472624194, -0.000299588147255 ], [ 0.000606811974423, -0.000300486462534 ], [ 0.000647236162208, -0.000295096570834 ], [ 0.000661609206754, -0.000295994886113 ], [ 0.000679575512436, -0.000289706679122 ], [ 0.000704728340392, -0.000302283093105 ], [ 0.000734372744768, -0.000299588147255 ], [ 0.000741559267041, -0.000287910048564 ], [ 0.000773898617269, -0.000279825211015 ], [ 0.00077839019369, -0.000269943742882 ], [ 0.00077839019369, -0.0002519774372 ], [ 0.000790068292383, -0.000242994284359 ], [ 0.00081342448977, -0.000238502707939 ], [ 0.0008286958496, -0.000227722924527 ], [ 0.000848458785851, -0.000228621239819 ], [ 0.000890679604205, -0.000222333032827 ], [ 0.000916730747444, -0.000207061672995 ], [ 0.000976019556196, -0.000195383574304 ], [ 0.001087410651427, -0.000194485259013 ], [ 0.001111665164098, -0.000183705475601 ], [ 0.001131428100349, -0.000182807160322 ], [ 0.001147597775463, -0.000176518953331 ], [ 0.001153885982452, -0.000181010529764 ], [ 0.001164665765861, -0.000182807160322 ], [ 0.001169157342282, -0.000189095367313 ], [ 0.001170055657566, -0.000193586943734 ], [ 0.001165564081145, -0.000200773466004 ], [ 0.001149394406031, -0.000212451564695 ], [ 0.001101783695973, -0.000233112816226 ], [ 0.001088308966711, -0.000234011131505 ], [ 0.001082919075006, -0.000224129663398 ], [ 0.001067647715176, -0.000222333032827 ], [ 0.001000274068867, -0.000229519555098 ], [ 0.000974222925628, -0.000239401023218 ], [ 0.000990392600742, -0.0002519774372 ], [ 0.000975121240912, -0.000256469013621 ], [ 0.00093559536841, -0.000258265644179 ], [ 0.000886188027784, -0.000248384176059 ], [ 0.000870916667954, -0.000248384176059 ], [ 0.000858340253976, -0.0002519774372 ], [ 0.000849357101135, -0.000260062274762 ], [ 0.000847560470567, -0.00026904542759 ], [ 0.000853848677556, -0.000278926895723 ], [ 0.000862831830397, -0.000282520156852 ], [ 0.000865526776249, -0.000297791516684 ], [ 0.000863730145681, -0.000310367930667 ], [ 0.000851153731703, -0.000318452768228 ], [ 0.000837679002441, -0.000319351083508 ], [ 0.000829594164884, -0.000330130866919 ], [ 0.000787373346531, -0.000337317389189 ], [ 0.000787373346531, -0.000337317389189 ] ] ], [ [ [ 0.000055246389973, -0.000329232551628 ], [ 0.000047161552416, -0.000322046029357 ], [ 0.000047161552416, -0.000316656137645 ], [ 0.00005883965111, -0.000297791516684 ], [ 0.000083094163781, -0.000287011733272 ], [ 0.000101060469463, -0.000284316787435 ], [ 0.000161247593499, -0.000265452166462 ], [ 0.00018101052975, -0.000262757220612 ], [ 0.00019628188958, -0.000256469013621 ], [ 0.000204366727137, -0.000245689230209 ], [ 0.000214248195263, -0.000242095969067 ], [ 0.000220536402251, -0.000245689230209 ], [ 0.000235807762081, -0.000246587545488 ], [ 0.000247485860775, -0.000242095969067 ], [ 0.000279825211003, -0.000237604392647 ], [ 0.000376843261688, -0.000229519555098 ], [ 0.000397504513223, -0.000224129663398 ], [ 0.000461284898395, -0.000216943141115 ], [ 0.000546624850387, -0.000215146510557 ], [ 0.000596032191013, -0.000205265042424 ], [ 
0.000629269856526, -0.000205265042424 ], [ 0.000688558665278, -0.000197180204862 ], [ 0.00074784747403, -0.000198078520141 ], [ 0.000754135681018, -0.000194485259013 ], [ 0.000754135681018, -0.000189095367313 ], [ 0.000764915464428, -0.000188197052034 ], [ 0.000773898617269, -0.000181908845043 ], [ 0.000799949760508, -0.000198976835433 ], [ 0.000826000903748, -0.000203468411854 ], [ 0.0008286958496, -0.000209756618845 ], [ 0.000827797534316, -0.000216943141115 ], [ 0.000762220518575, -0.000218739771686 ], [ 0.000754135681018, -0.000221434717536 ], [ 0.000725389591927, -0.000218739771686 ], [ 0.000674185620732, -0.000222333032827 ], [ 0.000650829423345, -0.000220536402257 ], [ 0.000642744585788, -0.000227722924527 ], [ 0.000689456980562, -0.000227722924527 ], [ 0.000769407040848, -0.000234011131505 ], [ 0.000773898617269, -0.000239401023218 ], [ 0.000773000301985, -0.000248384176059 ], [ 0.000775695247837, -0.000250180806629 ], [ 0.000769407040848, -0.00025467238305 ], [ 0.000741559267041, -0.000258265644179 ], [ 0.000728084537779, -0.000253774067771 ], [ 0.000716406439085, -0.000253774067771 ], [ 0.000703830025108, -0.0002573673289 ], [ 0.000703830025108, -0.000266350481741 ], [ 0.000722694646074, -0.000275333634582 ], [ 0.000727186222495, -0.000282520156852 ], [ 0.000727186222495, -0.000287011733272 ], [ 0.000719101384938, -0.000289706679122 ], [ 0.000680473827721, -0.000287910048564 ], [ 0.000609506920275, -0.000275333634582 ], [ 0.000526861914136, -0.000286113417993 ], [ 0.000519675391863, -0.000278028580431 ], [ 0.000511590554306, -0.000278028580431 ], [ 0.000500810770897, -0.000285215102714 ], [ 0.000478352888794, -0.000287011733272 ], [ 0.000459488267827, -0.000302283093105 ], [ 0.000437030385724, -0.000302283093105 ], [ 0.000401996089643, -0.000293299940264 ], [ 0.000367860108847, -0.000291503309693 ], [ 0.000348097172596, -0.000295096570834 ], [ 0.000294198255549, -0.000296893201405 ], [ 0.00028341847214, -0.000303181408384 ], [ 0.00028341847214, -0.000306774669525 ], [ 0.000289706679129, -0.000308571300096 ], [ 0.000290604994413, -0.000311266245946 ], [ 0.000282520156856, -0.000313062876516 ], [ 0.000278926895719, -0.000307672984804 ], [ 0.000270842058162, -0.000305876354246 ], [ 0.00026814711231, -0.000301384777813 ], [ 0.000260960590037, -0.000298689831976 ], [ 0.000215146510547, -0.000299588147255 ], [ 0.000205265042421, -0.000295096570834 ], [ 0.000183705475602, -0.000293299940264 ], [ 0.000177417268614, -0.000287011733272 ], [ 0.000163942539352, -0.000286113417993 ], [ 0.000143281287817, -0.000296893201405 ], [ 0.000118128459862, -0.000318452768228 ], [ 0.000083094163781, -0.000326537605778 ], [ 0.000055246389973, -0.000329232551628 ], [ 0.000055246389973, -0.000329232551628 ] ] ], [ [ [ 0.001226649520465, -0.000324740975207 ], [ 0.001214073106488, -0.000324740975207 ], [ 0.001209581530067, -0.000319351083508 ], [ 0.001178140495123, -0.000305876354246 ], [ 0.001178140495123, -0.000303181408384 ], [ 0.001153885982452, -0.000295096570834 ], [ 0.001153885982452, -0.000282520156852 ], [ 0.001169157342282, -0.000271740373453 ], [ 0.001180835440975, -0.000269943742882 ], [ 0.001200598377226, -0.000260960590028 ], [ 0.001227547835749, -0.000260062274762 ], [ 0.001229344466318, -0.000263655535891 ], [ 0.001225751205181, -0.000272638688732 ], [ 0.001215869737056, -0.000277130265152 ], [ 0.001206886584215, -0.000286113417993 ], [ 0.001211378160635, -0.000292401624985 ], [ 0.001235632673306, -0.000308571300096 ], [ 0.001236530988591, -0.000317554452949 ], [ 0.001226649520465, 
-0.000324740975207 ], [ 0.001226649520465, -0.000324740975207 ] ] ], [ [ [ 0.001009257221708, -0.000303181408384 ], [ 0.000975121240912, -0.000298689831976 ], [ 0.000925713900285, -0.000300486462534 ], [ 0.000912239171023, -0.000287011733272 ], [ 0.000912239171023, -0.000280723526281 ], [ 0.00091583243216, -0.000277130265152 ], [ 0.000927510530853, -0.000276231949861 ], [ 0.001005663960572, -0.000286113417993 ], [ 0.001046986463641, -0.000286113417993 ], [ 0.001051478040062, -0.000288808363843 ], [ 0.00104878309421, -0.000295096570834 ], [ 0.001009257221708, -0.000303181408384 ], [ 0.001009257221708, -0.000303181408384 ] ] ], [ [ [ 0.000869120037386, -0.000357080325442 ], [ 0.000850255416419, -0.000354385379592 ], [ 0.000843068894146, -0.000335520758619 ], [ 0.000851153731703, -0.000328334236349 ], [ 0.000852052046987, -0.000318452768228 ], [ 0.000865526776249, -0.000309469615388 ], [ 0.000880798136079, -0.000304978038967 ], [ 0.000916730747444, -0.000307672984804 ], [ 0.000968833033923, -0.000316656137645 ], [ 0.001029020157959, -0.000314859507087 ], [ 0.001062257823471, -0.000307672984804 ], [ 0.001091902227847, -0.000306774669525 ], [ 0.001120648316939, -0.000300486462534 ], [ 0.001131428100349, -0.000301384777813 ], [ 0.001135919676769, -0.000306774669525 ], [ 0.001134123046201, -0.000314859507087 ], [ 0.001126936523928, -0.000319351083508 ], [ 0.001085614020858, -0.000331927497477 ], [ 0.001055969616482, -0.000337317389189 ], [ 0.001037104995516, -0.000349893803172 ], [ 0.000985901024321, -0.000345402226751 ], [ 0.000947273467104, -0.00034899548788 ], [ 0.000902357702898, -0.00034899548788 ], [ 0.000879899820795, -0.000352588749022 ], [ 0.000869120037386, -0.000357080325442 ], [ 0.000869120037386, -0.000357080325442 ] ] ], [ [ [ 0.001212276475919, -0.000401996089647 ], [ 0.001191615224385, -0.000401097774368 ], [ 0.001178140495123, -0.000395707882643 ], [ 0.001178140495123, -0.000393012936793 ], [ 0.001130529785064, -0.000394809567377 ], [ 0.001119750001655, -0.000386724729815 ], [ 0.001118851686371, -0.000381334838103 ], [ 0.001128733154496, -0.000377741576974 ], [ 0.001165564081145, -0.000374148315845 ], [ 0.001178140495123, -0.000375944946403 ], [ 0.001179038810407, -0.000372351685274 ], [ 0.001202395007794, -0.000371453369983 ], [ 0.001214971421772, -0.000364266847712 ], [ 0.001223954574613, -0.00035618201015 ], [ 0.001230242781602, -0.000345402226751 ], [ 0.001238327619159, -0.000340012335039 ], [ 0.001270666969387, -0.000336419073898 ], [ 0.001282345068081, -0.00034180896561 ], [ 0.00129312485149, -0.000342707280902 ], [ 0.001314684418309, -0.00033462244334 ], [ 0.001333549039275, -0.000321147714066 ], [ 0.001364091758935, -0.000314859507087 ], [ 0.001363193443651, -0.00033462244334 ], [ 0.001314684418309, -0.000355283694859 ], [ 0.001301209689047, -0.000370555054704 ], [ 0.001224852889897, -0.000394809567377 ], [ 0.001212276475919, -0.000401996089647 ], [ 0.001212276475919, -0.000401996089647 ] ] ], [ [ [ 0.000678677197152, -0.000397504513227 ], [ 0.00064543953164, -0.000393012936793 ], [ 0.000640049639935, -0.000388521360373 ], [ 0.000650829423345, -0.000379538207545 ], [ 0.000718203069654, -0.000375944946403 ], [ 0.000738864321188, -0.000369656739412 ], [ 0.000749644104598, -0.000371453369983 ], [ 0.000774796932553, -0.000365165162991 ], [ 0.00081342448977, -0.000363368532433 ], [ 0.00085474699284, -0.00035618201015 ], [ 0.000868221722102, -0.000357080325442 ], [ 0.000869120037386, -0.000359775271292 ], [ 0.000867323406817, -0.000371453369983 ], [ 0.000858340253976, 
-0.000375046631124 ], [ 0.000832289110737, -0.000375944946403 ], [ 0.000748745789314, -0.000392114621514 ], [ 0.000705626655676, -0.000393012936793 ], [ 0.000678677197152, -0.000397504513227 ], [ 0.000678677197152, -0.000397504513227 ] ] ], [ [ [ 0.000569981047774, -0.000483742780495 ], [ 0.000533150121125, -0.000482844465216 ], [ 0.000456793321975, -0.000464878159521 ], [ 0.000442420277429, -0.000457691637251 ], [ 0.000442420277429, -0.000446911853852 ], [ 0.000448708484418, -0.000443318592711 ], [ 0.00047745457351, -0.000440623646861 ], [ 0.000542133273966, -0.000443318592711 ], [ 0.000562794525501, -0.00043613207044 ], [ 0.00058884566874, -0.000439725331582 ], [ 0.000640049639935, -0.000440623646861 ], [ 0.000695745187551, -0.000435233755161 ], [ 0.000711914862665, -0.000428047232878 ], [ 0.000734372744768, -0.000422657341179 ], [ 0.000850255416419, -0.000400199459076 ], [ 0.000864628460965, -0.000391216306235 ], [ 0.000888882973636, -0.000383131468673 ], [ 0.000899662757046, -0.000375046631124 ], [ 0.000947273467104, -0.000364266847712 ], [ 0.000973324610344, -0.000364266847712 ], [ 0.000985901024321, -0.000376843261682 ], [ 0.001019138689834, -0.000378639892253 ], [ 0.001028121842675, -0.000384928099244 ], [ 0.001023630266254, -0.000395707882643 ], [ 0.001006562275856, -0.000408284296638 ], [ 0.000940985260115, -0.000413674188338 ], [ 0.000903256018182, -0.000410979242488 ], [ 0.000843068894146, -0.000419064080037 ], [ 0.000787373346531, -0.000433437124591 ], [ 0.000756830626871, -0.00045140343026 ], [ 0.000724491276642, -0.000454996691401 ], [ 0.000709219916812, -0.000459488267822 ], [ 0.000684965404141, -0.000460386583114 ], [ 0.000680473827721, -0.000462183213672 ], [ 0.000671490674879, -0.000473861312375 ], [ 0.000664304152606, -0.000476556258225 ], [ 0.000649032792776, -0.000476556258225 ], [ 0.000632863117662, -0.000463979844255 ], [ 0.000601422082718, -0.000463979844255 ], [ 0.000587947353456, -0.000468471420663 ], [ 0.000587049038172, -0.000481047834645 ], [ 0.000569981047774, -0.000483742780495 ], [ 0.000569981047774, -0.000483742780495 ] ] ], [ [ [ 0.00127785349166, -0.000429843863449 ], [ 0.001263480447114, -0.000429843863449 ], [ 0.001259887185978, -0.000427148917599 ], [ 0.001267073708251, -0.000418165764758 ], [ 0.001285040013933, -0.000410979242488 ], [ 0.001286836644501, -0.000407385981347 ], [ 0.001295819797342, -0.000404691035484 ], [ 0.001298514743195, -0.000401097774368 ], [ 0.001362295128367, -0.000401097774368 ], [ 0.00136499007422, -0.000405589350776 ], [ 0.001364091758935, -0.0004217590259 ], [ 0.001328159147571, -0.0004217590259 ], [ 0.001287734959785, -0.00042625060232 ], [ 0.00127785349166, -0.000429843863449 ], [ 0.00127785349166, -0.000429843863449 ] ] ], [ [ [ 0.001127834839212, -0.000479251204075 ], [ 0.001106275272393, -0.000478352888796 ], [ 0.001082919075006, -0.000457691637251 ], [ 0.001076630868017, -0.000442420277432 ], [ 0.001089207281995, -0.000432538809312 ], [ 0.001102682011257, -0.00043164049402 ], [ 0.001105376957109, -0.0004217590259 ], [ 0.001101783695973, -0.000417267449479 ], [ 0.001104478641825, -0.000414572503617 ], [ 0.001124241578076, -0.000407385981347 ], [ 0.001137716307337, -0.000407385981347 ], [ 0.001148496090747, -0.000419962395329 ], [ 0.001150292721315, -0.000432538809312 ], [ 0.001142207883758, -0.00044152196214 ], [ 0.001134123046201, -0.000445115223269 ], [ 0.001124241578076, -0.000445115223269 ], [ 0.001117953371087, -0.000449606799702 ], [ 0.001120648316939, -0.000455895006693 ], [ 0.001134123046201, -0.000464878159521 ], 
[ 0.001134123046201, -0.000473861312375 ], [ 0.001127834839212, -0.000479251204075 ], [ 0.001127834839212, -0.000479251204075 ] ] ], [ [ [ 0.000327435921062, -0.000456793321972 ], [ 0.000304978038959, -0.000455895006693 ], [ 0.000300486462538, -0.000452301745564 ], [ 0.000300486462538, -0.00044601353856 ], [ 0.000310367930663, -0.00043613207044 ], [ 0.000365165162995, -0.000433437124591 ], [ 0.000417267449474, -0.000422657341179 ], [ 0.000437030385724, -0.000424453971737 ], [ 0.00043613207044, -0.000437928700998 ], [ 0.000427148917599, -0.000445115223269 ], [ 0.00037953820754, -0.000447810169131 ], [ 0.000327435921062, -0.000456793321972 ], [ 0.000327435921062, -0.000456793321972 ] ] ], [ [ [ 0.001024528581538, -0.000507997293156 ], [ 0.001018240374549, -0.000507997293156 ], [ 0.001010155536992, -0.000502607401469 ], [ 0.000995782492446, -0.00050440403204 ], [ 0.000985901024321, -0.000501709086177 ], [ 0.000977816186764, -0.000492725933336 ], [ 0.000976019556196, -0.000473861312375 ], [ 0.000970629664491, -0.000465776474813 ], [ 0.000945476836536, -0.000456793321972 ], [ 0.000925713900285, -0.000445115223269 ], [ 0.000932900422558, -0.000439725331582 ], [ 0.000932002107274, -0.000437030385732 ], [ 0.000936493683695, -0.000432538809312 ], [ 0.00094637515182, -0.000425352287029 ], [ 0.000954459989377, -0.000423555656458 ], [ 0.000974222925628, -0.000423555656458 ], [ 0.001003867330004, -0.000435233755161 ], [ 0.001020935320402, -0.00045140343026 ], [ 0.001028121842675, -0.000462183213672 ], [ 0.001028121842675, -0.000481047834645 ], [ 0.001032613419095, -0.000488234356928 ], [ 0.00103351173438, -0.000500810770885 ], [ 0.001029918473243, -0.000506200662598 ], [ 0.001024528581538, -0.000507997293156 ], [ 0.001024528581538, -0.000507997293156 ] ] ], [ [ [ 0.001302108004331, -0.000474759627654 ], [ 0.001278751806944, -0.000472962997083 ], [ 0.001270666969387, -0.000467573105384 ], [ 0.001269768654103, -0.000462183213672 ], [ 0.001273361915239, -0.000456793321972 ], [ 0.001287734959785, -0.000450505114981 ], [ 0.00131917599473, -0.000445115223269 ], [ 0.001350617029674, -0.00044421690799 ], [ 0.001356905236663, -0.000445115223269 ], [ 0.001359600182515, -0.000453200060843 ], [ 0.001364091758935, -0.000450505114981 ], [ 0.001364091758935, -0.000458589952543 ], [ 0.001356006921378, -0.000465776474813 ], [ 0.001329057462855, -0.000472962997083 ], [ 0.001302108004331, -0.000474759627654 ], [ 0.001302108004331, -0.000474759627654 ] ] ], [ [ [ 0.000240299338502, -0.000540336643391 ], [ 0.000218739771683, -0.000540336643391 ], [ 0.000203468411853, -0.000536743382262 ], [ 0.000192688628444, -0.000529556859979 ], [ 0.000192688628444, -0.000517878761288 ], [ 0.00019628188958, -0.00051428550016 ], [ 0.000212451564694, -0.000511590554297 ], [ 0.000230417870377, -0.000501709086177 ], [ 0.000253774067764, -0.000503505716748 ], [ 0.000263655535889, -0.000509793923739 ], [ 0.000264553851173, -0.000519675391859 ], [ 0.0002573673289, -0.000530455175271 ], [ 0.000240299338502, -0.000540336643391 ], [ 0.000240299338502, -0.000540336643391 ] ] ], [ [ [ 0.001211378160635, -0.000572675993613 ], [ 0.001199700061942, -0.000571777678334 ], [ 0.001192513539669, -0.000566387786622 ], [ 0.001191615224385, -0.000557404633794 ], [ 0.001205089953646, -0.000544828219811 ], [ 0.001200598377226, -0.000538540012833 ], [ 0.001179937125691, -0.000527760229408 ], [ 0.001183530386827, -0.000520573707138 ], [ 0.001204191638362, -0.000516082130718 ], [ 0.00121676805234, -0.00051877707658 ], [ 0.001231141096886, -0.000515183815451 ], [ 
0.001247310772, -0.000515183815451 ], [ 0.001261683816546, -0.000506200662598 ], [ 0.001283243383365, -0.000505302347319 ], [ 0.001290429905638, -0.000511590554297 ], [ 0.001309294526604, -0.000519675391859 ], [ 0.001325464201718, -0.000537641697541 ], [ 0.001326362517002, -0.000549319796232 ], [ 0.001320074310014, -0.000552014742082 ], [ 0.001233836042738, -0.000551116426803 ], [ 0.001223056259329, -0.000568184417193 ], [ 0.001211378160635, -0.000572675993613 ], [ 0.001211378160635, -0.000572675993613 ] ] ], [ [ [ 0.000151366125374, -0.000664304152594 ], [ 0.000139688026681, -0.000662507522023 ], [ 0.000137891396112, -0.000652626053903 ], [ 0.000142382972533, -0.000641846270492 ], [ 0.000132501504408, -0.000631964802372 ], [ 0.000125314982135, -0.000630168171801 ], [ 0.000130704873839, -0.000615795127248 ], [ 0.000140586341965, -0.000608608604977 ], [ 0.000163044224068, -0.000609506920269 ], [ 0.000180112214466, -0.000623879964822 ], [ 0.000188197052023, -0.000626574910659 ], [ 0.000193586943728, -0.0006409479552 ], [ 0.000188197052023, -0.000649032792762 ], [ 0.000151366125374, -0.000664304152594 ], [ 0.000151366125374, -0.000664304152594 ] ] ], [ [ [ 0.001220361313476, -0.000666999098457 ], [ 0.001193411854953, -0.000665202467873 ], [ 0.001181733756259, -0.000660710891465 ], [ 0.001176343864555, -0.000663405837315 ], [ 0.001161072504724, -0.000660710891465 ], [ 0.001154784297736, -0.000654422684461 ], [ 0.001154784297736, -0.000640049639934 ], [ 0.001158377558872, -0.000630168171801 ], [ 0.001168259026997, -0.000626574910659 ], [ 0.001177242179839, -0.00063106648708 ], [ 0.001211378160635, -0.000631964802372 ], [ 0.001223056259329, -0.000642744585771 ], [ 0.001222157944045, -0.000651727738624 ], [ 0.001224852889897, -0.000657117630324 ], [ 0.001220361313476, -0.000666999098457 ], [ 0.001220361313476, -0.000666999098457 ] ] ] ] }
--------------------------------------------------------------------------------
/examples/plots_analysis.py:
--------------------------------------------------------------------------------
1 | import datetime
2 | import json
3 |
4 | from picterra import PlotsAnalysisPlatformClient
5 |
6 | # Replace this with the path to a GeoJSON file containing plot geometries
7 | # as a GeoJSON FeatureCollection of Polygons. In particular, each Feature
8 | # should have a unique "id" property.
9 | plots_feature_collection_filename = "data/plots_analysis/example_plots.geojson"
10 |
11 | client = PlotsAnalysisPlatformClient()
12 |
13 | # This will run the "EUDR Cocoa" deforestation risk analysis, discarding any
14 | # deforestation alerts happening after 2022-01-01.
15 | print("Starting analysis...")
16 | results = client.batch_analyze_plots(
17 | plots_feature_collection_filename,
18 | "eudr_cocoa",
19 | datetime.date.fromisoformat("2022-01-01")
20 | )
21 |
22 | # The output of the analysis is a JSON file containing the input plots and their
23 | # associated deforestation risk.
24 | print("Analysis completed:")
25 | print(json.dumps(results, indent=2))
26 |
--------------------------------------------------------------------------------
/examples/raster_management.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # -*- coding: utf-8 -*-
3 |
4 | from pprint import pprint
5 |
6 | from picterra import APIClient
7 |
8 | # Set the PICTERRA_API_KEY environment variable to define your API key
9 | client = APIClient()
10 | # The Id of a folder/project you own
11 | folder_id = "7ec40c11-f181-436a-9d33-d7b3f63e0e0f"
12 | # Upload
13 | local_raster_id = client.upload_raster("data/raster1.tif", name="A nice raster")
14 | print("Uploaded local raster=", local_raster_id)
15 | # Get the first batch of most recent images
16 | first_page = client.list_rasters()
17 | for raster in first_page:
18 | pprint("raster %s" % "\n".join(["%s=%s" % item for item in raster.items()]))
19 | # Get the second batch
20 | second_page = first_page.next()
21 | # Get the first page applying a filter
22 | for raster in client.list_rasters(folder_id):
23 | pprint("raster %s" % "\n".join(["%s=%s" % item for item in raster.items()]))
24 | # Upload, editing and removal
25 | local_raster_id = client.upload_raster("data/raster1.tif", name="A short-lived raster")
26 | print("Uploaded a second local raster=", local_raster_id)
27 | # Editing the image's band specification. See https://docs.picterra.ch/imagery/#Multispectral
28 | client.edit_raster(local_raster_id, multispectral_band_specification={
29 | "ranges": [
30 | [0, 128], [0, 128], [0, 128]
31 | ],
32 | "display_bands": [
33 | {"type": "multiband", "name": "default", "bands": [2, 1, 0]}
34 | ]
35 | })
36 | # Deleting the image
37 | client.delete_raster(local_raster_id)
38 | print("Deleted raster=", local_raster_id)
39 |
--------------------------------------------------------------------------------
/examples/training.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import json
4 |
5 | from picterra import APIClient
6 |
7 | # Set the PICTERRA_API_KEY environment variable to define your API key
8 | client = APIClient()
9 |
10 | # Create a new detector (its type is 'count' by default)
11 | detector_id = client.create_detector("My first detector")
12 |
13 | # Upload a training raster for the detector above
14 | raster_id = client.upload_raster("data/raster1.tif", name="a nice raster")
15 | client.add_raster_to_detector(raster_id, detector_id)
16 |
17 | # Add annotations
18 | with open("data/outline.geojson") as f:
19 | outlines = json.load(f)
20 | client.set_annotations(detector_id, raster_id, "outline", outlines)
21 | with open("data/training_area.geojson") as f:
22 | training_areas = json.load(f)
23 | client.set_annotations(detector_id, raster_id, "training_area", training_areas)
24 | with open("data/validation_area.geojson") as f:
25 | validation_areas = json.load(f)
26 | client.set_annotations(detector_id, raster_id, "validation_area", validation_areas)
27 |
28 | # Train the detector
29 | client.train_detector(detector_id)
30 |
31 | # At this point your detector is ready to predict: see upload_and_detect.py in order
32 | # to launch a prediction on a raster; you can also use one of the rasters already added above.
33 |
--------------------------------------------------------------------------------
/examples/training_multiclass_upload.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | """
3 | This example shows how to upload outlines/annotations to a multiclass detector
4 |
5 | Instructions to test:
6 | - Upload 'raster1.tif' in a new detector
7 | - Create 2 classes in the detector (it should have 3 classes total)
8 | - Write down the raster/detector id and replace them below
9 | """
10 | import json
11 | from picterra import APIClient
12 |
13 | # TODO: Adapt IDs
14 | DETECTOR_ID = "9a16c150-ae24-4bb6-9378-085955c7a4ac"
15 | RASTER_ID = "89139314-0bc0-4243-9357-b91c502513b2"
16 |
17 | # Set the PICTERRA_API_KEY environment variable to define your API key
18 | client = APIClient()
19 | detector_info = client.get_detector(DETECTOR_ID)
20 |
21 |
22 | def get_class_id(class_name):
23 | for class_info in detector_info["classes"]:
24 | if class_info["name"] == class_name:
25 | return class_info["id"]
26 | raise RuntimeError("Class with name=%s not found" % class_name)
27 |
28 |
29 | def load_annotations(name):
30 | with open("data/%s" % name) as f:
31 | fc = json.load(f)
32 | return fc
33 |
34 |
35 | client.set_annotations(
36 | DETECTOR_ID,
37 | RASTER_ID,
38 | "outline",
39 | load_annotations("outline.geojson"),
40 | class_id=get_class_id("class0"),
41 | )
42 | client.set_annotations(
43 | DETECTOR_ID,
44 | RASTER_ID,
45 | "outline",
46 | load_annotations("outline2.geojson"),
47 | class_id=get_class_id("class1"),
48 | )
49 | client.set_annotations(
50 | DETECTOR_ID,
51 | RASTER_ID,
52 | "outline",
53 | load_annotations("outline3.geojson"),
54 | class_id=get_class_id("class2"),
55 | )
56 |
--------------------------------------------------------------------------------
/examples/upload_and_detect.py:
--------------------------------------------------------------------------------
1 | from picterra import APIClient
2 |
3 | # Replace this with the id of one of your detectors
4 | detector_id = "d552605b-6972-4a68-8d51-91e6cb531c24"
5 | # Replace this with the id of a folder in which the
6 | # raster should be uploaded.
7 | folder_id = "63207fe9-32b8-410f-a72d-00803cca7bf3"
8 |
9 | # Set the PICTERRA_API_KEY environment variable to define your API key
10 | client = APIClient()
11 | print("Uploading raster...")
12 | raster_id = client.upload_raster(
13 | "data/raster1.tif",
14 | name="a nice raster",
15 | folder_id=folder_id,
16 | captured_at="2020-01-01T12:34:45.789Z",
17 | )
18 | print("Upload finished, starting detector...")
19 | result_id = client.run_detector(detector_id, raster_id)
20 | client.download_result_to_feature_collection(result_id, "result.geojson")
21 | print("Detection finished, results are in result.geojson")
22 |
--------------------------------------------------------------------------------
/scripts/lint.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Run this from the project root directory
3 | printf "==== Running flake8\n"
4 | python -m flake8
5 | printf "==== Running mypy\n"
6 | mypy src examples
7 |
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [aliases]
2 | test=pytest
3 |
4 | [tool:pytest]
5 | testpaths = tests
6 | addopts = --verbose
7 |
8 | [flake8]
9 | ignore=E266,W504
10 | max-line-length=100
11 | filename=src
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | # read the contents of your README file
4 | import sys
5 | from pathlib import Path
6 |
7 | from setuptools import find_packages, setup
8 |
9 | this_directory = Path(__file__).parent
10 | long_description = (this_directory / "README.md").read_text()
11 |
12 | if sys.version_info >= (3, 8):
13 | lint_deps = ["flake8", "mypy==1.8.0", "types-requests"]
14 | else:
15 | lint_deps = ["flake8", "mypy==1.4.1", "types-requests"]
16 | test_deps = ["pytest==7.1", "responses==0.22", "httpretty"]
17 |
18 | setup(
19 | name="picterra",
20 | version="2.0.5",
21 | description="Picterra API client",
22 | long_description=long_description,
23 | long_description_content_type="text/markdown",
24 | package_dir={"": "src"},
25 | packages=find_packages("src"),
26 | install_requires=[
27 | "requests",
28 | # We use the new `allowed_methods` option
29 | "urllib3>=1.26.0",
30 | ],
31 | extras_require={
32 | "test": test_deps,
33 | "lint": lint_deps,
34 | },
35 | )
36 |
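   | # Installation sketch (hedged; standard pip extras syntax, using the "test" and
   | # "lint" extras declared above):
   | #
   | #     pip install picterra                # runtime only
   | #     pip install 'picterra[test,lint]'   # with dev dependencies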
--------------------------------------------------------------------------------
/src/picterra/__init__.py:
--------------------------------------------------------------------------------
1 | from .base_client import APIError, ResultsPage
2 | # Note that we import DetectorPlatformClient twice, to export it under two names:
3 | # - DetectorPlatformClient as the name it should be used with
4 | # - APIClient to preserve backward compatibility, since that was the name it was
5 | # exported under previously (when we originally had only one platform and API client).
6 | from .detector_platform_client import DetectorPlatformClient as APIClient
7 | from .detector_platform_client import DetectorPlatformClient
8 | from .nongeo import nongeo_result_to_pixel
9 | from .plots_analysis_platform_client import PlotsAnalysisPlatformClient
10 |
11 | __all__ = ["APIClient", "DetectorPlatformClient", "PlotsAnalysisPlatformClient", "nongeo_result_to_pixel", "APIError", "ResultsPage"]
12 |
--------------------------------------------------------------------------------
/src/picterra/base_client.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import json
4 | import logging
5 | import os
6 | import sys
7 | import time
8 | from collections.abc import Callable, Iterator
9 |
10 | if sys.version_info >= (3, 8):
11 | from typing import Literal, TypedDict
12 | else:
13 | from typing_extensions import Literal, TypedDict
14 |
15 | from typing import Any, Generic, TypeVar
16 | from urllib.parse import urlencode, urljoin
17 |
18 | import requests
19 | from requests.adapters import HTTPAdapter
20 | from urllib3.util.retry import Retry
21 |
22 | logger = logging.getLogger()
23 |
24 | CHUNK_SIZE_BYTES = 8192 # 8 KiB
25 |
26 |
27 | # allow injecting a non-existing package name to test the fallback behavior
28 | # of _get_ua in tests (see test_headers_user_agent_version__fallback)
29 | def _get_distr_name():
30 | return 'picterra'
31 |
32 |
33 | def _get_ua():
34 | import platform
35 | pkg = _get_distr_name()
36 | if sys.version_info >= (3, 8):
37 | from importlib.metadata import PackageNotFoundError, version
38 | try:
39 | ver = version(pkg)
40 | except PackageNotFoundError:
41 | ver = 'no_version'
42 | else:
43 | import pkg_resources # type: ignore[import]
44 | try:
45 | ver = pkg_resources.require(pkg)[0].version
46 | except pkg_resources.DistributionNotFound:
47 | ver = 'no_version'
48 | o_s = " ".join([os.name, platform.system(), platform.release()])
49 | v_info = sys.version_info
50 | py = "Python " + str(v_info.major) + "." + str(v_info.minor)
51 | return "picterra-python/%s (%s %s)" % (ver, py, o_s,)
52 |
53 |
54 | class APIError(Exception):
55 | """Generic API error exception"""
56 |
57 | pass
58 |
59 |
60 | class _RequestsSession(requests.Session):
61 | """
62 | Override requests.Session to implement a global session timeout
63 | """
64 |
65 | def __init__(self, *args, **kwargs):
66 | self.timeout = kwargs.pop("timeout")
67 | super().__init__(*args, **kwargs)
68 | self.headers.update(
69 | {
70 | "User-Agent": "%s - %s" % (_get_ua(), self.headers["User-Agent"])
71 | }
72 | )
73 |
74 | def request(self, *args, **kwargs):
75 | kwargs.setdefault("timeout", self.timeout)
76 | return super().request(*args, **kwargs)
77 |
78 |
79 | def _download_to_file(url: str, filename: str):
80 | # Since we do not use self.sess, the timeout is disabled (the requests default); this
81 | # is desirable, as file downloads can take a long time
82 | with requests.get(url, stream=True) as r:
83 | r.raise_for_status()
84 | with open(filename, "wb+") as f:
85 | logger.debug("Downloading to file %s.." % filename)
86 | for chunk in r.iter_content(chunk_size=CHUNK_SIZE_BYTES):
87 | if chunk: # filter out keep-alive new chunks
88 | f.write(chunk)
89 |
90 |
91 | def _upload_file_to_blobstore(upload_url: str, filename: str):
92 | if not (os.path.exists(filename) and os.path.isfile(filename)):
93 | raise ValueError("Invalid file: " + filename)
94 | with open(
95 | filename, "rb"
96 | ) as f: # binary recommended by requests stream upload (see link below)
97 | logger.debug("Opening and streaming to upload file %s" % filename)
98 | # Since we do not use self.sess, the timeout is disabled (the requests default); this
99 | # is desirable, as file uploads can take a long time. We also use requests' streaming upload
100 | # (https://requests.readthedocs.io/en/latest/user/advanced/#streaming-uploads) to avoid
101 | # reading the (potentially large) file in memory
102 | resp = requests.put(upload_url, data=f)
103 | if not resp.ok:
104 | logger.error("Error when uploading to blobstore %s" % upload_url)
105 | raise APIError(resp.text)
106 |
107 |
108 | def multipolygon_to_polygon_feature_collection(mp):
109 | return {
110 | "type": "FeatureCollection",
111 | "features": [{
112 | "type": "Feature",
113 | "properties": {},
114 | "geometry": {
115 | "type": "Polygon",
116 | "coordinates": p
117 | }
118 | } for p in mp["coordinates"]]
119 | }
120 |
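    | # A minimal sketch of what this helper produces (hedged; the geometry below is
    | # an illustrative placeholder, not data from this repo):
    | #
    | #     mp = {"type": "MultiPolygon", "coordinates": [
    | #         [[[0, 0], [1, 0], [1, 1], [0, 0]]],
    | #     ]}
    | #     fc = multipolygon_to_polygon_feature_collection(mp)
    | #     # -> FeatureCollection with a single Polygon feature whose
    | #     #    coordinates are [[[0, 0], [1, 0], [1, 1], [0, 0]]]
    | 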
121 | T = TypeVar("T")
122 |
123 |
124 | class ResultsPage(Generic[T]):
125 | """
126 | Interface for a paginated response from the API
127 |
128 | Typically the endpoints returning lists of objects return them split
129 | into pages (page 1, page 2, etc.) of a fixed size (e.g. 20). Thus
130 | each `list_XX` function returns a ResultsPage (by default the first one);
131 | once you have a ResultsPage for a given list of objects, you can:
132 | * check its length with `len()` (eg `len(page)`)
133 | * access a single element with the index operator `[]` (eg `page[5]`)
134 | * turn it into a list of dictionaries with `list()` (eg `list(page)`)
135 | * get the next page with `.next()` (eg `page.next()`); this could return
136 | None if the list is finished
137 | You can also get a specific page by passing the page number to the `list_XX` function
138 | """
139 |
140 | def __init__(self, url: str, fetch: Callable[[str], requests.Response]):
141 | resp = fetch(url)
142 | if not resp.ok:
143 | raise APIError(resp.text)
144 | r: dict[str, Any] = resp.json()
145 | next_url: str | None = r["next"]
146 | results: list[T] = r["results"]
147 |
148 | self._fetch = fetch
149 | self._next_url = next_url
150 | self._results = results
151 | self._url = url
152 |
153 | def next(self):
154 | return ResultsPage(self._next_url, self._fetch) if self._next_url else None
155 |
156 | def __len__(self) -> int:
157 | return len(self._results)
158 |
159 | def __getitem__(self, key: int) -> T:
160 | return self._results[key]
161 |
162 | def __iter__(self) -> Iterator[T]:
163 | return iter([self._results[i] for i in range(len(self._results))])
164 |
165 | def __str__(self) -> str:
166 | return f"{len(self._results)} results from {self._url}"
167 |
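    | # Usage sketch (hedged; assumes `client` is an already-constructed API client
    | # and that `list_rasters` returns at least one page):
    | #
    | #     page = client.list_rasters()
    | #     while page is not None:
    | #         for raster in page:
    | #             print(raster["id"], raster["name"])
    | #         page = page.next()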
168 |
169 | class Feature(TypedDict):
170 | type: Literal["Feature"]
171 | properties: dict[str, Any]
172 | geometry: dict[str, Any]
173 |
174 |
175 | class FeatureCollection(TypedDict):
176 | type: Literal["FeatureCollection"]
177 | features: list[Feature]
178 |
179 |
180 | class BaseAPIClient:
181 | """
182 | Base class for Picterra API clients.
183 |
184 | This is subclassed for the different products we have.
185 | """
186 |
187 | def __init__(
188 | self, api_url: str, timeout: int = 30, max_retries: int = 3, backoff_factor: int = 10
189 | ):
190 | """
191 | Args:
192 | api_url: the api's base url. This differs based on the Picterra product used
193 | and is typically defined by implementations of this client
194 | timeout: number of seconds before the request times out
195 | max_retries: max attempts when encountering gateway issues or throttles; see
196 | retry_strategy comment below
197 | backoff_factor: factor used in the backoff algorithm; see retry_strategy comment below
198 | """
199 | base_url = os.environ.get(
200 | "PICTERRA_BASE_URL", "https://app.picterra.ch/"
201 | )
202 | api_key = os.environ.get("PICTERRA_API_KEY", None)
203 | if not api_key:
204 | raise APIError("PICTERRA_API_KEY environment variable is not defined")
205 | logger.info(
206 | "Using base_url=%s, api_url=%s; %d max retries, %d backoff and %s timeout.",
207 | base_url,
208 | api_url,
209 | max_retries,
210 | backoff_factor,
211 | timeout,
212 | )
213 | self.base_url = urljoin(base_url, api_url)
214 | # Create the session with a default timeout (30 sec), which we can then
215 | # override on a per-endpoint basis (it will be disabled for file uploads and downloads)
216 | self.sess = _RequestsSession(timeout=timeout)
217 | # Retry: we set the HTTP codes for our throttle (429) plus possible gateway problems (50*),
218 | # and for polling methods (GET), as non-idempotent ones should be addressed via idempotency
219 | # key mechanism; given the algorithm is {backoff factor} * (2 ** ({number of total retries} - 1)), and we
220 | # default to 30s for polling and max 30 req/min, the default 5-10-20 sequence should
221 | # provide enough room for recovery
222 | retry_strategy = Retry(
223 | total=max_retries,
224 | status_forcelist=[429, 502, 503, 504],
225 | backoff_factor=backoff_factor,
226 | allowed_methods=["GET"],
227 | )
228 | adapter = HTTPAdapter(max_retries=retry_strategy)
229 | self.sess.mount("https://", adapter)
230 | self.sess.mount("http://", adapter)
231 | # Authentication
232 | self.sess.headers.update({"X-Api-Key": api_key})
233 |
234 | def _full_url(self, path: str, params: dict[str, Any] | None = None):
235 | url = urljoin(self.base_url, path)
236 | if not params:
237 | return url
238 | else:
239 | qstr = urlencode(params)
240 | return "%s?%s" % (url, qstr)
241 |
242 | def _wait_until_operation_completes(
243 | self, operation_response: dict[str, Any]
244 | ) -> dict[str, Any]:
245 | """Polls an operation an returns its data"""
246 | operation_id = operation_response["operation_id"]
247 | poll_interval = operation_response["poll_interval"]
248 | # Just sleep for a short while the first time
249 | time.sleep(poll_interval * 0.1)
250 | while True:
251 | logger.info("Polling operation id %s" % operation_id)
252 | resp = self.sess.get(
253 | self._full_url("operations/%s/" % operation_id),
254 | )
255 | if not resp.ok:
256 | raise APIError(resp.text)
257 | status = resp.json()["status"]
258 | logger.info("status=%s" % status)
259 | if status == "success":
260 | break
261 | if status == "failed":
262 | errors = resp.json()["errors"]
263 | raise APIError(
264 | "Operation %s failed: %s" % (operation_id, json.dumps(errors))
265 | )
266 | time.sleep(poll_interval)
267 | return resp.json()
268 |
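    | # Sketch of the operation payload polled above, inferred from this client's
    | # own usage (not an authoritative schema): an "operation_id", a "poll_interval"
    | # in seconds, a "status" (checked against "success" / "failed" here), plus
    | # "errors" on failure and "results" on success.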
269 | def _return_results_page(
270 | self, resource_endpoint: str, params: dict[str, Any] | None = None
271 | ) -> ResultsPage:
272 | if params is None:
273 | params = {}
274 | if "page_number" not in params:
275 | params["page_number"] = 1
276 |
277 | url = self._full_url("%s/" % resource_endpoint, params=params)
278 | return ResultsPage(url, self.sess.get)
279 |
280 | def get_operation_results(self, operation_id: str) -> dict[str, Any]:
281 | """
282 | Return the 'results' dict of an operation
283 |
284 | This is a **beta** function, subject to change.
285 |
286 | Args:
287 | operation_id: The id of the operation
288 | """
289 | resp = self.sess.get(
290 | self._full_url("operations/%s/" % operation_id),
291 | )
292 | return resp.json()["results"]
293 |
--------------------------------------------------------------------------------
/src/picterra/detector_platform_client.py:
--------------------------------------------------------------------------------
1 | """
2 | Handles interfacing with the detection platform api v2 documented at:
3 | https://app.picterra.ch/public/apidocs/v2/
4 |
5 | Note that the Detector platform is a separate product from the Plots Analysis platform, and so
6 | an API key which is valid for one may encounter permissions issues if used with the other
7 | """
8 | from __future__ import annotations
9 |
10 | import json
11 | import logging
12 | import sys
13 | import tempfile
14 | import warnings
15 |
16 | if sys.version_info >= (3, 8):
17 | from typing import Literal
18 | else:
19 | from typing_extensions import Literal
20 |
21 | from typing import Any
22 |
23 | import requests
24 |
25 | from picterra.base_client import (
26 | APIError,
27 | BaseAPIClient,
28 | Feature,
29 | FeatureCollection,
30 | _download_to_file,
31 | _upload_file_to_blobstore,
32 | )
33 |
34 | logger = logging.getLogger()
35 |
36 |
37 | class DetectorPlatformClient(BaseAPIClient):
38 | def __init__(self, **kwargs):
39 | super().__init__("public/api/v2/", **kwargs)
40 |
41 | def upload_raster(
42 | self,
43 | filename: str,
44 | name: str,
45 | folder_id: str | None = None,
46 | captured_at: str | None = None,
47 | identity_key: str | None = None,
48 | multispectral: bool = False,
49 | cloud_coverage: int | None = None,
50 | user_tag: str | None = None,
51 | ) -> str:
52 | """
53 | Upload a raster to picterra.
54 |
55 | Args:
56 | filename: Local filename of raster to upload
57 | name: A human-readable name for this raster
58 | folder_id: Id of the folder this raster
59 | belongs to; if not provided, the raster will be put in the
60 | "Picterra API Project" folder
61 | captured_at: ISO-8601 date and time at which this
62 | raster was captured, YYYY-MM-DDThh:mm[:ss[.uuuuuu]][+HH:MM|-HH:MM|Z];
63 | e.g. "2020-01-01T12:34:56.789Z"
64 | identity_key: Personal identifier for this raster.
65 | multispectral: If True, the raster is in multispectral mode and can have
66 | an associated band specification
67 | cloud_coverage: Raster cloud coverage %.
68 | user_tag (beta): Raster tag
69 |
70 | Returns:
71 | raster_id: The id of the uploaded raster
72 | """
73 | data: dict[str, Any] = {"name": name, "multispectral": multispectral}
74 | if folder_id is not None:
75 | data.update({"folder_id": folder_id})
76 | if captured_at is not None:
77 | data.update({"captured_at": captured_at})
78 | if identity_key is not None:
79 | data.update({"identity_key": identity_key})
80 | if cloud_coverage is not None:
81 | data.update({"cloud_coverage": cloud_coverage})
82 | if user_tag is not None:
83 | data.update({"user_tag": user_tag})
84 | resp = self.sess.post(self._full_url("rasters/upload/file/"), json=data)
85 | if not resp.ok:
86 | raise APIError(resp.text)
87 | data = resp.json()
88 | upload_url = str(data["upload_url"])
89 | raster_id: str = data["raster_id"]
90 | _upload_file_to_blobstore(upload_url, filename)
91 | resp = self.sess.post(self._full_url("rasters/%s/commit/" % raster_id))
92 | if not resp.ok:
93 | raise APIError(resp.text)
94 | self._wait_until_operation_completes(resp.json())
95 | return raster_id
96 |
97 | def list_folder_detectors(self, folder_id: str, page_number: int | None = None):
98 | """
99 | List of detectors assigned to a given folder, see `ResultsPage`
100 | for the pagination access pattern.
101 |
102 | This is a **beta** function, subject to change.
103 |
104 | Args:
105 | folder_id: The id of the folder to obtain the detectors for
106 | page_number: Optional page (from 1) of the list we want to retrieve
107 |
108 | Returns:
109 | A ResultsPage object that contains a slice of the list of detector dictionaries,
110 | plus methods to retrieve the other pages
111 |
112 | Example:
113 |
114 | ::
115 |
116 | {
117 | "id": "id1",
118 | "name": "detector1",
119 | "is_runnable": True,
120 | "user_tag": "tag1",
121 | },
122 | {
123 | "id": "id2",
124 | "name": "detector2",
125 | "is_runnable": False,
126 | "user_tag": "tag2",
127 | }
128 |
129 | """
130 | return self._return_results_page(
131 | "folders/%s/detectors" % folder_id,
132 | {"page_number": page_number} if page_number is not None else None,
133 | )
134 |
135 | def list_rasters(
136 | self,
137 | folder_id: str | None = None,
138 | search_string: str | None = None,
139 | user_tag: str | None = None,
140 | max_cloud_coverage: int | None = None,
141 | captured_before: str | None = None,
142 | captured_after: str | None = None,
143 | has_vector_layers: bool | None = None,
144 | page_number: int | None = None,
145 | ):
146 | """
147 | List of rasters metadata, see `ResultsPage` for the pagination access pattern.
148 |
149 | Args:
150 | folder_id: The id of the folder to search rasters in
151 | search_string: The search term used to filter rasters by name
152 | user_tag: [beta] The user tag to filter rasters by
153 | max_cloud_coverage: [beta] The max_cloud_coverage of the rasters (between 0 and 100)
154 | captured_before: ISO 8601 -formatted date / time; only rasters captured
155 | before this moment are listed
156 | captured_after: ISO 8601 -formatted date / time; only rasters captured
157 | after this moment are listed
158 | has_vector_layers: [beta] Whether or not the rasters have at least one vector layer
159 | page_number: Optional page (from 1) of the list we want to retrieve
160 |
161 | Returns:
162 | A list of rasters dictionaries
163 |
164 | Example:
165 |
166 | ::
167 |
168 | {
169 | 'id': '42',
170 | 'status': 'ready',
171 | 'name': 'raster1',
172 | 'folder_id': 'abc'
173 | },
174 | {
175 | 'id': '43',
176 | 'status': 'ready',
177 | 'name': 'raster2',
178 | 'folder_id': 'def'
179 | }
180 |
181 | """
182 | params: dict[str, Any] = {}
183 | if folder_id:
184 | params["folder"] = folder_id
185 | if search_string:
186 | params["search"] = search_string
187 | if user_tag is not None:
188 | params["user_tag"] = user_tag.strip()
189 | if max_cloud_coverage is not None:
190 | params["max_cloud_coverage"] = max_cloud_coverage
191 | if captured_before is not None:
192 | params["captured_before"] = captured_before
193 | if captured_after is not None:
194 | params["captured_after"] = captured_after
195 | if has_vector_layers is not None:
196 | params["has_vector_layers"] = bool(has_vector_layers)
197 | if page_number is not None:
198 | params["page_number"] = page_number
199 | return self._return_results_page("rasters", params)
200 |
201 | def get_raster(self, raster_id: str) -> dict[str, Any]:
202 | """
203 | Get raster information
204 |
205 | Args:
206 | raster_id: id of the raster
207 |
208 | Raises:
209 | APIError: There was an error while getting the raster information
210 |
211 | Returns:
212 | dict: Dictionary of the information
213 | """
214 | resp = self.sess.get(self._full_url("rasters/%s/" % raster_id))
215 | if not resp.ok:
216 | raise APIError(resp.text)
217 | return resp.json()
218 |
219 | def edit_raster(
220 | self,
221 | raster_id: str,
222 | name: str | None = None,
223 | folder_id: str | None = None,
224 | captured_at: str | None = None,
225 | identity_key: str | None = None,
226 | multispectral_band_specification: dict | None = None,
227 | cloud_coverage: int | None = None,
228 | user_tag: str | None = None,
229 | ):
230 | """
231 | Edits an already existing raster.
232 |
233 | Args:
234 | raster_id: The id of the raster to edit
    | name: New human-readable name for this raster
235 | folder_id: Id of the new folder for this raster (i.e. moves it to another project)
236 | captured_at: new ISO-8601 date and time at which this
237 | raster was captured, YYYY-MM-DDThh:mm[:ss[.uuuuuu]][+HH:MM|-HH:MM|Z];
238 | e.g. "2020-01-01T12:34:56.789Z"
239 | identity_key: New personal identifier for this raster.
240 | multispectral_band_specification: The new band specification,
241 | see https://docs.picterra.ch/advanced-topics/multispectral
242 | cloud_coverage: Raster cloud coverage new percentage
243 | user_tag (beta): Raster tag
244 |
245 | Returns:
246 | raster_id: The id of the edited raster
247 | """
248 | data: dict[str, Any] = {}
249 | if name:
250 | data.update({"name": name})
251 | if folder_id is not None:
252 | data.update({"folder_id": folder_id})
253 | if captured_at is not None:
254 | data.update({"captured_at": captured_at})
255 | if identity_key is not None:
256 | data.update({"identity_key": identity_key})
257 | if multispectral_band_specification is not None:
258 | data.update(
259 | {"multispectral_band_specification": multispectral_band_specification}
260 | )
261 | if cloud_coverage is not None:
262 | data.update({"cloud_coverage": cloud_coverage})
263 | if user_tag:
264 | data.update({"user_tag": user_tag})
265 | resp = self.sess.put(self._full_url("rasters/%s/" % raster_id), json=data)
266 | if not resp.ok:
267 | raise APIError(resp.text)
268 | return raster_id
269 |
270 | def delete_raster(self, raster_id: str):
271 | """
272 | Deletes a given raster by its identifier
273 |
274 | Args:
275 | raster_id: The id of the raster to delete
276 |
277 | Raises:
278 | APIError: There was an error while trying to delete the raster
279 | """
280 |
281 | resp = self.sess.delete(self._full_url("rasters/%s/" % raster_id))
282 | if not resp.ok:
283 | raise APIError(resp.text)
284 |
285 | def download_raster_to_file(self, raster_id: str, filename: str):
286 | """
287 | Downloads a raster to a local file
288 |
289 | Args:
290 | raster_id: The id of the raster to download
291 | filename: The local filename where to save the raster image
292 |
293 | Raises:
294 | APIError: There was an error while trying to download the raster
295 | """
296 | resp = self.sess.get(self._full_url("rasters/%s/download/" % raster_id))
297 | if not resp.ok:
298 | raise APIError(resp.text)
299 | raster_url = resp.json()["download_url"]
300 | logger.debug("Trying to download raster %s from %s.." % (raster_id, raster_url))
301 | _download_to_file(raster_url, filename)
302 |
303 | def set_raster_detection_areas_from_file(self, raster_id: str, filename: str):
304 | """
305 | This is an experimental feature
306 |
307 | Set detection areas from a GeoJSON file
308 |
309 | Args:
310 | raster_id: The id of the raster to which to assign the detection areas
311 | filename: The filename of a GeoJSON file. This should contain a FeatureCollection
312 | of Polygon/MultiPolygon
313 |
314 | Raises:
315 | APIError: There was an error uploading the file to cloud storage
316 | """
317 | # Get upload URL
318 | resp = self.sess.post(
319 | self._full_url("rasters/%s/detection_areas/upload/file/" % raster_id)
320 | )
321 | if not resp.ok:
322 | raise APIError(resp.text)
323 | data = resp.json()
324 | upload_url = data["upload_url"]
325 | upload_id = data["upload_id"]
326 | # Upload to blobstore
327 | _upload_file_to_blobstore(upload_url, filename)
328 | # Commit upload
329 | resp = self.sess.post(
330 | self._full_url(
331 | "rasters/%s/detection_areas/upload/%s/commit/" % (raster_id, upload_id)
332 | )
333 | )
334 | if not resp.ok:
335 | raise APIError(resp.text)
336 | self._wait_until_operation_completes(resp.json())
337 |
338 | def remove_raster_detection_areas(self, raster_id: str):
339 | """
340 | This is an experimental feature
341 |
342 | Remove the detection areas of a raster
343 |
344 | Args:
345 | raster_id: The id of the raster whose detection areas will be removed
346 |
347 | Raises:
348 | APIError: There was an error during the operation
349 | """
350 | resp = self.sess.delete(
351 | self._full_url("rasters/%s/detection_areas/" % raster_id)
352 | )
353 | if not resp.ok:
354 | raise APIError(resp.text)
355 |
356 | def add_raster_to_detector(self, raster_id: str, detector_id: str):
357 | """
358 | Associate a raster to a detector
359 |
360 | This is a **beta** function, subject to change.
361 |
362 | Args:
363 | detector_id: The id of the detector
364 | raster_id: The id of the raster
365 |
366 | Raises:
367 | APIError: There was an error uploading the file to cloud storage
368 | """
369 | resp = self.sess.post(
370 | self._full_url("detectors/%s/training_rasters/" % detector_id),
371 | json={"raster_id": raster_id},
372 | )
373 | if not resp.status_code == 201:
374 | raise APIError(resp.text)
375 |
376 | def create_detector(
377 | self,
378 | name: str | None = None,
379 | detection_type: str = "count",
380 | output_type: str = "polygon",
381 | training_steps: int = 500,
382 | backbone: str = "resnet34",
383 | tile_size: int = 256,
384 | background_sample_ratio: float = 0.25,
385 | ) -> str:
386 | """
387 | Creates a new detector
388 |
389 | This is a **beta** function, subject to change.
390 |
391 | Please note that depending on your plan some settings cannot differ
392 | from the default ones
393 |
394 | Args:
395 | name: Name of the detector
396 | detection_type: Type of the detector (one of 'count', 'segmentation')
397 | output_type: Output type of the detector (one of 'polygon', 'bbox')
398 | training_steps: Training steps of the detector (integer between 500 and 40000)
399 | backbone: detector backbone (one of 'resnet18', 'resnet34', 'resnet50')
400 | tile_size: tile size (see HTTP API docs for the allowed values)
401 | background_sample_ratio: bg sample ratio (between 0 and 1)
402 |
403 | Returns:
404 | The id of the detector
405 |
406 | Raises:
407 | APIError: There was an error while creating the detector
408 | """
409 | # Build request body
410 | body_data: dict[str, Any] = {"configuration": {}}
411 | if name:
412 | body_data["name"] = name
413 | for i in (
414 | "detection_type",
415 | "output_type",
416 | "training_steps",
417 | "backbone",
418 | "tile_size",
419 | "background_sample_ratio",
420 | ):
421 | body_data["configuration"][i] = locals()[i]
422 | # Call API and check response
423 | resp = self.sess.post(self._full_url("detectors/"), json=body_data)
424 | if not resp.status_code == 201:
425 | raise APIError(resp.text)
426 | return resp.json()["id"]
427 |
428 | def get_detector(self, detector_id: str):
429 | resp = self.sess.get(self._full_url("detectors/%s/" % detector_id))
430 | if not resp.status_code == 200:
431 | raise APIError(resp.text)
432 | return resp.json()
433 |
434 | def list_detectors(
435 | self,
436 | search_string: str | None = None,
437 | user_tag: str | None = None,
438 | is_shared: bool | None = None,
439 | page_number: int | None = None,
440 | ):
441 | """
442 | List all the detectors the user can access, see `ResultsPage`
443 | for the pagination access pattern.
444 |
445 | Args:
446 | search_string: The term used to filter detectors by name
447 | user_tag: [beta] User tag to filter detectors by
448 | is_shared: [beta] Share status to filter detectors by
449 | page_number: Optional page (from 1) of the list we want to retrieve
450 |
451 | Returns:
452 | A list of detectors dictionaries
453 |
454 | Example:
455 |
456 | ::
457 |
458 | {
459 | 'id': '42',
460 | 'name': 'cow detector',
461 | 'configuration': {
462 | 'detection_type': 'count',
463 | 'output_type': 'bbox',
464 | 'training_steps': 787
465 | }
466 | },
467 | {
468 | 'id': '43',
469 | 'name': 'test5',
470 | 'configuration': {
471 | 'detection_type': 'segmentation',
472 | 'output_type': 'polygon',
473 | 'training_steps': 500
474 | }
475 | }
476 |
477 | """
478 | data: dict[str, Any] = {}
479 | if search_string is not None:
480 | data["search"] = search_string.strip()
481 | if user_tag is not None:
482 | data["user_tag"] = user_tag.strip()
483 | if is_shared is not None:
484 | data["is_shared"] = is_shared
485 | if page_number is not None:
486 | data["page_number"] = page_number
487 | return self._return_results_page("detectors", data)
488 |
489 | def edit_detector(
490 | self,
491 | detector_id: str,
492 | name: str | None = None,
493 | detection_type: str | None = None,
494 | output_type: str | None = None,
495 | training_steps: int | None = None,
496 | backbone: str | None = None,
497 | tile_size: int | None = None,
498 | background_sample_ratio: float | None = None,
499 | ):
500 | """
501 | Edit a detector
502 |
503 | This is a **beta** function, subject to change.
504 |
505 | Please note that depending on your plan some settings may not be editable.
506 |
507 | Args:
508 | detector_id: identifier of the detector
509 | name: Name of the detector
510 | detection_type: The type of the detector (one of 'count', 'segmentation')
511 | output_type: The output type of the detector (one of 'polygon', 'bbox')
512 | training_steps: The training steps of the detector (int in [500, 40000])
513 | backbone: detector backbone (one of 'resnet18', 'resnet34', 'resnet50')
514 | tile_size: tile size (see HTTP API docs for the allowed values)
515 | background_sample_ratio: bg sample ratio (between 0 and 1)
516 |
517 | Raises:
518 | APIError: There was an error while editing the detector
519 | """
520 | # Build request body
521 | body_data: dict[str, Any] = {"configuration": {}}
522 | if name:
523 | body_data["name"] = name
524 | for i in (
525 | "detection_type",
526 | "output_type",
527 | "training_steps",
528 | "backbone",
529 | "tile_size",
530 | "background_sample_ratio",
531 | ):
532 | if locals()[i]:
533 | body_data["configuration"][i] = locals()[i]
534 | # Call API and check response
535 | resp = self.sess.put(
536 | self._full_url("detectors/%s/" % detector_id), json=body_data
537 | )
538 | if not resp.status_code == 204:
539 | raise APIError(resp.text)
540 |
541 | def delete_detector(self, detector_id: str):
542 | """
543 | Deletes a given detector by its identifier
544 |
545 | Args:
546 | detector_id: The id of the detector to delete
547 |
548 | Raises:
549 | APIError: There was an error while trying to delete the detector
550 | """
551 |
552 | resp = self.sess.delete(self._full_url("detectors/%s/" % detector_id))
553 | if not resp.ok:
554 | raise APIError(resp.text)
555 |
556 | def run_detector(
557 | self, detector_id: str, raster_id: str, secondary_raster_id: str | None = None
558 | ) -> str:
559 | """
560 | Runs a detector on a raster: predictions are subject to a minimum charge
561 | of 10 MP.
562 |
563 | Args:
564 | detector_id: The id of the detector
565 | raster_id: The id of the raster
566 | secondary_raster_id: The id of the secondary raster. This needs to be provided to
567 | run change detectors.
568 |
569 | Returns:
570 | operation_id: The id of the operation. You typically want to pass this
571 | to `download_result_to_feature_collection`
572 | """
573 | body = {"raster_id": raster_id}
574 | if secondary_raster_id is not None:
575 | body["secondary_raster_id"] = secondary_raster_id
576 | resp = self.sess.post(
577 | self._full_url("detectors/%s/run/" % detector_id),
578 | json=body,
579 | )
580 | if not resp.ok:
581 | raise APIError(resp.text)
582 | operation_response = resp.json()
583 | self._wait_until_operation_completes(operation_response)
584 | return operation_response["operation_id"]
585 |
586 | def download_result_to_feature_collection(self, operation_id: str, filename: str):
587 | """
588 | Downloads the results from a detection operation to a local GeoJSON file.
589 |
590 | Results are stored as a FeatureCollection of MultiPolygon. Each feature has a 'class_name'
591 | property indicating the corresponding class name
592 |
593 | Args:
594 | operation_id: The id of the operation to download. This should be a
595 | detect operation
596 | filename: The local filename where to save the results
597 | """
598 | results = self.get_operation_results(operation_id)
599 | # We download per-class results to temporary files and then assemble them into a
600 | # FeatureCollection
601 | fc: FeatureCollection = {"type": "FeatureCollection", "features": []}
602 |
603 | for class_result in results["by_class"]:
604 | with tempfile.NamedTemporaryFile() as f:
605 | self.download_vector_layer_to_file(
606 | class_result["result"]["vector_layer_id"], f.name)
607 | with open(f.name) as fr:
608 | vl_polygon_fc: FeatureCollection = json.load(fr)
609 | mp_feature: Feature = {
610 | "type": "Feature",
611 | "properties": {"class_name": class_result["class"]["name"]},
612 | "geometry": {
613 | "type": "MultiPolygon",
614 | "coordinates": []
615 | }
616 | }
617 | for poly_feat in vl_polygon_fc["features"]:
618 | mp_feature["geometry"]["coordinates"].append(
619 | poly_feat["geometry"]["coordinates"]
620 | )
621 | fc["features"].append(mp_feature)
622 | with open(filename, "w") as f:
623 | json.dump(fc, f)
624 |
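    | # Usage sketch (hedged; `operation_id` comes from `run_detector` and
    | # "result.geojson" is an arbitrary local path):
    | #
    | #     client.download_result_to_feature_collection(operation_id, "result.geojson")
    | #     with open("result.geojson") as f:
    | #         fc = json.load(f)
    | #     for feat in fc["features"]:
    | #         print(feat["properties"]["class_name"],
    | #               "%d polygon(s)" % len(feat["geometry"]["coordinates"]))
    | 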
625 | def download_result_to_file(self, operation_id: str, filename: str):
626 | """
627 | Downloads a set of results to a local GeoJSON file
628 |
629 | .. deprecated:: 1.0.0
630 | Use `download_result_to_feature_collection` instead
631 |
632 | Args:
633 | operation_id: The id of the operation to download
634 | filename: The local filename where to save the results
635 | """
636 | warnings.warn(
637 | "This function is deprecated. Use download_result_to_feature_collection instead",
638 | DeprecationWarning,
639 | )
640 | result_url = self.get_operation_results(operation_id)["url"]
641 | logger.debug("Trying to download result %s.." % result_url)
642 | _download_to_file(result_url, filename)
643 |
644 | def set_annotations(
645 | self,
646 | detector_id: str,
647 | raster_id: str,
648 | annotation_type: Literal[
649 | "outline", "training_area", "testing_area", "validation_area"
650 | ],
651 | annotations: dict[str, Any],
652 | class_id: str | None = None,
653 | ):
654 | """
655 | Replaces the annotations of type 'annotation_type' with 'annotations', for the
656 | given raster-detector pair.
657 |
658 | Args:
659 | detector_id: The id of the detector
660 | raster_id: The id of the raster
661 | annotation_type: One of (outline, training_area, testing_area, validation_area)
662 | annotations: GeoJSON representation of the features to upload
663 | class_id: The class id to which to associate the new annotations. Only valid if
664 | annotation_type is "outline"
665 | """
666 | # Get an upload url
667 | create_upload_resp = self.sess.post(
668 | self._full_url(
669 | "detectors/%s/training_rasters/%s/%s/upload/bulk/"
670 | % (detector_id, raster_id, annotation_type)
671 | )
672 | )
673 | if not create_upload_resp.ok:
674 | raise APIError(create_upload_resp.text)
675 |
676 | upload = create_upload_resp.json()
677 | upload_url = upload["upload_url"]
678 | upload_id = upload["upload_id"]
679 |
680 | # Since we do not use self.sess, the timeout is disabled (the requests default);
681 | # this is desirable, as file uploads can take a long time
682 | upload_resp = requests.put(upload_url, json=annotations)
683 | if not upload_resp.ok:
684 | logger.error(
685 | "Error when sending annotation upload %s to blobstore at url %s"
686 | % (upload_id, upload_url)
687 | )
688 | raise APIError(upload_resp.text)
689 |
690 | # Commit upload
691 | body = {}
692 | if class_id is not None:
693 | body["class_id"] = class_id
694 | commit_upload_resp = self.sess.post(
695 | self._full_url(
696 | "detectors/%s/training_rasters/%s/%s/upload/bulk/%s/commit/"
697 | % (detector_id, raster_id, annotation_type, upload_id)
698 | ),
699 | json=body,
700 | )
701 | if not commit_upload_resp.ok:
702 | raise APIError(commit_upload_resp.text)
703 |
704 | # Poll for operation completion
705 | self._wait_until_operation_completes(commit_upload_resp.json())
706 |
707 | def train_detector(self, detector_id: str):
708 | """
709 | Start the training of a detector
710 |
711 | Args:
712 | detector_id: The id of the detector
713 | """
714 | resp = self.sess.post(self._full_url("detectors/%s/train/" % detector_id))
715 | if not resp.ok:
716 | raise APIError(resp.text)
717 | return self._wait_until_operation_completes(resp.json())
718 |
719 | def run_dataset_recommendation(self, detector_id: str):
720 | """
721 | This is an **experimental** feature
722 |
723 | Runs dataset recommendation on a detector. Note that you currently have to use
724 | the UI to be able to view the recommendation markers/report.
725 |
726 | Args:
727 | detector_id: The id of the detector
728 | """
729 | resp = self.sess.post(
730 | self._full_url("detectors/%s/dataset_recommendation/" % detector_id)
731 | )
732 | if not resp.ok:
733 | raise APIError(resp.text)
734 | return self._wait_until_operation_completes(resp.json())
735 |
736 | def run_advanced_tool(
737 | self, tool_id: str, inputs: dict[str, Any], outputs: dict[str, Any]
738 | ):
739 | """
740 | This is an experimental feature
741 |
742 | Runs a tool and waits for its execution, returning the finished operation metadata
743 |
744 | Args:
745 | tool_id: The id of the tool to run
746 | inputs: tool inputs
747 | outputs: tool outputs
748 |
749 | Raises:
750 | APIError: There was an error while launching and executing the tool
751 | """
752 | resp = self.sess.post(
753 | self._full_url("advanced_tools/%s/run/" % tool_id),
754 | json={"inputs": inputs, "outputs": outputs},
755 | )
756 | if not resp.ok:
757 | raise APIError(resp.text)
758 | return self._wait_until_operation_completes(resp.json())
759 |
760 | def upload_vector_layer(
761 | self,
762 | raster_id: str,
763 | filename: str,
764 | name: str | None = None,
765 | color: str | None = None,
766 | ) -> str:
767 | """
768 | Uploads a vector layer from a GeoJSON file
769 |
770 | This is a **beta** function, subject to change.
771 |
772 | Args:
773 | raster_id: The id of the raster we want to attach the vector layer to
774 | filename: Path to the local GeoJSON file we want to upload
775 | name: Optional name to give to the vector layer
776 | color: Optional color of the vector layer, as an HTML hex color code (eg "#aabbcc")
777 | Returns:
778 | the vector layer unique identifier
779 | """
780 | resp = self.sess.post(self._full_url("vector_layers/%s/upload/" % raster_id))
781 | if not resp.ok:
782 | raise APIError(resp.text)
783 | upload = resp.json()
784 | upload_id, upload_url = upload["upload_id"], upload["upload_url"]
785 | _upload_file_to_blobstore(upload_url, filename)
786 | data = {}
787 | if name is not None:
788 | data["name"] = name
789 | if color is not None:
790 | data["color"] = color
791 | resp = self.sess.post(
792 | self._full_url(
793 | "vector_layers/%s/upload/%s/commit/" % (raster_id, upload_id)
794 | ),
795 | json=data,
796 | )
797 | if not resp.ok:
798 | raise APIError(resp.text)
799 | op = self._wait_until_operation_completes(resp.json())
800 | return op["results"]["vector_layer_id"]
801 |
802 | def edit_vector_layer(
803 | self, vector_layer_id: str, name: str | None = None, color: str | None = None
804 | ):
805 | """
806 | Edits a vector layer
807 |
808 | This is a **beta** function, subject to change.
809 |
810 | Args:
811 | vector_layer_id: The id of the vector layer to edit
812 | name: new name
813 | color: new color
814 | """
815 | data = {}
816 | if name:
817 | data.update({"name": name})
818 | if color is not None:
819 | data.update({"color": color})
820 | resp = self.sess.put(
821 | self._full_url("vector_layers/%s/" % vector_layer_id), json=data
822 | )
823 | if not resp.ok:
824 | raise APIError(resp.text)
825 |
826 | def delete_vector_layer(self, vector_layer_id: str):
827 | """
828 | Removes a vector layer
829 |
830 | This is a **beta** function, subject to change.
831 |
832 | Args:
833 | vector_layer_id: The id of the vector layer to remove
834 | """
835 | resp = self.sess.delete(self._full_url("vector_layers/%s/" % vector_layer_id))
836 | if not resp.ok:
837 | raise APIError(resp.text)
838 |
839 | def download_vector_layer_to_file(self, vector_layer_id: str, filename: str):
840 | """
841 | Downloads a vector layer
842 |
843 | This is a **beta** function, subject to change.
844 |
845 | Args:
846 | vector_layer_id: The id of the vector layer to download
847 | filename: an existing file to save the vector layer in, as a FeatureCollection of Polygons
848 | """
849 | resp = self.sess.post(self._full_url("vector_layers/%s/download/" % vector_layer_id))
850 | if not resp.ok:
851 | raise APIError(resp.text)
852 | op = self._wait_until_operation_completes(resp.json())
853 | _download_to_file(op["results"]["download_url"], filename)
854 |
855 | def list_raster_markers(
856 | self,
857 | raster_id: str,
858 | page_number: int | None = None,
859 | ):
860 | """
861 | This is a **beta** function, subject to change.
862 |
863 | List all the markers on a raster, see `ResultsPage` for the pagination access pattern.
864 |
865 | Args:
866 | raster_id: The id of the raster
867 | page_number: Optional page (from 1) of the list we want to retrieve
868 | """
869 | return self._return_results_page(
870 | "rasters/%s/markers/" % raster_id,
871 | {"page_number": page_number} if page_number is not None else None,
872 | )
873 |
874 | def create_marker(
875 | self,
876 | raster_id: str,
877 | detector_id: str | None,
878 | lng: float,
879 | lat: float,
880 | text: str,
881 | ) -> dict[str, Any]:
882 | """
883 | This is an **experimental** (beta) feature
884 |
885 | Creates a marker
886 |
887 | Args:
888 | raster_id: The id of the raster (belonging to the detector) to create the marker on
889 | detector_id: The id of the detector to create the marker on. If this is None, the
890 | marker is associated with the raster only
    | lng: Longitude of the marker point
    | lat: Latitude of the marker point
    | text: Text content of the marker
891 |
892 | Raises:
893 | APIError: There was an error while creating the marker
894 | """
895 | if detector_id is None:
896 | url = "rasters/%s/markers/" % raster_id
897 | else:
898 | url = "detectors/%s/training_rasters/%s/markers/" % (detector_id, raster_id)
899 | data = {
900 | "marker": {"type": "Point", "coordinates": [lng, lat]},
901 | "text": text,
902 | }
903 | resp = self.sess.post(self._full_url(url), json=data)
904 | if not resp.ok:
905 | raise APIError(resp.text)
906 | return resp.json()
907 |
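    |     # Usage sketch (hedged; the id and coordinates are placeholders):
    |     #
    |     #     marker = client.create_marker(raster_id, None, 6.56, 46.52, "Verify this")
    |     #     print(marker)
    | 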
908 | def import_raster_from_remote_source(
909 | self,
910 | raster_name: str,
911 | folder_id: str,
912 | source_id: str,
913 | aoi_filename: str,
914 | method: Literal["streaming"] = "streaming",
915 | ) -> str:
916 | """
917 | Import a raster from a remote imagery source given a GeoJSON file for the AOI
918 |
919 | Args:
920 | raster_name: Name of the new raster
921 | folder_id: The id of the folder / project the raster will live in
922 | source_id: The id of the remote imagery source to import from
923 | aoi_filename: The filename of a GeoJSON file. This should contain a FeatureCollection of
924 | Polygon/MultiPolygon representing the AOI of the new raster
925 |
926 | Raises:
927 | APIError: There was an error during import
928 | """
929 | # Get upload URL
930 | resp = self.sess.post(self._full_url("rasters/import/"))
931 | if not resp.ok:
932 | raise APIError(resp.text)
933 | data = resp.json()
934 | upload_url = data["upload_url"]
935 | upload_id = data["upload_id"]
936 | # Upload to blobstore
937 | _upload_file_to_blobstore(upload_url, aoi_filename)
938 | # Commit upload
939 | resp = self.sess.post(
940 | self._full_url(f"rasters/import/{upload_id}/commit/"),
941 | json={
942 | "method": method,
943 | "source_id": source_id,
944 | "folder_id": folder_id,
945 | "name": raster_name,
946 | },
947 | )
948 | if not resp.ok:
949 | raise APIError(resp.text)
950 | # Poll operation and get raster identifier
951 | operation = self._wait_until_operation_completes(resp.json())
952 | return operation["metadata"]["raster_id"]
953 |
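    |     # Usage sketch (hedged; the ids and the AOI file are placeholders):
    |     #
    |     #     raster_id = client.import_raster_from_remote_source(
    |     #         "Imported AOI", folder_id, source_id, "aoi.geojson")
    | 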
954 | def list_raster_vector_layers(
955 | self,
956 | raster_id: str,
957 | search: str | None = None,
958 | detector_id: str | None = None,
959 | page_number: int | None = None,
960 | ):
961 | """
962 | This is a **beta** function, subject to change.
963 |
964 | List all the vector layers on a raster, see `ResultsPage`
965 | for the pagination access pattern.
966 |
967 | Args:
968 | raster_id: The id of the raster
969 | search: Optional string to search layers by name
    | detector_id: Optional id of a detector to filter the layers by
970 | page_number: Optional page (from 1) of the list we want to retrieve
971 | """
972 | params: dict[str, str | int] = {}
973 | if search is not None:
974 | params["search"] = search
975 | if detector_id is not None:
976 | params["detector"] = detector_id
977 | if page_number is not None:
978 | params["page_number"] = page_number
979 | url = "rasters/%s/vector_layers/" % raster_id
980 | return self._return_results_page(url, params)
981 |
982 | def list_detector_rasters(
983 | self,
984 | detector_id: str,
985 | page_number: int | None = None,
986 | ):
987 | """
988 | This is a **beta** function, subject to change.
989 |
990 | List rasters of a detector, see `ResultsPage` for the pagination access pattern.
991 |
992 | Args:
993 | detector_id: The id of the detector
994 | page_number: Optional page (from 1) of the list we want to retrieve
995 | """
996 | params: dict[str, int] = {}
997 | if page_number is not None:
998 | params["page_number"] = page_number
999 | url = "detectors/%s/training_rasters/" % detector_id
1000 | return self._return_results_page(url, params)
1001 |
1002 |
1003 | def create_folder(self, name: str) -> str:
1004 | """
1005 | Creates a new folder with the given name
1006 |
1007 | Args:
1008 | name: Name of the new folder
1009 |
1010 | Returns:
1011 | The id of the folder
1012 |
1013 | Raises:
1014 | APIError: There was an error while creating the folder
1015 | """
1016 | resp = self.sess.post(self._full_url("folders/"), json={"name": name})
1017 | if not resp.status_code == 201:
1018 | raise APIError(resp.text)
1019 | return resp.json()["id"]
1020 |
--------------------------------------------------------------------------------
/src/picterra/nongeo.py:
--------------------------------------------------------------------------------
1 | import json
2 | import math
3 |
4 | # The projected bounds for EPSG 3857 are computed based on the earth radius
5 | # defined in the spheroid https://epsg.io/3857
6 | # https://gis.stackexchange.com/questions/144471/spherical-mercator-world-bounds
7 | _EARTH_RADIUS = 6378137
8 | # They are consistent with the EPSG.io calculator
9 | # https://epsg.io/transform#s_srs=4326&t_srs=3857&x=-180.0000000&y=0.0000000
10 | # Note that the projected bounds are a square (so ymax=xmax on purpose), but
11 | # only latitude between -85 and 85 are considered valid for this projection
12 | _EPSG_3857_X_MIN = -math.pi * _EARTH_RADIUS
13 | _EPSG_3857_Y_MIN = -math.pi * _EARTH_RADIUS
14 | _EPSG_3857_X_MAX = math.pi * _EARTH_RADIUS
15 | _EPSG_3857_Y_MAX = math.pi * _EARTH_RADIUS
16 |
17 | _EPSG_3857_X_EXTENT = _EPSG_3857_X_MAX - _EPSG_3857_X_MIN
18 | _EPSG_3857_Y_EXTENT = _EPSG_3857_Y_MAX - _EPSG_3857_Y_MIN
19 |
20 | _DEG_TO_RAD = math.pi / 180.0
21 |
22 |
23 | def _nongeo_latlng2xy(lat_deg, lng_deg):
24 | """ """
25 | lat = _DEG_TO_RAD * lat_deg
26 | lng = _DEG_TO_RAD * lng_deg
27 |
28 | # First, project to pseudo-mercator
29 | # https://en.wikipedia.org/wiki/Web_Mercator_projection#Formulas
30 | x_proj = _EPSG_3857_X_EXTENT / (2.0 * math.pi) * lng
31 | y_proj = (
32 | _EPSG_3857_Y_EXTENT
33 | / (2.0 * math.pi)
34 | * math.log(math.tan(math.pi / 4.0 + lat / 2.0))
35 | )
36 |
37 | # Then, invert the raster geotransform to get pixel coordinates. This is the
38 | # arbitrary EPSG:3857 geotransform (in GDAL order) that Picterra sets on non-georeferenced rasters
39 | geot = [0, 0.1, 0, 0, 0, -0.1]
40 | x = (x_proj - geot[0]) / geot[1]
41 | y = (y_proj - geot[3]) / geot[5]
42 | return x, y
43 |
44 |
45 | def _load_polygons(geojson):
46 | """
47 | Loads polygons from a parsed geojson dict; handles Polygon, MultiPolygon
48 | and FeatureCollection geometries
49 | """
50 | polygons = []
51 | if geojson["type"] == "MultiPolygon":
52 | for polygon in geojson["coordinates"]:
53 | polygons.append(polygon)
54 | elif geojson["type"] == "Polygon":
55 | polygons = [geojson["coordinates"]]
56 | elif geojson["type"] == "FeatureCollection":
57 | for feature in geojson["features"]:
58 | geom = feature["geometry"]
59 | polygons.extend(_load_polygons(geom))
60 | return polygons
61 |
62 |
63 | def _polygon_to_xy(polygon):
64 | xy_polygon = []
65 | for ring in polygon:
66 | xy_polygon.append([_nongeo_latlng2xy(lat, lng) for lng, lat in ring])
67 | return xy_polygon
68 |
69 |
70 | def nongeo_result_to_pixel(result_filename):
71 | """
72 | This is a helper function to convert results obtained on non-georeferenced
73 | images into pixel coordinates.
74 | Note that this will NOT work if the image was georeferenced, so only use
75 | this function if you are uploading non-georeferenced image formats like
76 | PNG or JPEG.
77 |
78 | This is currently in **beta** so let us know if you find any issues
79 |
80 | Args:
81 | result_filename (str): The file path to the GeoJSON file obtained by
82 | `APIClient.download_result_to_file`
83 | Returns:
84 | A list of polygons. Each polygon is a list of rings and
85 | each ring is a list of (x, y) tuples. For example:
86 |
87 | ::
88 |
89 | [
90 | # This is a square with a square hole
91 | [[(0, 0), (1, 0), (1, 1), (0, 1), (0, 0)],
92 | [(0.4, 0.4), (0.5, 0.4), (0.5, 0.5), (0.4, 0.5), (0.4, 0.4)]],
93 | # A triangle
94 | [[(0, 0), (1, 0), (1, 1), (0, 0)]]
95 | ]
96 | """
97 | with open(result_filename) as f:
98 | geojson = json.load(f)
99 | polygons = _load_polygons(geojson)
100 | polygons = [_polygon_to_xy(p) for p in polygons]
101 | return polygons
102 |
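A minimal usage sketch for the helper above; `result.geojson` stands in for a file previously saved with `APIClient.download_result_to_file` for a non-georeferenced image:

```python
from picterra import nongeo_result_to_pixel

polygons = nongeo_result_to_pixel("result.geojson")
for polygon in polygons:
    outer_ring = polygon[0]  # the first ring is the exterior, any others are holes
    print([(round(x), round(y)) for x, y in outer_ring])
```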
--------------------------------------------------------------------------------
/src/picterra/plots_analysis_platform_client.py:
--------------------------------------------------------------------------------
1 | """
2 | Handles interfacing with the plots analysis api v1 documented at:
3 | https://app.picterra.ch/public/apidocs/plots_analysis/v1/
4 |
5 | Note that the Plots Analysis Platform is a separate product from the Detector platform, so
6 | an API key that is valid for one may encounter permissions issues if used with the other.
7 | """
8 | import datetime
9 | import json
10 | import sys
11 |
12 | if sys.version_info >= (3, 8):
13 | from typing import Dict, List, Literal
14 | else:
15 | from typing_extensions import Literal
16 | from typing import Dict, List
17 |
18 | import requests
19 | from requests.exceptions import RequestException
20 |
21 | from picterra.base_client import APIError, BaseAPIClient
22 |
23 | AnalysisMethodology = Literal["eudr_cocoa", "eudr_soy"]
24 |
25 |
26 | class PlotsAnalysisPlatformClient(BaseAPIClient):
27 | def __init__(self, **kwargs):
28 | super().__init__("public/api/plots_analysis/v1/", **kwargs)
29 |
30 | def batch_analyze_plots(self, plots_geometries_filename: str, methodology: AnalysisMethodology, assessment_date: datetime.date):
31 | """
32 | Runs the specified methodology against the plot geometries stored in the provided file and
33 | returns the analysis results.
34 |
35 | Args:
36 | - plots_geometries_filename: Path to a file containing the geometries of the plots to run the
37 | analysis against.
38 | - methodology: which analysis to run.
39 | - assessment_date: the point in time at which the analysis should be evaluated.
40 |
41 | Returns: the analysis results as a dict.
42 | """
43 | # Get an upload URL and analysis ID
44 | resp = self.sess.post(self._full_url("batch_analysis/upload/"))
45 | if not resp.ok:
46 | raise APIError(
47 | f"Failure obtaining an upload url and plots analysis ID: {resp.text}"
48 | )
49 |
50 | analysis_id = resp.json()["analysis_id"]
51 | upload_url = resp.json()["upload_url"]
52 |
53 | # Upload the provided file
54 | with open(plots_geometries_filename, "rb") as fh:
55 | resp = requests.put(upload_url, data=fh.read())
56 | if not resp.ok:
57 | raise APIError(f"Failure uploading plots file for analysis: {resp.text}")
58 |
59 | # Start the analysis
60 | data = {"methodology": methodology, "assessment_date": assessment_date.isoformat()}
61 | resp = self.sess.post(
62 | self._full_url(f"batch_analysis/start/{analysis_id}/"), data=data
63 | )
64 | if not resp.ok:
65 | raise APIError(f"Couldn't start analysis for id: {analysis_id}: {resp.text}")
66 |
67 | # Wait for the operation to succeed
68 | op_result = self._wait_until_operation_completes(resp.json())
69 | download_url = op_result["results"]["download_url"]
70 | resp = requests.get(download_url)
71 | if not resp.ok:
72 | raise APIError(
73 | f"Failure to download results file from operation id {op_result['id']}: {resp.text}"
74 | )
75 | results = resp.json()
76 |
77 | return results
78 |
79 | def create_plots_group(self, plots_group_name: str, methodology: AnalysisMethodology, columns: Dict[str, str], plots_geometries_filename: str) -> str:
80 | """
81 | Creates a new plots group.
82 |
83 | Args:
84 | - plots_group_name: user friendly name for the group
85 | - methodology: plots group methodology
86 | - columns: columns to add to the group
87 | - plots_geometries_filename: Path to a file containing the geometries of the plots the group will have
88 |
89 | Returns: the id of the new group.
90 | """
91 | resp = self.sess.post(self._full_url("plots_groups/upload/"))
92 | if not resp.ok:
93 | raise APIError(
94 | f"Failure obtaining upload URL and ID: {resp.text}"
95 | )
96 | upload_id = resp.json()["upload_id"]
97 | upload_url = resp.json()["upload_url"]
98 | with open(plots_geometries_filename, "rb") as fh:
99 | resp = requests.put(upload_url, data=fh.read())
100 | if not resp.ok:
101 | raise APIError(f"Failure uploading plots file for group: {resp.text}")
102 | data = {
103 | "name": plots_group_name,
104 | "methodology": methodology,
105 | "upload_id": upload_id,
106 | "custom_columns_values": columns
107 | }
108 | resp = self.sess.post(self._full_url("plots_groups/commit/"), json=data)
109 | if not resp.ok:
110 | raise APIError(f"Failure starting plots group commit: {resp.text}")
111 | op_result = self._wait_until_operation_completes(resp.json())["results"]
112 | return op_result["plots_group_id"]
113 |
114 | def replace_plots_group_plots(self, plots_group_id: str, plots_geometries_filename: str) -> None:
115 | """
116 | Updates the geometries of a given plots group
117 |
118 | Args:
119 | - plots_group_id: identifier of the plots group whose plots will be replaced
120 | - plots_geometries_filename: Path to a file containing the geometries of the plots the group will have
121 |
122 | Returns: nothing; the call blocks until the replace operation completes.
123 | """
124 | resp = self.sess.post(self._full_url("plots_groups/upload/"))
125 | if not resp.ok:
126 | raise APIError(
127 | f"Failure obtaining upload URL and ID: {resp.text}"
128 | )
129 | upload_id = resp.json()["upload_id"]
130 | upload_url = resp.json()["upload_url"]
131 | with open(plots_geometries_filename, "rb") as fh:
132 | resp = requests.put(upload_url, data=fh.read())
133 | if not resp.ok:
134 | raise APIError(f"Failure uploading plots file for group: {resp.text}")
135 | data = {"upload_id": upload_id}
136 | resp = self.sess.post(self._full_url(f"plots_groups/{plots_group_id}/replace/"), json=data)
137 | if not resp.ok:
138 | raise APIError(f"Failure starting plots group update: {resp.text}")
139 | self._wait_until_operation_completes(resp.json())
140 |
141 | def group_analyze_plots(
142 | self,
143 | plots_group_id: str,
144 | plots_analysis_name: str,
145 | plot_ids: List[str],
146 | assessment_date: datetime.date
147 | ) -> str:
148 | """
149 | Runs the analysis for a given date over the plot ids of the specified plots group,
150 | and returns the URL at which the analysis can be viewed in the Picterra platform.
151 |
152 | Args:
153 | - plots_group_id: id of the plots group on which we want to run the new analysis
154 | - plots_analysis_name: name to give to the new analysis
155 | - plot_ids: list of the plot ids of the plots group to select for the analysis
156 | - assessment_date: the point in time at which the analysis should be evaluated.
157 |
158 | Returns: the analysis results URL.
159 | """
160 | resp = self.sess.post(self._full_url(f"plots_groups/{plots_group_id}/analysis/upload/"))
161 | if not resp.ok:
162 | raise APIError(f"Failure obtaining an upload URL: {resp.text}")
163 | upload_id, upload_url = resp.json()["upload_id"], resp.json()["upload_url"]
164 | resp = requests.put(upload_url, data=json.dumps({"plot_ids": plot_ids}))
165 | if not resp.ok:
166 | raise APIError(f"Failure uploading plots file for analysis: {resp.text}")
167 | data = {
168 | "analysis_name": plots_analysis_name,
169 | "upload_id": upload_id,
170 | "assessment_date": assessment_date.isoformat()
171 | }
172 | resp = self.sess.post(self._full_url(f"plots_groups/{plots_group_id}/analysis/"), json=data)
173 | if not resp.ok:
174 | raise APIError(f"Couldn't start analysis: {resp.text}")
175 | op_result = self._wait_until_operation_completes(resp.json())
176 | analysis_id = op_result["results"]["analysis_id"]
177 | resp = self.sess.get(
178 | self._full_url(f"plots_groups/{plots_group_id}/analysis/{analysis_id}/")
179 | )
180 | if not resp.ok:
181 | raise APIError(f"Failure to get analysis {analysis_id}: {resp.text}")
182 | analysis_data = resp.json()
183 | return analysis_data["url"]
184 |
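A minimal sketch of the batch flow implemented by `batch_analyze_plots` above, assuming `PICTERRA_API_KEY` is set in the environment and `plots.geojson` is a placeholder for a plots geometries file:

```python
import datetime

from picterra import PlotsAnalysisPlatformClient

client = PlotsAnalysisPlatformClient()
# Uploads the file, starts the analysis, waits for the operation to
# complete and downloads the resulting JSON
results = client.batch_analyze_plots(
    "plots.geojson",
    methodology="eudr_cocoa",
    assessment_date=datetime.date(2020, 1, 1),
)
print(results)
```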
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Picterra/picterra-python/8ee780014ed2867f7ce03ba3960f73ad6efe737a/tests/__init__.py
--------------------------------------------------------------------------------
/tests/test_base_client.py:
--------------------------------------------------------------------------------
1 | import json
2 | import re
3 | import time
4 |
5 | import httpretty
6 | import pytest
7 | import responses
8 | from requests.exceptions import ConnectionError
9 |
10 | from picterra import base_client
11 | from picterra.detector_platform_client import DetectorPlatformClient
12 | from tests.utils import _add_api_response, _client, detector_api_url
13 |
14 |
15 | def test_detector_platform_client_base_url(monkeypatch):
16 | """
17 | Sanity-check that the client defaults to the correct base url
18 | """
19 | monkeypatch.setenv("PICTERRA_API_KEY", "1234")
20 | client = DetectorPlatformClient()
21 | assert client.base_url == "https://app.picterra.ch/public/api/v2/"
22 |
23 |
24 | # Cannot test Retry with responses, @see https://github.com/getsentry/responses/issues/135
25 | @httpretty.activate
26 | def test_backoff_success(monkeypatch):
27 | data = {"count": 0, "next": None, "previous": None, "results": []}
28 | httpretty.register_uri(
29 | httpretty.GET,
30 | detector_api_url("rasters/"),
31 | responses=[
32 | httpretty.Response(body=None, status=429),
33 | httpretty.Response(body=None, status=502),
34 | httpretty.Response(body=json.dumps(data), status=200),
35 | ],
36 | )
37 | client = _client(monkeypatch, max_retries=2, backoff_factor=0.1)
38 | client.list_rasters()
39 | assert len(httpretty.latest_requests()) == 3
40 |
41 |
42 | @httpretty.activate
43 | def test_backoff_failure(monkeypatch):
44 | httpretty.register_uri(
45 | httpretty.GET,
46 | detector_api_url("rasters/"),
47 | responses=[
48 | httpretty.Response(
49 | body=None,
50 | status=429,
51 | ),
52 | httpretty.Response(body=None, status=502),
53 | httpretty.Response(body=None, status=502),
54 | ],
55 | )
56 | client = _client(monkeypatch, max_retries=1)
57 | with pytest.raises(ConnectionError):
58 | client.list_rasters()
59 | assert len(httpretty.latest_requests()) == 2
60 |
61 |
62 | @httpretty.activate
63 | def test_timeout(monkeypatch):
64 | def request_callback(request, uri, response_headers):
65 | time.sleep(2)
66 | return [200, response_headers, json.dumps([])]
67 |
68 | httpretty.register_uri(httpretty.GET, detector_api_url("rasters/"), body=request_callback)
69 | timeout = 1
70 | client = _client(monkeypatch, timeout=timeout)
71 | with pytest.raises(ConnectionError) as e:
72 | client.list_rasters()
73 | full_error = str(e.value)
74 | assert "MaxRetryError" not in full_error
75 | assert "timeout" in full_error
76 | assert "read timeout=%d" % timeout in full_error
77 | assert len(httpretty.latest_requests()) == 1
78 |
79 |
80 | @responses.activate
81 | def test_headers_user_agent_version(monkeypatch):
82 | _add_api_response(detector_api_url("detectors/"), responses.POST, json={"id": "foobar"})
83 | client = _client(monkeypatch)
84 | client.create_detector()
85 | assert len(responses.calls) == 1
86 | ua = responses.calls[0].request.headers["User-Agent"]
87 | regex = r"^picterra-python/\d+\.\d+"
88 | assert re.compile(regex).match(ua) is not None
89 |
90 |
91 | @responses.activate
92 | def test_headers_user_agent_version__fallback(monkeypatch):
93 | _add_api_response(detector_api_url("detectors/"), responses.POST, json={"id": "foobar"},)
94 | monkeypatch.setattr(base_client, '_get_distr_name', lambda: 'foobar')
95 | client = _client(monkeypatch)
96 | client.create_detector()
97 | assert len(responses.calls) == 1
98 | ua = responses.calls[0].request.headers["User-Agent"]
99 | regex = "^picterra-python/no_version"
100 | assert re.compile(regex).match(ua) is not None
101 |
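The backoff and timeout tests above drive these options through the `_client` helper; a sketch of the equivalent direct construction, assuming the client constructor accepts the same `max_retries`, `backoff_factor` and `timeout` keyword arguments the helper forwards:

```python
from picterra.detector_platform_client import DetectorPlatformClient

# PICTERRA_API_KEY must be set in the environment
client = DetectorPlatformClient(
    max_retries=2,       # retry 429/5xx responses up to two more times
    backoff_factor=0.1,  # exponential backoff between retries
    timeout=30,          # per-request timeout, in seconds
)
client.list_rasters()
```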
--------------------------------------------------------------------------------
/tests/test_nongeo.py:
--------------------------------------------------------------------------------
1 | import tempfile
2 |
3 | import pytest
4 |
5 | from picterra import nongeo_result_to_pixel
6 | from picterra.nongeo import _load_polygons, _nongeo_latlng2xy
7 |
8 | # The way to get the lat/lng is:
9 | # - Upload a non-georeferenced image to the platform
10 | # - Download the normalized.tif
11 | # - Open normalized.tif in QGIS, get the lat/lng coordinates of the 4 corners of the image
12 |
13 |
14 | # In this case, the image is 1520 x 1086
15 | @pytest.mark.parametrize(
16 | "latlng,xy",
17 | [
18 | # bottom-right corner
19 | ((-0.00097539, 0.00136530), (1520, 1086)),
20 | # bottom-left corner
21 | ((-0.000975470, 0.000000096), (0, 1086)),
22 | # top-left corner
23 | ((-0.000000034, 0.000000034), (0, 0)),
24 | # top-right corner
25 | ((0.000000129, 0.001365320), (1520, 0)),
26 | ],
27 | )
28 | def test_nongeo_latlng2xy(latlng, xy):
29 | x, y = _nongeo_latlng2xy(lat_deg=latlng[0], lng_deg=latlng[1])
30 | assert int(round(x)) == xy[0] and int(round(y)) == xy[1]
31 |
32 |
33 | def test_nongeo_result_to_pixel():
34 | with tempfile.NamedTemporaryFile(mode="wt") as f:
35 | # This is the Multipolygon corresponding to the corners of a
36 | # 1520x1086 non-georeferenced image
37 | f.write(
38 | """
39 | {
40 | "type": "MultiPolygon",
41 | "coordinates":[
42 | [
43 | [
44 | [0.000000096, -0.000975470],
45 | [0.00136530, -0.00097539],
46 | [0.001365320, 0.000000129],
47 | [0.000000034, -0.000000034],
48 | [0.000000096, -0.000975470]
49 | ]
50 | ]
51 | ]
52 | }
53 | """
54 | )
55 | f.flush()
56 | polygons = nongeo_result_to_pixel(f.name)
57 | assert tuple(map(round, polygons[0][0][0])) == (0, 1086)
58 | assert tuple(map(round, polygons[0][0][1])) == (1520, 1086)
59 | assert tuple(map(round, polygons[0][0][2])) == (1520, 0)
60 | assert tuple(map(round, polygons[0][0][3])) == (0, 0)
61 | assert tuple(map(round, polygons[0][0][4])) == (0, 1086)
62 |
63 |
64 | def test_load_polygons_multipoly():
65 | geojson = {
66 | "type": "MultiPolygon",
67 | "coordinates": [
68 | [
69 | [
70 | [0.000000096, -0.000975470],
71 | [0.00136530, -0.00097539],
72 | [0.001365320, 0.000000129],
73 | [0.000000034, -0.000000034],
74 | [0.000000096, -0.000975470],
75 | ]
76 | ]
77 | ],
78 | }
79 | polygons = _load_polygons(geojson)
80 | assert len(polygons) == 1
81 | assert len(polygons[0][0]) == 5
82 | assert polygons[0][0][2][1] == 0.000000129
83 |
84 |
85 | def test_load_polygons_polygon():
86 | geojson = {
87 | "type": "Polygon",
88 | "coordinates": [
89 | [
90 | [0.000000096, -0.000975470],
91 | [0.00136530, -0.00097539],
92 | [0.001365320, 0.000000129],
93 | [0.000000034, -0.000000034],
94 | [0.000000096, -0.000975470],
95 | ]
96 | ],
97 | }
98 | polygons = _load_polygons(geojson)
99 | assert len(polygons) == 1
100 | assert len(polygons[0][0]) == 5
101 | assert polygons[0][0][2][1] == 0.000000129
102 |
103 |
104 | def test_load_polygons_fc():
105 | geojson = {
106 | "type": "FeatureCollection",
107 | "features": [
108 | {
109 | "type": "Feature",
110 | "properties": {},
111 | "geometry": {
112 | "type": "Polygon",
113 | "coordinates": [
114 | [
115 | [0.000000096, -0.000975470],
116 | [0.00136530, -0.00097539],
117 | [0.001365320, 0.000000129],
118 | [0.000000034, -0.000000034],
119 | [0.000000096, -0.000975470],
120 | ]
121 | ],
122 | },
123 | },
124 | {
125 | "type": "Feature",
126 | "properties": {},
127 | "geometry": {
128 | "type": "MultiPolygon",
129 | "coordinates": [
130 | [
131 | [
132 | [0.000000096, -0.000975470],
133 | [0.00136530, -0.00097539],
134 | [0.001365320, 0.000000129],
135 | [0.000000034, -0.000000034],
136 | [0.000000096, -0.000975470],
137 | ]
138 | ],
139 | [
140 | [
141 | [0.100000096, -0.100975470],
142 | [0.10136530, -0.10097539],
143 | [0.101365320, 0.100000129],
144 | [0.100000034, -0.100000034],
145 | [0.100000096, -0.100975470],
146 | ]
147 | ],
148 | ],
149 | },
150 | },
151 | ],
152 | }
153 | polygons = _load_polygons(geojson)
154 | assert len(polygons) == 3
155 | assert len(polygons[0][0]) == 5
156 | assert polygons[0][0][2][1] == 0.000000129
157 | assert polygons[2][0][2][1] == 0.100000129
158 |
--------------------------------------------------------------------------------
/tests/test_platform_client.py:
--------------------------------------------------------------------------------
1 | import json
2 | import tempfile
3 |
4 | import pytest
5 | import responses
6 |
7 | from picterra.base_client import multipolygon_to_polygon_feature_collection
8 | from tests.utils import (
9 | OP_RESP,
10 | OPERATION_ID,
11 | TEST_POLL_INTERVAL,
12 | _add_api_response,
13 | _client,
14 | detector_api_url,
15 | )
16 |
17 |
18 | def add_mock_rasters_list_response(endpoint=detector_api_url("rasters/")):
19 | data1 = {
20 | "count": 5,
21 | "next": "%s?page_number=2" % endpoint,
22 | "previous": None,
23 | "page_size": 2,
24 | "results": [
25 | {"id": "40", "status": "ready", "name": "raster1"},
26 | {"id": "41", "status": "ready", "name": "raster2"},
27 | ],
28 | }
29 | data2 = {
30 | "count": 5,
31 | "next": "%s?page_number=3" % endpoint,
32 | "previous": None,
33 | "page_size": 2,
34 | "results": [
35 | {"id": "42", "status": "ready", "name": "raster3"},
36 | {"id": "43", "status": "ready", "name": "raster4"},
37 | ],
38 | }
39 | data3 = {
40 | "count": 5,
41 | "next": None,
42 | "previous": "%s?page_number=2" % endpoint,
43 | "page_size": 2,
44 | "results": [
45 | {"id": "44", "status": "ready", "name": "raster5"},
46 | ],
47 | }
48 | _add_api_response(
49 | endpoint,
50 | json=data1,
51 | match=responses.matchers.query_param_matcher({"page_number": "1"}),
52 | )
53 | _add_api_response(
54 | endpoint,
55 | json=data2,
56 | match=responses.matchers.query_param_matcher({"page_number": "2"}),
57 | )
58 | _add_api_response(
59 | endpoint,
60 | json=data3,
61 | match=responses.matchers.query_param_matcher({"page_number": "3"}),
62 | )
63 |
64 |
65 | def add_mock_rasters_in_folder_list_response(folder_id):
66 | data = {
67 | "count": 1,
68 | "next": None,
69 | "previous": None,
70 | "page_size": 2,
71 | "results": [
72 | {
73 | "id": "77",
74 | "status": "ready",
75 | "name": "raster_in_folder1",
76 | "folder_id": folder_id,
77 | },
78 | ],
79 | }
80 | qs = {"folder": folder_id, "page_number": "1"}
81 | _add_api_response(
82 | detector_api_url("rasters/"), json=data, match=responses.matchers.query_param_matcher(qs)
83 | )
84 |
85 |
86 | def add_mock_rasters_in_filtered_list_response(
87 | search=None, tag=None, cloud=None, before=None, after=None, has_layers=None, page=1
88 | ):
89 | name = (search + "_" if search else "") + "raster" + ("_" + tag if tag else "")
90 | data = {
91 | "count": 1,
92 | "next": None,
93 | "previous": None,
94 | "page_size": 2,
95 | "results": [
96 | {"id": "77", "status": "ready", "name": name},
97 | ],
98 | }
99 | qs = {"page_number": page}
100 | if search is not None:
101 | qs["search"] = search
102 | if tag is not None:
103 | qs["user_tag"] = tag
104 | if cloud is not None:
105 | qs["max_cloud_coverage"] = cloud
106 | if before:
107 | qs["captured_before"] = before
108 | if after:
109 | qs["captured_after"] = after
110 | if has_layers is not None:
111 | qs["has_vector_layers"] = bool(has_layers)
112 | _add_api_response(
113 | detector_api_url("rasters/"), match=responses.matchers.query_param_matcher(qs), json=data
114 | )
115 |
116 |
117 | def add_mock_vector_layers_filtered_list_response(
118 | idx, raster, search=None, detector=None
119 | ):
120 | name = f"layer_{idx}"
121 | data = {
122 | "count": 1,
123 | "next": None,
124 | "previous": None,
125 | "page_size": 1,
126 | "results": [{"id": str(idx), "count": idx, "name": name}],
127 | }
128 | qs = {"page_number": 1}
129 | if search is not None:
130 | qs["search"] = search
131 | if detector is not None:
132 | qs["detector"] = detector
133 | _add_api_response(
134 | detector_api_url(f"rasters/{raster}/vector_layers/"),
135 | match=responses.matchers.query_param_matcher(qs),
136 | json=data,
137 | )
138 |
139 |
140 | def add_mock_detectors_list_response(string=None, tag=None, shared=None):
141 | data1 = {
142 | "count": 4,
143 | "next": detector_api_url("detectors/?page_number=2"),
144 | "previous": None,
145 | "page_size": 2,
146 | "results": [
147 | {"id": "40", "type": "count", "name": string or "detector1"},
148 | {"id": "41", "type": "count", "name": string or "detector2"},
149 | ],
150 | }
151 | data2 = {
152 | "count": 4,
153 | "next": None,
154 | "previous": detector_api_url("detectors/?page_number=1"),
155 | "page_size": 2,
156 | "results": [
157 | {"id": "42", "type": "count", "name": string or "detector3"},
158 | {"id": "43", "type": "count", "name": string or "detector4"},
159 | ],
160 | }
161 | qs_params = {"page_number": "1"}
162 | if string:
163 | qs_params["search"] = string
164 | if tag:
165 | qs_params["user_tag"] = tag
166 | if shared:
167 | qs_params["is_shared"] = shared
168 | _add_api_response(
169 | detector_api_url("detectors/"),
170 | match=responses.matchers.query_param_matcher(qs_params),
171 | json=data1,
172 | )
173 | qs_params2 = {"page_number": "2"}
174 | if string:
175 | qs_params2["search"] = string
176 | if tag:
177 | qs_params2["user_tag"] = tag
178 | if shared:
179 | qs_params2["is_shared"] = shared
180 | _add_api_response(
181 | detector_api_url("detectors/"),
182 | match=responses.matchers.query_param_matcher(qs_params2),
183 | json=data2,
184 | )
185 |
186 |
187 | def add_mock_detector_creation_response(**kwargs):
188 | match = responses.matchers.json_params_matcher({"configuration": kwargs}) if kwargs else None
189 | _add_api_response(detector_api_url("detectors/"), responses.POST, json={"id": "foobar"}, match=match)
190 |
191 |
192 | def add_mock_detector_edit_response(d_id, **kwargs):
193 | match = responses.matchers.json_params_matcher({"configuration": kwargs}) if kwargs else None
194 | _add_api_response(detector_api_url("detectors/%s/" % d_id), responses.PUT, status=204, match=match)
195 |
196 |
197 | def add_mock_detector_train_responses(detector_id):
198 | _add_api_response(detector_api_url("detectors/%s/train/" % detector_id), responses.POST, OP_RESP)
199 |
200 |
201 | def add_mock_run_dataset_recommendation_responses(detector_id):
202 | _add_api_response(
203 | detector_api_url("detectors/%s/dataset_recommendation/" % detector_id), responses.POST, OP_RESP
204 | )
205 |
206 |
207 | def add_mock_operations_responses(status, **kwargs):
208 | data = {"type": "mock_operation_type", "status": status}
209 | if kwargs:
210 | data.update(kwargs)
211 | if status == "success":
212 | data.update(
213 | {
214 | "metadata": {
215 | "raster_id": "foo",
216 | "detector_id": "bar",
217 | "folder_id": "spam",
218 | }
219 | }
220 | )
221 | _add_api_response(detector_api_url("operations/%s/" % OPERATION_ID), json=data)
222 |
223 |
224 | def add_mock_annotations_responses(
225 | detector_id, raster_id, annotation_type, class_id=None
226 | ):
227 | upload_id = 32
228 | url = "detectors/%s/training_rasters/%s/%s/upload/bulk/" % (
229 | detector_id,
230 | raster_id,
231 | annotation_type,
232 | )
233 | responses.add(responses.PUT, "http://storage.example.com", status=200)
234 | _add_api_response(
235 | detector_api_url(url),
236 | responses.POST,
237 | {"upload_url": "http://storage.example.com", "upload_id": upload_id},
238 | )
239 | url = "detectors/%s/training_rasters/%s/%s/upload/bulk/%s/commit/" % (
240 | detector_id,
241 | raster_id,
242 | annotation_type,
243 | upload_id,
244 | )
245 | if class_id is None:
246 | _add_api_response(
247 | detector_api_url(url),
248 | responses.POST,
249 | OP_RESP,
250 | # strict_match matters here because we want to disallow sending `class_id: null`
251 | # as this would lead to a server-side error. Instead, class_id shouldn't be included
252 | # if it is not defined
253 | match=responses.matchers.json_params_matcher({}, strict_match=True),
254 | )
255 | else:
256 | _add_api_response(
257 | detector_api_url(url),
258 | responses.POST,
259 | json=OP_RESP,
260 | match=responses.matchers.json_params_matcher({"class_id": class_id}),
261 | )
262 |
263 |
264 | def add_mock_raster_upload_responses(identity_key, multispectral, cloud_coverage, tag):
265 | raster_id = 42
266 | # Upload initiation
267 | data = {"upload_url": "http://storage.example.com", "raster_id": raster_id}
268 | body = {
269 | "name": "test 1",
270 | "multispectral": multispectral,
271 | "captured_at": "2020-01-10T12:34:56.789Z",
272 | "folder_id": "a-folder-uuid",
273 | }
274 | if identity_key:
275 | body["identity_key"] = identity_key
276 | if cloud_coverage is not None:
277 | body["cloud_coverage"] = cloud_coverage
278 | if tag is not None:
279 | body["user_tag"] = tag
280 | _add_api_response(
281 | detector_api_url("rasters/upload/file/"),
282 | responses.POST,
283 | data,
284 | responses.matchers.json_params_matcher(body),
285 | status=200,
286 | )
287 | # Storage PUT
288 | responses.add(responses.PUT, "http://storage.example.com", status=200)
289 | # Commit
290 | _add_api_response(detector_api_url("rasters/%s/commit/" % raster_id), responses.POST, OP_RESP)
291 | # Status, first check
292 | data = {"id": raster_id, "name": "raster1", "status": "processing"}
293 | _add_api_response(detector_api_url("rasters/%s/" % raster_id), json=data)
294 | # Status, second check
295 | data = {"id": raster_id, "name": "raster1", "status": "ready"}
296 | _add_api_response(detector_api_url("rasters/%s/" % raster_id), json=data)
297 |
298 |
299 | def add_mock_detection_areas_upload_responses(raster_id):
300 | upload_id = 42
301 |
302 | # Upload initiation
303 | data = {"upload_url": "http://storage.example.com", "upload_id": upload_id}
304 | _add_api_response(
305 | detector_api_url("rasters/%s/detection_areas/upload/file/" % raster_id), responses.POST, data
306 | )
307 | # Storage PUT
308 | responses.add(responses.PUT, "http://storage.example.com", status=200)
309 | # Commit
310 | _add_api_response(
311 | detector_api_url("rasters/%s/detection_areas/upload/%s/commit/" % (raster_id, upload_id)),
312 | responses.POST,
313 | OP_RESP,
314 | status=200,
315 | )
316 | # Status, first check
317 | data = {"status": "processing"}
318 | _add_api_response(
319 | detector_api_url("rasters/%s/detection_areas/upload/%s/" % (raster_id, upload_id)), json=data
320 | )
321 | # Status, second check
322 | data = {"status": "ready"}
323 | _add_api_response(
324 | detector_api_url("rasters/%s/detection_areas/upload/%s/" % (raster_id, upload_id)), json=data
325 | )
326 |
327 |
328 | def add_mock_remote_import_responses(upload_id, post_body):
329 | match = responses.matchers.json_params_matcher(post_body)
330 | # Upload initiation
331 | data = {"upload_url": "http://storage.example.com", "upload_id": upload_id}
332 | _add_api_response(detector_api_url("rasters/import/"), responses.POST, data)
333 | # Storage PUT
334 | responses.add(responses.PUT, "http://storage.example.com", status=200)
335 | # Commit
336 | _add_api_response(
337 | detector_api_url(f"rasters/import/{upload_id}/commit/"),
338 | responses.POST,
339 | OP_RESP,
340 | match=match,
341 | status=200,
342 | )
343 |
344 |
345 | def add_mock_detector_run_responses(detector_id, raster_id, secondary_raster_id=None):
346 | data = {"raster_id": raster_id}
347 | if secondary_raster_id:
348 | data["secondary_raster_id"] = secondary_raster_id
349 | _add_api_response(
350 | detector_api_url("detectors/%s/run/" % detector_id),
351 | responses.POST,
352 | OP_RESP,
353 | match=responses.matchers.json_params_matcher(data),
354 | )
355 | # First status check
356 | data = {"status": "running"}
357 | _add_api_response(detector_api_url("operations/%s/" % OPERATION_ID), json=data)
358 | # Second status check
359 | data = {"status": "success"}
360 | _add_api_response(detector_api_url("operations/%s/" % OPERATION_ID), json=data)
361 |
362 |
363 | def add_mock_vector_layer_responses(upload_id, raster_id, name, color):
364 | _add_api_response(
365 | detector_api_url("vector_layers/%s/upload/" % raster_id),
366 | responses.POST,
367 | json={"upload_url": "http://storage.example.com", "upload_id": upload_id},
368 | )
369 | responses.add(responses.PUT, "http://storage.example.com", status=200)
370 | qs = {}
371 | if name is not None:
372 | qs["name"] = name
373 | if color is not None:
374 | qs["color"] = color
375 | _add_api_response(
376 | detector_api_url("vector_layers/%s/upload/%s/commit/" % (raster_id, upload_id)),
377 | responses.POST,
378 | json={"operation_id": OPERATION_ID, "poll_interval": TEST_POLL_INTERVAL},
379 | match=responses.matchers.json_params_matcher(qs) if qs else [],
380 | )
381 |
382 |
383 | def add_mock_vector_layer_download_responses(layer_id, polygons_num):
384 | url = "vector_layers/%s/download/" % layer_id
385 | data = {"operation_id": OPERATION_ID, "poll_interval": TEST_POLL_INTERVAL}
386 | _add_api_response(detector_api_url(url), verb=responses.POST, json=data)
387 | results = {
388 | "expiration": "2021-11-03T10:55:16.000000Z",
389 | "download_url": "http://layer.geojson.example.com",
390 | }
391 | add_mock_operations_responses("success", results=results)
392 | url = results["download_url"]
393 | polygons_fc = multipolygon_to_polygon_feature_collection(make_geojson_multipolygon(polygons_num))
394 | assert len(polygons_fc["features"]) == polygons_num
395 | responses.add(
396 | responses.GET,
397 | url,
398 | body=json.dumps(polygons_fc),
399 | )
400 | return polygons_fc
401 |
402 | def add_mock_folder_creation_response(id, name):
403 | match = responses.matchers.json_params_matcher({"name": name})
404 | _add_api_response(detector_api_url("folders/"), responses.POST, json={"id": id}, match=match)
405 |
406 |
407 | def make_geojson_polygon(base=1):
408 | return {"type": "Polygon", "coordinates": [[[0, 0], [base, 0], [base, base], [0, base], [0, 0]]]}
409 |
410 |
411 | def make_geojson_multipolygon(npolygons=1):
412 | coords = []
413 | for i in range(npolygons):
414 | coords.append(make_geojson_polygon(i + 1)["coordinates"])
415 | return {"type": "MultiPolygon", "coordinates": coords}
416 |
417 |
418 | def add_mock_download_result_response(op_id, num_classes):
419 | data = {
420 | "results": {
421 | "url": "http://storage.example.com/result_for_class_1.geojson",
422 | "by_class": [
423 | {
424 | "class": {"name": f"class_{i + 1}"},
425 | "result": {
426 | "url": f"http://storage.example.com/result_for_class_{i + 1}.geojson",
427 | "vector_layer_id": f"layer_{i + 1}"
428 | },
429 | } for i in range(num_classes)
430 | ],
431 | },
432 | }
433 | _add_api_response(detector_api_url("operations/%s/" % op_id), json=data, status=201)
434 | mock_contents = {
435 | f"class_{i + 1}": json.dumps(make_geojson_multipolygon(npolygons=i + 2))
436 | for i in range(num_classes)
437 | }
438 | for i in range(num_classes):
439 | responses.add(
440 | responses.GET,
441 | f"http://storage.example.com/result_for_class_{i + 1}.geojson",
442 | body=mock_contents[f"class_{i + 1}"],
443 | )
444 | return mock_contents
445 |
446 |
447 | def add_mock_download_raster_response(raster_id):
448 | file_url = "http://storage.example.com/%s.tiff" % raster_id
449 | data = {"download_url": file_url}
450 | _add_api_response(detector_api_url("rasters/%s/download/" % raster_id), json=data)
451 | mock_content = (1024).to_bytes(2, byteorder="big")
452 | responses.add(responses.GET, file_url, body=mock_content)
453 | return mock_content
454 |
455 |
456 | def add_mock_url_result_response(op_id, url):
457 | data = {"results": {"url": url}}
458 | _add_api_response(detector_api_url("operations/%s/" % op_id), json=data, status=201)
459 |
460 |
461 | def add_get_operation_results_url_response(op_id):
462 | url = "http://storage.example.com/42.geojson"
463 | data = {"results": {"url": url}}
464 | _add_api_response(detector_api_url("operations/%s/" % op_id), json=data, status=201)
465 | return url
466 |
467 |
468 | def add_mock_edit_raster_response(raster_id, body):
469 | _add_api_response(
470 | detector_api_url("rasters/%s/" % raster_id),
471 | responses.PUT,
472 | match=responses.matchers.json_params_matcher(body),
473 | status=204,
474 | )
475 |
476 |
477 | def add_mock_delete_raster_response(raster_id):
478 | _add_api_response(detector_api_url("rasters/%s/" % raster_id), responses.DELETE)
479 |
480 |
481 | def add_mock_delete_detectionarea_response(raster_id):
482 | _add_api_response(detector_api_url("rasters/%s/detection_areas/" % raster_id), responses.DELETE)
483 |
484 |
485 | def add_mock_delete_detector_response(detector_id):
486 | _add_api_response(detector_api_url("detectors/%s/" % detector_id), responses.DELETE)
487 |
488 |
489 | def add_mock_delete_vector_layer_response(layer_id):
490 | _add_api_response(detector_api_url("vector_layers/%s/" % layer_id), responses.DELETE)
491 |
492 |
493 | def add_mock_edit_vector_layer_response(layer_id, **kwargs):
494 | _add_api_response(
495 | detector_api_url("vector_layers/%s/" % layer_id),
496 | responses.PUT,
497 | match=responses.matchers.json_params_matcher(kwargs),
498 | )
499 |
500 |
501 | def add_mock_raster_markers_list_response(raster_id):
502 | base_url = detector_api_url("rasters/%s/markers/" % raster_id)
503 | data1 = {
504 | "count": 4,
505 | "next": base_url + "?page_number=2",
506 | "previous": base_url + "?page_number=1",
507 | "page_size": 2,
508 | "results": [{"id": "1"}, {"id": "2"}],
509 | }
510 | data2 = {
511 | "count": 4,
512 | "next": None,
513 | "previous": None,
514 | "page_size": 2,
515 | "results": [{"id": "3"}, {"id": "4"}],
516 | }
517 | _add_api_response(
518 | base_url,
519 | json=data1,
520 | match=responses.matchers.query_param_matcher({"page_number": "1"}),
521 | )
522 | _add_api_response(
523 | base_url,
524 | json=data2,
525 | match=responses.matchers.query_param_matcher({"page_number": "2"}),
526 | )
527 |
528 |
529 | def add_mock_marker_creation_response(marker_id, raster_id, detector_id, coords, text):
530 | if detector_id is None:
531 | url = "rasters/%s/markers/" % raster_id
532 | else:
533 | url = "detectors/%s/training_rasters/%s/markers/" % (detector_id, raster_id)
534 | body = {
535 | "marker": {"type": "Point", "coordinates": coords},
536 | "text": text,
537 | }
538 | match = responses.matchers.json_params_matcher(body)
539 | _add_api_response(detector_api_url(url), responses.POST, json={"id": marker_id}, match=match)
540 |
541 |
542 | def add_mock_folder_detector_response(folder_id: str):
543 | base_url = detector_api_url("folders/%s/detectors/" % folder_id)
544 | data1 = {
545 | "count": 4,
546 | "next": base_url + "?page_number=2",
547 | "previous": base_url + "?page_number=1",
548 | "page_size": 2,
549 | "results": [
550 | {
551 | "id": "id1",
552 | "name": "detector1",
553 | "is_runnable": True,
554 | "user_tag": "tag1",
555 | },
556 | {
557 | "id": "id2",
558 | "name": "detector2",
559 | "is_runnable": False,
560 | "user_tag": "tag2",
561 | },
562 | ],
563 | }
564 | data2 = {
565 | "count": 4,
566 | "next": None,
567 | "previous": None,
568 | "page_size": 2,
569 | "results": [
570 | {
571 | "id": "id3",
572 | "name": "detector3",
573 | "is_runnable": True,
574 | "user_tag": "",
575 | },
576 | {
577 | "id": "id4",
578 | "name": "detector4",
579 | "is_runnable": False,
580 | "user_tag": "",
581 | },
582 | ],
583 | }
584 | _add_api_response(
585 | base_url,
586 | json=data1,
587 | match=responses.matchers.query_param_matcher({"page_number": "1"}),
588 | )
589 | _add_api_response(
590 | base_url,
591 | json=data2,
592 | match=responses.matchers.query_param_matcher({"page_number": "2"}),
593 | )
594 |
595 |
596 | def test_multipolygon_to_polygon_feature_collection():
597 | mp = {
598 | "type": "MultiPolygon",
599 | "coordinates": [
600 | [[[0, 0], [0, 1], [1, 1], [1, 0], [0, 0]]],
601 | [[[1, 1], [1, 2], [2, 2], [2, 1], [1, 1]]]
602 | ]
603 | }
604 | fc = multipolygon_to_polygon_feature_collection(mp)
605 | assert fc == {
606 | "type": "FeatureCollection",
607 | "features": [{
608 | "type": "Feature",
609 | "properties": {},
610 | "geometry": {
611 | "type": "Polygon",
612 | "coordinates": [[[0, 0], [0, 1], [1, 1], [1, 0], [0, 0]]]
613 | }
614 | }, {
615 | "type": "Feature",
616 | "properties": {},
617 | "geometry": {
618 | "type": "Polygon",
619 | "coordinates": [[[1, 1], [1, 2], [2, 2], [2, 1], [1, 1]]]
620 | }
621 | }]
622 | }
623 |
624 |
625 | @pytest.mark.parametrize(
626 | ("identity_key", "multispectral", "cloud_coverage", "tag"),
627 | ((None, False, None, None), ("abc", True, 18, "spam")),
628 | )
629 | @responses.activate
630 | def test_upload_raster(monkeypatch, identity_key, multispectral, cloud_coverage, tag):
631 | client = _client(monkeypatch)
632 | add_mock_raster_upload_responses(identity_key, multispectral, cloud_coverage, tag)
633 | add_mock_operations_responses("success")
634 | with tempfile.NamedTemporaryFile() as f:
635 | # This just tests that this doesn't raise
636 | client.upload_raster(
637 | f.name,
638 | name="test 1",
639 | folder_id="a-folder-uuid",
640 | captured_at="2020-01-10T12:34:56.789Z",
641 | identity_key=identity_key,
642 | multispectral=multispectral,
643 | cloud_coverage=cloud_coverage,
644 | user_tag=tag,
645 | )
646 | assert len(responses.calls) == 4
647 |
648 |
649 | @pytest.mark.parametrize(
650 | "edited_data",
651 | (
652 | {"folder_id": "2233"},
653 | {"folder_id": "2233", "identity_key": "dr43t5zrtzz"},
654 | {"captured_at": "2020-01-01T12:34:56.789Z", "cloud_coverage": 88},
655 | {
656 | "multispectral_band_specification": {
657 | "ranges": [[2, 3], [12, 13], [22, 23]],
658 | "vizbands": [0, 1, 2],
659 | }
660 | },
661 | {"user_tag": "foobar"},
662 | ),
663 | )
664 | @responses.activate
665 | def test_edit_raster(monkeypatch, edited_data):
666 | RASTER_ID = "foobar"
667 | client = _client(monkeypatch)
668 | add_mock_edit_raster_response(RASTER_ID, {"name": "spam", **edited_data})
669 | client.edit_raster(RASTER_ID, "spam", **edited_data)
670 | assert len(responses.calls) == 1
671 |
672 |
673 | @responses.activate
674 | def test_get_raster(monkeypatch):
675 | """Test the raster information"""
676 | RASTER_ID = "foobar"
677 | client = _client(monkeypatch)
678 | _add_api_response(detector_api_url("rasters/%s/" % RASTER_ID), json={}, status=201)
679 | client.get_raster(RASTER_ID)
680 | assert len(responses.calls) == 1
681 |
682 |
683 | @responses.activate
684 | def test_delete_raster(monkeypatch):
685 | RASTER_ID = "foobar"
686 | client = _client(monkeypatch)
687 | add_mock_delete_raster_response(RASTER_ID)
688 | client.delete_raster(RASTER_ID)
689 | assert len(responses.calls) == 1
690 |
691 |
692 | @responses.activate
693 | def test_delete_detectionarea(monkeypatch):
694 | RASTER_ID = "foobar"
695 | client = _client(monkeypatch)
696 | add_mock_delete_detectionarea_response(RASTER_ID)
697 | client.remove_raster_detection_areas(RASTER_ID)
698 | assert len(responses.calls) == 1
699 |
700 |
701 | @responses.activate
702 | def test_download_raster(monkeypatch):
703 | RASTER_ID = "foobar"
704 | expected_content = add_mock_download_raster_response(RASTER_ID)
705 | client = _client(monkeypatch)
706 | with tempfile.NamedTemporaryFile() as f:
707 | client.download_raster_to_file(RASTER_ID, f.name)
708 | assert open(f.name, "rb").read() == expected_content
709 | assert len(responses.calls) == 2
710 |
711 |
712 | @responses.activate
713 | def test_list_rasters(monkeypatch):
714 | """Test the list of rasters, both generic and specifying the filters"""
715 | client = _client(monkeypatch)
716 | # Generic (check pagination)
717 | add_mock_rasters_list_response()
718 | page1 = client.list_rasters()
719 | assert len(page1) == 2
720 | assert page1[0]["name"] == "raster1" and page1[1]["name"] == "raster2"
721 | assert len(responses.calls) == 1
722 | page2 = page1.next()
723 | assert len(page2) == 2
724 | assert page2[0]["name"] == "raster3" and page2[1]["name"] == "raster4"
725 | page3 = page2.next()
726 | assert len(responses.calls) == 3
727 | assert len(page3) == 1 and page3.next() is None
728 | assert page3[0]["name"] == "raster5"
729 | assert len(responses.calls) == 3
730 | pairs = zip(list(page2), list(client.list_rasters(page_number=2)))
731 | assert all(x == y for x, y in pairs)
732 | assert len(responses.calls) == 4
733 | # Folder list
734 | add_mock_rasters_in_folder_list_response("foobar")
735 | rasters = client.list_rasters("foobar")
736 | assert rasters[0]["name"] == "raster_in_folder1"
737 | assert rasters[0]["folder_id"] == "foobar"
738 | # Search list
739 | add_mock_rasters_in_filtered_list_response(search="spam")
740 | rasters = client.list_rasters("", search_string="spam")
741 | assert rasters[0]["name"] == "spam_raster"
742 | # Filter list 1
743 | add_mock_rasters_in_filtered_list_response(tag="foobar")
744 | rasters = client.list_rasters("", user_tag="foobar")
745 | assert rasters[0]["name"] == "raster_foobar"
746 | # Filter list 2
747 | add_mock_rasters_in_filtered_list_response(tag="foobar", cloud=44)
748 | rasters = client.list_rasters("", user_tag="foobar", max_cloud_coverage=44)
749 | assert rasters[0]["name"] == "raster_foobar"
750 | # Filter list 3
751 | add_mock_rasters_in_filtered_list_response(
752 | has_layers=False, search="foo", before="2018-11-13T20:20:39+00:00"
753 | )
754 | rasters = client.list_rasters(
755 | search_string="foo",
756 | captured_before="2018-11-13T20:20:39+00:00",
757 | has_vector_layers=False,
758 | )
759 | assert rasters[0]["name"] == "foo_raster"
760 | # Filter list 4
761 | add_mock_rasters_in_filtered_list_response(
762 | has_layers=True, after="2022-11-13T20:20:39+00:00", search="bar"
763 | )
764 | rasters = client.list_rasters(
765 | search_string="bar",
766 | captured_after="2022-11-13T20:20:39+00:00",
767 | has_vector_layers=True,
768 | )
769 | assert rasters[0]["name"] == "bar_raster"
770 | # Filter list with pagination
771 | add_mock_rasters_in_filtered_list_response(page=3, search="spam")
772 | rasters = client.list_rasters(
773 | search_string="spam",
774 | page_number=3,
775 | )
776 | assert rasters[0]["id"] == "77"
777 | assert len(responses.calls) == 11
778 |
779 |
780 | @responses.activate
781 | def test_detector_creation(monkeypatch):
782 | client = _client(monkeypatch)
783 | args = [
784 | {"detection_type": "segmentation"},
785 | {"output_type": "bbox"},
786 | {"training_steps": 10**3},
787 | {"backbone": "resnet18"},
788 | {"tile_size": 352},
789 | {"background_sample_ratio": 0.3},
790 | ]
791 | add_mock_detector_creation_response()
792 | client.create_detector()
793 | for a in args:
794 | add_mock_detector_creation_response(**a)
795 | client.create_detector(**a)
796 | merge = dict(p for d in args for p in d.items())
797 | add_mock_detector_creation_response(**merge)
798 | detector_id = client.create_detector(**merge)
799 | assert detector_id == "foobar"
800 |
801 |
802 | @responses.activate
803 | def test_list_detectors(monkeypatch):
804 | client = _client(monkeypatch)
805 | # Full list
806 | add_mock_detectors_list_response()
807 | detectors = client.list_detectors()
808 | assert len(detectors) == 2 # 1st api call
809 | assert detectors[0]["name"] == "detector1"
810 | assert detectors[1]["name"] == "detector2"
811 | assert detectors.next()[1]["id"] == "43" # 2nd api call
812 | # Search list
813 | add_mock_detectors_list_response("spam")
814 | detectors = client.list_detectors("spam") # 3rd api call
815 | assert detectors[0]["name"] == "spam"
816 | # Filter list
817 | add_mock_detectors_list_response(None, "foobar", True)
818 | detectors = client.list_detectors(user_tag="foobar", is_shared=True) # 4th api call
819 | assert detectors[1]["name"] == "detector2"
820 | assert len(responses.calls) == 4
821 |
822 |
823 | @responses.activate
824 | def test_delete_detector(monkeypatch):
825 | DETECTOR_ID = "foobar"
826 | client = _client(monkeypatch)
827 | add_mock_delete_detector_response(DETECTOR_ID)
828 | client.delete_detector(DETECTOR_ID)
829 | assert len(responses.calls) == 1
830 |
831 |
832 | @responses.activate
833 | def test_detector_edit(monkeypatch):
834 | client = _client(monkeypatch)
835 | detector_id = "foobar"
836 | args = [
837 | {"detection_type": "segmentation"},
838 | {"output_type": "bbox"},
839 | {"training_steps": 10**3},
840 | {"backbone": "resnet50"},
841 | {"tile_size": 512},
842 | {"background_sample_ratio": 0.3},
843 | ]
844 | add_mock_detector_edit_response(detector_id)
845 | client.edit_detector(detector_id)
846 | for a in args:
847 | add_mock_detector_edit_response(detector_id, **a)
848 | client.edit_detector(detector_id, **a)
849 | merge = dict(p for d in args for p in d.items())
850 | add_mock_detector_edit_response(detector_id, **merge)
851 | client.edit_detector(detector_id, **merge)
852 | assert len(responses.calls) == 8
853 |
854 |
855 | @responses.activate
856 | def test_set_raster_detection_areas_from_file(monkeypatch):
857 | add_mock_detection_areas_upload_responses(1)
858 | add_mock_operations_responses("success")
859 |
860 | client = _client(monkeypatch)
861 | # This just tests that this doesn't raise
862 | with tempfile.NamedTemporaryFile() as f:
863 | client.set_raster_detection_areas_from_file(1, f.name)
864 | assert len(responses.calls) == 4
865 |
866 |
867 | @responses.activate
868 | def test_run_detector(monkeypatch):
869 | add_mock_detector_run_responses(1, 2)
870 | client = _client(monkeypatch)
871 | client.run_detector(1, 2)
872 | assert len(responses.calls) == 3
873 |
874 |
875 | @responses.activate
876 | def test_run_detector_secondary_raster(monkeypatch):
877 | add_mock_detector_run_responses(1, 2, 3)
878 | client = _client(monkeypatch)
879 | client.run_detector(1, 2, 3)
880 | assert len(responses.calls) == 3
881 |
882 |
883 | @responses.activate
884 | def test_download_result_to_file(monkeypatch):
885 | expected_content = add_mock_download_result_response(101, 1)["class_1"]
886 | client = _client(monkeypatch)
887 | with tempfile.NamedTemporaryFile() as f:
888 | client.download_result_to_file(101, f.name)
889 | assert open(f.name).read() == expected_content
890 | assert len(responses.calls) == 2
891 |
892 |
893 | @responses.activate
894 | def test_download_result_to_feature_collection(monkeypatch):
895 | add_mock_download_result_response(101, 2)
896 | add_mock_vector_layer_download_responses("layer_1", 10)
897 | add_mock_vector_layer_download_responses("layer_2", 20)
898 | client = _client(monkeypatch)
899 | with tempfile.NamedTemporaryFile() as f:
900 | client.download_result_to_feature_collection(101, f.name)
901 | with open(f.name) as fr:
902 | fc = json.load(fr)
903 | assert fc["type"] == "FeatureCollection" and len(fc["features"]) == 2
904 | class_1_index = (
905 | 0 if fc["features"][0]["properties"]["class_name"] == "class_1" else 1
906 | )
907 | feat1 = fc["features"][class_1_index]
908 | assert feat1["type"] == "Feature"
909 | assert feat1["properties"]["class_name"] == "class_1"
910 | assert feat1["geometry"] == make_geojson_multipolygon(10)
911 | assert len(feat1["geometry"]["coordinates"]) == 10
912 | assert isinstance(feat1["geometry"]["coordinates"][0][0][0][0], (int, float))
913 | feat2 = fc["features"][(class_1_index + 1) % 2]
914 | assert feat2["type"] == "Feature" and feat2["geometry"]["type"] == "MultiPolygon"
915 | assert feat2["properties"]["class_name"] == "class_2"
916 | assert len(feat2["geometry"]["coordinates"]) == 20
917 | assert isinstance(feat2["geometry"]["coordinates"][0][0][0][0], (int, float))
918 | assert len(responses.calls) == 7
919 |
920 |
921 | @responses.activate
922 | @pytest.mark.parametrize(
923 | "annotation_type", ["outline", "training_area", "testing_area", "validation_area"]
924 | )
925 | def test_upload_annotations(monkeypatch, annotation_type):
926 | add_mock_annotations_responses(1, 2, annotation_type)
927 | add_mock_operations_responses("running")
928 | add_mock_operations_responses("running")
929 | add_mock_operations_responses("success")
930 | client = _client(monkeypatch)
931 | client.set_annotations(1, 2, annotation_type, {})
932 | assert len(responses.calls) == 6
933 |
934 |
935 | @responses.activate
936 | def test_upload_annotations_class_id(monkeypatch):
937 | add_mock_annotations_responses(1, 2, "outline", class_id="42")
938 | add_mock_operations_responses("success")
939 | client = _client(monkeypatch)
940 | client.set_annotations(1, 2, "outline", {}, class_id="42")
941 |
942 |
943 | @responses.activate
944 | def test_train_detector(monkeypatch):
945 | add_mock_detector_train_responses(1)
946 | add_mock_operations_responses("running")
947 | add_mock_operations_responses("running")
948 | add_mock_operations_responses(
949 | "success",
950 | results={
951 | "score": 92,
952 | "stats": {
953 | "rasters_count": 1,
954 | "training_areas_count": 2,
955 | "assessment_areas_count": 10,
956 | "validation_areas_count": 5,
957 | "total_annotations_count": 4,
958 | "training_annotations_count": 3,
959 | "validation_annotations_count": 1,
960 | },
961 | },
962 | )
963 | client = _client(monkeypatch)
964 | op = client.train_detector(1)
965 | assert op["results"]["score"] == 92
966 | assert len(responses.calls) == 4
967 |
968 |
969 | @responses.activate
970 | def test_run_dataset_recommendation(monkeypatch):
971 | add_mock_run_dataset_recommendation_responses(1)
972 | add_mock_operations_responses("running")
973 | add_mock_operations_responses("running")
974 | add_mock_operations_responses("success")
975 | client = _client(monkeypatch)
976 | op = client.run_dataset_recommendation(1)
977 | assert op["status"] == "success"
978 | assert len(responses.calls) == 4
979 |
980 |
981 | @pytest.mark.parametrize(("name", "color"), ((None, None), ("foobar", "#aabbcc")))
982 | @responses.activate
983 | def test_upload_vector_layer(monkeypatch, name, color):
984 | add_mock_vector_layer_responses(11, 22, name, color)
985 | add_mock_operations_responses("running")
986 | add_mock_operations_responses("success", results={"vector_layer_id": "spam"})
987 | client = _client(monkeypatch)
988 | with tempfile.NamedTemporaryFile() as f:
989 | assert client.upload_vector_layer(22, f.name, name, color) == "spam"
990 | assert len(responses.calls) == 5 # upload req, upload PUT, commit + 2 op polling
991 |
992 |
993 | @responses.activate
994 | def test_delete_vector_layer(monkeypatch):
995 | LAYER_ID = "foobar"
996 | client = _client(monkeypatch)
997 | add_mock_delete_vector_layer_response(LAYER_ID)
998 | client.delete_vector_layer(LAYER_ID)
999 | assert len(responses.calls) == 1
1000 |
1001 |
1002 | @responses.activate
1003 | def test_edit_vector_layer(monkeypatch):
1004 | LAYER_ID = "foobar"
1005 | client = _client(monkeypatch)
1006 | add_mock_edit_vector_layer_response(LAYER_ID, color="#ffffff", name="spam")
1007 | client.edit_vector_layer(LAYER_ID, color="#ffffff", name="spam")
1008 | assert len(responses.calls) == 1
1009 |
1010 |
1011 | @responses.activate
1012 | def test_download_vector_layer_to_file(monkeypatch):
1013 | polygons_fc = add_mock_vector_layer_download_responses("foobar", 2)
1014 | client = _client(monkeypatch)
1015 | with tempfile.NamedTemporaryFile() as fp:
1016 | client.download_vector_layer_to_file("foobar", fp.name)
1017 | fc = json.load(fp)
1018 | assert fc["type"] == "FeatureCollection"
1019 | assert fc == polygons_fc and len(fc["features"]) == 2
1020 | assert fc["features"][0]["geometry"]["type"] == "Polygon"
1021 | assert isinstance(fc["features"][1]["geometry"]["coordinates"][0][0][0], (int, float))
1022 | assert len(responses.calls) == 3 # POST /download, GET /operations, GET url
1023 |
1024 |
1025 | @responses.activate
1026 | def test_list_raster_markers(monkeypatch):
1027 | client = _client(monkeypatch)
1028 | add_mock_raster_markers_list_response("spam")
1029 | rasters = client.list_raster_markers("spam")
1030 | assert rasters[0]["id"] == "1"
1031 | rasters = client.list_raster_markers("spam", page_number=2)
1032 | assert rasters[0]["id"] == "3"
1033 | assert len(responses.calls) == 2
1034 |
1035 |
1036 | @responses.activate
1037 | def test_raster_markers_creation(monkeypatch):
1038 | client = _client(monkeypatch)
1039 | add_mock_marker_creation_response("spam", "foo", "bar", [12.34, 56.78], "foobar")
1040 | marker = client.create_marker("foo", "bar", 12.34, 56.78, "foobar")
1041 | assert marker["id"] == "spam"
1042 |
1043 |
1044 | @responses.activate
1045 | def test_create_raster_marker(monkeypatch):
1046 | client = _client(monkeypatch)
1047 | add_mock_marker_creation_response(
1048 | "id123", "rasterid123", None, [43.21, 87.65], "comment"
1049 | )
1050 | marker = client.create_marker("rasterid123", None, 43.21, 87.65, "comment")
1051 | assert marker["id"] == "id123"
1052 |
1053 |
1054 | @responses.activate
1055 | def test_list_folder_detectors(monkeypatch):
1056 | client = _client(monkeypatch)
1057 | add_mock_folder_detector_response("folder_id123")
1058 | detector_list = client.list_folder_detectors("folder_id123")
1059 | assert len(detector_list) == 2
1060 | assert detector_list[0]["id"] == "id1"
1061 | detector_list = detector_list.next()
1062 | assert detector_list[0]["id"] == "id3"
1063 | assert len(responses.calls) == 2
1064 |
1065 |
1066 | @responses.activate
1067 | def test_list_raster_vector_layers(monkeypatch):
1068 | client = _client(monkeypatch)
1069 | add_mock_vector_layers_filtered_list_response(0, "raster1")
1070 | add_mock_vector_layers_filtered_list_response(1, "raster1", "spam", "detector1")
1071 | assert client.list_raster_vector_layers("raster1")[0]["id"] == "0"
1072 | assert (
1073 | client.list_raster_vector_layers("raster1", "spam", "detector1")[0]["name"]
1074 | == "layer_1"
1075 | )
1076 |
1077 |
1078 | @responses.activate
1079 | def test_run_advanced_tool(monkeypatch):
1080 | _add_api_response(
1081 | detector_api_url("advanced_tools/foobar/run/"),
1082 | responses.POST,
1083 | json=OP_RESP,
1084 | match=responses.matchers.json_params_matcher(
1085 | {
1086 | "inputs": {"foo": "bar"},
1087 | "outputs": {"spam": [1, 2], "bar": {"foo": None, "bar": 4}},
1088 | }
1089 | ),
1090 | )
1091 | add_mock_operations_responses("success")
1092 | client = _client(monkeypatch)
1093 | assert (
1094 | client.run_advanced_tool(
1095 | "foobar", {"foo": "bar"}, {"spam": [1, 2], "bar": {"foo": None, "bar": 4}}
1096 | )["type"]
1097 | == "mock_operation_type"
1098 | )
1099 | assert len(responses.calls) == 2
1100 |
1101 |
1102 | @responses.activate
1103 | def test_import_raster_from_remote_source(monkeypatch):
1104 | body = {
1105 | "method": "streaming",
1106 | "source_id": "source",
1107 | "folder_id": "project",
1108 | "name": "image",
1109 | }
1110 | add_mock_remote_import_responses("upload_id", body)
1111 | add_mock_operations_responses("success")
1112 |
1113 | client = _client(monkeypatch)
1114 | # This just tests that this doesn't raise
1115 | with tempfile.NamedTemporaryFile() as f:
1116 | assert (
1117 | client.import_raster_from_remote_source(
1118 | "image", "project", "source", f.name
1119 | )
1120 | == "foo"
1121 | )
1122 | assert len(responses.calls) == 4
1123 |
1124 |
1125 | @responses.activate
1126 | def test_list_detector_rasters(monkeypatch):
1127 | client = _client(monkeypatch)
1128 | add_mock_rasters_list_response(detector_api_url("detectors/spam/training_rasters/"))
1129 | page1 = client.list_detector_rasters("spam")
1130 | assert page1[0]["name"] == "raster1" and page1[1]["name"] == "raster2"
1131 | page2 = client.list_detector_rasters("spam", page_number=2)
1132 | assert page2[0]["name"] == "raster3" and page2[1]["name"] == "raster4"
1133 | assert len(responses.calls) == 2
1134 |
1135 |
1136 | @responses.activate
1137 | def test_folder_creation(monkeypatch):
1138 | client = _client(monkeypatch)
1139 | add_mock_folder_creation_response("folder-id", "folder-name")
1140 | assert client.create_folder("folder-name") == "folder-id"
1141 |
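1142 | 
1143 | # Most tests in this module share one pattern: register the mock HTTP
1144 | # responses first, build a client against the mock base URL with _client(),
1145 | # call the method under test, then assert on both the return value and on
1146 | # responses.calls to catch unexpected extra requests. A minimal sketch
1147 | # (the endpoint below is illustrative, not a real API route):
1148 | #
1149 | #     @responses.activate
1150 | #     def test_example(monkeypatch):
1151 | #         _add_api_response(detector_api_url("example/"), responses.POST, OP_RESP)
1152 | #         add_mock_operations_responses("success")
1153 | #         client = _client(monkeypatch)
1154 | #         ...  # call the client method under test, then:
1155 | #         assert len(responses.calls) == 2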
--------------------------------------------------------------------------------
/tests/test_plots_analysis_client.py:
--------------------------------------------------------------------------------
1 | import datetime
2 | import json
3 | import tempfile
4 |
5 | import responses
6 |
7 | from picterra import PlotsAnalysisPlatformClient
8 | from tests.utils import (
9 | OP_RESP,
10 | OPERATION_ID,
11 | _add_api_response,
12 | _client,
13 | plots_analysis_api_url,
14 | )
15 |
16 |
17 | def test_plots_analysis_platform_client_base_url(monkeypatch):
18 | """
19 |     Sanity-check that the client defaults to the correct base URL
20 | """
21 | monkeypatch.setenv("PICTERRA_API_KEY", "1234")
22 | client = PlotsAnalysisPlatformClient()
23 | assert client.base_url == "https://app.picterra.ch/public/api/plots_analysis/v1/"
24 |
25 |
26 | @responses.activate
27 | def test_analyse_plots(monkeypatch):
28 | # Setup the fake api responses
29 | fake_analysis_id = "1234-4321-5678"
30 |     fake_analysis_results = {"foo": "bar"}
31 | _add_api_response(
32 | plots_analysis_api_url("batch_analysis/upload/"),
33 | responses.POST,
34 | {
35 | "analysis_id": fake_analysis_id,
36 | "upload_url": "https://example.com/upload/to/blobstore?key=123567",
37 | },
38 | )
39 |
40 | responses.put("https://example.com/upload/to/blobstore?key=123567")
41 |
42 | _add_api_response(plots_analysis_api_url(f"batch_analysis/start/{fake_analysis_id}/"), responses.POST, OP_RESP)
43 | _add_api_response(plots_analysis_api_url(f"operations/{OPERATION_ID}/"), responses.GET, {
44 | "status": "success",
45 | "results": {
46 | "download_url": "https://example.com/blobstore/results",
47 | "expiration": "2022-12-31",
48 | }
49 | })
50 | responses.get(
51 | "https://example.com/blobstore/results",
52 |         body=json.dumps(fake_analysis_results),
53 | )
54 |
55 | client: PlotsAnalysisPlatformClient = _client(monkeypatch, platform="plots_analysis")
56 | with tempfile.NamedTemporaryFile() as tmp:
57 | with open(tmp.name, "w") as f:
58 | json.dump({"foo": "bar"}, f)
59 | results = client.batch_analyze_plots(
60 | tmp.name,
61 | methodology="eudr_cocoa",
62 | assessment_date=datetime.date.fromisoformat("2020-01-01"),
63 | )
64 | assert results == fake_analysis_results
65 |
66 |
67 | @responses.activate
68 | def test_create_plots_group(monkeypatch):
69 | _add_api_response(
70 | plots_analysis_api_url("plots_groups/upload/"),
71 | responses.POST,
72 | {
73 | "upload_id": "an-upload",
74 | "upload_url": "https://upload.example.com/",
75 | },
76 | )
77 | responses.put("https://upload.example.com/")
78 | _add_api_response(
79 | plots_analysis_api_url("plots_groups/commit/"),
80 | responses.POST,
81 | OP_RESP,
82 | match=responses.matchers.json_params_matcher({
83 | "name": "name of my plot group",
84 | "methodology": "eudr_cocoa",
85 | "upload_id": "an-upload",
86 | "custom_columns_values": {"foo": "bar"}
87 | }),
88 | )
89 | _add_api_response(plots_analysis_api_url(f"operations/{OPERATION_ID}/"), responses.GET, {
90 | "status": "success",
91 | "results": {"plots_group_id": "a-plots-group"}
92 | })
93 | client: PlotsAnalysisPlatformClient = _client(monkeypatch, platform="plots_analysis")
94 | with tempfile.NamedTemporaryFile() as tmp:
95 | with open(tmp.name, "w") as f:
96 | json.dump({"type": "FeatureCollection", "features": []}, f)
97 | assert client.create_plots_group(
98 | "name of my plot group",
99 | "eudr_cocoa",
100 | {"foo": "bar"},
101 | tmp.name,
102 | ) == "a-plots-group"
103 |
104 |
105 | @responses.activate
106 | def test_replace_plots_group_plots(monkeypatch):
107 | _add_api_response(
108 | plots_analysis_api_url("plots_groups/upload/"),
109 | responses.POST,
110 | {
111 | "upload_id": "an-upload",
112 | "upload_url": "https://upload.example.com/",
113 | },
114 | )
115 | responses.put("https://upload.example.com/")
116 |     _add_api_response(
117 |         plots_analysis_api_url("plots_groups/group-id/replace/"),
118 | responses.POST,
119 | OP_RESP,
120 | match=responses.matchers.json_params_matcher({
121 | "upload_id": "an-upload",
122 | }),
123 | )
124 | _add_api_response(plots_analysis_api_url(f"operations/{OPERATION_ID}/"), responses.GET, {
125 | "status": "success",
126 | })
127 | client: PlotsAnalysisPlatformClient = _client(monkeypatch, platform="plots_analysis")
128 | with tempfile.NamedTemporaryFile() as tmp:
129 | with open(tmp.name, "w") as f:
130 | json.dump({"type": "FeatureCollection", "features": []}, f)
131 | client.replace_plots_group_plots("group-id", tmp.name)
132 |
133 |
134 | @responses.activate
135 | def test_group_analyze_plots(monkeypatch):
136 | _add_api_response(
137 | plots_analysis_api_url("plots_groups/a-group-id/analysis/upload/"),
138 | responses.POST,
139 | {
140 | "upload_id": "an-upload-id",
141 | "upload_url": "https://upload.example.com/",
142 | },
143 | )
144 | responses.put("https://upload.example.com/", match=[responses.matchers.json_params_matcher({
145 | "plot_ids": ["uno", "dos"],
146 | })])
147 |     _add_api_response(
148 |         plots_analysis_api_url("plots_groups/a-group-id/analysis/"),
149 | responses.POST,
150 | OP_RESP,
151 | match=responses.matchers.json_params_matcher({
152 | "analysis_name": "foobar",
153 | "upload_id": "an-upload-id",
154 | "assessment_date": "2025-01-01",
155 | }),
156 | )
157 | _add_api_response(plots_analysis_api_url(f"operations/{OPERATION_ID}/"), responses.GET, {
158 | "status": "success",
159 | "results": {"analysis_id": "an-analysis-id"}
160 | })
161 | _add_api_response(
162 | plots_analysis_api_url("plots_groups/a-group-id/analysis/an-analysis-id/"),
163 | responses.GET,
164 | {"url": "http://analysis.example.com"}
165 | )
166 | client: PlotsAnalysisPlatformClient = _client(monkeypatch, platform="plots_analysis")
167 |     # No plots file is uploaded here: the selected plot ids are PUT as JSON
168 |     assert client.group_analyze_plots(
169 |         "a-group-id",
170 |         "foobar",
171 |         ["uno", "dos"],
172 |         datetime.date.fromisoformat("2025-01-01"),
173 |     ) == "http://analysis.example.com"
174 | 
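175 | 
176 | # All plots-analysis flows in this module share the same three-step protocol:
177 | # 1. POST the relevant .../upload/ endpoint to get an upload_id and signed URL,
178 | # 2. PUT the payload to the signed URL, then POST the start/commit endpoint,
179 | # 3. poll operations/<id>/ until "success" and read the operation results.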
--------------------------------------------------------------------------------
/tests/utils.py:
--------------------------------------------------------------------------------
1 | from urllib.parse import urljoin
2 |
3 | import responses
4 |
5 | from picterra import DetectorPlatformClient, PlotsAnalysisPlatformClient
6 |
7 |
8 | def _add_api_response(
9 | path, verb=responses.GET, json=None, match=None, body=None, status=None
10 | ):
11 |     if status:
12 |         expected_status = status
13 |     elif verb == responses.GET:
14 |         expected_status = 200
15 |     elif verb == responses.POST:
16 |         expected_status = 201
17 |     elif verb in (responses.PUT, responses.DELETE):
18 |         expected_status = 204
19 |     else:
20 |         # Default for verbs not listed above, avoiding an unbound variable
21 |         expected_status = 200
22 | matchers = [responses.matchers.header_matcher({"X-Api-Key": "1234"})]
23 | if match:
24 | matchers.append(match)
25 | responses.add(
26 | verb,
27 | path,
28 | body=body,
29 | json=json,
30 | match=matchers,
31 | status=expected_status,
32 | )
33 |
34 |
35 | def _client(monkeypatch, platform="detector", max_retries=0, timeout=1, **kwargs):
36 | monkeypatch.setenv("PICTERRA_BASE_URL", TEST_API_URL)
37 | monkeypatch.setenv("PICTERRA_API_KEY", "1234")
38 | if platform == "detector":
39 | client = DetectorPlatformClient(timeout=timeout, max_retries=max_retries, **kwargs)
40 | elif platform == "plots_analysis":
41 | client = PlotsAnalysisPlatformClient(timeout=timeout, max_retries=max_retries, **kwargs)
42 | else:
43 | raise NotImplementedError(f"Unrecognised API platform {platform}")
44 | return client
45 |
46 |
47 | def detector_api_url(path):
48 | return urljoin(TEST_API_URL, urljoin("public/api/v2/", path))
49 |
50 |
51 | def plots_analysis_api_url(path):
52 | return urljoin(TEST_API_URL, urljoin("public/api/plots_analysis/v1/", path))
53 |
54 |
55 | TEST_API_URL = "http://example.com/"
56 | TEST_POLL_INTERVAL = 0.1
57 | OPERATION_ID = 21
58 | OP_RESP = {"operation_id": OPERATION_ID, "poll_interval": TEST_POLL_INTERVAL}
59 |
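60 | # Notes on the helpers above (for test authors):
61 | # - _add_api_response() always attaches an X-Api-Key header matcher, so every
62 | #   test implicitly verifies that the client authenticates its requests.
63 | # - Default statuses follow the API conventions: 200 for GET, 201 for POST,
64 | #   204 for PUT/DELETE; pass status= to override (e.g. to test error paths).
65 | # - OP_RESP mimics the long-running-operation envelope returned by the API;
66 | #   tests pair it with a mocked operations/<id>/ response to resolve the poll.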
--------------------------------------------------------------------------------