├── docs
├── _static
├── requirements.txt
├── api.rst
├── index.rst
├── make.bat
├── Makefile
├── getting_started.rst
└── conf.py
├── tests
├── __init__.py
├── utils.py
├── test_base_client.py
├── test_nongeo.py
└── test_tracer_client.py
├── examples
├── data
│ ├── raster1.tif
│ ├── xy-test-image.jpeg
│ ├── training_area.geojson
│ ├── validation_area.geojson
│ ├── outline3.geojson
│ ├── outline.geojson
│ └── outline2.geojson
├── tracer
│ └── plots_analysis.py
└── forge
│ ├── upload_and_detect.py
│ ├── detectors_management.py
│ ├── detect_on_project.py
│ ├── training.py
│ ├── raster_management.py
│ ├── training_multiclass_upload.py
│ └── nongeo_results.geojson
├── .zed
└── settings.json
├── setup.cfg
├── .gitignore
├── scripts
├── lint.sh
└── check_commit_titles.sh
├── .github
└── workflows
│ ├── python-publish-pypi.yml
│ ├── python-publish-testpypi.yml
│ ├── codeql-analysis.yml
│ └── lint_test.yml
├── CHANGELOG.md
├── src
└── picterra
│ ├── __init__.py
│ ├── nongeo.py
│ ├── base_client.py
│ ├── tracer_client.py
│ └── forge_client.py
├── setup.py
├── LICENSE
├── .readthedocs.yaml
└── README.md
/docs/_static:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/docs/requirements.txt:
--------------------------------------------------------------------------------
1 | sphinx==7.2.6
2 | sphinx-rtd-theme
3 | sphinx-autobuild
--------------------------------------------------------------------------------
/examples/data/raster1.tif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Picterra/picterra-python/HEAD/examples/data/raster1.tif
--------------------------------------------------------------------------------
/examples/data/xy-test-image.jpeg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Picterra/picterra-python/HEAD/examples/data/xy-test-image.jpeg
--------------------------------------------------------------------------------
/.zed/settings.json:
--------------------------------------------------------------------------------
1 | {
2 | "format_on_save": "off",
3 | "languages": {
4 | "Python": {
5 | "wrap_guides": [100]
6 | }
7 | }
8 | }
9 |
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [aliases]
2 | test=pytest
3 |
4 | [tool:pytest]
5 | testpaths = tests
6 | addopts = --verbose
7 |
8 | [flake8]
9 | ignore=E266,W504
10 | max-line-length=100
11 | filename=src
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .DS_Store
2 | .cache
3 | .vscode
4 | .venv
5 | *.egg-info
6 | docs/_build
7 | .eggs
8 | .ipynb_checkpoints
9 | .pytest_cache
10 | __pycache__
11 | venv
12 | .vscode
13 | dist
14 | examples/result.geojson
15 | build
--------------------------------------------------------------------------------
/scripts/lint.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Run this from the project root directory
3 | printf "==== Running flake8\n"
4 | python -m flake8
5 | printf "==== Running mypy\n"
6 | mypy src examples
7 | printf "==== Running black\n"
8 | black --check --diff src
9 |
--------------------------------------------------------------------------------
/examples/data/training_area.geojson:
--------------------------------------------------------------------------------
1 | {
2 | "type": "FeatureCollection",
3 | "features": [
4 | { "type": "Feature", "properties": { "type": null }, "geometry": { "type": "Polygon", "coordinates": [ [ [ 8.71477033469016, 47.747693064169439 ], [ 8.714753220903143, 47.747369138417419 ], [ 8.715131267184505, 47.747360808160593 ], [ 8.715123611819278, 47.747686051231959 ], [ 8.71477033469016, 47.747693064169439 ] ] ] } }
5 | ]
6 | }
7 |
--------------------------------------------------------------------------------
/examples/data/validation_area.geojson:
--------------------------------------------------------------------------------
1 | {
2 | "type": "FeatureCollection",
3 | "features": [
4 | { "type": "Feature", "properties": { "type": null }, "geometry": { "type": "Polygon", "coordinates": [ [ [ 8.71452123764978, 47.747706421540165 ], [ 8.714504123862763, 47.747382495788145 ], [ 8.714751102777912, 47.747382686491655 ], [ 8.714751278242293, 47.7477060728949 ], [ 8.71452123764978, 47.747706421540165 ] ] ] } }
5 | ]
6 | }
7 |
--------------------------------------------------------------------------------
/docs/api.rst:
--------------------------------------------------------------------------------
1 | ===
2 | API
3 | ===
4 |
5 |
6 | Forge
7 | -----------------
8 |
9 | .. autoclass:: picterra.ForgeClient
10 | :members:
11 |
12 | .. autoclass:: picterra.APIClient
13 | :members:
14 |
15 | .. automodule:: picterra.nongeo
16 | :members:
17 |
18 |
19 | Tracer
20 | -----------------------
21 |
22 | .. autoclass:: picterra.TracerClient
23 | :members:
24 |
25 |
26 | Utility classes
27 | ---------------
28 |
29 | .. autoclass:: picterra.ResultsPage
30 | :members:
31 |
32 | .. autoclass:: picterra.APIError
33 | :members:
--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
1 | Picterra Python API
2 | ===================
3 |
4 | Easily integrate state-of-the-art machine learning models in your app
5 |
6 | .. raw:: html
7 |
8 |
9 |
10 | Learn more about `Picterra <https://picterra.ch>`_
11 |
12 | If you are looking for our REST API documentation, `look here <https://app.picterra.ch/public/apidocs/>`_
13 |
14 | ----
15 |
16 | .. toctree::
17 | :maxdepth: 2
18 | :caption: Contents:
19 |
20 | getting_started
21 | api
22 |
23 |
--------------------------------------------------------------------------------
/examples/tracer/plots_analysis.py:
--------------------------------------------------------------------------------
1 | import datetime
2 |
3 | from picterra import TracerClient
4 |
5 | # Replace this with the id of the plots group to analyze
6 | plots_group_id = "3c3d947c-d982-4af7-ac09-00806b81a216"
7 |
8 | client = TracerClient()
9 |
10 | print("Starting analysis...")
11 | analysis_id = client.analyze_plots(
12 | plots_group_id,
13 | "New analysis",
14 | ["plotid_1", "plotid_2", "plotid_3"],
15 | datetime.date.fromisoformat("2022-01-01"),
16 | datetime.date.fromisoformat("2024-01-01")
17 | )
18 | url = client.get_plots_analysis(analysis_id, plots_group_id)["url"]
19 |
20 |
21 | print("Analysis completed: you can open it at the following URL:" + url)
22 |
--------------------------------------------------------------------------------
/examples/data/outline3.geojson:
--------------------------------------------------------------------------------
1 | {
2 | "type": "FeatureCollection",
3 | "name": "outline3",
4 | "features": [
5 | { "type": "Feature", "properties": { }, "geometry": { "type": "Polygon", "coordinates": [ [ [ 8.714583380036869, 47.747500798517294 ], [ 8.714557921238089, 47.747439697400225 ], [ 8.714663150939707, 47.747434039889384 ], [ 8.714664282441875, 47.74749627250862 ], [ 8.714583380036869, 47.747500798517294 ] ], [ [ 8.714588471796624, 47.747486513302427 ], [ 8.714585643041204, 47.747477461285079 ], [ 8.714594977934089, 47.74747562259406 ], [ 8.714598513878364, 47.747484250298086 ], [ 8.714588471796624, 47.747486513302427 ] ], [ [ 8.714621426797265, 47.74747406677858 ], [ 8.714615910724195, 47.747454265490639 ], [ 8.714645188342791, 47.747450022357505 ], [ 8.714650562978088, 47.747469965083219 ], [ 8.714621426797265, 47.74747406677858 ] ] ] } }
6 | ]
7 | }
8 |
--------------------------------------------------------------------------------
/.github/workflows/python-publish-pypi.yml:
--------------------------------------------------------------------------------
1 | # https://packaging.python.org/guides/publishing-package-distribution-releases-using-github-actions-ci-cd-workflows/
2 | # https://github.com/marketplace/actions/pypi-publish
3 | name: upload to pypi
4 |
5 | on:
6 | release:
7 | types: [created]
8 |
9 | jobs:
10 | publish:
11 | runs-on: ubuntu-latest
12 | steps:
13 | - uses: actions/checkout@v4
14 | - name: setup python
15 | uses: actions/setup-python@v4
16 | with:
17 | python-version: '3.12'
18 | - name: install deps
19 | run: |
20 | python -m pip install --upgrade pip setuptools wheel
21 | - name: build
22 | run: |
23 | python setup.py sdist bdist_wheel
24 | - name: publish to pypi
25 | uses: pypa/gh-action-pypi-publish@release/v1
26 | with:
27 | password: ${{ secrets.pypi_password }}
28 |
--------------------------------------------------------------------------------
/examples/data/outline.geojson:
--------------------------------------------------------------------------------
1 | {
2 | "type": "FeatureCollection",
3 | "features": [
4 | {
5 | "type": "Feature",
6 | "properties": {},
7 | "geometry": {
8 | "type": "Polygon",
9 | "coordinates": [
10 | [
11 | [
12 | 8.714802861213684,
13 | 47.74745244535212
14 | ],
15 | [
16 | 8.714770674705505,
17 | 47.74739833982634
18 | ],
19 | [
20 | 8.714942336082457,
21 | 47.747373090561716
22 | ],
23 | [
24 | 8.714987933635712,
25 | 47.74745424886868
26 | ],
27 | [
28 | 8.714802861213684,
29 | 47.74745244535212
30 | ]
31 | ]
32 | ]
33 | }
34 | }
35 | ]
36 | }
--------------------------------------------------------------------------------
/examples/forge/upload_and_detect.py:
--------------------------------------------------------------------------------
1 | from picterra import APIClient
2 |
3 | # Replace this with the id of one of your detectors
4 | detector_id = "d552605b-6972-4a68-8d51-91e6cb531c24"
5 | # Replace this with the id of a folder in which the
6 | # raster should be uploaded.
7 | folder_id = "63207fe9-32b8-410f-a72d-00803cca7bf3"
8 |
9 | # Set the PICTERRA_API_KEY environment variable to define your API key
10 | client = APIClient()
11 | print("Uploading raster...")
12 | raster_id = client.upload_raster(
13 | "data/raster1.tif",
14 | name="a nice raster",
15 | folder_id=folder_id,
16 | captured_at="2020-01-01T12:34:45.789Z",
17 | )
18 | print("Upload finished, starting detector...")
19 | result_id = client.run_detector(detector_id, raster_id)
20 | client.download_result_to_feature_collection(result_id, "result.geojson")
21 | print("Detection finished, results are in result.geojson")
22 |
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # Changelog
2 |
3 | ## v3.3.0 (2025-10-01)
4 |
5 | ### Tracer
6 | * [api][breaking] Have 'analyze_plots' return id, not full metadata
7 | * [api][breaking] Rename list_plots_analyses_report_types to list_plots_analysis_report_types
8 | * [api][breaking] Change order and deprecate arguments for the following functions:
9 | * list_plots_analysis_reports
10 | * list_plots_analysis_report_types
11 | * create_plots_analysis_report_precheck
12 | * create_plots_analysis_report
13 | * get_plots_analysis
14 | * get_plots_analysis_report
15 | * [api] Add report list, types list, get, precheck, creation and groups and analysis get
16 | * [api] Extract upload and HTTP response check helpers
17 | * [doc] Fix readme release process section list
18 | * [api] Use new simplified URLs
19 | * [devops] Enforce a commit message pattern
20 | * [api] Add the ability to include archived plots groups, plots analysis and plot analysis reports
21 |
--------------------------------------------------------------------------------
/examples/forge/detectors_management.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | from picterra import APIClient
4 |
5 | # Set the PICTERRA_API_KEY environment variable to define your API key
6 | client = APIClient()
7 |
8 | # Create a new detector (its type is 'count' by default)
9 | detector_id = client.create_detector("My first detector")
10 |
11 | # Edit the above detector
12 | client.edit_detector(detector_id, "Renamed detector", "segmentation", "bbox", 1000)
13 |
14 | # List existing detectors
15 | detectors_page_1 = client.list_detectors()
16 | print("Page has " + str(len(detectors_page_1)) + " elements")
17 | d = detectors_page_1[0]
18 | print(
19 | "detector id=%s, name=%s, detection_type=%s, output_type=%s, training_steps=%d"
20 | % (
21 | d["id"],
22 | d["name"],
23 | d["configuration"]["detection_type"],
24 | d["configuration"]["output_type"],
25 | d["configuration"]["training_steps"],
26 | )
27 | )
28 |
--------------------------------------------------------------------------------
/docs/make.bat:
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 |
3 | pushd %~dp0
4 |
5 | REM Command file for Sphinx documentation
6 |
7 | if "%SPHINXBUILD%" == "" (
8 | set SPHINXBUILD=sphinx-build
9 | )
10 | set SOURCEDIR=.
11 | set BUILDDIR=_build
12 |
13 | if "%1" == "" goto help
14 |
15 | %SPHINXBUILD% >NUL 2>NUL
16 | if errorlevel 9009 (
17 | echo.
18 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
19 | echo.installed, then set the SPHINXBUILD environment variable to point
20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you
21 | echo.may add the Sphinx directory to PATH.
22 | echo.
23 | echo.If you don't have Sphinx installed, grab it from
24 | echo.http://sphinx-doc.org/
25 | exit /b 1
26 | )
27 |
28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
29 | goto end
30 |
31 | :help
32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
33 |
34 | :end
35 | popd
36 |
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line, and also
5 | # from the environment for the first two.
6 | #
7 | # -W turns warnings into errors
8 | # --keep-going avoid stopping the build on first warning (but it'll
9 | # still fail if any warnings was triggered)
10 | SPHINXOPTS ?= -W --keep-going
11 | SPHINXBUILD ?= sphinx-build
12 | SOURCEDIR = .
13 | BUILDDIR = _build
14 |
15 | # Put it first so that "make" without argument is like "make help".
16 | help:
17 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
18 |
19 | .PHONY: help Makefile
20 |
21 | # Catch-all target: route all unknown targets to Sphinx using the new
22 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
23 | %: Makefile
24 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
25 |
26 | autobuild:
27 | sphinx-autobuild . _build/html --port 9999
28 |
--------------------------------------------------------------------------------
/examples/forge/detect_on_project.py:
--------------------------------------------------------------------------------
1 | """
2 | Demonstrate running a detector on all images within a project
3 | """
4 | from picterra import APIClient
5 |
6 | # Set the PICTERRA_API_KEY environment variable to define your API key
7 | client = APIClient()
8 | # The Id of a folder/project you own
9 | folder_id = "5cee6276-1c6b-4b00-9201-5d95f01b72b1"
10 | detector_id = "afa558e7-e004-4c76-9aa6-8df72d33e568"
11 |
12 |
13 | rasters = []
14 | page = client.list_rasters(folder_id)
15 | while True:
16 | rasters.extend(list(page))
17 | page = page.next()
18 | if page is None:
19 | break
20 |
21 | print(f"Detecting on {len(rasters)} rasters")
22 |
23 | operations = []
24 | for raster in rasters:
25 | # Note that this will run and wait for results (so sequentially). You could
26 | # alternatively manually call the /detectors/<detector_id>/run/ endpoint for all rasters
27 | # and then have them run in parallel
28 | op = client.run_detector(detector_id, raster["id"])
29 | operations.append(op)
30 |
--------------------------------------------------------------------------------
/.github/workflows/python-publish-testpypi.yml:
--------------------------------------------------------------------------------
1 | # This is intended to be run manually before creating a release to test
2 | # the publish to testpypi
3 |
4 | # https://packaging.python.org/guides/publishing-package-distribution-releases-using-github-actions-ci-cd-workflows/
5 | # https://github.com/marketplace/actions/pypi-publish
6 | name: upload to testpypi
7 |
8 | on: workflow_dispatch
9 |
10 | jobs:
11 | publish:
12 | runs-on: ubuntu-latest
13 | steps:
14 | - uses: actions/checkout@v4
15 | - name: setup python
16 | uses: actions/setup-python@v4
17 | with:
18 | python-version: '3.12'
19 | - name: install deps
20 | run: |
21 | python -m pip install --upgrade pip setuptools wheel
22 | - name: build
23 | run: |
24 | python setup.py sdist bdist_wheel
25 | - name: publish to test pypi
26 | uses: pypa/gh-action-pypi-publish@release/v1
27 | with:
28 | password: ${{ secrets.test_pypi_password }}
29 | repository_url: https://test.pypi.org/legacy/
30 |
31 |
--------------------------------------------------------------------------------
/examples/data/outline2.geojson:
--------------------------------------------------------------------------------
1 | {
2 | "type": "FeatureCollection",
3 | "name": "outline2",
4 | "features": [
5 | { "type": "Feature", "properties": { }, "geometry": { "type": "Polygon", "coordinates": [ [ [ 8.714555092482669, 47.74767278684682 ], [ 8.714546040465327, 47.747638841781779 ], [ 8.714588471796624, 47.747629789764439 ], [ 8.714595260809631, 47.74767278684682 ], [ 8.714555092482669, 47.74767278684682 ] ] ] } },
6 | { "type": "Feature", "properties": { }, "geometry": { "type": "Polygon", "coordinates": [ [ [ 8.714630903127921, 47.74767052384248 ], [ 8.714627508621417, 47.747636013026359 ], [ 8.714671071454882, 47.747635447275279 ], [ 8.714673334459219, 47.74766147182514 ], [ 8.714630903127921, 47.74767052384248 ] ] ] } },
7 | { "type": "Feature", "properties": { }, "geometry": { "type": "Polygon", "coordinates": [ [ [ 8.714807983217202, 47.747662037576227 ], [ 8.714807983217202, 47.747634315773112 ], [ 8.714844191286575, 47.747634315773112 ], [ 8.714842494033324, 47.74766147182514 ], [ 8.714807983217202, 47.747662037576227 ] ] ] } }
8 | ]
9 | }
10 |
--------------------------------------------------------------------------------
/src/picterra/__init__.py:
--------------------------------------------------------------------------------
1 | from .base_client import APIError, ResultsPage
2 |
3 | # Note that we import ForgeClient thrice, to export it under three names:
4 | # - ForgeClient as the name it should be used with
5 | # - APIClient and DetectorPlatformClient to preserve backward compatibility, since those were the names it was
6 | # exported under previously (when we originally had only one platform and API client).
7 | # Ditto for PlotsAnalysisPlatformClient / TracerClient
8 | from .forge_client import ForgeClient
9 | from .forge_client import ForgeClient as APIClient
10 | from .forge_client import ForgeClient as DetectorPlatformClient
11 | from .nongeo import nongeo_result_to_pixel
12 | from .tracer_client import TracerClient
13 | from .tracer_client import TracerClient as PlotsAnalysisPlatformClient
14 |
15 | __all__ = [
16 | "APIClient",
17 | "DetectorPlatformClient",
18 | "ForgeClient",
19 | "PlotsAnalysisPlatformClient",
20 | "TracerClient",
21 | "nongeo_result_to_pixel",
22 | "APIError",
23 | "ResultsPage",
24 | ]
25 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | # read the contents of your README file
4 | import sys
5 | from pathlib import Path
6 |
7 | from setuptools import find_packages, setup
8 |
9 | this_directory = Path(__file__).parent
10 | long_description = (this_directory / "README.md").read_text()
11 |
12 | if sys.version_info >= (3, 8):
13 | lint_deps = ["flake8", "mypy==1.8.0", "types-requests", "black"]
14 | else:
15 | lint_deps = ["flake8", "mypy==1.4.1", "types-requests", "black"]
16 | test_deps = ["pytest==7.1", "responses==0.22", "httpretty"]
17 |
18 | setup(
19 | name="picterra",
20 | version="3.0.0",
21 | description="Picterra API client",
22 | long_description=long_description,
23 | long_description_content_type="text/markdown",
24 | package_dir={"": "src"},
25 | packages=find_packages("src"),
26 | install_requires=[
27 | "requests",
28 | # We use the new `allowed_methods` option
29 | "urllib3>=1.26.0",
30 | ],
31 | extras_require={
32 | "test": test_deps,
33 | "lint": lint_deps,
34 | },
35 | )
36 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2020 Picterra
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/.readthedocs.yaml:
--------------------------------------------------------------------------------
1 | # Read the Docs configuration file for Sphinx projects
2 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
3 |
4 | # Required
5 | version: 2
6 |
7 | # Set the OS, Python version and other tools you might need
8 | build:
9 | os: ubuntu-22.04
10 | tools:
11 | python: "3.12"
12 | # You can also specify other tool versions:
13 | # nodejs: "20"
14 | # rust: "1.70"
15 | # golang: "1.20"
16 |
17 | # Build documentation in the "docs/" directory with Sphinx
18 | sphinx:
19 | configuration: docs/conf.py
20 | # You can configure Sphinx to use a different builder, for instance use the dirhtml builder for simpler URLs
21 | # builder: "dirhtml"
22 | # Fail on all warnings to avoid broken references
23 | # fail_on_warning: true
24 |
25 | # Optionally build your docs in additional formats such as PDF and ePub
26 | # formats:
27 | # - pdf
28 | # - epub
29 |
30 | # Optional but recommended, declare the Python requirements required
31 | # to build your documentation
32 | # See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html
33 | python:
34 | install:
35 | - requirements: docs/requirements.txt
36 | - method: pip
37 | path: .
--------------------------------------------------------------------------------
/scripts/check_commit_titles.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # This script checks that all the titles of the commits between the current branch and master
4 | # follow the "^(\[[a-zA-Z_]+\])+ .+$" regex.
5 |
6 | # Regex to match the commit title format
7 | COMMIT_TITLE_REGEX="^(\[[a-zA-Z_]+\])+ .+$"
8 |
9 | master=$1
10 | head=$2
11 |
12 |
13 | # Get the list of commit titles between the current branch and master:
14 | # using master..HEAD would not work so we pass it from GH event vars
15 | COMMIT_TITLES=$(git log $master..$head --pretty=format:%s)
16 |
17 | # Array to store offending commit titles
18 | OFFENDING_COMMIT_TITLES=()
19 |
20 | # Check each commit title against the regex
21 | while IFS= read -r title; do
22 | if ! [[ "$title" =~ $COMMIT_TITLE_REGEX ]]; then
23 | OFFENDING_COMMIT_TITLES+=("$title")
24 | fi
25 | done <<< "$COMMIT_TITLES"
26 |
27 | # Check if there are any offending commit titles
28 | if [ ${#OFFENDING_COMMIT_TITLES[@]} -ne 0 ]; then
29 | echo "Error: The following commit titles do not follow the format '([tag])+ title':"
30 | for title in "${OFFENDING_COMMIT_TITLES[@]}"; do
31 | echo "- $title"
32 | done
33 | exit 1
34 | else
35 | echo "Success: All commit titles are formatted correctly."
36 | fi
37 |
--------------------------------------------------------------------------------
/examples/forge/training.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import json
4 |
5 | from picterra import APIClient
6 |
7 | # Set the PICTERRA_API_KEY environment variable to define your API key
8 | client = APIClient()
9 |
10 | # Create a new detector (its type is 'count' by default)
11 | detector_id = client.create_detector("My first detector")
12 |
13 | # Upload a training raster for the detector above
14 | raster_id = client.upload_raster("data/raster1.tif", name="a nice raster")
15 | client.add_raster_to_detector(raster_id, detector_id)
16 |
17 | # Add annotations
18 | with open("data/outline.geojson") as f:
19 | outlines = json.load(f)
20 | client.set_annotations(detector_id, raster_id, "outline", outlines)
21 | with open("data/training_area.geojson") as f:
22 | training_areas = json.load(f)
23 | client.set_annotations(detector_id, raster_id, "training_area", training_areas)
24 | with open("data/validation_area.geojson") as f:
25 | validation_areas = json.load(f)
26 | client.set_annotations(detector_id, raster_id, "validation_area", validation_areas)
27 |
28 | # Train the detector
29 | client.train_detector(detector_id)
30 |
31 | # At this point your detector is ready to predict: see upload_and_detect.py in order
32 | # to launch a prediction on a raster; you can also use one of the raster already added above.
33 |
--------------------------------------------------------------------------------
/examples/forge/raster_management.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # -*- coding: utf-8 -*-
3 |
4 | from pprint import pprint
5 |
6 | from picterra import APIClient
7 |
8 | # Set the PICTERRA_API_KEY environment variable to define your API key
9 | client = APIClient()
10 | # The Id of a folder/project you own
11 | folder_id = "7ec40c11-f181-436a-9d33-d7b3f63e0e0f"
12 | # Upload
13 | local_raster_id = client.upload_raster("data/raster1.tif", name="A nice raster")
14 | print("Uploaded local raster=", local_raster_id)
15 | # Get the first batch of most recent images
16 | first_page = client.list_rasters()
17 | for raster in first_page:
18 | pprint("raster %s" % "\n".join(["%s=%s" % item for item in raster.items()]))
19 | # Get the second batch
20 | second_page = first_page.next()
21 | # Get the first page applying a filter
22 | for raster in client.list_rasters(folder_id):
23 | pprint("raster %s" % "\n".join(["%s=%s" % item for item in raster.items()]))
24 | # Upload, edition and removal
25 | local_raster_id = client.upload_raster("data/raster1.tif", name="A short-lived raster")
26 | print("Uploaded a second local raster=", local_raster_id)
27 | # Editing the image's band specification. See https://docs.picterra.ch/imagery/#Multispectral
28 | client.edit_raster(local_raster_id, multispectral_band_specification={
29 | "ranges": [
30 | [0, 128], [0, 128], [0, 128]
31 | ],
32 | "display_bands": [
33 | {"type": "multiband", "name": "default", "bands": [2, 1, 0]}
34 | ]
35 | })
36 | # Deleting the image
37 | client.delete_raster(local_raster_id)
38 | print("Deleted raster=", local_raster_id)
39 |
--------------------------------------------------------------------------------
/examples/forge/training_multiclass_upload.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | """
3 | This example shows how to upload outlines/annotations to a multiclass detector
4 |
5 | Instructions to test:
6 | - Upload 'raster1.tif' in a new detector
7 | - Create 2 classes in the detector (it should have 3 classes total)
8 | - Write down the raster/detector id and replace them below
9 | """
10 | import json
11 | from picterra import APIClient
12 |
13 | # TODO: Adapt IDs
14 | DETECTOR_ID = "9a16c150-ae24-4bb6-9378-085955c7a4ac"
15 | RASTER_ID = "89139314-0bc0-4243-9357-b91c502513b2"
16 |
17 | # Set the PICTERRA_API_KEY environment variable to define your API key
18 | client = APIClient()
19 | detector_info = client.get_detector(DETECTOR_ID)
20 |
21 |
22 | def get_class_id(class_name):
23 | for class_info in detector_info["classes"]:
24 | if class_info["name"] == class_name:
25 | return class_info["id"]
26 | raise RuntimeError("Class with name=%s not found" % class_name)
27 |
28 |
29 | def load_annotations(name):
30 | with open("data/%s" % name) as f:
31 | fc = json.load(f)
32 | return fc
33 |
34 |
35 | client.set_annotations(
36 | DETECTOR_ID,
37 | RASTER_ID,
38 | "outline",
39 | load_annotations("outline.geojson"),
40 | class_id=get_class_id("class0"),
41 | )
42 | client.set_annotations(
43 | DETECTOR_ID,
44 | RASTER_ID,
45 | "outline",
46 | load_annotations("outline2.geojson"),
47 | class_id=get_class_id("class1"),
48 | )
49 | client.set_annotations(
50 | DETECTOR_ID,
51 | RASTER_ID,
52 | "outline",
53 | load_annotations("outline3.geojson"),
54 | class_id=get_class_id("class2"),
55 | )
56 |
--------------------------------------------------------------------------------
/docs/getting_started.rst:
--------------------------------------------------------------------------------
1 | ===============
2 | Getting started
3 | ===============
4 |
5 | Installation
6 | ============
7 |
8 | Install using pip
9 |
10 | ::
11 |
12 | pip install picterra
13 |
14 | Set your Picterra API key through an environment variable
15 |
16 | ::
17 |
18 | export PICTERRA_API_KEY=<your api key>
19 |
20 | Listing entities
21 | ================
22 |
23 | When listing entities (eg rasters, detectors) from your account, the Picterra Server uses a *paginated*
24 | approach; this means that every `list_`-prefixed function returns a special :class:`picterra.ResultsPage` class instance
25 | which can be used like a Python list.
26 |
27 | Here are some examples, but look at the doc for :class:`picterra.ForgeClient` and :class:`picterra.TracerClient`
28 | for all the entities you can list.
29 |
30 | .. literalinclude:: ../examples/forge/detectors_management.py
31 | .. literalinclude:: ../examples/forge/raster_management.py
32 |
33 |
34 | Upload & Detect
35 | ===============
36 |
37 | .. literalinclude:: ../examples/forge/upload_and_detect.py
38 |
39 | Training
40 | ========
41 |
42 | .. note::
43 |
44 | **Please note the below endpoints are still in beta and thus may be subject to change**
45 |
46 | .. literalinclude:: ../examples/forge/training.py
47 |
48 | Detections in image coordinates
49 | ===============================
50 |
51 | If you want to use Picterra with images that are not georeferenced and want to get the detector
52 | outputs in (x, y) coordinates, have a look at our `nongeo_imagery notebook <https://github.com/Picterra/picterra-python/blob/master/examples/nongeo_imagery.ipynb>`_ .
53 |
54 | More examples
55 | =============
56 |
57 | Check the `examples directory `_ of our github repo.
58 |
--------------------------------------------------------------------------------
/.github/workflows/codeql-analysis.yml:
--------------------------------------------------------------------------------
1 | # For most projects, this workflow file will not need changing; you simply need
2 | # to commit it to your repository.
3 | #
4 | # You may wish to alter this file to override the set of languages analyzed,
5 | # or to provide custom queries or build logic.
6 | #
7 | name: "CodeQL"
8 |
9 | on:
10 | push:
11 | branches: [ master ]
12 | pull_request:
13 | # The branches below must be a subset of the branches above
14 | branches: [ master ]
15 | schedule:
16 | - cron: '26 13 * * 5'
17 |
18 | jobs:
19 | analyze:
20 | name: Analyze
21 | runs-on: ubuntu-latest
22 | permissions:
23 | actions: read
24 | contents: read
25 | security-events: write
26 |
27 | strategy:
28 | fail-fast: false
29 | matrix:
30 | language: [ 'python' ]
31 | # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ]
32 | # Learn more:
33 | # https://docs.github.com/en/free-pro-team@latest/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#changing-the-languages-that-are-analyzed
34 |
35 | steps:
36 | - name: Checkout repository
37 | uses: actions/checkout@v2
38 |
39 | # Initializes the CodeQL tools for scanning.
40 | - name: Initialize CodeQL
41 | uses: github/codeql-action/init@v3
42 | with:
43 | languages: ${{ matrix.language }}
44 | # If you wish to specify custom queries, you can do so here or in a config file.
45 | # By default, queries listed here will override any specified in a config file.
46 | # Prefix the list here with "+" to use these queries and those in the config file.
47 | # queries: ./path/to/local/query, your-org/your-repo/queries@main
48 |
49 | - name: Perform CodeQL Analysis
50 | uses: github/codeql-action/analyze@v3
51 |
--------------------------------------------------------------------------------
/.github/workflows/lint_test.yml:
--------------------------------------------------------------------------------
1 | name: lint and tests
2 |
3 | on:
4 | push:
5 | branches: [ master ]
6 | pull_request:
7 |
8 | jobs:
9 | build:
10 | strategy:
11 | matrix:
12 | os: [ubuntu-24.04]
13 | python-version: ['3.10', '3.12']
14 | include:
15 | # We still support python 3.7 for enterprise customers running on legacy
16 | # Python versions. We have to run it on ubuntu 22.04 because Python 3.7
17 | # is EOL and not available on 24.04
18 | - os: ubuntu-22.04
19 | python-version: '3.7'
20 | runs-on: ${{ matrix.os }}
21 | steps:
22 | - uses: actions/checkout@v2
23 | - name: setup python ${{ matrix.python-version }}
24 | uses: actions/setup-python@v2
25 | with:
26 | python-version: ${{ matrix.python-version }}
27 | - name: install deps
28 | # We pin twine to 6.0.1 because 6.1.0 is breaking our build, see #148
29 | run: |
30 | python -m pip install --upgrade pip setuptools wheel flake8 "twine<=6.0.1"
31 | python setup.py develop
32 | pip install -e .[test,lint]
33 | - name: lint
34 | run: |
35 | scripts/lint.sh
36 | - name: tests
37 | run: |
38 | pytest -v tests
39 | # Test the build and run twine to check we are pypi compatible
40 | - name: check build
41 | run: |
42 | python setup.py bdist && twine check dist/*
43 | # Even though we have readthedocs build docs on every MR, this is useful
44 | # because we build with -W --keep-going (see Makefile) and this will
45 | # therefore fail on warnings that could be ignored by readthedocs and
46 | # lead to half-broken docs
47 | - name: build docs
48 | # This should match the version in .readthedocs.yaml in the repository root
49 | if: matrix.python-version == '3.12'
50 | run: |
51 | cd docs
52 | python -m pip install -r requirements.txt
53 | make html
54 | guidelines:
55 | runs-on: ubuntu-latest
56 | steps:
57 | - uses: actions/checkout@v3
58 | with:
59 | fetch-depth: 0
60 | - name: check commits format
61 | run: |
62 | scripts/check_commit_titles.sh ${{ github.event.pull_request.base.sha }} ${{ github.event.pull_request.head.sha }}
63 |
--------------------------------------------------------------------------------
/docs/conf.py:
--------------------------------------------------------------------------------
1 | # Configuration file for the Sphinx documentation builder.
2 | #
3 | # This file only contains a selection of the most common options. For a full
4 | # list see the documentation:
5 | # https://www.sphinx-doc.org/en/master/usage/configuration.html
6 |
7 | # -- Path setup --------------------------------------------------------------
8 |
9 | # If extensions (or modules to document with autodoc) are in another directory,
10 | # add these directories to sys.path here. If the directory is relative to the
11 | # documentation root, use os.path.abspath to make it absolute, like shown here.
12 | #
13 | # import os
14 | # import sys
15 | # sys.path.insert(0, os.path.abspath('.'))
16 |
# -- Project information -----------------------------------------------------

project = "Picterra Python API"
copyright = "2025, Picterra Team"
author = "Picterra Team"


# -- General configuration ---------------------------------------------------

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    "sphinx.ext.autodoc",  # pull API docs from the package docstrings
    "sphinx.ext.viewcode",  # link documented objects to highlighted source
    "sphinx.ext.napoleon",  # parse Google/NumPy-style docstring sections
    "sphinx_rtd_theme",
]

# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store", "venv"]


# -- Options for HTML output -------------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "sphinx_rtd_theme"

html_theme_options = {}


# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]

# Root document of the documentation tree (explicit for older Sphinx versions)
master_doc = "index"

# Show type hints both in the signature and in the parameter description
autodoc_typehints = "both"
63 |
--------------------------------------------------------------------------------
/tests/utils.py:
--------------------------------------------------------------------------------
1 | from typing import Optional
2 | from urllib.parse import urljoin
3 |
4 | import responses
5 |
6 | from picterra import ForgeClient, TracerClient
7 |
8 |
def _add_api_response(
    path, verb=responses.GET, json=None, match=None, body=None, status=None
):
    """
    Register a mocked Picterra API response with the ``responses`` library.

    Args:
        path: Full URL to mock.
        verb: HTTP method (one of the ``responses`` verb constants).
        json: Optional JSON payload for the mocked response.
        match: Optional extra ``responses`` matcher; the API-key header
            matcher is always applied in addition.
        body: Optional raw body for the mocked response.
        status: Explicit HTTP status; when omitted, a conventional default
            is derived from the verb.
    """
    if status:
        expected_status = status
    elif verb == responses.GET:
        expected_status = 200
    elif verb == responses.POST:
        expected_status = 201
    elif verb in (responses.PUT, responses.DELETE):
        expected_status = 204
    else:
        # Previously an unhandled verb (eg PATCH) raised UnboundLocalError;
        # fall back to a generic success status instead.
        expected_status = 200
    matchers = [responses.matchers.header_matcher({"X-Api-Key": "1234"})]
    if match:
        matchers.append(match)
    responses.add(
        verb,
        path,
        body=body,
        json=json,
        match=matchers,
        status=expected_status,
    )
34 |
35 |
def _client(monkeypatch, platform="detector", max_retries=0, timeout=1, **kwargs):
    """Build a Forge or Tracer client pointed at the test API URL."""
    monkeypatch.setenv("PICTERRA_BASE_URL", TEST_API_URL)
    monkeypatch.setenv("PICTERRA_API_KEY", "1234")
    if platform == "detector":
        return ForgeClient(timeout=timeout, max_retries=max_retries, **kwargs)
    if platform == "plots_analysis":
        return TracerClient(timeout=timeout, max_retries=max_retries, **kwargs)
    raise NotImplementedError(f"Unrecognised API platform {platform}")
46 |
47 |
def detector_api_url(path):
    """Absolute URL for a Forge (detector) public API v2 endpoint."""
    endpoint = urljoin("public/api/v2/", path)
    return urljoin(TEST_API_URL, endpoint)
50 |
51 |
def plots_analysis_api_url(path):
    """Absolute URL for a Tracer (plots analysis) public API v1 endpoint."""
    endpoint = urljoin("public/api/plots_analysis/v1/", path)
    return urljoin(TEST_API_URL, endpoint)
54 |
55 |
def add_mock_paginated_list_response(
    endpoint: str,
    page: int = 1,
    search_string: Optional[str] = None,
    name_prefix: str = "a",
    num_results: int = 2,
    include_archived: Optional[bool] = None,
    qs: Optional[dict] = None
):
    """
    Register a mocked paginated list endpoint.

    The mocked page contains ``num_results`` objects named
    ``<name_prefix>_<i>`` and always advertises a ``next`` page, so tests
    can exercise client-side pagination.

    Args:
        endpoint: Full URL of the list endpoint to mock.
        page: Page number this mock responds to.
        search_string: Expected ``search`` query parameter, if any.
        name_prefix: Prefix used to build the mocked object names.
        num_results: Number of objects in the returned page.
        include_archived: Expected ``include_archived`` query parameter.
        qs: Extra query parameters the request is expected to carry.
    """
    # Renamed from `next` to avoid shadowing the builtin
    curr_page, next_page = str(page), str(page + 1)
    data1 = {
        "count": num_results * num_results,
        "next": endpoint + "/?page_number=" + next_page,
        "previous": None,
        "page_size": num_results,
        "results": [
            {"id": str(i), "name": name_prefix + "_" + str(i)} for i in range(1, num_results + 1)
        ],
    }
    qs_params = {"page_number": curr_page}
    if search_string:
        qs_params["search"] = search_string
    if qs is not None:
        qs_params.update(qs)
    if include_archived is not None:
        qs_params["include_archived"] = str(include_archived).lower()
    _add_api_response(
        endpoint,
        match=responses.matchers.query_param_matcher(qs_params),
        json=data1,
    )
87 |
88 |
# Base URL the mocked clients point at (see _client)
TEST_API_URL = "http://example.com/"
# Kept very short so operation-polling tests run fast
TEST_POLL_INTERVAL = 0.1
OPERATION_ID = 21
# Minimal "operation started" response body used by mocked endpoints
OP_RESP = {"operation_id": OPERATION_ID, "poll_interval": TEST_POLL_INTERVAL}
93 |
--------------------------------------------------------------------------------
/src/picterra/nongeo.py:
--------------------------------------------------------------------------------
1 | import json
2 | import math
3 |
# The projected bounds for EPSG 3857 are computed based on the earth radius
# defined in the spheroid https://epsg.io/3857
# https://gis.stackexchange.com/questions/144471/spherical-mercator-world-bounds
_EARTH_RADIUS = 6378137  # meters (WGS84 semi-major axis)
# They are consistent with the EPSG.io calculator
# https://epsg.io/transform#s_srs=4326&t_srs=3857&x=-180.0000000&y=0.0000000
# Note that the projected bounds are a square (so ymax=xmax on purpose), but
# only latitude between -85 and 85 are considered valid for this projection
_EPSG_3857_X_MIN = -math.pi * _EARTH_RADIUS
_EPSG_3857_Y_MIN = -math.pi * _EARTH_RADIUS
_EPSG_3857_X_MAX = math.pi * _EARTH_RADIUS
_EPSG_3857_Y_MAX = math.pi * _EARTH_RADIUS

# Full width/height of the projected square, in meters
_EPSG_3857_X_EXTENT = _EPSG_3857_X_MAX - _EPSG_3857_X_MIN
_EPSG_3857_Y_EXTENT = _EPSG_3857_Y_MAX - _EPSG_3857_Y_MIN

# Degrees-to-radians conversion factor
_DEG_TO_RAD = math.pi / 180.0
21 |
22 |
def _nongeo_latlng2xy(lat_deg, lng_deg):
    """
    Convert a WGS84 (lat, lng) coordinate, in degrees, into the (x, y)
    pixel coordinates of a non-georeferenced raster.

    Picterra assigns a fixed, arbitrary EPSG:3857 geotransform to
    non-georeferenced rasters; this function projects the coordinate to
    pseudo-mercator and then inverts that geotransform.

    Args:
        lat_deg: Latitude in degrees.
        lng_deg: Longitude in degrees.

    Returns:
        A (x, y) tuple of float pixel coordinates.
    """
    lat = _DEG_TO_RAD * lat_deg
    lng = _DEG_TO_RAD * lng_deg

    # First, project to pseudo-mercator
    # https://en.wikipedia.org/wiki/Web_Mercator_projection#Formulas
    x_proj = _EPSG_3857_X_EXTENT / (2.0 * math.pi) * lng
    y_proj = (
        _EPSG_3857_Y_EXTENT
        / (2.0 * math.pi)
        * math.log(math.tan(math.pi / 4.0 + lat / 2.0))
    )

    # Then, apply the raster geotransform to get pixel coordinates
    # The arbitrary 3857 geotransform that Picterra sets on non-georeferenced rasters
    geot = [0, 0.1, 0, 0, 0, -0.1]
    x = (x_proj - geot[0]) / geot[1]
    y = (y_proj - geot[3]) / geot[5]
    return x, y
43 |
44 |
45 | def _load_polygons(geojson):
46 | """
47 | Loads polygons from a geojson file; should work for both MultiPolygon and
48 | FeatureCollection of Polygons
49 | """
50 | polygons = []
51 | if geojson["type"] == "MultiPolygon":
52 | for polygon in geojson["coordinates"]:
53 | polygons.append(polygon)
54 | elif geojson["type"] == "Polygon":
55 | polygons = [geojson["coordinates"]]
56 | elif geojson["type"] == "FeatureCollection":
57 | for feature in geojson["features"]:
58 | geom = feature["geometry"]
59 | polygons.extend(_load_polygons(geom))
60 | return polygons
61 |
62 |
def _polygon_to_xy(polygon):
    """Convert every (lng, lat) ring of a GeoJSON polygon to (x, y) tuples."""
    return [
        [_nongeo_latlng2xy(lat, lng) for lng, lat in ring]
        for ring in polygon
    ]
68 |
69 |
def nongeo_result_to_pixel(result_filename):
    """
    This is a helper function to convert results obtained on
    non-georeferenced images into pixel coordinates.
    Note that this will NOT work if the image was georeferenced. So only use
    this function if you are uploading non-georeferenced image formats like
    PNG or JPEG

    This is currently in **beta** so let us know if you find any issues

    Args:
        result_filename (str): The file path to the GeoJSON file obtained by
            `APIClient.download_result_to_file`
    Returns:
        A list of polygons. Each polygon is a list of rings and
        each ring is a list of (x, y) tuples. For example:

        ::

            [
                # This is a square with a square hole
                [[(0, 0), (1, 0), (1, 1), (0, 1), (0, 0)],
                 [(0.4, 0.4), (0.5, 0.4), (0.5, 0.5), (0.4, 0.5), (0.4, 0.4)]],
                # A triangle
                [[(0, 0), (1, 0), (1, 1), (0, 0)]]
            ]
    """
    with open(result_filename) as f:
        geojson = json.load(f)
    polygons = _load_polygons(geojson)
    polygons = [_polygon_to_xy(p) for p in polygons]
    return polygons
102 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 |
7 | # Picterra Python API Client
8 |
9 | 
10 | [](https://picterra-python.readthedocs.io/en/latest/?badge=latest)
11 | [](https://pypi.org/project/picterra/)
12 |
13 | Easily integrate state-of-the-art machine learning models in your app
14 |
15 | ```python
16 | from picterra import APIClient
17 |
18 | # Replace this with the id of one of your detectors
19 | detector_id = 'd552605b-6972-4a68-8d51-91e6cb531c24'
20 |
21 | # Set the PICTERRA_API_KEY environment variable to define your API key
22 | client = APIClient()
23 | print('Uploading raster...')
24 | raster_id = client.upload_raster('data/raster1.tif', name='a nice raster')
25 | print('Upload finished, starting detector...')
26 | result_id = client.run_detector(detector_id, raster_id)
27 | client.download_result_to_feature_collection(result_id, 'result.geojson')
28 | print('Detection finished, results are in result.geojson')
29 | ```
30 |
31 |
32 |
33 | ## Installation
34 |
35 | ```
36 | pip install picterra
37 | ```
38 |
39 | See the `examples` folder for examples.
40 |
41 | ## API Reference and User Guide available on [Read the Docs](https://picterra-python.readthedocs.io/)
42 |
43 | [](https://picterra-python.readthedocs.io/)
44 |
45 |
46 | ## Development
47 |
48 | ### Setup
49 | Make sure you have `Python` and `pip` in your OS and create a virtual environment in the root folder, eg
50 |
51 | ```bash
52 | python3 -m venv .venv
53 | source .venv/bin/activate
54 | ```
55 |
56 | Running
57 | ```bash
58 | pip install --editable '.[lint,test]'
59 | ```
60 | allows you to run tests and linting locally, and also avoids re-installing the library every time you change the code.
61 |
62 | If you want to install from a given branch, simply do
63 |
64 | ```bash
65 | pip uninstall picterra
66 | pip install git+https://github.com/Picterra/picterra-python.git@<branch-name>
67 | ```
68 |
69 | ### Running tests
70 | In order to test locally, run:
71 | ```bash
72 | pytest
73 | ```
74 |
75 | ### Documentation
76 | Run
77 |
78 | ```bash
79 | cd docs
80 | pip install -r requirements.txt
81 | make html
82 | ```
83 |
84 | to update the HTML documentation under `_build/html`.
85 | Spawn an HTTP server in that folder to see how they would look like once deployed on Read the Docs.
86 |
87 | ### Checking linting
88 | Run
89 | ```bash
90 | scripts/lint.sh
91 | ```
92 | ####
93 |
94 | #### Sphinx docs
95 | Run
96 | ```bash
97 | cd docs
98 | python -m pip install -r requirements.txt
99 | make html
100 | ```
101 | and verify no errors are output
102 |
103 | ## Release process
104 |
105 | 1. Preparatory work:
106 | - 1.1 Bump the version number in `setup.py`
107 | - 1.2 Update CHANGELOG.md (no need for all commits, just main changes with a tag)
108 | 2. Manually run the [publish to testpypi workflow](https://github.com/Picterra/picterra-python/actions/workflows/python-publish-testpypi.yml)
109 | 3. Check the publication result on [testpypi](https://test.pypi.org/project/picterra/)
110 | 4. Create a release through github
111 | - 4.1. Make sure you create a new tag vX.Y.Z through the release UI
112 | - 4.2. Click the "generate release notes" button in the UI to get release notes (you can even do it after the release is created by updating it)
113 | 5. The 'publish to pypi' workflow should automatically run
114 | - 5.1. Note this will *not* work if you create the release first as a draft - you
115 | have to create it immediately
116 | 6. Updated package should be available on [pypi](https://pypi.org/project/picterra/)
117 |
118 |
--------------------------------------------------------------------------------
/tests/test_base_client.py:
--------------------------------------------------------------------------------
1 | import json
2 | import re
3 | import time
4 |
5 | import httpretty
6 | import pytest
7 | import requests
8 | import responses
9 |
10 | from picterra import base_client
11 | from picterra.forge_client import ForgeClient
12 | from tests.utils import _add_api_response, _client, detector_api_url
13 |
14 |
def test_forge_client_base_url(monkeypatch):
    """A freshly built client must default to the production API v2 base url."""
    monkeypatch.setenv("PICTERRA_API_KEY", "1234")
    assert ForgeClient().base_url == "https://app.picterra.ch/public/api/v2/"
22 |
23 |
24 | # Cannot test Retry with responses, @see https://github.com/getsentry/responses/issues/135
@httpretty.activate
def test_backoff_success(monkeypatch):
    """
    With max_retries=2, two transient errors (429 then 502) followed by a
    200 must succeed after exactly three requests.
    """
    data = {"count": 0, "next": None, "previous": None, "results": []}
    httpretty.register_uri(
        httpretty.GET,
        detector_api_url("rasters/"),
        responses=[
            httpretty.Response(body=None, status=429),
            httpretty.Response(body=None, status=502),
            httpretty.Response(body=json.dumps(data), status=200),
        ],
    )
    client = _client(monkeypatch, max_retries=2, backoff_factor=0.1)
    client.list_rasters()
    assert len(httpretty.latest_requests()) == 3
40 |
41 |
@httpretty.activate
def test_backoff_failure(monkeypatch):
    """
    With max_retries=1, a second consecutive server error must surface as a
    ConnectionError after exactly two requests (initial try + one retry).
    """
    httpretty.register_uri(
        httpretty.GET,
        detector_api_url("rasters/"),
        responses=[
            httpretty.Response(
                body=None,
                status=429,
            ),
            httpretty.Response(body=None, status=502),
            httpretty.Response(body=None, status=502),
        ],
    )
    client = _client(monkeypatch, max_retries=1)
    with pytest.raises(requests.exceptions.ConnectionError):
        client.list_rasters()
    assert len(httpretty.latest_requests()) == 2
60 |
61 |
@httpretty.activate
def test_timeout(monkeypatch):
    """
    A server answering slower than the client timeout must raise a
    ConnectionError caused by a read timeout, without any retry.
    """
    def request_callback(request, uri, response_headers):
        # Sleep longer than the 1s client timeout configured below
        time.sleep(2)
        return [200, response_headers, json.dumps([])]

    httpretty.register_uri(httpretty.GET, detector_api_url("rasters/"), body=request_callback)
    timeout = 1
    client = _client(monkeypatch, timeout=timeout)
    with pytest.raises(requests.exceptions.ConnectionError) as e:
        client.list_rasters()
    full_error = str(e.value)
    # The reported cause must be the timeout, not retry exhaustion
    assert "MaxRetryError" not in full_error
    assert "timeout" in full_error
    assert "read timeout=%d" % timeout in full_error
    assert len(httpretty.latest_requests()) == 1
78 |
79 |
@responses.activate
def test_headers_api_key(monkeypatch):
    """Every API request must carry the X-Api-Key header."""
    _add_api_response(detector_api_url("detectors/"), responses.POST, json={"id": "foobar"})
    client = _client(monkeypatch)
    client.create_detector()
    assert len(responses.calls) == 1
    sent_request = responses.calls[0].request
    assert sent_request.headers["X-Api-Key"] == "1234"
87 |
88 |
@responses.activate
def test_headers_user_agent_version(monkeypatch):
    """
    The User-Agent header must start with "picterra-python/<major>.<minor>".
    """
    _add_api_response(detector_api_url("detectors/"), responses.POST, json={"id": "foobar"})
    client = _client(monkeypatch)
    client.create_detector()
    assert len(responses.calls) == 1
    ua = responses.calls[0].request.headers["User-Agent"]
    # Raw string: "\d" in a plain string literal is an invalid escape
    # sequence (SyntaxWarning/DeprecationWarning on modern Pythons)
    regex = r"^picterra-python/\d+\.\d+"
    assert re.compile(regex).match(ua) is not None
98 |
99 |
@responses.activate
def test_headers_user_agent_version__fallback(monkeypatch):
    """When the distribution lookup fails, the UA version falls back to "no_version"."""
    _add_api_response(detector_api_url("detectors/"), responses.POST, json={"id": "foobar"},)
    # Force an unknown distribution name so the version lookup fails
    monkeypatch.setattr(base_client, '_get_distr_name', lambda: 'foobar')
    client = _client(monkeypatch)
    client.create_detector()
    assert len(responses.calls) == 1
    ua = responses.calls[0].request.headers["User-Agent"]
    assert ua.startswith("picterra-python/no_version")
110 |
111 |
@responses.activate
def test_results_page():
    """
    Exercise ResultsPage end to end against two mocked pages: len(),
    indexing, iteration, str(), and next()/previous() navigation.
    """
    responses.add(
        method=responses.GET,
        url="http://example.com/page/1",
        json={
            "count": 2,
            "next": "http://example.com/page/2",
            "previous": None,
            "results": ["one", "two"],
        },
        status=200,
    )
    responses.add(
        method=responses.GET,
        url="http://example.com/page/2",
        json={
            "count": 1, "next": None, "previous": "http://example.com/page/1",
            "results": ["three"],
        },
        status=200,
    )
    page1 = base_client.ResultsPage("http://example.com/page/1", requests.get)
    assert isinstance(page1, base_client.ResultsPage)
    assert len(page1) == 2 and page1.previous() is None
    assert page1[0] == "one" and page1[1] == "two"
    assert list(page1)[0] == "one" and list(page1)[1] == "two"
    assert str(page1) == "2 results from http://example.com/page/1"
    page2 = page1.next()
    assert str(page2) == "1 results from http://example.com/page/2"
    assert isinstance(page2, base_client.ResultsPage)
    assert len(page2) == 1 and page2[0] == "three"
    assert page2.next() is None
    assert list(page2.previous())[0] == "one"
146 |
--------------------------------------------------------------------------------
/tests/test_nongeo.py:
--------------------------------------------------------------------------------
1 | import tempfile
2 |
3 | import pytest
4 |
5 | from picterra import nongeo_result_to_pixel
6 | from picterra.nongeo import _load_polygons, _nongeo_latlng2xy
7 |
8 | # The way to get the lat/lng is:
9 | # - Upload a non-georeferenced image to the platform
10 | # - Download the normalized.tif
11 | # - Open normalized.tif in QGIS, get the lat/lng coordinates of the 4 corners of the image
12 |
13 |
14 | # In this case, the image is 1520 x 1086
@pytest.mark.parametrize(
    "latlng,xy",
    [
        # bottom-right corner
        ((-0.00097539, 0.00136530), (1520, 1086)),
        # bottom-left corner
        ((-0.000975470, 0.000000096), (0, 1086)),
        # top-left corner
        ((-0.000000034, 0.000000034), (0, 0)),
        # top-right corner
        ((0.000000129, 0.001365320), (1520, 0)),
    ],
)
def test_nongeo_latlng2xy(latlng, xy):
    """Each corner's lat/lng must map back to its pixel coordinates (after rounding)."""
    x, y = _nongeo_latlng2xy(lat_deg=latlng[0], lng_deg=latlng[1])
    assert int(round(x)) == xy[0] and int(round(y)) == xy[1]
31 |
32 |
def test_nongeo_result_to_pixel():
    """A MultiPolygon result file converts to the image's corner pixel coords."""
    with tempfile.NamedTemporaryFile(mode="wt") as f:
        # This is the Multipolygon corresponding to the corners of a
        # 1520x1086 non-georeferenced image
        f.write(
            """
            {
                "type": "MultiPolygon",
                "coordinates":[
                    [
                        [
                            [0.000000096, -0.000975470],
                            [0.00136530, -0.00097539],
                            [0.001365320, 0.000000129],
                            [0.000000034, -0.000000034],
                            [0.000000096, -0.000975470]
                        ]
                    ]
                ]
            }
            """
        )
        f.flush()
        polygons = nongeo_result_to_pixel(f.name)
        assert tuple(map(round, polygons[0][0][0])) == (0, 1086)
        assert tuple(map(round, polygons[0][0][1])) == (1520, 1086)
        assert tuple(map(round, polygons[0][0][2])) == (1520, 0)
        assert tuple(map(round, polygons[0][0][3])) == (0, 0)
        assert tuple(map(round, polygons[0][0][4])) == (0, 1086)
62 |
63 |
def test_load_polygons_multipoly():
    """A MultiPolygon with a single polygon yields exactly that polygon."""
    ring = [
        [0.000000096, -0.000975470],
        [0.00136530, -0.00097539],
        [0.001365320, 0.000000129],
        [0.000000034, -0.000000034],
        [0.000000096, -0.000975470],
    ]
    polygons = _load_polygons({"type": "MultiPolygon", "coordinates": [[ring]]})
    assert len(polygons) == 1
    assert len(polygons[0][0]) == 5
    assert polygons[0][0][2][1] == 0.000000129
83 |
84 |
def test_load_polygons_polygon():
    """A bare Polygon is wrapped into a one-element polygon list."""
    ring = [
        [0.000000096, -0.000975470],
        [0.00136530, -0.00097539],
        [0.001365320, 0.000000129],
        [0.000000034, -0.000000034],
        [0.000000096, -0.000975470],
    ]
    polygons = _load_polygons({"type": "Polygon", "coordinates": [ring]})
    assert len(polygons) == 1
    assert len(polygons[0][0]) == 5
    assert polygons[0][0][2][1] == 0.000000129
102 |
103 |
def test_load_polygons_fc():
    """A FeatureCollection mixing Polygon and MultiPolygon geometries is flattened."""
    ring_a = [
        [0.000000096, -0.000975470],
        [0.00136530, -0.00097539],
        [0.001365320, 0.000000129],
        [0.000000034, -0.000000034],
        [0.000000096, -0.000975470],
    ]
    ring_b = [
        [0.100000096, -0.100975470],
        [0.10136530, -0.10097539],
        [0.101365320, 0.100000129],
        [0.100000034, -0.100000034],
        [0.100000096, -0.100975470],
    ]
    geojson = {
        "type": "FeatureCollection",
        "features": [
            {
                "type": "Feature",
                "properties": {},
                "geometry": {"type": "Polygon", "coordinates": [ring_a]},
            },
            {
                "type": "Feature",
                "properties": {},
                "geometry": {
                    "type": "MultiPolygon",
                    "coordinates": [[ring_a], [ring_b]],
                },
            },
        ],
    }
    polygons = _load_polygons(geojson)
    # 1 polygon from the Polygon feature + 2 from the MultiPolygon feature
    assert len(polygons) == 3
    assert len(polygons[0][0]) == 5
    assert polygons[0][0][2][1] == 0.000000129
    assert polygons[2][0][2][1] == 0.100000129
158 |
--------------------------------------------------------------------------------
/src/picterra/base_client.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import json
4 | import logging
5 | import os
6 | import sys
7 | import time
8 | from collections.abc import Callable, Iterator
9 |
10 | if sys.version_info >= (3, 8):
11 | from typing import Literal, TypedDict
12 | else:
13 | from typing_extensions import Literal, TypedDict
14 |
15 | from typing import Any, Generic, TypeVar
16 | from urllib.parse import urlencode, urljoin
17 |
18 | import requests
19 | from requests.adapters import HTTPAdapter
20 | from requests.auth import AuthBase
21 | from urllib3.util.retry import Retry
22 |
# NOTE(review): this grabs the *root* logger, so configuration here affects
# all libraries in the process — presumably intentional; confirm before
# changing to logging.getLogger(__name__).
logger = logging.getLogger()

CHUNK_SIZE_BYTES = 8192  # 8 KiB
26 |
27 |
28 | # allow injecting an non-existing package name to test the fallback behavior
29 | # of _get_ua in tests (see test_headers_user_agent_version__fallback)
30 | def _get_distr_name():
31 | return "picterra"
32 |
33 |
def _get_ua():
    """
    Build the User-Agent string identifying this client, eg
    "picterra-python/1.2 (Python 3.11 posix Linux ...)".

    Falls back to "no_version" when the distribution metadata is missing.
    """
    import platform

    pkg = _get_distr_name()
    if sys.version_info >= (3, 8):
        from importlib.metadata import PackageNotFoundError, version

        try:
            ver = version(pkg)
        except PackageNotFoundError:
            ver = "no_version"
    else:
        # Legacy lookup for Python 3.7 installs
        import pkg_resources  # type: ignore[import]

        try:
            ver = pkg_resources.require(pkg)[0].version
        except pkg_resources.DistributionNotFound:
            ver = "no_version"
    o_s = " ".join([os.name, platform.system(), platform.release()])
    py = f"Python {sys.version_info.major}.{sys.version_info.minor}"
    return f"picterra-python/{ver} ({py} {o_s})"
60 |
61 |
class APIError(Exception):
    """Generic API error exception"""
66 |
67 |
class _RequestsSession(requests.Session):
    """
    requests.Session subclass that applies a default timeout to every
    request and prepends the picterra client identifier to the User-Agent.
    """

    def __init__(self, *args, **kwargs):
        # The caller must pass `timeout`; it is popped before delegating
        self.timeout = kwargs.pop("timeout")
        super().__init__(*args, **kwargs)
        combined_ua = "%s - %s" % (_get_ua(), self.headers["User-Agent"])
        self.headers.update({"User-Agent": combined_ua})

    def request(self, *args, **kwargs):
        # Only apply the session default when no per-request timeout is given
        kwargs.setdefault("timeout", self.timeout)
        return super().request(*args, **kwargs)
83 |
84 |
def _download_to_file(url: str, filename: str):
    """
    Stream the content at ``url`` into ``filename``.

    Uses a plain ``requests.get`` (not the client session) so no timeout
    applies — downloads may legitimately take a long time.
    """
    with requests.get(url, stream=True) as resp:
        resp.raise_for_status()
        with open(filename, "wb+") as out:
            logger.debug("Downloading to file %s.." % filename)
            for chunk in resp.iter_content(chunk_size=CHUNK_SIZE_BYTES):
                # Keep-alive chunks arrive empty; skip them
                if chunk:
                    out.write(chunk)
95 |
96 |
def _upload_file_to_blobstore(upload_url: str, filename: str):
    """
    Stream the file at ``filename`` to the given blobstore PUT URL.

    Raises:
        ValueError: if ``filename`` does not point at an existing file.
        APIError: if the blobstore rejects the upload.
    """
    # isfile() already implies existence
    if not os.path.isfile(filename):
        raise ValueError("Invalid file: " + filename)
    # Binary mode + requests streaming upload
    # (https://requests.readthedocs.io/en/latest/user/advanced/#streaming-uploads)
    # avoids reading a potentially large file into memory. No timeout on
    # purpose: uploads can take a long time.
    with open(filename, "rb") as f:
        logger.debug("Opening and streaming to upload file %s" % filename)
        resp = requests.put(upload_url, data=f)
    if not resp.ok:
        logger.error("Error when uploading to blobstore %s" % upload_url)
        raise APIError(resp.text)
112 |
113 |
def multipolygon_to_polygon_feature_collection(mp):
    """Split a GeoJSON MultiPolygon into a FeatureCollection of Polygon features."""
    features = []
    for coords in mp["coordinates"]:
        features.append(
            {
                "type": "Feature",
                "properties": {},
                "geometry": {"type": "Polygon", "coordinates": coords},
            }
        )
    return {"type": "FeatureCollection", "features": features}
126 |
127 |
def _check_resp_is_ok(resp: requests.Response, msg: str) -> None:
    """Raise :class:`APIError` with request context if ``resp`` is not a success."""
    if resp.ok:
        return
    raise APIError(
        "%s (url %s, status %d): %s" % (msg, resp.url, resp.status_code, resp.text)
    )
133 |
134 |
T = TypeVar("T")  # element type held by a ResultsPage
136 |
137 |
class ResultsPage(Generic[T]):
    """
    Interface for a paginated response from the API

    Typically the endpoint returning list of objects return them splitted
    in pages (page 1, page 2, etc..) of a fixed dimension (eg 20). Thus
    each `list_XX` function returns a ResultsPage (by default the first one);
    once you have a ResultsPage for a given list of objects, you can:

    * check its length with ``len()``;
        - example: ``len(page)``
    * access a single element with the index operator ``[]``;
        - example: ``page[5]``
    * turn it into a list of dictionaries with ``list()``;
        - example: ``list(page)``
    * get the next page with ``.next()``; this could return None if the list is finished;
        - example: ``page.next()``

    You can also get a specific page passing the page number to the ``list_XX`` function
    """

    _fetch: Callable[[str], requests.Response]
    _next_url: str | None
    _prev_url: str | None
    _results: list[T]
    _url: str

    def __init__(self, url: str, fetch: Callable[[str], requests.Response]):
        """
        Fetch ``url`` via ``fetch`` and cache the page results plus the
        next/previous page URLs.

        Raises:
            APIError: if the server does not return a successful response.
        """
        resp = fetch(url)
        _check_resp_is_ok(resp, "Failed to get page")
        r: dict[str, Any] = resp.json()
        self._fetch = fetch
        self._next_url = r["next"]
        self._prev_url = r["previous"]
        self._results = r["results"]
        self._url = url

    def next(self):
        """Return the next page, or None if this is the last one."""
        return ResultsPage(self._next_url, self._fetch) if self._next_url else None

    def previous(self):
        """Return the previous page, or None if this is the first one."""
        return ResultsPage(self._prev_url, self._fetch) if self._prev_url else None

    def __len__(self) -> int:
        return len(self._results)

    def __getitem__(self, key: int) -> T:
        return self._results[key]

    def __iter__(self) -> Iterator[T]:
        # Iterate over a snapshot copy (preserving the semantics of the
        # previous index-based list rebuild) instead of an O(n) manual
        # comprehension over indices.
        return iter(list(self._results))

    def __str__(self) -> str:
        return f"{len(self._results)} results from {self._url}"
196 |
197 |
class Feature(TypedDict):
    """A GeoJSON Feature: a geometry dict plus free-form properties."""

    type: Literal["Feature"]
    properties: dict[str, Any]
    geometry: dict[str, Any]
202 |
203 |
class FeatureCollection(TypedDict):
    """A GeoJSON FeatureCollection: a list of Feature dicts."""

    type: Literal["FeatureCollection"]
    features: list[Feature]
207 |
208 |
class ApiKeyAuth(AuthBase):
    """requests auth hook signing every request with the Picterra API key.

    The key is read from the ``PICTERRA_API_KEY`` environment variable at
    construction time.
    """

    # The API key attached to every outgoing request
    api_key: str

    def __init__(self):
        key = os.environ.get("PICTERRA_API_KEY")
        if key is None:
            raise APIError("PICTERRA_API_KEY environment variable is not defined")
        self.api_key = key

    def __call__(self, r):
        # Called by requests on each prepared request: attach the key header
        r.headers["X-Api-Key"] = self.api_key
        return r
221 |
222 |
class BaseAPIClient:
    """
    Base class for Picterra API clients.

    This is subclassed for the different products we have.

    It owns the authenticated HTTP session (API-key auth, default timeout and
    retry policy) and the generic helpers shared by all products: URL building,
    long-running operation polling and paginated listing.
    """

    def __init__(
        self,
        api_url: str,
        timeout: int = 30,
        max_retries: int = 3,
        backoff_factor: int = 10,
    ):
        """
        Args:
            api_url: the api's base url. This is different based on the Picterra product used
                and is typically defined by implementations of this client
            timeout: number of seconds before the request times out
            max_retries: max attempts when encountering gateway issues or throttles; see
                retry_strategy comment below
            backoff_factor: factor used in the backoff algorithm; see retry_strategy comment below
        """
        # The deployment to talk to can be overridden via env, e.g. for testing/staging
        base_url = os.environ.get("PICTERRA_BASE_URL", "https://app.picterra.ch/")
        logger.info(
            "Using base_url=%s, api_url=%s; %d max retries, %d backoff and %s timeout.",
            base_url,
            api_url,
            max_retries,
            backoff_factor,
            timeout,
        )
        self.base_url = urljoin(base_url, api_url)
        # Create the session with a default timeout (30 sec) and auth, that we can then
        # override on a per-endpoint basis (will be disabled for file uploads and downloads)
        self.sess = _RequestsSession(timeout=timeout)
        self.sess.auth = ApiKeyAuth() # Authentication
        # Retry: we set the HTTP codes for our throttle (429) plus possible gateway problems (50*),
        # and for polling methods (GET), as non-idempotent ones should be addressed via idempotency
        # key mechanism; urllib3's Retry sleeps {backoff_factor} * (2 ** (number of previous
        # retries)) between attempts, and since we default to 30s for polling and max 30 req/min,
        # the resulting backoff sequence should provide enough room for recovery
        retry_strategy = Retry(
            total=max_retries,
            status_forcelist=[429, 502, 503, 504],
            backoff_factor=backoff_factor,
            allowed_methods=["GET"],
        )
        adapter = HTTPAdapter(max_retries=retry_strategy)
        self.sess.mount("https://", adapter)
        self.sess.mount("http://", adapter)

    def _full_url(self, path: str, params: dict[str, Any] | None = None):
        """Join *path* (and optional query *params*) onto the client's base API url."""
        url = urljoin(self.base_url, path)
        if not params:
            return url
        else:
            qstr = urlencode(params)
            return "%s?%s" % (url, qstr)

    def _wait_until_operation_completes(
        self, operation_response: dict[str, Any]
    ) -> dict[str, Any]:
        """Polls an operation until it succeeds and returns its data.

        Args:
            operation_response: the response that started the operation; must
                contain "operation_id" and the server-suggested "poll_interval"

        Returns:
            The final operation payload (a dict, including e.g. "results")

        Raises:
            APIError: if a polling request fails or the operation ends in "failed"
        """
        operation_id = operation_response["operation_id"]
        poll_interval = operation_response["poll_interval"]
        # Just sleep for a short while the first time
        time.sleep(poll_interval * 0.1)
        while True:
            logger.info("Polling operation id %s" % operation_id)
            resp = self.sess.get(
                self._full_url("operations/%s/" % operation_id),
            )
            if not resp.ok:
                raise APIError(resp.text)
            status = resp.json()["status"]
            logger.info("status=%s" % status)
            if status == "success":
                break
            if status == "failed":
                errors = resp.json()["errors"]
                raise APIError(
                    "Operation %s failed: %s" % (operation_id, json.dumps(errors))
                )
            time.sleep(poll_interval)
        return resp.json()

    def _return_results_page(
        self, resource_endpoint: str, params: dict[str, Any] | None = None
    ) -> ResultsPage:
        """Build the requested ResultsPage (first page by default) for a list endpoint."""
        if params is None:
            params = {}
        # Default to the first page unless the caller asked for a specific one
        if "page_number" not in params:
            params["page_number"] = 1

        url = self._full_url("%s/" % resource_endpoint, params=params)
        return ResultsPage(url, self.sess.get)

    def get_operation_results(self, operation_id: str) -> dict[str, Any]:
        """
        Return the 'results' dict of an operation

        This a **beta** function, subject to change.

        Args:
            operation_id: The id of the operation
        """
        # NOTE(review): unlike _wait_until_operation_completes, there is no
        # resp.ok check here — a failed request surfaces as a JSON/KeyError
        # rather than an APIError. Confirm whether that is intended.
        resp = self.sess.get(
            self._full_url("operations/%s/" % operation_id),
        )
        return resp.json()["results"]
334 |
--------------------------------------------------------------------------------
/tests/test_tracer_client.py:
--------------------------------------------------------------------------------
1 | import datetime
2 | import json
3 | import os
4 | import tempfile
5 |
6 | import responses
7 |
8 | from picterra import TracerClient
9 | from picterra.base_client import multipolygon_to_polygon_feature_collection
10 | from tests.utils import (
11 | OP_RESP,
12 | OPERATION_ID,
13 | _add_api_response,
14 | _client,
15 | add_mock_paginated_list_response,
16 | plots_analysis_api_url,
17 | )
18 |
19 |
def make_geojson_polygon(base=1):
    """Return a closed square GeoJSON Polygon with corners at (0, 0) and (base, base)."""
    ring = [[0, 0], [base, 0], [base, base], [0, base], [0, 0]]
    return {"type": "Polygon", "coordinates": [ring]}
22 |
23 |
def make_geojson_multipolygon(npolygons=1):
    """Return a GeoJSON MultiPolygon made of npolygons squares of increasing size."""
    return {
        "type": "MultiPolygon",
        "coordinates": [
            make_geojson_polygon(i + 1)["coordinates"] for i in range(npolygons)
        ],
    }
29 |
def test_plots_analysis_platform_client_base_url(monkeypatch):
    """
    Sanity-check that the client defaults to the correct base url
    """
    monkeypatch.setenv("PICTERRA_API_KEY", "1234")
    expected_url = "https://app.picterra.ch/public/api/plots_analysis/v1/"
    assert TracerClient().base_url == expected_url
37 |
38 |
@responses.activate
def test_create_plots_group(monkeypatch):
    """create_plots_group creates the group, uploads the geometries file,
    commits it and returns the new group's id."""
    # Blobstore upload slot for the geometries file
    _add_api_response(
        plots_analysis_api_url("upload/file/"),
        responses.POST,
        {
            "upload_id": "an-upload",
            "upload_url": "https://upload.example.com/",
        },
    )
    responses.put("https://upload.example.com/")
    # Group creation endpoint, verifying the exact creation payload
    _add_api_response(
        plots_analysis_api_url("plots_groups/"),
        responses.POST,
        OP_RESP,
        match=responses.matchers.json_params_matcher({
            "name": "name of my plot group",
            "methodology_id": "eudr-cocoa-id",
            "custom_columns_values": {"foo": "bar"}
        }),
    )
    # Both the creation and the commit operations poll this endpoint
    _add_api_response(plots_analysis_api_url(f"operations/{OPERATION_ID}/"), responses.GET, {
        "status": "success",
        "results": {"plots_group_id": "a-plots-group"}
    })
    client: TracerClient = _client(monkeypatch, platform="plots_analysis")
    with tempfile.NamedTemporaryFile() as tmp:
        # Commit endpoint, verifying the uploaded file is referenced by id/name
        _add_api_response(plots_analysis_api_url(
            "plots_groups/a-plots-group/upload/commit/"),
            responses.POST,
            OP_RESP,
            match=responses.matchers.json_params_matcher({
                "files": [
                    {
                        "upload_id": "an-upload",
                        "filename": os.path.basename(tmp.name),
                    }
                ],
                "overwrite": False,
            }),
        )
        with open(tmp.name, "w") as f:
            json.dump({"type": "FeatureCollection", "features": []}, f)
        assert client.create_plots_group(
            "name of my plot group",
            "eudr-cocoa-id",
            [tmp.name],
            {"foo": "bar"},
        ) == "a-plots-group"
88 |
89 |
@responses.activate
def test_update_plots_group_plots(monkeypatch):
    """update_plots_group_plots uploads the geometries file and commits it
    (without overwriting) to an existing group."""
    # Blobstore upload slot for the geometries file
    _add_api_response(
        plots_analysis_api_url("upload/file/"),
        responses.POST,
        {
            "upload_id": "an-upload",
            "upload_url": "https://upload.example.com/",
        },
    )
    responses.put("https://upload.example.com/")
    # The commit operation polls this endpoint
    _add_api_response(plots_analysis_api_url(f"operations/{OPERATION_ID}/"), responses.GET, {
        "status": "success",
        "results": {
            "plots_group_id": "group-id",
        }
    })
    client: TracerClient = _client(monkeypatch, platform="plots_analysis")
    with tempfile.NamedTemporaryFile() as tmp:
        # Commit endpoint, verifying the uploaded file reference and the
        # default overwrite=False behaviour
        _add_api_response(plots_analysis_api_url(
            "plots_groups/group-id/upload/commit/"),
            responses.POST,
            OP_RESP,
            match=responses.matchers.json_params_matcher({
                "files": [
                    {
                        "filename": os.path.basename(tmp.name),
                        "upload_id": "an-upload",
                    }
                ],
                "overwrite": False,
            }),
        )
        with open(tmp.name, "w") as f:
            json.dump({"type": "FeatureCollection", "features": []}, f)
        client.update_plots_group_plots("group-id", [tmp.name])
126 |
127 |
@responses.activate
def test_analyse_plots(monkeypatch):
    """analyze_plots uploads the plot ids selection, starts the analysis
    operation and returns the resulting analysis id."""
    # Blobstore upload slot for the plot ids selection
    _add_api_response(
        plots_analysis_api_url("upload/file/"),
        responses.POST,
        {
            "upload_id": "an-upload-id",
            "upload_url": "https://upload.example.com/",
        },
    )
    # The selected plot ids are PUT to the blobstore as JSON
    responses.put("https://upload.example.com/", match=[responses.matchers.json_params_matcher({
        "plot_ids": ["uno", "dos"],
    })])
    # Analysis start endpoint, verifying dates are serialized as ISO strings
    _add_api_response(plots_analysis_api_url(
        "plots_groups/a-group-id/analysis/"),
        responses.POST,
        OP_RESP,
        match=responses.matchers.json_params_matcher({
            "analysis_name": "foobar",
            "upload_id": "an-upload-id",
            "date_from": "2023-01-01",
            "date_to": "2025-01-01",
        }),
    )
    _add_api_response(plots_analysis_api_url(f"operations/{OPERATION_ID}/"), responses.GET, {
        "status": "success",
        "results": {"analysis_id": "an-analysis-id"}
    })
    client: TracerClient = _client(monkeypatch, platform="plots_analysis")
    with tempfile.NamedTemporaryFile() as tmp:
        with open(tmp.name, "w") as f:
            json.dump({"type": "FeatureCollection", "features": []}, f)
        assert client.analyze_plots(
            "a-group-id",
            "foobar",
            ["uno", "dos"],
            datetime.date.fromisoformat("2023-01-01"),
            datetime.date.fromisoformat("2025-01-01")
        ) == "an-analysis-id"
167 |
168 |
@responses.activate
def test_analyse_precheck(monkeypatch):
    """analyze_plots_precheck starts the precheck operation and returns the
    precheck document fetched from the operation's precheck_data_url."""
    # Blobstore upload slot for the plot ids selection
    _add_api_response(
        plots_analysis_api_url("upload/file/"),
        responses.POST,
        {
            "upload_id": "an-upload-id",
            "upload_url": "https://upload.example.com/",
        },
    )
    # The selected plot ids are PUT to the blobstore as JSON
    responses.put("https://upload.example.com/", match=[responses.matchers.json_params_matcher({
        "plot_ids": ["uno", "dos"],
    })])
    # Precheck start endpoint, verifying the payload
    _add_api_response(plots_analysis_api_url(
        "plots_groups/a-group-id/analysis/precheck/"),
        responses.POST,
        OP_RESP,
        match=responses.matchers.json_params_matcher({
            "analysis_name": "foobar",
            "upload_id": "an-upload-id",
            "date_from": "2023-01-01",
            "date_to": "2025-01-01",
        }),
    )
    # The operation result points at the downloadable precheck document
    _add_api_response(plots_analysis_api_url(f"operations/{OPERATION_ID}/"), responses.GET, {
        "status": "success",
        "results": {"precheck_data_url": "https://precheck_data_url.example.com/"}
    })
    precheck = {
        "status": "failed",
        "errors": {"critical": [], "high": []},
        "critical_count": 1,
        "high_count": 1,
    }
    responses.get("https://precheck_data_url.example.com/", json=precheck)
    client: TracerClient = _client(monkeypatch, platform="plots_analysis")
    with tempfile.NamedTemporaryFile() as tmp:
        with open(tmp.name, "w") as f:
            json.dump({"type": "FeatureCollection", "features": []}, f)
        assert client.analyze_plots_precheck(
            "a-group-id",
            "foobar",
            ["uno", "dos"],
            datetime.date.fromisoformat("2023-01-01"),
            datetime.date.fromisoformat("2025-01-01")
        ) == precheck
215 |
216 |
@responses.activate
def test_list_methodologies(monkeypatch):
    """Methodologies listing supports plain plus searched/paged queries."""
    client: TracerClient = _client(monkeypatch, platform="plots_analysis")
    url = plots_analysis_api_url("methodologies/")
    # Unfiltered first page
    add_mock_paginated_list_response(url)
    page = client.list_methodologies()
    assert len(page) == 2
    assert [item["name"] for item in page] == ["a_1", "a_2"]
    # Searched second page
    add_mock_paginated_list_response(url, 2, "m_2", "spam")
    page = client.list_methodologies(search="m_2", page_number=2)
    assert page[0]["name"] == "spam_1"
231 |
232 |
@responses.activate
def test_list_plots_groups(monkeypatch):
    """Plots groups listing supports plain, searched/paged and filtered queries."""
    client: TracerClient = _client(monkeypatch, platform="plots_analysis")
    url = plots_analysis_api_url("plots_groups/")
    # Unfiltered first page
    add_mock_paginated_list_response(url)
    page = client.list_plots_groups()
    assert len(page) == 2
    assert [group["name"] for group in page] == ["a_1", "a_2"]
    # Searched second page
    add_mock_paginated_list_response(url, 2, "m_2", "spam")
    page = client.list_plots_groups(search="m_2", page_number=2)
    assert page[0]["name"] == "spam_1"
    # Filtered by methodology
    add_mock_paginated_list_response(url, qs={"methodology": "cocoa"})
    page = client.list_plots_groups(methodology="cocoa")
    assert page[0]["name"] == "a_1"
251 |
252 |
@responses.activate
def test_list_plots_analyses(monkeypatch):
    """Analyses listing supports plain, searched/paged and archived-inclusive queries."""
    client: TracerClient = _client(monkeypatch, platform="plots_analysis")
    url = plots_analysis_api_url("plots_groups/spam/analysis/")
    # Unfiltered first page
    add_mock_paginated_list_response(url)
    page = client.list_plots_analyses("spam")
    assert len(page) == 2
    assert [analysis["name"] for analysis in page] == ["a_1", "a_2"]
    # Searched second page
    add_mock_paginated_list_response(url, 2, "m_2", "spam")
    page = client.list_plots_analyses("spam", search="m_2", page_number=2)
    assert page[0]["name"] == "spam_1"
    # Archived analyses are included on demand
    add_mock_paginated_list_response(url, include_archived=True, name_prefix="boo")
    page = client.list_plots_analyses("spam", include_archived=True)
    assert len(page) == 2
    assert page[0]["name"] == "boo_1"
272 |
273 |
@responses.activate
def test_download_plots_group(monkeypatch):
    """download_plots_group_to_file starts an export operation and saves the
    resulting artifact to the given local file."""
    # Export start endpoint, verifying the requested format
    _add_api_response(plots_analysis_api_url(
        "plots_groups/a-group-id/export/"),
        responses.POST,
        OP_RESP,
        match=responses.matchers.json_params_matcher({"format": "geojson"}),
    )
    # The operation result points at the downloadable artifact
    _add_api_response(plots_analysis_api_url(f"operations/{OPERATION_ID}/"), responses.GET, {
        "status": "success",
        "results": {"download_url": "https://a-group-id.example.com/geojson"}
    })
    polygons_fc = multipolygon_to_polygon_feature_collection(make_geojson_multipolygon())
    responses.add(
        responses.GET,
        "https://a-group-id.example.com/geojson",
        body=json.dumps(polygons_fc),
    )
    client: TracerClient = _client(monkeypatch, platform="plots_analysis")
    with tempfile.NamedTemporaryFile() as f:
        client.download_plots_group_to_file("a-group-id", "geojson", f.name)
        assert json.load(f) == polygons_fc
    # Exactly one call each: export POST, operation poll, artifact download
    assert len(responses.calls) == 3
297 |
298 |
@responses.activate
def test_list_plots_analysis_reports(monkeypatch):
    """Report listing supports plain queries plus search/report_type filters."""
    client: TracerClient = _client(monkeypatch, platform="plots_analysis")
    url = plots_analysis_api_url("plots_analyses/my-analysis-id/reports/")
    # Unfiltered listing
    add_mock_paginated_list_response(url, num_results=3)
    page = client.list_plots_analysis_reports("my-analysis-id")
    assert len(page) == 3
    assert page[0]["name"] == "a_1"
    assert page[-1]["name"] == "a_3"
    # Search combined with a report_type filter
    add_mock_paginated_list_response(url, qs={"report_type": "type_1", "search": "spam"})
    page = client.list_plots_analysis_reports("my-analysis-id", search="spam", report_type="type_1")
    assert len(page) == 2
312 |
313 |
@responses.activate
def test_list_plots_analysis_report_types(monkeypatch):
    """Report types listing returns the raw (non-paginated) list and supports
    an optional search filter forwarded as a query parameter."""
    client: TracerClient = _client(monkeypatch, platform="plots_analysis")
    url = plots_analysis_api_url("plots_analyses/my-analysis-id/reports/types/")
    # Unfiltered listing
    _add_api_response(
        url,
        json=[
            {"report_type": "type_1", "name": "a_1"},
            {"report_type": "type_2", "name": "a_2"},
            {"report_type": "type_1", "name": "a_3"},
            {"report_type": "type_3", "name": "a_4"},
        ],
    )
    reports = client.list_plots_analysis_report_types("my-analysis-id")
    assert len(reports) == 4
    assert reports[0]["report_type"] == "type_1" and reports[-1]["name"] == "a_4"
    # Filtered listing (search forwarded in the query string)
    _add_api_response(
        url,
        json=[
            {"report_type": "type_1", "name": "a_1"},
            {"report_type": "type_2", "name": "a_2"},
        ],
        match=responses.matchers.query_param_matcher({"search": "spam"})
    )
    reports = client.list_plots_analysis_report_types("my-analysis-id", search="spam")
    assert len(reports) == 2
340 |
341 |
@responses.activate
def test_create_plots_analysis_report_precheck(monkeypatch):
    """create_plots_analysis_report_precheck uploads the plot ids selection,
    starts the precheck operation and returns the fetched precheck data."""
    # Blobstore upload slot for the plot ids selection
    _add_api_response(
        plots_analysis_api_url("upload/file/"),
        responses.POST,
        {
            "upload_id": "an-upload-id",
            "upload_url": "https://upload.example.com/",
        },
    )
    responses.put("https://upload.example.com/", match=[responses.matchers.json_params_matcher({
        "plot_ids": ["uno", "dos"],
    })])
    # Precheck start endpoint, verifying the payload
    _add_api_response(plots_analysis_api_url(
        "plots_analyses/an-analysis-id/reports/precheck/"),
        responses.POST,
        OP_RESP,
        match=responses.matchers.json_params_matcher({
            "name": "foobar",
            "upload_id": "an-upload-id",
            "report_type": "a-report-type",
            "metadata": {"foo": "bar"},
        }),
    )
    _add_api_response(plots_analysis_api_url(f"operations/{OPERATION_ID}/"), responses.GET, {
        "status": "success",
        "results": {"precheck_data_url": "https://precheck_data_url.example.com/"}
    })
    # The precheck data is downloaded from the URL returned by the operation;
    # without this mock the final assert would hit an unregistered URL
    # (same pattern as test_analyse_precheck).
    responses.get("https://precheck_data_url.example.com/", json={"status": "passed"})
    client: TracerClient = _client(monkeypatch, platform="plots_analysis")
    with tempfile.NamedTemporaryFile() as tmp:
        with open(tmp.name, "w") as f:
            json.dump({"type": "FeatureCollection", "features": []}, f)
        assert client.create_plots_analysis_report_precheck(
            "an-analysis-id",
            "foobar",
            ["uno", "dos"],
            "a-report-type",
            metadata={"foo": "bar"}
        ) == {"status": "passed"}
381 |
382 |
@responses.activate
def test_create_plots_analysis_report(monkeypatch):
    """create_plots_analysis_report uploads the plot ids selection, starts the
    report operation and returns the new report's id."""
    # Blobstore upload slot for the plot ids selection
    _add_api_response(
        plots_analysis_api_url("upload/file/"),
        responses.POST,
        {
            "upload_id": "an-upload-id",
            "upload_url": "https://upload.example.com/",
        },
    )
    responses.put("https://upload.example.com/", match=[responses.matchers.json_params_matcher({
        "plot_ids": ["uno", "dos"],
    })])
    # Report creation endpoint, verifying the payload
    _add_api_response(plots_analysis_api_url(
        "plots_analyses/an-analysis-id/reports/"),
        responses.POST,
        OP_RESP,
        match=responses.matchers.json_params_matcher({
            "name": "foobar",
            "upload_id": "an-upload-id",
            "report_type": "a-report-type",
            "metadata": {"foo": "bar"},
        }),
    )
    _add_api_response(plots_analysis_api_url(f"operations/{OPERATION_ID}/"), responses.GET, {
        "status": "success",
        "results": {"plots_analysis_report_id": "a-report-id"}
    })
    client: TracerClient = _client(monkeypatch, platform="plots_analysis")
    with tempfile.NamedTemporaryFile() as tmp:
        with open(tmp.name, "w") as f:
            json.dump({"type": "FeatureCollection", "features": []}, f)
        assert client.create_plots_analysis_report(
            "an-analysis-id",
            "foobar",
            ["uno", "dos"],
            "a-report-type",
            metadata={"foo": "bar"}
        ) == "a-report-id"
422 |
423 |
@responses.activate
def test_get_plots_group(monkeypatch):
    """get_plots_group returns the plots group detail payload."""
    client: TracerClient = _client(monkeypatch, platform="plots_analysis")
    detail = {
        "id": "a-plots-group",
        "name": "My Plots Group",
        "created_at": "2025-09-29T10:04:08.143098Z",
        "methodology": "Coffee - EUDR",
    }
    _add_api_response(
        plots_analysis_api_url("plots_groups/a-plots-group/"),
        responses.GET,
        detail,
    )
    plots_group = client.get_plots_group("a-plots-group")
    assert plots_group["id"] == "a-plots-group"
    assert plots_group["name"] == "My Plots Group"
440 |
441 |
@responses.activate
def test_get_plots_analysis(monkeypatch):
    """get_plots_analysis returns the analysis detail payload."""
    client: TracerClient = _client(monkeypatch, platform="plots_analysis")
    detail = {
        "id": "an-analysis-id",
        "name": "My Analysis",
        "date_from": "2023-06-06",
        "date_to": "2025-02-08",
        "url": "https://app.picterra.ch/plots_analysis/plots_groups/136b812e-8d9c-418f-b317-8be5c7c6281d/analysis/cda443d7-5baf-483d-bb5e-fa1190180b0d/"  # noqa[E501]
    }
    _add_api_response(
        plots_analysis_api_url("plots_analyses/an-analysis-id/"),
        responses.GET,
        detail,
    )
    plots_analysis = client.get_plots_analysis("an-analysis-id")
    assert plots_analysis["id"] == "an-analysis-id"
    assert plots_analysis["name"] == "My Analysis"
459 |
460 |
@responses.activate
def test_get_plots_analysis_report(monkeypatch):
    """get_plots_analysis_report returns the report detail payload, including
    its list of downloadable artifacts."""
    _add_api_response(
        plots_analysis_api_url(
            "plots_analysis_reports/a-report-id/"
        ),
        responses.GET,
        {
            "id": "a-report-id",
            "name": "my report",
            "created_at": "2025-09-29T10:04:08.143098Z",
            "report_type": "eudr_export",
            "artifacts": [
                {
                    "name": "EUDR Report",
                    "filename": "2025-09-29-nightly-eudr-export.pdf",
                    "size_bytes": 71802,
                    "description": "A PDF report to be used for EUDR",
                    "content_type": "application/pdf",
                    "download_url": "http://example.com/report.pdf",
                },
                {
                    "name": "EUDR Traces NT",
                    "filename": "2025-09-29-nightly-eudr-export.geojson",
                    "size_bytes": 877,
                    "description": "A GeoJSON file that can be submitted to the EU Deforestation Due Diligence Registry",
                    "content_type": "application/geo+json",
                    "download_url": "http://example.com/traces.geojson",
                },
            ],
        },
    )
    client: TracerClient = _client(monkeypatch, platform="plots_analysis")
    report = client.get_plots_analysis_report("a-report-id")
    assert report["id"] == "a-report-id"
    assert report["artifacts"][0]["name"] == "EUDR Report"
497 |
498 |
@responses.activate
def test_get_authorization_grants(monkeypatch):
    """get_authorization_grants returns the grants payload untouched."""
    client: TracerClient = _client(monkeypatch, platform="plots_analysis")
    grants_payload = {
        "grants": [
            {"grantee": {"type": "user", "id": "1"}, "role": "viewer"},
            {"grantee": {"type": "organization", "id": "2"}, "role": "editor"},
        ]
    }
    _add_api_response(
        plots_analysis_api_url("authorization/grants/plots_group/a-plots-group-id/"),
        responses.GET,
        grants_payload,
    )
    grants = client.get_authorization_grants("plots_group", "a-plots-group-id")
    assert grants == grants_payload
    assert len(responses.calls) == 1
515 |
516 |
@responses.activate
def test_set_authorization_grants(monkeypatch):
    """set_authorization_grants POSTs the grants and returns the server payload."""
    client: TracerClient = _client(monkeypatch, platform="plots_analysis")
    new_grants = [
        {"grantee": {"type": "user", "id": "1"}, "role": "viewer"},
        {"grantee": {"type": "organization", "id": "2"}, "role": "editor"},
    ]
    payload = {"grants": new_grants}
    _add_api_response(
        plots_analysis_api_url("authorization/grants/plots_group/a-plots-group-id/"),
        responses.POST,
        payload,
        match=responses.matchers.json_params_matcher(payload),
    )
    grants = client.set_authorization_grants("plots_group", "a-plots-group-id", {"grants": new_grants})
    assert grants == payload
    assert len(responses.calls) == 1
532 |
--------------------------------------------------------------------------------
/src/picterra/tracer_client.py:
--------------------------------------------------------------------------------
1 | """
2 | Handles interfacing with the API documented at https://app.picterra.ch/public/apidocs/plots_analysis/v1/
3 |
4 | Note that Tracer is separate from Forge and so an API key which is valid for
5 | one may encounter permissions issues if used with the other
6 | """
7 |
8 | import datetime
9 | import json
10 | import os.path
11 | import sys
12 | import warnings
13 |
14 | if sys.version_info >= (3, 8):
15 | from typing import Any, Dict, List, Literal, Optional, Tuple
16 | else:
17 | from typing_extensions import Literal
18 | from typing import Any, Dict, List, Optional, Tuple
19 |
20 | import requests
21 |
22 | from picterra.base_client import (
23 | BaseAPIClient,
24 | ResultsPage,
25 | _check_resp_is_ok,
26 | _download_to_file,
27 | )
28 |
# Kinds of resources whose authorization grants can be read/set via the API
GrantKind = Literal["plots_group", "plots_analysis", "methodology"]
30 |
31 |
32 | class TracerClient(BaseAPIClient):
    def __init__(self, **kwargs):
        """Create a client rooted at the plots analysis (Tracer) v1 API; kwargs are forwarded to BaseAPIClient."""
        super().__init__("public/api/plots_analysis/v1/", **kwargs)
35 |
36 | def _return_results_page(
37 | self, resource_endpoint: str, params: Optional[Dict[str, Any]] = None
38 | ) -> ResultsPage:
39 | if params is None:
40 | params = {}
41 | if "page_number" not in params:
42 | params["page_number"] = 1
43 |
44 | url = self._full_url("%s/" % resource_endpoint, params=params)
45 | return ResultsPage(url, self.sess.get)
46 |
47 | def _make_upload(self) -> Tuple[str, str]:
48 | resp = self.sess.post(self._full_url("upload/file/"))
49 | _check_resp_is_ok(resp, "Failure obtaining an upload")
50 | upload_id, upload_url = resp.json()["upload_id"], resp.json()["upload_url"]
51 | return upload_id, upload_url
52 |
53 | def _upload_plot_ids(self, plot_ids: List[str]) -> str:
54 | upload_id, upload_url = self._make_upload()
55 | resp = requests.put(upload_url, json={"plot_ids": plot_ids})
56 | _check_resp_is_ok(resp, "Failure uploading plots file")
57 | return upload_id
58 |
59 | def list_methodologies(
60 | self,
61 | search: Optional[str] = None,
62 | page_number: Optional[int] = None,
63 | ) -> ResultsPage:
64 | """
65 | List all the methodologies the user can access, see `ResultsPage`
66 | for the pagination access pattern.
67 |
68 | Args:
69 | search: The term used to filter methodologies by name
70 | page_number: Optional page (from 1) of the list we want to retrieve
71 |
72 | Returns:
73 | See https://app.picterra.ch/public/apidocs/plots_analysis/v1/#tag/plots-groups/operation/getMethodologiesList
74 |
75 | Example:
76 |
77 | ::
78 |
79 | {
80 | 'id': '42',
81 | 'name': 'Coffee - EUDR',
82 | },
83 | {
84 | 'id': '43',
85 | 'name': 'Cattle - EUDR'
86 | }
87 |
88 | """
89 | data: Dict[str, Any] = {}
90 | if search is not None:
91 | data["search"] = search.strip()
92 | if page_number is not None:
93 | data["page_number"] = int(page_number)
94 | return self._return_results_page("methodologies", data)
95 |
96 | def create_plots_group(
97 | self,
98 | plots_group_name: str,
99 | methodology_id: str,
100 | plots_geometries_filenames: List[str],
101 | columns: Optional[Dict[str, str]] = None,
102 | ) -> str:
103 | """
104 | Creates a new plots group.
105 |
106 | Args:
107 | plots_group_name: user-friendly name for the group
108 | methodology_id: id of the methodology to use, retrieved via list_methodologies
109 | plots_geometries_filenames: Paths to files containing the geometries of the plots the group will have
110 | columns: columns to add to the group. if any
111 |
112 | Returns:
113 | str: the id of the new group.
114 | """
115 | data = {
116 | "name": plots_group_name,
117 | "methodology_id": methodology_id,
118 | "custom_columns_values": columns or {},
119 | }
120 | resp = self.sess.post(self._full_url("plots_groups/"), json=data)
121 | _check_resp_is_ok(resp, "Failure starting plots group commit")
122 | op_result = self._wait_until_operation_completes(resp.json())["results"]
123 | self.update_plots_group_plots(
124 | op_result["plots_group_id"], plots_geometries_filenames
125 | )
126 | return op_result["plots_group_id"]
127 |
128 | def update_plots_group_plots(
129 | self,
130 | plots_group_id: str,
131 | plots_geometries_filenames: List[str],
132 | delete_existing_plots: bool = False,
133 | ):
134 | """
135 | Updates the geometries of a given plots group
136 |
137 | Args:
138 | plots_group_id: identifier for the plots group to replace
139 | plots_geometries_filenames: List of paths to files containing the geometries of the plots the group will have
140 | delete_existing_plots: If true, will remove all existing plots in the plots group before uploading new ones.
141 | If False (default), plot data uploaded is merged with existing plots.
142 |
143 | Returns:
144 | dict: The creation operation result, which includes the plot group id
145 | """
146 | files = []
147 | for filename in plots_geometries_filenames:
148 | upload_id, upload_url = self._make_upload()
149 | with open(filename, "rb") as fh:
150 | resp = requests.put(upload_url, data=fh.read())
151 | _check_resp_is_ok(resp, "Failure uploading plots file for group")
152 | files.append(
153 | {"filename": os.path.basename(filename), "upload_id": upload_id}
154 | )
155 | data = {"files": files, "overwrite": delete_existing_plots}
156 | resp = self.sess.post(
157 | self._full_url(f"plots_groups/{plots_group_id}/upload/commit/"), json=data
158 | )
159 | _check_resp_is_ok(resp, "Failure starting plots group update:")
160 | return self._wait_until_operation_completes(resp.json())
161 |
    def download_plots_group_to_file(
        self, plots_group_id: str, format: Literal["excel", "geojson"], filename: str
    ) -> None:
        """
        Downloads a plots group to a local file

        Args:
            plots_group_id: The id of the plots group to download
            format: The export format, either "excel" or "geojson"
            filename: The local filename where to save the plots group

        Raises:
            APIError: There was an error while trying to download the plots group id
        """
        data = {"format": format}
        resp = self.sess.post(
            self._full_url("plots_groups/%s/export/" % plots_group_id), json=data
        )
        _check_resp_is_ok(resp, "Failure starting plots group download")
        # The export runs asynchronously: wait for the operation to finish,
        # then fetch the artifact from the URL it returns
        op = self._wait_until_operation_completes(resp.json())
        _download_to_file(op["results"]["download_url"], filename)
182 |
183 | def list_plots_groups(
184 | self,
185 | search: Optional[str] = None,
186 | page_number: Optional[int] = None,
187 | include_archived: bool = False,
188 | methodology: Optional[str] = None,
189 | ) -> ResultsPage:
190 | """
191 | List all the plots group the user can access, see `ResultsPage`
192 | for the pagination access pattern.
193 |
194 | This function is still **beta** and subject to change.
195 |
196 |
197 | Args:
198 | search: The term used to filter by name
199 | page_number: Optional page (from 1) of the list we want to retrieve
200 | include_archived: If true, includes archived plot groups in the results
201 | methodology: If not None, filters the groups by the methodology (eg "Coffee - EUDR")
202 |
203 | Returns:
204 | See https://app.picterra.ch/public/apidocs/plots_analysis/v1/#tag/plots-groups/operation/getPlotsGroupsList
205 | """
206 | data: Dict[str, Any] = {}
207 | if search is not None:
208 | data["search"] = search.strip()
209 | if page_number is not None:
210 | data["page_number"] = int(page_number)
211 | if include_archived:
212 | data["include_archived"] = include_archived
213 | if methodology is not None:
214 | data["methodology"] = methodology
215 | return self._return_results_page("plots_groups", data)
216 |
217 | def analyze_plots_precheck(
218 | self,
219 | plots_group_id: str,
220 | plots_analysis_name: str,
221 | plot_ids: List[str],
222 | date_from: datetime.date,
223 | date_to: datetime.date,
224 | ) -> dict:
225 | """
226 | Check the analysis for a given date over the plot ids of the specified plot group has no errors
227 |
228 | Args:
229 | plots_group_id: id of the plots group on which we want to run the new analysis
230 | plots_analysis_name: name to give to the new analysis
231 | plot_ids: list of the plot ids of the plots group to select for the analysis
232 | date_from: start point in time at which the analysis should be evaluated; please note that
233 | **the date that make sense are methodology dependent**, so please check the methodology
234 | of the plots group beforehand
235 | date_to: end point in time at which the analysis should be evaluated.
236 |
237 | Returns:
238 | dict: the precheck data
239 | """
240 | upload_id, upload_url = self._make_upload()
241 | resp = requests.put(upload_url, data=json.dumps({"plot_ids": plot_ids}))
242 | _check_resp_is_ok(resp, "Failure uploading plots file for analysis")
243 | data = {
244 | "analysis_name": plots_analysis_name,
245 | "upload_id": upload_id,
246 | "date_from": date_from.isoformat(),
247 | "date_to": date_to.isoformat(),
248 | }
249 | resp = self.sess.post(
250 | self._full_url(f"plots_groups/{plots_group_id}/analysis/precheck/"),
251 | json=data,
252 | )
253 | _check_resp_is_ok(resp, "Failure starting analysis precheck")
254 | op_result = self._wait_until_operation_completes(resp.json())
255 | url = op_result["results"]["precheck_data_url"]
256 | return requests.get(url).json()
257 |
258 | def analyze_plots(
259 | self,
260 | plots_group_id: str,
261 | plots_analysis_name: str,
262 | plot_ids: List[str],
263 | date_from: datetime.date,
264 | date_to: datetime.date,
265 | ) -> str:
266 | """
267 | Runs the analysis for a given date over the plot ids of the specified plot group,
268 | and returns the URL where we can see the analysis in the Picterra platform.
269 |
270 | Args:
271 | plots_group_id: id of the plots group on which we want to run the new analysis
272 | plots_analysis_name: name to give to the new analysis
273 | plot_ids: list of the plot ids of the plots group to select for the analysis
274 | date_from: start point in time at which the analysis should be evaluated; please note
275 | that **the date that make sense are methodology dependent**, so please check the
276 | methodology of the plots group beforehand
277 | date_to: end point in time at which the analysis should be evaluated.
278 |
279 | Returns:
280 | str: the analysis id.
281 | """
282 | upload_id = self._upload_plot_ids(plot_ids)
283 | data = {
284 | "analysis_name": plots_analysis_name,
285 | "upload_id": upload_id,
286 | "date_from": date_from.isoformat(),
287 | "date_to": date_to.isoformat(),
288 | }
289 | resp = self.sess.post(
290 | self._full_url(f"plots_groups/{plots_group_id}/analysis/"), json=data
291 | )
292 | _check_resp_is_ok(resp, "Couldn't start analysis")
293 | op_result = self._wait_until_operation_completes(resp.json())
294 | analysis_id = op_result["results"]["analysis_id"]
295 | return analysis_id
296 |
297 | def list_plots_analyses(
298 | self,
299 | plots_group_id: str,
300 | search: Optional[str] = None,
301 | page_number: Optional[int] = None,
302 | include_archived: bool = False,
303 | ) -> ResultsPage:
304 | """
305 | List all the plots analyses the user can access, see `ResultsPage`
306 | for the pagination access pattern.
307 |
308 | This function is still **beta** and subject to change.
309 |
310 | Args:
311 | plots_group_id: id of the plots group on which we want to list the analyses
312 | search: The term used to filter by name
313 | page_number: Optional page (from 1) of the list we want to retrieve
314 | include_archived: Defaults to false. If true, includes archived analyses in the results
315 |
316 | Returns:
317 | See https://app.picterra.ch/public/apidocs/plots_analysis/v1/#tag/analysis/operation/getPlotsAnalysesList
318 | """
319 | data: Dict[str, Any] = {}
320 | if include_archived:
321 | data["include_archived"] = str(include_archived).lower()
322 | if search is not None:
323 | data["search"] = search.strip()
324 | if page_number is not None:
325 | data["page_number"] = int(page_number)
326 | return self._return_results_page(
327 | f"plots_groups/{plots_group_id}/analysis/", data
328 | )
329 |
330 | def list_plots_analysis_reports(
331 | self,
332 | plots_analysis_id: str,
333 | plots_group_id: Optional[str] = None,
334 | page_number: Optional[int] = None,
335 | include_archived: bool = False,
336 | search: Optional[str] = None,
337 | report_type: Optional[str] = None,
338 | ) -> ResultsPage:
339 | """
340 | List all the reports belonging to a given plots analysis, see `ResultsPage`
341 | for the pagination access pattern.
342 |
343 | Args:
344 | plots_analysis_id: id of the plots analysis for which we want to list the reports
345 | page_number: Optional page (from 1) of the list we want to retrieve
346 | include_archived: Defaults to false. If true, includes archived analysis reports in the
347 | results
348 | search: Optional term to search report types by name
349 | report_type: Optional type of report to restrict the list by, use list_plots_analysis_report_types
350 | to know which the available report types are
351 |
352 | Deprecated arguments:
353 | plots_group_id: ignored, do not provide it
354 |
355 | Returns:
356 | See https://app.picterra.ch/public/apidocs/plots_analysis/v1/#tag/reports/operation/getReportsList
357 | """ # noqa[E501]
358 | if plots_group_id is not None:
359 | warnings.warn(
360 | "Passing plots_group_id is not needed anymore, remove it",
361 | DeprecationWarning,
362 | )
363 |
364 | params: Dict[str, Any] = {}
365 | if page_number is not None:
366 | params["page_number"] = int(page_number)
367 | if include_archived:
368 | params["include_archived"] = include_archived
369 | if search is not None:
370 | params["search"] = search.strip()
371 | if report_type is not None:
372 | params["report_type"] = report_type
373 | return self._return_results_page(
374 | f"plots_analyses/{plots_analysis_id}/reports/", params
375 | )
376 |
377 | def list_plots_analysis_report_types(
378 | self,
379 | plots_analysis_id: str,
380 | plots_group_id: Optional[str] = None,
381 | search: Optional[str] = None,
382 | ) -> List[Dict[str, Any]]:
383 | """
384 | List all the plots analyses report types the user can use (see create_plots_analysis_report)
385 |
386 | Args:
387 | plots_analysis_id: id of the plots analysis
388 | search: optional term to search report types by name, if any
389 |
390 | Deprecated arguments:
391 | plots_group_id: ignored, do not provide it
392 |
393 | Returns:
394 | See https://app.picterra.ch/public/apidocs/plots_analysis/v1/#tag/reports/operation/getReportTypesForAnalysis
395 | """ # noqa[E501]
396 | if plots_group_id is not None:
397 | warnings.warn(
398 | "Passing plots_group_id is not needed anymore, remove it",
399 | DeprecationWarning,
400 | )
401 |
402 | params: Dict[str, Any] = {}
403 | if search is not None:
404 | params["search"] = search.strip()
405 | resp = self.sess.get(
406 | self._full_url(f"plots_analyses/{plots_analysis_id}/reports/types/"),
407 | params=params,
408 | )
409 | _check_resp_is_ok(resp, "Couldn't list report types")
410 | return resp.json()
411 |
412 | def create_plots_analysis_report_precheck(
413 | self,
414 | plots_analysis_id: str,
415 | report_name: str,
416 | plot_ids: List[str],
417 | report_type: str,
418 | plots_group_id: Optional[str] = None,
419 | *,
420 | metadata: Optional[dict] = None,
421 | ) -> Dict[str, Any]:
422 | """
423 | Check creation of a report with the given parameters is ok
424 |
425 | If the function fails, the report is not valid
426 |
427 | Args:
428 | plots_analysis_id: id of the plots analysis
429 | report_name: name to give to the report
430 | plot_ids: list of the plot ids to select for the report
431 | report_type: type of report to generate, as per list_plots_analyses_report_types
432 | metadata: set of key-value pairs which may be included in the report
433 |
434 | Deprecated arguments:
435 | plots_group_id: ignored, do not provide it
436 |
437 | Returns:
438 | dict: the precheck data
439 | """
440 | if plots_group_id is not None:
441 | warnings.warn(
442 | "Passing plots_group_id is not needed anymore, remove it",
443 | DeprecationWarning,
444 | )
445 | upload_id = self._upload_plot_ids(plot_ids)
446 | data = {
447 | "name": report_name,
448 | "upload_id": upload_id,
449 | "report_type": report_type,
450 | "metadata": metadata if metadata is not None else {},
451 | }
452 | resp = self.sess.post(
453 | self._full_url(f"plots_analyses/{plots_analysis_id}/reports/precheck/"),
454 | json=data,
455 | )
456 | _check_resp_is_ok(resp, "Failure starting precheck")
457 | self._wait_until_operation_completes(resp.json())
458 | return {"status": "passed"}
459 |
460 | def create_plots_analysis_report(
461 | self,
462 | plots_analysis_id: str,
463 | report_name: str,
464 | plot_ids: List[str],
465 | report_type: str,
466 | plots_group_id: Optional[str] = None,
467 | *,
468 | metadata: Optional[dict] = None,
469 | ) -> str:
470 | """
471 | Creates a report
472 |
473 | Args:
474 | plots_analysis_id: id of the plots analysis
475 | report_name: name to give to the report
476 | plot_ids: list of the plot ids to select for the report
477 | report_type: type of report to generate, as per list_plots_analysis_report_types
478 | metadata: set of key-value pairs which may be included in the report
479 |
480 | Deprecated arguments:
481 | plots_group_id: ignored, do not provide it
482 |
483 | Returns:
484 | str: the id of the new report
485 | """
486 | if plots_group_id is not None:
487 | warnings.warn(
488 | "Passing plots_group_id is not needed anymore, remove it",
489 | DeprecationWarning,
490 | )
491 | upload_id = self._upload_plot_ids(plot_ids)
492 | data = {
493 | "name": report_name,
494 | "upload_id": upload_id,
495 | "report_type": report_type,
496 | "metadata": metadata if metadata is not None else {},
497 | }
498 | resp = self.sess.post(
499 | self._full_url(f"plots_analyses/{plots_analysis_id}/reports/"), json=data
500 | )
501 | _check_resp_is_ok(resp, "Failure starting analysis precheck")
502 | op_result = self._wait_until_operation_completes(resp.json())
503 | report_id = op_result["results"]["plots_analysis_report_id"]
504 | return report_id
505 |
506 | def get_plots_group(self, plots_group_id: str) -> dict:
507 | """
508 | Get plots group information
509 |
510 | Args:
511 | plots_group_id: id of the plots group
512 |
513 | Raises:
514 | APIError: There was an error while getting the plots group information
515 |
516 | Returns:
517 | dict: see https://app.picterra.ch/public/apidocs/plots_analysis/v1/#tag/plots-groups/operation/getPlotsGroup
518 | """
519 | resp = self.sess.get(self._full_url("plots_groups/%s/" % plots_group_id))
520 | _check_resp_is_ok(resp, "Failed to get plots group")
521 | return resp.json()
522 |
523 | def get_plots_analysis(
524 | self, plots_analysis_id: str, plots_group_id: Optional[str] = None
525 | ) -> Dict[str, Any]:
526 | """
527 | Get plots analysis information
528 |
529 | Args:
530 | plots_analysis_id: id of the plots analysis
531 |
532 | Deprecated arguments:
533 | plots_group_id: ignored, do not provide it
534 |
535 | Returns:
536 | dict: see https://app.picterra.ch/public/apidocs/plots_analysis/v1/#tag/analysis/operation/getAnalysis
537 | """
538 | if plots_group_id is not None:
539 | warnings.warn(
540 | "Passing plots_group_id is not needed anymore, remove it",
541 | DeprecationWarning,
542 | )
543 |
544 | resp = self.sess.get(self._full_url(f"plots_analyses/{plots_analysis_id}/"))
545 | _check_resp_is_ok(resp, "Failed to get plots analysis")
546 | return resp.json()
547 |
548 | def get_plots_analysis_report(
549 | self,
550 | plots_analysis_report_id: str,
551 | plots_group_id: Optional[str] = None,
552 | plots_analysis_id: Optional[str] = None,
553 | ) -> Dict[str, Any]:
554 | """
555 | Get plots analysis report information
556 |
557 | Args:
558 | plots_analysis_report_id: id of the plots analysis report
559 |
560 | Deprecated arguments:
561 | plots_group_id: ignored, do not provide it
562 | plots_analysis_id: ignored, do not provide it
563 |
564 | Raises:
565 | APIError: There was an error while getting the plots analysis report information
566 |
567 | Returns:
568 | dict: see https://app.picterra.ch/public/apidocs/plots_analysis/v1/#tag/reports/operation/getReportForAnalysis
569 | """
570 | if plots_group_id is not None or plots_analysis_id is not None:
571 | warnings.warn(
572 | "Passing plots_group_id/plots_analysis_id is not needed anymore, remove it",
573 | DeprecationWarning,
574 | )
575 |
576 | resp = self.sess.get(
577 | self._full_url(f"plots_analysis_reports/{plots_analysis_report_id}/")
578 | )
579 | _check_resp_is_ok(resp, "Failed to get plots analysis report")
580 | return resp.json()
581 |
582 | def get_authorization_grants(self, kind: GrantKind, resource_id: str):
583 | """
584 | **beta** function. Get the authorization grants for a given resource.
585 |
586 | Args:
587 | kind: The kind of resource to get the grants for
588 | resource_id: The ID of the resource
589 |
590 | Returns:
591 | dict: A dictionary containing the authorization grants for the resource.
592 | See https://app.picterra.ch/public/apidocs/plots_analysis/v1/#tag/authorization/operation/getGrants
593 | """
594 | resp = self.sess.get(
595 | self._full_url("authorization/grants/%s/%s/" % (kind, resource_id))
596 | )
597 | _check_resp_is_ok(resp, "Failed to get grants for %s %s" % (kind, resource_id))
598 | return resp.json()
599 |
600 | def set_authorization_grants(
601 | self,
602 | kind: GrantKind,
603 | resource_id: str,
604 | grants_data: dict,
605 | ):
606 | """
607 | **beta** function. Set the authorization grants for a given resource.
608 |
609 | Args:
610 | kind: The kind of resource to set the grants for
611 | resource_id: The ID of the resource
612 | grants: See https://app.picterra.ch/public/apidocs/plots_analysis/v1/#tag/authorization/operation/setGrants.
613 |
614 | Returns:
615 | dict: The updated authorization grants for the resource.
616 | """
617 | resp = self.sess.post(
618 | self._full_url("authorization/grants/%s/%s/" % (kind, resource_id)),
619 | json=grants_data,
620 | )
621 | _check_resp_is_ok(resp, "Failed to set grants for %s %s" % (kind, resource_id))
622 | return resp.json()
623 |
--------------------------------------------------------------------------------
/examples/forge/nongeo_results.geojson:
--------------------------------------------------------------------------------
1 | { "type": "MultiPolygon", "coordinates": [ [ [ [ 0.000067822803951, -0.00006782280396 ], [ 0.00002829693145, -0.000066026173389 ], [ 0.000010330625767, -0.00006243291226 ], [ 0.000008533995199, -0.000057941335827 ], [ 0.000000449157642, -0.000061534596968 ], [ 0.000000449157642, -0.000050754813557 ], [ 0.000000449157642, -0.000053449759419 ], [ 0.000001347472926, -0.000053449759419 ], [ 0.00000224578821, -0.000052551444127 ], [ 0.000006737364631, -0.000056144705256 ], [ 0.000007635679915, -0.000045364921857 ], [ 0.000014822202188, -0.000040873345424 ], [ 0.000014822202188, -0.000034585138445 ], [ 0.000031890192586, -0.000027398616162 ], [ 0.000076805956792, -0.000025601985604 ], [ 0.000112738568157, -0.000030093562025 ], [ 0.00013519645026, -0.000029195246746 ], [ 0.000141484657249, -0.000039076714866 ], [ 0.000156756017079, -0.000031890192596 ], [ 0.000155857701795, -0.000025601985604 ], [ 0.000160349278215, -0.000021110409184 ], [ 0.000194485259012, -0.000016618832763 ], [ 0.000230417870377, -0.000015720517484 ], [ 0.00023760439265, -0.000018415463334 ], [ 0.000234909446797, -0.000022907039742 ], [ 0.000236706077365, -0.000025601985604 ], [ 0.000251079121911, -0.000030991877304 ], [ 0.000266350481741, -0.000029195246746 ], [ 0.000276231949867, -0.000033686823154 ], [ 0.000294198255549, -0.000036381769003 ], [ 0.00028791004856, -0.000050754813557 ], [ 0.000278028580435, -0.000053449759419 ], [ 0.000240299338502, -0.000055246389977 ], [ 0.000225027978672, -0.000048958182986 ], [ 0.00022233303282, -0.000045364921857 ], [ 0.000216943141115, -0.000045364921857 ], [ 0.00019179031316, -0.000053449759419 ], [ 0.000172027376909, -0.000054348074685 ], [ 0.00016573916992, -0.000049856498265 ], [ 0.000160349278215, -0.000050754813557 ], [ 0.000149569494806, -0.000055246389977 ], [ 0.000145077918385, -0.000061534596968 ], [ 0.000137891396112, -0.000063331227539 ], [ 0.000067822803951, -0.00006782280396 ], [ 0.000067822803951, -0.00006782280396 ] ] ], [ [ [ 
0.000842170578862, -0.000046263237136 ], [ 0.000790966607667, -0.000044466606565 ], [ 0.000769407040848, -0.000039076714866 ], [ 0.000733474429484, -0.000039975030145 ], [ 0.000714609808517, -0.000038178399574 ], [ 0.000711016547381, -0.000034585138445 ], [ 0.000714609808517, -0.000025601985604 ], [ 0.000727186222495, -0.000022008724463 ], [ 0.000749644104598, -0.000021110409184 ], [ 0.000768508725564, -0.000026500300896 ], [ 0.000803543021645, -0.000027398616162 ], [ 0.000826000903748, -0.000019313778613 ], [ 0.000862831830397, -0.000021110409184 ], [ 0.000867323406817, -0.000030093562025 ], [ 0.000884391397216, -0.000033686823154 ], [ 0.000890679604205, -0.000033686823154 ], [ 0.000898764441762, -0.000028296931454 ], [ 0.000929307161422, -0.000030093562025 ], [ 0.000929307161422, -0.000036381769003 ], [ 0.000904154333466, -0.000036381769003 ], [ 0.000898764441762, -0.000041771660716 ], [ 0.000879001505511, -0.000040873345424 ], [ 0.000842170578862, -0.000046263237136 ], [ 0.000842170578862, -0.000046263237136 ] ] ], [ [ [ 0.001035308364948, -0.000049856498265 ], [ 0.001020037005118, -0.000045364921857 ], [ 0.001004765645288, -0.000049856498265 ], [ 0.000963443142218, -0.000046263237136 ], [ 0.000949070097672, -0.000039076714866 ], [ 0.000947273467104, -0.000030991877304 ], [ 0.001018240374549, -0.000027398616162 ], [ 0.001037104995516, -0.000029195246746 ], [ 0.001075732552733, -0.000022008724463 ], [ 0.001106275272393, -0.000021110409184 ], [ 0.001143106199042, -0.000023805355034 ], [ 0.001177242179839, -0.000033686823154 ], [ 0.001174547233986, -0.000042669975995 ], [ 0.001161970820009, -0.000046263237136 ], [ 0.001144004514326, -0.000044466606565 ], [ 0.001035308364948, -0.000049856498265 ], [ 0.001035308364948, -0.000049856498265 ] ] ], [ [ [ 0.00120598826893, -0.000063331227539 ], [ 0.001188021963248, -0.00006243291226 ], [ 0.001178140495123, -0.000056144705256 ], [ 0.001179038810407, -0.000030991877304 ], [ 0.001184428702112, -0.000034585138445 ], [ 
0.00120598826893, -0.000030991877304 ], [ 0.001212276475919, -0.000032788507875 ], [ 0.001215869737056, -0.000037280084295 ], [ 0.001223954574613, -0.000038178399574 ], [ 0.001227547835749, -0.000034585138445 ], [ 0.001254497294273, -0.000030093562025 ], [ 0.001311989472457, -0.000029195246746 ], [ 0.001331752408707, -0.000033686823154 ], [ 0.001341633876833, -0.000040873345424 ], [ 0.001339837246264, -0.000045364921857 ], [ 0.001325464201718, -0.000051653128836 ], [ 0.001276056861092, -0.000050754813557 ], [ 0.001253598978989, -0.000054348074685 ], [ 0.001223056259329, -0.000054348074685 ], [ 0.00120598826893, -0.000063331227539 ], [ 0.00120598826893, -0.000063331227539 ] ] ], [ [ [ 0.000534048436409, -0.0000821958485 ], [ 0.000501709086181, -0.00007950090265 ], [ 0.000464878159532, -0.0000821958485 ], [ 0.000437030385724, -0.000076805956801 ], [ 0.000410979242485, -0.000075907641509 ], [ 0.000382233153393, -0.000071416065088 ], [ 0.000356182010153, -0.000059737966398 ], [ 0.000334622443335, -0.000061534596968 ], [ 0.00033372412805, -0.000055246389977 ], [ 0.000336419073903, -0.000049856498265 ], [ 0.000368758424131, -0.000048958182986 ], [ 0.000393012936802, -0.000044466606565 ], [ 0.000393012936802, -0.000048059867707 ], [ 0.000413674188337, -0.000056144705256 ], [ 0.000441521962145, -0.000058839651106 ], [ 0.000473861312373, -0.000057043020535 ], [ 0.000483742780498, -0.00006243291226 ], [ 0.00051248886959, -0.000064229542818 ], [ 0.000518777076579, -0.000066924488668 ], [ 0.000522370337716, -0.000072314380367 ], [ 0.000536743382261, -0.000076805956801 ], [ 0.000534048436409, -0.0000821958485 ], [ 0.000534048436409, -0.0000821958485 ] ] ], [ [ [ 0.000942781890683, -0.000111840252873 ], [ 0.000879001505511, -0.000105552045882 ], [ 0.000867323406817, -0.000097467208332 ], [ 0.000863730145681, -0.00008938237077 ], [ 0.000858340253976, -0.0000875857402 ], [ 0.000817017750907, -0.000088484055491 ], [ 0.000809831228634, -0.000086687424921 ], [ 0.000805339652213, 
-0.000081297533221 ], [ 0.000790966607667, -0.000083094163779 ], [ 0.000799051445224, -0.000078602587359 ], [ 0.000807136282781, -0.000068721119239 ], [ 0.000807136282781, -0.000064229542818 ], [ 0.000822407642611, -0.000061534596968 ], [ 0.000827797534316, -0.00006512785811 ], [ 0.000853848677556, -0.000066924488668 ], [ 0.000860136884544, -0.000064229542818 ], [ 0.000876306559659, -0.00006961943453 ], [ 0.000887984658352, -0.00006961943453 ], [ 0.000896069495909, -0.000073212695659 ], [ 0.000907747594603, -0.000088484055491 ], [ 0.000928408846138, -0.000093873947204 ], [ 0.000950866728241, -0.00009477226247 ], [ 0.00099218923131, -0.00008938237077 ], [ 0.000998477438299, -0.000091179001341 ], [ 0.000997579123015, -0.000101958784753 ], [ 0.000994884177162, -0.000103755415324 ], [ 0.000942781890683, -0.000111840252873 ], [ 0.000942781890683, -0.000111840252873 ] ] ], [ [ [ 0.001055071301198, -0.000128009927984 ], [ 0.001050579724778, -0.000128009927984 ], [ 0.001046986463641, -0.000123518351564 ], [ 0.001045189833073, -0.000110043622315 ], [ 0.001073037606881, -0.000086687424921 ], [ 0.001113461794666, -0.00007770427208 ], [ 0.001143106199042, -0.00007950090265 ], [ 0.001148496090747, -0.000081297533221 ], [ 0.001151191036599, -0.000085789109642 ], [ 0.001150292721315, -0.000092077316633 ], [ 0.001139512937906, -0.000103755415324 ], [ 0.00112513989336, -0.000111840252873 ], [ 0.001091003912563, -0.000123518351564 ], [ 0.001078427498585, -0.000122620036285 ], [ 0.001055071301198, -0.000128009927984 ], [ 0.001055071301198, -0.000128009927984 ] ] ], [ [ [ 0.001325464201718, -0.000152264440657 ], [ 0.001320972625298, -0.000151366125378 ], [ 0.001313786103025, -0.000139688026688 ], [ 0.001315582733593, -0.000126213297413 ], [ 0.001311989472457, -0.000110043622315 ], [ 0.001314684418309, -0.000100162154182 ], [ 0.001325464201718, -0.000091179001341 ], [ 0.00133444735456, -0.000091179001341 ], [ 0.001355108606094, -0.000097467208332 ], [ 0.001363193443651, 
-0.000110941937594 ], [ 0.001363193443651, -0.000129806558555 ], [ 0.001358701867231, -0.000137891396117 ], [ 0.001351515344958, -0.000143281287816 ], [ 0.001325464201718, -0.000152264440657 ], [ 0.001325464201718, -0.000152264440657 ] ] ], [ [ [ 0.000056144705257, -0.000151366125378 ], [ 0.000034585138439, -0.000151366125378 ], [ 0.000023805355029, -0.000146874548958 ], [ 0.000027398616166, -0.000136094765546 ], [ 0.000037280084291, -0.000130704873847 ], [ 0.000066026173383, -0.000123518351564 ], [ 0.000139688026681, -0.000114535198735 ], [ 0.000167535800488, -0.000104653730603 ], [ 0.000199875150717, -0.000098365523611 ], [ 0.000229519555093, -0.000098365523611 ], [ 0.000253774067764, -0.000101958784753 ], [ 0.000336419073903, -0.000101060469474 ], [ 0.000387623045098, -0.000103755415324 ], [ 0.000517878761295, -0.000101060469474 ], [ 0.000543929904534, -0.00009477226247 ], [ 0.000574472624194, -0.00009477226247 ], [ 0.000595133875729, -0.000101958784753 ], [ 0.000644541216356, -0.000106450361161 ], [ 0.000694846872266, -0.000115433514014 ], [ 0.000700236763971, -0.000119925090435 ], [ 0.000695745187551, -0.000129806558555 ], [ 0.000681372143005, -0.000135196450267 ], [ 0.000618490073116, -0.000127111612705 ], [ 0.000570879363058, -0.000125314982134 ], [ 0.000517878761295, -0.000128908243276 ], [ 0.000450505114986, -0.000125314982134 ], [ 0.000394809567371, -0.000131603189126 ], [ 0.000363368532426, -0.000127111612705 ], [ 0.000335520758619, -0.000127111612705 ], [ 0.00027263868873, -0.000136993080825 ], [ 0.000225027978672, -0.000148671179529 ], [ 0.000117230144578, -0.000146874548958 ], [ 0.000064229542815, -0.000148671179529 ], [ 0.000056144705257, -0.000151366125378 ], [ 0.000056144705257, -0.000151366125378 ] ] ], [ [ [ 0.001206886584215, -0.000181010529764 ], [ 0.001199700061942, -0.000179213899193 ], [ 0.001195208485521, -0.000173824007481 ], [ 0.001199700061942, -0.000163942539348 ], [ 0.001197903431373, -0.000160349278207 ], [ 0.001187123647964, 
-0.000154061071228 ], [ 0.001178140495123, -0.000154061071228 ], [ 0.001178140495123, -0.00014777286425 ], [ 0.001172750603418, -0.000151366125378 ], [ 0.001161072504724, -0.000150467810087 ], [ 0.001158377558872, -0.000134298134988 ], [ 0.001165564081145, -0.000129806558555 ], [ 0.001178140495123, -0.000130704873847 ], [ 0.001179038810407, -0.000127111612705 ], [ 0.001188021963248, -0.000127111612705 ], [ 0.001217666367624, -0.000115433514014 ], [ 0.001244615826148, -0.000116331829293 ], [ 0.001248209087284, -0.000126213297413 ], [ 0.001247310772, -0.000136993080825 ], [ 0.001257192240125, -0.000145077918387 ], [ 0.001251802348421, -0.000152264440657 ], [ 0.001254497294273, -0.000157654332357 ], [ 0.001206886584215, -0.000181010529764 ], [ 0.001206886584215, -0.000181010529764 ] ] ], [ [ [ 0.000787373346531, -0.000337317389189 ], [ 0.000741559267041, -0.000331029182198 ], [ 0.000731677798915, -0.000327435921069 ], [ 0.000698440133403, -0.000324740975207 ], [ 0.000628371541242, -0.000325639290499 ], [ 0.000583455777036, -0.000329232551628 ], [ 0.000556506318512, -0.000317554452949 ], [ 0.000549319796239, -0.000309469615388 ], [ 0.000555608003228, -0.000304978038967 ], [ 0.000574472624194, -0.000299588147255 ], [ 0.000606811974423, -0.000300486462534 ], [ 0.000647236162208, -0.000295096570834 ], [ 0.000661609206754, -0.000295994886113 ], [ 0.000679575512436, -0.000289706679122 ], [ 0.000704728340392, -0.000302283093105 ], [ 0.000734372744768, -0.000299588147255 ], [ 0.000741559267041, -0.000287910048564 ], [ 0.000773898617269, -0.000279825211015 ], [ 0.00077839019369, -0.000269943742882 ], [ 0.00077839019369, -0.0002519774372 ], [ 0.000790068292383, -0.000242994284359 ], [ 0.00081342448977, -0.000238502707939 ], [ 0.0008286958496, -0.000227722924527 ], [ 0.000848458785851, -0.000228621239819 ], [ 0.000890679604205, -0.000222333032827 ], [ 0.000916730747444, -0.000207061672995 ], [ 0.000976019556196, -0.000195383574304 ], [ 0.001087410651427, -0.000194485259013 ], [ 
0.001111665164098, -0.000183705475601 ], [ 0.001131428100349, -0.000182807160322 ], [ 0.001147597775463, -0.000176518953331 ], [ 0.001153885982452, -0.000181010529764 ], [ 0.001164665765861, -0.000182807160322 ], [ 0.001169157342282, -0.000189095367313 ], [ 0.001170055657566, -0.000193586943734 ], [ 0.001165564081145, -0.000200773466004 ], [ 0.001149394406031, -0.000212451564695 ], [ 0.001101783695973, -0.000233112816226 ], [ 0.001088308966711, -0.000234011131505 ], [ 0.001082919075006, -0.000224129663398 ], [ 0.001067647715176, -0.000222333032827 ], [ 0.001000274068867, -0.000229519555098 ], [ 0.000974222925628, -0.000239401023218 ], [ 0.000990392600742, -0.0002519774372 ], [ 0.000975121240912, -0.000256469013621 ], [ 0.00093559536841, -0.000258265644179 ], [ 0.000886188027784, -0.000248384176059 ], [ 0.000870916667954, -0.000248384176059 ], [ 0.000858340253976, -0.0002519774372 ], [ 0.000849357101135, -0.000260062274762 ], [ 0.000847560470567, -0.00026904542759 ], [ 0.000853848677556, -0.000278926895723 ], [ 0.000862831830397, -0.000282520156852 ], [ 0.000865526776249, -0.000297791516684 ], [ 0.000863730145681, -0.000310367930667 ], [ 0.000851153731703, -0.000318452768228 ], [ 0.000837679002441, -0.000319351083508 ], [ 0.000829594164884, -0.000330130866919 ], [ 0.000787373346531, -0.000337317389189 ], [ 0.000787373346531, -0.000337317389189 ] ] ], [ [ [ 0.000055246389973, -0.000329232551628 ], [ 0.000047161552416, -0.000322046029357 ], [ 0.000047161552416, -0.000316656137645 ], [ 0.00005883965111, -0.000297791516684 ], [ 0.000083094163781, -0.000287011733272 ], [ 0.000101060469463, -0.000284316787435 ], [ 0.000161247593499, -0.000265452166462 ], [ 0.00018101052975, -0.000262757220612 ], [ 0.00019628188958, -0.000256469013621 ], [ 0.000204366727137, -0.000245689230209 ], [ 0.000214248195263, -0.000242095969067 ], [ 0.000220536402251, -0.000245689230209 ], [ 0.000235807762081, -0.000246587545488 ], [ 0.000247485860775, -0.000242095969067 ], [ 0.000279825211003, 
-0.000237604392647 ], [ 0.000376843261688, -0.000229519555098 ], [ 0.000397504513223, -0.000224129663398 ], [ 0.000461284898395, -0.000216943141115 ], [ 0.000546624850387, -0.000215146510557 ], [ 0.000596032191013, -0.000205265042424 ], [ 0.000629269856526, -0.000205265042424 ], [ 0.000688558665278, -0.000197180204862 ], [ 0.00074784747403, -0.000198078520141 ], [ 0.000754135681018, -0.000194485259013 ], [ 0.000754135681018, -0.000189095367313 ], [ 0.000764915464428, -0.000188197052034 ], [ 0.000773898617269, -0.000181908845043 ], [ 0.000799949760508, -0.000198976835433 ], [ 0.000826000903748, -0.000203468411854 ], [ 0.0008286958496, -0.000209756618845 ], [ 0.000827797534316, -0.000216943141115 ], [ 0.000762220518575, -0.000218739771686 ], [ 0.000754135681018, -0.000221434717536 ], [ 0.000725389591927, -0.000218739771686 ], [ 0.000674185620732, -0.000222333032827 ], [ 0.000650829423345, -0.000220536402257 ], [ 0.000642744585788, -0.000227722924527 ], [ 0.000689456980562, -0.000227722924527 ], [ 0.000769407040848, -0.000234011131505 ], [ 0.000773898617269, -0.000239401023218 ], [ 0.000773000301985, -0.000248384176059 ], [ 0.000775695247837, -0.000250180806629 ], [ 0.000769407040848, -0.00025467238305 ], [ 0.000741559267041, -0.000258265644179 ], [ 0.000728084537779, -0.000253774067771 ], [ 0.000716406439085, -0.000253774067771 ], [ 0.000703830025108, -0.0002573673289 ], [ 0.000703830025108, -0.000266350481741 ], [ 0.000722694646074, -0.000275333634582 ], [ 0.000727186222495, -0.000282520156852 ], [ 0.000727186222495, -0.000287011733272 ], [ 0.000719101384938, -0.000289706679122 ], [ 0.000680473827721, -0.000287910048564 ], [ 0.000609506920275, -0.000275333634582 ], [ 0.000526861914136, -0.000286113417993 ], [ 0.000519675391863, -0.000278028580431 ], [ 0.000511590554306, -0.000278028580431 ], [ 0.000500810770897, -0.000285215102714 ], [ 0.000478352888794, -0.000287011733272 ], [ 0.000459488267827, -0.000302283093105 ], [ 0.000437030385724, -0.000302283093105 ], [ 
0.000401996089643, -0.000293299940264 ], [ 0.000367860108847, -0.000291503309693 ], [ 0.000348097172596, -0.000295096570834 ], [ 0.000294198255549, -0.000296893201405 ], [ 0.00028341847214, -0.000303181408384 ], [ 0.00028341847214, -0.000306774669525 ], [ 0.000289706679129, -0.000308571300096 ], [ 0.000290604994413, -0.000311266245946 ], [ 0.000282520156856, -0.000313062876516 ], [ 0.000278926895719, -0.000307672984804 ], [ 0.000270842058162, -0.000305876354246 ], [ 0.00026814711231, -0.000301384777813 ], [ 0.000260960590037, -0.000298689831976 ], [ 0.000215146510547, -0.000299588147255 ], [ 0.000205265042421, -0.000295096570834 ], [ 0.000183705475602, -0.000293299940264 ], [ 0.000177417268614, -0.000287011733272 ], [ 0.000163942539352, -0.000286113417993 ], [ 0.000143281287817, -0.000296893201405 ], [ 0.000118128459862, -0.000318452768228 ], [ 0.000083094163781, -0.000326537605778 ], [ 0.000055246389973, -0.000329232551628 ], [ 0.000055246389973, -0.000329232551628 ] ] ], [ [ [ 0.001226649520465, -0.000324740975207 ], [ 0.001214073106488, -0.000324740975207 ], [ 0.001209581530067, -0.000319351083508 ], [ 0.001178140495123, -0.000305876354246 ], [ 0.001178140495123, -0.000303181408384 ], [ 0.001153885982452, -0.000295096570834 ], [ 0.001153885982452, -0.000282520156852 ], [ 0.001169157342282, -0.000271740373453 ], [ 0.001180835440975, -0.000269943742882 ], [ 0.001200598377226, -0.000260960590028 ], [ 0.001227547835749, -0.000260062274762 ], [ 0.001229344466318, -0.000263655535891 ], [ 0.001225751205181, -0.000272638688732 ], [ 0.001215869737056, -0.000277130265152 ], [ 0.001206886584215, -0.000286113417993 ], [ 0.001211378160635, -0.000292401624985 ], [ 0.001235632673306, -0.000308571300096 ], [ 0.001236530988591, -0.000317554452949 ], [ 0.001226649520465, -0.000324740975207 ], [ 0.001226649520465, -0.000324740975207 ] ] ], [ [ [ 0.001009257221708, -0.000303181408384 ], [ 0.000975121240912, -0.000298689831976 ], [ 0.000925713900285, -0.000300486462534 ], [ 
0.000912239171023, -0.000287011733272 ], [ 0.000912239171023, -0.000280723526281 ], [ 0.00091583243216, -0.000277130265152 ], [ 0.000927510530853, -0.000276231949861 ], [ 0.001005663960572, -0.000286113417993 ], [ 0.001046986463641, -0.000286113417993 ], [ 0.001051478040062, -0.000288808363843 ], [ 0.00104878309421, -0.000295096570834 ], [ 0.001009257221708, -0.000303181408384 ], [ 0.001009257221708, -0.000303181408384 ] ] ], [ [ [ 0.000869120037386, -0.000357080325442 ], [ 0.000850255416419, -0.000354385379592 ], [ 0.000843068894146, -0.000335520758619 ], [ 0.000851153731703, -0.000328334236349 ], [ 0.000852052046987, -0.000318452768228 ], [ 0.000865526776249, -0.000309469615388 ], [ 0.000880798136079, -0.000304978038967 ], [ 0.000916730747444, -0.000307672984804 ], [ 0.000968833033923, -0.000316656137645 ], [ 0.001029020157959, -0.000314859507087 ], [ 0.001062257823471, -0.000307672984804 ], [ 0.001091902227847, -0.000306774669525 ], [ 0.001120648316939, -0.000300486462534 ], [ 0.001131428100349, -0.000301384777813 ], [ 0.001135919676769, -0.000306774669525 ], [ 0.001134123046201, -0.000314859507087 ], [ 0.001126936523928, -0.000319351083508 ], [ 0.001085614020858, -0.000331927497477 ], [ 0.001055969616482, -0.000337317389189 ], [ 0.001037104995516, -0.000349893803172 ], [ 0.000985901024321, -0.000345402226751 ], [ 0.000947273467104, -0.00034899548788 ], [ 0.000902357702898, -0.00034899548788 ], [ 0.000879899820795, -0.000352588749022 ], [ 0.000869120037386, -0.000357080325442 ], [ 0.000869120037386, -0.000357080325442 ] ] ], [ [ [ 0.001212276475919, -0.000401996089647 ], [ 0.001191615224385, -0.000401097774368 ], [ 0.001178140495123, -0.000395707882643 ], [ 0.001178140495123, -0.000393012936793 ], [ 0.001130529785064, -0.000394809567377 ], [ 0.001119750001655, -0.000386724729815 ], [ 0.001118851686371, -0.000381334838103 ], [ 0.001128733154496, -0.000377741576974 ], [ 0.001165564081145, -0.000374148315845 ], [ 0.001178140495123, -0.000375944946403 ], [ 
0.001179038810407, -0.000372351685274 ], [ 0.001202395007794, -0.000371453369983 ], [ 0.001214971421772, -0.000364266847712 ], [ 0.001223954574613, -0.00035618201015 ], [ 0.001230242781602, -0.000345402226751 ], [ 0.001238327619159, -0.000340012335039 ], [ 0.001270666969387, -0.000336419073898 ], [ 0.001282345068081, -0.00034180896561 ], [ 0.00129312485149, -0.000342707280902 ], [ 0.001314684418309, -0.00033462244334 ], [ 0.001333549039275, -0.000321147714066 ], [ 0.001364091758935, -0.000314859507087 ], [ 0.001363193443651, -0.00033462244334 ], [ 0.001314684418309, -0.000355283694859 ], [ 0.001301209689047, -0.000370555054704 ], [ 0.001224852889897, -0.000394809567377 ], [ 0.001212276475919, -0.000401996089647 ], [ 0.001212276475919, -0.000401996089647 ] ] ], [ [ [ 0.000678677197152, -0.000397504513227 ], [ 0.00064543953164, -0.000393012936793 ], [ 0.000640049639935, -0.000388521360373 ], [ 0.000650829423345, -0.000379538207545 ], [ 0.000718203069654, -0.000375944946403 ], [ 0.000738864321188, -0.000369656739412 ], [ 0.000749644104598, -0.000371453369983 ], [ 0.000774796932553, -0.000365165162991 ], [ 0.00081342448977, -0.000363368532433 ], [ 0.00085474699284, -0.00035618201015 ], [ 0.000868221722102, -0.000357080325442 ], [ 0.000869120037386, -0.000359775271292 ], [ 0.000867323406817, -0.000371453369983 ], [ 0.000858340253976, -0.000375046631124 ], [ 0.000832289110737, -0.000375944946403 ], [ 0.000748745789314, -0.000392114621514 ], [ 0.000705626655676, -0.000393012936793 ], [ 0.000678677197152, -0.000397504513227 ], [ 0.000678677197152, -0.000397504513227 ] ] ], [ [ [ 0.000569981047774, -0.000483742780495 ], [ 0.000533150121125, -0.000482844465216 ], [ 0.000456793321975, -0.000464878159521 ], [ 0.000442420277429, -0.000457691637251 ], [ 0.000442420277429, -0.000446911853852 ], [ 0.000448708484418, -0.000443318592711 ], [ 0.00047745457351, -0.000440623646861 ], [ 0.000542133273966, -0.000443318592711 ], [ 0.000562794525501, -0.00043613207044 ], [ 
0.00058884566874, -0.000439725331582 ], [ 0.000640049639935, -0.000440623646861 ], [ 0.000695745187551, -0.000435233755161 ], [ 0.000711914862665, -0.000428047232878 ], [ 0.000734372744768, -0.000422657341179 ], [ 0.000850255416419, -0.000400199459076 ], [ 0.000864628460965, -0.000391216306235 ], [ 0.000888882973636, -0.000383131468673 ], [ 0.000899662757046, -0.000375046631124 ], [ 0.000947273467104, -0.000364266847712 ], [ 0.000973324610344, -0.000364266847712 ], [ 0.000985901024321, -0.000376843261682 ], [ 0.001019138689834, -0.000378639892253 ], [ 0.001028121842675, -0.000384928099244 ], [ 0.001023630266254, -0.000395707882643 ], [ 0.001006562275856, -0.000408284296638 ], [ 0.000940985260115, -0.000413674188338 ], [ 0.000903256018182, -0.000410979242488 ], [ 0.000843068894146, -0.000419064080037 ], [ 0.000787373346531, -0.000433437124591 ], [ 0.000756830626871, -0.00045140343026 ], [ 0.000724491276642, -0.000454996691401 ], [ 0.000709219916812, -0.000459488267822 ], [ 0.000684965404141, -0.000460386583114 ], [ 0.000680473827721, -0.000462183213672 ], [ 0.000671490674879, -0.000473861312375 ], [ 0.000664304152606, -0.000476556258225 ], [ 0.000649032792776, -0.000476556258225 ], [ 0.000632863117662, -0.000463979844255 ], [ 0.000601422082718, -0.000463979844255 ], [ 0.000587947353456, -0.000468471420663 ], [ 0.000587049038172, -0.000481047834645 ], [ 0.000569981047774, -0.000483742780495 ], [ 0.000569981047774, -0.000483742780495 ] ] ], [ [ [ 0.00127785349166, -0.000429843863449 ], [ 0.001263480447114, -0.000429843863449 ], [ 0.001259887185978, -0.000427148917599 ], [ 0.001267073708251, -0.000418165764758 ], [ 0.001285040013933, -0.000410979242488 ], [ 0.001286836644501, -0.000407385981347 ], [ 0.001295819797342, -0.000404691035484 ], [ 0.001298514743195, -0.000401097774368 ], [ 0.001362295128367, -0.000401097774368 ], [ 0.00136499007422, -0.000405589350776 ], [ 0.001364091758935, -0.0004217590259 ], [ 0.001328159147571, -0.0004217590259 ], [ 0.001287734959785, 
-0.00042625060232 ], [ 0.00127785349166, -0.000429843863449 ], [ 0.00127785349166, -0.000429843863449 ] ] ], [ [ [ 0.001127834839212, -0.000479251204075 ], [ 0.001106275272393, -0.000478352888796 ], [ 0.001082919075006, -0.000457691637251 ], [ 0.001076630868017, -0.000442420277432 ], [ 0.001089207281995, -0.000432538809312 ], [ 0.001102682011257, -0.00043164049402 ], [ 0.001105376957109, -0.0004217590259 ], [ 0.001101783695973, -0.000417267449479 ], [ 0.001104478641825, -0.000414572503617 ], [ 0.001124241578076, -0.000407385981347 ], [ 0.001137716307337, -0.000407385981347 ], [ 0.001148496090747, -0.000419962395329 ], [ 0.001150292721315, -0.000432538809312 ], [ 0.001142207883758, -0.00044152196214 ], [ 0.001134123046201, -0.000445115223269 ], [ 0.001124241578076, -0.000445115223269 ], [ 0.001117953371087, -0.000449606799702 ], [ 0.001120648316939, -0.000455895006693 ], [ 0.001134123046201, -0.000464878159521 ], [ 0.001134123046201, -0.000473861312375 ], [ 0.001127834839212, -0.000479251204075 ], [ 0.001127834839212, -0.000479251204075 ] ] ], [ [ [ 0.000327435921062, -0.000456793321972 ], [ 0.000304978038959, -0.000455895006693 ], [ 0.000300486462538, -0.000452301745564 ], [ 0.000300486462538, -0.00044601353856 ], [ 0.000310367930663, -0.00043613207044 ], [ 0.000365165162995, -0.000433437124591 ], [ 0.000417267449474, -0.000422657341179 ], [ 0.000437030385724, -0.000424453971737 ], [ 0.00043613207044, -0.000437928700998 ], [ 0.000427148917599, -0.000445115223269 ], [ 0.00037953820754, -0.000447810169131 ], [ 0.000327435921062, -0.000456793321972 ], [ 0.000327435921062, -0.000456793321972 ] ] ], [ [ [ 0.001024528581538, -0.000507997293156 ], [ 0.001018240374549, -0.000507997293156 ], [ 0.001010155536992, -0.000502607401469 ], [ 0.000995782492446, -0.00050440403204 ], [ 0.000985901024321, -0.000501709086177 ], [ 0.000977816186764, -0.000492725933336 ], [ 0.000976019556196, -0.000473861312375 ], [ 0.000970629664491, -0.000465776474813 ], [ 0.000945476836536, 
-0.000456793321972 ], [ 0.000925713900285, -0.000445115223269 ], [ 0.000932900422558, -0.000439725331582 ], [ 0.000932002107274, -0.000437030385732 ], [ 0.000936493683695, -0.000432538809312 ], [ 0.00094637515182, -0.000425352287029 ], [ 0.000954459989377, -0.000423555656458 ], [ 0.000974222925628, -0.000423555656458 ], [ 0.001003867330004, -0.000435233755161 ], [ 0.001020935320402, -0.00045140343026 ], [ 0.001028121842675, -0.000462183213672 ], [ 0.001028121842675, -0.000481047834645 ], [ 0.001032613419095, -0.000488234356928 ], [ 0.00103351173438, -0.000500810770885 ], [ 0.001029918473243, -0.000506200662598 ], [ 0.001024528581538, -0.000507997293156 ], [ 0.001024528581538, -0.000507997293156 ] ] ], [ [ [ 0.001302108004331, -0.000474759627654 ], [ 0.001278751806944, -0.000472962997083 ], [ 0.001270666969387, -0.000467573105384 ], [ 0.001269768654103, -0.000462183213672 ], [ 0.001273361915239, -0.000456793321972 ], [ 0.001287734959785, -0.000450505114981 ], [ 0.00131917599473, -0.000445115223269 ], [ 0.001350617029674, -0.00044421690799 ], [ 0.001356905236663, -0.000445115223269 ], [ 0.001359600182515, -0.000453200060843 ], [ 0.001364091758935, -0.000450505114981 ], [ 0.001364091758935, -0.000458589952543 ], [ 0.001356006921378, -0.000465776474813 ], [ 0.001329057462855, -0.000472962997083 ], [ 0.001302108004331, -0.000474759627654 ], [ 0.001302108004331, -0.000474759627654 ] ] ], [ [ [ 0.000240299338502, -0.000540336643391 ], [ 0.000218739771683, -0.000540336643391 ], [ 0.000203468411853, -0.000536743382262 ], [ 0.000192688628444, -0.000529556859979 ], [ 0.000192688628444, -0.000517878761288 ], [ 0.00019628188958, -0.00051428550016 ], [ 0.000212451564694, -0.000511590554297 ], [ 0.000230417870377, -0.000501709086177 ], [ 0.000253774067764, -0.000503505716748 ], [ 0.000263655535889, -0.000509793923739 ], [ 0.000264553851173, -0.000519675391859 ], [ 0.0002573673289, -0.000530455175271 ], [ 0.000240299338502, -0.000540336643391 ], [ 0.000240299338502, 
-0.000540336643391 ] ] ], [ [ [ 0.001211378160635, -0.000572675993613 ], [ 0.001199700061942, -0.000571777678334 ], [ 0.001192513539669, -0.000566387786622 ], [ 0.001191615224385, -0.000557404633794 ], [ 0.001205089953646, -0.000544828219811 ], [ 0.001200598377226, -0.000538540012833 ], [ 0.001179937125691, -0.000527760229408 ], [ 0.001183530386827, -0.000520573707138 ], [ 0.001204191638362, -0.000516082130718 ], [ 0.00121676805234, -0.00051877707658 ], [ 0.001231141096886, -0.000515183815451 ], [ 0.001247310772, -0.000515183815451 ], [ 0.001261683816546, -0.000506200662598 ], [ 0.001283243383365, -0.000505302347319 ], [ 0.001290429905638, -0.000511590554297 ], [ 0.001309294526604, -0.000519675391859 ], [ 0.001325464201718, -0.000537641697541 ], [ 0.001326362517002, -0.000549319796232 ], [ 0.001320074310014, -0.000552014742082 ], [ 0.001233836042738, -0.000551116426803 ], [ 0.001223056259329, -0.000568184417193 ], [ 0.001211378160635, -0.000572675993613 ], [ 0.001211378160635, -0.000572675993613 ] ] ], [ [ [ 0.000151366125374, -0.000664304152594 ], [ 0.000139688026681, -0.000662507522023 ], [ 0.000137891396112, -0.000652626053903 ], [ 0.000142382972533, -0.000641846270492 ], [ 0.000132501504408, -0.000631964802372 ], [ 0.000125314982135, -0.000630168171801 ], [ 0.000130704873839, -0.000615795127248 ], [ 0.000140586341965, -0.000608608604977 ], [ 0.000163044224068, -0.000609506920269 ], [ 0.000180112214466, -0.000623879964822 ], [ 0.000188197052023, -0.000626574910659 ], [ 0.000193586943728, -0.0006409479552 ], [ 0.000188197052023, -0.000649032792762 ], [ 0.000151366125374, -0.000664304152594 ], [ 0.000151366125374, -0.000664304152594 ] ] ], [ [ [ 0.001220361313476, -0.000666999098457 ], [ 0.001193411854953, -0.000665202467873 ], [ 0.001181733756259, -0.000660710891465 ], [ 0.001176343864555, -0.000663405837315 ], [ 0.001161072504724, -0.000660710891465 ], [ 0.001154784297736, -0.000654422684461 ], [ 0.001154784297736, -0.000640049639934 ], [ 0.001158377558872, 
-0.000630168171801 ], [ 0.001168259026997, -0.000626574910659 ], [ 0.001177242179839, -0.00063106648708 ], [ 0.001211378160635, -0.000631964802372 ], [ 0.001223056259329, -0.000642744585771 ], [ 0.001222157944045, -0.000651727738624 ], [ 0.001224852889897, -0.000657117630324 ], [ 0.001220361313476, -0.000666999098457 ], [ 0.001220361313476, -0.000666999098457 ] ] ] ] }
--------------------------------------------------------------------------------
/src/picterra/forge_client.py:
--------------------------------------------------------------------------------
1 | """
2 | Handles interfacing with the API documented at https://app.picterra.ch/public/apidocs/v2/
3 |
4 | Note that Forge is separate from Tracer and so an API key which is valid for
5 | one may encounter permissions issues if used with the other
6 | """
7 |
8 | from __future__ import annotations
9 |
import json
import logging
import os
import sys
import tempfile
import warnings
15 |
16 | if sys.version_info >= (3, 8):
17 | from typing import Literal
18 | else:
19 | from typing_extensions import Literal
20 |
21 | from typing import Any
22 |
23 | import requests
24 |
25 | from picterra.base_client import (
26 | APIError,
27 | BaseAPIClient,
28 | Feature,
29 | FeatureCollection,
30 | _download_to_file,
31 | _upload_file_to_blobstore,
32 | )
33 |
34 | logger = logging.getLogger()
35 |
36 |
37 | class ForgeClient(BaseAPIClient):
    def __init__(self, **kwargs):
        """
        Create a client for the Forge public API (v2).

        Args:
            **kwargs: Forwarded verbatim to ``BaseAPIClient`` (e.g. API key /
                connection options -- see ``BaseAPIClient`` for the accepted
                keyword arguments).
        """
        super().__init__("public/api/v2/", **kwargs)
40 |
41 | def upload_raster(
42 | self,
43 | filename: str,
44 | name: str,
45 | folder_id: str | None = None,
46 | captured_at: str | None = None,
47 | identity_key: str | None = None,
48 | multispectral: bool = True,
49 | cloud_coverage: int | None = None,
50 | user_tag: str | None = None,
51 | ) -> str:
52 | """
53 | Upload a raster to picterra.
54 |
55 | Args:
56 | filename: Local filename of raster to upload
57 | name: A human-readable name for this raster
58 | folder_id: Id of the folder this raster
59 | belongs to; if not provided, the raster will be put in the
60 | "Picterra API Project" folder
61 | captured_at: ISO-8601 date and time at which this
62 | raster was captured, YYYY-MM-DDThh:mm[:ss[.uuuuuu]][+HH:MM|-HH:MM|Z];
63 | e.g. "2020-01-01T12:34:56.789Z"
64 | identity_key: Personal identifier for this raster.
65 | multispectral: If True, the raster is in multispectral mode and can have
66 | an associated band specification
67 | cloud_coverage: Raster cloud coverage %.
68 | user_tag (beta): Raster tag
69 |
70 | Returns:
71 | str: The id of the uploaded raster
72 | """
73 | data: dict[str, Any] = {"name": name, "multispectral": multispectral}
74 | if folder_id is not None:
75 | data.update({"folder_id": folder_id})
76 | if captured_at is not None:
77 | data.update({"captured_at": captured_at})
78 | if identity_key is not None:
79 | data.update({"identity_key": identity_key})
80 | if cloud_coverage is not None:
81 | data.update({"cloud_coverage": cloud_coverage})
82 | if user_tag is not None:
83 | data.update({"user_tag": user_tag})
84 | resp = self.sess.post(self._full_url("rasters/upload/file/"), json=data)
85 | if not resp.ok:
86 | raise APIError(resp.text)
87 | data = resp.json()
88 | upload_url = str(data["upload_url"])
89 | raster_id: str = data["raster_id"]
90 | _upload_file_to_blobstore(upload_url, filename)
91 | resp = self.sess.post(self._full_url("rasters/%s/commit/" % raster_id))
92 | if not resp.ok:
93 | raise APIError(resp.text)
94 | self._wait_until_operation_completes(resp.json())
95 | return raster_id
96 |
97 | def list_folder_detectors(self, folder_id: str, page_number: int | None = None):
98 | """
99 | List of detectors assigned to a given folder, see `ResultsPage`
100 | for the pagination access pattern.
101 |
102 | This a **beta** function, subject to change.
103 |
104 | Args:
105 | folder_id: The id of the folder to obtain the detectors for
106 | page_number: Optional page (from 1) of the list we want to retrieve
107 |
108 | Returns:
109 | ResultsPage: a ResultsPage object that contains a slice of the list of detector dictionaries,
110 | plus methods to retrieve the other pages
111 |
112 | Example:
113 |
114 | ::
115 |
116 | {
117 | "id": "id1",
118 | "name": "detector1",
119 | "is_runnable": True,
120 | "user_tag": "tag1",
121 | },
122 | {
123 | "id": "id2",
124 | "name": "detector2",
125 | "is_runnable": False,
126 | "user_tag": "tag2",
127 | }
128 |
129 | """
130 | return self._return_results_page(
131 | "folders/%s/detectors" % folder_id,
132 | {"page_number": page_number} if page_number is not None else None,
133 | )
134 |
135 | def list_rasters(
136 | self,
137 | folder_id: str | None = None,
138 | search_string: str | None = None,
139 | user_tag: str | None = None,
140 | max_cloud_coverage: int | None = None,
141 | captured_before: str | None = None,
142 | captured_after: str | None = None,
143 | has_vector_layers: bool | None = None,
144 | page_number: int | None = None,
145 | ):
146 | """
147 | List of rasters metadata, see `ResultsPage` for the pagination access pattern.
148 |
149 | Args:
150 | folder_id: The id of the folder to search rasters in
151 | search_string: The search term used to filter rasters by name
152 | user_tag: [beta] The user tag to filter rasters by
153 | max_cloud_coverage: [beta] The max_cloud_coverage of the rasters (between 0 and 100)
154 | captured_before: ISO 8601 -formatted date / time of capture
155 | we want to list the rasters since
156 | captured_after: ISO 8601 -formatted date / time of capture
157 | we want to list the rasters from
158 | has_vector_layers: [beta] Whether or not the rasters have at least one vector layer
159 | page_number: Optional page (from 1) of the list we want to retrieve
160 |
161 | Returns:
162 | ResultsPage: a ResultsPage object that contains a slice of the list of raster dictionaries
163 |
164 | Example:
165 |
166 | ::
167 |
168 | {
169 | 'id': '42',
170 | 'status': 'ready',
171 | 'name': 'raster1',
172 | 'folder_id': 'abc'
173 | },
174 | {
175 | 'id': '43',
176 | 'status': 'ready',
177 | 'name': 'raster2',
178 | 'folder_id': 'def'
179 | }
180 |
181 | """
182 | params: dict[str, Any] = {}
183 | if folder_id:
184 | params["folder"] = folder_id
185 | if search_string:
186 | params["search"] = search_string
187 | if user_tag is not None:
188 | params["user_tag"] = user_tag.strip()
189 | if max_cloud_coverage is not None:
190 | params["max_cloud_coverage"] = max_cloud_coverage
191 | if captured_before is not None:
192 | params["captured_before"] = captured_before
193 | if captured_after is not None:
194 | params["captured_after"] = captured_after
195 | if has_vector_layers is not None:
196 | params["has_vector_layers"] = bool(has_vector_layers)
197 | if page_number is not None:
198 | params["page_number"] = page_number
199 | return self._return_results_page("rasters", params)
200 |
201 | def get_raster(self, raster_id: str) -> dict[str, Any]:
202 | """
203 | Get raster information
204 |
205 | Args:
206 | raster_id: id of the raster
207 |
208 | Raises:
209 | APIError: There was an error while getting the raster information
210 |
211 | Returns:
212 | dict: Dictionary of the information
213 | """
214 | resp = self.sess.get(self._full_url("rasters/%s/" % raster_id))
215 | if not resp.ok:
216 | raise APIError(resp.text)
217 | return resp.json()
218 |
219 | def edit_raster(
220 | self,
221 | raster_id: str,
222 | name: str | None = None,
223 | folder_id: str | None = None,
224 | captured_at: str | None = None,
225 | identity_key: str | None = None,
226 | multispectral_band_specification: dict | None = None,
227 | cloud_coverage: int | None = None,
228 | user_tag: str | None = None,
229 | ):
230 | """
231 | Edits an already existing raster.
232 |
233 | Args:
234 | name: New human-readable name for this raster
235 | folder_id: Id of the new folder for this raster (move is in another project)
236 | captured_at: new ISO-8601 date and time at which this
237 | raster was captured, YYYY-MM-DDThh:mm[:ss[.uuuuuu]][+HH:MM|-HH:MM|Z];
238 | e.g. "2020-01-01T12:34:56.789Z"
239 | identity_key: New personal identifier for this raster.
240 | multispectral_band_specification: The new band specification,
241 | see https://docs.picterra.ch/advanced-topics/multispectral
242 | cloud_coverage: Raster cloud coverage new percentage
243 | user_tag (beta): Raster tag
244 |
245 | Returns:
246 | str: The id of the edited raster
247 | """
248 | data: dict[str, Any] = {}
249 | if name:
250 | data.update({"name": name})
251 | if folder_id is not None:
252 | data.update({"folder_id": folder_id})
253 | if captured_at is not None:
254 | data.update({"captured_at": captured_at})
255 | if identity_key is not None:
256 | data.update({"identity_key": identity_key})
257 | if multispectral_band_specification is not None:
258 | data.update(
259 | {"multispectral_band_specification": multispectral_band_specification}
260 | )
261 | if cloud_coverage is not None:
262 | data.update({"cloud_coverage": cloud_coverage})
263 | if user_tag:
264 | data.update({"user_tag": user_tag})
265 | resp = self.sess.put(self._full_url("rasters/%s/" % raster_id), json=data)
266 | if not resp.ok:
267 | raise APIError(resp.text)
268 | return raster_id
269 |
270 | def delete_raster(self, raster_id: str):
271 | """
272 | Deletes a given raster by its identifier
273 |
274 | Args:
275 | raster_id: The id of the raster to delete
276 |
277 | Raises:
278 | APIError: There was an error while trying to delete the raster
279 | """
280 |
281 | resp = self.sess.delete(self._full_url("rasters/%s/" % raster_id))
282 | if not resp.ok:
283 | raise APIError(resp.text)
284 |
285 | def download_raster_to_file(self, raster_id: str, filename: str):
286 | """
287 | Downloads a raster to a local file
288 |
289 | Args:
290 | raster_id: The id of the raster to download
291 | filename: The local filename where to save the raster image
292 |
293 | Raises:
294 | APIError: There was an error while trying to download the raster
295 | """
296 | resp = self.sess.get(self._full_url("rasters/%s/download/" % raster_id))
297 | if not resp.ok:
298 | raise APIError(resp.text)
299 | raster_url = resp.json()["download_url"]
300 | logger.debug("Trying to download raster %s from %s.." % (raster_id, raster_url))
301 | _download_to_file(raster_url, filename)
302 |
303 | def set_raster_detection_areas_from_file(self, raster_id: str, filename: str):
304 | """
305 | This is an experimental feature
306 |
307 | Set detection areas from a GeoJSON file
308 |
309 | Args:
310 | raster_id: The id of the raster to which to assign the detection areas
311 | filename: The filename of a GeoJSON file. This should contain a FeatureCollection
312 | of Polygon/MultiPolygon
313 |
314 | Raises:
315 | APIError: There was an error uploading the file to cloud storage
316 | """
317 | # Get upload URL
318 | resp = self.sess.post(
319 | self._full_url("rasters/%s/detection_areas/upload/file/" % raster_id)
320 | )
321 | if not resp.ok:
322 | raise APIError(resp.text)
323 | data = resp.json()
324 | upload_url = data["upload_url"]
325 | upload_id = data["upload_id"]
326 | # Upload to blobstore
327 | _upload_file_to_blobstore(upload_url, filename)
328 | # Commit upload
329 | resp = self.sess.post(
330 | self._full_url(
331 | "rasters/%s/detection_areas/upload/%s/commit/" % (raster_id, upload_id)
332 | )
333 | )
334 | if not resp.ok:
335 | raise APIError(resp.text)
336 | self._wait_until_operation_completes(resp.json())
337 |
338 | def remove_raster_detection_areas(self, raster_id: str):
339 | """
340 | This is an experimental feature
341 |
342 | Remove the detection areas of a raster
343 |
344 | Args:
345 | raster_id: The id of the raster whose detection areas will be removed
346 |
347 | Raises:
348 | APIError: There was an error during the operation
349 | """
350 | resp = self.sess.delete(
351 | self._full_url("rasters/%s/detection_areas/" % raster_id)
352 | )
353 | if not resp.ok:
354 | raise APIError(resp.text)
355 |
356 | def add_raster_to_detector(self, raster_id: str, detector_id: str):
357 | """
358 | Associate a raster to a detector
359 |
360 | This a **beta** function, subject to change.
361 |
362 | Args:
363 | detector_id: The id of the detector
364 | raster_id: The id of the raster
365 |
366 | Raises:
367 | APIError: There was an error uploading the file to cloud storage
368 | """
369 | resp = self.sess.post(
370 | self._full_url("detectors/%s/training_rasters/" % detector_id),
371 | json={"raster_id": raster_id},
372 | )
373 | if not resp.status_code == 201:
374 | raise APIError(resp.text)
375 |
376 | def create_detector(
377 | self,
378 | name: str | None = None,
379 | detection_type: str = "count",
380 | output_type: str = "polygon",
381 | training_steps: int = 500,
382 | backbone: str = "resnet34",
383 | tile_size: int = 256,
384 | background_sample_ratio: float = 0.25,
385 | ) -> str:
386 | """
387 | Creates a new detector
388 |
389 | This a **beta** function, subject to change.
390 |
391 | Please note that depending on your plan some setting cannot be different
392 | from the default ones
393 |
394 | Args:
395 | name: Name of the detector
396 | detection_type: Type of the detector (one of 'count', 'segmentation')
397 | output_type: Output type of the detector (one of 'polygon', 'bbox')
398 | training_steps: Training steps the detector (integer between 500 & 40000)
399 | backbone: detector backbone (one of 'resnet18', 'resnet34', 'resnet50')
400 | tile_size: tile size (see HTTP API docs for the allowed values)
401 | background_sample_ratio: bg sample ratio (between 0 and 1)
402 |
403 | Returns:
404 | str: The id of the detector
405 |
406 | Raises:
407 | APIError: There was an error while creating the detector
408 | """
409 | # Build request body
410 | body_data: dict[str, Any] = {"configuration": {}}
411 | if name:
412 | body_data["name"] = name
413 | for i in (
414 | "detection_type",
415 | "output_type",
416 | "training_steps",
417 | "backbone",
418 | "tile_size",
419 | "background_sample_ratio",
420 | ):
421 | body_data["configuration"][i] = locals()[i]
422 | # Call API and check response
423 | resp = self.sess.post(self._full_url("detectors/"), json=body_data)
424 | if not resp.status_code == 201:
425 | raise APIError(resp.text)
426 | return resp.json()["id"]
427 |
428 | def get_detector(self, detector_id: str):
429 | resp = self.sess.get(self._full_url("detectors/%s/" % detector_id))
430 | if not resp.status_code == 200:
431 | raise APIError(resp.text)
432 | return resp.json()
433 |
434 | def list_detectors(
435 | self,
436 | search_string: str | None = None,
437 | user_tag: str | None = None,
438 | is_shared: bool | None = None,
439 | page_number: int | None = None,
440 | ):
441 | """
442 | List all the detectors the user can access, see `ResultsPage`
443 | for the pagination access pattern.
444 |
445 | Args:
446 | search_string: The term used to filter detectors by name
447 | user_tag: [beta] User tag to filter detectors by
448 | is_shared: [beta] Share status to filter detectors by
449 | page_number: Optional page (from 1) of the list we want to retrieve
450 |
451 | Returns:
452 | ResultsPage: A ResultsPage object that contains a slice of the list of detector dictionaries
453 |
454 | Example:
455 |
456 | ::
457 |
458 | {
459 | 'id': '42',
460 | 'name': 'cow detector',
461 | 'configuration': {
462 | 'detection_type': 'count',
463 | 'output_type': 'bbox',
464 | 'training_steps': 787
465 | }
466 | },
467 | {
468 | 'id': '43',
469 | 'name': 'test5',
470 | 'configuration': {
471 | 'detection_type': 'segmentation',
472 | 'output_type': 'polygon',
473 | 'training_steps': 500
474 | }
475 | }
476 |
477 | """
478 | data: dict[str, Any] = {}
479 | if search_string is not None:
480 | data["search"] = search_string.strip()
481 | if user_tag is not None:
482 | data["user_tag"] = user_tag.strip()
483 | if is_shared is not None:
484 | data["is_shared"] = is_shared
485 | if page_number is not None:
486 | data["page_number"] = page_number
487 | return self._return_results_page("detectors", data)
488 |
489 | def edit_detector(
490 | self,
491 | detector_id: str,
492 | name: str | None = None,
493 | detection_type: str | None = None,
494 | output_type: str | None = None,
495 | training_steps: int | None = None,
496 | backbone: str | None = None,
497 | tile_size: int | None = None,
498 | background_sample_ratio: float | None = None,
499 | ):
500 | """
501 | Edit a detector
502 |
503 | This a **beta** function, subject to change.
504 |
505 | Please note that depending on your plan some settings may not be editable.
506 |
507 | Args:
508 | detector_id: identifier of the detector
509 | name: Name of the detector
510 | detection_type: The type of the detector (one of 'count', 'segmentation')
511 | output_type: The output type of the detector (one of 'polygon', 'bbox')
512 | training_steps: The training steps the detector (int in [500, 40000])
513 | backbone: detector backbone (one of 'resnet18', 'resnet34', 'resnet50')
514 | tile_size: tile size (see HTTP API docs for the allowed values)
515 | background_sample_ratio: bg sample ratio (between 0 and 1)
516 |
517 | Raises:
518 | APIError: There was an error while editing the detector
519 | """
520 | # Build request body
521 | body_data: dict[str, Any] = {"configuration": {}}
522 | if name:
523 | body_data["name"] = name
524 | for i in (
525 | "detection_type",
526 | "output_type",
527 | "training_steps",
528 | "backbone",
529 | "tile_size",
530 | "background_sample_ratio",
531 | ):
532 | if locals()[i]:
533 | body_data["configuration"][i] = locals()[i]
534 | # Call API and check response
535 | resp = self.sess.put(
536 | self._full_url("detectors/%s/" % detector_id), json=body_data
537 | )
538 | if not resp.status_code == 204:
539 | raise APIError(resp.text)
540 |
541 | def delete_detector(self, detector_id: str):
542 | """
543 | Deletes a given detector by its identifier
544 |
545 | Args:
546 | detector_id: The id of the detector to delete
547 |
548 | Raises:
549 | APIError: There was an error while trying to delete the detector
550 | """
551 |
552 | resp = self.sess.delete(self._full_url("detectors/%s/" % detector_id))
553 | if not resp.ok:
554 | raise APIError(resp.text)
555 |
556 | def run_detector(
557 | self, detector_id: str, raster_id: str, secondary_raster_id: str | None = None
558 | ) -> str:
559 | """
560 | Runs a detector on a raster: predictions are subject to a minimum charge
561 | of 10 MP.
562 |
563 | Args:
564 | detector_id: The id of the detector
565 | raster_id: The id of the raster
566 | secondary_raster_id: The id of the secondary raster. This needs to be provided to
567 | run change detectors.
568 |
569 | Returns:
570 | str: The id of the operation. You typically want to pass this
571 | to `download_result_to_feature_collection`
572 | """
573 | body = {"raster_id": raster_id}
574 | if secondary_raster_id is not None:
575 | body["secondary_raster_id"] = secondary_raster_id
576 | resp = self.sess.post(
577 | self._full_url("detectors/%s/run/" % detector_id),
578 | json=body,
579 | )
580 | if not resp.ok:
581 | raise APIError(resp.text)
582 | operation_response = resp.json()
583 | self._wait_until_operation_completes(operation_response)
584 | return operation_response["operation_id"]
585 |
586 | def download_result_to_feature_collection(self, operation_id: str, filename: str):
587 | """
588 | Downloads the results from a detection operation to a local GeoJSON file.
589 |
590 | Results are stored as a FeatureCollection of Multipolygon. Each feature has a 'class_name'
591 | property indicating the corresponding class name
592 |
593 | Args:
594 | operation_id: The id of the operation to download. This should be a
595 | detect operation
596 | filename: The local filename where to save the results
597 | """
598 | results = self.get_operation_results(operation_id)
599 | # We download results to a temporary directory and then assemble them into a
600 | # FeatureCollection
601 | fc: FeatureCollection = {"type": "FeatureCollection", "features": []}
602 |
603 | for class_result in results["by_class"]:
604 | with tempfile.NamedTemporaryFile() as f:
605 | self.download_vector_layer_to_file(
606 | class_result["result"]["vector_layer_id"], f.name
607 | )
608 | with open(f.name) as fr:
609 | vl_polygon_fc: FeatureCollection = json.load(fr)
610 | mp_feature: Feature = {
611 | "type": "Feature",
612 | "properties": {"class_name": class_result["class"]["name"]},
613 | "geometry": {"type": "MultiPolygon", "coordinates": []},
614 | }
615 | for poly_feat in vl_polygon_fc["features"]:
616 | mp_feature["geometry"]["coordinates"].append(
617 | poly_feat["geometry"]["coordinates"]
618 | )
619 | fc["features"].append(mp_feature)
620 | with open(filename, "w") as f:
621 | json.dump(fc, f)
622 |
623 | def download_result_to_file(self, operation_id: str, filename: str):
624 | """
625 | Downloads a set of results to a local GeoJSON file
626 |
627 | .. deprecated:: 1.0.0
628 | Use `download_result_to_feature_collection` instead
629 |
630 | Args:
631 | operation_id: The id of the operation to download
632 | filename: The local filename where to save the results
633 | """
634 | warnings.warn(
635 | "This function is deprecated. Use download_result_to_feature_collection instead",
636 | DeprecationWarning,
637 | )
638 | result_url = self.get_operation_results(operation_id)["url"]
639 | logger.debug("Trying to download result %s.." % result_url)
640 | _download_to_file(result_url, filename)
641 |
642 | def set_annotations(
643 | self,
644 | detector_id: str,
645 | raster_id: str,
646 | annotation_type: Literal[
647 | "outline", "training_area", "testing_area", "validation_area"
648 | ],
649 | annotations: dict[str, Any],
650 | class_id: str | None = None,
651 | ):
652 | """
653 | Replaces the annotations of type 'annotation_type' with 'annotations', for the
654 | given raster-detector pair.
655 |
656 | Args:
657 | detector_id: The id of the detector
658 | raster_id: The id of the raster
659 | annotation_type: One of (outline, training_area, testing_area, validation_area)
660 | annotations: GeoJSON representation of the features to upload
661 | class_id: The class id to which to associate the new annotations. Only valid if
662 | annotation_type is "outline"
663 | """
664 | # Get an upload url
665 | create_upload_resp = self.sess.post(
666 | self._full_url(
667 | "detectors/%s/training_rasters/%s/%s/upload/bulk/"
668 | % (detector_id, raster_id, annotation_type)
669 | )
670 | )
671 | if not create_upload_resp.ok:
672 | raise APIError(create_upload_resp.text)
673 |
674 | upload = create_upload_resp.json()
675 | upload_url = upload["upload_url"]
676 | upload_id = upload["upload_id"]
677 |
678 | # Given we do not use self.sess the timeout is disabled (requests default), and this
679 | # is good as file upload can take a long time
680 | upload_resp = requests.put(upload_url, json=annotations)
681 | if not upload_resp.ok:
682 | logger.error(
683 | "Error when sending annotation upload %s to blobstore at url %s"
684 | % (upload_id, upload_url)
685 | )
686 | raise APIError(upload_resp.text)
687 |
688 | # Commit upload
689 | body = {}
690 | if class_id is not None:
691 | body["class_id"] = class_id
692 | commit_upload_resp = self.sess.post(
693 | self._full_url(
694 | "detectors/%s/training_rasters/%s/%s/upload/bulk/%s/commit/"
695 | % (detector_id, raster_id, annotation_type, upload_id)
696 | ),
697 | json=body,
698 | )
699 | if not commit_upload_resp.ok:
700 | raise APIError(commit_upload_resp.text)
701 |
702 | # Poll for operation completion
703 | self._wait_until_operation_completes(commit_upload_resp.json())
704 |
705 | def train_detector(self, detector_id: str):
706 | """
707 | Start the training of a detector
708 |
709 | Args:
710 | detector_id: The id of the detector
711 | """
712 | resp = self.sess.post(self._full_url("detectors/%s/train/" % detector_id))
713 | if not resp.ok:
714 | raise APIError(resp.text)
715 | return self._wait_until_operation_completes(resp.json())
716 |
717 | def run_dataset_recommendation(self, detector_id: str):
718 | """
719 | This is an **experimental** feature
720 |
721 | Runs dataset recommendation on a detector. Note that you currently have to use
722 | the UI to be able to view the recommendation markers/report.
723 |
724 | Args:
725 | detector_id: The id of the detector
726 | """
727 | resp = self.sess.post(
728 | self._full_url("detectors/%s/dataset_recommendation/" % detector_id)
729 | )
730 | if not resp.ok:
731 | raise APIError(resp.text)
732 | return self._wait_until_operation_completes(resp.json())
733 |
734 | def run_advanced_tool(
735 | self, tool_id: str, inputs: dict[str, Any], outputs: dict[str, Any]
736 | ):
737 | """
738 | This is an experimental feature
739 |
740 | Runs a tool and waits for its execution, returning the finished operation metadata
741 |
742 | Args:
743 | tool_id: The id of the tool to run
744 | inputs: tool inputs
745 | outputs: tool outputs
746 |
747 | Raises:
748 | APIError: There was an error while launching and executing the tool
749 | """
750 | resp = self.sess.post(
751 | self._full_url("advanced_tools/%s/run/" % tool_id),
752 | json={"inputs": inputs, "outputs": outputs},
753 | )
754 | if not resp.ok:
755 | raise APIError(resp.text)
756 | return self._wait_until_operation_completes(resp.json())
757 |
758 | def upload_vector_layer(
759 | self,
760 | raster_id: str,
761 | filename: str,
762 | name: str | None = None,
763 | color: str | None = None,
764 | ) -> str:
765 | """
766 | Uploads a vector layer from a GeoJSON file
767 |
768 | This a **beta** function, subject to change.
769 |
770 | Args:
771 | raster_id: The id of the raster we want to attach the vector layer to
772 | filename: Path to the local GeoJSOn file we want to upload
773 | name: Optional name to give to the vector layer
774 | color: Optional color of the vector layer, has an HTML hex color code (eg "#aabbcc")
775 | Returns:
776 | str: the vector layer unique identifier
777 | """
778 | resp = self.sess.post(self._full_url("vector_layers/%s/upload/" % raster_id))
779 | if not resp.ok:
780 | raise APIError(resp.text)
781 | upload = resp.json()
782 | upload_id, upload_url = upload["upload_id"], upload["upload_url"]
783 | _upload_file_to_blobstore(upload_url, filename)
784 | data = {}
785 | if name is not None:
786 | data["name"] = name
787 | if color is not None:
788 | data["color"] = color
789 | resp = self.sess.post(
790 | self._full_url(
791 | "vector_layers/%s/upload/%s/commit/" % (raster_id, upload_id)
792 | ),
793 | json=data,
794 | )
795 | if not resp.ok:
796 | raise APIError(resp.text)
797 | op = self._wait_until_operation_completes(resp.json())
798 | return op["results"]["vector_layer_id"]
799 |
800 | def edit_vector_layer(
801 | self, vector_layer_id: str, name: str | None = None, color: str | None = None
802 | ):
803 | """
804 | Edits a vector layer
805 |
806 | This a **beta** function, subject to change.
807 |
808 | Args:
809 | vector_layer_id: The id of the vector layer to remove
810 | name: new name
811 | color: new color
812 | """
813 | data = {}
814 | if name:
815 | data.update({"name": name})
816 | if color is not None:
817 | data.update({"color": color})
818 | resp = self.sess.put(
819 | self._full_url("vector_layers/%s/" % vector_layer_id), json=data
820 | )
821 | if not resp.ok:
822 | raise APIError(resp.text)
823 |
824 | def delete_vector_layer(self, vector_layer_id: str):
825 | """
826 | Removes a vector layer
827 |
828 | This a **beta** function, subject to change.
829 |
830 | Args:
831 | vector_layer_id: The id of the vector layer to remove
832 | """
833 | resp = self.sess.delete(self._full_url("vector_layers/%s/" % vector_layer_id))
834 | if not resp.ok:
835 | raise APIError(resp.text)
836 |
837 | def download_vector_layer_to_file(self, vector_layer_id: str, filename: str):
838 | """
839 | Downloads a vector layer
840 |
841 | This a **beta** function, subject to change.
842 |
843 | Args:
844 | vector_layer_id: The id of the vector layer to download
845 | filename: existing file to save the vector layer in, as a feature collection of polygons
846 | """
847 | resp = self.sess.post(
848 | self._full_url("vector_layers/%s/download/" % vector_layer_id)
849 | )
850 | if not resp.ok:
851 | raise APIError(resp.text)
852 | op = self._wait_until_operation_completes(resp.json())
853 | _download_to_file(op["results"]["download_url"], filename)
854 |
855 | def list_raster_markers(
856 | self,
857 | raster_id: str,
858 | page_number: int | None = None,
859 | ):
860 | """
861 | This a **beta** function, subject to change.
862 |
863 | List all the markers on a raster, see `ResultsPage` for the pagination access pattern.
864 |
865 | Args:
866 | raster_id: The id of the raster
867 | page_number: Optional page (from 1) of the list we want to retrieve
868 | """
869 | return self._return_results_page(
870 | "rasters/%s/markers/" % raster_id,
871 | {"page_number": page_number} if page_number is not None else None,
872 | )
873 |
874 | def create_marker(
875 | self,
876 | raster_id: str,
877 | detector_id: str | None,
878 | lng: float,
879 | lat: float,
880 | text: str,
881 | ) -> dict[str, Any]:
882 | """
883 | This is an **experimental** (beta) feature
884 |
885 | Creates a marker
886 |
887 | Args:
888 | raster_id: The id of the raster (belonging to detector) to create the marker on
889 | detector_id: The id of the detector to create the marker on. If this is None, the marker
890 | is created associated with the raster only
891 |
892 | Raises:
893 | APIError: There was an error while creating the marker
894 | """
895 | if detector_id is None:
896 | url = "rasters/%s/markers/" % raster_id
897 | else:
898 | url = "detectors/%s/training_rasters/%s/markers/" % (detector_id, raster_id)
899 | data = {
900 | "marker": {"type": "Point", "coordinates": [lng, lat]},
901 | "text": text,
902 | }
903 | resp = self.sess.post(self._full_url(url), json=data)
904 | if not resp.ok:
905 | raise APIError(resp.text)
906 | return resp.json()
907 |
908 | def import_raster_from_remote_source(
909 | self,
910 | raster_name: str,
911 | folder_id: str,
912 | source_id: str,
913 | aoi_filename: str,
914 | method: Literal["streaming"] = "streaming",
915 | ) -> str:
916 | """
917 | Import a raster from a remote imagery source given a GeoJSON file for the AOI
918 |
919 | Args:
920 | raster_name: Name of the new raster
921 | folder_id: The id of the folder / project the raster will live in
922 | source_id: The id of the remote imagery source to import from
923 | filename: The filename of a GeoJSON file. This should contain a FeatureCollection of
924 | Polygon/MultiPolygon representing the AOI of the new raster
925 |
926 | Raises:
927 | APIError: There was an error during import
928 | """
929 | # Get upload URL
930 | resp = self.sess.post(self._full_url("rasters/import/"))
931 | if not resp.ok:
932 | raise APIError(resp.text)
933 | data = resp.json()
934 | upload_url = data["upload_url"]
935 | upload_id = data["upload_id"]
936 | # Upload to blobstore
937 | _upload_file_to_blobstore(upload_url, aoi_filename)
938 | # Commit upload
939 | resp = self.sess.post(
940 | self._full_url(f"rasters/import/{upload_id}/commit/"),
941 | json={
942 | "method": method,
943 | "source_id": source_id,
944 | "folder_id": folder_id,
945 | "name": raster_name,
946 | },
947 | )
948 | if not resp.ok:
949 | raise APIError(resp.text)
950 | # Poll operation and get raster identifier
951 | operation = self._wait_until_operation_completes(resp.json())
952 | return operation["metadata"]["raster_id"]
953 |
954 | def list_raster_vector_layers(
955 | self,
956 | raster_id: str,
957 | search: str | None = None,
958 | detector_id: str | None = None,
959 | page_number: int | None = None,
960 | ):
961 | """
962 | This a **beta** function, subject to change.
963 |
964 | List all the vector layers on a raster, see `ResultsPage`
965 | for the pagination access pattern.
966 |
967 | Args:
968 | raster_id: The id of the raster
969 | search: Optional string to search layers by name
970 | page_number: Optional page (from 1) of the list we want to retrieve
971 | """
972 | params: dict[str, str | int] = {}
973 | if search is not None:
974 | params["search"] = search
975 | if detector_id is not None:
976 | params["detector"] = detector_id
977 | if page_number is not None:
978 | params["page_number"] = page_number
979 | url = "rasters/%s/vector_layers/" % raster_id
980 | return self._return_results_page(url, params)
981 |
982 | def list_detector_rasters(
983 | self,
984 | detector_id: str,
985 | page_number: int | None = None,
986 | ):
987 | """
988 | This a **beta** function, subject to change.
989 |
990 | List rasters of a detector, see `ResultsPage` for the pagination access pattern.
991 |
992 | Args:
993 | detector_id: The id of the detector
994 | page_number: Optional page (from 1) of the list we want to retrieve
995 | """
996 | params: dict[str, int] = {}
997 | if page_number is not None:
998 | params["page_number"] = page_number
999 | url = "detectors/%s/training_rasters/" % detector_id
1000 | return self._return_results_page(url, params)
1001 |
1002 | def create_folder(self, name: str) -> str:
1003 | """
1004 | Creates a new folder with the given name
1005 |
1006 | Args:
1007 | name: Name of the new folder
1008 |
1009 | Returns:
1010 | str: The id of the folder
1011 |
1012 | Raises:
1013 | APIError: There was an error while creating the folder
1014 | """
1015 | resp = self.sess.post(self._full_url("folders/"), json={"name": name})
1016 | if not resp.status_code == 201:
1017 | raise APIError(resp.text)
1018 | return resp.json()["id"]
1019 |
--------------------------------------------------------------------------------