├── .env.example
├── .github
│   ├── codecov.yml
│   ├── dependabot.yml
│   └── workflows
│       ├── ci.yml
│       └── deploy_mkdocs.yml
├── .gitignore
├── .pre-commit-config.yaml
├── CHANGES.md
├── CONTRIBUTING.md
├── LICENSE
├── README.md
├── data
│   ├── countries.sql
│   ├── landsat_wrs.sql
│   └── sentinel_mgrs.sql
├── docker-compose.yml
├── dockerfiles
│   ├── Dockerfile
│   ├── Dockerfile.db
│   └── scripts
│       └── wait-for-it.sh
├── docs
│   ├── logos
│   │   ├── TiPg_logo_large.png
│   │   ├── TiPg_logo_medium.png
│   │   ├── TiPg_logo_no_text_large.png
│   │   └── TiPg_logo_small.png
│   ├── mkdocs.yml
│   └── src
│       ├── advanced
│       │   ├── customization.md
│       │   ├── functions.md
│       │   ├── ogc_features_server.md
│       │   └── ogc_tiles_server.md
│       ├── benchmark.html
│       ├── contributing.md
│       ├── img
│       │   ├── favicon.ico
│       │   └── logo.png
│       ├── index.md
│       ├── release-notes.md
│       └── user_guide
│           ├── configuration.md
│           ├── endpoints.md
│           └── factories.md
├── pyproject.toml
├── tests
│   ├── __init__.py
│   ├── benchmarks.py
│   ├── conftest.py
│   ├── fixtures
│   │   ├── canada.sql
│   │   ├── functions
│   │   │   ├── hexagon.sql
│   │   │   ├── landsat_poly_centroid.sql
│   │   │   ├── landsat_tile.sql
│   │   │   └── squares.sql
│   │   ├── landsat_wrs.sql
│   │   ├── minnesota.sql
│   │   ├── my_data.sql
│   │   ├── nongeo_data.sql
│   │   └── templates
│   │       └── collections.html
│   ├── routes
│   │   ├── __init__.py
│   │   ├── test_collections.py
│   │   ├── test_endpoints.py
│   │   ├── test_geography.py
│   │   ├── test_item.py
│   │   ├── test_items.py
│   │   ├── test_non_geo.py
│   │   ├── test_templates.py
│   │   ├── test_tiles.py
│   │   ├── test_tilesets.py
│   │   └── test_tms.py
│   ├── test_factories.py
│   ├── test_main.py
│   ├── test_middleware.py
│   ├── test_schemas.py
│   ├── test_settings.py
│   └── test_sql_functions.py
└── tipg
    ├── __init__.py
    ├── collections.py
    ├── database.py
    ├── dependencies.py
    ├── errors.py
    ├── factory.py
    ├── filter
    │   ├── __init__.py
    │   ├── evaluate.py
    │   └── filters.py
    ├── logger.py
    ├── main.py
    ├── middleware.py
    ├── model.py
    ├── py.typed
    ├── resources
    │   ├── __init__.py
    │   ├── enums.py
    │   └── response.py
    ├── settings.py
    ├── sql
    │   └── dbcatalog.sql
    └── templates
        ├── collection.html
        ├── collections.html
        ├── conformance.html
        ├── debug.html
        ├── footer.html
        ├── header.html
        ├── item.html
        ├── items.html
        ├── landing.html
        ├── map.html
        ├── queryables.html
        ├── tilematrixset.html
        ├── tilematrixsets.html
        ├── tileset.html
        └── tilesets.html
/.env.example:
--------------------------------------------------------------------------------
1 |
2 | TIPG_NAME="OGC Features and Tiles API"
3 | DATABASE_URL=postgresql://username:password@0.0.0.0:5439/postgis
4 |
--------------------------------------------------------------------------------
/.github/codecov.yml:
--------------------------------------------------------------------------------
1 | comment: off
2 |
3 | coverage:
4 | status:
5 | project:
6 | default:
7 | target: auto
8 | threshold: 5
9 |
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | # Set update schedule for GitHub Actions
2 |
3 | version: 2
4 | updates:
5 |
6 | - package-ecosystem: "github-actions"
7 | directory: "/"
8 | schedule:
9 | # Check for updates to GitHub Actions every week
10 | interval: "weekly"
11 | groups:
12 | all:
13 | patterns:
14 | - "*"
15 |
--------------------------------------------------------------------------------
/.github/workflows/ci.yml:
--------------------------------------------------------------------------------
1 | name: CI
2 |
3 | # On every pull request, but only on push to main
4 | on:
5 | push:
6 | branches:
7 | - main
8 | tags:
9 | - '*'
10 | paths:
11 | # Only run test and docker publish if somde code have changed
12 | - 'pyproject.toml'
13 | - 'tipg/**'
14 | - 'tests/**'
15 | - '.pre-commit-config.yaml'
16 | - 'dockerfiles/**'
17 | - '.github/workflows/ci.yml'
18 | pull_request:
19 | env:
20 | LATEST_PY_VERSION: '3.13'
21 |
22 | jobs:
23 | tests:
24 | runs-on: ubuntu-latest
25 | strategy:
26 | matrix:
27 | python-version: ['3.9', '3.10', '3.11', '3.12', '3.13']
28 |
29 | steps:
30 | - uses: actions/checkout@v4
31 | - name: Set up Python ${{ matrix.python-version }}
32 | uses: actions/setup-python@v5
33 | with:
34 | python-version: ${{ matrix.python-version }}
35 |
36 | - name: install lib postgres
37 | uses: nyurik/action-setup-postgis@v2
38 |
39 | - name: Install dependencies
40 | run: |
41 | python -m pip install --upgrade pip
42 | python -m pip install .["test"]
43 |
44 | - name: Run pre-commit
45 | if: ${{ matrix.python-version == env.LATEST_PY_VERSION }}
46 | run: |
47 | python -m pip install pre-commit
48 | pre-commit run --all-files
49 |
50 | - name: Run tests
51 | run: python -m pytest --cov tipg --cov-report xml --cov-report term-missing --asyncio-mode=strict
52 |
53 | - name: Upload Results
54 | if: ${{ matrix.python-version == env.LATEST_PY_VERSION }}
55 | uses: codecov/codecov-action@v5
56 | with:
57 | file: ./coverage.xml
58 | flags: unittests
59 | name: ${{ matrix.python-version }}
60 | fail_ci_if_error: false
61 |
62 | benchmark:
63 | needs: [tests]
64 | runs-on: ubuntu-latest
65 | steps:
66 | - uses: actions/checkout@v4
67 | - name: Set up Python
68 | uses: actions/setup-python@v5
69 | with:
70 | python-version: ${{ env.LATEST_PY_VERSION }}
71 |
72 | - name: install lib postgres
73 | uses: nyurik/action-setup-postgis@v2
74 |
75 | - name: Install dependencies
76 | run: |
77 | python -m pip install --upgrade pip
78 | python -m pip install .["test"]
79 |
80 | - name: Run Benchmark
81 | run: python -m pytest tests/benchmarks.py --benchmark-only --benchmark-columns 'min, max, mean, median' --benchmark-json output.json --asyncio-mode=strict
82 |
83 | - name: Store and benchmark result
84 | uses: benchmark-action/github-action-benchmark@v1
85 | with:
86 | name: TiPg Benchmarks
87 | tool: 'pytest'
88 | output-file-path: output.json
89 | alert-threshold: '130%'
90 | comment-on-alert: true
91 | fail-on-alert: false
92 | # GitHub API token to make a commit comment
93 | github-token: ${{ secrets.GITHUB_TOKEN }}
94 | gh-pages-branch: 'gh-benchmarks'
95 | # Make a commit only if main
96 | auto-push: ${{ github.ref == 'refs/heads/main' }}
97 |
98 | publish:
99 | needs: [tests]
100 | runs-on: ubuntu-latest
101 | if: contains(github.ref, 'tags') && github.event_name == 'push'
102 | steps:
103 | - uses: actions/checkout@v4
104 | - name: Set up Python
105 | uses: actions/setup-python@v5
106 | with:
107 | python-version: ${{ env.LATEST_PY_VERSION }}
108 |
109 | - name: Install dependencies
110 | run: |
111 | python -m pip install --upgrade pip
112 | python -m pip install hatch
113 | python -m hatch build
114 |
115 | - name: Set tag version
116 | id: tag
117 | run: |
118 | echo "version=${GITHUB_REF#refs/*/}" >> $GITHUB_OUTPUT
119 |
120 | - name: Set module version
121 | id: module
122 | run: |
123 | echo "version=$(hatch --quiet version)" >> $GITHUB_OUTPUT
124 |
125 | - name: Build and publish
126 | if: ${{ steps.tag.outputs.version }} == ${{ steps.module.outputs.version}}
127 | env:
128 | HATCH_INDEX_USER: ${{ secrets.PYPI_USERNAME }}
129 | HATCH_INDEX_AUTH: ${{ secrets.PYPI_PASSWORD }}
130 | run: |
131 | python -m hatch publish
132 |
133 | publish-docker:
134 | needs: [tests]
135 | if: github.ref == 'refs/heads/main' || startsWith(github.event.ref, 'refs/tags') || github.event_name == 'release'
136 | runs-on: ubuntu-latest
137 | steps:
138 | - name: Checkout
139 | uses: actions/checkout@v4
140 |
141 | - name: Set up QEMU
142 | uses: docker/setup-qemu-action@v3
143 |
144 | - name: Set up Docker Buildx
145 | uses: docker/setup-buildx-action@v3
146 |
147 | - name: Login to Github
148 | uses: docker/login-action@v3
149 | with:
150 | registry: ghcr.io
151 | username: ${{ github.actor }}
152 | password: ${{ secrets.GITHUB_TOKEN }}
153 |
154 | - name: Docker meta
155 | id: meta
156 | uses: docker/metadata-action@v5
157 | with:
158 | images: |
159 | ghcr.io/${{ github.repository }}
160 | flavor: |
161 | latest=false
162 | tags: |
163 | type=semver,pattern={{version}}
164 | type=raw,value=latest,enable={{is_default_branch}}
165 |
166 | - name: Build and push
167 | uses: docker/build-push-action@v6
168 | with:
169 | platforms: linux/amd64,linux/arm64
170 | context: .
171 | file: dockerfiles/Dockerfile
172 | push: ${{ github.event_name != 'pull_request' }}
173 | tags: ${{ steps.meta.outputs.tags }}
174 | labels: ${{ steps.meta.outputs.labels }}
175 | cache-from: type=gha
176 | cache-to: type=gha,mode=max
177 |
--------------------------------------------------------------------------------
/.github/workflows/deploy_mkdocs.yml:
--------------------------------------------------------------------------------
1 | name: Publish docs via GitHub Pages
2 |
3 | on:
4 | push:
5 | branches:
6 | - main
7 | paths:
8 | # Only rebuild website when docs have changed
9 | - 'README.md'
10 | - 'CHANGES.md'
11 | - 'CONTRIBUTING.md'
12 | - 'docs/**'
13 | - '.github/workflows/deploy_mkdocs.yml'
14 |
15 | jobs:
16 | build:
17 | name: Deploy docs
18 | runs-on: ubuntu-latest
19 | steps:
20 | - name: Checkout master
21 | uses: actions/checkout@v4
22 |
23 | - name: Set up Python 3.11
24 | uses: actions/setup-python@v5
25 | with:
26 | python-version: 3.11
27 |
28 | - name: Install dependencies
29 | run: |
30 | python -m pip install --upgrade pip
31 | python -m pip install -e .["docs"]
32 |
33 | - name: Deploy docs
34 | run: mkdocs gh-deploy --force -f docs/mkdocs.yml
35 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | env/
12 | build/
13 | develop-eggs/
14 | dist/
15 | downloads/
16 | eggs/
17 | .eggs/
18 | lib/
19 | lib64/
20 | parts/
21 | sdist/
22 | var/
23 | wheels/
24 | *.egg-info/
25 | .installed.cfg
26 | *.egg
27 |
28 | # PyInstaller
29 | # Usually these files are written by a python script from a template
30 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
31 | *.manifest
32 | *.spec
33 |
34 | # Installer logs
35 | pip-log.txt
36 | pip-delete-this-directory.txt
37 |
38 | # Unit test / coverage reports
39 | htmlcov/
40 | .tox/
41 | .coverage
42 | .coverage.*
43 | .cache
44 | nosetests.xml
45 | coverage.xml
46 | *.cover
47 | .hypothesis/
48 |
49 | # Translations
50 | *.mo
51 | *.pot
52 |
53 | # Django stuff:
54 | *.log
55 | local_settings.py
56 |
57 | # Flask stuff:
58 | instance/
59 | .webassets-cache
60 |
61 | # Scrapy stuff:
62 | .scrapy
63 |
64 | # Sphinx documentation
65 | docs/_build/
66 |
67 | # PyBuilder
68 | target/
69 |
70 | # Jupyter Notebook
71 | .ipynb_checkpoints
72 |
73 | # pyenv
74 | .python-version
75 |
76 | # celery beat schedule file
77 | celerybeat-schedule
78 |
79 | # SageMath parsed files
80 | *.sage.py
81 |
82 | # dotenv
83 | .env
84 |
85 | # virtualenv
86 | .venv
87 | venv/
88 | ENV/
89 |
90 | # Spyder project settings
91 | .spyderproject
92 | .spyproject
93 |
94 | # Rope project settings
95 | .ropeproject
96 |
97 | # mkdocs documentation
98 | /site
99 |
100 | # mypy
101 | .mypy_cache/
102 |
103 | cdk.out/
104 |
105 | # pycharm
106 | .idea/
107 |
108 | .benchmarks/
109 |
110 | .pgdata/
111 | docs/src/api/*
112 |
113 | traefik.toml
114 | routes.toml
115 | .tms/
116 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | repos:
2 | - repo: https://github.com/abravalheri/validate-pyproject
3 | rev: v0.12.1
4 | hooks:
5 | - id: validate-pyproject
6 |
7 | - repo: https://github.com/PyCQA/isort
8 | rev: 5.13.2
9 | hooks:
10 | - id: isort
11 | language_version: python
12 |
13 | - repo: https://github.com/astral-sh/ruff-pre-commit
14 | rev: v0.8.4
15 | hooks:
16 | - id: ruff
17 | args: ["--fix"]
18 | - id: ruff-format
19 |
20 | - repo: https://github.com/pre-commit/mirrors-mypy
21 | rev: v1.11.2
22 | hooks:
23 | - id: mypy
24 | language_version: python
25 | # No reason to run if only tests have changed. They intentionally break typing.
26 | exclude: tests/.*
27 | additional_dependencies:
28 | - pydantic~=2.0
29 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Development - Contributing
2 |
3 | Issues and pull requests are more than welcome: https://github.com/developmentseed/tipg/issues
4 |
5 | **dev install**
6 |
7 | ```bash
8 | git clone https://github.com/developmentseed/tipg.git
9 | cd tipg
10 | python -m pip install -e .["test,dev"]
11 | ```
12 |
13 | You can then run the tests with the following command:
14 |
15 | ```sh
16 | python -m pytest --cov tipg --cov-report term-missing --asyncio-mode=strict
17 | ```
18 |
19 | and run benchmark
20 |
21 | ```sh
22 | python -m pytest tests/benchmarks.py --benchmark-only --benchmark-columns 'min, max, mean, median' --asyncio-mode=strict
23 | ```
24 |
25 | **pre-commit**
26 |
27 | This repo is set to use `pre-commit` to run *isort*, *flake8*, *pydocstring*, *black* ("uncompromising Python code formatter") and mypy when committing new code.
28 |
29 | ```bash
30 | # Install pre-commit command
31 | $ pip install pre-commit
32 |
33 | # Setup pre-commit withing your local environment
34 | $ pre-commit install
35 | ```
36 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2022 Development Seed
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | <!-- header logo and badge images omitted -->
2 |
3 | Simple and Fast Geospatial OGC Features and Tiles API for PostGIS.
4 |
20 | ---
21 |
22 | **Documentation**: https://developmentseed.org/tipg/
23 |
24 | **Source Code**: https://github.com/developmentseed/tipg
25 |
26 | ---
27 |
28 | `tipg`, pronounced *T[ee]pg*, is a **Python** package that helps create lightweight OGC **Features** and **Tiles** APIs with a PostGIS database backend. The API has been designed around the [OGC Features](https://ogcapi.ogc.org/features) and [OGC Tiles](https://ogcapi.ogc.org/tiles/) specifications.
29 |
30 | > **Note**
31 | > This project is the result of the merge between [tifeatures](https://github.com/developmentseed/tifeatures) and [timvt](https://github.com/developmentseed/timvt).
32 |
33 | ## Install
34 |
35 | ```bash
36 | $ python -m pip install pip -U
37 | $ python -m pip install tipg
38 |
39 | # or from source
40 | $ git clone https://github.com/developmentseed/tipg.git
41 | $ cd tipg
42 | $ python -m pip install -e .
43 | ```
44 |
45 | ## OGC Specifications
46 |
47 | Specification | Status | link |
48 | | -- | -- | -- |
49 | OGC Common Part 1: Core | ✅ | https://docs.ogc.org/DRAFTS/19-072.html
50 | OGC Common Part 2: Geospatial Data | ✅ | http://docs.ogc.org/DRAFTS/20-024.html
51 | OGC Features Part 1: Core | ✅ | https://docs.ogc.org/is/17-069r4/17-069r4.html
52 | OGC Features Part 2: CRS by Reference | ❌ | https://docs.ogc.org/is/18-058r1/18-058r1.html
53 | OGC Features Part 3: Filtering / CQL2 | ✅ | https://docs.ogc.org/DRAFTS/19-079r1.html
54 | OGC Tiles Part 1: Core | ✅ | https://docs.ogc.org/is/20-057/20-057.html
55 |
56 | Notes:
57 |
58 | We chose not to implement Features Part 2 (CRS by Reference) to avoid introducing CRS-specific GeoJSON. We may review this decision in the future.
59 |
60 | While we tried to follow OGC specifications to the letter, some API endpoints might have more capabilities (e.g., geometry column selection).
61 |
62 | ## PostGIS/PostgreSQL
63 |
64 | `tipg` relies heavily on PostGIS `ST_*` functions. The PostGIS extension must be installed in your PostgreSQL database.
65 |
66 | ```sql
67 | SELECT name, default_version,installed_version
68 | FROM pg_available_extensions WHERE name LIKE 'postgis%' or name LIKE 'address%';
69 | ```
70 |
71 | ```sql
72 | CREATE EXTENSION postgis;
73 | ```
74 |
75 | ### Configuration
76 |
77 | The application needs access to the database in order to work. `tipg` uses [Starlette](https://www.starlette.io/config/)'s configuration pattern, which makes use of environment variables or a `.env` file to pass variables to the application.
78 |
79 | An example of a `.env` file can be found in [.env.example](https://github.com/developmentseed/tipg/blob/main/.env.example)
80 |
81 | ```
82 | # you need to define the DATABASE_URL directly
83 | DATABASE_URL=postgresql://username:password@0.0.0.0:5432/postgis
84 | ```
85 |
86 | More info about configuration options at https://developmentseed.org/tipg/user_guide/configuration/
87 |
88 | ## Launch
89 |
90 | ```bash
91 | $ python -m pip install uvicorn
92 |
93 | # Set your PostGIS database instance URL in the environment
94 | $ export DATABASE_URL=postgresql://username:password@0.0.0.0:5432/postgis
95 | $ uvicorn tipg.main:app
96 |
97 | # or using Docker
98 |
99 | $ docker-compose up app
100 | ```
101 |
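The bundled application can also be started from Python rather than the `uvicorn` CLI. A minimal sketch (host and port are illustrative; `DATABASE_URL` must be set in the environment beforehand):

```python
# launch_tipg.py -- illustrative sketch only.
# Start the bundled tipg application programmatically with uvicorn.
import uvicorn

if __name__ == "__main__":
    # DATABASE_URL must already be exported in the environment.
    uvicorn.run("tipg.main:app", host="0.0.0.0", port=8000)
```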
102 |
115 | ## Docker images
116 |
117 | We publish Docker images for every release `tag` and for every commit to the `main` branch:
118 |
119 | | | Gunicorn |
120 | | -- | -- |
121 | main commit | `ghcr.io/developmentseed/tipg:latest`
122 | tags | `ghcr.io/developmentseed/tipg:X.X.X`
123 | dockerfile | [/dockerfiles/Dockerfile](https://github.com/developmentseed/tipg/blob/main/dockerfiles/Dockerfile)
124 |
125 | See all versions at https://github.com/developmentseed/tipg/pkgs/container/tipg
126 |
127 | ```
128 | $ docker run \
129 | -p 8000:8000 \
130 | -e DATABASE_URL=postgresql://username:password@0.0.0.0:5432/postgis \
131 | ghcr.io/developmentseed/tipg:latest \
132 | uvicorn tipg.main:app --host 0.0.0.0 --port 8000 --workers 1
133 |
134 | # using Gunicorn
135 | $ docker run \
136 | -p 8000:8000 \
137 | -e DATABASE_URL=postgresql://username:password@0.0.0.0:5432/postgis \
138 | ghcr.io/developmentseed/tipg:latest \
139 | gunicorn -k uvicorn.workers.UvicornWorker tipg.main:app --bind 0.0.0.0:8000 --workers 1
140 | ```
141 |
142 | ## Contribution & Development
143 |
144 | See [CONTRIBUTING.md](https://github.com/developmentseed/tipg/blob/main/CONTRIBUTING.md)
145 |
146 | ## License
147 |
148 | See [LICENSE](https://github.com/developmentseed/tipg/blob/main/LICENSE)
149 |
150 | ## Authors
151 |
152 | Created by [Development Seed](https://developmentseed.org)
153 |
154 | ## Changes
155 |
156 | See [CHANGES.md](https://github.com/developmentseed/tipg/blob/main/CHANGES.md).
157 |
158 |
--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
1 | services:
2 | app:
3 | build:
4 | context: .
5 | dockerfile: dockerfiles/Dockerfile
6 | environment:
7 | - PYTHONWARNINGS=ignore
8 | - POSTGRES_USER=username
9 | - POSTGRES_PASS=password
10 | - POSTGRES_DBNAME=postgis
11 | - POSTGRES_HOST=database
12 | - POSTGRES_PORT=5432
13 | - DEBUG=TRUE
14 | ports:
15 | - "${MY_DOCKER_IP:-127.0.0.1}:8081:8081"
16 | depends_on:
17 | - database
18 | command:
19 | bash -c "bash /tmp/scripts/wait-for-it.sh database:5432 --timeout=30 && uvicorn tipg.main:app --host 0.0.0.0 --port 8081 --workers 5"
20 | volumes:
21 | - ./dockerfiles/scripts:/tmp/scripts
22 |
23 | database:
24 | build:
25 | context: .
26 | dockerfile: dockerfiles/Dockerfile.db
27 | environment:
28 | - POSTGRES_USER=username
29 | - POSTGRES_PASSWORD=password
30 | - POSTGRES_DB=postgis
31 | ports:
32 | - "5439:5432"
33 | command: postgres -N 500
34 | volumes:
35 | - ./.pgdata:/var/lib/postgresql/data
36 |
37 | # pg_featureserv:
38 | # image: pramsey/pg_featureserv:latest
39 | # environment:
40 | # - DATABASE_URL=postgresql://username:password@database:5432/postgis
41 | # ports:
42 | # - "9000:9000"
43 | # depends_on:
44 | # - database
45 |
46 | # fast_features:
47 | # image: ghcr.io/microsoft/ogc-api-fast-features:main
48 | # environment:
49 | # - APP_POSTGRESQL_PROFILE=stac_hybrid
50 | # - APP_POSTGRESQL_HOST=database
51 | # - APP_POSTGRESQL_PORT=5432
52 | # - APP_POSTGRESQL_USER=username
53 | # - APP_POSTGRESQL_PASSWORD=password
54 | # - APP_POSTGRESQL_DBNAME=postgis
55 | # - APP_DATA_SOURCE_TYPES=postgresql
56 | # ports:
57 | # - "8080:80"
58 | # depends_on:
59 | # - database
60 |
61 | # pg_tileserv:
62 | # image: pramsey/pg_tileserv:latest
63 | # environment:
64 | # - DATABASE_URL=postgresql://username:password@database:5432/postgis
65 | # ports:
66 | # - "7800:7800"
67 | # depends_on:
68 | # - database
69 |
70 | # martin:
71 | # platform: linux/amd64
72 | # image: maplibre/martin
73 | # environment:
74 | # - DATABASE_URL=postgresql://username:password@database:5432/postgis
75 | # ports:
76 | # - "3000:3000"
77 | # depends_on:
78 | # - database
79 |
80 | # t-rex:
81 | # platform: linux/amd64
82 | # image: sourcepole/t-rex
83 | # ports:
84 | # - "3000:3000"
85 | # command:
86 | # serve --dbconn postgresql://username:password@database:5432/postgis --bind=0.0.0.0 --port 3000
87 | # depends_on:
88 | # - database
89 |
90 |
--------------------------------------------------------------------------------
/dockerfiles/Dockerfile:
--------------------------------------------------------------------------------
1 | ARG PYTHON_VERSION=3.12
2 |
3 | FROM bitnami/python:${PYTHON_VERSION}
4 | RUN apt update && apt upgrade -y \
5 | && apt install curl -y \
6 | && rm -rf /var/lib/apt/lists/*
7 |
8 | # Ensure root certificates are always updated at every container build
9 | # and curl is using the latest version of them
10 | RUN mkdir /usr/local/share/ca-certificates/cacert.org
11 | RUN cd /usr/local/share/ca-certificates/cacert.org && curl -k -O https://www.cacert.org/certs/root.crt
12 | RUN cd /usr/local/share/ca-certificates/cacert.org && curl -k -O https://www.cacert.org/certs/class3.crt
13 | RUN update-ca-certificates
14 | ENV CURL_CA_BUNDLE=/etc/ssl/certs/ca-certificates.crt
15 |
16 | RUN python -m pip install -U pip
17 | RUN python -m pip install uvicorn uvicorn-worker gunicorn
18 |
19 | COPY README.md README.md
20 | COPY LICENSE LICENSE
21 | COPY tipg/ tipg/
22 | COPY pyproject.toml pyproject.toml
23 |
24 | RUN python -m pip install . --no-cache-dir
25 | RUN rm -rf tipg/ README.md pyproject.toml LICENSE
26 |
27 | ###################################################
28 | # For compatibility (might be removed at one point)
29 | ENV MODULE_NAME=tipg.main
30 | ENV VARIABLE_NAME=app
31 | ENV HOST=0.0.0.0
32 | ENV PORT=80
33 | ENV WEB_CONCURRENCY=1
34 | CMD gunicorn -k uvicorn.workers.UvicornWorker ${MODULE_NAME}:${VARIABLE_NAME} --bind ${HOST}:${PORT} --workers ${WEB_CONCURRENCY}
35 |
--------------------------------------------------------------------------------
/dockerfiles/Dockerfile.db:
--------------------------------------------------------------------------------
1 | FROM ghcr.io/vincentsarago/postgis:14-3.4
2 |
3 | COPY data/*.sql /docker-entrypoint-initdb.d/
4 |
--------------------------------------------------------------------------------
/dockerfiles/scripts/wait-for-it.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | # Use this script to test if a given TCP host/port are available
3 |
4 | ######################################################
5 | # Copied from https://github.com/vishnubob/wait-for-it
6 | ######################################################
7 |
8 | WAITFORIT_cmdname=${0##*/}
9 |
10 | echoerr() { if [[ $WAITFORIT_QUIET -ne 1 ]]; then echo "$@" 1>&2; fi }
11 |
12 | usage()
13 | {
14 | cat << USAGE >&2
15 | Usage:
16 | $WAITFORIT_cmdname host:port [-s] [-t timeout] [-- command args]
17 | -h HOST | --host=HOST Host or IP under test
18 | -p PORT | --port=PORT TCP port under test
19 | Alternatively, you specify the host and port as host:port
20 | -s | --strict Only execute subcommand if the test succeeds
21 | -q | --quiet Don't output any status messages
22 | -t TIMEOUT | --timeout=TIMEOUT
23 | Timeout in seconds, zero for no timeout
24 | -- COMMAND ARGS Execute command with args after the test finishes
25 | USAGE
26 | exit 1
27 | }
28 |
29 | wait_for()
30 | {
31 | if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then
32 | echoerr "$WAITFORIT_cmdname: waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT"
33 | else
34 | echoerr "$WAITFORIT_cmdname: waiting for $WAITFORIT_HOST:$WAITFORIT_PORT without a timeout"
35 | fi
36 | WAITFORIT_start_ts=$(date +%s)
37 | while :
38 | do
39 | if [[ $WAITFORIT_ISBUSY -eq 1 ]]; then
40 | nc -z $WAITFORIT_HOST $WAITFORIT_PORT
41 | WAITFORIT_result=$?
42 | else
43 | (echo -n > /dev/tcp/$WAITFORIT_HOST/$WAITFORIT_PORT) >/dev/null 2>&1
44 | WAITFORIT_result=$?
45 | fi
46 | if [[ $WAITFORIT_result -eq 0 ]]; then
47 | WAITFORIT_end_ts=$(date +%s)
48 | echoerr "$WAITFORIT_cmdname: $WAITFORIT_HOST:$WAITFORIT_PORT is available after $((WAITFORIT_end_ts - WAITFORIT_start_ts)) seconds"
49 | break
50 | fi
51 | sleep 1
52 | done
53 | return $WAITFORIT_result
54 | }
55 |
56 | wait_for_wrapper()
57 | {
58 | # In order to support SIGINT during timeout: http://unix.stackexchange.com/a/57692
59 | if [[ $WAITFORIT_QUIET -eq 1 ]]; then
60 | timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --quiet --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT &
61 | else
62 | timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT &
63 | fi
64 | WAITFORIT_PID=$!
65 | trap "kill -INT -$WAITFORIT_PID" INT
66 | wait $WAITFORIT_PID
67 | WAITFORIT_RESULT=$?
68 | if [[ $WAITFORIT_RESULT -ne 0 ]]; then
69 | echoerr "$WAITFORIT_cmdname: timeout occurred after waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT"
70 | fi
71 | return $WAITFORIT_RESULT
72 | }
73 |
74 | # process arguments
75 | while [[ $# -gt 0 ]]
76 | do
77 | case "$1" in
78 | *:* )
79 | WAITFORIT_hostport=(${1//:/ })
80 | WAITFORIT_HOST=${WAITFORIT_hostport[0]}
81 | WAITFORIT_PORT=${WAITFORIT_hostport[1]}
82 | shift 1
83 | ;;
84 | --child)
85 | WAITFORIT_CHILD=1
86 | shift 1
87 | ;;
88 | -q | --quiet)
89 | WAITFORIT_QUIET=1
90 | shift 1
91 | ;;
92 | -s | --strict)
93 | WAITFORIT_STRICT=1
94 | shift 1
95 | ;;
96 | -h)
97 | WAITFORIT_HOST="$2"
98 | if [[ $WAITFORIT_HOST == "" ]]; then break; fi
99 | shift 2
100 | ;;
101 | --host=*)
102 | WAITFORIT_HOST="${1#*=}"
103 | shift 1
104 | ;;
105 | -p)
106 | WAITFORIT_PORT="$2"
107 | if [[ $WAITFORIT_PORT == "" ]]; then break; fi
108 | shift 2
109 | ;;
110 | --port=*)
111 | WAITFORIT_PORT="${1#*=}"
112 | shift 1
113 | ;;
114 | -t)
115 | WAITFORIT_TIMEOUT="$2"
116 | if [[ $WAITFORIT_TIMEOUT == "" ]]; then break; fi
117 | shift 2
118 | ;;
119 | --timeout=*)
120 | WAITFORIT_TIMEOUT="${1#*=}"
121 | shift 1
122 | ;;
123 | --)
124 | shift
125 | WAITFORIT_CLI=("$@")
126 | break
127 | ;;
128 | --help)
129 | usage
130 | ;;
131 | *)
132 | echoerr "Unknown argument: $1"
133 | usage
134 | ;;
135 | esac
136 | done
137 |
138 | if [[ "$WAITFORIT_HOST" == "" || "$WAITFORIT_PORT" == "" ]]; then
139 | echoerr "Error: you need to provide a host and port to test."
140 | usage
141 | fi
142 |
143 | WAITFORIT_TIMEOUT=${WAITFORIT_TIMEOUT:-15}
144 | WAITFORIT_STRICT=${WAITFORIT_STRICT:-0}
145 | WAITFORIT_CHILD=${WAITFORIT_CHILD:-0}
146 | WAITFORIT_QUIET=${WAITFORIT_QUIET:-0}
147 |
148 | # Check to see if timeout is from busybox?
149 | WAITFORIT_TIMEOUT_PATH=$(type -p timeout)
150 | WAITFORIT_TIMEOUT_PATH=$(realpath $WAITFORIT_TIMEOUT_PATH 2>/dev/null || readlink -f $WAITFORIT_TIMEOUT_PATH)
151 |
152 | WAITFORIT_BUSYTIMEFLAG=""
153 | if [[ $WAITFORIT_TIMEOUT_PATH =~ "busybox" ]]; then
154 | WAITFORIT_ISBUSY=1
155 | # Check if busybox timeout uses -t flag
156 | # (recent Alpine versions don't support -t anymore)
157 | if timeout &>/dev/stdout | grep -q -e '-t '; then
158 | WAITFORIT_BUSYTIMEFLAG="-t"
159 | fi
160 | else
161 | WAITFORIT_ISBUSY=0
162 | fi
163 |
164 | if [[ $WAITFORIT_CHILD -gt 0 ]]; then
165 | wait_for
166 | WAITFORIT_RESULT=$?
167 | exit $WAITFORIT_RESULT
168 | else
169 | if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then
170 | wait_for_wrapper
171 | WAITFORIT_RESULT=$?
172 | else
173 | wait_for
174 | WAITFORIT_RESULT=$?
175 | fi
176 | fi
177 |
178 | if [[ $WAITFORIT_CLI != "" ]]; then
179 | if [[ $WAITFORIT_RESULT -ne 0 && $WAITFORIT_STRICT -eq 1 ]]; then
180 | echoerr "$WAITFORIT_cmdname: strict mode, refusing to execute subprocess"
181 | exit $WAITFORIT_RESULT
182 | fi
183 | exec "${WAITFORIT_CLI[@]}"
184 | else
185 | exit $WAITFORIT_RESULT
186 | fi
187 |
--------------------------------------------------------------------------------
/docs/logos/TiPg_logo_large.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/developmentseed/tipg/871ee56e38281dbfeec493fc20d08b7b60e922cd/docs/logos/TiPg_logo_large.png
--------------------------------------------------------------------------------
/docs/logos/TiPg_logo_medium.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/developmentseed/tipg/871ee56e38281dbfeec493fc20d08b7b60e922cd/docs/logos/TiPg_logo_medium.png
--------------------------------------------------------------------------------
/docs/logos/TiPg_logo_no_text_large.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/developmentseed/tipg/871ee56e38281dbfeec493fc20d08b7b60e922cd/docs/logos/TiPg_logo_no_text_large.png
--------------------------------------------------------------------------------
/docs/logos/TiPg_logo_small.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/developmentseed/tipg/871ee56e38281dbfeec493fc20d08b7b60e922cd/docs/logos/TiPg_logo_small.png
--------------------------------------------------------------------------------
/docs/mkdocs.yml:
--------------------------------------------------------------------------------
1 | site_name: tipg
2 | site_description: Simple and Fast Geospatial OGC Features and Tiles API for PostGIS.
3 |
4 | docs_dir: 'src'
5 | site_dir: 'build'
6 |
7 | repo_name: "developmentseed/tipg"
8 | repo_url: "https://github.com/developmentseed/tipg"
9 |
10 | extra:
11 | social:
12 | - icon: "fontawesome/brands/github"
13 | link: "https://github.com/developmentseed"
14 | - icon: "fontawesome/brands/twitter"
15 | link: "https://twitter.com/developmentseed"
16 | - icon: "fontawesome/brands/medium"
17 | link: "https://medium.com/devseed"
18 |
19 | nav:
20 | - Intro: "index.md"
21 | - User Guide:
22 | - "Endpoints documentation": user_guide/endpoints.md
23 | - "Endpoints Factories": user_guide/factories.md
24 | - "Configuration": user_guide/configuration.md
25 | - Advanced User Guide:
26 | - "Function Layers": advanced/functions.md
27 | - "Customization": advanced/customization.md
28 | - "TiPg Vector Tiles server": advanced/ogc_tiles_server.md
29 | - "TiPg Features server": advanced/ogc_features_server.md
30 | - API:
31 | - database: api/tipg/database.md
32 | - dependencies: api/tipg/dependencies.md
33 | - errors: api/tipg/errors.md
34 | - factory: api/tipg/factory.md
35 | - middleware: api/tipg/middleware.md
36 | - model: api/tipg/model.md
37 | - settings: api/tipg/settings.md
38 | - resources:
39 | - enums: api/tipg/resources/enums.md
40 | - response: api/tipg/resources/response.md
41 | - filters:
42 | - evaluate: api/tipg/filter/evaluate.md
43 | - filters: api/tipg/filter/filters.md
44 | - Development - Contributing: "contributing.md"
45 | - Release Notes: "release-notes.md"
46 | - Performance Benchmarks: benchmark.html
47 |
48 | plugins:
49 | - search
50 | - mkdocstrings:
51 | enable_inventory: true
52 | handlers:
53 | python:
54 | paths: [src]
55 | options:
56 | filters:
57 | - "!^__post_init__"
58 | docstring_section_style: list
59 | docstring_style: google
60 | line_length: 100
61 | separate_signature: true
62 | show_root_heading: true
63 | show_signature_annotations: true
64 | show_source: false
65 | show_symbol_type_toc: true
66 | signature_crossrefs: true
67 | extensions:
68 | - griffe_inherited_docstrings
69 | import:
70 | - https://docs.python.org/3/objects.inv
71 | - https://docs.pydantic.dev/latest/objects.inv
72 | - https://fastapi.tiangolo.com/objects.inv
73 |
74 | theme:
75 | name: material
76 | palette:
77 | primary: indigo
78 | scheme: default
79 | favicon: img/favicon.ico
80 | logo: img/logo.png
81 |
82 |
83 | markdown_extensions:
84 | - admonition
85 | - attr_list
86 | - codehilite:
87 | guess_lang: false
88 | - def_list
89 | - footnotes
90 | - pymdownx.arithmatex
91 | - pymdownx.betterem
92 | - pymdownx.caret:
93 | insert: false
94 | - pymdownx.details
95 | - pymdownx.emoji
96 | - pymdownx.escapeall:
97 | hardbreak: true
98 | nbsp: true
99 | - pymdownx.magiclink:
100 | hide_protocol: true
101 | repo_url_shortener: true
102 | - pymdownx.smartsymbols
103 | - pymdownx.superfences
104 | - pymdownx.tasklist:
105 | custom_checkbox: true
106 | - pymdownx.tilde
107 | - toc:
108 | permalink: true
109 |
--------------------------------------------------------------------------------
/docs/src/advanced/customization.md:
--------------------------------------------------------------------------------
1 |
2 | `Tipg` is designed to be fully customizable while staying compliant with the OGC standards. This page shows some examples of customization.
3 |
4 |
5 | ### Application
6 |
7 | While `Tipg` provides a default application `tipg.main:app`, users can easily create their own FastAPI application and register the OGC API endpoints using the [endpoint factories](/tipg/user_guide/factories/) provided by `Tipg`.
8 |
9 | ```python
10 | from contextlib import asynccontextmanager
11 | from tipg.database import close_db_connection, connect_to_db
12 | from tipg.collections import register_collection_catalog
13 | from tipg.errors import DEFAULT_STATUS_CODES, add_exception_handlers
14 | from tipg.factory import OGCFeaturesFactory
15 | from tipg.settings import PostgresSettings, DatabaseSettings
16 |
17 | from fastapi import FastAPI
18 |
19 |
20 | @asynccontextmanager
21 | async def lifespan(app: FastAPI):
22 | """FastAPI Lifespan
23 |
24 | - Create DB connection POOL and `register` the custom tipg SQL function within `pg_temp`
25 | - Create the collection_catalog
26 | - Close the connection pool when closing the application
27 |
28 | """
29 | await connect_to_db(
30 | app,
31 | schemas=["public"],
32 | tipg_schema="pg_temp",
33 | settings=PostgresSettings(database_url="postgres://...."),
34 | )
35 | await register_collection_catalog(
36 | app,
37 | db_settings=DatabaseSettings(schemas=["public"], application_schema="pg_temp"),
38 | )
39 |
40 | yield
41 |
42 | await close_db_connection(app)
43 |
44 |
45 | app = FastAPI(openapi_url="/api", docs_url="/api.html", lifespan=lifespan)
46 |
47 | endpoints = OGCFeaturesFactory(with_common=True)
48 | app.include_router(endpoints.router)
49 |
50 | add_exception_handlers(app, DEFAULT_STATUS_CODES)
51 | ```
52 |
53 | ### HTML Templates
54 |
55 | The default `HTML` responses are generated using [Jinja](https://jinja.palletsprojects.com) HTML [templates](https://github.com/developmentseed/tipg/tree/main/tipg/templates).
56 |
57 | You can override part or a complete list of templates and then provide `TIPG_TEMPLATE_DIRECTORY` environment to tell the `tipg` application to first look in the provided directory for HTML templates.
58 |
59 | When building custom `tipg` application you can set the `templates` attribute of the `Endpoints` Factory.
60 |
61 | ```python
62 | from fastapi import FastAPI
63 | import jinja2
64 | from starlette.templating import Jinja2Templates
65 | from tipg.factory import Endpoints
66 |
67 | app = FastAPI(openapi_url="/api", docs_url="/api.html")
68 |
69 | templates_location = (
70 | [
71 | jinja2.FileSystemLoader("{PATH TO YOUR CUSTOM TEMPLATE DIRECTORY}"),
72 | jinja2.PackageLoader("tipg", "templates"), # Fall back to tipg's default templates
73 | ]
74 | )
75 |
76 | jinja2_env = jinja2.Environment(loader=jinja2.ChoiceLoader(templates_location))
77 | templates = Jinja2Templates(env=jinja2_env)
78 |
79 | ogc_api = Endpoints(templates=templates)
80 | app.include_router(ogc_api.router)
81 | ```
82 |
83 | Example:
84 |
85 | In [`eoAPI`](https://github.com/developmentseed/eoAPI), we use a custom logo by overriding the `header.html` : https://github.com/developmentseed/eoAPI/blob/8a3b3de4e82499994fec022229ac3be70bbc1388/runtime/eoapi/vector/eoapi/vector/templates/header.html
86 |
87 | 
88 |
89 |
90 | ### SQL Functions
91 |
92 | `tipg` supports SQL function layers (see [Functions](../functions/)).
93 |
94 | `Functions` will either be found by `tipg` at startup within the specified schemas, or can be registered dynamically in the [`pg_temp`](https://www.postgresql.org/docs/current/runtime-config-client.html) schema when creating the [Database connection](https://github.com/developmentseed/tipg/blob/2543707238a97a0527effff710a83f9bea66440f/tipg/db.py#L63-L65).
95 |
96 | To `register` custom SQL functions, users can set the `TIPG_CUSTOM_SQL_DIRECTORY` environment variable when using the `tipg` demo application, or set the `user_sql_files` option in [tipg.db.connect_to_db](https://github.com/developmentseed/tipg/blob/2543707238a97a0527effff710a83f9bea66440f/tipg/main.py#L90-L109).
97 |
98 | ```python
99 | from tipg.database import connect_to_db
100 | from tipg.collections import register_collection_catalog
101 | from tipg.settings import PostgresSettings, DatabaseSettings
102 |
103 | postgres_settings = PostgresSettings()
104 |
105 | app = FastAPI()
106 |
107 | @app.on_event("startup")
108 | async def startup_event() -> None:
109 | """Connect to database on startup."""
110 | await connect_to_db(
111 | app,
112 | settings=postgres_settings,
113 | schemas=["public"],
114 | user_sql_files="tests/fixtures/functions", # <----
115 | )
116 | await register_collection_catalog(
117 | app,
118 | db_settings=DatabaseSettings(
119 | schemas=["public"],
120 | exclude_function_schemas=["public"],
121 | ),
122 | )
123 | ```
124 |
125 | ```bash
126 | TIPG_DB_EXCLUDE_FUNCTION_SCHEMAS='["public"]' TIPG_CUSTOM_SQL_DIRECTORY=tests/fixtures/functions uvicorn tipg.main:app --port 8000 --reload
127 | ```
128 |
129 | ```
130 | curl -s http://127.0.0.1:8000/collections\?f\=json | jq -r '.collections[].id' | grep "pg_temp"
131 | pg_temp.landsat_centroids
132 | pg_temp.hexagons_g
133 | pg_temp.hexagons
134 | pg_temp.squares
135 | pg_temp.landsat
136 | ```
137 |
138 | ### Custom schema for `tipg` catalog method
139 |
140 | By default, when users start the `tipg` application, we register some SQL functions in the `pg_temp` schema. This schema might not always be available to the user deploying the application (e.g. in AWS Aurora).
141 |
142 | Starting with `tipg>=0.12`, users can use the `TIPG_DB_APPLICATION_SCHEMA` environment variable to change the schema where `tipg` will register the catalog function (see the sketch after the note below).
143 |
144 | !!! important
145 |
146 | This schema must already exist, and the logged-in user must have full permissions on the schema!
147 |
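A minimal sketch of the same setup done programmatically, reusing the lifespan pattern from the [Application](#application) section above (the `tipg_catalog` schema name is illustrative and must already exist):

```python
# Illustrative sketch: register the tipg catalog functions in an existing
# "tipg_catalog" schema instead of "pg_temp" (the programmatic equivalent of
# setting TIPG_DB_APPLICATION_SCHEMA for the default application).
from contextlib import asynccontextmanager

from fastapi import FastAPI

from tipg.collections import register_collection_catalog
from tipg.database import close_db_connection, connect_to_db
from tipg.settings import DatabaseSettings, PostgresSettings

db_settings = DatabaseSettings(schemas=["public"], application_schema="tipg_catalog")


@asynccontextmanager
async def lifespan(app: FastAPI):
    await connect_to_db(
        app,
        schemas=db_settings.schemas,
        tipg_schema="tipg_catalog",  # where the tipg SQL functions get created
        settings=PostgresSettings(),
    )
    await register_collection_catalog(app, db_settings=db_settings)
    yield
    await close_db_connection(app)


app = FastAPI(lifespan=lifespan)
```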
148 |
--------------------------------------------------------------------------------
/docs/src/advanced/functions.md:
--------------------------------------------------------------------------------
1 |
2 |
3 | `SQL Functions` are any procedural functions defined in the database that match the following criteria:
4 |
5 | - Must be defined to return `SETOF`
6 | - Functions defined to return `RECORD` must include typed OUT definitions in the function signature
7 | - All arguments (`IN` and `OUT`) must be named
8 | - Functions that return a `geometry` will be usable as Features and Vector Tiles; those that do not will still be available to return JSON/CSV
9 | - Functions that match these qualifications will be found based on the DB visibility settings (schemas)
10 |
11 | `SQL Function` arguments will be exposed to the items API as query parameters. Any argument that does not have a default will be required and will return an error if not set as a query parameter. Functions should have a unique name. Function overloading by argument type is not currently supported.
12 |
13 | If a function is defined to have `z`, `x`, or `y` parameters, those will be filled from the path parameters in requests to Tile endpoints. If a function has a `bounds` parameter, that will be populated by the Tile bounding box in requests to Tile endpoints. All other arguments will be filled from query parameters matching the name of the argument and cast to the appropriate types.
14 |
15 | At startup, the `tipg` application will look for any SQL functions matching the criteria above and expose them as `Collections`:
16 |
17 | ```bash
18 | curl -s http://127.0.0.1:8000/collections\?f\=json | jq -r '.collections[].id' | grep "public.st_"
19 | public.st_squaregrid
20 | public.st_hexagongrid
21 | public.st_subdivide
22 | ```
23 |
24 | Note: By default, `tipg` should find `st_squaregrid`, `st_hexagongrid`, and `st_subdivide` functions when using the `public` schema from PostGIS
25 |
26 | !!! important
27 |
28 | `SQL Functions` can be pre-existent in the database, or you can tell `tipg` to **register** SQL code dynamically to the `pg_temp` schema at startup, see [customization](../customization/#sql-functions)
29 |
30 |
31 | ### FUNCTION examples
32 |
33 | With `geometry` input:
34 |
35 | ```SQL
36 | CREATE FUNCTION hexagons(
37 | IN size int DEFAULT 10,
38 | IN bounds geometry DEFAULT 'srid=4326;POLYGON((-180 -90,-180 90,180 90,180 -90,-180 -90))'::geometry,
39 | OUT geom geometry,
40 | OUT i integer,
41 | OUT j integer
42 | ) RETURNS SETOF RECORD AS $$
43 | SELECT * FROM st_hexagongrid(size, bounds);
44 | $$ LANGUAGE SQL IMMUTABLE PARALLEL SAFE;
45 | ```
46 |
47 | Only returning geometries:
48 |
49 | ```sql
50 | CREATE FUNCTION hexagons(
51 | IN size int DEFAULT 10,
52 | IN bounds geometry DEFAULT 'srid=4326;POLYGON((-180 -90,-180 90,180 90,180 -90,-180 -90))'::geometry,
53 | OUT geom geometry
54 | ) RETURNS SETOF geometry AS $$
55 | SELECT geom FROM st_hexagongrid(size, bounds);
56 | $$ LANGUAGE SQL IMMUTABLE PARALLEL SAFE;
57 | ```
58 |
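Assuming one of the `hexagons` functions above is registered (here via the `pg_temp` schema) and the service is running locally, its `size` argument becomes a query parameter on the collection's items endpoint. A sketch using `httpx` (host, port and values are illustrative):

```python
# Illustrative only: the function's `size` argument is exposed as a query
# parameter on the /items endpoint of the corresponding collection.
import httpx

resp = httpx.get(
    "http://127.0.0.1:8000/collections/pg_temp.hexagons/items",
    params={"size": 20, "limit": 5, "f": "geojson"},
)
resp.raise_for_status()
print(len(resp.json()["features"]))  # number of features returned
```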
59 | With `x`, `y`, `z` input:
60 |
61 | ```sql
62 | CREATE FUNCTION landsat(
63 | IN z int,
64 | IN x int,
65 | IN y int,
66 | IN p int default 0,
67 | OUT path_row text,
68 | OUT grid_path int,
69 | OUT grid_row int,
70 | OUT geom geometry
71 | ) RETURNS SETOF RECORD AS $$
72 | SELECT pr as path_row, path as grid_path, row AS grid_row, geom FROM public.landsat_wrs WHERE path = p AND ST_Intersects(geom, ST_Transform(ST_TileEnvelope(z, x, y), 4326));
73 | $$ LANGUAGE SQL IMMUTABLE PARALLEL SAFE;
74 | ```
75 |
76 | ```bash
77 | curl -s http://127.0.0.1:8000/collections/pg_temp.landsat/queryables?f=schemajson | jq
78 |
79 | {
80 | "title": "pg_temp.landsat",
81 | "properties": {
82 | "geom": {
83 | "$ref": "https://geojson.org/schema/Geometry.json"
84 | },
85 | "grid_path": {
86 | "name": "grid_path",
87 | "type": "number"
88 | },
89 | "grid_row": {
90 | "name": "grid_row",
91 | "type": "number"
92 | },
93 | "path_row": {
94 | "name": "path_row",
95 | "type": "string"
96 | }
97 | },
98 | "type": "object",
99 | "$schema": "https://json-schema.org/draft/2019-09/schema",
100 | "$id": "http://127.0.0.1:8000/collections/pg_temp.landsat/queryables?f=schemajson"
101 | }
102 | ```
103 |
104 | ### VIEW example
105 |
106 | ```sql
107 | CREATE OR REPLACE VIEW landsat_centroids AS
108 | SELECT ogc_fid, pr, st_pointonsurface(geom) as geom, path, row
109 | FROM public.landsat_wrs;
110 | ```
111 |
112 | ```bash
113 | curl -s http://127.0.0.1:8000/collections/pg_temp.landsat_centroids/queryables?f=schemajson | jq
114 | {
115 | "title": "pg_temp.landsat_centroids",
116 | "properties": {
117 | "geom": {
118 | "$ref": "https://geojson.org/schema/Geometry.json"
119 | },
120 | "ogc_fid": {
121 | "name": "ogc_fid",
122 | "type": "number"
123 | },
124 | "path": {
125 | "name": "path",
126 | "type": "number"
127 | },
128 | "pr": {
129 | "name": "pr",
130 | "type": "string"
131 | },
132 | "row": {
133 | "name": "row",
134 | "type": "number"
135 | }
136 | },
137 | "type": "object",
138 | "$schema": "https://json-schema.org/draft/2019-09/schema",
139 | "$id": "http://127.0.0.1:8000/collections/pg_temp.landsat_centroids/queryables?f=schemajson"
140 | }
141 | ```
142 |
--------------------------------------------------------------------------------
/docs/src/advanced/ogc_features_server.md:
--------------------------------------------------------------------------------
1 |
2 | The `TiPg` default application comes with both OGC Tiles and Features endpoints, but some users might want only one or the other. `TiPg` is built around the notion of endpoint [factories](/tipg/user_guide/factories/), which makes it *easy* to build custom applications with the desired set of endpoints.
3 |
4 | The code below shows how to create a simple OGC Features service.
5 |
6 | ```python
7 | """OGC Features service."""
8 |
9 | from contextlib import asynccontextmanager
10 |
11 | import jinja2
12 |
13 | from tipg.collections import register_collection_catalog
14 | from tipg.database import close_db_connection, connect_to_db
15 | from tipg.errors import DEFAULT_STATUS_CODES, add_exception_handlers
16 | from tipg.factory import OGCFeaturesFactory
17 | from tipg.middleware import CacheControlMiddleware, CatalogUpdateMiddleware
18 | from tipg.settings import CustomSQLSettings, DatabaseSettings, PostgresSettings
19 |
20 | from fastapi import FastAPI
21 |
22 | from starlette.middleware.cors import CORSMiddleware
23 | from starlette.templating import Jinja2Templates
24 | from starlette_cramjam.middleware import CompressionMiddleware
25 |
26 | postgres_settings = PostgresSettings()
27 | db_settings = DatabaseSettings()
28 | custom_sql_settings = CustomSQLSettings()
29 |
30 |
31 | @asynccontextmanager
32 | async def lifespan(app: FastAPI):
33 | """FastAPI Lifespan."""
34 | # Create Connection Pool
35 | await connect_to_db(
36 | app,
37 | settings=postgres_settings,
38 | schemas=db_settings.schemas,
39 | user_sql_files=custom_sql_settings.sql_files,
40 | )
41 |
42 | # Register Collection Catalog
43 | await register_collection_catalog(app, db_settings=db_settings)
44 |
45 | yield
46 |
47 | # Close the Connection Pool
48 | await close_db_connection(app)
49 |
50 |
51 | app = FastAPI(
52 | title="TiPG Features Server",
53 | openapi_url="/api",
54 | docs_url="/api.html",
55 | lifespan=lifespan,
56 | )
57 |
58 | templates = Jinja2Templates(
59 | directory="", # we need to set a dummy directory variable, see https://github.com/encode/starlette/issues/1214
60 | loader=jinja2.ChoiceLoader([jinja2.PackageLoader("tipg", "templates")]),
61 | ) # type: ignore
62 |
63 | ogc_features = OGCFeaturesFactory(
64 | title="TiPG Features Server",
65 | templates=templates,
66 | with_common=True,
67 | )
68 | app.include_router(ogc_features.router)
69 |
70 | app.add_middleware(
71 | CORSMiddleware,
72 | allow_origins=["*"],
73 | allow_credentials=True,
74 | allow_methods=["GET"],
75 | allow_headers=["*"],
76 | )
77 | app.add_middleware(CacheControlMiddleware, cachecontrol="public, max-age=3600")
78 | app.add_middleware(CompressionMiddleware)
79 | app.add_middleware(
80 | CatalogUpdateMiddleware,
81 | func=register_collection_catalog,
82 | ttl=300,
83 | db_settings=db_settings,
84 | )
85 |
86 | add_exception_handlers(app, DEFAULT_STATUS_CODES)
87 |
88 |
89 | @app.get(
90 | "/healthz",
91 | description="Health Check.",
92 | summary="Health Check.",
93 | operation_id="healthCheck",
94 | tags=["Health Check"],
95 | )
96 | def ping():
97 | """Health check."""
98 | return {"ping": "pong!"}
99 |
100 | ```
101 |
--------------------------------------------------------------------------------
/docs/src/advanced/ogc_tiles_server.md:
--------------------------------------------------------------------------------
1 |
2 | The `TiPg` default application comes with both OGC Tiles and Features endpoints, but some users might want only one or the other. `TiPg` is built around the notion of endpoint [factories](/tipg/user_guide/factories/), which makes it *easy* to build custom applications with the desired set of endpoints.
3 |
4 | The code below shows how to create a simple OGC Vector tiles service.
5 |
6 | ```python
7 | """OGC Vector Tiles service."""
8 |
9 | from contextlib import asynccontextmanager
10 |
11 | import jinja2
12 |
13 | from tipg.collections import register_collection_catalog
14 | from tipg.database import close_db_connection, connect_to_db
15 | from tipg.errors import DEFAULT_STATUS_CODES, add_exception_handlers
16 | from tipg.factory import OGCTilesFactory
17 | from tipg.middleware import CacheControlMiddleware, CatalogUpdateMiddleware
18 | from tipg.settings import CustomSQLSettings, DatabaseSettings, PostgresSettings
19 |
20 | from fastapi import FastAPI
21 |
22 | from starlette.middleware.cors import CORSMiddleware
23 | from starlette.templating import Jinja2Templates
24 | from starlette_cramjam.middleware import CompressionMiddleware
25 |
26 | postgres_settings = PostgresSettings()
27 | db_settings = DatabaseSettings()
28 | custom_sql_settings = CustomSQLSettings()
29 |
30 |
31 | @asynccontextmanager
32 | async def lifespan(app: FastAPI):
33 | """FastAPI Lifespan."""
34 | # Create Connection Pool
35 | await connect_to_db(
36 | app,
37 | settings=postgres_settings,
38 | schemas=db_settings.schemas,
39 | user_sql_files=custom_sql_settings.sql_files,
40 | )
41 |
42 | # Register Collection Catalog
43 | await register_collection_catalog(
44 | app,
45 | db_settings=db_settings,
46 | )
47 |
48 | yield
49 |
50 | # Close the Connection Pool
51 | await close_db_connection(app)
52 |
53 |
54 | app = FastAPI(
55 | title="TiPG Vector Tiles Server",
56 | openapi_url="/api",
57 | docs_url="/api.html",
58 | lifespan=lifespan,
59 | )
60 |
61 | templates = Jinja2Templates(
62 | directory="", # we need to set a dummy directory variable, see https://github.com/encode/starlette/issues/1214
63 | loader=jinja2.ChoiceLoader([jinja2.PackageLoader("tipg", "templates")]),
64 | ) # type: ignore
65 |
66 | ogc_tiles = OGCTilesFactory(
67 | title="TiPG Vector Tiles Server",
68 | templates=templates,
69 | with_common=True,
70 | with_viewer=True,
71 | )
72 | app.include_router(ogc_tiles.router)
73 |
74 | app.add_middleware(
75 | CORSMiddleware,
76 | allow_origins=["*"],
77 | allow_credentials=True,
78 | allow_methods=["GET"],
79 | allow_headers=["*"],
80 | )
81 | app.add_middleware(CacheControlMiddleware, cachecontrol="public, max-age=3600")
82 | app.add_middleware(CompressionMiddleware)
83 | app.add_middleware(
84 | CatalogUpdateMiddleware,
85 | func=register_collection_catalog,
86 | ttl=300,
87 | db_settings=db_settings,
88 | )
89 |
90 | add_exception_handlers(app, DEFAULT_STATUS_CODES)
91 |
92 |
93 | @app.get(
94 | "/healthz",
95 | description="Health Check.",
96 | summary="Health Check.",
97 | operation_id="healthCheck",
98 | tags=["Health Check"],
99 | )
100 | def ping():
101 | """Health check."""
102 | return {"ping": "pong!"}
103 |
104 | ```
105 |
--------------------------------------------------------------------------------
/docs/src/benchmark.html:
--------------------------------------------------------------------------------
1 | <!-- Interactive benchmark results page: scripts and markup stripped. -->
86 | Benchmarks
102 | Download data as JSON
--------------------------------------------------------------------------------
/docs/src/contributing.md:
--------------------------------------------------------------------------------
1 | ../../CONTRIBUTING.md
--------------------------------------------------------------------------------
/docs/src/img/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/developmentseed/tipg/871ee56e38281dbfeec493fc20d08b7b60e922cd/docs/src/img/favicon.ico
--------------------------------------------------------------------------------
/docs/src/img/logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/developmentseed/tipg/871ee56e38281dbfeec493fc20d08b7b60e922cd/docs/src/img/logo.png
--------------------------------------------------------------------------------
/docs/src/index.md:
--------------------------------------------------------------------------------
1 | ../../README.md
--------------------------------------------------------------------------------
/docs/src/release-notes.md:
--------------------------------------------------------------------------------
1 | ../../CHANGES.md
--------------------------------------------------------------------------------
/docs/src/user_guide/configuration.md:
--------------------------------------------------------------------------------
1 |
2 | `tipg` uses [starlette](https://www.starlette.io/config/)'s configuration pattern, which makes use of environment variables and/or a `.env` file.
3 |
4 | An example `.env` file can be found in [.env.example](https://github.com/developmentseed/tipg/blob/main/.env.example)
5 |
6 | ## DB connection settings
7 |
8 | class: `tipg.settings.PostgresSettings`
9 |
10 | #### Database address
11 |
12 | You can either pass the Postgres DSN using `DATABASE_URL`, or set each specific value for username, password, host, port and database name.
13 |
14 | - **POSTGRES_USER** (str): Username
15 | - **POSTGRES_PASS** (str): Password
16 | - **POSTGRES_HOST** (str): Hostname (e.g `127.0.0.1`)
17 | - **POSTGRES_PORT** (str): Port number (e.g `5432`)
18 | - **POSTGRES_DBNAME** (str): Database name (e.g `postgis`)
19 |
20 | ```bash
21 | POSTGRES_USER=username
22 | POSTGRES_PASS=password
23 | POSTGRES_HOST=0.0.0.0
24 | POSTGRES_PORT=5432
25 | POSTGRES_DBNAME=postgis
26 | ```
27 |
28 | Or
29 |
30 | - **DATABASE_URL** (str): Full postgres DSN in form of `postgresql://user[:password]@[host][:port][/dbname]`
31 |
32 | ```bash
33 | DATABASE_URL=postgresql://username:password@0.0.0.0:5432/postgis
34 | ```
35 |
36 | #### Connection Pools configuration
37 |
38 | - **DB_MIN_CONN_SIZE** (int): Number of connections the pool will be initialized with. Default is `1`
39 | - **DB_MAX_CONN_SIZE** (int): Max number of connections in the pool. Default is `10`
40 | - **DB_MAX_QUERIES** (int): Number of queries after which a connection is closed and replaced with a new connection. Default is `50000`
41 | - **DB_MAX_INACTIVE_CONN_LIFETIME** (float): Number of seconds after which inactive connections in the pool will be closed. Pass 0 to disable this mechanism. Default is `300`
42 |
43 | ref: https://magicstack.github.io/asyncpg/current/api/index.html#connection-pools
44 |
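These variables are read by `tipg.settings.PostgresSettings` when the settings object is created. A minimal sketch of providing them from Python (the values are illustrative):

```python
# Illustrative only: connection-pool environment variables are picked up by
# PostgresSettings at instantiation time.
import os

from tipg.settings import PostgresSettings

os.environ["DATABASE_URL"] = "postgresql://username:password@0.0.0.0:5432/postgis"
os.environ["DB_MIN_CONN_SIZE"] = "1"
os.environ["DB_MAX_CONN_SIZE"] = "10"

settings = PostgresSettings()
```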
45 |
46 | ## DB visibility settings
47 |
48 | class: `tipg.settings.DatabaseSettings`
49 |
50 | prefix: **`TIPG_DB_`**
51 |
52 | - **SCHEMAS** (list of string): Named schemas in which `tipg` will look for `Tables` or `Functions`. Default is `["public"]`
53 | - **SPATIAL_EXTENT** (bool): Calculate spatial extent of records. Default is `True`.
54 | - **DATETIME_EXTENT** (bool): Calculate temporal extent of records. Default is `True`.
55 |
56 | #### `Tables`
57 |
58 | - **TABLES** (list of string): Allow only specific `Tables`
59 | - **EXCLUDE_TABLES** (list of string): Excludes specific `Tables`
60 | - **EXCLUDE_TABLE_SCHEMAS** (list of string): Excludes `Schemas` for `Tables` lookup.
61 | - **ONLY_SPATIAL_TABLES** (bool): Only expose `Tables` that have a `geometry` or `geography` column. Default is `True`.
62 |
63 | #### `Functions`
64 |
65 | - **FUNCTIONS** (list of string): Allow only specific `Functions`
66 | - **EXCLUDE_FUNCTIONS** (list of string): Exclude specific `Functions`
67 | - **EXCLUDE_FUNCTION_SCHEMAS** (list of string): Exclude `Schemas` from the `Functions` lookup.
68 |
69 | ```bash
70 | TIPG_DB_SCHEMAS='["myschema", "public"]'
71 | TIPG_DB_EXCLUDE_FUNCTION_SCHEMAS='["public"]'
72 | ```
73 |
74 | ## Table settings
75 |
76 | class: `tipg.settings.TableSettings`
77 |
78 | prefix: **`TIPG_`**
79 |
80 | - **DATETIME_EXTENT** (bool): Fetch the datetime extent by going through all rows. Default is `True`
81 | - **FALLBACK_KEY_NAMES** (list of string): Primary Key names to look for in the tables. Default is `["ogc_fid", "id", "pkey", "gid"]`
82 | - **SORT_COLUMNS** (bool): Sort the `columns` for a table alphabetically. Default is `True`.
83 | - **TABLE_CONFIG** (dict of `TableConfig`)
84 | - `TABLE_CONFIG__{schemaId}_{tableId}__GEOMCOL` (str): Table's geometry/geography column name
85 | - `TABLE_CONFIG__{schemaId}_{tableId}__DATETIMECOL` (str): Table's datetime column name
86 | - `TABLE_CONFIG__{schemaId}_{tableId}__PK` (str): Table's primary key
87 | - `TABLE_CONFIG__{schemaId}_{tableId}__PROPERTIES` (list of string): Select specific properties from the table (used for filtering and output)
88 |
89 | ```bash
90 | TIPG_TABLE_CONFIG__pgstac_items__PK=id
91 | ```
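    | 
    | A fuller sketch, assuming a hypothetical `public.roads` table with a `geom` geometry column, an `updated_at` timestamp, an `id` primary key and a `name` property (list values use the JSON-style syntax shown above):
    | 
    | ```bash
    | # hypothetical table and column names, for illustration only
    | TIPG_TABLE_CONFIG__public_roads__GEOMCOL=geom
    | TIPG_TABLE_CONFIG__public_roads__DATETIMECOL=updated_at
    | TIPG_TABLE_CONFIG__public_roads__PK=id
    | TIPG_TABLE_CONFIG__public_roads__PROPERTIES='["id", "name", "updated_at"]'
    | ```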
92 |
93 | ## Custom SQL Function
94 |
95 | `TiPG` allows exposing custom `SQL Functions` as collections (see https://developmentseed.org/tipg/advanced/functions/)
96 |
97 | class: `tipg.settings.CustomSQLSettings`
98 |
99 | prefix: **`TIPG_`**
100 |
101 | - **CUSTOM_SQL_DIRECTORY** (str): Path to the directory containing `.sql` files.
102 |
103 | ```bash
104 | TIPG_CUSTOM_SQL_DIRECTORY=/tmp/functions
105 | ```
106 |
107 | Note: On each `connection` creation, `TiPG` will look for all the files within the directory and *execute* them within the `pg_temp` schema.
108 |
109 | ## Vector Tiles settings
110 |
111 | class: `tipg.settings.MVTSettings`
112 |
113 | prefix: **`TIPG_`**
114 |
115 | - **TILE_RESOLUTION** (int): Tile extent (resolution) used when encoding MVT. Default is `4096`
116 | - **TILE_BUFFER** (int): Tile buffer size. Default is `256`
117 | - **TILE_CLIP** (bool): Clip geometries to the tile extent. Default is `True`
118 | - **MAX_FEATURE_PER_TILE** (int): Maximum number of features returned per tile. Default is `10000`
119 | - **SET_MVT_LAYERNAME** (bool): Use the Table ID as the MVT layer name. Default is `False` (the layer is named `default`)
120 |
121 | ```bash
122 | TIPG_MAX_FEATURE_PER_TILE=1000
123 | ```
124 |
125 | ## Features settings
126 |
127 | class: `tipg.settings.FeaturesSettings`
128 |
129 | prefix: **`TIPG_`**
130 |
131 | - **DEFAULT_FEATURES_LIMIT** (int): Set the default `limit` value for the `/items` endpoint. Default is `10`
132 | - **MAX_FEATURES_PER_QUERY** (int): Set the maximum number of features the `/items` endpoint can return. Default is `10000`.
133 |
134 | ```bash
135 | TIPG_DEFAULT_FEATURES_LIMIT=1000 TIPG_MAX_FEATURES_PER_QUERY=2000
136 | ```
137 |
138 | ## Tile Matrix Sets settings
139 |
140 | class: `tipg.settings.TMSSettings`
141 |
142 | prefix: **`TIPG_`**
143 |
144 | - **DEFAULT_TMS** (str): Default TileMatrixSetId for tiles endpoint. Default is `WebMercatorQuad`
145 | - **DEFAULT_MINZOOM** (int): Default `MinZoom` for tiles endpoints. Default is `0`
146 | - **DEFAULT_MAXZOOM** (int): Default `MaxZoom` for tiles endpoints. Default is `22`
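    | 
    | For example, to change the default zoom range used by the tiles endpoints (illustrative values only):
    | 
    | ```bash
    | # illustrative values only
    | TIPG_DEFAULT_TMS=WebMercatorQuad
    | TIPG_DEFAULT_MINZOOM=0
    | TIPG_DEFAULT_MAXZOOM=18
    | ```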
147 |
148 | ## TiPG Application settings
149 |
150 | Settings to control `TiPG`'s default FastAPI application.
151 |
152 | class: `tipg.settings.APISettings`
153 |
154 | prefix: **`TIPG_`**
155 |
156 | - **NAME** (str): Set a custom name (title) for the `TiPG` application. Default is `TiPg: OGC Features and Tiles API`
157 | - **DEBUG** (bool): Default is `False`
158 | - **CORS_ORIGIN** (str): Default is `*`
159 | - **CACHECONTROL** (str): Default is `public, max-age=3600`
160 | - **TEMPLATE_DIRECTORY** (str): Path to a custom template directory used to override the default HTML templates.
161 | - **ROOT_PATH** (str): A path prefix handled by a proxy that is not seen by the application but is seen by external clients.
162 | - **ADD_TILES_VIEWER** (bool): Default is `True`
163 | - **CATALOG_TTL** (int, in seconds): Tables/Functions catalog **Time To Live** cache (defaults to 300 seconds).
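    | 
    | For example, to rename the application, restrict CORS and shorten the catalog cache (illustrative values only):
    | 
    | ```bash
    | # illustrative values only
    | TIPG_NAME="My Features and Tiles API"
    | TIPG_CORS_ORIGIN="https://example.com"
    | TIPG_CATALOG_TTL=60
    | ```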
164 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [project]
2 | name = "tipg"
3 | description = "Simple and Fast Geospatial OGC Features and Tiles API for PostGIS."
4 | readme = "README.md"
5 | requires-python = ">=3.9"
6 | license = {file = "LICENSE"}
7 | authors = [
8 | {name = "Vincent Sarago", email = "vincent@developmentseed.org"},
9 | {name = "David Bitner", email = "david@developmentseed.org"},
10 | ]
11 | keywords = ["FastAPI", "POSTGIS", "OGC Features", "OGC Tiles"]
12 | classifiers = [
13 | "Intended Audience :: Information Technology",
14 | "Intended Audience :: Science/Research",
15 | "License :: OSI Approved :: BSD License",
16 | "Programming Language :: Python :: 3.9",
17 | "Programming Language :: Python :: 3.10",
18 | "Programming Language :: Python :: 3.11",
19 | "Programming Language :: Python :: 3.12",
20 | "Programming Language :: Python :: 3.13",
21 | "Topic :: Scientific/Engineering :: GIS",
22 | ]
23 | dynamic = ["version"]
24 | dependencies = [
25 | "orjson",
26 | "asyncpg>=0.23.0",
27 | "buildpg>=0.3",
28 | "fastapi>=0.100.0",
29 | "jinja2>=2.11.2,<4.0.0",
30 | "morecantile>=5.0,<7.0",
31 | "pydantic>=2.4,<3.0",
32 | "pydantic-settings~=2.0",
33 | "geojson-pydantic>=1.0,<3.0",
34 | "pygeofilter>=0.2.0,<0.3.0",
35 | "ciso8601~=2.3",
36 | "starlette-cramjam>=0.4,<0.5",
37 | ]
38 |
39 | [project.optional-dependencies]
40 | test = [
41 | "pytest",
42 | "pytest-cov",
43 | "pytest-asyncio",
44 | "pytest-benchmark",
45 | "httpx",
46 | "pytest-postgresql",
47 | "mapbox-vector-tile>=2.1",
48 | "numpy",
49 | ]
50 | dev = [
51 | "pre-commit",
52 | "bump-my-version",
53 | ]
54 | server = [
55 | "uvicorn[standard]>=0.12.0,<0.19.0",
56 | ]
57 | docs = [
58 | "black>=23.10.1",
59 | "mkdocs",
60 | "mkdocs-material[imaging]>=9.5",
61 | "griffe-inherited-docstrings>=1.0.0",
62 | "mkdocstrings[python]>=0.25.1",
63 | ]
64 |
65 | [project.urls]
66 | Homepage = "https://developmentseed.org/tipg"
67 | Source = "https://github.com/developmentseed/tipg"
68 | Documentation = "https://developmentseed.org/tipg/"
69 |
70 | [tool.hatch.version]
71 | path = "tipg/__init__.py"
72 |
73 | [tool.hatch.build.targets.sdist]
74 | exclude = [
75 | "/tests",
76 | "/dockerfiles",
77 | "/docs",
78 | "/data",
79 | "docker-compose.yml",
80 | "CONTRIBUTING.md",
81 | "CHANGES.md",
82 | ".pytest_cache",
83 | ".github",
84 | ".bumpversion.cfg",
85 | ".env.example",
86 | ".flake8",
87 | ".gitignore",
88 | ".pre-commit-config.yaml",
89 | ]
90 |
91 | [build-system]
92 | requires = ["hatchling"]
93 | build-backend = "hatchling.build"
94 |
95 | [tool.coverage.run]
96 | branch = true
97 | parallel = true
98 |
99 | [tool.coverage.report]
100 | exclude_lines = [
101 | "no cov",
102 | "if __name__ == .__main__.:",
103 | "if TYPE_CHECKING:",
104 | ]
105 |
106 | [tool.isort]
107 | profile = "black"
108 | known_first_party = ["tipg"]
109 | known_third_party = ["geojson_pydantic", "buildpg", "pydantic"]
110 | forced_separate = [
111 | "fastapi",
112 | "starlette",
113 | ]
114 | default_section = "THIRDPARTY"
115 |
116 | [tool.mypy]
117 | no_strict_optional = "True"
118 |
119 | [tool.ruff.lint]
120 | select = [
121 | "D1", # pydocstyle errors
122 | "E", # pycodestyle errors
123 | "W", # pycodestyle warnings
124 | "F", # flake8
125 | "C", # flake8-comprehensions
126 | "B", # flake8-bugbear
127 | ]
128 | ignore = [
129 | "E501", # line too long, handled by black
130 | "B008", # do not perform function calls in argument defaults
131 | "B905", # ignore zip() without an explicit strict= parameter, only support with python >3.10
132 | ]
133 |
134 |
135 | [tool.pytest.ini_options]
136 | filterwarnings = [
137 | "ignore::morecantile.errors.PointOutsideTMSBounds",
138 | "ignore:You will likely lose important projection*:UserWarning",
139 | "ignore:Call to deprecated create function *:DeprecationWarning",
140 | ]
141 |
142 | [tool.bumpversion]
143 | current_version = "1.1.0"
144 |
145 | search = "{current_version}"
146 | replace = "{new_version}"
147 | regex = false
148 | tag = true
149 | commit = true
150 | tag_name = "{new_version}"
151 |
152 | [[tool.bumpversion.files]]
153 | filename = "tipg/__init__.py"
154 | search = '__version__ = "{current_version}"'
155 | replace = '__version__ = "{new_version}"'
156 |
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
1 | """TiPG tests."""
2 |
--------------------------------------------------------------------------------
/tests/benchmarks.py:
--------------------------------------------------------------------------------
1 | """Benchmark Tipg Endpoints."""
2 |
3 | import pytest
4 |
5 |
6 | @pytest.mark.parametrize("limit", [1, 10])
7 | @pytest.mark.parametrize("format", ["json", "html"])
8 | def test_benchmark_collections(benchmark, format, limit, app):
9 | """Benchmark collections endpoint."""
10 | params = {"f": format, "limit": limit}
11 |
12 | def f(p):
13 | return app.get("/collections", params=p)
14 |
15 | benchmark.group = f"Collections - {format}"
16 |
17 | response = benchmark(f, params)
18 | assert response.status_code == 200
19 |
20 |
21 | @pytest.mark.parametrize("format", ["json", "html"])
22 | def test_benchmark_collection(benchmark, format, app):
23 | """Benchmark collection endpoint."""
24 | params = {"f": format}
25 |
26 | def f(p):
27 | return app.get("/collections/public.landsat_wrs", params=p)
28 |
29 | benchmark.group = f"Collection - {format}"
30 |
31 | response = benchmark(f, params)
32 | assert response.status_code == 200
33 |
34 |
35 | def test_benchmark_queryables(benchmark, app):
36 | """Benchmark queryables endpoint."""
37 |
38 | def f():
39 | return app.get(
40 | "/collections/public.landsat_wrs/queryables", params={"f": "schemajson"}
41 | )
42 |
43 | benchmark.group = "Queryables"
44 |
45 | response = benchmark(f)
46 | assert response.status_code == 200
47 |
48 |
49 | @pytest.mark.parametrize("limit", [1, 10, 50, 100, 200, 250])
50 | @pytest.mark.parametrize("format", ["geojson", "csv", "html"])
51 | def test_benchmark_items(benchmark, format, limit, app):
52 | """Benchmark items endpoint."""
53 | params = {"f": format, "limit": limit}
54 |
55 | def f(p):
56 | return app.get("/collections/public.landsat_wrs/items", params=p)
57 |
58 | benchmark.group = f"Items: {format}"
59 |
60 | response = benchmark(f, params)
61 | assert response.status_code == 200
62 |
63 |
64 | @pytest.mark.parametrize("name", ["NewfoundlandandLabrador", "Saskatchewan"])
65 | @pytest.mark.parametrize("format", ["geojson", "html"])
66 | def test_benchmark_item(benchmark, format, name, app):
67 | """Benchmark big item."""
68 |
69 | params = {"f": format, "prnom": name}
70 |
71 | def f(p):
72 | return app.get("/collections/public.canada/items", params=p)
73 |
74 | benchmark.group = "Big Feature"
75 |
76 | response = benchmark(f, params)
77 | assert response.status_code == 200
78 | if format == "geojson":
79 | assert response.json()["features"][0]["properties"]["prnom"] == name
80 |
81 |
82 | @pytest.mark.parametrize("tms", ["WGS1984Quad", "WebMercatorQuad"])
83 | @pytest.mark.parametrize("tile", ["0/0/0", "4/8/5", "6/33/25"])
84 | def test_benchmark_tile(benchmark, tile, tms, app):
85 | """Benchmark items endpoint."""
86 |
87 | def f(input_tms, input_tile):
88 | return app.get(
89 | f"/collections/public.landsat_wrs/tiles/{input_tms}/{input_tile}"
90 | )
91 |
92 | benchmark.group = f"Tiles-{tms}"
93 |
94 | response = benchmark(f, tms, tile)
95 | assert response.status_code == 200
96 |
97 |
98 | @pytest.mark.parametrize(
99 | "endpoint",
100 | [
101 | "/tileMatrixSets",
102 | "/tileMatrixSets/WGS1984Quad",
103 | "/tileMatrixSets/WebMercatorQuad",
104 | "/collections/public.landsat_wrs/tiles",
105 | "/collections/public.landsat_wrs/tiles/WGS1984Quad",
106 | "/collections/public.landsat_wrs/tiles/WebMercatorQuad",
107 | ],
108 | )
109 | def test_benchmark_tilematrixset_endpoints(benchmark, endpoint, app):
110 | """Benchmark tilematrixset endpoint."""
111 |
112 | def f(endpoint):
113 | return app.get(endpoint, params={"f": "json"})
114 |
115 | response = benchmark(f, endpoint)
116 | assert response.status_code == 200
117 |
--------------------------------------------------------------------------------
/tests/fixtures/functions/hexagon.sql:
--------------------------------------------------------------------------------
1 | CREATE FUNCTION hexagons(
2 | IN size int DEFAULT 10,
3 | IN bounds geometry DEFAULT 'srid=4326;POLYGON((-180 -90,-180 90,180 90,180 -90,-180 -90))'::geometry,
4 | OUT geom geometry,
5 | OUT i integer,
6 | OUT j integer
7 | ) RETURNS SETOF RECORD AS $$
8 | SELECT * FROM st_hexagongrid(size, bounds);
9 | $$ LANGUAGE SQL IMMUTABLE PARALLEL SAFE;
10 |
11 | -- Hexagon function but with `g` as geometry input
12 | CREATE FUNCTION hexagons_g(
13 | IN size int DEFAULT 10,
14 | IN g geometry DEFAULT 'srid=4326;POLYGON((-180 -90,-180 90,180 90,180 -90,-180 -90))'::geometry,
15 | OUT geom geometry,
16 | OUT i integer,
17 | OUT j integer
18 | ) RETURNS SETOF RECORD AS $$
19 | SELECT * FROM st_hexagongrid(size, g);
20 | $$ LANGUAGE SQL IMMUTABLE PARALLEL SAFE;
21 |
--------------------------------------------------------------------------------
/tests/fixtures/functions/landsat_poly_centroid.sql:
--------------------------------------------------------------------------------
1 | CREATE OR REPLACE VIEW landsat_centroids AS
2 | SELECT ogc_fid, pr, st_pointonsurface(geom) as geom, path, row
3 | FROM public.landsat_wrs;
4 |
--------------------------------------------------------------------------------
/tests/fixtures/functions/landsat_tile.sql:
--------------------------------------------------------------------------------
1 | CREATE FUNCTION landsat(
2 | IN z int,
3 | IN x int,
4 | IN y int,
5 | IN p int default 0,
6 | OUT path_row text,
7 | OUT grid_path int,
8 | OUT grid_row int,
9 | OUT geom geometry
10 | ) RETURNS SETOF RECORD AS $$
11 | SELECT pr as path_row, path as grid_path, row AS grid_row, geom FROM public.landsat_wrs WHERE path = p AND ST_Intersects(geom, ST_Transform(ST_TileEnvelope(z, x, y), 4326));
12 | $$ LANGUAGE SQL IMMUTABLE PARALLEL SAFE;
13 |
14 |
--------------------------------------------------------------------------------
/tests/fixtures/functions/squares.sql:
--------------------------------------------------------------------------------
1 | CREATE FUNCTION squares(
2 | IN size int DEFAULT 10,
3 | IN bounds geometry DEFAULT 'srid=4326;POLYGON((-180 -90,-180 90,180 90,180 -90,-180 -90))'::geometry,
4 | OUT geom geometry,
5 | OUT i integer,
6 | OUT j integer
7 | ) RETURNS SETOF RECORD AS $$
8 | SELECT * FROM st_squaregrid(size, bounds);
9 | $$ LANGUAGE SQL IMMUTABLE PARALLEL SAFE;
10 |
--------------------------------------------------------------------------------
/tests/fixtures/my_data.sql:
--------------------------------------------------------------------------------
1 | SET standard_conforming_strings = OFF;
2 | DROP TABLE IF EXISTS "public"."my_data" CASCADE;
3 | DELETE FROM geometry_columns WHERE f_table_name = 'my_data' AND f_table_schema = 'public';
4 | BEGIN;
5 | CREATE TABLE "public"."my_data" ( "ogc_fid" SERIAL, CONSTRAINT "my_data_pk" PRIMARY KEY ("ogc_fid") );
6 | SELECT AddGeometryColumn('public','my_data','geom',4326,'GEOMETRY',2);
7 | CREATE INDEX "my_data_geom_geom_idx" ON "public"."my_data" USING GIST ("geom");
8 | ALTER TABLE "public"."my_data" ADD COLUMN "id" VARCHAR;
9 | ALTER TABLE "public"."my_data" ADD COLUMN "datetime" TIMESTAMP;
10 | ALTER TABLE "public"."my_data" ADD COLUMN "decimal" DECIMAL;
11 | ALTER TABLE "public"."my_data" ADD COLUMN "numeric" NUMERIC;
12 | ALTER TABLE "public"."my_data" ADD COLUMN "uuid" UUID;
13 | INSERT INTO "public"."my_data" ("geom" , "id", "datetime", "decimal", "numeric", "uuid") VALUES ('0103000020E6100000010000001B0000003670CC05599B25C03A92CB7F483F54408907944DB9F221C0D9CEF753E315544069D68681BE5B22C0355D864BD1145440984C2580F45C27C062327530C20754409CB396CA942C30C08E6EC42E50F05340F32225E11DCB30C07C98C2D614ED5340075F984C15FC30C0075F984C15EC53400AA1BD9D6AD732C03439A530F50B5440D8BFC6C0170533C00414E74C050F54407650100F7C0E33C0B199D586A60F5440A01BF45DE29634C0B61719B9F6295440838D3D254B5D35C0DC611EC044375440B8A6A26802F135C06705618A2C4154407CBD21E2CF3136C09B1B77FC844554402CD49AE61D3736C076711B0DE045544039117CFD650136C001AEC11005475440DC27DD0AB9C935C0F45E61C1344854406182187FE9BA35C03AF2E08A854854400736A0D273F130C050CF32FAA1625440ED137AA9497230C0441F419D576554401D9FC06CB06E2BC0B1930B183C745440017C2AECC5F92AC01E2006F67A7554401895D40968822AC0986E1283C07654405D44620EE0782AC0E00B92AC54765440FAACE2F3F95C27C0CDCE93B2275354400D2FBCF61DD226C0385BB99C044D54403670CC05599B25C03A92CB7F483F5440', '0', '2004-10-19 10:23:54', 1.25, 1.25, '9cf5fce4-0b4a-47cc-98ef-83b958c03c2b'), ('0103000020E61000000100000019000000984067B8143E3DC043C2B8B8F40B5440ACEF9DFAC14B3DC0E950B3BEBB0C544070CE88D2DE503DC01B2FDD24060D544034C8A112A4243DC064CC7707650E54409232AD9551103DC079704A40060F5440A630DBCBFBF43CC0E22ABE1BDF0F5440AC95A5A7DFA638C09E34007606325440FE987A2A9D7238C05D165F0DA5335440D1BF9E64C80A38C0FF6D3AC6DC3654409ACC3E07335D36C0578150C82C4454407CBD21E2CF3136C09B1B77FC8445544039117CFD650136C001AEC110054754401EA7E8482ECF35C07F6ABC7493485440DC27DD0AB9C935C0F45E61C134485440A2F3387764C135C09C775737A44754405526CE34BBDB34C047F7C465133854408DF37646C5EA33C0F10FDC85BE2754406D6485236BA431C08C72AF36460054403EE8D9ACFA9C30C07CF2B0506BEE5340F32225E11DCB30C07C98C2D614ED5340FE41CA2BA27737C016B27D9C8ABB5340C442AD69DEA137C05F07CE1951BA5340F9CBEEC9C30A38C07E078C8947C05340898D7238194D38C059C5B4D10CC45340984067B8143E3DC043C2B8B8F40B5440', '1', '2004-10-20 10:23:54', 0.8, 0.8, 'f8c0e812-ca5b-4112-917e-60ad7ec3b376'), ('0103000020E61000000100000013000000C0155236C40A38C052F1FFE1D8C75340B244B5A16EC837C014EBB5CD0CC4534073D712F2414F37C0D3BCE3141DBD5340FE41CA2BA27737C016B27D9C8ABB5340A2728C64C30A38C03BFB4402D0B553400C6AB4D7723A3DC0BDA377861D82534058CA32C4B15E3DC062105839B48053402A2097D1F19641C0EAE96F4E58CC5340F0A7C64B379941C07F6ABC7493CC5340E11AE2531A8741C01F2670501DCE5340CED31A45F57241C03EC92059D3CF534009E08D47F1E83FC0EAC3384350F05340DFE755925F713EC036A2858243005440ACEF9DFAC14B3DC0E950B3BEBB0C544034C8A112A4243DC064CC7707650E5440F602E719D4063DC0AE877727A90F54400A68226C78FA3CC0234A7B832F105440A630DBCBFBF43CC0E22ABE1BDF0F5440C0155236C40A38C052F1FFE1D8C75340', '2', '2004-10-21 10:23:54', -5.68, -5.68, '2ea9951d-3aa9-499b-ac5e-2d82cdaf7aa8'), ('0103000020E610000001000000110000001B2CBE53855542C051F99E0E805D534049A5CD2EAE0644C03857A7D846865340462575029A0844C0A60A46257586534063B4EEABC4F943C08D992E511D8853409C72BC6BC5E843C0920AAB5C038A5340721D3749863342C03D0220C7DABA53402A2097D1F19641C0EAE96F4E58CC5340E11AE2531A8741C01F2670501DCE534068226C787A7541C0075F984C15D05340CED31A45F57241C03EC92059D3CF534048E17A14AE173DC06B2BF697DD8353400C6AB4D7723A3DC0BDA377861D825340A03E0335AD283FC0314A54553C6953409C6F1F2DEA1541C00EA6095E6A425340BEC11726532541C0BE9F1A2FDD405340EB51B81E853342C0302C67AA4C5A53401B2CBE53855542C051F99E0E805D5340', '3', '2004-10-22 10:23:54', 98, 98, '6b6a659d-8483-4e71-9299-9fb717a9ab17'), 
('0103000020E610000001000000110000000A4C8422590E46C0B656FB86F03B5340D5E76A2BF60F46C0075F984C153C5340FA28B2217F0346C0CE0A257ADB3D5340BEE6287052F545C01AA33BF2DF3F5340F25A937BB7D244C009CB92853C69534049A5CD2EAE0644C03857A7D84686534063B4EEABC4F943C08D992E511D88534034A2B437F8EA43C0F54A5986388A53409C72BC6BC5E843C0920AAB5C038A534050AF9465883342C0363B85F6B5605340D43E0032881142C02A5884BF7F5D5340F4FDD478E90641C007F01648504453409C6F1F2DEA1541C00EA6095E6A4253404E4E9C88873342C06DC6E4C7471E53403EDF52396E3443C0DC9EAF2DC7FD524044696FF0854143C032772D211FFC52400A4C8422590E46C0B656FB86F03B5340', '4', '2004-10-23 10:23:54', 7.55526, 7.55526, 'aaffbb93-ddbc-42a0-9f1d-68a9e3f23b15'), ('0103000020E6100000010000000D000000BBE9944235C347C0EBF06E7961EE52406ADE718A8EC447C0D122DBF97EEE5240942D6301ECB947C05B59871F60F0524086CAEEF61AAE47C0BDEF3BBB76F252400A4C8422590E46C0B656FB86F03B5340FA28B2217F0346C0CE0A257ADB3D534057EC2FBB27F745C02B1895D409405340BEE6287052F545C01AA33BF2DF3F53401D386744692743C07958A835CDFF52403EDF52396E3443C0DC9EAF2DC7FD5240B9E39237FD0645C0574B4E2543B552400AD7A3703D1245C03A234A7B83B35240BBE9944235C347C0EBF06E7961EE5240', '5', '2004-10-24 10:23:54', -78.56, null, '0c485db0-0465-44cc-b4d2-3444dc655229');
14 | ALTER TABLE public.my_data ADD COLUMN otherdt timestamptz;
15 | ALTER TABLE public.my_data ADD COLUMN datedt date;
16 | ALTER TABLE public.my_data ADD COLUMN othergeom geometry;
17 | UPDATE my_data SET otherdt=datetime+'1 year'::interval, othergeom=st_pointonsurface(geom);
18 | UPDATE my_data SET datedt=datetime+'3 year'::interval;
19 | CREATE VIEW public.my_data_alt AS SELECT * FROM my_data;
20 | CREATE VIEW public.my_data_date AS SELECT * FROM my_data;
21 | -- Create a copy of my_data but with geography instead of Geometry
22 | CREATE TABLE public.my_data_geo AS SELECT * FROM my_data;
23 | ALTER TABLE public.my_data_geo ALTER COLUMN geom TYPE geography(Polygon,4326) USING ST_Transform(geom,4326)::geography;
24 | COMMIT;
25 |
--------------------------------------------------------------------------------
/tests/fixtures/nongeo_data.sql:
--------------------------------------------------------------------------------
1 | SET standard_conforming_strings = OFF;
2 | DROP TABLE IF EXISTS "public"."nongeo_data" CASCADE;
3 | DELETE FROM geometry_columns WHERE f_table_name = 'nongeo_data' AND f_table_schema = 'public';
4 | BEGIN;
5 | CREATE TABLE "public"."nongeo_data" ( "ogc_fid" SERIAL, CONSTRAINT "nongeo_data_pk" PRIMARY KEY ("ogc_fid") );
6 | ALTER TABLE "public"."nongeo_data" ADD COLUMN "id" VARCHAR;
7 | ALTER TABLE "public"."nongeo_data" ADD COLUMN "datetime" TIMESTAMP WITH TIME ZONE;
8 | INSERT INTO "public"."nongeo_data" ("id", "datetime") VALUES
9 | ('0', '2004-10-19 10:23:54+01:00'),
10 | ('1', '2004-10-20 10:23:54+01:00'),
11 | ('2', '2004-10-21 10:23:54+01:00'),
12 | ('3', '2004-10-22 10:23:54+01:00'),
13 | ('4', '2004-10-23 10:23:54+01:00'),
14 | ('5', '2004-10-24 10:23:54+01:00');
15 | COMMIT;
16 |
--------------------------------------------------------------------------------
/tests/fixtures/templates/collections.html:
--------------------------------------------------------------------------------
1 | {% include "header.html" %}
2 | 
3 | <h1>Custom Collections</h1>
4 | 
5 | <table>
6 |   <thead>
7 |     <tr>
8 |       <th>Title</th>
9 |       <th>Type</th>
10 |       <th>Description</th>
11 |     </tr>
12 |   </thead>
13 |   <tbody>
14 |     {% for collection in response.collections %}
15 |     <tr>
16 |       <td>{{ collection.title or collection.id }}</td>
17 |       <td>{{ collection.itemType }}</td>
18 |       <td>{{ collection.description or collection.title or collection.id }}</td>
19 |     </tr>
20 |     {% endfor %}
21 |   </tbody>
22 | </table>
23 | 
24 | {% include "footer.html" %}
25 | 
--------------------------------------------------------------------------------
/tests/routes/__init__.py:
--------------------------------------------------------------------------------
1 | """timvt route tests."""
2 |
--------------------------------------------------------------------------------
/tests/routes/test_endpoints.py:
--------------------------------------------------------------------------------
1 | """Test endpoints."""
2 |
3 |
4 | def test_landing(app):
5 | """Test / endpoint."""
6 | response = app.get("/")
7 | assert response.status_code == 200
8 | assert response.headers["content-type"] == "application/json"
9 | body = response.json()
10 | assert body["title"] == "TiPg: OGC Features and Tiles API"
11 | assert body["links"]
12 |
13 | response = app.get("/?f=html")
14 | assert response.status_code == 200
15 | assert "text/html" in response.headers["content-type"]
16 | assert "TiPg: OGC Features and Tiles API" in response.text
17 |
18 | # Check accept headers
19 | response = app.get("/", headers={"accept": "text/html"})
20 | assert response.status_code == 200
21 | assert "text/html" in response.headers["content-type"]
22 | assert "TiPg: OGC Features and Tiles API" in response.text
23 |
24 | # accept quality
25 | response = app.get(
26 | "/", headers={"accept": "application/json;q=0.9, text/html;q=1.0"}
27 | )
28 | assert response.status_code == 200
29 | assert "text/html" in response.headers["content-type"]
30 | assert "TiPg: OGC Features and Tiles API" in response.text
31 |
32 | # accept quality but only json is available
33 | response = app.get("/", headers={"accept": "text/csv;q=1.0, application/json"})
34 | assert response.status_code == 200
35 | assert response.headers["content-type"] == "application/json"
36 | body = response.json()
37 | assert body["title"] == "TiPg: OGC Features and Tiles API"
38 |
39 | # accept quality with wildcard; the default (json) is returned
40 | response = app.get("/", headers={"accept": "text/csv;q=1.0, */*"})
41 | assert response.status_code == 200
42 | assert response.headers["content-type"] == "application/json"
43 | body = response.json()
44 | assert body["title"] == "TiPg: OGC Features and Tiles API"
45 |
46 | # Invalid accept, return default
47 | response = app.get("/", headers={"accept": "text/htm"})
48 | assert response.status_code == 200
49 | assert response.headers["content-type"] == "application/json"
50 | body = response.json()
51 | assert body["title"] == "TiPg: OGC Features and Tiles API"
52 | assert body["links"]
53 |
54 | # make sure `?f=` has priority over headers
55 | response = app.get("/?f=json", headers={"accept": "text/html"})
56 | assert response.status_code == 200
57 | assert response.headers["content-type"] == "application/json"
58 | body = response.json()
59 | assert body["title"] == "TiPg: OGC Features and Tiles API"
60 |
61 |
62 | def test_docs(app):
63 | """Test /api endpoint."""
64 | response = app.get("/api")
65 | assert response.status_code == 200
66 | assert response.headers["content-type"] == "application/json"
67 | body = response.json()
68 | assert body["openapi"]
69 |
70 | response = app.get("/api.html")
71 | assert response.status_code == 200
72 | assert "text/html" in response.headers["content-type"]
73 |
74 |
75 | def test_conformance(app):
76 | """Test /conformance endpoint."""
77 | response = app.get("/conformance")
78 | assert response.status_code == 200
79 | assert response.headers["content-type"] == "application/json"
80 | body = response.json()
81 | assert body["conformsTo"]
82 |
83 | response = app.get("/conformance?f=html")
84 | assert response.status_code == 200
85 | assert "text/html" in response.headers["content-type"]
86 | assert "Conformance" in response.text
87 |
--------------------------------------------------------------------------------
/tests/routes/test_geography.py:
--------------------------------------------------------------------------------
1 | """test tipg endpoint with table having a geography column."""
2 |
3 | import mapbox_vector_tile
4 | import numpy
5 |
6 |
7 | def test_geography_column(app):
8 | """Test endpoints with table having geography column."""
9 | response = app.get("/collections/public.my_data_geo")
10 | assert response.status_code == 200
11 | assert response.headers["content-type"] == "application/json"
12 | body = response.json()
13 | assert body["id"] == "public.my_data_geo"
14 |
15 | response = app.get("/collections/public.my_data_geo/items")
16 | assert response.status_code == 200
17 | assert response.headers["content-type"] == "application/geo+json"
18 | body = response.json()
19 | assert body["type"] == "FeatureCollection"
20 | assert body["id"] == "public.my_data_geo"
21 | assert body["title"] == "public.my_data_geo"
22 | assert body["links"]
23 | assert body["numberMatched"] == 6
24 | assert body["numberReturned"] == 6
25 | assert body["features"][0]["geometry"]["type"] == "Polygon"
26 |
27 | response = app.get(
28 | "/collections/public.my_data_geo/tiles/WebMercatorQuad/tilejson.json"
29 | )
30 | assert response.status_code == 200
31 | resp_json = response.json()
32 | assert resp_json["name"] == "public.my_data_geo"
33 | assert resp_json["minzoom"] == 5
34 | assert resp_json["maxzoom"] == 12
35 | numpy.testing.assert_almost_equal(
36 | resp_json["bounds"], [-47.5356, 74.8049, -8.97407, 81.8555]
37 | )
38 |
39 | response = app.get("/collections/public.my_data_geo/tiles/WebMercatorQuad/5/11/5")
40 | assert response.status_code == 200
41 | decoded = mapbox_vector_tile.decode(response.content)
42 | assert len(decoded["default"]["features"])
43 |
--------------------------------------------------------------------------------
/tests/routes/test_item.py:
--------------------------------------------------------------------------------
1 | """Test /item endpoints."""
2 |
3 | from tipg.model import Item
4 |
5 |
6 | def test_item(app):
7 | """Test /items/{item id} endpoint."""
8 | response = app.get("/collections/public.landsat_wrs/items/1")
9 | assert response.status_code == 200
10 | assert response.headers["content-type"] == "application/geo+json"
11 | body = response.json()
12 | assert body["type"] == "Feature"
13 | assert body["id"] == 1
14 | assert body["links"]
15 | Item.model_validate(body)
16 |
17 | response = app.get("/collections/public.landsat_wrs/items/1?f=html")
18 | assert response.status_code == 200
19 | assert "text/html" in response.headers["content-type"]
20 | assert "Collection Item: 1" in response.text
21 |
22 | # json output
23 | response = app.get("/collections/public.landsat_wrs/items/1?f=json")
24 | assert response.status_code == 200
25 | assert response.headers["content-type"] == "application/json"
26 | feat = response.json()
27 | assert {
28 | "collectionId",
29 | "itemId",
30 | "id",
31 | "pr",
32 | "row",
33 | "path",
34 | "ogc_fid",
35 | "geometry",
36 | } == set(feat.keys())
37 |
38 | # not found
39 | response = app.get("/collections/public.landsat_wrs/items/50000")
40 | assert response.status_code == 404
41 |
42 |
43 | def test_item_with_property_config(app_public_table):
44 | """Test /items/{item id} endpoint."""
45 | response = app_public_table.get("/collections/public.landsat_wrs/items/1")
46 | assert response.status_code == 200
47 | assert response.headers["content-type"] == "application/geo+json"
48 | body = response.json()
49 | assert body["type"] == "Feature"
50 | assert body["id"] == 1
51 | assert body["links"]
52 | assert list(body["properties"]) == ["pr"]
53 | Item.model_validate(body)
54 |
--------------------------------------------------------------------------------
/tests/routes/test_non_geo.py:
--------------------------------------------------------------------------------
1 | """Test endpoints with non-geo table."""
2 |
3 |
4 | def test_non_geo(app):
5 | """Test endpoint with non-geo tables."""
6 | response = app.get("/collections/public.nongeo_data")
7 | assert response.status_code == 200
8 | body = response.json()
9 | assert body["id"] == "public.nongeo_data"
10 | # No spatial extent for non-geo table (temporal extent only)
11 | assert sorted(["id", "links", "extent", "itemType", "crs"]) == sorted(body)
12 | assert body["extent"]["temporal"]
13 | assert body["extent"]["temporal"]["interval"][0] == [
14 | "2004-10-19T09:23:54+00:00",
15 | "2004-10-24T09:23:54+00:00",
16 | ]
17 | assert not body["extent"].get("spatial")
18 |
19 | response = app.get("/collections/public.nongeo_data/items")
20 | assert response.status_code == 200
21 | assert response.headers["content-type"] == "application/geo+json"
22 | body = response.json()
23 | assert body["type"] == "FeatureCollection"
24 | assert body["id"] == "public.nongeo_data"
25 | assert body["title"] == "public.nongeo_data"
26 | assert body["links"]
27 | assert body["numberMatched"] == 6
28 | assert body["numberReturned"] == 6
29 | assert ["collection", "self"] == [link["rel"] for link in body["links"]]
30 |
31 | response = app.get("/collections/public.nongeo_data/items?f=json")
32 | assert response.status_code == 200
33 | assert response.headers["content-type"] == "application/json"
34 | body = response.json()
35 | assert len(body) == 6
36 |
37 | response = app.get("/collections/public.nongeo_data/items?f=html")
38 | assert response.status_code == 200
39 | assert "text/html" in response.headers["content-type"]
40 | assert "Collection Items: public.nongeo_data" in response.text
41 |
42 | response = app.get("/collections/public.nongeo_data/items?geom-column=geom")
43 | assert response.status_code == 404
44 |
45 | response = app.get("/collections/public.nongeo_data/queryables")
46 | assert response.status_code == 200
47 |
48 | # bbox filter should have no effect
49 | response = app.get("/collections/public.nongeo_data/items?bbox=0,10,0,10")
50 | assert response.status_code == 200
51 | assert response.headers["content-type"] == "application/geo+json"
52 | body = response.json()
53 | assert body["numberMatched"] == 6
54 | assert body["numberReturned"] == 6
55 |
--------------------------------------------------------------------------------
/tests/routes/test_templates.py:
--------------------------------------------------------------------------------
1 | """Test HTML templates."""
2 |
3 |
4 | def test_custom_templates(app):
5 | """Test /collections endpoint."""
6 | response = app.get("/collections")
7 | assert response.status_code == 200
8 |
9 | response = app.get("/collections?f=html")
10 | assert response.status_code == 200
11 | assert "Custom Collections" in response.text
12 |
--------------------------------------------------------------------------------
/tests/routes/test_tiles.py:
--------------------------------------------------------------------------------
1 | """Test Tiles endpoints."""
2 |
3 | import mapbox_vector_tile
4 | import numpy as np
5 |
6 | from tipg.collections import mvt_settings
7 |
8 |
9 | def test_tilejson(app):
10 | """Test TileJSON endpoint."""
11 | response = app.get(
12 | "/collections/public.landsat_wrs/tiles/WebMercatorQuad/tilejson.json"
13 | )
14 | assert response.status_code == 200
15 |
16 | resp_json = response.json()
17 | assert resp_json["tilejson"] == "3.0.0"
18 | assert resp_json["name"] == "public.landsat_wrs"
19 | assert resp_json["minzoom"] == 5
20 | assert resp_json["maxzoom"] == 12
21 | assert resp_json["vector_layers"]
22 |
23 | np.testing.assert_almost_equal(
24 | resp_json["bounds"], [-180.0, -82.6401, 180.0, 82.6401], decimal=4
25 | )
26 |
27 | response = app.get(
28 | "/collections/public.landsat_wrs/tiles/WGS1984Quad/tilejson.json"
29 | )
30 | assert response.status_code == 200
31 |
32 | resp_json = response.json()
33 | assert resp_json["tilejson"] == "3.0.0"
34 | assert resp_json["name"] == "public.landsat_wrs"
35 | assert resp_json["minzoom"] == 0
36 | assert resp_json["maxzoom"] == 23
37 | assert resp_json["vector_layers"]
38 | assert "WGS1984Quad" in resp_json["tiles"][0]
39 |
40 | np.testing.assert_almost_equal(
41 | resp_json["bounds"], [-180.0, -82.6401, 180.0, 82.6401], decimal=4
42 | )
43 |
44 | response = app.get(
45 | "/collections/public.landsat_wrs/tiles/WebMercatorQuad/tilejson.json?minzoom=1&maxzoom=2"
46 | )
47 | assert response.status_code == 200
48 |
49 | resp_json = response.json()
50 | assert resp_json["name"] == "public.landsat_wrs"
51 | assert resp_json["minzoom"] == 1
52 | assert resp_json["maxzoom"] == 2
53 |
54 | response = app.get(
55 | "/collections/public.landsat_wrs/tiles/WebMercatorQuad/tilejson.json?minzoom=1&maxzoom=2&limit=1000"
56 | )
57 | assert response.status_code == 200
58 |
59 | resp_json = response.json()
60 | assert resp_json["name"] == "public.landsat_wrs"
61 | assert resp_json["minzoom"] == 1
62 | assert resp_json["maxzoom"] == 2
63 | assert "?limit=1000" in resp_json["tiles"][0]
64 |
65 | # Make sure that a non-4326 collection still returns the bounds in 4326
66 | response = app.get(
67 | "/collections/public.minnesota/tiles/WebMercatorQuad/tilejson.json"
68 | )
69 | assert response.status_code == 200
70 |
71 | resp_json = response.json()
72 |
73 | np.testing.assert_almost_equal(
74 | resp_json["bounds"],
75 | [-96.28961808496446, 46.11168980088226, -93.05330550250615, 48.56828559755232],
76 | decimal=3,
77 | )
78 |
79 |
80 | def test_tile(app):
81 | """request a tile."""
82 | init_value = mvt_settings.set_mvt_layername
83 | mvt_settings.set_mvt_layername = False
84 |
85 | name = "landsat_wrs"
86 | response = app.get(f"/collections/public.{name}/tiles/WebMercatorQuad/0/0/0")
87 | assert response.status_code == 200
88 | decoded = mapbox_vector_tile.decode(response.content)
89 | assert "default" in decoded.keys()
90 | assert len(decoded["default"]["features"]) == 10000
91 |
92 | response = app.get(
93 | f"/collections/public.{name}/tiles/WebMercatorQuad/0/0/0?limit=1000"
94 | )
95 | assert response.status_code == 200
96 | decoded = mapbox_vector_tile.decode(response.content)
97 | assert len(decoded["default"]["features"]) == 1000
98 | assert sorted(["id", "pr", "row", "path", "ogc_fid"]) == sorted(
99 | decoded["default"]["features"][0]["properties"]
100 | )
101 |
102 | response = app.get(
103 | f"/collections/public.{name}/tiles/WebMercatorQuad/0/0/0?limit=1&properties=pr,row,path"
104 | )
105 | assert response.status_code == 200
106 | decoded = mapbox_vector_tile.decode(response.content)
107 | assert sorted(["pr", "row", "path"]) == sorted(
108 | decoded["default"]["features"][0]["properties"]
109 | )
110 |
111 | response = app.get(
112 | f"/collections/public.{name}/tiles/WebMercatorQuad/0/0/0?geom-column=geom"
113 | )
114 | assert response.status_code == 200
115 | decoded = mapbox_vector_tile.decode(response.content)
116 | assert len(decoded["default"]["features"]) == 10000
117 |
118 | # invalid geometry column name
119 | response = app.get(
120 | f"/collections/public.{name}/tiles/WebMercatorQuad/0/0/0?geom-column=the_geom"
121 | )
122 | assert response.status_code == 404
123 |
124 | mvt_settings.set_mvt_layername = init_value
125 |
126 |
127 | def test_tile_custom_name(app):
128 | """Test custom layer name."""
129 | init_value = mvt_settings.set_mvt_layername
130 | mvt_settings.set_mvt_layername = True
131 |
132 | name = "landsat_wrs"
133 | response = app.get(f"/collections/public.{name}/tiles/WebMercatorQuad/0/0/0")
134 | assert response.status_code == 200
135 | decoded = mapbox_vector_tile.decode(response.content)
136 | assert name in decoded.keys()
137 | assert len(decoded[name]["features"]) == 10000
138 |
139 | mvt_settings.set_mvt_layername = init_value
140 |
141 |
142 | def test_tile_tms(app):
143 | """request a tile with specific TMS."""
144 | init_value = mvt_settings.set_mvt_layername
145 | mvt_settings.set_mvt_layername = False
146 |
147 | name = "landsat_wrs"
148 | response = app.get(f"/collections/public.{name}/tiles/WorldCRS84Quad/0/0/0")
149 | assert response.status_code == 200
150 | decoded = mapbox_vector_tile.decode(response.content)
151 | assert "default" in decoded.keys()
152 | assert len(decoded["default"]["features"]) > 1000
153 |
154 | response = app.get(
155 | f"/collections/public.{name}/tiles/WorldCRS84Quad/0/0/0?limit=1000"
156 | )
157 | assert response.status_code == 200
158 | decoded = mapbox_vector_tile.decode(response.content)
159 | assert len(decoded["default"]["features"]) <= 1000
160 | assert sorted(["id", "pr", "row", "path", "ogc_fid"]) == sorted(
161 | decoded["default"]["features"][0]["properties"]
162 | )
163 |
164 | response = app.get(
165 | f"/collections/public.{name}/tiles/WorldCRS84Quad/0/0/0?limit=1&properties=pr,row,path"
166 | )
167 | assert response.status_code == 200
168 | decoded = mapbox_vector_tile.decode(response.content)
169 | assert sorted(["pr", "row", "path"]) == sorted(
170 | decoded["default"]["features"][0]["properties"]
171 | )
172 |
173 | mvt_settings.set_mvt_layername = init_value
174 |
175 |
176 | def test_tile_tms_custom_name(app):
177 | """test layername with tms."""
178 | init_value = mvt_settings.set_mvt_layername
179 | mvt_settings.set_mvt_layername = True
180 |
181 | name = "landsat_wrs"
182 | response = app.get(f"/collections/public.{name}/tiles/WorldCRS84Quad/0/0/0")
183 | assert response.status_code == 200
184 | decoded = mapbox_vector_tile.decode(response.content)
185 | assert name in decoded.keys()
186 | assert len(decoded[name]["features"]) > 1000
187 |
188 | mvt_settings.set_mvt_layername = init_value
189 |
190 |
191 | def test_stylejson(app):
192 | """Test StyleJSON endpoint."""
193 | response = app.get(
194 | "/collections/public.landsat_wrs/tiles/WebMercatorQuad/style.json"
195 | )
196 | assert response.status_code == 200
197 |
198 | resp_json = response.json()
199 | assert resp_json["version"] == 8
200 | assert resp_json["name"] == "TiPg"
201 | assert "sources" in resp_json
202 | assert "layers" in resp_json
203 | assert "center" in resp_json
204 | assert "zoom" in resp_json
205 |
206 | source = resp_json["sources"]["public.landsat_wrs"]
207 | assert source["minzoom"] == 5
208 | assert source["maxzoom"] == 12
209 |
210 | np.testing.assert_equal(
211 | np.around(source["bounds"], 4), [-180.0, -82.6401, 180.0, 82.6401]
212 | )
213 |
214 | response = app.get("/collections/public.landsat_wrs/tiles/WGS1984Quad/style.json")
215 | assert response.status_code == 200
216 |
217 | resp_json = response.json()
218 | assert resp_json["version"] == 8
219 | assert resp_json["name"] == "TiPg"
220 | assert "sources" in resp_json
221 | assert "layers" in resp_json
222 | assert "center" in resp_json
223 | assert "zoom" in resp_json
224 |
225 | source = resp_json["sources"]["public.landsat_wrs"]
226 | assert source["minzoom"] == 0
227 | assert source["maxzoom"] == 23
228 | assert "WGS1984Quad" in source["tiles"][0]
229 |
230 | np.testing.assert_equal(
231 | np.around(source["bounds"], 4), [-180.0, -82.6401, 180.0, 82.6401]
232 | )
233 |
234 | response = app.get(
235 | "/collections/public.landsat_wrs/tiles/WebMercatorQuad/style.json?minzoom=1&maxzoom=2"
236 | )
237 | assert response.status_code == 200
238 |
239 | resp_json = response.json()
240 | source = resp_json["sources"]["public.landsat_wrs"]
241 | assert source["minzoom"] == 1
242 | assert source["maxzoom"] == 2
243 | assert "minzoom" not in source["tiles"][0]
244 | assert "maxzoom" not in source["tiles"][0]
245 |
246 | response = app.get(
247 | "/collections/public.landsat/tiles/WebMercatorQuad/style.json?geom-column=centroid"
248 | )
249 | assert response.status_code == 200
250 |
--------------------------------------------------------------------------------
/tests/routes/test_tilesets.py:
--------------------------------------------------------------------------------
1 | """test tileset endpoints."""
2 |
3 | import pytest
4 | from morecantile import tms
5 |
6 |
7 | def test_tilesets(app):
8 | """test /collections/{collectionId}/tiles endpoint."""
9 | response = app.get("/collections/public.landsat/tiles")
10 | assert response.status_code == 200
11 | body = response.json()
12 |
13 | assert len(body["tilesets"]) == len(tms.list())
14 | tileset = list(
15 | filter(
16 | lambda m: m["title"]
17 | == "'public.landsat' tileset tiled using WebMercatorQuad TileMatrixSet",
18 | body["tilesets"],
19 | )
20 | )[0]
21 | assert tileset["dataType"] == "vector"
22 | assert tileset["crs"] == "http://www.opengis.net/def/crs/EPSG/0/3857"
23 | assert tileset["boundingBox"]
24 | assert tileset["links"]
25 |
26 | response = app.get("/collections/public.landsat/tiles?f=html")
27 | assert response.status_code == 200
28 | assert "text/html" in response.headers["content-type"]
29 |
30 |
31 | @pytest.mark.parametrize("tms_name", tms.list())
32 | def test_tileset(app, tms_name):
33 | """test /collections/{collectionId}/tiles/tileMatrixSetId endpoint."""
34 | response = app.get(f"/collections/public.landsat/tiles/{tms_name}")
35 | assert response.status_code == 200
36 | tileset = response.json()
37 |
38 | assert tileset["dataType"] == "vector"
39 | assert tileset["boundingBox"]
40 | assert tileset["links"]
41 | assert tileset["tileMatrixSetLimits"]
42 |
43 | response = app.get(f"/collections/public.landsat/tiles/{tms_name}?f=html")
44 | assert response.status_code == 200
45 | assert "text/html" in response.headers["content-type"]
46 |
--------------------------------------------------------------------------------
/tests/routes/test_tms.py:
--------------------------------------------------------------------------------
1 | """test TileMatrixSets endpoints."""
2 |
3 | from morecantile import tms
4 |
5 |
6 | def test_tilematrix(app):
7 | """test /tileMatrixSet endpoint."""
8 | response = app.get("/tileMatrixSets")
9 | assert response.status_code == 200
10 | body = response.json()
11 |
12 | assert len(body["tileMatrixSets"]) == len(tms.list())
13 | tileMatrixSets = list(
14 | filter(lambda m: m["id"] == "WebMercatorQuad", body["tileMatrixSets"])
15 | )[0]
16 | assert (
17 | tileMatrixSets["links"][0]["href"]
18 | == "http://testserver/tileMatrixSets/WebMercatorQuad"
19 | )
20 |
21 | response = app.get("/tileMatrixSets?f=html")
22 | assert response.status_code == 200
23 | assert "text/html" in response.headers["content-type"]
24 |
25 |
26 | def test_tilematrixInfo(app):
27 | """test /tileMatrixSet endpoint."""
28 | response = app.get("/tileMatrixSets/WebMercatorQuad")
29 | assert response.headers["content-type"] == "application/json"
30 | assert response.status_code == 200
31 | body = response.json()
32 | assert body["id"] == "WebMercatorQuad"
33 | assert body["crs"]
34 | assert body["tileMatrices"]
35 |
36 | response = app.get("/tileMatrixSets/WebMercatorQuad?f=html")
37 | assert response.status_code == 200
38 | assert "text/html" in response.headers["content-type"]
39 |
--------------------------------------------------------------------------------
/tests/test_main.py:
--------------------------------------------------------------------------------
1 | """Test pg_mvt.main.app."""
2 |
3 |
4 | def test_health(app):
5 | """Test /healthz endpoint."""
6 | response = app.get("/healthz")
7 | assert response.status_code == 200
8 | assert response.json() == {"ping": "pong!"}
9 |
10 | response = app.get("/rawcatalog")
11 | assert response.status_code == 200
12 | body = response.json()
13 | assert body["collections"]
14 | assert body["last_updated"]
15 |
--------------------------------------------------------------------------------
/tests/test_middleware.py:
--------------------------------------------------------------------------------
1 | """Test tipg middleware."""
2 |
3 | import time
4 |
5 | from tipg.middleware import CacheControlMiddleware
6 |
7 | from fastapi import FastAPI
8 |
9 | from starlette.responses import Response
10 | from starlette.testclient import TestClient
11 |
12 |
13 | def test_middleware(app_middleware_refresh):
14 | """Test CatalogUpdateMiddleware."""
15 | # Wait until we pass the `ttl`
16 | time.sleep(2)
17 |
18 | response = app_middleware_refresh.get("/rawcatalog")
19 | assert response.status_code == 200
20 | body = response.json()
21 | assert body["collections"]
22 | last_updated = body["last_updated"]
23 |
24 | # Because we waited 2 seconds before the request
25 | # the background task should have been called
26 | # let's wait until the refresh is done
27 | time.sleep(5)
28 |
29 | response = app_middleware_refresh.get("/rawcatalog")
30 | assert response.status_code == 200
31 | body = response.json()
32 | assert body["collections"]
33 | new_updated = body["last_updated"]
34 | assert last_updated != new_updated
35 |
36 |
37 | def test_middleware_cache():
38 | """test CacheControlMiddleware."""
39 | app = FastAPI()
40 |
41 | @app.get("/route1")
42 | async def route1():
43 | """route1."""
44 | return "yo"
45 |
46 | @app.get("/route2")
47 | async def route2():
48 | """route2."""
49 | return "yeah"
50 |
51 | @app.get("/route3")
52 | async def route3():
53 | """route3."""
54 | return "yeah"
55 |
56 | @app.get("/route4")
57 | async def route4():
58 | return "yep"
59 |
60 | @app.get("/route5")
61 | async def route5():
62 | return Response(status_code=404)
63 |
64 | app.add_middleware(
65 | CacheControlMiddleware,
66 | cachecontrol="public",
67 | cachecontrol_max_http_code=400,
68 | exclude_path={r"/route1", r"/route2", r"/route[3]"},
69 | )
70 |
71 | with TestClient(app) as client:
72 | response = client.get("/route1")
73 | assert not response.headers.get("Cache-Control")
74 |
75 | response = client.get("/route2")
76 | assert not response.headers.get("Cache-Control")
77 |
78 | # No cache because of `/route[3]` regex
79 | response = client.get("/route3")
80 | assert not response.headers.get("Cache-Control")
81 |
82 | # cache-control
83 | response = client.get("/route4")
84 | assert response.headers["Cache-Control"] == "public"
85 |
86 | # No cache for status code > 400
87 | response = client.get("/route5")
88 | assert not response.headers.get("Cache-Control")
89 |
--------------------------------------------------------------------------------
/tests/test_schemas.py:
--------------------------------------------------------------------------------
1 | """Test schemas."""
2 |
3 |
4 | def test_myschema(app_myschema):
5 | """Available tables should come from `myschema` and functions from `pg_temp`."""
6 | collection_number = 6 # 5 custom functions + 1 table from myschema
7 |
8 | response = app_myschema.get("/collections")
9 | assert response.status_code == 200
10 | assert response.headers["content-type"] == "application/json"
11 | body = response.json()
12 | ids = [x["id"] for x in body["collections"]]
13 |
14 | # custom functions
15 | assert "pg_temp.landsat_centroids" in ids
16 | assert "pg_temp.landsat" in ids
17 | assert "pg_temp.squares" in ids
18 | assert "pg_temp.hexagons" in ids
19 | assert "pg_temp.hexagons_g" in ids
20 |
21 | # myschema table
22 | assert "myschema.landsat" in ids
23 |
24 | assert body["numberMatched"] == collection_number
25 |
26 |
27 | def test_myschema_and_public_functions(app_myschema_public_functions):
28 | """Available tables should come from `myschema` and functions from `pg_temp` and `public` schema."""
29 | collection_number = (
30 | 6 # 5 custom functions + 1 table from myschema + (N) functions from public
31 | )
32 |
33 | response = app_myschema_public_functions.get("/collections")
34 | assert response.status_code == 200
35 | body = response.json()
36 | ids = [x["id"] for x in body["collections"]]
37 |
38 | # custom functions
39 | assert "pg_temp.landsat_centroids" in ids
40 | assert "pg_temp.landsat" in ids
41 | assert "pg_temp.squares" in ids
42 | assert "pg_temp.hexagons" in ids
43 | assert "pg_temp.hexagons_g" in ids
44 |
45 | # public functions
46 | assert "public.st_hexagongrid" in ids
47 |
48 | # myschema table
49 | assert "myschema.landsat" in ids
50 |
51 | # no tables from public
52 | assert "public.my_data" not in ids
53 |
54 | assert body["numberMatched"] >= collection_number
55 |
56 |
57 | def test_myschema_and_public(app_myschema_public):
58 | """Available tables should come from `myschema` and `public` and functions from `pg_temp`"""
59 | collection_number = (
60 | 15 # 5 custom functions + 1 table from myschema + 9 tables from public
61 | )
62 |
63 | response = app_myschema_public.get("/collections")
64 | assert response.status_code == 200
65 | body = response.json()
66 | ids = [x["id"] for x in body["collections"]]
67 |
68 | # custom functions
69 | assert "pg_temp.landsat_centroids" in ids
70 | assert "pg_temp.landsat" in ids
71 | assert "pg_temp.squares" in ids
72 | assert "pg_temp.hexagons" in ids
73 | assert "pg_temp.hexagons_g" in ids
74 |
75 | # myschema table
76 | assert "myschema.landsat" in ids
77 |
78 | # tables from public
79 | assert "public.my_data" in ids
80 | assert "public.my_data_alt" in ids
81 | assert "public.my_data_date" in ids
82 | assert "public.minnesota" in ids
83 | assert "public.canada" in ids
84 | assert "public.landsat" in ids
85 | assert "public.nongeo_data" in ids
86 | assert "public.my_data_geo" in ids
87 | assert "public.landsat_wrs" in ids
88 |
89 | # no public functions
90 | assert "public.st_hexagongrid" not in ids
91 |
92 | assert body["numberMatched"] == collection_number
93 |
94 |
95 | def test_public_functions(app_only_public_functions):
96 | """Available functions from `pg_temp` and `public` schema (no tables available)."""
97 | collection_number = 5 # 5 custom functions + (N) functions from public
98 |
99 | response = app_only_public_functions.get("/collections")
100 | assert response.status_code == 200
101 | body = response.json()
102 | ids = [x["id"] for x in body["collections"]]
103 |
104 | # custom functions
105 | assert "pg_temp.landsat_centroids" in ids
106 | assert "pg_temp.landsat" in ids
107 | assert "pg_temp.squares" in ids
108 | assert "pg_temp.hexagons" in ids
109 | assert "pg_temp.hexagons_g" in ids
110 |
111 | # public functions
112 | assert "public.st_hexagongrid" in ids
113 |
114 | # no myschema table
115 | assert "myschema.landsat" not in ids
116 |
117 | # no tables from public
118 | assert "public.my_data" not in ids
119 |
120 | assert body["numberMatched"] >= collection_number
121 |
122 |
123 | def test_myschema_and_public_order(app_myschema_public_order):
124 | """Available tables should come from `myschema` and `public` and functions from `pg_temp`"""
125 | collection_number = (
126 | 15 # 5 custom functions + 1 table from myschema + 9 tables from public
127 | )
128 |
129 | response = app_myschema_public_order.get("/collections")
130 | assert response.status_code == 200
131 | body = response.json()
132 | ids = [x["id"] for x in body["collections"]]
133 |
134 | # custom functions
135 | assert "pg_temp.landsat_centroids" in ids
136 | assert "pg_temp.landsat" in ids
137 | assert "pg_temp.squares" in ids
138 | assert "pg_temp.hexagons" in ids
139 | assert "pg_temp.hexagons_g" in ids
140 |
141 | # myschema table
142 | assert "myschema.landsat" in ids
143 |
144 | # tables from public
145 | assert "public.my_data" in ids
146 |
147 | # no public functions
148 | assert "public.st_hexagongrid" not in ids
149 |
150 | assert body["numberMatched"] == collection_number
151 |
152 |
153 | def test_user_schema(app_user_schema):
154 | """Test Function without parameters."""
155 | collection_number = 1
156 |
157 | response = app_user_schema.get("/collections")
158 | assert response.status_code == 200
159 | body = response.json()
160 | ids = [x["id"] for x in body["collections"]]
161 |
162 | assert len(ids) == collection_number
163 | assert "userschema.test_no_params" in ids
164 |
165 | response = app_user_schema.get("/collections/userschema.test_no_params/queryables")
166 | assert response.status_code == 200
167 |
168 | response = app_user_schema.get("/collections/userschema.test_no_params/items")
169 | assert response.status_code == 200
170 | body = response.json()
171 | assert body["numberMatched"] == 1
172 |
--------------------------------------------------------------------------------
/tests/test_settings.py:
--------------------------------------------------------------------------------
1 | """test tipg settings classes."""
2 |
3 | import pytest
4 | from pydantic import ValidationError
5 |
6 | from tipg.settings import PostgresSettings
7 |
8 |
9 | def test_pg_settings(monkeypatch):
10 | """test PostgresSettings class."""
11 | # Make sure we don't have any pg env variables set
12 | monkeypatch.delenv("DATABASE_URL", raising=False)
13 | monkeypatch.delenv("POSTGRES_USER", raising=False)
14 | monkeypatch.delenv("POSTGRES_PASS", raising=False)
15 | monkeypatch.delenv("POSTGRES_HOST", raising=False)
16 | monkeypatch.delenv("POSTGRES_PORT", raising=False)
17 | monkeypatch.delenv("POSTGRES_DBNAME", raising=False)
18 |
19 | # Should raise a validation error if no env variables or parameters are passed
20 | with pytest.raises(ValidationError):
21 | # we use `_env_file=None` to make sure pydantic does not use any `.env` files from the local environment
22 | PostgresSettings(_env_file=None)
23 |
24 | settings = PostgresSettings(
25 | postgres_user="user",
26 | postgres_pass="secret",
27 | postgres_host="0.0.0.0",
28 | postgres_port=8888,
29 | postgres_dbname="db",
30 | _env_file=None,
31 | )
32 | assert str(settings.database_url) == "postgresql://user:secret@0.0.0.0:8888/db"
33 |
34 | # Make sure pydantic will cast the port to integer
35 | settings = PostgresSettings(
36 | postgres_user="user",
37 | postgres_pass="secret",
38 | postgres_host="0.0.0.0",
39 | postgres_port="8888",
40 | postgres_dbname="db",
41 | _env_file=None,
42 | )
43 | assert str(settings.database_url) == "postgresql://user:secret@0.0.0.0:8888/db"
44 | assert settings.postgres_port == 8888
45 |
46 | settings = PostgresSettings(
47 | database_url="postgresql://user:secret@0.0.0.0:8888/db", _env_file=None
48 | )
49 | assert str(settings.database_url) == "postgresql://user:secret@0.0.0.0:8888/db"
50 | assert not settings.postgres_port
51 |
--------------------------------------------------------------------------------
/tests/test_sql_functions.py:
--------------------------------------------------------------------------------
1 | """test custom SQL functions."""
2 |
3 | import mapbox_vector_tile
4 | import pytest
5 |
6 | from tipg.errors import NoPrimaryKey
7 |
8 |
9 | def test_collections_function(app_functions):
10 | """Test /collections endpoint."""
11 | response = app_functions.get("/collections")
12 | assert response.status_code == 200
13 | assert response.headers["content-type"] == "application/json"
14 | body = response.json()
15 | assert [
16 | "links",
17 | "numberMatched",
18 | "numberReturned",
19 | "collections",
20 | ] == list(body)
21 | ids = [x["id"] for x in body["collections"]]
22 |
23 | # Custom function
24 | assert "pg_temp.landsat_centroids" in ids
25 | assert "pg_temp.landsat" in ids
26 | assert "pg_temp.hexagons" in ids
27 | assert "pg_temp.hexagons_g" in ids
28 | assert "pg_temp.squares" in ids
29 |
30 | response = app_functions.get("/collections/pg_temp.landsat_centroids")
31 | assert response.status_code == 200
32 | assert response.headers["content-type"] == "application/json"
33 | body = response.json()
34 | assert body["id"] == "pg_temp.landsat_centroids"
35 |
36 | response = app_functions.get("/collections/pg_temp.squares")
37 | assert response.status_code == 200
38 | assert response.headers["content-type"] == "application/json"
39 | body = response.json()
40 | assert body["id"] == "pg_temp.squares"
41 | assert body["extent"]["spatial"]["bbox"][0] == [-180.0, -90.0, 180.0, 90.0]
42 |
43 |
44 | def test_queryables_function(app_functions):
45 | """Test /queryables endpoint."""
46 | response = app_functions.get("/collections/pg_temp.landsat_centroids/queryables")
47 | assert response.status_code == 200
48 | assert response.headers["content-type"] == "application/schema+json"
49 | body = response.json()
50 | assert {"geom", "ogc_fid", "path", "pr", "row"}.issubset(body["properties"].keys())
51 |
52 | response = app_functions.get("/collections/pg_temp.landsat/queryables")
53 | assert response.status_code == 200
54 | assert response.headers["content-type"] == "application/schema+json"
55 | body = response.json()
56 | assert {"geom", "grid_path", "grid_row", "path_row"}.issubset(
57 | body["properties"].keys()
58 | )
59 |
60 |
61 | def test_items_function(app_functions):
62 | """Test /items endpoint."""
63 | response = app_functions.get("/collections/pg_temp.landsat_centroids/items")
64 | assert response.status_code == 200
65 | assert response.headers["content-type"] == "application/geo+json"
66 | body = response.json()
67 | assert body["id"] == "pg_temp.landsat_centroids"
68 | assert body["features"][0]["geometry"]["type"] == "Point"
69 | assert body["features"][0]["id"] == 1
70 | assert body["features"][0]["properties"]["ogc_fid"] == 1
71 | assert body["numberMatched"] == 16269
72 | assert body["numberReturned"] == 10
73 |
74 | response = app_functions.get("/collections/pg_temp.landsat_centroids/items?path=0")
75 | assert response.status_code == 200
76 | assert response.headers["content-type"] == "application/geo+json"
77 | body = response.json()
78 | assert body["numberMatched"] == 0
79 | assert body["numberReturned"] == 0
80 |
81 | response = app_functions.get("/collections/pg_temp.landsat_centroids/items?path=13")
82 | assert response.status_code == 200
83 | assert response.headers["content-type"] == "application/geo+json"
84 | body = response.json()
85 | assert body["numberMatched"] == 104
86 | assert body["numberReturned"] == 10
87 |
88 | # Check functions that take x/y/z input
89 | response = app_functions.get("/collections/pg_temp.landsat/items?p=13&x=0&y=0&z=0")
90 | assert response.status_code == 200
91 | assert response.headers["content-type"] == "application/geo+json"
92 | body = response.json()
93 | assert body["numberMatched"] == 104
94 | assert body["numberReturned"] == 10
95 |
96 | response = app_functions.get("/collections/pg_temp.hexagons/items")
97 | assert response.status_code == 200
98 | assert response.headers["content-type"] == "application/geo+json"
99 | body = response.json()
100 | assert body["id"] == "pg_temp.hexagons"
101 | assert body["features"][0]["geometry"]["type"] == "Polygon"
102 | assert body["features"][0]["id"]
103 | assert body["features"][0]["properties"]["i"]
104 | assert body["features"][0]["properties"]["j"]
105 | assert body["numberMatched"] == 287
106 | assert body["numberReturned"] == 10
107 |
108 | response = app_functions.get(
109 | "/collections/pg_temp.hexagons/items",
110 | params={
111 | "size": 10,
112 | "bounds": "POLYGON((-180 -90,-180 90,180 90,180 -90,-180 -90))",
113 | },
114 | )
115 | assert response.status_code == 200
116 | assert response.headers["content-type"] == "application/geo+json"
117 | body = response.json()
118 | assert body["id"] == "pg_temp.hexagons"
119 | assert body["features"][0]["geometry"]["type"] == "Polygon"
120 | assert body["features"][0]["id"]
121 | assert body["features"][0]["properties"]["i"] == 0
122 | assert body["features"][0]["properties"]["j"] == 0
123 | assert body["numberMatched"] == 287
124 | assert body["numberReturned"] == 10
125 |
126 | response = app_functions.get("/collections/pg_temp.landsat_centroids/items/1")
127 | assert response.status_code == 200
128 | assert response.headers["content-type"] == "application/geo+json"
129 | body = response.json()
130 | assert body["id"] == 1
131 | assert body["geometry"]["type"] == "Point"
132 | assert body["properties"]["ogc_fid"] == 1
133 |
134 | # No Primary key for functions
135 | with pytest.raises(NoPrimaryKey):
136 | app_functions.get("/collections/pg_temp.hexagons/items/1")
137 |
138 |
139 | def test_tiles_functions(app_functions):
140 | """Test Tiles endpoint."""
141 | response = app_functions.get(
142 | "/collections/pg_temp.landsat_centroids/tiles/WebMercatorQuad/tilejson.json"
143 | )
144 | assert response.status_code == 200
145 | body = response.json()
146 | assert body["name"] == "pg_temp.landsat_centroids"
147 | assert body["minzoom"] == 5
148 | assert body["maxzoom"] == 12
149 |
150 | response = app_functions.get(
151 | "/collections/pg_temp.hexagons/tiles/WebMercatorQuad/tilejson.json"
152 | )
153 | assert response.status_code == 200
154 | body = response.json()
155 | assert body["name"] == "pg_temp.hexagons"
156 | assert body["minzoom"] == 5
157 | assert body["maxzoom"] == 12
158 |
159 | response = app_functions.get(
160 | "/collections/pg_temp.hexagons/tiles/WebMercatorQuad/tilejson.json?minzoom=1&maxzoom=2&size=4"
161 | )
162 | assert response.status_code == 200
163 | body = response.json()
164 | assert body["name"] == "pg_temp.hexagons"
165 | assert body["minzoom"] == 1
166 | assert body["maxzoom"] == 2
167 | assert "?size=4" in body["tiles"][0]
168 |
169 | # tilesets
170 | response = app_functions.get("/collections/pg_temp.landsat_centroids/tiles")
171 | assert response.status_code == 200
172 | body = response.json()
173 | assert body["tilesets"]
174 |
175 | response = app_functions.get("/collections/pg_temp.hexagons/tiles")
176 | assert response.status_code == 200
177 | assert body["tilesets"]
178 |
179 | # tileset
180 | response = app_functions.get(
181 | "/collections/pg_temp.landsat_centroids/tiles/WebMercatorQuad"
182 | )
183 | assert response.status_code == 200
184 | body = response.json()
185 | assert (
186 | body["title"]
187 | == "'pg_temp.landsat_centroids' tileset tiled using WebMercatorQuad TileMatrixSet"
188 | )
189 |
190 | response = app_functions.get("/collections/pg_temp.hexagons/tiles/WebMercatorQuad")
191 | assert response.status_code == 200
192 | body = response.json()
193 | assert (
194 | body["title"]
195 | == "'pg_temp.hexagons' tileset tiled using WebMercatorQuad TileMatrixSet"
196 | )
197 |
198 | # tiles
199 | # Check the function's default parameter values are used
200 | response = app_functions.get(
201 | "/collections/pg_temp.squares/tiles/WebMercatorQuad/3/3/3"
202 | )
203 | assert response.status_code == 200
204 | decoded = mapbox_vector_tile.decode(response.content)
205 | assert len(decoded["default"]["features"]) == 25
206 |
207 | # Check that passing `size` overrides the function's default
208 | response = app_functions.get(
209 | "/collections/pg_temp.squares/tiles/WebMercatorQuad/3/3/3?size=2"
210 | )
211 | assert response.status_code == 200
212 | decoded = mapbox_vector_tile.decode(response.content)
213 | assert len(decoded["default"]["features"]) == 483
214 |
215 | # Check that functions with differently named geometry columns work
216 | response = app_functions.get(
217 | "/collections/pg_temp.hexagons/tiles/WebMercatorQuad/3/3/3"
218 | )
219 | assert response.status_code == 200
220 | decoded = mapbox_vector_tile.decode(response.content)
221 | assert len(decoded["default"]["features"]) == 12
222 |
223 | response = app_functions.get(
224 | "/collections/pg_temp.hexagons_g/tiles/WebMercatorQuad/3/3/3"
225 | )
226 | assert response.status_code == 200
227 | decoded = mapbox_vector_tile.decode(response.content)
228 | assert len(decoded["default"]["features"]) == 12
229 |
230 | # Check function with x/y/z input
231 | response = app_functions.get(
232 | "/collections/pg_temp.landsat/tiles/WebMercatorQuad/0/0/0?p=13"
233 | )
234 | assert response.status_code == 200
235 | decoded = mapbox_vector_tile.decode(response.content)
236 | assert len(decoded["default"]["features"]) == 104
237 | assert decoded["default"]["features"][0]["properties"]["grid_path"] == 13
238 |
239 | # No features with p=0
240 | response = app_functions.get(
241 | "/collections/pg_temp.landsat/tiles/WebMercatorQuad/0/0/0?p=0"
242 | )
243 | assert response.status_code == 200
244 | decoded = mapbox_vector_tile.decode(response.content)
245 | assert not decoded
246 |
247 | # The default is p=0, so it should return nothing
248 | response = app_functions.get(
249 | "/collections/pg_temp.landsat/tiles/WebMercatorQuad/0/0/0"
250 | )
251 | assert response.status_code == 200
252 | decoded = mapbox_vector_tile.decode(response.content)
253 | assert not decoded
254 |
--------------------------------------------------------------------------------
/tipg/__init__.py:
--------------------------------------------------------------------------------
1 | """tipg."""
2 |
3 | __version__ = "1.1.0"
4 |
--------------------------------------------------------------------------------
/tipg/database.py:
--------------------------------------------------------------------------------
1 | """tipg.db: database events."""
2 |
3 | import pathlib
4 | from importlib.resources import files as resources_files
5 | from typing import List, Optional
6 |
7 | import orjson
8 | from buildpg import asyncpg
9 |
10 | from tipg.logger import logger
11 | from tipg.settings import PostgresSettings
12 |
13 | from fastapi import FastAPI
14 |
15 | DB_CATALOG_FILE = resources_files(__package__) / "sql" / "dbcatalog.sql"
16 |
17 |
18 | class connection_factory:
19 | """Connection creation."""
20 |
21 | schemas: List[str]
22 | tipg_schema: str
23 | user_sql_files: List[pathlib.Path]
24 |
25 | def __init__(
26 | self,
27 | schemas: List[str],
28 | tipg_schema: str,
29 | user_sql_files: Optional[List[pathlib.Path]] = None,
30 | ) -> None:
31 | """Init."""
32 | self.schemas = schemas
33 | self.tipg_schema = tipg_schema
34 | self.user_sql_files = user_sql_files or []
35 |
36 | async def __call__(self, conn: asyncpg.Connection):
37 | """Create connection."""
38 | await conn.set_type_codec(
39 | "json", encoder=orjson.dumps, decoder=orjson.loads, schema="pg_catalog"
40 | )
41 | await conn.set_type_codec(
42 | "jsonb", encoder=orjson.dumps, decoder=orjson.loads, schema="pg_catalog"
43 | )
44 |
45 | # Note: we add `{tipg_schema}` as the first element of the schemas list to make sure
46 | # we register the custom functions and `dbcatalog` in it.
47 | schemas = ",".join([self.tipg_schema, *self.schemas])
48 | logger.debug(f"Looking for Tables and Functions in {schemas} schemas")
49 |
50 | await conn.execute(
51 | f"""
52 | SELECT set_config(
53 | 'search_path',
54 | '{schemas},' || current_setting('search_path', false),
55 | false
56 | );
57 | """
58 | )
59 |
60 | # Register custom SQL functions/table/views in `{tipg_schema}`
61 | for sqlfile in self.user_sql_files:
62 | await conn.execute(sqlfile.read_text())
63 |
64 | # Register TiPG functions in `{tipg_schema}`
65 | await conn.execute(
66 | DB_CATALOG_FILE.read_text().replace("pg_temp", self.tipg_schema)
67 | )
68 |
69 |
70 | async def connect_to_db(
71 | app: FastAPI,
72 | *,
73 | schemas: List[str],
74 | tipg_schema: str = "pg_temp",
75 | user_sql_files: Optional[List[pathlib.Path]] = None,
76 | settings: Optional[PostgresSettings] = None,
77 | **kwargs,
78 | ) -> None:
79 | """Connect."""
80 | con_init = connection_factory(schemas, tipg_schema, user_sql_files)
81 |
82 | if not settings:
83 | settings = PostgresSettings()
84 |
85 | app.state.pool = await asyncpg.create_pool_b(
86 | str(settings.database_url),
87 | min_size=settings.db_min_conn_size,
88 | max_size=settings.db_max_conn_size,
89 | max_queries=settings.db_max_queries,
90 | max_inactive_connection_lifetime=settings.db_max_inactive_conn_lifetime,
91 | init=con_init,
92 | **kwargs,
93 | )
94 |
95 |
96 | async def close_db_connection(app: FastAPI) -> None:
97 | """Close connection."""
98 | await app.state.pool.close()
99 |
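100 | # Usage sketch (illustrative, mirroring `tipg.main`): the two helpers are meant
101 | # to be wired into a FastAPI lifespan; the `schemas` value here is an example.
102 | #
103 | #     from contextlib import asynccontextmanager
104 | #     from fastapi import FastAPI
105 | #
106 | #     @asynccontextmanager
107 | #     async def lifespan(app: FastAPI):
108 | #         await connect_to_db(app, schemas=["public"])   # pool stored on app.state.pool
109 | #         yield
110 | #         await close_db_connection(app)
111 | #
112 | #     app = FastAPI(lifespan=lifespan)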
--------------------------------------------------------------------------------
/tipg/errors.py:
--------------------------------------------------------------------------------
1 | """tipg.errors: Error classes."""
2 |
3 | import logging
4 | from typing import Callable, Dict, Type
5 |
6 | from asyncpg.exceptions._base import PostgresError
7 |
8 | from fastapi import FastAPI
9 |
10 | from starlette import status
11 | from starlette.requests import Request
12 | from starlette.responses import JSONResponse
13 |
14 | logger = logging.getLogger(__name__)
15 |
16 |
17 | class TiPgError(Exception):
18 | """Base exception class."""
19 |
20 |
21 | class NotFound(TiPgError):
22 | """Invalid table name."""
23 |
24 |
25 | class NoPrimaryKey(TiPgError):
26 | """Table has no primary key."""
27 |
28 |
29 | class MissingGeometryColumn(TiPgError):
30 | """Table has no geometry column."""
31 |
32 |
33 | class MissingDatetimeColumn(TiPgError):
34 | """Table has no datetime column."""
35 |
36 |
37 | class InvalidBBox(TiPgError):
38 | """Invalid bounding box coordinates."""
39 |
40 |
41 | class InvalidPropertyName(TiPgError):
42 | """Invalid property/column name."""
43 |
44 |
45 | class InvalidGeometryColumnName(TiPgError):
46 | """Invalid geometry column name."""
47 |
48 |
49 | class InvalidDatetimeColumnName(TiPgError):
50 | """Invalid datetime column name."""
51 |
52 |
53 | class InvalidDatetime(TiPgError):
54 | """Invalid datetime."""
55 |
56 |
57 | class InvalidLimit(TiPgError):
58 | """Invalid Limit."""
59 |
60 |
61 | class MissingFunctionParameter(TiPgError):
62 | """Missing Function Parameter."""
63 |
64 |
65 | class FunctionDirectoryDoesNotExist(TiPgError):
66 | """Function Directory Is Set But Does Not Exist."""
67 |
68 |
69 | class MissingCollectionCatalog(TiPgError):
70 | """`collection_catalog` not registered in the application state."""
71 |
72 |
73 | DEFAULT_STATUS_CODES = {
74 | NotFound: status.HTTP_404_NOT_FOUND,
75 | InvalidBBox: status.HTTP_422_UNPROCESSABLE_ENTITY,
76 | InvalidDatetime: status.HTTP_422_UNPROCESSABLE_ENTITY,
77 | InvalidLimit: status.HTTP_422_UNPROCESSABLE_ENTITY,
78 | MissingGeometryColumn: status.HTTP_500_INTERNAL_SERVER_ERROR,
79 | MissingDatetimeColumn: status.HTTP_500_INTERNAL_SERVER_ERROR,
80 | InvalidPropertyName: status.HTTP_404_NOT_FOUND,
81 | InvalidGeometryColumnName: status.HTTP_404_NOT_FOUND,
82 | InvalidDatetimeColumnName: status.HTTP_404_NOT_FOUND,
83 | PostgresError: status.HTTP_500_INTERNAL_SERVER_ERROR,
84 | Exception: status.HTTP_500_INTERNAL_SERVER_ERROR,
85 | NoPrimaryKey: status.HTTP_422_UNPROCESSABLE_ENTITY,
86 | MissingFunctionParameter: status.HTTP_422_UNPROCESSABLE_ENTITY,
87 | FunctionDirectoryDoesNotExist: status.HTTP_500_INTERNAL_SERVER_ERROR,
88 | MissingCollectionCatalog: status.HTTP_500_INTERNAL_SERVER_ERROR,
89 | }
90 |
91 |
92 | def exception_handler_factory(status_code: int) -> Callable:
93 | """
94 | Create a FastAPI exception handler from a status code.
95 | """
96 |
97 | def handler(request: Request, exc: Exception):
98 | logger.error(exc, exc_info=True)
99 | return JSONResponse(content={"detail": str(exc)}, status_code=status_code)
100 |
101 | return handler
102 |
103 |
104 | def add_exception_handlers(
105 | app: FastAPI, status_codes: Dict[Type[Exception], int]
106 | ) -> None:
107 | """
108 | Add exception handlers to the FastAPI app.
109 | """
110 | for exc, code in status_codes.items():
111 | app.add_exception_handler(exc, exception_handler_factory(code))
112 |
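113 | # Usage sketch (illustrative): register the default handlers on an existing
114 | # FastAPI instance, as `tipg.main` does, or pass a custom status-code mapping.
115 | #
116 | #     add_exception_handlers(app, DEFAULT_STATUS_CODES)
117 | #     add_exception_handlers(app, {InvalidLimit: status.HTTP_400_BAD_REQUEST})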
--------------------------------------------------------------------------------
/tipg/filter/__init__.py:
--------------------------------------------------------------------------------
1 | """tipg.filter"""
2 |
--------------------------------------------------------------------------------
/tipg/filter/evaluate.py:
--------------------------------------------------------------------------------
1 | """tipg.filter.evaluate."""
2 |
3 | from datetime import date, datetime, time, timedelta
4 |
5 | from pygeofilter import ast, values
6 | from pygeofilter.backends.evaluator import Evaluator, handle
7 |
8 | from tipg.filter import filters
9 |
10 | LITERALS = (str, float, int, bool, datetime, date, time, timedelta)
11 |
12 |
13 | class BuildPGEvaluator(Evaluator): # noqa: D101
14 | def __init__(self, field_mapping): # noqa: D107
15 | self.field_mapping = field_mapping
16 |
17 | @handle(ast.Not)
18 | def not_(self, node, sub): # noqa: D102
19 | return filters.negate(sub)
20 |
21 | @handle(ast.And, ast.Or)
22 | def combination(self, node, lhs, rhs): # noqa: D102
23 | return filters.combine((lhs, rhs), node.op.value)
24 |
25 | @handle(ast.Comparison, subclasses=True)
26 | def comparison(self, node, lhs, rhs): # noqa: D102
27 | return filters.runop(
28 | lhs,
29 | rhs,
30 | node.op.value,
31 | )
32 |
33 | @handle(ast.Between)
34 | def between(self, node, lhs, low, high): # noqa: D102
35 | return filters.between(lhs, low, high, node.not_)
36 |
37 | @handle(ast.Like)
38 | def like(self, node, lhs): # noqa: D102
39 | return filters.like(
40 | lhs,
41 | node.pattern,
42 | not node.nocase,
43 | node.not_,
44 | )
45 |
46 | @handle(ast.In)
47 | def in_(self, node, lhs, *options): # noqa: D102
48 | return filters.runop(
49 | lhs,
50 | options,
51 | "in",
52 | node.not_,
53 | )
54 |
55 | @handle(ast.IsNull)
56 | def null(self, node, lhs): # noqa: D102
57 | if isinstance(lhs, list):
58 | lhs = filters.attribute(lhs[0].name, self.field_mapping)
59 | return filters.isnull(lhs)
60 |
61 | # @handle(ast.ExistsPredicateNode)
62 | # def exists(self, node, lhs):
63 | # if self.use_getattr:
64 | # result = hasattr(self.obj, node.lhs.name)
65 | # else:
66 | # result = lhs in self.obj
67 |
68 | # if node.not_:
69 | # result = not result
70 | # return result
71 |
72 | @handle(ast.TemporalPredicate, subclasses=True)
73 | def temporal(self, node, lhs, rhs): # noqa: D102
74 | return filters.temporal(
75 | lhs,
76 | rhs,
77 | node.op.value,
78 | )
79 |
80 | @handle(ast.SpatialComparisonPredicate, subclasses=True)
81 | def spatial_operation(self, node, lhs, rhs): # noqa: D102
82 | return filters.spatial(
83 | lhs,
84 | rhs,
85 | node.op.name,
86 | )
87 |
88 | @handle(ast.Relate)
89 | def spatial_pattern(self, node, lhs, rhs): # noqa: D102
90 | return filters.spatial(
91 | lhs,
92 | rhs,
93 | "RELATE",
94 | pattern=node.pattern,
95 | )
96 |
97 | @handle(ast.SpatialDistancePredicate, subclasses=True)
98 | def spatial_distance(self, node, lhs, rhs): # noqa: D102
99 | return filters.spatial(
100 | lhs,
101 | rhs,
102 | node.op.value,
103 | distance=node.distance,
104 | units=node.units,
105 | )
106 |
107 | @handle(ast.BBox)
108 | def bbox(self, node, lhs): # noqa: D102
109 | return filters.bbox(lhs, node.minx, node.miny, node.maxx, node.maxy, node.crs)
110 |
111 | @handle(ast.Attribute)
112 | def attribute(self, node): # noqa: D102
113 | return filters.attribute(node.name, self.field_mapping)
114 |
115 | @handle(ast.Arithmetic, subclasses=True)
116 | def arithmetic(self, node, lhs, rhs): # noqa: D102
117 | return filters.runop(lhs, rhs, node.op.value)
118 |
119 | @handle(ast.Function)
120 | def function(self, node, *arguments): # noqa: D102
121 | return filters.func(node.name, *arguments)
122 |
123 | @handle(*values.LITERALS)
124 | def literal(self, node): # noqa: D102
125 | return filters.literal(node)
126 |
127 | @handle(values.Interval)
128 | def interval(self, node, start, end): # noqa: D102
129 | return filters.literal((start, end))
130 |
131 | @handle(values.Geometry)
132 | def geometry(self, node): # noqa: D102
133 | return filters.parse_geometry(node.__geo_interface__)
134 |
135 | @handle(values.Envelope)
136 | def envelope(self, node): # noqa: D102
137 | return filters.parse_bbox([node.x1, node.y1, node.x2, node.y2])
138 |
139 |
140 | def to_filter(ast, field_mapping=None): # noqa: D102
141 | """Helper function to translate ECQL AST to Django Query expressions.
142 |
143 | :param ast: the abstract syntax tree
144 | :param field_mapping: a dict mapping from the filter name to the Django field lookup.
145 | :param mapping_choices: a dict mapping field lookups to choices.
146 | :type ast: :class:`Node`
147 | :returns: a Django query object
148 | :rtype: :class:`django.db.models.Q`
149 |
150 | """
151 | return BuildPGEvaluator(field_mapping).evaluate(ast)
152 |
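153 | # Usage sketch (illustrative): `to_filter` is typically fed an AST produced by a
154 | # pygeofilter parser; the field list below is only an example.
155 | #
156 | #     from pygeofilter.parsers.ecql import parse
157 | #
158 | #     where = to_filter(parse("path = 13 AND row < 10"), ["path", "row", "geom"])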
--------------------------------------------------------------------------------
/tipg/filter/filters.py:
--------------------------------------------------------------------------------
1 | """tipg.filter.filters"""
2 |
3 | import re
4 | from datetime import timedelta
5 | from functools import reduce
6 | from inspect import signature
7 | from typing import Any, Callable, Dict, List, Optional
8 |
9 | from buildpg import V
10 | from buildpg.funcs import AND as and_
11 | from buildpg.funcs import NOT as not_
12 | from buildpg.funcs import OR as or_
13 | from buildpg.funcs import any
14 | from buildpg.logic import Func
15 | from geojson_pydantic.geometries import Polygon, parse_geometry_obj
16 |
17 |
18 | def bbox_to_wkt(bbox: List[float], srid: int = 4326) -> str:
19 | """Return WKT representation of a BBOX."""
20 | poly = Polygon.from_bounds(*bbox) # type:ignore
21 | return f"SRID={srid};{poly.wkt}"
22 |
23 |
24 | def parse_geometry(geom: Dict[str, Any]) -> str:
25 | """Parse geometry object and return WKT."""
26 | wkt = parse_geometry_obj(geom).wkt # type:ignore
27 | sridtxt = "" if wkt.startswith("SRID=") else "SRID=4326;"
28 | return f"{sridtxt}{wkt}"
29 |
30 |
31 | # ------------------------------------------------------------------------------
32 | # Filters
33 | # ------------------------------------------------------------------------------
34 | class Operator:
35 | """Filter Operators."""
36 |
37 | OPERATORS: Dict[str, Callable] = {
38 | "==": lambda f, a: f == a,
39 | "=": lambda f, a: f == a,
40 | "eq": lambda f, a: f == a,
41 | "!=": lambda f, a: f != a,
42 | "<>": lambda f, a: f != a,
43 | "ne": lambda f, a: f != a,
44 | ">": lambda f, a: f > a,
45 | "gt": lambda f, a: f > a,
46 | "<": lambda f, a: f < a,
47 | "lt": lambda f, a: f < a,
48 | ">=": lambda f, a: f >= a,
49 | "ge": lambda f, a: f >= a,
50 | "<=": lambda f, a: f <= a,
51 | "le": lambda f, a: f <= a,
52 | "like": lambda f, a: f.like(a),
53 | "ilike": lambda f, a: f.ilike(a),
54 | "not_ilike": lambda f, a: ~f.ilike(a),
55 | "in": lambda f, a: f == any(a),
56 | "not_in": lambda f, a: ~f == any(a),
57 | "any": lambda f, a: f.any(a),
58 | "not_any": lambda f, a: f.not_(f.any(a)),
59 | "INTERSECTS": lambda f, a: Func(
60 | "st_intersects",
61 | f,
62 | Func("st_transform", a, Func("st_srid", f)),
63 | ),
64 | "DISJOINT": lambda f, a: Func(
65 | "st_disjoint", f, Func("st_transform", a, Func("st_srid", f))
66 | ),
67 | "CONTAINS": lambda f, a: Func(
68 | "st_contains", f, Func("st_transform", a, Func("st_srid", f))
69 | ),
70 | "WITHIN": lambda f, a: Func(
71 | "st_within", f, Func("st_transform", a, Func("st_srid", f))
72 | ),
73 | "TOUCHES": lambda f, a: Func(
74 | "st_touches", f, Func("st_transform", a, Func("st_srid", f))
75 | ),
76 | "CROSSES": lambda f, a: Func(
77 | "st_crosses",
78 | f,
79 | Func("st_transform", a, Func("st_srid", f)),
80 | ),
81 | "OVERLAPS": lambda f, a: Func(
82 | "st_overlaps",
83 | f,
84 | Func("st_transform", a, Func("st_srid", f)),
85 | ),
86 | "EQUALS": lambda f, a: Func(
87 | "st_equals",
88 | f,
89 | Func("st_transform", a, Func("st_srid", f)),
90 | ),
91 | "RELATE": lambda f, a, pattern: Func(
92 | "st_relate", f, Func("st_transform", a, Func("st_srid", f)), pattern
93 | ),
94 | "DWITHIN": lambda f, a, distance: Func(
95 | "st_dwithin", f, Func("st_transform", a, Func("st_srid", f)), distance
96 | ),
97 | "BEYOND": lambda f, a, distance: ~Func(
98 | "st_dwithin", f, Func("st_transform", a, Func("st_srid", f)), distance
99 | ),
100 | "+": lambda f, a: f + a,
101 | "-": lambda f, a: f - a,
102 | "*": lambda f, a: f * a,
103 | "/": lambda f, a: f / a,
104 | }
105 |
106 | def __init__(self, operator: Optional[str] = None):
107 | """Init."""
108 | if not operator:
109 | operator = "=="
110 |
111 | if operator not in self.OPERATORS:
112 | raise Exception("Operator `{}` not valid.".format(operator))
113 |
114 | self.operator = operator
115 | self.function = self.OPERATORS[operator]
116 | self.arity = len(signature(self.function).parameters)
117 |
118 |
119 | def func(name, *args):
120 | """Return results of running SQL function with arguments."""
121 | return Func(name, *args)
122 |
123 |
124 | def combine(sub_filters, combinator: str = "AND"):
125 | """Combine filters using a logical combinator
126 |
127 | :param sub_filters: the filters to combine
128 | :param combinator: a string: "AND" / "OR"
129 | :return: the combined filter
130 |
131 | """
132 | assert combinator in ("AND", "OR")
133 | _op = and_ if combinator == "AND" else or_
134 |
135 | def test(acc, q):
136 | return _op(acc, q)
137 |
138 | return reduce(test, sub_filters)
139 |
140 |
141 | def negate(sub_filter):
142 | """Negate a filter, opposing its meaning.
143 |
144 | :param sub_filter: the filter to negate
145 | :return: the negated filter
146 |
147 | """
148 | return not_(sub_filter)
149 |
150 |
151 | def runop(lhs, rhs=None, op: str = "=", negate: bool = False):
152 | """Compare a filter with an expression using a comparison operation.
153 |
154 | :param lhs: the field to compare
155 | :param rhs: the filter expression
156 | :param op: a string denoting the operation.
157 | :return: a comparison expression object
158 |
159 | """
160 | _op = Operator(op)
161 |
162 | if negate:
163 | return not_(_op.function(lhs, rhs))
164 | return _op.function(lhs, rhs)
165 |
166 |
167 | def between(lhs, low, high, negate=False):
168 | """Create a filter to match elements that have a value within a certain range.
169 |
170 | :param lhs: the field to compare
171 | :param low: the lower value of the range
172 | :param high: the upper value of the range
173 | :param negate: whether the result shall be negated (NOT BETWEEN)
174 | :return: a comparison expression object
175 |
176 | """
177 | l_op = Operator("<=")
178 | g_op = Operator(">=")
179 | if negate:
180 | return not_(and_(g_op.function(lhs, low), l_op.function(lhs, high)))
181 |
182 | return and_(g_op.function(lhs, low), l_op.function(lhs, high))
183 |
184 |
185 | def like(lhs, rhs, case=False, negate=False):
186 | """Create a filter to filter elements according to a string attribute using wildcard expressions.
187 |
188 | :param lhs: the field to compare
189 | :param rhs: the wildcard pattern: a string containing any number of '%' characters as wildcards.
190 | :param case: whether the lookup shall be done case sensitively or not
191 | :param negate: whether the match shall be negated (NOT LIKE / NOT ILIKE)
192 | :return: a comparison expression object
193 |
194 | """
195 | if case:
196 | _op = Operator("like")
197 | else:
198 | _op = Operator("ilike")
199 |
200 | if negate:
201 | return not_(_op.function(lhs, rhs))
202 |
203 | return _op.function(lhs, rhs)
204 |
205 |
206 | def temporal(lhs, time_or_period, op):
207 | """Create a temporal filter for the given temporal attribute.
208 |
209 | :param lhs: the field to compare
210 | :type lhs: a buildpg expression (e.g. the result of :func:`attribute`)
211 | :param time_or_period: the time instant or time span to use as a filter
212 | :type time_or_period: :class:`datetime.datetime` or a tuple of two datetimes or a tuple of one datetime and one :class:`datetime.timedelta`
213 | :param op: the comparison operation. one of ``"BEFORE"``, ``"BEFORE OR DURING"``, ``"DURING"``, ``"DURING OR AFTER"``, ``"AFTER"``.
214 | :type op: str
215 | :return: a comparison expression object
216 | :rtype: a buildpg comparison expression
217 |
218 | """
219 | low = None
220 | high = None
221 | equal = None
222 | if op in ("BEFORE", "AFTER"):
223 | if op == "BEFORE":
224 | high = time_or_period
225 | else:
226 | low = time_or_period
227 | elif op == "TEQUALS":
228 | equal = time_or_period
229 | else:
230 | low, high = time_or_period
231 |
232 | if isinstance(low, timedelta):
233 | low = high - low
234 | if isinstance(high, timedelta):
235 | high = low + high
236 | if low is not None or high is not None:
237 | if low is not None and high is not None:
238 | return between(lhs, low, high)
239 | elif low is not None:
240 | return runop(lhs, low, ">=")
241 | else:
242 | return runop(lhs, high, "<=")
243 | elif equal is not None:
244 | return runop(lhs, equal, "==")
245 |
246 |
247 | UNITS_LOOKUP = {"kilometers": "km", "meters": "m"}
248 |
249 |
250 | def spatial(lhs, rhs, op, pattern=None, distance=None, units=None):
251 | """Create a spatial filter for the given spatial attribute.
252 |
253 | :param lhs: the field to compare
254 | :param rhs: the geometry (or geometry expression) to compare against
255 | :param op: the comparison operation. one of ``"INTERSECTS"``, ``"DISJOINT"``, `"CONTAINS"``, ``"WITHIN"``, ``"TOUCHES"``, ``"CROSSES"``, ``"OVERLAPS"``, ``"EQUALS"``, ``"RELATE"``, ``"DWITHIN"``, ``"BEYOND"``
256 | :param pattern: the spatial relation pattern
257 | :param distance: the distance value for distance based lookups: ``"DWITHIN"`` and ``"BEYOND"``
258 | :param units: the units the distance is expressed in
259 | :return: a comparison expression object
260 |
261 | """
262 |
263 | _op = Operator(op)
264 | if op == "RELATE":
265 | return _op.function(lhs, rhs, pattern)
266 | elif op in ("DWITHIN", "BEYOND"):
267 | if units == "kilometers":
268 | distance = distance / 1000
269 | elif units == "miles":
270 | distance = distance / 1609
271 | return _op.function(lhs, rhs, distance)
272 | else:
273 | return _op.function(lhs, rhs)
274 |
275 |
276 | def bbox(lhs, minx, miny, maxx, maxy, crs: int = 4326):
277 | """Create a bounding box filter for the given spatial attribute.
278 |
279 | :param lhs: the field to compare
280 | :param minx: the lower x part of the bbox
281 | :param miny: the lower y part of the bbox
282 | :param maxx: the upper x part of the bbox
283 | :param maxy: the upper y part of the bbox
284 | :param crs: the CRS the bbox is expressed in
285 | :return: a comparison expression object
286 |
287 | """
288 |
289 | return Func("st_intersects", lhs, bbox_to_wkt([minx, miny, maxx, maxy], crs))
290 |
291 |
292 | def quote_ident(s: str) -> str:
293 | """quote."""
294 | if re.match(r"^[a-z]+$", s):
295 | return s
296 | if re.match(r"^[a-zA-Z][\w\d_]*$", s):
297 | return f'"{s}"'
298 | raise TypeError(f"{s} is not a valid identifier")
299 |
300 |
301 | def attribute(name: str, fields: List[str]):
302 | """Create an attribute lookup expression using a field mapping dictionary.
303 |
304 | :param name: the field filter name
305 | :param fields: the list of valid field names to use as a lookup.
306 |
307 | """
308 | if name in fields:
309 | return V(name)
310 | elif name.lower() == "true":
311 | return True
312 | elif name.lower() == "false":
313 | return False
314 | else:
315 | raise TypeError(f"Field {name} not in table.")
316 |
317 |
318 | def isnull(lhs):
319 | """null value."""
320 | return lhs.is_(V("NULL"))
321 |
322 |
323 | def literal(value):
324 | """literal value."""
325 | return value
326 |
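327 | # Worked examples (illustrative; output formatting is approximate):
328 | #
329 | #     bbox_to_wkt([-180, -90, 180, 90])
330 | #     # -> "SRID=4326;POLYGON ((-180.0 -90.0, ...))"  (EWKT for the bbox polygon)
331 | #
332 | #     attribute("path", ["path", "row"])   # -> V("path")
333 | #     runop(V("path"), 13, ">=")           # buildpg expression for `path >= 13`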
--------------------------------------------------------------------------------
/tipg/logger.py:
--------------------------------------------------------------------------------
1 | """tipg logger."""
2 |
3 | import logging
4 |
5 | logger = logging.getLogger("tipg")
6 |
--------------------------------------------------------------------------------
/tipg/main.py:
--------------------------------------------------------------------------------
1 | """tipg app."""
2 |
3 | from contextlib import asynccontextmanager
4 | from typing import Any, List
5 |
6 | import jinja2
7 |
8 | from tipg import __version__ as tipg_version
9 | from tipg.collections import register_collection_catalog
10 | from tipg.database import close_db_connection, connect_to_db
11 | from tipg.errors import DEFAULT_STATUS_CODES, add_exception_handlers
12 | from tipg.factory import Endpoints
13 | from tipg.middleware import CacheControlMiddleware, CatalogUpdateMiddleware
14 | from tipg.settings import APISettings, CustomSQLSettings, DatabaseSettings
15 |
16 | from fastapi import FastAPI, Request
17 |
18 | from starlette.middleware.cors import CORSMiddleware
19 | from starlette.templating import Jinja2Templates
20 | from starlette_cramjam.middleware import CompressionMiddleware
21 |
22 | settings = APISettings()
23 | db_settings = DatabaseSettings()
24 | custom_sql_settings = CustomSQLSettings()
25 |
26 |
27 | @asynccontextmanager
28 | async def lifespan(app: FastAPI):
29 | """FastAPI Lifespan."""
30 | # Create Connection Pool
31 | await connect_to_db(
32 | app,
33 | schemas=db_settings.schemas,
34 | tipg_schema=db_settings.tipg_schema,
35 | user_sql_files=custom_sql_settings.sql_files,
36 | )
37 |
38 | # Register Collection Catalog
39 | await register_collection_catalog(app, db_settings=db_settings)
40 |
41 | yield
42 |
43 | # Close the Connection Pool
44 | await close_db_connection(app)
45 |
46 |
47 | app = FastAPI(
48 | title=settings.name,
49 | version=tipg_version,
50 | openapi_url="/api",
51 | docs_url="/api.html",
52 | lifespan=lifespan,
53 | root_path=settings.root_path,
54 | )
55 |
56 | # custom template directory
57 | templates_location: List[Any] = (
58 | [jinja2.FileSystemLoader(settings.template_directory)]
59 | if settings.template_directory
60 | else []
61 | )
62 | # default template directory
63 | templates_location.append(jinja2.PackageLoader(__package__, "templates"))
64 |
65 | jinja2_env = jinja2.Environment(loader=jinja2.ChoiceLoader(templates_location))
66 | templates = Jinja2Templates(env=jinja2_env)
67 |
68 | ogc_api = Endpoints(
69 | title=settings.name,
70 | templates=templates,
71 | with_tiles_viewer=settings.add_tiles_viewer,
72 | )
73 | app.include_router(ogc_api.router)
74 |
75 | # Set all CORS enabled origins
76 | if settings.cors_origins:
77 | app.add_middleware(
78 | CORSMiddleware,
79 | allow_origins=settings.cors_origins,
80 | allow_credentials=True,
81 | allow_methods=["GET"],
82 | allow_headers=["*"],
83 | )
84 |
85 | app.add_middleware(CacheControlMiddleware, cachecontrol=settings.cachecontrol)
86 | app.add_middleware(CompressionMiddleware, compression_level=6)
87 |
88 | if settings.catalog_ttl:
89 | app.add_middleware(
90 | CatalogUpdateMiddleware,
91 | func=register_collection_catalog,
92 | ttl=settings.catalog_ttl,
93 | db_settings=db_settings,
94 | )
95 |
96 | add_exception_handlers(app, DEFAULT_STATUS_CODES)
97 |
98 |
99 | @app.get(
100 | "/healthz",
101 | description="Health Check.",
102 | summary="Health Check.",
103 | operation_id="healthCheck",
104 | tags=["Health Check"],
105 | )
106 | def ping():
107 | """Health check."""
108 | return {"ping": "pong!"}
109 |
110 |
111 | if settings.debug:
112 |
113 | @app.get("/rawcatalog", tags=["debug"])
114 | async def raw_catalog(request: Request):
115 | """Return parsed catalog data for testing."""
116 | return request.app.state.collection_catalog
117 |
118 | @app.get("/refresh", tags=["debug"])
119 | async def refresh(request: Request):
120 | """Return parsed catalog data for testing."""
121 | await register_collection_catalog(
122 | request.app,
123 | db_settings=db_settings,
124 | )
125 | return request.app.state.collection_catalog
126 |
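127 | # The application can be served with any ASGI server, for example (assuming the
128 | # database connection is configured via `DATABASE_URL` or the POSTGRES_* variables):
129 | #
130 | #     uvicorn tipg.main:app --host 0.0.0.0 --port 8080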
--------------------------------------------------------------------------------
/tipg/middleware.py:
--------------------------------------------------------------------------------
1 | """tipg middlewares."""
2 |
3 | import re
4 | from datetime import datetime, timedelta
5 | from typing import Any, Optional, Protocol, Set
6 |
7 | from tipg.collections import Catalog
8 | from tipg.errors import MissingCollectionCatalog
9 | from tipg.logger import logger
10 |
11 | from starlette.background import BackgroundTask
12 | from starlette.datastructures import MutableHeaders
13 | from starlette.requests import Request
14 | from starlette.types import ASGIApp, Message, Receive, Scope, Send
15 |
16 |
17 | class CacheControlMiddleware:
18 | """MiddleWare to add CacheControl in response headers."""
19 |
20 | def __init__(
21 | self,
22 | app: ASGIApp,
23 | cachecontrol: Optional[str] = None,
24 | cachecontrol_max_http_code: Optional[int] = 500,
25 | exclude_path: Optional[Set[str]] = None,
26 | ) -> None:
27 | """Init Middleware.
28 |
29 | Args:
30 | app (ASGIApp): starlette/FastAPI application.
31 | cachecontrol (str): Cache-Control string to add to the response.
32 | exclude_path (set): Set of regex expressions used to filter the path.
33 |
34 | """
35 | self.app = app
36 | self.cachecontrol = cachecontrol
37 | self.cachecontrol_max_http_code = cachecontrol_max_http_code
38 | self.exclude_path = exclude_path or set()
39 |
40 | async def __call__(self, scope: Scope, receive: Receive, send: Send):
41 | """Handle call."""
42 | if scope["type"] != "http":
43 | await self.app(scope, receive, send)
44 | return
45 |
46 | async def send_wrapper(message: Message):
47 | """Send Message."""
48 | if message["type"] == "http.response.start":
49 | response_headers = MutableHeaders(scope=message)
50 | if self.cachecontrol and not response_headers.get("Cache-Control"):
51 | if (
52 | scope["method"] in ["HEAD", "GET"]
53 | and message["status"] < self.cachecontrol_max_http_code
54 | and not any(
55 | re.match(path, scope["path"]) for path in self.exclude_path
56 | )
57 | ):
58 | response_headers["Cache-Control"] = self.cachecontrol
59 |
60 | await send(message)
61 |
62 | await self.app(scope, receive, send_wrapper)
63 |
64 |
65 | class CatalogUpdateFunc(Protocol):
66 | """Catalog update function protocol."""
67 |
68 | def __call__(self, app: ASGIApp, **kwargs: Any) -> None:
69 | """define input/output for the function."""
70 | ...
71 |
72 |
73 | class CatalogUpdateMiddleware:
74 | """Middleware to update the catalog cache."""
75 |
76 | def __init__(
77 | self,
78 | app: ASGIApp,
79 | *,
80 | func: CatalogUpdateFunc,
81 | ttl: int = 300,
82 | **kwargs: Any,
83 | ) -> None:
84 | """Init Middleware."""
85 | self.app = app
86 | self.func = func
87 | self.ttl = ttl
88 | self.kwargs = kwargs
89 |
90 | async def __call__(self, scope: Scope, receive: Receive, send: Send):
91 | """Handle call."""
92 | if scope["type"] != "http":
93 | await self.app(scope, receive, send)
94 | return
95 |
96 | request = Request(scope)
97 | background: Optional[BackgroundTask] = None
98 |
99 | catalog: Catalog = getattr(request.app.state, "collection_catalog", None)
100 | if not catalog:
101 | raise MissingCollectionCatalog("Could not find collections catalog.")
102 |
103 | last_updated = catalog["last_updated"]
104 | if not last_updated or datetime.now() > (
105 | last_updated + timedelta(seconds=self.ttl)
106 | ):
107 | logger.debug(
108 | f"Running catalog refresh in background. Last Updated: {last_updated}"
109 | )
110 | background = BackgroundTask(
111 | self.func,
112 | request.app,
113 | **self.kwargs,
114 | )
115 |
116 | await self.app(scope, receive, send)
117 | if background:
118 | await background()
119 |
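120 | # Usage sketch (illustrative, mirroring `tipg.main`): both middlewares are added
121 | # with Starlette's `add_middleware`; `register_collection_catalog` comes from
122 | # `tipg.collections` and `db_settings` is a `DatabaseSettings` instance.
123 | #
124 | #     app.add_middleware(CacheControlMiddleware, cachecontrol="public, max-age=3600")
125 | #     app.add_middleware(
126 | #         CatalogUpdateMiddleware,
127 | #         func=register_collection_catalog,
128 | #         ttl=300,
129 | #         db_settings=db_settings,
130 | #     )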
--------------------------------------------------------------------------------
/tipg/py.typed:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/developmentseed/tipg/871ee56e38281dbfeec493fc20d08b7b60e922cd/tipg/py.typed
--------------------------------------------------------------------------------
/tipg/resources/__init__.py:
--------------------------------------------------------------------------------
1 | """tipg.resources"""
2 |
--------------------------------------------------------------------------------
/tipg/resources/enums.py:
--------------------------------------------------------------------------------
1 | """tipg enums."""
2 |
3 | from enum import Enum
4 |
5 |
6 | class MediaType(str, Enum):
7 | """Responses Media types formerly known as MIME types."""
8 |
9 | xml = "application/xml"
10 | json = "application/json"
11 | ndjson = "application/ndjson"
12 | geojson = "application/geo+json"
13 | geojsonseq = "application/geo+json-seq"
14 | schemajson = "application/schema+json"
15 | html = "text/html"
16 | text = "text/plain"
17 | csv = "text/csv"
18 | openapi30_json = "application/vnd.oai.openapi+json;version=3.0"
19 | openapi30_yaml = "application/vnd.oai.openapi;version=3.0"
20 | pbf = "application/x-protobuf"
21 | mvt = "application/vnd.mapbox-vector-tile"
22 |
--------------------------------------------------------------------------------
/tipg/resources/response.py:
--------------------------------------------------------------------------------
1 | """tipg custom responses."""
2 |
3 | import decimal
4 | from typing import Any
5 |
6 | import orjson
7 | from asyncpg.pgproto import pgproto
8 |
9 | from fastapi.responses import JSONResponse
10 |
11 |
12 | def default(obj):
13 | """Instruct orjson what to do with types it does not natively serialize"""
14 | if isinstance(obj, decimal.Decimal):
15 | return str(obj)
16 | elif isinstance(obj, pgproto.UUID):
17 | return str(obj)
18 |
19 |
20 | def orjsonDumps(content: Any):
21 | """Small wrapper function to run the orjson.dumps with the additional options we want"""
22 | return orjson.dumps(
23 | content,
24 | default=default,
25 | option=orjson.OPT_NON_STR_KEYS | orjson.OPT_SERIALIZE_NUMPY,
26 | )
27 |
28 |
29 | class ORJSONResponse(JSONResponse):
30 | """Custom response handler for using orjson"""
31 |
32 | def render(self, content: Any) -> bytes:
33 | """Render the content into a JSON response using orjson"""
34 | return orjsonDumps(content)
35 |
36 |
37 | class GeoJSONResponse(ORJSONResponse):
38 | """GeoJSON Response"""
39 |
40 | media_type = "application/geo+json"
41 |
42 |
43 | class SchemaJSONResponse(ORJSONResponse):
44 | """Schema Response"""
45 |
46 | media_type = "application/schema+json"
47 |
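48 | # Worked example (illustrative): values orjson cannot serialize natively are routed
49 | # through `default` above, so Decimals and UUIDs are emitted as strings.
50 | #
51 | #     orjsonDumps({"price": decimal.Decimal("9.99")})
52 | #     # -> b'{"price":"9.99"}'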
--------------------------------------------------------------------------------
/tipg/settings.py:
--------------------------------------------------------------------------------
1 | """tipg config."""
2 |
3 | import json
4 | import pathlib
5 | from typing import Any, Dict, List, Optional
6 |
7 | from pydantic import (
8 | BaseModel,
9 | DirectoryPath,
10 | Field,
11 | PostgresDsn,
12 | ValidationInfo,
13 | field_validator,
14 | model_validator,
15 | )
16 | from pydantic_settings import BaseSettings
17 |
18 |
19 | class APISettings(BaseSettings):
20 | """API settings"""
21 |
22 | name: str = "TiPg: OGC Features and Tiles API"
23 | debug: bool = False
24 | cors_origins: str = "*"
25 | cachecontrol: str = "public, max-age=3600"
26 | template_directory: Optional[str] = None
27 | root_path: str = ""
28 |
29 | add_tiles_viewer: bool = True
30 |
31 | catalog_ttl: int = 300
32 |
33 | model_config = {"env_prefix": "TIPG_", "env_file": ".env", "extra": "ignore"}
34 |
35 | @field_validator("cors_origins")
36 | def parse_cors_origin(cls, v):
37 | """Parse CORS origins."""
38 | return [origin.strip() for origin in v.split(",")]
39 |
40 |
41 | class TableConfig(BaseModel):
42 | """Configuration to add table options with env variables."""
43 |
44 | geomcol: Optional[str] = None
45 | datetimecol: Optional[str] = None
46 | pk: Optional[str] = None
47 | properties: Optional[List[str]] = None
48 |
49 | model_config = {"extra": "ignore"}
50 |
51 | @field_validator("properties", mode="before")
52 | def _properties(cls, v: Any) -> Any:
53 | """set geometry from geo interface or input"""
54 | if isinstance(v, str):
55 | return json.loads(v)
56 | else:
57 | return v
58 |
59 |
60 | class TableSettings(BaseSettings):
61 | """Table configuration settings"""
62 |
63 | fallback_key_names: List[str] = ["ogc_fid", "id", "pkey", "gid"]
64 | table_config: Dict[str, TableConfig] = {}
65 | sort_columns: bool = True
66 |
67 | model_config = {
68 | "env_prefix": "TIPG_",
69 | "env_file": ".env",
70 | "env_nested_delimiter": "__",
71 | "extra": "ignore",
72 | }
73 |
74 |
75 | class TMSSettings(BaseSettings):
76 | """TiPG TMS settings"""
77 |
78 | default_tms: str = "WebMercatorQuad"
79 | default_minzoom: int = 0
80 | default_maxzoom: int = 22
81 |
82 | model_config = {"env_prefix": "TIPG_", "env_file": ".env", "extra": "ignore"}
83 |
84 |
85 | class FeaturesSettings(BaseSettings):
86 | """TiPG Items settings"""
87 |
88 | default_features_limit: int = Field(10, ge=0)
89 | max_features_per_query: int = Field(10000, ge=0)
90 |
91 | model_config = {"env_prefix": "TIPG_", "env_file": ".env", "extra": "ignore"}
92 |
93 | @model_validator(mode="after")
94 | def max_default(self):
95 | """Set default bounds and srid when this is a function."""
96 | if self.default_features_limit > self.max_features_per_query:
97 | raise ValueError(
98 | f"Invalid combination of `limit` ({self.default_features_limit}) and `max features per query` ({self.max_features_per_query}) values"
99 | )
100 |
101 | return self
102 |
103 |
104 | class MVTSettings(BaseSettings):
105 | """TiPG MVT settings"""
106 |
107 | tile_resolution: int = 4096
108 | tile_buffer: int = 256
109 | tile_clip: bool = True
110 | max_features_per_tile: int = 10000
111 |
112 | set_mvt_layername: Optional[bool] = None
113 |
114 | model_config = {"env_prefix": "TIPG_", "env_file": ".env", "extra": "ignore"}
115 |
116 |
117 | class PostgresSettings(BaseSettings):
118 | """Postgres connection settings.
119 |
120 | Attributes:
121 | postgres_user: postgres username.
122 | postgres_pass: postgres password.
123 | postgres_host: hostname for the connection.
124 | postgres_port: database port.
125 | postgres_dbname: database name.
126 |
127 | """
128 |
129 | postgres_user: Optional[str] = None
130 | postgres_pass: Optional[str] = None
131 | postgres_host: Optional[str] = None
132 | postgres_port: Optional[int] = None
133 | postgres_dbname: Optional[str] = None
134 |
135 | database_url: Optional[PostgresDsn] = None
136 |
137 | db_min_conn_size: int = 1
138 | db_max_conn_size: int = 10
139 | db_max_queries: int = 50000
140 | db_max_inactive_conn_lifetime: float = 300
141 |
142 | model_config = {"env_file": ".env", "extra": "ignore"}
143 |
144 | # https://github.com/tiangolo/full-stack-fastapi-postgresql/blob/master/%7B%7Bcookiecutter.project_slug%7D%7D/backend/app/app/core/config.py#L42
145 | @field_validator("database_url", mode="before")
146 | def assemble_db_connection(
147 | cls, v: Optional[str], info: ValidationInfo
148 | ) -> PostgresDsn:
149 | """Validate db url settings."""
150 | if isinstance(v, str):
151 | return PostgresDsn(v)
152 |
153 | return PostgresDsn.build(
154 | scheme="postgresql",
155 | username=info.data.get("postgres_user"),
156 | password=info.data.get("postgres_pass"),
157 | host=info.data.get("postgres_host", ""),
158 | port=info.data.get("postgres_port", 5432),
159 | path=info.data.get("postgres_dbname", ""),
160 | )
161 |
162 |
163 | class DatabaseSettings(BaseSettings):
164 | """TiPg Database settings."""
165 |
166 | schemas: List[str] = ["public"]
167 | tipg_schema: str = Field("pg_temp", alias="application_schema")
168 | tables: Optional[List[str]] = None
169 | exclude_tables: Optional[List[str]] = None
170 | exclude_table_schemas: Optional[List[str]] = None
171 | functions: Optional[List[str]] = None
172 | exclude_functions: Optional[List[str]] = None
173 | exclude_function_schemas: Optional[List[str]] = None
174 | datetime_extent: bool = True
175 | spatial_extent: bool = True
176 |
177 | only_spatial_tables: bool = True
178 |
179 | model_config = {"env_prefix": "TIPG_DB_", "env_file": ".env", "extra": "ignore"}
180 |
181 |
182 | class CustomSQLSettings(BaseSettings):
183 | """TiPg Custom SQL settings."""
184 |
185 | custom_sql_directory: Optional[DirectoryPath] = None
186 |
187 | model_config = {"env_prefix": "TIPG_", "env_file": ".env", "extra": "ignore"}
188 |
189 | @property
190 | def sql_files(self) -> Optional[List[pathlib.Path]]:
191 | """return a list of SQL files within the custom sql directory."""
192 | if self.custom_sql_directory:
193 | return list(self.custom_sql_directory.glob("*.sql"))
194 |
195 | return None
196 |
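197 | # Example configuration (illustrative values only): the settings classes above read
198 | # from the environment or a `.env` file, e.g.
199 | #
200 | #     TIPG_NAME="My Features API"
201 | #     TIPG_DB_SCHEMAS='["public", "myschema"]'
202 | #     TIPG_CUSTOM_SQL_DIRECTORY=/opt/tipg/sql
203 | #     DATABASE_URL=postgresql://user:password@localhost:5432/postgis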
--------------------------------------------------------------------------------
/tipg/sql/dbcatalog.sql:
--------------------------------------------------------------------------------
1 | CREATE OR REPLACE FUNCTION pg_temp.typ(t text) RETURNS text AS $$
2 | SELECT replace(replace(replace(replace(t,'character varying','text'),'double precision','float8'),'timestamp with time zone','timestamptz'),'timestamp without time zone','timestamp');
3 | $$ LANGUAGE SQL IMMUTABLE STRICT;
4 |
5 | CREATE OR REPLACE FUNCTION pg_temp.nspname(n oid) RETURNS text AS $$
6 | SELECT CASE WHEN n=pg_my_temp_schema() THEN 'pg_temp' ELSE nspname::text END
7 | FROM pg_namespace WHERE oid=n;
8 | $$ LANGUAGE SQL STABLE;
9 |
10 | CREATE OR REPLACE FUNCTION pg_temp.nspname(n regnamespace) RETURNS text AS $$
11 | SELECT pg_temp.nspname(n::oid);
12 | $$ LANGUAGE SQL STABLE;
13 |
14 | CREATE OR REPLACE FUNCTION pg_temp.tipg_pk(
15 | table_oid oid
16 | ) RETURNS text AS $$
17 | SELECT attname::text
18 | FROM
19 | pg_attribute a
20 | LEFT JOIN
21 | pg_index i
22 | ON (a.attrelid=i.indrelid AND a.attnum = ANY(i.indkey))
23 | WHERE
24 | a.attrelid = table_oid
25 | AND
26 | i.indnatts = 1
27 | ORDER BY
28 | i.indisprimary DESC NULLS LAST,
29 | i.indisunique DESC NULLS LAST
30 | LIMIT 1;
31 | $$ LANGUAGE SQL;
32 |
33 | CREATE OR REPLACE FUNCTION pg_temp.tipg_properties(
34 | att pg_attribute,
35 | spatial_extent boolean,
36 | datetime_extent boolean
37 | ) RETURNS jsonb AS $$
38 | DECLARE
39 | attname text := att.attname;
40 | attdescription text := col_description(att.attrelid, att.attnum);
41 | atttype text := pg_temp.typ(format_type(att.atttypid, null));
42 | attmin json;
43 | attmax json;
44 | srid int;
45 | geometry_type text;
46 | _schemaname text;
47 | _relname text;
48 | _n_live_tup bigint;
49 | _n_mod_since_analyze bigint;
50 | bounds_geom geometry;
51 | bounds float[];
52 | BEGIN
53 | IF atttype IN ('timestamp', 'timestamptz', 'date') AND datetime_extent THEN
54 | EXECUTE FORMAT(
55 | $q$
56 | SELECT to_json(min(%I::timestamptz)), to_json(max(%I::timestamptz))
57 | FROM %s;
58 | $q$,
59 | attname,
60 | attname,
61 | att.attrelid::regclass::text
62 | ) INTO attmin, attmax;
63 | ELSIF atttype IN ('geometry', 'geography') THEN
64 | geometry_type := postgis_typmod_type(att.atttypmod);
65 | srid = coalesce(nullif(postgis_typmod_srid(att.atttypmod),0), 4326);
66 |
67 | IF spatial_extent THEN
68 | SELECT schemaname, relname, n_live_tup, n_mod_since_analyze
69 | INTO _schemaname, _relname, _n_live_tup, _n_mod_since_analyze
70 | FROM pg_stat_user_tables
71 | WHERE relid = att.attrelid;
72 |
73 | IF _n_live_tup > 0 AND _n_mod_since_analyze = 0 THEN
74 | bounds_geom := st_setsrid(st_estimatedextent(_schemaname, _relname, attname), srid);
75 | END IF;
76 |
77 | IF bounds_geom IS NULL THEN
78 | IF atttype = 'geography' THEN
79 | EXECUTE format('SELECT ST_SetSRID(ST_Extent(%I::geometry), %L) FROM %s', attname, srid, att.attrelid::regclass::text) INTO bounds_geom;
80 | ELSE
81 | EXECUTE format('SELECT ST_SetSRID(ST_Extent(%I), %L) FROM %s', attname, srid, att.attrelid::regclass::text) INTO bounds_geom;
82 | END IF;
83 | END IF;
84 |
85 | IF bounds_geom IS NOT NULL THEN
86 | IF srid != 4326 THEN
87 | bounds_geom := st_transform(bounds_geom, 4326);
88 | END IF;
89 | bounds = ARRAY[ st_xmin(bounds_geom), st_ymin(bounds_geom), st_xmax(bounds_geom), st_ymax(bounds_geom) ];
90 | END IF;
91 | END IF;
92 | END IF;
93 |
94 | RETURN jsonb_strip_nulls(jsonb_build_object(
95 | 'name', attname,
96 | 'type', atttype,
97 | 'description', attdescription,
98 | 'mindt', attmin,
99 | 'maxdt', attmax,
100 | 'geometry_type', geometry_type,
101 | 'srid', srid,
102 | 'bounds', bounds
103 | ));
104 | END;
105 | $$ LANGUAGE PLPGSQL;
106 |
107 | CREATE OR REPLACE FUNCTION pg_temp.tipg_tproperties(
108 | c pg_class,
109 | spatial_extent boolean,
110 | datetime_extent boolean
111 | ) RETURNS jsonb AS $$
112 | WITH t AS (
113 | SELECT
114 | jsonb_agg(pg_temp.tipg_properties(a, spatial_extent, datetime_extent)) as properties
115 | FROM
116 | pg_attribute a
117 | WHERE
118 | attnum>0
119 | and attrelid=c.oid
120 | and not attisdropped
121 | and has_column_privilege(c.oid, a.attnum, 'SELECT')
122 | ) SELECT jsonb_build_object(
123 | 'entity', 'Table',
124 | 'pk', pg_temp.tipg_pk(c.oid),
125 | 'name', c.relname::text,
126 | 'schema', pg_temp.nspname(c.relnamespace),
127 | 'properties', properties
128 | ) FROM t;
129 | $$ LANGUAGE SQL;
130 |
131 | CREATE OR REPLACE FUNCTION pg_temp.tipg_tproperties(
132 | tabl text,
133 | spatial_extent boolean,
134 | datetime_extent boolean
135 | ) RETURNS jsonb AS $$
136 | SELECT pg_temp.tipg_tproperties(pg_class, spatial_extent, datetime_extent) FROM pg_class WHERE oid=tabl::regclass;
137 | $$ LANGUAGE SQL;
138 |
139 | CREATE OR REPLACE FUNCTION pg_temp.tipg_fun_defaults(defaults pg_node_tree) RETURNS text[] AS $$
140 | WITH d AS (
141 | SELECT btrim(split_part(btrim(unnest(string_to_array(
142 | pg_get_expr(defaults,0::oid),
143 | ','
144 | ))),'::',1),'''') d
145 | ) SELECT array_agg(d) FROM d
146 | ;
147 | $$ LANGUAGE SQL;
148 |
149 | CREATE OR REPLACE FUNCTION pg_temp.tipg_fproperties(
150 | p pg_proc
151 | ) RETURNS jsonb AS $$
152 | DECLARE
153 | defaults text[];
154 | argtypes oid[];
155 | argmodes text[];
156 | proargnames text[] := coalesce(p.proargnames, array_fill(null::text, ARRAY[p.pronargs]));
157 | properties json;
158 | parameters json;
159 | BEGIN
160 | IF p.pronargdefaults > 0 AND p.pronargs > 0 THEN
161 | defaults :=
162 | array_fill(null::text, ARRAY[p.pronargs-p.pronargdefaults])
163 | || pg_temp.tipg_fun_defaults(p.proargdefaults)
164 | ;
165 | ELSE
166 | defaults := array_fill(null::text, ARRAY[p.pronargs]);
167 | END IF;
168 | argtypes := coalesce(p.proallargtypes, (p.proargtypes::oid[])[:]);
169 | argmodes := coalesce(p.proargmodes,array_fill('i'::text,ARRAY[cardinality(proargnames)]));
170 |
171 | IF format_type(p.prorettype, null) IS DISTINCT FROM 'record' THEN
172 | argtypes := argtypes || p.prorettype;
173 | argmodes := argmodes || 'o'::text;
174 | proargnames := proargnames || p.proname::text;
175 | END IF;
176 |
177 | WITH t AS (
178 | SELECT
179 | pg_temp.typ(format_type(argtype, null)) as argtype,
180 | argmode,
181 | proargname,
182 | def,
183 | argnum
184 | FROM UNNEST(argtypes, argmodes, proargnames, defaults)
185 | WITH ORDINALITY AS a(argtype, argmode, proargname, def, argnum)
186 | ) SELECT
187 | jsonb_agg(json_strip_nulls(json_build_object(
188 | 'name', proargname,
189 | 'type', argtype,
190 | 'geometry_type', CASE WHEN argtype IN ('geometry','geography') THEN 'Geometry' ELSE NULL END
191 | )) ORDER BY argnum ) FILTER (WHERE argmode IN ('t','b', 'o')),
192 | jsonb_agg(json_strip_nulls(json_build_object(
193 | 'name', proargname,
194 | 'type', argtype,
195 | 'default', def
196 | )) ORDER BY argnum ) FILTER (WHERE argmode IN ('i','b'))
197 | FROM t INTO properties, parameters;
198 | RETURN jsonb_build_object(
199 | 'entity', 'Function',
200 | 'name', p.proname,
201 | 'schema', pg_temp.nspname(p.pronamespace),
202 | 'properties', properties,
203 | 'parameters', parameters
204 | );
205 | END;
206 | $$ LANGUAGE PLPGSQL;
207 |
208 | CREATE OR REPLACE FUNCTION pg_temp.tipg_fproperties(
209 | func text
210 | ) RETURNS jsonb AS $$
211 | SELECT pg_temp.tipg_fproperties(pg_proc) FROM pg_proc WHERE oid=func::regproc;
212 | $$ LANGUAGE SQL;
213 |
214 | CREATE OR REPLACE FUNCTION pg_temp.tipg_get_schemas(include text[] DEFAULT NULL, exclude text[] DEFAULT NULL) RETURNS SETOF oid AS $$
215 | DECLARE
216 | BEGIN
217 | IF include IS NULL OR cardinality(include) = 0 THEN
218 | include:=string_to_array(current_setting('search_path',false),',');
219 | END IF;
220 | RETURN QUERY
221 | WITH schemas AS (
222 | SELECT pg_my_temp_schema()::regnamespace::text AS _schema
223 | UNION
224 | SELECT btrim(unnest(include))
225 | EXCEPT
226 | SELECT btrim(unnest(exclude))
227 | )
228 | SELECT DISTINCT oid
229 | FROM pg_namespace, schemas
230 | WHERE
231 | nspname::text=_schema
232 | AND
233 | has_schema_privilege(oid, 'usage')
234 | ;
235 | END;
236 | $$ LANGUAGE PLPGSQL;
237 |
238 | CREATE OR REPLACE FUNCTION pg_temp.tipg_catalog(
239 | schemas text[] DEFAULT NULL,
240 | tables text[] DEFAULT NULL,
241 | exclude_tables text[] DEFAULT NULL,
242 | exclude_table_schemas text[] DEFAULT NULL,
243 | functions text[] DEFAULT NULL,
244 | exclude_functions text[] DEFAULT NULL,
245 | exclude_function_schemas text[] DEFAULT NULL,
246 | spatial boolean DEFAULT FALSE,
247 | spatial_extent boolean DEFAULT TRUE,
248 | datetime_extent boolean DEFAULT TRUE
249 | ) RETURNS SETOF jsonb AS $$
250 | WITH a AS (
251 | SELECT
252 | pg_temp.tipg_tproperties(c, spatial_extent, datetime_extent) as meta
253 | FROM pg_class c, pg_temp.tipg_get_schemas(schemas,exclude_table_schemas) s
254 | WHERE
255 | c.relnamespace=s
256 | AND relkind IN ('r','v', 'm', 'f', 'p')
257 | AND has_table_privilege(c.oid, 'SELECT')
258 | AND c.relname::text NOT IN ('spatial_ref_sys','geometry_columns','geography_columns')
259 | AND (exclude_tables IS NULL OR concat(pg_temp.nspname(relnamespace),'.',c.relname::text) <> ALL (exclude_tables))
260 | AND (tables IS NULL OR concat(pg_temp.nspname(relnamespace),'.',c.relname::text) = ANY (tables))
261 |
262 | UNION ALL
263 |
264 | SELECT
265 | pg_temp.tipg_fproperties(p) as meta
266 | FROM
267 | pg_proc p, pg_temp.tipg_get_schemas(schemas,exclude_function_schemas) s
268 | WHERE
269 | p.pronamespace=s
270 | AND proretset
271 | AND prokind='f'
272 | AND proargnames is not null
273 | AND '' != ALL(proargnames)
274 | AND has_function_privilege(oid, 'execute')
275 | AND provariadic=0
276 | AND (exclude_functions IS NULL OR concat(pg_temp.nspname(pronamespace),'.', proname::text) <> ALL (exclude_functions))
277 | AND (functions IS NULL OR concat(pg_temp.nspname(pronamespace),'.', proname::text) = ANY (functions))
278 | AND p.proname::text NOT ILIKE 'tipg_%'
279 | )
280 | SELECT meta FROM a
281 | WHERE
282 | CASE
283 | WHEN spatial THEN meta @? '$.properties[*] ? (exists (@.geometry_type))'
284 | ELSE TRUE
285 | END
286 | ;
287 | $$ LANGUAGE SQL;
288 |
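289 | -- Illustrative examples. The object names 'public.my_table', 'public.my_function'
290 | -- and 'public.audit_log' below are placeholders, not objects shipped with tipg;
291 | -- the statements are commented out so loading this script only creates the
292 | -- pg_temp helpers above.
293 | --
294 | -- -- List the schema OIDs the catalog will scan, honouring include/exclude lists:
295 | -- SELECT * FROM pg_temp.tipg_get_schemas(ARRAY['public'], NULL);
296 | --
297 | -- -- Describe a single table or function (returns a jsonb metadata document):
298 | -- SELECT pg_temp.tipg_tproperties('public.my_table', TRUE, TRUE);
299 | -- SELECT pg_temp.tipg_fproperties('public.my_function');
300 | --
301 | -- -- Build the full catalog, keeping only spatial collections:
302 | -- SELECT * FROM pg_temp.tipg_catalog(
303 | --     schemas => ARRAY['public'],
304 | --     exclude_tables => ARRAY['public.audit_log'],
305 | --     spatial => TRUE
306 | -- );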
--------------------------------------------------------------------------------
/tipg/templates/collection.html:
--------------------------------------------------------------------------------
1 | {% include "header.html" %}
2 | {% if params %}
3 | {% set urlq = url + '?' + params + '&' %}
4 | {% else %}
5 | {% set urlq = url + '?' %}
6 | {% endif %}
7 |
8 |
9 |
10 | {% for crumb in crumbs %} {% if not loop.last %}
11 |
12 | {{ crumb.part }}
13 |
14 | {% else %}
15 | {{ crumb.part }}
16 | {% endif %} {% endfor %}
17 |
18 |
19 | JSON
20 |
21 |
22 |
23 |
24 | Collection: {{ response.title or response.id }}
25 |
26 |
27 |
28 | {{ response.description or response.title or response.id }}
29 | {% if "keywords" in response and length(response.keywords) > 0 %}
30 |
31 |
32 | {% for keyword in response.keywords %}
33 | {{ keyword }}
34 | {% endfor %}
35 |
36 |
37 | {% endif %}
38 |
39 | Links
40 |
45 |
46 |
47 |
48 | Loading...
49 |
50 |
51 |
52 |
53 |
81 |
82 | {% include "footer.html" %}
83 |
--------------------------------------------------------------------------------
/tipg/templates/collections.html:
--------------------------------------------------------------------------------
1 | {% include "header.html" %}
2 |
3 | {% set show_prev_link = false %}
4 | {% set show_next_link = false %}
5 | {% if params %}
6 | {% set urlq = url + '?' + params + '&' %}
7 | {% else %}
8 | {% set urlq = url + '?' %}
9 | {% endif %}
10 |
11 |
12 |
13 | {% for crumb in crumbs %}
14 | {% if not loop.last %}
15 | {{ crumb.part }}
16 | {% else %}{{ crumb.part }}
17 | {% endif %}
18 | {% endfor %}
19 |
20 | JSON
21 |
22 |
23 |
24 | Collections
25 |
26 |
27 | Number of matching collections: {{ response.numberMatched }}
28 | Number of returned collections: {{ response.numberReturned }}
29 | Page: of
30 |
31 |
32 |
53 |
54 |
55 |
56 |
57 |
58 | Title
59 | Type
60 | Description
61 |
62 |
63 |
64 | {% for collection in response.collections %}
65 |
66 | {{ collection.title or collection.id }}
67 | {{ collection.itemType }}
68 | {{ collection.description or collection.title or collection.id }}
69 |
70 | {% endfor %}
71 |
72 |
73 |
74 |
75 |
117 |
118 | {% include "footer.html" %}
119 |
--------------------------------------------------------------------------------
/tipg/templates/conformance.html:
--------------------------------------------------------------------------------
1 | {% include "header.html" %}
2 | {% if params %}
3 | {% set urlq = url + '?' + params + '&' %}
4 | {% else %}
5 | {% set urlq = url + '?' %}
6 | {% endif %}
7 |
8 |
9 |
10 | {% for crumb in crumbs %}
11 | {% if not loop.last %}
12 | {{ crumb.part }}
13 | {% else %}{{ crumb.part }}
14 | {% endif %}
15 | {% endfor %}
16 |
17 | JSON
18 |
19 |
20 |
21 | {{ template.title }} Conformance
22 |
23 | This API implements the conformance classes from the standards and community specifications listed below.
24 |
25 | Links
26 |
27 | {% for url in response.conformsTo %}
28 | {{ url }}
29 | {% endfor %}
30 |
31 |
32 | {% include "footer.html" %}
33 |
--------------------------------------------------------------------------------
/tipg/templates/debug.html:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/developmentseed/tipg/871ee56e38281dbfeec493fc20d08b7b60e922cd/tipg/templates/debug.html
--------------------------------------------------------------------------------
/tipg/templates/footer.html:
--------------------------------------------------------------------------------
1 | {% include "debug.html" %}
2 |
3 |
4 |