├── .bumpversion.cfg
├── .env.example
├── .flake8
├── .github
├── codecov.yml
└── workflows
│ ├── ci.yml
│ └── deploy_mkdocs.yml
├── .gitignore
├── .pre-commit-config.yaml
├── CHANGES.md
├── CONTRIBUTING.md
├── LICENSE
├── README.md
├── data
├── countries.sql
├── landsat_wrs.sql
└── sentinel_mgrs.sql
├── docker-compose.yml
├── dockerfiles
├── Dockerfile
├── Dockerfile.db
└── scripts
│ └── wait-for-it.sh
├── docs
├── logos
│ ├── TiFeatures_logo_large.png
│ ├── TiFeatures_logo_medium.png
│ ├── TiFeatures_logo_no_text_large.png
│ └── TiFeatures_logo_small.png
├── mkdocs.yml
└── src
│ ├── advanced
│ └── timvt_and_tifeatures.md
│ ├── contributing.md
│ ├── endpoints.md
│ ├── img
│ ├── favicon.ico
│ └── logo.png
│ ├── index.md
│ └── release-notes.md
├── pyproject.toml
├── setup.py
├── tests
├── benchmarks.py
├── conftest.py
├── fixtures
│ ├── canada.sql
│ ├── landsat_wrs.sql
│ ├── my_data.sql
│ ├── nongeo_data.sql
│ └── templates
│ │ └── collections.html
├── routes
│ ├── __init__.py
│ ├── test_collections.py
│ ├── test_endpoints.py
│ ├── test_item.py
│ ├── test_items.py
│ ├── test_non_geo.py
│ └── test_templates.py
└── test_main.py
└── tifeatures
├── __init__.py
├── db.py
├── dbmodel.py
├── dependencies.py
├── errors.py
├── factory.py
├── filter
├── __init__.py
├── evaluate.py
└── filters.py
├── layer.py
├── main.py
├── middleware.py
├── model.py
├── resources
├── __init__.py
├── enums.py
└── response.py
├── settings.py
└── templates
├── collection.html
├── collections.html
├── conformance.html
├── debug.html
├── footer.html
├── header.html
├── item.html
├── items.html
├── landing.html
└── queryables.html
/.bumpversion.cfg:
--------------------------------------------------------------------------------
1 | [bumpversion]
2 | current_version = 0.1.0a4
3 | commit = True
4 | tag = True
5 | tag_name = {new_version}
6 |
7 | [bumpversion:file:tifeatures/__init__.py]
8 | search = __version__ = "{current_version}"
9 | replace = __version__ = "{new_version}"
10 |
--------------------------------------------------------------------------------
/.env.example:
--------------------------------------------------------------------------------
1 |
2 | TIFEATURES_NAME="OGC Feature API"
3 | DATABASE_URL=postgresql://username:password@0.0.0.0:5439/postgis
4 |
--------------------------------------------------------------------------------
/.flake8:
--------------------------------------------------------------------------------
1 | [flake8]
2 | ignore = E501,W503,E203
3 | exclude = .git,__pycache__,docs/source/conf.py,old,build,dist
4 | max-complexity = 14
5 | max-line-length = 90
6 |
--------------------------------------------------------------------------------
/.github/codecov.yml:
--------------------------------------------------------------------------------
1 | comment: off
2 |
3 | coverage:
4 | status:
5 | project:
6 | default:
7 | target: auto
8 | threshold: 5
9 |
--------------------------------------------------------------------------------
/.github/workflows/ci.yml:
--------------------------------------------------------------------------------
1 | name: CI
2 |
3 | # On every pull request, but only on push to master
4 | on:
5 | push:
6 | branches:
7 | - master
8 | tags:
9 | - '*'
10 | paths:
11 |       # Only run test and docker publish if some code has changed
12 | - 'pyproject.toml'
13 | - 'tifeatures/**'
14 | - 'tests/**'
15 | - '.pre-commit-config.yaml'
16 | - 'dockerfiles/**'
17 | - .github/workflows/ci.yml
18 | pull_request:
19 | env:
20 | LATEST_PY_VERSION: '3.10'
21 |
22 | jobs:
23 | tests:
24 | runs-on: ubuntu-20.04
25 | strategy:
26 | matrix:
27 | python-version: ['3.8', '3.9', '3.10', '3.11']
28 |
29 | steps:
30 | - uses: actions/checkout@v3
31 | - name: Set up Python ${{ matrix.python-version }}
32 | uses: actions/setup-python@v4
33 | with:
34 | python-version: ${{ matrix.python-version }}
35 |
36 | - name: install lib postgres
37 | run: |
38 | sudo apt update
39 | wget -q https://www.postgresql.org/media/keys/ACCC4CF8.asc -O- | sudo apt-key add -
40 | echo "deb [arch=amd64] http://apt.postgresql.org/pub/repos/apt/ focal-pgdg main" | sudo tee /etc/apt/sources.list.d/postgresql.list
41 | sudo apt update
42 | sudo apt-get install --yes libpq-dev postgis postgresql-14-postgis-3
43 |
44 | - name: Install dependencies
45 | run: |
46 | python -m pip install --upgrade pip
47 | python -m pip install .["all,test"]
48 |
49 | - name: Run pre-commit
50 | if: ${{ matrix.python-version == env.LATEST_PY_VERSION }}
51 | run: |
52 | python -m pip install pre-commit
53 | pre-commit run --all-files
54 |
55 | - name: Run tests
56 | run: python -m pytest --cov tifeatures --cov-report xml --cov-report term-missing --asyncio-mode=strict
57 |
58 | - name: Upload Results
59 | if: ${{ matrix.python-version == env.LATEST_PY_VERSION }}
60 | uses: codecov/codecov-action@v1
61 | with:
62 | file: ./coverage.xml
63 | flags: unittests
64 | name: ${{ matrix.python-version }}
65 | fail_ci_if_error: false
66 |
67 | benchmark:
68 | needs: [tests]
69 | runs-on: ubuntu-20.04
70 | steps:
71 | - uses: actions/checkout@v3
72 | - name: Set up Python
73 | uses: actions/setup-python@v4
74 | with:
75 | python-version: ${{ env.LATEST_PY_VERSION }}
76 |
77 | - name: install lib postgres
78 | run: |
79 | sudo apt update
80 | wget -q https://www.postgresql.org/media/keys/ACCC4CF8.asc -O- | sudo apt-key add -
81 | echo "deb [arch=amd64] http://apt.postgresql.org/pub/repos/apt/ focal-pgdg main" | sudo tee /etc/apt/sources.list.d/postgresql.list
82 | sudo apt update
83 | sudo apt-get install --yes libpq-dev postgis postgresql-14-postgis-3
84 |
85 | - name: Install dependencies
86 | run: |
87 | python -m pip install --upgrade pip
88 | python -m pip install .["test"]
89 |
90 | - name: Run Benchmark
91 | run: python -m pytest tests/benchmarks.py --benchmark-only --benchmark-columns 'min, max, mean, median' --benchmark-json output.json --asyncio-mode=strict
92 |
93 | # - name: Store and benchmark result
94 | # uses: benchmark-action/github-action-benchmark@v1
95 | # with:
96 | # name: TiFeatures Benchmarks
97 | # tool: 'pytest'
98 | # output-file-path: output.json
99 | # alert-threshold: '130%'
100 | # comment-on-alert: true
101 | # fail-on-alert: true
102 | # # GitHub API token to make a commit comment
103 | # github-token: ${{ secrets.GITHUB_TOKEN }}
104 | # # Make a commit on `gh-pages` only if master
105 | # auto-push: ${{ github.ref == 'refs/heads/master' }}
106 | # benchmark-data-dir-path: benchmarks
107 |
108 | publish:
109 | needs: [tests]
110 | runs-on: ubuntu-latest
111 | if: contains(github.ref, 'tags') && github.event_name == 'push'
112 | steps:
113 | - uses: actions/checkout@v3
114 | - name: Set up Python
115 | uses: actions/setup-python@v4
116 | with:
117 | python-version: ${{ env.LATEST_PY_VERSION }}
118 |
119 | - name: Install dependencies
120 | run: |
121 | python -m pip install --upgrade pip
122 | python -m pip install hatch
123 | python -m hatch build
124 |
125 | - name: Set tag version
126 | id: tag
127 | run: |
128 | echo "version=${GITHUB_REF#refs/*/}" >> $GITHUB_OUTPUT
129 |
130 | - name: Set module version
131 | id: module
132 | run: |
133 | echo "version=$(hatch --quiet version)" >> $GITHUB_OUTPUT
134 |
135 | - name: Build and publish
136 |         if: ${{ steps.tag.outputs.version == steps.module.outputs.version }}
137 | env:
138 | HATCH_INDEX_USER: ${{ secrets.PYPI_USERNAME }}
139 | HATCH_INDEX_AUTH: ${{ secrets.PYPI_PASSWORD }}
140 | run: |
141 | python -m hatch publish
142 |
143 | publish-docker:
144 | needs: [tests]
145 | if: github.ref == 'refs/heads/master' || startsWith(github.event.ref, 'refs/tags') || github.event_name == 'release'
146 | runs-on: ubuntu-latest
147 | steps:
148 | - name: Checkout
149 | uses: actions/checkout@v3
150 |
151 | - name: Set up QEMU
152 | uses: docker/setup-qemu-action@v1
153 |
154 | - name: Set up Docker Buildx
155 | uses: docker/setup-buildx-action@v1
156 |
157 | - name: Login to Github
158 | uses: docker/login-action@v1
159 | with:
160 | registry: ghcr.io
161 | username: ${{ github.actor }}
162 | password: ${{ secrets.GITHUB_TOKEN }}
163 |
164 | - name: Set tag version
165 | id: tag
166 | # https://stackoverflow.com/questions/58177786/get-the-current-pushed-tag-in-github-actions
167 |         run: echo "tag=${GITHUB_REF#refs/*/}" >> $GITHUB_OUTPUT
168 |
169 |       # Push `latest` when committing to master
170 | - name: Build and push
171 | if: github.ref == 'refs/heads/master'
172 | uses: docker/build-push-action@v2
173 | with:
174 | platforms: linux/amd64,linux/arm64
175 | context: .
176 | file: dockerfiles/Dockerfile
177 | push: true
178 | tags: |
179 | ghcr.io/${{ github.repository }}:latest
180 |
181 | # Push `{VERSION}` when pushing a new tag
182 | - name: Build and push
183 | if: startsWith(github.event.ref, 'refs/tags') || github.event_name == 'release'
184 | uses: docker/build-push-action@v2
185 | with:
186 | platforms: linux/amd64,linux/arm64
187 | context: .
188 | file: dockerfiles/Dockerfile
189 | push: true
190 | tags: |
191 | ghcr.io/${{ github.repository }}:${{ steps.tag.outputs.tag }}
192 |
--------------------------------------------------------------------------------
/.github/workflows/deploy_mkdocs.yml:
--------------------------------------------------------------------------------
1 | name: Publish docs via GitHub Pages
2 |
3 | on:
4 | push:
5 | branches:
6 | - master
7 | paths:
8 | # Only rebuild website when docs have changed
9 | - 'README.md'
10 | - 'CHANGES.md'
11 | - 'CONTRIBUTING.md'
12 | - 'docs/**'
13 |
14 | jobs:
15 | build:
16 | name: Deploy docs
17 | runs-on: ubuntu-latest
18 | steps:
19 | - name: Checkout master
20 | uses: actions/checkout@v3
21 |
22 | - name: Set up Python 3.8
23 | uses: actions/setup-python@v4
24 | with:
25 | python-version: 3.8
26 |
27 | - name: Install dependencies
28 | run: |
29 | python -m pip install --upgrade pip
30 | python -m pip install -e .["docs"]
31 |
32 | - name: Create API docs
33 | env:
34 | # we need to set a fake PG url or import will fail
35 | DATABASE_URL: postgresql://username:password@0.0.0.0:5439/postgis
36 | run: |
37 | pdocs as_markdown \
38 | --output_dir docs/src/api \
39 | --exclude_source \
40 | --overwrite \
41 | tifeatures.filter.evaluate \
42 | tifeatures.filter.filters \
43 | tifeatures.resources.enums \
44 | tifeatures.resources.response \
45 | tifeatures.db \
46 | tifeatures.dbmodel \
47 | tifeatures.dependencies \
48 | tifeatures.errors \
49 | tifeatures.factory \
50 | tifeatures.layer \
51 | tifeatures.middleware \
52 | tifeatures.model \
53 | tifeatures.settings
54 |
55 | - name: Deploy docs
56 | run: mkdocs gh-deploy --force -f docs/mkdocs.yml
57 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | env/
12 | build/
13 | develop-eggs/
14 | dist/
15 | downloads/
16 | eggs/
17 | .eggs/
18 | lib/
19 | lib64/
20 | parts/
21 | sdist/
22 | var/
23 | wheels/
24 | *.egg-info/
25 | .installed.cfg
26 | *.egg
27 |
28 | # PyInstaller
29 | # Usually these files are written by a python script from a template
30 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
31 | *.manifest
32 | *.spec
33 |
34 | # Installer logs
35 | pip-log.txt
36 | pip-delete-this-directory.txt
37 |
38 | # Unit test / coverage reports
39 | htmlcov/
40 | .tox/
41 | .coverage
42 | .coverage.*
43 | .cache
44 | nosetests.xml
45 | coverage.xml
46 | *.cover
47 | .hypothesis/
48 |
49 | # Translations
50 | *.mo
51 | *.pot
52 |
53 | # Django stuff:
54 | *.log
55 | local_settings.py
56 |
57 | # Flask stuff:
58 | instance/
59 | .webassets-cache
60 |
61 | # Scrapy stuff:
62 | .scrapy
63 |
64 | # Sphinx documentation
65 | docs/_build/
66 |
67 | # PyBuilder
68 | target/
69 |
70 | # Jupyter Notebook
71 | .ipynb_checkpoints
72 |
73 | # pyenv
74 | .python-version
75 |
76 | # celery beat schedule file
77 | celerybeat-schedule
78 |
79 | # SageMath parsed files
80 | *.sage.py
81 |
82 | # dotenv
83 | .env
84 |
85 | # virtualenv
86 | .venv
87 | venv/
88 | ENV/
89 |
90 | # Spyder project settings
91 | .spyderproject
92 | .spyproject
93 |
94 | # Rope project settings
95 | .ropeproject
96 |
97 | # mkdocs documentation
98 | /site
99 |
100 | # mypy
101 | .mypy_cache/
102 |
103 | cdk.out/
104 |
105 | # pycharm
106 | .idea/
107 |
108 | .benchmarks/
109 |
110 | .pgdata/
111 | docs/src/api/*
112 |
113 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | repos:
2 | - repo: https://github.com/psf/black
3 | rev: 22.3.0
4 | hooks:
5 | - id: black
6 | language_version: python
7 |
8 | - repo: https://github.com/PyCQA/isort
9 | rev: 5.12.0
10 | hooks:
11 | - id: isort
12 | language_version: python
13 |
14 | - repo: https://github.com/PyCQA/flake8
15 | rev: 3.8.3
16 | hooks:
17 | - id: flake8
18 | language_version: python
19 |
20 | - repo: https://github.com/PyCQA/pydocstyle
21 | rev: 6.1.1
22 | hooks:
23 | - id: pydocstyle
24 | language_version: python
25 | additional_dependencies:
26 | - toml
27 |
28 | - repo: https://github.com/pre-commit/mirrors-mypy
29 | rev: v0.991
30 | hooks:
31 | - id: mypy
32 | language_version: python
33 |
--------------------------------------------------------------------------------
/CHANGES.md:
--------------------------------------------------------------------------------
1 | # Release Notes
2 |
3 | ## 0.1.0a4 (2023-03-15)
4 |
5 | * fix factories `url_for` type (for starlette >=0.26)
6 |
7 | ## 0.1.0a3 (2023-02-27)
8 |
9 | * fix CRS for collection and extent (switch from `epsg4326` to `CRS84`)
10 |
11 | ## 0.1.0a2 (2022-11-21)
12 |
13 | * fix package metadata
14 |
15 | ## 0.1.0a0 (2022-11-16)
16 |
17 | * OGC Feature API Part 1: Core (https://docs.ogc.org/is/17-069r4/17-069r4.html)
18 | * OGC Feature API Part 3: Filtering / CQL2 (https://docs.ogc.org/DRAFTS/19-079r1.html)
19 | * Support multiple output formats (json, html, geojson, ndjson, csv, geojson-seq)
20 | * geometry column selection
21 | * datetime column selection
22 | * primary key column selection
23 | * output property filter
24 | * options to reduce the bandwidth required for returning record geometries.
25 | - bbox-only=[bool] only return the bounding box in the return geometry
26 | - geom-column=none don't return geometry as part of the return
27 | - simplify=[float] Use ST_SnapToGrid(ST_Simplify(geom, [simplify]),[simplify]) to simplify and reduce precision of output geometry.
28 | - sortby=[+/-][field] support to sorting by a field
29 | * ability to use user defined PostgreSQL functions as per the "custom functions" spec in CQL2.
30 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Development - Contributing
2 |
3 | Issues and pull requests are more than welcome: https://github.com/developmentseed/tifeatures/issues
4 |
5 | **dev install**
6 |
7 | ```bash
8 | $ git clone https://github.com/developmentseed/tifeatures.git
9 | $ cd tifeatures
10 | $ pip install -e .["test,dev"]
11 | ```
12 |
13 | You can then run the tests with the following command:
14 |
15 | ```sh
16 | python -m pytest --cov tifeatures --cov-report term-missing --asyncio-mode=strict
17 | ```
18 |
19 | and run benchmark
20 |
21 | ```sh
22 | python -m pytest tests/benchmarks.py --benchmark-only --benchmark-columns 'min, max, mean, median' --asyncio-mode=strict
23 | ```
24 |
25 | **pre-commit**
26 |
27 | This repo is set to use `pre-commit` to run *isort*, *flake8*, *pydocstyle*, *black* ("uncompromising Python code formatter") and mypy when committing new code.
28 |
29 | ```bash
30 | # Install pre-commit command
31 | $ pip install pre-commit
32 |
33 | # Set up pre-commit within your local environment
34 | $ pre-commit install
35 | ```
36 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2022 Development Seed
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 |
Simple and Fast Geospatial Features API for PostGIS.
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 | ---
22 |
23 | **Documentation**: https://developmentseed.org/tifeatures/
24 |
25 | **Source Code**: https://github.com/developmentseed/tifeatures
26 |
27 | ---
28 |
29 | > :warning: This project is on pause while we focus on [`developmentseed/tipg`](https://github.com/developmentseed/tipg) :warning:
30 | >
31 | > ref: https://github.com/developmentseed/timvt/discussions/96
32 |
33 | ---
34 |
35 | `TiFeatures`, pronounced *T[ee]Features*, is a **python** package which helps create a lightweight **Features** server for a PostGIS Database. The API has been designed with respect to the [OGC Features API specification](https://github.com/opengeospatial/ogcapi-features).
36 |
37 | ---
38 |
39 | ## Install
40 |
41 | ```bash
42 | $ python -m pip install pip -U
43 | $ python -m pip install tifeatures
44 |
45 | # or from source
46 | $ git clone https://github.com/developmentseed/tifeatures.git
47 | $ cd tifeatures
48 | $ python -m pip install -e .
49 | ```
50 |
51 | ## OGC Specification
52 |
53 | Specification | Status | link |
54 | | -- | -- | -- |
55 | Part 1: Core | ✅ | https://docs.ogc.org/is/17-069r4/17-069r4.html
56 | Part 2: CRS by Reference | ❌ | https://docs.ogc.org/is/18-058r1/18-058r1.html
57 | Part 3: Filtering / CQL2 | ✅ | https://docs.ogc.org/DRAFTS/19-079r1.html
58 |
59 | Notes:
60 |
61 | The project authors choose not to implement the Part 2 of the specification to avoid the introduction of CRS based GeoJSON. This might change in the future.
62 |
63 | While the authors tried to follow the specification (part 1 and 3) to the letter, some API endpoints might have more capabilities (e.g geometry column selection).
64 |
65 | ## PostGIS/PostgreSQL
66 |
67 | `TiFeatures` relies a lot on `ST_*` PostGIS functions. You need to make sure your PostgreSQL database has PostGIS installed.
68 |
69 | ```sql
70 | SELECT name, default_version,installed_version
71 | FROM pg_available_extensions WHERE name LIKE 'postgis%' or name LIKE 'address%';
72 | ```
73 |
74 | ```sql
75 | CREATE EXTENSION postgis;
76 | ```
77 |
78 | ### Configuration
79 |
80 | To be able to work, the application will need access to the database. `tifeatures` uses [starlette](https://www.starlette.io/config/)'s configuration pattern, which makes use of environment variables and/or a `.env` file to pass variables to the application.
81 |
82 | Example of `.env` file can be found in [.env.example](https://github.com/developmentseed/tifeatures/blob/master/.env.example)
83 |
84 | ```
85 | # you need to define the DATABASE_URL directly
86 | DATABASE_URL=postgresql://username:password@0.0.0.0:5432/postgis
87 | ```
88 |
89 | ## Launch
90 |
91 | ```bash
92 | $ pip install uvicorn
93 |
94 | # Set your postgis database instance URL in the environment
95 | $ export DATABASE_URL=postgresql://username:password@0.0.0.0:5432/postgis
96 | $ uvicorn tifeatures.main:app
97 |
98 | # or using Docker
99 |
100 | $ docker-compose up
101 | ```
102 |
103 |
104 |
105 |
106 |
107 | ## Contribution & Development
108 |
109 | See [CONTRIBUTING.md](https://github.com/developmentseed/tifeatures/blob/master/CONTRIBUTING.md)
110 |
111 | ## License
112 |
113 | See [LICENSE](https://github.com/developmentseed/tifeatures/blob/master/LICENSE)
114 |
115 | ## Authors
116 |
117 | Created by [Development Seed]()
118 |
119 | ## Changes
120 |
121 | See [CHANGES.md](https://github.com/developmentseed/tifeatures/blob/master/CHANGES.md).
122 |
123 |
--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '3'
2 |
3 | services:
4 | app:
5 | build:
6 | context: .
7 | dockerfile: dockerfiles/Dockerfile
8 | environment:
9 | - HOST=0.0.0.0
10 | - PORT=8081
11 | - PYTHONWARNINGS=ignore
12 | - POSTGRES_USER=username
13 | - POSTGRES_PASS=password
14 | - POSTGRES_DBNAME=postgis
15 | - POSTGRES_HOST=database
16 | - POSTGRES_PORT=5432
17 | - DEBUG=TRUE
18 | ports:
19 | - "${MY_DOCKER_IP:-127.0.0.1}:8081:8081"
20 | depends_on:
21 | - database
22 | command:
23 | bash -c "bash /tmp/scripts/wait-for-it.sh database:5432 --timeout=30 && /start.sh"
24 | volumes:
25 | - ./dockerfiles/scripts:/tmp/scripts
26 |
27 | database:
28 | build:
29 | context: .
30 | dockerfile: dockerfiles/Dockerfile.db
31 | environment:
32 | - POSTGRES_USER=username
33 | - POSTGRES_PASSWORD=password
34 | - POSTGRES_DB=postgis
35 | ports:
36 | - "5439:5432"
37 | command: postgres -N 500
38 | volumes:
39 | - ./.pgdata:/var/lib/postgresql/data
40 |
41 | # pg_featureserv:
42 | # image: pramsey/pg_featureserv:latest
43 | # environment:
44 | # - DATABASE_URL=postgresql://username:password@database:5432/postgis
45 | # ports:
46 | # - "9000:9000"
47 | # depends_on:
48 | # - database
49 |
50 | # fast_features:
51 | # image: ghcr.io/microsoft/ogc-api-fast-features:main
52 | # environment:
53 | # - APP_POSTGRESQL_PROFILE=stac_hybrid
54 | # - APP_POSTGRESQL_HOST=database
55 | # - APP_POSTGRESQL_PORT=5432
56 | # - APP_POSTGRESQL_USER=username
57 | # - APP_POSTGRESQL_PASSWORD=password
58 | #       - APP_POSTGRESQL_DBNAME=postgis
59 | # - APP_DATA_SOURCE_TYPES=postgresql
60 | # ports:
61 | # - "8080:80"
62 | # depends_on:
63 | # - database
64 |
65 |
--------------------------------------------------------------------------------
/dockerfiles/Dockerfile:
--------------------------------------------------------------------------------
1 | ARG PYTHON_VERSION=3.10
2 |
3 | FROM ghcr.io/vincentsarago/uvicorn-gunicorn:${PYTHON_VERSION}
4 |
5 | WORKDIR /tmp
6 |
7 | COPY README.md README.md
8 | COPY LICENSE LICENSE
9 | COPY tifeatures/ tifeatures/
10 | COPY pyproject.toml pyproject.toml
11 |
12 | RUN pip install . --no-cache-dir
13 | RUN rm -rf tifeatures/ README.md pyproject.toml LICENSE
14 |
15 | ENV MODULE_NAME tifeatures.main
16 | ENV VARIABLE_NAME app
17 |
--------------------------------------------------------------------------------
/dockerfiles/Dockerfile.db:
--------------------------------------------------------------------------------
1 | FROM ghcr.io/vincentsarago/postgis:14-3.3
2 |
3 | COPY data/*.sql /docker-entrypoint-initdb.d/
4 |
--------------------------------------------------------------------------------
/dockerfiles/scripts/wait-for-it.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | # Use this script to test if a given TCP host/port are available
3 |
4 | ######################################################
5 | # Copied from https://github.com/vishnubob/wait-for-it
6 | ######################################################
7 |
8 | WAITFORIT_cmdname=${0##*/}
9 |
10 | echoerr() { if [[ $WAITFORIT_QUIET -ne 1 ]]; then echo "$@" 1>&2; fi }
11 |
12 | usage()
13 | {
14 | cat << USAGE >&2
15 | Usage:
16 | $WAITFORIT_cmdname host:port [-s] [-t timeout] [-- command args]
17 | -h HOST | --host=HOST Host or IP under test
18 | -p PORT | --port=PORT TCP port under test
19 | Alternatively, you specify the host and port as host:port
20 | -s | --strict Only execute subcommand if the test succeeds
21 | -q | --quiet Don't output any status messages
22 | -t TIMEOUT | --timeout=TIMEOUT
23 | Timeout in seconds, zero for no timeout
24 | -- COMMAND ARGS Execute command with args after the test finishes
25 | USAGE
26 | exit 1
27 | }
28 |
29 | wait_for()
30 | {
31 | if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then
32 | echoerr "$WAITFORIT_cmdname: waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT"
33 | else
34 | echoerr "$WAITFORIT_cmdname: waiting for $WAITFORIT_HOST:$WAITFORIT_PORT without a timeout"
35 | fi
36 | WAITFORIT_start_ts=$(date +%s)
37 | while :
38 | do
39 | if [[ $WAITFORIT_ISBUSY -eq 1 ]]; then
40 | nc -z $WAITFORIT_HOST $WAITFORIT_PORT
41 | WAITFORIT_result=$?
42 | else
43 | (echo -n > /dev/tcp/$WAITFORIT_HOST/$WAITFORIT_PORT) >/dev/null 2>&1
44 | WAITFORIT_result=$?
45 | fi
46 | if [[ $WAITFORIT_result -eq 0 ]]; then
47 | WAITFORIT_end_ts=$(date +%s)
48 | echoerr "$WAITFORIT_cmdname: $WAITFORIT_HOST:$WAITFORIT_PORT is available after $((WAITFORIT_end_ts - WAITFORIT_start_ts)) seconds"
49 | break
50 | fi
51 | sleep 1
52 | done
53 | return $WAITFORIT_result
54 | }
55 |
56 | wait_for_wrapper()
57 | {
58 | # In order to support SIGINT during timeout: http://unix.stackexchange.com/a/57692
59 | if [[ $WAITFORIT_QUIET -eq 1 ]]; then
60 | timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --quiet --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT &
61 | else
62 | timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT &
63 | fi
64 | WAITFORIT_PID=$!
65 | trap "kill -INT -$WAITFORIT_PID" INT
66 | wait $WAITFORIT_PID
67 | WAITFORIT_RESULT=$?
68 | if [[ $WAITFORIT_RESULT -ne 0 ]]; then
69 | echoerr "$WAITFORIT_cmdname: timeout occurred after waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT"
70 | fi
71 | return $WAITFORIT_RESULT
72 | }
73 |
74 | # process arguments
75 | while [[ $# -gt 0 ]]
76 | do
77 | case "$1" in
78 | *:* )
79 | WAITFORIT_hostport=(${1//:/ })
80 | WAITFORIT_HOST=${WAITFORIT_hostport[0]}
81 | WAITFORIT_PORT=${WAITFORIT_hostport[1]}
82 | shift 1
83 | ;;
84 | --child)
85 | WAITFORIT_CHILD=1
86 | shift 1
87 | ;;
88 | -q | --quiet)
89 | WAITFORIT_QUIET=1
90 | shift 1
91 | ;;
92 | -s | --strict)
93 | WAITFORIT_STRICT=1
94 | shift 1
95 | ;;
96 | -h)
97 | WAITFORIT_HOST="$2"
98 | if [[ $WAITFORIT_HOST == "" ]]; then break; fi
99 | shift 2
100 | ;;
101 | --host=*)
102 | WAITFORIT_HOST="${1#*=}"
103 | shift 1
104 | ;;
105 | -p)
106 | WAITFORIT_PORT="$2"
107 | if [[ $WAITFORIT_PORT == "" ]]; then break; fi
108 | shift 2
109 | ;;
110 | --port=*)
111 | WAITFORIT_PORT="${1#*=}"
112 | shift 1
113 | ;;
114 | -t)
115 | WAITFORIT_TIMEOUT="$2"
116 | if [[ $WAITFORIT_TIMEOUT == "" ]]; then break; fi
117 | shift 2
118 | ;;
119 | --timeout=*)
120 | WAITFORIT_TIMEOUT="${1#*=}"
121 | shift 1
122 | ;;
123 | --)
124 | shift
125 | WAITFORIT_CLI=("$@")
126 | break
127 | ;;
128 | --help)
129 | usage
130 | ;;
131 | *)
132 | echoerr "Unknown argument: $1"
133 | usage
134 | ;;
135 | esac
136 | done
137 |
138 | if [[ "$WAITFORIT_HOST" == "" || "$WAITFORIT_PORT" == "" ]]; then
139 | echoerr "Error: you need to provide a host and port to test."
140 | usage
141 | fi
142 |
143 | WAITFORIT_TIMEOUT=${WAITFORIT_TIMEOUT:-15}
144 | WAITFORIT_STRICT=${WAITFORIT_STRICT:-0}
145 | WAITFORIT_CHILD=${WAITFORIT_CHILD:-0}
146 | WAITFORIT_QUIET=${WAITFORIT_QUIET:-0}
147 |
148 | # Check to see if timeout is from busybox?
149 | WAITFORIT_TIMEOUT_PATH=$(type -p timeout)
150 | WAITFORIT_TIMEOUT_PATH=$(realpath $WAITFORIT_TIMEOUT_PATH 2>/dev/null || readlink -f $WAITFORIT_TIMEOUT_PATH)
151 |
152 | WAITFORIT_BUSYTIMEFLAG=""
153 | if [[ $WAITFORIT_TIMEOUT_PATH =~ "busybox" ]]; then
154 | WAITFORIT_ISBUSY=1
155 | # Check if busybox timeout uses -t flag
156 | # (recent Alpine versions don't support -t anymore)
157 | if timeout &>/dev/stdout | grep -q -e '-t '; then
158 | WAITFORIT_BUSYTIMEFLAG="-t"
159 | fi
160 | else
161 | WAITFORIT_ISBUSY=0
162 | fi
163 |
164 | if [[ $WAITFORIT_CHILD -gt 0 ]]; then
165 | wait_for
166 | WAITFORIT_RESULT=$?
167 | exit $WAITFORIT_RESULT
168 | else
169 | if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then
170 | wait_for_wrapper
171 | WAITFORIT_RESULT=$?
172 | else
173 | wait_for
174 | WAITFORIT_RESULT=$?
175 | fi
176 | fi
177 |
178 | if [[ $WAITFORIT_CLI != "" ]]; then
179 | if [[ $WAITFORIT_RESULT -ne 0 && $WAITFORIT_STRICT -eq 1 ]]; then
180 | echoerr "$WAITFORIT_cmdname: strict mode, refusing to execute subprocess"
181 | exit $WAITFORIT_RESULT
182 | fi
183 | exec "${WAITFORIT_CLI[@]}"
184 | else
185 | exit $WAITFORIT_RESULT
186 | fi
187 |
--------------------------------------------------------------------------------
/docs/logos/TiFeatures_logo_large.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/developmentseed/tifeatures/555638a83b3538e1d91fe8718a8ea047a527f207/docs/logos/TiFeatures_logo_large.png
--------------------------------------------------------------------------------
/docs/logos/TiFeatures_logo_medium.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/developmentseed/tifeatures/555638a83b3538e1d91fe8718a8ea047a527f207/docs/logos/TiFeatures_logo_medium.png
--------------------------------------------------------------------------------
/docs/logos/TiFeatures_logo_no_text_large.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/developmentseed/tifeatures/555638a83b3538e1d91fe8718a8ea047a527f207/docs/logos/TiFeatures_logo_no_text_large.png
--------------------------------------------------------------------------------
/docs/logos/TiFeatures_logo_small.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/developmentseed/tifeatures/555638a83b3538e1d91fe8718a8ea047a527f207/docs/logos/TiFeatures_logo_small.png
--------------------------------------------------------------------------------
/docs/mkdocs.yml:
--------------------------------------------------------------------------------
1 | site_name: tifeatures
2 | site_description: Simple and Fast Geospatial OGC Features API for PostGIS.
3 |
4 | docs_dir: 'src'
5 | site_dir: 'build'
6 |
7 | repo_name: "developmentseed/tifeatures"
8 | repo_url: "https://github.com/developmentseed/tifeatures"
9 |
10 | extra:
11 | social:
12 | - icon: "fontawesome/brands/github"
13 | link: "https://github.com/developmentseed"
14 | - icon: "fontawesome/brands/twitter"
15 | link: "https://twitter.com/developmentseed"
16 | - icon: "fontawesome/brands/medium"
17 | link: "https://medium.com/devseed"
18 |
19 | nav:
20 | - TiFeatures: "index.md"
21 | - User Guide:
22 | - "Endpoints": endpoints.md
23 | - Advanced User Guide:
24 | - "Combine MVT and Features": advanced/timvt_and_tifeatures.md
25 | - API:
26 | - db: api/tifeatures/db.md
27 | - dbmodel: api/tifeatures/dbmodel.md
28 | - dependencies: api/tifeatures/dependencies.md
29 | - errors: api/tifeatures/errors.md
30 | - factory: api/tifeatures/factory.md
31 | - layer: api/tifeatures/layer.md
32 | - middleware: api/tifeatures/middleware.md
33 | - model: api/tifeatures/model.md
34 | - settings: api/tifeatures/settings.md
35 | - resources:
36 | - enums: api/tifeatures/resources/enums.md
37 | - response: api/tifeatures/resources/response.md
38 | - filters:
39 | - evaluate: api/tifeatures/filter/evaluate.md
40 | - filters: api/tifeatures/filter/filters.md
41 | - Development - Contributing: "contributing.md"
42 | - Release Notes: "release-notes.md"
43 |
44 | plugins:
45 | - search
46 |
47 | theme:
48 | name: material
49 | palette:
50 | primary: indigo
51 | scheme: default
52 | favicon: img/favicon.ico
53 | logo: img/logo.png
54 |
55 |
56 | markdown_extensions:
57 | - admonition
58 | - attr_list
59 | - codehilite:
60 | guess_lang: false
61 | - def_list
62 | - footnotes
63 | - pymdownx.arithmatex
64 | - pymdownx.betterem
65 | - pymdownx.caret:
66 | insert: false
67 | - pymdownx.details
68 | - pymdownx.emoji
69 | - pymdownx.escapeall:
70 | hardbreak: true
71 | nbsp: true
72 | - pymdownx.magiclink:
73 | hide_protocol: true
74 | repo_url_shortener: true
75 | - pymdownx.smartsymbols
76 | - pymdownx.superfences
77 | - pymdownx.tasklist:
78 | custom_checkbox: true
79 | - pymdownx.tilde
80 | - toc:
81 | permalink: true
82 |
--------------------------------------------------------------------------------
/docs/src/advanced/timvt_and_tifeatures.md:
--------------------------------------------------------------------------------
1 |
2 | !!! Note
    Please check out [`developmentseed/tipg`](https://github.com/developmentseed/tipg), which is the result of merging tifeatures and timvt.
4 |
5 |
`TiFeatures` and [`TiMVT`](https://github.com/developmentseed/timvt) have a lot in common, and it's possible to create a single FastAPI application with both **Features** and **MVT** endpoints:
7 |
8 | ``` py
9 | """Features and MVT app."""
10 |
11 | from tifeatures.db import close_db_connection, connect_to_db, register_table_catalog
12 | from tifeatures.factory import Endpoints
13 | from timvt.factory import VectorTilerFactory
14 | from fastapi import FastAPI
15 | from starlette_cramjam.middleware import CompressionMiddleware
16 |
17 | app = FastAPI(
18 | title="Features and MVT",
19 | openapi_url="/api",
20 | docs_url="/api.html",
21 | )
22 |
23 | # Register endpoints.
24 | endpoints = Endpoints()
25 | app.include_router(endpoints.router, tags=["Features"])
26 |
27 | # By default the VectorTilerFactory will only create tiles/ and tilejson.json endpoints
28 | mvt_endpoints = VectorTilerFactory()
29 | app.include_router(mvt_endpoints.router)
30 |
31 | app.add_middleware(CompressionMiddleware)
32 |
33 |
34 | @app.on_event("startup")
35 | async def startup_event() -> None:
36 | """Connect to database on startup."""
37 | await connect_to_db(app)
38 | # TiMVT and TiFeatures share the same `Table_catalog` format
39 | # see https://github.com/developmentseed/timvt/pull/83
40 | await register_table_catalog(app)
41 |
42 |
43 | @app.on_event("shutdown")
44 | async def shutdown_event() -> None:
45 | """Close database connection."""
46 | await close_db_connection(app)
47 | ```
48 |
49 | !!! Note
50 | To run the example, copy the code to a file main.py, and start uvicorn with:
51 |
52 | `uvicorn main:app --reload`
53 |
54 |
55 | 
56 |
--------------------------------------------------------------------------------
/docs/src/contributing.md:
--------------------------------------------------------------------------------
1 | ../../CONTRIBUTING.md
--------------------------------------------------------------------------------
/docs/src/endpoints.md:
--------------------------------------------------------------------------------
1 |
2 |
3 | ## Landing page
4 |
5 | Path: `/`
6 |
7 | QueryParams:
8 |
9 | - **f** (str, one of [`json`, `html`]): Select response MediaType.
10 |
11 | HeaderParams:
12 |
13 | - **accept** (str, one of [`application/json`, `text/html`]): Select response MediaType.
14 |
15 | Example:
16 |
17 | ```json
18 | curl http://127.0.0.1:8081 | jq
19 | {
20 | "title": "TiFeatures",
21 | "links": [
22 | {
23 | "href": "http://127.0.0.1:8081/",
24 | "rel": "self",
25 | "type": "application/json",
26 | "title": "Landing Page"
27 | },
28 | {
29 | "href": "http://127.0.0.1:8081/api",
30 | "rel": "service-desc",
31 | "type": "application/vnd.oai.openapi+json;version=3.0",
32 | "title": "the API definition (JSON)"
33 | },
34 | {
35 | "href": "http://127.0.0.1:8081/api.html",
36 | "rel": "service-doc",
37 | "type": "text/html",
38 | "title": "the API documentation"
39 | },
40 | {
41 | "href": "http://127.0.0.1:8081/conformance",
42 | "rel": "conformance",
43 | "type": "application/json",
44 | "title": "Conformance"
45 | },
46 | {
47 | "href": "http://127.0.0.1:8081/collections",
48 | "rel": "data",
49 | "type": "application/json",
50 | "title": "List of Collections"
51 | },
52 | {
53 | "href": "http://127.0.0.1:8081/collections/{collectionId}",
54 | "rel": "data",
55 | "type": "application/json",
56 | "title": "Collection metadata"
57 | },
58 | {
59 | "href": "http://127.0.0.1:8081/collections/{collectionId}/queryables",
60 | "rel": "queryables",
61 | "type": "application/schema+json",
62 | "title": "Collection queryables"
63 | },
64 | {
65 | "href": "http://127.0.0.1:8081/collections/{collectionId}/items",
66 | "rel": "data",
67 | "type": "application/geo+json",
68 | "title": "Collection Features"
69 | },
70 | {
71 | "href": "http://127.0.0.1:8081/collections/{collectionId}/items/{itemId}",
72 | "rel": "data",
73 | "type": "application/geo+json",
74 | "title": "Collection Feature"
75 | }
76 | ]
77 | }
78 | ```
79 |
80 | Ref: https://docs.ogc.org/is/17-069r4/17-069r4.html#_api_landing_page
81 |
82 | ## Conformance declaration
83 |
84 | Path: `/conformance`
85 |
86 | QueryParams:
87 |
88 | - **f** (str, one of [`json`, `html`]): Select response MediaType.
89 |
90 | HeaderParams:
91 |
- **accept** (str, one of [`application/json`, `text/html`]): Select response MediaType.
93 |
94 | Example:
95 |
96 | ```json
97 | curl http://127.0.0.1:8081/conformance | jq
98 | {
99 | "conformsTo": [
100 | "http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/core",
101 | "http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/oas3",
102 | "http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/geojson",
103 | "http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/html",
104 | "http://www.opengis.net/spec/ogcapi-common-1/1.0/conf/core",
105 | "http://www.opengis.net/spec/ogcapi-common-1/1.0/conf/landing-page",
106 | "http://www.opengis.net/spec/ogcapi-common-1/1.0/conf/json",
107 | "http://www.opengis.net/spec/ogcapi-common-1/1.0/conf/html",
108 | "http://www.opengis.net/spec/ogcapi-common-1/1.0/conf/oas30",
109 | "http://www.opengis.net/spec/ogcapi-common-2/1.0/conf/collections",
110 | "http://www.opengis.net/spec/ogcapi-common-2/1.0/conf/simple-query",
    "http://www.opengis.net/spec/ogcapi-features-3/1.0/conf/filter",
112 | "http://www.opengis.net/def/rel/ogc/1.0/queryables"
113 | ]
114 | }
115 | ```
116 |
117 | Ref: https://docs.ogc.org/is/17-069r4/17-069r4.html#_declaration_of_conformance_classes
118 |
119 | ## List Feature Collections
120 |
121 | Path: `/collections`
122 |
123 | QueryParams:
124 |
125 | - **f** (str, one of [`json`, `html`]): Select response MediaType.
126 |
127 | HeaderParams:
128 |
- **accept** (str, one of [`application/json`, `text/html`]): Select response MediaType.
130 |
131 | Example:
132 |
133 | ```json
134 | curl http://127.0.0.1:8081/collections | jq
135 | {
136 | "collections": [
137 | {
138 | "id": "public.countries",
139 | "title": "public.countries",
140 | "links": [
141 | {
142 | "href": "http://127.0.0.1:8081/collections/public.countries",
143 | "rel": "collection",
144 | "type": "application/json"
145 | },
146 | {
147 | "href": "http://127.0.0.1:8081/collections/public.countries/items",
148 | "rel": "items",
149 | "type": "application/geo+json"
150 | },
151 | {
152 | "href": "http://127.0.0.1:8081/collections/public.countries/queryables",
153 | "rel": "queryables",
154 | "type": "application/schema+json"
155 | }
156 | ],
157 | "itemType": "feature",
158 | "crs": [
159 | "http://www.opengis.net/def/crs/OGC/1.3/CRS84"
160 | ]
161 | },
162 | ...
163 | ],
164 | "links": [
165 | {
166 | "href": "http://127.0.0.1:8081/",
167 | "rel": "parent",
168 | "type": "application/json"
169 | },
170 | {
171 | "href": "http://127.0.0.1:8081/collections",
172 | "rel": "self",
173 | "type": "application/json"
174 | }
175 | ]
176 | }
177 | ```
178 |
179 | Ref: https://docs.ogc.org/is/17-069r4/17-069r4.html#_collections_
180 |
181 | ## Feature Collection's Metadata
182 |
183 | Path: `/collections/{collectionId}`
184 |
185 | PathParams:
186 |
187 | - **collectionId** (str): Feature Collection Id
188 |
189 | QueryParams:
190 |
191 | - **f** (str, one of [`json`, `html`]): Select response MediaType.
192 |
193 | HeaderParams:
194 |
- **accept** (str, one of [`application/json`, `text/html`]): Select response MediaType.
196 |
197 | Example:
198 |
199 | ```json
200 | curl http://127.0.0.1:8081/collections/public.countries | jq
201 | {
202 | "id": "public.countries",
203 | "links": [
204 | {
205 | "href": "http://127.0.0.1:8081/collections/public.countries",
206 | "rel": "self",
207 | "type": "application/json"
208 | },
209 | {
210 | "href": "http://127.0.0.1:8081/collections/public.countries/items",
211 | "rel": "items",
212 | "type": "application/geo+json",
213 | "title": "Items"
214 | },
215 | {
216 | "href": "http://127.0.0.1:8081/collections/public.countries/items?f=csv",
217 | "rel": "alternate",
218 | "type": "text/csv",
219 | "title": "Items (CSV)"
220 | },
221 | {
222 | "href": "http://127.0.0.1:8081/collections/public.countries/items?f=geojsonseq",
223 | "rel": "alternate",
224 | "type": "application/geo+json-seq",
225 | "title": "Items (GeoJSONSeq)"
226 | },
227 | {
228 | "href": "http://127.0.0.1:8081/collections/public.countries/queryables",
229 | "rel": "queryables",
230 | "type": "application/schema+json",
231 | "title": "Queryables"
232 | }
233 | ],
234 | "itemType": "feature",
235 | "crs": [
236 | "http://www.opengis.net/def/crs/OGC/1.3/CRS84"
237 | ]
238 | }
239 | ```
240 |
241 | Ref: https://docs.ogc.org/is/17-069r4/17-069r4.html#_collection_
242 |
243 |
244 | ## Feature Collection's Queryables
245 |
246 | Path: `/collections/{collectionId}/queryables`
247 |
248 | PathParams:
249 |
250 | - **collectionId** (str): Feature Collection Id
251 |
252 | QueryParams:
253 |
254 | - **f** (str, one of [`json`, `html`]): Select response MediaType.
255 |
256 | HeaderParams:
257 |
- **accept** (str, one of [`application/json`, `text/html`]): Select response MediaType.
259 |
260 | Example:
261 |
262 | ```json
263 | curl http://127.0.0.1:8081/collections/public.landsat_wrs/queryables | jq
264 | {
265 | "title": "public.landsat_wrs",
266 | "properties": {
267 | "geom": {
268 | "$ref": "https://geojson.org/schema/Geometry.json"
269 | },
270 | "ogc_fid": {
271 | "name": "ogc_fid",
272 | "type": "number"
273 | },
274 | "id": {
275 | "name": "id",
276 | "type": "string"
277 | },
278 | "pr": {
279 | "name": "pr",
280 | "type": "string"
281 | },
282 | "path": {
283 | "name": "path",
284 | "type": "number"
285 | },
286 | "row": {
287 | "name": "row",
288 | "type": "number"
289 | }
290 | },
291 | "type": "object",
292 | "$schema": "https://json-schema.org/draft/2019-09/schema",
293 | "$id": "http://127.0.0.1:8081/collections/public.landsat_wrs/queryables"
294 | }
295 | ```
296 |
297 | Ref: http://docs.ogc.org/DRAFTS/19-079r1.html#filter-queryables
298 |
299 | ## Features
300 |
301 | Path: `/collections/{collectionId}/items`
302 |
303 | PathParams:
304 |
305 | - **collectionId** (str): Feature Collection Id
306 |
307 | QueryParams:
308 |
309 | - **limit** (int): Limits the number of features in the response. Defaults to 10.
310 | - **offset** (int): Features offset.
- **bbox** (str): Comma (,) delimited bbox coordinates to spatially filter features in `minx,miny,maxx,maxy` form.
312 | - **datetime** (str): Single datetime or `/` delimited datetime intervals to temporally filter features.
313 |
314 | - interval-bounded = `date-time/date-time`
315 | - interval-half-bounded-start = `../date-time`
316 | - interval-half-bounded-end = `date-time/..`
317 | - datetime = `date-time`
318 |
- **ids** * (str): Comma (,) delimited list of item Ids.
- **properties** * (str): Comma (,) delimited list of item properties to return in each feature.
321 | - **filter** (str): CQL2 filter as defined by https://docs.ogc.org/DRAFTS/19-079r1.html#rc_filter
322 | - **filter-lang** (str, one of [`cql2-text`, `cql2-json`]): `Filter` language. Defaults to `cql2-text`.
323 | - **geom-column** * (str): Select geometry column to apply filter on and to create geometry from.
324 | - **datetime-column** * (str): Select datetime column to apply filter on.
325 | - **bbox-only** * (bool): Only return the bounding box of the feature.
326 | - **simplify** * (float): Simplify the output geometry to given threshold in decimal degrees.
327 |
328 | - **f** (str, one of [`geojson`, `html`, `json`, `csv`, `geojsonseq`, `ndjson`]): Select response MediaType.
329 |
330 | HeaderParams:
331 |
- **accept** (str, one of [`application/geo+json`, `text/html`, `application/json`, `text/csv`, `application/geo+json-seq`, `application/ndjson`]): Select response MediaType.
333 |
334 | \* **Not in OGC API Features Specification**
335 |
336 | !!! Important
    Additional query-parameters (of the form `PROP=VALUE`) will be considered as a **property filter**.
338 | Properties (`PROP`) not matching collection's column will be ignored.
339 |
340 | Example:
341 |
342 | - `http://127.0.0.1:8081/collections/public.countries/items`
343 | - `http://127.0.0.1:8081/collections/public.countries/items?limit=1` *limit to only 1 feature*
344 | - `http://127.0.0.1:8081/collections/public.countries/items?limit=1&offset=2` *limit to only 1 feature and add offset 2 (return the third feature of the collection)*
345 | - `http://127.0.0.1:8081/collections/public.countries/items?bbox=-94.702148,34.488448,-85.429688,41.112469` *limit result to a specific bbox*.
346 | - `http://127.0.0.1:8081/collections/public.countries/items?ids=1,2,3` *limit result to ids `1`, `2` and `3`*
347 | - `http://127.0.0.1:8081/collections/public.countries/items?properties=name` *only return `name` property*
348 |
349 | - **Property Filter**
350 | - `http://127.0.0.1:8081/collections/public.countries/items?name=Zimbabwe` *only return features where property `name==Zimbabwe`*
351 |
352 | - **Datetime**
353 | - `http://127.0.0.1:8081/collections/public.countries/items?datetime=2004-10-19T10:23:54Z` *return features with datetime column with value `==2004-10-19T10:23:54Z`*.
354 | - `http://127.0.0.1:8081/collections/public.countries/items?datetime=../2004-10-19T10:23:54Z` *return features with datetime column with value `<=2004-10-19T10:23:54Z`*.
355 | - `http://127.0.0.1:8081/collections/public.countries/items?datetime=2004-10-19T10:23:54Z/..` *return features with datetime column with value `>=2004-10-19T10:23:54Z`*.
356 | - `http://127.0.0.1:8081/collections/public.countries/items?datetime=2004-10-19T10:23:54Z/2004-10-20T10:23:54Z` *return features with datetime column with value between `2004-10-19T10:23:54Z` and `2004-10-20T10:23:54Z`*.
357 |
358 | - **CQL2**
359 | - `http://127.0.0.1:8081/collections/public.countries/items?filter-lang=cql2-json&filter={"op":"=","args":[{"property":"ogc_fid"},1]}`
360 | - `http://127.0.0.1:8081/collections/public.countries/items?filter-lang=cql2-text&filter=ogc_fid=1`
361 |
362 | Ref: https://docs.ogc.org/is/17-069r4/17-069r4.html#_items_ and https://docs.ogc.org/DRAFTS/19-079r1.html#filter-param
363 |
364 |
365 | ## Feature
366 |
367 | Path: `/collections/{collectionId}/items/{itemId}`
368 |
369 | PathParams:
370 |
371 | - **collectionId** (str): Feature Collection Id
372 | - **itemId** (str): Feature Id
373 |
374 | QueryParams:
375 | - **geom-column** * (str): Select geometry column to create geometry from.
376 | - **bbox-only** * (bool): Only return the bounding box of the feature.
377 | - **simplify** * (float): Simplify the output geometry to given threshold in decimal degrees.
378 |
379 | - **f** (str, one of [`geojson`, `html`, `json`]): Select response MediaType.
380 |
381 | HeaderParams:
382 |
- **accept** (str, one of [`application/geo+json`, `text/html`, `application/json`]): Select response MediaType.
384 |
385 | Example:
386 |
387 | ```json
388 | {
389 | "type": "Feature",
390 | "geometry": {
391 | "coordinates": [...]
392 | "type": "MultiPolygon"
393 | },
394 | "properties": {
395 | "gid": 1,
396 | "name": "Zimbabwe",
397 | ...
398 | },
399 | "id": "1",
400 | "links": [
401 | {
402 | "href": "http://127.0.0.1:8081/collections/public.countries",
403 | "rel": "collection",
404 | "type": "application/json"
405 | },
406 | {
407 | "href": "http://127.0.0.1:8081/collections/public.countries/items/1",
408 | "rel": "self",
409 | "type": "application/geo+json"
410 | }
411 | ]
412 | }
413 | ```
414 |
415 | Ref: https://docs.ogc.org/is/17-069r4/17-069r4.html#_feature_
416 |
--------------------------------------------------------------------------------
/docs/src/img/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/developmentseed/tifeatures/555638a83b3538e1d91fe8718a8ea047a527f207/docs/src/img/favicon.ico
--------------------------------------------------------------------------------
/docs/src/img/logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/developmentseed/tifeatures/555638a83b3538e1d91fe8718a8ea047a527f207/docs/src/img/logo.png
--------------------------------------------------------------------------------
/docs/src/index.md:
--------------------------------------------------------------------------------
1 | ../../README.md
--------------------------------------------------------------------------------
/docs/src/release-notes.md:
--------------------------------------------------------------------------------
1 | ../../CHANGES.md
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [project]
2 | name = "tifeatures"
3 | description = "Simple and Fast Geospatial Features API for PostGIS."
4 | readme = "README.md"
5 | requires-python = ">=3.8"
6 | license = {file = "LICENSE"}
7 | authors = [
8 | {name = "Vincent Sarago", email = "vincent@developmentseed.org"},
9 | {name = "David Bitner", email = "david@developmentseed.org"},
10 | ]
11 | keywords = ["FastAPI", "POSTGIS", "OGC Features"]
12 | classifiers = [
13 | "Intended Audience :: Information Technology",
14 | "Intended Audience :: Science/Research",
15 | "License :: OSI Approved :: BSD License",
16 | "Programming Language :: Python :: 3.8",
17 | "Programming Language :: Python :: 3.9",
18 | "Programming Language :: Python :: 3.10",
19 | "Programming Language :: Python :: 3.11",
20 | "Topic :: Scientific/Engineering :: GIS",
21 | ]
22 | dynamic = ["version"]
23 | dependencies = [
24 | "orjson",
25 | "asyncpg>=0.23.0",
26 | "buildpg>=0.3",
27 | "fastapi>=0.87",
28 | "jinja2>=2.11.2,<4.0.0",
29 | "geojson-pydantic>=0.4.3",
30 | "pygeofilter>=0.2.0,<0.3.0",
31 | "ciso8601~=2.2.0",
32 | "starlette-cramjam>=0.3,<0.4",
33 | "importlib_resources>=1.1.0; python_version < '3.9'",
34 | "typing_extensions; python_version < '3.9.2'",
35 | ]
36 |
37 | [project.optional-dependencies]
38 | test = [
39 | "pytest",
40 | "pytest-cov",
41 | "pytest-asyncio",
42 | "pytest-benchmark",
43 | "sqlalchemy>=1.1,<1.4",
44 | "httpx",
45 | "psycopg2",
46 | "pytest-pgsql",
47 | ]
48 | dev = [
49 | "pre-commit",
50 | ]
51 | server = [
52 | "uvicorn[standard]>=0.12.0,<0.19.0",
53 | ]
54 | docs = [
55 | "mkdocs",
56 | "mkdocs-material",
57 | "pygments",
58 | "pdocs",
59 | ]
60 |
61 | [project.urls]
62 | Homepage = "https://developmentseed.org/tifeatures"
63 | Source = "https://github.com/developmentseed/tifeatures"
64 | Documentation = "https://developmentseed.org/tifeatures/"
65 |
66 | [tool.hatch.version]
67 | path = "tifeatures/__init__.py"
68 |
69 | [tool.hatch.build.targets.sdist]
70 | exclude = [
71 | "/tests",
72 | "/dockerfiles",
73 | "/docs",
74 | "/data",
75 | "docker-compose.yml",
76 | "CONTRIBUTING.md",
77 | "CHANGES.md",
78 | ".pytest_cache",
79 | ".github",
80 | ".bumpversion.cfg",
81 | ".env.example",
82 | ".flake8",
83 | ".gitignore",
84 | ".pre-commit-config.yaml",
85 | ]
86 |
87 | [build-system]
88 | requires = ["hatchling"]
89 | build-backend = "hatchling.build"
90 |
91 |
92 | [tool.isort]
93 | profile = "black"
94 | known_first_party = ["tifeatures"]
95 | known_third_party = ["geojson_pydantic", "buildpg", "pydantic"]
96 | forced_separate = [
97 | "fastapi",
98 | "starlette",
99 | ]
100 | default_section = "THIRDPARTY"
101 |
102 | [tool.mypy]
103 | no_strict_optional = "True"
104 |
105 | [tool.pydocstyle]
106 | select = "D1"
107 | match = "(?!test).*.py"
108 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
"""Fake tifeatures setup.py for github."""
import sys

from setuptools import setup

# The real build configuration lives in pyproject.toml (hatchling backend);
# this script exists only to abort legacy `setup.py install` invocations.
ERROR_MESSAGE = """
===============================
Unsupported installation method
===============================
tifeatures no longer supports installation with `python setup.py install`.
Please use `python -m pip install .` instead.
"""

sys.stderr.write(ERROR_MESSAGE)
sys.exit(1)


# The below code will never execute, however GitHub is particularly
# picky about where it finds Python packaging metadata.
# See: https://github.com/github/feedback/discussions/6456
#
# To be removed once GitHub catches up.

setup(
    name="tifeatures",
    install_requires=[
        "orjson",
        "asyncpg>=0.23.0",
        "buildpg>=0.3",
        "fastapi>=0.87",
        "jinja2>=2.11.2,<4.0.0",
        "geojson-pydantic>=0.4.3",
        "pygeofilter>=0.2.0,<0.3.0",
        "ciso8601~=2.2.0",
        "starlette-cramjam>=0.3,<0.4",
        "importlib_resources>=1.1.0; python_version < '3.9'",
        "typing_extensions; python_version < '3.9.2'",
    ],
)
40 |
--------------------------------------------------------------------------------
/tests/benchmarks.py:
--------------------------------------------------------------------------------
1 | """Benchmark items."""
2 |
3 | import pytest
4 |
5 |
@pytest.mark.parametrize("limit", [1, 10, 50, 100, 200, 250])
@pytest.mark.parametrize("format", ["geojson", "csv", "html"])
def test_benchmark_items(benchmark, format, limit, app):
    """Benchmark items endpoint."""
    query = {"f": format, "limit": limit}

    def fetch_items(params):
        # Single request against the items endpoint; timed by the benchmark fixture.
        return app.get("/collections/public.landsat_wrs/items", params=params)

    # Group results by output format so benchmark reports are easy to compare.
    benchmark.group = f"Items: {format}"

    response = benchmark(fetch_items, query)
    assert response.status_code == 200
19 |
20 |
@pytest.mark.parametrize("name", ["NewfoundlandandLabrador", "Saskatchewan"])
@pytest.mark.parametrize("format", ["geojson", "html"])
def test_benchmark_item(benchmark, format, name, app):
    """Benchmark big item."""
    # Filtering on the `prnom` property selects a single large feature.
    query = {"f": format, "prnom": name}

    def fetch_feature(params):
        return app.get("/collections/public.canada/items", params=params)

    benchmark.group = "Big Feature"

    response = benchmark(fetch_feature, query)
    assert response.status_code == 200
    # Only the GeoJSON response body can be introspected for properties.
    if format == "geojson":
        assert response.json()["features"][0]["properties"]["prnom"] == name
37 |
--------------------------------------------------------------------------------
/tests/conftest.py:
--------------------------------------------------------------------------------
1 | """``pytest`` configuration."""
2 |
3 | import os
4 |
5 | import pytest
6 | import pytest_pgsql
7 |
8 | from starlette.testclient import TestClient
9 |
10 | DATA_DIR = os.path.join(os.path.dirname(__file__), "fixtures")
11 |
12 |
# Session-scoped throwaway PostgreSQL database fixture (transacted; state
# restore between uses is disabled via `use_restore_state=False`).
test_db = pytest_pgsql.TransactedPostgreSQLTestDB.create_fixture(
    "test_db", scope="session", use_restore_state=False
)
16 |
17 |
@pytest.fixture(scope="session")
def database_url(test_db):
    """
    Session scoped fixture to launch a postgresql database in a separate process. We use psycopg2 to ingest test data
    because pytest-asyncio event loop is a function scoped fixture and cannot be called within the current scope.
    Returns a database url which we pass to our application through a monkeypatched environment variable.
    """
    # PostGIS is required by the fixture SQL files (geometry columns, ST_* functions).
    assert test_db.install_extension("postgis")
    test_db.run_sql_file(os.path.join(DATA_DIR, "landsat_wrs.sql"))
    assert test_db.has_table("landsat_wrs")

    test_db.run_sql_file(os.path.join(DATA_DIR, "my_data.sql"))
    assert test_db.has_table("my_data")

    test_db.run_sql_file(os.path.join(DATA_DIR, "nongeo_data.sql"))
    assert test_db.has_table("nongeo_data")

    # Derive a table with two geometry columns (geom + centroid) so tests can
    # exercise geometry-column selection.
    test_db.connection.execute(
        "CREATE TABLE landsat AS SELECT geom, ST_Centroid(geom) as centroid, ogc_fid, id, pr, path, row from landsat_wrs;"
    )
    test_db.connection.execute("ALTER TABLE landsat ADD PRIMARY KEY (ogc_fid);")
    assert test_db.has_table("landsat")

    # Sanity check: derived table must contain every row of the source table.
    count_landsat = test_db.connection.execute(
        "SELECT COUNT(*) FROM landsat_wrs"
    ).scalar()
    count_landsat_centroid = test_db.connection.execute(
        "SELECT COUNT(*) FROM landsat"
    ).scalar()
    assert count_landsat == count_landsat_centroid

    test_db.run_sql_file(os.path.join(DATA_DIR, "canada.sql"))
    assert test_db.has_table("canada")

    return test_db.connection.engine.url
53 |
54 |
@pytest.fixture(autouse=True)
def app(database_url, monkeypatch):
    """Create app with connection to the pytest database."""
    # Environment variables consumed by tifeatures settings; the dict preserves
    # insertion order, so setenv calls happen in the same order as before.
    env = {
        "DATABASE_URL": str(database_url),
        "ONLY_SPATIAL_TABLES": "FALSE",
        "TIFEATURES_TEMPLATE_DIRECTORY": os.path.join(DATA_DIR, "templates"),
        "TIFEATURES_TABLE_CONFIG__public_my_data__datetimecol": "datetime",
        "TIFEATURES_TABLE_CONFIG__public_my_data__geomcol": "geom",
        "TIFEATURES_TABLE_CONFIG__public_my_data__pk": "ogc_fid",
        "TIFEATURES_TABLE_CONFIG__public_my_data_alt__datetimecol": "otherdt",
        "TIFEATURES_TABLE_CONFIG__public_my_data_alt__geomcol": "othergeom",
        "TIFEATURES_TABLE_CONFIG__public_my_data_alt__pk": "id",
        "TIFEATURES_TABLE_CONFIG__public_landsat__geomcol": "geom",
    }
    for key, value in env.items():
        monkeypatch.setenv(key, value)

    from tifeatures.main import app

    # Remove middlewares https://github.com/encode/starlette/issues/472
    app.user_middleware = []
    app.middleware_stack = app.build_middleware_stack()

    # register functions to app.state.function_catalog here

    with TestClient(app) as client:
        yield client
87 |
--------------------------------------------------------------------------------
/tests/fixtures/my_data.sql:
--------------------------------------------------------------------------------
-- Test fixture: spatial table with a serial primary key, a geometry column,
-- and id/datetime attribute columns.
SET standard_conforming_strings = OFF;
-- Start from a clean slate, including any stale geometry_columns registration.
DROP TABLE IF EXISTS "public"."my_data" CASCADE;
DELETE FROM geometry_columns WHERE f_table_name = 'my_data' AND f_table_schema = 'public';
BEGIN;
CREATE TABLE "public"."my_data" ( "ogc_fid" SERIAL, CONSTRAINT "my_data_pk" PRIMARY KEY ("ogc_fid") );
-- Register a 2D geometry column in EPSG:4326 via PostGIS.
SELECT AddGeometryColumn('public','my_data','geom',4326,'GEOMETRY',2);
CREATE INDEX "my_data_geom_geom_idx" ON "public"."my_data" USING GIST ("geom");
ALTER TABLE "public"."my_data" ADD COLUMN "id" VARCHAR;
ALTER TABLE "public"."my_data" ADD COLUMN "datetime" TIMESTAMP;
10 | INSERT INTO "public"."my_data" ("geom" , "id", "datetime") VALUES ('0103000020E6100000010000001B0000003670CC05599B25C03A92CB7F483F54408907944DB9F221C0D9CEF753E315544069D68681BE5B22C0355D864BD1145440984C2580F45C27C062327530C20754409CB396CA942C30C08E6EC42E50F05340F32225E11DCB30C07C98C2D614ED5340075F984C15FC30C0075F984C15EC53400AA1BD9D6AD732C03439A530F50B5440D8BFC6C0170533C00414E74C050F54407650100F7C0E33C0B199D586A60F5440A01BF45DE29634C0B61719B9F6295440838D3D254B5D35C0DC611EC044375440B8A6A26802F135C06705618A2C4154407CBD21E2CF3136C09B1B77FC844554402CD49AE61D3736C076711B0DE045544039117CFD650136C001AEC11005475440DC27DD0AB9C935C0F45E61C1344854406182187FE9BA35C03AF2E08A854854400736A0D273F130C050CF32FAA1625440ED137AA9497230C0441F419D576554401D9FC06CB06E2BC0B1930B183C745440017C2AECC5F92AC01E2006F67A7554401895D40968822AC0986E1283C07654405D44620EE0782AC0E00B92AC54765440FAACE2F3F95C27C0CDCE93B2275354400D2FBCF61DD226C0385BB99C044D54403670CC05599B25C03A92CB7F483F5440', '0', '2004-10-19 10:23:54');
11 | INSERT INTO "public"."my_data" ("geom" , "id", "datetime") VALUES ('0103000020E61000000100000019000000984067B8143E3DC043C2B8B8F40B5440ACEF9DFAC14B3DC0E950B3BEBB0C544070CE88D2DE503DC01B2FDD24060D544034C8A112A4243DC064CC7707650E54409232AD9551103DC079704A40060F5440A630DBCBFBF43CC0E22ABE1BDF0F5440AC95A5A7DFA638C09E34007606325440FE987A2A9D7238C05D165F0DA5335440D1BF9E64C80A38C0FF6D3AC6DC3654409ACC3E07335D36C0578150C82C4454407CBD21E2CF3136C09B1B77FC8445544039117CFD650136C001AEC110054754401EA7E8482ECF35C07F6ABC7493485440DC27DD0AB9C935C0F45E61C134485440A2F3387764C135C09C775737A44754405526CE34BBDB34C047F7C465133854408DF37646C5EA33C0F10FDC85BE2754406D6485236BA431C08C72AF36460054403EE8D9ACFA9C30C07CF2B0506BEE5340F32225E11DCB30C07C98C2D614ED5340FE41CA2BA27737C016B27D9C8ABB5340C442AD69DEA137C05F07CE1951BA5340F9CBEEC9C30A38C07E078C8947C05340898D7238194D38C059C5B4D10CC45340984067B8143E3DC043C2B8B8F40B5440', '1', '2004-10-20 10:23:54');
12 | INSERT INTO "public"."my_data" ("geom" , "id", "datetime") VALUES ('0103000020E61000000100000013000000C0155236C40A38C052F1FFE1D8C75340B244B5A16EC837C014EBB5CD0CC4534073D712F2414F37C0D3BCE3141DBD5340FE41CA2BA27737C016B27D9C8ABB5340A2728C64C30A38C03BFB4402D0B553400C6AB4D7723A3DC0BDA377861D82534058CA32C4B15E3DC062105839B48053402A2097D1F19641C0EAE96F4E58CC5340F0A7C64B379941C07F6ABC7493CC5340E11AE2531A8741C01F2670501DCE5340CED31A45F57241C03EC92059D3CF534009E08D47F1E83FC0EAC3384350F05340DFE755925F713EC036A2858243005440ACEF9DFAC14B3DC0E950B3BEBB0C544034C8A112A4243DC064CC7707650E5440F602E719D4063DC0AE877727A90F54400A68226C78FA3CC0234A7B832F105440A630DBCBFBF43CC0E22ABE1BDF0F5440C0155236C40A38C052F1FFE1D8C75340', '2', '2004-10-21 10:23:54');
13 | INSERT INTO "public"."my_data" ("geom" , "id", "datetime") VALUES ('0103000020E610000001000000110000001B2CBE53855542C051F99E0E805D534049A5CD2EAE0644C03857A7D846865340462575029A0844C0A60A46257586534063B4EEABC4F943C08D992E511D8853409C72BC6BC5E843C0920AAB5C038A5340721D3749863342C03D0220C7DABA53402A2097D1F19641C0EAE96F4E58CC5340E11AE2531A8741C01F2670501DCE534068226C787A7541C0075F984C15D05340CED31A45F57241C03EC92059D3CF534048E17A14AE173DC06B2BF697DD8353400C6AB4D7723A3DC0BDA377861D825340A03E0335AD283FC0314A54553C6953409C6F1F2DEA1541C00EA6095E6A425340BEC11726532541C0BE9F1A2FDD405340EB51B81E853342C0302C67AA4C5A53401B2CBE53855542C051F99E0E805D5340', '3', '2004-10-22 10:23:54');
14 | INSERT INTO "public"."my_data" ("geom" , "id", "datetime") VALUES ('0103000020E610000001000000110000000A4C8422590E46C0B656FB86F03B5340D5E76A2BF60F46C0075F984C153C5340FA28B2217F0346C0CE0A257ADB3D5340BEE6287052F545C01AA33BF2DF3F5340F25A937BB7D244C009CB92853C69534049A5CD2EAE0644C03857A7D84686534063B4EEABC4F943C08D992E511D88534034A2B437F8EA43C0F54A5986388A53409C72BC6BC5E843C0920AAB5C038A534050AF9465883342C0363B85F6B5605340D43E0032881142C02A5884BF7F5D5340F4FDD478E90641C007F01648504453409C6F1F2DEA1541C00EA6095E6A4253404E4E9C88873342C06DC6E4C7471E53403EDF52396E3443C0DC9EAF2DC7FD524044696FF0854143C032772D211FFC52400A4C8422590E46C0B656FB86F03B5340', '4', '2004-10-23 10:23:54');
15 | INSERT INTO "public"."my_data" ("geom" , "id", "datetime") VALUES ('0103000020E6100000010000000D000000BBE9944235C347C0EBF06E7961EE52406ADE718A8EC447C0D122DBF97EEE5240942D6301ECB947C05B59871F60F0524086CAEEF61AAE47C0BDEF3BBB76F252400A4C8422590E46C0B656FB86F03B5340FA28B2217F0346C0CE0A257ADB3D534057EC2FBB27F745C02B1895D409405340BEE6287052F545C01AA33BF2DF3F53401D386744692743C07958A835CDFF52403EDF52396E3443C0DC9EAF2DC7FD5240B9E39237FD0645C0574B4E2543B552400AD7A3703D1245C03A234A7B83B35240BBE9944235C347C0EBF06E7961EE5240', '5', '2004-10-24 10:23:54');
16 | ALTER TABLE public.my_data ADD COLUMN otherdt timestamptz;
17 | ALTER TABLE public.my_data ADD COLUMN othergeom geometry;
18 | UPDATE my_data SET otherdt=datetime+'1 year'::interval, othergeom=st_pointonsurface(geom);
19 | CREATE VIEW public.my_data_alt AS SELECT * FROM my_data;
20 | COMMIT;
21 |
--------------------------------------------------------------------------------
/tests/fixtures/nongeo_data.sql:
--------------------------------------------------------------------------------
-- Test fixture: a table with NO geometry column, used to exercise the
-- API against non-spatial collections.
SET standard_conforming_strings = OFF;
-- Drop any previous copy and clear stale geometry_columns metadata so the
-- fixture can be re-loaded idempotently (the table itself has no geometry).
DROP TABLE IF EXISTS "public"."nongeo_data" CASCADE;
DELETE FROM geometry_columns WHERE f_table_name = 'nongeo_data' AND f_table_schema = 'public';
BEGIN;
-- Minimal schema: serial primary key plus "id" and "datetime" attributes.
CREATE TABLE "public"."nongeo_data" ( "ogc_fid" SERIAL, CONSTRAINT "nongeo_data_pk" PRIMARY KEY ("ogc_fid") );
ALTER TABLE "public"."nongeo_data" ADD COLUMN "id" VARCHAR;
ALTER TABLE "public"."nongeo_data" ADD COLUMN "datetime" TIMESTAMP;
-- Six rows with consecutive daily timestamps (2004-10-19 .. 2004-10-24),
-- handy for datetime-interval filter tests.
INSERT INTO "public"."nongeo_data" ("id", "datetime") VALUES ('0', '2004-10-19 10:23:54');
INSERT INTO "public"."nongeo_data" ("id", "datetime") VALUES ('1', '2004-10-20 10:23:54');
INSERT INTO "public"."nongeo_data" ("id", "datetime") VALUES ('2', '2004-10-21 10:23:54');
INSERT INTO "public"."nongeo_data" ("id", "datetime") VALUES ('3', '2004-10-22 10:23:54');
INSERT INTO "public"."nongeo_data" ("id", "datetime") VALUES ('4', '2004-10-23 10:23:54');
INSERT INTO "public"."nongeo_data" ("id", "datetime") VALUES ('5', '2004-10-24 10:23:54');
COMMIT;
--------------------------------------------------------------------------------
/tests/fixtures/templates/collections.html:
--------------------------------------------------------------------------------
{# Test fixture: custom "collections" page template. Renders a heading plus a
   table row per collection from `response.collections` — presumably used to
   verify user-supplied template overrides (see tests/routes/test_templates.py
   — TODO confirm). NOTE(review): the HTML tags appear to have been stripped
   by extraction; only the text content and Jinja statements remain. #}
{% include "header.html" %}

Custom Collections




 
Title
Type
Description



{% for collection in response.collections %}

{{ collection.title or collection.id }}
{{ collection.itemType }}
{{ collection.description or collection.title or collection.id }}

{% endfor %}



 
{% include "footer.html" %}
--------------------------------------------------------------------------------
/tests/routes/__init__.py:
--------------------------------------------------------------------------------
1 | """timvt route tests."""
2 |
--------------------------------------------------------------------------------
/tests/routes/test_collections.py:
--------------------------------------------------------------------------------
1 | """Test /collections endpoints."""
2 |
3 |
def test_collections(app):
    """Test /collections endpoint."""
    response = app.get("/collections")
    assert response.status_code == 200
    assert response.headers["content-type"] == "application/json"
    body = response.json()
    assert ["collections", "links"] == list(body)

    # Every registered table/view fixture must be advertised.
    assert list(filter(lambda x: x["id"] == "public.landsat_wrs", body["collections"]))
    assert list(filter(lambda x: x["id"] == "public.my_data", body["collections"]))
    assert list(filter(lambda x: x["id"] == "public.nongeo_data", body["collections"]))
    assert list(filter(lambda x: x["id"] == "public.landsat", body["collections"]))

    # BUG FIX: this previously requested "/?f=html" (the landing page) even
    # though this test covers the /collections endpoint.
    response = app.get("/collections?f=html")
    assert response.status_code == 200
    assert "text/html" in response.headers["content-type"]
    assert "Collections" in response.text
21 |
22 |
def test_collections_landsat(app):
    """Test /collections/{collectionId} endpoint."""
    res = app.get("/collections/public.landsat_wrs")
    assert res.status_code == 200
    assert res.headers["content-type"] == "application/json"
    data = res.json()
    assert data["id"] == "public.landsat_wrs"
    assert list(data) == ["id", "links", "extent", "itemType", "crs"]
    assert list(data["extent"]["spatial"]) == ["bbox", "crs"]

    # HTML representation
    res = app.get("/collections/public.landsat_wrs?f=html")
    assert res.status_code == 200
    assert "text/html" in res.headers["content-type"]
    assert "Collection: public.landsat_wrs" in res.text

    # unknown table name -> 404 with an explicit detail message
    res = app.get("/collections/public.landsat_wr")
    assert res.status_code == 404
    assert res.headers["content-type"] == "application/json"
    assert res.json()["detail"] == "Table/Function 'public.landsat_wr' not found."

    # missing schema prefix -> 422 invalid-format error
    res = app.get("/collections/landsat_wrs")
    assert res.status_code == 422
    assert res.json()["detail"] == "Invalid Table format 'landsat_wrs'."
50 |
51 |
def test_collections_queryables(app):
    """Test /collections/{collectionId}/queryables endpoint."""
    res = app.get("/collections/public.landsat_wrs/queryables")
    assert res.status_code == 200
    assert res.headers["content-type"] == "application/schema+json"
    data = res.json()
    assert data["title"] == "public.landsat_wrs"
    assert data["type"] == "object"
    assert list(data) == ["title", "properties", "type", "$schema", "$id"]

    # HTML representation
    res = app.get("/collections/public.landsat_wrs/queryables?f=html")
    assert res.status_code == 200
    assert "text/html" in res.headers["content-type"]
    assert "Queryables" in res.text

    # table with two geometry columns: both exposed as GeoJSON Geometry refs
    res = app.get("/collections/public.landsat/queryables")
    assert res.status_code == 200
    assert res.headers["content-type"] == "application/schema+json"
    data = res.json()
    assert data["title"] == "public.landsat"
    geometry_schema = "https://geojson.org/schema/Geometry.json"
    assert data["properties"]["geom"]["$ref"] == geometry_schema
    assert data["properties"]["centroid"]["$ref"] == geometry_schema
80 |
--------------------------------------------------------------------------------
/tests/routes/test_endpoints.py:
--------------------------------------------------------------------------------
1 | """Test endpoints."""
2 |
3 |
def test_landing(app):
    """Test / endpoint."""
    res = app.get("/")
    assert res.status_code == 200
    assert res.headers["content-type"] == "application/json"
    data = res.json()
    assert data["title"] == "TiFeatures"
    assert data["links"]

    # explicit HTML output via the f= query parameter
    res = app.get("/?f=html")
    assert res.status_code == 200
    assert "text/html" in res.headers["content-type"]
    assert "TiFeatures" in res.text

    # content negotiation via the Accept header
    res = app.get("/", headers={"accept": "text/html"})
    assert res.status_code == 200
    assert "text/html" in res.headers["content-type"]
    assert "TiFeatures" in res.text

    # quality values: html wins with the higher q
    res = app.get(
        "/", headers={"accept": "application/json;q=0.9, text/html;q=1.0"}
    )
    assert res.status_code == 200
    assert "text/html" in res.headers["content-type"]
    assert "TiFeatures" in res.text

    # csv is not supported, so json is served despite the lower preference
    res = app.get("/", headers={"accept": "text/csv;q=1.0, application/json"})
    assert res.status_code == 200
    assert res.headers["content-type"] == "application/json"
    assert res.json()["title"] == "TiFeatures"

    # wildcard media type also falls back to json
    res = app.get("/", headers={"accept": "text/csv;q=1.0, */*"})
    assert res.status_code == 200
    assert res.headers["content-type"] == "application/json"
    assert res.json()["title"] == "TiFeatures"

    # unparseable/unknown Accept value -> default (json) representation
    res = app.get("/", headers={"accept": "text/htm"})
    assert res.status_code == 200
    assert res.headers["content-type"] == "application/json"
    data = res.json()
    assert data["title"] == "TiFeatures"
    assert data["links"]

    # the ?f= query parameter overrides the Accept header
    res = app.get("/?f=json", headers={"accept": "text/html"})
    assert res.status_code == 200
    assert res.headers["content-type"] == "application/json"
    assert res.json()["title"] == "TiFeatures"
60 |
61 |
def test_docs(app):
    """Test /api endpoint."""
    # OpenAPI document as JSON
    res = app.get("/api")
    assert res.status_code == 200
    assert res.headers["content-type"] == "application/json"
    assert res.json()["openapi"]

    # interactive HTML documentation page
    res = app.get("/api.html")
    assert res.status_code == 200
    assert "text/html" in res.headers["content-type"]
73 |
74 |
def test_conformance(app):
    """Test /conformance endpoint."""
    res = app.get("/conformance")
    assert res.status_code == 200
    assert res.headers["content-type"] == "application/json"
    assert res.json()["conformsTo"]

    # HTML representation
    res = app.get("/conformance?f=html")
    assert res.status_code == 200
    assert "text/html" in res.headers["content-type"]
    assert "Conformance" in res.text
87 |
--------------------------------------------------------------------------------
/tests/routes/test_item.py:
--------------------------------------------------------------------------------
1 | """Test /item endpoints."""
2 |
3 |
def test_item(app):
    """Test /items/{item id} endpoint."""
    res = app.get("/collections/public.landsat_wrs/items/1")
    assert res.status_code == 200
    assert res.headers["content-type"] == "application/geo+json"
    data = res.json()
    assert data["type"] == "Feature"
    assert data["id"] == 1
    assert data["links"]

    # HTML representation
    res = app.get("/collections/public.landsat_wrs/items/1?f=html")
    assert res.status_code == 200
    assert "text/html" in res.headers["content-type"]
    assert "Collection Item: 1" in res.text

    # plain-json output: flattened feature with ids and geometry at top level
    res = app.get("/collections/public.landsat_wrs/items/1?f=json")
    assert res.status_code == 200
    assert res.headers["content-type"] == "application/json"
    expected_keys = {
        "collectionId",
        "itemId",
        "id",
        "pr",
        "row",
        "path",
        "ogc_fid",
        "geometry",
    }
    assert set(res.json().keys()) == expected_keys

    # unknown item id -> 404
    res = app.get("/collections/public.landsat_wrs/items/50000")
    assert res.status_code == 404
40 |
--------------------------------------------------------------------------------
/tests/routes/test_items.py:
--------------------------------------------------------------------------------
1 | """Test /items endpoints."""
2 |
3 | import json
4 |
5 |
def test_items(app):
    """Test /items endpoint."""
    res = app.get("/collections/public.landsat_wrs/items")
    assert res.status_code == 200
    assert res.headers["content-type"] == "application/geo+json"
    data = res.json()
    assert data["type"] == "FeatureCollection"
    assert data["id"] == "public.landsat_wrs"
    assert data["title"] == "public.landsat_wrs"
    assert data["links"]
    assert data["numberMatched"] == 16269
    assert data["numberReturned"] == 10
    rels = [link["rel"] for link in data["links"]]
    assert rels == ["collection", "self", "next"]

    # HTML representation
    res = app.get("/collections/public.landsat_wrs/items?f=html")
    assert res.status_code == 200
    assert "text/html" in res.headers["content-type"]
    assert "Collection Items: public.landsat_wrs" in res.text
24 |
25 |
def test_items_limit_and_offset(app):
    """Test /items endpoint with limit and offset options."""
    res = app.get("/collections/public.landsat_wrs/items?limit=1")
    assert res.status_code == 200
    assert res.headers["content-type"] == "application/geo+json"
    data = res.json()
    assert len(data["features"]) == 1
    assert data["features"][0]["id"] == 1
    assert data["features"][0]["properties"]["ogc_fid"] == 1
    assert data["numberMatched"] == 16269
    assert data["numberReturned"] == 1

    # second page: a "prev" link appears alongside "next"
    res = app.get("/collections/public.landsat_wrs/items?limit=1&offset=1")
    assert res.status_code == 200
    assert res.headers["content-type"] == "application/geo+json"
    data = res.json()
    assert len(data["features"]) == 1
    assert data["features"][0]["id"] == 2
    assert data["features"][0]["properties"]["ogc_fid"] == 2
    assert data["numberMatched"] == 16269
    assert data["numberReturned"] == 1
    rels = [link["rel"] for link in data["links"]]
    assert rels == ["collection", "self", "next", "prev"]

    # negative offset is rejected
    res = app.get("/collections/public.landsat_wrs/items?offset=-1")
    assert res.status_code == 422

    # very last item: no "next" link
    res = app.get("/collections/public.landsat_wrs/items?offset=16268")
    assert res.status_code == 200
    assert res.headers["content-type"] == "application/geo+json"
    data = res.json()
    assert len(data["features"]) == 1
    assert data["features"][0]["id"] == 16269
    assert data["features"][0]["properties"]["ogc_fid"] == 16269
    assert data["numberMatched"] == 16269
    assert data["numberReturned"] == 1
    rels = [link["rel"] for link in data["links"]]
    assert rels == ["collection", "self", "prev"]

    # offset overflow, return empty feature collection
    res = app.get("/collections/public.landsat_wrs/items?offset=20000")
    assert res.status_code == 200
    data = res.json()
    assert len(data["features"]) == 0
    assert data["numberMatched"] == 16269
    assert data["numberReturned"] == 0
74 |
75 |
def test_items_bbox(app):
    """Test /items endpoint with bbox options."""
    res = app.get(
        "/collections/public.landsat_wrs/items?bbox=-94.702148,34.488448,-85.429688,41.112469"
    )
    assert res.status_code == 200
    assert res.headers["content-type"] == "application/geo+json"
    data = res.json()
    assert len(data["features"]) == 10
    assert data["numberMatched"] == 45
    assert data["numberReturned"] == 10

    # malformed or out-of-range bbox values are all rejected with 422
    bad_bboxes = [
        "-200,34.488448,-85.429688,41.112469",  # lon out of range
        "-94.702148,91,-85.429688,41.112469",  # lat out of range
        "-200,34.488448,0,-85.429688,41.112469,0",  # 3D, lon out of range
        "-94.702148,91,0,-85.429688,41.112469,0",  # 3D, lat out of range
        "0,1,2,3,4,5,6",  # more than 6 coordinates
    ]
    for bbox in bad_bboxes:
        res = app.get(f"/collections/public.landsat_wrs/items?bbox={bbox}")
        assert res.status_code == 422
111 |
112 |
def test_items_ids(app):
    """Test /items endpoint with ids options."""
    # single id
    res = app.get("/collections/public.landsat_wrs/items?ids=1")
    assert res.status_code == 200
    assert res.headers["content-type"] == "application/geo+json"
    data = res.json()
    assert len(data["features"]) == 1
    assert data["numberMatched"] == 1
    assert data["numberReturned"] == 1
    assert data["features"][0]["id"] == 1
    assert data["features"][0]["properties"]["ogc_fid"] == 1

    # comma-separated list of ids
    res = app.get("/collections/public.landsat_wrs/items?ids=1,2")
    assert res.status_code == 200
    assert res.headers["content-type"] == "application/geo+json"
    data = res.json()
    assert len(data["features"]) == 2
    assert data["numberMatched"] == 2
    assert data["numberReturned"] == 2
    for idx, fid in enumerate((1, 2)):
        assert data["features"][idx]["id"] == fid
        assert data["features"][idx]["properties"]["ogc_fid"] == fid
136 |
137 |
def test_items_properties(app):
    """Test /items endpoint with properties options."""
    # restrict returned properties to an explicit subset
    res = app.get("/collections/public.landsat_wrs/items?properties=path,row")
    assert res.status_code == 200
    assert res.headers["content-type"] == "application/geo+json"
    props = res.json()["features"][0]["properties"]
    assert sorted(props) == sorted(["path", "row"])

    # empty properties list -> features carry no properties at all
    res = app.get("/collections/public.landsat_wrs/items?properties=")
    assert res.status_code == 200
    assert res.headers["content-type"] == "application/geo+json"
    assert list(res.json()["features"][0]["properties"]) == []
152 |
153 |
def test_items_properties_filter(app):
    """Test /items endpoint with properties filter options."""
    res = app.get("/collections/public.landsat_wrs/items?path=13")
    assert res.status_code == 200
    assert res.headers["content-type"] == "application/geo+json"
    data = res.json()
    assert len(data["features"]) == 10
    assert data["numberMatched"] == 104
    assert data["numberReturned"] == 10
    assert data["features"][0]["properties"]["path"] == 13

    # value with the wrong type (str for an int column) surfaces as a 500
    res = app.get("/collections/public.landsat_wrs/items?path=d")
    assert res.status_code == 500
    assert "invalid input syntax for type integer" in res.json()["detail"]

    # multiple property filters combine
    res = app.get("/collections/public.landsat_wrs/items?path=13&row=10")
    assert res.status_code == 200
    assert res.headers["content-type"] == "application/geo+json"
    data = res.json()
    assert len(data["features"]) == 1
    assert data["numberMatched"] == 1
    assert data["numberReturned"] == 1
    props = data["features"][0]["properties"]
    assert props["path"] == 13
    assert props["row"] == 10

    # filter on a string column
    res = app.get("/collections/public.landsat_wrs/items?pr=013001")
    assert res.status_code == 200
    assert res.headers["content-type"] == "application/geo+json"
    data = res.json()
    assert len(data["features"]) == 1
    assert data["numberMatched"] == 1
    assert data["numberReturned"] == 1
    props = data["features"][0]["properties"]
    assert props["path"] == 13
    assert props["row"] == 1

    # valid filter that matches nothing -> empty result, not an error
    res = app.get("/collections/public.landsat_wrs/items?path=1000000")
    assert res.status_code == 200
    assert res.headers["content-type"] == "application/geo+json"
    data = res.json()
    assert len(data["features"]) == 0
    assert data["numberMatched"] == 0
    assert data["numberReturned"] == 0

    # We exclude invalid properties (not matching any collection column.) so they have no effects
    res = app.get("/collections/public.landsat_wrs/items?token=mysecrettoken")
    assert res.status_code == 200
    assert res.headers["content-type"] == "application/geo+json"
    assert len(res.json()["features"]) == 10
204 |
205 |
def test_items_filter_cql_ids(app):
    """Test /items endpoint with ids options."""
    # cql2-json equality on the fid column
    filter_query = {"op": "=", "args": [{"property": "ogc_fid"}, 1]}
    res = app.get(
        f"/collections/public.landsat_wrs/items?filter-lang=cql2-json&filter={json.dumps(filter_query)}"
    )
    assert res.status_code == 200
    assert res.headers["content-type"] == "application/geo+json"
    data = res.json()
    assert len(data["features"]) == 1
    assert data["numberMatched"] == 1
    assert data["numberReturned"] == 1
    assert data["features"][0]["id"] == 1
    assert data["features"][0]["properties"]["ogc_fid"] == 1

    # the same filter expressed as cql2-text
    res = app.get(
        "/collections/public.landsat_wrs/items?filter-lang=cql2-text&filter=ogc_fid=1"
    )
    assert res.status_code == 200
    assert res.headers["content-type"] == "application/geo+json"
    data = res.json()
    assert len(data["features"]) == 1
    assert data["numberMatched"] == 1
    assert data["numberReturned"] == 1
    assert data["features"][0]["id"] == 1
    assert data["features"][0]["properties"]["ogc_fid"] == 1

    # cql2-text IN() predicate
    res = app.get(
        "/collections/public.landsat_wrs/items?filter-lang=cql2-text&filter=ogc_fid IN (1,2)"
    )

    assert res.status_code == 200
    assert res.headers["content-type"] == "application/geo+json"
    data = res.json()
    assert len(data["features"]) == 2
    assert data["numberMatched"] == 2
    assert data["numberReturned"] == 2
    for idx, fid in enumerate((1, 2)):
        assert data["features"][idx]["id"] == fid
        assert data["features"][idx]["properties"]["ogc_fid"] == fid
247 |
248 |
def test_items_properties_filter_cql2(app):
    """Test /items endpoint with properties filter options."""
    # cql2-json equality predicate
    filter_query = {"op": "=", "args": [{"property": "path"}, 13]}
    res = app.get(
        f"/collections/public.landsat_wrs/items?filter-lang=cql2-json&filter={json.dumps(filter_query)}"
    )
    assert res.status_code == 200
    assert res.headers["content-type"] == "application/geo+json"
    data = res.json()
    assert len(data["features"]) == 10
    assert data["numberMatched"] == 104
    assert data["numberReturned"] == 10
    assert data["features"][0]["properties"]["path"] == 13

    # wrong value type (str for an int column) surfaces as a 500
    filter_query = {"op": "=", "args": [{"property": "path"}, "d"]}
    res = app.get(
        f"/collections/public.landsat_wrs/items?filter-lang=cql2-json&filter={json.dumps(filter_query)}"
    )
    assert res.status_code == 500
    assert "integer" in res.json()["detail"]

    # AND of two equality predicates (cql2-json)
    filter_query = {
        "op": "and",
        "args": [
            {"op": "=", "args": [{"property": "path"}, 13]},
            {"op": "=", "args": [{"property": "row"}, 10]},
        ],
    }
    res = app.get(
        f"/collections/public.landsat_wrs/items?filter-lang=cql2-json&filter={json.dumps(filter_query)}"
    )
    assert res.status_code == 200
    assert res.headers["content-type"] == "application/geo+json"
    data = res.json()
    assert len(data["features"]) == 1
    assert data["numberMatched"] == 1
    assert data["numberReturned"] == 1
    props = data["features"][0]["properties"]
    assert props["path"] == 13
    assert props["row"] == 10

    # the same AND filter expressed as cql2-text
    res = app.get(
        "/collections/public.landsat_wrs/items?filter-lang=cql2-text&filter=path=13 AND row=10"
    )
    assert res.status_code == 200
    assert res.headers["content-type"] == "application/geo+json"
    data = res.json()
    assert len(data["features"]) == 1
    assert data["numberMatched"] == 1
    assert data["numberReturned"] == 1
    props = data["features"][0]["properties"]
    assert props["path"] == 13
    assert props["row"] == 10
301 |
302 |
def test_items_geo_filter_cql2(app):
    """Test /items endpoint with a cql2-text spatial (S_INTERSECTS) filter."""
    # polygon roughly over north-east Greenland; expects 78 matching scenes
    response = app.get(
        "/collections/public.landsat_wrs/items?filter-lang=cql2-text&filter=S_INTERSECTS(geom, POLYGON((-22.2153 79.6888,-22.2153 81.8555,-8.97407 81.8555,-8.97407 79.6888,-22.2153 79.6888)))"
    )
    assert response.status_code == 200
    body = response.json()
    # default page size caps returned features at 10
    assert len(body["features"]) == 10
    assert body["numberMatched"] == 78
311 |
312 |
def test_items_function_filter_cql2(app):
    """Test /items endpoint with a cql2-text filter calling a SQL function (left)."""
    # left(pr,2)='13' selects rows whose "pr" value starts with "13"
    response = app.get(
        "/collections/public.landsat_wrs/items?filter-lang=cql2-text&filter=left(pr,2)='13'"
    )
    assert response.status_code == 200
    body = response.json()
    # default page size caps returned features at 10
    assert len(body["features"]) == 10
    assert body["numberMatched"] == 642
321 |
322 |
def test_items_geom_column(app):
    """Test /items endpoint geom_column."""
    res = app.get("/collections/public.landsat_wrs/items?geom-column=geom")
    assert res.status_code == 200
    assert res.headers["content-type"] == "application/geo+json"
    data = res.json()
    assert data["type"] == "FeatureCollection"
    assert data["id"] == "public.landsat_wrs"
    assert data["title"] == "public.landsat_wrs"
    assert data["links"]
    assert data["numberMatched"] == 16269
    assert data["numberReturned"] == 10

    # unknown geometry column name -> 404
    res = app.get("/collections/public.landsat_wrs/items?geom-column=the_geom")
    assert res.status_code == 404
    assert res.headers["content-type"] == "application/json"
    assert res.json()["detail"] == "Invalid Geometry Column: the_geom."

    # table with two geometry columns, no geom-column given
    res = app.get("/collections/public.landsat/items")
    data = res.json()
    assert data["type"] == "FeatureCollection"
    assert data["id"] == "public.landsat"
    assert data["title"] == "public.landsat"
    assert data["links"]
    assert data["numberMatched"] == 16269
    assert data["numberReturned"] == 10
    first = data["features"][0]
    assert first["geometry"]["type"] == "Polygon"
    # Make sure we don't return any geometry in the properties
    assert "centroid" not in first["properties"]
    assert "geom" not in first["properties"]

    # explicitly select the secondary (point) geometry column
    res = app.get("/collections/public.landsat/items?geom-column=centroid")
    data = res.json()
    assert data["numberMatched"] == 16269
    assert data["numberReturned"] == 10
    first = data["features"][0]
    assert first["geometry"]["type"] == "Point"
    assert "centroid" not in first["properties"]
    assert "geom" not in first["properties"]

    # single-item endpoint honors the same option
    res = app.get("/collections/public.landsat/items/1")
    assert res.json()["geometry"]["type"] == "Polygon"

    res = app.get("/collections/public.landsat/items/1?geom-column=geom")
    assert res.json()["geometry"]["type"] == "Polygon"
371 |
372 |
def test_geom_operation(app):
    """Check that bbox-only and simplify give the correct results."""
    res = app.get("/collections/public.landsat/items/1")
    data = res.json()
    assert data["geometry"]["type"] == "Polygon"
    full_poly = data["geometry"]

    # the envelope differs from the full polygon
    res = app.get("/collections/public.landsat/items/1?bbox-only=true")
    data = res.json()
    assert data["geometry"]["type"] == "Polygon"
    assert data["geometry"] != full_poly

    res = app.get("/collections/public.landsat/items/1?geom-column=centroid")
    data = res.json()
    assert data["geometry"]["type"] == "Point"
    centroid = data["geometry"]

    # bbox-only (ST_Envelope) has no influence on point
    res = app.get(
        "/collections/public.landsat/items/1?geom-column=centroid&bbox-only=true"
    )
    data = res.json()
    assert data["geometry"]["type"] == "Point"
    assert data["geometry"] == centroid

    res = app.get("/collections/public.landsat/items/1")
    data = res.json()
    assert data["geometry"]["type"] == "Polygon"
    full_poly = data["geometry"]

    # simplification yields a different polygon
    res = app.get("/collections/public.landsat/items/1?simplify=0.5")
    data = res.json()
    assert data["geometry"]["type"] == "Polygon"
    assert data["geometry"] != full_poly

    res = app.get("/collections/public.landsat/items/1?bbox-only=true")
    data = res.json()
    assert data["geometry"]["type"] == "Polygon"
    envelope = data["geometry"]

    # Check that simplify has no influence when bbox only
    res = app.get(
        "/collections/public.landsat/items/1?simplify=0.5&bbox-only=true"
    )
    data = res.json()
    assert data["geometry"]["type"] == "Polygon"
    assert data["geometry"] == envelope
420 |
421 |
def test_items_datetime(app):
    """Test /items endpoint datetime."""
    # exact timestamp match
    res = app.get(
        "/collections/public.my_data/items?datetime=2004-10-19T10:23:54Z"
    )
    assert res.status_code == 200
    assert res.headers["content-type"] == "application/geo+json"
    data = res.json()
    assert data["type"] == "FeatureCollection"
    assert data["id"] == "public.my_data"
    assert data["title"] == "public.my_data"
    assert data["links"]
    assert data["numberMatched"] == 1
    assert data["numberReturned"] == 1

    # collection without any timestamp-typed column
    res = app.get(
        "/collections/public.landsat_wrs/items?datetime=2004-10-19T10:23:54Z&datetime-column=the_datetime"
    )
    assert res.status_code == 500
    assert res.headers["content-type"] == "application/json"
    detail = res.json()["detail"]
    assert detail == "Must have timestamp typed column to filter with datetime."

    # datetime-column that does not exist on the table
    res = app.get(
        "/collections/public.my_data/items?datetime=2004-10-19T10:23:54Z&datetime-column=the_datetime"
    )
    assert res.status_code == 404
    assert res.headers["content-type"] == "application/json"
    assert res.json()["detail"] == "Invalid Datetime Column: the_datetime."

    # no items for 2004-10-10T10:23:54
    res = app.get(
        "/collections/public.my_data/items?datetime=2004-10-10T10:23:54Z"
    )
    assert res.status_code == 200
    assert res.headers["content-type"] == "application/geo+json"
    data = res.json()
    assert data["numberMatched"] == 0
    assert data["numberReturned"] == 0

    # Closed Interval
    res = app.get(
        "/collections/public.my_data/items?datetime=2004-10-19T10:23:54Z/2004-10-21T10:23:54Z"
    )
    assert res.status_code == 200
    assert res.headers["content-type"] == "application/geo+json"
    data = res.json()
    assert data["numberMatched"] == 2
    assert data["numberReturned"] == 2

    # Open end-Interval (2004-10-20T10:23:54Z or later)
    res = app.get(
        "/collections/public.my_data/items?datetime=2004-10-20T10:23:54Z/.."
    )
    assert res.status_code == 200
    assert res.headers["content-type"] == "application/geo+json"
    data = res.json()
    assert data["numberMatched"] == 5
    assert data["numberReturned"] == 5

    # same open-ended interval with an empty end bound instead of '..'
    res = app.get(
        "/collections/public.my_data/items?datetime=2004-10-20T10:23:54Z/"
    )
    assert res.status_code == 200
    assert res.headers["content-type"] == "application/geo+json"
    data = res.json()
    assert data["numberMatched"] == 5
    assert data["numberReturned"] == 5

    # Open start-Interval (2004-10-20T10:23:54 or earlier)
    res = app.get(
        "/collections/public.my_data/items?datetime=../2004-10-20T10:23:54Z"
    )
    assert res.status_code == 200
    assert res.headers["content-type"] == "application/geo+json"
    data = res.json()
    assert data["numberMatched"] == 2
    assert data["numberReturned"] == 2

    # same open-start interval with an empty start bound
    res = app.get(
        "/collections/public.my_data/items?datetime=/2004-10-20T10:23:54Z"
    )
    assert res.status_code == 200
    assert res.headers["content-type"] == "application/geo+json"
    data = res.json()
    assert data["numberMatched"] == 2
    assert data["numberReturned"] == 2

    # interval open on both sides is invalid
    res = app.get("/collections/public.my_data/items?datetime=../..")
    assert res.status_code == 422
    assert res.headers["content-type"] == "application/json"

    # interval whose start is after its end is invalid
    res = app.get(
        "/collections/public.my_data/items?datetime=2004-10-21T10:23:54Z/2004-10-20T10:23:54Z"
    )
    assert res.status_code == 422
    assert res.headers["content-type"] == "application/json"
525 |
526 |
def test_items_geometry_return_options(app):
    """Test /items endpoint with geometry return options."""

    def _single_feature(url):
        # Fetch `url` and return the single feature after checking the
        # invariants shared by every request in this test.
        resp = app.get(url)
        assert resp.status_code == 200
        assert resp.headers["content-type"] == "application/geo+json"
        payload = resp.json()
        assert len(payload["features"]) == 1
        assert payload["numberMatched"] == 1
        assert payload["numberReturned"] == 1
        feat = payload["features"][0]
        assert feat["id"] == 1
        assert feat["properties"]["ogc_fid"] == 1
        return feat

    # geom-column=none: no geometry in the response
    feat = _single_feature(
        "/collections/public.landsat_wrs/items?ids=1&geom-column=none"
    )
    assert not feat["geometry"]

    # bbox-only=true: geometry replaced by its bounding box
    feat = _single_feature("/collections/public.landsat_wrs/items?ids=1&bbox-only=true")
    assert feat["geometry"] == {
        "coordinates": [
            [
                [-22.2153, 79.6888],
                [-22.2153, 81.8555],
                [-8.97407, 81.8555],
                [-8.97407, 79.6888],
                [-22.2153, 79.6888],
            ]
        ],
        "type": "Polygon",
    }

    # simplify: geometry simplified with the given tolerance
    feat = _single_feature("/collections/public.landsat_wrs/items?ids=1&simplify=.001")
    assert feat["geometry"] == {
        "coordinates": [
            [
                [-10.803, 80.989],
                [-8.974, 80.342],
                [-16.985, 79.689],
                [-22.215, 81.092],
                [-13.255, 81.856],
                [-10.803, 80.989],
            ]
        ],
        "type": "Polygon",
    }
584 |
585 |
def test_output_response_type(app):
    """Make sure /items returns wanted output response type."""
    # Column/key set expected for a flattened (non-GeoJSON) feature.
    feature_keys = {
        "collectionId",
        "itemId",
        "id",
        "pr",
        "row",
        "path",
        "ogc_fid",
        "geometry",
    }

    # CSV output
    response = app.get("/collections/public.landsat_wrs/items?f=csv")
    assert response.status_code == 200
    assert "text/csv" in response.headers["content-type"]
    rows = response.text.splitlines()
    assert len(rows) == 11
    assert set(rows[0].split(",")) == feature_keys

    # we only accept csv
    response = app.get(
        "/collections/public.landsat_wrs/items", headers={"accept": "text/csv"}
    )
    assert response.status_code == 200
    assert "text/csv" in response.headers["content-type"]

    # we accept csv or json (CSV should be returned)
    response = app.get(
        "/collections/public.landsat_wrs/items",
        headers={"accept": "text/csv;q=1.0, application/json;q=0.4"},
    )
    assert response.status_code == 200
    assert "text/csv" in response.headers["content-type"]

    # the first preference is geo+json
    response = app.get(
        "/collections/public.landsat_wrs/items",
        headers={"accept": "application/geo+json, text/csv;q=0.1"},
    )
    assert response.status_code == 200
    assert response.headers["content-type"] == "application/geo+json"

    # geojsonseq output, via ?f= and via the accept header
    for url, headers in [
        ("/collections/public.landsat_wrs/items?f=geojsonseq", None),
        (
            "/collections/public.landsat_wrs/items",
            {"accept": "application/geo+json-seq"},
        ),
    ]:
        response = app.get(url, headers=headers)
        assert response.status_code == 200
        assert response.headers["content-type"] == "application/geo+json-seq"
        lines = response.text.splitlines()
        assert len(lines) == 10
        assert json.loads(lines[0])["type"] == "Feature"

    # json output
    response = app.get("/collections/public.landsat_wrs/items?f=json")
    assert response.status_code == 200
    assert response.headers["content-type"] == "application/json"
    features = response.json()
    assert len(features) == 10
    assert set(features[0].keys()) == feature_keys

    # json output no geometry
    response = app.get("/collections/public.landsat_wrs/items?f=json&geom-column=none")
    assert response.status_code == 200
    assert response.headers["content-type"] == "application/json"
    features = response.json()
    assert len(features) == 10
    assert "geometry" not in features[0]

    response = app.get(
        "/collections/public.landsat_wrs/items",
        headers={"accept": "application/json"},
    )
    assert response.status_code == 200
    assert response.headers["content-type"] == "application/json"
    assert len(response.json()) == 10

    # ndjson output
    response = app.get("/collections/public.landsat_wrs/items?f=ndjson")
    assert response.status_code == 200
    assert response.headers["content-type"] == "application/ndjson"
    lines = response.text.splitlines()
    assert len(lines) == 10
    assert set(json.loads(lines[0]).keys()) == feature_keys

    response = app.get(
        "/collections/public.landsat_wrs/items",
        headers={"accept": "application/ndjson"},
    )
    assert response.status_code == 200
    assert response.headers["content-type"] == "application/ndjson"
    assert len(response.text.splitlines()) == 10
705 |
706 |
def test_items_sortby(app):
    """Test /items endpoint with sortby options."""
    base = "/collections/public.landsat_wrs/items?limit=1"

    # no sortby: natural order starts at ogc_fid 1
    response = app.get(base)
    assert response.status_code == 200
    assert response.headers["content-type"] == "application/geo+json"
    body = response.json()
    assert body["features"][0]["properties"]["ogc_fid"] == 1
    assert body["numberMatched"] == 16269

    # explicit sort on the id column
    response = app.get(base + "&sortby=ogc_fid")
    assert response.status_code == 200
    assert response.headers["content-type"] == "application/geo+json"
    body = response.json()
    assert body["features"][0]["properties"]["ogc_fid"] == 1
    assert body["numberMatched"] == 16269

    # ascending is the default direction
    response = app.get(base + "&sortby=row")
    assert response.status_code == 200
    body = response.json()
    assert body["features"][0]["properties"]["row"] == 1
    assert body["numberMatched"] == 16269

    # "+" prefix is explicit ascending
    response = app.get(base + "&sortby=+row")
    assert response.status_code == 200
    assert response.json()["features"][0]["properties"]["row"] == 1

    # "-" prefix is descending
    response = app.get(base + "&sortby=-row")
    assert response.status_code == 200
    assert response.json()["features"][0]["properties"]["row"] == 248

    # multiple sort columns, in both orders
    response = app.get(base + "&sortby=-row,path")
    assert response.status_code == 200
    props = response.json()["features"][0]["properties"]
    assert props["row"] == 248
    assert props["path"] == 1

    response = app.get(base + "&sortby=path,-row")
    assert response.status_code == 200
    props = response.json()["features"][0]["properties"]
    assert props["row"] == 248
    assert props["path"] == 1

    # Invalid column name
    response = app.get(base + "&sortby=something")
    assert response.status_code == 404
754 |
755 |
def test_items_env_table_config_main(app, monkeypatch):
    """Test /items endpoint using table_config environment variables."""
    body = app.get("/collections/public.my_data/items?limit=1").json()
    assert body["features"][0]["geometry"]["type"] == "Polygon"

    body = app.get(
        "/collections/public.my_data/items?datetime=2004-10-19T10:23:54Z"
    ).json()
    assert body["features"][0]["id"] == 1
767 |
768 |
def test_items_env_table_config_alt(app, monkeypatch):
    """Test /items endpoint using alt table_config environment variables."""
    body = app.get("/collections/public.my_data_alt/items?limit=1").json()
    assert body["features"][0]["geometry"]["type"] == "Point"

    body = app.get(
        "/collections/public.my_data_alt/items?datetime=2005-10-19T10:23:54Z"
    ).json()
    assert body["features"][0]["id"] == "0"
780 |
--------------------------------------------------------------------------------
/tests/routes/test_non_geo.py:
--------------------------------------------------------------------------------
1 | """Test endpoints with non-geo table."""
2 |
3 |
def test_non_geo(app):
    """Test endpoint with non-geo tables."""
    response = app.get("/collections/public.nongeo_data")
    assert response.status_code == 200
    body = response.json()
    assert body["id"] == "public.nongeo_data"
    # A table without geometry exposes no Extent
    assert list(body) == ["id", "links", "itemType", "crs"]

    response = app.get("/collections/public.nongeo_data/items")
    assert response.status_code == 200
    assert response.headers["content-type"] == "application/geo+json"
    body = response.json()
    assert body["type"] == "FeatureCollection"
    assert body["id"] == "public.nongeo_data"
    assert body["title"] == "public.nongeo_data"
    assert body["links"]
    assert body["numberMatched"] == 6
    assert body["numberReturned"] == 6
    assert [link["rel"] for link in body["links"]] == ["collection", "self"]

    response = app.get("/collections/public.nongeo_data/items?f=json")
    assert response.status_code == 200
    assert response.headers["content-type"] == "application/json"
    assert len(response.json()) == 6

    response = app.get("/collections/public.nongeo_data/items?f=html")
    assert response.status_code == 200
    assert "text/html" in response.headers["content-type"]
    assert "Collection Items: public.nongeo_data" in response.text

    # requesting a geometry column on a non-geo table is a 404
    response = app.get("/collections/public.nongeo_data/items?geom-column=geom")
    assert response.status_code == 404

    response = app.get("/collections/public.nongeo_data/queryables")
    assert response.status_code == 200

    # bbox filter should have no effect
    response = app.get("/collections/public.nongeo_data/items?bbox=0,10,0,10")
    assert response.status_code == 200
    assert response.headers["content-type"] == "application/geo+json"
    body = response.json()
    assert body["numberMatched"] == 6
    assert body["numberReturned"] == 6
49 |
--------------------------------------------------------------------------------
/tests/routes/test_templates.py:
--------------------------------------------------------------------------------
1 | """Test HTML templates."""
2 |
3 |
def test_custom_templates(app):
    """Test /collections endpoint with custom templates."""
    assert app.get("/collections").status_code == 200

    # The HTML rendering comes from the custom template fixture
    response = app.get("/collections?f=html")
    assert response.status_code == 200
    assert "Custom Collections" in response.text
12 |
--------------------------------------------------------------------------------
/tests/test_main.py:
--------------------------------------------------------------------------------
"""Test tifeatures.main.app."""
2 |
3 |
def test_health(app):
    """Test /healthz endpoint."""
    response = app.get("/healthz")
    assert response.status_code == 200
    payload = response.json()
    assert payload == {"ping": "pong!"}
9 |
--------------------------------------------------------------------------------
/tifeatures/__init__.py:
--------------------------------------------------------------------------------
1 | """tifeatures."""
2 |
3 | __version__ = "0.1.0a4"
4 |
--------------------------------------------------------------------------------
/tifeatures/db.py:
--------------------------------------------------------------------------------
1 | """tifeatures.db: database events."""
2 |
3 | from typing import Any, Optional
4 |
5 | import orjson
6 | from buildpg import asyncpg
7 |
8 | from tifeatures.dbmodel import get_table_index
9 | from tifeatures.settings import PostgresSettings
10 |
11 | from fastapi import FastAPI
12 |
13 |
async def con_init(conn):
    """Use json for json returns."""
    # Register orjson as the codec for both json and jsonb columns.
    for json_type in ("json", "jsonb"):
        await conn.set_type_codec(
            json_type,
            encoder=orjson.dumps,
            decoder=orjson.loads,
            schema="pg_catalog",
        )
22 |
23 |
async def connect_to_db(
    app: FastAPI,
    settings: Optional[PostgresSettings] = None,
    **kwargs,
) -> None:
    """Create a connection pool and attach it to the application state."""
    # Fall back to settings read from the environment.
    settings = settings or PostgresSettings()

    pool = await asyncpg.create_pool_b(
        settings.database_url,
        min_size=settings.db_min_conn_size,
        max_size=settings.db_max_conn_size,
        max_queries=settings.db_max_queries,
        max_inactive_connection_lifetime=settings.db_max_inactive_conn_lifetime,
        init=con_init,
        **kwargs,
    )
    app.state.pool = pool
42 |
43 |
async def register_table_catalog(app: FastAPI, **kwargs: Any) -> None:
    """Fetch the table index and store it on the application state."""
    catalog = await get_table_index(app.state.pool, **kwargs)
    app.state.table_catalog = catalog
47 |
48 |
async def close_db_connection(app: FastAPI) -> None:
    """Close the application's connection pool."""
    pool = app.state.pool
    await pool.close()
52 |
--------------------------------------------------------------------------------
/tifeatures/dbmodel.py:
--------------------------------------------------------------------------------
1 | """tifeatures.dbmodel: database events."""
2 |
3 | from typing import Any, Dict, List, Optional
4 |
5 | from buildpg import asyncpg
6 | from pydantic import BaseModel, Field
7 |
8 | from tifeatures.settings import TableSettings
9 |
10 |
class Column(BaseModel):
    """Model for database Column."""

    name: str
    type: str
    description: Optional[str]

    @property
    def json_type(self) -> str:
        """Return JSON field type."""
        # Postgres array types are reported with a trailing "[]".
        if self.type.endswith("[]"):
            return "array"

        numeric_types = {
            "smallint",
            "integer",
            "bigint",
            "decimal",
            "numeric",
            "real",
            "double precision",
            "smallserial",
            "serial",
            "bigserial",
            # Float8 is not a Postgres type name but is the name we give
            # internally to the Double Precision type
            # ref: https://github.com/developmentseed/tifeatures/pull/60/files#r1011863866
            "float8",
        }
        if self.type in numeric_types:
            return "number"

        if self.type.startswith("bool"):
            return "boolean"

        # json/jsonb and geometry/geography values serialize as objects
        if self.type.startswith("json") or self.type.startswith("geo"):
            return "object"

        return "string"
49 |
50 |
class GeometryColumn(Column):
    """Model for PostGIS geometry/geography column."""

    # Extent of the column data as [minx, miny, maxx, maxy] in lon/lat;
    # defaults to the whole world.
    bounds: List[float] = [-180, -90, 180, 90]
    # Spatial reference identifier; defaults to 4326 (WGS84).
    srid: int = 4326
    # PostGIS geometry type (e.g. as reported by postgis_typmod_type).
    geometry_type: str
57 |
58 |
class DatetimeColumn(Column):
    """Model for a timestamp/timestamptz column."""

    # Minimum and maximum values present in the column, when known.
    min: Optional[str]
    max: Optional[str]
64 |
65 |
class Table(BaseModel):
    """Model for DB Table."""

    id: str
    table: str
    dbschema: str = Field(..., alias="schema")
    description: Optional[str]
    properties: List[Column] = []
    id_column: Optional[str]
    geometry_columns: List[GeometryColumn] = []
    datetime_columns: List[DatetimeColumn] = []
    geometry_column: Optional[GeometryColumn]
    datetime_column: Optional[DatetimeColumn]

    def get_datetime_column(self, name: Optional[str] = None) -> Optional[Column]:
        """Return the Column for either the passed in tstz column or the first tstz column."""
        if not self.datetime_columns:
            return None

        if name is None:
            # default (configured or first) datetime column
            return self.datetime_column

        return next(
            (col for col in self.datetime_columns if col.name == name), None
        )

    def get_geometry_column(
        self, name: Optional[str] = None
    ) -> Optional[GeometryColumn]:
        """Return the name of the first geometry column."""
        # "none" explicitly disables geometry selection
        if (not self.geometry_columns) or (name and name.lower() == "none"):
            return None

        if name is None:
            # default (configured or first) geometry column
            return self.geometry_column

        return next(
            (col for col in self.geometry_columns if col.name == name), None
        )

    @property
    def id_column_info(self) -> Column:  # type: ignore
        """Return Column for a unique identifier."""
        return next(
            (col for col in self.properties if col.name == self.id_column), None
        )

    def columns(self, properties: Optional[List[str]] = None) -> List[str]:
        """Return table columns optionally filtered to only include columns from properties."""
        # An explicitly empty selection means "no properties".
        if properties in [[], [""]]:
            return []

        non_spatial = [
            col.name
            for col in self.properties
            if col.type not in ("geometry", "geography")
        ]
        if properties is None:
            return non_spatial

        return [name for name in non_spatial if name in properties]

    def get_column(self, property_name: str) -> Optional[Column]:
        """Return column info."""
        return next(
            (col for col in self.properties if col.name == property_name), None
        )
137 |
138 |
139 | Database = Dict[str, Dict[str, Any]]
140 |
141 |
async def get_table_index(
    db_pool: asyncpg.BuildPgPool,
    # NOTE: mutable default is never mutated below, so it is harmless here.
    schemas: Optional[List[str]] = ["public"],
    tables: Optional[List[str]] = None,
    spatial: bool = True,
) -> Database:
    """Fetch Table index.

    Queries the Postgres system catalogs for readable tables/views and builds
    a mapping of "schema.table" id -> table metadata (id column, geometry,
    datetime and property columns), applying per-table overrides from
    TableSettings.

    Args:
        db_pool: buildpg/asyncpg connection pool.
        schemas: restrict discovery to these schemas (None means no filter).
        tables: restrict discovery to these table names (None means no filter).
        spatial: when True, only keep tables with at least one geometry column.
    """

    # Catalog query: `table_columns` collects one row per readable column;
    # `grouped` aggregates per table, deriving the id column from the best
    # single-column index (primary key first, then unique indexes).
    query = """
        WITH table_columns AS (
            SELECT
                nspname,
                relname,
                format('%I.%I', nspname, relname) as id,
                c.oid as t_oid,
                obj_description(c.oid, 'pg_class') as description,
                attname,
                atttypmod,
                replace(replace(replace(replace(format_type(atttypid, null),'character varying','text'),'double precision','float8'),'timestamp with time zone','timestamptz'),'timestamp without time zone','timestamp') as "type",
                col_description(attrelid, attnum)
            FROM
                pg_class c
                JOIN pg_namespace n on (c.relnamespace=n.oid)
                JOIN pg_attribute a on (attnum>0 and attrelid=c.oid and not attisdropped)
            WHERE
                relkind IN ('r','v', 'm', 'f', 'p')
                AND has_table_privilege(c.oid, 'SELECT')
                AND has_column_privilege(c.oid,a.attnum, 'SELECT')
                AND n.nspname NOT IN ('pg_catalog', 'information_schema')
                AND c.relname NOT IN ('spatial_ref_sys','geometry_columns','geography_columns')
                AND (:schemas::text[] IS NULL OR n.nspname = ANY (:schemas))
                AND (:tables::text[] IS NULL OR c.relname = ANY (:tables))
        ),
        grouped as
        (SELECT
            nspname,
            relname,
            id,
            t_oid,
            description,
            (
                SELECT attname
                FROM
                    pg_attribute a
                    LEFT JOIN
                    pg_index i
                    ON (
                        a.attrelid = i.indrelid
                        AND a.attnum = ANY(i.indkey)
                    )
                WHERE
                    a.attrelid = t_oid
                    AND
                    i.indnatts = 1
                ORDER BY
                    i.indisprimary DESC NULLS LAST,
                    i.indisunique DESC NULLS LAST
                LIMIT 1
            ) as id_column,
            coalesce(jsonb_agg(
                jsonb_build_object(
                    'name', attname,
                    'type', "type",
                    'geometry_type', postgis_typmod_type(atttypmod),
                    'srid', postgis_typmod_srid(atttypmod),
                    'description', description,
                    'bounds',
                    CASE WHEN postgis_typmod_srid(atttypmod) IS NOT NULL AND postgis_typmod_srid(atttypmod) != 0 THEN
                        (
                            SELECT
                                ARRAY[
                                    ST_XMin(extent.geom),
                                    ST_YMin(extent.geom),
                                    ST_XMax(extent.geom),
                                    ST_YMax(extent.geom)
                                ]
                            FROM (
                                SELECT
                                    coalesce(
                                        ST_Transform(
                                            ST_SetSRID(
                                                ST_EstimatedExtent(nspname, relname, attname),
                                                postgis_typmod_srid(atttypmod)
                                            ),
                                            4326
                                        ),
                                        ST_MakeEnvelope(-180, -90, 180, 90, 4326)
                                    ) as geom
                            ) AS extent
                        )
                    ELSE ARRAY[-180,-90,180,90]
                    END
                )
            ) FILTER (WHERE "type" IN ('geometry','geography')), '[]'::jsonb) as geometry_columns,
            coalesce(jsonb_agg(
                jsonb_build_object(
                    'name', attname,
                    'type', "type",
                    'description', description
                )
            ) FILTER (WHERE type LIKE 'timestamp%'), '[]'::jsonb) as datetime_columns,
            coalesce(jsonb_agg(
                jsonb_build_object(
                    'name', attname,
                    'type', "type",
                    'description', description
                )
            ),'[]'::jsonb) as properties
        FROM
            table_columns
        GROUP BY 1,2,3,4,5,6 ORDER BY 1,2
        )
        SELECT
            id,
            relname as table,
            nspname as dbschema,
            description,
            id_column,
            geometry_columns,
            datetime_columns,
            properties
        FROM grouped
        WHERE :spatial = FALSE OR jsonb_array_length(geometry_columns)>=1
        ;

    """

    async with db_pool.acquire() as conn:
        rows = await conn.fetch_b(
            query,
            schemas=schemas,
            tables=tables,
            spatial=spatial,
        )

        catalog = {}
        # Per-table configuration overrides read from the environment.
        table_settings = TableSettings()
        table_confs = table_settings.table_config
        fallback_key_names = table_settings.fallback_key_names

        for table in rows:
            id = table["id"]
            # Config keys use "_" instead of "." (env-var friendly).
            confid = id.replace(".", "_")
            table_conf = table_confs.get(confid, {})

            # Make sure that any properties set in conf exist in table
            properties = table.get("properties", [])
            properties_setting = table_conf.get("properties", [])
            if properties_setting:
                properties = [p for p in properties if p["name"] in properties_setting]

            property_names = [p["name"] for p in properties]

            # ID Column: config override first, then the indexed column found
            # by the query, then the first property matching a fallback name.
            id_column = table_conf.get("pk") or table["id_column"]
            if not id_column and fallback_key_names:
                for p in properties:
                    if p["name"] in fallback_key_names:
                        id_column = p["name"]
                        break

            # Datetime Column (restricted to the selected properties)
            datetime_columns = [
                c
                for c in table.get("datetime_columns", [])
                if c["name"] in property_names
            ]

            # Default datetime column: config override, else first available.
            datetime_column = None
            for col in datetime_columns:
                if table_conf.get("datetimecol") == col["name"]:
                    datetime_column = col

            if not datetime_column and datetime_columns:
                datetime_column = datetime_columns[0]

            # Geometry Column (restricted to the selected properties)
            geometry_columns = [
                c
                for c in table.get("geometry_columns", [])
                if c["name"] in property_names
            ]
            # Default geometry column: config override, else first available.
            geometry_column = None
            for col in geometry_columns:
                if table_conf.get("geomcol") == col["name"]:
                    geometry_column = col
            if not geometry_column and geometry_columns:
                geometry_column = geometry_columns[0]

            catalog[id] = {
                "id": id,
                "table": table["table"],
                "schema": table["dbschema"],
                "description": table["description"],
                "id_column": id_column,
                "geometry_columns": geometry_columns,
                "datetime_columns": datetime_columns,
                "properties": properties,
                "datetime_column": datetime_column,
                "geometry_column": geometry_column,
            }

        return catalog
345 |
--------------------------------------------------------------------------------
/tifeatures/dependencies.py:
--------------------------------------------------------------------------------
1 | """tifeatures dependencies."""
2 |
3 | import re
4 | from typing import List, Optional
5 |
6 | from pygeofilter.ast import AstType
7 | from pygeofilter.parsers.cql2_json import parse as cql2_json_parser
8 | from pygeofilter.parsers.cql2_text import parse as cql2_text_parser
9 |
10 | from tifeatures.errors import InvalidBBox
11 | from tifeatures.layer import Table as TableLayer
12 | from tifeatures.resources import enums
13 |
14 | from fastapi import HTTPException, Path, Query
15 |
16 | from starlette.requests import Request
17 |
18 |
def CollectionParams(
    request: Request,
    collectionId: str = Path(..., description="Collection identifier"),
) -> TableLayer:
    """Return Layer Object.

    Raises:
        HTTPException 422: when `collectionId` is not in `schema.table` form.
        HTTPException 404: when the collection is not in the table catalog.
    """
    # `collectionId` must be a qualified "<schema>.<table>" name.
    # NOTE: the pattern previously read `(?P.+)\.(?P.+)`, which is an invalid
    # regex (the group names were missing); restored the named groups that
    # the groupdict() lookups below rely on.
    table_pattern = re.match(
        r"^(?P<schema>.+)\.(?P<table>.+)$", collectionId
    )
    if not table_pattern:
        raise HTTPException(
            status_code=422, detail=f"Invalid Table format '{collectionId}'."
        )

    assert table_pattern.groupdict()["schema"]
    assert table_pattern.groupdict()["table"]

    table_catalog = getattr(request.app.state, "table_catalog", {})
    if collectionId in table_catalog:
        return TableLayer(**table_catalog[collectionId])

    raise HTTPException(
        status_code=404, detail=f"Table/Function '{collectionId}' not found."
    )
42 |
43 |
def accept_media_type(
    accept: str, mediatypes: List[enums.MediaType]
) -> Optional[enums.MediaType]:
    """Return MediaType based on accept header and available mediatype.

    Links:
    - https://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html
    - https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept

    """
    # Parse the Accept header into {media name: quality}.
    accept_values = {}
    for entry in accept.replace(" ", "").split(","):
        parts = entry.split(";")
        name = parts[0]
        if len(parts) == 1:
            quality = 1.0
        else:
            groups = dict([param.split("=") for param in parts[1:]])  # type: ignore
            try:
                q = groups.get("q")
                quality = float(q) if q else 1.0
            except ValueError:
                quality = 0

        # q=0 means "not acceptable": drop the media type entirely
        if quality:
            accept_values[name] = quality

    # Group accepted names by quality, highest quality first.
    qualities = sorted({q for q in accept_values.values()}, reverse=True)
    media_preference = {
        q: [n for (n, v) in accept_values.items() if v == q] for q in qualities
    }

    # Return the first supported media type matching the client preference.
    for pref in media_preference.values():
        for media in mediatypes:
            if media.value in pref:
                return media

    # If no specified encoding is supported but "*" is accepted,
    # take one of the available media types.
    if "*" in accept_values and mediatypes:
        return mediatypes[0]

    return None
91 |
92 |
def OutputType(
    request: Request,
    f: Optional[enums.ResponseType] = Query(
        None,
        description="Response MediaType. Defaults to endpoint's default or value defined in `accept` header.",
    ),
) -> Optional[enums.MediaType]:
    """Output MediaType: json or html."""
    # An explicit ?f= parameter wins over the accept header.
    if f is not None:
        return enums.MediaType[f.name]

    supported = [enums.MediaType[name] for name in enums.ResponseType.__members__]
    return accept_media_type(request.headers.get("accept", ""), supported)
106 |
107 |
def QueryablesOutputType(
    request: Request,
    f: Optional[enums.QueryablesResponseType] = Query(
        None,
        description="Response MediaType. Defaults to endpoint's default or value defined in `accept` header.",
    ),
) -> Optional[enums.MediaType]:
    """Output MediaType: json or html."""
    # An explicit ?f= parameter wins over the accept header.
    if f is not None:
        return enums.MediaType[f.name]

    supported = [
        enums.MediaType[name] for name in enums.QueryablesResponseType.__members__
    ]
    return accept_media_type(request.headers.get("accept", ""), supported)
123 |
124 |
def ItemsOutputType(
    request: Request,
    f: Optional[enums.ItemsResponseType] = Query(
        None,
        description="Response MediaType. Defaults to endpoint's default or value defined in `accept` header.",
    ),
) -> Optional[enums.MediaType]:
    """Output MediaType: json or html."""
    # An explicit ?f= parameter wins over the accept header.
    if f is not None:
        return enums.MediaType[f.name]

    supported = [enums.MediaType[name] for name in enums.ItemsResponseType.__members__]
    return accept_media_type(request.headers.get("accept", ""), supported)
138 |
139 |
def ItemOutputType(
    request: Request,
    f: Optional[enums.ItemResponseType] = Query(
        None,
        description="Response MediaType. Defaults to endpoint's default or value defined in `accept` header.",
    ),
) -> Optional[enums.MediaType]:
    """Output MediaType: json or html."""
    # An explicit ?f= parameter wins over the accept header.
    if f is not None:
        return enums.MediaType[f.name]

    supported = [enums.MediaType[name] for name in enums.ItemResponseType.__members__]
    return accept_media_type(request.headers.get("accept", ""), supported)
153 |
154 |
def bbox_query(
    bbox: Optional[str] = Query(
        None,
        description="Spatial Filter.",
    )
) -> Optional[List[float]]:
    """BBox dependency."""
    if not bbox:
        return None

    bounds = [float(v) for v in bbox.split(",")]

    # 4 values: 2D bbox; 6 values: 3D bbox (altitudes are not range-checked).
    if len(bounds) == 4:
        lon_idx, lat_idx = (0, 2), (1, 3)
    elif len(bounds) == 6:
        lon_idx, lat_idx = (0, 3), (1, 4)
    else:
        raise InvalidBBox(f"Invalid bbox: {bounds}")

    if any(abs(bounds[i]) > 180 for i in lon_idx):
        raise InvalidBBox(f"Invalid longitude in bbox: {bounds}")
    if any(abs(bounds[i]) > 90 for i in lat_idx):
        raise InvalidBBox(f"Invalid latitude in bbox: {bounds}")

    return bounds
182 |
183 |
def ids_query(
    ids: Optional[str] = Query(None, description="Filter by Ids."),
) -> Optional[List[str]]:
    """Ids dependency."""
    # Empty/missing value means "no id filter".
    if not ids:
        return None

    return ids.split(",")
189 |
190 |
def datetime_query(
    datetime: Optional[str] = Query(None, description="Temporal Filter."),
) -> Optional[List[str]]:
    """Datetime dependency.

    Splits an OGC API datetime value (an instant or a `start/end` interval).

    Raises:
        HTTPException 422: when more than one "/" separator is present.
    """
    if datetime:
        dt = datetime.split("/")
        if len(dt) > 2:
            # Fix: message was a plain string ("{datetime}" was emitted
            # literally); it now interpolates the offending value.
            raise HTTPException(
                status_code=422, detail=f"Invalid datetime: {datetime}"
            )

        return dt

    return None
203 |
204 |
def properties_query(
    properties: Optional[str] = Query(
        None,
        description="Return only specific properties (comma-separated). If PROP-LIST is empty, no properties are returned. If not present, all properties are returned.",
    )
) -> Optional[List[str]]:
    """Return property list."""
    if properties is None:
        return None

    return [prop.strip() for prop in properties.split(",")]
216 |
217 |
def filter_query(
    query: Optional[str] = Query(None, description="CQL2 Filter", alias="filter"),
    filter_lang: Optional[enums.FilterLang] = Query(
        None,
        description="CQL2 Language (cql2-text, cql2-json). Defaults to cql2-text.",
        alias="filter-lang",
    ),
) -> Optional[AstType]:
    """Parse Filter Query.

    Returns the parsed CQL2 AST, or None when no `filter` was supplied.
    """
    if query is None:
        return None

    if filter_lang == enums.FilterLang.cql2_json:
        return cql2_json_parser(query)

    # cql2-text is the default encoding when `filter-lang` is not provided.
    return cql2_text_parser(query)
235 |
236 |
def sortby_query(
    sortby: Optional[str] = Query(
        None,
        description="Column Sort the items by Column (ascending (default) or descending).",
    )
):
    """Sortby dependency.

    Passed through unparsed; validation of the comma-separated
    `[+|-]column` syntax happens downstream in the layer's sort handling.
    """
    return sortby
245 |
--------------------------------------------------------------------------------
/tifeatures/errors.py:
--------------------------------------------------------------------------------
1 | """tifeatures.errors: Error classes."""
2 |
3 | import logging
4 | from typing import Callable, Dict, Type
5 |
6 | from asyncpg.exceptions._base import PostgresError
7 |
8 | from fastapi import FastAPI
9 |
10 | from starlette import status
11 | from starlette.requests import Request
12 | from starlette.responses import JSONResponse
13 |
14 | logger = logging.getLogger(__name__)
15 |
16 |
class TiFeaturesError(Exception):
    """Base exception class for all tifeatures errors."""


class NotFound(TiFeaturesError):
    """Invalid table name (requested collection does not exist)."""


class NoPrimaryKey(TiFeaturesError):
    """Table has no primary key."""


class MissingGeometryColumn(TiFeaturesError):
    """Table has no geometry column."""


class MissingDatetimeColumn(TiFeaturesError):
    """Table has no datetime column."""


class InvalidBBox(TiFeaturesError):
    """Invalid bounding box coordinates."""


class InvalidPropertyName(TiFeaturesError):
    """Invalid property/column name."""


class InvalidGeometryColumnName(TiFeaturesError):
    """Invalid geometry column name."""


class InvalidDatetimeColumnName(TiFeaturesError):
    """Invalid datetime column name."""


class InvalidDatetime(TiFeaturesError):
    """Invalid datetime value or interval."""
55 |
56 |
# Default mapping of exception type to the HTTP status code returned by the
# generated handler; consumed by `add_exception_handlers`.
DEFAULT_STATUS_CODES = {
    NotFound: status.HTTP_404_NOT_FOUND,
    InvalidBBox: status.HTTP_422_UNPROCESSABLE_ENTITY,
    InvalidDatetime: status.HTTP_422_UNPROCESSABLE_ENTITY,
    MissingGeometryColumn: status.HTTP_500_INTERNAL_SERVER_ERROR,
    MissingDatetimeColumn: status.HTTP_500_INTERNAL_SERVER_ERROR,
    InvalidPropertyName: status.HTTP_404_NOT_FOUND,
    InvalidGeometryColumnName: status.HTTP_404_NOT_FOUND,
    InvalidDatetimeColumnName: status.HTTP_404_NOT_FOUND,
    PostgresError: status.HTTP_500_INTERNAL_SERVER_ERROR,
    # Catch-all handler for any otherwise-unhandled exception.
    Exception: status.HTTP_500_INTERNAL_SERVER_ERROR,
    NoPrimaryKey: status.HTTP_422_UNPROCESSABLE_ENTITY,
}
70 |
71 |
def exception_handler_factory(status_code: int) -> Callable:
    """Create a FastAPI exception handler responding with `status_code`.

    The handler logs the exception (with traceback) and returns a JSON body
    of the form ``{"detail": "<exception text>"}``.
    """

    def handler(request: Request, exc: Exception):
        logger.error(exc, exc_info=True)
        body = {"detail": str(exc)}
        return JSONResponse(content=body, status_code=status_code)

    return handler
82 |
83 |
def add_exception_handlers(
    app: FastAPI, status_codes: Dict[Type[Exception], int]
) -> None:
    """Register a JSON exception handler on `app` for each exception type."""
    for exc_class, code in status_codes.items():
        handler = exception_handler_factory(code)
        app.add_exception_handler(exc_class, handler)
92 |
--------------------------------------------------------------------------------
/tifeatures/filter/__init__.py:
--------------------------------------------------------------------------------
1 | """tifeatures.filter"""
2 |
--------------------------------------------------------------------------------
/tifeatures/filter/evaluate.py:
--------------------------------------------------------------------------------
1 | """tifeatures.filter.evaluate."""
2 |
3 | from datetime import date, datetime, time, timedelta
4 |
5 | from pygeofilter import ast, values
6 | from pygeofilter.backends.evaluator import Evaluator, handle
7 |
8 | from tifeatures.filter import filters
9 |
10 | LITERALS = (str, float, int, bool, datetime, date, time, timedelta)
11 |
12 |
class BuildPGEvaluator(Evaluator):
    """Evaluate a pygeofilter AST into buildpg SQL expressions.

    Each ``@handle`` method maps one pygeofilter AST node type to the
    corresponding helper in ``tifeatures.filter.filters``.
    """

    def __init__(self, field_mapping):
        """Store the field names used to resolve attribute lookups."""
        self.field_mapping = field_mapping

    @handle(ast.Not)
    def not_(self, node, sub):
        """Negate a sub-expression."""
        return filters.negate(sub)

    @handle(ast.And, ast.Or)
    def combination(self, node, lhs, rhs):
        """Combine two expressions with AND/OR."""
        return filters.combine((lhs, rhs), node.op.value)

    @handle(ast.Comparison, subclasses=True)
    def comparison(self, node, lhs, rhs):
        """Binary comparison (=, <>, <, <=, >, >=)."""
        return filters.runop(
            lhs,
            rhs,
            node.op.value,
        )

    @handle(ast.Between)
    def between(self, node, lhs, low, high):
        """Range test, optionally negated (NOT BETWEEN)."""
        return filters.between(lhs, low, high, node.not_)

    @handle(ast.Like)
    def like(self, node, lhs):
        """Wildcard pattern match; `nocase` selects case-insensitivity."""
        return filters.like(
            lhs,
            node.pattern,
            not node.nocase,
            node.not_,
        )

    @handle(ast.In)
    def in_(self, node, lhs, *options):
        """Membership test, optionally negated (NOT IN)."""
        return filters.runop(
            lhs,
            options,
            "in",
            node.not_,
        )

    @handle(ast.IsNull)
    def null(self, node, lhs):
        """NULL test, optionally negated (IS NOT NULL)."""
        return filters.runop(lhs, None, "is_null", node.not_)

    # @handle(ast.ExistsPredicateNode)
    # def exists(self, node, lhs):
    #     if self.use_getattr:
    #         result = hasattr(self.obj, node.lhs.name)
    #     else:
    #         result = lhs in self.obj

    #     if node.not_:
    #         result = not result
    #     return result

    @handle(ast.TemporalPredicate, subclasses=True)
    def temporal(self, node, lhs, rhs):
        """Temporal predicate (BEFORE/AFTER/DURING/...)."""
        return filters.temporal(
            lhs,
            rhs,
            node.op.value,
        )

    @handle(ast.SpatialComparisonPredicate, subclasses=True)
    def spatial_operation(self, node, lhs, rhs):
        """Spatial predicate (INTERSECTS, WITHIN, ...)."""
        return filters.spatial(
            lhs,
            rhs,
            node.op.name,
        )

    @handle(ast.Relate)
    def spatial_pattern(self, node, lhs, rhs):
        """DE-9IM relate predicate with an intersection-matrix pattern."""
        return filters.spatial(
            lhs,
            rhs,
            "RELATE",
            pattern=node.pattern,
        )

    @handle(ast.SpatialDistancePredicate, subclasses=True)
    def spatial_distance(self, node, lhs, rhs):
        """Distance-based spatial predicate (DWITHIN/BEYOND)."""
        return filters.spatial(
            lhs,
            rhs,
            node.op.value,
            distance=node.distance,
            units=node.units,
        )

    @handle(ast.BBox)
    def bbox(self, node, lhs):
        """Bounding-box intersection predicate."""
        return filters.bbox(lhs, node.minx, node.miny, node.maxx, node.maxy, node.crs)

    @handle(ast.Attribute)
    def attribute(self, node):
        """Resolve an attribute (column) reference."""
        return filters.attribute(node.name, self.field_mapping)

    @handle(ast.Arithmetic, subclasses=True)
    def arithmetic(self, node, lhs, rhs):
        """Arithmetic expression (+, -, *, /)."""
        return filters.runop(lhs, rhs, node.op.value)

    @handle(ast.Function)
    def function(self, node, *arguments):
        """SQL function call."""
        return filters.func(node.name, *arguments)

    @handle(*values.LITERALS)
    def literal(self, node):
        """Literal value (str/number/bool/datetime/...)."""
        return filters.literal(node)

    @handle(values.Interval)
    def interval(self, node, start, end):
        """Temporal interval, passed through as a (start, end) tuple."""
        return filters.literal((start, end))

    @handle(values.Geometry)
    def geometry(self, node):
        """GeoJSON geometry literal, converted to EWKT."""
        return filters.parse_geometry(node.__geo_interface__)

    @handle(values.Envelope)
    def envelope(self, node):
        """Envelope literal, converted to an EWKT polygon."""
        return filters.parse_bbox([node.x1, node.y1, node.x2, node.y2])
136 |
137 |
def to_filter(ast, field_mapping=None):
    """Translate a pygeofilter (CQL2) AST into a buildpg SQL expression.

    :param ast: the abstract syntax tree produced by a CQL2 parser
        (note: the parameter shadows the stdlib/pygeofilter ``ast`` module).
    :param field_mapping: the valid field (column) names used to resolve
        attribute references.
    :returns: a buildpg logic expression suitable for a WHERE clause.

    """
    return BuildPGEvaluator(field_mapping).evaluate(ast)
150 |
--------------------------------------------------------------------------------
/tifeatures/filter/filters.py:
--------------------------------------------------------------------------------
1 | """tifeatures.filter.filters"""
2 |
3 | import re
4 | from datetime import timedelta
5 | from functools import reduce
6 | from inspect import signature
7 | from typing import Any, Callable, Dict, List
8 |
9 | from buildpg import V
10 | from buildpg.funcs import AND as and_
11 | from buildpg.funcs import NOT as not_
12 | from buildpg.funcs import OR as or_
13 | from buildpg.funcs import any
14 | from buildpg.logic import Func
15 | from geojson_pydantic.geometries import Polygon, parse_geometry_obj
16 |
17 |
def bbox_to_wkt(bbox: List[float], srid: int = 4326) -> str:
    """Convert a bounding box to an EWKT string (``SRID=...;POLYGON(...)``)."""
    polygon = Polygon.from_bounds(*bbox)  # type:ignore
    return "SRID={};{}".format(srid, polygon.wkt)
22 |
23 |
def parse_geometry(geom: Dict[str, Any]) -> str:
    """Parse a GeoJSON geometry dict into EWKT, defaulting the SRID to 4326."""
    wkt = parse_geometry_obj(geom).wkt  # type:ignore
    if wkt.startswith("SRID="):
        return wkt
    return f"SRID=4326;{wkt}"
28 | return f"{sridtxt}{wkt}"
29 |
30 |
31 | # ------------------------------------------------------------------------------
32 | # Filters
33 | # ------------------------------------------------------------------------------
class Operator:
    """Filter Operators.

    Maps CQL operator tokens to callables that build buildpg/SQL
    expressions. The callable's arity (inspected in ``__init__``) tells
    callers how many arguments the operator takes (2, or 3 for
    RELATE/DWITHIN/BEYOND).
    """

    OPERATORS: Dict[str, Callable] = {
        # NOTE(review): `.is_`/`.isnot`/`.like`/`.ilike`/`.any` are assumed
        # to exist on buildpg expressions — confirm against the buildpg API.
        "is_null": lambda f, a=None: f.is_(None),
        "is_not_null": lambda f, a=None: f.isnot(None),
        "==": lambda f, a: f == a,
        "=": lambda f, a: f == a,
        "eq": lambda f, a: f == a,
        "!=": lambda f, a: f != a,
        "<>": lambda f, a: f != a,
        "ne": lambda f, a: f != a,
        ">": lambda f, a: f > a,
        "gt": lambda f, a: f > a,
        "<": lambda f, a: f < a,
        "lt": lambda f, a: f < a,
        ">=": lambda f, a: f >= a,
        "ge": lambda f, a: f >= a,
        "<=": lambda f, a: f <= a,
        "le": lambda f, a: f <= a,
        "like": lambda f, a: f.like(a),
        "ilike": lambda f, a: f.ilike(a),
        "not_ilike": lambda f, a: ~f.ilike(a),
        "in": lambda f, a: f == any(a),
        # NOTE(review): `~f == any(a)` parses as `(~f) == any(a)`; if the
        # intent is to negate the membership test it should likely be
        # `~(f == any(a))` — confirm against buildpg operator semantics.
        "not_in": lambda f, a: ~f == any(a),
        "any": lambda f, a: f.any(a),
        "not_any": lambda f, a: f.not_(f.any(a)),
        # Spatial predicates map directly to PostGIS functions.
        "INTERSECTS": lambda f, a: Func("st_intersects", f, a),
        "DISJOINT": lambda f, a: Func("st_disjoint", f, a),
        "CONTAINS": lambda f, a: Func("st_contains", f, a),
        "WITHIN": lambda f, a: Func("st_within", f, a),
        "TOUCHES": lambda f, a: Func("st_touches", f, a),
        "CROSSES": lambda f, a: Func("st_crosses", f, a),
        "OVERLAPS": lambda f, a: Func("st_overlaps", f, a),
        "EQUALS": lambda f, a: Func("st_equals", f, a),
        "RELATE": lambda f, a, pattern: Func("st_relate", f, a, pattern),
        "DWITHIN": lambda f, a, distance: Func("st_dwithin", f, a, distance),
        "BEYOND": lambda f, a, distance: ~Func("st_dwithin", f, a, distance),
        "+": lambda f, a: f + a,
        "-": lambda f, a: f - a,
        "*": lambda f, a: f * a,
        "/": lambda f, a: f / a,
    }

    def __init__(self, operator: str = None):
        """Resolve `operator` (defaulting to "==") or raise if unknown."""
        if not operator:
            operator = "=="

        if operator not in self.OPERATORS:
            raise Exception("Operator `{}` not valid.".format(operator))

        self.operator = operator
        self.function = self.OPERATORS[operator]
        # Number of parameters the operator callable accepts.
        self.arity = len(signature(self.function).parameters)
89 |
90 |
def func(name, *args):
    """Build a call to the SQL function `name` with the given arguments."""
    return Func(name, *args)
94 |
95 |
def combine(sub_filters, combinator: str = "AND"):
    """Combine filters using a logical combinator.

    :param sub_filters: the filters to combine
    :param combinator: a string: "AND" / "OR"
    :return: the combined filter

    """
    assert combinator in ("AND", "OR")
    _op = and_ if combinator == "AND" else or_

    # Left-fold the sub-filters pairwise into one expression.
    return reduce(lambda acc, item: _op(acc, item), sub_filters)
111 |
112 |
def negate(sub_filter):
    """Negate a filter, opposing its meaning.

    :param sub_filter: the filter to negate
    :return: the negated filter (wrapped in a SQL NOT)

    """
    return not_(sub_filter)
121 |
122 |
def runop(lhs, rhs=None, op: str = "=", negate: bool = False):
    """Compare a field with an expression using a comparison operation.

    :param lhs: the field to compare
    :param rhs: the filter expression
    :param op: a string denoting the operation
    :param negate: whether to wrap the result in a SQL NOT
    :return: a comparison expression object

    """
    operation = Operator(op)
    expression = operation.function(lhs, rhs)

    return not_(expression) if negate else expression
137 |
138 |
def between(lhs, low, high, negate=False):
    """Match elements whose value falls within a closed range.

    :param lhs: the field to compare
    :param low: the lower value of the range (inclusive)
    :param high: the upper value of the range (inclusive)
    :param negate: whether to negate the range test
    :return: a comparison expression object

    """
    ge = Operator(">=")
    le = Operator("<=")
    in_range = and_(ge.function(lhs, low), le.function(lhs, high))

    return not_(in_range) if negate else in_range
155 |
156 |
def like(lhs, rhs, case=False, negate=False):
    """Filter elements by a string attribute using wildcard expressions.

    :param lhs: the field to compare
    :param rhs: the wildcard pattern: a string with '%' wildcards
    :param case: True for a case-sensitive LIKE, False for ILIKE
    :param negate: whether to negate the match
    :return: a comparison expression object

    """
    _op = Operator("like" if case else "ilike")
    match = _op.function(lhs, rhs)

    return not_(match) if negate else match
176 |
177 |
def temporal(lhs, time_or_period, op):
    """Create a temporal filter for the given temporal attribute.

    :param lhs: the field to compare
    :param time_or_period: the time instant or time span to use as a filter:
        a single datetime (BEFORE/AFTER/TEQUALS), or a tuple of two
        datetimes, or a tuple of one datetime and one
        :class:`datetime.timedelta` (period operators)
    :param op: the comparison operation, e.g. ``"BEFORE"``, ``"AFTER"``,
        ``"TEQUALS"``, ``"DURING"`` (any other value is treated as a period)
    :return: a comparison expression object, or None when no usable bound
        was derived (implicit fall-through)

    """
    low = None
    high = None
    equal = None
    if op in ("BEFORE", "AFTER"):
        if op == "BEFORE":
            high = time_or_period
        else:
            low = time_or_period
    elif op == "TEQUALS":
        equal = time_or_period
    else:
        # Every other operator carries a (start, end) period.
        low, high = time_or_period

    # A timedelta endpoint is resolved relative to the other endpoint.
    # NOTE(review): this assumes at most one endpoint is a timedelta and the
    # other is a datetime; otherwise the subtraction/addition raises — verify
    # against the CQL parser's interval output.
    if isinstance(low, timedelta):
        low = high - low
    if isinstance(high, timedelta):
        high = low + high
    if low is not None or high is not None:
        if low is not None and high is not None:
            return between(lhs, low, high)
        elif low is not None:
            return runop(lhs, low, ">=")
        else:
            return runop(lhs, high, "<=")
    elif equal is not None:
        return runop(lhs, equal, "==")
217 |
218 |
# Distance-unit abbreviations. NOTE(review): not referenced anywhere in this
# module — possibly dead code or used by callers elsewhere; confirm before
# removing.
UNITS_LOOKUP = {"kilometers": "km", "meters": "m"}
220 |
221 |
def spatial(lhs, rhs, op, pattern=None, distance=None, units=None):
    """Create a spatial filter for the given spatial attribute.

    :param lhs: the field to compare
    :param rhs: the geometry to compare against
    :param op: the comparison operation. one of ``"INTERSECTS"``, ``"DISJOINT"``, ``"CONTAINS"``, ``"WITHIN"``, ``"TOUCHES"``, ``"CROSSES"``, ``"OVERLAPS"``, ``"EQUALS"``, ``"RELATE"``, ``"DWITHIN"``, ``"BEYOND"``
    :param pattern: the DE-9IM pattern for ``"RELATE"``
    :param distance: the distance value for ``"DWITHIN"`` and ``"BEYOND"``
    :param units: the units the distance is expressed in
    :return: a comparison expression object

    """
    _op = Operator(op)

    if op == "RELATE":
        return _op.function(lhs, rhs, pattern)

    if op in ("DWITHIN", "BEYOND"):
        # NOTE(review): dividing looks inverted for a "convert to meters"
        # intent (km -> m would multiply by 1000); behavior preserved here —
        # confirm which unit ST_DWithin is expected to receive for this CRS.
        if units == "kilometers":
            distance = distance / 1000
        elif units == "miles":
            distance = distance / 1609
        return _op.function(lhs, rhs, distance)

    return _op.function(lhs, rhs)
246 |
247 |
def bbox(lhs, minx, miny, maxx, maxy, crs: int = 4326):
    """Create a bounding box filter for the given spatial attribute.

    :param lhs: the field to compare
    :param minx: the lower x part of the bbox
    :param miny: the lower y part of the bbox
    :param maxx: the upper x part of the bbox
    :param maxy: the upper y part of the bbox
    :param crs: the CRS (SRID) the bbox is expressed in
    :return: a comparison expression object

    """
    envelope_wkt = bbox_to_wkt([minx, miny, maxx, maxy], crs)
    return Func("st_intersects", lhs, envelope_wkt)
262 |
263 |
def quote_ident(s: str) -> str:
    """Quote a SQL identifier when needed.

    Returns `s` unchanged when it is all lowercase ASCII letters, wraps it
    in double quotes when it is a valid mixed-case identifier (letter
    followed by word characters), and rejects anything else.

    :raises TypeError: if `s` is not a valid identifier.
    """
    if re.match(r"^[a-z]+$", s):
        return s
    # `\w` already includes digits and underscore, so the original
    # `[\w\d_]` character class was redundant; behavior is unchanged.
    if re.match(r"^[a-zA-Z]\w*$", s):
        return f'"{s}"'
    raise TypeError(f"{s} is not a valid identifier")
271 |
272 |
def attribute(name: str, fields: List[str]):
    """Create an attribute lookup expression from the known field names.

    :param name: the field filter name (the literals "true"/"false",
        case-insensitive, become Python booleans)
    :param fields: the list of valid column names

    :raises TypeError: if the name is neither a field nor a boolean literal.
    """
    if name in fields:
        return V(name)

    lowered = name.lower()
    if lowered == "true":
        return True
    if lowered == "false":
        return False

    raise TypeError(f"Field {name} not in table.")
288 |
289 |
def literal(value):
    """Return a literal value unchanged (identity for CQL literal nodes)."""
    return value
293 |
--------------------------------------------------------------------------------
/tifeatures/layer.py:
--------------------------------------------------------------------------------
1 | """tifeatures.layers."""
2 |
3 | import abc
4 | import re
5 | from typing import Any, Dict, List, Optional, Tuple, TypedDict, Union
6 |
7 | from buildpg import RawDangerous as raw
8 | from buildpg import asyncpg, clauses
9 | from buildpg import funcs as pg_funcs
10 | from buildpg import logic, render
11 | from buildpg.components import VarLiteral
12 | from ciso8601 import parse_rfc3339
13 | from pydantic import BaseModel, root_validator
14 | from pygeofilter.ast import AstType
15 |
16 | from tifeatures.dbmodel import GeometryColumn
17 | from tifeatures.dbmodel import Table as DBTable
18 | from tifeatures.errors import (
19 | InvalidDatetime,
20 | InvalidDatetimeColumnName,
21 | InvalidGeometryColumnName,
22 | InvalidPropertyName,
23 | MissingDatetimeColumn,
24 | )
25 | from tifeatures.filter.evaluate import to_filter
26 | from tifeatures.filter.filters import bbox_to_wkt
27 |
28 |
class RawComponent(VarLiteral):
    """Enable building statements with more complicated logic.

    Wraps an already-built buildpg component so it renders verbatim (padded
    with spaces) and can be chained onto other clauses with ``+``.
    """

    __slots__ = "val"

    def __init__(self, val):
        """Store the wrapped component."""
        self.val = val

    def render(self):
        """Yield the wrapped component surrounded by raw spaces."""
        yield raw(" ")
        yield self.val
        yield raw(" ")

    def __add__(self, other):
        """Chain this component with another clause."""
        return clauses.Clauses(self, other)
47 |
48 |
# Links from GeoJSON geometry type (upper-cased) to its published JSON
# schema; used when describing geometry properties (e.g. queryables).
geojson_schema = {
    "GEOMETRY": "https://geojson.org/schema/Geometry.json",
    "POINT": "https://geojson.org/schema/Point.json",
    "MULTIPOINT": "https://geojson.org/schema/MultiPoint.json",
    "LINESTRING": "https://geojson.org/schema/LineString.json",
    "MULTILINESTRING": "https://geojson.org/schema/MultiLineString.json",
    "POLYGON": "https://geojson.org/schema/Polygon.json",
    "MULTIPOLYGON": "https://geojson.org/schema/MultiPolygon.json",
    "GEOMETRYCOLLECTION": "https://geojson.org/schema/GeometryCollection.json",
}
60 |
61 |
class Feature(TypedDict, total=False):
    """Simple GeoJSON-like Feature model (all keys optional)."""

    type: str
    # Geometry is either a dict (GeoJSON object) or a str (wkt)
    geometry: Optional[Union[Dict, str]]
    properties: Optional[Dict]
    id: Optional[Any]
    bbox: Optional[List[float]]
71 |
72 |
class FeatureCollection(TypedDict, total=False):
    """Simple GeoJSON-like FeatureCollection model (all keys optional)."""

    type: str
    features: List[Feature]
    bbox: Optional[List[float]]
79 |
80 |
class CollectionLayer(BaseModel, metaclass=abc.ABCMeta):
    """Layer's Abstract BaseClass.

    Attributes:
        id (str): Layer's name.
        bounds (list): Layer's bounds (left, bottom, right, top).
        crs (str): Coordinate reference system of the Collection.
        title (str): Layer's title
        description (str): Layer's description

    """

    id: str
    bounds: Optional[List[float]]
    crs: Optional[str]
    title: Optional[str]
    description: Optional[str]

    @abc.abstractmethod
    async def features(
        self,
        pool: asyncpg.BuildPgPool,
        *,
        ids_filter: Optional[List[str]] = None,
        bbox_filter: Optional[List[float]] = None,
        datetime_filter: Optional[List[str]] = None,
        properties_filter: Optional[List[Tuple[str, str]]] = None,
        cql_filter: Optional[AstType] = None,
        sortby: Optional[str] = None,
        properties: Optional[List[str]] = None,
        geom: Optional[str] = None,
        dt: Optional[str] = None,
        limit: Optional[int] = None,
        offset: Optional[int] = None,
        bbox_only: Optional[bool] = None,
        simplify: Optional[float] = None,
        geom_as_wkt: bool = False,
    ) -> Tuple[FeatureCollection, int]:
        """Return a FeatureCollection and the number of matched items.

        The count reflects all rows matching the filters, independent of
        `limit`/`offset` paging.
        """
        ...

    @property
    def queryables(self) -> Dict:
        """Return the queryables (property name -> schema description)."""
        ...
126 |
127 |
128 | class Table(CollectionLayer, DBTable):
129 | """Table Reader.
130 |
131 | Attributes:
132 | id (str): Layer's name.
133 | bounds (list): Layer's bounds (left, bottom, right, top).
134 | crs (str): Coordinate reference system of the Table.
135 | type (str): Layer's type.
136 | schema (str): Table's database schema (e.g public).
137 | geometry_type (str): Table's geometry type (e.g polygon).
138 | srid (int): Table's SRID
139 | geometry_column (str): Name of the geomtry column in the table.
140 | properties (Dict): Properties available in the table.
141 |
142 | """
143 |
144 | type: str = "Table"
145 |
146 | @root_validator
147 | def bounds_default(cls, values):
148 | """Get default bounds from the first geometry columns."""
149 | geoms = values.get("geometry_columns")
150 | if geoms:
151 | # Get the Extent of all the bounds
152 | minx, miny, maxx, maxy = zip(*[geom.bounds for geom in geoms])
153 | values["bounds"] = [min(minx), min(miny), max(maxx), max(maxy)]
154 |
155 | return values
156 |
    def _select(
        self,
        properties: Optional[List[str]],
        geometry_column: Optional[GeometryColumn],
        bbox_only: Optional[bool],
        simplify: Optional[float],
        geom_as_wkt: bool = False,
    ):
        """Build the SELECT clause.

        Always emits a ``tifeatures_id`` column (the table's id column, or a
        ROW_NUMBER() fallback) and a ``tifeatures_geom`` column (EWKT or
        GeoJSON, NULL when no geometry column is used), in addition to the
        requested property columns.
        """
        columns = self.columns(properties)
        if columns:
            sel = clauses.Select(columns) + raw(",")
        else:
            sel = raw("SELECT ")

        # Expose a stable feature id even for tables without an id column.
        if self.id_column:
            sel = sel + raw(logic.V(self.id_column)) + raw(" AS tifeatures_id, ")
        else:
            sel = sel + raw(" ROW_NUMBER () OVER () AS tifeatures_id, ")

        geom = self._geom(geometry_column, bbox_only, simplify)
        if geom_as_wkt:
            # EWKT geometry output.
            if geom:
                sel = (
                    sel
                    + raw(logic.Func("st_asewkt", geom))
                    + raw(" AS tifeatures_geom ")
                )
            else:
                sel = sel + raw(" NULL::text AS tifeatures_geom ")

        else:
            # GeoJSON geometry output (cast to json).
            if geom:
                sel = (
                    sel
                    + raw(pg_funcs.cast(logic.Func("st_asgeojson", geom), "json"))
                    + raw(" AS tifeatures_geom ")
                )
            else:
                sel = sel + raw(" NULL::json AS tifeatures_geom ")

        return RawComponent(sel)
198 |
    def _select_count(self):
        """Build a ``SELECT COUNT(*)`` clause."""
        return clauses.Select(pg_funcs.count("*"))
201 |
    def _from(self):
        """Build the FROM clause (the table name is this layer's id)."""
        return clauses.From(self.id)
204 |
205 | def _geom(
206 | self,
207 | geometry_column: Optional[GeometryColumn],
208 | bbox_only: Optional[bool],
209 | simplify: Optional[float],
210 | ):
211 | if geometry_column is None:
212 | return None
213 |
214 | g = logic.V(geometry_column.name)
215 | g = pg_funcs.cast(g, "geometry")
216 |
217 | if geometry_column.srid == 4326:
218 | g = logic.Func("ST_Transform", g, pg_funcs.cast(4326, "int"))
219 |
220 | if bbox_only:
221 | g = logic.Func("ST_Envelope", g)
222 | elif simplify:
223 | g = logic.Func(
224 | "ST_SnapToGrid",
225 | logic.Func("ST_Simplify", g, simplify),
226 | simplify,
227 | )
228 |
229 | return g
230 |
    def _where(
        self,
        ids: Optional[List[str]] = None,
        datetime: Optional[List[str]] = None,
        bbox: Optional[List[float]] = None,
        properties: Optional[List[Tuple[str, Any]]] = None,
        cql: Optional[AstType] = None,
        geom: Optional[str] = None,
        dt: Optional[str] = None,
    ):
        """Construct WHERE query.

        ANDs together the ids, properties, bbox, datetime and CQL filters.
        Starts from a constant TRUE so an empty filter set still yields a
        valid clause.

        Raises:
            InvalidPropertyName: for an unknown property filter name.
            MissingDatetimeColumn: when a datetime filter is used on a table
                without any timestamp column.
            InvalidDatetimeColumnName: when `dt` names an unknown column.
        """
        wheres = [logic.S(True)]

        # `ids` filter: values are cast through text to the id column's type.
        if ids is not None:
            if len(ids) == 1:
                wheres.append(
                    logic.V(self.id_column)
                    == pg_funcs.cast(
                        pg_funcs.cast(ids[0], "text"), self.id_column_info.type
                    )
                )
            else:
                w = [
                    logic.V(self.id_column)
                    == logic.S(
                        pg_funcs.cast(
                            pg_funcs.cast(i, "text"), self.id_column_info.type
                        )
                    )
                    for i in ids
                ]
                wheres.append(pg_funcs.OR(*w))

        # `properties` filter: every (name, value) pair must match (AND).
        if properties is not None:
            w = []
            for (prop, val) in properties:
                col = self.get_column(prop)
                if not col:
                    raise InvalidPropertyName(f"Invalid property name: {prop}")

                w.append(
                    logic.V(col.name)
                    == logic.S(pg_funcs.cast(pg_funcs.cast(val, "text"), col.type))
                )

            if w:
                wheres.append(pg_funcs.AND(*w))

        # `bbox` filter: silently skipped when the table has no geometry column.
        geometry_column = self.get_geometry_column(geom)
        if bbox is not None and geometry_column is not None:
            wheres.append(
                logic.Func(
                    "ST_Intersects",
                    logic.S(bbox_to_wkt(bbox)),
                    logic.V(geometry_column.name),
                )
            )

        # `datetime` filter
        if datetime:
            if not self.datetime_columns:
                raise MissingDatetimeColumn(
                    "Must have timestamp typed column to filter with datetime."
                )

            datetime_column = self.get_datetime_column(dt)
            if not datetime_column:
                raise InvalidDatetimeColumnName(f"Invalid Datetime Column: {dt}.")

            wheres.append(self._datetime_filter_to_sql(datetime, datetime_column.name))

        # `CQL` filter: attribute names are resolved against this table's properties.
        if cql is not None:
            wheres.append(to_filter(cql, [p.name for p in self.properties]))

        return clauses.Where(pg_funcs.AND(*wheres))
310 |
    def _datetime_filter_to_sql(self, interval: List[str], dt_name: str):
        """Translate a datetime value or interval into a SQL condition.

        A single value matches exactly; a two-element interval may be
        open-ended on one side using ".." or "". The start bound is
        inclusive (`>=`), the end bound exclusive (`<`).

        Raises:
            InvalidDatetime: for double open-ended or inverted intervals.
        """
        if len(interval) == 1:
            return logic.V(dt_name) == logic.S(
                pg_funcs.cast(parse_rfc3339(interval[0]), "timestamptz")
            )

        else:
            start = (
                parse_rfc3339(interval[0]) if not interval[0] in ["..", ""] else None
            )
            end = parse_rfc3339(interval[1]) if not interval[1] in ["..", ""] else None

            if start is None and end is None:
                raise InvalidDatetime(
                    "Double open-ended datetime intervals are not allowed."
                )

            if start is not None and end is not None and start > end:
                raise InvalidDatetime("Start datetime cannot be before end datetime.")

            if not start:
                return logic.V(dt_name) <= logic.S(pg_funcs.cast(end, "timestamptz"))

            elif not end:
                return logic.V(dt_name) >= logic.S(pg_funcs.cast(start, "timestamptz"))

            else:
                return pg_funcs.AND(
                    logic.V(dt_name) >= logic.S(pg_funcs.cast(start, "timestamptz")),
                    logic.V(dt_name) < logic.S(pg_funcs.cast(end, "timestamptz")),
                )
342 |
343 | def _sortby(self, sortby: Optional[str]):
344 | sorts = []
345 | if sortby:
346 | for s in sortby.strip().split(","):
347 | parts = re.match(
348 | "^(?P[+-]?)(?P.*)$", s
349 | ).groupdict() # type:ignore
350 |
351 | direction = parts["direction"]
352 | column = parts["column"].strip()
353 | if self.get_column(column):
354 | if direction == "-":
355 | sorts.append(logic.V(column).desc())
356 | else:
357 | sorts.append(logic.V(column))
358 | else:
359 | raise InvalidPropertyName(f"Property {column} does not exist.")
360 |
361 | else:
362 | if self.id_column is not None:
363 | sorts.append(logic.V(self.id_column))
364 | else:
365 | sorts.append(logic.V(self.properties[0].name))
366 |
367 | return clauses.OrderBy(*sorts)
368 |
    async def _features_query(
        self,
        *,
        pool: asyncpg.BuildPgPool,
        ids_filter: Optional[List[str]] = None,
        bbox_filter: Optional[List[float]] = None,
        datetime_filter: Optional[List[str]] = None,
        properties_filter: Optional[List[Tuple[str, str]]] = None,
        cql_filter: Optional[AstType] = None,
        sortby: Optional[str] = None,
        properties: Optional[List[str]] = None,
        geom: Optional[str] = None,
        dt: Optional[str] = None,
        limit: Optional[int] = None,
        offset: Optional[int] = None,
        bbox_only: Optional[bool] = None,
        simplify: Optional[float] = None,
        geom_as_wkt: bool = False,
    ):
        """Build and run the features query.

        Async generator yielding one Feature per fetched row. `limit`
        defaults to 10 and `offset` to 0.
        """
        c = (
            self._select(
                properties=properties,
                geometry_column=self.get_geometry_column(geom),
                bbox_only=bbox_only,
                simplify=simplify,
                geom_as_wkt=geom_as_wkt,
            )
            + self._from()
            + self._where(
                ids=ids_filter,
                datetime=datetime_filter,
                bbox=bbox_filter,
                properties=properties_filter,
                cql=cql_filter,
                geom=geom,
                dt=dt,
            )
            + self._sortby(sortby)
            + clauses.Limit(limit or 10)
            + clauses.Offset(offset or 0)
        )

        q, p = render(":c", c=c)
        async with pool.acquire() as conn:
            for r in await conn.fetch(q, *p):
                props = dict(r)
                # Pop the aliased id/geom columns out of the feature properties.
                g = props.pop("tifeatures_geom")
                id = props.pop("tifeatures_id")
                feature = Feature(type="Feature", geometry=g, id=id, properties=props)
                yield feature
420 |
    async def _features_count_query(
        self,
        *,
        pool: asyncpg.BuildPgPool,
        ids_filter: Optional[List[str]] = None,
        bbox_filter: Optional[List[float]] = None,
        datetime_filter: Optional[List[str]] = None,
        properties_filter: Optional[List[Tuple[str, str]]] = None,
        cql_filter: Optional[AstType] = None,
        geom: Optional[str] = None,
        dt: Optional[str] = None,
    ) -> int:
        """Build and run the features COUNT query.

        Uses the same WHERE clause as `_features_query` (no limit/offset)
        and returns the total number of matching rows.
        """
        c = (
            self._select_count()
            + self._from()
            + self._where(
                ids=ids_filter,
                datetime=datetime_filter,
                bbox=bbox_filter,
                properties=properties_filter,
                cql=cql_filter,
                geom=geom,
                dt=dt,
            )
        )

        q, p = render(":c", c=c)
        async with pool.acquire() as conn:
            count = await conn.fetchval(q, *p)
        return count
452 |
453 | async def features(
454 | self,
455 | pool: asyncpg.BuildPgPool,
456 | *,
457 | ids_filter: Optional[List[str]] = None,
458 | bbox_filter: Optional[List[float]] = None,
459 | datetime_filter: Optional[List[str]] = None,
460 | properties_filter: Optional[List[Tuple[str, str]]] = None,
461 | cql_filter: Optional[AstType] = None,
462 | sortby: Optional[str] = None,
463 | properties: Optional[List[str]] = None,
464 | geom: Optional[str] = None,
465 | dt: Optional[str] = None,
466 | limit: Optional[int] = None,
467 | offset: Optional[int] = None,
468 | bbox_only: Optional[bool] = None,
469 | simplify: Optional[float] = None,
470 | geom_as_wkt: bool = False,
471 | ) -> Tuple[FeatureCollection, int]:
472 | """Build and run Pg query."""
473 | if geom and geom.lower() != "none" and not self.get_geometry_column(geom):
474 | raise InvalidGeometryColumnName(f"Invalid Geometry Column: {geom}.")
475 |
476 | count = await self._features_count_query(
477 | pool=pool,
478 | ids_filter=ids_filter,
479 | datetime_filter=datetime_filter,
480 | bbox_filter=bbox_filter,
481 | properties_filter=properties_filter,
482 | cql_filter=cql_filter,
483 | geom=geom,
484 | dt=dt,
485 | )
486 |
487 | features = [
488 | f
489 | async for f in self._features_query(
490 | pool=pool,
491 | ids_filter=ids_filter,
492 | datetime_filter=datetime_filter,
493 | bbox_filter=bbox_filter,
494 | properties_filter=properties_filter,
495 | cql_filter=cql_filter,
496 | sortby=sortby,
497 | properties=properties,
498 | geom=geom,
499 | dt=dt,
500 | limit=limit,
501 | offset=offset,
502 | bbox_only=bbox_only,
503 | simplify=simplify,
504 | geom_as_wkt=geom_as_wkt,
505 | )
506 | ]
507 |
508 | return (
509 | FeatureCollection(type="FeatureCollection", features=features),
510 | count,
511 | )
512 |
513 | @property
514 | def queryables(self) -> Dict:
515 | """Return the queryables."""
516 | if self.geometry_columns:
517 | geoms = {
518 | col.name: {"$ref": geojson_schema.get(col.geometry_type.upper(), "")}
519 | for col in self.geometry_columns
520 | }
521 | else:
522 | geoms = {}
523 | props = {
524 | col.name: {"name": col.name, "type": col.json_type}
525 | for col in self.properties
526 | if col.name not in geoms
527 | }
528 | return {**geoms, **props}
529 |
--------------------------------------------------------------------------------
/tifeatures/main.py:
--------------------------------------------------------------------------------
1 | """tifeatures app."""
2 |
3 | from typing import Any, List
4 |
5 | import jinja2
6 |
7 | from tifeatures import __version__ as tifeatures_version
8 | from tifeatures.db import close_db_connection, connect_to_db, register_table_catalog
9 | from tifeatures.dbmodel import Table
10 | from tifeatures.errors import DEFAULT_STATUS_CODES, add_exception_handlers
11 | from tifeatures.factory import Endpoints
12 | from tifeatures.middleware import CacheControlMiddleware
13 | from tifeatures.settings import APISettings, PostgresSettings
14 |
15 | from fastapi import FastAPI, Request
16 |
17 | from starlette.middleware.cors import CORSMiddleware
18 | from starlette.templating import Jinja2Templates
19 | from starlette_cramjam.middleware import CompressionMiddleware
20 |
21 | settings = APISettings()
22 | postgres_settings = PostgresSettings()
23 |
24 | app = FastAPI(
25 | title=settings.name,
26 | version=tifeatures_version,
27 | openapi_url="/api",
28 | docs_url="/api.html",
29 | )
30 |
31 | # custom template directory
32 | templates_location: List[Any] = (
33 | [jinja2.FileSystemLoader(settings.template_directory)]
34 | if settings.template_directory
35 | else []
36 | )
37 | # default template directory
38 | templates_location.append(jinja2.PackageLoader(__package__, "templates"))
39 |
40 | templates = Jinja2Templates(
41 | directory="", # we need to set a dummy directory variable, see https://github.com/encode/starlette/issues/1214
42 | loader=jinja2.ChoiceLoader(templates_location),
43 | ) # type: ignore
44 |
45 | # Register endpoints.
46 | endpoints = Endpoints(title=settings.name, templates=templates)
47 | app.include_router(endpoints.router)
48 |
49 | # Set all CORS enabled origins
50 | if settings.cors_origins:
51 | app.add_middleware(
52 | CORSMiddleware,
53 | allow_origins=settings.cors_origins,
54 | allow_credentials=True,
55 | allow_methods=["GET"],
56 | allow_headers=["*"],
57 | )
58 |
59 | app.add_middleware(CacheControlMiddleware, cachecontrol=settings.cachecontrol)
60 | app.add_middleware(CompressionMiddleware)
61 | add_exception_handlers(app, DEFAULT_STATUS_CODES)
62 |
63 |
64 | @app.on_event("startup")
65 | async def startup_event() -> None:
66 | """Connect to database on startup."""
67 | await connect_to_db(app, settings=postgres_settings)
68 | await register_table_catalog(
69 | app,
70 | schemas=postgres_settings.db_schemas,
71 | tables=postgres_settings.db_tables,
72 | spatial=postgres_settings.only_spatial_tables,
73 | )
74 |
75 |
76 | @app.on_event("shutdown")
77 | async def shutdown_event() -> None:
78 | """Close database connection."""
79 | await close_db_connection(app)
80 |
81 |
82 | @app.get("/healthz", description="Health Check", tags=["Health Check"])
83 | def ping():
84 | """Health check."""
85 | return {"ping": "pong!"}
86 |
87 |
88 | if settings.DEBUG:
89 |
90 | @app.get("/rawcatalog")
91 | def raw_catalog(request: Request):
92 | """Return parsed catalog data for testing."""
93 | ret = {}
94 | cat = request.app.state.table_catalog
95 | for k, v in cat.items():
96 | ret[k] = Table(**v)
97 | return ret
98 |
--------------------------------------------------------------------------------
/tifeatures/middleware.py:
--------------------------------------------------------------------------------
1 | """tifeatures middlewares."""
2 |
3 | import re
4 | from typing import Optional, Set
5 |
6 | from starlette.middleware.base import BaseHTTPMiddleware
7 | from starlette.requests import Request
8 | from starlette.types import ASGIApp
9 |
10 |
class CacheControlMiddleware(BaseHTTPMiddleware):
    """MiddleWare to add CacheControl in response headers."""

    def __init__(
        self,
        app: ASGIApp,
        cachecontrol: Optional[str] = None,
        exclude_path: Optional[Set[str]] = None,
    ) -> None:
        """Init Middleware.

        Args:
            app (ASGIApp): starlette/FastAPI application.
            cachecontrol (str): Cache-Control string to add to the response.
            exclude_path (set): Set of regex expression to use to filter the path.

        """
        super().__init__(app)
        self.cachecontrol = cachecontrol
        self.exclude_path = exclude_path or set()

    async def dispatch(self, request: Request, call_next):
        """Attach the configured Cache-Control value to eligible responses."""
        response = await call_next(request)

        # Nothing to do when unconfigured or the response already set a value.
        if not self.cachecontrol or response.headers.get("Cache-Control"):
            return response

        # Excluded paths pass through untouched.
        if any(re.match(pattern, request.url.path) for pattern in self.exclude_path):
            return response

        # Only read-only requests that did not hit a server error get the header.
        if request.method in {"HEAD", "GET"} and response.status_code < 500:
            response.headers["Cache-Control"] = self.cachecontrol

        return response
44 |
--------------------------------------------------------------------------------
/tifeatures/model.py:
--------------------------------------------------------------------------------
1 | """tifeatures models."""
2 |
3 | from typing import Dict, List, Optional
4 |
5 | from geojson_pydantic.features import Feature, FeatureCollection
6 | from pydantic import BaseModel, Field
7 |
8 | from tifeatures.resources.enums import MediaType
9 |
10 |
class Link(BaseModel):
    """Link model.

    Ref: http://schemas.opengis.net/ogcapi/features/part1/1.0/openapi/schemas/link.yaml

    """

    # Target URL of the link.
    href: str
    # Link relation type (e.g. "self", "alternate", "next").
    rel: Optional[str]
    # Media type of the target resource.
    type: Optional[MediaType]
    # Language of the target resource.
    hreflang: Optional[str]
    # Human-readable title for the link.
    title: Optional[str]
    # Length of the target resource, in bytes.
    length: Optional[int]

    class Config:
        """Link model configuration."""

        # Serialize `type` as its media-type string rather than the enum member.
        use_enum_values = True
29 |
30 |
class Spatial(BaseModel):
    """Spatial Extent model.

    Ref: http://schemas.opengis.net/ogcapi/features/part1/1.0/openapi/schemas/extent.yaml

    """

    # bbox: List[BBox]
    # One or more bounding boxes; per the OGC schema, the first bbox describes
    # the overall extent.
    bbox: List[List[float]]
    # CRS of the bbox values; the OGC extent schema defaults this to CRS84
    # (WGS84 lon/lat), so make it optional with that default instead of required.
    crs: str = "http://www.opengis.net/def/crs/OGC/1.3/CRS84"
41 |
42 |
class Temporal(BaseModel):
    """Temporal Extent model.

    Ref: http://schemas.opengis.net/ogcapi/features/part1/1.0/openapi/schemas/extent.yaml

    """

    # interval: List[List[Optional[str], Optional[str]]]
    # Each inner list is a [start, end] pair; None marks an open-ended bound.
    interval: List[List[Optional[str]]]
    # Temporal reference system; defaults to the Gregorian calendar URI.
    trs: str = "http://www.opengis.net/def/uom/ISO-8601/0/Gregorian"
53 |
54 |
class Extent(BaseModel):
    """Extent model.

    Ref: http://schemas.opengis.net/ogcapi/features/part1/1.0/openapi/schemas/extent.yaml

    """

    # Spatial extent (bounding boxes), if any.
    spatial: Optional[Spatial]
    # Temporal extent (datetime intervals), if any.
    temporal: Optional[Temporal]
64 |
65 |
class Collection(BaseModel):
    """Collection model.

    Ref: http://schemas.opengis.net/ogcapi/features/part1/1.0/openapi/schemas/collection.yaml

    """

    # Collection identifier.
    id: str
    title: Optional[str]
    description: Optional[str]
    links: List[Link]
    extent: Optional[Extent]
    # OGC API - Features Part 1 only defines the "feature" item type.
    itemType: str = "feature"
    # Supported coordinate reference systems; CRS84 (lon/lat) by default.
    crs: List[str] = ["http://www.opengis.net/def/crs/OGC/1.3/CRS84"]

    class Config:
        """Collection model configuration."""

        # Drop unknown keys instead of failing validation.
        extra = "ignore"
85 |
86 |
class Collections(BaseModel):
    """
    Collections model.

    Ref: http://schemas.opengis.net/ogcapi/features/part1/1.0/openapi/schemas/collections.yaml

    """

    # The list of available collections.
    collections: List[Collection]
    # Links for the collections document itself (self, alternate, ...).
    links: List[Link]
97 |
98 |
class Item(Feature):
    """Item model: a GeoJSON Feature extended with OGC API links.

    Ref: http://schemas.opengis.net/ogcapi/features/part1/1.0/openapi/schemas/featureGeoJSON.yaml

    """

    # Links attached to the feature (optional in the OGC schema).
    links: Optional[List[Link]]

    class Config:
        """Item model configuration."""

        arbitrary_types_allowed = True
112 |
113 |
class Items(FeatureCollection):
    """Items model: a GeoJSON FeatureCollection extended with OGC metadata.

    Ref: http://schemas.opengis.net/ogcapi/features/part1/1.0/openapi/schemas/featureCollectionGeoJSON.yaml

    """

    # Identifier of the parent collection.
    id: str
    title: Optional[str]
    description: Optional[str]
    keywords: Optional[List[str]]
    features: List[Item]  # type: ignore
    links: Optional[List[Link]]
    timeStamp: Optional[str]
    # Total number of features matching the query (per the OGC schema).
    numberMatched: Optional[int]
    # Number of features returned in this document (per the OGC schema).
    numberReturned: Optional[int]

    class Config:
        """Items model configuration."""

        arbitrary_types_allowed = True

    def json_seq(self, **kwargs):
        """Yield each feature as a newline-terminated GeoJSON string.

        kwargs are forwarded to pydantic's ``.json()`` serializer.
        """
        for f in self.features:
            yield f.json(**kwargs) + "\n"
140 |
141 |
class Conformance(BaseModel):
    """Conformance model.

    Ref: http://schemas.opengis.net/ogcapi/features/part1/1.0/openapi/schemas/confClasses.yaml

    """

    # URIs of the conformance classes this API implements.
    conformsTo: List[str]
150 |
151 |
class Landing(BaseModel):
    """Landing page model.

    Ref: http://schemas.opengis.net/ogcapi/features/part1/1.0/openapi/schemas/landingPage.yaml

    """

    title: Optional[str]
    description: Optional[str]
    # Entry-point links (API definition, conformance, collections, ...).
    links: List[Link]
162 |
163 |
class Queryables(BaseModel):
    """Queryables model.

    Ref: https://docs.ogc.org/DRAFTS/19-079r1.html#filter-queryables

    """

    title: str
    # Mapping of queryable name -> JSON-Schema-like description of the field.
    properties: Dict[str, Dict[str, str]]
    type: str = "object"
    # Serialized as "$schema" via the field alias.
    schema_name: str = Field(
        "https://json-schema.org/draft/2019-09/schema", alias="$schema"
    )
    # Serialized as "$id" via the field alias.
    link: str = Field(..., alias="$id")
178 |
--------------------------------------------------------------------------------
/tifeatures/resources/__init__.py:
--------------------------------------------------------------------------------
1 | """tifeatures.resources"""
2 |
--------------------------------------------------------------------------------
/tifeatures/resources/enums.py:
--------------------------------------------------------------------------------
1 | """tifeatures enums."""
2 |
3 | from enum import Enum
4 |
5 |
class ResponseType(str, Enum):
    """Output format options: JSON or HTML."""

    json = "json"
    html = "html"
11 |
12 |
class QueryablesResponseType(str, Enum):
    """Output format options for queryables: JSON Schema or HTML."""

    schemajson = "schemajson"
    html = "html"
18 |
19 |
class ItemsResponseType(str, Enum):
    """Output format options for items (feature collection) responses."""

    geojson = "geojson"
    html = "html"
    json = "json"
    csv = "csv"
    # GeoJSON text sequence (one feature per line).
    geojsonseq = "geojsonseq"
    # Newline-delimited JSON.
    ndjson = "ndjson"
29 |
30 |
class ItemResponseType(str, Enum):
    """Output format options for single-item responses."""

    geojson = "geojson"
    html = "html"
    json = "json"
37 |
38 |
class MediaType(str, Enum):
    """Responses Media types formerly known as MIME types."""

    xml = "application/xml"
    json = "application/json"
    # Newline-delimited JSON.
    ndjson = "application/ndjson"
    geojson = "application/geo+json"
    # GeoJSON text sequence (RFC 8142).
    geojsonseq = "application/geo+json-seq"
    schemajson = "application/schema+json"
    html = "text/html"
    text = "text/plain"
    csv = "text/csv"
    # OpenAPI 3.0 document media types.
    openapi30_json = "application/vnd.oai.openapi+json;version=3.0"
    openapi30_yaml = "application/vnd.oai.openapi;version=3.0"
53 |
54 |
class FilterLang(str, Enum):
    """filter language.

    ref: https://github.com/radiantearth/stac-api-spec/tree/master/fragments/filter#get-query-parameters-and-post-json-fields
    """

    # CQL2 text encoding (used with GET query parameters).
    cql2_text = "cql2-text"
    # CQL2 JSON encoding (used with POST bodies).
    cql2_json = "cql2-json"
63 |
--------------------------------------------------------------------------------
/tifeatures/resources/response.py:
--------------------------------------------------------------------------------
1 | """tifeatures custom responses."""
2 |
3 | from fastapi.responses import ORJSONResponse
4 |
5 |
class GeoJSONResponse(ORJSONResponse):
    """ORJSON response served with the GeoJSON media type."""

    # RFC 7946 GeoJSON media type.
    media_type = "application/geo+json"
10 |
11 |
class SchemaJSONResponse(ORJSONResponse):
    """ORJSON response served with the JSON Schema media type."""

    media_type = "application/schema+json"
16 |
--------------------------------------------------------------------------------
/tifeatures/settings.py:
--------------------------------------------------------------------------------
1 | """tifeatures config."""
2 |
3 | import sys
4 | from functools import lru_cache
5 | from typing import Any, Dict, List, Optional
6 |
7 | import pydantic
8 |
9 | # Pydantic does not support older versions of typing.TypedDict
10 | # https://github.com/pydantic/pydantic/pull/3374
11 | if sys.version_info < (3, 9, 2):
12 | from typing_extensions import TypedDict
13 | else:
14 | from typing import TypedDict
15 |
16 |
class TableConfig(TypedDict, total=False):
    """Configuration to add table options with env variables."""

    # Name of the geometry column to use for this table.
    geomcol: Optional[str]
    # Name of the datetime column to use for this table.
    datetimecol: Optional[str]
    # Primary-key column name.
    pk: Optional[str]
    # Subset of property columns to expose.
    properties: Optional[List[str]]
24 |
25 |
class TableSettings(pydantic.BaseSettings):
    """Table configuration settings"""

    # Candidate column names used as a fallback primary key, tried in order.
    fallback_key_names: List[str] = ["ogc_fid", "id", "pkey", "gid"]
    # Per-table overrides keyed by table identifier.
    table_config: Dict[str, TableConfig] = {}

    class Config:
        """model config"""

        # With the nested delimiter, env vars look like
        # TIFEATURES_TABLE_CONFIG__<table>__<option>.
        env_prefix = "TIFEATURES_"
        env_file = ".env"
        env_nested_delimiter = "__"
38 |
39 |
class _APISettings(pydantic.BaseSettings):
    """API settings"""

    name: str = "TiFeatures"
    DEBUG: bool = False
    # Declared as str (comma-separated) but normalized to a list by the
    # validator below.
    cors_origins: str = "*"
    cachecontrol: str = "public, max-age=3600"
    # Optional directory of user templates, searched before the packaged ones.
    template_directory: Optional[str] = None

    @pydantic.validator("cors_origins")
    def parse_cors_origin(cls, v):
        """Parse CORS origins.

        Splits the comma-separated value into a list of trimmed origins.
        """
        return [origin.strip() for origin in v.split(",")]

    class Config:
        """model config"""

        env_prefix = "TIFEATURES_"
        env_file = ".env"
        env_nested_delimiter = "__"
60 |
61 |
@lru_cache()
def APISettings() -> _APISettings:
    """Return a cached instance of the API settings.

    lru_cache ensures the environment / .env file is only parsed once
    per process.
    """
    return _APISettings()
66 |
67 |
class PostgresSettings(pydantic.BaseSettings):
    """Postgres-specific API settings.

    Attributes:
        postgres_user: postgres username.
        postgres_pass: postgres password.
        postgres_host: hostname for the connection.
        postgres_port: database port.
        postgres_dbname: database name.
    """

    postgres_user: Optional[str]
    postgres_pass: Optional[str]
    postgres_host: Optional[str]
    postgres_port: Optional[str]
    postgres_dbname: Optional[str]

    # Full DSN; assembled from the individual settings when not provided.
    database_url: Optional[pydantic.PostgresDsn] = None

    # Connection-pool tuning.
    db_min_conn_size: int = 1
    db_max_conn_size: int = 10
    db_max_queries: int = 50000
    db_max_inactive_conn_lifetime: float = 300

    # Schemas/tables to expose; None means all tables in the schemas.
    db_schemas: List[str] = ["public"]
    db_tables: Optional[List[str]]

    only_spatial_tables: bool = True

    class Config:
        """model config"""

        env_file = ".env"

    # https://github.com/tiangolo/full-stack-fastapi-postgresql/blob/master/%7B%7Bcookiecutter.project_slug%7D%7D/backend/app/app/core/config.py#L42
    @pydantic.validator("database_url", pre=True)
    def assemble_db_connection(cls, v: Optional[str], values: Dict[str, Any]) -> Any:
        """Validate db url settings."""
        # An explicitly provided URL wins over the individual settings.
        if isinstance(v, str):
            return v

        # NOTE(review): the "" and 5432 fallbacks below look dead -- the
        # postgres_* fields are always present in `values` (possibly None),
        # so dict.get never falls back to its default. Confirm before
        # relying on these defaults.
        return pydantic.PostgresDsn.build(
            scheme="postgresql",
            user=values.get("postgres_user"),
            password=values.get("postgres_pass"),
            host=values.get("postgres_host", ""),
            port=values.get("postgres_port", 5432),
            path=f"/{values.get('postgres_dbname') or ''}",
        )
117 |
--------------------------------------------------------------------------------
/tifeatures/templates/collection.html:
--------------------------------------------------------------------------------
1 | {% include "header.html" %}
2 |
3 |
4 |
5 | {% for crumb in crumbs %}
6 | {% if not loop.last %}
7 | {{ crumb.part }}
8 | {% else %}{{ crumb.part }}
9 | {% endif %}
10 | {% endfor %}
11 |
12 | JSON
13 |
14 |
15 |
16 | Collection: {{ response.title or response.id }}
17 |
18 |
19 |
20 |
21 |
{{ response.description or response.title or response.id }}
22 | {% if "keywords" in response and length(response.keywords) > 0 %}
23 |
24 | {% for keyword in response.keywords %}
25 | {{ keyword }}
26 | {% endfor %}
27 |
28 | {% endif %}
29 |
30 |
Links
31 |
36 |
37 |
40 |
41 |
42 |
64 |
65 | {% include "footer.html" %}
66 |
--------------------------------------------------------------------------------
/tifeatures/templates/collections.html:
--------------------------------------------------------------------------------
1 | {% include "header.html" %}
2 |
3 |
4 |
5 | {% for crumb in crumbs %}
6 | {% if not loop.last %}
7 | {{ crumb.part }}
8 | {% else %}{{ crumb.part }}
9 | {% endif %}
10 | {% endfor %}
11 |
12 | JSON
13 |
14 |
15 |
16 | Collections
17 |
18 |
19 |
20 |
21 |
22 | Title
23 | Type
24 | Description
25 |
26 |
27 |
28 | {% for collection in response.collections %}
29 |
30 | {{ collection.title or collection.id }}
31 | {{ collection.itemType }}
32 | {{ collection.description or collection.title or collection.id }}
33 |
34 | {% endfor %}
35 |
36 |
37 |
38 |
39 | {% include "footer.html" %}
40 |
--------------------------------------------------------------------------------
/tifeatures/templates/conformance.html:
--------------------------------------------------------------------------------
1 | {% include "header.html" %}
2 |
3 |
4 |
5 | {% for crumb in crumbs %}
6 | {% if not loop.last %}
7 | {{ crumb.part }}
8 | {% else %}{{ crumb.part }}
9 | {% endif %}
10 | {% endfor %}
11 |
12 | JSON
13 |
14 |
15 |
16 | {{ template.title }}Conformance
17 |
18 | This API implements the conformance classes from standards and community specifications that are listed below.
19 |
20 | Links
21 |
22 | {% for url in response.conformsTo %}
23 | {{ url }}
24 | {% endfor %}
25 |
26 |
27 | {% include "footer.html" %}
28 |
--------------------------------------------------------------------------------
/tifeatures/templates/debug.html:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/developmentseed/tifeatures/555638a83b3538e1d91fe8718a8ea047a527f207/tifeatures/templates/debug.html
--------------------------------------------------------------------------------
/tifeatures/templates/footer.html:
--------------------------------------------------------------------------------
1 | {% include "debug.html" %}
2 |
3 |
4 |