├── .dockerignore ├── .env-full-template ├── .env-required-template ├── .github └── workflows │ ├── manual_tests.yaml │ ├── pr.yaml │ └── release.yaml ├── .gitignore ├── .pre-commit-config.yaml ├── .releaserc.yaml ├── Dockerfile ├── LICENSE ├── README.md ├── azure ├── .funcignore ├── README.md ├── azure_functions.Dockerfile ├── function_app.py ├── host.json ├── local.settings.json ├── patched_asgi_function_wrapper.py └── publish_or_start.sh ├── changelog.md ├── commitlint.config.js ├── demo ├── docker-compose.yml └── prez-v4-backend │ ├── config.ttl │ ├── docker-compose.yml │ └── readme.md ├── dev ├── dev-config.ttl └── dev-setup.py ├── docs ├── custom_endpoints.md ├── development.md ├── examples │ ├── cql │ │ ├── geo_contains.json │ │ ├── geo_contains_filter.json │ │ ├── geo_contains_inverse.json │ │ ├── geo_contains_like.json │ │ ├── geo_crosses.json │ │ ├── geo_disjoint.json │ │ ├── geo_equals.json │ │ ├── geo_intersects.json │ │ ├── geo_overlaps.json │ │ ├── geo_touches.json │ │ └── geo_within.json │ └── custom_endpoints │ │ ├── example_4_levels.trig │ │ └── example_alternate_classes.ttl ├── faceted_search.md ├── ogc_features.md └── path_aliases.md ├── main.py ├── poetry.lock ├── poetry.toml ├── prez-logo.png ├── prez ├── app.py ├── bnode.py ├── cache.py ├── config.py ├── dependencies.py ├── enums.py ├── exceptions │ └── model_exceptions.py ├── middleware.py ├── models │ ├── endpoint_config.py │ ├── ogc_features.py │ └── query_params.py ├── reference_data │ ├── annotations │ │ ├── bfo-annotations.ttl │ │ ├── bfo2020-annotations.ttl │ │ ├── bibo-annotations.ttl │ │ ├── cito-annotations.ttl │ │ ├── data-roles-annotations.ttl │ │ ├── dcat-annotations.ttl │ │ ├── dcterms-annotations.ttl │ │ ├── dwc-annotations.ttl │ │ ├── geo-annotations.ttl │ │ ├── geojson-annotations.ttl │ │ ├── gml-annotations.ttl │ │ ├── gts2020-annotations.ttl │ │ ├── owl-annotations.ttl │ │ ├── pav-annotations.ttl │ │ ├── prez-ontology.ttl │ │ ├── prof-annotations.ttl │ │ ├── 
prov-annotations.ttl │ │ ├── qb-annotations.ttl │ │ ├── quantitykinds-annotations.ttl │ │ ├── qudt-annotations.ttl │ │ ├── rdf-annotations.ttl │ │ ├── rdflicenses-annotations.ttl │ │ ├── rdfs-annotations.ttl │ │ ├── rdwg-taxon-name-annotations.ttl │ │ ├── reg-annotations.ttl │ │ ├── reg-statuses-annotations.ttl │ │ ├── sdo-annotations.ttl │ │ ├── shacl-annotations.ttl │ │ ├── skos-annotations.ttl │ │ ├── skos-xl-annotations.ttl │ │ ├── sosa-annotations.ttl │ │ ├── tern-annotations.ttl │ │ ├── time-annotations.ttl │ │ ├── vann-vocab-20100607.ttl │ │ ├── vocab_derivation_modes.ttl │ │ ├── void-annotations.ttl │ │ └── xsd.ttl │ ├── cql │ │ ├── bounded_temporal_interval_relation_matrix.json │ │ ├── bounded_temporal_interval_relation_matrix.schema.json │ │ ├── default_context.json │ │ └── geo_function_mapping.py │ ├── endpoints │ │ ├── base │ │ │ ├── endpoint_metadata.ttl │ │ │ └── endpoint_nodeshapes.ttl │ │ ├── data_endpoints_default │ │ │ └── default_endpoints.ttl │ │ └── features │ │ │ ├── features_metadata.ttl │ │ │ └── features_nodeshapes.ttl │ ├── prefixes │ │ ├── all.file.vann.ttl │ │ ├── standard.ttl │ │ └── testing.ttl │ ├── prez_ns.py │ └── profiles │ │ ├── dd.ttl │ │ ├── ogc_features.ttl │ │ ├── ogc_records_profile.ttl │ │ └── prez_default_profiles.ttl ├── renderers │ ├── csv_renderer.py │ ├── json_renderer.py │ └── renderer.py ├── repositories │ ├── __init__.py │ ├── base.py │ ├── oxrdflib.py │ ├── pyoxigraph.py │ └── remote_sparql.py ├── response.py ├── routers │ ├── api_extras_examples.py │ ├── base_router.py │ ├── conformance.py │ ├── custom_endpoints.py │ ├── identifier.py │ ├── management.py │ ├── ogc_features_router.py │ ├── rdf_response_examples.json │ └── sparql.py ├── services │ ├── annotations.py │ ├── app_service.py │ ├── classes.py │ ├── connegp_service.py │ ├── curie_functions.py │ ├── exception_catchers.py │ ├── generate_endpoint_rdf.py │ ├── generate_profiles.py │ ├── generate_queryables.py │ ├── link_generation.py │ ├── listings.py │ ├── 
objects.py │ ├── prez_logging.py │ ├── query_generation │ │ ├── annotations.py │ │ ├── bbox_filter.py │ │ ├── classes.py │ │ ├── concept_hierarchy.py │ │ ├── count.py │ │ ├── cql.py │ │ ├── datetime_filter.py │ │ ├── facet.py │ │ ├── homepage.py │ │ ├── identifier.py │ │ ├── prefixes.py │ │ ├── search_default.py │ │ ├── search_fuseki_fts.py │ │ ├── shacl.py │ │ ├── sparql_escaping.py │ │ └── umbrella.py │ └── validate_iri.py └── static │ └── endpoint_config.html ├── pyproject.toml ├── test_data ├── animal_profiles.ttl ├── animals.ttl ├── bnode_depth-1.ttl ├── bnode_depth-2-2.ttl ├── bnode_depth-2.ttl ├── bnode_depth-4.ttl ├── catprez.ttl ├── cpr.ttl ├── cql │ ├── README.md │ ├── expected_generated_queries │ │ ├── additional_temporal_disjoint_instant.rq │ │ ├── additional_temporal_during_intervals.rq │ │ ├── clause7_12.rq │ │ ├── clause7_13.rq │ │ ├── clause7_17.rq │ │ ├── example20.rq │ │ ├── example21.rq │ │ ├── example22.rq │ │ ├── example27.rq │ │ ├── example53.rq │ │ ├── example54.rq │ │ ├── example55.rq │ │ ├── example56.rq │ │ ├── example57.rq │ │ ├── example58.rq │ │ ├── example59.rq │ │ ├── example60.rq │ │ ├── example61.rq │ │ ├── example62.rq │ │ ├── example63.rq │ │ ├── example64.rq │ │ ├── example65.rq │ │ ├── example66.rq │ │ └── example67.rq │ └── input │ │ ├── additional_temporal_disjoint_instant.json │ │ ├── additional_temporal_during_intervals.json │ │ ├── additional_temporal_intersects_instant.json │ │ ├── clause6_01.json │ │ ├── clause6_02a.json │ │ ├── clause6_02b.json │ │ ├── clause6_02c.json │ │ ├── clause6_02d.json │ │ ├── clause6_03.json │ │ ├── clause7_01.json │ │ ├── clause7_02.json │ │ ├── clause7_03a.json │ │ ├── clause7_03b.json │ │ ├── clause7_04.json │ │ ├── clause7_05.json │ │ ├── clause7_07.json │ │ ├── clause7_10.json │ │ ├── clause7_12.json │ │ ├── clause7_13.json │ │ ├── clause7_15.json │ │ ├── clause7_16.json │ │ ├── clause7_17.json │ │ ├── clause7_18.json │ │ ├── clause7_19.json │ │ ├── example01.json │ │ ├── example02.json │ │ 
├── example03.json │ │ ├── example04.json │ │ ├── example05a.json │ │ ├── example05b.json │ │ ├── example06a.json │ │ ├── example06b.json │ │ ├── example07.json │ │ ├── example08.json │ │ ├── example09.json │ │ ├── example10.json │ │ ├── example11.json │ │ ├── example12.json │ │ ├── example13.json │ │ ├── example14.json │ │ ├── example15.json │ │ ├── example16.json │ │ ├── example17.json │ │ ├── example18.json │ │ ├── example19.json │ │ ├── example20.json │ │ ├── example21.json │ │ ├── example22.json │ │ ├── example23.json │ │ ├── example24.json │ │ ├── example25.json │ │ ├── example26.json │ │ ├── example27.json │ │ ├── example28.json │ │ ├── example29.json │ │ ├── example30.json │ │ ├── example31.json │ │ ├── example32.json │ │ ├── example33.json │ │ ├── example34.json │ │ ├── example35.json │ │ ├── example36.json │ │ ├── example37.json │ │ ├── example38.json │ │ ├── example39.json │ │ ├── example40.json │ │ ├── example41.json │ │ ├── example42.json │ │ ├── example43.json │ │ ├── example44.json │ │ ├── example45.json │ │ ├── example46.json │ │ ├── example47.json │ │ ├── example48.json │ │ ├── example49.json │ │ ├── example50.json │ │ ├── example51.json │ │ ├── example52.json │ │ ├── example53.json │ │ ├── example54.json │ │ ├── example55.json │ │ ├── example56.json │ │ ├── example57.json │ │ ├── example58.json │ │ ├── example59.json │ │ ├── example60.json │ │ ├── example61.json │ │ ├── example62.json │ │ ├── example63.json │ │ ├── example64.json │ │ ├── example65.json │ │ ├── example66.json │ │ ├── example67.json │ │ ├── example68.json │ │ ├── example69.json │ │ ├── example70.json │ │ ├── example71.json │ │ ├── example72.json │ │ ├── example73.json │ │ ├── example74.json │ │ ├── example75.json │ │ ├── example76.json │ │ ├── example77.json │ │ ├── example78.json │ │ ├── example79.json │ │ ├── example80.json │ │ ├── example81.json │ │ ├── example82.json │ │ ├── example83.json │ │ ├── example84.json │ │ ├── example85.json │ │ └── example86.json ├── 
cql_queryable_shapes.ttl ├── cql_queryable_shapes_bdr.ttl ├── custom_endpoints_vanilla_5_level.ttl ├── custom_endpoints_vanilla_5_level_data.ttl ├── fts_property_shapes.ttl ├── issue_286.ttl ├── object_catalog_bblocks_catalog.ttl ├── object_vocab_api_bblocks.ttl ├── object_vocab_datatype_bblocks.ttl ├── object_vocab_parameter_bblocks.ttl ├── object_vocab_schema_bblocks.ttl ├── obs.ttl ├── ogc_features.ttl ├── redirect-foaf-homepage.ttl ├── sandgate.ttl ├── spaceprez.ttl └── vocprez.ttl └── tests ├── TO_FIX_test_dd_profiles.py ├── TO_FIX_test_endpoints_vocprez.py ├── TO_FIX_test_search.py ├── __init__.py ├── _test_count.py ├── _test_cql_fuseki.py ├── _test_curie_generation.py ├── conftest.py ├── cql-fuseki-config.ttl ├── data └── prefixes │ ├── data_using_prefixes.ttl │ └── remote_prefixes.ttl ├── test_alt_profiles.py ├── test_bnode.py ├── test_connegp.py ├── test_cql.py ├── test_cql_queryable.py ├── test_cql_time.py ├── test_curie_endpoint.py ├── test_endpoints_cache.py ├── test_endpoints_catprez.py ├── test_endpoints_concept_hierarchy.py ├── test_endpoints_management.py ├── test_endpoints_object.py ├── test_endpoints_ok.py ├── test_endpoints_profiles.py ├── test_endpoints_spaceprez.py ├── test_facet_query.py ├── test_geojson_to_wkt.py ├── test_issue_286.py ├── test_node_selection_shacl.py ├── test_ogc.py ├── test_ogc_features_manual.py ├── test_parse_datetimes.py ├── test_path_alias.py ├── test_predicates.py ├── test_property_selection_shacl.py ├── test_query_construction.py ├── test_redirect_endpoint.py ├── test_remote_prefixes.py ├── test_search.py ├── test_search_fuseki_fts_class.py ├── test_shacl_sequence_alternative_paths.py └── test_sparql.py /.dockerignore: -------------------------------------------------------------------------------- 1 | tests/ 2 | __pycache__/ 3 | .pytest_cache/ 4 | .venv/ 5 | venv/ 6 | .vscode/ 7 | .idea/ 8 | .git/ 9 | build/ 10 | test_*.py 11 | .github/ 12 | Dockerfile 13 | *.Dockerfile 14 | 
-------------------------------------------------------------------------------- /.env-required-template: -------------------------------------------------------------------------------- 1 | SPARQL_ENDPOINT=http://my-sparql-endpoint 2 | ## Alternatively 3 | SPARQL_REPO_TYPE=pyoxigraph 4 | LOCAL_RDF_DIR=/path/to/your/data -------------------------------------------------------------------------------- /.github/workflows/manual_tests.yaml: -------------------------------------------------------------------------------- 1 | name: Manually run Test Suite 2 | 3 | on: 4 | # push: 5 | # branches: 6 | # - feature/** 7 | workflow_dispatch: 8 | 9 | # based on https://github.com/snok/install-poetry 10 | jobs: 11 | test: 12 | runs-on: ubuntu-latest 13 | steps: 14 | #---------------------------------------------- 15 | # check-out repo and set-up python 16 | #---------------------------------------------- 17 | - name: Check out repository 18 | uses: actions/checkout@v4 19 | - name: Set up python 20 | id: setup-python 21 | uses: actions/setup-python@v4 22 | with: 23 | python-version: '3.12' 24 | #---------------------------------------------- 25 | # ----- install & configure poetry ----- 26 | #---------------------------------------------- 27 | - name: Install Poetry 28 | uses: snok/install-poetry@v1 29 | with: 30 | virtualenvs-create: true 31 | virtualenvs-in-project: true 32 | installer-parallel: true 33 | 34 | #---------------------------------------------- 35 | # load cached venv if cache exists 36 | #---------------------------------------------- 37 | - name: Load cached venv 38 | id: cached-poetry-dependencies 39 | uses: actions/cache@v4 40 | with: 41 | path: .venv 42 | key: venv-${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-${{ hashFiles('**/poetry.lock') }} 43 | #---------------------------------------------- 44 | # install dependencies if cache does not exist 45 | #---------------------------------------------- 46 | - name: Install dependencies 47 | 
if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true' 48 | run: poetry install --no-interaction --no-root 49 | #---------------------------------------------- 50 | # install your root project, if required 51 | #---------------------------------------------- 52 | - name: Install project 53 | run: poetry install --no-interaction --extras "server" 54 | #---------------------------------------------- 55 | # run test suite 56 | #---------------------------------------------- 57 | - name: Run tests 58 | run: poetry run pytest tests 59 | 60 | -------------------------------------------------------------------------------- /.github/workflows/pr.yaml: -------------------------------------------------------------------------------- 1 | name: "Pull Request Action: Run Tests & Lint PR Title" 2 | 3 | on: 4 | pull_request: 5 | types: 6 | - opened 7 | - edited 8 | - synchronize 9 | branches: 10 | - main 11 | workflow_dispatch: 12 | 13 | # based on https://github.com/snok/install-poetry 14 | jobs: 15 | test: 16 | runs-on: ubuntu-latest 17 | steps: 18 | #---------------------------------------------- 19 | # check-out repo and set-up python 20 | #---------------------------------------------- 21 | - name: Check out repository 22 | uses: actions/checkout@v4 23 | - name: Set up python 24 | id: setup-python 25 | uses: actions/setup-python@v5 26 | with: 27 | python-version: '3.12' 28 | #---------------------------------------------- 29 | # ----- install & configure poetry ----- 30 | #---------------------------------------------- 31 | - name: Install Poetry 32 | uses: snok/install-poetry@v1 33 | with: 34 | virtualenvs-create: true 35 | virtualenvs-in-project: true 36 | installer-parallel: true 37 | 38 | #---------------------------------------------- 39 | # load cached venv if cache exists 40 | #---------------------------------------------- 41 | - name: Load cached venv 42 | id: cached-poetry-dependencies 43 | uses: actions/cache@v4 44 | with: 45 | path: .venv 46 | key: 
venv-${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-${{ hashFiles('**/poetry.lock') }} 47 | #---------------------------------------------- 48 | # install dependencies if cache does not exist 49 | #---------------------------------------------- 50 | - name: Install dependencies 51 | if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true' 52 | run: poetry install --no-interaction --no-root 53 | #---------------------------------------------- 54 | # install your root project, if required 55 | #---------------------------------------------- 56 | - name: Install project 57 | run: poetry install --no-interaction --extras "server" 58 | #---------------------------------------------- 59 | # run test suite 60 | #---------------------------------------------- 61 | - name: Run tests 62 | run: poetry run pytest tests 63 | 64 | lint-pr-title: 65 | runs-on: ubuntu-latest 66 | steps: 67 | - name: Check out repository 68 | uses: actions/checkout@v4 69 | 70 | - name: Setup Node.js 71 | uses: actions/setup-node@v4 72 | with: 73 | node-version: 18 74 | 75 | - name: Install Commitlint 76 | run: npm install --global @commitlint/{cli,config-conventional} 77 | 78 | - name: Lint PR Title 79 | run: echo "${{ github.event.pull_request.title }}" | commitlint 80 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | venv/ 2 | __pycache__/ 3 | .venv/ 4 | .vscode/ 5 | .idea/ 6 | .pytest_cache/ 7 | .env* 8 | dist/ 9 | build/ 10 | !.env-required-template 11 | !.env-full-template 12 | rdf/ 13 | http/ 14 | /.python-version 15 | http/ 16 | .ropeproject/ 17 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | # See https://pre-commit.com for more information 2 | # See https://pre-commit.com/hooks.html for more 
hooks 3 | repos: 4 | - repo: https://github.com/pre-commit/pre-commit-hooks 5 | rev: v4.0.1 6 | hooks: 7 | - id: trailing-whitespace 8 | - id: end-of-file-fixer 9 | - id: check-yaml 10 | - id: check-json 11 | - id: check-added-large-files 12 | - repo: https://github.com/astral-sh/ruff-pre-commit 13 | rev: v0.6.1 14 | hooks: 15 | - id: ruff 16 | - repo: https://github.com/psf/black-pre-commit-mirror 17 | rev: 24.8.0 18 | hooks: 19 | - id: black 20 | language_version: python3.12 21 | - repo: https://github.com/pycqa/isort 22 | rev: 5.13.2 23 | hooks: 24 | - id: isort 25 | - repo: https://github.com/compilerla/conventional-pre-commit 26 | rev: v3.4.0 27 | hooks: 28 | - id: conventional-pre-commit 29 | stages: [commit-msg] 30 | args: [--strict] 31 | -------------------------------------------------------------------------------- /.releaserc.yaml: -------------------------------------------------------------------------------- 1 | plugins: 2 | - '@semantic-release/commit-analyzer': 3 | preset: "angular" 4 | releaseRules: 5 | - type: "feat" 6 | release: "minor" 7 | - type: "fix" 8 | release: "patch" 9 | - type: "perf" 10 | release: "patch" 11 | - type: "chore" 12 | release: "patch" 13 | - type: "docs" 14 | release: "patch" 15 | - type: "revert" 16 | release: "patch" 17 | - breaking: true 18 | release: "major" 19 | - '@semantic-release/release-notes-generator' 20 | - '@semantic-release/changelog' 21 | - '@semantic-release/github' 22 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | ARG PREZ_VERSION 2 | ARG PYTHON_VERSION=3.13 3 | ARG POETRY_VERSION=2.1.2 4 | ARG ALPINE_VERSION=3.21 5 | ARG VIRTUAL_ENV=/opt/venv 6 | 7 | # 8 | # Base 9 | # 10 | FROM python:${PYTHON_VERSION}-alpine${ALPINE_VERSION} AS base 11 | ARG POETRY_VERSION 12 | ARG VIRTUAL_ENV 13 | ENV VIRTUAL_ENV=${VIRTUAL_ENV} \ 14 | POETRY_VIRTUALENVS_CREATE=false \ 15 | 
PATH=${VIRTUAL_ENV}/bin:/root/.local/bin:${PATH} 16 | 17 | RUN apk add --no-cache \ 18 | bash \ 19 | pipx \ 20 | git 21 | 22 | RUN pipx install poetry==${POETRY_VERSION} 23 | 24 | WORKDIR /app 25 | 26 | COPY . . 27 | 28 | RUN poetry build 29 | RUN python3 -m venv --system-site-packages ${VIRTUAL_ENV} 30 | RUN ${VIRTUAL_ENV}/bin/pip3 install --no-cache-dir dist/*.whl 31 | RUN ${VIRTUAL_ENV}/bin/pip3 install uvicorn 32 | 33 | # 34 | # Final 35 | # 36 | FROM python:${PYTHON_VERSION}-alpine${ALPINE_VERSION} AS final 37 | 38 | ARG PREZ_VERSION 39 | ENV PREZ_VERSION=${PREZ_VERSION} 40 | ARG VIRTUAL_ENV 41 | ENV VIRTUAL_ENV=${VIRTUAL_ENV} \ 42 | PATH=${VIRTUAL_ENV}/bin:/root/.local/bin:${PATH} 43 | ENV APP_ROOT_PATH='' 44 | 45 | COPY --from=base ${VIRTUAL_ENV} ${VIRTUAL_ENV} 46 | 47 | RUN apk update && \ 48 | apk upgrade --no-cache && \ 49 | apk add --no-cache 50 | 51 | 52 | WORKDIR /app 53 | # prez module is already built as a package and installed in $VIRTUAL_ENV as a library 54 | COPY main.py pyproject.toml ./ 55 | 56 | ENTRYPOINT uvicorn prez.app:assemble_app --factory \ 57 | --host=${HOST:-0.0.0.0} \ 58 | --port=${PORT:-8000} \ 59 | $( [ "$(echo "$PROXY_HEADERS" | tr '[:upper:]' '[:lower:]')" = "true" ] || [ "$PROXY_HEADERS" = "1" ] && echo "--proxy-headers" ) \ 60 | --forwarded-allow-ips=${FORWARDED_ALLOW_IPS:-127.0.0.1} \ 61 | --root-path "${APP_ROOT_PATH}" 62 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | 3 | Copyright (c) 2022, RDFLib Team 4 | All rights reserved. 5 | 6 | Redistribution and use in source and binary forms, with or without 7 | modification, are permitted provided that the following conditions are met: 8 | 9 | 1. Redistributions of source code must retain the above copyright notice, this 10 | list of conditions and the following disclaimer. 11 | 12 | 2. 
Redistributions in binary form must reproduce the above copyright notice, 13 | this list of conditions and the following disclaimer in the documentation 14 | and/or other materials provided with the distribution. 15 | 16 | 3. Neither the name of the copyright holder nor the names of its 17 | contributors may be used to endorse or promote products derived from 18 | this software without specific prior written permission. 19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 23 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 30 | -------------------------------------------------------------------------------- /azure/.funcignore: -------------------------------------------------------------------------------- 1 | .vscode/ 2 | .venv/ 3 | .idea/ 4 | __pycache__/ 5 | __pycache__ 6 | local.settings.json 7 | 8 | -------------------------------------------------------------------------------- /azure/README.md: -------------------------------------------------------------------------------- 1 | # Prez Azure Function-App deployment files 2 | 3 | This directory contains the files required to build and start or publish Prez as an Azure Function-App, as well as a Dockerfile that 4 | can be used to build a container image for deploying the app as an Azure Container App. 
5 | 6 | ## Publishing 7 | There is a publish_or_start.sh script that can be used to either build and run the function app locally, or publish the app to Azure. 8 | To call it, make sure you are not in the "azure" directory, instead run the script from the root of the project. 9 | 10 | ```bash 11 | ./azure/publish_or_start.sh start|publish --extra-options 12 | ``` 13 | The FunctionAppName is required for publishing only, and is the name of the Azure Function-App that you want to publish to. 14 | Note, the FunctionAppName must be the second argument to the script, after any optional arguments. 15 | 16 | This script will perform the following steps: 17 | 1. Create a ./build directory 18 | 2. Copy the required azure function files from the ./azure directory into the ./build directory 19 | * ./azure/function_app.py 20 | * ./azure/patched_asgi_function_wrapper.py 21 | * ./azure/host.json 22 | * ./azure/.funcignore 23 | 3. Copy the local prez module source code into the ./build directory 24 | 4. Copy the .env file into the ./build directory if it exists 25 | 5. Copy the pyproject.toml and poetry.lock files into the ./build directory 26 | 6. Generate the requirements.txt file using poetry 27 | 7. Start the app locally, or publish the app to the Azure Function-App (using remote build) 28 | 29 | **extra-options** can be used to pass additional arguments to the azure publish command. (Eg, the `--subscription` argument) 30 | 31 | _Note:_ the script automatically adds the `--build remote` argument to the publish command, you don't need to specify it. 32 | 33 | ## Building the Docker container image 34 | 35 | To build the Docker container image, run the following command from the root of the project: 36 | 37 | ```bash 38 | docker build -t -f azure/azure_functions.Dockerfile . 
39 | ``` 40 | -------------------------------------------------------------------------------- /azure/azure_functions.Dockerfile: -------------------------------------------------------------------------------- 1 | ARG PREZ_VERSION 2 | ARG POETRY_VERSION=1.8.3 3 | 4 | # 5 | # Base 6 | # 7 | # To enable ssh & remote debugging on app service change the base image to the one below 8 | # FROM mcr.microsoft.com/azure-functions/python:4-python3.11-appservice 9 | FROM mcr.microsoft.com/azure-functions/python:4-python3.11 as base 10 | ARG POETRY_VERSION 11 | 12 | RUN DEBIAN_FRONTEND=noninteractive apt-get -qq update && \ 13 | DEBIAN_FRONTEND=noninteractive apt-get -qq install -y \ 14 | bash \ 15 | python3-pip \ 16 | python3-dev 17 | 18 | RUN pip3 install poetry==${POETRY_VERSION} 19 | RUN mkdir -p /build 20 | WORKDIR /build 21 | 22 | COPY .. . 23 | RUN poetry build 24 | 25 | RUN mkdir -p /home/site/wwwroot 26 | ENV VIRTUAL_ENV=/home/site/wwwroot/.python_packages \ 27 | POETRY_VIRTUALENVS_CREATE=false 28 | ENV PATH=${VIRTUAL_ENV}/bin:${PATH} 29 | RUN python3 -m venv --system-site-packages ${VIRTUAL_ENV} 30 | RUN ${VIRTUAL_ENV}/bin/pip3 install --no-cache-dir ./dist/*.whl "azure-functions>=1.19,<2" 31 | 32 | # 33 | # Final 34 | # 35 | FROM mcr.microsoft.com/azure-functions/python:4-python3.11 as final 36 | 37 | ARG PREZ_VERSION 38 | ENV PREZ_VERSION=${PREZ_VERSION} 39 | ENV VIRTUAL_ENV=/home/site/wwwroot/.python_packages \ 40 | POETRY_VIRTUALENVS_CREATE=false 41 | ENV PATH=${VIRTUAL_ENV}/bin:/root/.local/bin:${PATH} 42 | 43 | # The base container installed some files in system-site-packages location, so copy those 44 | COPY --from=base /usr/local/lib/python3.11/site-packages /usr/local/lib/python3.11/site-packages 45 | RUN mkdir -p /home/site/wwwroot 46 | # Copy the pre-built virtual env from the base container 47 | COPY --from=base ${VIRTUAL_ENV} ${VIRTUAL_ENV} 48 | 49 | RUN DEBIAN_FRONTEND=noninteractive apt-get -qq update && \ 50 | DEBIAN_FRONTEND=noninteractive apt-get 
-qq upgrade -y && \ 51 | DEBIAN_FRONTEND=noninteractive apt-get -qq install -y \ 52 | bash 53 | 54 | WORKDIR /home/site/wwwroot 55 | COPY pyproject.toml poetry.lock azure/host.json azure/function_app.py azure/patched_asgi_function_wrapper.py ./ 56 | 57 | ENTRYPOINT [] 58 | CMD ["/opt/startup/start_nonappservice.sh"] 59 | -------------------------------------------------------------------------------- /azure/function_app.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | import pathlib 4 | import logging 5 | 6 | cwd = pathlib.Path(__file__).parent 7 | if cwd.name == "azure": 8 | # We are running from the repo source directory 9 | # assume running locally, we need to add the parent 10 | # directory to the Python Path 11 | sys.path.append(str(cwd.parent)) 12 | 13 | import azure.functions as func 14 | 15 | try: 16 | from prez.app import assemble_app 17 | except ImportError as e: 18 | logging.exception("Cannot import prez") 19 | assemble_app = None 20 | 21 | 22 | if assemble_app is None: 23 | raise RuntimeError( 24 | "Cannot import prez in the Azure function app. Check requirements.txt and pyproject.toml." 
25 | ) 26 | from patched_asgi_function_wrapper import AsgiFunctionApp 27 | 28 | # This is the base URL path that Prez routes will stem from 29 | # must _start_ in a slash, but _not end_ in slash, eg: /prez 30 | env_root_path: str = os.getenv("FUNCTION_APP_ROOT_PATH", "") 31 | ROOT_PATH: str = env_root_path.strip() 32 | # Note, must be _empty_ string for no path prefix (not "/") 33 | if ROOT_PATH == "/": 34 | ROOT_PATH = "" 35 | env_auth_level: str = os.getenv("FUNCTION_APP_AUTH_LEVEL", "FUNCTION") 36 | env_auth_level = env_auth_level.strip().upper() 37 | if env_auth_level == "ADMIN": 38 | auth_level: func.AuthLevel = func.AuthLevel.ADMIN 39 | elif env_auth_level == "ANONYMOUS": 40 | auth_level = func.AuthLevel.ANONYMOUS 41 | else: 42 | auth_level = func.AuthLevel.FUNCTION 43 | 44 | prez_app = assemble_app(root_path=ROOT_PATH) 45 | 46 | app = AsgiFunctionApp(app=prez_app, http_auth_level=auth_level) 47 | 48 | if __name__ == "__main__": 49 | from azure.functions import HttpRequest, Context 50 | import asyncio 51 | 52 | req = HttpRequest("GET", "/catalogs", headers={}, body=b"") 53 | context = dict() 54 | loop = asyncio.get_event_loop() 55 | fns = app.get_functions() 56 | assert len(fns) == 1 57 | fn_def = fns[0] 58 | fn = fn_def.get_user_function() 59 | task = fn(req, context) 60 | resp = loop.run_until_complete(task) 61 | print(resp) 62 | -------------------------------------------------------------------------------- /azure/host.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "2.0", 3 | "logging": { 4 | "applicationInsights": { 5 | "samplingSettings": { 6 | "isEnabled": true, 7 | "excludedTypes": "Request" 8 | } 9 | } 10 | }, 11 | "extensions": { 12 | "http": { 13 | "routePrefix": "" 14 | } 15 | }, 16 | "extensionBundle": { 17 | "id": "Microsoft.Azure.Functions.ExtensionBundle", 18 | "version": "[4.*, 5.0.0)" 19 | } 20 | } 21 | 
-------------------------------------------------------------------------------- /azure/local.settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "IsEncrypted": false, 3 | "Values": { 4 | "FUNCTIONS_WORKER_RUNTIME": "python", 5 | "AzureWebJobsFeatureFlags": "EnableWorkerIndexing", 6 | "AzureWebJobsStorage": "", 7 | "UseDevelopmentStorage": true 8 | }, 9 | "Host": { 10 | "LocalHttpPort": 7071, 11 | "CORS": "*", 12 | "CORSCredentials": false 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /azure/patched_asgi_function_wrapper.py: -------------------------------------------------------------------------------- 1 | from typing import Union, TYPE_CHECKING 2 | from copy import copy 3 | import azure.functions as func 4 | from azure.functions.decorators.http import HttpMethod 5 | from azure.functions._http_asgi import AsgiMiddleware, AsgiRequest, AsgiResponse 6 | from azure.functions._http_wsgi import WsgiMiddleware 7 | from azure.functions._abc import Context 8 | from azure.functions import HttpRequest 9 | 10 | 11 | # ------------------- 12 | # Create a patched AsgiFunctionApp to fix the ASGI scope state issue 13 | # ------------------- 14 | # See https://github.com/Azure/azure-functions-python-worker/issues/1566 15 | class MyAsgiMiddleware(AsgiMiddleware): 16 | async def _handle_async(self, req, context): 17 | asgi_request = AsgiRequest(req, context) 18 | scope = asgi_request.to_asgi_http_scope() 19 | # shallow copy the state as-per the ASGI spec 20 | scope["state"] = copy( 21 | self.state 22 | ) # <-- this is the patch, add the state to the scope 23 | asgi_response = await AsgiResponse.from_app(self._app, scope, req.get_body()) 24 | return asgi_response.to_func_response() 25 | 26 | 27 | # ------------------- 28 | # Create a patched AsgiFunctionApp to fix the double-slash route issue 29 | # ------------------- 30 | # See 
https://github.com/Azure/azure-functions-python-worker/issues/1310 31 | class AsgiFunctionApp(func.AsgiFunctionApp): 32 | def __init__(self, app, http_auth_level): 33 | super(AsgiFunctionApp, self).__init__(None, http_auth_level=http_auth_level) 34 | self._function_builders.clear() 35 | self.middleware = MyAsgiMiddleware(app) 36 | self._add_http_app(self.middleware) 37 | self.startup_task_done = False 38 | 39 | def _add_http_app( 40 | self, http_middleware: Union[AsgiMiddleware, WsgiMiddleware] 41 | ) -> None: 42 | """Add an Asgi app integrated http function. 43 | 44 | :param http_middleware: :class:`WsgiMiddleware` 45 | or class:`AsgiMiddleware` instance. 46 | 47 | :return: None 48 | """ 49 | 50 | asgi_middleware: AsgiMiddleware = http_middleware 51 | 52 | @self.http_type(http_type="asgi") 53 | @self.route( 54 | methods=(method for method in HttpMethod), 55 | auth_level=self.auth_level, 56 | route="{*route}", # <-- this is the patch, removed the leading slash from the route 57 | ) 58 | async def http_app_func(req: HttpRequest, context: Context): 59 | if not self.startup_task_done: 60 | success = await asgi_middleware.notify_startup() 61 | if not success: 62 | raise RuntimeError("ASGI middleware startup failed.") 63 | self.startup_task_done = True 64 | 65 | return await asgi_middleware.handle_async(req, context) 66 | -------------------------------------------------------------------------------- /azure/publish_or_start.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | DEFAULT_FUNC=$(which func) 3 | DEFAULT_POETRY=$(which poetry) 4 | FUNC_CLI=${FUNC_CLI:-"$DEFAULT_FUNC"} 5 | POETRY=${POETRY:-"$DEFAULT_POETRY"} 6 | 7 | if [[ "$#" -lt 1 ]] ; then 8 | echo "Usage: $0 [optional arguments] [FunctionAppName]" 9 | echo " start: Run the function app locally (FunctionAppName not required)" 10 | echo " publish: Publish the function app to Azure (FunctionAppName required)" 11 | exit 1 12 | fi 13 | 14 | # Extract the first 
argument as the ACTION 15 | ACTION="$1" 16 | shift 17 | 18 | CWD="$(pwd)" 19 | BASE_CWD="${CWD##*/}" 20 | if [[ "$BASE_CWD" = "azure" ]] ; then 21 | echo "Do not run this script from within the azure directory" 22 | echo "Run from the root of the repo" 23 | echo "eg: ./azure/publish_or_start.sh start" 24 | exit 1 25 | fi 26 | 27 | if [[ -z "$FUNC_CLI" ]] ; then 28 | echo "func cli not found, specify the location using env FUNC_CLI" 29 | exit 1 30 | fi 31 | 32 | if [[ -z "$POETRY" ]] ; then 33 | echo "poetry not found. Local poetry>=1.8.2 is required to generate the requirements.txt file" 34 | echo "specify the location using env POETRY" 35 | exit 1 36 | fi 37 | 38 | mkdir -p build 39 | rm -rf build/* 40 | cp ./azure/function_app.py ./azure/patched_asgi_function_wrapper.py ./azure/.funcignore ./azure/host.json ./azure/local.settings.json build/ 41 | cp ./pyproject.toml ./poetry.lock ./build 42 | cp -r ./prez ./build 43 | if [[ -f "./.env" ]] ; then 44 | cp ./.env ./build 45 | fi 46 | cd ./build 47 | "$POETRY" export --without-hashes --format=requirements.txt > requirements.txt 48 | echo "generated requirements.txt" 49 | cat ./requirements.txt 50 | 51 | if [[ "$ACTION" == "publish" ]] ; then 52 | if [[ "$#" -lt 1 ]] ; then 53 | echo "Error: FunctionAppName is required for publish action" 54 | exit 1 55 | fi 56 | FUNC_APP_NAME="$1" 57 | shift 58 | "$FUNC_CLI" azure functionapp publish "$FUNC_APP_NAME" --build remote "$@" 59 | elif [[ "$ACTION" == "start" ]] ; then 60 | "$FUNC_CLI" start "$@" 61 | else 62 | echo "Invalid action. Use 'start' for local testing or 'publish' for publishing to Azure." 63 | exit 1 64 | fi 65 | 66 | cd .. 67 | echo "You can now delete the build directory if you wish." -------------------------------------------------------------------------------- /changelog.md: -------------------------------------------------------------------------------- 1 | ## Changes for 2023-09-27 2 | 3 | ### Features 4 | 5 | - Default search added. 
This is a simple search that will search for terms across all annotation predicates Prez has configured. By default in prez/config.py these are set to: 6 | - label_predicates = [SKOS.prefLabel, DCTERMS.title, RDFS.label, SDO.name] 7 | - description_predicates = [SKOS.definition, DCTERMS.description, SDO.description] 8 | - provenance_predicates = [DCTERMS.provenance] 9 | These are configurable via environment variables using the Pydantic BaseSettings functionality but will need to be properly escaped as they are a list. 10 | 11 | More detail on adding filters to search is provided in the readme. 12 | - Timeout for httpx AsyncClient and Client instances is set on the shared instance to 30s. Previously this was set in some individual calls resulting in inconsistent behaviour, as the default is otherwise 5s. 13 | - Updated `purge-tbox-cache` endpoint functionality. This reflects that prez now 14 | includes a number of common ontologies by default (prez/reference_data/annotations), and on startup will load 15 | annotation triples (e.g. x rdfs:label y) from these. As such, the tbox or annotation cache is no longer completely 16 | purged but can be reset to this default state instead. 
17 | -------------------------------------------------------------------------------- /commitlint.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { extends: ["@commitlint/config-conventional"] }; -------------------------------------------------------------------------------- /demo/docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: "3" 2 | services: 3 | 4 | fuseki: 5 | user: root 6 | image: "ghcr.io/zazuko/fuseki-geosparql:latest" 7 | ports: 8 | - "3030:3030" 9 | volumes: 10 | - type: bind 11 | source: ${PWD}/../dev/dev-config.ttl 12 | target: /fuseki/config.ttl 13 | environment: 14 | ADMIN_PASSWORD: pw 15 | healthcheck: 16 | test: ["CMD-SHELL", "wget -qO- http://localhost:3030 || exit 1"] 17 | interval: 5s 18 | timeout: 10s 19 | retries: 3 20 | start_period: 20s 21 | 22 | prez: 23 | image: "ghcr.io/rdflib/prez:latest" 24 | ports: 25 | - "8000:8000" 26 | environment: 27 | SPARQL_ENDPOINT: 'http://fuseki:3030/myds' 28 | depends_on: 29 | fuseki: 30 | condition: service_healthy 31 | 32 | prez-ui: 33 | image: "ghcr.io/rdflib/prez-ui:latest" 34 | ports: 35 | - "81:80" 36 | environment: 37 | VITE_API_BASE_URL: 'http://localhost:8000' 38 | depends_on: 39 | fuseki: 40 | condition: service_healthy 41 | -------------------------------------------------------------------------------- /demo/prez-v4-backend/config.ttl: -------------------------------------------------------------------------------- 1 | ## Licensed under the terms of http://www.apache.org/licenses/LICENSE-2.0 2 | 3 | PREFIX : <#> 4 | PREFIX fuseki: 5 | PREFIX rdf: 6 | PREFIX rdfs: 7 | PREFIX ja: 8 | PREFIX geosparql: 9 | 10 | [] rdf:type fuseki:Server ; 11 | fuseki:services ( 12 | :service 13 | ) . 
14 | 15 | :service rdf:type fuseki:Service ; 16 | fuseki:name "dataset" ; 17 | 18 | fuseki:endpoint [ fuseki:operation fuseki:query ; ] ; 19 | fuseki:endpoint [ 20 | fuseki:operation fuseki:query ; 21 | fuseki:name "sparql" 22 | ]; 23 | fuseki:endpoint [ 24 | fuseki:operation fuseki:query ; 25 | fuseki:name "query" 26 | ] ; 27 | fuseki:endpoint [ 28 | fuseki:operation fuseki:gsp-r ; 29 | fuseki:name "get" 30 | ] ; 31 | fuseki:dataset <#geo_ds> ; 32 | . 33 | 34 | <#geo_ds> rdf:type geosparql:GeosparqlDataset ; 35 | geosparql:dataset :dataset ; 36 | geosparql:inference true ; 37 | geosparql:queryRewrite true ; 38 | geosparql:indexEnabled true ; 39 | geosparql:applyDefaultGeometry true ; 40 | . 41 | 42 | # Transactional in-memory dataset. 43 | :dataset rdf:type ja:MemoryDataset ; 44 | ## Optional load with data on start-up 45 | ja:data "/rdf/catprez.ttl"; 46 | ja:data "/rdf/vocprez.ttl"; 47 | ja:data "/rdf/catprez.ttl"; 48 | ja:data "/rdf/sandgate.ttl"; 49 | ja:data "/rdf/object_catalog_bblocks_catalog.ttl"; 50 | ja:data "/rdf/object_vocab_api_bblocks.ttl"; 51 | ja:data "/rdf/object_vocab_datatype_bblocks.ttl"; 52 | ja:data "/rdf/object_vocab_parameter_bblocks.ttl"; 53 | ja:data "/rdf/object_vocab_schema_bblocks.ttl"; 54 | . 
55 | -------------------------------------------------------------------------------- /demo/prez-v4-backend/docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: "3" 2 | services: 3 | 4 | fuseki: 5 | image: "ghcr.io/zazuko/fuseki-geosparql:v3.3.1" 6 | ports: 7 | - "3030:3030" 8 | volumes: 9 | - type: bind 10 | source: config.ttl 11 | target: /fuseki/config.ttl 12 | - type: bind 13 | source: ../../test_data 14 | target: /rdf 15 | environment: 16 | ADMIN_PASSWORD: pw 17 | healthcheck: 18 | test: ["CMD-SHELL", "wget -qO- http://localhost:3030 || exit 1"] 19 | interval: 5s 20 | timeout: 10s 21 | retries: 3 22 | 23 | # prez: 24 | # build: 25 | # context: ../../ 26 | # dockerfile: ./Dockerfile 27 | # ports: 28 | # - "8000:8000" 29 | # environment: 30 | # SPARQL_ENDPOINT: 'http://fuseki:3030/dataset' 31 | # depends_on: 32 | # fuseki: 33 | # condition: service_healthy 34 | -------------------------------------------------------------------------------- /demo/prez-v4-backend/readme.md: -------------------------------------------------------------------------------- 1 | This directory contains a docker compose file which will run the Prez backend and Fuseki GeoSPARQL together with some sample data. 2 | 3 | NB any data added to the test_data folder must also be specified in the fuseki config.ttl file. 
def setup():
    """Load every Turtle file in ./test_data into the local Fuseki dataset.

    Each file is POSTed to the graph-store endpoint at
    http://localhost:3030/myds, targeting the graph
    http://exampledatagraph. Progress and per-file errors are printed.
    """
    url = "http://localhost:3030/myds"
    headers = {}

    # Get all TTL files from test_data directory
    ttl_files = glob.glob("test_data/*.ttl")

    # Process each file sequentially
    for i, file_path in enumerate(ttl_files, 1):
        file_name = os.path.basename(file_path)
        print(f"Loading file {i}/{len(ttl_files)}: {file_name}")

        # Open via a context manager so the handle is closed even if the
        # request fails (the original leaked one open file per upload).
        with open(file_path, "rb") as fh:
            files = [
                (
                    "file",
                    (
                        file_name,
                        fh,
                        "application/octet-stream",
                    ),
                )
            ]

            response = requests.request(
                "POST",
                url,
                headers=headers,
                data={},
                files=files,
                params={"graph": "http://exampledatagraph"},
            )

        if response.status_code != 200:
            print(f"Error loading {file_name}: {response.status_code}")
            print(response.text)
        else:
            print(f"Successfully loaded {file_name}")
153.0621623, 33 | -27.3095187 34 | ], 35 | [ 36 | 153.0617868, 37 | -27.3098333 38 | ], 39 | [ 40 | 153.0606281, 41 | -27.3096141 42 | ] 43 | ] 44 | ] 45 | } 46 | ] 47 | } 48 | -------------------------------------------------------------------------------- /docs/examples/cql/geo_contains_filter.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "and", 3 | "args": [ 4 | { 5 | "op": "s_contains", 6 | "args": [ 7 | { 8 | "property": "geometry" 9 | }, 10 | { 11 | "type": "Polygon", 12 | "coordinates": [ 13 | [ 14 | [ 15 | 153.0606281, 16 | -27.3096141 17 | ], 18 | [ 19 | 153.0604564, 20 | -27.3105197 21 | ], 22 | [ 23 | 153.0600487, 24 | -27.3109296 25 | ], 26 | [ 27 | 153.0607354, 28 | -27.3127218 29 | ], 30 | [ 31 | 153.063203, 32 | -27.3121212 33 | ], 34 | [ 35 | 153.0621623, 36 | -27.3095187 37 | ], 38 | [ 39 | 153.0617868, 40 | -27.3098333 41 | ], 42 | [ 43 | 153.0606281, 44 | -27.3096141 45 | ] 46 | ] 47 | ] 48 | } 49 | ] 50 | }, 51 | { 52 | "op": "=", 53 | "args": [ 54 | { 55 | "property": "http://www.w3.org/2000/01/rdf-schema#label" 56 | }, 57 | "Sandgate Respite Centre Area" 58 | ] 59 | } 60 | ] 61 | } 62 | -------------------------------------------------------------------------------- /docs/examples/cql/geo_contains_inverse.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "and", 3 | "args": [ 4 | { 5 | "op": "s_contains", 6 | "args": [ 7 | { 8 | "property": "geometry" 9 | }, 10 | { 11 | "type": "Polygon", 12 | "coordinates": [ 13 | [ 14 | [ 15 | 153.0606281, 16 | -27.3096141 17 | ], 18 | [ 19 | 153.0604564, 20 | -27.3105197 21 | ], 22 | [ 23 | 153.0600487, 24 | -27.3109296 25 | ], 26 | [ 27 | 153.0607354, 28 | -27.3127218 29 | ], 30 | [ 31 | 153.063203, 32 | -27.3121212 33 | ], 34 | [ 35 | 153.0621623, 36 | -27.3095187 37 | ], 38 | [ 39 | 153.0617868, 40 | -27.3098333 41 | ], 42 | [ 43 | 153.0606281, 44 | -27.3096141 45 | ] 46 | ] 47 | ] 48 | } 49 | ] 
50 | }, 51 | { 52 | "op": "=", 53 | "args": [ 54 | { 55 | "property": "http://www.w3.org/2000/01/rdf-schema#member" 56 | }, 57 | "http://example.com/datasets/sandgate/facilities" 58 | ] 59 | } 60 | ] 61 | } 62 | -------------------------------------------------------------------------------- /docs/examples/cql/geo_contains_like.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "and", 3 | "args": [ 4 | { 5 | "op": "s_contains", 6 | "args": [ 7 | { 8 | "property": "geometry" 9 | }, 10 | { 11 | "type": "Polygon", 12 | "coordinates": [ 13 | [ 14 | [ 15 | 153.0606281, 16 | -27.3096141 17 | ], 18 | [ 19 | 153.0604564, 20 | -27.3105197 21 | ], 22 | [ 23 | 153.0600487, 24 | -27.3109296 25 | ], 26 | [ 27 | 153.0607354, 28 | -27.3127218 29 | ], 30 | [ 31 | 153.063203, 32 | -27.3121212 33 | ], 34 | [ 35 | 153.0621623, 36 | -27.3095187 37 | ], 38 | [ 39 | 153.0617868, 40 | -27.3098333 41 | ], 42 | [ 43 | 153.0606281, 44 | -27.3096141 45 | ] 46 | ] 47 | ] 48 | } 49 | ] 50 | }, 51 | { 52 | "op": "like", 53 | "args": [ 54 | { 55 | "property": "http://www.w3.org/2000/01/rdf-schema#label" 56 | }, 57 | "%Sandgate%" 58 | ] 59 | } 60 | ] 61 | } 62 | -------------------------------------------------------------------------------- /docs/examples/cql/geo_crosses.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "s_crosses", 3 | "args": [ 4 | { "property": "geometry" }, 5 | { 6 | "type": "LineString", 7 | "coordinates": [ [ 172.03086, 1.5 ], 8 | [ 1.1, -90.0 ], 9 | [ -159.757695, 0.99999 ], 10 | [ -180.0, 0.5 ], 11 | [ -12.111235, 81.336403 ], 12 | [ -0.5, 64.43958 ], 13 | [ 0.0, 81.991815 ], 14 | [ -155.93831, 90.0 ] ] 15 | } 16 | ] 17 | } 18 | -------------------------------------------------------------------------------- /docs/examples/cql/geo_disjoint.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "s_disjoint", 3 | "args": [ 
4 | { 5 | "property": "geometry" 6 | }, 7 | { 8 | "type": "Polygon", 9 | "coordinates": [ 10 | [ 11 | [ 12 | 153.03375, 13 | -27.42 14 | ], 15 | [ 16 | 153.16, 17 | -27.3217012 18 | ], 19 | [ 20 | 153.03375, 21 | -27.2234024 22 | ], 23 | [ 24 | 152.9075, 25 | -27.3217012 26 | ], 27 | [ 28 | 153.03375, 29 | -27.42 30 | ] 31 | ] 32 | ] 33 | } 34 | ] 35 | } 36 | -------------------------------------------------------------------------------- /docs/examples/cql/geo_equals.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "s_equals", 3 | "args": [ 4 | { 5 | "property": "geometry" 6 | }, 7 | { 8 | "type": "Polygon", 9 | "coordinates": [ 10 | [ 11 | [ 12 | 153.0606281, 13 | -27.3096141 14 | ], 15 | [ 16 | 153.0604564, 17 | -27.3105197 18 | ], 19 | [ 20 | 153.0600487, 21 | -27.3109296 22 | ], 23 | [ 24 | 153.0607354, 25 | -27.3127218 26 | ], 27 | [ 28 | 153.063203, 29 | -27.3121212 30 | ], 31 | [ 32 | 153.0621623, 33 | -27.3095187 34 | ], 35 | [ 36 | 153.0617868, 37 | -27.3098333 38 | ], 39 | [ 40 | 153.0606281, 41 | -27.3096141 42 | ] 43 | ] 44 | ] 45 | } 46 | ] 47 | } 48 | -------------------------------------------------------------------------------- /docs/examples/cql/geo_intersects.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "s_intersects", 3 | "args": [ 4 | { 5 | "property": "geometry" 6 | }, 7 | { 8 | "type": "Polygon", 9 | "coordinates": [ 10 | [ 11 | [ 12 | 153.03375, 13 | -27.42 14 | ], 15 | [ 16 | 153.16, 17 | -27.3217012 18 | ], 19 | [ 20 | 153.03375, 21 | -27.2234024 22 | ], 23 | [ 24 | 152.9075, 25 | -27.3217012 26 | ], 27 | [ 28 | 153.03375, 29 | -27.42 30 | ] 31 | ] 32 | ] 33 | } 34 | ] 35 | } 36 | -------------------------------------------------------------------------------- /docs/examples/cql/geo_overlaps.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "s_overlaps", 3 | "args": [ 4 | { 5 
| "property": "geometry" 6 | }, 7 | { 8 | "type": "Polygon", 9 | "coordinates": [ 10 | [ 11 | [ 12 | 153.03375, 13 | -27.42 14 | ], 15 | [ 16 | 153.16, 17 | -27.3217012 18 | ], 19 | [ 20 | 153.03375, 21 | -27.2234024 22 | ], 23 | [ 24 | 152.9075, 25 | -27.3217012 26 | ], 27 | [ 28 | 153.03375, 29 | -27.42 30 | ] 31 | ] 32 | ] 33 | } 34 | ] 35 | } 36 | -------------------------------------------------------------------------------- /docs/examples/cql/geo_touches.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "s_touches", 3 | "args": [ 4 | { 5 | "property": "geometry" 6 | }, 7 | { 8 | "type": "Polygon", 9 | "coordinates": [ 10 | [ 11 | [ 12 | 153.03375, 13 | -27.42 14 | ], 15 | [ 16 | 153.16, 17 | -27.3217012 18 | ], 19 | [ 20 | 153.0638169, 21 | -27.2897951 22 | ], 23 | [ 24 | 153.03375, 25 | -27.2234024 26 | ], 27 | [ 28 | 152.9075, 29 | -27.3217012 30 | ], 31 | [ 32 | 153.03375, 33 | -27.42 34 | ] 35 | ] 36 | ] 37 | } 38 | ] 39 | } 40 | -------------------------------------------------------------------------------- /docs/examples/cql/geo_within.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "s_within", 3 | "args": [ 4 | { 5 | "property": "geometry" 6 | }, 7 | { 8 | "type": "Polygon", 9 | "coordinates": [ 10 | [ 11 | [ 12 | 153.03375, 13 | -27.42 14 | ], 15 | [ 16 | 153.16, 17 | -27.3217012 18 | ], 19 | [ 20 | 153.03375, 21 | -27.2234024 22 | ], 23 | [ 24 | 152.9075, 25 | -27.3217012 26 | ], 27 | [ 28 | 153.03375, 29 | -27.42 30 | ] 31 | ] 32 | ] 33 | } 34 | ] 35 | } 36 | -------------------------------------------------------------------------------- /docs/examples/custom_endpoints/example_alternate_classes.ttl: -------------------------------------------------------------------------------- 1 | @prefix ex: . 2 | @prefix ont: . 3 | @prefix rdf: . 4 | @prefix rdfs: . 5 | @prefix schema: . 6 | @prefix sh: . 7 | @prefix skos: . 8 | @prefix xsd: . 
from os import environ

from dotenv import load_dotenv

if __name__ == "__main__":
    try:
        import uvicorn
    except ImportError:
        # Fixed: the original message left the quote around the suggested
        # command unclosed ('poetry install --extras "server".).
        print(
            "Error: Uvicorn is not installed. "
            'Install it with \'poetry install --extras "server"\'.'
        )
        import sys

        sys.exit(1)

    # Load config from .env file.
    # See .env-template and prez/config.py for usage.
    load_dotenv()

    port = int(environ.get("PREZ_DEV_SERVER_PORT", 8000))

    # Development server: auto-reload enabled and X-Forwarded-* headers
    # trusted from any proxy.
    uvicorn.run(
        "prez.app:assemble_app",
        factory=True,
        port=port,
        reload=True,
        proxy_headers=True,
        forwarded_allow_ips="*",
    )
from aiocache import caches
from pyoxigraph.pyoxigraph import Store
from rdflib import ConjunctiveGraph, Dataset, Graph

# Process-wide, module-level caches shared across the application.

# Cached RDF graphs, each bound to the prez namespace where annotated
# output uses it.
profiles_graph_cache = Graph()
profiles_graph_cache.bind("prez", "https://prez.dev/")

endpoints_graph_cache = ConjunctiveGraph()
endpoints_graph_cache.bind("prez", "https://prez.dev/")

prez_system_graph = Graph()
prez_system_graph.bind("prez", "https://prez.dev/")

# Graph pre-bound with rdflib's default namespace prefixes.
prefix_graph = Graph(bind_namespaces="rdflib")

# TODO can probably merge counts graph
counts_graph = Graph()

links_ids_graph_cache = Dataset()
links_ids_graph_cache.bind("prez", "https://prez.dev/")

# pyoxigraph stores: main data, system data, and annotations.
store = Store()

system_store = Store()

annotations_store = Store()

# Queryable properties registry (populated elsewhere at runtime).
queryable_props = {}

# rdflib Graph backed by the Oxigraph store plugin.
oxrdflib_store = Graph(store="Oxigraph")

# Three named in-memory aiocache caches, all pickle-serialized.
caches.set_config(
    {
        "default": {
            "cache": "aiocache.SimpleMemoryCache",
            "serializer": {"class": "aiocache.serializers.PickleSerializer"},
        },
        "curies": {
            "cache": "aiocache.SimpleMemoryCache",
            "serializer": {"class": "aiocache.serializers.PickleSerializer"},
        },
        "classes": {
            "cache": "aiocache.SimpleMemoryCache",
            "serializer": {"class": "aiocache.serializers.PickleSerializer"},
        },
    }
)
from enum import Enum


class NonAnnotatedRDFMediaType(Enum):
    """Plain RDF serialization media types (no prez annotations)."""

    LD_JSON = "application/ld+json"
    RDF_XML = "application/rdf+xml"
    TURTLE = "text/turtle"
    N_TRIPLES = "application/n-triples"


class AnnotatedRDFMediaType(Enum):
    """"anot+" variants of the RDF media types, carrying annotations."""

    ANOT_LD_JSON = "application/anot+ld+json"
    ANOT_RDF_XML = "application/anot+rdf+xml"
    ANOT_TURTLE = "text/anot+turtle"
    ANOT_N_TRIPLES = "application/anot+n-triples"


class SPARQLQueryMediaType(Enum):
    """Media type for a raw SPARQL query body."""

    SPARQL_QUERY = "application/sparql-query"


class JSONMediaType(Enum):
    """Plain JSON media type."""

    JSON = "application/json"


class GeoJSONMediaType(Enum):
    """GeoJSON media type."""

    GEOJSON = "application/geo+json"


class OrderByDirectionEnum(Enum):
    """Sort direction keywords used in ORDER BY clauses."""

    ASC = "ASC"
    DESC = "DESC"


class FilterLangEnum(Enum):
    """Supported filter expression languages."""

    CQL_JSON = "cql2-json"


class SearchMethod(str, Enum):
    """Available search backends (str-valued for direct comparison)."""

    DEFAULT = "default"
    FTS_FUSEKI = "fts_fuseki"
20 | """ 21 | 22 | def __init__(self, uri: URIRef = None, curie: str = None): 23 | if uri: 24 | self.message = ( 25 | f'URI "{uri}" not found at endpoint {settings.sparql_endpoint}.' 26 | ) 27 | if curie: 28 | self.message = f'URI for curie "{curie}" not found at endpoint {settings.sparql_endpoint}.' 29 | super().__init__(self.message) 30 | 31 | 32 | class PrefixNotBoundException(Exception): 33 | """ 34 | Raised when a requested prefix is not bound in Prez's namespace manager. 35 | """ 36 | 37 | def __init__(self, prefix: str): 38 | self.message = ( 39 | f'Prefix "{prefix}" not bound to a namespace in Prez.' 40 | ) 41 | super().__init__(self.message) 42 | 43 | 44 | class NoProfilesException(Exception): 45 | """ 46 | Raised when no profiles can be found for a resource. 47 | """ 48 | 49 | def __init__(self, classes: list): 50 | self.message = ( 51 | f"No profiles and/or mediatypes could be found to render the resource. The resource class(es) " 52 | f"for which a profile was searched was/were: {', '.join(klass for klass in classes)}" 53 | ) 54 | super().__init__(self.message) 55 | 56 | 57 | class InvalidSPARQLQueryException(Exception): 58 | """ 59 | Raised when a SPARQL query is invalid. 60 | """ 61 | 62 | def __init__(self, error: str): 63 | self.message = f"Invalid SPARQL query: {error}" 64 | super().__init__(self.message) 65 | 66 | 67 | class NoEndpointNodeshapeException(Exception): 68 | """ 69 | Raised when no endpoint nodeshape can be identified for the given classes/relations. 70 | """ 71 | 72 | def __init__(self, ep_uri: str, hierarchy_level: int): 73 | self.message = ( 74 | f"No relevant nodeshape found for the given endpoint {ep_uri}, hierarchy level " 75 | f"{hierarchy_level}, and parent URI" 76 | ) 77 | super().__init__(self.message) 78 | 79 | 80 | class MissingFilterQueryError(ValueError): 81 | """ 82 | Raised when a filter query is missing. 
83 | """ 84 | def __init__(self, message): 85 | self.message = message 86 | super().__init__(self.message) -------------------------------------------------------------------------------- /prez/middleware.py: -------------------------------------------------------------------------------- 1 | from fastapi import Request 2 | from fastapi.responses import JSONResponse 3 | 4 | 5 | def create_validate_header_middleware(required_header: dict[str, str] | None): 6 | async def validate_header(request: Request, call_next): 7 | if required_header: 8 | header_name, expected_value = next(iter(required_header.items())) 9 | if ( 10 | header_name not in request.headers 11 | or request.headers[header_name] != expected_value 12 | ): 13 | return JSONResponse( # attempted to use Exception and although it was caught it did not propagate 14 | status_code=400, 15 | content={ 16 | "error": "Header Validation Error", 17 | "message": f"Missing or invalid header: {header_name}", 18 | "code": "HEADER_VALIDATION_ERROR", 19 | }, 20 | ) 21 | return await call_next(request) 22 | 23 | return validate_header 24 | -------------------------------------------------------------------------------- /prez/reference_data/annotations/dwc-annotations.ttl: -------------------------------------------------------------------------------- 1 | PREFIX rdfs: 2 | PREFIX schema: 3 | 4 | 5 | rdfs:label "Core terms defined by Darwin Core"@en ; 6 | schema:description "This term list includes all currently valid terms that have been defined in the core Darwin Core namespace dwc:. To comment on this schema, please create a new issue in https://github.com/tdwg/dwc/issues"@en ; 7 | . 8 | 9 | -------------------------------------------------------------------------------- /prez/reference_data/annotations/geojson-annotations.ttl: -------------------------------------------------------------------------------- 1 | PREFIX rdfs: 2 | PREFIX schema: 3 | 4 | 5 | schema:description "A description of the RFC 7946 GeoJSON model. 
See https://github.com/geojson/geojson-ld for vocabulary developments." ; 6 | . 7 | 8 | 9 | rdfs:label "Feature"@en ; 10 | schema:description "See RFC 7946 Section 3.2."@en ; 11 | . 12 | 13 | 14 | rdfs:label "FeatureCollection"@en ; 15 | schema:description "See RFC 7946 Section 3.3."@en ; 16 | . 17 | 18 | 19 | rdfs:label "GeometryCollection"@en ; 20 | schema:description "See RFC 7946 Section 3.1.8."@en ; 21 | . 22 | 23 | 24 | rdfs:label "LineString"@en ; 25 | schema:description "See RFC 7946 Section 3.1.4."@en ; 26 | . 27 | 28 | 29 | rdfs:label "MultiLineString"@en ; 30 | schema:description "See RFC 7946 Section 3.1.5."@en ; 31 | . 32 | 33 | 34 | rdfs:label "MultiPoint"@en ; 35 | schema:description "See RFC 7946 Section 3.1.3."@en ; 36 | . 37 | 38 | 39 | rdfs:label "MultiPolygon"@en ; 40 | schema:description "See RFC 7946 Section 3.1.7."@en ; 41 | . 42 | 43 | 44 | rdfs:label "Point"@en ; 45 | schema:description "See RFC 7946 Section 3.1.2."@en ; 46 | . 47 | 48 | 49 | rdfs:label "Polygon"@en ; 50 | schema:description "See RFC 7946 Section 3.1.6."@en ; 51 | . 52 | 53 | 54 | rdfs:label "bbox"@en ; 55 | schema:description "See RFC 7946 Section 5."@en ; 56 | . 57 | 58 | 59 | rdfs:label "coordinates"@en ; 60 | schema:description "RFC 7946 Section 3.1.1."@en ; 61 | . 62 | 63 | 64 | rdfs:label "features"@en ; 65 | schema:description "RFC 7946 Section 3.3."@en ; 66 | . 67 | 68 | 69 | rdfs:label "geometry"@en ; 70 | schema:description "RFC 7946 Section 3.2."@en ; 71 | . 72 | 73 | 74 | rdfs:label "id"@en ; 75 | schema:description "RFC 7946 Section 3.2."@en ; 76 | . 77 | 78 | 79 | rdfs:label "properties"@en ; 80 | schema:description "RFC 7946 Section 3.2."@en ; 81 | . 82 | 83 | 84 | rdfs:label "type"@en ; 85 | schema:description "RFC 7946 Section 3."@en ; 86 | . 
87 | 88 | -------------------------------------------------------------------------------- /prez/reference_data/annotations/prez-ontology.ttl: -------------------------------------------------------------------------------- 1 | @prefix rdfs: . 2 | 3 | rdfs:label "All Predicate Values" . 4 | 5 | rdfs:comment "blank node depth" . 6 | 7 | rdfs:comment "limit" . 8 | 9 | rdfs:comment "offset" . 10 | 11 | rdfs:comment "order by" . 12 | 13 | rdfs:label "Constrains Class" . 14 | 15 | rdfs:label "Has Default Profile" . 16 | 17 | rdfs:label "Default Resource Format" . 18 | 19 | rdfs:label "Has Node Shape" . 20 | 21 | rdfs:label "Has Resource Format" . 22 | 23 | rdfs:label "count" . 24 | 25 | rdfs:label "link" . 26 | 27 | rdfs:label "members" . 28 | 29 | rdfs:label "delivers classes" . 30 | 31 | rdfs:label "endpoint template" . 32 | 33 | rdfs:label "focus to parent relation" . 34 | 35 | rdfs:label "parent endpoint" . 36 | 37 | rdfs:label "parent to focus relation" . 38 | 39 | rdfs:label "Matched Term" . 40 | 41 | rdfs:label "Matched Predicate" . 42 | 43 | rdfs:label "Search Result Weight" . 44 | -------------------------------------------------------------------------------- /prez/reference_data/annotations/rdfs-annotations.ttl: -------------------------------------------------------------------------------- 1 | PREFIX rdfs: 2 | PREFIX schema: 3 | 4 | rdfs: 5 | rdfs:seeAlso ; 6 | . 7 | 8 | rdfs:Class 9 | rdfs:label "Class" ; 10 | schema:description "The class of classes." ; 11 | . 12 | 13 | rdfs:Container 14 | rdfs:label "Container" ; 15 | schema:description "The class of RDF containers." ; 16 | . 17 | 18 | rdfs:ContainerMembershipProperty 19 | rdfs:label "Container Membership Property" ; 20 | schema:description """The class of container membership properties, rdf:_1, rdf:_2, ..., 21 | all of which are sub-properties of 'member'.""" ; 22 | . 23 | 24 | rdfs:Datatype 25 | rdfs:label "Datatype" ; 26 | schema:description "The class of RDF datatypes." ; 27 | . 
28 | 29 | rdfs:Literal 30 | rdfs:label "Literal" ; 31 | schema:description "The class of literal values, eg. textual strings and integers." ; 32 | . 33 | 34 | rdfs:Resource 35 | rdfs:label "Resource" ; 36 | schema:description "The class resource, everything." ; 37 | . 38 | 39 | rdfs:comment 40 | rdfs:label "comment" ; 41 | schema:description "A description of the subject resource." ; 42 | . 43 | 44 | rdfs:domain 45 | rdfs:label "domain" ; 46 | schema:description "A domain of the subject property." ; 47 | . 48 | 49 | rdfs:isDefinedBy 50 | rdfs:label "is defined by" ; 51 | schema:description "The defininition of the subject resource." ; 52 | . 53 | 54 | rdfs:label 55 | rdfs:label "label" ; 56 | schema:description "A human-readable name for the subject." ; 57 | . 58 | 59 | rdfs:member 60 | rdfs:label "member" ; 61 | schema:description "A member of the subject resource." ; 62 | . 63 | 64 | rdfs:range 65 | rdfs:label "range" ; 66 | schema:description "A range of the subject property." ; 67 | . 68 | 69 | rdfs:seeAlso 70 | rdfs:label "see also" ; 71 | schema:description "Further information about the subject resource." ; 72 | . 73 | 74 | rdfs:subClassOf 75 | rdfs:label "subclass of" ; 76 | schema:description "The subject is a subclass of a class." ; 77 | . 78 | 79 | rdfs:subPropertyOf 80 | rdfs:label "subproperty of" ; 81 | schema:description "The subject is a subproperty of a property." ; 82 | . 83 | 84 | -------------------------------------------------------------------------------- /prez/reference_data/annotations/skos-xl-annotations.ttl: -------------------------------------------------------------------------------- 1 | PREFIX rdfs: 2 | PREFIX schema: 3 | PREFIX skos: 4 | 5 | 6 | rdfs:label "SKOS XL Vocabulary"@en ; 7 | rdfs:seeAlso ; 8 | schema:description "An RDF vocabulary extending SKOS and allowing the description and linking of lexical entities."@en ; 9 | . 
10 | 11 | 12 | rdfs:label "Label"@en ; 13 | schema:description "A special class of lexical entities."@en ; 14 | . 15 | 16 | 17 | rdfs:label "alternative label"@en ; 18 | rdfs:seeAlso skos:altLabel ; 19 | schema:description "The property skosxl:altLabel is used to associate an skosxl:Label with a skos:Concept. The property is analogous to skos:altLabel."@en ; 20 | . 21 | 22 | 23 | rdfs:label "hidden label"@en ; 24 | rdfs:seeAlso skos:hiddenLabel ; 25 | schema:description "The property skosxl:hiddenLabel is used to associate an skosxl:Label with a skos:Concept. The property is analogous to skos:hiddenLabel."@en ; 26 | . 27 | 28 | 29 | rdfs:label "label relation"@en ; 30 | schema:description "The property skosxl:labelRelation is used for representing binary ('direct') relations between instances of the class skosxl:Label."@en ; 31 | . 32 | 33 | 34 | rdfs:label "literal form"@en ; 35 | schema:description "The property skosxl:literalForm is used to give the literal form of an skosxl:Label."@en ; 36 | . 37 | 38 | 39 | rdfs:label "preferred label"@en ; 40 | rdfs:seeAlso skos:prefLabel ; 41 | schema:description "The property skosxl:prefLabel is used to associate an skosxl:Label with a skos:Concept. The property is analogous to skos:prefLabel."@en ; 42 | . 
from rdflib import Namespace

# Namespace for the GeoSPARQL spatial filter functions.
GEOF = Namespace("http://www.opengis.net/def/function/geosparql/")

# Lookup table from CQL2 spatial operator names to the corresponding
# GeoSPARQL simple-features function IRIs.
cql_sparql_spatial_mapping = dict(
    s_intersects=GEOF.sfIntersects,
    s_within=GEOF.sfWithin,
    s_contains=GEOF.sfContains,
    s_disjoint=GEOF.sfDisjoint,
    s_equals=GEOF.sfEquals,
    s_overlaps=GEOF.sfOverlaps,
    s_touches=GEOF.sfTouches,
    s_crosses=GEOF.sfCrosses,
)
6 | 7 | sys:profile-listing 8 | a ont:ListingEndpoint , ont:SystemEndpoint ; 9 | ont:relevantShapes ex:Profiles ; 10 | . 11 | 12 | sys:profile-object 13 | a ont:ObjectEndpoint , ont:SystemEndpoint ; 14 | ont:relevantShapes ex:Profiles ; 15 | . 16 | 17 | sys:object 18 | a ont:ObjectEndpoint , ont:SystemEndpoint ; 19 | ont:relevantShapes ex:Profiles ; 20 | . 21 | 22 | ogce:cql-get 23 | a ont:ListingEndpoint ; 24 | ont:relevantShapes ex:CQL ; 25 | . 26 | 27 | ogce:cql-post 28 | a ont:ListingEndpoint ; 29 | ont:relevantShapes ex:CQL ; 30 | . 31 | 32 | ogce:search 33 | a ont:ListingEndpoint ; 34 | ont:relevantShapes ex:Search ; 35 | . 36 | 37 | ogce:top-concepts 38 | a ont:ListingEndpoint ; 39 | ont:relevantShapes ex:TopConcepts ; 40 | . 41 | 42 | ogce:narrowers 43 | a ont:ListingEndpoint ; 44 | ont:relevantShapes ex:Narrowers ; 45 | . 46 | -------------------------------------------------------------------------------- /prez/reference_data/endpoints/base/endpoint_nodeshapes.ttl: -------------------------------------------------------------------------------- 1 | @prefix ont: . 2 | @prefix dcat: . 3 | @prefix dcterms: . 4 | @prefix ex: . 5 | @prefix geo: . 6 | @prefix prez: . 7 | @prefix prof: . 8 | @prefix rdf: . 9 | @prefix rdfs: . 10 | @prefix sh: . 11 | @prefix shext: . 12 | @prefix skos: . 13 | @prefix altr-ext: . 14 | 15 | ex:Profiles 16 | a sh:NodeShape ; 17 | ont:hierarchyLevel 1 ; 18 | sh:targetClass prof:Profile ; 19 | . 20 | 21 | ex:AltProfilesForListing 22 | a sh:NodeShape ; 23 | ont:hierarchyLevel 1 ; 24 | sh:targetClass prez:ListingProfile ; 25 | sh:property [ 26 | sh:path altr-ext:constrainsClass ; 27 | ] 28 | . 29 | 30 | ex:AltProfilesForObject 31 | a sh:NodeShape ; 32 | ont:hierarchyLevel 1 ; 33 | sh:targetClass prez:ObjectProfile ; 34 | sh:property [ 35 | sh:path altr-ext:constrainsClass ; 36 | ] 37 | . 38 | 39 | ex:Object 40 | a sh:NodeShape ; 41 | ont:hierarchyLevel 1 ; 42 | . 
43 | 44 | ex:TopConcepts 45 | a sh:NodeShape ; 46 | sh:targetClass skos:Concept ; 47 | ont:hierarchyLevel 1 ; 48 | . 49 | 50 | ex:Narrowers 51 | a sh:NodeShape ; 52 | sh:targetClass skos:Concept ; 53 | ont:hierarchyLevel 1 ; 54 | . 55 | 56 | ex:CQL 57 | a sh:NodeShape ; 58 | sh:targetClass prez:CQLFilterResult ; 59 | ont:hierarchyLevel 1 ; 60 | . 61 | 62 | ex:Search 63 | a sh:NodeShape ; 64 | sh:targetClass prez:SearchResult ; 65 | ont:hierarchyLevel 1 ; 66 | . -------------------------------------------------------------------------------- /prez/reference_data/endpoints/features/features_metadata.ttl: -------------------------------------------------------------------------------- 1 | @prefix ex: . 2 | @prefix ogce: . 3 | @prefix ogcfeat: . 4 | @prefix ont: . 5 | @prefix prez: . 6 | @prefix sys: . 7 | 8 | ogcfeat:feature-collections 9 | a ont:ListingEndpoint , ont:OGCFeaturesEndpoint ; 10 | ont:relevantShapes ex:FeatureCollections ; 11 | . 12 | 13 | ogcfeat:feature-collection 14 | a ont:ObjectEndpoint , ont:OGCFeaturesEndpoint ; 15 | ont:relevantShapes ex:FeatureCollections ; 16 | . 17 | 18 | ogcfeat:features 19 | a ont:ListingEndpoint , ont:OGCFeaturesEndpoint ; 20 | ont:relevantShapes ex:Feature ; 21 | . 22 | 23 | ogcfeat:feature 24 | a ont:ObjectEndpoint , ont:OGCFeaturesEndpoint ; 25 | ont:relevantShapes ex:Feature ; 26 | . 27 | 28 | ogcfeat:queryables-global 29 | a ont:ListingEndpoint , ont:OGCFeaturesEndpoint ; 30 | ont:relevantShapes ex:QueryablesGlobal ; 31 | . 32 | 33 | ogcfeat:queryables-local 34 | a ont:ListingEndpoint , ont:OGCFeaturesEndpoint ; 35 | ont:relevantShapes ex:QueryablesLocal ; 36 | . -------------------------------------------------------------------------------- /prez/reference_data/endpoints/features/features_nodeshapes.ttl: -------------------------------------------------------------------------------- 1 | @prefix void: . 2 | @prefix dcat: . 3 | @prefix dcterms: . 4 | @prefix ex: . 5 | @prefix geo: . 6 | @prefix ont: . 
7 | @prefix prez: . 8 | @prefix rdfs: . 9 | @prefix sh: . 10 | @prefix xsd: . 11 | @prefix skos: . 12 | 13 | ex:FeatureCollections 14 | a sh:NodeShape ; 15 | sh:property [ sh:path void:inDataset ; 16 | sh:class dcat:Dataset ; ] ; 17 | sh:targetClass geo:FeatureCollection ; 18 | ont:hierarchyLevel 1 . 19 | 20 | ex:Feature 21 | a sh:NodeShape ; 22 | sh:property [ sh:class geo:FeatureCollection ; 23 | sh:path [ sh:inversePath rdfs:member ] ] ; 24 | sh:property [ sh:class dcat:Dataset ; 25 | sh:path ( [ sh:inversePath rdfs:member ] void:inDataset ) ] ; 26 | sh:targetClass geo:Feature ; 27 | ont:hierarchyLevel 2 . 28 | 29 | ex:Object 30 | a sh:NodeShape ; 31 | ont:hierarchyLevel 1 . 32 | 33 | ex:QueryablesGlobal 34 | a sh:NodeShape ; 35 | sh:targetClass geo:Feature ; 36 | ont:hierarchyLevel 1 ; 37 | . 38 | 39 | ex:QueryablesLocal 40 | a sh:NodeShape ; 41 | sh:targetClass geo:Feature ; 42 | ont:hierarchyLevel 2 ; 43 | . 44 | -------------------------------------------------------------------------------- /prez/reference_data/prefixes/standard.ttl: -------------------------------------------------------------------------------- 1 | PREFIX vann: 2 | PREFIX prez: 3 | PREFIX exds: 4 | PREFIX dcat: 5 | PREFIX sdoprof: 6 | 7 | [ vann:preferredNamespacePrefix "prez" ; 8 | vann:preferredNamespaceUri 9 | ] . 10 | 11 | [ vann:preferredNamespacePrefix "altr-ext" ; 12 | vann:preferredNamespaceUri 13 | ] . 14 | 15 | [ vann:preferredNamespacePrefix "exds" ; 16 | vann:preferredNamespaceUri 17 | ] . 18 | 19 | [ vann:preferredNamespacePrefix "sdoprof" ; 20 | vann:preferredNamespaceUri 21 | ] . 22 | 23 | [ vann:preferredNamespacePrefix "vc" ; 24 | vann:preferredNamespaceUri 25 | ] . 26 | 27 | [ vann:preferredNamespacePrefix "vid" ; 28 | vann:preferredNamespaceUri 29 | ] . 30 | 31 | [ vann:preferredNamespacePrefix "geofab" ; 32 | vann:preferredNamespaceUri 33 | ] . 34 | 35 | [ vann:preferredNamespacePrefix "meshblock" ; 36 | vann:preferredNamespaceUri 37 | ] . 
38 | 39 | [ vann:preferredNamespacePrefix "ldgov" ; 40 | vann:preferredNamespaceUri 41 | ] . 42 | 43 | [ vann:preferredNamespacePrefix "orc" ; 44 | vann:preferredNamespaceUri 45 | ] . 46 | 47 | [ vann:preferredNamespacePrefix "profile" ; 48 | vann:preferredNamespaceUri 49 | ] . 50 | 51 | [ vann:preferredNamespacePrefix "exm" ; 52 | vann:preferredNamespaceUri 53 | ] . 54 | 55 | [ vann:preferredNamespacePrefix "prfl" ; 56 | vann:preferredNamespaceUri 57 | ] . -------------------------------------------------------------------------------- /prez/reference_data/prefixes/testing.ttl: -------------------------------------------------------------------------------- 1 | PREFIX vann: 2 | PREFIX ldgovau: 3 | PREFIX gnaf: 4 | PREFIX addr: 5 | 6 | 7 | [ vann:preferredNamespacePrefix "ldgovau" ; 8 | vann:preferredNamespaceUri ; 9 | ] . 10 | 11 | [ vann:preferredNamespacePrefix "gnaf" ; 12 | vann:preferredNamespaceUri ; 13 | ] . 14 | 15 | [ vann:preferredNamespacePrefix "addr" ; 16 | vann:preferredNamespaceUri ; 17 | ] . 18 | 19 | [ vann:preferredNamespacePrefix "preztest" ; 20 | vann:preferredNamespaceUri ; 21 | ] . 22 | 23 | [ vann:preferredNamespacePrefix "sys" ; 24 | vann:preferredNamespaceUri ; 25 | ] . 26 | 27 | [ vann:preferredNamespacePrefix "sys" ; 28 | vann:preferredNamespaceUri ; 29 | ] . 30 | 31 | [ vann:preferredNamespacePrefix "defn" ; 32 | vann:preferredNamespaceUri ; 33 | ] . 34 | 35 | [ vann:preferredNamespacePrefix "preztest" ; 36 | vann:preferredNamespaceUri ; 37 | ] . 38 | 39 | [ vann:preferredNamespacePrefix "sys" ; 40 | vann:preferredNamespaceUri ; 41 | ] . 42 | 43 | [ vann:preferredNamespacePrefix "sys" ; 44 | vann:preferredNamespaceUri ; 45 | ] . 46 | 47 | [ vann:preferredNamespacePrefix "defn" ; 48 | vann:preferredNamespaceUri ; 49 | ] . 
def render_csv_dropdown(rows: list[dict]) -> io.StringIO:
    """Serialise a list of result rows as CSV and return a readable stream.

    The header row is taken from the keys of the first row; every row is
    expected to share the same keys. The returned stream is rewound so
    callers can read it from the start.

    :param rows: result rows, each a dict mapping column name -> value.
    :return: an ``io.StringIO`` positioned at the beginning of the CSV text.
    """
    stream = io.StringIO()
    # Bug fix: the original indexed rows[0] unconditionally, raising
    # IndexError for an empty result set. Return an empty stream instead.
    if not rows:
        return stream

    headers = list(rows[0].keys())
    writer = csv.DictWriter(
        stream, fieldnames=headers, quotechar='"', quoting=csv.QUOTE_MINIMAL
    )
    writer.writeheader()

    for row in rows:
        writer.writerow(row)

    stream.seek(0)
    return stream
class Repo(ABC):
    """Abstract base class for SPARQL repositories (remote endpoint,
    pyoxigraph, oxrdflib). Subclasses implement the three query primitives;
    ``send_queries`` provides the shared fan-out logic."""

    @abstractmethod
    async def rdf_query_to_graph(self, query: str):
        """Run a CONSTRUCT/DESCRIBE query and return the resulting Graph."""
        pass

    @abstractmethod
    async def tabular_query_to_table(self, query: str, context: URIRef = None):
        """Run a SELECT query; return its rows together with ``context``."""
        pass

    async def send_queries(
        self,
        rdf_queries: List[str],
        tabular_queries: List[Tuple[URIRef | None, str]] = None,
    ) -> Tuple[Graph, List]:
        """Send all RDF and tabular queries concurrently.

        :param rdf_queries: CONSTRUCT/DESCRIBE query strings; empty/falsy
            entries are skipped.
        :param tabular_queries: optional (context, SELECT query) pairs;
            entries with a falsy query are skipped.
        :return: a tuple of (merged Graph of all RDF results, list of
            tabular results).
        """
        # Bug fix: tabular_queries defaults to None but was iterated
        # unconditionally, raising TypeError when the argument was omitted.
        if tabular_queries is None:
            tabular_queries = []
        # Send both query types in parallel.
        results = await asyncio.gather(
            *[self.rdf_query_to_graph(query) for query in rdf_queries if query],
            *[
                self.tabular_query_to_table(query, context)
                for context, query in tabular_queries
                if query
            ],
        )
        # Merge graph results into one graph (sharing the app's prefix map);
        # collect everything else as tabular results.
        g = Graph(namespace_manager=prefix_graph.namespace_manager)
        tabular_results = []
        for result in results:
            if isinstance(result, Graph):
                g += result
            else:
                tabular_results.append(result)
        return g, tabular_results

    @abstractmethod
    def sparql(
        self, query: str, raw_headers: list[tuple[bytes, bytes]], method: str = "GET"
    ):
        """Pass a raw SPARQL request through to the underlying store."""
        pass
from fastapi.responses import StreamingResponse


class StreamingTurtleResponse(StreamingResponse):
    """Streaming response carrying RDF Turtle (``text/turtle``)."""

    media_type = "text/turtle"

    def render(self, content: str) -> bytes:
        # Response bodies must be bytes; Turtle is UTF-8 encoded.
        return bytes(content, encoding="utf-8")


class StreamingTurtleAnnotatedResponse(StreamingTurtleResponse):
    """Turtle enriched with Prez annotations (``text/anot+turtle``)."""

    media_type = "text/anot+turtle"
router = APIRouter(tags=["Conformance"])


@router.get("/conformance", response_model=ConformanceDeclaration, status_code=200)
async def get_conformance():
    """Return the OGC API - Features conformance declaration, listing the
    conformance classes this deployment implements."""
    try:
        declaration = ConformanceDeclaration(conformsTo=CONFORMANCE_CLASSES)
    except Exception as error:
        # Surface any construction failure as an explicit HTTP 500.
        raise HTTPException(status_code=500, detail=str(error))
    return declaration
Not sure why cache is 15 | # not cleared between calls 16 | return 17 | for f in (Path(__file__).parent.parent / "reference_data/profiles").glob("*.ttl"): 18 | profiles_graph_cache.parse(f) 19 | log.info("Prez default profiles loaded") 20 | remote_profiles_query = """ 21 | PREFIX prof: 22 | PREFIX prez: 23 | 24 | DESCRIBE ?prof { 25 | SELECT DISTINCT ?prof { 26 | VALUES ?prof_class { prez:ListingProfile prez:ObjectProfile prez:IndexProfile } 27 | ?prof a ?prof_class 28 | } 29 | } 30 | """ 31 | g, _ = await repo.send_queries([remote_profiles_query], []) 32 | if len(g) > 0: 33 | profiles_graph_cache.__iadd__(g) 34 | log.info("Remote profile(s) found and added") 35 | else: 36 | log.info("No remote profiles found") 37 | -------------------------------------------------------------------------------- /prez/services/generate_queryables.py: -------------------------------------------------------------------------------- 1 | from prez.config import settings 2 | from prez.models.ogc_features import QueryableProperty, Queryables 3 | from prez.reference_data.prez_ns import OGCFEAT, PREZ 4 | 5 | 6 | def generate_queryables_json(item_graph, annotations_graph, url, endpoint_uri): 7 | queryable_props = {} 8 | for queryable in item_graph.subjects(): 9 | queryable_props[str(queryable)] = QueryableProperty( 10 | title=annotations_graph.value(queryable, PREZ.label), 11 | description=annotations_graph.value(queryable, PREZ.description), 12 | ) 13 | if endpoint_uri == OGCFEAT["queryables-global"]: 14 | title = "Global Queryables" 15 | description = ( 16 | "Global queryable properties for all collections in the OGC Features API." 17 | ) 18 | else: 19 | title = "Local Queryables" 20 | description = ( 21 | "Local queryable properties for the collection in the OGC Features API." 
def setup_logger(settings):
    """Configure the application-wide ``prez`` logger from settings.

    ``settings.log_level`` sets the logger's level; ``settings.log_output``
    selects the destination: ``"file"``, ``"stdout"``, or ``"both"``.
    Existing handlers on the logger are replaced, and propagation to the
    root logger is disabled.
    """
    logger = logging.getLogger("prez")
    logger.setLevel(settings.log_level)

    formatter = logging.Formatter(
        fmt="%(asctime)s.%(msecs)03d [%(levelname)s] %(name)s: %(message)s",
        datefmt="%Y-%m-%d %H:%M:%S",
    )

    handlers = []
    if settings.log_output in ("file", "both"):
        # One log file per process start, named by the startup timestamp.
        logfile = Path(
            f"../logs/{datetime.now().replace(microsecond=0).isoformat()}.log"
        )
        logfile.parent.mkdir(parents=True, exist_ok=True)
        logfile.touch(exist_ok=True)
        handlers.append(logging.FileHandler(filename=logfile))
    if settings.log_output in ("stdout", "both"):
        handlers.append(logging.StreamHandler(stream=sys.stdout))

    # Handlers are set below the DEBUG threshold so the logger's own level
    # (from settings) is the effective filter.
    for handler in handlers:
        handler.setLevel(5)
        handler.setFormatter(formatter)

    logger.propagate = False
    logger.handlers = handlers
def generate_bbox_filter(
    bbox: List[float], filter_crs: str
) -> (GraphPatternNotTriples, List[TriplesSameSubjectPath]):
    """Build a GeoSPARQL intersection FILTER for an OGC Features bbox param.

    Converts the bounding box into a CRS-qualified WKT polygon and produces:
    a FILTER calling geof:sfIntersects against the focus node's geometry,
    plus the triple patterns binding that geometry's WKT literal.

    :param bbox: bounding-box coordinates as floats.
    :param filter_crs: IRI of the CRS the bbox coordinates are expressed in.
    :return: (filter graph pattern, list of geometry triple patterns).
    """
    wkt = get_wkt_from_coords(format_coordinates_as_wkt(bbox), "Polygon")
    # GeoSPARQL wktLiterals may carry a leading CRS IRI.
    crs_qualified_wkt = f"<{filter_crs}> {wkt}"

    focus_node = Var(value="focus_node")
    geom_bnode = Var(value="geom_bnode")
    geom_literal = Var(value="geom_var")
    # ?focus_node geo:hasGeometry ?geom_bnode . ?geom_bnode geo:asWKT ?geom_var .
    tssp_list = [
        TriplesSameSubjectPath.from_spo(
            focus_node, IRI(value=GEO.hasGeometry), geom_bnode
        ),
        TriplesSameSubjectPath.from_spo(
            geom_bnode, IRI(value=GEO.asWKT), geom_literal
        ),
    ]

    stored_geom_expr = Expression.from_primary_expression(
        primary_expression=PrimaryExpression(content=geom_literal)
    )
    query_geom_expr = Expression.from_primary_expression(
        primary_expression=PrimaryExpression(
            content=RDFLiteral(
                value=crs_qualified_wkt,
                datatype=IRI(value="http://www.opengis.net/ont/geosparql#wktLiteral"),
            )
        )
    )
    intersects_call = FunctionCall(
        iri=IRI(value=GEOF.sfIntersects),
        arg_list=ArgList(expressions=[stored_geom_expr, query_geom_expr]),
    )
    filter_gpnt = GraphPatternNotTriples(
        content=Filter(constraint=Constraint(content=intersects_call))
    )
    return filter_gpnt, tssp_list
class ClassesSelectQuery(SubSelect):
    """SPARQL sub-select that looks up the rdf:type(s) of a given set of IRIs.

    Generates (for each supplied IRI):

        SELECT ?class ?uri
        WHERE {
          ?uri rdf:type ?class
          VALUES ?uri { <...> <...> }
        }
    """

    def __init__(
        self,
        iris: list[IRI],
    ):
        # Projected variables: the matched class and the instance IRI.
        class_var = Var(value="class")
        uris_var = Var(value="uri")
        select_clause = SelectClause(variables_or_all=[class_var, uris_var])
        # WHERE { ?uri rdf:type ?class  VALUES ?uri { ... } }
        where_clause = WhereClause(
            group_graph_pattern=GroupGraphPattern(
                content=GroupGraphPatternSub(
                    triples_block=TriplesBlock(
                        triples=TriplesSameSubjectPath.from_spo(
                            subject=uris_var,
                            predicate=IRI(value=RDF.type),
                            object=class_var,
                        )
                    ),
                    graph_patterns_or_triples_blocks=[
                        # Inline VALUES block restricting ?uri to the
                        # caller-supplied IRIs.
                        GraphPatternNotTriples(
                            content=InlineData(
                                data_block=DataBlock(
                                    block=InlineDataOneVar(
                                        variable=uris_var,
                                        datablockvalues=[DataBlockValue(value=uri) for uri in iris],
                                    )
                                )
                            )
                        )
                    ]
                )
            )
        )
        super().__init__(
            select_clause=select_clause,
            where_clause=where_clause,
            solution_modifier=SolutionModifier(),
        )
class FoafHomepageQuery(SubSelect):
    """SPARQL sub-select retrieving the foaf:homepage URL(s) of one resource.

    Generates:

        SELECT DISTINCT ?url
        WHERE {
          <{{ iri }}> foaf:homepage ?url .
        }
    """

    def __init__(self, iri: str):
        # The subject is the concrete resource IRI; ?url is projected.
        iri_var = IRI(value=iri)
        url_var = Var(value="url")
        select_clause = SelectClause(distinct=True, variables_or_all=[url_var])
        where_clause = WhereClause(
            group_graph_pattern=GroupGraphPattern(
                content=GroupGraphPatternSub(
                    triples_block=TriplesBlock(
                        triples=TriplesSameSubjectPath.from_spo(
                            subject=iri_var,
                            predicate=IRI(value="http://xmlns.com/foaf/0.1/homepage"),
                            object=url_var,
                        )
                    )
                )
            )
        )
        # NOTE(review): unlike the sibling query classes (ClassesSelectQuery,
        # PrefixQuery), no solution_modifier=SolutionModifier() is passed
        # here — presumably the SubSelect model defaults it; confirm, else
        # this may fail validation.
        super().__init__(
            select_clause=select_clause,
            where_clause=where_clause,
        )
class PrefixQuery(SubSelect):
    """SPARQL sub-select collecting all declared prefix/namespace pairs.

    Generates:

        SELECT ?prefix ?namespace
        WHERE {
          ?subject vann:preferredNamespacePrefix ?prefix ;
                   vann:preferredNamespaceUri ?namespace .
        }
    """

    def __init__(self):
        subj = Var(value="subject")
        prefix = Var(value="prefix")
        namespace = Var(value="namespace")
        # Both predicates live in the VANN vocabulary.
        vann = "http://purl.org/vocab/vann/"
        tssp_list = [
            TriplesSameSubjectPath.from_spo(
                subject=subj,
                predicate=IRI(value=f"{vann}preferredNamespacePrefix"),
                object=prefix,
            ),
            TriplesSameSubjectPath.from_spo(
                subject=subj,
                predicate=IRI(value=f"{vann}preferredNamespaceUri"),
                object=namespace,
            ),
        ]
        where_clause = WhereClause(
            group_graph_pattern=GroupGraphPattern(
                content=GroupGraphPatternSub(
                    triples_block=TriplesBlock.from_tssp_list(tssp_list)
                )
            )
        )
        super().__init__(
            select_clause=SelectClause(variables_or_all=[prefix, namespace]),
            where_clause=where_clause,
            solution_modifier=SolutionModifier(),
        )
# Regex based on the SPARQL 1.1 grammar's IRIREF production, WITHOUT the
# surrounding angle brackets:
#   [^...]  : any character NOT in the excluded set
#             " { } | ^ ` \ < >  (note \\ escapes a literal backslash)
#             and the control/space range U+0000-U+0020 (\x00-\x20)
#   *       : zero or more such characters
# Compiled once at module import for reuse.
sparql_iri_pattern_str = r'^[^<>"{}|^`\\\x00-\x20]*$'
sparql_iri_re = re.compile(sparql_iri_pattern_str)


def validate_iri(iri: str | URIRef) -> bool:
    """Return True if ``iri`` contains no characters forbidden by SPARQL's
    IRIREF production (angle brackets, quotes, braces, pipe, caret,
    backtick, backslash, and control characters/space).

    Accepts plain strings and rdflib URIRefs; any other stringifiable
    value is normalised with ``str()`` first (generalises the original
    URIRef-only conversion — URIRef is itself a ``str`` subclass, so
    behaviour for existing callers is unchanged).
    """
    if not isinstance(iri, str):
        iri = str(iri)
    return bool(sparql_iri_re.fullmatch(iri))
21 | [tool.poetry.dependencies] 22 | python = "^3.12" 23 | uvicorn = {version = "^0.34.0", optional = true } 24 | httpx = "^0.27.0" 25 | rdflib = "^7.0.0" 26 | toml = "^0.10.2" 27 | fastapi = "^0.115.5" 28 | jinja2 = "^3.1.6" 29 | pydantic = "^2.10.6" 30 | pydantic-settings = "^2.5.0" 31 | pyld = "^2.0.4" 32 | aiocache = "^0.12.2" 33 | sparql-grammar-pydantic = "^0.1.6" 34 | rdf2geojson = {git = "https://github.com/Kurrawong/rdf2geojson.git", rev = "v0.7.1"} 35 | python-multipart = "^0.0.20" 36 | pyoxigraph = "^0.4.4" 37 | oxrdflib = {git = "https://github.com/oxigraph/oxrdflib.git", rev = "main"} 38 | 39 | [tool.poetry.group.dev.dependencies] 40 | pytest = "^8.2.1" 41 | pre-commit = "^2.15.0" 42 | black = "^24.4.2" 43 | pytest-asyncio = "^0.23.7" 44 | requests = "^2.28.1" 45 | python-dotenv = "^1.0.0" 46 | coverage = "^7.3.2" 47 | tabulate = "^0.9.0" 48 | ogctests = "^0.1.15" 49 | 50 | [tool.poetry.extras] 51 | server = ["uvicorn"] 52 | 53 | [tool.black] 54 | line-length = 88 55 | 56 | [tool.isort] 57 | profile = "black" 58 | 59 | [pytest] 60 | pythonpath = ["prez"] 61 | testpaths = ["tests"] 62 | 63 | [build-system] 64 | requires = ["poetry-core>=1.9.0"] 65 | build-backend = "poetry.core.masonry.api" 66 | -------------------------------------------------------------------------------- /test_data/animal_profiles.ttl: -------------------------------------------------------------------------------- 1 | @prefix rdf: . 2 | @prefix rdfs: . 3 | @prefix sh: . 4 | @prefix dcterms: . 5 | @prefix prof: . 6 | @prefix prez: . 7 | @prefix xsd: . 8 | @prefix ex: . 9 | @prefix schema: . 10 | @prefix shext: . 11 | 12 | 13 | a prof:Profile , prez:ListingProfile ; 14 | dcterms:identifier "animal-facets"^^xsd:token ; 15 | dcterms:title "Animal Faceting Profile" ; 16 | dcterms:description "Profile defining properties for faceting animal data, including nested conservation info." 
; 17 | sh:property [ 18 | sh:path [ 19 | sh:union ( 20 | ex:species 21 | ex:habitat 22 | ex:diet 23 | [ 24 | sh:path ( ex:conservationInfo ex:status ) ; 25 | shext:pathAlias ; 26 | ] 27 | [ 28 | sh:path ( ex:conservationInfo ex:region ) ; 29 | shext:pathAlias ; 30 | ] 31 | ) 32 | ] 33 | ] . 34 | 35 | 36 | a prof:Profile , prez:ListingProfile ; 37 | dcterms:identifier "animal-search"^^xsd:token ; 38 | dcterms:title "Animal Search Results Profile" ; 39 | dcterms:description "Profile defining properties to return for animal search results (class, label, description)." ; 40 | sh:property [ 41 | sh:path [ 42 | sh:union ( 43 | rdf:type 44 | rdfs:label 45 | dcterms:description 46 | ) 47 | ] 48 | ] . 49 | 50 | 51 | a prof:Profile , prez:ListingProfile ; 52 | dcterms:identifier "facet-type"^^xsd:token ; 53 | dcterms:title "Facet things by type" ; 54 | dcterms:description "Allows faceting by rdf:type" ; 55 | sh:property [ sh:path [ sh:union ( rdf:type ) ] ] . 56 | -------------------------------------------------------------------------------- /test_data/bnode_depth-1.ttl: -------------------------------------------------------------------------------- 1 | PREFIX dcat: 2 | PREFIX dcterms: 3 | PREFIX isoroles: 4 | PREFIX prov: 5 | PREFIX schema: 6 | PREFIX skos: 7 | PREFIX xsd: 8 | 9 | 10 | a dcat:Catalog ; 11 | schema:member [ 12 | schema:name "123" ; 13 | ] ; 14 | . -------------------------------------------------------------------------------- /test_data/bnode_depth-2.ttl: -------------------------------------------------------------------------------- 1 | PREFIX dcat: 2 | PREFIX dcterms: 3 | PREFIX isoroles: 4 | PREFIX prov: 5 | PREFIX schema: 6 | PREFIX skos: 7 | PREFIX xsd: 8 | 9 | 10 | a dcat:Catalog ; 11 | schema:member [ 12 | schema:name "123" ; 13 | schema:member [ 14 | schema:name "456" 15 | ] ; 16 | ] ; 17 | . 
-------------------------------------------------------------------------------- /test_data/bnode_depth-4.ttl: -------------------------------------------------------------------------------- 1 | PREFIX dcat: 2 | PREFIX dcterms: 3 | PREFIX isoroles: 4 | PREFIX prov: 5 | PREFIX rdfs: 6 | PREFIX schema: 7 | PREFIX skos: 8 | PREFIX xsd: 9 | 10 | a schema:Thing . 11 | 12 | 13 | a dcat:Catalog ; 14 | dcterms:created "2022-07-31"^^xsd:date ; 15 | dcterms:description """The Indigenous Data Network's demonstration catalogue of datasets. This catalogue contains records of datasets in Australia, most of which have some relation to indigenous Australia. 16 | The purpose of this catalogue is not to act as a master catalogue of indigenous data in Australia to demonstrate improved metadata models and rating systems for data and metadata in order to improve indigenous data governance. 17 | The content of this catalogue conforms to the Indigenous Data Network's Catalogue Profile which is a profile of the DCAT, SKOS and PROV data models."""@en ; 18 | dcterms:identifier "democat"^^xsd:token ; 19 | dcterms:modified "2022-08-29"^^xsd:date ; 20 | dcterms:title "IDN Demonstration Catalogue" ; 21 | prov:qualifiedAttribution [ 22 | a prov:QualifiedAttribution ; 23 | dcat:hadRole 24 | isoroles:author , 25 | isoroles:custodian , 26 | isoroles:owner ; 27 | prov:agent [ 28 | a schema:Organization ; 29 | schema:name "some org" ; 30 | schema:member [ 31 | a schema:Person ; 32 | schema:name "some person" ; 33 | schema:memberOf [ 34 | schema:name "another some org" 35 | ] ; 36 | ] ; 37 | ] ; 38 | ] ; 39 | . 
40 | -------------------------------------------------------------------------------- /test_data/catprez.ttl: -------------------------------------------------------------------------------- 1 | PREFIX dcat: 2 | PREFIX dcterms: 3 | PREFIX ex: 4 | PREFIX rdf: 5 | PREFIX rdfs: 6 | 7 | ex:CatalogOne a dcat:Catalog ; 8 | rdfs:label "Catalog One" ; 9 | dcterms:hasPart ex:DCATResource ; 10 | ex:property "Catalog property" ; 11 | . 12 | 13 | ex:DCATResource a dcat:Resource ; 14 | rdfs:label "DCAT Resource" ; 15 | ex:property "DCAT Resource property" 16 | . 17 | 18 | ex:CatalogTwo a dcat:Catalog ; 19 | rdfs:label "amazing catalog" ; 20 | dcterms:hasPart ex:DCATResourceTwo ; 21 | ex:property "complete" ; 22 | . 23 | 24 | ex:DCATResourceTwo a dcat:Resource ; 25 | rdfs:label "rightful" ; 26 | ex:property "exposure" 27 | . 28 | -------------------------------------------------------------------------------- /test_data/cql/README.md: -------------------------------------------------------------------------------- 1 | Additional CQL test data is in `docs/examples/cql/`. The example data is located there such that the built docker image 2 | can utilise it as examples for the OpenAPI documentation. 3 | -------------------------------------------------------------------------------- /test_data/cql/expected_generated_queries/additional_temporal_disjoint_instant.rq: -------------------------------------------------------------------------------- 1 | CONSTRUCT { 2 | ?focus_node ?dt_1_instant 3 | } 4 | WHERE { 5 | ?focus_node ?dt_1_instant 6 | FILTER (?dt_1_instant > "2012-08-10T05:30:00+00:00"^^ || ?dt_1_instant < "2012-08-10T05:30:00+00:00"^^) 7 | } -------------------------------------------------------------------------------- /test_data/cql/expected_generated_queries/additional_temporal_during_intervals.rq: -------------------------------------------------------------------------------- 1 | CONSTRUCT { 2 | ?focus_node ?dt_1_end . 
3 | ?focus_node ?dt_1_start 4 | } 5 | WHERE { 6 | ?focus_node ?dt_1_end . 7 | ?focus_node ?dt_1_start 8 | 9 | FILTER (?dt_1_start > "2017-06-10T07:30:00+00:00"^^ && ?dt_1_end < "2017-06-11T10:30:00+00:00"^^) 10 | } -------------------------------------------------------------------------------- /test_data/cql/expected_generated_queries/clause7_12.rq: -------------------------------------------------------------------------------- 1 | CONSTRUCT { 2 | ?focus_node ?dt_1_instant 3 | } 4 | WHERE { 5 | ?focus_node ?dt_1_instant 6 | FILTER (! (?dt_1_instant > "1969-07-24T16:50:35+00:00"^^ || ?dt_1_instant < "1969-07-16T05:32:00+00:00"^^)) 7 | } -------------------------------------------------------------------------------- /test_data/cql/expected_generated_queries/clause7_13.rq: -------------------------------------------------------------------------------- 1 | CONSTRUCT { 2 | ?focus_node ?dt_1_end . 3 | ?focus_node ?dt_1_start 4 | } 5 | WHERE { 6 | ?focus_node ?dt_1_end . 7 | ?focus_node ?dt_1_start 8 | 9 | FILTER (?dt_1_start > "1969-07-16T13:32:00+00:00"^^ && ?dt_1_end < "1969-07-24T16:50:35+00:00"^^) 10 | } -------------------------------------------------------------------------------- /test_data/cql/expected_generated_queries/clause7_17.rq: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /test_data/cql/expected_generated_queries/example20.rq: -------------------------------------------------------------------------------- 1 | CONSTRUCT { 2 | ?focus_node ?dt_1_instant 3 | } 4 | WHERE { 5 | ?focus_node ?dt_1_instant 6 | FILTER (?dt_1_instant < "2015-01-01T00:00:00"^^) 7 | } -------------------------------------------------------------------------------- /test_data/cql/expected_generated_queries/example21.rq: -------------------------------------------------------------------------------- 1 | CONSTRUCT { 2 | ?focus_node ?dt_1_instant 3 | } 4 | 
WHERE { 5 | ?focus_node ?dt_1_instant 6 | FILTER (?dt_1_instant > "2012-06-05T00:00:00"^^) 7 | } -------------------------------------------------------------------------------- /test_data/cql/expected_generated_queries/example22.rq: -------------------------------------------------------------------------------- 1 | CONSTRUCT { 2 | ?focus_node ?dt_1_end . 3 | ?focus_node ?dt_1_start 4 | } 5 | WHERE { 6 | ?focus_node ?dt_1_end . 7 | ?focus_node ?dt_1_start 8 | 9 | FILTER (?dt_1_start > "2017-06-10T07:30:00+00:00"^^ && ?dt_1_end < "2017-06-11T10:30:00+00:00"^^) 10 | } -------------------------------------------------------------------------------- /test_data/cql/expected_generated_queries/example27.rq: -------------------------------------------------------------------------------- 1 | CONSTRUCT { 2 | ?focus_node ?datetime 3 | } 4 | WHERE { 5 | ?focus_node ?datetime 6 | FILTER (?datetime > "2012-06-05T00:00:00"^^) 7 | } -------------------------------------------------------------------------------- /test_data/cql/expected_generated_queries/example53.rq: -------------------------------------------------------------------------------- 1 | CONSTRUCT { 2 | ?focus_node ?dt_1_instant 3 | } 4 | WHERE { 5 | ?focus_node ?dt_1_instant 6 | FILTER (?dt_1_instant > "2010-02-10T00:00:00"^^) 7 | } -------------------------------------------------------------------------------- /test_data/cql/expected_generated_queries/example54.rq: -------------------------------------------------------------------------------- 1 | CONSTRUCT { 2 | ?focus_node ?dt_1_instant 3 | } 4 | WHERE { 5 | ?focus_node ?dt_1_instant 6 | FILTER (?dt_1_instant < "2012-08-10T05:30:00+00:00"^^) 7 | } -------------------------------------------------------------------------------- /test_data/cql/expected_generated_queries/example55.rq: -------------------------------------------------------------------------------- 1 | CONSTRUCT { 2 | ?focus_node ?dt_2_end . 
3 | ?focus_node ?dt_2_start 4 | } 5 | WHERE { 6 | ?focus_node ?dt_2_end . 7 | ?focus_node ?dt_2_start 8 | 9 | FILTER ("2000-01-01T00:00:00+00:00"^^ < ?dt_2_start && "2005-01-10T01:01:01.393216+00:00"^^ > ?dt_2_end) 10 | } -------------------------------------------------------------------------------- /test_data/cql/expected_generated_queries/example56.rq: -------------------------------------------------------------------------------- 1 | CONSTRUCT { 2 | ?focus_node ?dt_2_end . 3 | ?focus_node ?dt_2_start 4 | } 5 | WHERE { 6 | ?focus_node ?dt_2_end . 7 | ?focus_node ?dt_2_start 8 | 9 | FILTER ("2005-01-10T01:01:01.393216+00:00"^^ < ?dt_2_start) 10 | } -------------------------------------------------------------------------------- /test_data/cql/expected_generated_queries/example57.rq: -------------------------------------------------------------------------------- 1 | CONSTRUCT { 2 | ?focus_node ?dt_1_end . 3 | ?focus_node ?dt_1_start 4 | } 5 | WHERE { 6 | ?focus_node ?dt_1_end . 7 | ?focus_node ?dt_1_start 8 | 9 | FILTER (?dt_1_start > "2005-01-10T00:00:00"^^ && ?dt_1_end < "2010-02-10T00:00:00"^^) 10 | } -------------------------------------------------------------------------------- /test_data/cql/expected_generated_queries/example58.rq: -------------------------------------------------------------------------------- 1 | CONSTRUCT { 2 | ?focus_node ?dt_1_instant 3 | } 4 | WHERE { 5 | ?focus_node ?dt_1_instant 6 | FILTER (?dt_1_instant = "1851-04-29T00:00:00"^^) 7 | } -------------------------------------------------------------------------------- /test_data/cql/expected_generated_queries/example59.rq: -------------------------------------------------------------------------------- 1 | CONSTRUCT { 2 | ?focus_node ?dt_1_end . 3 | ?focus_node ?dt_1_start 4 | } 5 | WHERE { 6 | ?focus_node ?dt_1_end . 
7 | ?focus_node ?dt_1_start 8 | 9 | FILTER (?dt_1_start < "1991-10-07T08:21:06.393262+00:00"^^ && ?dt_1_end = "2010-02-10T05:29:20.073225+00:00"^^) 10 | } -------------------------------------------------------------------------------- /test_data/cql/expected_generated_queries/example60.rq: -------------------------------------------------------------------------------- 1 | CONSTRUCT { 2 | ?focus_node ?dt_1_end . 3 | ?focus_node ?dt_1_start 4 | } 5 | WHERE { 6 | ?focus_node ?dt_1_end . 7 | ?focus_node ?dt_1_start 8 | 9 | FILTER (?dt_1_start > "1991-10-07T00:00:00"^^ && ?dt_1_end = "2010-02-10T05:29:20.073225+00:00"^^) 10 | } -------------------------------------------------------------------------------- /test_data/cql/expected_generated_queries/example61.rq: -------------------------------------------------------------------------------- 1 | CONSTRUCT { 2 | ?focus_node ?dt_1_end . 3 | ?focus_node ?dt_1_start 4 | } 5 | WHERE { 6 | ?focus_node ?dt_1_end . 7 | ?focus_node ?dt_1_start 8 | 9 | FILTER (! (?dt_1_end < "1991-10-07T08:21:06.393262+00:00"^^ || ?dt_1_start > "2010-02-10T05:29:20.073225+00:00"^^)) 10 | } -------------------------------------------------------------------------------- /test_data/cql/expected_generated_queries/example62.rq: -------------------------------------------------------------------------------- 1 | CONSTRUCT { 2 | ?focus_node ?dt_2_end . 3 | ?focus_node ?dt_2_start 4 | } 5 | WHERE { 6 | ?focus_node ?dt_2_end . 7 | ?focus_node ?dt_2_start 8 | 9 | FILTER ("2010-02-10T00:00:00"^^ = ?dt_2_start) 10 | } -------------------------------------------------------------------------------- /test_data/cql/expected_generated_queries/example63.rq: -------------------------------------------------------------------------------- 1 | CONSTRUCT { 2 | ?focus_node ?dt_2_end . 3 | ?focus_node ?dt_2_start 4 | } 5 | WHERE { 6 | ?focus_node ?dt_2_end . 
7 | ?focus_node ?dt_2_start 8 | 9 | FILTER ("2010-02-10T05:29:20.073225+00:00"^^ = ?dt_2_end) 10 | } -------------------------------------------------------------------------------- /test_data/cql/expected_generated_queries/example64.rq: -------------------------------------------------------------------------------- 1 | CONSTRUCT { 2 | ?focus_node ?dt_2_end . 3 | ?focus_node ?dt_2_start 4 | } 5 | WHERE { 6 | ?focus_node ?dt_2_end . 7 | ?focus_node ?dt_2_start 8 | 9 | FILTER ("1991-10-07T08:21:06.393262+00:00"^^ > ?dt_2_start && "1991-10-07T08:21:06.393262+00:00"^^ < ?dt_2_end && "2010-02-10T05:29:20.073225+00:00"^^ > ?dt_2_end) 10 | } -------------------------------------------------------------------------------- /test_data/cql/expected_generated_queries/example65.rq: -------------------------------------------------------------------------------- 1 | CONSTRUCT { 2 | ?focus_node ?dt_1_end . 3 | ?focus_node ?dt_1_start 4 | } 5 | WHERE { 6 | ?focus_node ?dt_1_end . 7 | ?focus_node ?dt_1_start 8 | 9 | FILTER (?dt_1_start < "1991-10-07T08:21:06.393262+00:00"^^ && ?dt_1_end > "1991-10-07T08:21:06.393262+00:00"^^ && ?dt_1_end < "1992-10-09T08:08:08.393473+00:00"^^) 10 | } -------------------------------------------------------------------------------- /test_data/cql/expected_generated_queries/example66.rq: -------------------------------------------------------------------------------- 1 | CONSTRUCT { 2 | ?focus_node ?dt_2_end . 3 | ?focus_node ?dt_2_start 4 | } 5 | WHERE { 6 | ?focus_node ?dt_2_end . 7 | ?focus_node ?dt_2_start 8 | 9 | FILTER ("1991-10-07T08:21:06.393262+00:00"^^ = ?dt_2_start && "2010-02-10T05:29:20.073225+00:00"^^ > ?dt_2_end) 10 | } -------------------------------------------------------------------------------- /test_data/cql/expected_generated_queries/example67.rq: -------------------------------------------------------------------------------- 1 | CONSTRUCT { 2 | ?focus_node ?dt_1_end . 
3 | ?focus_node ?dt_1_start 4 | } 5 | WHERE { 6 | ?focus_node ?dt_1_end . 7 | ?focus_node ?dt_1_start 8 | 9 | FILTER (?dt_1_start = "1991-10-07T08:21:06.393262+00:00"^^) 10 | } -------------------------------------------------------------------------------- /test_data/cql/input/additional_temporal_disjoint_instant.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "t_disjoint", 3 | "args": [ 4 | { "property": "ex:updated_at" }, 5 | { "timestamp": "2012-08-10T05:30:00Z" } 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /test_data/cql/input/additional_temporal_during_intervals.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "t_during", 3 | "args": [ 4 | { "interval": [ { "property": "ex:starts_at" }, { "property": "ex:ends_at" } ] }, 5 | { "interval": [ "2017-06-10T07:30:00Z", "2017-06-11T10:30:00Z" ] } 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /test_data/cql/input/additional_temporal_intersects_instant.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "t_disjoint", 3 | "args": [ 4 | { "property": "ex:updated_at" }, 5 | { "timestamp": "2012-08-10T05:30:00Z" } 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /test_data/cql/input/clause6_01.json: -------------------------------------------------------------------------------- 1 | { "op": "avg", "args": [ { "property": "ex:windSpeed" } ] } 2 | -------------------------------------------------------------------------------- /test_data/cql/input/clause6_02a.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "=", 3 | "args": [ 4 | { "property": "ex:city" }, 5 | "Toronto" 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- 
/test_data/cql/input/clause6_02b.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "<", 3 | "args": [ 4 | { 5 | "op": "avg", 6 | "args": [ { "property": "ex:windSpeed" } ] 7 | }, 8 | 4 9 | ] 10 | } 11 | -------------------------------------------------------------------------------- /test_data/cql/input/clause6_02c.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": ">", 3 | "args": [ 4 | { 5 | "op": "-", 6 | "args": [ 7 | { "property": "balance" }, 8 | 150.0 9 | ] 10 | }, 11 | 0 12 | ] 13 | } 14 | -------------------------------------------------------------------------------- /test_data/cql/input/clause6_02d.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": ">=", 3 | "args": [ 4 | { "property": "ex:updated" }, 5 | { "date": "1970-01-01" } 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /test_data/cql/input/clause6_03.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "not", 3 | "args": [ 4 | { 5 | "op": "isNull", 6 | "args": [ { "property": "ex:geometry" } ] 7 | } 8 | ] 9 | } 10 | -------------------------------------------------------------------------------- /test_data/cql/input/clause7_01.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "like", 3 | "args": [ 4 | { "property": "name" }, 5 | "Smith%" 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /test_data/cql/input/clause7_02.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "between", 3 | "args": [ 4 | { "property": "depth" }, 5 | 100.0, 6 | 150.0 7 | ] 8 | } 9 | -------------------------------------------------------------------------------- /test_data/cql/input/clause7_03a.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "op": "in", 3 | "args": [ 4 | { "property": "ex:cityName" }, 5 | [ "Toronto", "Frankfurt", "Tokyo", "New York" ] 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /test_data/cql/input/clause7_03b.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "not", 3 | "args": [ 4 | { 5 | "op": "in", 6 | "args": [ 7 | { "property": "ex:category" }, 8 | [ 1, 2, 3, 4 ] 9 | ] 10 | } 11 | ] 12 | } 13 | -------------------------------------------------------------------------------- /test_data/cql/input/clause7_04.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "in", 3 | "args": [ 4 | { 5 | "op": "casei", 6 | "args": [ { "property": "road_class" } ] 7 | }, 8 | [ 9 | { "op": "casei", "args": [ "Οδος" ] }, 10 | { "op": "casei", "args": [ "Straße" ] } 11 | ] 12 | ] 13 | } 14 | -------------------------------------------------------------------------------- /test_data/cql/input/clause7_05.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "=", 3 | "args": [ 4 | { 5 | "op": "accenti", 6 | "args": [ { "property": "ex:etat_vol" } ] 7 | }, 8 | { 9 | "op": "accenti", 10 | "args": [ "débárquér" ] 11 | } 12 | ] 13 | } 14 | -------------------------------------------------------------------------------- /test_data/cql/input/clause7_07.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "s_intersects", 3 | "args": [ 4 | { "property": "geometry" }, 5 | { 6 | "type": "Point", 7 | "coordinates": [ 36.319836, 32.288087 ] 8 | } 9 | ] 10 | } 11 | -------------------------------------------------------------------------------- /test_data/cql/input/clause7_10.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": 
"s_crosses", 3 | "args": [ 4 | { "property": "road" }, 5 | { 6 | "type": "Polygon", 7 | "coordinates": [ 8 | [ 9 | [ 43.7286, -79.2986 ], [ 43.7311, -79.2996 ], [ 43.7323, -79.2972 ], 10 | [ 43.7326, -79.2971 ], [ 43.7350, -79.2981 ], [ 43.7350, -79.2982 ], 11 | [ 43.7352, -79.2982 ], [ 43.7357, -79.2956 ], [ 43.7337, -79.2948 ], 12 | [ 43.7343, -79.2933 ], [ 43.7339, -79.2923 ], [ 43.7327, -79.2947 ], 13 | [ 43.7320, -79.2942 ], [ 43.7322, -79.2937 ], [ 43.7306, -79.2930 ], 14 | [ 43.7303, -79.2930 ], [ 43.7299, -79.2928 ], [ 43.7286, -79.2986 ] 15 | ] 16 | ] 17 | } 18 | ] 19 | } 20 | -------------------------------------------------------------------------------- /test_data/cql/input/clause7_12.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "t_intersects", 3 | "args": [ 4 | { "property": "ex:event_time" }, 5 | { "interval": [ "1969-07-16T05:32:00Z", "1969-07-24T16:50:35Z" ] } 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /test_data/cql/input/clause7_13.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "t_during", 3 | "args": [ 4 | { "interval": [ { "property": "ex:touchdown" }, { "property": "ex:liftOff" } ] }, 5 | { "interval": [ "1969-07-16T13:32:00Z", "1969-07-24T16:50:35Z" ] } 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /test_data/cql/input/clause7_15.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "a_contains", 3 | "args": [ 4 | { "property": "layer:ids" }, 5 | [ "layers-ca", "layers-us" ] 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /test_data/cql/input/clause7_16.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "s_crosses", 3 | "args": [ 4 | { 5 | "type": "LineString", 6 | "coordinates": [ 
7 | [ 43.72992, -79.2998 ], [ 43.73005, -79.2991 ], [ 43.73006, -79.2984 ], 8 | [ 43.73140, -79.2956 ], [ 43.73259, -79.2950 ], [ 43.73266, -79.2945 ], 9 | [ 43.73320, -79.2936 ], [ 43.73378, -79.2936 ], [ 43.73486, -79.2917 ] 10 | ] 11 | }, 12 | { 13 | "type": "Polygon", 14 | "coordinates": [ 15 | [ 16 | [ 43.7286, -79.2986 ], [ 43.7311, -79.2996 ], [ 43.7323, -79.2972 ], 17 | [ 43.7326, -79.2971 ], [ 43.7350, -79.2981 ], [ 43.7350, -79.2982 ], 18 | [ 43.7352, -79.2982 ], [ 43.7357, -79.2956 ], [ 43.7337, -79.2948 ], 19 | [ 43.7343, -79.2933 ], [ 43.7339, -79.2923 ], [ 43.7327, -79.2947 ], 20 | [ 43.7320, -79.2942 ], [ 43.7322, -79.2937 ], [ 43.7306, -79.2930 ], 21 | [ 43.7303, -79.2930 ], [ 43.7299, -79.2928 ], [ 43.7286, -79.2986 ] 22 | ] 23 | ] 24 | } 25 | ] 26 | } 27 | -------------------------------------------------------------------------------- /test_data/cql/input/clause7_17.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "t_during", 3 | "args": [ 4 | { "interval": [ "1969-07-20T20:17:40Z", "1969-07-21T17:54:00Z" ] }, 5 | { "interval": [ "1969-07-16T13:32:00Z", "1969-07-24T16:50:35Z" ] } 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /test_data/cql/input/clause7_18.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "s_within", 3 | "args": [ 4 | { "property": "ex:road" }, 5 | { 6 | "op": "Buffer", 7 | "args": [ 8 | { "property": "ex:geometry" }, 9 | 10, 10 | "m" 11 | ] 12 | } 13 | ] 14 | } 15 | -------------------------------------------------------------------------------- /test_data/cql/input/clause7_19.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": ">", 3 | "args": [ 4 | { "property": "ex:vehicle_height" }, 5 | { 6 | "op": "-", 7 | "args": [ 8 | { "property": "ex:bridge_clearance" }, 9 | 1 10 | ] 11 | } 12 | ] 13 | } 14 | 
-------------------------------------------------------------------------------- /test_data/cql/input/example01.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "=", 3 | "args": [ 4 | { "property": "landsat:scene_id" }, 5 | "LC82030282019133LGN00" 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /test_data/cql/input/example02.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "like", 3 | "args": [ 4 | { "property": "eo:instrument" }, 5 | "OLI%" 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /test_data/cql/input/example03.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "in", 3 | "args": [ 4 | { "property": "landsat:wrs_path" }, 5 | [ "153", "154", "15X" ] 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /test_data/cql/input/example04.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "and", 3 | "args": [ 4 | { 5 | "op": "<", 6 | "args": [ 7 | { "property": "eo:cloud_cover" }, 8 | 0.1 9 | ] 10 | }, 11 | { 12 | "op": "=", 13 | "args": [ 14 | { "property": "landsat:wrs_row" }, 15 | 28 16 | ] 17 | }, 18 | { 19 | "op": "=", 20 | "args": [ 21 | { "property": "landsat:wrs_path" }, 22 | 203 23 | ] 24 | } 25 | ] 26 | } 27 | -------------------------------------------------------------------------------- /test_data/cql/input/example05a.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "or", 3 | "args": [ 4 | { 5 | "op": "=", 6 | "args": [ 7 | { "property": "eo:cloud_cover" }, 8 | 0.1 9 | ] 10 | }, 11 | { 12 | "op": "=", 13 | "args": [ 14 | { "property": "eo:cloud_cover" }, 15 | 0.2 16 | ] 17 | } 18 | ] 19 | } 20 | 
-------------------------------------------------------------------------------- /test_data/cql/input/example05b.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "in", 3 | "args": [ 4 | { "property": "eo:cloud_cover" }, 5 | [ 0.1, 0.2 ] 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /test_data/cql/input/example06a.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "and", 3 | "args": [ 4 | { 5 | "op": "between", 6 | "args": [ 7 | { "property": "eo:cloud_cover" }, 8 | 0.1, 0.2 9 | ] 10 | }, 11 | { 12 | "op": "=", 13 | "args": [ 14 | { "property": "landsat:wrs_row" }, 15 | 28 16 | ] 17 | }, 18 | { 19 | "op": "=", 20 | "args": [ 21 | { "property": "landsat:wrs_path" }, 22 | 203 23 | ] 24 | } 25 | ] 26 | } 27 | -------------------------------------------------------------------------------- /test_data/cql/input/example06b.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "and", 3 | "args": [ 4 | { 5 | "op": ">=", 6 | "args": [ 7 | { "property": "eo:cloud_cover" }, 8 | 0.1 9 | ] 10 | }, 11 | { 12 | "op": "<=", 13 | "args": [ 14 | { "property": "eo:cloud_cover" }, 15 | 0.2 16 | ] 17 | }, 18 | { 19 | "op": "=", 20 | "args": [ 21 | { "property": "landsat:wrs_row" }, 22 | 28 23 | ] 24 | }, 25 | { 26 | "op": "=", 27 | "args": [ 28 | { "property": "landsat:wrs_path" }, 29 | 203 30 | ] 31 | } 32 | ] 33 | } 34 | -------------------------------------------------------------------------------- /test_data/cql/input/example07.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "and", 3 | "args": [ 4 | { 5 | "op": "like", 6 | "args": [ 7 | { "property": "eo:instrument" }, 8 | "OLI%" 9 | ] 10 | }, 11 | { 12 | "op": "s_intersects", 13 | "args": [ 14 | { "property": "ex:footprint" }, 15 | { 16 | "type": "Polygon", 17 | 
"coordinates": [ 18 | [ [ 43.5845, -79.5442 ], 19 | [ 43.6079, -79.4893 ], 20 | [ 43.5677, -79.4632 ], 21 | [ 43.6129, -79.3925 ], 22 | [ 43.6223, -79.3238 ], 23 | [ 43.6576, -79.3163 ], 24 | [ 43.7945, -79.1178 ], 25 | [ 43.8144, -79.1542 ], 26 | [ 43.8555, -79.1714 ], 27 | [ 43.7509, -79.639 ], 28 | [ 43.5845, -79.5442 ] 29 | ] 30 | ] 31 | } 32 | ] 33 | } 34 | ] 35 | } 36 | -------------------------------------------------------------------------------- /test_data/cql/input/example08.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "and", 3 | "args": [ 4 | { 5 | "op": "=", 6 | "args": [ 7 | { "property": "beamMode" }, 8 | "ScanSAR Narrow" 9 | ] 10 | }, 11 | { 12 | "op": "=", 13 | "args": [ 14 | { "property": "swathDirection" }, 15 | "ascending" 16 | ] 17 | }, 18 | { 19 | "op": "=", 20 | "args": [ 21 | { "property": "polarization" }, 22 | "HH+VV+HV+VH" 23 | ] 24 | }, 25 | { 26 | "op": "s_intersects", 27 | "args": [ 28 | { 29 | "property": "footprint" 30 | }, 31 | { 32 | "type": "Polygon", 33 | "coordinates": [ 34 | [ [ -77.117938, 38.936860 ], 35 | [ -77.040604, 39.995648 ], 36 | [ -76.910536, 38.892912 ], 37 | [ -77.039359, 38.791753 ], 38 | [ -77.047906, 38.841462 ], 39 | [ -77.034183, 38.840655 ], 40 | [ -77.033142, 38.857490 ], 41 | [ -77.117938, 38.936860 ] 42 | ] 43 | ] 44 | } 45 | ] 46 | } 47 | ] 48 | } 49 | -------------------------------------------------------------------------------- /test_data/cql/input/example09.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": ">", 3 | "args": [ 4 | { "property": "ex:floors" }, 5 | 5 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /test_data/cql/input/example10.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "<=", 3 | "args": [ 4 | { "property": "ex:taxes" }, 5 | 500 6 | ] 7 | } 8 | 
-------------------------------------------------------------------------------- /test_data/cql/input/example11.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "like", 3 | "args": [ 4 | { "property": "ex:owner" }, 5 | "%Jones%" 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /test_data/cql/input/example12.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "like", 3 | "args": [ 4 | { "property": "ex:owner" }, 5 | "Mike%" 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /test_data/cql/input/example13.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "not", 3 | "args": [ 4 | { 5 | "op": "like", 6 | "args": [ 7 | { "property": "ex:owner" }, 8 | "%Mike%" 9 | ] 10 | } 11 | ] 12 | } 13 | -------------------------------------------------------------------------------- /test_data/cql/input/example14.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "=", 3 | "args": [ 4 | { "property": "ex:swimming_pool" }, 5 | true 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /test_data/cql/input/example15.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "and", 3 | "args": [ 4 | { 5 | "op": ">", 6 | "args": [ 7 | { "property": "ex:floors" }, 8 | 5 9 | ] 10 | }, 11 | { 12 | "op": "=", 13 | "args": [ 14 | { "property": "ex:swimming_pool" }, 15 | true 16 | ] 17 | } 18 | ] 19 | } 20 | -------------------------------------------------------------------------------- /test_data/cql/input/example16.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "and", 3 | "args": [ 4 | { 5 | "op": "=", 6 | "args": [ 7 | { "property": "ex:swimming_pool" 
}, 8 | true 9 | ] 10 | }, 11 | { 12 | "op": "or", 13 | "args": [ 14 | { 15 | "op": ">", 16 | "args": [ 17 | { "property": "ex:floors" }, 18 | 5 19 | ] 20 | }, 21 | { 22 | "op": "like", 23 | "args": [ 24 | { "property": "ex:material" }, 25 | "brick%" 26 | ] 27 | }, 28 | { 29 | "op": "like", 30 | "args": [ 31 | { "property": "ex:material" }, 32 | "%brick" 33 | ] 34 | } 35 | ] 36 | } 37 | ] 38 | } 39 | -------------------------------------------------------------------------------- /test_data/cql/input/example17.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "or", 3 | "args": [ 4 | { 5 | "op": "and", 6 | "args": [ 7 | { 8 | "op": ">", 9 | "args": [ 10 | { "property": "ex:floors" }, 11 | 5 12 | ] 13 | }, 14 | { 15 | "op": "=", 16 | "args": [ 17 | { "property": "ex:material" }, 18 | "brick" 19 | ] 20 | } 21 | ] 22 | }, 23 | { 24 | "op": "=", 25 | "args": [ 26 | { "property": "ex:swimming_pool" }, 27 | true 28 | ] 29 | } 30 | ] 31 | } 32 | -------------------------------------------------------------------------------- /test_data/cql/input/example18.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "or", 3 | "args": [ 4 | { 5 | "op": "not", 6 | "args": [ 7 | { 8 | "op": "<", 9 | "args": [ 10 | { "property": "ex:floors" }, 11 | 5 12 | ] 13 | } 14 | ] 15 | }, 16 | { 17 | "op": "=", 18 | "args": [ 19 | { "property": "ex:swimming_pool" }, 20 | true 21 | ] 22 | } 23 | ] 24 | } 25 | -------------------------------------------------------------------------------- /test_data/cql/input/example19.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "and", 3 | "args": [ 4 | { 5 | "op": "or", 6 | "args": [ 7 | { 8 | "op": "like", 9 | "args": [ 10 | { "property": "ex:owner" }, 11 | "mike%" 12 | ] 13 | }, 14 | { 15 | "op": "like", 16 | "args": [ 17 | { "property": "ex:owner" }, 18 | "Mike%" 19 | ] 20 | } 21 | ] 22 | }, 23 | { 24 
| "op": "<", 25 | "args": [ 26 | { "property": "ex:floors" }, 27 | 4 28 | ] 29 | } 30 | ] 31 | } 32 | -------------------------------------------------------------------------------- /test_data/cql/input/example20.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "t_before", 3 | "args": [ 4 | { "property": "ex:built" }, 5 | { "date": "2015-01-01" } 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /test_data/cql/input/example21.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "t_after", 3 | "args": [ 4 | { "property": "ex:built" }, 5 | { "date": "2012-06-05" } 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /test_data/cql/input/example22.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "t_during", 3 | "args": [ 4 | { "interval": [ { "property": "ex:starts_at" }, { "property": "ex:ends_at" } ] }, 5 | { "interval": [ "2017-06-10T07:30:00Z", "2017-06-11T10:30:00Z" ] } 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /test_data/cql/input/example23.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "s_within", 3 | "args": [ 4 | { "property": "ex:location" }, 5 | { "bbox": [ -118, 33.8, -117.9, 34 ] } 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /test_data/cql/input/example24.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "s_intersects", 3 | "args": [ 4 | { "property": "ex:geometry" }, 5 | { 6 | "type": "Polygon", 7 | "coordinates": [ [ [ -10, -10 ], [ 10, -10 ], [ 10, 10 ], [ -10, -10 ] ] ] 8 | } 9 | ] 10 | } 11 | -------------------------------------------------------------------------------- 
/test_data/cql/input/example25.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "and", 3 | "args": [ 4 | { 5 | "op": ">", 6 | "args": [ 7 | { "property": "ex:floors" }, 8 | 5 9 | ] 10 | }, 11 | { 12 | "op": "s_within", 13 | "args": [ 14 | { "property": "geometry" }, 15 | { "bbox": [ -118, 33.8, -117.9, 34 ] } 16 | ] 17 | } 18 | ] 19 | } 20 | -------------------------------------------------------------------------------- /test_data/cql/input/example26.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "in", 3 | "args": [ 4 | { "op": "casei", "args": [ { "property": "ex:road_class" } ] }, 5 | [ 6 | { "op": "casei", "args": [ "Οδος" ] }, 7 | { "op": "casei", "args": [ "Straße" ] } 8 | ] 9 | ] 10 | } 11 | -------------------------------------------------------------------------------- /test_data/cql/input/example27.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "=", 3 | "args": [ 4 | { "op": "accenti", "args": [ { "property": "ex:etat_vol" } ] }, 5 | { "op": "accenti", "args": [ "débárquér" ] } 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /test_data/cql/input/example28.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "like", 3 | "args": [ 4 | { "op": "casei", "args": [ { "property": "geophys:SURVEY_NAME" } ] }, 5 | { "op": "casei", "args": [ "%calcutta%" ] } 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /test_data/cql/input/example29.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "=", 3 | "args": [ 4 | { "property": "ex:id" }, 5 | "fa7e1920-9107-422d-a3db-c468cbc5d6df" 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /test_data/cql/input/example30.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "op": "<>", 3 | "args": [ 4 | { "property": "ex:id" }, 5 | "fa7e1920-9107-422d-a3db-c468cbc5d6df" 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /test_data/cql/input/example31.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "<", 3 | "args": [ 4 | { "property": "ex:value" }, 5 | 10 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /test_data/cql/input/example32.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": ">", 3 | "args": [ 4 | { "property": "ex:value" }, 5 | 10 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /test_data/cql/input/example33.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "<=", 3 | "args": [ 4 | { "property": "ex:value" }, 5 | 10 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /test_data/cql/input/example34.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": ">=", 3 | "args": [ 4 | { "property": "ex:value" }, 5 | 10 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /test_data/cql/input/example35.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "like", 3 | "args": [ 4 | { "property": "ex:name" }, 5 | "foo%" 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /test_data/cql/input/example36.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "not", 3 | "args": [ 4 | { 5 | "op": "like", 6 | "args": [ 7 | { "property": "ex:name" }, 8 | "foo%" 9 | ] 10 | } 11 | ] 12 | 
} 13 | -------------------------------------------------------------------------------- /test_data/cql/input/example37.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "between", 3 | "args": [ 4 | { "property": "ex:value" }, 5 | 10, 20 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /test_data/cql/input/example38.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "not", 3 | "args": [ 4 | { 5 | "op": "between", 6 | "args": [ 7 | { "property": "ex:value" }, 8 | 10, 20 9 | ] 10 | } 11 | ] 12 | } 13 | -------------------------------------------------------------------------------- /test_data/cql/input/example39.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "in", 3 | "args": [ 4 | { "property": "ex:value" }, 5 | [ 1.0, 2.0, 3.0 ] 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /test_data/cql/input/example40.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "not", 3 | "args": [ 4 | { 5 | "op": "in", 6 | "args": [ 7 | { "property": "ex:value" }, 8 | [ "a", "b", "c" ] 9 | ] 10 | } 11 | ] 12 | } 13 | -------------------------------------------------------------------------------- /test_data/cql/input/example41.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "isNull", 3 | "args": [ { "property": "ex:value" } ] 4 | } 5 | -------------------------------------------------------------------------------- /test_data/cql/input/example42.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "not", 3 | "args": [ 4 | { 5 | "op": "isNull", 6 | "args": [ { "property": "ex:value" } ] 7 | } 8 | ] 9 | } 10 | 
-------------------------------------------------------------------------------- /test_data/cql/input/example43.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "and", 3 | "args": [ 4 | { 5 | "op": "not", 6 | "args": [ 7 | { 8 | "op": "like", 9 | "args": [ 10 | { "property": "ex:name" }, 11 | "foo%" 12 | ] 13 | } 14 | ] 15 | }, 16 | { 17 | "op": ">", 18 | "args": [ 19 | { "property": "ex:value" }, 20 | 10 21 | ] 22 | } 23 | ] 24 | } 25 | -------------------------------------------------------------------------------- /test_data/cql/input/example44.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "or", 3 | "args": [ 4 | { 5 | "op": "isNull", 6 | "args": [ { "property": "ex:value" } ] 7 | }, 8 | { 9 | "op": "between", 10 | "args": [ 11 | { "property": "ex:value" }, 12 | 10, 20 13 | ] 14 | } 15 | ] 16 | } 17 | -------------------------------------------------------------------------------- /test_data/cql/input/example45.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "s_intersects", 3 | "args": [ 4 | { "property": "geometry" }, 5 | { "bbox": [ -128.098193, -1.1, -99999.0, 180.0, 90.0, 100000.0 ] } 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /test_data/cql/input/example46.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "s_equals", 3 | "args": [ 4 | { 5 | "type": "Polygon", 6 | "coordinates": [ [ [ -0.333333, 89.0 ], 7 | [ -102.723546, -0.5 ], 8 | [ -179.0, -89.0 ], 9 | [ -1.9, 89.0 ], 10 | [ -0.0, 89.0 ], 11 | [ 2.00001, -1.9 ], 12 | [ -0.333333, 89.0 ] ] ] 13 | }, 14 | { "property": "geometry" } 15 | ] 16 | } 17 | -------------------------------------------------------------------------------- /test_data/cql/input/example47.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "op": "s_disjoint", 3 | "args": [ 4 | { "property": "geometry" }, 5 | { 6 | "type": "MultiPolygon", 7 | "coordinates": [ [ [ [ 144.022387, 45.176126 ], 8 | [ -1.1, 0.0 ], 9 | [ 180.0, 47.808086 ], 10 | [ 144.022387, 45.176126 ] ] ] ] 11 | } 12 | ] 13 | } 14 | -------------------------------------------------------------------------------- /test_data/cql/input/example48.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "s_touches", 3 | "args": [ 4 | { "property": "geometry" }, 5 | { 6 | "type": "MultiLineString", 7 | "coordinates": [ [ [ -1.9, -0.99999 ], 8 | [ 75.292574, 1.5 ], 9 | [ -0.5, -4.016458 ], 10 | [ -31.708594, -74.743801 ], 11 | [ 179.0, -90.0 ] ], 12 | [ [ -1.9, -1.1 ], 13 | [ 1.5, 8.547371 ] ] ] 14 | } 15 | ] 16 | } 17 | -------------------------------------------------------------------------------- /test_data/cql/input/example49.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "s_within", 3 | "args": [ 4 | { 5 | "type": "Polygon", 6 | "coordinates": [ [ [ -49.88024, 0.5, -75993.341684 ], 7 | [ -1.5, -0.99999, -100000.0 ], 8 | [ 0.0, 0.5, -0.333333 ], 9 | [ -49.88024, 0.5, -75993.341684 ] ], 10 | [ [ -65.887123, 2.00001, -100000.0 ], 11 | [ 0.333333, -53.017711, -79471.332949 ], 12 | [ 180.0, 0.0, 1852.616704 ], 13 | [ -65.887123, 2.00001, -100000.0 ] ] ] 14 | }, 15 | { "property": "geometry" } 16 | ] 17 | } 18 | -------------------------------------------------------------------------------- /test_data/cql/input/example50.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "s_overlaps", 3 | "args": [ 4 | { "property": "geometry" }, 5 | { "bbox": [ -179.912109, 1.9, 180.0, 16.897016 ] } 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- 
/test_data/cql/input/example51.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "s_crosses", 3 | "args": [ 4 | { "property": "geometry" }, 5 | { 6 | "type": "LineString", 7 | "coordinates": [ [ 172.03086, 1.5 ], 8 | [ 1.1, -90.0 ], 9 | [ -159.757695, 0.99999 ], 10 | [ -180.0, 0.5 ], 11 | [ -12.111235, 81.336403 ], 12 | [ -0.5, 64.43958 ], 13 | [ 0.0, 81.991815 ], 14 | [ -155.93831, 90.0 ] ] 15 | } 16 | ] 17 | } 18 | -------------------------------------------------------------------------------- /test_data/cql/input/example52.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "s_contains", 3 | "args": [ 4 | { "property": "geometry" }, 5 | { 6 | "type": "Point", 7 | "coordinates": [ -3.508362, -1.754181 ] 8 | } 9 | ] 10 | } 11 | -------------------------------------------------------------------------------- /test_data/cql/input/example53.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "t_after", 3 | "args": [ 4 | { "property": "ex:updated_at" }, 5 | { "date": "2010-02-10" } 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /test_data/cql/input/example54.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "t_before", 3 | "args": [ 4 | { "property": "ex:updated_at" }, 5 | { "timestamp": "2012-08-10T05:30:00Z" } 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /test_data/cql/input/example55.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "t_contains", 3 | "args": [ 4 | { "interval": [ "2000-01-01T00:00:00Z", "2005-01-10T01:01:01.393216Z" ] }, 5 | { "interval": [ { "property": "ex:starts_at" }, { "property": "ex:ends_at" } ] } 6 | 7 | ] 8 | } 9 | 
-------------------------------------------------------------------------------- /test_data/cql/input/example56.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "t_disjoint", 3 | "args": [ 4 | { "interval": [ "..", "2005-01-10T01:01:01.393216Z" ] }, 5 | { "interval": [ { "property": "ex:starts_at" }, { "property": "ex:ends_at" } ] } 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /test_data/cql/input/example57.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "t_during", 3 | "args": [ 4 | {"interval": [{ "property": "ex:starts_at" }, { "property": "ex:ends_at" }]}, 5 | {"interval": ["2005-01-10", "2010-02-10"] 6 | } 7 | ] 8 | } 9 | -------------------------------------------------------------------------------- /test_data/cql/input/example58.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "t_equals", 3 | "args": [ 4 | { "property": "ex:updated_at" }, 5 | { "date": "1851-04-29" } 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /test_data/cql/input/example59.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "t_finishedBy", 3 | "args": [ 4 | { "interval": [ { "property": "ex:starts_at" }, { "property": "ex:ends_at" } ] }, 5 | { "interval": [ "1991-10-07T08:21:06.393262Z", "2010-02-10T05:29:20.073225Z" ] } 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /test_data/cql/input/example60.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "t_finishes", 3 | "args": [ 4 | { "interval": [ { "property": "ex:starts_at" }, { "property": "ex:ends_at" } ] }, 5 | { "interval": [ "1991-10-07", "2010-02-10T05:29:20.073225Z" ] } 6 | ] 7 | } 8 | 
-------------------------------------------------------------------------------- /test_data/cql/input/example61.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "t_intersects", 3 | "args": [ 4 | { "interval": [ { "property": "ex:starts_at" }, { "property": "ex:ends_at" } ] }, 5 | { "interval": [ "1991-10-07T08:21:06.393262Z", "2010-02-10T05:29:20.073225Z" ] } 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /test_data/cql/input/example62.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "t_meets", 3 | "args": [ 4 | { "interval": [ "2005-01-10", "2010-02-10" ] }, 5 | { "interval": [ { "property": "ex:starts_at" }, { "property": "ex:ends_at" } ] } 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /test_data/cql/input/example63.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "t_metBy", 3 | "args": [ 4 | { "interval": [ "2010-02-10T05:29:20.073225Z", "2010-10-07" ] }, 5 | { "interval": [ { "property": "ex:starts_at" }, { "property": "ex:ends_at" } ] } 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /test_data/cql/input/example64.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "t_overlappedBy", 3 | "args": [ 4 | { "interval": [ "1991-10-07T08:21:06.393262Z", "2010-02-10T05:29:20.073225Z" ] }, 5 | { "interval": [ { "property": "ex:starts_at" }, { "property": "ex:ends_at" } ] } 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /test_data/cql/input/example65.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "t_overlaps", 3 | "args": [ 4 | { "interval": [ { "property": "ex:starts_at" }, { "property": "ex:ends_at" } ] }, 5 
| { "interval": [ "1991-10-07T08:21:06.393262Z", "1992-10-09T08:08:08.393473Z" ] } 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /test_data/cql/input/example66.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "t_startedBy", 3 | "args": [ 4 | { "interval": [ "1991-10-07T08:21:06.393262Z", "2010-02-10T05:29:20.073225Z" ] }, 5 | { "interval": [ { "property": "ex:starts_at" }, { "property": "ex:ends_at" } ] } 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /test_data/cql/input/example67.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "t_starts", 3 | "args": [ 4 | { "interval": [ { "property": "ex:starts_at" }, { "property": "ex:ends_at" } ] }, 5 | { "interval": [ "1991-10-07T08:21:06.393262Z", ".." ] } 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /test_data/cql/input/example68.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "=", 3 | "args": [ 4 | { 5 | "op": "Foo", 6 | "args": [ { "property": "geometry" } ] 7 | }, 8 | true 9 | ] 10 | } 11 | -------------------------------------------------------------------------------- /test_data/cql/input/example69.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "<>", 3 | "args": [ 4 | false, 5 | { 6 | "op": "Bar", 7 | "args": [ { "property": "geometry" }, 100, "a", "b", false ] 8 | } 9 | ] 10 | } 11 | -------------------------------------------------------------------------------- /test_data/cql/input/example70.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "=", 3 | "args": [ 4 | { "op": "accenti", "args": [ { "property": "ex:owner" } ] }, 5 | { "op": "accenti", "args": [ "Beyoncé" ] } 6 | ] 7 | } 8 | 
-------------------------------------------------------------------------------- /test_data/cql/input/example71.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "=", 3 | "args": [ 4 | { "op": "casei", "args": [ { "property": "ex:owner" } ] }, 5 | { "op": "casei", "args": [ "somebody else" ] } 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /test_data/cql/input/example72.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": ">", 3 | "args": [ 4 | { "property": "ex:value" }, 5 | { 6 | "op": "+", 7 | "args": [ 8 | { "property": "foo" }, 9 | 10 10 | ] 11 | } 12 | ] 13 | } 14 | -------------------------------------------------------------------------------- /test_data/cql/input/example73.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "<", 3 | "args": [ 4 | { "property": "ex:value" }, 5 | { 6 | "op": "-", 7 | "args": [ 8 | { "property": "ex:foo" }, 9 | 10 10 | ] 11 | } 12 | ] 13 | } 14 | -------------------------------------------------------------------------------- /test_data/cql/input/example74.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "<>", 3 | "args": [ 4 | { "property": "ex:value" }, 5 | { 6 | "op": "*", 7 | "args": [ 8 | 22.1, 9 | { "property": "foo" } 10 | ] 11 | } 12 | ] 13 | } 14 | -------------------------------------------------------------------------------- /test_data/cql/input/example75.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "=", 3 | "args": [ 4 | { "property": "ex:value" }, 5 | { 6 | "op": "/", 7 | "args": [ 8 | 2, 9 | { "property": "ex:foo" } 10 | ] 11 | } 12 | ] 13 | } 14 | -------------------------------------------------------------------------------- /test_data/cql/input/example76.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "op": "<=", 3 | "args": [ 4 | { "property": "ex:value" }, 5 | { 6 | "op": "^", 7 | "args": [ 2, { "property": "ex:foo" } ] 8 | } 9 | ] 10 | } 11 | -------------------------------------------------------------------------------- /test_data/cql/input/example77.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "=", 3 | "args": [ 4 | 0, 5 | { 6 | "op": "%", 7 | "args": [ { "property": "ex:foo" }, 2 ] 8 | } 9 | ] 10 | } 11 | -------------------------------------------------------------------------------- /test_data/cql/input/example78.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "=", 3 | "args": [ 4 | 1, 5 | { 6 | "op": "div", 7 | "args": [ { "property": "ex:foo" }, 2 ] 8 | } 9 | ] 10 | } 11 | -------------------------------------------------------------------------------- /test_data/cql/input/example79.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "a_containedBy", 3 | "args": [ 4 | { "property": "ex:values" }, 5 | [ "a", "b", "c" ] 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /test_data/cql/input/example80.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "a_contains", 3 | "args": [ 4 | { "property": "ex:values" }, 5 | [ "a", "b", "c" ] 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /test_data/cql/input/example81.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "a_equals", 3 | "args": [ 4 | [ "a", true, 1.0, 8 ], 5 | { "property": "ex:values" } 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /test_data/cql/input/example82.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "op": "a_overlaps", 3 | "args": [ 4 | { "property": "ex:values" }, 5 | [ { "timestamp": "2012-08-10T05:30:00Z" }, { "date": "2010-02-10" }, false ] 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /test_data/cql/input/example83.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "s_equals", 3 | "args": [ 4 | { 5 | "type": "MultiPoint", 6 | "coordinates": [ [ 180.0, -0.5 ], 7 | [ 179.0, -47.121701 ], 8 | [ 180.0, -0.0 ], 9 | [ 33.470475, -0.99999 ], 10 | [ 179.0, -15.333062 ] ] 11 | }, 12 | { "property": "geometry" } 13 | ] 14 | } 15 | -------------------------------------------------------------------------------- /test_data/cql/input/example84.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "s_equals", 3 | "args": [ 4 | { 5 | "type": "GeometryCollection", 6 | "geometries": [ 7 | { 8 | "type": "Point", 9 | "coordinates": [ 1.9, 2.00001 ] 10 | }, 11 | { 12 | "type": "Point", 13 | "coordinates": [ 0.0, -2.00001 ] 14 | }, 15 | { 16 | "type": "MultiLineString", 17 | "coordinates": [ [ [ -2.00001, -0.0 ], 18 | [ -77.292642, -0.5 ], 19 | [ -87.515626, -0.0 ], 20 | [ -180.0, 12.502773 ], 21 | [ 21.204842, -1.5 ], 22 | [ -21.878857, -90.0 ] ] ] 23 | }, 24 | { 25 | "type": "Point", 26 | "coordinates": [ 1.9, 0.5 ] 27 | }, 28 | { 29 | "type": "LineString", 30 | "coordinates": [ [ 179.0, 1.179148 ], 31 | [ -148.192487, -65.007816 ], 32 | [ 0.5, 0.333333 ] ] 33 | } 34 | ] 35 | }, 36 | { "property": "geometry" } 37 | ] 38 | } 39 | -------------------------------------------------------------------------------- /test_data/cql/input/example85.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "=", 3 | "args": [ 4 | { "property": "ex:value" }, 5 | { 6 | "op": "-", 7 | "args": [ 8 | { 9 | 
"op": "+", 10 | "args": [ 11 | { 12 | "op": "*", 13 | "args": [ 14 | { 15 | "op": "*", 16 | "args": [ -1, { "property": "foo" } ] 17 | }, 18 | 2.0 19 | ] 20 | }, 21 | { 22 | "op": "/", 23 | "args": [ { "property": "bar" }, 6.1234 ] 24 | } 25 | ] 26 | }, 27 | { 28 | "op": "^", 29 | "args": [ { "property": "x" }, 2.0 ] 30 | } 31 | ] 32 | } 33 | ] 34 | } 35 | -------------------------------------------------------------------------------- /test_data/cql/input/example86.json: -------------------------------------------------------------------------------- 1 | { 2 | "op": "like", 3 | "args": [ 4 | { "property": "ex:name" }, 5 | { "op": "casei", "args": [ "FOO%" ] } 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /test_data/cql_queryable_shapes.ttl: -------------------------------------------------------------------------------- 1 | @prefix cql: . 2 | @prefix dcterms: . 3 | @prefix dwc: . 4 | @prefix ex: . 5 | @prefix sh: . 6 | @prefix sname: . 7 | @prefix sosa: . 8 | @prefix xsd: . 9 | 10 | ex:SpeciesQueryableShape 11 | a sh:PropertyShape ; 12 | a cql:Queryable ; 13 | sh:path ( 14 | [ sh:inversePath ex:hasFeatureOfInterest ] 15 | [ 16 | sh:zeroOrMorePath [ sh:inversePath ex:hasMember ] 17 | ] 18 | ex:hasSimpleResult 19 | ) ; 20 | sh:datatype xsd:string ; 21 | sh:in ( 22 | "Homo sapiens" 23 | "Canis lupus familiaris" 24 | "Felis catus" 25 | "Mus musculus" 26 | "Rattus norvegicus" 27 | ) ; 28 | sh:name "Species Name" ; 29 | dcterms:identifier "specname" ; 30 | . 31 | -------------------------------------------------------------------------------- /test_data/cql_queryable_shapes_bdr.ttl: -------------------------------------------------------------------------------- 1 | @prefix cql: . 2 | @prefix dcterms: . 3 | @prefix dwc: . 4 | @prefix ex: . 5 | @prefix sh: . 6 | @prefix sname: . 7 | @prefix sosa: . 8 | @prefix xsd: . 
9 | 10 | ex:BDRScientificNameQueryableShape 11 | a sh:PropertyShape ; 12 | a cql:Queryable ; 13 | sh:path ( 14 | [ sh:inversePath sosa:hasFeatureOfInterest ] 15 | sosa:hasMember 16 | sosa:hasResult 17 | dwc:scientificNameID 18 | ) ; 19 | sh:name "Scientific Name" ; 20 | dcterms:identifier "scientificname" ; 21 | sh:datatype xsd:string ; 22 | sh:in ( 23 | sname:001 24 | sname:002 25 | sname:003 26 | sname:004 27 | sname:005 28 | sname:006 29 | sname:007 30 | sname:008 31 | sname:009 32 | sname:010 33 | ) ; 34 | . -------------------------------------------------------------------------------- /test_data/custom_endpoints_vanilla_5_level_data.ttl: -------------------------------------------------------------------------------- 1 | @prefix ex: . 2 | @prefix rdf: . 3 | @prefix rdfs: . 4 | @prefix xsd: . 5 | 6 | # Level One instance 7 | ex:lvl1 a ex:One ; 8 | rdfs:label "Level One Item" ; 9 | ex:one-to-two ex:lvl2 ; 10 | ex:alternative-one-to-two ex:lvl2other . 11 | 12 | # Level Two instance 13 | ex:lvl2 a ex:Two ; 14 | rdfs:label "Level Two Item" ; 15 | ex:two-to-three ex:lvl3 . 16 | 17 | # Level Two instance 18 | ex:lvl2other a ex:TwoOtherClass ; 19 | rdfs:label "Level Two OTHER Item" . 20 | 21 | # Level Three instance 22 | ex:lvl3 a ex:Three ; 23 | rdfs:label "Level Three Item" ; 24 | ex:three-to-four ex:lvl4 . 25 | 26 | # Level Four instance 27 | ex:lvl4 a ex:Four ; 28 | rdfs:label "Level Four Item" ; 29 | ex:four-to-five ex:lvl5 . 30 | 31 | # Level Five instance 32 | ex:lvl5 a ex:Five ; 33 | rdfs:label "Level Five Item" . -------------------------------------------------------------------------------- /test_data/fts_property_shapes.ttl: -------------------------------------------------------------------------------- 1 | @prefix rdf: . 2 | @prefix sdo: . 3 | @prefix ont: . 4 | @prefix dcterms: . 5 | @prefix dwc: . 6 | @prefix ex: . 7 | @prefix sh: . 8 | @prefix sname: . 9 | @prefix sosa: . 10 | @prefix xsd: . 11 | @prefix prez: . 
12 | 13 | 14 | ex:FTSInverseShape 15 | a sh:PropertyShape ; 16 | a ont:JenaFTSPropertyShape ; 17 | sh:path [ sh:inversePath ex:hasFeatureOfInterest ] ; 18 | sh:name "Inverse" ; 19 | dcterms:identifier "inv" ; 20 | . 21 | 22 | ex:FTSSequenceShape 23 | a sh:PropertyShape ; 24 | a ont:JenaFTSPropertyShape ; 25 | sh:path 26 | ( 27 | ex:prop1 ex:prop2 ex:labelProp 28 | ) ; 29 | sh:name "Sequence" ; 30 | dcterms:identifier "seq" ; 31 | . 32 | 33 | ex:FTSInverseSequenceShape 34 | a sh:PropertyShape ; 35 | a ont:JenaFTSPropertyShape ; 36 | sh:path ( ex:hasSimpleResult [ sh:inversePath ex:hasFeatureOfInterest ] ) ; 37 | sh:name "Sequence Inverse" ; 38 | dcterms:identifier "seqinv" ; 39 | . 40 | 41 | ex:RealExample 42 | a sh:PropertyShape ; 43 | a ont:JenaFTSPropertyShape ; 44 | sh:path ( sosa:isFeatureOfInterestOf sosa:hasResult ) ; 45 | sh:name "Real Example" ; 46 | dcterms:identifier "real" ; 47 | ont:searchPredicate sdo:description ; 48 | . 49 | 50 | ex:OOMP 51 | a sh:PropertyShape ; 52 | a ont:JenaFTSPropertyShape ; 53 | sh:path [ sh:oneOrMorePath dcterms:hasPart ] ; 54 | sh:name "one ore more path" ; 55 | dcterms:identifier "oomp" ; 56 | ont:searchPredicate rdf:value ; 57 | . 58 | -------------------------------------------------------------------------------- /test_data/issue_286.ttl: -------------------------------------------------------------------------------- 1 | PREFIX dcat: 2 | PREFIX dcterms: 3 | PREFIX ex: 4 | PREFIX geo: 5 | PREFIX rdfs: 6 | PREFIX sdo: 7 | PREFIX sosa: 8 | 9 | ex:cat a dcat:Catalog; 10 | rdfs:label "a catalog"; 11 | dcterms:hasPart ex:res . 12 | 13 | ex:res a dcat:Resource; 14 | rdfs:label "a resource"; 15 | sosa:isFeatureOfInterestOf [ a sosa:Observation; 16 | sosa:hasResult [ a sosa:Result; 17 | rdfs:label "a result"; 18 | sdo:value ex:feat 19 | ] 20 | ] . 21 | 22 | ex:feat a geo:Feature; 23 | rdfs:label "a feature" . 
-------------------------------------------------------------------------------- /test_data/object_catalog_bblocks_catalog.ttl: -------------------------------------------------------------------------------- 1 | @prefix dcat: . 2 | @prefix dcterms: . 3 | @prefix vocab: . 4 | @prefix catalog: . 5 | @prefix prez: . 6 | 7 | catalog:bblocks 8 | a dcat:Catalog ; 9 | dcterms:identifier "bblocks" ; 10 | dcterms:title "A catalog of Building Block Vocabularies" ; 11 | dcterms:hasPart vocab:api , vocab:datatype , vocab:parameter , vocab:schema ; 12 | . 13 | -------------------------------------------------------------------------------- /test_data/object_vocab_api_bblocks.ttl: -------------------------------------------------------------------------------- 1 | @prefix bblocks: . 2 | @prefix dct: . 3 | @prefix prov: . 4 | @prefix rdfs: . 5 | @prefix schema: . 6 | @prefix skos: . 7 | @prefix xsd: . 8 | @prefix vocab: . 9 | @prefix prez: . 10 | 11 | vocab:api 12 | a skos:ConceptScheme ; 13 | skos:prefLabel "API Building Blocks" ; 14 | skos:hasTopConcept bblocks:ogc.unstable.sosa ; 15 | dct:identifier "api" ; 16 | . 
17 | 18 | bblocks:ogc.unstable.sosa a skos:Concept, 19 | bblocks:Api ; 20 | rdfs:label "Sensor, Observation, Sample, and Actuator (SOSA)" ; 21 | dct:abstract "The SOSA (Sensor, Observation, Sample, and Actuator) ontology is a realisation of the Observations, Measurements and Sampling (OMS) Conceptual model" ; 22 | dct:created "2023-04-13T00:00:00+00:00"^^xsd:dateTime ; 23 | dct:description [ dct:format "application/json" ; 24 | rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/unstable/sosa/index.json" ], 25 | [ dct:format "text/markdown" ; 26 | rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/unstable/sosa/index.md" ], 27 | [ dct:format "text/html" ; 28 | rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/unstable/sosa/" ] ; 29 | dct:hasVersion "1.0" ; 30 | dct:modified "2023-04-13"^^xsd:date ; 31 | dct:source ; 32 | skos:inScheme , vocab:api ; 33 | bblocks:hasJsonLdContext ; 34 | bblocks:hasSchema , 35 | ; 36 | bblocks:scope ; 37 | bblocks:status ; 38 | . 39 | -------------------------------------------------------------------------------- /test_data/object_vocab_datatype_bblocks.ttl: -------------------------------------------------------------------------------- 1 | @prefix bblocks: . 2 | @prefix dct: . 3 | @prefix prov: . 4 | @prefix rdfs: . 5 | @prefix schema: . 6 | @prefix skos: . 7 | @prefix xsd: . 8 | @prefix vocab: . 9 | 10 | vocab:datatype 11 | a skos:ConceptScheme ; 12 | skos:prefLabel "Datatype Building Blocks" ; 13 | skos:hasTopConcept bblocks:ogc.ogc-utils.iri-or-curie ; 14 | dct:identifier "datatype" ; 15 | . 
16 | 17 | bblocks:ogc.ogc-utils.iri-or-curie a skos:Concept, 18 | bblocks:Datatype ; 19 | rdfs:label "IRI or CURIE" ; 20 | dct:abstract "This Building Block defines a data type for a full IRI/URI or a CURIE (with or without a prefix)" ; 21 | dct:created "2023-08-08T00:00:00+00:00"^^xsd:dateTime ; 22 | dct:description [ dct:format "text/markdown" ; 23 | rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/markdown/ogc-utils/iri-or-curie/index.md" ], 24 | [ dct:format "text/html" ; 25 | rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/slate-build/ogc-utils/iri-or-curie/" ], 26 | [ dct:format "application/json" ; 27 | rdfs:isDefinedBy "https://opengeospatial.github.io/bblocks/generateddocs/json-full/ogc-utils/iri-or-curie/index.json" ] ; 28 | dct:hasVersion "1.0" ; 29 | dct:modified "2023-03-09"^^xsd:date ; 30 | dct:source , 31 | , 32 | ; 33 | skos:inScheme , vocab:datatype ; 34 | bblocks:hasJsonLdContext ; 35 | bblocks:hasSchema , 36 | ; 37 | bblocks:scope ; 38 | bblocks:status . 39 | -------------------------------------------------------------------------------- /test_data/obs.ttl: -------------------------------------------------------------------------------- 1 | @prefix iam: . 2 | @prefix ilm: . 3 | @prefix schema: . 4 | @prefix sosa: . 5 | 6 | sosa:isFeatureOfInterestOf _:Ne7fb721e13d643e4a6d5b9cd2a06cfea ; 7 | a ; 8 | . 9 | 10 | _:Ne7fb721e13d643e4a6d5b9cd2a06cfea a sosa:Observation ; 11 | sosa:hasResult _:Nf0b4feb4ba064a39b6bd373d65a22abc ; 12 | sosa:observedProperty iam:MentionsIndigenousPlaceName . 13 | 14 | _:Nf0b4feb4ba064a39b6bd373d65a22abc a sosa:Result ; 15 | schema:description "yuendumu" ; 16 | schema:value . 17 | 18 | sosa:isFeatureOfInterestOf _:N9fc31556bca74a4d963ed93a5b912f0f . 19 | 20 | _:N9fc31556bca74a4d963ed93a5b912f0f a sosa:Observation ; 21 | sosa:hasResult _:N0086e670ba384871a1cbf11b48836cbb ; 22 | sosa:observedProperty iam:MentionsIndigenousPlaceName . 
23 | 24 | _:N0086e670ba384871a1cbf11b48836cbb a sosa:Result ; 25 | schema:description "murdi paaki" ; 26 | schema:value . 27 | 28 | sosa:isFeatureOfInterestOf _:N57b531df78954db3ba9f27ee84102b3e . 29 | 30 | _:N57b531df78954db3ba9f27ee84102b3e a sosa:Observation ; 31 | sosa:hasResult _:N158974fc772e44919f58bf23b1febee6 ; 32 | sosa:observedProperty iam:MentionsIndigenousPlaceName . 33 | 34 | _:N158974fc772e44919f58bf23b1febee6 a sosa:Result ; 35 | schema:description "kuranda" ; 36 | schema:value . 37 | 38 | sosa:isFeatureOfInterestOf _:Nf0e1dbe9e76f437f8e820020f8e67c59 . 39 | 40 | _:Nf0e1dbe9e76f437f8e820020f8e67c59 a sosa:Observation ; 41 | sosa:hasResult _:N63268e068cf149f0b5ac1e7d9cacf671 ; 42 | sosa:observedProperty iam:MentionsIndigenousPlaceName . 43 | 44 | _:N63268e068cf149f0b5ac1e7d9cacf671 a sosa:Result ; 45 | schema:description "tangentyere" ; 46 | schema:value . 47 | -------------------------------------------------------------------------------- /test_data/redirect-foaf-homepage.ttl: -------------------------------------------------------------------------------- 1 | PREFIX foaf: 2 | 3 | foaf:homepage . 4 | -------------------------------------------------------------------------------- /test_data/spaceprez.ttl: -------------------------------------------------------------------------------- 1 | PREFIX dcat: 2 | PREFIX dcterms: 3 | PREFIX sp: 4 | PREFIX geo: 5 | PREFIX rdfs: 6 | PREFIX void: 7 | 8 | 9 | sp:SpacePrezCatalog a dcat:Catalog ; 10 | dcterms:title "SpacePrez Catalog" ; 11 | dcterms:description "A catalog of SpacePrez data" ; 12 | dcterms:hasPart sp:SpacePrezDataset ; 13 | . 14 | 15 | sp:SpacePrezDataset a dcat:Dataset ; 16 | dcterms:title "SpacePrez Dataset" ; 17 | dcterms:description "A dataset of SpacePrez data" ; 18 | . 
19 | 20 | sp:FeatureCollection a geo:FeatureCollection ; 21 | void:inDataset sp:SpacePrezDataset ; 22 | rdfs:label "Geo Feature Collection" ; 23 | rdfs:member sp:Feature1 , sp:Feature2 ; 24 | sp:property "lower level feature collection property" 25 | . 26 | 27 | sp:Feature1 a geo:Feature ; 28 | rdfs:label "Feature 1" ; 29 | geo:hasGeometry [ 30 | geo:asWKT "POLYGON((1 1, 0 10, 10 10, 10 0, 1 1))"^^geo:wktLiteral 31 | ] ; 32 | sp:property "feature property" ; 33 | . 34 | 35 | sp:Feature2 a geo:Feature ; 36 | rdfs:label "Feature 2" ; 37 | geo:hasGeometry [ 38 | geo:asWKT "POLYGON((2 2, 0 10, 10 10, 10 0, 2 2))"^^geo:wktLiteral 39 | ] ; 40 | sp:property "feature property" ; 41 | . -------------------------------------------------------------------------------- /test_data/vocprez.ttl: -------------------------------------------------------------------------------- 1 | PREFIX dcat: 2 | PREFIX dcterms: 3 | PREFIX ex: 4 | PREFIX rdfs: 5 | PREFIX skos: 6 | 7 | ex:VocPrezCatalog a dcat:Catalog ; 8 | rdfs:label "A Demo Catalog" ; 9 | dcterms:hasPart ex:SchemingConceptScheme , ; 10 | ex:property "cataract" ; 11 | . 12 | 13 | ex:SchemingConceptScheme a skos:ConceptScheme ; 14 | skos:prefLabel "The Scheming Concept Scheme" ; 15 | skos:hasTopConcept ex:TopLevelConcept ; 16 | ex:property "schemish conceptual property" 17 | . 18 | 19 | ex:TopLevelConcept a skos:Concept ; 20 | skos:prefLabel "The toppiest of concepts" ; 21 | ex:property "a property of the toppiest concept" ; 22 | skos:narrower ex:SecondLevelConcept , ex:SiblingSecondLevelConcept ; 23 | skos:inScheme ex:SchemingConceptScheme ; 24 | . 25 | 26 | ex:SecondLevelConcept a skos:Concept ; 27 | skos:prefLabel "A second level concept" ; 28 | ex:property "a property of the second level concept" ; 29 | skos:narrower ex:ThirdLevelConcept ; 30 | skos:inScheme ex:SchemingConceptScheme ; 31 | . 
32 | 33 | ex:SiblingSecondLevelConcept a skos:Concept ; 34 | skos:prefLabel "A sibling second level concept" ; 35 | ex:property "a property of the sibling second level concept" ; 36 | skos:inScheme ex:SchemingConceptScheme ; 37 | . 38 | 39 | ex:ThirdLevelConcept a skos:Concept ; 40 | skos:prefLabel "A third level concept" ; 41 | ex:property "a property of the third level concept" ; 42 | skos:inScheme ex:SchemingConceptScheme ; 43 | . 44 | 45 | ex:SecondLevelConceptBroader a skos:Concept ; 46 | skos:prefLabel "A second level concept broader" ; 47 | ex:property "a property of the first level concept" ; 48 | skos:broader ex:TopLevelConcept ; 49 | skos:inScheme ex:SchemingConceptScheme ; 50 | . 51 | 52 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RDFLib/prez/d3e8bec0157bea6b5602b2825aa664018bcfabf1/tests/__init__.py -------------------------------------------------------------------------------- /tests/_test_count.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from fastapi.testclient import TestClient 3 | 4 | 5 | def get_curie(client: TestClient, iri: str) -> str: 6 | response = client.get(f"/identifier/curie/{iri}") 7 | if response.status_code != 200: 8 | raise ValueError(f"Failed to retrieve curie for {iri}. 
{response.text}") 9 | return response.text 10 | 11 | 12 | @pytest.mark.parametrize( 13 | "iri, inbound, outbound, count", 14 | [ 15 | [ 16 | "http://linked.data.gov.au/def/borehole-purpose", 17 | "http://www.w3.org/2004/02/skos/core#inScheme", 18 | None, 19 | 0, 20 | ], 21 | [ 22 | "http://linked.data.gov.au/def/borehole-purpose-no-children", 23 | "http://www.w3.org/2004/02/skos/core#inScheme", 24 | None, 25 | 0, 26 | ], 27 | [ 28 | "http://linked.data.gov.au/def/borehole-purpose", 29 | None, 30 | "http://www.w3.org/2004/02/skos/core#hasTopConcept", 31 | 0, 32 | ], 33 | ], 34 | ) 35 | def test_count( 36 | client: TestClient, 37 | iri: str, 38 | inbound: str | None, 39 | outbound: str | None, 40 | count: int, 41 | ): 42 | curie = get_curie(client, iri) 43 | params = {"curie": curie, "inbound": inbound, "outbound": outbound} 44 | response = client.get("/count", params=params) 45 | assert int(response.text) == count 46 | -------------------------------------------------------------------------------- /tests/cql-fuseki-config.ttl: -------------------------------------------------------------------------------- 1 | ## Licensed under the terms of http://www.apache.org/licenses/LICENSE-2.0 2 | 3 | PREFIX : <#> 4 | PREFIX fuseki: 5 | PREFIX rdf: 6 | PREFIX rdfs: 7 | PREFIX ja: 8 | PREFIX geosparql: 9 | 10 | [] rdf:type fuseki:Server ; 11 | fuseki:services ( 12 | :service 13 | ) . 14 | 15 | :service rdf:type fuseki:Service ; 16 | fuseki:name "dataset" ; 17 | fuseki:endpoint [ fuseki:operation fuseki:query ; ] ; 18 | fuseki:endpoint [ fuseki:operation fuseki:query ; fuseki:name "sparql" ]; 19 | fuseki:endpoint [ fuseki:operation fuseki:query ; fuseki:name "query" ] ; 20 | fuseki:endpoint [ fuseki:operation fuseki:update ; fuseki:name "update" ]; 21 | fuseki:endpoint [ fuseki:operation fuseki:gsp-r ; fuseki:name "get" ] ; 22 | fuseki:endpoint [ fuseki:operation fuseki:gsp-rw ; fuseki:name "data" ]; 23 | fuseki:dataset <#geo_ds> ; 24 | . 
25 | 26 | <#geo_ds> rdf:type geosparql:GeosparqlDataset ; 27 | geosparql:dataset :dataset ; 28 | geosparql:inference true ; 29 | geosparql:queryRewrite true ; 30 | geosparql:indexEnabled true ; 31 | geosparql:applyDefaultGeometry true ; 32 | . 33 | 34 | # Transactional in-memory dataset. 35 | :dataset rdf:type ja:MemoryDataset ; 36 | ## Optional load with data on start-up 37 | ja:data "/rdf/sandgate.ttl"; 38 | ja:data "/rdf/object_vocab_parameter_bblocks.ttl"; 39 | ja:data "/rdf/object_catalog_bblocks_catalog.ttl"; 40 | ja:data "/rdf/bnode_depth-1.ttl"; 41 | ja:data "/rdf/bnode_depth-2.ttl"; 42 | ja:data "/rdf/catprez.ttl"; 43 | ja:data "/rdf/object_vocab_datatype_bblocks.ttl"; 44 | ja:data "/rdf/vocprez.ttl"; 45 | ja:data "/rdf/bnode_depth-2-2.ttl"; 46 | ja:data "/rdf/bnode_depth-4.ttl"; 47 | ja:data "/rdf/spaceprez.ttl"; 48 | ja:data "/rdf/sandgate.ttl"; 49 | ja:data "/rdf/object_vocab_api_bblocks.ttl"; 50 | ja:data "/rdf/object_vocab_schema_bblocks.ttl"; 51 | ja:data "/rdf/redirect-foaf-homepage.ttl"; 52 | . 53 | -------------------------------------------------------------------------------- /tests/data/prefixes/data_using_prefixes.ttl: -------------------------------------------------------------------------------- 1 | PREFIX dcat: 2 | PREFIX dcterms: 3 | PREFIX ex: 4 | PREFIX rdf: 5 | PREFIX rdfs: 6 | 7 | a dcat:Catalog ; 8 | rdfs:label "A Catalog with prefixed david" ; 9 | dcterms:hasPart ex:DCATDataset ; 10 | ex:property "some property" ; 11 | . -------------------------------------------------------------------------------- /tests/data/prefixes/remote_prefixes.ttl: -------------------------------------------------------------------------------- 1 | PREFIX vann: 2 | PREFIX ldgovau: 3 | PREFIX gnaf: 4 | PREFIX addr: 5 | 6 | 7 | [ vann:preferredNamespacePrefix "davo" ; 8 | vann:preferredNamespaceUri ; 9 | ] . 
-------------------------------------------------------------------------------- /tests/test_alt_profiles.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from rdflib import Graph, URIRef 3 | from rdflib.namespace import DCAT, RDF 4 | 5 | from prez.reference_data.prez_ns import PREZ 6 | 7 | 8 | @pytest.fixture() 9 | def a_catalog_link(client): 10 | # get link for first catalog 11 | r = client.get("/catalogs") 12 | g = Graph().parse(data=r.text) 13 | member_uri = g.value(None, RDF.type, DCAT.Catalog) 14 | link = g.value(member_uri, URIRef("https://prez.dev/link", None)) 15 | return link 16 | 17 | 18 | @pytest.fixture() 19 | def a_resource_link(client, a_catalog_link): 20 | r = client.get(a_catalog_link) 21 | g = Graph().parse(data=r.text) 22 | links = g.objects(subject=None, predicate=URIRef("https://prez.dev/link")) 23 | for link in links: 24 | if link != a_catalog_link: 25 | return link 26 | 27 | 28 | def test_listing_alt_profile(client): 29 | r = client.get("/catalogs?_profile=altr-ext:alt-profile") 30 | response_graph = Graph().parse(data=r.text) 31 | assert ( 32 | URIRef("http://www.w3.org/ns/dx/connegp/altr-ext#alt-profile"), 33 | RDF.type, 34 | URIRef("https://prez.dev/ListingProfile"), 35 | ) in response_graph 36 | 37 | 38 | def test_object_alt_profile_token(client, a_catalog_link): 39 | r = client.get(f"{a_catalog_link}?_mediatype=text/turtle&_profile=alt") 40 | response_graph = Graph().parse(data=r.text) 41 | object_profiles = ( 42 | None, 43 | RDF.type, 44 | PREZ.ObjectProfile, 45 | ) 46 | listing_profiles = ( 47 | None, 48 | RDF.type, 49 | PREZ.ListingProfile, 50 | ) 51 | assert len(list(response_graph.triples(object_profiles))) > 1 52 | assert ( 53 | len(list(response_graph.triples(listing_profiles))) == 1 54 | ) # only the alt profile 55 | -------------------------------------------------------------------------------- /tests/test_bnode.py: 
-------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | import pytest 4 | from rdflib import Graph, URIRef 5 | 6 | from prez.bnode import get_bnode_depth 7 | 8 | 9 | @pytest.mark.parametrize( 10 | "input_file, iri, expected_depth", 11 | [ 12 | ("bnode_depth-1.ttl", "https://data.idnau.org/pid/democat", 1), 13 | ("bnode_depth-2.ttl", "https://data.idnau.org/pid/democat", 2), 14 | ("bnode_depth-4.ttl", "https://data.idnau.org/pid/democat", 4), 15 | ("bnode_depth-2-2.ttl", "https://draft.com/Australian-physiographic-units", 2), 16 | ], 17 | ) 18 | def test_bnode_depth(input_file: str, iri: str, expected_depth: int) -> None: 19 | file = Path(__file__).parent.parent / "test_data" / input_file 20 | 21 | graph = Graph() 22 | graph.parse(file) 23 | 24 | depth = get_bnode_depth(URIRef(iri), graph) 25 | assert depth == expected_depth 26 | -------------------------------------------------------------------------------- /tests/test_cql_queryable.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | from rdflib import Graph, URIRef 4 | from sparql_grammar_pydantic import Var 5 | 6 | from prez.services.query_generation.shacl import PropertyShape 7 | 8 | test_file_1 = Path(__file__).parent.parent / "test_data/cql_queryable_shapes.ttl" 9 | test_file_2 = Path(__file__).parent.parent / "test_data/cql_queryable_shapes_bdr.ttl" 10 | data = Graph().parse(test_file_1, format="turtle") 11 | data.parse(test_file_2, format="turtle") 12 | 13 | 14 | def test_ps_1(): 15 | ps = PropertyShape( 16 | uri=URIRef("http://example.com/SpeciesQueryableShape"), 17 | graph=data, 18 | kind="endpoint", 19 | focus_node=Var(value="focus_node"), 20 | ) 21 | assert ( 22 | ps.tssp_list[0].to_string() 23 | == "?focus_node ^/(^)*/ ?path_node_3" 24 | ) 25 | 26 | 27 | def test_ps_2(): 28 | ps = PropertyShape( 29 | uri=URIRef("http://example.com/BDRScientificNameQueryableShape"), 30 | 
graph=data, 31 | kind="endpoint", 32 | focus_node=Var(value="focus_node"), 33 | ) 34 | assert ( 35 | ps.tssp_list[0].to_string() 36 | == "?focus_node ^/// ?path_node_4" 37 | ) 38 | -------------------------------------------------------------------------------- /tests/test_cql_time.py: -------------------------------------------------------------------------------- 1 | import json 2 | from pathlib import Path 3 | 4 | import pytest 5 | 6 | from prez.services.query_generation.cql import CQLParser 7 | 8 | cql_time_filenames = [ 9 | "example20.json", # t_before instant 10 | "example21.json", # t_after instant 11 | "example22.json", # t_during 12 | "example53.json", # t_after instant 13 | "example54.json", # t_before instant 14 | "example55.json", # t_contains interval 15 | "example56.json", # t_disjoint interval 16 | "example57.json", # t_during 17 | "clause7_13.json", # t_during 18 | # "clause7_17.json", # t_during 19 | "additional_temporal_disjoint_instant.json", 20 | "example58.json", # t_equals instant 21 | "example59.json", # t_finishedBy interval 22 | "example60.json", # t_finishes interval 23 | "additional_temporal_during_intervals.json", # t_before interval 24 | "example61.json", # t_intersects interval 25 | "example62.json", # t_meets interval 26 | "example63.json", # t_metBy interval 27 | "example64.json", # t_overlappedBy interval 28 | "example65.json", # t_overlaps interval 29 | "example66.json", # t_startedBy interval 30 | "example67.json", # t_starts interval 31 | "clause7_12.json", # t_intersects 32 | ] 33 | 34 | cql_time_generated_queries = [ 35 | Path(name).with_suffix(".rq") for name in cql_time_filenames 36 | ] 37 | 38 | 39 | @pytest.mark.parametrize( 40 | "cql_json_filename, output_query_filename", 41 | [i for i in (zip(cql_time_filenames, cql_time_generated_queries))], 42 | ) 43 | def test_time_funcs(cql_json_filename, output_query_filename): 44 | cql_json_path = ( 45 | Path(__file__).parent.parent / f"test_data/cql/input/{cql_json_filename}" 46 | 
) 47 | cql_json = json.loads(cql_json_path.read_text()) 48 | reference_query = ( 49 | Path(__file__).parent.parent 50 | / f"test_data/cql/expected_generated_queries/{output_query_filename}" 51 | ).read_text() 52 | cql_parser = CQLParser(cql=cql_json) 53 | cql_parser.generate_jsonld() 54 | cql_parser.parse() 55 | if not cql_parser.query_str == reference_query: 56 | print(f"\n{cql_parser.query_str}") 57 | assert cql_parser.query_str == reference_query 58 | -------------------------------------------------------------------------------- /tests/test_curie_endpoint.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from fastapi.testclient import TestClient 3 | 4 | 5 | @pytest.fixture 6 | def setup(client): 7 | iri = "http://example.com/namespace/test" 8 | client.get(f"/identifier/curie/{iri}") 9 | 10 | 11 | @pytest.mark.parametrize( 12 | "iri, expected_status_code", 13 | [ 14 | ["d", 400], 15 | ["http://!", 400], 16 | ["http://example.com/namespace", 200], 17 | ], 18 | ) 19 | def test_iri(iri: str, expected_status_code: int, client: TestClient): 20 | response = client.get(f"/identifier/curie/{iri}") 21 | assert response.status_code == expected_status_code 22 | 23 | 24 | @pytest.mark.parametrize( 25 | "curie, expected_status_code", 26 | [ 27 | ["d", 400], 28 | ["ns1", 400], 29 | ["namespace:test", 200], 30 | ], 31 | ) 32 | def test_curie(curie: str, expected_status_code: int, client: TestClient, setup): 33 | response = client.get(f"/identifier/iri/{curie}") 34 | assert response.status_code == expected_status_code 35 | -------------------------------------------------------------------------------- /tests/test_endpoints_cache.py: -------------------------------------------------------------------------------- 1 | from rdflib import Graph 2 | 3 | from prez.reference_data.prez_ns import PREZ 4 | 5 | 6 | def test_purge_cache(client): 7 | # add some annotations to the cache 8 | client.get("/catalogs") 9 | # purge the 
cache 10 | response = client.get("/purge-tbox-cache") 11 | assert response.status_code == 200 12 | # check that the cache is empty 13 | r = client.get("/tbox-cache") 14 | g = Graph().parse(data=r.text) 15 | assert len(g) == 0 16 | 17 | 18 | def test_cache(client): 19 | # add some annotations to the cache 20 | catalogs = client.get("/catalogs") 21 | assert catalogs.status_code == 200 22 | r = client.get("/tbox-cache") 23 | g = Graph().parse(data=r.text) 24 | labels = ( 25 | None, 26 | PREZ.label, 27 | None, 28 | ) 29 | descriptions = ( 30 | None, 31 | PREZ.description, 32 | None, 33 | ) 34 | assert len(list(g.triples(labels))) > 0 35 | assert len(list(g.triples(descriptions))) > 0 36 | -------------------------------------------------------------------------------- /tests/test_endpoints_catprez.py: -------------------------------------------------------------------------------- 1 | from rdflib import Graph, URIRef 2 | from rdflib.namespace import DCAT, RDF 3 | 4 | 5 | def test_catalog_listing_anot(client): 6 | r = client.get("/catalogs?_mediatype=text/turtle&_profile=prez:OGCListingProfile") 7 | response_graph = Graph().parse(data=r.text) 8 | expected_response_1 = ( 9 | URIRef("https://example.com/CatalogOne"), 10 | RDF.type, 11 | DCAT.Catalog, 12 | ) 13 | expected_response_2 = ( 14 | URIRef("https://example.com/CatalogTwo"), 15 | RDF.type, 16 | DCAT.Catalog, 17 | ) 18 | assert next(response_graph.triples(expected_response_1)) 19 | assert next(response_graph.triples(expected_response_2)) 20 | 21 | 22 | def test_catalog_anot(client, a_catprez_catalog_link): 23 | r = client.get(f"{a_catprez_catalog_link}?_mediatype=text/turtle") 24 | response_graph = Graph().parse(data=r.text) 25 | expected_response = ( 26 | URIRef("https://example.com/CatalogOne"), 27 | RDF.type, 28 | DCAT.Catalog, 29 | ) 30 | assert next(response_graph.triples(expected_response)) 31 | 32 | 33 | def test_catalog_no_mediatype(client, a_catprez_catalog_link): 34 | r = 
client.get(f"{a_catprez_catalog_link}", headers={"Accept": ""}) 35 | assert r.status_code == 200 36 | 37 | 38 | def test_lower_level_listing_anot(client, a_catprez_catalog_link): 39 | r = client.get(f"{a_catprez_catalog_link}/collections?_mediatype=text/turtle") 40 | response_graph = Graph().parse(data=r.text) 41 | expected_response = ( 42 | URIRef("https://example.com/DCATResource"), 43 | RDF.type, 44 | DCAT.Resource, 45 | ) 46 | assert next(response_graph.triples(expected_response)) 47 | -------------------------------------------------------------------------------- /tests/test_endpoints_concept_hierarchy.py: -------------------------------------------------------------------------------- 1 | from rdflib import SKOS, XSD, Graph, Literal, URIRef 2 | from rdflib.namespace import RDF 3 | 4 | from prez.reference_data.prez_ns import PREZ 5 | 6 | 7 | def test_concept_hierarchy_top_concepts(client): 8 | r = client.get( 9 | "/concept-hierarchy/exm:SchemingConceptScheme/top-concepts?_mediatype=text/turtle" 10 | ) 11 | response_graph = Graph().parse(data=r.text) 12 | expected_response_1 = ( 13 | URIRef("https://example.com/TopLevelConcept"), 14 | RDF.type, 15 | SKOS.Concept, 16 | ) 17 | expected_response_2 = ( 18 | URIRef("https://example.com/TopLevelConcept"), 19 | PREZ.hasChildren, 20 | Literal("true", datatype=XSD.boolean), 21 | ) 22 | assert next(response_graph.triples(expected_response_1)) 23 | assert next(response_graph.triples(expected_response_2)) 24 | 25 | 26 | def test_concept_hierarchy_narrowers(client): 27 | r = client.get( 28 | "/concept-hierarchy/exm:TopLevelConcept/narrowers?_mediatype=text/turtle" 29 | ) 30 | response_graph = Graph().parse(data=r.text) 31 | expected_response_1 = ( 32 | URIRef("https://example.com/SecondLevelConcept"), 33 | RDF.type, 34 | SKOS.Concept, 35 | ) 36 | expected_response_2 = ( 37 | URIRef("https://example.com/SecondLevelConcept"), 38 | PREZ.hasChildren, 39 | Literal("true", datatype=XSD.boolean), 40 | ) 41 | assert 
next(response_graph.triples(expected_response_1)) 42 | assert next(response_graph.triples(expected_response_2)) 43 | -------------------------------------------------------------------------------- /tests/test_endpoints_management.py: -------------------------------------------------------------------------------- 1 | from rdflib import Graph 2 | 3 | from prez.reference_data.prez_ns import PREZ 4 | 5 | 6 | def test_annotation_predicates(client): 7 | r = client.get("/") 8 | response_graph = Graph().parse(data=r.text) 9 | labelList = list( 10 | response_graph.objects( 11 | subject=PREZ["AnnotationPropertyList"], predicate=PREZ.labelList 12 | ) 13 | ) 14 | assert len(labelList) == 1 15 | descriptionList = list( 16 | response_graph.objects( 17 | subject=PREZ["AnnotationPropertyList"], predicate=PREZ.descriptionList 18 | ) 19 | ) 20 | assert len(descriptionList) == 1 21 | provList = list( 22 | response_graph.objects( 23 | subject=PREZ["AnnotationPropertyList"], predicate=PREZ.provenanceList 24 | ) 25 | ) 26 | assert len(provList) == 1 27 | -------------------------------------------------------------------------------- /tests/test_endpoints_object.py: -------------------------------------------------------------------------------- 1 | from rdflib import Graph, URIRef 2 | from rdflib.namespace import GEO, RDF 3 | 4 | 5 | def test_feature_collection(client): 6 | r = client.get( 7 | "/object?uri=https://example.com/spaceprez/FeatureCollection&_mediatype=text/turtle" 8 | ) 9 | response_graph = Graph().parse(data=r.text) 10 | assert ( 11 | URIRef("https://example.com/spaceprez/FeatureCollection"), 12 | RDF.type, 13 | GEO.FeatureCollection, 14 | ) in response_graph 15 | 16 | 17 | def test_feature(client): 18 | r = client.get( 19 | "/object?uri=https://example.com/spaceprez/Feature1&_mediatype=text/turtle" 20 | ) 21 | response_graph = Graph().parse(data=r.text) 22 | assert ( 23 | URIRef("https://example.com/spaceprez/Feature1"), 24 | RDF.type, 25 | GEO.Feature, 26 | ) in 
response_graph 27 | -------------------------------------------------------------------------------- /tests/test_endpoints_ok.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import time 3 | from typing import Optional, Set 4 | 5 | from rdflib import Graph 6 | 7 | from prez.reference_data.prez_ns import PREZ 8 | 9 | log = logging.getLogger(__name__) 10 | 11 | 12 | def wait_for_app_to_be_ready(client, timeout=10): 13 | start_time = time.time() 14 | while time.time() - start_time < timeout: 15 | try: 16 | response = client.get("/health") 17 | if response.status_code == 200: 18 | return 19 | except Exception as e: 20 | print(e) 21 | time.sleep(0.5) 22 | raise RuntimeError("App did not start within the specified timeout") 23 | 24 | 25 | def ogcprez_links( 26 | client, visited: Optional[Set] = None, link="/catalogs", total_links_visited=0 27 | ): 28 | if not visited: 29 | visited = set() 30 | response = client.get(link) 31 | g = Graph().parse(data=response.text, format="turtle") 32 | links = list(g.objects(None, PREZ.link)) 33 | member_bnode_list = list(g.objects(None, PREZ.members)) 34 | if member_bnode_list: 35 | member_bnode = member_bnode_list[0] 36 | member_links = list(g.objects(member_bnode, PREZ.link)) 37 | links.extend(member_links) 38 | assert response.status_code == 200 39 | for next_link in links: 40 | print(next_link) 41 | if next_link not in visited: 42 | visited.add(next_link) 43 | # Make the recursive call and update the total_links_visited 44 | # and visited set with the returned values 45 | visited, total_links_visited = ogcprez_links( 46 | client, visited, str(next_link), total_links_visited + 1 47 | ) 48 | # Return the updated count and visited set 49 | return visited, total_links_visited 50 | 51 | 52 | def test_visit_all_links(client): 53 | visited_links, total_count = ogcprez_links(client) 54 | print(f"Total links visited: {total_count}") 55 | 
# -----------------------------------------------------------------------------
# /tests/test_endpoints_profiles.py
# -----------------------------------------------------------------------------
from rdflib import Graph, URIRef
from rdflib.namespace import PROF, RDF


def test_profile(client_no_override):
    """The /profiles listing includes the Prez profile typed as prof:Profile."""
    r = client_no_override.get("/profiles?limit=50")
    g = Graph().parse(data=r.text)
    assert (URIRef("https://prez.dev/profile/prez"), RDF.type, PROF.Profile) in g


def test_ogcprez_profile(client_no_override):
    """An individual profile is retrievable by its curie."""
    r = client_no_override.get("/profiles/prez:OGCRecordsProfile")
    g = Graph().parse(data=r.text)
    assert (URIRef("https://prez.dev/OGCRecordsProfile"), RDF.type, PROF.Profile) in g


# -----------------------------------------------------------------------------
# /tests/test_endpoints_spaceprez.py
# -----------------------------------------------------------------------------
from rdflib import Graph, URIRef
from rdflib.namespace import DCAT, GEO, RDF


def test_dataset_anot(client, a_spaceprez_catalog_link):
    """The catalog page contains the catalog resource typed as dcat:Catalog."""
    r = client.get(f"{a_spaceprez_catalog_link}?_mediatype=text/turtle")
    response_graph = Graph().parse(data=r.text)
    # Use a membership check rather than `next(graph.triples(...))`: when the
    # triple is missing, `next()` raises StopIteration (reported as a test
    # *error*) instead of producing a clean assertion failure. This also makes
    # these tests consistent with test_feature_collection below.
    assert (
        URIRef("https://example.com/spaceprez/SpacePrezCatalog"),
        RDF.type,
        DCAT.Catalog,
    ) in response_graph


def test_feature_collection(client, an_fc_link):
    """The feature collection page types the resource as geo:FeatureCollection."""
    r = client.get(f"{an_fc_link}?_mediatype=text/turtle")
    response_graph = Graph().parse(data=r.text)
    assert (
        URIRef("https://example.com/spaceprez/FeatureCollection"),
        RDF.type,
        GEO.FeatureCollection,
    ) in response_graph


def test_feature(client, a_feature_link):
    """A single feature page types the resource as geo:Feature."""
    r = client.get(f"{a_feature_link}?_mediatype=text/turtle")
    response_graph = Graph().parse(data=r.text)
    assert (
        URIRef("https://example.com/spaceprez/Feature1"),
        RDF.type,
        GEO.Feature,
    ) in response_graph


def test_feature_listing_anot(client, an_fc_link):
    """The items listing contains both features typed as geo:Feature."""
    r = client.get(f"{an_fc_link}/items?_mediatype=text/turtle")
    response_graph = Graph().parse(data=r.text)
    for local_name in ("Feature1", "Feature2"):
        assert (
            URIRef(f"https://example.com/spaceprez/{local_name}"),
            RDF.type,
            GEO.Feature,
        ) in response_graph


# -----------------------------------------------------------------------------
# /tests/test_issue_286.py
# -----------------------------------------------------------------------------
def test_issue(client):
    """Regression test for issue #286: nested object route responds with 200."""
    r = client.get("/catalogs/ex:cat/collections/ex:res")
    assert r.status_code == 200


# -----------------------------------------------------------------------------
# /tests/test_ogc.py
# -----------------------------------------------------------------------------
import asyncio
from pathlib import Path

import pytest
from fastapi.testclient import TestClient
from ogctests.main import run_ogctests
from pyoxigraph.pyoxigraph import Store

from prez.app import assemble_app
from prez.dependencies import get_data_repo
from prez.repositories import PyoxigraphRepo, Repo


@pytest.fixture(scope="session")
def test_store() -> Store:
    """Pyoxigraph store pre-loaded with the OGC Features test data."""
    store = Store()

    file = Path(__file__).parent.parent / "test_data/ogc_features.ttl"
    store.load(file.read_bytes(), "text/turtle")

    return store


@pytest.fixture(scope="session")
def test_repo(test_store: Store) -> Repo:
    """Prez repository backed by the in-memory test store."""
    return PyoxigraphRepo(test_store)


@pytest.fixture(scope="session")
def client(test_repo: Repo) -> TestClient:
    """TestClient whose data repo dependency is overridden with the test repo."""

    def override_get_data_repo():
        return test_repo

    app = assemble_app()

    app.dependency_overrides[get_data_repo] = override_get_data_repo

    with TestClient(app, backend_options={"loop_factory": asyncio.new_event_loop}) as c:
        # The ogctests suite issues requests relative to a features root.
        c.base_url = "http://localhost:8000/catalogs/ex:DemoCatalog/collections/ex:GeoDataset/features"
        yield c

    # Remove the override to ensure subsequent tests are unaffected
    app.dependency_overrides.clear()


@pytest.mark.parametrize(
    "test_file",
    [
        pytest.param(
            "apidefinition",
            marks=pytest.mark.xfail(
                reason="see https://github.com/RDFLib/prez/pull/265#issuecomment-2367130294"
            ),
        ),
        "collection",
        "collections",
        "conformance",
        pytest.param(
            "crs",
            marks=pytest.mark.xfail(
                reason="see https://github.com/RDFLib/prez/issues/267"
            ),
        ),
        "errorconditions",
        "feature",
        pytest.param(
            "features",
            marks=pytest.mark.xfail(
                reason="endpoint that causes an error in pytest works manually with the same data in Fuseki"
            ),
        ),
        "general",
        "landingpage",
    ],
)
def test_features_core(client: TestClient, test_file: str):
    """Run one module of the external OGC API Features conformance suite."""
    scope = f"features/core/test_{test_file}.py"
    exit_code = run_ogctests(scope, test_client=client)
    assert exit_code == pytest.ExitCode.OK


# -----------------------------------------------------------------------------
# /tests/test_ogc_features_manual.py
# -----------------------------------------------------------------------------
def test_ogc_features_root(client):
    """The OGC Features landing page responds with 200."""
    r = client.get("/catalogs/ex:DemoCatalog/collections/ex:GeoDataset/features")
    assert r.status_code == 200
def test_ogc_features_queryables(client):
    """The OGC Features queryables endpoint responds with 200."""
    queryables_path = (
        "/catalogs/ex:DemoCatalog/collections/ex:GeoDataset/features/queryables"
    )
    response = client.get(queryables_path)
    assert response.status_code == 200


def test_bbox_200(client):
    """An items request filtered by a bbox is accepted."""
    items_path = (
        "/catalogs/ex:DemoCatalog/collections/ex:GeoDataset/features"
        "/collections/ex:FeatureCollection/items"
        "?bbox=4.0,4.0,6.0,6.0&_mediatype=application/sparql-query"
    )
    response = client.get(items_path)
    assert response.status_code == 200


def test_datetime_200(client):
    """An items request filtered by a datetime interval is accepted."""
    items_path = (
        "/catalogs/ex:DemoCatalog/collections/ex:GeoDataset/features"
        "/collections/ex:FeatureCollection/items"
        "?datetime=2021-01-01T00:00:00Z/2021-01-02T00:00:00Z"
        "&_mediatype=application/sparql-query"
    )
    response = client.get(items_path)
    assert response.status_code == 200


# -----------------------------------------------------------------------------
# /tests/test_predicates.py
# -----------------------------------------------------------------------------
import pytest

from prez.config import Settings


def _check_predicate_setting(field_name, value, error):
    """Construct Settings with one predicate field set; expect `error` or success."""
    kwargs = {field_name: value}
    if error is None:
        assert Settings(**kwargs)
    else:
        with pytest.raises(error):
            assert Settings(**kwargs)


@pytest.mark.parametrize(
    "label_predicates, error",
    [
        (["https://schema.org/name"], None),
        (["1", "2", "3"], None),
        ([1], TypeError),
        ("not a list", ValueError),
    ],
)
def test_label_predicates(label_predicates, error):
    _check_predicate_setting("label_predicates", label_predicates, error)


@pytest.mark.parametrize(
    "description_predicates, error",
    [
        (["https://schema.org/description"], None),
        (["1", "2", "3"], None),
        ([1], TypeError),
        ("not a list", ValueError),
    ],
)
def test_description_predicates(description_predicates, error):
    _check_predicate_setting("description_predicates", description_predicates, error)


@pytest.mark.parametrize(
    "provenance_predicates, error",
    [
        (["https://schema.org/provenance"], None),
        (["1", "2", "3"], None),
        ([1], TypeError),
        ("not a list", ValueError),
    ],
)
def test_provenance_predicates(provenance_predicates, error):
    _check_predicate_setting("provenance_predicates", provenance_predicates, error)


@pytest.mark.parametrize(
    "search_predicates, error",
    [
        (["https://schema.org/search"], None),
        (["1", "2", "3"], None),
        ([1], TypeError),
        ("not a list", ValueError),
    ],
)
def test_search_predicates(search_predicates, error):
    _check_predicate_setting("search_predicates", search_predicates, error)


@pytest.mark.parametrize(
    "other_predicates, error",
    [
        (["https://schema.org/other"], None),
        (["1", "2", "3"], None),
        ([1], TypeError),
        ("not a list", ValueError),
    ],
)
def test_other_predicates(other_predicates, error):
    _check_predicate_setting("other_predicates", other_predicates, error)


# -----------------------------------------------------------------------------
# /tests/test_redirect_endpoint.py
# -----------------------------------------------------------------------------
import pytest
from fastapi.testclient import TestClient


@pytest.mark.parametrize(
    "iri, url, expected_response_code, accept_header_value",
    [
        (
            "http://data.bgs.ac.uk/id/dataHolding/13603129",
            "http://metadata.bgs.ac.uk/geonetwork/srv/eng/catalog.search#/metadata/9df8df53-2a1d-37a8-e044-0003ba9b0d98",
            307,
            "",
        ),
        ("http://example.com/non-existent", None, 404, ""),
        (
            "http://data.bgs.ac.uk/id/dataHolding/13603129",
            "http://metadata.bgs.ac.uk/geonetwork/srv/eng/catalog.search#/metadata/9df8df53-2a1d-37a8-e044-0003ba9b0d98",
            307,
            "text/turtle",
        ),
    ],
)
def test_redirect_endpoint(
    client: TestClient,
    iri: str,
    url: str,
    expected_response_code,
    accept_header_value: str | None,
):
    """/identifier/redirect issues a 307 to the IRI's target, or a 404 JSON body."""
    response = client.get(
        "/identifier/redirect",
        params={"iri": iri},
        headers={"accept": accept_header_value},
        follow_redirects=False,
    )

    if expected_response_code == 404:
        # No mapping known for the IRI: a JSON error document is returned.
        assert response.status_code == expected_response_code
        assert response.headers.get("content-type") == "application/json"
        body = response.json()
        assert body.get("status_code") == expected_response_code
        assert body.get("detail") == f"No homepage found for IRI {iri}."
    else:
        assert response.status_code == expected_response_code
        assert response.headers.get("location") == url
        if accept_header_value:
            # A supplied accept header is reflected on the redirect response.
            assert response.headers.get("accept") == accept_header_value
# -----------------------------------------------------------------------------
# /tests/test_remote_prefixes.py
# -----------------------------------------------------------------------------
from pathlib import Path

import pytest
from fastapi.testclient import TestClient
from pyoxigraph import Store
from rdflib import Graph, URIRef
from rdflib.namespace import DCAT, RDF

from prez.app import assemble_app
from prez.dependencies import get_data_repo
from prez.repositories import PyoxigraphRepo, Repo


@pytest.fixture(scope="session")
def test_store() -> Store:
    """Pyoxigraph store loaded with every prefix fixture file."""
    store = Store()

    # NOTE(review): the ".." in the pattern resolves back into tests/, so this
    # is effectively tests/data/prefixes/*.ttl — confirm intent before simplifying.
    for file in Path(__file__).parent.glob("../tests/data/prefixes/*.ttl"):
        store.load(file.read_bytes(), "text/turtle")

    return store


@pytest.fixture(scope="session")
def test_repo(test_store: Store) -> Repo:
    """Prez repository backed by the in-memory test store."""
    return PyoxigraphRepo(test_store)


@pytest.fixture(scope="session")
def client(test_repo: Repo) -> TestClient:
    """TestClient with the data repo dependency overridden to the test repo."""

    def override_get_repo():
        return test_repo

    app = assemble_app()

    app.dependency_overrides[get_data_repo] = override_get_repo

    with TestClient(app) as c:
        yield c

    # Remove the override to ensure subsequent tests are unaffected
    app.dependency_overrides.clear()


@pytest.mark.xfail(
    reason="Dependency overrides not configured correctly. Test passes when manually tested using Fuseki"
)
def test_catalog_link(client):
    """The first catalog's prez:link uses the curie minted from remote prefixes."""
    # get link for first catalog
    r = client.get("/c/catalogs")
    g = Graph().parse(data=r.text)
    member_uri = g.value(None, RDF.type, DCAT.Catalog)
    # Fixed a misplaced parenthesis: URIRef("https://prez.dev/link", None)
    # passed None as URIRef's (default) base argument rather than as the
    # object argument of Graph.value — resolution was identical, but the
    # extra argument was clearly unintended.
    link = str(g.value(member_uri, URIRef("https://prez.dev/link")))
    assert link == "/c/catalogs/davo:bogusCatalogous"


# -----------------------------------------------------------------------------
# /tests/test_sparql.py
# -----------------------------------------------------------------------------
import pytest


def test_select(client):
    """check that a valid select query returns a 200 response."""
    r = client.get(
        "/sparql?query=SELECT%20*%0AWHERE%20%7B%0A%20%20%3Fs%20%3Fp%20%3Fo%0A%7D%20LIMIT%201"
    )
    assert r.status_code == 200


def test_construct(client):
    """check that a valid construct query returns a 200 response."""
    r = client.get(
        "/sparql?query=CONSTRUCT%20%7B%0A%20%20%3Fs%20%3Fp%20%3Fo%0A%7D%20WHERE%20%7B%0A%20%20%3Fs%20%3Fp%20%3Fo%0A%7D%20LIMIT%201"
    )
    assert r.status_code == 200


@pytest.mark.parametrize(
    "query,expected_result",
    [
        (
            "/sparql?query=PREFIX%20ex%3A%20%3Chttp%3A%2F%2Fexample.com%2Fdatasets%2F%3E%0APREFIX%20dcterms%3A%20%3Chttp%3A%2F%2Fpurl.org%2Fdc%2Fterms%2F%3E%0A%0AASK%0AWHERE%20%7B%0A%20%20%3Fsubject%20dcterms%3Atitle%20%3Ftitle%20.%0A%20%20FILTER%20CONTAINS(LCASE(%3Ftitle)%2C%20%22sandgate%22)%0A%7D",
            True,
        ),
        (
            "/sparql?query=ASK%20%7B%20%3Chttps%3A%2F%2Ffake%3E%20%3Fp%20%3Fo%20%7D",
            False,
        ),
    ],
)
def test_ask(client, query, expected_result):
    """Check that valid ASK queries return a 200 response with the expected boolean result."""
    r = client.get(query)

    assert r.status_code == 200
    # Bug fix: `expected_result` was parametrized but never asserted, so the
    # ASK answer was never actually verified. Both the SPARQL 1.1 JSON and
    # XML result serializations spell the answer as a lowercase
    # "true"/"false" token, so assert the expected token is present.
    assert str(expected_result).lower() in r.text


def test_post(client):
    """check that a valid post query returns a 200 response."""
    r = client.post(
        "/sparql",
        data={
            "query": "SELECT * WHERE { ?s ?p ?o } LIMIT 1",
            "format": "application/x-www-form-urlencoded",
        },
    )
    assert r.status_code == 200


def test_post_invalid_data(client):
    """check that a post query with invalid data returns a 400 response."""
    r = client.post(
        "/sparql",
        data={
            "query": "INVALID QUERY",
            "format": "application/x-www-form-urlencoded",
        },
    )
    assert r.status_code == 400


def test_insert_as_query(client):
    """
    An update (INSERT) sent via the query parameter is rejected with 400.
    Also tested manually with Fuseki
    """
    r = client.post(
        "/sparql",
        data={
            "query": "INSERT {<:s> <:p> <:o>}",
            "format": "application/x-www-form-urlencoded",
        },
    )
    assert r.status_code == 400