├── .coveragerc
├── .dockerignore
├── .env_ows_root
├── .env_simple
├── .github
│   ├── .codecov.yml
│   ├── ISSUE_TEMPLATE
│   │   └── ISSUE_TEMPLATE.md
│   ├── dependabot.yml
│   └── workflows
│       ├── complementary-config-test.yaml
│       ├── docker.yml
│       ├── dockerfile-lint.yml
│       ├── docpreview.yaml
│       ├── lint.yml
│       ├── pypi.yml
│       ├── pyspy-profiling.yaml
│       ├── scan.yml
│       ├── spellcheck.yaml
│       ├── test-prod.yaml
│       └── test.yml
├── .gitignore
├── .pre-commit-config.yaml
├── .readthedocs.yaml
├── .yamllint
├── CONTRIBUTING.rst
├── Dockerfile
├── Dockerfile.micromamba
├── HISTORY.rst
├── LICENSE
├── MANIFEST.in
├── README.rst
├── SECURITY.md
├── cfg_parser.py
├── check-code-all.sh
├── check-code.sh
├── code-of-conduct.md
├── compare-cfg.sh
├── complementary_config_test
│   └── .env_complementary_config_dea_dev
├── datacube_ows
│   ├── __init__.py
│   ├── band_utils.py
│   ├── cfg_parser_impl.py
│   ├── config_toolkit.py
│   ├── config_utils.py
│   ├── data.py
│   ├── feature_info.py
│   ├── gunicorn_config.py
│   ├── http_utils.py
│   ├── index
│   │   ├── __init__.py
│   │   ├── api.py
│   │   ├── driver.py
│   │   ├── postgis
│   │   │   ├── __init__.py
│   │   │   ├── api.py
│   │   │   └── product_ranges.py
│   │   ├── postgres
│   │   │   ├── __init__.py
│   │   │   ├── api.py
│   │   │   ├── mv_index.py
│   │   │   └── product_ranges.py
│   │   └── sql.py
│   ├── legend_generator.py
│   ├── legend_utils.py
│   ├── loading.py
│   ├── ogc.py
│   ├── ogc_exceptions.py
│   ├── ogc_utils.py
│   ├── ows_cfg_example.py
│   ├── ows_configuration.py
│   ├── protocol_versions.py
│   ├── query_profiler.py
│   ├── resource_limits.py
│   ├── sql
│   │   ├── postgis
│   │   │   └── ows_schema
│   │   │       ├── create
│   │   │       │   ├── 001_create_schema.sql
│   │   │       │   └── 002_create_product_rng.sql
│   │   │       └── grants
│   │   │           ├── read_only
│   │   │           │   ├── 001_grant_usage_requires_role.sql
│   │   │           │   ├── 002_grant_range_read_requires_role.sql
│   │   │           │   └── 003_grant_odc_user_requires_role.sql
│   │   │           └── read_write
│   │   │               ├── 001_grant_usage_requires_role.sql
│   │   │               ├── 002_grant_writetables_requires_role.sql
│   │   │               └── 003_grant_odc_user_requires_role.sql
│   │   └── postgres
│   │       ├── extent_views
│   │       │   ├── create
│   │       │   │   ├── 001_postgis_extension.sql
│   │       │   │   ├── 002_timezone.sql
│   │       │   │   ├── 003_create_view_owner_role_ignore_duplicates.sql
│   │       │   │   ├── 004_grant_agdc_user_to_view_owner_role.sql
│   │       │   │   ├── 005_ows_read_to_view_owner_role.sql
│   │       │   │   ├── 006_odc_read_to_view_owner_role.sql
│   │       │   │   ├── 010_create_new_time_view.sql
│   │       │   │   ├── 011_create_new_space_view.sql
│   │       │   │   ├── 012_create_new_spacetime_view.sql
│   │       │   │   ├── 020_create_index_1.sql
│   │       │   │   ├── 021_create_index_2.sql
│   │       │   │   ├── 022_create_index_3.sql
│   │       │   │   ├── 023_create_index_4.sql
│   │       │   │   ├── 030_rename_old_space_time_view.sql
│   │       │   │   ├── 031_rename_new_space_time_view.sql
│   │       │   │   ├── 032_drop_old_space_time_view.sql
│   │       │   │   ├── 040_drop_old_space_view.sql
│   │       │   │   ├── 041_drop_old_time_view.sql
│   │       │   │   ├── 042_rename_new_space_view.sql
│   │       │   │   ├── 043_rename_new_time_view.sql
│   │       │   │   ├── 050_rename_index_1.sql
│   │       │   │   ├── 051_rename_index_2.sql
│   │       │   │   ├── 052_rename_index_3.sql
│   │       │   │   └── 053_rename_index_4.sql
│   │       │   ├── grants
│   │       │   │   ├── read_only
│   │       │   │   │   └── 001_grant_read_requires_role.sql
│   │       │   │   ├── refresh_owner
│   │       │   │   │   ├── 001_grant_space_view_perms_to_owner_role.sql
│   │       │   │   │   ├── 010_set_owner_time_view.sql
│   │       │   │   │   ├── 011_set_owner_space_view.sql
│   │       │   │   │   └── 012_set_owner_spacetime_view.sql
│   │       │   │   └── write_refresh
│   │       │   │       └── 001_grant_refresh_requires_role.sql
│   │       │   └── refresh
│   │       │       ├── 001_timezone.sql
│   │       │       ├── 002_refresh_time.sql
│   │       │       ├── 003_refresh_space.sql
│   │       │       └── 004_refresh_spacetime.sql
│   │       └── ows_schema
│   │           ├── cleanup
│   │           │   ├── 001_drop_space_time_view.sql
│   │           │   ├── 002_drop_time_view.sql
│   │           │   ├── 003_drop_space_view.sql
│   │           │   ├── 010_drop_subproduct_range.sql
│   │           │   ├── 011_drop_multiproduct_range.sql
│   │           │   └── 012_drop_product_range.sql
│   │           ├── create
│   │           │   ├── 001_create_schema.sql
│   │           │   └── 002_create_product_rng.sql
│   │           └── grants
│   │               ├── read_only
│   │               │   ├── 001_grant_usage_requires_role.sql
│   │               │   ├── 002_grant_range_read_requires_role.sql
│   │               │   └── 003_grant_agdc_user_requires_role.sql
│   │               └── read_write
│   │                   ├── 001_grant_usage_requires_role.sql
│   │                   ├── 002_grant_writetables_requires_role.sql
│   │                   └── 003_grant_agdc_user_requires_role.sql
│   ├── startup_utils.py
│   ├── styles
│   │   ├── __init__.py
│   │   ├── api
│   │   │   ├── __init__.py
│   │   │   └── base.py
│   │   ├── base.py
│   │   ├── colormap.py
│   │   ├── component.py
│   │   ├── expression.py
│   │   ├── hybrid.py
│   │   └── ramp.py
│   ├── templates
│   │   ├── html_feature_info.html
│   │   ├── index.html
│   │   ├── ogc_error.xml
│   │   ├── ping.html
│   │   ├── test_client.html
│   │   ├── wcs_capabilities.xml
│   │   ├── wcs_desc_coverage.xml
│   │   ├── wms_capabilities.xml
│   │   └── wmts_capabilities.xml
│   ├── tile_matrix_sets.py
│   ├── time_utils.py
│   ├── update_ranges_impl.py
│   ├── utils.py
│   ├── wcs1.py
│   ├── wcs1_utils.py
│   ├── wcs2.py
│   ├── wcs2_utils.py
│   ├── wcs_scaler.py
│   ├── wcs_utils.py
│   ├── wms.py
│   ├── wms_utils.py
│   ├── wmts.py
│   └── wsgi.py
├── dive-ci.yml
├── docker-compose.cleandb.yaml
├── docker-compose.db.yaml
├── docker-compose.index.yaml
├── docker-compose.prod.yaml
├── docker-compose.pyspy.yaml
├── docker-compose.yaml
├── docker
│   ├── database
│   │   └── Dockerfile
│   ├── files
│   │   └── remap-user.sh
│   └── pyspy
│       └── Dockerfile
├── docs
│   ├── Makefile
│   ├── cfg_colourmap_styles.rst
│   ├── cfg_colourramp_styles.rst
│   ├── cfg_component_styles.rst
│   ├── cfg_functions.rst
│   ├── cfg_global.rst
│   ├── cfg_hybrid_styles.rst
│   ├── cfg_layers.rst
│   ├── cfg_masks.rst
│   ├── cfg_style_api.rst
│   ├── cfg_styling.rst
│   ├── cfg_wcs.rst
│   ├── cfg_wms.rst
│   ├── cfg_wmts.rst
│   ├── conf.py
│   ├── configuration.rst
│   ├── contributing.rst
│   ├── database.rst
│   ├── deployment.rst
│   ├── diagrams
│   │   ├── ows_diagram.png
│   │   └── ows_diagram1.9.png
│   ├── environment_variables.rst
│   ├── history.rst
│   ├── index.rst
│   ├── installation.rst
│   ├── make.bat
│   ├── performance.rst
│   ├── readme.rst
│   ├── requirements.txt
│   ├── style_howto_color_map.rst
│   ├── style_howto_color_ramp.rst
│   ├── style_howto_components.rst
│   ├── style_howto_components_nonlinear.rst
│   ├── style_howto_legends.rst
│   ├── style_howto_transparency.rst
│   ├── styling_howto.rst
│   ├── styling_howto_jupyter.rst
│   └── usage.rst
├── env.micromamba.yaml
├── integration_tests
│   ├── __init__.py
│   ├── cfg
│   │   ├── __init__.py
│   │   ├── message.po
│   │   ├── ows_test_cfg.py
│   │   ├── ows_test_cfg_bad.py
│   │   ├── ows_test_cfg_no_i18n.py
│   │   ├── test_translations
│   │   │   ├── de
│   │   │   │   └── LC_MESSAGES
│   │   │   │       └── ows_cfg.po
│   │   │   └── en
│   │   │       └── LC_MESSAGES
│   │   │           └── ows_cfg.po
│   │   ├── translations
│   │   │   ├── de
│   │   │   │   └── LC_MESSAGES
│   │   │   │       ├── ows_cfg.mo
│   │   │   │       └── ows_cfg.po
│   │   │   └── en
│   │   │       └── LC_MESSAGES
│   │   │           ├── ows_cfg.mo
│   │   │           └── ows_cfg.po
│   │   └── utils.py
│   ├── conftest.py
│   ├── metadata
│   │   ├── COAST_100K_15_-40.yaml
│   │   ├── COAST_100K_8_-21.yaml
│   │   ├── metadata_importer.py
│   │   ├── product_geodata_coast_100k.yaml
│   │   ├── s2_l2a_ds_01.yaml
│   │   ├── s2_l2a_ds_02.yaml
│   │   ├── s2_l2a_ds_03.yaml
│   │   ├── s2_l2a_ds_04.yaml
│   │   ├── s2_l2a_ds_05.yaml
│   │   ├── s2_l2a_ds_06.yaml
│   │   ├── s2_l2a_ds_07.yaml
│   │   ├── s2_l2a_ds_08.yaml
│   │   ├── s2_l2a_ds_09.yaml
│   │   ├── s2_l2a_ds_10.yaml
│   │   ├── s2_l2a_ds_11.yaml
│   │   ├── s2_l2a_ds_12.yaml
│   │   ├── s2_l2a_ds_13.yaml
│   │   ├── s2_l2a_ds_14.yaml
│   │   ├── s2_l2a_ds_15.yaml
│   │   ├── s2_l2a_ds_16.yaml
│   │   ├── s2_l2a_ds_17.yaml
│   │   ├── s2_l2a_ds_18.yaml
│   │   ├── s2_l2a_ds_19.yaml
│   │   ├── s2_l2a_ds_20.yaml
│   │   ├── s2_l2a_ds_21.yaml
│   │   ├── s2_l2a_ds_22.yaml
│   │   ├── s2_l2a_ds_23.yaml
│   │   ├── s2_l2a_ds_24.yaml
│   │   ├── s2_l2a_ds_25.yaml
│   │   ├── s2_l2a_ds_26.yaml
│   │   ├── s2_l2a_ds_27.yaml
│   │   ├── s2_l2a_ds_28.yaml
│   │   ├── s2_l2a_ds_29.yaml
│   │   ├── s2_l2a_ds_30.yaml
│   │   ├── s2_l2a_ds_31.yaml
│   │   ├── s2_l2a_ds_32.yaml
│   │   ├── s2_l2a_ds_33.yaml
│   │   ├── s2_l2a_ds_34.yaml
│   │   ├── s2_l2a_ds_35.yaml
│   │   ├── s2_l2a_ds_36.yaml
│   │   ├── s2_l2a_ds_37.yaml
│   │   ├── s2_l2a_ds_38.yaml
│   │   ├── s2_l2a_ds_39.yaml
│   │   ├── s2_l2a_ds_40.yaml
│   │   ├── s2_l2a_ds_41.yaml
│   │   ├── s2_l2a_ds_42.yaml
│   │   ├── s2_l2a_ds_43.yaml
│   │   ├── s2_l2a_ds_44.yaml
│   │   ├── s2_l2a_ds_45.yaml
│   │   ├── s2_l2a_ds_46.yaml
│   │   ├── s2_l2a_ds_47.yaml
│   │   ├── s2_l2a_ds_48.yaml
│   │   ├── s2_l2a_ds_49.yaml
│   │   ├── s2_l2a_ds_50.yaml
│   │   ├── s2_l2a_ds_51.yaml
│   │   ├── s2_l2a_ds_52.yaml
│   │   ├── s2_l2a_ds_53.yaml
│   │   ├── s2_l2a_ds_54.yaml
│   │   ├── s2_l2a_ds_55.yaml
│   │   ├── s2_l2a_ds_56.yaml
│   │   ├── s2_l2a_ds_57.yaml
│   │   ├── s2_l2a_ds_58.yaml
│   │   ├── s2_l2a_ds_59.yaml
│   │   ├── s2_l2a_ds_60.yaml
│   │   ├── s2_l2a_ds_61.yaml
│   │   ├── s2_l2a_ds_62.yaml
│   │   ├── s2_l2a_ds_63.yaml
│   │   ├── s2_l2a_ds_64.yaml
│   │   └── s2_l2a_prod.yaml
│   ├── test_cfg_parser.py
│   ├── test_i18n.py
│   ├── test_layers.py
│   ├── test_mv_index.py
│   ├── test_routes.py
│   ├── test_update_ranges.py
│   ├── test_version.py
│   ├── test_wcs_server.py
│   ├── test_wms_server.py
│   ├── test_wmts_server.py
│   └── utils.py
├── inventory.json
├── license-headers.md
├── license-template.txt
├── licenseheaders.py
├── load_test.sh
├── messages.po
├── ows_cfg_report.json
├── pylintrc
├── pyproject.toml
├── s2_l2a_extractor.py
├── setup.py
├── spellcheck.yaml
├── test_urls.sh
├── tests
│   ├── __init__.py
│   ├── cfg
│   │   ├── README.txt
│   │   ├── __init__.py
│   │   ├── broken_nested.py
│   │   ├── infinite_1.json
│   │   ├── infinite_2.json
│   │   ├── infinite_2a.json
│   │   ├── infinite_2b.json
│   │   ├── minimal_cfg.py
│   │   ├── mixed_nested.json
│   │   ├── mixed_nested.py
│   │   ├── nested.py
│   │   ├── nested_1.json
│   │   ├── nested_2.json
│   │   ├── nested_3.json
│   │   ├── nested_4.json
│   │   ├── simple.json
│   │   └── simple.py
│   ├── conftest.py
│   ├── msg
│   │   ├── README.txt
│   │   ├── double_msgid.po
│   │   ├── double_msgstr.po
│   │   ├── duplicate_msgid.po
│   │   ├── good.po
│   │   ├── missing_msgid.po
│   │   ├── multiline_msgid.po
│   │   └── untagged_string.po
│   ├── test_band_utils.py
│   ├── test_cfg_bandidx.py
│   ├── test_cfg_cache_ctrl.py
│   ├── test_cfg_global.py
│   ├── test_cfg_inclusion.py
│   ├── test_cfg_layer.py
│   ├── test_cfg_metadata_types.py
│   ├── test_cfg_tile_matrix_set.py
│   ├── test_cfg_wcs.py
│   ├── test_config_toolkit.py
│   ├── test_data.py
│   ├── test_driver_cache.py
│   ├── test_legend_generator.py
│   ├── test_mpl_cmaps.py
│   ├── test_multidate_handler.py
│   ├── test_mv_selopts.py
│   ├── test_no_db_routes.py
│   ├── test_ogc_utils.py
│   ├── test_ows_configuration.py
│   ├── test_protocol_versions.py
│   ├── test_pyproj.py
│   ├── test_qprof.py
│   ├── test_resource_limits.py
│   ├── test_startup.py
│   ├── test_style_api.py
│   ├── test_styles.py
│   ├── test_time_res_method.py
│   ├── test_update_ranges.py
│   ├── test_utils.py
│   ├── test_wcs2_utils.py
│   ├── test_wcs_scaler.py
│   ├── test_wms_utils.py
│   ├── translations
│   │   ├── de
│   │   │   └── LC_MESSAGES
│   │   │       ├── ows_cfg.mo
│   │   │       └── ows_cfg.po
│   │   └── en
│   │       └── LC_MESSAGES
│   │           ├── ows_cfg.mo
│   │           └── ows_cfg.po
│   └── utils.py
├── update_ranges.py
├── wms_xsds
│   ├── capabilities_1_3_0.xsd
│   ├── capabilities_extensions.xsd
│   ├── capabilities_extensions_local.xsd
│   ├── exceptions_1_3_0.xsd
│   └── wmtsGetCapabilities_response.xsd
└── wordlist.txt
/.coveragerc:
--------------------------------------------------------------------------------
1 | # .coveragerc to control coverage.py
2 | [run]
3 | data_file = /tmp/.coverage
4 |
5 | [xml]
6 | output = /tmp/coverage.xml
7 |
--------------------------------------------------------------------------------
/.dockerignore:
--------------------------------------------------------------------------------
1 | datacube_wms/wms_cfg_local.py
2 | **/.pytest_cache
3 | **/__pycache__
4 | .hypothesis
5 |
6 | venv
7 | .venv
8 |
9 | **/.pixi
10 | .git
11 |
--------------------------------------------------------------------------------
/.env_ows_root:
--------------------------------------------------------------------------------
1 | # Example docker env file for an OWS instance with (multi-file) configuration.
2 | # These set some default vars; you can override them by setting environment variables.
3 | ################
4 | # ODC DB Config
5 | ################
6 | DB_HOSTNAME=postgres
7 | DB_PORT=5434
8 | DB_USERNAME=opendatacubeusername
9 | DB_PASSWORD=opendatacubepassword
10 | DB_DATABASE=opendatacube
11 |
12 | #################
13 | # OWS CFG Config
14 | #################
15 | PYTHONPATH=/src/config
16 | DATACUBE_OWS_CFG=ows_refactored.ows_root_cfg.ows_cfg
17 |
18 | ################
19 | # Docker Volume
20 | ################
21 | # OWS_CFG_DIR config enables mounting an external CFG folder
22 | OWS_CFG_DIR=~/dea-config/dev/services/wms/ows_refactored
23 | # OWS_CFG_MOUNT_DIR defines the mount inside docker container
24 | OWS_CFG_MOUNT_DIR=/src/config/ows_refactored
25 |
26 | ################
27 | # AWS S3 Config
28 | ################
29 | AWS_REGION=ap-southeast-2
30 | AWS_NO_SIGN_REQUEST=yes
31 | AWS_S3_ENDPOINT=
32 |
33 | ###################
34 | # Dev Tools Config
35 | ###################
36 | # If you want to use pydev for interactive debugging
37 | PYDEV_DEBUG=
38 | # Will not work with pydev
39 | # Note that FLASK_ENV is now deprecated.
40 | FLASK_DEBUG=
41 | PROMETHEUS_MULTIPROC_DIR=/tmp
42 | SENTRY_DSN=
43 |
--------------------------------------------------------------------------------
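
Note: DATACUBE_OWS_CFG above names the OWS configuration object as a dotted
Python path, importable thanks to the PYTHONPATH set alongside it. A minimal
sketch of how such a path resolves (illustrative only; the actual loader
lives in datacube_ows/ows_configuration.py):

    import importlib

    def resolve_cfg(dotted_path: str):
        # "ows_refactored.ows_root_cfg.ows_cfg" splits into the module
        # "ows_refactored.ows_root_cfg" and the attribute "ows_cfg",
        # which is a plain Python dict defining the OWS configuration.
        module_name, attr = dotted_path.rsplit(".", 1)
        return getattr(importlib.import_module(module_name), attr)

    cfg = resolve_cfg("ows_refactored.ows_root_cfg.ows_cfg")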
/.env_simple:
--------------------------------------------------------------------------------
1 | # Example docker env file for an OWS instance with (single-file) configuration.
2 | # These set some default vars; you can override them by setting environment variables.
3 |
4 | ################
5 | # ODC DB Config
6 | ################
7 | ODC_DEFAULT_DB_URL=postgresql://opendatacubeusername:opendatacubepassword@postgres:5432/odc_postgres
8 | ODC_OWSPOSTGIS_DB_URL=postgresql://opendatacubeusername:opendatacubepassword@postgres:5432/odc_postgis
9 |
10 | # Needed for docker db image and db readiness probe.
11 | POSTGRES_PORT=5432
12 | POSTGRES_HOSTNAME=postgres
13 | POSTGRES_USER=opendatacubeusername
14 | SERVER_DB_USERNAME=opendatacubeusername
15 | POSTGRES_PASSWORD=opendatacubepassword
16 | POSTGRES_DB="odc_postgres,odc_postgis"
17 |
18 | #################
19 | # OWS CFG Config
20 | #################
21 | DATACUBE_OWS_CFG=config.ows_test_cfg.ows_cfg
22 | PYTHONPATH=/src
23 |
24 | ################
25 | # Docker Volume
26 | ################
27 | # OWS_CFG_DIR config enables mounting an external CFG folder
28 | OWS_CFG_DIR=./integration_tests/cfg
29 | # OWS_CFG_MOUNT_DIR defines the mount inside docker container
30 | OWS_CFG_MOUNT_DIR=/src/config
31 |
32 | ################
33 | # AWS S3 Config
34 | ################
35 | AWS_REGION=ap-southeast-2
36 | AWS_NO_SIGN_REQUEST=yes
37 | AWS_S3_ENDPOINT=
38 |
39 | ###################
40 | # Dev Tools Config
41 | ###################
42 | # If you want to use pydev for interactive debugging
43 | PYDEV_DEBUG=
44 | # Will not work with pydev
45 | # Note FLASK_ENV is now deprecated.
46 | FLASK_DEBUG=
47 | PROMETHEUS_MULTIPROC_DIR=/tmp
48 | SENTRY_DSN=
49 |
--------------------------------------------------------------------------------
/.github/.codecov.yml:
--------------------------------------------------------------------------------
1 | codecov:
2 | require_ci_to_pass: yes
3 |
4 | coverage:
5 | status:
6 | project:
7 | default: # This can be anything, but it needs to exist as the name
8 | # basic settings
9 | target: auto
10 | threshold: 80%
11 | precision: 2
12 | round: down
13 | range: "20...100"
14 |
15 | parsers:
16 | gcov:
17 | branch_detection:
18 | conditional: yes
19 | loop: yes
20 | method: no
21 | macro: no
22 |
23 | fixes:
24 | - "/code::"
25 |
26 | ignore:
27 | - "/code/ows_cfg_example.py" # ignore cfg_example as it serves as reference
28 |
29 | comment:
30 | layout: "reach,diff,flags,tree"
31 | behavior: default
32 | require_changes: no
33 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/ISSUE_TEMPLATE.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: standard issue template
3 | about: Create a report to help us improve
4 | title: ''
5 | labels: ''
6 | assignees: ''
7 | ---
8 |
9 | ## Description
10 |
11 |
12 | ## Steps to Reproduce
13 |
14 |
15 | 1.
16 | 2.
17 | 3.
18 | 4.
19 |
20 | ## Context (Environment)
21 | ### `datacube-ows` version (datacube-ows-update --version):
22 |
23 |
24 | ### `ows_config.py` file (link, sample code)
25 |
26 |
27 | ### datacube product metadata (datacube product show product_name)
28 |
29 |
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 | updates:
3 | - package-ecosystem: github-actions
4 | directory: "/"
5 | schedule:
6 | interval: "daily"
7 | target-branch: "develop"
8 | - package-ecosystem: docker
9 | directory: "/"
10 | schedule:
11 | interval: "daily"
12 | target-branch: "develop"
13 |
--------------------------------------------------------------------------------
/.github/workflows/complementary-config-test.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | name: Complementary config test
3 |
4 | on:
5 | pull_request:
6 | branches:
7 | - 'develop-1.9'
8 | - 'develop'
9 | paths:
10 | - '**'
11 | - '!docs/**'
12 | - '!*.rst'
13 | - '!*.md'
14 | - '!datacube_ows/__init__.py'
15 | - '!.github/**'
16 | - '.github/workflows/complementary-config-test.yaml'
17 |
18 | push:
19 | branches:
20 | - 'develop-1.9'
21 | - 'develop'
22 | paths:
23 | - '**'
24 | - '!docs/**'
25 | - '!*.rst'
26 | - '!*.md'
27 | - '!datacube_ows/__init__.py'
28 | - '!.github/**'
29 | - '.github/workflows/complementary-config-test.yaml'
30 |
31 | env:
32 | ORG: opendatacube
33 | IMAGE: ows
34 |
35 | # When a PR is updated, cancel the jobs from the previous version. Merges
36 | # do not define head_ref, so use run_id to never cancel those jobs.
37 | concurrency:
38 | group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
39 | cancel-in-progress: true
40 |
41 | jobs:
42 | dea-config:
43 | runs-on: ubuntu-latest
44 |
45 | steps:
46 | - name: git checkout ows
47 | uses: actions/checkout@v4
48 | with:
49 | fetch-depth: 0
50 | path: datacube-ows
51 |
52 | - name: git checkout dea-config
53 | uses: actions/checkout@v4
54 | with:
55 | repository: GeoscienceAustralia/dea-config
56 | path: dea-config
57 |
58 | - name: Build dev OWS image
59 | run: |
60 | cd ./datacube-ows
61 | docker build \
62 | --tag ${ORG}/${IMAGE}:_builder \
63 | .
64 |
65 | - name: Config parser check
66 | run: |
67 | export LOCAL_UID=$(id -u $USER)
68 | export LOCAL_GID=$(id -g $USER)
69 | cd ./datacube-ows
70 | export $(grep -v '^#' ./complementary_config_test/.env_complementary_config_dea_dev | xargs)
71 | docker compose -f docker-compose.yaml -f docker-compose.cleandb.yaml up --quiet-pull -d --wait
72 | docker compose -f docker-compose.yaml -f docker-compose.cleandb.yaml exec -T ows /bin/sh -c "datacube system init; datacube system check"
73 | docker compose -f docker-compose.yaml -f docker-compose.cleandb.yaml exec -T ows /bin/sh -c "curl https://raw.githubusercontent.com/GeoscienceAustralia/dea-config/master/dev/services/wms/inventory.json -o /tmp/inventory.json"
74 | docker compose -f docker-compose.yaml -f docker-compose.db.yaml exec -T ows /bin/sh -c "cd /src && ./compare-cfg.sh"
75 | docker compose -f docker-compose.yaml -f docker-compose.cleandb.yaml down
76 |
--------------------------------------------------------------------------------
/.github/workflows/docker.yml:
--------------------------------------------------------------------------------
1 | ---
2 | name: Build Docker Image and push to GHCR
3 |
4 | on:
5 | push:
6 | branches:
7 | - develop
8 | paths:
9 | - "**"
10 | - '!docs/**'
11 | - '!*.rst'
12 | - '!*.md'
13 | - '!datacube_ows/__init__.py'
14 |
15 | release:
16 | types: [published]
17 |
18 | env:
19 | REGISTRY: ghcr.io
20 | IMAGE_NAME: opendatacube/ows
21 |
22 |
23 | jobs:
24 | build-and-push-image:
25 | runs-on: ubuntu-latest
26 |
27 | permissions:
28 | id-token: write # This is required for requesting the JWT
29 | contents: read # This is required for actions/checkout
30 | packages: write # This is required for pushing to ghcr
31 |
32 | steps:
33 | - name: Checkout repository
34 | uses: actions/checkout@v4
35 |
36 | - name: Log in to the Container registry
37 | uses: docker/login-action@v3
38 | with:
39 | registry: ${{ env.REGISTRY }}
40 | username: ${{ github.actor }}
41 | password: ${{ secrets.GITHUB_TOKEN }}
42 |
43 | - name: Extract metadata (tags, labels) for Docker
44 | id: meta
45 | uses: docker/metadata-action@v5
46 | with:
47 | images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
48 |
49 | - name: Build and push Docker image
50 | id: push
51 | uses: docker/build-push-action@v6
52 | with:
53 | context: .
54 | push: true
55 | tags: ${{ steps.meta.outputs.tags }}
56 | labels: ${{ steps.meta.outputs.labels }}
57 |
--------------------------------------------------------------------------------
/.github/workflows/dockerfile-lint.yml:
--------------------------------------------------------------------------------
1 | ---
2 | name: dockerfile Linting
3 |
4 | on:
5 | pull_request:
6 | branches:
7 | - 'develop'
8 | - 'develop-1.9'
9 | paths:
10 | - 'Dockerfile'
11 | - '.github/workflows/dockerfile-lint.yml'
12 |
13 | push:
14 | branches:
15 | - 'develop'
16 | paths:
17 | - 'Dockerfile'
18 | - '.github/workflows/dockerfile-lint.yml'
19 |
20 |
21 | # When a PR is updated, cancel the jobs from the previous version. Merges
22 | # do not define head_ref, so use run_id to never cancel those jobs.
23 | concurrency:
24 | group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
25 | cancel-in-progress: true
26 |
27 | jobs:
28 | dockerfile-lint:
29 | runs-on: ubuntu-latest
30 |
31 | steps:
32 | - name: Checkout Code
33 | uses: actions/checkout@v4
34 | with:
35 | fetch-depth: 0
36 |
37 | - name: lint Dockerfile
38 | uses: hadolint/hadolint-action@v3.1.0
39 | with:
40 | dockerfile: Dockerfile
41 | ignore: DL3008,DL3002,DL3013,DL3059,SC2102
42 |
--------------------------------------------------------------------------------
/.github/workflows/docpreview.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | name: Doc Preview
3 |
4 | on:
5 | pull_request_target:
6 | types:
7 | - opened
8 |
9 | permissions:
10 | pull-requests: write
11 |
12 | # When a PR is updated, cancel the jobs from the previous version. Merges
13 | # do not define head_ref, so use run_id to never cancel those jobs.
14 | concurrency:
15 | group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
16 | cancel-in-progress: true
17 |
18 | jobs:
19 |
20 | documentation-preview:
21 | runs-on: ubuntu-latest
22 | steps:
23 | - uses: readthedocs/actions/preview@v1
24 | with:
25 | project-slug: "datacube-ows"
26 |
--------------------------------------------------------------------------------
/.github/workflows/lint.yml:
--------------------------------------------------------------------------------
1 | ---
2 | name: Code Linting
3 |
4 | on:
5 | pull_request:
6 | branches:
7 | - 'develop'
8 | - 'develop-1.9'
9 | paths:
10 | - '**'
11 | - '!docs/**'
12 | - '!*.rst'
13 | - '!*.md'
14 | - '!datacube_ows/__init__.py'
15 | - '!.github/**'
16 | - '.github/workflows/lint.yml'
17 |
18 | push:
19 | branches:
20 | - 'develop'
21 | - 'develop-1.9'
22 | paths:
23 | - '**'
24 | - '!docs/**'
25 | - '!*.rst'
26 | - '!*.md'
27 | - '!datacube_ows/__init__.py'
28 | - '!.github/**'
29 | - '.github/workflows/lint.yml'
30 |
31 | # When a PR is updated, cancel the jobs from the previous version. Merges
32 | # do not define head_ref, so use run_id to never cancel those jobs.
33 | concurrency:
34 | group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
35 | cancel-in-progress: true
36 |
37 | jobs:
38 | pylint:
39 | runs-on: ubuntu-latest
40 | name: Pylint
41 | steps:
42 | - name: checkout git
43 | uses: actions/checkout@v4
44 | with:
45 | fetch-depth: 0
46 | - name: Install dependencies and run pylint
47 | run: |
48 | pip install .[test,dev]
49 | pylint -j 2 --reports no datacube_ows --disable=C,R,W,E1136
50 |
51 | ruff:
52 | name: ruff
53 | runs-on: ubuntu-latest
54 | steps:
55 | - name: Checkout
56 | uses: actions/checkout@v4
57 | - name: Set up Python
58 | uses: actions/setup-python@v5
59 | with:
60 | python-version: "3.10"
61 | - run: python -m pip install ruff
62 | - name: Ruff check
63 | run: ruff check --output-format=github
64 |
65 | mypy:
66 | runs-on: ubuntu-latest
67 | strategy:
68 | matrix:
69 | python-version: ["3.10"]
70 | name: MyPy
71 | steps:
72 | - name: checkout git
73 | uses: actions/checkout@v4
74 | with:
75 | fetch-depth: 0
76 | - name: Setup conda
77 | uses: s-weigand/setup-conda@v1
78 | with:
79 | update-conda: true
80 | python-version: ${{ matrix.python-version }}
81 | conda-channels: anaconda, conda-forge
82 | - name: run mypy
83 | run: |
84 | sudo apt-get remove python3-openssl
85 | pip install --upgrade -e '.[dev]'
86 | mypy datacube_ows
87 |
--------------------------------------------------------------------------------
/.github/workflows/pypi.yml:
--------------------------------------------------------------------------------
1 | ---
2 | name: PyPI
3 |
4 | on:
5 | release:
6 | types: [published]
7 |
8 | jobs:
9 | build-n-publish:
10 | name: Build and publish datacube-ows distributions 📦 to PyPI and TestPyPI
11 | runs-on: ubuntu-20.04
12 | if: github.event_name == 'release'
13 |
14 | steps:
15 | - uses: actions/checkout@v4
16 | - name: Fetch all history for all tags and branches
17 | run: git fetch --prune --unshallow
18 | - name: Set up Python 3.7
19 | uses: actions/setup-python@v5
20 | with:
21 | python-version: 3.7
22 | - name: Install pypa/build
23 | run: >-
24 | python -m
25 | pip install
26 | build
27 | --user
28 | - name: Build a binary wheel and a source tarball
29 | run: >-
30 | python -m
31 | build
32 | --sdist
33 | --wheel
34 | --outdir dist/
35 | .
36 | - name: Twine check
37 | run: |
38 | pip install twine
39 | twine check dist/*
40 | # - name: Publish distribution 📦 to Test PyPI
41 | # uses: pypa/gh-action-pypi-publish@master
42 | # with:
43 | # password: ${{ secrets.TEST_PYPI_API_TOKEN }}
44 | # repository_url: https://test.pypi.org/legacy/
45 | - name: Publish distribution 📦 to PyPI
46 | if: startsWith(github.ref, 'refs/tags')
47 | uses: pypa/gh-action-pypi-publish@release/v1
48 | with:
49 | password: ${{ secrets.PYPI_API_TOKEN }}
50 |
--------------------------------------------------------------------------------
/.github/workflows/pyspy-profiling.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | name: Pyspy Profiling Test
3 |
4 | on:
5 | pull_request:
6 | branches:
7 | - 'develop'
8 | - 'develop-1.9'
9 | paths:
10 | - '**'
11 | - '!docs/**'
12 | - '!*.rst'
13 | - '!*.md'
14 | - '!datacube_ows/__init__.py'
15 | - '!.github/**'
16 | - '.github/workflows/pyspy-profiling.yaml'
17 |
18 | push:
19 | branches:
20 | - 'develop'
21 | - 'develop-1.9'
22 | paths:
23 | - '**'
24 | - '!docs/**'
25 | - '!*.rst'
26 | - '!*.md'
27 | - '!datacube_ows/__init__.py'
28 | - '!.github/**'
29 | - '.github/workflows/pyspy-profiling.yaml'
30 |
31 | # When a PR is updated, cancel the jobs from the previous version. Merges
32 | # do not define head_ref, so use run_id to never cancel those jobs.
33 | concurrency:
34 | group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
35 | cancel-in-progress: true
36 |
37 | jobs:
38 | build:
39 | runs-on: ubuntu-latest
40 |
41 | steps:
42 | - name: Checkout code
43 | uses: actions/checkout@v4
44 | with:
45 | fetch-depth: 0
46 |
47 | # Run performance profiling
48 | - name: setup performance profiling with py-spy (stage 1 - run profiling containers)
49 | run: |
50 | export LOCAL_UID=$(id -u $USER)
51 | export LOCAL_GID=$(id -g $USER)
52 | export $(grep -v '^#' .env_simple | xargs)
53 | docker compose -f docker-compose.yaml -f docker-compose.db.yaml -f docker-compose.pyspy.yaml up --quiet-pull -d
54 |
55 | - name: Sleep for 10 seconds (stage 1 - wait for services to be ready)
56 | uses: whatnick/wait-action@master
57 | with:
58 | time: '10s'
59 |
60 | - name: set output container pid (stage 1 - get ows container pid)
61 | id: set-output-container-id
62 | run: |
63 | export LOCAL_UID=$(id -u $USER)
64 | export LOCAL_GID=$(id -g $USER)
65 | export $(grep -v '^#' .env_simple | xargs)
66 | echo "PID=$(docker inspect --format '{{.State.Pid}}' $(docker inspect -f '{{.Name}}' \
67 | $(docker compose -f docker-compose.yaml -f docker-compose.db.yaml -f docker-compose.pyspy.yaml ps -q ows) \
68 | | cut -c2-))" > $GITHUB_OUTPUT
69 |
70 | - name: Run py-spy profiling (stage 1 - run profiling service)
71 | timeout-minutes: 1
72 | continue-on-error: true
73 | run: |
74 | export LOCAL_UID=$(id -u $USER)
75 | export LOCAL_GID=$(id -g $USER)
76 | export $(grep -v '^#' .env_simple | xargs)
77 | docker compose -f docker-compose.yaml -f docker-compose.db.yaml -f docker-compose.pyspy.yaml \
78 | exec -T ows /bin/sh -c "cd /code && ./test_urls.sh &"
79 | docker compose -f docker-compose.yaml -f docker-compose.db.yaml -f docker-compose.pyspy.yaml \
80 | run pyspy record -f speedscope -o ./artifacts/profile.json --duration 30 \
81 | --pid ${{steps.set-output-container-id.outputs.PID}} --subprocesses
82 |
83 | - name: Stop py-spy profiling after timeout (stage 1 - stop profiling)
84 | run: |
85 | export $(grep -v '^#' .env_simple | xargs)
86 | docker compose -f docker-compose.yaml -f docker-compose.db.yaml -f docker-compose.pyspy.yaml down
87 |
88 | - name: Upload profile to artifact (stage 1 - upload profiling json to artifacts)
89 | uses: actions/upload-artifact@v4
90 | with:
91 | name: profile.json
92 | path: ./artifacts/profile.json
93 |
--------------------------------------------------------------------------------
/.github/workflows/scan.yml:
--------------------------------------------------------------------------------
1 | ---
2 | name: Scan
3 |
4 | on:
5 | push:
6 | branches:
7 | - develop
8 | - develop-1.9
9 | paths:
10 | - ".github/workflows/scan.yml"
11 | - "Dockerfile"
12 |
13 | pull_request:
14 | branches:
15 | - develop
16 | - develop-1.9
17 | paths:
18 | - ".github/workflows/scan.yml"
19 | - "Dockerfile"
20 |
21 | schedule:
22 | - cron: '0 0 * * *'
23 |
24 | env:
25 | IMAGE_NAME: opendatacube/ows
26 |
27 | # When a PR is updated, cancel the jobs from the previous version. Merges
28 | # do not define head_ref, so use run_id to never cancel those jobs.
29 | concurrency:
30 | group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
31 | cancel-in-progress: true
32 |
33 | jobs:
34 | cve-scanner:
35 | runs-on: ubuntu-latest
36 | steps:
37 | - name: Checkout git
38 | uses: actions/checkout@v4
39 | with:
40 | fetch-depth: 0
41 |
42 | - name: Get unstable git tag
43 | run: >
44 | echo "UNSTABLE_TAG=$(git describe --tags)" >> $GITHUB_ENV
45 |
46 | - name: Log the unstable tag
47 | run: echo $UNSTABLE_TAG
48 |
49 | - name: Build unstable + latest Docker image tag
50 | if: github.event_name != 'release'
51 | uses: whoan/docker-build-with-cache-action@v8
52 | with:
53 | image_name: ${{ env.IMAGE_NAME }}
54 | image_tag: ${{ env.UNSTABLE_TAG }},latest
55 | build_extra_args: "--build-arg=ENVIRONMENT=deployment"
56 | push_image_and_stages: false
57 |
58 | - name: Run vulnerability scanner
59 | if: github.event_name != 'release'
60 | uses: aquasecurity/trivy-action@master
61 | with:
62 | image-ref: "${{ env.IMAGE_NAME }}:${{ env.UNSTABLE_TAG }}"
63 | format: "sarif"
64 | output: 'trivy-results.sarif'
65 | # exit-code: "1"
66 | severity: "CRITICAL,HIGH"
67 |
68 | - name: Upload Trivy scan results to GitHub Security tab
69 | uses: github/codeql-action/upload-sarif@v3
70 | with:
71 | sarif_file: 'trivy-results.sarif'
72 |
73 | # - name: Notify Slack for Failures
74 | # uses: rtCamp/action-slack-notify@v2.1.0
75 | # if: failure()
76 | # env:
77 | # SLACK_CHANNEL: ga-wms-ops
78 | # SLACK_ICON: "https://github.com/docker.png?size=48"
79 | # SLACK_COLOR: "#482de1"
80 | # SLACK_MESSAGE: ""
81 | # SLACK_TITLE: CVE Scan alert
82 | # SLACK_USERNAME: OWS Scanner
83 | # SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
84 |
--------------------------------------------------------------------------------
/.github/workflows/spellcheck.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | name: Spell check
3 |
4 | on:
5 | pull_request:
6 | branches:
7 | - 'develop'
8 | - 'develop-1.9'
9 | paths:
10 | - 'docs/**'
11 | - '*.md'
12 | - '*.rst'
13 | - '.github/workflows/spellcheck.yaml'
14 |
15 |
16 | push:
17 | branches:
18 | - 'develop'
19 | - 'develop-1.9'
20 | paths:
21 | - 'docs/**'
22 | - '*.md'
23 | - '*.rst'
24 | - '.github/workflows/spellcheck.yaml'
25 |
26 | # When a PR is updated, cancel the jobs from the previous version. Merges
27 | # do not define head_ref, so use run_id to never cancel those jobs.
28 | concurrency:
29 | group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
30 | cancel-in-progress: true
31 |
32 | jobs:
33 |
34 | pyspellcheck:
35 | runs-on: ubuntu-latest
36 | steps:
37 | - uses: actions/checkout@v4
38 | - uses: igsekor/pyspelling-any@v1.0.4
39 | name: Spellcheck
40 |
--------------------------------------------------------------------------------
/.github/workflows/test-prod.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | name: Prod dockercompose test
3 |
4 | on:
5 | pull_request:
6 | branches:
7 | - 'develop'
8 | - 'develop-1.9'
9 | paths:
10 | - '**'
11 | - '!docs/**'
12 | - '!*.rst'
13 | - '!*.md'
14 | - '!datacube_ows/__init__.py'
15 | - '!.github/**'
16 | - '.github/workflows/test-prod.yaml'
17 |
18 | push:
19 | branches:
20 | - 'develop'
21 | - 'develop-1.9'
22 | paths:
23 | - '**'
24 | - '!docs/**'
25 | - '!*.rst'
26 | - '!*.md'
27 | - '!datacube_ows/__init__.py'
28 | - '!.github/**'
29 | - '.github/workflows/test-prod.yaml'
30 |
31 | env:
32 | ORG: opendatacube
33 | IMAGE: ows
34 |
35 | # When a PR is updated, cancel the jobs from the previous version. Merges
36 | # do not define head_ref, so use run_id to never cancel those jobs.
37 | concurrency:
38 | group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
39 | cancel-in-progress: true
40 |
41 | jobs:
42 | prod-docker-compose-tests:
43 | runs-on: ubuntu-latest
44 |
45 | steps:
46 | - uses: actions/checkout@v4
47 | with:
48 | fetch-depth: 0
49 |
50 | - name: Build and run prod OWS image
51 | run: |
52 | export LOCAL_UID=$(id -u $USER)
53 | export LOCAL_GID=$(id -g $USER)
54 | export $(grep -v '^#' .env_simple | xargs)
55 | docker compose -f docker-compose.yaml -f docker-compose.db.yaml -f docker-compose.prod.yaml up --quiet-pull --build -d --wait
56 | docker compose -f docker-compose.yaml -f docker-compose.db.yaml exec -T ows /bin/sh -c "cd /src && ./check-code-all.sh --no-test"
57 |
58 | # Run some tests on the images
59 | # These tests require a working database
60 | - name: Test ping
61 | run: |
62 | curl --show-error --fail \
63 | --connect-timeout 5 \
64 | --max-time 10 \
65 | --retry 5 \
66 | --retry-delay 0 \
67 | --retry-max-time 40 \
68 | "localhost:8000/ping" \
69 | > /dev/null
70 |
71 | - name: Test WMS GetCapabilities
72 |   run: |
73 |     curl --silent --show-error --fail \
74 |       "localhost:8000/?service=WMS&version=1.3.0&request=GetCapabilities" \
75 |       > /dev/null
76 | - name: Test WMTS GetCapabilities
77 |   run: |
78 |     curl --silent --show-error --fail \
79 |       "localhost:8000/?service=WMTS&version=1.0.0&request=GetCapabilities" \
80 |       > /dev/null
81 | - name: Test WCS1 GetCapabilities
82 |   run: |
83 |     curl --silent --show-error --fail \
84 |       "localhost:8000/?service=WCS&version=1.0.0&request=GetCapabilities" \
85 |       > /dev/null
86 | - name: Test WCS2 GetCapabilities
87 |   run: |
88 |     curl --silent --show-error --fail \
89 |       "localhost:8000/?service=WCS&version=2.0.1&request=GetCapabilities" \
90 |       > /dev/null
91 | - name: Test Prometheus Metrics
92 |   run: |
93 |     curl --silent --show-error --fail \
94 |       "localhost:8000/metrics" \
95 |       > /dev/null
96 |
--------------------------------------------------------------------------------
/.github/workflows/test.yml:
--------------------------------------------------------------------------------
1 | ---
2 | name: Tests
3 |
4 | on:
5 | pull_request:
6 | branches:
7 | - 'develop'
8 | - 'develop-1.9'
9 | paths:
10 | - '**'
11 | - '!docs/**'
12 | - '!*.rst'
13 | - '!*.md'
14 | - '!datacube_ows/__init__.py'
15 | - '!.github/**'
16 | - '.github/workflows/test.yml'
17 |
18 | push:
19 | branches:
20 | - 'develop'
21 | - 'develop-1.9'
22 | paths:
23 | - '**'
24 | - '!docs/**'
25 | - '!*.rst'
26 | - '!*.md'
27 | - '!datacube_ows/__init__.py'
28 | - '!.github/**'
29 | - '.github/workflows/test.yml'
30 |
31 | env:
32 | ORG: opendatacube
33 | IMAGE: ows
34 |
35 | # When a PR is updated, cancel the jobs from the previous version. Merges
36 | # do not define head_ref, so use run_id to never cancel those jobs.
37 | concurrency:
38 | group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
39 | cancel-in-progress: true
40 |
41 | jobs:
42 | unit-integration-performance-tests:
43 | runs-on: ubuntu-latest
44 |
45 | steps:
46 | - uses: actions/checkout@v4
47 | with:
48 | fetch-depth: 0
49 |
50 | # We build the stage 1 image, then run test on it
51 | # These tests require extra files we don't want in
52 | # the production image
53 | - name: Build dev OWS image
54 |   run: |
55 |     docker build --build-arg ENVIRONMENT=test \
56 |       --tag ${ORG}/${IMAGE}:_builder \
57 |       .
58 |
59 | - name: Test and lint dev OWS image
60 |   run: |
61 |     mkdir artifacts
62 |     docker run -e LOCAL_UID=$(id -u $USER) -e LOCAL_GID=$(id -g $USER) -v ${PWD}/artifacts:/mnt/artifacts ${ORG}/${IMAGE}:_builder /bin/sh -c "cd /src && ./check-code.sh"
63 |     mv ./artifacts/coverage.xml ./artifacts/coverage-unit.xml
64 |
65 | - name: Dockerized Integration Pytest
66 |   run: |
67 |     export LOCAL_UID=$(id -u $USER)
68 |     export LOCAL_GID=$(id -g $USER)
69 |     export $(grep -v '^#' .env_simple | xargs)
70 |     docker compose -f docker-compose.yaml -f docker-compose.db.yaml up --quiet-pull -d --wait --build
71 |     docker compose -f docker-compose.yaml -f docker-compose.db.yaml exec -T ows /bin/sh -c "cd /src && ./check-code-all.sh"
72 |     docker compose -f docker-compose.yaml -f docker-compose.db.yaml down
73 |
74 | - name: Upload All coverage to Codecov
75 |   uses: codecov/codecov-action@v5
76 |   with:
77 |     directory: ./artifacts/
78 |     fail_ci_if_error: false
79 |   env:
80 |     CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
81 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | env/
12 | build/
13 | develop-eggs/
14 | dist/
15 | downloads/
16 | eggs/
17 | .eggs/
18 | lib/
19 | lib64/
20 | parts/
21 | sdist/
22 | var/
23 | *.egg-info/
24 | .installed.cfg
25 | *.egg
26 |
27 | # PyInstaller
28 | # Usually these files are written by a python script from a template
29 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
30 | *.manifest
31 | *.spec
32 |
33 | # Installer logs
34 | pip-log.txt
35 | pip-delete-this-directory.txt
36 |
37 | # Unit test / coverage reports
38 | htmlcov/
39 | .tox/
40 | .coverage
41 | .coverage.*
42 | .cache
43 | nosetests.xml
44 | coverage.xml
45 | *,cover
46 | .hypothesis/
47 |
48 | # Translations
49 | *.mo
50 | *.pot
51 |
52 | # Django stuff:
53 | *.log
54 |
55 | # Sphinx documentation
56 | docs/_build/
57 |
58 | # PyBuilder
59 | target/
60 |
61 | # pyenv python configuration file
62 | .python-version
63 |
64 | # Editor temp files
65 | .*.swp
66 |
67 | # PyCharm project files
68 | .idea/
69 |
70 | # VSCode project files
71 | .vscode/
72 |
73 | # Local pyre files
74 | .pyre/
75 |
76 | wsgi_local.py
77 | wms_cfg_local.py
78 | ows_*_cfg.py
79 |
80 | .DS_Store
81 | docker-compose.yaml
82 | ows_cfg.py
83 |
84 | _version.py
85 | .env
86 | ows_refactored/
87 | local_cfg/
88 | .mypy_cache/
89 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | repos:
2 | - repo: https://github.com/pre-commit/pre-commit-hooks
3 | rev: v5.0.0
4 | hooks:
5 | - id: check-yaml
6 | - id: end-of-file-fixer
7 | - id: trailing-whitespace
8 | - repo: https://github.com/astral-sh/ruff-pre-commit
9 | rev: v0.11.12
10 | hooks:
11 | - id: ruff
12 | args: [--fix, --show-fixes, --output-format, grouped]
13 | # - repo: https://github.com/PyCQA/bandit
14 | # rev: 1.7.4
15 | # hooks:
16 | # - id: bandit
17 | - repo: https://github.com/PyCQA/pylint
18 | rev: v3.3.7
19 | hooks:
20 | - id: pylint
21 | args: ["--disable=C,R,W,E1136"]
22 |
--------------------------------------------------------------------------------
/.readthedocs.yaml:
--------------------------------------------------------------------------------
1 | # Read the Docs configuration file for Sphinx projects
2 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
3 | # Required
4 | version: 2
5 |
6 |
7 | # Set the OS, Python version and other tools you might need
8 | build:
9 | os: ubuntu-22.04
10 | tools:
11 | python: "3.10"
12 |
13 | # Build documentation in the "docs/" directory with Sphinx
14 | sphinx:
15 | configuration: docs/conf.py
16 |
17 |
18 | # Optionally build your docs in additional formats such as PDF and ePub
19 | # formats:
20 | # - pdf
21 | # - epub
22 |
23 | # Optional but recommended, declare the Python requirements required
24 | # to build your documentation
25 | # See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html
26 | python:
27 | install:
28 | - requirements: docs/requirements.txt
29 |
--------------------------------------------------------------------------------
/.yamllint:
--------------------------------------------------------------------------------
1 | extends: default
2 |
3 | rules:
4 | # We could enable these if we find a decent yaml autoformatter.
5 | # (yamlfmt in pip is too buggy at the moment for our files)
6 | #
7 | # The effort of fixing them by hand, especially auto-generated yamls, is not
8 | # currently worth it.
9 |
10 | # Many tools (eg. generated secure keys) don't output wrapped lines.
11 | line-length: disable
12 |
13 | # Pedantry & existing docs don't have it.
14 | document-start: disable
15 |
16 | # Warning that truthy values are not quoted.
17 | # Many documents currently use "True" without quotes, so this would be a
18 | # larger change across almost every doc.
19 | truthy: disable
20 |
21 | # Whitespace issues: hundreds in existing docs.
22 | indentation: disable
23 | trailing-spaces: disable
24 | commas: disable
25 | new-line-at-end-of-file: disable
26 | brackets: disable
27 | hyphens: disable
28 | colons: disable
29 | comments: disable
30 | comments-indentation: disable
31 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | # Note that this is now pinned to a fixed version. Remember to check for new versions periodically.
2 | FROM ghcr.io/osgeo/gdal:ubuntu-small-3.10.3 AS builder
3 |
4 | LABEL org.opencontainers.image.source=https://github.com/opendatacube/datacube-ows
5 | LABEL org.opencontainers.image.description="Datacube OWS"
6 | LABEL org.opencontainers.image.licenses="Apache-2.0"
7 |
8 | # Environment is test or deployment.
9 | ARG ENVIRONMENT=deployment
10 |
11 | # Setup build env for postgresql-client-16
12 | USER root
13 | RUN apt-get update -y \
14 | && DEBIAN_FRONTEND=noninteractive apt-get install -y --fix-missing --no-install-recommends \
15 | git \
16 | # For Psycopg2
17 | libpq-dev python3-dev \
18 | gcc \
19 | python3-pip \
20 | postgresql-client-16 \
21 | # For Pyproj build \
22 | proj-bin libproj-dev \
23 | && apt-get clean \
24 | && rm -rf /var/lib/apt/lists/* /var/dpkg/* /var/tmp/* /var/log/dpkg.log
25 |
26 | WORKDIR /build
27 |
28 | RUN python3 -m pip --disable-pip-version-check -q wheel --no-binary psycopg2 psycopg2 \
29 | && ([ "$ENVIRONMENT" = "deployment" ] || \
30 | python3 -m pip --disable-pip-version-check -q wheel --no-binary pyproj pyproj)
31 |
32 | # Should match builder base.
33 | FROM ghcr.io/osgeo/gdal:ubuntu-small-3.10.3
34 |
35 | # Environment is test or deployment.
36 | ARG ENVIRONMENT=deployment
37 | RUN export DEBIAN_FRONTEND=noninteractive \
38 | && apt-get update -y \
39 | && apt-get install -y --no-install-recommends \
40 | git \
41 | gosu \
42 | python3-pip \
43 | tini \
44 | && ([ "$ENVIRONMENT" = "deployment" ] || \
45 | apt-get install -y --no-install-recommends \
46 | proj-bin) \
47 | && apt-get upgrade -y \
48 | && apt-get clean \
49 | && rm -rf /var/lib/apt/lists/* /var/dpkg/* /var/tmp/* /var/log/dpkg.log
50 |
51 | # Add login-script for UID/GID-remapping.
52 | COPY --chown=root:root --link docker/files/remap-user.sh /usr/local/bin/remap-user.sh
53 |
54 | # Copy source code and install it
55 | WORKDIR /src
56 | COPY . /src
57 |
58 | ## Only install pydev requirements if arg PYDEV_DEBUG is set to 'yes'
59 | ARG PYDEV_DEBUG="no"
60 | COPY --from=builder --link /build/*.whl ./
61 | RUN EXTRAS=$([ "$ENVIRONMENT" = "deployment" ] || echo ",test") && \
62 | python3 -m pip --disable-pip-version-check install ./*.whl --break-system-packages && \
63 | rm ./*.whl && \
64 | echo "version=\"$(python3 setup.py --version)\"" > datacube_ows/_version.py && \
65 | python3 -m pip --disable-pip-version-check install --no-cache-dir ".[ops$EXTRAS]" --break-system-packages && \
66 | ([ "$PYDEV_DEBUG" != "yes" ] || \
67 | python3 -m pip --disable-pip-version-check install --no-cache-dir .[dev] --break-system-packages) && \
68 | python3 -m pip freeze && \
69 | ([ "$ENVIRONMENT" != "deployment" ] || \
70 | (rm -rf /src/* /src/.git* && \
71 | apt-get purge -y \
72 | git \
73 | git-man \
74 | python3-pip))
75 |
76 | # Configure user
77 | WORKDIR "/home/ubuntu"
78 |
79 | ENV GDAL_DISABLE_READDIR_ON_OPEN="EMPTY_DIR" \
80 | CPL_VSIL_CURL_ALLOWED_EXTENSIONS=".tif, .tiff" \
81 | GDAL_HTTP_MAX_RETRY="10" \
82 | GDAL_HTTP_RETRY_DELAY="1"
83 |
84 | ENTRYPOINT ["/usr/local/bin/remap-user.sh"]
85 | CMD ["gunicorn", "-b", "0.0.0.0:8000", "--workers=3", "-k", "gevent", "--timeout", "121", "--pid", "/home/ubuntu/gunicorn.pid", "--log-level", "info", "--worker-tmp-dir", "/dev/shm", "--config", "python:datacube_ows.gunicorn_config", "datacube_ows.wsgi"]
86 |
--------------------------------------------------------------------------------
/Dockerfile.micromamba:
--------------------------------------------------------------------------------
1 | FROM mambaorg/micromamba:2.1.1
2 | COPY --chown=$MAMBA_USER:$MAMBA_USER env.micromamba.yaml /tmp/env.yaml
3 | RUN --mount=type=cache,target=/opt/conda/pkgs micromamba install -y -n base -f /tmp/env.yaml && \
4 | micromamba clean --all --yes --force-pkgs-dirs && \
5 | # find /home/mambauser/.mamba/pkgs -type d \( -name test -o -name tests \) -print0 | xargs -0 rm -rf && \
6 | find /opt/conda/lib -type d \( -name test -o -name tests \) -print0 | xargs -0 rm -rf && \
7 | rm -rf /opt/conda/lib/libpython3* /opt/conda/include /opt/conda/share/{gir-1.0,poppler,man}
8 | # TODO: pieces of botocore (98Mb) and scipy (72Mb) can likely be removed
9 |
10 | ARG MAMBA_DOCKERFILE_ACTIVATE=1 # (otherwise python will not be found)
11 |
12 |
13 | COPY --chown=$MAMBA_USER:$MAMBA_USER . /tmp/code
14 |
15 | ARG PSEUDO_VERSION # strongly recommended to update based on git describe
16 |
17 | RUN SETUPTOOLS_SCM_PRETEND_VERSION_FOR_DATACUBE_OWS=${PSEUDO_VERSION} pip install /tmp/code #-e .[test]
18 | #RUN pip install /code
19 | #python -c 'import uuid; print(uuid.uuid4())' > /tmp/my_uuid
20 |
21 | ENV GDAL_DISABLE_READDIR_ON_OPEN="EMPTY_DIR" \
22 | CPL_VSIL_CURL_ALLOWED_EXTENSIONS=".tif, .tiff" \
23 | GDAL_HTTP_MAX_RETRY="10" \
24 | GDAL_HTTP_RETRY_DELAY="1"
25 |
26 | CMD ["gunicorn", "-b", "0.0.0.0:8000", "--workers=3", "-k", "gthread", "--timeout", "121", "--pid", "/tmp/gunicorn.pid", "--log-level", "info", "--worker-tmp-dir", "/dev/shm", "--config", "python:datacube_ows.gunicorn_config", "datacube_ows.wsgi"]
27 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 |
2 | Apache Software License 2.0
3 |
4 | Licensed under the Apache License, Version 2.0 (the "License");
5 | you may not use this file except in compliance with the License.
6 | You may obtain a copy of the License at
7 |
8 | http://www.apache.org/licenses/LICENSE-2.0
9 |
10 | Unless required by applicable law or agreed to in writing, software
11 | distributed under the License is distributed on an "AS IS" BASIS,
12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | See the License for the specific language governing permissions and
14 | limitations under the License.
15 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 |
2 | include CONTRIBUTING.rst
3 | include HISTORY.rst
4 | include LICENSE
5 | include README.rst
6 | graft datacube_ows/templates
7 | graft datacube_ows/sql
8 |
9 | recursive-exclude * __pycache__
10 | recursive-exclude * *.py[co]
11 |
12 | prune integration_tests/
13 | prune tests/
14 | recursive-include docs *.rst conf.py Makefile make.bat *.jpg *.png *.gif
15 |
--------------------------------------------------------------------------------
/SECURITY.md:
--------------------------------------------------------------------------------
1 | # Security Policy
2 |
3 | ## Supported Versions
4 |
5 | OWS does not currently issue special security updates. Security updates are
6 | included in the normal release cycle.
7 |
8 | ## Reporting a Vulnerability
9 |
10 | Security vulnerabilities can be reported as regular GitHub issues. If there
11 | are concerns about early public disclosure, security vulnerabilities can be
12 | reported to Paul Haesler, either by direct message on the opendatacube Slack, or
13 | by email to `paul (dot) haesler (at) ga (dot) gov (dot) au`.
14 |
--------------------------------------------------------------------------------
/cfg_parser.py:
--------------------------------------------------------------------------------
1 | # This file is part of datacube-ows, part of the Open Data Cube project.
2 | # See https://opendatacube.org for more information.
3 | #
4 | # Copyright (c) 2017-2024 OWS Contributors
5 | # SPDX-License-Identifier: Apache-2.0
6 |
7 | from datacube_ows.cfg_parser_impl import main
8 | from datacube_ows.startup_utils import initialise_debugging
9 |
10 | if __name__ == '__main__':
11 | initialise_debugging()
12 | main()
13 |
--------------------------------------------------------------------------------
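
compare-cfg.sh drives this same entry point via the installed datacube-ows-cfg
console script. A sketch of the equivalent programmatic call (assuming main()
is a click-style command that accepts an explicit argument list):

    from datacube_ows.cfg_parser_impl import main

    # Equivalent to: datacube-ows-cfg check -i /tmp/inventory.json
    main(["check", "-i", "/tmp/inventory.json"])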
/check-code.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | # Convenience script for running Travis-like checks.
3 |
4 | set -eu
5 | set -x
6 |
7 | # Run tests, taking coverage.
8 | # Users can specify extra folders as arguments.
9 | python3 -m pytest --cov=datacube_ows --cov-report=xml tests/
10 | cp /tmp/coverage.xml /mnt/artifacts
11 |
12 | set +x
13 |
--------------------------------------------------------------------------------
/code-of-conduct.md:
--------------------------------------------------------------------------------
1 | # Contributor Covenant Code of Conduct
2 |
3 | ## Our Pledge
4 |
5 | In the interest of fostering an open and welcoming environment, we as
6 | contributors and maintainers pledge to making participation in our project and
7 | our community a harassment-free experience for everyone, regardless of age, body
8 | size, disability, ethnicity, sex characteristics, gender identity and expression,
9 | level of experience, education, socio-economic status, nationality, personal
10 | appearance, race, religion, or sexual identity and orientation.
11 |
12 | ## Our Standards
13 |
14 | Examples of behavior that contributes to creating a positive environment
15 | include:
16 |
17 | * Using welcoming and inclusive language
18 | * Being respectful of differing viewpoints and experiences
19 | * Gracefully accepting constructive criticism
20 | * Focusing on what is best for the community
21 | * Showing empathy towards other community members
22 |
23 | Examples of unacceptable behavior by participants include:
24 |
25 | * The use of sexualized language or imagery and unwelcome sexual attention or
26 | advances
27 | * Trolling, insulting/derogatory comments, and personal or political attacks
28 | * Public or private harassment
29 | * Publishing others' private information, such as a physical or electronic
30 | address, without explicit permission
31 | * Other conduct which could reasonably be considered inappropriate in a
32 | professional setting
33 |
34 | ## Our Responsibilities
35 |
36 | Project maintainers are responsible for clarifying the standards of acceptable
37 | behavior and are expected to take appropriate and fair corrective action in
38 | response to any instances of unacceptable behavior.
39 |
40 | Project maintainers have the right and responsibility to remove, edit, or
41 | reject comments, commits, code, wiki edits, issues, and other contributions
42 | that are not aligned to this Code of Conduct, or to ban temporarily or
43 | permanently any contributor for other behaviors that they deem inappropriate,
44 | threatening, offensive, or harmful.
45 |
46 | ## Scope
47 |
48 | This Code of Conduct applies within all project spaces, and it also applies when
49 | an individual is representing the project or its community in public spaces.
50 | Examples of representing a project or community include using an official
51 | project e-mail address, posting via an official social media account, or acting
52 | as an appointed representative at an online or offline event. Representation of
53 | a project may be further defined and clarified by project maintainers.
54 |
55 | ## Enforcement
56 |
57 | Instances of abusive, harassing, or otherwise unacceptable behavior may be
58 | reported by contacting the ODC Steering Council Chair,
59 | currently [Caitlin Adams](cadams@frontiersi.com.au) or alternatively, a member of
60 | the ODC Secretariat, currently [George Dyke](george@symbioscomms.com).
61 | All complaints will be reviewed and investigated and will result in a response that
62 | is deemed necessary and appropriate to the circumstances. The project team is
63 | obligated to maintain confidentiality with regard to the reporter of an incident.
64 | Further details of specific enforcement policies may be posted separately.
65 |
66 | Project maintainers who do not follow or enforce the Code of Conduct in good
67 | faith may face temporary or permanent repercussions as determined by other
68 | members of the project's leadership.
69 |
70 | ## Attribution
71 |
72 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
73 | available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
74 |
75 | [homepage]: https://www.contributor-covenant.org
76 |
77 | For answers to common questions about this code of conduct, see
78 | https://www.contributor-covenant.org/faq
79 |
--------------------------------------------------------------------------------
/compare-cfg.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | # Convenience script for running Travis-like checks.
3 |
4 | set -eu
5 | set -x
6 |
7 | datacube-ows-cfg check -i /tmp/inventory.json
8 |
9 | set +x
10 |
--------------------------------------------------------------------------------
/complementary_config_test/.env_complementary_config_dea_dev:
--------------------------------------------------------------------------------
1 | # Example docker env file for an OWS instance with (multi-file) configuration.
2 | # These set some default vars; you can override them by setting environment variables.
3 | ################
4 | # ODC DB Config
5 | ################
6 | ODC_DEFAULT_DB_URL=postgresql://opendatacubeusername:opendatacubepassword@postgres:5432/odc_postgres
7 | ODC_OWSPOSTGIS_DB_URL=postgresql://opendatacubeusername:opendatacubepassword@postgres:5432/odc_postgis
8 |
9 | # Needed for Docker db image and db readiness probe.
10 | POSTGRES_HOSTNAME=postgres
11 | POSTGRES_PORT=5434
12 | POSTGRES_USER=opendatacubeusername
13 | SERVER_DB_USERNAME=opendatacubeusername
14 | POSTGRES_PASSWORD=opendatacubepassword
15 | POSTGRES_DB="odc_postgres,odc_postgis"
16 | READY_PROBE_DB=odc_postgis
17 |
18 | #################
19 | # OWS CFG Config
20 | #################
21 | PYTHONPATH=/env/config
22 | DATACUBE_OWS_CFG=ows_refactored.ows_root_cfg.ows_cfg
23 |
24 | ################
25 | # Docker Volume
26 | ################
27 | # OWS_CFG_DIR config enables mounting an external CFG folder
28 | OWS_CFG_DIR=/home/runner/work/datacube-ows/datacube-ows/dea-config/dev/services/wms/ows_refactored
29 | # OWS_CFG_MOUNT_DIR defines the mount inside docker container
30 | OWS_CFG_MOUNT_DIR=/env/config/ows_refactored
31 |
32 | ################
33 | # AWS S3 Config
34 | ################
35 | AWS_REGION=ap-southeast-2
36 | AWS_NO_SIGN_REQUEST=yes
37 | AWS_S3_ENDPOINT=
38 |
39 | ###################
40 | # Dev Tools Config
41 | ###################
42 | # If you want to use pydev for interactive debugging
43 | PYDEV_DEBUG=
44 | # Will not work with pydev
45 | # Note that FLASK_ENV is now deprecated.
46 | FLASK_DEBUG=
47 | PROMETHEUS_MULTIPROC_DIR=/tmp
48 | SENTRY_DSN=
49 |
--------------------------------------------------------------------------------
/datacube_ows/__init__.py:
--------------------------------------------------------------------------------
1 | # This file is part of datacube-ows, part of the Open Data Cube project.
2 | # See https://opendatacube.org for more information.
3 | #
4 | # Copyright (c) 2017-2024 OWS Contributors
5 | # SPDX-License-Identifier: Apache-2.0
6 |
7 | try:
8 | from ._version import version as __version__
9 | except ImportError:
10 | # Default version number.
11 | # Will only be used when running datacube-ows directly from source code (i.e. not properly installed)
12 | __version__ = "1.9.2"
13 |
--------------------------------------------------------------------------------
/datacube_ows/config_toolkit.py:
--------------------------------------------------------------------------------
1 | # This file is part of datacube-ows, part of the Open Data Cube project.
2 | # See https://opendatacube.org for more information.
3 | #
4 | # Copyright (c) 2017-2024 OWS Contributors
5 | # SPDX-License-Identifier: Apache-2.0
6 |
7 | from copy import deepcopy
8 | from typing import Any
9 |
10 |
11 | def deepinherit(parent: dict[str, Any], child: dict[str, Any]) -> dict[str, Any]:
12 | """
13 | Implements inheritance for configuration.
14 |
15 | :param parent: The parent configuration to inherit from
16 | :param child: The child configuration to override the parent config
17 | :return: A new dictionary reflecting the inherited configuration
18 | """
19 | expanded: dict[str, Any] = deepcopy(parent)
20 | deepupdate(expanded, child)
21 | return expanded
22 |
23 |
24 | def deepupdate(target: dict[str, Any], src: dict[str, Any]) -> None:
25 | for k in src:
26 | if isinstance(src[k], dict):
27 | if k not in target:
28 | target[k] = {}
29 | # recurse dictionary
30 | deepupdate(target[k], src[k])
31 | elif isinstance(src[k], str):
32 | # Use child's version of str
33 | target[k] = src[k]
34 | else:
35 | try:
36 | iter(src[k])
37 | # non-str iterable
38 | if not src[k]:
39 | # Empty list - replace target list
40 | target[k] = []
41 | elif isinstance(src[k][0], (int, float)):
42 |                 # List of ints or floats - replace target list
43 | target[k] = src[k]
44 | else:
45 | # iterables of other types - append child to parent
46 | if k in target:
47 | target[k] = target[k] + src[k]
48 | else:
49 | target[k] = src[k]
50 | except TypeError:
51 | # Non-iterable - Use child's version
52 | target[k] = src[k]
53 |
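A minimal usage sketch of the semantics above (the config keys are hypothetical layer fields, not a complete OWS layer): nested dicts merge recursively, strings and numeric lists from the child replace the parent's values, and other lists are concatenated.

from datacube_ows.config_toolkit import deepinherit

parent = {
    "title": "Parent layer",                  # str
    "resource_limits": {"max_datasets": 16,   # nested dict
                        "zoom_fill": [150, 180, 200, 160]},
    "styles": ["simple_rgb"],                 # list of non-numbers
}
child = {
    "title": "Child layer",                   # replaces parent's string
    "resource_limits": {"max_datasets": 32},  # merged key-by-key
    "styles": ["ndvi"],                       # appended to parent's list
}

merged = deepinherit(parent, child)
assert merged["title"] == "Child layer"
assert merged["resource_limits"] == {"max_datasets": 32,
                                     "zoom_fill": [150, 180, 200, 160]}
assert merged["styles"] == ["simple_rgb", "ndvi"]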
--------------------------------------------------------------------------------
/datacube_ows/gunicorn_config.py:
--------------------------------------------------------------------------------
1 | # This file is part of datacube-ows, part of the Open Data Cube project.
2 | # See https://opendatacube.org for more information.
3 | #
4 | # Copyright (c) 2017-2024 OWS Contributors
5 | # SPDX-License-Identifier: Apache-2.0
6 |
7 | """Gunicorn config for Prometheus internal metrics
8 | """
9 | import os
10 |
11 | from prometheus_flask_exporter.multiprocess import GunicornInternalPrometheusMetrics
12 |
13 |
14 | def child_exit(server, worker) -> None:
15 | if os.environ.get("PROMETHEUS_MULTIPROC_DIR", False):
16 | GunicornInternalPrometheusMetrics.mark_process_dead_on_child_exit(worker.pid)
17 |
--------------------------------------------------------------------------------
/datacube_ows/index/__init__.py:
--------------------------------------------------------------------------------
1 | # This file is part of datacube-ows, part of the Open Data Cube project.
2 | # See https://opendatacube.org for more information.
3 | #
4 | # Copyright (c) 2017-2024 OWS Contributors
5 | # SPDX-License-Identifier: Apache-2.0
6 |
7 | from .api import AbortRun, CoordRange, LayerExtent, LayerSignature, ows_index
8 |
9 | __all__ = ["ows_index", "AbortRun", "CoordRange", "LayerSignature", "LayerExtent"]
10 |
--------------------------------------------------------------------------------
/datacube_ows/index/driver.py:
--------------------------------------------------------------------------------
1 | # This file is part of datacube-ows, part of the Open Data Cube project.
2 | # See https://opendatacube.org for more information.
3 | #
4 | # Copyright (c) 2017-2024 OWS Contributors
5 | # SPDX-License-Identifier: Apache-2.0
6 |
7 | from threading import Lock
8 | from typing import Optional
9 |
10 | from datacube.drivers.driver_cache import load_drivers
11 |
12 | TYPE_CHECKING = False
13 | if TYPE_CHECKING:
14 | from datacube_ows.index.api import OWSAbstractIndexDriver
15 |
16 | cache_lock = Lock()
17 |
18 |
19 | class OWSIndexDriverCache:
20 | _instance = None
21 | _initialised = False
22 | def __new__(cls, *args, **kwargs) -> "OWSIndexDriverCache":
23 | if cls._instance is None:
24 | with cache_lock:
25 | if cls._instance is None:
26 | cls._instance = super().__new__(cls)
27 | return cls._instance
28 |
29 | def __init__(self, group: str) -> None:
30 | with cache_lock:
31 | if not self._initialised:
32 | self._initialised = True
33 | self._drivers = load_drivers(group)
34 | def __call__(self, name: str) -> Optional["OWSAbstractIndexDriver"]:
35 |         """
36 |         Look up an index driver by name.
37 |
38 |         :param name: Driver name
39 |         :return: The named index driver, or None if no driver with that name is found
40 |         """
41 | return self._drivers.get(name, None)
42 |
43 | def drivers(self) -> list[str]:
44 | """ Returns list of driver names
45 | """
46 | return list(self._drivers.keys())
47 |
48 |
49 | def ows_index_drivers() -> list[str]:
50 | return OWSIndexDriverCache("datacube_ows.plugins.index").drivers()
51 |
52 |
53 | def ows_index_driver_by_name(name: str) -> Optional["OWSAbstractIndexDriver"]:
54 | return OWSIndexDriverCache("datacube_ows.plugins.index")(name)
55 |
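A short usage sketch of the cache above; the available driver names depend on which plugins are installed, so the values shown are assumptions:

from datacube_ows.index.driver import (ows_index_driver_by_name,
                                       ows_index_drivers)

print(ows_index_drivers())  # e.g. ['postgres', 'postgis']

# Both helpers share the same singleton cache, so drivers are loaded only once.
driver = ows_index_driver_by_name("postgres")
if driver is None:
    raise RuntimeError("No 'postgres' index driver plugin installed")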
--------------------------------------------------------------------------------
/datacube_ows/index/postgis/__init__.py:
--------------------------------------------------------------------------------
1 | # This file is part of datacube-ows, part of the Open Data Cube project.
2 | # See https://opendatacube.org for more information.
3 | #
4 | # Copyright (c) 2017-2024 OWS Contributors
5 | # SPDX-License-Identifier: Apache-2.0
6 |
--------------------------------------------------------------------------------
/datacube_ows/index/postgres/__init__.py:
--------------------------------------------------------------------------------
1 | # This file is part of datacube-ows, part of the Open Data Cube project.
2 | # See https://opendatacube.org for more information.
3 | #
4 | # Copyright (c) 2017-2024 OWS Contributors
5 | # SPDX-License-Identifier: Apache-2.0
6 |
--------------------------------------------------------------------------------
/datacube_ows/legend_generator.py:
--------------------------------------------------------------------------------
1 | # This file is part of datacube-ows, part of the Open Data Cube project.
2 | # See https://opendatacube.org for more information.
3 | #
4 | # Copyright (c) 2017-2024 OWS Contributors
5 | # SPDX-License-Identifier: Apache-2.0
6 |
7 | import io
8 | import logging
9 |
10 | import matplotlib
11 | import numpy as np
12 |
13 | # from flask import make_response
14 | from PIL import Image
15 |
16 | from datacube_ows.http_utils import resp_headers
17 | from datacube_ows.ogc_exceptions import WMSException
18 | from datacube_ows.wms_utils import GetLegendGraphicParameters
19 |
20 | # Do not use X Server backend
21 |
22 | matplotlib.use('Agg')
23 |
24 | _LOG: logging.Logger = logging.getLogger(__name__)
25 |
26 |
27 | def legend_graphic(args) -> tuple | None:
28 | params = GetLegendGraphicParameters(args)
29 | img = create_legends_from_styles(params.styles,
30 | ndates=len(params.times))
31 | if img is None:
32 | raise WMSException("No legend is available for this request", http_response=404)
33 | return img
34 |
35 |
36 | def create_legend_for_style(product, style_name: str, ndates: int = 0) -> tuple | None:
37 | if style_name not in product.style_index:
38 | return None
39 | style = product.style_index[style_name]
40 | return create_legends_from_styles([style], ndates)
41 |
42 |
43 | def create_legends_from_styles(styles, ndates: int = 0) -> tuple | None:
44 |     # Render a legend image for each style, then stack the results into a single image
45 | imgs = []
46 | for s in styles:
47 | img = s.render_legend(ndates)
48 | if img is not None:
49 | imgs.append(img)
50 |
51 | if not imgs:
52 | return None
53 | min_shape = sorted([(np.sum(i.size), i.size) for i in imgs])[0][1]
54 | imgs_comb = np.vstack([np.asarray(i.resize(min_shape)) for i in imgs])
55 | imgs_comb = Image.fromarray(imgs_comb)
56 | b = io.BytesIO()
57 | imgs_comb.save(b, 'png')
58 | # legend = make_response(b.getvalue())
59 | # legend.mimetype = 'image/png'
60 | # b.close()
61 | return b.getvalue(), 200, resp_headers({"Content-Type": "image/png"})
62 |
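A standalone sketch of the stacking step in create_legends_from_styles, with solid-colour placeholder images standing in for rendered legends: every image is resized to the smallest legend's dimensions, stacked vertically as numpy arrays, then re-encoded as one PNG.

import io

import numpy as np
from PIL import Image

# Placeholder legends of different sizes.
imgs = [Image.new("RGBA", (300, 125), (255, 0, 0, 255)),
        Image.new("RGBA", (400, 150), (0, 0, 255, 255))]

# Smallest (width, height), ranked by the sum of the two dimensions.
min_shape = sorted((np.sum(i.size), i.size) for i in imgs)[0][1]
combined = Image.fromarray(
    np.vstack([np.asarray(i.resize(min_shape)) for i in imgs]))

b = io.BytesIO()
combined.save(b, "png")
png_bytes = b.getvalue()  # ready to serve with Content-Type: image/png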
--------------------------------------------------------------------------------
/datacube_ows/legend_utils.py:
--------------------------------------------------------------------------------
1 | # This file is part of datacube-ows, part of the Open Data Cube project.
2 | # See https://opendatacube.org for more information.
3 | #
4 | # Copyright (c) 2017-2024 OWS Contributors
5 | # SPDX-License-Identifier: Apache-2.0
6 |
7 | import io
8 | import logging
9 |
10 | import requests
11 | from PIL import Image
12 |
13 | from datacube_ows.ogc_exceptions import WMSException
14 |
15 | _LOG: logging.Logger = logging.getLogger(__name__)
16 |
17 |
18 | def get_image_from_url(url: str) -> Image.Image | None:
19 | """
20 |     Fetch a PNG image from an external URL and return it as a PIL Image.
21 |
22 |     :param url: A URL pointing to a PNG image
23 |     :return: A PIL Image object (raises a WMSException if the URL cannot be retrieved)
24 | """
25 | r = requests.get(url, timeout=1)
26 | if r.status_code != 200:
27 | raise WMSException(f"Could not retrieve legend - external URL is failing with http code {r.status_code}")
28 | if r.headers['content-type'] != 'image/png':
29 | _LOG.warning("External legend has MIME type %s. OWS strongly recommends PNG format for legend images.",
30 | r.headers['content-type'])
31 | bytesio = io.BytesIO()
32 | bytesio.write(r.content)
33 | bytesio.seek(0)
34 | return Image.open(bytesio)
35 |
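A usage sketch (the URL is a placeholder, not a real legend endpoint):

from datacube_ows.legend_utils import get_image_from_url

img = get_image_from_url("https://example.com/legends/ndvi.png")
print(img.size, img.mode)  # raises WMSException instead if the URL is failing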
--------------------------------------------------------------------------------
/datacube_ows/protocol_versions.py:
--------------------------------------------------------------------------------
1 | # This file is part of datacube-ows, part of the Open Data Cube project.
2 | # See https://opendatacube.org for more information.
3 | #
4 | # Copyright (c) 2017-2024 OWS Contributors
5 | # SPDX-License-Identifier: Apache-2.0
6 |
7 |
8 | import contextlib
9 | import re
10 | from collections.abc import Callable, Mapping, Sequence
11 |
12 | from datacube_ows.ogc_exceptions import (
13 | OGCException,
14 | WCS1Exception,
15 | WCS2Exception,
16 | WMSException,
17 | WMTSException,
18 | )
19 | from datacube_ows.ows_configuration import get_config
20 | from datacube_ows.wcs1 import handle_wcs1
21 | from datacube_ows.wcs2 import handle_wcs2
22 | from datacube_ows.wms import handle_wms
23 | from datacube_ows.wmts import handle_wmts
24 |
25 | FlaskResponse = tuple
26 | FlaskHandler = Callable[[Mapping[str, str]], FlaskResponse]
27 |
28 |
29 | class SupportedSvcVersion:
30 | def __init__(self, service: str, version: str, router, exception_class: type[OGCException]) -> None:
31 | self.service = service.lower()
32 | self.service_upper = service.upper()
33 | self.version = version
34 | self.version_parts = [int(i) for i in version.split(".")]
35 | assert len(self.version_parts) == 3
36 | self.router = router
37 | self.exception_class = exception_class
38 |
39 |
40 | class SupportedSvc:
41 | def __init__(self, versions: Sequence[SupportedSvcVersion], default_exception_class: type[OGCException] | None = None) -> None:
42 | self.versions = sorted(versions, key=lambda x: x.version_parts)
43 | assert len(self.versions) > 0
44 | self.service = self.versions[0].service
45 | self.service_upper = self.versions[0].service_upper
46 | assert self.service.upper() == self.service_upper
47 | assert self.service == self.service_upper.lower()
48 | for v in self.versions[1:]:
49 | assert v.service == self.service
50 | assert v.service_upper == self.service_upper
51 | if default_exception_class:
52 | self.default_exception_class = default_exception_class
53 | else:
54 | self.default_exception_class = self.versions[0].exception_class
55 |
56 | def _clean_version_parts(self, unclean: list[str]) -> list[int]:
57 | clean = []
58 | for part in unclean:
59 | try:
60 | clean.append(int(part))
61 | continue
62 | except ValueError:
63 | pass
64 | with contextlib.suppress(ValueError):
65 | clean.append(int(re.split(r"[^\d]", part)[0]))
66 | break
67 | return clean
68 |
69 | def negotiated_version(self, request_version: str) -> SupportedSvcVersion:
70 | if not request_version:
71 | return self.versions[-1]
72 | parts: list[str] = list(request_version.split("."))
73 | rv_parts: list[int] = self._clean_version_parts(parts)
74 | while len(rv_parts) < 3:
75 | rv_parts.append(0)
76 | for v in reversed(self.versions):
77 | if rv_parts >= v.version_parts:
78 | return v
79 | # The constructor asserted that self.versions is not empty, so this is safe.
80 | #pylint: disable=undefined-loop-variable
81 | return v
82 |
83 | def activated(self) -> bool:
84 | cfg = get_config()
85 | return bool(getattr(cfg, self.service))
86 |
87 |
88 | OWS_SUPPORTED = {
89 | "wms": SupportedSvc([
90 | SupportedSvcVersion("wms", "1.3.0", handle_wms, WMSException),
91 | ]),
92 | "wmts": SupportedSvc([
93 | SupportedSvcVersion("wmts", "1.0.0", handle_wmts, WMTSException),
94 | ]),
95 | "wcs": SupportedSvc([
96 | SupportedSvcVersion("wcs", "1.0.0", handle_wcs1, WCS1Exception),
97 | SupportedSvcVersion("wcs", "2.0.0", handle_wcs2, WCS2Exception),
98 | SupportedSvcVersion("wcs", "2.1.0", handle_wcs2, WCS2Exception),
99 | ]),
100 | }
101 |
102 |
103 | def supported_versions() -> dict[str, SupportedSvc]:
104 | return OWS_SUPPORTED
105 |
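A sketch of how negotiated_version resolves requested versions, using the WCS entries from OWS_SUPPORTED above (assumes the package and its handler dependencies import cleanly):

from datacube_ows.protocol_versions import OWS_SUPPORTED

wcs = OWS_SUPPORTED["wcs"]
assert wcs.negotiated_version("").version == "2.1.0"       # no version: newest wins
assert wcs.negotiated_version("2.0.1").version == "2.0.0"  # highest version <= request
assert wcs.negotiated_version("1.5").version == "1.0.0"    # short versions padded with zeros
assert wcs.negotiated_version("0.9").version == "1.0.0"    # below all: oldest returned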
--------------------------------------------------------------------------------
/datacube_ows/query_profiler.py:
--------------------------------------------------------------------------------
1 | # This file is part of datacube-ows, part of the Open Data Cube project.
2 | # See https://opendatacube.org for more information.
3 | #
4 | # Copyright (c) 2017-2024 OWS Contributors
5 | # SPDX-License-Identifier: Apache-2.0
6 |
7 | from time import time
8 |
9 |
10 | class QueryProfiler:
11 | def __init__(self, active: bool) -> None:
12 | self.active = active
13 | self._events: dict = {}
14 | self._stats: dict = {}
15 | if active:
16 | self.start_event("query")
17 |
18 | def start_event(self, name: str) -> None:
19 | if self.active:
20 | self._events[name] = [time(), None]
21 |
22 | def __setitem__(self, name: str, val) -> None:
23 | self._stats[name] = val
24 |
25 | def __getitem__(self, name: str):
26 | return self._stats[name]
27 |
28 | def end_event(self, name: str) -> None:
29 | if self.active:
30 | if name in self._events:
31 | self._events[name][1] = time()
32 | else:
33 | self._events[name] = [None, time()]
34 |
35 | def profile(self) -> dict:
36 | result: dict = {}
37 | if self.active:
38 | self.end_event("query")
39 | result["profile"] = {}
40 | for name, rng in self._events.items():
41 | if rng[0] and rng[1]:
42 | result["profile"][name] = rng[1] - rng[0]
43 | result["info"] = self._stats
44 | return result
45 |
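A usage sketch; the event and stat names here are arbitrary:

from time import sleep

from datacube_ows.query_profiler import QueryProfiler

qp = QueryProfiler(active=True)  # opens the overall "query" event
qp.start_event("db-fetch")
sleep(0.01)                      # stand-in for real work
qp.end_event("db-fetch")
qp["n_datasets"] = 4             # arbitrary stats via __setitem__

result = qp.profile()            # also closes the "query" event
print(result["profile"]["db-fetch"], result["info"]["n_datasets"])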
--------------------------------------------------------------------------------
/datacube_ows/sql/postgis/ows_schema/create/001_create_schema.sql:
--------------------------------------------------------------------------------
1 | -- Creating/replacing ows schema
2 |
3 | create schema if not exists ows;
4 |
--------------------------------------------------------------------------------
/datacube_ows/sql/postgis/ows_schema/create/002_create_product_rng.sql:
--------------------------------------------------------------------------------
1 | -- Creating/replacing product ranges table
2 |
3 | create table if not exists ows.layer_ranges (
4 | layer varchar(255) not null primary key,
5 |
6 | lat_min decimal not null,
7 | lat_max decimal not null,
8 | lon_min decimal not null,
9 | lon_max decimal not null,
10 |
11 | dates jsonb not null,
12 |
13 | bboxes jsonb not null,
14 |
15 | meta jsonb not null,
16 | last_updated timestamp not null
17 | );
18 |
--------------------------------------------------------------------------------
/datacube_ows/sql/postgis/ows_schema/grants/read_only/001_grant_usage_requires_role.sql:
--------------------------------------------------------------------------------
1 | -- Granting usage on schema
2 |
3 | GRANT USAGE ON SCHEMA ows TO {role}
4 |
--------------------------------------------------------------------------------
/datacube_ows/sql/postgis/ows_schema/grants/read_only/002_grant_range_read_requires_role.sql:
--------------------------------------------------------------------------------
1 | -- Granting select on layer ranges table to {role}
2 |
3 | GRANT SELECT ON ows.layer_ranges TO {role};
4 |
--------------------------------------------------------------------------------
/datacube_ows/sql/postgis/ows_schema/grants/read_only/003_grant_odc_user_requires_role.sql:
--------------------------------------------------------------------------------
1 | -- Granting odc_user role to {role}
2 |
3 | GRANT odc_user to {role};
4 |
--------------------------------------------------------------------------------
/datacube_ows/sql/postgis/ows_schema/grants/read_write/001_grant_usage_requires_role.sql:
--------------------------------------------------------------------------------
1 | -- Granting usage on schema
2 |
3 | GRANT USAGE ON SCHEMA ows TO {role}
4 |
--------------------------------------------------------------------------------
/datacube_ows/sql/postgis/ows_schema/grants/read_write/002_grant_writetables_requires_role.sql:
--------------------------------------------------------------------------------
1 | -- Granting update/insert/delete on all tables in schema
2 |
3 | GRANT update, select, insert, delete ON ALL TABLES IN SCHEMA ows TO {role}
4 |
--------------------------------------------------------------------------------
/datacube_ows/sql/postgis/ows_schema/grants/read_write/003_grant_odc_user_requires_role.sql:
--------------------------------------------------------------------------------
1 | -- Granting odc_user role to {role}
2 |
3 | GRANT odc_user to {role};
4 |
--------------------------------------------------------------------------------
/datacube_ows/sql/postgres/extent_views/create/001_postgis_extension.sql:
--------------------------------------------------------------------------------
1 | -- Installing Postgis extensions
2 |
3 | create extension if not exists postgis
4 |
--------------------------------------------------------------------------------
/datacube_ows/sql/postgres/extent_views/create/002_timezone.sql:
--------------------------------------------------------------------------------
1 | -- Setting default timezone to UTC
2 |
3 | set timezone to 'Etc/UTC'
4 |
--------------------------------------------------------------------------------
/datacube_ows/sql/postgres/extent_views/create/003_create_view_owner_role_ignore_duplicates.sql:
--------------------------------------------------------------------------------
1 | -- Create database role to own the materialised views
2 |
3 | create role ows_view_owner;
4 |
--------------------------------------------------------------------------------
/datacube_ows/sql/postgres/extent_views/create/004_grant_agdc_user_to_view_owner_role.sql:
--------------------------------------------------------------------------------
1 | -- Grant usage on the ODC (agdc) schema to the view owner role.
2 |
3 | grant usage on schema agdc to ows_view_owner;
4 |
--------------------------------------------------------------------------------
/datacube_ows/sql/postgres/extent_views/create/005_ows_read_to_view_owner_role.sql:
--------------------------------------------------------------------------------
1 | -- Grant usage on the OWS schema to the view owner role.
2 |
3 | GRANT USAGE ON SCHEMA ows TO ows_view_owner;
4 |
--------------------------------------------------------------------------------
/datacube_ows/sql/postgres/extent_views/create/006_odc_read_to_view_owner_role.sql:
--------------------------------------------------------------------------------
1 | -- Grant read access to AGDC tables to view owner role.
2 |
3 | GRANT SELECT ON agdc.dataset, agdc.dataset_type, agdc.metadata_type, agdc.dataset_location TO ows_view_owner;
4 |
--------------------------------------------------------------------------------
/datacube_ows/sql/postgres/extent_views/create/010_create_new_time_view.sql:
--------------------------------------------------------------------------------
1 | -- Creating NEW TIME Materialised View (start of hard work)
2 |
3 | -- Try all different locations for temporal extents and UNION them
4 |
5 | CREATE MATERIALIZED VIEW IF NOT EXISTS ows.time_view_new (dataset_type_ref, ID, temporal_extent)
6 | AS
7 | with
8 | -- Crib metadata type names to use for string matching below
9 | metadata_lookup as (
10 | select id,name from agdc.metadata_type
11 | )
12 | -- This is the eodataset variant of the temporal extent (from/to variant)
13 | select
14 | dataset_type_ref, id,
15 | case
16 | when metadata -> 'extent' ->> 'from_dt' is null then
17 | tstzrange(
18 | (metadata -> 'extent' ->> 'center_dt') :: timestamp,
19 | (metadata -> 'extent' ->> 'center_dt') :: timestamp,
20 | '[]'
21 | )
22 | else
23 | tstzrange(
24 | (metadata -> 'extent' ->> 'from_dt') :: timestamp,
25 | (metadata -> 'extent' ->> 'to_dt') :: timestamp,
26 | '[]'
27 | )
28 | end as temporal_extent
29 | from agdc.dataset where
30 | metadata_type_ref in (select id from metadata_lookup where name in ('eo','eo_s2_nrt', 'gqa_eo','eo_plus'))
31 | and archived is null
32 | UNION
33 | -- This is the eo3 variant of the temporal extent, the sample eo3 dataset uses a singleton
34 | -- timestamp, some other variants use start/end timestamps. From the OWS perspective, temporal
35 | -- resolution is one whole day.
36 | -- Start/End timestamp variant product.
37 | -- http://dapds00.nci.org.au/thredds/fileServer/xu18/ga_ls8c_ard_3/092/090/2019/06/05/ga_ls8c_ard_3-0-0_092090_2019-06-05_final.odc-metadata.yaml
38 | select
39 | dataset_type_ref, id,tstzrange(
40 | coalesce(metadata->'properties'->>'dtr:start_datetime', metadata->'properties'->>'datetime'):: timestamp,
41 | coalesce((metadata->'properties'->>'dtr:end_datetime'):: timestamp,(metadata->'properties'->>'datetime'):: timestamp),
42 | '[]'
43 | ) as temporal_extent
44 | from agdc.dataset where
45 | metadata_type_ref in (select id from metadata_lookup where name like 'eo3%')
46 | and archived is null
47 |
--------------------------------------------------------------------------------
/datacube_ows/sql/postgres/extent_views/create/012_create_new_spacetime_view.sql:
--------------------------------------------------------------------------------
1 | -- Creating NEW combined SPACE-TIME Materialised View
2 |
3 | CREATE MATERIALIZED VIEW IF NOT EXISTS ows.space_time_view_new (ID, dataset_type_ref, spatial_extent, temporal_extent)
4 | AS
5 | select space_view_new.id, dataset_type_ref, spatial_extent, temporal_extent from ows.space_view_new join ows.time_view_new on space_view_new.id=time_view_new.id
6 |
--------------------------------------------------------------------------------
/datacube_ows/sql/postgres/extent_views/create/020_create_index_1.sql:
--------------------------------------------------------------------------------
1 | -- Creating NEW Materialised View Index 1/4
2 |
3 | CREATE INDEX space_time_view_geom_idx_new
4 | ON ows.space_time_view_new
5 | USING GIST (spatial_extent)
6 |
--------------------------------------------------------------------------------
/datacube_ows/sql/postgres/extent_views/create/021_create_index_2.sql:
--------------------------------------------------------------------------------
1 | -- Creating NEW Materialised View Index 2/4
2 |
3 | CREATE INDEX space_time_view_time_idx_new
4 | ON ows.space_time_view_new
5 | USING SPGIST (temporal_extent)
6 |
--------------------------------------------------------------------------------
/datacube_ows/sql/postgres/extent_views/create/022_create_index_3.sql:
--------------------------------------------------------------------------------
1 | -- Creating NEW Materialised View Index 3/4
2 |
3 | CREATE INDEX space_time_view_ds_idx_new
4 | ON ows.space_time_view_new
5 | USING BTREE(dataset_type_ref)
6 |
--------------------------------------------------------------------------------
/datacube_ows/sql/postgres/extent_views/create/023_create_index_4.sql:
--------------------------------------------------------------------------------
1 | -- Creating NEW Materialised View Index 4/4
2 |
3 | CREATE unique INDEX space_time_view_idx_new
4 | ON ows.space_time_view_new
5 | USING BTREE(id)
6 |
--------------------------------------------------------------------------------
/datacube_ows/sql/postgres/extent_views/create/030_rename_old_space_time_view.sql:
--------------------------------------------------------------------------------
1 | -- Renaming old spacetime view (OWS down)
2 |
3 | ALTER MATERIALIZED VIEW IF EXISTS ows.space_time_view
4 | RENAME TO space_time_view_old
5 |
--------------------------------------------------------------------------------
/datacube_ows/sql/postgres/extent_views/create/031_rename_new_space_time_view.sql:
--------------------------------------------------------------------------------
1 | -- Renaming new view to space_time_view (OWS back up)
2 |
3 | ALTER MATERIALIZED VIEW ows.space_time_view_new
4 | RENAME to space_time_view
5 |
--------------------------------------------------------------------------------
/datacube_ows/sql/postgres/extent_views/create/032_drop_old_space_time_view.sql:
--------------------------------------------------------------------------------
1 | -- Dropping OLD spacetime view (and indexes)
2 |
3 | DROP MATERIALIZED VIEW IF EXISTS ows.space_time_view_old
4 |
--------------------------------------------------------------------------------
/datacube_ows/sql/postgres/extent_views/create/040_drop_old_space_view.sql:
--------------------------------------------------------------------------------
1 | -- Dropping OLD space view
2 |
3 | DROP MATERIALIZED VIEW IF EXISTS ows.space_view
4 |
--------------------------------------------------------------------------------
/datacube_ows/sql/postgres/extent_views/create/041_drop_old_time_view.sql:
--------------------------------------------------------------------------------
1 | -- Dropping OLD time view
2 |
3 | DROP MATERIALIZED VIEW IF EXISTS ows.time_view
4 |
--------------------------------------------------------------------------------
/datacube_ows/sql/postgres/extent_views/create/042_rename_new_space_view.sql:
--------------------------------------------------------------------------------
1 | -- Renaming NEW space_view
2 |
3 | ALTER MATERIALIZED VIEW ows.space_view_new
4 | RENAME to space_view
5 |
--------------------------------------------------------------------------------
/datacube_ows/sql/postgres/extent_views/create/043_rename_new_time_view.sql:
--------------------------------------------------------------------------------
1 | -- Renaming NEW time_view
2 |
3 | ALTER MATERIALIZED VIEW ows.time_view_new
4 | RENAME TO time_view
5 |
--------------------------------------------------------------------------------
/datacube_ows/sql/postgres/extent_views/create/050_rename_index_1.sql:
--------------------------------------------------------------------------------
1 | -- Renaming new Materialised View Index 1/4
2 |
3 | ALTER INDEX ows.space_time_view_geom_idx_new
4 | RENAME TO space_time_view_geom_idx
5 |
--------------------------------------------------------------------------------
/datacube_ows/sql/postgres/extent_views/create/051_rename_index_2.sql:
--------------------------------------------------------------------------------
1 | -- Renaming new Materialised View Index 2/4
2 |
3 | ALTER INDEX ows.space_time_view_time_idx_new
4 | RENAME TO space_time_view_time_idx
5 |
--------------------------------------------------------------------------------
/datacube_ows/sql/postgres/extent_views/create/052_rename_index_3.sql:
--------------------------------------------------------------------------------
1 | -- Renaming new Materialised View Index 3/4
2 |
3 | ALTER INDEX ows.space_time_view_ds_idx_new
4 | RENAME TO space_time_view_ds_idx
5 |
--------------------------------------------------------------------------------
/datacube_ows/sql/postgres/extent_views/create/053_rename_index_4.sql:
--------------------------------------------------------------------------------
1 | -- Renaming new Materialised View Index 4/4
2 |
3 | ALTER INDEX ows.space_time_view_idx_new
4 | RENAME TO space_time_view_idx
5 |
--------------------------------------------------------------------------------
/datacube_ows/sql/postgres/extent_views/grants/read_only/001_grant_read_requires_role.sql:
--------------------------------------------------------------------------------
1 | -- Granting read permission to materialised view
2 |
3 | GRANT SELECT ON ows.space_time_view TO {role};
4 |
--------------------------------------------------------------------------------
/datacube_ows/sql/postgres/extent_views/grants/refresh_owner/001_grant_space_view_perms_to_owner_role.sql:
--------------------------------------------------------------------------------
1 | -- Granting agdc_user role to the view owner role
2 |
3 | GRANT agdc_user TO ows_view_owner;
4 |
--------------------------------------------------------------------------------
/datacube_ows/sql/postgres/extent_views/grants/refresh_owner/010_set_owner_time_view.sql:
--------------------------------------------------------------------------------
1 | -- Set owner of time view.
2 |
3 | ALTER MATERIALIZED VIEW ows.time_view OWNER TO ows_view_owner;
4 |
--------------------------------------------------------------------------------
/datacube_ows/sql/postgres/extent_views/grants/refresh_owner/011_set_owner_space_view.sql:
--------------------------------------------------------------------------------
1 | -- Set owner of space view.
2 |
3 | ALTER MATERIALIZED VIEW ows.space_view OWNER TO ows_view_owner;
4 |
--------------------------------------------------------------------------------
/datacube_ows/sql/postgres/extent_views/grants/refresh_owner/012_set_owner_spacetime_view.sql:
--------------------------------------------------------------------------------
1 | -- Set owner of space-time view.
2 |
3 | ALTER MATERIALIZED VIEW ows.space_time_view OWNER TO ows_view_owner;
4 |
--------------------------------------------------------------------------------
/datacube_ows/sql/postgres/extent_views/grants/write_refresh/001_grant_refresh_requires_role.sql:
--------------------------------------------------------------------------------
1 | -- Granting View Owner role (refresh permissions)
2 |
3 | GRANT ows_view_owner TO {role};
4 |
--------------------------------------------------------------------------------
/datacube_ows/sql/postgres/extent_views/refresh/001_timezone.sql:
--------------------------------------------------------------------------------
1 | -- Setting default timezone to UTC
2 |
3 | set timezone to 'Etc/UTC'
4 |
--------------------------------------------------------------------------------
/datacube_ows/sql/postgres/extent_views/refresh/002_refresh_time.sql:
--------------------------------------------------------------------------------
1 | -- Refreshing TIME materialized view (Blocking)
2 |
3 | REFRESH MATERIALIZED VIEW ows.time_view
4 |
--------------------------------------------------------------------------------
/datacube_ows/sql/postgres/extent_views/refresh/003_refresh_space.sql:
--------------------------------------------------------------------------------
1 | -- Refreshing SPACE materialized view (blocking)
2 |
3 | REFRESH MATERIALIZED VIEW ows.space_view
4 |
--------------------------------------------------------------------------------
/datacube_ows/sql/postgres/extent_views/refresh/004_refresh_spacetime.sql:
--------------------------------------------------------------------------------
1 | -- Refreshing combined SPACE-TIME materialized view (concurrently)
2 |
3 | REFRESH MATERIALIZED VIEW CONCURRENTLY ows.space_time_view
4 |
--------------------------------------------------------------------------------
/datacube_ows/sql/postgres/ows_schema/cleanup/001_drop_space_time_view.sql:
--------------------------------------------------------------------------------
1 | -- Dropping OLD spacetime view (and indexes)
2 |
3 | DROP MATERIALIZED VIEW IF EXISTS space_time_view;
4 |
--------------------------------------------------------------------------------
/datacube_ows/sql/postgres/ows_schema/cleanup/002_drop_time_view.sql:
--------------------------------------------------------------------------------
1 | -- Dropping OLD time view
2 |
3 | DROP MATERIALIZED VIEW IF EXISTS time_view;
4 |
--------------------------------------------------------------------------------
/datacube_ows/sql/postgres/ows_schema/cleanup/003_drop_space_view.sql:
--------------------------------------------------------------------------------
1 | -- Dropping OLD space view
2 |
3 | DROP MATERIALIZED VIEW IF EXISTS space_view;
4 |
--------------------------------------------------------------------------------
/datacube_ows/sql/postgres/ows_schema/cleanup/010_drop_subproduct_range.sql:
--------------------------------------------------------------------------------
1 | -- Dropping OLD subproduct range table.
2 |
3 | DROP TABLE IF EXISTS wms.sub_product_ranges;
4 |
--------------------------------------------------------------------------------
/datacube_ows/sql/postgres/ows_schema/cleanup/011_drop_multiproduct_range.sql:
--------------------------------------------------------------------------------
1 | -- Dropping OLD multiproduct range table.
2 |
3 | DROP TABLE IF EXISTS wms.multiproduct_ranges;
4 |
--------------------------------------------------------------------------------
/datacube_ows/sql/postgres/ows_schema/cleanup/012_drop_product_range.sql:
--------------------------------------------------------------------------------
1 | -- Dropping OLD product range table.
2 |
3 | DROP TABLE IF EXISTS wms.product_ranges;
4 |
--------------------------------------------------------------------------------
/datacube_ows/sql/postgres/ows_schema/create/001_create_schema.sql:
--------------------------------------------------------------------------------
1 | -- Creating/replacing ows schema
2 |
3 | create schema if not exists ows;
4 |
--------------------------------------------------------------------------------
/datacube_ows/sql/postgres/ows_schema/create/002_create_product_rng.sql:
--------------------------------------------------------------------------------
1 | -- Creating/replacing product ranges table
2 |
3 | create table if not exists ows.layer_ranges (
4 | layer varchar(255) not null primary key,
5 |
6 | lat_min decimal not null,
7 | lat_max decimal not null,
8 | lon_min decimal not null,
9 | lon_max decimal not null,
10 |
11 | dates jsonb not null,
12 |
13 | bboxes jsonb not null,
14 |
15 | meta jsonb not null,
16 | last_updated timestamp not null
17 | );
18 |
--------------------------------------------------------------------------------
/datacube_ows/sql/postgres/ows_schema/grants/read_only/001_grant_usage_requires_role.sql:
--------------------------------------------------------------------------------
1 | -- Granting usage on schema
2 |
3 | GRANT USAGE ON SCHEMA ows TO {role}
4 |
--------------------------------------------------------------------------------
/datacube_ows/sql/postgres/ows_schema/grants/read_only/002_grant_range_read_requires_role.sql:
--------------------------------------------------------------------------------
1 | -- Granting select on layer ranges table to {role}
2 |
3 | GRANT SELECT ON ows.layer_ranges TO {role};
4 |
--------------------------------------------------------------------------------
/datacube_ows/sql/postgres/ows_schema/grants/read_only/003_grant_agdc_user_requires_role.sql:
--------------------------------------------------------------------------------
1 | -- Granting agdc_user role to {role}
2 |
3 | GRANT agdc_user to {role};
4 |
--------------------------------------------------------------------------------
/datacube_ows/sql/postgres/ows_schema/grants/read_write/001_grant_usage_requires_role.sql:
--------------------------------------------------------------------------------
1 | -- Granting usage on schema
2 |
3 | GRANT USAGE ON SCHEMA ows TO {role}
4 |
--------------------------------------------------------------------------------
/datacube_ows/sql/postgres/ows_schema/grants/read_write/002_grant_writetables_requires_role.sql:
--------------------------------------------------------------------------------
1 | -- Granting update/insert/delete on all tables in schema
2 |
3 | GRANT update, select, insert, delete ON ALL TABLES IN SCHEMA ows TO {role}
4 |
--------------------------------------------------------------------------------
/datacube_ows/sql/postgres/ows_schema/grants/read_write/003_grant_agdc_user_requires_role.sql:
--------------------------------------------------------------------------------
1 | -- Granting agdc_user role to {role}
2 |
3 | GRANT agdc_user to {role};
4 |
--------------------------------------------------------------------------------
/datacube_ows/styles/__init__.py:
--------------------------------------------------------------------------------
1 | # This file is part of datacube-ows, part of the Open Data Cube project.
2 | # See https://opendatacube.org for more information.
3 | #
4 | # Copyright (c) 2017-2024 OWS Contributors
5 | # SPDX-License-Identifier: Apache-2.0
6 |
7 | from datacube_ows.styles.base import StyleDefBase
8 | from datacube_ows.styles.colormap import ColorMapStyleDef # noqa: F401
9 | from datacube_ows.styles.component import ComponentStyleDef # noqa: F401
10 | from datacube_ows.styles.hybrid import HybridStyleDef # noqa: F401
11 | from datacube_ows.styles.ramp import ColorRampDef # noqa: F401
12 |
13 | StyleDef = StyleDefBase
14 |
--------------------------------------------------------------------------------
/datacube_ows/styles/api/__init__.py:
--------------------------------------------------------------------------------
1 | # This file is part of datacube-ows, part of the Open Data Cube project.
2 | # See https://opendatacube.org for more information.
3 | #
4 | # Copyright (c) 2017-2024 OWS Contributors
5 | # SPDX-License-Identifier: Apache-2.0
6 |
7 |
8 | from datacube_ows.styles.api.base import ( # noqa: F401 isort:skip
9 | StandaloneStyle, apply_ows_style, apply_ows_style_cfg,
10 | generate_ows_legend_style, generate_ows_legend_style_cfg,
11 | plot_image, plot_image_with_style, plot_image_with_style_cfg)
12 |
13 | from datacube_ows.ogc_utils import create_geobox, xarray_image_as_png # noqa: F401 isort:skip
14 | from datacube_ows.band_utils import scale_data, scalable, band_modulator # noqa: F401 isort:skip
15 |
--------------------------------------------------------------------------------
/datacube_ows/styles/hybrid.py:
--------------------------------------------------------------------------------
1 | # This file is part of datacube-ows, part of the Open Data Cube project.
2 | # See https://opendatacube.org for more information.
3 | #
4 | # Copyright (c) 2017-2024 OWS Contributors
5 | # SPDX-License-Identifier: Apache-2.0
6 |
7 | from typing import cast
8 |
9 | from typing_extensions import override
10 | from xarray import DataArray, Dataset
11 |
12 | from datacube_ows.config_utils import CFG_DICT, ConfigException
13 | from datacube_ows.styles.base import StyleDefBase
14 | from datacube_ows.styles.component import LINEAR_COMP_DICT, ComponentStyleDef
15 | from datacube_ows.styles.ramp import ColorRampDef
16 |
17 | TYPE_CHECKING = False
18 | if TYPE_CHECKING:
19 | from datacube_ows.ows_configuration import OWSNamedLayer
20 |
21 |
22 | class HybridStyleDef(ColorRampDef, ComponentStyleDef):
23 | """
24 | Hybrid component/colour ramp style type.
25 |
26 | Returns a linear blend of a component image and colour ramp image
27 | """
28 | auto_legend = False
29 |
30 | def __init__(self,
31 | product: "OWSNamedLayer",
32 | style_cfg: CFG_DICT,
33 | defer_multi_date: bool = False,
34 | stand_alone: bool = False,
35 | user_defined: bool = False) -> None:
36 | """
37 |         See StyleDefBase
38 | """
39 | super().__init__(product, style_cfg,
40 | defer_multi_date=defer_multi_date,
41 | stand_alone=stand_alone,
42 | user_defined=user_defined)
43 | style_cfg = cast(CFG_DICT, self._raw_cfg)
44 | self.component_ratio = float(cast(float | str, style_cfg["component_ratio"]))
45 | if self.component_ratio < 0.0 or self.component_ratio > 1.0:
46 | raise ConfigException("Component ratio must be a floating point number between 0 and 1")
47 |
48 | @override
49 | def transform_single_date_data(self, data: Dataset) -> Dataset:
50 | """
51 | Apply style to raw data to make an RGBA image xarray (single time slice only)
52 |
53 | :param data: Raw data, all bands.
54 | :return: RGBA uint8 xarray
55 | """
56 | #pylint: disable=too-many-locals
57 | if self.index_function is not None:
58 | data['index_function'] = (data.dims, self.index_function(data).data)
59 |
60 | imgdata = Dataset(coords=data)
61 |
62 | d: DataArray = data['index_function']
63 | for band, _ in self.rgb_components.items():
64 | rampdata = DataArray(self.color_ramp.get_value(d, band),
65 | coords=d.coords,
66 | dims=d.dims)
67 | component_band_data: DataArray | None = None
68 | for c_band, c_intensity in cast(LINEAR_COMP_DICT, self.rgb_components[band]).items():
69 | if callable(c_intensity):
70 | imgband_component_data = cast(DataArray, c_intensity(data[c_band], c_band, band))
71 | else:
72 | imgband_component_data = data[c_band] * cast(DataArray, c_intensity)
73 | if component_band_data is not None:
74 | component_band_data += imgband_component_data
75 | else:
76 | component_band_data = imgband_component_data
77 | if band != "alpha":
78 | component_band_data = self.compress_band(band, component_band_data)
79 | img_band_data = (rampdata * 255.0 * (1.0 - self.component_ratio)
80 | + self.component_ratio * cast(DataArray,
81 | component_band_data))
82 | imgdata[band] = (d.dims, img_band_data.astype("uint8").data)
83 |
84 | return imgdata
85 |
86 | # Register HybridStyleDef as a priority subclass of StyleDefBase
87 | # (priority means it takes precedence over ComponentStyleDef and ColorRampDef)
88 | StyleDefBase.register_subclass(HybridStyleDef, "component_ratio", priority=True)
89 |
--------------------------------------------------------------------------------
/datacube_ows/templates/html_feature_info.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | OWS Feature Info
6 |
7 |
8 | {% set props = result["features"][0]["properties"] %}
9 | {% if props %}
10 | Location
11 |
12 | - Latitude: {{ props["lat"] }}
13 | - Longitude: {{ props["lon"] }}
14 |
15 | Pixel Data
16 | {% for pixel in props["data"] %}
17 |
18 |
19 | Date/Time: |
20 | {{ pixel["time"] }} |
21 |
22 | {% for band_name, band_val in pixel["bands"].items() %}
23 |
24 | {{ band_name }}: |
25 | {% if band_val is mapping %}
26 | |
27 |
28 | {% for flag, flag_val in band_val.items() %}
29 |
30 | {{ flag }}: |
31 | {{ flag_val }} |
32 |
33 | {% endfor %}
34 | {% else %}
35 | {{ band_val }} |
36 |
37 | {% endif %}
38 | {% endfor %}
39 | {% if "band_derived" in pixel %}
40 | {% for band_name, band_val in pixel["band_derived"].items() %}
41 |
42 | {{ band_name }} (derived): |
43 | {{ band_val }} |
44 |
45 | {% endfor %}
46 | {% endif %}
47 |
48 | {% endfor %}
49 | Data Available For Dates
50 |
51 | {% for d in props["data_available_for_dates"] %}
52 | {{ d }}
53 | {% endfor %}
54 |
55 | Data Sources
56 | {% for d in props["data_links"] %}
57 | {{ d }}
58 | {% endfor %}
59 | {% else %}
60 | No Data
61 | {% endif %}
62 |
63 |
64 |
--------------------------------------------------------------------------------
/datacube_ows/templates/ogc_error.xml:
--------------------------------------------------------------------------------
1 |
2 |
6 | {% for err in exception.errors %}
7 |
8 | {{ err.msg }}
9 | {% if err.valid_keys %} - Perhaps you meant one of the following: {% endif %}
10 |
11 | {% if err.valid_keys %}
12 | {% for valid_key in err.valid_keys %}
13 |
14 | {{ valid_key }}
15 |
16 | {% endfor %}
17 | {% endif %}
18 | {% endfor %}
19 | {% if traceback %}
20 |
21 | {% for t in traceback %}
22 |
23 | {% endfor %}
24 |
25 | {% endif %}
26 |
27 |
--------------------------------------------------------------------------------
/datacube_ows/templates/ping.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | Datacube OGC Health Check
6 |
7 |
8 | PONG!!
9 | Database: {{ status }}
10 |
11 | {% for env, status in statuses.items() %}
12 | -
13 | {{ env }}:
14 | {% if status %}UP{% else %}DOWN{% endif %}
15 |
16 | {% endfor %}
17 |
18 |
19 |
20 |
--------------------------------------------------------------------------------
/datacube_ows/templates/test_client.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | Map
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
87 |
88 |
89 |
--------------------------------------------------------------------------------
/datacube_ows/wcs_utils.py:
--------------------------------------------------------------------------------
1 | # This file is part of datacube-ows, part of the Open Data Cube project.
2 | # See https://opendatacube.org for more information.
3 | #
4 | # Copyright (c) 2017-2024 OWS Contributors
5 | # SPDX-License-Identifier: Apache-2.0
6 |
7 | from datacube_ows.ogc_exceptions import WCS1Exception, WCS2Exception
8 |
9 |
10 | def get_bands_from_styles(styles, layer, version: int = 1) -> set:
11 | styles = styles.split(",")
12 | if len(styles) != 1:
13 | if version == 1:
14 | raise WCS1Exception("Multiple style parameters not supported")
15 | else:
16 | raise WCS2Exception("Multiple style parameters not supported")
17 | style = layer.style_index.get(styles[0])
18 | bands = set()
19 | if style:
20 | for b in style.needed_bands:
21 | if b not in style.flag_bands:
22 | bands.add(b)
23 | return bands
24 |
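A self-contained sketch; the layer and style are hypothetical stand-ins for real OWS configuration objects:

from types import SimpleNamespace

from datacube_ows.wcs_utils import get_bands_from_styles

# Hypothetical style needing two data bands plus a pixel-quality flag band.
style = SimpleNamespace(needed_bands={"red", "nir", "fmask"},
                        flag_bands={"fmask"})
layer = SimpleNamespace(style_index={"ndvi": style})

assert get_bands_from_styles("ndvi", layer) == {"red", "nir"}  # flag bands excluded
assert get_bands_from_styles("other", layer) == set()          # unknown style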
--------------------------------------------------------------------------------
/datacube_ows/wms.py:
--------------------------------------------------------------------------------
1 | # This file is part of datacube-ows, part of the Open Data Cube project.
2 | # See https://opendatacube.org for more information.
3 | #
4 | # Copyright (c) 2017-2024 OWS Contributors
5 | # SPDX-License-Identifier: Apache-2.0
6 |
7 | from flask import render_template
8 |
9 | from datacube_ows.data import get_map
10 | from datacube_ows.feature_info import feature_info
11 | from datacube_ows.http_utils import cache_control_headers, get_service_base_url
12 | from datacube_ows.legend_generator import legend_graphic
13 | from datacube_ows.ogc_exceptions import WMSException
14 | from datacube_ows.ows_configuration import get_config
15 | from datacube_ows.utils import log_call
16 |
17 | WMS_REQUESTS = ("GETMAP", "GETFEATUREINFO", "GETLEGENDGRAPHIC")
18 |
19 |
20 | @log_call
21 | def handle_wms(nocase_args: dict[str, str]) -> tuple | None:
22 | operation = nocase_args.get("request", "").upper()
23 |     # Dispatch to the handler for the requested WMS operation
24 | if not operation:
25 | raise WMSException("No operation specified", locator="Request parameter")
26 | elif operation == "GETCAPABILITIES":
27 | return get_capabilities(nocase_args)
28 | elif operation == "GETMAP":
29 | return get_map(nocase_args)
30 | elif operation == "GETFEATUREINFO":
31 | return feature_info(nocase_args)
32 | elif operation == "GETLEGENDGRAPHIC":
33 | return legend_graphic(nocase_args)
34 | else:
35 | raise WMSException(f"Unrecognised operation: {operation}", WMSException.OPERATION_NOT_SUPPORTED,
36 | "Request parameter")
37 |
38 |
39 | @log_call
40 | def get_capabilities(args) -> tuple[str, int, dict[str, str]]:
41 | # TODO: Handle updatesequence request parameter for cache consistency.
42 | # Note: Only WMS v1.3.0 is fully supported at this stage, so no version negotiation is necessary
43 | # Extract layer metadata from Datacube.
44 | cfg = get_config()
45 | url = args.get('Host', args['url_root'])
46 | base_url = get_service_base_url(cfg.allowed_urls, url)
47 | headers = cache_control_headers(cfg.wms_cap_cache_age)
48 | headers["Content-Type"] = "application/xml"
49 | return (
50 | render_template(
51 | "wms_capabilities.xml",
52 | cfg=cfg,
53 | base_url=base_url),
54 | 200,
55 | cfg.response_headers(headers)
56 | )
57 |
--------------------------------------------------------------------------------
/datacube_ows/wsgi.py:
--------------------------------------------------------------------------------
1 | # This file is part of datacube-ows, part of the Open Data Cube project.
2 | # See https://opendatacube.org for more information.
3 | #
4 | # Copyright (c) 2017-2024 OWS Contributors
5 | # SPDX-License-Identifier: Apache-2.0
6 |
7 |
8 | #pylint: skip-file
9 | import os
10 | import sys
11 |
12 | # This is the directory of the source code that the web app will run from
13 | sys.path.append("/opt")
14 |
15 | # The location of the datacube config file.
16 | if os.path.isfile("/opt/odc/.datacube.conf.local"):
17 | os.environ.setdefault("ODC_CONFIG_PATH", "/opt/odc/.datacube.conf.local")
18 |
19 | from datacube_ows import __version__
20 |
21 | from datacube_ows.ogc import app # isort:skip
22 |
23 | application = app
24 |
25 |
26 | def main() -> None:
27 | if "--version" in sys.argv:
28 | print("Open Data Cube Open Web Services (datacube-ows) version",
29 | __version__
30 | )
31 | exit(0)
32 | app.run()
33 |
34 |
35 | if __name__ == '__main__':
36 | main()
37 |
--------------------------------------------------------------------------------
/dive-ci.yml:
--------------------------------------------------------------------------------
1 | rules:
2 | # If the efficiency is measured below X%, mark as failed.
3 | # Expressed as a ratio between 0-1.
4 | lowestEfficiency: 0.95
5 |
6 | # If the amount of wasted space is at least X or larger than X, mark as failed.
7 | # Expressed in B, KB, MB, and GB.
8 | highestWastedBytes: 60MB
9 |
10 | # If the amount of wasted space makes up for X% or more of the image, mark as failed.
11 | # Note: the base image layer is NOT included in the total image size.
12 | # Expressed as a ratio between 0-1; fails if the threshold is met or crossed.
13 | highestUserWastedPercent: 0.08
14 |
--------------------------------------------------------------------------------
/docker-compose.cleandb.yaml:
--------------------------------------------------------------------------------
1 | services:
2 | postgres:
3 | # clean postgis db
4 | image: kartoza/postgis:16
5 | hostname: postgres
6 | environment:
7 | - POSTGRES_DB
8 | - POSTGRES_PORT
9 | - POSTGRES_PASSWORD
10 | - POSTGRES_USER
11 | ports:
12 | - "${POSTGRES_PORT}:5432"
13 | restart: always
14 | volumes:
15 | - type: tmpfs
16 | target: /var/lib/postgresql
17 | healthcheck:
18 | test: ["CMD", "pg_isready", "-h", "postgres", "-q", "-d", "$POSTGRES_DB", "-U", "$POSTGRES_USER"]
19 | timeout: 45s
20 | interval: 10s
21 | retries: 10
22 | # Overwrite ows so it can talk to docker db
23 | ows:
24 | ports:
25 | - "8000:8000"
26 | environment:
27 | POSTGRES_PORT: 5432
28 | depends_on:
29 | postgres:
30 | condition: service_healthy
31 |
--------------------------------------------------------------------------------
/docker-compose.db.yaml:
--------------------------------------------------------------------------------
1 | services:
2 | postgres:
3 | # db
4 | build: docker/database/
5 | hostname: postgres
6 | environment:
7 | - POSTGRES_DB=${POSTGRES_DB}
8 | - POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
9 | - POSTGRES_USER=${POSTGRES_USER}
10 | ports:
11 | - "${POSTGRES_PORT}:${POSTGRES_PORT}"
12 | restart: always
13 | volumes:
14 | - type: tmpfs
15 | target: /var/lib/postgresql
16 | healthcheck:
17 | test: ["CMD", "pg_isready", "-h", "postgres", "-q", "-d", "$POSTGRES_DB", "-U", "$POSTGRES_USER"]
18 | timeout: 45s
19 | interval: 10s
20 | retries: 10
21 | # Overwrite ows so it can talk to docker db
22 | ows:
23 | ports:
24 | - "8000:8000"
25 | depends_on:
26 | postgres:
27 | condition: service_healthy
28 |
--------------------------------------------------------------------------------
/docker-compose.index.yaml:
--------------------------------------------------------------------------------
1 | services:
2 | index:
3 | image: opendatacube/datacube-index:latest
4 | environment:
5 | DB_HOSTNAME: ${DB_HOSTNAME}
6 | DB_PORT: 5432
7 | DB_USERNAME: ${DB_USERNAME}
8 | DB_PASSWORD: ${DB_PASSWORD}
9 | DB_DATABASE: ${DB_DATABASE}
10 | restart: always
11 | depends_on:
12 | postgres:
13 | condition: service_healthy
14 | command: tail -f /dev/null
15 |
--------------------------------------------------------------------------------
/docker-compose.prod.yaml:
--------------------------------------------------------------------------------
1 | # override default compose to change the launch command
2 |
3 | services:
4 | ows:
5 | command: gunicorn -b '0.0.0.0:8000' --workers=3 -k gevent --timeout 121 --pid /home/ubuntu/gunicorn.pid --log-level info --worker-tmp-dir /dev/shm --config python:datacube_ows.gunicorn_config datacube_ows.wsgi
6 |
--------------------------------------------------------------------------------
/docker-compose.pyspy.yaml:
--------------------------------------------------------------------------------
1 | services:
2 | ows:
3 | cap_add:
4 | - SYS_PTRACE
5 | depends_on:
6 | postgres:
7 | condition: service_healthy
8 |
9 | pyspy:
10 | build: docker/pyspy/
11 | pid: "host"
12 | privileged: true
13 | volumes:
14 | - .:/profiles
15 |
--------------------------------------------------------------------------------
/docker-compose.yaml:
--------------------------------------------------------------------------------
1 | # Default compose will create an ows image, with dev settings and connect to a local db
2 | services:
3 | ows:
4 | build:
5 | context: .
6 | args:
7 | PYDEV_DEBUG: "${PYDEV_DEBUG}"
8 | ENVIRONMENT: test
9 | cache_from:
10 | - opendatacube/ows:_builder
11 | image: opendatacube/ows:latest
12 | # Uncomment for use with non-dockerised postgres (for docker-compose 1.x)
13 | # network_mode: host
14 | environment:
15 | LOCAL_UID: ${LOCAL_UID:-1000}
16 | LOCAL_GID: ${LOCAL_GID:-1000}
17 | # Defaults are defined in .env file
18 | AWS_ACCESS_KEY_ID: ${AWS_ACCESS_KEY_ID}
19 | AWS_SECRET_ACCESS_KEY: ${AWS_SECRET_ACCESS_KEY}
20 | AWS_REQUEST_PAYER: ${AWS_REQUEST_PAYER}
21 | AWS_S3_ENDPOINT: ${AWS_S3_ENDPOINT}
22 | # Hard coded for now.
23 | ODC_ENVIRONMENT: default
24 | ODC_DEFAULT_INDEX_DRIVER: postgres
25 | ODC_OWSPOSTGIS_INDEX_DRIVER: postgis
26 | # Please switch to single entry url configuration for postgres url
27 | ODC_ODC2_INDEX_DRIVER: ${ODC_ODC2_INDEX_DRIVER}
28 | ODC_ODC2_DB_URL: ${ODC_ODC2_DB_URL}
29 | ODC_DEFAULT_DB_URL: ${ODC_DEFAULT_DB_URL}
30 | ODC_OWSPOSTGIS_DB_URL: ${ODC_OWSPOSTGIS_DB_URL}
31 | # for wait-for-db check
32 | POSTGRES_USER: ${POSTGRES_USER}
33 | POSTGRES_HOSTNAME: ${POSTGRES_HOSTNAME}
34 | SERVER_DB_USERNAME: ${SERVER_DB_USERNAME}
35 | # Path from the PYTHONPATH to the config object (default PYTHONPATH is /src)
36 | PYTHONPATH: ${PYTHONPATH}
37 | DATACUBE_OWS_CFG: ${DATACUBE_OWS_CFG}
38 | AWS_DEFAULT_REGION: ${AWS_REGION}
39 | # Talk to AWS without using credentials
40 | AWS_NO_SIGN_REQUEST: "${AWS_NO_SIGN_REQUEST}"
41 | # Enable Metrics
42 | PROMETHEUS_MULTIPROC_DIR: ${PROMETHEUS_MULTIPROC_DIR}
43 | # Dev flags
44 | FLASK_APP: /src/datacube_ows/ogc.py
45 | FLASK_ENV: ${FLASK_ENV}
46 | PYDEV_DEBUG: "${PYDEV_DEBUG}"
47 | SENTRY_DSN: "${SENTRY_DSN}"
48 | volumes:
49 | - ${OWS_CFG_DIR}:${OWS_CFG_MOUNT_DIR}
50 | - ./:/src/
51 | - ./artifacts:/mnt/artifacts
52 | restart: always
53 | command: ["flask", "run", "--host=0.0.0.0", "--port=8000"]
54 |
--------------------------------------------------------------------------------
/docker/database/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM kartoza/postgis:16
2 |
--------------------------------------------------------------------------------
/docker/files/remap-user.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash -e
2 |
3 | # Script that gives the container user uid $LOCAL_UID and gid $LOCAL_GID.
4 | # If $LOCAL_UID or $LOCAL_GID are not set, they default to 1000 (default
5 | # for the first user created in Ubuntu).
6 |
7 | USER_ID=${LOCAL_UID:-1000}
8 | GROUP_ID=${LOCAL_GID:-1000}
9 |
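# Remap the ubuntu user's uid/gid to the requested values, then exec tini as
# PID 1; if we started as root, gosu drops privileges to the ubuntu user.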
10 | [[ "$USER_ID" == "1000" ]] || usermod -u $USER_ID -o -m -d /home/ubuntu ubuntu
11 | [[ "$GROUP_ID" == "1000" ]] || groupmod -g $GROUP_ID ubuntu
12 | [[ $(id -u) != "0" ]] || GOSU="/usr/sbin/gosu ubuntu"
13 | exec /usr/bin/tini -- $GOSU "$@"
14 |
--------------------------------------------------------------------------------
/docker/pyspy/Dockerfile:
--------------------------------------------------------------------------------
1 | # pyspy/Dockerfile
2 | FROM python:3.12
3 | RUN pip install py-spy
4 | WORKDIR /profiles
5 | ENTRYPOINT [ "py-spy" ]
6 | CMD []
7 |
--------------------------------------------------------------------------------
/docs/cfg_hybrid_styles.rst:
--------------------------------------------------------------------------------
1 | =================================
2 | OWS Configuration - Hybrid Styles
3 | =================================
4 |
5 | .. contents:: Table of Contents
6 |
7 | Hybrid Styles
8 | -------------
9 |
10 | Hybrid styles are an experimental type of :doc:`style <cfg_styling>` that
11 | return a linear combination of a component style and a colour ramp style.
12 |
13 | This can produce an image that is easier to interpret visually, but
14 | there are usually better ways to achieve the same effect on the
15 | client side.
16 |
17 | Hybrid styles support most [*]_ elements supported by either
18 | :doc:`component styles <cfg_component_styles>` or
19 | :doc:`colour ramp styles <cfg_colourramp_styles>` and
20 | define two independent styles (one of each type) that
21 | are then blended according to the required `component_ratio` entry.
22 |
23 | The `component_ratio` entry should be a float between 0.0 and 1.0. A value
24 | of 0.0 means 100% colour ramp style, 1.0 means 100% component style,
25 | and 0.5 means a 50/50 blend of the two, etc.
26 |
27 | .. [*] Hybrid Styles do NOT support auto-legend generation. All other features
28 | of component and colour-ramp styles are supported.
29 |
30 | E.g.::
31 |
32 | "legend": {
33 | # Common stuff
34 | "name": "rgb_ndvi",
35 | "title": "NDVI plus RGB",
36 | "abstract": "NDVI combined with RGB for terrain detail",
37 |
38 | # Component Style
39 | "components": {
40 | "red": {
41 | "red": 1.0
42 | },
43 | "green": {
44 | "green": 1.0
45 | },
46 | "blue": {
47 | "blue": 1.0
48 | }
49 | },
50 | "scale_range": [0.0, 3000.0]
51 |
52 | # Colour-Ramp Style
53 | "index_function": {
54 | "function": "datacube_ows.band_utils.norm_diff",
55 | "pass_product_cfg": True,
56 | "kwargs": {
57 | "band1": "nir",
58 | "band2": "red"
59 | }
60 | },
61 | "range": [0.0, 1.0],
62 | "mpl_ramp": "RdBu",
63 |
64 | # Does not need to include "green" and "blue".
65 | # (But no harm in adding them explicitly either.)
66 | "needed_bands": ["red", "nir"],
67 |
68 | # Blend 60% RGB + 40% ndvi
69 | "component_ration": 0.6,
70 | }
71 |
--------------------------------------------------------------------------------
/docs/cfg_wms.rst:
--------------------------------------------------------------------------------
1 | ===============================
2 | OWS Configuration - WMS Section
3 | ===============================
4 |
5 | .. contents:: Table of Contents
6 |
7 | WMS Section
8 | --------------
9 |
10 | The "wms" section of the :doc:`root configuration object
11 | `
12 | contains config entries that apply
13 | to the WMS/WMTS services for all layers.
14 |
15 | All entries apply identically to both WMS and WMTS services unless
16 | stated otherwise. All entries in the WMS section are optional and the
17 | entire section can therefore be omitted.
18 |
19 | Max Tile Size (max_height/max_width)
20 | =======================================
21 |
22 | Tile size is fixed for WMTS requests, so these entries only apply to
23 | WMS requests. Requests for tiles larger than the configured maximum
24 | height and width will result in an error. Note that many WMS clients
25 | do not honour the maximum width and height. For these clients, please
26 | consider using WMTS instead.
27 |
28 | The ``max_width`` and ``max_height`` config entries in the ``wms`` section
29 | should be integers. Both entries are optional, and default to 256 if
30 | not set.
31 |
32 | E.g.:
33 |
34 | ::
35 |
36 | "max_height": 512,
37 | "max_width": 512,
38 |
39 | S3 Data URL Elements
40 | ====================
41 |
42 | These entries are used for constructing S3 data URLs for use in GetFeatureInfo
43 | responses. This feature is restricted to data stored in AWS S3, is fairly
44 | specialised to DEA requirements, and may not be suitable for other use cases.
45 | All of these entries are optional.
46 |
47 | s3_url
48 | The base url exposing the public S3 bucket containing the data.
49 |
50 | s3_bucket
51 | The name of the S3 bucket.
52 |
53 | s3_aws_zone
54 | The AWS zone where the data is stored.
55 |
56 | E.g.
57 |
58 | ::
59 |
60 | "s3_url": "http://data.au",
61 | "s3_bucket": "s3_bucket_name",
62 | "s3_aws_zone": "ap-southeast-2",
63 |
64 | Identifier Authorities (authorities)
65 | ====================================
66 |
67 | The ``authorities`` entry in the ``wms`` section defines URLs for the Identifier
68 | Authorities that can be used in the layer definitions. If you wish to declare
69 | identifiers for any of your layers, you must define the corresponding Identifier
70 | Authorities here.
71 |
72 | This entry is optional. If not provided, no identifier authorities are declared
73 | and no identifiers can be assigned to layers.
74 |
75 | Identifiers and Authorities only apply to WMS (not WMTS).
76 |
77 | If provided, this entry should be a dictionary mapping authority labels to URLs.
78 |
79 | E.g.
80 |
81 | ::
82 |
83 | "authorities": {
84 | "auth": "https://authoritative-authority.com",
85 | "idsrus": "https://www.identifiers-r-us.com",
86 | },
87 |
88 | GetCapabilities Cache Control Headers (caps_cache_maxage)
89 | =========================================================
90 |
91 | The ``caps_cache_maxage`` entry in the ``wms`` section controls the value of the
92 | ``Cache-control`` HTTP header returned with WMS/WMTS GetCapabilities responses.
93 |
94 | ``caps_cache_maxage`` is an optional integer value that defaults to 0, and represents
95 | the maximum age in seconds that the Capabilities document should be cached.
96 |
97 | Note that OWS does not manage any caching itself; this entry controls a standard HTTP
98 | header that instructs upstream cache layers (e.g. AWS Cloudfront) how to behave.
99 |
100 | A value of zero (the default) means that OWS will recommend that the Capabilities
101 | document not be cached at all. Note that setting this entry to a non-zero value
102 | will introduce an additional delay between new data being added to the datacube index
103 | and that data being advertised as available through the service. This value should
104 | therefore be kept fairly short (e.g. a few hours at most).
105 |
106 | E.g.::
107 |
108 | "wms": {
109 | "caps_cache_maxage": 3600, # 3600 seconds = 1 hour
110 | ...
111 | }
112 |
--------------------------------------------------------------------------------
/docs/conf.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # This file is part of datacube-ows, part of the Open Data Cube project.
3 | # See https://opendatacube.org for more information.
4 | #
5 | # Copyright (c) 2017-2024 OWS Contributors
6 | # SPDX-License-Identifier: Apache-2.0
7 |
8 |
9 | import os
10 | import sys
11 |
12 | # Get the project root dir, which is the parent dir of the current working dir (docs)
13 | cwd = os.getcwd()
14 | project_root = os.path.dirname(cwd)
15 |
16 | # Insert the project root dir as the first element in the PYTHONPATH.
17 | # This lets us ensure that the source package is imported, and that its
18 | # version is used.
19 | sys.path.insert(0, project_root)
20 |
21 | import datacube_ows # isort:skip
22 |
23 | # -- General configuration ---------------------------------------------
24 |
25 | # If your documentation needs a minimal Sphinx version, state it here.
26 | # needs_sphinx = '1.0'
27 |
28 | # Add any Sphinx extension module names here, as strings. They can be
29 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
30 | extensions = [
31 | 'sphinx.ext.autodoc',
32 | 'sphinx.ext.viewcode',
33 | 'sphinx_click'
34 | ]
35 |
36 | # Add any paths that contain templates here, relative to this directory.
37 | templates_path = ['_templates']
38 |
39 | # The suffix of source filenames.
40 | source_suffix = {
41 | '.rst': 'restructuredtext'
42 | }
43 |
44 | # The master toctree document.
45 | master_doc = 'index'
46 |
47 | # General information about the project.
48 | project = 'datacube-ows'
49 | copyright = "2017-2024, Open Data Cube Steering Council and contributors (Open Source License)"
50 |
51 | # The version info for the project you're documenting, acts as replacement
52 | # for |version| and |release|, also used in various other places throughout
53 | # the built documents.
54 | #
55 | # The short X.Y version.
56 | version = datacube_ows.__version__
57 | # The full version, including alpha/beta/rc tags.
58 | release = datacube_ows.__version__
59 |
60 | # List of patterns, relative to source directory, that match files and
61 | # directories to ignore when looking for source files.
62 | exclude_patterns = ['_build', 'venv']
63 |
64 | # The name of the Pygments (syntax highlighting) style to use.
65 | pygments_style = 'sphinx'
66 |
67 | # -- Options for HTML output -------------------------------------------
68 |
69 | html_theme = 'default'
70 |
71 | # Output file base name for HTML help builder.
72 | htmlhelp_basename = 'datacube_owsdoc'
73 |
74 | # -- Options for manual page output ------------------------------------
75 |
76 | # One entry per manual page. List of tuples
77 | # (source start file, name, description, authors, manual section).
78 | man_pages = [
79 | ('index', 'datacube_ows',
80 | 'datacube-ows Documentation',
81 | ['Datacube OWS Team'], 1)
82 | ]
83 |
--------------------------------------------------------------------------------
/docs/contributing.rst:
--------------------------------------------------------------------------------
1 | .. include:: ../CONTRIBUTING.rst
2 |
--------------------------------------------------------------------------------
/docs/deployment.rst:
--------------------------------------------------------------------------------
1 | Deploying
2 | =========
3 |
4 | Deploying with Helm Chart
5 | --------------------------
6 |
7 | Prerequisites
8 | ^^^^^^^^^^^^^
9 |
10 | Make sure you have Helm `installed <https://helm.sh/docs/intro/install/>`_.
11 |
12 | Get Repo Info
13 | ^^^^^^^^^^^^^^
14 |
15 | .. code::
16 |
17 | helm repo add datacube-charts https://opendatacube.github.io/datacube-charts/charts/
18 | helm repo update
19 |
20 |
21 | See `helm repo <https://helm.sh/docs/helm/helm_repo/>`_ for command documentation.
22 |
23 |
24 | Deploy with default config
25 | ^^^^^^^^^^^^^^^^^^^^^^^^^^
26 |
27 | .. code::
28 |
29 | helm upgrade --install datacube-ows datacube-charts/datacube-ows
30 |
31 |
32 | Deploy in a custom namespace
33 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
34 |
35 | .. code::
36 |
37 | helm upgrade --install datacube-ows --namespace=web datacube-charts/datacube-ows
38 |
39 | Chart values
40 | ^^^^^^^^^^^^
41 |
42 | .. code::
43 |
44 | helm show values datacube-charts/datacube-ows
45 |
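Deploy with overridden values
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Chart values can be overridden at install time with a local values file.
A minimal sketch, assuming a hypothetical ``my-values.yaml``:

.. code::

   helm upgrade --install datacube-ows datacube-charts/datacube-ows -f my-values.yaml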
--------------------------------------------------------------------------------
/docs/diagrams/ows_diagram.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/opendatacube/datacube-ows/fad96b5e99f0b26154c0f5432c2540f76efdd46d/docs/diagrams/ows_diagram.png
--------------------------------------------------------------------------------
/docs/diagrams/ows_diagram1.9.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/opendatacube/datacube-ows/fad96b5e99f0b26154c0f5432c2540f76efdd46d/docs/diagrams/ows_diagram1.9.png
--------------------------------------------------------------------------------
/docs/history.rst:
--------------------------------------------------------------------------------
1 | .. include:: ../HISTORY.rst
2 |
--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
1 | Welcome to datacube-ows's documentation!
2 | ========================================
3 |
4 | Contents:
5 |
6 | .. toctree::
7 | :maxdepth: 2
8 |
9 | readme
10 | installation
11 | deployment
12 | database
13 | configuration
14 | styling_howto
15 | usage
16 | environment_variables
17 | performance
18 | contributing
19 | history
20 |
21 | Indices and tables
22 | ==================
23 |
24 | * :ref:`genindex`
25 | * :ref:`search`
26 |
--------------------------------------------------------------------------------
/docs/performance.rst:
--------------------------------------------------------------------------------
1 | =====================
2 | Performance deep dive
3 | =====================
4 |
5 | ows_stats
6 | =========
7 |
8 | Append ::
9 |
10 | &ows_stats=yes
11 |
12 | to a GetMap query to get a performance profile like this ::
13 |
14 | {
15 | profile: {
16 | query: 0.060224294662475586,
17 | count-datasets: 0.027852535247802734,
18 | extent-in-query: 0.017885684967041016,
19 | write: 0.014366865158081055
20 | },
21 | info: {
22 | n_dates: 1,
23 | zoom_factor: 14.030289733687082,
24 | n_datasets: 9,
25 | too_many_datasets: false,
26 | zoomed_out: true,
27 | write_action: "Polygon"
28 | }
29 | }
30 |
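For example, appending the parameter to an ordinary GetMap request (the layer
name and bbox below are placeholders) returns the JSON profile above instead
of the rendered image::

    curl 'http://localhost:8000/wms?service=WMS&version=1.3.0&request=GetMap&layers=my_layer&styles=&width=256&height=256&crs=EPSG:4326&bbox=-35,147,-34,148&format=image/png&ows_stats=yes'
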
31 | Run pyspy
32 | =========
33 |
34 | Docker-Compose
35 | --------------
36 | To set up the chained docker-compose command with the pre-indexed database: ::
37 |
38 | COMPOSE_CHAIN='docker-compose -f docker-compose.yaml -f docker-compose.db.yaml -f docker-compose.pyspy.yaml'
39 |
40 | To set up the chained docker-compose command with a local database: ::
41 |
42 | COMPOSE_CHAIN='docker-compose -f docker-compose.yaml -f docker-compose.pyspy.yaml'
43 |
44 | To start ows with pre-indexed db and pyspy on the side: ::
45 |
46 | $COMPOSE_CHAIN up -d
47 |
48 | Get Datacube-ows docker process id: ::
49 |
50 | OWS_PID=$(docker inspect --format '{{.State.Pid}}' $(docker inspect -f '{{.Name}}' \
51 | $($COMPOSE_CHAIN ps -q ows) | cut -c2-))
52 |
53 | Run py-spy: ::
54 |
55 | $COMPOSE_CHAIN run pyspy record -f speedscope -o profile.json \
56 | --pid $OWS_PID --subprocesses
57 |
--------------------------------------------------------------------------------
/docs/readme.rst:
--------------------------------------------------------------------------------
1 | .. include:: ../README.rst
2 |
--------------------------------------------------------------------------------
/docs/requirements.txt:
--------------------------------------------------------------------------------
1 | click
2 | sphinx_click
3 | docutils==0.16
4 |
--------------------------------------------------------------------------------
/docs/styling_howto_jupyter.rst:
--------------------------------------------------------------------------------
1 | =================================================
2 | Datacube-OWS Styling JupyterHub Quick-Start Guide
3 | =================================================
4 |
5 | .. contents:: Table of Contents
6 |
7 | Introduction
8 | ------------
9 |
10 | This document assumes you have a working account with a JupyterHub-based ODC installation,
11 | e.g. DEA Sandbox.
12 |
13 | Installing Datacube-ows
14 | -----------------------
15 |
16 | At the time of writing datacube-ows is not included in the standard DEA Sandbox build.
17 |
18 | Simply installing via ``pip install`` in a JupyterHub terminal tab is sufficient,
19 | but the installation will not persist between sessions unless you have already
20 | set up a local virtual environment.
21 |
22 | ::
23 |
24 | pip install datacube-ows
25 |
26 | If you do not already have a local virtual environment set up, check that you have sufficient disk
27 | space available in your home directory (at least 3.5G), using a Jupyter Hub terminal tab:
28 |
29 | ::
30 |
31 | df -h | awk '/home/{print $6, "has", $4, "of disk space available"}'
32 |
33 | If you have sufficient space, you can create a virtual environment using the following commands in the Terminal
34 | tab:
35 |
36 | ::
37 |
38 | # create new empty env in ~/.envs/odc directory
39 | EE=odc
40 | cd $HOME
41 | mkdir -p $HOME/.envs
42 | cd $HOME/.envs
43 | /usr/bin/python3 -m venv ${EE}
44 |
45 | # transplant modules from default env
46 | (cd /env/lib; tar c .) | (cd ${EE}/lib; tar x)
47 | # make sure base libs are up-to-date
48 | ./${EE}/bin/python3 -m pip install -U pip wheel setuptools
49 |
50 | # Check that modules transplanted ok
51 | ./${EE}/bin/python3 -m pip list
52 |
53 | # Install new kernel (tell jupyter about it)
54 | ./${EE}/bin/python3 -m ipykernel install --user --name 'ows' --display-name 'ODC (OWS)'
55 |
56 | # Install datacube-ows into the new environment
57 | ./${EE}/bin/pip install datacube-ows
58 |
59 | If you return to the Jupyter homepage, the new environment should be visible.
60 |
--------------------------------------------------------------------------------
/docs/usage.rst:
--------------------------------------------------------------------------------
1 | =====
2 | Usage
3 | =====
4 |
5 | .. contents:: Table of Contents
6 |
7 | As a Python Module
8 | ------------------
9 |
10 | To use datacube-ows in a project::
11 |
12 | import datacube_ows
13 |
14 | To use the :doc:`stand-alone styling API <styling_howto>`::
15 |
16 | from datacube_ows.styles.api import *
17 |
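A minimal sketch of using the styling API, assuming an xarray Dataset ``data``
with ``red``, ``green`` and ``blue`` data variables has been loaded elsewhere
(e.g. with ``datacube.Datacube.load``)::

    from datacube_ows.styles.api import apply_ows_style_cfg

    # A simple RGB component style - style configurations are plain dicts.
    rgb_cfg = {
        "components": {
            "red": {"red": 1.0},
            "green": {"green": 1.0},
            "blue": {"blue": 1.0},
        },
        "scale_range": [0.0, 3000.0],
    }

    # Apply the style to the data, producing an RGBA image.
    image = apply_ows_style_cfg(rgb_cfg, data)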
18 |
19 | OWS Command Line Tools
20 | ----------------------------
21 |
22 | Datacube-OWS provides two command line tools:
23 |
24 | * ``datacube-ows-update`` which is used for creating and maintaining
25 | :doc:`OWS's database tables and views <database>`.
26 | * ``datacube-ows-cfg`` which is used for managing
27 | :doc:`OWS configuration files <configuration>`.
28 |
29 | .. click:: datacube_ows.update_ranges_impl:main
30 | :prog: datacube-ows-update
31 | :nested: full
32 |
33 | .. click:: datacube_ows.cfg_parser_impl:main
34 | :prog: datacube-ows-cfg
35 | :nested: full
36 |
37 | As a Web-Service in Docker with Layers deployed
38 | -----------------------------------------------
39 |
40 | Access a sample product definition. This walkthrough uses an ALOS-PALSAR
41 | product definition from the Digital Earth Africa deployment.
42 |
43 | .. code-block:: console
44 |
45 | $ wget https://raw.githubusercontent.com/digitalearthafrica/config/master/products/alos_palsar_mosaic.yaml
46 |
47 | Inject the sample product into datacube using datacube commands.
48 | These should be available in the OWS docker image.
49 |
50 | .. code-block:: console
51 |
52 | $ datacube product add https://raw.githubusercontent.com/digitalearthafrica/config/master/products/alos_palsar_mosaic.yaml
53 |
54 | Index all the ``YAML`` files for a particular year of ALOS-PALSAR
55 | using a classic Unix toolchain style,
56 | with the `AWS CLI <https://aws.amazon.com/cli/>`_ grabbing them from S3.
57 |
58 | .. code-block:: console
59 |
60 | $ aws s3 ls s3://deafrica-data/jaxa/alos_palsar_mosaic/2017/ --recursive \
61 | | grep yaml | awk '{print $4}' \
62 | | xargs -n1 -I {} datacube dataset add s3://deafrica-data/{}
63 |
64 | Index a dataset when a ``yaml`` file is not available and ONLY a ``.json`` file is available.
65 |
66 | .. code-block:: console
67 |
68 | # How to index Sentinel-2 COGs
69 |
70 | # Install the indexing tools (if not already installed)
71 | pip install --upgrade --extra-index-url="https://packages.dea.ga.gov.au" odc-apps-dc-tools odc-index datacube
72 |
73 | # Find the files
74 | s3-find s3://sentinel-cogs/sentinel-s2-l2a-cogs/2019/**/*.json > sentinel-cogs-2019.txt
75 |
76 | # Tar them up
77 | s3-to-tar sentinel-cogs-2019.txt sentinel-cogs-2019.tar
78 |
79 | # And index
80 | dc-index-from-tar --stac --product=s2_l2a < sentinel-cogs-2019.tar
88 |
89 | .. note:: The next step will be superseded soon by an OWS sub-command.
90 |
91 | Update the extents of a new or updated product in the datacube, making it easier for OWS to create GetCapabilities documents. This assumes the `ows_cfg.py` file is within the code directory.
92 |
93 | .. code-block:: console
94 |
95 | $ datacube-ows-update --views
96 | $ datacube-ows-update alos_palsar_mosaic
97 |
98 | Deploy the `Digital Earth Africa OWS config <https://github.com/digitalearthafrica/config>`_
99 | by copying it to ows_cfg.py. Ideally, load the config outside
100 | a Docker container to iterate faster.
101 |
102 | Run GetCapabilities via curl to ensure data is present.
103 | Perform GetMap via QGIS to ensure data is visible.
104 |
105 | .. code-block:: console
106 |
107 | $ curl "localhost:8000/?service=wms&request=getcapabilities"
108 |
--------------------------------------------------------------------------------
/env.micromamba.yaml:
--------------------------------------------------------------------------------
1 | name: base
2 | channels:
3 | - conda-forge
4 | dependencies:
5 | - python=3.12
6 | - datacube
7 | - flask
8 | - gunicorn
9 | - geoalchemy2
10 | - pillow
11 | - requests
12 | - lxml
13 | - deepdiff
14 | - scipy
15 | - flask-babel
16 | - regex
17 | - matplotlib-base # Not matplotlib, which pulls in GUI backends
18 | - python-slugify
19 | - prometheus_flask_exporter
20 | - timezonefinder
21 | - iso8601
22 | - ciso8601
23 | - colour
24 | - setuptools_scm
25 | - bottleneck
26 | - opentelemetry-distro
27 | - pip:
28 | - pyows
29 |
--------------------------------------------------------------------------------
/integration_tests/__init__.py:
--------------------------------------------------------------------------------
1 | # This file is part of datacube-ows, part of the Open Data Cube project.
2 | # See https://opendatacube.org for more information.
3 | #
4 | # Copyright (c) 2017-2024 OWS Contributors
5 | # SPDX-License-Identifier: Apache-2.0
6 |
--------------------------------------------------------------------------------
/integration_tests/cfg/__init__.py:
--------------------------------------------------------------------------------
1 | # This file is part of datacube-ows, part of the Open Data Cube project.
2 | # See https://opendatacube.org for more information.
3 | #
4 | # Copyright (c) 2017-2024 OWS Contributors
5 | # SPDX-License-Identifier: Apache-2.0
6 |
--------------------------------------------------------------------------------
/integration_tests/cfg/message.po:
--------------------------------------------------------------------------------
1 |
2 | msgid "global.title"
3 | msgstr "Over-ridden: aardvark"
4 |
5 | msgid "folder.sentinel2.abstract"
6 | msgstr "Over-ridden: bunny-rabbit"
7 |
8 | msgid "layer.s2_l2a.title"
9 | msgstr "Over-ridden: chook"
10 |
11 | msgid "style.s2_l2a.simple_rgb.title"
12 | msgstr "Over-ridden: donkey"
13 |
--------------------------------------------------------------------------------
/integration_tests/cfg/ows_test_cfg_bad.py:
--------------------------------------------------------------------------------
1 | # pylint: skip-file
2 | # This file is part of datacube-ows, part of the Open Data Cube project.
3 | # See https://opendatacube.org for more information.
4 | #
5 | # Copyright (c) 2017-2024 OWS Contributors
6 | # SPDX-License-Identifier: Apache-2.0
7 |
8 |
9 |
10 | # THIS IS A TESTING FILE FOR TESTING ERROR HANDLING.
11 | # Do not use it as an example, it is deliberately invalid.
12 | #
13 | # Please refer to datacube_ows/ows_cfg_example.py for EXAMPLE CONFIG
14 |
15 | ows_cfg = {
16 | "glerbal": {
17 | "turtle": "An invalid configuration",
18 | },
19 | "liars": []
20 | }
21 |
--------------------------------------------------------------------------------
/integration_tests/cfg/test_translations/de/LC_MESSAGES/ows_cfg.po:
--------------------------------------------------------------------------------
1 | # German translations for PROJECT.
2 | # Copyright (C) 2024 ORGANIZATION
3 | # This file is distributed under the same license as the PROJECT project.
4 | # FIRST AUTHOR , 2024.
5 | #
6 | msgid ""
7 | msgstr ""
8 | "Project-Id-Version: PROJECT VERSION\n"
9 | "Report-Msgid-Bugs-To: EMAIL@ADDRESS\n"
10 | "POT-Creation-Date: 2024-07-03 17:29+1000\n"
11 | "PO-Revision-Date: 2024-07-03 17:29+1000\n"
12 | "Last-Translator: FULL NAME \n"
13 | "Language: de\n"
14 | "Language-Team: de \n"
15 | "Plural-Forms: nplurals=2; plural=(n != 1);\n"
16 | "MIME-Version: 1.0\n"
17 | "Content-Type: text/plain; charset=utf-8\n"
18 | "Content-Transfer-Encoding: 8bit\n"
19 | "Generated-By: Babel 2.15.0\n"
20 |
21 | msgid "global.title"
22 | msgstr "Over-ridden: aardvark"
23 |
24 | msgid "folder.sentinel2.abstract"
25 | msgstr "Over-ridden: bunny-rabbit"
26 |
27 | msgid "layer.s2_l2a.title"
28 | msgstr "Over-ridden: chook"
29 |
30 | msgid "style.s2_l2a.simple_rgb.title"
31 | msgstr "Over-ridden: donkey"
32 |
--------------------------------------------------------------------------------
/integration_tests/cfg/test_translations/en/LC_MESSAGES/ows_cfg.po:
--------------------------------------------------------------------------------
1 | # English translations for PROJECT.
2 | # Copyright (C) 2024 ORGANIZATION
3 | # This file is distributed under the same license as the PROJECT project.
4 | # FIRST AUTHOR , 2024.
5 | #
6 | msgid ""
7 | msgstr ""
8 | "Project-Id-Version: PROJECT VERSION\n"
9 | "Report-Msgid-Bugs-To: EMAIL@ADDRESS\n"
10 | "POT-Creation-Date: 2024-07-03 17:29+1000\n"
11 | "PO-Revision-Date: 2024-07-03 17:29+1000\n"
12 | "Last-Translator: FULL NAME \n"
13 | "Language: en\n"
14 | "Language-Team: en \n"
15 | "Plural-Forms: nplurals=2; plural=(n != 1);\n"
16 | "MIME-Version: 1.0\n"
17 | "Content-Type: text/plain; charset=utf-8\n"
18 | "Content-Transfer-Encoding: 8bit\n"
19 | "Generated-By: Babel 2.15.0\n"
20 |
21 | msgid "global.title"
22 | msgstr "Over-ridden: aardvark"
23 |
24 | msgid "folder.sentinel2.abstract"
25 | msgstr "Over-ridden: bunny-rabbit"
26 |
27 | msgid "layer.s2_l2a.title"
28 | msgstr "Over-ridden: chook"
29 |
30 | msgid "style.s2_l2a.simple_rgb.title"
31 | msgstr "Over-ridden: donkey"
32 |
--------------------------------------------------------------------------------
/integration_tests/cfg/translations/de/LC_MESSAGES/ows_cfg.mo:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/opendatacube/datacube-ows/fad96b5e99f0b26154c0f5432c2540f76efdd46d/integration_tests/cfg/translations/de/LC_MESSAGES/ows_cfg.mo
--------------------------------------------------------------------------------
/integration_tests/cfg/translations/de/LC_MESSAGES/ows_cfg.po:
--------------------------------------------------------------------------------
1 | # Translations for datacube-ows metadata instance:
2 | # Open web-services for the Open Data Cube
3 | #
4 | # Acme Corporation 2022-03-24T23:29:57.407805
5 | msgid ""
6 | msgstr ""
7 | "Project-Id-Version: Open web-services for the Open Data Cube "
8 | "2022-03-24T23:29:57.407805\n"
9 | "Report-Msgid-Bugs-To: test@example.com\n"
10 | "POT-Creation-Date: 2024-07-03 17:29+1000\n"
11 | "PO-Revision-Date: 2022-03-24 23:33+0000\n"
12 | "Last-Translator: FULL NAME \n"
13 | "Language: de\n"
14 | "Language-Team: en \n"
15 | "Plural-Forms: nplurals=2; plural=(n != 1);\n"
16 | "MIME-Version: 1.0\n"
17 | "Content-Type: text/plain; charset=utf-8\n"
18 | "Content-Transfer-Encoding: 8bit\n"
19 | "Generated-By: Babel 2.15.0\n"
20 |
21 | msgid "global.title"
22 | msgstr "This is the German translation of the title"
23 |
24 | msgid "folder.sentinel2.abstract"
25 | msgstr "Images from the sentinel 2 satellite"
26 |
27 | msgid "layer.s2_l2a.title"
28 | msgstr "Surface reflectance (Sentinel-2)"
29 |
30 | msgid "style.s2_l2a.simple_rgb.title"
31 | msgstr "Simple RGB"
32 |
--------------------------------------------------------------------------------
/integration_tests/cfg/translations/en/LC_MESSAGES/ows_cfg.mo:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/opendatacube/datacube-ows/fad96b5e99f0b26154c0f5432c2540f76efdd46d/integration_tests/cfg/translations/en/LC_MESSAGES/ows_cfg.mo
--------------------------------------------------------------------------------
/integration_tests/cfg/translations/en/LC_MESSAGES/ows_cfg.po:
--------------------------------------------------------------------------------
1 | # Translations for datacube-ows metadata instance:
2 | # Over-ridden: aardvark
3 | #
4 | # Acme Corporation 2022-03-24T23:29:57.407805
5 | msgid ""
6 | msgstr ""
7 | "Project-Id-Version: Over-ridden: aardvark 2022-03-24T23:29:57.407805\n"
8 | "Report-Msgid-Bugs-To: test@example.com\n"
9 | "POT-Creation-Date: 2024-07-03 17:29+1000\n"
10 | "PO-Revision-Date: 2022-03-24 23:33+0000\n"
11 | "Last-Translator: FULL NAME \n"
12 | "Language: en\n"
13 | "Language-Team: en \n"
14 | "Plural-Forms: nplurals=2; plural=(n != 1);\n"
15 | "MIME-Version: 1.0\n"
16 | "Content-Type: text/plain; charset=utf-8\n"
17 | "Content-Transfer-Encoding: 8bit\n"
18 | "Generated-By: Babel 2.15.0\n"
19 |
20 | msgid "global.title"
21 | msgstr "Open web-services for the Open Data Cube"
22 |
23 | msgid "folder.sentinel2.abstract"
24 | msgstr "Images from the sentinel 2 satellite"
25 |
26 | msgid "layer.s2_l2a.title"
27 | msgstr "Surface reflectance (Sentinel-2)"
28 |
29 | msgid "style.s2_l2a.simple_rgb.title"
30 | msgstr "Simple RGB"
31 |
--------------------------------------------------------------------------------
/integration_tests/cfg/utils.py:
--------------------------------------------------------------------------------
1 | # This file is part of datacube-ows, part of the Open Data Cube project.
2 | # See https://opendatacube.org for more information.
3 | #
4 | # Copyright (c) 2017-2024 OWS Contributors
5 | # SPDX-License-Identifier: Apache-2.0
6 |
7 |
8 | def trivial_identity(x):
9 | return x
10 |
11 |
12 | def legacy_finfo_data(data):
13 | return data
14 |
15 |
16 | def new_finfo_vars(data, ds):
17 | return list(data.data_vars.keys())
18 |
19 |
20 | def new_finfo_platform(data, ds):
21 | return ds.metadata.platform
22 |
23 |
24 | def new_twodate_finfo(data, band, band_mapper=None):
25 | if band_mapper is not None:
26 | band = band_mapper(band)
27 | data1, data2 = (data.sel(time=dt) for dt in data.coords["time"].values)
28 | return data2[band].item() - data1[band].item()
29 |
--------------------------------------------------------------------------------
/integration_tests/conftest.py:
--------------------------------------------------------------------------------
1 | # This file is part of datacube-ows, part of the Open Data Cube project.
2 | # See https://opendatacube.org for more information.
3 | #
4 | # Copyright (c) 2017-2024 OWS Contributors
5 | # SPDX-License-Identifier: Apache-2.0
6 |
7 |
8 | import os
9 |
10 | pytest_plugins = ["helpers_namespace"]
11 | import pytest
12 | from click.testing import CliRunner
13 | from datacube.cfg import ODCConfig
14 | from datacube_ows import ogc
15 | from datacube_ows.ogc import app
16 | from pytest_localserver.http import WSGIServer
17 |
18 |
19 | @pytest.fixture
20 | def flask_client():
21 | with app.test_client() as client:
22 | yield client
23 |
24 |
25 | class generic_obj:
26 | pass
27 |
28 |
29 | @pytest.fixture(scope="session")
30 | def ows_server(request):
31 | """
32 | Run the OWS server for the duration of these tests
33 | """
34 | external_url = os.environ.get("SERVER_URL")
35 | if external_url:
36 | server = generic_obj()
37 | server.url = external_url
38 | else:
39 | server = WSGIServer(port="5000", application=ogc.app)
40 | server.start()
41 | request.addfinalizer(server.stop)
42 |
43 | return server
44 |
45 |
46 | @pytest.fixture
47 | def runner():
48 | return CliRunner()
49 |
50 |
51 | @pytest.helpers.register
52 | def enclosed_bbox(bbox, flip: bool = False):
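    """Return a bbox spanning the central ~10% of each axis of ``bbox``; ``flip`` swaps to lat/lon axis order."""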
53 | lon_min, lat_min, lon_max, lat_max = bbox
54 | lon_range = lon_max - lon_min
55 | lat_range = lat_max - lat_min
56 |
57 | if flip:
58 | return (
59 | lat_min + 0.45 * lat_range,
60 | lon_min + 0.45 * lon_range,
61 | lat_max - 0.45 * lat_range,
62 | lon_max - 0.45 * lon_range,
63 | )
64 | else:
65 | return (
66 | lon_min + 0.45 * lon_range,
67 | lat_min + 0.45 * lat_range,
68 | lon_max - 0.45 * lon_range,
69 | lat_max - 0.45 * lat_range,
70 | )
71 |
72 |
73 | @pytest.helpers.register
74 | def disjoint_bbox(bbox):
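    """Return a bbox of similar shape lying entirely outside (south-west of) ``bbox``."""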
75 | lon_min, lat_min, lon_max, lat_max = bbox
76 | lon_range = lon_max - lon_min
77 | lat_range = lat_max - lat_min
78 |
79 | return (
80 | lon_min - 0.4 * lon_range,
81 | lat_min - 0.4 * lat_range,
82 | lon_min - 0.2 * lon_range,
83 | lat_min - 0.2 * lat_range,
84 | )
85 |
86 | @pytest.helpers.register
87 | def representative_bbox(bbox):
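    """Return a tiny bbox (~1% of each axis range) from the interior of ``bbox``."""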
88 | lon_min, lat_min, lon_max, lat_max = bbox
89 | lon_range = lon_max - lon_min
90 | lat_range = lat_max - lat_min
91 |
92 | return (
93 | lon_min + 0.40 * lon_range,
94 | lat_min + 0.45 * lat_range,
95 | lon_min + 0.41 * lon_range,
96 | lat_min + 0.46 * lat_range,
97 | )
98 |
99 |
100 | @pytest.fixture
101 | def product_name() -> str:
102 | return "s2_l2a"
103 |
104 |
105 | @pytest.fixture
106 | def write_role_name():
107 | odc_env = ODCConfig.get_environment()
108 | return odc_env.db_username
109 |
110 |
111 | @pytest.fixture
112 | def read_role_name(write_role_name: str):
113 | if read_role_name := os.environ.get("SERVER_DB_USERNAME"):
114 | return read_role_name
115 | else:
116 | return write_role_name
117 |
118 |
119 | @pytest.fixture
120 | def multiproduct_name() -> str:
121 | return "s2_ard_granule_nbar_t"
122 |
--------------------------------------------------------------------------------
/integration_tests/metadata/COAST_100K_15_-40.yaml:
--------------------------------------------------------------------------------
1 | $schema: https://schemas.opendatacube.org/dataset
2 | crs: epsg:3577
3 | extent:
4 | lat:
5 | begin: -35.72031832673588
6 | end: -34.70891190784338
7 | lon:
8 | begin: 148.63868967606328
9 | end: 149.73417611149185
10 | grids:
11 | default:
12 | shape:
13 | - 4000
14 | - 4000
15 | transform:
16 | - 25.0
17 | - 0
18 | - 1500000.0
19 | - 0
20 | - -25.0
21 | - -3900000.0
22 | - 0
23 | - 0
24 | - 1
25 | id: 2921b863-9c09-4d5b-a561-5b8d5ddf24bc
26 | label: COAST_100L_15_-40
27 | lineage:
28 | source_datasets: {}
29 | measurements:
30 | land:
31 | path: COAST_100K_15_-40.tif
32 | product:
33 | name: geodata_coast_100k
34 | properties:
35 | created: '2018-12-03T04:28:16.827902'
36 | datetime: '2004-01-01T00:00:00'
37 | odc:file_format: GeoTIFF
38 | odc:region_code: 15_-40
39 | eo:instrument: unknown
40 | eo:platform: unknown
41 | proj:epsg: 3577
42 |
--------------------------------------------------------------------------------
/integration_tests/metadata/COAST_100K_8_-21.yaml:
--------------------------------------------------------------------------------
1 | $schema: https://schemas.opendatacube.org/dataset
2 | crs: epsg:3577
3 | extent:
4 | lat:
5 | begin: -19.38776011021664
6 | end: -18.43108812058793
7 | lon:
8 | begin: 139.58869840733868
9 | end: 140.59834925204385
10 | grids:
11 | default:
12 | shape:
13 | - 4000
14 | - 4000
15 | transform:
16 | - 25.0
17 | - 0
18 | - 800000.0
19 | - 0
20 | - -25.0
21 | - -2000000.0
22 | - 0
23 | - 0
24 | - 1
25 | id: 701478bc-2625-4743-99bf-10865d3bb2da
26 | label: COAST_100K_8_-21
27 | lineage:
28 | source_datasets: {}
29 | measurements:
30 | land:
31 | path: COAST_100K_8_-21.tif
32 | product:
33 | name: geodata_coast_100k
34 | properties:
35 | created: '2018-12-03T04:31:43.146830'
36 | datetime: '2004-01-01T00:00:00'
37 | odc:file_format: GeoTIFF
38 | odc:region_code: 8_-21
39 | eo:instrument: unknown
40 | eo:platform: unknown
41 | proj:epsg: 3577
42 |
--------------------------------------------------------------------------------
/integration_tests/metadata/metadata_importer.py:
--------------------------------------------------------------------------------
1 | # Simple Python script for indexing pre-cached EO3 metadata for non-local data.
2 | #
3 | # TODO: Would ideally use stac-to-dc but it hasn't been migrated to datacube-1.9 yet.
4 | import fileinput
5 |
6 | import yaml
7 | from datacube import Datacube
8 | from datacube.index.hl import Doc2Dataset
9 |
10 | dc = Datacube()
11 | dc_pgis = Datacube(env="owspostgis")
12 |
13 | doc2ds = Doc2Dataset(dc.index, products=["s2_l2a", "geodata_coast_100k"], skip_lineage=True, verify_lineage=False)
14 | doc2ds_pgis = Doc2Dataset(dc_pgis.index, products=["s2_l2a", "geodata_coast_100k"], skip_lineage=True, verify_lineage=False)
15 |
16 | for line in fileinput.input(): # noqa: SIM115
17 | filename, uri = line.split()
18 | with open(filename) as fp:
19 | doc = yaml.safe_load(fp)
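        # Drop pre-computed spatial fields so they are re-derived from the
        # EO3 grid/CRS information at indexing time.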
20 | if "grid_spatial" in doc:
21 | del doc["grid_spatial"]
22 | if "extent" in doc:
23 | del doc["extent"]
24 | ds, err = doc2ds(doc, uri)
25 | if ds:
26 | dc.index.datasets.add(ds, with_lineage=False)
27 | else:
28 | print("Dataset add (postgres) failed:", err)
29 | exit(1)
30 |
31 | ds, err = doc2ds_pgis(doc, uri)
32 | if ds:
33 | dc_pgis.index.datasets.add(ds, with_lineage=False)
34 | else:
35 | print("Dataset add (postgis) failed:", err)
36 | exit(1)
37 |
--------------------------------------------------------------------------------
/integration_tests/metadata/product_geodata_coast_100k.yaml:
--------------------------------------------------------------------------------
1 | name: geodata_coast_100k
2 | description: Coastline data for Australia
3 | metadata_type: eo3
4 | license: CC-BY-4.0
5 | metadata:
6 | product:
7 | name: geodata_coast_100k
8 | measurements:
9 | - name: land
10 | dtype: uint8
11 | flags_definition:
12 | land_type:
13 | bits:
14 | - 0
15 | - 1
16 | description: Sea, Mainland or Island
17 | values:
18 | 0: sea
19 | 1: island
20 | 2: mainland
21 | sea:
22 | bits:
23 | - 0
24 | - 1
25 | description: Sea
26 | values:
27 | 0: true
28 | nodata: 0
29 | units: '1'
30 | load:
31 | crs: 'EPSG:3577'
32 | resolution:
33 | y: -25
34 | x: 25
35 | align:
36 | y: 0
37 | x: 0
38 |
--------------------------------------------------------------------------------
/integration_tests/metadata/s2_l2a_prod.yaml:
--------------------------------------------------------------------------------
1 | name: s2_l2a
2 | metadata:
3 | product:
4 | name: s2_l2a
5 | description: Sentinel-2a and Sentinel-2b imagery, processed to Level 2A (Surface Reflectance) and converted to Cloud Optimized GeoTIFFs
6 | measurements:
7 | - name: B01
8 | dtype: uint16
9 | units: "1"
10 | nodata: 0
11 | aliases:
12 | - band_01
13 | - coastal_aerosol
14 | - name: B02
15 | dtype: uint16
16 | units: "1"
17 | nodata: 0
18 | aliases:
19 | - band_02
20 | - blue
21 | - name: B03
22 | dtype: uint16
23 | units: "1"
24 | nodata: 0
25 | aliases:
26 | - band_03
27 | - green
28 | - name: B04
29 | dtype: uint16
30 | units: "1"
31 | nodata: 0
32 | aliases:
33 | - band_04
34 | - red
35 | - name: B05
36 | dtype: uint16
37 | units: "1"
38 | nodata: 0
39 | aliases:
40 | - band_05
41 | - red_edge_1
42 | - name: B06
43 | dtype: uint16
44 | units: "1"
45 | nodata: 0
46 | aliases:
47 | - band_06
48 | - red_edge_2
49 | - name: B07
50 | dtype: uint16
51 | units: "1"
52 | nodata: 0
53 | aliases:
54 | - band_07
55 | - red_edge_3
56 | - name: B08
57 | dtype: uint16
58 | units: "1"
59 | nodata: 0
60 | aliases:
61 | - band_08
62 | - nir
63 | - nir_1
64 | - name: B8A
65 | dtype: uint16
66 | units: "1"
67 | nodata: 0
68 | aliases:
69 | - band_8a
70 | - nir_narrow
71 | - nir_2
72 | - name: B09
73 | dtype: uint16
74 | units: "1"
75 | nodata: 0
76 | aliases:
77 | - band_09
78 | - water_vapour
79 | - name: B11
80 | dtype: uint16
81 | units: "1"
82 | nodata: 0
83 | aliases:
84 | - band_11
85 | - swir_1
86 | - swir_16
87 | - name: B12
88 | dtype: uint16
89 | units: "1"
90 | nodata: 0
91 | aliases:
92 | - band_12
93 | - swir_2
94 | - swir_22
95 | - name: SCL
96 | dtype: uint8
97 | units: "1"
98 | nodata: 0
99 | aliases:
100 | - mask
101 | - qa
102 | flags_definition:
103 | qa:
104 | bits:
105 | - 0
106 | - 1
107 | - 2
108 | - 3
109 | - 4
110 | - 5
111 | - 6
112 | - 7
113 | values:
114 | '0': no data
115 | '1': saturated or defective
116 | '2': dark area pixels
117 | '3': cloud shadows
118 | '4': vegetation
119 | '5': bare soils
120 | '6': water
121 | '7': unclassified
122 | '8': cloud medium probability
123 | '9': cloud high probability
124 | '10': thin cirrus
125 | '11': snow or ice
126 | description: Sen2Cor Scene Classification
127 | - name: AOT
128 | dtype: uint16
129 | units: "1"
130 | nodata: 0
131 | aliases:
132 | - aerosol_optical_thickness
133 | - name: WVP
134 | dtype: uint16
135 | units: "1"
136 | nodata: 0
137 | aliases:
138 | - scene_average_water_vapour
139 | metadata_type: eo3
140 |
--------------------------------------------------------------------------------
/integration_tests/test_i18n.py:
--------------------------------------------------------------------------------
1 | # This file is part of datacube-ows, part of the Open Data Cube project.
2 | # See https://opendatacube.org for more information.
3 | #
4 | # Copyright (c) 2017-2024 OWS Contributors
5 | # SPDX-License-Identifier: Apache-2.0
6 |
7 | import requests
8 |
9 |
10 | def test_wms_i18n(ows_server) -> None:
11 | resp = requests.get(
12 | ows_server.url + "/wms?request=GetCapabilities&service=WMS&version=1.3.0",
13 | timeout=10,
14 | headers={"Accept-Language": "de"}
15 | )
16 | # Confirm success
17 | assert "German translation" in resp.text
18 |
19 | def test_wcs1_i18n(ows_server) -> None:
20 | resp = requests.get(
21 | ows_server.url + "/wcs?request=GetCapabilities&service=WCS&version=1.0.0",
22 | timeout=10,
23 | headers={"Accept-Language": "de"}
24 | )
25 | # Confirm success
26 | assert "German translation" in resp.text
27 |
28 |
29 | def test_wcs1_bands_i18n(ows_server, product_name: str) -> None:
30 | resp = requests.get(
31 | ows_server.url + "/wcs?request=DescribeCoverage&service=WCS&version=1.0.0&coverageid=" + product_name,
32 | timeout=10,
33 | headers={"Accept-Language": "de"}
34 | )
35 | # Confirm success
36 | assert "gruen" in resp.text
37 |
38 |
39 | def test_wcs2_i18n(ows_server) -> None:
40 | resp = requests.get(
41 | ows_server.url + "/wcs?request=GetCapabilities&service=WCS&version=2.0.1",
42 | timeout=10,
43 | headers={"Accept-Language": "de"}
44 | )
45 | # Confirm success
46 | assert "German translation" in resp.text
47 |
48 |
49 | def test_wcs2_bands_i18n(ows_server, product_name: str) -> None:
50 | resp = requests.get(
51 | ows_server.url + "/wcs?request=DescribeCoverage&service=WCS&version=2.0.1&coverageid=" + product_name,
52 | timeout=10,
53 | headers={"Accept-Language": "de"}
54 | )
55 | # Confirm success
56 | assert "gruen" in resp.text
57 |
--------------------------------------------------------------------------------
/integration_tests/test_layers.py:
--------------------------------------------------------------------------------
1 | # This file is part of datacube-ows, part of the Open Data Cube project.
2 | # See https://opendatacube.org for more information.
3 | #
4 | # Copyright (c) 2017-2024 OWS Contributors
5 | # SPDX-License-Identifier: Apache-2.0
6 |
7 | import os
8 |
9 | from datacube_ows.ows_configuration import OWSConfig, get_config, read_config
10 |
11 | src_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
12 |
13 | def test_metadata_export() -> None:
14 | cfg = get_config(refresh=True)
15 |
16 | export = cfg.export_metadata()
17 | assert "folder.0.title" not in export
18 | assert "folder.sentinel2.title" in export
19 |
20 | # assert layers.platforms
21 | # for p in layers:
22 | # assert p.products
23 | # for prd in p.products:
24 | # assert prd.styles
25 | # assert layers.product_index[prd.name] == prd
26 | # assert prd.title
27 |
28 |
29 | def test_missing_metadata_file(monkeypatch) -> None:
30 | cached_cfg = OWSConfig._instance
31 | cached_reg = OWSConfig._metadata_registry
32 | cached_inh_reg = OWSConfig._inheritance_registry
33 | cached_catalog = OWSConfig._msg_src
34 |
35 | monkeypatch.chdir(src_dir)
36 | try:
37 | OWSConfig._instance = None
38 | OWSConfig._metadata_registry = {}
39 | OWSConfig._inheritance_registry = {}
40 | OWSConfig._msg_src = None
41 | raw_cfg = read_config()
42 | raw_cfg["global"]["message_file"] = "integration_tests/cfg/non-existent.po"
43 | raw_cfg["global"]["translations_directory"] = None
44 | raw_cfg["global"]["languages"] = ["en"]
45 | cfg = OWSConfig(refresh=True, cfg=raw_cfg)
46 | cfg.make_ready()
47 |
48 | assert "Over-ridden" not in cfg.title
49 | assert "aardvark" not in cfg.title
50 | finally:
51 | OWSConfig._instance = cached_cfg
52 | OWSConfig._metadata_registry = cached_reg
53 | OWSConfig._inheritance_registry = cached_inh_reg
54 | OWSConfig._msg_src = cached_catalog
55 |
56 |
57 | def test_metadata_file_ignore(monkeypatch) -> None:
58 | cached_cfg = OWSConfig._instance
59 | cached_reg = OWSConfig._metadata_registry
60 | cached_inh_reg = OWSConfig._inheritance_registry
61 | cached_catalog = OWSConfig._msg_src
62 | monkeypatch.chdir(src_dir)
63 | try:
64 | OWSConfig._instance = None
65 | OWSConfig._metadata_registry = {}
66 | OWSConfig._inheritance_registry = {}
67 | OWSConfig._msg_src = None
68 | raw_cfg = read_config()
69 | raw_cfg["global"]["message_file"] = "integration_tests/cfg/message.po"
70 | cfg = OWSConfig(refresh=True, cfg=raw_cfg, ignore_msgfile=True)
71 | cfg.make_ready()
72 |
73 | assert "Over-ridden" not in cfg.title
74 | assert "aardvark" not in cfg.title
75 | finally:
76 | OWSConfig._instance = cached_cfg
77 | OWSConfig._metadata_registry = cached_reg
78 | OWSConfig._inheritance_registry = cached_inh_reg
79 | OWSConfig._msg_src = cached_catalog
80 |
81 |
82 | def test_metadata_read(monkeypatch, product_name: str) -> None:
83 | cached_cfg = OWSConfig._instance
84 | monkeypatch.chdir(src_dir)
85 | try:
86 | OWSConfig._instance = None
87 | raw_cfg = read_config()
88 | raw_cfg["global"]["message_file"] = "integration_tests/cfg/message.po"
89 | cfg = OWSConfig(refresh=True, cfg=raw_cfg)
90 | cfg.make_ready()
91 |
92 | assert "Over-ridden" in cfg.title
93 | assert "aardvark" in cfg.title
94 |
95 | folder = cfg.folder_index["folder.sentinel2"]
96 | assert "Over-ridden" not in folder.title
97 | assert "Over-ridden" in folder.abstract
98 | assert "bunny-rabbit" in folder.abstract
99 |
100 | lyr = cfg.layer_index[product_name]
101 | assert "Over-ridden" in lyr.title
102 | assert "chook" in lyr.title
103 |
104 | styl = lyr.style_index["simple_rgb"]
105 | assert "Over-ridden" in styl.title
106 | assert "donkey" in styl.title
107 |
108 | styl = lyr.style_index["blue"]
109 | assert "Over-ridden" not in styl.title
110 | finally:
111 | OWSConfig._instance = cached_cfg
112 |
--------------------------------------------------------------------------------
/integration_tests/test_routes.py:
--------------------------------------------------------------------------------
1 | # This file is part of datacube-ows, part of the Open Data Cube project.
2 | # See https://opendatacube.org for more information.
3 | #
4 | # Copyright (c) 2017-2024 OWS Contributors
5 | # SPDX-License-Identifier: Apache-2.0
6 |
7 | """Run with DB to simulate actual function
8 | """
9 |
10 |
11 | def test_db_connect_success(flask_client) -> None:
12 | """Start with a database connection"""
13 |
14 | rv = flask_client.get("/ping")
15 | assert rv.status_code == 200
16 |
17 |
18 | def test_wcs_base(flask_client) -> None:
19 | """WCS endpoint base"""
20 |
21 | rv = flask_client.get("/wcs")
22 | assert rv.status_code == 400
23 |
24 |
25 | def test_wms_base(flask_client) -> None:
26 | """WMS endpoint base"""
27 |
28 | rv = flask_client.get("/wms")
29 | assert rv.status_code == 400
30 |
31 |
32 | def test_wmts_base(flask_client) -> None:
33 | """WMTS endpoint base"""
34 |
35 | rv = flask_client.get("/wmts")
36 | assert rv.status_code == 400
37 |
38 |
39 | def test_legend_default(flask_client) -> None:
40 | """No-param on legend"""
41 |
42 | rv = flask_client.get("/legend/layer/style/legend.png")
43 | assert rv.status_code == 404
44 |
45 |
46 | def test_index(flask_client) -> None:
47 | """Base index endpoint"""
48 |
49 | rv = flask_client.get("/")
50 | assert rv.status_code == 200
51 |
--------------------------------------------------------------------------------
/integration_tests/test_version.py:
--------------------------------------------------------------------------------
1 | # This file is part of datacube-ows, part of the Open Data Cube project.
2 | # See https://opendatacube.org for more information.
3 | #
4 | # Copyright (c) 2017-2024 OWS Contributors
5 | # SPDX-License-Identifier: Apache-2.0
6 |
7 | """Test update ranges on DB using Click testing
8 | https://click.palletsprojects.com/en/7.x/testing/
9 | """
10 | from datacube_ows.__init__ import __version__
11 | from datacube_ows.update_ranges_impl import main
12 |
13 |
14 | def test_updates_ranges_version(runner) -> None:
15 | result = runner.invoke(main, ["--version"])
16 | assert __version__ in result.output
17 | assert result.exit_code == 0
18 |
--------------------------------------------------------------------------------
/inventory.json:
--------------------------------------------------------------------------------
1 | {
2 | "total_layers_count": 6,
3 | "layers": [
4 | {
5 | "layer": "s2_l2a",
6 | "product": [
7 | "s2_l2a"
8 | ],
9 | "styles_count": 8,
10 | "styles_list": [
11 | "simple_rgb",
12 | "style_ls_simple_rgb_clone",
13 | "infra_red",
14 | "blue",
15 | "ndvi",
16 | "ndvi_expr",
17 | "rgb_ndvi",
18 | "ndvi_delta"
19 | ]
20 | },
21 | {
22 | "layer": "s2_l2a_clone",
23 | "product": [
24 | "s2_l2a"
25 | ],
26 | "styles_count": 8,
27 | "styles_list": [
28 | "simple_rgb",
29 | "style_ls_simple_rgb_clone",
30 | "infra_red",
31 | "blue",
32 | "ndvi",
33 | "ndvi_expr",
34 | "rgb_ndvi",
35 | "ndvi_delta"
36 | ]
37 | },
38 | {
39 | "layer": "s2_ard_granule_nbar_t",
40 | "product": [
41 | "ga_s2am_ard_3",
42 | "ga_s2bm_ard_3"
43 | ],
44 | "styles_count": 2,
45 | "styles_list": [
46 | "ndci",
47 | "mndwi"
48 | ]
49 | },
50 | {
51 | "layer": "s2_ard_latest_mosaic",
52 | "product": [
53 | "ga_s2am_ard_3",
54 | "ga_s2bm_ard_3"
55 | ],
56 | "styles_count": 2,
57 | "styles_list": [
58 | "ndci",
59 | "mndwi"
60 | ]
61 | },
62 | {
63 | "layer": "ga_ls_fc_3",
64 | "product": [
65 | "ga_ls_fc_3"
66 | ],
67 | "styles_count": 1,
68 | "styles_list": [
69 | "fc_rgb_unmasked"
70 | ]
71 | },
72 | {
73 | "layer": "ls8_geomedian",
74 | "product": [
75 | "ga_ls8c_nbart_gm_cyear_3"
76 | ],
77 | "styles_count": 3,
78 | "styles_list": [
79 | "simple_rgb",
80 | "infra_red",
81 | "ndvi"
82 | ]
83 | }
84 | ]
85 | }
86 |
--------------------------------------------------------------------------------
/license-headers.md:
--------------------------------------------------------------------------------
1 | # Applying or updating license headers
2 |
3 | To add or update license headers in this or other Open Data Cube
4 | projects, you can do the following:
5 |
6 | Download the tool from [https://github.com/johann-petrak/licenseheaders](https://github.com/johann-petrak/licenseheaders) and make it executable.
7 |
8 | ```bash
9 | wget https://raw.githubusercontent.com/johann-petrak/licenseheaders/master/licenseheaders.py
10 | chmod +x licenseheaders.py
11 | ```
12 |
13 | Change the section for `python` files to remove the `headerStartLine` and
14 | `headerEndLine` values, like:
15 |
16 | ```python
17 | "headerStartLine": "",
18 | "headerEndLine": "",
19 | ```
20 |
21 | Run the tool:
22 |
23 | ```bash
24 | python3 ./licenseheaders.py --tmpl license-template.txt --years 2017-2020 --ext py
25 | ```
26 |
--------------------------------------------------------------------------------
/license-template.txt:
--------------------------------------------------------------------------------
1 | This file is part of datacube-ows, part of the Open Data Cube project.
2 | See https://opendatacube.org for more information.
3 |
4 | Copyright (c) ${years} OWS Contributors
5 | SPDX-License-Identifier: Apache-2.0
6 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | # pyproject.toml
2 | [tool.mypy]
3 | python_version = "3.10"
4 | allow_redefinition = true
5 | enable_error_code = ["explicit-override"]
6 | ignore_missing_imports = true
7 | warn_redundant_casts = true
8 | warn_unused_ignores = true
9 |
10 | [tool.ruff]
11 | src = ["datacube_ows", "docs", "tests", "integration_tests"]
12 |
13 | [tool.ruff.lint]
14 | select = [
15 | "A", # Don't shadow built-ins
16 | "B", # flake8-bugbear
17 | "C4", # flake8-comprehensions
18 | "E", # pycodestyle errors
19 | "EXE", # flake8-executable
20 | "I", # isort
21 | "RUF", # Ruff-specific rules
22 | "SIM", # flake8-simplify
23 | "UP", # pyupgrade
24 | "W", # pycodestyle warnings
25 | "F", # pyflakes
26 | "T10", # flake8-debugger
27 | ]
28 | ignore = [
29 | "B026",
30 | "E501",
31 | "RUF012",
32 | "RUF022",
33 | ]
34 |
35 | [tool.ruff.lint.per-file-ignores]
36 | # Stay close to the generated file for readthedocs, so ignore some rules.
37 | "docs/conf.py" = ["A001", "E402"]
38 | # Using pytest_namespace requires assignment before importing pytest.
39 | "integration_tests/conftest.py" = ["E402"]
40 | # Multiline string, no good place for adding a noqa.
41 | "integration_tests/cfg/ows_test_cfg.py" = ["RUF001"]
42 | # Third-party software, keep pristine.
43 | "licenseheaders.py" = ["B", "C", "I", "SIM", "UP"]
44 |
45 | [tool.ruff.lint.pycodestyle]
46 | max-line-length = 120
47 |
48 | [build-system]
49 | requires = ["setuptools>=65.5.1", "wheel>=0.38.1", "setuptools_scm[toml]>3.4"]
50 |
51 | [tool.setuptools_scm]
52 | write_to = "datacube_ows/_version.py"
53 | fallback_version = "1.9.2"
54 |
--------------------------------------------------------------------------------
/s2_l2a_extractor.py:
--------------------------------------------------------------------------------
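# Extract the metadata documents of all s2_l2a datasets in a local datacube to
# YAML files, printing "filename uri" lines for metadata_importer.py to consume.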
1 | from datacube import Datacube
2 | from yaml import dump
3 |
4 | dc = Datacube()
5 |
6 | i = 1
7 | for ds in dc.index.datasets.search(product="s2_l2a"):
8 | filename = "s2_l2a_ds_%02d.yaml" % i # noqa: UP031
9 | with open(filename, "w") as fp:
10 | fp.write(dump(ds.metadata_doc, default_flow_style=False))
11 | print(filename, ds.uri)
12 |
13 | i = i + 1
14 |
--------------------------------------------------------------------------------
/spellcheck.yaml:
--------------------------------------------------------------------------------
1 | matrix:
2 | - name: Markdown
3 | sources:
4 | - '**/*.md'
5 | - '**/*.rst'
6 | default_encoding: utf-8
7 | aspell:
8 | lang: en
9 | dictionary:
10 | wordlists:
11 | - wordlist.txt
12 | encoding: utf-8
13 | pipeline:
14 | - pyspelling.filters.text:
15 | - pyspelling.filters.markdown:
16 | - pyspelling.filters.html:
17 | comments: false
18 | ignores:
19 | - code
20 | - pre
21 |
--------------------------------------------------------------------------------
/test_urls.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Run a bunch of test URLs for performance testing
3 | while true
4 | do
5 | curl 'http://localhost:8000/wms?service=WMS&version=1.3.0&request=GetMap&layers=ls8_usgs_level1_scene_layer&styles=rgb_ndvi&width=150&height=150&crs=EPSG%3A4326&bbox=-43.82093348558336%2C145.040403833046%2C-42.53486321090564%2C147.787537852719&format=image%2Fpng&transparent=TRUE&bgcolor=0xFFFFFF&exceptions=XML&time=2019-07-09' -o map.png
6 | done
7 |
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
1 | # This file is part of datacube-ows, part of the Open Data Cube project.
2 | # See https://opendatacube.org for more information.
3 | #
4 | # Copyright (c) 2017-2024 OWS Contributors
5 | # SPDX-License-Identifier: Apache-2.0
6 |
--------------------------------------------------------------------------------
/tests/cfg/README.txt:
--------------------------------------------------------------------------------
1 | Pseudo-config files to test the config framework.
2 |
--------------------------------------------------------------------------------
/tests/cfg/__init__.py:
--------------------------------------------------------------------------------
1 | # This file is part of datacube-ows, part of the Open Data Cube project.
2 | # See https://opendatacube.org for more information.
3 | #
4 | # Copyright (c) 2017-2024 OWS Contributors
5 | # SPDX-License-Identifier: Apache-2.0
6 |
--------------------------------------------------------------------------------
/tests/cfg/broken_nested.py:
--------------------------------------------------------------------------------
1 | # This file is part of datacube-ows, part of the Open Data Cube project.
2 | # See https://opendatacube.org for more information.
3 | #
4 | # Copyright (c) 2017-2024 OWS Contributors
5 | # SPDX-License-Identifier: Apache-2.0
6 |
7 |
8 | mixed_3 = {
9 | "test": 2634,
10 | "subtest": {
11 | "include": "tests.cfg.simple.doesnt_exist",
12 | "type": "python"
13 | }
14 | }
15 |
--------------------------------------------------------------------------------
/tests/cfg/infinite_1.json:
--------------------------------------------------------------------------------
1 | {
2 | "include": "infinite_1.json",
3 | "type": "json"
4 | }
5 |
--------------------------------------------------------------------------------
/tests/cfg/infinite_2.json:
--------------------------------------------------------------------------------
1 | {
2 | "test": 7778,
3 | "subtest": {
4 | "include": "infinite_2a.json",
5 | "type": "json"
6 | }
7 | }
8 |
--------------------------------------------------------------------------------
/tests/cfg/infinite_2a.json:
--------------------------------------------------------------------------------
1 | {
2 | "test": 7788,
3 | "subtest": {
4 | "include": "infinite_2b.json",
5 | "type": "json"
6 | }
7 | }
8 |
--------------------------------------------------------------------------------
/tests/cfg/infinite_2b.json:
--------------------------------------------------------------------------------
1 | {
2 | "test": 7789,
3 | "subtest": {
4 | "include": "infinite_2.json",
5 | "type": "json"
6 | }
7 | }
8 |
--------------------------------------------------------------------------------
/tests/cfg/minimal_cfg.py:
--------------------------------------------------------------------------------
1 | # This file is part of datacube-ows, part of the Open Data Cube project.
2 | # See https://opendatacube.org for more information.
3 | #
4 | # Copyright (c) 2017-2024 OWS Contributors
5 | # SPDX-License-Identifier: Apache-2.0
6 |
7 | ows_cfg = {
8 | "global": {
9 | "title": "Minimal test config",
10 | "allowed_urls": [],
11 | "info_url": "http://opendatacube.org",
12 | "env": "nosuchdb",
13 | "published_CRSs": {
14 | "EPSG:3857": { # Web Mercator
15 | "geographic": False,
16 | "horizontal_coord": "x",
17 | "vertical_coord": "y",
18 | },
19 | "EPSG:4326": { # WGS-84
20 | "geographic": True,
21 | "vertical_coord_first": True
22 | },
23 | },
24 | "services": {
25 | "wms": True,
26 | "wmts": True,
27 | "wcs": True
28 | },
29 | },
30 |
31 | "wms": {},
32 |
33 | "wcs": {
34 | "formats": {
35 | "GeoTIFF": {
36 | "renderers": {
37 | "1": "datacube_ows.wcs1_utils.get_tiff",
38 | "2": "datacube_ows.wcs2_utils.get_tiff",
39 | },
40 | "mime": "image/geotiff",
41 | "extension": "tif",
42 | "multi-time": False
43 | },
44 | "netCDF": {
45 | "renderers": {
46 | "1": "datacube_ows.wcs1_utils.get_netcdf",
47 | "2": "datacube_ows.wcs2_utils.get_netcdf",
48 | },
49 | "mime": "application/x-netcdf",
50 | "extension": "nc",
51 | "multi-time": True,
52 | }
53 | },
54 | "native_format": "GeoTIFF",
55 | },
56 |
57 | "layers": [],
58 | }
59 |
--------------------------------------------------------------------------------
/tests/cfg/mixed_nested.json:
--------------------------------------------------------------------------------
1 | {
2 | "test": 9364,
3 | "subtest": {
4 | "test_py": {
5 | "include": "tests.cfg.simple.simple",
6 | "type": "python"
7 | },
8 | "test_json": {
9 | "include": "tests/cfg/simple.json",
10 | "type": "json"
11 | }
12 | }
13 | }
14 |
--------------------------------------------------------------------------------
/tests/cfg/mixed_nested.py:
--------------------------------------------------------------------------------
1 | # This file is part of datacube-ows, part of the Open Data Cube project.
2 | # See https://opendatacube.org for more information.
3 | #
4 | # Copyright (c) 2017-2024 OWS Contributors
5 | # SPDX-License-Identifier: Apache-2.0
6 |
7 | mixed_1 = {
8 | "include": "tests/cfg/simple.json",
9 | "type": "json"
10 | }
11 |
12 |
13 | mixed_2 = {
14 | "test": 5224,
15 | "subtest": {
16 | "include": "tests/cfg/simple.json",
17 | "type": "json"
18 | }
19 | }
20 |
21 | mixed_3 = {
22 | "test": 2634,
23 | "subtest": {
24 | "test_py": {
25 | "include": "tests.cfg.simple.simple",
26 | "type": "python"
27 | },
28 | "test_json": {
29 | "include": "tests/cfg/simple.json",
30 | "type": "json"
31 | }
32 | }
33 | }
34 |
--------------------------------------------------------------------------------
/tests/cfg/nested.py:
--------------------------------------------------------------------------------
1 | # This file is part of datacube-ows, part of the Open Data Cube project.
2 | # See https://opendatacube.org for more information.
3 | #
4 | # Copyright (c) 2017-2024 OWS Contributors
5 | # SPDX-License-Identifier: Apache-2.0
6 |
7 |
8 |
9 | nested = {
10 | "include": "tests.cfg.simple.simple",
11 | "type": "python",
12 | }
13 |
14 | nested_1 = [
15 | {
16 | "test": 8888,
17 | },
18 | {
19 | "include": "tests.cfg.simple.simple1",
20 | "type": "python"
21 | }
22 | ]
23 |
24 | nested_2 = {
25 | "test": 3424,
26 | "subtest": {
27 | "include": "tests.cfg.simple.simple2",
28 | "type": "python"
29 | }
30 | }
31 |
32 | nested_3 = {
33 | "test": 233,
34 | "things": [
35 | {
36 | "test": 2562,
37 | "thing": None
38 | },
39 | {
40 | "test": 2563,
41 | "thing": {
42 | "include": "tests.cfg.simple.simple",
43 | "type": "python"
44 | }
45 | },
46 | {
47 | "test": 2564,
48 | "thing": {
49 | "include": "tests.cfg.simple.simple3",
50 | "type": "python"
51 | }
52 | },
53 | ]
54 | }
55 |
56 | nested_4 = {
57 | "test": 222,
58 | "things": [
59 | {
60 | "test": 2572,
61 | "thing": None
62 | },
63 | {
64 | "test": 2573,
65 | "thing": {
66 | "include": "tests.cfg.simple.simple",
67 | "type": "python"
68 | }
69 | },
70 | {
71 | "test": 2574,
72 | "thing": {
73 | "include": "tests.cfg.nested.nested_3",
74 | "type": "python"
75 | }
76 | },
77 | ]
78 | }
79 |
80 | infinite_1 = {
81 | "include": "tests.cfg.nested.infinite_1",
82 | "type": "python"
83 | }
84 |
85 |
86 | infinite_2 = {
87 | "test": 7777,
88 | "subtest": {
89 | "include": "tests.cfg.nested.infinite_2a",
90 | "type": "python"
91 | }
92 | }
93 |
94 |
95 | infinite_2a = {
96 | "test": 7778,
97 | "subtest": {
98 | "include": "tests.cfg.nested.infinite_2b",
99 | "type": "python"
100 | }
101 | }
102 |
103 |
104 | infinite_2b = {
105 | "test": 7779,
106 | "subtest": {
107 | "include": "tests.cfg.nested.infinite_2",
108 | "type": "python"
109 | }
110 | }
111 |
--------------------------------------------------------------------------------
/tests/cfg/nested_1.json:
--------------------------------------------------------------------------------
1 | {
2 | "include": "simple.json",
3 | "type": "json"
4 | }
5 |
--------------------------------------------------------------------------------
/tests/cfg/nested_2.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "test": 88888
4 | },
5 | {
6 | "include": "simple.json",
7 | "type": "json"
8 | }
9 | ]
10 |
--------------------------------------------------------------------------------
/tests/cfg/nested_3.json:
--------------------------------------------------------------------------------
1 | {
2 | "test": 2222,
3 | "things": [
4 | {
5 | "test": 22562,
6 | "thing": null
7 | },
8 | {
9 | "test": 22563,
10 | "thing": {
11 | "include": "simple.json",
12 | "type": "json"
13 | }
14 | },
15 | {
16 | "test": 22564,
17 | "thing": {
18 | "include": "simple.json",
19 | "type": "json"
20 | }
21 | }
22 | ]
23 | }
24 |
--------------------------------------------------------------------------------
/tests/cfg/nested_4.json:
--------------------------------------------------------------------------------
1 | {
2 | "test": 3222,
3 | "things": [
4 | {
5 | "test": 2572,
6 | "thing": null
7 | },
8 | {
9 | "test": 2573,
10 | "thing": {
11 | "include": "simple.json",
12 | "type": "json"
13 | }
14 | },
15 | {
16 | "test": 2574,
17 | "thing": {
18 | "include": "nested_3.json",
19 | "type": "json"
20 | }
21 | }
22 | ]
23 | }
24 |
--------------------------------------------------------------------------------
/tests/cfg/simple.json:
--------------------------------------------------------------------------------
1 | {
2 | "test": 1234
3 | }
4 |
--------------------------------------------------------------------------------
/tests/cfg/simple.py:
--------------------------------------------------------------------------------
1 | # This file is part of datacube-ows, part of the Open Data Cube project.
2 | # See https://opendatacube.org for more information.
3 | #
4 | # Copyright (c) 2017-2024 OWS Contributors
5 | # SPDX-License-Identifier: Apache-2.0
6 |
7 |
8 |
9 | simple = {
10 | "test": 123,
11 | }
12 |
13 |
14 | simple1 = {
15 | "test": 1,
16 | }
17 |
18 |
19 | simple2 = {
20 | "test": 2,
21 | }
22 |
23 |
24 | simple3 = {
25 | "test": 3,
26 | }
27 |
--------------------------------------------------------------------------------
/tests/msg/README.txt:
--------------------------------------------------------------------------------
1 | Pseudo-message files to test the message file parser.
2 |
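3 | Each entry is an optional "#." comment, a msgid line and a msgstr line, with
4 | optional continuation strings (see good.po for a fully valid example):
5 |
6 |     #. Single line msgstr
7 |     msgid "foo.bar.baz"
8 |     msgstr "Single line msgstr"
9 |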
--------------------------------------------------------------------------------
/tests/msg/double_msgid.po:
--------------------------------------------------------------------------------
1 |
2 | #. Valid test file
3 | #. Single line msgstr
4 | msgid "foo.bar.baz"
5 | msgstr "Single line msgstr"
6 |
7 | #. Multi line msgstr
8 | msgid "tic.tac.toe"
9 | msgstr ""
10 | "\n"
11 | "A multi-line string that continues\n"
12 | "Across line-breaks.\n"
13 |
14 | #. Error: msgid where msgstr expected.
15 | msgid "bang.whirl.splat"
16 | msgid "ding.dang.dong"
17 |
--------------------------------------------------------------------------------
/tests/msg/double_msgstr.po:
--------------------------------------------------------------------------------
1 |
2 | #. Valid test file
3 | #. Single line msgstr
4 | msgid "foo.bar.baz"
5 | msgstr "Single line msgstr"
6 |
7 | #. Multi line msgstr
8 | msgid "tic.tac.toe"
9 | msgstr ""
10 | "\n"
11 | "A multi-line string that continues\n"
12 | "Across line-breaks.\n"
13 |
14 | #. Error: msgstr where msgid, comment, or whitespace was expected.
15 | msgid "bang.whirl.splat"
16 | msgstr "Single line msgstr"
17 | msgstr "Another single line msgstr"
18 |
--------------------------------------------------------------------------------
/tests/msg/duplicate_msgid.po:
--------------------------------------------------------------------------------
1 |
2 | #. Valid test file
3 | #. Single line msgstr
4 | msgid "foo.bar.baz"
5 | msgstr "Single line msgstr"
6 |
7 | #. Multi line msgstr
8 | msgid "tic.tac.toe"
9 | msgstr ""
10 | "\n"
11 | "A multi-line string that continues\n"
12 | "Across line-breaks.\n"
13 |
14 | #. Error: duplicate msgid.
15 | msgid "tic.tac.toe"
16 | msgstr "This msgid appears twice in this test file."
17 |
--------------------------------------------------------------------------------
/tests/msg/good.po:
--------------------------------------------------------------------------------
1 |
2 | #. Valid test file
3 | #. Single line msgstr
4 | msgid "foo.bar.baz"
5 | msgstr "Single line msgstr"
6 |
7 | #. Multi line msgstr
8 | msgid "tic.tac.toe"
9 | msgstr ""
10 | "\n"
11 | "A multi-line string that continues\n"
12 | "Across line-breaks.\n"
13 |
--------------------------------------------------------------------------------
/tests/msg/missing_msgid.po:
--------------------------------------------------------------------------------
1 |
2 | #. Valid test file
3 | #. Single line msgstr
4 | msgid "foo.bar.baz"
5 | msgstr "Single line msgstr"
6 |
7 | #. Multi line msgstr
8 | msgid "tic.tac.toe"
9 | msgstr ""
10 | "\n"
11 | "A multi-line string that continues\n"
12 | "Across line-breaks.\n"
13 |
14 | #. Error: msgstr with no msgid.
15 | msgstr "Single line msgstr"
16 |
--------------------------------------------------------------------------------
/tests/msg/multiline_msgid.po:
--------------------------------------------------------------------------------
1 |
2 | #. Valid test file
3 | #. Single line msgstr
4 | msgid "foo.bar.baz"
5 | msgstr "Single line msgstr"
6 |
7 | #. Multi line msgstr
8 | msgid "tic.tac.toe"
9 | msgstr ""
10 | "\n"
11 | "A multi-line string that continues\n"
12 | "Across line-breaks.\n"
13 |
14 | #. Error: msgid with continuation
15 | msgid ""
16 | "To tic-tac-toe, or not to tic-tac-toe,\n"
17 | msgstr "That is the question."
18 |
--------------------------------------------------------------------------------
/tests/msg/untagged_string.po:
--------------------------------------------------------------------------------
1 |
2 | #. Valid test file
3 | #. Single line msgstr
4 | msgid "foo.bar.baz"
5 | msgstr "Single line msgstr"
6 |
7 | #. Multi line msgstr
8 | msgid "tic.tac.toe"
9 | msgstr ""
10 | "\n"
11 | "A multi-line string that continues\n"
12 | "Across line-breaks.\n"
13 |
14 | #. Error: String not part of a continuation.
15 | ""
16 |
--------------------------------------------------------------------------------
/tests/test_cfg_wcs.py:
--------------------------------------------------------------------------------
1 | # This file is part of datacube-ows, part of the Open Data Cube project.
2 | # See https://opendatacube.org for more information.
3 | #
4 | # Copyright (c) 2017-2024 OWS Contributors
5 | # SPDX-License-Identifier: Apache-2.0
6 |
7 | from unittest.mock import patch
8 |
9 | import pytest
10 | from datacube_ows.config_utils import ConfigException
11 | from datacube_ows.ows_configuration import WCSFormat, parse_ows_layer
12 |
13 |
14 | def test_zero_grid(minimal_global_cfg, minimal_layer_cfg, minimal_dc, mock_range, empty_driver_cache) -> None:
15 | minimal_global_cfg.wcs = True
16 | minimal_layer_cfg["native_crs"] = "EPSG:4326"
17 | minimal_layer_cfg["product_name"] = "foo_nativeres"
18 | lyr = parse_ows_layer(minimal_layer_cfg,
19 | global_cfg=minimal_global_cfg)
20 | mock_range.bboxes["EPSG:4326"] = {
21 | "top": 0.1, "bottom": 0.1,
22 | "left": -0.1, "right": 0.1,
23 | }
24 | assert mock_range.bboxes["EPSG:4326"]["bottom"] > 0
25 | assert not lyr.ready
26 | with patch("datacube_ows.index.postgres.api.get_ranges_impl") as get_rng:
27 | get_rng.return_value = mock_range
28 | with pytest.raises(ConfigException) as excinfo:
29 | lyr.make_ready(minimal_dc)
30 | get_rng.assert_called()
31 | assert not lyr.ready
32 | assert "but vertical resolution is " in str(excinfo.value)
33 | assert "a_layer" in str(excinfo.value)
34 | assert "EPSG:4326" in str(excinfo.value)
35 | minimal_global_cfg.layer_index = {}
36 | lyr = parse_ows_layer(minimal_layer_cfg,
37 | global_cfg=minimal_global_cfg)
38 | mock_range.bboxes["EPSG:4326"] = {
39 | "top": 0.1, "bottom": -0.1,
40 | "left": -0.1, "right": -0.1,
41 | }
42 | with patch("datacube_ows.index.postgres.api.get_ranges_impl") as get_rng:
43 | get_rng.return_value = mock_range
44 | with pytest.raises(ConfigException) as excinfo:
45 | lyr.make_ready(minimal_dc)
46 | assert "but horizontal resolution is " in str(excinfo.value)
47 | assert "a_layer" in str(excinfo.value)
48 | assert "EPSG:4326" in str(excinfo.value)
49 |
50 |
51 | def test_wcs_renderer_detection() -> None:
52 | fmt = WCSFormat(
53 | "GeoTIFF",
54 | "image/geotiff",
55 | "tif",
56 | {
57 | "1": "datacube_ows.wcs1_utils.get_tiff",
58 | "2": "datacube_ows.wcs2_utils.get_tiff",
59 | },
60 | False
61 | )
62 | r = fmt.renderer("2.1.0")
63 | assert r == fmt.renderers[2]
64 |
--------------------------------------------------------------------------------
/tests/test_config_toolkit.py:
--------------------------------------------------------------------------------
1 | # This file is part of datacube-ows, part of the Open Data Cube project.
2 | # See https://opendatacube.org for more information.
3 | #
4 | # Copyright (c) 2017-2024 OWS Contributors
5 | # SPDX-License-Identifier: Apache-2.0
6 |
7 | from datacube_ows.config_toolkit import deepinherit
8 |
9 |
10 | def test_deepinherit_shallow() -> None:
11 | parent = {
12 | "a": 72,
13 | "b": "eagle",
14 | "c": False
15 | }
16 |
17 | child = {
18 | "a": 365
19 | }
20 | child = deepinherit(parent, child)
21 | assert child['a'] == 365
22 | assert child["b"] == "eagle"
23 | assert not child["c"]
24 |
25 |
26 | def test_deepinherit_deep() -> None:
27 | parent = {
28 | "a": 72,
29 | "b": {
30 | "fruit": "grapes",
31 | "spice": "cummin",
32 | "cake": "chocolate",
33 | "y": ["some", "body", "once"],
34 | "z": [44, 42, 53],
35 | "c": {
36 | "foo": "bar",
37 | "wing": "wang"
38 | }
39 | }
40 | }
41 |
42 | child = {
43 | "b": {
44 | "spice": "nutmeg",
45 | "c": {
46 | "wing": "chicken"
47 | },
48 | "y": ["told", "me"],
49 | "z": [11]
50 | }
51 | }
52 | child = deepinherit(parent, child)
53 | assert child["a"] == 72
54 | assert child["b"]["spice"] == "nutmeg"
55 | assert child["b"]["fruit"] == "grapes"
56 | assert child["b"]["c"]["foo"] == "bar"
57 | assert child["b"]["c"]["wing"] == "chicken"
58 | assert child["b"]["z"] == [11]
59 | assert child["b"]["y"] == ["some", "body", "once", "told", "me"]
60 |
61 |
62 | def test_array_inheritance() -> None:
63 | inherit_from = {
64 | "foo": "bar",
65 | "ding": "dong",
66 | "bing": "bang",
67 | "wham": ["a-lam", "a-bing", "bong"],
68 | "king": {
69 | "tide": "oceanography",
70 | "crab": "crustacean",
71 | "Sick-Nasty": "Spades",
72 | }
73 | }
74 | inherit_to = {
75 | "foo": "baz",
76 | "wham": [],
77 | "king": {
78 | "Penguin": "Antarctica"
79 | }
80 | }
81 | inherited = deepinherit(inherit_from, inherit_to)
82 | assert inherited["foo"] == "baz"
83 | assert inherited["wham"] == []
84 | assert inherited["king"]["Penguin"] == "Antarctica"
85 | assert inherited["king"]["tide"] == "oceanography"
86 |
87 | inherit_to["wham"] = ["bim", "bala", "boom"]
88 | inherited = deepinherit(inherit_from, inherit_to)
89 | assert "a-bing" in inherited["wham"]
90 | assert "bim" in inherited["wham"]
91 |
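92 | # Note on the list-merging behaviour pinned down above: an empty child list
93 | # ("wham": []) replaces the parent's list, a non-empty child list of strings is
94 | # merged with the parent's (the final "wham" contains both "a-bing" and "bim"),
95 | # while the numeric list "z" in test_deepinherit_deep is replaced outright.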
--------------------------------------------------------------------------------
/tests/test_driver_cache.py:
--------------------------------------------------------------------------------
1 | # This file is part of datacube-ows, part of the Open Data Cube project.
2 | # See https://opendatacube.org for more information.
3 | #
4 | # Copyright (c) 2017-2024 OWS Contributors
5 | # SPDX-License-Identifier: Apache-2.0
6 |
7 |
8 | def test_index_driver_cache() -> None:
9 | from datacube_ows.index.driver import ows_index_drivers
12 | assert "postgres" in ows_index_drivers()
13 | assert "postgis" in ows_index_drivers()
14 | from datacube_ows.index.driver import ows_index_driver_by_name
15 | assert ows_index_driver_by_name("postgres") is not None
16 |
--------------------------------------------------------------------------------
/tests/test_legend_generator.py:
--------------------------------------------------------------------------------
1 | # This file is part of datacube-ows, part of the Open Data Cube project.
2 | # See https://opendatacube.org for more information.
3 | #
4 | # Copyright (c) 2017-2024 OWS Contributors
5 | # SPDX-License-Identifier: Apache-2.0
6 |
7 | from decimal import Decimal
8 | from unittest.mock import MagicMock
9 |
10 | import pytest
11 | from datacube_ows.legend_utils import get_image_from_url
12 | from datacube_ows.ogc_exceptions import WMSException
13 | from datacube_ows.styles.base import StyleDefBase
14 | from datacube_ows.styles.ramp import ColorRamp, ColorRampDef
15 |
16 | from tests.test_band_utils import dummy_layer # noqa: F401
17 |
18 |
19 | @pytest.fixture
20 | def prelegend_style():
21 | style = StyleDefBase.__new__(StyleDefBase)
22 | style._unready_attributes = []
23 | return style
24 |
25 |
26 | @pytest.fixture
27 | def prelegend_colorramp_style():
28 | style = ColorRampDef.__new__(ColorRampDef)
29 | style._unready_attributes = []
30 | return style
31 |
32 |
33 | def test_create_legend_for_style(dummy_layer) -> None: # noqa: F811
34 | from datacube_ows.legend_generator import create_legend_for_style
35 | assert create_legend_for_style(dummy_layer, "stylish_steve") is None
36 |
37 |
38 | @pytest.fixture
39 | def image_url() -> str:
40 | return "https://github.com/fluidicon.png"
41 |
42 |
43 | @pytest.fixture
44 | def bad_image_url() -> str:
45 | return "https://github.com/not-a-real-github-image-i-hope-asdfgaskjdfghaskjdh.png"
46 |
47 |
48 | def test_image_from_url(image_url) -> None:
49 | img = get_image_from_url(image_url)
50 | assert img is not None
51 | assert img.mode == "RGBA"
52 |
53 |
54 | def test_image_from_bad_image_url(bad_image_url) -> None:
55 | with pytest.raises(WMSException):
56 | _ = get_image_from_url(bad_image_url)
57 |
58 | def test_parse_colorramp_defaults() -> None:
59 | legend = ColorRampDef.Legend(MagicMock(), {})
60 | _ = ColorRamp(MagicMock(),
61 | {
62 | "range": [0.0, 1.0],
63 | },
64 | legend)
65 | assert legend.begin == Decimal("0.0")
66 | assert legend.end == Decimal("1.0")
67 | assert legend.ticks == [Decimal("0.0"), Decimal("1.0")]
68 | assert legend.units is None
69 | assert legend.tick_labels == ["0.0", "1.0"]
70 | assert legend.width == 4.0
71 | assert legend.height == 1.25
72 | assert legend.strip_location == (0.05, 0.5, 0.9, 0.15)
73 |
74 |
75 | def test_parse_colorramp_legend_beginend() -> None:
76 | legend = ColorRampDef.Legend(MagicMock(), {
77 | "begin": "0.0",
78 | "end": "2.0"
79 | })
80 | assert legend.begin == Decimal("0.0")
81 | assert legend.end == Decimal("2.0")
82 |
--------------------------------------------------------------------------------
/tests/test_mpl_cmaps.py:
--------------------------------------------------------------------------------
1 | # This file is part of datacube-ows, part of the Open Data Cube project.
2 | # See https://opendatacube.org for more information.
3 | #
4 | # Copyright (c) 2017-2024 OWS Contributors
5 | # SPDX-License-Identifier: Apache-2.0
6 |
7 | """
8 | Test creation of colour maps from matplotlib
9 | """
10 | from datacube_ows.ows_cfg_example import style_deform
11 | from datacube_ows.styles.ramp import read_mpl_ramp
12 |
13 |
14 | def test_get_mpl_cmap() -> None:
15 | matplotlib_ramp_name = style_deform['mpl_ramp']
16 | assert matplotlib_ramp_name
17 | ows_ramp_dict = read_mpl_ramp(matplotlib_ramp_name)
18 | assert len(ows_ramp_dict) == 11
19 | for cmap in ows_ramp_dict:
20 | assert cmap.color.startswith("#")
21 | assert isinstance(cmap.value, float)
22 |
--------------------------------------------------------------------------------
/tests/test_multidate_handler.py:
--------------------------------------------------------------------------------
1 | # This file is part of datacube-ows, part of the Open Data Cube project.
2 | # See https://opendatacube.org for more information.
3 | #
4 | # Copyright (c) 2017-2024 OWS Contributors
5 | # SPDX-License-Identifier: Apache-2.0
6 |
7 | import numpy as np
8 | import pytest
9 | from datacube_ows.config_utils import ConfigException
10 | from datacube_ows.styles.base import StyleDefBase
11 |
12 |
13 | def test_multidate_handler() -> None:
14 | # TODO: Consolidate these into a fixture
15 | class FakeData:
16 | def __init__(self) -> None:
17 | self.nodata = np.nan
18 |
19 | def item(self):
20 | return np.nan
21 |
22 | class FakeDataset:
23 | def __getitem__(self, key):
24 | return FakeData()
25 |
26 | class FakeMdhStyle:
27 | include_in_feature_info = True
28 |
29 | def __init__(self) -> None:
30 | self.product = "test"
31 | self.needed_bands = ["test"]
32 | self.index_function = lambda x: FakeData()
33 | self.stand_alone = True
34 | self.transform_single_date_data = lambda x: x
35 |
36 | fake_cfg = {
37 | "allowed_count_range": [0, 10],
38 | "aggregator_function": "datacube_ows.band_utils.multi_date_delta",
39 | }
40 |
41 | fake_cfg_anim = {
42 | "allowed_count_range": [2, 10],
43 | "aggregator_function": "datacube_ows.band_utils.multi_date_pass",
44 | "animate": True,
45 | }
46 |
47 | fake_cfg_equal = {
48 | "allowed_count_range": [1, 1],
49 | "aggregator_function": "datacube_ows.band_utils.multi_date_delta",
50 | }
51 |
52 | mdh = StyleDefBase.MultiDateHandler(FakeMdhStyle(), fake_cfg)
53 | assert mdh is not None
54 | with pytest.raises(NotImplementedError):
55 | mdh.legend_cfg.render(None)
56 | assert isinstance(mdh.range_str(), str)
57 | assert mdh.applies_to(2)
58 | assert not mdh.applies_to(11)
59 | assert not mdh.animate
60 |
61 | mdh_anim = StyleDefBase.MultiDateHandler(FakeMdhStyle(), fake_cfg_anim)
62 | assert mdh_anim.animate
63 |
64 | mdh_equal = StyleDefBase.MultiDateHandler(FakeMdhStyle(), fake_cfg_equal)
65 | assert isinstance(mdh_equal.range_str(), str)
66 |
67 | with pytest.raises(ConfigException) as excinfo:
68 | _ = StyleDefBase.MultiDateHandler(FakeMdhStyle(), {})
69 |
70 | assert "must have an allowed_count_range" in str(excinfo.value)
71 |
72 | with pytest.raises(ConfigException) as excinfo:
73 | _ = StyleDefBase.MultiDateHandler(
74 | FakeMdhStyle(), {"allowed_count_range": [0, 5, 10], }
75 | )
76 |
77 | assert "allowed_count_range must have 2" in str(excinfo.value)
78 |
79 | with pytest.raises(ConfigException) as excinfo:
80 | _ = StyleDefBase.MultiDateHandler(
81 | FakeMdhStyle(), {"allowed_count_range": [10, 5], }
82 | )
83 |
84 | assert "minimum must be less than equal to maximum" in str(excinfo.value)
85 |
86 | with pytest.raises(ConfigException) as excinfo:
87 | _ = StyleDefBase.MultiDateHandler(
88 | FakeMdhStyle(), {"allowed_count_range": [0, 10], }
89 | )
90 |
91 | assert "Aggregator function is required" in str(excinfo.value)
92 |
93 | assert mdh.transform_data(None) is None
94 |
--------------------------------------------------------------------------------
/tests/test_mv_selopts.py:
--------------------------------------------------------------------------------
1 | # This file is part of datacube-ows, part of the Open Data Cube project.
2 | # See https://opendatacube.org for more information.
3 | #
4 | # Copyright (c) 2017-2024 OWS Contributors
5 | # SPDX-License-Identifier: Apache-2.0
6 |
7 | from datacube_ows.index.postgres.mv_index import MVSelectOpts
8 |
9 |
10 | def test_all() -> None:
11 | assert MVSelectOpts.ALL.sel("Ni!!") == ["Ni!!"]
12 |
13 |
14 | class MockSTV:
15 | def __init__(self, id_) -> None:
16 | self.id = id_
17 | self.c = self
18 |
19 |
20 | def test_ids_datasets() -> None:
21 |     stv = MockSTV(42)
22 |     assert MVSelectOpts.IDS.sel(stv) == [42]
23 |     assert MVSelectOpts.DATASETS.sel(stv) == [42]
28 |
29 |
30 | def test_extent() -> None:
31 | sel = MVSelectOpts.EXTENT.sel(None)
32 | assert len(sel) == 1
33 | assert str(sel[0]) == "ST_AsGeoJSON(ST_Union(spatial_extent))"
34 |
35 |
36 | def test_count() -> None:
37 | from sqlalchemy import text
38 | stv = MockSTV(id_=text("foo"))
39 | sel = MVSelectOpts.COUNT.sel(stv)
40 | assert len(sel) == 1
41 | assert str(sel[0]) == "count(foo)"
42 |
--------------------------------------------------------------------------------
/tests/test_no_db_routes.py:
--------------------------------------------------------------------------------
1 | # This file is part of datacube-ows, part of the Open Data Cube project.
2 | # See https://opendatacube.org for more information.
3 | #
4 | # Copyright (c) 2017-2024 OWS Contributors
5 | # SPDX-License-Identifier: Apache-2.0
6 |
7 | """Run with no DB to simulate connection failure
8 | """
9 | import os
10 | import sys
11 |
12 | import pytest
13 |
14 | src_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
15 | if src_dir not in sys.path:
16 | sys.path.append(src_dir)
17 |
18 |
19 | def reset_global_config() -> None:
20 | from datacube_ows.ows_configuration import OWSConfig
21 | OWSConfig._instance = None
22 |
23 |
24 | @pytest.fixture
25 | def no_db(monkeypatch):
26 | monkeypatch.setenv("DATACUBE_OWS_CFG", "tests.cfg.minimal_cfg.ows_cfg")
27 | reset_global_config()
28 | yield
29 | reset_global_config()
30 |
31 |
32 | def test_db_connect_fail(no_db, flask_client) -> None:
33 | """Start with a database connection"""
34 | rv = flask_client.get('/ping')
35 | assert rv.status_code == 500
36 |
37 |
38 | def test_wcs_fail(no_db, flask_client) -> None:
39 | """WCS endpoint fails"""
40 | rv = flask_client.get('/wcs')
41 | assert rv.status_code == 400
42 |
43 |
44 | def test_wms_fail(no_db, flask_client) -> None:
45 | """WMS endpoint fails"""
46 | rv = flask_client.get('/wms')
47 | assert rv.status_code == 400
48 |
49 |
50 | def test_wmts_fail(no_db, flask_client) -> None:
51 | """WMTS endpoint fails"""
52 | rv = flask_client.get('/wmts')
53 | assert rv.status_code == 400
54 |
55 |
56 | def test_legend_fail(no_db, flask_client) -> None:
57 | """Fail on legend"""
58 | rv = flask_client.get("/legend/layer/style/legend.png")
59 | assert rv.status_code == 404
60 |
61 |
62 | def test_index_fail(no_db, flask_client) -> None:
63 | """Base index endpoint fails"""
64 |     # TODO: this should actually return 200.
65 | rv = flask_client.get('/')
66 | assert rv.status_code == 500
67 |
--------------------------------------------------------------------------------
/tests/test_protocol_versions.py:
--------------------------------------------------------------------------------
1 | # This file is part of datacube-ows, part of the Open Data Cube project.
2 | # See https://opendatacube.org for more information.
3 | #
4 | # Copyright (c) 2017-2024 OWS Contributors
5 | # SPDX-License-Identifier: Apache-2.0
6 |
7 | import datacube_ows.protocol_versions
8 | import pytest
9 |
10 |
11 | class DummyException1(Exception):
12 | pass
13 |
14 | class DummyException2(Exception):
15 | pass
16 |
17 | def fake_router(*args, **kwargs):
18 | return None
19 |
20 | @pytest.fixture
21 | def supported_service():
22 | return datacube_ows.protocol_versions.SupportedSvc([
23 | datacube_ows.protocol_versions.SupportedSvcVersion("wxs", "1.2.7", fake_router, DummyException1),
24 | datacube_ows.protocol_versions.SupportedSvcVersion("wxs", "1.13.0", fake_router, DummyException1),
25 | datacube_ows.protocol_versions.SupportedSvcVersion("wxs", "2.0.0", fake_router, DummyException1),
26 | ], DummyException2)
27 |
28 | def test_default_exception(supported_service) -> None:
29 | assert supported_service.default_exception_class == DummyException2
30 |
31 |
32 | def test_version_negotiation(supported_service) -> None:
33 | assert supported_service.negotiated_version("1.0").version == "1.2.7"
34 | assert supported_service.negotiated_version("1.2").version == "1.2.7"
35 | assert supported_service.negotiated_version("1.0.0").version == "1.2.7"
36 | assert supported_service.negotiated_version("1.2.1").version == "1.2.7"
37 | assert supported_service.negotiated_version("1.2.7").version == "1.2.7"
38 | assert supported_service.negotiated_version("1.2.8").version == "1.2.7"
39 | assert supported_service.negotiated_version("1.13.0").version == "1.13.0"
40 | assert supported_service.negotiated_version("1.13.100").version == "1.13.0"
41 | assert supported_service.negotiated_version("2.0").version == "2.0.0"
42 | assert supported_service.negotiated_version("2.7.22").version == "2.0.0"
43 |
44 |
45 | def test_version_cleaner(supported_service) -> None:
46 | assert supported_service._clean_version_parts(["0", "1", "2"]) == [0, 1, 2]
47 | assert supported_service._clean_version_parts(["0", "1", "2/spam"]) == [0, 1, 2]
48 | assert supported_service._clean_version_parts(["0", "1spam", "2"]) == [0, 1]
49 | assert supported_service._clean_version_parts(["0?bacon", "1/eggs", "2/spam"]) == [0]
50 | assert supported_service._clean_version_parts(["spam", "spam", "spam"]) == []
51 |
--------------------------------------------------------------------------------
/tests/test_pyproj.py:
--------------------------------------------------------------------------------
1 | # This file is part of datacube-ows, part of the Open Data Cube project.
2 | # See https://opendatacube.org for more information.
3 | #
4 | # Copyright (c) 2017-2024 OWS Contributors
5 | # SPDX-License-Identifier: Apache-2.0
6 |
7 | from pyproj import CRS
8 |
9 | SUPPORTED_CRS = [
10 | 'EPSG:3857', # Web Mercator
11 | 'EPSG:4326', # WGS-84
12 | 'EPSG:3577', # GDA-94
13 | 'EPSG:3111', # VicGrid94
14 | 'EPSG:32648', # WGS 84 / Cambodiacube
15 | 'ESRI:102022', # Africa
16 |     # 'EPSG:102022', # Deprecated Africa
17 | 'EPSG:6933', # Africa
18 | ]
19 |
20 |
21 | def test_pyproj_crs() -> None:
22 |     for crs_string in SUPPORTED_CRS:
23 |         # CRS construction raises on unsupported strings, failing the test.
24 |         crs = CRS(crs_string)
25 |         assert crs is not None
28 |
--------------------------------------------------------------------------------
/tests/test_qprof.py:
--------------------------------------------------------------------------------
1 | # This file is part of datacube-ows, part of the Open Data Cube project.
2 | # See https://opendatacube.org for more information.
3 | #
4 | # Copyright (c) 2017-2024 OWS Contributors
5 | # SPDX-License-Identifier: Apache-2.0
6 |
7 | from datacube_ows.query_profiler import QueryProfiler
8 |
9 |
10 | def test_qpf_inactive() -> None:
11 | qp = QueryProfiler(False)
12 | qp.start_event("foo")
13 | qp.end_event("foo")
14 | qp["foo"] = "splunge"
15 | assert qp.profile() == {}
16 |
17 |
18 | def test_qpf_active() -> None:
19 | qp = QueryProfiler(True)
20 | prof = qp.profile()
21 | assert prof["info"] == {}
22 | assert prof["profile"]["query"] is not None
23 |
24 |
25 | def test_qpf_events() -> None:
26 | qp = QueryProfiler(True)
27 | qp.start_event("foo")
28 | qp.end_event("foo")
29 | prof = qp.profile()
30 | assert prof["profile"]["foo"] is not None
31 |
32 |
33 | def test_qpf_info() -> None:
34 | qp = QueryProfiler(True)
35 | qp["foo"] = "splunge"
36 | prof = qp.profile()
37 | assert prof["info"]["foo"] == "splunge"
38 |
--------------------------------------------------------------------------------
/tests/test_resource_limits.py:
--------------------------------------------------------------------------------
1 | # This file is part of datacube-ows, part of the Open Data Cube project.
2 | # See https://opendatacube.org for more information.
3 | #
4 | # Copyright (c) 2017-2024 OWS Contributors
5 | # SPDX-License-Identifier: Apache-2.0
6 |
7 | import datacube_ows.resource_limits
8 | import pytest
9 | from datacube_ows.ogc_utils import create_geobox
10 | from odc.geo import CRS
11 |
12 |
13 | def test_request_scale() -> None:
14 | band = {'dtype': 'float64'}
15 | stdtile = create_geobox(minx=-20037508.342789, maxx=20037508.342789,
16 | miny=-20037508.342789, maxy=20037508.342789,
17 | crs=CRS("EPSG:3857"),
18 | width=256, height=256)
19 | bigtile = create_geobox(minx=-20037508.342789, maxx=20037508.342789,
20 | miny=-20037508.342789, maxy=20037508.342789,
21 | crs=CRS("EPSG:3857"),
22 | width=512, height=512)
23 | rs1 = datacube_ows.resource_limits.RequestScale(CRS("EPSG:3857"), (10.0, 10.0),
24 | bigtile, 2,
25 | request_bands=[band])
26 | assert pytest.approx(rs1.standard_scale / rs1.standard_scale, 1e-8) == 1.0
27 | assert pytest.approx(rs1 / rs1.standard_scale, 1e-8) == 200 / 3
28 | assert pytest.approx(rs1.load_factor, 1e-8) == 200 / 3
29 | assert pytest.approx(rs1.standard_scale.zoom_lvl_offset, 1e-64) == 0.0
30 | rs2 = datacube_ows.resource_limits.RequestScale(CRS("EPSG:3857"), (25.0, 25.0),
31 | stdtile, 4,
32 | total_band_size=6)
33 | assert pytest.approx(rs2.zoom_lvl_offset, 1e-8) == 1.0
34 | rs3 = datacube_ows.resource_limits.RequestScale(CRS("EPSG:3857"), (25.0, 25.0),
35 | stdtile, 64,
36 | total_band_size=6)
37 | assert pytest.approx(rs3.zoom_lvl_offset, 1e-8) == 3.0
38 | assert pytest.approx(rs3.base_zoom_level, 0.1) == 0.0
39 | assert pytest.approx(rs3.load_adjusted_zoom_level, 0.1) == -3.0
40 |
41 |
42 | def test_degree_to_metres() -> None:
43 | xres, yres = datacube_ows.resource_limits.RequestScale._metre_resolution(
44 | None,
45 | CRS("EPSG:4326"),
46 | (0.005, 0.005)
47 | )
48 | assert xres > 1.0
49 | assert yres > 1.0
50 |
--------------------------------------------------------------------------------
/tests/test_time_res_method.py:
--------------------------------------------------------------------------------
1 | # This file is part of datacube-ows, part of the Open Data Cube project.
2 | # See https://opendatacube.org for more information.
3 | #
4 | # Copyright (c) 2017-2024 OWS Contributors
5 | # SPDX-License-Identifier: Apache-2.0
6 |
7 | from datetime import datetime
8 |
9 | import pytest
10 | import pytz
11 | from datacube_ows.ows_configuration import TimeRes
12 |
13 |
14 | @pytest.fixture
15 | def simple_geobox():
16 | from affine import Affine
17 | from odc.geo.geobox import GeoBox
18 |
19 | aff = Affine.translation(145.0, -35.0) * Affine.scale(
20 | 1.0 / 256, 2.0 / 256
21 | )
22 | return GeoBox((256, 256), aff, 'EPSG:4326')
23 |
24 |
25 | def test_timeres_enum(simple_geobox) -> None:
26 | # Make sure no values trigger exceptions.
27 | for res in TimeRes:
28 | res.is_subday()
29 | res.is_solar()
30 | res.is_summary()
31 | res.search_times(datetime(2010, 1, 15, 13, 23, 55), geobox=simple_geobox)
32 | res.dataset_groupby()
33 |
34 |
35 | def test_subday() -> None:
36 | res = TimeRes.SUBDAY
37 | assert res.is_subday()
38 | assert not res.is_solar()
39 | assert not res.is_summary()
40 |
41 |
42 | def test_solar(simple_geobox) -> None:
43 | res = TimeRes.SOLAR
44 | assert not res.is_subday()
45 | assert res.is_solar()
46 | assert not res.is_summary()
47 |
48 | with pytest.raises(ValueError) as e:
49 | res.search_times(datetime(2020, 6, 7, 20, 20, 0, tzinfo=pytz.utc))
50 | assert "Solar time resolution search_times requires a geobox" in str(e.value)
51 |
52 | assert res.search_times(
53 | datetime(2020, 6, 7, 20, 20, 0, tzinfo=pytz.utc),
54 | simple_geobox,
55 | ) == (
56 | datetime(2020, 6, 6, 13, 55, tzinfo=pytz.utc),
57 | datetime(2020, 6, 7, 13, 54, 59, tzinfo=pytz.utc),
58 | )
59 |
60 |
61 | def test_summary() -> None:
62 | res = TimeRes.SUMMARY
63 | assert not res.is_subday()
64 | assert not res.is_solar()
65 | assert res.is_summary()
66 | assert res.search_times(
67 | datetime(2020, 6, 7, 0, 0, 0, tzinfo=pytz.utc)
68 | ) == datetime(2020, 6, 7, 0, 0, 0, tzinfo=pytz.utc)
69 |
70 |
71 | def test_legacy_aliases() -> None:
72 | assert TimeRes.parse("raw") == TimeRes.SOLAR
73 | assert TimeRes.parse("day") == TimeRes.SUMMARY
74 | assert TimeRes.parse("month") == TimeRes.SUMMARY
75 | assert TimeRes.parse("year") == TimeRes.SUMMARY
76 |
--------------------------------------------------------------------------------
/tests/test_update_ranges.py:
--------------------------------------------------------------------------------
1 | # This file is part of datacube-ows, part of the Open Data Cube project.
2 | # See https://opendatacube.org for more information.
3 | #
4 | # Copyright (c) 2017-2024 OWS Contributors
5 | # SPDX-License-Identifier: Apache-2.0
6 |
7 | """Test update ranges on DB using Click testing
8 | https://click.palletsprojects.com/en/7.x/testing/
9 | """
10 | import pytest
11 | from click.testing import CliRunner
12 | from datacube_ows.index.sql import run_sql
13 | from datacube_ows.update_ranges_impl import main
14 |
15 |
16 | @pytest.fixture
17 | def runner():
18 | return CliRunner()
19 |
20 |
21 | @pytest.fixture
22 | def role_name() -> str:
23 | return "role1"
24 |
25 |
26 | @pytest.fixture
27 | def layer_name() -> str:
28 | return "a_layer"
29 |
30 |
31 | def test_update_ranges_misuse_cases(runner, role_name: str, layer_name: str) -> None:
32 | result = runner.invoke(main, ["--schema", layer_name])
33 | assert "Sorry" in result.output
34 | assert result.exit_code == 1
35 |
36 | result = runner.invoke(main, ["--cleanup", layer_name])
37 | assert "Sorry" in result.output
38 | assert result.exit_code == 1
39 |
40 | result = runner.invoke(main, ["--read-role", role_name, layer_name])
41 | assert "Sorry" in result.output
42 | assert result.exit_code == 1
43 |
44 | result = runner.invoke(main, ["--write-role", role_name, layer_name])
45 | assert "Sorry" in result.output
46 | assert result.exit_code == 1
47 |
48 | result = runner.invoke(main, ["--views", "--cleanup"])
49 | assert "Sorry" in result.output
50 | assert result.exit_code == 1
51 |
52 | result = runner.invoke(main, ["--views", layer_name])
53 | assert "Sorry" in result.output
54 | assert result.exit_code == 1
55 |
56 | result = runner.invoke(main, ["--views", "--schema"])
57 | assert "Sorry" in result.output
58 | assert result.exit_code == 1
59 |
60 | result = runner.invoke(main, ["--views", "--read-role", role_name])
61 | assert "Sorry" in result.output
62 | assert result.exit_code == 1
63 |
64 | result = runner.invoke(main, ["--views", "--write-role", role_name])
65 | assert "Sorry" in result.output
66 | assert result.exit_code == 1
67 |
68 |
69 | def test_run_sql(minimal_dc) -> None:
70 | assert not run_sql(minimal_dc, "postgres", "no_such_directory")
71 |
72 | assert not run_sql(minimal_dc, "postgres", "templates")
73 |
74 | assert not run_sql(minimal_dc, "postgres", "ows_schema/grants/read_only")
75 |
--------------------------------------------------------------------------------
/tests/test_wcs2_utils.py:
--------------------------------------------------------------------------------
1 | # This file is part of datacube-ows, part of the Open Data Cube project.
2 | # See https://opendatacube.org for more information.
3 | #
4 | # Copyright (c) 2017-2024 OWS Contributors
5 | # SPDX-License-Identifier: Apache-2.0
6 |
7 | from unittest.mock import MagicMock
8 |
9 | import pytest
10 | from datacube_ows.ogc_exceptions import WCS2Exception
11 | from datacube_ows.wcs2_utils import uniform_crs
12 |
13 |
14 | @pytest.fixture
15 | def minimal_cfg():
16 | cfg = MagicMock()
17 | cfg.published_CRSs = {
18 | "dummy": {},
19 | }
20 | return cfg
21 |
22 |
23 | def test_uniform_crs_url(minimal_cfg) -> None:
24 | crs = uniform_crs(minimal_cfg, "http://www.opengis.net/def/crs/EPSG/666")
25 | assert crs == "EPSG:666"
26 |
27 |
28 | def test_uniform_crs_urn(minimal_cfg) -> None:
29 | crs = uniform_crs(minimal_cfg, "urn:ogc:def:crs:EPSG:666")
30 | assert crs == "EPSG:666"
31 |
32 |
33 | def test_uniform_crs_epsg(minimal_cfg) -> None:
34 | crs = uniform_crs(minimal_cfg, "EPSG:666")
35 | assert crs == "EPSG:666"
36 |
37 |
38 | def test_uniform_crs_published(minimal_cfg) -> None:
39 | crs = uniform_crs(minimal_cfg, "dummy")
40 | assert crs == "dummy"
41 |
42 |
43 | def test_uniform_crs_published_with_exception(minimal_cfg) -> None:
44 | with pytest.raises(WCS2Exception) as e:
45 | _ = uniform_crs(minimal_cfg, "spam")
46 | assert "spam" in str(e.value)
47 | assert "Not a CRS" in str(e.value)
48 |
--------------------------------------------------------------------------------
/tests/translations/de/LC_MESSAGES/ows_cfg.mo:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/opendatacube/datacube-ows/fad96b5e99f0b26154c0f5432c2540f76efdd46d/tests/translations/de/LC_MESSAGES/ows_cfg.mo
--------------------------------------------------------------------------------
/tests/translations/en/LC_MESSAGES/ows_cfg.mo:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/opendatacube/datacube-ows/fad96b5e99f0b26154c0f5432c2540f76efdd46d/tests/translations/en/LC_MESSAGES/ows_cfg.mo
--------------------------------------------------------------------------------
/tests/utils.py:
--------------------------------------------------------------------------------
1 | # This file is part of datacube-ows, part of the Open Data Cube project.
2 | # See https://opendatacube.org for more information.
3 | #
4 | # Copyright (c) 2017-2024 OWS Contributors
5 | # SPDX-License-Identifier: Apache-2.0
6 |
7 | import datetime
8 |
9 | import numpy as np
10 | import xarray as xr
11 |
12 | MOTO_PORT = "5555"
13 | MOTO_S3_ENDPOINT_URI = "http://127.0.0.1:" + MOTO_PORT
14 |
15 | coords = [
16 | ('x', [
17 | 0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0,
18 | 10.0, 11.0, 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0,
19 | ]),
20 | ('y', [-5.0, -4.0, -3.0, -2.0, -1.0, 0.0, 1.0, 2.0, 3.0, 4.0]),
21 | ('time', [np.datetime64(datetime.date.today(), "ns")])
22 | ]
23 |
24 |
25 | def a_function(a, b: int = 2, c: int = 3, **kwargs) -> list:
26 | return [f"a{a} b{b} c{c}", kwargs]
27 |
28 |
29 | def dummy_da(val, name: str, coords, attrs=None, dtype=np.float64) -> xr.DataArray:
30 | if attrs is None:
31 | attrs = {}
32 | dims = [n for n, a in coords]
33 | data = np.ndarray([len(a) for n, a in coords], dtype=dtype)
34 | coords = dict(coords)
35 | data.fill(val)
36 | output = xr.DataArray(
37 | data,
38 | coords=coords,
39 | dims=dims,
40 | attrs=attrs,
41 | name=name,
42 | )
43 | return output
44 |
45 |
46 | def dim1_da(name: str, vals: list, coords: list, with_time: bool = True, attrs=None) -> xr.DataArray:
47 | if len(vals) != len(coords):
48 |         raise ValueError("vals and coords must have matching lengths")
49 | if attrs is None:
50 | attrs = {}
51 | dims = ["dim"]
52 | shape = [len(coords)]
53 | coords = {
54 | 'dim': coords,
55 | }
56 | if with_time:
57 | dims.append("time")
58 | coords["time"] = [np.datetime64(datetime.date.today(), "ns")]
59 | shape.append(1)
60 | buff_arr = np.array(vals)
61 | data = np.ndarray(shape, buffer=buff_arr, dtype=buff_arr.dtype)
62 | output = xr.DataArray(
63 | data,
64 | coords=coords,
65 | dims=dims,
66 | attrs=attrs,
67 | name=name,
68 | )
69 | return output
70 |
71 | def dim1_da_time(name: str, vals, dates, coords, attrs=None) -> xr.DataArray:
72 | if len(coords) != len(vals):
73 |         raise ValueError("vals and coords must have matching lengths")
74 |     for v in vals:
75 |         if len(v) != len(dates):
76 |             raise ValueError("each row of vals must match dates in length")
77 | dims = ["dim", "time"]
78 | shape = [len(coords), len(dates)]
79 | coords = {
80 | "dim": coords,
81 | "time": [np.datetime64(d, "ns") for d in dates],
82 | }
83 | buff_arr = np.array(vals)
84 | data = np.ndarray(shape, buffer=buff_arr, dtype=buff_arr.dtype)
85 | output = xr.DataArray(
86 | data,
87 | coords=coords,
88 | dims=dims,
89 | attrs=attrs,
90 | name=name,
91 | )
92 | return output
93 |
--------------------------------------------------------------------------------
/update_ranges.py:
--------------------------------------------------------------------------------
1 | # This file is part of datacube-ows, part of the Open Data Cube project.
2 | # See https://opendatacube.org for more information.
3 | #
4 | # Copyright (c) 2017-2024 OWS Contributors
5 | # SPDX-License-Identifier: Apache-2.0
6 |
7 | from datacube_ows.update_ranges_impl import main
8 |
9 | if __name__ == '__main__':
10 | main()
11 |
--------------------------------------------------------------------------------
/wms_xsds/capabilities_extensions.xsd:
--------------------------------------------------------------------------------
1 | [XML schema markup was lost in extraction; the surviving annotation text is
2 | preserved in document order below.]
3 |
4 | - Declare a supported non-standard extension.
5 | - Identify the supported extension.
6 | - URL to documentation of the extension. Preferably readable to both developers
7 |   writing client software to consume the extension AND end users making use of
8 |   the extension through a supporting client.
9 |   E.g. for the user_band_math extension, this documentation should provide:
10 |   1) A high-level, end-user-readable description of why user_band_math is cool and useful.
11 |   2) The syntax for user_band_math equations, targeted at both client implementers AND end users.
12 |   3) How to embed user_band_math equations in GetMap requests - mostly for client
13 |      implementers, but also readable to advanced end users ("power users").
14 |   4) The need for an "available_bands" ExtensionProperty (see below).
15 | - Layers supporting this extension. Note that one Extension will require separate
16 |   SupportedExtension elements for different layers where the layers have
17 |   different ExtensionProperties (see below).
18 | - Extension-specific properties. E.g. for user_band_math, the available_bands.
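19 |
20 | Purely as an illustration, a capabilities fragment matching these annotations
21 | might look like the sketch below. The element nesting is a reconstructed guess
22 | (only SupportedExtension, ExtensionProperty, a name field and a documentation
23 | URL are implied by the surviving text), and the URL, layer and band names are
24 | hypothetical:
25 |
26 |     <SupportedExtension>
27 |       <Name>user_band_math</Name>
28 |       <DocumentationURL>https://example.com/docs/user_band_math</DocumentationURL>
29 |       <SupportedLayers>a_layer</SupportedLayers>
30 |       <ExtensionProperty name="available_bands">red green blue nir</ExtensionProperty>
31 |     </SupportedExtension>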
--------------------------------------------------------------------------------
/wms_xsds/capabilities_extensions_local.xsd:
--------------------------------------------------------------------------------
1 | [XML schema markup was lost in extraction; the surviving annotation text is
2 | identical to wms_xsds/capabilities_extensions.xsd above.]
--------------------------------------------------------------------------------
/wms_xsds/exceptions_1_3_0.xsd:
--------------------------------------------------------------------------------
1 | [XML schema markup was lost in extraction; no annotation text survived.]
--------------------------------------------------------------------------------