├── .flake8 ├── .github ├── ISSUE_TEMPLATE │ ├── bug_report.md │ └── feature_request.md ├── pull_request_template.md └── workflows │ ├── cicd.yml │ ├── pr.yml │ ├── publish-charts-dev.yml │ ├── publish-charts.yml │ ├── publish-func-package-dev.yml │ └── publish-func-package.yml ├── .gitignore ├── .isort.cfg ├── CHANGELOG.md ├── CODE_OF_CONDUCT.md ├── LICENSE ├── README.md ├── SECURITY.md ├── SUPPORT.md ├── auxiliary └── az-cli-proxy │ ├── Dockerfile │ └── main.py ├── azure-pipelines.yml ├── deployment ├── Dockerfile ├── README.md ├── bin │ ├── azlogin │ ├── deploy │ ├── jinja │ ├── lib │ ├── nginx-values.yaml │ ├── publish_func │ └── update_func_index ├── docker-compose.yml ├── helm │ ├── deploy-values.template.yaml │ ├── ingress-nginx-4.8.3.tgz │ ├── pc-apis-ingress │ │ ├── Chart.yaml │ │ ├── templates │ │ │ ├── NOTES.txt │ │ │ ├── _helpers.tpl │ │ │ ├── cluster_issuer.yaml │ │ │ ├── ingress.yaml │ │ │ ├── nginx-configmap.yaml │ │ │ └── secret-provider.yaml │ │ └── values.yaml │ └── published │ │ ├── planetary-computer-stac │ │ ├── Chart.yaml │ │ ├── templates │ │ │ ├── NOTES.txt │ │ │ ├── _helpers.tpl │ │ │ ├── deployment.yaml │ │ │ ├── service.yaml │ │ │ └── serviceaccount.yaml │ │ └── values.yaml │ │ └── planetary-computer-tiler │ │ ├── Chart.yaml │ │ ├── templates │ │ ├── NOTES.txt │ │ ├── _helpers.tpl │ │ ├── deployment.yaml │ │ ├── service.yaml │ │ └── serviceaccount.yaml │ │ └── values.yaml └── terraform │ ├── dev │ └── main.tf │ ├── resources │ ├── acr.tf │ ├── ai.tf │ ├── aks.tf │ ├── azm.tf │ ├── functions.tf │ ├── ip.tf │ ├── keyvault.tf │ ├── maps.tf │ ├── output.tf │ ├── providers.tf │ ├── redis.tf │ ├── rg.tf │ ├── storage_account.tf │ ├── variables.tf │ └── vnet.tf │ └── staging │ └── main.tf ├── docker-compose.dev.yml ├── docker-compose.yml ├── docs ├── 01-deployment.md └── collection-config.md ├── mypy.ini ├── nginx ├── Dockerfile └── etc │ └── nginx │ ├── conf.d │ └── default.conf │ └── nginx.conf ├── pc-funcs.dev.env ├── pc-stac.dev.env ├── 
pc-tiler.dev.env ├── pccommon ├── pccommon │ ├── __init__.py │ ├── backoff.py │ ├── blob.py │ ├── cdn.py │ ├── cli.py │ ├── config │ │ ├── __init__.py │ │ ├── collections.py │ │ ├── containers.py │ │ └── core.py │ ├── constants.py │ ├── credential.py │ ├── logging.py │ ├── middleware.py │ ├── openapi.py │ ├── py.typed │ ├── redis.py │ ├── tables.py │ ├── tracing.py │ ├── utils.py │ └── version.py ├── pyproject.toml ├── requirements.txt └── tests │ ├── __init__.py │ ├── config │ ├── __init__.py │ ├── test_mosaic_info.py │ ├── test_render_config.py │ └── test_table_settings.py │ ├── data-files │ ├── collection_config.json │ └── container_config.json │ ├── data │ ├── __init__.py │ └── cql.py │ ├── test_timeouts.py │ ├── test_tracing.py │ └── test_utils.py ├── pcfuncs ├── .funcignore ├── Dockerfile ├── animation │ ├── __init__.py │ ├── animation.py │ ├── constants.py │ ├── function.json │ ├── models.py │ ├── settings.py │ └── utils.py ├── funclib │ ├── __init__.py │ ├── errors.py │ ├── models.py │ ├── raster.py │ ├── resources.py │ ├── resources │ │ ├── DejaVuSans.ttf │ │ ├── ms-logo-gray-sized.jpg │ │ └── ms-logo-sized.jpg │ ├── settings.py │ ├── stamps │ │ ├── branding.py │ │ ├── progress_bar.py │ │ └── stamp.py │ └── tiles.py ├── host.json ├── image │ ├── __init__.py │ ├── function.json │ ├── models.py │ ├── settings.py │ └── utils.py ├── ipban │ ├── __init__.py │ ├── config.py │ ├── function.json │ └── models.py ├── pytest.ini ├── requirements-deploy.txt ├── requirements.txt ├── test.geojson └── tests │ ├── __init__.py │ ├── conftest.py │ ├── data-files │ └── s2.png │ ├── funclib │ ├── __init__.py │ ├── test_models.py │ ├── test_raster.py │ └── test_tiles.py │ ├── image │ └── test_models.py │ └── ipban │ ├── __init__.py │ └── test_ipban.py ├── pcstac ├── Dockerfile ├── Dockerfile.dev ├── pcstac │ ├── __init__.py │ ├── api.py │ ├── client.py │ ├── config.py │ ├── contants.py │ ├── errors.py │ ├── filter.py │ ├── main.py │ ├── search.py │ ├── tiles.py │ └── 
version.py ├── pyproject.toml ├── requirements-server.txt └── tests │ ├── __init__.py │ ├── api │ ├── __init__.py │ └── test_api.py │ ├── conftest.py │ ├── data-files │ └── naip │ │ ├── collection.json │ │ └── items │ │ ├── al_m_3008501_ne_16_060_20191109_20200114.json │ │ ├── al_m_3008501_nw_16_060_20191109_20200114.json │ │ ├── al_m_3008502_ne_16_060_20191109_20200114.json │ │ ├── al_m_3008502_nw_16_060_20191109_20200114.json │ │ ├── al_m_3008503_ne_16_060_20191118_20200114.json │ │ ├── al_m_3008503_nw_16_060_20191118_20200114.json │ │ ├── al_m_3008504_ne_16_060_20191118_20200114.json │ │ ├── al_m_3008504_nw_16_060_20191118_20200114.json │ │ ├── al_m_3008505_ne_16_060_20191118_20200114.json │ │ ├── al_m_3008505_nw_16_060_20191118_20200114.json │ │ ├── al_m_3008506_ne_16_060_20191118_20200114.json │ │ └── al_m_3008506_nw_16_060_20191118_20200114.json │ ├── loadtestdata.py │ ├── resources │ ├── __init__.py │ ├── test_collection.py │ ├── test_conformance.py │ ├── test_item.py │ ├── test_mgmt.py │ └── test_queryables.py │ ├── test_headers.py │ ├── test_rate_limit.py │ └── util │ ├── __init__.py │ ├── ingest.py │ └── test_data.py ├── pctiler ├── Dockerfile ├── Dockerfile.dev ├── MANIFEST.in ├── pctiler │ ├── __init__.py │ ├── collections.py │ ├── colormaps │ │ ├── __init__.py │ │ ├── alos_palsar_mosaic.py │ │ ├── chloris.py │ │ ├── dependencies.py │ │ ├── io_bii.py │ │ ├── jrc.py │ │ ├── lidarusgs.py │ │ ├── lulc.py │ │ ├── modis.py │ │ ├── mtbs.py │ │ ├── noaa_c_cap.py │ │ ├── qpe.py │ │ └── viirs.py │ ├── config.py │ ├── endpoints │ │ ├── __init__.py │ │ ├── configuration.py │ │ ├── dependencies.py │ │ ├── health.py │ │ ├── item.py │ │ ├── legend.py │ │ ├── pg_mosaic.py │ │ ├── templates │ │ │ └── item_preview.html │ │ └── vector_tiles.py │ ├── errors.py │ ├── main.py │ ├── middleware.py │ ├── models.py │ ├── reader.py │ ├── reader_vector_tile.py │ └── version.py ├── pyproject.toml ├── requirements-dev.txt ├── requirements-server.txt └── tests │ ├── __init__.py │ 
├── conftest.py │ ├── data-files │ └── naip │ │ └── collection.json │ ├── endpoints │ ├── __init__.py │ ├── test_colormaps.py │ ├── test_config.py │ ├── test_legends.py │ ├── test_pg_item.py │ └── test_pg_mosaic.py │ ├── test_asset_read.py │ ├── test_openapi.py │ └── test_routes.py ├── pgstac ├── Dockerfile └── REAMDE.md ├── pytest.ini ├── requirements-dev.txt └── scripts ├── bin ├── format-common ├── format-funcs ├── format-scripts ├── format-stac ├── format-tiler ├── setup_azurite.py ├── test-common ├── test-funcs ├── test-stac └── test-tiler ├── ciauthenticate ├── cibuild ├── cideploy ├── cipublish ├── cipublish-func ├── console ├── env ├── format ├── format-local ├── generate-requirements ├── install ├── migrate ├── server ├── setup ├── test ├── update └── validate /.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | max-line-length = 88 3 | extend-ignore = E203, W503 4 | exclude = 5 | .git 6 | __pycache__ 7 | .venv -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Report an issue with the Microsoft Planetary Computer APIs 4 | title: '' 5 | labels: bug 6 | assignees: '' 7 | 8 | --- 9 | 10 | __Note__: This repository contains a reference implementation; as such, bugs in deployment or usage of this project in other environments besides the Microsoft Planetary Computer have no guarantees of support. See SUPPORT.md for more information. 11 | 12 | **Describe the bug** 13 | A clear and concise description of what the bug is. 14 | 15 | **To reproduce** 16 | Steps to reproduce the behavior: 17 | 18 | > Ex. 19 | > 20 | > 1. Install stactools 21 | > 2. Run `scripts/test` 22 | > 3. See error 23 | 24 | **Expected behavior** 25 | A clear and concise description of what you expected to happen. 
26 | 27 | **Screenshots and shell session dumps** 28 | If applicable, add session dumps and/or screenshots to help explain your problem. 29 | 30 | > ex. `scripts/lint >> lint_errors.txt` 31 | 32 | **Additional context** 33 | Add any other context about the problem here. 34 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for the Microsoft Planetary Computer 4 | title: '' 5 | labels: enhancement 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Is your feature request related to a problem? Please describe.** 11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 12 | 13 | **Describe the solution you'd like** 14 | A clear and concise description of what you want to happen. Ex. I would like to use stac to do [...] 15 | 16 | **Describe alternatives you've considered** 17 | A clear and concise description of any alternative solutions or features you've considered. 18 | 19 | **Additional context** 20 | Add any other context or screenshots about the feature request here. -------------------------------------------------------------------------------- /.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | ## Description 2 | 3 | Please include a summary of the change and which issue is fixed. Please also include relevant motivation and context. List any dependencies that are required for this change. 4 | 5 | Fixes # (issue) 6 | 7 | ## Type of change 8 | 9 | Please delete options that are not relevant. 
10 | 11 | - [ ] Bug fix (non-breaking change which fixes an issue) 12 | - [ ] New feature (non-breaking change which adds functionality) 13 | - [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected) 14 | - [ ] This change requires a documentation update 15 | 16 | ## How Has This Been Tested? 17 | 18 | Please describe the tests that you ran to verify your changes. Provide instructions so we can reproduce. Please also list any relevant details for your test configuration 19 | 20 | ## Checklist: 21 | 22 | Please delete options that are not relevant. 23 | 24 | - [ ] I have performed a self-review 25 | - [ ] Changelog has been updated 26 | - [ ] Documentation has been updated 27 | - [ ] Unit tests pass locally (./scripts/test) 28 | - [ ] Code is linted and styled (./scripts/format) -------------------------------------------------------------------------------- /.github/workflows/pr.yml: -------------------------------------------------------------------------------- 1 | name: Planetary Computer APIs PR CI 2 | 3 | on: 4 | pull_request: 5 | branches: [main] 6 | 7 | jobs: 8 | build: 9 | runs-on: ubuntu-latest 10 | 11 | steps: 12 | - uses: actions/checkout@v3 13 | 14 | - name: Set Azurite Default Key 15 | run: echo "AZURITE_ACCOUNT_KEY=$(curl https://learn.microsoft.com/en-us/azure/storage/common/storage-use-azurite | grep "Account key:" | cut -b 24-111)" >> $GITHUB_ENV 16 | 17 | - name: Verify Azurite Key was retrieved correctly 18 | if: "!startsWith(env.AZURITE_ACCOUNT_KEY, 'Eby8')" 19 | run: echo Failed to find key at learn.microsoft.com && exit 1 20 | 21 | - name: Run cibuild 22 | run: ./scripts/cibuild 23 | 24 | validate: 25 | runs-on: ubuntu-latest 26 | steps: 27 | - uses: actions/checkout@v3 28 | - uses: actions/setup-python@v4 29 | with: 30 | python-version: "3.10" # stac-api-validator requires >= 3.10 31 | cache: "pip" 32 | 33 | - name: Set Azurite Default Key 34 | run: echo "AZURITE_ACCOUNT_KEY=$(curl 
https://learn.microsoft.com/en-us/azure/storage/common/storage-use-azurite | grep "Account key:" | cut -b 24-111)" >> $GITHUB_ENV 35 | 36 | - name: API Validator 37 | run: ./scripts/validate 38 | -------------------------------------------------------------------------------- /.github/workflows/publish-charts-dev.yml: -------------------------------------------------------------------------------- 1 | name: Publish charts (dev) 2 | 3 | on: 4 | push: 5 | branches: [main] 6 | workflow_dispatch: 7 | 8 | defaults: 9 | run: 10 | shell: bash 11 | 12 | jobs: 13 | build: 14 | runs-on: ubuntu-20.04 15 | permissions: 16 | contents: write 17 | 18 | steps: 19 | - uses: actions/checkout@v3 20 | with: 21 | fetch-depth: 0 # Required due to the way Git works, without it this action won't be able to find any or the correct tags 22 | 23 | - name: "Get Previous tag" 24 | id: previoustag 25 | uses: "WyriHaximus/github-action-get-previous-tag@v1.2.2" 26 | with: 27 | fallback: 2022.2.0 28 | 29 | - name: "Get next minor version" 30 | id: semvers 31 | uses: "WyriHaximus/github-action-next-semvers@v1" 32 | with: 33 | version: ${{ steps.previoustag.outputs.tag }} 34 | 35 | - name: Publish Helm charts 36 | uses: stefanprodan/helm-gh-pages@master 37 | with: 38 | token: ${{ secrets.GITHUB_TOKEN }} 39 | charts_dir: "deployment/helm/published" 40 | linting: "off" 41 | helm_version: 3.5.4 42 | chart_version: ${{steps.semvers.outputs.minor}}-dev 43 | -------------------------------------------------------------------------------- /.github/workflows/publish-charts.yml: -------------------------------------------------------------------------------- 1 | name: Publish charts (release) 2 | 3 | on: 4 | push: 5 | tags: ["*"] 6 | workflow_dispatch: 7 | 8 | defaults: 9 | run: 10 | shell: bash 11 | 12 | jobs: 13 | build: 14 | runs-on: ubuntu-20.04 15 | permissions: 16 | contents: write 17 | 18 | steps: 19 | - uses: actions/checkout@v2 20 | 21 | - name: Get tag 22 | id: previoustag 23 | uses: 
"WyriHaximus/github-action-get-previous-tag@v1" 24 | 25 | - name: Publish Helm charts 26 | uses: stefanprodan/helm-gh-pages@master 27 | with: 28 | token: ${{ secrets.GITHUB_TOKEN }} 29 | charts_dir: "deployment/helm/published" 30 | linting: "off" 31 | helm_version: 3.5.4 32 | chart_version: ${{steps.previoustag.outputs.tag}} 33 | -------------------------------------------------------------------------------- /.github/workflows/publish-func-package-dev.yml: -------------------------------------------------------------------------------- 1 | name: Publish function package (dev) 2 | 3 | on: 4 | push: 5 | branches: [main] 6 | workflow_dispatch: 7 | 8 | defaults: 9 | run: 10 | shell: bash 11 | 12 | jobs: 13 | publish: 14 | runs-on: ubuntu-20.04 15 | permissions: 16 | contents: write 17 | 18 | steps: 19 | - uses: actions/checkout@v3 20 | with: 21 | fetch-depth: 0 22 | 23 | - name: "Get Previous tag" 24 | id: previoustag 25 | uses: "WyriHaximus/github-action-get-previous-tag@v1.2.2" 26 | with: 27 | fallback: 2022.2.0 28 | 29 | - name: "Get next minor version" 30 | id: semvers 31 | uses: "WyriHaximus/github-action-next-semvers@v1" 32 | with: 33 | version: ${{ steps.previoustag.outputs.tag }} 34 | 35 | - name: "Publish package" 36 | env: 37 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 38 | run: ./scripts/cipublish-func -t "${{ steps.semvers.outputs.minor }}-dev" 39 | -------------------------------------------------------------------------------- /.github/workflows/publish-func-package.yml: -------------------------------------------------------------------------------- 1 | name: Publish function package (release) 2 | 3 | on: 4 | push: 5 | tags: ["*"] 6 | workflow_dispatch: 7 | 8 | defaults: 9 | run: 10 | shell: bash 11 | 12 | jobs: 13 | publish: 14 | runs-on: ubuntu-20.04 15 | permissions: 16 | contents: write 17 | 18 | steps: 19 | - uses: actions/checkout@v3 20 | with: 21 | fetch-depth: 0 22 | 23 | - name: "Get tag" 24 | id: previoustag 25 | uses: 
"WyriHaximus/github-action-get-previous-tag@v1.2.2" 26 | with: 27 | fallback: 2022.2.0 28 | 29 | - name: "Publish package" 30 | env: 31 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 32 | run: ./scripts/cipublish-func -t "${{ steps.previoustag.outputs.tag }}" 33 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | pc-stac.*.env 2 | pc-tiler.*.env 3 | pc-funcs.*.env 4 | 5 | # Byte-compiled / optimized / DLL files 6 | __pycache__/ 7 | *.py[cod] 8 | *$py.class 9 | 10 | # C extensions 11 | *.so 12 | 13 | # Distribution / packaging 14 | .Python 15 | build/ 16 | develop-eggs/ 17 | dist/ 18 | downloads/ 19 | eggs/ 20 | .eggs/ 21 | lib/ 22 | lib64/ 23 | parts/ 24 | sdist/ 25 | var/ 26 | wheels/ 27 | *.egg-info/ 28 | .installed.cfg 29 | *.egg 30 | MANIFEST 31 | pyvenv.cfg 32 | 33 | # PyInstaller 34 | # Usually these files are written by a python script from a template 35 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
36 | *.manifest 37 | *.spec 38 | 39 | # Installer logs 40 | pip-log.txt 41 | pip-delete-this-directory.txt 42 | 43 | # Unit test / coverage reports 44 | htmlcov/ 45 | .tox/ 46 | .coverage 47 | .coverage.* 48 | .cache 49 | nosetests.xml 50 | coverage.xml 51 | *.cover 52 | .hypothesis/ 53 | .pytest_cache/ 54 | 55 | # Translations 56 | *.mo 57 | *.pot 58 | 59 | # Django stuff: 60 | *.log 61 | local_settings.py 62 | db.sqlite3 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | target/ 76 | 77 | # Jupyter Notebook 78 | .ipynb_checkpoints 79 | 80 | # pyenv 81 | .python-version 82 | 83 | # celery beat schedule file 84 | celerybeat-schedule 85 | 86 | # SageMath parsed files 87 | *.sage.py 88 | 89 | # Environments 90 | .venv 91 | env/ 92 | venv/ 93 | ENV/ 94 | env.bak/ 95 | venv.bak/ 96 | 97 | # Spyder project settings 98 | .spyderproject 99 | .spyproject 100 | 101 | # Rope project settings 102 | .ropeproject 103 | 104 | # mkdocs documentation 105 | /site 106 | 107 | # mypy 108 | .mypy_cache/ 109 | 110 | # Terraform 111 | .terraform 112 | *.tfstate* 113 | .terraform.lock* 114 | override.tf 115 | 116 | .DS_Store 117 | 118 | # Generated queryable schemas 119 | queryable_schemas/ 120 | 121 | .metals/ 122 | 123 | kubeconfig 124 | 125 | # gh-pages 126 | Gemfile 127 | Gemfile.lock 128 | _site/ 129 | -------------------------------------------------------------------------------- /.isort.cfg: -------------------------------------------------------------------------------- 1 | [settings] 2 | profile = black 3 | multi_line_output = 3 4 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Microsoft Open Source Code of Conduct 2 | 3 | This project has adopted the [Microsoft Open Source Code of 
Conduct](https://opensource.microsoft.com/codeofconduct/). 4 | 5 | Resources: 6 | 7 | - [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/) 8 | - [Microsoft Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) 9 | - Contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with questions or concerns 10 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) Microsoft Corporation. 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE 22 | -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | ## Security 2 | 3 | Microsoft takes the security of our software products and services seriously, which includes all source code repositories managed through our GitHub organizations, which include [Microsoft](https://github.com/Microsoft), [Azure](https://github.com/Azure), [DotNet](https://github.com/dotnet), [AspNet](https://github.com/aspnet), [Xamarin](https://github.com/xamarin), and [our GitHub organizations](https://opensource.microsoft.com/). 4 | 5 | If you believe you have found a security vulnerability in any Microsoft-owned repository that meets [Microsoft's definition of a security vulnerability](https://docs.microsoft.com/en-us/previous-versions/tn-archive/cc751383(v=technet.10)), please report it to us as described below. 6 | 7 | ## Reporting Security Issues 8 | 9 | **Please do not report security vulnerabilities through public GitHub issues.** 10 | 11 | Instead, please report them to the Microsoft Security Response Center (MSRC) at [https://msrc.microsoft.com/create-report](https://msrc.microsoft.com/create-report). 12 | 13 | If you prefer to submit without logging in, send email to [secure@microsoft.com](mailto:secure@microsoft.com). If possible, encrypt your message with our PGP key; please download it from the [Microsoft Security Response Center PGP Key page](https://www.microsoft.com/en-us/msrc/pgp-key-msrc). 14 | 15 | You should receive a response within 24 hours. If for some reason you do not, please follow up via email to ensure we received your original message. 
Additional information can be found at [microsoft.com/msrc](https://www.microsoft.com/msrc). 16 | 17 | Please include the requested information listed below (as much as you can provide) to help us better understand the nature and scope of the possible issue: 18 | 19 | * Type of issue (e.g. buffer overflow, SQL injection, cross-site scripting, etc.) 20 | * Full paths of source file(s) related to the manifestation of the issue 21 | * The location of the affected source code (tag/branch/commit or direct URL) 22 | * Any special configuration required to reproduce the issue 23 | * Step-by-step instructions to reproduce the issue 24 | * Proof-of-concept or exploit code (if possible) 25 | * Impact of the issue, including how an attacker might exploit the issue 26 | 27 | This information will help us triage your report more quickly. 28 | 29 | If you are reporting for a bug bounty, more complete reports can contribute to a higher bounty award. Please visit our [Microsoft Bug Bounty Program](https://microsoft.com/msrc/bounty) page for more details about our active programs. 30 | 31 | ## Preferred Languages 32 | 33 | We prefer all communications to be in English. 34 | 35 | ## Policy 36 | 37 | Microsoft follows the principle of [Coordinated Vulnerability Disclosure](https://www.microsoft.com/en-us/msrc/cvd). 38 | -------------------------------------------------------------------------------- /SUPPORT.md: -------------------------------------------------------------------------------- 1 | # Support 2 | 3 | This repository contains the code for Planetary Computer APIs and example deployment of those components into Azure. It is meant to serve as a reference implementation for combining various open source libraries into a set of services that deploy on Azure. As this is a reference implementation and not not intended to be libraries or tools for others to run, users utilizing the code in this repository do so at their own risk, without an expectation of support. 
Feel free to file Issues for problems specifically with the APIs contained in this repository, with the understanding that there is no guarantee that issues in this repository will be addressed, particularly if the issues pertain to an external utilization of this code. 4 | 5 | ## Microsoft Support Policy 6 | 7 | Support for this project is limited to the resources listed above. 8 | -------------------------------------------------------------------------------- /auxiliary/az-cli-proxy/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM mcr.microsoft.com/azure-cli:cbl-mariner2.0 2 | 3 | # URL used to download the packages from the CFS 4 | ARG INDEX_URL 5 | ENV PIP_INDEX_URL=$INDEX_URL 6 | 7 | # Setup pip and server dependencies 8 | RUN python3 -m ensurepip --upgrade 9 | RUN pip3 install fastapi uvicorn[standard] azure-identity 10 | 11 | WORKDIR /opt/src 12 | 13 | COPY . /opt/src 14 | 15 | CMD uvicorn main:app --host 0.0.0.0 --port 8086 --reload --log-level info 16 | -------------------------------------------------------------------------------- /auxiliary/az-cli-proxy/main.py: -------------------------------------------------------------------------------- 1 | import time 2 | from typing import Any, Optional 3 | from typing import Dict 4 | 5 | from azure.core.credentials import AccessToken 6 | from azure.identity import AzureCliCredential 7 | from fastapi import FastAPI 8 | 9 | app = FastAPI() 10 | 11 | 12 | class TokenProvider: 13 | _instance: Optional["TokenProvider"] = None 14 | 15 | _tokens: Dict[str, Optional[AccessToken]] = {} 16 | 17 | def __init__(self) -> None: 18 | self._token = None 19 | 20 | def get_token(self, resource: str) -> AccessToken: 21 | token = self._tokens.get(resource) 22 | if token is None or token.expires_on < time.time() - 5: 23 | token = AzureCliCredential().get_token(resource) 24 | self._tokens[resource] = token 25 | assert token is not None # neede for mypy 26 | return token 27 | 28 | 
@classmethod 29 | def get_instance(cls) -> "TokenProvider": 30 | if cls._instance is None: 31 | cls._instance = cls() 32 | return cls._instance 33 | 34 | 35 | @app.get("/dev/token") 36 | async def cli_token(resource: str = "") -> Dict[str, Any]: 37 | """Uses the az cli credential to get a token for the given resource. This is 38 | meant to mimic the behavior of using managed identities in other spatio 39 | services in the development environment.""" 40 | accessToken = TokenProvider.get_instance().get_token(resource) 41 | return { 42 | "access_token": accessToken.token, 43 | "expires_on": accessToken.expires_on, 44 | "resource": resource, 45 | } 46 | -------------------------------------------------------------------------------- /deployment/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM mcr.microsoft.com/azurelinux/base/python:3.12 2 | 3 | RUN tdnf install -y \ 4 | ca-certificates \ 5 | build-essential \ 6 | tar \ 7 | wget \ 8 | unzip \ 9 | jq \ 10 | git \ 11 | azure-cli \ 12 | && tdnf clean all 13 | 14 | # Install Azure Function Tools 15 | RUN wget https://github.com/Azure/azure-functions-core-tools/releases/download/4.0.5700/Azure.Functions.Cli.linux-x64.4.0.5700.zip \ 16 | && unzip Azure.Functions.Cli.linux-x64.4.0.5700.zip -d /usr/local/azure-functions-core-tools-4 \ 17 | && chmod +x /usr/local/azure-functions-core-tools-4/func \ 18 | && chmod +x /usr/local/azure-functions-core-tools-4/gozip \ 19 | && ln -s /usr/local/azure-functions-core-tools-4/func /usr/local/bin/func \ 20 | && ln -s /usr/local/azure-functions-core-tools-4/gozip /usr/local/bin/gozip 21 | 22 | # Install Terraform 23 | RUN wget -O terraform.zip https://releases.hashicorp.com/terraform/1.11.2/terraform_1.11.2_linux_amd64.zip \ 24 | && unzip terraform.zip \ 25 | && mv terraform /usr/local/bin 26 | 27 | # Install kubectl 28 | RUN curl -LO "https://dl.k8s.io/release/$(curl -L -s https://dl.k8s.io/release/stable.txt)/bin/linux/amd64/kubectl" \ 
29 | && install -o root -g root -m 0755 kubectl /usr/local/bin/kubectl 30 | 31 | # Install Helm 32 | RUN wget https://get.helm.sh/helm-v3.14.4-linux-amd64.tar.gz \ 33 | && tar -zxvf helm-v3.14.4-linux-amd64.tar.gz \ 34 | && mv linux-amd64/helm /usr/local/bin/helm 35 | 36 | # Install kubelogin 37 | RUN curl -sL https://github.com/Azure/kubelogin/releases/download/v0.2.8/kubelogin-linux-amd64.zip --output kubelogin.zip \ 38 | && unzip -j kubelogin.zip bin/linux_amd64/kubelogin -d /usr/local/bin/ \ 39 | && rm -rf kubelogin.zip 40 | 41 | # Install Jinja 42 | RUN pip install Jinja2 pyyaml==6.0.2 43 | 44 | WORKDIR /opt/src 45 | -------------------------------------------------------------------------------- /deployment/bin/azlogin: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -e 4 | 5 | if [[ "${CI}" ]]; then 6 | set -x 7 | fi 8 | 9 | function usage() { 10 | echo -n \ 11 | "Usage: $(basename "$0") 12 | Login to Azure CLI 13 | " 14 | } 15 | 16 | while [[ "$#" -gt 0 ]]; do case $1 in 17 | *) 18 | usage "Unknown parameter passed: $1" 19 | shift 20 | shift 21 | ;; 22 | esac done 23 | 24 | 25 | if [ "${BASH_SOURCE[0]}" = "${0}" ]; then 26 | 27 | az login --service-principal \ 28 | --username ${ARM_CLIENT_ID} \ 29 | --password ${ARM_CLIENT_SECRET} \ 30 | --tenant ${ARM_TENANT_ID} 31 | 32 | fi 33 | -------------------------------------------------------------------------------- /deployment/bin/jinja: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | 3 | import argparse 4 | import json 5 | import os 6 | 7 | from jinja2 import StrictUndefined, Template 8 | 9 | description = ( 10 | "Apply a terrform output JSON file " "and the environment to a Jinja template." 
11 | ) 12 | 13 | epilog = ( 14 | 'The template will be applied with an "env" object ' 15 | 'with the the values in os.environ, and a "tf" object that ' 16 | "contains the contents of the terraform output JSON" 17 | ) 18 | 19 | parser = argparse.ArgumentParser(description=description) 20 | parser.add_argument("tf_output_file", help="JSON file holding the terraform output") 21 | parser.add_argument("template_file", help="File containing the text of the template") 22 | parser.add_argument("output_file", help="Path to the resulting rendered template") 23 | 24 | 25 | def template(values, template_text): 26 | template = Template(template_text, undefined=StrictUndefined) 27 | return template.render(**values) 28 | 29 | 30 | if __name__ == "__main__": 31 | args = parser.parse_args() 32 | 33 | values = {"env": os.environ.copy(), "tf": {}} 34 | 35 | with open(args.tf_output_file) as f: 36 | values["tf"] = json.load(f)["resources"]["value"] 37 | 38 | with open(args.template_file) as f: 39 | template_text = f.read() 40 | 41 | result = template(values, template_text) 42 | 43 | with open(args.output_file, "w") as f: 44 | f.write(result) 45 | -------------------------------------------------------------------------------- /deployment/bin/nginx-values.yaml: -------------------------------------------------------------------------------- 1 | controller: 2 | podLabels: 3 | azure.workload.identity/use: "true" 4 | extraVolumes: 5 | - name: secrets-store-inline 6 | csi: 7 | driver: secrets-store.csi.k8s.io 8 | readOnly: true 9 | volumeAttributes: 10 | secretProviderClass: "keyvault" 11 | extraVolumeMounts: 12 | - name: secrets-store-inline 13 | mountPath: "/mnt/secrets-store" 14 | readOnly: true 15 | extraArgs: 16 | default-ssl-certificate: pc/planetarycomputer-test-certificate -------------------------------------------------------------------------------- /deployment/bin/update_func_index: -------------------------------------------------------------------------------- 1 | 
#!/usr/bin/python3 2 | 3 | import argparse 4 | import yaml 5 | 6 | from datetime import datetime, timezone 7 | 8 | description = "Publish a new version of the Planetary Computer pcfuncs package." 9 | 10 | 11 | parser = argparse.ArgumentParser(description=description) 12 | parser.add_argument("pkg_name", help="name of package being published") 13 | parser.add_argument("version", help="published version") 14 | 15 | BASE_URL = "https://microsoft.github.io/planetary-computer-apis/func-packages" 16 | 17 | created = datetime.now(tz=timezone.utc).isoformat() 18 | 19 | 20 | def update_index(pkg_name, version): 21 | entry = { 22 | "name": pkg_name, 23 | "created": created, 24 | "version": version, 25 | "url": f"{BASE_URL}/{pkg_name}.zip", 26 | } 27 | 28 | with open("func-index.yaml", "r") as f: 29 | index = yaml.safe_load(f) 30 | packages = index["packages"] or [] 31 | 32 | # Updates to the same version overwrite any existing entry 33 | updatable_packages = [p for p in packages if p["version"] != version] 34 | updatable_packages.append(entry) 35 | index["packages"] = updatable_packages 36 | index["generated"] = created 37 | 38 | with open("func-index.yaml", "w") as f: 39 | yaml.safe_dump(index, f, default_flow_style=False) 40 | 41 | 42 | if __name__ == "__main__": 43 | args = parser.parse_args() 44 | update_index(args.pkg_name, args.version) 45 | -------------------------------------------------------------------------------- /deployment/docker-compose.yml: -------------------------------------------------------------------------------- 1 | services: 2 | deploy: 3 | container_name: pc-apis-deploy 4 | image: pc-apis-deploy 5 | build: 6 | context: .. 
7 | dockerfile: deployment/Dockerfile 8 | environment: 9 | - ACR_STAC_REPO=${ACR_STAC_REPO:-pccomponentstest.azurecr.io/planetary-computer-apis/stac} 10 | - ACR_TILER_REPO=${ACR_TILER_REPO:-pccomponentstest.azurecr.io/planetary-computer-apis/tiler} 11 | - IMAGE_TAG=${IMAGE_TAG:-latest} 12 | - GIT_COMMIT 13 | 14 | - ARM_SUBSCRIPTION_ID=${ARM_SUBSCRIPTION_ID:-a84a690d-585b-4c7c-80d9-851a48af5a50} 15 | - ARM_TENANT_ID=${ARM_TENANT_ID:-72f988bf-86f1-41af-91ab-2d7cd011db47} 16 | - ARM_CLIENT_ID 17 | - ARM_USE_OIDC 18 | - ARM_OIDC_TOKEN 19 | - ACTIONS_ID_TOKEN_REQUEST_URL 20 | - ACTIONS_ID_TOKEN_REQUEST_TOKEN 21 | - ARM_OIDC_REQUEST_TOKEN 22 | - ARM_OIDC_REQUEST_URL 23 | 24 | # Used in the dev stack as an identifier 25 | - TF_VAR_username=${USER} 26 | 27 | # Used in function deployment injected by GH Actions 28 | - GITHUB_TOKEN 29 | - GITHUB_REPOSITORY 30 | - GITHUB_ACTOR 31 | 32 | # Used to open KV firewall for accessing tf.secrets 33 | - KEY_VAULT_NAME=pc-test-deploy-secrets 34 | - KEY_VAULT_RESOURCE_GROUP_NAME=pc-test-manual-resources 35 | working_dir: /opt/src/deployment 36 | volumes: 37 | - ../deployment:/opt/src/deployment 38 | - ../pccommon:/opt/src/pccommon:ro 39 | - ../pcfuncs:/opt/src/pcfuncs:ro 40 | - ~/.azure:/root/.azure 41 | -------------------------------------------------------------------------------- /deployment/helm/ingress-nginx-4.8.3.tgz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/microsoft/planetary-computer-apis/cc8cf429cf2a5206ff14b492e4d26ffad62749f3/deployment/helm/ingress-nginx-4.8.3.tgz -------------------------------------------------------------------------------- /deployment/helm/pc-apis-ingress/Chart.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v2 2 | name: pc-apis-ingress 3 | description: A Helm chart for the ingress for Planetary Computer APIs 4 | type: application 5 | version: 0.1.0 6 | appVersion: 0.1.0 7 | 
-------------------------------------------------------------------------------- /deployment/helm/pc-apis-ingress/templates/NOTES.txt: -------------------------------------------------------------------------------- 1 | Application information: 2 | {{ include "pcingress.selectorLabels" . }} 3 | Ingress host: {{ .Values.pcingress.ingress.host }} 4 | Service Fullname: {{ include "pcingress.fullname" . }} 5 | Cert enabled: {{ .Values.pcingress.certIssuer.enabled }} -------------------------------------------------------------------------------- /deployment/helm/pc-apis-ingress/templates/_helpers.tpl: -------------------------------------------------------------------------------- 1 | {{/* 2 | Expand the name of the chart. 3 | */}} 4 | {{- define "pcingress.name" -}} 5 | {{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }} 6 | {{- end }} 7 | 8 | {{/* 9 | Create a default fully qualified app name. 10 | We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). 11 | If release name contains chart name it will be used as a full name. 12 | */}} 13 | {{- define "pcingress.fullname" -}} 14 | {{- if .Values.fullnameOverride }} 15 | {{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }} 16 | {{- else }} 17 | {{- $name := default .Chart.Name .Values.nameOverride }} 18 | {{- if contains $name .Release.Name }} 19 | {{- .Release.Name | trunc 63 | trimSuffix "-" }} 20 | {{- else }} 21 | {{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }} 22 | {{- end }} 23 | {{- end }} 24 | {{- end }} 25 | 26 | {{/* 27 | Create chart name and version as used by the chart label. 28 | */}} 29 | {{- define "pcingress.chart" -}} 30 | {{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }} 31 | {{- end }} 32 | 33 | {{/* 34 | Selector labels 35 | */}} 36 | {{- define "pcingress.selectorLabels" -}} 37 | app.kubernetes.io/name: {{ include "pcingress.name" . 
}} 38 | app.kubernetes.io/instance: {{ .Release.Name }} 39 | {{- end }} 40 | 41 | {{/* 42 | Common labels 43 | */}} 44 | {{- define "pcingress.labels" -}} 45 | helm.sh/chart: {{ include "pcingress.chart" . }} 46 | {{ include "pcingress.selectorLabels" . }} 47 | {{- if .Chart.AppVersion }} 48 | app.kubernetes.io/version: {{ .Chart.AppVersion | quote }} 49 | {{- end }} 50 | app.kubernetes.io/managed-by: {{ .Release.Service }} 51 | {{- end }} 52 | 53 | {{/* 54 | Create the name of the service account to use 55 | */}} 56 | {{- define "pcingress.serviceAccountName" -}} 57 | {{- if .Values.serviceAccount.create }} 58 | {{- default (include "pcingress.fullname" .) .Values.serviceAccount.name }} 59 | {{- else }} 60 | {{- default "default" .Values.serviceAccount.name }} 61 | {{- end }} 62 | {{- end }} 63 | -------------------------------------------------------------------------------- /deployment/helm/pc-apis-ingress/templates/cluster_issuer.yaml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/microsoft/planetary-computer-apis/cc8cf429cf2a5206ff14b492e4d26ffad62749f3/deployment/helm/pc-apis-ingress/templates/cluster_issuer.yaml -------------------------------------------------------------------------------- /deployment/helm/pc-apis-ingress/templates/ingress.yaml: -------------------------------------------------------------------------------- 1 | {{- if .Values.pcingress.ingress.enabled -}} 2 | {{- $fullName := include "pcingress.fullname" . -}} 3 | {{- if semverCompare ">=1.14-0" .Capabilities.KubeVersion.GitVersion -}} 4 | apiVersion: networking.k8s.io/v1 5 | {{- else -}} 6 | apiVersion: extensions/v1beta1 7 | {{- end }} 8 | kind: Ingress 9 | metadata: 10 | name: {{ $fullName }} 11 | labels: 12 | {{- include "pcingress.labels" . | nindent 4 }} 13 | {{- with .Values.pcingress.ingress.annotations }} 14 | annotations: 15 | {{- toYaml . 
| nindent 4 }} 16 | {{- end }} 17 | spec: 18 | tls: 19 | - hosts: 20 | - {{ .Values.pcingress.ingress.tlsHost }} 21 | secretName: {{ .Values.secretProvider.kubernetesCertificateSecretName }} 22 | rules: 23 | {{- range .Values.pcingress.ingress.hosts }} 24 | - host: {{ . }} 25 | http: 26 | paths: 27 | {{ if $.Values.stac.enabled }} 28 | - path: {{ $.Values.pcingress.services.stac.path }} 29 | pathType: Exact 30 | backend: 31 | service: 32 | name: {{ $.Values.pcingress.services.stac.name }} 33 | port: 34 | number: {{ $.Values.pcingress.services.stac.port }} 35 | {{- end}} 36 | {{ if $.Values.tiler.enabled }} 37 | - path: {{ $.Values.pcingress.services.tiler.path }} 38 | pathType: Exact 39 | backend: 40 | service: 41 | name: {{ $.Values.pcingress.services.tiler.name }} 42 | port: 43 | number: {{ $.Values.pcingress.services.tiler.port }} 44 | {{- end}} 45 | {{- end }} 46 | {{- end }} -------------------------------------------------------------------------------- /deployment/helm/pc-apis-ingress/templates/nginx-configmap.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ConfigMap 3 | metadata: 4 | annotations: 5 | labels: 6 | app: pc-apis-ingress 7 | name: nginx-configuration 8 | namespace: {{ .Values.namespace }} 9 | data: 10 | use-forwarded-headers: "true" 11 | enable-real-ip: "true" -------------------------------------------------------------------------------- /deployment/helm/pc-apis-ingress/templates/secret-provider.yaml: -------------------------------------------------------------------------------- 1 | {{- if .Values.secretProvider.create -}} 2 | apiVersion: secrets-store.csi.x-k8s.io/v1 3 | kind: SecretProviderClass 4 | metadata: 5 | name: {{ .Values.secretProvider.providerName }} 6 | namespace: {{ .Values.namespace }} 7 | spec: 8 | provider: azure 9 | secretObjects: 10 | - secretName: {{ .Values.secretProvider.kubernetesCertificateSecretName }} 11 | type: kubernetes.io/tls 12 | data: 13 | - 
objectName: {{ .Values.secretProvider.keyvaultCertificateName }} 14 | key: tls.crt 15 | - objectName: {{ .Values.secretProvider.keyvaultCertificateName }} 16 | key: tls.key 17 | parameters: 18 | usePodIdentity: "false" 19 | clientID: "{{ .Values.secretProvider.userAssignedIdentityID }}" 20 | keyvaultName: "{{ .Values.secretProvider.keyvaultName }}" 21 | tenantId: "{{ .Values.secretProvider.tenantId }}" 22 | cloudName: "" 23 | objects: | 24 | array: 25 | - | 26 | objectName: {{ .Values.secretProvider.keyvaultCertificateName }} 27 | objectType: secret 28 | {{- end }} -------------------------------------------------------------------------------- /deployment/helm/pc-apis-ingress/values.yaml: -------------------------------------------------------------------------------- 1 | environment: "staging" 2 | namespace: "default" 3 | 4 | stac: 5 | enabled: true 6 | 7 | tiler: 8 | enabled: true 9 | 10 | secretProvider: 11 | create: true 12 | providerName: "keyvault" 13 | userAssignedIdentityID: "" 14 | tenantId: "" 15 | keyvaultName: "" 16 | keyvaultCertificateName: "" 17 | kubernetesCertificateSecretName: "" 18 | 19 | pcingress: 20 | services: 21 | stac: 22 | path: "" 23 | name: "" 24 | port: "" 25 | tiler: 26 | path: "" 27 | name: "" 28 | port: "" 29 | 30 | cert: 31 | secretName: "" 32 | 33 | certIssuer: 34 | enabled: false 35 | privateKeySecretRef: "letsencrypt-staging" 36 | server: "https://acme-staging-v02.api.letsencrypt.org/directory" 37 | issuerEmail: "" 38 | 39 | ingress: 40 | enabled: false 41 | tlsHost: "" 42 | hosts: [] 43 | annotations: {} 44 | 45 | nameOverride: "" 46 | fullnameOverride: "" 47 | -------------------------------------------------------------------------------- /deployment/helm/published/planetary-computer-stac/Chart.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v2 2 | name: planetary-computer-stac 3 | description: A Helm chart for the Planetary Computer STAC API 4 | type: application 5 | 
version: 0.1.1 6 | appVersion: 0.1.0 7 | -------------------------------------------------------------------------------- /deployment/helm/published/planetary-computer-stac/templates/NOTES.txt: -------------------------------------------------------------------------------- 1 | Application information: 2 | {{ include "pcstac.selectorLabels" . }} 3 | Ingress host: {{ .Values.stac.ingress.host }} 4 | Service Fullname: {{ include "pcstac.fullname" . }} -------------------------------------------------------------------------------- /deployment/helm/published/planetary-computer-stac/templates/_helpers.tpl: -------------------------------------------------------------------------------- 1 | {{/* 2 | Expand the name of the chart. 3 | */}} 4 | {{- define "pcstac.name" -}} 5 | {{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }} 6 | {{- end }} 7 | 8 | {{/* 9 | Create a default fully qualified app name. 10 | We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). 11 | If release name contains chart name it will be used as a full name. 12 | */}} 13 | {{- define "pcstac.fullname" -}} 14 | {{- if .Values.fullnameOverride }} 15 | {{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }} 16 | {{- else }} 17 | {{- $name := default .Chart.Name .Values.nameOverride }} 18 | {{- if contains $name .Release.Name }} 19 | {{- .Release.Name | trunc 63 | trimSuffix "-" }} 20 | {{- else }} 21 | {{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }} 22 | {{- end }} 23 | {{- end }} 24 | {{- end }} 25 | 26 | {{/* 27 | Create chart name and version as used by the chart label. 28 | */}} 29 | {{- define "pcstac.chart" -}} 30 | {{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }} 31 | {{- end }} 32 | 33 | {{/* 34 | Selector labels 35 | */}} 36 | {{- define "pcstac.selectorLabels" -}} 37 | app.kubernetes.io/name: {{ include "pcstac.name" . 
}} 38 | app.kubernetes.io/instance: {{ .Release.Name }} 39 | {{- end }} 40 | 41 | {{/* 42 | Common labels 43 | */}} 44 | {{- define "pcstac.labels" -}} 45 | azure.workload.identity/use: {{ .Values.stac.deploy.useWorkloadIdentity | quote}} 46 | helm.sh/chart: {{ include "pcstac.chart" . }} 47 | {{ include "pcstac.selectorLabels" . }} 48 | {{- if .Chart.AppVersion }} 49 | app.kubernetes.io/version: {{ .Chart.AppVersion | quote }} 50 | {{- end }} 51 | app.kubernetes.io/managed-by: {{ .Release.Service }} 52 | {{- end }} 53 | 54 | {{/* 55 | Create the name of the service account to use 56 | */}} 57 | {{- define "pcstac.serviceAccountName" -}} 58 | {{- if .Values.serviceAccount.create }} 59 | {{- default (include "pcstac.fullname" .) .Values.serviceAccount.name }} 60 | {{- else }} 61 | {{- default "default" .Values.serviceAccount.name }} 62 | {{- end }} 63 | {{- end }} 64 | -------------------------------------------------------------------------------- /deployment/helm/published/planetary-computer-stac/templates/service.yaml: -------------------------------------------------------------------------------- 1 | {{- if .Values.stac.enabled -}} 2 | apiVersion: v1 3 | kind: Service 4 | metadata: 5 | name: {{ include "pcstac.fullname" . }} 6 | labels: 7 | {{- include "pcstac.labels" . | nindent 4 }} 8 | {{- with .Values.stac.service.annotations }} 9 | annotations: 10 | {{- toYaml . | nindent 4 }} 11 | {{- end }} 12 | spec: 13 | type: {{ .Values.stac.service.type }} 14 | ports: 15 | - port: {{ .Values.stac.service.port }} 16 | selector: 17 | {{- include "pcstac.selectorLabels" . 
| nindent 4 }} 18 | {{- end }} 19 | -------------------------------------------------------------------------------- /deployment/helm/published/planetary-computer-stac/templates/serviceaccount.yaml: -------------------------------------------------------------------------------- 1 | {{- if .Values.stac.enabled -}} 2 | {{- if .Values.serviceAccount.create -}} 3 | apiVersion: v1 4 | kind: ServiceAccount 5 | metadata: 6 | name: {{ include "pcstac.serviceAccountName" . }} 7 | labels: 8 | {{- include "pcstac.labels" . | nindent 4 }} 9 | {{- with .Values.stac.deploy.serviceAccount.annotations }} 10 | annotations: 11 | {{- toYaml . | nindent 4 }} 12 | {{- end }} 13 | {{- end }} 14 | {{- end }} 15 | -------------------------------------------------------------------------------- /deployment/helm/published/planetary-computer-tiler/Chart.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v2 2 | name: planetary-computer-tiler 3 | description: A Helm chart for the Planetary Computer tiler 4 | type: application 5 | version: 0.1.0 6 | appVersion: 0.1.0 7 | -------------------------------------------------------------------------------- /deployment/helm/published/planetary-computer-tiler/templates/NOTES.txt: -------------------------------------------------------------------------------- 1 | Application information: 2 | {{ include "pctiler.selectorLabels" . }} 3 | Service Fullname: {{ include "pctiler.fullname" . }} -------------------------------------------------------------------------------- /deployment/helm/published/planetary-computer-tiler/templates/_helpers.tpl: -------------------------------------------------------------------------------- 1 | {{/* 2 | Expand the name of the chart. 3 | */}} 4 | {{- define "pctiler.name" -}} 5 | {{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }} 6 | {{- end }} 7 | 8 | {{/* 9 | Create a default fully qualified app name. 
10 | We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). 11 | If release name contains chart name it will be used as a full name. 12 | */}} 13 | {{- define "pctiler.fullname" -}} 14 | {{- if .Values.fullnameOverride }} 15 | {{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }} 16 | {{- else }} 17 | {{- $name := default .Chart.Name .Values.nameOverride }} 18 | {{- if contains $name .Release.Name }} 19 | {{- .Release.Name | trunc 63 | trimSuffix "-" }} 20 | {{- else }} 21 | {{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }} 22 | {{- end }} 23 | {{- end }} 24 | {{- end }} 25 | 26 | {{/* 27 | Create chart name and version as used by the chart label. 28 | */}} 29 | {{- define "pctiler.chart" -}} 30 | {{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }} 31 | {{- end }} 32 | 33 | {{/* 34 | Selector labels 35 | */}} 36 | {{- define "pctiler.selectorLabels" -}} 37 | app.kubernetes.io/name: {{ include "pctiler.name" . }} 38 | app.kubernetes.io/instance: {{ .Release.Name }} 39 | {{- end }} 40 | 41 | {{/* 42 | Common labels 43 | */}} 44 | {{- define "pctiler.labels" -}} 45 | azure.workload.identity/use: {{ .Values.tiler.deploy.useWorkloadIdentity | quote}} 46 | helm.sh/chart: {{ include "pctiler.chart" . }} 47 | {{ include "pctiler.selectorLabels" . }} 48 | {{- if .Chart.AppVersion }} 49 | app.kubernetes.io/version: {{ .Chart.AppVersion | quote }} 50 | {{- end }} 51 | app.kubernetes.io/managed-by: {{ .Release.Service }} 52 | {{- end }} 53 | 54 | {{/* 55 | Create the name of the service account to use 56 | */}} 57 | {{- define "pctiler.serviceAccountName" -}} 58 | {{- if .Values.serviceAccount.create }} 59 | {{- default (include "pctiler.fullname" .) 
.Values.serviceAccount.name }} 60 | {{- else }} 61 | {{- default "default" .Values.serviceAccount.name }} 62 | {{- end }} 63 | {{- end }} 64 | -------------------------------------------------------------------------------- /deployment/helm/published/planetary-computer-tiler/templates/service.yaml: -------------------------------------------------------------------------------- 1 | {{- if .Values.tiler.enabled -}} 2 | apiVersion: v1 3 | kind: Service 4 | metadata: 5 | name: {{ include "pctiler.fullname" . }} 6 | labels: 7 | {{- include "pctiler.labels" . | nindent 4 }} 8 | {{- with .Values.tiler.service.annotations }} 9 | annotations: 10 | {{- toYaml . | nindent 4 }} 11 | {{- end }} 12 | spec: 13 | type: {{ .Values.tiler.service.type }} 14 | ports: 15 | - port: {{ .Values.tiler.service.port }} 16 | selector: 17 | {{- include "pctiler.selectorLabels" . | nindent 4 }} 18 | {{- end }} -------------------------------------------------------------------------------- /deployment/helm/published/planetary-computer-tiler/templates/serviceaccount.yaml: -------------------------------------------------------------------------------- 1 | {{- if .Values.tiler.enabled -}} 2 | {{- if .Values.serviceAccount.create -}} 3 | apiVersion: v1 4 | kind: ServiceAccount 5 | metadata: 6 | name: {{ include "pctiler.serviceAccountName" . }} 7 | labels: 8 | {{- include "pctiler.labels" . | nindent 4 }} 9 | {{- with .Values.tiler.deploy.serviceAccount.annotations }} 10 | annotations: 11 | {{- toYaml . | nindent 4 }} 12 | {{- end }} 13 | {{- end }} 14 | {{- end }} 15 | -------------------------------------------------------------------------------- /deployment/helm/published/planetary-computer-tiler/values.yaml: -------------------------------------------------------------------------------- 1 | environment: "staging" 2 | 3 | tiler: 4 | enabled: true 5 | image: 6 | # e.g. 
myacr.azurecr.io/my-repository 7 | repository: "" 8 | pullPolicy: Always 9 | tag: "latest" 10 | 11 | server: 12 | minDbConnections: "1" 13 | maxDbConnections: "1" 14 | webConcurrency: "1" 15 | request_timeout: 30 16 | 17 | service: 18 | type: ClusterIP 19 | port: 80 20 | annotations: {} 21 | 22 | deploy: 23 | replicaCount: 10 24 | podAnnotations: {} 25 | imagePullSecrets: [] 26 | securityContext: {} 27 | podSecurityContext: {} 28 | resources: {} 29 | nodeSelector: {} 30 | tolerations: [] 31 | affinity: {} 32 | autoscaling: 33 | enabled: false 34 | useWorkloadIdentity: false 35 | serviceAccount: 36 | annotations: {} 37 | 38 | stac_api_url: "" 39 | stac_api_href: "" 40 | pc_sdk_sas_url: "" 41 | pc_sdk_subscription_key: "" 42 | vectortile_sa_base_url: "" 43 | 44 | default_max_items_per_tile: 5 45 | host: "0.0.0.0" 46 | port: "80" 47 | appRootPath: "" 48 | 49 | storage: 50 | account_name: "" 51 | account_key: "" 52 | collection_config_table_name: "" 53 | container_config_table_name: "" 54 | ip_exception_config_table_name: "" 55 | table_value_ttl: 600 56 | 57 | redis: 58 | host: "" 59 | password: "" 60 | port: 6380 61 | ssl: true 62 | ttl: 600 63 | 64 | postgres: 65 | serverName: "" 66 | user: "" 67 | password: "" 68 | dbName: "postgis" 69 | port: "5432" 70 | 71 | metrics: 72 | instrumentationKey: "" 73 | 74 | serviceAccount: 75 | # Specifies whether a service account should be created 76 | create: true 77 | # Annotations to add to the service account 78 | annotations: {} 79 | # The name of the service account to use. 
80 | # If not set and create is true, a name is generated using the fullname template 81 | name: "" 82 | 83 | nameOverride: "" 84 | fullnameOverride: "" 85 | -------------------------------------------------------------------------------- /deployment/terraform/dev/main.tf: -------------------------------------------------------------------------------- 1 | variable "username" { 2 | type = string 3 | } 4 | 5 | module "resources" { 6 | source = "../resources" 7 | 8 | environment = var.username 9 | region = "West Europe" 10 | 11 | k8s_version = "1.22.4" 12 | 13 | cluster_cert_issuer = "letsencrypt" 14 | cluster_cert_server = "https://acme-v02.api.letsencrypt.org/directory" 15 | 16 | aks_node_count = 1 17 | stac_replica_count = 1 18 | tiler_replica_count = 1 19 | 20 | # Funcs 21 | output_storage_account_name = "pcfilestest" 22 | output_container_name = "output" 23 | funcs_data_api_url = "https://planetarycomputer.microsoft.com/api/data/v1" 24 | funcs_tile_request_concurrency = 10 25 | 26 | animation_output_storage_url = "https://pcfilestest.blob.core.windows.net/output/animations" 27 | image_output_storage_url = "https://pcfilestest.blob.core.windows.net/output/images" 28 | 29 | } 30 | 31 | terraform { 32 | backend "local" { 33 | path = "terraform.tfstate" 34 | } 35 | } 36 | 37 | output "resources" { 38 | value = module.resources 39 | sensitive = true 40 | } 41 | -------------------------------------------------------------------------------- /deployment/terraform/resources/acr.tf: -------------------------------------------------------------------------------- 1 | data "azurerm_container_registry" "pc" { 2 | name = var.pc_test_resources_acr 3 | resource_group_name = var.pc_test_resources_rg 4 | } 5 | 6 | # add the role to the identity the kubernetes cluster was assigned 7 | resource "azurerm_role_assignment" "attach_acr" { 8 | scope = data.azurerm_container_registry.pc.id 9 | role_definition_name = "AcrPull" 10 | principal_id = 
azurerm_kubernetes_cluster.pc.kubelet_identity[0].object_id 11 | } 12 | -------------------------------------------------------------------------------- /deployment/terraform/resources/ai.tf: -------------------------------------------------------------------------------- 1 | resource "azurerm_application_insights" "pc_application_insights" { 2 | name = "${local.prefix}-app-insights" 3 | location = azurerm_resource_group.pc.location 4 | resource_group_name = azurerm_resource_group.pc.name 5 | application_type = "web" 6 | 7 | lifecycle { 8 | ignore_changes = [ 9 | workspace_id 10 | ] 11 | } 12 | } -------------------------------------------------------------------------------- /deployment/terraform/resources/azm.tf: -------------------------------------------------------------------------------- 1 | resource "azurerm_log_analytics_workspace" "pc_azm_workspace" { 2 | name = "${local.prefix}-azm-ws" 3 | location = azurerm_resource_group.pc.location 4 | resource_group_name = azurerm_resource_group.pc.name 5 | sku = "PerGB2018" 6 | retention_in_days = 30 7 | } -------------------------------------------------------------------------------- /deployment/terraform/resources/ip.tf: -------------------------------------------------------------------------------- 1 | resource "azurerm_public_ip" "pc" { 2 | name = "${local.prefix}-pc-ip" 3 | domain_name_label = "pct-apis-${var.environment}" 4 | resource_group_name = azurerm_kubernetes_cluster.pc.node_resource_group 5 | location = azurerm_resource_group.pc.location 6 | allocation_method = "Static" 7 | sku = "Standard" 8 | zones = ["1", "2", "3"] 9 | 10 | tags = { 11 | environment = var.environment 12 | } 13 | 14 | lifecycle { 15 | ignore_changes = [ 16 | # Ignore changes to tags, e.g. because a management agent 17 | # updates these based on some ruleset managed elsewhere. 
18 | tags, 19 | ] 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /deployment/terraform/resources/keyvault.tf: -------------------------------------------------------------------------------- 1 | data "azurerm_key_vault" "deploy_secrets" { 2 | name = var.pc_test_resources_kv 3 | resource_group_name = var.pc_test_resources_rg 4 | } 5 | 6 | data "azurerm_key_vault_secret" "db_admin_password" { 7 | name = var.pg_password_secret_name 8 | key_vault_id = data.azurerm_key_vault.deploy_secrets.id 9 | } 10 | 11 | data "azurerm_key_vault_secret" "pc_sdk_subscription_key" { 12 | name = var.pc_sdk_subscription_key_secret_name 13 | key_vault_id = data.azurerm_key_vault.deploy_secrets.id 14 | } 15 | -------------------------------------------------------------------------------- /deployment/terraform/resources/maps.tf: -------------------------------------------------------------------------------- 1 | resource "azurerm_maps_account" "azmaps" { 2 | name = "azmaps-${local.prefix}" 3 | resource_group_name = azurerm_resource_group.pc.name 4 | sku_name = "G2" 5 | } 6 | -------------------------------------------------------------------------------- /deployment/terraform/resources/providers.tf: -------------------------------------------------------------------------------- 1 | provider "azurerm" { 2 | features {} 3 | use_oidc = true 4 | 5 | # This could be used instead of temporarily enabling shared key access once 6 | # this issue is resolved. 
7 | # https://github.com/hashicorp/terraform-provider-azurerm/issues/23142 8 | # storage_use_azuread = true 9 | } 10 | 11 | provider "azurerm" { 12 | alias = "planetary_computer_subscription" 13 | subscription_id = "9da7523a-cb61-4c3e-b1d4-afa5fc6d2da9" 14 | features {} 15 | } 16 | 17 | terraform { 18 | required_version = ">= 0.13" 19 | 20 | required_providers { 21 | azurerm = { 22 | source = "hashicorp/azurerm" 23 | version = "3.108.0" 24 | } 25 | } 26 | } 27 | 28 | data "azurerm_client_config" "current" {} 29 | -------------------------------------------------------------------------------- /deployment/terraform/resources/redis.tf: -------------------------------------------------------------------------------- 1 | resource "azurerm_redis_cache" "pc" { 2 | name = "${local.prefix}-cache" 3 | location = azurerm_resource_group.pc.location 4 | resource_group_name = azurerm_resource_group.pc.name 5 | capacity = 1 6 | family = "P" 7 | sku_name = "Premium" 8 | enable_non_ssl_port = false 9 | minimum_tls_version = "1.2" 10 | 11 | subnet_id = azurerm_subnet.cache_subnet.id 12 | 13 | redis_configuration { 14 | } 15 | } -------------------------------------------------------------------------------- /deployment/terraform/resources/rg.tf: -------------------------------------------------------------------------------- 1 | resource "azurerm_resource_group" "pc" { 2 | name = "${local.prefix}_rg" 3 | location = var.region 4 | 5 | tags = { 6 | "ringValue" = "r0" 7 | } 8 | } 9 | -------------------------------------------------------------------------------- /deployment/terraform/resources/storage_account.tf: -------------------------------------------------------------------------------- 1 | resource "azurerm_storage_account" "pc" { 2 | name = "${local.nodash_prefix}sa" 3 | resource_group_name = azurerm_resource_group.pc.name 4 | location = azurerm_resource_group.pc.location 5 | account_tier = "Standard" 6 | account_replication_type = "LRS" 7 | min_tls_version = "TLS1_2" 8 | 
allow_nested_items_to_be_public = false 9 | 10 | network_rules { 11 | default_action = "Deny" 12 | virtual_network_subnet_ids = [azurerm_subnet.node_subnet.id, azurerm_subnet.function_subnet.id, data.azurerm_subnet.sas_node_subnet.id] 13 | } 14 | 15 | # Disabling shared access keys breaks terraform's ability to do subsequent 16 | # resource fetching during terraform plan. As a result, this property is 17 | # ignored and managed outside of this apply session, via the deploy script. 18 | # https://github.com/hashicorp/terraform-provider-azurerm/issues/25218 19 | 20 | # shared_access_key_enabled = false 21 | lifecycle { 22 | ignore_changes = [ 23 | shared_access_key_enabled, 24 | ] 25 | } 26 | } 27 | 28 | 29 | # Tables 30 | 31 | resource "azurerm_storage_table" "collectionconfig" { 32 | name = "collectionconfig" 33 | storage_account_name = azurerm_storage_account.pc.name 34 | } 35 | 36 | resource "azurerm_storage_table" "containerconfig" { 37 | name = "containerconfig" 38 | storage_account_name = azurerm_storage_account.pc.name 39 | } 40 | 41 | resource "azurerm_storage_table" "ipexceptionlist" { 42 | name = "ipexceptionlist" 43 | storage_account_name = azurerm_storage_account.pc.name 44 | } 45 | 46 | resource "azurerm_storage_table" "blobstoragebannedip" { 47 | name = "blobstoragebannedip" 48 | storage_account_name = azurerm_storage_account.pc.name 49 | } 50 | 51 | # Output storage account for function app, "pcfilestest" 52 | data "azurerm_storage_account" "output-storage-account" { 53 | name = var.output_storage_account_name 54 | resource_group_name = var.pc_test_resources_rg 55 | 56 | } 57 | 58 | resource "azurerm_storage_account_network_rules" "pcfunc-vnet-access" { 59 | storage_account_id = data.azurerm_storage_account.output-storage-account.id 60 | 61 | default_action = "Deny" 62 | virtual_network_subnet_ids = [azurerm_subnet.function_subnet.id] 63 | } 64 | -------------------------------------------------------------------------------- 
/deployment/terraform/staging/main.tf: -------------------------------------------------------------------------------- 1 | module "resources" { 2 | source = "../resources" 3 | 4 | environment = "staging" 5 | region = "West Europe" 6 | 7 | cluster_cert_issuer = "letsencrypt" 8 | cluster_cert_server = "https://acme-v02.api.letsencrypt.org/directory" 9 | 10 | pc_test_resources_acr = "pccomponentstest" 11 | 12 | aks_node_count = 3 13 | stac_replica_count = 2 14 | tiler_replica_count = 2 15 | 16 | # Funcs 17 | output_storage_account_name = "pcfilestest" 18 | output_container_name = "output" 19 | funcs_data_api_url = "https://planetarycomputer.microsoft.com/api/data/v1" 20 | funcs_tile_request_concurrency = 10 21 | 22 | animation_output_storage_url = "https://pcfilestest.blob.core.windows.net/output/animations" 23 | image_output_storage_url = "https://pcfilestest.blob.core.windows.net/output/images" 24 | 25 | prod_log_analytics_workspace_name = "pc-api-loganalytics" 26 | prod_log_analytics_workspace_id = "78d48390-b6bb-49a9-b7fd-a86f6522e9c4" 27 | func_storage_account_url = "https://pctapisstagingsa.table.core.windows.net/" 28 | banned_ip_table = "blobstoragebannedip" 29 | 30 | sas_node_subnet_name = "pct-sas-westeurope-staging-node-subnet" 31 | sas_node_subnet_virtual_network_name = "pct-sas-westeurope-staging-network" 32 | sas_node_subnet_resource_group_name = "pct-sas-westeurope-staging_rg" 33 | } 34 | 35 | terraform { 36 | backend "azurerm" { 37 | resource_group_name = "pc-test-manual-resources" 38 | storage_account_name = "pctesttfstate" 39 | container_name = "pc-test-api" 40 | key = "pqe-apis.tfstate" 41 | use_oidc = true 42 | use_azuread_auth = true 43 | } 44 | } 45 | 46 | output "resources" { 47 | value = module.resources 48 | sensitive = true 49 | } 50 | -------------------------------------------------------------------------------- /mypy.ini: -------------------------------------------------------------------------------- 1 | [mypy] 2 | ignore_missing_imports 
= True 3 | disallow_untyped_defs = True -------------------------------------------------------------------------------- /nginx/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM nginx:1.10 2 | 3 | COPY etc/nginx/nginx.conf /etc/nginx/nginx.conf 4 | COPY etc/nginx/conf.d/default.conf /etc/nginx/conf.d/default.conf -------------------------------------------------------------------------------- /nginx/etc/nginx/conf.d/default.conf: -------------------------------------------------------------------------------- 1 | upstream stac-upstream { 2 | server stac:8081; 3 | } 4 | 5 | upstream tiler-upstream { 6 | server tiler:8082; 7 | } 8 | 9 | upstream funcs-upstream { 10 | server funcs:80; 11 | } 12 | 13 | server { 14 | listen 80; 15 | server_name localhost; 16 | 17 | # STAC API 18 | location /stac/ { 19 | proxy_set_header Host $http_host; 20 | proxy_set_header X-Forwarded-For $remote_addr; 21 | proxy_connect_timeout 120; 22 | # Uncomment to test proxy protocol forwarding 23 | # proxy_set_header X-FORWARDED-PROTOCOL ssl; 24 | # proxy_set_header X-FORWARDED-PROTO https; 25 | # proxy_set_header X-FORWARDED-SSL on; 26 | 27 | proxy_pass http://stac-upstream/; 28 | } 29 | 30 | # Tiler 31 | location /data/ { 32 | proxy_set_header Host $http_host; 33 | proxy_set_header X-Forwarded-For $remote_addr; 34 | proxy_buffers 8 8k; 35 | proxy_buffer_size "16k"; 36 | proxy_connect_timeout 120; 37 | 38 | add_header 'Access-Control-Allow-Methods' 'GET, POST, OPTIONS'; 39 | add_header 'Access-Control-Allow-Headers' 'X-PC-Request-Entity,DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range'; 40 | 41 | proxy_pass http://tiler-upstream/; 42 | } 43 | 44 | # Funcs 45 | location /f/ { 46 | proxy_set_header Host $http_host; 47 | proxy_set_header X-Forwarded-For $remote_addr; 48 | proxy_buffers 8 8k; 49 | proxy_buffer_size "16k"; 50 | proxy_connect_timeout 120; 51 | 52 | # Pass requests for 
http://localhost:8080/f/{endpoint} to http://funcs:80/api/f/v1/{endpoint} 53 | proxy_pass http://funcs-upstream/api/f/v1/; 54 | 55 | # Allow CORS at the nginx level because function runtime doesn't expose the settings 56 | # https://github.com/Azure/azure-functions-host/issues/5090 57 | add_header 'Access-Control-Allow-Origin' '*'; 58 | add_header 'Access-Control-Allow-Methods' 'GET, POST, OPTIONS'; 59 | add_header 'Access-Control-Allow-Headers' 'X-PC-Request-Entity,DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range'; 60 | 61 | if ($request_method = 'OPTIONS') { 62 | return 204; 63 | } 64 | 65 | } 66 | } 67 | -------------------------------------------------------------------------------- /nginx/etc/nginx/nginx.conf: -------------------------------------------------------------------------------- 1 | user nginx; 2 | 3 | error_log /var/log/nginx/error.log warn; 4 | pid /var/run/nginx.pid; 5 | 6 | events { 7 | 8 | } 9 | 10 | http { 11 | default_type application/octet-stream; 12 | 13 | include /etc/nginx/conf.d/*.conf; 14 | } -------------------------------------------------------------------------------- /pc-funcs.dev.env: -------------------------------------------------------------------------------- 1 | WEBSITE_HOSTNAME=funcs:8083 2 | ANIMATION_OUTPUT_STORAGE_URL="http://azurite:10000/devstoreaccount1/output/animations" 3 | 4 | ANIMATION_API_ROOT_URL="https://planetarycomputer-staging.microsoft.com/api/data/v1" 5 | ANIMATION_TILE_REQUEST_CONCURRENCY=2 6 | 7 | IMAGE_OUTPUT_STORAGE_URL="http://azurite:10000/devstoreaccount1/output/images" 8 | 9 | IMAGE_API_ROOT_URL="https://planetarycomputer-staging.microsoft.com/api/data/v1" 10 | IMAGE_TILE_REQUEST_CONCURRENCY=2 11 | STORAGE_ACCOUNT_URL=https://pctapisstagingsa.table.core.windows.net/ 12 | BANNED_IP_TABLE=blobstoragebannedip 13 | LOG_ANALYTICS_WORKSPACE_ID=78d48390-b6bb-49a9-b7fd-a86f6522e9c4 14 | 15 | AZURITE_ACCOUNT_KEY=${AZURITE_ACCOUNT_KEY} 
-------------------------------------------------------------------------------- /pc-stac.dev.env: -------------------------------------------------------------------------------- 1 | APP_ROOT_PATH=/stac 2 | APP_HOST=0.0.0.0 3 | APP_PORT=8081 4 | FORWARDED_ALLOW_IPS=* 5 | ENVIRONMENT=local 6 | PCAPIS_DEBUG=TRUE 7 | TILER_HREF=http://localhost:8080/data/ 8 | DB_MIN_CONN_SIZE=1 9 | DB_MAX_CONN_SIZE=1 10 | WEB_CONCURRENCY=1 11 | 12 | # PgSTAC API hydration 13 | USE_API_HYDRATE=TRUE 14 | 15 | # Azure Storage 16 | AZURITE_ACCOUNT_KEY=${AZURITE_ACCOUNT_KEY} 17 | PCAPIS_COLLECTION_CONFIG__ACCOUNT_URL=http://azurite:10002/devstoreaccount1 18 | PCAPIS_COLLECTION_CONFIG__ACCOUNT_NAME=devstoreaccount1 19 | PCAPIS_COLLECTION_CONFIG__TABLE_NAME=collectionconfig 20 | 21 | PCAPIS_CONTAINER_CONFIG__ACCOUNT_URL=http://azurite:10002/devstoreaccount1 22 | PCAPIS_CONTAINER_CONFIG__ACCOUNT_NAME=devstoreaccount1 23 | PCAPIS_CONTAINER_CONFIG__TABLE_NAME=containerconfig 24 | 25 | PCAPIS_IP_EXCEPTION_CONFIG__ACCOUNT_URL=http://azurite:10002/devstoreaccount1 26 | PCAPIS_IP_EXCEPTION_CONFIG__ACCOUNT_NAME=devstoreaccount1 27 | PCAPIS_IP_EXCEPTION_CONFIG__TABLE_NAME=ipexceptionlist 28 | 29 | # Disable config and stac caching in development by setting TTL to 1 second 30 | PCAPIS_TABLE_VALUE_TTL=1 31 | PCAPIS_REDIS_TTL=1 32 | 33 | # Redis 34 | PCAPIS_REDIS_HOSTNAME=redis 35 | PCAPIS_REDIS_PASSWORD=devcache 36 | PCAPIS_REDIS_PORT=6380 37 | PCAPIS_REDIS_SSL=FALSE 38 | 39 | # Rate limit 40 | PCAPIS_RATE_LIMITS__COLLECTIONS=10 41 | PCAPIS_RATE_LIMITS__COLLECTION=10 42 | PCAPIS_RATE_LIMITS__ITEMS=10 43 | PCAPIS_RATE_LIMITS__ITEM=10 44 | PCAPIS_RATE_LIMITS__SEARCH=10 45 | 46 | # Backpressure 47 | PCAPIS_BACK_PRESSURES__COLLECTIONS__REQ_PER_SEC=100 48 | PCAPIS_BACK_PRESSURES__COLLECTIONS__INC_MS=10 49 | PCAPIS_BACK_PRESSURES__COLLECTION__REQ_PER_SEC=100 50 | PCAPIS_BACK_PRESSURES__COLLECTION__INC_MS=10 51 | PCAPIS_BACK_PRESSURES__ITEMS__REQ_PER_SEC=100 52 | PCAPIS_BACK_PRESSURES__ITEMS__INC_MS=10 53 | 
PCAPIS_BACK_PRESSURES__ITEM__REQ_PER_SEC=100 54 | PCAPIS_BACK_PRESSURES__ITEM__INC_MS=10 55 | PCAPIS_BACK_PRESSURES__SEARCH__REQ_PER_SEC=100 56 | PCAPIS_BACK_PRESSURES__SEARCH__INC_MS=10 57 | 58 | # Used by pgstac backend 59 | POSTGRES_USER=username 60 | POSTGRES_PASS=password 61 | POSTGRES_DBNAME=postgis 62 | POSTGRES_HOST_READER=database 63 | POSTGRES_HOST_WRITER=database 64 | POSTGRES_PORT=5432 65 | 66 | # Used by pypgstac migrate 67 | PGUSER=username 68 | PGPASSWORD=password 69 | PGHOST=database 70 | PGDATABASE=postgis 71 | 72 | # Used for logging and metrics 73 | APP_INSIGHTS_INSTRUMENTATION_KEY=${APP_INSIGHTS_INSTRUMENTATION_KEY} 74 | -------------------------------------------------------------------------------- /pc-tiler.dev.env: -------------------------------------------------------------------------------- 1 | APP_ROOT_PATH=/data 2 | APP_HOST=0.0.0.0 3 | APP_PORT=8082 4 | CPL_DEBUG=ON 5 | FORWARDED_ALLOW_IPS=* 6 | FF_VRT="yes" 7 | STAC_API_URL=http://stac:8081 8 | STAC_API_HREF=http://localhost:8080/stac/ 9 | PC_SDK_SAS_URL=https://planetarycomputer-staging.microsoft.com/api/sas/v1/token 10 | 11 | # titiler.pgstac 12 | POSTGRES_USER=username 13 | POSTGRES_PASS=password 14 | POSTGRES_DBNAME=postgis 15 | POSTGRES_HOST=database 16 | POSTGRES_HOST_READER=database 17 | POSTGRES_HOST_WRITER=database 18 | POSTGRES_PORT=5432 19 | DB_MIN_CONN_SIZE=1 20 | DB_MAX_CONN_SIZE=1 21 | WEB_CONCURRENCY=1 22 | DEFAULT_MAX_ITEMS_PER_TILE=5 23 | VECTORTILE_SA_BASE_URL=https://pcvectortiles.blob.core.windows.net 24 | 25 | # Azure Storage 26 | PCAPIS_COLLECTION_CONFIG__ACCOUNT_URL=http://azurite:10002/devstoreaccount1 27 | PCAPIS_COLLECTION_CONFIG__ACCOUNT_NAME=devstoreaccount1 28 | PCAPIS_COLLECTION_CONFIG__TABLE_NAME=collectionconfig 29 | 30 | PCAPIS_CONTAINER_CONFIG__ACCOUNT_URL=http://azurite:10002/devstoreaccount1 31 | PCAPIS_CONTAINER_CONFIG__ACCOUNT_NAME=devstoreaccount1 32 | PCAPIS_CONTAINER_CONFIG__TABLE_NAME=containerconfig 33 | 34 | 
def get_container_client(
    container_url: str,
) -> ContainerClient:
    """Build a ContainerClient for the given container URL.

    Local Azurite containers (URLs beginning with ``http://azurite:``)
    are authenticated with the well-known dev-account shared key; any
    other URL uses the managed identity of the running service.
    """
    is_azurite = container_url.startswith("http://azurite:")

    credential: Optional[Union[Dict[str, str], ManagedIdentityCredential]]
    if is_azurite:
        # Azurite emulator: authenticate with the shared dev-account key.
        credential = {
            "account_name": "devstoreaccount1",
            "account_key": AZURITE_ACCOUNT_KEY,
        }
    else:
        # Deployed environments rely on the service's managed identity.
        credential = ManagedIdentityCredential()

    return ContainerClient.from_container_url(container_url, credential=credential)
class BlobCDN:
    @staticmethod
    def transform_if_available(asset_href: str) -> str:
        """Rewrite a blob-storage href to its CDN equivalent when enabled.

        If the href points at Azure Blob Storage and the container's
        config flags ``has_cdn``, swap the storage host for the CDN
        host; otherwise return the href unchanged.
        """
        match = BLOB_REGEX.match(asset_href)
        if not match:
            return asset_href

        account, container = match.group(1), match.group(2)
        container_config = (
            PCAPIsConfig.from_environment()
            .get_container_config_table()
            .get_config(account, container)
        )
        if container_config and container_config.has_cdn:
            return asset_href.replace("blob.core.windows", "azureedge")
        return asset_href
class ContainerConfig(BaseModel):
    """Per-container settings stored in the container config table."""

    # Whether assets in this container should be served through the CDN.
    has_cdn: bool = False

    # NOTE: the old json_loads/json_dumps (orjson) Config hooks were
    # removed; the pydantic authors seem to indicate that Rust-based
    # parsing/serialization in pydantic-core is fast enough (though
    # perhaps not quite as fast as orjson).
    # https://github.com/pydantic/pydantic/discussions/6388


class ContainerConfigTable(ModelTableService[ContainerConfig]):
    """Table service keyed by (storage account, container name)."""

    _model = ContainerConfig

    def get_config(
        self, storage_account: str, container: str
    ) -> Optional[ContainerConfig]:
        """Fetch the config for a container, or ``None`` if absent."""
        return self.get(storage_account, container)

    def set_config(
        self, storage_account: str, container: str, config: ContainerConfig
    ) -> None:
        """Insert or update the config for a container."""
        self.upsert(storage_account, container, config)
class PcDefaultAzureCredential:
    """Singleton wrapper around DefaultAzureCredential to share in memory cache
    between requests and threads. Assumption of thread safety for method calls is
    based on:

    https://github.com/Azure/azure-sdk-for-python/issues/28665
    """

    _instance = None
    _lock = threading.Lock()

    @classmethod
    def get_token(cls, *scopes: str, **kwargs: Any) -> AccessToken:
        """Acquire a token for *scopes* from the shared credential."""
        return cls.get_credential().get_token(*scopes, **kwargs)

    @classmethod
    def get_credential(cls) -> DefaultAzureCredential:
        """Return the process-wide credential, creating it on first use.

        Uses double-checked locking: the unlocked check keeps the hot
        path cheap, and the re-check under the lock prevents two racing
        threads from each constructing (and leaking) a credential —
        the original code omitted the inner check.
        """
        if cls._instance is None:
            with cls._lock:
                # Re-check: another thread may have initialized the
                # instance while we were waiting for the lock.
                if cls._instance is None:
                    cls._instance = DefaultAzureCredential()
        return cls._instance
def get_request_ip(request: Request) -> str:
    """Gets the IP address of the request.

    Checks, in order: the Azure Front Door client-IP header, then the
    original and standard ``X-Forwarded-For`` headers. A forwarded
    header may carry a comma-separated proxy chain; the last (nearest)
    entry is used.
    """

    ip_header = (
        request.headers.get(X_AZURE_CLIENTIP)  # set by Front Door
        or request.headers.get(X_ORIGINAL_FORWARDED_FOR)
        or request.headers.get(X_FORWARDED_FOR)
    )

    # If multiple IPs, take the last one. Chain entries are separated
    # by ", ", so strip the surrounding whitespace from the value —
    # without this the returned IP carries a leading space.
    return ip_header.split(",")[-1].strip() if ip_header else ""
9 | license = { text = "MIT" } 10 | requires-python = ">=3.7" 11 | dependencies = [ 12 | "azure-data-tables>=12.5.0", 13 | "azure-identity>=1.16.1", 14 | "azure-storage-blob>=12.20.0", 15 | "cachetools~=5.3", 16 | "fastapi-slim>=0.111.0", 17 | "html-sanitizer>=2.4.4", 18 | "idna>=3.7.0", 19 | "lxml_html_clean>=0.1.0", 20 | "opencensus-ext-azure>=1.1.13", 21 | "opencensus-ext-logging>=0.1.1", 22 | "orjson>=3.10.4", 23 | "pydantic-settings>=2.3", 24 | "pydantic>=2.7", 25 | "pyhumps>=3.5.3", 26 | "redis>=4.6.0", 27 | "requests>=2.32.3", 28 | "starlette>=0.37.2", 29 | "types-cachetools>=4.2.9", 30 | "urllib3>=2.2.2", 31 | ] 32 | 33 | [project.optional-dependencies] 34 | dev = [ 35 | "pytest", 36 | "pytest-asyncio", 37 | "types-redis", 38 | "types-requests", 39 | ] 40 | 41 | [project.scripts] 42 | pcapis = "pccommon.cli:cli" 43 | 44 | [tool.hatch.version] 45 | path = "pccommon/version.py" 46 | 47 | [tool.hatch.build.targets.sdist] 48 | include = [ 49 | "/pccommon", 50 | ] 51 | -------------------------------------------------------------------------------- /pccommon/tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/microsoft/planetary-computer-apis/cc8cf429cf2a5206ff14b492e4d26ffad62749f3/pccommon/tests/__init__.py -------------------------------------------------------------------------------- /pccommon/tests/config/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/microsoft/planetary-computer-apis/cc8cf429cf2a5206ff14b492e4d26ffad62749f3/pccommon/tests/config/__init__.py -------------------------------------------------------------------------------- /pccommon/tests/config/test_table_settings.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from pccommon.config.core import TableConfig 4 | 5 | 6 | def test_raises_on_non_azurite_account_url() -> 
None: 7 | 8 | invalid_url = "https://example.com" 9 | with pytest.raises(ValueError) as exc_info: 10 | TableConfig(account_url=invalid_url, table_name="test", account_name="test") 11 | 12 | assert ( 13 | "Non-azurite account url provided. " 14 | "Account keys can only be used with Azurite emulator." 15 | ) in str(exc_info.value) 16 | 17 | 18 | def test_settings_accepts_azurite_url() -> None: 19 | valid_url = "http://azurite:12345" 20 | 21 | config = TableConfig(account_url=valid_url, table_name="test", account_name="test") 22 | assert config.account_url == valid_url 23 | -------------------------------------------------------------------------------- /pccommon/tests/data-files/container_config.json: -------------------------------------------------------------------------------- 1 | { 2 | "naipeuwest/naip": { 3 | "has_cdn": true 4 | } 5 | } -------------------------------------------------------------------------------- /pccommon/tests/data/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/microsoft/planetary-computer-apis/cc8cf429cf2a5206ff14b492e4d26ffad62749f3/pccommon/tests/data/__init__.py -------------------------------------------------------------------------------- /pccommon/tests/data/cql.py: -------------------------------------------------------------------------------- 1 | cql = { 2 | "and": [ 3 | {"lte": [{"property": "eo:cloud_cover"}, "10"]}, 4 | {"gte": [{"property": "datetime"}, "2021-04-08T04:39:23Z"]}, 5 | { 6 | "or": [ 7 | {"eq": [{"property": "collection"}, "landsat"]}, 8 | {"lte": [{"property": "gsd"}, "10"]}, 9 | ] 10 | }, 11 | {"lte": [{"property": "id"}, "l8_12345"]}, 12 | ] 13 | } 14 | 15 | cql_multi = { 16 | "and": [ 17 | {"lte": [{"property": "eo:cloud_cover"}, "10"]}, 18 | {"gte": [{"property": "datetime"}, "2021-04-08T04:39:23Z"]}, 19 | { 20 | "or": [ 21 | {"eq": [{"property": "collection"}, ["landsat", "sentinel"]]}, 22 | {"lte": [{"property": "gsd"}, 
"10"]}, 23 | ] 24 | }, 25 | ] 26 | } 27 | 28 | cql2 = { 29 | "op": "or", 30 | "args": [ 31 | {"op": ">=", "args": [{"property": "sentinel:data_coverage"}, 50]}, 32 | {"op": "=", "args": [{"property": "collection"}, "landsat"]}, 33 | { 34 | "op": "and", 35 | "args": [ 36 | {"op": "isNull", "args": {"property": "sentinel:data_coverage"}}, 37 | {"op": "isNull", "args": {"property": "landsat:coverage_percent"}}, 38 | ], 39 | }, 40 | ], 41 | } 42 | 43 | cql2_nested = { 44 | "op": "or", 45 | "args": [ 46 | {"op": ">=", "args": [{"property": "sentinel:data_coverage"}, 50]}, 47 | { 48 | "op": "and", 49 | "args": [ 50 | {"op": "isNull", "args": {"property": "sentinel:data_coverage"}}, 51 | { 52 | "op": "=", 53 | "args": [ 54 | {"property": "collection"}, 55 | ["landsat", "sentinel"], 56 | ], 57 | }, 58 | {"op": "in", "args": [{"property": "id"}, ["l8_12345", "s2_12345"]]}, 59 | ], 60 | }, 61 | ], 62 | } 63 | 64 | cql2_no_collection = { 65 | "op": "or", 66 | "args": [{"op": ">=", "args": [{"property": "sentinel:data_coverage"}, 50]}], 67 | } 68 | -------------------------------------------------------------------------------- /pccommon/tests/test_timeouts.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from typing import Any 3 | 4 | import pytest 5 | from fastapi import FastAPI 6 | from httpx import ASGITransport, AsyncClient 7 | from starlette.status import HTTP_504_GATEWAY_TIMEOUT 8 | 9 | from pccommon.middleware import add_timeout 10 | 11 | TIMEOUT_SECONDS = 2 12 | BASE_URL = "http://test" 13 | 14 | 15 | @pytest.mark.asyncio 16 | async def test_add_timeout() -> None: 17 | 18 | # Setup test app and endpoints to test middleware on 19 | # ================================== 20 | 21 | app = FastAPI() 22 | app.state.service_name = "test" 23 | 24 | @app.get("/asleep") 25 | async def asleep() -> Any: 26 | await asyncio.sleep(1) 27 | return {} 28 | 29 | # Run this after registering the routes 30 | add_timeout(app, 
def test_tracing() -> None:
    pass


def test_cql_collection_parsing() -> None:
    collection_id, item_id = _parse_cqljson(cql)

    assert collection_id == "landsat"
    assert item_id == "l8_12345"


def test_cql_multi_collection_parsing() -> None:
    collection_id, item_id = _parse_cqljson(cql_multi)

    # Fixed: this was a bare comparison with no effect; now asserted.
    assert collection_id == "landsat,sentinel"
    assert item_id is None


def test_cql2_collection_parsing() -> None:
    collection_id, item_id = _parse_cqljson(cql2)

    assert collection_id == "landsat"
    assert item_id is None


def test_cql2_nested_multi_collection_parsing() -> None:
    collection_id, item_id = _parse_cqljson(cql2_nested)

    # Fixed: these were bare comparisons with no effect; now asserted.
    assert collection_id == "landsat,sentinel"
    assert item_id == "l8_12345,s2_12345"


def test_cql2_no_collection() -> None:
    collection_id, item_id = _parse_cqljson(cql2_no_collection)

    # Fixed: these were bare comparisons with no effect; now asserted.
    assert collection_id is None
    assert item_id is None
assert result == expected 9 | -------------------------------------------------------------------------------- /pcfuncs/.funcignore: -------------------------------------------------------------------------------- 1 | tests/ 2 | Dockerfile -------------------------------------------------------------------------------- /pcfuncs/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM mcr.microsoft.com/azure-functions/python:4-python3.10 2 | 3 | # git required for pip installs from git 4 | RUN apt update && apt install -y git 5 | RUN curl -sL https://aka.ms/InstallAzureCLIDeb | bash 6 | 7 | ENV AzureWebJobsScriptRoot=/home/site/wwwroot \ 8 | AzureFunctionsJobHost__Logging__Console__IsEnabled=true 9 | 10 | COPY pccommon /home/site/pccommon 11 | 12 | RUN mkdir -p /home/site/wwwroot 13 | WORKDIR /home/site/wwwroot 14 | 15 | RUN --mount=type=cache,target=/root/.cache \ 16 | --mount=type=bind,source=pcfuncs/requirements.txt,target=/home/site/wwwroot/requirements.txt \ 17 | pip install -r /home/site/wwwroot/requirements.txt 18 | 19 | RUN --mount=type=cache,target=/root/.cache \ 20 | --mount=type=bind,source=requirements-dev.txt,target=/home/site/requirements-dev.txt \ 21 | pip install -r /home/site/requirements-dev.txt 22 | 23 | ENV PYTHONPATH=/home/site/wwwroot 24 | -------------------------------------------------------------------------------- /pcfuncs/animation/__init__.py: -------------------------------------------------------------------------------- 1 | import json 2 | import logging 3 | from typing import Callable, List 4 | 5 | import azure.functions as func 6 | from funclib.errors import BBoxTooLargeError 7 | from funclib.stamps.branding import LogoStamp 8 | from funclib.stamps.progress_bar import ProgressBarStamp 9 | from funclib.stamps.stamp import ImageStamp 10 | from pydantic import ValidationError 11 | 12 | from .animation import PcMosaicAnimation 13 | from .models import AnimationRequest, AnimationResponse 14 | 
async def main(req: func.HttpRequest) -> func.HttpResponse:
    """Azure Functions HTTP entry point for the animation endpoint.

    Parses the JSON body into an :class:`AnimationRequest`, renders the
    animated GIF, and returns a JSON body containing the uploaded GIF URL.

    Responses:
        200: ``{"url": ...}`` on success.
        400: invalid JSON, request validation failure, or oversized bbox.
        500: any unexpected error (details are logged, not returned).
    """
    try:
        body = req.get_json()
    except ValueError:
        # BUGFIX: "application/text" is not a registered MIME type; the
        # correct type for a plain-text error body is "text/plain".
        return func.HttpResponse(
            status_code=400,
            mimetype="text/plain",
            body="Error: Invalid JSON",
        )

    try:
        parsed_request = AnimationRequest(**body)
    except ValidationError as e:
        # Return pydantic's structured validation errors to the caller.
        return func.HttpResponse(
            status_code=400,
            mimetype="application/json",
            body=e.json(),
        )

    try:
        response = await handle_request(parsed_request)

        return func.HttpResponse(
            status_code=200,
            mimetype="application/json",
            body=response.json(),
        )
    except BBoxTooLargeError as e:
        # A too-large bbox is a client error: report it as a 400.
        logging.exception(e)
        return func.HttpResponse(
            status_code=400,
            mimetype="application/json",
            body=json.dumps({"error": str(e)}),
        )
    except Exception as e:
        # Anything else is an internal error; log it but do not leak
        # details to the caller.
        logging.exception(e)
        return func.HttpResponse(
            status_code=500,
            mimetype="application/json",
        )


async def handle_request(req: AnimationRequest) -> AnimationResponse:
    """Render the animation described by *req* and upload the result.

    Builds the stamp overlays (progress bar / branding) requested by the
    client, runs the mosaic animator, and uploads the resulting GIF to
    blob storage.

    Returns:
        AnimationResponse carrying the public URL of the uploaded GIF.
    """
    settings = AnimationSettings.get()
    # Stamps are deferred as factories because the progress bar needs the
    # final (frame_count, frame_number) at render time.
    stamps: List[Callable[[int, int], ImageStamp]] = []
    if req.show_progressbar:
        stamps.append(
            lambda frame_count, frame_number: ProgressBarStamp(
                frame_count, frame_number
            )
        )
    if req.show_branding:
        # The logo overlay does not depend on frame position.
        stamps.append(lambda x, y: LogoStamp())

    animator = PcMosaicAnimation(
        bbox=req.bbox,
        zoom=req.zoom,
        cql=req.cql,
        render_options=req.get_render_options(),
        settings=settings,
        stamps=stamps,
        frame_duration=req.duration,
        data_api_url_override=req.data_api_url,
    )

    gif = await animator.get(
        req.get_relative_delta(),
        req.start,
        req.get_valid_frames(),
    )

    gif_url = upload_gif(gif, req.get_collection())
    return AnimationResponse(url=gif_url)
def _get_render_options(render_params: str) -> Dict[str, List[str]]:
    """Parse a query string into a mapping of key -> list of values.

    Keys that repeat in the query string accumulate their values in order
    of appearance, e.g. ``"assets=B04&assets=B03"`` becomes
    ``{"assets": ["B04", "B03"]}``.

    Args:
        render_params: Raw query string, e.g. ``"assets=B04&collection=naip"``.

    Returns:
        Dict mapping each parameter name to the list of its values.
    """
    result: Dict[str, List[str]] = {}
    for pair in render_params.split("&"):
        # BUGFIX: split on the first "=" only. Values such as CQL
        # expressions (e.g. "expression=(a=b)") may themselves contain
        # "=", which made the original two-way unpack raise ValueError.
        key, _, value = pair.partition("=")
        result.setdefault(key, []).append(value)
    return result
class AnimationRequest(BaseModel):
    """Request payload for the animation endpoint.

    Describes the map extent, mosaic/render configuration, and the time
    series (``start`` advanced by ``step`` * ``unit`` per frame, for up to
    ``frames`` frames) to animate.
    """

    # Geographic extent of the animation: [west, south, east, north].
    # NOTE(review): assumed order matches mercantile/tiler conventions —
    # confirm against PcMosaicAnimation.
    bbox: List[float]
    # Web-mercator zoom level used when fetching tiles.
    zoom: int
    # CQL2-JSON filter selecting the items to mosaic.
    cql: Dict[str, Any]
    # Raw query string of tiler render options; must include "collection"
    # (see get_collection) and is validated via RenderOptions below.
    render_params: str
    # Timestamp of the first frame.
    start: datetime
    # Per-frame duration passed to the animator as frame_duration
    # (units defined by PcMosaicAnimation; presumably milliseconds — verify).
    duration: int
    # Increment applied between frames, in `unit` units.
    step: int
    # Time unit for `step`; must be one of the keys of _deltas.
    unit: str
    # Requested number of frames; capped at MAX_FRAMES by get_valid_frames().
    frames: int
    # Overlay toggles; populated from the camelCase aliases in the JSON body.
    show_branding: bool = Field(default=True, alias="showBranding")
    show_progressbar: bool = Field(default=True, alias="showProgressBar")

    data_api_url: Optional[str] = None
    """Override for the data API URL. Useful for testing."""

    @field_validator("render_params")
    def _validate_render_params(cls, v: str) -> str:
        # Parse eagerly so malformed render params fail request validation
        # instead of surfacing later during rendering.
        RenderOptions.from_query_params(v)
        return v

    @field_validator("unit")
    def _validate_unit(cls, v: str) -> str:
        # Restrict to the units supported by the _deltas dispatch table.
        if v not in _deltas:
            raise ValueError(
                "Invalid unit. Must be one of: " + ", ".join(_deltas.keys())
            )
        return v

    def get_render_options(self) -> RenderOptions:
        """Parse render_params into a structured RenderOptions object."""
        return RenderOptions.from_query_params(self.render_params)

    def get_collection(self) -> str:
        """Return the STAC collection id embedded in render_params."""
        render_options = _get_render_options(self.render_params)
        return render_options["collection"][0]

    def get_valid_frames(self) -> int:
        """Return the frame count, capped at the service-wide MAX_FRAMES."""
        return min(self.frames, MAX_FRAMES)

    def get_relative_delta(self) -> relativedelta:
        """Return the per-frame time increment (step expressed in unit)."""
        return _deltas[self.unit](self.step)


class AnimationResponse(BaseModel):
    """Response payload: public URL of the uploaded animated GIF."""

    url: str
@dataclass
class Point:
    """A 2-D coordinate pair, in either geographic or pixel space."""

    x: float
    y: float


def geop_to_imgp(
    geo_p: Point, bbox: mercantile.Bbox, pixel_width: float, pixel_height: float
) -> Point:
    """Project a geographic point into image (pixel) coordinates.

    Maps *geo_p* from the geographic extent *bbox* onto an image of
    *pixel_width* x *pixel_height* pixels, with the origin at the top-left
    corner and the y axis pointing down.
    """
    # Size of one pixel, in map units, along each axis.
    map_units_per_px_x = (bbox.right - bbox.left) / pixel_width
    map_units_per_px_y = (bbox.top - bbox.bottom) / pixel_height

    col = (geo_p.x - bbox.left) / map_units_per_px_x
    row = (bbox.top - geo_p.y) / map_units_per_px_y
    return Point(col, row)
from pathlib import Path

# Directory containing this module; static assets live alongside it.
HERE = Path(__file__).parent


class Resources:
    """Locations of static assets bundled with funclib."""

    # FIX: the first parameter of a @classmethod is conventionally named
    # `cls`, not `self` (PEP 8); behavior is unchanged.
    @classmethod
    def logo_path(cls) -> Path:
        """Path to the Microsoft logo image stamped onto exports."""
        return HERE.joinpath("resources/ms-logo-sized.jpg")

    @classmethod
    def font_path(cls) -> Path:
        """Path to the DejaVu Sans font used for text overlays."""
        return HERE.joinpath("resources/DejaVuSans.ttf")
class BaseExporterSettings(BaseSettings):
    """Shared settings for the image/animation exporter functions.

    Attributes:
        api_root_url: Base URL of the Planetary Computer data API.
        output_storage_url: Azure Blob container URL that exports are
            written to (no default; must be supplied via the environment).
    """

    api_root_url: str = "https://planetarycomputer.microsoft.com/api/data/v1"
    output_storage_url: str

    def get_container_client(self) -> ContainerClient:
        """Return a client for the configured output storage container."""
        return get_container_client(
            self.output_storage_url,
        )

    def get_register_url(self, data_api_url_override: Optional[str] = None) -> str:
        """Return the URL of the data API's mosaic register endpoint.

        Args:
            data_api_url_override: Alternate data API base URL (testing).
        """
        # BUGFIX: os.path.join is the wrong tool for URLs — it is
        # platform-dependent (backslashes on Windows). Build the URL
        # explicitly; rstrip keeps behavior identical for bases with or
        # without a trailing slash.
        base = data_api_url_override or self.api_root_url
        return f"{base.rstrip('/')}/mosaic/register/"

    def get_mosaic_info_url(
        self, collection_id: str, data_api_url_override: Optional[str] = None
    ) -> str:
        """Return the mosaic info endpoint URL for *collection_id*."""
        base = data_api_url_override or self.api_root_url
        return f"{base.rstrip('/')}/mosaic/info?collection={collection_id}"
class PcUrlStamp(ImageStamp):
    """Stamps the Planetary Computer URL near the lower-right corner."""

    def __init__(self) -> None:
        self.font = ImageFont.truetype(Resources.font_path(), 12)  # type: ignore
        self.text = "planetarycomputer.microsoft.com"
        # BUGFIX: FreeTypeFont.getsize() was removed in Pillow 10.0, and
        # requirements pin pillow==10.3.0, so the original call raises
        # AttributeError. Measure the text with getbbox() instead.
        # NOTE(review): getbbox measures the ink box, which may differ by
        # a few pixels from the removed getsize(); verify placement visually.
        left, top, right, bottom = self.font.getbbox(self.text)
        self.text_width = right - left
        self.text_height = bottom - top

    def apply(self, image: PILImage) -> PILImage:
        """Composite the URL text (over a white pill) onto *image*."""
        brand_frame = Image.new("RGBA", (image.width, image.height), TRANSPARENT)

        BOTTOM_OFFSET = 16
        PADDING = 2

        draw = ImageDraw.Draw(brand_frame)
        x, y = (
            image.width - self.text_width - PADDING * 4.5,
            image.height - self.text_height - BOTTOM_OFFSET,
        )

        # Draw a padded background for the text
        draw.rounded_rectangle(
            (
                (x - PADDING, y - self.text_height / 2 + PADDING),
                (x + self.text_width + PADDING, y + self.text_height + PADDING * 2),
            ),
            radius=1,
            fill=(255, 255, 255, 255),
        )

        draw.text(
            (x, y),
            text=self.text,
            font=self.font,
            fill=(0, 0, 0, 255),
        )

        # Alpha-composite so the rounded corners blend with the frame.
        return Image.alpha_composite(image, brand_frame)
# Fully transparent white, used as the base color for overlay frames.
TRANSPARENT = (255, 255, 255, 0)


class ImageStamp(ABC):
    """Interface for overlays composited onto exported image frames."""

    @abstractmethod
    def apply(self, image: PILImage) -> PILImage:
        """Return *image* with this stamp applied."""
"http", 13 | "direction": "out", 14 | "name": "$return" 15 | } 16 | ] 17 | } 18 | -------------------------------------------------------------------------------- /pcfuncs/image/settings.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | from cachetools import LRUCache, cached 4 | from funclib.settings import BaseExporterSettings 5 | 6 | IMAGE_SETTINGS_PREFIX = "IMAGE_" 7 | DEFAULT_CONCURRENCY = 10 8 | 9 | logger = logging.getLogger(__name__) 10 | 11 | 12 | class ImageSettings(BaseExporterSettings): 13 | tile_request_concurrency: int = DEFAULT_CONCURRENCY 14 | 15 | # Maximum tiles to fetch for a single request 16 | max_tile_count: int = 144 17 | max_pixels: int = 144 * 512 * 512 18 | 19 | model_config = { 20 | "env_prefix": IMAGE_SETTINGS_PREFIX, 21 | "env_nested_delimiter": "__", # type: ignore 22 | } 23 | 24 | 25 | @cached(LRUCache(maxsize=100)) # type: ignore 26 | def get_settings() -> ImageSettings: 27 | settings = ImageSettings() # type: ignore 28 | logger.info(f"API URL: {settings.api_root_url}") 29 | logger.info(f"Concurrency limit: {settings.tile_request_concurrency}") 30 | return settings 31 | -------------------------------------------------------------------------------- /pcfuncs/ipban/__init__.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import logging 3 | 4 | import azure.functions as func 5 | from azure.data.tables import TableServiceClient 6 | from azure.identity import DefaultAzureCredential 7 | from azure.monitor.query import LogsQueryClient 8 | 9 | from .config import settings 10 | from .models import UpdateBannedIPTask 11 | 12 | logger = logging.getLogger(__name__) 13 | 14 | 15 | def main(mytimer: func.TimerRequest) -> None: 16 | utc_timestamp: str = ( 17 | datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat() 18 | ) 19 | logger.info("Updating the ip ban list at %s", utc_timestamp) 20 | credential: 
class Settings(BaseSettings):
    """ipban function configuration, loaded from environment variables."""

    # Table service endpoint passed to TableServiceClient.
    storage_account_url: str
    # Name of the Azure Table holding the banned IP entries.
    banned_ip_table: str
    # Log Analytics workspace queried for StorageBlobLogs.
    log_analytics_workspace_id: str

    # Time and threshold settings
    # Look-back window for blob read logs, in hours.
    time_window_in_hours: int = Field(default=24)
    # Ban threshold: total GetBlob reads (in GB) within the window.
    threshold_read_count_in_gb: int = Field(default=5120)


# Create a global settings instance
settings = Settings()  # type: ignore
    def get_blob_logs_query_result(self) -> List[LogsTableRow]:
        """Query Log Analytics for IPs whose blob reads exceed the threshold.

        Runs a Kusto query over StorageBlobLogs for the configured time
        window and returns one row per offending IP. Each row is
        (IpAddress, readcount), where readcount is the total GetBlob
        response size in GB.
        """
        # Query notes:
        # - CallerIpAddress is "ip:port"; split(...)[0] keeps just the IP.
        # - Private (ipv4_is_private) addresses are excluded from banning.
        # - ResponseBodySize is summed in bytes, then divided down to GB.
        query: str = f"""
        StorageBlobLogs
        | where TimeGenerated > ago({settings.time_window_in_hours}h)
        | extend IpAddress = tostring(split(CallerIpAddress, ":")[0])
        | where OperationName == 'GetBlob'
        | where not(ipv4_is_private(IpAddress))
        | summarize readcount = sum(ResponseBodySize) / (1024 * 1024 * 1024)
        by IpAddress
        | where readcount > {settings.threshold_read_count_in_gb}
        """
        response: Any = self.log_query_client.query_workspace(
            settings.log_analytics_workspace_id, query, timespan=None
        )
        # Only the first (and only) result table of this query is relevant.
        return response.tables[0].rows
69 | 70 | logging.info("IP ban list has been updated successfully") 71 | -------------------------------------------------------------------------------- /pcfuncs/pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | asyncio_mode = auto -------------------------------------------------------------------------------- /pcfuncs/requirements-deploy.txt: -------------------------------------------------------------------------------- 1 | # NOTE: Keep in sync with requirements.txt 2 | 3 | # DO NOT include azure-functions-worker in this file 4 | # The Python Worker is managed by Azure Functions platform 5 | # Manually managing azure-functions-worker may cause unexpected issues 6 | 7 | azure-functions 8 | 9 | requests==2.32.3 10 | aiohttp==3.9.5 11 | dateutils==0.6.12 12 | mercantile==1.2.1 13 | pillow==10.3.0 14 | pyproj==3.3.1 15 | pydantic>=2.7,<2.8 16 | rasterio==1.3.* 17 | azure-monitor-query==1.3.0 18 | pytest-mock==3.14.0 19 | # The deploy process needs symlinks to bring in 20 | # pctasks libraries. Symlink is created in deploy script 21 | ./pccommon_linked 22 | -------------------------------------------------------------------------------- /pcfuncs/requirements.txt: -------------------------------------------------------------------------------- 1 | # NOTE: Keep in sync with requirements-deploy.txt 2 | 3 | # DO NOT include azure-functions-worker in this file 4 | # The Python Worker is managed by Azure Functions platform 5 | # Manually managing azure-functions-worker may cause unexpected issues 6 | 7 | azure-functions 8 | 9 | requests==2.32.3 10 | aiohttp==3.9.5 11 | dateutils==0.6.12 12 | mercantile==1.2.1 13 | pillow==10.3.0 14 | pyproj==3.3.1 15 | pydantic>=2.7,<2.8 16 | rasterio==1.3.* 17 | azure-monitor-query==1.3.0 18 | pytest-mock==3.14.0 19 | # Deployment needs to copy the local code into 20 | # the app code directory, so requires a separate 21 | # requirements file. 
22 | -e ../pccommon 23 | -------------------------------------------------------------------------------- /pcfuncs/tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/microsoft/planetary-computer-apis/cc8cf429cf2a5206ff14b492e4d26ffad62749f3/pcfuncs/tests/__init__.py -------------------------------------------------------------------------------- /pcfuncs/tests/conftest.py: -------------------------------------------------------------------------------- 1 | from typing import List 2 | 3 | import pytest 4 | 5 | 6 | def pytest_addoption(parser: pytest.Parser) -> None: 7 | parser.addoption( 8 | "--no-integration", 9 | action="store_true", 10 | default=False, 11 | help="don't run integration tests", 12 | ) 13 | 14 | 15 | def pytest_configure(config: pytest.Config) -> None: 16 | config.addinivalue_line("markers", "integration: mark as an integration test") 17 | 18 | 19 | def pytest_collection_modifyitems( 20 | config: pytest.Config, items: List[pytest.Item] 21 | ) -> None: 22 | if config.getoption("--no-integration"): 23 | # --no-integration given in cli: skip integration tests 24 | skip_integration = pytest.mark.skip( 25 | reason="needs --no-integration option to run" 26 | ) 27 | for item in items: 28 | if "integration" in item.keywords: 29 | item.add_marker(skip_integration) 30 | -------------------------------------------------------------------------------- /pcfuncs/tests/data-files/s2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/microsoft/planetary-computer-apis/cc8cf429cf2a5206ff14b492e4d26ffad62749f3/pcfuncs/tests/data-files/s2.png -------------------------------------------------------------------------------- /pcfuncs/tests/funclib/__init__.py: -------------------------------------------------------------------------------- 
def test_render_options_encoded_parameters() -> None:
    """Repeated "assets" parameters survive a parse/encode round trip."""
    render_params = (
        "assets=B04&assets=B03&assets=B02&nodata=0&"
        "color_formula=Gamma RGB 3.2 Saturation 0.8 Sigmoidal RGB 25 0.35&"
        "collection=sentinel-2-l2a"
    )
    options = RenderOptions.from_query_params(render_params)

    assert options.assets == ["B04", "B03", "B02"]
    assert options.encoded_query_string.count("assets=") == 3


def test_naip_render_params() -> None:
    """NAIP-style params (asset_bidx) parse and re-encode without error."""
    options = RenderOptions.from_query_params(
        "assets=image&asset_bidx=image|1,2,3&collection=naip"
    )
    _ = options.encoded_query_string

    assert options.assets == "image"


def test_landsat_ndvi_params() -> None:
    """Expression-based (NDVI) render params parse correctly."""
    options = RenderOptions.from_query_params(
        "nodata=0&expression=(nir08-red)/(nir08+red)&rescale=-1,1&"
        "colormap_name=rdylgn&collection=landsat-c2-l2"
    )
    _ = options.encoded_query_string

    assert options.rescale == "-1,1"


def test_modis_fire_params() -> None:
    """MODIS LST render params (colormap + unscale) parse correctly."""
    options = RenderOptions.from_query_params(
        "assets=LST_Day_1KM&colormap_name=jet&rescale=255,310&"
        "unscale=True&collection=modis-21A2-061"
    )
    _ = options.encoded_query_string

    assert options.colormap_name == "jet"
HERE = Path(__file__).parent
DATA_FILES = HERE / ".." / "data-files"


def test_pil_crop() -> None:
    """Cropping to a quarter of the extent halves each pixel dimension."""
    source = Image.open(DATA_FILES / "s2.png")
    raster = PILRaster(
        extent=RasterExtent(
            bbox=Bbox(0, 0, 10, 10),
            cols=source.size[0],
            rows=source.size[1],
        ),
        image=source,
    )

    cropped = raster.crop(Bbox(0, 0, 5, 5))

    # Allow sub-pixel rounding slack when the source has odd dimensions.
    half_cols = source.size[0] / 2
    half_rows = source.size[1] / 2
    assert abs(cropped.extent.cols - half_cols) < 1
    assert abs(cropped.extent.rows - half_rows) < 1


def test_raster_extent_map_to_grid() -> None:
    """The extent midpoint maps to the center cell of a 10x10 grid."""
    extent = RasterExtent(
        bbox=Bbox(0, 0, 10, 10),
        cols=10,
        rows=10,
    )

    col, row = extent.map_to_grid(5, 5)

    assert col == 5
    assert row == 5
True, 32 | } 33 | assert ImageRequest(**req1).get_geometry() == geom 34 | 35 | req2: Dict[str, Any] = { 36 | "geometry": geom, 37 | "cql": { 38 | "filter-lang": "cql2-json", 39 | "filter": { 40 | "op": "and", 41 | "args": [ 42 | {"op": "s_intersects", "args": [{"property": "geometry"}, geom]}, 43 | {"op": "=", "args": [{"property": "collection"}, "naip"]}, 44 | ], 45 | }, 46 | }, 47 | "render_params": "assets=image&asset_bidx=image|1,2,3&collection=naip", 48 | "cols": 1080, 49 | "rows": 1080, 50 | "showBranding": True, 51 | } 52 | 53 | assert ImageRequest(**req2).get_geometry() == geom 54 | -------------------------------------------------------------------------------- /pcfuncs/tests/ipban/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/microsoft/planetary-computer-apis/cc8cf429cf2a5206ff14b492e4d26ffad62749f3/pcfuncs/tests/ipban/__init__.py -------------------------------------------------------------------------------- /pcstac/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM mcr.microsoft.com/cbl-mariner/base/python:3.9 2 | 3 | RUN tdnf install -y ca-certificates build-essential \ 4 | && tdnf clean all 5 | 6 | ENV CURL_CA_BUNDLE /etc/ssl/certs/ca-certificates.crt 7 | 8 | WORKDIR /opt/src 9 | 10 | COPY pcstac /opt/src/pcstac 11 | COPY pccommon /opt/src/pccommon 12 | RUN --mount=type=cache,target=/root/.cache \ 13 | pip install -U "setuptools>=65.5.1" 14 | # The order of these pip installs is important :( 15 | RUN --mount=type=cache,target=/root/.cache \ 16 | pip install -r ./pccommon/requirements.txt 17 | RUN --mount=type=cache,target=/root/.cache \ 18 | pip install -r ./pcstac/requirements-server.txt 19 | RUN --mount=type=cache,target=/root/.cache \ 20 | pip install --no-deps -e ./pccommon -e ./pcstac[server] 21 | 22 | ENV APP_HOST=0.0.0.0 23 | ENV APP_PORT=81 24 | 25 | # This value should match that which is used as the root_path 
in FastAPI, which 26 | # is typically set via the APP_ROOT_PATH environment variable. 27 | ENV APP_ROOT_PATH="" 28 | 29 | CMD uvicorn pcstac.main:app --host ${APP_HOST} --port ${APP_PORT} --root-path ${APP_ROOT_PATH} --log-level info 30 | -------------------------------------------------------------------------------- /pcstac/Dockerfile.dev: -------------------------------------------------------------------------------- 1 | FROM pc-apis-stac 2 | 3 | RUN --mount=type=cache,target=/root/.cache \ 4 | --mount=type=bind,source=requirements-dev.txt,target=requirements-dev.txt \ 5 | pip install -r requirements-dev.txt 6 | 7 | RUN --mount=type=cache,target=/root/.cache \ 8 | pip install --no-deps -e ./pccommon[dev] -e ./pcstac 9 | -------------------------------------------------------------------------------- /pcstac/pcstac/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/microsoft/planetary-computer-apis/cc8cf429cf2a5206ff14b492e4d26ffad62749f3/pcstac/pcstac/__init__.py -------------------------------------------------------------------------------- /pcstac/pcstac/api.py: -------------------------------------------------------------------------------- 1 | from typing import Any, Dict, Optional 2 | 3 | from stac_fastapi.api.app import StacApi 4 | 5 | from pccommon.openapi import fixup_schema 6 | from pcstac.config import STAC_API_VERSION 7 | 8 | STAC_API_OPENAPI_TAG = f"STAC API {STAC_API_VERSION}" 9 | 10 | 11 | class PCStacApi(StacApi): 12 | """StacApi factory. 13 | 14 | Factory for creating a STAC-compliant FastAPI application. After instantation, the 15 | application is accessible from the `PCStacApi.app` attribute. This class differs 16 | from stac-fastapi in that it generates OpenAPI output compliant with OpenAPI 3.1. 17 | Future versions of FastAPI are likely to make this extension unnecessary. 
# Cache-key prefixes used to namespace cached responses for each pcstac
# endpoint. Callers combine these with request-specific suffixes (e.g.
# filter.py builds f"{CACHE_KEY_QUERYABLES}:{collection_id}") before passing
# them to pccommon.redis.cached_result.
CACHE_KEY_COLLECTIONS = "/collections"
CACHE_KEY_COLLECTION = "/collection"
CACHE_KEY_ITEMS = "/items"
CACHE_KEY_QUERYABLES = "/queryables"
CACHE_KEY_SEARCH = "/search"
CACHE_KEY_LANDING_PAGE = "/landing-page"
CACHE_KEY_BASE_ITEM = "/base-item"
from abc import ABC, abstractmethod
from typing import Any, Dict

from asyncpg.exceptions import InvalidPasswordError
from fastapi import HTTPException
from starlette import status

# Generic message for 500 responses so internal details never leak to clients.
MESSAGE_500 = (
    "Service encountered an error. Please contact planetarycomputer@microsoft.com"
)

# Maps exception types to the HTTP status code the API should respond with.
PC_DEFAULT_STATUS_CODES = {InvalidPasswordError: status.HTTP_500_INTERNAL_SERVER_ERROR}


def generic_500() -> HTTPException:
    """Return a generic HTTP 500 exception carrying the non-revealing message."""
    return HTTPException(status_code=500, detail=MESSAGE_500)


class PCStacError(Exception, ABC):
    """Base class for pcstac errors that know how to map themselves to HTTP."""

    @abstractmethod
    def to_http(self) -> HTTPException:
        """Convert this error into the HTTPException surfaced to the client."""
        pass


class DuplicateRowError(PCStacError):
    """Raised when a (collection, item) pair unexpectedly matches multiple rows.

    Args:
        collection_id: ID of the collection containing the duplicate.
        item_id: ID of the duplicated item.
    """

    def __init__(
        # NOTE: **kwargs annotations apply to each value; the values here are
        # arbitrary (forwarded to Exception), so Any is correct, not Dict[str, Any].
        self, collection_id: str, item_id: str, *args: Any, **kwargs: Any
    ) -> None:
        # MyPy is confused by inheritance; ignore 'too many arguments' error here
        super().__init__(  # type: ignore
            f"Duplicate row found for collection {collection_id}, item {item_id}",
            *args,
            **kwargs,
        )
        self.collection_id = collection_id
        self.item_id = item_id

    def to_http(self) -> HTTPException:
        """Surface as a generic 500; duplication details stay server-side."""
        return HTTPException(
            status_code=500,
            detail=MESSAGE_500,
        )
from typing import Any, Dict, Optional

from buildpg import render
from fastapi import Request
from stac_fastapi.pgstac.extensions.filter import FiltersClient
from stac_fastapi.types.errors import NotFoundError

from pccommon.redis import cached_result
from pcstac.contants import CACHE_KEY_QUERYABLES


class PCFiltersClient(FiltersClient):
    # Caches pgstac queryable lookups via pccommon.redis.cached_result.

    async def get_queryables(
        self, request: Request, collection_id: Optional[str] = None, **kwargs: Any
    ) -> Dict[str, Any]:
        """Override pgstac backend get_queryables to make use of cached results

        Args:
            request: Incoming request; provides the read DB pool on
                ``app.state.readpool`` and the URL used as the schema ``$id``.
            collection_id: Collection to fetch queryables for; None requests
                the global queryables document.

        Raises:
            NotFoundError: if the database returns no queryables for the
                given collection.
        """

        async def _fetch() -> Dict:
            # Read-only connection pool configured on the app at startup.
            pool = request.app.state.readpool

            async with pool.acquire() as conn:
                q, p = render(
                    """
                    SELECT * FROM get_queryables(:collection::text);
                    """,
                    collection=collection_id,
                )
                queryables = await conn.fetchval(q, *p)
                if not queryables:
                    raise NotFoundError(f"Collection {collection_id} not found")

                # Tag the JSON schema with the URL it was served from.
                queryables["$id"] = str(request.url)
                return queryables

        # Cache per collection; collection_id is None for the global document.
        cache_key = f"{CACHE_KEY_QUERYABLES}:{collection_id}"
        return await cached_result(_fetch, cache_key, request)
STAC_CORE_ROUTES = [
    "GET /",
    "GET /collections",
    "GET /collections/{collection_id}",
    "GET /collections/{collection_id}/items",
    "GET /collections/{collection_id}/items/{item_id}",
    "GET /conformance",
    "GET /search",
    "POST /search",
]


def test_core_router(api_client):
    """Every core STAC route must be registered on the application."""
    registered = {
        f"{list(route.methods)[0]} {route.path}" for route in api_client.app.routes
    }
    missing = set(STAC_CORE_ROUTES) - registered
    assert not missing
| ], 40 | [ 41 | -85.875, 42 | 30.9375 43 | ] 44 | ] 45 | ] 46 | }, 47 | "links": [ 48 | { 49 | "rel": "self", 50 | "href": "collections/naip/items/al_m_3008501_ne_16_060_20191109_20200114.json", 51 | "type": "application/json" 52 | } 53 | ], 54 | "assets": { 55 | "image": { 56 | "href": "https://naipeuwest.blob.core.windows.net/naip/v002/al/2019/al_60cm_2019/30085/m_3008501_ne_16_060_20191109.tif", 57 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 58 | "title": "RGBIR COG tile", 59 | "eo:bands": [ 60 | { 61 | "name": "Red" 62 | }, 63 | { 64 | "name": "Green" 65 | }, 66 | { 67 | "name": "Blue" 68 | }, 69 | { 70 | "name": "NIR", 71 | "description": "near-infrared" 72 | } 73 | ], 74 | "roles": [ 75 | "data" 76 | ] 77 | }, 78 | "metadata": { 79 | "href": "https://naipeuwest.blob.core.windows.net/naip/v002/al/2019/al_fgdc_2019/30085/m_3008501_ne_16_060_20191109.txt", 80 | "type": "text/plain", 81 | "title": "FGDC Metdata", 82 | "roles": [ 83 | "metadata" 84 | ] 85 | }, 86 | "thumbnail": { 87 | "href": "https://naipeuwest.blob.core.windows.net/naip/v002/al/2019/al_60cm_2019/30085/m_3008501_ne_16_060_20191109.200.jpg", 88 | "type": "image/jpeg", 89 | "title": "Thumbnail", 90 | "roles": [ 91 | "thumbnail" 92 | ] 93 | } 94 | }, 95 | "bbox": [ 96 | -85.9375, 97 | 30.9375, 98 | -85.875, 99 | 31.0 100 | ], 101 | "stac_extensions": [ 102 | "https://stac-extensions.github.io/eo/v1.0.0/schema.json", 103 | "https://stac-extensions.github.io/projection/v1.0.0/schema.json" 104 | ], 105 | "collection": "naip" 106 | } -------------------------------------------------------------------------------- /pcstac/tests/data-files/naip/items/al_m_3008502_ne_16_060_20191109_20200114.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "Feature", 3 | "stac_version": "1.0.0", 4 | "id": "al_m_3008502_ne_16_060_20191109_20200114", 5 | "properties": { 6 | "naip:state": "al", 7 | "providers": [ 8 | { 9 | "name": "USDA Farm 
Service Agency", 10 | "roles": [ 11 | "producer", 12 | "licensor" 13 | ], 14 | "url": "https://www.fsa.usda.gov/programs-and-services/aerial-photography/imagery-programs/naip-imagery/" 15 | } 16 | ], 17 | "proj:epsg": 32616, 18 | "datetime": "2019-11-09T00:00:00Z" 19 | }, 20 | "geometry": { 21 | "type": "Polygon", 22 | "coordinates": [ 23 | [ 24 | [ 25 | -85.75, 26 | 30.9375 27 | ], 28 | [ 29 | -85.75, 30 | 31.0 31 | ], 32 | [ 33 | -85.8125, 34 | 31.0 35 | ], 36 | [ 37 | -85.8125, 38 | 30.9375 39 | ], 40 | [ 41 | -85.75, 42 | 30.9375 43 | ] 44 | ] 45 | ] 46 | }, 47 | "links": [ 48 | { 49 | "rel": "self", 50 | "href": "collections/naip/items/al_m_3008502_ne_16_060_20191109_20200114.json", 51 | "type": "application/json" 52 | } 53 | ], 54 | "assets": { 55 | "image": { 56 | "href": "https://naipeuwest.blob.core.windows.net/naip/v002/al/2019/al_60cm_2019/30085/m_3008502_ne_16_060_20191109.tif", 57 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 58 | "title": "RGBIR COG tile", 59 | "eo:bands": [ 60 | { 61 | "name": "Red" 62 | }, 63 | { 64 | "name": "Green" 65 | }, 66 | { 67 | "name": "Blue" 68 | }, 69 | { 70 | "name": "NIR", 71 | "description": "near-infrared" 72 | } 73 | ], 74 | "roles": [ 75 | "data" 76 | ] 77 | }, 78 | "metadata": { 79 | "href": "https://naipeuwest.blob.core.windows.net/naip/v002/al/2019/al_fgdc_2019/30085/m_3008502_ne_16_060_20191109.txt", 80 | "type": "text/plain", 81 | "title": "FGDC Metdata", 82 | "roles": [ 83 | "metadata" 84 | ] 85 | }, 86 | "thumbnail": { 87 | "href": "https://naipeuwest.blob.core.windows.net/naip/v002/al/2019/al_60cm_2019/30085/m_3008502_ne_16_060_20191109.200.jpg", 88 | "type": "image/jpeg", 89 | "title": "Thumbnail", 90 | "roles": [ 91 | "thumbnail" 92 | ] 93 | } 94 | }, 95 | "bbox": [ 96 | -85.8125, 97 | 30.9375, 98 | -85.75, 99 | 31.0 100 | ], 101 | "stac_extensions": [ 102 | "https://stac-extensions.github.io/eo/v1.0.0/schema.json", 103 | 
"https://stac-extensions.github.io/projection/v1.0.0/schema.json" 104 | ], 105 | "collection": "naip" 106 | } -------------------------------------------------------------------------------- /pcstac/tests/data-files/naip/items/al_m_3008503_ne_16_060_20191118_20200114.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "Feature", 3 | "stac_version": "1.0.0", 4 | "id": "al_m_3008503_ne_16_060_20191118_20200114", 5 | "properties": { 6 | "naip:state": "al", 7 | "providers": [ 8 | { 9 | "name": "USDA Farm Service Agency", 10 | "roles": [ 11 | "producer", 12 | "licensor" 13 | ], 14 | "url": "https://www.fsa.usda.gov/programs-and-services/aerial-photography/imagery-programs/naip-imagery/" 15 | } 16 | ], 17 | "proj:epsg": 32616, 18 | "datetime": "2019-11-18T00:00:00Z" 19 | }, 20 | "geometry": { 21 | "type": "Polygon", 22 | "coordinates": [ 23 | [ 24 | [ 25 | -85.625, 26 | 30.9375 27 | ], 28 | [ 29 | -85.625, 30 | 31.0 31 | ], 32 | [ 33 | -85.6875, 34 | 31.0 35 | ], 36 | [ 37 | -85.6875, 38 | 30.9375 39 | ], 40 | [ 41 | -85.625, 42 | 30.9375 43 | ] 44 | ] 45 | ] 46 | }, 47 | "links": [ 48 | { 49 | "rel": "self", 50 | "href": "collections/naip/items/al_m_3008503_ne_16_060_20191118_20200114.json", 51 | "type": "application/json" 52 | } 53 | ], 54 | "assets": { 55 | "image": { 56 | "href": "https://naipeuwest.blob.core.windows.net/naip/v002/al/2019/al_60cm_2019/30085/m_3008503_ne_16_060_20191118.tif", 57 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 58 | "title": "RGBIR COG tile", 59 | "eo:bands": [ 60 | { 61 | "name": "Red" 62 | }, 63 | { 64 | "name": "Green" 65 | }, 66 | { 67 | "name": "Blue" 68 | }, 69 | { 70 | "name": "NIR", 71 | "description": "near-infrared" 72 | } 73 | ], 74 | "roles": [ 75 | "data" 76 | ] 77 | }, 78 | "metadata": { 79 | "href": "https://naipeuwest.blob.core.windows.net/naip/v002/al/2019/al_fgdc_2019/30085/m_3008503_ne_16_060_20191118.txt", 80 | "type": "text/plain", 81 | 
"title": "FGDC Metdata", 82 | "roles": [ 83 | "metadata" 84 | ] 85 | }, 86 | "thumbnail": { 87 | "href": "https://naipeuwest.blob.core.windows.net/naip/v002/al/2019/al_60cm_2019/30085/m_3008503_ne_16_060_20191118.200.jpg", 88 | "type": "image/jpeg", 89 | "title": "Thumbnail", 90 | "roles": [ 91 | "thumbnail" 92 | ] 93 | } 94 | }, 95 | "bbox": [ 96 | -85.6875, 97 | 30.9375, 98 | -85.625, 99 | 31.0 100 | ], 101 | "stac_extensions": [ 102 | "https://stac-extensions.github.io/eo/v1.0.0/schema.json", 103 | "https://stac-extensions.github.io/projection/v1.0.0/schema.json" 104 | ], 105 | "collection": "naip" 106 | } -------------------------------------------------------------------------------- /pcstac/tests/data-files/naip/items/al_m_3008503_nw_16_060_20191118_20200114.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "Feature", 3 | "stac_version": "1.0.0", 4 | "id": "al_m_3008503_nw_16_060_20191118_20200114", 5 | "properties": { 6 | "naip:state": "al", 7 | "providers": [ 8 | { 9 | "name": "USDA Farm Service Agency", 10 | "roles": [ 11 | "producer", 12 | "licensor" 13 | ], 14 | "url": "https://www.fsa.usda.gov/programs-and-services/aerial-photography/imagery-programs/naip-imagery/" 15 | } 16 | ], 17 | "proj:epsg": 32616, 18 | "datetime": "2019-11-18T00:00:00Z" 19 | }, 20 | "geometry": { 21 | "type": "Polygon", 22 | "coordinates": [ 23 | [ 24 | [ 25 | -85.6875, 26 | 30.9375 27 | ], 28 | [ 29 | -85.6875, 30 | 31.0 31 | ], 32 | [ 33 | -85.75, 34 | 31.0 35 | ], 36 | [ 37 | -85.75, 38 | 30.9375 39 | ], 40 | [ 41 | -85.6875, 42 | 30.9375 43 | ] 44 | ] 45 | ] 46 | }, 47 | "links": [ 48 | { 49 | "rel": "self", 50 | "href": "collections/naip/items/al_m_3008503_nw_16_060_20191118_20200114.json", 51 | "type": "application/json" 52 | } 53 | ], 54 | "assets": { 55 | "image": { 56 | "href": "https://naipeuwest.blob.core.windows.net/naip/v002/al/2019/al_60cm_2019/30085/m_3008503_nw_16_060_20191118.tif", 57 | "type": "image/tiff; 
application=geotiff; profile=cloud-optimized", 58 | "title": "RGBIR COG tile", 59 | "eo:bands": [ 60 | { 61 | "name": "Red" 62 | }, 63 | { 64 | "name": "Green" 65 | }, 66 | { 67 | "name": "Blue" 68 | }, 69 | { 70 | "name": "NIR", 71 | "description": "near-infrared" 72 | } 73 | ], 74 | "roles": [ 75 | "data" 76 | ] 77 | }, 78 | "metadata": { 79 | "href": "https://naipeuwest.blob.core.windows.net/naip/v002/al/2019/al_fgdc_2019/30085/m_3008503_nw_16_060_20191118.txt", 80 | "type": "text/plain", 81 | "title": "FGDC Metdata", 82 | "roles": [ 83 | "metadata" 84 | ] 85 | }, 86 | "thumbnail": { 87 | "href": "https://naipeuwest.blob.core.windows.net/naip/v002/al/2019/al_60cm_2019/30085/m_3008503_nw_16_060_20191118.200.jpg", 88 | "type": "image/jpeg", 89 | "title": "Thumbnail", 90 | "roles": [ 91 | "thumbnail" 92 | ] 93 | } 94 | }, 95 | "bbox": [ 96 | -85.75, 97 | 30.9375, 98 | -85.6875, 99 | 31.0 100 | ], 101 | "stac_extensions": [ 102 | "https://stac-extensions.github.io/eo/v1.0.0/schema.json", 103 | "https://stac-extensions.github.io/projection/v1.0.0/schema.json" 104 | ], 105 | "collection": "naip" 106 | } -------------------------------------------------------------------------------- /pcstac/tests/data-files/naip/items/al_m_3008504_ne_16_060_20191118_20200114.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "Feature", 3 | "stac_version": "1.0.0", 4 | "id": "al_m_3008504_ne_16_060_20191118_20200114", 5 | "properties": { 6 | "naip:state": "al", 7 | "providers": [ 8 | { 9 | "name": "USDA Farm Service Agency", 10 | "roles": [ 11 | "producer", 12 | "licensor" 13 | ], 14 | "url": "https://www.fsa.usda.gov/programs-and-services/aerial-photography/imagery-programs/naip-imagery/" 15 | } 16 | ], 17 | "proj:epsg": 32616, 18 | "datetime": "2019-11-18T00:00:00Z" 19 | }, 20 | "geometry": { 21 | "type": "Polygon", 22 | "coordinates": [ 23 | [ 24 | [ 25 | -85.5, 26 | 30.9375 27 | ], 28 | [ 29 | -85.5, 30 | 31.0 31 | ], 32 | [ 33 
| -85.5625, 34 | 31.0 35 | ], 36 | [ 37 | -85.5625, 38 | 30.9375 39 | ], 40 | [ 41 | -85.5, 42 | 30.9375 43 | ] 44 | ] 45 | ] 46 | }, 47 | "links": [ 48 | { 49 | "rel": "self", 50 | "href": "collections/naip/items/al_m_3008504_ne_16_060_20191118_20200114.json", 51 | "type": "application/json" 52 | } 53 | ], 54 | "assets": { 55 | "image": { 56 | "href": "https://naipeuwest.blob.core.windows.net/naip/v002/al/2019/al_60cm_2019/30085/m_3008504_ne_16_060_20191118.tif", 57 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 58 | "title": "RGBIR COG tile", 59 | "eo:bands": [ 60 | { 61 | "name": "Red" 62 | }, 63 | { 64 | "name": "Green" 65 | }, 66 | { 67 | "name": "Blue" 68 | }, 69 | { 70 | "name": "NIR", 71 | "description": "near-infrared" 72 | } 73 | ], 74 | "roles": [ 75 | "data" 76 | ] 77 | }, 78 | "metadata": { 79 | "href": "https://naipeuwest.blob.core.windows.net/naip/v002/al/2019/al_fgdc_2019/30085/m_3008504_ne_16_060_20191118.txt", 80 | "type": "text/plain", 81 | "title": "FGDC Metdata", 82 | "roles": [ 83 | "metadata" 84 | ] 85 | }, 86 | "thumbnail": { 87 | "href": "https://naipeuwest.blob.core.windows.net/naip/v002/al/2019/al_60cm_2019/30085/m_3008504_ne_16_060_20191118.200.jpg", 88 | "type": "image/jpeg", 89 | "title": "Thumbnail", 90 | "roles": [ 91 | "thumbnail" 92 | ] 93 | } 94 | }, 95 | "bbox": [ 96 | -85.5625, 97 | 30.9375, 98 | -85.5, 99 | 31.0 100 | ], 101 | "stac_extensions": [ 102 | "https://stac-extensions.github.io/eo/v1.0.0/schema.json", 103 | "https://stac-extensions.github.io/projection/v1.0.0/schema.json" 104 | ], 105 | "collection": "naip" 106 | } -------------------------------------------------------------------------------- /pcstac/tests/data-files/naip/items/al_m_3008505_nw_16_060_20191118_20200114.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "Feature", 3 | "stac_version": "1.0.0", 4 | "id": "al_m_3008505_nw_16_060_20191118_20200114", 5 | "properties": { 6 | 
"naip:state": "al", 7 | "providers": [ 8 | { 9 | "name": "USDA Farm Service Agency", 10 | "roles": [ 11 | "producer", 12 | "licensor" 13 | ], 14 | "url": "https://www.fsa.usda.gov/programs-and-services/aerial-photography/imagery-programs/naip-imagery/" 15 | } 16 | ], 17 | "proj:epsg": 32616, 18 | "datetime": "2019-11-18T00:00:00Z" 19 | }, 20 | "geometry": { 21 | "type": "Polygon", 22 | "coordinates": [ 23 | [ 24 | [ 25 | -85.4375, 26 | 30.9375 27 | ], 28 | [ 29 | -85.4375, 30 | 31.0 31 | ], 32 | [ 33 | -85.5, 34 | 31.0 35 | ], 36 | [ 37 | -85.5, 38 | 30.9375 39 | ], 40 | [ 41 | -85.4375, 42 | 30.9375 43 | ] 44 | ] 45 | ] 46 | }, 47 | "links": [ 48 | { 49 | "rel": "self", 50 | "href": "collections/naip/items/al_m_3008505_nw_16_060_20191118_20200114.json", 51 | "type": "application/json" 52 | } 53 | ], 54 | "assets": { 55 | "image": { 56 | "href": "https://naipeuwest.blob.core.windows.net/naip/v002/al/2019/al_60cm_2019/30085/m_3008505_nw_16_060_20191118.tif", 57 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 58 | "title": "RGBIR COG tile", 59 | "eo:bands": [ 60 | { 61 | "name": "Red" 62 | }, 63 | { 64 | "name": "Green" 65 | }, 66 | { 67 | "name": "Blue" 68 | }, 69 | { 70 | "name": "NIR", 71 | "description": "near-infrared" 72 | } 73 | ], 74 | "roles": [ 75 | "data" 76 | ] 77 | }, 78 | "metadata": { 79 | "href": "https://naipeuwest.blob.core.windows.net/naip/v002/al/2019/al_fgdc_2019/30085/m_3008505_nw_16_060_20191118.txt", 80 | "type": "text/plain", 81 | "title": "FGDC Metdata", 82 | "roles": [ 83 | "metadata" 84 | ] 85 | }, 86 | "thumbnail": { 87 | "href": "https://naipeuwest.blob.core.windows.net/naip/v002/al/2019/al_60cm_2019/30085/m_3008505_nw_16_060_20191118.200.jpg", 88 | "type": "image/jpeg", 89 | "title": "Thumbnail", 90 | "roles": [ 91 | "thumbnail" 92 | ] 93 | } 94 | }, 95 | "bbox": [ 96 | -85.5, 97 | 30.9375, 98 | -85.4375, 99 | 31.0 100 | ], 101 | "stac_extensions": [ 102 | "https://stac-extensions.github.io/eo/v1.0.0/schema.json", 
"""Load NAIP test fixtures (collection, items, queryables) into pgstac.

Run as a script: ``python loadtestdata.py``. Connection parameters come from
the standard pgstac/postgres environment variables read by ``PgstacDB``.
"""

import os
from pathlib import Path

import orjson
from pypgstac.load import Loader, Methods, PgstacDB

DATA_DIR = os.path.join(os.path.dirname(__file__), "data-files")
collection = os.path.join(DATA_DIR, "naip/collection.json")
items = os.path.join(DATA_DIR, "naip/items")


def load_test_data() -> None:
    """Upsert the NAIP test collection and every item fixture into pgstac."""
    with PgstacDB() as conn:
        loader = Loader(db=conn)
        with open(collection, "rb") as f:
            c = orjson.loads(f.read())
            loader.load_collections([c], Methods.upsert)
        pathlist = Path(items).glob("*.json")
        for path in pathlist:
            with open(str(path), "rb") as f:
                i = orjson.loads(f.read())
                loader.load_items([i], Methods.upsert)


def load_test_queryables() -> None:
    """Replace the NAIP queryable definitions used by the queryables tests."""
    with PgstacDB() as db:
        conn = db.connect()

        # Ensure the username user has pgstac on the search_path
        conn.execute("ALTER ROLE username SET search_path TO pgstac, public")

        # Delete any existing naip queryables
        conn.execute("DELETE FROM queryables WHERE 'naip' = any(collection_ids)")

        conn.execute(
            """
            INSERT INTO pgstac.queryables (name, collection_ids, definition)
            VALUES
            ('naip:year', '{"naip"}', '{"title": "Year", "type": "string"}'),
            ('naip:state', '{"naip"}', '{"title": "State", "type": "string"}');
            """
        )


# Guarded so importing this module has no side effects; previously these ran
# at import time. Matches the pattern used by tests/util/ingest.py.
if __name__ == "__main__":
    load_test_data()
    load_test_queryables()
import json

import pytest


@pytest.mark.asyncio
async def test_get_collections(app_client):
    """Test read /collections"""
    response = await app_client.get("/collections")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_get_collection(app_client):
    """Test read a collection which does exist"""
    response = await app_client.get("/collections/naip")
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_collection_not_found(app_client):
    """Test read a collection which does not exist"""
    response = await app_client.get("/collections/does-not-exist")
    print(json.dumps(response.json(), indent=2))
    assert response.status_code == 404


@pytest.mark.asyncio
async def test_all_collections_have_msft_regions(app_client):
    """Test that all collections have msft:region"""
    response = await app_client.get("/collections")
    assert response.status_code == 200
    body = response.json()
    assert all("msft:region" in c for c in body["collections"])
from typing import Any, Dict

import pystac
import pytest

from pcstac.config import STAC_API_VERSION


def remove_root(stac_object: Dict[str, Any]) -> None:
    """Remove links with rel == 'root' from *stac_object*, in place."""
    links = []
    for link in stac_object["links"]:
        if link["rel"] != "root":
            links.append(link)
    stac_object["links"] = links


@pytest.mark.asyncio
async def test_landing_page(app_client):
    """Test landing page"""
    resp = await app_client.get("/")
    assert resp.status_code == 200
    resp_json = resp.json()

    # Root links are stripped before validating the response as a Catalog
    # (presumably so validation does not chase the root reference — confirm).
    remove_root(resp_json)
    pystac.Catalog.from_dict(resp_json).validate()

    assert "stac_version" in resp_json

    # Make sure OpenAPI docs are linked
    docs = next(filter(lambda link: link["rel"] == "service-doc", resp_json["links"]))[
        "href"
    ]
    resp = await app_client.get(docs)
    assert resp.status_code == 200

    # Make sure conformance classes are linked
    conformance_link = next(
        filter(lambda link: link["rel"] == "conformance", resp_json["links"])
    )

    assert "conformsTo" in resp_json
    conforms_to = resp_json["conformsTo"]

    # Make sure conformance classes are of the right STAC version
    for conformance_class in conforms_to:
        if "api.stacspec.org" in conformance_class:
            assert STAC_API_VERSION in conformance_class
    resp = await app_client.get(conformance_link["href"])
    assert resp.status_code == 200
import pytest


@pytest.mark.asyncio
async def test_queryables(app_client):
    """The global queryables document advertises common and NAIP properties."""
    response = await app_client.get("/queryables")
    assert response.status_code == 200
    properties = response.json()["properties"]
    for expected in ("id", "datetime", "naip:year", "naip:state"):
        assert expected in properties


@pytest.mark.asyncio
async def test_collection_queryables_naip(app_client):
    """The NAIP collection queryables advertise common and NAIP properties."""
    response = await app_client.get("/collections/naip/queryables")
    assert response.status_code == 200
    properties = response.json()["properties"]
    for expected in ("id", "datetime", "naip:year", "naip:state"):
        assert expected in properties


@pytest.mark.asyncio
async def test_collection_queryables_404(app_client):
    """Queryables for an unknown collection return 404."""
    response = await app_client.get("/collections/does-not-exist/queryables")
    assert response.status_code == 404
import unittest

from fastapi.testclient import TestClient

from pcstac.main import app


class TestRoot(unittest.TestCase):
    def test_cors_enabled(self) -> None:
        """
        When the request supplies an origin header (as a browser would), ensure
        that the response has an `access-control-allow` header, set to all origins.
        """
        header_name = "access-control-allow-origin"
        with TestClient(app) as client:
            response = client.get("/", headers={"origin": "http://example.com"})

        self.assertEqual(response.status_code, 200)
        self.assertIn(header_name, response.headers)
        self.assertEqual(response.headers[header_name], "*")
import time

import pytest
from fastapi import FastAPI
from httpx import ASGITransport, AsyncClient

from pccommon.constants import HTTP_429_TOO_MANY_REQUESTS


@pytest.mark.asyncio
async def test_rate_limit_collection(app: FastAPI):
    """Hammering a collection endpoint from a non-exempt IP yields a 429."""
    pytest.skip("Non-deterministic. Set the rate limit in the config file to test.")

    # set the ip to one that doesn't have the rate limit exception
    async with AsyncClient(
        transport=ASGITransport(app=app),
        base_url="http://test",
        headers={"X-Forwarded-For": "127.0.0.2"},
    ) as app_client:
        resp = None
        for _ in range(0, 400):
            tic = time.perf_counter()
            resp = await app_client.get("/collections/naip")
            toc = time.perf_counter()
            print(f"{toc - tic:.3f}")
            if resp.status_code == HTTP_429_TOO_MANY_REQUESTS:
                break
            else:
                assert resp.status_code == 200

        assert resp.status_code == HTTP_429_TOO_MANY_REQUESTS


# Renamed from test_rate_limit_collection_ip_Exception to follow PEP 8
# snake_case; pytest still collects it via the test_ prefix.
@pytest.mark.asyncio
async def test_rate_limit_collection_ip_exception(app_client: AsyncClient):
    """Requests from an exempt IP (the fixture default) are never throttled."""
    for _ in range(0, 400):
        resp = await app_client.get("/collections/naip")
        assert resp.status_code == 200


@pytest.mark.asyncio
async def test_reregistering_rate_limit_script(app: FastAPI, app_client: AsyncClient):
    """The rate-limit Lua script is re-registered after Redis flushes its cache."""
    # set the ip to one that doesn't have the rate limit exception
    async with AsyncClient(
        transport=ASGITransport(app=app),
        base_url="http://test",
        headers={"X-Forwarded-For": "127.0.0.2"},
    ) as app_client:

        async def _hash_exists():
            # script_exists returns one flag per queried hash; we query one.
            exists = await app.state.redis.script_exists(
                app.state.redis_rate_limit_script_hash
            )
            return exists[0]

        # Script is registered and requests should succeed
        assert await _hash_exists()
        resp = await app_client.get("/collections/naip/items")
        assert resp.status_code == 200

        # Simulate scenario when all scripts are flushed from the redis script cache
        await app.state.redis.script_flush()
        assert await _hash_exists() is False

        # Request with unregistered script should succeed and re-register the script
        resp = await app_client.get("/collections/naip/items")
        assert resp.status_code == 200
        assert await _hash_exists()
24 | with open(item_path) as f: 25 | items.append(json.load(f)) 26 | 27 | existing_collections = requests.get(urljoin(app_host, "/collections")).json() 28 | if collection["id"] in [c["id"] for c in existing_collections["collections"]]: 29 | items = [] 30 | all = False 31 | items_href = urljoin(app_host, f"/collections/{collection['id']}/items") 32 | while not all: 33 | r = requests.get(items_href) 34 | r.raise_for_status() 35 | item_collection = r.json() 36 | next_link = next( 37 | iter( 38 | [link for link in item_collection["links"] if link["rel"] == "next"] 39 | ), 40 | None, 41 | ) 42 | if next_link is None: 43 | all = True 44 | for i in item_collection["features"]: 45 | r = requests.delete( 46 | urljoin( 47 | app_host, f"/collections/{collection['id']}/items/{i['id']}" 48 | ) 49 | ) 50 | 51 | r = requests.delete(urljoin(app_host, f"/collections/{collection['id']}")) 52 | r.raise_for_status() 53 | 54 | r = requests.post(urljoin(app_host, "/collections"), json=collection) 55 | r.raise_for_status() 56 | 57 | for item in items: 58 | r = requests.post( 59 | urljoin(app_host, f"/collections/{collection['id']}/items"), json=item 60 | ) 61 | r.raise_for_status() 62 | 63 | 64 | if __name__ == "__main__": 65 | ingest_development_data() 66 | -------------------------------------------------------------------------------- /pcstac/tests/util/test_data.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | 4 | class TestData: 5 | @staticmethod 6 | def get_path(rel_path: str) -> str: 7 | return os.path.abspath( 8 | os.path.join(os.path.dirname(__file__), "..", "data-files", rel_path) 9 | ) 10 | -------------------------------------------------------------------------------- /pctiler/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM mcr.microsoft.com/cbl-mariner/base/python:3.9 2 | 3 | RUN tdnf install -y ca-certificates build-essential \ 4 | && tdnf clean all 5 | 6 | 
WORKDIR /opt/src

COPY pccommon /opt/src/pccommon
COPY pctiler /opt/src/pctiler

# Install the local modules in the new environment
RUN --mount=type=cache,target=/root/.cache \
    /bin/sh -c "python3 -m pip install -U 'setuptools>=65.5.1' uv"
# The order of these pip installs is important :(
RUN --mount=type=cache,target=/root/.cache \
    /bin/sh -c "uv pip install --system -r ./pccommon/requirements.txt"
RUN --mount=type=cache,target=/root/.cache \
    /bin/sh -c "uv pip install --system -r ./pctiler/requirements-server.txt"
RUN --mount=type=cache,target=/root/.cache \
    /bin/sh -c "uv pip install --system --no-deps -e ./pccommon -e ./pctiler[server]"

# GDAL config: tune remote COG reads over HTTP.
# Use `ENV key=value`; the space-separated legacy form is deprecated.
ENV GDAL_CACHEMAX=200
ENV GDAL_INGESTED_BYTES_AT_OPEN=32768
ENV GDAL_DISABLE_READDIR_ON_OPEN=EMPTY_DIR
ENV GDAL_HTTP_MERGE_CONSECUTIVE_RANGES=YES
ENV GDAL_HTTP_MULTIPLEX=YES
ENV GDAL_HTTP_VERSION=2
ENV GDAL_HTTP_MAX_RETRY=3
ENV GDAL_HTTP_RETRY_DELAY=0.2
# Avoid segfault in rasterio 1.2.10 when reading compound CRS.
# https://github.com/rasterio/rasterio/issues/2415
ENV GTIFF_REPORT_COMPD_CS=0
# Use `ENV key=value`; the space-separated legacy form is deprecated.
ENV VSI_CACHE=FALSE
ENV VSI_CACHE_SIZE=0
ENV CPL_VSIL_CURL_CACHE_SIZE=200000000

# Experimental flag to deallocate process memory quickly
ENV MALLOC_TRIM_THRESHOLD_=0

# TiTiler mosaic config
ENV MOSAIC_CONCURRENCY=1

ENV APP_ROOT_PATH=""
ENV APP_HOST=0.0.0.0
ENV APP_PORT=80

CMD uvicorn pctiler.main:app --host ${APP_HOST} --port ${APP_PORT} --root-path ${APP_ROOT_PATH} --log-level info

# --- pctiler/Dockerfile.dev ---
FROM pc-apis-tiler

RUN tdnf install azure-cli -y

COPY requirements-dev.txt requirements-dev.txt

RUN --mount=type=cache,target=/root/.cache \
    --mount=type=bind,source=requirements-dev.txt,target=requirements-dev.txt \
    python3 -m pip install -r requirements-dev.txt

RUN --mount=type=cache,target=/root/.cache \
    python3 -m pip install -r pctiler/requirements-dev.txt

# --- pctiler/MANIFEST.in ---
include pctiler/endpoints/templates/*.html

recursive-exclude tests *

# --- pctiler/pctiler/colormaps/__init__.py (start) ---
from typing import Dict
from rio_tiler.colormap import cmap
from rio_tiler.types import ColorMapType

from .alos_palsar_mosaic import alos_palsar_mosaic_colormaps
from .chloris import chloris_colormaps
from .dependencies import create_colormap_dependency
from .io_bii import io_bii_colormaps
from .jrc import jrc_colormaps
from .lidarusgs import lidar_colormaps
from .lulc import lulc_colormaps
from .modis import modis_colormaps
from .mtbs import mtbs_colormaps
from .noaa_c_cap import noaa_c_cap_colormaps
from .qpe import qpe_colormaps
from .viirs import viirs_colormaps

################################################################################
# Custom ColorMap Query Parameter Support
# Enum-based query params require rolling our own RenderParams dependency type
# to get documentation on par with the default RenderParams class.
################################################################################
registered_cmaps = cmap

# Every project-specific colormap, merged into one registry source.
custom_colormaps: Dict[str, ColorMapType] = {
    **io_bii_colormaps,
    **jrc_colormaps,
    **lulc_colormaps,
    **modis_colormaps,
    **mtbs_colormaps,
    **lidar_colormaps,
    **chloris_colormaps,
    **noaa_c_cap_colormaps,
    **alos_palsar_mosaic_colormaps,
    **qpe_colormaps,
    **viirs_colormaps,
}

for name, definition in custom_colormaps.items():
    # rio-tiler 6.6.1 doesn't support upper case cmap names, so each custom
    # colormap is registered under its lower-cased name.
    registered_cmaps = registered_cmaps.register({name.lower(): definition})

# Expose original-casing custom names alongside rio-tiler's built-ins so the
# query-parameter docs keep the familiar spellings.
all_cmap_keys = [*custom_colormaps, *cmap.data]
PCColorMapParams = create_colormap_dependency(registered_cmaps, all_cmap_keys)


# Placeholder for non-discrete range colormaps (unsupported)
# "hgb-above": {
#     0: [225, 252, 238, 255],
#     0.1: [220, 245, 233, 255],
#     0.2: [200, 230, 212, 255],
#     0.4: [182, 217, 194, 255],
#     0.8: [161, 201, 173, 255],
#     1.5: [143, 189, 155, 255],
# (continuation of the commented-out "hgb-above" placeholder colormap)
#     3: [127, 176, 139, 255],
#     6: [109, 163, 122, 255],
#     12.5: [93, 150, 107, 255],
#     25: [77, 138, 90, 255],
#     50: [62, 125, 77, 255],
#     100: [47, 112, 62, 255],
#     200: [34, 102, 51, 255],
#     3000: [34, 102, 51, 255],
# },

# --- pctiler/pctiler/colormaps/alos_palsar_mosaic.py ---
from typing import Dict

from rio_tiler.types import ColorMapType

# Colormaps for the ALOS PALSAR mosaic collection, keyed by pixel value.
alos_palsar_mosaic_colormaps: Dict[str, ColorMapType] = {
    # Observation mask values.
    "alos-palsar-mask": {
        0: (0, 0, 0, 0),
        50: (0, 0, 255, 255),
        100: (168, 168, 0, 255),
        150: (0, 84, 84, 255),
        255: (168, 153, 135, 255),
    },
    # Forest / non-forest classification values.
    "alos-fnf": {
        0: (0, 0, 0, 255),  # nodata
        1: (0, 178, 0, 255),  # forest (> 90% canopy cover)
        2: (131, 239, 98, 255),  # forest (10-90% canopy cover)
        3: (255, 255, 153, 255),  # no-forest
        4: (0, 0, 255, 255),  # water
    },
}

# --- pctiler/pctiler/colormaps/chloris.py (start) ---
from typing import Dict, cast

import matplotlib
import numpy as np
from rio_tiler.types import ColorMapType, ColorTuple


def make_biomass_colormap() -> ColorMapType:
    """Build a 256-entry green biomass ramp for the Chloris collection."""
    biomass = matplotlib.colors.LinearSegmentedColormap.from_list(
        "chloris-biomass",
        [
            "#c6c875",
            "#77a865",
            "#3d8757",
            "#29583a",
            "#2e3926",
            "#050603",
        ],
        256,
    )
    # Sample the continuous ramp at 256 evenly spaced points and convert
    # the float RGBA values to 0-255 integer tuples keyed by index.
    ramp = np.linspace(0, 1, 256)
    cmap_vals = biomass(ramp)[:, :]
    cmap_uint8 = (cmap_vals * 255).astype("uint8")
    colormap = {
        idx: cast(ColorTuple, tuple(value)) for idx, value in enumerate(cmap_uint8)
    }
    return colormap


chloris_colormaps: Dict[str, ColorMapType] = {
    "chloris-biomass": make_biomass_colormap(),
}

# --- pctiler/pctiler/colormaps/dependencies.py ---
# flake8: noqa

import json
from typing import Callable, List, Literal, Optional, Sequence, Union

from fastapi import HTTPException, Query
from rio_tiler.colormap import ColorMaps, parse_color
from rio_tiler.types import ColorMapType
from typing_extensions import Annotated


# Port of titiler.core.dependencies.create_colormap_dependency (0.18.3) which
# supports case-sensitive keys in QueryParams and the pydantic validation response
def create_colormap_dependency(
    cmap: ColorMaps, original_casing_keys: List[str]
) -> Callable:
    """Create Colormap Dependency."""

    def deps(  # type: ignore
        colormap_name: Annotated[  # type: ignore
            Literal[tuple(original_casing_keys)],
            Query(description="Colormap name"),
        ] = None,
        colormap: Annotated[
            Optional[str], Query(description="JSON encoded custom Colormap")
        ] = None,
    ) -> Union[ColorMapType, None]:
        # Named colormaps are registered lower-cased (rio-tiler limitation),
        # while the documented enum keeps the original casing.
        if colormap_name:
            return cmap.get(colormap_name.lower())

        if colormap:
            try:
                # Keys arrive as JSON strings; coerce them to int pixel values
                # and normalize each color via rio-tiler's parse_color.
                c = json.loads(
                    colormap,
                    object_hook=lambda x: {
                        int(k): parse_color(v) for k, v in x.items()
                    },
                )

                # Make sure to match colormap type
                if isinstance(c, Sequence):
                    c = [(tuple(inter), parse_color(v)) for (inter, v) in c]

                return c
            except json.JSONDecodeError as e:
                raise HTTPException(
                    status_code=400, detail="Could not parse the colormap value."
                ) from e

        return None

    return deps

# --- pctiler/pctiler/colormaps/io_bii.py ---
from typing import Dict, cast

import matplotlib
import numpy as np
from rio_tiler.types import ColorMapType, ColorTuple


def make_io_bii_colormap() -> ColorMapType:
    """Build a 256-entry gray-to-green ramp for the IO-BII collection."""
    io_bii = matplotlib.colors.LinearSegmentedColormap.from_list(
        "io_bii",
        [
            (0.0, "#72736c"),
            (0.2, "#ccd3c5"),
            (0.4, "#cceaa2"),
            (0.6, "#69be72"),
            (0.8, "#309d53"),
            (1.0, "#006a37"),
        ],
        256,
    )
    # Sample the continuous ramp and convert float RGBA to 0-255 tuples.
    ramp = np.linspace(0, 1, 256)
    cmap_vals = io_bii(ramp)[:, :]
    cmap_uint8 = (cmap_vals * 255).astype("uint8")
    colormap = {
        idx: cast(ColorTuple, tuple(value)) for idx, value in enumerate(cmap_uint8)
    }
    return colormap


io_bii_colormaps: Dict[str, ColorMapType] = {
    "io-bii": make_io_bii_colormap(),
}

# --- pctiler/pctiler/colormaps/mtbs.py ---
from typing import Dict

from rio_tiler.types import ColorMapType

# MTBS burn-severity class colors, keyed by class code.
mtbs_colormaps: Dict[str, ColorMapType] = {
    "mtbs-severity": {
        0: (0, 0, 0, 0),
        1: (0, 100, 0, 255),
        2: (127, 255, 212, 255),
        3: (255, 255, 0, 255),
        4: (255, 0, 0, 255),
        5: (127, 255, 0, 255),
        6: (255, 255, 255, 255),
    },
}

# --- pctiler/pctiler/colormaps/noaa_c_cap.py (start) ---
from typing import Dict

from rio_tiler.types import ColorMapType

# NOAA C-CAP land cover class colors, keyed by class code.
noaa_c_cap_colormaps: Dict[str, ColorMapType] = {
    "c-cap": {
        0: (0, 0, 0,
0),
        1: (0, 0, 0, 0),
        2: (235, 235, 235, 255),
        3: (168, 153, 168, 255),
        4: (143, 117, 121, 255),
        5: (194, 204, 56, 255),
        6: (82, 31, 0, 255),
        7: (194, 159, 79, 255),
        8: (235, 184, 133, 255),
        9: (0, 235, 0, 255),
        10: (0, 57, 0, 255),
        11: (6, 159, 57, 255),
        12: (108, 108, 0, 255),
        13: (0, 94, 94, 255),
        14: (235, 108, 0, 255),
        15: (235, 0, 235, 255),
        16: (57, 0, 57, 255),
        17: (108, 0, 108, 255),
        18: (181, 0, 181, 255),
        19: (0, 235, 235, 255),
        20: (235, 235, 0, 255),
        21: (0, 0, 120, 255),
        22: (0, 0, 236, 255),
        23: (102, 117, 219, 255),
        24: (249, 209, 0, 255),
        25: (170, 249, 239, 255),
    }
}

# --- pctiler/pctiler/colormaps/qpe.py ---
from typing import Dict, cast

import matplotlib
import numpy as np
from rio_tiler.types import ColorMapType, ColorTuple


def make_qpe_colormap() -> ColorMapType:
    """Build a 256-entry stepped precipitation ramp for the QPE collection.

    Adjacent stop pairs share a color, producing visually discrete bands
    from a continuous LinearSegmentedColormap.
    """
    qpe = matplotlib.colors.LinearSegmentedColormap.from_list(
        "qpe",
        [
            (0.000, "#00000000"),  # transparent at zero
            (0.003, "#1863b7"),
            (0.010, "#1863b7"),
            (0.011, "#30817e"),
            (0.025, "#30817e"),
            (0.026, "#419944"),
            (0.050, "#419944"),
            (0.051, "#5dac12"),
            (0.075, "#5dac12"),
            (0.076, "#94ba15"),
            (0.100, "#94ba15"),
            (0.101, "#c6c61e"),
            (0.150, "#c6c61e"),
            (0.151, "#f1cb24"),
            (0.200, "#f1cb24"),
            (0.201, "#fbb621"),
            (0.250, "#fbb621"),
            (0.251, "#f8981b"),
            (0.300, "#f8981b"),
            (0.301, "#f47916"),
            (0.400, "#f47916"),
            (0.401, "#f15a1e"),
            (0.500, "#f15a1e"),
            (0.501, "#fa605d"),
            (0.600, "#fa605d"),
            (0.601, "#ff79aa"),
            (0.800, "#ff79aa"),
            (0.801, "#fe92fb"),
            (1.000, "#fe92fb"),
        ],
        256,
    )
    # Sample the ramp and convert float RGBA values to 0-255 integer tuples.
    ramp = np.linspace(0, 1, 256)
    cmap_vals = qpe(ramp)[:, :]
    cmap_uint8 = (cmap_vals * 255).astype("uint8")
    colormap = {
        idx: cast(ColorTuple, tuple(value)) for idx, value in enumerate(cmap_uint8)
    }
    return colormap


qpe_colormaps: Dict[str, ColorMapType] = {
    "qpe": make_qpe_colormap(),
}

# --- pctiler/pctiler/colormaps/viirs.py ---
from typing import Dict

from rio_tiler.types import ColorMapType

# VIIRS product colormaps: interval-style entries map a value range to a
# color; dict-style entries map a single class value to a color.
viirs_colormaps: Dict[str, ColorMapType] = {
    "viirs-10a1": [
        ((0, 25), (43, 140, 190, 255)),
        ((25, 50), (116, 169, 207, 255)),
        ((50, 75), (189, 201, 225, 255)),
        ((75, 100), (241, 238, 246, 255)),
        ((101, 255), (0, 0, 0, 0)),
    ],
    "viirs-13a1": [
        ((0, 0), (255, 255, 255, 255)),
        ((0, 1300), (214, 196, 181, 255)),
        ((1300, 2600), (172, 137, 106, 255)),
        ((2600, 3900), (233, 234, 220, 255)),
        ((3900, 5200), (183, 204, 173, 255)),
        ((5200, 6500), (134, 173, 126, 255)),
        ((6500, 7800), (84, 143, 79, 255)),
        ((7800, 9000), (38, 114, 36, 255)),
        ((9000, 10000), (0, 91, 0, 255)),
    ],
    "viirs-14a1": {
        0: (255, 255, 255, 255),
        1: (255, 255, 255, 255),
        2: (255, 255, 255, 255),
        3: (255, 255, 255, 0),
        4: (128, 128, 128, 255),
        5: (255, 255, 255, 0),
        6: (255, 255, 255, 0),
        7: (247, 160, 10, 255),
        8: (222, 96, 0, 255),
        9: (242, 4, 0, 255),
    },
    "viirs-15a2H": [
        ((0, 1), (99, 190, 53, 255)),
        ((1, 3), (73, 158, 52, 255)),
        ((3, 6), (49, 120, 34, 255)),
        ((6, 10), (24, 83, 17, 255)),
        ((10, 100), (0, 46, 0, 255)),
        ((100, 255), (0, 0, 0, 0)),
    ],
}

# --- pctiler/pctiler/config.py follows ---
# --- pctiler/pctiler/config.py ---
import os
from dataclasses import dataclass
from functools import lru_cache
from urllib.parse import urljoin

from fastapi import Request
from pydantic import Field
from pydantic_settings import BaseSettings

# Hostname to fetch STAC information from
STAC_API_URL_ENV_VAR = "STAC_API_URL"
# HREF base to be used when sending responses
STAC_API_HREF_ENV_VAR = "STAC_API_HREF"

DEFAULT_MAX_ITEMS_PER_TILE_ENV_VAR = "DEFAULT_MAX_ITEMS_PER_TILE"
REQUEST_TIMEOUT_ENV_VAR = "REQUEST_TIMEOUT"
VECTORTILE_SA_BASE_URL_ENV_VAR = "VECTORTILE_SA_BASE_URL"


@dataclass
class FeatureFlags:
    """Feature toggles read from the environment at import time."""

    # Enabled when FF_VRT is set to any non-empty value.
    VRT: bool = bool(os.environ.get("FF_VRT"))


class Settings(BaseSettings):
    """Tiler application settings, populated from the environment."""

    # Internal URL to access the STAC API (required; raises KeyError at
    # import time if unset).
    stac_api_url: str = os.environ[STAC_API_URL_ENV_VAR]

    # Public URL to access the STAC API. If relative, the request's base
    # URL is used to generate the full HREF (see get_stac_api_href).
    stac_api_href: str = os.environ[STAC_API_HREF_ENV_VAR]

    title: str = "Preview of Tile Access Services"
    openapi_url: str = "/openapi.json"
    configuration_endpoint_prefix: str = "/config"
    item_endpoint_prefix: str = "/item"
    mosaic_endpoint_prefix: str = "/mosaic"
    legend_endpoint_prefix: str = "/legend"
    vector_tile_endpoint_prefix: str = "/vector"
    vector_tile_sa_base_url: str = Field(
        default="",
        validation_alias=VECTORTILE_SA_BASE_URL_ENV_VAR,
    )

    debug: bool = os.getenv("TILER_DEBUG", "False").lower() == "true"
    api_version: str = "1.0"
    default_max_items_per_tile: int = Field(
        default=10,
        validation_alias=DEFAULT_MAX_ITEMS_PER_TILE_ENV_VAR,
    )
    request_timeout: int = Field(
        default=30,
        validation_alias=REQUEST_TIMEOUT_ENV_VAR,
    )

    feature_flags: FeatureFlags = FeatureFlags()

    def get_stac_api_href(self, request: Request) -> str:
        """Generate the STAC API HREF.

        If the configured ``stac_api_href`` is relative, the request's
        base URL is used to generate an absolute URL.
        """
        if request:
            base_hostname = f"{request.url.scheme}://{request.url.netloc}/"
            return urljoin(base_hostname, self.stac_api_href)
        else:
            return self.stac_api_href


@lru_cache
def get_settings() -> Settings:
    """Return the process-wide Settings instance (cached per process)."""
    return Settings()


# --- pctiler/pctiler/endpoints/configuration.py ---
from fastapi import APIRouter

from pccommon.credential import PcDefaultAzureCredential
from pctiler.models import AzMapsToken

tokenProvider = PcDefaultAzureCredential()
configuration_router = APIRouter()


@configuration_router.get("/map/token", response_model=AzMapsToken)
def get_azmaps_token() -> AzMapsToken:
    """
    Fetch a token for the Azure Maps API service based on the current
    service identity. This token is used for the Explorer to authenticate
    with the Azure Maps API service.
    """
    # snake_case local (was `accessToken`).
    access_token = tokenProvider.get_token("https://atlas.microsoft.com/.default")
    return AzMapsToken(token=access_token.token, expires_on=access_token.expires_on)


# --- pctiler/pctiler/endpoints/dependencies.py ---
import logging
from typing import Callable

import fastapi
import starlette

logger = logging.getLogger(__name__)


def get_endpoint_function(
    router: fastapi.APIRouter, path: str, method: str
) -> Callable:
    """Return the endpoint callable registered on ``router`` for path/method.

    Raises:
        fastapi.HTTPException: 500 if no route fully matches.
    """
    for route in router.routes:
        match, _ = route.matches({"type": "http", "path": path, "method": method})
        if match == starlette.routing.Match.FULL:
            # The abstract BaseRoute doesn't have a `.endpoint` attribute,
            # but all of its subclasses do.
            return route.endpoint  # type: ignore [attr-defined]

    logger.warning(f"Could not find endpoint. method={method} path={path}")
    raise fastapi.HTTPException(detail="Internal system error", status_code=500)


# --- pctiler/pctiler/endpoints/health.py ---
from fastapi import APIRouter

health_router = APIRouter()


@health_router.get("/_mgmt/ping")
async def ping() -> dict:
    """Liveliness/readiness probe, matching spec used in stac-fastapi"""
    return {"message": "PONG"}


# --- pctiler/pctiler/endpoints/templates/item_preview.html (start) ---
3 |