├── .binder └── environment.yml ├── .gitattributes ├── .github ├── dependabot.yaml └── workflows │ ├── binderbadge.yaml │ ├── cron.yaml │ ├── main.yaml │ └── pypipublish.yaml ├── .gitignore ├── .pre-commit-config.yaml ├── .readthedocs.yml ├── CHANGELOG.md ├── LICENSE ├── MANIFEST.in ├── README.md ├── ci ├── .coveragerc ├── environment-3.10.yml ├── environment-3.8.yml ├── environment-3.9.yml ├── environment-dev.yml ├── environment-docs.yml ├── environment-unpinned.yml └── environment-upstream.yml ├── codecov.yml ├── docs ├── Makefile ├── make.bat └── source │ ├── api.rst │ ├── conf.py │ ├── contributing.rst │ ├── index.rst │ ├── installation.rst │ └── tutorial.rst ├── examples ├── aws-earth-search.ipynb ├── intake-cmr-stac.ipynb ├── intake-holoviz.ipynb ├── landsat8-l1.ipynb ├── my-s2-l2a-cogs.json └── planet-disaster-data.ipynb ├── intake_stac ├── __init__.py ├── catalog.py └── tests │ ├── __init__.py │ ├── data │ ├── 1.0.0 │ │ ├── catalog │ │ │ ├── catalog.json │ │ │ └── child-catalog.json │ │ ├── collection │ │ │ ├── collection.json │ │ │ ├── simple-item.json │ │ │ └── zarr-collection.json │ │ ├── item │ │ │ └── zarr-item.json │ │ └── itemcollection │ │ │ └── example-search.json │ └── 1.0.0beta2 │ │ └── earthsearch │ │ ├── readme.md │ │ └── single-file-stac.json │ └── test_catalog.py ├── pyproject.toml ├── requirements-dev.txt ├── requirements.txt ├── setup.cfg └── setup.py /.binder/environment.yml: -------------------------------------------------------------------------------- 1 | # binder development environment 2 | name: intake-stac-dev 3 | channels: 4 | - conda-forge 5 | dependencies: 6 | - aiohttp 7 | - bokeh=2.2 8 | - coverage 9 | - dask 10 | - dask-labextension 11 | - datashader 12 | - distributed 13 | - gcsfs 14 | - geopandas 15 | - geoviews 16 | - h5netcdf 17 | - hvplot 18 | - intake 19 | - intake-geopandas 20 | - intake-parquet 21 | - intake-xarray 22 | - ipywidgets 23 | - ipyleaflet 24 | - jupyterlab>=3 25 | - matplotlib-base 26 | - netcdf4 27 | - 
numpy 28 | - panel 29 | - pip 30 | - pyproj 31 | - python=3.9 32 | - pyyaml 33 | - pystac 34 | - pytest 35 | - rasterio 36 | - rioxarray 37 | - requests 38 | - s3fs 39 | - sat-search 40 | - sat-stac 41 | - scikit-image 42 | - xarray 43 | - pip: 44 | - -e ../ 45 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | xarray/_version.py export-subst 2 | -------------------------------------------------------------------------------- /.github/dependabot.yaml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: pip 4 | directory: "/" 5 | schedule: 6 | interval: daily 7 | - package-ecosystem: "github-actions" 8 | directory: "/" 9 | schedule: 10 | # Check for updates to GitHub Actions every weekday 11 | interval: "daily" 12 | -------------------------------------------------------------------------------- /.github/workflows/binderbadge.yaml: -------------------------------------------------------------------------------- 1 | # # create a mybinder badge issue comment for testing PRs 2 | # name: AddBinderBadge 3 | # on: 4 | # pull_request: 5 | # types: [opened, reopened] 6 | # jobs: 7 | # build-image-without-pushing: 8 | # runs-on: ubuntu-latest 9 | # steps: 10 | # - name: Checkout PR 11 | # uses: actions/checkout@v2.4.0 12 | 13 | # - name: Comment on PR with Binder link 14 | # uses: actions/github-script@v4.1 15 | # env: 16 | # BRANCH_NAME: ${{ github.event.pull_request.head.ref }} 17 | # with: 18 | # github-token: ${{secrets.GITHUB_TOKEN}} 19 | # script: | 20 | # var BRANCH_NAME = process.env.BRANCH_NAME; 21 | # github.issues.createComment({ 22 | # issue_number: context.issue.number, 23 | # owner: context.repo.owner, 24 | # repo: context.repo.repo, 25 | # body: 
`[![Binder](https://mybinder.org/badge_logo.svg)](https://mybinder.org/v2/gh/${context.repo.owner}/${context.repo.repo}/${BRANCH_NAME}) :point_left: Launch a binder notebook on this branch` 26 | # }) 27 | -------------------------------------------------------------------------------- /.github/workflows/cron.yaml: -------------------------------------------------------------------------------- 1 | # Run a nightly test against unpinned conda environment 2 | name: Cron 3 | 4 | on: 5 | schedule: 6 | - cron: "0 0 * * *" 7 | 8 | jobs: 9 | test: 10 | runs-on: ubuntu-latest 11 | steps: 12 | - name: Checkout 13 | uses: actions/checkout@v3.0.2 14 | 15 | - name: Setup Miniconda 16 | uses: conda-incubator/setup-miniconda@v2.1.1 17 | with: 18 | auto-update-conda: true 19 | auto-activate-base: false 20 | activate-environment: intake-stac 21 | environment-file: ci/environment-unpinned.yml 22 | 23 | - name: Development Install Intake-STAC 24 | shell: bash -l {0} 25 | run: | 26 | python -m pip install --no-deps -e . 
27 | conda list 28 | 29 | - name: Run Tests 30 | shell: bash -l {0} 31 | run: | 32 | pytest --verbose --junitxml=test-reports/junit.xml --cov=intake_stac --cov-config ci/.coveragerc --cov-report term-missing 33 | -------------------------------------------------------------------------------- /.github/workflows/main.yaml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: 4 | push: 5 | branches: "*" 6 | pull_request: 7 | branches: main 8 | 9 | jobs: 10 | test: 11 | name: ${{ matrix.CONDA_ENV }}-test 12 | runs-on: ubuntu-latest 13 | strategy: 14 | fail-fast: false 15 | matrix: 16 | CONDA_ENV: ["3.8", "3.9", "3.10", "upstream"] 17 | steps: 18 | - name: Checkout 19 | uses: actions/checkout@v3.0.2 20 | 21 | - name: Cache Conda Packages 22 | uses: actions/cache@v3 23 | env: 24 | # Increase this value to reset cache if etc/example-environment.yml has not changed 25 | CACHE_NUMBER: 0 26 | with: 27 | path: ~/conda_pkgs_dir 28 | key: conda-${{ env.CACHE_NUMBER }}-${{hashFiles(format('ci/environment-{0}.yml',matrix.CONDA_ENV)) }} 29 | 30 | - name: Cache Pip Packages (upstream environment) 31 | uses: actions/cache@v3 32 | with: 33 | path: ~/.cache/pip 34 | key: ${{ runner.os }}-pip-${{ hashFiles('ci/environment-upstream.yml') }} 35 | 36 | - name: Setup Miniconda (Mambaforge) 37 | uses: conda-incubator/setup-miniconda@v2.1.1 38 | with: 39 | miniforge-variant: Mambaforge 40 | miniforge-version: latest 41 | use-mamba: true 42 | activate-environment: intake-stac 43 | environment-file: ci/environment-${{ matrix.CONDA_ENV }}.yml 44 | use-only-tar-bz2: true # IMPORTANT: This needs to be set for caching to work properly! 45 | 46 | - name: Development Install Intake-STAC 47 | shell: bash -l {0} 48 | run: | 49 | python -m pip install --no-deps -e . 
--no-build-isolation 50 | conda list 51 | 52 | - name: Run Tests 53 | shell: bash -l {0} 54 | run: | 55 | python -m pytest --cov=./ --cov-report=xml --verbose 56 | 57 | - name: Upload code coverage to Codecov 58 | uses: codecov/codecov-action@v3.1.1 59 | with: 60 | file: ./coverage.xml 61 | flags: unittests 62 | env_vars: OS,PYTHON 63 | name: codecov-umbrella 64 | fail_ci_if_error: false 65 | -------------------------------------------------------------------------------- /.github/workflows/pypipublish.yaml: -------------------------------------------------------------------------------- 1 | name: Upload Python Package 2 | 3 | on: 4 | release: 5 | types: [created] 6 | 7 | jobs: 8 | deploy: 9 | runs-on: ubuntu-latest 10 | steps: 11 | - uses: actions/checkout@v3.0.2 12 | - name: Set up Python 13 | uses: actions/setup-python@v4.2.0 14 | with: 15 | python-version: "3.x" 16 | - name: Install dependencies 17 | run: | 18 | python -m pip install --upgrade pip 19 | pip install setuptools setuptools-scm wheel twine 20 | - name: Build and publish 21 | env: 22 | TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }} 23 | TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} 24 | run: | 25 | python setup.py sdist bdist_wheel 26 | twine upload dist/* 27 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | test-reports/ 2 | docs/source/generated/ 3 | examples/ 4 | *pdf 5 | 6 | # Byte-compiled / optimized / DLL files 7 | __pycache__/ 8 | *.py[cod] 9 | *$py.class 10 | 11 | # C extensions 12 | *.so 13 | 14 | # Distribution / packaging 15 | .Python 16 | build/ 17 | develop-eggs/ 18 | dist/ 19 | downloads/ 20 | eggs/ 21 | .eggs/ 22 | lib/ 23 | lib64/ 24 | parts/ 25 | sdist/ 26 | var/ 27 | wheels/ 28 | *.egg-info/ 29 | .installed.cfg 30 | *.egg 31 | MANIFEST 32 | 33 | # PyInstaller 34 | # Usually these files are written by a python script from a template 35 | # before 
PyInstaller builds the exe, so as to inject date/other infos into it. 36 | *.manifest 37 | *.spec 38 | 39 | # Installer logs 40 | pip-log.txt 41 | pip-delete-this-directory.txt 42 | 43 | # Unit test / coverage reports 44 | htmlcov/ 45 | .tox/ 46 | .coverage 47 | .coverage.* 48 | .cache 49 | nosetests.xml 50 | coverage.xml 51 | *.cover 52 | .hypothesis/ 53 | .pytest_cache/ 54 | 55 | # Translations 56 | *.mo 57 | *.pot 58 | 59 | # Django stuff: 60 | *.log 61 | local_settings.py 62 | db.sqlite3 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | target/ 76 | 77 | # Jupyter Notebook 78 | .ipynb_checkpoints 79 | 80 | # pyenv 81 | .python-version 82 | 83 | # celery beat schedule file 84 | celerybeat-schedule 85 | 86 | # SageMath parsed files 87 | *.sage.py 88 | 89 | # Environments 90 | .env 91 | .venv 92 | env/ 93 | venv/ 94 | ENV/ 95 | env.bak/ 96 | venv.bak/ 97 | 98 | # Spyder project settings 99 | .spyderproject 100 | .spyproject 101 | 102 | # Rope project settings 103 | .ropeproject 104 | 105 | # mkdocs documentation 106 | /site 107 | 108 | # mypy 109 | .mypy_cache/ 110 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/pre-commit/pre-commit-hooks 3 | rev: v4.4.0 4 | hooks: 5 | - id: trailing-whitespace 6 | - id: end-of-file-fixer 7 | - id: check-docstring-first 8 | - id: check-json 9 | - id: check-yaml 10 | - id: double-quote-string-fixer 11 | 12 | - repo: https://github.com/psf/black 13 | rev: 22.12.0 14 | hooks: 15 | - id: black 16 | args: ["--line-length", "100", "--skip-string-normalization"] 17 | 18 | - repo: https://github.com/PyCQA/flake8 19 | rev: 6.0.0 20 | hooks: 21 | - id: flake8 22 | - repo: https://github.com/asottile/seed-isort-config 23 | rev: 
v2.2.0 24 | hooks: 25 | - id: seed-isort-config 26 | - repo: https://github.com/PyCQA/isort 27 | rev: 5.11.4 28 | hooks: 29 | - id: isort 30 | 31 | - repo: https://github.com/pre-commit/mirrors-prettier 32 | rev: v3.0.0-alpha.4 33 | hooks: 34 | - id: prettier 35 | -------------------------------------------------------------------------------- /.readthedocs.yml: -------------------------------------------------------------------------------- 1 | # .readthedocs.yml 2 | # Read the Docs configuration file 3 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details 4 | 5 | # Required 6 | version: 2 7 | 8 | build: 9 | os: "ubuntu-20.04" 10 | tools: 11 | python: "mambaforge-4.10" 12 | 13 | # Build documentation in the docs/ directory with Sphinx 14 | sphinx: 15 | configuration: docs/source/conf.py 16 | 17 | # Optionally set the version of Python and requirements required to build your docs 18 | conda: 19 | environment: ci/environment-docs.yml 20 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | All notable changes to this project will be documented in this file. 4 | 5 | The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) 6 | and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). 
7 | 8 | ## [v0.4.0] - 2021-XX-XX 9 | 10 | - Switched from sat-stac to pystac dependency (#72) 11 | - CI nightly test against py36 to py39 (#71) 12 | 13 | ### Added 14 | 15 | - Tests against python3.8 (#63) 16 | 17 | ## [v0.3.0] - 2020-10-01 18 | 19 | ### Added 20 | 21 | - Tests against python3.8 (#63) 22 | - Intake GUI predefined holoviews plots for thumbnails and geotiffs (#58) 23 | - New STAC Asset mimetype driver associations (#56) 24 | - STAC>1.0 support, with additional Jupyter Notebooks in examples/ (#52) 25 | - StacItemCollection `to_geopandas()` method (#38) 26 | - Use GitHub Actions for CI (#37) 27 | 28 | ## [v0.2.3] - 2020-01-21 29 | 30 | ### Removed 31 | 32 | - Dependency of scikit-image 33 | 34 | ### Fixed 35 | 36 | - Failed doc builds due to missing satsearch and rasterio dependency 37 | 38 | ## [v0.2.2] - 2019-12-06 39 | 40 | ### Added 41 | 42 | - Support for `satstac.ItemCollection` objects. This facilitates integration with STAC search APIs like sat-search. 43 | 44 | ## [v0.2.1] - 2019-10-31 45 | 46 | ### Fixed 47 | 48 | - Intake entrypoint warnings 49 | 50 | ### Added 51 | 52 | - DOC: Setup readthedocs integration 53 | - DOC: Add basic tutorial to documentation 54 | - Style: Black code formatting 55 | 56 | ## [v0.2.0] - 2019-10-08 57 | 58 | ### Fixed 59 | 60 | - Added missing requirements (intake-xarray, scikit-image) 61 | - Add manifest to fix install 62 | 63 | ### Added 64 | 65 | - Allow stacking of assets into a single xarray 66 | - Updated documentation including contributing doc, readme updates 67 | - Changelog format now uses [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) 68 | 69 | ## [v0.1.0] - 2019-05-24 70 | 71 | Initial Release 72 | 73 | [v0.3.0]: https://github.com/intake/intake-stac/compare/0.2.3...0.3.0 74 | [v0.2.3]: https://github.com/intake/intake-stac/compare/0.2.2...0.2.3 75 | [v0.2.2]: https://github.com/intake/intake-stac/compare/0.2.1...0.2.2 76 | [v0.2.1]: 
https://github.com/pangeo-data/intake-stac/compare/0.2.0...0.2.1 77 | [v0.2.0]: https://github.com/pangeo-data/intake-stac/compare/0.1.0...0.2.0 78 | [v0.1.0]: https://github.com/pangeo-data/intake-stac/tree/0.1.0 79 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | BSD 2-Clause License 2 | 3 | Copyright (c) 2019, Pangeo Data 4 | All rights reserved. 5 | 6 | Redistribution and use in source and binary forms, with or without 7 | modification, are permitted provided that the following conditions are met: 8 | 9 | * Redistributions of source code must retain the above copyright notice, this 10 | list of conditions and the following disclaimer. 11 | 12 | * Redistributions in binary form must reproduce the above copyright notice, 13 | this list of conditions and the following disclaimer in the documentation 14 | and/or other materials provided with the distribution. 15 | 16 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 17 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 18 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 19 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 20 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 21 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 22 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 23 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 24 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 25 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
26 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include LICENSE 2 | include README.md 3 | include requirements.txt 4 | 5 | include versioneer.py 6 | 7 | recursive-include intake_stac *.py 8 | prune intake_stac/tests 9 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Intake-STAC 2 | 3 | ![CI](https://github.com/intake/intake-stac/workflows/CI/badge.svg) 4 | [![Binder](https://mybinder.org/badge_logo.svg)](https://mybinder.org/v2/gh/intake/intake-stac/binder?urlpath=git-pull%3Frepo%3Dhttps%253A%252F%252Fgithub.com%252Fintake%252Fintake-stac%26urlpath%3Dlab%252Ftree%252Fintake-stac%252Fexamples%26branch%3Dmain) 5 | [![PyPI version](https://badge.fury.io/py/intake-stac.svg)](https://badge.fury.io/py/intake-stac) 6 | [![Documentation Status](https://readthedocs.org/projects/intake-stac/badge/?version=latest)](https://intake-stac.readthedocs.io/en/latest/?badge=latest) 7 | [![codecov](https://codecov.io/gh/intake/intake-stac/branch/main/graph/badge.svg?token=8VQEcrFJz9)](https://codecov.io/gh/intake/intake-stac) 8 | 9 | This is an [Intake](https://intake.readthedocs.io/en/latest) data source for [SpatioTemporal Asset Catalogs (STAC)](https://stacspec.org/). The STAC specification provides a common metadata specification, API, and catalog format to describe geospatial assets, so they can be more easily indexed and discovered. A 'spatiotemporal asset' is any file that represents information about the earth captured in a certain space and time. 10 | 11 | Intake-STAC provides an opinionated way for users to load Assets from STAC catalogs into the scientific Python ecosystem. 
It uses the [intake-xarray](https://github.com/intake/intake-xarray) plugin and supports several file formats including GeoTIFF, netCDF, GRIB, and OpenDAP. 12 | 13 | ## Installation 14 | 15 | Intake-STAC has a few [requirements](requirements.txt), such as [Intake](https://intake.readthedocs.io), [intake-xarray](https://intake-xarray.readthedocs.io/) and [pystac](https://github.com/stac-utils/pystac). Intake-stac can be installed in any of the following ways: 16 | 17 | We recommend installing the latest release with `conda`: 18 | 19 | ```bash 20 | $ conda install -c conda-forge intake-stac 21 | ``` 22 | 23 | Or the latest development version with `pip`: 24 | 25 | ```bash 26 | $ pip install git+https://github.com/intake/intake-stac 27 | ``` 28 | 29 | ## Quickstart 30 | 31 | ```python 32 | import intake 33 | 34 | catalog_url = 'https://www.planet.com/data/stac/catalog.json' 35 | cat = intake.open_stac_catalog(catalog_url) 36 | 37 | collection = cat['planet-disaster-data'] 38 | subset = collection['hurricane-harvey']['hurricane-harvey-0831'] 39 | item = subset['Houston-East-20170831-103f-100d-0f4f-RGB'] 40 | 41 | da = item['thumbnail'].to_dask() 42 | da 43 | ``` 44 | 45 | The [examples/](examples/) directory contains several Jupyter Notebooks illustrating common workflows. 
46 | 47 | [STAC Index](https://stacindex.org/catalogs) is a convenient website for finding datasets with STACs 48 | 49 | #### Versions 50 | 51 | To install a specific version of intake-stac, specify the version in the install command 52 | 53 | ```bash 54 | pip install intake-stac==0.4.0 55 | ``` 56 | 57 | The table below shows the corresponding versions between intake-stac and STAC: 58 | 59 | | intake-stac | STAC | 60 | | ----------- | ----------- | 61 | | 0.2.x | 0.6.x | 62 | | 0.3.x | 1.0.0-betaX | 63 | | 0.4.x | 1.0.0 | 64 | 65 | ## About 66 | 67 | [intake-stac](https://github.com/intake/intake-stac) was created as part of the [Pangeo](http://pangeo.io) initiative under support from the NASA-ACCESS program. See the initial [design document](https://hackmd.io/cyJZkjV5TCWTJg1mUAoEVA). 68 | -------------------------------------------------------------------------------- /ci/.coveragerc: -------------------------------------------------------------------------------- 1 | [report] 2 | omit = 3 | intake_stac/tests/* 4 | setup.py 5 | -------------------------------------------------------------------------------- /ci/environment-3.10.yml: -------------------------------------------------------------------------------- 1 | name: intake-stac 2 | channels: 3 | - conda-forge 4 | dependencies: 5 | - python=3.10 6 | - fsspec 7 | - geopandas 8 | - intake 9 | - intake-xarray 10 | - pystac 11 | - pytest-cov 12 | - rasterio 13 | - xarray 14 | -------------------------------------------------------------------------------- /ci/environment-3.8.yml: -------------------------------------------------------------------------------- 1 | name: intake-stac 2 | channels: 3 | - conda-forge 4 | dependencies: 5 | - python=3.8 6 | - fsspec 7 | - geopandas 8 | - intake 9 | - intake-xarray 10 | - pystac 11 | - pytest-cov 12 | - rasterio 13 | - xarray 14 | -------------------------------------------------------------------------------- /ci/environment-3.9.yml: 
-------------------------------------------------------------------------------- 1 | name: intake-stac 2 | channels: 3 | - conda-forge 4 | dependencies: 5 | - python=3.9 6 | - fsspec 7 | - geopandas 8 | - intake 9 | - intake-xarray 10 | - pystac 11 | - pytest-cov 12 | - rasterio 13 | - xarray 14 | -------------------------------------------------------------------------------- /ci/environment-dev.yml: -------------------------------------------------------------------------------- 1 | # Intake GUI also requires jupyterlab extensions installed 2 | # conda env create -f ci/environment-dev.yml 3 | # conda activate intake-stac-dev 4 | name: intake-stac-dev 5 | channels: 6 | - conda-forge 7 | dependencies: 8 | - python 9 | - aiohttp 10 | - autopep8 11 | - black 12 | - boto3 13 | - codecov 14 | - coverage 15 | - dask 16 | - datashader 17 | - distributed 18 | - flake8 19 | - geopandas 20 | - geoviews 21 | - hvplot 22 | - intake 23 | - intake-geopandas 24 | - intake-parquet 25 | - intake-xarray 26 | - ipykernel 27 | - ipywidgets 28 | - isort 29 | - jupyterlab 30 | - make 31 | - matplotlib 32 | - nbsphinx 33 | - netcdf4 34 | - numpy 35 | - numpydoc 36 | - pandoc 37 | - panel 38 | - pip 39 | - pre_commit 40 | - pystac 41 | - pystac-client 42 | - pytest 43 | - pytest-cov 44 | - pytoml 45 | - pyyaml 46 | - rasterio 47 | - recommonmark 48 | - requests 49 | - scikit-image 50 | - sphinx-copybutton 51 | - sphinx_rtd_theme 52 | - sphinx >=1.6 53 | - xarray 54 | -------------------------------------------------------------------------------- /ci/environment-docs.yml: -------------------------------------------------------------------------------- 1 | name: intake-stac 2 | channels: 3 | - conda-forge 4 | - nodefaults 5 | dependencies: 6 | - python 7 | - aiohttp 8 | - fsspec 9 | - geopandas 10 | - intake 11 | - intake-xarray 12 | - ipython 13 | - mock 14 | - nbsphinx 15 | - numpydoc 16 | - pillow 17 | - pip 18 | - pystac 19 | - pystac-client 20 | - pytest-cov 21 | - rasterio 22 | - 
scikit-image 23 | - sphinx 24 | - sphinx_rtd_theme 25 | - xarray 26 | - pip: 27 | - ../ #installs intake-stac from root folder 28 | -------------------------------------------------------------------------------- /ci/environment-unpinned.yml: -------------------------------------------------------------------------------- 1 | name: intake-stac 2 | channels: 3 | - conda-forge 4 | dependencies: 5 | - python 6 | - fsspec 7 | - geopandas 8 | - intake 9 | - intake-xarray 10 | - pystac 11 | - pytest-cov 12 | - rasterio 13 | - xarray 14 | -------------------------------------------------------------------------------- /ci/environment-upstream.yml: -------------------------------------------------------------------------------- 1 | name: intake-stac 2 | channels: 3 | - conda-forge 4 | dependencies: 5 | - python 6 | - geopandas 7 | - pip 8 | - pytest-cov 9 | - rasterio 10 | - xarray 11 | - pip: 12 | - git+https://github.com/fsspec/filesystem_spec.git 13 | - git+https://github.com/stac-utils/pystac.git 14 | - git+https://github.com/intake/intake.git 15 | - git+https://github.com/intake/intake-xarray.git 16 | - git+https://github.com/intake/intake_geopandas.git 17 | - git+https://github.com/intake/intake-parquet.git 18 | -------------------------------------------------------------------------------- /codecov.yml: -------------------------------------------------------------------------------- 1 | codecov: 2 | max_report_age: off 3 | require_ci_to_pass: no 4 | 5 | comment: false 6 | 7 | coverage: 8 | precision: 2 9 | round: down 10 | status: 11 | project: 12 | default: 13 | threshold: 0.2 14 | if_not_found: success 15 | patch: off 16 | changes: off 17 | 18 | ignore: 19 | - "**/tests" 20 | - "**/_version.py" 21 | - "setup.py" 22 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set 
these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = _build 9 | 10 | # User-friendly check for sphinx-build 11 | ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) 12 | $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) 13 | endif 14 | 15 | # Internal variables. 16 | PAPEROPT_a4 = -D latex_paper_size=a4 17 | PAPEROPT_letter = -D latex_paper_size=letter 18 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source 19 | # the i18n builder cannot share the environment and doctrees with the others 20 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source 21 | 22 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext 23 | 24 | help: 25 | @echo "Please use \`make ' where is one of" 26 | @echo " html to make standalone HTML files" 27 | @echo " dirhtml to make HTML files named index.html in directories" 28 | @echo " singlehtml to make a single large HTML file" 29 | @echo " pickle to make pickle files" 30 | @echo " json to make JSON files" 31 | @echo " htmlhelp to make HTML files and a HTML help project" 32 | @echo " qthelp to make HTML files and a qthelp project" 33 | @echo " devhelp to make HTML files and a Devhelp project" 34 | @echo " epub to make an epub" 35 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 36 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 37 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" 38 | @echo " text to make text files" 39 | @echo " man to make manual pages" 40 | @echo " texinfo to make 
Texinfo files" 41 | @echo " info to make Texinfo files and run them through makeinfo" 42 | @echo " gettext to make PO message catalogs" 43 | @echo " changes to make an overview of all changed/added/deprecated items" 44 | @echo " xml to make Docutils-native XML files" 45 | @echo " pseudoxml to make pseudoxml-XML files for display purposes" 46 | @echo " linkcheck to check all external links for integrity" 47 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 48 | 49 | clean: 50 | rm -rf $(BUILDDIR)/* 51 | 52 | html: 53 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 54 | @echo 55 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 56 | 57 | dirhtml: 58 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 59 | @echo 60 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 61 | 62 | singlehtml: 63 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 64 | @echo 65 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 66 | 67 | pickle: 68 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 69 | @echo 70 | @echo "Build finished; now you can process the pickle files." 71 | 72 | json: 73 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 74 | @echo 75 | @echo "Build finished; now you can process the JSON files." 76 | 77 | htmlhelp: 78 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 79 | @echo 80 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 81 | ".hhp project file in $(BUILDDIR)/htmlhelp." 
82 | 83 | qthelp: 84 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 85 | @echo 86 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 87 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 88 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/complexity.qhcp" 89 | @echo "To view the help file:" 90 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/complexity.qhc" 91 | 92 | devhelp: 93 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 94 | @echo 95 | @echo "Build finished." 96 | @echo "To view the help file:" 97 | @echo "# mkdir -p $$HOME/.local/share/devhelp/complexity" 98 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/complexity" 99 | @echo "# devhelp" 100 | 101 | epub: 102 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 103 | @echo 104 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 105 | 106 | latex: 107 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 108 | @echo 109 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 110 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 111 | "(use \`make latexpdf' here to do that automatically)." 112 | 113 | latexpdf: 114 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 115 | @echo "Running LaTeX files through pdflatex..." 116 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 117 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 118 | 119 | latexpdfja: 120 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 121 | @echo "Running LaTeX files through platex and dvipdfmx..." 122 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja 123 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 124 | 125 | text: 126 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 127 | @echo 128 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 
129 | 130 | man: 131 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 132 | @echo 133 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 134 | 135 | texinfo: 136 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 137 | @echo 138 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." 139 | @echo "Run \`make' in that directory to run these through makeinfo" \ 140 | "(use \`make info' here to do that automatically)." 141 | 142 | info: 143 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 144 | @echo "Running Texinfo files through makeinfo..." 145 | make -C $(BUILDDIR)/texinfo info 146 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 147 | 148 | gettext: 149 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 150 | @echo 151 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 152 | 153 | changes: 154 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 155 | @echo 156 | @echo "The overview file is in $(BUILDDIR)/changes." 157 | 158 | linkcheck: 159 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 160 | @echo 161 | @echo "Link check complete; look for any errors in the above output " \ 162 | "or in $(BUILDDIR)/linkcheck/output.txt." 163 | 164 | doctest: 165 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 166 | @echo "Testing of doctests in the sources finished, look at the " \ 167 | "results in $(BUILDDIR)/doctest/output.txt." 168 | 169 | xml: 170 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml 171 | @echo 172 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml." 173 | 174 | pseudoxml: 175 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml 176 | @echo 177 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 
178 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | REM Command file for Sphinx documentation 4 | 5 | if "%SPHINXBUILD%" == "" ( 6 | set SPHINXBUILD=sphinx-build 7 | ) 8 | set BUILDDIR=_build 9 | set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . 10 | set I18NSPHINXOPTS=%SPHINXOPTS% . 11 | if NOT "%PAPER%" == "" ( 12 | set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% 13 | set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% 14 | ) 15 | 16 | if "%1" == "" goto help 17 | 18 | if "%1" == "help" ( 19 | :help 20 | echo.Please use `make ^` where ^ is one of 21 | echo. html to make standalone HTML files 22 | echo. dirhtml to make HTML files named index.html in directories 23 | echo. singlehtml to make a single large HTML file 24 | echo. pickle to make pickle files 25 | echo. json to make JSON files 26 | echo. htmlhelp to make HTML files and a HTML help project 27 | echo. qthelp to make HTML files and a qthelp project 28 | echo. devhelp to make HTML files and a Devhelp project 29 | echo. epub to make an epub 30 | echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter 31 | echo. text to make text files 32 | echo. man to make manual pages 33 | echo. texinfo to make Texinfo files 34 | echo. gettext to make PO message catalogs 35 | echo. changes to make an overview over all changed/added/deprecated items 36 | echo. xml to make Docutils-native XML files 37 | echo. pseudoxml to make pseudoxml-XML files for display purposes 38 | echo. linkcheck to check all external links for integrity 39 | echo. 
doctest to run all doctests embedded in the documentation if enabled 40 | goto end 41 | ) 42 | 43 | if "%1" == "clean" ( 44 | for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i 45 | del /q /s %BUILDDIR%\* 46 | goto end 47 | ) 48 | 49 | 50 | %SPHINXBUILD% 2> nul 51 | if errorlevel 9009 ( 52 | echo. 53 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 54 | echo.installed, then set the SPHINXBUILD environment variable to point 55 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 56 | echo.may add the Sphinx directory to PATH. 57 | echo. 58 | echo.If you don't have Sphinx installed, grab it from 59 | echo.http://sphinx-doc.org/ 60 | exit /b 1 61 | ) 62 | 63 | if "%1" == "html" ( 64 | %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html 65 | if errorlevel 1 exit /b 1 66 | echo. 67 | echo.Build finished. The HTML pages are in %BUILDDIR%/html. 68 | goto end 69 | ) 70 | 71 | if "%1" == "dirhtml" ( 72 | %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml 73 | if errorlevel 1 exit /b 1 74 | echo. 75 | echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. 76 | goto end 77 | ) 78 | 79 | if "%1" == "singlehtml" ( 80 | %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml 81 | if errorlevel 1 exit /b 1 82 | echo. 83 | echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. 84 | goto end 85 | ) 86 | 87 | if "%1" == "pickle" ( 88 | %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle 89 | if errorlevel 1 exit /b 1 90 | echo. 91 | echo.Build finished; now you can process the pickle files. 92 | goto end 93 | ) 94 | 95 | if "%1" == "json" ( 96 | %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json 97 | if errorlevel 1 exit /b 1 98 | echo. 99 | echo.Build finished; now you can process the JSON files. 100 | goto end 101 | ) 102 | 103 | if "%1" == "htmlhelp" ( 104 | %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp 105 | if errorlevel 1 exit /b 1 106 | echo. 
107 | echo.Build finished; now you can run HTML Help Workshop with the ^ 108 | .hhp project file in %BUILDDIR%/htmlhelp. 109 | goto end 110 | ) 111 | 112 | if "%1" == "qthelp" ( 113 | %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp 114 | if errorlevel 1 exit /b 1 115 | echo. 116 | echo.Build finished; now you can run "qcollectiongenerator" with the ^ 117 | .qhcp project file in %BUILDDIR%/qthelp, like this: 118 | echo.^> qcollectiongenerator %BUILDDIR%\qthelp\complexity.qhcp 119 | echo.To view the help file: 120 | echo.^> assistant -collectionFile %BUILDDIR%\qthelp\complexity.qhc 121 | goto end 122 | ) 123 | 124 | if "%1" == "devhelp" ( 125 | %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp 126 | if errorlevel 1 exit /b 1 127 | echo. 128 | echo.Build finished. 129 | goto end 130 | ) 131 | 132 | if "%1" == "epub" ( 133 | %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub 134 | if errorlevel 1 exit /b 1 135 | echo. 136 | echo.Build finished. The epub file is in %BUILDDIR%/epub. 137 | goto end 138 | ) 139 | 140 | if "%1" == "latex" ( 141 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 142 | if errorlevel 1 exit /b 1 143 | echo. 144 | echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. 145 | goto end 146 | ) 147 | 148 | if "%1" == "latexpdf" ( 149 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 150 | cd %BUILDDIR%/latex 151 | make all-pdf 152 | cd %BUILDDIR%/.. 153 | echo. 154 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. 155 | goto end 156 | ) 157 | 158 | if "%1" == "latexpdfja" ( 159 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 160 | cd %BUILDDIR%/latex 161 | make all-pdf-ja 162 | cd %BUILDDIR%/.. 163 | echo. 164 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. 165 | goto end 166 | ) 167 | 168 | if "%1" == "text" ( 169 | %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text 170 | if errorlevel 1 exit /b 1 171 | echo. 172 | echo.Build finished. 
The text files are in %BUILDDIR%/text. 173 | goto end 174 | ) 175 | 176 | if "%1" == "man" ( 177 | %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man 178 | if errorlevel 1 exit /b 1 179 | echo. 180 | echo.Build finished. The manual pages are in %BUILDDIR%/man. 181 | goto end 182 | ) 183 | 184 | if "%1" == "texinfo" ( 185 | %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo 186 | if errorlevel 1 exit /b 1 187 | echo. 188 | echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. 189 | goto end 190 | ) 191 | 192 | if "%1" == "gettext" ( 193 | %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale 194 | if errorlevel 1 exit /b 1 195 | echo. 196 | echo.Build finished. The message catalogs are in %BUILDDIR%/locale. 197 | goto end 198 | ) 199 | 200 | if "%1" == "changes" ( 201 | %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes 202 | if errorlevel 1 exit /b 1 203 | echo. 204 | echo.The overview file is in %BUILDDIR%/changes. 205 | goto end 206 | ) 207 | 208 | if "%1" == "linkcheck" ( 209 | %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck 210 | if errorlevel 1 exit /b 1 211 | echo. 212 | echo.Link check complete; look for any errors in the above output ^ 213 | or in %BUILDDIR%/linkcheck/output.txt. 214 | goto end 215 | ) 216 | 217 | if "%1" == "doctest" ( 218 | %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest 219 | if errorlevel 1 exit /b 1 220 | echo. 221 | echo.Testing of doctests in the sources finished, look at the ^ 222 | results in %BUILDDIR%/doctest/output.txt. 223 | goto end 224 | ) 225 | 226 | if "%1" == "xml" ( 227 | %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml 228 | if errorlevel 1 exit /b 1 229 | echo. 230 | echo.Build finished. The XML files are in %BUILDDIR%/xml. 231 | goto end 232 | ) 233 | 234 | if "%1" == "pseudoxml" ( 235 | %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml 236 | if errorlevel 1 exit /b 1 237 | echo. 238 | echo.Build finished. 
The pseudo-XML files are in %BUILDDIR%/pseudoxml. 239 | goto end 240 | ) 241 | 242 | :end 243 | -------------------------------------------------------------------------------- /docs/source/api.rst: -------------------------------------------------------------------------------- 1 | .. currentmodule:: intake 2 | 3 | ############# 4 | API reference 5 | ############# 6 | 7 | This is a reference API class listing, and modules. 8 | 9 | Top-level functions 10 | =================== 11 | 12 | .. autosummary:: 13 | :toctree: generated/ 14 | 15 | open_stac_catalog 16 | open_stac_collection 17 | open_stac_item_collection 18 | open_stac_item 19 | 20 | .. currentmodule:: intake_stac 21 | 22 | Catalog Objects 23 | =============== 24 | 25 | .. autosummary:: 26 | :toctree: generated/ 27 | 28 | StacCatalog 29 | StacCollection 30 | StacItemCollection 31 | StacItem 32 | -------------------------------------------------------------------------------- /docs/source/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # complexity documentation build configuration file, created by 4 | # sphinx-quickstart on Tue Jul 9 22:26:36 2013. 5 | # 6 | # This file is execfile()d with the current directory set to its containing dir. 7 | # 8 | # Note that not all possible configuration values are present in this 9 | # autogenerated file. 10 | # 11 | # All configuration values have a default; values that are commented out 12 | # serve to show the default. 13 | 14 | import os 15 | import sys 16 | 17 | import intake # noqa: F401 18 | from pkg_resources import get_distribution 19 | 20 | import intake_stac # noqa: F401 21 | 22 | # If extensions (or modules to document with autodoc) are in another directory, 23 | # add these directories to sys.path here. If the directory is relative to the 24 | # documentation root, use os.path.abspath to make it absolute, like shown here. 
25 | # sys.path.insert(0, os.path.abspath('.')) 26 | 27 | cwd = os.getcwd() 28 | parent = os.path.dirname(cwd) 29 | sys.path.insert(0, parent) 30 | 31 | 32 | # -- General configuration ----------------------------------------------------- 33 | 34 | # If your documentation needs a minimal Sphinx version, state it here. 35 | # needs_sphinx = '1.0' 36 | 37 | # Add any Sphinx extension module names here, as strings. They can be extensions 38 | # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. 39 | extensions = [ 40 | 'sphinx.ext.autodoc', 41 | 'sphinx.ext.viewcode', 42 | 'sphinx.ext.autosummary', 43 | 'sphinx.ext.doctest', 44 | 'sphinx.ext.intersphinx', 45 | 'sphinx.ext.extlinks', 46 | 'numpydoc', 47 | 'IPython.sphinxext.ipython_console_highlighting', 48 | 'IPython.sphinxext.ipython_directive', 49 | 'nbsphinx', 50 | ] 51 | 52 | extlinks = { 53 | 'issue': ('https://github.com/intake/intake-stac/issues/%s', 'GH#'), 54 | 'pr': ('https://github.com/intake/intake-stac/pull/%s', 'GH#'), 55 | } 56 | # Add any paths that contain templates here, relative to this directory. 57 | templates_path = ['_templates'] 58 | 59 | # Generate the API documentation when building 60 | autosummary_generate = True 61 | numpydoc_show_class_members = False 62 | 63 | 64 | # The suffix of source filenames. 65 | source_suffix = '.rst' 66 | 67 | # The encoding of source files. 68 | # source_encoding = 'utf-8-sig' 69 | 70 | # The master toctree document. 71 | master_doc = 'index' 72 | 73 | # General information about the project. 74 | project = 'Intake-STAC' 75 | copyright = '2019 onwards, The Pangeo Project and its Contributors' 76 | author = 'The Pangeo Project' 77 | 78 | # The version info for the project you're documenting, acts as replacement for 79 | # |version| and |release|, also used in various other places throughout the 80 | # built documents. 
81 | # 82 | release = get_distribution('intake_stac').version 83 | version = '.'.join(release.split('.')[:2]) 84 | 85 | # The language for content autogenerated by Sphinx. Refer to documentation 86 | # for a list of supported languages. 87 | # language = None 88 | 89 | # There are two options for replacing |today|: either, you set today to some 90 | # non-false value, then it is used: 91 | # today = '' 92 | # Else, today_fmt is used as the format for a strftime call. 93 | # today_fmt = '%B %d, %Y' 94 | 95 | # List of patterns, relative to source directory, that match files and 96 | # directories to ignore when looking for source files. 97 | exclude_patterns = ['_build'] 98 | 99 | # The reST default role (used for this markup: `text`) to use for all documents. 100 | # default_role = None 101 | 102 | # If true, '()' will be appended to :func: etc. cross-reference text. 103 | # add_function_parentheses = True 104 | 105 | # If true, the current module name will be prepended to all description 106 | # unit titles (such as .. function::). 107 | # add_module_names = True 108 | 109 | # If true, sectionauthor and moduleauthor directives will be shown in the 110 | # output. They are ignored by default. 111 | # show_authors = False 112 | 113 | # The name of the Pygments (syntax highlighting) style to use. 114 | pygments_style = 'sphinx' 115 | 116 | # A list of ignored prefixes for module index sorting. 117 | # modindex_common_prefix = [] 118 | 119 | # If true, keep warnings as "system message" paragraphs in the built documents. 120 | # keep_warnings = False 121 | 122 | 123 | # -- Options for HTML output --------------------------------------------------- 124 | 125 | # The theme to use for HTML and HTML Help pages. See the documentation for 126 | # a list of builtin themes. 127 | html_theme = 'sphinx_rtd_theme' 128 | 129 | # Theme options are theme-specific and customize the look and feel of a theme 130 | # further. 
For a list of options available for each theme, see the 131 | # documentation. 132 | # html_theme_options = {} 133 | 134 | # Add any paths that contain custom themes here, relative to this directory. 135 | # html_theme_path = [] 136 | 137 | # The name for this set of Sphinx documents. If None, it defaults to 138 | # " v documentation". 139 | # html_title = None 140 | 141 | # A shorter title for the navigation bar. Default is the same as html_title. 142 | # html_short_title = None 143 | 144 | # The name of an image file (relative to this directory) to place at the top 145 | # of the sidebar. 146 | # html_logo = None 147 | 148 | # The name of an image file (within the static path) to use as favicon of the 149 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 150 | # pixels large. 151 | # html_favicon = None 152 | 153 | # Add any paths that contain custom static files (such as style sheets) here, 154 | # relative to this directory. They are copied after the builtin static files, 155 | # so a file named "default.css" will overwrite the builtin "default.css". 156 | # html_static_path = ['_static'] 157 | 158 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, 159 | # using the given strftime format. 160 | # html_last_updated_fmt = '%b %d, %Y' 161 | 162 | # If true, SmartyPants will be used to convert quotes and dashes to 163 | # typographically correct entities. 164 | # html_use_smartypants = True 165 | 166 | # Custom sidebar templates, maps document names to template names. 167 | # html_sidebars = {} 168 | 169 | # Additional templates that should be rendered to pages, maps page names to 170 | # template names. 171 | # html_additional_pages = {} 172 | 173 | # If false, no module index is generated. 174 | # html_domain_indices = True 175 | 176 | # If false, no index is generated. 177 | # html_use_index = True 178 | 179 | # If true, the index is split into individual pages for each letter. 
180 | # html_split_index = False 181 | 182 | # If true, links to the reST sources are added to the pages. 183 | # html_show_sourcelink = True 184 | 185 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. 186 | # html_show_sphinx = True 187 | 188 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 189 | # html_show_copyright = True 190 | 191 | # If true, an OpenSearch description file will be output, and all pages will 192 | # contain a tag referring to it. The value of this option must be the 193 | # base URL from which the finished HTML is served. 194 | # html_use_opensearch = '' 195 | 196 | # This is the file name suffix for HTML files (e.g. ".xhtml"). 197 | # html_file_suffix = None 198 | 199 | # Output file base name for HTML help builder. 200 | htmlhelp_basename = 'intake_stacdoc' 201 | 202 | 203 | # -- Options for LaTeX output -------------------------------------------------- 204 | 205 | latex_elements = { 206 | # The paper size ('letterpaper' or 'a4paper'). 207 | # 'papersize': 'letterpaper', 208 | # The font size ('10pt', '11pt' or '12pt'). 209 | # 'pointsize': '10pt', 210 | # Additional stuff for the LaTeX preamble. 211 | # 'preamble': '', 212 | } 213 | 214 | # Grouping the document tree into LaTeX files. List of tuples 215 | # (source start file, target name, title, author, documentclass [howto/manual]). 216 | latex_documents = [('index', 'intake-stac.tex', 'intake-stac Documentation', author, 'manual')] 217 | 218 | # The name of an image file (relative to this directory) to place at the top of 219 | # the title page. 220 | # latex_logo = None 221 | 222 | # For "manual" documents, if this is true, then toplevel headings are parts, 223 | # not chapters. 224 | # latex_use_parts = False 225 | 226 | # If true, show page references after internal links. 227 | # latex_show_pagerefs = False 228 | 229 | # If true, show URL addresses after external links. 
230 | # latex_show_urls = False 231 | 232 | # Documents to append as an appendix to all manuals. 233 | # latex_appendices = [] 234 | 235 | # If false, no module index is generated. 236 | # latex_domain_indices = True 237 | 238 | 239 | # -- Options for manual page output -------------------------------------------- 240 | 241 | # One entry per manual page. List of tuples 242 | # (source start file, name, description, authors, manual section). 243 | man_pages = [('index', 'intake-stac', 'intake-stac Documentation', [author], 1)] 244 | 245 | # If true, show URL addresses after external links. 246 | # man_show_urls = False 247 | 248 | 249 | # -- Options for Texinfo output ------------------------------------------------ 250 | 251 | # Grouping the document tree into Texinfo files. List of tuples 252 | # (source start file, target name, title, author, 253 | # dir menu entry, description, category) 254 | texinfo_documents = [ 255 | ( 256 | 'index', 257 | 'intake-stac', 258 | 'intake-stac Documentation', 259 | author, 260 | 'intake-stac', 261 | 'One line description of project.', 262 | 'Miscellaneous', 263 | ) 264 | ] 265 | 266 | # Documents to append as an appendix to all manuals. 267 | # texinfo_appendices = [] 268 | 269 | # If false, no module index is generated. 270 | # texinfo_domain_indices = True 271 | 272 | # How to display URL addresses: 'footnote', 'no', or 'inline'. 273 | # texinfo_show_urls = 'footnote' 274 | 275 | # If true, do not generate a @detailmenu in the "Top" node's menu. 276 | # texinfo_no_detailmenu = False 277 | -------------------------------------------------------------------------------- /docs/source/contributing.rst: -------------------------------------------------------------------------------- 1 | ============================ 2 | Contributing to intake-stac 3 | ============================ 4 | 5 | Contributions are highly welcomed and appreciated. Every little help counts, 6 | so do not hesitate! 7 | 8 | .. 
contents:: Contribution links 9 | :depth: 2 10 | 11 | 12 | .. _submitfeedback: 13 | 14 | Feature requests and feedback 15 | ----------------------------- 16 | 17 | Do you like intake-stac? Share some love on Twitter or in your blog posts! 18 | 19 | We'd also like to hear about your propositions and suggestions. Feel free to 20 | `submit them as issues `_ and: 21 | 22 | * Explain in detail how they should work. 23 | * Keep the scope as narrow as possible. This will make it easier to implement. 24 | 25 | 26 | .. _reportbugs: 27 | 28 | Report bugs 29 | ----------- 30 | 31 | Report bugs for intake-stac in the `issue tracker `_. 32 | 33 | If you are reporting a bug, please include: 34 | 35 | * Your operating system name and version. 36 | * Any details about your local setup that might be helpful in troubleshooting, 37 | specifically the Python interpreter version, installed libraries, and intake-stac 38 | version. 39 | * Detailed steps to reproduce the bug. 40 | 41 | If you can write a demonstration test that currently fails but should pass 42 | (xfail), that is a very useful commit to make as well, even if you cannot 43 | fix the bug itself. 44 | 45 | 46 | .. _fixbugs: 47 | 48 | Fix bugs 49 | -------- 50 | 51 | Look through the `GitHub issues for bugs `_. 52 | 53 | Talk to developers to find out how you can fix specific bugs. 54 | 55 | 56 | Write documentation 57 | ------------------- 58 | 59 | intake-stac could always use more documentation. What exactly is needed? 60 | 61 | * More complementary documentation. Have you perhaps found something unclear? 62 | * Docstrings. There can never be too many of them. 63 | * Blog posts, articles and such -- they're all very appreciated. 64 | 65 | You can also edit documentation files directly in the GitHub web interface, 66 | without using a local copy. This can be convenient for small fixes. 67 | 68 | .. note:: 69 | Build the documentation locally with the following command: 70 | 71 | .. 
code:: bash 72 | 73 | $ conda env create -f ci/environment-docs.yml 74 | $ cd docs 75 | $ make html 76 | 77 | The built documentation should be available in the ``docs/_build/``. 78 | 79 | 80 | 81 | .. _`pull requests`: 82 | .. _pull-requests: 83 | 84 | Preparing Pull Requests 85 | ----------------------- 86 | 87 | 88 | #. Fork the 89 | `intake-stac GitHub repository `__. It's 90 | fine to use ``intake-stac`` as your fork repository name because it will live 91 | under your user. 92 | 93 | 94 | #. Clone your fork locally using `git `_ and create a branch:: 95 | 96 | $ git clone git@github.com:YOUR_GITHUB_USERNAME/intake-stac.git 97 | $ cd intake-stac 98 | 99 | # now, to fix a bug or add feature create your own branch off "main": 100 | 101 | $ git checkout -b your-bugfix-feature-branch-name main 102 | 103 | 104 | #. Install development version in a conda environment:: 105 | 106 | $ conda env create -f ci/environment-dev.yml 107 | $ conda activate intake-stac-dev 108 | $ pip install -e . 109 | 110 | 111 | #. Install `pre-commit `_ and its hook on the intake-stac repo:: 112 | 113 | $ pip install --user pre-commit 114 | $ pre-commit install 115 | 116 | Afterwards ``pre-commit`` will run whenever you commit. 117 | 118 | https://pre-commit.com/ is a framework for managing and maintaining multi-language pre-commit hooks 119 | to ensure code-style and code formatting is consistent. 120 | 121 | 122 | #. Run all the tests 123 | 124 | Now running tests is as simple as issuing this command:: 125 | 126 | $ pytest --junitxml=test-reports/junit.xml --cov=./ --verbose 127 | 128 | 129 | This command will run tests via the "pytest" tool against Python 3.7. 130 | 131 | 132 | #. You can now edit your local working copy and run the tests again as necessary. Please follow PEP-8 for naming. 133 | 134 | When committing, ``pre-commit`` will re-format the files if necessary. 135 | 136 | 137 | #. 
Commit and push once your tests pass and you are happy with your change(s):: 138 | 139 | $ git commit -a -m "" 140 | $ git push -u 141 | 142 | 143 | #. Finally, submit a pull request through the GitHub website using this data:: 144 | 145 | head-fork: YOUR_GITHUB_USERNAME/intake-stac 146 | compare: your-branch-name 147 | 148 | base-fork: intake/intake-stac 149 | base: main 150 | -------------------------------------------------------------------------------- /docs/source/index.rst: -------------------------------------------------------------------------------- 1 | =========== 2 | Intake-stac 3 | =========== 4 | 5 | Intake-stac is an open source project and Python package for discovering, 6 | exploring, and loading spatio-temporal datasets. 7 | 8 | Intake-stac provides Intake Drivers for SpatioTemporal Asset Catalogs (STAC). 9 | It provides tools for opening STAC ``Catalogs``, ``Collections``, 10 | ``ItemCollections``, and ``Items`` as Intake catalogs. Intake and Intake-xarray 11 | provide the tooling for loading assets described in STAC into Xarray objects. 12 | 13 | .. toctree:: 14 | :maxdepth: 2 15 | :caption: Documentation Contents 16 | 17 | installation 18 | tutorial 19 | api 20 | contributing 21 | 22 | Feedback 23 | -------- 24 | 25 | If you encounter any errors or problems with **intake-stac**, 26 | please open an issue at the GitHub `main repository `_. 
27 | 28 | Indices and tables 29 | ================== 30 | 31 | * :ref:`genindex` 32 | * :ref:`modindex` 33 | * :ref:`search` 34 | -------------------------------------------------------------------------------- /docs/source/installation.rst: -------------------------------------------------------------------------------- 1 | ============ 2 | Installation 3 | ============ 4 | 5 | 6 | If you are using `Anaconda`_ or Miniconda, install Intake-stac with the following commands:: 7 | 8 | conda install -c conda-forge intake-stac 9 | 10 | If you are using virtualenv/pip, run the following command:: 11 | 12 | pip install intake-stac 13 | 14 | .. _Anaconda: https://www.anaconda.com/download/ 15 | -------------------------------------------------------------------------------- /docs/source/tutorial.rst: -------------------------------------------------------------------------------- 1 | ======== 2 | Tutorial 3 | ======== 4 | 5 | .. ipython:: python 6 | :suppress: 7 | 8 | import warnings 9 | with warnings.catch_warnings(record=True) as w: 10 | warnings.simplefilter("ignore") 11 | import pandas 12 | import xarray 13 | 14 | Intake-stac simply provides a thin interface that combines `pystac` and 15 | `intake`. Its basic usage is shown below: 16 | 17 | To begin, import intake: 18 | 19 | .. ipython:: python 20 | 21 | import intake 22 | 23 | Loading a catalog 24 | ----------------- 25 | 26 | You can load data from a STAC Catalog by providing the URL to a valid STAC 27 | Catalog (>1.0): 28 | 29 | .. ipython:: python 30 | 31 | url = 'https://raw.githubusercontent.com/radiantearth/stac-spec/v1.0.0/examples/catalog.json' 32 | catalog = intake.open_stac_catalog(url) 33 | list(catalog) 34 | 35 | Intake-Stac uses `pystac `_ to parse 36 | STAC objects. You can also pass ``pystac`` objects (e.g. 37 | ``pystac.Catalog``) directly to the Intake-stac constructors: 38 | 39 | ..
ipython:: python 40 | 41 | import pystac 42 | root_url = 'https://raw.githubusercontent.com/relativeorbit/aws-rtc-12SYJ/main' 43 | pystac_cat = pystac.read_file(f'{root_url}/catalog.json') 44 | cat = intake.open_stac_catalog(pystac_cat) 45 | 46 | You can also point to STAC Collections or Items. Each constructor returns an 47 | Intake Catalog with the top level corresponding to the STAC object used for 48 | initialization: 49 | 50 | .. ipython:: python 51 | 52 | stac_cat = intake.open_stac_catalog( 53 | f'{root_url}/catalog.json', 54 | ) 55 | collection_cat = intake.open_stac_collection( 56 | f'{root_url}/sentinel1-rtc-aws/collection.json', 57 | ) 58 | item_cat = intake.open_stac_item( 59 | f'{root_url}/sentinel1-rtc-aws/12SYJ/2021/S1A_20210105_12SYJ_DSC/S1A_20210105_12SYJ_DSC.json' 60 | ) 61 | 62 | Using the catalog 63 | ----------------- 64 | 65 | Once you have a catalog, you can display its entries by iterating through its 66 | contents: 67 | 68 | .. ipython:: python 69 | 70 | print(list(stac_cat)) 71 | cat = stac_cat['sentinel1-rtc-aws'] 72 | 73 | print(list(cat)) 74 | subcat = cat['12SYJ'] 75 | 76 | print(list(subcat)) 77 | subsubcat = subcat['2021'] 78 | 79 | print(list(subsubcat)[:3]) 80 | 81 | 82 | When you locate an item of interest, you have access to metadata and methods to load assets into Python objects. 83 | 84 | .. ipython:: python 85 | 86 | item = subsubcat['S1A_20210105_12SYJ_DSC'] 87 | print(type(item)) 88 | print(item.metadata) 89 | 90 | assets = list(item) 91 | print(assets) 92 | 93 | 94 | Loading a dataset 95 | ----------------- 96 | 97 | Once you have identified a dataset, you can load it into an ``xarray.DataArray`` 98 | using Intake's `to_dask()` method. This reads only metadata, and streams values over the network when required by computations or visualizations: 99 | 100 | ..
ipython:: python 101 | 102 | da = item['gamma0_vv'].to_dask() 103 | display(da) 104 | 105 | 106 | Working with `pystac-client` 107 | ---------------------------- 108 | 109 | Intake-stac integrates with `pystac-client` to facilitate dynamic search and 110 | discovery of assets through a STAC-API. To begin, construct a search query 111 | using `pystac-client`: 112 | 113 | .. ipython:: python 114 | 115 | import pystac_client 116 | URL = "https://earth-search.aws.element84.com/v0" 117 | catalog = pystac_client.Client.open(URL) 118 | 119 | results = catalog.search( 120 | collections=["sentinel-s2-l2a-cogs"], 121 | bbox = [35.48, -3.24, 35.58, -3.14], 122 | datetime="2020-07-01/2020-08-15") 123 | 124 | items = results.get_all_items() 125 | print(len(items)) 126 | 127 | In the code section above, `items` is a `pystac.ItemCollection` object. 128 | Intake-stac can turn this object into an Intake catalog: 129 | 130 | .. ipython:: python 131 | 132 | catalog = intake.open_stac_item_collection(items) 133 | list(catalog) 134 | 135 | Using xarray-assets 136 | ------------------- 137 | 138 | Intake-stac uses the `xarray-assets`_ STAC extension to automatically use the appropriate keywords to load a STAC asset into a data container. 139 | 140 | Intake-stac will automatically use the keywords from the `xarray-assets`_ STAC extension, if present, when loading data into a container. 141 | For example, the STAC collection at https://planetarycomputer.microsoft.com/api/stac/v1/collections/daymet-annual-hi defines an 142 | asset ``zarr-https`` with the metadata ``"xarray:open_kwargs": {"consolidated": true}`` to indicate that this dataset should be 143 | opened with the ``consolidated=True`` keyword argument. This will be used automatically by ``.to_dask()``. 144 | 145 | 146 | .. code-block:: python 147 | 148 | >>> collection = intake.open_stac_collection( 149 | ... "https://planetarycomputer.microsoft.com/api/stac/v1/collections/daymet-annual-hi" 150 | ... 
) 151 | 152 | >>> source = collection.get_asset("zarr-https") 153 | >>> source.to_dask() 154 | 155 | Dimensions: (nv: 2, time: 41, x: 284, y: 584) 156 | Coordinates: 157 | lat (y, x) float32 dask.array 158 | lon (y, x) float32 dask.array 159 | * time (time) datetime64[ns] 1980-07-01T12:00:00 ... 20... 160 | * x (x) float32 -5.802e+06 -5.801e+06 ... -5.519e+06 161 | * y (y) float32 -3.9e+04 -4e+04 ... -6.21e+05 -6.22e+05 162 | Dimensions without coordinates: nv 163 | Data variables: 164 | lambert_conformal_conic int16 ... 165 | prcp (time, y, x) float32 dask.array 166 | swe (time, y, x) float32 dask.array 167 | time_bnds (time, nv) datetime64[ns] dask.array 168 | tmax (time, y, x) float32 dask.array 169 | tmin (time, y, x) float32 dask.array 170 | vp (time, y, x) float32 dask.array 171 | Attributes: 172 | Conventions: CF-1.6 173 | Version_data: Daymet Data Version 4.0 174 | Version_software: Daymet Software Version 4.0 175 | citation: Please see http://daymet.ornl.gov/ for current Daymet ... 176 | references: Please see http://daymet.ornl.gov/ for current informa... 177 | source: Daymet Software Version 4.0 178 | start_year: 1980 179 | 180 | .. _xarray-assets: https://github.com/stac-extensions/xarray-assets 181 | -------------------------------------------------------------------------------- /examples/aws-earth-search.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "Intake-STAC + sat-search\n", 8 | "================\n", 9 | "\n", 10 | "Commonly, we'll use an API to search a large STAC catalog to narrow down the data we want to work with. Here we'll demonstrate using the [sat-search](https://github.com/sat-utils/sat-search) library that uses https://www.element84.com/earth-search/." 
11 | ] 12 | }, 13 | { 14 | "cell_type": "code", 15 | "execution_count": null, 16 | "metadata": {}, 17 | "outputs": [], 18 | "source": [ 19 | "%matplotlib inline\n", 20 | "import intake\n", 21 | "import satsearch" 22 | ] 23 | }, 24 | { 25 | "cell_type": "code", 26 | "execution_count": null, 27 | "metadata": {}, 28 | "outputs": [], 29 | "source": [ 30 | "bbox = [35.48, -3.24, 35.58, -3.14] # (min lon, min lat, max lon, max lat)\n", 31 | "dates = '2020-07-01/2020-08-15'\n", 32 | "\n", 33 | "URL='https://earth-search.aws.element84.com/v0'\n", 34 | "results = satsearch.Search.search(url=URL,\n", 35 | " collections=['sentinel-s2-l2a-cogs'], # note collection='sentinel-s2-l2a-cogs' doesn't work\n", 36 | " datetime=dates,\n", 37 | " bbox=bbox, \n", 38 | " sort=[''B5', 'red'->'B4'\n", 194 | "# An alternative organization is to store as a DataSet with common names:\n", 195 | "da['band'] = bands\n", 196 | "ds = da.to_dataset(dim='band')\n", 197 | "ds" 198 | ] 199 | }, 200 | { 201 | "cell_type": "code", 202 | "execution_count": null, 203 | "metadata": {}, 204 | "outputs": [], 205 | "source": [ 206 | "# Now we can calculate band indices of subregions easily\n", 207 | "NDVI = (ds['nir'] - ds['red']) / (ds['nir'] + ds['red'])" 208 | ] 209 | }, 210 | { 211 | "cell_type": "code", 212 | "execution_count": null, 213 | "metadata": {}, 214 | "outputs": [], 215 | "source": [ 216 | "NDVI.isel(y=slice(2000,3000), x=slice(1500,2000)).plot.imshow(cmap='BrBG', vmin=-1, vmax=1)" 217 | ] 218 | }, 219 | { 220 | "cell_type": "code", 221 | "execution_count": null, 222 | "metadata": {}, 223 | "outputs": [], 224 | "source": [] 225 | } 226 | ], 227 | "metadata": { 228 | "kernelspec": { 229 | "display_name": "Python 3", 230 | "language": "python", 231 | "name": "python3" 232 | }, 233 | "language_info": { 234 | "codemirror_mode": { 235 | "name": "ipython", 236 | "version": 3 237 | }, 238 | "file_extension": ".py", 239 | "mimetype": "text/x-python", 240 | "name": "python", 241 | "nbconvert_exporter": 
"python", 242 | "pygments_lexer": "ipython3", 243 | "version": "3.7.8" 244 | } 245 | }, 246 | "nbformat": 4, 247 | "nbformat_minor": 4 248 | } 249 | -------------------------------------------------------------------------------- /examples/planet-disaster-data.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "Intake-STAC + Planetscope\n", 8 | "================\n", 9 | "\n", 10 | "In this notebook, we'll take a look at some of the functionality in Intake-STAC by exploring STAC catalogs such as the open-access [Planet Disaster data catalog](https://planet.stac.cloud/?t=catalogs) hosted at https://raw.githubusercontent.com/cholmes/sample-stac/master/stac/catalog.json.\n", 11 | "\n", 12 | "STAC metadata is organized into a hierarchy of **Catalogs, Collections, and Items**. Items ultimately hold *Assets* which are data files such as satellite raster images. Ultimately the goal of intake-STAC is to facilitate loading these *Assets* directly into Python objects for interactive computation without worrying about filepaths and URLs." 13 | ] 14 | }, 15 | { 16 | "cell_type": "code", 17 | "execution_count": null, 18 | "metadata": {}, 19 | "outputs": [], 20 | "source": [ 21 | "%matplotlib inline\n", 22 | "import intake" 23 | ] 24 | }, 25 | { 26 | "cell_type": "code", 27 | "execution_count": null, 28 | "metadata": {}, 29 | "outputs": [], 30 | "source": [ 31 | "# intake checks for registered drivers when imported\n", 32 | "# You should see 'stac_catalog, stac_collection, stac_item, and stac_item_collection' if intake-stac is installed\n", 33 | "list(intake.registry)" 34 | ] 35 | }, 36 | { 37 | "cell_type": "markdown", 38 | "metadata": {}, 39 | "source": [ 40 | "## Catalog adapter\n", 41 | "\n", 42 | "We'll start by connecting to the STAC Catalog for `planet-disaster-data`. 
We read the top-level `catalog.json` file and drill down until we get to specific STAC Items." 43 | ] 44 | }, 45 | { 46 | "cell_type": "code", 47 | "execution_count": null, 48 | "metadata": {}, 49 | "outputs": [], 50 | "source": [ 51 | "# Load root catalog\n", 52 | "url = 'https://raw.githubusercontent.com/cholmes/sample-stac/master/stac/catalog.json'\n", 53 | "cat = intake.open_stac_catalog(url)\n", 54 | "print(cat.name)\n", 55 | "\n", 56 | "# This lists available subcatalogs:\n", 57 | "list(cat)" 58 | ] 59 | }, 60 | { 61 | "cell_type": "code", 62 | "execution_count": null, 63 | "metadata": {}, 64 | "outputs": [], 65 | "source": [ 66 | "# STAC files are in JSON format, which is accessible as Python dictionaries:\n", 67 | "cat.metadata" 68 | ] 69 | }, 70 | { 71 | "cell_type": "code", 72 | "execution_count": null, 73 | "metadata": {}, 74 | "outputs": [], 75 | "source": [ 76 | "# Drill down into subcatalogs\n", 77 | "subcat = cat['hurricane-harvey']\n", 78 | "list(subcat)" 79 | ] 80 | }, 81 | { 82 | "cell_type": "code", 83 | "execution_count": null, 84 | "metadata": {}, 85 | "outputs": [], 86 | "source": [ 87 | "# Another subcatalog!\n", 88 | "subcat1 = subcat['hurricane-harvey-0831']\n", 89 | "list(subcat1)" 90 | ] 91 | }, 92 | { 93 | "cell_type": "code", 94 | "execution_count": null, 95 | "metadata": {}, 96 | "outputs": [], 97 | "source": [ 98 | "# Load a STAC Item\n", 99 | "item = subcat1['Houston-East-20170831-103f-100d-0f4f-RGB']" 100 | ] 101 | }, 102 | { 103 | "cell_type": "code", 104 | "execution_count": null, 105 | "metadata": {}, 106 | "outputs": [], 107 | "source": [ 108 | "item.metadata" 109 | ] 110 | }, 111 | { 112 | "cell_type": "code", 113 | "execution_count": null, 114 | "metadata": {}, 115 | "outputs": [], 116 | "source": [ 117 | "# Item Assets are accessible via lists just like subcatalogs:\n", 118 | "list(item)" 119 | ] 120 | }, 121 | { 122 | "cell_type": "code", 123 | "execution_count": null, 124 | "metadata": {}, 125 | "outputs": [], 126 | 
"source": [ 127 | "item['thumbnail'].metadata" 128 | ] 129 | }, 130 | { 131 | "cell_type": "code", 132 | "execution_count": null, 133 | "metadata": {}, 134 | "outputs": [], 135 | "source": [ 136 | "# Finally we can display an image!\n", 137 | "from IPython.display import Image\n", 138 | "Image(item['thumbnail'].urlpath)" 139 | ] 140 | }, 141 | { 142 | "cell_type": "code", 143 | "execution_count": null, 144 | "metadata": {}, 145 | "outputs": [], 146 | "source": [ 147 | "# Or we can load the image directly into Xarray for analysis\n", 148 | "# The full resolution image is big, but we use dask behind the scenes to only read metadata at first\n", 149 | "asset = item['mosaic']\n", 150 | "da = asset.to_dask()\n", 151 | "da" 152 | ] 153 | }, 154 | { 155 | "cell_type": "code", 156 | "execution_count": null, 157 | "metadata": {}, 158 | "outputs": [], 159 | "source": [ 160 | "# The chunk structure isn't set by default\n", 161 | "# setting a chunk structure makes operating on subsets of pixels more efficient\n", 162 | "da = da.chunk(dict(band=1, x=2560, y=2560))\n", 163 | "da" 164 | ] 165 | }, 166 | { 167 | "cell_type": "code", 168 | "execution_count": null, 169 | "metadata": {}, 170 | "outputs": [], 171 | "source": [ 172 | "da.isel(band=0, x=slice(0, 2560), y=slice(0, 1280)).plot.imshow()" 173 | ] 174 | }, 175 | { 176 | "cell_type": "code", 177 | "execution_count": null, 178 | "metadata": {}, 179 | "outputs": [], 180 | "source": [] 181 | } 182 | ], 183 | "metadata": { 184 | "kernelspec": { 185 | "display_name": "Python 3", 186 | "language": "python", 187 | "name": "python3" 188 | }, 189 | "language_info": { 190 | "codemirror_mode": { 191 | "name": "ipython", 192 | "version": 3 193 | }, 194 | "file_extension": ".py", 195 | "mimetype": "text/x-python", 196 | "name": "python", 197 | "nbconvert_exporter": "python", 198 | "pygments_lexer": "ipython3", 199 | "version": "3.7.8" 200 | } 201 | }, 202 | "nbformat": 4, 203 | "nbformat_minor": 4 204 | } 205 | 
-------------------------------------------------------------------------------- /intake_stac/__init__.py: -------------------------------------------------------------------------------- 1 | import intake # noqa: F401 2 | from pkg_resources import DistributionNotFound, get_distribution 3 | 4 | from .catalog import StacCatalog, StacCollection, StacItem, StacItemCollection # noqa: F401 5 | 6 | try: 7 | __version__ = get_distribution(__name__).version 8 | except DistributionNotFound: # noqa: F401; pragma: no cover 9 | # package is not installed 10 | __version__ = '999' 11 | -------------------------------------------------------------------------------- /intake_stac/catalog.py: -------------------------------------------------------------------------------- 1 | import os.path 2 | import warnings 3 | 4 | import pystac 5 | from intake.catalog import Catalog 6 | from intake.catalog.local import LocalCatalogEntry 7 | from intake.source import DataSource 8 | from pkg_resources import get_distribution 9 | from pystac.extensions.eo import EOExtension 10 | 11 | __version__ = get_distribution('intake_stac').version 12 | 13 | # STAC catalog asset 'type' determines intake driver: 14 | # https://github.com/radiantearth/stac-spec/blob/master/item-spec/item-spec.md#media-types 15 | default_type = 'application/rasterio' 16 | default_driver = 'rasterio' 17 | 18 | drivers = { 19 | 'application/netcdf': 'netcdf', 20 | 'application/x-netcdf': 'netcdf', 21 | 'application/parquet': 'parquet', 22 | 'application/x-parquet': 'parquet', 23 | 'application/x-hdf': 'netcdf', 24 | 'application/x-hdf5': 'netcdf', 25 | 'application/rasterio': 'rasterio', 26 | 'image/vnd.stac.geotiff': 'rasterio', 27 | 'image/vnd.stac.geotiff; cloud-optimized=true': 'rasterio', 28 | 'image/x.geotiff': 'rasterio', 29 | 'image/tiff; application=geotiff': 'rasterio', 30 | 'image/tiff; application=geotiff; profile=cloud-optimized': 'rasterio', # noqa: E501 31 | 'image/tiff': 'rasterio', 32 | 'image/jp2': 'rasterio', 33 
| 'image/png': 'xarray_image', 34 | 'image/jpg': 'xarray_image', 35 | 'image/jpeg': 'xarray_image', 36 | 'text/xml': 'textfiles', 37 | 'text/plain': 'textfiles', 38 | 'text/html': 'textfiles', 39 | 'application/json': 'textfiles', 40 | 'application/geo+json': 'geopandas', 41 | 'application/geopackage+sqlite3': 'geopandas', 42 | 'application/vnd+zarr': 'zarr', 43 | 'application/xml': 'textfiles', 44 | } 45 | 46 | 47 | class AbstractStacCatalog(Catalog): 48 | 49 | version = __version__ 50 | partition_access = False 51 | 52 | def __init__(self, stac_obj, **kwargs): 53 | """ 54 | Initialize the catalog. 55 | 56 | Parameters 57 | ---------- 58 | stac_obj: stastac.STACObject 59 | A pystac.STACObject pointing to a STAC object 60 | kwargs : dict, optional 61 | Passed to intake.Catalog.__init__ 62 | """ 63 | if isinstance(stac_obj, self._stac_cls): 64 | self._stac_obj = stac_obj 65 | elif isinstance(stac_obj, str): 66 | self._stac_obj = self._stac_cls.from_file(stac_obj) 67 | else: 68 | raise ValueError('Expected %s instance, got: %s' % (self._stac_cls, type(stac_obj))) 69 | 70 | metadata = self._get_metadata(**kwargs.pop('metadata', {})) 71 | try: 72 | name = kwargs.pop('name', self._stac_obj.id) 73 | except AttributeError: 74 | # Not currently tested. 75 | # ItemCollection does not require an id 76 | # Unclear what the state of ItemCollection is. 77 | name = str(type(self._stac_obj)) 78 | 79 | super().__init__(name=name, metadata=metadata, **kwargs) 80 | 81 | @classmethod 82 | def from_url(cls, url, **kwargs): 83 | """ 84 | Initialize the catalog from a STAC url. 85 | 86 | Parameters 87 | ---------- 88 | url: str 89 | A URL pointing to a STAC json object 90 | kwargs : dict, optional 91 | Passed to intake.Catolog.__init__ 92 | """ 93 | stac_obj = cls._stac_cls.from_file(url) 94 | return cls(stac_obj, **kwargs) 95 | 96 | def _get_metadata(self, **kwargs): 97 | return kwargs # pragma: no cover 98 | 99 | def serialize(self): 100 | """ 101 | Serialize the catalog to yaml. 
102 | 103 | Returns 104 | ------- 105 | A string with the yaml-formatted catalog (just top-level). 106 | """ 107 | return self.yaml() 108 | 109 | 110 | class StacCatalog(AbstractStacCatalog): 111 | """ 112 | Maps Intake Catalog to a STAC Catalog 113 | https://pystac.readthedocs.io/en/latest/api.html?#catalog-spec 114 | """ 115 | 116 | name = 'stac_catalog' 117 | _stac_cls = pystac.Catalog 118 | 119 | def _load(self): 120 | """ 121 | Load the STAC Catalog. 122 | """ 123 | for subcatalog in self._stac_obj.get_children(): 124 | if isinstance(subcatalog, pystac.Collection): 125 | # Collection subclasses Catalog, so check it first 126 | driver = StacCollection 127 | else: 128 | driver = StacCatalog 129 | 130 | self._entries[subcatalog.id] = LocalCatalogEntry( 131 | name=subcatalog.id, 132 | description=subcatalog.description, 133 | driver=driver, # recursive 134 | catalog=self, 135 | args={'stac_obj': subcatalog.get_self_href()}, 136 | ) 137 | 138 | for item in self._stac_obj.get_items(): 139 | self._entries[item.id] = LocalCatalogEntry( 140 | name=item.id, 141 | description='', 142 | driver=StacItem, 143 | catalog=self, 144 | args={'stac_obj': item}, 145 | ) 146 | 147 | def _get_metadata(self, **kwargs): 148 | """ 149 | Keep copy of all STAC JSON except for links 150 | """ 151 | # NOTE: why not links? 152 | metadata = self._stac_obj.to_dict() 153 | del metadata['links'] 154 | return metadata 155 | 156 | 157 | class StacCollection(StacCatalog): 158 | """ 159 | Maps Intake Catalog to a STAC Collection 160 | https://pystac.readthedocs.io/en/latest/api.html#collection-spec 161 | 162 | Collections have a number of properties that Catalogs do not, most notably 163 | the spatial and temporal extents. This is currently a placeholder for 164 | future Collection-specific attributes and methods. 
165 | """ 166 | 167 | name = 'stac_catalog' 168 | _stac_cls = pystac.Collection 169 | 170 | def get_asset( 171 | self, 172 | key, 173 | storage_options=None, 174 | merge_asset_storage_options=True, 175 | merge_asset_open_kwargs=True, 176 | **kwargs, 177 | ): 178 | r""" 179 | Get a datasource for a collection-level asset. 180 | 181 | Parameters 182 | ---------- 183 | key : str, optional 184 | The asset key to use if multiple Zarr assets are provided. 185 | storage_options : dict, optional 186 | Additional arguments for the backend fsspec filesystem. 187 | merge_asset_storage_option : bool, default True 188 | Whether to merge the storage options provided by the asset under the 189 | ``xarray:storage_options`` key with `storage_options`. 190 | merge_asset_open_kwargs : bool, default True 191 | Whether to merge the keywords provided by the asset under the 192 | ``xarray:open_kwargs`` key with ``**kwargs``. 193 | **kwargs 194 | Additional keyword options are provided to the loader, for example ``consolidated=True`` 195 | to pass to :meth:`xarray.open_zarr`. 196 | 197 | Notes 198 | ----- 199 | The Media Type of the asset will be used to determine how to load the data. 200 | 201 | Returns 202 | ------- 203 | DataSource 204 | The dataset described by the asset loaded into a dask-backed object. 205 | """ 206 | try: 207 | asset = self._stac_obj.assets[key] 208 | except KeyError: 209 | raise KeyError( 210 | f'No asset named {key}. 
Should be one of {list(self._stac_obj.assets)}' 211 | ) from None 212 | 213 | storage_options = storage_options or {} 214 | if merge_asset_storage_options: 215 | asset_storage_options = asset.extra_fields.get('xarray:storage_options', {}) 216 | storage_options.update(asset_storage_options) 217 | 218 | if merge_asset_open_kwargs: 219 | asset_open_kwargs = asset.extra_fields.get('xarray:open_kwargs', {}) 220 | kwargs.update(asset_open_kwargs) 221 | 222 | return StacAsset(key, asset)(storage_options=storage_options, **kwargs) 223 | 224 | 225 | class StacItemCollection(AbstractStacCatalog): 226 | """ 227 | Maps ItemCollection returned from a STAC API to Intake (Sub)Catalog 228 | https://github.com/radiantearth/stac-api-spec/tree/master/fragments/itemcollection 229 | 230 | Note search results often use the single file stac extension: 231 | https://pystac.readthedocs.io/en/latest/api.html?#single-file-stac-extension 232 | """ 233 | 234 | name = 'stac_itemcollection' 235 | _stac_cls = pystac.ItemCollection 236 | 237 | def _load(self): 238 | """ 239 | Load the STAC Item Collection. 240 | """ 241 | # if not self._stac_obj.ext.implements('single-file-stac'): 242 | # raise ValueError("StacItemCollection requires 'single-file-stac' extension") 243 | for item in self._stac_obj.items: 244 | self._entries[item.id] = LocalCatalogEntry( 245 | name=item.id, 246 | description='', 247 | driver=StacItem, 248 | catalog=self, 249 | args={'stac_obj': item}, 250 | ) 251 | 252 | def to_geopandas(self, crs=None): 253 | """ 254 | Load the STAC Item Collection into a geopandas GeoDataFrame 255 | 256 | Parameters 257 | ---------- 258 | crs : str or dict (optional) 259 | Coordinate reference system to set on the resulting frame. 260 | 261 | Returns 262 | ------- 263 | GeoDataFrame 264 | 265 | """ 266 | try: 267 | import geopandas as gpd 268 | except ImportError: 269 | raise ImportError( 270 | 'Using to_geopandas requires the `geopandas` package.' 271 | 'You can install it via Pip or Conda.' 
272 | ) 273 | 274 | if crs is None: 275 | crs = 'epsg:4326' 276 | gf = gpd.GeoDataFrame.from_features(self._stac_obj.to_dict(), crs=crs) 277 | return gf 278 | 279 | 280 | class StacItem(AbstractStacCatalog): 281 | """ 282 | Maps STAC Item to Intake (Sub)Catalog 283 | https://pystac.readthedocs.io/en/latest/api.html#item-spec 284 | """ 285 | 286 | name = 'stac_item' 287 | _stac_cls = pystac.Item 288 | 289 | def __getitem__(self, key): 290 | result = super().__getitem__(key) 291 | # TODO: handle non-string assets? 292 | asset = self._entries[key] 293 | storage_options = asset._stac_obj.extra_fields.get('xarray:storage_options', {}) 294 | open_kwargs = asset._stac_obj.extra_fields.get('xarray:open_kwargs', {}) 295 | 296 | if isinstance(result, DataSource): 297 | kwargs = result._captured_init_kwargs 298 | kwargs = {**kwargs, **dict(storage_options=storage_options), **open_kwargs} 299 | result = result(*result._captured_init_args, **kwargs) 300 | 301 | return result 302 | 303 | def _load(self): 304 | """ 305 | Load the STAC Item. 306 | """ 307 | for key, value in self._stac_obj.assets.items(): 308 | self._entries[key] = StacAsset(key, value) 309 | 310 | def _get_metadata(self, **kwargs): 311 | metadata = self._stac_obj.properties.copy() 312 | for attr in ['bbox', 'geometry', 'datetime', 'date']: 313 | metadata[attr] = getattr(self._stac_obj, attr, None) 314 | metadata.update(kwargs) 315 | return metadata 316 | 317 | def _get_band_info(self): 318 | """ 319 | Return list of band info dictionaries (name, common_name, etc.)... 320 | """ 321 | band_info = [] 322 | for band in EOExtension.ext(self._stac_obj).bands: 323 | band_info.append(band.to_dict()) 324 | return band_info 325 | 326 | def stack_bands(self, bands, path_as_pattern=None, concat_dim='band'): 327 | """ 328 | Stack the listed bands over the ``band`` dimension. 
329 | 330 | This method only works for STAC Items using the 'eo' Extension 331 | https://github.com/radiantearth/stac-spec/tree/master/extensions/eo 332 | 333 | NOTE: This method is not aware of geotransform information. It *assumes* 334 | bands for a given STAC Item have the same coordinate reference system (CRS). 335 | This is usually the case for a given multi-band satellite acquisition. 336 | Coordinate alignment is performed automatically upon calling the 337 | `to_dask()` method to load into an Xarray DataArray if bands have diffent 338 | ground sample distance (gsd) or array shapes. 339 | 340 | Parameters 341 | ---------- 342 | bands : list of strings representing the different bands 343 | (e.g. ['B4', B5'], ['red', 'nir']). 344 | 345 | Returns 346 | ------- 347 | StacAsset with mapping of Asset names to Xarray bands 348 | 349 | Examples 350 | -------- 351 | stack = item.stack_bands(['nir','red']) 352 | da = stack(chunks=dict(band=1, x=2048, y=2048)).to_dask() 353 | 354 | stack = item.stack_bands(['B4','B5'], path_as_pattern='{band}.TIF') 355 | da = stack(chunks=dict(band=1, x=2048, y=2048)).to_dask() 356 | """ 357 | if not EOExtension.has_extension(self._stac_obj): 358 | raise ValueError('STAC Item must implement "eo" extension to use this method') 359 | 360 | band_info = self._get_band_info() 361 | configDict = {} 362 | metadatas = {} 363 | titles = [] 364 | hrefs = [] 365 | types = [] 366 | assets = self._stac_obj.assets 367 | for band in bands: 368 | # band can be band id, name or common_name 369 | if band in assets: 370 | info = next( 371 | (b for b in band_info if b.get('id', b.get('name')) == band), 372 | None, 373 | ) 374 | else: 375 | info = next((b for b in band_info if b.get('common_name') == band), None) 376 | if info is not None: 377 | band = info.get('id', info.get('name')) 378 | 379 | if band not in assets or info is None: 380 | valid_band_names = [] 381 | for b in band_info: 382 | valid_band_names.append(b.get('id', b.get('name'))) 383 | 
valid_band_names.append(b.get('common_name')) 384 | raise ValueError( 385 | f'{band} not found in list of eo:bands in collection.' 386 | f'Valid values: {sorted(list(set(valid_band_names)))}' 387 | ) 388 | asset = assets.get(band) 389 | metadatas[band] = asset.to_dict() 390 | titles.append(band) 391 | types.append(asset.media_type) 392 | hrefs.append(asset.href) 393 | 394 | unique_types = set(types) 395 | if len(unique_types) != 1: 396 | raise ValueError( 397 | f'Stacking failed: bands must have type, multiple found: {unique_types}' 398 | ) 399 | 400 | configDict['name'] = '_'.join(bands) 401 | configDict['description'] = ', '.join(titles) 402 | configDict['args'] = dict( 403 | chunks={}, concat_dim=concat_dim, path_as_pattern=path_as_pattern, urlpath=hrefs 404 | ) 405 | configDict['metadata'] = metadatas 406 | 407 | return CombinedAssets(configDict) 408 | 409 | def _yaml(self): 410 | data = {'metadata': {}, 'sources': {}} 411 | data['metadata'].update(self.metadata) 412 | for key, source in self.items(): 413 | data['sources'][key] = source._yaml()['sources']['stac_asset'] 414 | data['sources'][key]['direct_access'] = 'allow' 415 | data['sources'][key]['metadata'].pop('catalog_dir', None) 416 | return data 417 | 418 | 419 | class StacAsset(LocalCatalogEntry): 420 | """ 421 | Maps 1 STAC Item Asset to 1 Intake Catalog Entry 422 | https://pystac.readthedocs.io/en/latest/api.html#asset 423 | """ 424 | 425 | name = 'stac_asset' 426 | _stac_cls = pystac.item.Asset 427 | 428 | def __init__(self, key, asset): 429 | """ 430 | Construct an Intake catalog 'Source' from a STAC Item Asset. 
431 | asset = pystac.item.Asset 432 | """ 433 | self._stac_obj = asset 434 | driver = self._get_driver(asset) 435 | 436 | super().__init__( 437 | name=key, 438 | description=asset.title, 439 | driver=driver, 440 | direct_access='allow', 441 | args=self._get_args(asset, driver), 442 | metadata=self._get_metadata(asset), 443 | ) 444 | 445 | def _get_metadata(self, asset): 446 | """ 447 | Copy STAC Asset Metadata and setup default plot 448 | """ 449 | metadata = asset.to_dict() 450 | default_plot = self._get_plot(asset) 451 | if default_plot: 452 | metadata['plots'] = default_plot 453 | 454 | return metadata 455 | 456 | def _get_plot(self, asset): 457 | """ 458 | Default hvplot plot based on Asset mimetype 459 | """ 460 | # NOTE: consider geojson, parquet, hdf defaults in future... 461 | default_plot = None 462 | type = asset.media_type 463 | if type: 464 | if type in ['image/jpeg', 'image/jpg', 'image/png']: 465 | default_plot = dict( 466 | thumbnail=dict( 467 | kind='rgb', 468 | x='x', 469 | y='y', 470 | bands='channel', 471 | data_aspect=1, 472 | flip_yaxis=True, 473 | xaxis=False, 474 | yaxis=False, 475 | ) 476 | ) 477 | 478 | elif 'tiff' in type: 479 | default_plot = dict( 480 | geotiff=dict( 481 | kind='image', 482 | x='x', 483 | y='y', 484 | frame_width=500, 485 | data_aspect=1, 486 | rasterize=True, 487 | dynamic=True, 488 | cmap='viridis', 489 | ) 490 | ) 491 | 492 | return default_plot 493 | 494 | def _get_driver(self, asset): 495 | """ 496 | Assign intake driver for data I/O 497 | """ 498 | entry_type = asset.media_type 499 | 500 | if entry_type in ['', 'null', None]: 501 | 502 | suffix = '.tif' 503 | if asset.media_type: 504 | suffix = os.path.splitext(asset.media_type)[-1] 505 | if suffix in ['.nc', '.h5', '.hdf']: 506 | asset.media_type = 'application/netcdf' 507 | warnings.warn( 508 | f'STAC Asset "type" missing, assigning {entry_type} based on href suffix {suffix}:\n{asset.media_type}' # noqa: E501 509 | ) 510 | else: 511 | asset.media_type = 
default_type 512 | warnings.warn( 513 | f'STAC Asset "type" missing, assuming default type={default_type}:\n{asset}' # noqa: E501 514 | ) 515 | entry_type = asset.media_type 516 | 517 | # if mimetype not registered try rasterio driver 518 | driver = drivers.get(entry_type, default_driver) 519 | 520 | return driver 521 | 522 | def _get_args(self, asset, driver): 523 | """ 524 | Optional keyword arguments to pass to intake driver 525 | """ 526 | args = {'urlpath': asset.href} 527 | if driver in ['netcdf', 'rasterio', 'xarray_image']: 528 | # NOTE: force using dask? 529 | args.update(chunks={}) 530 | 531 | return args 532 | 533 | 534 | class CombinedAssets(LocalCatalogEntry): 535 | """ 536 | Maps multiple STAC Item Assets to 1 Intake Catalog Entry 537 | """ 538 | 539 | def __init__(self, configDict): 540 | """ 541 | configDict = intake Entry dictionary from stack_bands() method 542 | """ 543 | super().__init__( 544 | name=configDict['name'], 545 | description=configDict['description'], 546 | driver='rasterio', # stack_bands only relevant to rasterio driver? 
547 | direct_access=True, 548 | args=configDict['args'], 549 | metadata=configDict['metadata'], 550 | ) 551 | -------------------------------------------------------------------------------- /intake_stac/tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/intake/intake-stac/227a74c5d3968b926ab0bda1526fa7ae3558deab/intake_stac/tests/__init__.py -------------------------------------------------------------------------------- /intake_stac/tests/data/1.0.0/catalog/catalog.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "Catalog", 3 | "id": "test", 4 | "stac_version": "1.0.0", 5 | "description": "test catalog", 6 | "links": [ 7 | { 8 | "rel": "child", 9 | "href": "./child-catalog.json", 10 | "type": "application/json" 11 | }, 12 | { 13 | "rel": "root", 14 | "href": "./catalog.json", 15 | "type": "application/json" 16 | } 17 | ], 18 | "stac_extensions": [] 19 | } 20 | -------------------------------------------------------------------------------- /intake_stac/tests/data/1.0.0/catalog/child-catalog.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "Catalog", 3 | "id": "test", 4 | "stac_version": "1.0.0", 5 | "description": "child catalog", 6 | "links": [ 7 | { 8 | "rel": "root", 9 | "href": "./catalog.json", 10 | "type": "application/json" 11 | } 12 | ], 13 | "stac_extensions": [] 14 | } 15 | -------------------------------------------------------------------------------- /intake_stac/tests/data/1.0.0/collection/collection.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "simple-collection", 3 | "type": "Collection", 4 | "stac_extensions": [ 5 | "https://stac-extensions.github.io/eo/v1.0.0/schema.json", 6 | "https://stac-extensions.github.io/view/v1.0.0/schema.json" 7 | ], 8 | "stac_version": "1.0.0", 9 | "description": "A 
simple collection demonstrating core catalog fields with links to a couple of items", 10 | "title": "Simple Example Collection", 11 | "providers": [ 12 | { 13 | "name": "Remote Data, Inc", 14 | "description": "Producers of awesome spatiotemporal assets", 15 | "roles": ["producer", "processor"], 16 | "url": "http://remotedata.io" 17 | } 18 | ], 19 | "extent": { 20 | "spatial": { 21 | "bbox": [ 22 | [ 23 | 172.91173669923782, 1.3438851951615003, 172.95469614953714, 24 | 1.3690476620161975 25 | ] 26 | ] 27 | }, 28 | "temporal": { 29 | "interval": [["2020-12-11T22:38:32.125Z", "2020-12-14T18:02:31.437Z"]] 30 | } 31 | }, 32 | "license": "CC-BY-4.0", 33 | "summaries": { 34 | "platform": ["cool_sat1", "cool_sat2"], 35 | "constellation": ["ion"], 36 | "instruments": ["cool_sensor_v1", "cool_sensor_v2"], 37 | "gsd": { 38 | "minimum": 0.512, 39 | "maximum": 0.66 40 | }, 41 | "eo:cloud_cover": { 42 | "minimum": 1.2, 43 | "maximum": 1.2 44 | }, 45 | "proj:epsg": { 46 | "minimum": 32659, 47 | "maximum": 32659 48 | }, 49 | "view:sun_elevation": { 50 | "minimum": 54.9, 51 | "maximum": 54.9 52 | }, 53 | "view:off_nadir": { 54 | "minimum": 3.8, 55 | "maximum": 3.8 56 | }, 57 | "view:sun_azimuth": { 58 | "minimum": 135.7, 59 | "maximum": 135.7 60 | } 61 | }, 62 | "links": [ 63 | { 64 | "rel": "root", 65 | "href": "./collection.json", 66 | "type": "application/json" 67 | }, 68 | { 69 | "rel": "item", 70 | "href": "./simple-item.json", 71 | "type": "application/geo+json", 72 | "title": "Simple Item" 73 | } 74 | ] 75 | } 76 | -------------------------------------------------------------------------------- /intake_stac/tests/data/1.0.0/collection/simple-item.json: -------------------------------------------------------------------------------- 1 | { 2 | "stac_version": "1.0.0", 3 | "stac_extensions": [ 4 | "https://stac-extensions.github.io/projection/v1.0.0/schema.json", 5 | "https://stac-extensions.github.io/eo/v1.0.0/schema.json" 6 | ], 7 | "type": "Feature", 8 | "id": 
"S2B_MSIL2A_20171227T160459_N0212_R054_T17QLA_20201014T165101", 9 | "bbox": [ 10 | 172.91173669923782, 1.3438851951615003, 172.95469614953714, 11 | 1.3690476620161975 12 | ], 13 | "geometry": { 14 | "coordinates": [ 15 | [ 16 | [-82.89978, 18.98277161], 17 | [-81.85693, 18.99053787], 18 | [-81.85202, 17.99825755], 19 | [-82.888855, 17.99092482], 20 | [-82.89978, 18.98277161] 21 | ] 22 | ], 23 | "type": "Polygon" 24 | }, 25 | "properties": { 26 | "datetime": "2017-12-27T16:04:59.027000Z" 27 | }, 28 | "collection": "simple-collection", 29 | "links": [ 30 | { 31 | "rel": "collection", 32 | "href": "./collection.json", 33 | "type": "application/json", 34 | "title": "Simple Example Collection" 35 | }, 36 | { 37 | "rel": "root", 38 | "href": "./collection.json", 39 | "type": "application/json" 40 | }, 41 | { 42 | "rel": "parent", 43 | "href": "./collection.json", 44 | "type": "application/json" 45 | } 46 | ], 47 | "assets": { 48 | "B02": { 49 | "href": "https://sentinel2l2a01.blob.core.windows.net/sentinel2-l2/17/Q/LA/2017/12/27/S2B_MSIL2A_20171227T160459_N0212_R054_T17QLA_20201014T165101.SAFE/GRANULE/L2A_T17QLA_A004227_20171227T160750/IMG_DATA/R10m/T17QLA_20171227T160459_B02_10m.tif", 50 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 51 | "title": "Band 2 - Blue", 52 | "eo:bands": [ 53 | { 54 | "name": "B02", 55 | "common_name": "blue", 56 | "description": "Band 2 - Blue", 57 | "center_wavelength": 0.49, 58 | "full_width_half_max": 0.098 59 | } 60 | ], 61 | "gsd": 10, 62 | "proj:shape": [10980, 10980], 63 | "proj:bbox": [300000, 1990200, 409800, 2100000], 64 | "proj:transform": [10, 0, 300000, 0, -10, 2100000], 65 | "roles": ["data"] 66 | }, 67 | "B03": { 68 | "href": "https://sentinel2l2a01.blob.core.windows.net/sentinel2-l2/17/Q/LA/2017/12/27/S2B_MSIL2A_20171227T160459_N0212_R054_T17QLA_20201014T165101.SAFE/GRANULE/L2A_T17QLA_A004227_20171227T160750/IMG_DATA/R10m/T17QLA_20171227T160459_B03_10m.tif", 69 | "type": "image/tiff; application=geotiff; 
profile=cloud-optimized", 70 | "title": "Band 3 - Green", 71 | "eo:bands": [ 72 | { 73 | "name": "B03", 74 | "common_name": "green", 75 | "description": "Band 3 - Green", 76 | "center_wavelength": 0.56, 77 | "full_width_half_max": 0.045 78 | } 79 | ], 80 | "gsd": 10, 81 | "proj:shape": [10980, 10980], 82 | "proj:bbox": [300000, 1990200, 409800, 2100000], 83 | "proj:transform": [10, 0, 300000, 0, -10, 2100000], 84 | "roles": ["data"] 85 | } 86 | } 87 | } 88 | -------------------------------------------------------------------------------- /intake_stac/tests/data/1.0.0/collection/zarr-collection.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "Collection", 3 | "id": "daymet-daily-hi", 4 | "stac_version": "1.0.0", 5 | "description": "{{ collection.description }}", 6 | "links": [ 7 | { 8 | "rel": "license", 9 | "href": "https://science.nasa.gov/earth-science/earth-science-data/data-information-policy" 10 | } 11 | ], 12 | "stac_extensions": [ 13 | "https://stac-extensions.github.io/datacube/v2.0.0/schema.json" 14 | ], 15 | "cube:dimensions": { 16 | "time": { 17 | "type": "temporal", 18 | "description": "24-hour day based on local time", 19 | "extent": ["1980-01-01T12:00:00Z", "2020-12-30T12:00:00Z"] 20 | }, 21 | "x": { 22 | "type": "spatial", 23 | "axis": "x", 24 | "description": "x coordinate of projection", 25 | "extent": [-5802250.0, -5519250.0], 26 | "step": 1000.0, 27 | "reference_system": { 28 | "$schema": "https://proj.org/schemas/v0.2/projjson.schema.json", 29 | "type": "ProjectedCRS", 30 | "name": "undefined", 31 | "base_crs": { 32 | "name": "undefined", 33 | "datum": { 34 | "type": "GeodeticReferenceFrame", 35 | "name": "undefined", 36 | "ellipsoid": { 37 | "name": "undefined", 38 | "semi_major_axis": 6378137, 39 | "inverse_flattening": 298.257223563 40 | } 41 | }, 42 | "coordinate_system": { 43 | "subtype": "ellipsoidal", 44 | "axis": [ 45 | { 46 | "name": "Longitude", 47 | "abbreviation": "lon", 48 | 
"direction": "east", 49 | "unit": "degree" 50 | }, 51 | { 52 | "name": "Latitude", 53 | "abbreviation": "lat", 54 | "direction": "north", 55 | "unit": "degree" 56 | } 57 | ] 58 | } 59 | }, 60 | "conversion": { 61 | "name": "unknown", 62 | "method": { 63 | "name": "Lambert Conic Conformal (2SP)", 64 | "id": { 65 | "authority": "EPSG", 66 | "code": 9802 67 | } 68 | }, 69 | "parameters": [ 70 | { 71 | "name": "Latitude of 1st standard parallel", 72 | "value": 25, 73 | "unit": "degree", 74 | "id": { 75 | "authority": "EPSG", 76 | "code": 8823 77 | } 78 | }, 79 | { 80 | "name": "Latitude of 2nd standard parallel", 81 | "value": 60, 82 | "unit": "degree", 83 | "id": { 84 | "authority": "EPSG", 85 | "code": 8824 86 | } 87 | }, 88 | { 89 | "name": "Latitude of false origin", 90 | "value": 42.5, 91 | "unit": "degree", 92 | "id": { 93 | "authority": "EPSG", 94 | "code": 8821 95 | } 96 | }, 97 | { 98 | "name": "Longitude of false origin", 99 | "value": -100, 100 | "unit": "degree", 101 | "id": { 102 | "authority": "EPSG", 103 | "code": 8822 104 | } 105 | }, 106 | { 107 | "name": "Easting at false origin", 108 | "value": 0, 109 | "unit": "metre", 110 | "id": { 111 | "authority": "EPSG", 112 | "code": 8826 113 | } 114 | }, 115 | { 116 | "name": "Northing at false origin", 117 | "value": 0, 118 | "unit": "metre", 119 | "id": { 120 | "authority": "EPSG", 121 | "code": 8827 122 | } 123 | } 124 | ] 125 | }, 126 | "coordinate_system": { 127 | "subtype": "Cartesian", 128 | "axis": [ 129 | { 130 | "name": "Easting", 131 | "abbreviation": "E", 132 | "direction": "east", 133 | "unit": "metre" 134 | }, 135 | { 136 | "name": "Northing", 137 | "abbreviation": "N", 138 | "direction": "north", 139 | "unit": "metre" 140 | } 141 | ] 142 | } 143 | } 144 | }, 145 | "y": { 146 | "type": "spatial", 147 | "axis": "y", 148 | "description": "y coordinate of projection", 149 | "extent": [-622000.0, -39000.0], 150 | "step": -1000.0, 151 | "reference_system": { 152 | "$schema": 
"https://proj.org/schemas/v0.2/projjson.schema.json", 153 | "type": "ProjectedCRS", 154 | "name": "undefined", 155 | "base_crs": { 156 | "name": "undefined", 157 | "datum": { 158 | "type": "GeodeticReferenceFrame", 159 | "name": "undefined", 160 | "ellipsoid": { 161 | "name": "undefined", 162 | "semi_major_axis": 6378137, 163 | "inverse_flattening": 298.257223563 164 | } 165 | }, 166 | "coordinate_system": { 167 | "subtype": "ellipsoidal", 168 | "axis": [ 169 | { 170 | "name": "Longitude", 171 | "abbreviation": "lon", 172 | "direction": "east", 173 | "unit": "degree" 174 | }, 175 | { 176 | "name": "Latitude", 177 | "abbreviation": "lat", 178 | "direction": "north", 179 | "unit": "degree" 180 | } 181 | ] 182 | } 183 | }, 184 | "conversion": { 185 | "name": "unknown", 186 | "method": { 187 | "name": "Lambert Conic Conformal (2SP)", 188 | "id": { 189 | "authority": "EPSG", 190 | "code": 9802 191 | } 192 | }, 193 | "parameters": [ 194 | { 195 | "name": "Latitude of 1st standard parallel", 196 | "value": 25, 197 | "unit": "degree", 198 | "id": { 199 | "authority": "EPSG", 200 | "code": 8823 201 | } 202 | }, 203 | { 204 | "name": "Latitude of 2nd standard parallel", 205 | "value": 60, 206 | "unit": "degree", 207 | "id": { 208 | "authority": "EPSG", 209 | "code": 8824 210 | } 211 | }, 212 | { 213 | "name": "Latitude of false origin", 214 | "value": 42.5, 215 | "unit": "degree", 216 | "id": { 217 | "authority": "EPSG", 218 | "code": 8821 219 | } 220 | }, 221 | { 222 | "name": "Longitude of false origin", 223 | "value": -100, 224 | "unit": "degree", 225 | "id": { 226 | "authority": "EPSG", 227 | "code": 8822 228 | } 229 | }, 230 | { 231 | "name": "Easting at false origin", 232 | "value": 0, 233 | "unit": "metre", 234 | "id": { 235 | "authority": "EPSG", 236 | "code": 8826 237 | } 238 | }, 239 | { 240 | "name": "Northing at false origin", 241 | "value": 0, 242 | "unit": "metre", 243 | "id": { 244 | "authority": "EPSG", 245 | "code": 8827 246 | } 247 | } 248 | ] 249 | }, 250 
| "coordinate_system": { 251 | "subtype": "Cartesian", 252 | "axis": [ 253 | { 254 | "name": "Easting", 255 | "abbreviation": "E", 256 | "direction": "east", 257 | "unit": "metre" 258 | }, 259 | { 260 | "name": "Northing", 261 | "abbreviation": "N", 262 | "direction": "north", 263 | "unit": "metre" 264 | } 265 | ] 266 | } 267 | } 268 | }, 269 | "nv": { 270 | "type": "count", 271 | "description": "Size of the 'time_bnds' variable.", 272 | "values": [0, 1] 273 | } 274 | }, 275 | "cube:variables": { 276 | "dayl": { 277 | "type": "data", 278 | "description": "daylength", 279 | "dimensions": ["time", "y", "x"], 280 | "unit": "s", 281 | "shape": [14965, 584, 284], 282 | "chunks": [365, 584, 284], 283 | "attrs": { 284 | "cell_methods": "area: mean", 285 | "grid_mapping": "lambert_conformal_conic", 286 | "long_name": "daylength", 287 | "units": "s" 288 | } 289 | }, 290 | "lambert_conformal_conic": { 291 | "type": "data", 292 | "dimensions": [], 293 | "shape": [], 294 | "attrs": { 295 | "false_easting": 0.0, 296 | "false_northing": 0.0, 297 | "grid_mapping_name": "lambert_conformal_conic", 298 | "inverse_flattening": 298.257223563, 299 | "latitude_of_projection_origin": 42.5, 300 | "longitude_of_central_meridian": -100.0, 301 | "semi_major_axis": 6378137.0, 302 | "standard_parallel": [25.0, 60.0] 303 | } 304 | }, 305 | "lat": { 306 | "type": "auxiliary", 307 | "description": "latitude coordinate", 308 | "dimensions": ["y", "x"], 309 | "unit": "degrees_north", 310 | "shape": [584, 284], 311 | "chunks": [584, 284], 312 | "attrs": { 313 | "long_name": "latitude coordinate", 314 | "standard_name": "latitude", 315 | "units": "degrees_north" 316 | } 317 | }, 318 | "lon": { 319 | "type": "auxiliary", 320 | "description": "longitude coordinate", 321 | "dimensions": ["y", "x"], 322 | "unit": "degrees_east", 323 | "shape": [584, 284], 324 | "chunks": [584, 284], 325 | "attrs": { 326 | "long_name": "longitude coordinate", 327 | "standard_name": "longitude", 328 | "units": 
"degrees_east" 329 | } 330 | }, 331 | "prcp": { 332 | "type": "data", 333 | "description": "daily total precipitation", 334 | "dimensions": ["time", "y", "x"], 335 | "unit": "mm/day", 336 | "shape": [14965, 584, 284], 337 | "chunks": [365, 584, 284], 338 | "attrs": { 339 | "cell_methods": "area: mean time: sum", 340 | "grid_mapping": "lambert_conformal_conic", 341 | "long_name": "daily total precipitation", 342 | "units": "mm/day" 343 | } 344 | }, 345 | "srad": { 346 | "type": "data", 347 | "description": "daylight average incident shortwave radiation", 348 | "dimensions": ["time", "y", "x"], 349 | "unit": "W/m2", 350 | "shape": [14965, 584, 284], 351 | "chunks": [365, 584, 284], 352 | "attrs": { 353 | "cell_methods": "area: mean time: mean", 354 | "grid_mapping": "lambert_conformal_conic", 355 | "long_name": "daylight average incident shortwave radiation", 356 | "units": "W/m2" 357 | } 358 | }, 359 | "swe": { 360 | "type": "data", 361 | "description": "snow water equivalent", 362 | "dimensions": ["time", "y", "x"], 363 | "unit": "kg/m2", 364 | "shape": [14965, 584, 284], 365 | "chunks": [365, 584, 284], 366 | "attrs": { 367 | "cell_methods": "area: mean time: mean", 368 | "grid_mapping": "lambert_conformal_conic", 369 | "long_name": "snow water equivalent", 370 | "units": "kg/m2" 371 | } 372 | }, 373 | "time_bnds": { 374 | "type": "data", 375 | "dimensions": ["time", "nv"], 376 | "shape": [14965, 2], 377 | "chunks": [365, 2], 378 | "attrs": {} 379 | }, 380 | "tmax": { 381 | "type": "data", 382 | "description": "daily maximum temperature", 383 | "dimensions": ["time", "y", "x"], 384 | "unit": "degrees C", 385 | "shape": [14965, 584, 284], 386 | "chunks": [365, 584, 284], 387 | "attrs": { 388 | "cell_methods": "area: mean time: maximum", 389 | "grid_mapping": "lambert_conformal_conic", 390 | "long_name": "daily maximum temperature", 391 | "units": "degrees C" 392 | } 393 | }, 394 | "tmin": { 395 | "type": "data", 396 | "description": "daily minimum temperature", 397 
| "dimensions": ["time", "y", "x"], 398 | "unit": "degrees C", 399 | "shape": [14965, 584, 284], 400 | "chunks": [365, 584, 284], 401 | "attrs": { 402 | "cell_methods": "area: mean time: minimum", 403 | "grid_mapping": "lambert_conformal_conic", 404 | "long_name": "daily minimum temperature", 405 | "units": "degrees C" 406 | } 407 | }, 408 | "vp": { 409 | "type": "data", 410 | "description": "daily average vapor pressure", 411 | "dimensions": ["time", "y", "x"], 412 | "unit": "Pa", 413 | "shape": [14965, 584, 284], 414 | "chunks": [365, 584, 284], 415 | "attrs": { 416 | "cell_methods": "area: mean time: mean", 417 | "grid_mapping": "lambert_conformal_conic", 418 | "long_name": "daily average vapor pressure", 419 | "units": "Pa" 420 | } 421 | }, 422 | "yearday": { 423 | "type": "data", 424 | "description": "day of year (DOY) starting with day 1 on January 1st", 425 | "dimensions": ["time"], 426 | "shape": [14965], 427 | "chunks": [365], 428 | "attrs": { 429 | "long_name": "day of year (DOY) starting with day 1 on January 1st" 430 | } 431 | } 432 | }, 433 | "title": "Daymet Daily Hawaii", 434 | "keywords": [ 435 | "Daymet", 436 | "Hawaii", 437 | "Temperature", 438 | "Precipitation", 439 | "Vapor Pressure", 440 | "Weather" 441 | ], 442 | "providers": [ 443 | { 444 | "name": "Microsoft", 445 | "roles": ["host", "processor"], 446 | "url": "https://planetarycomputer.microsoft.com" 447 | }, 448 | { 449 | "name": "ORNL DAAC", 450 | "roles": ["producer"], 451 | "url": "https://doi.org/10.3334/ORNLDAAC/1840" 452 | } 453 | ], 454 | "assets": { 455 | "zarr-https": { 456 | "href": "https://daymeteuwest.blob.core.windows.net/daymet-zarr/daily/hi.zarr", 457 | "type": "application/vnd+zarr", 458 | "title": "Daily Hawaii Daymet HTTPS Zarr root", 459 | "description": "HTTPS URI of the daily Hawaii Daymet Zarr Group on Azure Blob Storage.", 460 | "xarray:open_kwargs": { 461 | "consolidated": true 462 | }, 463 | "roles": ["data", "zarr", "https"] 464 | }, 465 | "zarr-abfs": { 466 | 
"href": "abfs://daymet-zarr/daily/hi.zarr", 467 | "type": "application/vnd+zarr", 468 | "title": "Daily Hawaii Daymet Azure Blob File System Zarr root", 469 | "description": "Azure Blob File System of the daily Hawaii Daymet Zarr Group on Azure Blob Storage for use with adlfs.", 470 | "xarray:storage_options": { 471 | "account_name": "daymeteuwest" 472 | }, 473 | "xarray:open_kwargs": { 474 | "consolidated": true 475 | }, 476 | "roles": ["data", "zarr", "abfs"] 477 | }, 478 | "thumbnail": { 479 | "href": "https://ai4edatasetspublicassets.blob.core.windows.net/assets/pc_thumbnails/daymet-daily-hi.png", 480 | "type": "image/png", 481 | "title": "Daymet daily Hawaii map thumbnail", 482 | "roles": ["thumbnail"] 483 | } 484 | }, 485 | "msft:short_description": "Daily surface weather data on a 1-km grid for Hawaii", 486 | "msft:storage_account": "daymeteuwest", 487 | "msft:container": "daymet-zarr", 488 | "msft:group_id": "daymet", 489 | "msft:group_keys": ["daily", "hawaii"], 490 | "extent": { 491 | "spatial": { 492 | "bbox": [[-160.3056, 17.9539, -154.772, 23.5186]] 493 | }, 494 | "temporal": { 495 | "interval": [["1980-01-01T12:00:00Z", "2020-12-30T12:00:00Z"]] 496 | } 497 | }, 498 | "license": "proprietary" 499 | } 500 | -------------------------------------------------------------------------------- /intake_stac/tests/data/1.0.0/item/zarr-item.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "Feature", 3 | "stac_version": "1.0.0", 4 | "id": "daymet-daily-hi", 5 | "properties": { 6 | "cube:dimensions": { 7 | "time": { 8 | "type": "temporal", 9 | "description": "24-hour day based on local time", 10 | "extent": ["1980-01-01T12:00:00Z", "2020-12-30T12:00:00Z"] 11 | }, 12 | "x": { 13 | "type": "spatial", 14 | "axis": "x", 15 | "description": "x coordinate of projection", 16 | "extent": [-5802250.0, -5519250.0], 17 | "step": 1000.0, 18 | "reference_system": { 19 | "$schema": 
"https://proj.org/schemas/v0.2/projjson.schema.json", 20 | "type": "ProjectedCRS", 21 | "name": "undefined", 22 | "base_crs": { 23 | "name": "undefined", 24 | "datum": { 25 | "type": "GeodeticReferenceFrame", 26 | "name": "undefined", 27 | "ellipsoid": { 28 | "name": "undefined", 29 | "semi_major_axis": 6378137, 30 | "inverse_flattening": 298.257223563 31 | } 32 | }, 33 | "coordinate_system": { 34 | "subtype": "ellipsoidal", 35 | "axis": [ 36 | { 37 | "name": "Longitude", 38 | "abbreviation": "lon", 39 | "direction": "east", 40 | "unit": "degree" 41 | }, 42 | { 43 | "name": "Latitude", 44 | "abbreviation": "lat", 45 | "direction": "north", 46 | "unit": "degree" 47 | } 48 | ] 49 | } 50 | }, 51 | "conversion": { 52 | "name": "unknown", 53 | "method": { 54 | "name": "Lambert Conic Conformal (2SP)", 55 | "id": { 56 | "authority": "EPSG", 57 | "code": 9802 58 | } 59 | }, 60 | "parameters": [ 61 | { 62 | "name": "Latitude of 1st standard parallel", 63 | "value": 25, 64 | "unit": "degree", 65 | "id": { 66 | "authority": "EPSG", 67 | "code": 8823 68 | } 69 | }, 70 | { 71 | "name": "Latitude of 2nd standard parallel", 72 | "value": 60, 73 | "unit": "degree", 74 | "id": { 75 | "authority": "EPSG", 76 | "code": 8824 77 | } 78 | }, 79 | { 80 | "name": "Latitude of false origin", 81 | "value": 42.5, 82 | "unit": "degree", 83 | "id": { 84 | "authority": "EPSG", 85 | "code": 8821 86 | } 87 | }, 88 | { 89 | "name": "Longitude of false origin", 90 | "value": -100, 91 | "unit": "degree", 92 | "id": { 93 | "authority": "EPSG", 94 | "code": 8822 95 | } 96 | }, 97 | { 98 | "name": "Easting at false origin", 99 | "value": 0, 100 | "unit": "metre", 101 | "id": { 102 | "authority": "EPSG", 103 | "code": 8826 104 | } 105 | }, 106 | { 107 | "name": "Northing at false origin", 108 | "value": 0, 109 | "unit": "metre", 110 | "id": { 111 | "authority": "EPSG", 112 | "code": 8827 113 | } 114 | } 115 | ] 116 | }, 117 | "coordinate_system": { 118 | "subtype": "Cartesian", 119 | "axis": [ 120 | { 
121 | "name": "Easting", 122 | "abbreviation": "E", 123 | "direction": "east", 124 | "unit": "metre" 125 | }, 126 | { 127 | "name": "Northing", 128 | "abbreviation": "N", 129 | "direction": "north", 130 | "unit": "metre" 131 | } 132 | ] 133 | } 134 | } 135 | }, 136 | "y": { 137 | "type": "spatial", 138 | "axis": "y", 139 | "description": "y coordinate of projection", 140 | "extent": [-622000.0, -39000.0], 141 | "step": -1000.0, 142 | "reference_system": { 143 | "$schema": "https://proj.org/schemas/v0.2/projjson.schema.json", 144 | "type": "ProjectedCRS", 145 | "name": "undefined", 146 | "base_crs": { 147 | "name": "undefined", 148 | "datum": { 149 | "type": "GeodeticReferenceFrame", 150 | "name": "undefined", 151 | "ellipsoid": { 152 | "name": "undefined", 153 | "semi_major_axis": 6378137, 154 | "inverse_flattening": 298.257223563 155 | } 156 | }, 157 | "coordinate_system": { 158 | "subtype": "ellipsoidal", 159 | "axis": [ 160 | { 161 | "name": "Longitude", 162 | "abbreviation": "lon", 163 | "direction": "east", 164 | "unit": "degree" 165 | }, 166 | { 167 | "name": "Latitude", 168 | "abbreviation": "lat", 169 | "direction": "north", 170 | "unit": "degree" 171 | } 172 | ] 173 | } 174 | }, 175 | "conversion": { 176 | "name": "unknown", 177 | "method": { 178 | "name": "Lambert Conic Conformal (2SP)", 179 | "id": { 180 | "authority": "EPSG", 181 | "code": 9802 182 | } 183 | }, 184 | "parameters": [ 185 | { 186 | "name": "Latitude of 1st standard parallel", 187 | "value": 25, 188 | "unit": "degree", 189 | "id": { 190 | "authority": "EPSG", 191 | "code": 8823 192 | } 193 | }, 194 | { 195 | "name": "Latitude of 2nd standard parallel", 196 | "value": 60, 197 | "unit": "degree", 198 | "id": { 199 | "authority": "EPSG", 200 | "code": 8824 201 | } 202 | }, 203 | { 204 | "name": "Latitude of false origin", 205 | "value": 42.5, 206 | "unit": "degree", 207 | "id": { 208 | "authority": "EPSG", 209 | "code": 8821 210 | } 211 | }, 212 | { 213 | "name": "Longitude of false origin", 
214 | "value": -100, 215 | "unit": "degree", 216 | "id": { 217 | "authority": "EPSG", 218 | "code": 8822 219 | } 220 | }, 221 | { 222 | "name": "Easting at false origin", 223 | "value": 0, 224 | "unit": "metre", 225 | "id": { 226 | "authority": "EPSG", 227 | "code": 8826 228 | } 229 | }, 230 | { 231 | "name": "Northing at false origin", 232 | "value": 0, 233 | "unit": "metre", 234 | "id": { 235 | "authority": "EPSG", 236 | "code": 8827 237 | } 238 | } 239 | ] 240 | }, 241 | "coordinate_system": { 242 | "subtype": "Cartesian", 243 | "axis": [ 244 | { 245 | "name": "Easting", 246 | "abbreviation": "E", 247 | "direction": "east", 248 | "unit": "metre" 249 | }, 250 | { 251 | "name": "Northing", 252 | "abbreviation": "N", 253 | "direction": "north", 254 | "unit": "metre" 255 | } 256 | ] 257 | } 258 | } 259 | }, 260 | "nv": { 261 | "type": "count", 262 | "description": "Size of the 'time_bnds' variable.", 263 | "values": [0, 1] 264 | } 265 | }, 266 | "cube:variables": { 267 | "dayl": { 268 | "type": "data", 269 | "description": "daylength", 270 | "dimensions": ["time", "y", "x"], 271 | "unit": "s", 272 | "shape": [14965, 584, 284], 273 | "chunks": [365, 584, 284], 274 | "attrs": { 275 | "cell_methods": "area: mean", 276 | "grid_mapping": "lambert_conformal_conic", 277 | "long_name": "daylength", 278 | "units": "s" 279 | } 280 | }, 281 | "lambert_conformal_conic": { 282 | "type": "data", 283 | "dimensions": [], 284 | "shape": [], 285 | "attrs": { 286 | "false_easting": 0.0, 287 | "false_northing": 0.0, 288 | "grid_mapping_name": "lambert_conformal_conic", 289 | "inverse_flattening": 298.257223563, 290 | "latitude_of_projection_origin": 42.5, 291 | "longitude_of_central_meridian": -100.0, 292 | "semi_major_axis": 6378137.0, 293 | "standard_parallel": [25.0, 60.0] 294 | } 295 | }, 296 | "lat": { 297 | "type": "auxiliary", 298 | "description": "latitude coordinate", 299 | "dimensions": ["y", "x"], 300 | "unit": "degrees_north", 301 | "shape": [584, 284], 302 | "chunks": 
[584, 284], 303 | "attrs": { 304 | "long_name": "latitude coordinate", 305 | "standard_name": "latitude", 306 | "units": "degrees_north" 307 | } 308 | }, 309 | "lon": { 310 | "type": "auxiliary", 311 | "description": "longitude coordinate", 312 | "dimensions": ["y", "x"], 313 | "unit": "degrees_east", 314 | "shape": [584, 284], 315 | "chunks": [584, 284], 316 | "attrs": { 317 | "long_name": "longitude coordinate", 318 | "standard_name": "longitude", 319 | "units": "degrees_east" 320 | } 321 | }, 322 | "prcp": { 323 | "type": "data", 324 | "description": "daily total precipitation", 325 | "dimensions": ["time", "y", "x"], 326 | "unit": "mm/day", 327 | "shape": [14965, 584, 284], 328 | "chunks": [365, 584, 284], 329 | "attrs": { 330 | "cell_methods": "area: mean time: sum", 331 | "grid_mapping": "lambert_conformal_conic", 332 | "long_name": "daily total precipitation", 333 | "units": "mm/day" 334 | } 335 | }, 336 | "srad": { 337 | "type": "data", 338 | "description": "daylight average incident shortwave radiation", 339 | "dimensions": ["time", "y", "x"], 340 | "unit": "W/m2", 341 | "shape": [14965, 584, 284], 342 | "chunks": [365, 584, 284], 343 | "attrs": { 344 | "cell_methods": "area: mean time: mean", 345 | "grid_mapping": "lambert_conformal_conic", 346 | "long_name": "daylight average incident shortwave radiation", 347 | "units": "W/m2" 348 | } 349 | }, 350 | "swe": { 351 | "type": "data", 352 | "description": "snow water equivalent", 353 | "dimensions": ["time", "y", "x"], 354 | "unit": "kg/m2", 355 | "shape": [14965, 584, 284], 356 | "chunks": [365, 584, 284], 357 | "attrs": { 358 | "cell_methods": "area: mean time: mean", 359 | "grid_mapping": "lambert_conformal_conic", 360 | "long_name": "snow water equivalent", 361 | "units": "kg/m2" 362 | } 363 | }, 364 | "time_bnds": { 365 | "type": "data", 366 | "dimensions": ["time", "nv"], 367 | "shape": [14965, 2], 368 | "chunks": [365, 2], 369 | "attrs": {} 370 | }, 371 | "tmax": { 372 | "type": "data", 373 | 
"description": "daily maximum temperature", 374 | "dimensions": ["time", "y", "x"], 375 | "unit": "degrees C", 376 | "shape": [14965, 584, 284], 377 | "chunks": [365, 584, 284], 378 | "attrs": { 379 | "cell_methods": "area: mean time: maximum", 380 | "grid_mapping": "lambert_conformal_conic", 381 | "long_name": "daily maximum temperature", 382 | "units": "degrees C" 383 | } 384 | }, 385 | "tmin": { 386 | "type": "data", 387 | "description": "daily minimum temperature", 388 | "dimensions": ["time", "y", "x"], 389 | "unit": "degrees C", 390 | "shape": [14965, 584, 284], 391 | "chunks": [365, 584, 284], 392 | "attrs": { 393 | "cell_methods": "area: mean time: minimum", 394 | "grid_mapping": "lambert_conformal_conic", 395 | "long_name": "daily minimum temperature", 396 | "units": "degrees C" 397 | } 398 | }, 399 | "vp": { 400 | "type": "data", 401 | "description": "daily average vapor pressure", 402 | "dimensions": ["time", "y", "x"], 403 | "unit": "Pa", 404 | "shape": [14965, 584, 284], 405 | "chunks": [365, 584, 284], 406 | "attrs": { 407 | "cell_methods": "area: mean time: mean", 408 | "grid_mapping": "lambert_conformal_conic", 409 | "long_name": "daily average vapor pressure", 410 | "units": "Pa" 411 | } 412 | }, 413 | "yearday": { 414 | "type": "data", 415 | "description": "day of year (DOY) starting with day 1 on January 1st", 416 | "dimensions": ["time"], 417 | "shape": [14965], 418 | "chunks": [365], 419 | "attrs": { 420 | "long_name": "day of year (DOY) starting with day 1 on January 1st" 421 | } 422 | } 423 | }, 424 | "start_datetime": "1980-01-01T12:00:00Z", 425 | "end_datetime": "2020-12-30T12:00:00Z", 426 | "datetime": null 427 | }, 428 | "geometry": { 429 | "type": "Polygon", 430 | "coordinates": [ 431 | [ 432 | [-154.7780670634169, 17.960033949329812], 433 | [-154.7780670634169, 23.51232608231902], 434 | [-160.2988400944475, 23.51232608231902], 435 | [-160.2988400944475, 17.960033949329812], 436 | [-154.7780670634169, 17.960033949329812] 437 | ] 438 | ] 
439 | }, 440 | "links": [], 441 | "assets": { 442 | "zarr-https": { 443 | "href": "https://daymeteuwest.blob.core.windows.net/daymet-zarr/daily/hi.zarr", 444 | "type": "application/vnd+zarr", 445 | "title": "Daily Hawaii Daymet HTTPS Zarr root", 446 | "description": "HTTPS URI of the daily Hawaii Daymet Zarr Group on Azure Blob Storage.", 447 | "xarray:open_kwargs": { 448 | "consolidated": true 449 | }, 450 | "roles": ["data", "zarr", "https"] 451 | }, 452 | "zarr-abfs": { 453 | "href": "abfs://daymet-zarr/daily/hi.zarr", 454 | "type": "application/vnd+zarr", 455 | "title": "Daily Hawaii Daymet Azure Blob File System Zarr root", 456 | "description": "Azure Blob File System of the daily Hawaii Daymet Zarr Group on Azure Blob Storage for use with adlfs.", 457 | "xarray:storage_options": { 458 | "account_name": "daymeteuwest" 459 | }, 460 | "xarray:open_kwargs": { 461 | "consolidated": true 462 | }, 463 | "roles": ["data", "zarr", "abfs"] 464 | }, 465 | "thumbnail": { 466 | "href": "https://ai4edatasetspublicassets.blob.core.windows.net/assets/pc_thumbnails/daymet-daily-hi.png", 467 | "type": "image/png", 468 | "title": "Daymet daily Hawaii map thumbnail" 469 | } 470 | }, 471 | "bbox": [ 472 | -160.2988400944475, 17.960033949329812, -154.7780670634169, 473 | 23.51232608231902 474 | ], 475 | "stac_extensions": [ 476 | "https://stac-extensions.github.io/datacube/v2.0.0/schema.json" 477 | ] 478 | } 479 | -------------------------------------------------------------------------------- /intake_stac/tests/data/1.0.0/itemcollection/example-search.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "mysearchresults", 3 | "stac_version": "1.0.0-beta.2", 4 | "stac_extensions": ["single-file-stac"], 5 | "description": "A bunch of results from a search", 6 | "type": "FeatureCollection", 7 | "features": [ 8 | { 9 | "stac_version": "1.0.0-beta.2", 10 | "stac_extensions": [ 11 | 
"https://stac-extensions.github.io/projection/v1.0.0/schema.json", 12 | "https://stac-extensions.github.io/view/v1.0.0/schema.json" 13 | ], 14 | "type": "Feature", 15 | "id": "LC80370332018039LGN00", 16 | "collection": "landsat-8-l1", 17 | "bbox": [-112.21054, 37.83042, -109.4992, 39.95532], 18 | "geometry": { 19 | "type": "Polygon", 20 | "coordinates": [ 21 | [ 22 | [-111.6768167850251, 39.952817693022276], 23 | [-109.5010938553632, 39.55607811527241], 24 | [-110.03573868784865, 37.83172334507642], 25 | [-112.20846353249907, 38.236456540046845], 26 | [-111.6768167850251, 39.952817693022276] 27 | ] 28 | ] 29 | }, 30 | "properties": { 31 | "datetime": "2018-02-08T18:02:15.719478+00:00", 32 | "view:sun_azimuth": 152.63804142, 33 | "view:sun_elevation": 31.82216637, 34 | "proj:epsg": 32612 35 | }, 36 | "assets": { 37 | "index": { 38 | "type": "text/html", 39 | "title": "HTML index page", 40 | "href": "https://s3-us-west-2.amazonaws.com/landsat-pds/c1/L8/037/033/LC08_L1TP_037033_20180208_20180221_01_T1/index.html" 41 | }, 42 | "thumbnail": { 43 | "title": "Thumbnail image", 44 | "type": "image/jpeg", 45 | "href": "https://s3-us-west-2.amazonaws.com/landsat-pds/c1/L8/037/033/LC08_L1TP_037033_20180208_20180221_01_T1/LC08_L1TP_037033_20180208_20180221_01_T1_thumb_large.jpg" 46 | }, 47 | "B1": { 48 | "type": "image/tiff; application=geotiff", 49 | "title": "Band 1 (coastal)", 50 | "href": "https://s3-us-west-2.amazonaws.com/landsat-pds/c1/L8/037/033/LC08_L1TP_037033_20180208_20180221_01_T1/LC08_L1TP_037033_20180208_20180221_01_T1_B1.TIF" 51 | }, 52 | "B2": { 53 | "type": "image/tiff; application=geotiff", 54 | "title": "Band 2 (blue)", 55 | "href": "https://s3-us-west-2.amazonaws.com/landsat-pds/c1/L8/037/033/LC08_L1TP_037033_20180208_20180221_01_T1/LC08_L1TP_037033_20180208_20180221_01_T1_B2.TIF" 56 | }, 57 | "B3": { 58 | "type": "image/tiff; application=geotiff", 59 | "title": "Band 3 (green)", 60 | "href": 
"https://s3-us-west-2.amazonaws.com/landsat-pds/c1/L8/037/033/LC08_L1TP_037033_20180208_20180221_01_T1/LC08_L1TP_037033_20180208_20180221_01_T1_B3.TIF" 61 | }, 62 | "B4": { 63 | "type": "image/tiff; application=geotiff", 64 | "title": "Band 4 (red)", 65 | "href": "https://s3-us-west-2.amazonaws.com/landsat-pds/c1/L8/037/033/LC08_L1TP_037033_20180208_20180221_01_T1/LC08_L1TP_037033_20180208_20180221_01_T1_B4.TIF" 66 | }, 67 | "B5": { 68 | "type": "image/tiff; application=geotiff", 69 | "title": "Band 5 (nir)", 70 | "href": "https://s3-us-west-2.amazonaws.com/landsat-pds/c1/L8/037/033/LC08_L1TP_037033_20180208_20180221_01_T1/LC08_L1TP_037033_20180208_20180221_01_T1_B5.TIF" 71 | }, 72 | "B6": { 73 | "type": "image/tiff; application=geotiff", 74 | "title": "Band 6 (swir16)", 75 | "href": "https://s3-us-west-2.amazonaws.com/landsat-pds/c1/L8/037/033/LC08_L1TP_037033_20180208_20180221_01_T1/LC08_L1TP_037033_20180208_20180221_01_T1_B6.TIF" 76 | }, 77 | "B7": { 78 | "type": "image/tiff; application=geotiff", 79 | "title": "Band 7 (swir22)", 80 | "href": "https://s3-us-west-2.amazonaws.com/landsat-pds/c1/L8/037/033/LC08_L1TP_037033_20180208_20180221_01_T1/LC08_L1TP_037033_20180208_20180221_01_T1_B7.TIF" 81 | }, 82 | "B8": { 83 | "type": "image/tiff; application=geotiff", 84 | "title": "Band 8 (pan)", 85 | "href": "https://s3-us-west-2.amazonaws.com/landsat-pds/c1/L8/037/033/LC08_L1TP_037033_20180208_20180221_01_T1/LC08_L1TP_037033_20180208_20180221_01_T1_B8.TIF" 86 | }, 87 | "B9": { 88 | "type": "image/tiff; application=geotiff", 89 | "title": "Band 9 (cirrus)", 90 | "href": "https://s3-us-west-2.amazonaws.com/landsat-pds/c1/L8/037/033/LC08_L1TP_037033_20180208_20180221_01_T1/LC08_L1TP_037033_20180208_20180221_01_T1_B9.TIF" 91 | }, 92 | "B10": { 93 | "type": "image/tiff; application=geotiff", 94 | "title": "Band 10 (lwir)", 95 | "href": 
"https://s3-us-west-2.amazonaws.com/landsat-pds/c1/L8/037/033/LC08_L1TP_037033_20180208_20180221_01_T1/LC08_L1TP_037033_20180208_20180221_01_T1_B10.TIF" 96 | }, 97 | "B11": { 98 | "type": "image/tiff; application=geotiff", 99 | "title": "Band 11 (lwir)", 100 | "href": "https://s3-us-west-2.amazonaws.com/landsat-pds/c1/L8/037/033/LC08_L1TP_037033_20180208_20180221_01_T1/LC08_L1TP_037033_20180208_20180221_01_T1_B11.TIF" 101 | }, 102 | "ANG": { 103 | "title": "Angle coefficients file", 104 | "type": "text/plain", 105 | "href": "https://s3-us-west-2.amazonaws.com/landsat-pds/c1/L8/037/033/LC08_L1TP_037033_20180208_20180221_01_T1/LC08_L1TP_037033_20180208_20180221_01_T1_ANG.txt" 106 | }, 107 | "MTL": { 108 | "title": "original metadata file", 109 | "type": "text/plain", 110 | "href": "https://s3-us-west-2.amazonaws.com/landsat-pds/c1/L8/037/033/LC08_L1TP_037033_20180208_20180221_01_T1/LC08_L1TP_037033_20180208_20180221_01_T1_MTL.txt" 111 | }, 112 | "BQA": { 113 | "title": "Band quality data", 114 | "type": "image/tiff; application=geotiff", 115 | "href": "https://s3-us-west-2.amazonaws.com/landsat-pds/c1/L8/037/033/LC08_L1TP_037033_20180208_20180221_01_T1/LC08_L1TP_037033_20180208_20180221_01_T1_BQA.TIF" 116 | } 117 | }, 118 | "links": [] 119 | }, 120 | { 121 | "stac_version": "1.0.0", 122 | "stac_extensions": ["projection", "view"], 123 | "type": "Feature", 124 | "id": "LC80340332018034LGN00", 125 | "collection": "landsat-8-l1", 126 | "bbox": [-107.6044, 37.8096, -104.86884, 39.97508], 127 | "geometry": { 128 | "type": "Polygon", 129 | "coordinates": [ 130 | [ 131 | [-107.03912158283073, 39.975078807631036], 132 | [-104.87161559271382, 39.548160703908025], 133 | [-105.43927721248009, 37.81075859503169], 134 | [-107.60423259994965, 38.24485405534073], 135 | [-107.03912158283073, 39.975078807631036] 136 | ] 137 | ] 138 | }, 139 | "properties": { 140 | "datetime": "2018-02-03T17:43:44Z", 141 | "view:sun_azimuth": 153.39513457, 142 | "view:sun_elevation": 30.41894816, 143 
| "proj:epsg": 32613 144 | }, 145 | "assets": { 146 | "index": { 147 | "type": "text/html", 148 | "title": "HTML index page", 149 | "href": "https://s3-us-west-2.amazonaws.com/landsat-pds/c1/L8/034/033/LC08_L1TP_034033_20180203_20180220_01_T1/index.html" 150 | }, 151 | "thumbnail": { 152 | "title": "Thumbnail image", 153 | "type": "image/jpeg", 154 | "href": "https://s3-us-west-2.amazonaws.com/landsat-pds/c1/L8/034/033/LC08_L1TP_034033_20180203_20180220_01_T1/LC08_L1TP_034033_20180203_20180220_01_T1_thumb_large.jpg" 155 | }, 156 | "B1": { 157 | "type": "image/tiff; application=geotiff", 158 | "title": "Band 1 (coastal)", 159 | "href": "https://s3-us-west-2.amazonaws.com/landsat-pds/c1/L8/034/033/LC08_L1TP_034033_20180203_20180220_01_T1/LC08_L1TP_034033_20180203_20180220_01_T1_B1.TIF" 160 | }, 161 | "B2": { 162 | "type": "image/tiff; application=geotiff", 163 | "title": "Band 2 (blue)", 164 | "href": "https://s3-us-west-2.amazonaws.com/landsat-pds/c1/L8/034/033/LC08_L1TP_034033_20180203_20180220_01_T1/LC08_L1TP_034033_20180203_20180220_01_T1_B2.TIF" 165 | }, 166 | "B3": { 167 | "type": "image/tiff; application=geotiff", 168 | "title": "Band 3 (green)", 169 | "href": "https://s3-us-west-2.amazonaws.com/landsat-pds/c1/L8/034/033/LC08_L1TP_034033_20180203_20180220_01_T1/LC08_L1TP_034033_20180203_20180220_01_T1_B3.TIF" 170 | }, 171 | "B4": { 172 | "type": "image/tiff; application=geotiff", 173 | "title": "Band 4 (red)", 174 | "href": "https://s3-us-west-2.amazonaws.com/landsat-pds/c1/L8/034/033/LC08_L1TP_034033_20180203_20180220_01_T1/LC08_L1TP_034033_20180203_20180220_01_T1_B4.TIF" 175 | }, 176 | "B5": { 177 | "type": "image/tiff; application=geotiff", 178 | "title": "Band 5 (nir)", 179 | "href": "https://s3-us-west-2.amazonaws.com/landsat-pds/c1/L8/034/033/LC08_L1TP_034033_20180203_20180220_01_T1/LC08_L1TP_034033_20180203_20180220_01_T1_B5.TIF" 180 | }, 181 | "B6": { 182 | "type": "image/tiff; application=geotiff", 183 | "title": "Band 6 (swir16)", 184 | "href": 
"https://s3-us-west-2.amazonaws.com/landsat-pds/c1/L8/034/033/LC08_L1TP_034033_20180203_20180220_01_T1/LC08_L1TP_034033_20180203_20180220_01_T1_B6.TIF" 185 | }, 186 | "B7": { 187 | "type": "image/tiff; application=geotiff", 188 | "title": "Band 7 (swir22)", 189 | "href": "https://s3-us-west-2.amazonaws.com/landsat-pds/c1/L8/034/033/LC08_L1TP_034033_20180203_20180220_01_T1/LC08_L1TP_034033_20180203_20180220_01_T1_B7.TIF" 190 | }, 191 | "B8": { 192 | "type": "image/tiff; application=geotiff", 193 | "title": "Band 8 (pan)", 194 | "href": "https://s3-us-west-2.amazonaws.com/landsat-pds/c1/L8/034/033/LC08_L1TP_034033_20180203_20180220_01_T1/LC08_L1TP_034033_20180203_20180220_01_T1_B8.TIF" 195 | }, 196 | "B9": { 197 | "type": "image/tiff; application=geotiff", 198 | "title": "Band 9 (cirrus)", 199 | "href": "https://s3-us-west-2.amazonaws.com/landsat-pds/c1/L8/034/033/LC08_L1TP_034033_20180203_20180220_01_T1/LC08_L1TP_034033_20180203_20180220_01_T1_B9.TIF" 200 | }, 201 | "B10": { 202 | "type": "image/tiff; application=geotiff", 203 | "title": "Band 10 (lwir)", 204 | "href": "https://s3-us-west-2.amazonaws.com/landsat-pds/c1/L8/034/033/LC08_L1TP_034033_20180203_20180220_01_T1/LC08_L1TP_034033_20180203_20180220_01_T1_B10.TIF" 205 | }, 206 | "B11": { 207 | "type": "image/tiff; application=geotiff", 208 | "title": "Band 11 (lwir)", 209 | "href": "https://s3-us-west-2.amazonaws.com/landsat-pds/c1/L8/034/033/LC08_L1TP_034033_20180203_20180220_01_T1/LC08_L1TP_034033_20180203_20180220_01_T1_B11.TIF" 210 | }, 211 | "ANG": { 212 | "title": "Angle coefficients file", 213 | "type": "text/plain", 214 | "href": "https://s3-us-west-2.amazonaws.com/landsat-pds/c1/L8/034/033/LC08_L1TP_034033_20180203_20180220_01_T1/LC08_L1TP_034033_20180203_20180220_01_T1_ANG.txt" 215 | }, 216 | "MTL": { 217 | "title": "original metadata file", 218 | "type": "text/plain", 219 | "href": 
"https://s3-us-west-2.amazonaws.com/landsat-pds/c1/L8/034/033/LC08_L1TP_034033_20180203_20180220_01_T1/LC08_L1TP_034033_20180203_20180220_01_T1_MTL.txt" 220 | }, 221 | "BQA": { 222 | "title": "Band quality data", 223 | "type": "image/tiff; application=geotiff", 224 | "href": "https://s3-us-west-2.amazonaws.com/landsat-pds/c1/L8/034/033/LC08_L1TP_034033_20180203_20180220_01_T1/LC08_L1TP_034033_20180203_20180220_01_T1_BQA.TIF" 225 | } 226 | }, 227 | "links": [] 228 | } 229 | ], 230 | "collections": [ 231 | { 232 | "id": "landsat-8-l1", 233 | "title": "Landsat 8 L1", 234 | "description": "Landat 8 imagery radiometrically calibrated and orthorectified using gound points and Digital Elevation Model (DEM) data to correct relief displacement.", 235 | "keywords": ["landsat", "earth observation", "usgs"], 236 | "stac_version": "1.0.0-beta.2", 237 | "stac_extensions": ["item_assets"], 238 | "extent": { 239 | "spatial": { 240 | "bbox": [[-180, -90, 180, 90]] 241 | }, 242 | "temporal": { 243 | "interval": [["2013-06-01T00:00:00Z", null]] 244 | } 245 | }, 246 | "providers": [ 247 | { 248 | "name": "USGS", 249 | "roles": ["producer"], 250 | "url": "https://landsat.usgs.gov/" 251 | }, 252 | { 253 | "name": "Planet Labs", 254 | "roles": ["processor"], 255 | "url": "https://github.com/landsat-pds/landsat_ingestor" 256 | }, 257 | { 258 | "name": "AWS", 259 | "roles": ["host"], 260 | "url": "https://landsatonaws.com/" 261 | }, 262 | { 263 | "name": "Development Seed", 264 | "roles": ["processor"], 265 | "url": "https://github.com/sat-utils/sat-api" 266 | } 267 | ], 268 | "license": "PDDL-1.0", 269 | "summaries": { 270 | "gsd": [15], 271 | "platform": ["landsat-8"], 272 | "instruments": ["oli", "tirs"], 273 | "view:off_nadir": [0] 274 | }, 275 | "item_assets": { 276 | "index": { 277 | "type": "text/html", 278 | "title": "HTML index page" 279 | }, 280 | "thumbnail": { 281 | "title": "Thumbnail image", 282 | "type": "image/jpeg" 283 | }, 284 | "B1": { 285 | "type": "image/tiff; 
application=geotiff", 286 | "title": "Band 1 (coastal)" 287 | }, 288 | "B2": { 289 | "type": "image/tiff; application=geotiff", 290 | "title": "Band 2 (blue)" 291 | }, 292 | "B3": { 293 | "type": "image/tiff; application=geotiff", 294 | "title": "Band 3 (green)" 295 | }, 296 | "B4": { 297 | "type": "image/tiff; application=geotiff", 298 | "title": "Band 4 (red)" 299 | }, 300 | "B5": { 301 | "type": "image/tiff; application=geotiff", 302 | "title": "Band 5 (nir)" 303 | }, 304 | "B6": { 305 | "type": "image/tiff; application=geotiff", 306 | "title": "Band 6 (swir16)" 307 | }, 308 | "B7": { 309 | "type": "image/tiff; application=geotiff", 310 | "title": "Band 7 (swir22)" 311 | }, 312 | "B8": { 313 | "type": "image/tiff; application=geotiff", 314 | "title": "Band 8 (pan)" 315 | }, 316 | "B9": { 317 | "type": "image/tiff; application=geotiff", 318 | "title": "Band 9 (cirrus)" 319 | }, 320 | "B10": { 321 | "type": "image/tiff; application=geotiff", 322 | "title": "Band 10 (lwir)" 323 | }, 324 | "B11": { 325 | "type": "image/tiff; application=geotiff", 326 | "title": "Band 11 (lwir)" 327 | }, 328 | "ANG": { 329 | "title": "Angle coefficients file", 330 | "type": "text/plain" 331 | }, 332 | "MTL": { 333 | "title": "original metadata file", 334 | "type": "text/plain" 335 | }, 336 | "BQA": { 337 | "title": "Band quality data", 338 | "type": "image/tiff; application=geotiff" 339 | } 340 | }, 341 | "links": [ 342 | { 343 | "rel": "self", 344 | "href": "./example-search.json" 345 | } 346 | ] 347 | } 348 | ], 349 | "links": [] 350 | } 351 | -------------------------------------------------------------------------------- /intake_stac/tests/data/1.0.0beta2/earthsearch/readme.md: -------------------------------------------------------------------------------- 1 | Generated with: 2 | 3 | ```python 4 | import satsearch 5 | import pystac 6 | import json 7 | 8 | bbox = [35.48, -3.24, 35.58, -3.14] 9 | dates = '2020-07-01/2020-08-15' 10 | URL='https://earth-search.aws.element84.com/v0' 
11 | results = satsearch.Search.search(url=URL, 12 | collections=['sentinel-s2-l2a-cogs'], 13 | datetime=dates, 14 | bbox=bbox, 15 | sort=['-properties.datetime']) 16 | 17 | # 18 items found 18 | items = results.items() 19 | print(len(items)) 20 | items.save('single-file-stac.json') 21 | 22 | # validation returns empty list 23 | import json 24 | from pystac.validation import validate_dict 25 | with open('single-file-stac.json') as f: 26 | js = json.load(f) 27 | print(validate_dict(js)) 28 | 29 | cat = pystac.read_file('single-file-stac.json') 30 | ``` 31 | -------------------------------------------------------------------------------- /intake_stac/tests/test_catalog.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import os.path 3 | import sys 4 | from pathlib import Path 5 | 6 | import fsspec 7 | import intake 8 | import intake_xarray 9 | import numpy as np 10 | import pystac 11 | import pytest 12 | import xarray as xr 13 | import yaml 14 | from intake.catalog import Catalog 15 | from intake.catalog.local import LocalCatalogEntry 16 | 17 | from intake_stac import StacCatalog, StacCollection, StacItem, StacItemCollection 18 | from intake_stac.catalog import CombinedAssets, StacAsset, drivers 19 | 20 | here = Path(__file__).parent 21 | 22 | 23 | cat_url = str(here / 'data/1.0.0/catalog/catalog.json') 24 | col_url = str(here / 'data/1.0.0/collection/collection.json') 25 | item_url = str(here / 'data/1.0.0/collection/simple-item.json') 26 | itemcol_url = str(here / 'data/1.0.0/itemcollection/example-search.json') 27 | 28 | 29 | @pytest.fixture(scope='module') 30 | def pystac_cat(): 31 | return pystac.Catalog.from_file(cat_url) 32 | 33 | 34 | @pytest.fixture(scope='module') 35 | def pystac_col(): 36 | col = pystac.Collection.from_file(col_url) 37 | return col 38 | 39 | 40 | @pytest.fixture(scope='module') 41 | def pystac_item(): 42 | return pystac.Item.from_file(item_url) 43 | 44 | 45 | 
@pytest.fixture(scope='module') 46 | def pystac_itemcol(): 47 | # return pystac.read_file(itemcol_url) 48 | # ItemCollection is not a valid pystac STACObject, so can't use read_file. 49 | return pystac.ItemCollection.from_file(itemcol_url) 50 | 51 | 52 | @pytest.fixture(scope='module') 53 | def intake_stac_cat(): 54 | return StacCatalog.from_url(cat_url) 55 | 56 | 57 | class TestCatalog: 58 | def test_init_catalog_from_url(self): 59 | cat = StacCatalog(cat_url) 60 | assert isinstance(cat, intake.catalog.Catalog) 61 | assert cat.name == 'test' 62 | assert cat.discover()['container'] == 'catalog' 63 | assert int(cat.metadata['stac_version'][0]) >= 1 64 | 65 | cat = StacCatalog.from_url(cat_url) 66 | assert isinstance(cat, intake.catalog.Catalog) 67 | assert cat.name == 'test' 68 | assert cat.discover()['container'] == 'catalog' 69 | assert int(cat.metadata['stac_version'][0]) >= 1 70 | 71 | # test kwargs are passed through 72 | cat = StacCatalog.from_url(cat_url, name='intake-stac-test') 73 | assert 'intake-stac-test' == cat.name 74 | 75 | def test_init_catalog_from_pystac_obj(self, pystac_cat): 76 | cat = StacCatalog(pystac_cat) 77 | assert isinstance(cat, intake.catalog.Catalog) 78 | assert cat.discover()['container'] == 'catalog' 79 | assert cat.name == 'test' 80 | assert cat.name == pystac_cat.id 81 | 82 | # test kwargs are passed through 83 | cat = StacCatalog(pystac_cat, name='intake-stac-test') 84 | assert 'intake-stac-test' == cat.name 85 | 86 | def test_init_catalog_with_wrong_type_raises(self, pystac_cat): 87 | with pytest.raises(ValueError): 88 | StacCollection(pystac_cat) 89 | 90 | def test_init_catalog_with_bad_url_raises(self): 91 | # json.decoder.JSONDecodeError or FileNotFoundError 92 | with pytest.raises(Exception): 93 | StacCatalog('https://raw.githubusercontent.com/') 94 | 95 | def test_serialize(self, intake_stac_cat): 96 | cat_str = intake_stac_cat.serialize() 97 | assert isinstance(cat_str, str) 98 | 99 | def test_cat_entries(self, 
intake_stac_cat): 100 | assert list(intake_stac_cat) 101 | assert all( 102 | [isinstance(v, (LocalCatalogEntry, Catalog)) for _, v in intake_stac_cat.items()] 103 | ) 104 | 105 | def test_cat_name_from_pystac_catalog_id(self, intake_stac_cat): 106 | assert intake_stac_cat.name == 'test' 107 | 108 | 109 | class TestCollection: 110 | def test_cat_from_collection(self, pystac_col): 111 | cat = StacCollection(pystac_col) 112 | subcat_name = 'S2B_MSIL2A_20171227T160459_N0212_R054_T17QLA_20201014T165101' 113 | assert cat.name == pystac_col.id 114 | assert subcat_name in cat 115 | # This is taking way too long 116 | # item_name = 'S2B_25WFU_20200610_0_L1C' 117 | # assert item_name in cat[subcat_name] 118 | # assert 'B04' in cat[subcat_name][item_name] 119 | 120 | 121 | class TestItemCollection: 122 | def test_cat_from_item_collection(self, pystac_itemcol): 123 | cat = StacItemCollection(pystac_itemcol) 124 | assert 'LC80340332018034LGN00' in cat 125 | assert 'B5' in cat.LC80340332018034LGN00 126 | 127 | @pytest.mark.parametrize('crs', ['IGNF:ETRS89UTM28', 'epsg:26909']) 128 | def test_cat_to_geopandas_crs(self, crs, pystac_itemcol): 129 | nfeatures = len(pystac_itemcol.items) 130 | geopandas = pytest.importorskip('geopandas') 131 | 132 | cat = StacItemCollection(pystac_itemcol) 133 | df = cat.to_geopandas(crs=crs) 134 | assert isinstance(df, geopandas.GeoDataFrame) 135 | assert len(df) == nfeatures 136 | assert df.crs == crs 137 | 138 | def test_cat_to_missing_geopandas(self, pystac_itemcol, monkeypatch): 139 | from unittest import mock 140 | 141 | with pytest.raises(ImportError): 142 | with mock.patch.dict(sys.modules, {'geopandas': None}): 143 | cat = StacItemCollection(pystac_itemcol) 144 | _ = cat.to_geopandas() 145 | 146 | def test_load_satsearch_results(self, pystac_itemcol): 147 | test_file = os.path.join(here, 'data/1.0.0beta2/earthsearch/single-file-stac.json') 148 | catalog = intake.open_stac_item_collection(test_file) 149 | assert isinstance(catalog, 
StacItemCollection) 150 | assert len(catalog) == 18 151 | 152 | 153 | class TestItem: 154 | def test_cat_from_item(self, pystac_item): 155 | cat = StacItem(pystac_item) 156 | assert 'B02' in cat 157 | 158 | def test_cat_item_stacking(self, pystac_item): 159 | item = StacItem(pystac_item) 160 | list_of_bands = ['B02', 'B03'] 161 | new_entry = item.stack_bands(list_of_bands) 162 | assert isinstance(new_entry, CombinedAssets) 163 | assert new_entry._description == 'B02, B03' 164 | assert new_entry.name == 'B02_B03' 165 | 166 | def test_cat_item_stacking_common_name(self, pystac_item): 167 | item = StacItem(pystac_item) 168 | list_of_bands = ['blue', 'green'] 169 | new_entry = item.stack_bands(list_of_bands) 170 | assert isinstance(new_entry, CombinedAssets) 171 | assert new_entry._description == 'B02, B03' 172 | assert new_entry.name == 'blue_green' 173 | 174 | def test_cat_item_stacking_path_as_pattern(self, pystac_item): 175 | item = StacItem(pystac_item) 176 | list_of_bands = ['B02', 'B03'] 177 | new_entry = item.stack_bands(list_of_bands, path_as_pattern='{}{band:2}.TIF') 178 | assert isinstance(new_entry, CombinedAssets) 179 | 180 | def test_cat_item_stacking_dims_of_different_type_raises_error(self, pystac_item): 181 | item = StacItem(pystac_item) 182 | list_of_bands = ['B02', 'ANG'] 183 | with pytest.raises(ValueError, match=('ANG not found in list of eo:bands in collection')): 184 | item.stack_bands(list_of_bands) 185 | 186 | def test_cat_item_stacking_dims_with_nonexistent_band_raises_error( 187 | self, 188 | pystac_item, 189 | ): # noqa: E501 190 | item = StacItem(pystac_item) 191 | list_of_bands = ['B01', 'foo'] 192 | with pytest.raises(ValueError, match="'B02', 'B03', 'blue', 'green'"): 193 | item.stack_bands(list_of_bands) 194 | 195 | # def test_cat_item_stacking_dims_of_different_size_regrids(self, pystac_item): 196 | # item = StacItem(pystac_item) 197 | # list_of_bands = ['B1', 'B8'] 198 | # B1_da = item.B1.to_dask() 199 | # assert B1_da.shape == (1, 
8391, 8311) 200 | # B8_da = item.B8.to_dask() 201 | # assert B8_da.shape == (1, 16781, 16621) 202 | # new_entry = item.stack_bands(list_of_bands) 203 | # new_da = new_entry().to_dask() 204 | # assert new_da.shape == (2, 16781, 16621) 205 | # assert sorted([dim for dim in new_da.dims]) == ['band', 'x', 'y'] 206 | 207 | def test_asset_describe(self, pystac_item): 208 | item = StacItem(pystac_item) 209 | key = 'B02' 210 | asset = item[key] 211 | d = asset.describe() 212 | 213 | assert d['name'] == key 214 | assert d['container'] == 'xarray' 215 | assert d['plugin'] == ['rasterio'] 216 | assert d['args']['urlpath'] == asset.urlpath 217 | assert d['description'] == asset.description 218 | # NOTE: note sure why asset.metadata has 'catalog_dir' key ? 219 | # assert d['metadata'] == asset.metadata 220 | 221 | def test_asset_missing_type(self, pystac_item): 222 | key = 'B02' 223 | asset = pystac_item.assets.get('B02') 224 | asset.media_type = '' 225 | with pytest.warns(Warning, match='STAC Asset'): 226 | entry = StacAsset(key, asset) 227 | d = entry.describe() 228 | 229 | assert d['name'] == key 230 | assert d['metadata']['type'] == 'application/rasterio' # default_type 231 | assert d['container'] == 'xarray' 232 | assert d['plugin'] == ['rasterio'] 233 | 234 | def test_asset_unknown_type(self, pystac_item): 235 | key = 'B02' 236 | asset = pystac_item.assets.get('B02') 237 | asset.media_type = 'unrecognized' 238 | entry = StacAsset(key, asset) 239 | d = entry.describe() 240 | 241 | assert d['name'] == key 242 | assert d['metadata']['type'] == 'unrecognized' 243 | assert d['container'] == 'xarray' 244 | assert d['plugin'] == ['rasterio'] 245 | 246 | def test_cat_item_yaml(self, pystac_item): 247 | cat_str = StacItem(pystac_item).yaml() 248 | d = yaml.load(cat_str, Loader=yaml.SafeLoader) 249 | 250 | for key in ['bbox', 'date', 'datetime', 'geometry']: 251 | assert key in d['metadata'] 252 | for key in ['B02', 'B03']: 253 | assert key in d['sources'] 254 | 255 | def 
test_cat_item_yaml_roundtrip(self, pystac_item, tmp_path): 256 |         cat1 = StacItem(pystac_item) 257 |         cat_str = cat1.yaml() 258 | 259 |         temp_file = tmp_path / 'temp.yaml' 260 |         with open(temp_file, 'w') as f: 261 |             f.write(cat_str) 262 | 263 |         cat2 = intake.open_catalog(temp_file) 264 | 265 |         assert cat1.metadata == cat2.metadata 266 |         assert set(cat1.walk()) == set(cat2.walk()) 267 | 268 |         keys = ['B02', 'B03'] 269 |         for k in keys: 270 |             assert k in cat1 271 |             assert k in cat2 272 |             assert isinstance(cat1[k], intake_xarray.raster.RasterIOSource) 273 |             assert isinstance(cat2[k], intake_xarray.raster.RasterIOSource) 274 | 275 |             # cat1[k] will have no `catalog_dir` key because it is a temp file 276 |             cat2[k].metadata.pop('catalog_dir', None) 277 |             assert set(cat1[k].metadata) == set(cat2[k].metadata) 278 |             for j in set(cat1[k].metadata): 279 |                 assert cat1[k].metadata[j] == cat2[k].metadata[j] 280 | 281 |             assert set(cat1[k].describe()) == set(cat2[k].describe()) 282 |             for j in set(cat1[k].describe()): 283 |                 assert cat1[k].describe()[j] == cat2[k].describe()[j] 284 | 285 |             assert set(cat1.walk()[k].describe()) == set(cat2.walk()[k].describe()) 286 |             for j in set(cat1.walk()[k].describe()): 287 |                 assert cat1.walk()[k].describe()[j] == cat2.walk()[k].describe()[j] 288 | 289 | 290 | class TestDrivers: 291 |     def test_drivers_include_all_pystac_media_types(self): 292 |         for media_type in pystac.MediaType: 293 |             if media_type != 'application/pdf': 294 |                 assert media_type in drivers 295 | 296 |     def test_drivers_can_open_all_earthsearch_sentinel_s2_l2a_cogs_assets(self): 297 |         test_file = os.path.join(here, 'data/1.0.0beta2/earthsearch/single-file-stac.json') 298 |         catalog = intake.open_stac_item_collection(test_file) 299 |         _, item = next(catalog.items()) 300 |         for _, asset in item.items(): 301 |             assert asset.metadata['type'] in drivers 302 | 303 | 304 | def test_cat_to_geopandas(pystac_itemcol): 305 |     nfeatures = len(pystac_itemcol) 306 |     geopandas =
pytest.importorskip('geopandas') 307 | 308 | cat = StacItemCollection(pystac_itemcol) 309 | df = cat.to_geopandas() 310 | assert isinstance(df, geopandas.GeoDataFrame) 311 | assert len(df) == nfeatures 312 | assert isinstance(df.geometry, geopandas.GeoSeries) 313 | assert isinstance(df.geometry.values, geopandas.array.GeometryArray) 314 | assert isinstance(df.geometry.dtype, geopandas.array.GeometryDtype) 315 | epsg = df.crs.to_epsg() 316 | assert epsg == 4326 317 | 318 | 319 | def test_collection_of_collection(): 320 | space = pystac.SpatialExtent([[0, 1, 2, 3]]) 321 | time = pystac.TemporalExtent([[datetime.datetime(2000, 1, 1), datetime.datetime(2000, 1, 1)]]) 322 | child = pystac.Collection('child', 'child-description', extent=pystac.Extent(space, time)) 323 | parent = pystac.Collection( 324 | 'parent', 325 | 'parent-description', 326 | extent=pystac.Extent(space, time), 327 | ) 328 | parent.add_child(child) 329 | 330 | result = StacCollection(parent) 331 | result._load() 332 | 333 | 334 | def test_collection_level_assets(): 335 | data = xr.DataArray(np.ones((5, 5, 5)), dims=('time', 'y', 'x')) 336 | ds = xr.Dataset({'data': data}) 337 | store = fsspec.filesystem('memory').get_mapper('data.zarr') 338 | ds.to_zarr(store, mode='w') 339 | 340 | extent = pystac.Extent( 341 | spatial=pystac.SpatialExtent([[]]), temporal=pystac.TemporalExtent([[None, None]]) 342 | ) 343 | collection = pystac.Collection( 344 | id='id', description='description', license='license', extent=extent 345 | ) 346 | collection.add_asset( 347 | 'data', pystac.Asset(href='memory://data.zarr', media_type='application/vnd+zarr') 348 | ) 349 | 350 | # test 351 | intake_collection = StacCollection(collection) 352 | result = intake_collection.get_asset('data') 353 | xr.testing.assert_equal(result.to_dask(), ds) 354 | 355 | 356 | def test_xarray_assets_item(): 357 | item = intake.open_stac_item(str(here / 'data/1.0.0/item/zarr-item.json')) 358 | asset = item['zarr-abfs'] 359 | assert asset.kwargs == 
{'consolidated': True} 360 | assert asset.storage_options == {'account_name': 'daymeteuwest'} 361 | 362 | 363 | def test_xarray_assets_collection(): 364 | item = intake.open_stac_collection(str(here / 'data/1.0.0/collection/zarr-collection.json')) 365 | asset = item.get_asset('zarr-abfs') 366 | assert asset.kwargs == {'consolidated': True} 367 | assert asset.storage_options == {'account_name': 'daymeteuwest'} 368 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.black] 2 | line-length = 80 3 | target-version = ['py37', 'py38'] 4 | -------------------------------------------------------------------------------- /requirements-dev.txt: -------------------------------------------------------------------------------- 1 | pytest>=4.5.0 2 | pytest-cov>=2.7.1 3 | -r requirements.txt 4 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | fsspec>=0.8.4 2 | intake>=0.5.1 3 | intake-xarray>=0.4 4 | pystac>=1.0.0 5 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [wheel] 2 | universal = 1 3 | 4 | [flake8] 5 | exclude = docs, versioneer.py, intake_stac/_version.py 6 | ignore = E402,E731,E741,W503,W504 7 | max-line-length = 100 8 | max-complexity = 18 9 | select = B,C,E,F,W,T4,B9 10 | 11 | 12 | [isort] 13 | known_first_party=intake_stac 14 | known_third_party=fsspec,intake,intake_xarray,numpy,pkg_resources,pystac,pytest,setuptools,xarray,yaml 15 | multi_line_output=3 16 | include_trailing_comma=True 17 | force_grid_wrap=0 18 | combine_as_imports=True 19 | line_length=100 20 | skip= 21 | docs/source/conf.py 22 | setup.py 23 | versioneer.py 24 | intake_stac/_version.py 25 | 
intake_stac/__init__.py 26 | 27 | 28 | [versioneer] 29 | VCS = git 30 | style = pep440 31 | versionfile_source = intake_stac/_version.py 32 | versionfile_build = intake_stac/_version.py 33 | tag_prefix = 34 | 35 | [aliases] 36 | test = pytest 37 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | """The setup script.""" 4 | 5 | import sys 6 | 7 | from setuptools import find_packages, setup 8 | 9 | with open('requirements.txt') as f: 10 |     INSTALL_REQUIRES = f.read().strip().split('\n') 11 | 12 | with open('README.md') as f: 13 |     LONG_DESCRIPTION = f.read() 14 | 15 | needs_pytest = {'pytest', 'test', 'ptr'}.intersection(sys.argv) 16 | PYTHON_REQUIRES = '>=3.8' 17 | SETUP_REQUIRES = ['setuptools_scm'] 18 | if needs_pytest: 19 |     SETUP_REQUIRES.append('pytest-runner >= 4.2') 20 | 21 | TESTS_REQUIRE = ['pytest >= 2.7.1'] 22 | ENTRY_POINTS = { 23 |     'intake.drivers': [ 24 |         'stac_catalog = intake_stac.catalog:StacCatalog', 25 |         'stac_collection = intake_stac.catalog:StacCollection', 26 |         'stac_item_collection = intake_stac.catalog:StacItemCollection', 27 |         'stac_item = intake_stac.catalog:StacItem', 28 |     ] 29 | } 30 | 31 | description = ( 32 |     'An intake adapter for building intake catalogs beginning ' 33 |     'with SpatioTemporal Asset Catalogs (STAC)' 34 | ) 35 | setup( 36 |     name='intake_stac', 37 |     description=description, 38 |     long_description=LONG_DESCRIPTION, 39 |     long_description_content_type='text/markdown', 40 |     maintainer='Joe Hamman', 41 |     maintainer_email='jhamman@ucar.edu', 42 |     url='https://github.com/pangeo-data/intake-stac', 43 |     py_modules=['intake_stac'], 44 |     packages=find_packages(exclude=['*tests']), 45 |     package_dir={'intake_stac': 'intake_stac'}, 46 |     include_package_data=True, 47 |     python_requires=PYTHON_REQUIRES, 48 |     install_requires=INSTALL_REQUIRES, 49 |     setup_requires=SETUP_REQUIRES, 50 |
tests_require=TESTS_REQUIRE, 51 | entry_points=ENTRY_POINTS, 52 | license='BSD 2-Clause', 53 | zip_safe=False, 54 | keywords='intake stac', 55 | use_scm_version={'version_scheme': 'post-release', 'local_scheme': 'dirty-tag'}, 56 | ) 57 | --------------------------------------------------------------------------------