├── .devcontainer ├── Dockerfile ├── devcontainer.json └── noop.txt ├── .editorconfig ├── .github ├── ISSUE_TEMPLATE.md ├── ISSUE_TEMPLATE │ └── feature_request.md └── pull_request_template.md ├── .gitignore ├── .travis.yml ├── CONTRIBUTING.rst ├── HISTORY.rst ├── LICENSE ├── MANIFEST.in ├── Makefile ├── README.md ├── README.rst ├── demo.ipynb ├── docs ├── Makefile ├── README.md ├── conf.py ├── contributing.rst ├── history.rst ├── index.rst ├── installation.rst ├── make.bat ├── readme.rst ├── sample_notebooks │ ├── visualize-geodataframe.ipynb │ ├── visualize-geojson.ipynb │ ├── visualize-raster.ipynb │ └── visualize-wms.ipynb └── usage.rst ├── environment.yml ├── public └── images │ ├── about-map-visualization-solution.png │ ├── getting-started-conda-activate.png │ ├── getting-started-correct-tabs.png │ ├── getting-started-links.png │ ├── jlab-screenshot.png │ └── loading.gif ├── requirements.txt ├── requirements_dev.txt ├── setup.cfg ├── setup.py ├── stac_ipyleaflet ├── __init__.py ├── constants.py ├── core.py ├── stac_discovery │ ├── __init__.py │ ├── catalogs │ │ ├── nasa_maap_stac.json │ │ ├── nasa_maap_stac.py │ │ └── nasa_maap_stac.tsv │ ├── stac.py │ └── stac_widget.py ├── utilities │ ├── __init__.py │ ├── data │ │ └── biomass-layers.csv │ └── helpers.py └── widgets │ ├── __init__.py │ ├── basemaps.py │ ├── draw.py │ └── inspect.py ├── tests ├── __init__.py └── test_core.py ├── tox.ini └── write_biomass_layers.py /.devcontainer/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM mcr.microsoft.com/devcontainers/miniconda:0-3 2 | 3 | # Copy environment.yml (if found) to a temp location so we update the environment. Also 4 | # copy "noop.txt" so the COPY instruction does not fail if no environment.yml exists. 5 | USER root 6 | COPY environment.yml* .devcontainer/noop.txt /tmp/conda-tmp/ 7 | RUN if [ -f "/tmp/conda-tmp/environment.yml" ]; then umask 0002 && /opt/conda/bin/conda env update -n base -f /tmp/conda-tmp/environment.yml; fi \ 8 | && rm -rf /tmp/conda-tmp 9 | 10 | USER vscode 11 | 12 | # WORKDIR /tmp/stac_ipyleaflet 13 | # COPY . . 14 | # RUN /opt/conda/bin/pip install -e . 15 | 16 | 17 | # [Optional] Uncomment to install a different version of Python than the default 18 | # RUN conda install -y python=3.6 \ 19 | # && pip install --no-cache-dir pipx \ 20 | # && pipx reinstall-all 21 | 22 | # [Optional] Uncomment this section to install additional OS packages. 23 | # RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \ 24 | # && apt-get -y install --no-install-recommends 25 | 26 | 27 | -------------------------------------------------------------------------------- /.devcontainer/devcontainer.json: -------------------------------------------------------------------------------- 1 | // For format details, see https://aka.ms/devcontainer.json. For config options, see the 2 | // README at: https://github.com/devcontainers/templates/tree/main/src/miniconda 3 | { 4 | "name": "Miniconda (Python 3)", 5 | "build": { 6 | "context": "..", 7 | "dockerfile": "Dockerfile" 8 | }, 9 | "features": { 10 | "ghcr.io/devcontainers/features/node:1": {} 11 | } 12 | 13 | // Features to add to the dev container. More info: https://containers.dev/features. 14 | // "features": {}, 15 | 16 | // Use 'forwardPorts' to make a list of ports inside the container available locally. 17 | // "forwardPorts": [], 18 | 19 | // Use 'postCreateCommand' to run commands after the container is created. 20 | //"postCreateCommand": "pip install -e ." 
21 | //"postCreateCommand": "conda init bash" 22 | 23 | // Configure tool-specific properties. 24 | // "customizations": {}, 25 | 26 | // Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root. 27 | // "remoteUser": "root" 28 | } 29 | -------------------------------------------------------------------------------- /.devcontainer/noop.txt: -------------------------------------------------------------------------------- 1 | This file is copied into the container along with environment.yml* from the 2 | parent folder. This is done to prevent the Dockerfile COPY instruction from 3 | failing if no environment.yml is found. -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | # http://editorconfig.org 2 | 3 | root = true 4 | 5 | [*] 6 | indent_style = space 7 | indent_size = 4 8 | trim_trailing_whitespace = true 9 | insert_final_newline = true 10 | charset = utf-8 11 | end_of_line = lf 12 | 13 | [*.bat] 14 | indent_style = tab 15 | end_of_line = crlf 16 | 17 | [LICENSE] 18 | insert_final_newline = false 19 | 20 | [Makefile] 21 | indent_style = tab 22 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | * stac ipyleaflet version: 2 | * Python version: 3 | * Operating System: 4 | 5 | ### Description 6 | 7 | Describe what you were trying to get done. 8 | Tell us what happened, what went wrong, and what you expected to happen. 9 | 10 | ### What I Did 11 | 12 | ``` 13 | Paste the command(s) you ran and the output. 14 | If there was a crash, please include the traceback here. 15 | ``` 16 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | * stac ipyleaflet version: 2 | * Python version: 3 | * Operating System: 4 | 5 | ## Context 6 | Provide a brief explanation about how this fits into a broader project goal or challenge 7 | 8 | 9 | ## Problem 10 | Describe the specific problem to be addressed (frame as an opportunity if it isn't problem-driven) 11 | 12 | ``` 13 | Paste the command(s) you ran and the output 14 | If there was a crash, please include the traceback here 15 | ``` 16 | 17 | ``` 18 | Provide supporting user input if it was based on feedback 19 | If there are any quotes, please include here 20 | ``` 21 | 22 | ## Ideas 23 | If relevant, provide thoughts on the solution, whether it's code or design ideas to help someone get started 24 | 25 | 26 | ## Acceptance Criteria 27 | - [ ] Indicate outcome to close issue 28 | -------------------------------------------------------------------------------- /.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | ## Summary: 2 | - **What** did I do? 3 | - **Why** *** did I do it? (What was the motivation?) 4 | - **How** *** can someone test or demo my changes? 
5 | 6 | _*** **When relevant, use screenshots to document**_ 7 | 8 | 9 | ### Fixes or Addresses Issue \#: [ticket number & link] 10 | 11 | 12 | ## Checklist before requesting a review: 13 | - [ ] My code follows the guidelines of this project 14 | - [ ] I performed a self-review of my code changes 15 | - [ ] I have added my changes to the change log (`HISTORY.rst`) 16 | - [ ] I have completed spell-checking, removed unnecessary print statements & commented-out code 17 | - [ ] I have commented my code, particularly in hard-to-understand areas 18 | - [ ] I have made corresponding changes to the documentation 19 | - [ ] New and existing unit tests pass locally with my changes 20 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | env/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .coverage 42 | .coverage.* 43 | .cache 44 | nosetests.xml 45 | coverage.xml 46 | *.cover 47 | .hypothesis/ 48 | .pytest_cache/ 49 | 50 | # Translations 51 | *.mo 52 | *.pot 53 | 54 | # Django stuff: 55 | *.log 56 | local_settings.py 57 | 58 | # Flask stuff: 59 | instance/ 60 | .webassets-cache 61 | 62 | # Scrapy stuff: 63 | .scrapy 64 | 65 | # Sphinx documentation 66 | docs/_build/ 67 | 68 | # PyBuilder 69 | target/ 70 | 71 | # Jupyter Notebook 72 | .ipynb_checkpoints 73 | 74 | # pyenv 75 | .python-version 76 | 77 | # celery beat schedule file 78 | celerybeat-schedule 79 | 80 | # SageMath parsed files 81 | *.sage.py 82 | 83 | # dotenv 84 | .env 85 | 86 | # virtualenv 87 | .venv 88 | venv/ 89 | ENV/ 90 | 91 | # Spyder project settings 92 | .spyderproject 93 | .spyproject 94 | 95 | # Rope project settings 96 | .ropeproject 97 | 98 | # mkdocs documentation 99 | /site 100 | 101 | # mypy 102 | .mypy_cache/ 103 | 104 | # IDE settings 105 | .vscode/ 106 | .idea/ 107 | 108 | # macos files 109 | *.DS_Store -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | # Config file for automatic testing at travis-ci.com 2 | 3 | language: python 4 | python: 5 | - 3.8 6 | - 3.7 7 | - 3.6 8 | 9 | # Command to install dependencies, e.g. pip install -r requirements.txt --use-mirrors 10 | install: pip install -U tox-travis 11 | 12 | # Command to run tests, e.g. python setup.py test 13 | script: tox 14 | 15 | 16 | -------------------------------------------------------------------------------- /CONTRIBUTING.rst: -------------------------------------------------------------------------------- 1 | .. highlight:: shell 2 | 3 | ============ 4 | Contributing 5 | ============ 6 | 7 | Contributions are welcome, and they are greatly appreciated! Every little bit 8 | helps, and credit will always be given. 
9 | 10 | You can contribute in many ways: 11 | 12 | Types of Contributions 13 | ---------------------- 14 | 15 | Report Bugs 16 | ~~~~~~~~~~~ 17 | 18 | Report bugs at https://github.com/abarciauskas-bgse/stac_ipyleaflet/issues. 19 | 20 | If you are reporting a bug, please include: 21 | 22 | * Your operating system name and version. 23 | * Any details about your local setup that might be helpful in troubleshooting. 24 | * Detailed steps to reproduce the bug. 25 | 26 | Fix Bugs 27 | ~~~~~~~~ 28 | 29 | Look through the GitHub issues for bugs. Anything tagged with "bug" and "help 30 | wanted" is open to whoever wants to implement it. 31 | 32 | Implement Features 33 | ~~~~~~~~~~~~~~~~~~ 34 | 35 | Look through the GitHub issues for features. Anything tagged with "enhancement" 36 | and "help wanted" is open to whoever wants to implement it. 37 | 38 | Write Documentation 39 | ~~~~~~~~~~~~~~~~~~~ 40 | 41 | stac ipyleaflet could always use more documentation, whether as part of the 42 | official stac ipyleaflet docs, in docstrings, or even on the web in blog posts, 43 | articles, and such. 44 | 45 | Submit Feedback 46 | ~~~~~~~~~~~~~~~ 47 | 48 | The best way to send feedback is to file an issue at https://github.com/abarciauskas-bgse/stac_ipyleaflet/issues. 49 | 50 | If you are proposing a feature: 51 | 52 | * Explain in detail how it would work. 53 | * Keep the scope as narrow as possible, to make it easier to implement. 54 | * Remember that this is a volunteer-driven project, and that contributions 55 | are welcome :) 56 | 57 | Get Started! 58 | ------------ 59 | 60 | Ready to contribute? Here's how to set up `stac_ipyleaflet` for local development. 61 | 62 | 1. Fork the `stac_ipyleaflet` repo on GitHub. 63 | 2. Clone your fork locally:: 64 | 65 | $ git clone git@github.com:your_name_here/stac_ipyleaflet.git 66 | 67 | 3. Install your local copy into a virtualenv. Assuming you have virtualenvwrapper installed, this is how you set up your fork for local development:: 68 | 69 | $ mkvirtualenv stac_ipyleaflet 70 | $ cd stac_ipyleaflet/ 71 | $ python setup.py develop 72 | 73 | 4. Create a branch for local development:: 74 | 75 | $ git checkout -b name-of-your-bugfix-or-feature 76 | 77 | Now you can make your changes locally. 78 | 79 | 5. When you're done making changes, check that your changes pass flake8 and the 80 | tests, including testing other Python versions with tox:: 81 | 82 | $ flake8 stac_ipyleaflet tests 83 | $ python setup.py test or pytest 84 | $ tox 85 | 86 | To get flake8 and tox, just pip install them into your virtualenv. 87 | 88 | 6. Commit your changes and push your branch to GitHub:: 89 | 90 | $ git add . 91 | $ git commit -m "Your detailed description of your changes." 92 | $ git push origin name-of-your-bugfix-or-feature 93 | 94 | 7. Submit a pull request through the GitHub website. 95 | 96 | Pull Request Guidelines 97 | ----------------------- 98 | 99 | Before you submit a pull request, check that it meets these guidelines: 100 | 101 | 1. The pull request should include tests. 102 | 2. If the pull request adds functionality, the docs should be updated. Put 103 | your new functionality into a function with a docstring, and add the 104 | feature to the list in README.rst. 105 | 3. The pull request should work for Python 3.5, 3.6, 3.7 and 3.8, and for PyPy. Check 106 | https://travis-ci.com/abarciauskas-bgse/stac_ipyleaflet/pull_requests 107 | and make sure that the tests pass for all supported Python versions. 
108 | 109 | Tips 110 | ---- 111 | 112 | To run a subset of tests:: 113 | 114 | $ pytest tests.test_stac_ipyleaflet 115 | 116 | 117 | Deploying 118 | --------- 119 | 120 | A reminder for the maintainers on how to deploy. 121 | Make sure all your changes are committed (including an entry in HISTORY.rst). 122 | Then run:: 123 | 124 | $ bump2version patch # possible: major / minor / patch 125 | $ git push 126 | $ git push --tags 127 | 128 | Travis will then deploy to PyPI if tests pass. 129 | -------------------------------------------------------------------------------- /HISTORY.rst: -------------------------------------------------------------------------------- 1 | ======= 2 | History 3 | ======= 4 | 5 | 0.1.0 (2023-01-19) 6 | ------------------ 7 | 8 | * First release (beta). 9 | 10 | 11 | 0.2.0 (2023-05-19) 12 | ------------------ 13 | 14 | * First public release (beta). 15 | 16 | 17 | 0.3.0 (2023-08-30) 18 | ---------------------- 19 | 20 | * Added automatic COG filter to narrow down available collection (Issue #66) 21 | * Added interact by point tool to get coordinates and raster values at a location (Issue #64) 22 | 23 | 0.3.1 (2023-08-31) 24 | ---------------------- 25 | 26 | * Patch to split STAC_CATALOG environment variable up to STAC_CATALOG_NAME and STAC_CATALOG_URL 27 | 28 | 0.3.2 (2023-09-01) 29 | ---------------------- 30 | 31 | * Bug fix to add dotenv to install_requires 32 | 33 | 0.3.3 (2023-09-06) 34 | ---------------------- 35 | 36 | * Bug fix to return collections even when non-compliant STAC collections and items exist 37 | 38 | 0.3.4 (2023-09-27) 39 | ---------------------- 40 | 41 | * Bug fix, agnostic parsing of STAC_BROWSER_URL (Issue #116) 42 | 43 | 0.3.5 (2023-09-27) 44 | ---------------------- 45 | 46 | * Bug fix, error in manifest of files to include in python package (#120) 47 | 48 | 0.3.6 (2023-11-29) 49 | ---------------------- 50 | 51 | * Updated datasets for MAAP STAC 52 | * Removed NASA JPL Biomass Layer 53 | * Disabled Stamen Basemap (see issue #124) 54 | 55 | Unreleased 56 | ---------------------- 57 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 
29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. 
If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. 
Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | Copyright (c) 2022 California Institute of Technology (“Caltech”) U.S. Government sponsorship acknowledged, 179 | and United States Government as represented by the Administrator of the National Aeronautics and Space Administration. 180 | All rights reserved. 181 | 182 | Licensed under the Apache License, Version 2.0 (the "License"); 183 | you may not use this file except in compliance with the License. 184 | You may obtain a copy of the License at 185 | 186 | http://www.apache.org/licenses/LICENSE-2.0 187 | 188 | Unless required by applicable law or agreed to in writing, software 189 | distributed under the License is distributed on an "AS IS" BASIS, 190 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 191 | See the License for the specific language governing permissions and 192 | limitations under the License. 
193 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include CONTRIBUTING.rst 2 | include HISTORY.rst 3 | include LICENSE 4 | include README.rst 5 | include stac_ipyleaflet/utilities/data/* 6 | include stac_ipyleaflet/widgets/* 7 | include stac_ipyleaflet/stac_discovery/catalogs/* 8 | 9 | recursive-include tests * 10 | recursive-exclude * __pycache__ 11 | recursive-exclude * *.py[co] 12 | 13 | recursive-include docs *.rst conf.py Makefile make.bat *.jpg *.png *.gif 14 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: clean clean-build clean-pyc clean-test coverage dist docs help install lint lint/flake8 2 | .DEFAULT_GOAL := help 3 | 4 | define BROWSER_PYSCRIPT 5 | import os, webbrowser, sys 6 | 7 | from urllib.request import pathname2url 8 | 9 | webbrowser.open("file://" + pathname2url(os.path.abspath(sys.argv[1]))) 10 | endef 11 | export BROWSER_PYSCRIPT 12 | 13 | define PRINT_HELP_PYSCRIPT 14 | import re, sys 15 | 16 | for line in sys.stdin: 17 | match = re.match(r'^([a-zA-Z_-]+):.*?## (.*)$$', line) 18 | if match: 19 | target, help = match.groups() 20 | print("%-20s %s" % (target, help)) 21 | endef 22 | export PRINT_HELP_PYSCRIPT 23 | 24 | BROWSER := python -c "$$BROWSER_PYSCRIPT" 25 | 26 | help: 27 | @python -c "$$PRINT_HELP_PYSCRIPT" < $(MAKEFILE_LIST) 28 | 29 | clean: clean-build clean-pyc clean-test ## remove all build, test, coverage and Python artifacts 30 | 31 | clean-build: ## remove build artifacts 32 | rm -fr build/ 33 | rm -fr dist/ 34 | rm -fr .eggs/ 35 | find . -name '*.egg-info' -exec rm -fr {} + 36 | find . -name '*.egg' -exec rm -f {} + 37 | 38 | clean-pyc: ## remove Python file artifacts 39 | find . -name '*.pyc' -exec rm -f {} + 40 | find . -name '*.pyo' -exec rm -f {} + 41 | find . -name '*~' -exec rm -f {} + 42 | find . -name '__pycache__' -exec rm -fr {} + 43 | 44 | clean-test: ## remove test and coverage artifacts 45 | rm -fr .tox/ 46 | rm -f .coverage 47 | rm -fr htmlcov/ 48 | rm -fr .pytest_cache 49 | 50 | lint/flake8: ## check style with flake8 51 | flake8 stac_ipyleaflet tests 52 | 53 | lint: lint/flake8 ## check style 54 | 55 | test: ## run tests quickly with the default Python 56 | pytest 57 | 58 | test-all: ## run tests on every Python version with tox 59 | tox 60 | 61 | coverage: ## check code coverage quickly with the default Python 62 | coverage run --source stac_ipyleaflet -m pytest 63 | coverage report -m 64 | coverage html 65 | $(BROWSER) htmlcov/index.html 66 | 67 | docs: ## generate Sphinx HTML documentation, including API docs 68 | rm -f docs/stac_ipyleaflet.rst 69 | rm -f docs/modules.rst 70 | sphinx-apidoc -o docs/ stac_ipyleaflet 71 | $(MAKE) -C docs clean 72 | $(MAKE) -C docs html 73 | $(BROWSER) docs/_build/html/index.html 74 | 75 | servedocs: docs ## compile the docs watching for changes 76 | watchmedo shell-command -p '*.rst' -c '$(MAKE) -C docs html' -R -D . 
77 | 78 | release: dist ## package and upload a release 79 | twine upload dist/* 80 | 81 | dist: clean ## builds source and wheel package 82 | python setup.py sdist 83 | python setup.py bdist_wheel 84 | ls -l dist 85 | 86 | install: clean ## install the package to the active Python's site-packages 87 | python setup.py install 88 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # STAC ipyleaflet 2 | 3 | [![DOI](https://zenodo.org/badge/591005076.svg)](https://zenodo.org/doi/10.5281/zenodo.10015863) 4 | 5 | ### What 6 | stac_ipyleaflet is a customized version of [ipyleaflet](https://ipyleaflet.readthedocs.io/en/latest/) built to be an in-jupyter-notebook interactive mapping library that prioritizes access to STAC catalog data. The library provides ([ipywidgets](https://ipywidgets.readthedocs.io/en/stable/)-based) components that are meant to help users working in Jupyter Notebook environments quickly visualize and interact with geospatial data at different stages throughout their research process. 7 | 8 | ### Why 9 | The intended users of this library are members of the scientific community who are doing research in areas like climate and active remote sensing. While they can write code to visualize and analyze data, they find it time-consuming and may end up exporting results to visualize/explore data in & out of a jupyter notebook. This library allows them to visualize remotely sensed data quickly and at scale, right in the notebook. It does not replace full-featured GIS tools but offers enough visualization capabilities to support their scientific analysis cycles. 10 | 11 | ![stac_ipyleaflet as part of the explore/visualization solution](/public/images/about-map-visualization-solution.png) 12 | > stac_ipyleaflet as part of the explore/visualization solution 13 | 14 | WORK IN PROGRESS. Right now this connects to the MAAP STAC, providing a module on top of ipyleaflet that demonstrates how to load & control opacity for tile layers (from `biomass-layers.csv`), view pre-determined Basemaps, and derive coordinates from a user-defined bounding box. 15 | 16 | Much of this project is inspired by [leafmap](https://leafmap.org/). 17 | 18 | ![Jupyter Lab ScreenShot](/public/images/jlab-screenshot.png) 19 | 20 | ## Features 21 | * Layers widget with Biomass and Basemap layers with opacity control 22 | * STAC integration to display COGs on the map 23 | * Ability to draw an AOI and copy its coordinates 24 | 25 | ## Contributing 26 | To contribute to this codebase, clone this repository and create a new branch. All PRs should be against the `main` branch. Branch names should be prefixed with either *feature, fix, docs, refactor, cleanup* and be in kebab-case format. 27 | 28 | For example, when adding documentation, the branch name should look something like `docs/{special-branch-name}`. When refactoring for code optimization, the branch name should look something like `refactor/{special-branch-name}`. 29 | 30 | ### Testing 31 | #### Unit Testing with Pytest 32 | @TODO: To be developed further. Tests should focus on core pieces of functionality rather than chasing a specific coverage threshold. 33 | 34 | **How to run unit tests** 35 | 36 | From the root of this project, in your dev container environment, run `pytest` in the terminal. If you want to enable logging, run `pytest -s`.
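As a starting point, a core-functionality test could look like the sketch below. This is a hypothetical example, not the project's current `tests/test_core.py`; it assumes the `.env` values from the "Getting started locally" section are available, since the map reaches the STAC and titiler endpoints when it is constructed.

```python
# A minimal sketch of a "core functionality" test (hypothetical example).
# Assumes the environment variables from the README's .env example are set,
# because StacIpyleaflet reads the STAC/titiler endpoints at construction time.
import pytest

import stac_ipyleaflet


@pytest.fixture
def stac_map():
    # Building the map exercises STAC discovery and the widget wiring end to end.
    return stac_ipyleaflet.StacIpyleaflet()


def test_map_has_default_layers(stac_map):
    # The map should come up with at least one layer (a basemap) attached.
    assert len(stac_map.layers) >= 1


def test_map_starts_at_global_view(stac_map):
    # demo.ipynb shows the map centered at [20, 0]; treat that as the expected default.
    assert list(stac_map.center) == [20, 0]
```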
37 | 38 | #### Manual Testing 39 | To test new features or changes in the `Algorithm Development Environment` (ADE), navigate to your Jupyter notebook's terminal and run the following: 40 | 1. `pip uninstall stac_ipyleaflet` 41 | 2. `pip install git+https://github.com/MAAP-Project/stac_ipyleaflet.git#egg-info=stac_ipyleaflet` 42 | This should install `stac_ipyleaflet` with the latest commits from the `main` branch. 43 | > Note: pip installing with the `--upgrade` flag will not work unless a version change in the `setup.cfg` file is detected 44 | 45 | ## Additional requirements 46 | 47 | * jupyter lab, node>=12.0.0 48 | 49 | ## Getting started locally 50 | This project runs locally as a dev container, which will need to be installed. If you are using VSCode, add the `Dev Containers` extension and then also install the `Remote Development` extensions. Conda is used to manage our base environment packages, and pip is used to manage our libraries. 51 | 52 | Once these have been installed, follow the steps below: 53 | 1. To start the dev container, click on the `Remote Development` extensions icon in the bottom left corner, and then from the options list choose `Reopen in Container` 54 | 2. Make sure you are at the root of `stac_ipyleaflet`. In the VSCode terminal, run `conda activate base`. Your terminal should look something like this, with the red text referencing your branch name: 55 | ![](/public/images/getting-started-conda-activate.png) 56 | 3. Run `conda init` 57 | 4. Run `pip install -r requirements.txt` 58 | 5. Create a `.env` file at the root of this codebase. The file should contain the following: 59 | ``` 60 | TITILER_STAC_ENDPOINT=https://titiler-stac.maap-project.org 61 | TITILER_ENDPOINT=https://titiler.maap-project.org 62 | STAC_CATALOG_NAME=MAAP STAC 63 | STAC_CATALOG_URL=https://stac.maap-project.org 64 | STAC_BROWSER_URL=https://stac-browser.maap-project.org/external/ 65 | ``` 66 | 6. Run `jupyter lab`; this should print two links in the log. Click and open either one. 67 | ![](/public/images/getting-started-links.png) 68 | 7. After opening the link, click on the `demo.ipynb` file and run the code snippet to produce the map. If the map renders, you have successfully run the code in your local env :raised_hands: 69 | 70 | > Note: If you notice that the widgets (tabs) are not displaying the correct styling, stop the container, run `conda install -c conda-forge ipywidgets`, then restart the container. This should fix the issue, and the successful outcome should look like... ![](/public/images/getting-started-correct-tabs.png) 71 | 72 | ## Setup 73 | 74 | For demo purposes, `write_biomass_layers.py` creates a CSV file with the current map layers. 75 | ```sh 76 | conda create -n stac_ipyleaflet python=3.9 77 | conda activate stac_ipyleaflet 78 | pip install -r requirements.txt 79 | python -m ipykernel install --user --name=stac_ipyleaflet 80 | export AWS_PROFILE=maap 81 | jupyter lab 82 | ``` 83 | 84 | Note: this library currently uses `rio.open`, so it must be run with an AWS identity that has access to the bucket that contains the biomass products.
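Once the environment is running, a minimal usage sketch (mirroring `demo.ipynb` and the notebooks under `docs/sample_notebooks/`) looks like the following; the tile URL is only a placeholder and should be replaced with a real XYZ/COG tile endpoint:

```python
import stac_ipyleaflet
from ipyleaflet import TileLayer

# Build the customized ipyleaflet map with the STAC discovery widgets attached.
m = stac_ipyleaflet.StacIpyleaflet()

# Any standard ipyleaflet layer can be added on top of the built-in widgets.
# The URL template below is a placeholder, not a real MAAP endpoint.
tiles = TileLayer(
    url="https://example.com/tiles/{z}/{x}/{y}.png",
    name="My tile layer",
    opacity=0.8,
)
m.add_layer(tiles)

# Zoom to an area of interest: [[south, west], [north, east]].
m.fit_bounds([[35.0, -110.0], [45.0, -100.0]])

m  # display the map in the notebook
```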
85 | 86 | **Styling Notes** 87 | - By default, ipywidget icons can be set to any icon from the Font Awesome library v4: https://fontawesome.com/v4/icons/ 88 | - By default, ipywidget buttons can be styled with any HTML color name: https://htmlcolorcodes.com/color-names/ 89 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | =============== 2 | stac ipyleaflet 3 | =============== 4 | 5 | 6 | .. image:: https://img.shields.io/pypi/v/stac_ipyleaflet.svg 7 | :target: https://pypi.python.org/pypi/stac_ipyleaflet 8 | 9 | .. image:: https://img.shields.io/travis/abarciauskas-bgse/stac_ipyleaflet.svg 10 | :target: https://travis-ci.com/abarciauskas-bgse/stac_ipyleaflet 11 | 12 | .. image:: https://readthedocs.org/projects/stac-ipyleaflet/badge/?version=latest 13 | :target: https://stac-ipyleaflet.readthedocs.io/en/latest/?version=latest 14 | :alt: Documentation Status 15 | 16 | 17 | 18 | 19 | ipyleaflet customized for discovering, visualizing and interacting with STAC and workspace data. 20 | 21 | 22 | * Free software: Apache License 2.0 23 | * Documentation: https://docs.maap-project.org/en/latest/technical_tutorials/visualization/stac_ipyleaflet.html 24 | 25 | 26 | Features 27 | -------- 28 | 29 | * TODO 30 | 31 | Credits 32 | ------- 33 | 34 | This package was created with Cookiecutter_ and the `audreyr/cookiecutter-pypackage`_ project template. 35 | 36 | .. _Cookiecutter: https://github.com/audreyr/cookiecutter 37 | .. _`audreyr/cookiecutter-pypackage`: https://github.com/audreyr/cookiecutter-pypackage 38 | -------------------------------------------------------------------------------- /demo.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "id": "9648b56f-dbf3-4aac-af19-643067596220", 6 | "metadata": { 7 | "tags": [] 8 | }, 9 | "source": [ 10 | "# Use stac_ipyleaflet to visually explore MAAP data\n", 11 | "\n", 12 | "Inspired by Leafmap."
13 | ] 14 | }, 15 | { 16 | "cell_type": "code", 17 | "execution_count": 1, 18 | "id": "4f7d5321-d82b-4ed4-8136-832f932b30b5", 19 | "metadata": { 20 | "tags": [] 21 | }, 22 | "outputs": [ 23 | { 24 | "data": { 25 | "application/vnd.jupyter.widget-view+json": { 26 | "model_id": "a889bd5030ec400ebf2bd0ee01bded38", 27 | "version_major": 2, 28 | "version_minor": 0 29 | }, 30 | "text/plain": [ 31 | "HBox(children=(ToggleButton(value=False, description='Layers', icon='map-o', layout=Layout(border_bottom='1px …" 32 | ] 33 | }, 34 | "metadata": {}, 35 | "output_type": "display_data" 36 | }, 37 | { 38 | "data": { 39 | "application/vnd.jupyter.widget-view+json": { 40 | "model_id": "03a7d6cea86b4f2ab0de29781bb1942c", 41 | "version_major": 2, 42 | "version_minor": 0 43 | }, 44 | "text/plain": [ 45 | "Output()" 46 | ] 47 | }, 48 | "metadata": {}, 49 | "output_type": "display_data" 50 | }, 51 | { 52 | "data": { 53 | "application/vnd.jupyter.widget-view+json": { 54 | "model_id": "a0f8457e810d46f284779d9d6b201f24", 55 | "version_major": 2, 56 | "version_minor": 0 57 | }, 58 | "text/plain": [ 59 | "Output()" 60 | ] 61 | }, 62 | "metadata": {}, 63 | "output_type": "display_data" 64 | }, 65 | { 66 | "name": "stdout", 67 | "output_type": "stream", 68 | "text": [ 69 | "\n" 70 | ] 71 | }, 72 | { 73 | "data": { 74 | "application/vnd.jupyter.widget-view+json": { 75 | "model_id": "20b7d3527fcf4a4384ffd3df79e25d44", 76 | "version_major": 2, 77 | "version_minor": 0 78 | }, 79 | "text/plain": [ 80 | "StacIpyleaflet(center=[20, 0], controls=(ZoomControl(options=['position', 'zoom_in_text', 'zoom_in_title', 'zo…" 81 | ] 82 | }, 83 | "execution_count": 1, 84 | "metadata": {}, 85 | "output_type": "execute_result" 86 | } 87 | ], 88 | "source": [ 89 | "import stac_ipyleaflet\n", 90 | "# from ipywidgets import Layout\n", 91 | "m = stac_ipyleaflet.StacIpyleaflet()\n", 92 | "m" 93 | ] 94 | }, 95 | { 96 | "cell_type": "code", 97 | "execution_count": null, 98 | "id": "d7a3e19b-6e6c-4a4d-95ef-b253d28af4ad", 99 | "metadata": {}, 100 | "outputs": [], 101 | "source": [] 102 | }, 103 | { 104 | "cell_type": "code", 105 | "execution_count": null, 106 | "id": "0d2c9bb0-8435-449e-bf44-03e628556239", 107 | "metadata": {}, 108 | "outputs": [], 109 | "source": [] 110 | } 111 | ], 112 | "metadata": { 113 | "kernelspec": { 114 | "display_name": "Python 3 (ipykernel)", 115 | "language": "python", 116 | "name": "python3" 117 | }, 118 | "language_info": { 119 | "codemirror_mode": { 120 | "name": "ipython", 121 | "version": 3 122 | }, 123 | "file_extension": ".py", 124 | "mimetype": "text/x-python", 125 | "name": "python", 126 | "nbconvert_exporter": "python", 127 | "pygments_lexer": "ipython3", 128 | "version": "3.10.10" 129 | } 130 | }, 131 | "nbformat": 4, 132 | "nbformat_minor": 5 133 | } 134 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = python -msphinx 7 | SPHINXPROJ = stac_ipyleaflet 8 | SOURCEDIR = . 9 | BUILDDIR = _build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. 
$(O) is meant as a shortcut for $(SPHINXOPTS). 19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 21 | -------------------------------------------------------------------------------- /docs/README.md: -------------------------------------------------------------------------------- 1 | # STAC ipyleaflet Docs 2 | 3 | ### This documentation section provides *sample* Jupyter notebooks to demonstrate how to extend the current version of the `stac_ipyleaflet` library beyond the included widgets. 4 | 5 | _Browse through the sample notebooks within the `/sample_notebooks` directory to learn how to load and visualize geospatial datasets that are outside of a STAC Catalog, but within your cloud development environment or publicly accessible over the web._ 6 | 7 | Since `stac_ipyleaflet` is built on top of ipyleaflet, additional layer types can be added by following the [ipyleaflet documentation](https://ipyleaflet.readthedocs.io/en/latest/layers/index.html). 8 | 9 | 10 | 11 | ## Sample Notebooks 12 | 13 | - [visualize-**geodataframe**.ipynb](./sample_notebooks/visualize-geodataframe.ipynb): Load a geojson file located within your cloud development environment as a GeoPandas GeoDataFrame, create a **GeoData Layer** and add it to the interactive map. 14 | 15 | - [visualize-**geojson**.ipynb](./sample_notebooks/visualize-geojson.ipynb): Load a geojson file located within your cloud development environment, create a **GeoJSON Layer** and add it to the interactive map. 16 | 17 | - [visualize-**raster**.ipynb](./sample_notebooks/visualize-raster.ipynb): Load a raster within your cloud development environment, create a **TileLayer** and add it to the interactive map. 18 | 19 | - [visualize-**wms**.ipynb](./sample_notebooks/visualize-wms.ipynb): Create a **WMS Layer** from a publicly accessible URL and add it to the interactive map. 20 | 21 | 22 | 23 | 24 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # 3 | # stac_ipyleaflet documentation build configuration file, created by 4 | # sphinx-quickstart on Fri Jun 9 13:47:02 2017. 5 | # 6 | # This file is execfile()d with the current directory set to its 7 | # containing dir. 8 | # 9 | # Note that not all possible configuration values are present in this 10 | # autogenerated file. 11 | # 12 | # All configuration values have a default; values that are commented out 13 | # serve to show the default. 14 | 15 | # If extensions (or modules to document with autodoc) are in another 16 | # directory, add these directories to sys.path here. If the directory is 17 | # relative to the documentation root, use os.path.abspath to make it 18 | # absolute, like shown here. 19 | # 20 | import os 21 | import sys 22 | 23 | sys.path.insert(0, os.path.abspath("..")) 24 | 25 | import stac_ipyleaflet 26 | 27 | # -- General configuration --------------------------------------------- 28 | 29 | # If your documentation needs a minimal Sphinx version, state it here. 30 | # 31 | # needs_sphinx = '1.0' 32 | 33 | # Add any Sphinx extension module names here, as strings. They can be 34 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. 35 | extensions = ["sphinx.ext.autodoc", "sphinx.ext.viewcode"] 36 | 37 | # Add any paths that contain templates here, relative to this directory. 38 | templates_path = ["_templates"] 39 | 40 | # The suffix(es) of source filenames.
41 | # You can specify multiple suffix as a list of string: 42 | # 43 | # source_suffix = ['.rst', '.md'] 44 | source_suffix = ".rst" 45 | 46 | # The master toctree document. 47 | master_doc = "index" 48 | 49 | # General information about the project. 50 | project = "stac ipyleaflet" 51 | copyright = "2023, Aimee Barciauskas" 52 | author = "Aimee Barciauskas" 53 | 54 | # The version info for the project you're documenting, acts as replacement 55 | # for |version| and |release|, also used in various other places throughout 56 | # the built documents. 57 | # 58 | # The short X.Y version. 59 | version = stac_ipyleaflet.__version__ 60 | # The full version, including alpha/beta/rc tags. 61 | release = stac_ipyleaflet.__version__ 62 | 63 | # The language for content autogenerated by Sphinx. Refer to documentation 64 | # for a list of supported languages. 65 | # 66 | # This is also used if you do content translation via gettext catalogs. 67 | # Usually you set "language" from the command line for these cases. 68 | language = None 69 | 70 | # List of patterns, relative to source directory, that match files and 71 | # directories to ignore when looking for source files. 72 | # This patterns also effect to html_static_path and html_extra_path 73 | exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] 74 | 75 | # The name of the Pygments (syntax highlighting) style to use. 76 | pygments_style = "sphinx" 77 | 78 | # If true, `todo` and `todoList` produce output, else they produce nothing. 79 | todo_include_todos = False 80 | 81 | 82 | # -- Options for HTML output ------------------------------------------- 83 | 84 | # The theme to use for HTML and HTML Help pages. See the documentation for 85 | # a list of builtin themes. 86 | # 87 | html_theme = "alabaster" 88 | 89 | # Theme options are theme-specific and customize the look and feel of a 90 | # theme further. For a list of options available for each theme, see the 91 | # documentation. 92 | # 93 | # html_theme_options = {} 94 | 95 | # Add any paths that contain custom static files (such as style sheets) here, 96 | # relative to this directory. They are copied after the builtin static files, 97 | # so a file named "default.css" will overwrite the builtin "default.css". 98 | html_static_path = ["_static"] 99 | 100 | 101 | # -- Options for HTMLHelp output --------------------------------------- 102 | 103 | # Output file base name for HTML help builder. 104 | htmlhelp_basename = "stac_ipyleafletdoc" 105 | 106 | 107 | # -- Options for LaTeX output ------------------------------------------ 108 | 109 | latex_elements = { 110 | # The paper size ('letterpaper' or 'a4paper'). 111 | # 112 | # 'papersize': 'letterpaper', 113 | # The font size ('10pt', '11pt' or '12pt'). 114 | # 115 | # 'pointsize': '10pt', 116 | # Additional stuff for the LaTeX preamble. 117 | # 118 | # 'preamble': '', 119 | # Latex figure (float) alignment 120 | # 121 | # 'figure_align': 'htbp', 122 | } 123 | 124 | # Grouping the document tree into LaTeX files. List of tuples 125 | # (source start file, target name, title, author, documentclass 126 | # [howto, manual, or own class]). 127 | latex_documents = [ 128 | ( 129 | master_doc, 130 | "stac_ipyleaflet.tex", 131 | "stac ipyleaflet Documentation", 132 | "Aimee Barciauskas", 133 | "manual", 134 | ), 135 | ] 136 | 137 | 138 | # -- Options for manual page output ------------------------------------ 139 | 140 | # One entry per manual page. List of tuples 141 | # (source start file, name, description, authors, manual section). 
142 | man_pages = [ 143 | (master_doc, "stac_ipyleaflet", "stac ipyleaflet Documentation", [author], 1) 144 | ] 145 | 146 | 147 | # -- Options for Texinfo output ---------------------------------------- 148 | 149 | # Grouping the document tree into Texinfo files. List of tuples 150 | # (source start file, target name, title, author, 151 | # dir menu entry, description, category) 152 | texinfo_documents = [ 153 | ( 154 | master_doc, 155 | "stac_ipyleaflet", 156 | "stac ipyleaflet Documentation", 157 | author, 158 | "stac_ipyleaflet", 159 | "One line description of project.", 160 | "Miscellaneous", 161 | ), 162 | ] 163 | -------------------------------------------------------------------------------- /docs/contributing.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../CONTRIBUTING.rst 2 | -------------------------------------------------------------------------------- /docs/history.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../HISTORY.rst 2 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | Welcome to stac ipyleaflet's documentation! 2 | ====================================== 3 | 4 | .. toctree:: 5 | :maxdepth: 2 6 | :caption: Contents: 7 | 8 | readme 9 | installation 10 | usage 11 | modules 12 | contributing 13 | history 14 | 15 | Indices and tables 16 | ================== 17 | * :ref:`genindex` 18 | * :ref:`modindex` 19 | * :ref:`search` 20 | -------------------------------------------------------------------------------- /docs/installation.rst: -------------------------------------------------------------------------------- 1 | .. highlight:: shell 2 | 3 | ============ 4 | Installation 5 | ============ 6 | 7 | 8 | Stable release 9 | -------------- 10 | 11 | To install stac ipyleaflet, run this command in your terminal: 12 | 13 | .. code-block:: console 14 | 15 | $ pip install stac_ipyleaflet 16 | 17 | This is the preferred method to install stac ipyleaflet, as it will always install the most recent stable release. 18 | 19 | If you don't have `pip`_ installed, this `Python installation guide`_ can guide 20 | you through the process. 21 | 22 | .. _pip: https://pip.pypa.io 23 | .. _Python installation guide: http://docs.python-guide.org/en/latest/starting/installation/ 24 | 25 | 26 | From sources 27 | ------------ 28 | 29 | The sources for stac ipyleaflet can be downloaded from the `Github repo`_. 30 | 31 | You can either clone the public repository: 32 | 33 | .. code-block:: console 34 | 35 | $ git clone git://github.com/abarciauskas-bgse/stac_ipyleaflet 36 | 37 | Or download the `tarball`_: 38 | 39 | .. code-block:: console 40 | 41 | $ curl -OJL https://github.com/abarciauskas-bgse/stac_ipyleaflet/tarball/master 42 | 43 | Once you have a copy of the source, you can install it with: 44 | 45 | .. code-block:: console 46 | 47 | $ python setup.py install 48 | 49 | 50 | .. _Github repo: https://github.com/abarciauskas-bgse/stac_ipyleaflet 51 | .. 
_tarball: https://github.com/abarciauskas-bgse/stac_ipyleaflet/tarball/master 52 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=python -msphinx 9 | ) 10 | set SOURCEDIR=. 11 | set BUILDDIR=_build 12 | set SPHINXPROJ=stac_ipyleaflet 13 | 14 | if "%1" == "" goto help 15 | 16 | %SPHINXBUILD% >NUL 2>NUL 17 | if errorlevel 9009 ( 18 | echo. 19 | echo.The Sphinx module was not found. Make sure you have Sphinx installed, 20 | echo.then set the SPHINXBUILD environment variable to point to the full 21 | echo.path of the 'sphinx-build' executable. Alternatively you may add the 22 | echo.Sphinx directory to PATH. 23 | echo. 24 | echo.If you don't have Sphinx installed, grab it from 25 | echo.http://sphinx-doc.org/ 26 | exit /b 1 27 | ) 28 | 29 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% 30 | goto end 31 | 32 | :help 33 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% 34 | 35 | :end 36 | popd 37 | -------------------------------------------------------------------------------- /docs/readme.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../README.rst 2 | -------------------------------------------------------------------------------- /docs/sample_notebooks/visualize-geodataframe.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "id": "1076dced-70e8-448e-84e9-0b35e21ce427", 7 | "metadata": { 8 | "tags": [] 9 | }, 10 | "outputs": [], 11 | "source": [ 12 | "import os\n", 13 | "os.environ['USE_PYGEOS'] = '0'\n", 14 | "import geopandas\n", 15 | "from pathlib import Path" 16 | ] 17 | }, 18 | { 19 | "cell_type": "markdown", 20 | "id": "1cb328be-2cb5-408c-b8bd-365ada170617", 21 | "metadata": {}, 22 | "source": [ 23 | "### Right-click on the file and select option to Copy Path" 24 | ] 25 | }, 26 | { 27 | "cell_type": "code", 28 | "execution_count": 2, 29 | "id": "7bf748ba-4bcb-4922-96d0-41bad377e41e", 30 | "metadata": { 31 | "tags": [] 32 | }, 33 | "outputs": [ 34 | { 35 | "name": "stdin", 36 | "output_type": "stream", 37 | "text": [ 38 | "Path to file in bucket: ne_110m_land.geojson\n" 39 | ] 40 | } 41 | ], 42 | "source": [ 43 | "path = input(\"Path to file in bucket:\")" 44 | ] 45 | }, 46 | { 47 | "cell_type": "code", 48 | "execution_count": 3, 49 | "id": "dfdc653b-c341-4729-9d13-fc7c21eb9678", 50 | "metadata": { 51 | "tags": [] 52 | }, 53 | "outputs": [], 54 | "source": [ 55 | "home = Path.home()\n", 56 | "f_path = f'{home}/{path}'" 57 | ] 58 | }, 59 | { 60 | "cell_type": "code", 61 | "execution_count": 4, 62 | "id": "8b88a7b4-84b5-4965-830b-1a1d61e9655d", 63 | "metadata": { 64 | "tags": [] 65 | }, 66 | "outputs": [ 67 | { 68 | "name": "stdout", 69 | "output_type": "stream", 70 | "text": [ 71 | "FILE EXISTS AND IS COMPATIBLE\n" 72 | ] 73 | } 74 | ], 75 | "source": [ 76 | "try:\n", 77 | " if not Path(f_path).exists:\n", 78 | " print('FILE DOES NOT EXIST!')\n", 79 | " else:\n", 80 | " if Path(f_path).suffix == \".geojson\":\n", 81 | " print('FILE EXISTS AND IS COMPATIBLE')\n", 82 | " else:\n", 83 | " print('FILE EXISTS BUT IS INCOMPATIBLE')\n", 84 | "except CRSError:\n", 85 | " print('error', OSError)" 86 | ] 87 | }, 88 | { 89 | "cell_type": 
"markdown", 90 | "id": "f6fd34f6-e529-42b2-9617-10769198ba3b", 91 | "metadata": {}, 92 | "source": [ 93 | "### If compatible, open file & create GeoData layer using GeoPandas Dataframe" 94 | ] 95 | }, 96 | { 97 | "cell_type": "code", 98 | "execution_count": 7, 99 | "id": "55b1663d-9a9d-418a-97da-1f89624af1d0", 100 | "metadata": { 101 | "tags": [] 102 | }, 103 | "outputs": [], 104 | "source": [ 105 | "from ipyleaflet import GeoData\n", 106 | "\n", 107 | "# url = \"http://d2ad6b4ur7yvpq.cloudfront.net/naturalearth-3.3.0/ne_110m_land.geojson\"\n", 108 | "data = geopandas.read_file(f_path)\n", 109 | "\n", 110 | "geo_data = GeoData(\n", 111 | " geo_dataframe = data,\n", 112 | " style={'color': 'black', 'fillColor': '#3366cc', 'opacity':0.05, 'weight':1.9, 'dashArray':'2', 'fillOpacity':0.6},\n", 113 | " hover_style={'fillColor': 'red' , 'fillOpacity': 0.2},\n", 114 | " name = 'Natural Earth Countries'\n", 115 | ")" 116 | ] 117 | }, 118 | { 119 | "cell_type": "code", 120 | "execution_count": 8, 121 | "id": "2408919b-90a8-4586-8e38-e76bc54adbc3", 122 | "metadata": { 123 | "tags": [] 124 | }, 125 | "outputs": [ 126 | { 127 | "data": { 128 | "application/vnd.jupyter.widget-view+json": { 129 | "model_id": "6bb514eefc7540c88ddeeaa3ef047540", 130 | "version_major": 2, 131 | "version_minor": 0 132 | }, 133 | "text/plain": [ 134 | "HBox(children=(ToggleButton(value=False, description='Interact', icon='pencil', layout=Layout(border_bottom='1…" 135 | ] 136 | }, 137 | "metadata": {}, 138 | "output_type": "display_data" 139 | }, 140 | { 141 | "data": { 142 | "application/vnd.jupyter.widget-view+json": { 143 | "model_id": "59fc002b694c45bf964a62fdb01b5b70", 144 | "version_major": 2, 145 | "version_minor": 0 146 | }, 147 | "text/plain": [ 148 | "Output()" 149 | ] 150 | }, 151 | "metadata": {}, 152 | "output_type": "display_data" 153 | }, 154 | { 155 | "name": "stdout", 156 | "output_type": "stream", 157 | "text": [ 158 | "\n" 159 | ] 160 | }, 161 | { 162 | "data": { 163 | "application/vnd.jupyter.widget-view+json": { 164 | "model_id": "011425a2cb8d47fea605ccc870ae0f99", 165 | "version_major": 2, 166 | "version_minor": 0 167 | }, 168 | "text/plain": [ 169 | "Output()" 170 | ] 171 | }, 172 | "metadata": {}, 173 | "output_type": "display_data" 174 | }, 175 | { 176 | "data": { 177 | "application/vnd.jupyter.widget-view+json": { 178 | "model_id": "f4b59c382bca47fe8965f54ebaf4ad35", 179 | "version_major": 2, 180 | "version_minor": 0 181 | }, 182 | "text/plain": [ 183 | "StacIpyleaflet(center=[20, 0], controls=(ZoomControl(options=['position', 'zoom_in_text', 'zoom_in_title', 'zo…" 184 | ] 185 | }, 186 | "execution_count": 8, 187 | "metadata": {}, 188 | "output_type": "execute_result" 189 | } 190 | ], 191 | "source": [ 192 | "import stac_ipyleaflet\n", 193 | "\n", 194 | "m = stac_ipyleaflet.StacIpyleaflet()\n", 195 | "m.add_layer(geo_data)\n", 196 | "m" 197 | ] 198 | }, 199 | { 200 | "cell_type": "code", 201 | "execution_count": null, 202 | "id": "23ae4988-0b73-41dc-887b-0f92475648c7", 203 | "metadata": {}, 204 | "outputs": [], 205 | "source": [] 206 | } 207 | ], 208 | "metadata": { 209 | "kernelspec": { 210 | "display_name": "Python [conda env:nasa-veda-singleuser]", 211 | "language": "python", 212 | "name": "conda-env-nasa-veda-singleuser-py" 213 | }, 214 | "language_info": { 215 | "codemirror_mode": { 216 | "name": "ipython", 217 | "version": 3 218 | }, 219 | "file_extension": ".py", 220 | "mimetype": "text/x-python", 221 | "name": "python", 222 | "nbconvert_exporter": "python", 223 | "pygments_lexer": 
"ipython3", 224 | "version": "3.10.12" 225 | } 226 | }, 227 | "nbformat": 4, 228 | "nbformat_minor": 5 229 | } 230 | -------------------------------------------------------------------------------- /docs/sample_notebooks/visualize-geojson.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "id": "29f10874-05b0-4c01-9134-309f08d095c8", 7 | "metadata": { 8 | "tags": [] 9 | }, 10 | "outputs": [], 11 | "source": [ 12 | "import json\n", 13 | "from pathlib import Path" 14 | ] 15 | }, 16 | { 17 | "cell_type": "markdown", 18 | "id": "fba9d92f-7f9c-4e83-845e-2420f67ba1ff", 19 | "metadata": { 20 | "tags": [] 21 | }, 22 | "source": [ 23 | "### Right-click on the file and select option to Copy Path" 24 | ] 25 | }, 26 | { 27 | "cell_type": "code", 28 | "execution_count": 2, 29 | "id": "7027f167-ffbb-4617-a8a0-a9e21a1c53fa", 30 | "metadata": { 31 | "tags": [] 32 | }, 33 | "outputs": [ 34 | { 35 | "name": "stdin", 36 | "output_type": "stream", 37 | "text": [ 38 | "Path to file in bucket: local-data/map.geojson\n" 39 | ] 40 | } 41 | ], 42 | "source": [ 43 | "path = input(\"Path to file in bucket:\")" 44 | ] 45 | }, 46 | { 47 | "cell_type": "code", 48 | "execution_count": 3, 49 | "id": "dd28c59a-2971-4bd2-9202-1bf0cf593597", 50 | "metadata": { 51 | "tags": [] 52 | }, 53 | "outputs": [], 54 | "source": [ 55 | "home = Path.home()\n", 56 | "f_path = f'{home}/{path}'" 57 | ] 58 | }, 59 | { 60 | "cell_type": "code", 61 | "execution_count": 4, 62 | "id": "19530b2d-6b5d-4493-a218-7f1e65121e35", 63 | "metadata": { 64 | "tags": [] 65 | }, 66 | "outputs": [ 67 | { 68 | "name": "stdout", 69 | "output_type": "stream", 70 | "text": [ 71 | "FILE EXISTS AND IS COMPATIBLE\n" 72 | ] 73 | } 74 | ], 75 | "source": [ 76 | "try:\n", 77 | " if not Path(f_path).exists:\n", 78 | " print('FILE DOES NOT EXIST!')\n", 79 | " else:\n", 80 | " if Path(f_path).suffix == \".geojson\":\n", 81 | " print('FILE EXISTS AND IS COMPATIBLE')\n", 82 | " else:\n", 83 | " print('FILE EXISTS BUT IS INCOMPATIBLE')\n", 84 | "except CRSError:\n", 85 | " print('error', OSError)" 86 | ] 87 | }, 88 | { 89 | "cell_type": "markdown", 90 | "id": "53762e45-47ce-402c-9aaa-55a5c6199e00", 91 | "metadata": { 92 | "tags": [] 93 | }, 94 | "source": [ 95 | "### If compatible, open file & create GeoJSON layer" 96 | ] 97 | }, 98 | { 99 | "cell_type": "code", 100 | "execution_count": 5, 101 | "id": "7e93648b-f193-4bb5-8310-00c6b26581fd", 102 | "metadata": {}, 103 | "outputs": [], 104 | "source": [ 105 | "from ipyleaflet import GeoJSON\n", 106 | "\n", 107 | "with open(f_path, 'r') as f:\n", 108 | " data = json.load(f)\n", 109 | "\n", 110 | "geojson = GeoJSON(\n", 111 | " data=data,\n", 112 | " style={\n", 113 | " 'opacity': 1, 'dashArray': '9', 'fillOpacity': 0.1, 'weight': 1\n", 114 | " },\n", 115 | " hover_style={\n", 116 | " 'color': 'white', 'dashArray': '0', 'fillOpacity': 0.5\n", 117 | " },\n", 118 | ")\n", 119 | "# print(json.dumps(geojson.data, indent=4))" 120 | ] 121 | }, 122 | { 123 | "cell_type": "markdown", 124 | "id": "a2c79234-ff56-41b1-8400-6a74a3eadb0b", 125 | "metadata": { 126 | "tags": [] 127 | }, 128 | "source": [ 129 | "### Calculate bounds (data extent) from all features" 130 | ] 131 | }, 132 | { 133 | "cell_type": "code", 134 | "execution_count": 6, 135 | "id": "90378a88-04f4-4d93-a4d8-7ccb44244709", 136 | "metadata": { 137 | "tags": [] 138 | }, 139 | "outputs": [], 140 | "source": [ 141 | "xcoords = []\n", 142 | "ycoords = []\n", 
143 | "for f in data['features']:\n", 144 | " geom = f['geometry']\n", 145 | " for coord in geom['coordinates']:\n", 146 | " if type(coord) == float: # then its a point feature\n", 147 | " xcoords.append(geom['coordinates'][0])\n", 148 | " ycoords.append(geom['coordinates'][1])\n", 149 | " elif type(coord) == list:\n", 150 | " for c in coord:\n", 151 | " if type(c) == float: # then its a linestring feature\n", 152 | " xcoords.append(coord[0])\n", 153 | " ycoords.append(coord[1])\n", 154 | " elif type(c) == list: # then its a polygon feature\n", 155 | " xcoords.append(c[0])\n", 156 | " ycoords.append(c[1])\n", 157 | "bounds = [\n", 158 | " [min(ycoords), min(xcoords)],\n", 159 | " [max(ycoords), max(xcoords)]\n", 160 | "]" 161 | ] 162 | }, 163 | { 164 | "cell_type": "markdown", 165 | "id": "dafa7ebf-32e0-4724-8d06-86bbcf1ff46a", 166 | "metadata": { 167 | "tags": [] 168 | }, 169 | "source": [ 170 | "### Add geojson to `stac_ipyleaflet` map" 171 | ] 172 | }, 173 | { 174 | "cell_type": "code", 175 | "execution_count": 7, 176 | "id": "b2f6e35a-15ab-4beb-bcdb-8fb6357ec879", 177 | "metadata": { 178 | "tags": [] 179 | }, 180 | "outputs": [ 181 | { 182 | "data": { 183 | "application/vnd.jupyter.widget-view+json": { 184 | "model_id": "74cf5cf758774ccba5bff1bc9c6eb3f5", 185 | "version_major": 2, 186 | "version_minor": 0 187 | }, 188 | "text/plain": [ 189 | "HBox(children=(ToggleButton(value=False, description='Draw', icon='square-o', layout=Layout(border_bottom='1px…" 190 | ] 191 | }, 192 | "metadata": {}, 193 | "output_type": "display_data" 194 | }, 195 | { 196 | "data": { 197 | "application/vnd.jupyter.widget-view+json": { 198 | "model_id": "1fe1e87ada4b4af796ecbb9d3e2f769b", 199 | "version_major": 2, 200 | "version_minor": 0 201 | }, 202 | "text/plain": [ 203 | "Output()" 204 | ] 205 | }, 206 | "metadata": {}, 207 | "output_type": "display_data" 208 | }, 209 | { 210 | "name": "stdout", 211 | "output_type": "stream", 212 | "text": [ 213 | "\n" 214 | ] 215 | }, 216 | { 217 | "data": { 218 | "application/vnd.jupyter.widget-view+json": { 219 | "model_id": "5455fc5b1d5f44f795120b9dbcf9a93e", 220 | "version_major": 2, 221 | "version_minor": 0 222 | }, 223 | "text/plain": [ 224 | "StacIpyleaflet(center=[20, 0], controls=(ZoomControl(options=['position', 'zoom_in_text', 'zoom_in_title', 'zo…" 225 | ] 226 | }, 227 | "execution_count": 7, 228 | "metadata": {}, 229 | "output_type": "execute_result" 230 | } 231 | ], 232 | "source": [ 233 | "import stac_ipyleaflet\n", 234 | "\n", 235 | "m = stac_ipyleaflet.StacIpyleaflet()\n", 236 | "m.add_layer(geojson)\n", 237 | "m.fit_bounds(bounds)\n", 238 | "m" 239 | ] 240 | }, 241 | { 242 | "cell_type": "code", 243 | "execution_count": null, 244 | "id": "ce50b0c2-b844-420d-9493-aa1409a4a23a", 245 | "metadata": {}, 246 | "outputs": [], 247 | "source": [] 248 | } 249 | ], 250 | "metadata": { 251 | "kernelspec": { 252 | "display_name": "Python 3 (ipykernel)", 253 | "language": "python", 254 | "name": "python3" 255 | }, 256 | "language_info": { 257 | "codemirror_mode": { 258 | "name": "ipython", 259 | "version": 3 260 | }, 261 | "file_extension": ".py", 262 | "mimetype": "text/x-python", 263 | "name": "python", 264 | "nbconvert_exporter": "python", 265 | "pygments_lexer": "ipython3", 266 | "version": "3.10.12" 267 | } 268 | }, 269 | "nbformat": 4, 270 | "nbformat_minor": 5 271 | } 272 | -------------------------------------------------------------------------------- /docs/sample_notebooks/visualize-raster.ipynb: 
-------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": { 7 | "tags": [] 8 | }, 9 | "outputs": [], 10 | "source": [ 11 | "import boto3\n", 12 | "import httpx\n", 13 | "import json\n", 14 | "import os" 15 | ] 16 | }, 17 | { 18 | "cell_type": "markdown", 19 | "metadata": { 20 | "tags": [] 21 | }, 22 | "source": [ 23 | "### Set variables and helper functions" 24 | ] 25 | }, 26 | { 27 | "cell_type": "code", 28 | "execution_count": 2, 29 | "metadata": { 30 | "tags": [] 31 | }, 32 | "outputs": [], 33 | "source": [ 34 | "def checkFilePath(file_path):\n", 35 | " result = s3.list_objects(Bucket=bucket, Prefix=file_path)\n", 36 | " exists = True if 'Contents' in result else False\n", 37 | " if exists:\n", 38 | " print('PATH EXISTS')\n", 39 | " return result['Contents']\n", 40 | " return exists" 41 | ] 42 | }, 43 | { 44 | "cell_type": "code", 45 | "execution_count": 3, 46 | "metadata": { 47 | "tags": [] 48 | }, 49 | "outputs": [], 50 | "source": [ 51 | "user = os.getenv('CHE_WORKSPACE_NAMESPACE')\n", 52 | "titiler_url = \"https://titiler.maap-project.org\" # MAAP titiler endpoint\n", 53 | "titiler_tilejson_url = f\"{titiler_url}/cog/tilejson.json\"\n", 54 | "bucket = \"maap-ops-workspace\"\n", 55 | "band_min = 0\n", 56 | "band_max = 50\n", 57 | "color_map = \"gist_earth_r\"" 58 | ] 59 | }, 60 | { 61 | "cell_type": "markdown", 62 | "metadata": { 63 | "jp-MarkdownHeadingCollapsed": true, 64 | "tags": [] 65 | }, 66 | "source": [ 67 | "#### Option to query possible projections supported by titiler service" 68 | ] 69 | }, 70 | { 71 | "cell_type": "code", 72 | "execution_count": 25, 73 | "metadata": { 74 | "tags": [] 75 | }, 76 | "outputs": [ 77 | { 78 | "name": "stdout", 79 | "output_type": "stream", 80 | "text": [ 81 | "Supported TMS:\n", 82 | "- CanadianNAD83_LCC\n", 83 | "- EuropeanETRS89_LAEAQuad\n", 84 | "- LINZAntarticaMapTilegrid\n", 85 | "- NZTM2000\n", 86 | "- NZTM2000Quad\n", 87 | "- UPSAntarcticWGS84Quad\n", 88 | "- UPSArcticWGS84Quad\n", 89 | "- UTM31WGS84Quad\n", 90 | "- WGS1984Quad\n", 91 | "- WebMercatorQuad\n", 92 | "- WorldCRS84Quad\n", 93 | "- WorldMercatorWGS84Quad\n" 94 | ] 95 | } 96 | ], 97 | "source": [ 98 | "tileMatrixSets = httpx.get(f\"{titiler_url}/tileMatrixSets\").json()\n", 99 | "print(\"Supported TMS:\")\n", 100 | "for tms in tileMatrixSets[\"tileMatrixSets\"]:\n", 101 | " print(\"-\", tms[\"id\"])" 102 | ] 103 | }, 104 | { 105 | "cell_type": "markdown", 106 | "metadata": {}, 107 | "source": [ 108 | "### Right-click on the file and select option to Copy Path" 109 | ] 110 | }, 111 | { 112 | "cell_type": "code", 113 | "execution_count": 4, 114 | "metadata": { 115 | "tags": [] 116 | }, 117 | "outputs": [ 118 | { 119 | "name": "stdout", 120 | "output_type": "stream", 121 | "text": [ 122 | "Path to raster in bucket: shared-buckets/alexdevseed/landsat8/viz/Copernicus_30439_covars_cog_topo_stack.tif\n" 123 | ] 124 | } 125 | ], 126 | "source": [ 127 | "path = input(\"Path to raster in bucket:\")" 128 | ] 129 | }, 130 | { 131 | "cell_type": "code", 132 | "execution_count": 5, 133 | "metadata": { 134 | "tags": [] 135 | }, 136 | "outputs": [ 137 | { 138 | "name": "stdout", 139 | "output_type": "stream", 140 | "text": [ 141 | "PATH EXISTS\n", 142 | "[{'Key': 'shared/alexdevseed/landsat8/viz/Copernicus_30439_covars_cog_topo_stack.tif', 'LastModified': datetime.datetime(2021, 7, 22, 22, 39, 57, tzinfo=tzutc()), 'ETag': '\"0dbc859db2e921cda2b2ef403fa41f97-3\"', 'Size': 
20084240, 'StorageClass': 'STANDARD', 'Owner': {'DisplayName': 'MSFC-IMPACT-MAAP-Ops-root', 'ID': '801c37e81ec7d7b327915c96502ec5f346f48f2cdc819da9284110dbc39b64e7'}}, {'Key': 'shared/alexdevseed/landsat8/viz/Copernicus_30439_covars_cog_topo_stack.tif.aux.xml', 'LastModified': datetime.datetime(2023, 3, 4, 1, 31, 21, tzinfo=tzutc()), 'ETag': '\"8ca416537cb9d82de62177a3a65d3751\"', 'Size': 2052, 'StorageClass': 'STANDARD', 'Owner': {'DisplayName': 'MSFC-IMPACT-MAAP-Ops-root', 'ID': '801c37e81ec7d7b327915c96502ec5f346f48f2cdc819da9284110dbc39b64e7'}}]\n" 143 | ] 144 | } 145 | ], 146 | "source": [ 147 | "s3 = boto3.client('s3')\n", 148 | "file_name = path.split('/', 1)[-1]\n", 149 | "if 'shared-buckets' in path:\n", 150 | " file_path = f'shared/{file_name}'\n", 151 | "if 'my-private-bucket' in path:\n", 152 | " file_path = f'{user}/{file_name}'\n", 153 | "if 'my-public-bucket' in path:\n", 154 | " file_path = f'shared/{user}/{file_name}'\n", 155 | "print(checkFilePath(file_path))" 156 | ] 157 | }, 158 | { 159 | "cell_type": "code", 160 | "execution_count": 6, 161 | "metadata": { 162 | "tags": [] 163 | }, 164 | "outputs": [], 165 | "source": [ 166 | "url = f\"s3://maap-ops-workspace/{file_path}\"" 167 | ] 168 | }, 169 | { 170 | "cell_type": "markdown", 171 | "metadata": {}, 172 | "source": [ 173 | "#### If Path exists, continue..." 174 | ] 175 | }, 176 | { 177 | "cell_type": "markdown", 178 | "metadata": { 179 | "tags": [] 180 | }, 181 | "source": [ 182 | "### Open raster from url" 183 | ] 184 | }, 185 | { 186 | "cell_type": "code", 187 | "execution_count": 11, 188 | "metadata": { 189 | "tags": [] 190 | }, 191 | "outputs": [], 192 | "source": [ 193 | "import rioxarray as rxr\n", 194 | "\n", 195 | "raster = rxr.open_rasterio(url, masked=True)\n", 196 | "raster" 197 | ] 198 | }, 199 | { 200 | "cell_type": "markdown", 201 | "metadata": { 202 | "tags": [] 203 | }, 204 | "source": [ 205 | "#### Project to default map projection" 206 | ] 207 | }, 208 | { 209 | "cell_type": "code", 210 | "execution_count": 13, 211 | "metadata": { 212 | "tags": [] 213 | }, 214 | "outputs": [ 215 | { 216 | "name": "stdout", 217 | "output_type": "stream", 218 | "text": [ 219 | "The CRS of this dataset is: EPSG:3857\n" 220 | ] 221 | } 222 | ], 223 | "source": [ 224 | "crs = raster.rio.crs\n", 225 | "print(\"The CRS of this dataset is:\", crs)\n", 226 | "crs_number = crs.to_epsg()\n", 227 | "if crs_number != 3857:\n", 228 | " raster = raster.rio.reproject(\"EPSG:3857\")\n", 229 | " crs = raster.rio.crs\n", 230 | " print(\"\\n\", \"The NEW CRS of this dataset is:\", crs)" 231 | ] 232 | }, 233 | { 234 | "cell_type": "markdown", 235 | "metadata": { 236 | "tags": [] 237 | }, 238 | "source": [ 239 | "#### Get raster info (bounds, zoom, data type)" 240 | ] 241 | }, 242 | { 243 | "cell_type": "code", 244 | "execution_count": 15, 245 | "metadata": { 246 | "tags": [] 247 | }, 248 | "outputs": [ 249 | { 250 | "name": "stdout", 251 | "output_type": "stream", 252 | "text": [ 253 | "Bounds: [-163.51338693693168, 67.17121197506852, -162.6566451826878, 67.49580310072406]\n", 254 | "Zoom: 8\n", 255 | "Data type: float32\n", 256 | "Bands: [['b1', {}], ['b2', {}], ['b3', {}], ['b4', {}], ['b5', {}]]\n" 257 | ] 258 | } 259 | ], 260 | "source": [ 261 | "r = httpx.get(\n", 262 | " f\"{titiler_url}/cog/info\",\n", 263 | " params = {\n", 264 | " \"url\": url,\n", 265 | " }\n", 266 | ").json()\n", 267 | "\n", 268 | "# print(json.dumps(r, indent=4))\n", 269 | "\n", 270 | "bounds = r.get(\"bounds\")\n", 271 | "minzoom = r.get(\"minzoom\")\n", 272 
| "zoom = minzoom + 1 if minzoom == 0 else minzoom\n", 273 | "bands = r.get(\"band_metadata\")\n", 274 | "\n", 275 | "print(\"Bounds:\", bounds)\n", 276 | "print(\"Zoom:\", zoom)\n", 277 | "print(\"Data type:\", r.get(\"dtype\"))\n", 278 | "print(\"Bands:\", bands)" 279 | ] 280 | }, 281 | { 282 | "cell_type": "markdown", 283 | "metadata": { 284 | "tags": [] 285 | }, 286 | "source": [ 287 | "#### Calculate raster center for map placement" 288 | ] 289 | }, 290 | { 291 | "cell_type": "code", 292 | "execution_count": 16, 293 | "metadata": { 294 | "tags": [] 295 | }, 296 | "outputs": [ 297 | { 298 | "name": "stdout", 299 | "output_type": "stream", 300 | "text": [ 301 | "Center: (67.33350753789628, -163.0850160598097)\n" 302 | ] 303 | } 304 | ], 305 | "source": [ 306 | "from shapely.geometry import box\n", 307 | "\n", 308 | "polygon = box(*bounds)\n", 309 | "center = (polygon.centroid.y, polygon.centroid.x)\n", 310 | "print(\"Center:\", center)" 311 | ] 312 | }, 313 | { 314 | "cell_type": "markdown", 315 | "metadata": { 316 | "tags": [] 317 | }, 318 | "source": [ 319 | "#### Get value statistics for rescaling" 320 | ] 321 | }, 322 | { 323 | "cell_type": "code", 324 | "execution_count": 17, 325 | "metadata": { 326 | "tags": [] 327 | }, 328 | "outputs": [ 329 | { 330 | "name": "stdout", 331 | "output_type": "stream", 332 | "text": [ 333 | "minv: -1.829763650894165 maxv: 557.8629150390625\n" 334 | ] 335 | } 336 | ], 337 | "source": [ 338 | "r = httpx.get(\n", 339 | " f\"{titiler_url}/cog/statistics\",\n", 340 | " params = {\n", 341 | " \"url\": url,\n", 342 | " }\n", 343 | ").json()\n", 344 | "\n", 345 | "# print(json.dumps(r, indent=4))\n", 346 | "band = r.get(\"b1\")\n", 347 | "if band:\n", 348 | " band_min, band_max = band.get(\"min\"), band.get(\"max\")\n", 349 | " print(\"min:\", band_min, \"max:\", band_max)" 350 | ] 351 | }, 352 | { 353 | "cell_type": "markdown", 354 | "metadata": {}, 355 | "source": [ 356 | "### Display local raster" 357 | ] 358 | }, 359 | { 360 | "cell_type": "markdown", 361 | "metadata": {}, 362 | "source": [ 363 | "#### Create TileLayer" 364 | ] 365 | }, 366 | { 367 | "cell_type": "code", 368 | "execution_count": 19, 369 | "metadata": { 370 | "tags": [] 371 | }, 372 | "outputs": [], 373 | "source": [ 374 | "from ipyleaflet import TileLayer\n", 375 | "\n", 376 | "params = {\n", 377 | " \"url\": url,\n", 378 | " \"tile_scale\": \"1\",\n", 379 | " \"tile_format\": \"png\",\n", 380 | " \"TileMatrixSetId\": \"WebMercatorQuad\",\n", 381 | " \"return_mask\": \"true\",\n", 382 | " \"rescale\": f\"{band_min}, {band_max}\",\n", 383 | " \"resampling_method\": \"nearest\",\n", 384 | " \"pixel_selection\": \"first\",\n", 385 | " \"bidx\": \"1\",\n", 386 | " \"colormap_name\": color_map\n", 387 | "}\n", 388 | "r = httpx.get(titiler_tilejson_url, params=params)\n", 389 | "\n", 390 | "if r.status_code == 200:\n", 391 | " json = r.json()\n", 392 | " layer_url = json['tiles'][0]\n", 393 | " custom_layer = TileLayer(url=layer_url, show_loading=True, transparent=True)" 394 | ] 395 | }, 396 | { 397 | "cell_type": "code", 398 | "execution_count": 20, 399 | "metadata": { 400 | "tags": [] 401 | }, 402 | "outputs": [ 403 | { 404 | "data": { 405 | "application/vnd.jupyter.widget-view+json": { 406 | "model_id": "2efdfdb4829d4b8cad7b7e1134c52206", 407 | "version_major": 2, 408 | "version_minor": 0 409 | }, 410 | "text/plain": [ 411 | "HBox(children=(ToggleButton(value=False, description='Draw', icon='square-o', layout=Layout(border_bottom='1px…" 412 | ] 413 | }, 414 | "metadata": {}, 415 | 
"output_type": "display_data" 416 | }, 417 | { 418 | "data": { 419 | "application/vnd.jupyter.widget-view+json": { 420 | "model_id": "cbd0566497db42748ddf829d1dc7f20a", 421 | "version_major": 2, 422 | "version_minor": 0 423 | }, 424 | "text/plain": [ 425 | "Output()" 426 | ] 427 | }, 428 | "metadata": {}, 429 | "output_type": "display_data" 430 | }, 431 | { 432 | "name": "stdout", 433 | "output_type": "stream", 434 | "text": [ 435 | "\n" 436 | ] 437 | }, 438 | { 439 | "data": { 440 | "application/vnd.jupyter.widget-view+json": { 441 | "model_id": "adba37daf3bf4184bf6a822115cc331f", 442 | "version_major": 2, 443 | "version_minor": 0 444 | }, 445 | "text/plain": [ 446 | "StacIpyleaflet(center=[67.33350753789628, -163.0850160598097], controls=(ZoomControl(options=['position', 'zoo…" 447 | ] 448 | }, 449 | "execution_count": 20, 450 | "metadata": {}, 451 | "output_type": "execute_result" 452 | } 453 | ], 454 | "source": [ 455 | "import stac_ipyleaflet\n", 456 | "\n", 457 | "m = stac_ipyleaflet.StacIpyleaflet(zoom=zoom, center=center)\n", 458 | "m.add_layer(custom_layer)\n", 459 | "m" 460 | ] 461 | }, 462 | { 463 | "cell_type": "markdown", 464 | "metadata": { 465 | "jp-MarkdownHeadingCollapsed": true, 466 | "tags": [] 467 | }, 468 | "source": [ 469 | "#### If adding layer after map was created, use the fit_bounds method to navigate to it\n", 470 | "`m.fit_bounds(bounds)`" 471 | ] 472 | }, 473 | { 474 | "cell_type": "markdown", 475 | "metadata": { 476 | "tags": [] 477 | }, 478 | "source": [ 479 | "#### Option to remove custom layer\n", 480 | "`m.remove_layer(custom_layer)`" 481 | ] 482 | }, 483 | { 484 | "cell_type": "code", 485 | "execution_count": null, 486 | "metadata": {}, 487 | "outputs": [], 488 | "source": [] 489 | } 490 | ], 491 | "metadata": { 492 | "kernelspec": { 493 | "display_name": "Python 3 (ipykernel)", 494 | "language": "python", 495 | "name": "python3" 496 | }, 497 | "language_info": { 498 | "codemirror_mode": { 499 | "name": "ipython", 500 | "version": 3 501 | }, 502 | "file_extension": ".py", 503 | "mimetype": "text/x-python", 504 | "name": "python", 505 | "nbconvert_exporter": "python", 506 | "pygments_lexer": "ipython3", 507 | "version": "3.10.11" 508 | } 509 | }, 510 | "nbformat": 4, 511 | "nbformat_minor": 4 512 | } 513 | -------------------------------------------------------------------------------- /docs/sample_notebooks/visualize-wms.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "id": "9ccad2ce-fe7f-49c6-ae88-e9c64776e1eb", 7 | "metadata": { 8 | "tags": [] 9 | }, 10 | "outputs": [], 11 | "source": [ 12 | "### Create a simple WMS layer" 13 | ] 14 | }, 15 | { 16 | "cell_type": "code", 17 | "execution_count": 2, 18 | "id": "d3d198cf-9943-4727-b05d-54ae40166bbb", 19 | "metadata": { 20 | "tags": [] 21 | }, 22 | "outputs": [], 23 | "source": [ 24 | "from ipyleaflet import WMSLayer\n", 25 | "\n", 26 | "wms = WMSLayer(\n", 27 | " url='http://mesonet.agron.iastate.edu/cgi-bin/wms/nexrad/n0r.cgi',\n", 28 | " layers='nexrad-n0r-900913',\n", 29 | " format='image/png',\n", 30 | " transparent=True,\n", 31 | " attribution='Weather data © 2012 IEM Nexrad'\n", 32 | ")" 33 | ] 34 | }, 35 | { 36 | "cell_type": "code", 37 | "execution_count": 19, 38 | "id": "eb827652-d931-4c1f-a51f-1bb5c2915ece", 39 | "metadata": { 40 | "tags": [] 41 | }, 42 | "outputs": [ 43 | { 44 | "ename": "ImportError", 45 | "evalue": "attempted relative import with no known parent package", 46 
| "output_type": "error", 47 | "traceback": [ 48 | "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", 49 | "\u001b[0;31mImportError\u001b[0m Traceback (most recent call last)", 50 | "Cell \u001b[0;32mIn[19], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m stac_ipyleaflet\n\u001b[1;32m 3\u001b[0m m \u001b[38;5;241m=\u001b[39m stac_ipyleaflet\u001b[38;5;241m.\u001b[39mStacIpyleaflet()\n\u001b[1;32m 4\u001b[0m m\u001b[38;5;241m.\u001b[39madd_layer(wms)\n", 51 | "\u001b[0;31mImportError\u001b[0m: attempted relative import with no known parent package" 52 | ] 53 | } 54 | ], 55 | "source": [ 56 | "import stac_ipyleaflet\n", 57 | "\n", 58 | "m = stac_ipyleaflet.StacIpyleaflet()\n", 59 | "m.add_layer(wms)\n", 60 | "m" 61 | ] 62 | }, 63 | { 64 | "cell_type": "code", 65 | "execution_count": null, 66 | "id": "fb26a46a-a210-47dc-a658-d85d9bcfe294", 67 | "metadata": {}, 68 | "outputs": [], 69 | "source": [ 70 | "### Create a tiled layer using TiTiler WMTS Capabilities" 71 | ] 72 | }, 73 | { 74 | "cell_type": "markdown", 75 | "id": "a285eb54", 76 | "metadata": {}, 77 | "source": [ 78 | "from ipyleaflet import WMSLayer\n" 79 | ] 80 | } 81 | ], 82 | "metadata": { 83 | "kernelspec": { 84 | "display_name": "Python 3 (ipykernel)", 85 | "language": "python", 86 | "name": "python3" 87 | }, 88 | "language_info": { 89 | "codemirror_mode": { 90 | "name": "ipython", 91 | "version": 3 92 | }, 93 | "file_extension": ".py", 94 | "mimetype": "text/x-python", 95 | "name": "python", 96 | "nbconvert_exporter": "python", 97 | "pygments_lexer": "ipython3", 98 | "version": "3.10.8" 99 | } 100 | }, 101 | "nbformat": 4, 102 | "nbformat_minor": 5 103 | } 104 | -------------------------------------------------------------------------------- /docs/usage.rst: -------------------------------------------------------------------------------- 1 | ===== 2 | Usage 3 | ===== 4 | 5 | To use stac ipyleaflet in a project:: 6 | 7 | import stac_ipyleaflet 8 | -------------------------------------------------------------------------------- /environment.yml: -------------------------------------------------------------------------------- 1 | name: stac_ipyleaflet 2 | channels: 3 | - conda-forge 4 | dependencies: 5 | - jupyterlab 6 | - pip 7 | - gdal 8 | - rasterio>=1.3 9 | -------------------------------------------------------------------------------- /public/images/about-map-visualization-solution.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MAAP-Project/stac_ipyleaflet/a880a19ad8eafff605ca2c8895f07032733c6034/public/images/about-map-visualization-solution.png -------------------------------------------------------------------------------- /public/images/getting-started-conda-activate.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MAAP-Project/stac_ipyleaflet/a880a19ad8eafff605ca2c8895f07032733c6034/public/images/getting-started-conda-activate.png -------------------------------------------------------------------------------- /public/images/getting-started-correct-tabs.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MAAP-Project/stac_ipyleaflet/a880a19ad8eafff605ca2c8895f07032733c6034/public/images/getting-started-correct-tabs.png 
-------------------------------------------------------------------------------- /public/images/getting-started-links.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MAAP-Project/stac_ipyleaflet/a880a19ad8eafff605ca2c8895f07032733c6034/public/images/getting-started-links.png -------------------------------------------------------------------------------- /public/images/jlab-screenshot.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MAAP-Project/stac_ipyleaflet/a880a19ad8eafff605ca2c8895f07032733c6034/public/images/jlab-screenshot.png -------------------------------------------------------------------------------- /public/images/loading.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MAAP-Project/stac_ipyleaflet/a880a19ad8eafff605ca2c8895f07032733c6034/public/images/loading.gif -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | ipyleaflet>=0.17.2 2 | ipywidgets>=8.0.4 3 | ipyevents 4 | matplotlib 5 | pydantic 6 | shapely 7 | requests 8 | rio_tiler 9 | rioxarray 10 | xarray 11 | pystac_client==0.6.1 12 | localtileserver 13 | python-dotenv -------------------------------------------------------------------------------- /requirements_dev.txt: -------------------------------------------------------------------------------- 1 | pip==19.2.3 2 | bump2version==0.5.11 3 | wheel==0.33.6 4 | watchdog==0.9.0 5 | flake8==3.7.8 6 | tox==3.14.0 7 | coverage==4.5.4 8 | Sphinx==1.8.5 9 | twine==1.14.0 10 | pytest==6.2.4 11 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [metadata] 2 | name = stac_ipyleaflet 3 | version=0.3.6 4 | author = Aimee Barciauskas 5 | description=ipyleaflet customized for discovering, visualizing and interacting with STAC data. 
6 | long_description = file: README.md 7 | url = https://github.com/abarciauskas-bgse/stac_ipyleaflet 8 | keywords = stac_ipyleaflet 9 | author_email=aimee@developmentseed.org 10 | license=Apache-2.0 license 11 | classifiers = 12 | Development Status :: 2 - Beta 13 | Intended Audience :: NASA Scientists 14 | License :: OSI Approved :: Apache-2.0 license 15 | Natural Language :: English 16 | Programming Language :: Python :: 3.8 17 | Programming Language :: Python :: 3.9 18 | Programming Language :: Python :: 3.10 19 | 20 | [bumpversion] 21 | current_version = 0.3.6 22 | commit = True 23 | tag = True 24 | 25 | [bumpversion:file:setup.py] 26 | search = version='{current_version}' 27 | replace = version='{new_version}' 28 | 29 | [bumpversion:file:stac_ipyleaflet/__init__.py] 30 | search = __version__ = '{current_version}' 31 | replace = __version__ = '{new_version}' 32 | 33 | [bdist_wheel] 34 | universal = 1 35 | 36 | [flake8] 37 | exclude = docs 38 | [tool:pytest] 39 | collect_ignore = ['setup.py'] 40 | 41 | [options] 42 | packages = find: 43 | include_package_data=True 44 | zip_safe=False 45 | tests_require=[pytest>=3] 46 | install_requires = 47 | ipyleaflet>=0.17.2 48 | ipywidgets>=8.0.4 49 | ipyevents 50 | matplotlib 51 | pydantic 52 | shapely 53 | rasterio 54 | requests 55 | rio_tiler 56 | rioxarray 57 | xarray 58 | python-dotenv 59 | pystac_client==0.6.1 60 | 61 | [options.extras_require] 62 | test = 63 | pytest 64 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | """The setup script.""" 4 | 5 | from setuptools import setup 6 | 7 | if __name__ == "__main__": 8 | setup() 9 | -------------------------------------------------------------------------------- /stac_ipyleaflet/__init__.py: -------------------------------------------------------------------------------- 1 | from stac_ipyleaflet.core import * 2 | -------------------------------------------------------------------------------- /stac_ipyleaflet/constants.py: -------------------------------------------------------------------------------- 1 | from dotenv import load_dotenv 2 | import json 3 | import os 4 | 5 | load_dotenv() 6 | 7 | REQUEST_TIMEOUT = 3 # three second timeout 8 | RESCALE = "0,50" 9 | TITILER_ENDPOINT = os.getenv("TITILER_ENDPOINT") 10 | TITILER_STAC_ENDPOINT = os.getenv("TITILER_STAC_ENDPOINT") 11 | STAC_CATALOG = {"name": os.getenv("STAC_CATALOG_NAME"), "url": os.getenv("STAC_CATALOG_URL")} 12 | STAC_BROWSER_URL = os.getenv("STAC_BROWSER_URL") 13 | -------------------------------------------------------------------------------- /stac_ipyleaflet/core.py: -------------------------------------------------------------------------------- 1 | """Main module.""" 2 | from ipyleaflet import Map, Popup, TileLayer, WidgetControl 3 | from IPython.display import display 4 | from ipywidgets import Box, HBox, VBox, Layout, SelectionSlider, HTML, IntSlider, Image 5 | from ipywidgets import Checkbox, Dropdown, Tab, ToggleButton, Button 6 | from ipywidgets import HTML, Output, jslink 7 | import logging 8 | import matplotlib.pyplot as plt 9 | from rio_tiler.io import Reader 10 | from rio_tiler.mosaic import mosaic_reader 11 | from rio_tiler.models import ImageData 12 | from shapely.geometry import Polygon 13 | import xarray as xr 14 | 15 | from stac_ipyleaflet.constants import ( 16 | STAC_CATALOG, 17 | TITILER_STAC_ENDPOINT, 18 | TITILER_ENDPOINT, 19 | ) 20 | from 
stac_ipyleaflet.stac_discovery.stac_widget import StacDiscoveryWidget 21 | from stac_ipyleaflet.widgets.basemaps import BasemapsWidget 22 | from stac_ipyleaflet.utilities.helpers import add_layers_options 23 | 24 | 25 | class StacIpyleaflet(Map): 26 | histogram_layer: Popup 27 | warning_layer: Popup = None 28 | loading_widget_layer: Popup = None 29 | bbox_centroid: list = [] 30 | 31 | titiler_stac_endpoint = TITILER_STAC_ENDPOINT 32 | 33 | def __init__(self, **kwargs): 34 | from stac_ipyleaflet.widgets.inspect import InspectControlWidget 35 | from stac_ipyleaflet.widgets.draw import DrawControlWidget 36 | 37 | if "center" not in kwargs: 38 | kwargs["center"] = [20, 0] 39 | 40 | if "zoom" not in kwargs: 41 | kwargs["zoom"] = 4 42 | 43 | if "layout" not in kwargs: 44 | kwargs["layout"] = Layout(height="600px") 45 | 46 | if "scroll_wheel_zoom" not in kwargs: 47 | kwargs["scroll_wheel_zoom"] = True 48 | 49 | # Create map 50 | super().__init__(**kwargs) 51 | 52 | self.accent_color = "SteelBlue" 53 | self.layers = BasemapsWidget.template(self) 54 | 55 | self.buttons = {} 56 | self.selected_data = [] 57 | self.histogram_layer = None 58 | self.draw_control_added = False 59 | self.point_control_added = False 60 | self.aoi_coordinates = [] 61 | self.aoi_bbox = () 62 | self.applied_layers = [] 63 | self.inspect_widget = None 64 | self.marker_added = False 65 | 66 | self.create_buttons_layout() 67 | self.check_for_env_vars() 68 | self.add_default_layer_options() 69 | self.add_custom_tools() 70 | 71 | self.point_control = InspectControlWidget.template(self) 72 | self.draw_control = DrawControlWidget.template(self) 73 | return None 74 | 75 | def check_for_env_vars(self): 76 | missing = [] 77 | warning_log = "" 78 | if TITILER_STAC_ENDPOINT is None: 79 | missing.append("TITILER_STAC_ENDPOINT") 80 | warning_log += ( 81 | "os.environ['TITILER_STAC_ENDPOINT']='REPLACE_WITH_TITILER_STAC_URL'\n" 82 | ) 83 | if TITILER_ENDPOINT is None: 84 | missing.append("TITILER_ENDPOINT") 85 | warning_log += "os.environ['TITILER_ENDPOINT']='REPLACE_WITH_TITILER_URL'\n" 86 | if STAC_CATALOG["name"] is None: 87 | missing.append("STAC_CATALOG_NAME") 88 | warning_log += ( 89 | "os.environ['STAC_CATALOG_NAME']='REPLACE_WITH_NAME_OF_STAC_CATALOG'\n" 90 | ) 91 | if STAC_CATALOG["url"] is None: 92 | missing.append("STAC_CATALOG_URL") 93 | warning_log += ( 94 | "os.environ['STAC_CATALOG_URL']='REPLACE_WITH_URL_OF_STAC_CATALOG'\n" 95 | ) 96 | if len(missing) > 0: 97 | logging.warning( 98 | f"Following environment variable(s) are missing {missing} \n To set these environment variables, run this code: \n \n import os \n {warning_log}" 99 | ) 100 | return 101 | 102 | def add_default_layer_options(self): 103 | if TITILER_ENDPOINT is not None and "maap" in TITILER_ENDPOINT: 104 | add_layers_options(self.add_layer, "biomass-layers.csv") 105 | return 106 | 107 | def create_buttons_layout(self): 108 | interact_btn = self.create_widget_button( 109 | buttonId="interact", 110 | toolTipMsg="Interact with the map", 111 | description="Interact", 112 | icon="pencil", 113 | onClick=self.toggle_interact_widget_display, 114 | ) 115 | layers_btn = self.create_widget_button( 116 | buttonId="layers", 117 | toolTipMsg="Open/Close Layers Menu", 118 | description="Layers", 119 | icon="map-o", 120 | onClick=self.toggle_layers_widget_display, 121 | ) 122 | stac_btn = self.create_widget_button( 123 | buttonId="stac", 124 | toolTipMsg="Open/Close STAC Data Search", 125 | description="STAC Data", 126 | icon="search", 127 | 
onClick=self.toggle_stac_widget_display, 128 | ) 129 | 130 | buttons_box_layout = Layout( 131 | display="flex", 132 | flex_flow="row", 133 | align_items="center", 134 | justify_content="center", 135 | width="100%", 136 | height="50px", 137 | ) 138 | buttons_box = HBox( 139 | children=[interact_btn, layers_btn, stac_btn], 140 | layout=buttons_box_layout, 141 | ) 142 | display(buttons_box) 143 | return buttons_box 144 | 145 | def create_widget_button(self, **kwargs): 146 | main_button_layout = Layout( 147 | width="120px", height="35px", border="1px solid #4682B4" 148 | ) 149 | btn = ToggleButton( 150 | description=kwargs["description"], 151 | icon=kwargs["icon"], 152 | layout=main_button_layout, 153 | ) 154 | btn.style.text_color = self.accent_color 155 | btn.style.button_color = "transparent" 156 | btn.tooltip = kwargs["toolTipMsg"] 157 | btn.observe(kwargs["onClick"], type="change", names=["value"]) 158 | btn_id = kwargs["buttonId"] 159 | self.buttons[btn_id] = btn 160 | return btn 161 | 162 | def create_widget_tab(self, **kwargs): 163 | desc = HTML( 164 | value=f"
{kwargs['desc']}
", 165 | ) 166 | data_value = HTML( 167 | value=f"{kwargs['emptyValueState']}", 168 | description="", 169 | ) 170 | 171 | button = Button( 172 | description=kwargs["btnDesc"], 173 | tooltip=kwargs["btnDesc"], 174 | icon="trash", 175 | disabled=True, 176 | ) 177 | 178 | item = HBox( 179 | [ 180 | desc, 181 | data_value, 182 | button, 183 | ] 184 | ) 185 | 186 | item.layout.flex_flow = "column" 187 | return item 188 | 189 | def remove_draw_controls(self): 190 | if self.draw_control_added: 191 | self.remove(self.draw_control) 192 | self.draw_control_added = False 193 | if self.point_control_added: 194 | self.remove(self.point_control) 195 | self.point_control_added = False 196 | return self 197 | 198 | # logic to handle main menu toggle buttons 199 | def toggle_layers_widget_display(self, b): 200 | if b["new"]: 201 | if self.layers_widget.layout.display == "none": 202 | self.layers_widget.layout.display = "block" 203 | self.stac_widget.layout.display = "none" 204 | self.interact_widget.layout.display = "none" 205 | self.buttons["stac"].value = False 206 | self.buttons["interact"].value = False 207 | self.remove_draw_controls() 208 | if not b["new"]: 209 | if self.layers_widget.layout.display == "block": 210 | self.layers_widget.layout.display = "none" 211 | 212 | def toggle_stac_widget_display(self, b): 213 | if b["new"]: 214 | if self.stac_widget.layout.display == "none": 215 | self.stac_widget.layout.display = "block" 216 | self.layers_widget.layout.display = "none" 217 | self.interact_widget.layout.display = "none" 218 | self.buttons["layers"].value = False 219 | self.buttons["interact"].value = False 220 | self.remove_draw_controls 221 | if not b["new"]: 222 | if self.stac_widget.layout.display == "block": 223 | self.stac_widget.layout.display = "none" 224 | 225 | def toggle_interact_widget_display(self, b): 226 | if b["new"]: 227 | if self.interact_widget.layout.display == "none": 228 | self.interact_widget.layout.display = "block" 229 | self.stac_widget.layout.display = "none" 230 | self.layers_widget.layout.display = "none" 231 | self.buttons["stac"].value = False 232 | self.buttons["layers"].value = False 233 | selected_tab = self.interact_widget.children[0].titles[ 234 | self.interact_widget.children[0].selected_index 235 | ] 236 | if selected_tab == "Point": 237 | self.add_control(self.point_control) 238 | self.point_control_added = True 239 | elif selected_tab == "Area": 240 | self.add_control(self.draw_control) 241 | self.draw_control_added = True 242 | if not b["new"]: 243 | if self.interact_widget.layout.display == "block": 244 | self.interact_widget.layout.display = "none" 245 | self.remove_draw_controls() 246 | 247 | @staticmethod 248 | def create_widget_layout(): 249 | widget = Box(style={"max-width: 420px"}) 250 | widget.layout.flex_flow = "column" 251 | widget.layout.max_height = "360px" 252 | widget.layout.overflow = "auto" 253 | return widget 254 | 255 | def create_interact_widget(self): 256 | interact_widget = self.create_widget_layout() 257 | 258 | tab_headers = ["Point", "Area"] 259 | tab_children = [] 260 | tab_widget = Tab() 261 | 262 | out = Output() 263 | display(out) 264 | 265 | def toggle_interact_tab_change(event): 266 | selected_tab = self.interact_widget.children[0].titles[ 267 | event.owner.selected_index 268 | ] 269 | if selected_tab == "Point": # Inspect Control 270 | if self.draw_control_added: 271 | self.remove(self.draw_control) 272 | self.draw_control_added = False 273 | self.add_control(self.point_control) 274 | self.point_control_added = True 275 | 
elif selected_tab == "Area": # Draw Control 276 | if self.point_control_added: 277 | self.remove(self.point_control) 278 | self.point_control_added = False 279 | self.add_control(self.draw_control) 280 | self.draw_control_added = True 281 | 282 | for tab in tab_headers: 283 | tab_content = VBox() 284 | if tab == "Point": 285 | hbox = self.create_widget_tab( 286 | desc="Marker", 287 | emptyValueState="Waiting for points of interest...", 288 | btnDesc="Clear Markers", 289 | ) 290 | tab_content.children = [VBox([hbox])] 291 | tab_children.append(tab_content) 292 | elif tab == "Area": 293 | hbox = self.create_widget_tab( 294 | desc="Polygon", 295 | emptyValueState="Waiting for area of interest...", 296 | btnDesc="Clear AOI Polygon", 297 | ) 298 | tab_content.children = [VBox([hbox])] 299 | tab_children.append(tab_content) 300 | tab_widget.children = tab_children 301 | tab_widget.titles = tab_headers 302 | interact_widget.children = [tab_widget] 303 | interact_widget.layout.display = "none" 304 | tab_widget.observe(toggle_interact_tab_change, names="selected_index") 305 | return interact_widget 306 | 307 | # @NOTE: Possibly move into its own child class file 308 | def create_layers_widget(self): 309 | layers_widget = self.create_widget_layout() 310 | 311 | tab_headers = ["Biomass Layers", "Basemaps"] 312 | tab_children = [] 313 | tab_widget = Tab() 314 | 315 | out = Output() 316 | display(out) 317 | 318 | opacity_values = [i * 10 for i in range(10 + 1)] # [0.001, 0.002, ...] 319 | 320 | def layer_checkbox_changed(change): 321 | layer = next( 322 | (x for x in self.layers if x.name == change.owner.description), None 323 | ) 324 | if change.owner.value: 325 | self.applied_layers.append(layer) 326 | if not change.owner.value: 327 | self.applied_layers.remove(layer) 328 | 329 | def handle_basemap_opacity_change(change): 330 | selected_bm = self.basemap_selection_dd.value 331 | for l in self.layers: 332 | if l.base: 333 | if l.name == selected_bm: 334 | l.opacity = change["new"] / 100 335 | 336 | def handle_layer_opacity_change(change): 337 | selected_layer = change.owner.description 338 | if selected_layer not in self.applied_layers: 339 | return 340 | for l in self.layers: 341 | if l.name == selected_layer: 342 | l.opacity = change["new"] 343 | 344 | for tab in tab_headers: 345 | tab_content = VBox() 346 | listed_layers = [] 347 | # sort layers by name property 348 | layers_in_drawing_order = [l for l in self.layers] 349 | layerlist_layers = sorted( 350 | layers_in_drawing_order, key=lambda x: x.name, reverse=False 351 | ) 352 | if tab == "Biomass Layers": 353 | layers_hbox = [] 354 | for layer in layerlist_layers: 355 | # check if layer name is a basemap 356 | if not layer.base: 357 | layer_checkbox = Checkbox( 358 | value=layer.visible, description=layer.name, indent=False 359 | ) 360 | jslink((layer_checkbox, "value"), (layer, "visible")) 361 | layer_checkbox.observe( 362 | layer_checkbox_changed, names="value", type="change" 363 | ) 364 | hbox = HBox([layer_checkbox]) 365 | layer_opacity_slider = SelectionSlider( 366 | value=1, 367 | options=[("%g" % i, i / 100) for i in opacity_values], 368 | description=layer.name, 369 | continuous_update=False, 370 | orientation="horizontal", 371 | layout=Layout(margin="-12px 0 4px 0"), 372 | ) 373 | layer_opacity_slider.style.description_width = "0px" 374 | layer_opacity_slider.style.handle_color = self.accent_color 375 | layer_opacity_slider.observe( 376 | handle_layer_opacity_change, names="value" 377 | ) 378 | layers_hbox.append(hbox) 379 | 
layers_hbox.append(layer_opacity_slider) 380 | listed_layers.append(layer.name) 381 | tab_content.children = [VBox(layers_hbox)] 382 | tab_children.append(tab_content) 383 | elif tab == "Basemaps": 384 | basemaps = [] 385 | for layer in layerlist_layers: 386 | # check if layer is a basemap 387 | if layer.base: 388 | basemaps.append((f"{layer.name}", f"{layer.name}")) 389 | 390 | def on_change(change): 391 | if change["type"] == "change" and change["name"] == "value": 392 | with out: 393 | out.clear_output() 394 | # print("changed to %s" % change['new']) 395 | for l in self.layers: 396 | if l.base: 397 | if l.name == change["new"]: 398 | l.opacity = basemap_opacity_slider.value / 100 399 | l.visible = True 400 | else: 401 | l.visible = False 402 | return None 403 | 404 | dropdown = Dropdown(options=basemaps, value="Open Street Map") 405 | self.basemap_selection_dd = dropdown 406 | dropdown.observe(on_change) 407 | 408 | basemap_opacity_slider = IntSlider( 409 | value=100, 410 | min=0, 411 | max=100, 412 | step=10, 413 | description="% Opacity:", 414 | # disabled=False, 415 | style={"bar_color": "maroon"}, 416 | continuous_update=False, 417 | orientation="horizontal", 418 | readout=True, 419 | readout_format="d", 420 | ) 421 | 422 | basemap_opacity_slider.style.handle_color = self.accent_color 423 | basemap_opacity_slider.observe( 424 | handle_basemap_opacity_change, names="value" 425 | ) 426 | tab_content.children = [dropdown, basemap_opacity_slider] 427 | tab_children.append(tab_content) 428 | 429 | tab_widget.children = tab_children 430 | tab_widget.titles = tab_headers 431 | print(tab_widget.box_style) 432 | layers_widget.children = [tab_widget] 433 | layers_widget.layout.display = "none" 434 | return layers_widget 435 | 436 | def add_custom_tools(self): 437 | # Create custom map widgets 438 | self.layers_widget = self.create_layers_widget() 439 | self.stac_widget = StacDiscoveryWidget.template(self) 440 | self.interact_widget = self.create_interact_widget() 441 | 442 | layers_widget = VBox([self.layers_widget]) 443 | stac_widget = VBox([self.stac_widget]) 444 | interact_widget = VBox([self.interact_widget]) 445 | 446 | layers_control = WidgetControl( 447 | widget=layers_widget, position="topright", id="layers_widget" 448 | ) 449 | stack_control = WidgetControl( 450 | widget=stac_widget, position="topright", id="stac_widget" 451 | ) 452 | interact_control = WidgetControl( 453 | widget=interact_widget, position="topright", id="interact_widget" 454 | ) 455 | 456 | self.add(layers_control) 457 | self.add(stack_control) 458 | self.add(interact_control) 459 | 460 | def find_layer(self, name: str): 461 | layers = self.layers 462 | for layer in layers: 463 | if layer.name == name: 464 | return layer 465 | return None 466 | 467 | def add_layer(self, layer): 468 | existing_layer = self.find_layer(layer.name) 469 | if existing_layer is not None: 470 | self.remove_layer(existing_layer) 471 | super().add_layer(layer) 472 | 473 | def add_tile_layer( 474 | self, 475 | url, 476 | name, 477 | attribution, 478 | opacity=1.0, 479 | shown=True, 480 | **kwargs, 481 | ): 482 | """Adds a TileLayer to the map. 483 | Args: 484 | url (str): The URL of the tile layer. 485 | name (str): The layer name to use for the layer. 486 | attribution (str): The attribution to use. 487 | opacity (float, optional): The opacity of the layer. Defaults to 1. 488 | shown (bool, optional): A flag indicating whether the layer should be on by default. Defaults to True. 
489 | """ 490 | if "max_zoom" not in kwargs: 491 | kwargs["max_zoom"] = 100 492 | if "max_native_zoom" not in kwargs: 493 | kwargs["max_native_zoom"] = 100 494 | try: 495 | tile_layer = TileLayer( 496 | url=url, 497 | name=name, 498 | attribution=attribution, 499 | opacity=opacity, 500 | visible=shown, 501 | **kwargs, 502 | ) 503 | self.add_layer(tile_layer) 504 | return tile_layer 505 | 506 | except Exception as e: 507 | logging.error("Failed to add the specified TileLayer.") 508 | raise Exception(e) 509 | 510 | """ def gen_mosaic_dataset_reader(self, assets, bounds): 511 | # see https://github.com/cogeotiff/rio-tiler/blob/main/rio_tiler/io/rasterio.py#L368-L380 512 | def _part_read(src_path: str, *args, **kwargs) -> ImageData: 513 | with Reader(src_path) as src: 514 | # src.part((minx, miny, maxx, maxy), **kwargs) 515 | return src.part(bounds, *args, **kwargs) 516 | # mosaic_reader will use multithreading to distribute the image fetching 517 | # and then merge all arrays together 518 | # Vincent: This will not work if the image do not have the same resolution (because we won't be able to overlay them). 519 | # If you know the resolution you want to use you can use width=.., height=.. instead of max_size=512 (it will ensure you create the same array size for all the images. 520 | # change the max_size to make it faster/slower 521 | # TODO(aimee): make this configurable 522 | img, _ = mosaic_reader(assets, reader=_part_read, max_size=512) 523 | 524 | # create Masked Array from ImageData 525 | data = img.as_masked() 526 | # Avoid non-masked nan/inf values 527 | numpy.ma.fix_invalid(data, copy=False) 528 | # TODO(aimee): determine if this might help for creating the histograms quickly 529 | # hist = {} 530 | # for ii, b in enumerate(img.count): 531 | # h_counts, h_keys = numpy.histogram(data[b].compressed()) 532 | # hist[f"b{ii + 1}"] = [h_counts.tolist(), h_keys.tolist()] 533 | return xr.DataArray(data) """ 534 | 535 | """ def update_selected_data(self): 536 | layers = self.layers 537 | # TODO(aimee): if geometry hasn't changed and a previously selected layer is still selected, don't re-fetch it. 538 | self.selected_data = [] 539 | visible_layers = [layer for layer in layers if type(layer) == TileLayer and layer.visible and not layer.base] 540 | geometries = [self.draw_control.last_draw['geometry']] 541 | if geometries[0]: 542 | box = Polygon(geometries[0]['coordinates'][0]) 543 | # https://shapely.readthedocs.io/en/latest/reference/shapely.bounds.html?highlight=bounds#shapely.bounds 544 | # For geometries these 4 numbers are returned: min x, min y, max x, max y. 
545 | bounds = box.bounds 546 | self.bbox_centroid = [box.centroid.y, box.centroid.x] 547 | 548 | if len(visible_layers) !=0: 549 | self.loading_widget_layer.location = self.bbox_centroid 550 | if self.loading_widget_layer not in self.layers: 551 | self.add_layer(self.loading_widget_layer) 552 | else: 553 | self.loading_widget_layer.open_popup() 554 | 555 | for layer in visible_layers: 556 | layer_url = layer.url 557 | ds = None 558 | title = layer.name.replace('_', ' ').upper() 559 | match = re.search('url=(.+.tif)', layer_url) 560 | if match and match.group(1): 561 | s3_url = match.group(1) 562 | xds = rioxarray.open_rasterio(s3_url) 563 | # Slice into `y` using slice(maxy, miny) because 564 | # `y` will be high to low typically because origin = upper left corner 565 | # Aimee(TODO): Check the assumption (origin = upper left corner) 566 | ds = xds.sel(x=slice(bounds[0], bounds[2]), y=slice(bounds[3], bounds[1])) 567 | else: 568 | uuid_pattern = r'([a-f\d]{8}-[a-f\d]{4}-[a-f\d]{4}-[a-f\d]{4}-[a-f\d]{12})' 569 | match = re.search(f"({titiler_endpoint}/mosaics/{uuid_pattern})/tiles", layer_url) 570 | if match: 571 | mosaic_url = match.groups()[0] 572 | # From titiler docs http://titiler.maap-project.org/docs 573 | # /{minx},{miny},{maxx},{maxy}/assets 574 | str_bounds = f"{bounds[0]},{bounds[1]},{bounds[2]},{bounds[3]}" 575 | assets_endpoint = f"{self.titiler_stac_endpoint}/mosaicjson/{str_bounds}/assets?url={mosaic_url}/mosaicjson" 576 | # create a dataset from multiple COGs 577 | assets_response = requests.get(assets_endpoint) 578 | if assets_response.status_code == 200: 579 | assets = assets_response.json() 580 | ds = self.gen_mosaic_dataset_reader(assets, bounds) 581 | if ds.any(): 582 | ds.attrs["title"] = title 583 | self.selected_data.append(ds) 584 | return self.selected_data """ 585 | 586 | """ def error_message(self, msg): 587 | out = Output() 588 | with out: 589 | print(msg) 590 | self.gen_popup_icon(msg) 591 | display() 592 | return """ 593 | 594 | # TODO(aimee): if you try and create a histogram for more than one layer, it creates duplicates in the popup 595 | """ def create_histograms(self, b): 596 | print(self, b) 597 | if self.histogram_layer in self.layers: 598 | self.remove_layer(self.histogram_layer) 599 | # TODO(aimee): make this configurable 600 | minx, maxx = [0, 500] 601 | plot_args = {"range": (minx, maxx)} 602 | fig = plt.figure() 603 | hist_widget = VBox() 604 | try: 605 | self.update_selected_data() 606 | except Exception as e: 607 | return self.error_message(e) 608 | 609 | if len(self.selected_data) == 0: 610 | return self.error_message("No data or bounding box selected.") 611 | else: 612 | for idx, dataset in enumerate(self.selected_data): 613 | axes = fig.add_subplot(int(f"22{idx+1}")) 614 | plot_args['ax'] = axes 615 | # create a histogram 616 | out = Output() 617 | with out: 618 | out.clear_output() 619 | try: 620 | dataset.plot.hist(**plot_args) 621 | except Exception as err: 622 | self.remove_layer(self.loading_widget_layer) 623 | self.gen_popup_icon(f"Error: {err}") 624 | return 625 | axes.set_title(dataset.attrs['title']) 626 | display(fig) 627 | 628 | hist_widget.children = [out] 629 | hist_location = self.bbox_centroid or self.center 630 | histogram_layer = Popup(child=hist_widget, location=hist_location, min_width=500, min_height=300) 631 | self.histogram_layer = histogram_layer 632 | self.remove_layer(self.loading_widget_layer) 633 | self.add_layer(histogram_layer) 634 | return None """ 635 | 636 | # generates warning/error popup 637 | """ def 
gen_popup_icon(self, msg): 638 | warning_msg = HTML() 639 | warning_msg.value=f"{msg}" 640 | popup_warning = Popup(location=self.bbox_centroid or self.center, draggable=True, child=warning_msg) 641 | self.warning_layer=popup_warning 642 | self.add_layer(popup_warning) """ 643 | -------------------------------------------------------------------------------- /stac_ipyleaflet/stac_discovery/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MAAP-Project/stac_ipyleaflet/a880a19ad8eafff605ca2c8895f07032733c6034/stac_ipyleaflet/stac_discovery/__init__.py -------------------------------------------------------------------------------- /stac_ipyleaflet/stac_discovery/catalogs/nasa_maap_stac.py: -------------------------------------------------------------------------------- 1 | import json 2 | import os 3 | import pandas as pd 4 | from pystac_client import Client 5 | 6 | out_dir = "catalogs" 7 | 8 | url = "https://stac.maap-project.org/" 9 | 10 | root = Client.open(url, headers=[]) 11 | 12 | collections = root.get_all_collections() 13 | 14 | output_collections = [] 15 | 16 | for collection in collections: 17 | try: 18 | data = collection.to_dict() 19 | print(data["id"]) 20 | collection_obj = {} 21 | output = out_dir + "/" + data["id"].replace("/", "_") + ".json" 22 | if not os.path.exists(os.path.dirname(output)): 23 | os.makedirs(os.path.dirname(output)) 24 | 25 | collection_obj["id"] = data["id"].strip() 26 | 27 | collection_obj["title"] = data["title"].strip() 28 | 29 | start_date = data["extent"]["temporal"]["interval"][0][0] 30 | end_date = data["extent"]["temporal"]["interval"][0][1] 31 | 32 | if start_date is not None: 33 | collection_obj["start_date"] = start_date.split("T")[0] 34 | else: 35 | collection_obj["start_date"] = "" 36 | 37 | if end_date is not None: 38 | collection_obj["end_date"] = end_date.split("T")[0] 39 | else: 40 | collection_obj["end_date"] = "" 41 | collection_obj["bbox"] = ", ".join( 42 | [str(coord) for coord in data["extent"]["spatial"]["bbox"][0]] 43 | ) 44 | 45 | # url = "" 46 | metadata = "" 47 | href = "" 48 | 49 | for l in data["links"]: 50 | if l["rel"] == "about": 51 | metadata = l["href"] 52 | if l["rel"] == "self": 53 | href = l["href"] 54 | # if l["rel"] == "via": 55 | # url = l["href"] 56 | 57 | # collection_obj["url"] = url 58 | collection_obj["metadata"] = metadata 59 | collection_obj["href"] = href 60 | 61 | collection_obj["description"] = ( 62 | data["description"] 63 | .replace("\n", " ") 64 | .replace("\r", " ") 65 | .replace("\\u", " ") 66 | .replace(" ", " ") 67 | ) 68 | 69 | collection_obj["license"] = data["license"] 70 | 71 | output_collections.append(collection_obj) 72 | except Exception as e: 73 | print("Error: ", collection) 74 | print(e) 75 | 76 | print("Total collections: ", len(output_collections)) 77 | 78 | print() 79 | df = pd.DataFrame(output_collections) 80 | df.sort_values(by=["id"], inplace=True) 81 | df.drop(columns=["metadata"]).to_csv("nasa_maap_stac.tsv", index=False, sep="\t") 82 | 83 | with open("nasa_maap_stac.json", "w") as f: 84 | json.dump(df.to_dict("records"), f, indent=4) 85 | -------------------------------------------------------------------------------- /stac_ipyleaflet/stac_discovery/stac.py: -------------------------------------------------------------------------------- 1 | # [SOME] code taken from https://github.com/giswqs/leafmap/blob/master/leafmap/stac.py 2 | import logging 3 | from pystac_client import ItemSearch 4 | from 
stac_ipyleaflet.utilities.helpers import make_get_request 5 | from stac_ipyleaflet.constants import RESCALE 6 | from typing import TypedDict, Optional 7 | 8 | class OutputCollectionObj(TypedDict): 9 | id: str 10 | title: str 11 | has_cog: bool 12 | start_date: str 13 | end_date: str 14 | bbox: str 15 | metadata: Optional[str] 16 | href: Optional[str] 17 | description: str 18 | license: str 19 | 20 | 21 | class Stac: 22 | @staticmethod 23 | def organize_collections(collections=[]): 24 | output_collections = [] 25 | bad_collections = [] 26 | try: 27 | for collection in collections: 28 | try: 29 | data = collection.to_dict() 30 | has_cog = True if data["item_assets"] else False 31 | id = data["id"].strip() 32 | title = data["title"].strip() 33 | start_date = data["extent"]["temporal"]["interval"][0][0] 34 | end_date = data["extent"]["temporal"]["interval"][0][1] 35 | 36 | if start_date is not None: 37 | start_date = start_date.split("T")[0] 38 | else: 39 | start_date = "" 40 | 41 | if end_date is not None: 42 | end_date = end_date.split("T")[0] 43 | else: 44 | end_date = "" 45 | 46 | if (bbox := collection.extent.spatial.bboxes): 47 | bbox = ", ".join( 48 | [str(coord) for coord in bbox[0]] 49 | ) 50 | metadata = None 51 | href = None 52 | for l in data["links"]: 53 | if l["rel"] == "about": 54 | metadata = l["href"] 55 | if l["rel"] == "self": 56 | href = l["href"] 57 | 58 | description = ( 59 | data["description"] 60 | .replace("\n", " ") 61 | .replace("\r", " ") 62 | .replace("\\u", " ") 63 | .replace(" ", " ") 64 | ) 65 | 66 | license = data["license"] 67 | 68 | collection_obj = OutputCollectionObj( 69 | { 70 | "id": id, 71 | "title": title, 72 | "has_cog": has_cog, 73 | "start_date": start_date, 74 | "end_date": end_date, 75 | "bbox": bbox, 76 | "metadata": metadata, 77 | "href": href, 78 | "description": description, 79 | "license": license, 80 | } 81 | ) 82 | output_collections.append(collection_obj) 83 | else: 84 | bad_collections.append(collection.id) 85 | except Exception as err: 86 | error = {"message": "Error caught with collection", "error": err, "collection": collection.to_dict()} 87 | logging.error(error) 88 | except Exception as err: 89 | error = {"message": "Error caught when organizing collections", "error": err} 90 | # @TODO: We should report these errors to some type of monitoring service for us to debug 91 | if len(output_collections) > 0: 92 | output_collections.sort(key=lambda x: x["title"]) 93 | return output_collections, bad_collections 94 | 95 | @staticmethod 96 | def get_item_info(url=None, **kwargs): 97 | """Get INFO of a single SpatialTemporal Asset Catalog (STAC) **COG** item. 98 | Args: 99 | url (str): HTTP URL to a STAC item 100 | Returns: 101 | json: Response with Item info. 102 | """ 103 | if url is None: 104 | raise ValueError("Item url must be specified to get stac_bands") 105 | 106 | if isinstance(url, str): 107 | r = make_get_request(url).json() 108 | 109 | return r 110 | 111 | @staticmethod 112 | def stac_tile( 113 | url=None, 114 | collection=None, 115 | item=None, 116 | assets=None, 117 | bands=None, 118 | titiler_stac_endpoint=None, 119 | **kwargs, 120 | ): 121 | """Get a tile layer from a single SpatialTemporal Asset Catalog (STAC) item. 122 | Args: 123 | url (str): HTTP URL to a STAC item 124 | collection (str): STAC collection ID, e.g., landsat-8-c2-l2. 125 | item (str): STAC item ID, e.g., LC08_L2SP_047027_20201204_02_T1. 126 | assets (str | list): STAC asset ID, e.g., ["SR_B7", "SR_B5", "SR_B4"]. 
127 | bands (list): A list of band names, e.g., ["SR_B7", "SR_B5", "SR_B4"] 128 | titiler_stac_endpoint (str, optional): Titiler endpoint, Defaults to None. 129 | Returns: 130 | str: Returns the STAC Tile layer URL. 131 | """ 132 | if url is None and collection is None: 133 | raise ValueError("Either url or collection must be specified. stac_tile") 134 | 135 | kwargs["rescale"] = RESCALE 136 | 137 | if url is not None: 138 | kwargs["url"] = url 139 | if collection is not None: 140 | kwargs["collection"] = collection 141 | if item is not None: 142 | kwargs["item"] = item 143 | 144 | if "palette" in kwargs: 145 | kwargs["colormap_name"] = kwargs["palette"].lower() 146 | del kwargs["palette"] 147 | 148 | if isinstance(bands, list) and len(set(bands)) == 1: 149 | bands = bands[0] 150 | 151 | if isinstance(assets, list) and len(set(assets)) == 1: 152 | assets = assets[0] 153 | 154 | if isinstance(bands, str): 155 | bands = bands.split(",") 156 | if isinstance(assets, str): 157 | assets = assets.split(",") 158 | 159 | kwargs["assets"] = assets 160 | 161 | TileMatrixSetId = "WebMercatorQuad" 162 | if "TileMatrixSetId" in kwargs.keys(): 163 | TileMatrixSetId = kwargs["TileMatrixSetId"] 164 | kwargs.pop("TileMatrixSetId") 165 | 166 | if isinstance(titiler_stac_endpoint, str): 167 | r = make_get_request( 168 | f"{titiler_stac_endpoint}/stac/{TileMatrixSetId}/tilejson.json", kwargs 169 | ).json() 170 | else: 171 | r = make_get_request( 172 | titiler_stac_endpoint.url_for_stac_item(), kwargs 173 | ).json() 174 | return r["tiles"][0] 175 | 176 | @staticmethod 177 | def stac_bounds( 178 | url=None, collection=None, item=None, titiler_stac_endpoint=None, **kwargs 179 | ): 180 | """Get the bounding box of a single SpatialTemporal Asset Catalog (STAC) item. 181 | Args: 182 | url (str): HTTP URL to a STAC item 183 | collection (str): STAC collection ID, e.g., landsat-8-c2-l2. 184 | item (str): STAC item ID, e.g., LC08_L2SP_047027_20201204_02_T1. 185 | titiler_stac_endpoint (str, optional): Titiler endpoint, Defaults to None. 186 | Returns: 187 | list: A list of values representing [left, bottom, right, top] 188 | """ 189 | if url is None and collection is None: 190 | raise ValueError("Either url or collection must be specified.") 191 | 192 | if url is not None: 193 | kwargs["url"] = url 194 | if collection is not None: 195 | kwargs["collection"] = collection 196 | if item is not None: 197 | kwargs["item"] = item 198 | 199 | if isinstance(titiler_stac_endpoint, str): 200 | r = make_get_request(f"{titiler_stac_endpoint}/stac/bounds", kwargs).json() 201 | else: 202 | r = make_get_request( 203 | titiler_stac_endpoint.url_for_stac_bounds(), kwargs 204 | ).json() 205 | 206 | bounds = r["bounds"] 207 | return bounds 208 | 209 | def add_stac_layer( 210 | self, 211 | url=None, 212 | collection=None, 213 | item=None, 214 | assets=None, 215 | bands=None, 216 | titiler_stac_endpoint=None, 217 | # name="STAC Layer", 218 | # attribution="", 219 | # opacity=1.0, 220 | # shown=True, 221 | **kwargs, 222 | ): 223 | """Adds a STAC TileLayer to the map. 224 | Args: 225 | url (str): HTTP URL to a STAC item 226 | collection (str): STAC collection ID, e.g., landsat-8-c2-l2. 227 | item (str): STAC item ID, e.g., LC08_L2SP_047027_20201204_02_T1. 228 | assets (str | list): STAC asset ID, e.g., ["SR_B7", "SR_B5", "SR_B4"]. 229 | bands (list): A list of band names, e.g., ["SR_B7", "SR_B5", "SR_B4"] 230 | titiler_stac_endpoint (str, optional): Titiler endpoint, Defaults to None. 
231 | name (str, optional): The layer name to use for the layer. Defaults to 'STAC Layer'. 232 | attribution (str, optional): The attribution to use. Defaults to ''. 233 | opacity (float, optional): The opacity of the layer. Defaults to 1. 234 | shown (bool, optional): A flag indicating whether the layer should be on by default. Defaults to True. 235 | """ 236 | tile_url = Stac.stac_tile( 237 | url, collection, item, assets, bands, titiler_stac_endpoint, **kwargs 238 | ) 239 | return tile_url 240 | # bounds = Stac.stac_bounds(url, collection, item, titiler_stac_endpoint) 241 | # self.add_tile_layer(tile_url, name, attribution, opacity, shown) 242 | # self.fit_bounds([[bounds[1], bounds[0]], [bounds[3], bounds[2]]]) 243 | 244 | # if not hasattr(self, "cog_layer_dict"): 245 | # self.cog_layer_dict = {} 246 | 247 | # if assets is None and bands is not None: 248 | # assets = bands 249 | 250 | # params = { 251 | # "url": url, 252 | # "collection": collection, 253 | # "item": item, 254 | # "assets": assets, 255 | # "bounds": bounds, 256 | # "titiler_stac_endpoint": self.titiler_stac_endpoint, 257 | # "type": "STAC", 258 | # } 259 | 260 | # self.cog_layer_dict[name] = params 261 | 262 | @staticmethod 263 | def set_default_bands(bands): 264 | if len(bands) == 0: 265 | return [None] 266 | 267 | if isinstance(bands, str): 268 | bands = [bands] 269 | 270 | if len(bands) == 1: 271 | return bands 272 | 273 | if not isinstance(bands, list): 274 | raise ValueError("bands must be a list or a string.") 275 | 276 | @staticmethod 277 | def stac_search( 278 | url, 279 | method="GET", 280 | max_items=None, 281 | limit=100, 282 | ids=None, 283 | bbox=None, 284 | intersects=None, 285 | datetime=None, 286 | query=None, 287 | filter=None, 288 | filter_lang=None, 289 | sortby=None, 290 | fields=None, 291 | get_info=False, 292 | **kwargs, 293 | ): 294 | if isinstance(intersects, dict) and "geometry" in intersects: 295 | intersects = intersects["geometry"] 296 | 297 | search = ItemSearch( 298 | url=url, 299 | method=method, 300 | max_items=max_items, 301 | limit=limit, 302 | ids=ids, 303 | bbox=bbox, 304 | intersects=intersects, 305 | datetime=datetime, 306 | query=query, 307 | filter=filter, 308 | filter_lang=filter_lang, 309 | sortby=sortby, 310 | fields=fields, 311 | ) 312 | if get_info: 313 | items = list(search.item_collection()) 314 | info = {} 315 | for item in items: 316 | info[item.id] = { 317 | "id": item.id, 318 | "href": item.get_self_href(), 319 | "bands": list(item.get_assets().keys()), 320 | "assets": item.get_assets(), 321 | } 322 | return info 323 | else: 324 | return search 325 | 326 | @staticmethod 327 | def get_metadata( 328 | data_type="cog", 329 | titiler_stac_endpoint=None, 330 | url=None, 331 | max_size=None, 332 | **kwargs, 333 | ): 334 | if url is not None: 335 | kwargs["url"] = url 336 | if max_size is not None: 337 | kwargs["max_size"] = max_size 338 | 339 | if isinstance(titiler_stac_endpoint, str): 340 | r = make_get_request( 341 | f"{titiler_stac_endpoint}/{data_type}/metadata", kwargs 342 | ).json() 343 | return r 344 | else: 345 | return "Cannot process request: titiler stac endpoint not provided." 346 | 347 | @staticmethod 348 | def get_tile_url( 349 | data_type="cog", 350 | url=None, 351 | collection=None, 352 | item=None, 353 | assets=None, 354 | bands=None, 355 | palette=None, 356 | titiler_stac_endpoint=None, 357 | **kwargs, 358 | ): 359 | """Get a tile layer url from a single SpatialTemporal Asset Catalog (STAC) item. 
360 | Args: 361 | url (str): HTTP URL to a STAC item 362 | collection (str): STAC collection ID, e.g., landsat-8-c2-l2. 363 | item (str): STAC item ID, e.g., LC08_L2SP_047027_20201204_02_T1. 364 | assets (str | list): STAC asset ID, e.g., ["SR_B7", "SR_B5", "SR_B4"]. 365 | bands (list): A list of band names, e.g., ["SR_B7", "SR_B5", "SR_B4"] 366 | titiler_stac_endpoint (str, optional): Titiler endpoint, Defaults to None. 367 | Returns: 368 | str: Returns the STAC Tile layer URL. 369 | """ 370 | if url is None and collection is None: 371 | raise ValueError("url and collection must be specified. stac_tile") 372 | 373 | kwargs["rescale"] = RESCALE 374 | 375 | if url is not None: 376 | kwargs["url"] = url 377 | if collection is not None: 378 | kwargs["collection"] = collection 379 | if item is not None: 380 | kwargs["item"] = item 381 | 382 | if palette is not None: 383 | # kwargs["colormap_name"] = kwargs["palette"].lower() 384 | kwargs["colormap_name"] = palette 385 | # del kwargs["palette"] 386 | 387 | if isinstance(bands, list) and len(set(bands)) == 1: 388 | bands = bands[0] 389 | 390 | if isinstance(assets, list) and len(set(assets)) == 1: 391 | assets = assets[0] 392 | 393 | if isinstance(bands, str): 394 | bands = bands.split(",") 395 | if isinstance(assets, str): 396 | assets = assets.split(",") 397 | 398 | kwargs["assets"] = assets 399 | 400 | TileMatrixSetId = "WebMercatorQuad" 401 | if "TileMatrixSetId" in kwargs.keys(): 402 | TileMatrixSetId = kwargs["TileMatrixSetId"] 403 | kwargs.pop("TileMatrixSetId") 404 | 405 | if isinstance(titiler_stac_endpoint, str): 406 | r = make_get_request( 407 | f"{titiler_stac_endpoint}/{data_type}/{TileMatrixSetId}/tilejson.json", 408 | kwargs, 409 | ).json() 410 | return r 411 | else: 412 | return "STAC ENDPOINT IS NECESSARY." 413 | -------------------------------------------------------------------------------- /stac_ipyleaflet/stac_discovery/stac_widget.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime 2 | from ipywidgets import Box, Checkbox, DatePicker, Dropdown, HBox, HTML 3 | from ipywidgets import ( 4 | Label, 5 | Layout, 6 | Output, 7 | RadioButtons, 8 | SelectionSlider, 9 | Tab, 10 | ToggleButtons, 11 | VBox, 12 | ) 13 | import logging 14 | from pystac_client import Client 15 | from stac_ipyleaflet.constants import STAC_BROWSER_URL, STAC_CATALOG, TITILER_ENDPOINT 16 | from stac_ipyleaflet.stac_discovery.stac import Stac 17 | 18 | 19 | class StacDiscoveryWidget: 20 | def template(self) -> Box(style={"max_height: 200px"}): 21 | opacity_values = [i * 10 for i in range(10 + 1)] # [0.001, 0.002, ...] 
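# Note: opacity_values holds whole-percent steps (0, 10, 20, ..., 100); the
# "% Opacity" SelectionSlider defined further below converts each step to a
# fraction via i / 100, so the slider ultimately exposes opacities from 0.0 to 1.0.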
22 | standard_width = "440px" 23 | styles = { 24 | "init": { 25 | "description_width": "initial", 26 | }, 27 | "desc": "white-space:normal;font-size:smaller; max-height:80px;", 28 | "label": "font-weight:bold;", 29 | } 30 | layouts = { 31 | "default": Layout(width=standard_width, padding="2px 6px"), 32 | "checkbox": Layout(width="auto", padding="2px 0px 2px 6px"), 33 | "header": Layout( 34 | width=standard_width, padding="2px 6px", margin="2px 2px -6px 2px" 35 | ), 36 | "subtitle": Layout( 37 | width=standard_width, padding="0px 0px", margin="-12px 2px 2px 34px" 38 | ), 39 | "buttons": Layout( 40 | display="flex", 41 | flex_flow="row", 42 | align_items="flex-end", 43 | justify_content="flex-end", 44 | margin="0.5rem 1.5rem", 45 | ), 46 | "radio": Layout(display="flex", width="max-content", padding="2px 6px"), 47 | } 48 | 49 | output = Output( 50 | layout=Layout( 51 | width=standard_width, 52 | height="200px", 53 | padding="4px 8px 4px 8px", 54 | overflow="auto", 55 | ) 56 | ) 57 | 58 | # Templates for the STAC Discovery Widget 59 | stac_widget = VBox() 60 | stac_widget.layout.width = "480px" 61 | stac_widget.layout.height = "400px" 62 | stac_widget.layout.flex_flow = "column" 63 | stac_widget.layout.overflow = "auto" 64 | 65 | stac_catalogs = [STAC_CATALOG] 66 | # make list of name values from stac_catalogs 67 | catalog_options = sorted([c["name"] for c in stac_catalogs]) 68 | for cat in stac_catalogs: 69 | stac_client = Client.open(cat["url"], headers=[]) 70 | collections_object = stac_client.get_all_collections() 71 | collections, bad_collections = Stac.organize_collections(collections_object) 72 | cat["collections"] = collections if collections is not None else [] 73 | # set default catalog based on name 74 | selected_catalog = [ 75 | cat for cat in stac_catalogs if cat["name"] == STAC_CATALOG["name"] 76 | ][0] 77 | if "collections" not in selected_catalog: 78 | logging.warn("NO COLLECTIONS FOUND") 79 | # @TODO: Currently if this was the case, this app would break, we need to move all collections logic into its on fn and then apply this logic conditionally 80 | collections_filter_checkbox = Checkbox( 81 | value=True, layout=layouts["checkbox"], indent=False 82 | ) 83 | 84 | # @TODO: This selected collections logic should be broken out into its own function for cleaner code 85 | # available collections have been tagged as `has_cog: True` when reviewing item_assets 86 | def get_available_collections(catalog): 87 | if collections_filter_checkbox.value: 88 | return sorted( 89 | [c for c in catalog["collections"] if c["has_cog"]], 90 | key=lambda c: c["id"], 91 | ) 92 | else: 93 | return sorted( 94 | [c for c in catalog["collections"]], key=lambda c: c["id"] 95 | ) 96 | 97 | selected_collection = None 98 | selected_collection_options = [] 99 | if ( 100 | "collections" in selected_catalog 101 | and len(selected_catalog["collections"]) > 0 102 | ): 103 | selected_collection_options = get_available_collections( 104 | catalog=selected_catalog 105 | ) 106 | selected_collection = selected_collection_options[0] 107 | self.stac_data = { 108 | "catalog": selected_catalog, 109 | "collection": selected_collection, 110 | "items": [], 111 | "layer_added": False, 112 | } 113 | 114 | # STAC Widget Items 115 | catalogs_dropdown = Dropdown( 116 | options=catalog_options, 117 | value=self.stac_data["catalog"]["name"], 118 | style=styles["init"], 119 | layout=layouts["default"], 120 | disabled=True, 121 | ) 122 | catalogs_box = VBox( 123 | [ 124 | HTML( 125 | value="Catalog", 126 | style=styles["init"], 127 | 
layout=layouts["header"], 128 | ), 129 | catalogs_dropdown, 130 | ] 131 | ) 132 | 133 | # @TODO: We need to come back here and isolate the collections logic to also make it easier to conditionally show 134 | collections_dropdown = None 135 | collections_box = None 136 | collection_description_box = None 137 | collection_url_box = None 138 | collection_dates_box = None 139 | bad_collections_msg = None 140 | if ( 141 | len(selected_collection_options) > 0 142 | or self.stac_data["collection"] is not None 143 | ): 144 | collections_dropdown = Dropdown( 145 | options=[c["id"] for c in selected_collection_options] 146 | if len(selected_collection_options) > 0 147 | else [], 148 | value=self.stac_data["collection"]["id"], 149 | style=styles["init"], 150 | layout=layouts["default"], 151 | ) 152 | collections_filter_label = HTML(value="Only Show Displayable Items") 153 | collections_filter_desc = HTML( 154 | value="Currently, only Cloud-Optimized GeoTiffs are supported", 155 | style=styles["init"], 156 | layout=layouts["subtitle"], 157 | ) 158 | collections_checkbox_box = HBox( 159 | [collections_filter_checkbox, collections_filter_label] 160 | ) 161 | collections_filter_box = VBox( 162 | [ 163 | collections_checkbox_box, 164 | collections_filter_desc, 165 | ] 166 | ) 167 | collections_box = VBox( 168 | [ 169 | HTML( 170 | value="Collection", 171 | style=styles["init"], 172 | layout=layouts["default"], 173 | ), 174 | collections_dropdown, 175 | collections_filter_box, 176 | ] 177 | ) 178 | collection_description = HTML( 179 | value=f'
{self.stac_data["collection"]["description"]}
', 180 | style=styles["init"], 181 | layout=layouts["default"], 182 | ) 183 | collection_description_box = VBox( 184 | [ 185 | HTML( 186 | value="Description", 187 | style=styles["init"], 188 | layout=layouts["header"], 189 | ), 190 | collection_description, 191 | ] 192 | ) 193 | collection_url = HTML( 194 | value=f'{self.stac_data["collection"]["href"]}', 195 | style=styles["init"], 196 | layout=layouts["default"], 197 | ) 198 | 199 | # If STAC_BROWSER_URL does not exist or is not set, fallback to STAC URL 200 | if STAC_BROWSER_URL is not None: 201 | stac_browser_url = self.stac_data["collection"]["href"].replace( 202 | STAC_CATALOG["url"], STAC_BROWSER_URL 203 | ) 204 | else: 205 | stac_browser_url = self.stac_data["collection"]["href"] 206 | 207 | collection_url_browser = HTML( 208 | value=f'View in STAC Browser', 209 | style=styles["init"], 210 | layout=layouts["default"], 211 | ) 212 | collection_url.style.text_color = "blue" 213 | collection_url_browser.style.text_color = "blue" 214 | collection_url_box = VBox( 215 | [ 216 | HTML( 217 | value="URL", 218 | style=styles["init"], 219 | layout=layouts["header"], 220 | ), 221 | collection_url, 222 | collection_url_browser, 223 | ] 224 | ) 225 | collection_start_date = DatePicker( 226 | value=datetime.strptime( 227 | self.stac_data["collection"]["start_date"], "%Y-%m-%d" 228 | ), 229 | description="Start", 230 | disabled=False if collections_dropdown.value else True, 231 | style=styles["init"], 232 | layout=layouts["default"], 233 | ) 234 | collection_end_date = DatePicker( 235 | value=datetime.strptime( 236 | self.stac_data["collection"]["end_date"], "%Y-%m-%d" 237 | ), 238 | description="End", 239 | disabled=False if collections_dropdown.value else True, 240 | style=styles["init"], 241 | layout=layouts["default"], 242 | ) 243 | collection_dates_box = VBox( 244 | [ 245 | HTML( 246 | value="Date Range", 247 | style=styles["init"], 248 | layout=layouts["header"], 249 | ), 250 | HBox([collection_start_date, collection_end_date]), 251 | ] 252 | ) 253 | if len(bad_collections) > 0: 254 | bad_collections_msg = HTML( 255 | value=f'
Invalid STAC Collections {bad_collections}
', 256 | style=styles["init"], 257 | layout=layouts["default"], 258 | ) 259 | defaultItemsDropdownText = "Select an Item" 260 | items_dropdown = Dropdown( 261 | options=[], value=None, style=styles["init"], layout=layouts["default"] 262 | ) 263 | items_box = VBox( 264 | [ 265 | HTML( 266 | value="Items", 267 | style=styles["init"], 268 | layout=layouts["header"], 269 | ), 270 | items_dropdown, 271 | ] 272 | ) 273 | 274 | band_width = "125px" 275 | 276 | singular_band_dropdown = Dropdown( 277 | options=[1], 278 | tooltip="Present Band", 279 | style=styles["init"], 280 | layout=layouts["default"], 281 | ) 282 | 283 | singular_band_dropdown_box = VBox( 284 | [ 285 | HTML( 286 | value="Band(s)", 287 | style=styles["init"], 288 | layout=layouts["header"], 289 | ), 290 | singular_band_dropdown, 291 | ] 292 | ) 293 | 294 | vmin = HTML( 295 | value=None, 296 | description="vmin:", 297 | tooltip="Minimum value of the raster to visualize", 298 | style=styles["init"], 299 | layout=Layout(width=band_width, padding="4px 8px"), 300 | ) 301 | vmax = HTML( 302 | value=None, 303 | description="vmax:", 304 | tooltip="Maximum value of the raster to visualize", 305 | style=styles["init"], 306 | layout=Layout(width=band_width, padding="4px 8px"), 307 | ) 308 | nodata = HTML( 309 | value=None, 310 | description="Nodata:", 311 | tooltip="Nodata the raster to visualize", 312 | style=styles["init"], 313 | layout=Layout(width=band_width, padding="4px 8px"), 314 | ) 315 | 316 | cmaps = [ 317 | ( 318 | "Perceptually Uniform Sequential", 319 | ["viridis", "plasma", "inferno", "magma", "cividis"], 320 | ), 321 | ( 322 | "Sequential", 323 | [ 324 | "Greys", 325 | "Purples", 326 | "Blues", 327 | "Greens", 328 | "Oranges", 329 | "Reds", 330 | "YlOrBr", 331 | "YlOrRd", 332 | "OrRd", 333 | "PuRd", 334 | "RdPu", 335 | "BuPu", 336 | "GnBu", 337 | "PuBu", 338 | "YlGnBu", 339 | "PuBuGn", 340 | "BuGn", 341 | "YlGn", 342 | ], 343 | ), 344 | ( 345 | "Sequential (2)", 346 | [ 347 | "binary", 348 | "gist_yarg", 349 | "gist_gray", 350 | "gray", 351 | "bone", 352 | "pink", 353 | "spring", 354 | "summer", 355 | "autumn", 356 | "winter", 357 | "cool", 358 | "Wistia", 359 | "hot", 360 | "afmhot", 361 | "gist_heat", 362 | "copper", 363 | ], 364 | ), 365 | ( 366 | "Diverging", 367 | [ 368 | "PiYG", 369 | "PRGn", 370 | "BrBG", 371 | "PuOr", 372 | "RdGy", 373 | "RdBu", 374 | "RdYlBu", 375 | "RdYlGn", 376 | "Spectral", 377 | "coolwarm", 378 | "bwr", 379 | "seismic", 380 | ], 381 | ), 382 | ("Cyclic", ["twilight", "twilight_shifted", "hsv"]), 383 | ( 384 | "Qualitative", 385 | [ 386 | "Pastel1", 387 | "Pastel2", 388 | "Paired", 389 | "Accent", 390 | "Dark2", 391 | "Set1", 392 | "Set2", 393 | "Set3", 394 | "tab10", 395 | "tab20", 396 | "tab20b", 397 | "tab20c", 398 | ], 399 | ), 400 | ( 401 | "Miscellaneous", 402 | [ 403 | "flag", 404 | "prism", 405 | "ocean", 406 | "gist_earth", 407 | "terrain", 408 | "gist_stern", 409 | "gnuplot", 410 | "gnuplot2", 411 | "CMRmap", 412 | "cubehelix", 413 | "brg", 414 | "gist_rainbow", 415 | "rainbow", 416 | "jet", 417 | "turbo", 418 | "nipy_spectral", 419 | "gist_ncar", 420 | ], 421 | ), 422 | ] 423 | 424 | def list_palettes(add_extra=False, lowercase=False, category=""): 425 | """List all available colormaps. See a complete lost of colormaps at https://matplotlib.org/stable/tutorials/colors/colormaps.html. 426 | Returns: 427 | list: The list of colormap names. 
428 | """ 429 | import matplotlib.pyplot as plt 430 | 431 | if not category == "": 432 | all_colormap_options = plt.colormaps() 433 | filtered_color_options = list( 434 | filter(lambda x: x[0].startswith(category), cmaps) 435 | ) 436 | palette_options = list(map(lambda x: x[1], filtered_color_options))[0] 437 | if add_extra: 438 | palette_options += ["dem", "ndvi", "ndwi"] 439 | if lowercase: 440 | palette_options = [i.lower() for i in palette_options] 441 | palette_options.sort() 442 | return palette_options 443 | 444 | def list_palette_categories(): 445 | palette_categories = list(map(lambda x: x[0], cmaps)) 446 | return palette_categories 447 | 448 | palette_category_options = list_palette_categories() 449 | palette_categories_dropdown = Dropdown( 450 | options=palette_category_options, 451 | value=palette_category_options[0], 452 | layout=layouts["default"], 453 | style=styles["init"], 454 | ) 455 | palette_categories_dropdown_box = VBox( 456 | [ 457 | HTML( 458 | value="Palette Category", 459 | style=styles["init"], 460 | layout=layouts["header"], 461 | ), 462 | palette_categories_dropdown, 463 | ] 464 | ) 465 | palette_options = list_palettes( 466 | lowercase=True, category=palette_categories_dropdown.value 467 | ) 468 | # palettes_dropdown = Dropdown( 469 | # options=palette_options, 470 | # value=palette_options[0], 471 | # description="Palette:", 472 | # layout=layouts["default"], 473 | # style=styles["init"], 474 | # ) 475 | palette_radiobuttons = RadioButtons( 476 | options=palette_options, 477 | value=palette_options[0], 478 | layout=layouts["radio"], 479 | style=styles["init"], 480 | ) 481 | palettes_radiobuttons_box = VBox( 482 | [ 483 | HTML( 484 | value="Palette", 485 | style=styles["init"], 486 | layout=layouts["header"], 487 | ), 488 | palette_radiobuttons, 489 | ] 490 | ) 491 | # TODO: Add STAC layers to LayerGroup instead of base 492 | # TODO: Add LayerGroup control to utilize STAC LayerGroup 493 | # TODO: Add back in Checkbox for layer name and additional visual parameters 494 | # checkbox = Checkbox( 495 | # value=False, 496 | # description="Additional params", 497 | # indent=False, 498 | # layout=layouts["default"], 499 | # style=styles["init"], 500 | # ) 501 | # add_params_text = "Additional parameters in the format of a dictionary, for example, \n {'palette': ['#006633', '#E5FFCC', '#662A00', '#D8D8D8', '#F5F5F5'], 'expression': '(SR_B5-SR_B4)/(SR_B5+SR_B4)'}" 502 | # add_params = Textarea( 503 | # value="", 504 | # placeholder=add_params_text, 505 | # layout=layouts["default"], 506 | # style=styles["init"], 507 | # ) 508 | # params_widget = VBox([checkbox, add_params]) 509 | raster_options = VBox( 510 | [ 511 | HBox([singular_band_dropdown_box]), 512 | HBox([palette_categories_dropdown_box]), 513 | HBox([palettes_radiobuttons_box]), 514 | ] 515 | ) 516 | stac_buttons = ToggleButtons( 517 | value=None, 518 | options=["Display "], 519 | icons=["map"], 520 | disabled=True, 521 | tooltips=["Display selected Item on the Map"], 522 | ) 523 | stac_opacity_slider = SelectionSlider( 524 | value=1, 525 | options=[("%g" % i, i / 100) for i in opacity_values], 526 | description="% Opacity:", 527 | continuous_update=False, 528 | orientation="horizontal", 529 | layout=Layout(margin="-12px 0 4px 0"), 530 | ) 531 | 532 | buttons_box = Box( 533 | [stac_opacity_slider, stac_buttons], layout=layouts["buttons"] 534 | ) 535 | stac_tab_labels = ["Catalog", "Visualization"] 536 | tab_widget_children = [] 537 | stac_tab_widget = Tab() 538 | 539 | for label in stac_tab_labels: 540 | 
tab_content = VBox() 541 | if label == "Catalog": 542 | to_display = [] 543 | if None not in [ 544 | collections_box, 545 | collection_description_box, 546 | collection_url_box, 547 | collection_dates_box, 548 | ]: 549 | to_display = [ 550 | catalogs_box, 551 | collections_box, 552 | collection_description_box, 553 | collection_url_box, 554 | collection_dates_box, 555 | items_box, 556 | ] 557 | if bad_collections_msg is not None: 558 | to_display.append(bad_collections_msg) 559 | else: 560 | to_display = [ 561 | catalogs_box, 562 | items_box, 563 | ] 564 | tab_content.children = to_display 565 | elif label == "Visualization": 566 | tab_content.children = [raster_options] 567 | tab_widget_children.append(tab_content) 568 | stac_tab_widget.children = tab_widget_children 569 | stac_tab_widget.titles = stac_tab_labels 570 | stac_widget.children = [ 571 | # catalogs_box, 572 | # collections_box, 573 | # collection_description_box, 574 | # collection_url_box, 575 | # collection_dates_box, 576 | # items_box, 577 | # palettes_dropdown, 578 | # raster_options, 579 | stac_tab_widget, 580 | buttons_box, 581 | output, 582 | ] 583 | 584 | def handle_stac_layer_opacity(change): 585 | if self.stac_data["layer_added"] == True: 586 | l = self.find_layer(items_dropdown.value) 587 | if l.name: 588 | l.opacity = change["new"] 589 | 590 | def prep_data_display_settings(): 591 | is_displayable = False 592 | stac_opacity_slider.disabled = True 593 | assets = [ 594 | i for i in self.stac_data["items"] if i["id"] == items_dropdown.value 595 | ][0]["assets"] 596 | item_href = [ 597 | i for i in self.stac_data["items"] if i["id"] == items_dropdown.value 598 | ][0]["href"] 599 | metadata = Stac.get_item_info(url=item_href) 600 | if "assets" in metadata: 601 | self.stac_data["metadata"] = metadata 602 | 603 | # with output: 604 | # output.clear_output() 605 | # print("SELECTED ITEM", [i for i in self.stac_data["items"] if i["id"] == items_dropdown.value][0]) 606 | # print("METADATA", json.dumps(metadata)) 607 | 608 | for asset in assets: 609 | data_asset = assets[asset] 610 | self.stac_data["data_href"] = data_asset.get_absolute_href() 611 | data_types = data_asset.media_type 612 | # print(f"{asset} data type:", data_types) 613 | if ( 614 | "application=geotiff" in data_types 615 | and "profile=cloud-optimized" in data_types 616 | ): 617 | is_displayable = True 618 | # if "statistics" in metadata: 619 | # minv, maxv = metadata["statistics"]["1"]["min"], metadata["statistics"]["1"]["max"] 620 | # print("MIN/MAX", minv, maxv) 621 | if "band_metadata" in metadata: 622 | bands = [b for b in metadata["band_metadata"][0] if len(b) > 0] 623 | default_bands = Stac.set_default_bands(bands) 624 | # print("BANDS", default_bands) 625 | if len(bands) == 1: 626 | raster_options.children = [ 627 | HBox([singular_band_dropdown_box]), 628 | HBox([palette_categories_dropdown_box]), 629 | HBox([palettes_radiobuttons_box]), 630 | # checkbox, 631 | # params_widget, 632 | ] 633 | singular_band_dropdown.options = default_bands 634 | singular_band_dropdown.value = default_bands[0] 635 | # stac_tab_widget.selected_index = 1 636 | else: 637 | raster_options.children = [] 638 | singular_band_dropdown.value = None 639 | 640 | if is_displayable: 641 | stac_buttons.disabled = False 642 | with output: 643 | output.clear_output() 644 | print("Item is ready for display.") 645 | else: 646 | stac_buttons.disabled = True 647 | stac_opacity_slider.disabled = True 648 | with output: 649 | output.clear_output() 650 | print( 651 | "This item cannot 
displayed. Only Cloud-Optimized GeoTIFFs are supported at this time." 652 | ) 653 | 654 | def query_collection_items(selected_collection): 655 | # print("SELECTED TO QUERY", selected_collection) 656 | items_dropdown.options = [] 657 | items_dropdown.value = None 658 | with output: 659 | output.clear_output() 660 | print("Retrieving items...") 661 | try: 662 | # geometries = [self.draw_control.last_draw['geometry']] 663 | # print(geometries) 664 | if isinstance(collection_start_date.value, datetime): 665 | start_date_query = collection_start_date.value.strftime( 666 | "%Y-%m-%d" 667 | ) 668 | else: 669 | start_date_query = str(collection_start_date.value) 670 | 671 | if isinstance(collection_end_date.value, datetime): 672 | end_date_query = collection_end_date.value.strftime("%Y-%m-%d") 673 | else: 674 | end_date_query = str(collection_end_date.value) 675 | 676 | _datetime = start_date_query 677 | if collection_end_date.value is not None: 678 | _datetime = _datetime + "/" + end_date_query 679 | url = selected_collection["href"] 680 | _query_url = url if url.endswith("/items") else url + "/items" 681 | 682 | print("from ", _query_url, "...") 683 | 684 | collection_items = Stac.stac_search( 685 | url=_query_url, 686 | max_items=20, 687 | # intersects=geometries[0], 688 | datetime=_datetime, 689 | titiler_endpoint=TITILER_ENDPOINT, 690 | get_info=True, 691 | ) 692 | result_items = list(collection_items.values()) 693 | self.stac_data["items"] = result_items 694 | items = list(collection_items.keys()) 695 | default = [defaultItemsDropdownText] 696 | if len(items) > 0: 697 | options = [*default, *items] 698 | items_dropdown.options = options 699 | items_dropdown.value = options[0] 700 | output.clear_output() 701 | print( 702 | f"{len(items)} items were found - please select 1 to determine if it can be displayed." 703 | ) 704 | else: 705 | output.clear_output() 706 | print( 707 | "No items were found within this Collection. Please select another." 708 | ) 709 | 710 | except Exception as err: 711 | output.clear_output() 712 | print("COLLECTION QUERY ERROR", err) 713 | 714 | # sets and refreshes which collections are set based on selected catalog 715 | def set_collection_options(): 716 | selected_catalog = [ 717 | cat for cat in stac_catalogs if cat["name"] == catalogs_dropdown.value 718 | ][0] 719 | selected_collection_options = get_available_collections( 720 | catalog=selected_catalog 721 | ) 722 | collections_dropdown.options = [ 723 | c["id"] for c in selected_collection_options 724 | ] 725 | 726 | selected_collection = [ 727 | c 728 | for c in selected_collection_options 729 | if c["id"] == collections_dropdown.value 730 | ][0] 731 | collections_dropdown.value = selected_collection["id"] 732 | 733 | collection_description.value = f'
{selected_collection["description"]}
' 734 | collection_url.value = f'{selected_collection["href"]}' 735 | # @TODO: We need to come here clean this up to make it more agnostic 736 | if "maap" in STAC_BROWSER_URL: 737 | stac_browser_url = selected_collection["href"].replace( 738 | "https://", STAC_BROWSER_URL 739 | ) 740 | elif "veda" in STAC_BROWSER_URL: 741 | stac_browser_url = selected_collection["href"].replace( 742 | STAC_CATALOG["url"], STAC_BROWSER_URL 743 | ) 744 | 745 | collection_url_browser.value = f'View in STAC Browser' 746 | if selected_collection["start_date"] != "": 747 | collection_start_date.value = datetime.strptime( 748 | selected_collection["start_date"], "%Y-%m-%d" 749 | ) 750 | else: 751 | collection_start_date.value = None 752 | if selected_collection["end_date"] != "": 753 | collection_end_date.value = datetime.strptime( 754 | selected_collection["end_date"], "%Y-%m-%d" 755 | ) 756 | else: 757 | collection_end_date.value = None 758 | 759 | self.stac_data["catalog"] = selected_catalog 760 | self.stac_data["collection"] = selected_collection 761 | query_collection_items(selected_collection) 762 | 763 | # Event Watchers 764 | def catalogs_changed(change): 765 | if change["new"]: 766 | set_collection_options() 767 | 768 | catalogs_dropdown.observe(catalogs_changed, names="value") 769 | 770 | def collection_changed(change): 771 | if change["new"]: 772 | set_collection_options() 773 | 774 | if collections_dropdown is not None: 775 | collections_dropdown.observe(collection_changed, names="value") 776 | 777 | def collections_filtered_checkbox_changed(change): 778 | if change["type"] == "change": 779 | set_collection_options() 780 | 781 | collections_filter_checkbox.observe( 782 | collections_filtered_checkbox_changed, names="value" 783 | ) 784 | 785 | def items_changed(change): 786 | if change["new"] and change["new"] != defaultItemsDropdownText: 787 | prep_data_display_settings() 788 | 789 | items_dropdown.observe(items_changed, names="value") 790 | 791 | def palette_category_changed(change): 792 | if change["new"]: 793 | new_palettes = list_palettes( 794 | lowercase=True, category=palette_categories_dropdown.value 795 | ) 796 | palette_radiobuttons.options = new_palettes 797 | palette_radiobuttons.value = new_palettes[0] 798 | 799 | palette_categories_dropdown.observe(palette_category_changed, names="value") 800 | 801 | """ def reset_values(): 802 | selected_collection = selected_collection_options[0] 803 | collections_dropdown.value = selected_collection["id"] 804 | collection_description.value = f'
{selected_collection["description"]}
' 805 | collection_url.value = f'{selected_collection["href"]}' 806 | collection_url_browser.value = f'View in Browser' 807 | collection_start_date.value = datetime.strptime(selected_collection["start_date"], "%Y-%m-%d") 808 | collection_end_date.value = datetime.strptime(selected_collection["end_date"], "%Y-%m-%d") 809 | items_dropdown.options = [] 810 | query_collection_items(selected_collection) 811 | # palette.value = None 812 | # raster_options.children = [] """ 813 | 814 | def reset_stac_opacity_slider(): 815 | stac_opacity_slider.value = 1 816 | stac_opacity_slider.disabled = False 817 | 818 | def button_clicked(change): 819 | if change["new"] == "Display ": 820 | with output: 821 | output.clear_output() 822 | if not items_dropdown.value == defaultItemsDropdownText: 823 | print(f"Loading data for {items_dropdown.value}...") 824 | # if ( 825 | # checkbox.value 826 | # and add_params.value.strip().startswith("{") 827 | # and add_params.value.strip().endswith("}") 828 | # ): 829 | # vis_params = eval(add_params.value) 830 | # else: 831 | vis_params = {} 832 | 833 | if ( 834 | palette_radiobuttons.value 835 | and singular_band_dropdown.options 836 | ) or ( 837 | palette_radiobuttons.value and "expression" in vis_params 838 | ): 839 | vis_params["colormap_name"] = palette_radiobuttons.value 840 | 841 | if vmin.value and vmax.value: 842 | vis_params["rescale"] = f"{vmin.value},{vmax.value}" 843 | 844 | if nodata.value: 845 | vis_params["nodata"] = nodata.value 846 | 847 | if singular_band_dropdown.options: 848 | assets = singular_band_dropdown.value 849 | else: 850 | assets = "" 851 | 852 | stac_url = Stac.get_tile_url( 853 | url=self.stac_data["data_href"], 854 | collection=self.stac_data["collection"]["id"], 855 | item=items_dropdown.value, 856 | assets=assets, 857 | palette=vis_params["colormap_name"], 858 | titiler_stac_endpoint=TITILER_ENDPOINT, 859 | ) 860 | print("stac url:", stac_url) 861 | if "tiles" in stac_url: 862 | self.stac_data["tiles_url"] = stac_url["tiles"][0] 863 | try: 864 | if "metadata" in self.stac_data: 865 | metadata = self.stac_data["metadata"] 866 | if "bounds" in metadata: 867 | bounds = self.stac_data["metadata"]["bounds"] 868 | else: 869 | bounds = self.stac_data["metadata"]["bbox"] 870 | else: 871 | bounds = [] 872 | 873 | tile_url = self.stac_data["tiles_url"] 874 | if self.stac_data["layer_added"] == True: 875 | self.layers = self.layers[: len(self.layers) - 1] 876 | self.stac_data["layer_added"] = False 877 | applied_tile_layer = self.add_tile_layer( 878 | url=tile_url, 879 | name=f"{collections_dropdown.value}, {items_dropdown.value}", 880 | attribution=items_dropdown.value, 881 | ) 882 | self.applied_layers.append(applied_tile_layer) 883 | stac_opacity_slider.observe( 884 | handle_stac_layer_opacity, names="value" 885 | ) 886 | self.stac_data["layer_added"] = True 887 | reset_stac_opacity_slider() 888 | if len(bounds) > 0: 889 | self.fit_bounds( 890 | [[bounds[1], bounds[0]], [bounds[3], bounds[2]]] 891 | ) 892 | output.clear_output() 893 | # print("STAC URL", stac_url["tiles"][0]) 894 | except Exception as err: 895 | output.clear_output() 896 | print("Display error: ", err) 897 | 898 | """ elif change["new"] == "Reset": 899 | reset_values() """ 900 | 901 | """ elif change["new"] == "Close": 902 | stac_widget.layout.display = 'none' """ 903 | 904 | stac_buttons.value = None 905 | 906 | stac_buttons.observe(button_clicked, "value") 907 | 908 | query_collection_items(selected_collection) 909 | 910 | stac_widget.layout.display = "none" 911 | 912 | return 
stac_widget 913 | -------------------------------------------------------------------------------- /stac_ipyleaflet/utilities/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MAAP-Project/stac_ipyleaflet/a880a19ad8eafff605ca2c8895f07032733c6034/stac_ipyleaflet/utilities/__init__.py -------------------------------------------------------------------------------- /stac_ipyleaflet/utilities/data/biomass-layers.csv: -------------------------------------------------------------------------------- 1 | Layer Name,Tiles URL 2 | ICESat-2 Boreal Biomass (Use zoom >=8),"https://titiler.maap-project.org/mosaics/74c9966e-e865-4c6b-bfe8-d12130f9d6ad/tiles/{z}/{x}/{y}.png?bidx=1&rescale=0,400&colormap_name=gist_earth_r" 3 | ICESat-2 Boreal Biomass SE (Use zoom >=8),"https://titiler.maap-project.org/mosaics/e7e26f4d-9a2f-4a3b-9256-0821e372e40e/tiles/{z}/{x}/{y}.png?bidx=2&rescale=0%2C20&colormap_name=reds" 4 | NCEO Africa,"https://titiler.maap-project.org/cog/tiles/{z}/{x}/{y}.png?url=s3://maap-landing-zone-gccops/user-added/uploaded_objects/ae841191-8a43-4226-9c5e-db4f500adb45/AGB_map_2017v0m_COG.tif&rescale=0,400&colormap_name=gist_earth_r" 5 | NCEO Africa SD,"https://titiler.maap-project.org/cog/tiles/{z}/{x}/{y}.png?url=s3://maap-landing-zone-gccops/user-added/uploaded_objects/96a28977-02db-46a0-8429-70f5b4267d38/SD_map_2017v0m_COG.tif&rescale=0%2C310&colormap_name=reds" 6 | GEDI L4B,"https://titiler.maap-project.org/cog/tiles/{z}/{x}/{y}.png?url=s3://ornl-cumulus-prod-protected/gedi/GEDI_L4B_Gridded_Biomass_V2_1/data/GEDI04_B_MW019MW223_02_002_02_R01000M_MU.tif&rescale=0,400&colormap_name=gist_earth_r" 7 | GEDI L4B SE,"https://titiler.maap-project.org/cog/tiles/{z}/{x}/{y}.png?url=s3://ornl-cumulus-prod-protected/gedi/GEDI_L4B_Gridded_Biomass_V2_1/data/GEDI04_B_MW019MW223_02_002_02_R01000M_SE.tif&rescale=0,310&colormap_name=reds" 8 | CCI Biomass,"https://titiler-pgstac.maap-project.org/mosaic/4f2b21b1d359e0169117e84c35ed5f7d/tiles/{z}/{x}/{y}?assets=estimates&rescale=0,400&bidx=1&colormap_name=gist_earth_r" 9 | CCI Biomass SD,"https://titiler-pgstac.maap-project.org/mosaic/4f2b21b1d359e0169117e84c35ed5f7d/tiles/{z}/{x}/{y}?assets=standard_deviation&rescale=0,500&bidx=2&colormap_name=reds" 10 | Paraguay Estimated Biomass,"https://titiler.maap-project.org/cog/tiles/{z}/{x}/{y}.png?url=s3://maap-landing-zone-gccops/user-added/uploaded_objects/0bfec58c-45fb-464e-b301-b1afbdf5249e/5_biomass_cog.masked.tif&nodata=0&bidx=1&rescale=0,400&colormap_name=gist_earth_r" 11 | Paraguay Forest Mask,"https://titiler.maap-project.org/cog/tiles/{z}/{x}/{y}.png?url=s3://maap-landing-zone-gccops/user-added/uploaded_objects/45fe2e6f-2007-4cb1-964a-f337f39f4fdc/1_forest_cog.masked.tif&rescale=0,1&nodata=0&colormap_name=greens" 12 | Paraguay Tree Cover,"https://titiler.maap-project.org/cog/tiles/{z}/{x}/{y}.png?url=s3://maap-landing-zone-gccops/user-added/uploaded_objects/ee5eb60c-3c01-4789-ae8e-c03f1d719440/4_tree_cover_cog.masked.tif&rescale=0,75&nodata=0&colormap_name=greens" 13 | -------------------------------------------------------------------------------- /stac_ipyleaflet/utilities/helpers.py: -------------------------------------------------------------------------------- 1 | import csv 2 | from importlib.resources import files 3 | from ipyleaflet import TileLayer 4 | import logging 5 | import requests 6 | from stac_ipyleaflet.constants import REQUEST_TIMEOUT 7 | 8 | 9 | def make_get_request(url, params=None, timeout=REQUEST_TIMEOUT): 10 
| """GET Request wrapper to watch for and catch specific errors 11 | Args: 12 | url (str): HTTP URL to a STAC item 13 | params (dict[str, Any]): request parameters 14 | timeout: duration for requests to stop waiting for a response after a given number of seconds 15 | """ 16 | try: 17 | req = requests.get(url, params=params, timeout=timeout) 18 | return req 19 | except requests.exceptions.Timeout: 20 | # QUESTION: Should we retry? 21 | logging.error(f"timeout raised during get request") 22 | except requests.exceptions.RequestException as e: 23 | logging.error(e) 24 | return None 25 | 26 | 27 | def add_layers_options(add_layer, file_name): 28 | layers_file = files("stac_ipyleaflet.utilities.data").joinpath(file_name) 29 | with open(layers_file, newline="") as f: 30 | csv_reader = csv.reader(f) 31 | next(csv_reader, None) # skip the headers 32 | sorted_csv = sorted(csv_reader, key=lambda row: row[0], reverse=True) 33 | for row in sorted_csv: 34 | name, tile_url = row[0], row[1] 35 | tile_layer = TileLayer( 36 | url=tile_url, attribution=name, name=name, visible=False 37 | ) 38 | add_layer(tile_layer) 39 | return 40 | -------------------------------------------------------------------------------- /stac_ipyleaflet/widgets/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MAAP-Project/stac_ipyleaflet/a880a19ad8eafff605ca2c8895f07032733c6034/stac_ipyleaflet/widgets/__init__.py -------------------------------------------------------------------------------- /stac_ipyleaflet/widgets/basemaps.py: -------------------------------------------------------------------------------- 1 | from ipyleaflet import basemaps, basemap_to_tiles, TileLayer 2 | from ipywidgets import Box 3 | 4 | 5 | class BasemapsWidget: 6 | def template(self, **kwargs) -> Box(): 7 | base_layers = [] 8 | 9 | def make_base_layer(url=None, visible=False, name="", layer=""): 10 | if url: 11 | layer = TileLayer( 12 | url=url, name=name, base=True, visible=False, **kwargs 13 | ) 14 | return layer 15 | else: 16 | layer = basemap_to_tiles(layer) 17 | layer.base = True 18 | layer.visible = visible 19 | layer.name = name 20 | return layer 21 | 22 | tile_layers = [ 23 | { 24 | "url": "https://mt1.google.com/vt/lyrs=p&x={x}&y={y}&z={z}", 25 | "name": "Google Terrain", 26 | }, 27 | { 28 | "url": "http://services.arcgisonline.com/ArcGIS/rest/services/Canvas/World_Light_Gray_Base/MapServer/tile/{z}/{y}/{x}", 29 | "name": "Esri Light Gray", 30 | }, 31 | ] 32 | 33 | for tl in tile_layers: 34 | tile_layer = make_base_layer(url=tl["url"], name=tl["name"]) 35 | base_layers.append(tile_layer) 36 | 37 | basemap_layers = [ 38 | {"layer": basemaps.Esri.WorldImagery, "name": "Esri World Imagery"}, 39 | {"layer": basemaps.Esri.NatGeoWorldMap, "name": "Esri National Geographic"}, 40 | {"layer": basemaps.OpenStreetMap.Mapnik, "name": "Open Street Map"}, 41 | {"layer": basemaps.OpenTopoMap, "name": "Open Topo Map"}, 42 | # {"layer": basemaps.Stamen.Toner, "name": "Black & White"}, 43 | {"layer": basemaps.Strava.Water, "name": "Water"}, 44 | ] 45 | 46 | for bm in basemap_layers: 47 | if bm["name"] == "Open Street Map": 48 | bm_layer = make_base_layer( 49 | layer=bm["layer"], name=bm["name"], visible=True 50 | ) 51 | else: 52 | bm_layer = make_base_layer(layer=bm["layer"], name=bm["name"]) 53 | base_layers.append(bm_layer) 54 | 55 | return base_layers 56 | -------------------------------------------------------------------------------- /stac_ipyleaflet/widgets/draw.py: 
-------------------------------------------------------------------------------- 1 | from ipyleaflet import DrawControl, GeoJSON 2 | from ipywidgets import Box, Output 3 | from stac_ipyleaflet.core import StacIpyleaflet 4 | 5 | 6 | # @TODO: Break out shared logic between widgets into a utilities directory 7 | class DrawControlWidget(StacIpyleaflet): 8 | def template(self) -> Box(style={"max_height: 200px"}): 9 | bbox_out = Output() 10 | 11 | # Set unwanted draw controls to False or empty objects 12 | # @TODO-CLEANUP: Create only one DrawControl and pass in the attributes instead 13 | draw_control = DrawControl( 14 | edit=False, 15 | remove=False, 16 | circlemarker={}, 17 | polygon={}, 18 | polyline={}, 19 | marker={}, 20 | ) 21 | 22 | # Add rectangle draw control for bounding box 23 | draw_control.rectangle = { 24 | "shapeOptions": { 25 | "fillColor": "transparent", 26 | "color": "#333", 27 | "fillOpacity": 1.0, 28 | }, 29 | "repeatMode": False, 30 | } 31 | 32 | tabs = {} 33 | 34 | for i in range(2): 35 | tabs[f"child{i}"] = ( 36 | self.interact_widget.children[0] 37 | .children[i] 38 | .children[0] 39 | .children[0] 40 | .children 41 | ) 42 | 43 | point_tab_children = tabs["child0"] 44 | area_tab_children = tabs["child1"] 45 | 46 | aoi_coords = area_tab_children[1] 47 | aoi_clear_button = area_tab_children[2] 48 | 49 | # @TODO-CLEANUP: Duplication between tabs, pull logic out into a common utilities file 50 | def handle_clear(event): 51 | draw_layer = self.find_layer("draw_layer") 52 | self.remove_layer(draw_layer) 53 | aoi_coords.value = "Waiting for area of interest..." 54 | aoi_clear_button.disabled = True 55 | 56 | def handle_draw(event, action, geo_json, **kwargs): 57 | self.aoi_coordinates = [] 58 | self.aoi_bbox = () 59 | 60 | if "Coordinates" in point_tab_children[1].value: 61 | area_tab_children[ 62 | 1 63 | ].value = "Waiting for points of interest..." 64 | 65 | if action == "created": 66 | if geo_json["geometry"]: 67 | geojson_layer = GeoJSON( 68 | name="draw_layer", 69 | data=geo_json, 70 | style={ 71 | "fillColor": "transparent", 72 | "color": "#333", 73 | "weight": 3, 74 | }, 75 | ) 76 | self.add_layer(geojson_layer) 77 | raw_coordinates = geo_json["geometry"]["coordinates"][0] 78 | 79 | def bounding_box(points): 80 | x_coordinates, y_coordinates = zip(*points) 81 | return ( 82 | min(x_coordinates), 83 | min(y_coordinates), 84 | max(x_coordinates), 85 | max(y_coordinates), 86 | ) 87 | 88 | bbox = bounding_box(raw_coordinates) 89 | self.aoi_coordinates = raw_coordinates 90 | self.aoi_bbox = bbox 91 | coords_list = [coord for coord in raw_coordinates] 92 | coords = ",
".join(map(str, coords_list)) 93 | aoi_coords.value = f"

Coordinates:

{coords}

BBox:

{bbox}" 94 | event.clear() 95 | aoi_clear_button.disabled = False 96 | aoi_clear_button.on_click(handle_clear) 97 | 98 | return 99 | 100 | draw_control.on_draw(callback=handle_draw) 101 | draw_control.output = bbox_out 102 | 103 | return draw_control 104 | -------------------------------------------------------------------------------- /stac_ipyleaflet/widgets/inspect.py: -------------------------------------------------------------------------------- 1 | from ipyleaflet import DrawControl, MarkerCluster, Marker, DrawControl, GeoJSON 2 | from urllib.parse import urlparse, parse_qs 3 | from typing import List 4 | from stac_ipyleaflet.constants import TITILER_ENDPOINT 5 | from stac_ipyleaflet.core import StacIpyleaflet 6 | from stac_ipyleaflet.utilities.helpers import make_get_request 7 | 8 | 9 | class COGRequestedData: 10 | coordinates: List[float] 11 | values: List[float] 12 | band_names: List[str] 13 | 14 | 15 | class LayerData: 16 | layer_name: str 17 | data: COGRequestedData 18 | 19 | 20 | # @TODO: Break out shared logic between widgets into a utilities directory 21 | class InspectControlWidget(StacIpyleaflet): 22 | def template(self): 23 | # @TODO-CLEANUP: Create only one DrawControl and pass in the attributes instead 24 | draw_control = DrawControl( 25 | edit=False, 26 | remove=False, 27 | circlemarker={}, 28 | polygon={}, 29 | polyline={}, 30 | rectangle={}, 31 | ) 32 | 33 | draw_control.marker = { 34 | "repeatMode": False, 35 | } 36 | 37 | tabs = {} 38 | 39 | for i in range(2): 40 | tabs[f"child{i}"] = ( 41 | self.interact_widget.children[0] 42 | .children[i] 43 | .children[0] 44 | .children[0] 45 | .children 46 | ) 47 | 48 | point_tab_children = tabs["child0"] 49 | area_tab_children = tabs["child1"] 50 | 51 | point_data = point_tab_children[1] 52 | clear_button = point_tab_children[2] 53 | 54 | def get_visible_layers_data(coordinates) -> List[LayerData]: 55 | visible_layers_data = [] 56 | for layer in self.applied_layers: 57 | if "/cog" in layer.url: 58 | parsed_url = urlparse(layer.url) 59 | parsed_query = parse_qs(parsed_url.query) 60 | url = parsed_query["url"][0] 61 | cog_partial_request_path = f"{TITILER_ENDPOINT}/cog/point/{coordinates[0]},{coordinates[1]}?url=" 62 | response = make_get_request(f"{cog_partial_request_path}{url}") 63 | if response.status_code == 200: 64 | data = response.json() 65 | visible_layers_data.append( 66 | {"layer_name": layer.name, "data": data} 67 | ) 68 | elif "/mosaics" in layer.url: 69 | parsed_url = urlparse(layer.url) 70 | mosaic_id = parsed_url.path.split("/")[2] 71 | mosaic_request_url = f"{TITILER_ENDPOINT}/mosaics/{mosaic_id}/point/{coordinates[0]},{coordinates[1]}" 72 | response = make_get_request(mosaic_request_url) 73 | data = response.json() 74 | if response.status_code == 200: 75 | data_to_display = { 76 | "coordinates": data["coordinates"], 77 | "values": data["values"][0][1], 78 | "band_names": data["values"][0][-1], 79 | } 80 | visible_layers_data.append( 81 | {"layer_name": layer.name, "data": data_to_display} 82 | ) 83 | if response.status_code == 404: 84 | visible_layers_data.append({"layer_name": layer.name, **data}) 85 | return visible_layers_data 86 | 87 | def display_layer_data(coordinates: List[int], layers_data: LayerData): 88 | point_data.value = f""" 89 |

90 | Coordinates: {coordinates} 91 |

92 | """ 93 | 94 | def create_layer_data_html(layer_name, values, band_names): 95 | return f""" 96 |

97 | 98 | {layer_name} 99 | 100 |

101 | 109 | """ 110 | 111 | def create_no_data_html(layer_name, msg): 112 | return f""" 113 |

114 | 115 | {layer_name} 116 | 117 |

118 |

119 | {msg} 120 |

121 | """ 122 | 123 | for layer in layers_data: 124 | if "data" in layer: 125 | point_data.value += create_layer_data_html( 126 | layer["layer_name"], 127 | layer["data"]["values"], 128 | layer["data"]["band_names"], 129 | ) 130 | elif "data" not in layer and "detail" in layer: 131 | point_data.value += create_no_data_html( 132 | layer["layer_name"], layer["detail"] 133 | ) 134 | else: 135 | point_data.value += create_no_data_html( 136 | layer["layer_name"], "No data to report" 137 | ) 138 | return 139 | 140 | def handle_interaction(event, action, geo_json, **kwargs): 141 | # @TODO-CLEANUP: Duplication between tabs, pull logic out into a common utilities file 142 | def handle_clear(event): 143 | draw_layer = self.find_layer("draw_layer") 144 | self.remove_layer(draw_layer) 145 | point_data.value = "Waiting for points of interest..." 146 | clear_button.disabled = True 147 | return 148 | 149 | event.coordinates = [] 150 | if "Coordinates" in area_tab_children[1].value: 151 | area_tab_children[ 152 | 1 153 | ].value = "Waiting for area of interest..." 154 | 155 | if action == "created": 156 | if geo_json["geometry"] and geo_json["geometry"]["type"] == "Point": 157 | geojson_layer = GeoJSON( 158 | name="draw_layer", 159 | data=geo_json, 160 | ) 161 | self.add_layer(geojson_layer) 162 | event.coordinates = geo_json["geometry"]["coordinates"] 163 | 164 | if len(self.applied_layers): 165 | layers_data = get_visible_layers_data(event.coordinates) 166 | if layers_data: 167 | display_layer_data(event.coordinates, layers_data) 168 | else: 169 | point_data.value = f"

Coordinates: {event.coordinates}" 170 | elif not len(self.applied_layers): 171 | point_data.value = f"Coordinates: {event.coordinates}
" 172 | 173 | event.clear() 174 | clear_button.disabled = False 175 | clear_button.on_click(handle_clear) 176 | return 177 | 178 | draw_control.on_draw(callback=handle_interaction) 179 | 180 | return draw_control 181 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Unit test package for stac_ipyleaflet.""" 2 | -------------------------------------------------------------------------------- /tests/test_core.py: -------------------------------------------------------------------------------- 1 | from stac_ipyleaflet.core import StacIpyleaflet 2 | 3 | 4 | # UI TEST CASES 5 | def test_create_buttons_layout(): 6 | test_map_instance = StacIpyleaflet() 7 | buttons_container = StacIpyleaflet.create_buttons_layout(test_map_instance) 8 | button_descriptions = [item.description for item in buttons_container.children] 9 | assert len(buttons_container.children) == 3 10 | assert button_descriptions == ["Interact", "Layers", "STAC Data"] 11 | 12 | 13 | # DRAW CONTROL TEST CASES 14 | def test_remove_draw_controls(): 15 | test_map_instance = StacIpyleaflet() 16 | test_map_instance.add_control(test_map_instance.point_control) 17 | test_map_instance.point_control_added = True 18 | list_of_controls = list(map(lambda x: str(type(x)), test_map_instance.controls)) 19 | assert "" in list_of_controls 20 | 21 | test_map_instance = StacIpyleaflet.remove_draw_controls(test_map_instance) 22 | list_of_controls = list(map(lambda x: str(type(x)), test_map_instance.controls)) 23 | assert "" not in list_of_controls 24 | 25 | 26 | # LAYERS TEST CASES 27 | def test_layers(): 28 | test_map_instance = StacIpyleaflet() 29 | 30 | def test_add_biomass_layers_options(): 31 | all_layer_names = [layer.name for layer in test_map_instance.layers] 32 | assert len(test_map_instance.layers) == 21 33 | assert "CCI Biomass" in all_layer_names 34 | assert "NCEO Africa" in all_layer_names 35 | 36 | def test_create_layers_widget(): 37 | # Changing checkbox value to True for layer should add to the applied_layers list 38 | # @NOTE: Extremely fragile, possibly come to revisit to figure out better way to test 39 | test_map_instance.controls[2].widget.children[0].children[0].children[ 40 | 0 41 | ].children[0].children[0].children[0].value = True 42 | assert len(test_map_instance.applied_layers) == 1 43 | assert test_map_instance.applied_layers[0].name == "CCI Biomass" 44 | 45 | test_add_biomass_layers_options() 46 | test_create_layers_widget() 47 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | envlist = py36, py37, py38, flake8 3 | 4 | [travis] 5 | python = 6 | 3.8: py38 7 | 3.7: py37 8 | 3.6: py36 9 | 10 | [testenv:flake8] 11 | basepython = python 12 | deps = flake8 13 | commands = flake8 stac_ipyleaflet tests 14 | 15 | [testenv] 16 | setenv = 17 | PYTHONPATH = {toxinidir} 18 | deps = 19 | -r{toxinidir}/requirements_dev.txt 20 | ; If you want to make tox run the tests with the same versions, create a 21 | ; requirements.txt with the pinned versions and uncomment the following line: 22 | ; -r{toxinidir}/requirements.txt 23 | commands = 24 | pip install -U pip 25 | pytest --basetemp={envtmpdir} 26 | 27 | -------------------------------------------------------------------------------- /write_biomass_layers.py: 
-------------------------------------------------------------------------------- 1 | #!git clone git@github.com:maap-project/biomass-dashboard-datasets 2 | import csv 3 | import json 4 | import os 5 | 6 | 7 | def write_biomass_layers(): 8 | data_dir = "biomass-dashboard-datasets/datasets/" 9 | files = os.listdir(data_dir) 10 | 11 | biomass_file = os.path.join( 12 | os.path.dirname(__package__), 13 | "data", 14 | "biomass-layers.csv", 15 | ) 16 | with open(biomass_file, "w", newline="") as csv_file: 17 | fieldnames = ["Layer Name", "Tiles URL"] 18 | writer = csv.DictWriter(csv_file, fieldnames=fieldnames) 19 | writer.writeheader() 20 | for filename in files: 21 | with open(f"{data_dir}{filename}", "r") as file_obj: 22 | data = json.loads(file_obj.read()) 23 | if data["source"].get("tiles"): 24 | tile_url = data["source"]["tiles"][0] 25 | tile_url = tile_url.replace("&color_formula=gamma r {gamma}", "") 26 | tile_url = tile_url.replace( 27 | "{titiler_server_url}", "https://titiler.maap-project.org" 28 | ) 29 | file_obj.close() 30 | writer.writerow( 31 | { 32 | fieldnames[0]: data["id"].capitalize().replace("_", " "), 33 | fieldnames[1]: tile_url, 34 | } 35 | ) 36 | csv_file.close() 37 | 38 | 39 | if __name__ == "__main__": 40 | write_biomass_layers() 41 | --------------------------------------------------------------------------------
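The modules above are intended to be driven from a Jupyter notebook. The sketch below is a minimal, non-authoritative example of how the pieces fit together: it assumes a notebook kernel with ipyleaflet enabled and network access to the configured titiler endpoint, and the STAC item URL, asset name, and layer name are illustrative placeholders rather than real data. The StacIpyleaflet() constructor and add_tile_layer(...) call mirror their use in tests/test_core.py and stac_discovery/stac_widget.py above.

# Minimal usage sketch (assumptions: Jupyter notebook with ipyleaflet enabled;
# the item URL, asset name, and layer name below are placeholders).
from stac_ipyleaflet.core import StacIpyleaflet
from stac_ipyleaflet.constants import TITILER_ENDPOINT
from stac_ipyleaflet.stac_discovery.stac import Stac

m = StacIpyleaflet()  # builds the map with basemaps, biomass layers, and widget controls
# In a notebook, evaluating `m` in a cell renders the interactive map.

# Resolve a tilejson document for a single COG item and add it as a tile layer.
tilejson = Stac.get_tile_url(
    url="https://stac.maap-project.org/collections/EXAMPLE/items/EXAMPLE_ITEM",  # placeholder item
    assets="b1",  # placeholder asset/band name
    palette="viridis",
    titiler_stac_endpoint=TITILER_ENDPOINT,
)
if isinstance(tilejson, dict) and "tiles" in tilejson:
    m.add_tile_layer(url=tilejson["tiles"][0], name="Example COG", attribution="example")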