├── Optical
│   ├── notebooks
│   │   ├── sentinelhub.id
│   │   ├── util.py
│   │   ├── credentials_SH.ipynb
│   │   ├── data_sources_explorer.ipynb
│   │   └── earth_observation_with_StatAPI.ipynb
│   ├── environment.yml
│   ├── data
│   │   └── statapi_kranj.geojson
│   └── README.md
├── SAR
│   └── README.md
├── README.md
├── .gitignore
└── LICENSE

/Optical/notebooks/sentinelhub.id:
--------------------------------------------------------------------------------
1 | INSTANCE_ID = ""
2 | CLIENT_ID = ""
3 | CLIENT_SECRET = ""
4 |
--------------------------------------------------------------------------------
/Optical/environment.yml:
--------------------------------------------------------------------------------
1 | name: eo
2 | channels:
3 |   - conda-forge
4 | dependencies:
5 |   - python=3.10
6 |   - numpy=1.24
7 |   - rasterio=1.3.4
8 |   - shapely=2.0.1
9 |   - pillow=9.4
10 |   - geopandas=0.12.2
11 |   - scikit-learn=1.2.1
12 |   - jupyterlab=3.6.1
13 |   - ipywidgets=8.0.4
14 |   - pip:
15 |     - eo-learn
16 |
--------------------------------------------------------------------------------
/SAR/README.md:
--------------------------------------------------------------------------------
1 | # SAR Remote Sensing for Agriculture
2 |
3 | Materials for the course [*Remote Sensing and Change Detection with Sentinel Time Series Data*](https://geoclassroom.fgg.uni-lj.si/course/view.php?id=13).
4 |
5 | Prepared by:
6 | Krištof Oštir
7 | Bujar Fetai
8 |
9 | ## Installation instructions
10 | The software used for the exercise is provided by ESA through [STEP](https://step.esa.int/main/). The latest version can be downloaded from the [website](https://step.esa.int/main/download/snap-download/). For the practicals you will need the Sentinel Toolboxes from that site.
11 |
12 | ## Sample data
13 | The sample data is available at the following [link](https://unilj-my.sharepoint.com/:f:/g/personal/bfetai_fgg_uni-lj_si/Eo-quoAjKetCnhf0ixvZEc8Bsho_DUU-pJbMdFmTdThJUQ?e=ZYYCjp).
14 |
15 |
16 | ## Acknowledgment
17 |
18 | Preparation of the materials was part financed by the Slovenian Research Agency core funding No. P2-0406.
19 |
20 | ## License
21 | This project is licensed under the terms of the [Apache License](LICENSE).
22 |
23 | © Copyright 2022 University of Ljubljana, Faculty of Civil and Geodetic Engineering
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Remote Sensing and Change Detection with Sentinel Time Series Data
2 |
3 | Materials for the course [*Remote Sensing and Change Detection with Sentinel Time Series Data*](https://geoclassroom.fgg.uni-lj.si/course/view.php?id=13).
4 |
5 | Prepared by:
6 | Krištof Oštir, Bujar Fetai, Matej Račič (University of Ljubljana)
7 |
8 |
9 | ## Theory
10 |
11 | Slides for the theory part of the course are available here: [Remote Sensing and Change Detection](./Theory/Slides.pdf).
12 |
13 | ## Preparation
14 |
15 | The repository consists of two directories with the materials, installation instructions, theory and practical information.
16 |
17 | * [Optical](Optical/README.md)
18 | * [SAR](SAR/README.md)
19 |
20 | The repository can be synchronized using `git pull` or downloaded as a zip file. The data used for the practical exercises can be downloaded from [geoclassroom](https://geoclassroom.fgg.uni-lj.si/course/view.php?id=13).
21 |
22 | Each practical has corresponding installation instructions, which should be completed in advance.
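For example, a minimal command-line workflow for getting and updating the materials (the repository URL below is a placeholder; the folder name follows the `Downloads/EduServ23` path used in the Optical instructions):

```
# first time: clone the repository (or download and extract the zip instead)
git clone <repository-url> EduServ23
cd EduServ23

# later: fetch the latest version of the materials
git pull
```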
23 |
24 | ## Acknowledgment
25 |
26 | Preparation of the materials was part financed by the Slovenian Research Agency core funding No. P2-0406.
27 |
28 | ## License
29 | This project is licensed under the terms of the [Apache License](LICENSE).
30 |
31 | © Copyright 2022 University of Ljubljana, Faculty of Civil and Geodetic Engineering
--------------------------------------------------------------------------------
/Optical/notebooks/util.py:
--------------------------------------------------------------------------------
1 | # Once StatAPI reaches maturity, these functions will be available in sh-py.
2 |
3 | from collections import defaultdict
4 | from sentinelhub import parse_time
5 | import pandas as pd
6 |
7 |
8 | def stat_to_df(stat_data):
9 |     """Transform a Statistical API (StatAPI) response into a pandas DataFrame."""
10 |     df_data = defaultdict(list)
11 |
12 |     for single_data in stat_data['data']:
13 |         # one row per aggregation interval
14 |         df_data['interval_from'].append(parse_time(single_data['interval']['from']).date())
15 |         df_data['interval_to'].append(parse_time(single_data['interval']['to']).date())
16 |
17 |         # one column per output / band / statistic combination
18 |         for output_name, output_data in single_data['outputs'].items():
19 |             for band_name, band_values in output_data['bands'].items():
20 |                 for stat_name, value in band_values['stats'].items():
21 |                     col_name = f'{output_name}_{band_name}_{stat_name}'
22 |                     if stat_name == 'percentiles':
23 |                         # expand each requested percentile into its own column
24 |                         for perc, perc_val in value.items():
25 |                             perc_col_name = f'{col_name}_{perc}'
26 |                             df_data[perc_col_name].append(perc_val)
27 |                     else:
28 |                         df_data[col_name].append(value)
29 |
30 |     df = pd.DataFrame(df_data)
31 |     # df = df.astype({c: 'float' for c in df.columns if any(n in c for n in ['mean', 'min', 'max', 'stDev', 'percentil'])})
32 |     return df
--------------------------------------------------------------------------------
/Optical/notebooks/credentials_SH.ipynb:
--------------------------------------------------------------------------------
1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "id": "872142e4-9880-4595-9872-06af72c2565d", 7 | "metadata": {}, 8 | "outputs": [], 9 | "source": [ 10 | "# read the credentials\n", 11 | "f = open('sentinelhub.id', 'r')\n", 12 | "for _ in range(3):\n", 13 | " exec(f.readline())" 14 | ] 15 | }, 16 | { 17 | "cell_type": "code", 18 | "execution_count": null, 19 | "id": "0ca3a19f-c70e-4d40-8ea1-7795ffa70964", 20 | "metadata": {}, 21 | "outputs": [], 22 | "source": [ 23 | "!sentinelhub.config --instance_id \"$INSTANCE_ID\" --sh_client_id \"$CLIENT_ID\" --sh_client_secret \"$CLIENT_SECRET\"" 24 | ] 25 | }, 26 | { 27 | "cell_type": "code", 28 | "execution_count": 1, 29 | "id": "d954492a-70e7-41ca-980c-64f2418e7ce4", 30 | "metadata": {}, 31 | "outputs": [], 32 | "source": [ 33 | "# confirm the values have been set \n", 34 | "!sentinelhub.config --show" 35 | ] 36 | }, 37 | { 38 | "cell_type": "code", 39 | "execution_count": null, 40 | "id": "aa2ad73c-cf9b-4649-a186-07fbe0a587bc", 41 | "metadata": {}, 42 | "outputs": [], 43 | "source": [] 44 | } 45 | ], 46 | "metadata": { 47 | "kernelspec": { 48 | "display_name": "Python 3 (ipykernel)", 49 | "language": "python", 50 | "name": "python3" 51 | }, 52 | "language_info": { 53 | "codemirror_mode": { 54 | "name": "ipython", 55 | "version": 3 56 | }, 57 | "file_extension": ".py", 58 | "mimetype": "text/x-python", 59 | "name": "python", 60 | "nbconvert_exporter": "python", 61 | "pygments_lexer": "ipython3", 62 | "version": "3.10.8" 63 | }, 64 | "vscode": { 65 |
"interpreter": { 66 | "hash": "6a6edf0266831650904d45ebea3a1b4640f37efcd9e2004c6886f3018479f897" 67 | } 68 | } 69 | }, 70 | "nbformat": 4, 71 | "nbformat_minor": 5 72 | } 73 | -------------------------------------------------------------------------------- /Optical/data/statapi_kranj.geojson: -------------------------------------------------------------------------------- 1 | { 2 | "type": "FeatureCollection", 3 | "features": [ 4 | { "type": "Feature", "properties": { "type": "Meadow" }, "geometry": { "type": "Polygon", "coordinates": [ [ [ 14.324733118535027, 46.232120703379195 ], [ 14.325133609522998, 46.230464635301018 ], [ 14.325649993215356, 46.230429990756534 ], [ 14.325465369520465, 46.232200409470316 ], [ 14.325465369520465, 46.232200409470316 ], [ 14.324733118535027, 46.232120703379195 ] ] ] } }, 5 | { "type": "Feature", "properties": { "type": "Agricultural" }, "geometry": { "type": "Polygon", "coordinates": [ [ [ 14.349124266115675, 46.255349134706137 ], [ 14.349398297126779, 46.253452675425393 ], [ 14.350555663164773, 46.253982847255074 ], [ 14.353607866700328, 46.253847430466237 ], [ 14.353652052176216, 46.255440270824948 ], [ 14.349124266115675, 46.255349134706137 ] ] ] } }, 6 | { "type": "Feature", "properties": { "type": "Water" }, "geometry": { "type": "Polygon", "coordinates": [ [ [ 14.400925333849163, 46.200240459565158 ], [ 14.401537181409934, 46.198869224149711 ], [ 14.406619640533259, 46.196626800435936 ], [ 14.4087228152554, 46.196790413452071 ], [ 14.408334828009844, 46.197813782340418 ], [ 14.405253603520835, 46.200939371054154 ], [ 14.400925333849163, 46.200240459565158 ] ] ] } }, 7 | { "type": "Feature", "properties": { "type": "Forest" }, "geometry": { "type": "Polygon", "coordinates": [ [ [ 14.338801479931115, 46.265696572582584 ], [ 14.338816429777498, 46.264453131526096 ], [ 14.341721895635359, 46.263488164854316 ], [ 14.341705184097, 46.264884310232659 ], [ 14.338801479931115, 46.265696572582584 ] ] ] } }, 8 | { "type": "Feature", "properties": { "type": "Urban" }, "geometry": { "type": "Polygon", "coordinates": [ [ [ 14.354960838786276, 46.24099737080369 ], [ 14.354268171395052, 46.238397313115691 ], [ 14.355952030212931, 46.236901481763489 ], [ 14.356805779206331, 46.239175190759333 ], [ 14.35621563676556, 46.241244423590061 ], [ 14.354960838786276, 46.24099737080369 ] ] ] } } 9 | ] 10 | } 11 | -------------------------------------------------------------------------------- /Optical/README.md: -------------------------------------------------------------------------------- 1 | # Change detection with optical time series 2 | 3 | Materials for the *Remote Sensing and Change Detection with Sentinel Time Series Data*. 4 | 5 | Prepared by: 6 | Krištof Oštir 7 | Matej Račič 8 | 9 | ## Installation instructions 10 | We will be using [Anaconda](https://www.anaconda.com/), which can be installed from the [website](https://www.anaconda.com/products/distribution#Downloads). 11 | Once installed open Anaconda Prompt and move to the location of the extracted repository `cd Downloads/EduServ23/Optical`. If you have downloaded it to a different drive type the letter of the drive first `D:`. 12 | 13 | Here you can create a new environment for this tutorial using the provided environment.yml file: 14 | 15 | ``` 16 | conda update -n base -c defaults conda 17 | conda env create --name eo --file environment.yml 18 | conda activate eo 19 | ``` 20 | 21 | Alternatively, you can use pip to install the libraries using 'pip' and follow the tutorial. This will take some time. 
Once installed, run `jupyter lab` and a browser tab will open.
22 |
23 | ## Practicals
24 | We will be using the notebooks available in the corresponding folders. To run the notebooks after the practical you will need a [Sentinel Hub](https://www.sentinel-hub.com/develop/api/ogc/standard-parameters/) account.
25 | A free trial is also available. Once registered, you can follow the [instructions](https://sentinelhub-py.readthedocs.io/en/latest/configure.html) to configure access to the services, or use `sentinelhub.id` with `credentials_SH.ipynb`.
26 |
27 | ## Additional resources
28 | This tutorial is based on the [materials](https://github.com/sentinel-hub/eo-learn-workshop/) provided by Sinergise, where you can find even more examples and resources for the [eo-learn](https://github.com/sentinel-hub/eo-learn) library.
29 |
30 | ## Acknowledgment
31 |
32 | Preparation of the materials was part financed by the Slovenian Research Agency core funding No. P2-0406.
33 |
34 | ## License
35 | This project is licensed under the terms of the [Apache License](LICENSE).
36 |
37 | © Copyright 2022 University of Ljubljana, Faculty of Civil and Geodetic Engineering
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | .ipynb_checkpoints/
4 | *.py[cod]
5 | *$py.class
6 |
7 | # C extensions
8 | *.so
9 |
10 | # Distribution / packaging
11 | .Python
12 | build/
13 | develop-eggs/
14 | dist/
15 | downloads/
16 | eggs/
17 | .eggs/
18 | lib/
19 | lib64/
20 | parts/
21 | sdist/
22 | var/
23 | wheels/
24 | pip-wheel-metadata/
25 | share/python-wheels/
26 | *.egg-info/
27 | .installed.cfg
28 | *.egg
29 | MANIFEST
30 |
31 | # PyInstaller
32 | # Usually these files are written by a python script from a template
33 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
34 | *.manifest
35 | *.spec
36 |
37 | # Installer logs
38 | pip-log.txt
39 | pip-delete-this-directory.txt
40 |
41 | # Unit test / coverage reports
42 | htmlcov/
43 | .tox/
44 | .nox/
45 | .coverage
46 | .coverage.*
47 | .cache
48 | nosetests.xml
49 | coverage.xml
50 | *.cover
51 | *.py,cover
52 | .hypothesis/
53 | .pytest_cache/
54 |
55 | # Translations
56 | *.mo
57 | *.pot
58 |
59 | # Django stuff:
60 | *.log
61 | local_settings.py
62 | db.sqlite3
63 | db.sqlite3-journal
64 |
65 | # Flask stuff:
66 | instance/
67 | .webassets-cache
68 |
69 | # Scrapy stuff:
70 | .scrapy
71 |
72 | # Sphinx documentation
73 | docs/_build/
74 |
75 | # PyBuilder
76 | target/
77 |
78 | # Jupyter Notebook
79 | .ipynb_checkpoints
80 |
81 | # IPython
82 | profile_default/
83 | ipython_config.py
84 |
85 | # pyenv
86 | .python-version
87 |
88 | # pipenv
89 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
90 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
91 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
92 | # install all needed dependencies.
93 | #Pipfile.lock
94 |
95 | # PEP 582; used by e.g.
github.com/David-OConnor/pyflow 96 | __pypackages__/ 97 | 98 | # Celery stuff 99 | celerybeat-schedule 100 | celerybeat.pid 101 | 102 | # SageMath parsed files 103 | *.sage.py 104 | 105 | # Environments 106 | .env 107 | .venv 108 | env/ 109 | venv/ 110 | ENV/ 111 | env.bak/ 112 | venv.bak/ 113 | 114 | # Spyder project settings 115 | .spyderproject 116 | .spyproject 117 | 118 | # Rope project settings 119 | .ropeproject 120 | 121 | # mkdocs documentation 122 | /site 123 | 124 | # mypy 125 | .mypy_cache/ 126 | .dmypy.json 127 | dmypy.json 128 | 129 | # Pyre type checker 130 | .pyre/ 131 | 132 | cache/ 133 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. 
For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright 2020 ZRC SAZU and University of Ljubljana 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /Optical/notebooks/data_sources_explorer.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Data-sources exploration using `eo-learn`" 8 | ] 9 | }, 10 | { 11 | "attachments": {}, 12 | "cell_type": "markdown", 13 | "metadata": {}, 14 | "source": [ 15 | "This notebook shows some examples on how to retrieve EO and non-EO data using `eo-learn`. 
\n", 16 | "\n", 17 | "The steps are as follow:\n", 18 | " * split area of interest into easy-to-process EOPatches\n", 19 | " * add Sentinel-2 imaging data\n", 20 | " * add Sentinel-1 imaging data" 21 | ] 22 | }, 23 | { 24 | "cell_type": "markdown", 25 | "metadata": {}, 26 | "source": [ 27 | "Add generic packages" 28 | ] 29 | }, 30 | { 31 | "cell_type": "code", 32 | "execution_count": null, 33 | "metadata": {}, 34 | "outputs": [], 35 | "source": [ 36 | "%matplotlib inline\n", 37 | "\n", 38 | "import os\n", 39 | "from pathlib import Path\n", 40 | "\n", 41 | "from matplotlib import dates\n", 42 | "from mpl_toolkits.axes_grid1 import make_axes_locatable\n", 43 | "from shapely.geometry import Polygon, box, shape, mapping\n", 44 | "import matplotlib.pyplot as plt\n", 45 | "import geopandas as gpd\n", 46 | "import numpy as np" 47 | ] 48 | }, 49 | { 50 | "cell_type": "markdown", 51 | "metadata": {}, 52 | "source": [ 53 | "Set path to data" 54 | ] 55 | }, 56 | { 57 | "cell_type": "code", 58 | "execution_count": null, 59 | "metadata": {}, 60 | "outputs": [], 61 | "source": [ 62 | "data_dir = Path('..', 'data')\n", 63 | "os.listdir(data_dir)" 64 | ] 65 | }, 66 | { 67 | "cell_type": "markdown", 68 | "metadata": {}, 69 | "source": [ 70 | "`eo-learn` and `sentinelhub` imports" 71 | ] 72 | }, 73 | { 74 | "cell_type": "code", 75 | "execution_count": null, 76 | "metadata": {}, 77 | "outputs": [], 78 | "source": [ 79 | "from eolearn.core import FeatureType\n", 80 | "from eolearn.io import SentinelHubInputTask, SentinelHubEvalscriptTask" 81 | ] 82 | }, 83 | { 84 | "cell_type": "code", 85 | "execution_count": null, 86 | "metadata": {}, 87 | "outputs": [], 88 | "source": [ 89 | "from sentinelhub import BBoxSplitter, BBox, CRS, DataCollection" 90 | ] 91 | }, 92 | { 93 | "cell_type": "markdown", 94 | "metadata": {}, 95 | "source": [ 96 | "## 1. 
Split country into smaller bounding boxes " 97 | ] 98 | }, 99 | { 100 | "cell_type": "markdown", 101 | "metadata": {}, 102 | "source": [ 103 | "Load shapefile of Denmark" 104 | ] 105 | }, 106 | { 107 | "cell_type": "code", 108 | "execution_count": null, 109 | "metadata": {}, 110 | "outputs": [], 111 | "source": [ 112 | "country_filename = data_dir / 'denmark.geojson'\n", 113 | "country = gpd.read_file(country_filename)\n", 114 | "\n", 115 | "country.plot()\n", 116 | "country.crs" 117 | ] 118 | }, 119 | { 120 | "cell_type": "markdown", 121 | "metadata": {}, 122 | "source": [ 123 | "Set CRS to UTM" 124 | ] 125 | }, 126 | { 127 | "cell_type": "code", 128 | "execution_count": null, 129 | "metadata": {}, 130 | "outputs": [], 131 | "source": [ 132 | "country_crs = CRS.UTM_32N\n", 133 | "country = country.to_crs(country_crs.pyproj_crs())\n", 134 | "\n", 135 | "country.plot()\n", 136 | "country.crs" 137 | ] 138 | }, 139 | { 140 | "cell_type": "markdown", 141 | "metadata": {}, 142 | "source": [ 143 | "Get size of country in pixels to decide number and size of bounding boxes" 144 | ] 145 | }, 146 | { 147 | "cell_type": "code", 148 | "execution_count": null, 149 | "metadata": {}, 150 | "outputs": [], 151 | "source": [ 152 | "country_shape = country.geometry.values[-1]\n", 153 | "\n", 154 | "width_pix = int((country_shape.bounds[2] - country_shape.bounds[0]) / 10)\n", 155 | "height_pix = int((country_shape.bounds[3] - country_shape.bounds[1]) / 10)\n", 156 | "\n", 157 | "print(f'Dimension of the area is {width_pix} x {height_pix} pixels')" 158 | ] 159 | }, 160 | { 161 | "cell_type": "markdown", 162 | "metadata": {}, 163 | "source": [ 164 | "Split area into 45x35 boxes bounding " 165 | ] 166 | }, 167 | { 168 | "cell_type": "code", 169 | "execution_count": null, 170 | "metadata": {}, 171 | "outputs": [], 172 | "source": [ 173 | "bbox_splitter = BBoxSplitter([country_shape], country_crs, (45, 35))" 174 | ] 175 | }, 176 | { 177 | "cell_type": "code", 178 | "execution_count": null, 179 | "metadata": {}, 180 | "outputs": [], 181 | "source": [ 182 | "geometry = [bbox.geometry for bbox in bbox_splitter.get_bbox_list()]\n", 183 | "bbox_list = bbox_splitter.get_bbox_list()\n", 184 | "idxs_x = [info['index_x'] for info in bbox_splitter.get_info_list()]\n", 185 | "idxs_y = [info['index_y'] for info in bbox_splitter.get_info_list()]\n", 186 | "\n", 187 | "gdf = gpd.GeoDataFrame(\n", 188 | " {'index_x':idxs_x, 'index_y':idxs_y},\n", 189 | " geometry=[bbox.geometry for bbox in bbox_list],\n", 190 | " crs=bbox_list[0].crs.pyproj_crs()\n", 191 | ")\n", 192 | "\n", 193 | "gdf.head()" 194 | ] 195 | }, 196 | { 197 | "cell_type": "markdown", 198 | "metadata": {}, 199 | "source": [ 200 | "Plot results" 201 | ] 202 | }, 203 | { 204 | "cell_type": "code", 205 | "execution_count": null, 206 | "metadata": {}, 207 | "outputs": [], 208 | "source": [ 209 | "# if bboxes have all same size, estimate offset\n", 210 | "xl, yl, xu, yu = gdf.geometry[0].bounds\n", 211 | "xoff, yoff = (xu - xl) / 3, (yu - yl) / 5\n", 212 | "\n", 213 | "# figure\n", 214 | "fig, ax = plt.subplots(figsize=(45,35))\n", 215 | "gdf.plot(ax=ax, facecolor='w', edgecolor='r', alpha=0.5, linewidth=2)\n", 216 | "country.plot(ax=ax, facecolor='w', edgecolor='b', alpha=0.5, linewidth=2.5)\n", 217 | "ax.set_title('Denmark tiled in a 45 x 35 grid');\n", 218 | "\n", 219 | "# add annotiation text\n", 220 | "fontdict = {'family': 'monospace', 'weight': 'normal', 'size': 14}\n", 221 | "for idx in gdf.index:\n", 222 | " eop_name = '{0}x{1}'.format(gdf.index_x[idx], 
gdf.index_y[idx])\n", 223 | " centroid, = list(gdf.geometry[idx].centroid.coords)\n", 224 | " ax.text(centroid[0] - xoff, centroid[1] + yoff, str(idx), fontdict=fontdict)\n", 225 | " ax.text(centroid[0] - xoff, centroid[1] - yoff, eop_name, fontdict=fontdict)" 226 | ] 227 | }, 228 | { 229 | "cell_type": "markdown", 230 | "metadata": {}, 231 | "source": [ 232 | "## 2. Retrieve S2 L1C data " 233 | ] 234 | }, 235 | { 236 | "cell_type": "code", 237 | "execution_count": null, 238 | "metadata": {}, 239 | "outputs": [], 240 | "source": [ 241 | "s2_rgb_task = SentinelHubInputTask(\n", 242 | " data_collection=DataCollection.SENTINEL2_L1C,\n", 243 | " bands=['B04', 'B03', 'B02'],\n", 244 | " bands_feature=(FeatureType.DATA, 'S2-RGB'),\n", 245 | " additional_data=[(FeatureType.MASK, 'dataMask')],\n", 246 | " resolution=(10, 10),\n", 247 | " maxcc=0.1\n", 248 | ")" 249 | ] 250 | }, 251 | { 252 | "cell_type": "code", 253 | "execution_count": null, 254 | "metadata": {}, 255 | "outputs": [], 256 | "source": [ 257 | "ndvi_evalscript = \"\"\"\n", 258 | "//VERSION=3\n", 259 | "\n", 260 | "function setup() {\n", 261 | " return {\n", 262 | " input: [\"B04\", \"B08\"],\n", 263 | " output:[\n", 264 | " {\n", 265 | " id: \"ndvi\",\n", 266 | " bands: 1,\n", 267 | " sampleType: SampleType.FLOAT32\n", 268 | " },\n", 269 | " ]\n", 270 | " }\n", 271 | "}\n", 272 | "\n", 273 | "function evaluatePixel(sample) {\n", 274 | " let ndvi = index(sample.B08, sample.B04);\n", 275 | " return {\n", 276 | " ndvi: [ndvi],\n", 277 | " };\n", 278 | "}\n", 279 | "\"\"\"\n", 280 | "\n", 281 | "s2_ndvi_task = SentinelHubEvalscriptTask(\n", 282 | " features=[(FeatureType.DATA, 'ndvi', 'NDVI')],\n", 283 | " evalscript=ndvi_evalscript,\n", 284 | " data_collection=DataCollection.SENTINEL2_L1C,\n", 285 | " resolution=(10, 10),\n", 286 | " maxcc=0.1\n", 287 | ")" 288 | ] 289 | }, 290 | { 291 | "cell_type": "code", 292 | "execution_count": null, 293 | "metadata": {}, 294 | "outputs": [], 295 | "source": [ 296 | "time_interval = ['2019-05-01','2019-09-01']\n", 297 | "idx = 436\n", 298 | "bbox = bbox_splitter.bbox_list[idx]" 299 | ] 300 | }, 301 | { 302 | "cell_type": "markdown", 303 | "metadata": {}, 304 | "source": [ 305 | "Download TRUE-COLOR" 306 | ] 307 | }, 308 | { 309 | "cell_type": "code", 310 | "execution_count": null, 311 | "metadata": {}, 312 | "outputs": [], 313 | "source": [ 314 | "eopatch = s2_rgb_task.execute(bbox=bbox, time_interval=time_interval)\n", 315 | "\n", 316 | "eopatch" 317 | ] 318 | }, 319 | { 320 | "cell_type": "markdown", 321 | "metadata": {}, 322 | "source": [ 323 | "Download NDVI" 324 | ] 325 | }, 326 | { 327 | "cell_type": "code", 328 | "execution_count": null, 329 | "metadata": {}, 330 | "outputs": [], 331 | "source": [ 332 | "eopatch = s2_ndvi_task.execute(eopatch)\n", 333 | "\n", 334 | "eopatch" 335 | ] 336 | }, 337 | { 338 | "cell_type": "code", 339 | "execution_count": null, 340 | "metadata": {}, 341 | "outputs": [], 342 | "source": [ 343 | "eopatch.timestamp" 344 | ] 345 | }, 346 | { 347 | "cell_type": "markdown", 348 | "metadata": {}, 349 | "source": [ 350 | "Plot RGB of time frames" 351 | ] 352 | }, 353 | { 354 | "cell_type": "code", 355 | "execution_count": null, 356 | "metadata": {}, 357 | "outputs": [], 358 | "source": [ 359 | "time_idx = 0\n", 360 | "\n", 361 | "rgb = eopatch.data['S2-RGB']\n", 362 | "\n", 363 | "fig, ax = plt.subplots(figsize=(15, 15))\n", 364 | "ax.imshow(rgb[time_idx] * 3.5);" 365 | ] 366 | }, 367 | { 368 | "cell_type": "markdown", 369 | "metadata": {}, 370 | "source": [ 371 | "Plot 
the median RGB values" 372 | ] 373 | }, 374 | { 375 | "cell_type": "code", 376 | "execution_count": null, 377 | "metadata": {}, 378 | "outputs": [], 379 | "source": [ 380 | "fig, ax = plt.subplots(figsize=(15, 15))\n", 381 | "\n", 382 | "ax.imshow(np.median(rgb, axis=0).squeeze() * 3.5);" 383 | ] 384 | }, 385 | { 386 | "cell_type": "markdown", 387 | "metadata": {}, 388 | "source": [ 389 | "Plot the median NDVI values" 390 | ] 391 | }, 392 | { 393 | "cell_type": "code", 394 | "execution_count": null, 395 | "metadata": {}, 396 | "outputs": [], 397 | "source": [ 398 | "ndvi = eopatch.data['NDVI']\n", 399 | "median_ndvi = np.median(ndvi, axis=0).squeeze()\n", 400 | "\n", 401 | "fig, ax = plt.subplots(figsize=(15,15))\n", 402 | "im = ax.imshow(median_ndvi, cmap=plt.cm.YlGn)\n", 403 | "\n", 404 | "divider = make_axes_locatable(ax)\n", 405 | "cax = divider.append_axes('right', size='5%', pad=0.05)\n", 406 | "fig.colorbar(im, cax=cax, orientation='vertical');" 407 | ] 408 | }, 409 | { 410 | "cell_type": "markdown", 411 | "metadata": {}, 412 | "source": [ 413 | "Plot temporal NDVI of a given location" 414 | ] 415 | }, 416 | { 417 | "cell_type": "code", 418 | "execution_count": null, 419 | "metadata": {}, 420 | "outputs": [], 421 | "source": [ 422 | "dates_num = dates.date2num(eopatch.timestamp)\n", 423 | "dates_str = [timestamp.date().isoformat() for timestamp in eopatch.timestamp]\n", 424 | "\n", 425 | "fig, ax = plt.subplots(figsize=(15, 15))\n", 426 | "ax.plot(dates_num, ndvi[:, 100, 550, :].squeeze(), 'g')\n", 427 | "\n", 428 | "ax.set_title('NDVI evolution')\n", 429 | "ax.set_xticks(dates_num);\n", 430 | "ax.set_xticklabels(dates_str, rotation=45, ha='right');\n", 431 | "ax.set_ylabel('NDVI');" 432 | ] 433 | }, 434 | { 435 | "attachments": {}, 436 | "cell_type": "markdown", 437 | "metadata": {}, 438 | "source": [ 439 | "## 4. 
Retrieve S1 data" 440 | ] 441 | }, 442 | { 443 | "cell_type": "code", 444 | "execution_count": null, 445 | "metadata": {}, 446 | "outputs": [], 447 | "source": [ 448 | "\n", 449 | "s1_iw_des_task = SentinelHubInputTask(\n", 450 | " data_collection=DataCollection.SENTINEL1_IW_DES,\n", 451 | " bands=['VV'],\n", 452 | " bands_feature=(FeatureType.DATA, 'S1-IW-DES'),\n", 453 | " additional_data=[(FeatureType.MASK, 'dataMask')],\n", 454 | " resolution=(10, 10)\n", 455 | ")\n", 456 | "\n", 457 | "s1_iw_asc_task = SentinelHubInputTask(\n", 458 | " data_collection=DataCollection.SENTINEL1_IW_ASC,\n", 459 | " bands=['VV'],\n", 460 | " bands_feature=(FeatureType.DATA, 'S1-IW-ASC'),\n", 461 | " additional_data=[(FeatureType.MASK, 'dataMask')],\n", 462 | " resolution=(10, 10)\n", 463 | ")" 464 | ] 465 | }, 466 | { 467 | "cell_type": "code", 468 | "execution_count": null, 469 | "metadata": {}, 470 | "outputs": [], 471 | "source": [ 472 | "\n", 473 | "eopatch_s1_des = s1_iw_des_task.execute(bbox=bbox, time_interval=['2019-07-01','2019-08-01'])\n", 474 | "\n", 475 | "eopatch_s1_des" 476 | ] 477 | }, 478 | { 479 | "cell_type": "code", 480 | "execution_count": null, 481 | "metadata": {}, 482 | "outputs": [], 483 | "source": [ 484 | "vv_data = eopatch_s1_des.data['S1-IW-DES']\n", 485 | "vv_data[np.isnan(vv_data)] = 0\n", 486 | "\n", 487 | "vv_des_r = np.percentile(vv_data, 80, axis=0)\n", 488 | "vv_des_g = np.percentile(vv_data, 20, axis=0)\n", 489 | "vv_des_b = np.std(vv_data, axis=0)\n", 490 | "\n", 491 | "vv_rgb = np.concatenate((vv_des_r, vv_des_g, vv_des_b), axis=-1)\n", 492 | "\n", 493 | "plt.figure(figsize=(15, 15))\n", 494 | "plt.imshow(vv_rgb);" 495 | ] 496 | }, 497 | { 498 | "cell_type": "code", 499 | "execution_count": null, 500 | "metadata": {}, 501 | "outputs": [], 502 | "source": [ 503 | "eopatch_s1_asc = s1_iw_asc_task.execute(bbox=bbox, time_interval=['2019-07-01','2019-08-01'])\n", 504 | "\n", 505 | "eopatch_s1_asc" 506 | ] 507 | }, 508 | { 509 | "cell_type": "code", 510 | "execution_count": null, 511 | "metadata": {}, 512 | "outputs": [], 513 | "source": [ 514 | "vv_data = eopatch_s1_asc.data['S1-IW-ASC']\n", 515 | "vv_data[np.isnan(vv_data)] = 0\n", 516 | "\n", 517 | "vv_des_r = vv_data[0]\n", 518 | "vv_des_g = vv_data[2]\n", 519 | "vv_des_b = vv_data[10]\n", 520 | "\n", 521 | "vv_rgb = np.concatenate((vv_des_r, vv_des_g, vv_des_b), axis=-1)\n", 522 | "\n", 523 | "plt.figure(figsize=(15, 15))\n", 524 | "plt.imshow(vv_rgb);" 525 | ] 526 | }, 527 | { 528 | "attachments": {}, 529 | "cell_type": "markdown", 530 | "metadata": {}, 531 | "source": [ 532 | "Similarly, Sentinel-2 L2A data can be added, as well as Digital Elevation data" 533 | ] 534 | } 535 | ], 536 | "metadata": { 537 | "kernelspec": { 538 | "display_name": "eo", 539 | "language": "python", 540 | "name": "eo" 541 | }, 542 | "language_info": { 543 | "codemirror_mode": { 544 | "name": "ipython", 545 | "version": 3 546 | }, 547 | "file_extension": ".py", 548 | "mimetype": "text/x-python", 549 | "name": "python", 550 | "nbconvert_exporter": "python", 551 | "pygments_lexer": "ipython3", 552 | "version": "3.10.9" 553 | }, 554 | "vscode": { 555 | "interpreter": { 556 | "hash": "6a6edf0266831650904d45ebea3a1b4640f37efcd9e2004c6886f3018479f897" 557 | } 558 | } 559 | }, 560 | "nbformat": 4, 561 | "nbformat_minor": 4 562 | } 563 | -------------------------------------------------------------------------------- /Optical/notebooks/earth_observation_with_StatAPI.ipynb: 
-------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "%reload_ext autoreload\n", 10 | "%autoreload 2\n", 11 | "%matplotlib inline" 12 | ] 13 | }, 14 | { 15 | "cell_type": "markdown", 16 | "metadata": {}, 17 | "source": [ 18 | "# Retrieving data with StatAPI" 19 | ] 20 | }, 21 | { 22 | "cell_type": "markdown", 23 | "metadata": {}, 24 | "source": [ 25 | "### Imports" 26 | ] 27 | }, 28 | { 29 | "cell_type": "code", 30 | "execution_count": null, 31 | "metadata": {}, 32 | "outputs": [], 33 | "source": [ 34 | "import datetime as dt\n", 35 | "from functools import partial\n", 36 | "\n", 37 | "import json\n", 38 | "import pandas as pd\n", 39 | "import geopandas as gpd\n", 40 | "import numpy as np\n", 41 | "from tqdm.auto import tqdm\n", 42 | "\n", 43 | "import matplotlib.pyplot as plt\n", 44 | "import matplotlib.cm as cm\n", 45 | "import matplotlib.colors as cols\n", 46 | "\n", 47 | "import shapely" 48 | ] 49 | }, 50 | { 51 | "cell_type": "code", 52 | "execution_count": null, 53 | "metadata": {}, 54 | "outputs": [], 55 | "source": [ 56 | "from util import stat_to_df\n", 57 | "from sentinelhub import (\n", 58 | " SentinelHubRequest, DataCollection, SHConfig,\n", 59 | " SentinelHubStatisticalDownloadClient, SentinelHubStatistical,\n", 60 | " CRS, BBox, Geometry, bbox_to_dimensions, \n", 61 | " parse_time_interval, MimeType, WcsRequest\n", 62 | ")" 63 | ] 64 | }, 65 | { 66 | "cell_type": "code", 67 | "execution_count": null, 68 | "metadata": {}, 69 | "outputs": [], 70 | "source": [ 71 | "# helper function\n", 72 | "def to_download_requests(gdf, data_folder=None):\n", 73 | " \"\"\" function to create StatAPI request per each geometry in geopandas geoDataFrame \n", 74 | " \n", 75 | " :param: data_folder: specify a folder to cache the responses from SH service - particularly useful when testing\n", 76 | " \"\"\"\n", 77 | " \n", 78 | " stat_requests = []\n", 79 | " for row in gdf.itertuples():\n", 80 | " req = SentinelHubStatistical(\n", 81 | " aggregation=aggregation, \n", 82 | " calculations=calculations, \n", 83 | " input_data=[input_data], \n", 84 | " geometry=Geometry(row.geometry, crs=CRS(gdf.crs.to_epsg())),\n", 85 | " data_folder=data_folder\n", 86 | " )\n", 87 | " stat_requests.append(req)\n", 88 | " \n", 89 | " download_requests = [stat_request.download_list[0] for stat_request in stat_requests]\n", 90 | " \n", 91 | " if data_folder:\n", 92 | " for download_request in download_requests:\n", 93 | " download_request.save_response = True\n", 94 | " \n", 95 | " return download_requests" 96 | ] 97 | }, 98 | { 99 | "cell_type": "markdown", 100 | "metadata": {}, 101 | "source": [ 102 | "## Fetch statististat_to_df using sh-py + StatAPI" 103 | ] 104 | }, 105 | { 106 | "cell_type": "markdown", 107 | "metadata": {}, 108 | "source": [ 109 | "Evalscript to retrieve data:" 110 | ] 111 | }, 112 | { 113 | "cell_type": "code", 114 | "execution_count": null, 115 | "metadata": {}, 116 | "outputs": [], 117 | "source": [ 118 | "evalscript = \"\"\"\n", 119 | "//VERSION=3\n", 120 | "function setup() {\n", 121 | " return {\n", 122 | " input: [{\n", 123 | " bands: [\"B01\", \"B02\", \"B03\", \"B04\", \"B05\", \"B06\", \"B07\", \"B08\", \"B8A\", \"B09\", \"B10\", \"B11\", \"B12\", \"CLM\", \"CLP\", \"dataMask\"],\n", 124 | " units: \"DN\"\n", 125 | " }],\n", 126 | " output: [\n", 127 | " {\n", 128 | " id: \"bands\",\n", 129 | " bands: [\"B01\", \"B02\", 
\"B03\", \"B04\", \"B05\", \"B06\", \"B07\", \"B08\", \"B8A\", \"B09\", \"B10\", \"B11\", \"B12\"],\n", 130 | " sampleType: \"UINT16\"\n", 131 | " },\n", 132 | " {\n", 133 | " id: \"masks\",\n", 134 | " bands: [\"CLM\"],\n", 135 | " sampleType: \"UINT16\"\n", 136 | " },\n", 137 | " {\n", 138 | " id: \"indices\",\n", 139 | " bands: [\"NDVI\", \"NDVI_RE1\", \"NBSI\", \"CLP\"],\n", 140 | " sampleType: \"UINT16\"\n", 141 | " },\n", 142 | " {\n", 143 | " id: \"dataMask\",\n", 144 | " bands: 1\n", 145 | " }]\n", 146 | " }\n", 147 | "}\n", 148 | "\n", 149 | "function evaluatePixel(samples) {\n", 150 | " // Normalised Difference Vegetation Index and variation\n", 151 | " let NDVI = index(samples.B08, samples.B04);\n", 152 | " let NDVI_RE1 = index(samples.B08, samples.B05);\n", 153 | "\n", 154 | " // Bare Soil Index \n", 155 | " let NBSI = index((samples.B11 + samples.B04), (samples.B08 + samples.B02));\n", 156 | " \n", 157 | " // cloud probability normalized to 0..1\n", 158 | " let CLP = samples.CLP/255.0;\n", 159 | " \n", 160 | " const f = 5000;\n", 161 | " return {\n", 162 | " bands: [samples.B01, samples.B02, samples.B03, samples.B04, samples.B05, samples.B06, \n", 163 | " samples.B07, samples.B08, samples.B8A, samples.B09, samples.B10, samples.B11, samples.B12],\n", 164 | " masks: [samples.CLM],\n", 165 | " indices: [toUINT(NDVI, f), toUINT(NDVI_RE1, f), toUINT(NBSI, f), toUINT(CLP, f)],\n", 166 | " dataMask: [samples.dataMask]\n", 167 | " };\n", 168 | "}\n", 169 | "\n", 170 | "function toUINT(product, constant){\n", 171 | " // Clamp the output to [-1, 10] and convert it to a UNIT16\n", 172 | " // value that can be converted back to float later.\n", 173 | " if (product < -1) {\n", 174 | " product = -1;\n", 175 | " } else if (product > 10) {\n", 176 | " product = 10;\n", 177 | " }\n", 178 | " return Math.round(product * constant) + constant;\n", 179 | "}\n", 180 | "\n", 181 | "function feature_ratio(band_a, band_b, constant){\n", 182 | " // Compute Band Ratio of the form A/B + constant.\n", 183 | " return (band_a / band_b) + constant;\n", 184 | "}\n", 185 | "\n", 186 | "\"\"\"" 187 | ] 188 | }, 189 | { 190 | "cell_type": "code", 191 | "execution_count": null, 192 | "metadata": {}, 193 | "outputs": [], 194 | "source": [ 195 | "kranj_wgs84 = [14.26, 46.26, 14.51, 46.11]\n", 196 | "bbox = BBox(bbox=kranj_wgs84, crs=CRS.WGS84)" 197 | ] 198 | }, 199 | { 200 | "cell_type": "code", 201 | "execution_count": null, 202 | "metadata": {}, 203 | "outputs": [], 204 | "source": [ 205 | "time_interval = ('2022-03-01', '2022-09-01')" 206 | ] 207 | }, 208 | { 209 | "cell_type": "code", 210 | "execution_count": null, 211 | "metadata": {}, 212 | "outputs": [], 213 | "source": [ 214 | "wcs_true_color_request = WcsRequest(\n", 215 | " data_collection=DataCollection.SENTINEL2_L1C,\n", 216 | " data_folder=\"./cache/\",\n", 217 | " layer='TRUE-COLOR-S2-L1C',\n", 218 | " bbox=bbox,\n", 219 | " time=time_interval,\n", 220 | " resx = \"60m\",\n", 221 | " resy = \"60m\",\n", 222 | " image_format= MimeType.TIFF\n", 223 | ")\n", 224 | "images = wcs_true_color_request.get_data(save_data=True)\n", 225 | "len(images)" 226 | ] 227 | }, 228 | { 229 | "cell_type": "code", 230 | "execution_count": null, 231 | "metadata": {}, 232 | "outputs": [], 233 | "source": [ 234 | "fig, axs = plt.subplots(3,5, figsize=(20,10))\n", 235 | "for idx, (image, datum) in enumerate(zip(images[:15], wcs_true_color_request.get_dates()[:15])):\n", 236 | " axs.flat[idx].imshow(image)\n", 237 | " axs.flat[idx].set_title(datum.date().isoformat())" 238 | ] 
239 | }, 240 | { 241 | "cell_type": "code", 242 | "execution_count": null, 243 | "metadata": {}, 244 | "outputs": [], 245 | "source": [ 246 | "resolution=(10,10)" 247 | ] 248 | }, 249 | { 250 | "cell_type": "code", 251 | "execution_count": null, 252 | "metadata": {}, 253 | "outputs": [], 254 | "source": [ 255 | "aggregation = SentinelHubStatistical.aggregation(\n", 256 | " evalscript=evalscript,\n", 257 | " time_interval=time_interval,\n", 258 | " aggregation_interval='P1D',\n", 259 | " size=bbox_to_dimensions(bbox, 100))" 260 | ] 261 | }, 262 | { 263 | "cell_type": "code", 264 | "execution_count": null, 265 | "metadata": {}, 266 | "outputs": [], 267 | "source": [ 268 | "input_data = SentinelHubRequest.input_data(DataCollection.SENTINEL2_L1C, maxcc=1)" 269 | ] 270 | }, 271 | { 272 | "cell_type": "markdown", 273 | "metadata": {}, 274 | "source": [ 275 | "