├── .gitignore ├── LICENSE ├── README.md ├── cloudnativegeo └── extract_timeseries_cogs.ipynb ├── coding_exercises └── first_non_repeating_character.ipynb ├── ee-python ├── README.md ├── asset_size.py ├── delete_gee_assets.ipynb ├── download_data.py ├── dynamic_visualization_parameters.ipynb ├── export_a_collection.ipynb ├── export_climate_timeseries.ipynb ├── export_image_bands.ipynb ├── large_gridded_exports.ipynb ├── list_all_assets.py ├── manage_tasks.ipynb ├── python_api_syntax.ipynb ├── rename_collection.py ├── stac_gee_catalog.ipynb ├── tf_classification_local.ipynb └── update_acl.py ├── environment.yml ├── fill_nodata ├── README.md ├── demo_fix_missing_data.ipynb └── demo_simulate_missing_data.ipynb ├── geeup ├── README.md ├── rename_files.py └── update_metadata.py ├── h3 ├── ASAM_shp.zip ├── gridcounts.gpkg └── point_in_polygon.ipynb ├── imd ├── README.md ├── download_all.py ├── imd_annual_average.ipynb └── imd_to_geotiff.ipynb ├── misc ├── ascii_to_csv.ipynb ├── buildings.png ├── csv_to_gml.ipynb ├── dat_to_csv.ipynb ├── data.csv ├── import.png ├── import_wkt.png ├── result.png └── unpivot.ipynb ├── pyqgis ├── README.md ├── actions │ ├── buffer_select_action.py │ ├── hello_world.py │ ├── mapillary_action.py │ ├── point_select_action.py │ ├── reverse_geocode_street.py │ ├── tileindex_load.py │ ├── tileindex_remove.py │ └── update_field.py ├── attributeiterator.py ├── copy_raster.py ├── ee_qgis.py ├── filter_layer.py ├── gee_annual_precipitation.py ├── rastercalculator.py └── rename_layers.py ├── python ├── .gitignore ├── README.md ├── articles │ ├── article1.txt │ ├── article2.txt │ └── article3.txt ├── ascii_to_csv.ipynb ├── band_combinations.ipynb ├── buildings.png ├── complex_kml_to_gpkg.ipynb ├── convert_multilayer_kml.ipynb ├── csv_to_gml.ipynb ├── dat_to_csv.ipynb ├── data.csv ├── douglas_peucker.gif ├── douglas_peucker.png ├── geocoding_with_manual_update.ipynb ├── ghcn.ipynb ├── import.png ├── import_wkt.png ├── kml.png ├── line.dbf ├── line.png ├── line.prj ├── line.shp ├── line.shx ├── line_interpolation.ipynb ├── line_interpolation.png ├── mapping_news_articles_openai.ipynb ├── maritime_piracy.ipynb ├── matplotlib_animation.ipynb ├── netcdf_nco.ipynb ├── pandas_scraping.ipynb ├── raster_from_array.ipynb ├── rasterio_cloudmask.ipynb ├── reprojection_and_coordinate_transform.ipynb ├── result.png ├── select_subset_from_file.ipynb ├── simple_animation.gif ├── stacked_barchart.jpg ├── unpivot.ipynb ├── visvalingam_whyatt.gif ├── visvalingam_whyatt.png ├── xarray_netcdf.ipynb └── xarray_wrf.ipynb └── qgis └── freestyle └── palette.txt /.gitignore: -------------------------------------------------------------------------------- 1 | .ipynb_checkpoints 2 | h3/.ipynb_checkpoints/* 3 | */.ipynb_checkpoints/* 4 | ghcn/data/* 5 | ghcn/output/* 6 | __pycache__ 7 | .DS_Store 8 | */*/.DS_Store 9 | 10 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 
14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 
134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 
193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # projects -------------------------------------------------------------------------------- /coding_exercises/first_non_repeating_character.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "id": "467e040e-f19e-41cc-a21c-6bd87cf5c0b4", 6 | "metadata": {}, 7 | "source": [ 8 | "## First Non-Repeating Character\n", 9 | "\n", 10 | "Extract the first character of a string that is not repeated consecutively.\n", 11 | "\n", 12 | "'aaabbbcdeee' -> 'c'\n", 13 | "'xxxyssxxyyz' -> 'y' (the 'y' at index 3 is the first character that is not immediately repeated)" 14 | ] 15 | }, 16 | { 17 | "cell_type": "code", 18 | "execution_count": 31, 19 | "id": "64600185-f527-4d27-9eb8-c11046329793", 20 | "metadata": {}, 21 | "outputs": [], 22 | "source": [ 23 | "test = {'aaabbbcdeee': 'c', 'xxxyssxxyyz': 'y', 'tddgbs': 't', 'abc': 'a'}" 24 | ] 25 | }, 26 | { 27 | "cell_type": "markdown", 28 | "id": "ee4e15e1-e338-463b-aa27-7cda112eed54", 29 | "metadata": {}, 30 | "source": [ 31 | "## Comparing Each Character with the Previous One" 32 | ] 33 | }, 34 | { 35 | "cell_type": "code", 36 | "execution_count": 41, 37 | "id": "da6a8258-f9c6-4965-b50f-2441f6947bdc", 38 | "metadata": {}, 39 | "outputs": [], 40 | "source": [ 41 | "def find_non_repeating(input):\n", 42 | "    found_repeating = False\n", 43 | "    last_character = input[0]\n", 44 | "    for x in input[1:]:\n", 45 | "        if not found_repeating and x != last_character:\n", 46 | "            return last_character\n", 48 | "        if x == last_character:\n", 49 | "            found_repeating = True\n", 50 | "        else:\n", 51 | "            found_repeating = False\n", 52 | "        last_character = x\n", "    # A trailing non-repeating character is never compared against a\n", "    # successor inside the loop, so handle it after the loop ends\n", "    if not found_repeating:\n", "        return last_character" 53 | ] 54 | }, 55 | { 56 | "cell_type": "code", 57 | "execution_count": 42, 58 | "id": "4b2648d0-2b3b-4e99-81ef-32ff7bb731f2", 59 | "metadata": {}, 60 | "outputs": [ 61 | { 62 | "name": "stdout", 63 | "output_type": "stream", 64 | "text": [ 65 | "c c\n", 66 | "y y\n", 67 | "t t\n", 68 | "a a\n" 69 | ] 70 | } 71 | ], 72 | "source": [ 73 | "for k, v in test.items():\n", 74 | "    print(find_non_repeating(k), v)" 75 | ] 76 | }, 77 | { 78 | "cell_type": "markdown", 79 | "id": "4c78652e-f63f-4231-83d8-2f3d4e3c9e84", 80 | "metadata": {}, 81 | "source": [ 82 | "## Using a Dictionary" 83 | ] 84 | }, 85 | { 86 | "cell_type": "markdown", 87 | "id": "d4b5c267-30c1-49cb-9b7c-46138995314b", 88 | "metadata": {}, 89 | "source": [ 90 | "Python dictionaries preserve insertion order as of 3.7, and using defaultdict lets us increment a character's count without initializing it first.\n", "\n", "Note that counting finds the first character whose *total* count is 1, which is a different definition: for 'xxxyssxxyyz' it returns 'z' (the only character that occurs exactly once) rather than 'y', as the mismatch in the output below shows." 91 | ] 92 | }, 93 | { 94 | "cell_type": "code", 95 | "execution_count": 48, 96 | "id": "50ee6986-9240-42a3-9f18-f54486954b83", 97 | "metadata": {}, 98 | "outputs": [ 99 | { 100 | "name": "stdout", 101 | "output_type": "stream", 102 | "text": [ 103 | "c c\n", 104 | "z y\n", 105 | "t t\n", 106 | "a a\n" 107 | ] 108 | } 109 | ], 110 | "source": [ 111 | "from collections import defaultdict\n", 112 | " \n", 113 | "def find_non_repeating(input):\n", 114 | "    c = 
defaultdict(int)\n", 115 | "    for x in input:\n", 116 | "        c[x] += 1\n", 117 | "    for k, v in c.items():\n", 118 | "        if v == 1:\n", 119 | "            return k\n", 121 | "    \n", 122 | "for k, v in test.items():\n", 123 | "    print(find_non_repeating(k), v)\n", 124 | "    " 125 | ] 126 | } 127 | ], 128 | "metadata": { 129 | "kernelspec": { 130 | "display_name": "Python 3", 131 | "language": "python", 132 | "name": "python3" 133 | }, 134 | "language_info": { 135 | "codemirror_mode": { 136 | "name": "ipython", 137 | "version": 3 138 | }, 139 | "file_extension": ".py", 140 | "mimetype": "text/x-python", 141 | "name": "python", 142 | "nbconvert_exporter": "python", 143 | "pygments_lexer": "ipython3", 144 | "version": "3.9.4" 145 | } 146 | }, 147 | "nbformat": 4, 148 | "nbformat_minor": 5 149 | } 150 | -------------------------------------------------------------------------------- /ee-python/README.md: -------------------------------------------------------------------------------- 1 | ## Google Earth Engine Python API Code 2 | 3 | This folder contains scripts and Jupyter notebooks that demonstrate the use of the Google Earth Engine Python API. 4 | 5 | ### Notebooks 6 | 7 | - [`python_api_syntax.ipynb`](https://github.com/spatialthoughts/projects/blob/master/ee-python/python_api_syntax.ipynb): Getting started with the Python API 8 | - [`export_a_collection.ipynb`](https://github.com/spatialthoughts/projects/blob/master/ee-python/export_a_collection.ipynb): How to export all images in a collection using `ee.Task` 9 | - [`large_gridded_exports.ipynb`](https://github.com/spatialthoughts/projects/blob/master/ee-python/large_gridded_exports.ipynb): How to create tiled exports from large images. 10 | - [`export_image_bands.ipynb`](https://github.com/spatialthoughts/projects/blob/master/ee-python/export_image_bands.ipynb): How to export all bands of an image as separate images using `ee.Task` 11 | - [`manage_tasks.ipynb`](https://github.com/spatialthoughts/projects/blob/master/ee-python/manage_tasks.ipynb): How to list and cancel running tasks 12 | - [`delete_gee_assets.ipynb`](https://github.com/spatialthoughts/projects/blob/master/ee-python/delete_gee_assets.ipynb): How to delete multiple assets, including all assets within a folder/collection 13 | - [`dynamic_visualization_parameters.ipynb`](https://github.com/spatialthoughts/projects/blob/master/ee-python/dynamic_visualization_parameters.ipynb): How to compute image statistics and use them in visualization parameters 14 | - [`stac_gee_catalog.ipynb`](https://github.com/spatialthoughts/projects/blob/master/ee-python/stac_gee_catalog.ipynb): How to query a static STAC catalog as a JSON file on Google Cloud Storage. 15 | 16 | ### Scripts 17 | - [`rename_collection.py`](https://github.com/spatialthoughts/projects/blob/master/ee-python/rename_collection.py): Renames a collection by copying the child assets to a new collection and deleting the old collection recursively. 18 | - [`update_acl.py`](https://github.com/spatialthoughts/projects/blob/master/ee-python/update_acl.py): Changes permissions of all assets in a folder. 19 | - [`download_data.py`](https://github.com/spatialthoughts/projects/blob/master/ee-python/download_data.py): How to automate downloading of data using the Google Earth Engine API. 
20 | - [`asset_size.py`](https://github.com/spatialthoughts/projects/blob/master/ee-python/asset_size.py): Recursively calculate the size of all assets in a folder 21 | -------------------------------------------------------------------------------- /ee-python/asset_size.py: -------------------------------------------------------------------------------- 1 | """This script queries your Asset folder and generates a CSV file 2 | with the size and type of each asset. 3 | 4 | Usage: 5 | 6 | python asset_size.py --asset_folder <asset_folder> --output_file output.csv 7 | 8 | Example: 9 | python asset_size.py \ 10 | --asset_folder projects/earthengine-legacy/assets/users/ujavalgandhi/temp \ 11 | --output_file output.csv 12 | """ 13 | import argparse 14 | import ee 15 | import csv 16 | 17 | parser = argparse.ArgumentParser(usage='python asset_size.py --asset_folder <asset_folder> --output_file <output_file>') 18 | parser.add_argument('--asset_folder', help='full path to the asset folder') 19 | parser.add_argument('--output_file', help='output file to write') 20 | 21 | args = parser.parse_args() 22 | parent = args.asset_folder 23 | 24 | # Replace the cloud_project with your own project 25 | cloud_project = 'spatialthoughts' 26 | 27 | try: 28 |     ee.Initialize(project=cloud_project) 29 | except: 30 |     ee.Authenticate() 31 |     ee.Initialize(project=cloud_project) 32 | 33 | def get_asset_list(parent): 34 |     parent_asset = ee.data.getAsset(parent) 35 |     parent_id = parent_asset['name'] 36 |     parent_type = parent_asset['type'] 37 |     asset_list = [] 38 |     child_assets = ee.data.listAssets({'parent': parent_id})['assets'] 39 |     for child_asset in child_assets: 40 |         child_id = child_asset['name'] 41 |         child_type = child_asset['type'] 42 |         if child_type in ['FOLDER','IMAGE_COLLECTION']: 43 |             # Recursively call the function to get child assets 44 |             asset_list.extend(get_asset_list(child_id)) 45 |         else: 46 |             asset_list.append(child_id) 47 |     return asset_list 48 | 49 | all_assets = get_asset_list(parent) 50 | 51 | print('Found {} assets'.format(len(all_assets))) 52 | 53 | data = [] 54 | 55 | for asset in all_assets: 56 |     print('Processing {}'.format(asset)) 57 |     info = ee.data.getAsset(asset) 58 |     asset_type = info['type'] 59 |     size = info['sizeBytes'] 60 |     size_mb = round(int(size)/1e6, 2) 61 |     data.append({ 62 |         'asset': asset, 63 |         'type': asset_type, 64 |         'size_mb': size_mb 65 |     }) 66 | 67 | 68 | # Sort the assets by size 69 | sorted_data = sorted(data, key=lambda d: d['size_mb'], reverse=True) 70 | 71 | # Write the data to a file 72 | fieldnames = ['asset', 'type', 'size_mb'] 73 | with open(args.output_file, mode='w') as output_file: 74 |     csv_writer = csv.DictWriter(output_file, fieldnames=fieldnames) 75 |     csv_writer.writeheader() 76 |     for row in sorted_data: 77 |         csv_writer.writerow(row) 78 | 79 | print('Successfully wrote the output file at {}'.format(args.output_file)) 80 | -------------------------------------------------------------------------------- /ee-python/delete_gee_assets.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": { 6 | "colab_type": "text", 7 | "id": "view-in-github" 8 | }, 9 | "source": [ 10 | "<a href=\"https://colab.research.google.com/github/spatialthoughts/projects/blob/master/ee-python/delete_gee_assets.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>" 11 | ] 12 | }, 13 | { 14 | "cell_type": "markdown", 15 | "metadata": { 16 | "id": "1941iYEFRRIL" 17 | }, 18 | "source": [ 19 | "# Delete GEE Assets Recursively\n", 20 | "\n", 21 | "Notebook to delete all assets from a folder/collection recursively. \n", 22 | "\n", 23 | "*Warning: The delete operation is not reversible. 
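If any of the assets may be needed later, copy them to a backup folder first with `ee.data.copyAsset` (the same call used by `rename_collection.py` in this folder). 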
Check the list of assets before uncommenting the line to delete.*" 24 | ] 25 | }, 26 | { 27 | "cell_type": "code", 28 | "execution_count": 1, 29 | "metadata": { 30 | "id": "9wz0EtbcNhtK" 31 | }, 32 | "outputs": [], 33 | "source": [ 34 | "import ee" 35 | ] 36 | }, 37 | { 38 | "cell_type": "code", 39 | "execution_count": null, 40 | "metadata": {}, 41 | "outputs": [], 42 | "source": [ 43 | "# Replace the cloud_project with your own project\n", 44 | "cloud_project = 'spatialthoughts'\n", 45 | "\n", 46 | "try:\n", 47 | " ee.Initialize(project=cloud_project)\n", 48 | "except:\n", 49 | " ee.Authenticate()\n", 50 | " ee.Initialize(project=cloud_project)" 51 | ] 52 | }, 53 | { 54 | "cell_type": "code", 55 | "execution_count": 13, 56 | "metadata": { 57 | "id": "hlBiTb8YNoFf" 58 | }, 59 | "outputs": [], 60 | "source": [ 61 | "def get_asset_list(parent):\n", 62 | " parent_asset = ee.data.getAsset(parent)\n", 63 | " parent_id = parent_asset['name']\n", 64 | " parent_type = parent_asset['type']\n", 65 | " asset_list = []\n", 66 | " child_assets = ee.data.listAssets({'parent': parent_id})['assets']\n", 67 | " for child_asset in child_assets:\n", 68 | " child_id = child_asset['name']\n", 69 | " child_type = child_asset['type']\n", 70 | " if child_type in ['FOLDER','IMAGE_COLLECTION']:\n", 71 | " # Recursively call the function to get child assets\n", 72 | " asset_list.extend(get_asset_list(child_id))\n", 73 | " else:\n", 74 | " asset_list.append(child_id)\n", 75 | " return asset_list\n", 76 | "\n", 77 | "parent = 'users/ujavalgandhi/temp'\n", 78 | "all_assets = get_asset_list(parent)" 79 | ] 80 | }, 81 | { 82 | "cell_type": "code", 83 | "execution_count": null, 84 | "metadata": { 85 | "id": "BVxA3jdjOBHO" 86 | }, 87 | "outputs": [], 88 | "source": [ 89 | "all_assets" 90 | ] 91 | }, 92 | { 93 | "cell_type": "markdown", 94 | "metadata": { 95 | "id": "Zra-KDYrRpFX" 96 | }, 97 | "source": [ 98 | "Uncomment the line below to run the delete operation." 99 | ] 100 | }, 101 | { 102 | "cell_type": "code", 103 | "execution_count": null, 104 | "metadata": { 105 | "id": "GXY6jQaVOC1l" 106 | }, 107 | "outputs": [], 108 | "source": [ 109 | "for asset in all_assets:\n", 110 | " print(asset)\n", 111 | " #ee.data.deleteAsset(asset)" 112 | ] 113 | } 114 | ], 115 | "metadata": { 116 | "colab": { 117 | "authorship_tag": "ABX9TyMc/nXmZO8mOmQSB4VDAAgX", 118 | "include_colab_link": true, 119 | "name": "delete_gee_assets.ipynb", 120 | "provenance": [] 121 | }, 122 | "kernelspec": { 123 | "display_name": "Python 3 (ipykernel)", 124 | "language": "python", 125 | "name": "python3" 126 | }, 127 | "language_info": { 128 | "codemirror_mode": { 129 | "name": "ipython", 130 | "version": 3 131 | }, 132 | "file_extension": ".py", 133 | "mimetype": "text/x-python", 134 | "name": "python", 135 | "nbconvert_exporter": "python", 136 | "pygments_lexer": "ipython3", 137 | "version": "3.12.3" 138 | } 139 | }, 140 | "nbformat": 4, 141 | "nbformat_minor": 4 142 | } 143 | -------------------------------------------------------------------------------- /ee-python/download_data.py: -------------------------------------------------------------------------------- 1 | """This script is an example of automating a download 2 | using Google Earth Engine API. 3 | 4 | This script computes the average soil moisture for the 5 | past 1-week over all districts in a state. The result 6 | is then downloaded as a JSON file and saved locally. 7 | 8 | The Python environment needs to have earthengine-api 9 | package installed. 
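It can be installed with 'pip install earthengine-api'. 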
After install, a one-time authentication 10 | needs to be completed using 'earthengine authenticate' 11 | command. 12 | """ 13 | import datetime 14 | import ee 15 | import json 16 | import os 17 | 18 | # Replace the cloud_project with your own project 19 | cloud_project = 'spatialthoughts' 20 | 21 | try: 22 |     ee.Initialize(project=cloud_project) 23 | except: 24 |     ee.Authenticate() 25 |     ee.Initialize(project=cloud_project) 26 | 27 | # Get current date and convert to milliseconds 28 | end_date = ee.Date(datetime.datetime.now().timestamp()*1000) 29 | start_date = end_date.advance(-1, 'week') 30 | 31 | date_string = end_date.format('YYYY_MM_dd') 32 | filename = 'ssm_{}.geojson'.format(date_string.getInfo()) 33 | 34 | # Saving to current directory. You can change the path to an appropriate location 35 | output_path = os.path.join(filename) 36 | 37 | # Datasets 38 | soilmoisture = ee.ImageCollection("NASA_USDA/HSL/SMAP10KM_soil_moisture") 39 | admin2 = ee.FeatureCollection("FAO/GAUL_SIMPLIFIED_500m/2015/level2") 40 | 41 | # Filter to a state 42 | karnataka = admin2.filter(ee.Filter.eq('ADM1_NAME', 'Karnataka')) 43 | 44 | # Select the ssm band 45 | ssm = soilmoisture.select('ssm') 46 | 47 | filtered = ssm.filter(ee.Filter.date(start_date, end_date)) 48 | 49 | mean = filtered.mean() 50 | 51 | stats = mean.reduceRegions(**{ 52 |     'collection': karnataka, 53 |     'reducer': ee.Reducer.mean().setOutputs(['meanssm']), 54 |     'scale': 10000, 55 | }) 56 | 57 | # Select columns to keep and remove geometry to make the result lightweight 58 | # Change column names to match your uploaded shapefile 59 | columns = ['ADM2_NAME', 'meanssm'] 60 | exportCollection = stats.select(**{ 61 |     'propertySelectors': columns, 62 |     'retainGeometry': False}) 63 | 64 | # Get the result from the server 65 | output = json.dumps(exportCollection.getInfo()) 66 | 67 | with open(output_path, 'w') as f: 68 |     f.write(output) 69 | print('Success: File written at', output_path) 70 | 71 | -------------------------------------------------------------------------------- /ee-python/export_a_collection.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Export an image collection as individual images" 8 | ] 9 | }, 10 | { 11 | "cell_type": "code", 12 | "execution_count": null, 13 | "metadata": { 14 | "colab": {}, 15 | "colab_type": "code", 16 | "executionInfo": { 17 | "elapsed": 876, 18 | "status": "ok", 19 | "timestamp": 1592476956910, 20 | "user": { 21 | "displayName": "Ujaval Gandhi", 22 | "photoUrl": "https://lh3.googleusercontent.com/a-/AOh14Gh-mr8b4yrvpjGrFWxEv60USCC0iYFAwW7Snk8sIISRZwewKGfTeuUrgVjxkvRoELvKK3NB5xkADIgEizMQyhbKx3A9G77B5XWYhrxzQJk8llQgVyvltfzlA9gPDjhFLCICbfYzj85RXVP-mcV5WeIVqCImzuwDpW2BliZjcAdSb-_WjYErFvJaD8U02SYADAk8ULNl20nkpQdJjCqzj4h_hOeOALl7CABZqMG4Uq3fRxa2Yg-Z0tJEY77GkUsu6lraXZJs25nM5GCgUbEg_JtuEKBePKqsmoQrYPOpdUB8lxDff4Hak2nSyGvFz-LbFoKc0ynGk39b1ycyeKVhjMlXvmvoJqCx4L3yoMP5jf0jMzKJMrvuov2PEUzMfUh2lIVZSaavzJqXT1jRiTtld5pA8DKhXKdtqIjsGCHoBkr5oCh7mxDI2QXjhWspibwWk2NgmXNDhZxSuVzxTWde50sAAkZ8Afm9k2LSygBFFi-uBuWg-dZWCsH-GCPIIn1lCJ_2z_aPgVKNp-llxU__ne74OE1J2Y4ob8Sq9d-QJk1ZglWtLZtffqMzhQBoLkOQgyRJrgerkjG1HArFS37c5LF7KhzUIi0hk9uSIuIBWaRHwMLfChfCWL9uVSmakAPCYwlEjS-JUkJVUSEQA60CUn8yU5f78WOjprA_PvUDNG7uXe6HDDtavrexokboYQVXvw-I952YwnpUSZP6gWzuqRdbhqQCZz51TyEA087lzNi6jP_YJ4GT2bDDVM-6wiLhVoNUdg=s64", 23 | "userId": "08961717268892623937" 24 | }, 25 | "user_tz": -330 26 | }, 27 | "id": "JuN5rEoyjmjK" 28 | }, 29 | "outputs": 
[], 30 | "source": [ 31 | "import ee" 32 | ] 33 | }, 34 | { 35 | "cell_type": "code", 36 | "execution_count": null, 37 | "metadata": { 38 | "colab": {}, 39 | "colab_type": "code", 40 | "executionInfo": { 41 | "elapsed": 5450, 42 | "status": "ok", 43 | "timestamp": 1592476984784, 44 | "user": { 45 | "displayName": "Ujaval Gandhi", 46 | "photoUrl": "https://lh3.googleusercontent.com/a-/AOh14Gh-mr8b4yrvpjGrFWxEv60USCC0iYFAwW7Snk8sIISRZwewKGfTeuUrgVjxkvRoELvKK3NB5xkADIgEizMQyhbKx3A9G77B5XWYhrxzQJk8llQgVyvltfzlA9gPDjhFLCICbfYzj85RXVP-mcV5WeIVqCImzuwDpW2BliZjcAdSb-_WjYErFvJaD8U02SYADAk8ULNl20nkpQdJjCqzj4h_hOeOALl7CABZqMG4Uq3fRxa2Yg-Z0tJEY77GkUsu6lraXZJs25nM5GCgUbEg_JtuEKBePKqsmoQrYPOpdUB8lxDff4Hak2nSyGvFz-LbFoKc0ynGk39b1ycyeKVhjMlXvmvoJqCx4L3yoMP5jf0jMzKJMrvuov2PEUzMfUh2lIVZSaavzJqXT1jRiTtld5pA8DKhXKdtqIjsGCHoBkr5oCh7mxDI2QXjhWspibwWk2NgmXNDhZxSuVzxTWde50sAAkZ8Afm9k2LSygBFFi-uBuWg-dZWCsH-GCPIIn1lCJ_2z_aPgVKNp-llxU__ne74OE1J2Y4ob8Sq9d-QJk1ZglWtLZtffqMzhQBoLkOQgyRJrgerkjG1HArFS37c5LF7KhzUIi0hk9uSIuIBWaRHwMLfChfCWL9uVSmakAPCYwlEjS-JUkJVUSEQA60CUn8yU5f78WOjprA_PvUDNG7uXe6HDDtavrexokboYQVXvw-I952YwnpUSZP6gWzuqRdbhqQCZz51TyEA087lzNi6jP_YJ4GT2bDDVM-6wiLhVoNUdg=s64", 47 | "userId": "08961717268892623937" 48 | }, 49 | "user_tz": -330 50 | }, 51 | "id": "UPsm1_Qxg72j" 52 | }, 53 | "outputs": [], 54 | "source": [ 55 | "# Replace the cloud_project with your own project\n", 56 | "cloud_project = 'spatialthoughts'\n", 57 | "\n", 58 | "try:\n", 59 | " ee.Initialize(project=cloud_project)\n", 60 | "except:\n", 61 | " ee.Authenticate()\n", 62 | " ee.Initialize(project=cloud_project)" 63 | ] 64 | }, 65 | { 66 | "cell_type": "markdown", 67 | "metadata": { 68 | "colab_type": "text", 69 | "id": "mW0ljMgeg72n" 70 | }, 71 | "source": [ 72 | "## Create a Collection" 73 | ] 74 | }, 75 | { 76 | "cell_type": "code", 77 | "execution_count": null, 78 | "metadata": { 79 | "colab": {}, 80 | "colab_type": "code", 81 | "executionInfo": { 82 | "elapsed": 879, 83 | "status": "ok", 84 | "timestamp": 1592477072602, 85 | "user": { 86 | "displayName": "Ujaval Gandhi", 87 | "photoUrl": "https://lh3.googleusercontent.com/a-/AOh14Gh-mr8b4yrvpjGrFWxEv60USCC0iYFAwW7Snk8sIISRZwewKGfTeuUrgVjxkvRoELvKK3NB5xkADIgEizMQyhbKx3A9G77B5XWYhrxzQJk8llQgVyvltfzlA9gPDjhFLCICbfYzj85RXVP-mcV5WeIVqCImzuwDpW2BliZjcAdSb-_WjYErFvJaD8U02SYADAk8ULNl20nkpQdJjCqzj4h_hOeOALl7CABZqMG4Uq3fRxa2Yg-Z0tJEY77GkUsu6lraXZJs25nM5GCgUbEg_JtuEKBePKqsmoQrYPOpdUB8lxDff4Hak2nSyGvFz-LbFoKc0ynGk39b1ycyeKVhjMlXvmvoJqCx4L3yoMP5jf0jMzKJMrvuov2PEUzMfUh2lIVZSaavzJqXT1jRiTtld5pA8DKhXKdtqIjsGCHoBkr5oCh7mxDI2QXjhWspibwWk2NgmXNDhZxSuVzxTWde50sAAkZ8Afm9k2LSygBFFi-uBuWg-dZWCsH-GCPIIn1lCJ_2z_aPgVKNp-llxU__ne74OE1J2Y4ob8Sq9d-QJk1ZglWtLZtffqMzhQBoLkOQgyRJrgerkjG1HArFS37c5LF7KhzUIi0hk9uSIuIBWaRHwMLfChfCWL9uVSmakAPCYwlEjS-JUkJVUSEQA60CUn8yU5f78WOjprA_PvUDNG7uXe6HDDtavrexokboYQVXvw-I952YwnpUSZP6gWzuqRdbhqQCZz51TyEA087lzNi6jP_YJ4GT2bDDVM-6wiLhVoNUdg=s64", 88 | "userId": "08961717268892623937" 89 | }, 90 | "user_tz": -330 91 | }, 92 | "id": "S60c2-FOjx0h" 93 | }, 94 | "outputs": [], 95 | "source": [ 96 | "geometry = ee.Geometry.Point([107.61303468448624, 12.130969369851766])\n", 97 | "s2 = ee.ImageCollection(\"COPERNICUS/S2\")\n", 98 | "rgbVis = {\n", 99 | " 'min': 0.0,\n", 100 | " 'max': 3000,\n", 101 | " 'bands': ['B4', 'B3', 'B2'],\n", 102 | "}\n", 103 | "\n", 104 | "# Write a function for Cloud masking\n", 105 | "def maskS2clouds(image):\n", 106 | " qa = image.select('QA60')\n", 107 | " cloudBitMask = 1 << 10\n", 108 | " cirrusBitMask = 1 << 11\n", 109 | " mask = 
qa.bitwiseAnd(cloudBitMask).eq(0).And(\n", 110 | "      qa.bitwiseAnd(cirrusBitMask).eq(0))\n", 111 | "  return image.updateMask(mask) \\\n", 112 | "      .select(\"B.*\") \\\n", 113 | "      .copyProperties(image, [\"system:time_start\"])\n", 114 | "\n", 115 | "filtered = s2 \\\n", 116 | "  .filter(ee.Filter.date('2019-01-01', '2020-01-01')) \\\n", 117 | "  .filter(ee.Filter.lt('CLOUDY_PIXEL_PERCENTAGE', 30)) \\\n", 118 | "  .filter(ee.Filter.intersects('.geo', geometry)) \\\n", 119 | "  .map(maskS2clouds)\n", 120 | "\n", 121 | "# Write a function that computes NDVI for an image and adds it as a band\n", 122 | "def addNDVI(image):\n", 123 | "  ndvi = image.normalizedDifference(['B8', 'B4']).rename('ndvi')\n", 124 | "  return image.addBands(ndvi)\n", 125 | "\n", 126 | "withNdvi = filtered.map(addNDVI)" 127 | ] 128 | }, 129 | { 130 | "cell_type": "markdown", 131 | "metadata": { 132 | "colab_type": "text", 133 | "id": "s-zNDMS9g72r" 134 | }, 135 | "source": [ 136 | "## Export All Images\n", 137 | "\n", 138 | "Exports are done via the ``ee.batch`` module. A key difference between the JavaScript and Python versions is that the `region` parameter needs to be supplied with actual geometry coordinates." 139 | ] 140 | }, 141 | { 142 | "cell_type": "code", 143 | "execution_count": null, 144 | "metadata": { 145 | "colab": { 146 | "base_uri": "https://localhost:8080/", 147 | "height": 33 148 | }, 149 | "colab_type": "code", 150 | "executionInfo": { 151 | "elapsed": 1596, 152 | "status": "ok", 153 | "timestamp": 1592477084873, 154 | "user": { 155 | "displayName": "Ujaval Gandhi", 156 | "photoUrl": "https://lh3.googleusercontent.com/a-/AOh14Gh-mr8b4yrvpjGrFWxEv60USCC0iYFAwW7Snk8sIISRZwewKGfTeuUrgVjxkvRoELvKK3NB5xkADIgEizMQyhbKx3A9G77B5XWYhrxzQJk8llQgVyvltfzlA9gPDjhFLCICbfYzj85RXVP-mcV5WeIVqCImzuwDpW2BliZjcAdSb-_WjYErFvJaD8U02SYADAk8ULNl20nkpQdJjCqzj4h_hOeOALl7CABZqMG4Uq3fRxa2Yg-Z0tJEY77GkUsu6lraXZJs25nM5GCgUbEg_JtuEKBePKqsmoQrYPOpdUB8lxDff4Hak2nSyGvFz-LbFoKc0ynGk39b1ycyeKVhjMlXvmvoJqCx4L3yoMP5jf0jMzKJMrvuov2PEUzMfUh2lIVZSaavzJqXT1jRiTtld5pA8DKhXKdtqIjsGCHoBkr5oCh7mxDI2QXjhWspibwWk2NgmXNDhZxSuVzxTWde50sAAkZ8Afm9k2LSygBFFi-uBuWg-dZWCsH-GCPIIn1lCJ_2z_aPgVKNp-llxU__ne74OE1J2Y4ob8Sq9d-QJk1ZglWtLZtffqMzhQBoLkOQgyRJrgerkjG1HArFS37c5LF7KhzUIi0hk9uSIuIBWaRHwMLfChfCWL9uVSmakAPCYwlEjS-JUkJVUSEQA60CUn8yU5f78WOjprA_PvUDNG7uXe6HDDtavrexokboYQVXvw-I952YwnpUSZP6gWzuqRdbhqQCZz51TyEA087lzNi6jP_YJ4GT2bDDVM-6wiLhVoNUdg=s64", 157 | "userId": "08961717268892623937" 158 | }, 159 | "user_tz": -330 160 | }, 161 | "id": "STvfd9ABg72s", 162 | "outputId": "61a6907a-38b1-4119-b349-807b49b2f4e7" 163 | }, 164 | "outputs": [], 165 | "source": [ 166 | "image_ids = withNdvi.aggregate_array('system:index').getInfo()\n", 167 | "print('Total images: ', len(image_ids))" 168 | ] 169 | }, 170 | { 171 | "cell_type": "code", 172 | "execution_count": null, 173 | "metadata": { 174 | "colab": {}, 175 | "colab_type": "code", 176 | "id": "CoXn1_hgoj81" 177 | }, 178 | "outputs": [], 179 | "source": [ 180 | "palette = [\n", 181 | "  'FFFFFF', 'CE7E45', 'DF923D', 'F1B555', 'FCD163', '99B718',\n", 182 | "  '74A901', '66A000', '529400', '3E8601', '207401', '056201',\n", 183 | "  '004C00', '023B01', '012E01', '011D01', '011301']\n", 184 | "\n", 185 | "ndviVis = {'min':0, 'max':0.5, 'palette': palette }\n", 186 | "\n", 187 | "# Export with 100m resolution for this demo\n", 188 | "for i, image_id in enumerate(image_ids):\n", 189 | "  image = ee.Image(withNdvi.filter(ee.Filter.eq('system:index', image_id)).first())\n", 190 | "  task = ee.batch.Export.image.toDrive(**{\n", 191 | "    
'image': image.select('ndvi').visualize(**ndviVis),\n", 192 | " 'description': 'Image Export {}'.format(i+1),\n", 193 | " 'fileNamePrefix': image.id().getInfo(),\n", 194 | " 'folder':'earthengine',\n", 195 | " 'scale': 100,\n", 196 | " 'region': image.geometry().getInfo()['coordinates'],\n", 197 | " 'maxPixels': 1e10\n", 198 | " })\n", 199 | " task.start()\n", 200 | " print('Started Task: ', i+1)" 201 | ] 202 | }, 203 | { 204 | "cell_type": "markdown", 205 | "metadata": { 206 | "colab_type": "text", 207 | "id": "J8BtAZftg720" 208 | }, 209 | "source": [ 210 | "## Manage Running/Waiting Tasks\n", 211 | "\n", 212 | "You can manage tasks as well. Get a list of tasks and get state information on them" 213 | ] 214 | }, 215 | { 216 | "cell_type": "code", 217 | "execution_count": null, 218 | "metadata": { 219 | "colab": {}, 220 | "colab_type": "code", 221 | "id": "AhNgXPb1XGBX" 222 | }, 223 | "outputs": [], 224 | "source": [ 225 | "tasks = ee.batch.Task.list()\n", 226 | "for task in tasks:\n", 227 | " task_id = task.status()['id']\n", 228 | " task_state = task.status()['state']\n", 229 | " print(task_id, task_state)" 230 | ] 231 | }, 232 | { 233 | "cell_type": "markdown", 234 | "metadata": { 235 | "colab_type": "text", 236 | "id": "Y4kf_z5Wg723" 237 | }, 238 | "source": [ 239 | "You can cancel tasks as well" 240 | ] 241 | }, 242 | { 243 | "cell_type": "code", 244 | "execution_count": null, 245 | "metadata": { 246 | "colab": { 247 | "base_uri": "https://localhost:8080/", 248 | "height": 33 249 | }, 250 | "colab_type": "code", 251 | "executionInfo": { 252 | "elapsed": 1496, 253 | "status": "ok", 254 | "timestamp": 1592477751297, 255 | "user": { 256 | "displayName": "Ujaval Gandhi", 257 | "photoUrl": "https://lh3.googleusercontent.com/a-/AOh14Gh-mr8b4yrvpjGrFWxEv60USCC0iYFAwW7Snk8sIISRZwewKGfTeuUrgVjxkvRoELvKK3NB5xkADIgEizMQyhbKx3A9G77B5XWYhrxzQJk8llQgVyvltfzlA9gPDjhFLCICbfYzj85RXVP-mcV5WeIVqCImzuwDpW2BliZjcAdSb-_WjYErFvJaD8U02SYADAk8ULNl20nkpQdJjCqzj4h_hOeOALl7CABZqMG4Uq3fRxa2Yg-Z0tJEY77GkUsu6lraXZJs25nM5GCgUbEg_JtuEKBePKqsmoQrYPOpdUB8lxDff4Hak2nSyGvFz-LbFoKc0ynGk39b1ycyeKVhjMlXvmvoJqCx4L3yoMP5jf0jMzKJMrvuov2PEUzMfUh2lIVZSaavzJqXT1jRiTtld5pA8DKhXKdtqIjsGCHoBkr5oCh7mxDI2QXjhWspibwWk2NgmXNDhZxSuVzxTWde50sAAkZ8Afm9k2LSygBFFi-uBuWg-dZWCsH-GCPIIn1lCJ_2z_aPgVKNp-llxU__ne74OE1J2Y4ob8Sq9d-QJk1ZglWtLZtffqMzhQBoLkOQgyRJrgerkjG1HArFS37c5LF7KhzUIi0hk9uSIuIBWaRHwMLfChfCWL9uVSmakAPCYwlEjS-JUkJVUSEQA60CUn8yU5f78WOjprA_PvUDNG7uXe6HDDtavrexokboYQVXvw-I952YwnpUSZP6gWzuqRdbhqQCZz51TyEA087lzNi6jP_YJ4GT2bDDVM-6wiLhVoNUdg=s64", 258 | "userId": "08961717268892623937" 259 | }, 260 | "user_tz": -330 261 | }, 262 | "id": "-u2_GpPog724", 263 | "outputId": "a5e0a7fa-9513-4844-d421-05b5923bd126" 264 | }, 265 | "outputs": [], 266 | "source": [ 267 | "if task_state == 'RUNNING' or task_state == 'READY':\n", 268 | " task.cancel()\n", 269 | " print('Task {} canceled'.format(task_id))\n", 270 | "else:\n", 271 | " print('Task {} state is {}'.format(task_id, task_state))\n" 272 | ] 273 | } 274 | ], 275 | "metadata": { 276 | "colab": { 277 | "collapsed_sections": [], 278 | "name": "02_managing_tasks.ipynb", 279 | "provenance": [], 280 | "toc_visible": true 281 | }, 282 | "kernelspec": { 283 | "display_name": "Python 3 (ipykernel)", 284 | "language": "python", 285 | "name": "python3" 286 | }, 287 | "language_info": { 288 | "codemirror_mode": { 289 | "name": "ipython", 290 | "version": 3 291 | }, 292 | "file_extension": ".py", 293 | "mimetype": "text/x-python", 294 | "name": "python", 295 | "nbconvert_exporter": "python", 296 | 
"pygments_lexer": "ipython3", 297 | "version": "3.12.3" 298 | } 299 | }, 300 | "nbformat": 4, 301 | "nbformat_minor": 4 302 | } 303 | -------------------------------------------------------------------------------- /ee-python/export_climate_timeseries.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "id": "796b1233-fc06-4759-a9d8-84736ed3f4ba", 6 | "metadata": {}, 7 | "source": [ 8 | "## Exporting Climate Time-Series as COGs" 9 | ] 10 | }, 11 | { 12 | "cell_type": "code", 13 | "execution_count": 2, 14 | "id": "9432e434-b0b2-44dd-9936-2165970bfebe", 15 | "metadata": { 16 | "id": "JuN5rEoyjmjK" 17 | }, 18 | "outputs": [], 19 | "source": [ 20 | "import ee" 21 | ] 22 | }, 23 | { 24 | "cell_type": "code", 25 | "execution_count": 3, 26 | "id": "ec567485-3ff9-4f8c-a420-51fa1043ed9e", 27 | "metadata": { 28 | "id": "gjRJhBKF2iLf" 29 | }, 30 | "outputs": [], 31 | "source": [ 32 | "cloud_project = 'spatialthoughts'\n", 33 | "\n", 34 | "try:\n", 35 | " ee.Initialize(project=cloud_project)\n", 36 | "except:\n", 37 | " ee.Authenticate()\n", 38 | " ee.Initialize(project=cloud_project)" 39 | ] 40 | }, 41 | { 42 | "cell_type": "markdown", 43 | "id": "707c7164-ae23-4374-a4e7-e52e042179de", 44 | "metadata": {}, 45 | "source": [ 46 | "#### Pre-Processing" 47 | ] 48 | }, 49 | { 50 | "cell_type": "code", 51 | "execution_count": 4, 52 | "id": "9b6e0b09-c20b-4c85-9f7f-c0c6b5f58a45", 53 | "metadata": { 54 | "id": "27Bwko2sPWZ2" 55 | }, 56 | "outputs": [ 57 | { 58 | "name": "stdout", 59 | "output_type": "stream", 60 | "text": [ 61 | "Total images: 12\n" 62 | ] 63 | } 64 | ], 65 | "source": [ 66 | "terraclimate = ee.ImageCollection('IDAHO_EPSCOR/TERRACLIMATE')\n", 67 | "# Soil Moisture\n", 68 | "soil = terraclimate.select('soil')\n", 69 | "\n", 70 | "def scale(image):\n", 71 | " return image.multiply(0.1) \\\n", 72 | " .copyProperties(image,['system:time_start'])\n", 73 | "\n", 74 | "soilScaled = soil.map(scale)\n", 75 | "\n", 76 | "filtered = soilScaled \\\n", 77 | " .filter(ee.Filter.date('2023-01-01', '2025-01-01')) \\\n", 78 | "\n", 79 | "image_ids = filtered.aggregate_array('system:index').getInfo()\n", 80 | "print('Total images: ', len(image_ids))" 81 | ] 82 | }, 83 | { 84 | "cell_type": "markdown", 85 | "id": "e3efe2ab-bb72-4925-9c9d-fce57a5dd159", 86 | "metadata": {}, 87 | "source": [ 88 | "#### Export Images" 89 | ] 90 | }, 91 | { 92 | "cell_type": "code", 93 | "execution_count": 6, 94 | "id": "64fc5c8b-a121-4419-a55d-793f9f859e79", 95 | "metadata": {}, 96 | "outputs": [], 97 | "source": [ 98 | "# Change the bucket name you a bucket where you may write access\n", 99 | "gcs_bucket_name = 'spatialthoughts-public-data'\n", 100 | "gcs_bucker_folder = 'terraclimate'" 101 | ] 102 | }, 103 | { 104 | "cell_type": "code", 105 | "execution_count": 8, 106 | "id": "90986d50-6d82-48d0-b6d7-1c0a5d6b11b4", 107 | "metadata": { 108 | "id": "mbR6LaBDPWZ2" 109 | }, 110 | "outputs": [ 111 | { 112 | "name": "stdout", 113 | "output_type": "stream", 114 | "text": [ 115 | "Started Task: soil_moisture_202301\n", 116 | "Started Task: soil_moisture_202302\n", 117 | "Started Task: soil_moisture_202303\n", 118 | "Started Task: soil_moisture_202304\n", 119 | "Started Task: soil_moisture_202305\n", 120 | "Started Task: soil_moisture_202306\n", 121 | "Started Task: soil_moisture_202307\n", 122 | "Started Task: soil_moisture_202308\n", 123 | "Started Task: soil_moisture_202309\n", 124 | "Started Task: soil_moisture_202310\n", 125 | "Started 
Task: soil_moisture_202311\n", 126 | "Started Task: soil_moisture_202312\n" 127 | ] 128 | } 129 | ], 130 | "source": [ 131 | "for i, image_id in enumerate(image_ids):\n", 132 | " exportImage = ee.Image(filtered.filter(ee.Filter.eq('system:index', image_id)).first())\n", 133 | " geometry = ee.Algorithms.GeometryConstructors.BBox(-180, -90, 180, 90)\n", 134 | " task_name = f'soil_moisture_{image_id}'\n", 135 | "\n", 136 | " task = ee.batch.Export.image.toCloudStorage(**{\n", 137 | " 'image': exportImage,\n", 138 | " 'description': f'Image_Export_{task_name}',\n", 139 | " 'fileNamePrefix': f'{gcs_bucker_folder}/{task_name}',\n", 140 | " 'bucket': gcs_bucket_name,\n", 141 | " 'region': geometry,\n", 142 | " 'maxPixels': 1e10,\n", 143 | " 'formatOptions': {'cloudOptimized': True},\n", 144 | " })\n", 145 | " task.start()\n", 146 | " print(f'Started Task: {task_name}')" 147 | ] 148 | }, 149 | { 150 | "cell_type": "code", 151 | "execution_count": null, 152 | "id": "add4426f-d96d-4a50-9ec5-039ba475c0e9", 153 | "metadata": {}, 154 | "outputs": [], 155 | "source": [] 156 | } 157 | ], 158 | "metadata": { 159 | "kernelspec": { 160 | "display_name": "Python 3 (ipykernel)", 161 | "language": "python", 162 | "name": "python3" 163 | }, 164 | "language_info": { 165 | "codemirror_mode": { 166 | "name": "ipython", 167 | "version": 3 168 | }, 169 | "file_extension": ".py", 170 | "mimetype": "text/x-python", 171 | "name": "python", 172 | "nbconvert_exporter": "python", 173 | "pygments_lexer": "ipython3", 174 | "version": "3.13.1" 175 | } 176 | }, 177 | "nbformat": 4, 178 | "nbformat_minor": 5 179 | } 180 | -------------------------------------------------------------------------------- /ee-python/export_image_bands.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Export a multi-band image as individual images" 8 | ] 9 | }, 10 | { 11 | "cell_type": "code", 12 | "execution_count": null, 13 | "metadata": { 14 | "colab": {}, 15 | "colab_type": "code", 16 | "executionInfo": { 17 | "elapsed": 876, 18 | "status": "ok", 19 | "timestamp": 1592476956910, 20 | "user": { 21 | "displayName": "Ujaval Gandhi", 22 | "photoUrl": "https://lh3.googleusercontent.com/a-/AOh14Gh-mr8b4yrvpjGrFWxEv60USCC0iYFAwW7Snk8sIISRZwewKGfTeuUrgVjxkvRoELvKK3NB5xkADIgEizMQyhbKx3A9G77B5XWYhrxzQJk8llQgVyvltfzlA9gPDjhFLCICbfYzj85RXVP-mcV5WeIVqCImzuwDpW2BliZjcAdSb-_WjYErFvJaD8U02SYADAk8ULNl20nkpQdJjCqzj4h_hOeOALl7CABZqMG4Uq3fRxa2Yg-Z0tJEY77GkUsu6lraXZJs25nM5GCgUbEg_JtuEKBePKqsmoQrYPOpdUB8lxDff4Hak2nSyGvFz-LbFoKc0ynGk39b1ycyeKVhjMlXvmvoJqCx4L3yoMP5jf0jMzKJMrvuov2PEUzMfUh2lIVZSaavzJqXT1jRiTtld5pA8DKhXKdtqIjsGCHoBkr5oCh7mxDI2QXjhWspibwWk2NgmXNDhZxSuVzxTWde50sAAkZ8Afm9k2LSygBFFi-uBuWg-dZWCsH-GCPIIn1lCJ_2z_aPgVKNp-llxU__ne74OE1J2Y4ob8Sq9d-QJk1ZglWtLZtffqMzhQBoLkOQgyRJrgerkjG1HArFS37c5LF7KhzUIi0hk9uSIuIBWaRHwMLfChfCWL9uVSmakAPCYwlEjS-JUkJVUSEQA60CUn8yU5f78WOjprA_PvUDNG7uXe6HDDtavrexokboYQVXvw-I952YwnpUSZP6gWzuqRdbhqQCZz51TyEA087lzNi6jP_YJ4GT2bDDVM-6wiLhVoNUdg=s64", 23 | "userId": "08961717268892623937" 24 | }, 25 | "user_tz": -330 26 | }, 27 | "id": "JuN5rEoyjmjK" 28 | }, 29 | "outputs": [], 30 | "source": [ 31 | "import ee" 32 | ] 33 | }, 34 | { 35 | "cell_type": "code", 36 | "execution_count": null, 37 | "metadata": { 38 | "colab": {}, 39 | "colab_type": "code", 40 | "executionInfo": { 41 | "elapsed": 5450, 42 | "status": "ok", 43 | "timestamp": 1592476984784, 44 | "user": { 45 | "displayName": "Ujaval Gandhi", 46 | 
"photoUrl": "https://lh3.googleusercontent.com/a-/AOh14Gh-mr8b4yrvpjGrFWxEv60USCC0iYFAwW7Snk8sIISRZwewKGfTeuUrgVjxkvRoELvKK3NB5xkADIgEizMQyhbKx3A9G77B5XWYhrxzQJk8llQgVyvltfzlA9gPDjhFLCICbfYzj85RXVP-mcV5WeIVqCImzuwDpW2BliZjcAdSb-_WjYErFvJaD8U02SYADAk8ULNl20nkpQdJjCqzj4h_hOeOALl7CABZqMG4Uq3fRxa2Yg-Z0tJEY77GkUsu6lraXZJs25nM5GCgUbEg_JtuEKBePKqsmoQrYPOpdUB8lxDff4Hak2nSyGvFz-LbFoKc0ynGk39b1ycyeKVhjMlXvmvoJqCx4L3yoMP5jf0jMzKJMrvuov2PEUzMfUh2lIVZSaavzJqXT1jRiTtld5pA8DKhXKdtqIjsGCHoBkr5oCh7mxDI2QXjhWspibwWk2NgmXNDhZxSuVzxTWde50sAAkZ8Afm9k2LSygBFFi-uBuWg-dZWCsH-GCPIIn1lCJ_2z_aPgVKNp-llxU__ne74OE1J2Y4ob8Sq9d-QJk1ZglWtLZtffqMzhQBoLkOQgyRJrgerkjG1HArFS37c5LF7KhzUIi0hk9uSIuIBWaRHwMLfChfCWL9uVSmakAPCYwlEjS-JUkJVUSEQA60CUn8yU5f78WOjprA_PvUDNG7uXe6HDDtavrexokboYQVXvw-I952YwnpUSZP6gWzuqRdbhqQCZz51TyEA087lzNi6jP_YJ4GT2bDDVM-6wiLhVoNUdg=s64", 47 | "userId": "08961717268892623937" 48 | }, 49 | "user_tz": -330 50 | }, 51 | "id": "UPsm1_Qxg72j" 52 | }, 53 | "outputs": [], 54 | "source": [ 55 | "# Replace the cloud_project with your own project\n", 56 | "cloud_project = 'spatialthoughts'\n", 57 | "\n", 58 | "try:\n", 59 | " ee.Initialize(project=cloud_project)\n", 60 | "except:\n", 61 | " ee.Authenticate()\n", 62 | " ee.Initialize(project=cloud_project)" 63 | ] 64 | }, 65 | { 66 | "cell_type": "markdown", 67 | "metadata": { 68 | "colab_type": "text", 69 | "id": "mW0ljMgeg72n" 70 | }, 71 | "source": [ 72 | "## Load the image" 73 | ] 74 | }, 75 | { 76 | "cell_type": "code", 77 | "execution_count": 11, 78 | "metadata": { 79 | "colab": {}, 80 | "colab_type": "code", 81 | "executionInfo": { 82 | "elapsed": 879, 83 | "status": "ok", 84 | "timestamp": 1592477072602, 85 | "user": { 86 | "displayName": "Ujaval Gandhi", 87 | "photoUrl": "https://lh3.googleusercontent.com/a-/AOh14Gh-mr8b4yrvpjGrFWxEv60USCC0iYFAwW7Snk8sIISRZwewKGfTeuUrgVjxkvRoELvKK3NB5xkADIgEizMQyhbKx3A9G77B5XWYhrxzQJk8llQgVyvltfzlA9gPDjhFLCICbfYzj85RXVP-mcV5WeIVqCImzuwDpW2BliZjcAdSb-_WjYErFvJaD8U02SYADAk8ULNl20nkpQdJjCqzj4h_hOeOALl7CABZqMG4Uq3fRxa2Yg-Z0tJEY77GkUsu6lraXZJs25nM5GCgUbEg_JtuEKBePKqsmoQrYPOpdUB8lxDff4Hak2nSyGvFz-LbFoKc0ynGk39b1ycyeKVhjMlXvmvoJqCx4L3yoMP5jf0jMzKJMrvuov2PEUzMfUh2lIVZSaavzJqXT1jRiTtld5pA8DKhXKdtqIjsGCHoBkr5oCh7mxDI2QXjhWspibwWk2NgmXNDhZxSuVzxTWde50sAAkZ8Afm9k2LSygBFFi-uBuWg-dZWCsH-GCPIIn1lCJ_2z_aPgVKNp-llxU__ne74OE1J2Y4ob8Sq9d-QJk1ZglWtLZtffqMzhQBoLkOQgyRJrgerkjG1HArFS37c5LF7KhzUIi0hk9uSIuIBWaRHwMLfChfCWL9uVSmakAPCYwlEjS-JUkJVUSEQA60CUn8yU5f78WOjprA_PvUDNG7uXe6HDDtavrexokboYQVXvw-I952YwnpUSZP6gWzuqRdbhqQCZz51TyEA087lzNi6jP_YJ4GT2bDDVM-6wiLhVoNUdg=s64", 88 | "userId": "08961717268892623937" 89 | }, 90 | "user_tz": -330 91 | }, 92 | "id": "S60c2-FOjx0h" 93 | }, 94 | "outputs": [], 95 | "source": [ 96 | "gsw = ee.Image(\"JRC/GSW1_2/GlobalSurfaceWater\")\n", 97 | "bangalore = ee.FeatureCollection(\"users/ujavalgandhi/public/bangalore_boundary\")\n", 98 | "geometry = bangalore.geometry()" 99 | ] 100 | }, 101 | { 102 | "cell_type": "markdown", 103 | "metadata": {}, 104 | "source": [ 105 | "## Get Band Names" 106 | ] 107 | }, 108 | { 109 | "cell_type": "code", 110 | "execution_count": null, 111 | "metadata": {}, 112 | "outputs": [], 113 | "source": [ 114 | "bands = gsw.bandNames().getInfo()\n", 115 | "print(bands)" 116 | ] 117 | }, 118 | { 119 | "cell_type": "markdown", 120 | "metadata": { 121 | "colab_type": "text", 122 | "id": "s-zNDMS9g72r" 123 | }, 124 | "source": [ 125 | "## Export All Bands\n", 126 | "\n", 127 | "Exports are done via the ``ee.batch`` module. 
A key difference between javascript and Python version is that the `region` parameter needs to be supplied with actual geometry coordinates." 128 | ] 129 | }, 130 | { 131 | "cell_type": "code", 132 | "execution_count": null, 133 | "metadata": { 134 | "colab": {}, 135 | "colab_type": "code", 136 | "id": "CoXn1_hgoj81" 137 | }, 138 | "outputs": [], 139 | "source": [ 140 | "for i, band in enumerate(bands):\n", 141 | " image = gsw.select(band).clip(geometry)\n", 142 | " task = ee.batch.Export.image.toDrive(**{\n", 143 | " 'image': image,\n", 144 | " 'description': 'Band Export {}'.format(band),\n", 145 | " 'fileNamePrefix': band,\n", 146 | " 'folder':'earthengine',\n", 147 | " 'scale': 100,\n", 148 | " 'region': geometry,\n", 149 | " 'maxPixels': 1e10\n", 150 | " })\n", 151 | " task.start()\n", 152 | " print('Started Task: ', i+1)" 153 | ] 154 | } 155 | ], 156 | "metadata": { 157 | "colab": { 158 | "collapsed_sections": [], 159 | "name": "02_managing_tasks.ipynb", 160 | "provenance": [], 161 | "toc_visible": true 162 | }, 163 | "kernelspec": { 164 | "display_name": "Python 3 (ipykernel)", 165 | "language": "python", 166 | "name": "python3" 167 | }, 168 | "language_info": { 169 | "codemirror_mode": { 170 | "name": "ipython", 171 | "version": 3 172 | }, 173 | "file_extension": ".py", 174 | "mimetype": "text/x-python", 175 | "name": "python", 176 | "nbconvert_exporter": "python", 177 | "pygments_lexer": "ipython3", 178 | "version": "3.12.3" 179 | } 180 | }, 181 | "nbformat": 4, 182 | "nbformat_minor": 4 183 | } 184 | -------------------------------------------------------------------------------- /ee-python/list_all_assets.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import ee 3 | 4 | parser = argparse.ArgumentParser() 5 | parser.add_argument('--asset_folder', help='full path to the asset folder') 6 | args = parser.parse_args() 7 | parent = args.asset_folder 8 | 9 | # Replace the cloud_project with your own project 10 | cloud_project = 'spatialthoughts' 11 | 12 | try: 13 | ee.Initialize(project=cloud_project) 14 | except: 15 | ee.Authenticate() 16 | ee.Initialize(project=cloud_project) 17 | 18 | 19 | def get_asset_list(parent): 20 | parent_asset = ee.data.getAsset(parent) 21 | parent_id = parent_asset['name'] 22 | parent_type = parent_asset['type'] 23 | asset_list = [] 24 | child_assets = ee.data.listAssets({'parent': parent_id})['assets'] 25 | for child_asset in child_assets: 26 | child_id = child_asset['name'] 27 | child_type = child_asset['type'] 28 | if child_type in ['FOLDER','IMAGE_COLLECTION']: 29 | # Recursively call the function to get child assets 30 | asset_list.extend(get_asset_list(child_id)) 31 | else: 32 | asset_list.append(child_id) 33 | return asset_list 34 | 35 | all_assets = get_asset_list(parent) 36 | 37 | print('Found {} assets'.format(len(all_assets))) 38 | 39 | for asset in all_assets: 40 | print(asset) -------------------------------------------------------------------------------- /ee-python/manage_tasks.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Manage tasks" 8 | ] 9 | }, 10 | { 11 | "cell_type": "code", 12 | "execution_count": null, 13 | "metadata": { 14 | "colab": {}, 15 | "colab_type": "code", 16 | "executionInfo": { 17 | "elapsed": 876, 18 | "status": "ok", 19 | "timestamp": 1592476956910, 20 | "user": { 21 | "displayName": "Ujaval Gandhi", 22 | "photoUrl": 
"https://lh3.googleusercontent.com/a-/AOh14Gh-mr8b4yrvpjGrFWxEv60USCC0iYFAwW7Snk8sIISRZwewKGfTeuUrgVjxkvRoELvKK3NB5xkADIgEizMQyhbKx3A9G77B5XWYhrxzQJk8llQgVyvltfzlA9gPDjhFLCICbfYzj85RXVP-mcV5WeIVqCImzuwDpW2BliZjcAdSb-_WjYErFvJaD8U02SYADAk8ULNl20nkpQdJjCqzj4h_hOeOALl7CABZqMG4Uq3fRxa2Yg-Z0tJEY77GkUsu6lraXZJs25nM5GCgUbEg_JtuEKBePKqsmoQrYPOpdUB8lxDff4Hak2nSyGvFz-LbFoKc0ynGk39b1ycyeKVhjMlXvmvoJqCx4L3yoMP5jf0jMzKJMrvuov2PEUzMfUh2lIVZSaavzJqXT1jRiTtld5pA8DKhXKdtqIjsGCHoBkr5oCh7mxDI2QXjhWspibwWk2NgmXNDhZxSuVzxTWde50sAAkZ8Afm9k2LSygBFFi-uBuWg-dZWCsH-GCPIIn1lCJ_2z_aPgVKNp-llxU__ne74OE1J2Y4ob8Sq9d-QJk1ZglWtLZtffqMzhQBoLkOQgyRJrgerkjG1HArFS37c5LF7KhzUIi0hk9uSIuIBWaRHwMLfChfCWL9uVSmakAPCYwlEjS-JUkJVUSEQA60CUn8yU5f78WOjprA_PvUDNG7uXe6HDDtavrexokboYQVXvw-I952YwnpUSZP6gWzuqRdbhqQCZz51TyEA087lzNi6jP_YJ4GT2bDDVM-6wiLhVoNUdg=s64", 23 | "userId": "08961717268892623937" 24 | }, 25 | "user_tz": -330 26 | }, 27 | "id": "JuN5rEoyjmjK" 28 | }, 29 | "outputs": [], 30 | "source": [ 31 | "import ee" 32 | ] 33 | }, 34 | { 35 | "cell_type": "code", 36 | "execution_count": null, 37 | "metadata": { 38 | "colab": { 39 | "base_uri": "https://localhost:8080/", 40 | "height": 167 41 | }, 42 | "colab_type": "code", 43 | "executionInfo": { 44 | "elapsed": 18518, 45 | "status": "ok", 46 | "timestamp": 1592476976324, 47 | "user": { 48 | "displayName": "Ujaval Gandhi", 49 | "photoUrl": "https://lh3.googleusercontent.com/a-/AOh14Gh-mr8b4yrvpjGrFWxEv60USCC0iYFAwW7Snk8sIISRZwewKGfTeuUrgVjxkvRoELvKK3NB5xkADIgEizMQyhbKx3A9G77B5XWYhrxzQJk8llQgVyvltfzlA9gPDjhFLCICbfYzj85RXVP-mcV5WeIVqCImzuwDpW2BliZjcAdSb-_WjYErFvJaD8U02SYADAk8ULNl20nkpQdJjCqzj4h_hOeOALl7CABZqMG4Uq3fRxa2Yg-Z0tJEY77GkUsu6lraXZJs25nM5GCgUbEg_JtuEKBePKqsmoQrYPOpdUB8lxDff4Hak2nSyGvFz-LbFoKc0ynGk39b1ycyeKVhjMlXvmvoJqCx4L3yoMP5jf0jMzKJMrvuov2PEUzMfUh2lIVZSaavzJqXT1jRiTtld5pA8DKhXKdtqIjsGCHoBkr5oCh7mxDI2QXjhWspibwWk2NgmXNDhZxSuVzxTWde50sAAkZ8Afm9k2LSygBFFi-uBuWg-dZWCsH-GCPIIn1lCJ_2z_aPgVKNp-llxU__ne74OE1J2Y4ob8Sq9d-QJk1ZglWtLZtffqMzhQBoLkOQgyRJrgerkjG1HArFS37c5LF7KhzUIi0hk9uSIuIBWaRHwMLfChfCWL9uVSmakAPCYwlEjS-JUkJVUSEQA60CUn8yU5f78WOjprA_PvUDNG7uXe6HDDtavrexokboYQVXvw-I952YwnpUSZP6gWzuqRdbhqQCZz51TyEA087lzNi6jP_YJ4GT2bDDVM-6wiLhVoNUdg=s64", 50 | "userId": "08961717268892623937" 51 | }, 52 | "user_tz": -330 53 | }, 54 | "id": "Sz-01eMjg72f", 55 | "outputId": "60cc7912-220e-4396-8c35-531101a33c3a" 56 | }, 57 | "outputs": [], 58 | "source": [ 59 | "# Replace the cloud_project with your own project\n", 60 | "cloud_project = 'spatialthoughts'\n", 61 | "\n", 62 | "try:\n", 63 | " ee.Initialize(project=cloud_project)\n", 64 | "except:\n", 65 | " ee.Authenticate()\n", 66 | " ee.Initialize(project=cloud_project)" 67 | ] 68 | }, 69 | { 70 | "cell_type": "code", 71 | "execution_count": null, 72 | "metadata": { 73 | "colab": {}, 74 | "colab_type": "code", 75 | "id": "AhNgXPb1XGBX" 76 | }, 77 | "outputs": [], 78 | "source": [ 79 | "tasks = ee.batch.Task.list()\n", 80 | "for task in tasks:\n", 81 | " task_id = task.status()['id']\n", 82 | " task_state = task.status()['state']\n", 83 | " print(task_id, task_state)" 84 | ] 85 | }, 86 | { 87 | "cell_type": "markdown", 88 | "metadata": { 89 | "colab_type": "text", 90 | "id": "Y4kf_z5Wg723" 91 | }, 92 | "source": [ 93 | "## Cancel Tasks" 94 | ] 95 | }, 96 | { 97 | "cell_type": "code", 98 | "execution_count": null, 99 | "metadata": { 100 | "colab": { 101 | "base_uri": "https://localhost:8080/", 102 | "height": 33 103 | }, 104 | "colab_type": "code", 105 | "executionInfo": { 106 | "elapsed": 1496, 107 | "status": "ok", 108 | "timestamp": 
1592477751297, 109 | "user": { 110 | "displayName": "Ujaval Gandhi", 111 | "photoUrl": "https://lh3.googleusercontent.com/a-/AOh14Gh-mr8b4yrvpjGrFWxEv60USCC0iYFAwW7Snk8sIISRZwewKGfTeuUrgVjxkvRoELvKK3NB5xkADIgEizMQyhbKx3A9G77B5XWYhrxzQJk8llQgVyvltfzlA9gPDjhFLCICbfYzj85RXVP-mcV5WeIVqCImzuwDpW2BliZjcAdSb-_WjYErFvJaD8U02SYADAk8ULNl20nkpQdJjCqzj4h_hOeOALl7CABZqMG4Uq3fRxa2Yg-Z0tJEY77GkUsu6lraXZJs25nM5GCgUbEg_JtuEKBePKqsmoQrYPOpdUB8lxDff4Hak2nSyGvFz-LbFoKc0ynGk39b1ycyeKVhjMlXvmvoJqCx4L3yoMP5jf0jMzKJMrvuov2PEUzMfUh2lIVZSaavzJqXT1jRiTtld5pA8DKhXKdtqIjsGCHoBkr5oCh7mxDI2QXjhWspibwWk2NgmXNDhZxSuVzxTWde50sAAkZ8Afm9k2LSygBFFi-uBuWg-dZWCsH-GCPIIn1lCJ_2z_aPgVKNp-llxU__ne74OE1J2Y4ob8Sq9d-QJk1ZglWtLZtffqMzhQBoLkOQgyRJrgerkjG1HArFS37c5LF7KhzUIi0hk9uSIuIBWaRHwMLfChfCWL9uVSmakAPCYwlEjS-JUkJVUSEQA60CUn8yU5f78WOjprA_PvUDNG7uXe6HDDtavrexokboYQVXvw-I952YwnpUSZP6gWzuqRdbhqQCZz51TyEA087lzNi6jP_YJ4GT2bDDVM-6wiLhVoNUdg=s64", 112 | "userId": "08961717268892623937" 113 | }, 114 | "user_tz": -330 115 | }, 116 | "id": "-u2_GpPog724", 117 | "outputId": "a5e0a7fa-9513-4844-d421-05b5923bd126" 118 | }, 119 | "outputs": [], 120 | "source": [ 121 | "if task_state == 'RUNNING' or task_state == 'READY':\n", 122 | " task.cancel()\n", 123 | " print('Task {} canceled'.format(task_id))\n", 124 | "else:\n", 125 | " print('Task {} state is {}'.format(task_id, task_state))\n" 126 | ] 127 | } 128 | ], 129 | "metadata": { 130 | "colab": { 131 | "collapsed_sections": [], 132 | "name": "02_managing_tasks.ipynb", 133 | "provenance": [], 134 | "toc_visible": true 135 | }, 136 | "kernelspec": { 137 | "display_name": "Python 3 (ipykernel)", 138 | "language": "python", 139 | "name": "python3" 140 | }, 141 | "language_info": { 142 | "codemirror_mode": { 143 | "name": "ipython", 144 | "version": 3 145 | }, 146 | "file_extension": ".py", 147 | "mimetype": "text/x-python", 148 | "name": "python", 149 | "nbconvert_exporter": "python", 150 | "pygments_lexer": "ipython3", 151 | "version": "3.12.3" 152 | } 153 | }, 154 | "nbformat": 4, 155 | "nbformat_minor": 4 156 | } 157 | -------------------------------------------------------------------------------- /ee-python/rename_collection.py: -------------------------------------------------------------------------------- 1 | """ A python script to rename Earth Engine Collections 2 | 3 | GEE collections cannot be renamed directly, so this script 4 | provides a simple way to get all assets in a collection 5 | and copies it to the new collection 6 | 7 | Sample usage: 8 | python rename_collection.py --old_collection --new_collection 9 | 10 | Add a --delete option to delete old_collection 11 | python rename_collection.py --old_collection --new_collection --delete 12 | """ 13 | import argparse 14 | import ee 15 | 16 | parser = argparse.ArgumentParser(usage='python rename_collection.py --old_collection --new_collection ') 17 | parser.add_argument('--old_collection', help='old collection') 18 | parser.add_argument('--new_collection', help='new collection') 19 | parser.add_argument('--delete', help='delete old collection', action=argparse.BooleanOptionalAction) 20 | 21 | args = parser.parse_args() 22 | 23 | old_collection = args.old_collection 24 | new_collection = args.new_collection 25 | 26 | # Replace the cloud_project with your own project 27 | cloud_project = 'spatialthoughts' 28 | 29 | try: 30 | ee.Initialize(project=cloud_project) 31 | except: 32 | ee.Authenticate() 33 | ee.Initialize(project=cloud_project) 34 | 35 | # Check if new collection exists 36 | try: 37 | ee.ImageCollection(new_collection).getInfo() 
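    # Assumption: getInfo() raises ee.EEException when the collection does
    # not exist yet, which is what the except branch below relies on.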
38 | except: 39 | print('Collection {} does not exist'.format(new_collection)) 40 | ee.data.createAsset({'type': ee.data.ASSET_TYPE_IMAGE_COLL}, new_collection) 41 | print('Created a new empty collection {}.'.format(new_collection)) 42 | 43 | 44 | assets = ee.data.listAssets({'parent': old_collection})['assets'] 45 | 46 | 47 | for asset in assets: 48 | old_name = asset['name'] 49 | new_name = old_name.replace(old_collection, new_collection) 50 | print('Copying {} to {}'.format(old_name, new_name)) 51 | ee.data.copyAsset(old_name, new_name, True) 52 | if args.delete: 53 | print('Deleting <{}>'.format(old_name)) 54 | ee.data.deleteAsset(old_name) 55 | 56 | if args.delete: 57 | print('Deleting Collection <{}>'.format(old_collection)) 58 | ee.data.deleteAsset(old_collection) -------------------------------------------------------------------------------- /ee-python/update_acl.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import ee 3 | 4 | parser = argparse.ArgumentParser() 5 | parser.add_argument('--asset_folder', help='full path to the asset folder') 6 | args = parser.parse_args() 7 | parent = args.asset_folder 8 | 9 | # Replace the cloud_project with your own project 10 | cloud_project = 'spatialthoughts' 11 | 12 | try: 13 | ee.Initialize(project=cloud_project) 14 | except: 15 | ee.Authenticate() 16 | ee.Initialize(project=cloud_project) 17 | 18 | def get_asset_list(parent): 19 | parent_asset = ee.data.getAsset(parent) 20 | parent_id = parent_asset['name'] 21 | parent_type = parent_asset['type'] 22 | asset_list = [] 23 | child_assets = ee.data.listAssets({'parent': parent_id})['assets'] 24 | for child_asset in child_assets: 25 | child_id = child_asset['name'] 26 | child_type = child_asset['type'] 27 | if child_type in ['FOLDER','IMAGE_COLLECTION']: 28 | # Recursively call the function to get child assets 29 | asset_list.extend(get_asset_list(child_id)) 30 | else: 31 | asset_list.append(child_id) 32 | return asset_list 33 | 34 | all_assets = get_asset_list(parent) 35 | 36 | print('Found {} assets'.format(len(all_assets))) 37 | 38 | # Define update operations to perform 39 | acl_update = { 40 | 'all_users_can_read': True 41 | } 42 | 43 | for asset in all_assets: 44 | print('Updating permissions for {}'.format(asset)) 45 | ee.data.setAssetAcl(asset, acl_update) -------------------------------------------------------------------------------- /environment.yml: -------------------------------------------------------------------------------- 1 | name: projects 2 | channels: 3 | - conda-forge 4 | - defaults 5 | dependencies: 6 | - python=3.7 7 | - geopandas=0.7 8 | - h3-py 9 | prefix: /Users/ujaval/opt/anaconda3/envs/projects 10 | 11 | -------------------------------------------------------------------------------- /fill_nodata/README.md: -------------------------------------------------------------------------------- 1 | # Fill missing rows of data in aerial imagery 2 | -------------------------------------------------------------------------------- /fill_nodata/demo_fix_missing_data.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Fill Missing Rows with Data\n", 8 | "\n", 9 | "This script shows how to read an image where certain rows have missing data (i.e. 0) and fill that with the average of adjacent rows." 
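,
"\n",
"Note: the first and last rows of the image have only one neighbor, so the boundary check in the fill loop below leaves them unchanged."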
10 | ]
11 | },
12 | {
13 | "cell_type": "code",
14 | "execution_count": 110,
15 | "metadata": {},
16 | "outputs": [],
17 | "source": [
18 | "import rasterio\n",
19 | "import numpy as np"
20 | ]
21 | },
22 | {
23 | "cell_type": "code",
24 | "execution_count": 111,
25 | "metadata": {},
26 | "outputs": [
27 | {
28 | "data": {
29 | "text/plain": [
30 | "12994"
31 | ]
32 | },
33 | "execution_count": 111,
34 | "metadata": {},
35 | "output_type": "execute_result"
36 | }
37 | ],
38 | "source": [
39 | "filename = 'bad.tif'\n",
40 | "dataset = rasterio.open(filename)\n",
41 | "metadata = dataset.meta\n",
42 | "dataset.height"
43 | ]
44 | },
45 | {
46 | "cell_type": "code",
47 | "execution_count": 112,
48 | "metadata": {},
49 | "outputs": [],
50 | "source": [
51 | "red = dataset.read(1)\n",
52 | "green = dataset.read(2)\n",
53 | "blue = dataset.read(3)"
54 | ]
55 | },
56 | {
57 | "cell_type": "markdown",
58 | "metadata": {},
59 | "source": [
60 | "The following returns a boolean array with one entry per row, which is True when every pixel in that row is 0 (`np.all` over `axis=1`)"
61 | ]
62 | },
63 | {
64 | "cell_type": "code",
65 | "execution_count": 113,
66 | "metadata": {},
67 | "outputs": [
68 | {
69 | "data": {
70 | "text/plain": [
71 | "(12994,)"
72 | ]
73 | },
74 | "execution_count": 113,
75 | "metadata": {},
76 | "output_type": "execute_result"
77 | }
78 | ],
79 | "source": [
80 | "result = np.all(red == 0, axis=1)\n",
81 | "result.shape"
82 | ]
83 | },
84 | {
85 | "cell_type": "code",
86 | "execution_count": 114,
87 | "metadata": {},
88 | "outputs": [],
89 | "source": [
90 | "def average_rows(array, index):\n",
91 | "    result = np.round(np.mean( np.array([array[index[0]-1], array[index[0]+1] ]), axis=0 ))\n",
92 | "    array[index] = result\n",
93 | "    \n",
94 | "for index, x in np.ndenumerate(result):\n",
95 | "    if (x and index[0] != 0 and index[0] != (dataset.height - 1)):\n",
96 | "        average_rows(red, index)\n",
97 | "        average_rows(blue, index)\n",
98 | "        average_rows(green, index)"
99 | ]
100 | },
101 | {
102 | "cell_type": "code",
103 | "execution_count": 115,
104 | "metadata": {},
105 | "outputs": [],
106 | "source": [
107 | "output_filename = 'fixed.tif'\n",
108 | "metadata.update({'driver': 'GTiff'})\n",
109 | "\n",
110 | "rgb_dataset = rasterio.open(output_filename, 'w', **metadata)\n",
111 | "rgb_dataset.write(red, 1)\n",
112 | "rgb_dataset.write(green, 2)\n",
113 | "rgb_dataset.write(blue, 3)\n",
114 | "rgb_dataset.close()"
115 | ]
116 | }
117 | ],
118 | "metadata": {
119 | "kernelspec": {
120 | "display_name": "Python 3",
121 | "language": "python",
122 | "name": "python3"
123 | },
124 | "language_info": {
125 | "codemirror_mode": {
126 | "name": "ipython",
127 | "version": 3
128 | },
129 | "file_extension": ".py",
130 | "mimetype": "text/x-python",
131 | "name": "python",
132 | "nbconvert_exporter": "python",
133 | "pygments_lexer": "ipython3",
134 | "version": "3.8.3"
135 | }
136 | },
137 | "nbformat": 4,
138 | "nbformat_minor": 4
139 | }
140 |
-------------------------------------------------------------------------------- /fill_nodata/demo_simulate_missing_data.ipynb: --------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# Simulate Rows with Missing Data\n",
8 | "\n",
9 | "This script takes a perfectly good image and sets a few random rows to 0 to simulate a bad image.\n",
10 | "\n",
11 | "Source Image: © Commission for Lands (COLA) ; Revolutionary Government of Zanzibar (RGoZ), Downloaded from [OpenAerialMap](https://map.openaerialmap.org/#/39.20338153839111,-6.167072977220436,15/user/5ac4842b26964b0010033104/5ae39f0a0b093000130aff55?_k=g4tbwo)"
12 | ]
13 | },
14 | {
15 | "cell_type": "code",
16 | "execution_count": 95,
17 | "metadata": {},
18 | "outputs": [],
19 | "source": [
20 | "import rasterio\n",
21 | "import numpy as np\n",
22 | "from numpy import random"
23 | ]
24 | },
25 | {
26 | "cell_type": "code",
27 | "execution_count": 96,
28 | "metadata": {},
29 | "outputs": [
30 | {
31 | "data": {
32 | "text/plain": [
33 | "12994"
34 | ]
35 | },
36 | "execution_count": 96,
37 | "metadata": {},
38 | "output_type": "execute_result"
39 | }
40 | ],
41 | "source": [
42 | "filename = 'original.tif'\n",
43 | "dataset = rasterio.open(filename)\n",
44 | "metadata = dataset.meta\n",
45 | "dataset.height"
46 | ]
47 | },
48 | {
49 | "cell_type": "code",
50 | "execution_count": 97,
51 | "metadata": {},
52 | "outputs": [],
53 | "source": [
54 | "red = dataset.read(1)\n",
55 | "green = dataset.read(2)\n",
56 | "blue = dataset.read(3)"
57 | ]
58 | },
59 | {
60 | "cell_type": "code",
61 | "execution_count": 98,
62 | "metadata": {},
63 | "outputs": [],
64 | "source": [
65 | "x = random.randint(200, size=(dataset.height))"
66 | ]
67 | },
68 | {
69 | "cell_type": "code",
70 | "execution_count": 99,
71 | "metadata": {},
72 | "outputs": [
73 | {
74 | "name": "stdout",
75 | "output_type": "stream",
76 | "text": [
77 | "Set 70 rows to 0\n"
78 | ]
79 | }
80 | ],
81 | "source": [
82 | "zeros = np.where(x==10)\n",
83 | "print('Set {} rows to 0'.format(zeros[0].shape[0]))"
84 | ]
85 | },
86 | {
87 | "cell_type": "code",
88 | "execution_count": 100,
89 | "metadata": {},
90 | "outputs": [],
91 | "source": [
92 | "for x in np.nditer(zeros):\n",
93 | "    red[x] = 0\n",
94 | "    green[x] = 0\n",
95 | "    blue[x] = 0"
96 | ]
97 | },
98 | {
99 | "cell_type": "code",
100 | "execution_count": 101,
101 | "metadata": {},
102 | "outputs": [],
103 | "source": [
104 | "output_filename = 'bad.tif'\n",
105 | "metadata.update({'driver': 'GTiff'})\n",
106 | "\n",
107 | "rgb_dataset = rasterio.open(output_filename, 'w', **metadata)\n",
108 | "rgb_dataset.write(red, 1)\n",
109 | "rgb_dataset.write(green, 2)\n",
110 | "rgb_dataset.write(blue, 3)\n",
111 | "rgb_dataset.close()"
112 | ]
113 | }
114 | ],
115 | "metadata": {
116 | "kernelspec": {
117 | "display_name": "Python 3",
118 | "language": "python",
119 | "name": "python3"
120 | },
121 | "language_info": {
122 | "codemirror_mode": {
123 | "name": "ipython",
124 | "version": 3
125 | },
126 | "file_extension": ".py",
127 | "mimetype": "text/x-python",
128 | "name": "python",
129 | "nbconvert_exporter": "python",
130 | "pygments_lexer": "ipython3",
131 | "version": "3.7.7"
132 | }
133 | },
134 | "nbformat": 4,
135 | "nbformat_minor": 4
136 | }
137 |
-------------------------------------------------------------------------------- /geeup/README.md: --------------------------------------------------------------------------------
1 | # Scripts for geeup Workflow
2 |
3 | These scripts were written for uploading some eMODIS NDVI files to GEE via the [geeup](https://github.com/samapriya/geeup) tool.
4 |
5 | The scripts assume the following directory structure. For other datasets, the scripts should be modified to match the filename pattern and directory structure.
6 |
7 | ```
8 | Desktop
9 | │
10 | └───eModis
11 | │   rename_files.py
12 | │   update_metadata.py
13 | └─── data
14 | |   US_eMAH_NDVI_2020.287-293.1KM.VI_NDVI.006.2020300170330.tif
15 | |   US_eMAH_NDVI_2020.294-300.1KM_VI_NDVI.006_2020303221006.tif
16 | |   US_eMAH_NDVI_2020.294-307.1KM_VI_NDVI.006_2020311150401.tif
17 | |   US_eMAH_NDVI_2020.301-307.1KM_VI_NDVI.006_2020311141002.tif
18 | ```
19 |
20 | 1. If the filenames have a `.`, the `geeup` tool can't extract the filename correctly. So we run the `rename_files.py` script.
21 |
22 | ```
23 | python rename_files.py
24 | ```
25 | The new filenames now look like this:
26 |
27 | ```
28 | US_eMAH_NDVI_2020_287-293_1KM_VI_NDVI_006_2020300170330.tif
29 | US_eMAH_NDVI_2020_294-300_1KM_VI_NDVI_006_2020303221006.tif
30 | US_eMAH_NDVI_2020_294-307_1KM_VI_NDVI_006_2020311150401.tif
31 | US_eMAH_NDVI_2020_301-307_1KM_VI_NDVI_006_2020311141002.tif
32 | ```
33 |
34 | 2. Run `geeup getmeta` to extract metadata.
35 |
36 | > Note: geeup needs full paths to the folders
37 |
38 | ```
39 | geeup getmeta \
40 | --input /Users/ujavalgandhi/Desktop/eModis/data/ \
41 | --metadata /Users/ujavalgandhi/Desktop/eModis/meta.csv
42 | ```
43 |
44 | 3. Extract image date from the filename and update the `meta.csv` file.
45 |
46 | ```
47 | python update_metadata.py
48 | ```
49 |
50 | 4. Generate cookies using `geeup cookie_setup`.
51 |
52 | > Note: Make sure you grab cookies from the main Code Editor application at https://code.earthengine.google.com. Cookies from subdomains such as https://code.earthengine.google.co.in/ will not work.
53 |
54 | ```
55 | geeup cookie_setup
56 |
57 | ```
58 |
59 | On macOS, you need to switch to Bash and disable canonical mode.
60 | ```
61 | /bin/sh
62 | stty -icanon
63 | geeup cookie_setup
64 |
65 | stty icanon
66 | /bin/zsh
67 | ```
68 |
69 | 5. It's a good idea to create a collection for the data first. You can use the `earthengine` command-line tool.
70 |
71 | ```
72 | earthengine create collection users/ujavalgandhi/eModis
73 | ```
74 |
75 | 6. Upload the data to the new collection using cookies.
76 | ```
77 | geeup upload \
78 | --source /Users/ujavalgandhi/Desktop/eModis/data/ \
79 | --dest users/ujavalgandhi/eModis \
80 | -m /Users/ujavalgandhi/Desktop/eModis/meta.csv \
81 | -u ujaval@spatialthoughts.com \
82 | --method cookies
83 | ```
84 |
85 | 7. Verify and test. https://code.earthengine.google.com/2dc49d0edca3ee311c2132d4a35cdf19
86 |
-------------------------------------------------------------------------------- /geeup/rename_files.py: --------------------------------------------------------------------------------
1 | # This script replaces '.' with '_' in the filenames
2 |
3 | import os
4 |
5 | data_dir = 'data'
6 | files = os.listdir(data_dir)
7 | for old_name in files:
8 |     filename, extension = os.path.splitext(old_name)
9 |     new_name = filename.replace('.', '_') + extension
10 |     old_path = os.path.join(data_dir, old_name)
11 |     new_path = os.path.join(data_dir, new_name)
12 |     os.rename(old_path, new_path)
13 |     print('Renamed: {} to {}'.format(old_path, new_path))
14 |
-------------------------------------------------------------------------------- /geeup/update_metadata.py: --------------------------------------------------------------------------------
1 | # Script to extract image date from filename
2 | # and update the meta.csv file with system:time_start property
3 | import pandas as pd
4 | import re
5 |
6 | filename = 'meta.csv'
7 | df = pd.read_csv(filename)
8 |
9 | def get_date_from_name(row):
10 |     name = row['id_no']
11 |     # Extract the year and doy from name
12 |     # Date is a substring in YYYY_jjj format
13 |     matches = re.search(r'(\d{4})_(\d{3})', name)
14 |     year = int(matches.group(1))
15 |     doy = int(matches.group(2))
16 |     date = pd.to_datetime(year * 1000 + doy, format='%Y%j')
17 |     # GEE expects dates in timestamp format
18 |     return int(date.timestamp() * 1000)
19 |
20 | df['system:time_start'] = df.apply(get_date_from_name, axis=1)
21 | df.to_csv(filename, index=False)
22 |
-------------------------------------------------------------------------------- /h3/ASAM_shp.zip: --------------------------------------------------------------------------------
https://raw.githubusercontent.com/spatialthoughts/projects/48433946a268b82197aa28e08bc12ca03b4c8ed6/h3/ASAM_shp.zip
-------------------------------------------------------------------------------- /h3/gridcounts.gpkg: --------------------------------------------------------------------------------
https://raw.githubusercontent.com/spatialthoughts/projects/48433946a268b82197aa28e08bc12ca03b4c8ed6/h3/gridcounts.gpkg
-------------------------------------------------------------------------------- /imd/README.md: --------------------------------------------------------------------------------
1 | # Working with IMD (Indian Meteorological Department) Gridded Rainfall data
2 |
3 | The notebooks in this folder are for processing [IMD New High Spatial Resolution (0.25X0.25 degree) Long Period (1901-2021) Daily Gridded Rainfall Data Set](https://www.imdpune.gov.in/cmpg/Griddata/Rainfall_25_Bin.html) Over India.
4 |
5 | - [imd_to_geotiff.ipynb](https://github.com/spatialthoughts/projects/blob/master/imd/imd_to_geotiff.ipynb) uses `imdlib` to convert binary grid files to georeferenced GeoTiff files suitable to be used in a GIS.
6 | - [imd_annual_average.ipynb](https://github.com/spatialthoughts/projects/blob/master/imd/imd_annual_average.ipynb) uses `imdlib` to read binary grid files, calculate the long-term annual mean and download it as a GeoTiff file.
7 | - [download_all.py](https://github.com/spatialthoughts/projects/blob/master/imd/download_all.py) shows how to download, convert and create annual rainfall rasters from 1901-2021.
8 |
-------------------------------------------------------------------------------- /imd/download_all.py: --------------------------------------------------------------------------------
1 | '''
2 | Script to download GeoTIFF files of yearly rainfall
3 | from IMD Gridded Rainfall Data.
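
Requires the imdlib and rioxarray packages (see the imports below).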
4 | '''
5 | import os
6 | import imdlib as imd
7 | import rioxarray  # registers the .rio accessor used below
8 |
9 | data_folder = r'C:\Users\ujava\Downloads\imd\data'
10 | output_folder = r'C:\Users\ujava\Downloads\imd\geotiff'
11 |
12 | if not os.path.exists(data_folder):
13 |     os.makedirs(data_folder)
14 | if not os.path.exists(output_folder):
15 |     os.makedirs(output_folder)
16 |
17 | start_year = 1901
18 | end_year = 2021
19 | variable = 'rain' # other options are ('tmin'/ 'tmax')
20 | data = imd.get_data(variable, start_year, end_year, fn_format='yearwise', file_dir=data_folder)
21 |
22 | for year in range(start_year, end_year+1):
23 |     data = imd.open_data(variable, year, year,'yearwise', data_folder)
24 |     ds = data.get_xarray()
25 |     ds = ds.where(ds['rain'] != -999.)
26 |     total = ds.sum('time')
27 |     total = total.rio.set_crs('EPSG:4326')
28 |     total = total.rio.set_spatial_dims('lon', 'lat')
29 |     output_file = '{}.tif'.format(year)
30 |     output_path = os.path.join(output_folder, output_file)
31 |     total.rio.to_raster(output_path)
32 |     print('Successfully created GeoTIFF file', output_path)
33 |
34 |
35 |
-------------------------------------------------------------------------------- /misc/ascii_to_csv.ipynb: --------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "id": "charged-milan",
6 | "metadata": {},
7 | "source": [
8 | "# Converting Tab Delimited ASCII file to a Vector Layer\n",
9 | "\n",
10 | "We have an ASCII Tab-Delimited text in the following format\n",
11 | "```\n",
12 | "(cross section index) (no. of points along transect)\n",
13 | "x-coordinates of transect points\n",
14 | "y-coordinates of transect points\n",
15 | "pre-flood elevation z94 of transect points\n",
16 | "post-flood elevation z96 of transect points\n",
17 | "reconstructed bedrock elevation at transect points\n",
18 | "```\n",
19 | "CRS: MTM (Modified Transverse Mercator projection) zone 7 coordinates (NAD83)\n",
20 | "\n",
21 | "We can create a CSV with one row per transect point. QGIS can read this format easily and display the data."
22 | ]
23 | },
24 | {
25 | "cell_type": "code",
26 | "execution_count": 19,
27 | "id": "noted-processor",
28 | "metadata": {},
29 | "outputs": [],
30 | "source": [
31 | "input = 'crossSections.txt'\n",
32 | "output = 'crossSections.csv'"
33 | ]
34 | },
35 | {
36 | "cell_type": "code",
37 | "execution_count": null,
38 | "id": "unauthorized-weight",
39 | "metadata": {},
40 | "outputs": [],
41 | "source": [
42 | "data = []\n",
43 | "with open(input, 'r') as f:\n",
44 | "    # skip first line\n",
45 | "    f.readline()\n",
46 | "    for line in f:\n",
47 | "        # Get the transect id and number of vertices from the header line\n",
48 | "        fid, numvertices = line.split()\n",
49 | "        x_coordinates = f.readline().split()\n",
50 | "        y_coordinates = f.readline().split()\n",
51 | "        z94_elevation = f.readline().split()\n",
52 | "        z96_elevation = f.readline().split()\n",
53 | "        bedrock_elevation = f.readline().split()\n",
54 | "        for x, y, z94, z96, bedrock in zip(x_coordinates, y_coordinates, z94_elevation, z96_elevation, bedrock_elevation):\n",
55 | "            data.append({'x': x, 'y': y, 'transect_id': int(fid), 'z94': float(z94), 'z96': float(z96), 'bedrock': float(bedrock)})"
56 | ]
57 | },
58 | {
59 | "cell_type": "code",
60 | "execution_count": 21,
61 | "id": "unexpected-contribution",
62 | "metadata": {},
63 | "outputs": [],
64 | "source": [
65 | "import csv\n",
66 | "\n",
67 | "with open(output, 'w') as csvfile:\n",
68 | "    fieldnames = ['transect_id', 'z94', 'z96', 'bedrock', 'x', 'y']\n",
69 | "    writer = csv.DictWriter(csvfile, fieldnames=fieldnames)\n",
70 | "    writer.writeheader()\n",
71 | "    for row in data:\n",
72 | "        writer.writerow(row)\n"
73 | ]
74 | },
75 | {
76 | "cell_type": "markdown",
77 | "id": "about-volunteer",
78 | "metadata": {},
79 | "source": [
80 | "The resulting CSV can be imported using the *Add Delimited Text* tab in the QGIS Data Source Manager\n",
81 | "\n",
82 | "![](import.png)"
83 | ]
84 | },
85 | {
86 | "cell_type": "markdown",
87 | "id": "partial-secretariat",
88 | "metadata": {},
89 | "source": [
90 | "The point layer loads in QGIS with the correct CRS specified.\n",
91 | "\n",
92 | "![](result.png)"
93 | ]
94 | },
95 | {
96 | "cell_type": "code",
97 | "execution_count": null,
98 | "id": "classical-watts",
99 | "metadata": {},
100 | "outputs": [],
101 | "source": []
102 | }
103 | ],
104 | "metadata": {
105 | "kernelspec": {
106 | "display_name": "Python 3",
107 | "language": "python",
108 | "name": "python3"
109 | },
110 | "language_info": {
111 | "codemirror_mode": {
112 | "name": "ipython",
113 | "version": 3
114 | },
115 | "file_extension": ".py",
116 | "mimetype": "text/x-python",
117 | "name": "python",
118 | "nbconvert_exporter": "python",
119 | "pygments_lexer": "ipython3",
120 | "version": "3.9.1"
121 | }
122 | },
123 | "nbformat": 4,
124 | "nbformat_minor": 5
125 | }
126 |
-------------------------------------------------------------------------------- /misc/buildings.png: --------------------------------------------------------------------------------
https://raw.githubusercontent.com/spatialthoughts/projects/48433946a268b82197aa28e08bc12ca03b4c8ed6/misc/buildings.png
-------------------------------------------------------------------------------- /misc/csv_to_gml.ipynb: --------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "id": "69aa978b-1b03-4388-bdb9-f8a13d2da79e",
6 | "metadata": {},
7 | "source": [
8 | "We have a CSV file in the following format. For a normal vertex, we tag it as GEO (for a geodesic string); for an arc segment, we tag it with a combination of two (2) successive ABE (arc-by-edge) vertices.\n",
9 | "\n",
10 | "```\n",
11 | "25.270806364429227 51.60333739375988 GEO\n",
12 | "25.271009225305253 51.60324801592732 GEO\n",
13 | "25.271113964392683 51.603201869139234 GEO\n",
14 | "25.2707782569834 51.60228029931405 GEO\n",
15 | "25.270673510842762 51.602326451082504 GEO\n",
16 | "25.27046820899583 51.60241690493056 ABE\n",
17 | "25.270466156352832 51.60261064947953 ABE\n",
18 | "25.27033759984866 51.602747027489016 GEO\n",
19 | "25.27042237891931 51.60297183557495 GEO\n",
20 | "25.270507464783933 51.60319640070589 ABE\n",
21 | "25.27069468321789 51.60320703990452 ABE\n",
22 | "25.270788613761002 51.60330422209724 GEO\n",
23 | "```\n",
24 | "We want to convert this to a GML file as follows:\n",
25 | "```\n",
26 | "<gml xmlns:gml=\"http://www.opengis.net/gml\">\n",
27 | "<gml:patches>\n",
28 | "<gml:PolygonPatch>\n",
29 | "<gml:exterior>\n",
30 | "<gml:Ring>\n",
31 | "<gml:curveMember>\n",
32 | "<gml:curve gml:id=\"gmlID389873\">\n",
33 | "<gml:GeodesicString>\n",
34 | "<gml:pos>51.6033373938 25.2708063644</gml:pos>\n",
35 | "<gml:pos>51.6032480159 25.2710092253</gml:pos>\n",
36 | "<gml:pos>51.6032018691 25.2711139644</gml:pos>\n",
37 | "<gml:pos>51.6022802993 25.2707782570</gml:pos>\n",
38 | "<gml:pos>51.6023264511 25.2706735108</gml:pos>\n",
39 | "<gml:pos>51.6024169049 25.2704682090</gml:pos>\n",
40 | "</gml:GeodesicString>\n",
41 | "<gml:ArcString>\n",
42 | "<gml:pos>51.6024169049 25.2704682090</gml:pos>\n",
43 | "<gml:pos>51.6026106495 25.2704661564</gml:pos>\n",
44 | "<gml:pos>51.6027470275 25.2703375998</gml:pos>\n",
45 | "</gml:ArcString>\n",
46 | "<gml:GeodesicString>\n",
47 | "<gml:pos>51.6027470275 25.2703375998</gml:pos>\n",
48 | "<gml:pos>51.6029718356 25.2704223789</gml:pos>\n",
49 | "<gml:pos>51.6031964007 25.2705074648</gml:pos>\n",
50 | "</gml:GeodesicString>\n",
51 | "<gml:ArcString>\n",
52 | "<gml:pos>51.6031964007 25.2705074648</gml:pos>\n",
53 | "<gml:pos>51.6032070399 25.2706946832</gml:pos>\n",
54 | "<gml:pos>51.6033042221 25.2707886138</gml:pos>\n",
55 | "</gml:ArcString>\n",
56 | "<gml:GeodesicString>\n",
57 | "<gml:pos>51.6033042221 25.2707886138</gml:pos>\n",
58 | "<gml:pos>51.6033373938 25.2708063644</gml:pos>\n",
59 | "</gml:GeodesicString>\n",
60 | "</gml:curve>\n",
61 | "</gml:curveMember>\n",
62 | "</gml:Ring>\n",
63 | "</gml:exterior>\n",
64 | "</gml:PolygonPatch>\n",
65 | "</gml:patches>\n",
66 | "</gml>\n",
67 | "```"
68 | ]
69 | },
70 | {
71 | "cell_type": "code",
72 | "execution_count": 91,
73 | "id": "d2a4f07e-8156-4d69-840a-0b29dfbefd6a",
74 | "metadata": {},
75 | "outputs": [],
76 | "source": [
77 | "import csv\n",
78 | "import xml.etree.cElementTree as ET\n",
79 | "import xml.dom.minidom\n",
80 | "import itertools"
81 | ]
82 | },
83 | {
84 | "cell_type": "code",
85 | "execution_count": 92,
86 | "id": "4490ac20-4483-4260-b8cf-b744b96a8016",
87 | "metadata": {},
88 | "outputs": [],
89 | "source": [
90 | "GML = 'http://www.opengis.net/gml'\n",
91 | "ET.register_namespace('gml', GML) \n",
92 | "\n",
93 | "root = ET.Element('gml')\n",
94 | "patches = ET.SubElement(root, ET.QName(GML, 'patches'))\n",
95 | "polypatches = ET.SubElement(patches, ET.QName(GML, 'PolygonPatch'))\n",
96 | "exterior = ET.SubElement(polypatches, ET.QName(GML, 'exterior'))\n",
97 | "ring = ET.SubElement(exterior, ET.QName(GML, 'Ring'))\n",
98 | "curvemember = ET.SubElement(ring, ET.QName(GML, 'curveMember'))\n",
99 | "curve = ET.SubElement(curvemember, ET.QName(GML, 'curve'), {'gml:id':'gmlID389873'})"
100 | ]
101 | },
102 | {
103 | "cell_type": "code",
104 | "execution_count": 93,
105 | "id": "a8c81e63-96d2-481e-9d91-b1f20c3d07bd",
106 | "metadata": {},
107 | "outputs": [],
108 | "source": [
109 | "with open('data.csv', 'r') as f:\n",
110 | "    lines = f.readlines()\n",
111 | "\n",
112 | "processed = [line.strip().split() for line in lines]"
113 | ]
114 | },
115 | {
116 | "cell_type": "code",
117 | "execution_count": 94,
118 | "id": "178a9bd9-f42c-48c4-aa45-37522220f171",
119 | "metadata": {},
120 | "outputs": [],
121 | "source": [
122 | "# Group all lines where the vertex type is the same\n",
123 | "iterator = itertools.groupby(processed, lambda item: item[2])\n",
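"# Note: itertools.groupby only groups *consecutive* rows that share a tag,\n",
"# so the row order of data.csv determines the segments.\n",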
124 | "\n",
125 | "# iterating over the result\n",
126 | "# element and its group\n",
127 | "for element, group in iterator:\n",
128 | "    if element == 'GEO':\n",
129 | "        geodesicstring = ET.SubElement(curve, ET.QName(GML, 'GeodesicString'))\n",
130 | "        for vertex in list(group):\n",
131 | "            value = '{},{}'.format(vertex[1], vertex[0])\n",
132 | "            ET.SubElement(geodesicstring, ET.QName(GML, 'pos')).text = value\n",
133 | "            last_geo_value = value  # remember the end point to start the next arc\n",
134 | "    if element == 'ABE':\n",
135 | "        arcstring = ET.SubElement(curve, ET.QName(GML, 'ArcString'))\n",
136 | "        ET.SubElement(arcstring, ET.QName(GML, 'pos')).text = last_geo_value\n",
137 | "        for vertex in list(group):\n",
138 | "            value = '{},{}'.format(vertex[1], vertex[0])\n",
139 | "            ET.SubElement(arcstring, ET.QName(GML, 'pos')).text = value\n"
140 | ]
141 | },
142 | {
143 | "cell_type": "code",
144 | "execution_count": 95,
145 | "id": "62920c5d-630c-465d-85df-d130cd2cc24e",
146 | "metadata": {},
147 | "outputs": [
148 | {
149 | "name": "stdout",
150 | "output_type": "stream",
151 | "text": [
152 | "<?xml version=\"1.0\" ?>\n",
153 | "<gml xmlns:gml=\"http://www.opengis.net/gml\">\n",
154 | " <gml:patches>\n",
155 | "  <gml:PolygonPatch>\n",
156 | "   <gml:exterior>\n",
157 | "    <gml:Ring>\n",
158 | "     <gml:curveMember>\n",
159 | "      <gml:curve gml:id=\"gmlID389873\">\n",
160 | "       <gml:GeodesicString>\n",
161 | "        <gml:pos>51.60333739375988,25.270806364429227</gml:pos>\n",
162 | "        <gml:pos>51.60324801592732,25.271009225305253</gml:pos>\n",
163 | "        <gml:pos>51.603201869139234,25.271113964392683</gml:pos>\n",
164 | "        <gml:pos>51.60228029931405,25.2707782569834</gml:pos>\n",
165 | "        <gml:pos>51.602326451082504,25.270673510842762</gml:pos>\n",
166 | "       </gml:GeodesicString>\n",
167 | "       <gml:ArcString>\n",
168 | "        <gml:pos>51.602326451082504,25.270673510842762</gml:pos>\n",
169 | "        <gml:pos>51.60241690493056,25.27046820899583</gml:pos>\n",
170 | "        <gml:pos>51.60261064947953,25.270466156352832</gml:pos>\n",
171 | "       </gml:ArcString>\n",
172 | "       <gml:GeodesicString>\n",
173 | "        <gml:pos>51.602747027489016,25.27033759984866</gml:pos>\n",
174 | "        <gml:pos>51.60297183557495,25.27042237891931</gml:pos>\n",
175 | "       </gml:GeodesicString>\n",
176 | "       <gml:ArcString>\n",
177 | "        <gml:pos>51.60297183557495,25.27042237891931</gml:pos>\n",
178 | "        <gml:pos>51.60319640070589,25.270507464783933</gml:pos>\n",
179 | "        <gml:pos>51.60320703990452,25.27069468321789</gml:pos>\n",
180 | "       </gml:ArcString>\n",
181 | "       <gml:GeodesicString>\n",
182 | "        <gml:pos>51.60330422209724,25.270788613761002</gml:pos>\n",
183 | "       </gml:GeodesicString>\n",
184 | "      </gml:curve>\n",
185 | "     </gml:curveMember>\n",
186 | "    </gml:Ring>\n",
187 | "   </gml:exterior>\n",
188 | "  </gml:PolygonPatch>\n",
189 | " </gml:patches>\n",
190 | "</gml>\n",
191 | "\n"
192 | ]
193 | }
194 | ],
195 | "source": [
196 | "dom = xml.dom.minidom.parseString(ET.tostring(root))\n",
197 | "xml_string = dom.toprettyxml(indent=' ')\n",
198 | "print(xml_string)"
199 | ]
200 | },
201 | {
202 | "cell_type": "code",
203 | "execution_count": 96,
204 | "id": "698134dd-5a32-4adb-abed-76240ca7973a",
205 | "metadata": {},
206 | "outputs": [],
207 | "source": [
208 | "with open('output.gml', 'w') as f:\n",
209 | "    f.write(xml_string)\n"
210 | ]
211 | }
212 | ],
213 | "metadata": {
214 | "kernelspec": {
215 | "display_name": "Python 3",
216 | "language": "python",
217 | "name": "python3"
218 | },
219 | "language_info": {
220 | "codemirror_mode": {
221 | "name": "ipython",
222 | "version": 3
223 | },
224 | "file_extension": ".py",
225 | "mimetype": "text/x-python",
226 | "name": "python",
227 | "nbconvert_exporter": "python",
228 | "pygments_lexer": "ipython3",
229 | "version": "3.7.11"
230 | }
231 | },
232 | "nbformat": 4,
233 | "nbformat_minor": 5
234 | }
235 |
-------------------------------------------------------------------------------- /misc/dat_to_csv.ipynb: --------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "id": "million-wallpaper",
6 | "metadata": {},
7 | "source": [
8 | "# Converting Surfer Atlas .BNA (ASCII DAT) file to a Vector Layer\n",
9 | "\n",
10 | "We have an ASCII file from Surfer in the [BNA format](http://surferhelp.goldensoftware.com/subsys/subsys_gsibna_hid_gsibna_filedesc.htm) defining every building ground plan as a polygon by listing its vertices. Hence the entry for a given building is the building number followed by the number of vertices of its boundary and the rooftop elevation (assumed flat), followed by a list of the (X,Y) coordinates of each one of the vertices of its boundary. As an example the entry for building number 105 is given below:\n",
11 | "The sequence means: Building number 105 is a polygon with 6 vertices and its rooftop elevation is 54.69 m (MSL). The (X,Y) co-ordinates of the given 6 vertices follow in the next six lines.\n",
12 | "\n",
13 | "```\n",
14 | " 105 6 54.69\n",
15 | " 1651.562500 4787.500000\n",
16 | " 1652.125000 4785.000000\n",
17 | " 1649.062500 4787.000000\n",
18 | " 1650.750000 4789.500000\n",
19 | " 1653.812500 4787.500000\n",
20 | " 1652.125000 4785.000000\n",
21 | " 106 6 58.98\n",
22 | " 1555.875000 4755.500000\n",
23 | " 1558.000000 4753.000000\n",
24 | " 1553.187500 4753.500000\n",
25 | " 1553.687500 4757.500000\n",
26 | " 1558.500000 4757.000000\n",
27 | " 1558.000000 4753.000000\n",
28 | " 107 8 62.32\n",
29 | " 1537.062500 4741.500000\n",
30 | " 1532.062500 4737.000000\n",
31 | " 1532.062500 4744.500000\n",
32 | " 1539.625000 4744.500000\n",
33 | " 1539.437500 4742.000000\n",
34 | " 1542.062500 4742.000000\n",
35 | " 1541.875000 4737.000000\n",
36 | " 1532.062500 4737.000000\n",
37 | "```\n",
38 | "\n",
39 | "We can create a CSV with the polygon geometry stored as text in WKT format. QGIS can read this format easily and display the data."
40 | ]
41 | },
42 | {
43 | "cell_type": "code",
44 | "execution_count": 43,
45 | "id": "organizational-knock",
46 | "metadata": {},
47 | "outputs": [],
48 | "source": [
49 | "input = 'Buildings.dat'\n",
50 | "output = 'Buildings.csv'"
51 | ]
52 | },
53 | {
54 | "cell_type": "code",
55 | "execution_count": 41,
56 | "id": "veterinary-chancellor",
57 | "metadata": {},
58 | "outputs": [],
59 | "source": [
60 | "data = []\n",
61 | "with open(input, 'r') as f:\n",
62 | "    for line in f:\n",
63 | "        # Get number of vertices from the first line\n",
64 | "        fid, numvertices, elev = line.split()\n",
65 | "        coordinates = []\n",
66 | "        # Skip ahead number of lines equal to number of vertices and save the coordinates\n",
67 | "        for x in range(int(numvertices)):\n",
68 | "            x, y = f.readline().split()\n",
69 | "            coordinates.append(('{} {}'.format(x,y)))\n",
70 | "        # Discard first coordinate which is the centroid\n",
71 | "        wkt = 'POLYGON (({}))'.format(','.join(coordinates[1:]))\n",
72 | "        data.append({'fid': int(fid), 'elev': float(elev), 'wkt': wkt})"
73 | ]
74 | },
75 | {
76 | "cell_type": "code",
77 | "execution_count": 42,
78 | "id": "collective-launch",
79 | "metadata": {},
80 | "outputs": [],
81 | "source": [
82 | "import csv\n",
83 | "\n",
84 | "with open(output, 'w') as csvfile:\n",
85 | "    fieldnames = ['fid', 'elev', 'wkt']\n",
86 | "    writer = csv.DictWriter(csvfile, fieldnames=fieldnames)\n",
87 | "    writer.writeheader()\n",
88 | "    for row in data:\n",
89 | "        writer.writerow(row)\n"
90 | ]
91 | },
92 | {
93 | "cell_type": "markdown",
94 | "id": "stock-doctrine",
95 | "metadata": {},
96 | "source": [
97 | "The result is a CSV file that looks like this\n",
98 | "\n",
99 | "```\n",
100 | "fid,elev,wkt\n",
101 | "2,127.69,\"POLYGON ((627.187500 7781.000000,626.125000 7785.000000,629.062500 7786.000000,630.125000 7782.000000,627.187500 7781.000000))\"\n",
102 | "3,164.42,\"POLYGON ((824.125000 7675.500000,822.687500 7679.000000,826.000000 7680.500000,827.437500 7677.000000,824.125000 7675.500000))\"\n",
103 | "4,171.19,\"POLYGON ((840.125000 7640.500000,836.812500 7652.000000,842.937500 7654.000000,846.250000 7642.500000,840.125000 7640.500000))\"\n",
104 | "```"
105 | ]
106 | },
107 | {
108 | "cell_type": "markdown",
109 | "id": "exceptional-washington",
110 | "metadata": {},
111 | "source": [
112 | "The resulting CSV can be imported using the *Delimited Text* tab in the QGIS Data Source Manager using **WKT** field as *Geometry field*\n",
113 | "\n",
114 | "![](import_wkt.png)"
115 | ]
116 | },
117 | {
118 | "cell_type": "markdown",
119 | "id": "steady-peoples",
120 | "metadata": {},
121 | "source": [
122 | "The polygon layer loads in QGIS. Since the data also has an `elev` attribute, we can style it using the **2.5D** renderer in QGIS.\n",
123 | "![](buildings.png)"
124 | ]
125 | },
126 | {
127 | "cell_type": "code",
128 | "execution_count": null,
129 | "id": "settled-proposition",
130 | "metadata": {},
131 | "outputs": [],
132 | "source": []
133 | }
134 | ],
135 | "metadata": {
136 | "kernelspec": {
137 | "display_name": "Python 3",
138 | "language": "python",
139 | "name": "python3"
140 | },
141 | "language_info": {
142 | "codemirror_mode": {
143 | "name": "ipython",
144 | "version": 3
145 | },
146 | "file_extension": ".py",
147 | "mimetype": "text/x-python",
148 | "name": "python",
149 | "nbconvert_exporter": "python",
150 | "pygments_lexer": "ipython3",
151 | "version": "3.9.1"
152 | }
153 | },
154 | "nbformat": 4,
155 | "nbformat_minor": 5
156 | }
157 |
-------------------------------------------------------------------------------- /misc/data.csv: --------------------------------------------------------------------------------
1 | 25.270806364429227 51.60333739375988 GEO
2 | 25.271009225305253 51.60324801592732 GEO
3 | 25.271113964392683 51.603201869139234 GEO
4 | 25.2707782569834 51.60228029931405 GEO
5 | 25.270673510842762 51.602326451082504 GEO
6 | 25.27046820899583 51.60241690493056 ABE
7 | 25.270466156352832 51.60261064947953 ABE
8 | 25.27033759984866 51.602747027489016 GEO
9 | 25.27042237891931 51.60297183557495 GEO
10 | 25.270507464783933 51.60319640070589 ABE
11 | 25.27069468321789 51.60320703990452 ABE
12 | 25.270788613761002 51.60330422209724 GEO
13 |
-------------------------------------------------------------------------------- /misc/import.png: --------------------------------------------------------------------------------
https://raw.githubusercontent.com/spatialthoughts/projects/48433946a268b82197aa28e08bc12ca03b4c8ed6/misc/import.png
-------------------------------------------------------------------------------- /misc/import_wkt.png: --------------------------------------------------------------------------------
https://raw.githubusercontent.com/spatialthoughts/projects/48433946a268b82197aa28e08bc12ca03b4c8ed6/misc/import_wkt.png
-------------------------------------------------------------------------------- /misc/result.png: --------------------------------------------------------------------------------
https://raw.githubusercontent.com/spatialthoughts/projects/48433946a268b82197aa28e08bc12ca03b4c8ed6/misc/result.png
-------------------------------------------------------------------------------- /misc/unpivot.ipynb: --------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# Unpivot GIS Data\n",
8 | "\n",
9 | "GIS analysis and visualization typically need data with 1 value per row. If your data is structured in columns, you need to 'unpivot' it to convert it to 1 value per row. Note that this will result in duplicate features, but they can be handled well in GIS.\n",
10 | "\n",
11 | "The example Excel workbook is structured like this:\n",
12 | "\n",
13 | "| id | species1 | species2 | .. | value 1 | value 2 | ... |\n",
14 | "| -- | -------- | -------- | -- | ------- | ------- | --- |\n",
15 | "| 1 | s1 | s2 | .. | 10 | 20 | ... |\n",
16 | "\n",
17 | "This script will convert it to a table like the following:\n",
18 | "\n",
19 | "\n",
20 | "| id | species | value |\n",
21 | "| -- | ------- | ----- |\n",
22 | "| 1 | s1 | 10 |\n",
23 | "| 1 | s2 | 20 |"
24 | ]
25 | },
26 | {
27 | "cell_type": "code",
28 | "execution_count": null,
29 | "metadata": {},
30 | "outputs": [],
31 | "source": [
32 | "import pandas as pd"
33 | ]
34 | },
35 | {
36 | "cell_type": "code",
37 | "execution_count": null,
38 | "metadata": {},
39 | "outputs": [],
40 | "source": [
41 | "input = 'original.xlsx'\n",
42 | "df = pd.read_excel(input)"
43 | ]
44 | },
45 | {
46 | "cell_type": "code",
47 | "execution_count": null,
48 | "metadata": {},
49 | "outputs": [],
50 | "source": [
51 | "df1 = pd.melt(df, id_vars=['id', 'species1'], value_vars=['value1']).rename(columns = {'species1':'species'})\n",
52 | "df2 = pd.melt(df, id_vars=['id', 'species2'], value_vars=['value2']).rename(columns = {'species2':'species'})\n",
53 | "merged = pd.concat([df1, df2])"
54 | ]
55 | },
56 | {
57 | "cell_type": "code",
58 | "execution_count": null,
59 | "metadata": {},
60 | "outputs": [],
61 | "source": [
62 | "# Drop rows which have null values "
63 | ]
64 | },
65 | {
66 | "cell_type": "code",
67 | "execution_count": null,
68 | "metadata": {},
69 | "outputs": [],
70 | "source": [
71 | "merged = merged.dropna(axis=0, how='any')"
72 | ]
73 | },
74 | {
75 | "cell_type": "code",
76 | "execution_count": null,
77 | "metadata": {},
78 | "outputs": [],
79 | "source": [
80 | "output = 'unpivoted.xlsx'\n",
81 | "merged.to_excel(output, index=False)"
82 | ]
83 | }
84 | ],
85 | "metadata": {
86 | "kernelspec": {
87 | "display_name": "Python 3",
88 | "language": "python",
89 | "name": "python3"
90 | },
91 | "language_info": {
92 | "codemirror_mode": {
93 | "name": "ipython",
94 | "version": 3
95 | },
96 | "file_extension": ".py",
97 | "mimetype": "text/x-python",
98 | "name": "python",
99 | "nbconvert_exporter": "python",
100 | "pygments_lexer": "ipython3",
101 | "version": "3.9.1"
102 | }
103 | },
104 | "nbformat": 4,
105 | "nbformat_minor": 4
106 | }
107 |
-------------------------------------------------------------------------------- /pyqgis/README.md: --------------------------------------------------------------------------------
1 | ## PyQGIS Scripts
2 |
3 | This folder contains a collection of useful PyQGIS scripts.
4 |
5 | ### Processing Scripts
6 |
7 | - [`attributeiterator.py`](https://github.com/spatialthoughts/projects/blob/master/pyqgis/attributeiterator.py): Processing script that takes a vector layer and iterates through its attributes to create attribute indices.
8 | - [`filter_layer.py`](https://github.com/spatialthoughts/projects/blob/master/pyqgis/filter_layer.py): Processing Script to Apply a Filter to a Vector Layer
9 | - [`rastercalculator.py`](https://github.com/spatialthoughts/projects/blob/master/pyqgis/rastercalculator.py): Processing Script to demonstrate syntax for Raster Calculator
10 | - [`copy_raster.py`](https://github.com/spatialthoughts/projects/blob/master/pyqgis/copy_raster.py): Processing Script to demonstrate how to copy a raster layer.
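
Once loaded in the Processing Toolbox, these scripts can also be run from the QGIS Python console. A minimal sketch, assuming the filter script above is installed (the `script:filterlayer` id comes from its `name()` method; the layer name here is hypothetical):

```python
from qgis import processing

# Run the Filter Layer algorithm on a loaded vector layer
processing.run('script:filterlayer', {
    'INPUT': 'ne_10m_admin_0_countries',  # hypothetical layer name
    'COUNTRY': 0  # index into the script's COUNTRIES list
})
```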
11 |
12 | ### Python Console Scripts
13 |
14 | - [`ee_qgis.py`](https://github.com/spatialthoughts/projects/blob/master/pyqgis/ee_qgis.py): Script demonstrating use of Google Earth Engine Plugin with a QGIS layer.
15 | - [`rename_layers.py`](https://github.com/spatialthoughts/projects/blob/master/pyqgis/rename_layers.py): Script to Rename Layers after using the Iterate feature in Processing Toolbox
16 |
-------------------------------------------------------------------------------- /pyqgis/actions/buffer_select_action.py: --------------------------------------------------------------------------------
1 | line_layer = QgsProject.instance().mapLayer('[% @layer_id %]')
2 | polygon_layer_name = 'buildings'
3 | distance = 20
4 | fid = [% $id %]
5 | line_feature = line_layer.getFeature(fid)
6 | line_geometry = line_feature.geometry().buffer(distance, 5)
7 | polygon_layer = QgsProject.instance().mapLayersByName(polygon_layer_name)[0]
8 | nearby_features = [p.id() for p in polygon_layer.getFeatures()
9 |     if p.geometry().intersects(line_geometry) ]
10 | polygon_layer.selectByIds(nearby_features)
-------------------------------------------------------------------------------- /pyqgis/actions/hello_world.py: --------------------------------------------------------------------------------
1 | from qgis.utils import iface
2 |
3 | fid = [% $id %]
4 | layer = QgsProject.instance().mapLayer('[% @layer_id %]')
5 | new_layer = layer.materialize(QgsFeatureRequest().setFilterFids([fid]))
6 | new_layer.setName('[%NAME%]')
7 | QgsProject.instance().addMapLayer(new_layer)
8 | iface.setActiveLayer(layer)
9 |
-------------------------------------------------------------------------------- /pyqgis/actions/mapillary_action.py: --------------------------------------------------------------------------------
1 | import requests
2 | from qgis.PyQt.QtCore import QUrl
3 | from qgis.PyQt.QtWebKitWidgets import QWebView
4 | from qgis.utils import iface
5 |
6 | # https://www.mapillary.com/developer/api-documentation#image
7 |
8 | parameters = {
9 |     'access_token': '',
10 |     'bbox': '{},{},{},{}'.format([%$x%]-0.001,[%$y%]-0.001, [%$x%]+0.001, [%$y%]+0.001),
11 |     'fields': 'thumb_1024_url',
12 |     'limit': 1
13 | }
14 |
15 | response = requests.get(
16 |     'https://graph.mapillary.com/images', params=parameters)
17 | if response.status_code == 200:
18 |     data_json = response.json()
19 |     if data_json['data']:
20 |         url = data_json['data'][0]['thumb_1024_url']
21 |         myWV = QWebView(None)
22 |         myWV.load(QUrl(url))
23 |         myWV.show()
24 |     else:
25 |         iface.messageBar().pushMessage('No images found')
26 |
-------------------------------------------------------------------------------- /pyqgis/actions/point_select_action.py: --------------------------------------------------------------------------------
1 | line_layer = QgsProject.instance().mapLayer('[% @layer_id %]')
2 | point_layer_name = 'points'
3 | distance = 10000
4 | fid = [% $id %]
5 | line_feature = line_layer.getFeature(fid)
6 | line_geometry = line_feature.geometry().buffer(distance, 5)
7 | point_layer = QgsProject.instance().mapLayersByName(point_layer_name)[0]
8 | nearby_points = [p.id() for p in point_layer.getFeatures()
9 |     if p.geometry().intersects(line_geometry) ]
10 | point_layer.selectByIds(nearby_points)
11 |
-------------------------------------------------------------------------------- /pyqgis/actions/reverse_geocode_street.py: --------------------------------------------------------------------------------
1 | """
2 | PyQGIS Code to be used in
a QGIS Python Action for 3 | Reverse Geocoding a point layer using a street network 4 | 5 | This action to be defined on the point layer 6 | """ 7 | import math 8 | from qgis.utils import iface 9 | from qgis.core import QgsSpatialIndex, QgsVectorLayer 10 | from PyQt5.QtCore import QVariant 11 | 12 | street_layer = 'Street' 13 | street_layer_name_attr = 'NAME' 14 | left_from_field = 'L_F_ADD' 15 | left_to_field = 'L_T_ADD' 16 | right_from_field = 'R_F_ADD' 17 | right_to_field = 'R_T_ADD' 18 | address_attribute_name = 'ADDRESS' 19 | 20 | # Point layer to reverse geocode 21 | point_layer = QgsProject.instance().mapLayer('[% @layer_id %]') 22 | fid = [% $id %] 23 | feature = point_layer.getFeature(fid) 24 | geometry = feature.geometry() 25 | 26 | # Street layer 27 | street_layer = QgsProject.instance().mapLayersByName(street_layer)[0] 28 | 29 | # Use Spatial Index to find nearest features 30 | index = QgsSpatialIndex(street_layer.getFeatures()) 31 | # Get 5 nearest features based on the spatial index 32 | nearestids = index.nearestNeighbor(geometry, 5) 33 | nearest_distance = 9999 34 | # Iterate over candidate features and find the nearest one 35 | for f in street_layer.getFeatures(QgsFeatureRequest(nearestids)): 36 | distance = geometry.distance(f.geometry()) 37 | if distance < nearest_distance: 38 | nearest_feature = f 39 | nearest_distance = distance 40 | 41 | # Now we have the nearest line 42 | # Determine which side of the street is the address point 43 | point = geometry.asPoint() 44 | point_in_line = nearest_feature.geometry().closestSegmentWithContext(point)[1] 45 | 46 | out = nearest_feature.geometry().closestSegmentWithContext(point) 47 | # out is a tuple. Last item indicates relative direction of the point 48 | direction = out[-1] 49 | 50 | if direction < 0: 51 | side = 'Left' 52 | else: 53 | side = 'Right' 54 | print('Address is on the {}'.format(side)) 55 | 56 | # Locate the nearest point on the line segment 57 | street_name = nearest_feature[street_layer_name_attr] 58 | nearest_geometry = nearest_feature.geometry() 59 | nearest_point = nearest_geometry.nearestPoint(geometry) 60 | 61 | # Measure how far along is the nearest point from start of geometry 62 | distance = nearest_geometry.lineLocatePoint(nearest_point) 63 | length = nearest_geometry.length() 64 | 65 | print('Point is at {:.0%} from start'.format(distance/length)) 66 | attributes = nearest_feature.attributes() 67 | 68 | # Find min and max range of addresses 69 | if side == 'Left': 70 | from_add = int(nearest_feature[left_from_field]) 71 | to_add = int(nearest_feature[left_to_field]) 72 | else: 73 | from_add = int(nearest_feature[right_from_field]) 74 | to_add = int(nearest_feature[right_to_field]) 75 | print('from', from_add, 'to', to_add) 76 | 77 | # Interpolate the address and round up to nearest integer 78 | interpolated = from_add + ((to_add - from_add)*distance)/length 79 | 80 | rounded = math.ceil(interpolated) 81 | 82 | if side == 'Left' and rounded % 2 == 0: 83 | streetnum = rounded + 1 84 | elif side == 'Left' and rounded % 2 !=0: 85 | streetnum = rounded 86 | elif side == 'Right' and rounded % 2 == 0: 87 | streetnum = rounded 88 | elif side == 'Right' and rounded % 2 != 0: 89 | streetnum = rounded + 1 90 | 91 | address = '{:.0f}, {}'.format(streetnum, street_name) 92 | side = '{} side of street'.format(side) 93 | message = '{}, ({})'.format(address, side) 94 | iface.messageBar().pushMessage(message) 95 | 96 | # Create a point layer showing the nearest point whose address we determined 97 | vlayer = 
QgsVectorLayer('Point?crs=EPSG:2274', 'point', 'memory') 98 | provider = vlayer.dataProvider() 99 | provider.addAttributes([QgsField(address_attribute_name, QVariant.String)]) 100 | vlayer.updateFields() 101 | 102 | f = QgsFeature() 103 | f.setGeometry(nearest_point) 104 | f.setAttributes([address]) 105 | provider = vlayer.dataProvider() 106 | provider.addFeature(f) 107 | vlayer.updateExtents() 108 | QgsProject.instance().addMapLayer(vlayer) 109 | iface.setActiveLayer(point_layer) 110 | -------------------------------------------------------------------------------- /pyqgis/actions/tileindex_load.py: -------------------------------------------------------------------------------- 1 | from qgis.utils import iface 2 | 3 | path = r'[%location%]' 4 | iface.addRasterLayer(path) 5 | 6 | index_layer_name = 'index' 7 | index_layer = QgsProject.instance().mapLayersByName(index_layer_name)[0] 8 | iface.setActiveLayer(index_layer) 9 | -------------------------------------------------------------------------------- /pyqgis/actions/tileindex_remove.py: -------------------------------------------------------------------------------- 1 | from qgis.utils import iface 2 | from PyQt5.QtCore import QFileInfo 3 | 4 | path = r'[%location%]' 5 | layer_name = QFileInfo(path).baseName() 6 | layer_list = QgsProject.instance().mapLayersByName(layer_name) 7 | if layer_list: 8 | QgsProject.instance().removeMapLayer(layer_list[0]) 9 | iface.mapCanvas().refresh() 10 | -------------------------------------------------------------------------------- /pyqgis/actions/update_field.py: -------------------------------------------------------------------------------- 1 | from qgis.utils import iface 2 | 3 | layer_id = '[%@layer_id%]' 4 | qa_field_name = 'checked' 5 | layer = QgsProject().instance().mapLayer(layer_id) 6 | field = layer.fields().lookupField(qa_field_name) 7 | 8 | with edit(layer): 9 | layer.changeAttributeValue([%$id%], field, 'Y') 10 | iface.messageBar().pushInfo('Success', 'Field Value Updated') -------------------------------------------------------------------------------- /pyqgis/attributeiterator.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ 4 | *************************************************************************** 5 | * * 6 | * This program is free software; you can redistribute it and/or modify * 7 | * it under the terms of the GNU General Public License as published by * 8 | * the Free Software Foundation; either version 2 of the License, or * 9 | * (at your option) any later version. * 10 | * * 11 | *************************************************************************** 12 | """ 13 | 14 | from qgis.PyQt.QtCore import QCoreApplication 15 | from qgis.core import (QgsProcessing, 16 | QgsFeatureSink, 17 | QgsProcessingException, 18 | QgsProcessingAlgorithm, 19 | QgsProcessingParameterFeatureSource, 20 | QgsProcessingOutputVectorLayer) 21 | from qgis import processing 22 | 23 | 24 | class AttributeIterator(QgsProcessingAlgorithm): 25 | """ 26 | This algorithm takes a vector layer and iterates through its attributes to 27 | create attribute indices. 
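
    Attribute indexes speed up attribute-based filters and queries on the
    layer, so this is useful to run once before repeated selections.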
28 | """ 29 | 30 | INPUT = 'INPUT' 31 | OUTPUT = 'OUTPUT' 32 | 33 | def tr(self, string): 34 | 35 | return QCoreApplication.translate('Processing', string) 36 | 37 | def createInstance(self): 38 | return AttributeIterator() 39 | 40 | def name(self): 41 | return 'attributeiterator' 42 | 43 | def displayName(self): 44 | return self.tr('Attribute Iterator') 45 | 46 | def group(self): 47 | return self.tr('') 48 | 49 | def groupId(self): 50 | return '' 51 | 52 | def shortHelpString(self): 53 | return self.tr("Create Attribute Indices on All Attributes of the Layer") 54 | 55 | def initAlgorithm(self, config=None): 56 | self.addParameter( 57 | QgsProcessingParameterFeatureSource( 58 | self.INPUT, 59 | self.tr('Input layer'), 60 | [QgsProcessing.TypeVectorAnyGeometry] 61 | ) 62 | ) 63 | 64 | self.addOutput( 65 | QgsProcessingOutputVectorLayer( 66 | self.OUTPUT, 67 | self.tr('Attribute Indexed layer') 68 | ) 69 | ) 70 | 71 | def processAlgorithm(self, parameters, context, feedback): 72 | source = self.parameterAsVectorLayer( 73 | parameters, 74 | self.INPUT, 75 | context 76 | ) 77 | 78 | if source is None: 79 | raise QgsProcessingException(self.invalidSourceError(parameters, self.INPUT)) 80 | 81 | fields = source.fields() 82 | for field in fields.names(): 83 | feedback.pushInfo('Indexing field {}'.format(field)) 84 | params = {'INPUT': source,'FIELD': field} 85 | processing.run("native:createattributeindex", params) 86 | 87 | return {self.OUTPUT: source.id()} 88 | -------------------------------------------------------------------------------- /pyqgis/copy_raster.py: -------------------------------------------------------------------------------- 1 | """Processing Script to demonstrate how to copy a raster layer. 2 | 3 | A use case of this is detailed at https://gis.stackexchange.com/questions/416616/feed-an-existing-raster-to-qgis-raster-destination-parameter-in-qgis-processing 4 | """ 5 | 6 | from qgis.core import (QgsProcessing, 7 | QgsRasterLayer, 8 | QgsProcessingAlgorithm, 9 | QgsProcessingParameterRasterLayer, 10 | QgsProcessingParameterRasterDestination) 11 | from qgis import processing 12 | 13 | 14 | class CopyRasterAlgorithm(QgsProcessingAlgorithm): 15 | INPUT_RASTER = 'INPUT_RASTER' 16 | OUTPUT = 'OUTPUT' 17 | 18 | def createInstance(self): 19 | return CopyRasterAlgorithm() 20 | 21 | def name(self): 22 | return 'Copy Raster' 23 | 24 | def displayName(self): 25 | return 'copyraster' 26 | 27 | def initAlgorithm(self, config=None): 28 | 29 | self.addParameter( 30 | QgsProcessingParameterRasterLayer( 31 | self.INPUT_RASTER, 32 | 'Input raster layer', 33 | defaultValue=None 34 | ) 35 | ) 36 | 37 | self.addParameter( 38 | QgsProcessingParameterRasterDestination( 39 | self.OUTPUT, 40 | 'OUTPUT' 41 | ) 42 | ) 43 | 44 | def processAlgorithm(self, parameters, context, feedback): 45 | inputraster = self.parameterAsRasterLayer(parameters, self.INPUT_RASTER, context) 46 | 47 | params = { 48 | 'INPUT':inputraster, 49 | 'OUTPUT': parameters['OUTPUT'] 50 | } 51 | result = processing.run("gdal:translate", params, context=context) 52 | 53 | return {self.OUTPUT: result['OUTPUT']} -------------------------------------------------------------------------------- /pyqgis/ee_qgis.py: -------------------------------------------------------------------------------- 1 | # imports and constants 2 | import ee 3 | from ee_plugin import Map 4 | import json 5 | 6 | ee.Initialize() 7 | collection = ee.ImageCollection('LANDSAT/LC08/C01/T1_TOA').filterDate('2019-01-01', '2019-12-31'); 8 | composite = collection.median(); 
9 |
10 | composite_ndvi = composite.normalizedDifference(['B5','B4'])
11 |
12 | palette = [
13 | 'FFFFFF', 'CE7E45', 'DF923D', 'F1B555', 'FCD163', '99B718',
14 | '74A901', '66A000', '529400', '3E8601', '207401', '056201',
15 | '004C00', '023B01', '012E01', '011D01', '011301']
16 |
17 | # Replace the Layer Name 'layer' with the actual layer name
18 | layer = QgsProject.instance().mapLayersByName('layer')[0]
19 | geometry = json.loads(next(layer.getFeatures()).geometry().asJson())
20 | polygon = ee.Geometry.MultiPolygon(geometry['coordinates'])
21 | Map.addLayer(composite.clip(polygon), {'bands': ['B4', 'B3', 'B2'], 'min': 0, 'max': 0.4, 'gamma': 1.2}, 'Image')
22 |
23 | Map.addLayer(composite_ndvi.clip(polygon), {'min': 0, 'max': 1, 'palette': palette}, 'Clipped NDVI')
24 | -------------------------------------------------------------------------------- /pyqgis/filter_layer.py: -------------------------------------------------------------------------------- 1 | """Processing Script to Apply a Filter to a Vector Layer
2 |
3 | Example of how to write a processing script that doesn't
4 | return a new layer but applies a filter/selection on the
5 | input layer.
6 | """
7 |
8 | from qgis.core import (QgsProcessing,
9 | QgsProcessingAlgorithm,
10 | QgsProcessingParameterEnum,
11 | QgsProcessingParameterVectorLayer)
12 | from qgis import processing
13 |
14 |
15 | class FilterLayerAlgorithm(QgsProcessingAlgorithm):
16 | INPUT = 'INPUT'
17 | OUTPUT = 'OUTPUT'
18 | COUNTRY = 'COUNTRY'
19 | COUNTRIES = ['United States of America', 'CANADA', 'MEXICO']
20 |
21 | def createInstance(self):
22 | return FilterLayerAlgorithm()
23 |
24 | def name(self):
25 | return 'filterlayer'
26 |
27 | def displayName(self):
28 | return 'Filter Layer'
29 |
30 | def initAlgorithm(self, config=None):
31 | self.addParameter(
32 | QgsProcessingParameterVectorLayer(
33 | self.INPUT,
34 | 'Input layer',
35 | types=[QgsProcessing.TypeVector]
36 | )
37 | )
38 |
39 | self.addParameter(
40 | QgsProcessingParameterEnum(self.COUNTRY,
41 | 'Select a Country',
42 | self.COUNTRIES,
43 | defaultValue=0  # enum defaults are an index into COUNTRIES
44 | )
45 | )
46 |
47 |
48 | def processAlgorithm(self, parameters, context, feedback):
49 | layer = self.parameterAsVectorLayer(parameters, self.INPUT, context)
50 | country = self.COUNTRIES[self.parameterAsEnum(parameters, self.COUNTRY, context)]
51 |
52 | expression = '\"NAME\" = \'{}\''.format(country)
53 | layer.setSubsetString(expression)
54 | return {self.OUTPUT: parameters[self.INPUT]}
55 |
56 | -------------------------------------------------------------------------------- /pyqgis/gee_annual_precipitation.py: -------------------------------------------------------------------------------- 1 | """Algorithm to calculate Zonal Statistics using the GEE API
2 |
3 | This script uses CHIRPS data.
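
The core of the computation is a sketch of the following form
(for an ee.Geometry `polygon` and an integer `year`):

    chirps = ee.ImageCollection('UCSB-CHG/CHIRPS/PENTAD')
    total = chirps.filter(ee.Filter.date(start, end)).sum()
    stats = total.reduceRegion(
        reducer=ee.Reducer.mean(), geometry=polygon, scale=5000)

i.e. the pentad rainfall images are summed over the year and the
total is averaged over each input polygon.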
4 | """ 5 | import ee 6 | ee.Initialize() 7 | 8 | import json 9 | from PyQt5.QtCore import QCoreApplication, QVariant 10 | 11 | from qgis.core import (QgsProcessing, QgsProcessingAlgorithm, 12 | QgsProcessingParameterFeatureSource, QgsProcessingParameterNumber, 13 | QgsProcessingParameterFeatureSink,QgsFields, QgsField, QgsWkbTypes, 14 | QgsFeatureSink, QgsProcessingUtils) 15 | 16 | 17 | class AnnualPrecipitation(QgsProcessingAlgorithm): 18 | """Calculates annual rainfall using GEE API for each input features""" 19 | INPUT = 'INPUT' 20 | YEAR = 'YEAR' 21 | OUTPUT = 'OUTPUT' 22 | 23 | 24 | def initAlgorithm(self, config=None): 25 | self.addParameter( 26 | QgsProcessingParameterFeatureSource( 27 | 'INPUT', 28 | self.tr('Input Layer'), 29 | types=[QgsProcessing.TypeVectorPolygon] 30 | ) 31 | ) 32 | 33 | self.addParameter( 34 | QgsProcessingParameterNumber( 35 | 'YEAR', 36 | self.tr('Year'), 37 | QgsProcessingParameterNumber.Integer, 38 | 2021, False, 1 39 | ) 40 | ) 41 | 42 | 43 | self.addParameter( 44 | QgsProcessingParameterFeatureSink( 45 | self.OUTPUT, 46 | 'Annual_Precipitation', 47 | QgsProcessing.TypeVectorAnyGeometry 48 | ) 49 | ) 50 | 51 | def processAlgorithm(self, parameters, context, feedback): 52 | source= self.parameterAsSource(parameters, self.INPUT, context) 53 | year = self.parameterAsInt(parameters, self.YEAR, context) 54 | 55 | outputFields = source.fields() 56 | newFields = QgsFields() 57 | newFields.append(QgsField('year', QVariant.Int)) 58 | newFields.append(QgsField('precipitation', QVariant.Int)) 59 | 60 | outputFields = QgsProcessingUtils.combineFields(outputFields, newFields) 61 | sink, dest_id = self.parameterAsSink( 62 | parameters, 63 | self.OUTPUT, 64 | context, 65 | outputFields, 66 | source.wkbType(), 67 | source.sourceCrs() 68 | ) 69 | feedback.pushInfo(self.tr( "Processing Input")) 70 | 71 | 72 | chirps = ee.ImageCollection('UCSB-CHG/CHIRPS/PENTAD') 73 | startDate = ee.Date.fromYMD(year, 1, 1) 74 | endDate = startDate.advance(1, 'year') 75 | filtered = chirps.filter(ee.Filter.date(startDate, endDate)) 76 | total_precipitation = filtered.sum() 77 | 78 | #features = [f for f in source.getFeatures()] 79 | 80 | total_features = 100.0 / source.featureCount() if source.featureCount() else 0 81 | 82 | for current, out_f in enumerate(source.getFeatures()): 83 | # Stop the algorithm if cancel button has been clicked 84 | if feedback.isCanceled(): 85 | break 86 | 87 | geometry = out_f.geometry() 88 | json_geometry = json.loads(geometry.asJson())['coordinates'] 89 | polygon = ee.Geometry.MultiPolygon(json_geometry) 90 | stats = total_precipitation.reduceRegion(**{ 91 | 'reducer': ee.Reducer.mean(), 92 | 'geometry': polygon, 93 | 'scale': 5000, 94 | }) 95 | precipitation = stats.getNumber('precipitation').getInfo() 96 | feedback.pushInfo(str(precipitation)) 97 | attributes = out_f.attributes() 98 | 99 | attributes.append(year) 100 | attributes.append(precipitation) 101 | 102 | out_f.setAttributes(attributes) 103 | sink.addFeature(out_f, QgsFeatureSink.FastInsert) 104 | feedback.setProgress(int(current * total_features)) 105 | 106 | return {self.OUTPUT: sink} 107 | 108 | def name(self): 109 | return 'annual_precipitation_gee' 110 | 111 | def displayName(self): 112 | return self.tr('Annual Precipitation GEE') 113 | 114 | def shortHelpString(self): 115 | return self.tr('Annual Precipitation Calculated using CHIRPS data via GEE API') 116 | 117 | def group(self): 118 | return self.tr(self.groupId()) 119 | 120 | def groupId(self): 121 | return '' 122 | 123 | def tr(self, 
string):
124 | return QCoreApplication.translate('Processing', string)
125 |
126 | def createInstance(self):
127 | return AnnualPrecipitation()
128 | -------------------------------------------------------------------------------- /pyqgis/rastercalculator.py: -------------------------------------------------------------------------------- 1 | """Processing Script to demonstrate syntax for Raster Calculator
2 |
3 | This script just returns the input raster as output
4 | """
5 |
6 | from qgis.core import (QgsProcessing,
7 | QgsProcessingAlgorithm,
8 | QgsProcessingParameterRasterLayer,
9 | QgsProcessingParameterRasterDestination)
10 | from qgis import processing
11 |
12 |
13 | class RasterCalcProcessingAlgorithm(QgsProcessingAlgorithm):
14 | INPUT = 'INPUT'
15 | OUTPUT = 'OUTPUT'
16 |
17 | def createInstance(self):
18 | return RasterCalcProcessingAlgorithm()
19 |
20 | def name(self):
21 | return 'rastercalctest'
22 |
23 | def displayName(self):
24 | return 'Raster Calc Test'
25 |
26 | def initAlgorithm(self, config=None):
27 | self.addParameter(
28 | QgsProcessingParameterRasterLayer(
29 | self.INPUT, 'INPUT', defaultValue=None
30 |
31 | )
32 | )
33 |
34 | self.addParameter(
35 | QgsProcessingParameterRasterDestination(
36 | self.OUTPUT, 'OUTPUT'
37 | )
38 | )
39 |
40 | def processAlgorithm(self, parameters, context, feedback):
41 | input_layer = self.parameterAsRasterLayer(parameters, self.INPUT, context)
42 | # The destination path is taken directly from parameters['OUTPUT']
43 | params = {
44 | 'CELLSIZE': 0,
45 | 'CRS': None,
46 | 'EXPRESSION': '\"{}@1\"'.format(input_layer.name()),
47 | 'EXTENT': None,
48 | 'LAYERS': [input_layer],
49 | 'OUTPUT': parameters['OUTPUT'],
50 | }
51 | result = processing.run('qgis:rastercalculator', params, context=context)
52 | return {self.OUTPUT: result['OUTPUT']}
53 |
54 | -------------------------------------------------------------------------------- /pyqgis/rename_layers.py: -------------------------------------------------------------------------------- 1 | """
2 | PyQGIS Script to Rename Layers after using the Iterate feature
3 | in the Processing Toolbox
4 |
5 | As of QGIS 3.16, the iterate feature doesn't allow naming
6 | the resulting layers based on a specific attribute. The
7 | resulting layers are named output_0, output_1, ...
8 |
9 | This script renames the resulting layers based on an attribute
10 | from the feature that was used in iteration.
11 |
12 | Here we clipped a roads layer using a districts polygon layer,
13 | with the districts layer in 'Iterate' mode. If the districts layer
14 | has N features, we will get N resulting layers. This script will
15 | rename each layer based on the field value of the districts layer.
16 |
17 | You can run the script from the Python Console Editor in QGIS.
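
For example, with the settings below, the layer 'clipped_0' would be
renamed to the DISTRICT value of the first district feature. The layer
and attribute names used here are placeholders - adjust them to match
your own project.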
18 | """ 19 | # Prefix of the output layers generated by Iterating 20 | prefix = 'clipped' 21 | # Name of the layer which was in Iterate mode 22 | parent_layer_name = 'karnataka_districts' 23 | # Attribute from the layer to be used to fetch new layer names 24 | parent_layer_attr = 'DISTRICT' 25 | 26 | parent_layer = QgsProject.instance().mapLayersByName(parent_layer_name)[0] 27 | 28 | for f in parent_layer.getFeatures(): 29 | # Layer IDs start from 1, but layer names start from 0 30 | layer_id = f.id()-1 31 | layer_name = '{}_{}'.format(prefix, layer_id) 32 | layer = QgsProject.instance().mapLayersByName(layer_name)[0] 33 | new_name = f['DISTRICT'] 34 | layer.setName(new_name) -------------------------------------------------------------------------------- /python/.gitignore: -------------------------------------------------------------------------------- 1 | data/ 2 | -------------------------------------------------------------------------------- /python/README.md: -------------------------------------------------------------------------------- 1 | ## Geospatial Python Scripts and Notebook 2 | 3 | This folder contains Scripts and Jupyter notebooks that various geospatial data processing examples. 4 | 5 | ### Data Processing and Conversions 6 | - [`ascii_to_csv.ipynb`](https://github.com/spatialthoughts/projects/blob/master/python/ascii_to_csv.ipynb): Convert an ASCII Tab-Delimited file to a CSV with WKT geometries 7 | - [`csv_to_gml.ipynb`](https://github.com/spatialthoughts/projects/blob/master/python/csv_to_gml.ipynb): Convert a CSV file to an Aeronautical Information Exchange Model (AIXM) compliant GML 8 | - [`dat_to_csv.ipynb`](https://github.com/spatialthoughts/projects/blob/master/python/dat_to_csv.ipynb): Converting Surfer Atlas .BNA (ASCII DAT) file to a Vector Layer 9 | 10 | - [`raster_from_array.ipynb`](https://github.com/spatialthoughts/projects/blob/master/python/raster_from_array.ipynb): Creating a test geospatial image using a NumPy array. 11 | 12 | ### Pandas 13 | 14 | - [`unpivot.ipynb`](https://github.com/spatialthoughts/projects/blob/master/python/unpivot.ipynb): Convert column-wise data into separate rows 15 | - [`ghcn.ipynb`](https://github.com/spatialthoughts/projects/blob/master/python/ghcn.ipynb): Convert GHCN FWF files to XLSX 16 | 17 | ### GeoPandas 18 | 19 | - [`reprojection_and_coordinate_transform.ipynb`](https://github.com/spatialthoughts/projects/blob/master/python/reprojection_and_coordinate_transform.ipynb): Code for reprojecting layers and coordinate transformation using OGR as well as GeoPandas. 20 | - [`complex_kml_to_gpkg.ipynb`](https://github.com/spatialthoughts/projects/blob/master/python/complex_kml_to_gpkg.ipynb): Converting a large KML file to a GeoPackage using GeoPandas. 21 | - [`line_interpolation.ipynb`](https://github.com/spatialthoughts/projects/blob/master/python/line_interpolation.ipynb): Creating regularly spaced points along lines using GeoPandas. 22 | 23 | ### Xarray 24 | - [`xarray_netcdf.ipynb`](https://github.com/spatialthoughts/projects/blob/master/python/xarray_netcdf.ipynb): Working with CRU NetCDF files using xarray and rioxarray. 25 | - [`xarray_wrf.ipynb`](https://github.com/spatialthoughts/projects/blob/master/python/xarray_wrf.ipynb): Converting WRF NetCDF files using xarray and rioxarray to GeoTIFF. 26 | 27 | ### Visualization 28 | 29 | - [`maritime_piracy.ipynb`](https://github.com/spatialthoughts/projects/blob/master/python/maritime_piracy.ipynb): Exploring Anti-shipping Activity Messages data. 
30 | 31 | 32 | 33 | 34 | -------------------------------------------------------------------------------- /python/articles/article1.txt: -------------------------------------------------------------------------------- 1 | Title: 2 | 2 Persons Trampled To Death By Elephants In 2 Days In Odisha’s Dhenkanal 3 | 4 | Description: 5 | Dhenkanal: Human casualty due to elephant attack continued in Odisha’s Dhenkanal district as a man was trampled to death by a herd on Saturday. 6 | According to sources, the incident tool place when the victim, Khirod Samal of Neulapoi village under Sadangi forest range, had gone to collect cashew nuts from a nearby orchard in the morning. He came face to face with 3 elephants who had separated from a herd and were creating a rampage in the area. 7 | 8 | Though Khirod tried to escape from the place, the elephants caught hold of him and trampled him to death. It took place hardly 100 metre from the panchayat office in the area. 9 | 10 | On being informed, forester Madhusita Pati from Joronda went to the spot along with a team of Forest officials. She sent the body for post-mortem and advised the villagers not to venture into the forest till the Forest officials send the elephants back. 11 | 12 | In a similar incident on Friday, one person was killed in elephant attack in the district. The deceased was identified as Lakshmidhar Sahu of Bali Kiari village under Angat Jarda Panchayat in Hindol forest range. He was attacked by the elephant in the morning when he had gone to the village pond. -------------------------------------------------------------------------------- /python/articles/article2.txt: -------------------------------------------------------------------------------- 1 | Title: 2 | 3-Year-Old Among 3 Of Family Trampled To Death By Elephants In Jharkhand 3 | 4 | Description: 5 | Latehar: Three members of a family including a three-year-old girl were trampled to death by a herd of elephants in Jharkhand's Latehar district, police said today. 6 | The incident happened at around 1.30 am on Friday when a 30-year-old labourer Fanu Bhuinyan was asleep with his 26-year-old wife Babita Devi and three-year-old daughter in a makeshift hut near a brick kiln unit at Malhan Panchyata, around 80-km from capital Ranchi, the police said. 7 | 8 | "A herd of elephants appeared in the brick kiln area post midnight and trampled to death all three members of the family. The jumbos also wreaked havoc in the area. Other labourers, who were working in the brick kiln, managed to escape," said Chandwa police station inspector Amit Kumar. 9 | 10 | He said that all the three bodies were brought to the police station today morning and they were sent to Latehar hospital for postmortem. -------------------------------------------------------------------------------- /python/articles/article3.txt: -------------------------------------------------------------------------------- 1 | Title: 2 | Wild elephant Karuppan to be captured again and translocated 3 | 4 | Description: 5 | With the wild elephant Karuppan trampling to death a 48-year-old daily wage worker at Perumugai in the T.N. Palayam block on Thursday, the Forest Department is gearing up to capture it again and translocate the animal. 6 | 7 | The decision comes after the elephant, which was captured in the Talavadi Hill on April 17 and translocated to a forest area at Thattakarai in the Bargur Hills, had walked over 50 km in search of food and reached Varapallam and Perumugai on Thursday. 
The elephant was found in the Bhavani river and later it took shelter at a sugarcane field when it trampled to death P. Sitheeswaran. Officials confirmed that the elephant involved in the attack was ‘Karuppan’.
8 |
9 | On Friday, two kumkis, Bomman and Srinivasan, arrived from the Theppakadu Elephant Camp in the Mudumalai Tiger Reserve (MTR) in the Sanjeevirayan Kovil area at Athani, near the Anthiyur forest.
10 |
11 | The elephant that crossed Sathyamangalam Road at Kallipatti and entered a forest area in the Anthiyur Forest Range was found near a waterbody in the Vadakombai area. Over 100 staff members of the ranges of Anthiyur, T.N. Palayam, Talavadi and Jerahalli continue to monitor the movement of Karuppan while veterinarians are studying the terrain to tranquilise the elephant.
12 |
13 | K. Rajkumar, Conservator of Forests and Field Director, Sathyamangalam Tiger Reserve (STR), told The Hindu that the operation would be carried out during day time and permission had been sought for translocating the elephant. A team of rangers, three veterinarians, elephant trackers and staff members are expected to begin the operation on Saturday.
14 |
15 | S. Kannaiyan, president, Talavadi Farmers’ Association, said the elephant that was earlier released into the interior forest area at Thattakarai on the Tamil Nadu-Karnataka border in the Erode Forest Division had reached the Anthiyur forest area now. “After capturing Karuppan, it should be radio-collared and the elephant should be released into a forest area outside the district,” he stressed. -------------------------------------------------------------------------------- /python/ascii_to_csv.ipynb: -------------------------------------------------------------------------------- 1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "id": "charged-milan",
6 | "metadata": {},
7 | "source": [
8 | "# Converting a Tab-Delimited ASCII file to a Vector Layer\n",
9 | "\n",
10 | "We have an ASCII Tab-Delimited text file in the following format:\n",
11 | "```\n",
12 | "(cross section index) (no. of points along transect)\n",
13 | "x-coordinates of transect points\n",
14 | "y-coordinates of transect points\n",
15 | "pre-flood elevation z94 of transect points\n",
16 | "post-flood elevation z96 of transect points\n",
17 | "reconstructed bedrock elevation at transect points\n",
18 | "```\n",
19 | "CRS: MTM (Modified Transverse Mercator projection) zone 7 coordinates (NAD83)\n",
20 | "\n",
21 | "We can create a CSV with the point coordinates in separate x and y columns. QGIS can read this format easily and display the data.\n",
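"\n",
"The output CSV will have one row per transect point, for example (hypothetical values):\n",
"```\n",
"transact_id,z94,z96,bedrock,x,y\n",
"1,32.5,31.9,28.2,301245.0,5024310.0\n",
"```"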
22 | ]
23 | },
24 | {
25 | "cell_type": "code",
26 | "execution_count": 19,
27 | "id": "noted-processor",
28 | "metadata": {},
29 | "outputs": [],
30 | "source": [
31 | "input = 'crossSections.txt'\n",
32 | "output = 'crossSections.csv'"
33 | ]
34 | },
35 | {
36 | "cell_type": "code",
37 | "execution_count": null,
38 | "id": "unauthorized-weight",
39 | "metadata": {},
40 | "outputs": [],
41 | "source": [
42 | "data = []\n",
43 | "with open(input, 'r') as f:\n",
44 | "    # skip first line\n",
45 | "    f.readline()\n",
46 | "    for line in f:\n",
47 | "        # Get the transect id and number of vertices from the header line\n",
48 | "        fid, numvertices = line.split()\n",
49 | "        x_coordinates = f.readline().split()\n",
50 | "        y_coordinates = f.readline().split()\n",
51 | "        z94_elevation = f.readline().split()\n",
52 | "        z96_elevation = f.readline().split()\n",
53 | "        bedrock_elevation = f.readline().split()\n",
54 | "        for x, y, z94, z96, bedrock in zip(x_coordinates, y_coordinates, z94_elevation, z96_elevation, bedrock_elevation):\n",
55 | "            data.append({'x': x, 'y': y, 'transact_id': int(fid), 'z94': float(z94), 'z96': float(z96), 'bedrock': float(bedrock)})"
56 | ]
57 | },
58 | {
59 | "cell_type": "code",
60 | "execution_count": 21,
61 | "id": "unexpected-contribution",
62 | "metadata": {},
63 | "outputs": [],
64 | "source": [
65 | "import csv\n",
66 | "\n",
67 | "with open(output, 'w') as csvfile:\n",
68 | "    fieldnames = ['transact_id', 'z94', 'z96', 'bedrock', 'x', 'y']\n",
69 | "    writer = csv.DictWriter(csvfile, fieldnames=fieldnames)\n",
70 | "    writer.writeheader()\n",
71 | "    for row in data:\n",
72 | "        writer.writerow(row)\n"
73 | ]
74 | },
75 | {
76 | "cell_type": "markdown",
77 | "id": "about-volunteer",
78 | "metadata": {},
79 | "source": [
80 | "The resulting CSV can be imported using the *Add Delimited Text* tab in the QGIS Data Source Manager\n",
81 | "\n",
82 | "![](import.png)"
83 | ]
84 | },
85 | {
86 | "cell_type": "markdown",
87 | "id": "partial-secretariat",
88 | "metadata": {},
89 | "source": [
90 | "The point layer loads in QGIS with the correct CRS specified.\n",
91 | "\n",
92 | "![](result.png)"
93 | ]
94 | },
95 | {
96 | "cell_type": "code",
97 | "execution_count": null,
98 | "id": "classical-watts",
99 | "metadata": {},
100 | "outputs": [],
101 | "source": []
102 | }
103 | ],
104 | "metadata": {
105 | "kernelspec": {
106 | "display_name": "Python 3",
107 | "language": "python",
108 | "name": "python3"
109 | },
110 | "language_info": {
111 | "codemirror_mode": {
112 | "name": "ipython",
113 | "version": 3
114 | },
115 | "file_extension": ".py",
116 | "mimetype": "text/x-python",
117 | "name": "python",
118 | "nbconvert_exporter": "python",
119 | "pygments_lexer": "ipython3",
120 | "version": "3.9.1"
121 | }
122 | },
123 | "nbformat": 4,
124 | "nbformat_minor": 5
125 | }
126 | -------------------------------------------------------------------------------- /python/band_combinations.ipynb: -------------------------------------------------------------------------------- 1 | {
2 | "nbformat": 4,
3 | "nbformat_minor": 0,
4 | "metadata": {
5 | "colab": {
6 | "provenance": [],
7 | "authorship_tag": "ABX9TyPqT708edwHPVfOCTumR6TM"
8 | },
9 | "kernelspec": {
10 | "name": "python3",
11 | "display_name": "Python 3"
12 | },
13 | "language_info": {
14 | "name": "python"
15 | }
16 | },
17 | "cells": [
18 | {
19 | "cell_type": "markdown",
20 | "source": [
21 | "## Generating Band Combinations\n",
22 | "\n",
23 | "We have B total bands and want to generate all possible combinations of length N from the list.\n",
24 | 
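"For example, choosing N = 4 bands out of B = 20 gives C(20, 4) = 4845 combinations, as computed below.\n",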
"\n", 25 | "Our list consist of 17 GLCM bands and 3 bands from PCA of the original bands." 26 | ], 27 | "metadata": { 28 | "id": "SDTFHbuduWu2" 29 | } 30 | }, 31 | { 32 | "cell_type": "code", 33 | "source": [ 34 | "bands_list = ['b1_asm', 'b1_contrast', 'b1_corr', 'b1_var',\n", 35 | " 'b1_idm', 'b1_savg', 'b1_svar', 'b1_sent', 'b1_ent',\n", 36 | " 'b1_dvar', 'b1_dent', 'b1_imcorr1', 'b1_imcorr2',\n", 37 | " 'b1_diss', 'b1_inertia', 'b1_shade', 'b1_prom', \n", 38 | " 'pc1','pc2','pc3']\n", 39 | "len(bands_list)" 40 | ], 41 | "metadata": { 42 | "colab": { 43 | "base_uri": "https://localhost:8080/" 44 | }, 45 | "id": "hGFLi73yu9lD", 46 | "outputId": "b2ae6c19-6dc9-4e9f-deef-9f4feacf4122" 47 | }, 48 | "execution_count": 15, 49 | "outputs": [ 50 | { 51 | "output_type": "execute_result", 52 | "data": { 53 | "text/plain": [ 54 | "20" 55 | ] 56 | }, 57 | "metadata": {}, 58 | "execution_count": 15 59 | } 60 | ] 61 | }, 62 | { 63 | "cell_type": "code", 64 | "source": [ 65 | "import itertools\n", 66 | "\n", 67 | "def get_combinations(items, n):\n", 68 | " return list(itertools.combinations(items, n))\n", 69 | "\n", 70 | "combinations = get_combinations(bands_list, 4)\n", 71 | "len(combinations)\n" 72 | ], 73 | "metadata": { 74 | "colab": { 75 | "base_uri": "https://localhost:8080/" 76 | }, 77 | "id": "lI7k7n6WvKhY", 78 | "outputId": "96cee53c-f215-4106-d809-a58e34bd4ab9" 79 | }, 80 | "execution_count": 16, 81 | "outputs": [ 82 | { 83 | "output_type": "execute_result", 84 | "data": { 85 | "text/plain": [ 86 | "4845" 87 | ] 88 | }, 89 | "metadata": {}, 90 | "execution_count": 16 91 | } 92 | ] 93 | }, 94 | { 95 | "cell_type": "markdown", 96 | "source": [ 97 | "We also some fixed bands that must be present. Add them to each combination." 98 | ], 99 | "metadata": { 100 | "id": "SGQaMfDrvqzi" 101 | } 102 | }, 103 | { 104 | "cell_type": "code", 105 | "source": [ 106 | "fixed_bands = ['orto', 'dem', 'slope', 'curvature', 'terraces']\n", 107 | "band_combinations = [fixed_bands + list(x) for x in combinations]" 108 | ], 109 | "metadata": { 110 | "id": "8h_arZn7vcar" 111 | }, 112 | "execution_count": 17, 113 | "outputs": [] 114 | }, 115 | { 116 | "cell_type": "markdown", 117 | "source": [ 118 | "Format the result as JSON." 119 | ], 120 | "metadata": { 121 | "id": "Z8iOUe01wTRc" 122 | } 123 | }, 124 | { 125 | "cell_type": "code", 126 | "source": [ 127 | "import json\n", 128 | "output = json.dumps(band_combinations)" 129 | ], 130 | "metadata": { 131 | "id": "YPIf9qfIv_oH" 132 | }, 133 | "execution_count": 19, 134 | "outputs": [] 135 | }, 136 | { 137 | "cell_type": "markdown", 138 | "source": [ 139 | "Write the results to a file." 
140 | ],
141 | "metadata": {
142 | "id": "i5mkhOY3zHlR"
143 | }
144 | },
145 | {
146 | "cell_type": "code",
147 | "source": [
148 | "filename = 'combinations.js'\n",
149 | "with open(filename, 'w') as f:\n",
150 | "    f.write(output)"
151 | ],
152 | "metadata": {
153 | "id": "5QPF-iS9wnXs"
154 | },
155 | "execution_count": 20,
156 | "outputs": []
157 | }
158 | ]
159 | } -------------------------------------------------------------------------------- /python/buildings.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/spatialthoughts/projects/48433946a268b82197aa28e08bc12ca03b4c8ed6/python/buildings.png -------------------------------------------------------------------------------- /python/complex_kml_to_gpkg.ipynb: -------------------------------------------------------------------------------- 1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "id": "b7ae9826-cfa4-4eef-9e91-91e9d42e2466",
6 | "metadata": {},
7 | "source": [
8 | "## Converting KML with Mixed Geometries to a GeoPackage\n",
9 | "\n",
10 | "We have a 2GB+ KML file containing complex polygons and points. The source file is simply too large for OGR or QGIS, as they need to parse the entire file and run out of memory. GeoPandas is able to successfully read it, filter it into its constituent geometries, and write out a GeoPackage."
11 | ]
12 | },
13 | {
14 | "cell_type": "code",
15 | "execution_count": 5,
16 | "id": "69c1f731-2955-4358-b5fb-b11ba12f52c5",
17 | "metadata": {},
18 | "outputs": [],
19 | "source": [
20 | "import geopandas as gpd"
21 | ]
22 | },
23 | {
24 | "cell_type": "code",
25 | "execution_count": 6,
26 | "id": "fe0bf702-0f58-4d2e-ba8c-e629b6c78d4e",
27 | "metadata": {},
28 | "outputs": [],
29 | "source": [
30 | "df = gpd.read_file('out.kml', driver='KML')"
31 | ]
32 | },
33 | {
34 | "cell_type": "markdown",
35 | "id": "67dee4f8-ad39-4c1c-902b-572af73316df",
36 | "metadata": {},
37 | "source": [
38 | "`df` is a GeoDataFrame where each feature is a `GeometryCollection` combining a polygon and a point. Most GIS formats cannot work with these mixed geometries, so we run `explode()` to separate them out."
39 | ]
40 | },
41 | {
42 | "cell_type": "code",
43 | "execution_count": 8,
44 | "id": "24dcacdf-4624-4c5a-b8c8-1de528657b13",
45 | "metadata": {},
46 | "outputs": [],
47 | "source": [
48 | "gdf = df.explode()"
49 | ]
50 | },
51 | {
52 | "cell_type": "markdown",
53 | "id": "4928587c-89ec-477c-804d-fdd89e74b835",
54 | "metadata": {},
55 | "source": [
56 | "The resulting GeoDataFrame has rows with both Point and Polygon geometry. Filter them and save the appropriate geometries into separate GeoDataFrames."
57 | ]
58 | },
59 | {
60 | "cell_type": "code",
61 | "execution_count": 14,
62 | "id": "bfd6589e-cfa7-4368-903f-b22e6251dfa1",
63 | "metadata": {},
64 | "outputs": [],
65 | "source": [
66 | "polygons = gdf[gdf.geometry.type == 'Polygon']\n",
67 | "points = gdf[gdf.geometry.type == 'Point']"
68 | ]
69 | },
70 | {
71 | "cell_type": "markdown",
72 | "id": "21655c02-6ae0-46e7-9dd1-12175ab8a576",
73 | "metadata": {},
74 | "source": [
75 | "Save the resulting GeoDataFrames to a single GeoPackage with multiple layers.\n",
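"\n",
"To verify the result, we can list the layers in the output file (a sketch, assuming the `fiona` package is installed):\n",
"```\n",
"import fiona\n",
"fiona.listlayers('cleaned.gpkg')  # expected: ['polygons', 'points']\n",
"```"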
76 | ]
77 | },
78 | {
79 | "cell_type": "code",
80 | "execution_count": null,
81 | "id": "e583b06d-78f3-425c-94d2-eabdc0f4f215",
82 | "metadata": {},
83 | "outputs": [],
84 | "source": [
85 | "polygons.to_file(driver='GPKG', filename='cleaned.gpkg', layer='polygons', encoding='utf-8')\n",
86 | "points.to_file(driver='GPKG', filename='cleaned.gpkg', layer='points', encoding='utf-8')"
87 | ]
88 | },
89 | {
90 | "cell_type": "markdown",
91 | "id": "ceefc266-1ba8-4b8d-823f-89d30f1c7941",
92 | "metadata": {},
93 | "source": [
94 | "Success! Now that the data is in a binary format, QGIS can easily read and render the layers.\n",
95 | "![](kml.png)"
96 | ]
97 | },
98 | {
99 | "cell_type": "code",
100 | "execution_count": null,
101 | "id": "e4918419-3881-4d5e-957a-e93399dc8d6d",
102 | "metadata": {},
103 | "outputs": [],
104 | "source": []
105 | }
106 | ],
107 | "metadata": {
108 | "kernelspec": {
109 | "display_name": "Python 3",
110 | "language": "python",
111 | "name": "python3"
112 | },
113 | "language_info": {
114 | "codemirror_mode": {
115 | "name": "ipython",
116 | "version": 3
117 | },
118 | "file_extension": ".py",
119 | "mimetype": "text/x-python",
120 | "name": "python",
121 | "nbconvert_exporter": "python",
122 | "pygments_lexer": "ipython3",
123 | "version": "3.9.6"
124 | }
125 | },
126 | "nbformat": 4,
127 | "nbformat_minor": 5
128 | }
129 | -------------------------------------------------------------------------------- /python/csv_to_gml.ipynb: -------------------------------------------------------------------------------- 1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "id": "69aa978b-1b03-4388-bdb9-f8a13d2da79e",
6 | "metadata": {},
7 | "source": [
8 | "We have a CSV file in the following format. A normal vertex is tagged as GEO (for geodesic string), and an arc segment is tagged with a combination of two (2) successive ABE (arc-by-edge) vertices.\n",
9 | "\n",
10 | "```\n",
11 | "25.270806364429227 51.60333739375988 GEO\n",
12 | "25.271009225305253 51.60324801592732 GEO\n",
13 | "25.271113964392683 51.603201869139234 GEO\n",
14 | "25.2707782569834 51.60228029931405 GEO\n",
15 | "25.270673510842762 51.602326451082504 GEO\n",
16 | "25.27046820899583 51.60241690493056 ABE\n",
17 | "25.270466156352832 51.60261064947953 ABE\n",
18 | "25.27033759984866 51.602747027489016 GEO\n",
19 | "25.27042237891931 51.60297183557495 GEO\n",
20 | "25.270507464783933 51.60319640070589 ABE\n",
21 | "25.27069468321789 51.60320703990452 ABE\n",
22 | "25.270788613761002 51.60330422209724 GEO\n",
23 | "```\n",
24 | "We want to convert this to a GML file as follows:\n",
25 | "```\n",
26 | "<gml:patches>\n",
27 | "<gml:PolygonPatch>\n",
28 | "<gml:exterior>\n",
29 | "<gml:Ring>\n",
30 | "<gml:curveMember>\n",
31 | "<gml:Curve gml:id=\"gmlID389873\">\n",
32 | "<gml:segments>\n",
33 | "<gml:GeodesicString>\n",
34 | "<gml:pos>51.6033373938 25.2708063644</gml:pos>\n",
35 | "<gml:pos>51.6032480159 25.2710092253</gml:pos>\n",
36 | "<gml:pos>51.6032018691 25.2711139644</gml:pos>\n",
37 | "<gml:pos>51.6022802993 25.2707782570</gml:pos>\n",
38 | "<gml:pos>51.6023264511 25.2706735108</gml:pos>\n",
39 | "<gml:pos>51.6024169049 25.2704682090</gml:pos>\n",
40 | "</gml:GeodesicString>\n",
41 | "<gml:ArcString>\n",
42 | "<gml:pos>51.6024169049 25.2704682090</gml:pos>\n",
43 | "<gml:pos>51.6026106495 25.2704661564</gml:pos>\n",
44 | "<gml:pos>51.6027470275 25.2703375998</gml:pos>\n",
45 | "</gml:ArcString>\n",
46 | "<gml:GeodesicString>\n",
47 | "<gml:pos>51.6027470275 25.2703375998</gml:pos>\n",
48 | "<gml:pos>51.6029718356 25.2704223789</gml:pos>\n",
49 | "<gml:pos>51.6031964007 25.2705074648</gml:pos>\n",
50 | "</gml:GeodesicString>\n",
51 | "<gml:ArcString>\n",
52 | "<gml:pos>51.6031964007 25.2705074648</gml:pos>\n",
53 | "<gml:pos>51.6032070399 25.2706946832</gml:pos>\n",
54 | "<gml:pos>51.6033042221 25.2707886138</gml:pos>\n",
55 | "</gml:ArcString>\n",
56 | "<gml:GeodesicString>\n",
57 | "<gml:pos>51.6033042221 25.2707886138</gml:pos>\n",
58 | "<gml:pos>51.6033373938 25.2708063644</gml:pos>\n",
59 | "</gml:GeodesicString>\n",
60 | "</gml:segments>\n",
61 | "</gml:Curve>\n",
62 | "</gml:curveMember>\n",
63 | "</gml:Ring>\n",
64 | "</gml:exterior>\n",
65 | "</gml:PolygonPatch>\n",
66 | "</gml:patches>\n",
67 | "```"
68 | ]
69 | },
70 | {
71 | "cell_type": "code",
72 | "execution_count": 91,
73 | "id": "d2a4f07e-8156-4d69-840a-0b29dfbefd6a",
74 | "metadata": {},
75 | "outputs": [],
76 | "source": [
77 | "import csv\n",
78 | "import xml.etree.ElementTree as ET\n",
79 | "import xml.dom.minidom\n",
80 | "import itertools"
81 | ]
82 | },
83 | {
84 | "cell_type": "code",
85 | "execution_count": 92,
86 | "id": "4490ac20-4483-4260-b8cf-b744b96a8016",
87 | "metadata": {},
88 | "outputs": [],
89 | "source": [
90 | "GML = 'http://www.opengis.net/gml'\n",
91 | "ET.register_namespace('gml', GML) \n",
92 | "\n",
93 | "root = ET.Element('gml')\n",
94 | "patches = ET.SubElement(root, ET.QName(GML, 'patches'))\n",
95 | "polypatches = ET.SubElement(patches, ET.QName(GML, 'PolygonPatch'))\n",
96 | "exterior = ET.SubElement(polypatches, ET.QName(GML, 'exterior'))\n",
97 | "ring = ET.SubElement(exterior, ET.QName(GML, 'Ring'))\n",
98 | "curvemember = ET.SubElement(ring, ET.QName(GML, 'curveMember'))\n",
99 | "curve = ET.SubElement(curvemember, ET.QName(GML, 'curve'), {'gml:id':'gmlID389873'})"
100 | ]
101 | },
102 | {
103 | "cell_type": "code",
104 | "execution_count": 93,
105 | "id": "a8c81e63-96d2-481e-9d91-b1f20c3d07bd",
106 | "metadata": {},
107 | "outputs": [],
108 | "source": [
109 | "with open('data.csv', 'r') as f:\n",
110 | "    lines = f.readlines()\n",
111 | "\n",
112 | "processed = [line.strip().split() for line in lines]"
113 | ]
114 | },
115 | {
116 | "cell_type": "code",
117 | "execution_count": 94,
118 | "id": "178a9bd9-f42c-48c4-aa45-37522220f171",
119 | "metadata": {},
120 | "outputs": [],
121 | "source": [
122 | "# Group all lines where the vertex type is the same\n",
123 | "iterator = itertools.groupby(processed, lambda item: item[2])\n",
124 | "\n",
125 | "# iterating over the result\n",
126 | "# element and its group\n",
127 | "for element, group in iterator:\n",
128 | "    if element == 'GEO':\n",
129 | "        geodesicstring = ET.SubElement(curve, ET.QName(GML, 'GeodesicString'))\n",
130 | "        for vertex in list(group):\n",
131 | "            value = '{},{}'.format(vertex[1], vertex[0])\n",
132 | "            ET.SubElement(geodesicstring, ET.QName(GML, 'pos')).text = value\n",
"            last_geo_value = value  # remember the last GEO vertex; the next arc starts here\n",
133 | "    if element == 'ABE':\n",
134 | "        arcstring = ET.SubElement(curve, ET.QName(GML, 'ArcString'))\n",
135 | "        ET.SubElement(arcstring, ET.QName(GML, 'pos')).text = last_geo_value\n",
136 | "        for vertex in list(group):\n",
137 | "            value = '{},{}'.format(vertex[1], vertex[0])\n",
138 | "            ET.SubElement(arcstring, ET.QName(GML, 'pos')).text = value\n"
139 | ]
140 | },
141 | {
142 | "cell_type": "code",
143 | "execution_count": 95,
144 | "id": "62920c5d-630c-465d-85df-d130cd2cc24e",
145 | "metadata": {},
146 | "outputs": [
147 | {
148 | "name": "stdout",
149 | "output_type": "stream",
150 | "text": [
151 | "\n",
152 | "\n",
153 | " \n",
154 | " \n",
155 | " \n",
156 | " \n",
157 | " \n",
158 | " \n",
159 | " \n",
160 | " 51.60333739375988,25.270806364429227\n",
161 | " 51.60324801592732,25.271009225305253\n",
162 | " 51.603201869139234,25.271113964392683\n",
163 | " 51.60228029931405,25.2707782569834\n",
164 | " 51.602326451082504,25.270673510842762\n",
165 | " \n",
166 | " \n",
167 | " \n",
168 | " 51.60241690493056,25.27046820899583\n",
169 | " 51.60261064947953,25.270466156352832\n",
170 | " \n",
171 | " \n",
172 | " 51.602747027489016,25.27033759984866\n",
173 | " 51.60297183557495,25.27042237891931\n",
174 | " \n",
175 | " \n",
176 | " \n",
177 | " 51.60319640070589,25.270507464783933\n",
178 | " 51.60320703990452,25.27069468321789\n",
179 | " \n",
180 | " \n",
181 | " 51.60330422209724,25.270788613761002\n", 182 | " \n", 183 | " \n", 184 | " \n", 185 | " \n", 186 | " \n", 187 | " \n", 188 | " \n", 189 | "\n", 190 | "\n" 191 | ] 192 | } 193 | ], 194 | "source": [ 195 | "dom = xml.dom.minidom.parseString(ET.tostring(root))\n", 196 | "xml_string = dom.toprettyxml(indent=' ')\n", 197 | "print(xml_string)" 198 | ] 199 | }, 200 | { 201 | "cell_type": "code", 202 | "execution_count": 96, 203 | "id": "698134dd-5a32-4adb-abed-76240ca7973a", 204 | "metadata": {}, 205 | "outputs": [], 206 | "source": [ 207 | "with open('output.gml', 'w') as f:\n", 208 | " f.write(xml_string)\n" 209 | ] 210 | } 211 | ], 212 | "metadata": { 213 | "kernelspec": { 214 | "display_name": "Python 3", 215 | "language": "python", 216 | "name": "python3" 217 | }, 218 | "language_info": { 219 | "codemirror_mode": { 220 | "name": "ipython", 221 | "version": 3 222 | }, 223 | "file_extension": ".py", 224 | "mimetype": "text/x-python", 225 | "name": "python", 226 | "nbconvert_exporter": "python", 227 | "pygments_lexer": "ipython3", 228 | "version": "3.7.11" 229 | } 230 | }, 231 | "nbformat": 4, 232 | "nbformat_minor": 5 233 | } 234 | -------------------------------------------------------------------------------- /python/dat_to_csv.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "id": "million-wallpaper", 6 | "metadata": {}, 7 | "source": [ 8 | "# Converting Surfer Atlas .BNA (ASCII DAT) file to a Vector Layer\n", 9 | "\n", 10 | "We have an ASCII file from Surfer in the [BNA format](http://surferhelp.goldensoftware.com/subsys/subsys_gsibna_hid_gsibna_filedesc.htm) defining every building ground plan as a polygon by listing its vertices. Hence the entry for a given building is the building number followed by the number of vertices of its boundary and the rooftop elevation (assumed flat), followed by a list of the (X,Y) coordinates of each one of the vertices of its boundary. As an example the entry for building number 186 is given below:\n", 11 | "The sequence means: Building number 105 is a polygon with 6 vertices and its rooftop elevation is 54.69 m (MSL). The (X,Y) co-ordinates of the given 6 vertices follow in the four next lines.\n", 12 | "\n", 13 | "```\n", 14 | " 105 6 54.69\n", 15 | " 1651.562500 4787.500000\n", 16 | " 1652.125000 4785.000000\n", 17 | " 1649.062500 4787.000000\n", 18 | " 1650.750000 4789.500000\n", 19 | " 1653.812500 4787.500000\n", 20 | " 1652.125000 4785.000000\n", 21 | " 106 6 58.98\n", 22 | " 1555.875000 4755.500000\n", 23 | " 1558.000000 4753.000000\n", 24 | " 1553.187500 4753.500000\n", 25 | " 1553.687500 4757.500000\n", 26 | " 1558.500000 4757.000000\n", 27 | " 1558.000000 4753.000000\n", 28 | " 107 8 62.32\n", 29 | " 1537.062500 4741.500000\n", 30 | " 1532.062500 4737.000000\n", 31 | " 1532.062500 4744.500000\n", 32 | " 1539.625000 4744.500000\n", 33 | " 1539.437500 4742.000000\n", 34 | " 1542.062500 4742.000000\n", 35 | " 1541.875000 4737.000000\n", 36 | " 1532.062500 4737.000000\n", 37 | "```\n", 38 | "\n", 39 | "We can creat a CSV with the polygon geometry stored as text in WKT format. QGIS can read this format easily and display the data." 
40 | ]
41 | },
42 | {
43 | "cell_type": "code",
44 | "execution_count": 43,
45 | "id": "organizational-knock",
46 | "metadata": {},
47 | "outputs": [],
48 | "source": [
49 | "input = 'Buildings.dat'\n",
50 | "output = 'Buildings.csv'"
51 | ]
52 | },
53 | {
54 | "cell_type": "code",
55 | "execution_count": 41,
56 | "id": "veterinary-chancellor",
57 | "metadata": {},
58 | "outputs": [],
59 | "source": [
60 | "data = []\n",
61 | "with open(input, 'r') as f:\n",
62 | "    for line in f:\n",
63 | "        # Get the id, number of vertices and elevation from the header line\n",
64 | "        fid, numvertices, elev = line.split()\n",
65 | "        coordinates = []\n",
66 | "        # Read ahead a number of lines equal to the number of vertices and save the coordinates\n",
67 | "        for _ in range(int(numvertices)):\n",
68 | "            x, y = f.readline().split()\n",
69 | "            coordinates.append(('{} {}'.format(x,y)))\n",
70 | "        # Discard the first coordinate, which is the centroid\n",
71 | "        wkt = 'POLYGON (({}))'.format(','.join(coordinates[1:]))\n",
72 | "        data.append({'fid': int(fid), 'elev': float(elev), 'wkt': wkt})"
73 | ]
74 | },
75 | {
76 | "cell_type": "code",
77 | "execution_count": 42,
78 | "id": "collective-launch",
79 | "metadata": {},
80 | "outputs": [],
81 | "source": [
82 | "import csv\n",
83 | "\n",
84 | "with open(output, 'w') as csvfile:\n",
85 | "    fieldnames = ['fid', 'elev', 'wkt']\n",
86 | "    writer = csv.DictWriter(csvfile, fieldnames=fieldnames)\n",
87 | "    writer.writeheader()\n",
88 | "    for row in data:\n",
89 | "        writer.writerow(row)\n"
90 | ]
91 | },
92 | {
93 | "cell_type": "markdown",
94 | "id": "stock-doctrine",
95 | "metadata": {},
96 | "source": [
97 | "The result is a CSV file that looks like this\n",
98 | "\n",
99 | "```\n",
100 | "fid,elev,wkt\n",
101 | "2,127.69,\"POLYGON ((627.187500 7781.000000,626.125000 7785.000000,629.062500 7786.000000,630.125000 7782.000000,627.187500 7781.000000))\"\n",
102 | "3,164.42,\"POLYGON ((824.125000 7675.500000,822.687500 7679.000000,826.000000 7680.500000,827.437500 7677.000000,824.125000 7675.500000))\"\n",
103 | "4,171.19,\"POLYGON ((840.125000 7640.500000,836.812500 7652.000000,842.937500 7654.000000,846.250000 7642.500000,840.125000 7640.500000))\"\n",
104 | "```"
105 | ]
106 | },
107 | {
108 | "cell_type": "markdown",
109 | "id": "exceptional-washington",
110 | "metadata": {},
111 | "source": [
112 | "The resulting CSV can be imported using the *Delimited Text* tab in the QGIS Data Source Manager, using the **WKT** field as the *Geometry field*\n",
113 | "\n",
114 | "![](import_wkt.png)"
115 | ]
116 | },
117 | {
118 | "cell_type": "markdown",
119 | "id": "steady-peoples",
120 | "metadata": {},
121 | "source": [
122 | "The polygon layer loads in QGIS. 
Since the data also has an `elev` attribute, we can style it using the **2.5D** renderer in QGIS.\n", 123 | "![](buildings.png)" 124 | ] 125 | }, 126 | { 127 | "cell_type": "code", 128 | "execution_count": null, 129 | "id": "settled-proposition", 130 | "metadata": {}, 131 | "outputs": [], 132 | "source": [] 133 | } 134 | ], 135 | "metadata": { 136 | "kernelspec": { 137 | "display_name": "Python 3", 138 | "language": "python", 139 | "name": "python3" 140 | }, 141 | "language_info": { 142 | "codemirror_mode": { 143 | "name": "ipython", 144 | "version": 3 145 | }, 146 | "file_extension": ".py", 147 | "mimetype": "text/x-python", 148 | "name": "python", 149 | "nbconvert_exporter": "python", 150 | "pygments_lexer": "ipython3", 151 | "version": "3.9.1" 152 | } 153 | }, 154 | "nbformat": 4, 155 | "nbformat_minor": 5 156 | } 157 | -------------------------------------------------------------------------------- /python/data.csv: -------------------------------------------------------------------------------- 1 | 25.270806364429227 51.60333739375988 GEO 2 | 25.271009225305253 51.60324801592732 GEO 3 | 25.271113964392683 51.603201869139234 GEO 4 | 25.2707782569834 51.60228029931405 GEO 5 | 25.270673510842762 51.602326451082504 GEO 6 | 25.27046820899583 51.60241690493056 ABE 7 | 25.270466156352832 51.60261064947953 ABE 8 | 25.27033759984866 51.602747027489016 GEO 9 | 25.27042237891931 51.60297183557495 GEO 10 | 25.270507464783933 51.60319640070589 ABE 11 | 25.27069468321789 51.60320703990452 ABE 12 | 25.270788613761002 51.60330422209724 GEO 13 | -------------------------------------------------------------------------------- /python/douglas_peucker.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/spatialthoughts/projects/48433946a268b82197aa28e08bc12ca03b4c8ed6/python/douglas_peucker.gif -------------------------------------------------------------------------------- /python/douglas_peucker.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/spatialthoughts/projects/48433946a268b82197aa28e08bc12ca03b4c8ed6/python/douglas_peucker.png -------------------------------------------------------------------------------- /python/import.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/spatialthoughts/projects/48433946a268b82197aa28e08bc12ca03b4c8ed6/python/import.png -------------------------------------------------------------------------------- /python/import_wkt.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/spatialthoughts/projects/48433946a268b82197aa28e08bc12ca03b4c8ed6/python/import_wkt.png -------------------------------------------------------------------------------- /python/kml.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/spatialthoughts/projects/48433946a268b82197aa28e08bc12ca03b4c8ed6/python/kml.png -------------------------------------------------------------------------------- /python/line.dbf: -------------------------------------------------------------------------------- 1 | zA FIDN 0 1 -------------------------------------------------------------------------------- /python/line.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/spatialthoughts/projects/48433946a268b82197aa28e08bc12ca03b4c8ed6/python/line.png -------------------------------------------------------------------------------- /python/line.prj: -------------------------------------------------------------------------------- 1 | PROJCS["British_National_Grid",GEOGCS["GCS_OSGB_1936",DATUM["D_OSGB_1936",SPHEROID["Airy_1830",6377563.396,299.3249646]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",400000.0],PARAMETER["False_Northing",-100000.0],PARAMETER["Central_Meridian",-2.0],PARAMETER["Scale_Factor",0.9996012717],PARAMETER["Latitude_Of_Origin",49.0],UNIT["Meter",1.0]] -------------------------------------------------------------------------------- /python/line.shp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/spatialthoughts/projects/48433946a268b82197aa28e08bc12ca03b4c8ed6/python/line.shp -------------------------------------------------------------------------------- /python/line.shx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/spatialthoughts/projects/48433946a268b82197aa28e08bc12ca03b4c8ed6/python/line.shx -------------------------------------------------------------------------------- /python/line_interpolation.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "id": "447fd058-f6d3-44c4-9afa-a0708ddb4163", 6 | "metadata": {}, 7 | "source": [ 8 | "# Interpolating Points Along Lines\n", 9 | "\n", 10 | "This notebook shows how to take a line layer and generate equally spaced points along each line feature using GeoPandas.\n", 11 | "\n", 12 | "We use `shapely` library for performing the interpolation. \n", 13 | "\n", 14 | "![](line_interpolation.png)" 15 | ] 16 | }, 17 | { 18 | "cell_type": "code", 19 | "execution_count": 114, 20 | "id": "11e026cb-c67a-4630-91e8-443bd16b39e2", 21 | "metadata": {}, 22 | "outputs": [], 23 | "source": [ 24 | "import geopandas as gpd\n", 25 | "from shapely.geometry import MultiPoint" 26 | ] 27 | }, 28 | { 29 | "cell_type": "markdown", 30 | "id": "2a33f4dd-bda7-41e1-bd20-dcf40bb7e30f", 31 | "metadata": {}, 32 | "source": [ 33 | "We read a sample line layer containing 2 line features." 34 | ] 35 | }, 36 | { 37 | "cell_type": "code", 38 | "execution_count": 115, 39 | "id": "8915069d-8663-4a60-817a-19579e6ea047", 40 | "metadata": {}, 41 | "outputs": [], 42 | "source": [ 43 | "gdf = gpd.read_file('line.shp')" 44 | ] 45 | }, 46 | { 47 | "cell_type": "code", 48 | "execution_count": 116, 49 | "id": "73d35fb0-fe94-42c4-90d3-6dfb26fdd2da", 50 | "metadata": {}, 51 | "outputs": [ 52 | { 53 | "data": { 54 | "text/html": [ 55 | "
\n", 56 | "\n", 69 | "\n", 70 | " \n", 71 | " \n", 72 | " \n", 73 | " \n", 74 | " \n", 75 | " \n", 76 | " \n", 77 | " \n", 78 | " \n", 79 | " \n", 80 | " \n", 81 | " \n", 82 | " \n", 83 | " \n", 84 | " \n", 85 | " \n", 86 | " \n", 87 | " \n", 88 | " \n", 89 | "
FIDgeometry
00LINESTRING (503423.417 492226.902, 545830.203 ...
11LINESTRING (508389.969 484415.756, 522917.195 ...
\n", 90 | "
" 91 | ], 92 | "text/plain": [ 93 | " FID geometry\n", 94 | "0 0 LINESTRING (503423.417 492226.902, 545830.203 ...\n", 95 | "1 1 LINESTRING (508389.969 484415.756, 522917.195 ..." 96 | ] 97 | }, 98 | "execution_count": 116, 99 | "metadata": {}, 100 | "output_type": "execute_result" 101 | } 102 | ], 103 | "source": [ 104 | "gdf" 105 | ] 106 | }, 107 | { 108 | "cell_type": "markdown", 109 | "id": "1e090d40-3398-46fe-9a98-73915fccb240", 110 | "metadata": {}, 111 | "source": [ 112 | "Check and verify that the CRS is a projected CRS. The layer must be in a Projected CRS and the distance units must be in the same unit of the CRS. This code will **NOT** work on layers which are in EPSG:4326 CRS. Re-project your data to a projected CRS before using." 113 | ] 114 | }, 115 | { 116 | "cell_type": "code", 117 | "execution_count": 117, 118 | "id": "7b5b49cf-7dbe-413a-87d4-ae3e69ef362a", 119 | "metadata": {}, 120 | "outputs": [ 121 | { 122 | "data": { 123 | "text/plain": [ 124 | "\n", 125 | "Name: OSGB36 / British National Grid\n", 126 | "Axis Info [cartesian]:\n", 127 | "- E[east]: Easting (metre)\n", 128 | "- N[north]: Northing (metre)\n", 129 | "Area of Use:\n", 130 | "- name: United Kingdom (UK) - offshore to boundary of UKCS within 49°45'N to 61°N and 9°W to 2°E; onshore Great Britain (England, Wales and Scotland). Isle of Man onshore.\n", 131 | "- bounds: (-9.0, 49.75, 2.01, 61.01)\n", 132 | "Coordinate Operation:\n", 133 | "- name: British National Grid\n", 134 | "- method: Transverse Mercator\n", 135 | "Datum: Ordnance Survey of Great Britain 1936\n", 136 | "- Ellipsoid: Airy 1830\n", 137 | "- Prime Meridian: Greenwich" 138 | ] 139 | }, 140 | "execution_count": 117, 141 | "metadata": {}, 142 | "output_type": "execute_result" 143 | } 144 | ], 145 | "source": [ 146 | "gdf.crs" 147 | ] 148 | }, 149 | { 150 | "cell_type": "markdown", 151 | "id": "48d6efe6-9898-46c7-8811-f7d3158adc9d", 152 | "metadata": {}, 153 | "source": [ 154 | "Write a function that will be applied on each feature. This function uses the [interpolate function](https://shapely.readthedocs.io/en/stable/manual.html#object.interpolate) from shapely." 
155 | ] 156 | }, 157 | { 158 | "cell_type": "code", 159 | "execution_count": 118, 160 | "id": "dcd85b95-c03f-474e-81e2-ea3ca6db3698", 161 | "metadata": {}, 162 | "outputs": [], 163 | "source": [ 164 | "# distance in meters\n", 165 | "distance = 1000\n", 166 | "\n", 167 | "def get_points(row):\n", 168 | " geometry = row.geometry\n", 169 | " interpolated_distance = 0\n", 170 | " \n", 171 | " points = []\n", 172 | " while interpolated_distance < geometry.length:\n", 173 | " point = geometry.interpolate(interpolated_distance)\n", 174 | " points.append(point)\n", 175 | " # Increase the distance for the next point\n", 176 | " interpolated_distance = interpolated_distance + distance\n", 177 | " return MultiPoint(points)" 178 | ] 179 | }, 180 | { 181 | "cell_type": "markdown", 182 | "id": "9a6200cc-16b2-4ece-94c9-920a2b7a3692", 183 | "metadata": {}, 184 | "source": [ 185 | "Apply the function on each line in the layer and save to a new column" 186 | ] 187 | }, 188 | { 189 | "cell_type": "code", 190 | "execution_count": 119, 191 | "id": "4320f460-15de-4565-89e2-94d2ede130b4", 192 | "metadata": {}, 193 | "outputs": [], 194 | "source": [ 195 | "gdf['points'] = gdf.apply(get_points, axis=1)" 196 | ] 197 | }, 198 | { 199 | "cell_type": "markdown", 200 | "id": "83eb0b66-23af-438c-9328-01452bdcde9b", 201 | "metadata": {}, 202 | "source": [ 203 | "Set the new column as the geometry and remove the old one" 204 | ] 205 | }, 206 | { 207 | "cell_type": "code", 208 | "execution_count": 120, 209 | "id": "6d8ad401-ca4c-43b4-b3fc-ba1951a1437d", 210 | "metadata": {}, 211 | "outputs": [ 212 | { 213 | "data": { 214 | "text/html": [ 215 | "
\n", 216 | "\n", 229 | "\n", 230 | " \n", 231 | " \n", 232 | " \n", 233 | " \n", 234 | " \n", 235 | " \n", 236 | " \n", 237 | " \n", 238 | " \n", 239 | " \n", 240 | " \n", 241 | " \n", 242 | " \n", 243 | " \n", 244 | " \n", 245 | " \n", 246 | " \n", 247 | " \n", 248 | " \n", 249 | "
FIDpoints
00MULTIPOINT (503423.417 492226.902, 504347.114 ...
11MULTIPOINT (508389.969 484415.756, 509243.299 ...
\n", 250 | "
" 251 | ], 252 | "text/plain": [ 253 | " FID points\n", 254 | "0 0 MULTIPOINT (503423.417 492226.902, 504347.114 ...\n", 255 | "1 1 MULTIPOINT (508389.969 484415.756, 509243.299 ..." 256 | ] 257 | }, 258 | "execution_count": 120, 259 | "metadata": {}, 260 | "output_type": "execute_result" 261 | } 262 | ], 263 | "source": [ 264 | "gdf = gdf.set_geometry('points')\n", 265 | "gdf = gdf.drop(columns=['geometry'])\n", 266 | "gdf" 267 | ] 268 | }, 269 | { 270 | "cell_type": "markdown", 271 | "id": "70e85290-7cc8-4596-a5e5-0070c44b4f37", 272 | "metadata": {}, 273 | "source": [ 274 | "Each line has many points associated with it as a MultiPoint feature. Convert them to individual features" 275 | ] 276 | }, 277 | { 278 | "cell_type": "code", 279 | "execution_count": 121, 280 | "id": "7e649888-07f4-4124-b909-d4f4e5261ea0", 281 | "metadata": {}, 282 | "outputs": [ 283 | { 284 | "data": { 285 | "text/html": [ 286 | "
\n", 287 | "\n", 300 | "\n", 301 | " \n", 302 | " \n", 303 | " \n", 304 | " \n", 305 | " \n", 306 | " \n", 307 | " \n", 308 | " \n", 309 | " \n", 310 | " \n", 311 | " \n", 312 | " \n", 313 | " \n", 314 | " \n", 315 | " \n", 316 | " \n", 317 | " \n", 318 | " \n", 319 | " \n", 320 | " \n", 321 | " \n", 322 | " \n", 323 | " \n", 324 | " \n", 325 | " \n", 326 | " \n", 327 | " \n", 328 | " \n", 329 | " \n", 330 | " \n", 331 | " \n", 332 | " \n", 333 | " \n", 334 | " \n", 335 | " \n", 336 | " \n", 337 | " \n", 338 | " \n", 339 | " \n", 340 | " \n", 341 | " \n", 342 | " \n", 343 | " \n", 344 | " \n", 345 | " \n", 346 | " \n", 347 | " \n", 348 | " \n", 349 | " \n", 350 | " \n", 351 | " \n", 352 | " \n", 353 | " \n", 354 | " \n", 355 | " \n", 356 | " \n", 357 | " \n", 358 | " \n", 359 | " \n", 360 | " \n", 361 | " \n", 362 | " \n", 363 | " \n", 364 | " \n", 365 | " \n", 366 | " \n", 367 | " \n", 368 | " \n", 369 | "
FIDpoints
000POINT (503423.417 492226.902)
10POINT (504347.114 492610.026)
20POINT (505270.810 492993.150)
30POINT (506194.507 493376.275)
40POINT (507118.204 493759.399)
............
1451POINT (547000.205 501886.235)
461POINT (547520.152 502740.434)
471POINT (548040.099 503594.632)
481POINT (548560.046 504448.831)
491POINT (549079.993 505303.030)
\n", 370 | "

96 rows × 2 columns

\n", 371 | "
" 372 | ], 373 | "text/plain": [ 374 | " FID points\n", 375 | "0 0 0 POINT (503423.417 492226.902)\n", 376 | " 1 0 POINT (504347.114 492610.026)\n", 377 | " 2 0 POINT (505270.810 492993.150)\n", 378 | " 3 0 POINT (506194.507 493376.275)\n", 379 | " 4 0 POINT (507118.204 493759.399)\n", 380 | "... ... ...\n", 381 | "1 45 1 POINT (547000.205 501886.235)\n", 382 | " 46 1 POINT (547520.152 502740.434)\n", 383 | " 47 1 POINT (548040.099 503594.632)\n", 384 | " 48 1 POINT (548560.046 504448.831)\n", 385 | " 49 1 POINT (549079.993 505303.030)\n", 386 | "\n", 387 | "[96 rows x 2 columns]" 388 | ] 389 | }, 390 | "execution_count": 121, 391 | "metadata": {}, 392 | "output_type": "execute_result" 393 | } 394 | ], 395 | "source": [ 396 | "gdf_final = gdf.explode()\n", 397 | "gdf_final" 398 | ] 399 | }, 400 | { 401 | "cell_type": "markdown", 402 | "id": "67e92565-df5a-4eb3-99d6-2dace3c1c511", 403 | "metadata": {}, 404 | "source": [ 405 | "Save the output as a shapefile." 406 | ] 407 | }, 408 | { 409 | "cell_type": "code", 410 | "execution_count": 122, 411 | "id": "c8df105b-6d90-45e4-a9b8-ae1850f3fb98", 412 | "metadata": {}, 413 | "outputs": [], 414 | "source": [ 415 | "gdf_final.to_file('interpolated_points.shp')" 416 | ] 417 | } 418 | ], 419 | "metadata": { 420 | "kernelspec": { 421 | "display_name": "Python 3 (ipykernel)", 422 | "language": "python", 423 | "name": "python3" 424 | }, 425 | "language_info": { 426 | "codemirror_mode": { 427 | "name": "ipython", 428 | "version": 3 429 | }, 430 | "file_extension": ".py", 431 | "mimetype": "text/x-python", 432 | "name": "python", 433 | "nbconvert_exporter": "python", 434 | "pygments_lexer": "ipython3", 435 | "version": "3.10.2" 436 | } 437 | }, 438 | "nbformat": 4, 439 | "nbformat_minor": 5 440 | } 441 | -------------------------------------------------------------------------------- /python/line_interpolation.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/spatialthoughts/projects/48433946a268b82197aa28e08bc12ca03b4c8ed6/python/line_interpolation.png -------------------------------------------------------------------------------- /python/netcdf_nco.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "id": "bebd6fba-d5ea-4765-9a34-fa763315434f", 6 | "metadata": { 7 | "id": "bebd6fba-d5ea-4765-9a34-fa763315434f" 8 | }, 9 | "source": [ 10 | "## NetCDF conversion using NCO Tool\n", 11 | "\n", 12 | "Learn about [NCO](https://nco.sourceforge.net/#Definition)\n", 13 | "\n" 14 | ] 15 | }, 16 | { 17 | "cell_type": "markdown", 18 | "id": "0f5ad154-d5dd-4459-b6c4-534a2930e1e2", 19 | "metadata": { 20 | "id": "0f5ad154-d5dd-4459-b6c4-534a2930e1e2" 21 | }, 22 | "source": [ 23 | "This example notebook shows how to use the `nco` tool to flip x,y coordinates of a NetCDF file.\n", 24 | "\n", 25 | "Upload the require NetCDF file to Colab from the left-hand panel menu." 
26 | ] 27 | }, 28 | { 29 | "cell_type": "code", 30 | "execution_count": 15, 31 | "id": "b4589dab-ed54-47d5-a16c-afeca7de0273", 32 | "metadata": { 33 | "id": "b4589dab-ed54-47d5-a16c-afeca7de0273" 34 | }, 35 | "outputs": [], 36 | "source": [ 37 | "%%capture\n", 38 | "!apt install nco" 39 | ] 40 | }, 41 | { 42 | "cell_type": "code", 43 | "source": [ 44 | "!ncpdq -O -a time,y,x SST.nc SST_out2.nc" 45 | ], 46 | "metadata": { 47 | "id": "Ez_3P0u3y9mD" 48 | }, 49 | "id": "Ez_3P0u3y9mD", 50 | "execution_count": 17, 51 | "outputs": [] 52 | }, 53 | { 54 | "cell_type": "markdown", 55 | "source": [ 56 | "Download the output file from the left-hand panel Files menu." 57 | ], 58 | "metadata": { 59 | "id": "M7rzAB_9zkaB" 60 | }, 61 | "id": "M7rzAB_9zkaB" 62 | } 63 | ], 64 | "metadata": { 65 | "kernelspec": { 66 | "display_name": "Python 3 (ipykernel)", 67 | "language": "python", 68 | "name": "python3" 69 | }, 70 | "language_info": { 71 | "codemirror_mode": { 72 | "name": "ipython", 73 | "version": 3 74 | }, 75 | "file_extension": ".py", 76 | "mimetype": "text/x-python", 77 | "name": "python", 78 | "nbconvert_exporter": "python", 79 | "pygments_lexer": "ipython3", 80 | "version": "3.10.5" 81 | }, 82 | "colab": { 83 | "provenance": [] 84 | } 85 | }, 86 | "nbformat": 4, 87 | "nbformat_minor": 5 88 | } -------------------------------------------------------------------------------- /python/raster_from_array.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "id": "7999aec0-7a91-49b9-8c5a-062ea83f4ac2", 6 | "metadata": {}, 7 | "source": [ 8 | "## Create Raster from Array\n", 9 | "\n", 10 | "A script that shows how to create a test image using pixel values from an array. This type of images are useful to test various algorithms and prepare tutorials showing the effect of them." 11 | ] 12 | }, 13 | { 14 | "cell_type": "code", 15 | "execution_count": 54, 16 | "id": "3202cc14-9826-4644-b480-84aeb423487f", 17 | "metadata": {}, 18 | "outputs": [], 19 | "source": [ 20 | "import numpy as np\n", 21 | "import rasterio\n", 22 | "from rasterio.transform import Affine" 23 | ] 24 | }, 25 | { 26 | "cell_type": "markdown", 27 | "id": "c2185e8d-f6c0-4d90-a8cc-1b24d4a6860b", 28 | "metadata": {}, 29 | "source": [ 30 | "We want to create a 4x4 image. Define a 2-dimentional array. Since we are storing small integers set the data type to **Byte** (uint8). " 31 | ] 32 | }, 33 | { 34 | "cell_type": "code", 35 | "execution_count": 55, 36 | "id": "9dc24184-de76-45cf-93fb-227afcafc1dd", 37 | "metadata": {}, 38 | "outputs": [], 39 | "source": [ 40 | "array = np.array([\n", 41 | " [0, 0, 1, 1],\n", 42 | " [0, 0, 1, 1],\n", 43 | " [0, 2, 2, 2],\n", 44 | " [2, 2, 3, 3]\n", 45 | "], dtype=np.uint8)" 46 | ] 47 | }, 48 | { 49 | "cell_type": "markdown", 50 | "id": "2e3ada82-b785-499b-9657-284e4bf624dd", 51 | "metadata": {}, 52 | "source": [ 53 | "Geospatial images need a 'transform' that defines the pixel sizes and location of the upper-left pixel." 54 | ] 55 | }, 56 | { 57 | "cell_type": "markdown", 58 | "id": "f7d96506-175c-4927-8fa8-41dfd73b0ceb", 59 | "metadata": {}, 60 | "source": [ 61 | "We define the transform using GDAL's order of coefficients. https://gdal.org/tutorials/geotransforms_tut.html\n", 62 | "\n", 63 | "The following defines an image with X,Y coordinates of (780850,1432187) and a resolution of 1000." 
64 | ] 65 | }, 66 | { 67 | "cell_type": "code", 68 | "execution_count": 56, 69 | "id": "9bc0a835-f6e5-47e8-88c4-641a1dbc92b3", 70 | "metadata": {}, 71 | "outputs": [], 72 | "source": [ 73 | "transform = Affine.from_gdal(780850, 1000, 0, 1432187, 0, -1000)" 74 | ] 75 | }, 76 | { 77 | "cell_type": "markdown", 78 | "id": "dd49e81f-e61a-4db0-81f7-9e4c9a9633de", 79 | "metadata": {}, 80 | "source": [ 81 | "Along with the transform, we need to choose a Coordinate Reference System (CRS). We use UTM Zone 43N - EPSG:32643 as the CRS." 82 | ] 83 | }, 84 | { 85 | "cell_type": "code", 86 | "execution_count": 57, 87 | "id": "123aacf9-3e8e-4e08-8eb0-6b2dd1ecad13", 88 | "metadata": {}, 89 | "outputs": [], 90 | "source": [ 91 | "crs='EPSG:32643'" 92 | ] 93 | }, 94 | { 95 | "cell_type": "markdown", 96 | "id": "c80f731f-465b-4b08-bc80-266041156e0a", 97 | "metadata": {}, 98 | "source": [ 99 | "Create a new RasterIO dataset and write the array as a band." 100 | ] 101 | }, 102 | { 103 | "cell_type": "code", 104 | "execution_count": 58, 105 | "id": "5faef7d3-67eb-447d-897e-066a2865318f", 106 | "metadata": {}, 107 | "outputs": [], 108 | "source": [ 109 | "output_path = 'image.tif'\n", 110 | "\n", 111 | "new_dataset = rasterio.open(output_path, 'w', \n", 112 | " driver='GTiff',\n", 113 | " height=array.shape[0],\n", 114 | " width=array.shape[1],\n", 115 | " count=1,\n", 116 | " dtype=array.dtype,\n", 117 | " crs=crs,\n", 118 | " transform=transform)\n", 119 | "new_dataset.write(array, 1)\n", 120 | "new_dataset.close()" 121 | ] 122 | } 123 | ], 124 | "metadata": { 125 | "kernelspec": { 126 | "display_name": "Python 3 (ipykernel)", 127 | "language": "python", 128 | "name": "python3" 129 | }, 130 | "language_info": { 131 | "codemirror_mode": { 132 | "name": "ipython", 133 | "version": 3 134 | }, 135 | "file_extension": ".py", 136 | "mimetype": "text/x-python", 137 | "name": "python", 138 | "nbconvert_exporter": "python", 139 | "pygments_lexer": "ipython3", 140 | "version": "3.9.7" 141 | } 142 | }, 143 | "nbformat": 4, 144 | "nbformat_minor": 5 145 | } 146 | -------------------------------------------------------------------------------- /python/reprojection_and_coordinate_transform.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "id": "eef82899-4489-4d88-88ac-9db52772fe49", 6 | "metadata": {}, 7 | "source": [ 8 | "# Reproject and Coordinate Transformation\n", 9 | "\n", 10 | "Because of a change introduced in GDAL version >3.0, a CRS created from EPSG:4326 now expects coordinates in Y, X order instead of X, Y. The reason for this change is explained at [CRS and axis order\n", 11 | "](https://gdal.org/tutorials/osr_api_tut.html#crs-and-axis-order).\n", 12 | "\n", 13 | "The following code worked fine before, but now it fails with an error. 
In some cases, you will get results, but it will compute the re-projected coordinates with swapped X and Y coordinates" 14 | ] 15 | }, 16 | { 17 | "cell_type": "code", 18 | "execution_count": 2, 19 | "id": "d72e2bcc-6772-46bd-8824-a2ab7eb008b2", 20 | "metadata": {}, 21 | "outputs": [ 22 | { 23 | "name": "stdout", 24 | "output_type": "stream", 25 | "text": [ 26 | "POINT (inf inf)\n" 27 | ] 28 | }, 29 | { 30 | "name": "stderr", 31 | "output_type": "stream", 32 | "text": [ 33 | "ERROR 1: PROJ: utm: Invalid latitude\n", 34 | "ERROR 1: Invalid coordinate\n" 35 | ] 36 | } 37 | ], 38 | "source": [ 39 | "from osgeo import osr,ogr\n", 40 | "\n", 41 | "\n", 42 | "wkt = 'POINT (121.787 -26.012)'\n", 43 | "\n", 44 | "source = osr.SpatialReference()\n", 45 | "source.ImportFromEPSG(4326)\n", 46 | "\n", 47 | "target = osr.SpatialReference()\n", 48 | "target.ImportFromEPSG(32750)\n", 49 | "\n", 50 | "transform = osr.CoordinateTransformation(source, target)\n", 51 | "point = ogr.CreateGeometryFromWkt(wkt)\n", 52 | "point.Transform(transform)\n", 53 | "\n", 54 | "print(point)" 55 | ] 56 | }, 57 | { 58 | "cell_type": "markdown", 59 | "id": "744d9c4c-bc1e-44cc-9939-bdc6eb695fe9", 60 | "metadata": {}, 61 | "source": [ 62 | "To fix this, You need to explicitly set `SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER)` on CRS created using EPSG:4326" 63 | ] 64 | }, 65 | { 66 | "cell_type": "code", 67 | "execution_count": 9, 68 | "id": "f4df82d6-faa9-427e-b983-2d11c1a428b5", 69 | "metadata": {}, 70 | "outputs": [ 71 | { 72 | "name": "stdout", 73 | "output_type": "stream", 74 | "text": [ 75 | "POINT (979368.581534574 7114191.38128267)\n" 76 | ] 77 | } 78 | ], 79 | "source": [ 80 | "from osgeo import osr,ogr\n", 81 | "\n", 82 | "wkt = 'POINT (121.787 -26.012)'\n", 83 | "\n", 84 | "source = osr.SpatialReference()\n", 85 | "source.ImportFromEPSG(4326)\n", 86 | "source.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER)\n", 87 | "\n", 88 | "target = osr.SpatialReference()\n", 89 | "target.ImportFromEPSG(32750)\n", 90 | "\n", 91 | "transform = osr.CoordinateTransformation(source, target)\n", 92 | "point = ogr.CreateGeometryFromWkt(wkt)\n", 93 | "point.Transform(transform)\n", 94 | "\n", 95 | "print(point)" 96 | ] 97 | }, 98 | { 99 | "cell_type": "markdown", 100 | "id": "141ed0fc-36f3-4391-84b6-3e6e3a7c4551", 101 | "metadata": {}, 102 | "source": [ 103 | "Higher level libraries like GeoPandas handle this under-the-hood and users do not have to worry about it." 104 | ] 105 | }, 106 | { 107 | "cell_type": "code", 108 | "execution_count": 7, 109 | "id": "0b466108-26b9-4373-8624-677880b495a9", 110 | "metadata": {}, 111 | "outputs": [ 112 | { 113 | "data": { 114 | "text/html": [ 115 | "
\n", 116 | "\n", 129 | "\n", 130 | " \n", 131 | " \n", 132 | " \n", 133 | " \n", 134 | " \n", 135 | " \n", 136 | " \n", 137 | " \n", 138 | " \n", 139 | " \n", 140 | " \n", 141 | " \n", 142 | " \n", 143 | " \n", 144 | "
idgeometry
01POINT (121.78700 -26.01200)
\n", 145 | "
" 146 | ], 147 | "text/plain": [ 148 | " id geometry\n", 149 | "0 1 POINT (121.78700 -26.01200)" 150 | ] 151 | }, 152 | "execution_count": 7, 153 | "metadata": {}, 154 | "output_type": "execute_result" 155 | } 156 | ], 157 | "source": [ 158 | "import geopandas\n", 159 | "from shapely import wkt\n", 160 | "point = wkt.loads('POINT (121.787 -26.012)')\n", 161 | "data = {'id': [1], 'geometry': [point]}\n", 162 | "gdf = geopandas.GeoDataFrame(data, crs='EPSG:4326')\n", 163 | "gdf" 164 | ] 165 | }, 166 | { 167 | "cell_type": "code", 168 | "execution_count": 8, 169 | "id": "f4352e3d-e964-4a4b-99dd-8423e41ed3f7", 170 | "metadata": {}, 171 | "outputs": [ 172 | { 173 | "data": { 174 | "text/html": [ 175 | "
\n", 176 | "\n", 189 | "\n", 190 | " \n", 191 | " \n", 192 | " \n", 193 | " \n", 194 | " \n", 195 | " \n", 196 | " \n", 197 | " \n", 198 | " \n", 199 | " \n", 200 | " \n", 201 | " \n", 202 | " \n", 203 | " \n", 204 | "
idgeometry
01POINT (979368.582 7114191.381)
\n", 205 | "
" 206 | ], 207 | "text/plain": [ 208 | " id geometry\n", 209 | "0 1 POINT (979368.582 7114191.381)" 210 | ] 211 | }, 212 | "execution_count": 8, 213 | "metadata": {}, 214 | "output_type": "execute_result" 215 | } 216 | ], 217 | "source": [ 218 | "gdf.to_crs(32750)" 219 | ] 220 | } 221 | ], 222 | "metadata": { 223 | "kernelspec": { 224 | "display_name": "Python 3 (ipykernel)", 225 | "language": "python", 226 | "name": "python3" 227 | }, 228 | "language_info": { 229 | "codemirror_mode": { 230 | "name": "ipython", 231 | "version": 3 232 | }, 233 | "file_extension": ".py", 234 | "mimetype": "text/x-python", 235 | "name": "python", 236 | "nbconvert_exporter": "python", 237 | "pygments_lexer": "ipython3", 238 | "version": "3.9.6" 239 | } 240 | }, 241 | "nbformat": 4, 242 | "nbformat_minor": 5 243 | } 244 | -------------------------------------------------------------------------------- /python/result.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/spatialthoughts/projects/48433946a268b82197aa28e08bc12ca03b4c8ed6/python/result.png -------------------------------------------------------------------------------- /python/select_subset_from_file.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "id": "23dda0e2-cea7-45e6-b0af-ae3435268198", 6 | "metadata": {}, 7 | "source": [ 8 | "## Filter a Shapefile using an Excel Sheet\n", 9 | "\n", 10 | "This notebook shows how to read data from an Excel sheet containing IDs that need to be selected from another shapefile. We used Pandas `isin()` function to apply a filter for the ids from the Excel file." 11 | ] 12 | }, 13 | { 14 | "cell_type": "code", 15 | "execution_count": null, 16 | "id": "2a44d79b-8f3f-414b-89bd-5ac89f6c005f", 17 | "metadata": {}, 18 | "outputs": [], 19 | "source": [ 20 | "import geopandas as gpd\n", 21 | "import pandas as pd\n", 22 | "import numpy as np" 23 | ] 24 | }, 25 | { 26 | "cell_type": "code", 27 | "execution_count": 29, 28 | "id": "65dcbe4a-cff1-452e-9362-27c45d902de5", 29 | "metadata": {}, 30 | "outputs": [ 31 | { 32 | "data": { 33 | "text/html": [ 34 | "
\n", 35 | "\n", 48 | "\n", 49 | " \n", 50 | " \n", 51 | " \n", 52 | " \n", 53 | " \n", 54 | " \n", 55 | " \n", 56 | " \n", 57 | " \n", 58 | " \n", 59 | " \n", 60 | " \n", 61 | " \n", 62 | " \n", 63 | " \n", 64 | " \n", 65 | " \n", 66 | " \n", 67 | " \n", 68 | " \n", 69 | " \n", 70 | " \n", 71 | " \n", 72 | " \n", 73 | " \n", 74 | " \n", 75 | " \n", 76 | " \n", 77 | " \n", 78 | " \n", 79 | " \n", 80 | " \n", 81 | " \n", 82 | " \n", 83 | " \n", 84 | " \n", 85 | " \n", 86 | " \n", 87 | " \n", 88 | " \n", 89 | " \n", 90 | " \n", 91 | " \n", 92 | " \n", 93 | " \n", 94 | " \n", 95 | " \n", 96 | " \n", 97 | " \n", 98 | " \n", 99 | " \n", 100 | " \n", 101 | " \n", 102 | " \n", 103 | " \n", 104 | " \n", 105 | " \n", 106 | " \n", 107 | " \n", 108 | " \n", 109 | " \n", 110 | " \n", 111 | " \n", 112 | " \n", 113 | " \n", 114 | " \n", 115 | " \n", 116 | " \n", 117 | " \n", 118 | " \n", 119 | " \n", 120 | " \n", 121 | " \n", 122 | " \n", 123 | " \n", 124 | " \n", 125 | " \n", 126 | " \n", 127 | " \n", 128 | " \n", 129 | " \n", 130 | " \n", 131 | " \n", 132 | " \n", 133 | " \n", 134 | " \n", 135 | " \n", 136 | " \n", 137 | " \n", 138 | " \n", 139 | " \n", 140 | " \n", 141 | " \n", 142 | " \n", 143 | " \n", 144 | " \n", 145 | " \n", 146 | " \n", 147 | " \n", 148 | " \n", 149 | " \n", 150 | " \n", 151 | " \n", 152 | " \n", 153 | " \n", 154 | " \n", 155 | " \n", 156 | " \n", 157 | " \n", 158 | " \n", 159 | " \n", 160 | " \n", 161 | " \n", 162 | " \n", 163 | " \n", 164 | " \n", 165 | " \n", 166 | " \n", 167 | " \n", 168 | " \n", 169 | " \n", 170 | " \n", 171 | " \n", 172 | " \n", 173 | " \n", 174 | " \n", 175 | " \n", 176 | " \n", 177 | " \n", 178 | " \n", 179 | " \n", 180 | " \n", 181 | " \n", 182 | " \n", 183 | " \n", 184 | " \n", 185 | " \n", 186 | " \n", 187 | " \n", 188 | " \n", 189 | " \n", 190 | " \n", 191 | " \n", 192 | " \n", 193 | " \n", 194 | " \n", 195 | " \n", 196 | " \n", 197 | " \n", 198 | " \n", 199 | " \n", 200 | " \n", 201 | " \n", 202 | " \n", 203 | " \n", 204 | " \n", 205 | " \n", 206 | " \n", 207 | " \n", 208 | " \n", 209 | " \n", 210 | " \n", 211 | " \n", 212 | " \n", 213 | " \n", 214 | " \n", 215 | " \n", 216 | " \n", 217 | " \n", 218 | " \n", 219 | " \n", 220 | " \n", 221 | " \n", 222 | " \n", 223 | " \n", 224 | " \n", 225 | " \n", 226 | " \n", 227 | " \n", 228 | " \n", 229 | " \n", 230 | " \n", 231 | " \n", 232 | " \n", 233 | "
fidosm_idcodefclassnamerefonewaymaxspeedlayerbridgetunnelgeometry
01.043549535114secondaryMahatma Gandhi RoadNoneF300.0FFLINESTRING (77.59928 12.97672, 77.59950 12.976...
12.082858615114secondaryNoneNoneB00.0FFLINESTRING (76.65944 12.31809, 76.65904 12.318...
23.082858685113primaryBangalore Nilgiri RoadNoneF00.0FFLINESTRING (76.65906 12.31389, 76.65912 12.313...
34.082858905115tertiarySri Harsha RoadNoneF00.0FFLINESTRING (76.65600 12.30895, 76.65646 12.308...
45.082858925114secondaryAshoka RoadNoneF00.0FFLINESTRING (76.65615 12.30989, 76.65600 12.308...
.......................................
4460144602.07632934235114secondaryKamaraj RoadNoneF00.0FFLINESTRING (77.60806 12.97517, 77.60797 12.97508)
4460244603.07632934245114secondaryMahatma Gandhi RoadNoneF300.0FFLINESTRING (77.60798 12.97519, 77.60806 12.97517)
4460344604.07632959785114secondary18th Cross RoadNoneB00.0FFLINESTRING (77.57115 13.00849, 77.57156 13.008...
4460444605.07634050525113primaryVarthur RoadNoneF00.0FFLINESTRING (77.70140 12.95693, 77.70164 12.956...
4460544606.07634050535113primaryField Marshal Cariappa RoadNoneF300.0FFLINESTRING (77.61040 12.97360, 77.61052 12.97365)
\n", 234 | "

44606 rows × 12 columns

\n", 235 | "
" 236 | ], 237 | "text/plain": [ 238 | " fid osm_id code fclass name ref \\\n", 239 | "0 1.0 4354953 5114 secondary Mahatma Gandhi Road None \n", 240 | "1 2.0 8285861 5114 secondary None None \n", 241 | "2 3.0 8285868 5113 primary Bangalore Nilgiri Road None \n", 242 | "3 4.0 8285890 5115 tertiary Sri Harsha Road None \n", 243 | "4 5.0 8285892 5114 secondary Ashoka Road None \n", 244 | "... ... ... ... ... ... ... \n", 245 | "44601 44602.0 763293423 5114 secondary Kamaraj Road None \n", 246 | "44602 44603.0 763293424 5114 secondary Mahatma Gandhi Road None \n", 247 | "44603 44604.0 763295978 5114 secondary 18th Cross Road None \n", 248 | "44604 44605.0 763405052 5113 primary Varthur Road None \n", 249 | "44605 44606.0 763405053 5113 primary Field Marshal Cariappa Road None \n", 250 | "\n", 251 | " oneway maxspeed layer bridge tunnel \\\n", 252 | "0 F 30 0.0 F F \n", 253 | "1 B 0 0.0 F F \n", 254 | "2 F 0 0.0 F F \n", 255 | "3 F 0 0.0 F F \n", 256 | "4 F 0 0.0 F F \n", 257 | "... ... ... ... ... ... \n", 258 | "44601 F 0 0.0 F F \n", 259 | "44602 F 30 0.0 F F \n", 260 | "44603 B 0 0.0 F F \n", 261 | "44604 F 0 0.0 F F \n", 262 | "44605 F 30 0.0 F F \n", 263 | "\n", 264 | " geometry \n", 265 | "0 LINESTRING (77.59928 12.97672, 77.59950 12.976... \n", 266 | "1 LINESTRING (76.65944 12.31809, 76.65904 12.318... \n", 267 | "2 LINESTRING (76.65906 12.31389, 76.65912 12.313... \n", 268 | "3 LINESTRING (76.65600 12.30895, 76.65646 12.308... \n", 269 | "4 LINESTRING (76.65615 12.30989, 76.65600 12.308... \n", 270 | "... ... \n", 271 | "44601 LINESTRING (77.60806 12.97517, 77.60797 12.97508) \n", 272 | "44602 LINESTRING (77.60798 12.97519, 77.60806 12.97517) \n", 273 | "44603 LINESTRING (77.57115 13.00849, 77.57156 13.008... \n", 274 | "44604 LINESTRING (77.70140 12.95693, 77.70164 12.956... \n", 275 | "44605 LINESTRING (77.61040 12.97360, 77.61052 12.97365) \n", 276 | "\n", 277 | "[44606 rows x 12 columns]" 278 | ] 279 | }, 280 | "execution_count": 29, 281 | "metadata": {}, 282 | "output_type": "execute_result" 283 | } 284 | ], 285 | "source": [ 286 | "roads = gpd.read_file('all_roads.shp')\n", 287 | "roads" 288 | ] 289 | }, 290 | { 291 | "cell_type": "code", 292 | "execution_count": 21, 293 | "id": "9f02acdc-0e1c-4522-9d57-77d4d18cac58", 294 | "metadata": {}, 295 | "outputs": [], 296 | "source": [ 297 | "roads['osm_id'] = roads['osm_id'].astype(np.int64)" 298 | ] 299 | }, 300 | { 301 | "cell_type": "code", 302 | "execution_count": 22, 303 | "id": "ab5cec37-0085-40e0-90eb-215a2fc21d19", 304 | "metadata": {}, 305 | "outputs": [ 306 | { 307 | "data": { 308 | "text/html": [ 309 | "
\n", 310 | "\n", 323 | "\n", 324 | " \n", 325 | " \n", 326 | " \n", 327 | " \n", 328 | " \n", 329 | " \n", 330 | " \n", 331 | " \n", 332 | " \n", 333 | " \n", 334 | " \n", 335 | " \n", 336 | " \n", 337 | " \n", 338 | " \n", 339 | " \n", 340 | " \n", 341 | " \n", 342 | " \n", 343 | " \n", 344 | " \n", 345 | " \n", 346 | " \n", 347 | " \n", 348 | " \n", 349 | " \n", 350 | " \n", 351 | " \n", 352 | " \n", 353 | " \n", 354 | " \n", 355 | " \n", 356 | " \n", 357 | " \n", 358 | " \n", 359 | " \n", 360 | " \n", 361 | " \n", 362 | " \n", 363 | " \n", 364 | " \n", 365 | " \n", 366 | " \n", 367 | " \n", 368 | " \n", 369 | " \n", 370 | " \n", 371 | " \n", 372 | " \n", 373 | " \n", 374 | " \n", 375 | " \n", 376 | "
osm_id
022839392
122839393
222839393
359859093
470379000
......
257757854892
258757854893
259757854894
260757854895
261757867990
\n", 377 | "

262 rows × 1 columns

\n", 378 | "
" 379 | ], 380 | "text/plain": [ 381 | " osm_id\n", 382 | "0 22839392\n", 383 | "1 22839393\n", 384 | "2 22839393\n", 385 | "3 59859093\n", 386 | "4 70379000\n", 387 | ".. ...\n", 388 | "257 757854892\n", 389 | "258 757854893\n", 390 | "259 757854894\n", 391 | "260 757854895\n", 392 | "261 757867990\n", 393 | "\n", 394 | "[262 rows x 1 columns]" 395 | ] 396 | }, 397 | "execution_count": 22, 398 | "metadata": {}, 399 | "output_type": "execute_result" 400 | } 401 | ], 402 | "source": [ 403 | "subset = pd.read_excel('subset.xlsx')\n", 404 | "subset" 405 | ] 406 | }, 407 | { 408 | "cell_type": "code", 409 | "execution_count": null, 410 | "id": "c43bc1d6-5b2b-4850-aaec-7a3d740f47d0", 411 | "metadata": {}, 412 | "outputs": [], 413 | "source": [ 414 | "id_list = subset['osm_id']" 415 | ] 416 | }, 417 | { 418 | "cell_type": "code", 419 | "execution_count": 28, 420 | "id": "06d01e92-d707-4aef-91e2-b19bda7abc0c", 421 | "metadata": {}, 422 | "outputs": [ 423 | { 424 | "data": { 425 | "text/html": [ 426 | "
\n", 427 | "\n", 440 | "\n", 441 | " \n", 442 | " \n", 443 | " \n", 444 | " \n", 445 | " \n", 446 | " \n", 447 | " \n", 448 | " \n", 449 | " \n", 450 | " \n", 451 | " \n", 452 | " \n", 453 | " \n", 454 | " \n", 455 | " \n", 456 | " \n", 457 | " \n", 458 | " \n", 459 | " \n", 460 | " \n", 461 | " \n", 462 | " \n", 463 | " \n", 464 | " \n", 465 | " \n", 466 | " \n", 467 | " \n", 468 | " \n", 469 | " \n", 470 | " \n", 471 | " \n", 472 | " \n", 473 | " \n", 474 | " \n", 475 | " \n", 476 | " \n", 477 | " \n", 478 | " \n", 479 | " \n", 480 | " \n", 481 | " \n", 482 | " \n", 483 | " \n", 484 | " \n", 485 | " \n", 486 | " \n", 487 | " \n", 488 | " \n", 489 | " \n", 490 | " \n", 491 | " \n", 492 | " \n", 493 | " \n", 494 | " \n", 495 | " \n", 496 | " \n", 497 | " \n", 498 | " \n", 499 | " \n", 500 | " \n", 501 | " \n", 502 | " \n", 503 | " \n", 504 | " \n", 505 | " \n", 506 | " \n", 507 | " \n", 508 | " \n", 509 | " \n", 510 | " \n", 511 | " \n", 512 | " \n", 513 | " \n", 514 | " \n", 515 | " \n", 516 | " \n", 517 | " \n", 518 | " \n", 519 | " \n", 520 | " \n", 521 | " \n", 522 | " \n", 523 | " \n", 524 | " \n", 525 | " \n", 526 | " \n", 527 | " \n", 528 | " \n", 529 | " \n", 530 | " \n", 531 | " \n", 532 | " \n", 533 | " \n", 534 | " \n", 535 | " \n", 536 | " \n", 537 | " \n", 538 | " \n", 539 | " \n", 540 | " \n", 541 | " \n", 542 | " \n", 543 | " \n", 544 | " \n", 545 | " \n", 546 | " \n", 547 | " \n", 548 | " \n", 549 | " \n", 550 | " \n", 551 | " \n", 552 | " \n", 553 | " \n", 554 | " \n", 555 | " \n", 556 | " \n", 557 | " \n", 558 | " \n", 559 | " \n", 560 | " \n", 561 | " \n", 562 | " \n", 563 | " \n", 564 | " \n", 565 | " \n", 566 | " \n", 567 | " \n", 568 | " \n", 569 | " \n", 570 | " \n", 571 | " \n", 572 | " \n", 573 | " \n", 574 | " \n", 575 | " \n", 576 | " \n", 577 | " \n", 578 | " \n", 579 | " \n", 580 | " \n", 581 | " \n", 582 | " \n", 583 | " \n", 584 | " \n", 585 | " \n", 586 | " \n", 587 | " \n", 588 | " \n", 589 | " \n", 590 | " \n", 591 | " \n", 592 | " \n", 593 | " \n", 594 | " \n", 595 | " \n", 596 | " \n", 597 | " \n", 598 | " \n", 599 | " \n", 600 | " \n", 601 | " \n", 602 | " \n", 603 | " \n", 604 | " \n", 605 | " \n", 606 | " \n", 607 | " \n", 608 | " \n", 609 | " \n", 610 | " \n", 611 | " \n", 612 | " \n", 613 | " \n", 614 | " \n", 615 | " \n", 616 | " \n", 617 | " \n", 618 | " \n", 619 | " \n", 620 | " \n", 621 | " \n", 622 | " \n", 623 | " \n", 624 | " \n", 625 | "
fidosm_idcodefclassnamerefonewaymaxspeedlayerbridgetunnelgeometry
154155.0228393925112trunkNoneNH150AB00.0FFLINESTRING (76.53588 13.58250, 76.53623 13.583...
155156.0228393925112trunkNoneNH150AB00.0FFLINESTRING (76.53056 13.65858, 76.52986 13.659...
156157.0228393925112trunkNoneNH150AB00.0FFLINESTRING (76.53002 13.66245, 76.53007 13.662...
157158.0228393925112trunkNoneNH150AB00.0FFLINESTRING (76.53029 13.66308, 76.53031 13.663...
158159.0228393935115tertiaryNoneNoneB00.0FFLINESTRING (76.47633 13.57369, 76.47637 13.576...
.......................................
4422244223.07578548925115tertiaryNoneNoneB00.0FFLINESTRING (76.53855 13.59433, 76.53946 13.594...
4422344224.07578548935115tertiaryNoneNoneB00.0FFLINESTRING (76.52967 13.60141, 76.52974 13.600...
4422444225.07578548945115tertiaryNoneNoneB00.0FFLINESTRING (76.58205 13.55130, 76.58193 13.551...
4422544226.07578548955115tertiaryNoneNoneB00.0FFLINESTRING (76.57364 13.57775, 76.57367 13.577...
4422944230.07578679905115tertiaryNoneNoneB00.0FFLINESTRING (76.57557 13.35487, 76.57557 13.354...
\n", 626 | "

270 rows × 12 columns

\n", 627 | "
" 628 | ], 629 | "text/plain": [ 630 | " fid osm_id code fclass name ref oneway maxspeed \\\n", 631 | "154 155.0 22839392 5112 trunk None NH150A B 0 \n", 632 | "155 156.0 22839392 5112 trunk None NH150A B 0 \n", 633 | "156 157.0 22839392 5112 trunk None NH150A B 0 \n", 634 | "157 158.0 22839392 5112 trunk None NH150A B 0 \n", 635 | "158 159.0 22839393 5115 tertiary None None B 0 \n", 636 | "... ... ... ... ... ... ... ... ... \n", 637 | "44222 44223.0 757854892 5115 tertiary None None B 0 \n", 638 | "44223 44224.0 757854893 5115 tertiary None None B 0 \n", 639 | "44224 44225.0 757854894 5115 tertiary None None B 0 \n", 640 | "44225 44226.0 757854895 5115 tertiary None None B 0 \n", 641 | "44229 44230.0 757867990 5115 tertiary None None B 0 \n", 642 | "\n", 643 | " layer bridge tunnel geometry \n", 644 | "154 0.0 F F LINESTRING (76.53588 13.58250, 76.53623 13.583... \n", 645 | "155 0.0 F F LINESTRING (76.53056 13.65858, 76.52986 13.659... \n", 646 | "156 0.0 F F LINESTRING (76.53002 13.66245, 76.53007 13.662... \n", 647 | "157 0.0 F F LINESTRING (76.53029 13.66308, 76.53031 13.663... \n", 648 | "158 0.0 F F LINESTRING (76.47633 13.57369, 76.47637 13.576... \n", 649 | "... ... ... ... ... \n", 650 | "44222 0.0 F F LINESTRING (76.53855 13.59433, 76.53946 13.594... \n", 651 | "44223 0.0 F F LINESTRING (76.52967 13.60141, 76.52974 13.600... \n", 652 | "44224 0.0 F F LINESTRING (76.58205 13.55130, 76.58193 13.551... \n", 653 | "44225 0.0 F F LINESTRING (76.57364 13.57775, 76.57367 13.577... \n", 654 | "44229 0.0 F F LINESTRING (76.57557 13.35487, 76.57557 13.354... \n", 655 | "\n", 656 | "[270 rows x 12 columns]" 657 | ] 658 | }, 659 | "execution_count": 28, 660 | "metadata": {}, 661 | "output_type": "execute_result" 662 | } 663 | ], 664 | "source": [ 665 | "roads[roads['osm_id'].isin(id_list.values)]" 666 | ] 667 | } 668 | ], 669 | "metadata": { 670 | "kernelspec": { 671 | "display_name": "Python 3 (ipykernel)", 672 | "language": "python", 673 | "name": "python3" 674 | }, 675 | "language_info": { 676 | "codemirror_mode": { 677 | "name": "ipython", 678 | "version": 3 679 | }, 680 | "file_extension": ".py", 681 | "mimetype": "text/x-python", 682 | "name": "python", 683 | "nbconvert_exporter": "python", 684 | "pygments_lexer": "ipython3", 685 | "version": "3.10.5" 686 | } 687 | }, 688 | "nbformat": 4, 689 | "nbformat_minor": 5 690 | } 691 | -------------------------------------------------------------------------------- /python/simple_animation.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/spatialthoughts/projects/48433946a268b82197aa28e08bc12ca03b4c8ed6/python/simple_animation.gif -------------------------------------------------------------------------------- /python/stacked_barchart.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/spatialthoughts/projects/48433946a268b82197aa28e08bc12ca03b4c8ed6/python/stacked_barchart.jpg -------------------------------------------------------------------------------- /python/unpivot.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Unpivot GIS Data\n", 8 | "\n", 9 | "GIS analysis and visualizaiton typically needs data with 1 value per row. If your data is structured in columns, you need to 'unpivot' it to convert it to 1 value per row. 
Note that this will result in duplicate features, but they can be handled well in GIS.\n", 10 | "\n", 11 | "Example Excel workbook is structured like this\n", 12 | "\n", 13 | "| id | species1 | species2 | .. | value 1 | value 2 | ... |\n", 14 | "| -- | -------- | -------- | -- | ------- | ------- | --- |\n", 15 | "| 1 | s1 | s2 | .. | 10 | 20 | ... |\n", 16 | "\n", 17 | "This script will convert it to a table like follows\n", 18 | "\n", 19 | "\n", 20 | "| id | species | value |\n", 21 | "| -- | ------- | ----- |\n", 22 | "| 1 | s1 | 10 |\n", 23 | "| 1 | s2 | 20 |" 24 | ] 25 | }, 26 | { 27 | "cell_type": "code", 28 | "execution_count": null, 29 | "metadata": {}, 30 | "outputs": [], 31 | "source": [ 32 | "import pandas as pd" 33 | ] 34 | }, 35 | { 36 | "cell_type": "code", 37 | "execution_count": null, 38 | "metadata": {}, 39 | "outputs": [], 40 | "source": [ 41 | "input = 'original.xlsx'\n", 42 | "df = pd.read_excel(input)" 43 | ] 44 | }, 45 | { 46 | "cell_type": "code", 47 | "execution_count": null, 48 | "metadata": {}, 49 | "outputs": [], 50 | "source": [ 51 | "df1 = pd.melt(df, id_vars=['id', 'species1'], value_vars=['value1']).rename(columns = {'value1':'value'})\n", 52 | "df1 = pd.melt(df, id_vars=['id', 'species2'], value_vars=['value2']).rename(columns = {'value2':'value'})\n", 53 | "merged = pd.concat([df1, df2])" 54 | ] 55 | }, 56 | { 57 | "cell_type": "code", 58 | "execution_count": null, 59 | "metadata": {}, 60 | "outputs": [], 61 | "source": [ 62 | "# Drop rows which have null values " 63 | ] 64 | }, 65 | { 66 | "cell_type": "code", 67 | "execution_count": null, 68 | "metadata": {}, 69 | "outputs": [], 70 | "source": [ 71 | "merged = merged.dropna(axis=0, how='any')" 72 | ] 73 | }, 74 | { 75 | "cell_type": "code", 76 | "execution_count": null, 77 | "metadata": {}, 78 | "outputs": [], 79 | "source": [ 80 | "output = 'unpivoted.xlsx'\n", 81 | "merged.to_excel(output, index=False)" 82 | ] 83 | } 84 | ], 85 | "metadata": { 86 | "kernelspec": { 87 | "display_name": "Python 3", 88 | "language": "python", 89 | "name": "python3" 90 | }, 91 | "language_info": { 92 | "codemirror_mode": { 93 | "name": "ipython", 94 | "version": 3 95 | }, 96 | "file_extension": ".py", 97 | "mimetype": "text/x-python", 98 | "name": "python", 99 | "nbconvert_exporter": "python", 100 | "pygments_lexer": "ipython3", 101 | "version": "3.9.1" 102 | } 103 | }, 104 | "nbformat": 4, 105 | "nbformat_minor": 4 106 | } 107 | -------------------------------------------------------------------------------- /python/visvalingam_whyatt.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/spatialthoughts/projects/48433946a268b82197aa28e08bc12ca03b4c8ed6/python/visvalingam_whyatt.gif -------------------------------------------------------------------------------- /python/visvalingam_whyatt.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/spatialthoughts/projects/48433946a268b82197aa28e08bc12ca03b4c8ed6/python/visvalingam_whyatt.png -------------------------------------------------------------------------------- /qgis/freestyle/palette.txt: -------------------------------------------------------------------------------- 1 | # QGIS Generated Color Map Export File 2 | INTERPOLATION:INTERPOLATED 3 | 0,0,8,255,255,0 4 | 1000,101,146,82,255,1000 5 | 1500,190,202,130,255,1500 6 | 2000,241,225,145,255,2000 7 | 2500,244,200,126,255,2500 8 | 3000,197,147,117,255,3000 9 | 4000,204,169,170,255,4000 10 
| 4500,251,238,253,255,4500 11 | 5000,255,255,255,255,5000 12 | --------------------------------------------------------------------------------