├── .gitattributes ├── .gitignore ├── LICENSE ├── README.md ├── ee_ppipe ├── __init__.py ├── acl_changer.py ├── aoi.json ├── batch_copy.py ├── batch_mover.py ├── batch_remover.py ├── batch_uploader.py ├── batch_uploading.py ├── cleanup.py ├── cli_aoi2json.py ├── cli_jsonparse.py ├── cli_metadata.py ├── collectionprop.py ├── config.py ├── download.py ├── ee_auth.py ├── ee_ls.py ├── ee_ppipe.py ├── ee_ppipe_gui.bat ├── gee_assets_errors.log ├── gee_assets_info.log ├── gooey_config.json ├── kml_aoi.py ├── logconfig.json ├── message.py ├── metadata_loader.py ├── ogr2ft.py ├── planet_key.py ├── query.py ├── taskreport.py ├── testme.kml └── wrs_grid.csv ├── requirements.txt └── windows executable └── eePlanet-GUI.exe /.gitattributes: -------------------------------------------------------------------------------- 1 | # Auto detect text files and perform LF normalization 2 | * text=auto 3 | 4 | # Custom for Visual Studio 5 | *.cs diff=csharp 6 | 7 | # Standard to msysgit 8 | *.doc diff=astextplain 9 | *.DOC diff=astextplain 10 | *.docx diff=astextplain 11 | *.DOCX diff=astextplain 12 | *.dot diff=astextplain 13 | *.DOT diff=astextplain 14 | *.pdf diff=astextplain 15 | *.PDF diff=astextplain 16 | *.rtf diff=astextplain 17 | *.RTF diff=astextplain 18 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Windows image file caches 2 | Thumbs.db 3 | ehthumbs.db 4 | 5 | # Folder config file 6 | Desktop.ini 7 | 8 | # Recycle Bin used on file shares 9 | $RECYCLE.BIN/ 10 | 11 | # Windows Installer files 12 | *.cab 13 | *.msi 14 | *.msm 15 | *.msp 16 | 17 | # Windows shortcuts 18 | *.lnk 19 | 20 | # ========================= 21 | # Operating System Files 22 | # ========================= 23 | 24 | # OSX 25 | # ========================= 26 | 27 | .DS_Store 28 | .AppleDouble 29 | .LSOverride 30 | 31 | # Thumbnails 32 | ._* 33 | 34 | # Files that might appear in the root of a volume 35 | .DocumentRevisions-V100 36 | .fseventsd 37 | .Spotlight-V100 38 | .TemporaryItems 39 | .Trashes 40 | .VolumeIcon.icns 41 | 42 | # Directories potentially created on remote AFP share 43 | .AppleDB 44 | .AppleDesktop 45 | Network Trash Folder 46 | Temporary Items 47 | .apdisk 48 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 
22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. 
If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. 
Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "{}" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright {2017} {Samapriya Roy} 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 
202 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Planet GEE Pipeline GUI
2 | 
3 | [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.817739.svg)](https://doi.org/10.5281/zenodo.817739)
4 | [![JetStream](https://img.shields.io/badge/SupportedBy%3A-JetStream-brightgreen.svg)](https://jetstream-cloud.org/)
5 | [![Planet](https://img.shields.io/badge/SupportedBy%3A-Planet%20Ambassador%20Program-brightgreen.svg)](https://www.planet.com/products/education-and-research/)
6 | 
7 | The Planet Pipeline GUI grew out of the existing CLI (command-line interface) tools and bundles the tools required to access, activate, and download Planet Labs assets (PlanetScope and RapidEye OrthoTiles), as well as to parse their metadata into a tabular form that may be required by other applications.
8 | 
9 | ![GUI](http://i.imgur.com/ld9xJu6.gif)
10 | ## Table of contents
11 | * [Installation](#installation)
12 | * [Usage examples](#usage-examples)
13 | * [Planet Tools](#planet-tools)
14 | * [Planet Key](#planet-key)
15 | * [AOI JSON](#aoi-json)
16 | * [Activate or Check Asset](#activate-or-check-asset)
17 | * [Download Asset](#download-asset)
18 | * [Metadata Parser](#metadata-parser)
19 | * [Earth Engine Tools](#earth-engine-tools)
20 | * [EE User](#ee-user)
21 | * [Create](#create)
22 | * [Upload a directory with images and associate properties with each image:](#upload-a-directory-with-images-and-associate-properties-with-each-image)
23 | * [Upload a directory with images with specific NoData value to a selected destination:](#upload-a-directory-with-images-with-specific-nodata-value-to-a-selected-destination)
24 | * [Task Query](#task-query)
25 | * [Task Query during ingestion](#task-query-during-ingestion)
26 | * [Task Report](#task-report)
27 | * [Delete a collection with content:](#delete-a-collection-with-content)
28 | * [Assets Move](#assets-move)
29 | * [Assets Copy](#assets-copy)
30 | * [Assets Access](#assets-access)
31 | * [Set Collection Property](#set-collection-property)
32 | * [Convert to Fusion Table](#convert-to-fusion-table)
33 | * [Cleanup Utility](#cleanup-utility)
34 | * [Cancel all tasks](#cancel-all-tasks)
35 | * [Credits](#credits)
36 | ## Installation
37 | We assume the Earth Engine Python API is installed and Earth Engine has been authorised as described [here](https://developers.google.com/earth-engine/python_install). We also assume the Planet Python API is installed; you can install it by simply running
38 | ```
39 | pip install planet
40 | ```
41 | Further instructions can be found [here](https://www.planet.com/docs/api-quickstart-examples/cli/)
42 | 
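As a quick sanity check that both APIs are available, you can run a snippet like the one below. This is a minimal sketch and assumes the Earth Engine authorisation step above has already been completed.

```python
# Minimal sanity check for the two required APIs; assumes Earth Engine has
# already been authorised (e.g. via `earthengine authenticate`).
import ee      # Google Earth Engine Python API
import planet  # Planet Python API

ee.Initialize()  # fails if Earth Engine credentials are missing or expired
print("Earth Engine and Planet APIs are ready")
```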
43 | You will require two important packages for this to run:
44 | ```
45 | WxPython (which is what the GUI is built on)
46 | for Windows (tested on Windows 10)
47 | https://wxpython.org/download.php
48 | pip install wxPython
49 | 
50 | for Linux (tested on Ubuntu 16)
51 | sudo add-apt-repository "deb http://archive.ubuntu.com/ubuntu utopic main restricted universe"
52 | sudo apt-get update
53 | apt-cache search python-wxgtk3.0
54 | sudo apt-get install python-wxgtk3.0
55 | ```
56 | This toolbox also uses some functionality from GDAL.
57 | For installing GDAL on Ubuntu:
58 | ```
59 | sudo add-apt-repository ppa:ubuntugis/ppa && sudo apt-get update
60 | sudo apt-get install gdal-bin
61 | ```
62 | For Windows I found this [guide](https://sandbox.idre.ucla.edu/sandbox/tutorials/installing-gdal-for-windows) from UCLA
63 | 
64 | ## Usage examples
65 | Usage examples are segmented into two parts, covering the Planet tools as well as the Earth Engine tools; the Earth Engine tools include additional CLI developments that let you interact recursively with the Earth Engine Python API. To run the tool, open a command prompt window or terminal and type
66 | 
67 | ```
68 | python ee_ppipe.py
69 | ```
70 | For Windows users there is now a standalone executable, in the "windows executable" folder, that you can simply double-click to start. The tool does not require admin privileges, since it was created for use by everyone.
71 | 
72 | ## Planet Tools
73 | The Planet toolset consists of the tools required to access, activate, and download Planet Labs assets (PlanetScope and RapidEye OrthoTiles), as well as to parse metadata into a tabular form that may be required by other applications.
74 | 
75 | ### Planet Key
76 | This tool asks you to input your Planet API key using a password prompt; the key is then used by all subsequent tools.
77 | 
78 | ![planet_key](http://i.imgur.com/tv3FENS.gif)
79 | 
80 | If you are using a private machine, the key is saved as a CSV file for all future runs of the tool.
81 | 
82 | ### AOI JSON
83 | The aoijson tab within the toolset allows you to create filters and restructure your existing input file into one that can be used with Planet's API. The tool requires a start date, an end date, and a maximum cloud cover. You can choose from multiple input file types such as KML, zipped shapefile, GeoJSON, WKT, or even Landsat tiles based on Path/Row numbers. The geo option asks you to select an existing file, which will be converted into a formatted JSON file called aoi.json. If using the WRS option, just type in the 6-digit Path/Row combination and it will create the JSON file for you.
84 | 
85 | ![aoijson](http://i.imgur.com/1lLXKDP.gif)
86 | 
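The same filter can also be generated without the GUI by calling the `aoijson` function from `cli_aoi2json.py` directly. The sketch below uses placeholder paths and a GeoJSON input; the other input types ('KML', 'SHP', 'WKT', 'WRS') follow the same pattern.

```python
# Minimal sketch of scripted aoi.json creation; the paths are placeholders.
from cli_aoi2json import aoijson

aoijson(start="2016-06-01",          # start date as YYYY-MM-DD
        end="2016-08-31",            # end date as YYYY-MM-DD
        cloud="0.15",                # maximum cloud cover as a fraction (0-1)
        inputfile="GJSON",           # geometry source type
        geo="/path/to/map.geojson",  # existing geometry file to convert
        loc="/path/to/out_folder")   # folder where aoi.json is written
```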
87 | ### Activate or Check Asset
88 | The activatepl tab allows users to either check or activate Planet assets. Only PSOrthoTile and REOrthoTile are supported, because these were the two asset types I needed for my work, but the tool can easily be extended to other asset types. It makes use of an existing JSON file structured for use with the Planet API, or the aoi.json file created earlier.
89 | 
90 | ![activatepl](http://i.imgur.com/n2rdw6M.gif)
91 | 
92 | ### Download Asset
93 | Having metadata helps in organising your assets, but it is not mandatory - you can skip it.
94 | The downloadpl tab allows users to download assets. The platform can download the Asset or the Asset_XML (the metadata file) to the desired folders. Once again, I was only interested in these two asset types (PSOrthoTile and REOrthoTile) for my work, but the tool can easily be extended to other asset types.
95 | 
96 | ![downloadpl](http://i.imgur.com/muFYdqo.jpg)
97 | 
98 | ### Metadata Parser
99 | The metadata tab is a more powerful tool and parses metadata for PlanetScope OrthoTile and RapidEye OrthoTile assets, along with DigitalGlobe MultiSpectral and DigitalGlobe Panchromatic datasets. It was developed as a standalone tool to process XML metadata files from multiple sources and is an important step if the user plans to upload these assets to Google Earth Engine. The combined Planet-GEE Pipeline tool will be made available soon for testing.
100 | 
101 | ![metadata](http://i.imgur.com/lpYPrSv.jpg)
102 | 
103 | ## Earth Engine Tools
104 | The ambition is to help users with batch actions on assets, while interacting with and extending the capabilities of the existing GEE CLI. It is developed on a case-by-case basis, with more features to be included as they become available or as the need arises. This is also available as a separate package for Earth Engine users and can be downloaded [here](https://github.com/samapriya/gee_asset_manager_addon)
105 | 
106 | ### EE User
107 | This tool is designed to allow different users to change Earth Engine authentication credentials. It invokes the authentication call and copies the authentication key verification URL to the clipboard; the URL can then be pasted into a browser and the generated key pasted back.
108 | ![eeuser](http://i.imgur.com/LkMIZuc.jpg)
109 | 
110 | ### Create
111 | This tool allows you to create a collection or folder in your Earth Engine root directory. It uses the system CLI to achieve this and has been included to reduce the need to switch between multiple tools and the CLI.
112 | ![create](http://i.imgur.com/BCyCyUj.png)
113 | 
114 | ### Upload a directory with images to your myfolder/mycollection and associate properties with each image:
115 | 
116 | ![upload](http://i.imgur.com/O7jgpBV.jpg)
117 | 
118 | The script will prompt the user for their Google account password. The program will also check that none of the properties in path_to_metadata.csv contain characters that are illegal for GEE. Don't need metadata? Simply skip this option.
119 | 
120 | ### Task Query
121 | This script counts all currently running and ready tasks, along with failed tasks.
122 | 
123 | ![taskquery](http://i.imgur.com/tnEJS5h.jpg)
124 | 
125 | ### Task Query during ingestion
126 | This script can be used intermittently to look at running, failed, and ready (waiting) tasks during ingestion. It is a special case of the task query, used when uploading assets to a collection: by providing the collection path you can watch the collection size increase.
127 | 
128 | ![taskquery_ingestion](http://i.imgur.com/XX17Yvn.jpg)
129 | 
130 | 
131 | ### Task Report
132 | Sometimes it is important to generate a report of all tasks that are running or have finished. The generated report includes task ID, date and time, task status, and task type.
133 | 
134 | ![taskreport](http://i.imgur.com/lcllphp.jpg)
135 | 
136 | ### Delete a collection with content:
137 | 
138 | The delete is recursive, meaning it will also delete all child assets: images, collections, and folders. Use with caution!
139 | 
140 | ![delete](http://i.imgur.com/WESpx2O.jpg)
141 | 
142 | ### Assets Move
143 | This script allows us to recursively move assets from one collection to another.
144 | 
145 | ![assets-move](http://i.imgur.com/Wprb0wA.jpg)
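Under the hood the move tool lists the source collection with `earthengine ls` and issues one `earthengine mv` call per asset (see `batch_mover.py`). A minimal scripted equivalent, with placeholder asset paths, would look like this:

```python
# Minimal sketch of a scripted move; both asset paths are placeholders.
from batch_mover import mover

mover("users/your_username/source_collection",
      "users/your_username/destination_collection")
```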
146 | 
147 | ### Assets Copy
148 | This script allows us to recursively copy assets from one collection to another. If you have read access to assets from another user, this will also allow you to copy assets from their collections.
149 | 
150 | ![assets-copy](http://i.imgur.com/4DYfu9x.jpg)
151 | 
152 | ### Assets Access
153 | This tool allows you to set asset access for a folder, collection, or image recursively, meaning you can set access properties for multiple assets at the same time.
154 | 
155 | ![assets-access](http://i.imgur.com/t0ncujT.jpg)
156 | 
157 | ### Set Collection Property
158 | This script is derived from the Earth Engine tool for setting collection properties and will set overall properties for a collection.
159 | 
160 | ![collectionprop](http://i.imgur.com/nIhw4DC.jpg)
161 | 
162 | 
163 | ### Convert to Fusion Table
164 | Once validated with GDAL and Google Fusion Tables, it can be used to convert any geo object to a Google Fusion Table. Forked from and contributed by Gennadii [here](https://github.com/gena/ogr2ft). The script can only be used with a specific Google account.
165 | 
166 | ![ogrft](http://i.imgur.com/frRaQuZ.jpg)
167 | 
168 | ### Cleanup Utility
169 | This script is used to clean up folders once all processes have been completed. In short, it is a function to clear a folder on the local machine.
170 | 
171 | ![cleanup](http://i.imgur.com/WOVzk3A.jpg)
172 | 
173 | ### Cancel all tasks
174 | This is a simpler tool and can also be called directly from the earthengine CLI.
175 | 
176 | ![cancel](http://i.imgur.com/bTT9vRI.jpg)
177 | 
178 | 
179 | # Credits
180 | [JetStream](https://jetstream-cloud.org/) A portion of this work is supported by JetStream Grant TG-GEO160014.
181 | 
182 | Also supported by the [Planet Labs Ambassador Program](https://www.planet.com/markets/ambassador-signup/)
183 | 
--------------------------------------------------------------------------------
/ee_ppipe/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | 
3 | __author__ = 'Samapriya Roy'
4 | __email__ = 'samapriya@gmail.com'
5 | __version__ = '0.1.0'
6 | 
--------------------------------------------------------------------------------
/ee_ppipe/acl_changer.py:
--------------------------------------------------------------------------------
1 | import ee
2 | import ee.mapclient
3 | import subprocess
4 | 
5 | ##initialize earth engine
6 | ee.Initialize()
7 | 
8 | ##request type of asset, asset path and email to give permission
9 | def access(mode,asset,user):
10 |     if mode=='folder':
11 |         try:
12 |             for line in subprocess.check_output("earthengine ls"+" "+asset).split('\n'):
13 |                 asst=line
14 |                 print(asst)
15 |                 asset_acl=subprocess.check_output("earthengine acl ch "+asst+" -u"+" "+user)
16 |                 print(ee.data.getAssetAcl(asst))
17 |         except Exception:
18 |             print("Permissions Changed")
19 |     elif mode=='collection':
20 |         try:
21 |             asset_acl=subprocess.check_output("earthengine acl ch "+asset+" "+" -u"+" "+user)
22 |             print(ee.data.getAssetAcl(asset))
23 |             print("Permissions Changed")
24 |         except Exception:
25 |             print("Permissions Error Check Again")
26 |     elif mode=='image':
27 |         try:
28 |             asset_acl=subprocess.check_output("earthengine acl ch "+asset+" "+" -u"+" "+user)
29 |             print(ee.data.getAssetAcl(asset))
30 |             print("Permissions Changed")
31 |         except Exception:
32 |             print("Permissions Error Check Again")
33 | 
--------------------------------------------------------------------------------
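A hedged usage sketch for the `access()` helper above; the asset path and email address are placeholders, and importing the module already calls `ee.Initialize()`, so Earth Engine must be authenticated first.

```python
# Hypothetical usage of acl_changer.access(); asset path and email are placeholders.
from acl_changer import access

# Grant read access on every asset inside a folder; mode may also be
# 'collection' or 'image' for a single asset.
access(mode='folder',
       asset='users/your_username/your_folder',
       user='colleague@gmail.com')
```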
/ee_ppipe/aoi.json: -------------------------------------------------------------------------------- 1 | {"config": [{"field_name": "geometry", "config": {"type": "Polygon","coordinates":[[[-92.16027067390579, 29.96971980341075], [-92.16394061879356, 29.95751578496616], [-92.33631569280861, 29.27321264712756], [-91.57016657684537, 29.24348573701204], [-91.27087004160693, 29.86523810839692], [-92.16027067390579, 29.96971980341075]]]}, "type": "GeometryFilter"}, {"field_name": "gsd", "config": {"gte":1,"lte":9.99}, "type": "RangeFilter"}, {"field_name": "acquired", "config": {"gte":"2016-06-01T04:00:00.000Z","lte":"2016-08-31T03:59:59.999Z"}, "type": "DateRangeFilter"}, {"field_name": "cloud_cover", "config": {"gte":0,"lte":0.15}, "type": "RangeFilter"}], "type": "AndFilter"} -------------------------------------------------------------------------------- /ee_ppipe/batch_copy.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import logging 3 | import sys 4 | import ee 5 | import subprocess 6 | import string 7 | import os 8 | import ee 9 | 10 | def copy(initial,final): 11 | for line in subprocess.check_output("earthengine ls "+initial,shell=True).split('\n'): 12 | try: 13 | src= line 14 | dest=line.replace(initial,final) 15 | com=(str('earthengine cp ')+str(src)+' '+str(dest)) 16 | process = subprocess.call(com,shell=True) 17 | except Exception: 18 | print(com) 19 | with open(errorlogcopy.csv,'a') as csvfile: 20 | writer=csv.writer(csvfile,delimiter=',') 21 | writer.writerow([com]) 22 | csvfile.close() 23 | print("Assets Copied") 24 | -------------------------------------------------------------------------------- /ee_ppipe/batch_mover.py: -------------------------------------------------------------------------------- 1 | import fnmatch 2 | import logging 3 | import sys 4 | import fnmatch 5 | import logging 6 | import sys 7 | import ee 8 | import subprocess 9 | import string 10 | import os 11 | import ee 12 | 13 | def mover(assetpath,destinationpath): 14 | for line in subprocess.check_output("earthengine ls "+assetpath,shell=True).split('\n'): 15 | try: 16 | src= line 17 | dest=line.replace(assetpath,destinationpath) 18 | com=(str('earthengine mv ')+str(src)+' '+str(dest)) 19 | process = subprocess.call(com,shell=True) 20 | except Exception: 21 | print(com) 22 | with open(errorlogmove.csv,'a') as csvfile: 23 | writer=csv.writer(csvfile,delimiter=',') 24 | writer.writerow([com]) 25 | csvfile.close() 26 | print("Assets Move Completed") 27 | -------------------------------------------------------------------------------- /ee_ppipe/batch_remover.py: -------------------------------------------------------------------------------- 1 | import fnmatch 2 | import logging 3 | import sys 4 | 5 | import ee 6 | 7 | 8 | def delete(asset_path): 9 | root = asset_path[:asset_path.rfind('/')] 10 | all_assets_names = [e['id'] for e in ee.data.getList({'id': root})] 11 | filtered_names = fnmatch.filter(all_assets_names, asset_path) 12 | if not filtered_names: 13 | logging.warning('Nothing to remove. 
Exiting.') 14 | sys.exit(1) 15 | else: 16 | for path in filtered_names: 17 | __delete_recursive(path) 18 | logging.info('Collection %s removed', path) 19 | 20 | 21 | def __delete_recursive(asset_path): 22 | info = ee.data.getInfo(asset_path) 23 | if not info: 24 | logging.warning('Nothing to delete.') 25 | sys.exit(1) 26 | elif info['type'] == 'Image': 27 | pass 28 | elif info['type'] == 'Folder': 29 | items_in_destination = ee.data.getList({'id': asset_path}) 30 | for item in items_in_destination: 31 | logging.info('Removing items in %s folder', item['id']) 32 | delete(item['id']) 33 | else: 34 | items_in_destination = ee.data.getList({'id': asset_path}) 35 | for item in items_in_destination: 36 | ee.data.deleteAsset(item['id']) 37 | ee.data.deleteAsset(asset_path) 38 | -------------------------------------------------------------------------------- /ee_ppipe/batch_uploader.py: -------------------------------------------------------------------------------- 1 | import ast 2 | import getpass 3 | import glob 4 | import logging 5 | import os 6 | import sys 7 | import csv 8 | import time 9 | 10 | if sys.version_info > (3, 0): 11 | from urllib.parse import unquote 12 | else: 13 | from urllib import unquote 14 | 15 | import ee 16 | import requests 17 | import retrying 18 | from requests_toolbelt.multipart import encoder 19 | from bs4 import BeautifulSoup 20 | 21 | from metadata_loader import load_metadata_from_csv, validate_metadata_from_csv 22 | 23 | 24 | def upload(user, source_path, destination_path, metadata_path=None, multipart_upload=False, nodata_value=None): 25 | """ 26 | Uploads content of a given directory to GEE. The function first uploads an asset to Google Cloud Storage (GCS) 27 | and then uses ee.data.startIngestion to put it into GEE, Due to GCS intermediate step, users is asked for 28 | Google's account name and password. 29 | 30 | In case any exception happens during the upload, the function will repeat the call a given number of times, after 31 | which the error will be propagated further. 32 | 33 | :param user: name of a Google account 34 | :param source_path: path to a directory 35 | :param destination_path: where to upload (absolute path) 36 | :param metadata_path: (optional) path to file with metadata 37 | :param multipart_upload: (optional) alternative mode op upload - use if the other one fails 38 | :param nodata_value: (optinal) value to burn into raster for missind data in the image 39 | :return: 40 | """ 41 | submitted_tasks_id = {} 42 | 43 | __verify_path_for_upload(destination_path) 44 | 45 | path = os.path.join(os.path.expanduser(source_path), '*.tif') 46 | all_images_paths = glob.glob(path) 47 | 48 | if len(all_images_paths) == 0: 49 | logging.error('%s does not contain any tif images.', path) 50 | sys.exit(1) 51 | 52 | metadata = load_metadata_from_csv(metadata_path) if metadata_path else None 53 | 54 | password = getpass.getpass() 55 | google_session = __get_google_auth_session(user, password) 56 | 57 | __create_image_collection(destination_path) 58 | 59 | images_for_upload_path = __find_remaining_assets_for_upload(all_images_paths, destination_path) 60 | no_images = len(images_for_upload_path) 61 | 62 | if no_images == 0: 63 | logging.error('No images found that match %s. 
Exiting...', path) 64 | sys.exit(1) 65 | 66 | failed_asset_writer = FailedAssetsWriter() 67 | 68 | for current_image_no, image_path in enumerate(images_for_upload_path): 69 | logging.info('Processing image %d out of %d: %s', current_image_no+1, no_images, image_path) 70 | filename = __get_filename_from_path(path=image_path) 71 | 72 | asset_full_path = destination_path + '/' + filename 73 | 74 | if metadata and not filename in metadata: 75 | logging.warning("No metadata exists for image %s: it will not be ingested", filename) 76 | failed_asset_writer.writerow([filename, 0, 'Missing metadata']) 77 | continue 78 | 79 | properties = metadata[filename] if metadata else None 80 | 81 | try: 82 | task_id = __upload_to_gcs_and_start_ingestion_task(asset_full_path, google_session, image_path, 83 | properties, multipart_upload, nodata_value) 84 | submitted_tasks_id[task_id] = filename 85 | __periodic_check(current_image=current_image_no, period=20, tasks=submitted_tasks_id, writer=failed_asset_writer) 86 | except Exception as e: 87 | logging.exception('Upload of %s has failed.', filename) 88 | failed_asset_writer.writerow([filename, 0, str(e)]) 89 | 90 | __check_for_failed_tasks_and_report(tasks=submitted_tasks_id, writer=failed_asset_writer) 91 | failed_asset_writer.close() 92 | 93 | 94 | def __verify_path_for_upload(path): 95 | folder = path[:path.rfind('/')] 96 | response = ee.data.getInfo(folder) 97 | if not response: 98 | logging.error('%s is not a valid destination. Make sure full path is provided e.g. users/user/nameofcollection ' 99 | 'or projects/myproject/myfolder/newcollection and that you have write access there.', path) 100 | sys.exit(1) 101 | 102 | 103 | def __find_remaining_assets_for_upload(path_to_local_assets, path_remote): 104 | local_assets = [__get_filename_from_path(path) for path in path_to_local_assets] 105 | if __collection_exist(path_remote): 106 | remote_assets = __get_asset_names_from_collection(path_remote) 107 | if len(remote_assets) > 0: 108 | assets_left_for_upload = set(local_assets) - set(remote_assets) 109 | if len(assets_left_for_upload) == 0: 110 | logging.warning('Collection already exists and contains all assets provided for upload. Exiting ...') 111 | sys.exit(1) 112 | 113 | logging.info('Collection already exists. 
%d assets left for upload to %s.', len(assets_left_for_upload), path_remote) 114 | assets_left_for_upload_full_path = [path for path in path_to_local_assets 115 | if __get_filename_from_path(path) in assets_left_for_upload] 116 | return assets_left_for_upload_full_path 117 | 118 | return path_to_local_assets 119 | 120 | 121 | def retry_if_ee_error(exception): 122 | return isinstance(exception, ee.EEException) 123 | 124 | 125 | @retrying.retry(retry_on_exception=retry_if_ee_error, wait_exponential_multiplier=1000, wait_exponential_max=4000, stop_max_attempt_number=3) 126 | def __upload_to_gcs_and_start_ingestion_task(asset_full_path, google_session, image_path, properties, 127 | multipart_upload, nodata_value): 128 | asset_request = __upload_file(session=google_session, 129 | file_path=image_path, 130 | asset_name=asset_full_path, 131 | use_multipart=multipart_upload, 132 | properties=properties, 133 | nodata=nodata_value) 134 | task_id = ee.data.newTaskId(1)[0] 135 | _ = ee.data.startIngestion(task_id, asset_request) 136 | return task_id 137 | 138 | 139 | def __validate_metadata(path_for_upload, metadata_path): 140 | validation_result = validate_metadata_from_csv(metadata_path) 141 | keys_in_metadata = {result.keys for result in validation_result} 142 | images_paths = glob.glob(os.path.join(path_for_upload, '*.tif*')) 143 | keys_in_data = {__get_filename_from_path(path) for path in images_paths} 144 | missing_keys = keys_in_data - keys_in_metadata 145 | 146 | if missing_keys: 147 | logging.warning('%d images does not have a corresponding key in metadata', len(missing_keys)) 148 | print('\n'.join(e for e in missing_keys)) 149 | else: 150 | logging.info('All images have metadata available') 151 | 152 | if not validation_result.success: 153 | print('Validation finished with errors. Type "y" to continue, default NO: ') 154 | choice = input().lower() 155 | if choice not in ['y', 'yes']: 156 | logging.info('Application will terminate') 157 | exit(1) 158 | 159 | 160 | def __extract_metadata_for_image(filename, metadata): 161 | if filename in metadata: 162 | return metadata[filename] 163 | else: 164 | logging.warning('Metadata for %s not found', filename) 165 | return None 166 | 167 | 168 | def __get_google_auth_session(username, password): 169 | google_accounts_url = 'https://accounts.google.com' 170 | authentication_url = 'https://accounts.google.com/ServiceLoginAuth' 171 | 172 | session = requests.session() 173 | 174 | login_html = session.get(google_accounts_url) 175 | soup_login = BeautifulSoup(login_html.content, 'html.parser').find('form').find_all('input') 176 | payload = {} 177 | for u in soup_login: 178 | if u.has_attr('value'): 179 | payload[u['name']] = u['value'] 180 | 181 | payload['Email'] = username 182 | payload['Passwd'] = password 183 | 184 | auto = login_html.headers.get('X-Auto-Login') 185 | follow_up = unquote(unquote(auto)).split('continue=')[-1] 186 | #galx = login_html.cookies['GALX'] 187 | 188 | payload['continue'] = follow_up 189 | #payload['GALX'] = galx 190 | 191 | session.post(authentication_url, data=payload) 192 | 193 | # get url and discard; somehow it does not work for the first time 194 | session.get('https://ee-api.appspot.com/assets/upload/geturl?') 195 | 196 | return session 197 | 198 | 199 | def __get_upload_url(session): 200 | r = session.get('https://ee-api.appspot.com/assets/upload/geturl?') 201 | if r.text.startswith('\n'): 202 | logging.error('Incorrect credentials. Probably. If you are sure the credentials are OK, refresh the authentication token. 
' 203 | 'If it did not work report a problem. They might have changed something in the Matrix.') 204 | sys.exit(1) 205 | d = ast.literal_eval(r.text) 206 | return d['url'] 207 | 208 | 209 | def __upload_file(session, file_path, asset_name, use_multipart, properties=None, nodata=None): 210 | with open(file_path, 'rb') as f: 211 | upload_url = __get_upload_url(session) 212 | 213 | if use_multipart: 214 | form = encoder.MultipartEncoder({ 215 | "documents": (file_path, f, "application/octet-stream"), 216 | "composite": "NONE", 217 | }) 218 | headers = {"Prefer": "respond-async", "Content-Type": form.content_type} 219 | resp = session.post(upload_url, headers=headers, data=form) 220 | else: 221 | files = {'file': f} 222 | resp = session.post(upload_url, files=files) 223 | 224 | gsid = resp.json()[0] 225 | asset_data = {"id": asset_name, 226 | "tilesets": [ 227 | {"sources": [ 228 | {"primaryPath": gsid, 229 | "additionalPaths": [] 230 | } 231 | ]} 232 | ], 233 | "bands": [], 234 | "properties": properties, 235 | "missingData": {"value": nodata} 236 | } 237 | return asset_data 238 | 239 | 240 | def __periodic_check(current_image, period, tasks, writer): 241 | if (current_image + 1) % period == 0: 242 | logging.info('Periodic check') 243 | __check_for_failed_tasks_and_report(tasks=tasks, writer=writer) 244 | # Time to check how many tasks are running! 245 | __wait_for_tasks_to_complete(waiting_time=10, no_allowed_tasks_running=20) 246 | 247 | 248 | def __check_for_failed_tasks_and_report(tasks, writer): 249 | if len(tasks) == 0: 250 | return 251 | 252 | statuses = ee.data.getTaskStatus(tasks.keys()) 253 | 254 | for status in statuses: 255 | if status['state'] == 'FAILED': 256 | task_id = status['id'] 257 | filename = tasks[task_id] 258 | error_message = status['error_message'] 259 | writer.writerow(filename, task_id, error_message) 260 | logging.error('Ingestion of image %s has failed with message %s', filename, error_message) 261 | 262 | tasks.clear() 263 | 264 | 265 | def __get_filename_from_path(path): 266 | return os.path.splitext(os.path.basename(os.path.normpath(path)))[0] 267 | 268 | 269 | def __get_number_of_running_tasks(): 270 | return len([task for task in ee.data.getTaskList() if task['state'] == 'RUNNING']) 271 | 272 | 273 | def __wait_for_tasks_to_complete(waiting_time, no_allowed_tasks_running): 274 | tasks_running = __get_number_of_running_tasks() 275 | while tasks_running > no_allowed_tasks_running: 276 | logging.info('Number of running tasks is %d. 
Sleeping for %d s until it goes down to %d', 277 | tasks_running, waiting_time, no_allowed_tasks_running) 278 | time.sleep(waiting_time) 279 | tasks_running = __get_number_of_running_tasks() 280 | 281 | 282 | def __collection_exist(path): 283 | return True if ee.data.getInfo(path) else False 284 | 285 | 286 | def __create_image_collection(full_path_to_collection): 287 | if __collection_exist(full_path_to_collection): 288 | logging.warning("Collection %s already exists", full_path_to_collection) 289 | else: 290 | ee.data.createAsset({'type': ee.data.ASSET_TYPE_IMAGE_COLL}, full_path_to_collection) 291 | logging.info('New collection %s created', full_path_to_collection) 292 | 293 | 294 | def __get_asset_names_from_collection(collection_path): 295 | assets_list = ee.data.getList(params={'id': collection_path}) 296 | assets_names = [os.path.basename(asset['id']) for asset in assets_list] 297 | return assets_names 298 | 299 | 300 | class FailedAssetsWriter(object): 301 | 302 | def __init__(self): 303 | self.initialized = False 304 | 305 | def writerow(self, row): 306 | if not self.initialized: 307 | if sys.version_info > (3, 0): 308 | self.failed_upload_file = open('failed_upload.csv', 'w') 309 | else: 310 | self.failed_upload_file = open('failed_upload.csv', 'wb') 311 | self.failed_upload_writer = csv.writer(self.failed_upload_file) 312 | self.failed_upload_writer.writerow(['filename', 'task_id', 'error_msg']) 313 | self.initialized = True 314 | self.failed_upload_writer.writerow(row) 315 | 316 | def close(self): 317 | if self.initialized: 318 | self.failed_upload_file.close() 319 | self.initialized = False 320 | -------------------------------------------------------------------------------- /ee_ppipe/batch_uploading.py: -------------------------------------------------------------------------------- 1 | import os 2 | import csv 3 | import getpass 4 | def upload(user,source_path,destination_path,metadata_path,nodata_value): 5 | os.system("python ppipe.py upload -u "+str(user)+" --source "+str(source_path)+" --dest "+str(destination_path)+" -m "+str(metadata_path)+" --nodata "+str(nodata_value)) 6 | -------------------------------------------------------------------------------- /ee_ppipe/cleanup.py: -------------------------------------------------------------------------------- 1 | import os 2 | import shutil 3 | def cleanout(dirpath): 4 | shutil.rmtree(dirpath) 5 | os.mkdir(dirpath) 6 | print("Directory Cleaned") 7 | -------------------------------------------------------------------------------- /ee_ppipe/cli_aoi2json.py: -------------------------------------------------------------------------------- 1 | import subprocess 2 | import os 3 | import json 4 | from pprint import pprint 5 | import argparse 6 | import sys 7 | import time 8 | import os 9 | import csv 10 | import shapefile 11 | import string 12 | p1='{"config": [{"field_name": "geometry", "config": {"type": "Polygon","coordinates":' 13 | p2='}, "type": "GeometryFilter"}, {"field_name": "gsd", "config": {"gte":1,"lte":9.99}, "type": "RangeFilter"}, {"field_name": "acquired", "config": {"gte":"' 14 | p3='T04:00:00.000Z","lte":"' 15 | p4='T03:59:59.999Z"}, "type": "DateRangeFilter"}, {"field_name": "cloud_cover", "config": {"gte":0' 16 | p5=',"lte":' 17 | p6='}, "type": "RangeFilter"}], "type": "AndFilter"}' 18 | 19 | dir_path = os.path.dirname(os.path.realpath(__file__)) 20 | 21 | def aoijson(start,end,cloud,inputfile,geo,loc): 22 | if inputfile == 'KML': 23 | os.system("python kml_aoi.py --start "+start+" --end "+end+" --cloud 
"+cloud+" --geo "+geo+" --loc "+loc) 24 | elif inputfile=='WRS': 25 | with open(dir_path+'/wrs_grid.csv', 'rb') as f: 26 | reader = csv.reader(f) 27 | for row in reader: 28 | if row[13]== geo: 29 | a=str(row[14]) 30 | strpd=a.split(':')[3].strip('}') 31 | filenames = p1+strpd+p2+str(start)+p3+str(end)+p4+p5+str(cloud)+p6 32 | with open(loc+'/aoi.json', 'w') as outfile: 33 | outfile.write(filenames) 34 | outfile.close() 35 | elif inputfile == 'GJSON': 36 | raw= open(geo) 37 | for line in raw: 38 | fields=line.strip().split(":")[7] 39 | f2=fields.strip().split("}")[0] 40 | filenames = p1+f2+p2+str(start)+p3+str(end)+p4+p5+str(cloud)+p6 41 | with open(loc+'/aoi.json', 'w') as outfile: 42 | outfile.write(filenames) 43 | outfile.close() 44 | elif inputfile == 'SHP': 45 | reader = shapefile.Reader(geo) 46 | fields = reader.fields[1:] 47 | field_names = [field[0] for field in fields] 48 | buffer = [] 49 | for sr in reader.shapeRecords(): 50 | atr = dict(zip(field_names, sr.record)) 51 | geom = sr.shape.__geo_interface__ 52 | buffer.append(dict(type="Feature", \ 53 | geometry=geom, properties=atr)) 54 | geom2=str(geom).replace("(",'[') 55 | geom3=str(geom2).replace(")",']') 56 | geom4=str(geom3).replace(",]",']') 57 | #print(geom5) 58 | # write the GeoJSON file 59 | with open(loc+'/int.geojson','w') as csvfile: 60 | writer=csv.writer(csvfile) 61 | writer.writerow([str(geom4)]) 62 | raw= open(loc+'/int.geojson') 63 | for line in raw: 64 | fields=line.strip().split(":")[2] 65 | f2=fields.strip().split("}")[0] 66 | filenames = p1+f2+p2+str(start)+p3+str(end)+p4+p5+str(cloud)+p6 67 | with open(loc+'/aoi.json', 'w') as outfile: 68 | outfile.write(filenames) 69 | outfile.close() 70 | elif inputfile == 'WKT': 71 | raw= open(geo) 72 | for line in raw: 73 | l1=str(line).replace("POLYGON ((","[[[") 74 | l2=str(l1).replace("))","]]]") 75 | l3=str(l2).replace(", ","],[") 76 | l4=str(l3).replace(" ",",") 77 | filenames = p1+l4+p2+str(start)+p3+str(end)+p4+p5+str(cloud)+p6 78 | with open(loc+'/aoi.json', 'w') as outfile: 79 | outfile.write(filenames) 80 | outfile.close() 81 | -------------------------------------------------------------------------------- /ee_ppipe/cli_jsonparse.py: -------------------------------------------------------------------------------- 1 | import subprocess 2 | import os 3 | import json 4 | from pprint import pprint 5 | import argparse 6 | import sys 7 | import time 8 | import progressbar 9 | def main(): 10 | parser = argparse.ArgumentParser() 11 | parser.add_argument('--start', help='Start date in YYYY-MM-DD?') 12 | parser.add_argument('--end', help='End date in YYYY-MM-DD?') 13 | parser.add_argument('--cloud', help='Maximum Cloud Cover(0-1) representing 0-100') 14 | parser.add_argument('--asset', type=str,help='Whether PlanetScope or RapidEye assets(PS/RE)') 15 | parser.add_argument('--geo', default='./map.geojson',help='map.geojson file') 16 | parser.add_argument('--activate',default='2000',help='Enter estimated time for activation') 17 | args = parser.parse_args() 18 | sys.stdout.write(str(parsed(args))) 19 | 20 | def parsed(args): 21 | if args.asset == 'PS': 22 | subprocess.call("mkdir rexml psxml dgxml PlanetScope RapidEye dg", shell=False) 23 | raw= open(args.geo) 24 | for line in raw: 25 | fields=line.strip().split(":")[7] 26 | f2=fields.strip().split("}") 27 | with open('./poly.txt', 'w') as poly: 28 | poly.write(str(f2[0])) 29 | poly.close() 30 | with open('./st.txt', 'w') as strt: 31 | strt.write(args.start) 32 | strt.close() 33 | with open('./end.txt', 'w') as endr: 34 | 
endr.write(args.end) 35 | endr.close() 36 | with open('./cld.txt', 'w') as cld: 37 | cld.write(args.cloud) 38 | cld.close() 39 | filenames = ['./p1.txt', './poly.txt','./p2.txt','./st.txt','./p3.txt','./end.txt','./p4.txt','./p5.txt','./cld.txt','./p6.txt'] 40 | with open('./aoi.json', 'w') as outfile: 41 | for fname in filenames: 42 | with open(fname) as infile: 43 | outfile.write(infile.read()) 44 | subprocess.call("python download.py --query aoi.json --activate PSOrthoTile analytic", shell=False) 45 | subprocess.call("python download.py --query aoi.json --activate PSOrthoTile analytic_xml", shell=False) 46 | bar=progressbar.ProgressBar() 47 | for i in bar(range(int(args.activate))): 48 | time.sleep(1) 49 | subprocess.call("python download.py --query aoi.json --download ""./PlanetScope/"" PSOrthoTile analytic", shell=False) 50 | subprocess.call("python download.py --query aoi.json --download ""./psxml/"" PSOrthoTile analytic_xml", shell=False) 51 | elif args.asset == 'RE': 52 | subprocess.call("mkdir rexml psxml dgxml PlanetScope RapidEye dg", shell=False) 53 | raw= open(args.geo) 54 | for line in raw: 55 | fields=line.strip().split(":")[7] 56 | f2=fields.strip().split("}") 57 | with open('./poly.txt', 'w') as poly: 58 | poly.write(str(f2[0])) 59 | poly.close() 60 | with open('./st.txt', 'w') as strt: 61 | strt.write(args.start) 62 | strt.close() 63 | with open('./end.txt', 'w') as endr: 64 | endr.write(args.end) 65 | endr.close() 66 | with open('./cld.txt', 'w') as cld: 67 | cld.write(args.cloud) 68 | cld.close() 69 | filenames = ['./p1.txt', './poly.txt','./p2.txt','./st.txt','./p3.txt','./end.txt','./p4.txt','./p5.txt','./cld.txt','./p6.txt'] 70 | with open('./aoi.json', 'w') as outfile: 71 | for fname in filenames: 72 | with open(fname) as infile: 73 | outfile.write(infile.read()) 74 | subprocess.call("python download.py --query aoi.json --activate REOrthoTile analytic", shell=True) 75 | subprocess.call("python download.py --query aoi.json --activate REOrthoTile analytic_xml", shell=True) 76 | for i in range(int(args.activate)): 77 | print i, 78 | sys.stdout.flush() 79 | time.sleep(1) 80 | subprocess.call("python download.py --query aoi.json --download ""./RapidEye/"" REOrthoTile analytic", shell=True) 81 | subprocess.call("python download.py --query aoi.json --download ""./rexml/"" REOrthoTile analytic_xml", shell=True) 82 | 83 | if __name__ == '__main__': 84 | main() 85 | -------------------------------------------------------------------------------- /ee_ppipe/cli_metadata.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import sys 3 | import os 4 | import sys 5 | import time 6 | import csv 7 | import sys 8 | 9 | def metadata(asset,mf,mfile,errorlog): 10 | if asset == 'PSO': # PS OrthoTile Analytic 11 | folder = mf 12 | with open(mfile,'wb') as csvfile: 13 | writer=csv.DictWriter(csvfile,fieldnames=["id_no", "system:time_start", "platform", "satType","satID", "tileID", "numBands", "cloudcover","incAngle","illAzAngle","illElvAngle","azAngle","spcAngle","rsf","refCoeffB1","refCoeffB2","refCoeffB3","refCoeffB4"], delimiter=',') 14 | writer.writeheader() 15 | with open(errorlog,'wb') as csvfile: 16 | writer=csv.DictWriter(csvfile,fieldnames=["id_no"], delimiter=',') 17 | writer.writeheader() 18 | for filename in os.listdir(folder): 19 | infilename = os.path.join(folder,filename) 20 | fsp=filename.split("_x")[0] 21 | try: 22 | from xml.dom import minidom #This gets the main xml parse tree 23 | xmldoc=minidom.parse(infilename) 
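                # The eop:/ps:/opt: elements read below come from the PlanetScope OrthoTile
                # analytic XML metadata; each .firstChild.data call extracts the text value of
                # that tag (acquisition date, tile ID, band count, cloud cover, view and
                # illumination angles, and per-band reflectance coefficients).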
24 | ps=xmldoc.getElementsByTagName("ps:EarthObservationMetaData")[0] 25 | observation=xmldoc.getElementsByTagName("ps:EarthObservationResult") [0] 26 | eopfilename=xmldoc.getElementsByTagName("eop:fileName")[0].firstChild.data 27 | meta=xmldoc.getElementsByTagName("ps:EarthObservationMetaData")[0] 28 | acquisition= meta.getElementsByTagName("eop:acquisitionDate")[0].firstChild.data 29 | tile=meta.getElementsByTagName("ps:tileId")[0].firstChild.data 30 | equip=xmldoc.getElementsByTagName("eop:Platform")[0] 31 | platform=equip.getElementsByTagName("eop:shortName")[0].firstChild.data 32 | sid=equip.getElementsByTagName("eop:serialIdentifier")[0].firstChild.data 33 | equip=xmldoc.getElementsByTagName("eop:instrument")[0] 34 | sattype=equip.getElementsByTagName("eop:shortName")[0].firstChild.data 35 | bands=xmldoc.getElementsByTagName("ps:numBands")[0].firstChild.data 36 | cloud=xmldoc.getElementsByTagName("opt:cloudCoverPercentage")[0].firstChild.data 37 | psb=xmldoc.getElementsByTagName("ps:bandNumber")[0].firstChild.data 38 | psb1=xmldoc.getElementsByTagName("ps:bandNumber")[1].firstChild.data 39 | psb3=xmldoc.getElementsByTagName("ps:bandNumber")[2].firstChild.data 40 | psb4=xmldoc.getElementsByTagName("ps:bandNumber")[3].firstChild.data 41 | psbrad=xmldoc.getElementsByTagName("ps:radiometricScaleFactor")[0].firstChild.data 42 | psb1ref=xmldoc.getElementsByTagName("ps:reflectanceCoefficient")[0].firstChild.data 43 | psb2ref=xmldoc.getElementsByTagName("ps:reflectanceCoefficient")[1].firstChild.data 44 | psb3ref=xmldoc.getElementsByTagName("ps:reflectanceCoefficient")[2].firstChild.data 45 | psb4ref=xmldoc.getElementsByTagName("ps:reflectanceCoefficient")[3].firstChild.data 46 | psia=xmldoc.getElementsByTagName("eop:incidenceAngle")[0].firstChild.data 47 | psilaz=xmldoc.getElementsByTagName("opt:illuminationAzimuthAngle")[0].firstChild.data 48 | psilelv=xmldoc.getElementsByTagName("opt:illuminationElevationAngle")[0].firstChild.data 49 | psaz=xmldoc.getElementsByTagName("ps:azimuthAngle")[0].firstChild.data 50 | pssca=xmldoc.getElementsByTagName("ps:spaceCraftViewAngle")[0].firstChild.data 51 | print("ID_Name:", eopfilename.split(".")[0]) 52 | print("Acquisition Date:", acquisition.split("T")[0]) 53 | print("Satellite Type:", platform) 54 | print("ShortName:", sattype) 55 | print("Satellite ID:", str(sid)) 56 | print("Tile ID:", tile) 57 | print("Number of Bands:", bands) 58 | print("Cloud Cover:", format(float(cloud),'.2f')) 59 | print("PS Incidence Angle",format(float(psia),'.4f')) 60 | print("PS illumination azimuth angle",format(float(psilaz),'.2f')) 61 | print("PS illumination elevation angle",format(float(psilelv),'.2f')) 62 | print("PS Azimuth angle",format(float(psaz),'.2f')) 63 | print("PS SpaceCraft angle",format(float(pssca),'.4f')) 64 | print("Radiometric Scale Factor",psbrad) 65 | print("ReflectanceCoefficient B1",format(float(psb1ref),'.8f')) 66 | print("ReflectanceCoefficient B2",format(float(psb2ref),'.8f')) 67 | print("ReflectanceCoefficient B3",format(float(psb3ref),'.8f')) 68 | print("ReflectanceCoefficient B4",format(float(psb4ref),'.8f')) 69 | date_time = acquisition.split("T")[0] 70 | pattern = '%Y-%m-%d' 71 | epoch = int(time.mktime(time.strptime(date_time, pattern)))*1000 72 | print("epoch time", epoch) 73 | with open(mfile,'a') as csvfile: 74 | writer=csv.writer(csvfile,delimiter=',',lineterminator='\n') 75 | writer.writerow([fsp,epoch,platform,sattype,str(sid),tile,bands,format(float(cloud),'.2f'),format(float(psia),'.4f'),format(float(psilaz),'.2f'), 76 | 
format(float(psilelv),'.2f'),format(float(psaz),'.2f'),format(float(pssca),'.4f'),psbrad,format(float(psb1ref),'.8f'), 77 | format(float(psb2ref),'.8f'),format(float(psb3ref),'.8f'),format(float(psb4ref),'.8f')]) 78 | csvfile.close() 79 | except Exception: 80 | print(infilename) 81 | with open(errorlog,'a') as csvfile: 82 | writer=csv.writer(csvfile,delimiter=',',lineterminator='\n') 83 | writer.writerow([infilename]) 84 | csvfile.close() 85 | 86 | if asset == 'PSO_DN': #PS OrthoTile Analytic Derivative DN 87 | folder = mf 88 | with open(mfile,'wb') as csvfile: 89 | writer=csv.DictWriter(csvfile,fieldnames=["id_no", "system:time_start", "platform", "satType","satID", "tileID", "numBands", "cloudcover","incAngle","illAzAngle","illElvAngle","azAngle","spcAngle"], delimiter=',') 90 | writer.writeheader() 91 | with open(errorlog,'wb') as csvfile: 92 | writer=csv.DictWriter(csvfile,fieldnames=["id_no"], delimiter=',') 93 | writer.writeheader() 94 | for filename in os.listdir(folder): 95 | infilename = os.path.join(folder, filename) 96 | fsp = filename.split('_x')[0] 97 | try: 98 | from xml.dom import minidom 99 | xmldoc = minidom.parse(infilename) 100 | ps4band=xmldoc.getElementsByTagName('ps:EarthObservationMetaData')[0] 101 | eopfilename = xmldoc.getElementsByTagName('eop:identifier')[0].firstChild.data 102 | sid=xmldoc.getElementsByTagName("eop:serialIdentifier")[0].firstChild.data 103 | acquisition = xmldoc.getElementsByTagName('eop:acquisitionDate')[0].firstChild.data 104 | platform=xmldoc.getElementsByTagName("eop:shortName")[0].firstChild.data 105 | tile=xmldoc.getElementsByTagName("ps:tileId")[0].firstChild.data 106 | sattype=xmldoc.getElementsByTagName("eop:shortName")[1].firstChild.data 107 | bands=xmldoc.getElementsByTagName("ps:numBands")[0].firstChild.data 108 | cloud=xmldoc.getElementsByTagName("opt:cloudCoverPercentage")[0].firstChild.data 109 | psb=xmldoc.getElementsByTagName("ps:bandNumber")[0].firstChild.data 110 | psb1=xmldoc.getElementsByTagName("ps:bandNumber")[1].firstChild.data 111 | psb3=xmldoc.getElementsByTagName("ps:bandNumber")[2].firstChild.data 112 | psb4=xmldoc.getElementsByTagName("ps:bandNumber")[3].firstChild.data 113 | psia=xmldoc.getElementsByTagName("eop:incidenceAngle")[0].firstChild.data 114 | psilaz=xmldoc.getElementsByTagName("opt:illuminationAzimuthAngle")[0].firstChild.data 115 | psilelv=xmldoc.getElementsByTagName("opt:illuminationElevationAngle")[0].firstChild.data 116 | psaz=xmldoc.getElementsByTagName("ps:azimuthAngle")[0].firstChild.data 117 | pssca=xmldoc.getElementsByTagName("ps:spaceCraftViewAngle")[0].firstChild.data 118 | print ('ID_Name:', eopfilename.split('.')[0]) 119 | print ('Acquisition Date:', acquisition.split('T')[0]) 120 | print("Acquisition Date:", acquisition.split("T")[0]) 121 | print("Satellite Type:", platform) 122 | print("ShortName:", sattype) 123 | print("Satellite ID:", str(sid)) 124 | print("Tile ID:",tile) 125 | print("Number of Bands:", bands) 126 | print("Cloud Cover:", format(float(cloud),'.2f')) 127 | print("PS Incidence Angle",format(float(psia),'.4f')) 128 | print("PS illumination azimuth angle",format(float(psilaz),'.2f')) 129 | print("PS illumination elevation angle",format(float(psilelv),'.2f')) 130 | print("PS Azimuth angle",format(float(psaz),'.2f')) 131 | print("PS SpaceCraft angle",format(float(pssca),'.4f')) 132 | date_time = acquisition.split('T')[0] 133 | pattern = '%Y-%m-%d' 134 | epoch = int(time.mktime(time.strptime(date_time, pattern))) * 1000 135 | print ('epoch time', epoch) 136 | with 
open(mfile,'a') as csvfile: 137 | writer=csv.writer(csvfile,delimiter=',',lineterminator='\n') 138 | writer.writerow([fsp,epoch,platform,sattype,str(sid),tile,bands,format(float(cloud),'.2f'),format(float(psia),'.4f'),format(float(psilaz),'.2f'), 139 | format(float(psilelv),'.2f'),format(float(psaz),'.2f'),format(float(pssca),'.4f')]) 140 | csvfile.close() 141 | except Exception: 142 | print(infilename) 143 | with open(errorlog,'a') as csvfile: 144 | writer=csv.writer(csvfile,delimiter=',',lineterminator='\n') 145 | writer.writerow([infilename]) 146 | csvfile.close() 147 | if asset == 'PSO_V': #PS OrthoTile Analytic Derivative Visual 148 | folder = mf 149 | with open(mfile,'wb') as csvfile: 150 | writer=csv.DictWriter(csvfile,fieldnames=["id_no", "system:time_start", "platform", "satType","satID", "tileID", "numBands", "cloudcover","incAngle","illAzAngle","illElvAngle","azAngle","spcAngle"], delimiter=',') 151 | writer.writeheader() 152 | with open(errorlog,'wb') as csvfile: 153 | writer=csv.DictWriter(csvfile,fieldnames=["id_no"], delimiter=',') 154 | writer.writeheader() 155 | for filename in os.listdir(folder): 156 | infilename = os.path.join(folder, filename) 157 | fsp = filename.split('_x')[0] 158 | try: 159 | from xml.dom import minidom 160 | xmldoc = minidom.parse(infilename) 161 | eopfilename = xmldoc.getElementsByTagName('eop:identifier')[0].firstChild.data 162 | sid=xmldoc.getElementsByTagName("eop:serialIdentifier")[0].firstChild.data 163 | acquisition = xmldoc.getElementsByTagName('eop:acquisitionDate')[0].firstChild.data 164 | platform=xmldoc.getElementsByTagName("eop:shortName")[0].firstChild.data 165 | tile=xmldoc.getElementsByTagName("ps:tileId")[0].firstChild.data 166 | sattype=xmldoc.getElementsByTagName("eop:shortName")[1].firstChild.data 167 | bands=xmldoc.getElementsByTagName("ps:numBands")[0].firstChild.data 168 | cloud=xmldoc.getElementsByTagName("opt:cloudCoverPercentage")[0].firstChild.data 169 | psia=xmldoc.getElementsByTagName("eop:incidenceAngle")[0].firstChild.data 170 | psilaz=xmldoc.getElementsByTagName("opt:illuminationAzimuthAngle")[0].firstChild.data 171 | psilelv=xmldoc.getElementsByTagName("opt:illuminationElevationAngle")[0].firstChild.data 172 | psaz=xmldoc.getElementsByTagName("ps:azimuthAngle")[0].firstChild.data 173 | pssca=xmldoc.getElementsByTagName("ps:spaceCraftViewAngle")[0].firstChild.data 174 | print ('ID_Name:', eopfilename.split('.')[0]) 175 | print ('Acquisition Date:', acquisition.split('T')[0]) 176 | print("Acquisition Date:", acquisition.split("T")[0]) 177 | print("Satellite Type:", platform) 178 | print("ShortName:", sattype) 179 | print("Satellite ID:", str(sid)) 180 | print("Tile ID:",tile) 181 | print("Number of Bands:", bands) 182 | print("Cloud Cover:", format(float(cloud),'.2f')) 183 | print("PS Incidence Angle",format(float(psia),'.4f')) 184 | print("PS illumination azimuth angle",format(float(psilaz),'.2f')) 185 | print("PS illumination elevation angle",format(float(psilelv),'.2f')) 186 | print("PS Azimuth angle",format(float(psaz),'.2f')) 187 | print("PS SpaceCraft angle",format(float(pssca),'.4f')) 188 | date_time = acquisition.split('T')[0] 189 | pattern = '%Y-%m-%d' 190 | epoch = int(time.mktime(time.strptime(date_time, pattern))) * 1000 191 | print ('epoch time', epoch) 192 | with open(mfile,'a') as csvfile: 193 | writer=csv.writer(csvfile,delimiter=',',lineterminator='\n') 194 | writer.writerow([fsp,epoch,platform,sattype,str(sid),tile,bands,format(float(cloud),'.2f'),format(float(psia),'.4f'),format(float(psilaz),'.2f'), 
195 | format(float(psilelv),'.2f'),format(float(psaz),'.2f'),format(float(pssca),'.4f')]) 196 | csvfile.close() 197 | except Exception: 198 | print(infilename) 199 | with open(errorlog,'a') as csvfile: 200 | writer=csv.writer(csvfile,delimiter=',',lineterminator='\n') 201 | writer.writerow([infilename]) 202 | csvfile.close() 203 | if asset == 'PS4B': #PS 4 Band Scene Derivative Analytic 204 | folder = mf 205 | with open(mfile,'wb') as csvfile: 206 | writer=csv.DictWriter(csvfile,fieldnames=["id_no", "system:time_start", "platform", "satType","satID", "numBands", "cloudcover","incAngle","illAzAngle","illElvAngle","azAngle","spcAngle","rsf","refCoeffB1","refCoeffB2","refCoeffB3","refCoeffB4"], delimiter=',') 207 | writer.writeheader() 208 | with open(errorlog,'wb') as csvfile: 209 | writer=csv.DictWriter(csvfile,fieldnames=["id_no"], delimiter=',') 210 | writer.writeheader() 211 | for filename in os.listdir(folder): 212 | infilename = os.path.join(folder, filename) 213 | fsp = filename.split('_x')[0] 214 | try: 215 | from xml.dom import minidom 216 | xmldoc = minidom.parse(infilename) 217 | ps4band=xmldoc.getElementsByTagName('ps:EarthObservationMetaData')[0] 218 | eopfilename = xmldoc.getElementsByTagName('eop:identifier')[0].firstChild.data 219 | acquisition = xmldoc.getElementsByTagName('eop:acquisitionDate')[0].firstChild.data 220 | sid=xmldoc.getElementsByTagName("eop:serialIdentifier")[0].firstChild.data 221 | sattype=xmldoc.getElementsByTagName("eop:shortName")[1].firstChild.data 222 | platform=xmldoc.getElementsByTagName("eop:shortName")[0].firstChild.data 223 | bands=xmldoc.getElementsByTagName("ps:numBands")[0].firstChild.data 224 | cloud=xmldoc.getElementsByTagName("opt:cloudCoverPercentage")[0].firstChild.data 225 | psb=xmldoc.getElementsByTagName("ps:bandNumber")[0].firstChild.data 226 | psb1=xmldoc.getElementsByTagName("ps:bandNumber")[1].firstChild.data 227 | psb3=xmldoc.getElementsByTagName("ps:bandNumber")[2].firstChild.data 228 | psb4=xmldoc.getElementsByTagName("ps:bandNumber")[3].firstChild.data 229 | psbrad=xmldoc.getElementsByTagName("ps:radiometricScaleFactor")[0].firstChild.data 230 | psb1ref=xmldoc.getElementsByTagName("ps:reflectanceCoefficient")[0].firstChild.data 231 | psb2ref=xmldoc.getElementsByTagName("ps:reflectanceCoefficient")[1].firstChild.data 232 | psb3ref=xmldoc.getElementsByTagName("ps:reflectanceCoefficient")[2].firstChild.data 233 | psb4ref=xmldoc.getElementsByTagName("ps:reflectanceCoefficient")[3].firstChild.data 234 | psia=xmldoc.getElementsByTagName("eop:incidenceAngle")[0].firstChild.data 235 | psilaz=xmldoc.getElementsByTagName("opt:illuminationAzimuthAngle")[0].firstChild.data 236 | psilelv=xmldoc.getElementsByTagName("opt:illuminationElevationAngle")[0].firstChild.data 237 | psaz=xmldoc.getElementsByTagName("ps:azimuthAngle")[0].firstChild.data 238 | pssca=xmldoc.getElementsByTagName("ps:spaceCraftViewAngle")[0].firstChild.data 239 | print ('ID_Name:', eopfilename.split('.')[0]) 240 | print ('Acquisition Date:', acquisition.split('T')[0]) 241 | print("Acquisition Date:", acquisition.split("T")[0]) 242 | print("ShortName:", sattype) 243 | print("Satellite ID:", str(sid)) 244 | print("Number of Bands:", bands) 245 | print("Cloud Cover:", format(float(cloud),'.2f')) 246 | print("PS Incidence Angle",format(float(psia),'.4f')) 247 | print("PS illumination azimuth angle",format(float(psilaz),'.2f')) 248 | print("PS illumination elevation angle",format(float(psilelv),'.2f')) 249 | print("PS Azimuth angle",format(float(psaz),'.2f')) 250 | print("PS 
SpaceCraft angle",format(float(pssca),'.4f')) 251 | print("Radiometric Scale Factor",psbrad) 252 | print("ReflectanceCoefficient B1",format(float(psb1ref),'.8f')) 253 | print("ReflectanceCoefficient B2",format(float(psb2ref),'.8f')) 254 | print("ReflectanceCoefficient B3",format(float(psb3ref),'.8f')) 255 | print("ReflectanceCoefficient B4",format(float(psb4ref),'.8f')) 256 | date_time = acquisition.split('T')[0] 257 | pattern = '%Y-%m-%d' 258 | epoch = int(time.mktime(time.strptime(date_time, pattern))) * 1000 259 | with open(mfile,'a') as csvfile: 260 | writer=csv.writer(csvfile,delimiter=',',lineterminator='\n') 261 | writer.writerow([fsp,epoch,platform,sattype,str(sid),bands,format(float(cloud),'.2f'),format(float(psia),'.4f'),format(float(psilaz),'.2f'), 262 | format(float(psilelv),'.2f'),format(float(psaz),'.2f'),format(float(pssca),'.4f'),psbrad,format(float(psb1ref),'.8f'), 263 | format(float(psb2ref),'.8f'),format(float(psb3ref),'.8f'),format(float(psb4ref),'.8f')]) 264 | csvfile.close() 265 | except Exception: 266 | print(infilename) 267 | with open(errorlog,'a') as csvfile: 268 | writer=csv.writer(csvfile,delimiter=',',lineterminator='\n') 269 | writer.writerow([infilename]) 270 | csvfile.close() 271 | if asset == 'PS4B_DN': #PS 4 Band Scene Derivative DN 272 | folder = mf 273 | with open(mfile,'wb') as csvfile: 274 | writer=csv.DictWriter(csvfile,fieldnames=["id_no", "system:time_start", "platform", "satType","satID", "numBands", "cloudcover","incAngle","illAzAngle","illElvAngle","azAngle","spcAngle"], delimiter=',') 275 | writer.writeheader() 276 | with open(errorlog,'wb') as csvfile: 277 | writer=csv.DictWriter(csvfile,fieldnames=["id_no"], delimiter=',') 278 | writer.writeheader() 279 | for filename in os.listdir(folder): 280 | infilename = os.path.join(folder, filename) 281 | fsp = filename.split('_x')[0] 282 | try: 283 | from xml.dom import minidom 284 | xmldoc = minidom.parse(infilename) 285 | eopfilename = xmldoc.getElementsByTagName('eop:identifier')[0].firstChild.data 286 | acquisition = xmldoc.getElementsByTagName('eop:acquisitionDate')[0].firstChild.data 287 | bands=xmldoc.getElementsByTagName("ps:numBands")[0].firstChild.data 288 | platform=xmldoc.getElementsByTagName("eop:shortName")[0].firstChild.data 289 | sid=xmldoc.getElementsByTagName("eop:serialIdentifier")[0].firstChild.data 290 | sattype=xmldoc.getElementsByTagName("eop:shortName")[1].firstChild.data 291 | cloud=xmldoc.getElementsByTagName("opt:cloudCoverPercentage")[0].firstChild.data 292 | psia=xmldoc.getElementsByTagName("eop:incidenceAngle")[0].firstChild.data 293 | psilaz=xmldoc.getElementsByTagName("opt:illuminationAzimuthAngle")[0].firstChild.data 294 | psilelv=xmldoc.getElementsByTagName("opt:illuminationElevationAngle")[0].firstChild.data 295 | psaz=xmldoc.getElementsByTagName("ps:azimuthAngle")[0].firstChild.data 296 | pssca=xmldoc.getElementsByTagName("ps:spaceCraftViewAngle")[0].firstChild.data 297 | print ('ID_Name:', eopfilename.split('.')[0]) 298 | print ('Acquisition Date:', acquisition.split('T')[0]) 299 | print("Acquisition Date:", acquisition.split("T")[0]) 300 | print("Satellite Type:", platform) 301 | print("ShortName:", sattype) 302 | print("Number of Bands:", bands) 303 | print("Cloud Cover:", format(float(cloud),'.2f')) 304 | print("PS Incidence Angle",format(float(psia),'.4f')) 305 | print("PS illumination azimuth angle",format(float(psilaz),'.2f')) 306 | print("PS illumination elevation angle",format(float(psilelv),'.2f')) 307 | print("PS Azimuth angle",format(float(psaz),'.2f')) 
308 | print("PS SpaceCraft angle",format(float(pssca),'.4f')) 309 | date_time = acquisition.split('T')[0] 310 | pattern = '%Y-%m-%d' 311 | epoch = int(time.mktime(time.strptime(date_time, pattern))) * 1000 312 | with open(mfile,'a') as csvfile: 313 | writer=csv.writer(csvfile,delimiter=',',lineterminator='\n') 314 | writer.writerow([fsp,epoch,platform,sattype,str(sid),bands,format(float(cloud),'.2f'),format(float(psia),'.4f'),format(float(psilaz),'.2f'), 315 | format(float(psilelv),'.2f'),format(float(psaz),'.2f'),format(float(pssca),'.4f')]) 316 | csvfile.close() 317 | except Exception: 318 | print(infilename) 319 | with open(errorlog,'a') as csvfile: 320 | writer=csv.writer(csvfile,delimiter=',',lineterminator='\n') 321 | writer.writerow([infilename]) 322 | csvfile.close() 323 | if asset == 'PS3B': #PS 3 Band Scene Derivative Analytic 324 | folder = mf 325 | with open(mfile,'wb') as csvfile: 326 | writer=csv.DictWriter(csvfile,fieldnames=["id_no", "system:time_start", "platform", "satType","satID", "numBands", "cloudcover","incAngle","illAzAngle","illElvAngle","azAngle","spcAngle","rsf","refCoeffB1","refCoeffB2","refCoeffB3"], delimiter=',') 327 | writer.writeheader() 328 | with open(errorlog,'wb') as csvfile: 329 | writer=csv.DictWriter(csvfile,fieldnames=["id_no"], delimiter=',') 330 | writer.writeheader() 331 | for filename in os.listdir(folder): 332 | infilename = os.path.join(folder, filename) 333 | fsp = filename.split('_x')[0] 334 | print(fsp) 335 | try: 336 | from xml.dom import minidom 337 | xmldoc = minidom.parse(infilename) 338 | eopfilename = xmldoc.getElementsByTagName('eop:identifier')[0].firstChild.data 339 | acquisition = xmldoc.getElementsByTagName('eop:acquisitionDate')[0].firstChild.data 340 | sid=xmldoc.getElementsByTagName("eop:serialIdentifier")[0].firstChild.data 341 | platform=xmldoc.getElementsByTagName("eop:shortName")[0].firstChild.data 342 | sattype=xmldoc.getElementsByTagName("eop:shortName")[1].firstChild.data 343 | bands=xmldoc.getElementsByTagName("ps:numBands")[0].firstChild.data 344 | cloud=xmldoc.getElementsByTagName("opt:cloudCoverPercentage")[0].firstChild.data 345 | psb=xmldoc.getElementsByTagName("ps:bandNumber")[0].firstChild.data 346 | psb1=xmldoc.getElementsByTagName("ps:bandNumber")[1].firstChild.data 347 | psb3=xmldoc.getElementsByTagName("ps:bandNumber")[2].firstChild.data 348 | psbrad=xmldoc.getElementsByTagName("ps:radiometricScaleFactor")[0].firstChild.data 349 | psb1ref=xmldoc.getElementsByTagName("ps:reflectanceCoefficient")[0].firstChild.data 350 | psb2ref=xmldoc.getElementsByTagName("ps:reflectanceCoefficient")[1].firstChild.data 351 | psb3ref=xmldoc.getElementsByTagName("ps:reflectanceCoefficient")[2].firstChild.data 352 | psia=xmldoc.getElementsByTagName("eop:incidenceAngle")[0].firstChild.data 353 | psilaz=xmldoc.getElementsByTagName("opt:illuminationAzimuthAngle")[0].firstChild.data 354 | psilelv=xmldoc.getElementsByTagName("opt:illuminationElevationAngle")[0].firstChild.data 355 | psaz=xmldoc.getElementsByTagName("ps:azimuthAngle")[0].firstChild.data 356 | pssca=xmldoc.getElementsByTagName("ps:spaceCraftViewAngle")[0].firstChild.data 357 | print ('ID_Name:', eopfilename.split('.')[0]) 358 | print ('Acquisition Date:', acquisition.split('T')[0]) 359 | print("Acquisition Date:", acquisition.split("T")[0]) 360 | print("ShortName:", sattype) 361 | print("Satellite ID:", str(sid)) 362 | print("Number of Bands:", bands) 363 | print("Cloud Cover:", format(float(cloud),'.2f')) 364 | print("PS Incidence Angle",format(float(psia),'.4f')) 365 
| print("PS illumination azimuth angle",format(float(psilaz),'.2f')) 366 | print("PS illumination elevation angle",format(float(psilelv),'.2f')) 367 | print("PS Azimuth angle",format(float(psaz),'.2f')) 368 | print("PS SpaceCraft angle",format(float(pssca),'.4f')) 369 | print("Radiometric Scale Factor",psbrad) 370 | print("ReflectanceCoefficient B1",format(float(psb1ref),'.8f')) 371 | print("ReflectanceCoefficient B2",format(float(psb2ref),'.8f')) 372 | print("ReflectanceCoefficient B3",format(float(psb3ref),'.8f')) 373 | date_time = acquisition.split('T')[0] 374 | pattern = '%Y-%m-%d' 375 | epoch = int(time.mktime(time.strptime(date_time, pattern))) * 1000 376 | with open(mfile,'a') as csvfile: 377 | writer=csv.writer(csvfile,delimiter=',',lineterminator='\n') 378 | writer.writerow([fsp,epoch,platform,sattype,str(sid),bands,format(float(cloud),'.2f'),format(float(psia),'.4f'),format(float(psilaz),'.2f'), 379 | format(float(psilelv),'.2f'),format(float(psaz),'.2f'),format(float(pssca),'.4f'),psbrad,format(float(psb1ref),'.8f'), 380 | format(float(psb2ref),'.8f'),format(float(psb3ref),'.8f')]) 381 | csvfile.close() 382 | except Exception: 383 | print(infilename) 384 | with open(errorlog,'a') as csvfile: 385 | writer=csv.writer(csvfile,delimiter=',',lineterminator='\n') 386 | writer.writerow([infilename]) 387 | csvfile.close() 388 | if asset == 'PS3B_DN': #PS 3 Band Scene Derivative DN 389 | folder = mf 390 | with open(mfile,'wb') as csvfile: 391 | writer=csv.DictWriter(csvfile,fieldnames=["id_no", "system:time_start", "platform", "satType","satID", "numBands", "cloudcover","incAngle","illAzAngle","illElvAngle","azAngle","spcAngle"], delimiter=',') 392 | writer.writeheader() 393 | with open(errorlog,'wb') as csvfile: 394 | writer=csv.DictWriter(csvfile,fieldnames=["id_no"], delimiter=',') 395 | writer.writeheader() 396 | for filename in os.listdir(folder): 397 | infilename = os.path.join(folder, filename) 398 | fsp = filename.split('_x')[0] 399 | try: 400 | from xml.dom import minidom 401 | xmldoc = minidom.parse(infilename) 402 | eopfilename = xmldoc.getElementsByTagName('eop:identifier')[0].firstChild.data 403 | acquisition = xmldoc.getElementsByTagName('eop:acquisitionDate')[0].firstChild.data 404 | sid=xmldoc.getElementsByTagName("eop:serialIdentifier")[0].firstChild.data 405 | bands=xmldoc.getElementsByTagName("ps:numBands")[0].firstChild.data 406 | platform=xmldoc.getElementsByTagName("eop:shortName")[0].firstChild.data 407 | sattype=xmldoc.getElementsByTagName("eop:shortName")[1].firstChild.data 408 | cloud=xmldoc.getElementsByTagName("opt:cloudCoverPercentage")[0].firstChild.data 409 | psia=xmldoc.getElementsByTagName("eop:incidenceAngle")[0].firstChild.data 410 | psilaz=xmldoc.getElementsByTagName("opt:illuminationAzimuthAngle")[0].firstChild.data 411 | psilelv=xmldoc.getElementsByTagName("opt:illuminationElevationAngle")[0].firstChild.data 412 | psaz=xmldoc.getElementsByTagName("ps:azimuthAngle")[0].firstChild.data 413 | pssca=xmldoc.getElementsByTagName("ps:spaceCraftViewAngle")[0].firstChild.data 414 | print ('ID_Name:', eopfilename.split('.')[0]) 415 | print ('Acquisition Date:', acquisition.split('T')[0]) 416 | print("Acquisition Date:", acquisition.split("T")[0]) 417 | print("Satellite Type:", platform) 418 | print("ShortName:", sattype) 419 | print("Number of Bands:", bands) 420 | print("Cloud Cover:", format(float(cloud),'.2f')) 421 | print("PS Incidence Angle",format(float(psia),'.4f')) 422 | print("PS illumination azimuth angle",format(float(psilaz),'.2f')) 423 | print("PS 
illumination elevation angle",format(float(psilelv),'.2f')) 424 | print("PS Azimuth angle",format(float(psaz),'.2f')) 425 | print("PS SpaceCraft angle",format(float(pssca),'.4f')) 426 | date_time = acquisition.split('T')[0] 427 | pattern = '%Y-%m-%d' 428 | epoch = int(time.mktime(time.strptime(date_time, pattern))) * 1000 429 | with open(mfile,'a') as csvfile: 430 | writer=csv.writer(csvfile,delimiter=',',lineterminator='\n') 431 | writer.writerow([fsp,epoch,platform,sattype,str(sid),bands,format(float(cloud),'.2f'),format(float(psia),'.4f'),format(float(psilaz),'.2f'), 432 | format(float(psilelv),'.2f'),format(float(psaz),'.2f'),format(float(pssca),'.4f')]) 433 | csvfile.close() 434 | except Exception: 435 | print(infilename) 436 | with open(errorlog,'a') as csvfile: 437 | writer=csv.writer(csvfile,delimiter=',',lineterminator='\n') 438 | writer.writerow([infilename]) 439 | csvfile.close() 440 | if asset == 'PS3B_V': #PS 3 Band Scene Derivative Visual 441 | folder = mf 442 | with open(mfile,'wb') as csvfile: 443 | writer=csv.DictWriter(csvfile,fieldnames=["id_no", "system:time_start", "platform", "satType","satID", "numBands", "cloudcover","incAngle","illAzAngle","illElvAngle","azAngle","spcAngle"], delimiter=',') 444 | writer.writeheader() 445 | with open(errorlog,'wb') as csvfile: 446 | writer=csv.DictWriter(csvfile,fieldnames=["id_no"], delimiter=',') 447 | writer.writeheader() 448 | for filename in os.listdir(folder): 449 | infilename = os.path.join(folder, filename) 450 | fsp = filename.split('_x')[0] 451 | try: 452 | from xml.dom import minidom 453 | xmldoc = minidom.parse(infilename) 454 | eopfilename = xmldoc.getElementsByTagName('eop:identifier')[0].firstChild.data 455 | acquisition = xmldoc.getElementsByTagName('eop:acquisitionDate')[0].firstChild.data 456 | bands=xmldoc.getElementsByTagName("ps:numBands")[0].firstChild.data 457 | platform=xmldoc.getElementsByTagName("eop:shortName")[0].firstChild.data 458 | sid=xmldoc.getElementsByTagName("eop:serialIdentifier")[0].firstChild.data 459 | sattype=xmldoc.getElementsByTagName("eop:shortName")[1].firstChild.data 460 | cloud=xmldoc.getElementsByTagName("opt:cloudCoverPercentage")[0].firstChild.data 461 | psia=xmldoc.getElementsByTagName("eop:incidenceAngle")[0].firstChild.data 462 | psilaz=xmldoc.getElementsByTagName("opt:illuminationAzimuthAngle")[0].firstChild.data 463 | psilelv=xmldoc.getElementsByTagName("opt:illuminationElevationAngle")[0].firstChild.data 464 | psaz=xmldoc.getElementsByTagName("ps:azimuthAngle")[0].firstChild.data 465 | pssca=xmldoc.getElementsByTagName("ps:spaceCraftViewAngle")[0].firstChild.data 466 | print ('ID_Name:', eopfilename.split('.')[0]) 467 | print ('Acquisition Date:', acquisition.split('T')[0]) 468 | print("Acquisition Date:", acquisition.split("T")[0]) 469 | print("Satellite Type:", platform) 470 | print("ShortName:", sattype) 471 | print("Number of Bands:", bands) 472 | print("Cloud Cover:", format(float(cloud),'.2f')) 473 | print("PS Incidence Angle",format(float(psia),'.4f')) 474 | print("PS illumination azimuth angle",format(float(psilaz),'.2f')) 475 | print("PS illumination elevation angle",format(float(psilelv),'.2f')) 476 | print("PS Azimuth angle",format(float(psaz),'.2f')) 477 | print("PS SpaceCraft angle",format(float(pssca),'.4f')) 478 | date_time = acquisition.split('T')[0] 479 | pattern = '%Y-%m-%d' 480 | epoch = int(time.mktime(time.strptime(date_time, pattern))) * 1000 481 | print ('epoch time', epoch) 482 | with open(mfile,'a') as csvfile: 483 | 
writer=csv.writer(csvfile,delimiter=',',lineterminator='\n') 484 | writer.writerow([fsp,epoch,platform,sattype,str(sid),bands,format(float(cloud),'.2f'),format(float(psia),'.4f'),format(float(psilaz),'.2f'), 485 | format(float(psilelv),'.2f'),format(float(psaz),'.2f'),format(float(pssca),'.4f')]) 486 | csvfile.close() 487 | except Exception: 488 | print(infilename) 489 | with open(errorlog,'a') as csvfile: 490 | writer=csv.writer(csvfile,delimiter=',',lineterminator='\n') 491 | writer.writerow([infilename]) 492 | csvfile.close() 493 | if asset == 'REO': 494 | folder = mf 495 | with open(mfile,'wb') as csvfile: 496 | writer=csv.DictWriter(csvfile,fieldnames=["id_no", "system:time_start", "platform", "satID", "tileID", "numBands", "cloudcover","incAngle","illAzAngle","illElvAngle","azAngle","spcAngle","rsf"], delimiter=',') 497 | writer.writeheader() 498 | with open(errorlog,'wb') as csvfile: 499 | writer=csv.DictWriter(csvfile,fieldnames=["id_no"], delimiter=',') 500 | writer.writeheader() 501 | for filename in os.listdir(folder): 502 | print(filename) 503 | infilename = os.path.join(folder,filename) 504 | fsp=filename.split("_x")[0] 505 | try: 506 | from xml.dom import minidom 507 | xmldoc=minidom.parse(infilename) 508 | re=xmldoc.getElementsByTagName("re:EarthObservationMetaData")[0] 509 | eopfilename=xmldoc.getElementsByTagName("eop:identifier")[0].firstChild.data 510 | product=xmldoc.getElementsByTagName("re:EarthObservationResult")[0] 511 | bands=product.getElementsByTagName("re:numBands")[0].firstChild.data 512 | downlink=xmldoc.getElementsByTagName("eop:downlinkedTo")[0] 513 | acquisition= downlink.getElementsByTagName("eop:acquisitionDate")[0].firstChild.data 514 | tile=xmldoc.getElementsByTagName("re:tileId")[0].firstChild.data 515 | equip=xmldoc.getElementsByTagName("eop:EarthObservationEquipment")[0] 516 | platform=equip.getElementsByTagName("eop:shortName")[0].firstChild.data 517 | sid=equip.getElementsByTagName("eop:serialIdentifier")[0].firstChild.data 518 | cloud=xmldoc.getElementsByTagName("opt:cloudCoverPercentage")[0].firstChild.data 519 | date_time = acquisition.split("T")[0] 520 | pattern = '%Y-%m-%d' 521 | epoch = int(time.mktime(time.strptime(date_time, pattern)))*1000 522 | psia=xmldoc.getElementsByTagName("eop:incidenceAngle")[0].firstChild.data 523 | psilaz=xmldoc.getElementsByTagName("opt:illuminationAzimuthAngle")[0].firstChild.data 524 | psilelv=xmldoc.getElementsByTagName("opt:illuminationElevationAngle")[0].firstChild.data 525 | psaz=xmldoc.getElementsByTagName("re:azimuthAngle")[0].firstChild.data 526 | pssca=xmldoc.getElementsByTagName("re:spaceCraftViewAngle")[0].firstChild.data 527 | psrad=xmldoc.getElementsByTagName("re:radiometricScaleFactor")[0].firstChild.data 528 | print("ID_Name:", eopfilename.split(".")[0]) 529 | print("Acquisition Date:", acquisition.split("T")[0]) 530 | print("Satellite Type:", str(platform)) 531 | print("Satellite ID:", str(sid)) 532 | print("Tile ID:", tile) 533 | print("Number of Bands:", bands) 534 | print("Cloud Cover:", format(float(cloud),'.2f')) 535 | print("Epoch Time:",epoch) 536 | print("RE Incidence Angle",format(float(psia),'.4f')) 537 | print("RE illumination azimuth angle",format(float(psilaz),'.2f')) 538 | print("RE illumination elevation angle",format(float(psilelv),'.2f')) 539 | print("RE Azimuth angle",format(float(psaz),'.2f')) 540 | print("RE SpaceCraft angle",format(float(pssca),'.4f')) 541 | print("Radiometric Scale Factor", format(float(psrad),'.18f')) 542 | with open(mfile,'a') as csvfile: 543 | 
writer=csv.writer(csvfile,delimiter=',',lineterminator='\n') 544 | writer.writerow([fsp,epoch,str(platform),str(sid),tile,bands,format(float(cloud),'.2f'),format(float(psia),'.4f'),format(float(psilaz),'.2f'),format(float(psilelv),'.2f'),format(float(psaz),'.2f'),format(float(pssca),'.4f'),format(float(psrad),'.18f')]) 545 | csvfile.close() 546 | except Exception: 547 | print(infilename) 548 | with open(errorlog,'a') as csvfile: 549 | writer=csv.writer(csvfile,delimiter=',',lineterminator='\n') 550 | writer.writerow([infilename]) 551 | csvfile.close() 552 | if asset == 'REO_V': 553 | folder = mf 554 | with open(mfile,'wb') as csvfile: 555 | writer=csv.DictWriter(csvfile,fieldnames=["id_no", "system:time_start", "platform", "satID", "tileID", "numBands", "cloudcover","incAngle","illAzAngle","illElvAngle","azAngle","spcAngle","rsf"], delimiter=',') 556 | writer.writeheader() 557 | with open(errorlog,'wb') as csvfile: 558 | writer=csv.DictWriter(csvfile,fieldnames=["id_no"], delimiter=',') 559 | writer.writeheader() 560 | for filename in os.listdir(folder): 561 | print(filename) 562 | infilename = os.path.join(folder,filename) 563 | fsp=filename.split("_x")[0] 564 | try: 565 | from xml.dom import minidom 566 | xmldoc=minidom.parse(infilename) 567 | eopfilename=xmldoc.getElementsByTagName("eop:identifier")[0].firstChild.data 568 | bands=xmldoc.getElementsByTagName("re:numBands")[0].firstChild.data 569 | acquisition= xmldoc.getElementsByTagName("eop:acquisitionDate")[0].firstChild.data 570 | tile=xmldoc.getElementsByTagName("re:tileId")[0].firstChild.data 571 | equip=xmldoc.getElementsByTagName("eop:EarthObservationEquipment")[0] 572 | platform=equip.getElementsByTagName("eop:shortName")[0].firstChild.data 573 | sid=equip.getElementsByTagName("eop:serialIdentifier")[0].firstChild.data 574 | cloud=xmldoc.getElementsByTagName("opt:cloudCoverPercentage")[0].firstChild.data 575 | date_time = acquisition.split("T")[0] 576 | pattern = '%Y-%m-%d' 577 | epoch = int(time.mktime(time.strptime(date_time, pattern)))*1000 578 | psia=xmldoc.getElementsByTagName("eop:incidenceAngle")[0].firstChild.data 579 | psilaz=xmldoc.getElementsByTagName("opt:illuminationAzimuthAngle")[0].firstChild.data 580 | psilelv=xmldoc.getElementsByTagName("opt:illuminationElevationAngle")[0].firstChild.data 581 | psaz=xmldoc.getElementsByTagName("re:azimuthAngle")[0].firstChild.data 582 | pssca=xmldoc.getElementsByTagName("re:spaceCraftViewAngle")[0].firstChild.data 583 | psrad=xmldoc.getElementsByTagName("re:radiometricScaleFactor")[0].firstChild.data 584 | print("ID_Name:", eopfilename.split(".")[0]) 585 | print("Acquisition Date:", acquisition.split("T")[0]) 586 | print("Satellite Type:", str(platform)) 587 | print("Satellite ID:", str(sid)) 588 | print("Tile ID:", tile) 589 | print("Number of Bands:", bands) 590 | print("Cloud Cover:", format(float(cloud),'.2f')) 591 | print("Epoch Time:",epoch) 592 | print("RE Incidence Angle",format(float(psia),'.4f')) 593 | print("RE illumination azimuth angle",format(float(psilaz),'.2f')) 594 | print("RE illumination elevation angle",format(float(psilelv),'.2f')) 595 | print("RE Azimuth angle",format(float(psaz),'.2f')) 596 | print("RE SpaceCraft angle",format(float(pssca),'.4f')) 597 | print("Radiometric Scale Factor", format(float(psrad),'.18f')) 598 | with open(mfile,'a') as csvfile: 599 | writer=csv.writer(csvfile,delimiter=',',lineterminator='\n') 600 | 
writer.writerow([fsp,epoch,str(platform),str(sid),tile,bands,format(float(cloud),'.2f'),format(float(psia),'.4f'),format(float(psilaz),'.2f'),format(float(psilelv),'.2f'),format(float(psaz),'.2f'),format(float(pssca),'.4f'),format(float(psrad),'.18f')])
601 | csvfile.close()
602 | except Exception:
603 | print(infilename)
604 | with open(errorlog,'a') as csvfile:
605 | writer=csv.writer(csvfile,delimiter=',',lineterminator='\n')
606 | writer.writerow([infilename])
607 | csvfile.close()
608 | if asset == 'DGMS':
609 | folder = mf
610 | with open(mfile, 'wb') as csvfile:
611 | writer = csv.DictWriter(csvfile, fieldnames=['id_no', 'satName', 'prodID', 'catID', 'satID', 'stripID', 'numBands', 'cloudcover', 'system:time_start', 'sunaz', 'sunelv', 'sataz', 'satelv', 'absfB1', 'absfB2',
612 | 'absfB3', 'absfB4', 'absfB5', 'absfB6', 'absfB7', 'absfB8', 'effbwB1', 'effbwB2', 'effbwB3', 'effbwB4', 'effbwB5', 'effbwB6', 'effbwB7', 'effbwB8'], delimiter=',')
613 | writer.writeheader()
614 | for filename in os.listdir(folder):
615 | print(filename)
616 | infilename = os.path.join(folder, filename)
617 | fsp = filename.split('.')[0]
618 | try:
619 | from xml.dom import minidom
620 | xmldoc = minidom.parse(infilename)
621 | pid = xmldoc.getElementsByTagName('PRODUCTORDERID')[0].firstChild.data
622 | cid = xmldoc.getElementsByTagName('PRODUCTCATALOGID')[0].firstChild.data
623 | satid = xmldoc.getElementsByTagName('SATID')[0].firstChild.data
624 | stripid = xmldoc.getElementsByTagName('STRIPID')[0].firstChild.data
625 | acquisition = xmldoc.getElementsByTagName('STARTTIME')[0].firstChild.data
626 | cloud = xmldoc.getElementsByTagName('CLOUDCOVER')[0].firstChild.data
627 | sunaz = xmldoc.getElementsByTagName('MEANSUNAZ')[0].firstChild.data
628 | sunelv = xmldoc.getElementsByTagName('MEANSUNEL')[0].firstChild.data
629 | sataz = xmldoc.getElementsByTagName('MEANSATAZ')[0].firstChild.data
630 | satelv = xmldoc.getElementsByTagName('MEANSATEL')[0].firstChild.data
631 | offnadir = xmldoc.getElementsByTagName('MEANOFFNADIRVIEWANGLE')[0].firstChild.data  # mean off-nadir view angle (distinct from satellite elevation)
632 | date_time = acquisition.split('T')[0]
633 | pattern = '%Y-%m-%d'
634 | epoch = int(time.mktime(time.strptime(date_time, pattern))) * 1000
635 | if satid == 'QB02':
636 | satname = 'QuickBird'
637 | bands = 4
638 | absfb1 = xmldoc.getElementsByTagName('ABSCALFACTOR')[0].firstChild.data
639 | absfb2 = xmldoc.getElementsByTagName('ABSCALFACTOR')[1].firstChild.data
640 | absfb3 = xmldoc.getElementsByTagName('ABSCALFACTOR')[2].firstChild.data
641 | absfb4 = xmldoc.getElementsByTagName('ABSCALFACTOR')[3].firstChild.data
642 | absfb5 = 0
643 | absfb6 = 0
644 | absfb7 = 0
645 | absfb8 = 0
646 | effbwb1 = xmldoc.getElementsByTagName('EFFECTIVEBANDWIDTH')[0].firstChild.data
647 | effbwb2 = xmldoc.getElementsByTagName('EFFECTIVEBANDWIDTH')[1].firstChild.data
648 | effbwb3 = xmldoc.getElementsByTagName('EFFECTIVEBANDWIDTH')[2].firstChild.data
649 | effbwb4 = xmldoc.getElementsByTagName('EFFECTIVEBANDWIDTH')[3].firstChild.data
650 | effbwb5 = 0
651 | effbwb6 = 0
652 | effbwb7 = 0
653 | effbwb8 = 0
654 | elif satid == 'GE01':
655 | satname = 'Geoeye'
656 | bands = 4
657 | absfb1 = xmldoc.getElementsByTagName('ABSCALFACTOR')[0].firstChild.data
658 | absfb2 = xmldoc.getElementsByTagName('ABSCALFACTOR')[1].firstChild.data
659 | absfb3 = xmldoc.getElementsByTagName('ABSCALFACTOR')[2].firstChild.data
660 | absfb4 = xmldoc.getElementsByTagName('ABSCALFACTOR')[3].firstChild.data
661 | absfb5 = 0
662 | absfb6 = 0
663 | absfb7 = 0
664 | absfb8 = 0
665 | effbwb1 =
xmldoc.getElementsByTagName('EFFECTIVEBANDWIDTH')[0].firstChild.data 666 | effbwb2 = xmldoc.getElementsByTagName('EFFECTIVEBANDWIDTH')[1].firstChild.data 667 | effbwb3 = xmldoc.getElementsByTagName('EFFECTIVEBANDWIDTH')[2].firstChild.data 668 | effbwb4 = xmldoc.getElementsByTagName('EFFECTIVEBANDWIDTH')[3].firstChild.data 669 | effbwb5 = 0 670 | effbwb6 = 0 671 | effbwb7 = 0 672 | effbwb8 = 0 673 | elif satid == 'WV02': 674 | satname = 'WorldView-2' 675 | bands = 8 676 | absfb1 = xmldoc.getElementsByTagName('ABSCALFACTOR')[0].firstChild.data 677 | absfb2 = xmldoc.getElementsByTagName('ABSCALFACTOR')[1].firstChild.data 678 | absfb3 = xmldoc.getElementsByTagName('ABSCALFACTOR')[2].firstChild.data 679 | absfb4 = xmldoc.getElementsByTagName('ABSCALFACTOR')[3].firstChild.data 680 | absfb5 = xmldoc.getElementsByTagName('ABSCALFACTOR')[4].firstChild.data 681 | absfb6 = xmldoc.getElementsByTagName('ABSCALFACTOR')[5].firstChild.data 682 | absfb7 = xmldoc.getElementsByTagName('ABSCALFACTOR')[6].firstChild.data 683 | absfb8 = xmldoc.getElementsByTagName('ABSCALFACTOR')[7].firstChild.data 684 | effbwb1 = xmldoc.getElementsByTagName('EFFECTIVEBANDWIDTH')[0].firstChild.data 685 | effbwb2 = xmldoc.getElementsByTagName('EFFECTIVEBANDWIDTH')[1].firstChild.data 686 | effbwb3 = xmldoc.getElementsByTagName('EFFECTIVEBANDWIDTH')[2].firstChild.data 687 | effbwb4 = xmldoc.getElementsByTagName('EFFECTIVEBANDWIDTH')[3].firstChild.data 688 | effbwb5 = xmldoc.getElementsByTagName('EFFECTIVEBANDWIDTH')[4].firstChild.data 689 | effbwb6 = xmldoc.getElementsByTagName('EFFECTIVEBANDWIDTH')[5].firstChild.data 690 | effbwb7 = xmldoc.getElementsByTagName('EFFECTIVEBANDWIDTH')[6].firstChild.data 691 | effbwb8 = xmldoc.getElementsByTagName('EFFECTIVEBANDWIDTH')[7].firstChild.data 692 | else: 693 | satname = 'WorldView-3' 694 | bands = 8 695 | absfb1 = xmldoc.getElementsByTagName('ABSCALFACTOR')[0].firstChild.data 696 | absfb2 = xmldoc.getElementsByTagName('ABSCALFACTOR')[1].firstChild.data 697 | absfb3 = xmldoc.getElementsByTagName('ABSCALFACTOR')[2].firstChild.data 698 | absfb4 = xmldoc.getElementsByTagName('ABSCALFACTOR')[3].firstChild.data 699 | absfb5 = xmldoc.getElementsByTagName('ABSCALFACTOR')[4].firstChild.data 700 | absfb6 = xmldoc.getElementsByTagName('ABSCALFACTOR')[5].firstChild.data 701 | absfb7 = xmldoc.getElementsByTagName('ABSCALFACTOR')[6].firstChild.data 702 | absfb8 = xmldoc.getElementsByTagName('ABSCALFACTOR')[7].firstChild.data 703 | effbwb1 = xmldoc.getElementsByTagName('EFFECTIVEBANDWIDTH')[0].firstChild.data 704 | effbwb2 = xmldoc.getElementsByTagName('EFFECTIVEBANDWIDTH')[1].firstChild.data 705 | effbwb3 = xmldoc.getElementsByTagName('EFFECTIVEBANDWIDTH')[2].firstChild.data 706 | effbwb4 = xmldoc.getElementsByTagName('EFFECTIVEBANDWIDTH')[3].firstChild.data 707 | effbwb5 = xmldoc.getElementsByTagName('EFFECTIVEBANDWIDTH')[4].firstChild.data 708 | effbwb6 = xmldoc.getElementsByTagName('EFFECTIVEBANDWIDTH')[5].firstChild.data 709 | effbwb7 = xmldoc.getElementsByTagName('EFFECTIVEBANDWIDTH')[6].firstChild.data 710 | effbwb8 = xmldoc.getElementsByTagName('EFFECTIVEBANDWIDTH')[7].firstChild.data 711 | print ( 712 | 'ID_Name:', fsp) 713 | print ('Satellite Name:', satname) 714 | print ('Acquisition Date:', acquisition.split('T')[0]) 715 | print ('Product Order ID:', str(pid)) 716 | print ('Product Catalog ID:', str(cid)) 717 | print ('Satellite ID:', str(satid)) 718 | print ('Strip ID:', stripid) 719 | print ('Number of Bands:', bands) 720 | print ('Cloud Cover:', cloud[:4]) 721 | print ('Epoch Time:', 
epoch)
722 | print ('Abscal Factor', absfb1)
723 | print ('Abscal Factor', absfb2)
724 | print ('Abscal Factor', absfb3)
725 | print ('Abscal Factor', absfb4)
726 | print ('Eff Bandwidth', effbwb1)
727 | print ('Eff Bandwidth', effbwb2)
728 | print ('Eff Bandwidth', effbwb3)
729 | print ('Eff Bandwidth', effbwb4)
730 | print ('Sun Elevation', format(float(sunelv), '.2f'))
731 | print ('Sun Azimuth', format(float(sunaz), '.2f'))
732 | print ('Sat Elevation', format(float(satelv), '.2f'))
733 | print ('Sat Azimuth', format(float(sataz), '.2f'))
734 | with open(mfile, 'a') as csvfile:
735 | writer = csv.writer(csvfile, delimiter=',', lineterminator='\n')
736 | writer.writerow([fsp, satname, pid, cid, satid, stripid, bands, format(float(cloud), '.2f'), epoch, format(float(sunaz), '.2f'), format(float(sunelv), '.2f'),
737 | format(float(sataz), '.2f'), format(float(satelv), '.2f'), format(float(absfb1), '.6f'), format(float(absfb2), '.6f'), format(float(absfb3), '.6f'), format(float(absfb4), '.6f'),
738 | format(float(absfb5), '.6f'), format(float(absfb6), '.6f'), format(float(absfb7), '.6f'), format(float(absfb8), '.6f'), format(float(effbwb1), '.6f'), format(float(effbwb2), '.6f'),
739 | format(float(effbwb3), '.6f'), format(float(effbwb4), '.6f'), format(float(effbwb5), '.6f'), format(float(effbwb6), '.6f'), format(float(effbwb7), '.6f'), format(float(effbwb8), '.6f')])
740 | csvfile.close()
741 | except Exception:
742 | print(infilename)
743 | with open(errorlog, 'a') as csvfile:
744 | writer = csv.writer(csvfile, delimiter=',')
745 | writer.writerow([infilename])
746 | csvfile.close()
747 | 
748 | if asset == 'DGP':
749 | folder = mf
750 | with open(mfile, 'wb') as csvfile:
751 | writer = csv.DictWriter(csvfile, fieldnames=['id_no', 'satName', 'prodID', 'catID', 'satID', 'stripID', 'numBands', 'cloudcover', 'system:time_start'], delimiter=',')
752 | writer.writeheader()
753 | for filename in os.listdir(folder):
754 | print(filename)
755 | infilename = os.path.join(folder, filename)
756 | fsp = filename.split('.')[0]
757 | try:
758 | from xml.dom import minidom
759 | xmldoc = minidom.parse(infilename)
760 | pid = xmldoc.getElementsByTagName('PRODUCTORDERID')[0].firstChild.data
761 | cid = xmldoc.getElementsByTagName('PRODUCTCATALOGID')[0].firstChild.data
762 | satid = xmldoc.getElementsByTagName('SATID')[0].firstChild.data
763 | stripid = xmldoc.getElementsByTagName('STRIPID')[0].firstChild.data
764 | acquisition = xmldoc.getElementsByTagName('STARTTIME')[0].firstChild.data
765 | cloud = xmldoc.getElementsByTagName('CLOUDCOVER')[0].firstChild.data
766 | date_time = acquisition.split('T')[0]
767 | pattern = '%Y-%m-%d'
768 | epoch = int(time.mktime(time.strptime(date_time, pattern))) * 1000
769 | if satid == 'QB02':
770 | satname = 'QuickBird'
771 | elif satid == 'GE01':
772 | satname = 'Geoeye'
773 | elif satid == 'WV02':
774 | satname = 'WorldView-2'
775 | else:
776 | satname = 'WorldView-3'
777 | if satid == 'QB02':
778 | bands = 1
779 | elif satid == 'GE01':
780 | bands = 1
781 | elif satid == 'WV02':
782 | bands = 1
783 | else:
784 | bands = 1
785 | print (
786 | 'ID_Name:', fsp)
787 | print ('Satellite Name:', satname)
788 | print ('Acquisition Date:', acquisition.split('T')[0])
789 | print ('Product Order ID:', str(pid))
790 | print ('Product Catalog ID:', str(cid))
791 | print ('Satellite ID:', str(satid))
792 | print ('Strip ID:', stripid)
793 | print ('Number of Bands:', bands)
794 | print ('Cloud Cover:', cloud[:4])
795 | print ('Epoch Time:', epoch)
796 | with open(mfile, 'a') as
csvfile: 797 | writer = csv.writer(csvfile, delimiter=',', lineterminator='\n') 798 | writer.writerow([fsp, satname, pid, cid, satid, stripid, bands, cloud[:4], epoch]) 799 | csvfile.close() 800 | except Exception: 801 | print infilename 802 | with open(errorlog, 'wb') as csvfile: 803 | writer = csv.writer(csvfile, delimiter=',') 804 | writer.writerow([infilename]) 805 | csvfile.close() 806 | 807 | 808 | if __name__ == '__main__': 809 | main() 810 | -------------------------------------------------------------------------------- /ee_ppipe/collectionprop.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import subprocess 3 | import os,sys,time,csv,sys 4 | import json,ee 5 | from pprint import pprint 6 | 7 | def collprop(imcoll,prop): 8 | tset=str("earthengine asset set "+'"'+imcoll+'"'+" --property "+'"'+str(prop)+'"') 9 | tinfo=str("earthengine asset info "+imcoll) 10 | assetset=subprocess.call(tset,shell=True) 11 | print("Asset Property Set") 12 | print("New Asset Info========>>>>>") 13 | for line in subprocess.check_output(tinfo,shell=True).split('\n'): 14 | print(line) 15 | 16 | 17 | -------------------------------------------------------------------------------- /ee_ppipe/config.py: -------------------------------------------------------------------------------- 1 | import json 2 | import logging.config 3 | import os 4 | 5 | default_config = { 6 | "version": 1, 7 | "disable_existing_loggers": False, 8 | "formatters": { 9 | "simple": { 10 | "format": "%(asctime)s :: %(name)s :: %(levelname)s :: %(message)s" 11 | } 12 | }, 13 | 14 | "handlers": { 15 | "console": { 16 | "class": "logging.StreamHandler", 17 | "level": "DEBUG", 18 | "formatter": "simple", 19 | "stream": "ext://sys.stdout" 20 | }, 21 | 22 | "info_file_handler": { 23 | "class": "logging.handlers.RotatingFileHandler", 24 | "level": "INFO", 25 | "formatter": "simple", 26 | "filename": "gee_assets_info.log", 27 | "maxBytes": 10485760, 28 | "backupCount": 20, 29 | "encoding": "utf8" 30 | }, 31 | 32 | "error_file_handler": { 33 | "class": "logging.handlers.RotatingFileHandler", 34 | "level": "ERROR", 35 | "formatter": "simple", 36 | "filename": "gee_assets_errors.log", 37 | "maxBytes": 10485760, 38 | "backupCount": 20, 39 | "encoding": "utf8" 40 | } 41 | }, 42 | 43 | "root": { 44 | "level": "INFO", 45 | "handlers": ["console", "info_file_handler", "error_file_handler"] 46 | } 47 | } 48 | 49 | def setup_logging(): 50 | path = os.path.join(os.path.dirname(__file__), 'logconfig.json') 51 | try: 52 | with open(path, 'rt') as f: 53 | config = json.load(f) 54 | except Exception as e: 55 | logging.exception('Could not load logconfig.json. 
Loading default logging configuration.') 56 | config = default_config 57 | logging.config.dictConfig(config) 58 | -------------------------------------------------------------------------------- /ee_ppipe/download.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import argparse 4 | import os 5 | import requests 6 | import json 7 | import sys 8 | import logging 9 | import datetime 10 | import csv 11 | from retrying import retry 12 | 13 | 14 | ASSET_URL = 'https://api.planet.com/data/v1/item-types/{}/items/{}/assets/' 15 | SEARCH_URL = 'https://api.planet.com/data/v1/quick-search' 16 | 17 | f=open("./pkey.csv") 18 | for row in csv.reader(f): 19 | #print(str(row).strip("[']")) 20 | os.environ['PLANET_API_KEY']=str(row).strip("[']") 21 | 22 | # set up auth 23 | SESSION = requests.Session() 24 | SESSION.auth = (os.environ.get('PLANET_API_KEY'), '') 25 | 26 | BASE_QUERY_STRING = '''{"config": [{"field_name": "geometry", "config": {"type": "Polygon", "coordinates": %s}, "type": "GeometryFilter"}, {"field_name": "acquired", "config": {"gte": "%s", "lte": "%s"}, "type": "DateRangeFilter"}], "type": "AndFilter"}''' 27 | SAT_FILTER_QUERY_DICT = { "field_name": "satellite_id", "config": [], "type": "StringInFilter"} 28 | 29 | class RateLimitException(Exception): 30 | pass 31 | 32 | 33 | def handle_page(page): 34 | return [item['id'] for item in page['features']] 35 | 36 | 37 | def retry_if_rate_limit_error(exception): 38 | """Return True if we should retry (in this case when it's a rate_limit 39 | error), False otherwise""" 40 | return isinstance(exception, RateLimitException) 41 | 42 | 43 | def check_status(result, msg=None, text=True): 44 | 45 | if text: 46 | logging.info('Response: {} - {}'.format(result.status_code, result.text)) 47 | else: 48 | # Logging option for when the result is a massive geotif 49 | logging.info('Response: {}'.format(result.status_code)) 50 | 51 | if result.status_code == 429: 52 | error_msg = 'Error code 429: rate limit exceeded - retrying' 53 | print(error_msg) 54 | raise RateLimitException('Rate limit error') 55 | elif result.status_code == 401: 56 | error_msg = "Error code 401: the API Key you provided is invalid, or does not have the required permissions for this AOI or TOI.\n 1. Ensure your API key is stored in your *nix environment ('export PLANET_API_KEY=Your_API_Key'), or passed as an argument in the command ('--key Your_API_Key')\n 2. Check that it is correct at http://planet.com/account\n 3. 
Confirm you have the right permissions to access this AOI and TOI with your Account Manager"
57 | print(error_msg)
58 | sys.exit(1)
59 | elif result.status_code == 400:
60 | error_msg = 'Error code {}: {}'.format(result.status_code, result.text)
61 | print(error_msg)
62 | sys.exit(1)
63 | else:
64 | if msg:
65 | print(msg)
66 | return True
67 | 
68 | 
69 | def parse_bbox_args(args):
70 | assert args.bbox
71 | 
72 | def parse_date(date_str):
73 | return datetime.datetime.strptime(date_str, '%Y-%m-%d').isoformat()
74 | 
75 | if args.start_date:
76 | start = parse_date(args.start_date)
77 | else:
78 | raise Exception('Use of --bbox argument requires use of --start-date argument')
79 | 
80 | if args.end_date:
81 | end = parse_date(args.end_date)
82 | else:
83 | end = datetime.datetime.utcnow().isoformat()
84 | 
85 | # tack on a Z for full compliance with RFC 3339
86 | if not start.endswith('Z'):
87 | start += 'Z'
88 | if not end.endswith('Z'):
89 | end += 'Z'
90 | 
91 | return args.bbox, start, end
92 | 
93 | 
94 | def bbox_to_coords(bbox):
95 | xmin, ymin, xmax, ymax = [float(i) for i in bbox]
96 | coords = [[[xmin, ymax], [xmin, ymin], [xmax, ymin],
97 | [xmax, ymax], [xmin, ymax]]]
98 | return coords
99 | 
100 | 
101 | def build_bbox_query(bbox, start, end, sat_list=None):
102 | coords = bbox_to_coords(bbox)
103 | query = BASE_QUERY_STRING % (coords, start, end)
104 | query = json.loads(query)
105 | 
106 | if sat_list:
107 | SAT_FILTER_QUERY_DICT['config'] = sat_list
108 | query['config'].append(SAT_FILTER_QUERY_DICT)
109 | return query
110 | 
111 | 
112 | def filter_ids_by_sat(id_list, sat_list):
113 | '''For when we can't do server-side filtering'''
114 | return [img_id for sat_id in sat_list for img_id in id_list
115 | if sat_id in img_id]
116 | 
117 | 
118 | def build_query(args, sat_list=None):
119 | if args.query:
120 | # load query from json file
121 | try:
122 | with open(args.query, 'r') as fp:
123 | query = json.load(fp)
124 | except:
125 | print("Error: could not load JSON file " + args.query + ". 
Please check it exists, and that the syntax is valid at http://jsonlint.com/.") 126 | raise 127 | 128 | # build bbox query and add sat_list filter if supplied 129 | else: 130 | bbox, start, end = parse_bbox_args(args) 131 | 132 | if sat_list: 133 | query = build_bbox_query(bbox, start, end, sat_list) 134 | else: 135 | query = build_bbox_query(bbox, start, end) 136 | 137 | return query 138 | 139 | 140 | @retry( 141 | wait_exponential_multiplier=1000, 142 | wait_exponential_max=10000, 143 | retry_on_exception=retry_if_rate_limit_error, 144 | stop_max_attempt_number=5) 145 | def run_search(search_request): 146 | print('Searching...') 147 | 148 | logging.info('Request: {} - {}'.format(SEARCH_URL, search_request)) 149 | 150 | result = SESSION.post(SEARCH_URL, json=search_request) 151 | 152 | check_status(result) 153 | 154 | page = result.json() 155 | final_list = handle_page(page) 156 | 157 | while page['_links'].get('_next') is not None: 158 | page_url = page['_links'].get('_next') 159 | page = SESSION.get(page_url).json() 160 | ids = handle_page(page) 161 | final_list += ids 162 | 163 | return [fid for fid in final_list] 164 | 165 | 166 | @retry( 167 | wait_exponential_multiplier=1000, 168 | wait_exponential_max=10000, 169 | retry_on_exception=retry_if_rate_limit_error, 170 | stop_max_attempt_number=5) 171 | def activate(item_id, item_type, asset_type): 172 | url = ASSET_URL.format(item_type, item_id) 173 | logging.info('Request: {}'.format(url)) 174 | 175 | result = SESSION.get(url) 176 | 177 | check_status(result) 178 | 179 | try: 180 | status = result.json()[asset_type]['status'] 181 | if status == 'active': 182 | print('{} {} {}: already active'.format(item_id, asset_type, item_type)) 183 | return False 184 | else: 185 | item_activation_url = result.json()[asset_type]['_links']['activate'] 186 | 187 | result = SESSION.post(item_activation_url) 188 | 189 | msg = '{} {} {}: started activation'.format(item_id, item_type, asset_type) 190 | return check_status(result, msg) 191 | except KeyError: 192 | print('Could not activate - asset type \'{}\' not found for {}'.format(asset_type, item_id)) 193 | return False 194 | 195 | 196 | @retry( 197 | wait_exponential_multiplier=1000, 198 | wait_exponential_max=10000, 199 | retry_on_exception=retry_if_rate_limit_error, 200 | stop_max_attempt_number=5) 201 | def check_activation(item_id, item_type, asset_type): 202 | url = ASSET_URL.format(item_type, item_id) 203 | logging.info('Request: {}'.format(url)) 204 | result = SESSION.get(url) 205 | 206 | check_status(result) 207 | 208 | try: 209 | status = result.json()[asset_type]['status'] 210 | msg = '{} {} {}: {}'.format(item_id, item_type, asset_type, status) 211 | print(msg) 212 | 213 | if status == 'active': 214 | return True 215 | else: 216 | return False 217 | except KeyError: 218 | print('Could not check activation status - asset type \'{}\' not found for {}'.format(asset_type, item_id)) 219 | return False 220 | 221 | 222 | @retry( 223 | wait_exponential_multiplier=1000, 224 | wait_exponential_max=10000, 225 | retry_on_exception=retry_if_rate_limit_error, 226 | stop_max_attempt_number=5) 227 | def download(url, path, item_id, asset_type, overwrite): 228 | fname = '{}_{}.tif'.format(item_id, asset_type) 229 | local_path = os.path.join(path, fname) 230 | 231 | if not overwrite and os.path.exists(local_path): 232 | print('File {} exists - skipping ...'.format(local_path)) 233 | else: 234 | print('Downloading file to {}'.format(local_path)) 235 | 236 | logging.info('Request: {}'.format(url)) 237 | # 
memory-efficient download, per
238 | # stackoverflow.com/questions/16694907/how-to-download-large-file-in-python-with-requests-py
239 | result = requests.get(url, stream=True)
240 | 
241 | if check_status(result, text=False):
242 | f = open(local_path, 'wb')
243 | for chunk in result.iter_content(chunk_size=512 * 1024):
244 | # filter out keep-alive new chunks
245 | if chunk:
246 | f.write(chunk)
247 | f.close()
248 | 
249 | return True
250 | 
251 | 
252 | def process_activation(func, id_list, item_type, asset_type, activate_or_check):
253 | results = []
254 | 
255 | for item_id in id_list:
256 | result = func(item_id, item_type, asset_type)
257 | results.append(result)
258 | 
259 | if activate_or_check == 'activate':
260 | msg = 'Requested activation for {} of {} items'
261 | print(msg.format(results.count(True), len(results)))
262 | 
263 | if activate_or_check == 'check':
264 | msg = '{} of {} items are active'
265 | print(msg.format(results.count(True), len(results)))
266 | 
267 | return results
268 | 
269 | 
270 | def process_download(path, id_list, item_type, asset_type, overwrite):
271 | results = []
272 | 
273 | # check on directory structure
274 | if not os.path.exists(path):
275 | raise IOError('Directory {} does not exist - please ensure that it does.'.format(path))
276 | 
277 | # now start downloading each file
278 | for item_id in id_list:
279 | url = ASSET_URL.format(item_type, item_id)
280 | logging.info('Request: {}'.format(url))
281 | result = SESSION.get(url)
282 | 
283 | check_status(result)
284 | 
285 | try:
286 | if result.json()[asset_type]['status'] == 'active':
287 | download_url = result.json()[asset_type]['location']
288 | result = download(download_url, path, item_id, asset_type, overwrite)
289 | else:
290 | result = False
291 | except KeyError:
292 | print('Could not check activation status - asset type \'{}\' not found for {}'.format(asset_type, item_id))
293 | result = False
294 | 
295 | 
296 | results.append(result)
297 | 
298 | msg = 'Successfully downloaded {} of {} files to {}. {} were not active.'
299 | print(msg.format(results.count(True), len(results), path, results.count(False)))
300 | 
301 | return results
302 | 
303 | 
304 | if __name__ == '__main__':
305 | parser = argparse.ArgumentParser()
306 | parser.add_argument('--idlist', help='Location of file containing image ids (one per line) to process')
307 | parser.add_argument('--query', help='Path to json file containing query')
308 | parser.add_argument('--search', help='Search for images', action='store_true')
309 | parser.add_argument('--bbox', help='Bounding box for query in geographic (latlon) coordinates',
310 | metavar=('XMIN', 'YMIN', 'XMAX', 'YMAX'), nargs=4)
311 | parser.add_argument('--activate', help='Activate assets', action='store_true')
312 | parser.add_argument('--check', help='Check activation status', action='store_true')
313 | parser.add_argument('--download', help='Path where downloaded files should be stored')
314 | parser.add_argument('--overwrite', help='Overwrite existing downloads', action='store_true')
315 | parser.add_argument('--start-date', help='Start date for query (e.g. 2016-01-01)')
316 | parser.add_argument('--end-date', help='End date for query (e.g. 2016-04-01) - optional: uses current date if not supplied along with start date')
317 | parser.add_argument('--satlist', help='Location of file containing satellite ids (one per line) to use for filter')
318 | parser.add_argument('--sats', help='Alternative to --satlist, no need for an external file. 
#dovecrush', nargs='*') 319 | parser.add_argument('--key', help='Set API key') 320 | parser.add_argument('--debug', help='Debug mode', action='store_true') 321 | parser.add_argument('item', help='Item type (e.g. REOrthoTile or PSOrthoTile)') 322 | parser.add_argument('asset', help='Asset type (e.g. visual, analytic, analytic_xml)') 323 | 324 | args = parser.parse_args() 325 | 326 | # override API key taken from environment (possibly missing) 327 | if args.key: 328 | SESSION.auth = (args.key, '') 329 | 330 | # ensure there's a way to retrieve a list of image ids 331 | if not args.idlist and not args.query and not args.bbox: 332 | parser.error('Error: please supply an --idlist, --query, or --bbox argument.') 333 | 334 | # set log level if --debug 335 | if args.debug: 336 | logging.basicConfig(level=logging.INFO) 337 | else: 338 | logging.basicConfig(level=logging.ERROR) 339 | 340 | # handle list of sat ids if there is one 341 | if args.satlist and args.sats: 342 | raise Exception('Error: please specify only one of --satlist or --sats.') 343 | elif args.satlist: 344 | with open(args.satlist) as f: 345 | sat_list = [i.strip() for i in f.readlines()] 346 | elif args.sats: 347 | sat_list = args.sats 348 | else: 349 | sat_list = None 350 | 351 | # load list of image ids, filtering by sat id if appropriate 352 | if args.idlist: 353 | with open(args.idlist) as f: 354 | id_list = [i.strip() for i in f.readlines()] 355 | 356 | if sat_list: 357 | id_list = filter_ids_by_sat(id_list, sat_list) 358 | 359 | # otherwise, load query from file or build from scratch 360 | else: 361 | if sat_list: 362 | query = build_query(args, sat_list) 363 | else: 364 | query = build_query(args) 365 | 366 | # if we don't have a list of image ids already, create and run 367 | # the final query! 368 | if not args.idlist: 369 | # Create full search request object 370 | search_payload = {'item_types': [args.item], 'filter': query} 371 | 372 | id_list = run_search(search_payload) 373 | 374 | # Special case to do sat id filtering for a "standard" user-supplied 375 | # JSON query. This is needed because we don't want to try to parse/modify 376 | # the user's original query. 377 | if args.query and sat_list: 378 | id_list = filter_ids_by_sat(id_list, sat_list) 379 | 380 | # ok we've got our list of image ids, let's do some stuff 381 | print('%d available images' % len(id_list)) 382 | 383 | # nothing more to be done if we're just searching 384 | if args.search: 385 | pass 386 | 387 | # start activation for all images in id_list 388 | elif args.activate: 389 | results = process_activation(activate, id_list, args.item, 390 | args.asset, 'activate') 391 | 392 | # check activation status 393 | elif args.check: 394 | results = process_activation(check_activation, id_list, args.item, 395 | args.asset, 'check') 396 | 397 | # download everything 398 | elif args.download: 399 | results = process_download(args.download, id_list, args.item, 400 | args.asset, args.overwrite) 401 | 402 | else: 403 | parser.error('Error: no action supplied. Please check help (--help) or revise command.') 404 | 405 | 406 | '''Sample commands, for testing. 
407 | python download.py --query redding.json --search PSScene3Band visual 408 | python download.py --query redding.json --check PSScene3Band visual 409 | python download.py --query redding.json --activate PSScene3Band visual 410 | python download.py --query redding.json --download /tmp PSScene3Band visual 411 | python download.py --idlist ids_small.txt --check PSScene3Band visual 412 | python download.py --idlist ids_small.txt --activate PSScene3Band visual 413 | python download.py --idlist ids_small.txt --download /tmp PSScene3Band visual 414 | python download.py --search --bbox -80.209624 25.7777338 -80 26 PSOrthoTile analytic --start-date 2016-01-01 415 | python download.py --check --bbox -80.209624 25.7777338 -80 26 PSOrthoTile analytic --start-date 2016-01-01 416 | python download.py --check --bbox -80.209624 25.7777338 -80 26 PSOrthoTile analytic --start-date 2016-08-01 --end-date 2016-12-31 417 | python download.py --activate --bbox -80.209624 25.7777338 -80 26 PSOrthoTile analytic --start-date 2016-01-01 418 | python download.py --download ~/Downloads/ --bbox -80.209624 25.7777338 -80 26 PSOrthoTile analytic --start-date 2016-01-01 419 | python download.py --query redding.json --search PSScene3Band visual --satlist sats_redding.txt 420 | python download.py --query redding.json --search PSScene3Band visual --sats 0c2b 0c19 421 | python download.py --query redding.json --sats 0c2b 0c19 --search PSScene3Band visual 422 | python download.py --search --bbox -80.209624 25.7777338 -80 26 PSOrthoTile analytic --start-date 2016-01-01 --sats 0e0e 0c38 423 | python download.py --check --bbox -80.209624 25.7777338 -80 26 PSOrthoTile analytic --start-date 2016-01-01 --sats 0e0e 0c38 424 | python download.py --check --bbox -80.209624 25.7777338 -80 26 PSOrthoTile analytic --start-date 2016-08-01 --end-date 2016-12-31 --sats 0e0e 0c38 425 | python download.py --search --bbox -80.209624 25.7777338 -80 26 PSOrthoTile analytic --start-date 2016-01-01 --satlist sats_miami.txt 426 | python download.py --check --bbox -80.209624 25.7777338 -80 26 PSOrthoTile analytic --start-date 2016-01-01 --sats sats_miami.txt 427 | python download.py --check --bbox -80.209624 25.7777338 -80 26 PSOrthoTile analytic --start-date 2016-08-01 --end-date 2016-12-31 --sats 0e0e 0c38 428 | python download.py --activate --bbox -80.209624 25.7777338 -80 26 PSOrthoTile analytic --start-date 2016-01-01 --sats sats_miami.txt 429 | python download.py --activate --bbox -80.209624 25.7777338 -80 26 PSOrthoTile analytic --start-date 2016-08-01 --end-date 2016-12-31 --sats 0e0e 0c38 430 | python download.py --activate --bbox -80.209624 25.7777338 -80 26 PSOrthoTile analytic --start-date 2016-01-01 431 | python download.py --download ~/Downloads/ --bbox -80.209624 25.7777338 -80 26 PSOrthoTile analytic --start-date 2016-01-01 432 | ''' 433 | -------------------------------------------------------------------------------- /ee_ppipe/ee_auth.py: -------------------------------------------------------------------------------- 1 | import os 2 | import ee 3 | import subprocess 4 | import sys 5 | from ee import oauth 6 | import re 7 | import getpass 8 | import clipboard 9 | import time 10 | auth_url = ee.oauth.get_authorization_url() 11 | clipboard.copy(auth_url) 12 | print("Authentication link copied: Go to browser and click paste") 13 | time.sleep(10) 14 | print("Enter your GEE API Token") 15 | password=str(getpass.getpass()) 16 | auth_code=str(password) 17 | token = ee.oauth.request_token(auth_code) 18 | ee.oauth.write_token(token) 19 | 
print('\nSuccessfully saved authorization token.') 20 | 21 | -------------------------------------------------------------------------------- /ee_ppipe/ee_ls.py: -------------------------------------------------------------------------------- 1 | import ee 2 | import ee.mapclient 3 | import subprocess 4 | import csv 5 | import os 6 | 7 | ##initialize earth engine 8 | ee.Initialize() 9 | 10 | def lst(location, typ=None, items=None,f=None): 11 | if items > 0: 12 | if typ=='print': 13 | for line in subprocess.check_output("earthengine ls"+" "+'"'+location+'"'+" --max_items "+str(items),shell=True).split('\n'): 14 | print(line.replace(location,'').strip("/")) 15 | elif typ =='report': 16 | os.system("earthengine ls"+" "+location+" --max_items "+str(items)+" >>"+f+"filelist.txt") 17 | else: 18 | if typ=='print': 19 | for line in subprocess.check_output(["earthengine ls"+" "+location],shell=True).split('\n'): 20 | print(line.replace(location,'').strip("/")) 21 | elif typ =='report': 22 | os.system("earthengine ls"+" "+location+" >>"+f+"filelist.txt") 23 | -------------------------------------------------------------------------------- /ee_ppipe/ee_ppipe.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env python 2 | 3 | import argparse 4 | import logging 5 | import os 6 | import ee 7 | import subprocess 8 | from gooey import Gooey, GooeyParser 9 | from gooey import GooeyParser 10 | from batch_copy import copy 11 | from batch_remover import delete 12 | from batch_uploading import upload 13 | from config import setup_logging 14 | from message import display_message 15 | from query import taskquery 16 | from batch_mover import mover 17 | from cleanup import cleanout 18 | from collectionprop import collprop 19 | from taskreport import genreport 20 | from acl_changer import access 21 | from cli_aoi2json import aoijson 22 | from cli_metadata import metadata 23 | from ee_ls import lst 24 | import getpass 25 | import csv 26 | from ee import oauth 27 | import re 28 | import time 29 | import clipboard 30 | 31 | def planet_key_entry(): 32 | print("Enter your Planet API Key") 33 | password=getpass.getpass() 34 | with open('./pkey.csv','w') as completed: 35 | writer=csv.writer(completed,delimiter=',',lineterminator='\n') 36 | writer.writerow([password]) 37 | def planet_key_from_parser(args): 38 | planet_key_entry() 39 | def ee_auth_entry(): 40 | auth_url = ee.oauth.get_authorization_url() 41 | clipboard.copy(auth_url) 42 | print("Authentication link copied: Go to browser and click paste") 43 | time.sleep(10) 44 | print("Enter your GEE API Token") 45 | password=str(getpass.getpass()) 46 | auth_code=str(password) 47 | token = ee.oauth.request_token(auth_code) 48 | ee.oauth.write_token(token) 49 | print('\nSuccessfully saved authorization token.') 50 | def ee_user_from_parser(args): 51 | ee_auth_entry() 52 | def create_from_parser(args): 53 | typ=str(args.typ) 54 | ee_path=str(args.path) 55 | os.system("earthengine create "+typ+" "+ee_path) 56 | def aoijson_from_parser(args): 57 | aoijson(start=args.start,end=args.end,cloud=args.cloud,inputfile=args.inputfile,geo=args.geo,loc=args.loc) 58 | def metadata_from_parser(args): 59 | metadata(asset=args.asset,mf=args.mf,mfile=args.mfile,errorlog=args.errorlog) 60 | 61 | def activatepl_from_parser(args): 62 | aoi_json=str(args.aoi) 63 | action_planet=str(args.action) 64 | asset_type=str(args.asst) 65 | try: 66 | os.system("python download.py --query "+args.aoi+" --"+args.action+" "+args.asst) 67 | except Exception: 68 
| print(' ') 69 | def downloadpl_from_parser(args): 70 | aoi_json=str(args.aoi) 71 | action_planet=str(args.action) 72 | planet_pathway=str(args.pathway) 73 | asset_type=str(args.asst) 74 | try: 75 | os.system("python download.py --query "+args.aoi+" --"+args.action+" "+args.pathway+" "+asset_type) 76 | except Exception: 77 | print(' ') 78 | 79 | def cancel_all_running_tasks(): 80 | logging.info('Attempting to cancel all running tasks') 81 | running_tasks = [task for task in ee.data.getTaskList() if task['state'] == 'RUNNING'] 82 | for task in running_tasks: 83 | ee.data.cancelTask(task['id']) 84 | logging.info('Cancel all request completed') 85 | 86 | def cancel_all_running_tasks_from_parser(args): 87 | cancel_all_running_tasks() 88 | 89 | def delete_collection_from_parser(args): 90 | delete(args.id) 91 | 92 | def upload_from_parser(args): 93 | upload(user=args.user, 94 | source_path=args.source, 95 | destination_path=args.dest, 96 | metadata_path=args.metadata, 97 | nodata_value=args.nodata) 98 | def ft_from_parser(args): 99 | input_file=str(args.i) 100 | output_ft=str(args.o) 101 | os.system("ogr2ft.py -i "+input_file+" -o "+output_ft) 102 | def taskquery_from_parser(args): 103 | taskquery(destination=args.destination) 104 | def mover_from_parser(args): 105 | mover(assetpath=args.assetpath,destinationpath=args.finalpath) 106 | def copy_from_parser(args): 107 | copy(initial=args.initial,final=args.final) 108 | def access_from_parser(args): 109 | access(mode=args.mode,asset=args.asset,user=args.user) 110 | def cleanout_from_parser(args): 111 | cleanout(args.dirpath) 112 | def tasks(): 113 | tasklist=subprocess.check_output("earthengine task list",shell=True) 114 | taskready=tasklist.count("READY") 115 | taskrunning=tasklist.count("RUNNING") 116 | taskfailed=tasklist.count("FAILED") 117 | print("Running Tasks:",taskrunning) 118 | print("Ready Tasks:",taskready) 119 | print("Failed Tasks:",taskfailed) 120 | def tasks_from_parser(args): 121 | tasks() 122 | def genreport_from_parser(args): 123 | genreport(report=args.r) 124 | def collprop_from_parser(args): 125 | collprop(imcoll=args.coll,prop=args.p) 126 | def lst_from_parser(args): 127 | lst(location=args.location,typ=args.type,items=args.items,f=args.folder) 128 | spacing=" " 129 | from gooey import Gooey, GooeyParser 130 | @Gooey(dump_build_config=True, program_name="Planet and EE Pipeline") 131 | def main(args=None): 132 | setup_logging() 133 | parser = GooeyParser(description='Planet and EE Pipeline') 134 | subparsers = parser.add_subparsers() 135 | ##Planet Assets Tools 136 | parser_planet_key = subparsers.add_parser('planet_key', help='Enter your Planet API Key') 137 | parser_planet_key.set_defaults(func=planet_key_from_parser) 138 | 139 | parser_aoijson=subparsers.add_parser('aoijson',help='Convert KML/SHP/WKT/GeoJSON file to aoi.json file with structured query for use with Planet API 1.0') 140 | parser_aoijson.add_argument('--start', default='Start date in YYYY-MM-DD',help='Start date in YYYY-MM-DD?',widget='DateChooser') 141 | parser_aoijson.add_argument('--end', default='End date in YYYY-MM-DD',help='End date in YYYY-MM-DD?',widget='DateChooser') 142 | parser_aoijson.add_argument('--cloud', default='Maximum Cloud Cover(0-1)',help='Maximum Cloud Cover(0-1) representing 0-100') 143 | parser_aoijson.add_argument('--inputfile',default='Choose a KML/SHP/geojson/WKT file or Landsat WRS',choices=['KML', 'SHP','GJSON','WKT','WRS'],help='Choose a KML/SHP/geojson/WKT file or Landsat WRS') 144 | parser_aoijson.add_argument('--geo',
default='map.geojson/aoi.kml/aoi.shp/aoi.wkt file or 6 digit WRS PathRow',help='map.geojson/aoi.kml/aoi.shp/aoi.wkt file',widget="MultiFileChooser") 145 | parser_aoijson.add_argument('--loc', help='Location where aoi.json file is to be stored',widget="MultiDirChooser") 146 | parser_aoijson.set_defaults(func=aoijson_from_parser) 147 | 148 | parser_activatepl=subparsers.add_parser('activatepl',description='Tool to query and/or activate Planet Assets') 149 | parser_activatepl.add_argument('--aoi',default='Choose JSON file to be used with Planet API/Created Earlier',help='Choose JSON file created earlier',widget="MultiFileChooser") 150 | parser_activatepl.add_argument('--action',choices=['check', 'activate'],help='Check/activate') 151 | parser_activatepl.add_argument('--asst',choices=['PSOrthoTile analytic','PSOrthoTile analytic_dn','PSOrthoTile visual','PSScene4Band analytic','PSScene4Band analytic_dn','PSScene3Band analytic','PSScene3Band analytic_dn','PSScene3Band visual','REOrthoTile analytic','REOrthoTile visual'],help='PSOrthoTile analytic,PSOrthoTile analytic_dn,PSOrthoTile visual,PSScene4Band analytic,PSScene4Band analytic_dn,PSScene3Band analytic,PSScene3Band analytic_dn,PSScene3Band visual,REOrthoTile analytic,REOrthoTile visual') 152 | parser_activatepl.set_defaults(func=activatepl_from_parser) 153 | 154 | parser_downloadpl=subparsers.add_parser('downloadpl',help='Tool to download Planet Assets') 155 | parser_downloadpl.add_argument('--aoi', default='Choose JSON file to be used with Planet API/Created Earlier',help='Choose JSON file created earlier',widget="MultiFileChooser") 156 | parser_downloadpl.add_argument('--action', default='download',help='choose download') 157 | parser_downloadpl.add_argument('--asst',choices=['PSOrthoTile analytic','PSOrthoTile analytic_dn','PSOrthoTile visual','PSScene4Band analytic','PSScene4Band analytic_dn','PSScene3Band analytic','PSScene3Band analytic_dn','PSScene3Band visual','REOrthoTile analytic','REOrthoTile visual','PSOrthoTile analytic_xml','PSOrthoTile analytic_dn_xml','PSOrthoTile visual_xml','PSScene4Band analytic_xml','PSScene4Band analytic_dn_xml','PSScene3Band analytic_xml','PSScene3Band analytic_dn_xml','PSScene3Band visual_xml','REOrthoTile analytic_xml','REOrthoTile visual_xml'],help='PSOrthoTile analytic,PSOrthoTile analytic_dn,PSOrthoTile visual,PSScene4Band analytic,PSScene4Band analytic_dn,PSScene3Band analytic,PSScene3Band analytic_dn,PSScene3Band visual,REOrthoTile analytic,REOrthoTile visual') 158 | parser_downloadpl.add_argument('--pathway',default='Folder where you want to save assets',help='Folder Path where PlanetAssets are saved example ./PlanetScope ./RapidEye',widget="MultiDirChooser") 159 | parser_downloadpl.set_defaults(func=downloadpl_from_parser) 160 | 161 | parser_metadata=subparsers.add_parser('metadata',help='Tool to tabulate and convert all metadata files from Planet or Digital Globe Assets') 162 | parser_metadata.add_argument('--asset', default='PS',choices=['PSO','PSO_DN','PSO_V','PS4B','PS4B_DN','PS3B','PS3B_DN','PS3B_V','REO','REO_V','DGMS','DGP'],help='RapidEye/PlantScope/DigitalGlobe MS/DigitalGlobe Pan(RE/PS/DGMS/DGP)?') 163 | parser_metadata.add_argument('--mf', default='Metadata folder',help='Metadata folder',widget="MultiDirChooser") 164 | parser_metadata.add_argument('--mfile',default='Metadata filename browse and create file and click open',help='Metadata filename to be exported with Path.csv',widget="MultiFileChooser") 165 | parser_metadata.add_argument('--errorlog',default='Error log browse and 
create file and click open',help='Errorlog to be exported along with Path.csv',widget="MultiFileChooser") 166 | parser_metadata.set_defaults(func=metadata_from_parser) 167 | 168 | ##Earth Engine Tools 169 | parser_ee_user = subparsers.add_parser('ee_user', help='Get Earth Engine API Key & paste it back to the command line/shell to change user') 170 | parser_ee_user.set_defaults(func=ee_user_from_parser) 171 | 172 | parser_create = subparsers.add_parser('create',help='Allows the user to create an asset collection or folder in Google Earth Engine') 173 | parser_create.add_argument('--typ', help='Specify type: collection or folder', required=True) 174 | parser_create.add_argument('--path', help='This is the path for the earth engine asset to be created; full path is needed e.g. users/johndoe/collection', required=True) 175 | parser_create.set_defaults(func=create_from_parser) 176 | 177 | parser_upload = subparsers.add_parser('upload', help='Batch Asset Uploader to Earth Engine.') 178 | required_named = parser_upload.add_argument_group('Required named arguments.') 179 | required_named.add_argument('-u', '--user', help='Google account name (gmail address).', required=True) 180 | required_named.add_argument('--source', help='Path to the directory with images for upload.', required=True) 181 | required_named.add_argument('--dest', help='Destination. Full path for upload to Google Earth Engine, e.g. users/pinkiepie/myponycollection', required=True) 182 | optional_named = parser_upload.add_argument_group('Optional named arguments') 183 | optional_named.add_argument('-m', '--metadata', help='Path to CSV with metadata.') 184 | optional_named.add_argument('--nodata', type=int, help='The value to burn into the raster as NoData (missing data)') 185 | parser_upload.set_defaults(func=upload_from_parser) 186 | 187 | parser_lst = subparsers.add_parser('lst',help='List assets in a folder/collection or write as text file') 188 | parser_lst.add_argument('--location', help='This is the location of your folder/collection', required=True) 189 | parser_lst.add_argument('--type', help='Whether you want the list to be printed or output as text', required=True) 190 | parser_lst.add_argument('--items', help="Number of items to list") 191 | parser_lst.add_argument('--folder',help="Folder location for report to be exported") 192 | parser_lst.set_defaults(func=lst_from_parser) 193 | 194 | parser_tasks=subparsers.add_parser('tasks',help='Queries currently running, enqueued and failed tasks') 195 | parser_tasks.set_defaults(func=tasks_from_parser) 196 | 197 | parser_taskquery=subparsers.add_parser('taskquery',help='Queries currently running, enqueued and failed ingestions and uploaded assets') 198 | parser_taskquery.add_argument('--destination',default='users/folder/collection',help='Full path to asset where you are uploading files') 199 | parser_taskquery.set_defaults(func=taskquery_from_parser) 200 | 201 | parser_genreport=subparsers.add_parser('report',help='Create a report of all tasks and exports it to a CSV file') 202 | parser_genreport.add_argument('--r',default='Folder Path where the reports will be saved',help='Folder Path where the reports will be saved',widget="MultiDirChooser") 203 | parser_genreport.set_defaults(func=genreport_from_parser) 204 | 205 | parser_cancel = subparsers.add_parser('cancel', help='Cancel all running tasks') 206 | parser_cancel.set_defaults(func=cancel_all_running_tasks_from_parser) 207 | 208 | parser_mover=subparsers.add_parser('mover',help='Moves all assets from one collection to another') 209 |
parser_mover.add_argument('--assetpath',default='users/folder/collection1',help='Existing path of assets') 210 | parser_mover.add_argument('--finalpath',default='users/folder/collection2',help='New path for assets') 211 | parser_mover.set_defaults(func=mover_from_parser) 212 | 213 | parser_copy=subparsers.add_parser('copy',help='Copies all assets from one collection to another: Including copying from other users if you have read permission to their assets') 214 | parser_copy.add_argument('--initial',default='users/folder/collection1',help='Existing path of assets') 215 | parser_copy.add_argument('--final',default='users/folder/collection2',help='New path for assets') 216 | parser_copy.set_defaults(func=copy_from_parser) 217 | 218 | parser_collprop=subparsers.add_parser('collprop',help='Sets Overall Properties for Image Collection') 219 | parser_collprop.add_argument('--coll',default='users/folder/collection',help='Path of Image Collection') 220 | parser_collprop.add_argument('--p',default='system:description=Description',help='system:description=Description|system:title=title') 221 | parser_collprop.set_defaults(func=collprop_from_parser) 222 | 223 | parser_ft = subparsers.add_parser('access',help='Sets Permissions for Images, Collection or all assets in EE Folder Example: python ee_permissions.py --mode "folder" --asset "users/john/doe" --user "jimmy@doe.com:R"') 224 | parser_ft.add_argument('--mode', default='folder|collection|image',choices=['folder','collection','image'],help='This lets you select if you want to change permission or folder/collection/image', required=True) 225 | parser_ft.add_argument('--asset', default='users/folder/collection',help='This is the path to the earth engine asset whose permission you are changing folder/collection/image', required=True) 226 | parser_ft.add_argument('--user', default='john@doe.com:R',help="""This is the email address to whom you want to give read or write permission Usage: "john@doe.com:R" or "john@doe.com:W" R/W refers to read or write permission""", required=True) 227 | parser_ft.set_defaults(func=access_from_parser) 228 | 229 | parser_delete = subparsers.add_parser('delete', help='Deletes collection and all items inside. Supports Unix-like wildcards.') 230 | parser_delete.add_argument('id', default='users/folder/collection',help='Full path to asset for deletion. 
Recursively removes all folders, collections and images.') 231 | parser_delete.set_defaults(func=delete_collection_from_parser) 232 | 233 | parser_ft = subparsers.add_parser('convert2ft',help='Uploads a given feature collection to Google Fusion Table.') 234 | parser_ft.add_argument('--i', help='input feature source (KML, SHP, SpatiLite, etc.)', required=True,widget="MultiFileChooser",default='input feature source (KML, SHP, SpatiLite, etc.)') 235 | parser_ft.add_argument('--o', help='output Fusion Table name', required=True) 236 | parser_ft.add_argument('--add_missing', help='add missing features from the last inserted feature index', action='store_true', required=False, default=False) 237 | parser_ft.set_defaults(func=ft_from_parser) 238 | 239 | parser_cleanout=subparsers.add_parser('cleanout',help='Clear folders with datasets from earlier downloaded') 240 | parser_cleanout.add_argument('--dirpath',help='Folder you want to delete after all processes have been completed',widget="MultiDirChooser") 241 | parser_cleanout.set_defaults(func=cleanout_from_parser) 242 | 243 | args = parser.parse_args() 244 | 245 | ee.Initialize() 246 | args.func(args) 247 | display_message() 248 | def here_is_smore(): 249 | pass 250 | 251 | 252 | if __name__ == '__main__': 253 | main() 254 | -------------------------------------------------------------------------------- /ee_ppipe/ee_ppipe_gui.bat: -------------------------------------------------------------------------------- 1 | python ee_ppipe.py -------------------------------------------------------------------------------- /ee_ppipe/gee_assets_errors.log: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/samapriya/Planet-GEE-Pipeline-GUI/aee6fc76c17cf6c6ce225572465f876fd14b0079/ee_ppipe/gee_assets_errors.log -------------------------------------------------------------------------------- /ee_ppipe/gee_assets_info.log: -------------------------------------------------------------------------------- 1 | 2017-06-08 12:27:47,917 :: oauth2client.transport :: INFO :: Attempting refresh to obtain initial access_token 2 | 2017-06-08 12:27:47,917 :: oauth2client.client :: INFO :: Refreshing access_token 3 | 2017-06-08 12:28:25,092 :: oauth2client.transport :: INFO :: Attempting refresh to obtain initial access_token 4 | 2017-06-08 12:28:25,092 :: oauth2client.client :: INFO :: Refreshing access_token 5 | 2017-06-08 12:29:16,198 :: root :: WARNING :: Collection users/samapriya/dgbuffer already exists 6 | 2017-06-08 12:29:17,378 :: root :: INFO :: Processing image 1 out of 3: C:\planet_demo\ps\229190_1552719_2016-08-26_0c78_analytic.tif 7 | 2017-06-08 12:33:00,344 :: root :: INFO :: Processing image 2 out of 3: C:\planet_demo\ps\229190_1552818_2016-08-26_0c78_analytic.tif 8 | 2017-06-08 12:36:19,427 :: root :: INFO :: Processing image 3 out of 3: C:\planet_demo\ps\229190_1552820_2016-08-26_0c78_analytic.tif 9 | 2017-06-11 10:28:49,566 :: oauth2client.transport :: INFO :: Attempting refresh to obtain initial access_token 10 | 2017-06-11 10:28:49,566 :: oauth2client.client :: INFO :: Refreshing access_token 11 | 2017-06-11 10:38:10,349 :: oauth2client.transport :: INFO :: Attempting refresh to obtain initial access_token 12 | 2017-06-11 10:38:10,349 :: oauth2client.client :: INFO :: Refreshing access_token 13 | 2017-06-11 17:45:33,352 :: oauth2client.transport :: INFO :: Attempting refresh to obtain initial access_token 14 | 2017-06-11 17:45:33,352 :: oauth2client.client :: INFO :: Refreshing access_token 
15 | 2017-06-11 17:56:48,338 :: oauth2client.transport :: INFO :: Attempting refresh to obtain initial access_token 16 | 2017-06-11 17:56:48,338 :: oauth2client.client :: INFO :: Refreshing access_token 17 | 2017-06-11 17:56:58,779 :: root :: WARNING :: Collection users/samapriya/dgbuffer already exists 18 | 2017-06-11 17:57:00,016 :: root :: INFO :: Processing image 1 out of 3: C:\planet_demo\ps\229190_1552719_2016-08-26_0c78_analytic.tif 19 | 2017-06-11 17:58:15,888 :: root :: INFO :: Processing image 2 out of 3: C:\planet_demo\ps\229190_1552818_2016-08-26_0c78_analytic.tif 20 | 2017-06-11 17:59:18,563 :: root :: INFO :: Processing image 3 out of 3: C:\planet_demo\ps\229190_1552820_2016-08-26_0c78_analytic.tif 21 | 2017-06-11 18:02:01,369 :: oauth2client.transport :: INFO :: Attempting refresh to obtain initial access_token 22 | 2017-06-11 18:02:01,369 :: oauth2client.client :: INFO :: Refreshing access_token 23 | 2017-06-11 18:04:36,885 :: oauth2client.transport :: INFO :: Attempting refresh to obtain initial access_token 24 | 2017-06-11 18:04:36,885 :: oauth2client.client :: INFO :: Refreshing access_token 25 | 2017-06-11 18:04:48,733 :: oauth2client.transport :: INFO :: Attempting refresh to obtain initial access_token 26 | 2017-06-11 18:04:48,733 :: oauth2client.client :: INFO :: Refreshing access_token 27 | 2017-06-11 18:05:12,604 :: oauth2client.transport :: INFO :: Attempting refresh to obtain initial access_token 28 | 2017-06-11 18:05:12,604 :: oauth2client.client :: INFO :: Refreshing access_token 29 | 2017-06-11 19:15:17,759 :: oauth2client.transport :: INFO :: Attempting refresh to obtain initial access_token 30 | 2017-06-11 19:15:17,759 :: oauth2client.client :: INFO :: Refreshing access_token 31 | 2017-06-11 22:14:41,565 :: oauth2client.transport :: INFO :: Attempting refresh to obtain initial access_token 32 | 2017-06-11 22:14:41,569 :: oauth2client.client :: INFO :: Refreshing access_token 33 | 2017-06-11 22:18:38,075 :: oauth2client.transport :: INFO :: Attempting refresh to obtain initial access_token 34 | 2017-06-11 22:18:38,075 :: oauth2client.client :: INFO :: Refreshing access_token 35 | 2017-06-11 23:00:28,992 :: oauth2client.transport :: INFO :: Attempting refresh to obtain initial access_token 36 | 2017-06-11 23:00:28,992 :: oauth2client.client :: INFO :: Refreshing access_token 37 | 2017-06-11 23:00:38,543 :: root :: WARNING :: Collection users/samapriya/dgbuffer already exists 38 | 2017-06-11 23:00:39,841 :: root :: INFO :: Collection already exists. 2 assets left for upload to users/samapriya/dgbuffer. 
39 | 2017-06-11 23:00:39,841 :: root :: INFO :: Processing image 1 out of 2: C:\planet_demo\ps\229190_1552719_2016-08-26_0c78_analytic.tif 40 | 2017-06-11 23:08:30,497 :: oauth2client.transport :: INFO :: Attempting refresh to obtain initial access_token 41 | 2017-06-11 23:08:30,513 :: oauth2client.client :: INFO :: Refreshing access_token 42 | 2017-06-11 23:08:40,608 :: root :: WARNING :: Collection users/samapriya/dgbuffer already exists 43 | 2017-06-11 23:08:42,036 :: root :: INFO :: Processing image 1 out of 3: C:\planet_demo\ps\229190_1552719_2016-08-26_0c78_analytic.tif 44 | 2017-06-11 23:10:01,012 :: root :: INFO :: Processing image 2 out of 3: C:\planet_demo\ps\229190_1552818_2016-08-26_0c78_analytic.tif 45 | 2017-06-11 23:11:01,512 :: root :: INFO :: Processing image 3 out of 3: C:\planet_demo\ps\229190_1552820_2016-08-26_0c78_analytic.tif 46 | 2017-06-14 10:06:23,262 :: oauth2client.transport :: INFO :: Attempting refresh to obtain initial access_token 47 | 2017-06-14 10:06:23,262 :: oauth2client.client :: INFO :: Refreshing access_token 48 | 2017-06-14 10:07:15,927 :: oauth2client.transport :: INFO :: Attempting refresh to obtain initial access_token 49 | 2017-06-14 10:07:15,927 :: oauth2client.client :: INFO :: Refreshing access_token 50 | -------------------------------------------------------------------------------- /ee_ppipe/gooey_config.json: -------------------------------------------------------------------------------- 1 | { 2 | "language_dir": "C:\\Python27\\ArcGIS10.4\\lib\\site-packages\\gooey\\languages", 3 | "num_optional_cols": 2, 4 | "num_required_cols": 2, 5 | "target": "\"C:\\Python27\\ArcGIS10.4\\python.exe\" -u \"ee_ppipe.py\"", 6 | "language": "english", 7 | "manual_start": false, 8 | "progress_expr": null, 9 | "disable_stop_button": false, 10 | "monospace_display": false, 11 | "default_size": [ 12 | 610, 13 | 530 14 | ], 15 | "image_dir": "default", 16 | "group_by_type": true, 17 | "disable_progress_bar_animation": false, 18 | "auto_start": false, 19 | "widgets": { 20 | "planet_key": { 21 | "command": "planet_key", 22 | "contents": [] 23 | }, 24 | "aoijson": { 25 | "command": "aoijson", 26 | "contents": [ 27 | { 28 | "data": { 29 | "commands": [ 30 | "--start" 31 | ], 32 | "display_name": "start", 33 | "help": "Start date in YYYY-MM-DD?", 34 | "default": "Start date in YYYY-MM-DD", 35 | "nargs": "", 36 | "choices": [] 37 | }, 38 | "required": false, 39 | "type": "DateChooser" 40 | }, 41 | { 42 | "data": { 43 | "commands": [ 44 | "--end" 45 | ], 46 | "display_name": "end", 47 | "help": "End date in YYYY-MM-DD?", 48 | "default": "End date in YYYY-MM-DD", 49 | "nargs": "", 50 | "choices": [] 51 | }, 52 | "required": false, 53 | "type": "DateChooser" 54 | }, 55 | { 56 | "data": { 57 | "commands": [ 58 | "--cloud" 59 | ], 60 | "display_name": "cloud", 61 | "help": "Maximum Cloud Cover(0-1) representing 0-100", 62 | "default": "Maximum Cloud Cover(0-1)", 63 | "nargs": "", 64 | "choices": [] 65 | }, 66 | "required": false, 67 | "type": "TextField" 68 | }, 69 | { 70 | "data": { 71 | "commands": [ 72 | "--inputfile" 73 | ], 74 | "display_name": "inputfile", 75 | "help": "Choose a KML/SHP/geojson/WKT file or Landsat WRS", 76 | "default": "Choose a KML/SHP/geojson/WKT file or Landsat WRS", 77 | "nargs": "", 78 | "choices": [ 79 | "KML", 80 | "SHP", 81 | "GJSON", 82 | "WKT", 83 | "WRS" 84 | ] 85 | }, 86 | "required": false, 87 | "type": "Dropdown" 88 | }, 89 | { 90 | "data": { 91 | "commands": [ 92 | "--geo" 93 | ], 94 | "display_name": "geo", 95 | "help": 
"map.geojson/aoi.kml/aoi.shp/aoi.wkt file", 96 | "default": "map.geojson/aoi.kml/aoi.shp/aoi.wkt file or 6 digit WRS PathRow", 97 | "nargs": "", 98 | "choices": [] 99 | }, 100 | "required": false, 101 | "type": "MultiFileChooser" 102 | }, 103 | { 104 | "data": { 105 | "commands": [ 106 | "--loc" 107 | ], 108 | "display_name": "loc", 109 | "help": "Location where aoi.json file is to be stored", 110 | "default": null, 111 | "nargs": "", 112 | "choices": [] 113 | }, 114 | "required": false, 115 | "type": "MultiDirChooser" 116 | } 117 | ] 118 | }, 119 | "activatepl": { 120 | "command": "activatepl", 121 | "contents": [ 122 | { 123 | "data": { 124 | "commands": [ 125 | "--aoi" 126 | ], 127 | "display_name": "aoi", 128 | "help": "Choose JSON file created earlier", 129 | "default": "Choose JSON file to be used with Planet API/Created Earlier", 130 | "nargs": "", 131 | "choices": [] 132 | }, 133 | "required": false, 134 | "type": "MultiFileChooser" 135 | }, 136 | { 137 | "data": { 138 | "commands": [ 139 | "--action" 140 | ], 141 | "display_name": "action", 142 | "help": "Check/activate", 143 | "default": null, 144 | "nargs": "", 145 | "choices": [ 146 | "check", 147 | "activate" 148 | ] 149 | }, 150 | "required": false, 151 | "type": "Dropdown" 152 | }, 153 | { 154 | "data": { 155 | "commands": [ 156 | "--asst" 157 | ], 158 | "display_name": "asst", 159 | "help": "PSOrthoTile analytic,PSOrthoTile analytic_dn,PSOrthoTile visual,PSScene4Band analytic,PSScene4Band analytic_dn,PSScene3Band analytic,PSScene3Band analytic_dn,PSScene3Band visual,REOrthoTile analytic,REOrthoTile visual", 160 | "default": null, 161 | "nargs": "", 162 | "choices": [ 163 | "PSOrthoTile analytic", 164 | "PSOrthoTile analytic_dn", 165 | "PSOrthoTile visual", 166 | "PSScene4Band analytic", 167 | "PSScene4Band analytic_dn", 168 | "PSScene3Band analytic", 169 | "PSScene3Band analytic_dn", 170 | "PSScene3Band visual", 171 | "REOrthoTile analytic", 172 | "REOrthoTile visual" 173 | ] 174 | }, 175 | "required": false, 176 | "type": "Dropdown" 177 | } 178 | ] 179 | }, 180 | "downloadpl": { 181 | "command": "downloadpl", 182 | "contents": [ 183 | { 184 | "data": { 185 | "commands": [ 186 | "--aoi" 187 | ], 188 | "display_name": "aoi", 189 | "help": "Choose JSON file created earlier", 190 | "default": "Choose JSON file to be used with Planet API/Created Earlier", 191 | "nargs": "", 192 | "choices": [] 193 | }, 194 | "required": false, 195 | "type": "MultiFileChooser" 196 | }, 197 | { 198 | "data": { 199 | "commands": [ 200 | "--action" 201 | ], 202 | "display_name": "action", 203 | "help": "choose download", 204 | "default": "download", 205 | "nargs": "", 206 | "choices": [] 207 | }, 208 | "required": false, 209 | "type": "TextField" 210 | }, 211 | { 212 | "data": { 213 | "commands": [ 214 | "--asst" 215 | ], 216 | "display_name": "asst", 217 | "help": "PSOrthoTile analytic,PSOrthoTile analytic_dn,PSOrthoTile visual,PSScene4Band analytic,PSScene4Band analytic_dn,PSScene3Band analytic,PSScene3Band analytic_dn,PSScene3Band visual,REOrthoTile analytic,REOrthoTile visual", 218 | "default": null, 219 | "nargs": "", 220 | "choices": [ 221 | "PSOrthoTile analytic", 222 | "PSOrthoTile analytic_dn", 223 | "PSOrthoTile visual", 224 | "PSScene4Band analytic", 225 | "PSScene4Band analytic_dn", 226 | "PSScene3Band analytic", 227 | "PSScene3Band analytic_dn", 228 | "PSScene3Band visual", 229 | "REOrthoTile analytic", 230 | "REOrthoTile visual", 231 | "PSOrthoTile analytic_xml", 232 | "PSOrthoTile analytic_dn_xml", 233 | "PSOrthoTile visual_xml", 234 
| "PSScene4Band analytic_xml", 235 | "PSScene4Band analytic_dn_xml", 236 | "PSScene3Band analytic_xml", 237 | "PSScene3Band analytic_dn_xml", 238 | "PSScene3Band visual_xml", 239 | "REOrthoTile analytic_xml", 240 | "REOrthoTile visual_xml" 241 | ] 242 | }, 243 | "required": false, 244 | "type": "Dropdown" 245 | }, 246 | { 247 | "data": { 248 | "commands": [ 249 | "--pathway" 250 | ], 251 | "display_name": "pathway", 252 | "help": "Folder Path where PlanetAssets are saved example ./PlanetScope ./RapidEye", 253 | "default": "Folder where you want to save assets", 254 | "nargs": "", 255 | "choices": [] 256 | }, 257 | "required": false, 258 | "type": "MultiDirChooser" 259 | } 260 | ] 261 | }, 262 | "metadata": { 263 | "command": "metadata", 264 | "contents": [ 265 | { 266 | "data": { 267 | "commands": [ 268 | "--asset" 269 | ], 270 | "display_name": "asset", 271 | "help": "RapidEye/PlantScope/DigitalGlobe MS/DigitalGlobe Pan(RE/PS/DGMS/DGP)?", 272 | "default": "PS", 273 | "nargs": "", 274 | "choices": [ 275 | "PSO", 276 | "PSO_DN", 277 | "PSO_V", 278 | "PS4B", 279 | "PS4B_DN", 280 | "PS3B", 281 | "PS3B_DN", 282 | "PS3B_V", 283 | "REO", 284 | "REO_V", 285 | "DGMS", 286 | "DGP" 287 | ] 288 | }, 289 | "required": false, 290 | "type": "Dropdown" 291 | }, 292 | { 293 | "data": { 294 | "commands": [ 295 | "--mf" 296 | ], 297 | "display_name": "mf", 298 | "help": "Metadata folder", 299 | "default": "Metadata folder", 300 | "nargs": "", 301 | "choices": [] 302 | }, 303 | "required": false, 304 | "type": "MultiDirChooser" 305 | }, 306 | { 307 | "data": { 308 | "commands": [ 309 | "--mfile" 310 | ], 311 | "display_name": "mfile", 312 | "help": "Metadata filename to be exported with Path.csv", 313 | "default": "Metadata filename browse and create file and click open", 314 | "nargs": "", 315 | "choices": [] 316 | }, 317 | "required": false, 318 | "type": "MultiFileChooser" 319 | }, 320 | { 321 | "data": { 322 | "commands": [ 323 | "--errorlog" 324 | ], 325 | "display_name": "errorlog", 326 | "help": "Errorlog to be exported along with Path.csv", 327 | "default": "Error log browse and create file and click open", 328 | "nargs": "", 329 | "choices": [] 330 | }, 331 | "required": false, 332 | "type": "MultiFileChooser" 333 | } 334 | ] 335 | }, 336 | "ee_user": { 337 | "command": "ee_user", 338 | "contents": [] 339 | }, 340 | "create": { 341 | "command": "create", 342 | "contents": [ 343 | { 344 | "data": { 345 | "commands": [ 346 | "--typ" 347 | ], 348 | "display_name": "typ", 349 | "help": "Specify type: collection or folder", 350 | "default": null, 351 | "nargs": "", 352 | "choices": [] 353 | }, 354 | "required": true, 355 | "type": "TextField" 356 | }, 357 | { 358 | "data": { 359 | "commands": [ 360 | "--path" 361 | ], 362 | "display_name": "path", 363 | "help": "This is the path for the earth engine asset to be created full path is needsed eg: users/johndoe/collection", 364 | "default": null, 365 | "nargs": "", 366 | "choices": [] 367 | }, 368 | "required": true, 369 | "type": "TextField" 370 | } 371 | ] 372 | }, 373 | "upload": { 374 | "command": "upload", 375 | "contents": [ 376 | { 377 | "data": { 378 | "commands": [ 379 | "-u", 380 | "--user" 381 | ], 382 | "display_name": "user", 383 | "help": "Google account name (gmail address).", 384 | "default": null, 385 | "nargs": "", 386 | "choices": [] 387 | }, 388 | "required": true, 389 | "type": "TextField" 390 | }, 391 | { 392 | "data": { 393 | "commands": [ 394 | "--source" 395 | ], 396 | "display_name": "source", 397 | "help": "Path to the directory 
with images for upload.", 398 | "default": null, 399 | "nargs": "", 400 | "choices": [] 401 | }, 402 | "required": true, 403 | "type": "TextField" 404 | }, 405 | { 406 | "data": { 407 | "commands": [ 408 | "--dest" 409 | ], 410 | "display_name": "dest", 411 | "help": "Destination. Full path for upload to Google Earth Engine, e.g. users/pinkiepie/myponycollection", 412 | "default": null, 413 | "nargs": "", 414 | "choices": [] 415 | }, 416 | "required": true, 417 | "type": "TextField" 418 | }, 419 | { 420 | "data": { 421 | "commands": [ 422 | "-m", 423 | "--metadata" 424 | ], 425 | "display_name": "metadata", 426 | "help": "Path to CSV with metadata.", 427 | "default": null, 428 | "nargs": "", 429 | "choices": [] 430 | }, 431 | "required": false, 432 | "type": "TextField" 433 | }, 434 | { 435 | "data": { 436 | "commands": [ 437 | "--nodata" 438 | ], 439 | "display_name": "nodata", 440 | "help": "The value to burn into the raster as NoData (missing data)", 441 | "default": null, 442 | "nargs": "", 443 | "choices": [] 444 | }, 445 | "required": false, 446 | "type": "TextField" 447 | } 448 | ] 449 | }, 450 | "lst": { 451 | "command": "lst", 452 | "contents": [ 453 | { 454 | "data": { 455 | "commands": [ 456 | "--location" 457 | ], 458 | "display_name": "location", 459 | "help": "This it the location of your folder/collection", 460 | "default": null, 461 | "nargs": "", 462 | "choices": [] 463 | }, 464 | "required": true, 465 | "type": "TextField" 466 | }, 467 | { 468 | "data": { 469 | "commands": [ 470 | "--type" 471 | ], 472 | "display_name": "type", 473 | "help": "Whether you want the list to be printed or output as text", 474 | "default": null, 475 | "nargs": "", 476 | "choices": [] 477 | }, 478 | "required": true, 479 | "type": "TextField" 480 | }, 481 | { 482 | "data": { 483 | "commands": [ 484 | "--items" 485 | ], 486 | "display_name": "items", 487 | "help": "Number of items to list", 488 | "default": null, 489 | "nargs": "", 490 | "choices": [] 491 | }, 492 | "required": false, 493 | "type": "TextField" 494 | }, 495 | { 496 | "data": { 497 | "commands": [ 498 | "--folder" 499 | ], 500 | "display_name": "folder", 501 | "help": "Folder location for report to be exported", 502 | "default": null, 503 | "nargs": "", 504 | "choices": [] 505 | }, 506 | "required": false, 507 | "type": "TextField" 508 | } 509 | ] 510 | }, 511 | "tasks": { 512 | "command": "tasks", 513 | "contents": [] 514 | }, 515 | "taskquery": { 516 | "command": "taskquery", 517 | "contents": [ 518 | { 519 | "data": { 520 | "commands": [ 521 | "--destination" 522 | ], 523 | "display_name": "destination", 524 | "help": "Full path to asset where you are uploading files", 525 | "default": "users/folder/collection", 526 | "nargs": "", 527 | "choices": [] 528 | }, 529 | "required": false, 530 | "type": "TextField" 531 | } 532 | ] 533 | }, 534 | "report": { 535 | "command": "report", 536 | "contents": [ 537 | { 538 | "data": { 539 | "commands": [ 540 | "--r" 541 | ], 542 | "display_name": "r", 543 | "help": "Folder Path where the reports will be saved", 544 | "default": "Folder Path where the reports will be saved", 545 | "nargs": "", 546 | "choices": [] 547 | }, 548 | "required": false, 549 | "type": "MultiDirChooser" 550 | } 551 | ] 552 | }, 553 | "cancel": { 554 | "command": "cancel", 555 | "contents": [] 556 | }, 557 | "mover": { 558 | "command": "mover", 559 | "contents": [ 560 | { 561 | "data": { 562 | "commands": [ 563 | "--assetpath" 564 | ], 565 | "display_name": "assetpath", 566 | "help": "Existing path of assets", 567 | 
"default": "users/folder/collection1", 568 | "nargs": "", 569 | "choices": [] 570 | }, 571 | "required": false, 572 | "type": "TextField" 573 | }, 574 | { 575 | "data": { 576 | "commands": [ 577 | "--finalpath" 578 | ], 579 | "display_name": "finalpath", 580 | "help": "New path for assets", 581 | "default": "users/folder/collection2", 582 | "nargs": "", 583 | "choices": [] 584 | }, 585 | "required": false, 586 | "type": "TextField" 587 | } 588 | ] 589 | }, 590 | "copy": { 591 | "command": "copy", 592 | "contents": [ 593 | { 594 | "data": { 595 | "commands": [ 596 | "--initial" 597 | ], 598 | "display_name": "initial", 599 | "help": "Existing path of assets", 600 | "default": "users/folder/collection1", 601 | "nargs": "", 602 | "choices": [] 603 | }, 604 | "required": false, 605 | "type": "TextField" 606 | }, 607 | { 608 | "data": { 609 | "commands": [ 610 | "--final" 611 | ], 612 | "display_name": "final", 613 | "help": "New path for assets", 614 | "default": "users/folder/collection2", 615 | "nargs": "", 616 | "choices": [] 617 | }, 618 | "required": false, 619 | "type": "TextField" 620 | } 621 | ] 622 | }, 623 | "collprop": { 624 | "command": "collprop", 625 | "contents": [ 626 | { 627 | "data": { 628 | "commands": [ 629 | "--coll" 630 | ], 631 | "display_name": "coll", 632 | "help": "Path of Image Collection", 633 | "default": "users/folder/collection", 634 | "nargs": "", 635 | "choices": [] 636 | }, 637 | "required": false, 638 | "type": "TextField" 639 | }, 640 | { 641 | "data": { 642 | "commands": [ 643 | "--p" 644 | ], 645 | "display_name": "p", 646 | "help": "system:description=Description|system:title=title", 647 | "default": "system:description=Description", 648 | "nargs": "", 649 | "choices": [] 650 | }, 651 | "required": false, 652 | "type": "TextField" 653 | } 654 | ] 655 | }, 656 | "access": { 657 | "command": "access", 658 | "contents": [ 659 | { 660 | "data": { 661 | "commands": [ 662 | "--mode" 663 | ], 664 | "display_name": "mode", 665 | "help": "This lets you select if you want to change permission or folder/collection/image", 666 | "default": "folder|collection|image", 667 | "nargs": "", 668 | "choices": [ 669 | "folder", 670 | "collection", 671 | "image" 672 | ] 673 | }, 674 | "required": true, 675 | "type": "Dropdown" 676 | }, 677 | { 678 | "data": { 679 | "commands": [ 680 | "--asset" 681 | ], 682 | "display_name": "asset", 683 | "help": "This is the path to the earth engine asset whose permission you are changing folder/collection/image", 684 | "default": "users/folder/collection", 685 | "nargs": "", 686 | "choices": [] 687 | }, 688 | "required": true, 689 | "type": "TextField" 690 | }, 691 | { 692 | "data": { 693 | "commands": [ 694 | "--user" 695 | ], 696 | "display_name": "user", 697 | "help": "This is the email address to whom you want to give read or write permission Usage: \"john@doe.com:R\" or \"john@doe.com:W\" R/W refers to read or write permission", 698 | "default": "john@doe.com:R", 699 | "nargs": "", 700 | "choices": [] 701 | }, 702 | "required": true, 703 | "type": "TextField" 704 | } 705 | ] 706 | }, 707 | "delete": { 708 | "command": "delete", 709 | "contents": [ 710 | { 711 | "data": { 712 | "commands": [], 713 | "display_name": "id", 714 | "help": "Full path to asset for deletion. 
Recursively removes all folders, collections and images.", 715 | "default": "users/folder/collection", 716 | "nargs": "", 717 | "choices": [] 718 | }, 719 | "required": true, 720 | "type": "TextField" 721 | } 722 | ] 723 | }, 724 | "convert2ft": { 725 | "command": "convert2ft", 726 | "contents": [ 727 | { 728 | "data": { 729 | "commands": [ 730 | "--i" 731 | ], 732 | "display_name": "i", 733 | "help": "input feature source (KML, SHP, SpatiLite, etc.)", 734 | "default": "input feature source (KML, SHP, SpatiLite, etc.)", 735 | "nargs": "", 736 | "choices": [] 737 | }, 738 | "required": true, 739 | "type": "MultiFileChooser" 740 | }, 741 | { 742 | "data": { 743 | "commands": [ 744 | "--o" 745 | ], 746 | "display_name": "o", 747 | "help": "output Fusion Table name", 748 | "default": null, 749 | "nargs": "", 750 | "choices": [] 751 | }, 752 | "required": true, 753 | "type": "TextField" 754 | }, 755 | { 756 | "data": { 757 | "commands": [ 758 | "--add_missing" 759 | ], 760 | "display_name": "add_missing", 761 | "help": "add missing features from the last inserted feature index", 762 | "default": false, 763 | "nargs": "", 764 | "choices": [] 765 | }, 766 | "required": false, 767 | "type": "CheckBox" 768 | } 769 | ] 770 | }, 771 | "cleanout": { 772 | "command": "cleanout", 773 | "contents": [ 774 | { 775 | "data": { 776 | "commands": [ 777 | "--dirpath" 778 | ], 779 | "display_name": "dirpath", 780 | "help": "Folder you want to delete after all processes have been completed", 781 | "default": null, 782 | "nargs": "", 783 | "choices": [] 784 | }, 785 | "required": false, 786 | "type": "MultiDirChooser" 787 | } 788 | ] 789 | } 790 | }, 791 | "program_name": "Planet and EE Pipeline", 792 | "progress_regex": null, 793 | "program_description": "Planet and EE Pipeline", 794 | "layout_type": "column", 795 | "show_advanced": true 796 | } -------------------------------------------------------------------------------- /ee_ppipe/kml_aoi.py: -------------------------------------------------------------------------------- 1 | import subprocess 2 | import os 3 | import json 4 | from pprint import pprint 5 | import argparse 6 | import sys 7 | import time 8 | from osgeo import ogr 9 | import csv 10 | import shapefile 11 | import string 12 | p1='{"config": [{"field_name": "geometry", "config": {"type": "Polygon","coordinates":' 13 | p2='}, "type": "GeometryFilter"}, {"field_name": "gsd", "config": {"gte":1,"lte":9.99}, "type": "RangeFilter"}, {"field_name": "acquired", "config": {"gte":"' 14 | p3='T04:00:00.000Z","lte":"' 15 | p4='T03:59:59.999Z"}, "type": "DateRangeFilter"}, {"field_name": "cloud_cover", "config": {"gte":0' 16 | p5=',"lte":' 17 | p6='}, "type": "RangeFilter"}], "type": "AndFilter"}' 18 | def main(): 19 | parser = argparse.ArgumentParser('Tool to convert KML, Shapefile,WKT or GeoJSON file to AreaOfInterest.JSON file with structured query for use with Planet API 1.0') 20 | parser.add_argument('--start', help='Start date in YYYY-MM-DD?') 21 | parser.add_argument('--end', help='End date in YYYY-MM-DD?') 22 | parser.add_argument('--cloud', help='Maximum Cloud Cover(0-1) representing 0-100') 23 | parser.add_argument('--geo', default='./map.geojson',help='map.geojson/aoi.kml/aoi.shp/aoi.wkt file') 24 | parser.add_argument('--loc',help='Output location for kml file') 25 | args = parser.parse_args() 26 | sys.stdout.write(str(parsed(args))) 27 | 28 | def parsed(args): 29 | kml_file=args.geo 30 | def kml2geojson(kml_file): 31 | drv = ogr.GetDriverByName('KML') 32 | kml_ds = drv.Open(kml_file) 33 | for 
kml_lyr in kml_ds: 34 | for feat in kml_lyr: 35 | outfile=feat.ExportToJson() 36 | geom2=str(outfile).replace(", 0.0",'') 37 | with open(args.loc+'./kmlout.geojson','w') as csvfile: 38 | writer=csv.writer(csvfile) 39 | writer.writerow([geom2]) 40 | kml2geojson(args.geo) 41 | raw= open(args.loc+'./kmlout.geojson') 42 | for line in raw: 43 | fields=line.strip().split(":")[3] 44 | f2=fields.strip().split("}")[0] 45 | filenames = p1+f2+p2+str(args.start)+p3+str(args.end)+p4+p5+str(args.cloud)+p6 46 | with open(args.loc+'./aoi.json', 'w') as outfile: 47 | outfile.write(filenames) 48 | outfile.close() 49 | if __name__ == '__main__': 50 | main() 51 | -------------------------------------------------------------------------------- /ee_ppipe/logconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": 1, 3 | "disable_existing_loggers": false, 4 | "formatters": { 5 | "simple": { 6 | "format": "%(asctime)s :: %(name)s :: %(levelname)s :: %(message)s" 7 | } 8 | }, 9 | 10 | "handlers": { 11 | "console": { 12 | "class": "logging.StreamHandler", 13 | "level": "DEBUG", 14 | "formatter": "simple", 15 | "stream": "ext://sys.stdout" 16 | }, 17 | 18 | "info_file_handler": { 19 | "class": "logging.handlers.RotatingFileHandler", 20 | "level": "INFO", 21 | "formatter": "simple", 22 | "filename": "gee_assets_info.log", 23 | "maxBytes": 10485760, 24 | "backupCount": 20, 25 | "encoding": "utf8" 26 | }, 27 | 28 | "error_file_handler": { 29 | "class": "logging.handlers.RotatingFileHandler", 30 | "level": "ERROR", 31 | "formatter": "simple", 32 | "filename": "gee_assets_errors.log", 33 | "maxBytes": 10485760, 34 | "backupCount": 20, 35 | "encoding": "utf8" 36 | } 37 | }, 38 | 39 | "root": { 40 | "level": "INFO", 41 | "handlers": ["console", "info_file_handler", "error_file_handler"] 42 | } 43 | } -------------------------------------------------------------------------------- /ee_ppipe/message.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import time 3 | 4 | program_message = \ 5 | ''' 6 | 7 | ''' 8 | 9 | def display_message(): 10 | message = program_message.format('\n-'.join(sys.argv[1:])).split('\n') 11 | delay = 1.8 / len(message) 12 | 13 | for line in message: 14 | print line 15 | time.sleep(delay) 16 | 17 | -------------------------------------------------------------------------------- /ee_ppipe/metadata_loader.py: -------------------------------------------------------------------------------- 1 | import csv 2 | import logging 3 | import re 4 | import collections 5 | import ast 6 | 7 | 8 | ValidationResult = collections.namedtuple('ValidationResult', ['success', 'keys']) 9 | 10 | 11 | class IllegalPropertyName(Exception): 12 | pass 13 | 14 | 15 | def validate_metadata_from_csv(path): 16 | """ 17 | Check if metadata is ok 18 | :param path: 19 | :return: true / false 20 | """ 21 | all_keys = [] 22 | 23 | with open(path, mode='r') as metadata_file: 24 | logging.info('Running metatdata validator for %s', path) 25 | success = True 26 | reader = csv.reader(metadata_file) 27 | header = next(reader) 28 | 29 | if not properties_allowed(properties=header, validator=allowed_property_key): 30 | raise IllegalPropertyName('The header has illegal name.') 31 | 32 | for row in reader: 33 | all_keys.append(row[0]) 34 | if not properties_allowed(properties=row, validator=allowed_property_value): 35 | success = False 36 | 37 | logging.info('Validation successful') if success else logging.error('Validation failed') 38 
| 39 | return ValidationResult(success=success, keys=all_keys) 40 | 41 | 42 | def load_metadata_from_csv(path): 43 | """ 44 | Grabs properties from the give csv file. The csv should be organised as follows: 45 | filename (without extension), property1, property2, ... 46 | 47 | Example: 48 | id_no,class,category,binomial 49 | my_file_1,GASTROPODA,EN,Aaadonta constricta 50 | my_file_2,GASTROPODA,CR,Aaadonta irregularis 51 | 52 | The corresponding files are my_file_1.tif and my_file_2.tif. 53 | 54 | The program will turn the above into a json object: 55 | 56 | { id_no: my_file_1, class: GASTROPODA, category: EN, binomial: Aaadonta constricta}, 57 | { id_no: my_file_2, class: GASTROPODA, category: CR, binomial: Aaadonta irregularis} 58 | 59 | :param path to csv: 60 | :return: dictionary of dictionaries 61 | """ 62 | with open(path, mode='r') as metadata_file: 63 | reader = csv.reader(metadata_file) 64 | header = next(reader) 65 | 66 | if not properties_allowed(properties=header, validator=allowed_property_key): 67 | raise IllegalPropertyName() 68 | 69 | metadata = {} 70 | 71 | for row in reader: 72 | if properties_allowed(properties=row, validator=allowed_property_value): 73 | values = [] 74 | for item in row: 75 | try: 76 | values.append(ast.literal_eval(item)) 77 | except (ValueError, SyntaxError) as e: 78 | values.append(item) 79 | metadata[row[0]] = dict(zip(header, values)) 80 | 81 | return metadata 82 | 83 | 84 | def properties_allowed(properties, validator): 85 | return all(validator(prop) for prop in properties) 86 | 87 | 88 | def allowed_property_value(prop): 89 | if prop: 90 | return True 91 | else: 92 | logging.warning('Illegal property: empty string or None') 93 | return False 94 | 95 | 96 | def allowed_property_key(prop): 97 | google_special_properties = ('system:description', 98 | 'system:provider_url', 99 | 'system:tags', 100 | 'system:time_end', 101 | 'system:time_start', 102 | 'system:title') 103 | 104 | if prop in google_special_properties or re.match("^[A-Za-z0-9_]+$", prop): 105 | return True 106 | else: 107 | logging.warning('Property name %s is invalid. 
Special properties [system:description, system:provider_url, ' 108 | 'system:tags, system:time_end, system:time_start, system:title] are allowed; other property ' 109 | 'keys must contain only letters, digits and underscores.') 110 | return False 111 | 112 | 113 | def is_legal_gee_metadata(row): 114 | key = row[0] 115 | values = row[1:] 116 | re.match("^[A-Za-z0-9_]+$", ' asss_sasa') 117 | -------------------------------------------------------------------------------- /ee_ppipe/ogr2ft.py: -------------------------------------------------------------------------------- 1 | import os 2 | import time 3 | import json 4 | import webbrowser 5 | 6 | import shapely.wkt 7 | 8 | from osgeo import ogr 9 | from osgeo import gdal 10 | 11 | class OAuth2(object): 12 | def __init__(self): 13 | self._scope = 'https://www.googleapis.com/auth/fusiontables' 14 | self._config_path = os.path.expanduser('~/.config/ogr2ft/credentials') 15 | 16 | def get_refresh_token(self): 17 | try: 18 | refresh_token = json.load(open(self._config_path))['refresh_token'] 19 | except IOError: 20 | return self._request_refresh_token() 21 | 22 | return refresh_token 23 | 24 | def _request_refresh_token(self): 25 | # create configuration file dir 26 | config_dir = os.path.dirname(self._config_path) 27 | if not os.path.isdir(config_dir): 28 | os.makedirs(config_dir) 29 | 30 | # open browser and ask for authorization 31 | auth_request_url = gdal.GOA2GetAuthorizationURL(self._scope) 32 | print('Authorize access to your Fusion Tables, and paste the resulting code below: ' + auth_request_url) 33 | # webbrowser.open_new(auth_request_url) 34 | 35 | auth_code = raw_input('Please enter authorization code: ').strip() 36 | 37 | refresh_token = gdal.GOA2GetRefreshToken(auth_code, self._scope) 38 | 39 | # save it 40 | json.dump({'refresh_token': refresh_token}, open(self._config_path, 'w')) 41 | 42 | return refresh_token 43 | 44 | 45 | def copy_features(src_layer, dst_layer, fix_geometry, simplify_geometry, start_index, total): 46 | index = 0 47 | batch_size = 200 48 | index_batch = 0 49 | for feat in src_layer: 50 | if index < start_index: 51 | index = index + 1 52 | continue 53 | 54 | try: 55 | geom = shapely.wkt.loads(feat.GetGeometryRef().ExportToWkt()) 56 | except Exception as e: 57 | print('Error({0}), skipping geometry.'.format(e)) 58 | continue 59 | 60 | if fix_geometry and not geom.is_valid: 61 | geom = geom.buffer(0.0) 62 | 63 | if simplify_geometry: 64 | geom = geom.simplify(0.004) 65 | 66 | f = ogr.Feature(dst_layer.GetLayerDefn()) 67 | 68 | # set field values 69 | for i in range(feat.GetFieldCount()): 70 | fd = feat.GetFieldDefnRef(i) 71 | f.SetField(fd.GetName(), feat.GetField(fd.GetName())) 72 | 73 | # set geometry 74 | f.SetGeometry(ogr.CreateGeometryFromWkt(geom.to_wkt())) 75 | 76 | if index_batch == 0: 77 | dst_layer.StartTransaction() 78 | 79 | # create feature 80 | feature = dst_layer.CreateFeature(f) 81 | 82 | f.Destroy() 83 | 84 | index_batch = index_batch + 1 85 | 86 | if index_batch >= batch_size or index == total - 1: 87 | dst_layer.CommitTransaction() 88 | count = dst_layer.GetFeatureCount() # update number of inserted features 89 | print('Inserted {0} of {1} features ({2:.2f}%)'.format(count, total, 100. 
* float(count) / total)) 90 | 91 | index_batch = 0 92 | 93 | if index == total - 1: 94 | break 95 | 96 | index = index + 1 97 | 98 | def _get_ft_ds(): 99 | refresh_token = OAuth2().get_refresh_token() 100 | ft_driver = ogr.GetDriverByName('GFT') 101 | 102 | return ft_driver.Open('GFT:refresh=' + refresh_token, True) 103 | 104 | def convert(input_file, output_fusion_table, add_missing=False): 105 | dst_ds = _get_ft_ds() 106 | 107 | src_ds = ogr.Open(input_file) 108 | src_layer = src_ds.GetLayerByIndex(0) 109 | 110 | gdal.UseExceptions() # avoid ERROR 1: ... 111 | try: 112 | dst_layer = dst_ds.GetLayerByName(output_fusion_table) 113 | except RuntimeError: 114 | pass 115 | gdal.DontUseExceptions() # avoid ERROR 1: ... 116 | 117 | is_new = False 118 | if dst_layer: 119 | if not add_missing: 120 | print('Error: feature table already exists: ' + output_fusion_table + ', exiting ...') 121 | print('Use --add-missing to insert missing features') 122 | return 123 | 124 | total = src_layer.GetFeatureCount() 125 | count = dst_layer.GetFeatureCount() 126 | 127 | print('Warning: feature table already exists: ' + output_fusion_table) 128 | if count == total: 129 | print('All done, exiting ...') 130 | else: 131 | print('Inserting missing ' + str(total - count) + ' of ' + str(total) + ' features ...') 132 | else: 133 | print('Creating Fusion Table: ' + output_fusion_table) 134 | 135 | # create new layer and copy schema 136 | dst_layer = dst_ds.CreateLayer(output_fusion_table) 137 | if '.kml' in input_file: 138 | f = src_layer.GetFeature(1) # bug? 139 | else: 140 | f = src_layer.GetFeature(0) 141 | [dst_layer.CreateField(f.GetFieldDefnRef(i)) for i in range(f.GetFieldCount())] 142 | is_new = True 143 | 144 | # copy features, retry during crashes 145 | fix_geometry = True 146 | simplify_geometry = False 147 | 148 | # re-open source layer, otherwise src_layer.GetFeature(1) seems to ifluence iterator in copy_features ?!? 149 | src_ds.Destroy() 150 | src_ds = ogr.Open(input_file) 151 | src_layer = src_ds.GetLayerByIndex(0) 152 | 153 | total = src_layer.GetFeatureCount() 154 | 155 | if is_new: 156 | count = 0 157 | else: 158 | count = dst_layer.GetFeatureCount() 159 | 160 | while count < total: 161 | try: 162 | copy_features(src_layer, dst_layer, fix_geometry, simplify_geometry, count, total) 163 | time.sleep(2) # bad, is there a better way to wait until fusion table updates are finished? 164 | count = dst_layer.GetFeatureCount() 165 | except RuntimeError: 166 | time.sleep(2) # bad, is there a better way to wait until fusion table updates are finished? 167 | count = dst_layer.GetFeatureCount() # update number of inserted features 168 | print('Retrying, {0} ({1:.2f}%)'.format(count, 100. 
* float(count) / total)) 169 | 170 | src_ds.Destroy() 171 | dst_ds.Destroy() 172 | 173 | if __name__ == '__main__': 174 | import argparse 175 | 176 | parser = argparse.ArgumentParser(description='Uploads a given feature collection to Google Fusion Table.') 177 | 178 | parser.add_argument('-i', '--input-file', help='input feature source (KML, SHP, SpatiLite, etc.)', required=True) 179 | parser.add_argument('-o', '--output-fusion-table', help='output Fusion Table name', required=True) 180 | parser.add_argument('-a', '--add-missing', help='add missing features from the last inserted feature index', action='store_true', required=False, default=False) 181 | 182 | args = parser.parse_args() 183 | 184 | convert(args.input_file, args.output_fusion_table, args.add_missing) 185 | -------------------------------------------------------------------------------- /ee_ppipe/planet_key.py: -------------------------------------------------------------------------------- 1 | import os 2 | import csv 3 | import getpass 4 | print("Enter your Planet API Key") 5 | password=getpass.getpass() 6 | with open('./pkey.csv','w') as completed: 7 | writer=csv.writer(completed,delimiter=',',lineterminator='\n') 8 | writer.writerow([password]) 9 | -------------------------------------------------------------------------------- /ee_ppipe/query.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import subprocess 3 | import sys 4 | import os,sys,time,csv,sys 5 | import subprocess 6 | import os 7 | import json 8 | from pprint import pprint 9 | import argparse 10 | import sys 11 | import time 12 | import fnmatch 13 | import logging 14 | import sys 15 | import ee 16 | 17 | def taskquery(destination): 18 | tcom=str("earthengine ls "+destination) 19 | tasklist=subprocess.check_output("earthengine task list",shell=True) 20 | assetlist=subprocess.check_output(tcom,shell=True) 21 | taskready=tasklist.count("READY") 22 | taskrunning=tasklist.count("RUNNING") 23 | taskfailed=tasklist.count("FAILED") 24 | totalfiles=assetlist.count(destination) 25 | print("Running Tasks:",taskrunning) 26 | print("Ready Tasks:",taskready) 27 | print("Failed Tasks:",taskfailed) 28 | print("Assets Uploaded:",totalfiles) 29 | 30 | 31 | -------------------------------------------------------------------------------- /ee_ppipe/taskreport.py: -------------------------------------------------------------------------------- 1 | import ee 2 | import ee.mapclient 3 | import subprocess 4 | import csv 5 | from datetime import datetime 6 | import time 7 | import datetime 8 | import re 9 | ee.Initialize() 10 | def genreport(report): 11 | with open(report+'/Tasks_failed.csv','wb') as failed: 12 | writer=csv.DictWriter(failed,fieldnames=["Task ID","Task Type", "Start Date","Start Time","End Date","End Time","Task Description", "Error Message","Source Script", "Output State"],delimiter=',') 13 | writer.writeheader() 14 | with open(report+'/Tasks_completed.csv','wb') as completed: 15 | writer=csv.DictWriter(completed,fieldnames=["Task ID","Task Type", "Start Date","Start Time","End Date","End Time","Task Description", "Output Url", "Output State"],delimiter=',') 16 | writer.writeheader() 17 | with open(report+'/Tasks_canceled.csv','wb') as canceled: 18 | writer=csv.DictWriter(canceled,fieldnames=["Task ID","Task Type", "Start Date","Start Time","End Date","End Time","Task Description", "Source Script", "Output State"],delimiter=',') 19 | writer.writeheader() 20 | try: 21 | for line in 
subprocess.check_output("earthengine task list",shell=True).split('\n'): 22 | tsk=line.split(' ')[0] 23 | ur=ee.data.getTaskStatus(tsk) 24 | error=str(ur).split('state')[1].split(',')[0].strip("': u'.") 25 | mode = error 26 | if mode == 'FAILED': 27 | tsktype=str(ur).split('task_type')[1].split(',')[0].strip("': u'.") 28 | tskdesc=str(ur).split("'description'")[1].split(',')[0].strip("': u'.") 29 | outurl=str(ur).split('source_url')[1].split(',')[0].strip("': u'.") 30 | strttime=str(ur).split('start_timestamp_ms')[1].split(',')[0].strip("': u'.L") 31 | endtime=str(ur).split('update_timestamp_ms')[1].split(',')[0].strip("': u'.L") 32 | errmsg=str(ur).split('error_message')[1].split(',')[0].strip("': u'.") 33 | state=str(ur).split('state')[1].split(',')[0].strip("': u'.") 34 | tskid=str(ur).split("'id'")[1].split(',')[0].strip("': u'.'}]") 35 | v=int(strttime)/1000 36 | w=int(endtime)/1000 37 | start=datetime.datetime.fromtimestamp(v).strftime('%Y-%m-%d %H:%M:%S.%f') 38 | startdate=start.split(' ')[0] 39 | starttime=start.split(' ')[1].split('.')[0] 40 | end=datetime.datetime.fromtimestamp(w).strftime('%Y-%m-%d %H:%M:%S.%f') 41 | enddate=end.split(' ')[0] 42 | endtime=end.split(' ')[1].split('.')[0] 43 | print(tsktype.title()) 44 | print(tskdesc) 45 | print(outurl) 46 | print(start) 47 | print(end) 48 | print(errmsg) 49 | print(tskid) 50 | print(state) 51 | with open(report+'/Tasks_failed.csv','a') as failed: 52 | writer=csv.writer(failed,delimiter=',',lineterminator='\n') 53 | writer.writerow([tskid,tsktype,startdate,starttime,enddate,endtime,tskdesc,errmsg,outurl,state]) 54 | elif mode == 'CANCELED': 55 | tsktype=str(ur).split('task_type')[1].split(',')[0].strip("': u'.") 56 | tskdesc=str(ur).split("'description'")[1].split(':')[1].split(',')[0].strip("': u'.") 57 | outurl=str(ur).split('source_url')[1].split(',')[0].strip("[': u'.]") 58 | strttime=str(ur).split('start_timestamp_ms')[1].split(',')[0].strip("': u'.L") 59 | endtime=str(ur).split('update_timestamp_ms')[1].split(',')[0].strip("': u'.L") 60 | state=str(ur).split('state')[1].split(',')[0].strip("': u'.") 61 | tskid=str(ur).split("'id'")[1].split(',')[0].strip("': u'.'}]") 62 | v=int(strttime)/1000 63 | w=int(endtime)/1000 64 | start=datetime.datetime.fromtimestamp(v).strftime('%Y-%m-%d %H:%M:%S.%f') 65 | startdate=start.split(' ')[0] 66 | starttime=start.split(' ')[1].split('.')[0] 67 | end=datetime.datetime.fromtimestamp(w).strftime('%Y-%m-%d %H:%M:%S.%f') 68 | enddate=end.split(' ')[0] 69 | endtime=end.split(' ')[1].split('.')[0] 70 | print(tsktype.title()) 71 | print(tskdesc) 72 | print(outurl) 73 | print(start) 74 | print(end) 75 | print(tskid) 76 | print(state) 77 | with open(report+'/Tasks_canceled.csv','a') as canceled: 78 | writer=csv.writer(canceled,delimiter=',',lineterminator='\n') 79 | writer.writerow([tskid,tsktype,startdate,starttime,enddate,endtime,tskdesc,outurl,state]) 80 | elif mode == 'COMPLETED': 81 | tsktype=str(ur).split('task_type')[1].split(',')[0].strip("': u'.") 82 | tskdesc=str(ur).split("'description'")[1].split(':')[1].split(',')[0].strip("': u'.") 83 | outurl=str(ur).split('output_url')[1].split(',')[0].strip("[': u'.]") 84 | strttime=str(ur).split('start_timestamp_ms')[1].split(',')[0].strip("': u'.L") 85 | endtime=str(ur).split('update_timestamp_ms')[1].split(',')[0].strip("': u'.L") 86 | state=str(ur).split('state')[1].split(',')[0].strip("': u'.") 87 | tskid=str(ur).split("'id'")[1].split(',')[0].strip("': u'.'}]") 88 | v=int(strttime)/1000 89 | w=int(endtime)/1000 90 | 
start=datetime.datetime.fromtimestamp(v).strftime('%Y-%m-%d %H:%M:%S.%f') 91 | startdate=start.split(' ')[0] 92 | starttime=start.split(' ')[1].split('.')[0] 93 | end=datetime.datetime.fromtimestamp(w).strftime('%Y-%m-%d %H:%M:%S.%f') 94 | enddate=end.split(' ')[0] 95 | endtime=end.split(' ')[1].split('.')[0] 96 | print(tsktype.title()) 97 | print(tskdesc) 98 | print(outurl) 99 | print(start) 100 | print(end) 101 | print(tskid) 102 | print(state) 103 | with open(report+'/Tasks_completed.csv','a') as completed: 104 | writer=csv.writer(completed,delimiter=',',lineterminator='\n') 105 | writer.writerow([tskid,tsktype,startdate,starttime,enddate,endtime,tskdesc,outurl,state]) 106 | completed.close() 107 | failed.close() 108 | canceled.close() 109 | except Exception: 110 | with open(report+'/Errorlog.csv','wb') as csvfile: 111 | writer=csv.writer(csvfile,delimiter=',') 112 | writer.writerow([tskid]) 113 | csvfile.close() 114 | -------------------------------------------------------------------------------- /ee_ppipe/testme.kml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | testme.kml 5 | 6 | 7 | normal 8 | #s_ylw-pushpin 9 | 10 | 11 | highlight 12 | #s_ylw-pushpin_hl 13 | 14 | 15 | 24 | 33 | 34 | testme 35 | #m_ylw-pushpin 36 | 37 | 1 38 | 39 | 40 | 41 | -92.16027067390579,29.96971980341075,0 -92.16394061879356,29.95751578496616,0 -92.33631569280861,29.27321264712756,0 -91.57016657684537,29.24348573701204,0 -91.27087004160693,29.86523810839692,0 -92.16027067390579,29.96971980341075,0 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | planet >= 0.0.6 2 | earthengine_api >= 0.1.99 3 | requests >= 2.10.0 4 | retrying >= 1.3.3 5 | bs4 >= 0.0.1 6 | requests_toolbelt >= 0.7.0 7 | pytest >= 3.0.0 8 | future >= 0.16.0 9 | pprint >= 0.1 10 | pyopenssl >= 0.13 11 | simplejson >= 3.8.2 12 | pycrypto >= 2.6 13 | google-api-python-client >= 1.5.4 14 | pyshp >= 1.2.10 15 | gdal >= 1.11.4 16 | clipboard >= 0.0.4 17 | -------------------------------------------------------------------------------- /windows executable/eePlanet-GUI.exe: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/samapriya/Planet-GEE-Pipeline-GUI/aee6fc76c17cf6c6ce225572465f876fd14b0079/windows executable/eePlanet-GUI.exe --------------------------------------------------------------------------------
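The query and task-report helpers above are plain Python modules, so they can also be driven directly from an interactive session instead of the packaged GUI. The snippet below is a minimal usage sketch, not part of the repository: the asset folder and report directory are hypothetical placeholders, and it assumes the repository root is on the Python path and that the earthengine command-line tool is installed and already authenticated (ee.Initialize() runs when taskreport is imported).

# Minimal usage sketch; paths below are hypothetical examples, not repository defaults
from ee_ppipe import query, taskreport

# Count READY/RUNNING/FAILED tasks and the assets already uploaded to a collection
query.taskquery("users/your_username/planet_assets")

# Write Tasks_completed.csv, Tasks_failed.csv and Tasks_canceled.csv into an existing local folder
taskreport.genreport("C:/temp/ee_reports")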