├── .gitignore ├── Dockerfile ├── LICENSE ├── README.md ├── atmcorr ├── __init__.py ├── atmcorr_timeseries.py ├── atmospheric.py ├── cloudRemover ├── cloudRemover.py ├── ee_requests.py ├── interpolated_lookup_tables.py ├── kml_reader.py ├── mission_specifics.py ├── plots.py ├── postProcessing.py └── timeSeries.py ├── ee-atmcorr-coefficients-timeseries.py └── ee-atmcorr-timeseries.ipynb /.gitignore: -------------------------------------------------------------------------------- 1 | # added by Sam Murphy (2017-06-22) 2 | files/ 3 | 4 | #### 5 | #### The following is a standard gitignore from: https://github.com/github/gitignore/edit/master/Python.gitignore 6 | #### 7 | 8 | # Byte-compiled / optimized / DLL files 9 | __pycache__/ 10 | *.py[cod] 11 | *$py.class 12 | 13 | # C extensions 14 | *.so 15 | 16 | # Distribution / packaging 17 | .Python 18 | build/ 19 | develop-eggs/ 20 | dist/ 21 | downloads/ 22 | eggs/ 23 | .eggs/ 24 | lib/ 25 | lib64/ 26 | parts/ 27 | sdist/ 28 | var/ 29 | wheels/ 30 | *.egg-info/ 31 | .installed.cfg 32 | *.egg 33 | 34 | # PyInstaller 35 | # Usually these files are written by a python script from a template 36 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 37 | *.manifest 38 | *.spec 39 | 40 | # Installer logs 41 | pip-log.txt 42 | pip-delete-this-directory.txt 43 | 44 | # Unit test / coverage reports 45 | htmlcov/ 46 | .tox/ 47 | .coverage 48 | .coverage.* 49 | .cache 50 | nosetests.xml 51 | coverage.xml 52 | *.cover 53 | .hypothesis/ 54 | 55 | # Translations 56 | *.mo 57 | *.pot 58 | 59 | # Django stuff: 60 | *.log 61 | local_settings.py 62 | 63 | # Flask stuff: 64 | instance/ 65 | .webassets-cache 66 | 67 | # Scrapy stuff: 68 | .scrapy 69 | 70 | # Sphinx documentation 71 | docs/_build/ 72 | 73 | # PyBuilder 74 | target/ 75 | 76 | # Jupyter Notebook 77 | .ipynb_checkpoints 78 | 79 | # pyenv 80 | .python-version 81 | 82 | # celery beat schedule file 83 | celerybeat-schedule 84 | 85 | # SageMath parsed files 86 | *.sage.py 87 | 88 | # Environments 89 | .env 90 | .venv 91 | env/ 92 | venv/ 93 | ENV/ 94 | 95 | # Spyder project settings 96 | .spyderproject 97 | .spyproject 98 | 99 | # Rope project settings 100 | .ropeproject 101 | 102 | # mkdocs documentation 103 | /site 104 | 105 | # mypy 106 | .mypy_cache/ 107 | 108 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM ubuntu:16.04 2 | 3 | MAINTAINER Sam Murphy 4 | 5 | RUN apt-get update && \ 6 | \ 7 | apt-get install -y --no-install-recommends \ 8 | bzip2 \ 9 | build-essential \ 10 | git \ 11 | libssl-dev \ 12 | libffi-dev \ 13 | python3 \ 14 | python3-dev \ 15 | python3-pip \ 16 | wget \ 17 | && \ 18 | apt-get clean && \ 19 | rm -rf /var/lib/apt/lists/* 20 | 21 | RUN wget --quiet https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh && \ 22 | /bin/bash /Miniconda3-latest-Linux-x86_64.sh -b -p /miniconda && \ 23 | rm Miniconda3-latest-Linux-x86_64.sh 24 | 25 | ENV PATH=/miniconda/bin:${PATH} 26 | 27 | RUN conda update -y conda && \ 28 | conda config --add channels conda-forge && \ 29 | conda install -y \ 30 | py6s \ 31 | pandas \ 32 | jupyter 33 | 34 | 35 | RUN conda install -c anaconda pip && \ 36 | pip install \ 37 | earthengine-api \ 38 | openpyxl \ 39 | oauth2client 40 | 41 | -------------------------------------------------------------------------------- /LICENSE: 
-------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. 
Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 
179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "{}" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright {yyyy} {name of copyright owner} 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ____ 2 | # ! 3 | ## This repo is no longer under development 4 | and is probably broken given the pace of the Google Earth Engine team. I am now the CEO of [Earthscope](https://earthscope.io), a startup company from [Entrepreneur First](https://joinef.com), so have no time to squash any bugs that I introduced (sorry) or that have since appeared... 5 | 6 | The rest of the repo is 'as was', use at your own peril. 7 | ____ 8 | 9 | 10 | ## Atmospheric Correction of Sentinel2 and Landsat 11 | 12 | Consider using [gee-atmcorr-S2](https://github.com/samsammurphy/gee-atmcorr-S2) if you are atmospherically correcting a small number of images (e.g. 10s). It uses [Py6S](http://py6s.readthedocs.io/en/latest/) directly and has less setup time. 13 | 14 | ## Purpose 15 | 16 | This repo is for atmospherically correcting large numbers (e.g. 100s) of Sentinel2 and Landsat images. Although automated, it has a longer setup time as it will download and then interpolate look-up tables. However, it should run considerably faster. Time series have the following properties: 17 | 18 | * atmospherically corrected 19 | * cloud-masked 20 | * saved to Excel 21 | * pretty plots 22 | 23 | ## Bonus 24 | 25 | This approach might also be more suitable for onboard processing (e.g. drones, nanosats) as the computational heavy lifting can be done in advance. 26 | 27 | ## Installation 28 | 29 | Install [Docker](https://docs.docker.com/install/), then build the image from the directory that contains the Dockerfile 30 | 31 | `docker build -t atmcorr-timeseries /path/to/dockerfile_directory/` 32 | 33 | ## Usage 34 | 35 | Run the Docker container. 36 | 37 | `docker run -i -t -p 8888:8888 atmcorr-timeseries` 38 | 39 | and authenticate the Earth Engine API. 40 | 41 | `earthengine authenticate` 42 | 43 | grab the source code 44 | 45 | `git clone https://github.com/samsammurphy/ee-atmcorr-timeseries` 46 | 47 | and run the Jupyter Notebook: 48 | 49 | ``` 50 | cd ee-atmcorr-timeseries 51 | jupyter-notebook ee-atmcorr-timeseries.ipynb --ip='*' --port=8888 --allow-root 52 | ``` 53 | 54 | This will print out a URL that you can copy/paste into your web browser to run the code.
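Under the hood, the notebook drives a handful of calls into the `atmcorr` package (defined later in this listing). A minimal sketch of that workflow, assuming Earth Engine is already authenticated and the iLUT download succeeds; the site name, bounding box and dates below are made up:

```python
import ee
from atmcorr.timeSeries import timeSeries
from atmcorr.postProcessing import postProcessing

ee.Initialize()

# hypothetical area of interest, date range and missions
target = 'my_site'
geom = ee.Geometry.Rectangle(85.52, 25.62, 85.73, 25.82)
startDate = '2016-01-01'   # YYYY-MM-DD
stopDate = '2017-01-01'
missions = ['Sentinel2', 'Landsat8']

# cloud-masked, atmospherically corrected time series
# (cached to files/excel/<target>.xlsx so it is only computed once)
allTimeSeries = timeSeries(target, geom, startDate, stopDate, missions)

# resample to daily, interpolate gaps and add hue/saturation/value columns
DF = postProcessing(allTimeSeries, startDate, stopDate)
```

The `removeClouds` flag of `timeSeries` defaults to `True`, so cloud and shadow pixels are masked before the mean radiance is computed.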
55 | 56 | If the printed URL is *http://(something_in_parentheses)* then you will need to replace the parentheses and their contents with *localhost*. A valid URL should look something like: 57 | 58 | http://localhost:8888/?token=... 59 | 60 | ## Notes on setup-time vs. run-time 61 | 62 | This code is optimized for atmospheric correction of large image collections. It trades setup time (i.e. ~30 mins) for run time. Setup is only performed once and is fully automated. This avoids running the radiative transfer code separately for each image, which takes ~2 secs/scene; 500 scenes would therefore take over 16 mins (every time). 63 | 64 | It does this using the [6S emulator](https://github.com/samsammurphy/6S_emulator), which is based on n-dimensional interpolated look-up tables (iLUTs). These iLUTs are automatically downloaded and constructed locally. 65 | -------------------------------------------------------------------------------- /atmcorr/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/samsammurphy/ee-atmcorr-timeseries/411840d0009c2af89d2d56a2a1d8f95be8956c20/atmcorr/__init__.py -------------------------------------------------------------------------------- /atmcorr/atmcorr_timeseries.py: -------------------------------------------------------------------------------- 1 | # This Python file uses the following encoding: utf-8 2 | """ 3 | atmcorr_timeseries.py, Sam Murphy (2017-06-26) 4 | 5 | This module calculates surface reflectance through time for an Earth Engine 6 | feature collection of cloud-free radiances and atmospheric correction inputs. 7 | Uses a 6S emulator (i.e. interpolated look up tables and elliptical orbit 8 | correction) 9 | """ 10 | 11 | import math 12 | import atmcorr.mission_specifics as mission_s 13 | 14 | def atmcorr(radiance, perihelion, day_of_year): 15 | """ 16 | Atmospherically corrects radiance using correction coefficients 17 | at perihelion adjusted for Earth's elliptical orbit 18 | """ 19 | 20 | # elliptical orbit correction 21 | elliptical_orbit_correction = 0.03275104*math.cos(math.radians(day_of_year/1.04137484)) + 0.96804905 22 | 23 | # correction coefficients 24 | a = perihelion[0] * elliptical_orbit_correction 25 | b = perihelion[1] * elliptical_orbit_correction 26 | 27 | # surface reflectance 28 | try: 29 | SR = (radiance - a) / b 30 | except (TypeError, ZeroDivisionError): # e.g. radiance is None because every pixel was masked 31 | SR = None 32 | 33 | return SR 34 | 35 | 36 | def surface_reflectance_timeseries(meanRadiance, iLUTs, mission): 37 | """ 38 | Atmospherically corrects mean (cloud-free) pixel radiances 39 | returning a time series of surface reflectance values.
40 | """ 41 | 42 | feature_collection = meanRadiance['features'] 43 | 44 | # band names 45 | ee_bandnames = mission_s.ee_bandnames(mission) 46 | py6s_bandnames = mission_s.py6s_bandnames(mission) 47 | 48 | # time series output variable 49 | timeSeries = {'timeStamp':[], 'mission':mission} 50 | for ee_bandname in ee_bandnames: 51 | timeSeries[ee_bandname] = [] 52 | 53 | # atmospherically correct each scene in collection 54 | for feature in feature_collection: 55 | 56 | # time stamp 57 | timeSeries['timeStamp'].append(feature['properties']['timeStamp']) 58 | 59 | # mean average pixel radiances 60 | mean_averages = feature['properties']['mean_averages'] 61 | 62 | # atmospheric correction inputs 63 | atmcorr_inputs = feature['properties']['atmcorr_inputs'] 64 | solar_z = atmcorr_inputs['solar_z'] # solar zenith [degrees] 65 | h2o = atmcorr_inputs['h2o'] # water vapour column 66 | o3 = atmcorr_inputs['o3'] # ozone 67 | aot = atmcorr_inputs['aot'] # aerosol optical thickness 68 | alt = atmcorr_inputs['alt'] # altitude (above sea level, [km]) 69 | day_of_year = atmcorr_inputs['doy'] # i.e. Jan 1st = 1 70 | 71 | # atmospheric correction (each waveband) 72 | for i, ee_bandname in enumerate(ee_bandnames): 73 | radiance = mean_averages[ee_bandname] 74 | iLUT = iLUTs.iLUTs[py6s_bandnames[i]] 75 | perihelion = iLUT(solar_z, h2o, o3, aot, alt) 76 | timeSeries[ee_bandname].append(atmcorr(radiance, perihelion, day_of_year)) 77 | 78 | return timeSeries -------------------------------------------------------------------------------- /atmcorr/atmospheric.py: -------------------------------------------------------------------------------- 1 | """ 2 | atmospheric.py, Sam Murphy (2016-10-26) 3 | 4 | Atmospheric water vapour, ozone and AOT from GEE 5 | 6 | Usage 7 | H2O = Atmospheric.water(geom,date) 8 | O3 = Atmospheric.ozone(geom,date) 9 | AOT = Atmospheric.aerosol(geom,date) 10 | 11 | """ 12 | 13 | 14 | import ee 15 | 16 | class Atmospheric(): 17 | 18 | def round_date(date,xhour): 19 | """ 20 | rounds a date of to the closest 'x' hours 21 | """ 22 | y = date.get('year') 23 | m = date.get('month') 24 | d = date.get('day') 25 | H = date.get('hour') 26 | HH = H.divide(xhour).round().multiply(xhour) 27 | return date.fromYMD(y,m,d).advance(HH,'hour') 28 | 29 | def round_month(date): 30 | """ 31 | round date to closest month 32 | """ 33 | # start of THIS month 34 | m1 = date.fromYMD(date.get('year'),date.get('month'),ee.Number(1)) 35 | 36 | # start of NEXT month 37 | m2 = m1.advance(1,'month') 38 | 39 | # difference from date 40 | d1 = ee.Number(date.difference(m1,'day')).abs() 41 | d2 = ee.Number(date.difference(m2,'day')).abs() 42 | 43 | # return closest start of month 44 | return ee.Date(ee.Algorithms.If(d2.gt(d1),m1,m2)) 45 | 46 | 47 | 48 | def water(geom,date): 49 | """ 50 | Water vapour column above target at time of image aquisition. 51 | 52 | (Kalnay et al., 1996, The NCEP/NCAR 40-Year Reanalysis Project. Bull. 53 | Amer. Meteor. 
Soc., 77, 437-471) 54 | """ 55 | 56 | # Point geometry required 57 | centroid = geom.centroid() 58 | 59 | # H2O datetime is in 6 hour intervals 60 | H2O_date = Atmospheric.round_date(date,6) 61 | 62 | # filtered water collection 63 | water_ic = ee.ImageCollection('NCEP_RE/surface_wv').filterDate(H2O_date, H2O_date.advance(1,'month')) 64 | 65 | # water image 66 | water_img = ee.Image(water_ic.first()) 67 | 68 | # water_vapour at target 69 | water = water_img.reduceRegion(reducer=ee.Reducer.mean(), geometry=centroid).get('pr_wtr') 70 | 71 | # convert to Py6S units (Google = kg/m^2, Py6S = g/cm^2) 72 | water_Py6S_units = ee.Number(water).divide(10) 73 | 74 | return water_Py6S_units 75 | 76 | 77 | 78 | def ozone(geom,date): 79 | """ 80 | returns ozone measurement from merged TOMS/OMI dataset 81 | 82 | OR 83 | 84 | uses our fill value (which is mean value for that latlon and day-of-year) 85 | 86 | """ 87 | 88 | # Point geometry required 89 | centroid = geom.centroid() 90 | 91 | def ozone_measurement(centroid,O3_date): 92 | 93 | # filtered ozone collection 94 | ozone_ic = ee.ImageCollection('TOMS/MERGED').filterDate(O3_date, O3_date.advance(1,'month')) 95 | 96 | # ozone image 97 | ozone_img = ee.Image(ozone_ic.first()) 98 | 99 | # ozone value IF TOMS/OMI image exists ELSE use fill value 100 | ozone = ee.Algorithms.If(ozone_img,\ 101 | ozone_img.reduceRegion(reducer=ee.Reducer.mean(), geometry=centroid).get('ozone'),\ 102 | ozone_fill(centroid,O3_date)) 103 | 104 | return ozone 105 | 106 | def ozone_fill(centroid,O3_date): 107 | """ 108 | Gets our ozone fill value (i.e. mean value for that doy and latlon) 109 | 110 | you can see it 111 | 1) compared to LEDAPS: https://code.earthengine.google.com/8e62a5a66e4920e701813e43c0ecb83e 112 | 2) as a video: https://www.youtube.com/watch?v=rgqwvMRVguI&feature=youtu.be 113 | 114 | """ 115 | 116 | # ozone fills (i.e. one band per doy) 117 | ozone_fills = ee.ImageCollection('users/samsammurphy/public/ozone_fill').toList(366) 118 | 119 | # day of year index 120 | jan01 = ee.Date.fromYMD(O3_date.get('year'),1,1) 121 | doy_index = date.difference(jan01,'day').toInt()# (NB. index is one less than doy, so no need to +1) 122 | 123 | # day of year image 124 | fill_image = ee.Image(ozone_fills.get(doy_index)) 125 | 126 | # return scalar fill value 127 | return fill_image.reduceRegion(reducer=ee.Reducer.mean(), geometry=centroid).get('ozone') 128 | 129 | # O3 datetime in 24 hour intervals 130 | O3_date = Atmospheric.round_date(date,24) 131 | 132 | # TOMS temporal gap 133 | TOMS_gap = ee.DateRange('1994-11-01','1996-08-01') 134 | 135 | # avoid TOMS gap entirely 136 | ozone = ee.Algorithms.If(TOMS_gap.contains(O3_date),ozone_fill(centroid,O3_date),ozone_measurement(centroid,O3_date)) 137 | 138 | # fix other data gaps (e.g. spatial, missing images, etc..) 139 | ozone = ee.Algorithms.If(ozone,ozone,ozone_fill(centroid,O3_date)) 140 | 141 | #convert to Py6S units 142 | ozone_Py6S_units = ee.Number(ozone).divide(1000)# (i.e. Dobson units are milli-atm-cm ) 143 | 144 | return ozone_Py6S_units 145 | 146 | 147 | def aerosol(geom,date): 148 | """ 149 | Aerosol Optical Thickness. 150 | 151 | try: 152 | MODIS Aerosol Product (monthly) 153 | except: 154 | fill value 155 | """ 156 | 157 | def aerosol_fill(date): 158 | """ 159 | MODIS AOT fill value for this month (i.e. 
no data gaps) 160 | """ 161 | return ee.Image('users/samsammurphy/public/AOT_stack')\ 162 | .select([ee.String('AOT_').cat(date.format('M'))])\ 163 | .rename(['AOT_550']) 164 | 165 | 166 | def aerosol_this_month(date): 167 | """ 168 | MODIS AOT original data product for this month (i.e. some data gaps) 169 | """ 170 | # image for this month 171 | img = ee.Image(\ 172 | ee.ImageCollection('MODIS/006/MOD08_M3')\ 173 | .filterDate(Atmospheric.round_month(date))\ 174 | .first()\ 175 | ) 176 | 177 | # fill missing month (?) 178 | img = ee.Algorithms.If(img,\ 179 | # all good 180 | img\ 181 | .select(['Aerosol_Optical_Depth_Land_Mean_Mean_550'])\ 182 | .divide(1000)\ 183 | .rename(['AOT_550']),\ 184 | # missing month 185 | aerosol_fill(date)) 186 | 187 | return img 188 | 189 | 190 | def get_AOT(AOT_band,geom): 191 | """ 192 | AOT scalar value for target 193 | """ 194 | return ee.Image(AOT_band).reduceRegion(reducer=ee.Reducer.mean(),\ 195 | geometry=geom.centroid())\ 196 | .get('AOT_550') 197 | 198 | 199 | after_modis_start = date.difference(ee.Date('2000-03-01'),'month').gt(0) 200 | 201 | AOT_band = ee.Algorithms.If(after_modis_start, aerosol_this_month(date), aerosol_fill(date)) 202 | 203 | AOT = get_AOT(AOT_band,geom) 204 | 205 | AOT = ee.Algorithms.If(AOT,AOT,get_AOT(aerosol_fill(date),geom)) 206 | # i.e. check reduce region worked (else force fill value) 207 | 208 | return AOT -------------------------------------------------------------------------------- /atmcorr/cloudRemover.py: -------------------------------------------------------------------------------- 1 | """ 2 | cloudRemover.py, Sam Murphy (2017-07-11) 3 | 4 | Collection of cloud removal methods for Sentinel 2 and Landsat 5 | 6 | for details: https://github.com/samsammurphy/cloud-masking-sentinel2 7 | """ 8 | 9 | import ee 10 | import math 11 | 12 | def ESAclouds(toa): 13 | """ 14 | European Space Agency (ESA) clouds from 'QA60', i.e. Quality Assessment band at 60m 15 | 16 | parsed by Nick Clinton 17 | """ 18 | 19 | qa = toa.select('QA60') 20 | 21 | # bits 10 and 11 are clouds and cirrus 22 | cloudBitMask = int(2**10) 23 | cirrusBitMask = int(2**11) 24 | 25 | # both flags set to zero indicates clear conditions. 26 | clear = qa.bitwiseAnd(cloudBitMask).eq(0).And(\ 27 | qa.bitwiseAnd(cirrusBitMask).eq(0)) 28 | 29 | # cloud is not clear 30 | cloud = clear.eq(0) 31 | 32 | return cloud 33 | 34 | def shadowMask(toa,cloudMask): 35 | """ 36 | Finds cloud shadows in images 37 | 38 | Originally by Gennadii Donchyts, adapted by Ian Housman 39 | """ 40 | 41 | def potentialShadow(cloudHeight): 42 | """ 43 | Finds potential shadow areas from array of cloud heights 44 | 45 | returns an image stack (i.e. list of images) 46 | """ 47 | cloudHeight = ee.Number(cloudHeight) 48 | 49 | # shadow vector length 50 | shadowVector = zenith.tan().multiply(cloudHeight) 51 | 52 | # x and y components of shadow vector length 53 | x = azimuth.cos().multiply(shadowVector).divide(nominalScale).round() 54 | y = azimuth.sin().multiply(shadowVector).divide(nominalScale).round() 55 | 56 | # affine translation of clouds 57 | cloudShift = cloudMask.changeProj(cloudMask.projection(), cloudMask.projection().translate(x, y)) # could incorporate shadow stretch? 
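# (i.e. the cloud mask is shifted by the offset at which a cloud of this height would cast its shadow, giving one candidate shadow mask per cloud height in the stack)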
58 | 59 | return cloudShift 60 | 61 | # solar geometry (radians) 62 | azimuth = ee.Number(toa.get('solar_azimuth')).multiply(math.pi).divide(180.0).add(ee.Number(0.5).multiply(math.pi)) 63 | zenith = ee.Number(0.5).multiply(math.pi ).subtract(ee.Number(toa.get('solar_zenith')).multiply(math.pi).divide(180.0)) 64 | 65 | # find potential shadow areas based on cloud and solar geometry 66 | nominalScale = cloudMask.projection().nominalScale() 67 | cloudHeights = ee.List.sequence(500,4000,500) 68 | potentialShadowStack = cloudHeights.map(potentialShadow) 69 | potentialShadow = ee.ImageCollection.fromImages(potentialShadowStack).max() 70 | 71 | # shadows are not clouds 72 | potentialShadow = potentialShadow.And(cloudMask.Not()) 73 | 74 | # (modified) dark pixel detection 75 | darkPixels = toa.normalizedDifference(['green', 'swir2']).gt(0.25) 76 | 77 | # shadows are dark 78 | shadow = potentialShadow.And(darkPixels).rename(['shadows']) 79 | 80 | # might be scope for one last check here. Dark surfaces (e.g. water, basalt, etc.) cause shadow commission errors. 81 | # perhaps using a NDWI (e.g. green and nir) 82 | 83 | return shadow 84 | 85 | # 86 | 87 | class CloudRemover: 88 | 89 | ESAclouds = ESAclouds 90 | shadowMask = shadowMask 91 | 92 | def sentinel2mask(img): 93 | """ 94 | Masks cloud (and shadow) pixels from Sentinel 2 image 95 | """ 96 | 97 | # top of atmosphere reflectance 98 | toa = img.select(['B1','B2','B3','B4','B6','B8A','B9','B10', 'B11','B12'],\ 99 | ['aerosol', 'blue', 'green', 'red', 'red2','red4','h2o', 'cirrus','swir1', 'swir2'])\ 100 | .divide(10000).addBands(img.select('QA60'))\ 101 | .set('solar_azimuth',img.get('MEAN_SOLAR_AZIMUTH_ANGLE'))\ 102 | .set('solar_zenith',img.get('MEAN_SOLAR_ZENITH_ANGLE')) 103 | 104 | # ESA clouds 105 | ESAcloud = CloudRemover.ESAclouds(toa) 106 | 107 | # Shadow 108 | shadow = CloudRemover.shadowMask(toa, ESAcloud) 109 | 110 | # cloud and shadow mask 111 | mask = ESAcloud.Or(shadow).eq(0) 112 | 113 | return img.updateMask(mask) 114 | 115 | def landsatMask(img): 116 | """ 117 | Masks cloud (and shadow) pixels from Landsat images 118 | """ 119 | 120 | # FMASK 121 | fmask = img.select('fmask') 122 | 123 | # cloud and shadow 124 | cloud = fmask.eq(4) 125 | shadow = fmask.eq(2) 126 | 127 | # cloudFree pixels are not cloud or shadow 128 | cloudFree = cloud.Or(shadow).eq(0) 129 | 130 | return img.updateMask(cloudFree) 131 | 132 | def fromMission(mission): 133 | 134 | switch = { 135 | 'sentinel2': CloudRemover.sentinel2mask, 136 | 'landsat8': CloudRemover.landsatMask, 137 | 'landsat7': CloudRemover.landsatMask, 138 | 'landsat5': CloudRemover.landsatMask, 139 | 'landsat4': CloudRemover.landsatMask, 140 | } 141 | 142 | return switch[mission.lower()] 143 | 144 | 145 | 146 | 147 | -------------------------------------------------------------------------------- /atmcorr/ee_requests.py: -------------------------------------------------------------------------------- 1 | """ 2 | ee_requests.py, Sam Murphy (2017-06-22) 3 | 4 | Set's up batch requests for reduced data from Earth Engine. 5 | 6 | Returns a feature collection which could be input into, for exampled: 7 | 8 | - export (e.g to csv) 9 | - getInfo (e.g. 
for usage in a notebook with less end-user hoops to jump through) 10 | 11 | ..depending on workflow 12 | """ 13 | 14 | import ee 15 | from atmcorr.atmospheric import Atmospheric 16 | from atmcorr.cloudRemover import CloudRemover 17 | import atmcorr.mission_specifics as mission_s 18 | 19 | class AtmcorrInput: 20 | """ 21 | Grabs the inputs required for atmospheric correction with 6S emulator 22 | """ 23 | 24 | # global elevation (kilometers) 25 | elevation = ee.Image('USGS/GMTED2010').divide(1000) 26 | 27 | def get(): 28 | 29 | altitude = AtmcorrInput.elevation.reduceRegion(\ 30 | reducer = ee.Reducer.mean(),\ 31 | geometry = TimeSeries.geom.centroid()\ 32 | ) 33 | 34 | return ee.Dictionary({ 35 | 'solar_z':mission_s.solar_z(TimeSeries.image, TimeSeries.mission), 36 | 'h2o':Atmospheric.water(TimeSeries.geom,TimeSeries.date), 37 | 'o3':Atmospheric.ozone(TimeSeries.geom,TimeSeries.date), 38 | 'aot':Atmospheric.aerosol(TimeSeries.geom,TimeSeries.date), 39 | 'alt':altitude.get('be75'), 40 | 'doy':TimeSeries.day_of_year 41 | }) 42 | 43 | class TimeSeries: 44 | """ 45 | This class is used to extract (cloud-free) mean-average radiance values 46 | contained within an earth engine geometry for all images in a collection, 47 | It also gathers the atmospheric correction input variables required to 48 | get surface reflectance from at-sensor radiance. 49 | """ 50 | 51 | def meanReduce(image, geom): 52 | """ 53 | Calculates mean average pixel values in a geometry 54 | """ 55 | 56 | mean_averages = image.reduceRegion(\ 57 | reducer = ee.Reducer.mean(),\ 58 | geometry = geom) 59 | 60 | return mean_averages 61 | 62 | def radianceFromTOA(): 63 | """ 64 | calculate at-sensor radiance from top-of-atmosphere (TOA) reflectance 65 | """ 66 | 67 | # top of atmosphere reflectance 68 | toa = mission_s.TOA(TimeSeries.image, TimeSeries.mission) 69 | 70 | # solar irradiances 71 | ESUNs = mission_s.ESUNs(TimeSeries.image, TimeSeries.mission) 72 | 73 | # wavebands 74 | bands = mission_s.ee_bandnames(TimeSeries.mission) 75 | 76 | # solar zenith (radians) 77 | theta = mission_s.solar_z(TimeSeries.image, TimeSeries.mission).multiply(0.017453293) 78 | 79 | # circular math 80 | pi = ee.Number(3.14159265359) 81 | 82 | # Earth-Sun distance squared (AU) 83 | d = ee.Number(TimeSeries.day_of_year).subtract(4).multiply(0.017202).cos().multiply(-0.01672).add(1) 84 | d_squared = d.multiply(d) 85 | 86 | # radiace at-sensor 87 | rad = toa.select(ee.List(bands)).multiply(ESUNs).multiply(theta.cos()).divide(pi).divide(d_squared) 88 | 89 | return rad 90 | 91 | def extractor(image): 92 | 93 | # update TimeSeries class 94 | TimeSeries.image = image 95 | TimeSeries.date = ee.Date(image.get('system:time_start')) 96 | jan01 = ee.Date.fromYMD(TimeSeries.date.get('year'),1,1) 97 | TimeSeries.day_of_year = ee.Number(TimeSeries.date.difference(jan01,'day')).add(1) 98 | 99 | # remove clouds and shadows? 
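# (pixels masked by the cloud remover are ignored by reduceRegion, so the mean radiance computed below is taken over cloud- and shadow-free pixels only)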
100 | if TimeSeries.removeClouds: 101 | cloudRemover = TimeSeries.cloudRemover.fromMission(TimeSeries.mission) 102 | TimeSeries.image = cloudRemover(image) 103 | 104 | # radiance at-sensor 105 | radiance = TimeSeries.radianceFromTOA() 106 | 107 | # mean average radiance 108 | mean_averages = TimeSeries.meanReduce(radiance, TimeSeries.geom) 109 | 110 | # atmospheric correction inputs 111 | atmcorr_inputs = AtmcorrInput.get() 112 | 113 | # export to feature collection 114 | properties = { 115 | 'imageID':image.get('system:index'), 116 | 'timeStamp':ee.Number(image.get('system:time_start')).divide(1000), 117 | 'mean_averages':mean_averages, 118 | 'atmcorr_inputs':atmcorr_inputs 119 | } 120 | 121 | return ee.Feature(TimeSeries.geom, properties) 122 | 123 | def request_meanRadiance(geom, startDate, stopDate, mission, removeClouds): 124 | """ 125 | Creates Earth Engine invocation for mean radiance values within a fixed 126 | geometry over an image collection (optionally applies cloud mask first) 127 | """ 128 | 129 | # initialize 130 | 131 | # time and a place 132 | TimeSeries.geom = geom 133 | TimeSeries.startDate = startDate 134 | TimeSeries.stopDate = stopDate 135 | 136 | # satellite mission 137 | TimeSeries.mission = mission 138 | 139 | # cloud removal 140 | TimeSeries.removeClouds = removeClouds 141 | TimeSeries.cloudRemover = CloudRemover 142 | 143 | # Earth Engine image collection 144 | ic = ee.ImageCollection(mission_s.eeCollection(mission))\ 145 | .filterBounds(geom)\ 146 | .filterDate(startDate, stopDate)\ 147 | .filter(mission_s.sunAngleFilter(mission)) 148 | 149 | return ic.map(TimeSeries.extractor).sort('timestamp') -------------------------------------------------------------------------------- /atmcorr/interpolated_lookup_tables.py: -------------------------------------------------------------------------------- 1 | """ 2 | interpolated_lookup_tables.py, Sam Murphy (2017-06-22) 3 | 4 | 5 | The interpolated_lookup_table.handler manages loading, downloading 6 | and interpolating the look up tables used by the 6S emulator 7 | 8 | """ 9 | 10 | import os 11 | import sys 12 | import glob 13 | import pickle 14 | import urllib.request 15 | import zipfile 16 | import time 17 | from itertools import product 18 | from scipy.interpolate import LinearNDInterpolator 19 | import atmcorr.mission_specifics as mission_s 20 | 21 | class handler: 22 | """ 23 | The interpolated_lookup_table.handler manages loading, downloading 24 | and interpolating the look up tables used by the 6S emulator 25 | """ 26 | 27 | def __init__(self, mission, path=False): 28 | 29 | self.userDefinedPath = path 30 | self.mission = mission 31 | self.supportedMissions = ['Sentinel2', 'Landsat8', 'Landsat7', 'Landsat5', 'Landsat4'] 32 | 33 | # default file paths 34 | self.bin_path = os.path.dirname(os.path.abspath(__file__)) 35 | self.base_path = os.path.dirname(self.bin_path) 36 | self.files_path = os.path.join(self.base_path,'files') 37 | self.py6S_sensor = mission_s.py6S_sensor(self.mission) 38 | self.LUT_path = os.path.join(self.files_path,'LUTs',self.py6S_sensor,\ 39 | 'Continental','view_zenith_0') 40 | self.iLUT_path = os.path.join(self.files_path,'iLUTs',self.py6S_sensor,\ 41 | 'Continental','view_zenith_0') 42 | 43 | def download_LUTs(self): 44 | """ 45 | Downloads the look-up tables for a given satellite mission 46 | """ 47 | 48 | # directory for zip file 49 | zip_dir = os.path.join(self.files_path,'LUTs') 50 | if not os.path.isdir(zip_dir): 51 | os.makedirs(zip_dir) 52 | 53 | # Sentinel 2 and Landsats URL switch 54 | 
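# (one pre-computed LUT archive per Py6S sensor name; downloaded once, then extracted and cached under files/LUTs)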
getURL = { 55 | 'S2A_MSI':"https://www.dropbox.com/s/aq873gil0ph47fm/S2A_MSI.zip?dl=1", 56 | 'LANDSAT_OLI':'https://www.dropbox.com/s/49ikr48d2qqwkhm/LANDSAT_OLI.zip?dl=1', 57 | 'LANDSAT_ETM':'https://www.dropbox.com/s/z6vv55cz5tow6tj/LANDSAT_ETM.zip?dl=1', 58 | 'LANDSAT_TM':'https://www.dropbox.com/s/uyiab5r9kl50m2f/LANDSAT_TM.zip?dl=1', 59 | 'LANDSAT_TM':'https://www.dropbox.com/s/uyiab5r9kl50m2f/LANDSAT_TM.zip?dl=1' 60 | } 61 | 62 | # download LUTs data 63 | print('downloading look up table (.lut) files..') 64 | url = getURL[self.py6S_sensor] 65 | u = urllib.request.urlopen(url) 66 | data = u.read() 67 | u.close() 68 | 69 | # save to zip file 70 | zip_filepath = os.path.join(zip_dir,self.py6S_sensor+'.zip') 71 | with open(zip_filepath, "wb") as f : 72 | f.write(data) 73 | 74 | # extract LUTs directory 75 | with zipfile.ZipFile(zip_filepath,"r") as zip_ref: 76 | zip_ref.extractall(zip_dir) 77 | 78 | # delete zip file 79 | os.remove(zip_filepath) 80 | 81 | print('download successful') 82 | 83 | 84 | def interpolate_LUTs(self): 85 | """ 86 | interpolates look up table files (.lut) 87 | """ 88 | 89 | filepaths = sorted(glob.glob(self.LUT_path+os.path.sep+'*.lut')) 90 | if filepaths: 91 | print('\n...Running n-dimensional interpolation may take a several minutes (only need to do this once)...') 92 | try: 93 | for fpath in filepaths: 94 | fname = os.path.basename(fpath) 95 | fid, ext = os.path.splitext(fname) 96 | ilut_filepath = os.path.join(self.iLUT_path,fid+'.ilut') 97 | if os.path.isfile(ilut_filepath): 98 | print('iLUT file already exists (skipping interpolation): {}'.format(fname)) 99 | else: 100 | print('Interpolating: '+fname) 101 | 102 | # load look up table 103 | LUT = pickle.load(open(fpath,"rb")) 104 | 105 | # input variables (all permutations) 106 | invars = LUT['config']['invars'] 107 | inputs = list(product(invars['solar_zs'], 108 | invars['H2Os'], 109 | invars['O3s'], 110 | invars['AOTs'], 111 | invars['alts'])) 112 | 113 | # output variables (6S correction coefficients) 114 | outputs = LUT['outputs'] 115 | 116 | # piecewise linear interpolant in n-dimensions 117 | t = time.time() 118 | interpolator = LinearNDInterpolator(inputs,outputs) 119 | print('Interpolation took {:.2f} (secs) = '.format(time.time()-t)) 120 | 121 | # save new interpolated LUT file 122 | pickle.dump(interpolator, open(ilut_filepath, 'wb' )) 123 | 124 | except: 125 | 126 | print('interpolation error') 127 | 128 | else: 129 | 130 | print('look up tables files (.lut) not found in LUTs directory:\n{}'.format(self.LUT_path)) 131 | 132 | def load_iluts_from_path(self): 133 | """ 134 | looks for .ilut files in self.iLUT_path and loads them into self.iLUTs 135 | """ 136 | 137 | print('Loading interpolated look up tables (.ilut) for {}..'.format(self.mission)) 138 | 139 | ilut_files = glob.glob(self.iLUT_path+os.path.sep+'*.ilut') 140 | if ilut_files: 141 | try: 142 | for f in ilut_files: 143 | bandName_py6s = os.path.basename(f).split('.')[0][-2:] 144 | self.iLUTs[bandName_py6s] = pickle.load(open(f,'rb')) 145 | print('Success') 146 | return 147 | except: 148 | print('error loading file: \n'+f) 149 | else: 150 | print('Interpolated look-up table files not found in:\n{}'.format(self.iLUT_path)) 151 | 152 | def load_iluts_from_mission(self): 153 | """ 154 | 1) loads iLUTs from default path 155 | 2) else, downloads look-up tables and interpolates them. 
156 | """ 157 | 158 | # check satellite mission is supported 159 | if self.mission.title() not in self.supportedMissions: 160 | print("mission '{0.mission}' not in supported missions:\n{0.supportedMissions}".format(self)) 161 | sys.exit(1) 162 | else: 163 | # use standardized format internally 164 | self.mission = self.mission.title() 165 | 166 | # try loading from default first 167 | try: 168 | self.load_iluts_from_path() 169 | except: 170 | pass 171 | 172 | def get(self): 173 | """ 174 | 175 | Loads interpolated look-up files in one of two ways: 176 | 177 | 1) if self.iLUT_path is defined: 178 | 179 | - load all .ilut files in that path 180 | 181 | 2) if self.mission is defined: 182 | 183 | - download lut files (i.e. look-up tables) 184 | - interpolate lut files (creating ilut files; note new 'i' prefix) 185 | - load the ilut files 186 | 187 | """ 188 | 189 | # create iLUTs dictionary 190 | self.iLUTs = {} 191 | 192 | # try loading from user defined-path 193 | if self.userDefinedPath: 194 | self.iLUT_path = self.userDefinedPath 195 | self.load_iluts_from_path() 196 | return 197 | 198 | # create default file paths? 199 | if not os.path.isdir(self.files_path): 200 | os.makedirs(self.files_path) 201 | if not os.path.isdir(self.LUT_path): 202 | os.makedirs(self.LUT_path) 203 | if not os.path.isdir(self.iLUT_path): 204 | os.makedirs(self.iLUT_path) 205 | 206 | # search default file paths for this mission 207 | if self.mission: 208 | self.load_iluts_from_mission() 209 | if self.iLUTs: 210 | return 211 | 212 | # try downloading? 213 | try: 214 | self.download_LUTs() 215 | self.interpolate_LUTs() 216 | self.load_iluts_from_path() 217 | except: 218 | pass 219 | 220 | # otherwise return error 221 | if not self.iLUT_path or not self.mission: 222 | print('must define self.path or self.mission of iLUT.handler() instance') 223 | sys.exit(1) 224 | 225 | 226 | 227 | # debugging 228 | # iLUTs = handler() 229 | # iLUTs.mission = 'Landsat7' 230 | # iLUTs.get() 231 | # print(iLUTs.iLUTs) -------------------------------------------------------------------------------- /atmcorr/kml_reader.py: -------------------------------------------------------------------------------- 1 | """ 2 | Reads kml files 3 | """ 4 | 5 | import os 6 | import ee 7 | from fastkml import kml 8 | 9 | def read_kml(fileName, polygonName): 10 | """ 11 | Atmospherically corrects radiance using correction coefficients 12 | at perihelion adjusted for Earth's ellipitcal orbit 13 | """ 14 | 15 | # read kml from file 16 | try: 17 | base_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) 18 | fpath = os.path.join(base_dir,'files','kml',fileName) 19 | with open(fpath,'rb') as file: 20 | kml_string = file.read() 21 | except: 22 | print('problem loading kml file: \n'+fpath) 23 | return 24 | 25 | # kml object 26 | k = kml.KML() 27 | k.from_string(kml_string) 28 | 29 | # parse the object 30 | document = list(k.features()) 31 | folder = list(document[0].features()) 32 | polygons = list(folder[0].features()) 33 | names = [p.name for p in polygons] 34 | 35 | # find polygon 36 | polygon = polygons[names.index(polygonName)].geometry 37 | 38 | # get coordinates 39 | coords = polygon.exterior.coords 40 | 41 | # create list of lonlat points 42 | lon = [x[0] for x in coords] 43 | lat = [x[1] for x in coords] 44 | lonlat = [x for t in zip(lon,lat) for x in t] 45 | 46 | # earth engine geometry 47 | return ee.Geometry.Polygon(lonlat) 48 | -------------------------------------------------------------------------------- 
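For reference, `read_kml` above expects the kml file to live under `files/kml/` and returns an Earth Engine polygon that can be passed straight to the time-series functions. A short usage sketch follows; the file and polygon names are hypothetical, and the geometry can equally be defined inline without a kml file:

```python
import ee
from atmcorr.kml_reader import read_kml

ee.Initialize()

# polygon digitised in Google Earth and saved to files/kml/sites.kml (hypothetical names)
geom = read_kml('sites.kml', 'study_area')

# ...or define the area of interest directly
geom = ee.Geometry.Rectangle(85.52, 25.62, 85.73, 25.82)
```

Either form works downstream, since the rest of the package only needs an `ee.Geometry`.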
/atmcorr/mission_specifics.py: -------------------------------------------------------------------------------- 1 | """ 2 | mission_specifics.py, Sam Murphy (2017-06-28) 3 | 4 | Information on satellite missions stored here (e.g. wavebands, etc.) 5 | """ 6 | 7 | import ee 8 | 9 | 10 | def ee_bandnames(mission): 11 | """ 12 | visible to short-wave infrared wavebands (EarthEngine nomenclature) 13 | 14 | notes: 15 | [1] skipped Landsat7 'PAN' to fit Py6S 16 | """ 17 | 18 | switch = { 19 | 'Sentinel2':['B1','B2','B3','B4','B5','B6','B7','B8','B8A','B9','B10','B11','B12'], 20 | 'Landsat8':['B1','B2','B3','B4','B5','B6','B7','B8','B9'], 21 | 'Landsat7':['B1','B2','B3','B4','B5','B7'], 22 | 'Landsat5':['B1','B2','B3','B4','B5','B7'], 23 | 'Landsat4':['B1','B2','B3','B4','B5','B7'] 24 | } 25 | 26 | return switch[mission] 27 | 28 | def py6s_bandnames(mission): 29 | """ 30 | visible to short-wave infrared wavebands (Py6S nomenclature) 31 | 32 | notes: 33 | [1] Landsat8 'B8' === 'PAN' 34 | [2] Landsat7 'PAN' is missing? 35 | 36 | """ 37 | 38 | switch = { 39 | 'Sentinel2':['01','02','03','04','05','06','07','08','09','10','11','12','13'], 40 | 'Landsat8':['B1','B2','B3','B4','B5','B6','B7','B8','B9'], 41 | 'Landsat7':['B1','B2','B3','B4','B5','B7'], 42 | 'Landsat5':['B1','B2','B3','B4','B5','B7'], 43 | 'Landsat4':['B1','B2','B3','B4','B5','B7'] 44 | } 45 | 46 | return switch[mission] 47 | 48 | def common_bandnames(mission): 49 | """ 50 | visible to short-wave infrared wavebands (common bandnames) 51 | """ 52 | 53 | switch = { 54 | 'Sentinel2':['aerosol','blue','green','red', 55 | 'redEdge1','redEdge2','redEdge3','nir','redEdge4', 56 | 'waterVapour','cirrus','swir1','swir2'], 57 | 'Landsat8':['aerosol','blue','green','red','nir','swir1','swir2','pan','cirrus'], 58 | 'Landsat7':['blue','green','red','nir','swir1','swir2'], 59 | 'Landsat5':['blue','green','red','nir','swir1','swir2'], 60 | 'Landsat4':['blue','green','red','nir','swir1','swir2'] 61 | } 62 | 63 | return switch[mission] 64 | 65 | def py6S_sensor(mission): 66 | """ 67 | Py6S satellite_sensor name from satellite mission name 68 | """ 69 | 70 | switch = { 71 | 'Sentinel2':'S2A_MSI', 72 | 'Landsat8':'LANDSAT_OLI', 73 | 'Landsat7':'LANDSAT_ETM', 74 | 'Landsat5':'LANDSAT_TM', 75 | 'Landsat4':'LANDSAT_TM' 76 | } 77 | 78 | return switch[mission] 79 | 80 | def eeCollection(mission): 81 | """ 82 | Earth Engine image collection name from satellite mission name 83 | """ 84 | 85 | switch = { 86 | 'Sentinel2':'COPERNICUS/S2', 87 | 'Landsat8':'LANDSAT/LC8_L1T_TOA_FMASK', 88 | 'Landsat7':'LANDSAT/LE7_L1T_TOA_FMASK', 89 | 'Landsat5':'LANDSAT/LT5_L1T_TOA_FMASK', 90 | 'Landsat4':'LANDSAT/LT4_L1T_TOA_FMASK' 91 | } 92 | 93 | return switch[mission] 94 | 95 | def sunAngleFilter(mission): 96 | """ 97 | Sun angle filter avoids where elevation < 15 degrees 98 | """ 99 | 100 | switch = { 101 | 'Sentinel2':ee.Filter.lt('MEAN_SOLAR_ZENITH_ANGLE',75), 102 | 'Landsat8':ee.Filter.gt('SUN_ELEVATION',15), 103 | 'Landsat7':ee.Filter.gt('SUN_ELEVATION',15), 104 | 'Landsat5':ee.Filter.gt('SUN_ELEVATION',15), 105 | 'Landsat4':ee.Filter.gt('SUN_ELEVATION',15) 106 | } 107 | 108 | return switch[mission] 109 | 110 | def ESUNs(img, mission): 111 | """ 112 | ESUN (Exoatmospheric spectral irradiance) 113 | 114 | References 115 | ---------- 116 | 117 | Landsat 4 [1] 118 | Landsat 5 [1] 119 | Landsat 7 [1] 120 | Landsat 8 [2] 121 | 122 | 123 | [1] Chander et al. (2009) Summary of current radiometric calibration 124 | coefficients for Landsat MSS, TM, ETM+, and EO-1 ALI sensors. 
125 | Remote Sensing of Environment. 113, 898-903 126 | 127 | [2] Benjamin Leutner (https://github.com/bleutner/RStoolbox) 128 | """ 129 | 130 | sentinel2 = ee.Image([ 131 | ee.Number(img.get('SOLAR_IRRADIANCE_B1')), 132 | ee.Number(img.get('SOLAR_IRRADIANCE_B2')), 133 | ee.Number(img.get('SOLAR_IRRADIANCE_B3')), 134 | ee.Number(img.get('SOLAR_IRRADIANCE_B4')), 135 | ee.Number(img.get('SOLAR_IRRADIANCE_B5')), 136 | ee.Number(img.get('SOLAR_IRRADIANCE_B6')), 137 | ee.Number(img.get('SOLAR_IRRADIANCE_B7')), 138 | ee.Number(img.get('SOLAR_IRRADIANCE_B8')), 139 | ee.Number(img.get('SOLAR_IRRADIANCE_B8A')), 140 | ee.Number(img.get('SOLAR_IRRADIANCE_B9')), 141 | ee.Number(img.get('SOLAR_IRRADIANCE_B10')), 142 | ee.Number(img.get('SOLAR_IRRADIANCE_B11')), 143 | ee.Number(img.get('SOLAR_IRRADIANCE_B12')) 144 | ]) 145 | 146 | Landsat8 = ee.Image([1895.33, 2004.57, 1820.75, 1549.49, 951.76, 247.55, 85.46, 1723.8, 366.97]) 147 | Landsat7 = ee.Image([1997, 1812, 1533, 1039, 230.8, 84.9]) # PAN = 1362 (removed to match Py6S) 148 | Landsat5 = ee.Image([1983, 1796, 1536, 1031, 220, 83.44]) 149 | Landsat4 = ee.Image([1983, 1795, 1539, 1028, 219.8, 83.49]) 150 | 151 | switch = { 152 | 'Sentinel2':sentinel2, 153 | 'Landsat8':Landsat8, 154 | 'Landsat7':Landsat7, 155 | 'Landsat5':Landsat5, 156 | 'Landsat4':Landsat4 157 | } 158 | 159 | return switch[mission] 160 | 161 | def solar_z(image, mission): 162 | """ 163 | solar zenith angle (degrees) 164 | """ 165 | 166 | def sentinel2(image): 167 | return ee.Number(image.get('MEAN_SOLAR_ZENITH_ANGLE')) 168 | 169 | def landsat(image): 170 | return ee.Number(90).subtract(image.get('SUN_ELEVATION')) 171 | 172 | switch = { 173 | 174 | 'Sentinel2':sentinel2, 175 | 'Landsat8':landsat, 176 | 'Landsat7':landsat, 177 | 'Landsat5':landsat, 178 | 'Landsat4':landsat 179 | } 180 | 181 | getSolarZenith = switch[mission] 182 | 183 | return getSolarZenith(image) 184 | 185 | def TOA(image, mission): 186 | 187 | switch = { 188 | 189 | 'Sentinel2':image.divide(10000), 190 | 'Landsat8':image, 191 | 'Landsat7':image, 192 | 'Landsat5':image, 193 | 'Landsat4':image 194 | } 195 | 196 | return switch[mission] -------------------------------------------------------------------------------- /atmcorr/plots.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | from matplotlib import pylab as plt 3 | import matplotlib.dates as mdates 4 | 5 | def figure_plotting_space(): 6 | """ 7 | defines the plotting space 8 | """ 9 | 10 | fig = plt.figure(figsize=(10,10)) 11 | bar_height = 0.04 12 | mini_gap = 0.03 13 | gap = 0.05 14 | graph_height = 0.24 15 | 16 | axH = fig.add_axes([0.1,gap+3*graph_height+2.5*mini_gap,0.87,bar_height]) 17 | axS = fig.add_axes([0.1,gap+2*graph_height+2*mini_gap,0.87,graph_height]) 18 | axV = fig.add_axes([0.1,gap+graph_height+mini_gap,0.87,graph_height]) 19 | 20 | return fig, axH, axS, axV 21 | 22 | def plot_colorbar(ax,image,ylabel=False): 23 | """ 24 | display a colorbar (e.g. 
hue-stretch) 25 | """ 26 | 27 | # plot image inside of figure 'axes' 28 | ax.imshow(image, interpolation='nearest', aspect='auto') 29 | ax.set_xticks([]) 30 | ax.set_yticks([]) 31 | ax.set_ylabel(ylabel) 32 | 33 | def plot_timeseries(DF, ax, name, startDate, stopDate, ylim=False): 34 | """ 35 | plots timeseries graphs 36 | """ 37 | 38 | # original time series 39 | ax.plot(DF[name],color='#1f77b4') 40 | ax.set_ylabel(name) 41 | ax.set_ylim(ylim) 42 | ax.set_xlim(pd.datetime.strptime(startDate,'%Y-%m-%d'),\ 43 | pd.datetime.strptime(stopDate,'%Y-%m-%d')) 44 | 45 | # boxcar average 46 | ax.plot(DF[name].rolling(180).mean(),color='red') 47 | 48 | # make the dates exact 49 | ax.fmt_xdata = mdates.DateFormatter('%Y-%m-%d') 50 | 51 | def plotTimeSeries(DF, hue_stretch, startDate, stopDate): 52 | 53 | fig, axH, axS, axV = figure_plotting_space() 54 | plot_colorbar(axH,[hue_stretch], ylabel='hue') 55 | plot_timeseries(DF, axS,'sat', startDate, stopDate, ylim=[0,1]) 56 | plot_timeseries(DF, axV,'val', startDate, stopDate) -------------------------------------------------------------------------------- /atmcorr/postProcessing.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | import colorsys 3 | 4 | def hsv(DF): 5 | """ 6 | Hue-staturation-value 7 | """ 8 | rgb = list(zip(DF['red'], DF['green'], DF['blue'])) 9 | DF['hue'] = [colorsys.rgb_to_hsv(x[0], x[1], x[2])[0] for x in rgb] 10 | DF['sat'] = [colorsys.rgb_to_hsv(x[0], x[1], x[2])[1] for x in rgb] 11 | DF['val'] = [colorsys.rgb_to_hsv(x[0], x[1], x[2])[2] for x in rgb] 12 | return DF 13 | 14 | 15 | def postProcessing(allTimeSeries, startDate, stopDate): 16 | 17 | # create a dataframe 18 | df = pd.DataFrame.from_dict(allTimeSeries) 19 | 20 | # timestamp as index 21 | df.index = [pd.datetime.utcfromtimestamp(t) for t in allTimeSeries['timeStamp']] 22 | df = df.drop('timeStamp', axis=1) 23 | 24 | # resample to daily 25 | daily = df.resample('D').mean() 26 | 27 | # fill in NaNs 28 | interpolated = daily.interpolate().ffill().bfill() 29 | 30 | # clip time series 31 | DF = interpolated.truncate(before=startDate, after=stopDate) 32 | 33 | # lets add hue-saturation-value color space 34 | DF = hsv(DF) 35 | 36 | return DF -------------------------------------------------------------------------------- /atmcorr/timeSeries.py: -------------------------------------------------------------------------------- 1 | import os 2 | import ee 3 | import pandas as pd 4 | 5 | import atmcorr.interpolated_lookup_tables as iLUT 6 | from atmcorr.ee_requests import request_meanRadiance 7 | from atmcorr.atmcorr_timeseries import surface_reflectance_timeseries 8 | from atmcorr.mission_specifics import ee_bandnames, common_bandnames 9 | 10 | def timeseries_extrator(geom, startDate, stopDate, mission, removeClouds=True): 11 | """ 12 | This is the function for extracting atmospherically corrected, 13 | cloud-free time series for a given satellite mission. 14 | """ 15 | 16 | # interpolated lookup tables 17 | iLUTs = iLUT.handler(mission) 18 | iLUTs.get() 19 | 20 | # earth engine request 21 | print('Getting data from Earth Engine.. 
') 22 | request = request_meanRadiance(geom, ee.Date(startDate), ee.Date(stopDate), \ 23 | mission, removeClouds) 24 | meanRadiance = request.getInfo() 25 | print('Data collection complete') 26 | 27 | # return if no pixels available 28 | num = len(meanRadiance['features']) 29 | if num == 0: 30 | return {} 31 | else: 32 | print('number of valid images = {}'.format(num)) 33 | 34 | # atmospheric correction 35 | print('Running atmospheric correction') 36 | timeseries = surface_reflectance_timeseries(meanRadiance, iLUTs, mission) 37 | print('Done') 38 | 39 | return timeseries 40 | 41 | def extractAllTimeSeries(target, geom, startDate, stopDate, missions, removeClouds=True): 42 | """ 43 | Extracts time series for each mission and join them together 44 | """ 45 | 46 | # will store results here (and use consistent band names) 47 | allTimeSeries = { 48 | 'blue':[], 49 | 'green':[], 50 | 'red':[], 51 | 'nir':[], 52 | 'swir1':[], 53 | 'swir2':[], 54 | 'timeStamp':[] 55 | } 56 | 57 | # for mission in ['Landsat4']: 58 | for mission in missions: 59 | 60 | timeseries = timeseries_extrator(geom, startDate, stopDate, mission, removeClouds=removeClouds) 61 | 62 | # names of wavebands 63 | eeNames = ee_bandnames(mission) 64 | commonNames = common_bandnames(mission) 65 | 66 | # add mission to timeseries 67 | for key in timeseries.keys(): 68 | if key[0] == 'B': 69 | commonName = commonNames[eeNames.index(key)] 70 | if commonName in allTimeSeries.keys(): 71 | allTimeSeries[commonName].append(timeseries[key]) 72 | if key == 'timeStamp': 73 | allTimeSeries['timeStamp'].append(timeseries['timeStamp']) 74 | 75 | # flatten each variables (from separate missions) into a single list 76 | def flatten(multilist): 77 | if isinstance(multilist[0], list): 78 | return [item for sublist in multilist for item in sublist] 79 | else: 80 | return multilist 81 | 82 | for key in allTimeSeries.keys(): 83 | allTimeSeries[key] = flatten(allTimeSeries[key]) 84 | 85 | return allTimeSeries 86 | 87 | def saveToExcel(target, allTimeSeries): 88 | basedir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) 89 | excel_dir = os.path.join(basedir,'files','excel') 90 | if not os.path.exists(excel_dir): 91 | os.makedirs(excel_dir) 92 | 93 | # create pandas data frame 94 | df = pd.DataFrame.from_dict(allTimeSeries) 95 | 96 | # save to excel 97 | df.to_excel(os.path.join(excel_dir, target+'.xlsx'), index=False) 98 | 99 | def loadFromExcel(target): 100 | basedir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) 101 | excel_path = os.path.join(basedir,'files','excel',target+'.xlsx') 102 | 103 | if os.path.isfile(excel_path): 104 | print('Loading from excel file') 105 | return pd.read_excel(excel_path).to_dict(orient='list') 106 | 107 | def timeSeries(target, geom, startDate, stopDate, missions, removeClouds=True): 108 | """ 109 | time series flow 110 | 1) try loading from excel 111 | 2) run the extraction 112 | 3) save to excel 113 | """ 114 | 115 | # try loading from excel first 116 | try: 117 | allTimeSeries = loadFromExcel(target) 118 | if allTimeSeries: 119 | return allTimeSeries 120 | except: 121 | pass 122 | 123 | # run extraction 124 | allTimeSeries = extractAllTimeSeries(target, geom, startDate, stopDate, missions) 125 | 126 | # save to excel 127 | saveToExcel(target, allTimeSeries) 128 | 129 | return allTimeSeries -------------------------------------------------------------------------------- /ee-atmcorr-coefficients-timeseries.py: -------------------------------------------------------------------------------- 1 | 
#!/usr/bin/env python3 2 | # Derive atmospheric correction coefficients for multiple images and optionally export corrected images. 3 | # Created on Mon Aug 6 4 | # @authors: Aman Verma, Preeti Rao 5 | 6 | # standard modules 7 | import ee 8 | from pprint import pprint 9 | import datetime 10 | import math 11 | import pickle 12 | ee.Initialize() 13 | # package modules 14 | from atmcorr.atmospheric import Atmospheric 15 | from atmcorr.timeSeries import timeSeries 16 | 17 | 18 | # AOI and type 19 | target = 'forest' 20 | geom = ee.Geometry.Rectangle(85.5268682942167402, 25.6240533612814261, 21 | 85.7263954375090407, 25.8241594034421382) 22 | # satellite missions, 23 | MISSIONS = ['Sentinel2'] 24 | # Change this to location of iLUTs 25 | DIRPATH = './files/iLUTs/S2A_MSI/Continental/view_zenith_0/' 26 | # start and end of time series 27 | START_DATE = '2016-11-19' # YYYY-MM-DD 28 | STOP_DATE = '2017-02-17' # YYYY-MM-DD 29 | NO_OF_BANDS = 13 30 | # the following creates interpolated lookup tables. 31 | _ = timeSeries(target, geom, START_DATE, STOP_DATE, MISSIONS) 32 | 33 | SRTM = ee.Image('CGIAR/SRTM90_V4') # Shuttle Radar Topography mission covers *most* of the Earth 34 | altitude = SRTM.reduceRegion(reducer=ee.Reducer.mean(), geometry=geom.centroid()).get('elevation').getInfo() 35 | KM = altitude/1000 # i.e. Py6S uses units of kilometers 36 | 37 | # The Sentinel-2 image collection 38 | S2 = ee.ImageCollection('COPERNICUS/S2').filterBounds(geom)\ 39 | .filterDate(START_DATE, STOP_DATE).sort('system:time_start') 40 | S2List = S2.toList(S2.size()) # must loop through lists 41 | 42 | NO_OF_IMAGES = S2.size().getInfo() # no. of images in the collection 43 | 44 | 45 | def atm_corr_image(imageInfo: dict) -> dict: 46 | """Retrieves atmospheric params from image. 47 | 48 | imageInfo is a dictionary created from an ee.Image object 49 | """ 50 | atmParams = {} 51 | # Python uses seconds, EE uses milliseconds: 52 | scene_date = datetime.datetime.utcfromtimestamp(imageInfo['system:time_start']/1000) 53 | dt1 = ee.Date(str(scene_date).rsplit(sep=' ')[0]) 54 | 55 | atmParams['doy'] = scene_date.timetuple().tm_yday 56 | atmParams['solar_z'] = imageInfo['MEAN_SOLAR_ZENITH_ANGLE'] 57 | atmParams['h2o'] = Atmospheric.water(geom, dt1).getInfo() 58 | atmParams['o3'] = Atmospheric.ozone(geom, dt1).getInfo() 59 | atmParams['aot'] = Atmospheric.aerosol(geom, dt1).getInfo() 60 | 61 | return atmParams 62 | 63 | 64 | def get_corr_coef(imageInfo: dict, atmParams: dict) -> list: 65 | """Gets correction coefficients for each band in the image. 
66 | 67 | Uses DIRPATH global variable 68 | Uses NO_OF_BANDS global variable 69 | Uses KM global variable 70 | Returns a list of [a, b] coefficient pairs 71 | """ 72 | corr_coefs = [] 73 | # zero-padded band numbers as strings, e.g. '01', '02', ... 74 | bandNos = [str(i).zfill(2) for i in range(1, NO_OF_BANDS + 1)] 75 | for band in bandNos: 76 | filepath = DIRPATH + 'S2A_MSI_' + band + '.ilut' 77 | with open(filepath, 'rb') as ilut_file: 78 | iluTable = pickle.load(ilut_file) 79 | a, b = iluTable(atmParams['solar_z'], atmParams['h2o'], atmParams['o3'], atmParams['aot'], KM) 80 | elliptical_orbit_correction = 0.03275104*math.cos(atmParams['doy']/59.66638337) + 0.96804905 81 | a *= elliptical_orbit_correction 82 | b *= elliptical_orbit_correction 83 | corr_coefs.append([a, b]) 84 | return corr_coefs 85 | 86 | 87 | def toa_to_rad_multiplier(bandname: str, imageInfo: dict, atmParams: dict) -> float: 88 | """Returns a multiplier for converting TOA reflectance to radiance 89 | 90 | bandname is a string like 'B1' 91 | """ 92 | ESUN = imageInfo['SOLAR_IRRADIANCE_'+bandname] 93 | # solar exoatmospheric spectral irradiance 94 | solar_angle_correction = math.cos(math.radians(atmParams['solar_z'])) 95 | # Earth-Sun distance in AU (from day of year; the 0.9856*(doy-4) angle is in degrees) 96 | d = 1 - 0.01672 * math.cos(math.radians(0.9856 * (atmParams['doy']-4))) 97 | # http://physics.stackexchange.com/questions/177949/earth-sun-distance-on-a-given-day-of-the-year 98 | # conversion factor 99 | multiplier = ESUN*solar_angle_correction/(math.pi*d**2) 100 | # multiplying TOA reflectance by this factor gives at-sensor radiance 101 | 102 | return multiplier 103 | 104 | 105 | def atm_corr_band(image, imageInfo: dict, atmParams: dict): 106 | """Atmospherically correct image 107 | 108 | Converts TOA reflectance to radiance. 109 | Applies correction coefficients to get surface reflectance. 110 | Returns ee.Image object 111 | """ 112 | oldImage = ee.Image(image).divide(10000) 113 | newImage = ee.Image() 114 | cor_coeff_list = get_corr_coef(imageInfo, atmParams) 115 | bandnames = oldImage.bandNames().getInfo() 116 | for ii in range(NO_OF_BANDS): 117 | img2RadMultiplier = toa_to_rad_multiplier(bandnames[ii], imageInfo, atmParams) 118 | imgRad = oldImage.select(bandnames[ii]).multiply(img2RadMultiplier) 119 | constImageA = ee.Image.constant(cor_coeff_list[ii][0]) 120 | constImageB = ee.Image.constant(cor_coeff_list[ii][1]) 121 | surRef = imgRad.subtract(constImageA).divide(constImageB) 122 | newImage = newImage.addBands(surRef) 123 | 124 | # keep only the corrected bands (ee.Image() supplies a placeholder band at index 0): 125 | return newImage.select(list(range(1, NO_OF_BANDS + 1))) 126 | 127 | 128 | S3 = S2List 129 | SrList = ee.List([0]) # placeholder first element (removed later with slice(1)) 130 | export_list = [] 131 | coeff_list = [] 132 | for i in range(NO_OF_IMAGES): 133 | iInfo = S3.get(i).getInfo() 134 | iInfoProps = iInfo['properties'] 135 | atmVars = atm_corr_image(iInfoProps) 136 | corrCoeffs = get_corr_coef(iInfoProps, atmVars) 137 | coeff_list.append(corrCoeffs) 138 | # Uncomment the rest as you please to get an ee.List of the corrected images or even export them to Google Drive.
139 | # img = atm_corr_band(ee.Image(S2List.get(i)), iInfoProps, atmVars) 140 | # export = ee.batch.Export.image.toDrive( 141 | # image=img, 142 | # fileNamePrefix='sen2_' + str(i), 143 | # description='py', 144 | # scale = 10, 145 | # folder = "gee_img", 146 | # maxPixels = 1e13 147 | # ) 148 | # export_list.append(export) 149 | # SrList = SrList.add(img) 150 | 151 | # SrList = SrList.slice(1) # remove the placeholder first element 152 | # for task in export_list: 153 | # task.start() 154 | with open('coeff_list.txt', 'w') as f: 155 | pprint(coeff_list, stream=f) 156 | -------------------------------------------------------------------------------- /ee-atmcorr-timeseries.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Atmospherically Corrected Earth Engine Time Series " 8 | ] 9 | }, 10 | { 11 | "cell_type": "markdown", 12 | "metadata": {}, 13 | "source": [ 14 | "### Overview\n", 15 | "\n", 16 | "This notebook creates atmospherically corrected time series of satellite imagery using [Google Earth Engine](https://earthengine.google.com/) and the [6S emulator](https://github.com/samsammurphy/6S_emulator). \n", 17 | "\n", 18 | "### Supported missions\n", 19 | "\n", 20 | "* Sentinel2\n", 21 | "* Landsat8\n", 22 | "* Landsat7\n", 23 | "* Landsat5\n", 24 | "* Landsat4\n", 25 | "\n", 26 | "### Output\n", 27 | "\n", 28 | "Average, cloud-*free* pixel values \n", 29 | "\n", 30 | "### Cloud masking\n", 31 | "\n", 32 | "Uses standard cloud masks, i.e. FMASK for Landsat and ESA-QA60 for Sentinel 2. There is no guarantee they will find all clouds; a discussion of more advanced and/or alternative cloud-masking strategies is available [here](https://groups.google.com/forum/#!searchin/google-earth-engine-developers/cloud$20AND$20sentinel2%7Csort:relevance/google-earth-engine-developers/i63DS-Dg8Sg/FWenONUFBwAJ)" 33 | ] 34 | }, 35 | { 36 | "cell_type": "markdown", 37 | "metadata": {}, 38 | "source": [ 39 | "#### Initialize" 40 | ] 41 | }, 42 | { 43 | "cell_type": "code", 44 | "execution_count": null, 45 | "metadata": { 46 | "collapsed": true 47 | }, 48 | "outputs": [], 49 | "source": [ 50 | "# standard modules\n", 51 | "import os\n", 52 | "import sys\n", 53 | "import ee\n", 54 | "import colorsys\n", 55 | "from IPython.display import display, Image\n", 56 | "%matplotlib inline\n", 57 | "ee.Initialize()\n", 58 | "\n", 59 | "# custom modules\n", 60 | "# base_dir = os.path.dirname(os.getcwd())\n", 61 | "# sys.path.append(os.path.join(base_dir,'atmcorr'))\n", 62 | "from atmcorr.timeSeries import timeSeries\n", 63 | "from atmcorr.postProcessing import postProcessing\n", 64 | "from atmcorr.plots import plotTimeSeries" 65 | ] 66 | }, 67 | { 68 | "cell_type": "markdown", 69 | "metadata": {}, 70 | "source": [ 71 | "### User Input" 72 | ] 73 | }, 74 | { 75 | "cell_type": "code", 76 | "execution_count": null, 77 | "metadata": { 78 | "collapsed": true 79 | }, 80 | "outputs": [], 81 | "source": [ 82 | "target = 'forest'\n", 83 | "geom = ee.Geometry.Rectangle(-82.10941, 37.33251, -82.08195, 37.34698)\n", 84 | "\n", 85 | "\n", 86 | "# start and end of time series\n", 87 | "startDate = '1990-01-01'# YYYY-MM-DD\n", 88 | "stopDate = '2017-01-01'# YYYY-MM-DD\n", 89 | "\n", 90 | "# satellite missions\n", 91 | "missions = ['Sentinel2', 'Landsat8', 'Landsat7', 'Landsat5', 'Landsat4']" 92 | ] 93 | }, 94 | { 95 | "cell_type": "markdown", 96 | "metadata": {},
"source": [ 98 | "### All time series\n", 99 | "This function extracts cloud-free time series for each mission, atmospherically corrects them and joins them together" 100 | ] 101 | }, 102 | { 103 | "cell_type": "code", 104 | "execution_count": null, 105 | "metadata": {}, 106 | "outputs": [], 107 | "source": [ 108 | "allTimeSeries = timeSeries(target, geom, startDate, stopDate, missions)" 109 | ] 110 | }, 111 | { 112 | "cell_type": "markdown", 113 | "metadata": {}, 114 | "source": [ 115 | "### Data post-processing\n", 116 | "Resample into daily intervals using liner interpolation and calculate hue-saturation-value from RGB." 117 | ] 118 | }, 119 | { 120 | "cell_type": "code", 121 | "execution_count": null, 122 | "metadata": { 123 | "collapsed": true 124 | }, 125 | "outputs": [], 126 | "source": [ 127 | "DF = postProcessing(allTimeSeries, startDate, stopDate)" 128 | ] 129 | }, 130 | { 131 | "cell_type": "markdown", 132 | "metadata": {}, 133 | "source": [ 134 | "### Hue Stretch\n", 135 | "We visualize *hue* by taking a [HSV](http://infohost.nmt.edu/tcc/help/pubs/colortheory/web/hsv.html) color triplet and 'strecthing' the *saturation* and *value* (i.e. setting them to 1) then converting the new 'stretched' HSV color-triplet back into RGB for display on the screen." 136 | ] 137 | }, 138 | { 139 | "cell_type": "code", 140 | "execution_count": null, 141 | "metadata": { 142 | "collapsed": true 143 | }, 144 | "outputs": [], 145 | "source": [ 146 | "hue_stretch = [colorsys.hsv_to_rgb(hue,1,1) for hue in DF['hue']]" 147 | ] 148 | }, 149 | { 150 | "cell_type": "markdown", 151 | "metadata": {}, 152 | "source": [ 153 | "### bringing it all together...\n", 154 | "make a pretty graph to help us do some science." 155 | ] 156 | }, 157 | { 158 | "cell_type": "code", 159 | "execution_count": null, 160 | "metadata": {}, 161 | "outputs": [], 162 | "source": [ 163 | "plotTimeSeries(DF, hue_stretch, startDate, stopDate)" 164 | ] 165 | }, 166 | { 167 | "cell_type": "markdown", 168 | "metadata": {}, 169 | "source": [ 170 | "This graph shows the Hue, Saturation and Value ([HSV](https://en.wikipedia.org/wiki/HSL_and_HSV)) of the target area through time." 171 | ] 172 | } 173 | ], 174 | "metadata": { 175 | "anaconda-cloud": {}, 176 | "kernelspec": { 177 | "display_name": "Python 3", 178 | "language": "python", 179 | "name": "python3" 180 | }, 181 | "language_info": { 182 | "codemirror_mode": { 183 | "name": "ipython", 184 | "version": 3 185 | }, 186 | "file_extension": ".py", 187 | "mimetype": "text/x-python", 188 | "name": "python", 189 | "nbconvert_exporter": "python", 190 | "pygments_lexer": "ipython3", 191 | "version": "3.6.2" 192 | } 193 | }, 194 | "nbformat": 4, 195 | "nbformat_minor": 2 196 | } 197 | --------------------------------------------------------------------------------