├── sar_asf_to_gee ├── __init__.py ├── _modidx.py ├── core.py ├── asf_hyp3.py └── asf_static.py ├── images └── screenshot_coherence.png ├── MANIFEST.in ├── .gitignore ├── .github └── workflows │ ├── test.yaml │ └── deploy.yaml ├── styles.css ├── pixi.toml ├── _quarto.yml ├── settings.ini ├── setup.py ├── 00_core.ipynb ├── LICENSE ├── README.md ├── 02_asf_static.ipynb ├── index.ipynb └── 01_asf_hyp3.ipynb /sar_asf_to_gee/__init__.py: -------------------------------------------------------------------------------- 1 | __version__ = "0.0.1" 2 | -------------------------------------------------------------------------------- /images/screenshot_coherence.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gee-community/sar-asf-to-gee/main/images/screenshot_coherence.png -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include settings.ini 2 | include LICENSE 3 | include CONTRIBUTING.md 4 | include README.md 5 | recursive-exclude * __pycache__ 6 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # pixi environments 2 | .pixi 3 | 4 | /.quarto/ 5 | sar-asf-to-gee.code-workspace 6 | 7 | *.pyc 8 | 9 | .ipynb_checkpoints/ 10 | downloads/ 11 | temp/ 12 | temp_downloads/ -------------------------------------------------------------------------------- /.github/workflows/test.yaml: -------------------------------------------------------------------------------- 1 | name: CI 2 | on: [workflow_dispatch, pull_request, push] 3 | 4 | jobs: 5 | test: 6 | runs-on: ubuntu-latest 7 | steps: [uses: fastai/workflows/nbdev-ci@master] 8 | -------------------------------------------------------------------------------- /.github/workflows/deploy.yaml: -------------------------------------------------------------------------------- 1 | name: Deploy to GitHub Pages 2 | 3 | permissions: 4 | contents: write 5 | pages: write 6 | 7 | on: 8 | push: 9 | branches: [ "main", "master" ] 10 | workflow_dispatch: 11 | jobs: 12 | deploy: 13 | runs-on: ubuntu-latest 14 | steps: [uses: fastai/workflows/quarto-ghp@master] 15 | -------------------------------------------------------------------------------- /sar_asf_to_gee/_modidx.py: -------------------------------------------------------------------------------- 1 | # Autogenerated by nbdev 2 | 3 | d = { 'settings': { 'branch': 'master', 4 | 'doc_baseurl': '/sar-asf-to-gee', 5 | 'doc_host': 'https://gee-community.github.io', 6 | 'git_url': 'https://github.com/gee-community/sar-asf-to-gee', 7 | 'lib_path': 'sar_asf_to_gee'}, 8 | 'syms': {'sar_asf_to_gee.core': {'sar_asf_to_gee.core.foo': ('core.html#foo', 'sar_asf_to_gee/core.py')}}} -------------------------------------------------------------------------------- /styles.css: -------------------------------------------------------------------------------- 1 | .cell { 2 | margin-bottom: 1rem; 3 | } 4 | 5 | .cell > .sourceCode { 6 | margin-bottom: 0; 7 | } 8 | 9 | .cell-output > pre { 10 | margin-bottom: 0; 11 | } 12 | 13 | .cell-output > pre, .cell-output > .sourceCode > pre, .cell-output-stdout > pre { 14 | margin-left: 0.8rem; 15 | margin-top: 0; 16 | background: none; 17 | border-left: 2px solid lightsalmon; 18 | border-top-left-radius: 0; 19 | border-top-right-radius: 0; 20 | } 21 | 22 | .cell-output > 
.sourceCode { 23 | border: none; 24 | } 25 | 26 | .cell-output > .sourceCode { 27 | background: none; 28 | margin-top: 0; 29 | } 30 | 31 | div.description { 32 | padding-left: 2px; 33 | padding-top: 5px; 34 | font-style: italic; 35 | font-size: 135%; 36 | opacity: 70%; 37 | } 38 | -------------------------------------------------------------------------------- /pixi.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "sar-asf-to-gee" 3 | version = "0.1.0" 4 | description = "Add a short description here" 5 | authors = ["Tyler Erickson "] 6 | channels = ["conda-forge"] 7 | platforms = ["osx-arm64"] 8 | 9 | [tasks] 10 | jlab = "jupyter lab --ContentsManager.allow_hidden=True" 11 | 12 | [dependencies] 13 | python = "3.11.*" 14 | nbdev = "2.2.10.*" 15 | jupyter = "1.0.0.*" 16 | hyp3_sdk = ">=6.0.0,<6.1" 17 | asf_search = ">=6.7.3,<6.8" 18 | gcsfs = "2023.12.2.post1.*" 19 | pandas = ">=2.2.0,<2.3" 20 | pyarrow = ">=14.0.2,<14.1" 21 | earthengine-api = ">=0.1.386,<0.2" 22 | boto3 = ">=1.34.25,<1.35" 23 | requests = ">=2.31.0,<2.32" 24 | xarray = ">=2024.1.1,<2024.2" 25 | h5netcdf = ">=1.3.0,<1.4" 26 | netcdf4 = ">=1.6.5,<1.7" 27 | h5py = ">=3.10.0,<3.11" 28 | rioxarray = ">=0.15.1,<0.16" 29 | gdal = ">=3.8.3,<3.9" 30 | rio-cogeo = ">=5.2.0,<5.3" 31 | -------------------------------------------------------------------------------- /_quarto.yml: -------------------------------------------------------------------------------- 1 | ipynb-filters: [nbdev_filter] 2 | 3 | project: 4 | type: website 5 | output-dir: _docs 6 | preview: 7 | port: 3000 8 | browser: false 9 | 10 | format: 11 | html: 12 | theme: cosmo 13 | css: styles.css 14 | toc: true 15 | toc-depth: 4 16 | 17 | website: 18 | title: "sar-asf-to-gee" 19 | site-url: "https://gee-community.github.io/sar-asf-to-gee" 20 | description: "Facilitates transferring on-demand SAR products processed by ASF's HyP3 to Google Earth Engine." 21 | twitter-card: true 22 | open-graph: true 23 | repo-branch: master 24 | repo-url: "https://github.com/gee-community/sar-asf-to-gee" 25 | repo-actions: [issue] 26 | navbar: 27 | background: primary 28 | search: true 29 | right: 30 | - icon: github 31 | href: "https://github.com/gee-community/sar-asf-to-gee" 32 | sidebar: 33 | style: "floating" 34 | 35 | metadata-files: 36 | - sidebar.yml 37 | - custom.yml 38 | -------------------------------------------------------------------------------- /settings.ini: -------------------------------------------------------------------------------- 1 | [DEFAULT] 2 | # All sections below are required unless otherwise specified. 3 | # See https://github.com/fastai/nbdev/blob/master/settings.ini for examples. 4 | 5 | ### Python library ### 6 | repo = sar-asf-to-gee 7 | lib_name = %(repo)s 8 | version = 0.0.1 9 | min_python = 3.11 10 | license = apache2 11 | 12 | ### nbdev ### 13 | doc_path = _docs 14 | lib_path = sar_asf_to_gee 15 | nbs_path = . 16 | recursive = False 17 | tst_flags = notest 18 | 19 | ### Docs ### 20 | branch = master 21 | custom_sidebar = False 22 | doc_host = https://%(user)s.github.io 23 | doc_baseurl = /%(repo)s 24 | git_url = https://github.com/%(user)s/%(repo)s 25 | title = %(lib_name)s 26 | 27 | ### PyPI ### 28 | audience = Developers 29 | author = Tyler Erickson 30 | author_email = tyler@vorgeo.com 31 | copyright = 2023 ownwards, %(author)s 32 | description = Facilitates transferring on-demand SAR products processed by ASF's HyP3 to Google Earth Engine. 
33 | keywords = nbdev jupyter notebook python 34 | language = English 35 | status = 3 36 | user = gee-community 37 | 38 | ### Optional ### 39 | # requirements = fastcore pandas 40 | # dev_requirements = 41 | # console_scripts = -------------------------------------------------------------------------------- /sar_asf_to_gee/core.py: -------------------------------------------------------------------------------- 1 | # AUTOGENERATED! DO NOT EDIT! File to edit: ../00_core.ipynb. 2 | 3 | # %% auto 0 4 | __all__ = ['FORMAT_GEE_DATETIME_STRING', 'create_gee_image_collection', 'filter_jobs'] 5 | 6 | # %% ../00_core.ipynb 3 7 | import datetime 8 | from dateutil import parser 9 | import logging 10 | 11 | import ee 12 | 13 | # %% ../00_core.ipynb 4 14 | FORMAT_GEE_DATETIME_STRING = '%Y-%m-%dT%H:%M:%SZ' 15 | 16 | # Create a GEE image collection, if needed. 17 | def create_gee_image_collection(gee_gcp_project, gee_image_collection): 18 | try: 19 | ee.data.createAsset( 20 | value={'type': 'ImageCollection'}, 21 | path=f'projects/{gee_gcp_project}/assets/{gee_image_collection}' 22 | ) 23 | logging.debug('Succeeded in creating asset.') 24 | except ee.EEException as e: 25 | if str(e).startswith('Cannot overwrite asset'): 26 | logging.info('Unable to create GEE asset. It may already exist.') 27 | else: 28 | raise(e) 29 | 30 | # %% ../00_core.ipynb 5 31 | def filter_jobs( 32 | jobs, 33 | expired=None, 34 | status_code=None, 35 | ): 36 | "Filter ASF batch jobs by specified criteria." 37 | # jobs = hyp3_batch.jobs 38 | # Filter by expiration status. 39 | if expired is False: 40 | jobs = [ 41 | job for job in jobs 42 | if parser.parse(job.to_dict()['expiration_time']) > datetime.datetime.now(datetime.timezone.utc) 43 | ] 44 | elif expired is True: 45 | jobs = [ 46 | job for job in jobs 47 | if parser.parse(job.to_dict()['expiration_time']) <= datetime.datetime.now(datetime.timezone.utc) 48 | ] 49 | # Filter by status code. 
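    # A single status code string is normalized to a one-element list below, so both
    # status_code='SUCCEEDED' and status_code=['SUCCEEDED', 'FAILED'] are accepted.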
50 | if isinstance(status_code, str): 51 | print('Status code is a string') 52 | status_code = [status_code] 53 | if isinstance(status_code, list): 54 | print('Status code is a list') 55 | jobs = [ 56 | job for job in jobs 57 | if job.to_dict()['status_code'] in status_code 58 | ] 59 | return jobs 60 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from pkg_resources import parse_version 2 | from configparser import ConfigParser 3 | import setuptools, shlex 4 | assert parse_version(setuptools.__version__)>=parse_version('36.2') 5 | 6 | # note: all settings are in settings.ini; edit there, not here 7 | config = ConfigParser(delimiters=['=']) 8 | config.read('settings.ini', encoding='utf-8') 9 | cfg = config['DEFAULT'] 10 | 11 | cfg_keys = 'version description keywords author author_email'.split() 12 | expected = cfg_keys + "lib_name user branch license status min_python audience language".split() 13 | for o in expected: assert o in cfg, "missing expected setting: {}".format(o) 14 | setup_cfg = {o:cfg[o] for o in cfg_keys} 15 | 16 | licenses = { 17 | 'apache2': ('Apache Software License 2.0','OSI Approved :: Apache Software License'), 18 | 'mit': ('MIT License', 'OSI Approved :: MIT License'), 19 | 'gpl2': ('GNU General Public License v2', 'OSI Approved :: GNU General Public License v2 (GPLv2)'), 20 | 'gpl3': ('GNU General Public License v3', 'OSI Approved :: GNU General Public License v3 (GPLv3)'), 21 | 'bsd3': ('BSD License', 'OSI Approved :: BSD License'), 22 | } 23 | statuses = [ '1 - Planning', '2 - Pre-Alpha', '3 - Alpha', 24 | '4 - Beta', '5 - Production/Stable', '6 - Mature', '7 - Inactive' ] 25 | py_versions = '3.6 3.7 3.8 3.9 3.10'.split() 26 | 27 | requirements = shlex.split(cfg.get('requirements', '')) 28 | if cfg.get('pip_requirements'): requirements += shlex.split(cfg.get('pip_requirements', '')) 29 | min_python = cfg['min_python'] 30 | lic = licenses.get(cfg['license'].lower(), (cfg['license'], None)) 31 | dev_requirements = (cfg.get('dev_requirements') or '').split() 32 | 33 | setuptools.setup( 34 | name = cfg['lib_name'], 35 | license = lic[0], 36 | classifiers = [ 37 | 'Development Status :: ' + statuses[int(cfg['status'])], 38 | 'Intended Audience :: ' + cfg['audience'].title(), 39 | 'Natural Language :: ' + cfg['language'].title(), 40 | ] + ['Programming Language :: Python :: '+o for o in py_versions[py_versions.index(min_python):]] + (['License :: ' + lic[1] ] if lic[1] else []), 41 | url = cfg['git_url'], 42 | packages = setuptools.find_packages(), 43 | include_package_data = True, 44 | install_requires = requirements, 45 | extras_require={ 'dev': dev_requirements }, 46 | dependency_links = cfg.get('dep_links','').split(), 47 | python_requires = '>=' + cfg['min_python'], 48 | long_description = open('README.md', encoding='utf-8').read(), 49 | long_description_content_type = 'text/markdown', 50 | zip_safe = False, 51 | entry_points = { 52 | 'console_scripts': cfg.get('console_scripts','').split(), 53 | 'nbdev': [f'{cfg.get("lib_path")}={cfg.get("lib_path")}._modidx:d'] 54 | }, 55 | **setup_cfg) 56 | 57 | 58 | -------------------------------------------------------------------------------- /00_core.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# core\n", 8 | "\n", 9 | "> Fill in a module description here" 10 
| ] 11 | }, 12 | { 13 | "cell_type": "code", 14 | "execution_count": null, 15 | "metadata": {}, 16 | "outputs": [], 17 | "source": [ 18 | "#| default_exp core" 19 | ] 20 | }, 21 | { 22 | "cell_type": "code", 23 | "execution_count": null, 24 | "metadata": {}, 25 | "outputs": [], 26 | "source": [ 27 | "#| hide\n", 28 | "from nbdev.showdoc import *" 29 | ] 30 | }, 31 | { 32 | "cell_type": "code", 33 | "execution_count": null, 34 | "metadata": {}, 35 | "outputs": [], 36 | "source": [ 37 | "#| export\n", 38 | "import datetime\n", 39 | "from dateutil import parser\n", 40 | "import logging\n", 41 | "\n", 42 | "import ee" 43 | ] 44 | }, 45 | { 46 | "cell_type": "code", 47 | "execution_count": null, 48 | "metadata": {}, 49 | "outputs": [], 50 | "source": [ 51 | "#| export\n", 52 | "FORMAT_GEE_DATETIME_STRING = '%Y-%m-%dT%H:%M:%SZ'\n", 53 | "\n", 54 | "# Create a GEE image collection, if needed.\n", 55 | "def create_gee_image_collection(gee_gcp_project, gee_image_collection):\n", 56 | " try:\n", 57 | " ee.data.createAsset(\n", 58 | " value={'type': 'ImageCollection'},\n", 59 | " path=f'projects/{gee_gcp_project}/assets/{gee_image_collection}'\n", 60 | " )\n", 61 | " logging.debug('Succeeded in creating asset.')\n", 62 | " except ee.EEException as e:\n", 63 | " if str(e).startswith('Cannot overwrite asset'):\n", 64 | " logging.info('Unable to create GEE asset. It may already exist.')\n", 65 | " else:\n", 66 | " raise(e)" 67 | ] 68 | }, 69 | { 70 | "cell_type": "code", 71 | "execution_count": null, 72 | "metadata": {}, 73 | "outputs": [], 74 | "source": [ 75 | "#| export\n", 76 | "def filter_jobs(\n", 77 | " jobs,\n", 78 | " expired=None,\n", 79 | " status_code=None,\n", 80 | "):\n", 81 | " \"Filter ASF batch jobs by specified criteria.\"\n", 82 | " # jobs = hyp3_batch.jobs\n", 83 | " # Filter by expiration status.\n", 84 | " if expired is False:\n", 85 | " jobs = [\n", 86 | " job for job in jobs\n", 87 | " if parser.parse(job.to_dict()['expiration_time']) > datetime.datetime.now(datetime.timezone.utc)\n", 88 | " ]\n", 89 | " elif expired is True:\n", 90 | " jobs = [\n", 91 | " job for job in jobs\n", 92 | " if parser.parse(job.to_dict()['expiration_time']) <= datetime.datetime.now(datetime.timezone.utc)\n", 93 | " ]\n", 94 | " # Filter by status code.\n", 95 | " if isinstance(status_code, str):\n", 96 | " print('Status code is a string')\n", 97 | " status_code = [status_code]\n", 98 | " if isinstance(status_code, list):\n", 99 | " print('Status code is a list')\n", 100 | " jobs = [\n", 101 | " job for job in jobs\n", 102 | " if job.to_dict()['status_code'] in status_code\n", 103 | " ]\n", 104 | " return jobs" 105 | ] 106 | }, 107 | { 108 | "cell_type": "code", 109 | "execution_count": null, 110 | "metadata": {}, 111 | "outputs": [], 112 | "source": [ 113 | "#| hide\n", 114 | "import nbdev; nbdev.nbdev_export()" 115 | ] 116 | }, 117 | { 118 | "cell_type": "code", 119 | "execution_count": null, 120 | "metadata": {}, 121 | "outputs": [], 122 | "source": [] 123 | } 124 | ], 125 | "metadata": { 126 | "kernelspec": { 127 | "display_name": "Python 3 (ipykernel)", 128 | "language": "python", 129 | "name": "python3" 130 | } 131 | }, 132 | "nbformat": 4, 133 | "nbformat_minor": 4 134 | } 135 | -------------------------------------------------------------------------------- /sar_asf_to_gee/asf_hyp3.py: -------------------------------------------------------------------------------- 1 | # AUTOGENERATED! DO NOT EDIT! File to edit: ../01_asf_hyp3.ipynb. 
2 | 3 | # %% auto 0 4 | __all__ = ['Transfer'] 5 | 6 | # %% ../01_asf_hyp3.ipynb 3 7 | import datetime 8 | import logging 9 | import tempfile 10 | import os 11 | import re 12 | import subprocess 13 | import zipfile 14 | from pprint import pprint 15 | 16 | import asf_search 17 | from IPython.display import JSON 18 | import ee 19 | from fastcore.basics import patch 20 | import gcsfs 21 | from hyp3_sdk import HyP3 22 | from rio_cogeo import cogeo 23 | 24 | from . import core 25 | 26 | # %% ../01_asf_hyp3.ipynb 21 27 | class Transfer(): 28 | def __init__( 29 | self, 30 | job_dict, # HyP3 job dictionary 31 | gcs_bucket, # GCS bucket 32 | gee_gcp_project, # GCP project used by Earth Engine 33 | gee_image_collection=None, # Name of the Earth Engine ImageCollection (optional) 34 | local_storage=None, 35 | ): 36 | self.job_dict = job_dict 37 | self.gcs_bucket = gcs_bucket 38 | self.gee_gcp_project = gee_gcp_project 39 | self.gee_image_collection = gee_image_collection 40 | if local_storage: 41 | self.tempdir = None 42 | self.local_storage = local_storage 43 | else: 44 | self.tempdir = tempfile.TemporaryDirectory() 45 | self.local_storage = self.tempdir.name 46 | logging.debug(f'created temporary directory: {self.tempdir.name}') 47 | 48 | # %% ../01_asf_hyp3.ipynb 25 49 | @patch 50 | def to_local( 51 | self:Transfer, 52 | ): 53 | "Transfer HyP3 results to local system, unzip, and update the job dictionary." 54 | logging.info(f'Starting hpy3_results_to_local()') 55 | for file in self.job_dict['files']: 56 | logging.info(f'Processing {file["filename"]}') 57 | asf_search.download_url( 58 | url=file['url'], 59 | path=self.local_storage, 60 | filename=file['filename'], 61 | ) 62 | # Unzip the file 63 | logging.info(f' Unzipping the file') 64 | with zipfile.ZipFile(os.path.join(self.local_storage, file['filename']), 'r') as zip_ref: 65 | zip_ref.extractall(self.local_storage) 66 | 67 | # List the TIF files. 68 | scene_name = file['filename'].removesuffix('.zip') 69 | tifs = [x for x in os.listdir( 70 | os.path.join('temp_downloads', scene_name)) 71 | if x.endswith('.tif')] 72 | 73 | for count, tif in enumerate(tifs): 74 | logging.info(f' Converting to a Cloud Optimized GeoTIFF. {count + 1}/{len(tifs)}') 75 | subprocess.run([ 76 | "rio", 77 | "cogeo", 78 | "create", 79 | os.path.join(self.local_storage, scene_name, tif), 80 | os.path.join(self.local_storage, scene_name, tif) 81 | ]) 82 | 83 | tif_dict = {} 84 | pattern = rf'^({scene_name}_(.+).tif)$' 85 | for i in tifs: 86 | groups = re.search(pattern, i).groups() 87 | tif_dict[groups[1]] = os.path.join(scene_name, groups[0]) 88 | 89 | file['extracted'] = tif_dict 90 | 91 | # %% ../01_asf_hyp3.ipynb 31 92 | @patch 93 | def to_gcs( 94 | self:Transfer, 95 | ): 96 | logging.info('Starting to_gcs()') 97 | 98 | fs = gcsfs.GCSFileSystem(token='google_default') 99 | 100 | for file in self.job_dict['files']: 101 | for band, filename in file['extracted'].items(): 102 | gcs_path = f'{self.gcs_bucket}/{filename}' 103 | if fs.exists(gcs_path): 104 | logging.info(f'GCS file already exists:\n {gcs_path}') 105 | else: 106 | logging.info(f'Starting to transfer file to GCS:\n {gcs_path}') 107 | # Transfer the local file to GCS. 108 | fs.put_file( 109 | lpath=f"{self.local_storage}/{filename}", 110 | rpath=gcs_path 111 | ) 112 | logging.info(f'Transferred file to GCS: {gcs_path}') 113 | 114 | # %% ../01_asf_hyp3.ipynb 35 115 | @patch 116 | def create_gee_asset( 117 | self:Transfer, 118 | ): 119 | "Create an Earth Engine asset." 
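    # Ensure the target ImageCollection exists, derive the asset time range from the
    # granule metadata, then register each geocoded GeoTIFF already staged in GCS as a
    # single-band COG-backed Earth Engine asset (one asset per band until multi-band
    # COG assets are supported).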
120 | logging.info(f'Starting create_gee_asset()') 121 | 122 | ee.Initialize(project=self.gee_gcp_project) 123 | 124 | core.create_gee_image_collection(self.gee_gcp_project, self.gee_image_collection) 125 | 126 | granule_names = self.job_dict['job_parameters']['granules'] 127 | granules = asf_search.granule_search(granule_names) 128 | 129 | granule_times = [datetime.datetime.fromisoformat(x.properties['stopTime']) for x in granules] 130 | start_time = min(granule_times) 131 | end_time = max(granule_times) 132 | 133 | id = f"{self.job_dict['job_id']}" 134 | 135 | props = granules[0].properties 136 | description = (f"{props['platform']}" 137 | f" - {props['processingLevel']}" 138 | f" - {props['beamModeType']}") 139 | 140 | for file_dict in self.job_dict['files']: 141 | for band, filename in file_dict['extracted'].items(): 142 | 143 | # Skip non-geocoded (native range-doppler coordinates) TIFFs. 144 | if filename.endswith('_rdr.tif'): 145 | continue 146 | 147 | gcs_path = f'{self.gcs_bucket}/{filename}' 148 | 149 | request = { 150 | 'type': 'IMAGE', 151 | 'bands': { # TODO: Update this once multi-band COG assets are supported 152 | 'id': band 153 | }, 154 | 'gcs_location': { 155 | 'uris': [f'gs://{gcs_path}'] 156 | }, 157 | 'properties': { 158 | 'source': file_dict['url'], 159 | 'band': band # TODO: Remove this once multi-band COG assets are supported 160 | }, 161 | 'startTime': start_time.strftime(core.FORMAT_GEE_DATETIME_STRING), 162 | 'endTime': end_time.strftime(core.FORMAT_GEE_DATETIME_STRING), 163 | 'description': description 164 | } 165 | 166 | path_parts = [ 167 | 'projects', 168 | self.gee_gcp_project, 169 | 'assets', 170 | self.gee_image_collection, 171 | # TODO: Remove the band suffix once multi-band COG assets are supported 172 | f'{id}_{band}'.replace(".", "_") 173 | ] 174 | assetname = os.path.join(*[x for x in path_parts if x is not None]) 175 | 176 | logging.debug(f'request = {request}') 177 | logging.debug(f'assetname = {assetname}') 178 | try: 179 | ee.data.createAsset( 180 | value=request, 181 | path=assetname 182 | ) 183 | logging.info(f'Finished creating a GEE asset:\n {assetname}.') 184 | except ee.EEException as e: 185 | print(f'e = {e}') 186 | if "does not exist or doesn't allow this operation" in str(e): 187 | raise(e) 188 | else: 189 | raise(e) # TODO: Add logic to parse the EEException message. 190 | logging.info('GEE asset already exists. Skipping.') 191 | -------------------------------------------------------------------------------- /sar_asf_to_gee/asf_static.py: -------------------------------------------------------------------------------- 1 | # AUTOGENERATED! DO NOT EDIT! File to edit: ../02_asf_static.ipynb. 
2 | 3 | # %% auto 0 4 | __all__ = ['SearchOpera'] 5 | 6 | # %% ../02_asf_static.ipynb 3 7 | import logging 8 | import os 9 | import re 10 | import tempfile 11 | 12 | import asf_search 13 | import ee 14 | from fastcore.basics import patch 15 | import gcsfs 16 | from IPython.display import JSON 17 | import pandas as pd 18 | 19 | from sar_asf_to_gee.core import ( 20 | FORMAT_GEE_DATETIME_STRING, 21 | create_gee_image_collection 22 | ) 23 | 24 | # %% ../02_asf_static.ipynb 7 25 | class SearchOpera(): 26 | 27 | LOCAL_PROPNAME = 'local_paths' 28 | GCS_PATH_PROPNAME = 'gcs_path' 29 | GEE_ASSET_PROPNAME = 'gee_asset' 30 | 31 | def __init__( 32 | self, 33 | search_opts, 34 | gcs_bucket, # GCS bucket 35 | gee_gcp_project, # GCP project used by Earth Engine 36 | gee_image_collection=None, # Name of the Earth Engine ImageCollection (optional) 37 | local_storage=None, 38 | ): 39 | self.search_opts = search_opts 40 | self.gcs_bucket = gcs_bucket 41 | self.gee_gcp_project = gee_gcp_project 42 | self.gee_image_collection = gee_image_collection 43 | if local_storage: 44 | self.tempdir = None 45 | self.local_storage = local_storage 46 | else: 47 | self.tempdir = tempfile.TemporaryDirectory() 48 | self.local_storage = self.tempdir.name 49 | logging.debug(f'created temporary directory: {self.tempdir.name}') 50 | self._search_results=None 51 | # self._extracted={} 52 | # self._gcs_path={} 53 | 54 | # %% ../02_asf_static.ipynb 10 55 | @patch 56 | def search_count( 57 | self:SearchOpera, 58 | ): 59 | "Returns a count of records (w/ duplicates)" 60 | return asf_search.search_count(**self.search_opts) 61 | 62 | # %% ../02_asf_static.ipynb 13 63 | @patch 64 | def search( 65 | self:SearchOpera, 66 | ): 67 | if not self._search_results: 68 | self._search_results = asf_search.search(**self.search_opts) 69 | return self._search_results 70 | 71 | # %% ../02_asf_static.ipynb 16 72 | @patch 73 | def as_dataframe( 74 | self:SearchOpera, 75 | ): 76 | "Returns results as a pandas dataframe (w/o duplicates)" 77 | df = pd.DataFrame.from_records([r.properties for r in self.search()]) 78 | # For datasets that have been processed more than once, retain the last result. 79 | df = (df.sort_values(by=['processingDate']) 80 | .drop_duplicates(subset=['groupID', 81 | 'beamMode', 82 | 'processingLevel', 83 | 'startTime', 84 | 'stopTime'], keep='last') 85 | ) 86 | return df 87 | 88 | # %% ../02_asf_static.ipynb 18 89 | @patch 90 | def scene_list( 91 | self:SearchOpera, 92 | ): 93 | return self.as_dataframe()['sceneName'].to_list() 94 | 95 | # %% ../02_asf_static.ipynb 22 96 | @patch 97 | def to_local( 98 | self:SearchOpera, 99 | ): 100 | "Transfer static ASF results to local system, unzip, and update the job dictionary." 
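    # For each search result, download the per-polarization (and mask) GeoTIFFs to
    # local storage and record their paths under the LOCAL_PROPNAME property.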
101 | logging.info(f'Starting asf_static.to_local()') 102 | 103 | for r in self.search(): 104 | logging.info(f' Processing {r.properties["fileID"]}') 105 | r.properties['url_set'] = get_urls(r) 106 | r.properties[self.LOCAL_PROPNAME] = {} 107 | for url_key, url_value in r.properties['url_set'].items(): 108 | filename = f'{r.properties["fileID"]}_{url_key}.tif' 109 | asf_search.download_url( 110 | url=url_value, 111 | path=self.local_storage, 112 | filename=filename 113 | ) 114 | r.properties[self.LOCAL_PROPNAME][url_key] = os.path.join(self.local_storage, filename) 115 | # display(JSON(r.properties)) 116 | logging.info(f'Finished asf_static.to_local()') 117 | 118 | # %% ../02_asf_static.ipynb 27 119 | @patch 120 | def to_gcs( 121 | self:SearchOpera, 122 | ): 123 | logging.info('Starting to_gcs()') 124 | 125 | fs = gcsfs.GCSFileSystem(token='google_default') 126 | 127 | if not fs.exists(self.gcs_bucket): 128 | print('Bucket does not exist!!!') 129 | fs.mkdir(self.gcs_bucket) 130 | 131 | for r in self.search(): 132 | logging.info(f' Transferring {r.properties["fileID"]}') 133 | r.properties[self.GCS_PATH_PROPNAME] = {} 134 | 135 | for key, local_filepath in r.properties[self.LOCAL_PROPNAME].items(): 136 | path_split = os.path.split(local_filepath) 137 | filename = path_split[-1] 138 | gcs_path = f'{self.gcs_bucket}/{filename}' 139 | 140 | if fs.exists(gcs_path): 141 | logging.info(f' GCS file already exists:\n {gcs_path}') 142 | else: 143 | logging.info(f' Starting to transfer file to GCS:\n {gcs_path}') 144 | # Transfer the local file to GCS. 145 | print('filename', filename) 146 | print('gcs_path', gcs_path) 147 | fs.put_file( 148 | lpath=filepath, 149 | rpath=gcs_path 150 | ) 151 | logging.info(f' Transferred file to GCS: {gcs_path}') 152 | r.properties[self.GCS_PATH_PROPNAME][key] = gcs_path 153 | 154 | # %% ../02_asf_static.ipynb 32 155 | @patch 156 | def create_gee_asset( 157 | self:SearchOpera, 158 | ): 159 | "Create an Earth Engine asset." 
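    # Ensure the target ImageCollection exists, then create one COG-backed asset per
    # band from the GCS copies recorded by to_gcs(), mirroring asf_hyp3.Transfer.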
160 | logging.info(f'Starting create_gee_asset()') 161 | 162 | ee.Initialize(project=self.gee_gcp_project) 163 | 164 | create_gee_image_collection(self.gee_gcp_project, self.gee_image_collection) 165 | 166 | for r in self.search(): 167 | logging.info(f' Creating GEE assets for {r.properties["fileID"]}') 168 | r.properties[self.GEE_ASSET_PROPNAME] = {} 169 | 170 | display(JSON({'r.meta': r.meta, 'r.properties': r.properties})) 171 | start_time = r.properties['startTime'] 172 | end_time = r.properties['stopTime'] 173 | description = (f"{r.properties['platform']}" 174 | f" - {r.properties['processingLevel']}" 175 | f" - {r.properties['beamModeType']}") 176 | # id = f"{self.job_dict['job_id']}" 177 | id = r.properties["fileID"] 178 | 179 | for band, gcs_path in r.properties[self.GCS_PATH_PROPNAME].items(): 180 | print('band', band) 181 | print('gcs_path', gcs_path) 182 | 183 | request = { 184 | 'type': 'IMAGE', 185 | 'bands': { # TODO: Update this once multi-band COG assets are supported 186 | 'id': band 187 | }, 188 | 'gcs_location': { 189 | 'uris': [f'gs://{gcs_path}'] 190 | }, 191 | 'properties': { 192 | 'source': r.properties['url'], 193 | 'band': band # TODO: Remove this once multi-band COG assets are supported 194 | }, 195 | 'startTime': start_time, #.strftime(FORMAT_GEE_DATETIME_STRING), 196 | 'endTime': end_time, #.strftime(FORMAT_GEE_DATETIME_STRING), 197 | 'description': description 198 | } 199 | 200 | path_parts = [ 201 | 'projects', 202 | self.gee_gcp_project, 203 | 'assets', 204 | self.gee_image_collection, 205 | # TODO: Remove the band suffix once multi-band COG assets are supported 206 | f'{id}_{band}'.replace(".", "_") 207 | ] 208 | assetname = os.path.join(*[x for x in path_parts if x is not None]) 209 | 210 | 211 | logging.debug(f'request = {request}') 212 | logging.debug(f'assetname = {assetname}') 213 | try: 214 | ee.data.createAsset( 215 | value=request, 216 | path=assetname 217 | ) 218 | logging.info(f'Finished creating a GEE asset:\n {assetname}.') 219 | except ee.EEException as e: 220 | print(f'e = {e}') 221 | if "does not exist or doesn't allow this operation" in str(e): 222 | raise(e) 223 | else: 224 | raise(e) # TODO: Add logic to parse the EEException message. 225 | logging.info('GEE asset already exists. Skipping.') 226 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 
25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. 
If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. 
Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 
202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # sar-asf-to-gee 2 | 3 | 4 | 5 | # Overview 6 | 7 | This package facilitates the transfer of ASF HyP3 processed SAR datasets 8 | to Google Cloud Storage and the creation of Google Earth Engine assets. 9 | 10 | ![Coherence image](images/screenshot_coherence.png) *Screenshot showing 11 | a coherence image produced by ASF HyP3, visualized in the Google Earth 12 | Engine Code Editor.* 13 | 14 | # Install 15 | 16 | ## Prerequisites 17 | 18 | 1. A NASA Earthdata account 19 | - You can register for an Earthdata Login here: 20 | https://www.earthdata.nasa.gov/eosdis/science-system-description/eosdis-components/earthdata-login 21 | - Create a `.netfc` file that contains the Earthdata authentication 22 | credentials. See: 23 | https://nasa-openscapes.github.io/2021-Cloud-Hackathon/tutorials/04_NASA_Earthdata_Authentication.html 24 | 2. A Google Cloud account 25 | - Create Google Cloud authentication credentials using the [Google 26 | Cloud Command Line Interface](https://cloud.google.com/cli) with 27 | the command: `gcloud auth application-default login` 28 | - See: 29 | https://cloud.google.com/sdk/gcloud/reference/auth/application-default/login 30 | 3. A Google Earth Engine account 31 | - Create Google Earth Engine authentication credentials using the 32 | [Google Earth Engine Command Lin 33 | Tool](https://developers.google.com/earth-engine/guides/command_line) 34 | with the command: `earthengine authenticate` 35 | - See: https://developers.google.com/earth-engine/guides/auth 36 | 37 | ## Package Installation 38 | 39 | Install directly from the GitHub repo. 40 | 41 | ``` sh 42 | pip install git+https://github.com/gee-community/sar-asf-to-gee.git 43 | ``` 44 | 45 | # How to use 46 | 47 | This package assumes that you have already created datasets using the 48 | [ASF HyP3](https://hyp3-docs.asf.alaska.edu/) processing system using 49 | either the [ASF Data Search 50 | Vertex](https://hyp3-docs.asf.alaska.edu/using/vertex/) web interface or 51 | programmatically using the [HyP3 SDK for 52 | Python](https://hyp3-docs.asf.alaska.edu/using/sdk/) or [HyP3 REST 53 | API](https://hyp3-docs.asf.alaska.edu/using/api/). 54 | 55 | The [status of previously submitted 56 | jobs](https://search.asf.alaska.edu/#/?searchType=On%20Demand) can found 57 | within the Vertex application. 58 | 59 | ## Imports 60 | 61 | ``` python 62 | from hyp3_sdk import HyP3 63 | 64 | from sar_asf_to_gee import core 65 | from sar_asf_to_gee import asf_hyp3 66 | ``` 67 | 68 | Create an instance of the HyP3 class, which is used to query for jobs. 69 | 70 | ``` python 71 | hyp3 = HyP3() 72 | ``` 73 | 74 | ## InSAR GAMMA Example 75 | 76 | Find HyP3 InSAR GAMMA jobs that are completed but not expired. 77 | 78 | ``` python 79 | job_name = 'INSAR_GAMMA_processing_example' 80 | batch_completed = hyp3.find_jobs( 81 | name=job_name, 82 | status_code='SUCCEEDED', 83 | ) 84 | jobs = core.filter_jobs(batch_completed.jobs, expired=False) 85 | print(f'Found {len(jobs)} unexpired jobs.') 86 | ``` 87 | 88 | Found 1 unexpired jobs. 89 | 90 | Loop through the completed jobs, transferring the results locally, then 91 | to Google Cloud Storage, and then create an Earth Engine asset. 
92 | 93 | ``` python 94 | for job in jobs: 95 | print(f'Processing {job.name}') 96 | t = asf_hyp3.Transfer( 97 | job_dict=job.to_dict(), 98 | gcs_bucket='hyp3-data-staging', 99 | gee_gcp_project='sar-asf-to-gee', 100 | gee_image_collection='test_image_collection_INSAR_GAMMA', 101 | local_storage='temp_downloads', 102 | ) 103 | print(f' Transferring files from ASF to local computer...') 104 | t.to_local() 105 | print(f' Transferring files from local computer to Google Cloud Storage...') 106 | t.to_gcs() 107 | print(f' Creating Google Earth Engine assets...') 108 | t.create_gee_asset() 109 | print(f' Done.') 110 | ``` 111 | 112 | Processing INSAR_GAMMA_processing_example 113 | Transferring files from ASF to local computer... 114 | Transferring files from local computer to Google Cloud Storage... 115 | Creating Google Earth Engine assets... 116 | Done. 117 | 118 | /Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/.pixi/envs/default/lib/python3.11/site-packages/asf_search/download/download.py:65: UserWarning: File already exists, skipping download: temp_downloads/S1AA_20231206T185929_20240311T185926_VVR096_INT80_G_ueF_39A2.zip 119 | warnings.warn(f'File already exists, skipping download: {os.path.join(path, filename)}') 120 | Reading input: /Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/temp_downloads/S1AA_20231206T185929_20240311T185926_VVR096_INT80_G_ueF_39A2/S1AA_20231206T185929_20240311T185926_VVR096_INT80_G_ueF_39A2_corr.tif 121 | 122 | Adding overviews... 123 | Updating dataset tags... 124 | Writing output to: /Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/temp_downloads/S1AA_20231206T185929_20240311T185926_VVR096_INT80_G_ueF_39A2/S1AA_20231206T185929_20240311T185926_VVR096_INT80_G_ueF_39A2_corr.tif 125 | Reading input: /Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/temp_downloads/S1AA_20231206T185929_20240311T185926_VVR096_INT80_G_ueF_39A2/S1AA_20231206T185929_20240311T185926_VVR096_INT80_G_ueF_39A2_unw_phase.tif 126 | 127 | Adding overviews... 128 | Updating dataset tags... 129 | Writing output to: /Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/temp_downloads/S1AA_20231206T185929_20240311T185926_VVR096_INT80_G_ueF_39A2/S1AA_20231206T185929_20240311T185926_VVR096_INT80_G_ueF_39A2_unw_phase.tif 130 | Reading input: /Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/temp_downloads/S1AA_20231206T185929_20240311T185926_VVR096_INT80_G_ueF_39A2/S1AA_20231206T185929_20240311T185926_VVR096_INT80_G_ueF_39A2_amp.tif 131 | 132 | Adding overviews... 133 | Updating dataset tags... 134 | Writing output to: /Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/temp_downloads/S1AA_20231206T185929_20240311T185926_VVR096_INT80_G_ueF_39A2/S1AA_20231206T185929_20240311T185926_VVR096_INT80_G_ueF_39A2_amp.tif 135 | Reading input: /Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/temp_downloads/S1AA_20231206T185929_20240311T185926_VVR096_INT80_G_ueF_39A2/S1AA_20231206T185929_20240311T185926_VVR096_INT80_G_ueF_39A2_water_mask.tif 136 | 137 | Adding overviews... 138 | Updating dataset tags... 139 | Writing output to: /Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/temp_downloads/S1AA_20231206T185929_20240311T185926_VVR096_INT80_G_ueF_39A2/S1AA_20231206T185929_20240311T185926_VVR096_INT80_G_ueF_39A2_water_mask.tif 140 | 141 | ## InSAR Burst example 142 | 143 | Find HyP3 InSAR Burst jobs that are completed but not expired. 
144 | 145 | ``` python 146 | job_name = 'INSAR_ISCE_BURST_processing_example' 147 | batch_completed = hyp3.find_jobs( 148 | name=job_name, 149 | status_code='SUCCEEDED', 150 | ) 151 | jobs = core.filter_jobs(batch_completed.jobs, expired=False) 152 | print(f'Found {len(jobs)} unexpired jobs.') 153 | ``` 154 | 155 | Found 1 unexpired jobs. 156 | 157 | ``` python 158 | for job in jobs: 159 | print(f'Processing {job.name}') 160 | t = asf_hyp3.Transfer( 161 | job_dict=job.to_dict(), 162 | gcs_bucket='hyp3-data-staging', 163 | gee_gcp_project='sar-asf-to-gee', 164 | gee_image_collection='test_image_collection_INSAR_ISCE_BURST', 165 | local_storage='temp_downloads', 166 | ) 167 | print(f' Transferring files from ASF to local computer...') 168 | t.to_local() 169 | print(f' Transferring files from local computer to Google Cloud Storage...') 170 | t.to_gcs() 171 | print(f' Creating Google Earth Engine assets...') 172 | t.create_gee_asset() 173 | print(f' Done.') 174 | ``` 175 | 176 | Processing INSAR_ISCE_BURST_processing_example 177 | Transferring files from ASF to local computer... 178 | Transferring files from local computer to Google Cloud Storage... 179 | Creating Google Earth Engine assets... 180 | Done. 181 | 182 | /Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/.pixi/envs/default/lib/python3.11/site-packages/asf_search/download/download.py:65: UserWarning: File already exists, skipping download: temp_downloads/S1_184906_IW1_20240104_20240116_VV_INT80_E33E.zip 183 | warnings.warn(f'File already exists, skipping download: {os.path.join(path, filename)}') 184 | Reading input: /Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/temp_downloads/S1_184906_IW1_20240104_20240116_VV_INT80_E33E/S1_184906_IW1_20240104_20240116_VV_INT80_E33E_unw_phase.tif 185 | 186 | Adding overviews... 187 | Updating dataset tags... 188 | Writing output to: /Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/temp_downloads/S1_184906_IW1_20240104_20240116_VV_INT80_E33E/S1_184906_IW1_20240104_20240116_VV_INT80_E33E_unw_phase.tif 189 | Reading input: /Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/temp_downloads/S1_184906_IW1_20240104_20240116_VV_INT80_E33E/S1_184906_IW1_20240104_20240116_VV_INT80_E33E_wrapped_phase_rdr.tif 190 | 191 | Updating dataset tags... 192 | Writing output to: /Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/temp_downloads/S1_184906_IW1_20240104_20240116_VV_INT80_E33E/S1_184906_IW1_20240104_20240116_VV_INT80_E33E_wrapped_phase_rdr.tif 193 | Reading input: /Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/temp_downloads/S1_184906_IW1_20240104_20240116_VV_INT80_E33E/S1_184906_IW1_20240104_20240116_VV_INT80_E33E_corr.tif 194 | 195 | Adding overviews... 196 | Updating dataset tags... 197 | Writing output to: /Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/temp_downloads/S1_184906_IW1_20240104_20240116_VV_INT80_E33E/S1_184906_IW1_20240104_20240116_VV_INT80_E33E_corr.tif 198 | Reading input: /Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/temp_downloads/S1_184906_IW1_20240104_20240116_VV_INT80_E33E/S1_184906_IW1_20240104_20240116_VV_INT80_E33E_lv_theta.tif 199 | 200 | Adding overviews... 201 | Updating dataset tags... 
202 | Writing output to: /Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/temp_downloads/S1_184906_IW1_20240104_20240116_VV_INT80_E33E/S1_184906_IW1_20240104_20240116_VV_INT80_E33E_lv_theta.tif 203 | Reading input: /Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/temp_downloads/S1_184906_IW1_20240104_20240116_VV_INT80_E33E/S1_184906_IW1_20240104_20240116_VV_INT80_E33E_lat_rdr.tif 204 | 205 | Updating dataset tags... 206 | Writing output to: /Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/temp_downloads/S1_184906_IW1_20240104_20240116_VV_INT80_E33E/S1_184906_IW1_20240104_20240116_VV_INT80_E33E_lat_rdr.tif 207 | Reading input: /Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/temp_downloads/S1_184906_IW1_20240104_20240116_VV_INT80_E33E/S1_184906_IW1_20240104_20240116_VV_INT80_E33E_conncomp.tif 208 | 209 | Adding overviews... 210 | Updating dataset tags... 211 | Writing output to: /Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/temp_downloads/S1_184906_IW1_20240104_20240116_VV_INT80_E33E/S1_184906_IW1_20240104_20240116_VV_INT80_E33E_conncomp.tif 212 | Reading input: /Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/temp_downloads/S1_184906_IW1_20240104_20240116_VV_INT80_E33E/S1_184906_IW1_20240104_20240116_VV_INT80_E33E_dem.tif 213 | 214 | Adding overviews... 215 | Updating dataset tags... 216 | Writing output to: /Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/temp_downloads/S1_184906_IW1_20240104_20240116_VV_INT80_E33E/S1_184906_IW1_20240104_20240116_VV_INT80_E33E_dem.tif 217 | Reading input: /Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/temp_downloads/S1_184906_IW1_20240104_20240116_VV_INT80_E33E/S1_184906_IW1_20240104_20240116_VV_INT80_E33E_lv_phi.tif 218 | 219 | Adding overviews... 220 | Updating dataset tags... 221 | Writing output to: /Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/temp_downloads/S1_184906_IW1_20240104_20240116_VV_INT80_E33E/S1_184906_IW1_20240104_20240116_VV_INT80_E33E_lv_phi.tif 222 | Reading input: /Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/temp_downloads/S1_184906_IW1_20240104_20240116_VV_INT80_E33E/S1_184906_IW1_20240104_20240116_VV_INT80_E33E_los_rdr.tif 223 | 224 | Updating dataset tags... 225 | Writing output to: /Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/temp_downloads/S1_184906_IW1_20240104_20240116_VV_INT80_E33E/S1_184906_IW1_20240104_20240116_VV_INT80_E33E_los_rdr.tif 226 | Reading input: /Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/temp_downloads/S1_184906_IW1_20240104_20240116_VV_INT80_E33E/S1_184906_IW1_20240104_20240116_VV_INT80_E33E_lon_rdr.tif 227 | 228 | Updating dataset tags... 229 | Writing output to: /Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/temp_downloads/S1_184906_IW1_20240104_20240116_VV_INT80_E33E/S1_184906_IW1_20240104_20240116_VV_INT80_E33E_lon_rdr.tif 230 | Reading input: /Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/temp_downloads/S1_184906_IW1_20240104_20240116_VV_INT80_E33E/S1_184906_IW1_20240104_20240116_VV_INT80_E33E_wrapped_phase.tif 231 | 232 | Adding overviews... 233 | Updating dataset tags... 
234 | Writing output to: /Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/temp_downloads/S1_184906_IW1_20240104_20240116_VV_INT80_E33E/S1_184906_IW1_20240104_20240116_VV_INT80_E33E_wrapped_phase.tif 235 | -------------------------------------------------------------------------------- /02_asf_static.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# ASF Static RTC Files\n", 8 | "\n", 9 | "> Module for accessing static files hosted by ASF. " 10 | ] 11 | }, 12 | { 13 | "cell_type": "code", 14 | "execution_count": null, 15 | "metadata": {}, 16 | "outputs": [], 17 | "source": [ 18 | "#| default_exp asf_static" 19 | ] 20 | }, 21 | { 22 | "cell_type": "markdown", 23 | "metadata": {}, 24 | "source": [ 25 | "# Module Imports" 26 | ] 27 | }, 28 | { 29 | "cell_type": "code", 30 | "execution_count": null, 31 | "metadata": {}, 32 | "outputs": [], 33 | "source": [ 34 | "#| export\n", 35 | "import logging\n", 36 | "import os\n", 37 | "import re\n", 38 | "import tempfile\n", 39 | "\n", 40 | "import asf_search\n", 41 | "import ee\n", 42 | "from fastcore.basics import patch\n", 43 | "import gcsfs\n", 44 | "from IPython.display import JSON\n", 45 | "import pandas as pd\n", 46 | "\n", 47 | "from sar_asf_to_gee.core import (\n", 48 | " FORMAT_GEE_DATETIME_STRING,\n", 49 | " create_gee_image_collection\n", 50 | ")" 51 | ] 52 | }, 53 | { 54 | "cell_type": "markdown", 55 | "metadata": {}, 56 | "source": [ 57 | "# Setup" 58 | ] 59 | }, 60 | { 61 | "cell_type": "code", 62 | "execution_count": null, 63 | "metadata": {}, 64 | "outputs": [], 65 | "source": [ 66 | "logger = logging.getLogger()\n", 67 | "logger.setLevel(logging.INFO)" 68 | ] 69 | }, 70 | { 71 | "cell_type": "markdown", 72 | "metadata": {}, 73 | "source": [ 74 | "# ASF Static OPERA Files" 75 | ] 76 | }, 77 | { 78 | "cell_type": "code", 79 | "execution_count": null, 80 | "metadata": {}, 81 | "outputs": [], 82 | "source": [ 83 | "#| export\n", 84 | "class SearchOpera(): \n", 85 | "\n", 86 | " LOCAL_PROPNAME = 'local_paths'\n", 87 | " GCS_PATH_PROPNAME = 'gcs_path'\n", 88 | " GEE_ASSET_PROPNAME = 'gee_asset'\n", 89 | " \n", 90 | " def __init__(\n", 91 | " self,\n", 92 | " search_opts,\n", 93 | " gcs_bucket, # GCS bucket\n", 94 | " gee_gcp_project, # GCP project used by Earth Engine\n", 95 | " gee_image_collection=None, # Name of the Earth Engine ImageCollection (optional)\n", 96 | " local_storage=None,\n", 97 | " ):\n", 98 | " self.search_opts = search_opts\n", 99 | " self.gcs_bucket = gcs_bucket\n", 100 | " self.gee_gcp_project = gee_gcp_project\n", 101 | " self.gee_image_collection = gee_image_collection\n", 102 | " if local_storage:\n", 103 | " self.tempdir = None\n", 104 | " self.local_storage = local_storage\n", 105 | " else:\n", 106 | " self.tempdir = tempfile.TemporaryDirectory() \n", 107 | " self.local_storage = self.tempdir.name\n", 108 | " logging.debug(f'created temporary directory: {self.tempdir.name}')\n", 109 | " self._search_results=None\n", 110 | " # self._extracted={}\n", 111 | " # self._gcs_path={}" 112 | ] 113 | }, 114 | { 115 | "cell_type": "code", 116 | "execution_count": null, 117 | "metadata": {}, 118 | "outputs": [], 119 | "source": [ 120 | "search_opts = {\n", 121 | " 'dataset': 'OPERA-S1',\n", 122 | " 'processingLevel': 'RTC',\n", 123 | " 'intersectsWith': 'Point (-122.299 37.702)', # San Francisco\n", 124 | " 'start': '2023-12-14 00:00',\n", 125 | " 'end': '2024-01-01 00:00',\n", 126 | 
" 'maxResults': 50 \n", 127 | "}\n", 128 | "obj = SearchOpera(\n", 129 | " search_opts,\n", 130 | " gcs_bucket='asf-static-data-staging',\n", 131 | " local_storage='temp_downloads',\n", 132 | " gee_gcp_project='sar-asf-to-gee',\n", 133 | " gee_image_collection=f'example-opera-s1-rtc',\n", 134 | ")" 135 | ] 136 | }, 137 | { 138 | "cell_type": "markdown", 139 | "metadata": {}, 140 | "source": [ 141 | "Create a method to return the count of search results." 142 | ] 143 | }, 144 | { 145 | "cell_type": "code", 146 | "execution_count": null, 147 | "metadata": {}, 148 | "outputs": [], 149 | "source": [ 150 | "#| export\n", 151 | "@patch\n", 152 | "def search_count(\n", 153 | " self:SearchOpera,\n", 154 | "):\n", 155 | " \"Returns a count of records (w/ duplicates)\"\n", 156 | " return asf_search.search_count(**self.search_opts)" 157 | ] 158 | }, 159 | { 160 | "cell_type": "code", 161 | "execution_count": null, 162 | "metadata": {}, 163 | "outputs": [], 164 | "source": [ 165 | "obj.search_count()" 166 | ] 167 | }, 168 | { 169 | "cell_type": "markdown", 170 | "metadata": {}, 171 | "source": [ 172 | "Create a method to return the search results." 173 | ] 174 | }, 175 | { 176 | "cell_type": "code", 177 | "execution_count": null, 178 | "metadata": {}, 179 | "outputs": [], 180 | "source": [ 181 | "#| export\n", 182 | "@patch\n", 183 | "def search(\n", 184 | " self:SearchOpera,\n", 185 | "):\n", 186 | " if not self._search_results:\n", 187 | " self._search_results = asf_search.search(**self.search_opts)\n", 188 | " return self._search_results" 189 | ] 190 | }, 191 | { 192 | "cell_type": "code", 193 | "execution_count": null, 194 | "metadata": {}, 195 | "outputs": [], 196 | "source": [ 197 | "obj.search()" 198 | ] 199 | }, 200 | { 201 | "cell_type": "markdown", 202 | "metadata": {}, 203 | "source": [ 204 | "Return search results as a dataframe." 205 | ] 206 | }, 207 | { 208 | "cell_type": "code", 209 | "execution_count": null, 210 | "metadata": {}, 211 | "outputs": [], 212 | "source": [ 213 | "#| export\n", 214 | "@patch\n", 215 | "def as_dataframe(\n", 216 | " self:SearchOpera,\n", 217 | "):\n", 218 | " \"Returns results as a pandas dataframe (w/o duplicates)\"\n", 219 | " df = pd.DataFrame.from_records([r.properties for r in self.search()])\n", 220 | " # For datasets that have been processed more than once, retain the last result.\n", 221 | " df = (df.sort_values(by=['processingDate'])\n", 222 | " .drop_duplicates(subset=['groupID',\n", 223 | " 'beamMode',\n", 224 | " 'processingLevel',\n", 225 | " 'startTime',\n", 226 | " 'stopTime'], keep='last')\n", 227 | " )\n", 228 | " return df" 229 | ] 230 | }, 231 | { 232 | "cell_type": "code", 233 | "execution_count": null, 234 | "metadata": {}, 235 | "outputs": [], 236 | "source": [ 237 | "obj.as_dataframe()" 238 | ] 239 | }, 240 | { 241 | "cell_type": "code", 242 | "execution_count": null, 243 | "metadata": {}, 244 | "outputs": [], 245 | "source": [ 246 | "#| export\n", 247 | "@patch\n", 248 | "def scene_list(\n", 249 | " self:SearchOpera,\n", 250 | "):\n", 251 | " return self.as_dataframe()['sceneName'].to_list()" 252 | ] 253 | }, 254 | { 255 | "cell_type": "code", 256 | "execution_count": null, 257 | "metadata": {}, 258 | "outputs": [], 259 | "source": [ 260 | "obj.scene_list()" 261 | ] 262 | }, 263 | { 264 | "cell_type": "markdown", 265 | "metadata": {}, 266 | "source": [ 267 | "Transfer files locally." 
268 | ] 269 | }, 270 | { 271 | "cell_type": "code", 272 | "execution_count": null, 273 | "metadata": {}, 274 | "outputs": [], 275 | "source": [ 276 | "def get_urls(r):\n", 277 | " # Construct a dictionary of URLs for the polarization bands.\n", 278 | " pols = r.properties['polarization']\n", 279 | " pattern = re.compile(f\"({'|'.join(pols) + '|mask'})\\.tif$\")\n", 280 | " tif_dict = {}\n", 281 | " for url in [r.properties['url']] + r.properties['additionalUrls']:\n", 282 | " m = pattern.search(url)\n", 283 | " if m:\n", 284 | " tif_dict[m.group(1)] = url\n", 285 | " return tif_dict" 286 | ] 287 | }, 288 | { 289 | "cell_type": "code", 290 | "execution_count": null, 291 | "metadata": {}, 292 | "outputs": [], 293 | "source": [ 294 | "#| export\n", 295 | "@patch\n", 296 | "def to_local(\n", 297 | " self:SearchOpera,\n", 298 | "):\n", 299 | " \"Transfer static ASF results to local system, unzip, and update the job dictionary.\" \n", 300 | " logging.info(f'Starting asf_static.to_local()')\n", 301 | "\n", 302 | " for r in self.search():\n", 303 | " logging.info(f' Processing {r.properties[\"fileID\"]}')\n", 304 | " r.properties['url_set'] = get_urls(r)\n", 305 | " r.properties[self.LOCAL_PROPNAME] = {}\n", 306 | " for url_key, url_value in r.properties['url_set'].items():\n", 307 | " filename = f'{r.properties[\"fileID\"]}_{url_key}.tif'\n", 308 | " asf_search.download_url(\n", 309 | " url=url_value,\n", 310 | " path=self.local_storage,\n", 311 | " filename=filename\n", 312 | " )\n", 313 | " r.properties[self.LOCAL_PROPNAME][url_key] = os.path.join(self.local_storage, filename)\n", 314 | " # display(JSON(r.properties))\n", 315 | " logging.info(f'Finished asf_static.to_local()')" 316 | ] 317 | }, 318 | { 319 | "cell_type": "code", 320 | "execution_count": null, 321 | "metadata": {}, 322 | "outputs": [], 323 | "source": [ 324 | "from pprint import pprint\n", 325 | "obj.to_local()" 326 | ] 327 | }, 328 | { 329 | "cell_type": "markdown", 330 | "metadata": {}, 331 | "source": [ 332 | "Display the extracted local files." 333 | ] 334 | }, 335 | { 336 | "cell_type": "code", 337 | "execution_count": null, 338 | "metadata": {}, 339 | "outputs": [], 340 | "source": [ 341 | "for r in obj.search():\n", 342 | " display(JSON(r.properties['local_paths']))" 343 | ] 344 | }, 345 | { 346 | "cell_type": "markdown", 347 | "metadata": {}, 348 | "source": [ 349 | "## Transfer to Google Cloud Storage\n", 350 | "\n", 351 | "Create an instance method for transferring results from a local computer to Google Cloud Storage." 
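The `to_gcs` method defined next uses `gcsfs` with Google application-default credentials. Before running it, a quick, optional sanity check that the staging bucket is reachable can save a failed transfer; this is a minimal sketch using the example bucket name from this notebook, not part of the exported module:

```python
import gcsfs

# Uses the credentials created by `gcloud auth application-default login`.
fs = gcsfs.GCSFileSystem(token='google_default')

bucket = 'asf-static-data-staging'  # example staging bucket used in this notebook
if fs.exists(bucket):
    print(f'Bucket {bucket!r} is reachable and holds {len(fs.ls(bucket))} objects.')
else:
    print(f'Bucket {bucket!r} was not found; to_gcs() will attempt to create it.')
```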
352 | ] 353 | }, 354 | { 355 | "cell_type": "code", 356 | "execution_count": null, 357 | "metadata": {}, 358 | "outputs": [], 359 | "source": [ 360 | "#| export\n", 361 | "@patch\n", 362 | "def to_gcs(\n", 363 | "    self:SearchOpera,\n", 364 | "):\n", 365 | "    logging.info('Starting to_gcs()')\n", 366 | "\n", 367 | "    fs = gcsfs.GCSFileSystem(token='google_default')\n", 368 | "\n", 369 | "    if not fs.exists(self.gcs_bucket):\n", 370 | "        logging.info(f'Bucket {self.gcs_bucket} does not exist. Creating it.')\n", 371 | "        fs.mkdir(self.gcs_bucket)\n", 372 | "\n", 373 | "    for r in self.search():\n", 374 | "        logging.info(f'  Transferring {r.properties[\"fileID\"]}')\n", 375 | "        r.properties[self.GCS_PATH_PROPNAME] = {}\n", 376 | "\n", 377 | "        for key, local_filepath in r.properties[self.LOCAL_PROPNAME].items():\n", 378 | "            path_split = os.path.split(local_filepath)\n", 379 | "            filename = path_split[-1]\n", 380 | "            gcs_path = f'{self.gcs_bucket}/{filename}'\n", 381 | "\n", 382 | "            if fs.exists(gcs_path):\n", 383 | "                logging.info(f'  GCS file already exists:\\n  {gcs_path}')\n", 384 | "            else:\n", 385 | "                logging.info(f'  Starting to transfer file to GCS:\\n  {gcs_path}')\n", 386 | "                # Transfer the local file to GCS.\n", 387 | "                logging.debug(f'filename = {filename}')\n", 388 | "                logging.debug(f'gcs_path = {gcs_path}')\n", 389 | "                fs.put_file(\n", 390 | "                    lpath=local_filepath,\n", 391 | "                    rpath=gcs_path\n", 392 | "                )\n", 393 | "                logging.info(f'  Transferred file to GCS: {gcs_path}')\n", 394 | "            r.properties[self.GCS_PATH_PROPNAME][key] = gcs_path" 395 | ] 396 | }, 397 | { 398 | "cell_type": "code", 399 | "execution_count": null, 400 | "metadata": {}, 401 | "outputs": [], 402 | "source": [ 403 | "obj.to_gcs()" 404 | ] 405 | }, 406 | { 407 | "cell_type": "markdown", 408 | "metadata": {}, 409 | "source": [ 410 | "Display the GCS paths."
411 | ] 412 | }, 413 | { 414 | "cell_type": "code", 415 | "execution_count": null, 416 | "metadata": {}, 417 | "outputs": [], 418 | "source": [ 419 | "for r in obj.search():\n", 420 | " display(JSON(r.properties[obj.GCS_PATH_PROPNAME]))" 421 | ] 422 | }, 423 | { 424 | "cell_type": "markdown", 425 | "metadata": {}, 426 | "source": [ 427 | "## Create a GEE Asset" 428 | ] 429 | }, 430 | { 431 | "cell_type": "code", 432 | "execution_count": null, 433 | "metadata": {}, 434 | "outputs": [], 435 | "source": [ 436 | "#| export\n", 437 | "@patch\n", 438 | "def create_gee_asset(\n", 439 | " self:SearchOpera,\n", 440 | "):\n", 441 | " \"Create an Earth Engine asset.\"\n", 442 | " logging.info(f'Starting create_gee_asset()')\n", 443 | " \n", 444 | " ee.Initialize(project=self.gee_gcp_project)\n", 445 | " \n", 446 | " create_gee_image_collection(self.gee_gcp_project, self.gee_image_collection)\n", 447 | "\n", 448 | " for r in self.search():\n", 449 | " logging.info(f' Creating GEE assets for {r.properties[\"fileID\"]}')\n", 450 | " r.properties[self.GEE_ASSET_PROPNAME] = {}\n", 451 | "\n", 452 | " display(JSON({'r.meta': r.meta, 'r.properties': r.properties}))\n", 453 | " start_time = r.properties['startTime']\n", 454 | " end_time = r.properties['stopTime']\n", 455 | " description = (f\"{r.properties['platform']}\"\n", 456 | " f\" - {r.properties['processingLevel']}\"\n", 457 | " f\" - {r.properties['beamModeType']}\")\n", 458 | " # id = f\"{self.job_dict['job_id']}\"\n", 459 | " id = r.properties[\"fileID\"]\n", 460 | " \n", 461 | " for band, gcs_path in r.properties[self.GCS_PATH_PROPNAME].items():\n", 462 | " print('band', band)\n", 463 | " print('gcs_path', gcs_path)\n", 464 | "\n", 465 | " request = {\n", 466 | " 'type': 'IMAGE',\n", 467 | " 'bands': { # TODO: Update this once multi-band COG assets are supported\n", 468 | " 'id': band\n", 469 | " },\n", 470 | " 'gcs_location': {\n", 471 | " 'uris': [f'gs://{gcs_path}']\n", 472 | " },\n", 473 | " 'properties': {\n", 474 | " 'source': r.properties['url'],\n", 475 | " 'band': band # TODO: Remove this once multi-band COG assets are supported\n", 476 | " },\n", 477 | " 'startTime': start_time, #.strftime(FORMAT_GEE_DATETIME_STRING),\n", 478 | " 'endTime': end_time, #.strftime(FORMAT_GEE_DATETIME_STRING),\n", 479 | " 'description': description\n", 480 | " }\n", 481 | "\n", 482 | " path_parts = [\n", 483 | " 'projects',\n", 484 | " self.gee_gcp_project,\n", 485 | " 'assets',\n", 486 | " self.gee_image_collection,\n", 487 | " # TODO: Remove the band suffix once multi-band COG assets are supported\n", 488 | " f'{id}_{band}'.replace(\".\", \"_\") \n", 489 | " ]\n", 490 | " assetname = os.path.join(*[x for x in path_parts if x is not None])\n", 491 | "\n", 492 | "\n", 493 | " logging.debug(f'request = {request}')\n", 494 | " logging.debug(f'assetname = {assetname}')\n", 495 | " try:\n", 496 | " ee.data.createAsset(\n", 497 | " value=request,\n", 498 | " path=assetname\n", 499 | " ) \n", 500 | " logging.info(f'Finished creating a GEE asset:\\n {assetname}.')\n", 501 | " except ee.EEException as e:\n", 502 | " print(f'e = {e}')\n", 503 | " if \"does not exist or doesn't allow this operation\" in str(e):\n", 504 | " raise(e)\n", 505 | " else:\n", 506 | " raise(e) # TODO: Add logic to parse the EEException message.\n", 507 | " logging.info('GEE asset already exists. 
Skipping.')" 508 | ] 509 | }, 510 | { 511 | "cell_type": "code", 512 | "execution_count": null, 513 | "metadata": {}, 514 | "outputs": [], 515 | "source": [ 516 | "obj.create_gee_asset()" 517 | ] 518 | }, 519 | { 520 | "cell_type": "code", 521 | "execution_count": null, 522 | "metadata": {}, 523 | "outputs": [], 524 | "source": [ 525 | "#| hide\n", 526 | "import nbdev; nbdev.nbdev_export()" 527 | ] 528 | }, 529 | { 530 | "cell_type": "code", 531 | "execution_count": null, 532 | "metadata": {}, 533 | "outputs": [], 534 | "source": [] 535 | } 536 | ], 537 | "metadata": { 538 | "kernelspec": { 539 | "display_name": "Python 3 (ipykernel)", 540 | "language": "python", 541 | "name": "python3" 542 | } 543 | }, 544 | "nbformat": 4, 545 | "nbformat_minor": 4 546 | } 547 | -------------------------------------------------------------------------------- /index.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# sar-asf-to-gee\n", 8 | "\n", 9 | "> Facilitates transferring on-demand SAR products processed by ASF's HyP3 to Google Earth Engine." 10 | ] 11 | }, 12 | { 13 | "cell_type": "markdown", 14 | "metadata": {}, 15 | "source": [ 16 | "# Overview\n", 17 | "\n", 18 | "This package facilitates the transfer of ASF HyP3 processed SAR datasets to Google Cloud Storage and the creation of Google Earth Engine assets." 19 | ] 20 | }, 21 | { 22 | "cell_type": "markdown", 23 | "metadata": {}, 24 | "source": [ 25 | "![Coherence image](images/screenshot_coherence.png)\n", 26 | "*Screenshot showing a coherence image produced by ASF HyP3, visualized in the Google Earth Engine Code Editor.*" 27 | ] 28 | }, 29 | { 30 | "cell_type": "markdown", 31 | "metadata": {}, 32 | "source": [ 33 | "# Install" 34 | ] 35 | }, 36 | { 37 | "cell_type": "markdown", 38 | "metadata": {}, 39 | "source": [ 40 | "## Prerequisites\n", 41 | "\n", 42 | "1. A NASA Earthdata account\n", 43 | "   - You can register for an Earthdata Login here: https://www.earthdata.nasa.gov/eosdis/science-system-description/eosdis-components/earthdata-login\n", 44 | "   - Create a `.netrc` file that contains the Earthdata authentication credentials. See: https://nasa-openscapes.github.io/2021-Cloud-Hackathon/tutorials/04_NASA_Earthdata_Authentication.html\n", 45 | "\n", 46 | "2. A Google Cloud account\n", 47 | "   - Create Google Cloud authentication credentials using the [Google Cloud Command Line Interface](https://cloud.google.com/cli) with the command: `gcloud auth application-default login`\n", 48 | "   - See: https://cloud.google.com/sdk/gcloud/reference/auth/application-default/login\n", 49 | "\n", 50 | "3. A Google Earth Engine account\n", 51 | "   - Create Google Earth Engine authentication credentials using the [Google Earth Engine Command Line Tool](https://developers.google.com/earth-engine/guides/command_line) with the command: `earthengine authenticate`\n", 52 | "   - See: https://developers.google.com/earth-engine/guides/auth" 53 | ] 54 | }, 55 | { 56 | "cell_type": "markdown", 57 | "metadata": {}, 58 | "source": [ 59 | "## Package Installation\n", 60 | "\n", 61 | "Install directly from the GitHub repo."
62 | ] 63 | }, 64 | { 65 | "cell_type": "markdown", 66 | "metadata": {}, 67 | "source": [ 68 | "```sh\n", 69 | "pip install git+https://github.com/gee-community/sar-asf-to-gee.git\n", 70 | "```" 71 | ] 72 | }, 73 | { 74 | "cell_type": "markdown", 75 | "metadata": {}, 76 | "source": [ 77 | "# How to use" 78 | ] 79 | }, 80 | { 81 | "cell_type": "markdown", 82 | "metadata": {}, 83 | "source": [ 84 | "This package assumes that you have already created datasets using the [ASF HyP3](https://hyp3-docs.asf.alaska.edu/) processing system, either through the [ASF Data Search Vertex](https://hyp3-docs.asf.alaska.edu/using/vertex/) web interface or programmatically using the [HyP3 SDK for Python](https://hyp3-docs.asf.alaska.edu/using/sdk/) or the [HyP3 REST API](https://hyp3-docs.asf.alaska.edu/using/api/).\n", 85 | "\n", 86 | "The [status of previously submitted jobs](https://search.asf.alaska.edu/#/?searchType=On%20Demand) can be found within the Vertex application." 87 | ] 88 | }, 89 | { 90 | "cell_type": "markdown", 91 | "metadata": {}, 92 | "source": [ 93 | "## Imports" 94 | ] 95 | }, 96 | { 97 | "cell_type": "code", 98 | "execution_count": null, 99 | "metadata": {}, 100 | "outputs": [], 101 | "source": [ 102 | "from hyp3_sdk import HyP3\n", 103 | "\n", 104 | "from sar_asf_to_gee import core\n", 105 | "from sar_asf_to_gee import asf_hyp3" 106 | ] 107 | }, 108 | { 109 | "cell_type": "markdown", 110 | "metadata": {}, 111 | "source": [ 112 | "Create an instance of the HyP3 class, which is used to query for jobs." 113 | ] 114 | }, 115 | { 116 | "cell_type": "code", 117 | "execution_count": null, 118 | "metadata": {}, 119 | "outputs": [], 120 | "source": [ 121 | "hyp3 = HyP3()" 122 | ] 123 | }, 124 | { 125 | "cell_type": "markdown", 126 | "metadata": {}, 127 | "source": [ 128 | "## InSAR GAMMA Example" 129 | ] 130 | }, 131 | { 132 | "cell_type": "markdown", 133 | "metadata": {}, 134 | "source": [ 135 | "Find HyP3 InSAR GAMMA jobs that are completed but not expired." 136 | ] 137 | }, 138 | { 139 | "cell_type": "code", 140 | "execution_count": null, 141 | "metadata": {}, 142 | "outputs": [ 143 | { 144 | "name": "stdout", 145 | "output_type": "stream", 146 | "text": [ 147 | "Found 1 unexpired jobs.\n" 148 | ] 149 | } 150 | ], 151 | "source": [ 152 | "job_name = 'INSAR_GAMMA_processing_example'\n", 153 | "batch_completed = hyp3.find_jobs(\n", 154 | "    name=job_name,\n", 155 | "    status_code='SUCCEEDED',\n", 156 | ")\n", 157 | "jobs = core.filter_jobs(batch_completed.jobs, expired=False)\n", 158 | "print(f'Found {len(jobs)} unexpired jobs.')" 159 | ] 160 | }, 161 | { 162 | "cell_type": "markdown", 163 | "metadata": {}, 164 | "source": [ 165 | "Loop through the completed jobs, transferring the results locally, then to Google Cloud Storage, and then creating an Earth Engine asset."
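Before running the transfer loop below, it can help to confirm that the Earth Engine credentials and Cloud project from the prerequisites are working, since asset creation is otherwise the last step to fail. A minimal check, assuming the example project name used throughout this document:

```python
import ee

# 'sar-asf-to-gee' is the example Cloud project used in this document;
# substitute a project that your Earth Engine account can access.
ee.Initialize(project='sar-asf-to-gee')
print(ee.String('Earth Engine initialized').getInfo())
```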
166 | ] 167 | }, 168 | { 169 | "cell_type": "code", 170 | "execution_count": null, 171 | "metadata": {}, 172 | "outputs": [ 173 | { 174 | "name": "stdout", 175 | "output_type": "stream", 176 | "text": [ 177 | "Processing INSAR_GAMMA_processing_example\n", 178 | " Transferring files from ASF to local computer...\n" 179 | ] 180 | }, 181 | { 182 | "name": "stderr", 183 | "output_type": "stream", 184 | "text": [ 185 | "/Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/.pixi/envs/default/lib/python3.11/site-packages/asf_search/download/download.py:65: UserWarning: File already exists, skipping download: temp_downloads/S1AA_20231206T185929_20240311T185926_VVR096_INT80_G_ueF_39A2.zip\n", 186 | " warnings.warn(f'File already exists, skipping download: {os.path.join(path, filename)}')\n", 187 | "Reading input: /Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/temp_downloads/S1AA_20231206T185929_20240311T185926_VVR096_INT80_G_ueF_39A2/S1AA_20231206T185929_20240311T185926_VVR096_INT80_G_ueF_39A2_corr.tif\n", 188 | "\n", 189 | "Adding overviews...\n", 190 | "Updating dataset tags...\n", 191 | "Writing output to: /Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/temp_downloads/S1AA_20231206T185929_20240311T185926_VVR096_INT80_G_ueF_39A2/S1AA_20231206T185929_20240311T185926_VVR096_INT80_G_ueF_39A2_corr.tif\n", 192 | "Reading input: /Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/temp_downloads/S1AA_20231206T185929_20240311T185926_VVR096_INT80_G_ueF_39A2/S1AA_20231206T185929_20240311T185926_VVR096_INT80_G_ueF_39A2_unw_phase.tif\n", 193 | "\n", 194 | "Adding overviews...\n", 195 | "Updating dataset tags...\n", 196 | "Writing output to: /Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/temp_downloads/S1AA_20231206T185929_20240311T185926_VVR096_INT80_G_ueF_39A2/S1AA_20231206T185929_20240311T185926_VVR096_INT80_G_ueF_39A2_unw_phase.tif\n", 197 | "Reading input: /Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/temp_downloads/S1AA_20231206T185929_20240311T185926_VVR096_INT80_G_ueF_39A2/S1AA_20231206T185929_20240311T185926_VVR096_INT80_G_ueF_39A2_amp.tif\n", 198 | "\n", 199 | "Adding overviews...\n", 200 | "Updating dataset tags...\n", 201 | "Writing output to: /Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/temp_downloads/S1AA_20231206T185929_20240311T185926_VVR096_INT80_G_ueF_39A2/S1AA_20231206T185929_20240311T185926_VVR096_INT80_G_ueF_39A2_amp.tif\n", 202 | "Reading input: /Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/temp_downloads/S1AA_20231206T185929_20240311T185926_VVR096_INT80_G_ueF_39A2/S1AA_20231206T185929_20240311T185926_VVR096_INT80_G_ueF_39A2_water_mask.tif\n", 203 | "\n", 204 | "Adding overviews...\n", 205 | "Updating dataset tags...\n", 206 | "Writing output to: /Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/temp_downloads/S1AA_20231206T185929_20240311T185926_VVR096_INT80_G_ueF_39A2/S1AA_20231206T185929_20240311T185926_VVR096_INT80_G_ueF_39A2_water_mask.tif\n" 207 | ] 208 | }, 209 | { 210 | "name": "stdout", 211 | "output_type": "stream", 212 | "text": [ 213 | " Transferring files from local computer to Google Cloud Storage...\n", 214 | " Creating Google Earth Engine assets...\n", 215 | " Done.\n" 216 | ] 217 | } 218 | ], 219 | "source": [ 220 | "for job in jobs:\n", 221 | " print(f'Processing {job.name}')\n", 222 | " t = asf_hyp3.Transfer(\n", 223 | " job_dict=job.to_dict(),\n", 224 | " gcs_bucket='hyp3-data-staging',\n", 225 | " gee_gcp_project='sar-asf-to-gee',\n", 226 | " 
gee_image_collection='test_image_collection_INSAR_GAMMA',\n", 227 | " local_storage='temp_downloads',\n", 228 | " )\n", 229 | " print(f' Transferring files from ASF to local computer...')\n", 230 | " t.to_local()\n", 231 | " print(f' Transferring files from local computer to Google Cloud Storage...')\n", 232 | " t.to_gcs()\n", 233 | " print(f' Creating Google Earth Engine assets...')\n", 234 | " t.create_gee_asset()\n", 235 | " print(f' Done.')" 236 | ] 237 | }, 238 | { 239 | "cell_type": "markdown", 240 | "metadata": {}, 241 | "source": [ 242 | "## InSAR Burst example" 243 | ] 244 | }, 245 | { 246 | "cell_type": "markdown", 247 | "metadata": {}, 248 | "source": [ 249 | "Find HyP3 InSAR Burst jobs that are completed but not expired." 250 | ] 251 | }, 252 | { 253 | "cell_type": "code", 254 | "execution_count": null, 255 | "metadata": {}, 256 | "outputs": [ 257 | { 258 | "name": "stdout", 259 | "output_type": "stream", 260 | "text": [ 261 | "Found 1 unexpired jobs.\n" 262 | ] 263 | } 264 | ], 265 | "source": [ 266 | "job_name = 'INSAR_ISCE_BURST_processing_example'\n", 267 | "batch_completed = hyp3.find_jobs(\n", 268 | " name=job_name,\n", 269 | " status_code='SUCCEEDED',\n", 270 | ")\n", 271 | "jobs = core.filter_jobs(batch_completed.jobs, expired=False)\n", 272 | "print(f'Found {len(jobs)} unexpired jobs.')" 273 | ] 274 | }, 275 | { 276 | "cell_type": "code", 277 | "execution_count": null, 278 | "metadata": {}, 279 | "outputs": [ 280 | { 281 | "name": "stdout", 282 | "output_type": "stream", 283 | "text": [ 284 | "Processing INSAR_ISCE_BURST_processing_example\n", 285 | " Transferring files from ASF to local computer...\n" 286 | ] 287 | }, 288 | { 289 | "name": "stderr", 290 | "output_type": "stream", 291 | "text": [ 292 | "/Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/.pixi/envs/default/lib/python3.11/site-packages/asf_search/download/download.py:65: UserWarning: File already exists, skipping download: temp_downloads/S1_184906_IW1_20240104_20240116_VV_INT80_E33E.zip\n", 293 | " warnings.warn(f'File already exists, skipping download: {os.path.join(path, filename)}')\n", 294 | "Reading input: /Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/temp_downloads/S1_184906_IW1_20240104_20240116_VV_INT80_E33E/S1_184906_IW1_20240104_20240116_VV_INT80_E33E_unw_phase.tif\n", 295 | "\n", 296 | "Adding overviews...\n", 297 | "Updating dataset tags...\n", 298 | "Writing output to: /Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/temp_downloads/S1_184906_IW1_20240104_20240116_VV_INT80_E33E/S1_184906_IW1_20240104_20240116_VV_INT80_E33E_unw_phase.tif\n", 299 | "Reading input: /Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/temp_downloads/S1_184906_IW1_20240104_20240116_VV_INT80_E33E/S1_184906_IW1_20240104_20240116_VV_INT80_E33E_wrapped_phase_rdr.tif\n", 300 | "\n", 301 | "Updating dataset tags...\n", 302 | "Writing output to: /Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/temp_downloads/S1_184906_IW1_20240104_20240116_VV_INT80_E33E/S1_184906_IW1_20240104_20240116_VV_INT80_E33E_wrapped_phase_rdr.tif\n", 303 | "Reading input: /Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/temp_downloads/S1_184906_IW1_20240104_20240116_VV_INT80_E33E/S1_184906_IW1_20240104_20240116_VV_INT80_E33E_corr.tif\n", 304 | "\n", 305 | "Adding overviews...\n", 306 | "Updating dataset tags...\n", 307 | "Writing output to: 
/Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/temp_downloads/S1_184906_IW1_20240104_20240116_VV_INT80_E33E/S1_184906_IW1_20240104_20240116_VV_INT80_E33E_corr.tif\n", 308 | "Reading input: /Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/temp_downloads/S1_184906_IW1_20240104_20240116_VV_INT80_E33E/S1_184906_IW1_20240104_20240116_VV_INT80_E33E_lv_theta.tif\n", 309 | "\n", 310 | "Adding overviews...\n", 311 | "Updating dataset tags...\n", 312 | "Writing output to: /Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/temp_downloads/S1_184906_IW1_20240104_20240116_VV_INT80_E33E/S1_184906_IW1_20240104_20240116_VV_INT80_E33E_lv_theta.tif\n", 313 | "Reading input: /Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/temp_downloads/S1_184906_IW1_20240104_20240116_VV_INT80_E33E/S1_184906_IW1_20240104_20240116_VV_INT80_E33E_lat_rdr.tif\n", 314 | "\n", 315 | "Updating dataset tags...\n", 316 | "Writing output to: /Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/temp_downloads/S1_184906_IW1_20240104_20240116_VV_INT80_E33E/S1_184906_IW1_20240104_20240116_VV_INT80_E33E_lat_rdr.tif\n", 317 | "Reading input: /Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/temp_downloads/S1_184906_IW1_20240104_20240116_VV_INT80_E33E/S1_184906_IW1_20240104_20240116_VV_INT80_E33E_conncomp.tif\n", 318 | "\n", 319 | "Adding overviews...\n", 320 | "Updating dataset tags...\n", 321 | "Writing output to: /Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/temp_downloads/S1_184906_IW1_20240104_20240116_VV_INT80_E33E/S1_184906_IW1_20240104_20240116_VV_INT80_E33E_conncomp.tif\n", 322 | "Reading input: /Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/temp_downloads/S1_184906_IW1_20240104_20240116_VV_INT80_E33E/S1_184906_IW1_20240104_20240116_VV_INT80_E33E_dem.tif\n", 323 | "\n", 324 | "Adding overviews...\n", 325 | "Updating dataset tags...\n", 326 | "Writing output to: /Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/temp_downloads/S1_184906_IW1_20240104_20240116_VV_INT80_E33E/S1_184906_IW1_20240104_20240116_VV_INT80_E33E_dem.tif\n", 327 | "Reading input: /Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/temp_downloads/S1_184906_IW1_20240104_20240116_VV_INT80_E33E/S1_184906_IW1_20240104_20240116_VV_INT80_E33E_lv_phi.tif\n", 328 | "\n", 329 | "Adding overviews...\n", 330 | "Updating dataset tags...\n", 331 | "Writing output to: /Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/temp_downloads/S1_184906_IW1_20240104_20240116_VV_INT80_E33E/S1_184906_IW1_20240104_20240116_VV_INT80_E33E_lv_phi.tif\n", 332 | "Reading input: /Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/temp_downloads/S1_184906_IW1_20240104_20240116_VV_INT80_E33E/S1_184906_IW1_20240104_20240116_VV_INT80_E33E_los_rdr.tif\n", 333 | "\n", 334 | "Updating dataset tags...\n", 335 | "Writing output to: /Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/temp_downloads/S1_184906_IW1_20240104_20240116_VV_INT80_E33E/S1_184906_IW1_20240104_20240116_VV_INT80_E33E_los_rdr.tif\n", 336 | "Reading input: /Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/temp_downloads/S1_184906_IW1_20240104_20240116_VV_INT80_E33E/S1_184906_IW1_20240104_20240116_VV_INT80_E33E_lon_rdr.tif\n", 337 | "\n", 338 | "Updating dataset tags...\n", 339 | "Writing output to: /Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/temp_downloads/S1_184906_IW1_20240104_20240116_VV_INT80_E33E/S1_184906_IW1_20240104_20240116_VV_INT80_E33E_lon_rdr.tif\n", 340 | "Reading input: 
/Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/temp_downloads/S1_184906_IW1_20240104_20240116_VV_INT80_E33E/S1_184906_IW1_20240104_20240116_VV_INT80_E33E_wrapped_phase.tif\n", 341 | "\n", 342 | "Adding overviews...\n", 343 | "Updating dataset tags...\n", 344 | "Writing output to: /Users/tylere/Documents/GitHub/gee-community/sar-asf-to-gee/temp_downloads/S1_184906_IW1_20240104_20240116_VV_INT80_E33E/S1_184906_IW1_20240104_20240116_VV_INT80_E33E_wrapped_phase.tif\n" 345 | ] 346 | }, 347 | { 348 | "name": "stdout", 349 | "output_type": "stream", 350 | "text": [ 351 | " Transferring files from local computer to Google Cloud Storage...\n", 352 | " Creating Google Earth Engine assets...\n", 353 | " Done.\n" 354 | ] 355 | } 356 | ], 357 | "source": [ 358 | "for job in jobs:\n", 359 | " print(f'Processing {job.name}')\n", 360 | " t = asf_hyp3.Transfer(\n", 361 | " job_dict=job.to_dict(),\n", 362 | " gcs_bucket='hyp3-data-staging',\n", 363 | " gee_gcp_project='sar-asf-to-gee',\n", 364 | " gee_image_collection='test_image_collection_INSAR_ISCE_BURST',\n", 365 | " local_storage='temp_downloads',\n", 366 | " )\n", 367 | " print(f' Transferring files from ASF to local computer...')\n", 368 | " t.to_local()\n", 369 | " print(f' Transferring files from local computer to Google Cloud Storage...')\n", 370 | " t.to_gcs()\n", 371 | " print(f' Creating Google Earth Engine assets...')\n", 372 | " t.create_gee_asset()\n", 373 | " print(f' Done.')" 374 | ] 375 | }, 376 | { 377 | "cell_type": "code", 378 | "execution_count": null, 379 | "metadata": {}, 380 | "outputs": [], 381 | "source": [] 382 | } 383 | ], 384 | "metadata": { 385 | "kernelspec": { 386 | "display_name": "Python 3 (ipykernel)", 387 | "language": "python", 388 | "name": "python3" 389 | } 390 | }, 391 | "nbformat": 4, 392 | "nbformat_minor": 4 393 | } 394 | -------------------------------------------------------------------------------- /01_asf_hyp3.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "id": "f49cf34e-e85d-414d-85cf-e42fa55391d7", 6 | "metadata": {}, 7 | "source": [ 8 | "# ASF HyP3\n", 9 | "\n", 10 | "> Module for transferring HyP3 processed data to Earth Engine. 
" 11 | ] 12 | }, 13 | { 14 | "cell_type": "code", 15 | "execution_count": null, 16 | "id": "faf2fbf9-4e0b-4437-81fd-dffb36d06e76", 17 | "metadata": {}, 18 | "outputs": [], 19 | "source": [ 20 | "#| default_exp asf_hyp3" 21 | ] 22 | }, 23 | { 24 | "cell_type": "markdown", 25 | "id": "d826cfbc-773c-4f0a-8f46-e390e54f63c6", 26 | "metadata": {}, 27 | "source": [ 28 | "# Module Imports" 29 | ] 30 | }, 31 | { 32 | "cell_type": "code", 33 | "execution_count": null, 34 | "id": "bcfbb841-a470-4f65-9243-d8fda6ac788f", 35 | "metadata": {}, 36 | "outputs": [], 37 | "source": [ 38 | "#| export\n", 39 | "import datetime\n", 40 | "import logging\n", 41 | "import tempfile\n", 42 | "import os\n", 43 | "import re\n", 44 | "import subprocess\n", 45 | "import zipfile\n", 46 | "from pprint import pprint\n", 47 | "\n", 48 | "import asf_search\n", 49 | "from IPython.display import JSON\n", 50 | "import ee\n", 51 | "from fastcore.basics import patch\n", 52 | "import gcsfs\n", 53 | "from hyp3_sdk import HyP3\n", 54 | "from rio_cogeo import cogeo\n", 55 | "\n", 56 | "from sar_asf_to_gee import core" 57 | ] 58 | }, 59 | { 60 | "cell_type": "markdown", 61 | "id": "d83c3402-0888-4d11-be83-025d18802b26", 62 | "metadata": {}, 63 | "source": [ 64 | "# Prerequisites\n", 65 | "\n", 66 | "Authenticate to NASA Earthdata\n", 67 | "- See: https://nasa-openscapes.github.io/2021-Cloud-Hackathon/tutorials/04_NASA_Earthdata_Authentication.html\n", 68 | "- See: https://urs.earthdata.nasa.gov/documentation/for_users/data_access/create_net_rc_file\n", 69 | "\n", 70 | "Authenticate to Google Cloud\n", 71 | "- See: https://cloud.google.com/sdk/gcloud/reference/auth/application-default/loginca\n", 72 | "- `gcloud auth application-default login`\n", 73 | "\n", 74 | "Authenticate to Google Earth Engine\n", 75 | "- See: https://developers.google.com/earth-engine/guides/auth\n", 76 | "- `earthengine authenticate`" 77 | ] 78 | }, 79 | { 80 | "cell_type": "markdown", 81 | "id": "0ccc41f8-4e15-4cbf-a0fc-2555c4b97fb4", 82 | "metadata": {}, 83 | "source": [ 84 | "# Setup" 85 | ] 86 | }, 87 | { 88 | "cell_type": "code", 89 | "execution_count": null, 90 | "id": "c51e5dd5-b75f-4268-9108-58582abe9481", 91 | "metadata": {}, 92 | "outputs": [], 93 | "source": [ 94 | "# Create a HyP3 instance.\n", 95 | "hyp3 = HyP3()" 96 | ] 97 | }, 98 | { 99 | "cell_type": "code", 100 | "execution_count": null, 101 | "id": "8b36274a-cd96-4e3f-8ad9-50f903970150", 102 | "metadata": {}, 103 | "outputs": [], 104 | "source": [ 105 | "# Set the logging level to display detailed information.\n", 106 | "logging.basicConfig(level=logging.INFO)" 107 | ] 108 | }, 109 | { 110 | "cell_type": "markdown", 111 | "id": "ac7f1889-9520-4eda-b9e8-53acfeeff6b5", 112 | "metadata": {}, 113 | "source": [ 114 | "# ASF HyP3 Files" 115 | ] 116 | }, 117 | { 118 | "cell_type": "markdown", 119 | "id": "6c28c1eb-72b0-4fb5-bd04-f7c1e272e450", 120 | "metadata": {}, 121 | "source": [ 122 | "[HyP3](https://hyp3-docs.asf.alaska.edu/) processing jobs can be initiated in a variety of ways, including the [Vertex](https://hyp3-docs.asf.alaska.edu/using/vertex/) web application and the [HyP3 Python SDK](https://hyp3-docs.asf.alaska.edu/using/sdk/)." 
123 | ] 124 | }, 125 | { 126 | "cell_type": "markdown", 127 | "id": "c782b715-17cf-446f-a86e-6c21d6b41e66", 128 | "metadata": {}, 129 | "source": [ 130 | "## Starting HyP3 processing\n", 131 | "\n" 132 | ] 133 | }, 134 | { 135 | "cell_type": "code", 136 | "execution_count": null, 137 | "id": "e1c6c021-75fa-4334-85b0-235570180918", 138 | "metadata": {}, 139 | "outputs": [], 140 | "source": [ 141 | "job_type = 'RTC_GAMMA'\n", 142 | "job_name = 'RTC_processing_example'\n", 143 | "granule_for_rtc = 'S1A_IW_SLC__1SSV_20150621T120220_20150621T120232_006471_008934_72D8'\n", 144 | "def submit_job():\n", 145 | " return hyp3.submit_rtc_job(granule_for_rtc, job_name)" 146 | ] 147 | }, 148 | { 149 | "cell_type": "code", 150 | "execution_count": null, 151 | "id": "96ce8cb9-bc47-4bc9-b546-6503f9149841", 152 | "metadata": {}, 153 | "outputs": [], 154 | "source": [ 155 | "# job_type = 'INSAR_GAMMA'\n", 156 | "# job_name = 'INSAR_GAMMA_processing_example'\n", 157 | "# granule1 = 'S1A_IW_SLC__1SDV_20240311T185926_20240311T185953_052938_066872_3CAD'\n", 158 | "# granule2 = 'S1A_IW_SLC__1SDV_20231206T185929_20231206T185956_051538_0638B3_78A8'\n", 159 | "# def submit_job():\n", 160 | "# return hyp3.submit_insar_job(granule1, granule2, job_name)" 161 | ] 162 | }, 163 | { 164 | "cell_type": "code", 165 | "execution_count": null, 166 | "id": "e298d6bc-904d-4e9b-891a-1d125d3a113a", 167 | "metadata": {}, 168 | "outputs": [], 169 | "source": [ 170 | "# job_type = 'INSAR_ISCE_BURST'\n", 171 | "# job_name = 'INSAR_ISCE_BURST_processing_example'\n", 172 | "# burst1 = 'S1_184906_IW1_20240104T154111_VV_3C7F-BURST'\n", 173 | "# burst2 = 'S1_184906_IW1_20240116T154110_VV_D1E5-BURST'\n", 174 | "# def submit_job():\n", 175 | "# return hyp3.submit_insar_isce_burst_job(burst1, burst2, job_name)" 176 | ] 177 | }, 178 | { 179 | "cell_type": "code", 180 | "execution_count": null, 181 | "id": "8144e6a2-ad27-40e7-8d28-5b84717c9b94", 182 | "metadata": {}, 183 | "outputs": [], 184 | "source": [ 185 | "batch_completed = hyp3.find_jobs(\n", 186 | " job_type=job_type,\n", 187 | " name=job_name\n", 188 | ")\n", 189 | "print(f'Number of {job_type} jobs = {len(batch_completed)}')" 190 | ] 191 | }, 192 | { 193 | "cell_type": "code", 194 | "execution_count": null, 195 | "id": "8ccd8b6f-66dd-4bb6-b4db-e151ebc976a3", 196 | "metadata": {}, 197 | "outputs": [], 198 | "source": [ 199 | "batch_active = core.filter_jobs(batch_completed.jobs, expired=False)\n", 200 | "print(f'Number of active {job_type} jobs = {len(batch_active)}')" 201 | ] 202 | }, 203 | { 204 | "cell_type": "code", 205 | "execution_count": null, 206 | "id": "02926d84-d493-4934-8b45-b9081be3737a", 207 | "metadata": {}, 208 | "outputs": [], 209 | "source": [ 210 | "if not batch_active:\n", 211 | " print('Job results for {job_name} were not found. 
Starting a new job.')\n", 212 | " job = submit_job()\n", 213 | " #job = hyp3.watch(job)" 214 | ] 215 | }, 216 | { 217 | "cell_type": "markdown", 218 | "id": "b28e9c05-f722-462a-a2f7-72f3312c782c", 219 | "metadata": {}, 220 | "source": [ 221 | "## Finding HyP3 Files\n", 222 | "\n", 223 | "The status of previously submitted jobs can be checked on the following page:\n", 224 | "https://search.asf.alaska.edu/#/?searchType=On%20Demand" 225 | ] 226 | }, 227 | { 228 | "cell_type": "code", 229 | "execution_count": null, 230 | "id": "1a055f18-9839-4a6c-8035-13e81ccb5d94", 231 | "metadata": {}, 232 | "outputs": [], 233 | "source": [ 234 | "batch_succeeded = [job for job in batch_active \n", 235 | " if job.to_dict()['status_code'] == 'SUCCEEDED']\n", 236 | "if len(batch_succeeded) == 0:\n", 237 | " print(f'No successful {job_type} jobs found. Please wait until one of the current active jobs finishes.')\n", 238 | "else:\n", 239 | " print(f'Number of successful {job_type} jobs = {len(batch_succeeded)}')\n", 240 | " print(f'Selecting the latest successful job.')\n", 241 | " job = batch_active[0]" 242 | ] 243 | }, 244 | { 245 | "cell_type": "code", 246 | "execution_count": null, 247 | "id": "e46ef897-3d50-4f2d-9a5b-bd8e9e850d78", 248 | "metadata": {}, 249 | "outputs": [], 250 | "source": [ 251 | "display(JSON(job.to_dict()))" 252 | ] 253 | }, 254 | { 255 | "cell_type": "markdown", 256 | "id": "b413bdfd-cd45-4981-a646-e998917df691", 257 | "metadata": {}, 258 | "source": [ 259 | "# Transfer completed jobs\n", 260 | "\n", 261 | "Create a class that can be used to transfer data between ASF HyP3, a local machine, Google Cloud Storage, and Earth Engine." 262 | ] 263 | }, 264 | { 265 | "cell_type": "code", 266 | "execution_count": null, 267 | "id": "782c340d-916a-46c2-8abb-69118fdcd090", 268 | "metadata": {}, 269 | "outputs": [], 270 | "source": [ 271 | "#| export\n", 272 | "class Transfer():\n", 273 | " def __init__(\n", 274 | " self,\n", 275 | " job_dict, # HyP3 job dictionary \n", 276 | " gcs_bucket, # GCS bucket\n", 277 | " gee_gcp_project, # GCP project used by Earth Engine\n", 278 | " gee_image_collection=None, # Name of the Earth Engine ImageCollection (optional)\n", 279 | " local_storage=None,\n", 280 | " ):\n", 281 | " self.job_dict = job_dict\n", 282 | " self.gcs_bucket = gcs_bucket\n", 283 | " self.gee_gcp_project = gee_gcp_project\n", 284 | " self.gee_image_collection = gee_image_collection\n", 285 | " if local_storage:\n", 286 | " self.tempdir = None\n", 287 | " self.local_storage = local_storage\n", 288 | " else:\n", 289 | " self.tempdir = tempfile.TemporaryDirectory() \n", 290 | " self.local_storage = self.tempdir.name\n", 291 | " logging.debug(f'created temporary directory: {self.tempdir.name}')" 292 | ] 293 | }, 294 | { 295 | "cell_type": "markdown", 296 | "id": "1d686734-d895-4518-be87-dd4e22cf609b", 297 | "metadata": {}, 298 | "source": [ 299 | "Create an Transfer class instance." 
300 | ] 301 | }, 302 | { 303 | "cell_type": "code", 304 | "execution_count": null, 305 | "id": "60ad19d5-01a9-40e2-9115-fc3ca840c417", 306 | "metadata": {}, 307 | "outputs": [], 308 | "source": [ 309 | "t = Transfer(\n", 310 | "    job_dict=job.to_dict(),\n", 311 | "    gcs_bucket='hyp3-data-staging',\n", 312 | "    gee_gcp_project='sar-asf-to-gee',\n", 313 | "    gee_image_collection=f'HyP3-{job_name}',\n", 314 | "    local_storage='temp_downloads',\n", 315 | ")" 316 | ] 317 | }, 318 | { 319 | "cell_type": "markdown", 320 | "id": "cc9d1035-0177-4c8b-8671-0509af523fb9", 321 | "metadata": {}, 322 | "source": [ 323 | "Create a method for transferring results from HyP3 to a local computer." 324 | ] 325 | }, 326 | { 327 | "cell_type": "code", 328 | "execution_count": null, 329 | "id": "fe411cbe-99ac-4197-a914-4602c41a6869", 330 | "metadata": {}, 331 | "outputs": [], 332 | "source": [ 333 | "#| export\n", 334 | "@patch\n", 335 | "def to_local(\n", 336 | "    self:Transfer,\n", 337 | "):\n", 338 | "    \"Transfer HyP3 results to local system, unzip, and update the job dictionary.\"\n", 339 | "    logging.info('Starting to_local()')\n", 340 | "    for file in self.job_dict['files']:\n", 341 | "        logging.info(f'Processing {file[\"filename\"]}')\n", 342 | "        asf_search.download_url(\n", 343 | "            url=file['url'],\n", 344 | "            path=self.local_storage,\n", 345 | "            filename=file['filename'],\n", 346 | "        )\n", 347 | "        # Unzip the file\n", 348 | "        logging.info('  Unzipping the file')\n", 349 | "        with zipfile.ZipFile(os.path.join(self.local_storage, file['filename']), 'r') as zip_ref:\n", 350 | "            zip_ref.extractall(self.local_storage)\n", 351 | "\n", 352 | "        # List the TIF files.\n", 353 | "        scene_name = file['filename'].removesuffix('.zip')\n", 354 | "        tifs = [x for x in os.listdir(\n", 355 | "            os.path.join(self.local_storage, scene_name))\n", 356 | "            if x.endswith('.tif')]\n", 357 | "\n", 358 | "        for count, tif in enumerate(tifs):\n", 359 | "            logging.info(f'  Converting to a Cloud Optimized GeoTIFF. {count + 1}/{len(tifs)}')\n", 360 | "            subprocess.run([\n", 361 | "                \"rio\",\n", 362 | "                \"cogeo\",\n", 363 | "                \"create\",\n", 364 | "                os.path.join(self.local_storage, scene_name, tif),\n", 365 | "                os.path.join(self.local_storage, scene_name, tif)\n", 366 | "            ])\n", 367 | "\n", 368 | "        tif_dict = {}\n", 369 | "        pattern = rf'^({scene_name}_(.+)\\.tif)$'\n", 370 | "        for i in tifs:\n", 371 | "            groups = re.search(pattern, i).groups()\n", 372 | "            tif_dict[groups[1]] = os.path.join(scene_name, groups[0])\n", 373 | "\n", 374 | "        file['extracted'] = tif_dict" 375 | ] 376 | }, 377 | { 378 | "cell_type": "code", 379 | "execution_count": null, 380 | "id": "d666679d-6f7f-4935-91ff-8ba8dc3f73bf", 381 | "metadata": {}, 382 | "outputs": [], 383 | "source": [ 384 | "t.to_local()" 385 | ] 386 | }, 387 | { 388 | "cell_type": "markdown", 389 | "id": "c9715313-fed7-4821-851c-db9f684a8346", 390 | "metadata": {}, 391 | "source": [ 392 | "Display the job dictionary, which now includes the list of extracted files (`root` => `files` => # => `extracted`)."
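A side note on the Cloud Optimized GeoTIFF conversion inside `to_local()` above: it shells out to the `rio cogeo create` CLI, but the same conversion can be done in-process with the `rio_cogeo` package that this notebook already imports. A minimal sketch of that alternative, with a hypothetical file path; it is not the approach used by the exported module:

```python
from rio_cogeo.cogeo import cog_translate
from rio_cogeo.profiles import cog_profiles

src = 'temp_downloads/example_scene/example_scene_unw_phase.tif'  # hypothetical path

# Rewrite the GeoTIFF in place as a deflate-compressed Cloud Optimized GeoTIFF.
cog_translate(src, src, cog_profiles.get('deflate'), in_memory=True, quiet=True)
```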
393 | ] 394 | }, 395 | { 396 | "cell_type": "code", 397 | "execution_count": null, 398 | "id": "0e5a4086-69e8-40da-afe4-3edb5df12f19", 399 | "metadata": {}, 400 | "outputs": [], 401 | "source": [ 402 | "display(JSON(t.job_dict))" 403 | ] 404 | }, 405 | { 406 | "cell_type": "markdown", 407 | "id": "e9ef4739-d762-484b-bfbc-423dfc3365d5", 408 | "metadata": {}, 409 | "source": [ 410 | "## Transfer to Google Cloud Storage" 411 | ] 412 | }, 413 | { 414 | "cell_type": "markdown", 415 | "id": "01938b7b-ffb9-460f-ab12-863d9244616b", 416 | "metadata": {}, 417 | "source": [ 418 | "Create a method for transferring results from a local computer to Google Cloud Storage." 419 | ] 420 | }, 421 | { 422 | "cell_type": "code", 423 | "execution_count": null, 424 | "id": "33878c48-e085-4732-910f-cd2930778a8e", 425 | "metadata": {}, 426 | "outputs": [], 427 | "source": [ 428 | "#| export\n", 429 | "@patch\n", 430 | "def to_gcs(\n", 431 | " self:Transfer,\n", 432 | "):\n", 433 | " logging.info('Starting to_gcs()')\n", 434 | "\n", 435 | " fs = gcsfs.GCSFileSystem(token='google_default')\n", 436 | "\n", 437 | " for file in self.job_dict['files']:\n", 438 | " for band, filename in file['extracted'].items():\n", 439 | " gcs_path = f'{self.gcs_bucket}/{filename}'\n", 440 | " if fs.exists(gcs_path):\n", 441 | " logging.info(f'GCS file already exists:\\n {gcs_path}')\n", 442 | " else:\n", 443 | " logging.info(f'Starting to transfer file to GCS:\\n {gcs_path}')\n", 444 | " # Transfer the local file to GCS.\n", 445 | " fs.put_file(\n", 446 | " lpath=f\"{self.local_storage}/{filename}\",\n", 447 | " rpath=gcs_path\n", 448 | " ) \n", 449 | " logging.info(f'Transferred file to GCS: {gcs_path}')" 450 | ] 451 | }, 452 | { 453 | "cell_type": "markdown", 454 | "id": "8c2210fb-af38-4074-bae3-4b0dbcd3b275", 455 | "metadata": {}, 456 | "source": [ 457 | "Transfer the files to Google Cloud Storage. 
If your Google Cloud authentication credentials have expired, you will get an error and will need to reauthenticate\n", 458 | "`gcloud auth application-default login`" 459 | ] 460 | }, 461 | { 462 | "cell_type": "code", 463 | "execution_count": null, 464 | "id": "0a168f76-a382-4a21-8837-ec4c970ba166", 465 | "metadata": {}, 466 | "outputs": [], 467 | "source": [ 468 | "t.to_gcs()" 469 | ] 470 | }, 471 | { 472 | "cell_type": "markdown", 473 | "id": "7af23f8a-a15e-416d-88fb-85956c394725", 474 | "metadata": {}, 475 | "source": [ 476 | "## Create a GEE Assets" 477 | ] 478 | }, 479 | { 480 | "cell_type": "code", 481 | "execution_count": null, 482 | "id": "bbad9a43-a49d-4ec8-815f-7160939210c2", 483 | "metadata": {}, 484 | "outputs": [], 485 | "source": [ 486 | "#| export\n", 487 | "@patch\n", 488 | "def create_gee_asset(\n", 489 | " self:Transfer,\n", 490 | "):\n", 491 | " \"Create an Earth Engine asset.\"\n", 492 | " logging.info(f'Starting create_gee_asset()')\n", 493 | " \n", 494 | " ee.Initialize(project=self.gee_gcp_project)\n", 495 | " \n", 496 | " core.create_gee_image_collection(self.gee_gcp_project, self.gee_image_collection)\n", 497 | "\n", 498 | " granule_names = self.job_dict['job_parameters']['granules']\n", 499 | " granules = asf_search.granule_search(granule_names)\n", 500 | "\n", 501 | " granule_times = [datetime.datetime.fromisoformat(x.properties['stopTime']) for x in granules]\n", 502 | " start_time = min(granule_times)\n", 503 | " end_time = max(granule_times)\n", 504 | " \n", 505 | " id = f\"{self.job_dict['job_id']}\"\n", 506 | "\n", 507 | " props = granules[0].properties\n", 508 | " description = (f\"{props['platform']}\"\n", 509 | " f\" - {props['processingLevel']}\"\n", 510 | " f\" - {props['beamModeType']}\")\n", 511 | " \n", 512 | " for file_dict in self.job_dict['files']:\n", 513 | " for band, filename in file_dict['extracted'].items():\n", 514 | "\n", 515 | " # Skip non-geocoded (native range-doppler coordinates) TIFFs.\n", 516 | " if filename.endswith('_rdr.tif'):\n", 517 | " continue\n", 518 | " \n", 519 | " gcs_path = f'{self.gcs_bucket}/{filename}'\n", 520 | " \n", 521 | " request = {\n", 522 | " 'type': 'IMAGE',\n", 523 | " 'bands': { # TODO: Update this once multi-band COG assets are supported\n", 524 | " 'id': band\n", 525 | " },\n", 526 | " 'gcs_location': {\n", 527 | " 'uris': [f'gs://{gcs_path}']\n", 528 | " },\n", 529 | " 'properties': {\n", 530 | " 'source': file_dict['url'],\n", 531 | " 'band': band # TODO: Remove this once multi-band COG assets are supported\n", 532 | " },\n", 533 | " 'startTime': start_time.strftime(core.FORMAT_GEE_DATETIME_STRING),\n", 534 | " 'endTime': end_time.strftime(core.FORMAT_GEE_DATETIME_STRING),\n", 535 | " 'description': description\n", 536 | " }\n", 537 | "\n", 538 | " path_parts = [\n", 539 | " 'projects',\n", 540 | " self.gee_gcp_project,\n", 541 | " 'assets',\n", 542 | " self.gee_image_collection,\n", 543 | " # TODO: Remove the band suffix once multi-band COG assets are supported\n", 544 | " f'{id}_{band}'.replace(\".\", \"_\") \n", 545 | " ]\n", 546 | " assetname = os.path.join(*[x for x in path_parts if x is not None])\n", 547 | "\n", 548 | " logging.debug(f'request = {request}')\n", 549 | " logging.debug(f'assetname = {assetname}')\n", 550 | " try:\n", 551 | " ee.data.createAsset(\n", 552 | " value=request,\n", 553 | " path=assetname\n", 554 | " ) \n", 555 | " logging.info(f'Finished creating a GEE asset:\\n {assetname}.')\n", 556 | " except ee.EEException as e:\n", 557 | " print(f'e = {e}')\n", 558 | " if \"does not 
exist or doesn't allow this operation\" in str(e):\n", 559 | " raise(e)\n", 560 | " else:\n", 561 | " raise(e) # TODO: Add logic to parse the EEException message.\n", 562 | " logging.info('GEE asset already exists. Skipping.')" 563 | ] 564 | }, 565 | { 566 | "cell_type": "code", 567 | "execution_count": null, 568 | "id": "c2b3c84f-0c22-43b4-81d6-352cbcf2df10", 569 | "metadata": {}, 570 | "outputs": [], 571 | "source": [ 572 | "t.create_gee_asset()" 573 | ] 574 | }, 575 | { 576 | "cell_type": "markdown", 577 | "id": "c92d60fd-c24b-4d6f-b443-4ecb6bb64954", 578 | "metadata": {}, 579 | "source": [ 580 | "The Google Earth Engine Code Editor can be used to visualize these assets. Here is a template script that demonstrates basic visualization:\n", 581 | "https://code.earthengine.google.com/4140085702fc842227dc641426acb983\n", 582 | "Note that you will need to update the script to reference Earth Engine assets that you have permissions to access (for example: assets that you have created)." 583 | ] 584 | }, 585 | { 586 | "cell_type": "code", 587 | "execution_count": null, 588 | "id": "2229cc5c-4390-4ff6-96f5-9c6f2b3eadc1", 589 | "metadata": {}, 590 | "outputs": [], 591 | "source": [ 592 | "#| hide\n", 593 | "import nbdev; nbdev.nbdev_export()" 594 | ] 595 | }, 596 | { 597 | "cell_type": "code", 598 | "execution_count": null, 599 | "id": "16261c4b-6499-4674-8e51-df0b8bc862a6", 600 | "metadata": {}, 601 | "outputs": [], 602 | "source": [] 603 | } 604 | ], 605 | "metadata": { 606 | "kernelspec": { 607 | "display_name": "Python 3 (ipykernel)", 608 | "language": "python", 609 | "name": "python3" 610 | } 611 | }, 612 | "nbformat": 4, 613 | "nbformat_minor": 5 614 | } 615 | --------------------------------------------------------------------------------
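The template script linked above targets the JavaScript Code Editor; the resulting assets can also be inspected from Python with the `earthengine-api`. A minimal sketch, assuming the example project and the `HyP3-{job_name}` collection naming used in this notebook:

```python
import ee

ee.Initialize(project='sar-asf-to-gee')  # example project used in this notebook

# Collection path follows the naming used above: projects/<project>/assets/HyP3-<job_name>
collection = ee.ImageCollection('projects/sar-asf-to-gee/assets/HyP3-RTC_processing_example')
print('Number of assets:', collection.size().getInfo())
print('Bands of the first asset:', collection.first().bandNames().getInfo())
```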